gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
package refdiff.evaluation;
import static refdiff.evaluation.RefactoringRelationship.*;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import refdiff.core.diff.Relationship;
/**
 * Compares oracle refactoring sets (the "expected" ground truth) against the
 * refactorings reported by one or more tools (identified by group ids),
 * computing true/false positives and false negatives, and printing summary
 * and per-commit detail reports.
 *
 * <p>Fixes over the previous revision:
 * <ul>
 * <li>String labels are compared with {@code equals} instead of {@code ==}
 * (reference comparison only worked by interning accident).</li>
 * <li>{@code computeResult} snapshots the full expected set before matched
 * entries are removed, so the ignore-FP heuristics are no longer dependent
 * on the iteration order of the actual refactorings.</li>
 * </ul>
 */
public class ResultComparator {
	
	Set<String> groupIds = new LinkedHashSet<>();
	Map<String, RefactoringSet> expectedMap = new LinkedHashMap<>();
	Map<String, RefactoringSet> notExpectedMap = new LinkedHashMap<>();
	Map<String, CompareResult> resultMap = new HashMap<>();
	// Optional textual explanations for false negatives, keyed by project/revision id.
	Map<String, Map<KeyPair, String>> fnExplanations = new HashMap<>();
	private boolean ignorePullUpToExtractedSupertype = false;
	private boolean ignoreMoveToMovedType = false;
	private boolean ignoreMoveToRenamedType = false;
	
	/** Registers the given sets as expected (ground-truth) refactorings. */
	public ResultComparator expect(RefactoringSet... sets) {
		for (RefactoringSet set : sets) {
			expectedMap.put(getProjectRevisionId(set.getProject(), set.getRevision()), set);
		}
		return this;
	}
	
	/** Registers the given sets as expected (ground-truth) refactorings. */
	public ResultComparator expect(Iterable<RefactoringSet> sets) {
		for (RefactoringSet set : sets) {
			expectedMap.put(getProjectRevisionId(set.getProject(), set.getRevision()), set);
		}
		return this;
	}
	
	/** Registers the given sets as known non-refactorings (blacklist). */
	public ResultComparator dontExpect(RefactoringSet... sets) {
		for (RefactoringSet set : sets) {
			notExpectedMap.put(getProjectRevisionId(set.getProject(), set.getRevision()), set);
		}
		return this;
	}
	
	/** Removes a project/revision from both the expected and blacklist maps. */
	public void remove(String project, String revision) {
		String id = getProjectRevisionId(project, revision);
		expectedMap.remove(id);
		notExpectedMap.remove(id);
	}
	
	/** Registers the given sets as known non-refactorings (blacklist). */
	public ResultComparator dontExpect(Iterable<RefactoringSet> sets) {
		for (RefactoringSet set : sets) {
			notExpectedMap.put(getProjectRevisionId(set.getProject(), set.getRevision()), set);
		}
		return this;
	}
	
	/** Evaluates each actual set against the oracle, under the given group id. */
	public ResultComparator compareWith(String groupId, RefactoringSet... actualArray) {
		for (RefactoringSet actual : actualArray) {
			compareWith(groupId, actual);
		}
		return this;
	}
	
	/** Evaluates each actual set against the oracle, under the given group id. */
	public ResultComparator compareWith(String groupId, Iterable<RefactoringSet> actualArray) {
		for (RefactoringSet actual : actualArray) {
			compareWith(groupId, actual);
		}
		return this;
	}
	
	/** Evaluates one actual set against the oracle and stores the result. */
	public void compareWith(String groupId, RefactoringSet actual) {
		groupIds.add(groupId);
		resultMap.put(getResultId(actual.getProject(), actual.getRevision(), groupId), computeResult(actual));
	}
	
	/**
	 * Classifies each actual refactoring as TP or FP against the expected set
	 * for the same project/revision; unmatched expected entries become FNs.
	 *
	 * @throws IllegalArgumentException if no expected set was registered for
	 *         the project/revision of {@code actual}
	 */
	public CompareResult computeResult(RefactoringSet actual) {
		List<RefactoringRelationship> truePositives = new ArrayList<>();
		List<RefactoringRelationship> falsePositives = new ArrayList<>();
		List<RefactoringRelationship> falseNegatives = new ArrayList<>();
		String revisionId = getProjectRevisionId(actual.getProject(), actual.getRevision());
		RefactoringSet expected = expectedMap.get(revisionId);
		if (expected == null) {
			throw new IllegalArgumentException("No expected refactorings registered for " + revisionId);
		}
		Set<RefactoringRelationship> expectedRefactorings = new HashSet<>(expected.getRefactorings());
		// FIX: independent snapshot of the full expected set. Previously this was
		// an alias of expectedRefactorings, from which matched TPs are removed
		// below, making the ignore-FP heuristics order-dependent.
		Set<RefactoringRelationship> expectedUnfiltered = new HashSet<>(expectedRefactorings);
		Set<RefactoringRelationship> actualRefactorings = actual.getRefactorings();
		for (RefactoringRelationship r : actualRefactorings) {
			if (expectedRefactorings.contains(r)) {
				truePositives.add(r);
				expectedRefactorings.remove(r);
			} else {
				// Optionally suppress FPs that are side effects of other expected
				// refactorings (e.g. a member "move" caused by its type being moved).
				boolean ignoreFp = ignoreMoveToMovedType && isMoveToMovedType(r, expectedUnfiltered) ||
					ignoreMoveToRenamedType && isMoveToRenamedType(r, expectedUnfiltered) ||
					ignorePullUpToExtractedSupertype && isPullUpToExtractedSupertype(r, expectedUnfiltered);
				if (!ignoreFp) {
					falsePositives.add(r);
				}
			}
		}
		// Whatever was not matched above is a false negative.
		falseNegatives.addAll(expectedRefactorings);
		return new CompareResult(truePositives, falsePositives, falseNegatives);
	}
	
	/** Total number of expected refactorings of the given types across all revisions. */
	public int getExpectedCount(EnumSet<RefactoringType> refTypesToConsider) {
		int sum = 0;
		EnumSet<RefactoringType> ignore = EnumSet.complementOf(refTypesToConsider);
		for (RefactoringSet set : expectedMap.values()) {
			sum += set.ignoring(ignore).getRefactorings().size();
		}
		return sum;
	}
	
	/** Prints, per group, overall and per-refactoring-type precision/recall lines. */
	public void printSummary(PrintStream out, EnumSet<RefactoringType> refTypesToConsider) {
		for (String groupId : groupIds) {
			CompareResult r = getCompareResult(groupId, refTypesToConsider);
			out.println("# " + groupId + " #");
			out.println("Total " + getResultLine(r.getTPCount(), r.getFPCount(), r.getFNCount()));
			for (RefactoringType refType : refTypesToConsider) {
				CompareResult resultForRefType = r.filterBy(refType);
				int tpRt = resultForRefType.getTPCount();
				int fpRt = resultForRefType.getFPCount();
				int fnRt = resultForRefType.getFNCount();
				// Skip refactoring types with no occurrences at all.
				if (tpRt > 0 || fpRt > 0 || fnRt > 0) {
					out.println(String.format("%-7s" + getResultLine(tpRt, fpRt, fnRt), refType.getAbbreviation()));
				}
			}
			out.println();
		}
		out.println();
	}
	
	/** Merges the per-revision results of a group into a single CompareResult. */
	public CompareResult getCompareResult(String groupId, EnumSet<RefactoringType> refTypesToConsider) {
		CompareResult merged = new CompareResult(new ArrayList<>(), new ArrayList<>(), new ArrayList<>());
		for (RefactoringSet expected : expectedMap.values()) {
			CompareResult result = resultMap.get(getResultId(expected.getProject(), expected.getRevision(), groupId));
			if (result != null) {
				merged.mergeWith(result.filterBy(refTypesToConsider));
			}
		}
		return merged;
	}
	
	private String getResultLine(int tp, int fp, int fn) {
		double precision = getPrecision(tp, fp, fn);
		double recall = getRecall(tp, fp, fn);
		double f1 = getF1(tp, fp, fn);
		return String.format("#: %3d  TP: %3d  FP: %3d  FN: %3d  Prec.: %.3f  Recall: %.3f  F1: %.3f", tp + fn, tp, fp, fn, precision, recall, f1);
	}
	
	// Precision = TP / (TP + FP); defined as 0 when there are no true positives.
	private static double getPrecision(int tp, int fp, int fn) {
		return tp == 0 ? 0.0 : ((double) tp / (tp + fp));
	}
	
	// Recall = TP / (TP + FN); defined as 0 when there are no true positives.
	private static double getRecall(int tp, int fp, int fn) {
		return tp == 0 ? 0.0 : ((double) tp) / (tp + fn);
	}
	
	// Harmonic mean of precision and recall; 0 when there are no true positives.
	private static double getF1(int tp, int fp, int fn) {
		double precision = ResultComparator.getPrecision(tp, fp, fn);
		double recall = ResultComparator.getRecall(tp, fp, fn);
		return tp == 0 ? 0.0 : 2.0 * precision * recall / (precision + recall);
	}
	
	/** Prints per-commit detail rows using a default row printer (label only). */
	public void printDetails(PrintStream out, EnumSet<RefactoringType> refTypesToConsider, String groupId) {
		printDetails(out, refTypesToConsider, groupId, (RefactoringSet rs, RefactoringRelationship r, String label, String cause, EvaluationDetails evaluationDetails) -> {
			out.print('\t');
			out.print(label);
		});
	}
	
	/**
	 * Prints one row per refactoring (TP, FP or FN) of each expected revision,
	 * delegating the trailing columns to {@code rowPrinter}.
	 */
	public void printDetails(PrintStream out, EnumSet<RefactoringType> refTypesToConsider, String groupId, ResultRowPrinter rowPrinter) {
		// Indexed by (expected ? 2 : 0) + (found ? 1 : 0).
		String[] labels = { "TN", "FP", "FN", "TP" };
		EnumSet<RefactoringType> ignore = EnumSet.complementOf(refTypesToConsider);
		boolean headerPrinted = false;
		for (RefactoringSet expected : expectedMap.values()) {
			Set<RefactoringRelationship> all = new HashSet<>();
			Set<RefactoringRelationship> expectedRefactorings = expected.ignoring(ignore).getRefactorings();
			String id = getProjectRevisionId(expected.getProject(), expected.getRevision());
			Set<RefactoringRelationship> notExpectedRefactorings = notExpectedMap.getOrDefault(id, new RefactoringSet(expected.getProject(), expected.getRevision())).getRefactorings();
			String header = String.format("Commit\tRef Type\tEntity before\tEntity after\t%s\tDetails", groupId);
			CompareResult result = resultMap.get(getResultId(expected.getProject(), expected.getRevision(), groupId));
			if (result != null) {
				CompareResult resultFiltered = result.filterBy(refTypesToConsider);
				all.addAll(resultFiltered.getTruePositives());
				all.addAll(resultFiltered.getFalsePositives());
				all.addAll(resultFiltered.getFalseNegatives());
			} else {
				// No result for this revision/group: every expected entry is reported.
				all.addAll(expectedRefactorings);
			}
			if (!headerPrinted) {
				out.println(header);
				headerPrinted = true;
			}
			if (!all.isEmpty()) {
				ArrayList<RefactoringRelationship> allList = new ArrayList<>();
				allList.addAll(all);
				Collections.sort(allList);
				for (RefactoringRelationship r : allList) {
					out.print(id);
					out.print('\t');
					out.print(format(r));
					if (result != null) {
						Set<RefactoringRelationship> actualRefactorings = new HashSet<>();
						actualRefactorings.addAll(result.getTruePositives());
						actualRefactorings.addAll(result.getFalsePositives());
						int correct = expectedRefactorings.contains(r) ? 2 : 0;
						int found = actualRefactorings.contains(r) ? 1 : 0;
						String label = labels[correct + found];
						String cause = "";
						EvaluationDetails evaluationDetails = findEvaluationDetails(r, expected.getRefactorings(), notExpectedRefactorings);
						// FIX: compare labels with equals, not reference identity.
						if ("FP".equals(label)) {
							cause = findFpCause(r, expected.getRefactorings(), notExpectedRefactorings, evaluationDetails);
							if (cause.equals("?")) {
								label = label + "?";
							}
						} else if ("FN".equals(label)) {
							cause = findFnCause(r, actualRefactorings, this.fnExplanations.get(getProjectRevisionId(expected.getProject(), expected.getRevision())));
						}
						rowPrinter.printDetails(expected, r, label, cause, evaluationDetails);
					}
					out.println();
				}
			}
		}
		out.println();
	}
	
	/**
	 * Prints an alternative detail report: one row per refactoring with a
	 * TP/FP/FN/TN label column for every registered group, plus evaluator
	 * information when available.
	 */
	public void printDetails2(PrintStream out, EnumSet<RefactoringType> refTypesToConsider) {
		// Indexed by (expected ? 2 : 0) + (found ? 1 : 0).
		String[] labels = { "TN", "FP", "FN", "TP" };
		EnumSet<RefactoringType> ignore = EnumSet.complementOf(refTypesToConsider);
		String header = String.format("Commit\tRef Type\tDescription\tRelationship\tCst Node Before\tCst Node After\tExpected?");
		for (String groupId : this.groupIds) {
			header += "\t" + groupId;
		}
		header += "\tEvaluators\tEvaluators classification";
		out.println(header);
		for (RefactoringSet expected : expectedMap.values()) {
			Set<RefactoringRelationship> all = new HashSet<>();
			String id = getProjectRevisionId(expected.getProject(), expected.getRevision());
			Set<RefactoringRelationship> expectedRefactorings = expected.ignoring(ignore).getRefactorings();
			Set<RefactoringRelationship> notExpectedRefactorings = notExpectedMap.getOrDefault(id, new RefactoringSet(expected.getProject(), expected.getRevision())).getRefactorings();
			// Union of everything any tool reported (TPs and FPs) ...
			for (String groupId : this.groupIds) {
				CompareResult result = resultMap.get(getResultId(expected.getProject(), expected.getRevision(), groupId));
				if (result != null) {
					CompareResult resultFiltered = result.filterBy(refTypesToConsider);
					all.addAll(resultFiltered.getTruePositives());
					all.addAll(resultFiltered.getFalsePositives());
				}
			}
			// ... plus everything expected.
			all.addAll(expectedRefactorings);
			if (!all.isEmpty()) {
				ArrayList<RefactoringRelationship> allList = new ArrayList<>();
				allList.addAll(all);
				Collections.sort(allList);
				for (RefactoringRelationship r : allList) {
					int correct = expectedRefactorings.contains(r) ? 2 : 0;
					out.print(id);
					out.print('\t');
					out.print(r.getRefactoringType().getDisplayName());
					out.print('\t');
					String refDescriptionFromOracle = (correct > 0 ? expectedRefactorings : notExpectedRefactorings).stream().filter(i -> i.equals(r)).findFirst().map(i -> i.getDescription()).orElse("");
					out.print(refDescriptionFromOracle);
					out.print('\t');
					Relationship cstRel = r.getCstRelationship();
					if (cstRel != null) {
						out.print(cstRel.getStandardDescription());
					} else {
						out.print("\t\t");
					}
					out.print('\t');
					out.print(correct > 0 ? "T" : "F");
					for (String groupId : this.groupIds) {
						CompareResult result = resultMap.get(getResultId(expected.getProject(), expected.getRevision(), groupId));
						out.print('\t');
						if (result != null) {
							Set<RefactoringRelationship> actualRefactorings = new HashSet<>();
							actualRefactorings.addAll(result.getTruePositives());
							actualRefactorings.addAll(result.getFalsePositives());
							int found = actualRefactorings.contains(r) ? 1 : 0;
							String label = labels[correct + found];
							out.print(label);
						}
					}
					EvaluationDetails evaluationDetails = findEvaluationDetails(r, expected.getRefactorings(), notExpectedRefactorings);
					if (evaluationDetails != null && evaluationDetails.evaluators != null) {
						String evaluators = evaluationDetails.evaluators;
						out.print('\t');
						out.print(evaluationDetails.evaluators);
						out.print('\t');
						String classification;
						// Pick the pair of results matching the evaluator pair.
						if ("Gustavo/Ricardo".equals(evaluators)) {
							classification = evaluationDetails.resultA + "/" + evaluationDetails.resultB;
						} else if ("Gustavo/Danilo".equals(evaluators)) {
							classification = evaluationDetails.resultA + "/" + evaluationDetails.resultC;
						} else /* "Ricardo/Danilo" */ {
							classification = evaluationDetails.resultB + "/" + evaluationDetails.resultC;
						}
						out.print(classification.replace("?", ""));
					} else {
						out.print('\t');
						out.print('\t');
					}
					out.println();
				}
			}
		}
		out.println();
	}
	
	/** Tab-separated rendering: type, entity before, entity after. */
	public static String format(RefactoringRelationship r) {
		return String.format("%s\t%s\t%s", r.getRefactoringType().getDisplayName(), r.getEntityBefore(), r.getEntityAfter());
	}
	
	/**
	 * Attempts to explain a false positive; returns "?" when no known cause
	 * applies.
	 */
	private String findFpCause(RefactoringRelationship r, Set<RefactoringRelationship> expectedUnfiltered, Set<RefactoringRelationship> blacklisted, EvaluationDetails evaluationDetails) {
		if (evaluationDetails != null) {
			if (evaluationDetails.commentFinal != null) {
				return evaluationDetails.commentFinal;
			}
		}
		if (isPullUpToExtractedSupertype(r, expectedUnfiltered)) {
			return "<PullUpToExtractedSupertype>";
		}
		if (isMoveToRenamedType(r, expectedUnfiltered)) {
			return "<MoveToRenamedType>";
		}
		if (isMoveToMovedType(r, expectedUnfiltered)) {
			return "<MoveToMovedType>";
		}
		if (r.getRefactoringType() == RefactoringType.MOVE_ATTRIBUTE || r.getRefactoringType() == RefactoringType.MOVE_OPERATION) {
			// A reported "move" may actually be an expected pull-up/push-down.
			if (expectedUnfiltered.contains(new RefactoringRelationship(RefactoringType.PULL_UP_ATTRIBUTE, (r.getEntityBefore()), (r.getEntityAfter())))) {
				return "<ShouldBePullUp>";
			}
			if (expectedUnfiltered.contains(new RefactoringRelationship(RefactoringType.PUSH_DOWN_ATTRIBUTE, (r.getEntityBefore()), (r.getEntityAfter())))) {
				return "<ShouldBePushDown>";
			}
			if (expectedUnfiltered.contains(new RefactoringRelationship(RefactoringType.PULL_UP_OPERATION, (r.getEntityBefore()), (r.getEntityAfter())))) {
				return "<ShouldBePullUp>";
			}
			if (expectedUnfiltered.contains(new RefactoringRelationship(RefactoringType.PUSH_DOWN_OPERATION, (r.getEntityBefore()), (r.getEntityAfter())))) {
				return "<ShouldBePushDown>";
			}
		}
		if (blacklisted.contains(r)) {
			return "<Blacklist>";
		}
		return "?";
	}
	
	/** Looks up the evaluation details of {@code r} in either oracle set, if present. */
	private EvaluationDetails findEvaluationDetails(RefactoringRelationship r, Set<RefactoringRelationship> expected, Set<RefactoringRelationship> blacklisted) {
		if (expected.contains(r)) {
			return expected.stream().filter(br -> br.equals(r)).findFirst().get().getEvaluationDetails();
		}
		if (blacklisted.contains(r)) {
			return blacklisted.stream().filter(br -> br.equals(r)).findFirst().get().getEvaluationDetails();
		}
		return null;
	}
	
	/** Returns the registered FN explanation for {@code r}, or "?" when unknown. */
	private String findFnCause(RefactoringRelationship r, Set<RefactoringRelationship> actualRefactorings, Map<KeyPair, String> fnCauseMap) {
		if (fnCauseMap != null) {
			KeyPair keyPair = new KeyPair(r.getEntityBefore(), r.getEntityAfter());
			return fnCauseMap.getOrDefault(keyPair, "?");
		}
		return "?";
	}
	
	// True when a reported pull-up targets a supertype that the oracle says was extracted.
	private boolean isPullUpToExtractedSupertype(RefactoringRelationship r, Set<RefactoringRelationship> expectedUnfiltered) {
		if (r.getRefactoringType() == RefactoringType.PULL_UP_ATTRIBUTE || r.getRefactoringType() == RefactoringType.PULL_UP_OPERATION) {
			if (expectedUnfiltered.contains(new RefactoringRelationship(RefactoringType.EXTRACT_SUPERCLASS, parentOf(r.getEntityBefore()), parentOf(r.getEntityAfter())))) {
				return true;
			}
			if (expectedUnfiltered.contains(new RefactoringRelationship(RefactoringType.EXTRACT_INTERFACE, parentOf(r.getEntityBefore()), parentOf(r.getEntityAfter())))) {
				return true;
			}
		}
		return false;
	}
	
	// True when a reported member move is explained by a rename of its (outer) type.
	private boolean isMoveToRenamedType(RefactoringRelationship r, Set<RefactoringRelationship> expectedUnfiltered) {
		if (r.getRefactoringType() == RefactoringType.MOVE_OPERATION || r.getRefactoringType() == RefactoringType.MOVE_ATTRIBUTE) {
			if (expectedUnfiltered.contains(new RefactoringRelationship(RefactoringType.RENAME_CLASS, parentOf(r.getEntityBefore()), parentOf(r.getEntityAfter())))) {
				return true;
			}
			if (expectedUnfiltered.contains(new RefactoringRelationship(RefactoringType.RENAME_CLASS, parentOf(parentOf(r.getEntityBefore())), parentOf(parentOf(r.getEntityAfter()))))) {
				return true;
			}
		}
		return false;
	}
	
	// True when a reported member move is explained by a move of its (outer) type.
	private boolean isMoveToMovedType(RefactoringRelationship r, Set<?> expectedUnfiltered) {
		if (r.getRefactoringType() == RefactoringType.MOVE_OPERATION || r.getRefactoringType() == RefactoringType.MOVE_ATTRIBUTE) {
			if (expectedUnfiltered.contains(new RefactoringRelationship(RefactoringType.MOVE_CLASS, parentOf(r.getEntityBefore()), parentOf(r.getEntityAfter())))) {
				return true;
			}
			if (expectedUnfiltered.contains(new RefactoringRelationship(RefactoringType.MOVE_CLASS, parentOf(parentOf(r.getEntityBefore())), parentOf(parentOf(r.getEntityAfter()))))) {
				return true;
			}
			if (expectedUnfiltered.contains(new RefactoringRelationship(RefactoringType.MOVE_CLASS_FOLDER, parentOf(r.getEntityBefore()), parentOf(r.getEntityAfter())))) {
				return true;
			}
		}
		return false;
	}
	
	// Canonical id of a commit: strips a trailing ".git" from the project URL.
	private String getProjectRevisionId(String project, String revision) {
		if (project.endsWith(".git")) {
			return project.substring(0, project.length() - 4) + "/commit/" + revision;
		}
		return project + "/commit/" + revision;
	}
	
	// Canonical id of a (commit, group) pair.
	private String getResultId(String project, String revision, String groupId) {
		if (project.endsWith(".git")) {
			return project.substring(0, project.length() - 4) + "/commit/" + revision + ";" + groupId;
		}
		return project + "/commit/" + revision + ";" + groupId;
	}
	
	/**
	 * Holds the TP/FP/FN triples of a comparison and derives precision,
	 * recall and F1 from them.
	 */
	public static class CompareResult {
		private final Collection<RefactoringRelationship> truePositives;
		private final Collection<RefactoringRelationship> falsePositives;
		private final Collection<RefactoringRelationship> falseNegatives;
		
		public CompareResult(Collection<RefactoringRelationship> truePositives, Collection<RefactoringRelationship> falsePositives, Collection<RefactoringRelationship> falseNegatives) {
			this.truePositives = truePositives;
			this.falsePositives = falsePositives;
			this.falseNegatives = falseNegatives;
		}
		
		public int getTPCount() {
			return this.truePositives.size();
		}
		
		public int getFPCount() {
			return this.falsePositives.size();
		}
		
		public int getFNCount() {
			return this.falseNegatives.size();
		}
		
		public double getPrecision() {
			int tp = this.truePositives.size();
			int fp = this.falsePositives.size();
			int fn = this.falseNegatives.size();
			return ResultComparator.getPrecision(tp, fp, fn);
		}
		
		public double getRecall() {
			int tp = this.truePositives.size();
			int fp = this.falsePositives.size();
			int fn = this.falseNegatives.size();
			return ResultComparator.getRecall(tp, fp, fn);
		}
		
		public double getF1() {
			int tp = this.truePositives.size();
			int fp = this.falsePositives.size();
			int fn = this.falseNegatives.size();
			return ResultComparator.getF1(tp, fp, fn);
		}
		
		/** Returns a copy restricted to the given refactoring types. */
		public CompareResult filterBy(RefactoringType... rts) {
			EnumSet<RefactoringType> refTypes = EnumSet.noneOf(RefactoringType.class);
			refTypes.addAll(Arrays.asList(rts));
			return filterBy(refTypes);
		}
		
		/** Returns a copy restricted to the given refactoring types. */
		public CompareResult filterBy(EnumSet<RefactoringType> refTypes) {
			return new CompareResult(
				this.truePositives.stream().filter(r -> isOneOf(r, refTypes)).collect(Collectors.toList()),
				this.falsePositives.stream().filter(r -> isOneOf(r, refTypes)).collect(Collectors.toList()),
				this.falseNegatives.stream().filter(r -> isOneOf(r, refTypes)).collect(Collectors.toList()));
		}
		
		private boolean isOneOf(RefactoringRelationship r, EnumSet<RefactoringType> rts) {
			return rts.contains(r.getRefactoringType());
		}
		
		/** Adds the other result's entries into this (mutates this instance). */
		public void mergeWith(CompareResult other) {
			this.truePositives.addAll(other.truePositives);
			this.falsePositives.addAll(other.falsePositives);
			this.falseNegatives.addAll(other.falseNegatives);
		}
		
		public Collection<RefactoringRelationship> getTruePositives() {
			return truePositives;
		}
		
		public Collection<RefactoringRelationship> getFalsePositives() {
			return falsePositives;
		}
		
		public Collection<RefactoringRelationship> getFalseNegatives() {
			return falseNegatives;
		}
	}
	
	public boolean isIgnorePullUpToExtractedSupertype() {
		return ignorePullUpToExtractedSupertype;
	}
	
	public void setIgnorePullUpToExtractedSupertype(boolean ignorePullUpToExtractedSupertype) {
		this.ignorePullUpToExtractedSupertype = ignorePullUpToExtractedSupertype;
	}
	
	public boolean isIgnoreMoveToMovedType() {
		return ignoreMoveToMovedType;
	}
	
	public void setIgnoreMoveToMovedType(boolean ignoreMoveToMovedType) {
		this.ignoreMoveToMovedType = ignoreMoveToMovedType;
	}
	
	public boolean isIgnoreMoveToRenamedType() {
		return ignoreMoveToRenamedType;
	}
	
	public void setIgnoreMoveToRenamedType(boolean ignoreMoveToRenamedType) {
		this.ignoreMoveToRenamedType = ignoreMoveToRenamedType;
	}
	
	/**
	 * Registers false-negative explanations for a commit, keeping only
	 * entries whose entity pair actually occurs in the expected set.
	 */
	public void addFnExplanations(String project, String commit, Map<KeyPair, String> explanations) {
		String id = getProjectRevisionId(project, commit);
		RefactoringSet expected = this.expectedMap.get(id);
		if (expected != null) {
			Set<KeyPair> keyPairSet = expected.getRefactorings().stream()
				.map(r -> new KeyPair(r.getEntityBefore(), r.getEntityAfter()))
				.collect(Collectors.toSet());
			Map<KeyPair, String> filteredMap = explanations.entrySet().stream()
				.filter(e -> keyPairSet.contains(e.getKey()))
				.collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
			this.fnExplanations.put(id, filteredMap);
		}
	}
}
| |
package com.philihp.weblabora.model;
import static com.philihp.weblabora.model.Wheel.Position.D;
import static com.philihp.weblabora.model.Wheel.Position.F;
import java.util.ArrayList;
import java.util.List;
import com.philihp.weblabora.model.building.Building;
import com.philihp.weblabora.model.building.BuildingEnum;
/**
 * Board configuration for a four-player, short-length game on the France map.
 * Encodes the wheel values, round/settlement schedule and building filters
 * specific to this mode.
 */
public class BoardModeFourShortFrance extends BoardMode {

	private static final GamePlayers PLAYERS = GamePlayers.FOUR;
	private static final GameLength LENGTH = GameLength.SHORT;
	private static final GameCountry COUNTRY = GameCountry.FRANCE;

	protected BoardModeFourShortFrance(Board board) {
		super(board);
	}

	@Override
	protected int[] getWheelArmValues() {
		// Production values for each wheel arm position in this mode.
		return new int[] { 0, 2, 3, 4, 5, 6, 6, 7, 7, 8, 8, 9, 10 };
	}

	@Override
	public List<Building> roundBuildings() {
		List<Building> available = new ArrayList<Building>();
		for (BuildingEnum id : BuildingEnum.values()) {
			// Only building ids whose name starts with 'G' or 'F' apply to this mode.
			char prefix = id.toString().charAt(0);
			if (prefix != 'G' && prefix != 'F') {
				continue;
			}
			Building candidate = id.getInstance();
			boolean stageMatches = board.getSettlementRound().equals(candidate.getStage());
			// Deliberately a strict <, not <= (see original rules note).
			boolean playerCountOk = candidate.getPlayers().ordinal() < PLAYERS.ordinal();
			if (stageMatches && playerCountOk) {
				available.add(candidate);
			}
		}
		return available;
	}

	@Override
	public List<Building> futureBuildings() {
		List<Building> upcoming = new ArrayList<Building>();
		for (BuildingEnum id : BuildingEnum.values()) {
			char prefix = id.toString().charAt(0);
			if (prefix != 'G' && prefix != 'F') {
				continue;
			}
			Building candidate = id.getInstance();
			boolean notBuiltYet = !board.getAllBuildings().containsKey(id);
			// Unlike roundBuildings(), this uses <= for the player-count bound.
			if (notBuiltYet && candidate.getPlayers().ordinal() <= PLAYERS.ordinal()) {
				upcoming.add(candidate);
			}
		}
		return upcoming;
	}

	@Override
	public boolean isExtraRound(int round) {
		return round >= 24;
	}

	@Override
	public SettlementRound roundBeforeSettlement(int round) {
		// Settlement phases occur after rounds 2, 4, 6, 8 and 12 in this mode.
		if (round == 2) {
			return SettlementRound.A;
		}
		if (round == 4) {
			return SettlementRound.B;
		}
		if (round == 6) {
			return SettlementRound.C;
		}
		if (round == 8) {
			return SettlementRound.D;
		}
		if (round == 12) {
			return SettlementRound.E;
		}
		return null;
	}

	@Override
	public void postMove() {
		board.nextActivePlayer();
		board.setMoveInRound(board.getMoveInRound() + 1);
		// Board state is re-read after each post* call on purpose: those calls
		// may change the move counter or the settling flag.
		if (board.isExtraRound() && board.getMoveInRound() == board.players.length + 1) {
			board.postExtraRound();
		}
		if (board.isSettling() && board.getMoveInRound() == board.players.length + 1) {
			board.postSettlement();
		} else if (!board.isSettling() && board.getMoveInRound() == board.players.length + 2) {
			board.postRound();
		}
	}

	@Override
	public void postRound() {
		board.setMoveInRound(1);
		if (isExtraRound(board.getRound())) {
			board.setRound(board.getRound() + 1);
			board.setExtraRound(true);
		} else if (board.isRoundBeforeSettlement(board.getRound())) {
			// Enter a settlement phase instead of advancing the round counter.
			board.setSettling(true);
		} else {
			board.setRound(board.getRound() + 1);
		}
		// Pass the starting marker to the next player.
		board.setStartingPlayer(board.getStartingPlayer() + 1);
		board.getStartingMarker().setOwner(board.players[board.getStartingPlayer()]);
	}

	@Override
	public String getMoveName() {
		if (board.isExtraRound()) {
			return "extra";
		}
		String[] ordinals = { "first", "second", "third", "fourth", "last" };
		int move = board.getMoveInRound();
		if (move >= 1 && move <= ordinals.length) {
			return ordinals[move - 1];
		}
		throw new RuntimeException("Illegal Move Number " + board.getMoveInRound());
	}

	@Override
	public int grapeActiveOnRound() {
		return 4;
	}

	@Override
	public int stoneActiveOnRound() {
		return 6;
	}

	@Override
	public void setWheelTokens(Wheel wheel) {
		// Initial positions of the grape and stone tokens on the wheel.
		wheel.grape.setPosition(D);
		wheel.stone.setPosition(F);
	}

	@Override
	public GamePlayers getPlayers() {
		return PLAYERS;
	}

	@Override
	public GameCountry getCountry() {
		return COUNTRY;
	}

	@Override
	public GameLength getLength() {
		return LENGTH;
	}

	@Override
	public void customizeLandscape(Landscape landscape) {
		// Clear the two leftmost terrain spaces before applying the defaults.
		landscape.getTerrainAt(new Coordinate(0, 0)).setTerrainUse(TerrainUseEnum.EMPTY);
		landscape.getTerrainAt(new Coordinate(1, 0)).setTerrainUse(TerrainUseEnum.EMPTY);
		super.customizeLandscape(landscape);
	}

	@Override
	public boolean isProductionBonusActive() {
		return true;
	}

	@Override
	public boolean isSecondLayBrotherUsed() {
		return false;
	}

	@Override
	public int getMovesInRound() {
		return 5;
	}

	@Override
	public int getLastSettlementAfterRound() {
		return 13;
	}

	@Override
	protected boolean isRoundStartBonusActive() {
		return true;
	}

	@Override
	public boolean isGrapesUsed() {
		return true;
	}

	@Override
	boolean isNeutralBuildingPhase() {
		return false;
	}

	@Override
	public boolean isStoneUsed() {
		return true;
	}
}
| |
/*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.druid.query.aggregation.datasketches.theta;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.io.Files;
import com.metamx.common.guava.Sequence;
import com.metamx.common.guava.Sequences;
import com.yahoo.sketches.theta.Sketch;
import com.yahoo.sketches.theta.Sketches;
import io.druid.data.input.MapBasedRow;
import io.druid.granularity.QueryGranularity;
import io.druid.query.Result;
import io.druid.query.aggregation.AggregationTestHelper;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.PostAggregator;
import io.druid.query.aggregation.post.FieldAccessPostAggregator;
import io.druid.query.select.SelectResultValue;
import org.joda.time.DateTime;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import java.io.File;
import java.io.IOException;
import java.nio.charset.Charset;
import java.util.List;
/**
*/
public class SketchAggregationTest
{
private final AggregationTestHelper helper;
@Rule
public final TemporaryFolder tempFolder = new TemporaryFolder();
public SketchAggregationTest()
{
	// Register the sketch Jackson modules before building the group-by helper.
	SketchModule sketchModule = new SketchModule();
	sketchModule.configure(null);
	helper = AggregationTestHelper.createGroupByQueryAggregationTestHelper(sketchModule.getJacksonModules(), tempFolder);
}
@Test
public void testSimpleDataIngestAndGpByQuery() throws Exception
{
	// Ingest the simple TSV fixture and run the group-by query from resources.
	Sequence resultSequence = helper.createIndexAndRunQueryOnSegment(
		new File(this.getClass().getClassLoader().getResource("simple_test_data.tsv").getFile()),
		readFileFromClasspathAsString("simple_test_data_record_parser.json"),
		readFileFromClasspathAsString("simple_test_data_aggregators.json"),
		0,
		QueryGranularity.NONE,
		5,
		readFileFromClasspathAsString("simple_test_data_group_by_query.json")
	);
	List rows = Sequences.toList(resultSequence, Lists.newArrayList());
	Assert.assertEquals(1, rows.size());
	// All sketch estimates collapse to 50 distinct values; AnotB and the
	// non-existing column are empty.
	MapBasedRow expectedRow = new MapBasedRow(
		DateTime.parse("2014-10-19T00:00:00.000Z"),
		ImmutableMap
			.<String, Object>builder()
			.put("sketch_count", 50.0)
			.put("sketchEstimatePostAgg", 50.0)
			.put("sketchUnionPostAggEstimate", 50.0)
			.put("sketchIntersectionPostAggEstimate", 50.0)
			.put("sketchAnotBPostAggEstimate", 0.0)
			.put("non_existing_col_validation", 0.0)
			.build()
	);
	Assert.assertEquals(expectedRow, rows.get(0));
}
@Test
public void testSimpleDataIngestAndSelectQuery() throws Exception
{
	// A select query needs its own helper; the shared one is group-by only.
	SketchModule sketchModule = new SketchModule();
	sketchModule.configure(null);
	AggregationTestHelper selectHelper = AggregationTestHelper.createSelectQueryAggregationTestHelper(
		sketchModule.getJacksonModules(),
		tempFolder
	);
	Sequence resultSequence = selectHelper.createIndexAndRunQueryOnSegment(
		new File(this.getClass().getClassLoader().getResource("simple_test_data.tsv").getFile()),
		readFileFromClasspathAsString("simple_test_data_record_parser.json"),
		readFileFromClasspathAsString("simple_test_data_aggregators.json"),
		0,
		QueryGranularity.NONE,
		5000,
		readFileFromClasspathAsString("select_query.json")
	);
	Result<SelectResultValue> result = (Result<SelectResultValue>) Iterables.getOnlyElement(Sequences.toList(resultSequence, Lists.newArrayList()));
	Assert.assertEquals(new DateTime("2014-10-20T00:00:00.000Z"), result.getTimestamp());
	Assert.assertEquals(100, result.getValue().getEvents().size());
	// Raw sketch column comes back base64-encoded in select results.
	Assert.assertEquals("AgMDAAAazJMBAAAAAACAPzz9j7pWTMdR", result.getValue().getEvents().get(0).getEvent().get("pty_country"));
}
@Test
public void testSketchDataIngestAndGpByQuery() throws Exception
{
	// Same flow as the simple-data test, but ingesting pre-built sketches.
	Sequence resultSequence = helper.createIndexAndRunQueryOnSegment(
		new File(SketchAggregationTest.class.getClassLoader().getResource("sketch_test_data.tsv").getFile()),
		readFileFromClasspathAsString("sketch_test_data_record_parser.json"),
		readFileFromClasspathAsString("sketch_test_data_aggregators.json"),
		0,
		QueryGranularity.NONE,
		5,
		readFileFromClasspathAsString("sketch_test_data_group_by_query.json")
	);
	List rows = Sequences.toList(resultSequence, Lists.newArrayList());
	Assert.assertEquals(1, rows.size());
	MapBasedRow expectedRow = new MapBasedRow(
		DateTime.parse("2014-10-19T00:00:00.000Z"),
		ImmutableMap
			.<String, Object>builder()
			.put("sids_sketch_count", 50.0)
			.put("sketchEstimatePostAgg", 50.0)
			.put("sketchUnionPostAggEstimate", 50.0)
			.put("sketchIntersectionPostAggEstimate", 50.0)
			.put("sketchAnotBPostAggEstimate", 0.0)
			.put("non_existing_col_validation", 0.0)
			.build()
	);
	Assert.assertEquals(expectedRow, rows.get(0));
}
@Test
public void testThetaCardinalityOnSimpleColumn() throws Exception
{
	// Ingest with only a plain count aggregator; the sketch aggregations are
	// applied at query time by the group-by query.
	String countAggregatorJson = "["
		+ "  {"
		+ "    \"type\": \"count\","
		+ "    \"name\": \"count\""
		+ "  }"
		+ "]";
	Sequence resultSequence = helper.createIndexAndRunQueryOnSegment(
		new File(SketchAggregationTest.class.getClassLoader().getResource("simple_test_data.tsv").getFile()),
		readFileFromClasspathAsString("simple_test_data_record_parser2.json"),
		countAggregatorJson,
		0,
		QueryGranularity.NONE,
		5,
		readFileFromClasspathAsString("simple_test_data_group_by_query.json")
	);
	List rows = Sequences.toList(resultSequence, Lists.newArrayList());
	Assert.assertEquals(1, rows.size());
	MapBasedRow expectedRow = new MapBasedRow(
		DateTime.parse("2014-10-19T00:00:00.000Z"),
		ImmutableMap
			.<String, Object>builder()
			.put("sketch_count", 50.0)
			.put("sketchEstimatePostAgg", 50.0)
			.put("sketchUnionPostAggEstimate", 50.0)
			.put("sketchIntersectionPostAggEstimate", 50.0)
			.put("sketchAnotBPostAggEstimate", 0.0)
			.put("non_existing_col_validation", 0.0)
			.build()
	);
	Assert.assertEquals(expectedRow, rows.get(0));
}
@Test
public void testSketchMergeAggregatorFactorySerde() throws Exception
{
    // Round-trip the factory through JSON for each combination of the
    // shouldFinalize / isInputThetaSketch flags used by the original test.
    final Boolean[][] flagCombinations = {
        {null, null},
        {false, true},
        {true, false},
    };
    for (final Boolean[] flags : flagCombinations) {
        assertAggregatorFactorySerde(new SketchMergeAggregatorFactory("name", "fieldName", 16, flags[0], flags[1]));
    }
}
@Test
public void testSketchMergeFinalization() throws Exception
{
    final Sketch emptySketch = Sketches.updateSketchBuilder().build(128);
    // Default (null) finalization behaves like finalize == true: the empty
    // sketch finalizes to an estimate of 0.0.
    SketchMergeAggregatorFactory factory = new SketchMergeAggregatorFactory("name", "fieldName", 16, null, null);
    Assert.assertEquals(0.0, ((Double) factory.finalizeComputation(emptySketch)).doubleValue(), 0.0001);
    factory = new SketchMergeAggregatorFactory("name", "fieldName", 16, true, null);
    Assert.assertEquals(0.0, ((Double) factory.finalizeComputation(emptySketch)).doubleValue(), 0.0001);
    // With finalization disabled the sketch object itself must be passed through.
    factory = new SketchMergeAggregatorFactory("name", "fieldName", 16, false, null);
    Assert.assertEquals(emptySketch, factory.finalizeComputation(emptySketch));
}
// Serializes the factory to JSON and back, asserting the round-tripped
// instance equals the original.
private void assertAggregatorFactorySerde(AggregatorFactory agg) throws Exception
{
    final String json = helper.getObjectMapper().writeValueAsString(agg);
    final AggregatorFactory roundTripped = helper.getObjectMapper().readValue(json, AggregatorFactory.class);
    Assert.assertEquals(agg, roundTripped);
}
@Test
public void testSketchEstimatePostAggregatorSerde() throws Exception
{
    // An estimate post-aggregator wrapping a field access must survive a JSON round trip.
    final PostAggregator estimate = new SketchEstimatePostAggregator(
        "name",
        new FieldAccessPostAggregator("name", "fieldName")
    );
    assertPostAggregatorSerde(estimate);
}
@Test
public void testSketchSetPostAggregatorSerde() throws Exception
{
    // A set-operation (INTERSECT) post-aggregator over two field accesses
    // must survive a JSON round trip.
    final List<PostAggregator> fields = Lists.<PostAggregator>newArrayList(
        new FieldAccessPostAggregator("name1", "fieldName1"),
        new FieldAccessPostAggregator("name2", "fieldName2")
    );
    assertPostAggregatorSerde(new SketchSetPostAggregator("name", "INTERSECT", null, fields));
}
// Serializes the post-aggregator to JSON and back, asserting the
// round-tripped instance equals the original.
private void assertPostAggregatorSerde(PostAggregator agg) throws Exception
{
    final String json = helper.getObjectMapper().writeValueAsString(agg);
    final PostAggregator roundTripped = helper.getObjectMapper().readValue(json, PostAggregator.class);
    Assert.assertEquals(agg, roundTripped);
}
/**
 * Reads the named classpath resource fully into a String, decoding it as UTF-8.
 *
 * @param fileName resource name, resolved via this test class's class loader
 * @return the resource contents as a UTF-8 string
 * @throws IOException if the resource cannot be read
 */
public static final String readFileFromClasspathAsString(String fileName) throws IOException
{
    // StandardCharsets.UTF_8 avoids the runtime charset-name lookup performed
    // by Charset.forName("UTF-8"). Fully qualified so no new import is needed.
    return Files.asCharSource(
        new File(SketchAggregationTest.class.getClassLoader().getResource(fileName).getFile()),
        java.nio.charset.StandardCharsets.UTF_8
    ).read();
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.vfs2.provider.mime;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Enumeration;
import java.util.List;
import java.util.Map;
import javax.mail.Header;
import javax.mail.Message;
import javax.mail.MessagingException;
import javax.mail.Multipart;
import javax.mail.Part;
import javax.mail.internet.MimeMultipart;
import org.apache.commons.vfs2.FileContentInfoFactory;
import org.apache.commons.vfs2.FileObject;
import org.apache.commons.vfs2.FileSystemException;
import org.apache.commons.vfs2.FileType;
import org.apache.commons.vfs2.NameScope;
import org.apache.commons.vfs2.provider.AbstractFileName;
import org.apache.commons.vfs2.provider.AbstractFileObject;
import org.apache.commons.vfs2.provider.UriParser;
import org.apache.commons.vfs2.util.FileObjectUtils;
/**
* A part of a MIME message.
*/
public class MimeFileObject extends AbstractFileObject<MimeFileSystem> implements FileObject {

    /** The MIME part this file object is attached to; null while detached. */
    private Part part;

    /** Lazily built header map; reset whenever the part changes. */
    private Map<String, Object> attributeMap;

    protected MimeFileObject(final AbstractFileName name, final Part part, final MimeFileSystem fileSystem)
            throws FileSystemException {
        super(name, fileSystem);
        setPart(part);
    }

    /**
     * Attaches this file object to its file resource.
     */
    @Override
    protected void doAttach() throws Exception {
        if (part == null) {
            if (!getName().equals(getFileSystem().getRootName())) {
                // Non-root objects resolve their part from the parent's multipart content.
                final MimeFileObject foParent = (MimeFileObject) FileObjectUtils.getAbstractFileObject(getParent());
                setPart(foParent.findPart(getName().getBaseName()));
                return;
            }
            // Root object: the part is the message itself.
            setPart(((MimeFileSystem) getFileSystem()).createCommunicationLink());
        }
    }

    /**
     * Finds a child part of this (multipart) part by name. Names carrying the
     * {@link MimeFileSystem#NULL_BP_NAME} prefix are resolved by body-part index,
     * all other names by their attachment file name.
     *
     * @param partName base name of the child to look up
     * @return the matching part, or null if this part does not exist, is not
     *         multipart, or has no such child
     */
    private Part findPart(final String partName) throws Exception {
        if (getType() == FileType.IMAGINARY) {
            // not existent
            return null;
        }
        if (isMultipart()) {
            final Multipart multipart = (Multipart) part.getContent();
            if (partName.startsWith(MimeFileSystem.NULL_BP_NAME)) {
                final int partNumber = Integer.parseInt(partName.substring(MimeFileSystem.NULL_BP_NAME.length()), 10);
                if (partNumber < 0 || partNumber + 1 > multipart.getCount()) {
                    // non existent
                    return null;
                }
                return multipart.getBodyPart(partNumber);
            }
            for (int i = 0; i < multipart.getCount(); i++) {
                final Part childPart = multipart.getBodyPart(i);
                if (partName.equals(childPart.getFileName())) {
                    return childPart;
                }
            }
        }
        return null;
    }

    @Override
    protected void doDetach() throws Exception {
        // Nothing to release; the part is looked up again on the next attach.
    }

    /**
     * Determines the type of the file, returns null if the file does not exist.
     */
    @Override
    protected FileType doGetType() throws Exception {
        if (part == null) {
            return FileType.IMAGINARY;
        }
        if (isMultipart()) {
            // A multipart message acts as both: its preamble is the file
            // content and its body parts are the children.
            return FileType.FILE_OR_FOLDER;
        }
        return FileType.FILE;
    }

    @Override
    protected String[] doListChildren() throws Exception {
        // Children are provided fully resolved by doListChildrenResolved().
        return null;
    }

    /**
     * Lists the children of the file. Is only called if {@link #doGetType} returns
     * {@link org.apache.commons.vfs2.FileType#FOLDER}.
     */
    @Override
    protected FileObject[] doListChildrenResolved() throws Exception {
        if (part == null) {
            return null;
        }
        final List<MimeFileObject> vfs = new ArrayList<>();
        if (isMultipart()) {
            final Object container = part.getContent();
            if (container instanceof Multipart) {
                final Multipart multipart = (Multipart) container;
                for (int i = 0; i < multipart.getCount(); i++) {
                    final Part part = multipart.getBodyPart(i);
                    String filename = UriParser.encode(part.getFileName());
                    if (filename == null) {
                        // Unnamed body parts get a synthetic index-based name.
                        filename = MimeFileSystem.NULL_BP_NAME + i;
                    }
                    final MimeFileObject fo = (MimeFileObject) FileObjectUtils
                            .getAbstractFileObject(getFileSystem().resolveFile(getFileSystem().getFileSystemManager()
                                    .resolveName(getName(), filename, NameScope.CHILD)));
                    fo.setPart(part);
                    vfs.add(fo);
                }
            }
        }
        return vfs.toArray(new MimeFileObject[vfs.size()]);
    }

    private void setPart(final Part part) {
        this.part = part;
        // Invalidate the cached header view; it is rebuilt on demand.
        this.attributeMap = null;
    }

    /**
     * Returns the size of the file content (in bytes).
     */
    @Override
    protected long doGetContentSize() throws Exception {
        return part.getSize();
    }

    /**
     * Returns the last modified time of this file: the sent date of the
     * enclosing message, falling back to the received date, then 0.
     */
    @Override
    protected long doGetLastModifiedTime() throws Exception {
        final Message mm = getMessage();
        if (mm == null) {
            return -1;
        }
        if (mm.getSentDate() != null) {
            return mm.getSentDate().getTime();
        }
        if (mm.getReceivedDate() != null) {
            // BUG FIX: the received date was previously fetched but never
            // returned, so this fallback always yielded 0.
            return mm.getReceivedDate().getTime();
        }
        return 0;
    }

    private Message getMessage() throws FileSystemException {
        if (part instanceof Message) {
            return (Message) part;
        }
        // Walk up until the enclosing message is reached.
        return ((MimeFileObject) FileObjectUtils.getAbstractFileObject(getParent())).getMessage();
    }

    /**
     * Creates an input stream to read the file content from.
     */
    @Override
    protected InputStream doGetInputStream(final int bufferSize) throws Exception {
        if (isMultipart()) {
            // deliver the preamble as the only content
            final String preamble = ((MimeMultipart) part.getContent()).getPreamble();
            if (preamble == null) {
                return new ByteArrayInputStream(new byte[] {});
            }
            return new ByteArrayInputStream(preamble.getBytes(MimeFileSystem.PREAMBLE_CHARSET));
        }
        return part.getInputStream();
    }

    boolean isMultipart() throws MessagingException {
        return part.getContentType() != null && part.getContentType().startsWith("multipart/");
    }

    @Override
    protected FileContentInfoFactory getFileContentInfoFactory() {
        return new MimeFileContentInfoFactory();
    }

    protected Part getPart() {
        return part;
    }

    /**
     * Returns all headers of this part.
     * <p>
     * The map key is a java.lang.String and the value is a:
     * <ul>
     * <li>{@code java.lang.Strings} for single entries or a</li>
     * <li>{@code java.utils.List<java.lang.Strings>} for entries with multiple values</li>
     * </ul>
     */
    @Override
    protected Map<String, Object> doGetAttributes() throws Exception {
        if (attributeMap == null) {
            if (part != null) {
                attributeMap = new MimeAttributesMap(part);
            } else {
                attributeMap = Collections.emptyMap();
            }
        }
        return attributeMap;
    }

    @SuppressWarnings("unchecked") // Javadoc says Part returns Header
    protected Enumeration<Header> getAllHeaders() throws MessagingException {
        return part.getAllHeaders();
    }
}
| |
/* Copyright 2014-2016 ARM Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.arm.wlauto.uiauto.googleplaybooks;
import android.os.Bundle;
// Import the uiautomator libraries
import com.android.uiautomator.core.UiObject;
import com.android.uiautomator.core.UiObjectNotFoundException;
import com.android.uiautomator.core.UiSelector;
import com.android.uiautomator.core.UiWatcher;
import com.android.uiautomator.core.UiScrollable;
import com.arm.wlauto.uiauto.UxPerfUiAutomation;
import com.arm.wlauto.uiauto.ApplaunchInterface;
import com.arm.wlauto.uiauto.UiAutoUtils;
import static com.arm.wlauto.uiauto.BaseUiAutomation.FindByCriteria.BY_ID;
import static com.arm.wlauto.uiauto.BaseUiAutomation.FindByCriteria.BY_TEXT;
import static com.arm.wlauto.uiauto.BaseUiAutomation.FindByCriteria.BY_DESC;
import java.util.concurrent.TimeUnit;
import java.util.LinkedHashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
import android.util.Log;
/**
 * UI automation for the Google Play Books workload.
 *
 * Drives a full reading scenario: search for a book, add it to the library,
 * open it, then exercise chapter navigation, gestures, notes, in-book search,
 * page styles and the book's "about" page, logging timings for each action.
 *
 * Bug fix applied throughout: {@code String.format()} was being called on
 * already-concatenated strings with no format arguments; any '%' in a runtime
 * value (e.g. a book title) would throw UnknownFormatConversionException.
 * Plain concatenation is used instead.
 */
public class UiAutomation extends UxPerfUiAutomation implements ApplaunchInterface {

    private int viewTimeoutSecs = 10;
    private long viewTimeout = TimeUnit.SECONDS.toMillis(viewTimeoutSecs);

    public void runUiAutomation() throws Exception {
        // Override superclass value
        this.uiAutoTimeout = TimeUnit.SECONDS.toMillis(8);
        parameters = getParams();

        String searchBookTitle = parameters.getString("search_book_title");
        String libraryBookTitle = parameters.getString("library_book_title");
        String chapterPageNumber = parameters.getString("chapter_page_number");
        String searchWord = parameters.getString("search_word");
        String noteText = "This is a test note";

        setScreenOrientation(ScreenOrientation.NATURAL);
        runApplicationInitialization();

        searchForBook(searchBookTitle);
        addToLibrary();
        openMyLibrary();
        openBook(libraryBookTitle);

        UiWatcher pageSyncPopUpWatcher = createPopUpWatcher();
        registerWatcher("pageSyncPopUp", pageSyncPopUpWatcher);
        runWatchers();

        selectChapter(chapterPageNumber);
        gesturesTest();
        addNote(noteText);
        removeNote();
        searchForWord(searchWord);
        switchPageStyles();
        aboutBook();

        removeWatcher("pageSyncPopUp");
        pressBack();
        unsetScreenOrientation();
    }

    // Get application parameters and clear the initial run dialogues of the application launch.
    public void runApplicationInitialization() throws Exception {
        getPackageParameters();
        String account = parameters.getString("account");
        chooseAccount(account);
        clearFirstRunDialogues();
        dismissSendBooksAsGiftsDialog();
        dismissSync();
    }

    // Sets the UiObject that marks the end of the application launch.
    public UiObject getLaunchEndObject() {
        UiObject launchEndObject = new UiObject(new UiSelector()
                .className("android.widget.ImageButton"));
        return launchEndObject;
    }

    // Returns the launch command for the application.
    public String getLaunchCommand() {
        String launch_command;
        launch_command = UiAutoUtils.createLaunchCommand(parameters);
        return launch_command;
    }

    // Pass the workload parameters, used for applaunch
    public void setWorkloadParameters(Bundle workload_parameters) {
        parameters = workload_parameters;
    }

    // If the device has more than one account setup, a prompt appears
    // In this case, select the first account in the list, unless `account`
    // has been specified as a parameter, otherwise select `account`.
    private void chooseAccount(String account) throws Exception {
        UiObject accountPopup =
            new UiObject(new UiSelector().textContains("Choose an account")
                                         .className("android.widget.TextView"));
        if (accountPopup.exists()) {
            if ("None".equals(account)) {
                // If no account has been specified, pick the first entry in the list
                UiObject list =
                    new UiObject(new UiSelector().className("android.widget.ListView"));
                UiObject first = list.getChild(new UiSelector().index(0));
                if (!first.exists()) {
                    // Some devices are not zero indexed. If 0 doesn't exist, pick 1
                    first = list.getChild(new UiSelector().index(1));
                }
                first.click();
            } else {
                // Account specified, select that
                clickUiObject(BY_TEXT, account, "android.widget.CheckedTextView");
            }
            // Click OK to proceed
            UiObject ok =
                new UiObject(new UiSelector().textContains("OK")
                                             .className("android.widget.Button")
                                             .enabled(true));
            ok.clickAndWaitForNewWindow();
        }
    }

    // If there is no sample book in My library we are prompted to choose a
    // book the first time application is run. Try to skip the screen or
    // pick a random sample book.
    private void clearFirstRunDialogues() throws Exception {
        UiObject startButton =
            new UiObject(new UiSelector().resourceId(packageID + "start_button"));
        // First try and skip the sample book selection
        if (startButton.exists()) {
            startButton.click();
        }
        UiObject endButton =
            new UiObject(new UiSelector().resourceId(packageID + "end_button"));
        // Click next button if it exists
        if (endButton.exists()) {
            endButton.click();
            // Select a random sample book to add to My library
            sleep(1);
            tapDisplayCentre();
            sleep(1);
            // Click done button (uses same resource-id)
            endButton.click();
        }
    }

    private void dismissSendBooksAsGiftsDialog() throws Exception {
        UiObject gotIt =
            new UiObject(new UiSelector().textContains("GOT IT!"));
        if (gotIt.exists()) {
            gotIt.click();
        }
    }

    private void dismissSync() throws Exception {
        UiObject keepSyncOff =
            new UiObject(new UiSelector().textContains("Keep sync off")
                                         .className("android.widget.Button"));
        if (keepSyncOff.exists()) {
            keepSyncOff.click();
        }
    }

    // Searches for a "free" or "purchased" book title in Google play
    private void searchForBook(final String bookTitle) throws Exception {
        UiObject search =
            new UiObject(new UiSelector().resourceId(packageID + "menu_search"));
        if (!search.exists()) {
            search =
                new UiObject(new UiSelector().resourceId(packageID + "search_box_active_text_view"));
        }
        search.click();

        UiObject searchText =
            new UiObject(new UiSelector().textContains("Search")
                                         .className("android.widget.EditText"));
        searchText.setText(bookTitle);
        pressEnter();

        UiObject resultList =
            new UiObject(new UiSelector().resourceId("com.android.vending:id/search_results_list"));
        if (!resultList.waitForExists(viewTimeout)) {
            throw new UiObjectNotFoundException("Could not find \"search results list view\".");
        }

        // Create a selector so that we can search for siblings of the desired
        // book that contains a "free" or "purchased" book identifier
        // (plain concatenation; String.format with no args breaks on '%' in titles)
        UiObject label =
            new UiObject(new UiSelector().fromParent(new UiSelector()
                                         .description("Book: " + bookTitle)
                                         .className("android.widget.TextView"))
                                         .resourceId("com.android.vending:id/li_label")
                                         .descriptionMatches("^(Purchased|Free)$"));

        final int maxSearchTime = 30;
        int searchTime = maxSearchTime;

        while (!label.exists()) {
            if (searchTime > 0) {
                uiDeviceSwipeDown(100);
                sleep(1);
                searchTime--;
            } else {
                throw new UiObjectNotFoundException(
                        "Exceeded maximum search time (" + maxSearchTime + " seconds) to find book \"" + bookTitle + "\"");
            }
        }

        // Click on either the first "free" or "purchased" book found that
        // matches the book title
        label.click();
    }

    private void addToLibrary() throws Exception {
        UiObject add =
            new UiObject(new UiSelector().textContains("ADD TO LIBRARY")
                                         .className("android.widget.Button"));
        if (add.exists()) {
            // add to My Library and opens book by default
            add.click();
            clickUiObject(BY_TEXT, "BUY", "android.widget.Button", true);
        } else {
            // opens book
            clickUiObject(BY_TEXT, "READ", "android.widget.Button");
        }

        waitForPage();

        UiObject navigationButton =
            new UiObject(new UiSelector().description("Navigate up"));

        // Return to main app window
        pressBack();

        // On some devices screen ordering is not preserved so check for
        // navigation button to determine current screen
        if (navigationButton.exists()) {
            pressBack();
            pressBack();
        }
    }

    private void openMyLibrary() throws Exception {
        String testTag = "open_library";
        ActionLogger logger = new ActionLogger(testTag, parameters);

        logger.start();
        clickUiObject(BY_DESC, "Show navigation drawer");
        // To correctly find the UiObject we need to specify the index also here
        UiObject myLibrary =
            new UiObject(new UiSelector().className("android.widget.TextView")
                                         .text("My library")
                                         .index(3));
        myLibrary.clickAndWaitForNewWindow(uiAutoTimeout);
        logger.stop();
    }

    private void openBook(final String bookTitle) throws Exception {
        String testTag = "open_book";
        ActionLogger logger = new ActionLogger(testTag, parameters);

        long maxWaitTimeSeconds = 120;
        long maxWaitTime = TimeUnit.SECONDS.toMillis(maxWaitTimeSeconds);

        UiSelector bookSelector =
            new UiSelector().text(bookTitle)
                            .className("android.widget.TextView");
        UiObject book = new UiObject(bookSelector);
        // Check that books are sorted by time added to library. This way we
        // can assume any newly downloaded books will be visible on the first
        // screen.
        clickUiObject(BY_ID, packageID + "menu_sort", "android.widget.TextView");
        clickUiObject(BY_TEXT, "Recent", "android.widget.TextView");
        // When the book is first added to library it may not appear in
        // cardsGrid until it has been fully downloaded. Wait for fully
        // downloaded books
        UiObject downloadComplete =
            new UiObject(new UiSelector().fromParent(bookSelector)
                                         .description("100% downloaded"));
        if (!downloadComplete.waitForExists(maxWaitTime)) {
            throw new UiObjectNotFoundException(
                    "Exceeded maximum wait time (" + maxWaitTimeSeconds + " seconds) to download book \"" + bookTitle + "\"");
        }

        logger.start();
        book.click();
        waitForPage();
        logger.stop();
    }

    // Creates a watcher for when a pop up warning appears when pages are out
    // of sync across multiple devices.
    private UiWatcher createPopUpWatcher() throws Exception {
        UiWatcher pageSyncPopUpWatcher = new UiWatcher() {
            @Override
            public boolean checkForCondition() {
                UiObject popUpDialogue =
                    new UiObject(new UiSelector().textStartsWith("You're on page")
                                                 .resourceId("android:id/message"));
                // Don't sync and stay on the current page
                if (popUpDialogue.exists()) {
                    try {
                        UiObject stayOnPage =
                            new UiObject(new UiSelector().text("Yes")
                                                         .className("android.widget.Button"));
                        stayOnPage.click();
                    } catch (UiObjectNotFoundException e) {
                        e.printStackTrace();
                    }
                    return popUpDialogue.waitUntilGone(viewTimeout);
                }
                return false;
            }
        };
        return pageSyncPopUpWatcher;
    }

    private void selectChapter(final String chapterPageNumber) throws Exception {
        getDropdownMenu();

        UiObject contents = getUiObjectByResourceId(packageID + "menu_reader_toc");
        contents.clickAndWaitForNewWindow(uiAutoTimeout);
        UiObject toChapterView = getUiObjectByResourceId(packageID + "toc_list_view",
                                                         "android.widget.ExpandableListView");
        // Navigate to top of chapter view
        searchPage(toChapterView, "1", Direction.UP, 10);
        // Search for chapter page number
        UiObject page = searchPage(toChapterView, chapterPageNumber, Direction.DOWN, 10);
        // Go to the page
        page.clickAndWaitForNewWindow(viewTimeout);
        waitForPage();
    }

    private void gesturesTest() throws Exception {
        String testTag = "gesture";

        // Perform a range of swipe tests while browsing home photoplaybooks gallery
        LinkedHashMap<String, GestureTestParams> testParams = new LinkedHashMap<String, GestureTestParams>();
        testParams.put("swipe_left", new GestureTestParams(GestureType.UIDEVICE_SWIPE, Direction.LEFT, 20));
        testParams.put("swipe_right", new GestureTestParams(GestureType.UIDEVICE_SWIPE, Direction.RIGHT, 20));
        testParams.put("pinch_out", new GestureTestParams(GestureType.PINCH, PinchType.OUT, 100, 50));
        testParams.put("pinch_in", new GestureTestParams(GestureType.PINCH, PinchType.IN, 100, 50));

        Iterator<Entry<String, GestureTestParams>> it = testParams.entrySet().iterator();

        while (it.hasNext()) {
            Map.Entry<String, GestureTestParams> pair = it.next();
            GestureType type = pair.getValue().gestureType;
            Direction dir = pair.getValue().gestureDirection;
            PinchType pinch = pair.getValue().pinchType;
            int steps = pair.getValue().steps;
            int percent = pair.getValue().percent;

            // Plain concatenation; the previous String.format had no format args.
            String runName = testTag + "_" + pair.getKey();
            ActionLogger logger = new ActionLogger(runName, parameters);

            UiObject pageView = waitForPage();

            logger.start();

            switch (type) {
                case UIDEVICE_SWIPE:
                    uiDeviceSwipe(dir, steps);
                    break;
                case PINCH:
                    uiObjectVertPinch(pageView, pinch, steps, percent);
                    break;
                default:
                    break;
            }

            logger.stop();
        }

        waitForPage();
    }

    private void addNote(final String text) throws Exception {
        String testTag = "note_add";
        ActionLogger logger = new ActionLogger(testTag, parameters);

        hideDropDownMenu();

        UiObject clickable = new UiObject(new UiSelector().longClickable(true));
        if (!clickable.exists()) {
            clickable = new UiObject(new UiSelector().resourceIdMatches(".*/main_page"));
        }

        logger.start();

        uiObjectPerformLongClick(clickable, 100);

        UiObject addNoteButton =
            new UiObject(new UiSelector().resourceId(packageID + "add_note_button"));
        addNoteButton.click();

        UiObject noteEditText = getUiObjectByResourceId(packageID + "note_edit_text",
                                                        "android.widget.EditText");
        noteEditText.setText(text);

        clickUiObject(BY_ID, packageID + "note_menu_button", "android.widget.ImageButton");
        clickUiObject(BY_TEXT, "Save", "android.widget.TextView");

        logger.stop();

        waitForPage();
    }

    private void removeNote() throws Exception {
        String testTag = "note_remove";
        ActionLogger logger = new ActionLogger(testTag, parameters);

        UiObject clickable = new UiObject(new UiSelector().longClickable(true));
        if (!clickable.exists()) {
            clickable = new UiObject(new UiSelector().resourceIdMatches(".*/main_page"));
        }

        logger.start();

        uiObjectPerformLongClick(clickable, 100);

        UiObject removeButton =
            new UiObject(new UiSelector().resourceId(packageID + "remove_highlight_button"));
        removeButton.click();

        clickUiObject(BY_TEXT, "Remove", "android.widget.Button");

        logger.stop();

        waitForPage();
    }

    private void searchForWord(final String text) throws Exception {
        String testTag = "search_word";
        ActionLogger logger = new ActionLogger(testTag, parameters);

        // Allow extra time for search queries involving high frequency words
        final long searchTimeout = TimeUnit.SECONDS.toMillis(20);

        getDropdownMenu();

        UiObject search =
            new UiObject(new UiSelector().resourceId(packageID + "menu_search"));
        search.click();

        UiObject searchText =
            new UiObject(new UiSelector().resourceId(packageID + "search_src_text"));

        logger.start();

        searchText.setText(text);
        pressEnter();

        UiObject resultList =
            new UiObject(new UiSelector().resourceId(packageID + "search_results_list"));
        if (!resultList.waitForExists(searchTimeout)) {
            throw new UiObjectNotFoundException("Could not find \"search results list view\".");
        }

        UiObject searchWeb =
            new UiObject(new UiSelector().text("Search web")
                                         .className("android.widget.TextView"));
        if (!searchWeb.waitForExists(searchTimeout)) {
            throw new UiObjectNotFoundException("Could not find \"Search web view\".");
        }

        logger.stop();

        pressBack();
    }

    private void switchPageStyles() throws Exception {
        String testTag = "style";

        getDropdownMenu();

        clickUiObject(BY_ID, packageID + "menu_reader_settings", "android.widget.TextView");

        // Check for lighting option button on newer versions
        UiObject lightingOptionsButton =
            new UiObject(new UiSelector().resourceId(packageID + "lighting_options_button"));

        if (lightingOptionsButton.exists()) {
            lightingOptionsButton.click();
        }

        String[] styles = {"Night", "Sepia", "Day"};
        for (String style : styles) {
            try {
                ActionLogger logger = new ActionLogger(testTag + "_" + style, parameters);
                UiObject pageStyle =
                    new UiObject(new UiSelector().description(style));

                logger.start();
                pageStyle.clickAndWaitForNewWindow(viewTimeout);
                logger.stop();

            } catch (UiObjectNotFoundException e) {
                // On some devices the lighting options menu disappears
                // between clicks. Searching for the menu again would affect
                // the logger timings so log a message and continue
                Log.e("GooglePlayBooks", "Could not find pageStyle \"" + style + "\"");
            }
        }

        sleep(2);
        tapDisplayCentre(); // exit reader settings dialog
        waitForPage();
    }

    private void aboutBook() throws Exception {
        String testTag = "open_about";
        ActionLogger logger = new ActionLogger(testTag, parameters);

        getDropdownMenu();

        clickUiObject(BY_DESC, "More options", "android.widget.ImageView");

        UiObject bookInfo = getUiObjectByText("About this book", "android.widget.TextView");

        logger.start();

        bookInfo.clickAndWaitForNewWindow(uiAutoTimeout);

        UiObject detailsPanel =
            new UiObject(new UiSelector().resourceId("com.android.vending:id/item_details_panel"));
        waitObject(detailsPanel, viewTimeoutSecs);

        logger.stop();

        pressBack();
    }

    // Helper for waiting on a page between actions
    private UiObject waitForPage() throws Exception {
        UiObject activityReader =
            new UiObject(new UiSelector().resourceId(packageID + "activity_reader")
                                         .childSelector(new UiSelector()
                                         .focusable(true)));
        // On some devices the object in the view hierarchy is found before it
        // becomes visible on the screen. Therefore add pause instead.
        sleep(3);

        if (!activityReader.waitForExists(viewTimeout)) {
            throw new UiObjectNotFoundException("Could not find \"activity reader view\".");
        }

        return activityReader;
    }

    // Helper for accessing the drop down menu
    private void getDropdownMenu() throws Exception {
        UiObject actionBar =
            new UiObject(new UiSelector().resourceId(packageID + "action_bar"));
        if (!actionBar.exists()) {
            tapDisplayCentre();
            sleep(1); // Allow previous views to settle
        }

        UiObject card =
            new UiObject(new UiSelector().resourceId(packageID + "cards")
                                         .className("android.view.ViewGroup"));
        if (card.exists()) {
            // On rare occasions tapping a certain word that appears in the centre
            // of the display will bring up a card to describe the word.
            // (Such as a place will bring a map of its location)
            // In this situation, tap centre to go back, and try again
            // at a different set of coordinates
            int x = (int) (getDisplayCentreWidth() * 0.8);
            int y = (int) (getDisplayCentreHeight() * 0.8);
            while (card.exists()) {
                tapDisplay(x, y);
                sleep(1);
            }
            tapDisplay(x, y);
            sleep(1); // Allow previous views to settle
        }

        if (!actionBar.exists()) {
            throw new UiObjectNotFoundException("Could not find \"action bar\".");
        }
    }

    private void hideDropDownMenu() throws Exception {
        UiObject actionBar =
            new UiObject(new UiSelector().resourceId(packageID + "action_bar"));
        if (actionBar.exists()) {
            tapDisplayCentre();
            sleep(1); // Allow previous views to settle
        }

        if (actionBar.exists()) {
            throw new UiObjectNotFoundException("Could not close \"action bar\".");
        }
    }

    // Scrolls the chapter list up or down until the entry describing the given
    // page number is visible; gives up after `attempts` swipes.
    private UiObject searchPage(final UiObject view, final String pagenum, final Direction updown,
                                final int attempts) throws Exception {
        if (attempts <= 0) {
            throw new UiObjectNotFoundException("Could not find \"page number\" after several attempts.");
        }

        // Plain concatenation; the previous String.format had no format args.
        UiObject page =
            new UiObject(new UiSelector().description("page " + pagenum)
                                         .className("android.widget.TextView"));
        if (!page.exists()) {
            // Scroll up by swiping down
            if (updown == Direction.UP) {
                view.swipeDown(200);
            // Default case is to scroll down (swipe up)
            } else {
                view.swipeUp(200);
            }
            page = searchPage(view, pagenum, updown, attempts - 1);
        }
        return page;
    }
}
| |
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.semanticweb.elk.owlapi.ElkReasonerFactory;
import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.io.OWLXMLOntologyFormat;
import org.semanticweb.owlapi.model.IRI;
import org.semanticweb.owlapi.model.OWLAxiom;
import org.semanticweb.owlapi.model.OWLClass;
import org.semanticweb.owlapi.model.OWLClassAssertionAxiom;
import org.semanticweb.owlapi.model.OWLDataFactory;
import org.semanticweb.owlapi.model.OWLIndividual;
import org.semanticweb.owlapi.model.OWLNamedIndividual;
import org.semanticweb.owlapi.model.OWLObjectProperty;
import org.semanticweb.owlapi.model.OWLObjectPropertyAssertionAxiom;
import org.semanticweb.owlapi.model.OWLOntology;
import org.semanticweb.owlapi.model.OWLOntologyFormat;
import org.semanticweb.owlapi.model.OWLOntologyManager;
import org.semanticweb.owlapi.reasoner.InferenceType;
import org.semanticweb.owlapi.reasoner.Node;
import org.semanticweb.owlapi.reasoner.NodeSet;
import org.semanticweb.owlapi.reasoner.OWLReasoner;
import org.semanticweb.owlapi.reasoner.OWLReasonerFactory;
import uk.ac.ox.krr.logmap2.LogMap2_Matcher;
import uk.ac.ox.krr.logmap2.LogMap2_RepairFacility;
import uk.ac.ox.krr.logmap2.mappings.objects.MappingObjectStr;
import uk.ac.ox.krr.logmap2.owlapi.SynchronizedOWLManager;
import uk.ac.ox.krr.logmap2.reasoning.HermiTAccess;
import com.google.common.collect.Sets;
public class DefaultMapping {
/**
 * Runs the marriage-ontology alignment experiment: computes LogMap mappings,
 * weakens them to subsumptions, separates out the mapping that mentions
 * "mike", saves the merged ontology, and reasons over the default mappings.
 */
public void processMappingsForMarriage(OWLOntology ontology1, OWLOntology ontology2)throws Exception{
    // Obtain the LogMap mappings and weaken them to subsumption mappings.
    Set<MappingObjectStr> mappings = convertMappingsToSub(getLogMapMappings(ontology1, ontology2));
    // Every mapping except the one mentioning "mike" becomes a default mapping.
    Set<MappingObjectStr> defaultMappings = new HashSet<MappingObjectStr>();
    MappingObjectStr mikeMapping = null;
    for (MappingObjectStr candidate : mappings) {
        if (candidate.toString().contains("mike")) {
            mikeMapping = candidate;
        } else {
            defaultMappings.add(candidate);
        }
    }
    // Drop the "mike" mapping before saving / reasoning (no-op when absent).
    mappings.remove(mikeMapping);
    saveMergedOntology("/media/kunal/kunal/logmap/marriage.owl", ontology1, ontology2, mappings);
    List<OWLOntology> mappedOntologies = defaultMappingReasoning(ontology1, ontology2, mappings, defaultMappings);
    findMappedInstances(mappedOntologies, defaultMappings);
}
/**
 * Runs the biomedical alignment experiment: computes LogMap mappings, weakens
 * them to subsumptions, selects the anatomy-related mappings as "default"
 * mappings, then reasons over them and resolves mapped instances.
 *
 * Cleanup: removed a dead empty for-loop over {@code mappings} and several
 * large commented-out experiment blocks (repair facility, manual removals)
 * that were never executed.
 */
public void processMappingsForConference(OWLOntology ontology1, OWLOntology ontology2)throws Exception{
    // Obtain the LogMap mappings and weaken them to subsumption mappings.
    Set<MappingObjectStr> mappings = getLogMapMappings(ontology1, ontology2);
    mappings = convertMappingsToSub(mappings);
    System.out.println(mappings.size());
    // Select the class mappings for specific anatomy concepts, plus the
    // "Anatomic_Structure_Has_Location" object-property mapping, as defaults.
    Set<MappingObjectStr> defaultMappings = new HashSet<MappingObjectStr>();
    for (MappingObjectStr mapping : mappings) {
        logOutput(mapping.toString());
        if (mapping.getTypeOfMapping() == MappingObjectStr.CLASSES
                && (mapping.getIRIStrEnt1().contains("Visceral_Pleura")
                    || mapping.getIRIStrEnt1().contains("Thoracic_Cavity")
                    || mapping.getIRIStrEnt1().contains("Pleural_Tissue"))) {
            defaultMappings.add(mapping);
        }
        if (mapping.getTypeOfMapping() == MappingObjectStr.OBJECTPROPERTIES
                && mapping.getIRIStrEnt1().contains("Anatomic_Structure_Has_Location")) {
            defaultMappings.add(mapping);
        }
    }
    System.out.println(defaultMappings);
    List<OWLOntology> mappedOntologies = defaultMappingReasoningBio(ontology1, ontology2, mappings, defaultMappings);
    findMappedInstances(mappedOntologies, defaultMappings, mappings, ontology2);
}
/**
 * Food-ontology experiment: computes LogMap mappings between the two
 * ontologies, weakens equivalences to subsumptions, and saves the merged
 * result to disk. Instance-level validation is currently disabled for this
 * data set.
 *
 * @param ontology1 source ontology
 * @param ontology2 target ontology
 * @throws Exception if matching or saving fails
 */
public void processMappingsForFood(OWLOntology ontology1, OWLOntology ontology2) throws Exception{
	Set<MappingObjectStr> computed = getLogMapMappings(ontology1, ontology2);
	Set<MappingObjectStr> weakened = convertMappingsToSub(computed);
	saveMergedOntology("/media/kunal/kunal/logmap/food.owl", ontology1, ontology2, weakened);
}
/**
 * Saves the union of two ontologies plus the TBox axioms derived from the
 * given mappings to {@code filepath} in OWL/XML format.
 *
 * @param filepath destination file path
 * @param ont1     first ontology
 * @param ont2     second ontology
 * @param mappings mappings whose TBox translation is added to the merge
 * @throws Exception if ontology creation or saving fails
 */
public void saveMergedOntology(String filepath, OWLOntology ont1, OWLOntology ont2, Set<MappingObjectStr> mappings) throws Exception{
	File file = new File(filepath);
	OWLOntologyManager man = OWLManager.createOWLOntologyManager();
	OWLOntology output = man.createOntology();
	man.addAxioms(output, ont1.getAxioms());
	man.addAxioms(output, ont2.getAxioms());
	man.addAxioms(output, getTboxFromMappings(mappings));
	// NOTE(review): a HermiT reasoner used to be constructed here purely for
	// commented-out debug checks; it was never used nor disposed, so the
	// wasted instantiation has been removed.
	OWLOntologyFormat format = new OWLXMLOntologyFormat();
	man.saveOntology(output, format, IRI.create(file));
}
/**
 * Convenience overload of
 * {@code findMappedInstances(List, Set, Set, OWLOntology)} that passes
 * {@code null} for the extra mapping set and the target ontology.
 *
 * @param ontologies      candidate merged ontologies to intersect results over
 * @param defaultMappings mappings whose mapped instances are reported
 * @throws Exception if reasoning fails
 */
public void findMappedInstances(List<OWLOntology> ontologies, Set<MappingObjectStr> defaultMappings) throws Exception{
	findMappedInstances(ontologies, defaultMappings, null, null);
}
/**
 * Reports, for every mapped class and object property, the individuals (or
 * pairs of individuals) that are instances in EVERY candidate ontology, i.e.
 * the intersection of the inferred instance sets across all consistent merges.
 *
 * @param ontologies      candidate merged ontologies
 * @param defaultMappings validated mappings whose target entities are queried
 * @param mappings        optional additional mappings whose target entities
 *                        are also queried; may be {@code null}
 * @param o2              optional ontology; when non-null its full class
 *                        signature replaces the mapping-derived class list
 * @throws Exception if reasoning fails
 */
public void findMappedInstances(List<OWLOntology> ontologies, Set<MappingObjectStr> defaultMappings, Set<MappingObjectStr> mappings, OWLOntology o2) throws Exception{
	OWLOntologyManager man = OWLManager.createOWLOntologyManager();
	OWLDataFactory dataFactory = man.getOWLDataFactory();
	// Per-entity running intersection of instance sets across the ontologies.
	Map<OWLClass, Set<ClassCandidate>> classesMap = new HashMap<OWLClass, Set<ClassCandidate>>();
	Map<OWLObjectProperty, Set<RoleCandidate>> rolesMap = new HashMap<OWLObjectProperty, Set<RoleCandidate>>();
	List<OWLClass> classes = new ArrayList<OWLClass>();
	List<OWLObjectProperty> props = new ArrayList<OWLObjectProperty>();
	for(MappingObjectStr mapping: defaultMappings){
		if(mapping.getTypeOfMapping() == MappingObjectStr.CLASSES){
			classes.add(dataFactory.getOWLClass(IRI.create(mapping.getIRIStrEnt2())));
		}else if(mapping.getTypeOfMapping() == MappingObjectStr.OBJECTPROPERTIES){
			props.add(dataFactory.getOWLObjectProperty(IRI.create(mapping.getIRIStrEnt2())));
		}
	}
	// BUG FIX: 'mappings' may be null (the two-argument overload passes null);
	// iterating it unguarded threw a NullPointerException.
	if(mappings != null){
		for(MappingObjectStr mapping: mappings){
			if(mapping.getTypeOfMapping() == MappingObjectStr.CLASSES){
				classes.add(dataFactory.getOWLClass(IRI.create(mapping.getIRIStrEnt2())));
			}else if(mapping.getTypeOfMapping() == MappingObjectStr.OBJECTPROPERTIES){
				props.add(dataFactory.getOWLObjectProperty(IRI.create(mapping.getIRIStrEnt2())));
			}
		}
	}
	// When a target ontology is supplied, query every class in its signature
	// instead of the mapping-derived list.
	if(o2 != null){
		classes = new ArrayList<OWLClass>(o2.getClassesInSignature());
	}
	for(OWLOntology ont: ontologies){
		OWLOntology ontology = man.createOntology();
		man.addAxioms(ontology, ont.getAxioms());
		HermiTAccess hermitAccess = new HermiTAccess(man, ontology, true);
		OWLReasoner reasonerHermit = hermitAccess.getReasoner();
		reasonerHermit.precomputeInferences(InferenceType.CLASS_HIERARCHY, InferenceType.CLASS_ASSERTIONS, InferenceType.OBJECT_PROPERTY_ASSERTIONS, InferenceType.OBJECT_PROPERTY_HIERARCHY);
		System.out.println("Unsatisfiable CLasses :: "+reasonerHermit.getUnsatisfiableClasses());
		for(OWLClass cl: classes){
			NodeSet<OWLNamedIndividual> instances = reasonerHermit.getInstances(cl, false);
			Set<ClassCandidate> cand = new HashSet<ClassCandidate>();
			for(Node<OWLNamedIndividual> ind: instances){
				cand.add(new ClassCandidate(ind.getRepresentativeElement()));
			}
			if(!classesMap.containsKey(cl)){
				classesMap.put(cl, cand);
			}else{
				classesMap.put(cl, findIntersection(classesMap.get(cl), cand));
			}
		}
		for(OWLObjectProperty prop: props){
			NodeSet<OWLNamedIndividual> instances = reasonerHermit.getInstances(reasonerHermit.getTopClassNode().getRepresentativeElement(), false);
			Set<RoleCandidate> cand = new HashSet<RoleCandidate>();
			for(Node<OWLNamedIndividual> ind: instances){
				NodeSet<OWLNamedIndividual> individuals = reasonerHermit.getObjectPropertyValues(ind.getRepresentativeElement(), prop);
				for(Node<OWLNamedIndividual> rightInd: individuals){
					cand.add(new RoleCandidate(ind.getRepresentativeElement(), rightInd.getRepresentativeElement()));
				}
			}
			if(!rolesMap.containsKey(prop)){
				rolesMap.put(prop, cand);
			}else{
				rolesMap.put(prop, findIntersectionRole(rolesMap.get(prop), cand));
			}
		}
		// RESOURCE FIX: release the per-ontology reasoner (was never disposed).
		reasonerHermit.dispose();
	}
	System.out.println(classesMap);
	System.out.println(rolesMap);
	System.out.println("Printing all instances of mapped classes");
	for(OWLClass cl: classesMap.keySet()){
		List<ClassCandidate> list = new ArrayList<>(classesMap.get(cl));
		if(list.size()>0){
			System.out.println("Printing all instances of class "+ cl.toString().substring(cl.toString().indexOf("#")+1));
			for(ClassCandidate cand: list){
				System.out.println(cand);
			}
		}
	}
	System.out.println("Printing all instances of mapped properties");
	for(OWLObjectProperty prop: rolesMap.keySet()){
		for(RoleCandidate cand: rolesMap.get(prop)){
			System.out.println("Printing pairs that satisfy role "+ prop);
			System.out.println(cand);
		}
	}
}
/**
 * Returns the intersection of two candidate sets.
 * Relies on {@code ClassCandidate.equals}/{@code hashCode}; the inputs are
 * already built as hash sets elsewhere in this class, so that contract is
 * assumed to hold.
 *
 * @param set1 first set
 * @param set2 second set
 * @return a new set containing the elements present in both inputs
 */
private Set<ClassCandidate> findIntersection(Set<ClassCandidate> set1, Set<ClassCandidate> set2){
	// O(n) hash lookups instead of the previous O(n*m) nested scan.
	Set<ClassCandidate> output = new HashSet<ClassCandidate>(set1);
	output.retainAll(set2);
	return output;
}
/**
 * Returns the intersection of two role-candidate sets.
 * Relies on {@code RoleCandidate.equals}/{@code hashCode}; the inputs are
 * already built as hash sets elsewhere in this class, so that contract is
 * assumed to hold.
 *
 * @param set1 first set
 * @param set2 second set
 * @return a new set containing the pairs present in both inputs
 */
private Set<RoleCandidate> findIntersectionRole(Set<RoleCandidate> set1, Set<RoleCandidate> set2){
	// O(n) hash lookups instead of the previous O(n*m) nested scan.
	Set<RoleCandidate> output = new HashSet<RoleCandidate>(set1);
	output.retainAll(set2);
	return output;
}
/**
 * Experimental: merges only the TBox axioms of both ontologies together with
 * the TBox translation of the (subsumption-weakened) mappings, classifies the
 * result with ELK and then HermiT, and prints inferred hierarchies for
 * inspection.
 *
 * NOTE(review): the {@code defaultMappings} parameter is never read, and the
 * final call passes {@code mappings} twice to {@code defaultReasoning} —
 * presumably the last argument should be {@code defaultMappings}; confirm
 * intent before changing.
 */
private void mergeTboxOnly(OWLOntology ont1, OWLOntology ont2, Set<MappingObjectStr> mappings, Set<MappingObjectStr> defaultMappings) throws Exception{
	// Union of both TBoxes (true = include imports closure per OWL API getTBoxAxioms).
	Set<OWLAxiom> tboxAxioms = ont1.getTBoxAxioms(false);
	tboxAxioms.addAll(ont2.getTBoxAxioms(false));
	//System.out.println(ont2.getABoxAxioms(false));
	//convertMappings to subclass
	Set<MappingObjectStr> subMappings = convertMappingsToSub(mappings);
	Set<OWLAxiom> tboxMappingAxioms = getTboxFromMappings(subMappings);
	LogMap2_RepairFacility logmap2_repair = new LogMap2_RepairFacility(ont1, ont2, subMappings, false, false);
	//System.out.println(logmap2_repair.getOWLOntology4CleanMappings(mappings));
	// NOTE(review): mappingsOntology is computed but never used afterwards.
	OWLOntology mappingsOntology = logmap2_repair.getOWLOntology4CleanMappings(subMappings);
	tboxAxioms.addAll(tboxMappingAxioms);
	System.out.println(tboxMappingAxioms);
	OWLOntologyManager managerMerged = SynchronizedOWLManager.createOWLOntologyManager();
	OWLOntology mergedOntology = managerMerged.createOntology(tboxAxioms, IRI.create("http://daselab.org/integration.owl"));
	// First pass: classify with ELK and print superclasses of ont1's classes.
	OWLReasonerFactory reasonerFactory = new ElkReasonerFactory();
	OWLReasoner reasoner = reasonerFactory.createReasoner(mergedOntology);
	reasoner.precomputeInferences(InferenceType.CLASS_HIERARCHY);
	for(OWLClass cl: ont1.getClassesInSignature()){
		System.out.println("Superclasses of "+cl);
		System.out.println(reasoner.getSuperClasses(cl, false));
	}
	/* for(OWLObjectProperty p: ont2.getObjectPropertiesInSignature()){
		System.out.println("Super roles of "+p);
		System.out.println(reasoner.getSubObjectProperties(p, false));
	}*/
	// Second pass: re-check with HermiT and print property hierarchies.
	HermiTAccess hermitAccess = new HermiTAccess(managerMerged, mergedOntology, true);
	OWLReasoner reasonerHermit =hermitAccess.getReasoner();
	reasonerHermit.precomputeInferences(InferenceType.CLASS_HIERARCHY, InferenceType.OBJECT_PROPERTY_HIERARCHY, InferenceType.OBJECT_PROPERTY_ASSERTIONS, InferenceType.CLASS_ASSERTIONS);
	System.out.println(mergedOntology.getAxioms());
	for(OWLObjectProperty p: mergedOntology.getObjectPropertiesInSignature()){
		System.out.println("Super roles of "+p);
		System.out.println(reasonerHermit.getSubObjectProperties(p, true));
		System.out.println(reasonerHermit.getEquivalentObjectProperties(p));
	}
	defaultReasoning(ont1, ont2, mappings, mappings);
}
/**
 * Translates entity mappings into OWL TBox axioms: SUB and SUP mappings become
 * subsumption axioms (in the corresponding direction) and anything else
 * becomes an equivalence axiom, for both classes and object properties.
 * Mappings of other entity types are ignored.
 *
 * @param mappings mappings to translate
 * @return the set of generated axioms (possibly empty)
 * @throws Exception kept for interface compatibility with callers
 */
private Set<OWLAxiom> getTboxFromMappings(Set<MappingObjectStr> mappings)throws Exception{
	OWLDataFactory factory = SynchronizedOWLManager.createOWLOntologyManager().getOWLDataFactory();
	// Build the axiom set directly instead of routing through a throwaway
	// ontology; also removed leftover "Inside" debug prints.
	Set<OWLAxiom> axioms = new HashSet<OWLAxiom>();
	for(MappingObjectStr mapping: mappings){
		OWLAxiom axiom = null;
		if(mapping.getTypeOfMapping() == MappingObjectStr.OBJECTPROPERTIES){
			OWLObjectProperty prop1 = factory.getOWLObjectProperty(IRI.create(mapping.getIRIStrEnt1()));
			OWLObjectProperty prop2 = factory.getOWLObjectProperty(IRI.create(mapping.getIRIStrEnt2()));
			if(mapping.getMappingDirection() == MappingObjectStr.SUB){
				axiom = factory.getOWLSubObjectPropertyOfAxiom(prop1, prop2);
			}else if(mapping.getMappingDirection() == MappingObjectStr.SUP){
				axiom = factory.getOWLSubObjectPropertyOfAxiom(prop2, prop1);
			}else{
				axiom = factory.getOWLEquivalentObjectPropertiesAxiom(prop1, prop2);
			}
		}else if(mapping.getTypeOfMapping() == MappingObjectStr.CLASSES){
			OWLClass class1 = factory.getOWLClass(IRI.create(mapping.getIRIStrEnt1()));
			OWLClass class2 = factory.getOWLClass(IRI.create(mapping.getIRIStrEnt2()));
			if(mapping.getMappingDirection() == MappingObjectStr.SUB){
				axiom = factory.getOWLSubClassOfAxiom(class1, class2);
			}else if(mapping.getMappingDirection() == MappingObjectStr.SUP){
				axiom = factory.getOWLSubClassOfAxiom(class2, class1);
			}else{
				axiom = factory.getOWLEquivalentClassesAxiom(class1, class2);
			}
		}
		if(axiom != null){
			axioms.add(axiom);
		}
	}
	return axioms;
}
/**
 * Weakens every equivalence (EQ) mapping to a subsumption (entity1 SUB
 * entity2); mappings that are already directional are passed through
 * untouched.
 *
 * @param inputMappings mappings to weaken
 * @return a new set containing the weakened mappings
 */
private Set<MappingObjectStr> convertMappingsToSub(Set<MappingObjectStr> inputMappings){
	Set<MappingObjectStr> weakened = new HashSet<MappingObjectStr>();
	for(MappingObjectStr original: inputMappings){
		boolean isEquivalence = original.getMappingDirection() == MappingObjectStr.EQ;
		weakened.add(isEquivalence
				? new MappingObjectStr(original.getIRIStrEnt1(), original.getIRIStrEnt2(),
						original.getConfidence(), MappingObjectStr.SUB, original.getTypeOfMapping())
				: original);
	}
	return weakened;
}
/**
 * Experimental consistency-based validation of the "default" object-property
 * mappings. All non-default mappings are applied directly by copying the
 * entailed class/property assertions from {@code o1} into a working copy of
 * {@code o2}; then every subset of each default mapping's candidate role
 * assertions is tested for consistency and maximal consistent subsets are
 * reported on stdout.
 *
 * @param o1             source ontology (classified with HermiT)
 * @param o2             target ontology (assertions are copied into a working copy)
 * @param mappings       all mappings
 * @param defaultMapping mappings whose assertion subsets are validated
 * @throws Exception if reasoning fails
 */
private void defaultReasoning(OWLOntology o1, OWLOntology o2, Set<MappingObjectStr> mappings, Set<MappingObjectStr> defaultMapping)throws Exception{
	OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
	OWLOntology ontology = manager.createOntology();
	manager.addAxioms(ontology, o2.getAxioms());
	OWLOntologyManager manager1 = OWLManager.createOWLOntologyManager();
	HermiTAccess hermitAccess = new HermiTAccess(manager1, o1, true);
	OWLReasoner reasonerHermit = hermitAccess.getReasoner();
	reasonerHermit.precomputeInferences(InferenceType.CLASS_HIERARCHY, InferenceType.OBJECT_PROPERTY_HIERARCHY, InferenceType.OBJECT_PROPERTY_ASSERTIONS, InferenceType.CLASS_ASSERTIONS);
	OWLDataFactory factory = manager1.getOWLDataFactory();
	// Apply every non-default mapping unconditionally: copy the entailed
	// instance data of the left entity onto the right entity in the working copy.
	for(MappingObjectStr mapping: mappings){
		if(!defaultMapping.contains(mapping)){
			if(mapping.getTypeOfMapping() == MappingObjectStr.CLASSES){
				OWLClass left = factory.getOWLClass(IRI.create(mapping.getIRIStrEnt1()));
				OWLClass right = factory.getOWLClass(IRI.create(mapping.getIRIStrEnt2()));
				NodeSet<OWLNamedIndividual> instances = reasonerHermit.getInstances(left, false);
				for(Node<OWLNamedIndividual> ind : instances){
					manager.addAxiom(ontology, factory.getOWLClassAssertionAxiom(right, ind.getRepresentativeElement()));
				}
			}
			if(mapping.getTypeOfMapping() == MappingObjectStr.OBJECTPROPERTIES){
				OWLObjectProperty left = factory.getOWLObjectProperty(IRI.create(mapping.getIRIStrEnt1()));
				OWLObjectProperty right = factory.getOWLObjectProperty(IRI.create(mapping.getIRIStrEnt2()));
				Node<OWLClass> top = reasonerHermit.getTopClassNode();
				NodeSet<OWLNamedIndividual> allIndividuals = reasonerHermit.getInstances(top.getRepresentativeElement(), false);
				for(Node<OWLNamedIndividual> individual : allIndividuals){
					NodeSet<OWLNamedIndividual> instances = reasonerHermit.getObjectPropertyValues(individual.getRepresentativeElement(), left);
					for(Node<OWLNamedIndividual> ind : instances){
						manager.addAxiom(ontology, factory.getOWLObjectPropertyAssertionAxiom(right, individual.getRepresentativeElement(), ind.getRepresentativeElement()));
					}
				}
			}
		}
	}
	// Collect, per default mapping, every candidate role assertion and its power set.
	Map<String, Set<RoleCandidate>> roleCandidates = new HashMap<String, Set<RoleCandidate>>();
	Map<String, Set<Set<RoleCandidate>>> roleCandidatesPSet = new HashMap<String, Set<Set<RoleCandidate>>>();
	for(MappingObjectStr mapping: defaultMapping){
		if(mapping.getTypeOfMapping() == MappingObjectStr.OBJECTPROPERTIES){
			OWLObjectProperty left = factory.getOWLObjectProperty(IRI.create(mapping.getIRIStrEnt1()));
			Node<OWLClass> top = reasonerHermit.getTopClassNode();
			NodeSet<OWLNamedIndividual> allIndividuals = reasonerHermit.getInstances(top.getRepresentativeElement(), false);
			for(Node<OWLNamedIndividual> individual : allIndividuals){
				NodeSet<OWLNamedIndividual> instances = reasonerHermit.getObjectPropertyValues(individual.getRepresentativeElement(), left);
				for(Node<OWLNamedIndividual> ind : instances){
					if(roleCandidates.get(mapping.toString()) == null){
						roleCandidates.put(mapping.toString(), new HashSet<RoleCandidate>());
					}
					roleCandidates.get(mapping.toString()).add(new RoleCandidate(individual.getRepresentativeElement(), ind.getRepresentativeElement()));
				}
			}
		}
		// BUG FIX: a default mapping that produced no candidates (or was not an
		// object property) left a null entry here, and Sets.powerSet(null)
		// threw a NullPointerException. Fall back to the empty set.
		Set<RoleCandidate> roleAssert = roleCandidates.get(mapping.toString());
		if(roleAssert == null){
			roleAssert = new HashSet<RoleCandidate>();
		}
		roleCandidatesPSet.put(mapping.toString(), Sets.powerSet(roleAssert));
		System.out.println(roleAssert);
	}
	// Test every subset of candidate assertions for consistency.
	Set<Set<RoleCandidate>> successfulMappings = new HashSet<Set<RoleCandidate>>();
	for(MappingObjectStr mapping: defaultMapping){
		for(Set<RoleCandidate> roleCand : roleCandidatesPSet.get(mapping.toString())){
			OWLObjectProperty right = factory.getOWLObjectProperty(IRI.create(mapping.getIRIStrEnt2()));
			OWLOntologyManager man = OWLManager.createOWLOntologyManager();
			OWLOntology test = man.createOntology();
			// BUG FIX: the base axioms were previously re-added once per
			// candidate inside the loop below (and skipped entirely for the
			// empty subset); add them exactly once here.
			man.addAxioms(test, ontology.getAxioms());
			for(RoleCandidate cand: roleCand){
				// BUG FIX: the assertion was added through the wrong manager
				// ('manager' instead of 'man', which owns 'test').
				man.addAxiom(test, factory.getOWLObjectPropertyAssertionAxiom(right, cand.getA1(), cand.getA2()));
			}
			HermiTAccess hermitAccessTest = new HermiTAccess(man, test, true);
			OWLReasoner testReasoner = hermitAccessTest.getReasoner();
			// (the previous "boolean add = true; if(add)" wrapper was a no-op)
			if(testReasoner.isConsistent()){
				successfulMappings.add(roleCand);
			}
			// RESOURCE FIX: release the per-subset reasoner.
			testReasoner.dispose();
		}
	}
	// Keep only maximal consistent subsets: evict kept subsets that this one
	// strictly contains, and skip subsets contained in an already-kept one.
	Set<Set<RoleCandidate>> filteredMappings = new HashSet<Set<RoleCandidate>>();
	for(Set<RoleCandidate> oneMapping: successfulMappings){
		boolean add = true;
		Iterator<Set<RoleCandidate>> it = filteredMappings.iterator();
		while(it.hasNext()){
			Set<RoleCandidate> existingMapping = it.next();
			if(oneMapping.containsAll(existingMapping) && oneMapping.size() > existingMapping.size()){
				// Safe removal during iteration (the old for-each + remove only
				// avoided ConcurrentModificationException by breaking immediately).
				it.remove();
				add = true;
				break;
			}
			if(existingMapping.containsAll(oneMapping)){
				add = false;
			}
		}
		if(add){
			filteredMappings.add(oneMapping);
		}
	}
	// (a leftover containsAll() sanity-check on throwaway integer sets was removed here)
	System.out.println("Successful Mappings "+successfulMappings);
	System.out.println("Filtered Mappings "+filteredMappings.size());
}
/**
 * Validates the "default" mappings instance-wise: non-default mappings are
 * applied unconditionally into a working copy of {@code o2}; then for every
 * combination of subsets of each default mapping's candidate assertions
 * (cartesian product of power sets) a merged ontology is built, and the
 * combinations whose merge is consistent are kept.
 *
 * @param o1             source ontology (classified with HermiT)
 * @param o2             target ontology (base of every merged candidate)
 * @param mappings       all mappings
 * @param defaultMapping mappings whose assertion subsets are enumerated
 * @return the consistent merged ontologies, one per surviving combination
 * @throws Exception if reasoning fails
 */
private List<OWLOntology> defaultMappingReasoning(OWLOntology o1, OWLOntology o2, Set<MappingObjectStr> mappings, Set<MappingObjectStr> defaultMapping)throws Exception{
	OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
	OWLOntology ontology = manager.createOntology();
	manager.addAxioms(ontology, o2.getAxioms());
	List<OWLOntology> returnList = new ArrayList<OWLOntology>();
	// Fix an iteration order so candidate lists can be aligned by index.
	List<MappingObjectStr> orderedMappings = new ArrayList<MappingObjectStr>(defaultMapping);
	OWLOntologyManager manager1 = OWLManager.createOWLOntologyManager();
	HermiTAccess hermitAccess = new HermiTAccess(manager1, o1, true);
	OWLReasoner reasonerHermit = hermitAccess.getReasoner();
	reasonerHermit.precomputeInferences(InferenceType.CLASS_HIERARCHY, InferenceType.OBJECT_PROPERTY_HIERARCHY, InferenceType.OBJECT_PROPERTY_ASSERTIONS, InferenceType.CLASS_ASSERTIONS);
	OWLDataFactory factory = manager1.getOWLDataFactory();
	// Apply every non-default mapping unconditionally into the working copy.
	for(MappingObjectStr mapping: mappings){
		if(!orderedMappings.contains(mapping)){
			if(mapping.getTypeOfMapping() == MappingObjectStr.CLASSES){
				OWLClass left = factory.getOWLClass(IRI.create(mapping.getIRIStrEnt1()));
				OWLClass right = factory.getOWLClass(IRI.create(mapping.getIRIStrEnt2()));
				NodeSet<OWLNamedIndividual> instances = reasonerHermit.getInstances(left, false);
				for(Node<OWLNamedIndividual> ind : instances){
					manager.addAxiom(ontology, factory.getOWLClassAssertionAxiom(right, ind.getRepresentativeElement()));
				}
			}
			if(mapping.getTypeOfMapping() == MappingObjectStr.OBJECTPROPERTIES){
				OWLObjectProperty left = factory.getOWLObjectProperty(IRI.create(mapping.getIRIStrEnt1()));
				OWLObjectProperty right = factory.getOWLObjectProperty(IRI.create(mapping.getIRIStrEnt2()));
				Node<OWLClass> top = reasonerHermit.getTopClassNode();
				NodeSet<OWLNamedIndividual> allIndividuals = reasonerHermit.getInstances(top.getRepresentativeElement(), false);
				for(Node<OWLNamedIndividual> individual : allIndividuals){
					NodeSet<OWLNamedIndividual> instances = reasonerHermit.getObjectPropertyValues(individual.getRepresentativeElement(), left);
					for(Node<OWLNamedIndividual> ind : instances){
						manager.addAxiom(ontology, factory.getOWLObjectPropertyAssertionAxiom(right, individual.getRepresentativeElement(), ind.getRepresentativeElement()));
					}
				}
			}
		}
	}
	System.out.println(orderedMappings);
	// Candidate assertions per default mapping, indexed by position in orderedMappings.
	List<Set<Object>> classCandidates = new ArrayList<Set<Object>>();
	List<Set<Set<Object>>> classCandidatesPSet = new ArrayList<Set<Set<Object>>>();
	for(MappingObjectStr mapping: orderedMappings){
		int index = orderedMappings.indexOf(mapping);
		// BUG FIX: always create the slot up front. Previously it was only
		// created inside the instance loop, so a mapping with no inferred
		// instances left the list short and get(index) below threw
		// IndexOutOfBoundsException; mapping types other than CLASSES /
		// OBJECTPROPERTIES also created index gaps.
		classCandidates.add(index, new HashSet<Object>());
		if(mapping.getTypeOfMapping() == MappingObjectStr.OBJECTPROPERTIES){
			OWLObjectProperty left = factory.getOWLObjectProperty(IRI.create(mapping.getIRIStrEnt1()));
			Node<OWLClass> top = reasonerHermit.getTopClassNode();
			NodeSet<OWLNamedIndividual> allIndividuals = reasonerHermit.getInstances(top.getRepresentativeElement(), false);
			for(Node<OWLNamedIndividual> individual : allIndividuals){
				NodeSet<OWLNamedIndividual> instances = reasonerHermit.getObjectPropertyValues(individual.getRepresentativeElement(), left);
				for(Node<OWLNamedIndividual> ind : instances){
					classCandidates.get(index).add(new RoleCandidate(individual.getRepresentativeElement(), ind.getRepresentativeElement()));
				}
			}
		}
		if(mapping.getTypeOfMapping() == MappingObjectStr.CLASSES){
			OWLClass left = factory.getOWLClass(IRI.create(mapping.getIRIStrEnt1()));
			NodeSet<OWLNamedIndividual> allIndividuals = reasonerHermit.getInstances(left, false);
			for(Node<OWLNamedIndividual> individual : allIndividuals){
				classCandidates.get(index).add(new ClassCandidate(individual.getRepresentativeElement()));
			}
		}
		classCandidatesPSet.add(index, Sets.powerSet(classCandidates.get(index)));
	}
	// Every combination of "which candidate assertions to accept" per mapping.
	List<Set<Set<Object>>> values = new ArrayList<Set<Set<Object>>>(classCandidatesPSet);
	Set<List<Set<Object>>> cart = Sets.cartesianProduct(values);
	Set<OntologyMappingEntity> defaultSuccessMappings = new HashSet<OntologyMappingEntity>();
	System.out.println(cart.size());
	int consistent = 0;
	int inconsistent = 0;
	for(List<Set<Object>> candidateMapping: cart){
		OWLOntologyManager man = OWLManager.createOWLOntologyManager();
		OWLDataFactory dataFactory = man.getOWLDataFactory();
		// BUG FIX: the test ontology was created with the outer 'manager' but
		// mutated through 'man'; create it with the manager that owns it.
		OWLOntology onto = man.createOntology();
		man.addAxioms(onto, ontology.getAxioms());
		Iterator<MappingObjectStr> itr = orderedMappings.iterator();
		for(Set<Object> s: candidateMapping){
			MappingObjectStr mapping = itr.next();
			if(mapping.getTypeOfMapping() == MappingObjectStr.CLASSES){
				OWLClass cl = dataFactory.getOWLClass(IRI.create(mapping.getIRIStrEnt2()));
				for(Object map: s){
					ClassCandidate cand = (ClassCandidate)map;
					man.addAxiom(onto, dataFactory.getOWLClassAssertionAxiom(cl, cand.getA()));
				}
			}
			if(mapping.getTypeOfMapping() == MappingObjectStr.OBJECTPROPERTIES){
				OWLObjectProperty prop = dataFactory.getOWLObjectProperty(IRI.create(mapping.getIRIStrEnt2()));
				for(Object map: s){
					RoleCandidate cand = (RoleCandidate)map;
					man.addAxiom(onto, dataFactory.getOWLObjectPropertyAssertionAxiom(prop, cand.getA1(), cand.getA2()));
				}
			}
		}
		hermitAccess = new HermiTAccess(man, onto, true);
		reasonerHermit = hermitAccess.getReasoner();
		if(reasonerHermit.isConsistent()){
			addMappingtoSet(new OntologyMappingEntity(onto, candidateMapping), defaultSuccessMappings);
			consistent++;
		}else{
			inconsistent++;
		}
		reasonerHermit.dispose();
	}
	for(OntologyMappingEntity ent: defaultSuccessMappings){
		returnList.add(ent.getMappedOntology());
	}
	System.out.println("Num consistent "+ consistent+" \n Num inconsistent"+inconsistent);
	System.out.println(defaultSuccessMappings.size());
	System.out.println(defaultSuccessMappings);
	return returnList;
}
private List<OWLOntology> defaultMappingReasoningBio(OWLOntology o1, OWLOntology o2, Set<MappingObjectStr> mappings, Set<MappingObjectStr> defaultMapping)throws Exception{
OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
OWLOntology ontology = manager.createOntology();
manager.addAxioms(ontology, o2.getAxioms());
List<OWLOntology> returnList = new ArrayList<OWLOntology>();
List<MappingObjectStr> orderedMappings = new ArrayList<MappingObjectStr>();
for(MappingObjectStr obj:defaultMapping){
orderedMappings.add(obj);
}
OWLOntologyManager manager1 = OWLManager.createOWLOntologyManager();
OWLDataFactory factory = manager1.getOWLDataFactory();
OWLClass clz = factory.getOWLClass(IRI.create("http://ncicb.nci.nih.gov/xml/owl/EVS/Thesaurus.owl#Lung"));
OWLNamedIndividual indi = factory.getOWLNamedIndividual(IRI.create("ind1000"));
OWLClassAssertionAxiom ax = factory.getOWLClassAssertionAxiom(clz, indi);
manager1.addAxiom(o1, ax);
clz = factory.getOWLClass(IRI.create("http://ncicb.nci.nih.gov/xml/owl/EVS/Thesaurus.owl#Thoracic_Cavity"));
OWLNamedIndividual indi1 = factory.getOWLNamedIndividual(IRI.create("ind1100"));
ax = factory.getOWLClassAssertionAxiom(clz, indi1);
manager1.addAxiom(o1, ax);
clz = factory.getOWLClass(IRI.create("http://ncicb.nci.nih.gov/xml/owl/EVS/Thesaurus.owl#Thoracic_Cavity"));
OWLNamedIndividual indi2 = factory.getOWLNamedIndividual(IRI.create("ind1101"));
ax = factory.getOWLClassAssertionAxiom(clz, indi2);
manager1.addAxiom(o1, ax);
clz = factory.getOWLClass(IRI.create("http://ncicb.nci.nih.gov/xml/owl/EVS/Thesaurus.owl#Visceral_Pleura"));
OWLNamedIndividual indi3 = factory.getOWLNamedIndividual(IRI.create("ind1102"));
ax = factory.getOWLClassAssertionAxiom(clz, indi3);
manager1.addAxiom(o1, ax);
OWLNamedIndividual indi4 = factory.getOWLNamedIndividual(IRI.create("ind1103"));
ax = factory.getOWLClassAssertionAxiom(clz, indi4);
manager1.addAxiom(o1, ax);
OWLObjectProperty prop1 = factory.getOWLObjectProperty(IRI.create("http://ncicb.nci.nih.gov/xml/owl/EVS/Thesaurus.owl#Anatomic_Structure_Has_Location"));
OWLObjectPropertyAssertionAxiom owlPropAxiom = factory.getOWLObjectPropertyAssertionAxiom(prop1, indi3, indi);
manager1.addAxiom(o1, owlPropAxiom);
owlPropAxiom = factory.getOWLObjectPropertyAssertionAxiom(prop1, indi3, indi2);
manager1.addAxiom(o1, owlPropAxiom);
owlPropAxiom = factory.getOWLObjectPropertyAssertionAxiom(prop1, indi4, indi1);
manager1.addAxiom(o1, owlPropAxiom);
saveMergedOntology("/media/kunal/kunal/logmap/bio5.owl", o1, o2,mappings);
HermiTAccess hermitAccess = new HermiTAccess(manager1, o1, true);
OWLReasoner reasonerHermit =hermitAccess.getReasoner();
reasonerHermit.precomputeInferences(InferenceType.CLASS_HIERARCHY, InferenceType.OBJECT_PROPERTY_HIERARCHY, InferenceType.OBJECT_PROPERTY_ASSERTIONS, InferenceType.CLASS_ASSERTIONS);
for(MappingObjectStr mapping: mappings){
if(!orderedMappings.contains(mapping)){
if(mapping.getTypeOfMapping() == MappingObjectStr.CLASSES){
OWLClass left =factory.getOWLClass(IRI.create(mapping.getIRIStrEnt1()));
OWLClass right =factory.getOWLClass(IRI.create(mapping.getIRIStrEnt2()));
NodeSet<OWLNamedIndividual> instances= reasonerHermit.getInstances(left, false);
for(Node<OWLNamedIndividual> ind : instances){
//System.out.println(left);
OWLClassAssertionAxiom axiom = factory.getOWLClassAssertionAxiom(right, ind.getRepresentativeElement());
manager.addAxiom(ontology, axiom);
}
}
if(mapping.getTypeOfMapping() == MappingObjectStr.OBJECTPROPERTIES){
OWLObjectProperty left =factory.getOWLObjectProperty(IRI.create(mapping.getIRIStrEnt1()));
OWLObjectProperty right =factory.getOWLObjectProperty(IRI.create(mapping.getIRIStrEnt2()));
Node<OWLClass> top = reasonerHermit.getTopClassNode();
NodeSet<OWLNamedIndividual> allIndividuals = reasonerHermit.getInstances(top.getRepresentativeElement(), false);
for(Node<OWLNamedIndividual> individual : allIndividuals){
NodeSet<OWLNamedIndividual> instances= reasonerHermit.getObjectPropertyValues(individual.getRepresentativeElement(), left);
for(Node<OWLNamedIndividual> ind : instances){
OWLObjectPropertyAssertionAxiom axiom = factory.getOWLObjectPropertyAssertionAxiom(right, individual.getRepresentativeElement(), ind.getRepresentativeElement());
manager.addAxiom(ontology, axiom);
}
}
}
}
}
int counter =1001;
/*for(MappingObjectStr mapping: orderedMappings){
int index = orderedMappings.indexOf(mapping);
if(mapping.getTypeOfMapping() == MappingObjectStr.CLASSES){
OWLClass left =factory.getOWLClass(IRI.create(mapping.getIRIStrEnt1()));
OWLClass right =factory.getOWLClass(IRI.create(mapping.getIRIStrEnt2()));
if(left.getIRI().toString().contains("Visceral_Pleura")||left.getIRI().toString().contains("Pleura")||
left.getIRI().toString().contains("Thoracic_Cavity")||
left.getIRI().toString().contains("Lung")||
left.getIRI().toString().contains("Organ")
){
OWLNamedIndividual ind = factory.getOWLNamedIndividual(IRI.create("ind"+counter));
manager.addAxiom(o1, factory.getOWLClassAssertionAxiom(left, ind));
counter++;
}
}
}*/
reasonerHermit.dispose();
hermitAccess = new HermiTAccess(manager1, o1, true);
reasonerHermit =hermitAccess.getReasoner();
reasonerHermit.precomputeInferences(InferenceType.CLASS_HIERARCHY, InferenceType.OBJECT_PROPERTY_HIERARCHY, InferenceType.OBJECT_PROPERTY_ASSERTIONS, InferenceType.CLASS_ASSERTIONS);
//process default mappings
Map<String, Set<RoleCandidate>> roleCandidates = new HashMap<String, Set<RoleCandidate>> ();
//Map<String, Set<Object>> classCandidates = new HashMap<String, Set<Object>> ();
List<Set<Object>> classCandidates = new ArrayList<Set<Object>> ();
Map<String, Set<Set<RoleCandidate>>> roleCandidatesPSet = new HashMap<String, Set<Set<RoleCandidate>>> ();
List<Set<Set<Object>>> classCandidatesPSet = new ArrayList<Set<Set<Object>>> ();
for(MappingObjectStr mapping: orderedMappings){
int index = orderedMappings.indexOf(mapping);
if(mapping.getTypeOfMapping() == MappingObjectStr.OBJECTPROPERTIES){
OWLObjectProperty left =factory.getOWLObjectProperty(IRI.create(mapping.getIRIStrEnt1()));
OWLObjectProperty right =factory.getOWLObjectProperty(IRI.create(mapping.getIRIStrEnt2()));
Node<OWLClass> top = reasonerHermit.getTopClassNode();
NodeSet<OWLNamedIndividual> allIndividuals = reasonerHermit.getInstances(top.getRepresentativeElement(), false);
for(Node<OWLNamedIndividual> individual : allIndividuals){
NodeSet<OWLNamedIndividual> instances= reasonerHermit.getObjectPropertyValues(individual.getRepresentativeElement(), left);
for(Node<OWLNamedIndividual> ind : instances){
if(classCandidates.size() < index+1){
classCandidates.add(index, new HashSet<Object>());
}
classCandidates.get(index).add(new RoleCandidate(individual.getRepresentativeElement(), ind.getRepresentativeElement()));
}
}
/*System.out.println(left.getIRI().toString());
if(left.getIRI().toString().equals("<http://ncicb.nci.nih.gov/xml/owl/EVS/Thesaurus.owl#Organ>")
){
OWLNamedIndividual ind1 = factory.getOWLNamedIndividual(IRI.create("ind"+counter));
counter++;
OWLNamedIndividual ind2 = factory.getOWLNamedIndividual(IRI.create("ind"+counter));
counter++;
classCandidates.add(index, Sets.newHashSet((Object)new RoleCandidate(ind1, ind2)));
}*/
/*OWLNamedIndividual ind1 = factory.getOWLNamedIndividual(IRI.create("ind"+counter));
counter++;
OWLNamedIndividual ind2 = factory.getOWLNamedIndividual(IRI.create("ind"+counter));
counter++;
classCandidates.add(index, Sets.newHashSet(((Object) new RoleCandidate(ind1, ind2))));*/
//Set<Object> roleAssert = classCandidates.get(index);
//classCandidatesPSet.add(index, Sets.powerSet(roleAssert));
}
if(mapping.getTypeOfMapping() == MappingObjectStr.CLASSES){
OWLClass left =factory.getOWLClass(IRI.create(mapping.getIRIStrEnt1()));
OWLClass right =factory.getOWLClass(IRI.create(mapping.getIRIStrEnt2()));
NodeSet<OWLNamedIndividual> allIndividuals = reasonerHermit.getInstances(left, false);
//System.out.println(left);
//System.out.println(allIndividuals);
classCandidates.add(index, new HashSet<Object>());
for(Node<OWLNamedIndividual> individual : allIndividuals){
classCandidates.get(index).add(new ClassCandidate(individual.getRepresentativeElement()));
}
/*if(left.getIRI().toString().contains("Visceral_Pleura")||left.getIRI().toString().contains("Pleura")||
left.getIRI().toString().contains("Thoracic_Cavity")||
left.getIRI().toString().contains("Lung")||
left.getIRI().toString().contains("Organ")
){
OWLNamedIndividual ind = factory.getOWLNamedIndividual(IRI.create("ind"+counter));
counter++;
classCandidates.add(index, Sets.newHashSet((Object)new ClassCandidate(ind)));
}*/
Set<Object> classAssert = classCandidates.get(index);
//System.out.println(classAssert);
classCandidatesPSet.add(index, Sets.powerSet(classAssert));
}
}
Set<List<Set<Object>>> cart = null;
List<Set<Set<Object>>> values = new ArrayList<Set<Set<Object>>>();
values.addAll(classCandidatesPSet);
cart = Sets.cartesianProduct(values);
Set<OntologyMappingEntity> defaultSuccessMappings = new HashSet<OntologyMappingEntity>();
//Set<List<Set<Object>>> defaultSuccessMappings = new HashSet<List<Set<Object>>>();
System.out.println(cart.size());
int consistent =0;
int inconsistent = 0;
int count =1;
for(List<Set<Object>> candidateMapping: cart){
long startTime = System.currentTimeMillis();
OWLOntologyManager man = OWLManager.createOWLOntologyManager();
OWLDataFactory dataFactory = man.getOWLDataFactory();
OWLOntology onto = manager.createOntology();
man.addAxioms(onto, ontology.getAxioms());
Iterator<MappingObjectStr> itr = orderedMappings.iterator();
/*System.out.println("yeah "+o.size());
System.out.println("mappings "+orderedMappings.size());*/
for(Set<Object> s: candidateMapping){
MappingObjectStr mapping = itr.next();
if(mapping.getTypeOfMapping() == MappingObjectStr.CLASSES){
for(Object map:s){
ClassCandidate cand = (ClassCandidate)map;
OWLClass cl = dataFactory.getOWLClass(IRI.create(mapping.getIRIStrEnt2()));
OWLClassAssertionAxiom axiom = dataFactory.getOWLClassAssertionAxiom(cl, cand.getA());
cand.getA();
man.addAxiom(onto, axiom);
}
}
if(mapping.getTypeOfMapping() == MappingObjectStr.OBJECTPROPERTIES){
for(Object map:s){
RoleCandidate cand = (RoleCandidate)map;
OWLObjectProperty prop = dataFactory.getOWLObjectProperty(IRI.create(mapping.getIRIStrEnt2()));
OWLObjectPropertyAssertionAxiom axiom = dataFactory.getOWLObjectPropertyAssertionAxiom(prop, cand.getA1(), cand.getA2());
man.addAxiom(onto, axiom);
}
}
}
//System.out.println(onto.getAxioms());
hermitAccess = new HermiTAccess(man, onto, true);
reasonerHermit =hermitAccess.getReasoner();
//reasonerHermit.precomputeInferences(InferenceType.CLASS_HIERARCHY);
boolean flag = false;
if(reasonerHermit.isConsistent()){
flag = true;
OntologyMappingEntity owlEntity = new OntologyMappingEntity(onto, candidateMapping);
addMappingtoSet(owlEntity, defaultSuccessMappings);
consistent++;
}else{
inconsistent++;
}
reasonerHermit.dispose();
System.out.println("Time taken to process candidate :: "+count +" :: "+(System.currentTimeMillis() -startTime)+" :: consistent "+ flag);
count++;
}
for(OntologyMappingEntity ent: defaultSuccessMappings){
returnList.add(ent.getMappedOntology());
}
System.out.println("Num consistent "+ consistent+" \n Num inconsistent"+inconsistent);
System.out.println(defaultSuccessMappings.size());
System.out.println(defaultSuccessMappings);
return returnList;
}
/**
 * Adds {@code candidate} to {@code successfulMappings}, keeping only maximal mappings.
 * Returns false (and leaves the set untouched) when the candidate is already present
 * or is subsumed by an existing entry; otherwise any entries subsumed by the candidate
 * are evicted and the candidate is inserted.
 */
public boolean addMappingtoSet(OntologyMappingEntity candidate, Set<OntologyMappingEntity> successfulMappings)throws Exception{
    if (successfulMappings.contains(candidate)) {
        return false;
    }
    List<OntologyMappingEntity> subsumed = new ArrayList<OntologyMappingEntity>();
    for (OntologyMappingEntity existing : successfulMappings) {
        if (checkSubSet(candidate.getDefaultMappings(), existing.getDefaultMappings())) {
            // Candidate adds nothing over an existing mapping; reject it.
            return false;
        }
        if (checkSubSet(existing.getDefaultMappings(), candidate.getDefaultMappings())) {
            // Existing entry is weaker than the candidate; mark it for removal.
            subsumed.add(existing);
        }
    }
    successfulMappings.removeAll(subsumed);
    successfulMappings.add(candidate);
    return true;
}
/**
 * Returns true if, position by position, every set in {@code s1} is contained in
 * (or empty relative to) the set at the same index of {@code s2}.
 * Assumes both lists have the same length — TODO confirm with callers.
 */
public boolean checkSubSet(List<Set<Object>> s1, List<Set<Object>> s2)throws Exception{
    logOutput("Comparing lists :: \n"+s1+" \n"+s2);
    // Iterate by index: the previous s1.indexOf(sub1) lookup returned the FIRST equal
    // set, which mis-paired positions whenever s1 contained duplicate sets, and made
    // the loop O(n^2).
    for (int i = 0; i < s1.size(); i++) {
        Set<Object> sub1 = s1.get(i);
        Set<Object> counterpart = s2.get(i);
        logOutput(counterpart+".containsAll("+sub1+"): "+(counterpart.containsAll(sub1) || sub1.isEmpty()));
        if (counterpart.containsAll(sub1) || sub1.isEmpty()) {
            continue;
        }
        logOutput("return value false");
        return false;
    }
    logOutput("return value true");
    return true;
}
// Unimplemented stub; presumably intended to collect candidate axioms —
// TODO(review): implement or remove.
private void getCandidateAxioms(){
}
/**
 * Appends {@code text} on a new line to a hardcoded log file.
 * NOTE(review): the path is machine-specific; consider making it configurable,
 * and note that the file is reopened on every call.
 */
private void logOutput(String text)throws Exception{
    File file = new File("/media/kunal/kunal/logger.txt");
    FileWriter writer = new FileWriter(file, true);
    BufferedWriter w = new BufferedWriter(writer);
    try {
        w.write("\n"+text);
        w.flush();
    } finally {
        // Previously the writer leaked when write()/flush() threw; always close it.
        w.close();
    }
}
// Runs the LogMap2 matcher over the given ontology pair and returns its mappings.
private Set<MappingObjectStr> getLogMapMappings(OWLOntology ontology1, OWLOntology ontology2){
    return new LogMap2_Matcher(ontology1, ontology2).getLogmap2_Mappings();
}
/**
 * Loads the FMA-small ontology, asserts one fresh named individual per class in its
 * signature (ind1000, ind1001, ...) and saves the enriched ontology to a new file.
 * Dead commented-out per-class-name variants were removed.
 * NOTE(review): paths are machine-specific; consider parameterizing them.
 */
private static void insertNameIndividuals()throws Exception{
    OWLOntologyManager man = OWLManager.createOWLOntologyManager();
    OWLOntology onto = man.loadOntologyFromOntologyDocument(new File("/media/kunal/kunal/ontologies/oaei2014_FMA_small_overlapping_nci.owl"));
    OWLDataFactory fact = man.getOWLDataFactory();
    int counter = 1000;
    // Give every class in the signature exactly one distinct individual.
    for(OWLClass cl: onto.getClassesInSignature()){
        OWLNamedIndividual ind = fact.getOWLNamedIndividual(IRI.create("ind"+counter));
        OWLClassAssertionAxiom axiom = fact.getOWLClassAssertionAxiom(cl, ind);
        man.addAxiom(onto, axiom);
        counter++;
    }
    man.saveOntology(onto, IRI.create(new File("/media/kunal/kunal/NCI_indi")));
}
// Runs the default-mapping pipeline on the two food ontologies.
private static void testFood()throws Exception{
    // A single manager loads both ontologies; the unused second manager was removed.
    OWLOntologyManager ontologyManager1 = OWLManager.createOWLOntologyManager();
    OWLOntology ont1 = ontologyManager1.loadOntologyFromOntologyDocument(new File("/media/kunal/kunal/logmap/food1.owl"));
    OWLOntology ont2 = ontologyManager1.loadOntologyFromOntologyDocument(new File("/media/kunal/kunal/logmap/food2.owl"));
    DefaultMapping mapping = new DefaultMapping();
    mapping.processMappingsForFood(ont1, ont2);
}
// Runs the default-mapping pipeline on the two marriage ontologies
// (note: marriage2 is loaded as ont1 and marriage1 as ont2).
private static void testMarriage()throws Exception{
    // A single manager loads both ontologies; the unused second manager was removed.
    OWLOntologyManager ontologyManager1 = OWLManager.createOWLOntologyManager();
    OWLOntology ont1 = ontologyManager1.loadOntologyFromOntologyDocument(new File("/media/kunal/kunal/ontologies/marriage2.owl"));
    OWLOntology ont2 = ontologyManager1.loadOntologyFromOntologyDocument(new File("/media/kunal/kunal/ontologies/marriage1.owl"));
    DefaultMapping mapping = new DefaultMapping();
    mapping.processMappingsForMarriage(ont1, ont2);
}
// Runs the default-mapping pipeline on the ekaw/cmt conference ontologies.
private static void testConference()throws Exception{
    // A single manager loads both ontologies; the unused second manager was removed.
    OWLOntologyManager ontologyManager1 = OWLManager.createOWLOntologyManager();
    OWLOntology ont1 = ontologyManager1.loadOntologyFromOntologyDocument(new File("/media/kunal/kunal/ontologies/ekaw.owl"));
    OWLOntology ont2 = ontologyManager1.loadOntologyFromOntologyDocument(new File("/media/kunal/kunal/ontologies/cmt.owl"));
    DefaultMapping mapping = new DefaultMapping();
    mapping.processMappingsForConference(ont1, ont2);
}
// Runs the mapping pipeline on the FMA/NCI biomedical ontologies
// (NCI loaded as ont1, FMA as ont2).
private static void testBio()throws Exception{
    // A single manager loads both ontologies; the unused second manager was removed.
    OWLOntologyManager ontologyManager1 = OWLManager.createOWLOntologyManager();
    OWLOntology ont2 = ontologyManager1.loadOntologyFromOntologyDocument(new File("/media/kunal/kunal/onto1/oaei2014_FMA_small_overlapping_nci.owl"));
    OWLOntology ont1 = ontologyManager1.loadOntologyFromOntologyDocument(new File("/media/kunal/kunal/onto1/oaei2014_NCI_small_overlapping_fma.owl"));
    DefaultMapping mapping = new DefaultMapping();
    // NOTE(review): this invokes processMappingsForConference for the bio ontologies —
    // looks like copy/paste; confirm whether a bio-specific entry point was intended.
    mapping.processMappingsForConference(ont1, ont2);
}
// Sanity check: prints the Guava cartesian product of two small sets.
public static void test(){
    Set<Integer> numbers = Sets.newHashSet(1,2,3);
    Set<String> letters = Sets.newHashSet("a", "b", "c");
    System.out.println(Sets.cartesianProduct(numbers, letters));
}
// Entry point: the active scenario is selected by (un)commenting one of the calls below.
public static void main(String args[])throws Exception{
    //testMarriage();
    //testFood();
    testBio();
    //test();
    //insertNameIndividuals();
}
}
| |
/*
* Copyright (c) 2005-2009 Grameen Foundation USA
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*
* See also http://www.apache.org/licenses/LICENSE-2.0.html for an
* explanation of the license and how it is applied.
*/
package org.mifos.application.productdefinition.struts.action;
import java.sql.Date;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.GregorianCalendar;
import java.util.List;
import java.util.Locale;
import junit.framework.Assert;
import org.mifos.application.accounts.financial.business.GLCodeEntity;
import org.mifos.application.accounts.persistence.AccountPersistence;
import org.mifos.application.master.business.MasterDataEntity;
import org.mifos.application.meeting.business.MeetingBO;
import org.mifos.application.productdefinition.business.ProductCategoryBO;
import org.mifos.application.productdefinition.business.SavingsOfferingBO;
import org.mifos.application.productdefinition.util.helpers.ApplicableTo;
import org.mifos.application.productdefinition.util.helpers.InterestCalcType;
import org.mifos.application.productdefinition.util.helpers.PrdStatus;
import org.mifos.application.productdefinition.util.helpers.ProductDefinitionConstants;
import org.mifos.application.productdefinition.util.helpers.RecommendedAmountUnit;
import org.mifos.application.productdefinition.util.helpers.SavingsType;
import org.mifos.application.util.helpers.ActionForwards;
import org.mifos.application.util.helpers.Methods;
import org.mifos.framework.MifosMockStrutsTestCase;
import org.mifos.framework.exceptions.ApplicationException;
import org.mifos.framework.exceptions.InvalidDateException;
import org.mifos.framework.exceptions.PageExpiredException;
import org.mifos.framework.exceptions.SystemException;
import org.mifos.framework.hibernate.helper.StaticHibernateUtil;
import org.mifos.framework.security.util.ActivityContext;
import org.mifos.framework.security.util.UserContext;
import org.mifos.framework.util.helpers.Constants;
import org.mifos.framework.util.helpers.DateUtils;
import org.mifos.framework.util.helpers.ExceptionConstants;
import org.mifos.framework.util.helpers.Money;
import org.mifos.framework.util.helpers.SessionUtils;
import org.mifos.framework.util.helpers.TestGeneralLedgerCode;
import org.mifos.framework.util.helpers.TestObjectFactory;
public class SavingsPrdActionStrutsTest extends MifosMockStrutsTestCase {
// Default constructor; declared only to surface the checked exceptions of the superclass.
public SavingsPrdActionStrutsTest() throws SystemException, ApplicationException {
    super();
}
// Tolerance for floating-point comparisons in assertions.
private static final double DELTA = 0.00000001;
// Savings product created by individual tests; removed again in tearDown().
private SavingsOfferingBO product;
// Struts page-flow key created in setUp() and sent back with each request.
private String flowKey;
// Authenticated user context placed on the session in setUp().
UserContext userContext = null;
AccountPersistence accountPersistence = new AccountPersistence();
@Override
protected void tearDown() throws Exception {
    // Remove any product persisted by the test before closing the Hibernate session.
    TestObjectFactory.removeObject(product);
    StaticHibernateUtil.closeSession();
    super.tearDown();
}
@Override
protected void setUp() throws Exception {
    super.setUp();
    // Attach an authenticated user context to the session so the action's checks pass.
    userContext = TestObjectFactory.getContext();
    request.getSession().setAttribute(Constants.USERCONTEXT, userContext);
    addRequestParameter("recordLoanOfficerId", "1");
    addRequestParameter("recordOfficeId", "1");
    // Activity context drives the permission checks inside the struts action.
    ActivityContext ac = TestObjectFactory.getActivityContext();
    request.getSession(false).setAttribute("ActivityContext", ac);
    // Start a fresh page flow; tests pass this key back with each request.
    flowKey = createFlow(request, SavingsPrdAction.class);
}
// Invokes the "load" method and verifies that every master-data list needed by the
// savings-product create page is placed on the session with the expected size.
public void testLoad() throws Exception {
    setRequestPathInfo("/savingsproductaction.do");
    addRequestParameter("method", "load");
    actionPerform();
    verifyNoActionErrors();
    verifyNoActionMessages();
    verifyForward(ActionForwards.load_success.toString());
    List<ProductCategoryBO> productCategories = (List<ProductCategoryBO>) SessionUtils.getAttribute(
            ProductDefinitionConstants.SAVINGSPRODUCTCATEGORYLIST, request);
    Assert.assertEquals("The size of master data for categories", 1, productCategories.size());
    for (ProductCategoryBO productCategory : productCategories) {
        Assert.assertNotNull(productCategory.getProductType());
    }
    // Assertion messages now name the list being checked (they were all copy-pasted
    // as "The size of applicable list"), matching the style used in testManage.
    Assert.assertEquals("The size of applicable list", 3, ((List<MasterDataEntity>) SessionUtils.getAttribute(
            ProductDefinitionConstants.SAVINGSAPPLFORLIST, request)).size());
    Assert.assertEquals("The size of savings type list", 2, ((List<MasterDataEntity>) SessionUtils.getAttribute(
            ProductDefinitionConstants.SAVINGSTYPELIST, request)).size());
    Assert.assertEquals("The size of reco amount unit list", 2, ((List<MasterDataEntity>) SessionUtils.getAttribute(
            ProductDefinitionConstants.RECAMNTUNITLIST, request)).size());
    Assert.assertEquals("The size of interest calculation types list", 2, ((List<MasterDataEntity>) SessionUtils.getAttribute(
            ProductDefinitionConstants.INTCALCTYPESLIST, request)).size());
    Assert.assertEquals("The size of recurrence type list", 2, ((List<MasterDataEntity>) SessionUtils.getAttribute(
            ProductDefinitionConstants.SAVINGSRECURRENCETYPELIST, request)).size());
    Assert.assertEquals("The size of deposit gl code list", 6, ((List<GLCodeEntity>) SessionUtils.getAttribute(
            ProductDefinitionConstants.SAVINGSDEPOSITGLCODELIST, request)).size());
    Assert.assertEquals("The size of interest gl code list", 2, ((List<GLCodeEntity>) SessionUtils.getAttribute(
            ProductDefinitionConstants.SAVINGSINTERESTGLCODELIST, request)).size());
}
// Preview with no form data at all: every mandatory-field validation error must fire.
public void testPreviewWithOutData() throws Exception {
    setRequestPathInfo("/savingsproductaction.do");
    addRequestParameter("method", "preview");
    addRequestParameter(Constants.CURRENTFLOWKEY, flowKey);
    actionPerform();
    verifyActionErrors(new String[] { "Please select the Applicable for.",
            "Please select the GL code for deposits.", "Please select the Product category.",
            "Please select the Type of deposits.", "Please specify the Product instance name.",
            "Please specify the Short name.", "Please specify the Start date.", "errors.mandatory",
            "errors.mandatory", "errors.mandatory", "errors.select", "errors.select" });
    verifyInputForward();
}
// Preview with product applicable to groups (prdApplicableMaster=2) but no
// "recommended amount applies to" value: expect the corresponding validation error.
public void testPreviewWithPrdApplToGroupAndAppliesToNotEntered() throws Exception {
    setRequestPathInfo("/savingsproductaction.do");
    addRequestParameter("method", "preview");
    addRequestParameter(Constants.CURRENTFLOWKEY, flowKey);
    addRequestParameter("prdOfferingName", "Savings Offering");
    addRequestParameter("prdOfferingShortName", "SAVP");
    addRequestParameter("prdCategory", "2");
    addRequestParameter("startDate", DateUtils.getCurrentDate(userContext.getPreferredLocale()));
    addRequestParameter("prdApplicableMaster", "2");
    addRequestParameter("savingsType", "2");
    addRequestParameter("interestRate", "1");
    addRequestParameter("interestCalcType", "1");
    addRequestParameter("timeForInterestCacl", "1");
    addRequestParameter("recurTypeFortimeForInterestCacl", "2");
    addRequestParameter("freqOfInterest", "1");
    addRequestParameter("depositGLCode", "42");
    addRequestParameter("interestGLCode", "57");
    actionPerform();
    verifyActionErrors(new String[] { "Please select the Amount Applies to." });
    verifyInputForward();
}
// Preview of a mandatory savings product (savingsType=1) without a recommended
// amount: expect the mandatory-amount validation error.
public void testPreviewWithMandPrdAndAmountNotEntered() throws Exception {
    setRequestPathInfo("/savingsproductaction.do");
    addRequestParameter("method", "preview");
    addRequestParameter(Constants.CURRENTFLOWKEY, flowKey);
    addRequestParameter("prdOfferingName", "Savings Offering");
    addRequestParameter("prdOfferingShortName", "SAVP");
    addRequestParameter("prdCategory", "2");
    addRequestParameter("startDate", DateUtils.getCurrentDate(userContext.getPreferredLocale()));
    addRequestParameter("prdApplicableMaster", "1");
    addRequestParameter("savingsType", "1");
    addRequestParameter("interestRate", "1");
    addRequestParameter("interestCalcType", "1");
    addRequestParameter("timeForInterestCacl", "1");
    addRequestParameter("recurTypeFortimeForInterestCacl", "2");
    addRequestParameter("freqOfInterest", "1");
    addRequestParameter("depositGLCode", "42");
    addRequestParameter("interestGLCode", "57");
    actionPerform();
    verifyActionErrors(new String[] { ProductDefinitionConstants.ERRORMANDAMOUNT });
    verifyInputForward();
}
// Preview of a mandatory savings product with recommendedAmount of 0.0: zero must
// be rejected the same way as a missing amount.
public void testPreviewWithMandPrdAndZeroAmountEntered() throws Exception {
    setRequestPathInfo("/savingsproductaction.do");
    addRequestParameter("method", "preview");
    addRequestParameter(Constants.CURRENTFLOWKEY, flowKey);
    addRequestParameter("prdOfferingName", "Savings Offering");
    addRequestParameter("prdOfferingShortName", "SAVP");
    addRequestParameter("prdCategory", "2");
    addRequestParameter("startDate", DateUtils.getCurrentDate(userContext.getPreferredLocale()));
    addRequestParameter("prdApplicableMaster", "1");
    addRequestParameter("savingsType", "1");
    addRequestParameter("interestRate", "1");
    addRequestParameter("interestCalcType", "1");
    addRequestParameter("timeForInterestCacl", "1");
    addRequestParameter("recurTypeFortimeForInterestCacl", "2");
    addRequestParameter("freqOfInterest", "1");
    addRequestParameter("depositGLCode", "42");
    addRequestParameter("interestGLCode", "57");
    addRequestParameter("recommendedAmount", "0.0");
    addRequestParameter("description", "Savings");
    addRequestParameter("maxAmntWithdrawl", "10.0");
    addRequestParameter("minAmntForInt", "10.0");
    actionPerform();
    verifyActionErrors(new String[] { ProductDefinitionConstants.ERRORMANDAMOUNT });
    verifyInputForward();
}
// Preview with interestRate just above 100 (100.1): expect the interest-rate
// range validation error.
public void testPreviewWithInterestRateGreaterThanHundred() throws Exception {
    setRequestPathInfo("/savingsproductaction.do");
    addRequestParameter("method", "preview");
    addRequestParameter(Constants.CURRENTFLOWKEY, flowKey);
    addRequestParameter("prdOfferingName", "Savings Offering");
    addRequestParameter("prdOfferingShortName", "SAVP");
    addRequestParameter("prdCategory", "2");
    addRequestParameter("startDate", DateUtils.getCurrentDate(userContext.getPreferredLocale()));
    addRequestParameter("prdApplicableMaster", "1");
    addRequestParameter("savingsType", "1");
    addRequestParameter("interestRate", "100.1");
    addRequestParameter("interestCalcType", "1");
    addRequestParameter("timeForInterestCacl", "1");
    addRequestParameter("recurTypeFortimeForInterestCacl", "2");
    addRequestParameter("freqOfInterest", "1");
    addRequestParameter("depositGLCode", "42");
    addRequestParameter("interestGLCode", "57");
    addRequestParameter("recommendedAmount", "120.0");
    actionPerform();
    verifyActionErrors(new String[] { ProductDefinitionConstants.ERRORINTRATE });
    verifyInputForward();
}
// Preview with startDate one day in the past (offset -1): expect the
// invalid-start-date validation error.
public void testPreviewWithStartDateLessThanCurrentDate() throws Exception {
    setRequestPathInfo("/savingsproductaction.do");
    addRequestParameter("method", "preview");
    addRequestParameter(Constants.CURRENTFLOWKEY, flowKey);
    addRequestParameter("prdOfferingName", "Savings Offering");
    addRequestParameter("prdOfferingShortName", "SAVP");
    addRequestParameter("prdCategory", "2");
    addRequestParameter("startDate", offSetCurrentDate(-1, userContext.getPreferredLocale()));
    addRequestParameter("prdApplicableMaster", "1");
    addRequestParameter("savingsType", "1");
    addRequestParameter("interestRate", "100.0");
    addRequestParameter("interestCalcType", "1");
    addRequestParameter("timeForInterestCacl", "1");
    addRequestParameter("recurTypeFortimeForInterestCacl", "2");
    addRequestParameter("freqOfInterest", "1");
    addRequestParameter("depositGLCode", "42");
    addRequestParameter("interestGLCode", "57");
    addRequestParameter("recommendedAmount", "120.0");
    actionPerform();
    verifyActionErrors(new String[] { ProductDefinitionConstants.INVALIDSTARTDATE });
    verifyInputForward();
}
// Preview with endDate (today-1) earlier than startDate (today): expect the
// invalid-end-date validation error.
public void testPreviewWithEndDateLessThanStartDate() throws Exception {
    setRequestPathInfo("/savingsproductaction.do");
    addRequestParameter("method", "preview");
    addRequestParameter(Constants.CURRENTFLOWKEY, flowKey);
    addRequestParameter("prdOfferingName", "Savings Offering");
    addRequestParameter("prdOfferingShortName", "SAVP");
    addRequestParameter("prdCategory", "2");
    addRequestParameter("startDate", offSetCurrentDate(0, userContext.getPreferredLocale()));
    addRequestParameter("endDate", offSetCurrentDate(-1, userContext.getPreferredLocale()));
    addRequestParameter("prdApplicableMaster", "1");
    addRequestParameter("savingsType", "1");
    addRequestParameter("interestRate", "100.0");
    addRequestParameter("interestCalcType", "1");
    addRequestParameter("timeForInterestCacl", "1");
    addRequestParameter("recurTypeFortimeForInterestCacl", "2");
    addRequestParameter("freqOfInterest", "1");
    addRequestParameter("depositGLCode", "42");
    addRequestParameter("interestGLCode", "57");
    addRequestParameter("recommendedAmount", "120.0");
    actionPerform();
    verifyActionErrors(new String[] { ProductDefinitionConstants.INVALIDENDDATE });
    verifyInputForward();
}
// Happy-path preview: a fully valid form must produce no errors and forward to
// preview_success.
public void testPreview() throws Exception {
    setRequestPathInfo("/savingsproductaction.do");
    addRequestParameter("method", "preview");
    addRequestParameter(Constants.CURRENTFLOWKEY, flowKey);
    addRequestParameter("prdOfferingName", "Savings Offering");
    addRequestParameter("prdOfferingShortName", "SAVP");
    addRequestParameter("prdCategory", "2");
    addRequestParameter("startDate", offSetCurrentDate(0, userContext.getPreferredLocale()));
    addRequestParameter("endDate", offSetCurrentDate(+1, userContext.getPreferredLocale()));
    addRequestParameter("prdApplicableMaster", "1");
    addRequestParameter("savingsType", "1");
    addRequestParameter("interestRate", "100.0");
    addRequestParameter("interestCalcType", "1");
    addRequestParameter("timeForInterestCacl", "1");
    addRequestParameter("recurTypeFortimeForInterestCacl", "2");
    addRequestParameter("freqOfInterest", "1");
    addRequestParameter("depositGLCode", "42");
    addRequestParameter("interestGLCode", "57");
    addRequestParameter("recommendedAmount", "120.0");
    actionPerform();
    verifyNoActionErrors();
    verifyForward(ActionForwards.preview_success.toString());
}
// Full load -> preview -> create flow for a client-level mandatory product; the
// created product is fetched by the id left on the request and deleted again.
public void testCreate() throws Exception {
    setRequestPathInfo("/savingsproductaction.do");
    addRequestParameter("method", "load");
    actionPerform();
    // load starts a new flow; pick up its key for the subsequent requests.
    flowKey = (String) request.getAttribute(Constants.CURRENTFLOWKEY);
    setRequestPathInfo("/savingsproductaction.do");
    addRequestParameter("method", "preview");
    addRequestParameter(Constants.CURRENTFLOWKEY, flowKey);
    addRequestParameter("prdOfferingName", "Savings Offering");
    addRequestParameter("prdOfferingShortName", "SAVP");
    addRequestParameter("prdCategory", "2");
    addRequestParameter("startDate", offSetCurrentDate(0, userContext.getPreferredLocale()));
    addRequestParameter("endDate", offSetCurrentDate(+1, userContext.getPreferredLocale()));
    addRequestParameter("prdApplicableMaster", "1");
    addRequestParameter("savingsType", "1");
    addRequestParameter("interestRate", "100.0");
    addRequestParameter("interestCalcType", "1");
    addRequestParameter("timeForInterestCacl", "1");
    addRequestParameter("recurTypeFortimeForInterestCacl", "2");
    addRequestParameter("freqOfInterest", "1");
    addRequestParameter("depositGLCode", TestGeneralLedgerCode.MARGIN_MONEY_ONE.toString());
    addRequestParameter("interestGLCode", TestGeneralLedgerCode.INTEREST_CLIENT_MANDATORY_SAVINGS.toString());
    addRequestParameter("recommendedAmount", "120.0");
    actionPerform();
    setRequestPathInfo("/savingsproductaction.do");
    addRequestParameter("method", "create");
    addRequestParameter(Constants.CURRENTFLOWKEY, flowKey);
    actionPerform();
    verifyNoActionErrors();
    verifyNoActionMessages();
    verifyForward(ActionForwards.create_success.toString());
    Assert.assertNotNull(request.getAttribute(ProductDefinitionConstants.SAVINGSPRODUCTID));
    // Clean up the product persisted by this test.
    TestObjectFactory.removeObject((SavingsOfferingBO) TestObjectFactory.getObject(SavingsOfferingBO.class,
            (Short) request.getAttribute(ProductDefinitionConstants.SAVINGSPRODUCTID)));
}
// Full load -> preview -> create flow for a GROUP-applicable product
// (prdApplicableMaster=2), which additionally requires recommendedAmntUnit.
public void testCreateForPrdApplicableToGroups() throws Exception {
    setRequestPathInfo("/savingsproductaction.do");
    addRequestParameter("method", "load");
    actionPerform();
    // load starts a new flow; pick up its key for the subsequent requests.
    flowKey = (String) request.getAttribute(Constants.CURRENTFLOWKEY);
    setRequestPathInfo("/savingsproductaction.do");
    addRequestParameter("method", "preview");
    addRequestParameter(Constants.CURRENTFLOWKEY, flowKey);
    addRequestParameter("prdOfferingName", "Savings Offering");
    addRequestParameter("prdOfferingShortName", "SAVP");
    addRequestParameter("prdCategory", "2");
    addRequestParameter("startDate", offSetCurrentDate(0, userContext.getPreferredLocale()));
    addRequestParameter("endDate", offSetCurrentDate(+1, userContext.getPreferredLocale()));
    addRequestParameter("prdApplicableMaster", "2");
    addRequestParameter("savingsType", "1");
    addRequestParameter("interestRate", "100.0");
    addRequestParameter("interestCalcType", "1");
    addRequestParameter("timeForInterestCacl", "1");
    addRequestParameter("recurTypeFortimeForInterestCacl", "2");
    addRequestParameter("freqOfInterest", "1");
    addRequestParameter("depositGLCode", TestGeneralLedgerCode.MARGIN_MONEY_ONE.toString());
    addRequestParameter("interestGLCode", TestGeneralLedgerCode.INTEREST_CLIENT_MANDATORY_SAVINGS.toString());
    addRequestParameter("recommendedAmount", "120.0");
    addRequestParameter("recommendedAmntUnit", "1");
    actionPerform();
    setRequestPathInfo("/savingsproductaction.do");
    addRequestParameter("method", "create");
    addRequestParameter(Constants.CURRENTFLOWKEY, flowKey);
    actionPerform();
    verifyNoActionErrors();
    verifyNoActionMessages();
    verifyForward(ActionForwards.create_success.toString());
    Assert.assertNotNull(request.getAttribute(ProductDefinitionConstants.SAVINGSPRODUCTID));
    // Clean up the product persisted by this test.
    TestObjectFactory.removeObject((SavingsOfferingBO) TestObjectFactory.getObject(SavingsOfferingBO.class,
            (Short) request.getAttribute(ProductDefinitionConstants.SAVINGSPRODUCTID)));
}
// Full load -> preview -> create flow for a VOLUNTARY savings product
// (savingsType=2); no recommendedAmount is required for voluntary products.
public void testCreateForVolProducts() throws Exception {
    setRequestPathInfo("/savingsproductaction.do");
    addRequestParameter("method", "load");
    actionPerform();
    // load starts a new flow; pick up its key for the subsequent requests.
    flowKey = (String) request.getAttribute(Constants.CURRENTFLOWKEY);
    setRequestPathInfo("/savingsproductaction.do");
    addRequestParameter("method", "preview");
    addRequestParameter(Constants.CURRENTFLOWKEY, flowKey);
    addRequestParameter("prdOfferingName", "Savings Offering");
    addRequestParameter("prdOfferingShortName", "SAVP");
    addRequestParameter("prdCategory", "2");
    addRequestParameter("startDate", offSetCurrentDate(0, userContext.getPreferredLocale()));
    addRequestParameter("endDate", offSetCurrentDate(+1, userContext.getPreferredLocale()));
    addRequestParameter("prdApplicableMaster", "2");
    addRequestParameter("savingsType", "2");
    addRequestParameter("interestRate", "100.0");
    addRequestParameter("interestCalcType", "1");
    addRequestParameter("timeForInterestCacl", "1");
    addRequestParameter("recurTypeFortimeForInterestCacl", "2");
    addRequestParameter("freqOfInterest", "1");
    addRequestParameter("depositGLCode", TestGeneralLedgerCode.MARGIN_MONEY_ONE.toString());
    addRequestParameter("interestGLCode", TestGeneralLedgerCode.INTEREST_CLIENT_MANDATORY_SAVINGS.toString());
    addRequestParameter("recommendedAmntUnit", "1");
    actionPerform();
    setRequestPathInfo("/savingsproductaction.do");
    addRequestParameter("method", "create");
    addRequestParameter(Constants.CURRENTFLOWKEY, flowKey);
    actionPerform();
    verifyNoActionErrors();
    verifyNoActionMessages();
    verifyForward(ActionForwards.create_success.toString());
    Assert.assertNotNull(request.getAttribute(ProductDefinitionConstants.SAVINGSPRODUCTID));
    // Clean up the product persisted by this test.
    TestObjectFactory.removeObject((SavingsOfferingBO) TestObjectFactory.getObject(SavingsOfferingBO.class,
            (Short) request.getAttribute(ProductDefinitionConstants.SAVINGSPRODUCTID)));
}
// Cancelling the create flow must forward to cancelCreate_success without errors.
public void testCancelCreate() throws Exception {
    setRequestPathInfo("/savingsproductaction.do");
    addRequestParameter("method", "cancelCreate");
    addRequestParameter(Constants.CURRENTFLOWKEY, flowKey);
    actionPerform();
    verifyNoActionErrors();
    verifyForward(ActionForwards.cancelCreate_success.toString());
}
// Cancelling the edit flow must forward to cancelEdit_success without errors.
public void testCancelEdit() throws Exception {
    setRequestPathInfo("/savingsproductaction.do");
    addRequestParameter("method", "cancelEdit");
    addRequestParameter(Constants.CURRENTFLOWKEY, flowKey);
    actionPerform();
    verifyNoActionErrors();
    verifyForward(ActionForwards.cancelEdit_success.toString());
}
// The "previous" navigation must forward to previous_success without errors.
public void testPrevious() throws Exception {
    setRequestPathInfo("/savingsproductaction.do");
    addRequestParameter("method", "previous");
    addRequestParameter(Constants.CURRENTFLOWKEY, flowKey);
    actionPerform();
    verifyNoActionErrors();
    verifyForward(ActionForwards.previous_success.toString());
}
// "validate" without a METHODCALLED attribute yields no forward at all.
public void testValidate() throws Exception {
    setRequestPathInfo("/savingsproductaction.do");
    addRequestParameter("method", "validate");
    addRequestParameter(Constants.CURRENTFLOWKEY, flowKey);
    actionPerform();
    verifyNoActionErrors();
    verifyForward(null);
}
// "validate" after a failed preview must forward back to preview_failure.
public void testValidateForPreview() throws Exception {
    setRequestPathInfo("/savingsproductaction.do");
    addRequestParameter("method", "validate");
    addRequestParameter(Constants.CURRENTFLOWKEY, flowKey);
    request.setAttribute(ProductDefinitionConstants.METHODCALLED, Methods.preview.toString());
    actionPerform();
    verifyNoActionErrors();
    verifyForward(ActionForwards.preview_failure.toString());
}
// "validate" after a failed create must forward back to create_failure.
// NOTE(review): method name has a typo ("Vaildate"); left unchanged because JUnit 3
// discovers tests by the "test" prefix and external suites may reference this name.
public void testVaildateForCreate() throws Exception {
    setRequestPathInfo("/savingsproductaction.do");
    addRequestParameter("method", "validate");
    addRequestParameter(Constants.CURRENTFLOWKEY, flowKey);
    request.setAttribute(ProductDefinitionConstants.METHODCALLED, Methods.create.toString());
    actionPerform();
    verifyNoActionErrors();
    verifyForward(ActionForwards.create_failure.toString());
}
// Fetches an existing savings product via the "get" method and verifies the
// loaded product's name, short name, status and savings type.
public void testGet() throws Exception {
    String prdName = "Savings_Kendra";
    String prdShortName = "SSK";
    createSavingsOfferingBO(prdName, prdShortName);
    setRequestPathInfo("/savingsproductaction.do");
    addRequestParameter("method", "get");
    addRequestParameter("prdOfferingId", product.getPrdOfferingId().toString());
    actionPerform();
    verifyNoActionErrors();
    verifyNoActionMessages();
    verifyForward(ActionForwards.get_success.toString());
    Assert.assertEquals(prdName, product.getPrdOfferingName());
    // The short-name assertion was duplicated; it is asserted once now.
    Assert.assertEquals(prdShortName, product.getPrdOfferingShortName());
    Assert.assertEquals(PrdStatus.SAVINGS_ACTIVE, product.getStatus());
    Assert.assertEquals(2, product.getSavingsType().getId().shortValue());
}
/**
 * Exercises the "manage" (edit) action: it must forward to manage_success and
 * populate the session with every master-data list the edit page needs. Each
 * list is checked against its expected fixture size.
 */
public void testManage() throws Exception {
    request.setAttribute(Constants.CURRENTFLOWKEY, flowKey);
    createSavingsOfferingAndPutInSession();
    setRequestPathInfo("/savingsproductaction.do");
    addRequestParameter("method", "manage");
    addRequestParameter(Constants.CURRENTFLOWKEY, (String) request.getAttribute(Constants.CURRENTFLOWKEY));
    actionPerform();
    verifyNoActionErrors();
    verifyNoActionMessages();
    verifyForward(ActionForwards.manage_success.toString());
    List<ProductCategoryBO> productCategories = (List<ProductCategoryBO>) SessionUtils.getAttribute(
            ProductDefinitionConstants.SAVINGSPRODUCTCATEGORYLIST, request);
    Assert.assertEquals("The size of master data for categories", 1, productCategories.size());
    for (ProductCategoryBO productCategory : productCategories) {
        Assert.assertNotNull(productCategory.getProductType());
    }
    Assert.assertEquals("The size of applicable list", 3, ((List<MasterDataEntity>) SessionUtils.getAttribute(
            ProductDefinitionConstants.SAVINGSAPPLFORLIST, request)).size());
    Assert.assertEquals("The size of savings type list", 2, ((List<MasterDataEntity>) SessionUtils.getAttribute(
            ProductDefinitionConstants.SAVINGSTYPELIST, request)).size());
    Assert.assertEquals("The size of reco amount unit list", 2, ((List<MasterDataEntity>) SessionUtils.getAttribute(
            ProductDefinitionConstants.RECAMNTUNITLIST, request)).size());
    Assert.assertEquals("The size of interest calculation types list", 2, ((List<MasterDataEntity>) SessionUtils
            .getAttribute(ProductDefinitionConstants.INTCALCTYPESLIST, request)).size());
    Assert.assertEquals("The size of recurrence type list", 2, ((List<MasterDataEntity>) SessionUtils.getAttribute(
            ProductDefinitionConstants.SAVINGSRECURRENCETYPELIST, request)).size());
    // Fixed: the message below said "The size of applicable list" (copy-paste
    // from the earlier assertion), which was misleading on failure.
    Assert.assertEquals("The size of deposit gl code list", 6, ((List<GLCodeEntity>) SessionUtils.getAttribute(
            ProductDefinitionConstants.SAVINGSDEPOSITGLCODELIST, request)).size());
    Assert.assertEquals("The size of gl codes list", 2, ((List<GLCodeEntity>) SessionUtils.getAttribute(
            ProductDefinitionConstants.SAVINGSINTERESTGLCODELIST, request)).size());
    Assert.assertEquals("The size of status list", 2, ((List<GLCodeEntity>) SessionUtils.getAttribute(
            ProductDefinitionConstants.PRDCATEGORYSTATUSLIST, request)).size());
}
/**
 * Submits "previewManage" with no form data at all; every mandatory field must
 * produce exactly one validation error (12 in total) and the request must be
 * forwarded back to the input page.
 */
public void testPreviewManageWithOutData() throws Exception {
    request.setAttribute(Constants.CURRENTFLOWKEY, flowKey);
    createSavingsOfferingAndPutInSession();
    setRequestPathInfo("/savingsproductaction.do");
    addRequestParameter("method", "previewManage");
    addRequestParameter(Constants.CURRENTFLOWKEY, flowKey);
    actionPerform();
    Assert.assertEquals("error size", 12, getErrorSize());
    Assert.assertEquals("prdOfferingName", 1, getErrorSize("prdOfferingName"));
    Assert.assertEquals("prdOfferingShortName", 1, getErrorSize("prdOfferingShortName"));
    Assert.assertEquals("prdCategory", 1, getErrorSize("prdCategory"));
    Assert.assertEquals("startDate", 1, getErrorSize("startDate"));
    Assert.assertEquals("prdApplicableMaster", 1, getErrorSize("prdApplicableMaster"));
    Assert.assertEquals("savingsType", 1, getErrorSize("savingsType"));
    Assert.assertEquals("interestRate", 1, getErrorSize("interestRate"));
    Assert.assertEquals("interestCalcType", 1, getErrorSize("interestCalcType"));
    Assert.assertEquals("timeForInterestCacl", 1, getErrorSize("timeForInterestCacl"));
    Assert.assertEquals("freqOfInterest", 1, getErrorSize("freqOfInterest"));
    Assert.assertEquals("depositGLCode", 1, getErrorSize("depositGLCode"));
    Assert.assertEquals("interest", 1, getErrorSize("interest"));
    verifyInputForward();
}
/**
 * Submits "previewManage" with prdApplicableMaster=2 (presumably "groups" —
 * TODO confirm id mapping) and savingsType=2 but no recommendedAmntUnit; that
 * combination must produce exactly one error on recommendedAmntUnit.
 */
public void testPreviewManageWithPrdApplToGroupAndAppliesToNotEntered() throws Exception {
    request.setAttribute(Constants.CURRENTFLOWKEY, flowKey);
    createSavingsOfferingAndPutInSession();
    setRequestPathInfo("/savingsproductaction.do");
    addRequestParameter("method", "previewManage");
    addRequestParameter(Constants.CURRENTFLOWKEY, flowKey);
    addRequestParameter("prdOfferingName", "Savings Offering");
    addRequestParameter("prdOfferingShortName", "SAVP");
    addRequestParameter("prdCategory", "2");
    addRequestParameter("startDate", DateUtils.getCurrentDate(userContext.getPreferredLocale()));
    addRequestParameter("prdApplicableMaster", "2");
    addRequestParameter("savingsType", "2");
    addRequestParameter("interestRate", "1");
    addRequestParameter("interestCalcType", "1");
    addRequestParameter("timeForInterestCacl", "1");
    addRequestParameter("recurTypeFortimeForInterestCacl", "2");
    addRequestParameter("freqOfInterest", "1");
    addRequestParameter("depositGLCode", "42");
    addRequestParameter("interestGLCode", "57");
    actionPerform();
    Assert.assertEquals("recommendedAmntUnit", 1, getErrorSize("recommendedAmntUnit"));
    verifyInputForward();
}
/**
 * Submits "previewManage" with savingsType=1 (presumably "mandatory" — TODO
 * confirm id mapping) but no recommendedAmount; exactly one error on
 * recommendedAmount is expected.
 */
public void testPreviewManageWithMandPrdAndAmountNotEntered() throws Exception {
    request.setAttribute(Constants.CURRENTFLOWKEY, flowKey);
    createSavingsOfferingAndPutInSession();
    setRequestPathInfo("/savingsproductaction.do");
    addRequestParameter("method", "previewManage");
    addRequestParameter(Constants.CURRENTFLOWKEY, flowKey);
    addRequestParameter("prdOfferingName", "Savings Offering");
    addRequestParameter("prdOfferingShortName", "SAVP");
    addRequestParameter("prdCategory", "2");
    addRequestParameter("startDate", DateUtils.getCurrentDate(userContext.getPreferredLocale()));
    addRequestParameter("prdApplicableMaster", "1");
    addRequestParameter("savingsType", "1");
    addRequestParameter("interestRate", "1");
    addRequestParameter("interestCalcType", "1");
    addRequestParameter("timeForInterestCacl", "1");
    addRequestParameter("recurTypeFortimeForInterestCacl", "2");
    addRequestParameter("freqOfInterest", "1");
    addRequestParameter("depositGLCode", "42");
    addRequestParameter("interestGLCode", "57");
    actionPerform();
    Assert.assertEquals("Manadatory amount", 1, getErrorSize("recommendedAmount"));
    verifyInputForward();
}
/**
 * Submits "previewManage" for a mandatory product with recommendedAmount=0.0;
 * zero is not a valid mandatory amount, so exactly one error on
 * recommendedAmount is expected.
 */
public void testPreviewManageWithMandPrdAndZeroAmountEntered() throws Exception {
    request.setAttribute(Constants.CURRENTFLOWKEY, flowKey);
    createSavingsOfferingAndPutInSession();
    setRequestPathInfo("/savingsproductaction.do");
    addRequestParameter("method", "previewManage");
    addRequestParameter(Constants.CURRENTFLOWKEY, flowKey);
    addRequestParameter("prdOfferingName", "Savings Offering");
    addRequestParameter("prdOfferingShortName", "SAVP");
    addRequestParameter("prdCategory", "2");
    addRequestParameter("startDate", DateUtils.getCurrentDate(userContext.getPreferredLocale()));
    addRequestParameter("prdApplicableMaster", "1");
    addRequestParameter("savingsType", "1");
    addRequestParameter("interestRate", "1");
    addRequestParameter("interestCalcType", "1");
    addRequestParameter("timeForInterestCacl", "1");
    addRequestParameter("recurTypeFortimeForInterestCacl", "2");
    addRequestParameter("freqOfInterest", "1");
    addRequestParameter("depositGLCode", "42");
    addRequestParameter("interestGLCode", "57");
    addRequestParameter("recommendedAmount", "0.0");
    addRequestParameter("description", "Savings");
    addRequestParameter("maxAmntWithdrawl", "10.0");
    addRequestParameter("minAmntForInt", "10.0");
    actionPerform();
    Assert.assertEquals("Manadatory amount is 0", 1, getErrorSize("recommendedAmount"));
    verifyInputForward();
}
/**
 * Submits "previewManage" with interestRate=100.1; rates above 100 must be
 * rejected with exactly one error on interestRate.
 */
public void testPreviewManageWithInterestRateGreaterThanHundred() throws Exception {
    request.setAttribute(Constants.CURRENTFLOWKEY, flowKey);
    createSavingsOfferingAndPutInSession();
    setRequestPathInfo("/savingsproductaction.do");
    addRequestParameter("method", "previewManage");
    addRequestParameter(Constants.CURRENTFLOWKEY, flowKey);
    addRequestParameter("prdOfferingName", "Savings Offering");
    addRequestParameter("prdOfferingShortName", "SAVP");
    addRequestParameter("prdCategory", "2");
    addRequestParameter("startDate", DateUtils.getCurrentDate(userContext.getPreferredLocale()));
    addRequestParameter("prdApplicableMaster", "1");
    addRequestParameter("savingsType", "1");
    addRequestParameter("interestRate", "100.1");
    addRequestParameter("interestCalcType", "1");
    addRequestParameter("timeForInterestCacl", "1");
    addRequestParameter("recurTypeFortimeForInterestCacl", "2");
    addRequestParameter("freqOfInterest", "1");
    addRequestParameter("depositGLCode", "42");
    addRequestParameter("interestGLCode", "57");
    addRequestParameter("recommendedAmount", "120.0");
    actionPerform();
    Assert.assertEquals("interestRate >100", 1, getErrorSize("interestRate"));
    verifyInputForward();
}
/**
 * Submits "previewManage" with a start date one day in the past; a past start
 * date must produce exactly one error on startDate.
 */
public void testPreviewManageWithStartDateLessThanCurrentDate() throws Exception {
    request.setAttribute(Constants.CURRENTFLOWKEY, flowKey);
    createSavingsOfferingAndPutInSession();
    setRequestPathInfo("/savingsproductaction.do");
    addRequestParameter("method", "previewManage");
    addRequestParameter(Constants.CURRENTFLOWKEY, flowKey);
    addRequestParameter("prdOfferingName", "Savings Offering");
    addRequestParameter("prdOfferingShortName", "SAVP");
    addRequestParameter("prdCategory", "2");
    // Yesterday's date in the user's locale format.
    addRequestParameter("startDate", offSetCurrentDate(-1, userContext.getPreferredLocale()));
    addRequestParameter("prdApplicableMaster", "1");
    addRequestParameter("savingsType", "1");
    addRequestParameter("interestRate", "100.0");
    addRequestParameter("interestCalcType", "1");
    addRequestParameter("timeForInterestCacl", "1");
    addRequestParameter("recurTypeFortimeForInterestCacl", "2");
    addRequestParameter("freqOfInterest", "1");
    addRequestParameter("depositGLCode", "42");
    addRequestParameter("interestGLCode", "57");
    addRequestParameter("recommendedAmount", "120.0");
    actionPerform();
    Assert.assertEquals("start date", 1, getErrorSize("startDate"));
    verifyInputForward();
}
/**
 * Submits "previewManage" with an end date earlier than the start date; this
 * must produce exactly one error on endDate.
 */
public void testPreviewManageWithEndDateLessThanStartDate() throws Exception {
    request.setAttribute(Constants.CURRENTFLOWKEY, flowKey);
    createSavingsOfferingAndPutInSession();
    setRequestPathInfo("/savingsproductaction.do");
    addRequestParameter("method", "previewManage");
    addRequestParameter(Constants.CURRENTFLOWKEY, flowKey);
    addRequestParameter("prdOfferingName", "Savings Offering");
    addRequestParameter("prdOfferingShortName", "SAVP");
    addRequestParameter("prdCategory", "2");
    // Start today, end yesterday — an invalid range.
    addRequestParameter("startDate", offSetCurrentDate(0, userContext.getPreferredLocale()));
    addRequestParameter("endDate", offSetCurrentDate(-1, userContext.getPreferredLocale()));
    addRequestParameter("prdApplicableMaster", "1");
    addRequestParameter("savingsType", "1");
    addRequestParameter("interestRate", "100.0");
    addRequestParameter("interestCalcType", "1");
    addRequestParameter("timeForInterestCacl", "1");
    addRequestParameter("recurTypeFortimeForInterestCacl", "2");
    addRequestParameter("freqOfInterest", "1");
    addRequestParameter("depositGLCode", "42");
    addRequestParameter("interestGLCode", "57");
    addRequestParameter("recommendedAmount", "120.0");
    actionPerform();
    Assert.assertEquals("endDate", 1, getErrorSize("endDate"));
    verifyInputForward();
}
/**
 * Happy path for the edit flow: runs "manage" to load the product into the
 * flow, then submits "previewManage" with a full, valid form and expects
 * previewManage_success with no errors.
 *
 * NOTE(review): depositGLCode/interestGLCode are not submitted here yet the
 * action succeeds — presumably they are retained from the product loaded by
 * "manage"; confirm against the action implementation.
 */
public void testPreviewManage() throws Exception {
    request.setAttribute(Constants.CURRENTFLOWKEY, flowKey);
    createSavingsOfferingAndPutInSession();
    setRequestPathInfo("/savingsproductaction.do");
    addRequestParameter("method", "manage");
    addRequestParameter(Constants.CURRENTFLOWKEY, (String) request.getAttribute(Constants.CURRENTFLOWKEY));
    actionPerform();
    verifyNoActionErrors();
    verifyNoActionMessages();
    verifyForward(ActionForwards.manage_success.toString());
    setRequestPathInfo("/savingsproductaction.do");
    addRequestParameter("method", "previewManage");
    addRequestParameter(Constants.CURRENTFLOWKEY, flowKey);
    addRequestParameter("prdOfferingName", "Savings Offering");
    addRequestParameter("prdOfferingShortName", "SAVP");
    addRequestParameter("prdCategory", "2");
    addRequestParameter("startDate", offSetCurrentDate(0, userContext.getPreferredLocale()));
    addRequestParameter("endDate", offSetCurrentDate(+1, userContext.getPreferredLocale()));
    addRequestParameter("prdApplicableMaster", "1");
    addRequestParameter("savingsType", "1");
    addRequestParameter("interestRate", "100.0");
    addRequestParameter("interestCalcType", "1");
    addRequestParameter("timeForInterestCacl", "1");
    addRequestParameter("recurTypeFortimeForInterestCacl", "2");
    addRequestParameter("freqOfInterest", "1");
    addRequestParameter("recommendedAmount", "120.0");
    actionPerform();
    verifyNoActionErrors();
    verifyForward(ActionForwards.previewManage_success.toString());
}
/**
 * "previewManage" submitted without the current-flow-key request parameter
 * must abort with a PageExpiredException carrying the PAGEEXPIREDEXCEPTION key.
 */
public void testPreviewManageFlowFailure() throws Exception {
    try {
        request.setAttribute(Constants.CURRENTFLOWKEY, flowKey);
        createSavingsOfferingAndPutInSession();
        setRequestPathInfo("/savingsproductaction.do");
        addRequestParameter("method", "previewManage");
        addRequestParameter("prdOfferingName", "Savings Offering");
        addRequestParameter("prdOfferingShortName", "SAVP");
        addRequestParameter("prdCategory", "2");
        addRequestParameter("startDate", offSetCurrentDate(0, userContext.getPreferredLocale()));
        addRequestParameter("endDate", offSetCurrentDate(+1, userContext.getPreferredLocale()));
        addRequestParameter("prdApplicableMaster", "1");
        addRequestParameter("savingsType", "1");
        addRequestParameter("interestRate", "100.0");
        addRequestParameter("interestCalcType", "1");
        addRequestParameter("timeForInterestCacl", "1");
        addRequestParameter("recurTypeFortimeForInterestCacl", "2");
        addRequestParameter("freqOfInterest", "1");
        addRequestParameter("depositGLCode", "42");
        addRequestParameter("interestGLCode", "57");
        addRequestParameter("recommendedAmount", "120.0");
        actionPerform();
        // Fixed: without this the test passed silently when no exception was
        // thrown (the catch block only ran assertTrue(true) on the exception path).
        Assert.fail("Expected PageExpiredException because the flow key parameter is missing");
    } catch (PageExpiredException pe) {
        Assert.assertEquals(ExceptionConstants.PAGEEXPIREDEXCEPTION, pe.getKey());
    }
}
/**
 * End-to-end edit flow: manage -> previewManage (valid form, status=5) ->
 * update. Afterwards the product is reloaded from Hibernate and every edited
 * field is verified, and the flow key must have been cleared from the request.
 */
public void testUpdate() throws Exception {
    request.setAttribute(Constants.CURRENTFLOWKEY, flowKey);
    createSavingsOfferingAndPutInSession();
    setRequestPathInfo("/savingsproductaction.do");
    addRequestParameter("method", "manage");
    addRequestParameter(Constants.CURRENTFLOWKEY, (String) request.getAttribute(Constants.CURRENTFLOWKEY));
    actionPerform();
    verifyNoActionErrors();
    verifyNoActionMessages();
    verifyForward(ActionForwards.manage_success.toString());
    setRequestPathInfo("/savingsproductaction.do");
    addRequestParameter("method", "previewManage");
    addRequestParameter(Constants.CURRENTFLOWKEY, flowKey);
    addRequestParameter("prdOfferingName", "Savings Offering");
    addRequestParameter("prdOfferingShortName", "SAVP");
    addRequestParameter("prdCategory", "2");
    addRequestParameter("startDate", offSetCurrentDate(0, userContext.getPreferredLocale()));
    addRequestParameter("endDate", offSetCurrentDate(+1, userContext.getPreferredLocale()));
    addRequestParameter("prdApplicableMaster", "1");
    addRequestParameter("savingsType", "1");
    addRequestParameter("interestRate", "9.0");
    addRequestParameter("interestCalcType", "1");
    addRequestParameter("timeForInterestCacl", "1");
    addRequestParameter("recurTypeFortimeForInterestCacl", "2");
    addRequestParameter("freqOfInterest", "1");
    addRequestParameter("recommendedAmount", "120.0");
    // status 5 — asserted below to map to SAVINGS_INACTIVE.
    addRequestParameter("status", "5");
    actionPerform();
    verifyNoActionErrors();
    verifyForward(ActionForwards.previewManage_success.toString());
    Assert.assertNotNull(request.getAttribute(Constants.CURRENTFLOWKEY));
    setRequestPathInfo("/savingsproductaction.do");
    addRequestParameter("method", "update");
    addRequestParameter(Constants.CURRENTFLOWKEY, (String) request.getAttribute(Constants.CURRENTFLOWKEY));
    actionPerform();
    verifyNoActionErrors();
    verifyNoActionMessages();
    verifyForward(ActionForwards.update_success.toString());
    // Reload the product so the assertions see the persisted state, not the
    // in-memory instance from the fixture.
    product = (SavingsOfferingBO) StaticHibernateUtil.getSessionTL().get(SavingsOfferingBO.class,
            product.getPrdOfferingId());
    Assert.assertEquals("Savings Offering", product.getPrdOfferingName());
    Assert.assertEquals("SAVP", product.getPrdOfferingShortName());
    Assert.assertEquals(2, product.getPrdCategory().getProductCategoryID().intValue());
    Assert.assertEquals(PrdStatus.SAVINGS_INACTIVE, product.getStatus());
    Assert.assertEquals(SavingsType.MANDATORY, product.getSavingsTypeAsEnum());
    Assert.assertEquals(1, product.getInterestCalcType().getId().intValue());
    Assert.assertEquals(1, product.getTimePerForInstcalc().getMeeting().getMeetingDetails().getRecurAfter().intValue());
    Assert.assertEquals(2, product.getTimePerForInstcalc().getMeeting().getMeetingDetails().getRecurrenceType()
            .getRecurrenceId().shortValue());
    Assert.assertEquals(1, product.getFreqOfPostIntcalc().getMeeting().getMeetingDetails().getRecurAfter().intValue());
    Assert.assertEquals("Recommended Amount", new Money("120"), product.getRecommendedAmount());
    Assert.assertEquals(9.0, product.getInterestRate(), DELTA);
    // A successful update must end the flow and remove the flow key.
    Assert.assertNull(request.getAttribute(Constants.CURRENTFLOWKEY));
}
/**
 * Invokes "previousManage" (back navigation within the edit flow) and expects
 * previousManage_success with no errors or messages.
 */
public void testPreviousManage() throws Exception {
    request.setAttribute(Constants.CURRENTFLOWKEY, flowKey);
    createSavingsOfferingAndPutInSession();
    setRequestPathInfo("/savingsproductaction.do");
    addRequestParameter("method", "previousManage");
    addRequestParameter(Constants.CURRENTFLOWKEY, (String) request.getAttribute(Constants.CURRENTFLOWKEY));
    actionPerform();
    verifyNoActionErrors();
    verifyNoActionMessages();
    verifyForward(ActionForwards.previousManage_success.toString());
}
/**
 * Invokes "search" after creating one savings product; the session list must
 * contain exactly that one product.
 */
public void testSearch() throws Exception {
    createSavingsOfferingBO("prdOfferingName", "SN");
    setRequestPathInfo("/savingsproductaction.do");
    addRequestParameter("method", "search");
    actionPerform();
    verifyNoActionErrors();
    verifyNoActionMessages();
    verifyForward(ActionForwards.search_success.toString());
    List<SavingsOfferingBO> savingsProducts = (List<SavingsOfferingBO>) SessionUtils.getAttribute(
            ProductDefinitionConstants.SAVINGSPRODUCTLIST, request);
    Assert.assertEquals("The size of savings products", 1, savingsProducts.size());
}
/**
 * Deactivates a product through the manage -> previewManage(status=5) ->
 * update flow, then runs "search" and verifies the inactive product is still
 * returned and carries the "Inactive" status name.
 */
public void testSearch_Inactive() throws Exception {
    request.setAttribute(Constants.CURRENTFLOWKEY, flowKey);
    createSavingsOfferingAndPutInSession();
    setRequestPathInfo("/savingsproductaction.do");
    addRequestParameter("method", "manage");
    addRequestParameter(Constants.CURRENTFLOWKEY, (String) request.getAttribute(Constants.CURRENTFLOWKEY));
    actionPerform();
    verifyNoActionErrors();
    verifyNoActionMessages();
    verifyForward(ActionForwards.manage_success.toString());
    setRequestPathInfo("/savingsproductaction.do");
    addRequestParameter("method", "previewManage");
    addRequestParameter(Constants.CURRENTFLOWKEY, flowKey);
    // Only the status is changed; status 5 is asserted below to persist as
    // SAVINGS_INACTIVE.
    addRequestParameter("status", "5");
    actionPerform();
    verifyNoActionErrors();
    verifyForward(ActionForwards.previewManage_success.toString());
    Assert.assertNotNull(request.getAttribute(Constants.CURRENTFLOWKEY));
    setRequestPathInfo("/savingsproductaction.do");
    addRequestParameter("method", "update");
    addRequestParameter(Constants.CURRENTFLOWKEY, (String) request.getAttribute(Constants.CURRENTFLOWKEY));
    actionPerform();
    verifyNoActionErrors();
    verifyNoActionMessages();
    verifyForward(ActionForwards.update_success.toString());
    product = (SavingsOfferingBO) StaticHibernateUtil.getSessionTL().get(SavingsOfferingBO.class,
            product.getPrdOfferingId());
    Assert.assertEquals(PrdStatus.SAVINGS_INACTIVE, product.getStatus());
    setRequestPathInfo("/savingsproductaction.do");
    addRequestParameter("method", "search");
    actionPerform();
    verifyNoActionErrors();
    verifyNoActionMessages();
    verifyForward(ActionForwards.search_success.toString());
    List<SavingsOfferingBO> savingsProducts = (List<SavingsOfferingBO>) SessionUtils.getAttribute(
            ProductDefinitionConstants.SAVINGSPRODUCTLIST, request);
    Assert.assertEquals("The size of savings products", 1, savingsProducts.size());
    SavingsOfferingBO savingsProduct = (SavingsOfferingBO) StaticHibernateUtil.getSessionTL().get(
            SavingsOfferingBO.class, savingsProducts.get(0).getPrdOfferingId());
    Assert.assertEquals("Inactive", savingsProduct.getPrdStatus().getPrdState().getName());
    // Re-fetch into the field so teardown (outside this view) works with an
    // attached instance — presumably required by the base class; confirm.
    product = (SavingsOfferingBO) StaticHibernateUtil.getSessionTL().get(SavingsOfferingBO.class,
            product.getPrdOfferingId());
}
/**
 * Returns today's date shifted by {@code noOfDays} days, formatted in the
 * short date style of the given locale (as the UI would render it).
 */
private String offSetCurrentDate(int noOfDays, Locale locale) throws InvalidDateException {
    Calendar today = new GregorianCalendar();
    // Rebuild the calendar with the day offset applied; GregorianCalendar is
    // lenient, so month/year rollover is handled automatically.
    Calendar shifted = new GregorianCalendar(today.get(Calendar.YEAR), today.get(Calendar.MONTH),
            today.get(Calendar.DAY_OF_MONTH) + noOfDays);
    java.sql.Date shiftedDate = new java.sql.Date(shifted.getTimeInMillis());
    SimpleDateFormat shortFormat = (SimpleDateFormat) DateFormat.getDateInstance(DateFormat.SHORT, locale);
    String userPattern = DateUtils.convertToCurrentDateFormat(shortFormat.toPattern());
    return DateUtils.convertDbToUserFmt(shiftedDate.toString(), userPattern);
}
/**
 * Creates and persists an active, voluntary savings product with fixture
 * defaults, stores it in the {@code product} field, and returns it.
 */
private SavingsOfferingBO createSavingsOfferingBO(String productName, String shortName) {
    MeetingBO interestCalculationMeeting = TestObjectFactory.createMeeting(TestObjectFactory.getTypicalMeeting());
    MeetingBO interestPostingMeeting = TestObjectFactory.createMeeting(TestObjectFactory.getTypicalMeeting());
    Date startDate = new Date(System.currentTimeMillis());
    product = TestObjectFactory.createSavingsProduct(productName, shortName, ApplicableTo.CLIENTS, startDate,
            PrdStatus.SAVINGS_ACTIVE, 300.0, RecommendedAmountUnit.PER_INDIVIDUAL, 1.2, 200.0, 200.0,
            SavingsType.VOLUNTARY, InterestCalcType.MINIMUM_BALANCE, interestCalculationMeeting,
            interestPostingMeeting);
    return product;
}
/**
 * Creates the standard fixture product ("Savings_Kendra"/"SSK") and places it
 * in the session under the business key, ready for the manage/update flow.
 */
private void createSavingsOfferingAndPutInSession() throws Exception {
    SavingsOfferingBO offering = createSavingsOfferingBO("Savings_Kendra", "SSK");
    SessionUtils.setAttribute(Constants.BUSINESS_KEY, offering, request);
}
}
| |
/*
* EVE Swagger Interface
* An OpenAPI for EVE Online
*
* The version of the OpenAPI document: 1.10.1
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
package net.troja.eve.esi.model;
import java.util.Objects;
import java.util.Arrays;
import com.google.gson.TypeAdapter;
import com.google.gson.annotations.JsonAdapter;
import com.google.gson.annotations.SerializedName;
import com.google.gson.stream.JsonReader;
import com.google.gson.stream.JsonWriter;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import java.io.IOException;
import java.io.Serializable;
/**
 * 200 ok object
 *
 * NOTE(review): this class is OpenAPI-generator output ("do not edit
 * manually"). The fix below (keeping the serialized spectral-class string in
 * sync with the enum setters) should also be ported to the generator template,
 * otherwise regeneration will reintroduce the bug.
 */
@ApiModel(description = "200 ok object")
public class StarResponse implements Serializable {
    private static final long serialVersionUID = 1L;
    public static final String SERIALIZED_NAME_AGE = "age";
    @SerializedName(SERIALIZED_NAME_AGE)
    private Long age;
    public static final String SERIALIZED_NAME_LUMINOSITY = "luminosity";
    @SerializedName(SERIALIZED_NAME_LUMINOSITY)
    private Float luminosity;
    public static final String SERIALIZED_NAME_NAME = "name";
    @SerializedName(SERIALIZED_NAME_NAME)
    private String name;
    public static final String SERIALIZED_NAME_RADIUS = "radius";
    @SerializedName(SERIALIZED_NAME_RADIUS)
    private Long radius;
    public static final String SERIALIZED_NAME_SOLAR_SYSTEM_ID = "solar_system_id";
    @SerializedName(SERIALIZED_NAME_SOLAR_SYSTEM_ID)
    private Integer solarSystemId;

    /**
     * spectral_class string
     */
    @JsonAdapter(SpectralClassEnum.Adapter.class)
    public enum SpectralClassEnum {
        K2_V("K2 V"),
        K4_V("K4 V"),
        G2_V("G2 V"),
        G8_V("G8 V"),
        M7_V("M7 V"),
        K7_V("K7 V"),
        M2_V("M2 V"),
        K5_V("K5 V"),
        M3_V("M3 V"),
        G0_V("G0 V"),
        G7_V("G7 V"),
        G3_V("G3 V"),
        F9_V("F9 V"),
        G5_V("G5 V"),
        F6_V("F6 V"),
        K8_V("K8 V"),
        K9_V("K9 V"),
        K6_V("K6 V"),
        G9_V("G9 V"),
        G6_V("G6 V"),
        G4_VI("G4 VI"),
        G4_V("G4 V"),
        F8_V("F8 V"),
        F2_V("F2 V"),
        F1_V("F1 V"),
        K3_V("K3 V"),
        F0_VI("F0 VI"),
        G1_VI("G1 VI"),
        G0_VI("G0 VI"),
        K1_V("K1 V"),
        M4_V("M4 V"),
        M1_V("M1 V"),
        M6_V("M6 V"),
        M0_V("M0 V"),
        K2_IV("K2 IV"),
        G2_VI("G2 VI"),
        K0_V("K0 V"),
        K5_IV("K5 IV"),
        F5_VI("F5 VI"),
        G6_VI("G6 VI"),
        F6_VI("F6 VI"),
        F2_IV("F2 IV"),
        G3_VI("G3 VI"),
        M8_V("M8 V"),
        F1_VI("F1 VI"),
        K1_IV("K1 IV"),
        F7_V("F7 V"),
        G5_VI("G5 VI"),
        M5_V("M5 V"),
        G7_VI("G7 VI"),
        F5_V("F5 V"),
        F4_VI("F4 VI"),
        F8_VI("F8 VI"),
        K3_IV("K3 IV"),
        F4_IV("F4 IV"),
        F0_V("F0 V"),
        G7_IV("G7 IV"),
        G8_VI("G8 VI"),
        F2_VI("F2 VI"),
        F4_V("F4 V"),
        F7_VI("F7 VI"),
        F3_V("F3 V"),
        G1_V("G1 V"),
        G9_VI("G9 VI"),
        F3_IV("F3 IV"),
        F9_VI("F9 VI"),
        M9_V("M9 V"),
        K0_IV("K0 IV"),
        F1_IV("F1 IV"),
        G4_IV("G4 IV"),
        F3_VI("F3 VI"),
        K4_IV("K4 IV"),
        G5_IV("G5 IV"),
        G3_IV("G3 IV"),
        G1_IV("G1 IV"),
        K7_IV("K7 IV"),
        G0_IV("G0 IV"),
        K6_IV("K6 IV"),
        K9_IV("K9 IV"),
        G2_IV("G2 IV"),
        F9_IV("F9 IV"),
        F0_IV("F0 IV"),
        K8_IV("K8 IV"),
        G8_IV("G8 IV"),
        F6_IV("F6 IV"),
        F5_IV("F5 IV"),
        A0("A0"),
        A0IV("A0IV"),
        A0IV2("A0IV2");

        private String value;

        SpectralClassEnum(String value) {
            this.value = value;
        }

        public String getValue() {
            return value;
        }

        @Override
        public String toString() {
            return String.valueOf(value);
        }

        /** Returns the constant for the wire value, or null if unknown. */
        public static SpectralClassEnum fromValue(String value) {
            for (SpectralClassEnum b : SpectralClassEnum.values()) {
                if (b.value.equals(value)) {
                    return b;
                }
            }
            return null;
        }

        public static class Adapter extends TypeAdapter<SpectralClassEnum> {
            @Override
            public void write(final JsonWriter jsonWriter, final SpectralClassEnum enumeration) throws IOException {
                // NOTE(review): throws NPE if enumeration is null — presumably
                // Gson never passes null here; confirm adapter registration.
                jsonWriter.value(enumeration.getValue());
            }

            @Override
            public SpectralClassEnum read(final JsonReader jsonReader) throws IOException {
                String value = jsonReader.nextString();
                return SpectralClassEnum.fromValue(value);
            }
        }
    }

    // The raw wire value is kept alongside the enum so that values unknown to
    // this client version are not lost; only the string field is serialized.
    public static final String SERIALIZED_NAME_SPECTRAL_CLASS = "spectral_class";
    @SerializedName(SERIALIZED_NAME_SPECTRAL_CLASS)
    private String spectralClass;
    private SpectralClassEnum spectralClassEnum;
    public static final String SERIALIZED_NAME_TEMPERATURE = "temperature";
    @SerializedName(SERIALIZED_NAME_TEMPERATURE)
    private Integer temperature;
    public static final String SERIALIZED_NAME_TYPE_ID = "type_id";
    @SerializedName(SERIALIZED_NAME_TYPE_ID)
    private Integer typeId;

    public StarResponse age(Long age) {
        this.age = age;
        return this;
    }

    /**
     * Age of star in years
     *
     * @return age
     **/
    @ApiModelProperty(required = true, value = "Age of star in years")
    public Long getAge() {
        return age;
    }

    public void setAge(Long age) {
        this.age = age;
    }

    public StarResponse luminosity(Float luminosity) {
        this.luminosity = luminosity;
        return this;
    }

    /**
     * luminosity number
     *
     * @return luminosity
     **/
    @ApiModelProperty(required = true, value = "luminosity number")
    public Float getLuminosity() {
        return luminosity;
    }

    public void setLuminosity(Float luminosity) {
        this.luminosity = luminosity;
    }

    public StarResponse name(String name) {
        this.name = name;
        return this;
    }

    /**
     * name string
     *
     * @return name
     **/
    @ApiModelProperty(required = true, value = "name string")
    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public StarResponse radius(Long radius) {
        this.radius = radius;
        return this;
    }

    /**
     * radius integer
     *
     * @return radius
     **/
    @ApiModelProperty(required = true, value = "radius integer")
    public Long getRadius() {
        return radius;
    }

    public void setRadius(Long radius) {
        this.radius = radius;
    }

    public StarResponse solarSystemId(Integer solarSystemId) {
        this.solarSystemId = solarSystemId;
        return this;
    }

    /**
     * solar_system_id integer
     *
     * @return solarSystemId
     **/
    @ApiModelProperty(required = true, value = "solar_system_id integer")
    public Integer getSolarSystemId() {
        return solarSystemId;
    }

    public void setSolarSystemId(Integer solarSystemId) {
        this.solarSystemId = solarSystemId;
    }

    public StarResponse spectralClass(SpectralClassEnum spectralClassEnum) {
        this.spectralClassEnum = spectralClassEnum;
        // Fixed: keep the serialized string field in sync. Previously an
        // enum-set object serialized "spectral_class" as null and was ignored
        // by equals/hashCode/toString, which read only the string field.
        this.spectralClass = spectralClassEnum == null ? null : spectralClassEnum.getValue();
        return this;
    }

    public StarResponse spectralClassString(String spectralClass) {
        this.spectralClass = spectralClass;
        return this;
    }

    /**
     * spectral_class string
     *
     * @return spectralClass
     **/
    @ApiModelProperty(required = true, value = "spectral_class string")
    public SpectralClassEnum getSpectralClass() {
        // Lazily derive the enum from the wire value; stays null for values
        // unknown to this client version.
        if (spectralClassEnum == null) {
            spectralClassEnum = SpectralClassEnum.fromValue(spectralClass);
        }
        return spectralClassEnum;
    }

    public String getSpectralClassString() {
        return spectralClass;
    }

    public void setSpectralClass(SpectralClassEnum spectralClassEnum) {
        this.spectralClassEnum = spectralClassEnum;
        // Fixed: sync the serialized string field (see builder method above).
        this.spectralClass = spectralClassEnum == null ? null : spectralClassEnum.getValue();
    }

    public void setSpectralClassString(String spectralClass) {
        this.spectralClass = spectralClass;
    }

    public StarResponse temperature(Integer temperature) {
        this.temperature = temperature;
        return this;
    }

    /**
     * temperature integer
     *
     * @return temperature
     **/
    @ApiModelProperty(required = true, value = "temperature integer")
    public Integer getTemperature() {
        return temperature;
    }

    public void setTemperature(Integer temperature) {
        this.temperature = temperature;
    }

    public StarResponse typeId(Integer typeId) {
        this.typeId = typeId;
        return this;
    }

    /**
     * type_id integer
     *
     * @return typeId
     **/
    @ApiModelProperty(required = true, value = "type_id integer")
    public Integer getTypeId() {
        return typeId;
    }

    public void setTypeId(Integer typeId) {
        this.typeId = typeId;
    }

    @Override
    public boolean equals(java.lang.Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        StarResponse starResponse = (StarResponse) o;
        return Objects.equals(this.age, starResponse.age) && Objects.equals(this.luminosity, starResponse.luminosity)
                && Objects.equals(this.name, starResponse.name) && Objects.equals(this.radius, starResponse.radius)
                && Objects.equals(this.solarSystemId, starResponse.solarSystemId)
                && Objects.equals(this.spectralClass, starResponse.spectralClass)
                && Objects.equals(this.temperature, starResponse.temperature)
                && Objects.equals(this.typeId, starResponse.typeId);
    }

    @Override
    public int hashCode() {
        return Objects.hash(age, luminosity, name, radius, solarSystemId, spectralClass, temperature, typeId);
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("class StarResponse {\n");
        sb.append("    age: ").append(toIndentedString(age)).append("\n");
        sb.append("    luminosity: ").append(toIndentedString(luminosity)).append("\n");
        sb.append("    name: ").append(toIndentedString(name)).append("\n");
        sb.append("    radius: ").append(toIndentedString(radius)).append("\n");
        sb.append("    solarSystemId: ").append(toIndentedString(solarSystemId)).append("\n");
        sb.append("    spectralClass: ").append(toIndentedString(spectralClass)).append("\n");
        sb.append("    temperature: ").append(toIndentedString(temperature)).append("\n");
        sb.append("    typeId: ").append(toIndentedString(typeId)).append("\n");
        sb.append("}");
        return sb.toString();
    }

    /**
     * Convert the given object to string with each line indented by 4 spaces
     * (except the first line).
     */
    private String toIndentedString(java.lang.Object o) {
        if (o == null) {
            return "null";
        }
        return o.toString().replace("\n", "\n    ");
    }
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.index.reindex;
import org.elasticsearch.common.Strings;
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xcontent.ToXContent;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.json.JsonXContent;
import java.io.IOException;
import java.util.Arrays;
import static java.lang.Math.min;
import static org.elasticsearch.core.TimeValue.parseTimeValue;
import static org.elasticsearch.core.TimeValue.timeValueMillis;
import static org.elasticsearch.core.TimeValue.timeValueNanos;
import static org.hamcrest.Matchers.containsString;
public class BulkByScrollTaskTests extends ESTestCase {
/**
 * Each call makes exactly one field negative (-1) and expects the Status
 * constructor to reject it, naming that field.
 */
public void testStatusHatesNegatives() {
    checkStatusNegatives(-1, 0, 0, 0, 0, 0, 0, 0, 0, 0, "sliceId");
    checkStatusNegatives(null, -1, 0, 0, 0, 0, 0, 0, 0, 0, "total");
    checkStatusNegatives(null, 0, -1, 0, 0, 0, 0, 0, 0, 0, "updated");
    checkStatusNegatives(null, 0, 0, -1, 0, 0, 0, 0, 0, 0, "created");
    checkStatusNegatives(null, 0, 0, 0, -1, 0, 0, 0, 0, 0, "deleted");
    checkStatusNegatives(null, 0, 0, 0, 0, -1, 0, 0, 0, 0, "batches");
    checkStatusNegatives(null, 0, 0, 0, 0, 0, -1, 0, 0, 0, "versionConflicts");
    checkStatusNegatives(null, 0, 0, 0, 0, 0, 0, -1, 0, 0, "noops");
    checkStatusNegatives(null, 0, 0, 0, 0, 0, 0, 0, -1, 0, "bulkRetries");
    checkStatusNegatives(null, 0, 0, 0, 0, 0, 0, 0, 0, -1, "searchRetries");
}
/**
 * Build a task status with only some values. Used for testing negative values.
 * Asserts that the Status constructor throws IllegalArgumentException naming
 * the offending field.
 */
private void checkStatusNegatives(
    Integer sliceId,
    long total,
    long updated,
    long created,
    long deleted,
    int batches,
    long versionConflicts,
    long noops,
    long bulkRetries,
    long searchRetries,
    String fieldName
) {
    TimeValue throttle = parseTimeValue(randomPositiveTimeValue(), "test");
    TimeValue throttledUntil = parseTimeValue(randomPositiveTimeValue(), "test");
    IllegalArgumentException e = expectThrows(
        IllegalArgumentException.class,
        () -> new BulkByScrollTask.Status(
            sliceId,
            total,
            updated,
            created,
            deleted,
            batches,
            versionConflicts,
            noops,
            bulkRetries,
            searchRetries,
            throttle,
            0f,
            null,
            throttledUntil
        )
    );
    // Fixed: assertEquals takes (expected, actual); the arguments were
    // reversed, which makes failure messages report the values backwards.
    assertEquals(fieldName + " must be greater than 0 but was [-1]", e.getMessage());
}
public void testXContentRepresentationOfUnlimitedRequestsPerSecond() throws IOException {
XContentBuilder builder = JsonXContent.contentBuilder();
BulkByScrollTask.Status status = new BulkByScrollTask.Status(
null,
0,
0,
0,
0,
0,
0,
0,
0,
0,
timeValueMillis(0),
Float.POSITIVE_INFINITY,
null,
timeValueMillis(0)
);
status.toXContent(builder, ToXContent.EMPTY_PARAMS);
assertThat(Strings.toString(builder), containsString("\"requests_per_second\":-1"));
}
public void testXContentRepresentationOfUnfinishedSlices() throws IOException {
XContentBuilder builder = JsonXContent.contentBuilder();
BulkByScrollTask.Status completedStatus = new BulkByScrollTask.Status(
2,
0,
0,
0,
0,
0,
0,
0,
0,
0,
timeValueMillis(0),
Float.POSITIVE_INFINITY,
null,
timeValueMillis(0)
);
BulkByScrollTask.Status status = new BulkByScrollTask.Status(
Arrays.asList(null, null, new BulkByScrollTask.StatusOrException(completedStatus)),
null
);
status.toXContent(builder, ToXContent.EMPTY_PARAMS);
assertThat(Strings.toString(builder), containsString("\"slices\":[null,null,{\"slice_id\":2"));
}
public void testXContentRepresentationOfSliceFailures() throws IOException {
XContentBuilder builder = JsonXContent.contentBuilder();
Exception e = new Exception();
BulkByScrollTask.Status status = new BulkByScrollTask.Status(
Arrays.asList(null, null, new BulkByScrollTask.StatusOrException(e)),
null
);
status.toXContent(builder, ToXContent.EMPTY_PARAMS);
assertThat(Strings.toString(builder), containsString("\"slices\":[null,null,{\"type\":\"exception\""));
}
public void testMergeStatuses() {
BulkByScrollTask.StatusOrException[] statuses = new BulkByScrollTask.StatusOrException[between(2, 100)];
boolean containsNullStatuses = randomBoolean();
int mergedTotal = 0;
int mergedUpdated = 0;
int mergedCreated = 0;
int mergedDeleted = 0;
int mergedBatches = 0;
int mergedVersionConflicts = 0;
int mergedNoops = 0;
int mergedBulkRetries = 0;
int mergedSearchRetries = 0;
TimeValue mergedThrottled = timeValueNanos(0);
float mergedRequestsPerSecond = 0;
TimeValue mergedThrottledUntil = timeValueNanos(Integer.MAX_VALUE);
for (int i = 0; i < statuses.length; i++) {
if (containsNullStatuses && rarely()) {
continue;
}
int total = between(0, 10000);
int updated = between(0, total);
int created = between(0, total - updated);
int deleted = between(0, total - updated - created);
int batches = between(0, 10);
int versionConflicts = between(0, 100);
int noops = total - updated - created - deleted;
int bulkRetries = between(0, 100);
int searchRetries = between(0, 100);
TimeValue throttled = timeValueNanos(between(0, 10000));
float requestsPerSecond = randomValueOtherThanMany(r -> r <= 0, () -> randomFloat());
String reasonCancelled = randomBoolean() ? null : "test";
TimeValue throttledUntil = timeValueNanos(between(0, 1000));
statuses[i] = new BulkByScrollTask.StatusOrException(
new BulkByScrollTask.Status(
i,
total,
updated,
created,
deleted,
batches,
versionConflicts,
noops,
bulkRetries,
searchRetries,
throttled,
requestsPerSecond,
reasonCancelled,
throttledUntil
)
);
mergedTotal += total;
mergedUpdated += updated;
mergedCreated += created;
mergedDeleted += deleted;
mergedBatches += batches;
mergedVersionConflicts += versionConflicts;
mergedNoops += noops;
mergedBulkRetries += bulkRetries;
mergedSearchRetries += searchRetries;
mergedThrottled = timeValueNanos(mergedThrottled.nanos() + throttled.nanos());
mergedRequestsPerSecond += requestsPerSecond;
mergedThrottledUntil = timeValueNanos(min(mergedThrottledUntil.nanos(), throttledUntil.nanos()));
}
String reasonCancelled = randomBoolean() ? randomAlphaOfLength(10) : null;
BulkByScrollTask.Status merged = new BulkByScrollTask.Status(Arrays.asList(statuses), reasonCancelled);
assertEquals(mergedTotal, merged.getTotal());
assertEquals(mergedUpdated, merged.getUpdated());
assertEquals(mergedCreated, merged.getCreated());
assertEquals(mergedDeleted, merged.getDeleted());
assertEquals(mergedBatches, merged.getBatches());
assertEquals(mergedVersionConflicts, merged.getVersionConflicts());
assertEquals(mergedNoops, merged.getNoops());
assertEquals(mergedBulkRetries, merged.getBulkRetries());
assertEquals(mergedSearchRetries, merged.getSearchRetries());
assertEquals(mergedThrottled, merged.getThrottled());
assertEquals(mergedRequestsPerSecond, merged.getRequestsPerSecond(), 0.0001f);
assertEquals(mergedThrottledUntil, merged.getThrottledUntil());
assertEquals(reasonCancelled, merged.getReasonCancelled());
}
}
| |
/*
* Licensed to Apereo under one or more contributor license
* agreements. See the NOTICE file distributed with this work
* for additional information regarding copyright ownership.
* Apereo licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a
* copy of the License at the following location:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.jasig.cas.ticket.registry.support.kryo;
import com.esotericsoftware.kryo.Serializer;
import com.esotericsoftware.kryo.serializers.FieldSerializer;
import net.spy.memcached.CachedData;
import org.apache.commons.collections4.map.ListOrderedMap;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.jasig.cas.TestUtils;
import org.jasig.cas.authentication.AcceptUsersAuthenticationHandler;
import org.jasig.cas.authentication.Authentication;
import org.jasig.cas.authentication.AuthenticationBuilder;
import org.jasig.cas.authentication.AuthenticationHandler;
import org.jasig.cas.authentication.BasicCredentialMetaData;
import org.jasig.cas.authentication.Credential;
import org.jasig.cas.authentication.CredentialMetaData;
import org.jasig.cas.authentication.DefaultAuthenticationBuilder;
import org.jasig.cas.authentication.DefaultHandlerResult;
import org.jasig.cas.authentication.HttpBasedServiceCredential;
import org.jasig.cas.authentication.PreventedException;
import org.jasig.cas.authentication.RememberMeCredential;
import org.jasig.cas.authentication.UsernamePasswordCredential;
import org.jasig.cas.authentication.principal.DefaultPrincipalFactory;
import org.jasig.cas.authentication.principal.PrincipalFactory;
import org.jasig.cas.authentication.principal.Service;
import org.jasig.cas.services.RegisteredServiceImpl;
import org.jasig.cas.ticket.ExpirationPolicy;
import org.jasig.cas.ticket.ServiceTicket;
import org.jasig.cas.ticket.TicketGrantingTicket;
import org.jasig.cas.ticket.TicketGrantingTicketImpl;
import org.jasig.cas.ticket.support.NeverExpiresExpirationPolicy;
import org.joda.time.DateTime;
import org.junit.Test;
import javax.security.auth.login.AccountNotFoundException;
import javax.security.auth.login.FailedLoginException;
import javax.validation.constraints.NotNull;
import java.net.MalformedURLException;
import java.net.URL;
import java.security.GeneralSecurityException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import static org.junit.Assert.*;
/**
* Unit test for {@link KryoTranscoder} class.
*
* @author Marvin S. Addison
* @since 3.0.0
*/
@SuppressWarnings("rawtypes")
public class KryoTranscoderTests {
    private static final String ST_ID = "ST-1234567890ABCDEFGHIJKLMNOPQRSTUVWXYZ01234567890ABCDEFGHIJK";
    private static final String TGT_ID = "TGT-1234567890ABCDEFGHIJKLMNOPQRSTUVWXYZ01234567890ABCDEFGHIJK-cas1";
    private static final String USERNAME = "handymanbob";
    private static final String PASSWORD = "foo";
    private static final String NICKNAME_KEY = "nickname";
    private static final String NICKNAME_VALUE = "bob";

    /** Transcoder under test; configured once in the constructor. */
    private final KryoTranscoder transcoder;

    /** Principal attributes attached to every test ticket. */
    private final Map<String, Object> principalAttributes;

    public KryoTranscoderTests() {
        transcoder = new KryoTranscoder();
        // Register field serializers for the mock ticket types so Kryo can
        // round-trip them. Diamond operator for consistency with the rest of the class.
        final Map<Class<?>, Serializer> serializerMap = new HashMap<>();
        serializerMap.put(
                MockServiceTicket.class,
                new FieldSerializer(transcoder.getKryo(), MockServiceTicket.class));
        serializerMap.put(
                MockTicketGrantingTicket.class,
                new FieldSerializer(transcoder.getKryo(), MockTicketGrantingTicket.class));
        transcoder.setSerializerMap(serializerMap);
        transcoder.initialize();
        this.principalAttributes = new HashMap<>();
        this.principalAttributes.put(NICKNAME_KEY, NICKNAME_VALUE);
    }

    /**
     * Round-trips a real {@link TicketGrantingTicketImpl} (and a service ticket it
     * granted) through encode/decode and verifies equality.
     */
    @Test
    public void verifyEncodeDecodeTGTImpl() throws Exception {
        final Credential userPassCredential = new UsernamePasswordCredential(USERNAME, PASSWORD);
        final AuthenticationBuilder bldr = new DefaultAuthenticationBuilder(
                new DefaultPrincipalFactory()
                        .createPrincipal("user", Collections.unmodifiableMap(this.principalAttributes)));
        bldr.setAttributes(Collections.unmodifiableMap(this.principalAttributes));
        bldr.setAuthenticationDate(new DateTime());
        bldr.addCredential(new BasicCredentialMetaData(userPassCredential));
        bldr.addFailure("error", AccountNotFoundException.class);
        bldr.addSuccess("authn", new DefaultHandlerResult(
                new AcceptUsersAuthenticationHandler(),
                new BasicCredentialMetaData(userPassCredential)));
        // Cleanup: an identical "parent" TGT used to be constructed here but was
        // never referenced; it has been removed.
        final TicketGrantingTicket expectedTGT =
                new TicketGrantingTicketImpl(TGT_ID, TestUtils.getService(),
                        null, bldr.build(),
                        new NeverExpiresExpirationPolicy());
        final ServiceTicket ticket = expectedTGT.grantServiceTicket(ST_ID,
                TestUtils.getService(),
                new NeverExpiresExpirationPolicy(), false);
        CachedData result = transcoder.encode(expectedTGT);
        final TicketGrantingTicket resultTicket = (TicketGrantingTicket) transcoder.decode(result);
        assertEquals(expectedTGT, resultTicket);
        result = transcoder.encode(ticket);
        final ServiceTicket resultStTicket = (ServiceTicket) transcoder.decode(result);
        assertEquals(ticket, resultStTicket);
    }

    /**
     * Round-trips the mock ticket types, including TGTs carrying a
     * username/password credential and HTTP-based proxy credentials.
     */
    @Test
    public void verifyEncodeDecode() throws Exception {
        final ServiceTicket expectedST =
                new MockServiceTicket(ST_ID);
        assertEquals(expectedST, transcoder.decode(transcoder.encode(expectedST)));
        final Credential userPassCredential = new UsernamePasswordCredential(USERNAME, PASSWORD);
        final TicketGrantingTicket expectedTGT = new MockTicketGrantingTicket(TGT_ID, userPassCredential, this.principalAttributes);
        expectedTGT.grantServiceTicket(ST_ID, null, null, false);
        assertEquals(expectedTGT, transcoder.decode(transcoder.encode(expectedTGT)));
        internalProxyTest("http://localhost");
        internalProxyTest("https://localhost:8080/path/file.html?p1=v1&p2=v2#fragment");
    }

    /**
     * Round-trips a TGT built from an {@link HttpBasedServiceCredential} for the
     * given proxy URL.
     *
     * @param proxyUrl the callback URL to embed in the credential
     * @throws MalformedURLException if {@code proxyUrl} is not a valid URL
     */
    private void internalProxyTest(final String proxyUrl) throws MalformedURLException {
        final RegisteredServiceImpl svc = new RegisteredServiceImpl();
        svc.setServiceId("https://some.app.edu");
        final Credential proxyCredential = new HttpBasedServiceCredential(new URL(proxyUrl), svc);
        final TicketGrantingTicket expectedTGT = new MockTicketGrantingTicket(TGT_ID, proxyCredential, this.principalAttributes);
        expectedTGT.grantServiceTicket(ST_ID, null, null, false);
        assertEquals(expectedTGT, transcoder.decode(transcoder.encode(expectedTGT)));
    }

    /** Kryo must handle {@code Collections.unmodifiableMap} attribute wrappers. */
    @Test
    public void verifyEncodeDecodeTGTWithUnmodifiableMap() throws Exception {
        final Credential userPassCredential = new UsernamePasswordCredential(USERNAME, PASSWORD);
        final TicketGrantingTicket expectedTGT =
                new MockTicketGrantingTicket(TGT_ID, userPassCredential, Collections.unmodifiableMap(this.principalAttributes));
        expectedTGT.grantServiceTicket(ST_ID, null, null, false);
        assertEquals(expectedTGT, transcoder.decode(transcoder.encode(expectedTGT)));
    }

    /** Kryo must handle {@code Collections.unmodifiableList} attribute values. */
    @Test
    public void verifyEncodeDecodeTGTWithUnmodifiableList() throws Exception {
        final Credential userPassCredential = new UsernamePasswordCredential(USERNAME, PASSWORD);
        final List<String> values = new ArrayList<>();
        values.add(NICKNAME_VALUE);
        final Map<String, Object> newAttributes = new HashMap<>();
        newAttributes.put(NICKNAME_KEY, Collections.unmodifiableList(values));
        final TicketGrantingTicket expectedTGT = new MockTicketGrantingTicket(TGT_ID, userPassCredential, newAttributes);
        expectedTGT.grantServiceTicket(ST_ID, null, null, false);
        assertEquals(expectedTGT, transcoder.decode(transcoder.encode(expectedTGT)));
    }

    /** Kryo must handle {@link LinkedHashMap} attribute maps. */
    @Test
    public void verifyEncodeDecodeTGTWithLinkedHashMap() throws Exception {
        final Credential userPassCredential = new UsernamePasswordCredential(USERNAME, PASSWORD);
        final TicketGrantingTicket expectedTGT =
                new MockTicketGrantingTicket(TGT_ID, userPassCredential, new LinkedHashMap<String, Object>(this.principalAttributes));
        expectedTGT.grantServiceTicket(ST_ID, null, null, false);
        assertEquals(expectedTGT, transcoder.decode(transcoder.encode(expectedTGT)));
    }

    /** Kryo must handle commons-collections {@link ListOrderedMap} attribute maps. */
    @Test
    public void verifyEncodeDecodeTGTWithListOrderedMap() throws Exception {
        final Credential userPassCredential = new UsernamePasswordCredential(USERNAME, PASSWORD);
        @SuppressWarnings("unchecked")
        final TicketGrantingTicket expectedTGT =
                new MockTicketGrantingTicket(TGT_ID, userPassCredential, ListOrderedMap.listOrderedMap(this.principalAttributes));
        expectedTGT.grantServiceTicket(ST_ID, null, null, false);
        assertEquals(expectedTGT, transcoder.decode(transcoder.encode(expectedTGT)));
    }

    /** Kryo must handle {@code Collections.unmodifiableSet} attribute values. */
    @Test
    public void verifyEncodeDecodeTGTWithUnmodifiableSet() throws Exception {
        final Map<String, Object> newAttributes = new HashMap<>();
        final Set<String> values = new HashSet<>();
        values.add(NICKNAME_VALUE);
        newAttributes.put(NICKNAME_KEY, Collections.unmodifiableSet(values));
        final Credential userPassCredential = new UsernamePasswordCredential(USERNAME, PASSWORD);
        final TicketGrantingTicket expectedTGT = new MockTicketGrantingTicket(TGT_ID, userPassCredential, newAttributes);
        expectedTGT.grantServiceTicket(ST_ID, null, null, false);
        assertEquals(expectedTGT, transcoder.decode(transcoder.encode(expectedTGT)));
    }

    /** Kryo must handle {@code Collections.singleton} attribute values. */
    @Test
    public void verifyEncodeDecodeTGTWithSingleton() throws Exception {
        final Map<String, Object> newAttributes = new HashMap<>();
        newAttributes.put(NICKNAME_KEY, Collections.singleton(NICKNAME_VALUE));
        final Credential userPassCredential = new UsernamePasswordCredential(USERNAME, PASSWORD);
        final TicketGrantingTicket expectedTGT = new MockTicketGrantingTicket(TGT_ID, userPassCredential, newAttributes);
        expectedTGT.grantServiceTicket(ST_ID, null, null, false);
        assertEquals(expectedTGT, transcoder.decode(transcoder.encode(expectedTGT)));
    }

    /** Kryo must handle {@code Collections.singletonMap} attribute maps. */
    @Test
    public void verifyEncodeDecodeTGTWithSingletonMap() throws Exception {
        final Map<String, Object> newAttributes = Collections.singletonMap(NICKNAME_KEY, (Object) NICKNAME_VALUE);
        final Credential userPassCredential = new UsernamePasswordCredential(USERNAME, PASSWORD);
        final TicketGrantingTicket expectedTGT = new MockTicketGrantingTicket(TGT_ID, userPassCredential, newAttributes);
        expectedTGT.grantServiceTicket(ST_ID, null, null, false);
        assertEquals(expectedTGT, transcoder.decode(transcoder.encode(expectedTGT)));
    }

    /**
     * Minimal {@link ServiceTicket} whose identity is its id only.
     * NOTE: field names are serialized by Kryo's {@code FieldSerializer};
     * do not rename fields without re-checking serialized compatibility.
     */
    static class MockServiceTicket implements ServiceTicket {
        private static final long serialVersionUID = -206395373480723831L;
        private String id;

        MockServiceTicket() { /* for serialization */ }

        MockServiceTicket(final String id) {
            this.id = id;
        }

        public Service getService() {
            return null;
        }

        public boolean isFromNewLogin() {
            return false;
        }

        public boolean isValidFor(final Service service) {
            return false;
        }

        public TicketGrantingTicket grantTicketGrantingTicket(final String id, final Authentication authentication,
                final ExpirationPolicy expirationPolicy) {
            return null;
        }

        public String getId() {
            return id;
        }

        public boolean isExpired() {
            return false;
        }

        public TicketGrantingTicket getGrantingTicket() {
            return null;
        }

        public long getCreationTime() {
            return 0;
        }

        public int getCountOfUses() {
            return 0;
        }

        @Override
        public boolean equals(final Object other) {
            return other instanceof MockServiceTicket && ((MockServiceTicket) other).getId().equals(id);
        }

        @Override
        public int hashCode() {
            final HashCodeBuilder bldr = new HashCodeBuilder(17, 33);
            return bldr.append(this.id)
                    .toHashCode();
        }
    }

    /**
     * Minimal {@link TicketGrantingTicket} carrying a real {@link Authentication}
     * built from the supplied credential and attributes.
     * NOTE: field names are serialized by Kryo's {@code FieldSerializer};
     * do not rename fields without re-checking serialized compatibility.
     */
    private static class MockTicketGrantingTicket implements TicketGrantingTicket {
        private static final long serialVersionUID = 4829406617873497061L;
        private final String id;
        private int usageCount;
        private Service proxiedBy;
        private final Date creationDate = new Date();
        private final Authentication authentication;

        /** Factory to create the principal type. **/
        @NotNull
        private final PrincipalFactory principalFactory = new DefaultPrincipalFactory();

        /** Constructor for serialization support. */
        MockTicketGrantingTicket() {
            this.id = null;
            this.authentication = null;
        }

        MockTicketGrantingTicket(final String id, final Credential credential, final Map<String, Object> principalAttributes) {
            this.id = id;
            final CredentialMetaData credentialMetaData = new BasicCredentialMetaData(credential);
            final AuthenticationBuilder builder = new DefaultAuthenticationBuilder();
            builder.setPrincipal(this.principalFactory.createPrincipal(USERNAME, principalAttributes));
            builder.setAuthenticationDate(new DateTime());
            builder.addCredential(credentialMetaData);
            builder.addAttribute(RememberMeCredential.AUTHENTICATION_ATTRIBUTE_REMEMBER_ME, Boolean.TRUE);
            final AuthenticationHandler handler = new MockAuthenticationHandler();
            try {
                builder.addSuccess(handler.getName(), handler.authenticate(credential));
            } catch (final Exception e) {
                // authenticate() declares checked exceptions; the mock never throws,
                // so any failure here is a programming error.
                throw new RuntimeException(e);
            }
            builder.addFailure(handler.getName(), FailedLoginException.class);
            this.authentication = builder.build();
        }

        @Override
        public Authentication getAuthentication() {
            return this.authentication;
        }

        @Override
        public List<Authentication> getSupplementalAuthentications() {
            return Collections.emptyList();
        }

        @Override
        public ServiceTicket grantServiceTicket(
                final String id,
                final Service service,
                final ExpirationPolicy expirationPolicy,
                final boolean credentialsProvided) {
            // Counting grants matters: usageCount participates in equals/hashCode.
            this.usageCount++;
            return new MockServiceTicket(id);
        }

        @Override
        public Service getProxiedBy() {
            return proxiedBy;
        }

        @Override
        public Map<String, Service> getServices() {
            return Collections.emptyMap();
        }

        @Override
        public void removeAllServices() {}

        @Override
        public void markTicketExpired() {}

        @Override
        public boolean isRoot() {
            return true;
        }

        @Override
        public TicketGrantingTicket getRoot() {
            return this;
        }

        @Override
        public List<Authentication> getChainedAuthentications() {
            return Collections.emptyList();
        }

        @Override
        public String getId() {
            return this.id;
        }

        @Override
        public boolean isExpired() {
            return false;
        }

        @Override
        public TicketGrantingTicket getGrantingTicket() {
            return this;
        }

        @Override
        public long getCreationTime() {
            return this.creationDate.getTime();
        }

        @Override
        public int getCountOfUses() {
            return this.usageCount;
        }

        @Override
        public boolean equals(final Object other) {
            return other instanceof MockTicketGrantingTicket
                    && ((MockTicketGrantingTicket) other).getId().equals(this.id)
                    && ((MockTicketGrantingTicket) other).getCountOfUses() == this.usageCount
                    && ((MockTicketGrantingTicket) other).getCreationTime() == this.creationDate.getTime()
                    && ((MockTicketGrantingTicket) other).getAuthentication().equals(this.authentication);
        }

        @Override
        public int hashCode() {
            final HashCodeBuilder bldr = new HashCodeBuilder(17, 33);
            return bldr.append(this.id)
                    .append(this.usageCount)
                    .append(this.creationDate.getTime())
                    .append(this.authentication).toHashCode();
        }
    }

    /** Handler that always succeeds; wraps HTTP credentials when given them. */
    private static class MockAuthenticationHandler implements AuthenticationHandler {
        @Override
        public DefaultHandlerResult authenticate(final Credential credential) throws GeneralSecurityException, PreventedException {
            if (credential instanceof HttpBasedServiceCredential) {
                return new DefaultHandlerResult(this, (HttpBasedServiceCredential) credential);
            } else {
                return new DefaultHandlerResult(this, new BasicCredentialMetaData(credential));
            }
        }

        @Override
        public boolean supports(final Credential credential) {
            return true;
        }

        @Override
        public String getName() {
            return this.getClass().getSimpleName();
        }
    }
}
| |
package com.stayingalive.stayingaliveapp.game;
import com.badlogic.gdx.graphics.Color;
import com.badlogic.gdx.graphics.Pixmap;
import com.badlogic.gdx.graphics.Texture;
import com.badlogic.gdx.graphics.g2d.TextureAtlas;
import com.badlogic.gdx.scenes.scene2d.InputEvent;
import com.badlogic.gdx.scenes.scene2d.InputListener;
import com.badlogic.gdx.scenes.scene2d.Stage;
import com.badlogic.gdx.scenes.scene2d.ui.Button;
import com.badlogic.gdx.scenes.scene2d.ui.ProgressBar;
import com.badlogic.gdx.scenes.scene2d.ui.Skin;
import com.badlogic.gdx.scenes.scene2d.ui.TextButton;
import com.badlogic.gdx.scenes.scene2d.ui.Touchpad;
import com.badlogic.gdx.scenes.scene2d.utils.ClickListener;
import com.badlogic.gdx.scenes.scene2d.utils.Drawable;
import com.badlogic.gdx.scenes.scene2d.utils.TextureRegionDrawable;
import com.stayingalive.stayingaliveapp.StayingAliveGame;
import com.stayingalive.stayingaliveapp.screen.GameScreen;
import com.stayingalive.stayingaliveapp.screen.MainScreen;
import com.stayingalive.stayingaliveapp.screen.ViewPortConstants;
/**
* Created by mauriciolara on 11/28/14.
*
* Via this class we process the game inputs
*
* NOTE: Here we initialize the controllers section of the game screen
*
* REMEMBER TO OVERWRITE THE NEXT METHODS IN YOUR SCREEN
* - show
* - update
* - render
*/
public class InputHandler {

    /** Callbacks the game screen implements to react to controller input. */
    public interface Callbacks {
        void powerUpClick(PowerUp.Type type);

        void shieldTouchDown();

        void shieldTouchUp();
    }

    private StayingAliveGame mGame;
    private Callbacks mCallbacks;
    private Stage mStage;
    private Touchpad mTouchpad;
    private Touchpad.TouchpadStyle mTouchpadStyle;
    private Skin mTouchpadSkin;
    private Drawable mTouchpadBackground;
    private Drawable mTouchpadKnob;
    private ProgressBar mShieldBar;

    /* Buttons */
    private Button mPowerupButton;
    private Button mShieldButton;
    private Button.ButtonStyle STYLE_BTTN_SHIELD;
    private Button.ButtonStyle STYLE_POWER_UP_NULL;
    private Button.ButtonStyle STYLE_POWER_UP_BLOW_EM_ALL;
    private Button.ButtonStyle STYLE_POWER_UP_SHIELD;
    private Button.ButtonStyle STYLE_POWER_UP_SLOW_MO;

    /** Currently held power-up; {@code null} when the slot is empty. */
    public PowerUp.Type actualPowerUp = null;

    /**
     * @param stage     the stage that will receive the HUD actors
     * @param game      the game instance (used for assets and screen switching)
     * @param callbacks receiver for power-up / shield events
     */
    public InputHandler(Stage stage, StayingAliveGame game, Callbacks callbacks) {
        mStage = stage;
        mCallbacks = callbacks;
        mGame = game;
        initializeButtonsStyles(game);
    }

    /** Updates the shield energy bar; {@code value} is clamped by the bar's 0..10 range. */
    public void setProgressShieldValue(double value) {
        mShieldBar.setValue((float) value);
    }

    /** Creates the shield energy bar and adds it to the stage, starting full. */
    private void initializeShieldBar() {
        Skin progressBarSkin = new Skin();
        Pixmap background = new Pixmap(10, 10, Pixmap.Format.RGBA8888);
        background.setColor(Color.WHITE);
        background.fill();
        progressBarSkin.add("background", new Texture(background));
        Pixmap knob = new Pixmap(10, 50, Pixmap.Format.RGBA8888);
        knob.setColor(Color.WHITE);
        knob.fill();
        // BUGFIX: the knob texture was built from the "background" pixmap,
        // so the knob pixmap was never used at all.
        progressBarSkin.add("knob", new Texture(knob));
        // Pixmaps are CPU-side resources; once uploaded to Textures they must be
        // disposed or they leak native memory.
        background.dispose();
        knob.dispose();
        ProgressBar.ProgressBarStyle barStyle = new ProgressBar.ProgressBarStyle(
                progressBarSkin.newDrawable("background", Color.DARK_GRAY),
                progressBarSkin.newDrawable("knob", Color.RED));
        barStyle.knobBefore = barStyle.knob;
        mShieldBar = new ProgressBar(0, 10, 0.1f, false, barStyle);
        mShieldBar.setBounds(
                200 /* x-position */,
                200 /* y-position */,
                ViewPortConstants.VIEWPORT_WIDTH / 2 /* width */,
                250 /* height */);
        mStage.addActor(mShieldBar);
        mShieldBar.setValue(10); // start with a full shield
    }

    /** Creates the movement touchpad and adds it to the stage. */
    private void initializeTouchpad() {
        mTouchpadSkin = new Skin();
        mTouchpadSkin.add("touchpadBackground", new Texture("touchpad/TouchpadOuter.png"));
        mTouchpadSkin.add("touchKnob", new Texture("touchpad/TouchpadIntro.png"));
        mTouchpadStyle = new Touchpad.TouchpadStyle();
        mTouchpadBackground = mTouchpadSkin.getDrawable("touchpadBackground");
        mTouchpadKnob = mTouchpadSkin.getDrawable("touchKnob");
        mTouchpadStyle.background = mTouchpadBackground;
        mTouchpadStyle.knob = mTouchpadKnob;
        /* initializing the touchpad */
        mTouchpad = new Touchpad(50, mTouchpadStyle);
        mTouchpad.setBounds(
                50 /* x-position */,
                50 /* y-position */,
                280 /* width */,
                250 /* height */);
        mStage.addActor(mTouchpad);
    }

    /**
     * Stores the given power-up and swaps the power-up button style to match;
     * {@code null} empties the slot.
     *
     * @throws IllegalArgumentException for an unrecognized power-up type
     */
    public void setPowerUp(PowerUp.Type type) {
        actualPowerUp = type;
        if (type == null) {
            /* empty slot */
            mPowerupButton.setStyle(STYLE_POWER_UP_NULL);
        } else {
            switch (type) {
                case POWER_UP_SLOW_MO:
                    mPowerupButton.setStyle(STYLE_POWER_UP_SLOW_MO);
                    break;
                case POWER_UP_SHIELD:
                    mPowerupButton.setStyle(STYLE_POWER_UP_SHIELD);
                    break;
                case POWER_UP_BLOW_EM_ALL:
                    mPowerupButton.setStyle(STYLE_POWER_UP_BLOW_EM_ALL);
                    break;
                default:
                    throw new IllegalArgumentException("Invalid type");
            }
        }
    }

    /** Loads the button styles from the UI atlas; each style uses the same region up and down. */
    private void initializeButtonsStyles(StayingAliveGame game) {
        TextureAtlas atlas = game.getAssetManager().get("uiskin/uiskin.atlas", TextureAtlas.class);
        /* we initialize the buttons styles */
        TextureRegionDrawable power_null = new TextureRegionDrawable(atlas.findRegion("Button-PowerUp-Empty"));
        STYLE_POWER_UP_NULL = new Button.ButtonStyle();
        STYLE_POWER_UP_NULL.up = power_null;
        STYLE_POWER_UP_NULL.down = power_null;
        TextureRegionDrawable power_shield = new TextureRegionDrawable(atlas.findRegion("Button-PowerUp-Shield"));
        STYLE_POWER_UP_SHIELD = new Button.ButtonStyle();
        STYLE_POWER_UP_SHIELD.up = power_shield;
        STYLE_POWER_UP_SHIELD.down = power_shield;
        TextureRegionDrawable power_blow_em_all = new TextureRegionDrawable(atlas.findRegion("Button-PowerUp-Bomb"));
        STYLE_POWER_UP_BLOW_EM_ALL = new Button.ButtonStyle();
        STYLE_POWER_UP_BLOW_EM_ALL.up = power_blow_em_all;
        STYLE_POWER_UP_BLOW_EM_ALL.down = power_blow_em_all;
        TextureRegionDrawable power_slow_mo = new TextureRegionDrawable(atlas.findRegion("Button-PowerUp-SlowDown"));
        STYLE_POWER_UP_SLOW_MO = new Button.ButtonStyle();
        STYLE_POWER_UP_SLOW_MO.up = power_slow_mo;
        STYLE_POWER_UP_SLOW_MO.down = power_slow_mo;
        TextureRegionDrawable button_shield = new TextureRegionDrawable(atlas.findRegion("Button-Shield"));
        STYLE_BTTN_SHIELD = new Button.ButtonStyle();
        STYLE_BTTN_SHIELD.up = button_shield;
        STYLE_BTTN_SHIELD.down = button_shield;
    }

    /** Creates the power-up, shield, and exit buttons and adds them to the stage. */
    private void initializeButtons() {
        mPowerupButton = new Button(STYLE_POWER_UP_NULL);
        mPowerupButton.setBounds(
                450 /* x-position */,
                140 /* y-position */,
                110 /* width */,
                110 /* height */);
        mPowerupButton.addListener(new ClickListener() {
            @Override
            public void clicked(InputEvent event, float x, float y) {
                // Fire the held power-up, then empty the slot.
                mCallbacks.powerUpClick(actualPowerUp);
                setPowerUp(null);
            }
        });
        mStage.addActor(mPowerupButton);
        mShieldButton = new Button(STYLE_BTTN_SHIELD);
        mShieldButton.setBounds(
                600 /* x-position */,
                170 /* y-position */,
                110 /* width */,
                110 /* height */);
        mShieldButton.addListener(new InputListener() {
            public boolean touchDown(InputEvent event, float x, float y, int pointer, int button) {
                mCallbacks.shieldTouchDown();
                return true; // claim the touch so touchUp is delivered
            }

            public void touchUp(InputEvent event, float x, float y, int pointer, int button) {
                mCallbacks.shieldTouchUp();
            }
        });
        mStage.addActor(mShieldButton);
        final GameScreen gameScreen = (GameScreen) mGame.getScreen();
        // Renamed from "bttnExtiGame" (typo) for readability.
        TextButton exitButton = new TextButton("Exit", gameScreen.getSkin());
        exitButton.setBounds(
                (ViewPortConstants.VIEWPORT_WIDTH / 2) - 50 /* x-position */,
                20 /* y-position */,
                100 /* width */,
                50 /* height */);
        exitButton.addListener(new ClickListener() {
            @Override
            public void clicked(InputEvent event, float x, float y) {
                /* Send to main screen */
                mGame.setScreen(new MainScreen(mGame));
            }
        });
        mStage.addActor(exitButton);
    }

    /**
     * Should be called at the beginning of the show method in the game screen.
     */
    public void show() {
        initializeTouchpad();
        initializeButtons();
        initializeShieldBar();
    }

    /** Samples the touchpad; call once per frame from the screen's render loop. */
    public InputValues render() {
        InputValues values = new InputValues();
        values.knobPercentageX = mTouchpad.getKnobPercentX();
        values.knobPercentageY = mTouchpad.getKnobPercentY();
        return values;
    }

    /**
     * Simple container class that contains the touchpad event values.
     */
    public static class InputValues {
        public float knobPercentageX;
        public float knobPercentageY;
    }
}
| |
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.vcs.changes.committed;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.fileEditor.FileEditor;
import com.intellij.openapi.fileEditor.FileEditorManager;
import com.intellij.openapi.fileEditor.FileEditorManagerListener;
import com.intellij.openapi.project.Project;
import consulo.util.dataholder.Key;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.vcs.CachingCommittedChangesProvider;
import com.intellij.openapi.vcs.VcsBundle;
import com.intellij.openapi.vcs.changes.Change;
import com.intellij.openapi.vcs.versionBrowser.CommittedChangeList;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.ui.EditorNotificationPanel;
import com.intellij.util.Consumer;
import com.intellij.util.messages.MessageBusConnection;
import com.intellij.util.text.DateFormatUtil;
import consulo.logging.Logger;
import jakarta.inject.Inject;
import jakarta.inject.Provider;
import jakarta.inject.Singleton;
import org.jetbrains.annotations.NonNls;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.List;
/**
* @author yole
* todo: use EditorNotifications
*/
@Singleton
public class OutdatedVersionNotifier {
private static final Logger LOG = Logger.getInstance(OutdatedVersionNotifier.class);
private final Provider<FileEditorManager> myFileEditorManager;
private final CommittedChangesCache myCache;
private final Project myProject;
private static final Key<OutdatedRevisionPanel> PANEL_KEY = Key.create("OutdatedRevisionPanel");
private volatile boolean myIncomingChangesRequested;
/**
 * Wires this notifier into the project message bus: reacts to incoming-change
 * updates from {@link CommittedChangesCache} and to file-editor open events,
 * so outdated-revision panels can be kept in sync.
 */
@Inject
public OutdatedVersionNotifier(Provider<FileEditorManager> fileEditorManager,
                              CommittedChangesCache cache,
                              Project project) {
  myFileEditorManager = fileEditorManager;
  myCache = cache;
  myProject = project;
  MessageBusConnection busConnection = project.getMessageBus().connect();
  busConnection.subscribe(CommittedChangesCache.COMMITTED_TOPIC, new CommittedChangesAdapter() {
    @Override
    public void incomingChangesUpdated(@Nullable final List<CommittedChangeList> receivedChanges) {
      // If the cache has nothing yet, ask it to load asynchronously first;
      // otherwise the editors can be refreshed right away.
      if (myCache.getCachedIncomingChanges() == null) {
        requestLoadIncomingChanges();
      }
      else {
        updateAllEditorsLater();
      }
    }
    @Override
    public void changesCleared() {
      updateAllEditorsLater();
    }
  });
  busConnection.subscribe(FileEditorManagerListener.FILE_EDITOR_MANAGER, new MyFileEditorManagerListener());
}
/**
 * Kicks off an asynchronous load of incoming changes, unless one is already
 * in flight; on completion the flag is cleared and all editors are refreshed.
 *
 * NOTE(review): the check-then-set on the volatile flag is not atomic — two
 * callers racing here could both start a load. Presumably callers are confined
 * to the EDT; confirm before relying on it.
 */
private void requestLoadIncomingChanges() {
  debug("Requesting load of incoming changes");
  if (myIncomingChangesRequested) {
    return;
  }
  myIncomingChangesRequested = true;
  myCache.loadIncomingChangesAsync(new Consumer<List<CommittedChangeList>>() {
    @Override
    public void consume(final List<CommittedChangeList> loadedChangeLists) {
      myIncomingChangesRequested = false;
      updateAllEditorsLater();
    }
  }, true);
}
private static void debug(@NonNls String message) {
LOG.debug(message);
}
private void updateAllEditorsLater() {
debug("Queueing update of editors");
ApplicationManager.getApplication().invokeLater(new Runnable() {
@Override
public void run() {
updateAllEditors();
}
}, myProject.getDisposed());
}
private void updateAllEditors() {
if (myCache.getCachedIncomingChanges() == null) {
requestLoadIncomingChanges();
return;
}
debug("Updating editors");
final VirtualFile[] files = myFileEditorManager.get().getOpenFiles();
for(VirtualFile file: files) {
final Pair<CommittedChangeList,Change> pair = myCache.getIncomingChangeList(file);
final FileEditor[] fileEditors = myFileEditorManager.get().getEditors(file);
for(FileEditor editor: fileEditors) {
final OutdatedRevisionPanel oldPanel = editor.getUserData(PANEL_KEY);
if (pair != null) {
if (oldPanel != null) {
oldPanel.setChangeList(pair.first, pair.second);
}
else {
initPanel(pair.first, pair.second, editor);
}
}
else if (oldPanel != null) {
myFileEditorManager.get().removeTopComponent(editor, oldPanel);
editor.putUserData(PANEL_KEY, null);
}
}
}
}
private void initPanel(final CommittedChangeList list, final Change c, final FileEditor editor) {
if (!isIncomingChangesSupported(list)) {
return;
}
final OutdatedRevisionPanel component = new OutdatedRevisionPanel(list, c);
editor.putUserData(PANEL_KEY, component);
myFileEditorManager.get().addTopComponent(editor, component);
}
private class MyFileEditorManagerListener implements FileEditorManagerListener {
@Override
public void fileOpened(@Nonnull FileEditorManager source, @Nonnull VirtualFile file) {
if (myCache.getCachedIncomingChanges() == null) {
requestLoadIncomingChanges();
}
else {
final Pair<CommittedChangeList, Change> pair = myCache.getIncomingChangeList(file);
if (pair != null) {
final FileEditor[] fileEditors = source.getEditors(file);
for(FileEditor editor: fileEditors) {
initPanel(pair.first, pair.second, editor);
}
}
}
}
}
private static class OutdatedRevisionPanel extends EditorNotificationPanel {
private CommittedChangeList myChangeList;
public OutdatedRevisionPanel(CommittedChangeList changeList, final Change c) {
super();
createActionLabel(VcsBundle.message("outdated.version.show.diff.action"), "Compare.LastVersion");
createActionLabel(VcsBundle.message("outdated.version.update.project.action"), "Vcs.UpdateProject");
myChangeList = changeList;
updateLabelText(c);
}
private void updateLabelText(final Change c) {
String comment = myChangeList.getComment();
int pos = comment.indexOf("\n");
if (pos >= 0) {
comment = comment.substring(0, pos).trim() + "...";
}
final String formattedDate = DateFormatUtil.formatPrettyDateTime(myChangeList.getCommitDate());
final boolean dateIsPretty = ! formattedDate.contains("/");
final String key = c.getType() == Change.Type.DELETED ? "outdated.version.text.deleted" :
(dateIsPretty ? "outdated.version.pretty.date.text" : "outdated.version.text");
myLabel.setText(VcsBundle.message(key, myChangeList.getCommitterName(), formattedDate, comment));
}
public void setChangeList(final CommittedChangeList changeList, final Change c) {
myChangeList = changeList;
updateLabelText(c);
}
}
private static boolean isIncomingChangesSupported(@Nonnull CommittedChangeList list) {
CachingCommittedChangesProvider provider = list.getVcs().getCachingCommittedChangesProvider();
return provider != null && provider.supportsIncomingChanges();
}
}
| |
package org.rabix.bindings.helper;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.rabix.bindings.BindingException;
import org.rabix.bindings.mapper.FileMappingException;
import org.rabix.bindings.mapper.FilePathMapper;
import org.rabix.bindings.model.DataType;
import org.rabix.bindings.model.DirectoryValue;
import org.rabix.bindings.model.FileValue;
import org.rabix.bindings.model.Job;
import org.rabix.bindings.transformer.FileTransformer;
import org.rabix.common.helper.CloneHelper;
public class FileValueHelper {

  /**
   * Creates a copy of a value (in common format) in which all FileValues are replaced by the
   * result of applying {@code fileTransformer}. Lists and maps are rebuilt recursively; any
   * other value is deep-copied unchanged.
   *
   * @param value value in common format
   * @param fileTransformer transformer applied to every FileValue found
   * @return copy of value with replaced FileValues
   * @throws BindingException if the transformer fails
   */
  public static Object updateFileValues(Object value, FileTransformer fileTransformer) throws BindingException {
    if (value instanceof FileValue) {
      FileValue origFile = (FileValue) value;
      return fileTransformer.transform(origFile);
    } else if (value instanceof List) {
      List<Object> ret = new ArrayList<>();
      for (Object o : (List<?>) value) {
        ret.add(updateFileValues(o, fileTransformer));
      }
      return ret;
    } else if (value instanceof Map) {
      Map<Object, Object> ret = new HashMap<>();
      // Iterate entries directly instead of keySet() + get(key): one lookup per entry.
      for (Map.Entry<?, ?> entry : ((Map<?, ?>) value).entrySet()) {
        ret.put(entry.getKey(), updateFileValues(entry.getValue(), fileTransformer));
      }
      return ret;
    }
    return CloneHelper.deepCopy(value);
  }

  /**
   * Parses a value (in common format) and extracts all FileValue objects, depth-first.
   *
   * @param value value in common format
   * @return list of FileValue objects found anywhere inside value
   */
  public static List<FileValue> getFilesFromValue(Object value) {
    List<FileValue> ret = new ArrayList<>();
    if (value instanceof List) {
      for (Object o : (List<?>) value) {
        ret.addAll(getFilesFromValue(o));
      }
    } else if (value instanceof FileValue) {
      ret.add((FileValue) value);
    } else if (value instanceof Map) {
      // Only map values are scanned; keys are never inspected.
      for (Object mapValue : ((Map<?, ?>) value).values()) {
        ret.addAll(getFilesFromValue(mapValue));
      }
    }
    return ret;
  }

  /**
   * Reads the type of an input value (in common format).
   *
   * @param value value in common format
   * @return DataType object that represents the value's type
   */
  public static DataType getDataTypeFromValue(Object value) {
    if (value == null)
      return new DataType(DataType.Type.NULL);

    // DIRECTORY — checked before FILE; presumably DirectoryValue specializes FileValue
    // (the casts in mapValue/findFiles rely on that) — TODO(review): confirm in the model.
    if (value instanceof DirectoryValue)
      return new DataType(DataType.Type.DIRECTORY);

    // FILE
    if (value instanceof FileValue)
      return new DataType(DataType.Type.FILE);

    // ARRAY: element type is the single element type, a UNION of several, or EMPTY
    if (value instanceof List) {
      Set<DataType> arrayTypes = new HashSet<>();
      DataType arrayType;
      for (Object element : (List<?>) value) {
        arrayTypes.add(getDataTypeFromValue(element));
      }
      if (arrayTypes.isEmpty())
        arrayType = new DataType(DataType.Type.EMPTY);
      else if (arrayTypes.size() == 1)
        arrayType = arrayTypes.iterator().next();
      else
        arrayType = new DataType(DataType.Type.UNION, arrayTypes);
      return new DataType(DataType.Type.ARRAY, arrayType);
    }

    // RECORD: map keys become field names
    if (value instanceof Map) {
      Map<String, DataType> subTypes = new HashMap<>();
      // entrySet() avoids a second lookup per key compared with keySet() + get(key).
      for (Map.Entry<?, ?> entry : ((Map<?, ?>) value).entrySet()) {
        subTypes.put((String) entry.getKey(), getDataTypeFromValue(entry.getValue()));
      }
      return new DataType(DataType.Type.RECORD, subTypes);
    }

    // PRIMITIVE: first declared primitive type that matches wins
    for (DataType.Type t : DataType.Type.values()) {
      if (t.primitiveTypes != null && t.isPrimitive(value))
        return new DataType(t, value);
    }
    return new DataType(DataType.Type.ANY);
  }

  /**
   * Maps input file paths using the particular {@link FilePathMapper}.
   *
   * @param job Job object
   * @param fileMapper FileMapper object
   * @return Updated Job object (original inputs are not mutated; a deep copy is mapped)
   * @throws BindingException if any path fails to map
   */
  @SuppressWarnings("unchecked")
  public static Job mapInputFilePaths(Job job, FilePathMapper fileMapper) throws BindingException {
    Map<String, Object> inputs = job.getInputs();
    Map<String, Object> clonedInputs = (Map<String, Object>) CloneHelper.deepCopy(inputs);
    try {
      mapValue(clonedInputs, fileMapper, job.getConfig());
    } catch (FileMappingException e) {
      throw new BindingException("Failed to map input file paths", e);
    }
    return Job.cloneWithInputs(job, clonedInputs);
  }

  /**
   * Maps output file paths using the particular {@link FilePathMapper}.
   *
   * @param job Job object
   * @param fileMapper FileMapper object
   * @return Updated Job object (original outputs are not mutated; a deep copy is mapped)
   * @throws BindingException if any path fails to map
   */
  @SuppressWarnings("unchecked")
  public static Job mapOutputFilePaths(Job job, FilePathMapper fileMapper) throws BindingException {
    Map<String, Object> outputs = job.getOutputs();
    Map<String, Object> clonedOutputs = (Map<String, Object>) CloneHelper.deepCopy(outputs);
    try {
      mapValue(clonedOutputs, fileMapper, job.getConfig());
    } catch (FileMappingException e) {
      throw new BindingException("Failed to map output file paths", e);
    }
    return Job.cloneWithOutputs(job, clonedOutputs);
  }

  /**
   * Recursively rewrites, in place, the path and dirname of every file/directory value,
   * including secondary files and (for directories) their listing.
   */
  @SuppressWarnings("unchecked")
  private static void mapValue(Object value, FilePathMapper fileMapper, Map<String, Object> config) throws FileMappingException {
    if (value instanceof FileValue || value instanceof DirectoryValue) {
      // Cast relies on DirectoryValue being a FileValue subtype (same assumption as in
      // findFiles) — TODO(review): confirm in the model classes.
      FileValue fileValue = (FileValue) value;
      if (fileValue.getPath() != null) {
        fileValue.setPath(fileMapper.map(fileValue.getPath(), config));
      }
      if (fileValue.getDirname() != null) {
        fileValue.setDirname(fileMapper.map(fileValue.getDirname(), config));
      }
      List<FileValue> secondaryFiles = fileValue.getSecondaryFiles();
      if (secondaryFiles != null) {
        mapValue(secondaryFiles, fileMapper, config);
      }
      if (value instanceof DirectoryValue) {
        List<FileValue> listing = ((DirectoryValue) value).getListing();
        if (listing != null) {
          mapValue(listing, fileMapper, config);
        }
      }
      return;
    }
    if (value instanceof List<?>) {
      for (Object singleValue : (List<?>) value) {
        mapValue(singleValue, fileMapper, config);
      }
      return;
    }
    if (value instanceof Map<?, ?>) {
      for (Object singleValue : ((Map<String, Object>) value).values()) {
        mapValue(singleValue, fileMapper, config);
      }
    }
  }

  /**
   * Gets a set of input {@link FileValue} objects.
   *
   * @param job Job object
   * @return FileValue objects
   * @throws BindingException declared for API symmetry; not thrown by the current traversal
   */
  public static Set<FileValue> getInputFiles(Job job) throws BindingException {
    return findFiles(job.getInputs());
  }

  /**
   * Gets a set of output {@link FileValue} objects.
   *
   * @param job Job object
   * @return FileValue objects
   * @throws BindingException declared for API symmetry; not thrown by the current traversal
   */
  public static Set<FileValue> getOutputFiles(Job job) throws BindingException {
    return findFiles(job.getOutputs());
  }

  // Collects top-level file/directory values from nested lists and maps. NOTE(review):
  // unlike mapValue, this does not descend into secondaryFiles or directory listings —
  // verify against callers whether that is intended.
  @SuppressWarnings("unchecked")
  private static Set<FileValue> findFiles(Object value) {
    Set<FileValue> fileValues = new HashSet<>();
    if (value instanceof FileValue || value instanceof DirectoryValue) {
      fileValues.add((FileValue) value);
      return fileValues;
    }
    if (value instanceof List<?>) {
      for (Object singleValue : (List<?>) value) {
        fileValues.addAll(findFiles(singleValue));
      }
      return fileValues;
    }
    if (value instanceof Map<?, ?>) {
      for (Object singleValue : ((Map<String, Object>) value).values()) {
        fileValues.addAll(findFiles(singleValue));
      }
      return fileValues;
    }
    return fileValues;
  }

  /**
   * Updates input files.
   *
   * @param job Job object
   * @param fileTransformer FileTransformer that transforms old file values into new ones
   * @return Updated Job object
   * @throws BindingException if the transformer fails
   */
  @SuppressWarnings("unchecked")
  public static Job updateInputFiles(Job job, FileTransformer fileTransformer) throws BindingException {
    Map<String, Object> clonedInputs = (Map<String, Object>) CloneHelper.deepCopy(job.getInputs());
    clonedInputs = (Map<String, Object>) updateFileValues(clonedInputs, fileTransformer);
    return Job.cloneWithInputs(job, clonedInputs);
  }

  /**
   * Updates output files.
   *
   * @param job Job object
   * @param fileTransformer FileTransformer that transforms old file values into new ones
   * @return Updated Job object
   * @throws BindingException if the transformer fails
   */
  @SuppressWarnings("unchecked")
  public static Job updateOutputFiles(Job job, FileTransformer fileTransformer) throws BindingException {
    Map<String, Object> clonedOutputs = (Map<String, Object>) CloneHelper.deepCopy(job.getOutputs());
    clonedOutputs = (Map<String, Object>) updateFileValues(clonedOutputs, fileTransformer);
    return Job.cloneWithOutputs(job, clonedOutputs);
  }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.connector;
import com.facebook.presto.connector.informationSchema.InformationSchemaConnector;
import com.facebook.presto.connector.system.SystemConnector;
import com.facebook.presto.index.IndexManager;
import com.facebook.presto.metadata.Catalog;
import com.facebook.presto.metadata.CatalogManager;
import com.facebook.presto.metadata.HandleResolver;
import com.facebook.presto.metadata.InternalNodeManager;
import com.facebook.presto.metadata.MetadataManager;
import com.facebook.presto.security.AccessControlManager;
import com.facebook.presto.spi.PageIndexerFactory;
import com.facebook.presto.spi.PageSorter;
import com.facebook.presto.spi.SystemTable;
import com.facebook.presto.spi.classloader.ThreadContextClassLoader;
import com.facebook.presto.spi.connector.Connector;
import com.facebook.presto.spi.connector.ConnectorAccessControl;
import com.facebook.presto.spi.connector.ConnectorContext;
import com.facebook.presto.spi.connector.ConnectorFactory;
import com.facebook.presto.spi.connector.ConnectorIndexProvider;
import com.facebook.presto.spi.connector.ConnectorNodePartitioningProvider;
import com.facebook.presto.spi.connector.ConnectorPageSinkProvider;
import com.facebook.presto.spi.connector.ConnectorPageSourceProvider;
import com.facebook.presto.spi.connector.ConnectorRecordSetProvider;
import com.facebook.presto.spi.connector.ConnectorRecordSinkProvider;
import com.facebook.presto.spi.connector.ConnectorSplitManager;
import com.facebook.presto.spi.procedure.Procedure;
import com.facebook.presto.spi.session.PropertyMetadata;
import com.facebook.presto.spi.type.TypeManager;
import com.facebook.presto.split.PageSinkManager;
import com.facebook.presto.split.PageSourceManager;
import com.facebook.presto.split.RecordPageSinkProvider;
import com.facebook.presto.split.RecordPageSourceProvider;
import com.facebook.presto.split.SplitManager;
import com.facebook.presto.sql.planner.NodePartitioningManager;
import com.facebook.presto.transaction.TransactionManager;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import io.airlift.log.Logger;
import io.airlift.node.NodeInfo;
import javax.annotation.PreDestroy;
import javax.annotation.concurrent.GuardedBy;
import javax.annotation.concurrent.ThreadSafe;
import javax.inject.Inject;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicBoolean;
import static com.facebook.presto.connector.ConnectorId.createInformationSchemaConnectorId;
import static com.facebook.presto.connector.ConnectorId.createSystemTablesConnectorId;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import static java.lang.String.format;
import static java.util.Objects.requireNonNull;
/**
 * Registry of connector factories and live connectors. Creating a catalog materializes the
 * connector plus its information_schema and system-tables companion connectors and wires them
 * into the per-connector service managers; dropping a catalog unwinds that wiring.
 */
@ThreadSafe
public class ConnectorManager
{
    private static final Logger log = Logger.get(ConnectorManager.class);

    private final MetadataManager metadataManager;
    private final CatalogManager catalogManager;
    private final AccessControlManager accessControlManager;
    private final SplitManager splitManager;
    private final PageSourceManager pageSourceManager;
    private final IndexManager indexManager;
    private final NodePartitioningManager nodePartitioningManager;
    private final PageSinkManager pageSinkManager;
    private final HandleResolver handleResolver;
    private final InternalNodeManager nodeManager;
    private final TypeManager typeManager;
    private final PageSorter pageSorter;
    private final PageIndexerFactory pageIndexerFactory;
    private final NodeInfo nodeInfo;
    private final TransactionManager transactionManager;

    @GuardedBy("this")
    private final ConcurrentMap<String, ConnectorFactory> connectorFactories = new ConcurrentHashMap<>();
    @GuardedBy("this")
    private final ConcurrentMap<ConnectorId, MaterializedConnector> connectors = new ConcurrentHashMap<>();

    private final AtomicBoolean stopped = new AtomicBoolean();

    @Inject
    public ConnectorManager(
            MetadataManager metadataManager,
            CatalogManager catalogManager,
            AccessControlManager accessControlManager,
            SplitManager splitManager,
            PageSourceManager pageSourceManager,
            IndexManager indexManager,
            NodePartitioningManager nodePartitioningManager,
            PageSinkManager pageSinkManager,
            HandleResolver handleResolver,
            InternalNodeManager nodeManager,
            NodeInfo nodeInfo,
            TypeManager typeManager,
            PageSorter pageSorter,
            PageIndexerFactory pageIndexerFactory,
            TransactionManager transactionManager)
    {
        this.metadataManager = metadataManager;
        this.catalogManager = catalogManager;
        this.accessControlManager = accessControlManager;
        this.splitManager = splitManager;
        this.pageSourceManager = pageSourceManager;
        this.indexManager = indexManager;
        this.nodePartitioningManager = nodePartitioningManager;
        this.pageSinkManager = pageSinkManager;
        this.handleResolver = handleResolver;
        this.nodeManager = nodeManager;
        this.typeManager = typeManager;
        this.pageSorter = pageSorter;
        this.pageIndexerFactory = pageIndexerFactory;
        this.nodeInfo = nodeInfo;
        this.transactionManager = transactionManager;
    }

    /**
     * Shuts down every registered connector exactly once. Errors from individual connectors
     * are logged so one failing connector does not prevent the rest from shutting down.
     */
    @PreDestroy
    public synchronized void stop()
    {
        if (stopped.getAndSet(true)) {
            return;
        }
        for (Map.Entry<ConnectorId, MaterializedConnector> entry : connectors.entrySet()) {
            Connector connector = entry.getValue().getConnector();
            // Run shutdown with the connector's own class loader as context class loader.
            try (ThreadContextClassLoader ignored = new ThreadContextClassLoader(connector.getClass().getClassLoader())) {
                connector.shutdown();
            }
            catch (Throwable t) {
                log.error(t, "Error shutting down connector: %s", entry.getKey());
            }
        }
    }

    /**
     * Registers a connector factory under its name; rejects duplicate registrations.
     */
    public synchronized void addConnectorFactory(ConnectorFactory connectorFactory)
    {
        checkState(!stopped.get(), "ConnectorManager is stopped");
        ConnectorFactory existingConnectorFactory = connectorFactories.putIfAbsent(connectorFactory.getName(), connectorFactory);
        checkArgument(existingConnectorFactory == null, "Connector %s is already registered", connectorFactory.getName());
        handleResolver.addConnectorName(connectorFactory.getName(), connectorFactory.getHandleResolver());
    }

    /**
     * Creates a catalog backed by the named connector.
     *
     * @return the ConnectorId assigned to the new catalog's connector
     */
    public synchronized ConnectorId createConnection(String catalogName, String connectorName, Map<String, String> properties)
    {
        requireNonNull(connectorName, "connectorName is null");
        ConnectorFactory connectorFactory = connectorFactories.get(connectorName);
        checkArgument(connectorFactory != null, "No factory for connector %s", connectorName);
        return createConnection(catalogName, connectorFactory, properties);
    }

    private synchronized ConnectorId createConnection(String catalogName, ConnectorFactory connectorFactory, Map<String, String> properties)
    {
        checkState(!stopped.get(), "ConnectorManager is stopped");
        requireNonNull(catalogName, "catalogName is null");
        requireNonNull(properties, "properties is null");
        requireNonNull(connectorFactory, "connectorFactory is null");
        checkArgument(!catalogManager.getCatalog(catalogName).isPresent(), "A catalog already exists for %s", catalogName);
        ConnectorId connectorId = new ConnectorId(catalogName);
        checkState(!connectors.containsKey(connectorId), "A connector %s already exists", connectorId);
        addCatalogConnector(catalogName, connectorId, connectorFactory, properties);
        return connectorId;
    }

    private synchronized void addCatalogConnector(String catalogName, ConnectorId connectorId, ConnectorFactory factory, Map<String, String> properties)
    {
        // create all connectors before adding, so a broken connector does not leave the system half updated
        MaterializedConnector connector = new MaterializedConnector(connectorId, createConnector(connectorId, factory, properties));
        MaterializedConnector informationSchemaConnector = new MaterializedConnector(
                createInformationSchemaConnectorId(connectorId),
                new InformationSchemaConnector(catalogName, nodeManager, metadataManager, accessControlManager));
        ConnectorId systemId = createSystemTablesConnectorId(connectorId);
        MaterializedConnector systemConnector = new MaterializedConnector(systemId, new SystemConnector(
                systemId,
                nodeManager,
                connector.getSystemTables(),
                transactionId -> transactionManager.getConnectorTransaction(transactionId, connectorId)));
        Catalog catalog = new Catalog(
                catalogName,
                connector.getConnectorId(),
                connector.getConnector(),
                informationSchemaConnector.getConnectorId(),
                informationSchemaConnector.getConnector(),
                systemConnector.getConnectorId(),
                systemConnector.getConnector());
        try {
            addConnectorInternal(connector);
            addConnectorInternal(informationSchemaConnector);
            addConnectorInternal(systemConnector);
            catalogManager.registerCatalog(catalog);
        }
        catch (Throwable e) {
            // Roll back in reverse order so registration failure leaves no partial state.
            catalogManager.removeCatalog(catalog.getCatalogName());
            removeConnectorInternal(systemConnector.getConnectorId());
            removeConnectorInternal(informationSchemaConnector.getConnectorId());
            removeConnectorInternal(connector.getConnectorId());
            throw e;
        }
    }

    // Registers the connector's services (splits, page sources/sinks, indexes, partitioning,
    // procedures, access control, properties) with the corresponding managers.
    private synchronized void addConnectorInternal(MaterializedConnector connector)
    {
        checkState(!stopped.get(), "ConnectorManager is stopped");
        ConnectorId connectorId = connector.getConnectorId();
        checkState(!connectors.containsKey(connectorId), "A connector %s already exists", connectorId);
        connectors.put(connectorId, connector);
        splitManager.addConnectorSplitManager(connectorId, connector.getSplitManager());
        pageSourceManager.addConnectorPageSourceProvider(connectorId, connector.getPageSourceProvider());
        connector.getPageSinkProvider()
                .ifPresent(pageSinkProvider -> pageSinkManager.addConnectorPageSinkProvider(connectorId, pageSinkProvider));
        connector.getIndexProvider()
                .ifPresent(indexProvider -> indexManager.addIndexProvider(connectorId, indexProvider));
        connector.getPartitioningProvider()
                .ifPresent(partitioningProvider -> nodePartitioningManager.addPartitioningProvider(connectorId, partitioningProvider));
        metadataManager.getProcedureRegistry().addProcedures(connectorId, connector.getProcedures());
        connector.getAccessControl()
                .ifPresent(accessControl -> accessControlManager.addCatalogAccessControl(connectorId, accessControl));
        metadataManager.getTablePropertyManager().addProperties(connectorId, connector.getTableProperties());
        metadataManager.getSchemaPropertyManager().addProperties(connectorId, connector.getSchemaProperties());
        metadataManager.getSessionPropertyManager().addConnectorSessionProperties(connectorId, connector.getSessionProperties());
    }

    /**
     * Removes the named catalog and unregisters its connector plus the companion
     * information_schema and system-tables connectors.
     */
    public synchronized void dropConnection(String catalogName)
    {
        requireNonNull(catalogName, "catalogName is null");
        catalogManager.removeCatalog(catalogName).ifPresent(connectorId -> {
            // todo wait for all running transactions using the connector to complete before removing the services
            removeConnectorInternal(connectorId);
            removeConnectorInternal(createInformationSchemaConnectorId(connectorId));
            removeConnectorInternal(createSystemTablesConnectorId(connectorId));
        });
    }

    // Mirror of addConnectorInternal: unregisters all services, then shuts the connector down.
    private synchronized void removeConnectorInternal(ConnectorId connectorId)
    {
        splitManager.removeConnectorSplitManager(connectorId);
        pageSourceManager.removeConnectorPageSourceProvider(connectorId);
        pageSinkManager.removeConnectorPageSinkProvider(connectorId);
        indexManager.removeIndexProvider(connectorId);
        nodePartitioningManager.removePartitioningProvider(connectorId);
        metadataManager.getProcedureRegistry().removeProcedures(connectorId);
        accessControlManager.removeCatalogAccessControl(connectorId);
        metadataManager.getTablePropertyManager().removeProperties(connectorId);
        metadataManager.getSchemaPropertyManager().removeProperties(connectorId);
        metadataManager.getSessionPropertyManager().removeConnectorSessionProperties(connectorId);
        MaterializedConnector materializedConnector = connectors.remove(connectorId);
        if (materializedConnector != null) {
            Connector connector = materializedConnector.getConnector();
            try (ThreadContextClassLoader ignored = new ThreadContextClassLoader(connector.getClass().getClassLoader())) {
                connector.shutdown();
            }
            catch (Throwable t) {
                log.error(t, "Error shutting down connector: %s", connectorId);
            }
        }
    }

    // Instantiates the connector via its factory with the factory's class loader as context.
    private Connector createConnector(ConnectorId connectorId, ConnectorFactory factory, Map<String, String> properties)
    {
        ConnectorContext context = new ConnectorContextInstance(
                new ConnectorAwareNodeManager(nodeManager, nodeInfo.getEnvironment(), connectorId),
                typeManager,
                pageSorter,
                pageIndexerFactory);
        try (ThreadContextClassLoader ignored = new ThreadContextClassLoader(factory.getClass().getClassLoader())) {
            return factory.create(connectorId.getCatalogName(), properties, context);
        }
    }

    /**
     * Immutable snapshot of a connector's capabilities, resolved eagerly at creation time.
     * Optional capabilities (page sink, index, partitioning, access control) are probed via
     * {@link UnsupportedOperationException} and stored as {@link Optional}s.
     */
    private static class MaterializedConnector
    {
        private final ConnectorId connectorId;
        private final Connector connector;
        private final ConnectorSplitManager splitManager;
        private final Set<SystemTable> systemTables;
        private final Set<Procedure> procedures;
        private final ConnectorPageSourceProvider pageSourceProvider;
        private final Optional<ConnectorPageSinkProvider> pageSinkProvider;
        private final Optional<ConnectorIndexProvider> indexProvider;
        private final Optional<ConnectorNodePartitioningProvider> partitioningProvider;
        private final Optional<ConnectorAccessControl> accessControl;
        private final List<PropertyMetadata<?>> sessionProperties;
        private final List<PropertyMetadata<?>> tableProperties;
        private final List<PropertyMetadata<?>> schemaProperties;

        public MaterializedConnector(ConnectorId connectorId, Connector connector)
        {
            this.connectorId = requireNonNull(connectorId, "connectorId is null");
            this.connector = requireNonNull(connector, "connector is null");
            splitManager = connector.getSplitManager();
            checkState(splitManager != null, "Connector %s does not have a split manager", connectorId);
            Set<SystemTable> systemTables = connector.getSystemTables();
            // requireNonNull does not interpret format specifiers, so the message must be
            // pre-formatted (this was previously passed with a literal "%s").
            requireNonNull(systemTables, format("Connector %s returned a null system tables set", connectorId));
            this.systemTables = ImmutableSet.copyOf(systemTables);
            Set<Procedure> procedures = connector.getProcedures();
            requireNonNull(procedures, format("Connector %s returned a null procedures set", connectorId));
            this.procedures = ImmutableSet.copyOf(procedures);
            ConnectorPageSourceProvider connectorPageSourceProvider = null;
            try {
                connectorPageSourceProvider = connector.getPageSourceProvider();
                requireNonNull(connectorPageSourceProvider, format("Connector %s returned a null page source provider", connectorId));
            }
            catch (UnsupportedOperationException ignored) {
            }
            if (connectorPageSourceProvider == null) {
                // Fall back to adapting a record set provider; one of the two must exist.
                ConnectorRecordSetProvider connectorRecordSetProvider = null;
                try {
                    connectorRecordSetProvider = connector.getRecordSetProvider();
                    requireNonNull(connectorRecordSetProvider, format("Connector %s returned a null record set provider", connectorId));
                }
                catch (UnsupportedOperationException ignored) {
                }
                checkState(connectorRecordSetProvider != null, "Connector %s has neither a PageSource or RecordSet provider", connectorId);
                connectorPageSourceProvider = new RecordPageSourceProvider(connectorRecordSetProvider);
            }
            this.pageSourceProvider = connectorPageSourceProvider;
            ConnectorPageSinkProvider connectorPageSinkProvider = null;
            try {
                connectorPageSinkProvider = connector.getPageSinkProvider();
                requireNonNull(connectorPageSinkProvider, format("Connector %s returned a null page sink provider", connectorId));
            }
            catch (UnsupportedOperationException ignored) {
            }
            if (connectorPageSinkProvider == null) {
                ConnectorRecordSinkProvider connectorRecordSinkProvider;
                try {
                    connectorRecordSinkProvider = connector.getRecordSinkProvider();
                    requireNonNull(connectorRecordSinkProvider, format("Connector %s returned a null record sink provider", connectorId));
                    connectorPageSinkProvider = new RecordPageSinkProvider(connectorRecordSinkProvider);
                }
                catch (UnsupportedOperationException ignored) {
                }
            }
            this.pageSinkProvider = Optional.ofNullable(connectorPageSinkProvider);
            ConnectorIndexProvider indexProvider = null;
            try {
                indexProvider = connector.getIndexProvider();
                requireNonNull(indexProvider, format("Connector %s returned a null index provider", connectorId));
            }
            catch (UnsupportedOperationException ignored) {
            }
            this.indexProvider = Optional.ofNullable(indexProvider);
            ConnectorNodePartitioningProvider partitioningProvider = null;
            try {
                partitioningProvider = connector.getNodePartitioningProvider();
                requireNonNull(partitioningProvider, format("Connector %s returned a null partitioning provider", connectorId));
            }
            catch (UnsupportedOperationException ignored) {
            }
            this.partitioningProvider = Optional.ofNullable(partitioningProvider);
            ConnectorAccessControl accessControl = null;
            try {
                accessControl = connector.getAccessControl();
            }
            catch (UnsupportedOperationException ignored) {
            }
            this.accessControl = Optional.ofNullable(accessControl);
            List<PropertyMetadata<?>> sessionProperties = connector.getSessionProperties();
            requireNonNull(sessionProperties, format("Connector %s returned a null system properties set", connectorId));
            this.sessionProperties = ImmutableList.copyOf(sessionProperties);
            List<PropertyMetadata<?>> tableProperties = connector.getTableProperties();
            requireNonNull(tableProperties, format("Connector %s returned a null table properties set", connectorId));
            this.tableProperties = ImmutableList.copyOf(tableProperties);
            List<PropertyMetadata<?>> schemaProperties = connector.getSchemaProperties();
            requireNonNull(schemaProperties, format("Connector %s returned a null schema properties set", connectorId));
            this.schemaProperties = ImmutableList.copyOf(schemaProperties);
        }

        public ConnectorId getConnectorId()
        {
            return connectorId;
        }

        public Connector getConnector()
        {
            return connector;
        }

        public ConnectorSplitManager getSplitManager()
        {
            return splitManager;
        }

        public Set<SystemTable> getSystemTables()
        {
            return systemTables;
        }

        public Set<Procedure> getProcedures()
        {
            return procedures;
        }

        public ConnectorPageSourceProvider getPageSourceProvider()
        {
            return pageSourceProvider;
        }

        public Optional<ConnectorPageSinkProvider> getPageSinkProvider()
        {
            return pageSinkProvider;
        }

        public Optional<ConnectorIndexProvider> getIndexProvider()
        {
            return indexProvider;
        }

        public Optional<ConnectorNodePartitioningProvider> getPartitioningProvider()
        {
            return partitioningProvider;
        }

        public Optional<ConnectorAccessControl> getAccessControl()
        {
            return accessControl;
        }

        public List<PropertyMetadata<?>> getSessionProperties()
        {
            return sessionProperties;
        }

        public List<PropertyMetadata<?>> getTableProperties()
        {
            return tableProperties;
        }

        public List<PropertyMetadata<?>> getSchemaProperties()
        {
            return schemaProperties;
        }
    }
}
| |
/*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.druid.server.coordinator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.Lists;
import com.metamx.emitter.EmittingLogger;
import io.druid.java.util.common.ISE;
import io.druid.java.util.common.concurrent.ScheduledExecutors;
import io.druid.server.coordination.DataSegmentChangeRequest;
import io.druid.server.coordination.SegmentChangeRequestDrop;
import io.druid.server.coordination.SegmentChangeRequestLoad;
import io.druid.server.coordination.SegmentChangeRequestNoop;
import io.druid.timeline.DataSegment;
import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.framework.api.CuratorWatcher;
import org.apache.curator.utils.ZKPaths;
import org.apache.zookeeper.CreateMode;
import org.apache.zookeeper.WatchedEvent;
import org.apache.zookeeper.data.Stat;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentSkipListMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
/**
*/
/**
 * Coordinator-side work queue ("peon") for a single data server. Load and drop
 * requests are queued in memory; one request at a time is handed to the server
 * by writing an ephemeral znode under {@code basePath}, and completion is
 * detected by watching for that znode's deletion.
 */
public class LoadQueuePeon
{
  private static final EmittingLogger log = new EmittingLogger(LoadQueuePeon.class);

  // Type tags for SegmentHolder: a queued request is either a drop or a load.
  private static final int DROP = 0;
  private static final int LOAD = 1;

  // Runs every callback in the list, skipping null entries (loadSegment/dropSegment
  // accept a null callback).
  private static void executeCallbacks(List<LoadPeonCallback> callbacks)
  {
    for (LoadPeonCallback callback : callbacks) {
      if (callback != null) {
        callback.execute();
      }
    }
  }

  private final CuratorFramework curator;
  // ZK path owned by the target server; request znodes are created under it.
  private final String basePath;
  private final ObjectMapper jsonMapper;
  // Drives processSegmentChangeRequest() and the per-request timeout check.
  private final ScheduledExecutorService processingExecutor;
  // Completion callbacks run on this executor, off the processing thread.
  private final ExecutorService callBackExecutor;
  private final DruidCoordinatorConfig config;

  // Total byte size of segments currently queued for loading (not dropping).
  private final AtomicLong queuedSize = new AtomicLong(0);
  // Failures since the last getAndResetFailedAssignCount() call.
  private final AtomicInteger failedAssignCount = new AtomicInteger(0);

  private final ConcurrentSkipListMap<DataSegment, SegmentHolder> segmentsToLoad = new ConcurrentSkipListMap<>(
      DruidCoordinator.SEGMENT_COMPARATOR
  );
  private final ConcurrentSkipListMap<DataSegment, SegmentHolder> segmentsToDrop = new ConcurrentSkipListMap<>(
      DruidCoordinator.SEGMENT_COMPARATOR
  );

  private final Object lock = new Object();

  // The single in-flight request; null when idle. Written under {@code lock} by
  // stop()/actionCompleted(); volatile so processSegmentChangeRequest() observes
  // a concurrent stop().
  private volatile SegmentHolder currentlyProcessing = null;
  private boolean stopped = false;

  LoadQueuePeon(
      CuratorFramework curator,
      String basePath,
      ObjectMapper jsonMapper,
      ScheduledExecutorService processingExecutor,
      ExecutorService callbackExecutor,
      DruidCoordinatorConfig config
  )
  {
    this.curator = curator;
    this.basePath = basePath;
    this.jsonMapper = jsonMapper;
    this.callBackExecutor = callbackExecutor;
    this.processingExecutor = processingExecutor;
    this.config = config;
  }

  /** Segments currently queued for loading (live view of the queue's key set). */
  @JsonProperty
  public Set<DataSegment> getSegmentsToLoad()
  {
    return segmentsToLoad.keySet();
  }

  /** Segments currently queued for dropping (live view of the queue's key set). */
  @JsonProperty
  public Set<DataSegment> getSegmentsToDrop()
  {
    return segmentsToDrop.keySet();
  }

  /** Total byte size of the load queue. */
  public long getLoadQueueSize()
  {
    return queuedSize.get();
  }

  /** Returns the failure count accumulated since the last call, resetting it to zero. */
  public int getAndResetFailedAssignCount()
  {
    return failedAssignCount.getAndSet(0);
  }

  /**
   * Queues a segment for loading. If the same segment is already in flight or
   * already queued, the callback is attached to the existing request instead.
   *
   * NOTE(review): the in-flight check and the queued check run in two separate
   * synchronized blocks, and the final put is outside any lock — looks like a
   * check-then-act window where a concurrent caller could enqueue a duplicate
   * holder; confirm callers tolerate this.
   */
  public void loadSegment(
      final DataSegment segment,
      final LoadPeonCallback callback
  )
  {
    synchronized (lock) {
      // Already being processed right now: just piggyback the callback.
      if ((currentlyProcessing != null) &&
          currentlyProcessing.getSegmentIdentifier().equals(segment.getIdentifier())) {
        if (callback != null) {
          currentlyProcessing.addCallback(callback);
        }
        return;
      }
    }

    synchronized (lock) {
      // Already queued: attach the callback to the existing holder.
      final SegmentHolder existingHolder = segmentsToLoad.get(segment);
      if (existingHolder != null) {
        if ((callback != null)) {
          existingHolder.addCallback(callback);
        }
        return;
      }
    }

    log.info("Asking server peon[%s] to load segment[%s]", basePath, segment.getIdentifier());
    queuedSize.addAndGet(segment.getSize());
    segmentsToLoad.put(segment, new SegmentHolder(segment, LOAD, Collections.singletonList(callback)));
  }

  /**
   * Queues a segment for dropping. Mirrors {@link #loadSegment}, including the
   * same NOTE(review) check-then-act window, but does not track queued size.
   */
  public void dropSegment(
      final DataSegment segment,
      final LoadPeonCallback callback
  )
  {
    synchronized (lock) {
      // Already being processed right now: just piggyback the callback.
      if ((currentlyProcessing != null) &&
          currentlyProcessing.getSegmentIdentifier().equals(segment.getIdentifier())) {
        if (callback != null) {
          currentlyProcessing.addCallback(callback);
        }
        return;
      }
    }

    synchronized (lock) {
      // Already queued: attach the callback to the existing holder.
      final SegmentHolder existingHolder = segmentsToDrop.get(segment);
      if (existingHolder != null) {
        if (callback != null) {
          existingHolder.addCallback(callback);
        }
        return;
      }
    }

    log.info("Asking server peon[%s] to drop segment[%s]", basePath, segment.getIdentifier());
    segmentsToDrop.put(segment, new SegmentHolder(segment, DROP, Collections.singletonList(callback)));
  }

  /**
   * Picks the next queued request (drops take priority over loads), publishes it
   * to the server as an ephemeral znode, and arranges both a timeout check and a
   * deletion watcher to detect completion. Runs on {@code processingExecutor};
   * does nothing while a request is already in flight.
   */
  private void processSegmentChangeRequest() {
    if (currentlyProcessing == null) {
      if (!segmentsToDrop.isEmpty()) {
        currentlyProcessing = segmentsToDrop.firstEntry().getValue();
        log.info("Server[%s] dropping [%s]", basePath, currentlyProcessing.getSegmentIdentifier());
      } else if (!segmentsToLoad.isEmpty()) {
        currentlyProcessing = segmentsToLoad.firstEntry().getValue();
        log.info("Server[%s] loading [%s]", basePath, currentlyProcessing.getSegmentIdentifier());
      } else {
        return;
      }

      try {
        // Defensive re-read of the volatile field: a concurrent stop() may have
        // nulled currentlyProcessing between the assignment above and here.
        // The alert fires only when we were NOT stopped, i.e. a genuine anomaly.
        if (currentlyProcessing == null) {
          if(!stopped) {
            log.makeAlert("Crazy race condition! server[%s]", basePath)
               .emit();
          }
          actionCompleted();
          return;
        }

        log.info("Server[%s] processing segment[%s]", basePath, currentlyProcessing.getSegmentIdentifier());
        final String path = ZKPaths.makePath(basePath, currentlyProcessing.getSegmentIdentifier());
        final byte[] payload = jsonMapper.writeValueAsBytes(currentlyProcessing.getChangeRequest());
        curator.create().withMode(CreateMode.EPHEMERAL).forPath(path, payload);

        // Timeout: if the znode still exists after the configured delay, the
        // server never picked it up (or never finished) — fail the assignment.
        processingExecutor.schedule(
            new Runnable()
            {
              @Override
              public void run()
              {
                try {
                  if (curator.checkExists().forPath(path) != null) {
                    failAssign(new ISE("%s was never removed! Failing this operation!", path));
                  }
                }
                catch (Exception e) {
                  failAssign(e);
                }
              }
            },
            config.getLoadTimeoutDelay().getMillis(),
            TimeUnit.MILLISECONDS
        );

        // Watch for the server deleting the znode, which signals completion.
        final Stat stat = curator.checkExists().usingWatcher(
            new CuratorWatcher()
            {
              @Override
              public void process(WatchedEvent watchedEvent) throws Exception
              {
                switch (watchedEvent.getType()) {
                  case NodeDeleted:
                    entryRemoved(watchedEvent.getPath());
                    break;
                  default:
                    // do nothing
                }
              }
            }
        ).forPath(path);

        if (stat == null) {
          final byte[] noopPayload = jsonMapper.writeValueAsBytes(new SegmentChangeRequestNoop());

          // Create a node and then delete it to remove the registered watcher. This is a work-around for
          // a zookeeper race condition. Specifically, when you set a watcher, it fires on the next event
          // that happens for that node. If no events happen, the watcher stays registered foreverz.
          // Couple that with the fact that you cannot set a watcher when you create a node, but what we
          // want is to create a node and then watch for it to get deleted. The solution is that you *can*
          // set a watcher when you check to see if it exists so, we first create the node and then set a
          // watcher on its existence. However, if already does not exist by the time the existence check
          // returns, then the watcher that was set will never fire (nobody will ever create the node
          // again) and thus lead to a slow, but real, memory leak. So, we create another node to cause
          // that watcher to fire and delete it right away.
          //
          // We do not create the existence watcher first, because then it will fire when we create the
          // node and we'll have the same race when trying to refresh that watcher.
          curator.create().withMode(CreateMode.EPHEMERAL).forPath(path, noopPayload);

          entryRemoved(path);
        }
      } catch (Exception e) {
        failAssign(e);
      }
    } else {
      log.info(
          "Server[%s] skipping doNext() because something is currently loading[%s].",
          basePath,
          currentlyProcessing.getSegmentIdentifier()
      );
    }
  }

  /**
   * Removes the in-flight request from its queue (adjusting queued size for
   * loads), clears the in-flight slot, and runs its callbacks asynchronously on
   * {@code callBackExecutor}.
   */
  private void actionCompleted()
  {
    if (currentlyProcessing != null) {
      switch (currentlyProcessing.getType()) {
        case LOAD:
          segmentsToLoad.remove(currentlyProcessing.getSegment());
          queuedSize.addAndGet(-currentlyProcessing.getSegmentSize());
          break;
        case DROP:
          segmentsToDrop.remove(currentlyProcessing.getSegment());
          break;
        default:
          throw new UnsupportedOperationException();
      }

      // Capture the callbacks before clearing the slot; they run off-thread.
      final List<LoadPeonCallback> callbacks = currentlyProcessing.getCallbacks();
      currentlyProcessing = null;
      callBackExecutor.execute(
          new Runnable()
          {
            @Override
            public void run()
            {
              executeCallbacks(callbacks);
            }
          }
      );
    }
  }

  /**
   * Starts the periodic queue-processing task; it reschedules itself until
   * {@link #stop()} flips the {@code stopped} flag.
   */
  public void start()
  {
    ScheduledExecutors.scheduleAtFixedRate(
        processingExecutor,
        config.getLoadQueuePeonRepeatDelay(),
        config.getLoadQueuePeonRepeatDelay(),
        new Callable<ScheduledExecutors.Signal>()
        {
          @Override
          public ScheduledExecutors.Signal call()
          {
            processSegmentChangeRequest();

            if (stopped) {
              return ScheduledExecutors.Signal.STOP;
            } else {
              return ScheduledExecutors.Signal.REPEAT;
            }
          }
        }
    );
  }

  /**
   * Stops the peon: drains both queues, resets counters, and marks the peon
   * stopped so the processing loop exits.
   *
   * NOTE(review): unlike actionCompleted(), callbacks here run synchronously on
   * the caller's thread while holding {@code lock} — confirm that is intended.
   */
  public void stop()
  {
    synchronized (lock) {
      if (currentlyProcessing != null) {
        executeCallbacks(currentlyProcessing.getCallbacks());
        currentlyProcessing = null;
      }

      if (!segmentsToDrop.isEmpty()) {
        for (SegmentHolder holder : segmentsToDrop.values()) {
          executeCallbacks(holder.getCallbacks());
        }
      }
      segmentsToDrop.clear();

      if (!segmentsToLoad.isEmpty()) {
        for (SegmentHolder holder : segmentsToLoad.values()) {
          executeCallbacks(holder.getCallbacks());
        }
      }
      segmentsToLoad.clear();

      queuedSize.set(0L);
      failedAssignCount.set(0);
      stopped = true;
    }
  }

  /**
   * Watcher callback: the znode for {@code path} was deleted. Completes the
   * in-flight request if it matches; otherwise just logs the anomaly.
   */
  private void entryRemoved(String path)
  {
    synchronized (lock) {
      if (currentlyProcessing == null) {
        log.warn("Server[%s] an entry[%s] was removed even though it wasn't loading!?", basePath, path);
        return;
      }
      if (!ZKPaths.getNodeFromPath(path).equals(currentlyProcessing.getSegmentIdentifier())) {
        log.warn(
            "Server[%s] entry [%s] was removed even though it's not what is currently loading[%s]",
            basePath, path, currentlyProcessing
        );
        return;
      }
      actionCompleted();
      log.info("Server[%s] done processing [%s]", basePath, path);
    }
  }

  /**
   * Records a failed assignment and completes the request anyway so the
   * coordinator can hand the segment to another server.
   */
  private void failAssign(Exception e)
  {
    synchronized (lock) {
      log.error(e, "Server[%s], throwable caught when submitting [%s].", basePath, currentlyProcessing);
      failedAssignCount.getAndIncrement();
      // Act like it was completed so that the coordinator gives it to someone else
      actionCompleted();
    }
  }

  /**
   * A queued load or drop request: the segment, the serialized change request,
   * and the callbacks to run on completion.
   */
  private static class SegmentHolder
  {
    private final DataSegment segment;
    private final DataSegmentChangeRequest changeRequest;
    private final int type;
    // Guarded by synchronizing on the list itself.
    private final List<LoadPeonCallback> callbacks = Lists.newArrayList();

    private SegmentHolder(
        DataSegment segment,
        int type,
        Collection<LoadPeonCallback> callbacks
    )
    {
      this.segment = segment;
      this.type = type;
      this.changeRequest = (type == LOAD)
                           ? new SegmentChangeRequestLoad(segment)
                           : new SegmentChangeRequestDrop(segment);
      this.callbacks.addAll(callbacks);
    }

    public DataSegment getSegment()
    {
      return segment;
    }

    public int getType()
    {
      return type;
    }

    public String getSegmentIdentifier()
    {
      return segment.getIdentifier();
    }

    public long getSegmentSize()
    {
      return segment.getSize();
    }

    public void addCallbacks(Collection<LoadPeonCallback> newCallbacks)
    {
      synchronized (callbacks) {
        callbacks.addAll(newCallbacks);
      }
    }

    public void addCallback(LoadPeonCallback newCallback)
    {
      synchronized (callbacks) {
        callbacks.add(newCallback);
      }
    }

    // NOTE(review): returns the live mutable list; callers iterate it outside
    // this lock (executeCallbacks), so later addCallback() calls could race
    // with that iteration — confirm callers only read after completion.
    public List<LoadPeonCallback> getCallbacks()
    {
      synchronized (callbacks) {
        return callbacks;
      }
    }

    public DataSegmentChangeRequest getChangeRequest()
    {
      return changeRequest;
    }

    @Override
    public String toString()
    {
      return changeRequest.toString();
    }
  }
}
| |
/* $Id: ForcedParamManager.java 1439174 2013-01-27 20:43:31Z kwright $ */
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.manifoldcf.crawler.jobs;
import org.apache.manifoldcf.core.interfaces.*;
import org.apache.manifoldcf.agents.interfaces.*;
import org.apache.manifoldcf.crawler.interfaces.*;
import java.util.*;
/** This class manages the "jobforcedparams" table, which contains the forced parameters for each job.
*
* <br><br>
* <b>jobforcedparams</b>
* <table border="1" cellpadding="3" cellspacing="0">
* <tr class="TableHeadingColor">
* <th>Field</th><th>Type</th><th>Description </th>
* <tr><td>ownerid</td><td>BIGINT</td><td>Reference:jobs.id</td></tr>
* <tr><td>paramname</td><td>VARCHAR(255)</td><td></td></tr>
* <tr><td>paramvalue</td><td>VARCHAR(255)</td><td></td></tr>
* </table>
* <br><br>
*
*/
/** This class manages the "jobforcedparams" table, which contains the forced parameters for each job.
 *
 * <br><br>
 * <b>jobforcedparams</b>
 * <table border="1" cellpadding="3" cellspacing="0">
 * <tr class="TableHeadingColor">
 * <th>Field</th><th>Type</th><th>Description&nbsp;</th>
 * <tr><td>ownerid</td><td>BIGINT</td><td>Reference:jobs.id</td></tr>
 * <tr><td>paramname</td><td>VARCHAR(255)</td><td></td></tr>
 * <tr><td>paramvalue</td><td>VARCHAR(255)</td><td></td></tr>
 * </table>
 * <br><br>
 *
 */
public class ForcedParamManager extends org.apache.manifoldcf.core.database.BaseTable
{
  public static final String _rcsid = "@(#)$Id: ForcedParamManager.java 1439174 2013-01-27 20:43:31Z kwright $";

  // Schema
  public final static String ownerIDField = "ownerid";
  public final static String paramNameField = "paramname";
  public final static String paramValueField = "paramvalue";

  /** Constructor.
  *@param threadContext is the thread context.
  *@param database is the database instance.
  */
  public ForcedParamManager(IThreadContext threadContext, IDBInterface database)
    throws ManifoldCFException
  {
    super(database,"jobforcedparams");
  }

  /** Install or upgrade.
  *@param ownerTable is the name of the table that owns this one.
  *@param owningTablePrimaryKey is the primary key of the owning table.
  */
  public void install(String ownerTable, String owningTablePrimaryKey)
    throws ManifoldCFException
  {
    // Standard practice: outer loop, so upgrades can be restarted if needed
    while (true)
    {
      Map existing = getTableSchema(null,null);
      if (existing == null)
      {
        // Table does not exist yet; create it with a foreign key back to the owner table.
        Map<String,ColumnDescription> map = new HashMap<String,ColumnDescription>();
        map.put(ownerIDField,new ColumnDescription("BIGINT",false,false,ownerTable,owningTablePrimaryKey,false));
        map.put(paramNameField,new ColumnDescription("VARCHAR(255)",false,false,null,null,false));
        map.put(paramValueField,new ColumnDescription("VARCHAR(255)",false,true,null,null,false));
        performCreate(map,null);
      }
      else
      {
        // Upgrade code goes here, as needed
      }

      // Index management.  The (ownerid,paramname) index is the only one we want,
      // besides the implicit primary-key index.
      IndexDescription ownerIndex = new IndexDescription(true,new String[]{ownerIDField,paramNameField});

      // Get rid of indexes that shouldn't be there
      Map indexes = getTableIndexes(null,null);
      for (Object indexNameObject : indexes.keySet())
      {
        String indexName = (String)indexNameObject;
        IndexDescription id = (IndexDescription)indexes.get(indexName);

        if (ownerIndex != null && id.equals(ownerIndex))
          ownerIndex = null;
        else if (indexName.indexOf("_pkey") == -1)
          // This index shouldn't be here; drop it
          performRemoveIndex(indexName);
      }

      // Add the ones we didn't find
      if (ownerIndex != null)
        performAddIndex(null,ownerIndex);

      break;
    }
  }

  /** Uninstall.
  */
  public void deinstall()
    throws ManifoldCFException
  {
    performDrop(null);
  }

  /** Read rows for a given owner id.
  *@param id is the owner id.
  *@return a map of param name to param value set.
  */
  public Map<String,Set<String>> readRows(Long id)
    throws ManifoldCFException
  {
    ArrayList list = new ArrayList();
    list.add(id);
    IResultSet set = performQuery("SELECT "+paramNameField+","+paramValueField+" FROM "+getTableName()+" WHERE "+ownerIDField+"=?",list,
      null,null);
    Map<String,Set<String>> rval = new HashMap<String,Set<String>>();
    // Null parameter values in the database are normalized to empty strings.
    int rowCount = set.getRowCount();
    for (int i = 0; i < rowCount; i++)
    {
      IResultRow row = set.getRow(i);
      String paramName = (String)row.getValue(paramNameField);
      String paramValue = (String)row.getValue(paramValueField);
      if (paramValue == null)
        paramValue = "";
      Set<String> valueSet = rval.get(paramName);
      if (valueSet == null)
      {
        valueSet = new HashSet<String>();
        rval.put(paramName,valueSet);
      }
      valueSet.add(paramValue);
    }
    return rval;
  }

  /** Fill in a set of param maps corresponding to a set of owner id's.
  *@param returnValues is a map keyed by ownerID, with value of JobDescription.
  *@param ownerIDList is the list of owner id's.
  *@param ownerIDParams is the corresponding set of owner id parameters.
  */
  public void getRows(Map<Long,JobDescription> returnValues, String ownerIDList, ArrayList ownerIDParams)
    throws ManifoldCFException
  {
    IResultSet set = performQuery("SELECT * FROM "+getTableName()+" WHERE "+ownerIDField+" IN ("+ownerIDList+")",ownerIDParams,
      null,null);
    int rowCount = set.getRowCount();
    for (int i = 0; i < rowCount; i++)
    {
      IResultRow row = set.getRow(i);
      Long ownerID = (Long)row.getValue(ownerIDField);
      String paramName = (String)row.getValue(paramNameField);
      String paramValue = (String)row.getValue(paramValueField);
      if (paramValue == null)
        paramValue = "";
      returnValues.get(ownerID).addForcedMetadataValue(paramName,paramValue);
    }
  }

  /** Write a forced-parameter list into the database.
  *@param ownerID is the owning identifier.
  *@param list is the job description whose forced metadata should be written.
  */
  public void writeRows(Long ownerID, IJobDescription list)
    throws ManifoldCFException
  {
    // One reusable row map; cleared before each insert.
    Map<String,Object> map = new HashMap<String,Object>();
    beginTransaction();
    try
    {
      Map<String,Set<String>> forcedMetadata = list.getForcedMetadata();
      for (Map.Entry<String,Set<String>> entry : forcedMetadata.entrySet())
      {
        String paramName = entry.getKey();
        for (String paramValue : entry.getValue())
        {
          map.clear();
          map.put(paramNameField,paramName);
          map.put(paramValueField,paramValue);
          map.put(ownerIDField,ownerID);
          performInsert(map,null);
        }
      }
    }
    catch (ManifoldCFException e)
    {
      signalRollback();
      throw e;
    }
    catch (Error e)
    {
      signalRollback();
      throw e;
    }
    finally
    {
      endTransaction();
    }
  }

  /** Delete rows.
  *@param ownerID is the owner whose rows to delete.
  */
  public void deleteRows(Long ownerID)
    throws ManifoldCFException
  {
    ArrayList list = new ArrayList();
    list.add(ownerID);
    performDelete("WHERE "+ownerIDField+"=?",list,null);
  }
}
| |
/*
* Titan Robotics Framework Library
* Copyright (c) 2015 Titan Robotics Club (http://www.titanrobotics.net)
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package frclib;
import java.io.InputStream;
import java.io.IOException;
import edu.wpi.first.wpilibj.Timer;
import edu.wpi.first.wpilibj.RobotBase;
import edu.wpi.first.wpilibj.communication.*;
import edu.wpi.first.wpilibj.communication.FRCNetworkCommunicationsLibrary.*;
import edu.wpi.first.wpilibj.livewindow.LiveWindow;
import hallib.HalDashboard;
import hallib.HalDbgLog;
import hallib.HalUtil;
import trclib.TrcDbgTrace;
import trclib.TrcRobot.*;
import trclib.TrcTaskMgr;
/**
* This class defines and implements the FrcRobotBase object. The FrcRobotBase
* object implements a cooperative multitasking robot. Different subsystems
* register themselves as CoopTasks. FrcRobotBase uses the TaskMgr to task
* switch between different subsystem tasks at various points in the robot
* loop. This basically simulates a cooperative multitasking scheduler that
* task switches between them in different modes.
*/
/**
 * This class defines and implements the FrcRobotBase object. The FrcRobotBase
 * object implements a cooperative multitasking robot. Different subsystems
 * register themselves as CoopTasks. FrcRobotBase uses the TaskMgr to task
 * switch between different subsystem tasks at various points in the robot
 * loop. This basically simulates a cooperative multitasking scheduler that
 * task switches between them in different modes.
 */
public abstract class FrcRobotBase extends RobotBase
{
    private static final String moduleName = "FrcRobotBase";
    private static final boolean debugEnabled = false;
    private static final boolean dashboardEnabled = true;
    private TrcDbgTrace dbgTrace = null;
    private static TrcDbgTrace robotTracer = null;

    /**
     * This method is called to initialize the robot.
     */
    public abstract void initRobot();

    private TrcTaskMgr taskMgr = new TrcTaskMgr();
    private HalDashboard dashboard = new HalDashboard();
    private static FrcRobotBase instance;
    private String progName;
    private RobotMode teleOpMode = null;
    private RobotMode autoMode = null;
    private RobotMode testMode = null;
    private RobotMode disabledMode = null;
    // Timestamp of the most recent mode transition, and the elapsed time within
    // the current mode (refreshed each loop iteration and by getModeElapsedTime).
    private static double modeStartTime = 0.0;
    private static double modeElapsedTime = 0.0;

    /**
     * Constructor: creates the robot base, records the singleton instance and
     * program name, and clears the dashboard display.
     *
     * @param progName specifies the program name reported at startup.
     */
    public FrcRobotBase(final String progName)
    {
        if (debugEnabled)
        {
            dbgTrace = new TrcDbgTrace(
                moduleName,
                false,
                TrcDbgTrace.TraceLevel.API,
                TrcDbgTrace.MsgLevel.INFO);
        }

        FrcRobotBase.instance = this;
        this.progName = progName;
        dashboard.clearDisplay();
    }   //FrcRobotBase

    /**
     * Returns the singleton robot instance created by the constructor.
     */
    public static FrcRobotBase getInstance()
    {
        return instance;
    }   //getInstance

    /**
     * Returns the global robot tracer, creating it lazily on first use.
     */
    public static TrcDbgTrace getRobotTracer()
    {
        if (robotTracer == null)
        {
            robotTracer = new TrcDbgTrace(
                moduleName,
                false,
                TrcDbgTrace.TraceLevel.API,
                TrcDbgTrace.MsgLevel.INFO);
        }

        return robotTracer;
    }   //getRobotTracer

    /**
     * Returns the time in seconds since the current run mode started.
     */
    public static double getModeElapsedTime()
    {
        modeElapsedTime = HalUtil.getCurrentTime() - modeStartTime;
        return modeElapsedTime;
    }   //getModeElapsedTime

    /**
     * Registers the mode objects that will be started/stopped/run as the robot
     * transitions between run modes. Any of them may be null.
     */
    public void setupRobotModes(
        RobotMode teleOpMode,
        RobotMode autoMode,
        RobotMode testMode,
        RobotMode disabledMode)
    {
        this.teleOpMode = teleOpMode;
        this.autoMode = autoMode;
        this.testMode = testMode;
        this.disabledMode = disabledMode;
    }   //setupRobotModes

    /**
     * Start a competition.
     * This specific StartCompetition() implements "main loop" behavior like
     * that of the FRC control system in 2008 and earlier, with a primary
     * (slow) loop that is called periodically, and a "fast loop" (a.k.a.
     * "spin loop") that is called as fast as possible with no delay between
     * calls. This code needs to track the order of the field starting to
     * ensure that everything happens in the right order. Repeatedly run the
     * correct method, either Autonomous or TeleOp when the robot is
     * enabled. After running the correct method, wait for some state to
     * change, either the other mode starts or the robot is disabled. Then go
     * back and wait for the robot to be enabled again.
     */
    public void startCompetition()
    {
        final String funcName = "startCompetition";

        System.out.printf(
            HalDbgLog.ESC_PREFIX + HalDbgLog.SGR_FG_BLACK +
            HalDbgLog.ESC_SEP + HalDbgLog.SGR_BG_WHITE +
            HalDbgLog.ESC_SUFFIX +
            "\n****************************************\n" +
            "Host Name: %s\n" +
            " Program: %s\n"+
//            " Compiled: %s, %s" +
            "\n****************************************\n" +
            HalDbgLog.ESC_NORMAL,
            getHostName(), progName);

        UsageReporting.report(
            tResourceType.kResourceType_Framework,
            tInstances.kFramework_Iterative);

        initRobot();

        //
        // We call this now (not in prestart like default) so that the robot
        // won't enable until the initialization has finished. This is useful
        // because otherwise it's sometimes possible to enable the robot
        // before the code is ready.
        //
        FRCNetworkCommunicationsLibrary.
            FRCNetworkCommunicationObserveUserProgramStarting();

        LiveWindow.setEnabled(false);

        //
        // loop forever, calling the appropriate mode-dependent function
        //
        final double timesliceThreshold = 0.05;
        RunMode prevMode = RunMode.INVALID_MODE;
        RunMode currMode = RunMode.INVALID_MODE;

        while (true)
        {
            prevMode = currMode;

            //
            // Determine the current run mode.
            //
            if (isDisabled())
            {
                currMode = RunMode.DISABLED_MODE;
            }
            else if (isTest())
            {
                currMode = RunMode.TEST_MODE;
            }
            else if (isAutonomous())
            {
                currMode = RunMode.AUTO_MODE;
            }
            else if (isOperatorControl())
            {
                currMode = RunMode.TELEOP_MODE;
            }
            else
            {
                currMode = RunMode.INVALID_MODE;
            }

            if (currMode != prevMode)
            {
                //
                // Detected mode transition.
                //
                if (debugEnabled)
                {
                    dbgTrace.traceInfo(
                        funcName,
                        "Mode Transition: %s->%s.",
                        prevMode.toString(), currMode.toString());
                }

                //
                // Execute all stop tasks for previous mode.
                //
                if (prevMode != RunMode.INVALID_MODE)
                {
                    taskMgr.executeTaskType(
                        TrcTaskMgr.TaskType.STOP_TASK, prevMode);
                }

                //
                // Stop previous mode.
                //
                if (prevMode == RunMode.DISABLED_MODE &&
                    disabledMode != null)
                {
                    disabledMode.stopMode();
                }
                else if (prevMode == RunMode.TEST_MODE && testMode != null)
                {
                    testMode.stopMode();
                }
                else if (prevMode == RunMode.AUTO_MODE && autoMode != null)
                {
                    autoMode.stopMode();
                }
                else if (prevMode == RunMode.TELEOP_MODE && teleOpMode != null)
                {
                    teleOpMode.stopMode();
                }

                //
                // Start current mode. LiveWindow is only enabled in test mode.
                //
                modeStartTime = HalUtil.getCurrentTime();
                if (currMode == RunMode.DISABLED_MODE)
                {
                    LiveWindow.setEnabled(false);
                    if (disabledMode != null)
                    {
                        disabledMode.startMode();
                    }
                }
                else if (currMode == RunMode.TEST_MODE)
                {
                    LiveWindow.setEnabled(true);
                    if (testMode != null)
                    {
                        testMode.startMode();
                    }
                }
                else if (currMode == RunMode.AUTO_MODE)
                {
                    LiveWindow.setEnabled(false);
                    if (autoMode != null)
                    {
                        autoMode.startMode();
                    }
                }
                else if (currMode == RunMode.TELEOP_MODE)
                {
                    LiveWindow.setEnabled(false);
                    if (teleOpMode != null)
                    {
                        teleOpMode.startMode();
                    }
                }

                //
                // Execute all start tasks for current mode.
                //
                if (currMode != RunMode.INVALID_MODE)
                {
                    taskMgr.executeTaskType(
                        TrcTaskMgr.TaskType.START_TASK, currMode);
                }
            }

            modeElapsedTime = HalUtil.getCurrentTime() - modeStartTime;
            if (nextPeriodReady())
            {
                //
                // Run periodic mode: pre-periodic tasks, the mode's periodic
                // method (with the matching DS "observe" call), post-periodic
                // tasks, and a timeslice-overrun warning when debugging.
                //
                double timeSliceStart = Timer.getFPGATimestamp();

                taskMgr.executeTaskType(
                    TrcTaskMgr.TaskType.PREPERIODIC_TASK, currMode);

                if (currMode == RunMode.DISABLED_MODE)
                {
                    FRCNetworkCommunicationsLibrary.
                        FRCNetworkCommunicationObserveUserProgramDisabled();
                    if (disabledMode != null)
                    {
                        disabledMode.runPeriodic(modeElapsedTime);
                    }
                }
                else if (currMode == RunMode.TEST_MODE)
                {
                    FRCNetworkCommunicationsLibrary.
                        FRCNetworkCommunicationObserveUserProgramTest();
                    if (testMode != null)
                    {
                        testMode.runPeriodic(modeElapsedTime);
                    }
                }
                else if (currMode == RunMode.AUTO_MODE)
                {
                    FRCNetworkCommunicationsLibrary.
                        FRCNetworkCommunicationObserveUserProgramAutonomous();
                    if (autoMode != null)
                    {
                        autoMode.runPeriodic(modeElapsedTime);
                    }
                }
                else if (currMode == RunMode.TELEOP_MODE)
                {
                    FRCNetworkCommunicationsLibrary.
                        FRCNetworkCommunicationObserveUserProgramTeleop();
                    if (teleOpMode != null)
                    {
                        teleOpMode.runPeriodic(modeElapsedTime);
                    }
                }

                taskMgr.executeTaskType(
                    TrcTaskMgr.TaskType.POSTPERIODIC_TASK, currMode);

                double timeSliceUsed =
                    Timer.getFPGATimestamp() - timeSliceStart;
                if (debugEnabled)
                {
                    if (timeSliceUsed > timesliceThreshold)
                    {
                        dbgTrace.traceWarn(
                            funcName,
                            "%s takes too long (%5.3fs)\n",
                            currMode.toString(), timeSliceUsed);
                    }
                }
            }

            //
            // Run continuous mode: every spin-loop iteration, not just when
            // new Driver Station data is available.
            //
            taskMgr.executeTaskType(
                TrcTaskMgr.TaskType.PRECONTINUOUS_TASK, currMode);

            if (currMode == RunMode.DISABLED_MODE && disabledMode != null)
            {
                disabledMode.runContinuous(modeElapsedTime);
            }
            else if (currMode == RunMode.TEST_MODE && testMode != null)
            {
                testMode.runContinuous(modeElapsedTime);
            }
            else if (currMode == RunMode.AUTO_MODE && autoMode != null)
            {
                autoMode.runContinuous(modeElapsedTime);
            }
            else if (currMode == RunMode.TELEOP_MODE && teleOpMode != null)
            {
                teleOpMode.runContinuous(modeElapsedTime);
            }

            taskMgr.executeTaskType(
                TrcTaskMgr.TaskType.POSTCONTINUOUS_TASK, currMode);

            if (dashboardEnabled)
            {
                dashboard.displayPrintf(
                    0,
                    "[%3d:%06.3f] %s",
                    (int)(modeElapsedTime/60),
                    modeElapsedTime%60,
                    currMode.toString());
            }
        }
    }   //startCompetition

    /**
     * Returns this machine's host name by running the "hostname" command, or
     * null if the command fails or produces no output.
     *
     * Fix over the previous version: the number of bytes actually read is now
     * honored (instead of converting the whole, partly uninitialized, 256-byte
     * buffer to a String), the stream is closed, and the trailing newline is
     * trimmed off.
     */
    private String getHostName()
    {
        String hostName = null;

        try
        {
            byte[] buff = new byte[256];
            Process proc = Runtime.getRuntime().exec("hostname");
            InputStream inStream = proc.getInputStream();
            try
            {
                int len = inStream.read(buff, 0, buff.length);
                if (len > 0)
                {
                    hostName = new String(buff, 0, len).trim();
                }
            }
            finally
            {
                inStream.close();
            }
        }
        catch(IOException e)
        {
            e.printStackTrace();
        }

        return hostName;
    }   //getHostName

    /**
     * Determine if the appropriate next periodic function should be called.
     * Call the periodic functions whenever a packet is received from the
     * Driver Station or about every 20 msec.
     */
    private boolean nextPeriodReady()
    {
        return m_ds.isNewControlData();
    }   //nextPeriodReady

}   //class FrcRobotBase
| |
package com.quollwriter.ui.panels;
import java.awt.*;
import java.awt.event.*;
import java.io.*;
import java.lang.reflect.*;
import java.text.*;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.swing.*;
import javax.swing.border.*;
import javax.swing.event.*;
import javax.swing.text.*;
import javax.swing.text.html.*;
import javax.swing.tree.*;
import com.gentlyweb.properties.*;
import com.gentlyweb.utils.*;
import com.jgoodies.forms.builder.*;
import com.jgoodies.forms.factories.*;
import com.jgoodies.forms.layout.*;
import com.quollwriter.*;
import com.quollwriter.ui.*;
import com.quollwriter.ui.forms.*;
import com.quollwriter.data.*;
import com.quollwriter.events.*;
import com.quollwriter.ui.actionHandlers.*;
//import com.quollwriter.ui.components.*;
import com.quollwriter.ui.components.ActionAdapter;
import com.quollwriter.ui.components.Header;
import com.quollwriter.ui.components.IconProvider;
import com.quollwriter.ui.components.Runner;
public abstract class AbstractObjectViewPanel<E extends AbstractProjectViewer, O extends UserConfigurableObject> extends ProjectObjectQuollPanel<E, O>
{
    // NOTE(review): never assigned in this class; init () uses a local ActionMap
    // from getActionMap () instead — confirm whether this field is still needed.
    private ActionMap actions = null;
    // Components added directly to this panel (currently just the header).
    private List<Component> topLevelComps = new ArrayList ();
    // Header showing the object's name and type icon.
    private Header title = null;
    private JPanel linkedTo = null;
    // Split panes: main divides left (details/bottom) from right (docs/linked-to).
    private JSplitPane leftSplitPane = null;
    private JSplitPane rightSplitPane = null;
    private JSplitPane mainSplitPane = null;
    //private JTree linkedToEditTree = null;
    //private JTree linkedToViewTree = null;
    private boolean panesInited = false;
    // Edit panel for the object's detail fields, created via getDetailEditPanel ().
    private EditPanel detailsPanel = null;
    // Optional subclass-provided panel shown under the details panel.
    protected JComponent bottomPanel = null;
    protected LinkedToEditPanel linkedToPanel = null;
    private ActionListener deleteObjectAction = null;
    // Edit panels with unsaved changes, used to track save state.
    private Map<EditPanel, String> sectionsNeedingSave = new HashMap ();
    private ObjectDocumentsEditPanel objDocsEditPanel = null;
    // Remembered height of the bottom panel while the details edit view is open.
    private int bottomPanelHeight = 0;
    private boolean highlight = false;
    /**
     * Creates the view panel for the given object; actual UI construction
     * happens later in {@link #init()}.
     *
     * @param pv The project viewer hosting this panel.
     * @param n  The object to view.
     * @throws GeneralException If the superclass setup fails.
     */
    public AbstractObjectViewPanel (E pv,
                                    O n)
                             throws GeneralException
    {

        super (pv,
               n);

    }
    /** Returns the panel title: the viewed object's name. */
    @Override
    public String getTitle ()
    {

        return this.getForObject ().getName ();

    }
    /** Returns the header component created in {@link #init()}; null before init. */
    public Header getHeader ()
    {

        return this.title;

    }
    /** Returns the documents edit panel created in {@link #init()}; null before init. */
    public ObjectDocumentsEditPanel getObjectDocumentsEditPanel ()
    {

        return this.objDocsEditPanel;

    }
    /** Returns the details edit panel created in {@link #init()}; null before init. */
    public EditPanel getDetailsPanel ()
    {

        return this.detailsPanel;

    }
public void init ()
throws GeneralException
{
this.viewer.setLinks (this.obj);
this.title = UIUtils.createHeader (this.obj.getName (),
Constants.PANEL_TITLE,
this.obj.getUserConfigurableObjectType ().getIcon24x24 (),
null);
final AbstractObjectViewPanel _this = this;
final Header tTitle = this.title;
this.addObjectPropertyChangedListener (new PropertyChangedAdapter ()
{
public void propertyChanged (PropertyChangedEvent ev)
{
if (ev.getChangeType ().equals (NamedObject.NAME))
{
_this.refresh ();
}
}
});
this.topLevelComps.add (this.title);
this.add (this.title,
0);
final Box b = new Box (BoxLayout.Y_AXIS);
b.setBorder (new EmptyBorder (0, 7, 0, 7));
b.setAlignmentX (Component.LEFT_ALIGNMENT);
b.setOpaque (true);
b.setBackground (null);
this.mainSplitPane = UIUtils.createSplitPane (JSplitPane.HORIZONTAL_SPLIT);
this.mainSplitPane.setAlignmentX (Component.LEFT_ALIGNMENT);
this.mainSplitPane.setBorder (UIUtils.createPadding (0, 0, 3, 0));
this.mainSplitPane.setResizeWeight (1);
this.leftSplitPane = UIUtils.createSplitPane (JSplitPane.VERTICAL_SPLIT);
this.leftSplitPane.setResizeWeight (1d);
this.rightSplitPane = UIUtils.createSplitPane (JSplitPane.VERTICAL_SPLIT);
this.rightSplitPane.setBorder (null);
this.rightSplitPane.setResizeWeight (0.5d);
this.mainSplitPane.setLeftComponent (this.leftSplitPane);
this.mainSplitPane.setRightComponent (this.rightSplitPane);
final JComponent botEp = this.getBottomPanel ();
if (botEp != null)
{
if (botEp instanceof RefreshablePanel)
{
((RefreshablePanel) botEp).init ();
}
this.bottomPanel = botEp;
}
this.detailsPanel = this.getDetailEditPanel (this.viewer,
this.obj);
this.detailsPanel.init ();
this.detailsPanel.addActionListener (new ActionListener ()
{
@Override
public void actionPerformed (ActionEvent ev)
{
if (ev.getActionCommand ().equals ("edit-visible"))
{
botEp.setVisible (false);
_this.bottomPanelHeight = botEp.getSize ().height;
}
if (ev.getActionCommand ().equals ("view-visible"))
{
botEp.setPreferredSize (new Dimension (botEp.getPreferredSize ().width,
_this.bottomPanelHeight));
botEp.setVisible (true);
_this.leftSplitPane.setDividerLocation (_this.leftSplitPane.getSize ().height - _this.bottomPanelHeight);
}
}
});
this.leftSplitPane.setTopComponent (this.detailsPanel);
this.leftSplitPane.setBottomComponent (botEp);
this.linkedToPanel = new LinkedToEditPanel<UserConfigurableObject, AbstractProjectViewer> (this.obj,
this.viewer);
this.rightSplitPane.setBottomComponent (this.linkedToPanel);
this.linkedToPanel.init ();
this.linkedToPanel.setMinimumSize (new Dimension (200, 100));
this.linkedToPanel.addActionListener (new ActionAdapter ()
{
public void actionPerformed (ActionEvent ev)
{
if (ev.getID () == EditPanel.EDIT_VISIBLE)
{
_this.setHasUnsavedChanges (_this.linkedToPanel,
true);
}
if ((ev.getID () == EditPanel.CANCELLED) ||
(ev.getID () == EditPanel.VIEW_VISIBLE) ||
(ev.getID () == EditPanel.SAVED))
{
_this.setHasUnsavedChanges (_this.linkedToPanel,
false);
}
}
});
this.objDocsEditPanel = new ObjectDocumentsEditPanel (this.viewer,
this.obj);
this.objDocsEditPanel.init ();
this.rightSplitPane.setTopComponent (this.objDocsEditPanel);
b.add (this.mainSplitPane);
this.refresh ();
this.add (b);
this.setOpaque (false);
this.repaint ();
this.repaint ();
this.doInit ();
ActionMap actions = this.getActionMap ();
InputMap im = this.getInputMap (JComponent.WHEN_IN_FOCUSED_WINDOW);
im.put (KeyStroke.getKeyStroke (KeyEvent.VK_E,
Event.CTRL_MASK),
"edit");
im.put (KeyStroke.getKeyStroke (KeyEvent.VK_D,
Event.CTRL_MASK),
"adddocument");
im.put (KeyStroke.getKeyStroke (KeyEvent.VK_L,
Event.CTRL_MASK),
"editlinkedto");
actions.put ("edit",
new ActionAdapter ()
{
public void actionPerformed (ActionEvent ev)
{
_this.editObject ();
}
});
actions.put ("adddocument",
new ActionAdapter ()
{
public void actionPerformed (ActionEvent ev)
{
_this.objDocsEditPanel.showAddDocument ();
}
});
actions.put ("editlinkedto",
new ActionAdapter ()
{
public void actionPerformed (ActionEvent ev)
{
_this.linkedToPanel.showEditPanel ();
}
});
this.setReadyForUse (true);
UIUtils.doLater (new ActionListener ()
{
@Override
public void actionPerformed (ActionEvent ev)
{
_this.leftSplitPane.setDividerLocation (0.8d);
_this.rightSplitPane.setDividerLocation (0.5d);
}
});
}
/**
 * Switches the details panel from view mode into edit mode.
 */
public void editObject ()
{
// Delegates to the details edit panel created in getDetailEditPanel.
this.detailsPanel.showEditPanel ();
}
/**
 * Returns the component shown below the details panel, or null for none.
 * If the returned component implements RefreshablePanel it is init'ed and
 * refreshed by this class.
 */
public abstract JComponent getBottomPanel ();
/**
 * Creates the edit panel used to view/edit the object's details.
 *
 * @param viewer the project viewer.
 * @param obj the object being displayed.
 * @throws GeneralException if the panel cannot be created.
 */
public abstract EditPanel getDetailEditPanel (E viewer,
NamedObject obj)
throws GeneralException;
/**
 * Returns the action that deletes the object shown in this panel.
 *
 * @param viewer the project viewer.
 * @param obj the object to delete.
 */
public abstract ActionListener getDeleteObjectAction (E viewer,
NamedObject obj);
/** Subclass hook called once at the end of panel construction. */
public abstract void doInit ();
/** Subclass hook called from {@link #refresh()} after the common panels refresh. */
public abstract void doRefresh ();
/**
 * Subclass hook to add subclass-specific toolbar buttons.
 *
 * @param tb the toolbar to fill.
 * @param fullScreen whether the viewer is in full screen mode.
 */
public abstract void doFillToolBar (JToolBar tb,
boolean fullScreen);
/**
 * Populates the toolbar for this panel by delegating to the subclass hook
 * {@link #doFillToolBar(JToolBar, boolean)}.
 *
 * @param tb the toolbar to fill.
 * @param fullScreen whether the viewer is currently in full screen mode.
 */
public void fillToolBar (JToolBar tb,
                         boolean fullScreen)
{

    // Removed the unused "_this" alias and the long-dead commented-out
    // print/delete buttons; subclasses supply all toolbar content.
    this.doFillToolBar (tb,
                        fullScreen);

}
/**
 * Hook for subclasses to add items to the context menu; the default
 * implementation adds nothing.
 *
 * @param ev the mouse event that triggered the popup.
 * @param popup the menu to fill.
 */
public void fillPopupMenu (MouseEvent ev,
JPopupMenu popup)
{
}
/**
 * Restores the divider locations of the three split panes from the saved
 * state map.  As before: a missing/invalid main divider value aborts the
 * whole restore (and leaves panesInited unset); failures for the left and
 * right dividers are ignored individually.
 *
 * @param s the saved state, keyed by the Constants.*_DIVIDER_LOCATION names.
 * @param hasFocus ignored by this implementation.
 */
public void setState (Map<String, String> s,
                      boolean hasFocus)
{

    int v = this.getSavedDividerLocation (s,
                                          Constants.ASSET_MAIN_SPLIT_PANE_DIVIDER_LOCATION_PROPERTY_NAME);

    if (v <= 0)
    {

        // No usable main divider value: abort, matching the previous
        // early-return behavior.
        return;

    }

    this.mainSplitPane.setDividerLocation (v);

    v = this.getSavedDividerLocation (s,
                                      Constants.ASSET_LEFT_SPLIT_PANE_DIVIDER_LOCATION_PROPERTY_NAME);

    if (v > 0)
    {

        this.leftSplitPane.setDividerLocation (v);

    }

    v = this.getSavedDividerLocation (s,
                                      Constants.ASSET_RIGHT_SPLIT_PANE_DIVIDER_LOCATION_PROPERTY_NAME);

    if (v > 0)
    {

        this.rightSplitPane.setDividerLocation (v);

    }

    this.panesInited = true;

}

/**
 * Parses the named divider location from the state map.
 *
 * @param s the saved state map.
 * @param name the property name to look up.
 * @return the parsed value, or -1 if the value is missing or not an int.
 */
private int getSavedDividerLocation (Map<String, String> s,
                                     String name)
{

    try
    {

        return Integer.parseInt (s.get (name));

    } catch (Exception e)
    {

        // Missing key (NumberFormatException on null) or malformed value.
        return -1;

    }

}
/**
 * Saves the current divider locations of the three split panes into the
 * supplied state map, under the Constants.*_DIVIDER_LOCATION keys.
 *
 * @param m the map to write the divider locations into.
 */
public void getState (Map<String, Object> m)
{

    int mainLoc = this.mainSplitPane.getDividerLocation ();
    int leftLoc = this.leftSplitPane.getDividerLocation ();
    int rightLoc = this.rightSplitPane.getDividerLocation ();

    m.put (Constants.ASSET_MAIN_SPLIT_PANE_DIVIDER_LOCATION_PROPERTY_NAME,
           mainLoc);
    m.put (Constants.ASSET_LEFT_SPLIT_PANE_DIVIDER_LOCATION_PROPERTY_NAME,
           leftLoc);
    m.put (Constants.ASSET_RIGHT_SPLIT_PANE_DIVIDER_LOCATION_PROPERTY_NAME,
           rightLoc);

}
/**
 * Returns the panel's top level components.
 *
 * @return the stored list (not a copy).
 */
public List<Component> getTopLevelComponents ()
{
return this.topLevelComps;
}
/**
 * Refreshes the "linked to" tree in the linked-to edit panel.
 */
public void refreshLinkedToTree ()
{
this.linkedToPanel.refreshLinkedToTree ();
}
/**
 * Refreshes the title, viewer links and every sub-panel from the current
 * state of the object, then lets the subclass refresh its own components.
 */
@Override
public void refresh ()
{

    this.title.setTitle (this.obj.getName ());
    this.viewer.setLinks (this.obj);

    this.detailsPanel.refreshViewPanel ();
    this.linkedToPanel.refreshViewPanel ();

    // instanceof is null-safe, so no separate null check is needed.
    if (this.bottomPanel instanceof RefreshablePanel)
    {

        ((RefreshablePanel) this.bottomPanel).refresh ();

    }

    this.doRefresh ();
    this.repaint ();

}
/**
 * Asks each edit panel with unsaved changes to save itself.
 *
 * @return false as soon as any panel's doSave returns false, true if all
 *         panels saved.
 * @throws Exception if a panel fails to save with an error.
 */
public boolean saveUnsavedChanges ()
                           throws Exception
{

    // Enhanced for-loop replaces the explicit Iterator; same iteration order.
    for (EditPanel ep : this.sectionsNeedingSave.keySet ())
    {

        if (!ep.doSave ())
        {

            return false;

        }

    }

    return true;

}
/**
 * Records whether the given edit panel has unsaved changes, then updates the
 * panel-level unsaved-changes flag from the remaining set of dirty panels.
 *
 * @param ep the edit panel whose dirty state changed.
 * @param hasChanges true if the panel now has unsaved changes.
 */
public void setHasUnsavedChanges (EditPanel ep,
                                  boolean hasChanges)
{

    if (hasChanges)
    {

        // The map is used as a set; the value is irrelevant.
        this.sectionsNeedingSave.put (ep,
                                      "");

    } else
    {

        this.sectionsNeedingSave.remove (ep);

    }

    // isEmpty() is the idiomatic form of size() > 0.
    this.setHasUnsavedChanges (!this.sectionsNeedingSave.isEmpty ());

}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* $Id: DijkstraAlgorithm.java 750418 2009-03-05 11:03:54Z vhennebert $ */
package org.apache.xmlgraphics.util.dijkstra;
import java.util.Comparator;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
/**
* This is an implementation of Dijkstra's algorithm to find the shortest path for a directed
* graph with non-negative edge weights.
* @see <a href="http://en.wikipedia.org/wiki/Dijkstra%27s_algorithm">WikiPedia on Dijkstra's
* Algorithm</a>
*/
/**
 * This is an implementation of Dijkstra's algorithm to find the shortest path for a directed
 * graph with non-negative edge weights.
 * <p>
 * The raw collections have been replaced with the generic forms the original
 * comments ({@code //Set<Vertex>} etc.) already documented, fields are final,
 * and the deprecated {@code new Integer(...)} constructor is gone.
 * @see <a href="http://en.wikipedia.org/wiki/Dijkstra%27s_algorithm">WikiPedia on Dijkstra's
 * Algorithm</a>
 */
public class DijkstraAlgorithm {

    /** Infinity value for distances. */
    public static final int INFINITE = Integer.MAX_VALUE;

    /** Compares penalties between two possible destinations. */
    private final Comparator<Vertex> penaltyComparator = new Comparator<Vertex>() {
        public int compare(Vertex left, Vertex right) {
            int leftPenalty = getLowestPenalty(left);
            int rightPenalty = getLowestPenalty(right);
            if (leftPenalty < rightPenalty) {
                return -1;
            } else if (leftPenalty == rightPenalty) {
                //Tie-break on the vertices' natural order so the TreeSet never
                //treats two distinct vertices with equal penalties as duplicates.
                return ((Comparable)left).compareTo(right);
            } else {
                return 1;
            }
        }
    };

    /** The directory of edges. Never reassigned after construction. */
    private final EdgeDirectory edgeDirectory;

    /** The priority queue for all vertices under inspection, ordered by penalties/distances. */
    private final TreeSet<Vertex> priorityQueue = new TreeSet<Vertex>(penaltyComparator);

    /** The set of vertices for which the lowest penalty has been found. */
    private final Set<Vertex> finishedVertices = new java.util.HashSet<Vertex>();

    /** The currently known lowest penalties for all vertices. */
    private final Map<Vertex, Integer> lowestPenalties = new java.util.HashMap<Vertex, Integer>();

    /** Map of all predecessors in the spanning tree of best routes. */
    private final Map<Vertex, Vertex> predecessors = new java.util.HashMap<Vertex, Vertex>();

    /**
     * Main Constructor.
     * @param edgeDirectory the edge directory this instance should work on
     */
    public DijkstraAlgorithm(EdgeDirectory edgeDirectory) {
        this.edgeDirectory = edgeDirectory;
    }

    /**
     * Returns the penalty between two vertices.
     * @param start the start vertex
     * @param end the end vertex
     * @return the penalty between two vertices, or 0 if no single edge between the two vertices
     *                  exists.
     */
    protected int getPenalty(Vertex start, Vertex end) {
        return this.edgeDirectory.getPenalty(start, end);
    }

    /**
     * Returns an iterator over all valid destinations for a given vertex.
     * @param origin the origin from which to search for destinations
     * @return the iterator over all valid destinations for a given vertex
     */
    protected Iterator getDestinations(Vertex origin) {
        return this.edgeDirectory.getDestinations(origin);
    }

    /** Clears all per-run state so the instance can be reused. */
    private void reset() {
        finishedVertices.clear();
        priorityQueue.clear();
        lowestPenalties.clear();
        predecessors.clear();
    }

    /**
     * Run Dijkstra's shortest path algorithm. After this method is finished you can use
     * {@link #getPredecessor(Vertex)} to reconstruct the best/shortest path starting from the
     * destination backwards.
     * @param start the starting vertex
     * @param destination the destination vertex.
     */
    public void execute(Vertex start, Vertex destination) {
        if (start == null || destination == null) {
            throw new NullPointerException("start and destination may not be null");
        }
        reset();
        setShortestDistance(start, 0);
        priorityQueue.add(start);

        // extract the vertex with the shortest distance
        while (!priorityQueue.isEmpty()) {
            Vertex u = priorityQueue.first();
            priorityQueue.remove(u);
            if (destination.equals(u)) {
                //Destination reached
                break;
            }
            finishedVertices.add(u);
            relax(u);
        }
    }

    /**
     * Compute new lowest penalties for neighboring vertices. Update the lowest penalties and the
     * predecessor map if a better solution is found.
     * @param u the vertex to process
     */
    private void relax(Vertex u) {
        Iterator iter = getDestinations(u);
        while (iter.hasNext()) {
            Vertex v = (Vertex)iter.next();
            // skip node already settled
            if (isFinished(v)) {
                continue;
            }
            int shortDist = getLowestPenalty(u) + getPenalty(u, v);

            if (shortDist < getLowestPenalty(v)) {
                // assign new shortest distance and mark unsettled
                setShortestDistance(v, shortDist);

                // assign predecessor in shortest path
                setPredecessor(v, u);
            }
        }
    }

    /** Records b as a's predecessor on the current best route. */
    private void setPredecessor(Vertex a, Vertex b) {
        predecessors.put(a, b);
    }

    /**
     * Indicates whether a shortest route to a vertex has been found.
     * @param v the vertex
     * @return true if the shortest route to this vertex has been found.
     */
    private boolean isFinished(Vertex v) {
        return finishedVertices.contains(v);
    }

    /** Updates a vertex's lowest penalty, repositioning it in the priority queue. */
    private void setShortestDistance(Vertex vertex, int distance) {
        //Remove so it is inserted at the right position after the lowest penalty changes for this
        //vertex.
        priorityQueue.remove(vertex);

        //Update the lowest penalty (autoboxing replaces deprecated new Integer).
        lowestPenalties.put(vertex, Integer.valueOf(distance));

        //Insert the vertex again at the new position based on the lowest penalty
        priorityQueue.add(vertex);
    }

    /**
     * Returns the lowest penalty from the start point to a given vertex.
     * @param vertex the vertex
     * @return the lowest penalty or {@link DijkstraAlgorithm#INFINITE} if there is no route to
     *                  the destination.
     */
    public int getLowestPenalty(Vertex vertex) {
        Integer d = lowestPenalties.get(vertex);
        return (d == null) ? INFINITE : d.intValue();
    }

    /**
     * Returns the vertex's predecessor on the shortest path.
     * @param vertex the vertex for which to find the predecessor
     * @return the vertex's predecessor on the shortest path, or
     *                  <code>null</code> if there is no route to the destination.
     */
    public Vertex getPredecessor(Vertex vertex) {
        return predecessors.get(vertex);
    }
}
| |
/**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.io.parquet;
import org.apache.commons.lang.ArrayUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.StructColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatchCtx;
import org.apache.hadoop.hive.ql.io.IOConstants;
import org.apache.hadoop.hive.ql.io.parquet.read.DataWritableReadSupport;
import org.apache.hadoop.hive.ql.io.parquet.serde.ArrayWritableObjectInspector;
import org.apache.hadoop.hive.ql.io.parquet.vector.VectorizedParquetRecordReader;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.MapWork;
import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.Job;
import org.apache.parquet.example.data.Group;
import org.apache.parquet.example.data.simple.SimpleGroupFactory;
import org.apache.parquet.hadoop.ParquetInputFormat;
import org.apache.parquet.hadoop.ParquetWriter;
import org.apache.parquet.hadoop.example.GroupReadSupport;
import org.apache.parquet.hadoop.example.GroupWriteSupport;
import org.apache.parquet.io.api.Binary;
import org.apache.parquet.schema.MessageType;
import java.io.IOException;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.List;
import static junit.framework.Assert.assertTrue;
import static junit.framework.TestCase.assertFalse;
import static org.apache.parquet.column.ParquetProperties.WriterVersion.PARQUET_1_0;
import static org.apache.parquet.hadoop.api.ReadSupport.PARQUET_READ_SCHEMA;
import static org.apache.parquet.hadoop.metadata.CompressionCodecName.GZIP;
import static org.apache.parquet.schema.MessageTypeParser.parseMessageType;
import static org.junit.Assert.assertEquals;
public class VectorizedColumnReaderTestBase {
// Number of rows written to (and expected back from) the test parquet file.
protected final static int nElements = 2500;
// When dictionary encoding is simulated, values are folded into this many buckets.
protected final static int UNIQUE_NUM = 10;
// Rows whose index is a multiple of this get null in the "some_null" columns.
protected final static int NULL_FREQUENCY = 13;
protected final static Configuration conf = new Configuration();
// Shared on-disk test file; created by writeData and deleted by removeFile.
protected final static Path file = new Path("target/test/TestParquetVectorReader/testParquetFile");
// Full write schema covering primitives, decimals, nested structs, a map and a list.
protected static final MessageType schema = parseMessageType(
"message hive_schema { "
+ "required int32 int32_field; "
+ "required int64 int64_field; "
+ "required int96 int96_field; "
+ "required double double_field; "
+ "required float float_field; "
+ "required boolean boolean_field; "
+ "required fixed_len_byte_array(3) flba_field; "
+ "optional fixed_len_byte_array(1) some_null_field; "
+ "optional fixed_len_byte_array(1) all_null_field; "
+ "required binary binary_field; "
+ "optional binary binary_field_some_null; "
+ "required binary value (DECIMAL(5,2)); "
+ "required group struct_field {"
+ "  required int32 a;\n"
+ "  required double b;\n"
+ "}\n"
+ "optional group nested_struct_field {"
+ "  optional group nsf {"
+ "    optional int32 c;\n"
+ "    optional int32 d;\n"
+ "  }\n"
+ "  optional double e;\n"
+ "}\n"
+ "optional group struct_field_some_null {"
+ "  optional int32 f;\n"
+ "  optional double g;\n"
+ "}\n"
+ "optional group map_field (MAP) {\n"
+ "  repeated group map (MAP_KEY_VALUE) {\n"
+ "    required binary key;\n"
+ "    optional binary value;\n"
+ "  }\n"
+ "}\n"
+ "optional group array_list (LIST) {\n"
+ "  repeated group bag {\n"
+ "    optional int32 array_element;\n"
+ "  }\n"
+ "}\n"
+ "} ");
/** Deletes the shared test parquet file (recursively) if it already exists. */
protected static void removeFile() throws IOException {
  FileSystem targetFs = file.getFileSystem(conf);
  boolean alreadyThere = targetFs.exists(file);
  if (alreadyThere) {
    targetFs.delete(file, true);
  }
}
/**
 * Creates a GZIP-compressed parquet writer over the shared test file using
 * the static test schema.  Same positional arguments as before, now named.
 */
protected static ParquetWriter<Group> initWriterFromFile() throws IOException {
  GroupWriteSupport.setSchema(schema, conf);
  int blockSize = 1024 * 1024;
  int pageSize = 1024;
  int dictionaryPageSize = 1024 * 1024;
  boolean enableDictionary = true;
  boolean validating = false;
  return new ParquetWriter<>(
      file,
      new GroupWriteSupport(),
      GZIP, blockSize, pageSize, dictionaryPageSize,
      enableDictionary, validating, PARQUET_1_0, conf);
}
/**
 * Expected int value for a row: the index itself, or its UNIQUE_NUM bucket
 * when dictionary encoding is simulated.
 */
protected static int getIntValue(
    boolean isDictionaryEncoding,
    int index) {
  if (isDictionaryEncoding) {
    return index % UNIQUE_NUM;
  }
  return index;
}
/**
 * Expected double value for a row: the index itself, or its UNIQUE_NUM
 * bucket when dictionary encoding is simulated (implicit int-to-double widening).
 */
protected static double getDoubleValue(
    boolean isDictionaryEncoding,
    int index) {
  if (isDictionaryEncoding) {
    return index % UNIQUE_NUM;
  }
  return index;
}
/**
 * Expected long value for a row: twice the index (computed in long
 * arithmetic), optionally folded into UNIQUE_NUM buckets.
 */
protected static long getLongValue(
    boolean isDictionaryEncoding,
    int index) {
  long doubled = 2L * index;
  return isDictionaryEncoding ? doubled % UNIQUE_NUM : doubled;
}
/**
 * Expected float value for a row: twice the index (or twice its UNIQUE_NUM
 * bucket), computed in double then narrowed to float as before.
 */
protected static float getFloatValue(
    boolean isDictionaryEncoding,
    int index) {
  double base = isDictionaryEncoding ? (index % UNIQUE_NUM) * 2.0 : index * 2.0;
  return (float) base;
}
/**
 * Expected boolean value for a row: true exactly when the index is even.
 * (The parameter is a float for caller convenience; the comparison is the
 * same float remainder test as before.)
 */
protected static boolean getBooleanValue(
    float index) {
  return index % 2 == 0;
}
/**
 * Builds the timestamp string "99999999" followed by the index zero-padded
 * to at least four digits, e.g. 7 -> "999999990007", 12345 -> "9999999912345".
 * String.format replaces the previous quadratic prepend loop; behavior is
 * identical for the non-negative indices this class uses.
 */
protected static String getTimestampStr(int index) {
  return "99999999" + String.format("%04d", index);
}
/**
 * Expected decimal value for a row.  The last two digits come from
 * index % 100 (zero-padded); the leading digits are index % UNIQUE_NUM under
 * dictionary encoding, otherwise index / 100.
 */
protected static HiveDecimal getDecimal(
    boolean isDictionaryEncoding,
    int index) {
  int decimalVal = index % 100;
  // %02d replaces the manual "0"-prefix branch; same result for 0..99.
  String decimalStr = String.format("%02d", decimalVal);
  int intVal = (isDictionaryEncoding) ? index % UNIQUE_NUM : index / 100;
  String d = String.valueOf(intVal) + decimalStr;
  BigInteger bi = new BigInteger(d);
  BigDecimal bd = new BigDecimal(bi);
  return HiveDecimal.create(bd);
}
/**
 * Expected int96 timestamp bytes for a row: getTimestampStr of the
 * (possibly bucketed) index, as a reused byte array.
 */
protected static Binary getTimestamp(
    boolean isDictionaryEncoding,
    int index) {
  int effectiveIndex = isDictionaryEncoding ? index % UNIQUE_NUM : index;
  String str = getTimestampStr(effectiveIndex);
  return Binary.fromReusedByteArray(str.getBytes());
}
/**
 * Expected string value for a row: the (possibly bucketed) index rendered
 * in base 26 using the letters 'a'..'z'; index 0 yields the empty string.
 */
protected static String getStr(
    boolean isDictionaryEncoding,
    int index) {
  int remaining = isDictionaryEncoding ? index % UNIQUE_NUM : index;
  StringBuilder out = new StringBuilder();
  while (remaining > 0) {
    // Digits come out least significant first, hence the insert at 0.
    out.insert(0, (char) ('a' + remaining % 26));
    remaining /= 26;
  }
  return out.toString();
}
/** Binary (string) form of {@link #getStr} for the given row. */
protected static Binary getBinaryValue(
boolean isDictionaryEncoding,
int index) {
return Binary.fromString(getStr(isDictionaryEncoding, index));
}
/** True when the row at this index is written with nulls (every NULL_FREQUENCY-th row). */
protected static boolean isNull(int index) {
  return index % NULL_FREQUENCY == 0;
}
/**
 * Builds a vectorized parquet record reader over the first (only) split of
 * the shared test file, with vectorization enabled and the given read schema.
 */
protected VectorizedParquetRecordReader createParquetReader(String schemaString, Configuration conf)
    throws IOException, InterruptedException, HiveException {
  conf.set(PARQUET_READ_SCHEMA, schemaString);
  HiveConf.setBoolVar(conf, HiveConf.ConfVars.HIVE_VECTORIZATION_ENABLED, true);
  // NOTE(review): "//tmp" looks like it may be a typo for "/tmp" — kept as-is.
  HiveConf.setVar(conf, HiveConf.ConfVars.PLAN, "//tmp");

  Job vectorJob = new Job(conf, "read vector");
  ParquetInputFormat.setInputPaths(vectorJob, file);
  ParquetInputFormat inputFormat = new ParquetInputFormat(GroupReadSupport.class);
  InputSplit split = (InputSplit) inputFormat.getSplits(vectorJob).get(0);

  initialVectorizedRowBatchCtx(conf);
  return new VectorizedParquetRecordReader(split, new JobConf(conf));
}
/**
 * Writes nElements rows to the given writer using the static test schema.
 * The exact per-row values and null patterns here are what every read
 * method in this class asserts against, so the append order must not change.
 * Null/shape patterns: some_null_field and binary_field_some_null are null
 * on isNull(i) rows; struct_field_some_null.f is null on even rows and .g on
 * rows divisible by 3; the map value is missing when i % 13 == 1; the array
 * gets i % 4 elements.
 */
protected static void writeData(ParquetWriter<Group> writer, boolean isDictionaryEncoding) throws IOException {
SimpleGroupFactory f = new SimpleGroupFactory(schema);
for (int i = 0; i < nElements; i++) {
boolean isNull = isNull(i);
int intVal = getIntValue(isDictionaryEncoding, i);
long longVal = getLongValue(isDictionaryEncoding, i);
Binary timeStamp = getTimestamp(isDictionaryEncoding, i);
HiveDecimal decimalVal = getDecimal(isDictionaryEncoding, i).setScale(2);
double doubleVal = getDoubleValue(isDictionaryEncoding, i);
float floatVal = getFloatValue(isDictionaryEncoding, i);
boolean booleanVal = getBooleanValue(i);
Binary binary = getBinaryValue(isDictionaryEncoding, i);
Group group = f.newGroup()
.append("int32_field", intVal)
.append("int64_field", longVal)
.append("int96_field", timeStamp)
.append("double_field", doubleVal)
.append("float_field", floatVal)
.append("boolean_field", booleanVal)
.append("flba_field", "abc");
// Optional columns are simply not appended on null rows.
if (!isNull) {
group.append("some_null_field", "x");
}
group.append("binary_field", binary);
if (!isNull) {
group.append("binary_field_some_null", binary);
}
// Decimal is stored as the HiveDecimalWritable's raw internal bytes.
HiveDecimalWritable w = new HiveDecimalWritable(decimalVal);
group.append("value", Binary.fromConstantByteArray(w.getInternalStorage()));
group.addGroup("struct_field")
.append("a", intVal)
.append("b", doubleVal);
Group g = group.addGroup("nested_struct_field");
g.addGroup("nsf").append("c", intVal).append("d", intVal);
g.append("e", doubleVal);
// f present on odd rows, g present when i is not a multiple of 3.
Group some_null_g = group.addGroup("struct_field_some_null");
if (i % 2 != 0) {
some_null_g.append("f", intVal);
}
if (i % 3 != 0) {
some_null_g.append("g", doubleVal);
}
// Map value is omitted (null) every 13th row offset by one.
Group mapGroup = group.addGroup("map_field");
if (i % 13 != 1) {
mapGroup.addGroup("map").append("key", binary).append("value", "abc");
} else {
mapGroup.addGroup("map").append("key", binary);
}
// Array length cycles 0..3 with the row index.
Group arrayGroup = group.addGroup("array_list");
for (int j = 0; j < i % 4; j++) {
arrayGroup.addGroup("bag").append("array_element", intVal);
}
writer.write(group);
}
writer.close();
}
/**
 * Installs a vectorized MapWork into the conf so the vectorized reader can
 * build its row batches from the configured column names/types.
 */
protected void initialVectorizedRowBatchCtx(Configuration conf) throws HiveException {
  VectorizedRowBatchCtx batchCtx = new VectorizedRowBatchCtx();
  batchCtx.init(createStructObjectInspector(conf), new String[0]);

  MapWork work = new MapWork();
  work.setVectorMode(true);
  work.setVectorizedRowBatchCtx(batchCtx);
  Utilities.setMapWork(conf, work);
}
/** Builds a row struct inspector from the COLUMNS / COLUMNS_TYPES conf entries. */
private StructObjectInspector createStructObjectInspector(Configuration conf) {
  List<String> names = DataWritableReadSupport.getColumnNames(conf.get(IOConstants.COLUMNS));
  List<TypeInfo> types = DataWritableReadSupport.getColumnTypes(conf.get(IOConstants.COLUMNS_TYPES));
  TypeInfo rowType = TypeInfoFactory.getStructTypeInfo(names, types);
  return new ArrayWritableObjectInspector((StructTypeInfo) rowType);
}
/**
 * Reads the int32_field column vectorized and checks every row against
 * {@link #getIntValue}.
 */
protected void intRead(boolean isDictionaryEncoding) throws InterruptedException, HiveException, IOException {
  Configuration conf = new Configuration();
  conf.set(IOConstants.COLUMNS, "int32_field");
  conf.set(IOConstants.COLUMNS_TYPES, "int");
  conf.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false);
  conf.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0");
  VectorizedParquetRecordReader reader =
      createParquetReader("message test { required int32 int32_field;}", conf);
  VectorizedRowBatch batch = reader.createValue();
  try {
    int row = 0;
    while (reader.next(NullWritable.get(), batch)) {
      LongColumnVector col = (LongColumnVector) batch.cols[0];
      assertTrue(col.noNulls);
      for (int slot = 0; slot < col.vector.length; slot++) {
        if (row == nElements) {
          break;
        }
        assertEquals("Failed at " + row, getIntValue(isDictionaryEncoding, row), col.vector[slot]);
        assertFalse(col.isNull[slot]);
        row++;
      }
    }
    assertEquals(nElements, row);
  } finally {
    reader.close();
  }
}
/**
 * Reads the int64_field column vectorized and checks every row against
 * {@link #getLongValue}.
 */
protected void longRead(boolean isDictionaryEncoding) throws Exception {
  Configuration conf = new Configuration();
  conf.set(IOConstants.COLUMNS, "int64_field");
  conf.set(IOConstants.COLUMNS_TYPES, "bigint");
  conf.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false);
  conf.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0");
  VectorizedParquetRecordReader reader =
      createParquetReader("message test { required int64 int64_field;}", conf);
  VectorizedRowBatch batch = reader.createValue();
  try {
    int row = 0;
    while (reader.next(NullWritable.get(), batch)) {
      LongColumnVector col = (LongColumnVector) batch.cols[0];
      assertTrue(col.noNulls);
      for (int slot = 0; slot < col.vector.length; slot++) {
        if (row == nElements) {
          break;
        }
        assertEquals("Failed at " + row, getLongValue(isDictionaryEncoding, row), col.vector[slot]);
        assertFalse(col.isNull[slot]);
        row++;
      }
    }
    assertEquals(nElements, row);
  } finally {
    reader.close();
  }
}
/**
 * Reads the double_field column vectorized and checks every row against
 * {@link #getDoubleValue} (exact comparison, delta 0).
 */
protected void doubleRead(boolean isDictionaryEncoding) throws Exception {
  Configuration conf = new Configuration();
  conf.set(IOConstants.COLUMNS, "double_field");
  conf.set(IOConstants.COLUMNS_TYPES, "double");
  conf.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false);
  conf.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0");
  VectorizedParquetRecordReader reader =
      createParquetReader("message test { required double double_field;}", conf);
  VectorizedRowBatch batch = reader.createValue();
  try {
    int row = 0;
    while (reader.next(NullWritable.get(), batch)) {
      DoubleColumnVector col = (DoubleColumnVector) batch.cols[0];
      assertTrue(col.noNulls);
      for (int slot = 0; slot < col.vector.length; slot++) {
        if (row == nElements) {
          break;
        }
        assertEquals("Failed at " + row, getDoubleValue(isDictionaryEncoding, row), col.vector[slot],
            0);
        assertFalse(col.isNull[slot]);
        row++;
      }
    }
    assertEquals(nElements, row);
  } finally {
    reader.close();
  }
}
/**
 * Reads the float_field column vectorized (floats surface in a
 * DoubleColumnVector) and checks every row against {@link #getFloatValue}.
 */
protected void floatRead(boolean isDictionaryEncoding) throws Exception {
  Configuration conf = new Configuration();
  conf.set(IOConstants.COLUMNS, "float_field");
  conf.set(IOConstants.COLUMNS_TYPES, "float");
  conf.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false);
  conf.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0");
  VectorizedParquetRecordReader reader =
      createParquetReader("message test { required float float_field;}", conf);
  VectorizedRowBatch batch = reader.createValue();
  try {
    int row = 0;
    while (reader.next(NullWritable.get(), batch)) {
      DoubleColumnVector col = (DoubleColumnVector) batch.cols[0];
      assertTrue(col.noNulls);
      for (int slot = 0; slot < col.vector.length; slot++) {
        if (row == nElements) {
          break;
        }
        assertEquals("Failed at " + row, getFloatValue(isDictionaryEncoding, row), col.vector[slot],
            0);
        assertFalse(col.isNull[slot]);
        row++;
      }
    }
    assertEquals(nElements, row);
  } finally {
    reader.close();
  }
}
/**
 * Reads the boolean_field column vectorized (booleans surface as 0/1 longs)
 * and checks every row against {@link #getBooleanValue}.
 */
protected void booleanRead() throws Exception {
  Configuration conf = new Configuration();
  conf.set(IOConstants.COLUMNS, "boolean_field");
  conf.set(IOConstants.COLUMNS_TYPES, "boolean");
  conf.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false);
  conf.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0");
  VectorizedParquetRecordReader reader =
      createParquetReader("message test { required boolean boolean_field;}", conf);
  VectorizedRowBatch batch = reader.createValue();
  try {
    int row = 0;
    while (reader.next(NullWritable.get(), batch)) {
      LongColumnVector col = (LongColumnVector) batch.cols[0];
      assertTrue(col.noNulls);
      for (int slot = 0; slot < col.vector.length; slot++) {
        if (row == nElements) {
          break;
        }
        assertEquals("Failed at " + row, (getBooleanValue(row) ? 1 : 0), col.vector[slot]);
        assertFalse(col.isNull[slot]);
        row++;
      }
    }
    assertEquals(nElements, row);
  } finally {
    reader.close();
  }
}
/**
 * Reads binary_field_some_null vectorized, checking each row's value
 * against {@link #getStr} and the per-row null flags against
 * {@link #isNull}, plus the batch-level noNulls summary flag.
 */
protected void binaryRead(boolean isDictionaryEncoding) throws Exception {
  Configuration conf = new Configuration();
  conf.set(IOConstants.COLUMNS, "binary_field_some_null");
  conf.set(IOConstants.COLUMNS_TYPES, "string");
  conf.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false);
  conf.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0");
  VectorizedParquetRecordReader reader =
      createParquetReader("message test { required binary binary_field_some_null;}", conf);
  VectorizedRowBatch batch = reader.createValue();
  int row = 0;
  try {
    while (reader.next(NullWritable.get(), batch)) {
      BytesColumnVector col = (BytesColumnVector) batch.cols[0];
      boolean batchHasNoNull = true;
      for (int slot = 0; slot < col.vector.length; slot++) {
        if (row == nElements) {
          break;
        }
        assertEquals("Null assert failed at " + row, isNull(row), col.isNull[slot]);
        if (col.isNull[slot]) {
          batchHasNoNull = false;
        } else {
          String actual = new String(ArrayUtils
              .subarray(col.vector[slot], col.start[slot], col.start[slot] + col.length[slot]));
          assertEquals("failed at " + row, getStr(isDictionaryEncoding, row), actual);
        }
        row++;
      }
      assertEquals("No Null check failed at " + row, batchHasNoNull, col.noNulls);
      assertFalse(col.isRepeating);
    }
    assertEquals("It doesn't exit at expected position", nElements, row);
  } finally {
    reader.close();
  }
}
/**
 * Reads struct_field vectorized and checks the nested a (int) and b
 * (double) fields of every row.
 */
protected void structRead(boolean isDictionaryEncoding) throws Exception {
  Configuration conf = new Configuration();
  conf.set(IOConstants.COLUMNS, "struct_field");
  conf.set(IOConstants.COLUMNS_TYPES, "struct<a:int,b:double>");
  conf.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false);
  conf.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0");
  String schema = "message hive_schema {\n"
      + "group struct_field {\n"
      + "  optional int32 a;\n"
      + "  optional double b;\n"
      + "}\n"
      + "}\n";
  VectorizedParquetRecordReader reader = createParquetReader(schema, conf);
  VectorizedRowBatch batch = reader.createValue();
  int row = 0;
  try {
    while (reader.next(NullWritable.get(), batch)) {
      StructColumnVector structCol = (StructColumnVector) batch.cols[0];
      LongColumnVector aCol = (LongColumnVector) structCol.fields[0];
      DoubleColumnVector bCol = (DoubleColumnVector) structCol.fields[1];
      for (int slot = 0; slot < aCol.vector.length; slot++) {
        if (row == nElements) {
          break;
        }
        assertEquals(getIntValue(isDictionaryEncoding, row), aCol.vector[slot]);
        assertEquals(getDoubleValue(isDictionaryEncoding, row), bCol.vector[slot], 0);
        assertFalse(structCol.isNull[slot]);
        assertFalse(structCol.isRepeating);
        row++;
      }
    }
    assertEquals("It doesn't exit at expected position", nElements, row);
  } finally {
    reader.close();
  }
}
/**
 * Reads the full nested_struct_field (nsf.c, nsf.d, e) vectorized and
 * checks every row.
 * NOTE(review): the read-schema string below looks like it is missing a
 * final closing brace for the message block; it is reproduced byte-for-byte
 * since the existing tests rely on it — confirm against MessageTypeParser.
 */
protected void nestedStructRead0(boolean isDictionaryEncoding) throws Exception {
  Configuration conf = new Configuration();
  conf.set(IOConstants.COLUMNS, "nested_struct_field");
  conf.set(IOConstants.COLUMNS_TYPES, "struct<nsf:struct<c:int,d:int>,e:double>");
  conf.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false);
  conf.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0");
  String schema = "message hive_schema {\n"
      + "group nested_struct_field {\n"
      + "  optional group nsf {\n"
      + "    optional int32 c;\n"
      + "    optional int32 d;\n"
      + "  }"
      + "optional double e;\n"
      + "}\n";
  VectorizedParquetRecordReader reader = createParquetReader(schema, conf);
  VectorizedRowBatch batch = reader.createValue();
  int row = 0;
  try {
    while (reader.next(NullWritable.get(), batch)) {
      StructColumnVector outerCol = (StructColumnVector) batch.cols[0];
      StructColumnVector nsfCol = (StructColumnVector) outerCol.fields[0];
      LongColumnVector cCol = (LongColumnVector) nsfCol.fields[0];
      LongColumnVector dCol = (LongColumnVector) nsfCol.fields[1];
      DoubleColumnVector eCol = (DoubleColumnVector) outerCol.fields[1];
      for (int slot = 0; slot < cCol.vector.length; slot++) {
        if (row == nElements) {
          break;
        }
        assertEquals(getIntValue(isDictionaryEncoding, row), cCol.vector[slot]);
        assertEquals(getIntValue(isDictionaryEncoding, row), dCol.vector[slot]);
        assertEquals(getDoubleValue(isDictionaryEncoding, row), eCol.vector[slot], 0);
        assertFalse(outerCol.isNull[slot]);
        assertFalse(outerCol.isRepeating);
        row++;
      }
    }
    assertEquals("It doesn't exit at expected position", nElements, row);
  } finally {
    reader.close();
  }
}
/**
 * Reads a projection of nested_struct_field containing only nsf.c and
 * checks every row.
 * NOTE(review): as in nestedStructRead0, the schema string appears to miss
 * a closing brace; kept byte-for-byte.
 */
protected void nestedStructRead1(boolean isDictionaryEncoding) throws Exception {
  Configuration conf = new Configuration();
  conf.set(IOConstants.COLUMNS, "nested_struct_field");
  conf.set(IOConstants.COLUMNS_TYPES, "struct<nsf:struct<c:int>>");
  conf.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false);
  conf.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0");
  String schema = "message hive_schema {\n"
      + "group nested_struct_field {\n"
      + "  optional group nsf {\n"
      + "    optional int32 c;\n"
      + "  }"
      + "}\n";
  VectorizedParquetRecordReader reader = createParquetReader(schema, conf);
  VectorizedRowBatch batch = reader.createValue();
  int row = 0;
  try {
    while (reader.next(NullWritable.get(), batch)) {
      StructColumnVector outerCol = (StructColumnVector) batch.cols[0];
      StructColumnVector nsfCol = (StructColumnVector) outerCol.fields[0];
      LongColumnVector cCol = (LongColumnVector) nsfCol.fields[0];
      for (int slot = 0; slot < cCol.vector.length; slot++) {
        if (row == nElements) {
          break;
        }
        assertEquals(getIntValue(isDictionaryEncoding, row), cCol.vector[slot]);
        assertFalse(outerCol.isNull[slot]);
        assertFalse(outerCol.isRepeating);
        row++;
      }
    }
    assertEquals("It doesn't exit at expected position", nElements, row);
  } finally {
    reader.close();
  }
}
/**
 * Reads a struct column where fields and the struct itself are null at regular
 * intervals (f null every 2nd row, g every 3rd, whole struct every 6th) and
 * verifies both null flags and non-null values.
 *
 * @param isDictionaryEncoding whether the test data was written dictionary-encoded
 * @throws Exception on reader failures
 */
protected void structReadSomeNull(boolean isDictionaryEncoding) throws Exception {
  Configuration conf = new Configuration();
  conf.set(IOConstants.COLUMNS, "struct_field_some_null");
  conf.set(IOConstants.COLUMNS_TYPES, "struct<f:int,g:double>");
  conf.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false);
  conf.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0");
  // Fixed: the schema string was missing the final closing brace (two groups were
  // opened — hive_schema and struct_field_some_null — but only one was closed),
  // which makes the Parquet message type unparsable.
  String schema = "message hive_schema {\n"
      + "group struct_field_some_null {\n"
      + "  optional int32 f;\n"
      + "  optional double g;\n"
      + "}\n"
      + "}\n";
  VectorizedParquetRecordReader reader = createParquetReader(schema, conf);
  VectorizedRowBatch previous = reader.createValue();
  int c = 0;
  try {
    while (reader.next(NullWritable.get(), previous)) {
      StructColumnVector sv = (StructColumnVector) previous.cols[0];
      LongColumnVector fv = (LongColumnVector) sv.fields[0];
      DoubleColumnVector gv = (DoubleColumnVector) sv.fields[1];
      for (int i = 0; i < fv.vector.length; i++) {
        if (c == nElements) {
          break;
        }
        // Null pattern of the generated data: f on multiples of 2, g on multiples
        // of 3, and the whole struct on multiples of 6 (= 2*3).
        assertEquals(c % 2 == 0, fv.isNull[i]);
        assertEquals(c % 3 == 0, gv.isNull[i]);
        assertEquals(c % /* 2*3 = */6 == 0, sv.isNull[i]);
        if (!sv.isNull[i]) {
          if (!fv.isNull[i]) {
            assertEquals(getIntValue(isDictionaryEncoding, c), fv.vector[i]);
          }
          if (!gv.isNull[i]) {
            assertEquals(getDoubleValue(isDictionaryEncoding, c), gv.vector[i], 0);
          }
        }
        assertFalse(fv.isRepeating);
        c++;
      }
    }
    assertEquals("It doesn't exit at expected position", nElements, c);
  } finally {
    reader.close();
  }
}
/**
 * Reads a decimal(5,2) column through the vectorized Parquet reader and checks
 * every value against the generated data.
 *
 * @param isDictionaryEncoding whether the test data was written dictionary-encoded
 * @throws Exception on reader failures
 */
protected void decimalRead(boolean isDictionaryEncoding) throws Exception {
  Configuration conf = new Configuration();
  conf.set(IOConstants.COLUMNS, "value");
  conf.set(IOConstants.COLUMNS_TYPES, "decimal(5,2)");
  conf.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false);
  conf.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0");
  // NOTE(review): the schema string declares "required value" without a primitive
  // type keyword — presumably accepted by createParquetReader here, but verify
  // against the Parquet message-type grammar.
  VectorizedParquetRecordReader reader =
      createParquetReader("message hive_schema { required value (DECIMAL(5,2));}", conf);
  VectorizedRowBatch batch = reader.createValue();
  try {
    int row = 0;
    while (reader.next(NullWritable.get(), batch)) {
      DecimalColumnVector values = (DecimalColumnVector) batch.cols[0];
      assertTrue(values.noNulls);
      int i = 0;
      // Stop comparing once all expected rows have been verified; remaining
      // vector slots past nElements are not part of the data set.
      while (i < values.vector.length && row < nElements) {
        assertEquals("Check failed at pos " + row, getDecimal(isDictionaryEncoding, row),
            values.vector[i].getHiveDecimal());
        assertFalse(values.isNull[i]);
        row++;
        i++;
      }
    }
    assertEquals(nElements, row);
  } finally {
    reader.close();
  }
}
}
| |
package mvm.rya.rdftriplestore.evaluation;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import mvm.rya.api.RdfCloudTripleStoreConfiguration;
import mvm.rya.api.persist.joinselect.SelectivityEvalDAO;
import mvm.rya.rdftriplestore.inference.DoNotExpandSP;
import mvm.rya.rdftriplestore.utils.FixedStatementPattern;
import org.openrdf.query.BindingSet;
import org.openrdf.query.Dataset;
import org.openrdf.query.algebra.Join;
import org.openrdf.query.algebra.TupleExpr;
import org.openrdf.query.algebra.evaluation.QueryOptimizer;
import org.openrdf.query.algebra.evaluation.impl.EvaluationStatistics;
import org.openrdf.query.algebra.helpers.QueryModelVisitorBase;
/**
 * Query optimizer that greedily reorders join arguments using cardinality and
 * join-selectivity estimates, so that cheaper joins are evaluated first.
 */
public class QueryJoinSelectOptimizer implements QueryOptimizer {

  private final EvaluationStatistics statistics;
  private final SelectivityEvalDAO eval;
  private final RdfCloudTripleStoreConfiguration config;

  /**
   * @param statistics source of cardinality estimates for tuple expressions
   * @param eval       DAO providing join-selectivity estimates and the store configuration
   */
  public QueryJoinSelectOptimizer(EvaluationStatistics statistics, SelectivityEvalDAO eval) {
    // NOTE(review): removed the leftover System.out.println("Entering join optimizer!")
    // debug statement that fired on every construction.
    this.statistics = statistics;
    this.eval = eval;
    this.config = eval.getConf();
  }

  /**
   * Applies generally applicable optimizations: path expressions are sorted from more to less specific.
   *
   * @param tupleExpr query tree to optimize in place
   */
  public void optimize(TupleExpr tupleExpr, Dataset dataset, BindingSet bindings) {
    tupleExpr.visit(new JoinVisitor());
  }

  protected class JoinVisitor extends QueryModelVisitorBase<RuntimeException> {

    @Override
    public void meet(Join node) {
      try {
        // A FixedStatementPattern joined with a DoNotExpandSP is inference-generated
        // structure that must not be reordered.
        if (node.getLeftArg() instanceof FixedStatementPattern && node.getRightArg() instanceof DoNotExpandSP) {
          return;
        }
        TupleExpr partialQuery = null;
        List<TupleExpr> joinArgs = getJoinArgs(node, new ArrayList<TupleExpr>());
        // NOTE(review): a local map of per-argument cardinalities used to be built
        // here but was never read; the dead code has been removed.
        //
        // Greedily rebuild the join tree: first pick the cheapest pair, then keep
        // appending the cheapest remaining argument to the partial query.
        while (!joinArgs.isEmpty()) {
          TePairCost tpc = getBestTupleJoin(partialQuery, joinArgs);
          List<TupleExpr> tePair = tpc.getTePair();
          if (partialQuery == null) {
            if (tePair.size() != 2) {
              throw new IllegalStateException();
            }
            // Recurse into non-Join arguments so nested joins get optimized too.
            if (!(tePair.get(0) instanceof Join)) {
              tePair.get(0).visit(this);
            }
            if (!(tePair.get(1) instanceof Join)) {
              tePair.get(1).visit(this);
            }
            if (tePair.get(1) instanceof Join) {
              // Flatten the right-hand Join so the new tree stays left-deep.
              partialQuery = new Join(tePair.get(0), ((Join) tePair.get(1)).getLeftArg());
              partialQuery = new Join(partialQuery, ((Join) tePair.get(1)).getRightArg());
              joinArgs.remove(tePair.get(0));
              joinArgs.remove(tePair.get(1));
            } else {
              partialQuery = new Join(tePair.get(0), tePair.get(1));
              joinArgs.remove(tePair.get(0));
              joinArgs.remove(tePair.get(1));
            }
          } else {
            if (tePair.size() != 1) {
              throw new IllegalStateException();
            }
            if (!(tePair.get(0) instanceof Join)) {
              tePair.get(0).visit(this);
            }
            if (tePair.get(0) instanceof Join) {
              partialQuery = new Join(partialQuery, ((Join) tePair.get(0)).getLeftArg());
              partialQuery = new Join(partialQuery, ((Join) tePair.get(0)).getRightArg());
              joinArgs.remove(tePair.get(0));
            } else {
              partialQuery = new Join(partialQuery, tePair.get(0));
              joinArgs.remove(tePair.get(0));
            }
          }
        }
        // Replace old join hierarchy
        node.replaceWith(partialQuery);
      } catch (Exception e) {
        // Best-effort: an optimization failure leaves the original tree in place.
        // NOTE(review): consider routing this through a logger instead of stderr.
        e.printStackTrace();
      }
    }

    /**
     * Collects the flat list of join arguments beneath {@code tupleExpr}, treating
     * inference-generated FixedStatementPattern/DoNotExpandSP joins as atomic.
     *
     * @param tupleExpr root of the (possibly nested) join tree
     * @param joinArgs  accumulator the arguments are appended to
     * @return the same {@code joinArgs} list, for call chaining
     */
    protected <L extends List<TupleExpr>> L getJoinArgs(TupleExpr tupleExpr, L joinArgs) {
      if (tupleExpr instanceof Join) {
        if (!(((Join) tupleExpr).getLeftArg() instanceof FixedStatementPattern) && !(((Join) tupleExpr).getRightArg() instanceof DoNotExpandSP)) {
          Join join = (Join) tupleExpr;
          getJoinArgs(join.getLeftArg(), joinArgs);
          getJoinArgs(join.getRightArg(), joinArgs);
        } else {
          joinArgs.add(tupleExpr);
        }
      } else {
        joinArgs.add(tupleExpr);
      }
      return joinArgs;
    }

    /**
     * Determines the cheapest next join step. When {@code partialQuery} is null the
     * result holds the cheapest pair of arguments (smaller-cardinality side first);
     * otherwise it holds the single cheapest argument to join onto the partial query.
     * Cost model: card1 + card2 + card1 * card2 * selectivity.
     *
     * @throws Exception if the selectivity DAO fails
     */
    public TePairCost getBestTupleJoin(TupleExpr partialQuery, List<TupleExpr> teList) throws Exception {
      double tempCost = 0;
      double bestCost = Double.MAX_VALUE;
      List<TupleExpr> bestJoinNodes = new ArrayList<TupleExpr>();
      if (partialQuery == null) {
        double jSelect = 0;
        double card1 = 0;
        double card2 = 0;
        TupleExpr teMin1 = null;
        TupleExpr teMin2 = null;
        double bestCard1 = 0;
        double bestCard2 = 0;
        for (int i = 0; i < teList.size(); i++) {
          for (int j = i + 1; j < teList.size(); j++) {
            jSelect = eval.getJoinSelect(config, teList.get(i), teList.get(j));
            card1 = statistics.getCardinality(teList.get(i));
            card2 = statistics.getCardinality(teList.get(j));
            tempCost = card1 + card2 + card1 * card2 * jSelect;
            // Always accept the first candidate (teMin1 == null) so a pair is
            // selected even when every cost reaches Double.MAX_VALUE; this fixes
            // the NullPointerException noted in the old TODO.
            if (tempCost < bestCost || teMin1 == null) {
              teMin1 = teList.get(i);
              teMin2 = teList.get(j);
              bestCard1 = card1;
              bestCard2 = card2;
              bestCost = tempCost;
              if (bestCost == 0) {
                // Cannot do better than a zero-cost pair; return early.
                bestJoinNodes.add(teMin1);
                bestJoinNodes.add(teMin2);
                return new TePairCost(0.0, bestJoinNodes);
              }
            }
          }
        }
        // Order the pair so the smaller-cardinality expression comes first.
        if (bestCard1 < bestCard2) {
          bestJoinNodes.add(teMin1);
          bestJoinNodes.add(teMin2);
        } else {
          bestJoinNodes.add(teMin2);
          bestJoinNodes.add(teMin1);
        }
        return new TePairCost(bestCost, bestJoinNodes);
      } else {
        double card1 = statistics.getCardinality(partialQuery);
        TupleExpr bestTe = null;
        double card2 = 0;
        double select = 0;
        for (TupleExpr te : teList) {
          select = eval.getJoinSelect(config, partialQuery, te);
          card2 = statistics.getCardinality(te);
          tempCost = card1 + card2 + card1 * card2 * select;
          // Same null guard as above: always keep at least one candidate.
          if (tempCost < bestCost || bestTe == null) {
            bestTe = te;
            bestCost = tempCost;
          }
        }
        List<TupleExpr> teList2 = new ArrayList<TupleExpr>();
        teList2.add(bestTe);
        return new TePairCost(bestCost, teList2);
      }
    }

    // **************************************************************************************

    /**
     * Immutable pairing of an estimated join cost with the tuple expression(s)
     * that produce it.
     */
    public class TePairCost {
      private final double cost;
      private final List<TupleExpr> tePair;

      public TePairCost(double cost, List<TupleExpr> tePair) {
        this.cost = cost;
        this.tePair = tePair;
      }

      public double getCost() {
        return cost;
      }

      public List<TupleExpr> getTePair() {
        return tePair;
      }
    }
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.stratos.cartridge.agent.util;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.stratos.cartridge.agent.config.CartridgeAgentConfiguration;
import org.apache.stratos.common.util.CommandUtils;
import org.apache.stratos.messaging.domain.topology.Cluster;
import org.apache.stratos.messaging.domain.topology.Member;
import org.apache.stratos.messaging.domain.topology.Service;
import org.apache.stratos.messaging.domain.topology.Topology;
import org.apache.stratos.messaging.message.receiver.topology.TopologyManager;
import java.io.File;
import java.io.FileNotFoundException;
import java.util.Collection;
import java.util.Iterator;
import java.util.Map;
import java.util.Properties;
/**
* Cartridge agent extension utility methods.
*/
/**
 * Cartridge agent extension utility methods.
 *
 * <p>Extension scripts live in the directory named by the
 * {@code CartridgeAgentConstants.EXTENSIONS_DIR} system property; each
 * {@code execute*Extension} method resolves its script from a system property,
 * optionally injects the STRATOS_* payload environment variables, runs the
 * script and logs its output. Script failures are logged, never propagated.</p>
 */
public class ExtensionUtils {

    private static final Log log = LogFactory.getLog(ExtensionUtils.class);

    /**
     * Returns the extensions directory configured via the
     * {@code CartridgeAgentConstants.EXTENSIONS_DIR} system property.
     *
     * @throws RuntimeException if the system property is not set or blank
     */
    private static String getExtensionsDir() {
        String extensionsDir = System.getProperty(CartridgeAgentConstants.EXTENSIONS_DIR);
        if (StringUtils.isBlank(extensionsDir)) {
            throw new RuntimeException(String.format("System property not found: %s",
                    CartridgeAgentConstants.EXTENSIONS_DIR));
        }
        return extensionsDir;
    }

    /**
     * Resolves a script file name against the extensions directory and verifies
     * that it exists and is a regular file.
     *
     * @param scriptFile script file name relative to the extensions directory
     * @return the absolute script path to execute
     * @throws FileNotFoundException if the script is missing or is a directory
     */
    private static String prepareCommand(String scriptFile) throws FileNotFoundException {
        String extensionsDir = getExtensionsDir();
        String filePath = (extensionsDir.endsWith(File.separator)) ?
                extensionsDir + scriptFile :
                extensionsDir + File.separator + scriptFile;
        File file = new File(filePath);
        if (file.exists() && !file.isDirectory()) {
            return filePath;
        }
        throw new FileNotFoundException("Script file not found:" + filePath);
    }

    /**
     * Adds the standard STRATOS_* payload values (service, cluster, partition,
     * repo, LB addresses, topology-derived properties, ...) to the given
     * environment-parameter map that is passed to extension scripts.
     */
    public static void addPayloadParameters(Map<String, String> envParameters) {
        envParameters.put("STRATOS_APP_PATH", CartridgeAgentConfiguration.getInstance().getAppPath());
        envParameters.put("STRATOS_PARAM_FILE_PATH", System.getProperty(CartridgeAgentConstants.PARAM_FILE_PATH));
        envParameters.put("STRATOS_SERVICE_NAME", CartridgeAgentConfiguration.getInstance().getServiceName());
        envParameters.put("STRATOS_TENANT_ID", CartridgeAgentConfiguration.getInstance().getTenantId());
        envParameters.put("STRATOS_CARTRIDGE_KEY", CartridgeAgentConfiguration.getInstance().getCartridgeKey());
        envParameters.put("STRATOS_LB_CLUSTER_ID", CartridgeAgentConfiguration.getInstance().getLbClusterId());
        envParameters.put("STRATOS_CLUSTER_ID", CartridgeAgentConfiguration.getInstance().getClusterId());
        envParameters.put("STRATOS_NETWORK_PARTITION_ID", CartridgeAgentConfiguration.getInstance().getNetworkPartitionId());
        envParameters.put("STRATOS_PARTITION_ID", CartridgeAgentConfiguration.getInstance().getPartitionId());
        envParameters.put("STRATOS_PERSISTENCE_MAPPINGS", CartridgeAgentConfiguration.getInstance().getPersistenceMappings());
        envParameters.put("STRATOS_REPO_URL", CartridgeAgentConfiguration.getInstance().getRepoUrl());

        // Add LB instance public/private IPs to environment parameters.
        // Prefer the topology member's IPs; fall back to the configured values.
        String lbClusterIdInPayload = CartridgeAgentConfiguration.getInstance().getLbClusterId();
        String[] memberIps = getLbMemberIp(lbClusterIdInPayload);
        String lbIp, lbPublicIp;
        if (memberIps != null && memberIps.length > 1) {
            lbIp = memberIps[0];
            lbPublicIp = memberIps[1];
        } else {
            lbIp = CartridgeAgentConfiguration.getInstance().getLbPrivateIp();
            lbPublicIp = CartridgeAgentConfiguration.getInstance().getLbPublicIp();
        }
        envParameters.put("STRATOS_LB_IP", lbIp);
        envParameters.put("STRATOS_LB_PUBLIC_IP", lbPublicIp);

        Topology topology = TopologyManager.getTopology();
        if (TopologyManager.isInitialized()) {
            Service service = topology.getService(CartridgeAgentConfiguration.getInstance().getServiceName());
            Cluster cluster = service.getCluster(CartridgeAgentConfiguration.getInstance().getClusterId());
            String memberIdInPayload = CartridgeAgentConfiguration.getInstance().getMemberId();
            addProperties(service.getProperties(), envParameters, "SERVICE_PROPERTY");
            addProperties(cluster.getProperties(), envParameters, "CLUSTER_PROPERTY");
            addProperties(cluster.getMember(memberIdInPayload).getProperties(), envParameters, "MEMBER_PROPERTY");
        }
    }

    /**
     * Copies each entry of {@code properties} into {@code envParameters} under the
     * key {@code STRATOS_<prefix>_<propertyKey>}.
     */
    public static void addProperties(Properties properties, Map<String, String> envParameters, String prefix) {
        if (properties == null || properties.entrySet() == null) {
            return;
        }
        for (Map.Entry<Object, Object> entry : properties.entrySet()) {
            // Fixed: the key used to be built with "STRATOS_ " (stray space after the
            // underscore), producing invalid environment variable names.
            String key = "STRATOS_" + prefix + "_" + entry.getKey().toString();
            String value = entry.getValue().toString();
            envParameters.put(key, value);
            if (log.isDebugEnabled()) {
                log.debug(String.format("Property added: [key] %s [value] %s", key, value));
            }
        }
    }

    /**
     * Finds the first topology member belonging to {@code lbClusterId}.
     *
     * @return {@code {privateIp, publicIp}} of the member, or {@code null} if no
     *         member of that cluster is present in the topology
     */
    public static String[] getLbMemberIp(String lbClusterId) {
        Topology topology = TopologyManager.getTopology();
        Collection<Service> serviceCollection = topology.getServices();
        for (Service service : serviceCollection) {
            Collection<Cluster> clusterCollection = service.getClusters();
            for (Cluster cluster : clusterCollection) {
                Collection<Member> memberCollection = cluster.getMembers();
                for (Member member : memberCollection) {
                    if (member.getClusterId().equals(lbClusterId)) {
                        return new String[]{member.getDefaultPrivateIP(), member.getDefaultPublicIP()};
                    }
                }
            }
        }
        return null;
    }

    /**
     * Removes null-valued entries from the map; process environments must not
     * contain null values.
     *
     * @return the same map instance, for call chaining
     */
    private static Map<String, String> cleanProcessParameters(Map<String, String> envParameters) {
        Iterator<Map.Entry<String, String>> iter = envParameters.entrySet().iterator();
        while (iter.hasNext()) {
            Map.Entry<String, String> entry = iter.next();
            if (entry.getValue() == null) {
                iter.remove();
            }
        }
        return envParameters;
    }

    /**
     * Runs an extension script that takes no payload parameters. Failures are
     * logged and swallowed.
     *
     * @param scriptPropertyName system property holding the script file name
     * @param extensionName      human-readable name used in log messages
     */
    private static void executeExtension(String scriptPropertyName, String extensionName) {
        try {
            if (log.isDebugEnabled()) {
                log.debug(String.format("Executing %s extension", extensionName));
            }
            String script = System.getProperty(scriptPropertyName);
            String command = prepareCommand(script);
            String output = CommandUtils.executeCommand(command);
            if (log.isDebugEnabled()) {
                log.debug(String.format("%s script returned: %s", extensionName, output));
            }
        } catch (Exception e) {
            if (log.isErrorEnabled()) {
                log.error(String.format("Could not execute %s extension", extensionName), e);
            }
        }
    }

    /**
     * Runs an extension script with the STRATOS_* payload variables injected into
     * its environment. Failures are logged and swallowed.
     *
     * @param scriptPropertyName system property holding the script file name
     * @param extensionName      human-readable name used in log messages
     * @param envParameters      environment parameters; payload values are added
     *                           and null values removed before execution
     */
    private static void executeExtensionWithPayload(String scriptPropertyName, String extensionName,
                                                    Map<String, String> envParameters) {
        try {
            if (log.isDebugEnabled()) {
                log.debug(String.format("Executing %s extension", extensionName));
            }
            String script = System.getProperty(scriptPropertyName);
            String command = prepareCommand(script);
            addPayloadParameters(envParameters);
            cleanProcessParameters(envParameters);
            String output = CommandUtils.executeCommand(command, envParameters);
            if (log.isDebugEnabled()) {
                log.debug(String.format("%s script returned: %s", extensionName, output));
            }
        } catch (Exception e) {
            if (log.isErrorEnabled()) {
                log.error(String.format("Could not execute %s extension", extensionName), e);
            }
        }
    }

    public static void executeStartServersExtension(Map<String, String> envParameters) {
        executeExtensionWithPayload(CartridgeAgentConstants.START_SERVERS_SCRIPT, "start servers", envParameters);
    }

    public static void executeCleanupExtension() {
        executeExtension(CartridgeAgentConstants.CLEAN_UP_SCRIPT, "cleanup");
    }

    public static void executeInstanceStartedExtension(Map<String, String> envParameters) {
        executeExtensionWithPayload(CartridgeAgentConstants.INSTANCE_STARTED_SCRIPT, "instance started", envParameters);
    }

    public static void executeInstanceActivatedExtension() {
        executeExtension(CartridgeAgentConstants.INSTANCE_ACTIVATED_SCRIPT, "instance activated");
    }

    public static void executeArtifactsUpdatedExtension(Map<String, String> envParameters) {
        executeExtensionWithPayload(CartridgeAgentConstants.ARTIFACTS_UPDATED_SCRIPT, "artifacts updated", envParameters);
    }

    /**
     * Runs the artifacts-copy script with {@code source} and {@code destination}
     * passed as command-line arguments.
     */
    public static void executeCopyArtifactsExtension(String source, String destination) {
        try {
            if (log.isDebugEnabled()) {
                log.debug("Executing artifacts copy extension");
            }
            String command = prepareCommand(System.getProperty(CartridgeAgentConstants.ARTIFACTS_COPY_SCRIPT));
            CommandUtils.executeCommand(command + " " + source + " " + destination);
        } catch (Exception e) {
            log.error("Could not execute artifacts copy extension", e);
        }
    }

    /*
    This will execute the volume mounting script which format and mount the
    persistance volumes.
     */
    public static void executeVolumeMountExtension(String persistenceMappingsPayload) {
        try {
            if (log.isDebugEnabled()) {
                log.debug(String.format("Executing volume mounting extension: [payload] %s", persistenceMappingsPayload));
            }
            String script = System.getProperty(CartridgeAgentConstants.MOUNT_VOLUMES_SCRIPT);
            String command = prepareCommand(script);
            // The payload is passed as an argument so the script can source it to
            // obtain the env variables set by the startup script.
            String output = CommandUtils.executeCommand(command + " " + persistenceMappingsPayload);
            if (log.isDebugEnabled()) {
                log.debug("Volume mount script returned:" + output);
            }
        } catch (Exception e) {
            if (log.isErrorEnabled()) {
                log.error("Could not execute volume mounting extension", e);
            }
        }
    }

    public static void executeMemberActivatedExtension(Map<String, String> envParameters) {
        executeExtensionWithPayload(CartridgeAgentConstants.MEMBER_ACTIVATED_SCRIPT, "member activated", envParameters);
    }

    public static void executeMemberTerminatedExtension(Map<String, String> envParameters) {
        executeExtensionWithPayload(CartridgeAgentConstants.MEMBER_TERMINATED_SCRIPT, "member terminated", envParameters);
    }

    public static void executeMemberStartedExtension(Map<String, String> envParameters) {
        executeExtensionWithPayload(CartridgeAgentConstants.MEMBER_STARTED_SCRIPT, "member started", envParameters);
    }

    public static void executeMemberSuspendedExtension(Map<String, String> envParameters) {
        executeExtensionWithPayload(CartridgeAgentConstants.MEMBER_SUSPENDED_SCRIPT, "member suspended", envParameters);
    }

    public static void executeCompleteTopologyExtension(Map<String, String> envParameters) {
        executeExtensionWithPayload(CartridgeAgentConstants.COMPLETE_TOPOLOGY_SCRIPT, "complete topology", envParameters);
    }

    public static void executeCompleteTenantExtension(Map<String, String> envParameters) {
        executeExtensionWithPayload(CartridgeAgentConstants.COMPLETE_TENANT_SCRIPT, "complete tenant", envParameters);
    }

    public static void executeDomainMappingAddedExtension(Map<String, String> envParameters) {
        executeExtensionWithPayload(CartridgeAgentConstants.DOMAIN_MAPPING_ADDED_SCRIPT, "domain mapping added", envParameters);
    }

    public static void executeDomainMappingRemovedExtension(Map<String, String> envParameters) {
        executeExtensionWithPayload(CartridgeAgentConstants.DOMAIN_MAPPING_REMOVED_SCRIPT, "domain mapping removed", envParameters);
    }

    public static void executeTenantSubscribedExtension(Map<String, String> envParameters) {
        executeExtensionWithPayload(CartridgeAgentConstants.TENANT_SUBSCRIBED_SCRIPT, "tenant subscribed", envParameters);
    }

    public static void executeTenantUnSubscribedExtension(Map<String, String> envParameters) {
        executeExtensionWithPayload(CartridgeAgentConstants.TENANT_UNSUBSCRIBED_SCRIPT, "tenant un-subscribed", envParameters);
    }

    /**
     * @return true if the topology has received the complete-topology event
     */
    public static boolean isTopologyInitialized() {
        TopologyManager.acquireReadLock();
        try {
            return TopologyManager.isInitialized();
        } finally {
            // Release in finally so an exception cannot leak the read lock.
            TopologyManager.releaseReadLock();
        }
    }

    /**
     * Blocks until the complete topology event has been received, polling every
     * five seconds. Returns early if the thread is interrupted.
     */
    public static void waitForCompleteTopology() {
        while (!isTopologyInitialized()) {
            if (log.isInfoEnabled()) {
                log.info("Waiting for complete topology event...");
            }
            try {
                Thread.sleep(5000);
            } catch (InterruptedException e) {
                // Restore the interrupt status and stop waiting instead of silently
                // swallowing the interrupt as the previous version did.
                Thread.currentThread().interrupt();
                return;
            }
        }
    }

    /**
     * Check if the specified member is in the topology and is in MemberStatus.Initialized state
     *
     * @param serviceName
     * @param clusterId
     * @param memberId
     * @return true if member is present in the topology and in initialized state, false otherwise
     */
    public static boolean checkTopologyConsistency(String serviceName, String clusterId, String memberId) {
        Member activatedMember = getMemberFromTopology(serviceName, clusterId, memberId);
        // this doesn't work for all events: if (activatedMember.getStatus() != MemberStatus.Initialized) {
        if (activatedMember == null) {
            if (log.isErrorEnabled()) {
                log.error(String.format("Member not found in topology [member] %s %s %s ", memberId, clusterId, serviceName));
            }
            return false;
        }
        return true;
    }

    /**
     * Gets the specified member from the topology
     *
     * @param serviceName
     * @param clusterId
     * @param memberId
     * @return {@link org.apache.stratos.messaging.domain.topology.Member} if member is in the topology, null otherwise
     */
    public static Member getMemberFromTopology(String serviceName, String clusterId, String memberId) {
        Topology topology = TopologyManager.getTopology();
        Service service = topology.getService(serviceName);
        if (service == null) {
            if (log.isErrorEnabled()) {
                log.error(String.format("Service not found in topology [service] %s", serviceName));
            }
            return null;
        }
        Cluster cluster = service.getCluster(clusterId);
        if (cluster == null) {
            if (log.isErrorEnabled()) {
                log.error(String.format("Cluster id not found in topology [cluster] %s", clusterId));
            }
            return null;
        }
        Member activatedMember = cluster.getMember(memberId);
        if (activatedMember == null) {
            if (log.isErrorEnabled()) {
                log.error(String.format("Member id not found in topology [member] %s", memberId));
            }
            return null;
        }
        return activatedMember;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.tests.unit.core.journal.impl;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.activemq.artemis.core.io.SequentialFile;
import org.apache.activemq.artemis.core.io.SequentialFileFactory;
import org.apache.activemq.artemis.core.journal.EncodingSupport;
import org.apache.activemq.artemis.core.journal.LoaderCallback;
import org.apache.activemq.artemis.core.journal.PreparedTransactionInfo;
import org.apache.activemq.artemis.core.journal.RecordInfo;
import org.apache.activemq.artemis.core.journal.TransactionFailureCallback;
import org.apache.activemq.artemis.core.journal.impl.JournalImpl;
import org.apache.activemq.artemis.tests.unit.UnitTestLogger;
import org.apache.activemq.artemis.tests.unit.core.journal.impl.fakes.FakeSequentialFileFactory;
import org.apache.activemq.artemis.tests.unit.core.journal.impl.fakes.SimpleEncoding;
import org.apache.activemq.artemis.tests.util.ActiveMQTestBase;
import org.apache.activemq.artemis.utils.Wait;
import org.jboss.logging.Logger;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
public class AlignedJournalImplTest extends ActiveMQTestBase {
private static final Logger log = Logger.getLogger(AlignedJournalImplTest.class);
// Constants -----------------------------------------------------
// No-op loader used by tests that only need the journal to load without
// inspecting the loaded records; every callback intentionally does nothing.
private static final LoaderCallback dummyLoader = new LoaderCallback() {
   @Override
   public void addPreparedTransaction(final PreparedTransactionInfo preparedTransaction) {
   }

   @Override
   public void addRecord(final RecordInfo info) {
   }

   @Override
   public void deleteRecord(final long id) {
   }

   @Override
   public void updateRecord(final RecordInfo info) {
   }

   @Override
   public void failedTransaction(final long transactionID,
                                 final List<RecordInfo> records,
                                 final List<RecordInfo> recordsToDelete) {
   }
};
// Attributes ----------------------------------------------------
private SequentialFileFactory factory;
JournalImpl journalImpl = null;
private ArrayList<RecordInfo> records = null;
private ArrayList<Long> incompleteTransactions = null;
private ArrayList<PreparedTransactionInfo> transactions = null;
// Static --------------------------------------------------------
// Constructors --------------------------------------------------
// Public --------------------------------------------------------
// This test just validates basic alignment on the FakeSequentialFile itself
@Test
public void testBasicAlignment() throws Exception {
FakeSequentialFileFactory factory = new FakeSequentialFileFactory(200, true);
SequentialFile file = factory.createSequentialFile("test1");
file.open();
try {
ByteBuffer buffer = ByteBuffer.allocateDirect(200);
for (int i = 0; i < 200; i++) {
buffer.put(i, (byte) 1);
}
file.writeDirect(buffer, true);
buffer = ByteBuffer.allocate(400);
for (int i = 0; i < 400; i++) {
buffer.put(i, (byte) 2);
}
file.writeDirect(buffer, true);
buffer = ByteBuffer.allocate(600);
file.position(0);
file.read(buffer);
for (int i = 0; i < 200; i++) {
Assert.assertEquals((byte) 1, buffer.get(i));
}
for (int i = 201; i < 600; i++) {
Assert.assertEquals("Position " + i, (byte) 2, buffer.get(i));
}
} catch (Exception ignored) {
}
}
@Test
public void testInconsistentAlignment() throws Exception {
factory = new FakeSequentialFileFactory(512, true);
try {
journalImpl = new JournalImpl(2000, 2, 2, 0, 0, factory, "tt", "tt", 1000);
Assert.fail("Expected IllegalArgumentException");
} catch (IllegalArgumentException ignored) {
// expected
}
}
@Test
public void testSimpleAdd() throws Exception {
final int JOURNAL_SIZE = 1060;
setupAndLoadJournal(JOURNAL_SIZE, 10);
journalImpl.appendAddRecord(13, (byte) 14, new SimpleEncoding(1, (byte) 15), false);
journalImpl.forceMoveNextFile();
journalImpl.checkReclaimStatus();
setupAndLoadJournal(JOURNAL_SIZE, 10);
Assert.assertEquals(1, records.size());
Assert.assertEquals(13, records.get(0).id);
Assert.assertEquals(14, records.get(0).userRecordType);
Assert.assertEquals(1, records.get(0).data.length);
Assert.assertEquals(15, records.get(0).data[0]);
}
@Test
public void testAppendAndUpdateRecords() throws Exception {
final int JOURNAL_SIZE = 1060;
setupAndLoadJournal(JOURNAL_SIZE, 10);
Assert.assertEquals(0, records.size());
Assert.assertEquals(0, transactions.size());
for (int i = 0; i < 25; i++) {
byte[] bytes = new byte[5];
for (int j = 0; j < bytes.length; j++) {
bytes[j] = (byte) i;
}
journalImpl.appendAddRecord(i * 100L, (byte) i, bytes, false);
}
for (int i = 25; i < 50; i++) {
EncodingSupport support = new SimpleEncoding(5, (byte) i);
journalImpl.appendAddRecord(i * 100L, (byte) i, support, false);
}
setupAndLoadJournal(JOURNAL_SIZE, 1024);
Assert.assertEquals(50, records.size());
int i = 0;
for (RecordInfo recordItem : records) {
Assert.assertEquals(i * 100L, recordItem.id);
Assert.assertEquals(i, recordItem.getUserRecordType());
Assert.assertEquals(5, recordItem.data.length);
for (int j = 0; j < 5; j++) {
Assert.assertEquals((byte) i, recordItem.data[j]);
}
i++;
}
for (i = 40; i < 50; i++) {
byte[] bytes = new byte[10];
for (int j = 0; j < 10; j++) {
bytes[j] = (byte) 'x';
}
journalImpl.appendUpdateRecord(i * 100L, (byte) i, bytes, false);
}
setupAndLoadJournal(JOURNAL_SIZE, 1024);
i = 0;
for (RecordInfo recordItem : records) {
if (i < 50) {
Assert.assertEquals(i * 100L, recordItem.id);
Assert.assertEquals(i, recordItem.getUserRecordType());
Assert.assertEquals(5, recordItem.data.length);
for (int j = 0; j < 5; j++) {
Assert.assertEquals((byte) i, recordItem.data[j]);
}
} else {
Assert.assertEquals((i - 10) * 100L, recordItem.id);
Assert.assertEquals(i - 10, recordItem.getUserRecordType());
Assert.assertTrue(recordItem.isUpdate);
Assert.assertEquals(10, recordItem.data.length);
for (int j = 0; j < 10; j++) {
Assert.assertEquals((byte) 'x', recordItem.data[j]);
}
}
i++;
}
journalImpl.stop();
}
@Test
public void testPartialDelete() throws Exception {
final int JOURNAL_SIZE = 10000;
setupAndLoadJournal(JOURNAL_SIZE, 100);
journalImpl.setAutoReclaim(false);
journalImpl.checkReclaimStatus();
journalImpl.debugWait();
Assert.assertEquals(2, factory.listFiles("tt").size());
UnitTestLogger.LOGGER.debug("Initial:--> " + journalImpl.debug());
UnitTestLogger.LOGGER.debug("_______________________________");
for (int i = 0; i < 50; i++) {
journalImpl.appendAddRecord(i, (byte) 1, new SimpleEncoding(1, (byte) 'x'), false);
}
journalImpl.forceMoveNextFile();
// as the request to a new file is asynchronous, we need to make sure the
// async requests are done
journalImpl.debugWait();
Assert.assertEquals(3, factory.listFiles("tt").size());
for (int i = 10; i < 50; i++) {
journalImpl.appendDeleteRecord(i, false);
}
journalImpl.debugWait();
setupAndLoadJournal(JOURNAL_SIZE, 100);
Assert.assertEquals(10, records.size());
Assert.assertEquals(3, factory.listFiles("tt").size());
}
@Test
public void testAddAndDeleteReclaimWithoutTransactions() throws Exception {
final int JOURNAL_SIZE = 10000;
setupAndLoadJournal(JOURNAL_SIZE, 1);
journalImpl.setAutoReclaim(false);
journalImpl.checkReclaimStatus();
journalImpl.debugWait();
Assert.assertEquals(2, factory.listFiles("tt").size());
UnitTestLogger.LOGGER.debug("Initial:--> " + journalImpl.debug());
UnitTestLogger.LOGGER.debug("_______________________________");
for (int i = 0; i < 50; i++) {
journalImpl.appendAddRecord(i, (byte) 1, new SimpleEncoding(1, (byte) 'x'), false);
}
// as the request to a new file is asynchronous, we need to make sure the
// async requests are done
journalImpl.debugWait();
Assert.assertEquals(2, factory.listFiles("tt").size());
for (int i = 0; i < 50; i++) {
journalImpl.appendDeleteRecord(i, false);
}
journalImpl.forceMoveNextFile();
journalImpl.appendAddRecord(1000, (byte) 1, new SimpleEncoding(1, (byte) 'x'), false);
journalImpl.debugWait();
Assert.assertEquals(3, factory.listFiles("tt").size());
setupAndLoadJournal(JOURNAL_SIZE, 1);
Assert.assertEquals(1, records.size());
Assert.assertEquals(1000, records.get(0).id);
journalImpl.checkReclaimStatus();
UnitTestLogger.LOGGER.debug(journalImpl.debug());
journalImpl.debugWait();
UnitTestLogger.LOGGER.debug("Final:--> " + journalImpl.debug());
UnitTestLogger.LOGGER.debug("_______________________________");
UnitTestLogger.LOGGER.debug("Files bufferSize:" + factory.listFiles("tt").size());
Assert.assertEquals(2, factory.listFiles("tt").size());
}
@Test
public void testReloadWithTransaction() throws Exception {
final int JOURNAL_SIZE = 2000;
setupAndLoadJournal(JOURNAL_SIZE, 100);
Assert.assertEquals(0, records.size());
Assert.assertEquals(0, transactions.size());
journalImpl.appendAddRecordTransactional(1, 1, (byte) 1, new SimpleEncoding(1, (byte) 1));
setupAndLoadJournal(JOURNAL_SIZE, 100);
Assert.assertEquals(0, records.size());
Assert.assertEquals(0, transactions.size());
try {
journalImpl.appendCommitRecord(1L, true);
// This was supposed to throw an exception, as the transaction was
// forgotten (interrupted by a reload).
Assert.fail("Supposed to throw exception");
} catch (Exception e) {
UnitTestLogger.LOGGER.warn(e);
}
setupAndLoadJournal(JOURNAL_SIZE, 100);
Assert.assertEquals(0, records.size());
Assert.assertEquals(0, transactions.size());
}
@Test
public void testReloadWithInterruptedTransaction() throws Exception {
final int JOURNAL_SIZE = 1100;
setupAndLoadJournal(JOURNAL_SIZE, 100);
journalImpl.setAutoReclaim(false);
Assert.assertEquals(0, records.size());
Assert.assertEquals(0, transactions.size());
for (int i = 0; i < 10; i++) {
journalImpl.appendAddRecordTransactional(77L, 1, (byte) 1, new SimpleEncoding(1, (byte) 1));
journalImpl.forceMoveNextFile();
}
journalImpl.debugWait();
Assert.assertEquals(12, factory.listFiles("tt").size());
journalImpl.appendAddRecordTransactional(78L, 1, (byte) 1, new SimpleEncoding(1, (byte) 1));
Assert.assertEquals(12, factory.listFiles("tt").size());
setupAndLoadJournal(JOURNAL_SIZE, 100);
Assert.assertEquals(0, records.size());
Assert.assertEquals(0, transactions.size());
Assert.assertEquals(2, incompleteTransactions.size());
Assert.assertEquals((Long) 77L, incompleteTransactions.get(0));
Assert.assertEquals((Long) 78L, incompleteTransactions.get(1));
try {
journalImpl.appendCommitRecord(77L, true);
// This was supposed to throw an exception, as the transaction was
// forgotten (interrupted by a reload).
Assert.fail("Supposed to throw exception");
} catch (Exception e) {
UnitTestLogger.LOGGER.debug("Expected exception " + e, e);
}
setupAndLoadJournal(JOURNAL_SIZE, 100);
journalImpl.forceMoveNextFile();
journalImpl.checkReclaimStatus();
Assert.assertEquals(0, records.size());
Assert.assertEquals(0, transactions.size());
}
@Test
public void testReloadWithCompletedTransaction() throws Exception {
final int JOURNAL_SIZE = 2000;
setupAndLoadJournal(JOURNAL_SIZE, 100);
Assert.assertEquals(0, records.size());
Assert.assertEquals(0, transactions.size());
for (int i = 0; i < 10; i++) {
journalImpl.appendAddRecordTransactional(1, i, (byte) 1, new SimpleEncoding(1, (byte) 1));
journalImpl.forceMoveNextFile();
}
journalImpl.appendCommitRecord(1L, false);
journalImpl.debugWait();
Assert.assertEquals(12, factory.listFiles("tt").size());
setupAndLoadJournal(JOURNAL_SIZE, 100);
Assert.assertEquals(10, records.size());
Assert.assertEquals(0, transactions.size());
journalImpl.checkReclaimStatus();
Assert.assertEquals(10, journalImpl.getDataFilesCount());
Assert.assertEquals(12, factory.listFiles("tt").size());
for (int i = 0; i < 10; i++) {
journalImpl.appendDeleteRecordTransactional(2L, i);
journalImpl.forceMoveNextFile();
}
journalImpl.appendCommitRecord(2L, false);
journalImpl.appendAddRecord(100, (byte) 1, new SimpleEncoding(5, (byte) 1), false);
journalImpl.forceMoveNextFile();
journalImpl.appendAddRecord(101, (byte) 1, new SimpleEncoding(5, (byte) 1), false);
journalImpl.checkReclaimStatus();
Assert.assertEquals(1, journalImpl.getDataFilesCount());
setupAndLoadJournal(JOURNAL_SIZE, 100);
Assert.assertEquals(1, journalImpl.getDataFilesCount());
Assert.assertEquals(3, factory.listFiles("tt").size());
}
@Test
public void testTotalSize() throws Exception {
final int JOURNAL_SIZE = 2000;
setupAndLoadJournal(JOURNAL_SIZE, 100);
Assert.assertEquals(0, records.size());
Assert.assertEquals(0, transactions.size());
journalImpl.appendAddRecordTransactional(1L, 2L, (byte) 3, new SimpleEncoding(1900 - JournalImpl.SIZE_ADD_RECORD_TX - 1, (byte) 4));
journalImpl.appendCommitRecord(1L, false);
journalImpl.debugWait();
setupAndLoadJournal(JOURNAL_SIZE, 100);
Assert.assertEquals(1, records.size());
}
@Test
public void testReloadInvalidCheckSizeOnTransaction() throws Exception {
final int JOURNAL_SIZE = 2000;
setupAndLoadJournal(JOURNAL_SIZE, 100);
Assert.assertEquals(2, factory.listFiles("tt").size());
Assert.assertEquals(0, records.size());
Assert.assertEquals(0, transactions.size());
for (int i = 0; i < 2; i++) {
journalImpl.appendAddRecordTransactional(1L, i, (byte) 0, new SimpleEncoding(1, (byte) 15));
}
journalImpl.appendCommitRecord(1L, false);
journalImpl.debugWait();
log.debug("Files = " + factory.listFiles("tt"));
SequentialFile file = factory.createSequentialFile("tt-1.tt");
file.open();
ByteBuffer buffer = ByteBuffer.allocate(100);
// Messing up with the first record (removing the position)
file.position(100);
file.read(buffer);
// jumping RecordType, FileId, TransactionID, RecordID, VariableSize,
// RecordType, RecordBody (that we know it is 1 )
buffer.position(1 + 4 + 8 + 8 + 4 + 1 + 1 + 1);
int posCheckSize = buffer.position();
Assert.assertEquals(JournalImpl.SIZE_ADD_RECORD_TX + 2, buffer.getInt());
buffer.position(posCheckSize);
buffer.putInt(-1);
buffer.rewind();
// Changing the check bufferSize, so reload will ignore this record
file.position(100);
file.writeDirect(buffer, true);
file.close();
setupAndLoadJournal(JOURNAL_SIZE, 100);
Assert.assertEquals(0, records.size());
journalImpl.checkReclaimStatus();
Assert.assertEquals(0, journalImpl.getDataFilesCount());
Assert.assertEquals(2, factory.listFiles("tt").size());
}
@Test
public void testPartiallyBrokenFile() throws Exception {
final int JOURNAL_SIZE = 20000;
setupAndLoadJournal(JOURNAL_SIZE, 100);
Assert.assertEquals(2, factory.listFiles("tt").size());
Assert.assertEquals(0, records.size());
Assert.assertEquals(0, transactions.size());
for (int i = 0; i < 20; i++) {
journalImpl.appendAddRecordTransactional(1L, i, (byte) 0, new SimpleEncoding(1, (byte) 15));
journalImpl.appendAddRecordTransactional(2L, i + 20L, (byte) 0, new SimpleEncoding(1, (byte) 15));
}
journalImpl.appendCommitRecord(1L, false);
journalImpl.appendCommitRecord(2L, false);
journalImpl.debugWait();
SequentialFile file = factory.createSequentialFile("tt-1.tt");
file.open();
ByteBuffer buffer = ByteBuffer.allocate(100);
// Messing up with the first record (removing the position)
file.position(100);
file.read(buffer);
// jumping RecordType, FileId, TransactionID, RecordID, VariableSize,
// RecordType, RecordBody (that we know it is 1 )
buffer.position(1 + 4 + 8 + 8 + 4 + 1 + 1 + 1);
int posCheckSize = buffer.position();
Assert.assertEquals(JournalImpl.SIZE_ADD_RECORD_TX + 2, buffer.getInt());
buffer.position(posCheckSize);
buffer.putInt(-1);
buffer.rewind();
// Changing the check bufferSize, so reload will ignore this record
file.position(100);
file.writeDirect(buffer, true);
file.close();
setupAndLoadJournal(JOURNAL_SIZE, 100);
Assert.assertEquals(20, records.size());
journalImpl.checkReclaimStatus();
}
@Test
public void testReduceFreeFiles() throws Exception {
final int JOURNAL_SIZE = 2000;
setupAndLoadJournal(JOURNAL_SIZE, 100, 10);
Assert.assertEquals(10, factory.listFiles("tt").size());
setupAndLoadJournal(JOURNAL_SIZE, 100, 2);
Assert.assertEquals(10, factory.listFiles("tt").size());
for (int i = 0; i < 10; i++) {
journalImpl.appendAddRecord(i, (byte) 0, new SimpleEncoding(1, (byte) 0), false);
journalImpl.forceMoveNextFile();
}
setupAndLoadJournal(JOURNAL_SIZE, 100, 2);
Assert.assertEquals(10, records.size());
Assert.assertEquals(12, factory.listFiles("tt").size());
for (int i = 0; i < 10; i++) {
journalImpl.appendDeleteRecord(i, false);
}
journalImpl.forceMoveNextFile();
journalImpl.checkReclaimStatus();
setupAndLoadJournal(JOURNAL_SIZE, 100, 2);
Assert.assertEquals(0, records.size());
Assert.assertEquals(2, factory.listFiles("tt").size());
}
@Test
public void testReloadIncompleteTransaction() throws Exception {
final int JOURNAL_SIZE = 2000;
setupAndLoadJournal(JOURNAL_SIZE, 1);
Assert.assertEquals(2, factory.listFiles("tt").size());
Assert.assertEquals(0, records.size());
Assert.assertEquals(0, transactions.size());
for (int i = 0; i < 10; i++) {
journalImpl.appendAddRecordTransactional(1L, i, (byte) 0, new SimpleEncoding(1, (byte) 15));
}
for (int i = 10; i < 20; i++) {
journalImpl.appendAddRecordTransactional(1L, i, (byte) 0, new SimpleEncoding(1, (byte) 15));
}
journalImpl.appendCommitRecord(1L, false);
journalImpl.debugWait();
SequentialFile file = factory.createSequentialFile("tt-1.tt");
file.open();
ByteBuffer buffer = ByteBuffer.allocate(100);
// Messing up with the first record (removing the position)
file.position(100);
file.read(buffer);
buffer.position(1);
buffer.putInt(-1);
buffer.rewind();
// Messing up with the first record (changing the fileID, so Journal
// reload will think the record came from a different journal usage)
file.position(100);
buffer.rewind();
file.writeDirect(buffer, true);
file.close();
setupAndLoadJournal(JOURNAL_SIZE, 100);
Assert.assertEquals(0, records.size());
journalImpl.checkReclaimStatus();
Assert.assertEquals(0, journalImpl.getDataFilesCount());
Assert.assertEquals(2, factory.listFiles("tt").size());
}
@Test
public void testPrepareAloneOnSeparatedFile() throws Exception {
final int JOURNAL_SIZE = 20000;
setupAndLoadJournal(JOURNAL_SIZE, 100);
Assert.assertEquals(0, records.size());
Assert.assertEquals(0, transactions.size());
for (int i = 0; i < 10; i++) {
journalImpl.appendAddRecordTransactional(1L, i, (byte) 0, new SimpleEncoding(1, (byte) 15));
}
journalImpl.forceMoveNextFile();
SimpleEncoding xidEncoding = new SimpleEncoding(10, (byte) 'a');
journalImpl.appendPrepareRecord(1L, xidEncoding, false);
journalImpl.appendCommitRecord(1L, false);
for (int i = 0; i < 10; i++) {
journalImpl.appendDeleteRecordTransactional(2L, i);
}
journalImpl.appendCommitRecord(2L, false);
journalImpl.appendAddRecord(100L, (byte) 0, new SimpleEncoding(1, (byte) 10), false); // Add
// anything
// to
// keep
// holding
// the
// file
journalImpl.forceMoveNextFile();
journalImpl.checkReclaimStatus();
setupAndLoadJournal(JOURNAL_SIZE, 100);
Assert.assertEquals(1, records.size());
}
@Test
public void testCommitWithMultipleFiles() throws Exception {
final int JOURNAL_SIZE = 20000;
setupAndLoadJournal(JOURNAL_SIZE, 100);
Assert.assertEquals(0, records.size());
Assert.assertEquals(0, transactions.size());
for (int i = 0; i < 50; i++) {
if (i == 10) {
journalImpl.forceMoveNextFile();
}
journalImpl.appendAddRecordTransactional(1L, i, (byte) 0, new SimpleEncoding(1, (byte) 15));
}
journalImpl.appendCommitRecord(1L, false);
for (int i = 0; i < 10; i++) {
if (i == 5) {
journalImpl.forceMoveNextFile();
}
journalImpl.appendDeleteRecordTransactional(2L, i);
}
journalImpl.appendCommitRecord(2L, false);
journalImpl.forceMoveNextFile();
journalImpl.checkReclaimStatus();
setupAndLoadJournal(JOURNAL_SIZE, 100);
Assert.assertEquals(40, records.size());
}
@Test
public void testSimplePrepare() throws Exception {
final int JOURNAL_SIZE = 3 * 1024;
setupAndLoadJournal(JOURNAL_SIZE, 1);
Assert.assertEquals(0, records.size());
Assert.assertEquals(0, transactions.size());
SimpleEncoding xid = new SimpleEncoding(10, (byte) 1);
journalImpl.appendAddRecord(10L, (byte) 0, new SimpleEncoding(10, (byte) 0), false);
journalImpl.appendDeleteRecordTransactional(1L, 10L, new SimpleEncoding(100, (byte) 'j'));
journalImpl.appendPrepareRecord(1, xid, false);
journalImpl.debugWait();
setupAndLoadJournal(JOURNAL_SIZE, 1);
Assert.assertEquals(1, transactions.size());
Assert.assertEquals(1, transactions.get(0).getRecordsToDelete().size());
Assert.assertEquals(1, records.size());
for (RecordInfo record : transactions.get(0).getRecordsToDelete()) {
byte[] data = record.data;
Assert.assertEquals(100, data.length);
for (byte element : data) {
Assert.assertEquals((byte) 'j', element);
}
}
Assert.assertEquals(10, transactions.get(0).getExtraData().length);
for (int i = 0; i < 10; i++) {
Assert.assertEquals((byte) 1, transactions.get(0).getExtraData()[i]);
}
journalImpl.appendCommitRecord(1L, false);
journalImpl.debugWait();
setupAndLoadJournal(JOURNAL_SIZE, 1);
Assert.assertEquals(0, transactions.size());
Assert.assertEquals(0, records.size());
}
@Test
public void testReloadWithPreparedTransaction() throws Exception {
final int JOURNAL_SIZE = 3 * 1024;
setupAndLoadJournal(JOURNAL_SIZE, 1);
Assert.assertEquals(0, records.size());
Assert.assertEquals(0, transactions.size());
for (int i = 0; i < 10; i++) {
journalImpl.appendAddRecordTransactional(1, i, (byte) 1, new SimpleEncoding(50, (byte) 1));
journalImpl.forceMoveNextFile();
}
journalImpl.debugWait();
SimpleEncoding xid1 = new SimpleEncoding(10, (byte) 1);
journalImpl.appendPrepareRecord(1L, xid1, false);
Assert.assertEquals(12, factory.listFiles("tt").size());
setupAndLoadJournal(JOURNAL_SIZE, 1024);
Assert.assertEquals(0, records.size());
Assert.assertEquals(1, transactions.size());
Assert.assertEquals(10, transactions.get(0).getExtraData().length);
for (int i = 0; i < 10; i++) {
Assert.assertEquals((byte) 1, transactions.get(0).getExtraData()[i]);
}
journalImpl.checkReclaimStatus();
Assert.assertEquals(10, journalImpl.getDataFilesCount());
Assert.assertEquals(12, factory.listFiles("tt").size());
journalImpl.appendCommitRecord(1L, false);
setupAndLoadJournal(JOURNAL_SIZE, 1024);
Assert.assertEquals(10, records.size());
journalImpl.checkReclaimStatus();
for (int i = 0; i < 10; i++) {
journalImpl.appendDeleteRecordTransactional(2L, i);
}
SimpleEncoding xid2 = new SimpleEncoding(15, (byte) 2);
journalImpl.appendPrepareRecord(2L, xid2, false);
setupAndLoadJournal(JOURNAL_SIZE, 1);
Assert.assertEquals(1, transactions.size());
Assert.assertEquals(15, transactions.get(0).getExtraData().length);
for (byte element : transactions.get(0).getExtraData()) {
Assert.assertEquals(2, element);
}
Assert.assertEquals(10, journalImpl.getDataFilesCount());
Assert.assertEquals(12, factory.listFiles("tt").size());
journalImpl.appendCommitRecord(2L, false);
setupAndLoadJournal(JOURNAL_SIZE, 1);
Assert.assertEquals(0, records.size());
Assert.assertEquals(0, transactions.size());
journalImpl.forceMoveNextFile();
// Reclaiming should still be able to reclaim a file if a transaction was ignored
journalImpl.checkReclaimStatus();
journalImpl.flush();
}
@Test
public void testReloadInvalidPrepared() throws Exception {
final int JOURNAL_SIZE = 3000;
setupAndLoadJournal(JOURNAL_SIZE, 100);
Assert.assertEquals(0, records.size());
Assert.assertEquals(0, transactions.size());
for (int i = 0; i < 10; i++) {
journalImpl.appendAddRecordTransactional(1, i, (byte) 1, new SimpleEncoding(50, (byte) 1));
}
journalImpl.appendPrepareRecord(1L, new SimpleEncoding(13, (byte) 0), false);
setupAndLoadJournal(JOURNAL_SIZE, 100);
Assert.assertEquals(0, records.size());
Assert.assertEquals(1, transactions.size());
SequentialFile file = factory.createSequentialFile("tt-1.tt");
file.open();
ByteBuffer buffer = ByteBuffer.allocate(100);
// Messing up with the first record (removing the position)
file.position(100);
file.read(buffer);
buffer.position(1);
buffer.putInt(-1);
buffer.rewind();
// Messing up with the first record (changing the fileID, so Journal
// reload will think the record came from a different journal usage)
file.position(100);
file.writeDirect(buffer, true);
file.close();
setupAndLoadJournal(JOURNAL_SIZE, 100);
Assert.assertEquals(0, records.size());
Assert.assertEquals(0, transactions.size());
}
@Test
public void testReclaimAfterRollabck() throws Exception {
final int JOURNAL_SIZE = 2000;
final int COUNT = 10;
setupAndLoadJournal(JOURNAL_SIZE, 1);
for (int i = 0; i < COUNT; i++) {
journalImpl.appendAddRecordTransactional(1L, i, (byte) 0, new SimpleEncoding(1, (byte) 0));
journalImpl.forceMoveNextFile();
}
journalImpl.appendRollbackRecord(1L, false);
journalImpl.forceMoveNextFile();
// wait for the previous call to forceMoveNextFile() to complete
assertTrue(Wait.waitFor(() -> factory.listFiles("tt").size() == COUNT + 3, 2000, 50));
journalImpl.checkReclaimStatus();
Assert.assertEquals(0, journalImpl.getDataFilesCount());
setupAndLoadJournal(JOURNAL_SIZE, 1);
Assert.assertEquals(0, journalImpl.getDataFilesCount());
Assert.assertEquals(2, factory.listFiles("tt").size());
}
// It should be ok to write records on AIO, and later read then on NIO
@Test
public void testDecreaseAlignment() throws Exception {
final int JOURNAL_SIZE = 512 * 4;
setupAndLoadJournal(JOURNAL_SIZE, 512);
for (int i = 0; i < 10; i++) {
journalImpl.appendAddRecordTransactional(1L, i, (byte) 0, new SimpleEncoding(1, (byte) 0));
}
journalImpl.appendCommitRecord(1L, false);
setupAndLoadJournal(JOURNAL_SIZE, 100);
Assert.assertEquals(10, records.size());
setupAndLoadJournal(JOURNAL_SIZE, 1);
Assert.assertEquals(10, records.size());
}
// It should be ok to write records on NIO, and later read then on AIO
@Test
public void testIncreaseAlignment() throws Exception {
final int JOURNAL_SIZE = 512 * 4;
setupAndLoadJournal(JOURNAL_SIZE, 1);
for (int i = 0; i < 10; i++) {
journalImpl.appendAddRecordTransactional(1L, i, (byte) 0, new SimpleEncoding(1, (byte) 0));
}
journalImpl.appendCommitRecord(1L, false);
setupAndLoadJournal(JOURNAL_SIZE, 100);
Assert.assertEquals(10, records.size());
setupAndLoadJournal(JOURNAL_SIZE, 512);
Assert.assertEquals(10, records.size());
}
@Test
public void testEmptyPrepare() throws Exception {
final int JOURNAL_SIZE = 512 * 4;
setupAndLoadJournal(JOURNAL_SIZE, 1);
journalImpl.appendPrepareRecord(2L, new SimpleEncoding(10, (byte) 'j'), false);
journalImpl.forceMoveNextFile();
journalImpl.appendAddRecord(1L, (byte) 0, new SimpleEncoding(10, (byte) 'k'), false);
setupAndLoadJournal(JOURNAL_SIZE, 1);
Assert.assertEquals(1, journalImpl.getDataFilesCount());
Assert.assertEquals(1, transactions.size());
journalImpl.forceMoveNextFile();
setupAndLoadJournal(JOURNAL_SIZE, 1);
Assert.assertEquals(1, journalImpl.getDataFilesCount());
Assert.assertEquals(1, transactions.size());
journalImpl.appendCommitRecord(2L, false);
journalImpl.appendDeleteRecord(1L, false);
journalImpl.forceMoveNextFile();
setupAndLoadJournal(JOURNAL_SIZE, 0);
journalImpl.forceMoveNextFile();
journalImpl.debugWait();
journalImpl.checkReclaimStatus();
Assert.assertEquals(0, transactions.size());
Assert.assertEquals(0, journalImpl.getDataFilesCount());
}
@Test
public void testReclaimingAfterConcurrentAddsAndDeletesTx() throws Exception {
testReclaimingAfterConcurrentAddsAndDeletes(true);
}
@Test
public void testReclaimingAfterConcurrentAddsAndDeletesNonTx() throws Exception {
testReclaimingAfterConcurrentAddsAndDeletes(false);
}
public void testReclaimingAfterConcurrentAddsAndDeletes(final boolean transactional) throws Exception {
final int JOURNAL_SIZE = 10 * 1024;
setupAndLoadJournal(JOURNAL_SIZE, 1);
Assert.assertEquals(0, records.size());
Assert.assertEquals(0, transactions.size());
final CountDownLatch latchReady = new CountDownLatch(2);
final CountDownLatch latchStart = new CountDownLatch(1);
final AtomicInteger finishedOK = new AtomicInteger(0);
final BlockingQueue<Integer> queueDelete = new LinkedBlockingQueue<>();
final int NUMBER_OF_ELEMENTS = 500;
Thread t1 = new Thread() {
@Override
public void run() {
try {
latchReady.countDown();
ActiveMQTestBase.waitForLatch(latchStart);
for (int i = 0; i < NUMBER_OF_ELEMENTS; i++) {
if (transactional) {
journalImpl.appendAddRecordTransactional(i, i, (byte) 1, new SimpleEncoding(50, (byte) 1));
journalImpl.appendCommitRecord(i, false);
} else {
journalImpl.appendAddRecord(i, (byte) 1, new SimpleEncoding(50, (byte) 1), false);
}
queueDelete.offer(i);
}
finishedOK.incrementAndGet();
} catch (Exception e) {
e.printStackTrace();
}
}
};
Thread t2 = new Thread() {
@Override
public void run() {
try {
latchReady.countDown();
ActiveMQTestBase.waitForLatch(latchStart);
for (int i = 0; i < NUMBER_OF_ELEMENTS; i++) {
Integer toDelete = queueDelete.poll(10, TimeUnit.SECONDS);
if (toDelete == null) {
break;
}
if (transactional) {
journalImpl.appendDeleteRecordTransactional(toDelete, toDelete, new SimpleEncoding(50, (byte) 1));
journalImpl.appendCommitRecord(i, false);
} else {
journalImpl.appendDeleteRecord(toDelete, false);
}
}
finishedOK.incrementAndGet();
} catch (Exception e) {
e.printStackTrace();
}
}
};
t1.start();
t2.start();
ActiveMQTestBase.waitForLatch(latchReady);
latchStart.countDown();
t1.join();
t2.join();
Assert.assertEquals(2, finishedOK.intValue());
journalImpl.debugWait();
journalImpl.forceMoveNextFile();
journalImpl.debugWait();
journalImpl.checkReclaimStatus();
Assert.assertEquals(0, journalImpl.getDataFilesCount());
Assert.assertEquals(2, factory.listFiles("tt").size());
}
@Test
public void testAlignmentOverReload() throws Exception {
factory = new FakeSequentialFileFactory(512, false);
journalImpl = new JournalImpl(512 + 512 * 3, 20, 20, 0, 0, factory, "amq", "amq", 1000);
journalImpl.start();
journalImpl.load(AlignedJournalImplTest.dummyLoader);
journalImpl.appendAddRecord(1L, (byte) 0, new SimpleEncoding(100, (byte) 'a'), false);
journalImpl.appendAddRecord(2L, (byte) 0, new SimpleEncoding(100, (byte) 'b'), false);
journalImpl.appendAddRecord(3L, (byte) 0, new SimpleEncoding(100, (byte) 'b'), false);
journalImpl.appendAddRecord(4L, (byte) 0, new SimpleEncoding(100, (byte) 'b'), false);
journalImpl.stop();
journalImpl = new JournalImpl(512 + 1024 + 512, 20, 20, 0, 0, factory, "amq", "amq", 1000);
addActiveMQComponent(journalImpl);
journalImpl.start();
journalImpl.load(AlignedJournalImplTest.dummyLoader);
// It looks silly, but this forceMoveNextFile is in place to replicate one
// specific bug caught during development
journalImpl.forceMoveNextFile();
journalImpl.appendDeleteRecord(1L, false);
journalImpl.appendDeleteRecord(2L, false);
journalImpl.appendDeleteRecord(3L, false);
journalImpl.appendDeleteRecord(4L, false);
journalImpl.stop();
journalImpl = new JournalImpl(512 + 1024 + 512, 20, 20, 0, 0, factory, "amq", "amq", 1000);
addActiveMQComponent(journalImpl);
journalImpl.start();
ArrayList<RecordInfo> info = new ArrayList<>();
ArrayList<PreparedTransactionInfo> trans = new ArrayList<>();
journalImpl.load(info, trans, null);
Assert.assertEquals(0, info.size());
Assert.assertEquals(0, trans.size());
}
// Package protected ---------------------------------------------
// Protected -----------------------------------------------------
@Override
@Before
public void setUp() throws Exception {
super.setUp();
records = new ArrayList<>();
transactions = new ArrayList<>();
incompleteTransactions = new ArrayList<>();
factory = null;
journalImpl = null;
}
@Override
@After
public void tearDown() throws Exception {
stopComponent(journalImpl);
if (factory != null)
factory.stop();
records = null;
transactions = null;
incompleteTransactions = null;
factory = null;
journalImpl = null;
super.tearDown();
}
// Private -------------------------------------------------------
private void setupAndLoadJournal(final int journalSize, final int alignment) throws Exception {
setupAndLoadJournal(journalSize, alignment, 2);
}
private void setupAndLoadJournal(final int journalSize,
final int alignment,
final int numberOfMinimalFiles) throws Exception {
if (factory == null) {
factory = new FakeSequentialFileFactory(alignment, true);
}
if (journalImpl != null) {
journalImpl.stop();
}
journalImpl = new JournalImpl(journalSize, numberOfMinimalFiles, numberOfMinimalFiles, 0, 0, factory, "tt", "tt", 1000);
addActiveMQComponent(journalImpl);
journalImpl.start();
records.clear();
transactions.clear();
incompleteTransactions.clear();
journalImpl.load(records, transactions, new TransactionFailureCallback() {
@Override
public void failedTransaction(final long transactionID,
final List<RecordInfo> records,
final List<RecordInfo> recordsToDelete) {
log.debug("records.length = " + records.size());
incompleteTransactions.add(transactionID);
}
});
}
// Inner classes -------------------------------------------------
}
| |
/*
* #%L
* ELK Utilities for Logging
*
* $Id$
* $HeadURL$
* %%
* Copyright (C) 2011 - 2012 Department of Computer Science, University of Oxford
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package org.semanticweb.elk.protege.ui;
import java.awt.Component;
import java.awt.Dimension;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.atomic.AtomicReference;
import javax.swing.Box;
import javax.swing.BoxLayout;
import javax.swing.JCheckBox;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JTextArea;
import org.semanticweb.elk.protege.ProtegeSuppressedMessages;
import org.semanticweb.elk.util.logging.ElkMessage;
import ch.qos.logback.classic.Level;
import ch.qos.logback.classic.spi.ILoggingEvent;
import ch.qos.logback.core.AppenderBase;
/**
* A Logback Appender that creates dialogs in order to "log" messages.
*
* Only one dialog is shown at any single time: the user has to close it before
* getting the next. The display of message dialogs is controlled by an
* independent thread, so that the code that reports a message does not have to
* wait for the user to close dialogs. Incoming events are queued and processed
* in order.
*
* For events of type ElkMessage (rather than plain String), the appender allows
* to filter by message type, that is, it offers the user the option to not show
* such messages again. Here, "such messages" means messages of the same message
* type.
*
* @author Markus Kroetzsch
* @author "Yevgeny Kazakov"
*
*/
/**
 * A Logback Appender that creates dialogs in order to "log" messages.
 *
 * Only one dialog is shown at any single time: the user has to close it before
 * getting the next. The display of message dialogs is controlled by an
 * independent thread, so that the code that reports a message does not have to
 * wait for the user to close dialogs. Incoming events are queued and processed
 * in order.
 *
 * For events of type ElkMessage (rather than plain String), the appender allows
 * to filter by message type, that is, it offers the user the option to not show
 * such messages again. Here, "such messages" means messages of the same message
 * type.
 *
 * @author Markus Kroetzsch
 * @author "Yevgeny Kazakov"
 *
 */
public class MessageDialogAppender extends AppenderBase<ILoggingEvent> implements Runnable {
    /** Events waiting to be displayed; drained by the messenger thread. */
    private final ConcurrentLinkedQueue<ILoggingEvent> eventBuffer_ = new ConcurrentLinkedQueue<ILoggingEvent>();
    /**
     * Name of the messenger thread currently displaying dialogs, or the empty
     * string when none runs. Also acts as a recursion guard: events logged
     * from the messenger thread itself are dropped in {@link #append}.
     */
    private final AtomicReference<String> messengerThreadName_ = new AtomicReference<String>(
            "");
    /** Message types the user chose not to see again. */
    private final ProtegeSuppressedMessages suppressedMessages_;
    public MessageDialogAppender() {
        super();
        suppressedMessages_ = ProtegeSuppressedMessages.getInstance().reload();
    }
    /**
     * Shut down. This discards all events, to ensure that the message reporting
     * thread will die too.
     */
    @Override
    public void stop() {
        super.stop();
        synchronized (eventBuffer_) {
            eventBuffer_.clear(); // shoot the messenger
        }
    }
    /**
     * Append a logging event. This is what Logback calls to log an event.
     */
    @Override
    protected void append(ILoggingEvent event) {
        if (!Thread.currentThread().getName()
                .equals(messengerThreadName_.get())) {
            eventBuffer_.add(event);
        }
        // Else: drop event. Recursive message creation is thus blocked; even if
        // displaying the message would create new events, they will not lead to
        // endless reporting (unless the messenger creates new threads; we
        // cannot prevent this).
        // Also note that get() above is needed.
        ensureMessengerRuns();
    }
    /**
     * Make sure that a messenger thread is run. The compareAndSet guarantees
     * that at most one messenger thread is started at a time.
     */
    protected void ensureMessengerRuns() {
        if (messengerThreadName_.compareAndSet("", "Initialising thread ...")) {
            Thread messengerThread = new Thread(this);
            messengerThreadName_.set(messengerThread.getName());
            messengerThread.start();
        }
    }
    /**
     * Generate the additional check box message specific to the given event
     *
     * @param event
     *            the event for which the check box message should be generated
     * @return the generated check box message
     */
    @SuppressWarnings("static-method")
    protected String getCheckboxMessage(ILoggingEvent event) {
        return "Do not show further messages of this kind";
    }
    /**
     * Display a dialog window to inform the user about one message event.
     *
     * @param event
     *            the event for which to display the message
     * @return {@code true} if the message has been shown
     */
    protected boolean showMessage(ILoggingEvent event) {
        String messageTitle;
        int messageLevel;
        if (event.getLevel().isGreaterOrEqual(Level.ERROR)) {
            messageTitle = "ELK Error";
            messageLevel = JOptionPane.ERROR_MESSAGE;
        } else if (event.getLevel().isGreaterOrEqual(Level.WARN)) {
            messageTitle = "ELK Warning";
            messageLevel = JOptionPane.WARNING_MESSAGE;
        } else {
            messageTitle = "ELK Information";
            messageLevel = JOptionPane.INFORMATION_MESSAGE;
        }
        ElkMessage elkMessage = ElkMessage.deserialize(event
                .getFormattedMessage());
        String messageType = null;
        if (elkMessage != null) {
            messageType = elkMessage.getMessageType();
            if (suppressedMessages_.checkSuppressed(messageType))
                return false; // user opted out of this message type
        }
        JPanel panel = new JPanel();
        panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS));
        // BUG FIX: elkMessage may be null here (deserialize returns null for
        // plain String events); the original unconditionally dereferenced it
        // and threw a NullPointerException. Fall back to the raw message.
        String messageText = elkMessage != null ? elkMessage.getMessage()
                : event.getFormattedMessage();
        // truncating too long message text
        if (messageText.length() > 520) {
            messageText = messageText.substring(0, 500) + "...";
        }
        WrappingLabel label = new WrappingLabel(messageText, 600);
        label.setAlignmentX(Component.LEFT_ALIGNMENT);
        panel.add(label);
        // it is important that the checkbox message is not too wide
        // NOTE(review): the default checkbox message contains no format
        // specifiers, so the 450 argument is ignored by String.format; kept
        // for compatibility with subclasses overriding getCheckboxMessage.
        JCheckBox ignoreMessageButton = new JCheckBox(String.format(
                getCheckboxMessage(event), 450));
        if (messageType != null) {
            ignoreMessageButton.setAlignmentX(Component.LEFT_ALIGNMENT);
            panel.add(Box.createRigidArea(new Dimension(0, 10)));
            panel.add(ignoreMessageButton);
        }
        // // Later, it could be possible to abort the reasoner here:
        // Object[] options = { "Continue", "Abort Reasoner" };
        // int result = JOptionPane.showOptionDialog(null, radioPanel,
        // messageTitle,
        // JOptionPane.DEFAULT_OPTION, messageLevel, null, options,
        // options[0]);
        JOptionPane.showMessageDialog(null, panel, messageTitle, messageLevel);
        // the checkbox is only added to the panel for typed messages, so only
        // record a suppression when we actually have a message type
        if (ignoreMessageButton.isSelected() && messageType != null) {
            suppressedMessages_.addWarningType(messageType);
        }
        return true;
    }
    /**
     * Display messages until none are left to display. Then reset the
     * registered thread name and die.
     */
    @Override
    public void run() {
        while (!eventBuffer_.isEmpty()) {
            showMessage(eventBuffer_.poll());
        }
        messengerThreadName_.set("");
        // If another thread has added new events just before the
        // messengerThreadName was reset here, then it could happen that the
        // messenger dies while there is still work to do. To avoid this, we
        // check again one last time, and create a new messenger if needed:
        if (!eventBuffer_.isEmpty()) {
            ensureMessengerRuns();
        }
    }
}
/**
 * A read-only, word-wrapping, label-like component based on {@link JTextArea}.
 * The constructor lays the text out at the requested pixel width and then
 * shrinks the component to its preferred (wrapped) height.
 */
class WrappingLabel extends JTextArea {
    private static final long serialVersionUID = -1028283148775499046L;
    /**
     * @param text  the text to display
     * @param width the wrapping width in pixels
     */
    public WrappingLabel(String text, int width) {
        super(text); // FIX: the original also called setText(text) redundantly
        setBackground(null); // transparent, like a JLabel
        setEditable(false);
        setBorder(null);
        setFocusable(false);
        setLineWrap(true);
        setWrapStyleWord(true);
        // First force the target width so the preferred height is computed
        // for wrapped lines, then adopt that preferred size.
        setSize(width, 1);
        setSize(getPreferredSize());
    }
}
| |
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.psi.search.searches;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.extensions.ExtensionPointName;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.ProgressIndicatorProvider;
import com.intellij.openapi.util.Computable;
import com.intellij.openapi.util.Condition;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.Ref;
import com.intellij.psi.*;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.search.PsiSearchScopeUtil;
import com.intellij.psi.search.SearchScope;
import com.intellij.reference.SoftReference;
import com.intellij.util.Processor;
import com.intellij.util.Query;
import com.intellij.util.QueryExecutor;
import com.intellij.util.containers.Stack;
import gnu.trove.THashSet;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.lang.ref.Reference;
import java.util.Set;
/**
* @author max
*/
/**
 * Query factory for finding inheritors (direct and, optionally, transitive)
 * of a given {@link PsiClass}.
 *
 * @author max
 */
public class ClassInheritorsSearch extends ExtensibleQueryFactory<PsiClass, ClassInheritorsSearch.SearchParameters> {
  // Extension point through which plugins may contribute additional inheritor-search executors.
  public static ExtensionPointName<QueryExecutor> EP_NAME = ExtensionPointName.create("com.intellij.classInheritorsSearch");
  private static final Logger LOG = Logger.getInstance("#com.intellij.psi.search.searches.ClassInheritorsSearch");
  public static final ClassInheritorsSearch INSTANCE = new ClassInheritorsSearch();
  static {
    // Default executor: reports progress, then delegates to processInheritors.
    INSTANCE.registerExecutor(new QueryExecutor<PsiClass, SearchParameters>() {
      @Override
      public boolean execute(@NotNull final SearchParameters parameters, @NotNull final Processor<PsiClass> consumer) {
        final PsiClass baseClass = parameters.getClassToProcess();
        final SearchScope searchScope = parameters.getScope();
        LOG.assertTrue(searchScope != null);
        ProgressIndicator progress = ProgressIndicatorProvider.getGlobalProgressIndicator();
        if (progress != null) {
          progress.pushState();
          // PSI must be accessed inside a read action
          String className = ApplicationManager.getApplication().runReadAction(new Computable<String>() {
            @Override
            public String compute() {
              return baseClass.getName();
            }
          });
          progress.setText(className != null ?
                           PsiBundle.message("psi.search.inheritors.of.class.progress", className) :
                           PsiBundle.message("psi.search.inheritors.progress"));
        }
        boolean result = processInheritors(consumer, baseClass, searchScope, parameters);
        if (progress != null) {
          progress.popState();
        }
        return result;
      }
    });
  }
  /**
   * Parameter object describing one inheritors search: the base class, the
   * scope, whether to search transitively (checkDeep), whether to re-verify
   * inheritance, whether to include anonymous classes, and an optional
   * name filter applied to candidates.
   */
  public static class SearchParameters {
    private final PsiClass myClass;
    private final SearchScope myScope;
    private final boolean myCheckDeep;
    private final boolean myCheckInheritance;
    private final boolean myIncludeAnonymous;
    private final Condition<String> myNameCondition;
    public SearchParameters(@NotNull final PsiClass aClass, @NotNull SearchScope scope, final boolean checkDeep, final boolean checkInheritance, boolean includeAnonymous) {
      // no name filtering by default
      this(aClass, scope, checkDeep, checkInheritance, includeAnonymous, Condition.TRUE);
    }
    public SearchParameters(@NotNull final PsiClass aClass, @NotNull SearchScope scope, final boolean checkDeep, final boolean checkInheritance,
                            boolean includeAnonymous, @NotNull final Condition<String> nameCondition) {
      myClass = aClass;
      myScope = scope;
      myCheckDeep = checkDeep;
      myCheckInheritance = checkInheritance;
      myIncludeAnonymous = includeAnonymous;
      myNameCondition = nameCondition;
    }
    @NotNull
    public PsiClass getClassToProcess() {
      return myClass;
    }
    @NotNull public Condition<String> getNameCondition() {
      return myNameCondition;
    }
    public boolean isCheckDeep() {
      return myCheckDeep;
    }
    public SearchScope getScope() {
      return myScope;
    }
    public boolean isCheckInheritance() {
      return myCheckInheritance;
    }
    public boolean isIncludeAnonymous() {
      return myIncludeAnonymous;
    }
  }
  private ClassInheritorsSearch() {}
  // Convenience overloads; each delegates to the SearchParameters variant with defaults filled in.
  public static Query<PsiClass> search(@NotNull final PsiClass aClass, @NotNull SearchScope scope, final boolean checkDeep, final boolean checkInheritance, boolean includeAnonymous) {
    return search(new SearchParameters(aClass, scope, checkDeep, checkInheritance, includeAnonymous));
  }
  public static Query<PsiClass> search(@NotNull SearchParameters parameters) {
    return INSTANCE.createQuery(parameters);
  }
  public static Query<PsiClass> search(@NotNull final PsiClass aClass, @NotNull SearchScope scope, final boolean checkDeep, final boolean checkInheritance) {
    return search(aClass, scope, checkDeep, checkInheritance, true);
  }
  public static Query<PsiClass> search(@NotNull final PsiClass aClass, @NotNull SearchScope scope, final boolean checkDeep) {
    return search(aClass, scope, checkDeep, true);
  }
  public static Query<PsiClass> search(@NotNull final PsiClass aClass, final boolean checkDeep) {
    return search(aClass, aClass.getUseScope(), checkDeep);
  }
  public static Query<PsiClass> search(@NotNull PsiClass aClass) {
    return search(aClass, true);
  }
  /**
   * Iteratively walks direct inheritors starting from {@code baseClass},
   * feeding matches to {@code consumer}. Uses an explicit stack (no
   * recursion) and special-cases java.lang.Object by enumerating all
   * classes in scope instead of walking the hierarchy.
   *
   * @return {@code false} if the consumer stopped the search
   */
  private static boolean processInheritors(@NotNull final Processor<PsiClass> consumer,
                                           @NotNull final PsiClass baseClass,
                                           @NotNull final SearchScope searchScope,
                                           @NotNull final SearchParameters parameters) {
    // anonymous and final classes cannot have inheritors
    if (baseClass instanceof PsiAnonymousClass || isFinal(baseClass)) return true;
    final String qname = ApplicationManager.getApplication().runReadAction(new Computable<String>() {
      @Override
      public String compute() {
        return baseClass.getQualifiedName();
      }
    });
    if (CommonClassNames.JAVA_LANG_OBJECT.equals(qname)) {
      // every class inherits Object: enumerate all classes in scope rather than walking the tree
      return AllClassesSearch.search(searchScope, baseClass.getProject(), parameters.getNameCondition()).forEach(new Processor<PsiClass>() {
        @Override
        public boolean process(final PsiClass aClass) {
          ProgressIndicatorProvider.checkCanceled();
          final String qname1 = ApplicationManager.getApplication().runReadAction(new Computable<String>() {
            @Override
            @Nullable
            public String compute() {
              return aClass.getQualifiedName();
            }
          });
          // skip Object itself; everything else is an inheritor
          return CommonClassNames.JAVA_LANG_OBJECT.equals(qname1) || consumer.process(aClass);
        }
      });
    }
    final Ref<PsiClass> currentBase = Ref.create(null);
    final Stack<Pair<Reference<PsiClass>, String>> stack = new Stack<Pair<Reference<PsiClass>, String>>();
    // there are two sets for memory optimization: it's cheaper to hold FQN than PsiClass
    final Set<String> processedFqns = new THashSet<String>(); // FQN of processed classes if the class has one
    final Set<PsiClass> processed = new THashSet<PsiClass>(); // processed classes without FQN (e.g. anonymous)
    final Processor<PsiClass> processor = new Processor<PsiClass>() {
      @Override
      public boolean process(final PsiClass candidate) {
        ProgressIndicatorProvider.checkCanceled();
        final Ref<Boolean> result = new Ref<Boolean>();
        final String[] fqn = new String[1];
        ApplicationManager.getApplication().runReadAction(new Runnable() {
          @Override
          public void run() {
            fqn[0] = candidate.getQualifiedName();
            if (parameters.isCheckInheritance() || parameters.isCheckDeep() && !(candidate instanceof PsiAnonymousClass)) {
              // re-verify the inheritance relation against the class currently being expanded
              if (!candidate.isInheritor(currentBase.get(), false)) {
                result.set(true);
                return;
              }
            }
            if (PsiSearchScopeUtil.isInScope(searchScope, candidate)) {
              if (candidate instanceof PsiAnonymousClass) {
                result.set(consumer.process(candidate));
              }
              else {
                final String name = candidate.getName();
                if (name != null && parameters.getNameCondition().value(name) && !consumer.process(candidate)) result.set(false);
              }
            }
          }
        });
        if (!result.isNull()) return result.get().booleanValue();
        // queue the candidate's own inheritors unless it cannot have any
        if (parameters.isCheckDeep() && !(candidate instanceof PsiAnonymousClass) && !isFinal(candidate)) {
          // soft-reference classes that can be re-found by FQN; hard-reference the rest
          Reference<PsiClass> ref = fqn[0] == null ? createHardReference(candidate) : new SoftReference<PsiClass>(candidate);
          stack.push(Pair.create(ref, fqn[0]));
        }
        return true;
      }
    };
    stack.push(Pair.create(createHardReference(baseClass), qname));
    final GlobalSearchScope projectScope = GlobalSearchScope.allScope(baseClass.getProject());
    final JavaPsiFacade facade = JavaPsiFacade.getInstance(projectScope.getProject());
    while (!stack.isEmpty()) {
      ProgressIndicatorProvider.checkCanceled();
      Pair<Reference<PsiClass>, String> pair = stack.pop();
      PsiClass psiClass = pair.getFirst().get();
      final String fqn = pair.getSecond();
      if (psiClass == null) {
        // the soft reference was collected; re-resolve the class by its FQN
        psiClass = ApplicationManager.getApplication().runReadAction(new Computable<PsiClass>() {
          @Override
          public PsiClass compute() {
            return facade.findClass(fqn, projectScope);
          }
        });
        if (psiClass == null) continue;
      }
      // deduplicate: by FQN when available, by identity otherwise
      if (fqn == null) {
        if (!processed.add(psiClass)) continue;
      }
      else {
        if (!processedFqns.add(fqn)) continue;
      }
      currentBase.set(psiClass);
      if (!DirectClassInheritorsSearch.search(psiClass, projectScope, parameters.isIncludeAnonymous(), false).forEach(processor)) return false;
    }
    return true;
  }
  // A Reference that never releases its referent; used for classes that have no FQN to re-find them by.
  private static Reference<PsiClass> createHardReference(final PsiClass candidate) {
    return new SoftReference<PsiClass>(candidate){
      @Override
      public PsiClass get() {
        return candidate;
      }
    };
  }
  // Reads the FINAL modifier inside a read action.
  private static boolean isFinal(@NotNull final PsiClass baseClass) {
    return ApplicationManager.getApplication().runReadAction(new Computable<Boolean>() {
      @Override
      public Boolean compute() {
        return Boolean.valueOf(baseClass.hasModifierProperty(PsiModifier.FINAL));
      }
    }).booleanValue();
  }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
import com.carrotsearch.hppc.cursors.ObjectCursor;
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.IndexOptions;
import org.elasticsearch.Version;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.core.TypeParsers;
import org.elasticsearch.index.mapper.internal.AllFieldMapper;
import org.elasticsearch.index.similarity.SimilarityProvider;
import org.elasticsearch.index.similarity.SimilarityService;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.stream.StreamSupport;
public abstract class FieldMapper extends Mapper implements Cloneable {
    // Boolean index-scoped setting "index.mapping.ignore_malformed", default false.
    public static final Setting<Boolean> IGNORE_MALFORMED_SETTING = Setting.boolSetting("index.mapping.ignore_malformed", false, false, Setting.Scope.INDEX);
    // Boolean index-scoped setting "index.mapping.coerce", default false.
    public static final Setting<Boolean> COERCE_SETTING = Setting.boolSetting("index.mapping.coerce", false, false, Setting.Scope.INDEX);
    /**
     * Base builder shared by all field mappers. Accumulates index, store and
     * analysis options by mutating a private clone of the given
     * {@link MappedFieldType}; concrete subclasses build the mapper from it.
     */
    public abstract static class Builder<T extends Builder, Y extends FieldMapper> extends Mapper.Builder<T, Y> {
        protected final MappedFieldType fieldType;
        protected final MappedFieldType defaultFieldType;
        // captured at construction time because fieldType is mutable; see getDefaultIndexOption()
        private final IndexOptions defaultOptions;
        protected boolean omitNormsSet = false;
        protected String indexName;
        protected Boolean includeInAll;
        protected boolean indexOptionsSet = false;
        // whether doc_values was set explicitly (or carried by the initial field type)
        protected boolean docValuesSet = false;
        @Nullable
        protected Settings fieldDataSettings;
        protected final MultiFields.Builder multiFieldsBuilder;
        protected CopyTo copyTo;
        protected Builder(String name, MappedFieldType fieldType, MappedFieldType defaultFieldType) {
            super(name);
            // clone so that shared default field-type instances are never mutated
            this.fieldType = fieldType.clone();
            this.defaultFieldType = defaultFieldType.clone();
            this.defaultOptions = fieldType.indexOptions(); // we have to store it the fieldType is mutable
            this.docValuesSet = fieldType.hasDocValues();
            multiFieldsBuilder = new MultiFields.Builder();
        }
        public MappedFieldType fieldType() {
            return fieldType;
        }
        // Enables or disables indexing; re-enabling restores the default index options.
        public T index(boolean index) {
            if (index) {
                if (fieldType.indexOptions() == IndexOptions.NONE) {
                    /*
                     * the logic here is to reset to the default options only if we are not indexed ie. options are null
                     * if the fieldType has a non-null option we are all good it might have been set through a different
                     * call.
                     */
                    IndexOptions options = getDefaultIndexOption();
                    if (options == IndexOptions.NONE) {
                        // can happen when an existing type on the same index has disabled indexing
                        // since we inherit the default field type from the first mapper that is
                        // created on an index
                        throw new IllegalArgumentException("mapper [" + name + "] has different [index] values from other types of the same index");
                    }
                    fieldType.setIndexOptions(options);
                }
            } else {
                fieldType.setIndexOptions(IndexOptions.NONE);
            }
            return builder;
        }
        protected IndexOptions getDefaultIndexOption() {
            return defaultOptions;
        }
        public T store(boolean store) {
            this.fieldType.setStored(store);
            return builder;
        }
        public T docValues(boolean docValues) {
            this.fieldType.setHasDocValues(docValues);
            this.docValuesSet = true;
            return builder;
        }
        public T storeTermVectors(boolean termVectors) {
            if (termVectors != this.fieldType.storeTermVectors()) {
                this.fieldType.setStoreTermVectors(termVectors);
            } // don't set it to false, it is default and might be flipped by a more specific option
            return builder;
        }
        // The three options below imply term vectors, so enabling any of them also enables storeTermVectors.
        public T storeTermVectorOffsets(boolean termVectorOffsets) {
            if (termVectorOffsets) {
                this.fieldType.setStoreTermVectors(termVectorOffsets);
            }
            this.fieldType.setStoreTermVectorOffsets(termVectorOffsets);
            return builder;
        }
        public T storeTermVectorPositions(boolean termVectorPositions) {
            if (termVectorPositions) {
                this.fieldType.setStoreTermVectors(termVectorPositions);
            }
            this.fieldType.setStoreTermVectorPositions(termVectorPositions);
            return builder;
        }
        public T storeTermVectorPayloads(boolean termVectorPayloads) {
            if (termVectorPayloads) {
                this.fieldType.setStoreTermVectors(termVectorPayloads);
            }
            this.fieldType.setStoreTermVectorPayloads(termVectorPayloads);
            return builder;
        }
        public T tokenized(boolean tokenized) {
            this.fieldType.setTokenized(tokenized);
            return builder;
        }
        public T boost(float boost) {
            this.fieldType.setBoost(boost);
            return builder;
        }
        public T omitNorms(boolean omitNorms) {
            this.fieldType.setOmitNorms(omitNorms);
            this.omitNormsSet = true;
            return builder;
        }
        public T indexOptions(IndexOptions indexOptions) {
            this.fieldType.setIndexOptions(indexOptions);
            this.indexOptionsSet = true;
            return builder;
        }
        public T indexName(String indexName) {
            this.indexName = indexName;
            return builder;
        }
        public T indexAnalyzer(NamedAnalyzer indexAnalyzer) {
            this.fieldType.setIndexAnalyzer(indexAnalyzer);
            return builder;
        }
        public T searchAnalyzer(NamedAnalyzer searchAnalyzer) {
            this.fieldType.setSearchAnalyzer(searchAnalyzer);
            return builder;
        }
        public T searchQuoteAnalyzer(NamedAnalyzer searchQuoteAnalyzer) {
            this.fieldType.setSearchQuoteAnalyzer(searchQuoteAnalyzer);
            return builder;
        }
        public T includeInAll(Boolean includeInAll) {
            this.includeInAll = includeInAll;
            return builder;
        }
        public T similarity(SimilarityProvider similarity) {
            this.fieldType.setSimilarity(similarity);
            return builder;
        }
        public T normsLoading(MappedFieldType.Loading normsLoading) {
            this.fieldType.setNormsLoading(normsLoading);
            return builder;
        }
        public T fieldDataSettings(Settings settings) {
            this.fieldDataSettings = settings;
            return builder;
        }
        // NOTE: returns Builder (this), not T, unlike the other setters.
        public Builder nullValue(Object nullValue) {
            this.fieldType.setNullValue(nullValue);
            return this;
        }
        public T addMultiField(Mapper.Builder mapperBuilder) {
            multiFieldsBuilder.add(mapperBuilder);
            return builder;
        }
        public T copyTo(CopyTo copyTo) {
            this.copyTo = copyTo;
            return builder;
        }
        // Full dotted path of the field within the current mapping context.
        protected String buildFullName(BuilderContext context) {
            return context.path().pathAsText(name);
        }
        // Version-dependent default for doc_values when the user did not set it explicitly.
        protected boolean defaultDocValues(Version indexCreated) {
            if (indexCreated.onOrAfter(Version.V_3_0_0)) {
                // add doc values by default to keyword (boolean, numerics, etc.) fields
                return fieldType.tokenized() == false;
            } else {
                return fieldType.tokenized() == false && fieldType.indexOptions() != IndexOptions.NONE;
            }
        }
        /**
         * Finalizes the field type before building: sets the full name,
         * applies keyword analyzers to untokenized indexed fields, merges
         * field-data settings, and resolves the doc_values default.
         */
        protected void setupFieldType(BuilderContext context) {
            fieldType.setName(buildFullName(context));
            if (fieldType.indexAnalyzer() == null && fieldType.tokenized() == false && fieldType.indexOptions() != IndexOptions.NONE) {
                fieldType.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
                fieldType.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER);
            }
            if (fieldDataSettings != null) {
                // explicit field-data settings override (and are merged over) the type's defaults
                Settings settings = Settings.builder().put(fieldType.fieldDataType().getSettings()).put(fieldDataSettings).build();
                fieldType.setFieldDataType(new FieldDataType(fieldType.fieldDataType().getType(), settings));
            }
            boolean defaultDocValues = defaultDocValues(context.indexCreatedVersion());
            defaultFieldType.setHasDocValues(defaultDocValues);
            if (docValuesSet == false) {
                fieldType.setHasDocValues(defaultDocValues);
            }
        }
    }
    // Mutable: replaced by doMerge(...) and updateFieldType(...); the instance itself is frozen.
    protected MappedFieldType fieldType;
    protected final MappedFieldType defaultFieldType;
    // Mutable: replaced when merging in sub-field changes.
    protected MultiFields multiFields;
    protected CopyTo copyTo;
    /**
     * @param simpleName       the mapper's simple (un-dotted) name
     * @param fieldType        the resolved field type; frozen here
     * @param defaultFieldType the default field type to diff against in toXContent; frozen here
     * @param indexSettings    must not be null (asserted only)
     * @param multiFields      sub-fields of this field
     * @param copyTo           copy_to targets, may be null
     */
    protected FieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
        super(simpleName);
        assert indexSettings != null;
        // freeze both types so they cannot be mutated after the mapper exists
        fieldType.freeze();
        this.fieldType = fieldType;
        defaultFieldType.freeze();
        this.defaultFieldType = defaultFieldType;
        this.multiFields = multiFields;
        this.copyTo = copyTo;
    }
    @Override
    public String name() {
        // the mapper's name is its field type's full name
        return fieldType().name();
    }
    public MappedFieldType fieldType() {
        return fieldType;
    }
    /**
     * List of fields where this field should be copied to
     */
    public CopyTo copyTo() {
        return copyTo;
    }
/**
* Parse using the provided {@link ParseContext} and return a mapping
* update if dynamic mappings modified the mappings, or {@code null} if
* mappings were not modified.
*/
public Mapper parse(ParseContext context) throws IOException {
final List<Field> fields = new ArrayList<>(2);
try {
parseCreateField(context, fields);
for (Field field : fields) {
if (!customBoost()
// don't set boosts eg. on dv fields
&& field.fieldType().indexOptions() != IndexOptions.NONE) {
field.setBoost(fieldType().boost());
}
context.doc().add(field);
}
} catch (Exception e) {
throw new MapperParsingException("failed to parse [" + fieldType().name() + "]", e);
}
multiFields.parse(this, context);
return null;
}
    /**
     * Parse the field value and populate <code>fields</code>.
     */
    protected abstract void parseCreateField(ParseContext context, List<Field> fields) throws IOException;
    /**
     * Derived classes can override it to specify that boost value is set by derived classes.
     */
    protected boolean customBoost() {
        // default: boost is applied generically in parse()
        return false;
    }
    @Override
    public Iterator<Mapper> iterator() {
        // a field mapper's sub-mappers are exactly its multi-fields
        return multiFields.iterator();
    }
    @Override
    protected FieldMapper clone() {
        try {
            return (FieldMapper) super.clone();
        } catch (CloneNotSupportedException e) {
            // cannot happen: FieldMapper implements Cloneable
            throw new AssertionError(e);
        }
    }
    // Returns a merged copy; this mapper itself is left unmodified.
    @Override
    public FieldMapper merge(Mapper mergeWith, boolean updateAllTypes) {
        FieldMapper merged = clone();
        merged.doMerge(mergeWith, updateAllTypes);
        return merged;
    }
/**
* Merge changes coming from {@code mergeWith} in place.
* @param updateAllTypes TODO
*/
protected void doMerge(Mapper mergeWith, boolean updateAllTypes) {
if (!this.getClass().equals(mergeWith.getClass())) {
String mergedType = mergeWith.getClass().getSimpleName();
if (mergeWith instanceof FieldMapper) {
mergedType = ((FieldMapper) mergeWith).contentType();
}
throw new IllegalArgumentException("mapper [" + fieldType().name() + "] of different type, current_type [" + contentType() + "], merged_type [" + mergedType + "]");
}
FieldMapper fieldMergeWith = (FieldMapper) mergeWith;
multiFields = multiFields.merge(fieldMergeWith.multiFields);
// apply changeable values
this.fieldType = fieldMergeWith.fieldType;
this.copyTo = fieldMergeWith.copyTo;
}
@Override
public FieldMapper updateFieldType(Map<String, MappedFieldType> fullNameToFieldType) {
final MappedFieldType newFieldType = fullNameToFieldType.get(fieldType.name());
if (newFieldType == null) {
// this field does not exist in the mappings yet
// this can happen if this mapper represents a mapping update
return this;
} else if (fieldType.getClass() != newFieldType.getClass()) {
throw new IllegalStateException("Mixing up field types: " + fieldType.getClass() + " != " + newFieldType.getClass());
}
MultiFields updatedMultiFields = multiFields.updateFieldType(fullNameToFieldType);
if (fieldType == newFieldType && multiFields == updatedMultiFields) {
return this; // no change
}
FieldMapper updated = clone();
updated.fieldType = newFieldType;
updated.multiFields = updatedMultiFields;
return updated;
}
    // Serializes this mapper as an object named by simpleName(); the body is
    // produced by doXContentBody, which subclasses extend.
    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject(simpleName());
        boolean includeDefaults = params.paramAsBoolean("include_defaults", false);
        doXContentBody(builder, includeDefaults, params);
        return builder.endObject();
    }
    /**
     * Writes the body of this mapper's mapping definition. Each option is
     * emitted only when it differs from the default field type, unless
     * {@code includeDefaults} is set. Field order is part of the output
     * format and must not be changed.
     */
    protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
        builder.field("type", contentType());
        if (includeDefaults || fieldType().boost() != 1.0f) {
            builder.field("boost", fieldType().boost());
        }
        boolean indexed = fieldType().indexOptions() != IndexOptions.NONE;
        boolean defaultIndexed = defaultFieldType.indexOptions() != IndexOptions.NONE;
        if (includeDefaults || indexed != defaultIndexed ||
            fieldType().tokenized() != defaultFieldType.tokenized()) {
            builder.field("index", indexTokenizeOption(indexed, fieldType().tokenized()));
        }
        if (includeDefaults || fieldType().stored() != defaultFieldType.stored()) {
            builder.field("store", fieldType().stored());
        }
        doXContentDocValues(builder, includeDefaults);
        if (includeDefaults || fieldType().storeTermVectors() != defaultFieldType.storeTermVectors()) {
            builder.field("term_vector", termVectorOptionsToString(fieldType()));
        }
        if (includeDefaults || fieldType().omitNorms() != defaultFieldType.omitNorms() || fieldType().normsLoading() != null) {
            builder.startObject("norms");
            if (includeDefaults || fieldType().omitNorms() != defaultFieldType.omitNorms()) {
                // mapping exposes "enabled", the inverse of Lucene's omitNorms
                builder.field("enabled", !fieldType().omitNorms());
            }
            if (fieldType().normsLoading() != null) {
                builder.field(MappedFieldType.Loading.KEY, fieldType().normsLoading());
            }
            builder.endObject();
        }
        if (indexed && (includeDefaults || fieldType().indexOptions() != defaultFieldType.indexOptions())) {
            builder.field("index_options", indexOptionToString(fieldType().indexOptions()));
        }
        if (fieldType().similarity() != null) {
            builder.field("similarity", fieldType().similarity().name());
        } else if (includeDefaults) {
            builder.field("similarity", SimilarityService.DEFAULT_SIMILARITY);
        }
        if (includeDefaults || hasCustomFieldDataSettings()) {
            builder.field("fielddata", fieldType().fieldDataType().getSettings().getAsMap());
        }
        multiFields.toXContent(builder, params);
        if (copyTo != null) {
            copyTo.toXContent(builder, params);
        }
    }
    /**
     * Writes the analyzer-related mapping options. No-op for untokenized
     * fields; search/search_quote analyzers are emitted only when they
     * differ from the next more general analyzer.
     */
    protected final void doXContentAnalyzers(XContentBuilder builder, boolean includeDefaults) throws IOException {
        if (fieldType.tokenized() == false) {
            return;
        }
        if (fieldType().indexAnalyzer() == null) {
            if (includeDefaults) {
                builder.field("analyzer", "default");
            }
        } else {
            boolean hasDefaultIndexAnalyzer = fieldType().indexAnalyzer().name().equals("default");
            boolean hasDifferentSearchAnalyzer = fieldType().searchAnalyzer().name().equals(fieldType().indexAnalyzer().name()) == false;
            boolean hasDifferentSearchQuoteAnalyzer = fieldType().searchAnalyzer().name().equals(fieldType().searchQuoteAnalyzer().name()) == false;
            if (includeDefaults || hasDefaultIndexAnalyzer == false || hasDifferentSearchAnalyzer || hasDifferentSearchQuoteAnalyzer) {
                builder.field("analyzer", fieldType().indexAnalyzer().name());
                if (hasDifferentSearchAnalyzer || hasDifferentSearchQuoteAnalyzer) {
                    builder.field("search_analyzer", fieldType().searchAnalyzer().name());
                    if (hasDifferentSearchQuoteAnalyzer) {
                        builder.field("search_quote_analyzer", fieldType().searchQuoteAnalyzer().name());
                    }
                }
            }
        }
    }
    // Writes the "doc_values" option when it differs from the default field type.
    protected void doXContentDocValues(XContentBuilder builder, boolean includeDefaults) throws IOException {
        if (includeDefaults || defaultFieldType.hasDocValues() != fieldType().hasDocValues()) {
            builder.field("doc_values", fieldType().hasDocValues());
        }
    }
protected static String indexOptionToString(IndexOptions indexOption) {
switch (indexOption) {
case DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS:
return TypeParsers.INDEX_OPTIONS_OFFSETS;
case DOCS_AND_FREQS:
return TypeParsers.INDEX_OPTIONS_FREQS;
case DOCS_AND_FREQS_AND_POSITIONS:
return TypeParsers.INDEX_OPTIONS_POSITIONS;
case DOCS:
return TypeParsers.INDEX_OPTIONS_DOCS;
default:
throw new IllegalArgumentException("Unknown IndexOptions [" + indexOption + "]");
}
}
public static String termVectorOptionsToString(FieldType fieldType) {
if (!fieldType.storeTermVectors()) {
return "no";
} else if (!fieldType.storeTermVectorOffsets() && !fieldType.storeTermVectorPositions()) {
return "yes";
} else if (fieldType.storeTermVectorOffsets() && !fieldType.storeTermVectorPositions()) {
return "with_offsets";
} else {
StringBuilder builder = new StringBuilder("with");
if (fieldType.storeTermVectorPositions()) {
builder.append("_positions");
}
if (fieldType.storeTermVectorOffsets()) {
builder.append("_offsets");
}
if (fieldType.storeTermVectorPayloads()) {
builder.append("_payloads");
}
return builder.toString();
}
}
    /* Only protected so that string can override it */
    protected Object indexTokenizeOption(boolean indexed, boolean tokenized) {
        // default representation of the "index" mapping option is just the indexed flag
        return indexed;
    }
/**
 * Whether the user configured custom fielddata settings for this field,
 * i.e. a non-null fielddata type that differs from the default field type's.
 */
protected boolean hasCustomFieldDataSettings() {
    if (fieldType().fieldDataType() == null) {
        return false;
    }
    return fieldType().fieldDataType().equals(defaultFieldType.fieldDataType()) == false;
}
/**
 * The content type identifying this concrete mapper implementation — presumably
 * the type name used when serializing the mapping; confirm against subclasses.
 */
protected abstract String contentType();
/**
 * Holds the "multi fields" (sub-field mappers) of a field mapper, keyed by
 * sub-field name. The same parsed value is fed to each sub-mapper under the
 * main field's path. Instances are immutable; merge/update operations return
 * new instances.
 */
public static class MultiFields {
// Empty instance backed by an empty immutable map.
public static MultiFields empty() {
return new MultiFields(ImmutableOpenMap.<String, FieldMapper>of());
}
/** Collects sub-field mapper builders before the owning field is built. */
public static class Builder {
private final ImmutableOpenMap.Builder<String, Mapper.Builder> mapperBuilders = ImmutableOpenMap.builder();
// Registers one sub-field builder under its name; returns this for chaining.
public Builder add(Mapper.Builder builder) {
mapperBuilders.put(builder.name(), builder);
return this;
}
@SuppressWarnings("unchecked")
public MultiFields build(FieldMapper.Builder mainFieldBuilder, BuilderContext context) {
if (mapperBuilders.isEmpty()) {
return empty();
} else {
// Push the main field's name so sub-field names resolve relative to it.
context.path().add(mainFieldBuilder.name());
// Raw-typed alias of the same builder: built Mapper instances replace the
// Mapper.Builder values in place (hence the unchecked suppression and the
// cast() below).
ImmutableOpenMap.Builder mapperBuilders = this.mapperBuilders;
for (ObjectObjectCursor<String, Mapper.Builder> cursor : this.mapperBuilders) {
String key = cursor.key;
Mapper.Builder value = cursor.value;
Mapper mapper = value.build(context);
assert mapper instanceof FieldMapper;
mapperBuilders.put(key, mapper);
}
context.path().remove();
ImmutableOpenMap.Builder<String, FieldMapper> mappers = mapperBuilders.cast();
return new MultiFields(mappers.build());
}
}
}
// Sub-field mappers keyed by sub-field name.
private final ImmutableOpenMap<String, FieldMapper> mappers;
private MultiFields(ImmutableOpenMap<String, FieldMapper> mappers) {
ImmutableOpenMap.Builder<String, FieldMapper> builder = new ImmutableOpenMap.Builder<>();
// we disable the all in multi-field mappers
for (ObjectObjectCursor<String, FieldMapper> cursor : mappers) {
FieldMapper mapper = cursor.value;
if (mapper instanceof AllFieldMapper.IncludeInAll) {
mapper = (FieldMapper) ((AllFieldMapper.IncludeInAll) mapper).unsetIncludeInAll();
}
builder.put(cursor.key, mapper);
}
this.mappers = builder.build();
}
/**
 * Lets every sub-field mapper parse the current value, with the main field's
 * simple name pushed on the context path for the duration.
 */
public void parse(FieldMapper mainField, ParseContext context) throws IOException {
// TODO: multi fields are really just copy fields, we just need to expose "sub fields" or something that can be part of the mappings
if (mappers.isEmpty()) {
return;
}
context = context.createMultiFieldContext();
context.path().add(mainField.simpleName());
for (ObjectCursor<FieldMapper> cursor : mappers.values()) {
cursor.value.parse(context);
}
context.path().remove();
}
/**
 * Merges another MultiFields into this one, returning a new instance. New
 * sub-fields are added (with include-in-all disabled, matching the
 * constructor's policy); sub-fields present on both sides are merged and the
 * merged mapper overrides the previous definition.
 */
public MultiFields merge(MultiFields mergeWith) {
ImmutableOpenMap.Builder<String, FieldMapper> newMappersBuilder = ImmutableOpenMap.builder(mappers);
for (ObjectCursor<FieldMapper> cursor : mergeWith.mappers.values()) {
FieldMapper mergeWithMapper = cursor.value;
FieldMapper mergeIntoMapper = mappers.get(mergeWithMapper.simpleName());
if (mergeIntoMapper == null) {
// we disable the all in multi-field mappers
if (mergeWithMapper instanceof AllFieldMapper.IncludeInAll) {
mergeWithMapper = (FieldMapper) ((AllFieldMapper.IncludeInAll) mergeWithMapper).unsetIncludeInAll();
}
newMappersBuilder.put(mergeWithMapper.simpleName(), mergeWithMapper);
} else {
FieldMapper merged = mergeIntoMapper.merge(mergeWithMapper, false);
newMappersBuilder.put(merged.simpleName(), merged); // override previous definition
}
}
ImmutableOpenMap<String, FieldMapper> mappers = newMappersBuilder.build();
return new MultiFields(mappers);
}
/**
 * Rebuilds sub-field mappers whose MappedFieldType changed. Returns this
 * unchanged instance when nothing changed — the builder is allocated lazily
 * only on the first difference.
 */
public MultiFields updateFieldType(Map<String, MappedFieldType> fullNameToFieldType) {
ImmutableOpenMap.Builder<String, FieldMapper> newMappersBuilder = null;
for (ObjectCursor<FieldMapper> cursor : mappers.values()) {
FieldMapper updated = cursor.value.updateFieldType(fullNameToFieldType);
if (updated != cursor.value) {
if (newMappersBuilder == null) {
newMappersBuilder = ImmutableOpenMap.builder(mappers);
}
newMappersBuilder.put(updated.simpleName(), updated);
}
}
if (newMappersBuilder == null) {
return this;
}
ImmutableOpenMap<String, FieldMapper> mappers = newMappersBuilder.build();
return new MultiFields(mappers);
}
// Iterates the sub-field mappers viewed as plain Mappers.
public Iterator<Mapper> iterator() {
return StreamSupport.stream(mappers.values().spliterator(), false).map((p) -> (Mapper)p.value).iterator();
}
// Serializes sub-fields under a "fields" object; sorted by name so the
// serialized form is deterministic.
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
if (!mappers.isEmpty()) {
// sort the mappers so we get consistent serialization format
Mapper[] sortedMappers = mappers.values().toArray(Mapper.class);
Arrays.sort(sortedMappers, new Comparator<Mapper>() {
@Override
public int compare(Mapper o1, Mapper o2) {
return o1.name().compareTo(o2.name());
}
});
builder.startObject("fields");
for (Mapper mapper : sortedMappers) {
mapper.toXContent(builder, params);
}
builder.endObject();
}
return builder;
}
}
/**
 * Represents a list of fields with optional boost factor where the current field should be copied to
 */
public static class CopyTo {

    // Target field names; immutable once built.
    private final List<String> copyToFields;

    private CopyTo(List<String> copyToFields) {
        this.copyToFields = copyToFields;
    }

    /** Serializes the targets as a "copy_to" array; emits nothing when empty. */
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        if (copyToFields.isEmpty()) {
            return builder;
        }
        builder.startArray("copy_to");
        for (String field : copyToFields) {
            builder.value(field);
        }
        builder.endArray();
        return builder;
    }

    /** Accumulates target field names and builds an immutable CopyTo. */
    public static class Builder {
        private final List<String> copyToBuilders = new ArrayList<>();

        public Builder add(String field) {
            copyToBuilders.add(field);
            return this;
        }

        public CopyTo build() {
            return new CopyTo(Collections.unmodifiableList(copyToBuilders));
        }
    }

    /** The field names this field's value is copied to. */
    public List<String> copyToFields() {
        return copyToFields;
    }
}
/**
 * Fields might not be available before indexing, for example _all, token_count,...
 * When get is called and these fields are requested, this case needs special treatment.
 *
 * @return If the field is available before indexing or not.
 */
// Base implementation: not a generated field; mappers for generated fields
// (e.g. _all, token_count) override this to return true.
public boolean isGenerated() {
return false;
}
}
| |
/*
* Copyright 2005-2014 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.osedu.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.kra.award.document.authorization;
import org.kuali.coeus.sys.framework.auth.KcTransactionalDocumentAuthorizerBase;
import org.kuali.coeus.sys.framework.auth.task.ApplicationTask;
import org.kuali.coeus.sys.framework.service.KcServiceLocator;
import org.kuali.kra.award.awardhierarchy.AwardHierarchy;
import org.kuali.kra.award.awardhierarchy.AwardHierarchyService;
import org.kuali.kra.award.document.AwardDocument;
import org.kuali.kra.award.home.Award;
import org.kuali.kra.award.infrastructure.AwardTaskNames;
import org.kuali.kra.infrastructure.Constants;
import org.kuali.kra.infrastructure.TaskName;
import org.kuali.kra.timeandmoney.AwardHierarchyNode;
import org.kuali.rice.coreservice.framework.parameter.ParameterConstants;
import org.kuali.rice.kew.api.KewApiConstants;
import org.kuali.rice.kew.api.WorkflowDocument;
import org.kuali.rice.kew.api.exception.WorkflowException;
import org.kuali.rice.kim.api.identity.Person;
import org.kuali.rice.kim.api.permission.PermissionService;
import org.kuali.rice.kim.api.services.KimApiServiceLocator;
import org.kuali.rice.kns.authorization.AuthorizationConstants;
import org.kuali.rice.krad.bo.DocumentHeader;
import org.kuali.rice.krad.document.Document;
import org.kuali.rice.krad.util.GlobalVariables;
import org.kuali.rice.krad.util.KRADConstants;
import java.util.*;
/**
 * This class is the Award Document Authorizer. It determines the edit modes and
 * document actions for all award documents.
 */
public class AwardDocumentAuthorizer extends KcTransactionalDocumentAuthorizerBase {

    // Lazily initialized via getAwardHierarchyService().
    private AwardHierarchyService awardHierarchyService;

    /**
     * Determines the edit modes available to the user. For an unsaved award
     * (no award id yet) access is governed solely by the create-award
     * permission; for an existing award the modes derive from the award tasks
     * the user may execute.
     *
     * @see org.kuali.rice.kns.document.authorization.TransactionalDocumentAuthorizer#getEditModes(
     * org.kuali.rice.krad.document.Document, org.kuali.rice.kim.api.identity.Person, java.util.Set)
     */
    public Set<String> getEditModes(Document document, Person user, Set<String> currentEditModes) {
        Set<String> editModes = new HashSet<String>();
        AwardDocument awardDocument = (AwardDocument) document;
        if (awardDocument.getAward().getAwardId() == null) {
            if (canCreateAward(user.getPrincipalId())) {
                editModes.add(AuthorizationConstants.EditMode.FULL_ENTRY);
                if (canViewChartOfAccountsElement(awardDocument)) {
                    editModes.add("viewChartOfAccountsElement");
                }
                if (canViewAccountElement(awardDocument)) {
                    editModes.add("viewAccountElement");
                }
            }
            else {
                editModes.add(AuthorizationConstants.EditMode.UNVIEWABLE);
            }
        }
        else {
            boolean isCanceled = awardDocument.isCanceled();
            // Fixed: reuse the cached isCanceled flag instead of re-querying the document.
            if (!isCanceled && canExecuteAwardTask(user.getPrincipalId(), awardDocument, AwardTaskNames.MODIFY_AWARD.getAwardTaskName())) {
                editModes.add(AuthorizationConstants.EditMode.FULL_ENTRY);
            }
            else if (canExecuteAwardTask(user.getPrincipalId(), awardDocument, AwardTaskNames.VIEW_AWARD.getAwardTaskName())) {
                editModes.add(AuthorizationConstants.EditMode.VIEW_ONLY);
            }
            else {
                editModes.add(AuthorizationConstants.EditMode.UNVIEWABLE);
            }
            // Budget-related modes are unavailable on canceled awards.
            if (!isCanceled && canExecuteAwardTask(user.getPrincipalId(), awardDocument, TaskName.ADD_BUDGET)) {
                editModes.add("addBudget");
            }
            if (canExecuteAwardTask(user.getPrincipalId(), awardDocument, TaskName.OPEN_BUDGETS)) {
                editModes.add("openBudgets");
            }
            if (!isCanceled && canExecuteAwardTask(user.getPrincipalId(), awardDocument, TaskName.MODIFY_BUDGET)) {
                editModes.add("modifyAwardBudget");
            }
            if (canCreateAward(user.getPrincipalId())) {
                editModes.add(Constants.CAN_CREATE_AWARD_KEY);
            }
            if (canCreateAwardAccount(document, user)) {
                editModes.add("createAwardAccount");
            }
            if (awardHasHierarchyChildren(document)) {
                editModes.add("awardSync");
            }
            if (canViewChartOfAccountsElement(awardDocument)) {
                editModes.add("viewChartOfAccountsElement");
            }
            if (canViewAccountElement(awardDocument)) {
                editModes.add("viewAccountElement");
            }
        }
        return editModes;
    }

    @Override
    public boolean canInitiate(String documentTypeName, Person user) {
        return canCreateAward(user.getPrincipalId());
    }

    /**
     * This method decides if a user has permissions to create a financial account.
     * Only meaningful once the document is PROCESSED or FINAL and the financial
     * system integration parameter is on.
     *
     * @param document the award document
     * @param user the user (note: the permission check uses the session principal,
     *             not this parameter — preserved from the original behavior)
     * @return hasPermission
     */
    public boolean canCreateAwardAccount(Document document, Person user) {
        AwardDocument awardDocument = (AwardDocument) document;
        Award award = awardDocument.getAward();
        boolean hasPermission = false;
        String status = document.getDocumentHeader().getWorkflowDocument().getStatus().getCode();
        // if document is in processed or final state
        if (status.equalsIgnoreCase(KewApiConstants.ROUTE_HEADER_PROCESSED_CD) ||
                status.equalsIgnoreCase(KewApiConstants.ROUTE_HEADER_FINAL_CD)) {
            // if the integration parameter is ON
            if (isFinancialSystemIntegrationParameterOn()) {
                hasPermission = hasCreateAccountPermission(awardDocument);
                // only the OSP admin can create a financial account
                // if account has already been created, anyone can see it
                if (award.getFinancialAccountDocumentNumber() != null) {
                    hasPermission = true;
                }
            }
        }
        return hasPermission;
    }

    /** Reads the financial system integration on/off system parameter. */
    protected boolean isFinancialSystemIntegrationParameterOn() {
        Boolean awardAccountParameter = getParameterService().getParameterValueAsBoolean(
                Constants.PARAMETER_MODULE_AWARD,
                ParameterConstants.DOCUMENT_COMPONENT,
                Constants.FIN_SYSTEM_INTEGRATION_ON_OFF_PARAMETER);
        return awardAccountParameter;
    }

    /** Whether the current session user may execute the create-award-account task. */
    public boolean hasCreateAccountPermission(AwardDocument document) {
        return canExecuteAwardTask(GlobalVariables.getUserSession().getPrincipalId(), document, AwardTaskNames.CREATE_AWARD_ACCOUNT.getAwardTaskName());
    }

    /*
     * This only appears when the integration is ON
     */
    public boolean canViewChartOfAccountsElement(AwardDocument document) {
        // Simplified from if/return-true/return-false; evaluation order
        // (permission check first) is preserved.
        return hasCreateAccountPermission(document) && isFinancialSystemIntegrationParameterOn();
    }

    /*
     * This field appears even if the financial integration is OFF,
     * but when it is ON, the user needs to have
     * the create account permission to view it.
     */
    public boolean canViewAccountElement(AwardDocument document) {
        if (isFinancialSystemIntegrationParameterOn()) {
            return hasCreateAccountPermission(document);
        }
        return true;
    }

    @Override
    public boolean canOpen(Document document, Person user) {
        AwardDocument awardDocument = (AwardDocument) document;
        if (awardDocument.getAward().getAwardId() == null) {
            // Unsaved award: opening is equivalent to being allowed to create one.
            return canCreateAward(user.getPrincipalId());
        }
        return canExecuteAwardTask(user.getPrincipalId(), (AwardDocument) document, AwardTaskNames.VIEW_AWARD.getAwardTaskName());
    }

    @Override
    public boolean canEdit(Document document, Person user) {
        boolean isCanceled = ((AwardDocument) document).isCanceled();
        return !isCanceled && canExecuteAwardTask(user.getPrincipalId(), (AwardDocument) document, AwardTaskNames.MODIFY_AWARD.getAwardTaskName());
    }

    @Override
    public boolean canSave(Document document, Person user) {
        return canEdit(document, user);
    }

    @Override
    public boolean canCopy(Document document, Person user) {
        // Copying award documents is never allowed through this authorizer.
        return false;
    }

    /**
     * Recursively checks whether any award below currentAward in the hierarchy
     * has a FINAL award document.
     */
    private boolean doesAwardHierarchyContainFinalChildren(AwardHierarchy currentAward, Map<String, AwardHierarchyNode> awardHierarchyNodes) {
        for (AwardHierarchy child : currentAward.getChildren()) {
            AwardHierarchyNode childInfo = awardHierarchyNodes.get(child.getAwardNumber());
            if (childInfo.isAwardDocumentFinalStatus()) {
                return true;
            }
            // BUG FIX: the recursive call's result was previously discarded, so a
            // FINAL award more than one level down was never detected. Recurse on
            // the child hierarchy entry and propagate a positive answer.
            if (doesAwardHierarchyContainFinalChildren(child, awardHierarchyNodes)) {
                return true;
            }
        }
        return false;
    }

    /** Whether this award is the first version (sequence number 1). */
    private boolean isCurrentAwardTheFirstVersion(Award currentAward) {
        return currentAward.getSequenceNumber() == 1;
    }

    /**
     * A saved, first-version root award cannot be canceled while any child
     * award in its hierarchy is FINAL (a state that can occur after an award
     * hierarchy is copied).
     *
     * @see org.kuali.coeus.sys.framework.auth.KcTransactionalDocumentAuthorizerBase#canCancel(org.kuali.rice.krad.document.Document, org.kuali.rice.kim.api.identity.Person)
     */
    @Override
    public boolean canCancel(Document document, Person user) {
        if (!canEdit(document, user)) {
            return false;
        }
        boolean canCancel = true;
        DocumentHeader docHeader = document.getDocumentHeader();
        WorkflowDocument workflowDoc = docHeader.getWorkflowDocument();
        if (workflowDoc.isSaved()) {
            //User cannot cancel if there are FINAL child awards and if this document is the first version
            //which could possibly happen after an AH is copied
            AwardDocument awardDocument = (AwardDocument) document;
            // Consistency: use the lazily-initialized accessor instead of a fresh
            // service-locator lookup.
            AwardHierarchyService awardHierarchyService = getAwardHierarchyService();
            Award currentAward = awardDocument.getAward();
            Map<String, AwardHierarchyNode> awardHierarchyNodes = new HashMap<String, AwardHierarchyNode>();
            Map<String, AwardHierarchy> awardHierarchyItems = awardHierarchyService.getAwardHierarchy(awardDocument.getAward().getAwardNumber(), new ArrayList<String>());
            AwardHierarchy currentAwardNode = awardHierarchyItems.get(currentAward.getAwardNumber());
            if (currentAwardNode.isRootNode() && isCurrentAwardTheFirstVersion(currentAward)) {
                awardHierarchyService.populateAwardHierarchyNodes(awardHierarchyItems, awardHierarchyNodes, currentAward.getAwardNumber(), currentAward.getSequenceNumber().toString());
                canCancel = !doesAwardHierarchyContainFinalChildren(currentAwardNode, awardHierarchyNodes);
            }
        }
        return canCancel;
    }

    /**
     * Can the user approve the given document?
     * @param document the document
     * @param user the user
     * @return true if the user can approve the document; otherwise false
     */
    @Override
    public boolean canApprove(Document document, Person user) {
        return isEnroute(document) && super.canApprove(document, user);
    }

    /**
     * Can the user disapprove the given document? Deliberately mirrors the
     * approve check.
     * @param document the document
     * @param user the user
     * @return true if the user can disapprove the document; otherwise false
     */
    @Override
    public boolean canDisapprove(Document document, Person user) {
        return canApprove(document, user);
    }

    /**
     * Can the user blanket approve the given document?
     * @param document the document
     * @param user the user
     * @return true if the user can blanket approve the document; otherwise false
     */
    @Override
    public boolean canBlanketApprove(Document document, Person user) {
        PermissionService permService = KcServiceLocator.getService(KimApiServiceLocator.KIM_PERMISSION_SERVICE);
        // Requires the KIM blanket-approve permission on a document that is not
        // yet FINAL or PROCESSED. (The original additionally re-tested
        // !isFinal(document), which is already implied here.)
        boolean canBA = !(isFinal(document) || isProcessed(document))
                && permService.hasPermission(user.getPrincipalId(), "KC-AWARD", "Blanket Approve AwardDocument");
        if (canBA) {
            // check system parameter - if Y, use default workflow behavior: allow a user with the permission
            // to perform the blanket approve action at any time
            try {
                if (getParameterService().getParameterValueAsBoolean(KRADConstants.KNS_NAMESPACE, KRADConstants.DetailTypes.DOCUMENT_DETAIL_TYPE, KRADConstants.SystemGroupParameterNames.ALLOW_ENROUTE_BLANKET_APPROVE_WITHOUT_APPROVAL_REQUEST_IND)) {
                    return canEdit(document);
                }
            } catch (IllegalArgumentException ex) {
                // do nothing, the parameter does not exist and defaults to "N"
            }
            // (prior to routing)
            WorkflowDocument workflowDocument = document.getDocumentHeader().getWorkflowDocument();
            if (canRoute(document)) {
                return true;
            }
            // or to a user with an approval action request
            if (workflowDocument.isApprovalRequested()) {
                return true;
            }
        }
        return false;
    }

    /**
     * Does the user have permission to create a award?
     * @param userId the user's principal id
     * @return true if the user can create a award; otherwise false
     */
    private boolean canCreateAward(String userId) {
        ApplicationTask task = new ApplicationTask(TaskName.CREATE_AWARD);
        return getTaskAuthorizationService().isAuthorized(userId, task);
    }

    /**
     * Does the user have permission to execute the given task for a award?
     * @param userId the user's principal id
     * @param doc the award document
     * @param taskName the name of the task
     * @return true if has permission; otherwise false
     */
    private boolean canExecuteAwardTask(String userId, AwardDocument doc, String taskName) {
        AwardTask task = new AwardTask(taskName, doc.getAward());
        return getTaskAuthorizationService().isAuthorized(userId, task);
    }

    /** Whether the award's hierarchy branch has children (enables "awardSync"). */
    protected boolean awardHasHierarchyChildren(Document document) {
        AwardDocument awardDocument = (AwardDocument) document;
        AwardHierarchy hierarchy = getAwardHierarchyService().loadAwardHierarchyBranch(awardDocument.getAward().getAwardNumber());
        return hierarchy != null && hierarchy.hasChildren();
    }

    /** Lazily resolves and caches the award hierarchy service. */
    public AwardHierarchyService getAwardHierarchyService() {
        if (awardHierarchyService == null) {
            awardHierarchyService =
                    KcServiceLocator.getService(AwardHierarchyService.class);
        }
        return awardHierarchyService;
    }

    @Override
    public boolean canSendNoteFyi(Document document, Person user) {
        return false;
    }

    @Override
    public boolean canFyi(Document document, Person user) {
        return isProcessed(document) && super.canFyi(document, user);
    }

    @Override
    public boolean canRoute(Document document, Person user) {
        PermissionService permService = KcServiceLocator.getService(KimApiServiceLocator.KIM_PERMISSION_SERVICE);
        // Routing requires the submit permission and a document not yet FINAL/PROCESSED.
        return !(isFinal(document) || isProcessed(document))
                && permService.hasPermission(user.getPrincipalId(), "KC-AWARD", "Submit Award");
    }

    @Override
    public boolean canAcknowledge(Document document, Person user) {
        return isProcessed(document) && super.canAcknowledge(document, user);
    }

    /** Whether the workflow document is in the PROCESSED state. */
    protected boolean isProcessed(Document document) {
        String status = document.getDocumentHeader().getWorkflowDocument().getStatus().getCode();
        return status.equalsIgnoreCase(KewApiConstants.ROUTE_HEADER_PROCESSED_CD);
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.blockmanagement;
import static org.apache.hadoop.hdfs.server.common.HdfsServerConstants.BLOCK_GROUP_INDEX_MASK;
import static org.apache.hadoop.hdfs.server.common.HdfsServerConstants.MAX_BLOCKS_IN_GROUP;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.not;
import static org.junit.Assert.assertThat;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.spy;
import java.io.IOException;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.DFSTestUtil;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.StripedFileTestUtil;
import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
import org.apache.hadoop.hdfs.protocol.LocatedBlock;
import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
import org.apache.hadoop.test.GenericTestUtils;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.mockito.internal.util.reflection.Whitebox;
import org.mockito.stubbing.Answer;
/**
 * Tests the sequential blockGroup ID generation mechanism and blockGroup ID
 * collision handling.
 */
public class TestSequentialBlockGroupId {
  // Idiom fix: use the class literal rather than a string literal so the
  // logger name tracks any future rename of this class.
  private static final Log LOG =
      LogFactory.getLog(TestSequentialBlockGroupId.class);

  // Constants independent of instance state; promoted to static final.
  private static final short REPLICATION = 1;
  private static final long SEED = 0;

  private final ErasureCodingPolicy ecPolicy =
      StripedFileTestUtil.getDefaultECPolicy();
  private final int dataBlocks = ecPolicy.getNumDataUnits();
  private final int parityBlocks = ecPolicy.getNumParityUnits();
  private final int cellSize = ecPolicy.getCellSize();
  private final int stripesPerBlock = 2;
  private final int blockSize = cellSize * stripesPerBlock;
  private final int numDNs = dataBlocks + parityBlocks + 2;
  private final int blockGrpCount = 4;
  private final int fileLen = blockSize * dataBlocks * blockGrpCount;

  private MiniDFSCluster cluster;
  private DistributedFileSystem fs;
  private SequentialBlockGroupIdGenerator blockGrpIdGenerator;
  private final Path ecDir = new Path("/ecDir");

  /** Brings up a mini cluster with an erasure-coded test directory. */
  @Before
  public void setup() throws Exception {
    Configuration conf = new HdfsConfiguration();
    conf.setInt(DFSConfigKeys.DFS_REPLICATION_KEY, 1);
    conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, blockSize);
    cluster = new MiniDFSCluster.Builder(conf).numDataNodes(numDNs).build();
    cluster.waitActive();
    fs = cluster.getFileSystem();
    // Consistency fix: reuse the ecPolicy field instead of re-resolving the
    // default policy at each call site.
    fs.enableErasureCodingPolicy(ecPolicy.getName());
    blockGrpIdGenerator = cluster.getNamesystem().getBlockManager()
        .getBlockIdManager().getBlockGroupIdGenerator();
    fs.mkdirs(ecDir);
    cluster.getFileSystem().getClient().setErasureCodingPolicy("/ecDir",
        ecPolicy.getName());
  }

  /** Shuts the cluster down; null-safe so a failed setup does not mask errors. */
  @After
  public void teardown() {
    if (cluster != null) {
      cluster.shutdown();
      cluster = null;
    }
  }

  /**
   * Test that blockGroup IDs are generating unique value.
   */
  @Test(timeout = 60000)
  public void testBlockGroupIdGeneration() throws IOException {
    long blockGroupIdInitialValue = blockGrpIdGenerator.getCurrentValue();
    // Create a file that is 4 blocks long.
    Path path = new Path(ecDir, "testBlockGrpIdGeneration.dat");
    DFSTestUtil.createFile(fs, path, cellSize, fileLen, blockSize, REPLICATION,
        SEED);
    List<LocatedBlock> blocks = DFSTestUtil.getAllBlocks(fs, path);
    assertThat("Wrong BlockGrps", blocks.size(), is(blockGrpCount));
    // initialising the block group generator for verifying the block id
    blockGrpIdGenerator.setCurrentValue(blockGroupIdInitialValue);
    // Ensure that the block IDs are generating unique value.
    for (int i = 0; i < blocks.size(); ++i) {
      // Advance to the next block group boundary and compare against the id
      // actually assigned to block group i.
      blockGrpIdGenerator
          .skipTo((blockGrpIdGenerator.getCurrentValue() & ~BLOCK_GROUP_INDEX_MASK)
              + MAX_BLOCKS_IN_GROUP);
      long nextBlockExpectedId = blockGrpIdGenerator.getCurrentValue();
      long nextBlockGrpId = blocks.get(i).getBlock().getBlockId();
      LOG.info("BlockGrp" + i + " id is " + nextBlockGrpId);
      assertThat("BlockGrpId mismatches!", nextBlockGrpId,
          is(nextBlockExpectedId));
    }
    // verify that the blockGroupId resets on #clear call.
    cluster.getNamesystem().getBlockManager().clear();
    assertThat("BlockGrpId mismatches!", blockGrpIdGenerator.getCurrentValue(),
        is(Long.MIN_VALUE));
  }

  /**
   * Test that collisions in the blockGroup ID space are handled gracefully.
   */
  @Test(timeout = 60000)
  public void testTriggerBlockGroupIdCollision() throws IOException {
    long blockGroupIdInitialValue = blockGrpIdGenerator.getCurrentValue();
    // Create a file with a few blocks to rev up the global block ID
    // counter.
    Path path1 = new Path(ecDir, "testBlockGrpIdCollisionDetection_file1.dat");
    DFSTestUtil.createFile(fs, path1, cellSize, fileLen, blockSize,
        REPLICATION, SEED);
    List<LocatedBlock> blocks1 = DFSTestUtil.getAllBlocks(fs, path1);
    assertThat("Wrong BlockGrps", blocks1.size(), is(blockGrpCount));
    // Rewind the block ID counter in the name system object. This will result
    // in block ID collisions when we try to allocate new blocks.
    blockGrpIdGenerator.setCurrentValue(blockGroupIdInitialValue);
    // Trigger collisions by creating a new file.
    Path path2 = new Path(ecDir, "testBlockGrpIdCollisionDetection_file2.dat");
    DFSTestUtil.createFile(fs, path2, cellSize, fileLen, blockSize,
        REPLICATION, SEED);
    List<LocatedBlock> blocks2 = DFSTestUtil.getAllBlocks(fs, path2);
    assertThat("Wrong BlockGrps", blocks2.size(), is(blockGrpCount));
    // Make sure that file1 and file2 block IDs are different
    for (LocatedBlock locBlock1 : blocks1) {
      long blockId1 = locBlock1.getBlock().getBlockId();
      for (LocatedBlock locBlock2 : blocks2) {
        long blockId2 = locBlock2.getBlock().getBlockId();
        assertThat("BlockGrpId mismatches!", blockId1, is(not(blockId2)));
      }
    }
  }

  /**
   * Test that collisions in the blockGroup ID when the id is occupied by legacy
   * block.
   */
  @Test(timeout = 60000)
  public void testTriggerBlockGroupIdCollisionWithLegacyBlockId()
      throws Exception {
    long blockGroupIdInitialValue = blockGrpIdGenerator.getCurrentValue();
    blockGrpIdGenerator
        .skipTo((blockGrpIdGenerator.getCurrentValue() & ~BLOCK_GROUP_INDEX_MASK)
            + MAX_BLOCKS_IN_GROUP);
    final long curBlockGroupIdValue = blockGrpIdGenerator.getCurrentValue();
    // Creates contiguous block with negative blockId so that it would trigger
    // collision during blockGroup Id generation
    FSNamesystem fsn = cluster.getNamesystem();
    // Replace SequentialBlockIdGenerator with a spy
    SequentialBlockIdGenerator blockIdGenerator = spy(fsn.getBlockManager()
        .getBlockIdManager().getBlockIdGenerator());
    Whitebox.setInternalState(fsn.getBlockManager().getBlockIdManager(),
        "blockIdGenerator", blockIdGenerator);
    // The spy's nextValue() is pinned to curBlockGroupIdValue so the next
    // contiguous block occupies exactly the id the group generator will want.
    SequentialBlockIdGenerator spySequentialBlockIdGenerator = new SequentialBlockIdGenerator(
        null) {
      @Override
      public long nextValue() {
        return curBlockGroupIdValue;
      }
    };
    final Answer<Object> delegator = new GenericTestUtils.DelegateAnswer(
        spySequentialBlockIdGenerator);
    doAnswer(delegator).when(blockIdGenerator).nextValue();
    Path path1 = new Path("/testCollisionWithLegacyBlock_file1.dat");
    DFSTestUtil.createFile(fs, path1, 1024, REPLICATION, SEED);
    List<LocatedBlock> contiguousBlocks = DFSTestUtil.getAllBlocks(fs, path1);
    assertThat(contiguousBlocks.size(), is(1));
    Assert.assertEquals("Unexpected BlockId!", curBlockGroupIdValue,
        contiguousBlocks.get(0).getBlock().getBlockId());
    // Reset back to the initial value to trigger collision
    blockGrpIdGenerator.setCurrentValue(blockGroupIdInitialValue);
    // Trigger collisions by creating a new file.
    Path path2 = new Path(ecDir, "testCollisionWithLegacyBlock_file2.dat");
    DFSTestUtil.createFile(fs, path2, cellSize, fileLen, blockSize,
        REPLICATION, SEED);
    List<LocatedBlock> blocks2 = DFSTestUtil.getAllBlocks(fs, path2);
    assertThat("Wrong BlockGrps", blocks2.size(), is(blockGrpCount));
    // Make sure that file1 and file2 block IDs are different
    for (LocatedBlock locBlock1 : contiguousBlocks) {
      long blockId1 = locBlock1.getBlock().getBlockId();
      for (LocatedBlock locBlock2 : blocks2) {
        long blockId2 = locBlock2.getBlock().getBlockId();
        assertThat("BlockGrpId mismatches!", blockId1, is(not(blockId2)));
      }
    }
  }
}
| |
/*
* Copyright 2002-2007 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.ejb.access;
import java.lang.reflect.Proxy;
import java.rmi.RemoteException;
import javax.ejb.CreateException;
import javax.ejb.EJBHome;
import javax.ejb.EJBObject;
import javax.naming.NamingException;
import org.easymock.MockControl;
import org.springframework.jndi.JndiTemplate;
import org.springframework.remoting.RemoteAccessException;
/**
* @author Rod Johnson
* @author Juergen Hoeller
* @since 21.05.2003
*/
public class SimpleRemoteStatelessSessionProxyFactoryBeanTests extends SimpleRemoteSlsbInvokerInterceptorTests {
/**
 * Returns the factory bean variant of the interceptor, so the inherited test
 * suite runs against SimpleRemoteStatelessSessionProxyFactoryBean.
 */
protected SimpleRemoteSlsbInvokerInterceptor createInterceptor() {
return new SimpleRemoteStatelessSessionProxyFactoryBean();
}
/**
 * Completes configuration of the interceptor (which here is the factory bean
 * created by createInterceptor) and returns the business proxy it builds.
 */
protected Object configuredProxy(SimpleRemoteSlsbInvokerInterceptor si, Class ifc) throws NamingException {
    SimpleRemoteStatelessSessionProxyFactoryBean factoryBean =
            (SimpleRemoteStatelessSessionProxyFactoryBean) si;
    factoryBean.setBusinessInterface(ifc);
    factoryBean.afterPropertiesSet();
    return factoryBean.getObject();
}
/**
 * Happy path: a business method invoked on the proxy is delegated to the EJB
 * created by the home looked up in JNDI, and remove() is called on the EJB.
 */
public void testInvokesMethod() throws Exception {
final int value = 11;
final String jndiName = "foo";
// Record phase: the EJB must answer getValue() once and be removed once.
MockControl ec = MockControl.createControl(MyEjb.class);
MyEjb myEjb = (MyEjb) ec.getMock();
myEjb.getValue();
ec.setReturnValue(value, 1);
myEjb.remove();
ec.setVoidCallable(1);
ec.replay();
// The home must create exactly one EJB instance.
MockControl mc = MockControl.createControl(MyHome.class);
final MyHome home = (MyHome) mc.getMock();
home.create();
mc.setReturnValue(myEjb, 1);
mc.replay();
// Stub JNDI: with resourceRef=true the java:comp/env/ prefix must be prepended.
JndiTemplate jt = new JndiTemplate() {
public Object lookup(String name) {
// parameterize
assertTrue(name.equals("java:comp/env/" + jndiName));
return home;
}
};
SimpleRemoteStatelessSessionProxyFactoryBean fb = new SimpleRemoteStatelessSessionProxyFactoryBean();
fb.setJndiName(jndiName);
fb.setResourceRef(true);
fb.setBusinessInterface(MyBusinessMethods.class);
fb.setJndiTemplate(jt);
// Need lifecycle methods
fb.afterPropertiesSet();
MyBusinessMethods mbm = (MyBusinessMethods) fb.getObject();
assertTrue(Proxy.isProxyClass(mbm.getClass()));
assertEquals("Returns expected value", value, mbm.getValue());
// Verify phase: both mocks saw exactly the recorded interactions.
mc.verify();
ec.verify();
}
/**
 * A RemoteException thrown by the target EJB must reach the caller as the very
 * same instance (not wrapped), as asserted via assertSame below.
 */
public void testRemoteException() throws Exception {
final RemoteException rex = new RemoteException();
final String jndiName = "foo";
// Record phase: getValue() fails with a RemoteException; remove() is still expected.
MockControl ec = MockControl.createControl(MyEjb.class);
MyEjb myEjb = (MyEjb) ec.getMock();
myEjb.getValue();
ec.setThrowable(rex);
// TODO might want to control this behaviour...
// Do we really want to call remove after a remote exception?
myEjb.remove();
ec.setVoidCallable(1);
ec.replay();
MockControl mc = MockControl.createControl(MyHome.class);
final MyHome home = (MyHome) mc.getMock();
home.create();
mc.setReturnValue(myEjb, 1);
mc.replay();
// Stub JNDI: with resourceRef=true the java:comp/env/ prefix must be prepended.
JndiTemplate jt = new JndiTemplate() {
public Object lookup(String name) {
// parameterize
assertTrue(name.equals("java:comp/env/" + jndiName));
return home;
}
};
SimpleRemoteStatelessSessionProxyFactoryBean fb = new SimpleRemoteStatelessSessionProxyFactoryBean();
fb.setJndiName(jndiName);
fb.setResourceRef(true);
fb.setBusinessInterface(MyBusinessMethods.class);
fb.setJndiTemplate(jt);
// Need lifecycle methods
fb.afterPropertiesSet();
MyBusinessMethods mbm = (MyBusinessMethods) fb.getObject();
assertTrue(Proxy.isProxyClass(mbm.getClass()));
try {
mbm.getValue();
fail("Should've thrown remote exception");
}
catch (RemoteException ex) {
assertSame("Threw expected RemoteException", rex, ex);
}
// Verify phase: both mocks saw exactly the recorded interactions.
mc.verify();
ec.verify();
}
/**
 * A CreateException from home.create() surfaces to the caller as a
 * RemoteException when the business interface is a remote one.
 */
public void testCreateException() throws Exception {
    final String jndiName = "foo";
    final CreateException cex = new CreateException();

    // Mock the home interface: create() fails.
    MockControl homeControl = MockControl.createControl(MyHome.class);
    final MyHome home = (MyHome) homeControl.getMock();
    home.create();
    homeControl.setThrowable(cex);
    homeControl.replay();

    // resourceRef stays false here, so the raw JNDI name is looked up.
    JndiTemplate jt = new JndiTemplate() {
        public Object lookup(String name) {
            assertTrue(name.equals(jndiName));
            return home;
        }
    };

    SimpleRemoteStatelessSessionProxyFactoryBean fb = new SimpleRemoteStatelessSessionProxyFactoryBean();
    fb.setJndiName(jndiName);
    // Rely on default setting of resourceRef=false: no java:/comp/env prefix added.
    fb.setBusinessInterface(MyBusinessMethods.class);
    assertEquals(fb.getBusinessInterface(), MyBusinessMethods.class);
    fb.setJndiTemplate(jt);
    // Trigger lifecycle initialization before obtaining the proxy.
    fb.afterPropertiesSet();

    MyBusinessMethods proxy = (MyBusinessMethods) fb.getObject();
    assertTrue(Proxy.isProxyClass(proxy.getClass()));
    try {
        proxy.getValue();
        fail("Should have failed to create EJB");
    }
    catch (RemoteException ex) {
        // expected
    }

    homeControl.verify();
}
/**
 * With a local (non-RMI) business interface, a CreateException from
 * home.create() is translated into Spring's RemoteAccessException, with the
 * original CreateException preserved as the cause.
 */
public void testCreateExceptionWithLocalBusinessInterface() throws Exception {
    final String jndiName = "foo";
    final CreateException cex = new CreateException();

    // Mock the home interface: create() fails.
    MockControl homeControl = MockControl.createControl(MyHome.class);
    final MyHome home = (MyHome) homeControl.getMock();
    home.create();
    homeControl.setThrowable(cex);
    homeControl.replay();

    // resourceRef stays false here, so the raw JNDI name is looked up.
    JndiTemplate jt = new JndiTemplate() {
        public Object lookup(String name) {
            assertTrue(name.equals(jndiName));
            return home;
        }
    };

    SimpleRemoteStatelessSessionProxyFactoryBean fb = new SimpleRemoteStatelessSessionProxyFactoryBean();
    fb.setJndiName(jndiName);
    // Rely on default setting of resourceRef=false: no java:/comp/env prefix added.
    fb.setBusinessInterface(MyLocalBusinessMethods.class);
    assertEquals(fb.getBusinessInterface(), MyLocalBusinessMethods.class);
    fb.setJndiTemplate(jt);
    // Trigger lifecycle initialization before obtaining the proxy.
    fb.afterPropertiesSet();

    MyLocalBusinessMethods proxy = (MyLocalBusinessMethods) fb.getObject();
    assertTrue(Proxy.isProxyClass(proxy.getClass()));
    try {
        proxy.getValue();
        fail("Should have failed to create EJB");
    }
    catch (RemoteAccessException ex) {
        // The original CreateException must be retained as the cause.
        assertTrue(ex.getCause() == cex);
    }

    homeControl.verify();
}
/**
 * Omitting the business interface must make afterPropertiesSet() fail with an
 * IllegalArgumentException whose message mentions "businessInterface".
 * The factory will do the JNDI lookup for the home but must never call create().
 */
public void testNoBusinessInterfaceSpecified() throws Exception {
    // Will do JNDI lookup to get home but won't call create.
    // Could actually try to figure out interface from create?
    final String jndiName = "foo";
    MockControl mc = MockControl.createControl(MyHome.class);
    final MyHome home = (MyHome) mc.getMock();
    // No expectations recorded: any call on the home is a test failure.
    mc.replay();
    JndiTemplate jt = new JndiTemplate() {
        public Object lookup(String name) throws NamingException {
            assertTrue(name.equals(jndiName));
            return home;
        }
    };
    SimpleRemoteStatelessSessionProxyFactoryBean fb = new SimpleRemoteStatelessSessionProxyFactoryBean();
    fb.setJndiName(jndiName);
    // Rely on default setting of resourceRef=false, no auto addition of java:/comp/env prefix.
    // Deliberately do NOT set a business interface.
    fb.setJndiTemplate(jt);
    // Check it's a singleton.
    assertTrue(fb.isSingleton());
    try {
        fb.afterPropertiesSet();
        fail("Should have failed to create EJB");
    }
    catch (IllegalArgumentException ex) {
        // TODO more appropriate exception?
        // BUGFIX: the original asserted indexOf(...) != 1, which passes even when
        // "businessInterface" is absent (indexOf returns -1 for a missing
        // substring). Compare against -1 so the message content is actually checked.
        assertTrue(ex.getMessage().indexOf("businessInterface") != -1);
    }
    // Expect no methods to have been invoked on the home interface.
    mc.verify();
}
/** Remote home interface used by these tests: creates {@code MyBusinessMethods} EJBs. */
protected static interface MyHome extends EJBHome {
MyBusinessMethods create() throws CreateException, RemoteException;
}
/** Remote-style business interface: methods declare RemoteException. */
protected static interface MyBusinessMethods {
int getValue() throws RemoteException;
}
/** Local-style business interface: no RemoteException declared, so failures are wrapped. */
protected static interface MyLocalBusinessMethods {
int getValue();
}
/** Remote EJB component interface combining EJBObject with the business methods. */
protected static interface MyEjb extends EJBObject, MyBusinessMethods {
}
}
| |
package com.google.ads.googleads.v9.services;
import static io.grpc.MethodDescriptor.generateFullMethodName;
/**
* <pre>
* Service to manage campaign conversion goal.
* </pre>
*/
@javax.annotation.Generated(
value = "by gRPC proto compiler",
comments = "Source: google/ads/googleads/v9/services/campaign_conversion_goal_service.proto")
@io.grpc.stub.annotations.GrpcGenerated
// Generated gRPC service wrapper for CampaignConversionGoalService.
// Hand edits are limited to comments; the code must stay in sync with the proto compiler output.
public final class CampaignConversionGoalServiceGrpc {
// Utility holder class: no instances.
private CampaignConversionGoalServiceGrpc() {}
public static final String SERVICE_NAME = "google.ads.googleads.v9.services.CampaignConversionGoalService";
// Static method descriptors that strictly reflect the proto.
private static volatile io.grpc.MethodDescriptor<com.google.ads.googleads.v9.services.MutateCampaignConversionGoalsRequest,
com.google.ads.googleads.v9.services.MutateCampaignConversionGoalsResponse> getMutateCampaignConversionGoalsMethod;
@io.grpc.stub.annotations.RpcMethod(
fullMethodName = SERVICE_NAME + '/' + "MutateCampaignConversionGoals",
requestType = com.google.ads.googleads.v9.services.MutateCampaignConversionGoalsRequest.class,
responseType = com.google.ads.googleads.v9.services.MutateCampaignConversionGoalsResponse.class,
methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
public static io.grpc.MethodDescriptor<com.google.ads.googleads.v9.services.MutateCampaignConversionGoalsRequest,
com.google.ads.googleads.v9.services.MutateCampaignConversionGoalsResponse> getMutateCampaignConversionGoalsMethod() {
// Lazily builds and caches the method descriptor. Double-checked locking:
// the volatile field is read into a local, then re-checked under the class lock.
io.grpc.MethodDescriptor<com.google.ads.googleads.v9.services.MutateCampaignConversionGoalsRequest, com.google.ads.googleads.v9.services.MutateCampaignConversionGoalsResponse> getMutateCampaignConversionGoalsMethod;
if ((getMutateCampaignConversionGoalsMethod = CampaignConversionGoalServiceGrpc.getMutateCampaignConversionGoalsMethod) == null) {
synchronized (CampaignConversionGoalServiceGrpc.class) {
if ((getMutateCampaignConversionGoalsMethod = CampaignConversionGoalServiceGrpc.getMutateCampaignConversionGoalsMethod) == null) {
CampaignConversionGoalServiceGrpc.getMutateCampaignConversionGoalsMethod = getMutateCampaignConversionGoalsMethod =
io.grpc.MethodDescriptor.<com.google.ads.googleads.v9.services.MutateCampaignConversionGoalsRequest, com.google.ads.googleads.v9.services.MutateCampaignConversionGoalsResponse>newBuilder()
.setType(io.grpc.MethodDescriptor.MethodType.UNARY)
.setFullMethodName(generateFullMethodName(SERVICE_NAME, "MutateCampaignConversionGoals"))
.setSampledToLocalTracing(true)
.setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
com.google.ads.googleads.v9.services.MutateCampaignConversionGoalsRequest.getDefaultInstance()))
.setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
com.google.ads.googleads.v9.services.MutateCampaignConversionGoalsResponse.getDefaultInstance()))
.setSchemaDescriptor(new CampaignConversionGoalServiceMethodDescriptorSupplier("MutateCampaignConversionGoals"))
.build();
}
}
}
return getMutateCampaignConversionGoalsMethod;
}
/**
 * Creates a new async stub that supports all call types for the service
 */
public static CampaignConversionGoalServiceStub newStub(io.grpc.Channel channel) {
io.grpc.stub.AbstractStub.StubFactory<CampaignConversionGoalServiceStub> factory =
new io.grpc.stub.AbstractStub.StubFactory<CampaignConversionGoalServiceStub>() {
@java.lang.Override
public CampaignConversionGoalServiceStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new CampaignConversionGoalServiceStub(channel, callOptions);
}
};
return CampaignConversionGoalServiceStub.newStub(factory, channel);
}
/**
 * Creates a new blocking-style stub that supports unary and streaming output calls on the service
 */
public static CampaignConversionGoalServiceBlockingStub newBlockingStub(
io.grpc.Channel channel) {
io.grpc.stub.AbstractStub.StubFactory<CampaignConversionGoalServiceBlockingStub> factory =
new io.grpc.stub.AbstractStub.StubFactory<CampaignConversionGoalServiceBlockingStub>() {
@java.lang.Override
public CampaignConversionGoalServiceBlockingStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new CampaignConversionGoalServiceBlockingStub(channel, callOptions);
}
};
return CampaignConversionGoalServiceBlockingStub.newStub(factory, channel);
}
/**
 * Creates a new ListenableFuture-style stub that supports unary calls on the service
 */
public static CampaignConversionGoalServiceFutureStub newFutureStub(
io.grpc.Channel channel) {
io.grpc.stub.AbstractStub.StubFactory<CampaignConversionGoalServiceFutureStub> factory =
new io.grpc.stub.AbstractStub.StubFactory<CampaignConversionGoalServiceFutureStub>() {
@java.lang.Override
public CampaignConversionGoalServiceFutureStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new CampaignConversionGoalServiceFutureStub(channel, callOptions);
}
};
return CampaignConversionGoalServiceFutureStub.newStub(factory, channel);
}
/**
 * <pre>
 * Service to manage campaign conversion goal.
 * </pre>
 */
public static abstract class CampaignConversionGoalServiceImplBase implements io.grpc.BindableService {
/**
 * <pre>
 * Creates, updates or removes campaign conversion goals. Operation statuses
 * are returned.
 * </pre>
 */
public void mutateCampaignConversionGoals(com.google.ads.googleads.v9.services.MutateCampaignConversionGoalsRequest request,
io.grpc.stub.StreamObserver<com.google.ads.googleads.v9.services.MutateCampaignConversionGoalsResponse> responseObserver) {
// Default: reports UNIMPLEMENTED until a subclass overrides this method.
io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getMutateCampaignConversionGoalsMethod(), responseObserver);
}
@java.lang.Override public final io.grpc.ServerServiceDefinition bindService() {
return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor())
.addMethod(
getMutateCampaignConversionGoalsMethod(),
io.grpc.stub.ServerCalls.asyncUnaryCall(
new MethodHandlers<
com.google.ads.googleads.v9.services.MutateCampaignConversionGoalsRequest,
com.google.ads.googleads.v9.services.MutateCampaignConversionGoalsResponse>(
this, METHODID_MUTATE_CAMPAIGN_CONVERSION_GOALS)))
.build();
}
}
/**
 * <pre>
 * Service to manage campaign conversion goal.
 * </pre>
 */
public static final class CampaignConversionGoalServiceStub extends io.grpc.stub.AbstractAsyncStub<CampaignConversionGoalServiceStub> {
private CampaignConversionGoalServiceStub(
io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
super(channel, callOptions);
}
@java.lang.Override
protected CampaignConversionGoalServiceStub build(
io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new CampaignConversionGoalServiceStub(channel, callOptions);
}
/**
 * <pre>
 * Creates, updates or removes campaign conversion goals. Operation statuses
 * are returned.
 * </pre>
 */
public void mutateCampaignConversionGoals(com.google.ads.googleads.v9.services.MutateCampaignConversionGoalsRequest request,
io.grpc.stub.StreamObserver<com.google.ads.googleads.v9.services.MutateCampaignConversionGoalsResponse> responseObserver) {
io.grpc.stub.ClientCalls.asyncUnaryCall(
getChannel().newCall(getMutateCampaignConversionGoalsMethod(), getCallOptions()), request, responseObserver);
}
}
/**
 * <pre>
 * Service to manage campaign conversion goal.
 * </pre>
 */
public static final class CampaignConversionGoalServiceBlockingStub extends io.grpc.stub.AbstractBlockingStub<CampaignConversionGoalServiceBlockingStub> {
private CampaignConversionGoalServiceBlockingStub(
io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
super(channel, callOptions);
}
@java.lang.Override
protected CampaignConversionGoalServiceBlockingStub build(
io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new CampaignConversionGoalServiceBlockingStub(channel, callOptions);
}
/**
 * <pre>
 * Creates, updates or removes campaign conversion goals. Operation statuses
 * are returned.
 * </pre>
 */
public com.google.ads.googleads.v9.services.MutateCampaignConversionGoalsResponse mutateCampaignConversionGoals(com.google.ads.googleads.v9.services.MutateCampaignConversionGoalsRequest request) {
return io.grpc.stub.ClientCalls.blockingUnaryCall(
getChannel(), getMutateCampaignConversionGoalsMethod(), getCallOptions(), request);
}
}
/**
 * <pre>
 * Service to manage campaign conversion goal.
 * </pre>
 */
public static final class CampaignConversionGoalServiceFutureStub extends io.grpc.stub.AbstractFutureStub<CampaignConversionGoalServiceFutureStub> {
private CampaignConversionGoalServiceFutureStub(
io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
super(channel, callOptions);
}
@java.lang.Override
protected CampaignConversionGoalServiceFutureStub build(
io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new CampaignConversionGoalServiceFutureStub(channel, callOptions);
}
/**
 * <pre>
 * Creates, updates or removes campaign conversion goals. Operation statuses
 * are returned.
 * </pre>
 */
public com.google.common.util.concurrent.ListenableFuture<com.google.ads.googleads.v9.services.MutateCampaignConversionGoalsResponse> mutateCampaignConversionGoals(
com.google.ads.googleads.v9.services.MutateCampaignConversionGoalsRequest request) {
return io.grpc.stub.ClientCalls.futureUnaryCall(
getChannel().newCall(getMutateCampaignConversionGoalsMethod(), getCallOptions()), request);
}
}
private static final int METHODID_MUTATE_CAMPAIGN_CONVERSION_GOALS = 0;
// Dispatches incoming server calls to the service implementation by method id.
private static final class MethodHandlers<Req, Resp> implements
io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>,
io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>,
io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>,
io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> {
private final CampaignConversionGoalServiceImplBase serviceImpl;
private final int methodId;
MethodHandlers(CampaignConversionGoalServiceImplBase serviceImpl, int methodId) {
this.serviceImpl = serviceImpl;
this.methodId = methodId;
}
@java.lang.Override
@java.lang.SuppressWarnings("unchecked")
public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) {
switch (methodId) {
case METHODID_MUTATE_CAMPAIGN_CONVERSION_GOALS:
serviceImpl.mutateCampaignConversionGoals((com.google.ads.googleads.v9.services.MutateCampaignConversionGoalsRequest) request,
(io.grpc.stub.StreamObserver<com.google.ads.googleads.v9.services.MutateCampaignConversionGoalsResponse>) responseObserver);
break;
default:
throw new AssertionError();
}
}
@java.lang.Override
@java.lang.SuppressWarnings("unchecked")
public io.grpc.stub.StreamObserver<Req> invoke(
io.grpc.stub.StreamObserver<Resp> responseObserver) {
// This service defines no client-streaming or bidi methods, so this
// overload is unreachable by construction.
switch (methodId) {
default:
throw new AssertionError();
}
}
}
private static abstract class CampaignConversionGoalServiceBaseDescriptorSupplier
implements io.grpc.protobuf.ProtoFileDescriptorSupplier, io.grpc.protobuf.ProtoServiceDescriptorSupplier {
CampaignConversionGoalServiceBaseDescriptorSupplier() {}
@java.lang.Override
public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() {
return com.google.ads.googleads.v9.services.CampaignConversionGoalServiceProto.getDescriptor();
}
@java.lang.Override
public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() {
return getFileDescriptor().findServiceByName("CampaignConversionGoalService");
}
}
private static final class CampaignConversionGoalServiceFileDescriptorSupplier
extends CampaignConversionGoalServiceBaseDescriptorSupplier {
CampaignConversionGoalServiceFileDescriptorSupplier() {}
}
private static final class CampaignConversionGoalServiceMethodDescriptorSupplier
extends CampaignConversionGoalServiceBaseDescriptorSupplier
implements io.grpc.protobuf.ProtoMethodDescriptorSupplier {
private final String methodName;
CampaignConversionGoalServiceMethodDescriptorSupplier(String methodName) {
this.methodName = methodName;
}
@java.lang.Override
public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() {
return getServiceDescriptor().findMethodByName(methodName);
}
}
private static volatile io.grpc.ServiceDescriptor serviceDescriptor;
public static io.grpc.ServiceDescriptor getServiceDescriptor() {
// Lazily builds and caches the service descriptor (same double-checked
// locking discipline as the method descriptor above).
io.grpc.ServiceDescriptor result = serviceDescriptor;
if (result == null) {
synchronized (CampaignConversionGoalServiceGrpc.class) {
result = serviceDescriptor;
if (result == null) {
serviceDescriptor = result = io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME)
.setSchemaDescriptor(new CampaignConversionGoalServiceFileDescriptorSupplier())
.addMethod(getMutateCampaignConversionGoalsMethod())
.build();
}
}
}
return result;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.giraph.examples;
import com.google.common.collect.Maps;
import org.apache.giraph.graph.BasicVertex;
import org.apache.giraph.graph.BspUtils;
import org.apache.giraph.graph.LongDoubleFloatDoubleVertex;
import org.apache.giraph.graph.VertexReader;
import org.apache.giraph.graph.VertexWriter;
import org.apache.giraph.graph.WorkerContext;
import org.apache.giraph.lib.TextVertexOutputFormat;
import org.apache.giraph.lib.TextVertexOutputFormat.TextVertexWriter;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.FloatWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.log4j.Logger;
import java.io.IOException;
import java.util.Iterator;
import java.util.Map;
/**
* Demonstrates the basic Pregel PageRank implementation.
*/
public class SimplePageRankVertex extends LongDoubleFloatDoubleVertex {
/** Number of supersteps for this test */
public static final int MAX_SUPERSTEPS = 30;
/** Logger */
private static final Logger LOG =
Logger.getLogger(SimplePageRankVertex.class);
@Override
public void compute(Iterator<DoubleWritable> msgIterator) {
LongSumAggregator sumAggreg = (LongSumAggregator) getAggregator("sum");
MinAggregator minAggreg = (MinAggregator) getAggregator("min");
MaxAggregator maxAggreg = (MaxAggregator) getAggregator("max");
// From superstep 1 on: new rank = 0.15/N + 0.85 * (sum of incoming ranks).
if (getSuperstep() >= 1) {
double sum = 0;
while (msgIterator.hasNext()) {
sum += msgIterator.next().get();
}
DoubleWritable vertexValue =
new DoubleWritable((0.15f / getNumVertices()) + 0.85f * sum);
setVertexValue(vertexValue);
// Feed the new value into the max/min aggregators, and count this
// vertex (add 1) into the "sum" aggregator.
maxAggreg.aggregate(vertexValue);
minAggreg.aggregate(vertexValue);
sumAggreg.aggregate(1L);
LOG.info(getVertexId() + ": PageRank=" + vertexValue +
" max=" + maxAggreg.getAggregatedValue() +
" min=" + minAggreg.getAggregatedValue());
}
if (getSuperstep() < MAX_SUPERSTEPS) {
// NOTE(review): getNumOutEdges() == 0 (a sink vertex) would divide by
// zero here, sending Infinity — confirm input graphs contain no sinks.
long edges = getNumOutEdges();
sendMsgToAllEdges(
new DoubleWritable(getVertexValue().get() / edges));
} else {
voteToHalt();
}
}
public static class SimplePageRankVertexWorkerContext extends
WorkerContext {
// Final aggregate values captured in postApplication() for test inspection.
public static double finalMax, finalMin;
public static long finalSum;
@Override
public void preApplication()
throws InstantiationException, IllegalAccessException {
registerAggregator("sum", LongSumAggregator.class);
registerAggregator("min", MinAggregator.class);
registerAggregator("max", MaxAggregator.class);
}
@Override
public void postApplication() {
LongSumAggregator sumAggreg =
(LongSumAggregator) getAggregator("sum");
MinAggregator minAggreg =
(MinAggregator) getAggregator("min");
MaxAggregator maxAggreg =
(MaxAggregator) getAggregator("max");
finalSum = sumAggreg.getAggregatedValue().get();
finalMax = maxAggreg.getAggregatedValue().get();
finalMin = minAggreg.getAggregatedValue().get();
LOG.info("aggregatedNumVertices=" + finalSum);
LOG.info("aggregatedMaxPageRank=" + finalMax);
LOG.info("aggregatedMinPageRank=" + finalMin);
}
@Override
public void preSuperstep() {
LongSumAggregator sumAggreg =
(LongSumAggregator) getAggregator("sum");
MinAggregator minAggreg =
(MinAggregator) getAggregator("min");
MaxAggregator maxAggreg =
(MaxAggregator) getAggregator("max");
// Sanity check: from superstep 3 the vertex-count aggregator must equal
// the framework's vertex count, otherwise something dropped vertices.
if (getSuperstep() >= 3) {
LOG.info("aggregatedNumVertices=" +
sumAggreg.getAggregatedValue() +
" NumVertices=" + getNumVertices());
if (sumAggreg.getAggregatedValue().get() != getNumVertices()) {
throw new RuntimeException("wrong value of SumAggreg: " +
sumAggreg.getAggregatedValue() + ", should be: " +
getNumVertices());
}
DoubleWritable maxPagerank =
(DoubleWritable) maxAggreg.getAggregatedValue();
LOG.info("aggregatedMaxPageRank=" + maxPagerank.get());
DoubleWritable minPagerank =
(DoubleWritable) minAggreg.getAggregatedValue();
LOG.info("aggregatedMinPageRank=" + minPagerank.get());
}
useAggregator("sum");
useAggregator("min");
useAggregator("max");
// Reset the vertex counter at the start of every superstep.
sumAggreg.setAggregatedValue(new LongWritable(0L));
}
@Override
public void postSuperstep() { }
}
/**
 * Simple VertexReader that supports {@link SimplePageRankVertex}
 */
public static class SimplePageRankVertexReader extends
GeneratedVertexReader<LongWritable, DoubleWritable, FloatWritable,
DoubleWritable> {
/** Class logger */
private static final Logger LOG =
Logger.getLogger(SimplePageRankVertexReader.class);
public SimplePageRankVertexReader() {
super();
}
@Override
public boolean nextVertex() {
return totalRecords > recordsRead;
}
@Override
public BasicVertex<LongWritable, DoubleWritable, FloatWritable, DoubleWritable>
getCurrentVertex() throws IOException {
BasicVertex<LongWritable, DoubleWritable, FloatWritable, DoubleWritable>
vertex = BspUtils.createVertex(configuration);
// Synthesize vertex ids so they are globally unique across input splits.
LongWritable vertexId = new LongWritable(
(inputSplit.getSplitIndex() * totalRecords) + recordsRead);
DoubleWritable vertexValue = new DoubleWritable(vertexId.get() * 10d);
// Each vertex gets a single edge to the next id, wrapping around to 0.
long destVertexId =
(vertexId.get() + 1) %
(inputSplit.getNumSplits() * totalRecords);
float edgeValue = vertexId.get() * 100f;
Map<LongWritable, FloatWritable> edges = Maps.newHashMap();
edges.put(new LongWritable(destVertexId), new FloatWritable(edgeValue));
vertex.initialize(vertexId, vertexValue, edges, null);
++recordsRead;
if (LOG.isInfoEnabled()) {
LOG.info("next: Return vertexId=" + vertex.getVertexId().get() +
", vertexValue=" + vertex.getVertexValue() +
", destinationId=" + destVertexId + ", edgeValue=" + edgeValue);
}
return vertex;
}
}
/**
 * Simple VertexInputFormat that supports {@link SimplePageRankVertex}
 */
public static class SimplePageRankVertexInputFormat extends
GeneratedVertexInputFormat<LongWritable,
DoubleWritable, FloatWritable, DoubleWritable> {
@Override
public VertexReader<LongWritable, DoubleWritable, FloatWritable, DoubleWritable>
createVertexReader(InputSplit split,
TaskAttemptContext context)
throws IOException {
return new SimplePageRankVertexReader();
}
}
/**
 * Simple VertexWriter that supports {@link SimplePageRankVertex}
 */
public static class SimplePageRankVertexWriter extends
TextVertexWriter<LongWritable, DoubleWritable, FloatWritable> {
public SimplePageRankVertexWriter(
RecordWriter<Text, Text> lineRecordWriter) {
super(lineRecordWriter);
}
@Override
public void writeVertex(
BasicVertex<LongWritable, DoubleWritable, FloatWritable, ?> vertex)
throws IOException, InterruptedException {
// Emits "<vertexId>\t<pageRank>" pairs via the underlying text writer.
getRecordWriter().write(
new Text(vertex.getVertexId().toString()),
new Text(vertex.getVertexValue().toString()));
}
}
/**
 * Simple VertexOutputFormat that supports {@link SimplePageRankVertex}
 */
public static class SimplePageRankVertexOutputFormat extends
TextVertexOutputFormat<LongWritable, DoubleWritable, FloatWritable> {
@Override
public VertexWriter<LongWritable, DoubleWritable, FloatWritable>
createVertexWriter(TaskAttemptContext context)
throws IOException, InterruptedException {
RecordWriter<Text, Text> recordWriter =
textOutputFormat.getRecordWriter(context);
return new SimplePageRankVertexWriter(recordWriter);
}
}
}
| |
package org.newdawn.slick;
import java.awt.Color;
import java.awt.FontMetrics;
import java.awt.Graphics2D;
import java.awt.RenderingHints;
import java.awt.image.BufferedImage;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
//import com.dozenx.game.opengl.util.ShaderUtils;
import com.dozenx.game.opengl.util.OpenglUtils;
import com.dozenx.game.opengl.util.ShaderUtils;
import com.sun.prism.ps.Shader;
import org.newdawn.slick.opengl.GLUtils;
import org.newdawn.slick.opengl.Texture;
import org.newdawn.slick.opengl.renderer.Renderer;
import org.newdawn.slick.opengl.renderer.SGL;
import org.newdawn.slick.util.BufferedImageUtil;
/**
* A TrueType font implementation for Slick
*
* @author James Chambers (Jimmy)
* @author Jeremy Adams (elias4444)
* @author Kevin Glass (kevglass)
* @author Peter Korzuszek (genail)
*/
public class TrueTypeFont implements Font {
/** The renderer to use for all GL operations */
private static final SGL GL = Renderer.get();
/** Array that holds necessary information about the font characters */
private IntObject[] charArray = new IntObject[256];
/** Map of user defined font characters (Character <-> IntObject); raw type kept for legacy compatibility */
private Map customChars = new HashMap();
/** Boolean flag on whether AntiAliasing is enabled or not */
private boolean antiAlias;
/** Font's size */
private int fontSize = 0;
/** Font's height (tallest glyph seen while building the texture) */
private int fontHeight = 0;
/** Texture used to cache the font 0-255 characters */
private Texture fontTexture;
/** Default font texture width; doubled in createSet() when custom chars are supplied */
private int textureWidth = 512;
/** Default font texture height */
private int textureHeight = 512;
/** A reference to Java's AWT Font that we create our font texture from */
private java.awt.Font font;
/** The font metrics for our Java AWT font */
private FontMetrics fontMetrics;
/**
 * This is a special internal class that holds our necessary information for
 * the font characters. This includes width, height, and where the character
 * is stored on the font texture.
 */
private class IntObject {
/** Character's width */
public int width;
/** Character's height */
public int height;
/** Character's stored x position (left edge within the font texture atlas) */
public int storedX;
/** Character's stored y position (top edge within the font texture atlas) */
public int storedY;
}
/**
* Constructor for the TrueTypeFont class Pass in the preloaded standard
* Java TrueType font, and whether you want it to be cached with
* AntiAliasing applied.
*
* @param font
* Standard Java AWT font
* @param antiAlias
* Whether or not to apply AntiAliasing to the cached font
* @param additionalChars
* Characters of font that will be used in addition of first 256 (by unicode).
*/
public TrueTypeFont(java.awt.Font font, boolean antiAlias, char[] additionalChars) {
    // Fail fast if no GL context is current, and surface any pending GL error.
    GLUtils.checkGLContext();
    OpenglUtils.checkGLError();
    this.font = font;
    this.fontSize = font.getSize();
    this.antiAlias = antiAlias;
    OpenglUtils.checkGLError();
    // Rasterize characters 0-255 plus any extras into the font texture.
    createSet(additionalChars);
}
/**
* Constructor for the TrueTypeFont class Pass in the preloaded standard
* Java TrueType font, and whether you want it to be cached with
* AntiAliasing applied.
*
* @param font
* Standard Java AWT font
* @param antiAlias
* Whether or not to apply AntiAliasing to the cached font
*/
public TrueTypeFont(java.awt.Font font, boolean antiAlias) {
// Delegate to the full constructor with no additional characters.
this( font, antiAlias, null );
}
/**
* Create a standard Java2D BufferedImage of the given character
*
* @param ch
* The character to create a BufferedImage for
*
* @return A BufferedImage containing the character
*/
/**
 * Renders a single character into a fresh ARGB image, white on transparent.
 * Side effect: refreshes the {@code fontMetrics} field from the sizing pass.
 *
 * @param ch the character to rasterize
 * @return an image exactly sized to the glyph's metrics
 */
private BufferedImage getFontImage(char ch) {
    // First pass: a 1x1 scratch image used only to obtain font metrics.
    BufferedImage sizingImage = new BufferedImage(1, 1, BufferedImage.TYPE_INT_ARGB);
    Graphics2D sizingGraphics = (Graphics2D) sizingImage.getGraphics();
    if (antiAlias) {
        sizingGraphics.setRenderingHint(RenderingHints.KEY_ANTIALIASING,
                RenderingHints.VALUE_ANTIALIAS_ON);
    }
    sizingGraphics.setFont(font);
    fontMetrics = sizingGraphics.getFontMetrics();

    // Clamp degenerate metrics so every glyph gets a non-empty image.
    int width = fontMetrics.charWidth(ch);
    if (width <= 0) {
        width = 1;
    }
    int height = fontMetrics.getHeight();
    if (height <= 0) {
        height = fontSize;
    }

    // Second pass: draw the glyph in white onto a transparent ARGB image.
    BufferedImage glyphImage = new BufferedImage(width, height, BufferedImage.TYPE_INT_ARGB);
    Graphics2D glyphGraphics = (Graphics2D) glyphImage.getGraphics();
    if (antiAlias) {
        glyphGraphics.setRenderingHint(RenderingHints.KEY_ANTIALIASING,
                RenderingHints.VALUE_ANTIALIAS_ON);
    }
    glyphGraphics.setFont(font);
    glyphGraphics.setColor(Color.WHITE);
    // Baseline is placed at the ascent so the glyph sits at the image top.
    glyphGraphics.drawString(String.valueOf(ch), 0, fontMetrics.getAscent());
    return glyphImage;
}
/**
* Create and store the font
*
* @param customCharsArray Characters that should be also added to the cache.
*/
/**
 * Rasterizes characters 0-255 (plus any custom characters) into a single
 * texture atlas, recording each glyph's placement in charArray / customChars,
 * and uploads the atlas as {@code fontTexture}.
 *
 * @param customCharsArray extra characters to cache beyond the first 256, may be null
 */
private void createSet(char[] customCharsArray) {
    // Custom characters double the horizontal texture space.
    if (customCharsArray != null && customCharsArray.length > 0) {
        textureWidth *= 2;
    }
    // In any case this should be done in another way. A 512x512 texture can
    // hold only 256 characters at a 32x32 resolution; the texture size should
    // be computed dynamically from the actual character sizes.
    try {
        BufferedImage atlas = new BufferedImage(textureWidth, textureHeight, BufferedImage.TYPE_INT_ARGB);
        Graphics2D g = (Graphics2D) atlas.getGraphics();
        g.setColor(new Color(255, 255, 255, 1));
        g.fillRect(0, 0, textureWidth, textureHeight);

        // Simple row-based packing: advance a "pen" left to right, wrapping
        // to a new row when a glyph would overflow the atlas width.
        int penX = 0;
        int penY = 0;
        int tallestInRow = 0;
        int customCount = (customCharsArray != null) ? customCharsArray.length : 0;
        for (int i = 0; i < 256 + customCount; i++) {
            // Characters 0-255 first, then the custom characters.
            char ch = (i < 256) ? (char) i : customCharsArray[i - 256];
            BufferedImage glyph = getFontImage(ch);
            IntObject info = new IntObject();
            info.width = glyph.getWidth();
            info.height = glyph.getHeight();
            if (penX + info.width >= textureWidth) {
                penX = 0;
                penY += tallestInRow;
                tallestInRow = 0;
            }
            info.storedX = penX;
            info.storedY = penY;
            if (info.height > fontHeight) {
                fontHeight = info.height;
            }
            if (info.height > tallestInRow) {
                tallestInRow = info.height;
            }
            // Blit the glyph into the atlas and advance the pen.
            g.drawImage(glyph, penX, penY, null);
            penX += info.width;
            if (i < 256) { // standard characters
                charArray[i] = info;
            } else { // custom characters
                customChars.put(new Character(ch), info);
            }
            glyph = null;
        }
        OpenglUtils.checkGLError();
        fontTexture = BufferedImageUtil.getTexture(font.toString(), atlas);
        OpenglUtils.checkGLError();
    } catch (IOException e) {
        System.err.println("Failed to create font.");
        e.printStackTrace();
    }
}
/**
 * Emits one textured quad via immediate-mode vertex calls. Destination is
 * given in screen units, source in atlas pixels (converted to normalized
 * texture coordinates here).
 */
private void drawQuad(float drawX, float drawY, float drawX2, float drawY2,
        float srcX, float srcY, float srcX2, float srcY2) {
    float quadWidth = drawX2 - drawX;
    float quadHeight = drawY2 - drawY;
    // Normalize the source rectangle against the atlas dimensions.
    float u0 = srcX / textureWidth;
    float v0 = srcY / textureHeight;
    float u1 = u0 + (srcX2 - srcX) / textureWidth;
    float v1 = v0 + (srcY2 - srcY) / textureHeight;
    // Four corners of the quad, each as texcoord + vertex pairs.
    GL.glTexCoord2f(u0, v0);
    GL.glVertex2f(drawX, drawY);
    GL.glTexCoord2f(u0, v1);
    GL.glVertex2f(drawX, drawY + quadHeight);
    GL.glTexCoord2f(u1, v1);
    GL.glVertex2f(drawX + quadWidth, drawY + quadHeight);
    GL.glTexCoord2f(u1, v0);
    GL.glVertex2f(drawX + quadWidth, drawY);
}
/**
 * Draw a textured quad via the shader pipeline (ShaderUtils) instead of
 * immediate-mode GL. The previous body carried ~40 lines of commented-out
 * immediate-mode code, removed here; its javadoc also documented the wrong
 * parameter set and omitted {@code index} — both fixed.
 *
 * @param drawX left x position to draw to
 * @param drawY top y position to draw to
 * @param drawX2 right x position to draw to
 * @param drawY2 bottom y position to draw to
 * @param srcX left source x position (texture pixels)
 * @param srcY top source y position (texture pixels)
 * @param srcX2 right source x position (texture pixels)
 * @param srcY2 bottom source y position (texture pixels)
 * @param index shader draw index passed through to
 *              ShaderUtils.draw2dImgReverse
 */
private void drawQuadShader(float drawX, float drawY, float drawX2, float drawY2,
float srcX, float srcY, float srcX2, float srcY2,float index) {
    float drawWidth = drawX2 - drawX;
    float drawHeight = drawY2 - drawY;
    // Normalize source pixel coordinates into [0,1] texture space.
    float textureSrcX = srcX / textureWidth;
    float textureSrcY = srcY / textureHeight;
    float renderWidth = (srcX2 - srcX) / textureWidth;
    float renderHeight = (srcY2 - srcY) / textureHeight;
    ShaderUtils.draw2dImgReverse(fontTexture.getTextureID(), drawX, drawY, index,
        drawWidth, drawHeight,
        textureSrcX, textureSrcY,
        textureSrcX + renderWidth, textureSrcY + renderHeight);
}
/**
 * Get the rendered pixel width of a given String. Characters without a
 * cached glyph (no IntObject) are skipped and contribute zero width.
 *
 * @param whatchars the characters to measure
 * @return the total width of the characters in pixels
 */
public int getWidth(String whatchars) {
    int totalwidth = 0;
    for (int i = 0; i < whatchars.length(); i++) {
        char currentChar = whatchars.charAt(i);
        IntObject intObject;
        if (currentChar < 256) {
            // Standard (0-255) glyphs live in the flat lookup array.
            intObject = charArray[currentChar];
        } else {
            // Custom glyphs are keyed by boxed Character; Character.valueOf
            // replaces the deprecated new Character(char) constructor.
            intObject = (IntObject) customChars.get(Character.valueOf(currentChar));
        }
        if (intObject != null) {
            totalwidth += intObject.width;
        }
    }
    return totalwidth;
}
/**
 * Get this font's height.
 *
 * @return the font height in pixels (the tallest glyph measured while the
 *         texture atlas was built)
 */
public int getHeight() {
    return this.fontHeight;
}
/**
 * Get the height of a String.
 *
 * @param HeightString ignored — this font reports a single uniform height
 *        for every string
 * @return the font height in pixels
 */
public int getHeight(String HeightString) {
    return this.fontHeight;
}
/**
 * Get this font's line height.
 *
 * @return the line height of the font in pixels (same value as
 *         {@link #getHeight()})
 */
public int getLineHeight() {
    return this.fontHeight;
}
/**
 * Draw a complete string at the given position in the given color.
 * Delegates to the ranged overload with a range covering every character.
 *
 * @param x the x position to draw the string
 * @param y the y position to draw the string
 * @param whatchars the string to draw
 * @param color the color to draw the text
 */
public void drawString(float x, float y, String whatchars,
org.newdawn.slick.Color color) {
    int lastIndex = whatchars.length() - 1;
    drawString(x, y, whatchars, color, 0, lastIndex);
}
/**
 * Draw a complete string through the shader pipeline.
 * Delegates to the ranged overload with a range covering every character.
 *
 * @param x the x position to draw the string
 * @param y the y position to draw the string
 * @param whatchars the string to draw
 * @param color the color to draw the text
 * @param index shader draw index forwarded to drawQuadShader
 */
public void drawStringShader(float x, float y, String whatchars,
org.newdawn.slick.Color color,float index) {
    int lastIndex = whatchars.length() - 1;
    drawStringShader(x, y, whatchars, color, 0, lastIndex, index);
}
/**
 * @see Font#drawString(float, float, String, org.newdawn.slick.Color, int, int)
 *
 * Draws the characters of {@code whatchars} whose index lies in
 * [startIndex, endIndex]. Characters outside the range still advance the
 * pen position so the visible portion keeps its correct horizontal offset.
 */
public void drawString(float x, float y, String whatchars,
org.newdawn.slick.Color color, int startIndex, int endIndex) {
    color.bind();
    fontTexture.bind();
    GL.glBegin(SGL.GL_QUADS);
    int totalwidth = 0;
    for (int i = 0; i < whatchars.length(); i++) {
        int charCurrent = whatchars.charAt(i);
        IntObject intObject;
        if (charCurrent < 256) {
            intObject = charArray[charCurrent];
        } else {
            // Character.valueOf replaces the deprecated new Character(char).
            intObject = (IntObject) customChars.get(Character.valueOf((char) charCurrent));
        }
        if (intObject != null) {
            // BUG FIX: was (i >= startIndex) || (i <= endIndex), which is true
            // for every index and silently ignored the requested range.
            if (i >= startIndex && i <= endIndex) {
                drawQuad(x + totalwidth, y,
                    x + totalwidth + intObject.width,
                    y + intObject.height, intObject.storedX,
                    intObject.storedY, intObject.storedX + intObject.width,
                    intObject.storedY + intObject.height);
            }
            totalwidth += intObject.width;
        }
    }
    // NOTE(review): glBegin above has no matching glEnd (it was already
    // commented out in the original); presumably the surrounding renderer
    // closes the batch — confirm before re-enabling.
    //GL.glEnd();
}
/**
 * Shader-pipeline variant of the ranged drawString. Honors embedded '\n'
 * characters by resetting the pen to x and advancing y by fontSize.
 *
 * @param x the x position to draw the string
 * @param y the y position to draw the string
 * @param whatchars the string to draw
 * @param color the color to draw the text
 * @param startIndex first character index to render (inclusive)
 * @param endIndex last character index to render (inclusive)
 * @param index shader draw index forwarded to drawQuadShader
 */
public void drawStringShader(float x, float y, String whatchars,
org.newdawn.slick.Color color, int startIndex, int endIndex,float index) {
    // NOTE(review): arguments are (color.a, color.b, color.g) — if
    // ShaderUtils.glColor expects (r, g, b) this drops red and swaps
    // green/blue; confirm the intended channel order before changing.
    ShaderUtils.glColor(color.a, color.b, color.g);
    ShaderUtils.bindTexture(fontTexture.getTextureID());
    int totalwidth = 0;
    for (int i = 0; i < whatchars.length(); i++) {
        int charCurrent = whatchars.charAt(i);
        if (charCurrent == '\n') {
            // Line wrap: reset horizontal advance, move down one line.
            // NOTE(review): the '\n' glyph itself still falls through and is
            // drawn below (as in the original) — likely a blank glyph.
            totalwidth = 0;
            y += fontSize;
        }
        IntObject intObject;
        if (charCurrent < 256) {
            intObject = charArray[charCurrent];
        } else {
            intObject = (IntObject) customChars.get(Character.valueOf((char) charCurrent));
        }
        if (intObject != null) {
            // BUG FIX: was (i >= startIndex) || (i <= endIndex), which is true
            // for every index and silently ignored the requested range.
            if (i >= startIndex && i <= endIndex) {
                drawQuadShader(x + totalwidth, y,
                    x + totalwidth + intObject.width,
                    y + intObject.height, intObject.storedX,
                    intObject.storedY, intObject.storedX + intObject.width,
                    intObject.storedY + intObject.height, index);
            }
            totalwidth += intObject.width;
        }
    }
}
/**
 * Draw a string in the default color (white) at the given position.
 *
 * @param x the x position to draw the string
 * @param y the y position to draw the string
 * @param whatchars the string to draw
 */
public void drawString(float x, float y, String whatchars) {
    // Delegate to the color-aware overload with white.
    drawString(x, y, whatchars, org.newdawn.slick.Color.white);
}
}
| |
package editor.run;
import com.sun.jdi.VirtualMachine;
import editor.GosuEditor;
import editor.GosuPanel;
import editor.LabFrame;
import editor.TextComponentWriter;
import editor.settings.CompilerSettings;
import editor.util.Experiment;
import gw.util.PathUtil;
import editor.util.TaskQueue;
import gw.util.GosuExceptionUtil;
import javax.swing.text.SimpleAttributeSet;
import javax.swing.text.StyleConstants;
import java.awt.*;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.util.List;
import java.util.StringTokenizer;
/**
 * Base executor for run configurations that launch a program in a separate
 * process. Holds the spawned {@link Process}, an optional JDI
 * {@link VirtualMachine} attachment, and the {@link RunState}; provides
 * helpers to pump process output to System.out, build the child process
 * classpath, and print results to the lab console.
 */
public abstract class AbstractOutOfProcessExecutor<T extends IRunConfig> implements IProcessRunner<T>
{
// Run configuration currently being executed; assigned in execute().
private T _runConfig;
// State (e.g. run vs. debug) used for the Gosu panel busy signal.
private RunState _runState;
// The spawned external process; set via setProcess() by subclasses.
private Process _process;
// JDI VirtualMachine when attached for debugging; may be null.
private VirtualMachine _vm;
public AbstractOutOfProcessExecutor( RunState runState )
{
_runState = runState;
}
/**
 * Launches the process for this run configuration and returns the text to
 * print to the lab console when done, or null to print nothing.
 */
protected abstract String exec() throws Exception;
@Override
public T getRunConfig()
{
return _runConfig;
}
@Override
public RunState getRunState()
{
return _runState;
}
@Override
public Process getProcess()
{
return _process;
}
protected void setProcess( Process process )
{
_process = process;
}
public VirtualMachine getVm()
{
return _vm;
}
public void setVm( VirtualMachine vm )
{
_vm = vm;
}
/**
 * Runs {@link #exec()} asynchronously on the "_execute_gosu" task queue.
 * Shows a busy signal first, waits for outstanding parser tasks, then —
 * whether exec() succeeds or throws — clears the busy signal and prints any
 * result on the EDT. Exec exceptions are unwrapped to their root cause and
 * rethrown; failures submitting the task are routed to the editor's
 * uncaught-exception handler.
 */
public void execute( T runConfig )
{
try
{
_runConfig = runConfig;
TaskQueue queue = TaskQueue.getInstance( "_execute_gosu" );
getGosuPanel().addBusySignal( _runState );
queue.postTask(
() -> {
// Don't launch until the editor's parser has settled.
GosuEditor.getParserTaskQueue().waitUntilAllCurrentTasksFinish();
try
{
String result = null;
try
{
result = exec();
}
finally
{
// Effectively-final copy for the lambda below.
String programResults = result;
EventQueue.invokeLater(
() -> {
getGosuPanel().removeBusySignal();
if( programResults != null )
{
printLabMessage( programResults );
}
} );
}
}
catch( Exception e )
{
// Rethrow the root cause so the queue reports the real failure.
Throwable cause = GosuExceptionUtil.findExceptionCause( e );
throw GosuExceptionUtil.forceThrow( cause );
}
} );
}
catch( Throwable t )
{
editor.util.EditorUtilities.handleUncaughtException( t );
}
}
/**
 * Blocks pumping the process's stdout to System.out until EOF (stderr is
 * pumped concurrently by captureErrorStream), then returns the process's
 * exit code.
 */
int waitFor() throws IOException, InterruptedException
{
captureErrorStream();
InputStream input = _process.getInputStream();
byte[] b = new byte[512];
int read = 1;
while( read > -1 )
{
read = input.read( b, 0, b.length );
if( read > -1 )
{
System.out.write( b, 0, read );
}
}
return _process.waitFor();
}
/**
 * Starts a daemon-style background thread copying the process's stderr to
 * System.out. Read errors are deliberately swallowed; the loop exits on EOF
 * or once the process is no longer alive.
 * NOTE(review): the declared IOException is never actually thrown here.
 */
private void captureErrorStream() throws IOException
{
new Thread( () -> {
InputStream input = _process.getErrorStream();
byte[] b = new byte[512];
int read = 1;
while( read > -1 )
{
try
{
read = input.read( b, 0, b.length );
}
catch( IOException e )
{
// eat — best-effort stderr capture; keep pumping until the process dies
}
if( read > -1 )
{
System.out.write( b, 0, read );
}
if( !_process.isAlive() )
{
break;
}
}
}, "Capture Error Stream" ).start();
}
/** Builds the child-process classpath, including tools.jar. */
String makeClasspath( GosuPanel gosuPanel ) throws IOException
{
return makeClasspath( gosuPanel, true );
}
/**
 * Builds the child-process classpath: experiment output/source paths, the
 * gw-asm-all and gosu-core jars from this JVM's classpath, and optionally
 * tools.jar. The trailing path separator is stripped.
 *
 * @param bToolsJar whether to append tools.jar to the classpath
 */
String makeClasspath( GosuPanel gosuPanel, boolean bToolsJar ) throws IOException
{
StringBuilder classpath = new StringBuilder();
String javaHomePath = System.getProperty( "java.home" );
addExperimentPaths( gosuPanel, classpath, javaHomePath );
String cp = System.getProperty( "java.class.path" );
StringTokenizer tok = new StringTokenizer( cp, File.pathSeparator );
while( tok.hasMoreTokens() )
{
String path = tok.nextToken();
// Only the Gosu runtime jars are forwarded from this JVM's classpath.
if( path.contains( "gw-asm-all" ) ||
path.contains( "gosu-core" ) )
{
classpath.append( path ).append( File.pathSeparator );
}
}
if( bToolsJar )
{
classpath.append( PathUtil.findToolsJar() ).append( File.pathSeparator );
}
cp = classpath.toString();
// Drop the trailing separator appended by the loop above.
if( cp.endsWith( File.pathSeparator ) )
{
cp = cp.substring( 0, cp.length()-1 );
}
return cp;
}
/**
 * Appends the experiment's paths: the compiler output dir when static
 * compilation is on, then each source-path entry — skipping JRE paths and,
 * under static compilation, skipping directories (jars/files only).
 */
private void addExperimentPaths( GosuPanel gosuPanel, StringBuilder classpath, String javaHomePath )
{
if( CompilerSettings.isStaticCompile() )
{
classpath.append( PathUtil.getAbsolutePathName( CompilerSettings.getCompilerOutputDir() ) ).append( File.pathSeparator );
}
Experiment experiment = gosuPanel.getExperiment();
List<String> srcPaths = experiment.getSourcePath();
for( String path : srcPaths )
{
if( path.startsWith( javaHomePath ) )
{
// don't pack jre jars
continue;
}
if( !CompilerSettings.isStaticCompile() || PathUtil.isFile( PathUtil.getAbsolutePath( PathUtil.create( path ) ) ) )
{
// Include jars in classpath, include source directories only if running with static compilation OFF
classpath.append( path ).append( File.pathSeparator );
}
}
}
/**
 * Prints a message to the lab console in light gray, temporarily swapping
 * attributes on the TextComponentWriter installed as System.out. No-op if
 * the console panel is closed.
 */
void printLabMessage( String message )
{
if( getGosuPanel().getConsolePanel() == null )
{
return;
}
SimpleAttributeSet attr = new SimpleAttributeSet();
attr.addAttribute( StyleConstants.Foreground, new Color( 192, 192, 192 ) );
TextComponentWriter out = (TextComponentWriter)System.out;
out.setAttributes( attr );
System.out.println( message );
out.setAttributes( null );
}
protected GosuPanel getGosuPanel()
{
return LabFrame.instance().getGosuPanel();
}
}
| |
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.dmn.core.assembler;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.xml.namespace.QName;
import org.drools.compiler.builder.impl.KnowledgeBuilderImpl;
import org.drools.compiler.compiler.PackageRegistry;
import org.drools.compiler.lang.descr.PackageDescr;
import org.drools.core.definitions.InternalKnowledgePackage;
import org.drools.core.definitions.ResourceTypePackageRegistry;
import org.kie.api.internal.assembler.KieAssemblerService;
import org.kie.api.internal.io.ResourceTypePackage;
import org.kie.api.io.Resource;
import org.kie.api.io.ResourceConfiguration;
import org.kie.api.io.ResourceType;
import org.kie.api.io.ResourceWithConfiguration;
import org.kie.dmn.api.core.DMNCompiler;
import org.kie.dmn.api.core.DMNMessage;
import org.kie.dmn.api.core.DMNModel;
import org.kie.dmn.api.marshalling.DMNMarshaller;
import org.kie.dmn.core.api.DMNFactory;
import org.kie.dmn.core.compiler.CoerceDecisionServiceSingletonOutputOption;
import org.kie.dmn.core.compiler.DMNCompilerConfigurationImpl;
import org.kie.dmn.core.compiler.DMNCompilerImpl;
import org.kie.dmn.core.compiler.DMNProfile;
import org.kie.dmn.core.compiler.ImportDMNResolverUtil;
import org.kie.dmn.core.compiler.ImportDMNResolverUtil.ImportType;
import org.kie.dmn.core.compiler.RuntimeTypeCheckOption;
import org.kie.dmn.core.compiler.profiles.ExtendedDMNProfile;
import org.kie.dmn.core.impl.DMNKnowledgeBuilderError;
import org.kie.dmn.core.impl.DMNPackageImpl;
import org.kie.dmn.feel.util.Either;
import org.kie.dmn.feel.util.EvalHelper;
import org.kie.dmn.model.api.Definitions;
import org.kie.dmn.model.api.Import;
import org.kie.internal.builder.ResultSeverity;
import org.kie.internal.utils.ChainedProperties;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * KieAssemblerService for DMN resources: unmarshals DMN definitions, orders
 * them by import dependencies, compiles each into a DMNModel, and registers
 * the models (plus any compilation messages) on the owning knowledge
 * package. Compiler and profile instances are cached on the
 * KnowledgeBuilder under the *_CACHE_KEY constants.
 */
public class DMNAssemblerService implements KieAssemblerService {
private static final Logger logger = LoggerFactory.getLogger( DMNAssemblerService.class );
public static final String ORG_KIE_DMN_PREFIX = "org.kie.dmn";
// Property prefix naming DMNProfile classes to load reflectively.
public static final String DMN_PROFILE_PREFIX = ORG_KIE_DMN_PREFIX + ".profiles.";
public static final String DMN_COMPILER_CACHE_KEY = "DMN_COMPILER_CACHE_KEY";
public static final String DMN_PROFILES_CACHE_KEY = "DMN_PROFILES_CACHE_KEY";
// Optional externally-supplied compiler configuration; when null, one is
// derived from the kmodule/chained properties in getCompiler().
private DMNCompilerConfigurationImpl externalCompilerConfiguration;
public DMNAssemblerService(DMNCompilerConfigurationImpl externalCompilerConfiguration) {
this.externalCompilerConfiguration = externalCompilerConfiguration;
}
public DMNAssemblerService() {
}
@Override
public ResourceType getResourceType() {
return ResourceType.DMN;
}
/**
 * Batch entry point: unmarshals every DMN resource, gathers the DMN models
 * already present in the kbase and the package registry, topologically
 * sorts the new resources by their DMN imports, and compiles them in order
 * so imported models are always available before their importers.
 */
@Override
public void addResources(Object kbuilder, Collection<ResourceWithConfiguration> resources, ResourceType type) throws Exception {
EvalHelper.clearGenericAccessorCache();
KnowledgeBuilderImpl kbuilderImpl = (KnowledgeBuilderImpl) kbuilder;
DMNCompilerImpl dmnCompiler = (DMNCompilerImpl) kbuilderImpl.getCachedOrCreate(DMN_COMPILER_CACHE_KEY, () -> getCompiler(kbuilderImpl));
DMNMarshaller dmnMarshaller = dmnCompiler.getMarshaller();
List<DMNResource> dmnResources = new ArrayList<>();
for (ResourceWithConfiguration r : resources) {
Definitions definitions = dmnMarshaller.unmarshal(r.getResource().getReader());
// Models are identified by (namespace, name).
QName modelID = new QName(definitions.getNamespace(), definitions.getName());
DMNResource dmnResource = new DMNResource(modelID, r, definitions);
dmnResources.add(dmnResource);
}
Collection<DMNModel> dmnModels = new ArrayList<>();
// KIE API: KieContainer upgrade using KieContainer#updateToVersion -based DMN Import resolution strategy
if (kbuilderImpl.getKnowledgeBase() != null) {
for (InternalKnowledgePackage pr : kbuilderImpl.getKnowledgeBase().getPackagesMap().values()) {
ResourceTypePackage resourceTypePackage = pr.getResourceTypePackages().get(ResourceType.DMN);
if (resourceTypePackage != null) {
DMNPackageImpl dmnpkg = (DMNPackageImpl) resourceTypePackage;
dmnModels.addAll(dmnpkg.getAllModels().values());
}
}
}
// Workbench: InternalKieBuilder#createFileSet#build -based DMN Import resolution strategy
for (PackageRegistry pr : kbuilderImpl.getPackageRegistry().values()) {
ResourceTypePackage resourceTypePackage = pr.getPackage().getResourceTypePackages().get(ResourceType.DMN);
if (resourceTypePackage != null) {
DMNPackageImpl dmnpkg = (DMNPackageImpl) resourceTypePackage;
dmnModels.addAll(dmnpkg.getAllModels().values());
}
}
enrichDMNResourcesWithImportsDependencies(dmnResources, dmnModels);
List<DMNResource> sortedDmnResources = DMNResourceDependenciesSorter.sort(dmnResources);
for (DMNResource dmnRes : sortedDmnResources) {
DMNModel dmnModel = internalAddResource(kbuilderImpl, dmnCompiler, dmnRes.getResAndConfig(), dmnModels);
// Make this model visible to subsequent resources that import it.
dmnModels.add(dmnModel);
}
}
/**
 * For each resource's DMN-type imports not already satisfied by an
 * available model, records a dependency edge on the sibling resource that
 * defines the imported model (used by the dependency sorter). Throws
 * RuntimeException when an import resolves to neither.
 */
public static void enrichDMNResourcesWithImportsDependencies(List<DMNResource> dmnResources, Collection<DMNModel> dmnModels) {
for (DMNResource r : dmnResources) {
for (Import i : r.getDefinitions().getImport()) {
if (ImportDMNResolverUtil.whichImportType(i) == ImportType.DMN) {
Either<String, DMNModel> inAlreadyCompiled = ImportDMNResolverUtil.resolveImportDMN(i, dmnModels, x -> new QName(x.getNamespace(), x.getName()));
if (inAlreadyCompiled.isLeft()) { // the DMN Model is not already available in the KieBuilder and needs to be compiled.
Either<String, DMNResource> resolvedResult = ImportDMNResolverUtil.resolveImportDMN(i, dmnResources, DMNResource::getModelID);
DMNResource located = resolvedResult.getOrElseThrow(RuntimeException::new);
r.addDependency(located.getModelID());
} else {
// do nothing: the DMN Model is already available in the KieBuilder.
}
}
}
}
}
/**
 * Compiles one resource, running its before/after-add callbacks around the
 * compilation.
 */
private DMNModel internalAddResource(KnowledgeBuilderImpl kbuilder, DMNCompiler dmnCompiler, ResourceWithConfiguration r, Collection<DMNModel> dmnModels) throws Exception {
r.getBeforeAdd().accept(kbuilder);
DMNModel dmnModel = compileResourceToModel(kbuilder, dmnCompiler, r.getResource(), dmnModels);
r.getAfterAdd().accept(kbuilder);
return dmnModel;
}
/**
 * Legacy single-resource entry point; no control over assembler ordering,
 * so cross-model imports may not resolve. Prefer addResources.
 */
@Override
public void addResource(Object kbuilder, Resource resource, ResourceType type, ResourceConfiguration configuration) throws Exception {
logger.warn("invoked legacy addResource (no control on the order of the assembler compilation): " + resource.getSourcePath());
KnowledgeBuilderImpl kbuilderImpl = (KnowledgeBuilderImpl) kbuilder;
DMNCompiler dmnCompiler = kbuilderImpl.getCachedOrCreate( DMN_COMPILER_CACHE_KEY, () -> getCompiler( kbuilderImpl ) );
Collection<DMNModel> dmnModels = new ArrayList<>();
for (PackageRegistry pr : kbuilderImpl.getPackageRegistry().values()) {
ResourceTypePackage resourceTypePackage = pr.getPackage().getResourceTypePackages().get(ResourceType.DMN);
if (resourceTypePackage != null) {
DMNPackageImpl dmnpkg = (DMNPackageImpl) resourceTypePackage;
dmnModels.addAll(dmnpkg.getAllModels().values());
}
}
compileResourceToModel(kbuilderImpl, dmnCompiler, resource, dmnModels);
}
/**
 * Compiles a resource into a DMNModel and registers it (plus its messages
 * and the active profiles) under the namespace's DMN package. Duplicate
 * model names in a namespace and failed compilations are reported as ERROR
 * builder results; on failure the returned model is null.
 */
private DMNModel compileResourceToModel(KnowledgeBuilderImpl kbuilderImpl, DMNCompiler dmnCompiler, Resource resource, Collection<DMNModel> dmnModels) {
DMNModel model = dmnCompiler.compile(resource, dmnModels);
if( model != null ) {
String namespace = model.getNamespace();
PackageRegistry pkgReg = kbuilderImpl.getOrCreatePackageRegistry( new PackageDescr( namespace ) );
InternalKnowledgePackage kpkgs = pkgReg.getPackage();
// Keep the compiler with the package so clones share the same instance.
kpkgs.addCloningResource( DMN_COMPILER_CACHE_KEY, dmnCompiler );
ResourceTypePackageRegistry rpkg = kpkgs.getResourceTypePackages();
DMNPackageImpl dmnpkg = rpkg.computeIfAbsent(ResourceType.DMN, rtp -> new DMNPackageImpl(namespace));
if ( dmnpkg.getModel( model.getName() ) != null ) {
kbuilderImpl.addBuilderResult(new DMNKnowledgeBuilderError(ResultSeverity.ERROR, resource, namespace, "Duplicate model name " + model.getName() + " in namespace " + namespace));
logger.error( "Duplicate model name {} in namespace {}", model.getName(), namespace );
}
dmnpkg.addModel( model.getName(), model );
for (DMNMessage m : model.getMessages()) {
kbuilderImpl.addBuilderResult(DMNKnowledgeBuilderError.from(resource, namespace, m));
}
dmnpkg.addProfiles(kbuilderImpl.getCachedOrCreate(DMN_PROFILES_CACHE_KEY, () -> getDMNProfiles(kbuilderImpl)));
} else {
kbuilderImpl.addBuilderResult(new DMNKnowledgeBuilderError(ResultSeverity.ERROR, resource, "Unable to compile DMN model for the resource"));
logger.error( "Unable to compile DMN model for resource {}", resource.getSourcePath() );
}
return model;
}
/**
 * Resolves the active DMN profiles: the defaults (per strict mode) plus any
 * classes named by "org.kie.dmn.profiles.*" properties, instantiated
 * reflectively. On any loading failure, two WARNING builder results are
 * added and the profiles accumulated so far are returned.
 * NOTE(review): Class#newInstance() is deprecated — consider
 * getDeclaredConstructor().newInstance(); behavior left unchanged here.
 */
private List<DMNProfile> getDMNProfiles(KnowledgeBuilderImpl kbuilderImpl) {
ChainedProperties chainedProperties = kbuilderImpl.getBuilderConfiguration().getChainedProperties();
List<DMNProfile> dmnProfiles = new ArrayList<>();
dmnProfiles.addAll(getDefaultDMNProfiles(chainedProperties));
Map<String, String> dmnProfileProperties = new HashMap<>();
chainedProperties.mapStartsWith(dmnProfileProperties, DMN_PROFILE_PREFIX, false);
if (!dmnProfileProperties.isEmpty()) {
try {
for (Map.Entry<String, String> dmnProfileProperty : dmnProfileProperties.entrySet()) {
DMNProfile dmnProfile = (DMNProfile) kbuilderImpl.getRootClassLoader()
.loadClass(dmnProfileProperty.getValue()).newInstance();
dmnProfiles.add(dmnProfile);
}
return dmnProfiles;
} catch (Exception e) {
kbuilderImpl.addBuilderResult(new DMNKnowledgeBuilderError(ResultSeverity.WARNING, "Trying to load a non-existing Kie DMN profile " + e.getLocalizedMessage()));
logger.error("Trying to load a non-existing Kie DMN profile {}", e.getLocalizedMessage(), e);
kbuilderImpl.addBuilderResult(new DMNKnowledgeBuilderError(ResultSeverity.WARNING, "DMN Compiler configuration contained errors, will fall-back using empty-configuration compiler."));
logger.warn("DMN Compiler configuration contained errors, will fall-back using empty-configuration compiler.");
}
}
return dmnProfiles;
}
/**
 * Default profiles: the Extended profile unless strict conformance mode is
 * enabled, in which case none.
 */
public static List<DMNProfile> getDefaultDMNProfiles(ChainedProperties properties) {
if (!isStrictMode(properties)) {
return Arrays.asList(new ExtendedDMNProfile());
} else {
return Collections.emptyList();
}
}
/**
 * Strict DMN conformance is on when org.kie.dmn.strictConformance is set to
 * true, or set with an empty value (property present without a value).
 */
public static boolean isStrictMode(ChainedProperties properties) {
String val = properties.getProperty("org.kie.dmn.strictConformance", "false");
return "".equals(val) || Boolean.parseBoolean(val);
}
/**
 * Builds the DMNCompiler from either the externally supplied configuration
 * or one derived from kmodule/chained properties; strict mode additionally
 * forces runtime type checks on and singleton-output coercion off.
 */
private DMNCompiler getCompiler(KnowledgeBuilderImpl kbuilderImpl) {
List<DMNProfile> dmnProfiles = kbuilderImpl.getCachedOrCreate(DMN_PROFILES_CACHE_KEY, () -> getDMNProfiles(kbuilderImpl));
DMNCompilerConfigurationImpl compilerConfiguration;
// Beware: compilerConfiguration can't be cached in DMNAssemblerService
if (externalCompilerConfiguration == null) {
compilerConfiguration = compilerConfigWithKModulePrefs(kbuilderImpl.getRootClassLoader(), kbuilderImpl.getBuilderConfiguration().getChainedProperties(), dmnProfiles, (DMNCompilerConfigurationImpl) DMNFactory.newCompilerConfiguration());
} else {
compilerConfiguration = externalCompilerConfiguration;
}
if (isStrictMode(kbuilderImpl.getBuilderConfiguration().getChainedProperties())) {
compilerConfiguration.setProperty(RuntimeTypeCheckOption.PROPERTY_NAME, "true");
compilerConfiguration.setProperty(CoerceDecisionServiceSingletonOutputOption.PROPERTY_NAME, "false");
}
return DMNFactory.newCompiler(compilerConfiguration);
}
/**
 * Returns a DMNCompilerConfiguration with the specified properties set, applying the given dmnProfiles.
 * @param classLoader root classloader installed on the configuration
 * @param chainedProperties applies properties --it does not do any classloading nor profile loading based on these properties, just passes the values.
 * @param dmnProfiles applies these DMNProfile(s) to the DMNCompilerConfiguration
 * @param config the configuration instance to populate and return
 * @return the populated configuration (same instance as {@code config})
 */
public static DMNCompilerConfigurationImpl compilerConfigWithKModulePrefs(ClassLoader classLoader, ChainedProperties chainedProperties, List<DMNProfile> dmnProfiles, DMNCompilerConfigurationImpl config) {
config.setRootClassLoader(classLoader);
Map<String, String> dmnPrefs = new HashMap<>();
chainedProperties.mapStartsWith(dmnPrefs, ORG_KIE_DMN_PREFIX, true);
config.setProperties(dmnPrefs);
for (DMNProfile dmnProfile : dmnProfiles) {
config.addExtensions(dmnProfile.getExtensionRegisters());
config.addDRGElementCompilers(dmnProfile.getDRGElementCompilers());
config.addFEELProfile(dmnProfile);
}
return config;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.parse.type;
import java.math.BigDecimal;
import java.time.ZoneId;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import org.apache.calcite.rel.RelNode;
import org.apache.commons.lang3.math.NumberUtils;
import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
import org.apache.hadoop.hive.common.type.HiveVarchar;
import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.common.type.TimestampTZ;
import org.apache.hadoop.hive.common.type.TimestampTZUtil;
import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.exec.ColumnInfo;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.FunctionInfo;
import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.optimizer.ConstantPropagateProcFactory;
import org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSubquerySemanticException;
import org.apache.hadoop.hive.ql.optimizer.calcite.translator.TypeConverter;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.QBSubQueryParseInfo;
import org.apache.hadoop.hive.ql.parse.RowResolver;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.plan.ExprDynamicParamDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnListDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDescUtils;
import org.apache.hadoop.hive.ql.plan.ExprNodeFieldDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeSubQueryDesc;
import org.apache.hadoop.hive.ql.plan.SubqueryType;
import org.apache.hadoop.hive.ql.udf.SettableUDF;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBaseCompare;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFIn;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPAnd;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqualNS;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNotEqualNS;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPOr;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFStruct;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFWhen;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveTypeEntry;
import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
import org.apache.hadoop.io.NullWritable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Expression factory for Hive {@link ExprNodeDesc}.
*/
public class ExprNodeDescExprFactory extends ExprFactory<ExprNodeDesc> {
private static final Logger LOG = LoggerFactory.getLogger(ExprNodeDescExprFactory.class);
/**
 * {@inheritDoc}
 */
@Override
protected boolean isExprInstance(Object o) {
    // This factory produces ExprNodeDesc expressions.
    return (o instanceof ExprNodeDesc);
}
/**
 * {@inheritDoc}
 *
 * Converts a resolved column into an {@link ExprNodeDesc}. Constant columns
 * whose values are primitive — or lists/maps/structs composed solely of
 * primitives — are folded into an {@link ExprNodeConstantDesc}; anything
 * else becomes a plain {@link ExprNodeColumnDesc} column reference.
 */
@Override
protected ExprNodeDesc toExpr(ColumnInfo colInfo, RowResolver rowResolver, int offset)
throws SemanticException {
ObjectInspector inspector = colInfo.getObjectInspector();
// Constant primitive column -> fold directly to a constant expression.
if (inspector instanceof ConstantObjectInspector && inspector instanceof PrimitiveObjectInspector) {
return toPrimitiveConstDesc(colInfo, inspector);
}
// Constant list column: fold only when its elements are primitive.
if (inspector instanceof ConstantObjectInspector && inspector instanceof ListObjectInspector) {
ObjectInspector listElementOI = ((ListObjectInspector)inspector).getListElementObjectInspector();
if (listElementOI instanceof PrimitiveObjectInspector) {
return toListConstDesc(colInfo, inspector, listElementOI);
}
}
// Constant map column: fold only when both keys and values are primitive.
if (inspector instanceof ConstantObjectInspector && inspector instanceof MapObjectInspector) {
ObjectInspector keyOI = ((MapObjectInspector)inspector).getMapKeyObjectInspector();
ObjectInspector valueOI = ((MapObjectInspector)inspector).getMapValueObjectInspector();
if (keyOI instanceof PrimitiveObjectInspector && valueOI instanceof PrimitiveObjectInspector) {
return toMapConstDesc(colInfo, inspector, keyOI, valueOI);
}
}
// Constant struct column: fold only when every field is primitive.
if (inspector instanceof ConstantObjectInspector && inspector instanceof StructObjectInspector) {
boolean allPrimitive = true;
List<? extends StructField> fields = ((StructObjectInspector)inspector).getAllStructFieldRefs();
for (StructField field : fields) {
allPrimitive &= field.getFieldObjectInspector() instanceof PrimitiveObjectInspector;
}
if (allPrimitive) {
return toStructConstDesc(colInfo, inspector, fields);
}
}
// non-constant or non-primitive constants
ExprNodeColumnDesc column = new ExprNodeColumnDesc(colInfo);
column.setSkewedCol(colInfo.isSkewedCol());
return column;
}
/**
 * Folds a constant primitive column into an ExprNodeConstantDesc, converting
 * the writable constant to its Java value and recording which column/table
 * the constant was folded from.
 */
private static ExprNodeConstantDesc toPrimitiveConstDesc(ColumnInfo colInfo, ObjectInspector inspector) {
    Object writable = ((ConstantObjectInspector) inspector).getWritableConstantValue();
    Object javaValue = ((PrimitiveObjectInspector) inspector).getPrimitiveJavaObject(writable);
    ExprNodeConstantDesc expr = new ExprNodeConstantDesc(colInfo.getType(), javaValue);
    expr.setFoldedFromCol(colInfo.getInternalName());
    expr.setFoldedFromTab(colInfo.getTabAlias());
    return expr;
}
/**
 * Converts a column backed by a constant list-of-primitives inspector into a literal
 * expression whose value is a Java {@code List} of the element Java values.
 */
private static ExprNodeConstantDesc toListConstDesc(ColumnInfo colInfo, ObjectInspector inspector,
    ObjectInspector listElementOI) {
  PrimitiveObjectInspector elementInspector = (PrimitiveObjectInspector) listElementOI;
  List<?> writables = (List<?>) ((ConstantObjectInspector) inspector).getWritableConstantValue();
  List<Object> javaValues = new ArrayList<Object>(writables.size());
  for (Object writable : writables) {
    javaValues.add(elementInspector.getPrimitiveJavaObject(writable));
  }
  ExprNodeConstantDesc literal = new ExprNodeConstantDesc(colInfo.getType(), javaValues);
  literal.setFoldedFromCol(colInfo.getInternalName());
  literal.setFoldedFromTab(colInfo.getTabAlias());
  return literal;
}
/**
 * Converts a column backed by a constant map inspector (primitive keys and values) into a
 * literal expression. Insertion order of the source map is preserved via LinkedHashMap.
 */
private static ExprNodeConstantDesc toMapConstDesc(ColumnInfo colInfo, ObjectInspector inspector,
    ObjectInspector keyOI, ObjectInspector valueOI) {
  PrimitiveObjectInspector keyInspector = (PrimitiveObjectInspector) keyOI;
  PrimitiveObjectInspector valueInspector = (PrimitiveObjectInspector) valueOI;
  Map<?, ?> writables = (Map<?, ?>) ((ConstantObjectInspector) inspector).getWritableConstantValue();
  Map<Object, Object> javaValues = new LinkedHashMap<Object, Object>();
  for (Map.Entry<?, ?> entry : writables.entrySet()) {
    Object key = keyInspector.getPrimitiveJavaObject(entry.getKey());
    Object value = valueInspector.getPrimitiveJavaObject(entry.getValue());
    javaValues.put(key, value);
  }
  ExprNodeConstantDesc literal = new ExprNodeConstantDesc(colInfo.getType(), javaValues);
  literal.setFoldedFromCol(colInfo.getInternalName());
  literal.setFoldedFromTab(colInfo.getTabAlias());
  return literal;
}
/**
 * Converts a column backed by a constant struct inspector (all-primitive fields) into a
 * literal expression whose value is the list of field Java values, in field order.
 */
private static ExprNodeConstantDesc toStructConstDesc(ColumnInfo colInfo, ObjectInspector inspector,
    List<? extends StructField> fields) {
  List<?> writables = (List<?>) ((ConstantObjectInspector) inspector).getWritableConstantValue();
  int fieldCount = writables.size();
  List<Object> javaValues = new ArrayList<Object>(fieldCount);
  for (int i = 0; i < fieldCount; i++) {
    PrimitiveObjectInspector fieldInspector =
        (PrimitiveObjectInspector) fields.get(i).getFieldObjectInspector();
    javaValues.add(fieldInspector.getPrimitiveJavaObject(writables.get(i)));
  }
  ExprNodeConstantDesc literal = new ExprNodeConstantDesc(colInfo.getType(), javaValues);
  literal.setFoldedFromCol(colInfo.getInternalName());
  literal.setFoldedFromTab(colInfo.getTabAlias());
  return literal;
}
/**
 * {@inheritDoc}
 * <p>Wraps the column info in an {@link ExprNodeColumnDesc}; the rowResolver and offset
 * arguments are not needed by this representation.
 */
@Override
protected ExprNodeColumnDesc createColumnRefExpr(ColumnInfo colInfo, RowResolver rowResolver, int offset) {
return new ExprNodeColumnDesc(colInfo);
}
/**
 * {@inheritDoc}
 * <p>Multi-resolver overload; the resolver list is not needed by this representation.
 */
@Override
protected ExprNodeColumnDesc createColumnRefExpr(ColumnInfo colInfo, List<RowResolver> rowResolverList) {
return new ExprNodeColumnDesc(colInfo);
}
/**
 * {@inheritDoc}
 * <p>Builds a NULL literal typed via the {@code NullWritable} primitive type mapping.
 */
@Override
protected ExprNodeConstantDesc createNullConstantExpr() {
return new ExprNodeConstantDesc(TypeInfoFactory.
getPrimitiveTypeInfoFromPrimitiveWritable(NullWritable.class), null);
}
/**
 * {@inheritDoc}
 * <p>Creates a dynamic-parameter placeholder at the given index; like the NULL literal it
 * is typed via {@code NullWritable} since the actual type is unknown at this point.
 */
@Override
protected ExprDynamicParamDesc createDynamicParamExpr(int index) {
return new ExprDynamicParamDesc(TypeInfoFactory.
getPrimitiveTypeInfoFromPrimitiveWritable(NullWritable.class), index,null);
}
/**
 * {@inheritDoc}
 * <p>A null input produces a null-valued boolean literal rather than failing.
 */
@Override
protected ExprNodeConstantDesc createBooleanConstantExpr(String value) {
Boolean b = value != null ? Boolean.valueOf(value) : null;
return new ExprNodeConstantDesc(TypeInfoFactory.booleanTypeInfo, b);
}
/**
 * {@inheritDoc}
 * <p>Parses the literal as a {@code Long}; {@link NumberFormatException} propagates to the caller.
 */
@Override
protected ExprNodeConstantDesc createBigintConstantExpr(String value) {
Long l = Long.valueOf(value);
return new ExprNodeConstantDesc(l);
}
/**
 * {@inheritDoc}
 * <p>Parses the literal as an {@code Integer}; {@link NumberFormatException} propagates to the caller.
 */
@Override
protected ExprNodeConstantDesc createIntConstantExpr(String value) {
Integer i = Integer.valueOf(value);
return new ExprNodeConstantDesc(i);
}
/**
 * {@inheritDoc}
 * <p>Parses the literal as a {@code Short}; {@link NumberFormatException} propagates to the caller.
 */
@Override
protected ExprNodeConstantDesc createSmallintConstantExpr(String value) {
Short s = Short.valueOf(value);
return new ExprNodeConstantDesc(s);
}
/**
 * {@inheritDoc}
 * <p>Parses the literal as a {@code Byte}; {@link NumberFormatException} propagates to the caller.
 */
@Override
protected ExprNodeConstantDesc createTinyintConstantExpr(String value) {
Byte b = Byte.valueOf(value);
return new ExprNodeConstantDesc(b);
}
/**
 * {@inheritDoc}
 * <p>Parses the literal as a {@code Float}; {@link NumberFormatException} propagates to the caller.
 */
@Override
protected ExprNodeConstantDesc createFloatConstantExpr(String value) {
Float f = Float.valueOf(value);
return new ExprNodeConstantDesc(f);
}
/**
 * {@inheritDoc}
 * <p>Parses the literal as a {@code Double}; {@link NumberFormatException} propagates to the caller.
 */
@Override
protected ExprNodeConstantDesc createDoubleConstantExpr(String value) {
Double d = Double.valueOf(value);
return new ExprNodeConstantDesc(d);
}
/**
 * {@inheritDoc}
 * <p>Builds a decimal literal whose type precision/scale is derived from the parsed value
 * via {@link #adjustType}. {@code HiveDecimal.create} returns null for unparseable input;
 * whether that yields a null-valued literal or a null result depends on
 * {@code allowNullValueConstantExpr}.
 */
@Override
protected ExprNodeConstantDesc createDecimalConstantExpr(String value, boolean allowNullValueConstantExpr) {
HiveDecimal hd = HiveDecimal.create(value);
if (!allowNullValueConstantExpr && hd == null) {
return null;
}
return new ExprNodeConstantDesc(adjustType(hd), hd);
}
/**
 * Returns a precision/scale-adjusted decimal type when the constant is a HiveDecimal;
 * otherwise the requested target type is returned unchanged.
 */
@Override
protected TypeInfo adjustConstantType(PrimitiveTypeInfo targetType, Object constantValue) {
if (constantValue instanceof HiveDecimal) {
return adjustType((HiveDecimal) constantValue);
}
return targetType;
}
/**
 * Derives a decimal type whose precision/scale match the given value; a null value maps
 * to the minimal decimal(1,0) type.
 */
private DecimalTypeInfo adjustType(HiveDecimal hd) {
  // Note: the normalize() call with rounding in HiveDecimal will currently reduce the
  // precision and scale of the value by throwing away trailing zeroes. This may or may
  // not be desirable for the literals; however, this used to be the default behavior
  // for explicit decimal literals (e.g. 1.0BD), so we keep this behavior for now.
  final int precision = (hd == null) ? 1 : hd.precision();
  final int scale = (hd == null) ? 0 : hd.scale();
  return TypeInfoFactory.getDecimalTypeInfo(precision, scale);
}
/**
 * {@inheritDoc}
 * <p>Attempts to narrow a constant to the given target primitive type so comparisons
 * against a column of that type are well-typed. Returns the converted constant, the
 * original constant when no conversion rule applies (or when a failed conversion can be
 * delegated to runtime comparison), or {@code null} when the value cannot be represented
 * in the target type.
 */
@Override
protected Object interpretConstantAsPrimitive(PrimitiveTypeInfo targetType, Object constantValue,
PrimitiveTypeInfo sourceType, boolean isEqual) {
if (constantValue instanceof Number || constantValue instanceof String) {
try {
PrimitiveTypeEntry primitiveTypeEntry = targetType.getPrimitiveTypeEntry();
// The *ValueExact conversions throw ArithmeticException on overflow or fractional
// loss, which falls into the catch block below instead of silently truncating.
if (PrimitiveObjectInspectorUtils.intTypeEntry.equals(primitiveTypeEntry)) {
return toBigDecimal(constantValue.toString()).intValueExact();
} else if (PrimitiveObjectInspectorUtils.longTypeEntry.equals(primitiveTypeEntry)) {
return toBigDecimal(constantValue.toString()).longValueExact();
} else if (PrimitiveObjectInspectorUtils.doubleTypeEntry.equals(primitiveTypeEntry)) {
return Double.valueOf(constantValue.toString());
} else if (PrimitiveObjectInspectorUtils.floatTypeEntry.equals(primitiveTypeEntry)) {
return Float.valueOf(constantValue.toString());
} else if (PrimitiveObjectInspectorUtils.byteTypeEntry.equals(primitiveTypeEntry)) {
return toBigDecimal(constantValue.toString()).byteValueExact();
} else if (PrimitiveObjectInspectorUtils.shortTypeEntry.equals(primitiveTypeEntry)) {
return toBigDecimal(constantValue.toString()).shortValueExact();
} else if (PrimitiveObjectInspectorUtils.decimalTypeEntry.equals(primitiveTypeEntry)) {
return HiveDecimal.create(constantValue.toString());
}
} catch (NumberFormatException | ArithmeticException nfe) {
if (!isEqual && (constantValue instanceof Number ||
NumberUtils.isNumber(constantValue.toString()))) {
// The target is a number, if constantToInterpret can be interpreted as a number,
// return the constantToInterpret directly, GenericUDFBaseCompare will do
// type conversion for us.
return constantValue;
}
LOG.trace("Failed to narrow type of constant", nfe);
return null;
}
}
// Comparison of decimal and float/double happens in float/double.
if (constantValue instanceof HiveDecimal) {
HiveDecimal hiveDecimal = (HiveDecimal) constantValue;
PrimitiveTypeEntry primitiveTypeEntry = targetType.getPrimitiveTypeEntry();
if (PrimitiveObjectInspectorUtils.doubleTypeEntry.equals(primitiveTypeEntry)) {
return hiveDecimal.doubleValue();
} else if (PrimitiveObjectInspectorUtils.floatTypeEntry.equals(primitiveTypeEntry)) {
return hiveDecimal.floatValue();
}
return hiveDecimal;
}
String constTypeInfoName = sourceType.getTypeName();
if (constTypeInfoName.equalsIgnoreCase(serdeConstants.STRING_TYPE_NAME)) {
// because a comparison against a "string" will happen in "string" type.
// to avoid unintentional comparisons in "string"
// constants which are representing char/varchar values must be converted to the
// appropriate type.
if (targetType instanceof CharTypeInfo) {
final String constValue = constantValue.toString();
final int length = TypeInfoUtils.getCharacterLengthForType(targetType);
HiveChar newValue = new HiveChar(constValue, length);
// If truncation to the target length changes the value, the constant cannot be
// represented as char(length) and null is returned.
HiveChar maxCharConst = new HiveChar(constValue, HiveChar.MAX_CHAR_LENGTH);
if (maxCharConst.equals(newValue)) {
return newValue;
} else {
return null;
}
}
if (targetType instanceof VarcharTypeInfo) {
final String constValue = constantValue.toString();
final int length = TypeInfoUtils.getCharacterLengthForType(targetType);
HiveVarchar newValue = new HiveVarchar(constValue, length);
HiveVarchar maxCharConst = new HiveVarchar(constValue, HiveVarchar.MAX_VARCHAR_LENGTH);
if (maxCharConst.equals(newValue)) {
return newValue;
} else {
return null;
}
}
}
return constantValue;
}
/**
 * Parses a numeric literal into a BigDecimal, after stripping a trailing Java numeric
 * suffix (d/D/f/F/l/L) if present.
 *
 * @throws NumberFormatException if the string is not a valid number
 */
private BigDecimal toBigDecimal(String val) {
  if (!NumberUtils.isNumber(val)) {
    throw new NumberFormatException("The given string is not a valid number: " + val);
  }
  String digits = val.replaceAll("[dDfFlL]$", "");
  return new BigDecimal(digits);
}
/**
 * {@inheritDoc}
 * <p>Wraps the value (which may be null) in a string-typed literal.
 */
@Override
protected ExprNodeConstantDesc createStringConstantExpr(String value) {
return new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, value);
}
/**
 * {@inheritDoc}
 * <p>Parses the literal via {@code Date.valueOf}; parse failures propagate to the caller.
 */
@Override
protected ExprNodeConstantDesc createDateConstantExpr(String value) {
Date d = Date.valueOf(value);
return new ExprNodeConstantDesc(TypeInfoFactory.dateTypeInfo, d);
}
/**
 * {@inheritDoc}
 * <p>Parses the literal via {@code Timestamp.valueOf}; parse failures propagate to the caller.
 */
@Override
protected ExprNodeConstantDesc createTimestampConstantExpr(String value) {
Timestamp t = Timestamp.valueOf(value);
return new ExprNodeConstantDesc(TypeInfoFactory.timestampTypeInfo, t);
}
/**
 * {@inheritDoc}
 * <p>The literal type carries the supplied zone; the value itself is parsed by
 * {@code TimestampTZUtil} independently of that zone.
 */
@Override
protected ExprNodeConstantDesc createTimestampLocalTimeZoneConstantExpr(String value, ZoneId zoneId) {
TimestampTZ t = TimestampTZUtil.parse(value);
return new ExprNodeConstantDesc(TypeInfoFactory.getTimestampTZTypeInfo(zoneId), t);
}
/**
 * {@inheritDoc}
 * <p>Parses a full year-month interval literal (e.g. "1-2").
 */
@Override
protected ExprNodeConstantDesc createIntervalYearMonthConstantExpr(String value) {
return new ExprNodeConstantDesc(TypeInfoFactory.intervalYearMonthTypeInfo,
HiveIntervalYearMonth.valueOf(value));
}
/**
 * {@inheritDoc}
 * <p>Parses a full day-time interval literal.
 */
@Override
protected ExprNodeConstantDesc createIntervalDayTimeConstantExpr(String value) {
return new ExprNodeConstantDesc(TypeInfoFactory.intervalDayTimeTypeInfo,
HiveIntervalDayTime.valueOf(value));
}
/**
 * {@inheritDoc}
 * <p>Years-only interval: the month component is fixed at 0.
 */
@Override
protected ExprNodeConstantDesc createIntervalYearConstantExpr(String value) {
return new ExprNodeConstantDesc(TypeInfoFactory.intervalYearMonthTypeInfo,
new HiveIntervalYearMonth(Integer.parseInt(value), 0));
}
/**
 * {@inheritDoc}
 * <p>Months-only interval: the year component is fixed at 0.
 */
@Override
protected ExprNodeConstantDesc createIntervalMonthConstantExpr(String value) {
return new ExprNodeConstantDesc(TypeInfoFactory.intervalYearMonthTypeInfo,
new HiveIntervalYearMonth(0, Integer.parseInt(value)));
}
/**
 * {@inheritDoc}
 * <p>Days-only interval: hour/minute/second/nanos components are fixed at 0.
 */
@Override
protected ExprNodeConstantDesc createIntervalDayConstantExpr(String value) {
return new ExprNodeConstantDesc(TypeInfoFactory.intervalDayTimeTypeInfo,
new HiveIntervalDayTime(Integer.parseInt(value), 0, 0, 0, 0));
}
/**
 * {@inheritDoc}
 * <p>Hours-only interval: all other components are fixed at 0.
 */
@Override
protected ExprNodeConstantDesc createIntervalHourConstantExpr(String value) {
return new ExprNodeConstantDesc(TypeInfoFactory.intervalDayTimeTypeInfo,
new HiveIntervalDayTime(0, Integer.parseInt(value), 0, 0, 0));
}
/**
 * {@inheritDoc}
 * <p>Minutes-only interval: all other components are fixed at 0.
 */
@Override
protected ExprNodeConstantDesc createIntervalMinuteConstantExpr(String value) {
return new ExprNodeConstantDesc(TypeInfoFactory.intervalDayTimeTypeInfo,
new HiveIntervalDayTime(0, 0, Integer.parseInt(value), 0, 0));
}
/**
 * {@inheritDoc}
 * <p>Seconds interval supporting a fractional part: the literal is split into whole
 * seconds (must fit an int — {@code intValueExact} throws otherwise) and a nanosecond
 * remainder obtained by scaling the fraction by NANOS_PER_SEC_BD.
 */
@Override
protected ExprNodeConstantDesc createIntervalSecondConstantExpr(String value) {
BigDecimal bd = new BigDecimal(value);
BigDecimal bdSeconds = new BigDecimal(bd.toBigInteger());
BigDecimal bdNanos = bd.subtract(bdSeconds);
return new ExprNodeConstantDesc(TypeInfoFactory.intervalDayTimeTypeInfo,
new HiveIntervalDayTime(0, 0, 0, bdSeconds.intValueExact(),
bdNanos.multiply(NANOS_PER_SEC_BD).intValue()));
}
/**
 * {@inheritDoc}
 * <p>When every operand is a constant, the struct folds into a single constant whose
 * value is the list of operand values; otherwise a {@code struct()} UDF call is built.
 */
@Override
protected ExprNodeDesc createStructExpr(TypeInfo typeInfo, List<ExprNodeDesc> operands)
throws SemanticException {
assert typeInfo instanceof StructTypeInfo;
if (isAllConstants(operands)) {
return createConstantExpr(typeInfo,
operands.stream()
.map(this::getConstantValue)
.collect(Collectors.toList()));
}
return ExprNodeGenericFuncDesc.newInstance(
new GenericUDFStruct(),
GenericUDFStruct.class.getAnnotation(Description.class).name(),
operands);
}
/**
 * {@inheritDoc}
 * <p>Plain literal construction with an explicit type.
 */
@Override
protected ExprNodeConstantDesc createConstantExpr(TypeInfo typeInfo, Object constantValue) {
return new ExprNodeConstantDesc(typeInfo, constantValue);
}
/**
 * {@inheritDoc}
 * <p>Builds a field access on a (possibly list-valued) nested expression.
 */
@Override
protected ExprNodeFieldDesc createNestedColumnRefExpr(
TypeInfo typeInfo, ExprNodeDesc expr, String fieldName, Boolean isList) {
return new ExprNodeFieldDesc(typeInfo, expr, fieldName, isList);
}
/**
 * {@inheritDoc}
 * <p>Builds a generic-UDF call. UDFs implementing {@link SettableUDF} are first given the
 * expected type so they can configure themselves before instantiation.
 */
@Override
protected ExprNodeGenericFuncDesc createFuncCallExpr(TypeInfo typeInfo, FunctionInfo fi,
    String funcText, List<ExprNodeDesc> inputs) throws UDFArgumentException {
  GenericUDF udf = fi.getGenericUDF();
  if (udf instanceof SettableUDF) {
    ((SettableUDF) udf).setTypeInfo(typeInfo);
  }
  return ExprNodeGenericFuncDesc.newInstance(udf, funcText, inputs);
}
/**
 * {@inheritDoc}
 * <p>Creates an empty column-list container; entries are added via addExprToExprsList.
 */
@Override
protected ExprNodeColumnListDesc createExprsListExpr() {
return new ExprNodeColumnListDesc();
}
/**
 * {@inheritDoc}
 * <p>Appends an expression to a column-list container built by createExprsListExpr.
 */
@Override
protected void addExprToExprsList(ExprNodeDesc columnList, ExprNodeDesc expr) {
  ((ExprNodeColumnListDesc) columnList).addColumn(expr);
}
/**
 * {@inheritDoc}
 * <p>True when the node is a literal ({@link ExprNodeConstantDesc}).
 */
@Override
protected boolean isConstantExpr(Object o) {
return o instanceof ExprNodeConstantDesc;
}
/**
 * {@inheritDoc}
 * <p>True when the node is a function call ({@link ExprNodeGenericFuncDesc}).
 */
@Override
protected boolean isFuncCallExpr(Object o) {
return o instanceof ExprNodeGenericFuncDesc;
}
/**
 * {@inheritDoc}
 * <p>Unwraps a literal's value; callers must ensure the node is a constant first.
 */
@Override
protected Object getConstantValue(ExprNodeDesc expr) {
return ((ExprNodeConstantDesc) expr).getValue();
}
/**
 * {@inheritDoc}
 * <p>String form of a literal's value; throws NPE if the literal holds a null value.
 */
@Override
protected String getConstantValueAsString(ExprNodeDesc expr) {
return ((ExprNodeConstantDesc) expr).getValue().toString();
}
/**
 * {@inheritDoc}
 * <p>True when the node is a column reference ({@link ExprNodeColumnDesc}).
 */
@Override
protected boolean isColumnRefExpr(Object o) {
return o instanceof ExprNodeColumnDesc;
}
/**
 * {@inheritDoc}
 * <p>Returns the referenced column's internal name; the rowResolver is not consulted here.
 */
@Override
protected String getColumnName(ExprNodeDesc expr, RowResolver rowResolver) {
return ((ExprNodeColumnDesc) expr).getColumn();
}
/**
 * {@inheritDoc}
 * <p>True when the node is a column-list container ({@link ExprNodeColumnListDesc}).
 */
@Override
protected boolean isExprsListExpr(Object o) {
return o instanceof ExprNodeColumnListDesc;
}
/**
 * {@inheritDoc}
 * <p>Delegates to the node's own child list accessor.
 */
@Override
protected List<ExprNodeDesc> getExprChildren(ExprNodeDesc expr) {
return expr.getChildren();
}
/**
 * {@inheritDoc}
 * <p>Delegates to the node's own type accessor.
 */
@Override
protected TypeInfo getTypeInfo(ExprNodeDesc expr) {
return expr.getTypeInfo();
}
/**
 * {@inheritDoc}
 * <p>Field types of a struct-typed expression; the caller must guarantee the type is a struct.
 */
@Override
protected List<TypeInfo> getStructTypeInfoList(ExprNodeDesc expr) {
StructTypeInfo structTypeInfo = (StructTypeInfo) expr.getTypeInfo();
return structTypeInfo.getAllStructFieldTypeInfos();
}
/**
 * {@inheritDoc}
 * <p>Field names of a struct-typed expression; the caller must guarantee the type is a struct.
 */
@Override
protected List<String> getStructNameList(ExprNodeDesc expr) {
StructTypeInfo structTypeInfo = (StructTypeInfo) expr.getTypeInfo();
return structTypeInfo.getAllStructFieldNames();
}
/**
 * {@inheritDoc}
 * <p>Delegates OR detection to {@link FunctionRegistry}.
 */
@Override
protected boolean isORFuncCallExpr(ExprNodeDesc expr) {
return FunctionRegistry.isOpOr(expr);
}
/**
 * {@inheritDoc}
 * <p>Delegates AND detection to {@link FunctionRegistry}.
 */
@Override
protected boolean isANDFuncCallExpr(ExprNodeDesc expr) {
return FunctionRegistry.isOpAnd(expr);
}
/**
 * {@inheritDoc}
 * <p>Delegates unary-plus detection to {@link FunctionRegistry}.
 */
@Override
protected boolean isPOSITIVEFuncCallExpr(ExprNodeDesc expr) {
return FunctionRegistry.isOpPositive(expr);
}
/**
 * {@inheritDoc}
 * <p>Delegates unary-minus detection to {@link FunctionRegistry}.
 */
@Override
protected boolean isNEGATIVEFuncCallExpr(ExprNodeDesc expr) {
return FunctionRegistry.isOpNegative(expr);
}
/**
 * {@inheritDoc}
 * <p>True when the function info wraps the AND UDF.
 */
@Override
protected boolean isAndFunction(FunctionInfo fi) {
return fi.getGenericUDF() instanceof GenericUDFOPAnd;
}
/**
 * {@inheritDoc}
 * <p>True when the function info wraps the OR UDF.
 */
@Override
protected boolean isOrFunction(FunctionInfo fi) {
return fi.getGenericUDF() instanceof GenericUDFOPOr;
}
/**
 * {@inheritDoc}
 * <p>True when the function info wraps the IN UDF.
 */
@Override
protected boolean isInFunction(FunctionInfo fi) {
return fi.getGenericUDF() instanceof GenericUDFIn;
}
/**
 * {@inheritDoc}
 * <p>True for any comparison UDF (subclasses of {@link GenericUDFBaseCompare}).
 */
@Override
protected boolean isCompareFunction(FunctionInfo fi) {
return fi.getGenericUDF() instanceof GenericUDFBaseCompare;
}
/**
 * {@inheritDoc}
 * <p>True for plain equality only; the null-safe variant (&lt;=&gt;) is explicitly
 * excluded because GenericUDFOPEqualNS extends GenericUDFOPEqual.
 */
@Override
protected boolean isEqualFunction(FunctionInfo fi) {
return fi.getGenericUDF() instanceof GenericUDFOPEqual
&& !(fi.getGenericUDF() instanceof GenericUDFOPEqualNS);
}
/**
 * True for the null-safe comparison UDFs (null-safe equal / null-safe not-equal).
 */
@Override
protected boolean isNSCompareFunction(FunctionInfo fi) {
return fi.getGenericUDF() instanceof GenericUDFOPEqualNS ||
fi.getGenericUDF() instanceof GenericUDFOPNotEqualNS;
}
/**
 * {@inheritDoc}
 * <p>Delegates to {@link FunctionRegistry}; the semantics of "consistent within query"
 * are defined there.
 */
@Override
protected boolean isConsistentWithinQuery(FunctionInfo fi) {
return FunctionRegistry.isConsistentWithinQuery(fi.getGenericUDF());
}
/**
 * {@inheritDoc}
 * <p>Delegates statefulness detection to {@link FunctionRegistry}.
 */
@Override
protected boolean isStateful(FunctionInfo fi) {
return FunctionRegistry.isStateful(fi.getGenericUDF());
}
/**
 * {@inheritDoc}
 * <p>Mutates the node's type in place and returns the same node.
 */
@Override
protected ExprNodeDesc setTypeInfo(ExprNodeDesc expr, TypeInfo type) {
expr.setTypeInfo(type);
return expr;
}
/**
 * {@inheritDoc}
 * <p>Reports whether a CASE WHEN call of the form {@code WHEN cond THEN <bool-const>
 * ELSE <bool-const>} is eligible for rewriting; both branches must be boolean literals.
 */
@Override
protected boolean convertCASEIntoCOALESCEFuncCallExpr(FunctionInfo fi, List<ExprNodeDesc> inputs) {
  if (!(fi.getGenericUDF() instanceof GenericUDFWhen) || inputs.size() != 3) {
    return false;
  }
  ExprNodeDesc thenBranch = inputs.get(1);
  ExprNodeDesc elseBranch = inputs.get(2);
  if (!(thenBranch instanceof ExprNodeConstantDesc) || !(elseBranch instanceof ExprNodeConstantDesc)) {
    return false;
  }
  Object thenVal = ((ExprNodeConstantDesc) thenBranch).getValue();
  Object elseVal = ((ExprNodeConstantDesc) elseBranch).getValue();
  return thenVal instanceof Boolean && elseVal instanceof Boolean;
}
/**
 * {@inheritDoc}
 * <p>Constant-folds function calls via ConstantPropagateProcFactory; non-call nodes are
 * returned unchanged.
 */
@Override
protected ExprNodeDesc foldExpr(ExprNodeDesc expr) {
if (expr instanceof ExprNodeGenericFuncDesc) {
return ConstantPropagateProcFactory.foldExpr((ExprNodeGenericFuncDesc) expr);
}
return expr;
}
/**
 * {@inheritDoc}
 * <p>Delegates struct-UDF detection to {@link ExprNodeDescUtils}.
 */
@Override
protected boolean isSTRUCTFuncCallExpr(ExprNodeDesc expr) {
return ExprNodeDescUtils.isStructUDF(expr);
}
/**
 * {@inheritDoc}
 * <p>Delegates constant-struct detection to {@link ExprNodeDescUtils}.
 */
@Override
protected boolean isConstantStruct(ExprNodeDesc expr) {
return ExprNodeDescUtils.isConstantStruct(expr);
}
/**
 * {@inheritDoc}
 * <p>Builds an {@link ExprNodeSubQueryDesc} for the given subquery kind. The subquery's
 * RelNode must already have been registered in the context
 * (ctx.getSubqueryToRelNode()); otherwise the subquery appears in an unsupported
 * position and an error is raised. For IN/SOME/ALL, {@code inputs[2]} carries the
 * left-hand-side expression.
 */
@Override
protected ExprNodeDesc createSubqueryExpr(TypeCheckCtx ctx, ASTNode expr, SubqueryType subqueryType,
Object[] inputs) throws CalciteSubquerySemanticException {
// subqueryToRelNode might be null if subquery expression anywhere other than
// as expected in filter (where/having). We should throw an appropriate error
// message
Map<ASTNode, QBSubQueryParseInfo> subqueryToRelNode = ctx.getSubqueryToRelNode();
if (subqueryToRelNode == null) {
throw new CalciteSubquerySemanticException(ErrorMsg.UNSUPPORTED_SUBQUERY_EXPRESSION.getMsg(
" Currently SubQuery expressions are only allowed as " +
"Where and Having Clause predicates"));
}
ASTNode subqueryOp = (ASTNode) expr.getChild(0);
RelNode subqueryRel = subqueryToRelNode.get(expr).getSubQueryRelNode();
// For now because subquery is only supported in filter
// we will create subquery expression of boolean type
switch (subqueryType) {
case EXISTS: {
// EXISTS over a full aggregate always produces a row, so it folds to TRUE.
if (subqueryToRelNode.get(expr).hasFullAggregate()) {
return createConstantExpr(TypeInfoFactory.booleanTypeInfo, true);
}
return new ExprNodeSubQueryDesc(TypeInfoFactory.booleanTypeInfo, subqueryRel,
SubqueryType.EXISTS);
}
case IN: {
assert (inputs[2] != null);
ExprNodeDesc lhs = (ExprNodeDesc) inputs[2];
return new ExprNodeSubQueryDesc(TypeInfoFactory.booleanTypeInfo, subqueryRel,
SubqueryType.IN, lhs);
}
case SCALAR: {
// only single subquery expr is supported
if (subqueryRel.getRowType().getFieldCount() != 1) {
throw new CalciteSubquerySemanticException(ErrorMsg.INVALID_SUBQUERY_EXPRESSION.getMsg(
"More than one column expression in subquery"));
}
// figure out subquery expression column's type
TypeInfo subExprType = TypeConverter.convert(subqueryRel.getRowType().getFieldList().get(0).getType());
return new ExprNodeSubQueryDesc(subExprType, subqueryRel,
SubqueryType.SCALAR);
}
case SOME: {
assert (inputs[2] != null);
ExprNodeDesc lhs = (ExprNodeDesc) inputs[2];
// subqueryOp.getChild(1) carries the comparison operator of the quantified predicate.
return new ExprNodeSubQueryDesc(TypeInfoFactory.booleanTypeInfo, subqueryRel,
SubqueryType.SOME, lhs, (ASTNode) subqueryOp.getChild(1));
}
case ALL: {
assert (inputs[2] != null);
ExprNodeDesc lhs = (ExprNodeDesc) inputs[2];
return new ExprNodeSubQueryDesc(TypeInfoFactory.booleanTypeInfo, subqueryRel,
SubqueryType.ALL, lhs, (ASTNode) subqueryOp.getChild(1));
}
default:
return null;
}
}
/**
 * {@inheritDoc}
 * <p>Looks the function up in the global {@link FunctionRegistry}.
 */
@Override
protected FunctionInfo getFunctionInfo(String funcName) throws SemanticException {
return FunctionRegistry.getFunctionInfo(funcName);
}
/**
 * Rebuilds a struct() call with the given field names while keeping the child
 * expressions and their types; an empty name list leaves the expression untouched.
 * The caller must pass a struct-call expression when names are supplied.
 */
@Override
protected ExprNodeDesc replaceFieldNamesInStruct(ExprNodeDesc expr, List<String> newFieldNames) {
  if (newFieldNames.isEmpty()) {
    return expr;
  }
  ExprNodeGenericFuncDesc structCall = (ExprNodeGenericFuncDesc) expr;
  List<ExprNodeDesc> children = structCall.getChildren();
  List<TypeInfo> fieldTypes = new ArrayList<TypeInfo>(children.size());
  for (ExprNodeDesc child : children) {
    fieldTypes.add(child.getTypeInfo());
  }
  TypeInfo renamedType = TypeInfoFactory.getStructTypeInfo(newFieldNames, fieldTypes);
  return new ExprNodeGenericFuncDesc(renamedType, structCall.getGenericUDF(), children);
}
}
| |
/*
* Copyright (c) 2016-2021 VMware Inc. or its affiliates, All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package reactor.core.publisher;
import java.util.NoSuchElementException;
import java.util.concurrent.Callable;
import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test;
import org.reactivestreams.Subscription;
import reactor.core.CoreSubscriber;
import reactor.core.Fuseable;
import reactor.core.Scannable;
import reactor.test.StepVerifier;
import reactor.test.subscriber.AssertSubscriber;
import static org.assertj.core.api.Assertions.*;
public class MonoSingleTest {
@Nested
class ConcreteClassConsistency {
//tests Flux.single, Flux.single(T) and Mono.single API consistency over returned classes
@Test
void monoWithScalarEmpty() {
Mono<Integer> source = Mono.empty();
Mono<Integer> single = source.single();
assertThat(source).as("source").isInstanceOf(Fuseable.ScalarCallable.class);
assertThat(single).as("single")
.isInstanceOf(MonoError.class)
.isInstanceOf(Fuseable.ScalarCallable.class);
}
@Test
void monoWithScalarError() {
Mono<Integer> source = Mono.error(new IllegalStateException("test"));
Mono<Integer> single = source.single();
assertThat(source).as("source").isInstanceOf(Fuseable.ScalarCallable.class);
assertThat(single).as("single")
.isInstanceOf(MonoError.class)
.isInstanceOf(Fuseable.ScalarCallable.class);
}
@Test
void monoWithScalarValue() {
Mono<Integer> source = Mono.just(1);
Mono<Integer> single = source.single();
assertThat(source).as("source").isInstanceOf(Fuseable.ScalarCallable.class);
assertThat(single).as("single")
.isInstanceOf(MonoJust.class)
.isInstanceOf(Fuseable.ScalarCallable.class);
}
@Test
void monoWithCallable() {
Mono<Integer> source = Mono.fromSupplier(() -> 1);
Mono<Integer> single = source.single();
assertThat(source).as("source")
.isInstanceOf(Callable.class)
.isNotInstanceOf(Fuseable.ScalarCallable.class);
assertThat(single).as("single").isInstanceOf(MonoSingleCallable.class);
}
@Test
void monoWithNormal() {
Mono<Integer> source = Mono.just(1).hide();
Mono<Integer> single = source.single();
assertThat(source).as("source").isNotInstanceOf(Callable.class); //excludes ScalarCallable too
assertThat(single).as("single").isInstanceOf(MonoSingleMono.class);
}
@Test
void fluxWithScalarEmpty() {
Flux<Integer> source = Flux.empty();
Mono<Integer> single = source.single();
assertThat(source).as("source").isInstanceOf(Fuseable.ScalarCallable.class);
assertThat(single).as("single")
.isInstanceOf(MonoError.class)
.isInstanceOf(Fuseable.ScalarCallable.class);
}
@Test
void fluxWithScalarError() {
Flux<Integer> source = Flux.error(new IllegalStateException("test"));
Mono<Integer> single = source.single();
assertThat(source).as("source").isInstanceOf(Fuseable.ScalarCallable.class);
assertThat(single).as("single")
.isInstanceOf(MonoError.class)
.isInstanceOf(Fuseable.ScalarCallable.class);
}
@Test
void fluxWithScalarValue() {
Flux<Integer> source = Flux.just(1);
Mono<Integer> single = source.single();
assertThat(source).as("source").isInstanceOf(Fuseable.ScalarCallable.class);
assertThat(single).as("single")
.isInstanceOf(MonoJust.class)
.isInstanceOf(Fuseable.ScalarCallable.class);
}
@Test
void fluxWithCallable() {
Flux<Integer> source = Mono.fromSupplier(() -> 1).flux();
Mono<Integer> single = source.single();
assertThat(source).as("source")
.isInstanceOf(Callable.class)
.isNotInstanceOf(Fuseable.ScalarCallable.class);
assertThat(single).as("single").isInstanceOf(MonoSingleCallable.class);
}
@Test
void fluxWithNormal() {
Flux<Integer> source = Flux.range(1, 10);
Mono<Integer> single = source.single();
assertThat(source).as("source").isNotInstanceOf(Callable.class); //excludes ScalarCallable too
assertThat(single).as("single").isInstanceOf(MonoSingle.class);
}
@Test
void fluxDefaultValueWithScalarEmpty() {
Flux<Integer> source = Flux.empty();
Mono<Integer> single = source.single(2);
assertThat(source).as("source").isInstanceOf(Fuseable.ScalarCallable.class);
assertThat(single).as("single")
.isInstanceOf(MonoJust.class) //2
.isInstanceOf(Fuseable.ScalarCallable.class);
}
@Test
void fluxDefaultValueWithScalarError() {
Flux<Integer> source = Flux.error(new IllegalStateException("test"));
Mono<Integer> single = source.single(2);
assertThat(source).as("source").isInstanceOf(Fuseable.ScalarCallable.class);
assertThat(single).as("single")
.isInstanceOf(MonoError.class)
.isInstanceOf(Fuseable.ScalarCallable.class);
}
@Test
void fluxDefaultValueWithScalarValue() {
Flux<Integer> source = Flux.just(1);
Mono<Integer> single = source.single(2);
assertThat(source).as("source").isInstanceOf(Fuseable.ScalarCallable.class);
assertThat(single).as("single")
.isInstanceOf(MonoJust.class)
.isInstanceOf(Fuseable.ScalarCallable.class);
}
@Test
void fluxDefaultValueWithCallable() {
Flux<Integer> source = Mono.fromSupplier(() -> 1).flux();
Mono<Integer> single = source.single(2);
assertThat(source).as("source")
.isInstanceOf(Callable.class)
.isNotInstanceOf(Fuseable.ScalarCallable.class);
assertThat(single).as("single").isInstanceOf(MonoSingleCallable.class);
}
@Test
void fluxDefaultValueWithNormal() {
Flux<Integer> source = Flux.range(1, 10);
Mono<Integer> single = source.single(2);
assertThat(source).as("source").isNotInstanceOf(Callable.class); //excludes ScalarCallable too
assertThat(single).as("single").isInstanceOf(MonoSingle.class);
}
@Test
void fluxDefaultValueNullRejectedInAllSourceCases() {
Flux<Integer> sourceScalar = Flux.empty();
Flux<Integer> sourceCallable = Mono.fromSupplier(() -> 1).flux();
Flux<Integer> sourceNormal = Flux.range(1, 10);
assertThatNullPointerException().as("sourceScalar").isThrownBy(() -> sourceScalar.single(null));
assertThatNullPointerException().as("sourceCallable").isThrownBy(() -> sourceCallable.single(null));
assertThatNullPointerException().as("sourceNormal").isThrownBy(() -> sourceNormal.single(null));
}
@Test
void fluxDefaultValueIsUsedForScalarSource() {
Flux<Integer> sourceScalar = Flux.empty();
StepVerifier.create(sourceScalar.single(2))
.expectNext(2)
.verifyComplete();
}
@Test
void fluxDefaultValueIsUsedForCallableSource() {
Flux<Integer> sourceCallable = Mono.<Integer>fromSupplier(() -> null).flux();
StepVerifier.create(sourceCallable.single(2))
.expectNext(2)
.verifyComplete();
}
@Test
void fluxDefaultValueIsUsedForNormalSource() {
Flux<Integer> sourceNormal = Flux.<Integer>empty().hide();
StepVerifier.create(sourceNormal.single(2))
.expectNext(2)
.verifyComplete();
}
}
@Test
void source1Null() {
assertThatExceptionOfType(NullPointerException.class).isThrownBy(() -> {
new MonoSingle<>(null, 1, false);
});
}
@Test
public void normal() {
AssertSubscriber<Integer> ts = AssertSubscriber.create();
Flux.just(1).single().subscribe(ts);
ts.assertValues(1)
.assertNoError()
.assertComplete();
}
@Test
public void normalBackpressured() {
AssertSubscriber<Integer> ts = AssertSubscriber.create(0);
Flux.just(1).single().subscribe(ts);
ts.assertNoValues()
.assertNoError()
.assertNotComplete();
ts.request(1);
ts.assertValues(1)
.assertNoError()
.assertComplete();
}
@Test
public void empty() {
AssertSubscriber<Integer> ts = AssertSubscriber.create();
Flux.<Integer>empty().single().subscribe(ts);
ts.assertNoValues()
.assertError(NoSuchElementException.class)
.assertNotComplete();
}
@Test
public void error() {
StepVerifier.create(Flux.error(new RuntimeException("forced failure"))
.single())
.verifyErrorMessage("forced failure");
}
@Test
public void errorHide() {
StepVerifier.create(Flux.error(new RuntimeException("forced failure"))
.hide()
.single())
.verifyErrorMessage("forced failure");
}
@Test
public void errorDefault() {
StepVerifier.create(Flux.error(new RuntimeException("forced failure"))
.single("bla"))
.verifyErrorMessage("forced failure");
}
@Test
public void errorHideDefault() {
StepVerifier.create(Flux.error(new RuntimeException("forced failure"))
.hide()
.single("bla"))
.verifyErrorMessage("forced failure");
}
@Test
public void emptyDefault() {
AssertSubscriber<Integer> ts = AssertSubscriber.create();
Flux.<Integer>empty().single(1).subscribe(ts);
ts.assertValues(1)
.assertNoError()
.assertComplete();
}
@Test
public void emptyDefaultBackpressured() {
AssertSubscriber<Integer> ts = AssertSubscriber.create(0);
Flux.<Integer>empty().single(1).subscribe(ts);
ts.assertNoValues()
.assertNoError()
.assertNotComplete();
ts.request(1);
ts.assertValues(1)
.assertNoError()
.assertComplete();
}
@Test
public void multi() {
AssertSubscriber<Integer> ts = AssertSubscriber.create();
Flux.range(1, 10).single().subscribe(ts);
ts.assertNoValues()
.assertError(IndexOutOfBoundsException.class)
.assertNotComplete();
}
@Test
public void multiBackpressured() {
AssertSubscriber<Integer> ts = AssertSubscriber.create(0);
Flux.range(1, 10).single().subscribe(ts);
ts.assertNoValues()
.assertNoError()
.assertNotComplete();
ts.request(1);
ts.assertNoValues()
.assertError(IndexOutOfBoundsException.class)
.assertNotComplete();
}
@Test
public void singleCallable() {
StepVerifier.create(Mono.fromCallable(() -> 1)
.flux()
.single())
.expectNext(1)
.verifyComplete();
}
@Test
public void singleFallbackEmpty() {
StepVerifier.create(Flux.empty()
.single(1))
.expectNext(1)
.verifyComplete();
}
@Test
public void singleFallbackJust() {
StepVerifier.create(Flux.just(1)
.single(2))
.expectNext(1)
.verifyComplete();
}
@Test
public void singleFallbackCallable() {
StepVerifier.create(Mono.fromCallable(() -> 1)
.flux()
.single(2))
.expectNext(1)
.verifyComplete();
}
@Test
public void singleJustHide() {
StepVerifier.create(Flux.empty()
.single())
.verifyError(NoSuchElementException.class);
}
@Test
public void singleFallbackJustHide() {
StepVerifier.create(Flux.just(1)
.hide()
.single(2))
.expectNext(1)
.verifyComplete();
}
@Test
public void singleEmptyFallbackCallable() {
StepVerifier.create(Mono.fromCallable(() -> 1)
.flux()
.singleOrEmpty())
.expectNext(1)
.verifyComplete();
}
@Test
public void singleEmptyFallbackJustHide() {
StepVerifier.create(Flux.empty()
.hide()
.singleOrEmpty())
.verifyComplete();
}
@Test
public void singleEmptyFallbackJustHideError() {
    // More than one element is still an error for singleOrEmpty().
    Flux<Integer> source = Flux.just(1, 2, 3).hide();
    StepVerifier.create(source.singleOrEmpty())
                .verifyError(IndexOutOfBoundsException.class);
}
@Test
public void scanOperator() {
    // The operator itself must report a synchronous run style.
    MonoSingle<String> operator = new MonoSingle<>(Flux.just("foo"));
    assertThat(operator.scan(Scannable.Attr.RUN_STYLE)).isSameAs(Scannable.Attr.RunStyle.SYNC);
}
@Test
public void scanSubscriber() {
// Downstream subscriber; callbacks are irrelevant here, only scan() is probed.
CoreSubscriber<String>
actual = new LambdaMonoSubscriber<>(null, e -> {}, null, null);
MonoSingle.SingleSubscriber<String> test = new MonoSingle.SingleSubscriber<>(
actual, "foo", false);
Subscription parent = Operators.emptySubscription();
test.onSubscribe(parent);
// Static attributes exposed by the subscriber before any terminal signal.
assertThat(test.scan(Scannable.Attr.PREFETCH)).isEqualTo(Integer.MAX_VALUE);
assertThat(test.scan(Scannable.Attr.PARENT)).isSameAs(parent);
assertThat(test.scan(Scannable.Attr.ACTUAL)).isSameAs(actual);
assertThat(test.scan(Scannable.Attr.RUN_STYLE)).isSameAs(Scannable.Attr.RunStyle.SYNC);
// TERMINATED flips only after the error; CANCELLED only after cancel().
// The order of these calls matters: each assertion checks the state
// transition caused by the call immediately before it.
assertThat(test.scan(Scannable.Attr.TERMINATED)).isFalse();
test.onError(new IllegalStateException("boom"));
assertThat(test.scan(Scannable.Attr.TERMINATED)).isTrue();
assertThat(test.scan(Scannable.Attr.CANCELLED)).isFalse();
test.cancel();
assertThat(test.scan(Scannable.Attr.CANCELLED)).isTrue();
}
}
| |
package com.example.pre_inverse_remove;
import arez.Arez;
import arez.ArezContext;
import arez.Component;
import arez.Disposable;
import arez.Locator;
import arez.ObservableValue;
import arez.SafeProcedure;
import arez.component.DisposeNotifier;
import arez.component.Identifiable;
import arez.component.Verifiable;
import arez.component.internal.CollectionsUtil;
import arez.component.internal.ComponentKernel;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import javax.annotation.Generated;
import javax.annotation.Nonnull;
import org.realityforge.braincheck.Guards;
// NOTE(review): Arez-processor-generated component wrapper. Code is left
// byte-identical (regenerated by the annotation processor); comments were
// added for review readability only.
@Generated("arez.processor.ArezProcessor")
final class Arez_PublicAccessViaInterfacePreInverseRemoveModel extends PublicAccessViaInterfacePreInverseRemoveModel implements Disposable, Identifiable<Integer>, Verifiable, DisposeNotifier {
// Monotonically increasing source of per-instance component ids.
private static volatile int $$arezi$$_nextId;
// Kernel managing this component's lifecycle (construction, dispose, listeners).
private final ComponentKernel $$arezi$$_kernel;
// ObservableValue wrapping the 'elements' inverse relationship.
@Nonnull
private final ObservableValue<Collection<PublicAccessViaInterfacePreInverseRemoveModel.Element>> $$arez$$_elements;
// Backing collection for the 'elements' inverse relationship.
private Collection<PublicAccessViaInterfacePreInverseRemoveModel.Element> $$arezd$$_elements;
// Lazily built unmodifiable wrapper over the backing collection; null when
// absent or invalidated by a mutation.
private Collection<PublicAccessViaInterfacePreInverseRemoveModel.Element> $$arezd$$_$$cache$$_elements;
Arez_PublicAccessViaInterfacePreInverseRemoveModel() {
super();
final ArezContext $$arezv$$_context = Arez.context();
// Allocate a unique id; debug name and native component only exist when the
// corresponding Arez compile-time settings are enabled.
final int $$arezv$$_id = ++$$arezi$$_nextId;
final String $$arezv$$_name = Arez.areNamesEnabled() ? "com_example_pre_inverse_remove_PublicAccessViaInterfacePreInverseRemoveModel." + $$arezv$$_id : null;
final Component $$arezv$$_component = Arez.areNativeComponentsEnabled() ? $$arezv$$_context.component( "com_example_pre_inverse_remove_PublicAccessViaInterfacePreInverseRemoveModel", $$arezv$$_id, $$arezv$$_name, this::$$arezi$$_nativeComponentPreDispose ) : null;
this.$$arezi$$_kernel = new ComponentKernel( Arez.areZonesEnabled() ? $$arezv$$_context : null, Arez.areNamesEnabled() ? $$arezv$$_name : null, $$arezv$$_id, Arez.areNativeComponentsEnabled() ? $$arezv$$_component : null, Arez.areNativeComponentsEnabled() ? null : this::$$arezi$$_preDispose, Arez.areNativeComponentsEnabled() ? null : this::$$arezi$$_dispose, null, true, false, false );
this.$$arez$$_elements = $$arezv$$_context.observable( Arez.areNativeComponentsEnabled() ? $$arezv$$_component : null, Arez.areNamesEnabled() ? $$arezv$$_name + ".elements" : null, Arez.arePropertyIntrospectorsEnabled() ? () -> this.$$arezd$$_elements : null, null );
this.$$arezd$$_elements = new HashSet<>();
this.$$arezd$$_$$cache$$_elements = null;
// Signal lifecycle transitions to the kernel last, after all state exists.
this.$$arezi$$_kernel.componentConstructed();
this.$$arezi$$_kernel.componentReady();
}
// Locator used to resolve references; guarded against use before init.
@Nonnull
private Locator $$arezi$$_locator() {
if ( Arez.shouldCheckApiInvariants() ) {
Guards.apiInvariant( () -> null != this.$$arezi$$_kernel && this.$$arezi$$_kernel.hasBeenInitialized(), () -> "Method named '$$arezi$$_locator' invoked on uninitialized component of type 'com_example_pre_inverse_remove_PublicAccessViaInterfacePreInverseRemoveModel'" );
}
return this.$$arezi$$_kernel.getContext().locator();
}
private int $$arezi$$_id() {
return this.$$arezi$$_kernel.getId();
}
// Identifiable implementation: exposes the kernel-assigned id.
@Override
@Nonnull
public Integer getArezId() {
if ( Arez.shouldCheckApiInvariants() ) {
Guards.apiInvariant( () -> null != this.$$arezi$$_kernel && this.$$arezi$$_kernel.hasBeenInitialized(), () -> "Method named 'getArezId' invoked on uninitialized component of type 'com_example_pre_inverse_remove_PublicAccessViaInterfacePreInverseRemoveModel'" );
}
if ( Arez.shouldCheckApiInvariants() ) {
Guards.apiInvariant( () -> null != this.$$arezi$$_kernel && this.$$arezi$$_kernel.hasBeenConstructed(), () -> "Method named 'getArezId' invoked on un-constructed component named '" + ( null == this.$$arezi$$_kernel ? "?" : this.$$arezi$$_kernel.getName() ) + "'" );
}
return $$arezi$$_id();
}
// Before dispose: delink every element's back-reference to this component.
// Iterates a copy because delinking mutates the underlying collection.
private void $$arezi$$_preDispose() {
for ( final PublicAccessViaInterfacePreInverseRemoveModel.Element other : new ArrayList<>( $$arezd$$_elements ) ) {
( (PublicAccessViaInterfacePreInverseRemoveModel_Arez_Element) other ).$$arezi$$_delink_other();
}
}
// Native-component variant: delink first, then fire dispose listeners.
private void $$arezi$$_nativeComponentPreDispose() {
this.$$arezi$$_preDispose();
this.$$arezi$$_kernel.notifyOnDisposeListeners();
}
@Override
public void addOnDisposeListener(@Nonnull final Object key, @Nonnull final SafeProcedure action) {
if ( Arez.shouldCheckApiInvariants() ) {
Guards.apiInvariant( () -> null != this.$$arezi$$_kernel && this.$$arezi$$_kernel.hasBeenInitialized(), () -> "Method named 'addOnDisposeListener' invoked on uninitialized component of type 'com_example_pre_inverse_remove_PublicAccessViaInterfacePreInverseRemoveModel'" );
}
this.$$arezi$$_kernel.addOnDisposeListener( key, action );
}
@Override
public void removeOnDisposeListener(@Nonnull final Object key) {
if ( Arez.shouldCheckApiInvariants() ) {
Guards.apiInvariant( () -> null != this.$$arezi$$_kernel && this.$$arezi$$_kernel.hasBeenInitialized(), () -> "Method named 'removeOnDisposeListener' invoked on uninitialized component of type 'com_example_pre_inverse_remove_PublicAccessViaInterfacePreInverseRemoveModel'" );
}
if ( Arez.shouldCheckApiInvariants() ) {
Guards.apiInvariant( () -> null != this.$$arezi$$_kernel && this.$$arezi$$_kernel.hasBeenConstructed(), () -> "Method named 'removeOnDisposeListener' invoked on un-constructed component named '" + ( null == this.$$arezi$$_kernel ? "?" : this.$$arezi$$_kernel.getName() ) + "'" );
}
this.$$arezi$$_kernel.removeOnDisposeListener( key );
}
@Override
public boolean isDisposed() {
if ( Arez.shouldCheckApiInvariants() ) {
Guards.apiInvariant( () -> null != this.$$arezi$$_kernel && this.$$arezi$$_kernel.hasBeenInitialized(), () -> "Method named 'isDisposed' invoked on uninitialized component of type 'com_example_pre_inverse_remove_PublicAccessViaInterfacePreInverseRemoveModel'" );
}
if ( Arez.shouldCheckApiInvariants() ) {
Guards.apiInvariant( () -> null != this.$$arezi$$_kernel && this.$$arezi$$_kernel.hasBeenConstructed(), () -> "Method named 'isDisposed' invoked on un-constructed component named '" + ( null == this.$$arezi$$_kernel ? "?" : this.$$arezi$$_kernel.getName() ) + "'" );
}
return this.$$arezi$$_kernel.isDisposed();
}
@Override
public void dispose() {
if ( Arez.shouldCheckApiInvariants() ) {
Guards.apiInvariant( () -> null != this.$$arezi$$_kernel && this.$$arezi$$_kernel.hasBeenInitialized(), () -> "Method named 'dispose' invoked on uninitialized component of type 'com_example_pre_inverse_remove_PublicAccessViaInterfacePreInverseRemoveModel'" );
}
if ( Arez.shouldCheckApiInvariants() ) {
Guards.apiInvariant( () -> null != this.$$arezi$$_kernel && this.$$arezi$$_kernel.hasBeenConstructed(), () -> "Method named 'dispose' invoked on un-constructed component named '" + ( null == this.$$arezi$$_kernel ? "?" : this.$$arezi$$_kernel.getName() ) + "'" );
}
this.$$arezi$$_kernel.dispose();
}
// Kernel dispose hook: release the observable owned by this component.
private void $$arezi$$_dispose() {
this.$$arez$$_elements.dispose();
}
// Verifiable implementation: checks this component is registered in the
// Locator and that no inverse element has been disposed out from under it.
@Override
public void verify() {
if ( Arez.shouldCheckApiInvariants() ) {
Guards.apiInvariant( () -> null != this.$$arezi$$_kernel && this.$$arezi$$_kernel.hasBeenInitialized(), () -> "Method named 'verify' invoked on uninitialized component of type 'com_example_pre_inverse_remove_PublicAccessViaInterfacePreInverseRemoveModel'" );
}
if ( Arez.shouldCheckApiInvariants() ) {
Guards.apiInvariant( () -> null != this.$$arezi$$_kernel && this.$$arezi$$_kernel.hasBeenConstructed(), () -> "Method named 'verify' invoked on un-constructed component named '" + ( null == this.$$arezi$$_kernel ? "?" : this.$$arezi$$_kernel.getName() ) + "'" );
}
if ( Arez.shouldCheckApiInvariants() ) {
Guards.apiInvariant( () -> null != this.$$arezi$$_kernel && this.$$arezi$$_kernel.isActive(), () -> "Method named 'verify' invoked on " + this.$$arezi$$_kernel.describeState() + " component named '" + this.$$arezi$$_kernel.getName() + "'" );
}
if ( Arez.shouldCheckApiInvariants() && Arez.isVerifyEnabled() ) {
Guards.apiInvariant( () -> this == $$arezi$$_locator().findById( PublicAccessViaInterfacePreInverseRemoveModel.class, $$arezi$$_id() ), () -> "Attempted to lookup self in Locator with type PublicAccessViaInterfacePreInverseRemoveModel and id '" + $$arezi$$_id() + "' but unable to locate self. Actual value: " + $$arezi$$_locator().findById( PublicAccessViaInterfacePreInverseRemoveModel.class, $$arezi$$_id() ) );
for( final PublicAccessViaInterfacePreInverseRemoveModel.Element element : this.$$arezd$$_elements ) {
if ( Arez.shouldCheckApiInvariants() ) {
Guards.apiInvariant( () -> Disposable.isNotDisposed( element ), () -> "Inverse relationship named 'elements' on component named '" + this.$$arezi$$_kernel.getName() + "' contains disposed element '" + element + "'" );
}
}
}
}
// Observable accessor: records the read, and when collections are
// unmodifiable returns (and caches) a wrapped view of the backing set.
@Override
Collection<PublicAccessViaInterfacePreInverseRemoveModel.Element> getElements() {
if ( Arez.shouldCheckApiInvariants() ) {
Guards.apiInvariant( () -> null != this.$$arezi$$_kernel && this.$$arezi$$_kernel.isActive(), () -> "Method named 'getElements' invoked on " + this.$$arezi$$_kernel.describeState() + " component named '" + this.$$arezi$$_kernel.getName() + "'" );
}
this.$$arez$$_elements.reportObserved();
if ( Arez.areCollectionsPropertiesUnmodifiable() ) {
final Collection<PublicAccessViaInterfacePreInverseRemoveModel.Element> $$ar$$_result = this.$$arezd$$_elements;
if ( null == this.$$arezd$$_$$cache$$_elements && null != $$ar$$_result ) {
this.$$arezd$$_$$cache$$_elements = CollectionsUtil.wrap( $$ar$$_result );
}
return $$arezd$$_$$cache$$_elements;
} else {
return this.$$arezd$$_elements;
}
}
// Inverse maintenance: add an element, invalidating the cached wrapper and
// bracketing the mutation with preReportChanged/reportChanged.
void $$arezir$$_elements_add(
@Nonnull final PublicAccessViaInterfacePreInverseRemoveModel.Element element) {
if ( Arez.shouldCheckApiInvariants() ) {
Guards.apiInvariant( () -> null != this.$$arezi$$_kernel && this.$$arezi$$_kernel.isActive(), () -> "Method named '$$arezir$$_elements_add' invoked on " + this.$$arezi$$_kernel.describeState() + " component named '" + this.$$arezi$$_kernel.getName() + "'" );
}
this.$$arez$$_elements.preReportChanged();
if ( Arez.shouldCheckInvariants() ) {
Guards.invariant( () -> !this.$$arezd$$_elements.contains( element ), () -> "Attempted to add reference 'element' to inverse 'elements' but inverse already contained element. Inverse = " + $$arez$$_elements );
}
this.$$arezd$$_elements.add( element );
if ( Arez.areCollectionsPropertiesUnmodifiable() ) {
this.$$arezd$$_$$cache$$_elements = null;
}
this.$$arez$$_elements.reportChanged();
}
// Inverse maintenance: remove an element. Invokes the preElementsRemove()
// hook (defined on the annotated superclass) before mutating the set.
void $$arezir$$_elements_remove(
@Nonnull final PublicAccessViaInterfacePreInverseRemoveModel.Element element) {
if ( Arez.shouldCheckApiInvariants() ) {
Guards.apiInvariant( () -> null != this.$$arezi$$_kernel && this.$$arezi$$_kernel.isActive(), () -> "Method named '$$arezir$$_elements_remove' invoked on " + this.$$arezi$$_kernel.describeState() + " component named '" + this.$$arezi$$_kernel.getName() + "'" );
}
this.$$arez$$_elements.preReportChanged();
if ( Arez.shouldCheckInvariants() ) {
Guards.invariant( () -> this.$$arezd$$_elements.contains( element ), () -> "Attempted to remove reference 'element' from inverse 'elements' but inverse does not contain element. Inverse = " + $$arez$$_elements );
}
preElementsRemove( element );
this.$$arezd$$_elements.remove( element );
if ( Arez.areCollectionsPropertiesUnmodifiable() ) {
this.$$arezd$$_$$cache$$_elements = null;
}
this.$$arez$$_elements.reportChanged();
}
@Override
public String toString() {
if ( Arez.areNamesEnabled() ) {
return "ArezComponent[" + this.$$arezi$$_kernel.getName() + "]";
} else {
return super.toString();
}
}
}
| |
/*
* (c) Copyright 2017 Palantir Technologies Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.palantir.baseline.errorprone;
import com.google.auto.service.AutoService;
import com.google.common.collect.ImmutableList;
import com.google.errorprone.BugPattern;
import com.google.errorprone.VisitorState;
import com.google.errorprone.bugpatterns.BugChecker;
import com.google.errorprone.fixes.SuggestedFix;
import com.google.errorprone.fixes.SuggestedFixes;
import com.google.errorprone.matchers.Description;
import com.google.errorprone.matchers.Matcher;
import com.google.errorprone.matchers.method.MethodMatchers;
import com.google.errorprone.util.ASTHelpers;
import com.sun.source.tree.ExpressionTree;
import com.sun.source.tree.MemberReferenceTree;
import com.sun.source.tree.MemberSelectTree;
import com.sun.source.tree.MethodInvocationTree;
import com.sun.tools.javac.code.Symbol;
import com.sun.tools.javac.code.Type;
import java.util.Collection;
import java.util.Deque;
import java.util.Dictionary;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Stack;
import java.util.Vector;
import java.util.function.Function;
import java.util.function.Predicate;
import javax.annotation.Nullable;
@AutoService(BugChecker.class)
@BugPattern(
name = "StrictCollectionIncompatibleType",
// Idea provides a similar check, avoid noise when that warning is already suppressed.
// https://github.com/JetBrains/intellij-community/blob/master/java/java-analysis-impl/src/com/intellij/codeInspection/miscGenerics/SuspiciousCollectionsMethodCallsInspection.java
altNames = {"SuspiciousMethodCalls", "CollectionIncompatibleType"},
link = "https://github.com/palantir/gradle-baseline#baseline-error-prone-checks",
linkType = BugPattern.LinkType.CUSTOM,
severity = BugPattern.SeverityLevel.WARNING,
summary = "Likely programming error due to using incompatible types as "
+ "arguments for a collection method that accepts Object.")
public final class StrictCollectionIncompatibleType extends BugChecker
implements BugChecker.MethodInvocationTreeMatcher, BugChecker.MemberReferenceTreeMatcher {
// Collection Types
private static final String COLLECTION = Collection.class.getName();
private static final String DEQUE = Deque.class.getName();
private static final String DICTIONARY = Dictionary.class.getName();
private static final String LIST = List.class.getName();
private static final String MAP = Map.class.getName();
private static final String STACK = Stack.class.getName();
private static final String VECTOR = Vector.class.getName();
// Functional Types
private static final String FUNCTION = Function.class.getName();
private static final String PREDICATE = Predicate.class.getName();
// Table of checked (collection type, method signature, type-variable index,
// argument index) tuples. Each entry validates that the argument at
// argumentIndex is compatible with the receiver's type argument at
// typeArgumentIndex (e.g. Map.get's arg against the key type K).
private final ImmutableList<IncompatibleTypeMatcher> matchers = ImmutableList.of(
// Matched patterns are based error-prone CollectionIncompatibleType
// https://github.com/google/error-prone/blob/master/core/src/main/java/com/google/errorprone/bugpatterns/collectionincompatibletype/CollectionIncompatibleType.java
compatibleArgType(MAP, "containsKey(java.lang.Object)", 0, 0),
compatibleArgType(MAP, "containsValue(java.lang.Object)", 1, 0),
compatibleArgType(MAP, "get(java.lang.Object)", 0, 0),
compatibleArgType(MAP, "getOrDefault(java.lang.Object,V)", 0, 0),
compatibleArgType(MAP, "remove(java.lang.Object)", 0, 0),
compatibleArgType(COLLECTION, "contains(java.lang.Object)", 0, 0),
compatibleArgType(COLLECTION, "remove(java.lang.Object)", 0, 0),
compatibleArgType(DEQUE, "removeFirstOccurrence(java.lang.Object)", 0, 0),
compatibleArgType(DEQUE, "removeLastOccurrence(java.lang.Object)", 0, 0),
compatibleArgType(DICTIONARY, "get(java.lang.Object)", 0, 0),
compatibleArgType(DICTIONARY, "remove(java.lang.Object)", 0, 0),
compatibleArgType(LIST, "indexOf(java.lang.Object)", 0, 0),
compatibleArgType(LIST, "lastIndexOf(java.lang.Object)", 0, 0),
compatibleArgType(STACK, "search(java.lang.Object)", 0, 0),
compatibleArgType(VECTOR, "indexOf(java.lang.Object,int)", 0, 0),
compatibleArgType(VECTOR, "lastIndexOf(java.lang.Object,int)", 0, 0),
compatibleArgType(VECTOR, "removeElement(java.lang.Object)", 0, 0));
// NOTE(review): the index-based loops below (rather than enhanced-for) look
// deliberate for this hot path — presumably to avoid iterator allocation per
// checked node; a shared lambda-based helper would likewise allocate, so the
// two loops are intentionally duplicated.
@Override
public Description matchMethodInvocation(MethodInvocationTree tree, VisitorState state) {
// Return the description from the first matching IncompatibleTypeMatcher
for (int i = 0; i < matchers.size(); i++) {
IncompatibleTypeMatcher matcher = matchers.get(i);
Optional<Description> result = matcher.describe(tree, state);
if (result.isPresent()) {
return result.get();
}
}
return Description.NO_MATCH;
}
@Override
public Description matchMemberReference(MemberReferenceTree tree, VisitorState state) {
// Return the description from the first matching IncompatibleTypeMatcher
for (int i = 0; i < matchers.size(); i++) {
IncompatibleTypeMatcher matcher = matchers.get(i);
Optional<Description> result = matcher.describe(tree, state);
if (result.isPresent()) {
return result.get();
}
}
return Description.NO_MATCH;
}
// Result type of the expression, boxed so primitives compare against the
// (always-boxed) collection type arguments; null when the type is unknown.
@Nullable
private static Type getBoxedResult(ExpressionTree expressionTree, VisitorState state) {
Type rawType = ASTHelpers.getResultType(expressionTree);
if (rawType == null) {
return null;
}
return state.getTypes().boxedTypeOrType(rawType);
}
// Receiver type of the invocation viewed as the supertype named by
// superTarget (e.g. a TreeMap receiver viewed as java.util.Map) so its
// type arguments line up with the table indices; null when unresolvable.
@Nullable
private static Type getTargetTypeAsSuper(MethodInvocationTree tree, String superTarget, VisitorState state) {
Type targetMapType = getTargetType(tree);
if (targetMapType == null) {
return null;
}
Symbol mapSymbol = state.getSymbolFromString(superTarget);
if (mapSymbol == null) {
return null;
}
return state.getTypes().asSuper(targetMapType, mapSymbol);
}
// Member-reference overload: the receiver is the qualifier expression of
// the 'recv::method' reference rather than a MemberSelectTree.
@Nullable
private static Type getTargetTypeAsSuper(MemberReferenceTree tree, String superTarget, VisitorState state) {
ExpressionTree targetExpressionTree = tree.getQualifierExpression();
if (targetExpressionTree == null) {
return null;
}
Type targetMapType = ASTHelpers.getResultType(targetExpressionTree);
if (targetMapType == null) {
return null;
}
Symbol mapSymbol = state.getSymbolFromString(superTarget);
if (mapSymbol == null) {
return null;
}
return state.getTypes().asSuper(targetMapType, mapSymbol);
}
// Type of the invocation receiver ('expr' in expr.method(...)); null for
// unqualified calls where no MemberSelectTree is present.
@Nullable
private static Type getTargetType(MethodInvocationTree tree) {
ExpressionTree methodSelect = tree.getMethodSelect();
if (methodSelect instanceof MemberSelectTree) {
MemberSelectTree memberSelectTree = (MemberSelectTree) methodSelect;
return ASTHelpers.getResultType(memberSelectTree.getExpression());
}
return null;
}
// Factory for one table entry: builds a matcher that flags calls (or method
// references) where the argument at argumentIndex is incompatible with the
// receiver's type argument at typeArgumentIndex.
private IncompatibleTypeMatcher compatibleArgType(
String baseType, String signature, int typeArgumentIndex, int argumentIndex) {
// Eagerly create the matcher to avoid allocation for each check
Matcher<ExpressionTree> methodMatcher =
MethodMatchers.instanceMethod().onDescendantOf(baseType).withSignature(signature);
return new IncompatibleTypeMatcher() {
@Override
public Optional<Description> describe(MethodInvocationTree tree, VisitorState state) {
if (!methodMatcher.matches(tree, state)) {
// This matcher does not apply
return Optional.empty();
}
// Each guard below bails out with NO_MATCH (matched, but no bug)
// when type information is missing or does not line up.
if (tree.getArguments().size() <= argumentIndex) {
return IncompatibleTypeMatcher.NO_MATCH;
}
Type targetType = getTargetTypeAsSuper(tree, baseType, state);
if (targetType == null) {
return IncompatibleTypeMatcher.NO_MATCH;
}
if (targetType.getTypeArguments().size() <= typeArgumentIndex) {
return IncompatibleTypeMatcher.NO_MATCH;
}
Type typeArgumentType = targetType.getTypeArguments().get(typeArgumentIndex);
ExpressionTree argumentTree = tree.getArguments().get(argumentIndex);
Type argumentType = getBoxedResult(argumentTree, state);
if (argumentType == null) {
return IncompatibleTypeMatcher.NO_MATCH;
}
if (typesCompatible(argumentType, typeArgumentType, state)) {
return IncompatibleTypeMatcher.NO_MATCH;
}
return Optional.of(buildDescription(argumentTree)
.setMessage("Likely programming error due to using incompatible types as arguments for "
+ "a collection method that accepts Object. Value '"
+ state.getSourceForNode(argumentTree)
+ "' of type '"
+ prettyType(argumentType)
+ "' is not compatible with the expected type '"
+ prettyType(typeArgumentType)
+ '\'')
.build());
}
@Override
public Optional<Description> describe(MemberReferenceTree tree, VisitorState state) {
if (!methodMatcher.matches(tree, state)) {
// This matcher does not apply
return Optional.empty();
}
if (tree.getMode() != MemberReferenceTree.ReferenceMode.INVOKE) {
return IncompatibleTypeMatcher.NO_MATCH;
}
Type targetType = getTargetTypeAsSuper(tree, baseType, state);
if (targetType == null) {
return IncompatibleTypeMatcher.NO_MATCH;
}
if (targetType.getTypeArguments().size() <= typeArgumentIndex) {
return IncompatibleTypeMatcher.NO_MATCH;
}
Type typeArgumentType = targetType.getTypeArguments().get(typeArgumentIndex);
// For references the "argument" type comes from the functional
// interface's type arguments rather than a call-site expression.
Type rawArgumentType = getFunctionalInterfaceArgumentType(tree, argumentIndex, state);
if (rawArgumentType == null) {
return IncompatibleTypeMatcher.NO_MATCH;
}
Type argumentType = state.getTypes().boxedTypeOrType(rawArgumentType);
if (typesCompatible(argumentType, typeArgumentType, state)) {
return IncompatibleTypeMatcher.NO_MATCH;
}
return Optional.of(buildDescription(tree)
.setMessage("Likely programming error due to using incompatible types as arguments for "
+ "a collection method that accepts Object. Type '"
+ prettyType(argumentType)
+ "' is not compatible with the expected type '"
+ prettyType(typeArgumentType)
+ '\'')
.build());
}
};
}
private static boolean typesCompatible(Type argumentType, Type typeArgumentType, VisitorState state) {
// Check erased types only to avoid more complex edge cases. This way we only warn when we
// have high confidence something isn't right.
// This tests that types are within the same (linear) inheritance hierarchy, but does not
// not accept types with a common ancestor.
return ASTHelpers.isSubtype(argumentType, typeArgumentType, state)
// Check the reverse direction as well, this allows 'Object' to succeed for
// delegation, as well as most false positives without sacrificing many known
// failure cases.
|| ASTHelpers.isSubtype(typeArgumentType, argumentType, state);
}
// Input type at argumentIndex of the functional interface a method
// reference is converted to (Function/Predicate only); null when unknown.
@Nullable
private static Type getFunctionalInterfaceArgumentType(
MemberReferenceTree tree, int argumentIndex, VisitorState state) {
Type resultType = ASTHelpers.getResultType(tree);
if (resultType == null) {
return null;
}
if (!isSupportedFunctionalInterface(resultType, state)) {
return null;
}
if (resultType.getTypeArguments().size() <= argumentIndex) {
// Not enough information, it's possible we can
// inspect the resolved symbol in a future change.
return null;
}
return resultType.getTypeArguments().get(argumentIndex);
}
// We don't have a great way to check types on arbitrary interfaces, currently
// we specifically support common types used in streams.
private static boolean isSupportedFunctionalInterface(Type type, VisitorState state) {
// We don't use subtype checks because it's possible for interfaces to extend Function with a default
// apply implementation, expecting a type that doesn't match function type variables.
return ASTHelpers.isSameType(type, state.getTypeFromString(FUNCTION), state)
|| ASTHelpers.isSameType(type, state.getTypeFromString(PREDICATE), state);
}
/**
* Pretty prints the input type for use in description messages. This is not suitable for suggested fixes because
* unlike {@link SuggestedFixes#prettyType(VisitorState, SuggestedFix.Builder, Type)} with non-null state and
* builder, it doesn't add relevant imports.
*/
private static String prettyType(Type type) {
return SuggestedFixes.prettyType(null, null, type);
}
private interface IncompatibleTypeMatcher {
/**
* Signals that a matcher applied to the input, but did not find any bugs. It is not necessary to check
* additional {@link IncompatibleTypeMatcher matchers}.
*/
Optional<Description> NO_MATCH = Optional.of(Description.NO_MATCH);
/**
* Returns an empty optional if the provided {@link MethodInvocationTree} isn't matched. If the method is
* matched, an {@link Optional} of {@link Description#NO_MATCH} is returned for valid use.
*/
Optional<Description> describe(MethodInvocationTree tree, VisitorState state);
Optional<Description> describe(MemberReferenceTree tree, VisitorState state);
}
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/api/servicemanagement/v1/servicemanager.proto
package com.google.api.servicemanagement.v1;
/**
* <pre>
* Request message for DisableService method.
* </pre>
*
* Protobuf type {@code google.api.servicemanagement.v1.DisableServiceRequest}
*/
public final class DisableServiceRequest extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.api.servicemanagement.v1.DisableServiceRequest)
DisableServiceRequestOrBuilder {
// NOTE(review): protoc-generated code (DO NOT EDIT); comments added for
// review readability only.
// Use DisableServiceRequest.newBuilder() to construct.
private DisableServiceRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// No-arg constructor invoked by the parsing constructor below; initializes
// both string fields to the proto3 default (empty string).
private DisableServiceRequest() {
serviceName_ = "";
consumerId_ = "";
}
// This codegen vintage does not retain unknown fields: always the empty set.
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return com.google.protobuf.UnknownFieldSet.getDefaultInstance();
}
// Wire-format parsing constructor: reads tags until EOF (tag 0), dispatching
// on the tag value. Note the 'default' arm lexically precedes the value
// cases — switch dispatch is by value, not order, so this is correct:
// unknown tags are skipped (or terminate parsing when skipField fails).
private DisableServiceRequest(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
int mutable_bitField0_ = 0;
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!input.skipField(tag)) {
done = true;
}
break;
}
// Tag 10 = field 1 (service_name), wire type 2 (length-delimited).
case 10: {
java.lang.String s = input.readStringRequireUtf8();
serviceName_ = s;
break;
}
// Tag 18 = field 2 (consumer_id), wire type 2 (length-delimited).
case 18: {
java.lang.String s = input.readStringRequireUtf8();
consumerId_ = s;
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
// Wrap low-level I/O failures in the protobuf exception type, preserving
// the partially-parsed message.
throw new com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
makeExtensionsImmutable();
}
}
// Static descriptor for this message type, as registered in ServiceManagerProto.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.api.servicemanagement.v1.ServiceManagerProto.internal_static_google_api_servicemanagement_v1_DisableServiceRequest_descriptor;
}
// Maps descriptor fields to the generated accessors via reflection.
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.api.servicemanagement.v1.ServiceManagerProto.internal_static_google_api_servicemanagement_v1_DisableServiceRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.api.servicemanagement.v1.DisableServiceRequest.class, com.google.api.servicemanagement.v1.DisableServiceRequest.Builder.class);
}
public static final int SERVICE_NAME_FIELD_NUMBER = 1;
// Holds either a String or a ByteString; lazily converted and cached by the
// accessors below, hence the Object type and volatile qualifier.
private volatile java.lang.Object serviceName_;
/**
* <pre>
* Name of the service to disable. Specifying an unknown service name
* will cause the request to fail.
* </pre>
*
* <code>optional string service_name = 1;</code>
*/
public java.lang.String getServiceName() {
java.lang.Object ref = serviceName_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
// First String access: decode the stored ByteString as UTF-8 and cache
// the String back into the field for subsequent calls.
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
serviceName_ = s;
return s;
}
}
/**
* <pre>
* Name of the service to disable. Specifying an unknown service name
* will cause the request to fail.
* </pre>
*
* <code>optional string service_name = 1;</code>
*/
public com.google.protobuf.ByteString
getServiceNameBytes() {
java.lang.Object ref = serviceName_;
if (ref instanceof java.lang.String) {
// First bytes access: encode the cached String as UTF-8 and cache the
// ByteString back into the field for subsequent calls.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
serviceName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int CONSUMER_ID_FIELD_NUMBER = 2;
// Same String/ByteString dual-representation caching scheme as serviceName_.
private volatile java.lang.Object consumerId_;
/**
* <pre>
* The identity of consumer resource which service disablement will be
* applied to.
* The Google Service Management implementation accepts the following
* forms: "project:<project_id>", "project_number:<project_number>".
* Note: this is made compatible with
* google.api.servicecontrol.v1.Operation.consumer_id.
* </pre>
*
* <code>optional string consumer_id = 2;</code>
*/
public java.lang.String getConsumerId() {
java.lang.Object ref = consumerId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
// Decode the ByteString as UTF-8 once and cache the String result.
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
consumerId_ = s;
return s;
}
}
/**
* <pre>
* The identity of consumer resource which service disablement will be
* applied to.
* The Google Service Management implementation accepts the following
* forms: "project:<project_id>", "project_number:<project_number>".
* Note: this is made compatible with
* google.api.servicecontrol.v1.Operation.consumer_id.
* </pre>
*
* <code>optional string consumer_id = 2;</code>
*/
public com.google.protobuf.ByteString
getConsumerIdBytes() {
java.lang.Object ref = consumerId_;
if (ref instanceof java.lang.String) {
// Encode the cached String as UTF-8 once and cache the ByteString result.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
consumerId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// Memoized initialization state: -1 = not yet computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
// No required fields in this message, so it is always initialized.
memoizedIsInitialized = 1;
return true;
}
// Serializes to the wire format; fields equal to the default (empty string)
// are omitted, per the isEmpty() guards.
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (!getServiceNameBytes().isEmpty()) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, serviceName_);
}
if (!getConsumerIdBytes().isEmpty()) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, consumerId_);
}
}
// Computes the serialized byte size, memoizing the result in memoizedSize
// (-1 acts as the "not computed" sentinel). Mirrors writeTo's field guards.
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!getServiceNameBytes().isEmpty()) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, serviceName_);
}
if (!getConsumerIdBytes().isEmpty()) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, consumerId_);
}
memoizedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
// Field-by-field equality over both string fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.api.servicemanagement.v1.DisableServiceRequest)) {
return super.equals(obj);
}
com.google.api.servicemanagement.v1.DisableServiceRequest other = (com.google.api.servicemanagement.v1.DisableServiceRequest) obj;
boolean result = true;
result = result && getServiceName()
.equals(other.getServiceName());
result = result && getConsumerId()
.equals(other.getConsumerId());
return result;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
hash = (37 * hash) + SERVICE_NAME_FIELD_NUMBER;
hash = (53 * hash) + getServiceName().hashCode();
hash = (37 * hash) + CONSUMER_ID_FIELD_NUMBER;
hash = (53 * hash) + getConsumerId().hashCode();
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.api.servicemanagement.v1.DisableServiceRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.api.servicemanagement.v1.DisableServiceRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.api.servicemanagement.v1.DisableServiceRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.api.servicemanagement.v1.DisableServiceRequest parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.api.servicemanagement.v1.DisableServiceRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.api.servicemanagement.v1.DisableServiceRequest parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.api.servicemanagement.v1.DisableServiceRequest parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.api.servicemanagement.v1.DisableServiceRequest parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.api.servicemanagement.v1.DisableServiceRequest parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.api.servicemanagement.v1.DisableServiceRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.api.servicemanagement.v1.DisableServiceRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* <pre>
* Request message for DisableService method.
* </pre>
*
* Protobuf type {@code google.api.servicemanagement.v1.DisableServiceRequest}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.api.servicemanagement.v1.DisableServiceRequest)
com.google.api.servicemanagement.v1.DisableServiceRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.api.servicemanagement.v1.ServiceManagerProto.internal_static_google_api_servicemanagement_v1_DisableServiceRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.api.servicemanagement.v1.ServiceManagerProto.internal_static_google_api_servicemanagement_v1_DisableServiceRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.api.servicemanagement.v1.DisableServiceRequest.class, com.google.api.servicemanagement.v1.DisableServiceRequest.Builder.class);
}
// Construct using com.google.api.servicemanagement.v1.DisableServiceRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
public Builder clear() {
super.clear();
serviceName_ = "";
consumerId_ = "";
return this;
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.api.servicemanagement.v1.ServiceManagerProto.internal_static_google_api_servicemanagement_v1_DisableServiceRequest_descriptor;
}
public com.google.api.servicemanagement.v1.DisableServiceRequest getDefaultInstanceForType() {
return com.google.api.servicemanagement.v1.DisableServiceRequest.getDefaultInstance();
}
public com.google.api.servicemanagement.v1.DisableServiceRequest build() {
com.google.api.servicemanagement.v1.DisableServiceRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public com.google.api.servicemanagement.v1.DisableServiceRequest buildPartial() {
com.google.api.servicemanagement.v1.DisableServiceRequest result = new com.google.api.servicemanagement.v1.DisableServiceRequest(this);
result.serviceName_ = serviceName_;
result.consumerId_ = consumerId_;
onBuilt();
return result;
}
public Builder clone() {
return (Builder) super.clone();
}
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
Object value) {
return (Builder) super.setField(field, value);
}
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return (Builder) super.clearField(field);
}
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return (Builder) super.clearOneof(oneof);
}
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, Object value) {
return (Builder) super.setRepeatedField(field, index, value);
}
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
Object value) {
return (Builder) super.addRepeatedField(field, value);
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.api.servicemanagement.v1.DisableServiceRequest) {
return mergeFrom((com.google.api.servicemanagement.v1.DisableServiceRequest)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.api.servicemanagement.v1.DisableServiceRequest other) {
if (other == com.google.api.servicemanagement.v1.DisableServiceRequest.getDefaultInstance()) return this;
if (!other.getServiceName().isEmpty()) {
serviceName_ = other.serviceName_;
onChanged();
}
if (!other.getConsumerId().isEmpty()) {
consumerId_ = other.consumerId_;
onChanged();
}
onChanged();
return this;
}
public final boolean isInitialized() {
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.api.servicemanagement.v1.DisableServiceRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (com.google.api.servicemanagement.v1.DisableServiceRequest) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private java.lang.Object serviceName_ = "";
/**
* <pre>
* Name of the service to disable. Specifying an unknown service name
* will cause the request to fail.
* </pre>
*
* <code>optional string service_name = 1;</code>
*/
public java.lang.String getServiceName() {
java.lang.Object ref = serviceName_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
serviceName_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <pre>
* Name of the service to disable. Specifying an unknown service name
* will cause the request to fail.
* </pre>
*
* <code>optional string service_name = 1;</code>
*/
public com.google.protobuf.ByteString
getServiceNameBytes() {
java.lang.Object ref = serviceName_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
serviceName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <pre>
* Name of the service to disable. Specifying an unknown service name
* will cause the request to fail.
* </pre>
*
* <code>optional string service_name = 1;</code>
*/
public Builder setServiceName(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
serviceName_ = value;
onChanged();
return this;
}
/**
* <pre>
* Name of the service to disable. Specifying an unknown service name
* will cause the request to fail.
* </pre>
*
* <code>optional string service_name = 1;</code>
*/
public Builder clearServiceName() {
serviceName_ = getDefaultInstance().getServiceName();
onChanged();
return this;
}
/**
* <pre>
* Name of the service to disable. Specifying an unknown service name
* will cause the request to fail.
* </pre>
*
* <code>optional string service_name = 1;</code>
*/
public Builder setServiceNameBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
serviceName_ = value;
onChanged();
return this;
}
private java.lang.Object consumerId_ = "";
/**
* <pre>
* The identity of consumer resource which service disablement will be
* applied to.
* The Google Service Management implementation accepts the following
* forms: "project:<project_id>", "project_number:<project_number>".
* Note: this is made compatible with
* google.api.servicecontrol.v1.Operation.consumer_id.
* </pre>
*
* <code>optional string consumer_id = 2;</code>
*/
public java.lang.String getConsumerId() {
java.lang.Object ref = consumerId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
consumerId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <pre>
* The identity of consumer resource which service disablement will be
* applied to.
* The Google Service Management implementation accepts the following
* forms: "project:<project_id>", "project_number:<project_number>".
* Note: this is made compatible with
* google.api.servicecontrol.v1.Operation.consumer_id.
* </pre>
*
* <code>optional string consumer_id = 2;</code>
*/
public com.google.protobuf.ByteString
getConsumerIdBytes() {
java.lang.Object ref = consumerId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
consumerId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <pre>
* The identity of consumer resource which service disablement will be
* applied to.
* The Google Service Management implementation accepts the following
* forms: "project:<project_id>", "project_number:<project_number>".
* Note: this is made compatible with
* google.api.servicecontrol.v1.Operation.consumer_id.
* </pre>
*
* <code>optional string consumer_id = 2;</code>
*/
public Builder setConsumerId(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
consumerId_ = value;
onChanged();
return this;
}
/**
* <pre>
* The identity of consumer resource which service disablement will be
* applied to.
* The Google Service Management implementation accepts the following
* forms: "project:<project_id>", "project_number:<project_number>".
* Note: this is made compatible with
* google.api.servicecontrol.v1.Operation.consumer_id.
* </pre>
*
* <code>optional string consumer_id = 2;</code>
*/
public Builder clearConsumerId() {
consumerId_ = getDefaultInstance().getConsumerId();
onChanged();
return this;
}
/**
* <pre>
* The identity of consumer resource which service disablement will be
* applied to.
* The Google Service Management implementation accepts the following
* forms: "project:<project_id>", "project_number:<project_number>".
* Note: this is made compatible with
* google.api.servicecontrol.v1.Operation.consumer_id.
* </pre>
*
* <code>optional string consumer_id = 2;</code>
*/
public Builder setConsumerIdBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
consumerId_ = value;
onChanged();
return this;
}
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return this;
}
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return this;
}
// @@protoc_insertion_point(builder_scope:google.api.servicemanagement.v1.DisableServiceRequest)
}
// @@protoc_insertion_point(class_scope:google.api.servicemanagement.v1.DisableServiceRequest)
private static final com.google.api.servicemanagement.v1.DisableServiceRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.api.servicemanagement.v1.DisableServiceRequest();
}
public static com.google.api.servicemanagement.v1.DisableServiceRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<DisableServiceRequest>
PARSER = new com.google.protobuf.AbstractParser<DisableServiceRequest>() {
public DisableServiceRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new DisableServiceRequest(input, extensionRegistry);
}
};
public static com.google.protobuf.Parser<DisableServiceRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<DisableServiceRequest> getParserForType() {
return PARSER;
}
public com.google.api.servicemanagement.v1.DisableServiceRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
| |
/*
* Copyright (c) 2019 MarkLogic Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.marklogic.client.datamovement;
import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
import java.util.Iterator;
import java.util.Spliterator;
import java.util.Spliterators;
import java.util.UUID;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectReader;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.dataformat.csv.CsvMapper;
import com.fasterxml.jackson.dataformat.csv.CsvParser;
import com.fasterxml.jackson.dataformat.csv.CsvSchema;
import com.marklogic.client.MarkLogicIOException;
import com.marklogic.client.document.DocumentWriteOperation;
import com.marklogic.client.impl.DocumentWriteOperationImpl;
import com.marklogic.client.io.JacksonHandle;
/**
 * The JacksonCSVSplitter class uses the Jackson CSV parser without attempting to abstract its capabilities.
* The application can override defaults by configuring the Jackson ObjectReader and CsvSchema including parsing TSV
*/
public class JacksonCSVSplitter implements Splitter<JacksonHandle> {
private CsvSchema csvSchema = null;
private CsvMapper csvMapper;
private long count = 0;
private ArrayNode headers = null;
/**
* The CsvMapper configured for the current instance.
* @return the CsvMapper for the current instance.
*/
public CsvMapper getCsvMapper() {
return csvMapper;
}
/**
* Used to set the CsvSchema for the current instance.
* @param schema is the CsvSchema passed in.
* @return an instance of JacksonCSVSplitter with CsvSchema set to the parameter.
*/
public JacksonCSVSplitter withCsvSchema(CsvSchema schema) {
this.csvSchema = schema;
return this;
}
/**
* Used to set the CsvMapper for the current instance.
* @param mapper is the CsvMapper passed in.
* @return an instance of JacksonCSVSplitter with CsvMapper set to the parameter.
*/
public JacksonCSVSplitter withCsvMapper(CsvMapper mapper) {
this.csvMapper = mapper;
return this;
}
/**
* The CsvSchema configured for the current instance.
* @return the CsvSchema for the current instance.
*/
public CsvSchema getCsvSchema() {
return csvSchema;
}
private CsvMapper configureCsvMapper() {
if(csvMapper == null) {
csvMapper = new CsvMapper()
.configure(CsvParser.Feature.ALLOW_TRAILING_COMMA, true)
.configure(CsvParser.Feature.FAIL_ON_MISSING_COLUMNS, false)
.configure(CsvParser.Feature.IGNORE_TRAILING_UNMAPPABLE, false)
.configure(CsvParser.Feature.INSERT_NULLS_FOR_MISSING_COLUMNS, false)
.configure(CsvParser.Feature.SKIP_EMPTY_LINES, true)
.configure(CsvParser.Feature.TRIM_SPACES, true)
.configure(CsvParser.Feature.WRAP_AS_ARRAY, false)
.configure(CsvParser.Feature.IGNORE_TRAILING_UNMAPPABLE, true);
}
return csvMapper;
}
/**
* Takes the input stream and converts it into a stream of JacksonHandle by setting the schema
* and wrapping the JsonNode into JacksonHandle.
* @param input the input stream passed in.
* @return a stream of JacksonHandle.
* @throws IOException if the input cannot be split
*/
@Override
public Stream<JacksonHandle> split(InputStream input) throws IOException {
if(input == null) {
throw new IllegalArgumentException("InputSteam cannot be null.");
}
return configureInput(configureObjReader().readValues(input));
}
/**
* Takes the input stream and converts it into a stream of JacksonHandle by setting the schema
* and wrapping the JsonNode into JacksonHandle.
* @param input the Reader stream passed in.
* @return a stream of JacksonHandle.
* @throws IOException if the input cannot be split
*/
public Stream<JacksonHandle> split(Reader input) throws IOException {
if(input == null) {
throw new IllegalArgumentException("Input cannot be null.");
}
Iterator<JsonNode> nodeItr = configureObjReader().readValues(input);
return configureInput(nodeItr);
}
/**
* Takes the input stream and converts it into a stream of DocumentWriteOperation by setting the schema
* and wrapping the JsonNode into DocumentWriteOperation.
* @param input is the incoming input stream.
* @return a stream of DocumentWriteOperation.
* @throws Exception if the input cannot be split
*/
@Override
public Stream<DocumentWriteOperation> splitWriteOperations(InputStream input) throws Exception {
return splitWriteOperations(input, null);
}
/**
* Takes the input stream and the input name, then converts the input into a stream of DocumentWriteOperation
* by setting the schema and wrapping the JsonNode into DocumentWriteOperation.
* @param input is the incoming input stream.
* @param splitFilename the name of the input stream, including name and extension. It is used to generate URLs for
* split files.The splitFilename could either be provided here or in user-defined UriMaker.
* @return a stream of DocumentWriteOperation.
* @throws Exception if the input cannot be split
*/
@Override
public Stream<DocumentWriteOperation> splitWriteOperations(InputStream input, String splitFilename) throws Exception {
if (input == null) {
throw new IllegalArgumentException("Input cannot be null");
}
if (getUriMaker() == null) {
JacksonCSVSplitter.UriMakerImpl uriMaker = new UriMakerImpl();
setUriMaker(uriMaker);
}
if (splitFilename != null) {
getUriMaker().setSplitFilename(splitFilename);
}
Iterator<JsonNode> nodeItr = configureObjReader().readValues(input);
return configureInputDocumentWriteOperation(nodeItr);
}
/**
* Takes the input Reader and converts it into a stream of DocumentWriteOperation by setting the schema
* and wrapping the JsonNode into DocumentWriteOperation.
* @param input is the incoming input Reader.
* @return a stream of DocumentWriteOperation.
* @throws Exception if the input cannot be split
*/
public Stream<DocumentWriteOperation> splitWriteOperations(Reader input) throws Exception {
return splitWriteOperations(input, null);
}
/**
* Takes the input Reader and the input name, then converts the input Reader into a stream of DocumentWriteOperation
* by setting the schema and wrapping the JsonNode into DocumentWriteOperation.
* @param input is the incoming input Reader.
* @param splitFilename the name of the input Reader, including name and extension. It is used to generate URLs for
* split files.The splitFilename could either be provided here or in user-defined UriMaker.
* @return a stream of DocumentWriteOperation.
* @throws Exception if the input cannot be split
*/
public Stream<DocumentWriteOperation> splitWriteOperations(Reader input, String splitFilename) throws Exception {
if (input == null) {
throw new IllegalArgumentException("Input cannot be null");
}
if (getUriMaker() == null) {
JacksonCSVSplitter.UriMakerImpl uriMaker = new UriMakerImpl();
setUriMaker(uriMaker);
}
if (splitFilename != null) {
getUriMaker().setSplitFilename(splitFilename);
}
//for case file.csv, to generate uris with extension "json"
//for default UriMaker only, not custom UriMaker
if (getUriMaker() instanceof JacksonCSVSplitter.UriMakerImpl) {
((UriMakerImpl) getUriMaker()).setExtension("json");
}
Iterator<JsonNode> nodeItr = configureObjReader().readValues(input);
return configureInputDocumentWriteOperation(nodeItr);
}
/**
* The number of JsonNodes found so far.
* @return the number of JsonNodes found in the input stream.
*/
@Override
public long getCount() {
return this.count;
}
/**
* The headers of the csv file.
* @return the headers found in the csv file.
*/
public ArrayNode getHeaders() {
return this.headers;
}
private void incrementCount() {
this.count++;
}
private ObjectReader configureObjReader() {
this.count=0;
CsvSchema firstLineSchema = getCsvSchema()!=null? getCsvSchema():CsvSchema.emptySchema().withHeader();
CsvMapper csvMapper = getCsvMapper()!=null ? getCsvMapper() : configureCsvMapper();
ObjectReader objectReader = csvMapper.readerFor(JsonNode.class);
return objectReader.with(firstLineSchema);
}
private JacksonHandle wrapJacksonHandle(JsonNode content) {
incrementCount();
return new JacksonHandle(content);
}
private DocumentWriteOperation wrapDocumentWriteOperation(JsonNode content) {
JacksonHandle handle = wrapJacksonHandle(content);
String uri = uriMaker.makeUri(count, handle);
return new DocumentWriteOperationImpl(
DocumentWriteOperation.OperationType.DOCUMENT_WRITE,
uri,
null,
handle
);
}
private PeekingIterator<JsonNode> configureSplitObj(Iterator<JsonNode> nodeItr){
if (nodeItr == null || !nodeItr.hasNext()) {
throw new MarkLogicIOException("No header found.");
}
PeekingIterator<JsonNode> peekingIterator = new PeekingIterator<JsonNode>(nodeItr);
Iterator<String> headerValue = peekingIterator.getFirst().fieldNames();
this.headers = new ObjectMapper().createArrayNode();
while (headerValue.hasNext()) {
headers.add(headerValue.next());
}
return peekingIterator;
}
private Stream<JacksonHandle> configureInput(Iterator<JsonNode> nodeItr) {
if(getCsvSchema() == null) {
PeekingIterator<JsonNode> peekingIterator = configureSplitObj(nodeItr);
return StreamSupport.stream(Spliterators.spliteratorUnknownSize(peekingIterator, Spliterator.ORDERED), false).map(this::wrapJacksonHandle);
}
return StreamSupport.stream(Spliterators.spliteratorUnknownSize(nodeItr, Spliterator.ORDERED), false).map(this::wrapJacksonHandle);
}
private Stream<DocumentWriteOperation> configureInputDocumentWriteOperation(Iterator<JsonNode> nodeItr) {
if(getCsvSchema() == null) {
PeekingIterator<JsonNode> peekingIterator = configureSplitObj(nodeItr);
return StreamSupport.stream(Spliterators.spliteratorUnknownSize(peekingIterator, Spliterator.ORDERED), false).map(this::wrapDocumentWriteOperation);
}
return StreamSupport.stream(Spliterators.spliteratorUnknownSize(nodeItr, Spliterator.ORDERED), false).map(this::wrapDocumentWriteOperation);
}
private JacksonCSVSplitter.UriMaker uriMaker;
/**
* Get the UriMaker of the splitter
* @return the UriMaker of the splitter
*/
public JacksonCSVSplitter.UriMaker getUriMaker() {
return this.uriMaker;
}
/**
* Set the UriMaker to the splitter
* @param uriMaker the uriMaker to generate URI of each split file.
*/
public void setUriMaker(JacksonCSVSplitter.UriMaker uriMaker) {
this.uriMaker = uriMaker;
}
/**
* UriMaker which generates URI for each split file
*/
public interface UriMaker extends Splitter.UriMaker {
/**
* Generates URI for each split
* @param num the count of each split
* @param handle the handle which contains the content of each split. It could be utilized to make a meaningful
* document URI.
* @return the generated URI of current split
*/
String makeUri(long num, JacksonHandle handle);
}
private static class UriMakerImpl extends com.marklogic.client.datamovement.impl.UriMakerImpl<JacksonHandle> implements UriMaker {
@Override
public String makeUri(long num, JacksonHandle handle) {
StringBuilder uri = new StringBuilder();
if (getInputAfter() != null && getInputAfter().length() != 0) {
uri.append(getInputAfter());
}
if (getSplitFilename() != null && getSplitFilename().length() != 0) {
uri.append(getName());
}
if (uri.length() == 0) {
uri.append("/");
}
uri.append(num).append("_").append(UUID.randomUUID()).append(".json");
return uri.toString();
}
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.seqno;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.replication.ReplicationResponse;
import org.elasticsearch.cluster.routing.AllocationId;
import org.elasticsearch.cluster.routing.IndexShardRoutingTable;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.engine.SafeCommitInfo;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.test.IndexSettingsModule;
import org.junit.Before;
import java.util.Collections;
import java.util.Set;
import java.util.concurrent.atomic.AtomicLong;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static org.elasticsearch.index.seqno.SequenceNumbers.NO_OPS_PERFORMED;
import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasSize;
public class PeerRecoveryRetentionLeaseExpiryTests extends ReplicationTrackerTestCase {
private static final ActionListener<ReplicationResponse> EMPTY_LISTENER = ActionListener.wrap(() -> { });
private ReplicationTracker replicationTracker;
private AtomicLong currentTimeMillis;
private Settings settings;
private SafeCommitInfo safeCommitInfo;
@Before
public void setUpReplicationTracker() throws InterruptedException {
final AllocationId primaryAllocationId = AllocationId.newInitializing();
currentTimeMillis = new AtomicLong(randomLongBetween(0, 1024));
if (randomBoolean()) {
settings = Settings.builder()
.put(IndexSettings.INDEX_SOFT_DELETES_RETENTION_LEASE_PERIOD_SETTING.getKey(),
TimeValue.timeValueMillis(randomLongBetween(1, TimeValue.timeValueHours(12).millis())))
.build();
} else {
settings = Settings.EMPTY;
}
safeCommitInfo = null; // must be set in each test
final long primaryTerm = randomLongBetween(1, Long.MAX_VALUE);
replicationTracker = new ReplicationTracker(
new ShardId("test", "_na", 0),
primaryAllocationId.getId(),
IndexSettingsModule.newIndexSettings("test", settings),
primaryTerm,
UNASSIGNED_SEQ_NO,
value -> { },
currentTimeMillis::get,
(leases, listener) -> { },
() -> safeCommitInfo);
replicationTracker.updateFromMaster(1L, Collections.singleton(primaryAllocationId.getId()),
routingTable(Collections.emptySet(), primaryAllocationId));
replicationTracker.activatePrimaryMode(SequenceNumbers.NO_OPS_PERFORMED);
final AllocationId replicaAllocationId = AllocationId.newInitializing();
final IndexShardRoutingTable routingTableWithReplica
= routingTable(Collections.singleton(replicaAllocationId), primaryAllocationId);
replicationTracker.updateFromMaster(2L, Collections.singleton(primaryAllocationId.getId()), routingTableWithReplica);
replicationTracker.addPeerRecoveryRetentionLease(
routingTableWithReplica.getByAllocationId(replicaAllocationId.getId()).currentNodeId(), randomCheckpoint(),
EMPTY_LISTENER);
replicationTracker.initiateTracking(replicaAllocationId.getId());
replicationTracker.markAllocationIdAsInSync(replicaAllocationId.getId(), randomCheckpoint());
}
private long randomCheckpoint() {
return randomBoolean() ? SequenceNumbers.NO_OPS_PERFORMED : randomNonNegativeLong();
}
private void startReplica() {
final ShardRouting replicaShardRouting = replicationTracker.routingTable.replicaShards().get(0);
final IndexShardRoutingTable.Builder builder = new IndexShardRoutingTable.Builder(replicationTracker.routingTable);
builder.removeShard(replicaShardRouting);
builder.addShard(replicaShardRouting.moveToStarted());
replicationTracker.updateFromMaster(replicationTracker.appliedClusterStateVersion + 1,
replicationTracker.routingTable.shards().stream().map(sr -> sr.allocationId().getId()).collect(Collectors.toSet()),
builder.build());
}
public void testPeerRecoveryRetentionLeasesForAssignedCopiesDoNotEverExpire() {
if (randomBoolean()) {
startReplica();
}
currentTimeMillis.set(currentTimeMillis.get() + randomLongBetween(0, Long.MAX_VALUE - currentTimeMillis.get()));
safeCommitInfo = randomSafeCommitInfo();
final Tuple<Boolean, RetentionLeases> retentionLeases = replicationTracker.getRetentionLeases(true);
assertFalse(retentionLeases.v1());
final Set<String> leaseIds = retentionLeases.v2().leases().stream().map(RetentionLease::id).collect(Collectors.toSet());
assertThat(leaseIds, hasSize(2));
assertThat(leaseIds, equalTo(replicationTracker.routingTable.shards().stream()
.map(ReplicationTracker::getPeerRecoveryRetentionLeaseId).collect(Collectors.toSet())));
}
// A retention lease held for an unknown (unassigned) node must survive while the clock is
// still within the configured lease period, as long as not all routing-table shards have
// started.
public void testPeerRecoveryRetentionLeasesForUnassignedCopiesDoNotExpireImmediatelyIfShardsNotAllStarted() {
    final String unknownNodeId = randomAlphaOfLength(10);
    final long globalCheckpoint = randomNonNegativeLong(); // not NO_OPS_PERFORMED since this always results in file-based recovery
    replicationTracker.addPeerRecoveryRetentionLease(unknownNodeId, globalCheckpoint, EMPTY_LISTENER);
    // Advance the clock by at most the lease period, so the unknown node's lease is not yet stale.
    currentTimeMillis.set(currentTimeMillis.get()
        + randomLongBetween(0, IndexSettings.INDEX_SOFT_DELETES_RETENTION_LEASE_PERIOD_SETTING.get(settings).millis()));
    safeCommitInfo = randomSafeCommitInfoSuitableForOpsBasedRecovery(globalCheckpoint);
    final Tuple<Boolean, RetentionLeases> retentionLeases = replicationTracker.getRetentionLeases(true);
    assertFalse("should not have expired anything", retentionLeases.v1());
    final Set<String> leaseIds = retentionLeases.v2().leases().stream().map(RetentionLease::id).collect(Collectors.toSet());
    // The unknown node's lease plus one lease per assigned copy in the routing table.
    assertThat(leaseIds, hasSize(3));
    assertThat(leaseIds, equalTo(Stream.concat(Stream.of(ReplicationTracker.getPeerRecoveryRetentionLeaseId(unknownNodeId)),
        replicationTracker.routingTable.shards().stream()
            .map(ReplicationTracker::getPeerRecoveryRetentionLeaseId)).collect(Collectors.toSet())));
}
// Once the clock advances strictly beyond the retention lease period, the lease for an
// unassigned (unknown-node) copy must expire, even though the safe commit would still
// permit an ops-based recovery.
public void testPeerRecoveryRetentionLeasesForUnassignedCopiesExpireEventually() {
    if (randomBoolean()) {
        startReplica();
    }
    final String unknownNodeId = randomAlphaOfLength(10);
    final long globalCheckpoint = randomCheckpoint();
    replicationTracker.addPeerRecoveryRetentionLease(unknownNodeId, globalCheckpoint, EMPTY_LISTENER);
    // Jump past the lease period (+1 makes it strictly later) so the lease becomes stale.
    currentTimeMillis.set(randomLongBetween(
        currentTimeMillis.get() + IndexSettings.INDEX_SOFT_DELETES_RETENTION_LEASE_PERIOD_SETTING.get(settings).millis() + 1,
        Long.MAX_VALUE));
    safeCommitInfo = randomSafeCommitInfoSuitableForOpsBasedRecovery(globalCheckpoint);
    final Tuple<Boolean, RetentionLeases> retentionLeases = replicationTracker.getRetentionLeases(true);
    assertTrue("should have expired something", retentionLeases.v1());
    final Set<String> leaseIds = retentionLeases.v2().leases().stream().map(RetentionLease::id).collect(Collectors.toSet());
    // Only the assigned copies' leases remain.
    assertThat(leaseIds, hasSize(2));
    assertThat(leaseIds, equalTo(replicationTracker.routingTable.shards().stream()
        .map(ReplicationTracker::getPeerRecoveryRetentionLeaseId).collect(Collectors.toSet())));
}
// When every shard copy in the routing table is started, the lease for an unknown node
// expires on the next lease computation regardless of how little time has passed.
public void testPeerRecoveryRetentionLeasesForUnassignedCopiesExpireImmediatelyIfShardsAllStarted() {
    final String unknownNodeId = randomAlphaOfLength(10);
    replicationTracker.addPeerRecoveryRetentionLease(unknownNodeId, randomCheckpoint(), EMPTY_LISTENER);
    startReplica();
    // Usually stay within the lease period, to demonstrate expiry does not depend on elapsed time.
    currentTimeMillis.set(currentTimeMillis.get() +
        (usually()
            ? randomLongBetween(0, IndexSettings.INDEX_SOFT_DELETES_RETENTION_LEASE_PERIOD_SETTING.get(settings).millis())
            : randomLongBetween(0, Long.MAX_VALUE - currentTimeMillis.get())));
    safeCommitInfo = randomSafeCommitInfo();
    final Tuple<Boolean, RetentionLeases> retentionLeases = replicationTracker.getRetentionLeases(true);
    // The unknown node's lease must have been discarded...
    assertTrue(retentionLeases.v1());
    final Set<String> leaseIds = retentionLeases.v2().leases().stream().map(RetentionLease::id).collect(Collectors.toSet());
    // ...leaving exactly one lease per assigned copy.
    assertThat(leaseIds, hasSize(2));
    assertThat(leaseIds, equalTo(replicationTracker.routingTable.shards().stream()
        .map(ReplicationTracker::getPeerRecoveryRetentionLeaseId).collect(Collectors.toSet())));
}
// If the safe commit is ahead of the unknown node's global checkpoint then a file-based
// recovery is cheaper than retaining the history, so the lease is discarded without any
// clock advance at all.
public void testPeerRecoveryRetentionLeasesForUnassignedCopiesExpireIfRetainingTooMuchHistory() {
    if (randomBoolean()) {
        startReplica();
    }
    final String unknownNodeId = randomAlphaOfLength(10);
    // NO_OPS_PERFORMED is excluded: such a copy always needs a file-based recovery anyway,
    // which would make the scenario trivial.
    final long globalCheckpoint = randomValueOtherThan(SequenceNumbers.NO_OPS_PERFORMED, this::randomCheckpoint);
    replicationTracker.addPeerRecoveryRetentionLease(unknownNodeId, globalCheckpoint, EMPTY_LISTENER);
    safeCommitInfo = randomSafeCommitInfoSuitableForFileBasedRecovery(globalCheckpoint);
    final Tuple<Boolean, RetentionLeases> retentionLeases = replicationTracker.getRetentionLeases(true);
    assertTrue("should have expired something", retentionLeases.v1());
    final Set<String> leaseIds = retentionLeases.v2().leases().stream().map(RetentionLease::id).collect(Collectors.toSet());
    // Only the assigned copies' leases remain.
    assertThat(leaseIds, hasSize(2));
    assertThat(leaseIds, equalTo(replicationTracker.routingTable.shards().stream()
        .map(ReplicationTracker::getPeerRecoveryRetentionLeaseId).collect(Collectors.toSet())));
}
// Produces an arbitrary safe-commit description: either the EMPTY sentinel or one with a
// random local checkpoint and doc count. Random calls are made in the same order as the
// original ternary form, preserving seed reproducibility.
private SafeCommitInfo randomSafeCommitInfo() {
    if (randomBoolean()) {
        return SafeCommitInfo.EMPTY;
    }
    final long localCheckpoint = randomFrom(randomNonNegativeLong(), (long) randomIntBetween(0, Integer.MAX_VALUE));
    return new SafeCommitInfo(localCheckpoint, randomIntBetween(0, Integer.MAX_VALUE));
}
// Simulates a safe commit at or behind the given global checkpoint, so that retained
// operations suffice for recovery and no files need to be transferred.
private SafeCommitInfo randomSafeCommitInfoSuitableForOpsBasedRecovery(long globalCheckpoint) {
    final long safeCommitLocalCheckpoint = randomLongBetween(NO_OPS_PERFORMED, globalCheckpoint);
    final int docCount = between(0, Math.toIntExact(Math.min(safeCommitLocalCheckpoint + 1, Integer.MAX_VALUE)));
    return new SafeCommitInfo(safeCommitLocalCheckpoint, docCount);
}
// Simulates a later, empty safe commit strictly ahead of the global checkpoint: copying
// zero documents is always cheaper than replaying any operations, so file-based recovery
// is preferred.
private SafeCommitInfo randomSafeCommitInfoSuitableForFileBasedRecovery(long globalCheckpoint) {
    final long laterLocalCheckpoint = randomLongBetween(globalCheckpoint + 1, Long.MAX_VALUE);
    return new SafeCommitInfo(laterLocalCheckpoint, 0);
}
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.kendra.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
* <p>
* Provides the configuration information for the knowledge article types that Amazon Kendra indexes. Amazon Kendra
* indexes standard knowledge articles and the standard fields of knowledge articles, or the custom fields of custom
* knowledge articles, but not both
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/kendra-2019-02-03/SalesforceKnowledgeArticleConfiguration"
* target="_top">AWS API Documentation</a>
*/
/**
 * Configuration for the Salesforce knowledge article types that Amazon Kendra indexes. Kendra
 * indexes standard knowledge articles and their standard fields, or the custom fields of custom
 * knowledge articles, but not both.
 *
 * <p>This class is a mutable builder-style POJO: the {@code with*} methods mutate and return
 * {@code this} for chaining. It is not thread-safe.</p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/kendra-2019-02-03/SalesforceKnowledgeArticleConfiguration"
 *      target="_top">AWS API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class SalesforceKnowledgeArticleConfiguration implements Serializable, Cloneable, StructuredPojo {

    /** Document states to include when indexing knowledge articles; at least one is required. */
    private java.util.List<String> includedStates;

    /** Configuration for standard Salesforce knowledge articles. */
    private SalesforceStandardKnowledgeArticleTypeConfiguration standardKnowledgeArticleTypeConfiguration;

    /** Configuration for custom Salesforce knowledge articles. */
    private java.util.List<SalesforceCustomKnowledgeArticleTypeConfiguration> customKnowledgeArticleTypeConfigurations;

    /**
     * Returns the document states included when Amazon Kendra indexes knowledge articles.
     *
     * @return the included states, or {@code null} if none were set.
     * @see SalesforceKnowledgeArticleState
     */
    public java.util.List<String> getIncludedStates() {
        return includedStates;
    }

    /**
     * Sets the document states included when Amazon Kendra indexes knowledge articles. The
     * collection is defensively copied; {@code null} clears the field. At least one state must
     * be specified for the configuration to be valid.
     *
     * @param includedStates the document states to include.
     * @see SalesforceKnowledgeArticleState
     */
    public void setIncludedStates(java.util.Collection<String> includedStates) {
        this.includedStates = (includedStates == null) ? null : new java.util.ArrayList<String>(includedStates);
    }

    /**
     * Appends document states to the existing list (if any). Use
     * {@link #setIncludedStates(java.util.Collection)} or
     * {@link #withIncludedStates(java.util.Collection)} to replace the existing values instead.
     *
     * @param includedStates the document states to append.
     * @return this object, for chaining.
     * @see SalesforceKnowledgeArticleState
     */
    public SalesforceKnowledgeArticleConfiguration withIncludedStates(String... includedStates) {
        if (this.includedStates == null) {
            this.includedStates = new java.util.ArrayList<String>(includedStates.length);
        }
        java.util.Collections.addAll(this.includedStates, includedStates);
        return this;
    }

    /**
     * Replaces the document states included when Amazon Kendra indexes knowledge articles.
     *
     * @param includedStates the document states to include.
     * @return this object, for chaining.
     * @see SalesforceKnowledgeArticleState
     */
    public SalesforceKnowledgeArticleConfiguration withIncludedStates(java.util.Collection<String> includedStates) {
        setIncludedStates(includedStates);
        return this;
    }

    /**
     * Appends document states, given as enum constants, to the existing list (if any). Each
     * value is stored via its {@code toString()} form.
     *
     * @param includedStates the document states to append.
     * @return this object, for chaining.
     * @see SalesforceKnowledgeArticleState
     */
    public SalesforceKnowledgeArticleConfiguration withIncludedStates(SalesforceKnowledgeArticleState... includedStates) {
        final java.util.List<String> asStrings = new java.util.ArrayList<String>(includedStates.length);
        for (SalesforceKnowledgeArticleState state : includedStates) {
            asStrings.add(state.toString());
        }
        if (this.includedStates == null) {
            setIncludedStates(asStrings);
        } else {
            this.includedStates.addAll(asStrings);
        }
        return this;
    }

    /**
     * Sets the configuration for standard Salesforce knowledge articles.
     *
     * @param standardKnowledgeArticleTypeConfiguration the standard-article configuration.
     */
    public void setStandardKnowledgeArticleTypeConfiguration(SalesforceStandardKnowledgeArticleTypeConfiguration standardKnowledgeArticleTypeConfiguration) {
        this.standardKnowledgeArticleTypeConfiguration = standardKnowledgeArticleTypeConfiguration;
    }

    /**
     * Returns the configuration for standard Salesforce knowledge articles.
     *
     * @return the standard-article configuration, or {@code null} if unset.
     */
    public SalesforceStandardKnowledgeArticleTypeConfiguration getStandardKnowledgeArticleTypeConfiguration() {
        return this.standardKnowledgeArticleTypeConfiguration;
    }

    /**
     * Sets the configuration for standard Salesforce knowledge articles.
     *
     * @param standardKnowledgeArticleTypeConfiguration the standard-article configuration.
     * @return this object, for chaining.
     */
    public SalesforceKnowledgeArticleConfiguration withStandardKnowledgeArticleTypeConfiguration(
            SalesforceStandardKnowledgeArticleTypeConfiguration standardKnowledgeArticleTypeConfiguration) {
        setStandardKnowledgeArticleTypeConfiguration(standardKnowledgeArticleTypeConfiguration);
        return this;
    }

    /**
     * Returns the configurations for custom Salesforce knowledge articles.
     *
     * @return the custom-article configurations, or {@code null} if none were set.
     */
    public java.util.List<SalesforceCustomKnowledgeArticleTypeConfiguration> getCustomKnowledgeArticleTypeConfigurations() {
        return customKnowledgeArticleTypeConfigurations;
    }

    /**
     * Sets the configurations for custom Salesforce knowledge articles. The collection is
     * defensively copied; {@code null} clears the field.
     *
     * @param customKnowledgeArticleTypeConfigurations the custom-article configurations.
     */
    public void setCustomKnowledgeArticleTypeConfigurations(
            java.util.Collection<SalesforceCustomKnowledgeArticleTypeConfiguration> customKnowledgeArticleTypeConfigurations) {
        this.customKnowledgeArticleTypeConfigurations = (customKnowledgeArticleTypeConfigurations == null)
                ? null
                : new java.util.ArrayList<SalesforceCustomKnowledgeArticleTypeConfiguration>(customKnowledgeArticleTypeConfigurations);
    }

    /**
     * Appends custom-article configurations to the existing list (if any). Use
     * {@link #setCustomKnowledgeArticleTypeConfigurations(java.util.Collection)} or
     * {@link #withCustomKnowledgeArticleTypeConfigurations(java.util.Collection)} to replace the
     * existing values instead.
     *
     * @param customKnowledgeArticleTypeConfigurations the custom-article configurations to append.
     * @return this object, for chaining.
     */
    public SalesforceKnowledgeArticleConfiguration withCustomKnowledgeArticleTypeConfigurations(
            SalesforceCustomKnowledgeArticleTypeConfiguration... customKnowledgeArticleTypeConfigurations) {
        if (this.customKnowledgeArticleTypeConfigurations == null) {
            this.customKnowledgeArticleTypeConfigurations = new java.util.ArrayList<SalesforceCustomKnowledgeArticleTypeConfiguration>(
                    customKnowledgeArticleTypeConfigurations.length);
        }
        java.util.Collections.addAll(this.customKnowledgeArticleTypeConfigurations, customKnowledgeArticleTypeConfigurations);
        return this;
    }

    /**
     * Replaces the configurations for custom Salesforce knowledge articles.
     *
     * @param customKnowledgeArticleTypeConfigurations the custom-article configurations.
     * @return this object, for chaining.
     */
    public SalesforceKnowledgeArticleConfiguration withCustomKnowledgeArticleTypeConfigurations(
            java.util.Collection<SalesforceCustomKnowledgeArticleTypeConfiguration> customKnowledgeArticleTypeConfigurations) {
        setCustomKnowledgeArticleTypeConfigurations(customKnowledgeArticleTypeConfigurations);
        return this;
    }

    /**
     * Returns a string representation of this object for testing and debugging. Sensitive data
     * would be redacted with a placeholder value.
     *
     * @return a string representation of this object.
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        // Field labels and separators must stay exactly as generated for log compatibility.
        final StringBuilder sb = new StringBuilder("{");
        if (getIncludedStates() != null)
            sb.append("IncludedStates: ").append(getIncludedStates()).append(",");
        if (getStandardKnowledgeArticleTypeConfiguration() != null)
            sb.append("StandardKnowledgeArticleTypeConfiguration: ").append(getStandardKnowledgeArticleTypeConfiguration()).append(",");
        if (getCustomKnowledgeArticleTypeConfigurations() != null)
            sb.append("CustomKnowledgeArticleTypeConfigurations: ").append(getCustomKnowledgeArticleTypeConfigurations());
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (!(obj instanceof SalesforceKnowledgeArticleConfiguration))
            return false;
        final SalesforceKnowledgeArticleConfiguration other = (SalesforceKnowledgeArticleConfiguration) obj;
        // Objects.equals matches the original null-xor/equals pattern field by field.
        return java.util.Objects.equals(getIncludedStates(), other.getIncludedStates())
                && java.util.Objects.equals(getStandardKnowledgeArticleTypeConfiguration(), other.getStandardKnowledgeArticleTypeConfiguration())
                && java.util.Objects.equals(getCustomKnowledgeArticleTypeConfigurations(), other.getCustomKnowledgeArticleTypeConfigurations());
    }

    @Override
    public int hashCode() {
        // Objects.hash uses the identical 31-prime accumulation (null -> 0) as the generated
        // hand-rolled version, so hash values are unchanged.
        return java.util.Objects.hash(getIncludedStates(), getStandardKnowledgeArticleTypeConfiguration(),
                getCustomKnowledgeArticleTypeConfigurations());
    }

    @Override
    public SalesforceKnowledgeArticleConfiguration clone() {
        try {
            return (SalesforceKnowledgeArticleConfiguration) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() even though we're Cloneable!", e);
        }
    }

    /** Marshals this object with the generated protocol marshaller. Internal SDK use only. */
    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.kendra.model.transform.SalesforceKnowledgeArticleConfigurationMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
| |
/*
Copyright 2012 Selenium committers
Copyright 2012 Software Freedom Conservancy
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.thoughtworks.selenium;
import com.google.common.base.Charsets;
import com.google.common.base.Throwables;
import com.google.common.io.Files;
import com.google.common.io.Resources;
import org.junit.After;
import org.junit.Assume;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.rules.ExternalResource;
import org.junit.rules.RuleChain;
import org.junit.rules.TestRule;
import org.junit.rules.TestWatcher;
import org.junit.runner.Description;
import org.junit.runners.model.Statement;
import org.openqa.selenium.Build;
import org.openqa.selenium.JavascriptExecutor;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebDriverBackedSelenium;
import org.openqa.selenium.WebDriverException;
import org.openqa.selenium.environment.GlobalTestEnvironment;
import org.openqa.selenium.internal.WrapsDriver;
import org.openqa.selenium.remote.DesiredCapabilities;
import org.openqa.selenium.testing.DevMode;
import org.openqa.selenium.testing.InProject;
import org.openqa.selenium.testing.drivers.Browser;
import org.openqa.selenium.testing.drivers.WebDriverBuilder;
import org.openqa.selenium.v1.SeleniumTestEnvironment;
import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.util.Arrays;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.logging.Logger;
import static com.thoughtworks.selenium.BrowserConfigurationOptions.MULTI_WINDOW;
import static com.thoughtworks.selenium.BrowserConfigurationOptions.SINGLE_WINDOW;
import static org.openqa.selenium.UnexpectedAlertBehaviour.IGNORE;
import static org.openqa.selenium.remote.CapabilityType.UNEXPECTED_ALERT_BEHAVIOUR;
/**
 * Base class for "Selenese" (Selenium RC API) tests run inside the WebDriver build. It lazily
 * starts one Selenium session per thread (either a real RC client or a WebDriver-backed
 * emulation), wires per-test JUnit rules in a fixed order, and in dev mode builds/copies the
 * JavaScript atoms the emulation needs. Rule ordering and the static one-shot build guard are
 * deliberate; do not reorder without understanding the RuleChain below.
 */
public class InternalSelenseTestBase extends SeleneseTestBase {
  private static final Logger log = Logger.getLogger(InternalSelenseTestBase.class.getName());

  // One shared Selenium session per thread, reused across tests until destroyDriver() is called.
  private static final ThreadLocal<Selenium> instance = new ThreadLocal<Selenium>();

  // URL of the standalone Selenium server; captured once in initializeServer().
  private static String seleniumServerUrl;

  // compareAndSet ensures the dev-mode build runs at most once per JVM even though
  // @BeforeClass fires for every test class.
  private static final AtomicBoolean mustBuild = new AtomicBoolean(true);

  /**
   * In dev mode, builds the selenium-emulation JS atoms and copies them (plus sizzle and the
   * testHelpers scripts) into the production build tree so a WebDriver-backed Selenium can load
   * them. Build failures are logged to stderr but do not abort the run (best-effort).
   */
  @BeforeClass
  public static void buildJavascriptLibraries() throws IOException {
    if (!DevMode.isInDevMode() || !mustBuild.compareAndSet(true, false)) {
      return;
    }

    log.info("In dev mode. Copying required files in case we're using a WebDriver-backed Selenium");

    try {
      new Build().of(
          "//java/client/src/org/openqa/selenium/internal/seleniumemulation",
          "//third_party/js/sizzle"
      ).go();

      File buildDir = InProject.locate("java/client/build/production/org/openqa/selenium/internal/seleniumemulation");
      buildDir = new File(buildDir, "selenium_atoms");
      if (!buildDir.exists()) {
        assertTrue(buildDir.mkdir());
      }
      File atomsDir = InProject.locate("build/javascript/selenium-atoms");

      // NOTE(review): listFiles() returns null if the directory is missing or unreadable —
      // presumably the preceding build guarantees it exists; confirm before relying on it.
      for (File file : atomsDir.listFiles()) {
        if (file.getName().endsWith(".js")) {
          File dest = new File(buildDir, file.getName());
          Files.copy(file, dest);
        }
      }

      File sizzle = InProject.locate("third_party/js/sizzle/sizzle.js");
      Files.copy(sizzle, new File(buildDir, "sizzle.js"));

      File seDir = InProject.locate("java/client/test/com/thoughtworks/selenium");
      File destDir = InProject.locate("java/client/build/production/com/thoughtworks/selenium");
      for (File file : seDir.listFiles()) {
        if (file.getName().endsWith(".js")) {
          File dest = new File(destDir, file.getName());
          Files.copy(file, dest);
        }
      }

    } catch (WebDriverException e) {
      // Best-effort: emulation tests may still run if a previous build left usable artifacts.
      System.err.println("Cannot build javascript libraries for selenium emulation: " + e.getMessage());
    }
  }

  /** Captures the Selenium server URL from the shared test environment (starting it if needed). */
  @BeforeClass
  public static void initializeServer() {
    SeleniumTestEnvironment env = GlobalTestEnvironment.get(SeleniumTestEnvironment.class);

    seleniumServerUrl = env.getSeleniumServerUrl();
  }

  // Logs test start/finish so interleaved browser logs can be attributed to a test.
  public TestWatcher traceMethodName = new TestWatcher() {
    @Override
    protected void starting(Description description) {
      super.starting(description);
      log.info(">>> Starting " + description);
    }

    @Override
    protected void finished(Description description) {
      super.finished(description);
      log.info("<<< Finished " + description);
    }
  };

  // Lazily creates (or reuses) the per-thread Selenium session before each test.
  public ExternalResource initializeSelenium = new ExternalResource() {
    @Override
    protected void before() throws Throwable {
      selenium = instance.get();
      if (selenium != null) {
        return;
      }

      DesiredCapabilities caps = new DesiredCapabilities();
      caps.setCapability(UNEXPECTED_ALERT_BEHAVIOUR, IGNORE);

      if (Boolean.getBoolean("singlewindow")) {
        caps.setCapability(SINGLE_WINDOW, true);
        caps.setCapability(MULTI_WINDOW, "");
      }

      if (Boolean.getBoolean("webdriver.debug")) {
        caps.setCapability("browserSideLog", true);
      }

      String baseUrl = whereIs("/selenium-server/tests/");
      caps.setCapability("selenium.base.url", baseUrl);
      caps.setCapability("selenium.server.url", seleniumServerUrl);

      // "selenium.browser.selenium" selects a genuine RC client; otherwise a WebDriver-backed
      // emulation is used.
      if (Boolean.getBoolean("selenium.browser.selenium")) {
        URL serverUrl = new URL(seleniumServerUrl);
        selenium = new DefaultSelenium(serverUrl.getHost(), serverUrl.getPort(), determineBrowserName(), baseUrl);
        selenium.start();
      } else {
        WebDriver driver = new WebDriverBuilder().setDesiredCapabilities(caps).get();
        selenium = new WebDriverBackedSelenium(driver, baseUrl);
      }

      selenium.setBrowserLogLevel("debug");
      instance.set(selenium);
    }
  };

  /**
   * Maps the "selenium.browser" system property onto a Selenium RC launcher string. Values
   * already starting with "*" are passed through unchanged.
   */
  private String determineBrowserName() {
    String property = System.getProperty("selenium.browser");
    if (property == null) {
      return "*chrome"; // default: "*chrome" is Selenium RC's launcher name for Firefox in chrome mode
    }

    if (property.startsWith("*")) {
      return property;
    }

    Browser browser = Browser.valueOf(property);
    switch (browser) {
      case chrome:
        return "*googlechrome";

      case ie:
        return "*iexplore";

      case ff:
        return "*firefox";

      case safari:
        return "*safari";

      default:
        fail("Attempt to use an unsupported browser: " + property);
    }
    return null; // unreachable: fail() above throws, but the compiler requires a return.
  }

  // Injects testHelpers.js into the browser when running the WebDriver-backed emulation;
  // no-op for a genuine RC session.
  public ExternalResource addNecessaryJavascriptCommands = new ExternalResource() {
    @Override
    protected void before() throws Throwable {
      if (selenium == null || !(selenium instanceof WebDriverBackedSelenium)) {
        return;
      }

      // We need to be a on page where we can execute JS
      WebDriver driver = ((WrapsDriver) selenium).getWrappedDriver();
      driver.get(whereIs("/selenium-server"));

      try {
        URL scriptUrl =
            Resources.getResource(getClass(), "/com/thoughtworks/selenium/testHelpers.js");
        String script = Resources.toString(scriptUrl, Charsets.UTF_8);

        ((JavascriptExecutor) driver).executeScript(script);
      } catch (IOException e) {
        fail("Cannot read script: " + Throwables.getStackTraceAsString(e));
      }
    }
  };

  // Restores focus to the main window so a test that switched windows doesn't leak state
  // into the next one.
  public ExternalResource returnFocusToMainWindow = new ExternalResource() {
    @Override
    protected void before() throws Throwable {
      if (selenium == null) {
        return;
      }

      try {
        selenium.selectWindow("");
        selenium.windowFocus();
      } catch (SeleniumException e) {
        // TODO(simon): Window switching in Opera is picky.
        if (Browser.detect() != Browser.opera) {
          throw e;
        }
      }
    }
  };

  // Skips tests not matching the "only_run" (class) and "method" system-property filters.
  public TestWatcher filter = new TestWatcher() {
    @Override
    public Statement apply(Statement base, Description description) {
      String onlyRun = System.getProperty("only_run");
      Assume.assumeTrue(onlyRun == null ||
          Arrays.asList(onlyRun.split(",")).contains(description.getTestClass().getSimpleName()));
      String mth = System.getProperty("method");
      Assume.assumeTrue(mth == null ||
          Arrays.asList(mth.split(",")).contains(description.getMethodName()));
      return super.apply(base, description);
    }
  };

  // Rule order matters: filter first (skip cheaply), then start Selenium, then fix window
  // focus, then inject helpers, then trace — innermost rule runs closest to the test body.
  @Rule
  public TestRule chain =
      RuleChain.outerRule(filter)
          .around(initializeSelenium)
          .around(returnFocusToMainWindow)
          .around(addNecessaryJavascriptCommands)
          .around(traceMethodName);

  /** Fails the test after the fact if any verify*() calls recorded errors. */
  @After
  public void checkVerifications() {
    checkForVerificationErrors();
  }

  private String whereIs(String location) {
    return GlobalTestEnvironment.get().getAppServer().whereIs(location);
  }

  /** Stops and discards this thread's Selenium session unless told to leave it running. */
  public static void destroyDriver() {
    if (Boolean.getBoolean("webdriver.singletestsuite.leaverunning")) {
      return;
    }

    Selenium selenium = instance.get();
    if (selenium != null) {
      selenium.stop();
      instance.remove();
    }
  }
}
| |
package ch.bastiangardel.easypay.rest;
import ch.bastiangardel.easypay.dto.*;
import ch.bastiangardel.easypay.exception.*;
import ch.bastiangardel.easypay.model.CheckOut;
import ch.bastiangardel.easypay.model.Receipt;
import ch.bastiangardel.easypay.model.User;
import ch.bastiangardel.easypay.repository.CheckOutRepository;
import ch.bastiangardel.easypay.repository.ReceiptRepository;
import ch.bastiangardel.easypay.repository.UserRepository;
import com.fasterxml.jackson.annotation.JsonView;
import com.notnoop.apns.APNS;
import com.notnoop.apns.ApnsService;
import io.swagger.annotations.ApiOperation;
import org.apache.shiro.SecurityUtils;
import org.apache.shiro.authz.UnauthorizedException;
import org.apache.shiro.authz.annotation.RequiresAuthentication;
import org.apache.shiro.authz.annotation.RequiresRoles;
import org.apache.shiro.subject.Subject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import java.util.LinkedList;
import java.util.List;
import static org.springframework.web.bind.annotation.RequestMethod.GET;
import static org.springframework.web.bind.annotation.RequestMethod.POST;
/**
* Created by bastiangardel on 16.05.16.
*
* Copyright (c) 2016 Bastian Gardel
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software
* and associated documentation files (the "Software"), to deal in the Software without restriction,
* including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or substantial
* portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
* LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
@RestController
@RequestMapping("/receipts")
public class ReceiptController {
private static final Logger log = LoggerFactory.
getLogger(ReceiptController.class);
@Autowired
private ReceiptRepository receiptRepo;
@Autowired
private CheckOutRepository checkOutRepo;
@Autowired
private UserRepository userRepo;
@RequestMapping(method = POST)
@RequiresAuthentication
@RequiresRoles("SELLER")
public SuccessMessageDTO create(@RequestBody ReceiptCreationDTO receiptCreationDTO){
log.info("create new Receipt {}");
CheckOut checkOut;
checkOut = checkOutRepo.findByUuid(receiptCreationDTO.getUuidCheckout());
if (checkOut == null)
throw new CheckOutNotFoundException("Not Found CheckOut with UUID : " + receiptCreationDTO.getUuidCheckout());
final Subject subject = SecurityUtils.getSubject();
log.info("{} create new Receipt from {}", checkOut.getOwner().getEmail(), subject.getSession().getAttribute("email"));
if(!checkOut.getOwner().getEmail().equals(subject.getSession().getAttribute("email")))
throw new OwnerException("Your are not the owner of this checkout");
if(checkOut.getLastReceipt() != null)
throw new ReceiptToPayAlreadyExist("There is already a receipt to pay in this checkout");
Receipt receipt = receiptRepo.save(receiptCreationDTO.dtoToModel());
checkOut.setLastReceipt(receipt);
checkOutRepo.save(checkOut);
return new SuccessMessageDTO("Creation with Success");
}
@JsonView(View.Summary.class)
@RequestMapping(method = GET)
@RequiresAuthentication
@RequiresRoles("ADMIN" )
public List<Receipt> getAll() {
log.info("Get All Receipt");
return (List<Receipt>) receiptRepo.findAll();
}
@RequestMapping(value = "/history", method = GET)
@RequiresAuthentication
public List<ReceiptHistoryDTO> getReceiptHistory(@RequestParam("uuid") String uuid){
log.info("Get Receipt History from checkOut : {}", uuid);
CheckOut checkOut;
checkOut = checkOutRepo.findByUuid(uuid);
if (checkOut == null)
throw new CheckOutNotFoundException("Not Found CheckOut with UUID : " + uuid);
List<ReceiptHistoryDTO> list = new LinkedList<>();
for(Receipt receipt : checkOut.getReceiptsHistory())
{
ReceiptHistoryDTO receiptPayDTO = new ReceiptHistoryDTO();
receiptPayDTO.modelToDto(receipt);
list.add(receiptPayDTO);
}
return list;
}
@RequestMapping(value = "/pay", method = GET)
@RequiresAuthentication
public ReceiptPayDTO getReceiptToPay(@RequestParam("uuid") String uuid){
log.info("Get Receipt from checkOut : {}", uuid);
CheckOut checkOut;
checkOut = checkOutRepo.findByUuid(uuid);
if (checkOut == null)
throw new CheckOutNotFoundException("Not Found CheckOut with UUID : " + uuid);
ReceiptPayDTO receiptPayDTO = new ReceiptPayDTO();
Receipt receipt = checkOut.getLastReceipt();
if (receipt == null)
throw new NoReceiptToPayExeption("No Receipt to Pay");
return receiptPayDTO.modelToDto(receipt);
}
@RequestMapping(value = "/pay", method = POST)
@RequiresAuthentication
public SuccessMessageDTO paiement(@RequestBody ReceiptPayDTO receiptPayDTO, @RequestParam("uuid") String uuid){
    // Pays a receipt: validates the receipt and its checkout, checks the
    // authenticated payer's balance, moves the receipt into both histories,
    // transfers the amount from payer to checkout owner, and (best effort)
    // pushes an APNs notification to the device that created the receipt.
    log.info("PayReceipt : {}", receiptPayDTO.getId());
    final Subject subject = SecurityUtils.getSubject();

    Receipt receipt = receiptRepo.findOne(receiptPayDTO.getId());
    if (receipt == null)
        throw new ReceiptNotFoundException("Not found Receipt with ID : " + receiptPayDTO.getId());
    CheckOut checkOut = checkOutRepo.findByUuid(uuid);
    if (checkOut == null)
        throw new CheckOutNotFoundException("Not found CheckOut with UUID : " + uuid);
    User owner = checkOut.getOwner();
    if (receipt.isPaid())
        throw new NoReceiptToPayExeption("Receipt with id : " + receipt.getId() + " already pay");

    // The payer is the currently authenticated user (email held in the session).
    User user = userRepo.findByEmail((String) subject.getSession().getAttribute("email"));
    if (receipt.getAmount() > user.getAmount())
        throw new NotEnoughMoneyException("You have not enough money in your account!!");

    // Move the receipt out of "to pay" and into the checkout's history.
    checkOut.setLastReceipt(null);
    List<Receipt> checkoutHistory = checkOut.getReceiptsHistory();
    checkoutHistory.add(receipt);
    receipt.setPaid(true);

    // Transfer the amount from the payer to the checkout owner and record
    // the receipt in the payer's personal history.
    user.setAmount(user.getAmount() - receipt.getAmount());
    receipt.setPaiyedBy(user);
    owner.setAmount(owner.getAmount() + receipt.getAmount());
    List<Receipt> userHistory = user.getReceiptHistory();
    userHistory.add(receipt);

    // SECURITY(review): the APNs certificate password is hard-coded in source;
    // it should be externalized to configuration / secrets management.
    ApnsService service = APNS.newService()
            .withCert("apns.p12", "sake56ekas")
            .withSandboxDestination()
            .build();
    String payload = APNS.newPayload()
            .alertBody("Receipt " + receipt.getId()+ " on checkout " + uuid + " payed by " + user.getName())
            .alertTitle("Receipt Payed").customField("uuid", uuid).build();
    String token = receipt.getDeviceToken();
    // Bug fix: the original compared with `token != ""`, which is a reference
    // comparison — effectively always true, and it let a null token reach
    // service.push(). Compare content and guard against null instead.
    if (token != null && !token.isEmpty())
    {
        log.info("Payload : {}", payload);
        service.push(token, payload);
        log.info("The notification has been hopefully sent");
    }

    userRepo.save(user);
    userRepo.save(owner);
    receiptRepo.save(receipt);
    checkOutRepo.save(checkOut);
    return new SuccessMessageDTO("Payment executed with Success");
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pirk.schema.query;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.File;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import org.apache.pirk.schema.data.DataSchemaLoader;
import org.apache.pirk.schema.data.partitioner.IPDataPartitioner;
import org.apache.pirk.schema.data.partitioner.PrimitiveTypePartitioner;
import org.apache.pirk.schema.query.filter.StopListFilter;
import org.apache.pirk.test.utils.Inputs;
import org.apache.pirk.test.utils.TestUtils;
import org.apache.pirk.utils.PIRException;
import org.apache.pirk.utils.SystemConfiguration;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
/**
* Test suite for LoadQuerySchema and QuerySchema
*/
/**
 * Test suite for {@code QuerySchemaLoader} and {@code QuerySchema}.
 *
 * <p>Each test builds a temporary data schema (and, where needed, a stoplist),
 * points the relevant {@code SystemConfiguration} properties at it, forces the
 * schema loaders to re-initialize, and restores the original property values
 * before returning so that subsequent tests observe an unmodified configuration.
 */
public class LoadQuerySchemaTest
{
  private static final Logger logger = LoggerFactory.getLogger(LoadQuerySchemaTest.class);

  // File stem and schema names used to build the temporary schemas under test.
  private final String querySchemaFile = "querySchemaFile";
  private final String dataSchemaName = "fakeDataSchema";
  private final String querySchemaName = "fakeQuerySchema";

  // Element names registered in the temporary data schema; element4 serves as
  // the selector, queryElements are returned, and element2 is filtered.
  private final String element1 = "elementName1";
  private final String element2 = "elementName2";
  private final String element3 = "elementName3";
  private final String element4 = "elementName4";

  private final List<String> queryElements = Arrays.asList(element1, element2, element3);
  private final List<String> filterElements = Collections.singletonList(element2);

  /**
   * Loads a fully specified query schema and verifies every accessor: schema
   * and data-schema names, selector, filter type and instance, element lists,
   * and the computed data element size.
   */
  @Test
  public void testGeneralSchemaLoad() throws Exception
  {
    logger.info("Starting testGeneralSchemaLoad: ");

    // Pull off the properties and reset upon completion
    String dataSchemasProp = SystemConfiguration.getProperty("data.schemas", "none");
    String querySchemasProp = SystemConfiguration.getProperty("query.schemas", "none");
    String stopListFileProp = SystemConfiguration.getProperty("pir.stopListFile");

    // Create the stoplist file
    createStopListFile();

    // Create the data schema used and force it to load
    createDataSchema("dataSchemaFile");
    DataSchemaLoader.initialize();

    // Create the query schema used and force it to load
    TestUtils.createQuerySchema(querySchemaFile, querySchemaName, dataSchemaName, element4, queryElements, filterElements, StopListFilter.class.getName());
    QuerySchemaLoader.initialize();

    // Check the entries
    QuerySchema qSchema = QuerySchemaRegistry.get(querySchemaName);

    assertEquals(querySchemaName, qSchema.getSchemaName());
    assertEquals(dataSchemaName, qSchema.getDataSchemaName());
    assertEquals(element4, qSchema.getSelectorName());
    assertEquals(StopListFilter.class.getName(), qSchema.getFilterTypeName());
    assertTrue("Filter class instance must be StopListFilter", qSchema.getFilter() instanceof StopListFilter);

    assertEquals(3, qSchema.getElementNames().size());
    for (String item : qSchema.getElementNames())
    {
      assertTrue("elementNames: item = " + item + " must equal one of: " + element1 + ", " + element2 + ", or " + element3,
          item.equals(element1) || item.equals(element2) || item.equals(element3));
    }
    assertEquals(1, qSchema.getFilteredElementNames().size());
    for (String item : qSchema.getFilteredElementNames())
    {
      assertEquals("filterElementNames: item = " + item + " must equal " + element2, item, element2);
    }

    // Expected size: one string, array of IPs, array of integers
    int stringSize = Integer.parseInt(SystemConfiguration.getProperty("pir.stringBits"));
    int arrayMult = Integer.parseInt(SystemConfiguration.getProperty("pir.numReturnArrayElements"));
    int dataElementSize = stringSize + 32 * arrayMult + 32 * arrayMult;
    assertEquals(dataElementSize, qSchema.getDataElementSize());

    // Reset original query and data schema properties
    SystemConfiguration.setProperty("data.schemas", dataSchemasProp);
    SystemConfiguration.setProperty("query.schemas", querySchemasProp);
    SystemConfiguration.setProperty("pir.stopListFile", stopListFileProp);

    // Force the query and data schemas to load their original values
    if (!dataSchemasProp.equals("none"))
    {
      DataSchemaLoader.initialize();
    }
    if (!querySchemasProp.equals("none"))
    {
      QuerySchemaLoader.initialize();
    }

    logger.info("Finished testGeneralSchemaLoad: ");
  }

  /**
   * Loads a query schema carrying additionalFields key/value pairs and checks
   * that both entries survive the round trip.
   */
  @Test
  public void testGeneralSchemaLoadWithAdditionalFields() throws Exception
  {
    logger.info("Starting testGeneralSchemaLoadWithAdditionalFields: ");

    // Pull off the properties and reset upon completion
    String dataSchemasProp = SystemConfiguration.getProperty("data.schemas", "none");
    String querySchemasProp = SystemConfiguration.getProperty("query.schemas", "none");

    // Create the data schema used and force it to load
    createDataSchema("dataSchemaFile");
    DataSchemaLoader.initialize();

    // Create the additionalFields
    HashMap<String,String> additionalFields = new HashMap<>();
    additionalFields.put("key1", "value1");
    additionalFields.put("key2", "value2");

    // Create the query schema used and force it to load
    TestUtils.createQuerySchema(querySchemaFile, querySchemaName, dataSchemaName, element4, queryElements, filterElements, null, true, null, false,
        additionalFields);
    QuerySchemaLoader.initialize();

    // Check the entries
    QuerySchema qSchema = QuerySchemaRegistry.get(querySchemaName);
    assertNotNull("qSchema is null", qSchema);

    Map<String,String> schemaAdditionalFields = qSchema.getAdditionalFields();
    // Fixed: JUnit's assertEquals takes the expected value first; the original
    // had the arguments reversed, which garbles failure messages.
    assertEquals(2, schemaAdditionalFields.size());
    assertEquals("value1", schemaAdditionalFields.get("key1"));
    assertEquals("value2", schemaAdditionalFields.get("key2"));

    // Reset original query and data schema properties
    SystemConfiguration.setProperty("data.schemas", dataSchemasProp);
    SystemConfiguration.setProperty("query.schemas", querySchemasProp);

    // Force the query and data schemas to load their original values
    if (!dataSchemasProp.equals("none"))
    {
      DataSchemaLoader.initialize();
    }
    if (!querySchemasProp.equals("none"))
    {
      QuerySchemaLoader.initialize();
    }

    logger.info("Finished testGeneralSchemaLoadWithAdditionalFields");
  }

  /**
   * A query schema that names a filter class which cannot be instantiated must
   * be rejected with a {@code PIRException} at load time.
   */
  @Test
  public void testUnknownFilterClass() throws Exception
  {
    logger.info("Starting testUnknownFilterClass: ");

    // Pull off the properties and reset upon completion
    String dataSchemasProp = SystemConfiguration.getProperty("data.schemas", "none");
    String querySchemasProp = SystemConfiguration.getProperty("query.schemas", "none");

    // Create the data schema used and force it to load
    createDataSchema("dataSchemaFile");
    DataSchemaLoader.initialize();

    // Create the query schema used and force it to load
    TestUtils.createQuerySchema(querySchemaFile, querySchemaName, dataSchemaName, "nonExistentElement", queryElements, filterElements, "bogusFilterClass");
    try
    {
      QuerySchemaLoader.initialize();
      fail("QuerySchemaLoader did not throw exception for bogus filter class");
    } catch (PIRException ignore)
    {
      // Expected
    }

    // Reset original query and data schema properties
    SystemConfiguration.setProperty("data.schemas", dataSchemasProp);
    SystemConfiguration.setProperty("query.schemas", querySchemasProp);

    // Force the query and data schemas to load their original values
    if (!dataSchemasProp.equals("none"))
    {
      DataSchemaLoader.initialize();
    }
    if (!querySchemasProp.equals("none"))
    {
      QuerySchemaLoader.initialize();
    }

    // Fixed: this message previously referenced "testFunkyFilterScenarios",
    // a copy-paste leftover from another test.
    logger.info("Finished testUnknownFilterClass");
  }

  /**
   * A query schema that references a non-existent data schema must be rejected
   * with a {@code PIRException} at load time.
   */
  @Test
  public void testDataSchemaDoesNotExist() throws Exception
  {
    logger.info("Starting testDataSchemaDoesNotExist: ");

    // Pull off the properties and reset upon completion
    String querySchemasProp = SystemConfiguration.getProperty("query.schemas", "none");

    // Create the query schema used and force it to load
    TestUtils.createQuerySchema(querySchemaFile, querySchemaName, dataSchemaName + "bogus", element4, queryElements, filterElements, null);
    try
    {
      QuerySchemaLoader.initialize();
      fail("QuerySchemaLoader did not throw exception for non-existent DataSchema");
    } catch (PIRException ignore)
    {
      // Expected
    }

    // Reset original query properties and force to load
    SystemConfiguration.setProperty("query.schemas", querySchemasProp);
    if (!querySchemasProp.equals("none"))
    {
      QuerySchemaLoader.initialize();
    }

    logger.info("Finished testDataSchemaDoesNotExist ");
  }

  /**
   * A query schema whose selector is not an element of the data schema must be
   * rejected at load time.
   */
  @Test
  public void testSelectorDoesNotExistInDataSchema() throws Exception
  {
    logger.info("Starting testSelectorDoesNotExistInDataSchema: ");

    // Pull off the properties and reset upon completion
    String dataSchemasProp = SystemConfiguration.getProperty("data.schemas", "none");
    String querySchemasProp = SystemConfiguration.getProperty("query.schemas", "none");

    // Create the data schema used and force it to load
    createDataSchema("dataSchemaFile");
    DataSchemaLoader.initialize();

    // Create the query schema used and force it to load
    TestUtils.createQuerySchema(querySchemaFile, querySchemaName, dataSchemaName, "nonExistentElement", queryElements, filterElements,
        StopListFilter.class.getName());
    try
    {
      QuerySchemaLoader.initialize();
      fail("QuerySchemaLoader did not throw exception for non-existent selectorName");
    } catch (Exception ignore)
    {
      // Expected
    }

    // Reset original query and data schema properties
    SystemConfiguration.setProperty("data.schemas", dataSchemasProp);
    SystemConfiguration.setProperty("query.schemas", querySchemasProp);

    // Force the query and data schemas to load their original values
    if (!dataSchemasProp.equals("none"))
    {
      DataSchemaLoader.initialize();
    }
    if (!querySchemasProp.equals("none"))
    {
      QuerySchemaLoader.initialize();
    }

    logger.info("Finished testSelectorDoesNotExistInDataSchema ");
  }

  /**
   * Creates the stoplist file and points pir.stopListFile at it.
   * Callers are responsible for restoring the original property value.
   */
  private void createStopListFile() throws IOException, PIRException
  {
    String newSLFile = Inputs.createStopList(null, false);
    SystemConfiguration.setProperty("pir.stopListFile", newSLFile);
  }

  /**
   * Writes the test data schema to a temporary XML file and points
   * data.schemas at it. The schema contains one single string element, an
   * integer array element, an IP-address array element, and a single byte
   * element (the selector).
   *
   * @param schemaFile prefix for the temporary schema file name
   */
  private void createDataSchema(String schemaFile) throws Exception
  {
    // Create a temporary file for the test schema, set in the properties
    File file = File.createTempFile(schemaFile, ".xml");
    file.deleteOnExit();
    logger.info("file = " + file.toString());
    SystemConfiguration.setProperty("data.schemas", file.toString());

    // Write to the file
    DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
    DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
    Document doc = dBuilder.newDocument();

    // root element
    Element rootElement = doc.createElement("schema");
    doc.appendChild(rootElement);

    // Add the schemaName
    Element schemaNameElement = doc.createElement("schemaName");
    schemaNameElement.appendChild(doc.createTextNode(dataSchemaName));
    rootElement.appendChild(schemaNameElement);

    // Add the elements
    // element1 -- single String
    TestUtils.addElement(doc, rootElement, element1, PrimitiveTypePartitioner.STRING, "false", PrimitiveTypePartitioner.class.getName());
    // element2 - -- array of Integers
    TestUtils.addElement(doc, rootElement, element2, PrimitiveTypePartitioner.INT, "true", PrimitiveTypePartitioner.class.getName());
    // element3 -- array of IP addresses
    TestUtils.addElement(doc, rootElement, element3, PrimitiveTypePartitioner.STRING, "true", IPDataPartitioner.class.getName());
    // element4 -- single byte type
    TestUtils.addElement(doc, rootElement, element4, PrimitiveTypePartitioner.BYTE, "false", PrimitiveTypePartitioner.class.getName());

    // Write to a xml file
    TransformerFactory transformerFactory = TransformerFactory.newInstance();
    Transformer transformer = transformerFactory.newTransformer();
    DOMSource source = new DOMSource(doc);
    StreamResult result = new StreamResult(file);
    transformer.transform(source, result);

    // Output for testing
    StreamResult consoleResult = new StreamResult(System.out);
    transformer.transform(source, consoleResult);
    System.out.println();
  }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.sql;
import com.facebook.presto.spi.RecordCursor;
import com.facebook.presto.sql.analyzer.Session;
import com.facebook.presto.sql.planner.ExpressionInterpreter;
import com.facebook.presto.sql.planner.Symbol;
import com.facebook.presto.sql.planner.SymbolResolver;
import com.facebook.presto.sql.tree.Expression;
import com.facebook.presto.sql.tree.LikePredicate;
import com.facebook.presto.sql.tree.QualifiedNameReference;
import com.facebook.presto.sql.tree.StringLiteral;
import io.airlift.slice.Slice;
import io.airlift.slice.Slices;
import org.intellij.lang.annotations.Language;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.testng.Assert;
import org.testng.annotations.Test;
import java.util.concurrent.TimeUnit;
import static com.facebook.presto.connector.dual.DualMetadata.DUAL_METADATA_MANAGER;
import static com.facebook.presto.sql.analyzer.Session.DEFAULT_CATALOG;
import static com.facebook.presto.sql.analyzer.Session.DEFAULT_SCHEMA;
import static com.facebook.presto.sql.parser.SqlParser.createExpression;
import static com.google.common.base.Charsets.UTF_8;
import static java.util.concurrent.TimeUnit.DAYS;
import static java.util.concurrent.TimeUnit.HOURS;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.concurrent.TimeUnit.MINUTES;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.testng.Assert.assertEquals;
public class TestExpressionInterpreter
{
@Test
public void testAnd()
        throws Exception
{
    // Three-valued AND: each row is {expression, expected optimized form}.
    String[][] cases = {
            {"true and false", "false"},
            {"false and true", "false"},
            {"false and false", "false"},
            {"true and null", "null"},
            {"false and null", "false"},
            {"null and true", "null"},
            {"null and false", "false"},
            {"null and null", "null"},
            {"a='z' and true", "a='z'"},
            {"a='z' and false", "false"},
            {"true and a='z'", "a='z'"},
            {"false and a='z'", "false"},
            {"a='z' and b=1+1", "a='z' and b=2"},
    };
    for (String[] c : cases) {
        assertOptimizedEquals(c[0], c[1]);
    }
}
@Test
public void testOr()
        throws Exception
{
    // Three-valued OR: each row is {expression, expected optimized form}.
    String[][] cases = {
            {"true or true", "true"},
            {"true or false", "true"},
            {"false or true", "true"},
            {"false or false", "false"},
            {"true or null", "true"},
            {"null or true", "true"},
            {"null or null", "null"},
            {"false or null", "null"},
            {"null or false", "null"},
            {"a='z' or true", "true"},
            {"a='z' or false", "a='z'"},
            {"true or a='z'", "true"},
            {"false or a='z'", "a='z'"},
            {"a='z' or b=1+1", "a='z' or b=2"},
    };
    for (String[] c : cases) {
        assertOptimizedEquals(c[0], c[1]);
    }
}
@Test
public void testComparison()
        throws Exception
{
    // Equality folding, including null propagation, bound symbols, and a
    // pair of longs that would collide if compared as doubles.
    String[][] cases = {
            {"null = null", "null"},
            {"'a' = 'b'", "false"},
            {"'a' = 'a'", "true"},
            {"'a' = null", "null"},
            {"null = 'a'", "null"},
            {"boundLong = 1234", "true"},
            {"boundDouble = 12.34", "true"},
            {"boundString = 'hello'", "true"},
            {"boundLong = a", "1234 = a"},
            {"10151082135029368 = 10151082135029369", "false"},
    };
    for (String[] c : cases) {
        assertOptimizedEquals(c[0], c[1]);
    }
}
@Test
public void testIsDistinctFrom()
        throws Exception
{
    // IS DISTINCT FROM is null-safe: null vs null is false, null vs value true.
    String[][] cases = {
            {"null is distinct from null", "false"},
            {"3 is distinct from 4", "true"},
            {"3 is distinct from 3", "false"},
            {"3 is distinct from null", "true"},
            {"null is distinct from 3", "true"},
            {"10151082135029368 is distinct from 10151082135029369", "true"},
    };
    for (String[] c : cases) {
        assertOptimizedEquals(c[0], c[1]);
    }
}
@Test
public void testIsNull()
        throws Exception
{
    // IS NULL folds for constants (including null-propagating arithmetic)
    // and partially evaluates sub-expressions of unbound symbols.
    String[][] cases = {
            {"null is null", "true"},
            {"1 is null", "false"},
            {"1.0 is null", "false"},
            {"'a' is null", "false"},
            {"true is null", "false"},
            {"null+1 is null", "true"},
            {"a is null", "a is null"},
            {"a+(1+1) is null", "a+2 is null"},
    };
    for (String[] c : cases) {
        assertOptimizedEquals(c[0], c[1]);
    }
}
@Test
public void testIsNotNull()
        throws Exception
{
    // Mirror of testIsNull for the negated predicate.
    String[][] cases = {
            {"null is not null", "false"},
            {"1 is not null", "true"},
            {"1.0 is not null", "true"},
            {"'a' is not null", "true"},
            {"true is not null", "true"},
            {"null+1 is not null", "false"},
            {"a is not null", "a is not null"},
            {"a+(1+1) is not null", "a+2 is not null"},
    };
    for (String[] c : cases) {
        assertOptimizedEquals(c[0], c[1]);
    }
}
@Test
public void testNullIf()
throws Exception
{
// nullif(x, y) yields null when x = y, otherwise x; a null first argument
// yields null, and a null second argument yields the first argument.
assertOptimizedEquals("nullif(true, true)", "null");
assertOptimizedEquals("nullif(true, false)", "true");
assertOptimizedEquals("nullif(null, false)", "null");
assertOptimizedEquals("nullif(true, null)", "true");
assertOptimizedEquals("nullif('a', 'a')", "null");
assertOptimizedEquals("nullif('a', 'b')", "'a'");
assertOptimizedEquals("nullif(null, 'b')", "null");
assertOptimizedEquals("nullif('a', null)", "'a'");
assertOptimizedEquals("nullif(1, 1)", "null");
assertOptimizedEquals("nullif(1, 2)", "1");
// Mixed long/double comparisons: 1.0 equals 1, 1.1 does not.
assertOptimizedEquals("nullif(1.0, 1)", "null");
assertOptimizedEquals("nullif(1.1, 1)", "1.1");
assertOptimizedEquals("nullif(1.1, 1.1)", "null");
assertOptimizedEquals("nullif(1, 2-1)", "null");
assertOptimizedEquals("nullif(null, null)", "null");
assertOptimizedEquals("nullif(1, null)", "1");
// Unbound symbols survive, but constant sub-expressions are still folded.
assertOptimizedEquals("nullif(a, 1)", "nullif(a, 1)");
assertOptimizedEquals("nullif(a, b)", "nullif(a, b)");
assertOptimizedEquals("nullif(a, b+(1+1))", "nullif(a, b+2)");
}
@Test
public void testNegative()
        throws Exception
{
    // Unary minus folds constants, propagates null, and folds constant
    // sub-expressions under unbound symbols.
    String[][] cases = {
            {"-(1)", "-1"},
            {"-(a+1)", "-(a+1)"},
            {"-(1+1)", "-2"},
            {"-(null)", "null"},
            {"-(a+(1+1))", "-(a+2)"},
    };
    for (String[] c : cases) {
        assertOptimizedEquals(c[0], c[1]);
    }
}
@Test
public void testNot()
        throws Exception
{
    // NOT folds constants (null stays null) and simplifies nested
    // comparisons where possible.
    String[][] cases = {
            {"not true", "false"},
            {"not false", "true"},
            {"not null", "null"},
            {"not 1=1", "false"},
            {"not 1!=1", "true"},
            {"not a=1", "not a=1"},
            {"not a=(1+1)", "not a=2"},
    };
    for (String[] c : cases) {
        assertOptimizedEquals(c[0], c[1]);
    }
}
@Test
public void testFunctionCall()
        throws Exception
{
    // Deterministic function calls are evaluated when all arguments are
    // constant (including bound symbols); otherwise left intact.
    String[][] cases = {
            {"abs(-5)", "5"},
            {"abs(-10-5)", "15"},
            {"abs(-boundLong + 1)", "1233"},
            {"abs(-boundLong)", "1234"},
            {"abs(a)", "abs(a)"},
            {"abs(a + 1)", "abs(a + 1)"},
    };
    for (String[] c : cases) {
        assertOptimizedEquals(c[0], c[1]);
    }
}
@Test
public void testNonDeterministicFunctionCall()
        throws Exception
{
    // The optimizer must leave non-deterministic calls untouched...
    assertOptimizedEquals("random()", "random()");

    // ...while direct evaluation actually executes them and produces a
    // double in [0, 1).
    Object result = evaluate("random()");
    Assert.assertTrue(result instanceof Double);
    double sampled = (double) result;
    Assert.assertTrue(0 <= sampled && sampled < 1);
}
@Test
public void testBetween()
throws Exception
{
// BETWEEN over numeric literals, with null propagating from any operand.
assertOptimizedEquals("3 between 2 and 4", "true");
assertOptimizedEquals("2 between 3 and 4", "false");
assertOptimizedEquals("null between 2 and 4", "null");
assertOptimizedEquals("3 between null and 4", "null");
assertOptimizedEquals("3 between 2 and null", "null");
// BETWEEN over string literals (lexicographic ordering).
assertOptimizedEquals("'c' between 'b' and 'd'", "true");
assertOptimizedEquals("'b' between 'c' and 'd'", "false");
assertOptimizedEquals("null between 'b' and 'd'", "null");
assertOptimizedEquals("'c' between null and 'd'", "null");
assertOptimizedEquals("'c' between 'b' and null", "null");
// Bound symbols fold to their values (boundLong=1234, boundString='hello').
assertOptimizedEquals("boundLong between 1000 and 2000", "true");
assertOptimizedEquals("boundLong between 3 and 4", "false");
assertOptimizedEquals("boundString between 'e' and 'i'", "true");
assertOptimizedEquals("boundString between 'a' and 'b'", "false");
// Partially-bound expressions: constants fold, unbound symbol `a` remains.
assertOptimizedEquals("boundLong between a and 2000 + 1", "1234 between a and 2001");
assertOptimizedEquals("boundString between a and 'bar'", "'hello' between a and 'bar'");
}
@Test
public void testExtract()
{
// Reference instant: 2001-08-22T03:04:05.321Z, expressed as epoch seconds
// (the engine's timestamp literal unit in this test).
DateTime dateTime = new DateTime(2001, 8, 22, 3, 4, 5, 321, DateTimeZone.UTC);
long seconds = MILLISECONDS.toSeconds(dateTime.getMillis());
// Every EXTRACT field against a constant literal folds to a constant.
assertOptimizedEquals("extract (CENTURY from " + seconds + ")", "20");
assertOptimizedEquals("extract (YEAR from " + seconds + ")", "2001");
assertOptimizedEquals("extract (QUARTER from " + seconds + ")", "3");
assertOptimizedEquals("extract (MONTH from " + seconds + ")", "8");
assertOptimizedEquals("extract (WEEK from " + seconds + ")", "34");
assertOptimizedEquals("extract (DOW from " + seconds + ")", "3");
assertOptimizedEquals("extract (DOY from " + seconds + ")", "234");
assertOptimizedEquals("extract (DAY from " + seconds + ")", "22");
assertOptimizedEquals("extract (HOUR from " + seconds + ")", "3");
assertOptimizedEquals("extract (MINUTE from " + seconds + ")", "4");
assertOptimizedEquals("extract (SECOND from " + seconds + ")", "5");
assertOptimizedEquals("extract (TIMEZONE_HOUR from " + seconds + ")", "0");
assertOptimizedEquals("extract (TIMEZONE_MINUTE from " + seconds + ")", "0");
// The same fields against the bound symbol boundTimestamp (same instant).
assertOptimizedEquals("extract (CENTURY from boundTimestamp)", "20");
assertOptimizedEquals("extract (YEAR from boundTimestamp)", "2001");
assertOptimizedEquals("extract (QUARTER from boundTimestamp)", "3");
assertOptimizedEquals("extract (MONTH from boundTimestamp)", "8");
assertOptimizedEquals("extract (WEEK from boundTimestamp)", "34");
assertOptimizedEquals("extract (DOW from boundTimestamp)", "3");
assertOptimizedEquals("extract (DOY from boundTimestamp)", "234");
assertOptimizedEquals("extract (DAY from boundTimestamp)", "22");
assertOptimizedEquals("extract (HOUR from boundTimestamp)", "3");
assertOptimizedEquals("extract (MINUTE from boundTimestamp)", "4");
assertOptimizedEquals("extract (SECOND from boundTimestamp)", "5");
assertOptimizedEquals("extract (TIMEZONE_HOUR from boundTimestamp)", "0");
assertOptimizedEquals("extract (TIMEZONE_MINUTE from boundTimestamp)", "0");
// Unbound symbol: EXTRACT is preserved; constant arithmetic still folds.
assertOptimizedEquals("extract (YEAR from a)", "extract (YEAR from a)");
assertOptimizedEquals("extract (SECOND from boundTimestamp + 3)", "8");
}
@Test
public void testIn()
throws Exception
{
// IN over constant lists: a match wins even if the list contains null,
// but a non-match with null present yields null (three-valued logic).
assertOptimizedEquals("3 in (2, 4, 3, 5)", "true");
assertOptimizedEquals("3 in (2, 4, 9, 5)", "false");
assertOptimizedEquals("3 in (2, null, 3, 5)", "true");
assertOptimizedEquals("'foo' in ('bar', 'baz', 'foo', 'blah')", "true");
assertOptimizedEquals("'foo' in ('bar', 'baz', 'buz', 'blah')", "false");
assertOptimizedEquals("'foo' in ('bar', null, 'foo', 'blah')", "true");
assertOptimizedEquals("null in (2, null, 3, 5)", "null");
assertOptimizedEquals("3 in (2, null)", "null");
// Bound symbols fold on either side of IN (boundLong=1234, boundString='hello').
assertOptimizedEquals("boundLong in (2, 1234, 3, 5)", "true");
assertOptimizedEquals("boundLong in (2, 4, 3, 5)", "false");
assertOptimizedEquals("1234 in (2, boundLong, 3, 5)", "true");
assertOptimizedEquals("99 in (2, boundLong, 3, 5)", "false");
assertOptimizedEquals("boundLong in (2, boundLong, 3, 5)", "true");
assertOptimizedEquals("boundString in ('bar', 'hello', 'foo', 'blah')", "true");
assertOptimizedEquals("boundString in ('bar', 'baz', 'foo', 'blah')", "false");
assertOptimizedEquals("'hello' in ('bar', boundString, 'foo', 'blah')", "true");
assertOptimizedEquals("'baz' in ('bar', boundString, 'foo', 'blah')", "false");
// A constant match short-circuits even when unbound symbols are present;
// otherwise the list is reduced to the undetermined entries.
assertOptimizedEquals("boundLong in (2, 1234, a, 5)", "true");
assertOptimizedEquals("boundString in ('bar', 'hello', a, 'blah')", "true");
assertOptimizedEquals("boundLong in (2, 4, a, b, 9)", "1234 in (a, b)");
assertOptimizedEquals("a in (2, 4, boundLong, b, 5)", "a in (2, 4, 1234, b, 5)");
}
@Test
public void testCurrentTimestamp()
        throws Exception
{
    // current_timestamp evaluated during optimization must be at least the
    // time captured just before the call...
    long current = MILLISECONDS.toSeconds(System.currentTimeMillis());
    assertOptimizedEquals("current_timestamp >= " + current, "true");

    // ...and strictly less than one minute from now. Bug fix: the original
    // appended TimeUnit.MINUTES.toSeconds(1) via string concatenation
    // ("<current>" + 60 -> "<current>60"), producing a timestamp ~60x too
    // large instead of one minute ahead; add numerically before formatting.
    long oneMinuteAhead = current + TimeUnit.MINUTES.toSeconds(1);
    assertOptimizedEquals("current_timestamp > " + oneMinuteAhead, "false");
}
@Test
public void testCastToString()
        throws Exception
{
    // CAST to VARCHAR across long, double, boolean, string, and null inputs.
    String[][] cases = {
            {"cast(123 as VARCHAR)", "'123'"},
            {"cast(-123 as VARCHAR)", "'-123'"},
            {"cast(123.0 as VARCHAR)", "'123.0'"},
            {"cast(-123.0 as VARCHAR)", "'-123.0'"},
            {"cast(123.456 as VARCHAR)", "'123.456'"},
            {"cast(-123.456 as VARCHAR)", "'-123.456'"},
            {"cast(true as VARCHAR)", "'true'"},
            {"cast(false as VARCHAR)", "'false'"},
            {"cast('xyz' as VARCHAR)", "'xyz'"},
            {"cast(null as VARCHAR)", "null"},
    };
    for (String[] c : cases) {
        assertOptimizedEquals(c[0], c[1]);
    }
}
@Test
public void testCastToBoolean()
throws Exception
{
// long: any non-zero value is true, zero is false
assertOptimizedEquals("cast(123 as BOOLEAN)", "true");
assertOptimizedEquals("cast(-123 as BOOLEAN)", "true");
assertOptimizedEquals("cast(0 as BOOLEAN)", "false");
// boolean: identity
assertOptimizedEquals("cast(true as BOOLEAN)", "true");
assertOptimizedEquals("cast(false as BOOLEAN)", "false");
// string: accepts 'true'/'false', 't'/'f', and '1'/'0' spellings
assertOptimizedEquals("cast('true' as BOOLEAN)", "true");
assertOptimizedEquals("cast('false' as BOOLEAN)", "false");
assertOptimizedEquals("cast('t' as BOOLEAN)", "true");
assertOptimizedEquals("cast('f' as BOOLEAN)", "false");
assertOptimizedEquals("cast('1' as BOOLEAN)", "true");
assertOptimizedEquals("cast('0' as BOOLEAN)", "false");
// null propagates
assertOptimizedEquals("cast(null as BOOLEAN)", "null");
// double: any non-zero value is true, zero is false
assertOptimizedEquals("cast(123.45 as BOOLEAN)", "true");
assertOptimizedEquals("cast(-123.45 as BOOLEAN)", "true");
assertOptimizedEquals("cast(0.0 as BOOLEAN)", "false");
}
@Test
public void testCastToLong()
throws Exception
{
// long: identity
assertOptimizedEquals("cast(0 as BIGINT)", "0");
assertOptimizedEquals("cast(123 as BIGINT)", "123");
assertOptimizedEquals("cast(-123 as BIGINT)", "-123");
// double: fractional part is dropped (truncation toward zero here)
assertOptimizedEquals("cast(123.0 as BIGINT)", "123");
assertOptimizedEquals("cast(-123.0 as BIGINT)", "-123");
assertOptimizedEquals("cast(123.456 as BIGINT)", "123");
assertOptimizedEquals("cast(-123.456 as BIGINT)", "-123");
// boolean: true -> 1, false -> 0
assertOptimizedEquals("cast(true as BIGINT)", "1");
assertOptimizedEquals("cast(false as BIGINT)", "0");
// string: parsed as a decimal integer
assertOptimizedEquals("cast('123' as BIGINT)", "123");
assertOptimizedEquals("cast('-123' as BIGINT)", "-123");
// null propagates
assertOptimizedEquals("cast(null as BIGINT)", "null");
}
@Test
public void testCastToDouble()
throws Exception
{
// long: widened to double
assertOptimizedEquals("cast(0 as DOUBLE)", "0.0");
assertOptimizedEquals("cast(123 as DOUBLE)", "123.0");
assertOptimizedEquals("cast(-123 as DOUBLE)", "-123.0");
// double: identity
assertOptimizedEquals("cast(123.0 as DOUBLE)", "123.0");
assertOptimizedEquals("cast(-123.0 as DOUBLE)", "-123.0");
assertOptimizedEquals("cast(123.456 as DOUBLE)", "123.456");
assertOptimizedEquals("cast(-123.456 as DOUBLE)", "-123.456");
// string: parsed as decimal, with or without a fractional part
assertOptimizedEquals("cast('0' as DOUBLE)", "0.0");
assertOptimizedEquals("cast('123' as DOUBLE)", "123.0");
assertOptimizedEquals("cast('-123' as DOUBLE)", "-123.0");
assertOptimizedEquals("cast('123.0' as DOUBLE)", "123.0");
assertOptimizedEquals("cast('-123.0' as DOUBLE)", "-123.0");
assertOptimizedEquals("cast('123.456' as DOUBLE)", "123.456");
assertOptimizedEquals("cast('-123.456' as DOUBLE)", "-123.456");
// null propagates
assertOptimizedEquals("cast(null as DOUBLE)", "null");
// boolean: true -> 1.0, false -> 0.0
assertOptimizedEquals("cast(true as DOUBLE)", "1.0");
assertOptimizedEquals("cast(false as DOUBLE)", "0.0");
}
@Test
public void testCastOptimization()
        throws Exception
{
    // Casts over bound symbols fold to literals; casts over unbound
    // symbols survive the optimizer unchanged.
    String[][] cases = {
            {"cast(boundLong as VARCHAR)", "'1234'"},
            {"cast(boundLong + 1 as VARCHAR)", "'1235'"},
            {"cast(unbound as VARCHAR)", "cast(unbound as VARCHAR)"},
    };
    for (String[] c : cases) {
        assertOptimizedEquals(c[0], c[1]);
    }
}
@Test
public void testReservedWithDoubleQuotes()
        throws Exception
{
    // A reserved word quoted as an identifier must round-trip untouched.
    String quotedIdentifier = "\"time\"";
    assertOptimizedEquals(quotedIdentifier, quotedIdentifier);
}
@Test
public void testSearchCase()
throws Exception
{
// Searched CASE: constant-true branches fold to their result, constant-false
// branches are pruned, and a fully unresolved CASE is left intact.
assertOptimizedEquals("case " +
"when true then 33 " +
"end",
"33");
assertOptimizedEquals("case " +
"when false then 1 " +
"else 33 " +
"end",
"33");
// Bound symbols in the condition fold the same way as literals.
assertOptimizedEquals("case " +
"when boundLong = 1234 then 33 " +
"end",
"33");
// Bound symbols in the result fold to their values.
assertOptimizedEquals("case " +
"when true then boundLong " +
"end",
"1234");
assertOptimizedEquals("case " +
"when false then 1 " +
"else boundLong " +
"end",
"1234");
// A decided branch makes unreachable references to unbound `a` irrelevant.
assertOptimizedEquals("case " +
"when boundLong = 1234 then 33 " +
"else a " +
"end",
"33");
assertOptimizedEquals("case " +
"when true then boundLong " +
"else a " +
"end",
"1234");
assertOptimizedEquals("case " +
"when false then a " +
"else boundLong " +
"end",
"1234");
// An undecidable condition on unbound `a` preserves the whole CASE.
assertOptimizedEquals("case " +
"when a = 1234 then 33 " +
"else 1 " +
"end",
"" +
"case " +
"when a = 1234 then 33 " +
"else 1 " +
"end");
}
@Test
public void testSimpleCase()
throws Exception
{
assertOptimizedEquals("case true " +
"when true then 33 " +
"end",
"33");
assertOptimizedEquals("case true " +
"when false then 1 " +
"else 33 end",
"33");
assertOptimizedEquals("case boundLong " +
"when 1234 then 33 " +
"end",
"33");
assertOptimizedEquals("case 1234 " +
"when boundLong then 33 " +
"end",
"33");
assertOptimizedEquals("case true " +
"when true then boundLong " +
"end",
"1234");
assertOptimizedEquals("case true " +
"when false then 1 " +
"else boundLong " +
"end",
"1234");
assertOptimizedEquals("case boundLong " +
"when 1234 then 33 " +
"else a " +
"end",
"33");
assertOptimizedEquals("case true " +
"when true then boundLong " +
"else a " +
"end",
"1234");
assertOptimizedEquals("case true " +
"when false then a " +
"else boundLong " +
"end",
"1234");
assertOptimizedEquals("case a " +
"when 1234 then 33 " +
"else 1 " +
"end",
"" +
"case a " +
"when 1234 then 33 " +
"else 1 " +
"end");
}
    @Test
    public void testIf()
            throws Exception
    {
        assertOptimizedEquals("IF(2 = 2, 3, 4)", "3");
        assertOptimizedEquals("IF(1 = 2, 3, 4)", "4");
        assertOptimizedEquals("IF(true, 3, 4)", "3");
        assertOptimizedEquals("IF(false, 3, 4)", "4");
        // A null condition selects the false branch.
        assertOptimizedEquals("IF(null, 3, 4)", "4");
        assertOptimizedEquals("IF(true, 3, null)", "3");
        assertOptimizedEquals("IF(false, 3, null)", "null");
        assertOptimizedEquals("IF(true, null, 4)", "null");
        assertOptimizedEquals("IF(false, null, 4)", "4");
        assertOptimizedEquals("IF(true, null, null)", "null");
        assertOptimizedEquals("IF(false, null, null)", "null");
        assertOptimizedEquals("IF(true, 3.5, 4.2)", "3.5");
        assertOptimizedEquals("IF(false, 3.5, 4.2)", "4.2");
        assertOptimizedEquals("IF(true, 'foo', 'bar')", "'foo'");
        assertOptimizedEquals("IF(false, 'foo', 'bar')", "'bar'");
        // Unbound condition: both branches are still simplified independently.
        assertOptimizedEquals("IF(a, 1 + 2, 3 + 4)", "IF(a, 3, 7)");
    }
@Test
public void testLike()
throws Exception
{
assertOptimizedEquals("'a' LIKE 'a'", "true");
assertOptimizedEquals("'' LIKE 'a'", "false");
assertOptimizedEquals("'abc' LIKE 'a'", "false");
assertOptimizedEquals("'a' LIKE '_'", "true");
assertOptimizedEquals("'' LIKE '_'", "false");
assertOptimizedEquals("'abc' LIKE '_'", "false");
assertOptimizedEquals("'a' LIKE '%'", "true");
assertOptimizedEquals("'' LIKE '%'", "true");
assertOptimizedEquals("'abc' LIKE '%'", "true");
assertOptimizedEquals("'abc' LIKE '___'", "true");
assertOptimizedEquals("'ab' LIKE '___'", "false");
assertOptimizedEquals("'abcd' LIKE '___'", "false");
assertOptimizedEquals("'abc' LIKE 'abc'", "true");
assertOptimizedEquals("'xyz' LIKE 'abc'", "false");
assertOptimizedEquals("'abc0' LIKE 'abc'", "false");
assertOptimizedEquals("'0abc' LIKE 'abc'", "false");
assertOptimizedEquals("'abc' LIKE 'abc%'", "true");
assertOptimizedEquals("'abc0' LIKE 'abc%'", "true");
assertOptimizedEquals("'0abc' LIKE 'abc%'", "false");
assertOptimizedEquals("'abc' LIKE '%abc'", "true");
assertOptimizedEquals("'0abc' LIKE '%abc'", "true");
assertOptimizedEquals("'abc0' LIKE '%abc'", "false");
assertOptimizedEquals("'abc' LIKE '%abc%'", "true");
assertOptimizedEquals("'0abc' LIKE '%abc%'", "true");
assertOptimizedEquals("'abc0' LIKE '%abc%'", "true");
assertOptimizedEquals("'0abc0' LIKE '%abc%'", "true");
assertOptimizedEquals("'xyzw' LIKE '%abc%'", "false");
assertOptimizedEquals("'abc' LIKE '%ab%c%'", "true");
assertOptimizedEquals("'0abc' LIKE '%ab%c%'", "true");
assertOptimizedEquals("'abc0' LIKE '%ab%c%'", "true");
assertOptimizedEquals("'0abc0' LIKE '%ab%c%'", "true");
assertOptimizedEquals("'ab01c' LIKE '%ab%c%'", "true");
assertOptimizedEquals("'0ab01c' LIKE '%ab%c%'", "true");
assertOptimizedEquals("'ab01c0' LIKE '%ab%c%'", "true");
assertOptimizedEquals("'0ab01c0' LIKE '%ab%c%'", "true");
assertOptimizedEquals("'xyzw' LIKE '%ab%c%'", "false");
// ensure regex chars are escaped
assertOptimizedEquals("'\' LIKE '\'", "true");
assertOptimizedEquals("'.*' LIKE '.*'", "true");
assertOptimizedEquals("'[' LIKE '['", "true");
assertOptimizedEquals("']' LIKE ']'", "true");
assertOptimizedEquals("'{' LIKE '{'", "true");
assertOptimizedEquals("'}' LIKE '}'", "true");
assertOptimizedEquals("'?' LIKE '?'", "true");
assertOptimizedEquals("'+' LIKE '+'", "true");
assertOptimizedEquals("'(' LIKE '('", "true");
assertOptimizedEquals("')' LIKE ')'", "true");
assertOptimizedEquals("'|' LIKE '|'", "true");
assertOptimizedEquals("'^' LIKE '^'", "true");
assertOptimizedEquals("'$' LIKE '$'", "true");
assertOptimizedEquals("null like '%'", "null");
assertOptimizedEquals("'a' like null", "null");
assertOptimizedEquals("'a' like '%' escape null", "null");
assertOptimizedEquals("'%' like 'z%' escape 'z'", "true");
}
    @Test
    public void testLikeOptimization()
            throws Exception
    {
        // A pattern with no wildcards collapses to a plain equality comparison.
        assertOptimizedEquals("unboundstring like 'abc'", "unboundstring = 'abc'");
        // Both sides bound ('hello' like '%el%') folds to a constant.
        assertOptimizedEquals("boundstring like boundpattern", "true");
        assertOptimizedEquals("'abc' like boundpattern", "false");
        // Unbound operands keep the LIKE intact.
        assertOptimizedEquals("unboundstring like boundpattern", "unboundstring like boundpattern");
        assertOptimizedEquals("unboundstring like unboundpattern escape unboundstring", "unboundstring like unboundpattern escape unboundstring");
    }
    @Test
    public void testTimestampLiteral()
    {
        // Timestamps optimize to epoch seconds (see getSeconds). The +05:00 offset
        // below is reused for the Asia/Oral cases — assumes that zone resolved to
        // UTC+5 on 1960-01-22; TODO confirm against the tzdata in use.
        DateTimeZone timeZone = DateTimeZone.forOffsetHours(5);
        // No zone suffix: interpreted as UTC.
        assertOptimizedEquals("timestamp '1960-01-22 03:04:05.321'", getSeconds(new DateTime(1960, 1, 22, 3, 4, 5, 321, DateTimeZone.UTC)));
        assertOptimizedEquals("timestamp '1960-01-22 03:04:05'", getSeconds(new DateTime(1960, 1, 22, 3, 4, 5, 0, DateTimeZone.UTC)));
        assertOptimizedEquals("timestamp '1960-01-22 03:04'", getSeconds(new DateTime(1960, 1, 22, 3, 4, 0, 0, DateTimeZone.UTC)));
        assertOptimizedEquals("timestamp '1960-01-22'", getSeconds(new DateTime(1960, 1, 22, 0, 0, 0, 0, DateTimeZone.UTC)));
        // Explicit 'Z' suffix.
        assertOptimizedEquals("timestamp '1960-01-22 03:04:05.321Z'", getSeconds(new DateTime(1960, 1, 22, 3, 4, 5, 321, DateTimeZone.UTC)));
        assertOptimizedEquals("timestamp '1960-01-22 03:04:05Z'", getSeconds(new DateTime(1960, 1, 22, 3, 4, 5, 0, DateTimeZone.UTC)));
        assertOptimizedEquals("timestamp '1960-01-22 03:04Z'", getSeconds(new DateTime(1960, 1, 22, 3, 4, 0, 0, DateTimeZone.UTC)));
        // Numeric offsets, with and without minutes.
        assertOptimizedEquals("timestamp '1960-01-22 03:04:05.321+05:00'", getSeconds(new DateTime(1960, 1, 22, 3, 4, 5, 321, timeZone)));
        assertOptimizedEquals("timestamp '1960-01-22 03:04:05+05:00'", getSeconds(new DateTime(1960, 1, 22, 3, 4, 5, 0, timeZone)));
        assertOptimizedEquals("timestamp '1960-01-22 03:04+05:00'", getSeconds(new DateTime(1960, 1, 22, 3, 4, 0, 0, timeZone)));
        assertOptimizedEquals("timestamp '1960-01-22 03:04:05.321+05'", getSeconds(new DateTime(1960, 1, 22, 3, 4, 5, 321, timeZone)));
        assertOptimizedEquals("timestamp '1960-01-22 03:04:05+05'", getSeconds(new DateTime(1960, 1, 22, 3, 4, 5, 0, timeZone)));
        assertOptimizedEquals("timestamp '1960-01-22 03:04+05'", getSeconds(new DateTime(1960, 1, 22, 3, 4, 0, 0, timeZone)));
        // Named zone.
        assertOptimizedEquals("timestamp '1960-01-22 03:04:05.321 Asia/Oral'", getSeconds(new DateTime(1960, 1, 22, 3, 4, 5, 321, timeZone)));
        assertOptimizedEquals("timestamp '1960-01-22 03:04:05 Asia/Oral'", getSeconds(new DateTime(1960, 1, 22, 3, 4, 5, 0, timeZone)));
        assertOptimizedEquals("timestamp '1960-01-22 03:04 Asia/Oral'", getSeconds(new DateTime(1960, 1, 22, 3, 4, 0, 0, timeZone)));
    }
    @Test
    public void testIntervalLiteral()
    {
        // Interval literals optimize to a second count; a sign before the quoted
        // value negates it.
        assertOptimizedEquals("INTERVAL '123' DAY", String.valueOf(DAYS.toSeconds(123)));
        assertOptimizedEquals("INTERVAL + '123' DAY", String.valueOf(DAYS.toSeconds(123)));
        assertOptimizedEquals("INTERVAL - '123' DAY", String.valueOf(-DAYS.toSeconds(123)));
        // Compound ranges (DAY TO SECOND, HOUR TO MINUTE) are not covered yet —
        // the commented cases below were presumably disabled pending support.
        // assertOptimizedEquals("INTERVAL '123 23:58:53.456' DAY TO SECOND",
        // String.valueOf(DAYS.toSeconds(123) + HOURS.toSeconds(23) + MINUTES.toSeconds(59) + SECONDS.toSeconds(53)));
        assertOptimizedEquals("INTERVAL '123' HOUR", String.valueOf(HOURS.toSeconds(123)));
        assertOptimizedEquals("INTERVAL + '123' HOUR", String.valueOf(HOURS.toSeconds(123)));
        assertOptimizedEquals("INTERVAL - '123' HOUR", String.valueOf(-HOURS.toSeconds(123)));
        // assertOptimizedEquals("INTERVAL '23:59' HOUR TO MINUTE", String.valueOf(HOURS.toSeconds(23) + MINUTES.toSeconds(59)));
        assertOptimizedEquals("INTERVAL '123' MINUTE", String.valueOf(MINUTES.toSeconds(123)));
        assertOptimizedEquals("INTERVAL + '123' MINUTE", String.valueOf(MINUTES.toSeconds(123)));
        assertOptimizedEquals("INTERVAL - '123' MINUTE", String.valueOf(-MINUTES.toSeconds(123)));
        assertOptimizedEquals("INTERVAL '123' SECOND", String.valueOf(SECONDS.toSeconds(123)));
        assertOptimizedEquals("INTERVAL + '123' SECOND", String.valueOf(SECONDS.toSeconds(123)));
        assertOptimizedEquals("INTERVAL - '123' SECOND", String.valueOf(-SECONDS.toSeconds(123)));
    }
    @Test
    public void testIntervalMath()
    {
        // timestamp - interval folds to a constant: 1960-01-22 minus 7 days.
        assertOptimizedEquals("timestamp '1960-01-22 03:04:05.321' - interval '7' day", getSeconds(new DateTime(1960, 1, 15, 3, 4, 5, 321, DateTimeZone.UTC)));
    }
    @Test
    public void testDateLiteral()
    {
        // DATE literals optimize to epoch seconds at UTC midnight of that day.
        assertOptimizedEquals("DATE '1960-01-22'", getSeconds(new DateTime(1960, 1, 22, 0, 0, 0, 0, DateTimeZone.UTC)));
        assertOptimizedEquals("DATE '2013-03-22'", getSeconds(new DateTime(2013, 3, 22, 0, 0, 0, 0, DateTimeZone.UTC)));
    }
    @Test
    public void testTimeLiteral()
    {
        // TIME literals are anchored to the epoch date 1970-01-01 and optimize to
        // epoch seconds. The +05 offset doubles for the Asia/Oral cases — assumes
        // that zone was UTC+5 on the epoch date; TODO confirm against tzdata.
        DateTimeZone timeZone = DateTimeZone.forOffsetHours(5);
        // No zone suffix: UTC.
        assertOptimizedEquals("time '03:04:05.321'", getSeconds(new DateTime(1970, 1, 1, 3, 4, 5, 321, DateTimeZone.UTC)));
        assertOptimizedEquals("time '03:04:05'", getSeconds(new DateTime(1970, 1, 1, 3, 4, 5, 0, DateTimeZone.UTC)));
        assertOptimizedEquals("time '03:04'", getSeconds(new DateTime(1970, 1, 1, 3, 4, 0, 0, DateTimeZone.UTC)));
        // Explicit 'Z'.
        assertOptimizedEquals("time '03:04:05.321Z'", getSeconds(new DateTime(1970, 1, 1, 3, 4, 5, 321, DateTimeZone.UTC)));
        assertOptimizedEquals("time '03:04:05Z'", getSeconds(new DateTime(1970, 1, 1, 3, 4, 5, 0, DateTimeZone.UTC)));
        assertOptimizedEquals("time '03:04Z'", getSeconds(new DateTime(1970, 1, 1, 3, 4, 0, 0, DateTimeZone.UTC)));
        // Numeric offsets.
        assertOptimizedEquals("time '03:04:05.321+05:00'", getSeconds(new DateTime(1970, 1, 1, 3, 4, 5, 321, timeZone)));
        assertOptimizedEquals("time '03:04:05+05:00'", getSeconds(new DateTime(1970, 1, 1, 3, 4, 5, 0, timeZone)));
        assertOptimizedEquals("time '03:04+05:00'", getSeconds(new DateTime(1970, 1, 1, 3, 4, 0, 0, timeZone)));
        assertOptimizedEquals("time '03:04:05.321+05'", getSeconds(new DateTime(1970, 1, 1, 3, 4, 5, 321, timeZone)));
        assertOptimizedEquals("time '03:04:05+05'", getSeconds(new DateTime(1970, 1, 1, 3, 4, 5, 0, timeZone)));
        assertOptimizedEquals("time '03:04+05'", getSeconds(new DateTime(1970, 1, 1, 3, 4, 0, 0, timeZone)));
        // Named zone.
        assertOptimizedEquals("time '03:04:05.321 Asia/Oral'", getSeconds(new DateTime(1970, 1, 1, 3, 4, 5, 321, timeZone)));
        assertOptimizedEquals("time '03:04:05 Asia/Oral'", getSeconds(new DateTime(1970, 1, 1, 3, 4, 5, 0, timeZone)));
        assertOptimizedEquals("time '03:04 Asia/Oral'", getSeconds(new DateTime(1970, 1, 1, 3, 4, 0, 0, timeZone)));
    }
    @Test
    public void testFailedExpressionOptimization()
            throws Exception
    {
        // When a branch would throw (0 / 0) but its condition depends on the
        // unbound symbol x, the optimizer must not evaluate the failing branch:
        // the whole expression survives optimization unchanged (equals itself).
        assertOptimizedEqualsSelf("if(x, 1, 0 / 0)");
        assertOptimizedEqualsSelf("if(x, 0 / 0, 1)");
        assertOptimizedEqualsSelf("case x when 1 then 1 when 0 / 0 then 2 end");
        assertOptimizedEqualsSelf("case x when true then 1 else 0 / 0 end");
        assertOptimizedEqualsSelf("case x when true then 0 / 0 else 1 end");
        assertOptimizedEqualsSelf("case when x then 1 when 0 / 0 then 2 end");
        assertOptimizedEqualsSelf("case when x then 1 else 0 / 0 end");
        assertOptimizedEqualsSelf("case when x then 0 / 0 else 1 end");
        assertOptimizedEqualsSelf("coalesce(x, 0 / 0)");
    }
    @Test(expectedExceptions = ArithmeticException.class)
    public void testOptimizeDivideByZero()
            throws Exception
    {
        // A constant division by zero is evaluated eagerly and must surface
        // the ArithmeticException at optimization time.
        optimize("0 / 0");
    }
    @Test(expectedExceptions = ArithmeticException.class)
    public void testOptimizeConstantIfDivideByZero()
            throws Exception
    {
        // With a constant-false condition the false branch IS the result, so its
        // division by zero must be evaluated and throw during optimization.
        optimize("if(false, 1, 0 / 0)");
    }
    @Test(expectedExceptions = ArithmeticException.class)
    public void testOptimizeConstantSearchedCaseDivideByZero()
            throws Exception
    {
        // The WHEN condition itself is constant and failing: evaluating it during
        // optimization must throw rather than be silently dropped.
        optimize("case when 0 / 0 then 1 end");
    }
    @Test(timeOut = 1000)
    public void testLikeInvalidUtf8()
    {
        // 0xFF can never appear in valid UTF-8. The timeout guards against the
        // regex-based LIKE implementation hanging or looping on malformed input.
        assertLike(new byte[] {'a', 'b', 'c'}, "%b%", true);
        assertLike(new byte[] {'a', 'b', 'c', (byte) 0xFF, 'x', 'y'}, "%b%", true);
    }
private static void assertLike(byte[] value, String pattern, boolean expected)
{
Expression predicate = new LikePredicate(
rawStringLiteral(Slices.wrappedBuffer(value)),
new StringLiteral(pattern),
null);
assertEquals(evaluate(predicate), expected);
}
    // Creates a StringLiteral whose getSlice() returns the given bytes verbatim.
    // The superclass constructor only ever sees the (lossy) UTF-8 decoding of the
    // slice, so this lets tests feed byte sequences that are not valid UTF-8 into
    // the evaluator while still satisfying the StringLiteral interface.
    private static StringLiteral rawStringLiteral(final Slice slice)
    {
        return new StringLiteral(slice.toString(UTF_8))
        {
            @Override
            public Slice getSlice()
            {
                // Bypass the decoded string: hand back the original raw bytes.
                return slice;
            }
        };
    }
private static String getSeconds(DateTime dateTime)
{
return String.valueOf(MILLISECONDS.toSeconds(dateTime.getMillis()));
}
    // Asserts that both SQL expressions optimize to the same expression tree.
    private static void assertOptimizedEquals(@Language("SQL") String actual, @Language("SQL") String expected)
    {
        assertEquals(optimize(actual), optimize(expected));
    }
    // Asserts that optimization is the identity for this expression: the result
    // equals the freshly parsed (unoptimized) AST.
    private static void assertOptimizedEqualsSelf(@Language("SQL") String expression)
    {
        assertEquals(optimize(expression), createExpression(expression));
    }
    // Parses the expression, verifies format/reparse round-trips to the same AST,
    // then runs the expression optimizer with the well-known "bound*" symbols
    // resolved to constants. Unknown symbols are returned as references so they
    // survive optimization unresolved.
    private static Object optimize(@Language("SQL") String expression)
    {
        Expression parsedExpression = createExpression(expression);
        // verify roundtrip
        Expression roundtrip = createExpression(ExpressionFormatter.formatExpression(parsedExpression));
        assertEquals(parsedExpression, roundtrip);
        ExpressionInterpreter interpreter = ExpressionInterpreter.expressionOptimizer(parsedExpression, DUAL_METADATA_MANAGER, new Session("user", "test", DEFAULT_CATALOG, DEFAULT_SCHEMA, null, null));
        return interpreter.optimize(new SymbolResolver()
        {
            @Override
            public Object getValue(Symbol symbol)
            {
                // NOTE(review): default-locale toLowerCase(); under e.g. a Turkish
                // locale "BOUNDLONG" would not lower-case to "boundlong". Harmless
                // here, but toLowerCase(Locale.ENGLISH) would be safer.
                switch (symbol.getName().toLowerCase()) {
                    case "boundlong":
                        return 1234L;
                    case "boundstring":
                        return Slices.wrappedBuffer("hello".getBytes(UTF_8));
                    case "bounddouble":
                        return 12.34;
                    case "boundtimestamp":
                        DateTime dateTime = new DateTime(2001, 8, 22, 3, 4, 5, 321, DateTimeZone.UTC);
                        return MILLISECONDS.toSeconds(dateTime.getMillis());
                    case "boundpattern":
                        return Slices.wrappedBuffer("%el%".getBytes(UTF_8));
                }
                // Anything else stays symbolic.
                return new QualifiedNameReference(symbol.toQualifiedName());
            }
        });
    }
private static Object evaluate(String expression)
{
Expression parsedExpression = createExpression(expression);
// verify roundtrip
Expression roundtrip = createExpression(ExpressionFormatter.formatExpression(parsedExpression));
assertEquals(parsedExpression, roundtrip);
return evaluate(parsedExpression);
}
    // Evaluates an expression with the interpreter. The null RecordCursor means
    // the expression must not reference any input fields.
    private static Object evaluate(Expression expression)
    {
        ExpressionInterpreter interpreter = ExpressionInterpreter.expressionInterpreter(expression, DUAL_METADATA_MANAGER, new Session("user", "test", DEFAULT_CATALOG, DEFAULT_SCHEMA, null, null));
        return interpreter.evaluate((RecordCursor) null);
    }
}
| |
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.app.plugin.core.debug.gui.memory;
import java.awt.BorderLayout;
import java.lang.invoke.MethodHandles;
import java.math.BigInteger;
import java.util.*;
import org.apache.commons.lang3.StringUtils;
import docking.ActionContext;
import docking.action.*;
import docking.menu.MultiStateDockingAction;
import docking.widgets.fieldpanel.support.ViewerPosition;
import ghidra.app.plugin.core.byteviewer.*;
import ghidra.app.plugin.core.debug.DebuggerCoordinates;
import ghidra.app.plugin.core.debug.gui.DebuggerLocationLabel;
import ghidra.app.plugin.core.debug.gui.DebuggerResources;
import ghidra.app.plugin.core.debug.gui.DebuggerResources.AbstractFollowsCurrentThreadAction;
import ghidra.app.plugin.core.debug.gui.action.*;
import ghidra.app.plugin.core.debug.gui.action.AutoReadMemorySpec.AutoReadMemorySpecConfigFieldCodec;
import ghidra.app.plugin.core.format.ByteBlock;
import ghidra.app.services.DebuggerTraceManagerService;
import ghidra.framework.options.SaveState;
import ghidra.framework.plugintool.*;
import ghidra.framework.plugintool.annotation.AutoConfigStateField;
import ghidra.framework.plugintool.annotation.AutoServiceConsumed;
import ghidra.program.model.address.Address;
import ghidra.program.model.address.AddressSetView;
import ghidra.program.model.listing.Program;
import ghidra.program.util.ProgramLocation;
import ghidra.program.util.ProgramSelection;
import ghidra.trace.model.Trace;
import ghidra.trace.model.program.TraceProgramView;
import ghidra.util.Swing;
/**
 * A byte viewer provider that displays memory from the debugger's current trace.
 *
 * <p>
 * Extends the static byte viewer to render a {@link TraceProgramView}, keeping in sync with the
 * tool's current coordinates (trace/view, recorder, thread, frame, time). Go-to, location
 * tracking, and target-memory reads are delegated to the three {@code Debugger*Trait} helpers.
 * The main (connected) viewer always follows the current thread; cloned (disconnected) viewers
 * may opt out and persist their own coordinates.
 */
public class DebuggerMemoryBytesProvider extends ProgramByteViewerComponentProvider {
	private static final AutoConfigState.ClassHandler<ProgramByteViewerComponentProvider> CONFIG_STATE_HANDLER =
		AutoConfigState.wireHandler(ProgramByteViewerComponentProvider.class,
			MethodHandles.lookup());

	private static final String KEY_DEBUGGER_COORDINATES = "DebuggerCoordinates";

	/**
	 * Check whether two coordinates are equivalent for this provider's display purposes.
	 *
	 * <p>
	 * Only the fields this provider cares about are compared; a change in any of them requires
	 * a full {@link #goToCoordinates(DebuggerCoordinates)} rather than a silent field update.
	 */
	protected static boolean sameCoordinates(DebuggerCoordinates a, DebuggerCoordinates b) {
		if (!Objects.equals(a.getView(), b.getView())) {
			return false; // Subsumes trace
		}
		if (!Objects.equals(a.getRecorder(), b.getRecorder())) {
			return false; // For capture memory action
		}
		if (!Objects.equals(a.getTime(), b.getTime())) {
			return false;
		}
		if (!Objects.equals(a.getThread(), b.getThread())) {
			return false; // for reg/pc tracking
		}
		if (!Objects.equals(a.getFrame(), b.getFrame())) {
			return false; // for reg/pc tracking
		}
		return true;
	}

	/**
	 * Toggle action (disconnected providers only): when selected, this provider tracks the
	 * tool's current thread; when cleared, it stays pinned to its own coordinates.
	 */
	protected class FollowsCurrentThreadAction extends AbstractFollowsCurrentThreadAction {
		public FollowsCurrentThreadAction() {
			super(plugin);
			setMenuBarData(new MenuData(new String[] { NAME }));
			setSelected(true);
			addLocalAction(this);
			setEnabled(true);
		}

		@Override
		public void actionPerformed(ActionContext context) {
			doSetFollowsCurrentThread(isSelected());
		}
	}

	/** Go-to trait specialized to navigate within the current trace view. */
	protected class ForMemoryBytesGoToTrait extends DebuggerGoToTrait {
		public ForMemoryBytesGoToTrait() {
			super(DebuggerMemoryBytesProvider.this.tool, DebuggerMemoryBytesProvider.this.plugin,
				DebuggerMemoryBytesProvider.this);
		}

		@Override
		protected boolean goToAddress(Address address) {
			TraceProgramView view = current.getView();
			if (view == null) {
				// No trace displayed; nowhere to go.
				return false;
			}
			return goTo(view, new ProgramLocation(view, address));
		}
	}

	/** Tracking trait: navigates this viewer whenever the tracked location changes. */
	protected class ForMemoryBytesTrackingTrait extends DebuggerTrackLocationTrait {
		public ForMemoryBytesTrackingTrait() {
			super(DebuggerMemoryBytesProvider.this.tool, DebuggerMemoryBytesProvider.this.plugin,
				DebuggerMemoryBytesProvider.this);
		}

		@Override
		protected void locationTracked() {
			doGoToTracked();
		}
	}

	/** Reads-memory trait: supplies the selection to read and repaints after reads complete. */
	protected class ForMemoryBytesReadsMemoryTrait extends DebuggerReadsMemoryTrait {
		public ForMemoryBytesReadsMemoryTrait() {
			super(DebuggerMemoryBytesProvider.this.tool, DebuggerMemoryBytesProvider.this.plugin,
				DebuggerMemoryBytesProvider.this);
		}

		@Override
		protected AddressSetView getSelection() {
			return DebuggerMemoryBytesProvider.this.getSelection();
		}

		@Override
		protected void repaintPanel() {
			for (ByteViewerComponent view : getByteViewerPanel().getViewList()) {
				// NB. ByteViewerComponent extends FieldPanel
				view.repaint();
			}
		}
	}

	// Default auto-read behavior: read visible, read-only-once.
	private final AutoReadMemorySpec defaultReadMemorySpec =
		AutoReadMemorySpec.fromConfigName(VisibleROOnceAutoReadMemorySpec.CONFIG_NAME);

	private final DebuggerMemoryBytesPlugin myPlugin;

	@AutoServiceConsumed
	private DebuggerTraceManagerService traceManager;
	@SuppressWarnings("unused")
	private final AutoService.Wiring autoServiceWiring;

	protected DockingAction actionGoTo;
	protected FollowsCurrentThreadAction actionFollowsCurrentThread;
	protected MultiStateDockingAction<AutoReadMemorySpec> actionAutoReadMemory;
	protected DockingAction actionReadSelectedMemory;
	protected MultiStateDockingAction<LocationTrackingSpec> actionTrackLocation;

	protected ForMemoryBytesGoToTrait goToTrait;
	protected ForMemoryBytesTrackingTrait trackingTrait;
	protected ForMemoryBytesReadsMemoryTrait readsMemTrait;

	protected final DebuggerLocationLabel locationLabel = new DebuggerLocationLabel();

	// Persisted via CONFIG_STATE_HANDLER (see read/writeConfigState).
	@AutoConfigStateField
	protected boolean followsCurrentThread = true;
	@AutoConfigStateField(codec = AutoReadMemorySpecConfigFieldCodec.class)
	protected AutoReadMemorySpec autoReadMemorySpec = defaultReadMemorySpec;
	// TODO: followsCurrentSnap?

	DebuggerCoordinates current = DebuggerCoordinates.NOWHERE;

	protected final boolean isMainViewer;

	protected DebuggerMemoryBytesProvider(PluginTool tool, DebuggerMemoryBytesPlugin plugin,
			boolean isConnected) {
		super(tool, plugin, DebuggerResources.TITLE_PROVIDER_MEMORY_BYTES, isConnected);
		this.myPlugin = plugin;
		this.isMainViewer = isConnected;

		autoServiceWiring = AutoService.wireServicesConsumed(plugin, this);

		createActions();
		addDisplayListener(readsMemTrait.getDisplayListener());

		decorationComponent.add(locationLabel, BorderLayout.NORTH);

		// Push the initial (NOWHERE) coordinates to all the helpers.
		goToTrait.goToCoordinates(current);
		trackingTrait.goToCoordinates(current);
		readsMemTrait.goToCoordinates(current);
		locationLabel.goToCoordinates(current);

		setHelpLocation(DebuggerResources.HELP_PROVIDER_MEMORY_BYTES);
	}

	/**
	 * TODO: I'd rather this not be here
	 */
	protected Plugin getPlugin() {
		return plugin;
	}

	/**
	 * Lazily construct the traits. Called both from {@link #newByteViewerPanel()} (invoked by
	 * the super constructor) and from {@link #createActions()}, whichever happens first.
	 */
	protected void initTraits() {
		if (goToTrait == null) {
			goToTrait = new ForMemoryBytesGoToTrait();
		}
		if (trackingTrait == null) {
			trackingTrait = new ForMemoryBytesTrackingTrait();
		}
		if (readsMemTrait == null) {
			readsMemTrait = new ForMemoryBytesReadsMemoryTrait();
		}
	}

	@Override
	protected ByteViewerPanel newByteViewerPanel() {
		initTraits();
		return new DebuggerMemoryBytesPanel(this);
	}

	// For testing access
	@Override
	protected ByteViewerPanel getByteViewerPanel() {
		return super.getByteViewerPanel();
	}

	@Override
	protected void addToToolbar() {
		// Prevent this from being added to the toolbar
	}

	/**
	 * Deal with the fact that initialization order is hard to control
	 */
	protected DebuggerCoordinates getCurrent() {
		return current == null ? DebuggerCoordinates.NOWHERE : current;
	}

	/**
	 * Compose the sub-title: "[tracking spec title, ]trace file name" when a view is present.
	 */
	protected String computeSubTitle() {
		// TODO: This should be factored in a common place
		DebuggerCoordinates current = getCurrent();
		TraceProgramView view = current == null ? null : current.getView();
		List<String> parts = new ArrayList<>();
		LocationTrackingSpec trackingSpec = trackingTrait == null ? null : trackingTrait.getSpec();
		if (trackingSpec != null) {
			String specTitle = trackingSpec.computeTitle(current);
			if (specTitle != null) {
				parts.add(specTitle);
			}
		}
		if (view != null) {
			parts.add(current.getTrace().getDomainFile().getName());
		}
		return StringUtils.join(parts, ", ");
	}

	@Override
	protected void updateTitle() {
		setSubTitle(computeSubTitle());
	}

	protected void createActions() {
		initTraits();

		// Only disconnected (cloned) viewers get the follows-thread toggle.
		if (!isMainViewer()) {
			actionFollowsCurrentThread = new FollowsCurrentThreadAction();
		}

		actionGoTo = goToTrait.installAction();
		actionTrackLocation = trackingTrait.installAction();
		actionAutoReadMemory = readsMemTrait.installAutoReadAction();
		actionReadSelectedMemory = readsMemTrait.installReadSelectedAction();
	}

	@Override
	protected void doSetProgram(Program newProgram) {
		// The program shown must always be the view of the current coordinates.
		if (newProgram != null && newProgram != current.getView()) {
			throw new AssertionError();
		}
		if (getProgram() == newProgram) {
			return;
		}
		if (newProgram != null && !(newProgram instanceof TraceProgramView)) {
			throw new IllegalArgumentException("Dynamic Listings require trace views");
		}
		super.doSetProgram(newProgram);
		if (newProgram != null) {
			setSelection(new ProgramSelection());
		}
		updateTitle();
		locationLabel.updateLabel();
	}

	/**
	 * When not following the current thread, keep this provider's own trace/thread but adopt
	 * the incoming time, since the view's snap changes regardless.
	 */
	protected DebuggerCoordinates adjustCoordinates(DebuggerCoordinates coordinates) {
		if (followsCurrentThread) {
			return coordinates;
		}
		// Because the view's snap is changing with or without us.... So go with.
		return current.withTime(coordinates.getTime());
	}

	public void goToCoordinates(DebuggerCoordinates coordinates) {
		if (sameCoordinates(current, coordinates)) {
			// Displayed fields unchanged; just record the richer coordinates.
			current = coordinates;
			return;
		}
		current = coordinates;
		doSetProgram(current.getView());
		goToTrait.goToCoordinates(coordinates);
		trackingTrait.goToCoordinates(coordinates);
		readsMemTrait.goToCoordinates(coordinates);
		locationLabel.goToCoordinates(coordinates);
		contextChanged();
	}

	public void coordinatesActivated(DebuggerCoordinates coordinates) {
		DebuggerCoordinates adjusted = adjustCoordinates(coordinates);
		goToCoordinates(adjusted);
	}

	public void traceClosed(Trace trace) {
		if (current.getTrace() == trace) {
			goToCoordinates(DebuggerCoordinates.NOWHERE);
		}
	}

	public void setFollowsCurrentThread(boolean follows) {
		if (isMainViewer()) {
			throw new IllegalStateException(
				"The main memory bytes viewer always follows the current trace and thread");
		}
		actionFollowsCurrentThread.setSelected(follows);
		doSetFollowsCurrentThread(follows);
	}

	protected void doSetFollowsCurrentThread(boolean follows) {
		this.followsCurrentThread = follows;
		updateBorder();
		updateTitle();
		// NOTE(review): assumes the trace-manager service has been wired by the time the
		// user can toggle this — confirm traceManager cannot be null here.
		coordinatesActivated(traceManager.getCurrent());
	}

	protected void updateBorder() {
		// The "connected" border doubles as the follows-current-thread indicator.
		decorationComponent.setConnected(followsCurrentThread);
	}

	public boolean isFollowsCurrentThread() {
		return followsCurrentThread;
	}

	public void setAutoReadMemorySpec(AutoReadMemorySpec spec) {
		readsMemTrait.setAutoSpec(spec);
	}

	public AutoReadMemorySpec getAutoReadMemorySpec() {
		return readsMemTrait.getAutoSpec();
	}

	protected void doGoToTracked() {
		ProgramLocation loc = trackingTrait.getTrackedLocation();
		if (loc == null) {
			return;
		}
		// Capture the view now; navigate on the Swing thread.
		TraceProgramView curView = current.getView();
		Swing.runIfSwingOrRunLater(() -> {
			goTo(curView, loc);
		});
	}

	public void setTrackingSpec(LocationTrackingSpec spec) {
		trackingTrait.setSpec(spec);
	}

	public LocationTrackingSpec getTrackingSpec() {
		return trackingTrait.getSpec();
	}

	@Override
	public boolean isConnected() {
		// NOTE(review): always false, even for the main viewer — presumably so the
		// byte-viewer framework never ties this provider to the static program; confirm.
		return false;
	}

	public boolean isMainViewer() {
		return isMainViewer;
	}

	@Override
	protected void writeConfigState(SaveState saveState) {
		super.writeConfigState(saveState);
		// BUG FIX: readConfigState uses CONFIG_STATE_HANDLER, but the write side never
		// did, so @AutoConfigStateField values (followsCurrentThread, autoReadMemorySpec)
		// were silently dropped on save and on cloneWindow(). Restore the symmetry.
		CONFIG_STATE_HANDLER.writeConfigState(this, saveState);
	}

	@Override
	protected void readConfigState(SaveState saveState) {
		super.readConfigState(saveState);
		CONFIG_STATE_HANDLER.readConfigState(this, saveState);
		trackingTrait.readConfigState(saveState);

		if (isMainViewer()) {
			followsCurrentThread = true;
		}
		else {
			actionFollowsCurrentThread.setSelected(followsCurrentThread);
			updateBorder();
		}
		// TODO: actionAutoReadMemory
	}

	@Override
	protected void writeDataState(SaveState saveState) {
		// Only disconnected providers persist their own coordinates; the main viewer
		// always adopts the tool's current coordinates.
		if (!isMainViewer()) {
			current.writeDataState(tool, saveState, KEY_DEBUGGER_COORDINATES);
		}
		super.writeDataState(saveState);
	}

	@Override
	protected void readDataState(SaveState saveState) {
		if (!isMainViewer()) {
			DebuggerCoordinates coordinates =
				DebuggerCoordinates.readDataState(tool, saveState, KEY_DEBUGGER_COORDINATES, true);
			coordinatesActivated(coordinates);
		}
		super.readDataState(saveState);
	}

	@Override
	protected void updateLocation(ByteBlock block, BigInteger blockOffset, int column,
			boolean export) {
		super.updateLocation(block, blockOffset, column, export);
		locationLabel.goToAddress(currentLocation == null ? null : currentLocation.getAddress());
	}

	@Override
	public void cloneWindow() {
		final DebuggerMemoryBytesProvider newProvider = myPlugin.createNewDisconnectedProvider();
		final ViewerPosition vp = panel.getViewerPosition();
		final SaveState saveState = new SaveState();
		writeConfigState(saveState);
		// Defer so the new provider finishes construction before being configured.
		Swing.runLater(() -> {
			newProvider.readConfigState(saveState);

			newProvider.goToCoordinates(current);
			newProvider.setLocation(currentLocation);
			newProvider.panel.setViewerPosition(vp);
		});
	}

	@Override
	protected ProgramByteBlockSet newByteBlockSet(ByteBlockChangeManager changeManager) {
		if (program == null) {
			return null;
		}
		// NOTE(review): presumably routes byte edits to the live target — confirm against
		// WritesTargetProgramByteBlockSet.
		return new WritesTargetProgramByteBlockSet(this, program, changeManager);
	}

	@Override
	public void addLocalAction(DockingActionIf action) {
		/**
		 * TODO This is a terrible hack, but it's temporary. We do not yet support writing target
		 * memory from the bytes provider. Once we do, we should obviously take this hack out. I
		 * don't think we'll forget, because the only way to get the write toggle button back is to
		 * delete this override.
		 */
		if (action == editModeAction) {
			return;
		}
		super.addLocalAction(action);
	}
}
| |
import javax.swing.JFrame;
import javax.swing.JScrollPane;
import javax.swing.JPanel;
import javax.swing.BoxLayout;
import javax.swing.JCheckBox;
import javax.swing.JButton;
import javax.swing.JLabel;
import javax.swing.JComboBox;
import javax.swing.border.TitledBorder;
import java.util.Collection;
import java.util.Map;
import java.util.HashMap;
import java.util.Vector;
import java.util.HashSet;
import java.util.StringTokenizer;
import java.awt.Dimension;
import java.awt.BorderLayout;
import java.awt.Container;
import java.awt.Component;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
public class FilterFrame extends JFrame
{
    private JAnalyze mainWindow;
    private static final long serialVersionUID = 1L;
    // All loaded results; getResults() produces the filtered view.
    private Collection<ResultsContainer> results;
    // IPs the user unchecked in a previous session (persisted as the "disabled" property).
    private Collection<String> disabled = new HashSet<String>();
    // IP -> 1 (shown) or 0 (hidden), mirroring the per-IP checkbox state.
    private final Map<String, Integer> ips = new HashMap<String, Integer>();
    // Tri-state filters: 2 means "both" (filter off); see the combo boxes in resultsChange().
    // NOTE(review): unlike the others, mismatchFilter has no initializer (defaults to 0);
    // it is assigned from the saved property inside resultsChange(), called by the constructor.
    private int mismatchFilter;
    private int cableFaultFilter = 2;
    private int congestionFilter = 2;
    private int duplexFilter = 2;
    private int newCongestionFilter = 2;
    // Five-state filter: 4 means filter off — presumably values 0-3 select an
    // equality category; TODO confirm against getInitialPeakSpeedEquality().
    private int initialPeakSpeedFilter = 4;
public FilterFrame(JAnalyze mainWindow, Collection<ResultsContainer> results) {
this.mainWindow = mainWindow;
this.results = results;
resultsChange();
StringTokenizer st = new StringTokenizer(mainWindow.getProperties().getProperty("disabled", ""), ",");
while (st.hasMoreTokens()) {
disabled.add(st.nextToken());
}
setSize(400, 350);
}
public Collection<ResultsContainer> getResults() {
Collection<ResultsContainer> newResults = new Vector<ResultsContainer>();
for (ResultsContainer result : results) {
if (mismatchFilter != 2 && result.getMismatch() != mismatchFilter)
continue;
if (cableFaultFilter != 2 && result.getCable() != cableFaultFilter)
continue;
if (congestionFilter != 2 && result.getCongestion() != congestionFilter)
continue;
if (duplexFilter != 2 && result.getDuplex() != duplexFilter)
continue;
if (newCongestionFilter != 2 && result.getNewCongestion() != newCongestionFilter)
continue;
if (initialPeakSpeedFilter != 4 && result.getInitialPeakSpeedEquality() != initialPeakSpeedFilter)
continue;
if (ips.get(result.getIP()).equals(1)) {
newResults.add(result);
}
}
return newResults;
}
/**
 * Rebuilds the entire filter UI from the current {@code results} list.
 *
 * Tears down the content pane and reconstructs: a left-hand panel of one
 * checkbox per known IP (with "all"/"none" buttons), an "Apply" button at
 * the bottom, and a right-hand panel of per-criterion combo-box filters.
 * Each filter's current value is loaded from, and persisted back to, the
 * main window's properties. Called whenever the results set changes.
 */
public void resultsChange() {
    Container cp = getContentPane();
    cp.removeAll();
    // Rebuild the IP -> enabled(1)/disabled(0) map. IPs seen before keep
    // their previous state; brand-new IPs start enabled unless they appear
    // in the persisted 'disabled' collection.
    Map<String, Integer> oldIPs = new HashMap<String, Integer>(ips);
    ips.clear();
    for (ResultsContainer result : results) {
        if (!oldIPs.containsKey(result.getIP())) {
            ips.put(result.getIP(), disabled.contains(result.getIP()) ? 0 : 1);
        }
        else {
            ips.put(result.getIP(), oldIPs.get(result.getIP()));
        }
    }
    cp.setLayout(new BorderLayout());
    JPanel leftPanel = new JPanel();
    leftPanel.setLayout(new BorderLayout());
    final JPanel ipsPanel = new JPanel();
    ipsPanel.setBorder(new TitledBorder("Show IPs"));
    ipsPanel.setLayout(new BoxLayout(ipsPanel, BoxLayout.Y_AXIS));
    if (ips.keySet().size() == 0) {
        // No IPs yet: placeholder label keeps the bordered panel visible.
        ipsPanel.add(new JLabel("   "));
    }
    else {
        // "all"/"none" bulk-select buttons. They call setSelected on each
        // checkbox; the checkbox subclass below fires an ActionEvent from
        // setSelected, so the per-IP listeners (and property persistence)
        // run for every toggled box.
        JPanel tmpPanel = new JPanel();
        tmpPanel.setBorder(new TitledBorder("select IPs"));
        JButton allButton = new JButton("all");
        allButton.addActionListener( new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                for (Component component : ipsPanel.getComponents()) {
                    JCheckBox checkBox = (JCheckBox) component;
                    checkBox.setSelected(true);
                }
            }
        });
        allButton.setPreferredSize(new Dimension(55, 20));
        JButton noneButton = new JButton("none");
        noneButton.addActionListener( new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                for (Component component : ipsPanel.getComponents()) {
                    JCheckBox checkBox = (JCheckBox) component;
                    checkBox.setSelected(false);
                }
            }
        });
        noneButton.setPreferredSize(new Dimension(70, 20));
        tmpPanel.add(allButton);
        tmpPanel.add(noneButton);
        leftPanel.add(tmpPanel, BorderLayout.SOUTH);
    }
    for (String ip : ips.keySet()) {
        // Anonymous subclass: unlike stock JCheckBox, programmatic
        // setSelected also fires an ActionEvent, so the "all"/"none"
        // buttons above trigger the same listener path as a user click.
        JCheckBox checkBox = new JCheckBox(ip, ips.get(ip) == 1) {
            private static final long serialVersionUID = 1L;
            public void setSelected(boolean value) {
                super.setSelected(value);
                fireActionPerformed(new ActionEvent(FilterFrame.this,
                                                    ActionEvent.ACTION_PERFORMED,
                                                    ""));
            }
        };
        checkBox.addActionListener( new IPCheckBoxActionListener(checkBox, ip, disabled));
        ipsPanel.add(checkBox);
    }
    leftPanel.add(new JScrollPane(ipsPanel));
    cp.add(leftPanel, BorderLayout.WEST);
    // "Apply" pushes the current filter state into the main results list.
    JPanel applyPanel = new JPanel();
    JButton applyButton = new JButton("Apply");
    applyButton.addActionListener( new ActionListener() {
        public void actionPerformed(ActionEvent e) {
            mainWindow.rebuildResultsList();
        }
    });
    applyPanel.add(applyButton);
    cp.add(applyPanel, BorderLayout.SOUTH);
    // Right-hand side: one three-state (no/yes/both) combo per criterion.
    // Pattern for each: listener stores the selected index in the filter
    // field and persists it; initial value is read back from properties
    // (default "2" = "both", i.e. filter disabled).
    JPanel optPanel = new JPanel();
    optPanel.setLayout(new BoxLayout(optPanel, BoxLayout.Y_AXIS));
    String[] optStrings = {"no", "yes", "both"};
    JComboBox mismatchBox = new JComboBox(optStrings);
    mismatchBox.addActionListener( new ActionListener() {
        public void actionPerformed(ActionEvent e) {
            mismatchFilter = ((JComboBox) e.getSource()).getSelectedIndex();
            mainWindow.getProperties().setProperty("mismatchFilter", Integer.toString(mismatchFilter));
        }
    });
    mismatchFilter = Integer.parseInt(mainWindow.getProperties().getProperty("mismatchFilter", "2"));
    mismatchBox.setSelectedIndex(mismatchFilter);
    JPanel horizontalPanel = new JPanel();
    horizontalPanel.setLayout(new BoxLayout(horizontalPanel, BoxLayout.X_AXIS));
    horizontalPanel.add(new JLabel("Mismatch: "));
    horizontalPanel.add(mismatchBox);
    optPanel.add(horizontalPanel);
    JComboBox cableFaultBox = new JComboBox(optStrings);
    cableFaultBox.addActionListener( new ActionListener() {
        public void actionPerformed(ActionEvent e) {
            cableFaultFilter = ((JComboBox) e.getSource()).getSelectedIndex();
            mainWindow.getProperties().setProperty("cableFaultFilter", Integer.toString(cableFaultFilter));
        }
    });
    cableFaultFilter = Integer.parseInt(mainWindow.getProperties().getProperty("cableFaultFilter", "2"));
    cableFaultBox.setSelectedIndex(cableFaultFilter);
    horizontalPanel = new JPanel();
    horizontalPanel.setLayout(new BoxLayout(horizontalPanel, BoxLayout.X_AXIS));
    horizontalPanel.add(new JLabel("Cable fault: "));
    horizontalPanel.add(cableFaultBox);
    optPanel.add(horizontalPanel);
    JComboBox congestionBox = new JComboBox(optStrings);
    congestionBox.addActionListener( new ActionListener() {
        public void actionPerformed(ActionEvent e) {
            congestionFilter = ((JComboBox) e.getSource()).getSelectedIndex();
            mainWindow.getProperties().setProperty("congestionFilter", Integer.toString(congestionFilter));
        }
    });
    congestionFilter = Integer.parseInt(mainWindow.getProperties().getProperty("congestionFilter", "2"));
    congestionBox.setSelectedIndex(congestionFilter);
    horizontalPanel = new JPanel();
    horizontalPanel.setLayout(new BoxLayout(horizontalPanel, BoxLayout.X_AXIS));
    horizontalPanel.add(new JLabel("Congestion: "));
    horizontalPanel.add(congestionBox);
    optPanel.add(horizontalPanel);
    JComboBox duplexBox = new JComboBox(optStrings);
    duplexBox.addActionListener( new ActionListener() {
        public void actionPerformed(ActionEvent e) {
            duplexFilter = ((JComboBox) e.getSource()).getSelectedIndex();
            mainWindow.getProperties().setProperty("duplexFilter", Integer.toString(duplexFilter));
        }
    });
    duplexFilter = Integer.parseInt(mainWindow.getProperties().getProperty("duplexFilter", "2"));
    duplexBox.setSelectedIndex(duplexFilter);
    horizontalPanel = new JPanel();
    horizontalPanel.setLayout(new BoxLayout(horizontalPanel, BoxLayout.X_AXIS));
    horizontalPanel.add(new JLabel("Duplex: "));
    horizontalPanel.add(duplexBox);
    optPanel.add(horizontalPanel);
    JComboBox newCongestionBox = new JComboBox(optStrings);
    newCongestionBox.addActionListener( new ActionListener() {
        public void actionPerformed(ActionEvent e) {
            newCongestionFilter = ((JComboBox) e.getSource()).getSelectedIndex();
            mainWindow.getProperties().setProperty("newCongestionFilter",
                                                   Integer.toString(newCongestionFilter));
        }
    });
    newCongestionFilter=Integer.parseInt(mainWindow.getProperties().getProperty("newCongestionFilter","2"));
    newCongestionBox.setSelectedIndex(newCongestionFilter);
    horizontalPanel = new JPanel();
    horizontalPanel.setLayout(new BoxLayout(horizontalPanel, BoxLayout.X_AXIS));
    horizontalPanel.add(new JLabel("New congestion: "));
    horizontalPanel.add(newCongestionBox);
    optPanel.add(horizontalPanel);
    // Five-state filter; default "4" = "all" (filter disabled), matching
    // the initialPeakSpeedFilter != 4 check in the results filter loop.
    JComboBox initialPeakSpeedBox = new JComboBox(new String[] {"n/a", "equal", "greater", "less", "all"});
    initialPeakSpeedBox.addActionListener( new ActionListener() {
        public void actionPerformed(ActionEvent e) {
            initialPeakSpeedFilter = ((JComboBox) e.getSource()).getSelectedIndex();
            mainWindow.getProperties().setProperty("initialPeakSpeedFilter",
                                                   Integer.toString(initialPeakSpeedFilter));
        }
    });
    initialPeakSpeedFilter=Integer.parseInt(mainWindow.getProperties().getProperty("initialPeakSpeedFilter","4"));
    initialPeakSpeedBox.setSelectedIndex(initialPeakSpeedFilter);
    horizontalPanel = new JPanel();
    horizontalPanel.setLayout(new BoxLayout(horizontalPanel, BoxLayout.X_AXIS));
    horizontalPanel.add(new JLabel("Initial peak speed: "));
    horizontalPanel.add(initialPeakSpeedBox);
    optPanel.add(horizontalPanel);
    JPanel tmpPanel = new JPanel();
    tmpPanel.add(new JScrollPane(optPanel));
    cp.add(tmpPanel);
    validate();
    cp.repaint();
}
/**
 * Listener attached to one per-IP checkbox. Toggling the box flips the IP's
 * enabled flag in {@code ips} (1 = shown, 0 = hidden), keeps the shared
 * {@code disabled} collection in sync, and persists the full disabled set as
 * a comma-separated "disabled" property on the main window.
 */
class IPCheckBoxActionListener implements ActionListener {
    private JCheckBox checkBox;
    private String ip;
    private Collection<String> disabled;

    IPCheckBoxActionListener(JCheckBox checkBox, String ip, Collection<String> disabled) {
        this.checkBox = checkBox;
        this.ip = ip;
        this.disabled = disabled;
    }

    public void actionPerformed(ActionEvent e) {
        if (checkBox.isSelected()) {
            ips.put(ip, 1);
            disabled.remove(ip);
        }
        else {
            ips.put(ip, 0);
            if (!disabled.contains(ip)) {
                disabled.add(ip);
            }
        }
        // Re-serialize the disabled set in iteration order. The loop variable
        // is deliberately NOT named 'ip': the original shadowed the field of
        // the same name, which invites subtle bugs. StringBuilder replaces
        // StringBuffer since no synchronization is needed on a local.
        StringBuilder newDisabled = new StringBuilder();
        boolean first = true;
        for (String disabledIp : disabled) {
            if (!first) {
                newDisabled.append(",");
            }
            newDisabled.append(disabledIp);
            first = false;
        }
        mainWindow.getProperties().setProperty("disabled", newDisabled.toString());
    }
}
}
| |
package com.netflix.priam.defaultimpl;
import com.datastax.driver.core.VersionNumber;
import com.google.common.base.Strings;
import com.google.inject.Inject;
import com.netflix.priam.config.BackupConfiguration;
import com.netflix.priam.config.CassandraConfiguration;
import com.netflix.priam.utils.CassandraTuner;
import com.netflix.priam.utils.TokenManager;
import org.apache.cassandra.locator.SnitchProperties;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.yaml.snakeyaml.DumperOptions;
import org.yaml.snakeyaml.Yaml;
import javax.annotation.Nullable;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Reader;
import java.io.Writer;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Properties;
/**
 * Writes Cassandra's configuration files based on Priam's own configuration:
 * the cassandra.yaml itself, the commit-log archiving properties, and the
 * snitch rackdc properties file.
 */
public class StandardTuner implements CassandraTuner {
    private static final Logger logger = LoggerFactory.getLogger(StandardTuner.class);

    /** Commit-log archiving properties file, relative to the Cassandra home dir. */
    private static final String CL_BACKUP_PROPS_FILE = "/conf/commitlog_archiving.properties";

    private final CassandraConfiguration cassandraConfiguration;
    private final BackupConfiguration backupConfiguration;

    @Inject
    public StandardTuner(CassandraConfiguration cassandraConfiguration, BackupConfiguration backupConfiguration) {
        this.cassandraConfiguration = cassandraConfiguration;
        this.backupConfiguration = backupConfiguration;
    }

    /**
     * Loads the existing yaml at {@code yamlLocation}, overlays all configured
     * values (null config values leave the existing yaml entry untouched, see
     * {@link #put}), rewrites the file, then writes the commit-log backup
     * properties and the snitch properties.
     *
     * @param cassandraVersion detected Cassandra version, or null if it could
     *        not be determined (affects batch-size failure threshold handling)
     * @throws IOException if any of the files cannot be read or written
     */
    @Override
    public void writeAllProperties(String yamlLocation, String hostIp, String seedProvider, @Nullable VersionNumber cassandraVersion) throws IOException {
        DumperOptions options = new DumperOptions();
        options.setDefaultFlowStyle(DumperOptions.FlowStyle.BLOCK);
        Yaml yaml = new Yaml(options);
        File yamlFile = new File(yamlLocation);
        Map<String, Object> map = load(yaml, yamlFile);
        put(map, "cluster_name", cassandraConfiguration.getClusterName());
        put(map, "storage_port", cassandraConfiguration.getStoragePort());
        put(map, "ssl_storage_port", cassandraConfiguration.getSslStoragePort());
        put(map, "start_rpc", cassandraConfiguration.isThriftEnabled());
        put(map, "rpc_port", cassandraConfiguration.getThriftPort());
        put(map, "start_native_transport", cassandraConfiguration.isNativeTransportEnabled());
        put(map, "native_transport_port", cassandraConfiguration.getNativeTransportPort());
        put(map, "listen_address", hostIp);
        put(map, "rpc_address", hostIp);
        put(map, "auto_bootstrap", cassandraConfiguration.getAutoBootstrap());
        put(map, "saved_caches_directory", cassandraConfiguration.getCacheLocation());
        put(map, "commitlog_directory", cassandraConfiguration.getCommitLogLocation());
        put(map, "hints_directory", cassandraConfiguration.getHintsLocation());
        put(map, "data_file_directories", Arrays.asList(cassandraConfiguration.getDataFileLocation()));
        put(map, "incremental_backups", backupConfiguration.isIncrementalBackupEnabledForCassandra());
        put(map, "tombstone_warn_threshold", cassandraConfiguration.getTombstonesWarningThreshold());
        put(map, "tombstone_failure_threshold", cassandraConfiguration.getTombstonesFailureThreshold());
        put(map, "endpoint_snitch", cassandraConfiguration.getEndpointSnitch());
        put(map, "compaction_throughput_mb_per_sec", cassandraConfiguration.getCompactionThroughputMBPerSec());
        put(map, "partitioner", derivePartitioner(map.get("partitioner").toString(), cassandraConfiguration.getPartitioner()));
        put(map, "memtable_total_space_in_mb", cassandraConfiguration.getMemtableTotalSpaceMB());
        put(map, "memtable_flush_writers", cassandraConfiguration.getMemtableFlushWriters());
        put(map, "stream_throughput_outbound_megabits_per_sec", cassandraConfiguration.getStreamingThroughputMbps());
        put(map, "max_hint_window_in_ms", cassandraConfiguration.getMaxHintWindowMS());
        put(map, "hinted_handoff_throttle_in_kb", cassandraConfiguration.getHintedHandoffThrottleKB());
        put(map, "authenticator", cassandraConfiguration.getAuthenticator());
        put(map, "authorizer", cassandraConfiguration.getAuthorizer());
        put(map, "internode_compression", cassandraConfiguration.getInternodeCompression());
        put(map, "inter_dc_tcp_nodelay", cassandraConfiguration.isInterDcTcpNodelay());
        put(map, "concurrent_reads", cassandraConfiguration.getConcurrentReads());
        put(map, "concurrent_writes", cassandraConfiguration.getConcurrentWrites());
        put(map, "concurrent_compactors", cassandraConfiguration.getConcurrentCompactors());
        put(map, "disk_optimization_strategy", cassandraConfiguration.getDiskOptimizationStrategy());
        put(map, "rpc_server_type", cassandraConfiguration.getRpcServerType());
        put(map, "index_interval", cassandraConfiguration.getIndexInterval()); // Removed in Cassandra 2.1
        put(map, "read_request_timeout_in_ms", cassandraConfiguration.getReadRequestTimeoutInMs());
        put(map, "range_request_timeout_in_ms", cassandraConfiguration.getRangeRequestTimeoutInMs());
        put(map, "write_request_timeout_in_ms", cassandraConfiguration.getWriteRequestTimeoutInMs());
        put(map, "request_timeout_in_ms", cassandraConfiguration.getRequestTimeoutInMs());
        // Overwrite the class name of the first (only) seed provider entry.
        List<Map<String, Object>> seedp = get(map, "seed_provider");
        Map<String, Object> m = seedp.get(0);
        put(m, "class_name", seedProvider);
        configureSecurity(map);
        configureGlobalCaches(cassandraConfiguration, map);
        configureBatchSizes(cassandraConfiguration, map, cassandraVersion);
        //force to 1 until vnodes are properly supported
        put(map, "num_tokens", 1);
        addExtraCassParams(map);
        logger.info(yaml.dump(map));
        // try-with-resources: the previous code never closed the FileWriter,
        // risking an unflushed/truncated yaml file.
        try (Writer writer = new FileWriter(yamlFile)) {
            yaml.dump(map, writer);
        }
        configureCommitLogBackups();
        writeCassandraSnitchProperties();
    }

    /**
     * Setup the cassandra 1.1 global cache values. Null sizes in the config
     * leave the yaml's existing values in place.
     */
    private void configureGlobalCaches(CassandraConfiguration cassandraConfiguration, Map<String, Object> yaml) {
        Integer keyCacheSize = cassandraConfiguration.getKeyCacheSizeInMB();
        if (keyCacheSize != null) {
            put(yaml, "key_cache_size_in_mb", keyCacheSize);
            put(yaml, "key_cache_keys_to_save", cassandraConfiguration.getKeyCacheKeysToSave());
        }
        Integer rowCacheSize = cassandraConfiguration.getRowCacheSizeInMB();
        if (rowCacheSize != null) {
            put(yaml, "row_cache_size_in_mb", rowCacheSize);
            put(yaml, "row_cache_keys_to_save", cassandraConfiguration.getRowCacheKeysToSave());
        }
    }

    /**
     * Chooses which partitioner to write to the yaml.
     *
     * @return the configured partitioner when the yaml has none, when the yaml
     *         uses Random/Murmur3, or when both sides map to the same client
     *         partitioner; otherwise the yaml's existing value wins
     */
    private String derivePartitioner(String fromYaml, String fromConfig) {
        if (Strings.isNullOrEmpty(fromYaml)) {
            return fromConfig;
        }
        //this check is to prevent against overwriting an existing yaml file that has
        // a partitioner not RandomPartitioner or (as of cass 1.2) Murmur3Partitioner.
        //basically we don't want to hose existing deployments by changing the partitioner unexpectedly on them
        final String lowerCase = fromYaml.toLowerCase();
        if (lowerCase.contains("randomparti") || lowerCase.contains("murmur")) {
            return fromConfig;
        }
        // If both partitioners are either ByteOrderedPartitioner or EmoPartitioner than accept whichever
        // is from the configuration file.
        if (TokenManager.clientPartitioner(fromYaml).equals(TokenManager.clientPartitioner(fromConfig))) {
            return fromConfig;
        }
        return fromYaml;
    }

    /** Applies client- and server-side (internode) encryption settings. */
    private void configureSecurity(Map<String, Object> map) {
        //the client-side ssl settings
        Map<String, Object> clientEnc = get(map, "client_encryption_options");
        put(clientEnc, "enabled", cassandraConfiguration.isClientSslEnabled());
        //the server-side (internode) ssl settings
        Map<String, Object> serverEnc = get(map, "server_encryption_options");
        put(serverEnc, "internode_encryption", cassandraConfiguration.getInternodeEncryption());
    }

    /**
     * Writes commitlog_archiving.properties when commit-log backups are
     * enabled; a no-op otherwise.
     */
    private void configureCommitLogBackups() throws IOException {
        if (!backupConfiguration.isCommitLogBackupEnabled()) {
            return;
        }
        Properties props = new Properties();
        props.put("archive_command", backupConfiguration.getCommitLogBackupArchiveCmd());
        props.put("restore_command", backupConfiguration.getCommitLogBackupRestoreCmd());
        props.put("restore_directories", backupConfiguration.getCommitLogBackupRestoreFromDirs());
        props.put("restore_point_in_time", backupConfiguration.getCommitLogBackupRestorePointInTime());
        File commitLogProperties = new File(cassandraConfiguration.getCassHome() + CL_BACKUP_PROPS_FILE);
        try (FileOutputStream fos = new FileOutputStream(commitLogProperties)) {
            props.store(fos, "cassandra commit log archive props, as written by priam");
        }
    }

    /**
     * Configures batch-size warning/failure thresholds. The failure threshold
     * is only supported starting in Cassandra 2.2, so it is skipped for known
     * older versions and written with a warning when the version is unknown.
     */
    private void configureBatchSizes(CassandraConfiguration cassandraConfiguration, Map<String, Object> yaml, @Nullable VersionNumber cassandraVersion) {
        put(yaml, "batch_size_warn_threshold_in_kb", cassandraConfiguration.getBatchSizeWarningThresholdInKb());
        // Failure threshold is only supported starting in 2.2. Don't configure if the version number is known to be
        // prior to that.
        Integer batchSizeFailureThresholdInKb = cassandraConfiguration.getBatchSizeFailureThresholdInKb();
        if (batchSizeFailureThresholdInKb != null) {
            if (cassandraVersion == null) {
                logger.warn("Batch size failure threshold has been set to {} but the Cassandra version could not be confirmed. " +
                        "If Cassandra version is not 2.2+ this will cause a configuration error.", batchSizeFailureThresholdInKb);
            }
            if (cassandraVersion != null && cassandraVersion.compareTo(VersionNumber.parse("2.2")) < 0) {
                logger.info("Batch size failure threshold has been set to {} but Cassandra version {} does not support this" +
                        "option. Ignoring value.", batchSizeFailureThresholdInKb, cassandraVersion);
            } else {
                put(yaml, "batch_size_fail_threshold_in_kb", batchSizeFailureThresholdInKb);
            }
        }
    }

    /**
     * Rewrites only the auto_bootstrap flag in the given yaml file.
     */
    @Override
    public void updateAutoBootstrap(String yamlFile, boolean autobootstrap) throws IOException {
        DumperOptions options = new DumperOptions();
        options.setDefaultFlowStyle(DumperOptions.FlowStyle.BLOCK);
        Yaml yaml = new Yaml(options);
        Map<String, Object> map = load(yaml, new File(yamlFile));
        put(map, "auto_bootstrap", autobootstrap); //Don't bootstrap in restore mode
        logger.info("Updating yaml {}", yaml.dump(map));
        // try-with-resources: the previous code leaked this FileWriter too.
        try (Writer writer = new FileWriter(yamlFile)) {
            yaml.dump(map, writer);
        }
    }

    /** Copies arbitrary extra key/value config params from Priam config into the yaml. */
    private void addExtraCassParams(Map<String, Object> map) {
        Map<String, String> params = cassandraConfiguration.getExtraConfigParams();
        if (params == null) {
            logger.info("Updating yaml: no extra cass params");
            return;
        }
        logger.info("Updating yaml: adding extra cass params");
        for (Map.Entry<String, String> entry : params.entrySet()) {
            String cassKey = entry.getKey();
            String cassVal = entry.getValue();
            logger.info("Updating yaml: CassKey[{}], Val[{}]", cassKey, cassVal);
            put(map, cassKey, cassVal);
        }
    }

    /**
     * Updates cassandra-rackdc.properties in place: existing properties are
     * preserved, and "dc_suffix" is set or removed per the configuration.
     */
    private void writeCassandraSnitchProperties() {
        String rackdcPropFileName = cassandraConfiguration.getCassHome() + "/conf/" + SnitchProperties.RACKDC_PROPERTY_FILENAME;
        File rackdcPropFile = new File(rackdcPropFileName);
        Properties properties = new Properties();
        // Read the existing properties, if any.
        if (rackdcPropFile.exists()) {
            try (Reader reader = new FileReader(rackdcPropFile)) {
                properties.load(reader);
            } catch (Exception e) {
                throw new RuntimeException("Unable to read " + SnitchProperties.RACKDC_PROPERTY_FILENAME, e);
            }
        }
        // Set the "dc_suffix" property if there is one configured
        String dcSuffix = cassandraConfiguration.getDataCenterSuffix();
        if (Strings.isNullOrEmpty(dcSuffix)) {
            properties.remove("dc_suffix");
        } else {
            properties.put("dc_suffix", dcSuffix);
        }
        if (logger.isInfoEnabled()) {
            if (properties.isEmpty()) {
                logger.info("Updating {}: no properties", SnitchProperties.RACKDC_PROPERTY_FILENAME);
            } else {
                // Typed entry instead of the raw Map.Entry the original used.
                for (Map.Entry<Object, Object> entry : properties.entrySet()) {
                    logger.info("Updating {}: {}={}", SnitchProperties.RACKDC_PROPERTY_FILENAME, entry.getKey(), entry.getValue());
                }
            }
        }
        // Write the updated properties back
        try (Writer writer = new FileWriter(rackdcPropFile)) {
            properties.store(writer, "");
        } catch (Exception e) {
            throw new RuntimeException("Unable to write " + SnitchProperties.RACKDC_PROPERTY_FILENAME, e);
        }
    }

    /** Loads the yaml file as a generic String-keyed map. */
    @SuppressWarnings("unchecked")
    private Map<String, Object> load(Yaml yaml, File yamlFile) throws FileNotFoundException {
        return (Map<String, Object>) yaml.load(new FileInputStream(yamlFile));
    }

    /** Fetches and casts a yaml entry; the caller chooses the target type. */
    @SuppressWarnings("unchecked")
    private <T> T get(Map<String, Object> map, String key) {
        return (T) map.get(key);
    }

    /** Puts a value only when non-null, so unset config leaves the yaml untouched. */
    private void put(Map<String, Object> map, String key, Object value) {
        if (value != null) {
            map.put(key, value);
        }
    }
}
| |
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/pubsub/v1/schema.proto
package com.google.pubsub.v1;
/**
*
*
* <pre>
* Request for the `ValidateMessage` method.
* </pre>
*
* Protobuf type {@code google.pubsub.v1.ValidateMessageRequest}
*/
public final class ValidateMessageRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.pubsub.v1.ValidateMessageRequest)
ValidateMessageRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use ValidateMessageRequest.newBuilder() to construct.
private ValidateMessageRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}
// Default instance: empty parent, empty message payload, encoding 0
// (ENCODING_UNSPECIFIED).
private ValidateMessageRequest() {
  parent_ = "";
  message_ = com.google.protobuf.ByteString.EMPTY;
  encoding_ = 0;
}
// Called reflectively by the protobuf runtime to allocate fresh instances.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new ValidateMessageRequest();
}
// Exposes fields that were present on the wire but unknown to this schema.
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
  return this.unknownFields;
}
// Stream-parsing constructor: reads the message tag-by-tag until end of
// stream, preserving unrecognized fields in the unknown-field set.
private ValidateMessageRequest(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  this();
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  com.google.protobuf.UnknownFieldSet.Builder unknownFields =
      com.google.protobuf.UnknownFieldSet.newBuilder();
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0: // tag 0 marks end of stream
          done = true;
          break;
        case 10: // field 1: parent (length-delimited string)
          {
            java.lang.String s = input.readStringRequireUtf8();
            parent_ = s;
            break;
          }
        case 18: // field 2: name — string member of the schema_spec oneof
          {
            java.lang.String s = input.readStringRequireUtf8();
            schemaSpecCase_ = 2;
            schemaSpec_ = s;
            break;
          }
        case 26: // field 3: schema — message member of the schema_spec oneof
          {
            com.google.pubsub.v1.Schema.Builder subBuilder = null;
            if (schemaSpecCase_ == 3) {
              // A Schema is already stored in the oneof: merge into it.
              subBuilder = ((com.google.pubsub.v1.Schema) schemaSpec_).toBuilder();
            }
            schemaSpec_ =
                input.readMessage(com.google.pubsub.v1.Schema.parser(), extensionRegistry);
            if (subBuilder != null) {
              subBuilder.mergeFrom((com.google.pubsub.v1.Schema) schemaSpec_);
              schemaSpec_ = subBuilder.buildPartial();
            }
            schemaSpecCase_ = 3;
            break;
          }
        case 34: // field 4: message (raw bytes)
          {
            message_ = input.readBytes();
            break;
          }
        case 40: // field 5: encoding (varint enum, stored as raw int)
          {
            int rawValue = input.readEnum();
            encoding_ = rawValue;
            break;
          }
        default: // unknown field: keep it, unless it signals end-of-group
          {
            if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
      }
    }
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
  } finally {
    // Always finalize unknown fields, even on parse failure.
    this.unknownFields = unknownFields.build();
    makeExtensionsImmutable();
  }
}
// Descriptor plumbing linking this class to its .proto definition.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.pubsub.v1.SchemaProto
      .internal_static_google_pubsub_v1_ValidateMessageRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.pubsub.v1.SchemaProto
      .internal_static_google_pubsub_v1_ValidateMessageRequest_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.pubsub.v1.ValidateMessageRequest.class,
          com.google.pubsub.v1.ValidateMessageRequest.Builder.class);
}
// Oneof state: schemaSpecCase_ holds the field number of the set member
// (2 = name, 3 = schema, 0 = unset); schemaSpec_ holds its value.
private int schemaSpecCase_ = 0;
private java.lang.Object schemaSpec_;
/** Discriminator for the {@code schema_spec} oneof. */
public enum SchemaSpecCase
    implements
        com.google.protobuf.Internal.EnumLite,
        com.google.protobuf.AbstractMessage.InternalOneOfEnum {
  NAME(2),
  SCHEMA(3),
  SCHEMASPEC_NOT_SET(0);
  private final int value;
  private SchemaSpecCase(int value) {
    this.value = value;
  }
  /**
   * @param value The number of the enum to look for.
   * @return The enum associated with the given number.
   * @deprecated Use {@link #forNumber(int)} instead.
   */
  @java.lang.Deprecated
  public static SchemaSpecCase valueOf(int value) {
    return forNumber(value);
  }
  public static SchemaSpecCase forNumber(int value) {
    switch (value) {
      case 2:
        return NAME;
      case 3:
        return SCHEMA;
      case 0:
        return SCHEMASPEC_NOT_SET;
      default:
        // Unknown field number: no corresponding case.
        return null;
    }
  }
  public int getNumber() {
    return this.value;
  }
};
public SchemaSpecCase getSchemaSpecCase() {
  return SchemaSpecCase.forNumber(schemaSpecCase_);
}
public static final int PARENT_FIELD_NUMBER = 1;
// Holds either a String or a ByteString; lazily converted and cached.
private volatile java.lang.Object parent_;
/**
 *
 *
 * <pre>
 * Required. The name of the project in which to validate schemas.
 * Format is `projects/{project-id}`.
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The parent.
 */
@java.lang.Override
public java.lang.String getParent() {
  java.lang.Object ref = parent_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // Decode the ByteString once and cache the String for later calls.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    parent_ = s;
    return s;
  }
}
/**
 *
 *
 * <pre>
 * Required. The name of the project in which to validate schemas.
 * Format is `projects/{project-id}`.
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The bytes for parent.
 */
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
  java.lang.Object ref = parent_;
  if (ref instanceof java.lang.String) {
    // Encode the String once and cache the ByteString for later calls.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    parent_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
public static final int NAME_FIELD_NUMBER = 2;
/**
 *
 *
 * <pre>
 * Name of the schema against which to validate.
 * Format is `projects/{project}/schemas/{schema}`.
 * </pre>
 *
 * <code>string name = 2 [(.google.api.resource_reference) = { ... }</code>
 *
 * @return Whether the name field is set.
 */
public boolean hasName() {
  return schemaSpecCase_ == 2;
}
/**
 *
 *
 * <pre>
 * Name of the schema against which to validate.
 * Format is `projects/{project}/schemas/{schema}`.
 * </pre>
 *
 * <code>string name = 2 [(.google.api.resource_reference) = { ... }</code>
 *
 * @return The name.
 */
public java.lang.String getName() {
  // Returns "" when the oneof holds a different member (or nothing).
  java.lang.Object ref = "";
  if (schemaSpecCase_ == 2) {
    ref = schemaSpec_;
  }
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    // Cache the decoded String back into the oneof slot, but only if
    // 'name' is still the active member.
    if (schemaSpecCase_ == 2) {
      schemaSpec_ = s;
    }
    return s;
  }
}
/**
 *
 *
 * <pre>
 * Name of the schema against which to validate.
 * Format is `projects/{project}/schemas/{schema}`.
 * </pre>
 *
 * <code>string name = 2 [(.google.api.resource_reference) = { ... }</code>
 *
 * @return The bytes for name.
 */
public com.google.protobuf.ByteString getNameBytes() {
  java.lang.Object ref = "";
  if (schemaSpecCase_ == 2) {
    ref = schemaSpec_;
  }
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    // Cache the encoded ByteString back into the oneof slot if still active.
    if (schemaSpecCase_ == 2) {
      schemaSpec_ = b;
    }
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
public static final int SCHEMA_FIELD_NUMBER = 3;
/**
 *
 *
 * <pre>
 * Ad-hoc schema against which to validate
 * </pre>
 *
 * <code>.google.pubsub.v1.Schema schema = 3;</code>
 *
 * @return Whether the schema field is set.
 */
@java.lang.Override
public boolean hasSchema() {
  return schemaSpecCase_ == 3;
}
/**
 *
 *
 * <pre>
 * Ad-hoc schema against which to validate
 * </pre>
 *
 * <code>.google.pubsub.v1.Schema schema = 3;</code>
 *
 * @return The schema.
 */
@java.lang.Override
public com.google.pubsub.v1.Schema getSchema() {
  if (schemaSpecCase_ == 3) {
    return (com.google.pubsub.v1.Schema) schemaSpec_;
  }
  // Oneof not set to 'schema': return the default (empty) instance.
  return com.google.pubsub.v1.Schema.getDefaultInstance();
}
/**
 *
 *
 * <pre>
 * Ad-hoc schema against which to validate
 * </pre>
 *
 * <code>.google.pubsub.v1.Schema schema = 3;</code>
 */
@java.lang.Override
public com.google.pubsub.v1.SchemaOrBuilder getSchemaOrBuilder() {
  if (schemaSpecCase_ == 3) {
    return (com.google.pubsub.v1.Schema) schemaSpec_;
  }
  return com.google.pubsub.v1.Schema.getDefaultInstance();
}
public static final int MESSAGE_FIELD_NUMBER = 4;
private com.google.protobuf.ByteString message_;
/**
 *
 *
 * <pre>
 * Message to validate against the provided `schema_spec`.
 * </pre>
 *
 * <code>bytes message = 4;</code>
 *
 * @return The message.
 */
@java.lang.Override
public com.google.protobuf.ByteString getMessage() {
  return message_;
}
public static final int ENCODING_FIELD_NUMBER = 5;
// Stored as the raw wire value so unknown enum numbers round-trip.
private int encoding_;
/**
 *
 *
 * <pre>
 * The encoding expected for messages
 * </pre>
 *
 * <code>.google.pubsub.v1.Encoding encoding = 5;</code>
 *
 * @return The enum numeric value on the wire for encoding.
 */
@java.lang.Override
public int getEncodingValue() {
  return encoding_;
}
/**
 *
 *
 * <pre>
 * The encoding expected for messages
 * </pre>
 *
 * <code>.google.pubsub.v1.Encoding encoding = 5;</code>
 *
 * @return The encoding.
 */
@java.lang.Override
public com.google.pubsub.v1.Encoding getEncoding() {
  @SuppressWarnings("deprecation")
  com.google.pubsub.v1.Encoding result = com.google.pubsub.v1.Encoding.valueOf(encoding_);
  // Numbers not known to this enum map to UNRECOGNIZED.
  return result == null ? com.google.pubsub.v1.Encoding.UNRECOGNIZED : result;
}
// Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  // Proto3 message with no required fields: always initialized.
  memoizedIsInitialized = 1;
  return true;
}
// Serializes set fields in field-number order; default values are skipped
// per proto3 semantics.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
  }
  if (schemaSpecCase_ == 2) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 2, schemaSpec_);
  }
  if (schemaSpecCase_ == 3) {
    output.writeMessage(3, (com.google.pubsub.v1.Schema) schemaSpec_);
  }
  if (!message_.isEmpty()) {
    output.writeBytes(4, message_);
  }
  if (encoding_ != com.google.pubsub.v1.Encoding.ENCODING_UNSPECIFIED.getNumber()) {
    output.writeEnum(5, encoding_);
  }
  unknownFields.writeTo(output);
}
// Computes (and memoizes) the serialized byte size, mirroring writeTo's
// field-by-field skip-if-default logic.
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
  }
  if (schemaSpecCase_ == 2) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, schemaSpec_);
  }
  if (schemaSpecCase_ == 3) {
    size +=
        com.google.protobuf.CodedOutputStream.computeMessageSize(
            3, (com.google.pubsub.v1.Schema) schemaSpec_);
  }
  if (!message_.isEmpty()) {
    size += com.google.protobuf.CodedOutputStream.computeBytesSize(4, message_);
  }
  if (encoding_ != com.google.pubsub.v1.Encoding.ENCODING_UNSPECIFIED.getNumber()) {
    size += com.google.protobuf.CodedOutputStream.computeEnumSize(5, encoding_);
  }
  size += unknownFields.getSerializedSize();
  memoizedSize = size;
  return size;
}
// Field-wise equality: scalar fields, then the active oneof member, then
// unknown fields. Consistent with hashCode below.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.pubsub.v1.ValidateMessageRequest)) {
    return super.equals(obj);
  }
  com.google.pubsub.v1.ValidateMessageRequest other =
      (com.google.pubsub.v1.ValidateMessageRequest) obj;
  if (!getParent().equals(other.getParent())) return false;
  if (!getMessage().equals(other.getMessage())) return false;
  if (encoding_ != other.encoding_) return false;
  if (!getSchemaSpecCase().equals(other.getSchemaSpecCase())) return false;
  switch (schemaSpecCase_) {
    case 2:
      if (!getName().equals(other.getName())) return false;
      break;
    case 3:
      if (!getSchema().equals(other.getSchema())) return false;
      break;
    case 0:
    default:
      // SCHEMASPEC_NOT_SET: nothing further to compare.
  }
  if (!unknownFields.equals(other.unknownFields)) return false;
  return true;
}
// Memoized hash over the same fields equals() compares, mixed with each
// field's number so distinct fields with equal values hash differently.
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (37 * hash) + PARENT_FIELD_NUMBER;
  hash = (53 * hash) + getParent().hashCode();
  hash = (37 * hash) + MESSAGE_FIELD_NUMBER;
  hash = (53 * hash) + getMessage().hashCode();
  hash = (37 * hash) + ENCODING_FIELD_NUMBER;
  hash = (53 * hash) + encoding_;
  switch (schemaSpecCase_) {
    case 2:
      hash = (37 * hash) + NAME_FIELD_NUMBER;
      hash = (53 * hash) + getName().hashCode();
      break;
    case 3:
      hash = (37 * hash) + SCHEMA_FIELD_NUMBER;
      hash = (53 * hash) + getSchema().hashCode();
      break;
    case 0:
    default:
      // SCHEMASPEC_NOT_SET: no oneof contribution.
  }
  hash = (29 * hash) + unknownFields.hashCode();
  memoizedHashCode = hash;
  return hash;
}
// Standard generated parseFrom overloads, one per input source, all
// delegating to PARSER / GeneratedMessageV3 helpers.
public static com.google.pubsub.v1.ValidateMessageRequest parseFrom(java.nio.ByteBuffer data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.pubsub.v1.ValidateMessageRequest parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.pubsub.v1.ValidateMessageRequest parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.pubsub.v1.ValidateMessageRequest parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.pubsub.v1.ValidateMessageRequest parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.pubsub.v1.ValidateMessageRequest parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.pubsub.v1.ValidateMessageRequest parseFrom(java.io.InputStream input)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.pubsub.v1.ValidateMessageRequest parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
// Delimited variants read a leading varint length before the message.
public static com.google.pubsub.v1.ValidateMessageRequest parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.pubsub.v1.ValidateMessageRequest parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}
public static com.google.pubsub.v1.ValidateMessageRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.pubsub.v1.ValidateMessageRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Builder factories.  newBuilder() starts from the default instance;
// newBuilder(prototype) pre-populates the builder with the prototype's fields;
// toBuilder() avoids a redundant merge when called on the default instance.
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.pubsub.v1.ValidateMessageRequest prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
*
*
* <pre>
* Request for the `ValidateMessage` method.
* </pre>
*
* Protobuf type {@code google.pubsub.v1.ValidateMessageRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.pubsub.v1.ValidateMessageRequest)
com.google.pubsub.v1.ValidateMessageRequestOrBuilder {
// Reflection support: descriptor and field-accessor table generated from
// google/pubsub/v1/schema.proto (see SchemaProto).
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.pubsub.v1.SchemaProto
      .internal_static_google_pubsub_v1_ValidateMessageRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.pubsub.v1.SchemaProto
      .internal_static_google_pubsub_v1_ValidateMessageRequest_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.pubsub.v1.ValidateMessageRequest.class,
          com.google.pubsub.v1.ValidateMessageRequest.Builder.class);
}
// Construct using com.google.pubsub.v1.ValidateMessageRequest.newBuilder()
private Builder() {
  maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  super(parent);
  maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
  // No repeated/nested builders need eager initialization for this message;
  // the generated hook is intentionally a no-op.
  if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
}
@java.lang.Override
public Builder clear() {
  // Reset every field to its proto3 default, including the schema_spec oneof
  // (case 0 == SCHEMASPEC_NOT_SET).
  super.clear();
  parent_ = "";
  message_ = com.google.protobuf.ByteString.EMPTY;
  encoding_ = 0;
  schemaSpecCase_ = 0;
  schemaSpec_ = null;
  return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
  return com.google.pubsub.v1.SchemaProto
      .internal_static_google_pubsub_v1_ValidateMessageRequest_descriptor;
}
@java.lang.Override
public com.google.pubsub.v1.ValidateMessageRequest getDefaultInstanceForType() {
  return com.google.pubsub.v1.ValidateMessageRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.pubsub.v1.ValidateMessageRequest build() {
  com.google.pubsub.v1.ValidateMessageRequest result = buildPartial();
  if (!result.isInitialized()) {
    throw newUninitializedMessageException(result);
  }
  return result;
}
@java.lang.Override
public com.google.pubsub.v1.ValidateMessageRequest buildPartial() {
  com.google.pubsub.v1.ValidateMessageRequest result =
      new com.google.pubsub.v1.ValidateMessageRequest(this);
  result.parent_ = parent_;
  // Copy whichever member of the schema_spec oneof is set.  For case 2 (name)
  // the value lives directly in schemaSpec_; for case 3 (schema) it may live
  // either in schemaSpec_ or in the lazily created schemaBuilder_.
  if (schemaSpecCase_ == 2) {
    result.schemaSpec_ = schemaSpec_;
  }
  if (schemaSpecCase_ == 3) {
    if (schemaBuilder_ == null) {
      result.schemaSpec_ = schemaSpec_;
    } else {
      result.schemaSpec_ = schemaBuilder_.build();
    }
  }
  result.message_ = message_;
  result.encoding_ = encoding_;
  result.schemaSpecCase_ = schemaSpecCase_;
  onBuilt();
  return result;
}
// Reflective field mutators: these overrides exist only to narrow the return
// type to Builder for call chaining; all behavior is inherited from super.
@java.lang.Override
public Builder clone() {
  return super.clone();
}
@java.lang.Override
public Builder setField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
  return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
  return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
  return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
  if (other instanceof com.google.pubsub.v1.ValidateMessageRequest) {
    return mergeFrom((com.google.pubsub.v1.ValidateMessageRequest) other);
  } else {
    super.mergeFrom(other);
    return this;
  }
}
public Builder mergeFrom(com.google.pubsub.v1.ValidateMessageRequest other) {
  // Proto3 merge semantics: only non-default fields of `other` overwrite this
  // builder's fields; the default instance is a no-op.
  if (other == com.google.pubsub.v1.ValidateMessageRequest.getDefaultInstance()) return this;
  if (!other.getParent().isEmpty()) {
    parent_ = other.parent_;
    onChanged();
  }
  if (other.getMessage() != com.google.protobuf.ByteString.EMPTY) {
    setMessage(other.getMessage());
  }
  if (other.encoding_ != 0) {
    setEncodingValue(other.getEncodingValue());
  }
  // A set oneof in `other` replaces (NAME) or merges into (SCHEMA) this one.
  switch (other.getSchemaSpecCase()) {
    case NAME:
      {
        schemaSpecCase_ = 2;
        schemaSpec_ = other.schemaSpec_;
        onChanged();
        break;
      }
    case SCHEMA:
      {
        mergeSchema(other.getSchema());
        break;
      }
    case SCHEMASPEC_NOT_SET:
      {
        break;
      }
  }
  this.mergeUnknownFields(other.unknownFields);
  onChanged();
  return this;
}
@java.lang.Override
public final boolean isInitialized() {
  // Proto3 message with no required fields: always initialized.
  return true;
}
@java.lang.Override
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  com.google.pubsub.v1.ValidateMessageRequest parsedMessage = null;
  try {
    parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    // Keep whatever was successfully parsed before the failure so the
    // finally-block merge preserves partial data, then rethrow as IOException.
    parsedMessage = (com.google.pubsub.v1.ValidateMessageRequest) e.getUnfinishedMessage();
    throw e.unwrapIOException();
  } finally {
    if (parsedMessage != null) {
      mergeFrom(parsedMessage);
    }
  }
  return this;
}
// schema_spec oneof bookkeeping: 0 = not set, 2 = name, 3 = schema.
// schemaSpec_ holds the currently-set member's value (String/ByteString or Schema).
private int schemaSpecCase_ = 0;
private java.lang.Object schemaSpec_;
public SchemaSpecCase getSchemaSpecCase() {
  return SchemaSpecCase.forNumber(schemaSpecCase_);
}
public Builder clearSchemaSpec() {
  schemaSpecCase_ = 0;
  schemaSpec_ = null;
  onChanged();
  return this;
}
// Holds either a decoded String or the raw wire-format ByteString; converted
// lazily in each direction and cached back into the field.
private java.lang.Object parent_ = "";
/**
 * Required. The name of the project in which to validate schemas.
 * Format is `projects/{project-id}`.
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The parent.
 */
public java.lang.String getParent() {
  java.lang.Object ref = parent_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  }
  // Still in wire form: decode once and cache the String.
  java.lang.String decoded = ((com.google.protobuf.ByteString) ref).toStringUtf8();
  parent_ = decoded;
  return decoded;
}
/**
 * Required. The name of the project in which to validate schemas.
 * Format is `projects/{project-id}`.
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The bytes for parent.
 */
public com.google.protobuf.ByteString getParentBytes() {
  java.lang.Object ref = parent_;
  if (ref instanceof com.google.protobuf.ByteString) {
    return (com.google.protobuf.ByteString) ref;
  }
  // Currently a String: encode once and cache the ByteString.
  com.google.protobuf.ByteString encoded =
      com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
  parent_ = encoded;
  return encoded;
}
/**
 * Required. The name of the project in which to validate schemas.
 * Format is `projects/{project-id}`.
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @param value The parent to set.
 * @return This builder for chaining.
 */
public Builder setParent(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  parent_ = value;
  onChanged();
  return this;
}
/**
 * Required. The name of the project in which to validate schemas.
 * Format is `projects/{project-id}`.
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return This builder for chaining.
 */
public Builder clearParent() {
  parent_ = getDefaultInstance().getParent();
  onChanged();
  return this;
}
/**
 * Required. The name of the project in which to validate schemas.
 * Format is `projects/{project-id}`.
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @param value The bytes for parent to set.
 * @return This builder for chaining.
 */
public Builder setParentBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);
  parent_ = value;
  onChanged();
  return this;
}
/**
 * Name of the schema against which to validate.
 * Format is `projects/{project}/schemas/{schema}`.
 *
 * <code>string name = 2 [(.google.api.resource_reference) = { ... }</code>
 *
 * @return Whether the name field is set.
 */
@java.lang.Override
public boolean hasName() {
  return schemaSpecCase_ == 2;
}
/**
 * Name of the schema against which to validate.
 * Format is `projects/{project}/schemas/{schema}`.
 *
 * <code>string name = 2 [(.google.api.resource_reference) = { ... }</code>
 *
 * @return The name.
 */
@java.lang.Override
public java.lang.String getName() {
  // The oneof slot is consulted only when this member is the one set;
  // otherwise the proto3 default empty string is returned.
  java.lang.Object ref = schemaSpecCase_ == 2 ? schemaSpec_ : "";
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  }
  java.lang.String decoded = ((com.google.protobuf.ByteString) ref).toStringUtf8();
  if (schemaSpecCase_ == 2) {
    schemaSpec_ = decoded;  // cache the decoded String back into the slot
  }
  return decoded;
}
/**
 * Name of the schema against which to validate.
 * Format is `projects/{project}/schemas/{schema}`.
 *
 * <code>string name = 2 [(.google.api.resource_reference) = { ... }</code>
 *
 * @return The bytes for name.
 */
@java.lang.Override
public com.google.protobuf.ByteString getNameBytes() {
  java.lang.Object ref = schemaSpecCase_ == 2 ? schemaSpec_ : "";
  if (ref instanceof com.google.protobuf.ByteString) {
    return (com.google.protobuf.ByteString) ref;
  }
  com.google.protobuf.ByteString encoded =
      com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
  if (schemaSpecCase_ == 2) {
    schemaSpec_ = encoded;  // cache the encoded bytes back into the slot
  }
  return encoded;
}
/**
 * Name of the schema against which to validate.
 * Format is `projects/{project}/schemas/{schema}`.
 *
 * <code>string name = 2 [(.google.api.resource_reference) = { ... }</code>
 *
 * @param value The name to set.
 * @return This builder for chaining.
 */
public Builder setName(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  schemaSpecCase_ = 2;
  schemaSpec_ = value;
  onChanged();
  return this;
}
/**
 * Name of the schema against which to validate.
 * Format is `projects/{project}/schemas/{schema}`.
 *
 * <code>string name = 2 [(.google.api.resource_reference) = { ... }</code>
 *
 * @return This builder for chaining.
 */
public Builder clearName() {
  // Only clears when `name` is the member currently set; a set `schema` stays.
  if (schemaSpecCase_ == 2) {
    schemaSpecCase_ = 0;
    schemaSpec_ = null;
    onChanged();
  }
  return this;
}
/**
 * Name of the schema against which to validate.
 * Format is `projects/{project}/schemas/{schema}`.
 *
 * <code>string name = 2 [(.google.api.resource_reference) = { ... }</code>
 *
 * @param value The bytes for name to set.
 * @return This builder for chaining.
 */
public Builder setNameBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);
  schemaSpecCase_ = 2;
  schemaSpec_ = value;
  onChanged();
  return this;
}
// Lazily created nested builder for the `schema` member of the oneof; while
// null, the value (if set) lives directly in schemaSpec_.
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.pubsub.v1.Schema,
        com.google.pubsub.v1.Schema.Builder,
        com.google.pubsub.v1.SchemaOrBuilder>
    schemaBuilder_;
/**
 * Ad-hoc schema against which to validate
 *
 * <code>.google.pubsub.v1.Schema schema = 3;</code>
 *
 * @return Whether the schema field is set.
 */
@java.lang.Override
public boolean hasSchema() {
  return schemaSpecCase_ == 3;
}
/**
 * Ad-hoc schema against which to validate
 *
 * <code>.google.pubsub.v1.Schema schema = 3;</code>
 *
 * @return The schema.
 */
@java.lang.Override
public com.google.pubsub.v1.Schema getSchema() {
  if (schemaSpecCase_ != 3) {
    return com.google.pubsub.v1.Schema.getDefaultInstance();
  }
  return schemaBuilder_ == null
      ? (com.google.pubsub.v1.Schema) schemaSpec_
      : schemaBuilder_.getMessage();
}
/**
 * Ad-hoc schema against which to validate
 *
 * <code>.google.pubsub.v1.Schema schema = 3;</code>
 */
public Builder setSchema(com.google.pubsub.v1.Schema value) {
  if (schemaBuilder_ != null) {
    schemaBuilder_.setMessage(value);
  } else {
    if (value == null) {
      throw new NullPointerException();
    }
    schemaSpec_ = value;
    onChanged();
  }
  schemaSpecCase_ = 3;
  return this;
}
/**
 * Ad-hoc schema against which to validate
 *
 * <code>.google.pubsub.v1.Schema schema = 3;</code>
 */
public Builder setSchema(com.google.pubsub.v1.Schema.Builder builderForValue) {
  if (schemaBuilder_ != null) {
    schemaBuilder_.setMessage(builderForValue.build());
  } else {
    schemaSpec_ = builderForValue.build();
    onChanged();
  }
  schemaSpecCase_ = 3;
  return this;
}
/**
 * Merges {@code value} into the {@code schema} member of the {@code schema_spec} oneof,
 * field-by-field, following protobuf merge semantics.
 *
 * <pre>
 * Ad-hoc schema against which to validate
 * </pre>
 *
 * <code>.google.pubsub.v1.Schema schema = 3;</code>
 */
public Builder mergeSchema(com.google.pubsub.v1.Schema value) {
  if (schemaBuilder_ == null) {
    if (schemaSpecCase_ == 3
        && schemaSpec_ != com.google.pubsub.v1.Schema.getDefaultInstance()) {
      schemaSpec_ =
          com.google.pubsub.v1.Schema.newBuilder((com.google.pubsub.v1.Schema) schemaSpec_)
              .mergeFrom(value)
              .buildPartial();
    } else {
      schemaSpec_ = value;
    }
    onChanged();
  } else {
    if (schemaSpecCase_ == 3) {
      schemaBuilder_.mergeFrom(value);
    } else {
      // BUG FIX: setMessage(value) previously ran unconditionally after
      // mergeFrom(value), discarding the merged result.  Replace the value
      // only when no schema was set yet (matches upstream protobuf codegen fix).
      schemaBuilder_.setMessage(value);
    }
  }
  schemaSpecCase_ = 3;
  return this;
}
/**
 * Ad-hoc schema against which to validate
 *
 * <code>.google.pubsub.v1.Schema schema = 3;</code>
 */
public Builder clearSchema() {
  if (schemaBuilder_ != null) {
    if (schemaSpecCase_ == 3) {
      schemaSpecCase_ = 0;
      schemaSpec_ = null;
    }
    schemaBuilder_.clear();
  } else if (schemaSpecCase_ == 3) {
    schemaSpecCase_ = 0;
    schemaSpec_ = null;
    onChanged();
  }
  return this;
}
/**
 * Ad-hoc schema against which to validate
 *
 * <code>.google.pubsub.v1.Schema schema = 3;</code>
 */
public com.google.pubsub.v1.Schema.Builder getSchemaBuilder() {
  return getSchemaFieldBuilder().getBuilder();
}
/**
 * Ad-hoc schema against which to validate
 *
 * <code>.google.pubsub.v1.Schema schema = 3;</code>
 */
@java.lang.Override
public com.google.pubsub.v1.SchemaOrBuilder getSchemaOrBuilder() {
  if (schemaSpecCase_ == 3) {
    return schemaBuilder_ != null
        ? schemaBuilder_.getMessageOrBuilder()
        : (com.google.pubsub.v1.Schema) schemaSpec_;
  }
  return com.google.pubsub.v1.Schema.getDefaultInstance();
}
/**
 * Ad-hoc schema against which to validate
 *
 * <code>.google.pubsub.v1.Schema schema = 3;</code>
 */
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.pubsub.v1.Schema,
        com.google.pubsub.v1.Schema.Builder,
        com.google.pubsub.v1.SchemaOrBuilder>
    getSchemaFieldBuilder() {
  if (schemaBuilder_ == null) {
    // Seed the builder with the current value (or the default instance when
    // the oneof holds a different member), then hand ownership of the value
    // over to the builder.
    if (schemaSpecCase_ != 3) {
      schemaSpec_ = com.google.pubsub.v1.Schema.getDefaultInstance();
    }
    schemaBuilder_ =
        new com.google.protobuf.SingleFieldBuilderV3<
            com.google.pubsub.v1.Schema,
            com.google.pubsub.v1.Schema.Builder,
            com.google.pubsub.v1.SchemaOrBuilder>(
            (com.google.pubsub.v1.Schema) schemaSpec_, getParentForChildren(), isClean());
    schemaSpec_ = null;
  }
  schemaSpecCase_ = 3;
  onChanged();
  return schemaBuilder_;
}
private com.google.protobuf.ByteString message_ = com.google.protobuf.ByteString.EMPTY;
/**
 * Message to validate against the provided `schema_spec`.
 *
 * <code>bytes message = 4;</code>
 *
 * @return The message.
 */
@java.lang.Override
public com.google.protobuf.ByteString getMessage() {
  return message_;
}
/**
 * Message to validate against the provided `schema_spec`.
 *
 * <code>bytes message = 4;</code>
 *
 * @param value The message to set.
 * @return This builder for chaining.
 */
public Builder setMessage(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  message_ = value;
  onChanged();
  return this;
}
/**
 * Message to validate against the provided `schema_spec`.
 *
 * <code>bytes message = 4;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearMessage() {
  // Resets to the default instance's value (the empty ByteString).
  message_ = getDefaultInstance().getMessage();
  onChanged();
  return this;
}
// Stored as the raw wire value so unrecognized enum numbers survive round-trips.
private int encoding_ = 0;
/**
 * The encoding expected for messages
 *
 * <code>.google.pubsub.v1.Encoding encoding = 5;</code>
 *
 * @return The enum numeric value on the wire for encoding.
 */
@java.lang.Override
public int getEncodingValue() {
  return encoding_;
}
/**
 * The encoding expected for messages
 *
 * <code>.google.pubsub.v1.Encoding encoding = 5;</code>
 *
 * @param value The enum numeric value on the wire for encoding to set.
 * @return This builder for chaining.
 */
public Builder setEncodingValue(int value) {
  encoding_ = value;
  onChanged();
  return this;
}
/**
 * The encoding expected for messages
 *
 * <code>.google.pubsub.v1.Encoding encoding = 5;</code>
 *
 * @return The encoding.
 */
@java.lang.Override
public com.google.pubsub.v1.Encoding getEncoding() {
  @SuppressWarnings("deprecation")
  com.google.pubsub.v1.Encoding mapped = com.google.pubsub.v1.Encoding.valueOf(encoding_);
  // Wire values without a known enum constant map to UNRECOGNIZED.
  return mapped == null ? com.google.pubsub.v1.Encoding.UNRECOGNIZED : mapped;
}
/**
 * The encoding expected for messages
 *
 * <code>.google.pubsub.v1.Encoding encoding = 5;</code>
 *
 * @param value The encoding to set.
 * @return This builder for chaining.
 */
public Builder setEncoding(com.google.pubsub.v1.Encoding value) {
  if (value == null) {
    throw new NullPointerException();
  }
  encoding_ = value.getNumber();
  onChanged();
  return this;
}
/**
 * The encoding expected for messages
 *
 * <code>.google.pubsub.v1.Encoding encoding = 5;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearEncoding() {
  encoding_ = 0;
  onChanged();
  return this;
}
// Unknown-field handling is inherited unchanged; overridden only to narrow the
// return type to Builder for chaining.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.pubsub.v1.ValidateMessageRequest)
}
// @@protoc_insertion_point(class_scope:google.pubsub.v1.ValidateMessageRequest)
// Singleton default instance and the parser shared by all parseFrom overloads.
private static final com.google.pubsub.v1.ValidateMessageRequest DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new com.google.pubsub.v1.ValidateMessageRequest();
}
public static com.google.pubsub.v1.ValidateMessageRequest getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ValidateMessageRequest> PARSER =
    new com.google.protobuf.AbstractParser<ValidateMessageRequest>() {
      @java.lang.Override
      public ValidateMessageRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new ValidateMessageRequest(input, extensionRegistry);
      }
    };
public static com.google.protobuf.Parser<ValidateMessageRequest> parser() {
  return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ValidateMessageRequest> getParserForType() {
  return PARSER;
}
@java.lang.Override
public com.google.pubsub.v1.ValidateMessageRequest getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
| |
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.ui;
import com.intellij.icons.AllIcons;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.actionSystem.CustomShortcutSet;
import com.intellij.openapi.actionSystem.ShortcutSet;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.Experiments;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.fileChooser.FileChooser;
import com.intellij.openapi.fileChooser.FileChooserDescriptor;
import com.intellij.openapi.keymap.KeymapUtil;
import com.intellij.openapi.project.DumbAwareAction;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.IconLoader;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.LocalFileSystem;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.wm.IdeFocusManager;
import com.intellij.ui.GuiUtils;
import com.intellij.ui.UIBundle;
import com.intellij.ui.components.fields.ExtendableTextComponent;
import com.intellij.util.ui.UIUtil;
import com.intellij.util.ui.accessibility.ScreenReader;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.InputEvent;
import java.awt.event.KeyEvent;
public class ComponentWithBrowseButton<Comp extends JComponent> extends JPanel implements Disposable {
private static final Logger LOG = Logger.getInstance(ComponentWithBrowseButton.class);
private final Comp myComponent;  // the wrapped child component (text field, combo box, ...)
private final FixedSizeButton myBrowseButton;  // the browse ("...") button paired with the component
private boolean myButtonEnabled = true;  // tracked separately so the button can be disabled independently of the panel
/**
 * Wraps {@code component} and wires a browse button to it.  When the component is an
 * {@link ExtendableTextComponent} and the "inline.browse.button" experiment is on, the
 * button is rendered as an inline extension icon instead of a separate button on the EAST side.
 *
 * @param browseActionListener listener invoked when the browse action fires; may be null
 */
public ComponentWithBrowseButton(@NotNull Comp component, @Nullable ActionListener browseActionListener) {
  super(new BorderLayout(SystemInfo.isMac || UIUtil.isUnderDarcula() ? 0 : 2, 0));
  myComponent = component;
  // required! otherwise JPanel will occasionally gain focus instead of the component
  setFocusable(false);
  boolean inlineBrowseButton = myComponent instanceof ExtendableTextComponent && Experiments.isFeatureEnabled("inline.browse.button");
  if (inlineBrowseButton) {
    ((ExtendableTextComponent)myComponent).addExtension(ExtendableTextComponent.Extension.create(
      getDefaultIcon(), getHoveredIcon(),
      UIBundle.message("component.with.browse.button.browse.button.tooltip.text"),
      this::notifyActionListeners));
    // Shift+Enter triggers the browse action even though the button itself is not shown.
    new DumbAwareAction() {
      @Override
      public void actionPerformed(@NotNull AnActionEvent e) {
        notifyActionListeners();
      }
    }.registerCustomShortcutSet(new CustomShortcutSet(KeyStroke.getKeyStroke(KeyEvent.VK_ENTER, InputEvent.SHIFT_DOWN_MASK)), myComponent);
  }
  add(myComponent, BorderLayout.CENTER);
  // The button is created even in inline mode: it still carries the action listeners
  // that notifyActionListeners() dispatches to; it is just never added to the layout.
  myBrowseButton = new FixedSizeButton(myComponent);
  if (browseActionListener != null) {
    myBrowseButton.addActionListener(browseActionListener);
  }
  if (!inlineBrowseButton) {
    add(myBrowseButton, BorderLayout.EAST);
  }
  myBrowseButton.setToolTipText(UIBundle.message("component.with.browse.button.browse.button.tooltip.text"));
  // FixedSizeButton isn't focusable but it should be selectable via keyboard.
  if (ApplicationManager.getApplication() != null) { // avoid crash at design time
    new MyDoClickAction(myBrowseButton).registerShortcut(myComponent);
  }
  if (ScreenReader.isActive()) {
    myBrowseButton.setFocusable(true);
    myBrowseButton.getAccessibleContext().setAccessibleName("Browse");
  }
}
/** Icon used for the inline browse extension; overridable by subclasses. */
@NotNull
protected Icon getDefaultIcon() {
  return AllIcons.General.OpenDisk;
}
/** Hover-state icon for the inline browse extension; overridable by subclasses. */
@NotNull
protected Icon getHoveredIcon() {
  return AllIcons.General.OpenDiskHover;
}
// Fires a synthetic ActionEvent at every listener registered on the (possibly
// hidden) browse button -- this is how the inline extension and the Shift+Enter
// shortcut reuse the button's listener list.
private void notifyActionListeners() {
  ActionEvent event = new ActionEvent(myComponent, ActionEvent.ACTION_PERFORMED, "action");
  for (ActionListener listener: myBrowseButton.getActionListeners()) listener.actionPerformed(event);
}
/** @return the wrapped child component passed to the constructor. */
@NotNull
public final Comp getChildComponent() {
  return myComponent;
}
/**
 * Sizes the child component to fit roughly {@code charCount} characters and widens the
 * panel's preferred size to also accommodate the browse button.
 */
public void setTextFieldPreferredWidth(final int charCount) {
  JComponent comp = getChildComponent();
  Dimension size = GuiUtils.getSizeByChars(charCount, comp);
  comp.setPreferredSize(size);
  Dimension preferredSize = myBrowseButton.getPreferredSize();
  // On Aqua/Win10 L&F the button height already matches; elsewhere add 2px of slack.
  boolean keepHeight = UIUtil.isUnderAquaLookAndFeel() || UIUtil.isUnderWin10LookAndFeel();
  preferredSize.setSize(size.width + preferredSize.width + 2,
                        keepHeight ? preferredSize.height : preferredSize.height + 2);
  setPreferredSize(preferredSize);
}
@Override
public void setEnabled(boolean enabled) {
  super.setEnabled(enabled);
  // The button is enabled only when both the panel and the button-specific flag allow it.
  myBrowseButton.setEnabled(enabled && myButtonEnabled);
  myComponent.setEnabled(enabled);
}
/** Enables/disables only the browse button, independently of the panel's enabled state. */
public void setButtonEnabled(boolean buttonEnabled) {
  myButtonEnabled = buttonEnabled;
  setEnabled(isEnabled());  // re-apply so the combined state reaches the button
}
public void setButtonIcon(Icon icon) {
  myBrowseButton.setIcon(icon);
  myBrowseButton.setDisabledIcon(IconLoader.getDisabledIcon(icon));
}
/**
 * Adds specified {@code listener} to the browse button.
 */
public void addActionListener(ActionListener listener){
  myBrowseButton.addActionListener(listener);
}
/** Removes the specified {@code listener} from the browse button. */
public void removeActionListener(ActionListener listener) {
  myBrowseButton.removeActionListener(listener);
}
/**
 * Convenience wrapper: installs a {@link BrowseFolderActionListener} that opens a file
 * chooser configured by {@code fileChooserDescriptor} and writes the chosen path back
 * into the child component through {@code accessor}.
 */
public void addBrowseFolderListener(@Nullable @Nls(capitalization = Nls.Capitalization.Title) String title,
                                    @Nullable @Nls(capitalization = Nls.Capitalization.Sentence) String description,
                                    @Nullable Project project,
                                    FileChooserDescriptor fileChooserDescriptor,
                                    TextComponentAccessor<Comp> accessor) {
  addActionListener(new BrowseFolderActionListener<>(title, description, this, project, fileChooserDescriptor, accessor));
}
/**
 * @deprecated use {@link #addBrowseFolderListener(String, String, Project, FileChooserDescriptor, TextComponentAccessor)} instead
 */
@Deprecated
public void addBrowseFolderListener(@Nullable @Nls(capitalization = Nls.Capitalization.Title) String title,
                                    @Nullable @Nls(capitalization = Nls.Capitalization.Sentence) String description,
                                    @Nullable Project project,
                                    FileChooserDescriptor fileChooserDescriptor,
                                    TextComponentAccessor<Comp> accessor, boolean autoRemoveOnHide) {
  // autoRemoveOnHide is intentionally ignored; the non-deprecated overload is called as-is.
  addBrowseFolderListener(title, description, project, fileChooserDescriptor, accessor);
}
/**
 * @deprecated use {@link #addActionListener(ActionListener)} instead
 */
@Deprecated
public void addBrowseFolderListener(@Nullable Project project, final BrowseFolderActionListener<Comp> actionListener) {
  // `project` is unused; kept only for source compatibility with old callers.
  addActionListener(actionListener);
}
@Override
public void dispose() {
  // Detach every registered listener so the browse button does not keep
  // external references alive after this component is disposed.
  for (ActionListener listener : myBrowseButton.getActionListeners()) {
    myBrowseButton.removeActionListener(listener);
  }
}

/** @return the browse button (may not be in the layout when the inline extension is used). */
public FixedSizeButton getButton() {
  return myBrowseButton;
}
/**
 * Do not use this class directly it is public just to hack other implementation of controls similar to TextFieldWithBrowseButton.
 */
public static final class MyDoClickAction extends DumbAwareAction {
  // The button this action clicks programmatically when its shortcut fires.
  private final FixedSizeButton myBrowseButton;
  public MyDoClickAction(FixedSizeButton browseButton) {
    myBrowseButton = browseButton;
  }
  @Override
  public void update(@NotNull AnActionEvent e) {
    // Only active while the button is actually usable.
    e.getPresentation().setEnabled(myBrowseButton.isVisible() && myBrowseButton.isEnabled());
  }
  @Override
  public void actionPerformed(@NotNull AnActionEvent e){
    myBrowseButton.doClick();
  }
  /** Binds Shift+Enter on {@code textField} to click the button and advertises it in the tooltip. */
  public void registerShortcut(JComponent textField) {
    ShortcutSet shiftEnter = new CustomShortcutSet(KeyStroke.getKeyStroke(KeyEvent.VK_ENTER, InputEvent.SHIFT_DOWN_MASK));
    registerCustomShortcutSet(shiftEnter, textField);
    myBrowseButton.setToolTipText(KeymapUtil.getShortcutsText(shiftEnter.getShortcuts()));
  }
  public static void addTo(FixedSizeButton browseButton, JComponent aComponent) {
    new MyDoClickAction(browseButton).registerShortcut(aComponent);
  }
}
/**
 * Browse-button listener that opens a {@link FileChooser} configured by a
 * {@link FileChooserDescriptor}, seeds it with the path currently typed in the
 * component, and writes the chosen file's path back through a {@link TextComponentAccessor}.
 */
public static class BrowseFolderActionListener<T extends JComponent> implements ActionListener {
  private final String myTitle;        // optional chooser dialog title
  private final String myDescription;  // optional chooser dialog description
  protected ComponentWithBrowseButton<T> myTextComponent;
  private final TextComponentAccessor<? super T> myAccessor;
  private Project myProject;
  protected final FileChooserDescriptor myFileChooserDescriptor;
  public BrowseFolderActionListener(@Nullable @Nls(capitalization = Nls.Capitalization.Title) String title,
                                    @Nullable @Nls(capitalization = Nls.Capitalization.Sentence) String description,
                                    ComponentWithBrowseButton<T> textField,
                                    @Nullable Project project,
                                    FileChooserDescriptor fileChooserDescriptor,
                                    TextComponentAccessor<? super T> accessor) {
    // A single text field cannot receive multiple selections: log the misuse and
    // wrap the descriptor so it reports single-selection instead of failing later.
    if (fileChooserDescriptor != null && fileChooserDescriptor.isChooseMultiple()) {
      LOG.error("multiple selection not supported");
      fileChooserDescriptor = new FileChooserDescriptor(fileChooserDescriptor) {
        @Override
        public boolean isChooseMultiple() {
          return false;
        }
      };
    }
    myTitle = title;
    myDescription = description;
    myTextComponent = textField;
    myProject = project;
    myFileChooserDescriptor = fileChooserDescriptor;
    myAccessor = accessor;
  }
  @Nullable
  protected Project getProject() {
    return myProject;
  }
  protected void setProject(@Nullable Project project) {
    myProject = project;
  }
  @Override
  public void actionPerformed(ActionEvent e) {
    FileChooserDescriptor fileChooserDescriptor = myFileChooserDescriptor;
    // Clone before mutating so the caller's descriptor keeps its own title/description.
    if (myTitle != null || myDescription != null) {
      fileChooserDescriptor = (FileChooserDescriptor)myFileChooserDescriptor.clone();
      if (myTitle != null) {
        fileChooserDescriptor.setTitle(myTitle);
      }
      if (myDescription != null) {
        fileChooserDescriptor.setDescription(myDescription);
      }
    }
    FileChooser.chooseFile(fileChooserDescriptor, getProject(), myTextComponent, getInitialFile(), this::onFileChosen);
  }
  /**
   * Resolves the component's current text to an existing file to pre-select in the chooser.
   * Walks up the path segment by segment until an existing ancestor is found; null when
   * the text is blank or nothing on the path exists.
   */
  @Nullable
  protected VirtualFile getInitialFile() {
    String directoryName = getComponentText();
    if (StringUtil.isEmptyOrSpaces(directoryName)) {
      return null;
    }
    directoryName = FileUtil.toSystemIndependentName(directoryName);
    VirtualFile path = LocalFileSystem.getInstance().findFileByPath(expandPath(directoryName));
    while (path == null && directoryName.length() > 0) {
      int pos = directoryName.lastIndexOf('/');
      if (pos <= 0) break;
      directoryName = directoryName.substring(0, pos);
      path = LocalFileSystem.getInstance().findFileByPath(directoryName);
    }
    return path;
  }
  /** Hook for subclasses to expand macros/variables in the typed path; identity by default. */
  @NotNull
  protected String expandPath(@NotNull String path) {
    return path;
  }
  protected String getComponentText() {
    return myAccessor.getText(myTextComponent.getChildComponent()).trim();
  }
  /** Hook for subclasses to customize how the chosen file is rendered back into the component. */
  @NotNull
  protected String chosenFileToResultingText(@NotNull VirtualFile chosenFile) {
    return chosenFile.getPresentableUrl();
  }
  protected void onFileChosen(@NotNull VirtualFile chosenFile) {
    myAccessor.setText(myTextComponent.getChildComponent(), chosenFileToResultingText(chosenFile));
  }
}
@Override
public final void requestFocus() {
// Defer until pending focus activity settles, then focus the wrapped child
// component rather than this composite container itself.
IdeFocusManager.getGlobalInstance().doWhenFocusSettlesDown(() ->
IdeFocusManager.getGlobalInstance().requestFocus(myComponent, true));
}
@SuppressWarnings("deprecation")
@Override
public final void setNextFocusableComponent(Component aComponent) {
// Keep both this container's and the inner component's (deprecated) traversal
// hints in sync so Tab order is the same whichever of the two holds focus.
super.setNextFocusableComponent(aComponent);
myComponent.setNextFocusableComponent(aComponent);
}
// Event currently being re-dispatched to the child; guards against infinite
// recursion if the child routes the same key event back to this container.
private KeyEvent myCurrentEvent = null;
@Override
protected final boolean processKeyBinding(KeyStroke ks, KeyEvent e, int condition, boolean pressed) {
// Forward WHEN_FOCUSED bindings to the wrapped component exactly once per event.
if (condition == WHEN_FOCUSED && myCurrentEvent != e) {
try {
myCurrentEvent = e;
myComponent.dispatchEvent(e);
}
finally {
// Always clear the guard, even if the child's dispatch throws.
myCurrentEvent = null;
}
}
// If the child consumed the event it is handled; otherwise fall back to the
// default binding processing of this container.
if (e.isConsumed()) return true;
return super.processKeyBinding(ks, e, condition, pressed);
}
/**
 * @deprecated use {@link #addActionListener(ActionListener)} instead
 */
@Deprecated
public void addBrowseFolderListener(@Nullable Project project, final BrowseFolderActionListener<Comp> actionListener, boolean autoRemoveOnHide) {
// The project and autoRemoveOnHide parameters are ignored; the listener is
// simply attached to the browse button.
addActionListener(actionListener);
}
}
| |
/**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. See accompanying LICENSE file.
*/
package org.apache.hadoop.security.authentication.client;
import org.apache.commons.codec.binary.Base64;
import org.apache.hadoop.security.authentication.util.AuthToken;
import org.apache.hadoop.security.authentication.util.KerberosUtil;
import org.ietf.jgss.GSSContext;
import org.ietf.jgss.GSSManager;
import org.ietf.jgss.GSSName;
import org.ietf.jgss.Oid;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.security.auth.Subject;
import javax.security.auth.kerberos.KerberosKey;
import javax.security.auth.kerberos.KerberosTicket;
import javax.security.auth.login.AppConfigurationEntry;
import javax.security.auth.login.Configuration;
import javax.security.auth.login.LoginContext;
import javax.security.auth.login.LoginException;
import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;
import java.security.AccessControlContext;
import java.security.AccessController;
import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;
import java.util.HashMap;
import java.util.Map;
import static org.apache.hadoop.util.PlatformName.IBM_JAVA;
/**
* The {@link KerberosAuthenticator} implements the Kerberos SPNEGO authentication sequence.
* <p>
* It uses the default principal for the Kerberos cache (normally set via kinit).
* <p>
* It falls back to the {@link PseudoAuthenticator} if the HTTP endpoint does not trigger an SPNEGO authentication
* sequence.
*/
public class KerberosAuthenticator implements Authenticator {

  // Made final: the logger is a constant and must not be reassignable.
  private static final Logger LOG =
      LoggerFactory.getLogger(KerberosAuthenticator.class);

  /**
   * HTTP header used by the SPNEGO server endpoint during an authentication sequence.
   */
  public static final String WWW_AUTHENTICATE = "WWW-Authenticate";

  /**
   * HTTP header used by the SPNEGO client endpoint during an authentication sequence.
   */
  public static final String AUTHORIZATION = "Authorization";

  /**
   * HTTP header prefix used by the SPNEGO client/server endpoints during an authentication sequence.
   */
  public static final String NEGOTIATE = "Negotiate";

  private static final String AUTH_HTTP_METHOD = "OPTIONS";

  /*
   * Defines the Kerberos configuration that will be used to obtain the Kerberos
   * principal from the Kerberos cache (i.e. credentials obtained via kinit).
   */
  private static class KerberosConfiguration extends Configuration {

    private static final String OS_LOGIN_MODULE_NAME;
    private static final boolean windows = System.getProperty("os.name").startsWith("Windows");
    private static final boolean is64Bit = System.getProperty("os.arch").contains("64");
    private static final boolean aix = System.getProperty("os.name").equals("AIX");

    /* Return the OS login module class name, accounting for IBM vs. Oracle/OpenJDK JVMs. */
    private static String getOSLoginModuleName() {
      if (IBM_JAVA) {
        if (windows) {
          return is64Bit ? "com.ibm.security.auth.module.Win64LoginModule"
                         : "com.ibm.security.auth.module.NTLoginModule";
        } else if (aix) {
          return is64Bit ? "com.ibm.security.auth.module.AIX64LoginModule"
                         : "com.ibm.security.auth.module.AIXLoginModule";
        } else {
          return "com.ibm.security.auth.module.LinuxLoginModule";
        }
      } else {
        return windows ? "com.sun.security.auth.module.NTLoginModule"
                       : "com.sun.security.auth.module.UnixLoginModule";
      }
    }

    static {
      OS_LOGIN_MODULE_NAME = getOSLoginModuleName();
    }

    private static final AppConfigurationEntry OS_SPECIFIC_LOGIN =
        new AppConfigurationEntry(OS_LOGIN_MODULE_NAME,
                                  AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
                                  new HashMap<String, String>());

    private static final Map<String, String> USER_KERBEROS_OPTIONS = new HashMap<String, String>();

    static {
      String ticketCache = System.getenv("KRB5CCNAME");
      if (IBM_JAVA) {
        USER_KERBEROS_OPTIONS.put("useDefaultCcache", "true");
      } else {
        USER_KERBEROS_OPTIONS.put("doNotPrompt", "true");
        USER_KERBEROS_OPTIONS.put("useTicketCache", "true");
      }
      if (ticketCache != null) {
        if (IBM_JAVA) {
          // The first value searched when "useDefaultCcache" is used.
          System.setProperty("KRB5CCNAME", ticketCache);
        } else {
          USER_KERBEROS_OPTIONS.put("ticketCache", ticketCache);
        }
      }
      USER_KERBEROS_OPTIONS.put("renewTGT", "true");
    }

    private static final AppConfigurationEntry USER_KERBEROS_LOGIN =
        new AppConfigurationEntry(KerberosUtil.getKrb5LoginModuleName(),
                                  AppConfigurationEntry.LoginModuleControlFlag.OPTIONAL,
                                  USER_KERBEROS_OPTIONS);

    private static final AppConfigurationEntry[] USER_KERBEROS_CONF =
        new AppConfigurationEntry[]{OS_SPECIFIC_LOGIN, USER_KERBEROS_LOGIN};

    @Override
    public AppConfigurationEntry[] getAppConfigurationEntry(String appName) {
      return USER_KERBEROS_CONF;
    }
  }

  // State for the in-flight authentication sequence; this class is not thread-safe.
  private URL url;
  private HttpURLConnection conn;
  private Base64 base64;
  private ConnectionConfigurator connConfigurator;

  /**
   * Sets a {@link ConnectionConfigurator} instance to use for
   * configuring connections.
   *
   * @param configurator the {@link ConnectionConfigurator} instance.
   */
  @Override
  public void setConnectionConfigurator(ConnectionConfigurator configurator) {
    connConfigurator = configurator;
  }

  /**
   * Performs SPNEGO authentication against the specified URL.
   * <p>
   * If a token is given it does a NOP and returns the given token.
   * <p>
   * If no token is given, it will perform the SPNEGO authentication sequence using an
   * HTTP <code>OPTIONS</code> request.
   *
   * @param url the URl to authenticate against.
   * @param token the authentication token being used for the user.
   *
   * @throws IOException if an IO error occurred.
   * @throws AuthenticationException if an authentication error occurred.
   */
  @Override
  public void authenticate(URL url, AuthenticatedURL.Token token)
      throws IOException, AuthenticationException {
    if (!token.isSet()) {
      this.url = url;
      base64 = new Base64(0);
      conn = (HttpURLConnection) url.openConnection();
      if (connConfigurator != null) {
        conn = connConfigurator.configure(conn);
      }
      conn.setRequestMethod(AUTH_HTTP_METHOD);
      conn.connect();
      boolean needFallback = false;
      if (conn.getResponseCode() == HttpURLConnection.HTTP_OK) {
        LOG.debug("JDK performed authentication on our behalf.");
        // If the JDK already did the SPNEGO back-and-forth for
        // us, just pull out the token.
        AuthenticatedURL.extractToken(conn, token);
        if (isTokenKerberos(token)) {
          return;
        }
        // 200 OK but a non-Kerberos token: the endpoint is not SPNEGO-protected.
        needFallback = true;
      }
      if (!needFallback && isNegotiate()) {
        LOG.debug("Performing our own SPNEGO sequence.");
        doSpnegoSequence(token);
      } else {
        LOG.debug("Using fallback authenticator sequence.");
        Authenticator auth = getFallBackAuthenticator();
        // Make sure that the fall back authenticator have the same
        // ConnectionConfigurator, since the method might be overridden.
        // Otherwise the fall back authenticator might not have the information
        // to make the connection (e.g., SSL certificates)
        auth.setConnectionConfigurator(connConfigurator);
        auth.authenticate(url, token);
      }
    }
  }

  /**
   * If the specified URL does not support SPNEGO authentication, a fallback {@link Authenticator} will be used.
   * <p>
   * This implementation returns a {@link PseudoAuthenticator}.
   *
   * @return the fallback {@link Authenticator}.
   */
  protected Authenticator getFallBackAuthenticator() {
    Authenticator auth = new PseudoAuthenticator();
    if (connConfigurator != null) {
      auth.setConnectionConfigurator(connConfigurator);
    }
    return auth;
  }

  /*
   * Check if the passed token is of type "kerberos" or "kerberos-dt"
   */
  private boolean isTokenKerberos(AuthenticatedURL.Token token)
      throws AuthenticationException {
    if (token.isSet()) {
      AuthToken aToken = AuthToken.parse(token.toString());
      if (aToken.getType().equals("kerberos") ||
          aToken.getType().equals("kerberos-dt")) {
        return true;
      }
    }
    return false;
  }

  /*
   * Indicates if the response is starting a SPNEGO negotiation,
   * i.e. a 401 carrying a "WWW-Authenticate: Negotiate" challenge.
   */
  private boolean isNegotiate() throws IOException {
    boolean negotiate = false;
    if (conn.getResponseCode() == HttpURLConnection.HTTP_UNAUTHORIZED) {
      String authHeader = conn.getHeaderField(WWW_AUTHENTICATE);
      negotiate = authHeader != null && authHeader.trim().startsWith(NEGOTIATE);
    }
    return negotiate;
  }

  /**
   * Implements the SPNEGO authentication sequence interaction using the current default principal
   * in the Kerberos cache (normally set via kinit).
   *
   * @param token the authentication token being used for the user.
   *
   * @throws IOException if an IO error occurred.
   * @throws AuthenticationException if an authentication error occurred.
   */
  private void doSpnegoSequence(AuthenticatedURL.Token token) throws IOException, AuthenticationException {
    try {
      AccessControlContext context = AccessController.getContext();
      Subject subject = Subject.getSubject(context);
      // Log in only when the current Subject carries no Kerberos credentials.
      if (subject == null
          || (subject.getPrivateCredentials(KerberosKey.class).isEmpty()
              && subject.getPrivateCredentials(KerberosTicket.class).isEmpty())) {
        LOG.debug("No subject in context, logging in");
        subject = new Subject();
        LoginContext login = new LoginContext("", subject,
            null, new KerberosConfiguration());
        login.login();
      }
      if (LOG.isDebugEnabled()) {
        LOG.debug("Using subject: " + subject);
      }
      Subject.doAs(subject, new PrivilegedExceptionAction<Void>() {

        @Override
        public Void run() throws Exception {
          GSSContext gssContext = null;
          try {
            GSSManager gssManager = GSSManager.getInstance();
            String servicePrincipal = KerberosUtil.getServicePrincipal("HTTP",
                KerberosAuthenticator.this.url.getHost());
            Oid oid = KerberosUtil.getOidInstance("NT_GSS_KRB5_PRINCIPAL");
            GSSName serviceName = gssManager.createName(servicePrincipal,
                                                        oid);
            oid = KerberosUtil.getOidInstance("GSS_KRB5_MECH_OID");
            gssContext = gssManager.createContext(serviceName, oid, null,
                                                  GSSContext.DEFAULT_LIFETIME);
            gssContext.requestCredDeleg(true);
            gssContext.requestMutualAuth(true);

            byte[] inToken = new byte[0];
            byte[] outToken;
            boolean established = false;

            // Loop while the context is still not established
            while (!established) {
              outToken = gssContext.initSecContext(inToken, 0, inToken.length);
              if (outToken != null) {
                sendToken(outToken);
              }
              if (!gssContext.isEstablished()) {
                inToken = readToken();
              } else {
                established = true;
              }
            }
          } finally {
            if (gssContext != null) {
              gssContext.dispose();
              gssContext = null;
            }
          }
          return null;
        }
      });
    } catch (PrivilegedActionException ex) {
      // Unwrap so callers see the real cause, not the privileged-action wrapper.
      throw new AuthenticationException(ex.getException());
    } catch (LoginException ex) {
      throw new AuthenticationException(ex);
    }
    AuthenticatedURL.extractToken(conn, token);
  }

  /*
   * Sends the Kerberos token to the server over a fresh OPTIONS request.
   */
  private void sendToken(byte[] outToken) throws IOException {
    String token = base64.encodeToString(outToken);
    conn = (HttpURLConnection) url.openConnection();
    if (connConfigurator != null) {
      conn = connConfigurator.configure(conn);
    }
    conn.setRequestMethod(AUTH_HTTP_METHOD);
    conn.setRequestProperty(AUTHORIZATION, NEGOTIATE + " " + token);
    conn.connect();
  }

  /*
   * Retrieves the Kerberos token returned by the server.
   */
  private byte[] readToken() throws IOException, AuthenticationException {
    int status = conn.getResponseCode();
    if (status == HttpURLConnection.HTTP_OK || status == HttpURLConnection.HTTP_UNAUTHORIZED) {
      String authHeader = conn.getHeaderField(WWW_AUTHENTICATE);
      if (authHeader == null || !authHeader.trim().startsWith(NEGOTIATE)) {
        throw new AuthenticationException("Invalid SPNEGO sequence, '" + WWW_AUTHENTICATE +
                                          "' header incorrect: " + authHeader);
      }
      // Strip only the "Negotiate" prefix and trim, instead of assuming a trailing
      // space is present: a bare "Negotiate" challenge previously caused an
      // uncaught StringIndexOutOfBoundsException here. For well-formed
      // "Negotiate <token>" headers the result is identical.
      String negotiation = authHeader.trim().substring(NEGOTIATE.length()).trim();
      return base64.decode(negotiation);
    }
    throw new AuthenticationException("Invalid SPNEGO sequence, status code: " + status);
  }

}
| |
package org.apache.lucene.codecs.lucene49;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import static org.apache.lucene.codecs.lucene49.Lucene49NormsFormat.VERSION_CURRENT;
import static org.apache.lucene.codecs.lucene49.Lucene49NormsProducer.BLOCK_SIZE;
import static org.apache.lucene.codecs.lucene49.Lucene49NormsProducer.CONST_COMPRESSED;
import static org.apache.lucene.codecs.lucene49.Lucene49NormsProducer.DELTA_COMPRESSED;
import static org.apache.lucene.codecs.lucene49.Lucene49NormsProducer.TABLE_COMPRESSED;
import static org.apache.lucene.codecs.lucene49.Lucene49NormsProducer.UNCOMPRESSED;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.codecs.NormsConsumer;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.index.SegmentWriteState;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.packed.BlockPackedWriter;
import org.apache.lucene.util.packed.PackedInts;
/**
* Writer for 4.9 norms
* @deprecated for test purposes only
*/
@Deprecated
final class Lucene49NormsConsumer extends NormsConsumer {
// Output streams: "data" holds the encoded norm values, "meta" the per-field metadata.
IndexOutput data, meta;
final int maxDoc;
// Opens both outputs and writes their codec headers; on failure, partially opened
// outputs are closed via close() so no file handles leak.
Lucene49NormsConsumer(SegmentWriteState state, String dataCodec, String dataExtension, String metaCodec, String metaExtension) throws IOException {
maxDoc = state.segmentInfo.maxDoc();
boolean success = false;
try {
String dataName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, dataExtension);
data = state.directory.createOutput(dataName, state.context);
CodecUtil.writeHeader(data, dataCodec, VERSION_CURRENT);
String metaName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, metaExtension);
meta = state.directory.createOutput(metaName, state.context);
CodecUtil.writeHeader(meta, metaCodec, VERSION_CURRENT);
success = true;
} finally {
if (!success) {
IOUtils.closeWhileHandlingException(this);
}
}
}
// we explicitly use only certain bits per value and a specified format, so we statically check this will work
static {
assert PackedInts.Format.PACKED_SINGLE_BLOCK.isSupported(1);
assert PackedInts.Format.PACKED_SINGLE_BLOCK.isSupported(2);
assert PackedInts.Format.PACKED_SINGLE_BLOCK.isSupported(4);
}
// Writes one field's norms. First pass over the values computes min/max and collects
// up to 256 unique values; the chosen branch then re-iterates the values to write
// them in one of four encodings: CONST_COMPRESSED (single unique value),
// UNCOMPRESSED (byte-range values that would need 8 bpv anyway),
// TABLE_COMPRESSED (small unique-value set, packed ordinals), or
// DELTA_COMPRESSED (general fallback, block-packed deltas).
@Override
public void addNormsField(FieldInfo field, Iterable<Number> values) throws IOException {
meta.writeVInt(field.number);
long minValue = Long.MAX_VALUE;
long maxValue = Long.MIN_VALUE;
// TODO: more efficient?
NormMap uniqueValues = new NormMap();
long count = 0;
for (Number nv : values) {
if (nv == null) {
throw new IllegalStateException("illegal norms data for field " + field.name + ", got null for value: " + count);
}
final long v = nv.longValue();
minValue = Math.min(minValue, v);
maxValue = Math.max(maxValue, v);
if (uniqueValues != null) {
if (uniqueValues.add(v)) {
if (uniqueValues.size > 256) {
// Too many distinct values for table compression; stop deduplicating.
uniqueValues = null;
}
}
}
++count;
}
if (count != maxDoc) {
throw new IllegalStateException("illegal norms data for field " + field.name + ", expected " + maxDoc + " values, got " + count);
}
if (uniqueValues != null && uniqueValues.size == 1) {
// 0 bpv
meta.writeByte(CONST_COMPRESSED);
meta.writeLong(minValue);
} else if (uniqueValues != null) {
// small number of unique values: this is the typical case:
// we only use bpv=1,2,4,8
PackedInts.Format format = PackedInts.Format.PACKED_SINGLE_BLOCK;
int bitsPerValue = PackedInts.bitsRequired(uniqueValues.size-1);
if (bitsPerValue == 3) {
bitsPerValue = 4;
} else if (bitsPerValue > 4) {
bitsPerValue = 8;
}
if (bitsPerValue == 8 && minValue >= Byte.MIN_VALUE && maxValue <= Byte.MAX_VALUE) {
meta.writeByte(UNCOMPRESSED); // uncompressed byte[]
meta.writeLong(data.getFilePointer());
for (Number nv : values) {
data.writeByte(nv == null ? 0 : (byte) nv.longValue());
}
} else {
meta.writeByte(TABLE_COMPRESSED); // table-compressed
meta.writeLong(data.getFilePointer());
data.writeVInt(PackedInts.VERSION_CURRENT);
long[] decode = uniqueValues.getDecodeTable();
// upgrade to power of two sized array
int size = 1 << bitsPerValue;
data.writeVInt(size);
for (int i = 0; i < decode.length; i++) {
data.writeLong(decode[i]);
}
for (int i = decode.length; i < size; i++) {
data.writeLong(0);
}
data.writeVInt(format.getId());
data.writeVInt(bitsPerValue);
final PackedInts.Writer writer = PackedInts.getWriterNoHeader(data, format, maxDoc, bitsPerValue, PackedInts.DEFAULT_BUFFER_SIZE);
for(Number nv : values) {
writer.add(uniqueValues.getOrd(nv.longValue()));
}
writer.finish();
}
} else {
meta.writeByte(DELTA_COMPRESSED); // delta-compressed
meta.writeLong(data.getFilePointer());
data.writeVInt(PackedInts.VERSION_CURRENT);
data.writeVInt(BLOCK_SIZE);
final BlockPackedWriter writer = new BlockPackedWriter(data, BLOCK_SIZE);
for (Number nv : values) {
writer.add(nv.longValue());
}
writer.finish();
}
}
// Finishes both outputs: writes the metadata EOF marker and checksums, then closes.
// On error the outputs are still closed (suppressing secondary exceptions).
@Override
public void close() throws IOException {
boolean success = false;
try {
if (meta != null) {
meta.writeVInt(-1); // write EOF marker
CodecUtil.writeFooter(meta); // write checksum
}
if (data != null) {
CodecUtil.writeFooter(data); // write checksum
}
success = true;
} finally {
if (success) {
IOUtils.close(data, meta);
} else {
IOUtils.closeWhileHandlingException(data, meta);
}
meta = data = null;
}
}
// specialized deduplication of long->ord for norms: 99.99999% of the time this will be a single-byte range.
static class NormMap {
// we use short: at most we will add 257 values to this map before its rejected as too big above.
// Fast path: values in [-128,127] are mapped via this array (index = value + 128);
// everything else goes through the "other" hash map.
final short[] singleByteRange = new short[256];
final Map<Long,Short> other = new HashMap<Long,Short>();
// number of distinct values added so far; also the next ordinal to assign
int size;
{
// -1 marks "not yet added" in the single-byte fast path.
Arrays.fill(singleByteRange, (short)-1);
}
/** adds an item to the mapping. returns true if actually added */
public boolean add(long l) {
assert size <= 256; // once we add > 256 values, we nullify the map in addNumericField and don't use this strategy
if (l >= Byte.MIN_VALUE && l <= Byte.MAX_VALUE) {
int index = (int) (l + 128);
short previous = singleByteRange[index];
if (previous < 0) {
singleByteRange[index] = (short) size;
size++;
return true;
} else {
return false;
}
} else {
if (!other.containsKey(l)) {
other.put(l, (short)size);
size++;
return true;
} else {
return false;
}
}
}
/** gets the ordinal for a previously added item */
public int getOrd(long l) {
if (l >= Byte.MIN_VALUE && l <= Byte.MAX_VALUE) {
int index = (int) (l + 128);
return singleByteRange[index];
} else {
// NPE if something is screwed up
return other.get(l);
}
}
/** retrieves the ordinal table for previously added items */
public long[] getDecodeTable() {
long decode[] = new long[size];
for (int i = 0; i < singleByteRange.length; i++) {
short s = singleByteRange[i];
if (s >= 0) {
decode[s] = i - 128;
}
}
for (Map.Entry<Long,Short> entry : other.entrySet()) {
decode[entry.getValue()] = entry.getKey();
}
return decode;
}
}
}
| |
package com.alibaba.fastjson.parser.deserializer;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.Type;
import java.util.HashMap;
import java.util.Map;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONException;
import com.alibaba.fastjson.parser.DefaultJSONParser;
import com.alibaba.fastjson.parser.Feature;
import com.alibaba.fastjson.parser.JSONLexer;
import com.alibaba.fastjson.parser.JSONToken;
import com.alibaba.fastjson.parser.ParserConfig;
import com.alibaba.fastjson.serializer.JavaBeanSerializer;
import com.alibaba.fastjson.util.TypeUtils;
// Deserializes JSON objects into Throwable instances, reconstructing the message,
// cause chain, stack trace, and (when a matching bean deserializer exists) any
// additional fields of the concrete exception class.
public class ThrowableDeserializer extends JavaBeanDeserializer {
public ThrowableDeserializer(ParserConfig mapping, Class<?> clazz){
super(mapping, clazz, clazz);
}
@SuppressWarnings("unchecked")
public <T> T deserialze(DefaultJSONParser parser, Type type, Object fieldName) {
JSONLexer lexer = parser.lexer;
if (lexer.token() == JSONToken.NULL) {
lexer.nextToken();
return null;
}
// TypeNameRedirect means an outer @type directive already consumed the '{'.
if (parser.getResolveStatus() == DefaultJSONParser.TypeNameRedirect) {
parser.setResolveStatus(DefaultJSONParser.NONE);
} else {
if (lexer.token() != JSONToken.LBRACE) {
throw new JSONException("syntax error");
}
}
Throwable cause = null;
Class<?> exClass = null;
// The statically declared target type, if it is itself a Throwable subclass.
if (type != null && type instanceof Class) {
Class<?> clazz = (Class<?>) type;
if (Throwable.class.isAssignableFrom(clazz)) {
exClass = clazz;
}
}
String message = null;
StackTraceElement[] stackTrace = null;
// Any keys other than @type/message/cause/stackTrace are collected here and
// applied to the instance afterwards via field deserializers.
Map<String, Object> otherValues = null;
for (;;) {
// lexer.scanSymbol
String key = lexer.scanSymbol(parser.getSymbolTable());
if (key == null) {
if (lexer.token() == JSONToken.RBRACE) {
lexer.nextToken(JSONToken.COMMA);
break;
}
if (lexer.token() == JSONToken.COMMA) {
if (lexer.isEnabled(Feature.AllowArbitraryCommas)) {
continue;
}
}
// NOTE(review): when key == null and the token is neither RBRACE nor an
// allowed COMMA, control falls through to nextTokenWithColon below —
// presumably the lexer then reports the syntax error; verify for
// malformed/truncated input.
}
lexer.nextTokenWithColon(JSONToken.LITERAL_STRING);
if (JSON.DEFAULT_TYPE_KEY.equals(key)) {
if (lexer.token() == JSONToken.LITERAL_STRING) {
String exClassName = lexer.stringVal();
// checkAutoType restricts @type to Throwable subclasses (autotype safety).
exClass = parser.getConfig().checkAutoType(exClassName, Throwable.class, lexer.getFeatures());
} else {
throw new JSONException("syntax error");
}
lexer.nextToken(JSONToken.COMMA);
} else if ("message".equals(key)) {
if (lexer.token() == JSONToken.NULL) {
message = null;
} else if (lexer.token() == JSONToken.LITERAL_STRING) {
message = lexer.stringVal();
} else {
throw new JSONException("syntax error");
}
lexer.nextToken();
} else if ("cause".equals(key)) {
// Recursively deserialize the nested cause as another Throwable.
cause = deserialze(parser, null, "cause");
} else if ("stackTrace".equals(key)) {
stackTrace = parser.parseObject(StackTraceElement[].class);
} else {
if (otherValues == null) {
otherValues = new HashMap<String, Object>();
}
otherValues.put(key, parser.parse());
}
if (lexer.token() == JSONToken.RBRACE) {
lexer.nextToken(JSONToken.COMMA);
break;
}
}
Throwable ex = null;
if (exClass == null) {
// No concrete class known: fall back to a plain Exception wrapper.
ex = new Exception(message, cause);
} else {
if (!Throwable.class.isAssignableFrom(exClass)) {
throw new JSONException("type not match, not Throwable. " + exClass.getName());
}
try {
ex = createException(message, cause, exClass);
if (ex == null) {
ex = new Exception(message, cause);
}
} catch (Exception e) {
throw new JSONException("create instance error", e);
}
}
if (stackTrace != null) {
ex.setStackTrace(stackTrace);
}
// Apply remaining JSON keys to the instance through the bean field deserializers;
// keys with no matching field are silently dropped.
if (otherValues != null) {
JavaBeanDeserializer exBeanDeser = null;
if (exClass != null) {
if (exClass == clazz) {
exBeanDeser = this;
} else {
ObjectDeserializer exDeser = parser.getConfig().getDeserializer(exClass);
if (exDeser instanceof JavaBeanDeserializer) {
exBeanDeser = (JavaBeanDeserializer) exDeser;
}
}
}
if (exBeanDeser != null) {
for (Map.Entry<String, Object> entry : otherValues.entrySet()) {
String key = entry.getKey();
Object value = entry.getValue();
FieldDeserializer fieldDeserializer = exBeanDeser.getFieldDeserializer(key);
if (fieldDeserializer != null) {
fieldDeserializer.setValue(ex, value);
}
}
}
}
return (T) ex;
}
// Instantiates the exception via reflection, preferring the richest public
// constructor available: (String, Throwable), then (String), then no-arg.
// Returns null when the class exposes none of these.
private Throwable createException(String message, Throwable cause, Class<?> exClass) throws Exception {
Constructor<?> defaultConstructor = null;
Constructor<?> messageConstructor = null;
Constructor<?> causeConstructor = null;
for (Constructor<?> constructor : exClass.getConstructors()) {
Class<?>[] types = constructor.getParameterTypes();
if (types.length == 0) {
defaultConstructor = constructor;
continue;
}
if (types.length == 1 && types[0] == String.class) {
messageConstructor = constructor;
continue;
}
if (types.length == 2 && types[0] == String.class && types[1] == Throwable.class) {
causeConstructor = constructor;
continue;
}
}
if (causeConstructor != null) {
return (Throwable) causeConstructor.newInstance(message, cause);
}
if (messageConstructor != null) {
return (Throwable) messageConstructor.newInstance(message);
}
if (defaultConstructor != null) {
return (Throwable) defaultConstructor.newInstance();
}
return null;
}
// Fast-path token hint: Throwable JSON always starts with '{'.
public int getFastMatchToken() {
return JSONToken.LBRACE;
}
}
| |
/*=========================================================================
* Copyright (c) 2010-2014 Pivotal Software, Inc. All Rights Reserved.
* This product is protected by U.S. and international copyright
* and intellectual property laws. Pivotal products are covered by
* one or more patents listed at http://www.pivotal.io/patents.
*=========================================================================
*/
package com.gemstone.gemfire.internal.cache;
// DO NOT modify this class. It was generated from LeafRegionEntry.cpp
import java.util.concurrent.atomic.AtomicLongFieldUpdater;
import com.gemstone.gemfire.cache.EntryEvent;
import com.gemstone.gemfire.internal.cache.lru.EnableLRU;
import com.gemstone.gemfire.internal.cache.persistence.DiskRecoveryStore;
import com.gemstone.gemfire.distributed.internal.membership.InternalDistributedMember;
import com.gemstone.gemfire.internal.cache.versions.VersionSource;
import com.gemstone.gemfire.internal.cache.versions.VersionStamp;
import com.gemstone.gemfire.internal.cache.versions.VersionTag;
import com.gemstone.gemfire.internal.util.concurrent.CustomEntryConcurrentHashMap.HashEntry;
// macros whose definition changes this class:
// disk: DISK
// lru: LRU
// stats: STATS
// versioned: VERSIONED
// offheap: OFFHEAP
// One of the following key macros must be defined:
// key object: KEY_OBJECT
// key int: KEY_INT
// key long: KEY_LONG
// key uuid: KEY_UUID
// key string1: KEY_STRING1
// key string2: KEY_STRING2
/**
* Do not modify this class. It was generated.
* Instead modify LeafRegionEntry.cpp and then run
* bin/generateRegionEntryClasses.sh from the directory
* that contains your build.xml.
*/
public class VersionedThinDiskRegionEntryHeapLongKey extends VersionedThinDiskRegionEntryHeap {
public VersionedThinDiskRegionEntryHeapLongKey (RegionEntryContext context, long key,
Object value
) {
super(context,
(value instanceof RecoveredEntry ? null : value)
);
// DO NOT modify this class. It was generated from LeafRegionEntry.cpp
initialize(context, value);
this.key = key;
}
// DO NOT modify this class. It was generated from LeafRegionEntry.cpp
// common code
protected int hash;
private HashEntry<Object, Object> next;
@SuppressWarnings("unused")
private volatile long lastModified;
private static final AtomicLongFieldUpdater<VersionedThinDiskRegionEntryHeapLongKey> lastModifiedUpdater
= AtomicLongFieldUpdater.newUpdater(VersionedThinDiskRegionEntryHeapLongKey.class, "lastModified");
private volatile Object value;
@Override
protected final Object getValueField() {
return this.value;
}
@Override
protected void setValueField(Object v) {
this.value = v;
}
protected long getlastModifiedField() {
return lastModifiedUpdater.get(this);
}
protected boolean compareAndSetLastModifiedField(long expectedValue, long newValue) {
return lastModifiedUpdater.compareAndSet(this, expectedValue, newValue);
}
/**
* @see HashEntry#getEntryHash()
*/
public final int getEntryHash() {
return this.hash;
}
protected void setEntryHash(int v) {
this.hash = v;
}
/**
* @see HashEntry#getNextEntry()
*/
public final HashEntry<Object, Object> getNextEntry() {
return this.next;
}
/**
* @see HashEntry#setNextEntry
*/
public final void setNextEntry(final HashEntry<Object, Object> n) {
this.next = n;
}
// DO NOT modify this class. It was generated from LeafRegionEntry.cpp
// disk code
protected void initialize(RegionEntryContext context, Object value) {
diskInitialize(context, value);
}
@Override
public int updateAsyncEntrySize(EnableLRU capacityController) {
throw new IllegalStateException("should never be called");
}
// DO NOT modify this class. It was generated from LeafRegionEntry.cpp
private void diskInitialize(RegionEntryContext context, Object value) {
DiskRecoveryStore drs = (DiskRecoveryStore)context;
DiskStoreImpl ds = drs.getDiskStore();
long maxOplogSize = ds.getMaxOplogSize();
//get appropriate instance of DiskId implementation based on maxOplogSize
this.id = DiskId.createDiskId(maxOplogSize, true/* is persistence */, ds.needsLinkedList());
Helper.initialize(this, drs, value);
}
/**
* DiskId
*
* @since 5.1
*/
protected DiskId id;//= new DiskId();
public DiskId getDiskId() {
return this.id;
}
@Override
void setDiskId(RegionEntry old) {
this.id = ((AbstractDiskRegionEntry)old).getDiskId();
}
// // inlining DiskId
// // always have these fields
// /**
// * id consists of
// * most significant
// * 1 byte = users bits
// * 2-8 bytes = oplog id
// * least significant.
// *
// * The highest bit in the oplog id part is set to 1 if the oplog id
// * is negative.
// * @todo this field could be an int for an overflow only region
// */
// private long id;
// /**
// * Length of the bytes on disk.
// * This is always set. If the value is invalid then it will be set to 0.
// * The most significant bit is used by overflow to mark it as needing to be written.
// */
// protected int valueLength = 0;
// // have intOffset or longOffset
// // intOffset
// /**
// * The position in the oplog (the oplog offset) where this entry's value is
// * stored
// */
// private volatile int offsetInOplog;
// // longOffset
// /**
// * The position in the oplog (the oplog offset) where this entry's value is
// * stored
// */
// private volatile long offsetInOplog;
// // have overflowOnly or persistence
// // overflowOnly
// // no fields
// // persistent
// /** unique entry identifier * */
// private long keyId;
// DO NOT modify this class. It was generated from LeafRegionEntry.cpp
// versioned code
// Version-vector state, packed into small primitives to minimize per-entry
// memory. Entry version is 24 bits (high byte + low short); region version
// is split into a 16-bit high part and 32-bit low part.
private VersionSource memberID;
private short entryVersionLowBytes;
private short regionVersionHighBytes;
private int regionVersionLowBytes;
private byte entryVersionHighByte;
private byte distributedSystemId;
/** Reassembles the 24-bit entry version from its high-byte and low-short parts. */
public int getEntryVersion() {
return ((entryVersionHighByte << 16) & 0xFF0000) | (entryVersionLowBytes & 0xFFFF);
}
/**
 * Reassembles the region version from its high/low parts.
 * NOTE(review): the high short is sign-extended before the shift, so a
 * negative regionVersionHighBytes sets bits 48-63 as well — presumably the
 * high bytes are never negative in practice; confirm against the writer.
 */
public long getRegionVersion() {
return (((long)regionVersionHighBytes) << 32) | (regionVersionLowBytes & 0x00000000FFFFFFFFL);
}
/** Version timestamp is stored as the entry's last-modified time. */
public long getVersionTimeStamp() {
return getLastModified();
}
public void setVersionTimeStamp(long time) {
setLastModified(time);
}
public VersionSource getMemberID() {
return this.memberID;
}
/** Returns the DS id; note the backing byte is sign-extended to int here. */
public int getDistributedSystemId() {
return this.distributedSystemId;
}
// DO NOT modify this class. It was generated from LeafRegionEntry.cpp
/**
 * Copies the version information from {@code tag} into this entry's packed
 * fields, reconciling the version timestamp between entry and tag.
 *
 * Timestamp reconciliation: for a non-gateway tag from the same distributed
 * system, the larger of the two timestamps wins on both sides (the tag is
 * updated too, so it propagates the winning time). Otherwise the tag's
 * timestamp is taken unconditionally.
 */
public void setVersions(VersionTag tag) {
this.memberID = tag.getMemberID();
int eVersion = tag.getEntryVersion();
// split the 24-bit entry version into low short + high byte
this.entryVersionLowBytes = (short)(eVersion & 0xffff);
this.entryVersionHighByte = (byte)((eVersion & 0xff0000) >> 16);
this.regionVersionHighBytes = tag.getRegionVersionHighBytes();
this.regionVersionLowBytes = tag.getRegionVersionLowBytes();
if (!(tag.isGatewayTag()) && this.distributedSystemId == tag.getDistributedSystemId()) {
if (getVersionTimeStamp() <= tag.getVersionTimeStamp()) {
setVersionTimeStamp(tag.getVersionTimeStamp());
} else {
tag.setVersionTimeStamp(getVersionTimeStamp());
}
} else {
setVersionTimeStamp(tag.getVersionTimeStamp());
}
// only the low byte of the DS id is retained
this.distributedSystemId = (byte)(tag.getDistributedSystemId() & 0xff);
}
public void setMemberID(VersionSource memberID) {
this.memberID = memberID;
}
// This entry acts as its own version stamp (state is stored inline above).
@Override
public VersionStamp getVersionStamp() {
return this;
}
// DO NOT modify this class. It was generated from LeafRegionEntry.cpp
// DO NOT modify this class. It was generated from LeafRegionEntry.cpp
/**
 * Materializes this entry's packed version state into a freshly created
 * VersionTag (the inverse of {@link #setVersions}).
 */
public VersionTag asVersionTag() {
VersionTag tag = VersionTag.create(memberID);
tag.setEntryVersion(getEntryVersion());
tag.setRegionVersion(this.regionVersionHighBytes, this.regionVersionLowBytes);
tag.setVersionTimeStamp(getVersionTimeStamp());
tag.setDistributedSystemId(this.distributedSystemId);
return tag;
}
/** Delegates tag processing (conflict checks etc.) to the shared base implementation. */
public void processVersionTag(LocalRegion r, VersionTag tag,
boolean isTombstoneFromGII, boolean hasDelta,
VersionSource thisVM, InternalDistributedMember sender, boolean checkForConflicts) {
basicProcessVersionTag(r, tag, isTombstoneFromGII, hasDelta, thisVM, sender, checkForConflicts);
}
// Pure pass-through override; kept explicit so IDE call-hierarchy browsing
// resolves the chain correctly (see original note below).
@Override
public void processVersionTag(EntryEvent cacheEvent) {
// this keeps Eclipse happy. without it the sender chain becomes confused
// while browsing this code
super.processVersionTag(cacheEvent);
}
/** get rvv internal high bytes (bits 32-47 of the region version). Used by region entries for transferring to storage */
public short getRegionVersionHighBytes() {
return this.regionVersionHighBytes;
}
/** get rvv internal low bytes (bits 0-31 of the region version). Used by region entries for transferring to storage */
public int getRegionVersionLowBytes() {
return this.regionVersionLowBytes;
}
// DO NOT modify this class. It was generated from LeafRegionEntry.cpp
// key code
// Key is stored inline as a primitive long to avoid a boxed object per entry.
private final long key;
/** Returns the key; note the primitive long is autoboxed to Long here. */
@Override
public final Object getKey() {
return this.key;
}
// Avoids boxing when comparing: only Long keys can ever match this entry.
@Override
public boolean isKeyEqual(Object k) {
if (k instanceof Long) {
return ((Long) k).longValue() == this.key;
}
return false;
}
// DO NOT modify this class. It was generated from LeafRegionEntry.cpp
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.security.authorization.permission;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import com.google.common.base.Objects;
import com.google.common.base.Strings;
import com.google.common.collect.Maps;
import com.google.common.primitives.Longs;
import org.apache.jackrabbit.oak.api.PropertyState;
import org.apache.jackrabbit.oak.api.Type;
import org.apache.jackrabbit.oak.plugins.memory.PropertyStates;
import org.apache.jackrabbit.oak.plugins.nodetype.TypePredicate;
import org.apache.jackrabbit.oak.plugins.tree.TreeProvider;
import org.apache.jackrabbit.oak.spi.security.authorization.accesscontrol.AccessControlConstants;
import org.apache.jackrabbit.oak.spi.security.authorization.permission.PermissionConstants;
import org.apache.jackrabbit.oak.spi.security.authorization.restriction.Restriction;
import org.apache.jackrabbit.oak.spi.security.authorization.restriction.RestrictionProvider;
import org.apache.jackrabbit.oak.spi.security.privilege.PrivilegeBits;
import org.apache.jackrabbit.oak.spi.security.privilege.PrivilegeBitsProvider;
import org.apache.jackrabbit.oak.spi.security.privilege.PrivilegeConstants;
import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
import org.apache.jackrabbit.oak.spi.state.NodeState;
import org.apache.jackrabbit.util.Text;
import org.jetbrains.annotations.NotNull;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static com.google.common.collect.Iterables.addAll;
import static com.google.common.collect.Sets.newLinkedHashSet;
import static org.apache.jackrabbit.JcrConstants.JCR_PRIMARYTYPE;
import static org.apache.jackrabbit.oak.plugins.tree.TreeConstants.OAK_CHILD_ORDER;
/**
 * Translates the access control entries (ACEs) found below a single
 * access-controlled node into the permission store layout used by the
 * authorization evaluation, and keeps the {@code rep:numPermissions} counter
 * on each principal root in sync.
 *
 * <p>Entries are grouped by principal name; the store path for this editor is
 * derived from {@link PermissionUtil#getEntryName(String)} and hash
 * collisions between different access-controlled paths are resolved through
 * {@code c<idx>} child nodes.
 */
final class PermissionStoreEditor implements AccessControlConstants, PermissionConstants {

    private static final Logger log = LoggerFactory.getLogger(PermissionStoreEditor.class);

    /** Path of the access-controlled node ("" for the repo-level policy). */
    private final String accessControlledPath;
    /** Name of the permission-store node for {@link #accessControlledPath}. */
    private final String nodeName;
    /** ACEs keyed by principal name, in ACL order. */
    private final Map<String, List<AcEntry>> entries = Maps.newHashMap();
    private final NodeBuilder permissionRoot;

    PermissionStoreEditor(@NotNull String aclPath, @NotNull String name,
                          @NotNull NodeState node, @NotNull NodeBuilder permissionRoot,
                          @NotNull TypePredicate isACE, @NotNull TypePredicate isGrantACE,
                          @NotNull PrivilegeBitsProvider bitsProvider,
                          @NotNull RestrictionProvider restrictionProvider,
                          @NotNull TreeProvider treeProvider) {
        this.permissionRoot = permissionRoot;
        if (name.equals(REP_REPO_POLICY)) {
            accessControlledPath = "";
        } else {
            accessControlledPath = aclPath.isEmpty() ? "/" : aclPath;
        }
        nodeName = PermissionUtil.getEntryName(accessControlledPath);
        // Preserve the explicit child order; fall back to appending any children
        // not covered by oak:childOrder (the count check avoids iterating all
        // children when the ordered set is already complete).
        Set<String> orderedChildNames = newLinkedHashSet(node.getNames(OAK_CHILD_ORDER));
        long n = orderedChildNames.size();
        if (node.getChildNodeCount(n + 1) > n) {
            addAll(orderedChildNames, node.getChildNodeNames());
        }
        PrivilegeBits jcrAll = bitsProvider.getBits(PrivilegeConstants.JCR_ALL);
        int index = 0;
        for (String childName : orderedChildNames) {
            NodeState ace = node.getChildNode(childName);
            if (isACE.apply(ace)) {
                boolean isAllow = isGrantACE.apply(ace);
                PrivilegeBits privilegeBits = bitsProvider.getBits(ace.getNames(REP_PRIVILEGES));
                Set<Restriction> restrictions = restrictionProvider.readRestrictions(Strings.emptyToNull(accessControlledPath), treeProvider.createReadOnlyTree(ace));
                // jcr:all gets a dedicated entry type so its bits stay dynamic.
                AcEntry entry = (privilegeBits.equals(jcrAll)) ?
                        new JcrAllAcEntry(ace, accessControlledPath, index, isAllow, privilegeBits, restrictions) :
                        new AcEntry(ace, accessControlledPath, index, isAllow, privilegeBits, restrictions);
                entries.computeIfAbsent(entry.principalName, k -> new ArrayList<>()).add(entry);
                index++;
            }
        }
    }

    String getPath() {
        return accessControlledPath;
    }

    boolean isEmpty() {
        return entries.isEmpty();
    }

    /** Drops all principals also handled by {@code otherEditor} from this editor. */
    void removePermissionEntries(PermissionStoreEditor otherEditor) {
        entries.keySet().removeAll(otherEditor.entries.keySet());
    }

    /**
     * Removes the permission entries for this access-controlled path from each
     * affected principal root and decrements the entry counter on success.
     */
    void removePermissionEntries() {
        for (String principalName : entries.keySet()) {
            if (permissionRoot.hasChildNode(principalName)) {
                NodeBuilder principalRoot = permissionRoot.getChildNode(principalName);
                boolean removed = false;
                // find the ACL node for this path and principal
                NodeBuilder parent = principalRoot.getChildNode(nodeName);
                if (!parent.exists()) {
                    continue;
                }
                // check if the node is the correct one
                if (PermissionUtil.checkACLPath(parent, accessControlledPath)) {
                    // remove and reconnect child nodes: the first collision
                    // child ('c*') becomes the new parent, the rest hang below it
                    NodeBuilder newParent = null;
                    for (String childName : parent.getChildNodeNames()) {
                        if (childName.charAt(0) != 'c') {
                            continue;
                        }
                        NodeBuilder child = parent.getChildNode(childName);
                        if (newParent == null) {
                            newParent = child;
                        } else {
                            newParent.setChildNode(childName, child.getNodeState());
                        }
                    }
                    if (newParent != null) {
                        // replace the 'parent', which got removed
                        principalRoot.setChildNode(nodeName, newParent.getNodeState());
                        removed = true;
                    } else {
                        removed = parent.remove();
                    }
                } else {
                    // hash collision: check if any of the child nodes match
                    for (String childName : parent.getChildNodeNames()) {
                        if (childName.charAt(0) != 'c') {
                            continue;
                        }
                        NodeBuilder child = parent.getChildNode(childName);
                        if (PermissionUtil.checkACLPath(child, accessControlledPath)) {
                            removed = child.remove();
                        }
                    }
                }
                if (removed) {
                    updateNumEntries(principalName, principalRoot, -1);
                }
            } else {
                log.error("Unable to remove permission entry {}: Principal root missing.", this);
            }
        }
    }

    /**
     * Writes the collected entries into the permission store, creating the
     * principal root and path node as needed and resolving hash collisions by
     * allocating a free {@code c<idx>} child.
     */
    void updatePermissionEntries() {
        for (Map.Entry<String, List<AcEntry>> entry: entries.entrySet()) {
            String principalName = entry.getKey();
            NodeBuilder principalRoot = permissionRoot.child(principalName);
            if (!principalRoot.hasProperty(JCR_PRIMARYTYPE)) {
                principalRoot.setProperty(JCR_PRIMARYTYPE, NT_REP_PERMISSION_STORE, Type.NAME);
            }
            NodeBuilder parent = principalRoot.child(nodeName);
            if (!parent.hasProperty(JCR_PRIMARYTYPE)) {
                parent.setProperty(JCR_PRIMARYTYPE, NT_REP_PERMISSION_STORE, Type.NAME);
            }
            // check if current parent already has the correct path
            if (parent.hasProperty(REP_ACCESS_CONTROLLED_PATH)) {
                if (!PermissionUtil.checkACLPath(parent, accessControlledPath)) {
                    // hash collision, find a matching or free collision child
                    NodeBuilder child = null;
                    int idx = 0;
                    for (String childName : parent.getChildNodeNames()) {
                        if (childName.charAt(0) != 'c') {
                            continue;
                        }
                        child = parent.getChildNode(childName);
                        if (PermissionUtil.checkACLPath(child, accessControlledPath)) {
                            break;
                        }
                        child = null;
                        idx++;
                    }
                    while (child == null) {
                        String name = 'c' + String.valueOf(idx++);
                        child = parent.getChildNode(name);
                        if (child.exists()) {
                            child = null;
                        } else {
                            child = parent.child(name);
                            child.setProperty(JCR_PRIMARYTYPE, NT_REP_PERMISSION_STORE, Type.NAME);
                        }
                    }
                    parent = child;
                    parent.setProperty(REP_ACCESS_CONTROLLED_PATH, accessControlledPath);
                }
            } else {
                // new parent
                parent.setProperty(REP_ACCESS_CONTROLLED_PATH, accessControlledPath);
            }
            updateEntries(parent, entry.getValue());
            if (parent.isNew()) {
                updateNumEntries(principalName, principalRoot, +1);
            }
        }
    }

    /** Replaces all non-collision children of {@code parent} with the given entries. */
    private void updateEntries(NodeBuilder parent, List<AcEntry> list) {
        // remove old entries (collision children starting with 'c' are kept)
        for (String childName : parent.getChildNodeNames()) {
            if (childName.charAt(0) != 'c') {
                parent.getChildNode(childName).remove();
            }
        }
        for (AcEntry ace: list) {
            ace.writeToPermissionStore(parent);
        }
    }

    /**
     * Adjusts the {@code rep:numPermissions} counter on the principal root by
     * {@code cnt}; skips legacy roots that never had the counter, and removes
     * the property if it would turn negative (counter corruption).
     */
    private static void updateNumEntries(@NotNull String principalName, @NotNull NodeBuilder principalRoot, int cnt) {
        PropertyState ps = principalRoot.getProperty(REP_NUM_PERMISSIONS);
        long numEntries = ((ps == null) ? 0 : ps.getValue(Type.LONG)) + cnt;
        if (ps == null && !principalRoot.isNew()) {
            // existing principal root that doesn't have the rep:numEntries set
            return;
        } else if (numEntries < 0) {
            // numEntries unexpectedly turned negative
            log.error("NumEntries counter for principal '{}' turned negative -> removing 'rep:numPermissions' property.", principalName);
            principalRoot.removeProperty(REP_NUM_PERMISSIONS);
        } else {
            principalRoot.setProperty(REP_NUM_PERMISSIONS, numEntries, Type.LONG);
        }
    }

    /** ACE granting jcr:all; persisted with the dynamic all-bits marker. */
    private final class JcrAllAcEntry extends AcEntry {

        private JcrAllAcEntry(@NotNull NodeState node,
                              @NotNull String accessControlledPath,
                              int index, boolean isAllow,
                              @NotNull PrivilegeBits privilegeBits,
                              @NotNull Set<Restriction> restrictions) {
            super(node, accessControlledPath, index, isAllow, privilegeBits, restrictions);
        }

        @Override
        protected PropertyState getPrivilegeBitsProperty() {
            return PropertyStates.createProperty(REP_PRIVILEGE_BITS, Longs.asList(PermissionStore.DYNAMIC_ALL_BITS), Type.LONGS);
        }
    }

    /** Immutable snapshot of a single access control entry. */
    private class AcEntry {

        private final String accessControlledPath;
        private final String principalName;
        private final PrivilegeBits privilegeBits;
        private final boolean isAllow;
        private final Set<Restriction> restrictions;
        /** Position of this ACE within its ACL; used as the store node name. */
        private final int index;
        // lazily computed; -1 means "not yet computed"
        private int hashCode = -1;

        AcEntry(@NotNull NodeState node, @NotNull String accessControlledPath, int index,
                boolean isAllow, @NotNull PrivilegeBits privilegeBits,
                @NotNull Set<Restriction> restrictions) {
            this.accessControlledPath = accessControlledPath;
            this.index = index;
            this.principalName = Text.escapeIllegalJcrChars(node.getString(REP_PRINCIPAL_NAME));
            this.privilegeBits = privilegeBits;
            this.isAllow = isAllow;
            this.restrictions = restrictions;
        }

        /** Persists this entry (plus its restrictions) below {@code parent}. */
        private void writeToPermissionStore(NodeBuilder parent) {
            NodeBuilder n = parent.child(String.valueOf(index))
                    .setProperty(JCR_PRIMARYTYPE, NT_REP_PERMISSIONS, Type.NAME)
                    .setProperty(REP_IS_ALLOW, isAllow)
                    .setProperty(getPrivilegeBitsProperty());
            for (Restriction restriction : restrictions) {
                n.setProperty(restriction.getProperty());
            }
        }

        protected PropertyState getPrivilegeBitsProperty() {
            return privilegeBits.asPropertyState(REP_PRIVILEGE_BITS);
        }

        //-------------------------------------------------------------< Object >---
        @Override
        public int hashCode() {
            // NOTE(review): if the computed hash is legitimately -1 it is
            // recomputed on every call; harmless, just a cache miss.
            if (hashCode == -1) {
                hashCode = Objects.hashCode(accessControlledPath, principalName, privilegeBits, isAllow, restrictions);
            }
            return hashCode;
        }

        @Override
        public boolean equals(Object o) {
            if (o == this) {
                return true;
            }
            if (o instanceof AcEntry) {
                AcEntry other = (AcEntry) o;
                return isAllow == other.isAllow
                        && privilegeBits.equals(other.privilegeBits)
                        && principalName.equals(other.principalName)
                        && accessControlledPath.equals(other.accessControlledPath)
                        && restrictions.equals(other.restrictions);
            }
            return false;
        }

        @Override
        public String toString() {
            StringBuilder sb = new StringBuilder();
            sb.append(accessControlledPath);
            sb.append(';').append(principalName);
            sb.append(';').append(isAllow ? "allow" : "deny");
            sb.append(';').append(privilegeBits);
            sb.append(';').append(restrictions);
            return sb.toString();
        }
    }
}
| |
package com.exasol.adapter.dialects.hive;
import static com.exasol.adapter.dialects.VisitorAssertions.assertSqlNodeConvertedToAsterisk;
import static com.exasol.adapter.dialects.VisitorAssertions.assertSqlNodeConvertedToOne;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.jupiter.api.Assertions.assertThrows;
import java.util.*;
import com.exasol.adapter.metadata.*;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.CsvSource;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import com.exasol.adapter.AdapterException;
import com.exasol.adapter.AdapterProperties;
import com.exasol.adapter.dialects.*;
import com.exasol.adapter.jdbc.ConnectionFactory;
import com.exasol.adapter.sql.*;
@ExtendWith(MockitoExtension.class)
class HiveSqlGenerationVisitorTest {
    private SqlNodeVisitor<String> visitor;

    @BeforeEach
    void beforeEach(@Mock final ConnectionFactory connectionFactoryMock) {
        // Build a fresh Hive dialect + visitor per test so state cannot leak.
        final SqlDialectFactory dialectFactory = new HiveSqlDialectFactory();
        final SqlDialect dialect = dialectFactory.createSqlDialect(connectionFactoryMock,
                AdapterProperties.emptyProperties());
        final SqlGenerationContext context = new SqlGenerationContext("test_catalog", "test_schema", false);
        this.visitor = new HiveSqlGenerationVisitor(dialect, context);
    }

    @Test
    void testVisitSqlSelectListSelectStar() throws AdapterException {
        final SqlSelectList sqlSelectList = SqlSelectList.createSelectStarSelectList();
        final TableMetadata tableMetadata = new TableMetadata("", "", Collections.emptyList(), "");
        final SqlTable fromClause = new SqlTable("test_table", tableMetadata);
        final SqlNode sqlStatementSelect = SqlStatementSelect.builder().selectList(sqlSelectList).fromClause(fromClause)
                .build();
        sqlSelectList.setParent(sqlStatementSelect);
        assertSqlNodeConvertedToAsterisk(sqlSelectList, this.visitor);
    }

    @Test
    void testVisitSqlSelectListSelectStarRequiresCastBinary() throws AdapterException {
        // BINARY columns must be rendered through base64() on Hive.
        final SqlSelectList sqlSelectList = SqlSelectList.createSelectStarSelectList();
        final List<ColumnMetadata> columns = new ArrayList<>();
        columns.add(ColumnMetadata.builder().name("test_column")
                .adapterNotes("{\"jdbcDataType\":-2, \"typeName\":\"BINARY\"}")
                .type(DataType.createVarChar(10, DataType.ExaCharset.UTF8)).build());
        final TableMetadata tableMetadata = new TableMetadata("", "", columns, "");
        final SqlTable fromClause = new SqlTable("", tableMetadata);
        final SqlNode select = SqlStatementSelect.builder().selectList(sqlSelectList).fromClause(fromClause).build();
        sqlSelectList.setParent(select);
        assertThat(this.visitor.visit(sqlSelectList), equalTo("base64(`test_column`)"));
    }

    @Test
    void testVisitSqlSelectListRequiresAnyColumn() throws AdapterException {
        final SqlSelectList sqlSelectList = SqlSelectList.createAnyValueSelectList();
        assertSqlNodeConvertedToOne(sqlSelectList, this.visitor);
    }

    @Test
    void testVisitSqlSelectListSelectRegularList() throws AdapterException {
        final SqlSelectList sqlSelectList = SqlSelectList
                .createRegularSelectList(Arrays.asList(new SqlLiteralBool(true), new SqlLiteralString("string")));
        assertThat(this.visitor.visit(sqlSelectList), equalTo("true, 'string'"));
    }

    @Test
    void testVisitSqlSelectListSelectRegularListWithColumns() throws AdapterException {
        final ColumnMetadata columnMetadata1 = ColumnMetadata.builder().name("test_column").type(DataType.createBool())
                .adapterNotes("{\"jdbcDataType\":16, \"typeName\":\"BOOLEAN\"}").build();
        final ColumnMetadata columnMetadata2 = ColumnMetadata.builder().name("test_column2")
                .type(DataType.createDouble()).adapterNotes("{\"jdbcDataType\":-2, \"typeName\":\"BINARY\"}").build();
        final SqlSelectList sqlSelectList = SqlSelectList.createRegularSelectList(Arrays.asList(
                new SqlColumn(1, columnMetadata1, "test_table"), new SqlColumn(2, columnMetadata2, "test_table")));
        assertThat(this.visitor.visit(sqlSelectList),
                equalTo("`test_table`.`test_column`, base64(`test_table`.`test_column2`)"));
    }

    @Test
    void testVisitSqlSelectListSelectStarThrowsException() {
        // Empty adapter notes cannot be parsed, so the visitor must fail.
        final SqlSelectList sqlSelectList = createSqlSelectStarListWithOneColumn("",
                DataType.createVarChar(10, DataType.ExaCharset.UTF8), "test_column");
        assertThrows(SqlGenerationVisitorException.class, () -> this.visitor.visit(sqlSelectList));
    }

    /** Builds a SELECT * list over a single-column table with the given adapter notes. */
    private SqlSelectList createSqlSelectStarListWithOneColumn(final String adapterNotes, final DataType dataType,
            final String columnName) {
        final SqlSelectList selectList = SqlSelectList.createSelectStarSelectList();
        final List<ColumnMetadata> columns = new ArrayList<>();
        columns.add(ColumnMetadata.builder().name(columnName).adapterNotes(adapterNotes).type(dataType).build());
        final TableMetadata tableMetadata = new TableMetadata("", "", columns, "");
        final SqlTable fromClause = new SqlTable("", tableMetadata);
        final SqlNode sqlStatementSelect = SqlStatementSelect.builder().selectList(selectList).fromClause(fromClause)
                .build();
        selectList.setParent(sqlStatementSelect);
        return selectList;
    }

    @Test
    void testVisitSqlPredicateEqual() throws AdapterException {
        final SqlPredicateEqual sqlPredicateEqual = new SqlPredicateEqual(new SqlLiteralBool(true),
                new SqlLiteralBool(true));
        assertThat(this.visitor.visit(sqlPredicateEqual), equalTo("true = true"));
    }

    @Test
    void testVisitSqlPredicateEqualLeftNull() throws AdapterException {
        // NULL comparisons are rewritten to IS NULL regardless of operand side.
        final SqlPredicateEqual sqlPredicateEqual = new SqlPredicateEqual(new SqlLiteralNull(),
                new SqlColumn(0, ColumnMetadata.builder().name("test_column").type(DataType.createBool()).build()));
        assertThat(this.visitor.visit(sqlPredicateEqual), equalTo("`test_column` IS NULL"));
    }

    @Test
    void testVisitSqlPredicateEqualRightNull() throws AdapterException {
        final SqlPredicateEqual sqlPredicateEqual = new SqlPredicateEqual(
                new SqlColumn(0, ColumnMetadata.builder().name("test_column").type(DataType.createBool()).build()),
                new SqlLiteralNull());
        assertThat(this.visitor.visit(sqlPredicateEqual), equalTo("`test_column` IS NULL"));
    }

    @Test
    void testVisitSqlPredicateNotEqual() throws AdapterException {
        final SqlPredicateNotEqual sqlPredicateNotEqual = new SqlPredicateNotEqual(new SqlLiteralBool(true),
                new SqlLiteralBool(false));
        assertThat(this.visitor.visit(sqlPredicateNotEqual), equalTo("true <> false"));
    }

    @Test
    void testVisitSqlPredicateEqualLeftNotNull() throws AdapterException {
        final SqlPredicateNotEqual sqlPredicateNotEqual = new SqlPredicateNotEqual(new SqlLiteralNull(),
                new SqlColumn(0, ColumnMetadata.builder().name("test_column").type(DataType.createBool()).build()));
        assertThat(this.visitor.visit(sqlPredicateNotEqual), equalTo("`test_column` IS NOT NULL"));
    }

    @Test
    void testVisitSqlPredicateEqualRightNotNull() throws AdapterException {
        final SqlPredicateNotEqual sqlPredicateNotEqual = new SqlPredicateNotEqual(
                new SqlColumn(0, ColumnMetadata.builder().name("test_column").type(DataType.createBool()).build()),
                new SqlLiteralNull());
        assertThat(this.visitor.visit(sqlPredicateNotEqual), equalTo("`test_column` IS NOT NULL"));
    }

    @Test
    void testVisitSqlPredicateLikeRegexp() throws AdapterException {
        final SqlPredicateLikeRegexp sqlSelectList = new SqlPredicateLikeRegexp(new SqlLiteralString("abcd"),
                new SqlLiteralString("a_d"));
        assertThat(this.visitor.visit(sqlSelectList), equalTo("'abcd'REGEXP'a_d'"));
    }

    @CsvSource({ "CONCAT", "REPEAT", "UPPER", "LOWER" })
    @ParameterizedTest
    void testVisitSqlFunctionScalarWithCastedFunctions(final ScalarFunction scalarFunction) throws AdapterException {
        // These functions get an explicit CAST to string on Hive.
        final List<SqlNode> arguments = new ArrayList<>();
        arguments.add(new SqlLiteralDouble(10.5));
        arguments.add(new SqlLiteralDouble(10.10));
        final SqlFunctionScalar sqlFunctionScalar = new SqlFunctionScalar(scalarFunction, arguments);
        assertThat(this.visitor.visit(sqlFunctionScalar),
                equalTo("CAST(" + scalarFunction.name() + "(10.5,10.1) as string)"));
    }

    @CsvSource({ "DIV, DIV", //
            "MOD, %", //
            "BIT_AND, &", //
            "BIT_OR, |", //
            "BIT_XOR, ^" })
    @ParameterizedTest
    void testVisitSqlFunctionScalarWithChangedFunctions(final ScalarFunction scalarFunction,
            final String expectedString) throws AdapterException {
        // These functions are rewritten as infix operators on Hive.
        final List<SqlNode> arguments = new ArrayList<>();
        arguments.add(new SqlLiteralDouble(10.5));
        arguments.add(new SqlLiteralDouble(10.10));
        final SqlFunctionScalar sqlFunctionScalar = new SqlFunctionScalar(scalarFunction, arguments);
        assertThat(this.visitor.visit(sqlFunctionScalar), equalTo("10.5 " + expectedString + " 10.1"));
    }

    @Test
    void testVisitSqlFunctionScalarSubstring() throws AdapterException {
        final List<SqlNode> arguments = new ArrayList<>();
        arguments.add(new SqlLiteralString("string"));
        arguments.add(new SqlLiteralDouble(1));
        final SqlFunctionScalar sqlFunctionScalar = new SqlFunctionScalar(ScalarFunction.SUBSTR, arguments);
        assertThat(this.visitor.visit(sqlFunctionScalar), equalTo("SUBSTR('string', 1.0)"));
    }

    @Test
    void testVisitSqlFunctionScalarSubstringWithFrom() throws AdapterException {
        final List<SqlNode> arguments = new ArrayList<>();
        arguments.add(new SqlLiteralString("string"));
        arguments.add(new SqlLiteralString("FROM 4 FOR 2"));
        final SqlFunctionScalar sqlFunctionScalar = new SqlFunctionScalar(ScalarFunction.SUBSTR, arguments);
        assertThat(this.visitor.visit(sqlFunctionScalar), equalTo("SUBSTRING('string','FROM 4 FOR 2')"));
    }

    @Test
    void testVisitSqlFunctionScalarCurrentDate() throws AdapterException {
        final SqlFunctionScalar sqlFunctionScalar = new SqlFunctionScalar(ScalarFunction.CURRENT_DATE, null);
        assertThat(this.visitor.visit(sqlFunctionScalar), equalTo("CURRENT_DATE"));
    }

    @Test
    void testVisitSqlFunctionScalarDataTrunc() throws AdapterException {
        final List<SqlNode> arguments = new ArrayList<>();
        arguments.add(new SqlLiteralDate("2019-07-04"));
        arguments.add(new SqlLiteralString("MM"));
        final SqlFunctionScalar sqlFunctionScalar = new SqlFunctionScalar(ScalarFunction.DATE_TRUNC, arguments);
        assertThat(this.visitor.visit(sqlFunctionScalar), equalTo("TRUNC('MM',DATE '2019-07-04')"));
    }
}
| |
package org.jasig.cas.authentication.handler.support;
import org.jasig.cas.authentication.HandlerResult;
import org.jasig.cas.authentication.PreventedException;
import org.jasig.cas.authentication.UsernamePasswordCredential;
import org.jasig.cas.authentication.principal.Principal;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import org.springframework.util.Assert;
import javax.security.auth.callback.Callback;
import javax.security.auth.callback.CallbackHandler;
import javax.security.auth.callback.NameCallback;
import javax.security.auth.callback.PasswordCallback;
import javax.security.auth.callback.UnsupportedCallbackException;
import javax.security.auth.login.Configuration;
import javax.security.auth.login.LoginContext;
import javax.validation.constraints.NotNull;
import java.security.GeneralSecurityException;
import java.util.Set;
/**
 * JAAS Authentication Handler for CAS. This is a simple bridge from CAS'
* authentication to JAAS.
*
* <p>
* Using the JAAS Authentication Handler requires you to configure the
 * appropriate JAAS modules. You can specify the location of a jaas.conf file
* using the following VM parameter:
* <pre>
* -Djava.security.auth.login.config=$PATH_TO_JAAS_CONF/jaas.conf
* </pre>
*
* <p>
* This example jaas.conf would try Kerberos based authentication, then try LDAP
* authentication:
* <pre>
* CAS {
* com.sun.security.auth.module.Krb5LoginModule sufficient
* client=TRUE
* debug=FALSE
* useTicketCache=FALSE;
* edu.uconn.netid.jaas.LDAPLoginModule sufficient
* java.naming.provider.url="ldap://ldapserver.my.edu:389/dc=my,dc=edu"
* java.naming.security.principal="uid=jaasauth,dc=my,dc=edu"
* java.naming.security.credentials="password"
* Attribute="uid"
* startTLS="true";
* };
* </pre>
*
* @author <a href="mailto:dotmatt@uconn.edu">Matthew J. Smith</a>
* @author Marvin S. Addison
* @author Misagh Moayyed
*
* @see javax.security.auth.callback.CallbackHandler
* @see javax.security.auth.callback.PasswordCallback
* @see javax.security.auth.callback.NameCallback
* @since 3.0.0
*/
@Component("jaasAuthenticationHandler")
public class JaasAuthenticationHandler extends AbstractUsernamePasswordAuthenticationHandler {
/** If no realm is specified, we default to CAS. */
private static final String DEFAULT_REALM = "CAS";
/**
* System property key to specify kerb5 realm.
*/
private static final String SYS_PROP_KRB5_REALM = "java.security.krb5.realm";
/**
* System property key to specify kerb5 kdc.
*/
private static final String SYS_PROP_KERB5_KDC = "java.security.krb5.kdc";
/** The realm that contains the login module information. */
@NotNull
private String realm = DEFAULT_REALM;
/** System property value to overwrite the realm in krb5 config. */
private String kerberosRealmSystemProperty;
/** System property value to overwrite the kdc in krb5 config. */
private String kerberosKdcSystemProperty;
/**
* Instantiates a new Jaas authentication handler,
* and attempts to load/verify the configuration.
*/
public JaasAuthenticationHandler() {
Assert.notNull(Configuration.getConfiguration(),
"Static Configuration cannot be null. Did you remember to specify \"java.security.auth.login.config\"?");
}
@Override
protected final HandlerResult authenticateUsernamePasswordInternal(final UsernamePasswordCredential credential)
throws GeneralSecurityException, PreventedException {
if (this.kerberosKdcSystemProperty != null) {
logger.debug("Setting kerberos system property {} to {}", SYS_PROP_KERB5_KDC, this.kerberosKdcSystemProperty);
System.setProperty(SYS_PROP_KERB5_KDC, this.kerberosKdcSystemProperty);
}
if (this.kerberosRealmSystemProperty != null) {
logger.debug("Setting kerberos system property {} to {}", SYS_PROP_KRB5_REALM, this.kerberosRealmSystemProperty);
System.setProperty(SYS_PROP_KRB5_REALM, this.kerberosRealmSystemProperty);
}
final String username = credential.getUsername();
final String password = getPasswordEncoder().encode(credential.getPassword());
final LoginContext lc = new LoginContext(
this.realm,
new UsernamePasswordCallbackHandler(username, password));
try {
logger.debug("Attempting authentication for: {}", username);
lc.login();
} finally {
lc.logout();
}
Principal principal = null;
final Set<java.security.Principal> principals = lc.getSubject().getPrincipals();
if (principals != null && !principals.isEmpty()) {
final java.security.Principal secPrincipal = principals.iterator().next();
principal = this.principalFactory.createPrincipal(secPrincipal.getName());
}
return createHandlerResult(credential, principal, null);
}
@Autowired
public void setRealm(@Value("${cas.authn.jaas.realm:" + DEFAULT_REALM + '}') final String realm) {
this.realm = realm;
}
/**
* Typically, the default realm and the KDC for that realm are indicated in the Kerberos {@code krb5.conf} configuration file.
* However, if you like, you can instead specify the realm value by setting this following system property value.
* <p>If you set the realm property, you SHOULD also configure the {@link #setKerberosKdcSystemProperty(String)}.
* <p>Also note that if you set these properties, then no cross-realm authentication is possible unless
* a {@code krb5.conf} file is also provided from which the additional information required for cross-realm authentication
* may be obtained.
* <p>If you set values for these properties, then they override the default realm and KDC values specified
* in {@code krb5.conf} (if such a file is found). The {@code krb5.conf} file is still consulted if values for items
* other than the default realm and KDC are needed. If no {@code krb5.conf} file is found,
* then the default values used for these items are implementation-specific.
* @param kerberosRealmSystemProperty system property to indicate realm.
* @see <a href="http://docs.oracle.com/javase/7/docs/technotes/guides/security/jgss/tutorials/KerberosReq.html">
* Oracle documentation</a>
* @since 4.1.0
*/
@Autowired
public final void setKerberosRealmSystemProperty(@Value("${cas.authn.jaas.kerb.realm:}")
final String kerberosRealmSystemProperty) {
this.kerberosRealmSystemProperty = kerberosRealmSystemProperty;
}
/**
 * Sets the system property that points Kerberos at a specific KDC.
 * <p>Normally the default realm and its KDC come from the Kerberos {@code krb5.conf}
 * configuration file; setting this value overrides the KDC found there (if any).
 * If you set the realm property, you SHOULD also configure
 * {@link #setKerberosRealmSystemProperty(String)}.
 * <p>Note that with these properties set, no cross-realm authentication is possible
 * unless a {@code krb5.conf} file is also provided from which the additional
 * cross-realm information can be obtained. The {@code krb5.conf} file is still
 * consulted for items other than the default realm and KDC; if no such file is
 * found, the defaults for those items are implementation-specific.
 * @param kdc system property to indicate kdc
 * @see <a href="http://docs.oracle.com/javase/7/docs/technotes/guides/security/jgss/tutorials/KerberosReq.html">
 * Oracle documentation</a>
 * @since 4.1.0
 */
@Autowired
public final void setKerberosKdcSystemProperty(
        @Value("${cas.authn.jaas.kerb.kdc:}") final String kdc) {
    this.kerberosKdcSystemProperty = kdc;
}
/**
 * A simple JAAS CallbackHandler which accepts a Name String and Password
 * String in the constructor. Only NameCallbacks and PasswordCallbacks are
 * accepted in the callback array. This code based loosely on example given
 * in Sun's javadoc for CallbackHandler interface.
 */
protected static final class UsernamePasswordCallbackHandler implements CallbackHandler {

    /** The username of the principal we are trying to authenticate. */
    private final String userName;

    /** The password of the principal we are trying to authenticate. */
    private final String password;

    /**
     * Constructor accepts name and password to be used for authentication.
     *
     * @param userName name to be used for authentication
     * @param password Password to be used for authentication
     */
    protected UsernamePasswordCallbackHandler(final String userName,
                                              final String password) {
        this.userName = userName;
        this.password = password;
    }

    /**
     * Answers NameCallbacks with the configured username and PasswordCallbacks
     * with the configured password; any other callback type is rejected.
     *
     * @param callbacks callbacks supplied by the underlying LoginModule
     * @throws UnsupportedCallbackException if a callback is neither a
     *         NameCallback nor a PasswordCallback
     */
    @Override
    public void handle(final Callback[] callbacks)
            throws UnsupportedCallbackException {
        for (final Callback callback : callbacks) {
            // Use instanceof rather than exact-class comparison so that
            // subclasses of NameCallback/PasswordCallback (which some login
            // modules supply) are handled instead of being rejected. This is
            // the pattern documented on the CallbackHandler interface.
            if (callback instanceof NameCallback) {
                ((NameCallback) callback).setName(this.userName);
            } else if (callback instanceof PasswordCallback) {
                ((PasswordCallback) callback).setPassword(this.password
                        .toCharArray());
            } else {
                throw new UnsupportedCallbackException(callback,
                        "Unrecognized Callback");
            }
        }
    }
}
}
| |
/*
* Copyright 2010-2012 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.rds.model;
import com.amazonaws.AmazonWebServiceRequest;
/**
* Container for the parameters to the {@link com.amazonaws.services.rds.AmazonRDS#authorizeDBSecurityGroupIngress(AuthorizeDBSecurityGroupIngressRequest) AuthorizeDBSecurityGroupIngress operation}.
* <p>
* Enables ingress to a DBSecurityGroup using one of two forms of authorization. First, EC2 or VPC Security Groups can be added to the DBSecurityGroup
* if the application using the database is running on EC2 or VPC instances. Second, IP ranges are available if the application accessing your database
* is running on the Internet. Required parameters for this API are one of CIDR range, EC2SecurityGroupId for VPC, or (EC2SecurityGroupOwnerId and either
* EC2SecurityGroupName or EC2SecurityGroupId for non-VPC).
* </p>
* <p>
* <b>NOTE:</b> You cannot authorize ingress from an EC2 security group in one Region to an Amazon RDS DB Instance in another. You cannot authorize
* ingress from a VPC security group in one VPC to an Amazon RDS DB Instance in another.
* </p>
* <p>
* For an overview of CIDR ranges, go to the <a href="http://en.wikipedia.org/wiki/Classless_Inter-Domain_Routing"> Wikipedia Tutorial </a> .
* </p>
*
* @see com.amazonaws.services.rds.AmazonRDS#authorizeDBSecurityGroupIngress(AuthorizeDBSecurityGroupIngressRequest)
*/
public class AuthorizeDBSecurityGroupIngressRequest extends AmazonWebServiceRequest {

    /** The name of the DB Security Group to add authorization to. */
    private String dBSecurityGroupName;

    /** The IP range (CIDR) to authorize. */
    private String cIDRIP;

    /**
     * Name of the EC2 Security Group to authorize. For VPC DB Security Groups,
     * {@code EC2SecurityGroupId} must be provided. Otherwise, EC2SecurityGroupOwnerId
     * and either {@code EC2SecurityGroupName} or {@code EC2SecurityGroupId} must be provided.
     */
    private String eC2SecurityGroupName;

    /**
     * Id of the EC2 Security Group to authorize. For VPC DB Security Groups,
     * {@code EC2SecurityGroupId} must be provided. Otherwise, EC2SecurityGroupOwnerId
     * and either {@code EC2SecurityGroupName} or {@code EC2SecurityGroupId} must be provided.
     */
    private String eC2SecurityGroupId;

    /**
     * AWS Account Number of the owner of the EC2 Security Group named in the
     * EC2SecurityGroupName parameter. The AWS Access Key ID is not an acceptable value.
     */
    private String eC2SecurityGroupOwnerId;

    /**
     * Default constructor. Callers should use the setter or fluent setter
     * (with...) methods to initialize this object after creating it.
     */
    public AuthorizeDBSecurityGroupIngressRequest() {}

    /**
     * Constructs a request targeting the given DB Security Group. Use the
     * setter or fluent setter (with...) methods to initialize any additional members.
     *
     * @param dBSecurityGroupName the name of the DB Security Group to add authorization to
     */
    public AuthorizeDBSecurityGroupIngressRequest(String dBSecurityGroupName) {
        this.dBSecurityGroupName = dBSecurityGroupName;
    }

    /**
     * @return the name of the DB Security Group to add authorization to
     */
    public String getDBSecurityGroupName() {
        return dBSecurityGroupName;
    }

    /**
     * @param dBSecurityGroupName the name of the DB Security Group to add authorization to
     */
    public void setDBSecurityGroupName(String dBSecurityGroupName) {
        this.dBSecurityGroupName = dBSecurityGroupName;
    }

    /**
     * Fluent variant of {@link #setDBSecurityGroupName(String)}.
     *
     * @param dBSecurityGroupName the name of the DB Security Group to add authorization to
     * @return this request, for method chaining
     */
    public AuthorizeDBSecurityGroupIngressRequest withDBSecurityGroupName(String dBSecurityGroupName) {
        setDBSecurityGroupName(dBSecurityGroupName);
        return this;
    }

    /**
     * @return the IP range to authorize
     */
    public String getCIDRIP() {
        return cIDRIP;
    }

    /**
     * @param cIDRIP the IP range to authorize
     */
    public void setCIDRIP(String cIDRIP) {
        this.cIDRIP = cIDRIP;
    }

    /**
     * Fluent variant of {@link #setCIDRIP(String)}.
     *
     * @param cIDRIP the IP range to authorize
     * @return this request, for method chaining
     */
    public AuthorizeDBSecurityGroupIngressRequest withCIDRIP(String cIDRIP) {
        setCIDRIP(cIDRIP);
        return this;
    }

    /**
     * @return name of the EC2 Security Group to authorize (see field notes on
     *         VPC vs. non-VPC requirements)
     */
    public String getEC2SecurityGroupName() {
        return eC2SecurityGroupName;
    }

    /**
     * @param eC2SecurityGroupName name of the EC2 Security Group to authorize
     */
    public void setEC2SecurityGroupName(String eC2SecurityGroupName) {
        this.eC2SecurityGroupName = eC2SecurityGroupName;
    }

    /**
     * Fluent variant of {@link #setEC2SecurityGroupName(String)}.
     *
     * @param eC2SecurityGroupName name of the EC2 Security Group to authorize
     * @return this request, for method chaining
     */
    public AuthorizeDBSecurityGroupIngressRequest withEC2SecurityGroupName(String eC2SecurityGroupName) {
        setEC2SecurityGroupName(eC2SecurityGroupName);
        return this;
    }

    /**
     * @return id of the EC2 Security Group to authorize (required for VPC DB
     *         Security Groups)
     */
    public String getEC2SecurityGroupId() {
        return eC2SecurityGroupId;
    }

    /**
     * @param eC2SecurityGroupId id of the EC2 Security Group to authorize
     */
    public void setEC2SecurityGroupId(String eC2SecurityGroupId) {
        this.eC2SecurityGroupId = eC2SecurityGroupId;
    }

    /**
     * Fluent variant of {@link #setEC2SecurityGroupId(String)}.
     *
     * @param eC2SecurityGroupId id of the EC2 Security Group to authorize
     * @return this request, for method chaining
     */
    public AuthorizeDBSecurityGroupIngressRequest withEC2SecurityGroupId(String eC2SecurityGroupId) {
        setEC2SecurityGroupId(eC2SecurityGroupId);
        return this;
    }

    /**
     * @return AWS account number of the owner of the EC2 Security Group named
     *         in EC2SecurityGroupName (not an AWS Access Key ID)
     */
    public String getEC2SecurityGroupOwnerId() {
        return eC2SecurityGroupOwnerId;
    }

    /**
     * @param eC2SecurityGroupOwnerId AWS account number of the owner of the
     *        EC2 Security Group named in EC2SecurityGroupName
     */
    public void setEC2SecurityGroupOwnerId(String eC2SecurityGroupOwnerId) {
        this.eC2SecurityGroupOwnerId = eC2SecurityGroupOwnerId;
    }

    /**
     * Fluent variant of {@link #setEC2SecurityGroupOwnerId(String)}.
     *
     * @param eC2SecurityGroupOwnerId AWS account number of the owner of the
     *        EC2 Security Group named in EC2SecurityGroupName
     * @return this request, for method chaining
     */
    public AuthorizeDBSecurityGroupIngressRequest withEC2SecurityGroupOwnerId(String eC2SecurityGroupOwnerId) {
        setEC2SecurityGroupOwnerId(eC2SecurityGroupOwnerId);
        return this;
    }

    // Appends "Label: value, " when the value is non-null; produces exactly the
    // same fragments as the historical generated toString().
    private static void appendIfSet(StringBuilder sb, String label, String value) {
        if (value != null) {
            sb.append(label).append(": ").append(value).append(", ");
        }
    }

    // Null-safe hashCode contribution, matching the historical (v == null ? 0 : v.hashCode()).
    private static int hashOrZero(Object value) {
        return value == null ? 0 : value.hashCode();
    }

    // Null-safe equality, matching the historical xor/equals null handling.
    private static boolean bothEqual(Object a, Object b) {
        return a == null ? b == null : a.equals(b);
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @return a string representation of this object
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        appendIfSet(sb, "DBSecurityGroupName", dBSecurityGroupName);
        appendIfSet(sb, "CIDRIP", cIDRIP);
        appendIfSet(sb, "EC2SecurityGroupName", eC2SecurityGroupName);
        appendIfSet(sb, "EC2SecurityGroupId", eC2SecurityGroupId);
        appendIfSet(sb, "EC2SecurityGroupOwnerId", eC2SecurityGroupOwnerId);
        sb.append("}");
        return sb.toString();
    }

    @Override
    public int hashCode() {
        // 31-chained hash over the getters; identical values to the historical
        // hand-rolled implementation.
        int hashCode = 1;
        hashCode = 31 * hashCode + hashOrZero(getDBSecurityGroupName());
        hashCode = 31 * hashCode + hashOrZero(getCIDRIP());
        hashCode = 31 * hashCode + hashOrZero(getEC2SecurityGroupName());
        hashCode = 31 * hashCode + hashOrZero(getEC2SecurityGroupId());
        hashCode = 31 * hashCode + hashOrZero(getEC2SecurityGroupOwnerId());
        return hashCode;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof is false for null, so no separate null check is needed.
        if (!(obj instanceof AuthorizeDBSecurityGroupIngressRequest)) {
            return false;
        }
        AuthorizeDBSecurityGroupIngressRequest that = (AuthorizeDBSecurityGroupIngressRequest) obj;
        return bothEqual(getDBSecurityGroupName(), that.getDBSecurityGroupName())
                && bothEqual(getCIDRIP(), that.getCIDRIP())
                && bothEqual(getEC2SecurityGroupName(), that.getEC2SecurityGroupName())
                && bothEqual(getEC2SecurityGroupId(), that.getEC2SecurityGroupId())
                && bothEqual(getEC2SecurityGroupOwnerId(), that.getEC2SecurityGroupOwnerId());
    }
}
| |
package com.motodb.view;
import java.time.LocalDate;
import java.util.Arrays;
import com.motodb.controller.ChampionshipManager;
import com.motodb.controller.ChampionshipManagerImpl;
import com.motodb.controller.ClaxManager;
import com.motodb.controller.ClaxManagerImpl;
import com.motodb.controller.SessionManager;
import com.motodb.controller.SessionManagerImpl;
import com.motodb.controller.WeekendManager;
import com.motodb.controller.WeekendManagerImpl;
import com.motodb.model.Clax;
import com.motodb.model.Session;
import com.motodb.model.Weekend;
import com.motodb.view.alert.AlertTypes;
import com.motodb.view.alert.AlertTypesImpl;
import javafx.beans.property.ReadOnlyStringWrapper;
import javafx.collections.FXCollections;
import javafx.collections.ObservableList;
import javafx.fxml.FXML;
import javafx.scene.control.Button;
import javafx.scene.control.ComboBox;
import javafx.scene.control.DateCell;
import javafx.scene.control.DatePicker;
import javafx.scene.control.TableColumn;
import javafx.scene.control.TableView;
import javafx.scene.control.TextField;
import javafx.scene.layout.VBox;
import javafx.util.Callback;
public class AddSessionControl extends ScreenControl {
    // Alert panel to surface controller exceptions to the user
    private final AlertTypes alert = new AlertTypesImpl();
    // Controllers (data-access managers)
    private final SessionManager sessionManager = new SessionManagerImpl();
    private final ClaxManager classManager = new ClaxManagerImpl();
    private final WeekendManager weekendManager = new WeekendManagerImpl();
    private final ChampionshipManager championshipManager = new ChampionshipManagerImpl();
    // Guard so the weekendBox selection listener is attached exactly once.
    // Previously updatePossibleDates() attached a fresh listener on every year
    // selection, so listeners accumulated and all fired on each weekend change.
    private boolean dateListenerAttached;
    @FXML
    private TableView<Session> sessionsTable;
    @FXML
    private TableColumn<Session, String> yearColumn, codeColumn, typeColumn, classColumn, startDateColumn,
            airTempColumn, groundTempColumn, humColumn, conditionsColumn, lapsColumn, durationColumn;
    @FXML
    private TextField durationField, lapsField, humidityField, groundTemperatureField, airTemperatureField, codeField,
            conditionsField, searchField;
    @FXML
    private ComboBox<String> typeBox, yearBox, codeBox;
    @FXML
    private ComboBox<Weekend> weekendBox;
    @FXML
    private ComboBox<Clax> classBox;
    @FXML
    private DatePicker startDate;
    @FXML
    private Button delete;
    @FXML
    private VBox vBoxFields;

    /**
     * Session kinds with the session codes each one allows.
     * NOTE(review): the code list for Warm Up is "WP" while the constant is
     * named WUP — confirm against the database values.
     */
    public enum SessionType {
        WUP("Warm Up", FXCollections.observableArrayList(Arrays.asList("WP"))), FP("Free Practice",
                FXCollections.observableArrayList(Arrays.asList("FP1", "FP2", "FP3", "FP4"))), Q("Qualification",
                        FXCollections.observableArrayList(Arrays.asList("Q1", "Q2"))), Gara("Race",
                                FXCollections.observableArrayList(Arrays.asList("RACE")));

        /** Human-readable session type shown in the type combo box. */
        private String type;
        /** Session codes selectable for this type. */
        private ObservableList<String> codes;

        private SessionType(String type, ObservableList<String> codes) {
            this.type = type;
            this.codes = codes;
        }

        public String getType() {
            return type;
        }

        public ObservableList<String> getCodes() {
            return codes;
        }
    }

    /**
     * Called after the fxml file has been loaded; this method initializes the
     * fxml control class: populates the combo boxes, wires the table columns
     * and installs the selection listeners.
     */
    public void initialize() {
        classBox.setItems(classManager.getClasses());
        championshipManager.getChampionships().forEach(l -> yearBox.getItems().add(Integer.toString(l.getYear())));
        ObservableList<String> o = FXCollections.observableArrayList();
        for (SessionType type : SessionType.values()) {
            o.add(type.getType());
        }
        typeBox.setItems(o);
        // Dependent inputs stay disabled until a year/type is chosen.
        classBox.setDisable(true);
        codeBox.setDisable(true);
        weekendBox.setDisable(true);
        startDate.setDisable(true);
        // Initialize the table columns from the Session properties.
        yearColumn.setCellValueFactory(cellData -> cellData.getValue().yearProperty().asString());
        codeColumn.setCellValueFactory(cellData -> cellData.getValue().codeProperty());
        typeColumn.setCellValueFactory(cellData -> cellData.getValue().typeProperty());
        classColumn.setCellValueFactory(cellData -> cellData.getValue().classNameProperty());
        startDateColumn.setCellValueFactory(
                cellData -> new ReadOnlyStringWrapper(cellData.getValue().getStartDate().toString()));
        airTempColumn.setCellValueFactory(cellData -> cellData.getValue().airTempProperty().asString());
        groundTempColumn.setCellValueFactory(cellData -> cellData.getValue().groundTempProperty().asString());
        humColumn.setCellValueFactory(cellData -> cellData.getValue().humidityProperty().asString());
        conditionsColumn.setCellValueFactory(cellData -> cellData.getValue().conditionsProperty());
        lapsColumn.setCellValueFactory(cellData -> cellData.getValue().lapsProperty().asString());
        durationColumn.setCellValueFactory(cellData -> cellData.getValue().durationMaxProperty());
        // Add observable list data to the table
        sessionsTable.setItems(sessionManager.getSessions());
        // Make the table columns editable by double clicking
        this.edit();
        // Use a 'searchField' to search for books in the tableView
        this.search();
        // Listen for selection changes and enable delete button
        this.update();
    }

    /**
     * Called when the user presses the 'add' button; collects the form values,
     * asks the controller to persist a new session, refreshes the table and
     * clears the form. Any failure (parse error, missing selection, DB error)
     * is reported through the alert panel.
     */
    @FXML
    private void add() {
        try {
            sessionManager.addSession(classBox.getSelectionModel().getSelectedItem().getName(),
                    Integer.parseInt(yearBox.getSelectionModel().getSelectedItem()),
                    weekendBox.getValue().getStartDate(), conditionsField.getText(),
                    Integer.parseInt(airTemperatureField.getText()), Integer.parseInt(groundTemperatureField.getText()),
                    Integer.parseInt(humidityField.getText()), java.sql.Date.valueOf(startDate.getValue()),
                    codeBox.getValue(), durationField.getText(), typeBox.getValue(),
                    Integer.parseInt(lapsField.getText()));
            sessionsTable.setItems(sessionManager.getSessions()); // Update table view
            this.clear();
        } catch (Exception e) {
            e.printStackTrace();
            alert.showWarning(e);
        }
    }

    /**
     * Intended to make table cells editable by double clicking.
     * Currently unimplemented (no-op) — TODO.
     */
    private void edit() {
    }

    /**
     * Intended to open a confirmation dialog and delete the selected element.
     * Currently unimplemented (no-op) — TODO.
     */
    @FXML
    private void delete() {
    }

    /**
     * Intended to filter the table from the search field.
     * Currently unimplemented (no-op); the sketched FilteredList/SortedList
     * approach was left here as commented-out reference code — TODO.
     */
    private void search() {/*
                            * // 1. Wrap the ObservableList in a FilteredList
                            * (initially display all data). FilteredList<Clax>
                            * filteredData = new
                            * FilteredList<>(manager.getClasses(), p -> true);
                            *
                            * // 2. Set the filter Predicate whenever the filter
                            * changes. searchField.textProperty().addListener((
                            * observable, oldValue, newValue) -> {
                            * filteredData.setPredicate(e -> { // If filter text
                            * is empty, display all persons. if (newValue ==
                            * null || newValue.isEmpty()) { return true; }
                            *
                            * // Compare first name and last name of every
                            * person with filter text. String lowerCaseFilter =
                            * newValue.toLowerCase();
                            *
                            * if (e.getName().toLowerCase().contains(
                            * lowerCaseFilter)) { return true; // Filter matches
                            * first name. } else if
                            * (e.getRules().toLowerCase().contains(
                            * lowerCaseFilter)) { return true; // Filter matches
                            * last name. } return false; // Does not match. });
                            * });
                            *
                            * // 3. Wrap the FilteredList in a SortedList.
                            * SortedList<Clax> sortedData = new
                            * SortedList<>(filteredData);
                            *
                            * // 4. Bind the SortedList comparator to the
                            * TableView comparator.
                            * sortedData.comparatorProperty().bind(classesTable.
                            * comparatorProperty());
                            *
                            * // 5. Add sorted (and filtered) data to the table.
                            * classesTable.setItems(sortedData);
                            */
    }

    /**
     * Installs the selection listeners that enable/disable the dependent
     * inputs: choosing a year enables class/weekend selection, choosing a type
     * enables the matching session codes.
     */
    private void update() {
        yearBox.getSelectionModel().selectedItemProperty().addListener((observable, oldValue, newValue) -> {
            if (newValue != null) {
                if (!weekendManager.getWeekendsFromYear(Integer.parseInt(newValue)).isEmpty()) {
                    classBox.setDisable(false);
                    classBox.setItems(classManager.getClassesFromYear(Integer.parseInt(newValue)));
                    weekendBox.setDisable(false);
                    weekendBox.setItems(weekendManager.getWeekendsFromYear(Integer.parseInt(newValue)));
                    this.updatePossibleDates();
                } else {
                    weekendBox.setDisable(true);
                    startDate.setDisable(true);
                }
            }
        });
        typeBox.getSelectionModel().selectedItemProperty().addListener((observable, oldValue, newValue) -> {
            if (newValue != null) {
                codeBox.setDisable(false);
                for (SessionType type : SessionType.values()) {
                    if (typeBox.getSelectionModel().getSelectedItem().equals(type.getType())) {
                        codeBox.setItems(type.getCodes());
                    }
                }
            }
        });
    }

    /**
     * Wires the weekend selection to the start-date picker: selecting a weekend
     * enables the picker, defaults it to the weekend's first day and disables
     * (and highlights) every date outside the weekend range.
     */
    private void updatePossibleDates() {
        // Attach only once: this method runs on every year selection, and the
        // original code stacked a new weekendBox listener each time.
        if (dateListenerAttached) {
            return;
        }
        dateListenerAttached = true;
        weekendBox.getSelectionModel().selectedItemProperty().addListener((observable, oldValue, newValue) -> {
            if (newValue != null) {
                startDate.setDisable(false);
                startDate.setValue(newValue.getStartDate().toLocalDate());
                final Callback<DatePicker, DateCell> dayCellFactory = new Callback<DatePicker, DateCell>() {
                    @Override
                    public DateCell call(final DatePicker datePicker) {
                        return new DateCell() {
                            @Override
                            public void updateItem(LocalDate item, boolean empty) {
                                super.updateItem(item, empty);
                                // Empty cells carry a null item; without this
                                // guard the isBefore() call below throws NPE.
                                if (empty || item == null) {
                                    return;
                                }
                                if (item.isBefore(newValue.getStartDate().toLocalDate())
                                        || item.isAfter(newValue.getFinishDate().toLocalDate())) {
                                    setDisable(true);
                                    setStyle("-fx-background-color: #ffc0cb;");
                                }
                            }
                        };
                    }
                };
                startDate.setDayCellFactory(dayCellFactory);
            }
        });
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pulsar.broker.service;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
import com.google.common.collect.Sets;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import java.util.UUID;
import org.apache.pulsar.broker.PulsarServerException;
import org.apache.pulsar.client.admin.PulsarAdmin;
import org.apache.pulsar.client.admin.PulsarAdminException;
import org.apache.pulsar.client.api.PulsarClientException;
import org.apache.pulsar.client.api.SubscriptionType;
import org.apache.pulsar.common.naming.TopicName;
import org.apache.pulsar.common.policies.data.DispatchRate;
import org.apache.pulsar.common.policies.data.DelayedDeliveryPolicies;
import org.apache.pulsar.common.policies.data.InactiveTopicDeleteMode;
import org.apache.pulsar.common.policies.data.InactiveTopicPolicies;
import org.apache.pulsar.common.policies.data.OffloadPoliciesImpl;
import org.apache.pulsar.common.policies.data.OffloadedReadPriority;
import org.apache.pulsar.common.policies.data.PersistencePolicies;
import org.apache.pulsar.common.policies.data.PublishRate;
import org.apache.pulsar.common.policies.data.RetentionPolicies;
import org.apache.pulsar.common.policies.data.SubscribeRate;
import org.apache.pulsar.common.policies.data.impl.BacklogQuotaImpl;
import org.awaitility.Awaitility;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
/**
* Starts 3 brokers that are in 3 different clusters
*/
@Test(groups = "broker")
public class ReplicatorTopicPoliciesTest extends ReplicatorTestBase {
@Override
@BeforeClass(alwaysRun = true, timeOut = 300000)
public void setup() throws Exception {
    // Every cluster needs system topics and topic-level policies enabled, and
    // a single namespace bundle. The setters are independent, so they are
    // grouped per property for easier scanning.
    config1.setSystemTopicEnabled(true);
    config2.setSystemTopicEnabled(true);
    config3.setSystemTopicEnabled(true);
    config1.setTopicLevelPoliciesEnabled(true);
    config2.setTopicLevelPoliciesEnabled(true);
    config3.setTopicLevelPoliciesEnabled(true);
    config1.setDefaultNumberOfNamespaceBundles(1);
    config2.setDefaultNumberOfNamespaceBundles(1);
    config3.setDefaultNumberOfNamespaceBundles(1);
    super.setup();
}
/**
 * Tears down the three-cluster fixture created by {@link #setup()};
 * simply delegates to the base-class cleanup.
 */
@Override
@AfterClass(alwaysRun = true, timeOut = 300000)
public void cleanup() throws Exception {
    super.cleanup();
}
/**
 * A backlog quota set (and later removed) on cluster 1 must replicate to
 * clusters 2 and 3.
 */
@Test
public void testReplicateQuotaTopicPolicies() throws Exception {
    final String ns = "pulsar/partitionedNs-" + UUID.randomUUID();
    final String topicName = "persistent://" + ns + "/topic" + UUID.randomUUID();
    init(ns, topicName);
    // Apply a backlog quota on cluster 1 and wait for replication.
    final BacklogQuotaImpl quota = new BacklogQuotaImpl();
    quota.setLimitSize(1);
    quota.setLimitTime(2);
    admin1.topicPolicies(true).setBacklogQuota(topicName, quota);
    Awaitility.await().untilAsserted(() ->
            assertTrue(admin2.topicPolicies(true).getBacklogQuotaMap(topicName).containsValue(quota)));
    Awaitility.await().untilAsserted(() ->
            assertTrue(admin3.topicPolicies(true).getBacklogQuotaMap(topicName).containsValue(quota)));
    // Removing the quota must replicate as well.
    admin1.topicPolicies(true).removeBacklogQuota(topicName);
    Awaitility.await().untilAsserted(() ->
            assertEquals(admin2.topicPolicies(true).getBacklogQuotaMap(topicName).size(), 0));
    Awaitility.await().untilAsserted(() ->
            assertEquals(admin3.topicPolicies(true).getBacklogQuotaMap(topicName).size(), 0));
}
/**
 * A message TTL set (and later removed) on cluster 1 must replicate to
 * clusters 2 and 3.
 */
@Test
public void testReplicateMessageTTLPolicies() throws Exception {
    final String ns = "pulsar/partitionedNs-" + UUID.randomUUID();
    final String topicName = "persistent://" + ns + "/topic" + UUID.randomUUID();
    init(ns, topicName);
    // Set a TTL on cluster 1; it should become visible on the remote clusters.
    admin1.topicPolicies(true).setMessageTTL(topicName, 10);
    Awaitility.await().ignoreExceptions().untilAsserted(() ->
            assertEquals(admin2.topicPolicies(true).getMessageTTL(topicName).intValue(), 10));
    Awaitility.await().ignoreExceptions().untilAsserted(() ->
            assertEquals(admin3.topicPolicies(true).getMessageTTL(topicName).intValue(), 10));
    // Removing the TTL must replicate as well.
    admin1.topicPolicies(true).removeMessageTTL(topicName);
    Awaitility.await().untilAsserted(() ->
            assertNull(admin2.topicPolicies(true).getMessageTTL(topicName)));
    Awaitility.await().untilAsserted(() ->
            assertNull(admin3.topicPolicies(true).getMessageTTL(topicName)));
}
/**
 * A global subscribe-rate policy set (and removed) on cluster 1 must
 * replicate to the remote clusters.
 */
@Test
public void testReplicateSubscribeRatePolicies() throws Exception {
    final String ns = "pulsar/partitionedNs-" + UUID.randomUUID();
    final String topicName = "persistent://" + ns + "/topic" + UUID.randomUUID();
    init(ns, topicName);
    // Set the global topic policy on cluster 1.
    final SubscribeRate rate = new SubscribeRate(100, 10000);
    admin1.topicPolicies(true).setSubscribeRate(topicName, rate);
    // Both remote clusters must observe it.
    untilRemoteClustersAsserted(
            admin -> assertEquals(admin.topicPolicies(true).getSubscribeRate(topicName), rate));
    // Removal must replicate as well.
    admin1.topicPolicies(true).removeSubscribeRate(topicName);
    untilRemoteClustersAsserted(admin -> assertNull(admin.topicPolicies(true).getSubscribeRate(topicName)));
}
@Test
public void testReplicateMaxMessageSizePolicies() throws Exception {
    final String ns = "pulsar/partitionedNs-" + UUID.randomUUID();
    final String topicName = "persistent://" + ns + "/topic" + UUID.randomUUID();
    init(ns, topicName);
    // A global max-message-size of 1000 set on r1 must show up on every remote cluster.
    admin1.topicPolicies(true).setMaxMessageSize(topicName, 1000);
    untilRemoteClustersAsserted(admin ->
            assertEquals(admin.topicPolicies(true).getMaxMessageSize(topicName), Integer.valueOf(1000)));
    // And its removal must replicate as well.
    admin1.topicPolicies(true).removeMaxMessageSize(topicName);
    untilRemoteClustersAsserted(admin ->
            assertNull(admin.topicPolicies(true).getMaxMessageSize(topicName)));
}
@Test
public void testReplicatePublishRatePolicies() throws Exception {
    final String ns = "pulsar/partitionedNs-" + UUID.randomUUID();
    final String topicName = "persistent://" + ns + "/topic" + UUID.randomUUID();
    init(ns, topicName);
    // Replicate a global publish-rate policy from r1 to the remote clusters.
    PublishRate expected = new PublishRate(100, 10000);
    admin1.topicPolicies(true).setPublishRate(topicName, expected);
    untilRemoteClustersAsserted(admin ->
            assertEquals(admin.topicPolicies(true).getPublishRate(topicName), expected));
    // Removal must replicate as well.
    admin1.topicPolicies(true).removePublishRate(topicName);
    untilRemoteClustersAsserted(admin ->
            assertNull(admin.topicPolicies(true).getPublishRate(topicName)));
}
@Test
public void testReplicateDeduplicationSnapshotIntervalPolicies() throws Exception {
    final String ns = "pulsar/partitionedNs-" + UUID.randomUUID();
    final String topicName = "persistent://" + ns + "/topic" + UUID.randomUUID();
    init(ns, topicName);
    // Replicate a deduplication snapshot interval of 100.
    admin1.topicPolicies(true).setDeduplicationSnapshotInterval(topicName, 100);
    untilRemoteClustersAsserted(admin ->
            assertEquals(admin.topicPolicies(true).getDeduplicationSnapshotInterval(topicName),
                    Integer.valueOf(100)));
    // Removal must replicate as well.
    admin1.topicPolicies(true).removeDeduplicationSnapshotInterval(topicName);
    untilRemoteClustersAsserted(admin ->
            assertNull(admin.topicPolicies(true).getDeduplicationSnapshotInterval(topicName)));
}
private void untilRemoteClustersAsserted(ThrowingConsumer<PulsarAdmin> condition) {
    // Wait until the assertion holds on each remote cluster's admin (r2 first, then r3).
    for (PulsarAdmin remoteAdmin : new PulsarAdmin[]{admin2, admin3}) {
        Awaitility.await().untilAsserted(() -> condition.apply(remoteAdmin));
    }
}
/**
 * A consumer-style callback that is allowed to throw, so assertion lambdas can
 * propagate checked exceptions to the awaiting caller.
 *
 * @param <I> type of the value passed to the callback
 */
@FunctionalInterface
private interface ThrowingConsumer<I> {
    void apply(I input) throws Throwable;
}
@Test
public void testReplicatePersistentPolicies() throws Exception {
    final String ns = "pulsar/partitionedNs-" + UUID.randomUUID();
    final String topicName = "persistent://" + ns + "/topic" + UUID.randomUUID();
    init(ns, topicName);
    // Replicate a persistence policy from r1 to the remote clusters.
    PersistencePolicies expected = new PersistencePolicies(5, 3, 2, 1000);
    admin1.topicPolicies(true).setPersistence(topicName, expected);
    untilRemoteClustersAsserted(admin ->
            assertEquals(admin.topicPolicies(true).getPersistence(topicName), expected));
    // Removal must replicate as well.
    admin1.topicPolicies(true).removePersistence(topicName);
    untilRemoteClustersAsserted(admin ->
            assertNull(admin.topicPolicies(true).getPersistence(topicName)));
}
@Test
public void testReplicateDeduplicationStatusPolicies() throws Exception {
    final String ns = "pulsar/partitionedNs-" + UUID.randomUUID();
    final String topicName = "persistent://" + ns + "/topic" + UUID.randomUUID();
    init(ns, topicName);
    // Enable deduplication globally; both remote clusters must observe it.
    admin1.topicPolicies(true).setDeduplicationStatus(topicName, true);
    for (PulsarAdmin remote : new PulsarAdmin[]{admin2, admin3}) {
        Awaitility.await().ignoreExceptions().untilAsserted(() ->
                assertTrue(remote.topicPolicies(true).getDeduplicationStatus(topicName)));
    }
    // After removal the policy reads back as null everywhere.
    admin1.topicPolicies(true).removeDeduplicationStatus(topicName);
    untilRemoteClustersAsserted(admin ->
            assertNull(admin.topicPolicies(true).getDeduplicationStatus(topicName)));
}
@Test
public void testReplicatorMaxProducer() throws Exception {
    final String ns = "pulsar/partitionedNs-" + UUID.randomUUID();
    final String topicName = "persistent://" + ns + "/topic" + UUID.randomUUID();
    init(ns, topicName);
    // A max-producers limit of 100 set on r1 must replicate to the remote clusters.
    admin1.topicPolicies(true).setMaxProducers(topicName, 100);
    for (PulsarAdmin remote : new PulsarAdmin[]{admin2, admin3}) {
        Awaitility.await().ignoreExceptions().untilAsserted(() ->
                assertEquals(remote.topicPolicies(true).getMaxProducers(topicName).intValue(), 100));
    }
    // Removal must replicate as well.
    admin1.topicPolicies(true).removeMaxProducers(topicName);
    untilRemoteClustersAsserted(admin ->
            assertNull(admin.topicPolicies(true).getMaxProducers(topicName)));
}
@Test
public void testReplicatorMaxConsumerPerSubPolicies() throws Exception {
    final String ns = "pulsar/partitionedNs-" + UUID.randomUUID();
    final String topicName = "persistent://" + ns + "/topic" + UUID.randomUUID();
    init(ns, topicName);
    // Set a global max-consumers-per-subscription limit on r1.
    admin1.topicPolicies(true).setMaxConsumersPerSubscription(topicName, 100);
    for (PulsarAdmin remote : new PulsarAdmin[]{admin2, admin3}) {
        Awaitility.await().ignoreExceptions().untilAsserted(() ->
                assertEquals(remote.topicPolicies(true).getMaxConsumersPerSubscription(topicName).intValue(), 100));
    }
    // On the origin cluster the value is visible only through the global policy
    // view; the local (non-global) view must stay null.
    Awaitility.await().untilAsserted(() -> {
        assertEquals(admin1.topicPolicies(true).getMaxConsumersPerSubscription(topicName).intValue(), 100);
        assertNull(admin1.topicPolicies().getMaxConsumersPerSubscription(topicName));
    });
    // Removal must replicate as well.
    admin1.topicPolicies(true).removeMaxConsumersPerSubscription(topicName);
    untilRemoteClustersAsserted(admin ->
            assertNull(admin.topicPolicies(true).getMaxConsumersPerSubscription(topicName)));
}
@Test
public void testReplicateMaxUnackedMsgPerConsumer() throws Exception {
    final String ns = "pulsar/partitionedNs-" + UUID.randomUUID();
    final String topicName = "persistent://" + ns + "/topic" + UUID.randomUUID();
    init(ns, topicName);
    // Replicate a max-unacked-messages-per-consumer limit of 100.
    admin1.topicPolicies(true).setMaxUnackedMessagesOnConsumer(topicName, 100);
    for (PulsarAdmin remote : new PulsarAdmin[]{admin2, admin3}) {
        Awaitility.await().ignoreExceptions().untilAsserted(() ->
                assertEquals(remote.topicPolicies(true).getMaxUnackedMessagesOnConsumer(topicName).intValue(), 100));
    }
    // Removal must replicate as well.
    admin1.topicPolicies(true).removeMaxUnackedMessagesOnConsumer(topicName);
    untilRemoteClustersAsserted(admin ->
            assertNull(admin.topicPolicies(true).getMaxUnackedMessagesOnConsumer(topicName)));
}
@Test
public void testReplicatorTopicPolicies() throws Exception {
    final String ns = "pulsar/partitionedNs-" + UUID.randomUUID();
    final String topicName = "persistent://" + ns + "/topic" + UUID.randomUUID();
    init(ns, topicName);
    // Replicate a retention policy from r1 to the remote clusters.
    RetentionPolicies expected = new RetentionPolicies(1, 1);
    admin1.topicPolicies(true).setRetention(topicName, expected);
    untilRemoteClustersAsserted(admin ->
            assertEquals(admin.topicPolicies(true).getRetention(topicName), expected));
    // On the origin cluster the value is visible only through the global policy
    // view; the local (non-global) view must stay null.
    Awaitility.await().untilAsserted(() -> {
        assertEquals(admin1.topicPolicies(true).getRetention(topicName), expected);
        assertNull(admin1.topicPolicies().getRetention(topicName));
    });
    // Removal must replicate as well.
    admin1.topicPolicies(true).removeRetention(topicName);
    untilRemoteClustersAsserted(admin ->
            assertNull(admin.topicPolicies(true).getRetention(topicName)));
}
@Test
public void testReplicateSubscriptionTypesPolicies() throws Exception {
    final String ns = "pulsar/partitionedNs-" + UUID.randomUUID();
    final String topicName = "persistent://" + ns + "/topic" + UUID.randomUUID();
    init(ns, topicName);
    // Restrict the topic to Shared subscriptions, globally.
    Set<SubscriptionType> allowedTypes = new HashSet<>();
    allowedTypes.add(SubscriptionType.Shared);
    admin1.topicPolicies(true).setSubscriptionTypesEnabled(topicName, allowedTypes);
    untilRemoteClustersAsserted(admin ->
            assertEquals(admin.topicPolicies(true).getSubscriptionTypesEnabled(topicName), allowedTypes));
    // After removal the remote clusters report an empty set (not null).
    admin1.topicPolicies(true).removeSubscriptionTypesEnabled(topicName);
    untilRemoteClustersAsserted(admin ->
            assertEquals(admin.topicPolicies(true).getSubscriptionTypesEnabled(topicName), Collections.emptySet()));
}
@Test
public void testReplicateMaxConsumers() throws Exception {
    final String ns = "pulsar/partitionedNs-" + UUID.randomUUID();
    final String topicName = "persistent://" + ns + "/topic" + UUID.randomUUID();
    init(ns, topicName);
    // Replicate a max-consumers limit of 100.
    admin1.topicPolicies(true).setMaxConsumers(topicName, 100);
    for (PulsarAdmin remote : new PulsarAdmin[]{admin2, admin3}) {
        Awaitility.await().ignoreExceptions().untilAsserted(() ->
                assertEquals(remote.topicPolicies(true).getMaxConsumers(topicName).intValue(), 100));
    }
    // Removal must replicate as well.
    admin1.topicPolicies(true).removeMaxConsumers(topicName);
    untilRemoteClustersAsserted(admin ->
            assertNull(admin.topicPolicies(true).getMaxConsumers(topicName)));
}
@Test
public void testReplicatorMessageDispatchRatePolicies() throws Exception {
    final String namespace = "pulsar/partitionedNs-" + UUID.randomUUID();
    final String persistentTopicName = "persistent://" + namespace + "/topic" + UUID.randomUUID();
    init(namespace, persistentTopicName);
    // Build the dispatch-rate policy to replicate. The original code called
    // dispatchThrottlingRateInMsg twice — the second call silently overwrote the
    // first, leaving the byte-rate unset. By symmetry with the subscription- and
    // replicator-dispatch-rate tests below, the second value was meant to be the
    // byte-rate limit.
    DispatchRate dispatchRate = DispatchRate.builder()
            .dispatchThrottlingRateInMsg(1)
            .dispatchThrottlingRateInByte(2)
            .ratePeriodInSecond(3)
            .relativeToPublishRate(true)
            .build();
    admin1.topicPolicies(true).setDispatchRate(persistentTopicName, dispatchRate);
    // Both remote clusters must observe the replicated policy.
    Awaitility.await().untilAsserted(() ->
            assertEquals(admin2.topicPolicies(true).getDispatchRate(persistentTopicName), dispatchRate));
    Awaitility.await().untilAsserted(() ->
            assertEquals(admin3.topicPolicies(true).getDispatchRate(persistentTopicName), dispatchRate));
    // Removing the policy must replicate too.
    admin1.topicPolicies(true).removeDispatchRate(persistentTopicName);
    Awaitility.await().untilAsserted(() ->
            assertNull(admin2.topicPolicies(true).getDispatchRate(persistentTopicName)));
    Awaitility.await().untilAsserted(() ->
            assertNull(admin3.topicPolicies(true).getDispatchRate(persistentTopicName)));
}
@Test
public void testReplicateDelayedDelivery() throws Exception {
    final String ns = "pulsar/partitionedNs-" + UUID.randomUUID();
    final String topicName = "persistent://" + ns + "/topic" + UUID.randomUUID();
    init(ns, topicName);
    DelayedDeliveryPolicies delayedDelivery = DelayedDeliveryPolicies.builder().active(true).tickTime(10000L).build();
    // Replicate the delayed-delivery policy to both remote clusters.
    admin1.topicPolicies(true).setDelayedDeliveryPolicy(topicName, delayedDelivery);
    for (PulsarAdmin remote : new PulsarAdmin[]{admin2, admin3}) {
        Awaitility.await().ignoreExceptions().untilAsserted(() ->
                assertEquals(remote.topicPolicies(true).getDelayedDeliveryPolicy(topicName), delayedDelivery));
    }
    // Removal must replicate as well.
    admin1.topicPolicies(true).removeDelayedDeliveryPolicy(topicName);
    untilRemoteClustersAsserted(admin ->
            assertNull(admin.topicPolicies(true).getDelayedDeliveryPolicy(topicName)));
}
@Test
public void testReplicatorInactiveTopicPolicies() throws Exception {
    final String ns = "pulsar/partitionedNs-" + UUID.randomUUID();
    final String topicName = "persistent://" + ns + "/topic" + UUID.randomUUID();
    init(ns, topicName);
    // Replicate an inactive-topic policy (delete_when_no_subscriptions mode).
    InactiveTopicPolicies expected =
            new InactiveTopicPolicies(InactiveTopicDeleteMode.delete_when_no_subscriptions, 1, true);
    admin1.topicPolicies(true).setInactiveTopicPolicies(topicName, expected);
    untilRemoteClustersAsserted(admin ->
            assertEquals(admin.topicPolicies(true).getInactiveTopicPolicies(topicName), expected));
    // Removal must replicate as well.
    admin1.topicPolicies(true).removeInactiveTopicPolicies(topicName);
    untilRemoteClustersAsserted(admin ->
            assertNull(admin.topicPolicies(true).getInactiveTopicPolicies(topicName)));
}
@Test
public void testReplicatorSubscriptionDispatchRatePolicies() throws Exception {
    final String ns = "pulsar/partitionedNs-" + UUID.randomUUID();
    final String topicName = "persistent://" + ns + "/topic" + UUID.randomUUID();
    init(ns, topicName);
    DispatchRate expected = DispatchRate.builder()
            .dispatchThrottlingRateInMsg(1)
            .ratePeriodInSecond(1)
            .dispatchThrottlingRateInByte(1)
            .relativeToPublishRate(true)
            .build();
    // Replicate the subscription-level dispatch rate.
    admin1.topicPolicies(true).setSubscriptionDispatchRate(topicName, expected);
    untilRemoteClustersAsserted(admin ->
            assertEquals(admin.topicPolicies(true).getSubscriptionDispatchRate(topicName), expected));
    // Removal must replicate as well.
    admin1.topicPolicies(true).removeSubscriptionDispatchRate(topicName);
    untilRemoteClustersAsserted(admin ->
            assertNull(admin.topicPolicies(true).getSubscriptionDispatchRate(topicName)));
}
@Test
public void testReplicateReplicatorDispatchRatePolicies() throws Exception {
    final String ns = "pulsar/partitionedNs-" + UUID.randomUUID();
    final String topicName = "persistent://" + ns + "/topic" + UUID.randomUUID();
    init(ns, topicName);
    DispatchRate expected = DispatchRate.builder()
            .dispatchThrottlingRateInMsg(1)
            .ratePeriodInSecond(1)
            .dispatchThrottlingRateInByte(1)
            .relativeToPublishRate(true)
            .build();
    // Replicate the replicator-level dispatch rate.
    admin1.topicPolicies(true).setReplicatorDispatchRate(topicName, expected);
    untilRemoteClustersAsserted(admin ->
            assertEquals(admin.topicPolicies(true).getReplicatorDispatchRate(topicName), expected));
    // Removal must replicate as well.
    admin1.topicPolicies(true).removeReplicatorDispatchRate(topicName);
    untilRemoteClustersAsserted(admin ->
            assertNull(admin.topicPolicies(true).getReplicatorDispatchRate(topicName)));
}
@Test
public void testReplicateMaxUnackedMsgPerSub() throws Exception {
    final String ns = "pulsar/partitionedNs-" + UUID.randomUUID();
    final String topicName = "persistent://" + ns + "/topic" + UUID.randomUUID();
    init(ns, topicName);
    // Replicate a max-unacked-messages-per-subscription limit of 100.
    admin1.topicPolicies(true).setMaxUnackedMessagesOnSubscription(topicName, 100);
    for (PulsarAdmin remote : new PulsarAdmin[]{admin2, admin3}) {
        Awaitility.await().ignoreExceptions().untilAsserted(() ->
                assertEquals(remote.topicPolicies(true).getMaxUnackedMessagesOnSubscription(topicName).intValue(), 100));
    }
    // Removal must replicate as well.
    admin1.topicPolicies(true).removeMaxUnackedMessagesOnSubscription(topicName);
    untilRemoteClustersAsserted(admin ->
            assertNull(admin.topicPolicies(true).getMaxUnackedMessagesOnSubscription(topicName)));
}
@Test
public void testReplicatorCompactionThresholdPolicies() throws Exception {
    final String ns = "pulsar/partitionedNs-" + UUID.randomUUID();
    final String topicName = "persistent://" + ns + "/topic" + UUID.randomUUID();
    init(ns, topicName);
    // Replicate a compaction threshold of 1.
    admin1.topicPolicies(true).setCompactionThreshold(topicName, 1);
    untilRemoteClustersAsserted(admin ->
            assertEquals(admin.topicPolicies(true).getCompactionThreshold(topicName), Long.valueOf(1)));
    // Removal must replicate as well.
    admin1.topicPolicies(true).removeCompactionThreshold(topicName);
    untilRemoteClustersAsserted(admin ->
            assertNull(admin.topicPolicies(true).getCompactionThreshold(topicName)));
}
@Test
public void testReplicateMaxSubscriptionsPerTopic() throws Exception {
    final String ns = "pulsar/partitionedNs-" + UUID.randomUUID();
    final String topicName = "persistent://" + ns + "/topic" + UUID.randomUUID();
    init(ns, topicName);
    // Replicate a max-subscriptions-per-topic limit of 1024.
    admin1.topicPolicies(true).setMaxSubscriptionsPerTopic(topicName, 1024);
    Awaitility.await().untilAsserted(() ->
            assertEquals(admin2.topicPolicies(true).getMaxSubscriptionsPerTopic(topicName), Integer.valueOf(1024)));
    Awaitility.await().untilAsserted(() ->
            assertEquals(admin3.topicPolicies(true).getMaxSubscriptionsPerTopic(topicName), Integer.valueOf(1024)));
    // Removal must replicate as well.
    admin1.topicPolicies(true).removeMaxSubscriptionsPerTopic(topicName);
    Awaitility.await().untilAsserted(() ->
            assertNull(admin2.topicPolicies(true).getMaxSubscriptionsPerTopic(topicName)));
    Awaitility.await().untilAsserted(() ->
            assertNull(admin3.topicPolicies(true).getMaxSubscriptionsPerTopic(topicName)));
}
@Test
public void testReplicatorOffloadPolicies() throws Exception {
    final String ns = "pulsar/partitionedNs-" + UUID.randomUUID();
    final String topicName = "persistent://" + ns + "/topic" + UUID.randomUUID();
    init(ns, topicName);
    OffloadPoliciesImpl offloadPolicies =
            OffloadPoliciesImpl.create("s3", "region", "bucket", "endpoint", null, null, null, null,
                    8, 9, 10L, null, OffloadedReadPriority.BOOKKEEPER_FIRST);
    // Setting the policy may fail locally when no offload driver is installed;
    // the test still expects the policy to replicate to the remote clusters.
    try {
        admin1.topicPolicies(true).setOffloadPolicies(topicName, offloadPolicies);
    } catch (Exception exception) {
        // driver not found exception.
        assertTrue(exception instanceof PulsarAdminException.ServerSideErrorException);
    }
    untilRemoteClustersAsserted(admin ->
            assertEquals(admin.topicPolicies(true).getOffloadPolicies(topicName), offloadPolicies));
    // Removal must replicate as well.
    admin1.topicPolicies(true).removeOffloadPolicies(topicName);
    untilRemoteClustersAsserted(admin ->
            assertNull(admin.topicPolicies(true).getOffloadPolicies(topicName)));
}
/**
 * Shared fixture setup: creates a namespace replicated across the three test
 * clusters, creates a 3-partition topic from r1, waits until the partitioned
 * topic is listed on r2 and r3, opens (and closes) one producer per cluster so
 * the topic is loaded everywhere, and finally waits until no topic-level
 * policies are cached on any cluster.
 *
 * @param namespace namespace to create (already prefixed with the tenant)
 * @param topic     fully-qualified persistent topic name inside {@code namespace}
 */
private void init(String namespace, String topic)
        throws PulsarAdminException, PulsarClientException, PulsarServerException {
    final String cluster2 = pulsar2.getConfig().getClusterName();
    final String cluster1 = pulsar1.getConfig().getClusterName();
    final String cluster3 = pulsar3.getConfig().getClusterName();
    // Allow the namespace on all three clusters.
    admin1.namespaces().createNamespace(namespace, Sets.newHashSet(cluster1, cluster2, cluster3));
    // NOTE(review): replication clusters are hard-coded as "r1"/"r2"/"r3" —
    // presumably these match the configured cluster names fetched above; confirm.
    admin1.namespaces().setNamespaceReplicationClusters(namespace, Sets.newHashSet("r1", "r2", "r3"));
    // Create partitioned-topic from R1
    admin1.topics().createPartitionedTopic(topic, 3);
    // List partitioned topics from R2
    Awaitility.await().untilAsserted(() -> assertNotNull(admin2.topics().getPartitionedTopicList(namespace)));
    Awaitility.await().untilAsserted(() -> assertEquals(
            admin2.topics().getPartitionedTopicList(namespace).get(0), topic));
    // r1 should see the 3 partition topics of the topic created above.
    assertEquals(admin1.topics().getList(namespace).size(), 3);
    // List partitioned topics from R3
    Awaitility.await().untilAsserted(() -> assertNotNull(admin3.topics().getPartitionedTopicList(namespace)));
    Awaitility.await().untilAsserted(() -> assertEquals(
            admin3.topics().getPartitionedTopicList(namespace).get(0), topic));
    // Touch the topic from every cluster so it is owned/loaded before tests run.
    pulsar1.getClient().newProducer().topic(topic).create().close();
    pulsar2.getClient().newProducer().topic(topic).create().close();
    pulsar3.getClient().newProducer().topic(topic).create().close();
    //init topic policies server
    Awaitility.await().ignoreExceptions().untilAsserted(() -> {
        assertNull(pulsar1.getTopicPoliciesService().getTopicPolicies(TopicName.get(topic)));
        assertNull(pulsar2.getTopicPoliciesService().getTopicPolicies(TopicName.get(topic)));
        assertNull(pulsar3.getTopicPoliciesService().getTopicPolicies(TopicName.get(topic)));
    });
}
}
| |
package ca.uhn.fhir.rest.server.provider.dstu2;
import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.model.dstu2.resource.Conformance;
import ca.uhn.fhir.model.dstu2.resource.Conformance.Rest;
import ca.uhn.fhir.model.dstu2.resource.Conformance.RestResource;
import ca.uhn.fhir.model.dstu2.resource.Conformance.RestResourceInteraction;
import ca.uhn.fhir.model.dstu2.resource.Conformance.RestResourceSearchParam;
import ca.uhn.fhir.model.dstu2.resource.OperationDefinition;
import ca.uhn.fhir.model.dstu2.resource.OperationDefinition.Parameter;
import ca.uhn.fhir.model.dstu2.valueset.*;
import ca.uhn.fhir.model.primitive.DateTimeDt;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.parser.DataFormatException;
import ca.uhn.fhir.rest.annotation.IdParam;
import ca.uhn.fhir.rest.annotation.Metadata;
import ca.uhn.fhir.rest.annotation.Read;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.*;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.rest.server.method.*;
import ca.uhn.fhir.rest.server.method.OperationMethodBinding.ReturnType;
import ca.uhn.fhir.rest.server.util.BaseServerCapabilityStatementProvider;
import org.apache.commons.lang3.StringUtils;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletRequest;
import java.util.*;
import java.util.Map.Entry;
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
/*
* #%L
* HAPI FHIR Structures - DSTU2 (FHIR v1.0.0)
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
/**
* Server FHIR Provider which serves the conformance statement for a RESTful server implementation
*/
public class ServerConformanceProvider extends BaseServerCapabilityStatementProvider implements IServerConformanceProvider<Conformance> {
private String myPublisher = "Not provided";
/**
* No-arg constructor and setter so that the ServerConfirmanceProvider can be Spring-wired with the RestfulService avoiding the potential reference cycle that would happen.
*/
public ServerConformanceProvider() {
super();
}
/**
 * Constructor.
 * <p>
 * The {@code theRestfulServer} argument is not used; this overload delegates to the
 * no-args constructor and exists only for backwards compatibility.
 *
 * @deprecated Use no-args constructor instead. Deprecated in 4.0.0
 */
@Deprecated
public ServerConformanceProvider(RestfulServer theRestfulServer) {
    this();
}
/**
 * Constructor.
 *
 * @param theServerConfiguration the fixed server configuration this provider will
 *                               build conformance statements from
 */
public ServerConformanceProvider(RestfulServerConfiguration theServerConfiguration) {
    super(theServerConfiguration);
}
/**
 * If the given method binding maps to a system-level RESTful interaction that has not
 * been recorded yet, records it in {@code systemOps} and advertises it on {@code rest}.
 */
private void checkBindingForSystemOps(Rest rest, Set<SystemRestfulInteractionEnum> systemOps, BaseMethodBinding<?> nextMethodBinding) {
    // A binding without a REST operation type contributes no system interaction.
    if (nextMethodBinding.getRestOperationType() == null) {
        return;
    }
    String sysOpCode = nextMethodBinding.getRestOperationType().getCode();
    if (sysOpCode == null) {
        return;
    }
    // Codes that are not system-level interactions map to null and are skipped.
    SystemRestfulInteractionEnum sysOp = SystemRestfulInteractionEnum.VALUESET_BINDER.fromCodeString(sysOpCode);
    if (sysOp == null || systemOps.contains(sysOp)) {
        return;
    }
    systemOps.add(sysOp);
    rest.addInteraction().setCode(sysOp);
}
/**
 * Groups every method binding registered on the server by resource name, with
 * server-level bindings grouped under the empty-string key. A {@link TreeMap} is
 * used so iteration yields resource names in sorted order (the "" key first).
 *
 * @param theRequestDetails the current request, used to resolve the server configuration
 * @return map of resource name (or "") to the bindings declared for it
 */
private Map<String, List<BaseMethodBinding<?>>> collectMethodBindings(RequestDetails theRequestDetails) {
    Map<String, List<BaseMethodBinding<?>>> resourceToMethods = new TreeMap<>();
    for (ResourceBinding next : getServerConfiguration(theRequestDetails).getResourceBindings()) {
        String resourceName = next.getResourceName();
        for (BaseMethodBinding<?> nextMethodBinding : next.getMethodBindings()) {
            // computeIfAbsent replaces the original containsKey/put dance.
            resourceToMethods.computeIfAbsent(resourceName, key -> new ArrayList<>()).add(nextMethodBinding);
        }
    }
    // Server-level (non-resource) bindings are keyed by the empty string.
    for (BaseMethodBinding<?> nextMethodBinding : getServerConfiguration(theRequestDetails).getServerBindings()) {
        resourceToMethods.computeIfAbsent("", key -> new ArrayList<>()).add(nextMethodBinding);
    }
    return resourceToMethods;
}
/**
 * Returns the date to stamp on the conformance statement: the configured build
 * date when present and parseable, otherwise the current time.
 */
private DateTimeDt conformanceDate(RequestDetails theRequestDetails) {
    IPrimitiveType<Date> buildDate = getServerConfiguration(theRequestDetails).getConformanceDate();
    if (buildDate == null || buildDate.getValue() == null) {
        return DateTimeDt.withCurrentTime();
    }
    try {
        return new DateTimeDt(buildDate.getValueAsString());
    } catch (DataFormatException e) {
        // Unparseable build date: fall back to "now".
        return DateTimeDt.withCurrentTime();
    }
}
/**
 * Builds a unique identifier for an operation binding of the form
 * {@code [resourceName]-[i][s]-[operation name without the leading $]},
 * where 'i'/'s' flag instance- and server-level invocability.
 */
private String createOperationName(OperationMethodBinding theMethodBinding) {
    String operationName = theMethodBinding.getName();
    StringBuilder id = new StringBuilder();
    if (theMethodBinding.getResourceName() != null) {
        id.append(theMethodBinding.getResourceName());
    }
    id.append('-');
    // Encode the levels the operation can be invoked at.
    if (theMethodBinding.isCanOperateAtInstanceLevel()) {
        id.append('i');
    }
    if (theMethodBinding.isCanOperateAtServerLevel()) {
        id.append('s');
    }
    id.append('-');
    // Exclude the leading $ from the operation name.
    id.append(operationName, 1, operationName.length());
    return id.toString();
}
/**
 * Gets the value of the "publisher" that will be placed in the generated conformance statement. As this is a mandatory element, the value should not be null (although this is not enforced). The
 * value defaults to "Not provided" but may be set to null, which will cause this element to be omitted.
 *
 * @return the publisher name, or {@code null} to omit the element
 */
public String getPublisher() {
    return myPublisher;
}
/**
 * Sets the value of the "publisher" that will be placed in the generated conformance statement. As this is a mandatory element, the value should not be null (although this is not enforced). The
 * value defaults to "Not provided" but may be set to null, which will cause this element to be omitted.
 *
 * @param thePublisher the publisher name, or {@code null} to omit the element
 */
public void setPublisher(String thePublisher) {
    myPublisher = thePublisher;
}
@SuppressWarnings("EnumSwitchStatementWhichMissesCases")
@Override
@Metadata
public Conformance getServerConformance(HttpServletRequest theRequest, RequestDetails theRequestDetails) {
RestfulServerConfiguration serverConfiguration = getServerConfiguration(theRequestDetails);
Bindings bindings = serverConfiguration.provideBindings();
Conformance retVal = new Conformance();
retVal.setPublisher(myPublisher);
retVal.setDate(conformanceDate(theRequestDetails));
retVal.setFhirVersion(FhirVersionEnum.DSTU2.getFhirVersionString());
retVal.setAcceptUnknown(UnknownContentCodeEnum.UNKNOWN_EXTENSIONS); // TODO: make this configurable - this is a fairly big effort since the parser
// needs to be modified to actually allow it
ServletContext servletContext = (ServletContext) (theRequest == null ? null : theRequest.getAttribute(RestfulServer.SERVLET_CONTEXT_ATTRIBUTE));
String serverBase = serverConfiguration.getServerAddressStrategy().determineServerBase(servletContext, theRequest);
retVal
.getImplementation()
.setUrl(serverBase)
.setDescription(serverConfiguration.getImplementationDescription());
retVal.setKind(ConformanceStatementKindEnum.INSTANCE);
retVal.getSoftware().setName(serverConfiguration.getServerName());
retVal.getSoftware().setVersion(serverConfiguration.getServerVersion());
retVal.addFormat(Constants.CT_FHIR_XML);
retVal.addFormat(Constants.CT_FHIR_JSON);
Rest rest = retVal.addRest();
rest.setMode(RestfulConformanceModeEnum.SERVER);
Set<SystemRestfulInteractionEnum> systemOps = new HashSet<>();
Set<String> operationNames = new HashSet<>();
Map<String, List<BaseMethodBinding<?>>> resourceToMethods = collectMethodBindings(theRequestDetails);
for (Entry<String, List<BaseMethodBinding<?>>> nextEntry : resourceToMethods.entrySet()) {
if (nextEntry.getKey().isEmpty() == false) {
Set<TypeRestfulInteractionEnum> resourceOps = new HashSet<>();
RestResource resource = rest.addResource();
String resourceName = nextEntry.getKey();
RuntimeResourceDefinition def = serverConfiguration.getFhirContext().getResourceDefinition(resourceName);
resource.getTypeElement().setValue(def.getName());
resource.getProfile().setReference(new IdDt(def.getResourceProfile(serverBase)));
TreeSet<String> includes = new TreeSet<>();
// Map<String, Conformance.RestResourceSearchParam> nameToSearchParam = new HashMap<String,
// Conformance.RestResourceSearchParam>();
for (BaseMethodBinding<?> nextMethodBinding : nextEntry.getValue()) {
if (nextMethodBinding.getRestOperationType() != null) {
String resOpCode = nextMethodBinding.getRestOperationType().getCode();
if (resOpCode != null) {
TypeRestfulInteractionEnum resOp = TypeRestfulInteractionEnum.VALUESET_BINDER.fromCodeString(resOpCode);
if (resOp != null) {
if (resourceOps.contains(resOp) == false) {
resourceOps.add(resOp);
resource.addInteraction().setCode(resOp);
}
if ("vread".equals(resOpCode)) {
// vread implies read
resOp = TypeRestfulInteractionEnum.READ;
if (resourceOps.contains(resOp) == false) {
resourceOps.add(resOp);
resource.addInteraction().setCode(resOp);
}
}
if (nextMethodBinding.isSupportsConditional()) {
switch (resOp) {
case CREATE:
resource.setConditionalCreate(true);
break;
case DELETE:
if (nextMethodBinding.isSupportsConditionalMultiple()) {
resource.setConditionalDelete(ConditionalDeleteStatusEnum.MULTIPLE_DELETES_SUPPORTED);
} else {
resource.setConditionalDelete(ConditionalDeleteStatusEnum.SINGLE_DELETES_SUPPORTED);
}
break;
case UPDATE:
resource.setConditionalUpdate(true);
break;
default:
break;
}
}
}
}
}
checkBindingForSystemOps(rest, systemOps, nextMethodBinding);
if (nextMethodBinding instanceof SearchMethodBinding) {
handleSearchMethodBinding(resource, def, includes, (SearchMethodBinding) nextMethodBinding, theRequestDetails);
} else if (nextMethodBinding instanceof OperationMethodBinding) {
OperationMethodBinding methodBinding = (OperationMethodBinding) nextMethodBinding;
String opName = bindings.getOperationBindingToName().get(methodBinding);
if (operationNames.add(opName)) {
// Only add each operation (by name) once
rest.addOperation().setName(methodBinding.getName().substring(1)).getDefinition().setReference("OperationDefinition/" + opName);
}
}
Collections.sort(resource.getInteraction(), new Comparator<RestResourceInteraction>() {
@Override
public int compare(RestResourceInteraction theO1, RestResourceInteraction theO2) {
TypeRestfulInteractionEnum o1 = theO1.getCodeElement().getValueAsEnum();
TypeRestfulInteractionEnum o2 = theO2.getCodeElement().getValueAsEnum();
if (o1 == null && o2 == null) {
return 0;
}
if (o1 == null) {
return 1;
}
if (o2 == null) {
return -1;
}
return o1.ordinal() - o2.ordinal();
}
});
}
for (String nextInclude : includes) {
resource.addSearchInclude(nextInclude);
}
} else {
for (BaseMethodBinding<?> nextMethodBinding : nextEntry.getValue()) {
checkBindingForSystemOps(rest, systemOps, nextMethodBinding);
if (nextMethodBinding instanceof OperationMethodBinding) {
OperationMethodBinding methodBinding = (OperationMethodBinding) nextMethodBinding;
String opName = bindings.getOperationBindingToName().get(methodBinding);
if (operationNames.add(opName)) {
rest.addOperation().setName(methodBinding.getName().substring(1)).getDefinition().setReference("OperationDefinition/" + opName);
}
}
}
}
}
return retVal;
}
private void handleSearchMethodBinding(RestResource resource, RuntimeResourceDefinition def, TreeSet<String> includes, SearchMethodBinding searchMethodBinding, RequestDetails theRequestDetails) {
includes.addAll(searchMethodBinding.getIncludes());
List<IParameter> params = searchMethodBinding.getParameters();
List<SearchParameter> searchParameters = new ArrayList<>();
for (IParameter nextParameter : params) {
if ((nextParameter instanceof SearchParameter)) {
searchParameters.add((SearchParameter) nextParameter);
}
}
sortSearchParameters(searchParameters);
if (!searchParameters.isEmpty()) {
// boolean allOptional = searchParameters.get(0).isRequired() == false;
//
// OperationDefinition query = null;
// if (!allOptional) {
// RestOperation operation = rest.addOperation();
// query = new OperationDefinition();
// operation.setDefinition(new ResourceReferenceDt(query));
// query.getDescriptionElement().setValue(searchMethodBinding.getDescription());
// query.addUndeclaredExtension(false, ExtensionConstants.QUERY_RETURN_TYPE, new CodeDt(resourceName));
// for (String nextInclude : searchMethodBinding.getIncludes()) {
// query.addUndeclaredExtension(false, ExtensionConstants.QUERY_ALLOWED_INCLUDE, new StringDt(nextInclude));
// }
// }
for (SearchParameter nextParameter : searchParameters) {
String nextParamName = nextParameter.getName();
String chain = null;
String nextParamUnchainedName = nextParamName;
if (nextParamName.contains(".")) {
chain = nextParamName.substring(nextParamName.indexOf('.') + 1);
nextParamUnchainedName = nextParamName.substring(0, nextParamName.indexOf('.'));
}
String nextParamDescription = nextParameter.getDescription();
/*
* If the parameter has no description, default to the one from the resource
*/
if (StringUtils.isBlank(nextParamDescription)) {
RuntimeSearchParam paramDef = def.getSearchParam(nextParamUnchainedName);
if (paramDef != null) {
nextParamDescription = paramDef.getDescription();
}
}
String finalNextParamUnchainedName = nextParamUnchainedName;
RestResourceSearchParam param =
resource
.getSearchParam()
.stream()
.filter(t -> t.getName().equals(finalNextParamUnchainedName))
.findFirst()
.orElseGet(() -> resource.addSearchParam());
param.setName(nextParamUnchainedName);
if (StringUtils.isNotBlank(chain)) {
param.addChain(chain);
} else {
if (nextParameter.getParamType() == RestSearchParameterTypeEnum.REFERENCE) {
for (String nextWhitelist : new TreeSet<>(nextParameter.getQualifierWhitelist())) {
if (nextWhitelist.startsWith(".")) {
param.addChain(nextWhitelist.substring(1));
}
}
}
}
param.setDocumentation(nextParamDescription);
if (nextParameter.getParamType() != null) {
param.getTypeElement().setValueAsString(nextParameter.getParamType().getCode());
}
for (Class<? extends IBaseResource> nextTarget : nextParameter.getDeclaredTypes()) {
RuntimeResourceDefinition targetDef = getServerConfiguration(theRequestDetails).getFhirContext().getResourceDefinition(nextTarget);
if (targetDef != null) {
ResourceTypeEnum code = ResourceTypeEnum.VALUESET_BINDER.fromCodeString(targetDef.getName());
if (code != null) {
param.addTarget(code);
}
}
}
}
}
}
@Read(type = OperationDefinition.class)
public OperationDefinition readOperationDefinition(@IdParam IdDt theId, RequestDetails theRequestDetails) {
if (theId == null || theId.hasIdPart() == false) {
throw new ResourceNotFoundException(theId);
}
RestfulServerConfiguration serverConfiguration = getServerConfiguration(theRequestDetails);
Bindings bindings = serverConfiguration.provideBindings();
List<OperationMethodBinding> sharedDescriptions = bindings.getOperationNameToBindings().get(theId.getIdPart());
if (sharedDescriptions == null || sharedDescriptions.isEmpty()) {
throw new ResourceNotFoundException(theId);
}
OperationDefinition op = new OperationDefinition();
op.setStatus(ConformanceResourceStatusEnum.ACTIVE);
op.setKind(OperationKindEnum.OPERATION);
op.setIdempotent(true);
Set<String> inParams = new HashSet<>();
Set<String> outParams = new HashSet<>();
for (OperationMethodBinding sharedDescription : sharedDescriptions) {
if (isNotBlank(sharedDescription.getDescription())) {
op.setDescription(sharedDescription.getDescription());
}
if (!sharedDescription.isIdempotent()) {
op.setIdempotent(sharedDescription.isIdempotent());
}
op.setCode(sharedDescription.getName().substring(1));
if (sharedDescription.isCanOperateAtInstanceLevel()) {
op.setInstance(sharedDescription.isCanOperateAtInstanceLevel());
}
if (sharedDescription.isCanOperateAtServerLevel()) {
op.setSystem(sharedDescription.isCanOperateAtServerLevel());
}
if (isNotBlank(sharedDescription.getResourceName())) {
op.addType().setValue(sharedDescription.getResourceName());
}
for (IParameter nextParamUntyped : sharedDescription.getParameters()) {
if (nextParamUntyped instanceof OperationParameter) {
OperationParameter nextParam = (OperationParameter) nextParamUntyped;
Parameter param = op.addParameter();
if (!inParams.add(nextParam.getName())) {
continue;
}
param.setUse(OperationParameterUseEnum.IN);
if (nextParam.getParamType() != null) {
param.setType(nextParam.getParamType());
}
param.setMin(nextParam.getMin());
param.setMax(nextParam.getMax() == -1 ? "*" : Integer.toString(nextParam.getMax()));
param.setName(nextParam.getName());
}
}
for (ReturnType nextParam : sharedDescription.getReturnParams()) {
if (!outParams.add(nextParam.getName())) {
continue;
}
Parameter param = op.addParameter();
param.setUse(OperationParameterUseEnum.OUT);
if (nextParam.getType() != null) {
param.setType(nextParam.getType());
}
param.setMin(nextParam.getMin());
param.setMax(nextParam.getMax() == -1 ? "*" : Integer.toString(nextParam.getMax()));
param.setName(nextParam.getName());
}
}
if (isBlank(op.getName())) {
if (isNotBlank(op.getDescription())) {
op.setName(op.getDescription());
} else {
op.setName(op.getCode());
}
}
if (op.getSystem() == null) {
op.setSystem(false);
}
if (op.getInstance() == null) {
op.setInstance(false);
}
return op;
}
	/**
	 * Sets the cache property (default is true). If set to true, the same response will be returned for each invocation.
	 * <p>
	 * See the class documentation for an important note if you are extending this class
	 * </p>
	 *
	 * @param theCache ignored; retained only so existing callers keep compiling
	 * @deprecated Since 4.0.0 this does nothing
	 */
	@Deprecated
	public void setCache(boolean theCache) {
		// Intentionally a no-op (see @deprecated note above)
	}
	@Override
	public void setRestfulServer(RestfulServer theRestfulServer) {
		// Intentionally a no-op: this provider does not hold a server reference;
		// configuration is obtained per-call via getServerConfiguration(theRequestDetails)
	}
private void sortSearchParameters(List<SearchParameter> searchParameters) {
Collections.sort(searchParameters, new Comparator<SearchParameter>() {
@Override
public int compare(SearchParameter theO1, SearchParameter theO2) {
if (theO1.isRequired() == theO2.isRequired()) {
return theO1.getName().compareTo(theO2.getName());
}
if (theO1.isRequired()) {
return -1;
}
return 1;
}
});
}
}
| |
import java.io.*;
import java.net.*;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Vector;
import java.util.*;
import javax.swing.JOptionPane;
public class MultithreadedServer {

    /** One handler per accepted connection; iterated when broadcasting offsets. */
    Vector<ClientHandler> clients = new Vector<ClientHandler>();
    /** User names (never populated in this class; kept for compatibility). */
    Vector<String> users = new Vector<String>();
    private static ServerSocket servSocket;
    private static final int PORT = 1247;
    /**
     * 100-entry lookup table mapping a uniform draw in [0, 100) to an action code:
     * 1 = send, 2 = receive, 3 = internal event. The number of entries carrying
     * each code encodes the configured probability of that action.
     */
    int[] choice;

    /**
     * Opens the server socket and accepts clients forever, spawning one
     * {@link ClientHandler} thread per connection.
     *
     * @param ch the 100-entry action lookup table (see {@link #choice})
     * @throws IOException if the server socket cannot be opened or an accept fails
     */
    public MultithreadedServer(int[] ch) throws IOException {
        this.choice = ch;
        servSocket = new ServerSocket(PORT);
        // Shared mutable state handed to every handler thread.
        // NOTE(review): these plain HashMaps are read and written concurrently by
        // the handler threads without synchronization; consider ConcurrentHashMap
        // or explicit locking. Left unchanged here to preserve the interface.
        HashMap<String, Integer> values = new HashMap<String, Integer>();
        HashMap<String, ClockValues> data = new HashMap<String, ClockValues>();
        HashMap<String, SyncStatus> sync_table = new HashMap<String, SyncStatus>();
        values.put("SeverValue", 0);
        data.put("Thread-0", new ClockValues(0, 0));
        data.put("Thread-1", new ClockValues(0, 0));
        data.put("Thread-2", new ClockValues(0, 0));
        data.put("Thread-3", new ClockValues(0, 0));
        sync_table.put("Thread-0", new SyncStatus(0, -1));
        sync_table.put("Thread-1", new SyncStatus(0, -1));
        sync_table.put("Thread-2", new SyncStatus(0, -1));
        sync_table.put("Thread-3", new SyncStatus(0, -1));
        while (true) {
            Socket client = servSocket.accept();
            ClientHandler handler = new ClientHandler(client, values, data, sync_table);
            clients.add(handler);
        }
    }

    /**
     * Prompts for the SEND and RECEIVE probabilities, builds the 100-entry action
     * lookup table and starts the server.
     */
    public static void main(String[] args) throws IOException {
        int probab_send = Integer.parseInt(JOptionPane.showInputDialog("Enter probablity for SEND: "));
        int probab_received = Integer.parseInt(JOptionPane.showInputDialog("Enter probablity for RECEIVE: "));
        // FIX: the original computed probab_internal as 100 minus the other two and
        // then checked that all three summed to 100 -- true by construction, so bad
        // input was never rejected. Validate the user-supplied values directly.
        if (probab_send < 0 || probab_received < 0 || probab_send + probab_received > 100) {
            JOptionPane.showMessageDialog(null, "Invalid probability!!");
            return;
        }
        int choice[] = new int[100];
        for (int i = 0; i < probab_send; i++) {
            choice[i] = 1; // send
        }
        for (int i = probab_send; i < probab_send + probab_received; i++) {
            choice[i] = 2; // receive
        }
        for (int i = probab_send + probab_received; i < 100; i++) {
            choice[i] = 3; // internal event
        }
        MultithreadedServer ms = new MultithreadedServer(choice);
    }

    /**
     * Per-connection worker thread. Exchanges obfuscated clock values with its
     * client and participates in the clock-averaging protocol over the shared
     * values/data/sync_table maps.
     */
    class ClientHandler extends Thread {
        private Socket client;
        private BufferedReader in;
        private PrintWriter out;
        String name, message, response;
        HashMap<String, Integer> values;        // shared: server clock under key "SeverValue"
        HashMap<String, ClockValues> data;      // shared: last reported clock/offset per thread name
        HashMap<String, SyncStatus> sync_table; // shared: receive flag + sequence number per thread name

        public ClientHandler(Socket socket, HashMap<String, Integer> values, HashMap<String, ClockValues> data,
                HashMap<String, SyncStatus> sync_table) {
            this.values = values;
            this.data = data;
            this.sync_table = sync_table;
            client = socket;
            try {
                in = new BufferedReader(new InputStreamReader(client.getInputStream()));
                out = new PrintWriter(client.getOutputStream(), true);
            } catch (IOException e) {
                e.printStackTrace();
            }
            start();
        }

        /** Encrypts the numeric message and writes it to this handler's client. */
        public void sendMessage(String msg) {
            int offset = Transfer.encrypt(Integer.parseInt(msg));
            out.println(offset + "\n");
        }

        /** Sends the message to every connected client. (Misspelled name kept for compatibility.) */
        public void boradcast(String message) {
            for (ClientHandler c : clients) {
                c.sendMessage(message);
            }
        }

        public String getUserName() {
            return name;
        }

        public void run() {
            String line;
            int hSize, sum, avg = 0, t_clk = 0, n;
            int offset = 0, preClock = 0;
            try {
                int flagR = 0, flagS = 1;
                Random rand = new Random(); // hoisted: one RNG per handler, not one per iteration
                do {
                    int i = rand.nextInt(100);
                    int ch = choice[i];
                    if (ch == 2) { // Receive
                        t_clk = Integer.parseInt("" + this.values.get("SeverValue")) + 1;
                        this.values.put("SeverValue", t_clk);
                        // Largest sequence number among threads that have already received.
                        int smax = 0;
                        for (Map.Entry m : this.sync_table.entrySet()) {
                            SyncStatus ss = (SyncStatus) m.getValue();
                            int r_flag = ss.getRflag();
                            int snum = ss.getSeqNum();
                            if (r_flag == 1 && snum > smax) {
                                smax = snum;
                            }
                        }
                        if (flagS == 1) {
                            System.out.println("Receive");
                            String message = in.readLine();
                            // FIX: readLine() returns null at end-of-stream; the original
                            // called message.isEmpty() unconditionally and could throw a
                            // NullPointerException when the client disconnected.
                            if (message != null && !message.isEmpty()) {
                                line = "" + Transfer.decrypt(Integer.parseInt(message));
                                n = Integer.parseInt(line);
                                if (n == -1) {
                                    break; // sentinel from the client: stop this handler
                                }
                                System.out.println("Received " + line + " from Connection " + this.getName() + ".");
                                this.data.put(this.getName(), new ClockValues(n, 0));
                                this.sync_table.put(this.getName(), new SyncStatus(1, smax + 1));
                                flagR = 1;
                                flagS = 0;
                            }
                        }
                    } else if (ch == 1) { // Send
                        t_clk = Integer.parseInt("" + this.values.get("SeverValue")) + 1;
                        this.values.put("SeverValue", t_clk);
                        SyncStatus status = this.sync_table.get(this.getName());
                        int r_flag = status.getRflag();
                        int seq_num = status.getSeqNum();
                        // Only proceed when no thread that has received holds a lower
                        // sequence number than ours.
                        int proceed = 1;
                        for (Map.Entry m : this.sync_table.entrySet()) {
                            SyncStatus ss = (SyncStatus) m.getValue();
                            int r = ss.getRflag();
                            int s = ss.getSeqNum();
                            if (r == 1 && s < seq_num) {
                                proceed = 0;
                            }
                        }
                        if (flagR == 1 && proceed == 1) {
                            hSize = 4; // total num of POs
                            sum = Integer.parseInt("" + this.values.get("SeverValue"));
                            System.out.println("Server Clock (B): " + this.values.get("SeverValue"));
                            for (Map.Entry m : this.data.entrySet()) {
                                ClockValues tmpclock = (ClockValues) m.getValue();
                                sum += Integer.parseInt("" + tmpclock.getClock());
                            }
                            // Average of the server clock plus the hSize recorded client clocks.
                            avg = sum / (hSize + 1);
                            this.values.put("SeverValue", avg);
                            System.out.println("Server Clock (A): " + this.values.get("SeverValue") + "\n\n");
                            for (ClientHandler c : clients) {
                                preClock = this.data.get(c.getName()).getClock();
                                offset = avg - preClock;
                                System.out.println("Server send to " + c.getName() + ": " + avg + " - " + preClock + "=" + offset + "\n");
                                this.data.put(c.getName(), new ClockValues(avg, offset));
                                int offset_s = this.data.get(c.getName()).getOffset();
                                c.sendMessage("" + offset_s);
                                this.sync_table.put(this.getName(), new SyncStatus(0, seq_num));
                            }
                            flagS = 1;
                            flagR = 0;
                        }
                    } else { // Internal event: advance the clock and idle briefly.
                        try {
                            t_clk = Integer.parseInt("" + this.values.get("SeverValue")) + 1;
                            this.values.put("SeverValue", t_clk);
                            Thread.sleep(100);
                        } catch (Exception e) {
                            System.out.println(e);
                        }
                    }
                } while (true);
            } catch (IOException e) {
                e.printStackTrace();
            } finally {
                try {
                    if (client != null) {
                        client.close();
                    }
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
    }
}
/**
 * Toy obfuscation helper: shifts an int by a fixed offset on the wire and
 * shifts it back on receipt.
 */
class Transfer {
    /** Fixed shift applied by {@link #encrypt(int)} and removed by {@link #decrypt(int)}. */
    private static final int SHIFT = 10;

    public static int encrypt(int item) {
        return item + SHIFT;
    }

    public static int decrypt(int item) {
        return item - SHIFT;
    }
}
/**
 * Mutable pair of a clock reading and the offset last computed for it.
 * Fields are public for compatibility with existing callers.
 */
class ClockValues {
    public int clock;
    public int offset;

    public ClockValues(int clock, int offset) {
        this.clock = clock;
        this.offset = offset;
    }

    public int getClock() {
        return clock;
    }

    public void setClock(int clock) {
        this.clock = clock;
    }

    public int getOffset() {
        return offset;
    }

    public void setOffset(int offset) {
        this.offset = offset;
    }

    /** Returns "&lt;clock&gt; &lt;offset&gt;" for debugging/display. */
    public String show() {
        return clock + " " + offset;
    }
}
/**
 * Mutable pair of a receive flag and a sequence number used to order the
 * handler threads' send/receive phases. Fields are public for compatibility.
 */
class SyncStatus {
    public int r_flag;
    public int seq_num;

    public SyncStatus(int r_flag, int seq_num) {
        this.r_flag = r_flag;
        this.seq_num = seq_num;
    }

    public int getRflag() {
        return r_flag;
    }

    public void setRflag(int r_flag) {
        this.r_flag = r_flag;
    }

    public int getSeqNum() {
        return seq_num;
    }

    public void setSeqNum(int seq_num) {
        this.seq_num = seq_num;
    }

    /** Returns "&lt;r_flag&gt; &lt;seq_num&gt;" for debugging/display. */
    public String show() {
        return r_flag + " " + seq_num;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.management.internal.cli.functions;
import java.io.File;
import java.util.Properties;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
import org.apache.geode.cache.Cache;
import org.apache.geode.cache.DiskStore;
import org.apache.geode.cache.EvictionAction;
import org.apache.geode.cache.Region;
import org.apache.geode.cache.asyncqueue.AsyncEventQueue;
import org.apache.geode.cache.execute.FunctionContext;
import org.apache.geode.cache.server.CacheServer;
import org.apache.geode.cache.wan.GatewaySender;
import org.apache.geode.distributed.DistributedMember;
import org.apache.geode.internal.cache.InternalCache;
import org.apache.geode.internal.cache.execute.InternalFunction;
import org.apache.geode.internal.lang.ObjectUtils;
import org.apache.geode.internal.util.ArrayUtils;
import org.apache.geode.logging.internal.log4j.api.LogService;
import org.apache.geode.management.internal.cli.domain.DiskStoreDetails;
import org.apache.geode.management.internal.exceptions.EntityNotFoundException;
/**
* The DescribeDiskStoreFunction class is an implementation of a GemFire Function used to collect
* information and details about a particular disk store for a particular GemFire distributed system
* member.
*
* @see org.apache.geode.cache.DiskStore
* @see org.apache.geode.cache.execute.Function
* @see org.apache.geode.cache.execute.FunctionAdapter
* @see org.apache.geode.cache.execute.FunctionContext
* @see org.apache.geode.internal.InternalEntity
* @see org.apache.geode.management.internal.cli.domain.DiskStoreDetails
* @since GemFire 7.0
*/
public class DescribeDiskStoreFunction implements InternalFunction {
  private static final Logger logger = LogService.getLogger();

  /**
   * Throws an {@link IllegalStateException} built from the given format string and arguments
   * when the condition is false.
   *
   * @param condition the condition asserted to be true
   * @param message a {@link String#format(String, Object...)} pattern for the exception message
   * @param args arguments substituted into the message pattern
   */
  protected static void assertState(final boolean condition, final String message,
      final Object... args) {
    if (!condition) {
      throw new IllegalStateException(String.format(message, args));
    }
  }

  @Override
  public String getId() {
    return getClass().getName();
  }

  /** Intentionally empty; this function uses no initialization properties. */
  @SuppressWarnings("unused")
  public void init(final Properties props) {}

  /**
   * Finds the disk store named by the function arguments on this member and returns a populated
   * {@link DiskStoreDetails} through the result sender. If the disk store is not found on this
   * member, an {@link EntityNotFoundException} is sent instead; any other failure is logged and
   * forwarded to the caller as an exception.
   */
  @Override
  public void execute(final FunctionContext context) {
    Cache cache = context.getCache();
    try {
      if (cache instanceof InternalCache) {
        InternalCache gemfireCache = (InternalCache) cache;
        DistributedMember member = gemfireCache.getMyId();
        String diskStoreName = (String) context.getArguments();
        String memberId = member.getId();
        String memberName = member.getName();
        DiskStore diskStore = gemfireCache.findDiskStore(diskStoreName);
        if (diskStore != null) {
          DiskStoreDetails diskStoreDetails = new DiskStoreDetails(diskStore.getDiskStoreUUID(),
              diskStore.getName(), memberId, memberName);
          // Copy the disk store's scalar configuration attributes.
          diskStoreDetails.setAllowForceCompaction(diskStore.getAllowForceCompaction());
          diskStoreDetails.setAutoCompact(diskStore.getAutoCompact());
          diskStoreDetails.setCompactionThreshold(diskStore.getCompactionThreshold());
          diskStoreDetails.setMaxOplogSize(diskStore.getMaxOplogSize());
          diskStoreDetails.setQueueSize(diskStore.getQueueSize());
          diskStoreDetails.setTimeInterval(diskStore.getTimeInterval());
          diskStoreDetails.setWriteBufferSize(diskStore.getWriteBufferSize());
          diskStoreDetails.setDiskUsageWarningPercentage(diskStore.getDiskUsageWarningPercentage());
          diskStoreDetails
              .setDiskUsageCriticalPercentage(diskStore.getDiskUsageCriticalPercentage());
          // Gather the usages of this disk store throughout the cache.
          setDiskDirDetails(diskStore, diskStoreDetails);
          setRegionDetails(gemfireCache, diskStore, diskStoreDetails);
          setCacheServerDetails(gemfireCache, diskStore, diskStoreDetails);
          setGatewayDetails(gemfireCache, diskStore, diskStoreDetails);
          setPdxSerializationDetails(gemfireCache, diskStore, diskStoreDetails);
          setAsyncEventQueueDetails(gemfireCache, diskStore, diskStoreDetails);
          context.getResultSender().lastResult(diskStoreDetails);
        } else {
          context.getResultSender()
              .sendException(new EntityNotFoundException(
                  String.format("A disk store with name '%1$s' was not found on member '%2$s'.",
                      diskStoreName, memberName)));
        }
      }
    } catch (Exception e) {
      logger.error("Error occurred while executing 'describe disk-store': {}!", e.getMessage(), e);
      context.getResultSender().sendException(e);
    }
  }

  /**
   * Records each disk directory (absolute path and configured size) of the disk store on the
   * details object, first checking that the directory and size arrays are consistent.
   */
  private void setDiskDirDetails(final DiskStore diskStore,
      final DiskStoreDetails diskStoreDetails) {
    File[] diskDirs = diskStore.getDiskDirs();
    Integer[] diskDirSizes = ArrayUtils.toIntegerArray(diskStore.getDiskDirSizes());
    assertState(diskDirs.length == diskDirSizes.length,
        "The number of disk directories with a specified size (%1$d) does not match the number of disk directories (%2$d)!",
        diskDirSizes.length, diskDirs.length);
    for (int index = 0; index < diskDirs.length; index++) {
      diskStoreDetails.add(new DiskStoreDetails.DiskDirDetails(diskDirs[index].getAbsolutePath(),
          ArrayUtils.getElementAtIndex(diskDirSizes, index, 0)));
    }
  }

  /** Returns the region's configured disk store name, defaulting when blank. */
  protected String getDiskStoreName(final Region region) {
    return StringUtils.defaultIfBlank(region.getAttributes().getDiskStoreName(),
        DiskStoreDetails.DEFAULT_DISK_STORE_NAME);
  }

  /** Returns true when the region's eviction action is overflow-to-disk. */
  protected boolean isOverflowToDisk(final Region region) {
    return (region.getAttributes().getEvictionAttributes() != null
        && EvictionAction.OVERFLOW_TO_DISK
            .equals(region.getAttributes().getEvictionAttributes().getAction()));
  }

  /** Returns true when the region's data policy has persistence. */
  protected boolean isPersistent(final Region region) {
    return region.getAttributes().getDataPolicy().withPersistence();
  }

  /**
   * Returns true when the region writes to the given disk store, i.e. it is persistent or
   * overflows to disk and its configured disk store name matches.
   */
  protected boolean isUsingDiskStore(final Region region, final DiskStore diskStore) {
    return ((isPersistent(region) || isOverflowToDisk(region))
        && ObjectUtils.equals(getDiskStoreName(region), diskStore.getName()));
  }

  /** Records details for every root region (and, recursively, its subregions) using the store. */
  protected void setRegionDetails(final InternalCache cache, final DiskStore diskStore,
      final DiskStoreDetails diskStoreDetails) {
    for (Region<?, ?> region : cache.rootRegions()) {
      setRegionDetails(region, diskStore, diskStoreDetails);
    }
  }

  /** Records details for this region if it uses the disk store, then recurses into subregions. */
  private void setRegionDetails(final Region<?, ?> region, final DiskStore diskStore,
      final DiskStoreDetails diskStoreDetails) {
    if (isUsingDiskStore(region, diskStore)) {
      String regionFullPath = region.getFullPath();
      DiskStoreDetails.RegionDetails regionDetails = new DiskStoreDetails.RegionDetails(
          regionFullPath, StringUtils.defaultIfBlank(region.getName(), regionFullPath));
      regionDetails.setOverflowToDisk(isOverflowToDisk(region));
      regionDetails.setPersistent(isPersistent(region));
      diskStoreDetails.add(regionDetails);
    }
    for (Region<?, ?> subregion : region.subregions(false)) {
      setRegionDetails(subregion, diskStore, diskStoreDetails); // depth-first, recursive strategy
    }
  }

  /**
   * Returns the disk store name from the cache server's client subscription config (defaulted
   * when blank), or null when the server has no client subscription config.
   */
  protected String getDiskStoreName(final CacheServer cacheServer) {
    return (cacheServer.getClientSubscriptionConfig() == null ? null
        : StringUtils.defaultIfBlank(cacheServer.getClientSubscriptionConfig().getDiskStoreName(),
            DiskStoreDetails.DEFAULT_DISK_STORE_NAME));
  }

  /** Returns true when the cache server's client subscription queue names the disk store. */
  protected boolean isUsingDiskStore(final CacheServer cacheServer, final DiskStore diskStore) {
    return ObjectUtils.equals(getDiskStoreName(cacheServer), diskStore.getName());
  }

  /** Records bind address, port and host name of each cache server using the disk store. */
  protected void setCacheServerDetails(final InternalCache cache, final DiskStore diskStore,
      final DiskStoreDetails diskStoreDetails) {
    for (CacheServer cacheServer : cache.getCacheServers()) {
      if (isUsingDiskStore(cacheServer, diskStore)) {
        DiskStoreDetails.CacheServerDetails cacheServerDetails =
            new DiskStoreDetails.CacheServerDetails(cacheServer.getBindAddress(),
                cacheServer.getPort());
        cacheServerDetails.setHostName(cacheServer.getHostnameForClients());
        diskStoreDetails.add(cacheServerDetails);
      }
    }
  }

  /** Returns the gateway sender's disk store name, defaulting when blank. */
  protected String getDiskStoreName(final GatewaySender gateway) {
    return StringUtils.defaultIfBlank(gateway.getDiskStoreName(),
        DiskStoreDetails.DEFAULT_DISK_STORE_NAME);
  }

  /** Returns true when the gateway sender has persistence enabled. */
  protected boolean isPersistent(final GatewaySender gateway) {
    return gateway.isPersistenceEnabled();
  }

  /** Returns true when the gateway sender names the given disk store. */
  protected boolean isUsingDiskStore(final GatewaySender gateway, final DiskStore diskStore) {
    return ObjectUtils.equals(getDiskStoreName(gateway), diskStore.getName());
  }

  /** Records the id and persistence flag of each gateway sender using the disk store. */
  protected void setGatewayDetails(final InternalCache cache, final DiskStore diskStore,
      final DiskStoreDetails diskStoreDetails) {
    for (GatewaySender gatewaySender : cache.getGatewaySenders()) {
      if (isUsingDiskStore(gatewaySender, diskStore)) {
        DiskStoreDetails.GatewayDetails gatewayDetails =
            new DiskStoreDetails.GatewayDetails(gatewaySender.getId());
        gatewayDetails.setPersistent(isPersistent(gatewaySender));
        diskStoreDetails.add(gatewayDetails);
      }
    }
  }

  /**
   * When PDX metadata is persisted, marks whether this disk store is the one configured to hold
   * the PDX serialization metadata.
   */
  protected void setPdxSerializationDetails(final InternalCache cache, final DiskStore diskStore,
      final DiskStoreDetails diskStoreDetails) {
    if (cache.getPdxPersistent()) {
      String diskStoreName = StringUtils.defaultIfBlank(cache.getPdxDiskStore(),
          DiskStoreDetails.DEFAULT_DISK_STORE_NAME);
      diskStoreDetails.setPdxSerializationMetaDataStored(
          ObjectUtils.equals(diskStoreName, diskStore.getName()));
    }
  }

  /** Returns the async event queue's disk store name, defaulting when blank. */
  protected String getDiskStoreName(final AsyncEventQueue queue) {
    return StringUtils.defaultIfBlank(queue.getDiskStoreName(),
        DiskStoreDetails.DEFAULT_DISK_STORE_NAME);
  }

  /** Returns true when the queue is persistent and names the given disk store. */
  protected boolean isUsingDiskStore(final AsyncEventQueue queue, final DiskStore diskStore) {
    return (queue.isPersistent()
        && ObjectUtils.equals(getDiskStoreName(queue), diskStore.getName()));
  }

  /** Records the id of each persistent async event queue using the disk store. */
  protected void setAsyncEventQueueDetails(final InternalCache cache, final DiskStore diskStore,
      final DiskStoreDetails diskStoreDetails) {
    for (AsyncEventQueue queue : cache.getAsyncEventQueues()) {
      if (isUsingDiskStore(queue, diskStore)) {
        diskStoreDetails.add(new DiskStoreDetails.AsyncEventQueueDetails(queue.getId()));
      }
    }
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.falcon.recipe.util;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.falcon.entity.v0.Entity;
import org.apache.falcon.entity.v0.EntityType;
import org.apache.falcon.entity.v0.Frequency;
import org.apache.falcon.entity.v0.SchemaHelper;
import org.apache.falcon.entity.v0.process.ACL;
import org.apache.falcon.entity.v0.process.Cluster;
import org.apache.falcon.entity.v0.process.Notification;
import org.apache.falcon.entity.v0.process.PolicyType;
import org.apache.falcon.entity.v0.process.Property;
import org.apache.falcon.entity.v0.process.Retry;
import org.apache.falcon.entity.v0.process.Workflow;
import org.apache.falcon.recipe.RecipeToolOptions;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Unmarshaller;
import javax.xml.bind.ValidationEvent;
import javax.xml.bind.ValidationEventHandler;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.OutputStream;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Recipe builder utility.
*/
public final class RecipeProcessBuilderUtils {
private static final Pattern RECIPE_VAR_PATTERN = Pattern.compile("##[A-Za-z0-9_.]*##");
    private RecipeProcessBuilderUtils() {
        // Utility class: prevent instantiation.
    }
public static String createProcessFromTemplate(final String processTemplateFile, final Properties recipeProperties,
final String processFilename) throws Exception {
org.apache.falcon.entity.v0.process.Process process = bindAttributesInTemplate(
processTemplateFile, recipeProperties);
String recipeProcessFilename = createProcessXmlFile(processFilename, process);
validateProcessXmlFile(recipeProcessFilename);
return recipeProcessFilename;
}
    /**
     * Unmarshals the process template XML and overlays values from the recipe properties onto it:
     * process name, cluster settings, frequency, workflow, retry, notification, ACL, tags and
     * custom properties.
     *
     * @param templateFile path to the process template XML file
     * @param recipeProperties properties supplying the values to bind
     * @return the bound process entity
     * @throws Exception on unmarshalling failures (or IllegalArgumentException for null arguments)
     */
    private static org.apache.falcon.entity.v0.process.Process
    bindAttributesInTemplate(final String templateFile, final Properties recipeProperties)
        throws Exception {
        if (templateFile == null || recipeProperties == null) {
            throw new IllegalArgumentException("Invalid arguments passed");
        }
        Unmarshaller unmarshaller = EntityType.PROCESS.getUnmarshaller();
        // Validation can be skipped for unmarshalling as we want to bind the template with the
        // properties. Validation is handled as part of marshalling.
        unmarshaller.setSchema(null);
        unmarshaller.setEventHandler(new ValidationEventHandler() {
                // Accept every validation event so unresolved placeholders in the
                // template do not abort unmarshalling.
                public boolean handleEvent(ValidationEvent validationEvent) {
                    return true;
                }
            }
        );
        URL processResourceUrl = new File(templateFile).toURI().toURL();
        org.apache.falcon.entity.v0.process.Process process =
            (org.apache.falcon.entity.v0.process.Process) unmarshaller.unmarshal(processResourceUrl);
        /* For optional properties user might directly set them in the process xml and might not set it in properties
           file. Before doing the submission validation is done to confirm process xml doesn't have RECIPE_VAR_PATTERN
         */
        String processName = recipeProperties.getProperty(RecipeToolOptions.RECIPE_NAME.getName());
        if (StringUtils.isNotEmpty(processName)) {
            process.setName(processName);
        }
        // DR process template has only one cluster
        bindClusterProperties(process.getClusters().getClusters().get(0), recipeProperties);
        // bind scheduling properties
        String processFrequency = recipeProperties.getProperty(RecipeToolOptions.PROCESS_FREQUENCY.getName());
        if (StringUtils.isNotEmpty(processFrequency)) {
            process.setFrequency(Frequency.fromString(processFrequency));
        }
        bindWorkflowProperties(process.getWorkflow(), recipeProperties);
        bindRetryProperties(process.getRetry(), recipeProperties);
        bindNotificationProperties(process.getNotification(), recipeProperties);
        bindACLProperties(process.getACL(), recipeProperties);
        bindTagsProperties(process, recipeProperties);
        bindCustomProperties(process.getProperties(), recipeProperties);
        return process;
    }
private static void bindClusterProperties(final Cluster cluster,
final Properties recipeProperties) {
// DR process template has only one cluster
String clusterName = recipeProperties.getProperty(RecipeToolOptions.CLUSTER_NAME.getName());
if (StringUtils.isNotEmpty(clusterName)) {
cluster.setName(clusterName);
}
String clusterStartValidity = recipeProperties.getProperty(RecipeToolOptions.CLUSTER_VALIDITY_START.getName());
if (StringUtils.isNotEmpty(clusterStartValidity)) {
cluster.getValidity().setStart(SchemaHelper.parseDateUTC(clusterStartValidity));
}
String clusterEndValidity = recipeProperties.getProperty(RecipeToolOptions.CLUSTER_VALIDITY_END.getName());
if (StringUtils.isNotEmpty(clusterEndValidity)) {
cluster.getValidity().setEnd(SchemaHelper.parseDateUTC(clusterEndValidity));
}
}
private static void bindWorkflowProperties(final Workflow wf,
final Properties recipeProperties) {
String wfName = recipeProperties.getProperty(RecipeToolOptions.WORKFLOW_NAME.getName());
if (StringUtils.isNotEmpty(wfName)) {
wf.setName(wfName);
}
String wfLibPath = recipeProperties.getProperty(RecipeToolOptions.WORKFLOW_LIB_PATH.getName());
if (StringUtils.isNotEmpty(wfLibPath)) {
wf.setLib(wfLibPath);
} else if (wf.getLib().startsWith("##")) {
wf.setLib("");
}
String wfPath = recipeProperties.getProperty(RecipeToolOptions.WORKFLOW_PATH.getName());
if (StringUtils.isNotEmpty(wfPath)) {
wf.setPath(wfPath);
}
}
private static void bindRetryProperties(final Retry processRetry,
final Properties recipeProperties) {
String retryPolicy = recipeProperties.getProperty(RecipeToolOptions.RETRY_POLICY.getName());
if (StringUtils.isNotEmpty(retryPolicy)) {
processRetry.setPolicy(PolicyType.fromValue(retryPolicy));
}
String retryAttempts = recipeProperties.getProperty(RecipeToolOptions.RETRY_ATTEMPTS.getName());
if (StringUtils.isNotEmpty(retryAttempts)) {
processRetry.setAttempts(Integer.parseInt(retryAttempts));
}
String retryDelay = recipeProperties.getProperty(RecipeToolOptions.RETRY_DELAY.getName());
if (StringUtils.isNotEmpty(retryDelay)) {
processRetry.setDelay(Frequency.fromString(retryDelay));
}
}
private static void bindNotificationProperties(final Notification processNotification,
final Properties recipeProperties) {
processNotification.setType(recipeProperties.getProperty(
RecipeToolOptions.RECIPE_NOTIFICATION_TYPE.getName()));
String notificationAddress = recipeProperties.getProperty(
RecipeToolOptions.RECIPE_NOTIFICATION_ADDRESS.getName());
if (StringUtils.isNotBlank(notificationAddress)) {
processNotification.setTo(notificationAddress);
} else {
processNotification.setTo("NA");
}
}
private static void bindACLProperties(final ACL acl,
final Properties recipeProperties) {
String aclowner = recipeProperties.getProperty(RecipeToolOptions.RECIPE_ACL_OWNER.getName());
if (StringUtils.isNotEmpty(aclowner)) {
acl.setOwner(aclowner);
}
String aclGroup = recipeProperties.getProperty(RecipeToolOptions.RECIPE_ACL_GROUP.getName());
if (StringUtils.isNotEmpty(aclGroup)) {
acl.setGroup(aclGroup);
}
String aclPermission = recipeProperties.getProperty(RecipeToolOptions.RECIPE_ACL_PERMISSION.getName());
if (StringUtils.isNotEmpty(aclPermission)) {
acl.setPermission(aclPermission);
}
}
private static void bindTagsProperties(final org.apache.falcon.entity.v0.process.Process process,
final Properties recipeProperties) {
String falconSystemTags = process.getTags();
String tags = recipeProperties.getProperty(RecipeToolOptions.RECIPE_TAGS.getName());
if (StringUtils.isNotEmpty(tags)) {
if (StringUtils.isNotEmpty(falconSystemTags)) {
tags += ", " + falconSystemTags;
}
process.setTags(tags);
}
}
private static void bindCustomProperties(final org.apache.falcon.entity.v0.process.Properties customProperties,
final Properties recipeProperties) {
List<Property> propertyList = new ArrayList<>();
for (Map.Entry<Object, Object> recipeProperty : recipeProperties.entrySet()) {
if (RecipeToolOptions.OPTIONSMAP.get(recipeProperty.getKey().toString()) == null) {
addProperty(propertyList, (String) recipeProperty.getKey(), (String) recipeProperty.getValue());
}
}
customProperties.getProperties().addAll(propertyList);
}
private static void addProperty(List<Property> propertyList, String name, String value) {
Property prop = new Property();
prop.setName(name);
prop.setValue(value);
propertyList.add(prop);
}
private static String createProcessXmlFile(final String outFilename,
final Entity entity) throws Exception {
if (outFilename == null || entity == null) {
throw new IllegalArgumentException("Invalid arguments passed");
}
EntityType type = EntityType.PROCESS;
OutputStream out = null;
try {
out = new FileOutputStream(outFilename);
type.getMarshaller().marshal(entity, out);
} catch (JAXBException e) {
throw new Exception("Unable to serialize the entity object " + type + "/" + entity.getName(), e);
} finally {
IOUtils.closeQuietly(out);
}
return outFilename;
}
private static void validateProcessXmlFile(final String processFileName) throws Exception {
if (processFileName == null) {
throw new IllegalArgumentException("Invalid arguments passed");
}
String line;
BufferedReader reader = null;
try {
reader = new BufferedReader(new FileReader(processFileName));
while ((line = reader.readLine()) != null) {
Matcher matcher = RECIPE_VAR_PATTERN.matcher(line);
if (matcher.find()) {
String variable = line.substring(matcher.start(), matcher.end());
throw new Exception("Match not found for the template: " + variable
+ " in recipe template file. Please add it in recipe properties file");
}
}
} finally {
IOUtils.closeQuietly(reader);
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.wicket.pageStore;
import java.io.Serializable;
import java.lang.ref.SoftReference;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.apache.wicket.page.IManageablePage;
import org.apache.wicket.serialize.ISerializer;
import org.apache.wicket.util.lang.Args;
import org.apache.wicket.util.lang.Objects;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* The {@link IPageStore} that converts {@link IManageablePage} instances to {@link SerializedPage}s
* before passing them to the {@link IDataStore} to store them and the same in the opposite
* direction when loading {@link SerializedPage} from the data store.
*
*/
/**
 * The {@link IPageStore} that converts {@link IManageablePage} instances to {@link SerializedPage}s
 * before passing them to the {@link IDataStore} to store them and the same in the opposite
 * direction when loading {@link SerializedPage} from the data store.
 *
 */
public class DefaultPageStore implements IPageStore
{
	private static final Logger LOG = LoggerFactory.getLogger(DefaultPageStore.class);

	/** In-memory cache of recently serialized pages, avoiding repeated serialization of a page. */
	private final SerializedPagesCache serializedPagesCache;

	/** The backing store that actually persists serialized page bytes. */
	private final IDataStore pageDataStore;

	/**
	 * The {@link ISerializer} that will be used to convert pages from/to byte arrays
	 */
	private final ISerializer pageSerializer;

	/**
	 * Construct.
	 *
	 * @param pageSerializer
	 *            the {@link ISerializer} that will be used to convert pages from/to byte arrays
	 * @param dataStore
	 *            the {@link IDataStore} that actually stores the pages
	 * @param cacheSize
	 *            the number of pages to cache in memory before passing them to
	 *            {@link IDataStore#storeData(String, int, byte[])}
	 */
	public DefaultPageStore(final ISerializer pageSerializer, final IDataStore dataStore,
		final int cacheSize)
	{
		Args.notNull(pageSerializer, "pageSerializer");
		Args.notNull(dataStore, "DataStore");
		this.pageSerializer = pageSerializer;
		pageDataStore = dataStore;
		serializedPagesCache = new SerializedPagesCache(cacheSize);
	}

	/**
	 * @see org.apache.wicket.pageStore.IPageStore#destroy()
	 */
	@Override
	public void destroy()
	{
		pageDataStore.destroy();
	}

	/**
	 * @param sessionId
	 * @param pageId
	 * @return page data
	 * @see IDataStore#getData(String, int)
	 */
	protected byte[] getPageData(final String sessionId, final int pageId)
	{
		return pageDataStore.getData(sessionId, pageId);
	}

	/**
	 * @param sessionId
	 * @param pageId
	 * @see IDataStore#removeData(String, int)
	 */
	protected void removePageData(final String sessionId, final int pageId)
	{
		pageDataStore.removeData(sessionId, pageId);
	}

	/**
	 * @param sessionId
	 * @see IDataStore#removeData(String)
	 */
	protected void removePageData(final String sessionId)
	{
		pageDataStore.removeData(sessionId);
	}

	/**
	 * @param sessionId
	 * @param pageId
	 * @param data
	 * @see IDataStore#storeData(String, int, byte[])
	 */
	protected void storePageData(final String sessionId, final int pageId, final byte[] data)
	{
		pageDataStore.storeData(sessionId, pageId, data);
	}

	/**
	 * Loads a page: first tries the in-memory serialized-pages cache, then falls back to the
	 * backing data store; returns {@code null} when neither has the page's bytes.
	 */
	@Override
	public IManageablePage getPage(final String sessionId, final int id)
	{
		SerializedPage fromCache = serializedPagesCache.getPage(sessionId, id);
		// a cached entry may carry a null data slot (stripped page) - only use it if bytes exist
		if (fromCache != null && fromCache.data != null)
		{
			return deserializePage(fromCache.data);
		}
		byte[] data = getPageData(sessionId, id);
		if (data != null)
		{
			return deserializePage(data);
		}
		return null;
	}

	/** Removes the page both from the in-memory cache and from the backing data store. */
	@Override
	public void removePage(final String sessionId, final int id)
	{
		serializedPagesCache.removePage(sessionId, id);
		removePageData(sessionId, id);
	}

	/**
	 * Serializes the page and, when serialization succeeds, stores the result in both the
	 * in-memory cache and the backing data store. A page that fails to serialize is dropped
	 * (a warning is logged by {@link #serializePage(String, IManageablePage)}).
	 */
	@Override
	public void storePage(final String sessionId, final IManageablePage page)
	{
		SerializedPage serialized = serializePage(sessionId, page);
		if (serialized != null)
		{
			serializedPagesCache.storePage(serialized);
			storePageData(sessionId, serialized.getPageId(), serialized.getData());
		}
	}

	/** Drops all page data for the session from both the data store and the cache. */
	@Override
	public void unbind(final String sessionId)
	{
		removePageData(sessionId);
		serializedPagesCache.removePages(sessionId);
	}

	/**
	 * Converts a stored object back to a page: pages pass through unchanged; a
	 * {@link SerializedPage} is deserialized, reloading its bytes from the data store if its
	 * data slot was stripped. Any other type is rejected.
	 */
	@Override
	public IManageablePage convertToPage(final Object object)
	{
		if (object == null)
		{
			return null;
		}
		else if (object instanceof IManageablePage)
		{
			return (IManageablePage)object;
		}
		else if (object instanceof SerializedPage)
		{
			SerializedPage page = (SerializedPage)object;
			byte data[] = page.getData();
			if (data == null)
			{
				// data was stripped before session replication - reload from the data store
				data = getPageData(page.getSessionId(), page.getPageId());
			}
			if (data != null)
			{
				return deserializePage(data);
			}
			return null;
		}

		String type = object.getClass().getName();
		throw new IllegalArgumentException("Unknown object type " + type);
	}

	/**
	 * Reloads the {@link SerializedPage} from the backing {@link IDataStore} if the
	 * {@link SerializedPage#data} is stripped earlier
	 *
	 * @param serializedPage
	 *            the {@link SerializedPage} with empty {@link SerializedPage#data} slot
	 * @return the fully functional {@link SerializedPage}
	 */
	private SerializedPage restoreStrippedSerializedPage(final SerializedPage serializedPage)
	{
		SerializedPage result = serializedPagesCache.getPage(serializedPage.getSessionId(),
			serializedPage.getPageId());
		if (result != null)
		{
			return result;
		}

		byte data[] = getPageData(serializedPage.getSessionId(), serializedPage.getPageId());
		return new SerializedPage(serializedPage.getSessionId(), serializedPage.getPageId(), data);
	}

	/**
	 * Prepares a session attribute for serialization. Returns {@code null} when the data store
	 * is replicated (the store itself distributes pages, so nothing should go into the session),
	 * a {@link SerializedPage} when the object is a page or stripped serialized page, or the
	 * object unchanged otherwise.
	 */
	@Override
	public Serializable prepareForSerialization(final String sessionId, final Object object)
	{
		if (pageDataStore.isReplicated())
		{
			return null;
		}

		SerializedPage result = null;

		if (object instanceof IManageablePage)
		{
			IManageablePage page = (IManageablePage)object;
			// reuse bytes from an earlier serialization of this page if available
			result = serializedPagesCache.getPage(sessionId, page.getPageId());
			if (result == null)
			{
				result = serializePage(sessionId, page);
				if (result != null)
				{
					serializedPagesCache.storePage(result);
				}
			}
		}
		else if (object instanceof SerializedPage)
		{
			SerializedPage page = (SerializedPage)object;
			if (page.getData() == null)
			{
				result = restoreStrippedSerializedPage(page);
			}
			else
			{
				result = page;
			}
		}

		if (result != null)
		{
			return result;
		}
		return (Serializable)object;
	}

	/**
	 *
	 * @return Always true for this implementation
	 */
	protected boolean storeAfterSessionReplication()
	{
		return true;
	}

	/**
	 * Restores a deserialized session attribute. When {@link #storeAfterSessionReplication()} is
	 * true, a {@link SerializedPage} that still carries bytes is persisted to the data store and
	 * returned with its data slot stripped to keep the session small.
	 */
	@Override
	public Object restoreAfterSerialization(final Serializable serializable)
	{
		if (serializable == null)
		{
			return null;
		}
		else if (!storeAfterSessionReplication() || serializable instanceof IManageablePage)
		{
			return serializable;
		}
		else if (serializable instanceof SerializedPage)
		{
			SerializedPage page = (SerializedPage)serializable;
			if (page.getData() != null)
			{
				storePageData(page.getSessionId(), page.getPageId(), page.getData());
				// return a stripped copy: bytes now live in the data store
				return new SerializedPage(page.getSessionId(), page.getPageId(), null);
			}
			return page;
		}

		String type = serializable.getClass().getName();
		throw new IllegalArgumentException("Unknown object type " + type);
	}

	/**
	 * A representation of {@link IManageablePage} that knows additionally the id of the http
	 * session in which this {@link IManageablePage} instance is used. The {@link #sessionId} and
	 * {@link #pageId} are used for better clustering in the {@link IDataStore} structures.
	 */
	protected static class SerializedPage implements Serializable
	{
		private static final long serialVersionUID = 1L;

		/**
		 * The id of the serialized {@link IManageablePage}
		 */
		private final int pageId;

		/**
		 * The id of the http session in which the serialized {@link IManageablePage} is used.
		 */
		private final String sessionId;

		/**
		 * The serialized {@link IManageablePage}. May be null when the payload was stripped and
		 * persisted in the data store instead.
		 */
		private final byte[] data;

		public SerializedPage(String sessionId, int pageId, byte[] data)
		{
			this.pageId = pageId;
			this.sessionId = sessionId;
			this.data = data;
		}

		public byte[] getData()
		{
			return data;
		}

		public int getPageId()
		{
			return pageId;
		}

		public String getSessionId()
		{
			return sessionId;
		}

		/**
		 * Identity is (pageId, sessionId) only — {@link #data} is deliberately excluded so a
		 * stripped page equals its fully loaded counterpart.
		 */
		@Override
		public boolean equals(Object obj)
		{
			if (this == obj)
			{
				return true;
			}
			if ((obj instanceof SerializedPage) == false)
			{
				return false;
			}
			SerializedPage rhs = (SerializedPage)obj;
			return Objects.equal(getPageId(), rhs.getPageId()) &&
				Objects.equal(getSessionId(), rhs.getSessionId());
		}

		@Override
		public int hashCode()
		{
			return Objects.hashCode(getPageId(), getSessionId());
		}
	}

	/**
	 *
	 * @param sessionId
	 * @param page
	 * @return the serialized page information, or {@code null} when the serializer failed
	 */
	protected SerializedPage serializePage(final String sessionId, final IManageablePage page)
	{
		Args.notNull(sessionId, "sessionId");
		Args.notNull(page, "page");

		SerializedPage serializedPage = null;
		byte[] data = pageSerializer.serialize(page);
		if (data != null)
		{
			serializedPage = new SerializedPage(sessionId, page.getPageId(), data);
		}
		else if (LOG.isWarnEnabled())
		{
			LOG.warn("Page {} cannot be serialized. See previous logs for possible reasons.", page);
		}
		return serializedPage;
	}

	/**
	 *
	 * @param data
	 * @return page data deserialized
	 */
	protected IManageablePage deserializePage(final byte[] data)
	{
		IManageablePage page = (IManageablePage)pageSerializer.deserialize(data);
		return page;
	}

	/**
	 * Cache that stores serialized pages. This is important to make sure that a single page is not
	 * serialized twice or more when not necessary.
	 * <p>
	 * For example a page is serialized during request, but it might be also later serialized on
	 * session replication. The purpose of this cache is to make sure that the data obtained from
	 * first serialization is reused on second serialization.
	 * <p>
	 * Entries are held via {@link SoftReference}s, so the JVM may reclaim them under memory
	 * pressure; callers must cope with lookups returning {@code null}. All access to the backing
	 * list is synchronized on the list itself.
	 *
	 * @author Matej Knopp
	 */
	static class SerializedPagesCache
	{
		/** Maximum number of entries; a size of 0 disables the cache entirely. */
		private final int size;

		/** MRU-ordered list: index 0 is the oldest entry, the last index the most recent. */
		private final List<SoftReference<SerializedPage>> cache;

		/**
		 * Construct.
		 *
		 * @param size
		 */
		public SerializedPagesCache(final int size)
		{
			this.size = size;
			cache = new ArrayList<SoftReference<SerializedPage>>(size);
		}

		/**
		 *
		 * @param sessionId
		 * @param id
		 * @return the removed {@link SerializedPage} or <code>null</code> - otherwise
		 */
		public SerializedPage removePage(final String sessionId, final int id)
		{
			Args.notNull(sessionId, "sessionId");

			if (size > 0)
			{
				synchronized (cache)
				{
					for (Iterator<SoftReference<SerializedPage>> i = cache.iterator(); i.hasNext();)
					{
						SoftReference<SerializedPage> ref = i.next();
						SerializedPage entry = ref.get();
						// entry may be null if the SoftReference was cleared by the GC
						if (entry != null && entry.getPageId() == id &&
							entry.getSessionId().equals(sessionId))
						{
							i.remove();
							return entry;
						}
					}
				}
			}
			return null;
		}

		/**
		 * Removes all {@link SerializedPage}s for the session with <code>sessionId</code> from the
		 * cache.
		 *
		 * @param sessionId
		 */
		public void removePages(String sessionId)
		{
			Args.notNull(sessionId, "sessionId");

			if (size > 0)
			{
				synchronized (cache)
				{
					for (Iterator<SoftReference<SerializedPage>> i = cache.iterator(); i.hasNext();)
					{
						SoftReference<SerializedPage> ref = i.next();
						SerializedPage entry = ref.get();
						if (entry != null && entry.getSessionId().equals(sessionId))
						{
							i.remove();
						}
					}
				}
			}
		}

		/**
		 * Returns a {@link SerializedPage} by looking it up by <code>sessionId</code> and
		 * <code>pageId</code>. If there is a match then it is <i>touched</i>, i.e. it is moved at
		 * the top of the cache.
		 *
		 * @param sessionId
		 * @param pageId
		 * @return the found serialized page or <code>null</code> when not found
		 */
		public SerializedPage getPage(String sessionId, int pageId)
		{
			Args.notNull(sessionId, "sessionId");

			SerializedPage result = null;
			if (size > 0)
			{
				synchronized (cache)
				{
					for (Iterator<SoftReference<SerializedPage>> i = cache.iterator(); i.hasNext();)
					{
						SoftReference<SerializedPage> ref = i.next();
						SerializedPage entry = ref.get();
						if (entry != null && entry.getPageId() == pageId &&
							entry.getSessionId().equals(sessionId))
						{
							i.remove();
							result = entry;
							break;
						}
					}

					if (result != null)
					{
						// move to top
						storePage(result);
					}
				}
			}
			return result;
		}

		/**
		 * Store the serialized page in cache. Any existing entry equal to the page is replaced,
		 * the page is appended as the most recent entry, and the oldest entry is evicted when the
		 * cache exceeds its configured size.
		 *
		 * @param page
		 *            the data to serialize (page id, session id, bytes)
		 */
		void storePage(SerializedPage page)
		{
			SoftReference<SerializedPage> ref = new SoftReference<SerializedPage>(page);

			if (size > 0)
			{
				synchronized (cache)
				{
					// drop a stale entry for the same (pageId, sessionId) before re-adding
					for (Iterator<SoftReference<SerializedPage>> i = cache.iterator(); i.hasNext();)
					{
						SoftReference<SerializedPage> r = i.next();
						SerializedPage entry = r.get();
						if (entry != null && entry.equals(page))
						{
							i.remove();
							break;
						}
					}

					cache.add(ref);

					if (cache.size() > size)
					{
						// evict the least recently used entry
						cache.remove(0);
					}
				}
			}
		}
	}
}
| |
package psidev.psi.mi.jami.xml.model.extension.binary.xml25;
import psidev.psi.mi.jami.binary.BinaryInteractionEvidence;
import psidev.psi.mi.jami.model.*;
import psidev.psi.mi.jami.utils.CvTermUtils;
import psidev.psi.mi.jami.utils.XrefUtils;
import psidev.psi.mi.jami.utils.collection.AbstractListHavingProperties;
import psidev.psi.mi.jami.xml.model.extension.AbstractAvailability;
import psidev.psi.mi.jami.xml.model.extension.ExtendedPsiXmlExperiment;
import psidev.psi.mi.jami.xml.model.extension.ExtendedPsiXmlInteractionEvidence;
import psidev.psi.mi.jami.xml.model.extension.XmlXref;
import psidev.psi.mi.jami.xml.model.extension.binary.AbstractExtendedXmlBinaryInteraction;
import javax.xml.bind.annotation.XmlTransient;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
/**
* Xml implementation of BinaryInteractionEvidence
*
* @author Marine Dumousseau (marine@ebi.ac.uk)
* @version $Id$
* @since <pre>16/10/13</pre>
*/
@XmlTransient
public class XmlBinaryInteractionEvidence extends AbstractExtendedXmlBinaryInteraction<ParticipantEvidence> implements BinaryInteractionEvidence,
        ExtendedPsiXmlInteractionEvidence {

    // Cached imex-primary xref; kept in sync with the xref list via the
    // processAdded/RemovedXrefEvent callbacks below.
    private Xref imexId;
    private String availability;
    private Collection<Parameter> parameters;
    private boolean isInferred = false;
    private Collection<Confidence> confidences;
    private boolean isNegative;
    private Collection<VariableParameterValueSet> variableParameterValueSets;
    private AbstractAvailability xmlAvailability;
    // Tri-state: null means "not specified" and is reported as false by isModelled().
    private Boolean isModelled;
    private List<Experiment> experiments;
    private List<ExtendedPsiXmlExperiment> originalExperiments;

    /**
     * <p>Constructor for XmlBinaryInteractionEvidence.</p>
     */
    public XmlBinaryInteractionEvidence() {
    }

    /**
     * <p>Constructor for XmlBinaryInteractionEvidence.</p>
     *
     * @param shortName a {@link java.lang.String} object.
     */
    public XmlBinaryInteractionEvidence(String shortName) {
        super(shortName);
    }

    /**
     * <p>Constructor for XmlBinaryInteractionEvidence.</p>
     *
     * @param shortName a {@link java.lang.String} object.
     * @param type a {@link psidev.psi.mi.jami.model.CvTerm} object.
     */
    public XmlBinaryInteractionEvidence(String shortName, CvTerm type) {
        super(shortName, type);
    }

    /**
     * <p>Constructor for XmlBinaryInteractionEvidence.</p>
     *
     * @param participantA a {@link psidev.psi.mi.jami.model.ParticipantEvidence} object.
     * @param participantB a {@link psidev.psi.mi.jami.model.ParticipantEvidence} object.
     */
    public XmlBinaryInteractionEvidence(ParticipantEvidence participantA, ParticipantEvidence participantB) {
        super(participantA, participantB);
    }

    /**
     * <p>Constructor for XmlBinaryInteractionEvidence.</p>
     *
     * @param shortName a {@link java.lang.String} object.
     * @param participantA a {@link psidev.psi.mi.jami.model.ParticipantEvidence} object.
     * @param participantB a {@link psidev.psi.mi.jami.model.ParticipantEvidence} object.
     */
    public XmlBinaryInteractionEvidence(String shortName, ParticipantEvidence participantA, ParticipantEvidence participantB) {
        super(shortName, participantA, participantB);
    }

    /**
     * <p>Constructor for XmlBinaryInteractionEvidence.</p>
     *
     * @param shortName a {@link java.lang.String} object.
     * @param type a {@link psidev.psi.mi.jami.model.CvTerm} object.
     * @param participantA a {@link psidev.psi.mi.jami.model.ParticipantEvidence} object.
     * @param participantB a {@link psidev.psi.mi.jami.model.ParticipantEvidence} object.
     */
    public XmlBinaryInteractionEvidence(String shortName, CvTerm type, ParticipantEvidence participantA, ParticipantEvidence participantB) {
        super(shortName, type, participantA, participantB);
    }

    /**
     * <p>Constructor for XmlBinaryInteractionEvidence.</p>
     *
     * @param complexExpansion a {@link psidev.psi.mi.jami.model.CvTerm} object.
     */
    public XmlBinaryInteractionEvidence(CvTerm complexExpansion) {
        super(complexExpansion);
    }

    /**
     * <p>Constructor for XmlBinaryInteractionEvidence.</p>
     *
     * @param shortName a {@link java.lang.String} object.
     * @param type a {@link psidev.psi.mi.jami.model.CvTerm} object.
     * @param complexExpansion a {@link psidev.psi.mi.jami.model.CvTerm} object.
     */
    public XmlBinaryInteractionEvidence(String shortName, CvTerm type, CvTerm complexExpansion) {
        super(shortName, type, complexExpansion);
    }

    /**
     * <p>Constructor for XmlBinaryInteractionEvidence.</p>
     *
     * @param participantA a {@link psidev.psi.mi.jami.model.ParticipantEvidence} object.
     * @param participantB a {@link psidev.psi.mi.jami.model.ParticipantEvidence} object.
     * @param complexExpansion a {@link psidev.psi.mi.jami.model.CvTerm} object.
     */
    public XmlBinaryInteractionEvidence(ParticipantEvidence participantA, ParticipantEvidence participantB, CvTerm complexExpansion) {
        super(participantA, participantB, complexExpansion);
    }

    /**
     * <p>Constructor for XmlBinaryInteractionEvidence.</p>
     *
     * @param shortName a {@link java.lang.String} object.
     * @param participantA a {@link psidev.psi.mi.jami.model.ParticipantEvidence} object.
     * @param participantB a {@link psidev.psi.mi.jami.model.ParticipantEvidence} object.
     * @param complexExpansion a {@link psidev.psi.mi.jami.model.CvTerm} object.
     */
    public XmlBinaryInteractionEvidence(String shortName, ParticipantEvidence participantA, ParticipantEvidence participantB, CvTerm complexExpansion) {
        super(shortName, participantA, participantB, complexExpansion);
    }

    /**
     * <p>Constructor for XmlBinaryInteractionEvidence.</p>
     *
     * @param shortName a {@link java.lang.String} object.
     * @param type a {@link psidev.psi.mi.jami.model.CvTerm} object.
     * @param participantA a {@link psidev.psi.mi.jami.model.ParticipantEvidence} object.
     * @param participantB a {@link psidev.psi.mi.jami.model.ParticipantEvidence} object.
     * @param complexExpansion a {@link psidev.psi.mi.jami.model.CvTerm} object.
     */
    public XmlBinaryInteractionEvidence(String shortName, CvTerm type, ParticipantEvidence participantA, ParticipantEvidence participantB, CvTerm complexExpansion) {
        super(shortName, type, participantA, participantB, complexExpansion);
    }

    /**
     * <p>initialiseExperimentalConfidences.</p>
     * Lazily creates the confidences collection on first access.
     */
    protected void initialiseExperimentalConfidences(){
        this.confidences = new ArrayList<Confidence>();
    }

    /**
     * <p>initialiseVariableParameterValueSets.</p>
     * Lazily creates the variable parameter value sets collection on first access.
     */
    protected void initialiseVariableParameterValueSets(){
        this.variableParameterValueSets = new ArrayList<VariableParameterValueSet>();
    }

    /**
     * <p>initialiseExperimentalParameters.</p>
     * Lazily creates the parameters collection on first access.
     */
    protected void initialiseExperimentalParameters(){
        this.parameters = new ArrayList<Parameter>();
    }

    /** {@inheritDoc} */
    @Override
    protected void initialiseXrefs() {
        // use the event-aware list so imexId stays in sync with xref additions/removals
        initialiseXrefsWith(new ExperimentalBinaryInteractionXrefList());
    }

    /**
     * <p>Getter for the field <code>imexId</code>.</p>
     *
     * @return the id of the cached imex xref, or null if no imex-primary xref is set.
     */
    public String getImexId() {
        return this.imexId != null ? this.imexId.getId() : null;
    }

    /** {@inheritDoc} */
    public void assignImexId(String identifier) {
        // add new imex if not null
        if (identifier != null){
            ExperimentalBinaryInteractionXrefList interactionXrefs = (ExperimentalBinaryInteractionXrefList) getXrefs();
            CvTerm imexDatabase = CvTermUtils.createImexDatabase();
            CvTerm imexPrimaryQualifier = CvTermUtils.createImexPrimaryQualifier();
            // first remove the old imex xref if not null
            if (this.imexId != null){
                interactionXrefs.removeOnly(this.imexId);
            }
            this.imexId = new XmlXref(imexDatabase, identifier, imexPrimaryQualifier);
            // addOnly/removeOnly bypass the processAdded/RemovedXrefEvent callbacks
            interactionXrefs.addOnly(this.imexId);
        }
        else {
            throw new IllegalArgumentException("The imex id has to be non null.");
        }
    }

    /**
     * <p>getExperiment.</p>
     *
     * @return the first experiment in the experiment list, or null when there is none.
     */
    public Experiment getExperiment() {
        if (getExperiments().isEmpty()){
            return null;
        }
        return getExperiments().iterator().next();
    }

    /** {@inheritDoc} */
    public void setExperiment(Experiment experiment) {
        // replaces (or removes, when null) the first experiment of the list
        if (experiment != null){
            if (!getExperiments().isEmpty()){
                getExperiments().remove(0);
            }
            getExperiments().add(0, experiment);
        }
        else{
            if (!getExperiments().isEmpty()){
                this.getExperiments().remove(0);
            }
        }
    }

    /** {@inheritDoc} */
    public void setExperimentAndAddInteractionEvidence(Experiment experiment) {
        Experiment current = getExperiment();
        if (current != null){
            current.removeInteractionEvidence(this);
        }
        if (experiment != null){
            // NOTE(review): relies on Experiment.addInteractionEvidence to set the
            // experiment back-reference on this interaction - confirm in Experiment impl
            experiment.addInteractionEvidence(this);
        }
    }

    /**
     * <p>getVariableParameterValues.</p>
     *
     * @return a {@link java.util.Collection} object (never null; lazily initialised).
     */
    public Collection<VariableParameterValueSet> getVariableParameterValues() {
        if (variableParameterValueSets == null){
            initialiseVariableParameterValueSets();
        }
        return this.variableParameterValueSets;
    }

    /**
     * <p>Getter for the field <code>confidences</code>.</p>
     *
     * @return a {@link java.util.Collection} object (never null; lazily initialised).
     */
    public Collection<Confidence> getConfidences() {
        if (confidences == null){
            initialiseExperimentalConfidences();
        }
        return this.confidences;
    }

    /**
     * <p>Getter for the field <code>availability</code>.</p>
     *
     * @return a {@link java.lang.String} object.
     */
    public String getAvailability() {
        return this.availability;
    }

    /** {@inheritDoc} */
    public void setAvailability(String availability) {
        this.availability = availability;
    }

    /**
     * <p>isNegative.</p>
     *
     * @return a boolean.
     */
    public boolean isNegative() {
        return this.isNegative;
    }

    /** {@inheritDoc} */
    public void setNegative(boolean negative) {
        this.isNegative = negative;
    }

    /**
     * <p>Getter for the field <code>parameters</code>.</p>
     *
     * @return a {@link java.util.Collection} object (never null; lazily initialised).
     */
    public Collection<Parameter> getParameters() {
        if (parameters == null){
            initialiseExperimentalParameters();
        }
        return this.parameters;
    }

    /**
     * <p>isInferred.</p>
     *
     * @return a boolean.
     */
    public boolean isInferred() {
        return this.isInferred;
    }

    /** {@inheritDoc} */
    public void setInferred(boolean inferred) {
        this.isInferred = inferred;
    }

    /**
     * <p>processAddedXrefEvent.</p>
     * Caches the added xref as the imex id when it is an imex-primary xref and
     * no imex id is cached yet.
     *
     * @param added a {@link psidev.psi.mi.jami.model.Xref} object.
     */
    protected void processAddedXrefEvent(Xref added) {
        // the added identifier is imex and the current imex is not set
        if (imexId == null && XrefUtils.isXrefFromDatabase(added, Xref.IMEX_MI, Xref.IMEX)){
            // the added xref is imex-primary
            if (XrefUtils.doesXrefHaveQualifier(added, Xref.IMEX_PRIMARY_MI, Xref.IMEX_PRIMARY)){
                imexId = added;
            }
        }
    }

    /**
     * <p>processRemovedXrefEvent.</p>
     * Clears the cached imex id when the corresponding xref is removed.
     *
     * @param removed a {@link psidev.psi.mi.jami.model.Xref} object.
     */
    protected void processRemovedXrefEvent(Xref removed) {
        // the removed xref is the cached imex id
        if (imexId != null && imexId.equals(removed)){
            imexId = null;
        }
    }

    /**
     * <p>clearPropertiesLinkedToXrefs.</p>
     * Invoked when the xref list is cleared; drops the cached imex id.
     */
    protected void clearPropertiesLinkedToXrefs() {
        imexId = null;
    }

    /** {@inheritDoc} */
    @Override
    public String toString() {
        return imexId != null ? imexId.getId() : super.toString();
    }

    /** {@inheritDoc} */
    @Override
    public AbstractAvailability getXmlAvailability() {
        return this.xmlAvailability;
    }

    /** {@inheritDoc} */
    @Override
    public void setXmlAvailability(AbstractAvailability availability) {
        this.xmlAvailability = availability;
    }

    /** {@inheritDoc} */
    @Override
    public boolean isModelled() {
        // null (never set) is reported as false
        return isModelled != null ? isModelled : false;
    }

    /** {@inheritDoc} */
    @Override
    public void setModelled(boolean modelled) {
        isModelled = modelled;
    }

    /** {@inheritDoc} */
    @Override
    public List<Experiment> getExperiments() {
        if (experiments == null){
            experiments = new ArrayList<Experiment>();
        }
        return experiments;
    }

    /** {@inheritDoc} */
    @Override
    public List<ExtendedPsiXmlExperiment> getOriginalExperiments() {
        if (this.originalExperiments == null){
            this.originalExperiments = new ArrayList<ExtendedPsiXmlExperiment>();
        }
        return originalExperiments;
    }

    /**
     * Experimental interaction Xref list. Forwards add/remove/clear events to the
     * enclosing instance so the cached imex id stays consistent with the list.
     */
    private class ExperimentalBinaryInteractionXrefList extends AbstractListHavingProperties<Xref> {
        public ExperimentalBinaryInteractionXrefList(){
            super();
        }

        @Override
        protected void processAddedObjectEvent(Xref added) {
            processAddedXrefEvent(added);
        }

        @Override
        protected void processRemovedObjectEvent(Xref removed) {
            processRemovedXrefEvent(removed);
        }

        @Override
        protected void clearProperties() {
            clearPropertiesLinkedToXrefs();
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.feeds;
import java.rmi.RemoteException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.commons.lang3.StringUtils;
import org.apache.asterix.api.common.FeedWorkCollection.SubscribeFeedWork;
import org.apache.asterix.common.exceptions.ACIDException;
import org.apache.asterix.common.feeds.FeedActivity;
import org.apache.asterix.common.feeds.FeedConnectJobInfo;
import org.apache.asterix.common.feeds.FeedConnectionId;
import org.apache.asterix.common.feeds.FeedConnectionRequest;
import org.apache.asterix.common.feeds.FeedId;
import org.apache.asterix.common.feeds.FeedIntakeInfo;
import org.apache.asterix.common.feeds.FeedJobInfo;
import org.apache.asterix.common.feeds.FeedJobInfo.FeedJobState;
import org.apache.asterix.common.feeds.FeedJobInfo.JobType;
import org.apache.asterix.common.feeds.FeedJointKey;
import org.apache.asterix.common.feeds.FeedPolicyAccessor;
import org.apache.asterix.common.feeds.api.IFeedJoint;
import org.apache.asterix.common.feeds.api.IFeedJoint.State;
import org.apache.asterix.common.feeds.api.IFeedLifecycleEventSubscriber;
import org.apache.asterix.common.feeds.api.IFeedLifecycleEventSubscriber.FeedLifecycleEvent;
import org.apache.asterix.common.feeds.api.IIntakeProgressTracker;
import org.apache.asterix.common.feeds.message.StorageReportFeedMessage;
import org.apache.asterix.feeds.FeedLifecycleListener.Message;
import org.apache.asterix.metadata.feeds.BuiltinFeedPolicies;
import org.apache.asterix.metadata.feeds.FeedCollectOperatorDescriptor;
import org.apache.asterix.metadata.feeds.FeedIntakeOperatorDescriptor;
import org.apache.asterix.metadata.feeds.FeedMetaOperatorDescriptor;
import org.apache.asterix.metadata.feeds.FeedWorkManager;
import org.apache.asterix.om.util.AsterixAppContextInfo;
import org.apache.hyracks.algebricks.common.utils.Pair;
import org.apache.hyracks.algebricks.runtime.base.IPushRuntimeFactory;
import org.apache.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor;
import org.apache.hyracks.algebricks.runtime.operators.std.AssignRuntimeFactory;
import org.apache.hyracks.api.client.IHyracksClientConnection;
import org.apache.hyracks.api.dataflow.IConnectorDescriptor;
import org.apache.hyracks.api.dataflow.IOperatorDescriptor;
import org.apache.hyracks.api.dataflow.OperatorDescriptorId;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.job.JobId;
import org.apache.hyracks.api.job.JobInfo;
import org.apache.hyracks.api.job.JobSpecification;
import org.apache.hyracks.api.job.JobStatus;
import org.apache.hyracks.storage.am.lsm.common.dataflow.LSMTreeIndexInsertUpdateDeleteOperatorDescriptor;
/**
 * Consumes job lifecycle messages (JOB_START / JOB_FINISH) from an inbox queue and
 * maintains the bookkeeping for feed jobs: intake jobs (one per feed) and collect
 * ("connect") jobs (one per feed connection). It tracks the feed-joint pipeline per
 * feed, resolves operator locations from Hyracks job info, reports feed activity to
 * the central feed manager, and fans lifecycle events out to registered subscribers.
 *
 * Thread-safety: {@link #run()} is the main consumer loop; a few entry points are
 * {@code synchronized}, but most of the maps are plain {@link HashMap}s mutated from
 * multiple public methods — NOTE(review): looks like external callers are expected to
 * serialize access; confirm against the callers of this class.
 */
public class FeedJobNotificationHandler implements Runnable {

    private static final Logger LOGGER = Logger.getLogger(FeedJobNotificationHandler.class.getName());

    // Queue of job lifecycle messages produced elsewhere (see FeedLifecycleListener.Message).
    private final LinkedBlockingQueue<Message> inbox;
    // Lifecycle-event subscribers keyed by the feed connection they observe.
    private final Map<FeedConnectionId, List<IFeedLifecycleEventSubscriber>> eventSubscribers;

    // All registered feed jobs (intake and connect) keyed by Hyracks job id.
    private final Map<JobId, FeedJobInfo> jobInfos;
    // Intake job info keyed by the feed being ingested (one intake job per feed).
    private final Map<FeedId, FeedIntakeInfo> intakeJobInfos;
    // Connect (collection) job info keyed by the feed connection.
    private final Map<FeedConnectionId, FeedConnectJobInfo> connectJobInfos;
    // Feed joints that make up each feed's pipeline, keyed by owner feed.
    private final Map<FeedId, List<IFeedJoint>> feedPipeline;
    // Per-connection intake progress tracker paired with the last acked intake timestamp.
    private final Map<FeedConnectionId, Pair<IIntakeProgressTracker, Long>> feedIntakeProgressTrackers;

    public FeedJobNotificationHandler(LinkedBlockingQueue<Message> inbox) {
        this.inbox = inbox;
        this.jobInfos = new HashMap<JobId, FeedJobInfo>();
        this.intakeJobInfos = new HashMap<FeedId, FeedIntakeInfo>();
        this.connectJobInfos = new HashMap<FeedConnectionId, FeedConnectJobInfo>();
        this.feedPipeline = new HashMap<FeedId, List<IFeedJoint>>();
        this.eventSubscribers = new HashMap<FeedConnectionId, List<IFeedLifecycleEventSubscriber>>();
        this.feedIntakeProgressTrackers = new HashMap<FeedConnectionId, Pair<IIntakeProgressTracker, Long>>();
    }

    /**
     * Main loop: blocks on the inbox and dispatches each message to the start/finish
     * handler. Exceptions are printed and the loop continues — NOTE(review): this
     * swallows failures; consider logging via LOGGER instead of printStackTrace().
     */
    @Override
    public void run() {
        Message mesg;
        while (true) {
            try {
                mesg = inbox.take();
                switch (mesg.messageKind) {
                    case JOB_START:
                        handleJobStartMessage(mesg);
                        break;
                    case JOB_FINISH:
                        handleJobFinishMessage(mesg);
                        break;
                }
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }

    /**
     * Registers a progress tracker for a connection, initializing its last-seen
     * intake timestamp to 0.
     *
     * @throws IllegalStateException if a tracker is already registered for the
     *         connection (message contains a typo: "alreader" — kept as-is here).
     */
    public void registerFeedIntakeProgressTracker(FeedConnectionId connectionId,
            IIntakeProgressTracker feedIntakeProgressTracker) {
        if (feedIntakeProgressTrackers.get(connectionId) == null) {
            this.feedIntakeProgressTrackers.put(connectionId, new Pair<IIntakeProgressTracker, Long>(
                    feedIntakeProgressTracker, 0L));
        } else {
            throw new IllegalStateException(" Progress tracker for connection " + connectionId
                    + " is alreader registered");
        }
    }

    /** Removes the progress tracker for the connection, if any. */
    public void deregisterFeedIntakeProgressTracker(FeedConnectionId connectionId) {
        this.feedIntakeProgressTrackers.remove(connectionId);
    }

    /**
     * Advances the tracked intake timestamp for the reporting connection if the
     * storage report carries a newer persisted-tuple timestamp, and notifies the
     * tracker. No-op when no tracker is registered for the connection.
     */
    public void updateTrackingInformation(StorageReportFeedMessage srm) {
        Pair<IIntakeProgressTracker, Long> p = feedIntakeProgressTrackers.get(srm.getConnectionId());
        if (p != null && p.second < srm.getLastPersistedTupleIntakeTimestamp()) {
            p.second = srm.getLastPersistedTupleIntakeTimestamp();
            p.first.notifyIngestedTupleTimestamp(p.second);
        }
    }

    /** @return live view of all registered intake job infos (backed by the internal map). */
    public Collection<FeedIntakeInfo> getFeedIntakeInfos() {
        return intakeJobInfos.values();
    }

    /** @return live view of all registered connect job infos (backed by the internal map). */
    public Collection<FeedConnectJobInfo> getFeedConnectInfos() {
        return connectJobInfos.values();
    }

    /**
     * Adds a feed joint to its owner feed's pipeline, creating the pipeline list on
     * first use.
     *
     * @throws IllegalArgumentException if the joint is already present in the pipeline
     */
    public void registerFeedJoint(IFeedJoint feedJoint) {
        List<IFeedJoint> feedJointsOnPipeline = feedPipeline.get(feedJoint.getOwnerFeedId());
        if (feedJointsOnPipeline == null) {
            feedJointsOnPipeline = new ArrayList<IFeedJoint>();
            feedPipeline.put(feedJoint.getOwnerFeedId(), feedJointsOnPipeline);
            feedJointsOnPipeline.add(feedJoint);
        } else {
            if (!feedJointsOnPipeline.contains(feedJoint)) {
                feedJointsOnPipeline.add(feedJoint);
            } else {
                throw new IllegalArgumentException("Feed joint " + feedJoint + " already registered");
            }
        }
    }

    /**
     * Registers an intake job for a feed. Locates the INTAKE joint on the feed's
     * pipeline (which must already be registered via {@link #registerFeedJoint}).
     *
     * @throws IllegalStateException if the job id is already registered
     * @throws HyracksDataException if the feed's pipeline has no INTAKE joint
     */
    public void registerFeedIntakeJob(FeedId feedId, JobId jobId, JobSpecification jobSpec) throws HyracksDataException {
        if (jobInfos.get(jobId) != null) {
            throw new IllegalStateException("Feed job already registered");
        }
        List<IFeedJoint> joints = feedPipeline.get(feedId);
        IFeedJoint intakeJoint = null;
        for (IFeedJoint joint : joints) {
            if (joint.getType().equals(IFeedJoint.FeedJointType.INTAKE)) {
                intakeJoint = joint;
                break;
            }
        }
        if (intakeJoint != null) {
            FeedIntakeInfo intakeJobInfo = new FeedIntakeInfo(jobId, FeedJobState.CREATED, FeedJobInfo.JobType.INTAKE,
                    feedId, intakeJoint, jobSpec);
            intakeJobInfos.put(feedId, intakeJobInfo);
            jobInfos.put(jobId, intakeJobInfo);
            if (LOGGER.isLoggable(Level.INFO)) {
                LOGGER.info("Registered feed intake [" + jobId + "]" + " for feed " + feedId);
            }
        } else {
            throw new HyracksDataException("Could not register feed intake job [" + jobId + "]" + " for feed "
                    + feedId);
        }
    }

    /**
     * Registers a collection (connect) job for a connection. Searches the source
     * feed's joints for one that already lists this connection as a receiver; if
     * none is found the registration is skipped with a warning (no exception).
     *
     * @throws IllegalStateException if the job id is already registered
     */
    public void registerFeedCollectionJob(FeedId sourceFeedId, FeedConnectionId connectionId, JobId jobId,
            JobSpecification jobSpec, Map<String, String> feedPolicy) {
        if (jobInfos.get(jobId) != null) {
            throw new IllegalStateException("Feed job already registered");
        }
        List<IFeedJoint> feedJoints = feedPipeline.get(sourceFeedId);
        FeedConnectionId cid = null;
        IFeedJoint sourceFeedJoint = null;
        for (IFeedJoint joint : feedJoints) {
            cid = joint.getReceiver(connectionId);
            if (cid != null) {
                sourceFeedJoint = joint;
                break;
            }
        }
        if (cid != null) {
            // Compute joint is unknown at this point; it is filled in later by
            // handleCollectJobStartMessage once the job actually starts.
            FeedConnectJobInfo cInfo = new FeedConnectJobInfo(jobId, FeedJobState.CREATED, connectionId,
                    sourceFeedJoint, null, jobSpec, feedPolicy);
            jobInfos.put(jobId, cInfo);
            connectJobInfos.put(connectionId, cInfo);
            if (LOGGER.isLoggable(Level.INFO)) {
                LOGGER.info("Registered feed connection [" + jobId + "]" + " for feed " + connectionId);
            }
        } else {
            if (LOGGER.isLoggable(Level.WARNING)) {
                LOGGER.warning("Could not register feed collection job [" + jobId + "]" + " for feed connection "
                        + connectionId);
            }
        }
    }

    /**
     * Removes an intake job's bookkeeping. The intake feed joint is removed from the
     * pipeline only when the job is NOT under recovery, so a recovering intake keeps
     * its joint alive.
     *
     * @throws IllegalStateException if the job id is not registered
     */
    public void deregisterFeedIntakeJob(JobId jobId) {
        if (jobInfos.get(jobId) == null) {
            throw new IllegalStateException(" Feed Intake job not registered ");
        }
        FeedIntakeInfo info = (FeedIntakeInfo) jobInfos.get(jobId);
        jobInfos.remove(jobId);
        intakeJobInfos.remove(info.getFeedId());
        if (!info.getState().equals(FeedJobState.UNDER_RECOVERY)) {
            List<IFeedJoint> joints = feedPipeline.get(info.getFeedId());
            joints.remove(info.getIntakeFeedJoint());
            if (LOGGER.isLoggable(Level.INFO)) {
                LOGGER.info("Deregistered feed intake job [" + jobId + "]");
            }
        } else {
            if (LOGGER.isLoggable(Level.INFO)) {
                LOGGER.info("Not removing feed joint as intake job is in " + FeedJobState.UNDER_RECOVERY + " state.");
            }
        }
    }

    // Dispatches a JOB_START message to the intake- or collect-specific handler.
    // NOTE(review): throws NPE if the job id was never registered — presumably
    // messages only arrive for registered jobs; confirm with the producer.
    private void handleJobStartMessage(Message message) throws Exception {
        FeedJobInfo jobInfo = jobInfos.get(message.jobId);
        switch (jobInfo.getJobType()) {
            case INTAKE:
                handleIntakeJobStartMessage((FeedIntakeInfo) jobInfo);
                break;
            case FEED_CONNECT:
                handleCollectJobStartMessage((FeedConnectJobInfo) jobInfo);
                break;
        }
    }

    // Dispatches a JOB_FINISH message to the intake- or collect-specific handler.
    private void handleJobFinishMessage(Message message) throws Exception {
        FeedJobInfo jobInfo = jobInfos.get(message.jobId);
        switch (jobInfo.getJobType()) {
            case INTAKE:
                if (LOGGER.isLoggable(Level.INFO)) {
                    LOGGER.info("Intake Job finished for feed intake " + jobInfo.getJobId());
                }
                handleFeedIntakeJobFinishMessage((FeedIntakeInfo) jobInfo, message);
                break;
            case FEED_CONNECT:
                if (LOGGER.isLoggable(Level.INFO)) {
                    LOGGER.info("Collect Job finished for " + (FeedConnectJobInfo) jobInfo);
                }
                handleFeedCollectJobFinishMessage((FeedConnectJobInfo) jobInfo);
                break;
        }
    }

    /**
     * On intake job start: resolves the node locations of every
     * FeedIntakeOperatorDescriptor instance from Hyracks job info, stores them on the
     * intake info, marks the intake joint and job ACTIVE, and notifies subscribers.
     */
    private synchronized void handleIntakeJobStartMessage(FeedIntakeInfo intakeJobInfo) throws Exception {
        List<OperatorDescriptorId> intakeOperatorIds = new ArrayList<OperatorDescriptorId>();
        Map<OperatorDescriptorId, IOperatorDescriptor> operators = intakeJobInfo.getSpec().getOperatorMap();
        for (Entry<OperatorDescriptorId, IOperatorDescriptor> entry : operators.entrySet()) {
            IOperatorDescriptor opDesc = entry.getValue();
            if (opDesc instanceof FeedIntakeOperatorDescriptor) {
                intakeOperatorIds.add(opDesc.getOperatorId());
            }
        }
        IHyracksClientConnection hcc = AsterixAppContextInfo.getInstance().getHcc();
        JobInfo info = hcc.getJobInfo(intakeJobInfo.getJobId());
        List<String> intakeLocations = new ArrayList<String>();
        for (OperatorDescriptorId intakeOperatorId : intakeOperatorIds) {
            Map<Integer, String> operatorLocations = info.getOperatorLocations().get(intakeOperatorId);
            int nOperatorInstances = operatorLocations.size();
            for (int i = 0; i < nOperatorInstances; i++) {
                intakeLocations.add(operatorLocations.get(i));
            }
        }
        // intakeLocations is an ordered list; element at position i corresponds to location of i'th instance of operator
        intakeJobInfo.setIntakeLocation(intakeLocations);
        intakeJobInfo.getIntakeFeedJoint().setState(State.ACTIVE);
        intakeJobInfo.setState(FeedJobState.ACTIVE);
        // notify event listeners
        notifyFeedEventSubscribers(intakeJobInfo, FeedLifecycleEvent.FEED_INTAKE_STARTED);
    }

    /**
     * On collect job start: resolves operator locations, activates the joints this
     * connection provides (remembering the COMPUTE joint on the job info), marks the
     * job ACTIVE, records feed activity in metadata, and notifies subscribers.
     */
    private void handleCollectJobStartMessage(FeedConnectJobInfo cInfo) throws RemoteException, ACIDException {
        // set locations of feed sub-operations (intake, compute, store)
        setLocations(cInfo);
        // activate joints
        List<IFeedJoint> joints = feedPipeline.get(cInfo.getConnectionId().getFeedId());
        for (IFeedJoint joint : joints) {
            if (joint.getProvider().equals(cInfo.getConnectionId())) {
                joint.setState(State.ACTIVE);
                if (joint.getType().equals(IFeedJoint.FeedJointType.COMPUTE)) {
                    cInfo.setComputeFeedJoint(joint);
                }
            }
        }
        cInfo.setState(FeedJobState.ACTIVE);
        // register activity in metadata
        registerFeedActivity(cInfo);
        // notify event listeners
        notifyFeedEventSubscribers(cInfo, FeedLifecycleEvent.FEED_COLLECT_STARTED);
    }

    /**
     * Delivers a lifecycle event to subscribers. For an INTAKE job the event is
     * fanned out to every subscribed connection of that feed; for a connect job only
     * the job's own connection is notified.
     */
    private void notifyFeedEventSubscribers(FeedJobInfo jobInfo, FeedLifecycleEvent event) {
        JobType jobType = jobInfo.getJobType();
        List<FeedConnectionId> impactedConnections = new ArrayList<FeedConnectionId>();
        if (jobType.equals(JobType.INTAKE)) {
            FeedId feedId = ((FeedIntakeInfo) jobInfo).getFeedId();
            for (FeedConnectionId connId : eventSubscribers.keySet()) {
                if (connId.getFeedId().equals(feedId)) {
                    impactedConnections.add(connId);
                }
            }
        } else {
            impactedConnections.add(((FeedConnectJobInfo) jobInfo).getConnectionId());
        }
        for (FeedConnectionId connId : impactedConnections) {
            List<IFeedLifecycleEventSubscriber> subscribers = eventSubscribers.get(connId);
            if (subscribers != null && !subscribers.isEmpty()) {
                for (IFeedLifecycleEventSubscriber subscriber : subscribers) {
                    subscriber.handleFeedEvent(event);
                }
            }
        }
    }

    /**
     * Submits a subscribe-feed work item sourced at the given joint. Locations come
     * from the intake job (INTAKE joint) or the providing connection's compute
     * locations (COMPUTE joint). NOTE(review): {@code locations} stays null for any
     * other joint type and would NPE at the toArray call — presumably only
     * INTAKE/COMPUTE joints reach here; confirm with callers.
     */
    public synchronized void submitFeedConnectionRequest(IFeedJoint feedJoint, final FeedConnectionRequest request)
            throws Exception {
        List<String> locations = null;
        switch (feedJoint.getType()) {
            case INTAKE:
                FeedIntakeInfo intakeInfo = intakeJobInfos.get(feedJoint.getOwnerFeedId());
                locations = intakeInfo.getIntakeLocation();
                break;
            case COMPUTE:
                FeedConnectionId connectionId = feedJoint.getProvider();
                FeedConnectJobInfo cInfo = connectJobInfos.get(connectionId);
                locations = cInfo.getComputeLocations();
                break;
        }
        SubscribeFeedWork work = new SubscribeFeedWork(locations.toArray(new String[] {}), request);
        FeedWorkManager.INSTANCE.submitWork(work, new SubscribeFeedWork.FeedSubscribeWorkEventListener());
    }

    /** @return the source feed joint for the connection, or null if not registered. */
    public IFeedJoint getSourceFeedJoint(FeedConnectionId connectionId) {
        FeedConnectJobInfo cInfo = connectJobInfos.get(connectionId);
        if (cInfo != null) {
            return cInfo.getSourceFeedJoint();
        }
        return null;
    }

    /** @return connection ids of all connect jobs currently in the ACTIVE state. */
    public Set<FeedConnectionId> getActiveFeedConnections() {
        Set<FeedConnectionId> activeConnections = new HashSet<FeedConnectionId>();
        for (FeedConnectJobInfo cInfo : connectJobInfos.values()) {
            if (cInfo.getState().equals(FeedJobState.ACTIVE)) {
                activeConnections.add(cInfo.getConnectionId());
            }
        }
        return activeConnections;
    }

    /** @return true iff the connection is registered and its connect job is ACTIVE. */
    public boolean isFeedConnectionActive(FeedConnectionId connectionId) {
        FeedConnectJobInfo cInfo = connectJobInfos.get(connectionId);
        if (cInfo != null) {
            return cInfo.getState().equals(FeedJobState.ACTIVE);
        }
        return false;
    }

    // Sets the state on the connection's connect job info (NPE if unregistered).
    public void setJobState(FeedConnectionId connectionId, FeedJobState jobState) {
        FeedConnectJobInfo connectJobInfo = connectJobInfos.get(connectionId);
        connectJobInfo.setState(jobState);
    }

    /** @return the current state of the connection's connect job (NPE if unregistered). */
    public FeedJobState getFeedJobState(FeedConnectionId connectionId) {
        return connectJobInfos.get(connectionId).getState();
    }

    /**
     * On intake job finish: maps job FAILURE to FEED_INTAKE_FAILURE (FEED_ENDED
     * otherwise), tears down the intake bookkeeping, and notifies subscribers.
     */
    private void handleFeedIntakeJobFinishMessage(FeedIntakeInfo intakeInfo, Message message) throws Exception {
        IHyracksClientConnection hcc = AsterixAppContextInfo.getInstance().getHcc();
        JobInfo info = hcc.getJobInfo(message.jobId);
        JobStatus status = info.getStatus();
        FeedLifecycleEvent event;
        event = status.equals(JobStatus.FAILURE) ? FeedLifecycleEvent.FEED_INTAKE_FAILURE
                : FeedLifecycleEvent.FEED_ENDED;
        // remove feed joints
        deregisterFeedIntakeJob(message.jobId);
        // notify event listeners
        notifyFeedEventSubscribers(intakeInfo, event);
    }

    /**
     * On collect job finish: the subscription is retained when the job is under
     * recovery or the policy allows surviving hardware failure; otherwise the
     * receiver is removed from the source joint and dangling joints are cleaned up.
     * Job history is dropped on success. (Local variable "retainSubsription" is a
     * pre-existing typo for "retainSubscription" — kept as-is.)
     */
    private void handleFeedCollectJobFinishMessage(FeedConnectJobInfo cInfo) throws Exception {
        FeedConnectionId connectionId = cInfo.getConnectionId();
        IHyracksClientConnection hcc = AsterixAppContextInfo.getInstance().getHcc();
        JobInfo info = hcc.getJobInfo(cInfo.getJobId());
        JobStatus status = info.getStatus();
        boolean failure = status != null && status.equals(JobStatus.FAILURE);
        FeedPolicyAccessor fpa = new FeedPolicyAccessor(cInfo.getFeedPolicy());
        boolean removeJobHistory = !failure;
        boolean retainSubsription = cInfo.getState().equals(FeedJobState.UNDER_RECOVERY)
                || (failure && fpa.continueOnHardwareFailure());
        if (!retainSubsription) {
            IFeedJoint feedJoint = cInfo.getSourceFeedJoint();
            feedJoint.removeReceiver(connectionId);
            if (LOGGER.isLoggable(Level.INFO)) {
                LOGGER.info("Subscription " + cInfo.getConnectionId() + " completed successfully. Removed subscription");
            }
            removeFeedJointsPostPipelineTermination(cInfo.getConnectionId());
        }
        if (removeJobHistory) {
            connectJobInfos.remove(connectionId);
            jobInfos.remove(cInfo.getJobId());
            feedIntakeProgressTrackers.remove(cInfo.getConnectionId());
        }
        deregisterFeedActivity(cInfo);
        // notify event listeners
        FeedLifecycleEvent event = failure ? FeedLifecycleEvent.FEED_COLLECT_FAILURE : FeedLifecycleEvent.FEED_ENDED;
        notifyFeedEventSubscribers(cInfo, event);
    }

    /**
     * Reports this connection's activity (collect/compute/storage locations, policy
     * name, connect timestamp) to the central feed load manager. Failures are logged
     * as warnings and do not propagate.
     */
    private void registerFeedActivity(FeedConnectJobInfo cInfo) {
        Map<String, String> feedActivityDetails = new HashMap<String, String>();
        if (cInfo.getCollectLocations() != null) {
            feedActivityDetails.put(FeedActivity.FeedActivityDetails.INTAKE_LOCATIONS,
                    StringUtils.join(cInfo.getCollectLocations().iterator(), ','));
        }
        if (cInfo.getComputeLocations() != null) {
            feedActivityDetails.put(FeedActivity.FeedActivityDetails.COMPUTE_LOCATIONS,
                    StringUtils.join(cInfo.getComputeLocations().iterator(), ','));
        }
        if (cInfo.getStorageLocations() != null) {
            feedActivityDetails.put(FeedActivity.FeedActivityDetails.STORAGE_LOCATIONS,
                    StringUtils.join(cInfo.getStorageLocations().iterator(), ','));
        }
        String policyName = cInfo.getFeedPolicy().get(BuiltinFeedPolicies.CONFIG_FEED_POLICY_KEY);
        feedActivityDetails.put(FeedActivity.FeedActivityDetails.FEED_POLICY_NAME, policyName);
        feedActivityDetails.put(FeedActivity.FeedActivityDetails.FEED_CONNECT_TIMESTAMP, (new Date()).toString());
        try {
            FeedActivity feedActivity = new FeedActivity(cInfo.getConnectionId().getFeedId().getDataverse(), cInfo
                    .getConnectionId().getFeedId().getFeedName(), cInfo.getConnectionId().getDatasetName(),
                    feedActivityDetails);
            CentralFeedManager.getInstance().getFeedLoadManager()
                    .reportFeedActivity(cInfo.getConnectionId(), feedActivity);
        } catch (Exception e) {
            e.printStackTrace();
            if (LOGGER.isLoggable(Level.WARNING)) {
                LOGGER.warning("Unable to register feed activity for " + cInfo + " " + e.getMessage());
            }
        }
    }

    /** Removes this connection's activity record from the central feed load manager (best effort). */
    public void deregisterFeedActivity(FeedConnectJobInfo cInfo) {
        try {
            CentralFeedManager.getInstance().getFeedLoadManager().removeFeedActivity(cInfo.getConnectionId());
        } catch (Exception e) {
            if (LOGGER.isLoggable(Level.WARNING)) {
                LOGGER.warning("Unable to deregister feed activity for " + cInfo + " " + e.getMessage());
            }
        }
    }

    /**
     * After a pipeline terminates, drops the source and compute joints from the
     * feed's pipeline when each has fewer than two remaining receivers (i.e. no
     * other connection still depends on it).
     */
    public void removeFeedJointsPostPipelineTermination(FeedConnectionId connectionId) {
        FeedConnectJobInfo cInfo = connectJobInfos.get(connectionId);
        List<IFeedJoint> feedJoints = feedPipeline.get(connectionId.getFeedId());
        IFeedJoint sourceJoint = cInfo.getSourceFeedJoint();
        List<FeedConnectionId> all = sourceJoint.getReceivers();
        boolean removeSourceJoint = all.size() < 2;
        if (removeSourceJoint) {
            feedJoints.remove(sourceJoint);
        }
        IFeedJoint computeJoint = cInfo.getComputeFeedJoint();
        if (computeJoint != null && computeJoint.getReceivers().size() < 2) {
            feedJoints.remove(computeJoint);
        }
    }

    /** @return true iff a feed job (intake or connect) is registered under the job id. */
    public boolean isRegisteredFeedJob(JobId jobId) {
        return jobInfos.get(jobId) != null;
    }

    /**
     * @return compute locations from the first joint on the feed's pipeline whose
     *         joint key belongs to the feed, or null if none matches.
     *         NOTE(review): the predicate checks the joint key's feed id rather than
     *         the joint type — presumably intended to select the COMPUTE joint;
     *         verify against callers.
     */
    public List<String> getFeedComputeLocations(FeedId feedId) {
        List<IFeedJoint> feedJoints = feedPipeline.get(feedId);
        for (IFeedJoint joint : feedJoints) {
            if (joint.getFeedJointKey().getFeedId().equals(feedId)) {
                return connectJobInfos.get(joint.getProvider()).getComputeLocations();
            }
        }
        return null;
    }

    /** @return storage locations of the connection's connect job (NPE if unregistered). */
    public List<String> getFeedStorageLocations(FeedConnectionId connectionId) {
        return connectJobInfos.get(connectionId).getStorageLocations();
    }

    /** @return collect locations of the connection's connect job (NPE if unregistered). */
    public List<String> getFeedCollectLocations(FeedConnectionId connectionId) {
        return connectJobInfos.get(connectionId).getCollectLocations();
    }

    /** @return intake locations for the feed's intake job (NPE if unregistered). */
    public List<String> getFeedIntakeLocations(FeedId feedId) {
        return intakeJobInfos.get(feedId).getIntakeLocation();
    }

    /** @return the Hyracks job id of the connection's collect job (NPE if unregistered). */
    public JobId getFeedCollectJobId(FeedConnectionId connectionId) {
        return connectJobInfos.get(connectionId).getJobId();
    }

    /** Adds a lifecycle-event subscriber for the connection, creating the list on first use. */
    public void registerFeedEventSubscriber(FeedConnectionId connectionId, IFeedLifecycleEventSubscriber subscriber) {
        List<IFeedLifecycleEventSubscriber> subscribers = eventSubscribers.get(connectionId);
        if (subscribers == null) {
            subscribers = new ArrayList<IFeedLifecycleEventSubscriber>();
            eventSubscribers.put(connectionId, subscribers);
        }
        subscribers.add(subscriber);
    }

    /** Removes a lifecycle-event subscriber for the connection, if registered. */
    public void deregisterFeedEventSubscriber(FeedConnectionId connectionId, IFeedLifecycleEventSubscriber subscriber) {
        List<IFeedLifecycleEventSubscriber> subscribers = eventSubscribers.get(connectionId);
        if (subscribers != null) {
            subscribers.remove(subscriber);
        }
    }

    //============================

    /** @return true iff a joint with exactly this key exists on the key's feed pipeline. */
    public boolean isFeedPointAvailable(FeedJointKey feedJointKey) {
        List<IFeedJoint> joints = feedPipeline.get(feedJointKey.getFeedId());
        if (joints != null && !joints.isEmpty()) {
            for (IFeedJoint joint : joints) {
                if (joint.getFeedJointKey().equals(feedJointKey)) {
                    return true;
                }
            }
        }
        return false;
    }

    /** @return the intake feed joint of every registered intake job. */
    public Collection<IFeedJoint> getFeedIntakeJoints() {
        List<IFeedJoint> intakeFeedPoints = new ArrayList<IFeedJoint>();
        for (FeedIntakeInfo info : intakeJobInfos.values()) {
            intakeFeedPoints.add(info.getIntakeFeedJoint());
        }
        return intakeFeedPoints;
    }

    /** @return the joint with exactly this key on the key's feed pipeline, or null. */
    public IFeedJoint getFeedJoint(FeedJointKey feedPointKey) {
        List<IFeedJoint> joints = feedPipeline.get(feedPointKey.getFeedId());
        if (joints != null && !joints.isEmpty()) {
            for (IFeedJoint joint : joints) {
                if (joint.getFeedJointKey().equals(feedPointKey)) {
                    return joint;
                }
            }
        }
        return null;
    }

    /**
     * Returns the exact joint for the key if present; otherwise the "best" candidate:
     * the joint whose key string is the longest prefix-compatible substring of the
     * requested key (string containment is used as the subsumption test). May return
     * null when no candidate exists.
     */
    public IFeedJoint getAvailableFeedJoint(FeedJointKey feedJointKey) {
        IFeedJoint feedJoint = getFeedJoint(feedJointKey);
        if (feedJoint != null) {
            return feedJoint;
        } else {
            String jointKeyString = feedJointKey.getStringRep();
            List<IFeedJoint> jointsOnPipeline = feedPipeline.get(feedJointKey.getFeedId());
            IFeedJoint candidateJoint = null;
            if (jointsOnPipeline != null) {
                for (IFeedJoint joint : jointsOnPipeline) {
                    if (jointKeyString.contains(joint.getFeedJointKey().getStringRep())) {
                        if (candidateJoint == null) {
                            candidateJoint = joint;
                        } else if (joint.getFeedJointKey().getStringRep()
                                .contains(candidateJoint.getFeedJointKey().getStringRep())) { // found feed point is a super set of the earlier find
                            candidateJoint = joint;
                        }
                    }
                }
            }
            return candidateJoint;
        }
    }

    /** @return the job specification of the connection's collect job (NPE if unregistered). */
    public JobSpecification getCollectJobSpecification(FeedConnectionId connectionId) {
        return connectJobInfos.get(connectionId).getSpec();
    }

    /** @return the first joint of the given type on the source feed's pipeline, or null. */
    public IFeedJoint getFeedPoint(FeedId sourceFeedId, IFeedJoint.FeedJointType type) {
        List<IFeedJoint> joints = feedPipeline.get(sourceFeedId);
        for (IFeedJoint joint : joints) {
            if (joint.getType().equals(type)) {
                return joint;
            }
        }
        return null;
    }

    /** @return the connect job info for the connection, or null if not registered. */
    public FeedConnectJobInfo getFeedConnectJobInfo(FeedConnectionId connectionId) {
        return connectJobInfos.get(connectionId);
    }

    /**
     * Classifies the connect job's operators into collect, compute, and storage
     * groups, then resolves each group's node locations from Hyracks job info and
     * stores them on the job info. Compute operators are AlgebricksMeta operators
     * containing an AssignRuntimeFactory fed (directly) by a collect operator; when
     * a compute operator has no reported locations, the collect locations are used
     * instead. Exceptions are printed and swallowed.
     */
    private void setLocations(FeedConnectJobInfo cInfo) {
        JobSpecification jobSpec = cInfo.getSpec();
        List<OperatorDescriptorId> collectOperatorIds = new ArrayList<OperatorDescriptorId>();
        List<OperatorDescriptorId> computeOperatorIds = new ArrayList<OperatorDescriptorId>();
        List<OperatorDescriptorId> storageOperatorIds = new ArrayList<OperatorDescriptorId>();
        Map<OperatorDescriptorId, IOperatorDescriptor> operators = jobSpec.getOperatorMap();
        for (Entry<OperatorDescriptorId, IOperatorDescriptor> entry : operators.entrySet()) {
            IOperatorDescriptor opDesc = entry.getValue();
            IOperatorDescriptor actualOp = null;
            // Unwrap meta operators to classify the core operator they carry.
            if (opDesc instanceof FeedMetaOperatorDescriptor) {
                actualOp = ((FeedMetaOperatorDescriptor) opDesc).getCoreOperator();
            } else {
                actualOp = opDesc;
            }
            if (actualOp instanceof AlgebricksMetaOperatorDescriptor) {
                AlgebricksMetaOperatorDescriptor op = ((AlgebricksMetaOperatorDescriptor) actualOp);
                IPushRuntimeFactory[] runtimeFactories = op.getPipeline().getRuntimeFactories();
                boolean computeOp = false;
                for (IPushRuntimeFactory rf : runtimeFactories) {
                    if (rf instanceof AssignRuntimeFactory) {
                        // An assign directly downstream of the collect operator marks
                        // this as the compute stage of the pipeline.
                        IConnectorDescriptor connDesc = jobSpec.getOperatorInputMap().get(op.getOperatorId()).get(0);
                        IOperatorDescriptor sourceOp = jobSpec.getConnectorOperatorMap().get(connDesc.getConnectorId())
                                .getLeft().getLeft();
                        if (sourceOp instanceof FeedCollectOperatorDescriptor) {
                            computeOp = true;
                            break;
                        }
                    }
                }
                if (computeOp) {
                    computeOperatorIds.add(entry.getKey());
                }
            } else if (actualOp instanceof LSMTreeIndexInsertUpdateDeleteOperatorDescriptor) {
                storageOperatorIds.add(entry.getKey());
            } else if (actualOp instanceof FeedCollectOperatorDescriptor) {
                collectOperatorIds.add(entry.getKey());
            }
        }
        try {
            IHyracksClientConnection hcc = AsterixAppContextInfo.getInstance().getHcc();
            JobInfo info = hcc.getJobInfo(cInfo.getJobId());
            List<String> collectLocations = new ArrayList<String>();
            for (OperatorDescriptorId collectOpId : collectOperatorIds) {
                Map<Integer, String> operatorLocations = info.getOperatorLocations().get(collectOpId);
                int nOperatorInstances = operatorLocations.size();
                for (int i = 0; i < nOperatorInstances; i++) {
                    collectLocations.add(operatorLocations.get(i));
                }
            }
            List<String> computeLocations = new ArrayList<String>();
            for (OperatorDescriptorId computeOpId : computeOperatorIds) {
                Map<Integer, String> operatorLocations = info.getOperatorLocations().get(computeOpId);
                if (operatorLocations != null) {
                    int nOperatorInstances = operatorLocations.size();
                    for (int i = 0; i < nOperatorInstances; i++) {
                        computeLocations.add(operatorLocations.get(i));
                    }
                } else {
                    // No reported locations for this compute op: fall back to the
                    // collect locations (note this clears prior compute entries).
                    computeLocations.clear();
                    computeLocations.addAll(collectLocations);
                }
            }
            List<String> storageLocations = new ArrayList<String>();
            for (OperatorDescriptorId storageOpId : storageOperatorIds) {
                Map<Integer, String> operatorLocations = info.getOperatorLocations().get(storageOpId);
                if (operatorLocations == null) {
                    continue;
                }
                int nOperatorInstances = operatorLocations.size();
                for (int i = 0; i < nOperatorInstances; i++) {
                    storageLocations.add(operatorLocations.get(i));
                }
            }
            cInfo.setCollectLocations(collectLocations);
            cInfo.setComputeLocations(computeLocations);
            cInfo.setStorageLocations(storageLocations);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
| |
/*
* Copyright 2014-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.android;
import com.facebook.buck.android.toolchain.ndk.AndroidNdk;
import com.facebook.buck.android.toolchain.ndk.AndroidNdkConstants;
import com.facebook.buck.android.toolchain.ndk.NdkCxxPlatform;
import com.facebook.buck.android.toolchain.ndk.NdkCxxPlatformsProvider;
import com.facebook.buck.android.toolchain.ndk.TargetCpuType;
import com.facebook.buck.core.description.BuildRuleParams;
import com.facebook.buck.core.description.arg.CommonDescriptionArg;
import com.facebook.buck.core.description.arg.HasDeclaredDeps;
import com.facebook.buck.core.description.arg.HasSrcs;
import com.facebook.buck.core.model.BuildTarget;
import com.facebook.buck.core.model.targetgraph.BuildRuleCreationContextWithTargetGraph;
import com.facebook.buck.core.model.targetgraph.DescriptionWithTargetGraph;
import com.facebook.buck.core.rules.BuildRule;
import com.facebook.buck.core.rules.BuildRuleResolver;
import com.facebook.buck.core.rules.SourcePathRuleFinder;
import com.facebook.buck.core.sourcepath.PathSourcePath;
import com.facebook.buck.core.sourcepath.SourcePath;
import com.facebook.buck.core.sourcepath.resolver.SourcePathResolver;
import com.facebook.buck.core.sourcepath.resolver.impl.DefaultSourcePathResolver;
import com.facebook.buck.core.util.immutables.BuckStyleImmutable;
import com.facebook.buck.cxx.CxxHeaders;
import com.facebook.buck.cxx.CxxPreprocessables;
import com.facebook.buck.cxx.CxxPreprocessorInput;
import com.facebook.buck.cxx.CxxSource;
import com.facebook.buck.cxx.CxxSourceTypes;
import com.facebook.buck.cxx.toolchain.CxxPlatform;
import com.facebook.buck.cxx.toolchain.Preprocessor;
import com.facebook.buck.cxx.toolchain.linker.Linker;
import com.facebook.buck.cxx.toolchain.nativelink.NativeLinkableInput;
import com.facebook.buck.cxx.toolchain.nativelink.NativeLinkables;
import com.facebook.buck.io.filesystem.ProjectFilesystem;
import com.facebook.buck.model.BuildTargets;
import com.facebook.buck.rules.BuildableSupport;
import com.facebook.buck.rules.args.Arg;
import com.facebook.buck.rules.macros.EnvironmentVariableMacroExpander;
import com.facebook.buck.rules.macros.MacroHandler;
import com.facebook.buck.toolchain.ToolchainProvider;
import com.facebook.buck.util.Escaper;
import com.facebook.buck.util.MoreStrings;
import com.facebook.buck.util.environment.Platform;
import com.facebook.buck.util.types.Pair;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSortedSet;
import java.io.IOException;
import java.nio.file.FileVisitOption;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.EnumSet;
import java.util.Map;
import java.util.Optional;
import java.util.regex.Pattern;
import org.immutables.value.Value;
public class NdkLibraryDescription implements DescriptionWithTargetGraph<NdkLibraryDescriptionArg> {
// Matches file names whose extension is one of the NDK-relevant build/source
// extensions: makefile fragments (.mk), C/C++ headers (.h/.hpp), and C/C++
// sources (.c/.cpp/.cc/.cxx).
private static final Pattern EXTENSIONS_REGEX =
    Pattern.compile(
        ".*\\."
            + MoreStrings.regexPatternForAny("mk", "h", "hpp", "c", "cpp", "cc", "cxx")
            + "$");
// Macro handler exposing the "env" macro, which expands environment variables
// for the platform detected at class-load time.
public static final MacroHandler MACRO_HANDLER =
    new MacroHandler(
        ImmutableMap.of("env", new EnvironmentVariableMacroExpander(Platform.detect())));
/** Returns the constructor-arg class Buck uses to populate this description from BUCK files. */
@Override
public Class<NdkLibraryDescriptionArg> getConstructorArgType() {
  Class<NdkLibraryDescriptionArg> argType = NdkLibraryDescriptionArg.class;
  return argType;
}
/**
 * Escapes each argument so it survives ndk-build's makefile processing.
 *
 * <p>Each value is first bash-escaped (the makefiles eventually hand the values to
 * the shell) and then makefile-escaped four times: once for the initial expansion
 * plus once for each of the three "eval" calls the ndk-build makefiles use to
 * propagate variables. Arguments that point into buck-out are rewritten relative to
 * $(BUCK_PROJECT_DIR), since ndk-build runs from the NDK root rather than the
 * project root.
 *
 * @param filesystem project filesystem used to locate the buck-out directory
 * @param args raw argument strings to escape
 * @return the escaped arguments, in the original order
 */
private Iterable<String> escapeForMakefile(ProjectFilesystem filesystem, Iterable<String> args) {
  String buckOutPrefix = filesystem.getBuckPaths().getBuckOut().toString();
  ImmutableList.Builder<String> result = ImmutableList.builder();
  for (String original : args) {
    // Bash escaping first; makefile escaping is layered on top of it.
    String escaped = Escaper.escapeAsShellString(original);
    // One makefile escape per expansion: initial pass + three "eval"s.
    int round = 0;
    while (round < 4) {
      escaped = Escaper.escapeAsMakefileValueString(escaped);
      round++;
    }
    // ndk-build runs from the NDK root, so re-root buck-out-relative paths.
    if (original.startsWith(buckOutPrefix)) {
      escaped = "$(BUCK_PROJECT_DIR)/" + escaped;
    }
    result.add(escaped);
  }
  return result.build();
}
/**
 * Maps an NDK target CPU type to the TARGET_ARCH_ABI name understood by ndk-build.
 *
 * @param cpuType the NDK target CPU type
 * @return the corresponding ABI directory name (e.g. "armeabi-v7a")
 * @throws IllegalStateException if the CPU type has no known ABI mapping
 */
private String getTargetArchAbi(TargetCpuType cpuType) {
  switch (cpuType) {
    case ARM:
      return "armeabi";
    case ARMV7:
      return "armeabi-v7a";
    case ARM64:
      return "arm64-v8a";
    case X86:
      return "x86";
    case X86_64:
      return "x86_64";
    case MIPS:
      return "mips";
    default:
      // Include the offending value so an unmapped enum constant is diagnosable;
      // the original threw a message-less IllegalStateException.
      throw new IllegalStateException("Unknown NDK target CPU type: " + cpuType);
  }
}
/** Returns the gen-path where the wrapper Android.mk for {@code target} is written. */
@VisibleForTesting
protected static Path getGeneratedMakefilePath(BuildTarget target, ProjectFilesystem filesystem) {
  Path makefilePath = BuildTargets.getGenPath(filesystem, target, "Android.%s.mk");
  return makefilePath;
}
/**
* @return a {@link BuildRule} which generates a Android.mk which pulls in the local Android.mk
* file and also appends relevant preprocessor and linker flags to use C/C++ library deps.
*/
  private Pair<String, Iterable<BuildRule>> generateMakefile(
      ToolchainProvider toolchainProvider,
      ProjectFilesystem projectFilesystem,
      BuildRuleParams params,
      BuildRuleResolver resolver) {
    SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(resolver);
    SourcePathResolver pathResolver = DefaultSourcePathResolver.from(ruleFinder);
    // Accumulates the makefile lines to emit, and the extra build rules the makefile depends on.
    ImmutableList.Builder<String> outputLinesBuilder = ImmutableList.builder();
    ImmutableSortedSet.Builder<BuildRule> deps = ImmutableSortedSet.naturalOrder();
    NdkCxxPlatformsProvider ndkCxxPlatformsProvider =
        toolchainProvider.getByName(
            NdkCxxPlatformsProvider.DEFAULT_NAME, NdkCxxPlatformsProvider.class);
    // Emit one per-architecture section for each NDK C/C++ platform we know about.
    for (Map.Entry<TargetCpuType, NdkCxxPlatform> entry :
        ndkCxxPlatformsProvider.getNdkCxxPlatforms().entrySet()) {
      CxxPlatform cxxPlatform = entry.getValue().getCxxPlatform();
      // Collect the preprocessor input for all C/C++ library deps. We search *through* other
      // NDK library rules.
      CxxPreprocessorInput cxxPreprocessorInput =
          CxxPreprocessorInput.concat(
              CxxPreprocessables.getTransitiveCxxPreprocessorInput(
                  cxxPlatform, resolver, params.getBuildDeps(), NdkLibrary.class::isInstance));
      // We add any dependencies from the C/C++ preprocessor input to this rule, even though
      // it technically should be added to the top-level rule.
      deps.addAll(cxxPreprocessorInput.getDeps(resolver, ruleFinder));
      // Add in the transitive preprocessor flags contributed by C/C++ library rules into the
      // NDK build.
      ImmutableList.Builder<String> ppFlags = ImmutableList.builder();
      ppFlags.addAll(
          Arg.stringify(
              cxxPreprocessorInput.getPreprocessorFlags().get(CxxSource.Type.C), pathResolver));
      Preprocessor preprocessor =
          CxxSourceTypes.getPreprocessor(cxxPlatform, CxxSource.Type.C).resolve(resolver);
      ppFlags.addAll(
          CxxHeaders.getArgs(
              cxxPreprocessorInput.getIncludes(), pathResolver, Optional.empty(), preprocessor));
      // Flatten the preprocessor flags into a single makefile-escaped CFLAGS string.
      String localCflags =
          Joiner.on(' ').join(escapeForMakefile(projectFilesystem, ppFlags.build()));
      // Collect the native linkable input for all C/C++ library deps. We search *through* other
      // NDK library rules.
      NativeLinkableInput nativeLinkableInput =
          NativeLinkables.getTransitiveNativeLinkableInput(
              cxxPlatform,
              resolver,
              params.getBuildDeps(),
              Linker.LinkableDepType.SHARED,
              r -> r instanceof NdkLibrary ? Optional.of(r.getBuildDeps()) : Optional.empty());
      // We add any dependencies from the native linkable input to this rule, even though
      // it technically should be added to the top-level rule.
      deps.addAll(
          nativeLinkableInput
              .getArgs()
              .stream()
              .flatMap(arg -> BuildableSupport.getDeps(arg, ruleFinder))
              .iterator());
      // Add in the transitive native linkable flags contributed by C/C++ library rules into the
      // NDK build.
      String localLdflags =
          Joiner.on(' ')
              .join(
                  escapeForMakefile(
                      projectFilesystem,
                      Arg.stringify(nativeLinkableInput.getArgs(), pathResolver)));
      // Write the relevant lines to the generated makefile.
      // Guarded by an ifeq on TARGET_ARCH_ABI so each section only applies to its own arch.
      if (!localCflags.isEmpty() || !localLdflags.isEmpty()) {
        TargetCpuType targetCpuType = entry.getKey();
        String targetArchAbi = getTargetArchAbi(targetCpuType);
        outputLinesBuilder.add(String.format("ifeq ($(TARGET_ARCH_ABI),%s)", targetArchAbi));
        if (!localCflags.isEmpty()) {
          outputLinesBuilder.add("BUCK_DEP_CFLAGS=" + localCflags);
        }
        if (!localLdflags.isEmpty()) {
          outputLinesBuilder.add("BUCK_DEP_LDFLAGS=" + localLdflags);
        }
        outputLinesBuilder.add("endif");
        outputLinesBuilder.add("");
      }
    }
    // GCC-only magic that rewrites non-deterministic parts of builds
    String ndksubst = AndroidNdkConstants.ANDROID_NDK_ROOT;
    outputLinesBuilder.addAll(
        ImmutableList.copyOf(
            new String[] {
              // We're evaluated once per architecture, but want to add the cflags only once.
              "ifeq ($(BUCK_ALREADY_HOOKED_CFLAGS),)",
              "BUCK_ALREADY_HOOKED_CFLAGS := 1",
              // Only GCC supports -fdebug-prefix-map
              "ifeq ($(filter clang%,$(NDK_TOOLCHAIN_VERSION)),)",
              // Replace absolute paths with machine-relative ones.
              "NDK_APP_CFLAGS += -fdebug-prefix-map=$(NDK_ROOT)/=" + ndksubst + "/",
              "NDK_APP_CFLAGS += -fdebug-prefix-map=$(abspath $(BUCK_PROJECT_DIR))/=./",
              // Replace paths relative to the build rule with paths relative to the
              // repository root.
              "NDK_APP_CFLAGS += -fdebug-prefix-map=$(BUCK_PROJECT_DIR)/=./",
              "NDK_APP_CFLAGS += -fdebug-prefix-map=./="
                  + ".$(subst $(abspath $(BUCK_PROJECT_DIR)),,$(abspath $(CURDIR)))/",
              "NDK_APP_CFLAGS += -fno-record-gcc-switches",
              "ifeq ($(filter 4.6,$(TOOLCHAIN_VERSION)),)",
              // Do not let header canonicalization undo the work we just did above. Note that GCC
              // 4.6 doesn't support this option, but that's okay, because it doesn't canonicalize
              // headers either.
              "NDK_APP_CPPFLAGS += -fno-canonical-system-headers",
              // If we include the -fdebug-prefix-map in the switches, the "from"-parts of which
              // contain machine-specific paths, we lose determinism. GCC 4.6 didn't include
              // detailed command line argument information anyway.
              "NDK_APP_CFLAGS += -gno-record-gcc-switches",
              "endif", // !GCC 4.6
              "endif", // !clang
              // Rewrite NDK module paths to import managed modules by relative path instead of by
              // absolute path, but only for modules under the project root.
              "BUCK_SAVED_IMPORTS := $(__ndk_import_dirs)",
              "__ndk_import_dirs :=",
              "$(foreach __dir,$(BUCK_SAVED_IMPORTS),\\",
              "$(call import-add-path-optional,\\",
              "$(if $(filter $(abspath $(BUCK_PROJECT_DIR))%,$(__dir)),\\",
              "$(BUCK_PROJECT_DIR)$(patsubst $(abspath $(BUCK_PROJECT_DIR))%,%,$(__dir)),\\",
              "$(__dir))))",
              "endif", // !already hooked
              // Now add a toolchain directory to replace. GCC's debug path replacement evaluates
              // candidate replaces last-first (because it internally pushes them all onto a stack
              // and scans the stack first-match-wins), so only add them after the more
              // generic paths.
              "NDK_APP_CFLAGS += -fdebug-prefix-map=$(TOOLCHAIN_PREBUILT_ROOT)/="
                  + "@ANDROID_NDK_ROOT@/toolchains/$(TOOLCHAIN_NAME)/prebuilt/@BUILD_HOST@/",
            }));
    // Chain into the rule's own Android.mk after the BUCK-injected preamble.
    outputLinesBuilder.add("include Android.mk");
    String contents = Joiner.on(System.lineSeparator()).join(outputLinesBuilder.build());
    // Return both the makefile text and the extra deps it implies for the top-level rule.
    return new Pair<String, Iterable<BuildRule>>(contents, deps.build());
  }
@VisibleForTesting
protected ImmutableSortedSet<SourcePath> findSources(
ProjectFilesystem filesystem, Path buildRulePath) {
ImmutableSortedSet.Builder<SourcePath> srcs = ImmutableSortedSet.naturalOrder();
try {
Path rootDirectory = filesystem.resolve(buildRulePath);
Files.walkFileTree(
rootDirectory,
EnumSet.of(FileVisitOption.FOLLOW_LINKS),
/* maxDepth */ Integer.MAX_VALUE,
new SimpleFileVisitor<Path>() {
@Override
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs)
throws IOException {
if (EXTENSIONS_REGEX.matcher(file.toString()).matches()) {
srcs.add(
PathSourcePath.of(
filesystem, buildRulePath.resolve(rootDirectory.relativize(file))));
}
return super.visitFile(file, attrs);
}
});
} catch (IOException e) {
throw new RuntimeException(e);
}
return srcs.build();
}
@Override
public NdkLibrary createBuildRule(
BuildRuleCreationContextWithTargetGraph context,
BuildTarget buildTarget,
BuildRuleParams params,
NdkLibraryDescriptionArg args) {
ToolchainProvider toolchainProvider = context.getToolchainProvider();
ProjectFilesystem projectFilesystem = context.getProjectFilesystem();
Pair<String, Iterable<BuildRule>> makefilePair =
generateMakefile(
toolchainProvider, projectFilesystem, params, context.getBuildRuleResolver());
ImmutableSortedSet<SourcePath> sources;
if (!args.getSrcs().isEmpty()) {
sources = args.getSrcs();
} else {
sources = findSources(projectFilesystem, buildTarget.getBasePath());
}
AndroidNdk androidNdk = toolchainProvider.getByName(AndroidNdk.DEFAULT_NAME, AndroidNdk.class);
return new NdkLibrary(
buildTarget,
projectFilesystem,
toolchainProvider.getByName(AndroidNdk.DEFAULT_NAME, AndroidNdk.class),
params.copyAppendingExtraDeps(
ImmutableSortedSet.<BuildRule>naturalOrder().addAll(makefilePair.getSecond()).build()),
getGeneratedMakefilePath(buildTarget, projectFilesystem),
makefilePair.getFirst(),
sources,
args.getFlags(),
args.getIsAsset(),
androidNdk.getNdkVersion(),
MACRO_HANDLER.getExpander(
buildTarget, context.getCellPathResolver(), context.getBuildRuleResolver()));
}
  // Constructor-arg definition for ndk_library rules; the immutable implementation
  // (NdkLibraryDescriptionArg) is generated from this interface by the Immutables processor.
  @BuckStyleImmutable
  @Value.Immutable
  interface AbstractNdkLibraryDescriptionArg
      extends CommonDescriptionArg, HasDeclaredDeps, HasSrcs {
    // Extra flags passed through to the ndk-build invocation.
    ImmutableList<String> getFlags();
    // Whether the built library is packaged as an asset; defaults to false.
    @Value.Default
    default boolean getIsAsset() {
      return false;
    }
  }
}
| |
/*
* Copyright (c) 2016 Gridtec. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package at.gridtec.lambda4j.function.bi.conversion;
import at.gridtec.lambda4j.Lambda;
import at.gridtec.lambda4j.consumer.bi.BiIntConsumer;
import at.gridtec.lambda4j.function.bi.BiFunction2;
import at.gridtec.lambda4j.function.bi.BiIntFunction;
import at.gridtec.lambda4j.function.bi.to.ToDoubleBiFunction2;
import at.gridtec.lambda4j.function.conversion.BooleanToIntFunction;
import at.gridtec.lambda4j.function.conversion.ByteToIntFunction;
import at.gridtec.lambda4j.function.conversion.CharToIntFunction;
import at.gridtec.lambda4j.function.conversion.DoubleToByteFunction;
import at.gridtec.lambda4j.function.conversion.DoubleToCharFunction;
import at.gridtec.lambda4j.function.conversion.DoubleToFloatFunction;
import at.gridtec.lambda4j.function.conversion.DoubleToShortFunction;
import at.gridtec.lambda4j.function.conversion.FloatToIntFunction;
import at.gridtec.lambda4j.function.conversion.IntToDoubleFunction2;
import at.gridtec.lambda4j.function.conversion.ShortToIntFunction;
import at.gridtec.lambda4j.operator.binary.DoubleBinaryOperator2;
import at.gridtec.lambda4j.operator.binary.IntBinaryOperator2;
import at.gridtec.lambda4j.predicate.bi.BiIntPredicate;
import org.apache.commons.lang3.tuple.Pair;
import javax.annotation.Nonnegative;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.DoubleConsumer;
import java.util.function.DoubleFunction;
import java.util.function.DoublePredicate;
import java.util.function.DoubleToIntFunction;
import java.util.function.DoubleToLongFunction;
import java.util.function.DoubleUnaryOperator;
import java.util.function.IntToDoubleFunction;
import java.util.function.IntUnaryOperator;
import java.util.function.LongToIntFunction;
import java.util.function.ToIntFunction;
/**
* Represents an operation that accepts two {@code int}-valued input arguments and produces a
* {@code double}-valued result.
* This is a primitive specialization of {@link BiFunction2}.
* <p>
* This is a {@link FunctionalInterface} whose functional method is {@link #applyAsDouble(int, int)}.
*
* @see BiFunction2
*/
@SuppressWarnings("unused")
@FunctionalInterface
public interface BiIntToDoubleFunction extends Lambda {
/**
* Constructs a {@link BiIntToDoubleFunction} based on a lambda expression or a method reference. Thereby the given
* lambda expression or method reference is returned on an as-is basis to implicitly transform it to the desired
* type. With this method, it is possible to ensure that correct type is used from lambda expression or method
* reference.
*
* @param expression A lambda expression or (typically) a method reference, e.g. {@code this::method}
* @return A {@code BiIntToDoubleFunction} from given lambda expression or method reference.
* @implNote This implementation allows the given argument to be {@code null}, but only if {@code null} given,
* {@code null} will be returned.
* @see <a href="https://docs.oracle.com/javase/tutorial/java/javaOO/lambdaexpressions.html#syntax">Lambda
* Expression</a>
* @see <a href="https://docs.oracle.com/javase/tutorial/java/javaOO/methodreferences.html">Method Reference</a>
*/
static BiIntToDoubleFunction of(@Nullable final BiIntToDoubleFunction expression) {
return expression;
}
/**
* Calls the given {@link BiIntToDoubleFunction} with the given arguments and returns its result.
*
* @param function The function to be called
* @param value1 The first argument to the function
* @param value2 The second argument to the function
* @return The result from the given {@code BiIntToDoubleFunction}.
* @throws NullPointerException If given argument is {@code null}
*/
static double call(@Nonnull final BiIntToDoubleFunction function, int value1, int value2) {
Objects.requireNonNull(function);
return function.applyAsDouble(value1, value2);
}
/**
* Creates a {@link BiIntToDoubleFunction} which uses the {@code first} parameter of this one as argument for the
* given {@link IntToDoubleFunction}.
*
* @param function The function which accepts the {@code first} parameter of this one
* @return Creates a {@code BiIntToDoubleFunction} which uses the {@code first} parameter of this one as argument
* for the given {@code IntToDoubleFunction}.
* @throws NullPointerException If given argument is {@code null}
*/
@Nonnull
static BiIntToDoubleFunction onlyFirst(@Nonnull final IntToDoubleFunction function) {
Objects.requireNonNull(function);
return (value1, value2) -> function.applyAsDouble(value1);
}
/**
* Creates a {@link BiIntToDoubleFunction} which uses the {@code second} parameter of this one as argument for the
* given {@link IntToDoubleFunction}.
*
* @param function The function which accepts the {@code second} parameter of this one
* @return Creates a {@code BiIntToDoubleFunction} which uses the {@code second} parameter of this one as argument
* for the given {@code IntToDoubleFunction}.
* @throws NullPointerException If given argument is {@code null}
*/
@Nonnull
static BiIntToDoubleFunction onlySecond(@Nonnull final IntToDoubleFunction function) {
Objects.requireNonNull(function);
return (value1, value2) -> function.applyAsDouble(value2);
}
/**
* Creates a {@link BiIntToDoubleFunction} which always returns a given value.
*
* @param ret The return value for the constant
* @return A {@code BiIntToDoubleFunction} which always returns a given value.
*/
@Nonnull
static BiIntToDoubleFunction constant(double ret) {
return (value1, value2) -> ret;
}
    /**
     * Applies this function to the given arguments.
     * This is the functional method of this interface.
     *
     * @param value1 The first argument to the function
     * @param value2 The second argument to the function
     * @return The return value from the function, which is its result.
     */
    double applyAsDouble(int value1, int value2);
/**
* Applies this function partially to some arguments of this one, producing a {@link IntToDoubleFunction2} as
* result.
*
* @param value1 The first argument to this function used to partially apply this function
* @return A {@code IntToDoubleFunction2} that represents this function partially applied the some arguments.
*/
@Nonnull
default IntToDoubleFunction2 papplyAsDouble(int value1) {
return (value2) -> this.applyAsDouble(value1, value2);
}
    /**
     * Returns the number of arguments for this function.
     *
     * @return The number of arguments for this function.
     * @implSpec The default implementation always returns {@code 2}.
     */
    @Nonnegative
    default int arity() {
        return 2;
    }
/**
* Returns a composed {@link ToDoubleBiFunction2} that first applies the {@code before} functions to its input, and
* then applies this function to the result.
* If evaluation of either operation throws an exception, it is relayed to the caller of the composed operation.
*
* @param <A> The type of the argument to the first given function, and of composed function
* @param <B> The type of the argument to the second given function, and of composed function
* @param before1 The first function to apply before this function is applied
* @param before2 The second function to apply before this function is applied
* @return A composed {@code ToDoubleBiFunction2} that first applies the {@code before} functions to its input, and
* then applies this function to the result.
* @throws NullPointerException If given argument is {@code null}
* @implSpec The input argument of this method is able to handle every type.
*/
@Nonnull
default <A, B> ToDoubleBiFunction2<A, B> compose(@Nonnull final ToIntFunction<? super A> before1,
@Nonnull final ToIntFunction<? super B> before2) {
Objects.requireNonNull(before1);
Objects.requireNonNull(before2);
return (a, b) -> applyAsDouble(before1.applyAsInt(a), before2.applyAsInt(b));
}
/**
* Returns a composed {@link BiBooleanToDoubleFunction} that first applies the {@code before} functions to its
* input, and then applies this function to the result. If evaluation of either operation throws an exception, it is
* relayed to the caller of the composed operation. This method is just convenience, to provide the ability to
* execute an operation which accepts {@code boolean} input, before this primitive function is executed.
*
* @param before1 The first function to apply before this function is applied
* @param before2 The second function to apply before this function is applied
* @return A composed {@code BiBooleanToDoubleFunction} that first applies the {@code before} functions to its
* input, and then applies this function to the result.
* @throws NullPointerException If given argument is {@code null}
* @implSpec The input argument of this method is a able to handle primitive values. In this case this is {@code
* boolean}.
*/
@Nonnull
default BiBooleanToDoubleFunction composeFromBoolean(@Nonnull final BooleanToIntFunction before1,
@Nonnull final BooleanToIntFunction before2) {
Objects.requireNonNull(before1);
Objects.requireNonNull(before2);
return (value1, value2) -> applyAsDouble(before1.applyAsInt(value1), before2.applyAsInt(value2));
}
/**
* Returns a composed {@link BiByteToDoubleFunction} that first applies the {@code before} functions to
* its input, and then applies this function to the result.
* If evaluation of either operation throws an exception, it is relayed to the caller of the composed operation.
* This method is just convenience, to provide the ability to execute an operation which accepts {@code byte} input,
* before this primitive function is executed.
*
* @param before1 The first function to apply before this function is applied
* @param before2 The second function to apply before this function is applied
* @return A composed {@code BiByteToDoubleFunction} that first applies the {@code before} functions to its input,
* and then applies this function to the result.
* @throws NullPointerException If given argument is {@code null}
* @implSpec The input argument of this method is a able to handle primitive values. In this case this is {@code
* byte}.
*/
@Nonnull
default BiByteToDoubleFunction composeFromByte(@Nonnull final ByteToIntFunction before1,
@Nonnull final ByteToIntFunction before2) {
Objects.requireNonNull(before1);
Objects.requireNonNull(before2);
return (value1, value2) -> applyAsDouble(before1.applyAsInt(value1), before2.applyAsInt(value2));
}
/**
* Returns a composed {@link BiCharToDoubleFunction} that first applies the {@code before} functions to
* its input, and then applies this function to the result.
* If evaluation of either operation throws an exception, it is relayed to the caller of the composed operation.
* This method is just convenience, to provide the ability to execute an operation which accepts {@code char} input,
* before this primitive function is executed.
*
* @param before1 The first function to apply before this function is applied
* @param before2 The second function to apply before this function is applied
* @return A composed {@code BiCharToDoubleFunction} that first applies the {@code before} functions to its input,
* and then applies this function to the result.
* @throws NullPointerException If given argument is {@code null}
* @implSpec The input argument of this method is a able to handle primitive values. In this case this is {@code
* char}.
*/
@Nonnull
default BiCharToDoubleFunction composeFromChar(@Nonnull final CharToIntFunction before1,
@Nonnull final CharToIntFunction before2) {
Objects.requireNonNull(before1);
Objects.requireNonNull(before2);
return (value1, value2) -> applyAsDouble(before1.applyAsInt(value1), before2.applyAsInt(value2));
}
/**
* Returns a composed {@link DoubleBinaryOperator2} that first applies the {@code before} functions to its input,
* and then applies this function to the result. If evaluation of either operation throws an exception, it is
* relayed to the caller of the composed operation. This method is just convenience, to provide the ability to
* execute an operation which accepts {@code double} input, before this primitive function is executed.
*
* @param before1 The first function to apply before this function is applied
* @param before2 The second function to apply before this function is applied
* @return A composed {@code DoubleBinaryOperator2} that first applies the {@code before} functions to its input,
* and then applies this function to the result.
* @throws NullPointerException If given argument is {@code null}
* @implSpec The input argument of this method is a able to handle primitive values. In this case this is {@code
* double}.
*/
@Nonnull
default DoubleBinaryOperator2 composeFromDouble(@Nonnull final DoubleToIntFunction before1,
@Nonnull final DoubleToIntFunction before2) {
Objects.requireNonNull(before1);
Objects.requireNonNull(before2);
return (value1, value2) -> applyAsDouble(before1.applyAsInt(value1), before2.applyAsInt(value2));
}
/**
* Returns a composed {@link BiFloatToDoubleFunction} that first applies the {@code before} functions to its input,
* and then applies this function to the result. If evaluation of either operation throws an exception, it is
* relayed to the caller of the composed operation. This method is just convenience, to provide the ability to
* execute an operation which accepts {@code float} input, before this primitive function is executed.
*
* @param before1 The first function to apply before this function is applied
* @param before2 The second function to apply before this function is applied
* @return A composed {@code BiFloatToDoubleFunction} that first applies the {@code before} functions to its input,
* and then applies this function to the result.
* @throws NullPointerException If given argument is {@code null}
* @implSpec The input argument of this method is a able to handle primitive values. In this case this is {@code
* float}.
*/
@Nonnull
default BiFloatToDoubleFunction composeFromFloat(@Nonnull final FloatToIntFunction before1,
@Nonnull final FloatToIntFunction before2) {
Objects.requireNonNull(before1);
Objects.requireNonNull(before2);
return (value1, value2) -> applyAsDouble(before1.applyAsInt(value1), before2.applyAsInt(value2));
}
/**
* Returns a composed {@link BiIntToDoubleFunction} that first applies the {@code before} operators to
* its input, and then applies this function to the result.
* If evaluation of either operation throws an exception, it is relayed to the caller of the composed operation.
* This method is just convenience, to provide the ability to execute an operation which accepts {@code int} input,
* before this primitive function is executed.
*
* @param before1 The first operator to apply before this function is applied
* @param before2 The second operator to apply before this function is applied
* @return A composed {@code BiIntToDoubleFunction} that first applies the {@code before} operators to its input,
* and then applies this function to the result.
* @throws NullPointerException If given argument is {@code null}
* @implSpec The input argument of this method is a able to handle primitive values. In this case this is {@code
* int}.
*/
@Nonnull
default BiIntToDoubleFunction composeFromInt(@Nonnull final IntUnaryOperator before1,
@Nonnull final IntUnaryOperator before2) {
Objects.requireNonNull(before1);
Objects.requireNonNull(before2);
return (value1, value2) -> applyAsDouble(before1.applyAsInt(value1), before2.applyAsInt(value2));
}
/**
* Returns a composed {@link BiLongToDoubleFunction} that first applies the {@code before} functions to
* its input, and then applies this function to the result.
* If evaluation of either operation throws an exception, it is relayed to the caller of the composed operation.
* This method is just convenience, to provide the ability to execute an operation which accepts {@code long} input,
* before this primitive function is executed.
*
* @param before1 The first function to apply before this function is applied
* @param before2 The second function to apply before this function is applied
* @return A composed {@code BiLongToDoubleFunction} that first applies the {@code before} functions to its input,
* and then applies this function to the result.
* @throws NullPointerException If given argument is {@code null}
* @implSpec The input argument of this method is a able to handle primitive values. In this case this is {@code
* long}.
*/
@Nonnull
default BiLongToDoubleFunction composeFromLong(@Nonnull final LongToIntFunction before1,
@Nonnull final LongToIntFunction before2) {
Objects.requireNonNull(before1);
Objects.requireNonNull(before2);
return (value1, value2) -> applyAsDouble(before1.applyAsInt(value1), before2.applyAsInt(value2));
}
/**
* Returns a composed {@link BiShortToDoubleFunction} that first applies the {@code before} functions to its input,
* and then applies this function to the result. If evaluation of either operation throws an exception, it is
* relayed to the caller of the composed operation. This method is just convenience, to provide the ability to
* execute an operation which accepts {@code short} input, before this primitive function is executed.
*
* @param before1 The first function to apply before this function is applied
* @param before2 The second function to apply before this function is applied
* @return A composed {@code BiShortToDoubleFunction} that first applies the {@code before} functions to its input,
* and then applies this function to the result.
* @throws NullPointerException If given argument is {@code null}
* @implSpec The input argument of this method is a able to handle primitive values. In this case this is {@code
* short}.
*/
@Nonnull
default BiShortToDoubleFunction composeFromShort(@Nonnull final ShortToIntFunction before1,
@Nonnull final ShortToIntFunction before2) {
Objects.requireNonNull(before1);
Objects.requireNonNull(before2);
return (value1, value2) -> applyAsDouble(before1.applyAsInt(value1), before2.applyAsInt(value2));
}
/**
* Returns a composed {@link BiIntFunction} that first applies this function to its input, and then applies the
* {@code after} function to the result.
* If evaluation of either operation throws an exception, it is relayed to the caller of the composed operation.
*
* @param <S> The type of return value from the {@code after} function, and of the composed function
* @param after The function to apply after this function is applied
* @return A composed {@code BiIntFunction} that first applies this function to its input, and then applies the
* {@code after} function to the result.
* @throws NullPointerException If given argument is {@code null}
* @implSpec The input argument of this method is able to return every type.
*/
@Nonnull
default <S> BiIntFunction<S> andThen(@Nonnull final DoubleFunction<? extends S> after) {
Objects.requireNonNull(after);
return (value1, value2) -> after.apply(applyAsDouble(value1, value2));
}
/**
* Returns a composed {@link BiIntPredicate} that first applies this function to its input, and then applies the
* {@code after} predicate to the result. If evaluation of either operation throws an exception, it is relayed to
* the caller of the composed operation. This method is just convenience, to provide the ability to transform this
* primitive function to an operation returning {@code boolean}.
*
* @param after The predicate to apply after this function is applied
* @return A composed {@code BiIntPredicate} that first applies this function to its input, and then applies the
* {@code after} predicate to the result.
* @throws NullPointerException If given argument is {@code null}
* @implSpec The input argument of this method is a able to return primitive values. In this case this is {@code
* boolean}.
*/
@Nonnull
default BiIntPredicate andThenToBoolean(@Nonnull final DoublePredicate after) {
Objects.requireNonNull(after);
return (value1, value2) -> after.test(applyAsDouble(value1, value2));
}
/**
* Returns a composed {@link BiIntToByteFunction} that first applies this function to its input, and then applies
* the {@code after} function to the result. If evaluation of either operation throws an exception, it is relayed to
* the caller of the composed operation. This method is just convenience, to provide the ability to transform this
* primitive function to an operation returning {@code byte}.
*
* @param after The function to apply after this function is applied
* @return A composed {@code BiIntToByteFunction} that first applies this function to its input, and then applies
* the {@code after} function to the result.
* @throws NullPointerException If given argument is {@code null}
* @implSpec The input argument of this method is a able to return primitive values. In this case this is {@code
* byte}.
*/
@Nonnull
default BiIntToByteFunction andThenToByte(@Nonnull final DoubleToByteFunction after) {
Objects.requireNonNull(after);
return (value1, value2) -> after.applyAsByte(applyAsDouble(value1, value2));
}
/**
* Returns a composed {@link BiIntToCharFunction} that first applies this function to its input, and then applies
* the {@code after} function to the result. If evaluation of either operation throws an exception, it is relayed to
* the caller of the composed operation. This method is just convenience, to provide the ability to transform this
* primitive function to an operation returning {@code char}.
*
* @param after The function to apply after this function is applied
* @return A composed {@code BiIntToCharFunction} that first applies this function to its input, and then applies
* the {@code after} function to the result.
* @throws NullPointerException If given argument is {@code null}
* @implSpec The input argument of this method is a able to return primitive values. In this case this is {@code
* char}.
*/
@Nonnull
default BiIntToCharFunction andThenToChar(@Nonnull final DoubleToCharFunction after) {
Objects.requireNonNull(after);
return (value1, value2) -> after.applyAsChar(applyAsDouble(value1, value2));
}
/**
* Returns a composed {@link BiIntToDoubleFunction} that first applies this function to its input, and then applies
* the {@code after} operator to the result. If evaluation of either operation throws an exception, it is relayed to
* the caller of the composed operation. This method is just convenience, to provide the ability to transform this
* primitive function to an operation returning {@code double}.
*
* @param after The operator to apply after this function is applied
* @return A composed {@code BiIntToDoubleFunction} that first applies this function to its input, and then applies
* the {@code after} operator to the result.
* @throws NullPointerException If given argument is {@code null}
* @implSpec The input argument of this method is a able to return primitive values. In this case this is {@code
* double}.
*/
@Nonnull
default BiIntToDoubleFunction andThenToDouble(@Nonnull final DoubleUnaryOperator after) {
Objects.requireNonNull(after);
return (value1, value2) -> after.applyAsDouble(applyAsDouble(value1, value2));
}
/**
* Returns a composed {@link BiIntToFloatFunction} that first applies this function to its input, and then applies
* the {@code after} function to the result. If evaluation of either operation throws an exception, it is relayed to
* the caller of the composed operation. This method is just convenience, to provide the ability to transform this
* primitive function to an operation returning {@code float}.
*
* @param after The function to apply after this function is applied
* @return A composed {@code BiIntToFloatFunction} that first applies this function to its input, and then applies
* the {@code after} function to the result.
* @throws NullPointerException If given argument is {@code null}
* @implSpec The input argument of this method is a able to return primitive values. In this case this is {@code
* float}.
*/
@Nonnull
default BiIntToFloatFunction andThenToFloat(@Nonnull final DoubleToFloatFunction after) {
Objects.requireNonNull(after);
return (value1, value2) -> after.applyAsFloat(applyAsDouble(value1, value2));
}
/**
 * Returns a composed {@link IntBinaryOperator2} that first applies this function to its input and then applies
 * the {@code after} function to the result. If evaluation of either operation throws an exception, it is relayed
 * to the caller of the composed operation. This method is a convenience that allows transforming this primitive
 * function into an operation returning {@code int}.
 *
 * @param after The function to apply after this function is applied
 * @return A composed {@code IntBinaryOperator2} that first applies this function to its input, and then applies the
 * {@code after} function to the result.
 * @throws NullPointerException If given argument is {@code null}
 * @implSpec The input argument of this method is able to return primitive values. In this case this is {@code
 * int}.
 */
@Nonnull
default IntBinaryOperator2 andThenToInt(@Nonnull final DoubleToIntFunction after) {
    Objects.requireNonNull(after);
    // Compose: apply this function, then convert the double result to int via 'after'.
    return (a, b) -> after.applyAsInt(applyAsDouble(a, b));
}
/**
 * Returns a composed {@link BiIntToLongFunction} that first applies this function to its input and then applies
 * the {@code after} function to the result. If evaluation of either operation throws an exception, it is relayed
 * to the caller of the composed operation. This method is a convenience that allows transforming this primitive
 * function into an operation returning {@code long}.
 *
 * @param after The function to apply after this function is applied
 * @return A composed {@code BiIntToLongFunction} that first applies this function to its input, and then applies
 * the {@code after} function to the result.
 * @throws NullPointerException If given argument is {@code null}
 * @implSpec The input argument of this method is able to return primitive values. In this case this is {@code
 * long}.
 */
@Nonnull
default BiIntToLongFunction andThenToLong(@Nonnull final DoubleToLongFunction after) {
    Objects.requireNonNull(after);
    // Compose: apply this function, then convert the double result to long via 'after'.
    return (a, b) -> after.applyAsLong(applyAsDouble(a, b));
}
/**
 * Returns a composed {@link BiIntToShortFunction} that first applies this function to its input and then applies
 * the {@code after} function to the result. If evaluation of either operation throws an exception, it is relayed
 * to the caller of the composed operation. This method is a convenience that allows transforming this primitive
 * function into an operation returning {@code short}.
 *
 * @param after The function to apply after this function is applied
 * @return A composed {@code BiIntToShortFunction} that first applies this function to its input, and then applies
 * the {@code after} function to the result.
 * @throws NullPointerException If given argument is {@code null}
 * @implSpec The input argument of this method is able to return primitive values. In this case this is {@code
 * short}.
 */
@Nonnull
default BiIntToShortFunction andThenToShort(@Nonnull final DoubleToShortFunction after) {
    Objects.requireNonNull(after);
    // Compose: apply this function, then narrow the double result to short via 'after'.
    return (a, b) -> after.applyAsShort(applyAsDouble(a, b));
}
/**
 * Returns a composed {@link BiIntConsumer} that first applies this function to its input, and then consumes the
 * result using the given {@link DoubleConsumer}. If evaluation of either operation throws an exception, it is
 * relayed to the caller of the composed operation.
 *
 * @param consumer The operation which consumes the result from this operation
 * @return A composed {@code BiIntConsumer} that first applies this function to its input, and then consumes the
 * result using the given {@code DoubleConsumer}.
 * @throws NullPointerException If given argument is {@code null}
 */
@Nonnull
default BiIntConsumer consume(@Nonnull final DoubleConsumer consumer) {
    Objects.requireNonNull(consumer);
    // Apply this function and hand the double result over to the consumer.
    return (a, b) -> consumer.accept(applyAsDouble(a, b));
}
/**
 * Returns a memoized (caching) version of this {@link BiIntToDoubleFunction}. Every distinct pair of input
 * parameters is evaluated once; subsequent calls with the same pair return the cached value instead of
 * recomputing it.
 * <p>
 * Unless the function and therefore the used cache will be garbage-collected, it will keep all memoized values
 * forever.
 *
 * @return A memoized (caching) version of this {@code BiIntToDoubleFunction}.
 * @implSpec This implementation does not allow the input parameters or return value to be {@code null} for the
 * resulting memoized function, as the cache used internally does not permit {@code null} keys or values.
 * @implNote The returned memoized function can be safely used concurrently from multiple threads which makes it
 * thread-safe.
 */
@Nonnull
default BiIntToDoubleFunction memoized() {
    // Already memoized: wrapping again would only add overhead.
    if (isMemoized()) {
        return this;
    }
    final Map<Pair<Integer, Integer>, Double> memo = new ConcurrentHashMap<>();
    final Object mutex = new Object();
    // Intersection cast tags the lambda as Memoized so isMemoized() reports true.
    return (BiIntToDoubleFunction & Memoized) (v1, v2) -> {
        final double result;
        // Lock serializes computation so the underlying function is invoked
        // at most once per distinct key pair.
        synchronized (mutex) {
            result = memo.computeIfAbsent(Pair.of(v1, v2),
                    key -> applyAsDouble(key.getLeft(), key.getRight()));
        }
        return result;
    };
}
/**
 * Returns a composed {@link BiFunction2} which represents this {@link BiIntToDoubleFunction}. Thereby the
 * primitive input arguments for this function are autoboxed. This method provides the possibility to use this
 * {@code BiIntToDoubleFunction} with methods provided by the {@code JDK}.
 *
 * @return A composed {@code BiFunction2} which represents this {@code BiIntToDoubleFunction}.
 */
@Nonnull
default BiFunction2<Integer, Integer, Double> boxed() {
    // Explicit lambda instead of a method reference; unboxing of the Integer
    // arguments and boxing of the double result happen automatically.
    return (value1, value2) -> applyAsDouble(value1, value2);
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.jetty;
import org.apache.camel.Consumer;
import org.apache.camel.Processor;
import org.apache.camel.Producer;
import org.apache.camel.ResolveEndpointFailedException;
import org.apache.camel.component.http.HttpConsumer;
import org.apache.camel.component.http.HttpEndpoint;
import org.apache.camel.impl.SynchronousDelegateProducer;
import org.apache.camel.spi.UriEndpoint;
import org.apache.camel.spi.UriParam;
import org.apache.camel.util.IntrospectionSupport;
import org.apache.camel.util.jsse.SSLContextParameters;
import org.eclipse.jetty.client.HttpClient;
import org.eclipse.jetty.server.Handler;
import javax.servlet.Filter;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Jetty based HTTP endpoint. Extends the generic {@link HttpEndpoint} with Jetty
 * specific configuration: an optional shared {@link HttpClient}, http client thread
 * pool sizing, continuation handling, multipart filtering and SSL parameters.
 *
 * @version
 */
//@UriEndpoint(scheme = "jetty", title = "Jetty 9", syntax = "jetty:httpUri", consumerClass = HttpConsumer.class, label = "http")
@UriEndpoint(scheme = "jetty", consumerClass = HttpConsumer.class, title = "Jetty Endpoint", syntax ="http://")
public abstract class JettyHttpEndpoint extends HttpEndpoint {
    @UriParam
    private boolean sessionSupport;
    private List<Handler> handlers;
    // Optional shared client; when null each producer creates its own client.
    private HttpClient client;
    @UriParam
    private Integer httpClientMinThreads;
    @UriParam
    private Integer httpClientMaxThreads;
    // Lazily created in getJettyBinding.
    private JettyHttpBinding jettyBinding;
    @UriParam
    private boolean enableJmx;
    @UriParam
    private boolean enableMultipartFilter;
    @UriParam
    private boolean sendServerVersion = true;
    @UriParam
    private boolean sendDateHeader;
    private Filter multipartFilter;
    private List<Filter> filters;
    @UriParam
    private Long continuationTimeout;
    @UriParam
    private Boolean useContinuation;
    private SSLContextParameters sslContextParameters;
    // Kept as the original (uncopied) map so a new producer can re-apply them later.
    private Map<String, Object> httpClientParameters;
    public JettyHttpEndpoint(JettyHttpComponent component, String uri, URI httpURL) throws URISyntaxException {
        super(uri, component, httpURL);
    }
    @Override
    public JettyHttpComponent getComponent() {
        return (JettyHttpComponent) super.getComponent();
    }
    /**
     * Creates a producer. Reuses the shared {@link HttpClient} when one is configured
     * (starting it if necessary), otherwise creates a dedicated client for the producer.
     */
    @Override
    public Producer createProducer() throws Exception {
        JettyHttpProducer answer = new JettyHttpProducer(this);
        if (client != null) {
            // use shared client, and ensure its started so we can use it
            client.start();
            answer.setSharedClient(client);
            answer.setBinding(getJettyBinding(client));
        } else {
            HttpClient httpClient = createJettyHttpClient();
            answer.setClient(httpClient);
            answer.setBinding(getJettyBinding(httpClient));
        }
        if (isSynchronous()) {
            return new SynchronousDelegateProducer(answer);
        } else {
            return answer;
        }
    }
    /**
     * Creates a new Jetty {@link HttpClient}, applying any configured http client
     * parameters and validating that every parameter could be set.
     *
     * @throws ResolveEndpointFailedException if one or more http client parameters are unknown
     */
    protected HttpClient createJettyHttpClient() throws Exception {
        // create a new client
        // thread pool min/max from endpoint take precedence over from component
        Integer min = httpClientMinThreads != null ? httpClientMinThreads : getComponent().getHttpClientMinThreads();
        Integer max = httpClientMaxThreads != null ? httpClientMaxThreads : getComponent().getHttpClientMaxThreads();
        HttpClient httpClient = getComponent().createHttpClient(this, min, max, sslContextParameters);
        // set optional http client parameters
        if (httpClientParameters != null) {
            // copy parameters as we need to re-use them again if creating a new producer later
            Map<String, Object> params = new HashMap<>(httpClientParameters);
            // Can not be set on httpClient for jetty 9
            params.remove("timeout");
            IntrospectionSupport.setProperties(httpClient, params);
            // validate we could set all parameters
            if (!params.isEmpty()) {
                throw new ResolveEndpointFailedException(getEndpointUri(), "There are " + params.size()
                    + " parameters that couldn't be set on the endpoint."
                    + " Check the uri if the parameters are spelt correctly and that they are properties of the endpoint."
                    + " Unknown parameters=[" + params + "]");
            }
        }
        return httpClient;
    }
    @Override
    public Consumer createConsumer(Processor processor) throws Exception {
        HttpConsumer answer = new HttpConsumer(this, processor);
        configureConsumer(answer);
        return answer;
    }
    public void setSessionSupport(boolean support) {
        sessionSupport = support;
    }
    public boolean isSessionSupport() {
        return sessionSupport;
    }
    public List<Handler> getHandlers() {
        return handlers;
    }
    public void setHandlers(List<Handler> handlers) {
        this.handlers = handlers;
    }
    public HttpClient getClient() throws Exception {
        return client;
    }
    /**
     * Sets a shared {@link HttpClient} to use for all producers
     * created by this endpoint. By default each producer will
     * use a new http client, and not share.
     * <p/>
     * <b>Important: </b> Make sure to handle the lifecycle of the shared
     * client, such as stopping the client, when it is no longer in use.
     * Camel will call the <tt>start</tt> method on the client to ensure
     * its started when this endpoint creates a producer.
     * <p/>
     * This options should only be used in special circumstances.
     */
    public void setClient(HttpClient client) {
        this.client = client;
    }
    /**
     * Returns the binding, lazily creating a {@link DefaultJettyHttpBinding}
     * configured from this endpoint on first use.
     */
    // NOTE(review): the httpClient parameter is not used here — verify whether the
    // binding was ever meant to be client-specific.
    public synchronized JettyHttpBinding getJettyBinding(HttpClient httpClient) {
        if (jettyBinding == null) {
            jettyBinding = new DefaultJettyHttpBinding();
            jettyBinding.setHeaderFilterStrategy(getHeaderFilterStrategy());
            jettyBinding.setThrowExceptionOnFailure(isThrowExceptionOnFailure());
            jettyBinding.setTransferException(isTransferException());
        }
        return jettyBinding;
    }
    public void setJettyBinding(JettyHttpBinding jettyBinding) {
        this.jettyBinding = jettyBinding;
    }
    public boolean isEnableJmx() {
        return this.enableJmx;
    }
    public void setEnableJmx(boolean enableJmx) {
        this.enableJmx = enableJmx;
    }
    public boolean isSendServerVersion() {
        return sendServerVersion;
    }
    public void setSendServerVersion(boolean sendServerVersion) {
        this.sendServerVersion = sendServerVersion;
    }
    public boolean isSendDateHeader() {
        return sendDateHeader;
    }
    public void setSendDateHeader(boolean sendDateHeader) {
        this.sendDateHeader = sendDateHeader;
    }
    public boolean isEnableMultipartFilter() {
        return enableMultipartFilter;
    }
    public void setEnableMultipartFilter(boolean enableMultipartFilter) {
        this.enableMultipartFilter = enableMultipartFilter;
    }
    public void setMultipartFilter(Filter filter) {
        this.multipartFilter = filter;
    }
    public Filter getMultipartFilter() {
        return multipartFilter;
    }
    public void setFilters(List<Filter> filterList) {
        this.filters = filterList;
    }
    public List<Filter> getFilters() {
        return filters;
    }
    public Long getContinuationTimeout() {
        return continuationTimeout;
    }
    public void setContinuationTimeout(Long continuationTimeout) {
        this.continuationTimeout = continuationTimeout;
    }
    public Boolean getUseContinuation() {
        return useContinuation;
    }
    public void setUseContinuation(Boolean useContinuation) {
        this.useContinuation = useContinuation;
    }
    public SSLContextParameters getSslContextParameters() {
        return sslContextParameters;
    }
    public void setSslContextParameters(SSLContextParameters sslContextParameters) {
        this.sslContextParameters = sslContextParameters;
    }
    public Integer getHttpClientMinThreads() {
        return httpClientMinThreads;
    }
    public void setHttpClientMinThreads(Integer httpClientMinThreads) {
        this.httpClientMinThreads = httpClientMinThreads;
    }
    public Integer getHttpClientMaxThreads() {
        return httpClientMaxThreads;
    }
    public void setHttpClientMaxThreads(Integer httpClientMaxThreads) {
        this.httpClientMaxThreads = httpClientMaxThreads;
    }
    public Map<String, Object> getHttpClientParameters() {
        return httpClientParameters;
    }
    public void setHttpClientParameters(Map<String, Object> httpClientParameters) {
        this.httpClientParameters = httpClientParameters;
    }
    public abstract JettyContentExchange createContentExchange();
}
| |
/*
* Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.ec2.model;
import java.io.Serializable;
import com.amazonaws.AmazonWebServiceRequest;
import com.amazonaws.Request;
import com.amazonaws.services.ec2.model.transform.CreateVpcRequestMarshaller;
/**
 * Container for the parameters to the {@link com.amazonaws.services.ec2.AmazonEC2#createVpc(CreateVpcRequest) CreateVpc operation}.
 * <p>
 * Creates a VPC with the specified CIDR block. The smallest VPC you can create
 * uses a /28 netmask (16 IP addresses), and the largest uses a /16 netmask
 * (65,536 IP addresses). To help you decide how big to make your VPC, see
 * <a href="http://docs.aws.amazon.com/AmazonVPC/latest/UserGuide/VPC_Subnets.html"> Your VPC and Subnets </a>
 * in the <i>Amazon Virtual Private Cloud User Guide</i> .
 * </p>
 * <p>
 * By default, each instance you launch in the VPC has the default DHCP
 * options, which includes only a default DNS server that we provide
 * (AmazonProvidedDNS). For more information about DHCP options, see
 * <a href="http://docs.aws.amazon.com/AmazonVPC/latest/UserGuide/VPC_DHCP_Options.html"> DHCP Options Sets </a>
 * in the <i>Amazon Virtual Private Cloud User Guide</i> .
 * </p>
 *
 * @see com.amazonaws.services.ec2.AmazonEC2#createVpc(CreateVpcRequest)
 */
public class CreateVpcRequest extends AmazonWebServiceRequest implements Serializable, Cloneable, DryRunSupportedRequest<CreateVpcRequest> {
    /**
     * The network range for the VPC, in CIDR notation. For example,
     * <code>10.0.0.0/16</code>.
     */
    private String cidrBlock;
    /**
     * The supported tenancy options for instances launched into the VPC. A
     * value of <code>default</code> means that instances can be launched with
     * any tenancy; a value of <code>dedicated</code> means all instances
     * launched into the VPC run as dedicated tenancy instances on
     * single-tenant hardware, regardless of the tenancy assigned at launch.
     * <p>Default: <code>default</code>
     * <p>
     * <b>Constraints:</b><br/>
     * <b>Allowed Values: </b>default, dedicated
     */
    private String instanceTenancy;
    /**
     * Default constructor for a new CreateVpcRequest object. Callers should use the
     * setter or fluent setter (with...) methods to initialize this object after creating it.
     */
    public CreateVpcRequest() {}
    /**
     * Constructs a new CreateVpcRequest object.
     * Callers should use the setter or fluent setter (with...) methods to
     * initialize any additional object members.
     *
     * @param cidrBlock The network range for the VPC, in CIDR notation. For
     * example, <code>10.0.0.0/16</code>.
     */
    public CreateVpcRequest(String cidrBlock) {
        setCidrBlock(cidrBlock);
    }
    /**
     * Returns the network range for the VPC, in CIDR notation. For example,
     * <code>10.0.0.0/16</code>.
     *
     * @return The network range for the VPC, in CIDR notation.
     */
    public String getCidrBlock() {
        return cidrBlock;
    }
    /**
     * Sets the network range for the VPC, in CIDR notation. For example,
     * <code>10.0.0.0/16</code>.
     *
     * @param cidrBlock The network range for the VPC, in CIDR notation.
     */
    public void setCidrBlock(String cidrBlock) {
        this.cidrBlock = cidrBlock;
    }
    /**
     * Sets the network range for the VPC, in CIDR notation. For example,
     * <code>10.0.0.0/16</code>.
     *
     * @param cidrBlock The network range for the VPC, in CIDR notation.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     */
    public CreateVpcRequest withCidrBlock(String cidrBlock) {
        this.cidrBlock = cidrBlock;
        return this;
    }
    /**
     * Returns the supported tenancy options for instances launched into the
     * VPC: <code>default</code> (any tenancy) or <code>dedicated</code>
     * (single-tenant hardware).
     * <p>Default: <code>default</code>
     *
     * @return The supported tenancy option (<code>default</code> or <code>dedicated</code>).
     *
     * @see Tenancy
     */
    public String getInstanceTenancy() {
        return instanceTenancy;
    }
    /**
     * Sets the supported tenancy options for instances launched into the VPC:
     * <code>default</code> (any tenancy) or <code>dedicated</code>
     * (single-tenant hardware).
     * <p>Default: <code>default</code>
     *
     * @param instanceTenancy The tenancy option (<code>default</code> or <code>dedicated</code>).
     *
     * @see Tenancy
     */
    public void setInstanceTenancy(String instanceTenancy) {
        this.instanceTenancy = instanceTenancy;
    }
    /**
     * Sets the supported tenancy options for instances launched into the VPC:
     * <code>default</code> (any tenancy) or <code>dedicated</code>
     * (single-tenant hardware).
     * <p>Default: <code>default</code>
     *
     * @param instanceTenancy The tenancy option (<code>default</code> or <code>dedicated</code>).
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     *
     * @see Tenancy
     */
    public CreateVpcRequest withInstanceTenancy(String instanceTenancy) {
        this.instanceTenancy = instanceTenancy;
        return this;
    }
    /**
     * Sets the supported tenancy option for instances launched into the VPC
     * from the {@link Tenancy} enum.
     * <p>Default: <code>default</code>
     *
     * @param instanceTenancy The tenancy option.
     *
     * @see Tenancy
     */
    public void setInstanceTenancy(Tenancy instanceTenancy) {
        this.instanceTenancy = instanceTenancy.toString();
    }
    /**
     * Sets the supported tenancy option for instances launched into the VPC
     * from the {@link Tenancy} enum.
     * <p>Default: <code>default</code>
     *
     * @param instanceTenancy The tenancy option.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     *
     * @see Tenancy
     */
    public CreateVpcRequest withInstanceTenancy(Tenancy instanceTenancy) {
        this.instanceTenancy = instanceTenancy.toString();
        return this;
    }
    /**
     * This method is intended for internal use only.
     * Returns the marshaled request configured with additional parameters to
     * enable operation dry-run.
     */
    @Override
    public Request<CreateVpcRequest> getDryRunRequest() {
        Request<CreateVpcRequest> request = new CreateVpcRequestMarshaller().marshall(this);
        request.addParameter("DryRun", Boolean.toString(true));
        return request;
    }
    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getCidrBlock() != null) sb.append("CidrBlock: " + getCidrBlock() + ",");
        if (getInstanceTenancy() != null) sb.append("InstanceTenancy: " + getInstanceTenancy());
        sb.append("}");
        return sb.toString();
    }
    @Override
    public int hashCode() {
        // Objects.hash produces the same prime-31 accumulation (seeded with 1)
        // as the previous hand-written loop, so hash values are unchanged.
        // Fully qualified to avoid adding an import to this generated file.
        return java.util.Objects.hash(getCidrBlock(), getInstanceTenancy());
    }
    @Override
    public boolean equals(Object obj) {
        if (this == obj) return true;
        // instanceof is false for null, so this also covers the null check.
        if (!(obj instanceof CreateVpcRequest)) return false;
        CreateVpcRequest other = (CreateVpcRequest) obj;
        // Null-safe field comparison, equivalent to the previous xor/equals chain.
        return java.util.Objects.equals(this.getCidrBlock(), other.getCidrBlock())
            && java.util.Objects.equals(this.getInstanceTenancy(), other.getInstanceTenancy());
    }
    @Override
    public CreateVpcRequest clone() {
        return (CreateVpcRequest) super.clone();
    }
}
| |
/*
* Copyright (c) 2017, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.device.mgt.jaxrs.service.api;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import io.swagger.annotations.ApiResponse;
import io.swagger.annotations.ApiResponses;
import io.swagger.annotations.Extension;
import io.swagger.annotations.ExtensionProperty;
import io.swagger.annotations.Info;
import io.swagger.annotations.ResponseHeader;
import io.swagger.annotations.SwaggerDefinition;
import io.swagger.annotations.Tag;
import org.wso2.carbon.apimgt.annotations.api.Scope;
import org.wso2.carbon.apimgt.annotations.api.Scopes;
import org.wso2.carbon.device.mgt.common.geo.service.Alert;
import org.wso2.carbon.device.mgt.jaxrs.util.Constants;
import javax.validation.Valid;
import javax.validation.constraints.Size;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Response;
@SwaggerDefinition(
info = @Info(
version = "1.0.0",
title = "",
extensions = {
@Extension(properties = {
@ExtensionProperty(name = "name", value = "geo_services"),
@ExtensionProperty(name = "context", value = "/api/device-mgt/v1.0/geo-services"),
})
}
),
tags = {
@Tag(name = "device_management", description = "")
}
)
@Scopes(
scopes = {
@Scope(
name = "View Analytics",
description = "",
key = "perm:geo-service:analytics-view",
permissions = {"/device-mgt/devices/owning-device/view-analytics"}
),
@Scope(
name = "Manage Alerts",
description = "",
key = "perm:geo-service:alerts-manage",
permissions = {"/device-mgt/devices/owning-device/manage-alerts"}
)
}
)
@Path("/geo-services")
@Api(value = "Geo Service",
description = "This carries all the resources related to the geo service functionalities.")
public interface GeoLocationBasedService {
/**
 * Retrieves the geo-location history (analytics) of the given device over the
 * time window [from, to], where both bounds are Epoch/UNIX timestamps.
 */
// NOTE(review): @ApiParam name "device-type" differs from the actual path
// variable "deviceType" used by @PathParam — verify which name is intended.
@GET
@Path("stats/{deviceType}/{deviceId}")
@ApiOperation(
consumes = "application/json",
produces = "application/json",
httpMethod = "GET",
value = "Getting the Location Details of a Device",
notes = "Get the location details of a device during a define time period.",
response = Response.class,
tags = "Geo Service Management",
extensions = {
@Extension(properties = {
@ExtensionProperty(name = Constants.SCOPE, value = "perm:geo-service:analytics-view")
})
}
)
@ApiResponses(value = {
@ApiResponse(
code = 200,
message = "OK.",
response = Response.class,
responseHeaders = {
@ResponseHeader(
name = "Content-Type",
description = "The content type of the body"),
@ResponseHeader(
name = "Last-Modified",
description = "Date and time the resource was last modified.\n" +
"Used by caches, or in conditional requests."),
}),
@ApiResponse(
code = 400,
message = "Bad Request. \n Invalid Device Identifiers found.",
response = Response.class),
@ApiResponse(
code = 401,
message = "Unauthorized. \n Unauthorized request."),
@ApiResponse(
code = 500,
message = "Internal Server Error. \n Error on retrieving stats",
response = Response.class)
})
Response getGeoDeviceStats(
@ApiParam(
name = "deviceId",
value = "The device ID.",
required = true)
@PathParam("deviceId") String deviceId,
@ApiParam(
name = "device-type",
value = "The device type, such as ios, android, or windows.",
required = true)
@PathParam("deviceType")
@Size(max = 45)
String deviceType,
@ApiParam(
name = "from",
value = "Define the time to start getting the geo location history of the device in the Epoch or UNIX format.",
required = true)
@QueryParam("from") long from,
@ApiParam(
name = "to",
value = "Define the time to finish getting the geo location history of the device in the Epoch or UNIX format.",
required = true)
@QueryParam("to") long to);
/**
 * Returns the devices located inside a rectangular geofence defined by the
 * min/max latitude and longitude query parameters, at the given map zoom level.
 */
// NOTE(review): the documented defaultValue for minLat/maxLat is ~79.85, which
// is not a valid latitude (|lat| <= 90) and looks like a longitude, while
// minLong/maxLong default to ~6.9 which looks like a latitude — the lat/long
// example defaults appear swapped; verify against the intended samples.
@GET
@Path("stats/device-locations")
@ApiOperation(
consumes = "application/json",
produces = "application/json",
httpMethod = "GET",
value = "Getting the Devices in a Defined Geofence",
notes = "Get the details of the devices that are within the defined geofence coordinates. The geofence you are defining is enclosed with four coordinates in the shape of a square or rectangle. This is done by defining two points of the geofence. The other two points are automatically created using the given points. You can define the zoom level or scale of the map too.",
response = Response.class,
tags = "Geo Service Management",
extensions = {
@Extension(properties = {
@ExtensionProperty(name = Constants.SCOPE, value = "perm:geo-service:analytics-view")
})
}
)
@ApiResponses(value = {
@ApiResponse(
code = 200,
message = "OK.",
response = Response.class,
responseHeaders = {
@ResponseHeader(
name = "Content-Type",
description = "The content type of the body"),
@ResponseHeader(
name = "Last-Modified",
description = "Date and time the resource was last modified.\n" +
"Used by caches, or in conditional requests."),
}),
@ApiResponse(
code = 400,
message = "Bad Request. \n Invalid parameters found.",
response = Response.class),
@ApiResponse(
code = 401,
message = "Unauthorized. \n Unauthorized request."),
@ApiResponse(
code = 500,
message = "Internal Server Error. \n Error on retrieving stats",
response = Response.class)
})
Response getGeoDeviceLocations(
@ApiParam(
name = "minLat",
value = "Define the minimum latitude of the geofence.",
required = true,
defaultValue ="79.85213577747345")
@QueryParam("minLat") double minLat,
@ApiParam(
name = "maxLat",
value = "Define the maximum latitude of the geofence.",
required = true,
defaultValue ="79.85266149044037")
@QueryParam("maxLat") double maxLat,
@ApiParam(
name = "minLong",
value = "Define the minimum longitude of the geofence.",
required = true,
defaultValue ="6.909673257977737")
@QueryParam("minLong") double minLong,
@ApiParam(
name = "maxLong",
value = "Define the maximum longitude of the geofence",
required = true,
defaultValue ="6.909673257977737")
@QueryParam("maxLong") double maxLong,
@ApiParam(
name = "zoom",
value = "Define the level to zoom or scale the map. You can define any value between 1 to 14.",
required = true,
defaultValue ="2")
@QueryParam("zoom") int zoom);
/**
 * Create Geo alerts
 * <p>
 * Registers a geo alert of the given type (Within, Speed, Exit or Stationary) for a
 * specific device.
 */
@POST
@Path("alerts/{alertType}/{deviceType}/{deviceId}")
@ApiOperation(
consumes = "application/json",
produces = "application/json",
httpMethod = "POST",
// Fixed: this is a POST/create endpoint, but the summary and notes previously
// described retrieving an alert (copy-paste from the GET variant).
value = "Creating a Specific Geo Alert Type for a Device",
notes = "Create a specific geo alert for a device, such as sending a speed alert to a device.",
response = Response.class,
tags = "Geo Service Management",
extensions = {
@Extension(properties = {
@ExtensionProperty(name = Constants.SCOPE, value = "perm:geo-service:alerts-manage")
})
}
)
@ApiResponses(value = {
@ApiResponse(
code = 200,
message = "OK.",
response = Response.class,
responseHeaders = {
@ResponseHeader(
name = "Content-Type",
description = "The content type of the body")
}),
@ApiResponse(
code = 400,
message = "Bad Request. \n Invalid Device Identifiers found.",
response = Response.class),
@ApiResponse(
code = 401,
message = "Unauthorized. \n Unauthorized request."),
@ApiResponse(
code = 500,
message = "Internal Server Error. \n Error on retrieving stats",
response = Response.class)
})
Response createGeoAlerts(
@ApiParam(
name = "alert",
value = "The alert object",
required = true)
@Valid Alert alert,
@ApiParam(
name = "deviceId",
value = "The device ID.",
required = true)
@PathParam("deviceId") String deviceId,
@ApiParam(
// Fixed: the Swagger parameter name must match the @PathParam name
// ("deviceType"); "device-type" produced inconsistent generated docs/clients.
name = "deviceType",
value = "The device type, such as ios, android, or windows.",
required = true)
@PathParam("deviceType")
@Size(max = 45)
String deviceType,
@ApiParam(
name = "alertType",
value = "The alert type, such as Within, Speed, Exit, or Stationary.",
required = true)
@PathParam("alertType") String alertType);
/**
 * Create Geo alerts for geo clusters
 * <p>
 * Defines a new geo alert of the given type that applies to geo clusters (groups of
 * devices) rather than a single device. The alert definition itself is carried in the
 * request body as an {@code Alert} object.
 */
@POST
@Path("/alerts/{alertType}")
@ApiOperation(
consumes = "application/json",
produces = "application/json",
httpMethod = "POST",
value = "Create Geo alerts for geo clusters",
notes = "Creating geo alerts for cluster of devices",
response = Response.class,
tags = "Geo Service Management",
extensions = {
@Extension(properties = {
@ExtensionProperty(name = Constants.SCOPE, value = "perm:geo-service:alerts-manage")
})
}
)
@ApiResponses(value = {
@ApiResponse(
code = 200,
message = "OK.",
response = Response.class,
responseHeaders = {
@ResponseHeader(
name = "Content-Type",
description = "The content type of the body")
}),
@ApiResponse(
code = 400,
message = "Bad Request. \n A geo alert with this name already exists.",
response = Response.class),
@ApiResponse(
code = 401,
message = "Unauthorized. \n Unauthorized request."),
@ApiResponse(
code = 500,
message = "Internal Server Error. \n Error on retrieving stats",
response = Response.class)
})
Response createGeoAlertsForGeoClusters(
@ApiParam(
name = "alert",
value = "The alert object",
required = true)
@Valid Alert alert,
@ApiParam(
name = "alertType",
value = "The alert type, such as Within, Speed, Stationary",
required = true)
@PathParam("alertType") String alertType);
/**
 * Update Geo alerts
 * <p>
 * Updates an existing geo alert of the given type that was previously registered for a
 * specific device.
 */
@PUT
@Path("alerts/{alertType}/{deviceType}/{deviceId}")
@ApiOperation(
consumes = "application/json",
produces = "application/json",
httpMethod = "PUT",
value = "Updating the Geo Alerts of a Device",
notes = "Update the a geo alert that was sent to a device.",
response = Response.class,
tags = "Geo Service Management",
extensions = {
@Extension(properties = {
@ExtensionProperty(name = Constants.SCOPE, value = "perm:geo-service:alerts-manage")
})
}
)
@ApiResponses(value = {
@ApiResponse(
code = 200,
message = "OK.",
response = Response.class,
responseHeaders = {
@ResponseHeader(
name = "Content-Type",
description = "The content type of the body")
}),
@ApiResponse(
code = 400,
message = "Bad Request. \n Invalid Device Identifiers found.",
response = Response.class),
@ApiResponse(
code = 401,
message = "Unauthorized. \n Unauthorized request."),
@ApiResponse(
code = 500,
message = "Internal Server Error. \n Error on retrieving stats",
response = Response.class)
})
Response updateGeoAlerts(
@ApiParam(
name = "alert",
value = "The alert object",
required = true)
@Valid Alert alert,
@ApiParam(
name = "deviceId",
value = "The device ID.",
required = true)
@PathParam("deviceId") String deviceId,
@ApiParam(
// Fixed: the Swagger parameter name must match the @PathParam name
// ("deviceType"); "device-type" produced inconsistent generated docs/clients.
name = "deviceType",
value = "The device type, such as ios, android, or windows.",
required = true)
@PathParam("deviceType")
@Size(max = 45)
String deviceType,
@ApiParam(
name = "alertType",
value = "The alert type, such as Within, Speed, Exit, or Stationary",
required = true)
@PathParam("alertType") String alertType);
/**
 * Update Geo alerts for geo clusters
 * <p>
 * Updates an existing geo alert of the given type that was defined for geo clusters.
 */
@PUT
@Path("alerts/{alertType}")
@ApiOperation(
consumes = "application/json",
produces = "application/json",
// Fixed: this endpoint is annotated @PUT, but httpMethod was declared as "GET",
// which made the generated Swagger documentation advertise the wrong HTTP verb.
httpMethod = "PUT",
value = "Update Geo alerts for geo clusters",
notes = "Updating an existing geo alert that was defined for geo clusters",
response = Response.class,
tags = "Geo Service Management",
extensions = {
@Extension(properties = {
@ExtensionProperty(name = Constants.SCOPE, value = "perm:geo-service:alerts-manage")
})
}
)
@ApiResponses(value = {
@ApiResponse(
code = 200,
message = "OK.",
response = Response.class,
responseHeaders = {
@ResponseHeader(
name = "Content-Type",
description = "The content type of the body")
}),
@ApiResponse(
code = 400,
message = "Bad Request. \n Invalid Device Identifiers found.",
response = Response.class),
@ApiResponse(
code = 401,
message = "Unauthorized. \n Unauthorized request."),
@ApiResponse(
code = 500,
message = "Internal Server Error. \n Error on retrieving stats",
response = Response.class)
})
Response updateGeoAlertsForGeoClusters(
@ApiParam(
name = "alert",
value = "The alert object",
required = true)
@Valid Alert alert,
@ApiParam(
name = "alertType",
value = "The alert type, such as Within, Speed, Stationary",
required = true)
@PathParam("alertType") String alertType);
/**
 * Retrieve Geo alerts
 * <p>
 * Retrieves a specific geo alert of the given type that was registered for a device.
 */
@GET
@Path("alerts/{alertType}/{deviceType}/{deviceId}")
@ApiOperation(
consumes = "application/json",
produces = "application/json",
httpMethod = "GET",
value = "Getting a Geo Alert from a Device",
notes = "Retrieve a specific geo alert from a device, such as getting a speed alert that was sent to a device.",
response = Response.class,
tags = "Geo Service Management",
extensions = {
@Extension(properties = {
@ExtensionProperty(name = Constants.SCOPE, value = "perm:geo-service:alerts-manage")
})
}
)
@ApiResponses(value = {
@ApiResponse(
code = 200,
message = "OK.",
response = Response.class,
responseHeaders = {
@ResponseHeader(
name = "Content-Type",
description = "The content type of the body"),
@ResponseHeader(
name = "Last-Modified",
description = "Date and time the resource was last modified.\n" +
"Used by caches, or in conditional requests.")
}),
@ApiResponse(
code = 400,
message = "Bad Request. \n Invalid Device Identifiers found.",
response = Response.class),
@ApiResponse(
code = 401,
message = "Unauthorized. \n Unauthorized request."),
@ApiResponse(
code = 500,
message = "Internal Server Error. \n Error on retrieving stats",
response = Response.class)
})
Response getGeoAlerts(
@ApiParam(
name = "deviceId",
value = "The device ID.",
required = true)
@PathParam("deviceId") String deviceId,
@ApiParam(
// Fixed: the Swagger parameter name must match the @PathParam name
// ("deviceType"); also corrected the "ios, android. or windows." typo.
name = "deviceType",
value = "The device type, such as ios, android, or windows.",
required = true)
@PathParam("deviceType")
@Size(max = 45)
String deviceType,
@ApiParam(
name = "alertType",
value = "The alert type, such as Within, Speed, Exit, or Stationary",
required = true)
@PathParam("alertType") String alertType);
/**
 * Retrieve Geo alerts for geo clusters
 * <p>
 * Lists all geo alerts of the given type that were defined for geo clusters.
 */
@GET
@Path("alerts/{alertType}")
@ApiOperation(
consumes = "application/json",
produces = "application/json",
httpMethod = "GET",
value = "Retrieve Geo alerts for geo clusters",
notes = "Retrieve all the defined alerts for a specific alert type",
response = Response.class,
tags = "Geo Service Management",
extensions = {
@Extension(properties = {
@ExtensionProperty(name = Constants.SCOPE, value = "perm:geo-service:alerts-manage")
})
}
)
@ApiResponses(value = {
@ApiResponse(
code = 200,
message = "OK.",
response = Response.class,
responseHeaders = {
@ResponseHeader(
name = "Content-Type",
description = "The content type of the body"),
@ResponseHeader(
name = "Last-Modified",
description = "Date and time the resource was last modified.\n" +
"Used by caches, or in conditional requests.")
}),
@ApiResponse(
code = 400,
message = "Bad Request. \n Invalid Device Identifiers found.",
response = Response.class),
@ApiResponse(
code = 401,
message = "Unauthorized. \n Unauthorized request."),
@ApiResponse(
code = 500,
message = "Internal Server Error. \n Error on retrieving stats",
response = Response.class)
})
Response getGeoAlertsForGeoClusters(
@ApiParam(
name = "alertType",
value = "The alert type, such as Within, Speed, Stationary",
required = true)
@PathParam("alertType") String alertType);
/**
 * Retrieve Geo alerts history
 * <p>
 * Retrieves the geo alert history of a device for the time window [from, to], where
 * both bounds are Epoch/UNIX timestamps.
 */
@GET
@Path("alerts/history/{deviceType}/{deviceId}")
@ApiOperation(
consumes = "application/json",
produces = "application/json",
httpMethod = "GET",
value = "Getting the Geo Service Alert History of a Device",
notes = "Get the geo alert history of a device during the defined time period.",
response = Response.class,
tags = "Geo Service Management",
extensions = {
@Extension(properties = {
@ExtensionProperty(name = Constants.SCOPE, value = "perm:geo-service:alerts-manage")
})
}
)
@ApiResponses(value = {
@ApiResponse(
code = 200,
message = "OK.",
response = Response.class,
responseHeaders = {
@ResponseHeader(
name = "Content-Type",
description = "The content type of the body"),
@ResponseHeader(
name = "Last-Modified",
description = "Date and time the resource was last modified.\n" +
"Used by caches, or in conditional requests.")
}),
@ApiResponse(
code = 400,
message = "Bad Request. \n Invalid Device Identifiers found.",
response = Response.class),
@ApiResponse(
code = 401,
message = "Unauthorized. \n Unauthorized request."),
@ApiResponse(
code = 500,
message = "Internal Server Error. \n Error on retrieving stats",
response = Response.class)
})
Response getGeoAlertsHistory(
@ApiParam(
name = "deviceId",
value = "The device ID.",
required = true)
@PathParam("deviceId") String deviceId,
@ApiParam(
// Fixed: the Swagger parameter name must match the @PathParam name
// ("deviceType"); "device-type" produced inconsistent generated docs/clients.
name = "deviceType",
value = "The device type, such as ios, android, or windows.",
required = true)
@PathParam("deviceType")
@Size(max = 45)
String deviceType,
@ApiParam(
name = "from",
value = "Define the time to start getting the geo location history of the device in the Epoch or UNIX format.",
required = true)
@QueryParam("from") long from,
@ApiParam(
name = "to",
value = "Define the time to finish getting the geo location history of the device in the Epoch or UNIX format.",
required = true)
@QueryParam("to") long to);
/**
 * Retrieve Geo alerts history for geo clusters
 * <p>
 * Retrieves the alert history for geo clusters during the time window [from, to];
 * both bounds are Epoch/UNIX timestamps (see the per-device history endpoint above
 * for the same convention).
 */
@GET
@Path("alerts/history")
@ApiOperation(
consumes = "application/json",
produces = "application/json",
httpMethod = "GET",
value = "Retrieve Geo alerts history for geo clusters",
notes = "Retrieving geo alert history of all defined alerts for geo clusters",
response = Response.class,
tags = "Geo Service Management",
extensions = {
@Extension(properties = {
@ExtensionProperty(name = Constants.SCOPE, value = "perm:geo-service:alerts-manage")
})
}
)
@ApiResponses(value = {
@ApiResponse(
code = 200,
message = "OK.",
response = Response.class,
responseHeaders = {
@ResponseHeader(
name = "Content-Type",
description = "The content type of the body"),
@ResponseHeader(
name = "Last-Modified",
description = "Date and time the resource was last modified.\n" +
"Used by caches, or in conditional requests.")
}),
@ApiResponse(
code = 400,
message = "Bad Request. \n Invalid Device Identifiers found.",
response = Response.class),
@ApiResponse(
code = 401,
message = "Unauthorized. \n Unauthorized request."),
@ApiResponse(
code = 500,
message = "Internal Server Error. \n Error on retrieving stats",
response = Response.class)
})
Response getGeoAlertsHistoryForGeoClusters(
@ApiParam(
name = "from",
value = "Get stats from what time",
required = true)
@QueryParam("from") long from,
@ApiParam(
name = "to",
value = "Get stats up to what time",
required = true)
@QueryParam("to") long to);
/**
 * Remove geo alerts
 * <p>
 * Deletes a specific geo alert of the given type from a device. The {@code queryName}
 * query parameter identifies the fence name that was given when the geofence was
 * defined.
 */
@DELETE
@Path("alerts/{alertType}/{deviceType}/{deviceId}")
@ApiOperation(
consumes = "application/json",
produces = "application/json",
httpMethod = "DELETE",
value = "Deleting a Geo Alert from a Device",
notes = "Delete a specific geo alert from a device, such as deleting a speed alert that was sent to the device.",
response = Response.class,
tags = "Geo Service Management",
extensions = {
@Extension(properties = {
@ExtensionProperty(name = Constants.SCOPE, value = "perm:geo-service:alerts-manage")
})
}
)
@ApiResponses(value = {
@ApiResponse(
code = 200,
message = "OK.",
response = Response.class,
responseHeaders = {
@ResponseHeader(
name = "Content-Type",
description = "The content type of the body")
}),
@ApiResponse(
code = 400,
message = "Bad Request. \n Invalid Device Identifiers found.",
response = Response.class),
@ApiResponse(
code = 401,
message = "Unauthorized. \n Unauthorized request."),
@ApiResponse(
code = 500,
message = "Internal Server Error. \n Error on retrieving stats",
response = Response.class)
})
Response removeGeoAlerts(
@ApiParam(
name = "deviceId",
value = "The device ID.",
required = true)
@PathParam("deviceId") String deviceId,
@ApiParam(
name = "deviceType",
value = "The device type, such as ios, android, or windows.",
required = true)
@PathParam("deviceType") String deviceType,
@ApiParam(
name = "alertType",
value = "The alert type, such as Within, Speed, Exit, or Stationary",
required = true)
@PathParam("alertType") String alertType,
@ApiParam(
name = "queryName",
value = "When you define a geofence you define a fence name for it. That name needs to be defined" +
" here.",
required = true)
@QueryParam("queryName") String queryName);
/**
 * Remove geo alerts for geo clusters
 * <p>
 * Deletes a geo alert of the given type that was defined for geo clusters; the alert
 * instance to remove is selected by the {@code queryName} query parameter.
 */
@DELETE
@Path("alerts/{alertType}")
@ApiOperation(
consumes = "application/json",
produces = "application/json",
httpMethod = "DELETE",
value = "Deletes Geo alerts for geo clusters",
notes = "Deleting any type of a geo alert that was defined for geo clusters",
response = Response.class,
tags = "Geo Service Management",
extensions = {
@Extension(properties = {
@ExtensionProperty(name = Constants.SCOPE, value = "perm:geo-service:alerts-manage")
})
}
)
@ApiResponses(value = {
@ApiResponse(
code = 200,
message = "OK.",
response = Response.class,
responseHeaders = {
@ResponseHeader(
name = "Content-Type",
description = "The content type of the body")
}),
@ApiResponse(
code = 400,
message = "Bad Request. \n Invalid Device Identifiers found.",
response = Response.class),
@ApiResponse(
code = 401,
message = "Unauthorized. \n Unauthorized request."),
@ApiResponse(
code = 500,
message = "Internal Server Error. \n Error on retrieving stats",
response = Response.class)
})
Response removeGeoAlertsForGeoClusters(
@ApiParam(
name = "alertType",
value = "The alert type, such as Within, Speed, Stationary",
required = true)
@PathParam("alertType") String alertType,
@ApiParam(
name = "queryName",
value = "The query name.",
required = true)
@QueryParam("queryName") String queryName);
}
| |
/*
* Copyright 2002-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.security.openid;
import static org.assertj.core.api.Assertions.*;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import org.junit.*;
import org.mockito.ArgumentMatchers;
import org.openid4java.association.AssociationException;
import org.openid4java.consumer.ConsumerException;
import org.openid4java.consumer.ConsumerManager;
import org.openid4java.consumer.VerificationResult;
import org.openid4java.discovery.DiscoveryException;
import org.openid4java.discovery.DiscoveryInformation;
import org.openid4java.discovery.Identifier;
import org.openid4java.message.AuthRequest;
import org.openid4java.message.Message;
import org.openid4java.message.MessageException;
import org.openid4java.message.ParameterList;
import org.openid4java.message.ax.AxMessage;
import org.openid4java.message.ax.FetchResponse;
import org.springframework.mock.web.MockHttpServletRequest;
import java.util.*;
/**
 * Unit tests for {@code OpenID4JavaConsumer}, driving the begin/end consumption flow
 * against a mocked openid4java {@link ConsumerManager} so no real OpenID provider is
 * needed.
 *
 * @author Luke Taylor
 */
public class OpenID4JavaConsumerTests {
// Shared attribute fixture reused across tests; also returned by MockAttributesFactory.
List<OpenIDAttribute> attributes = Arrays.asList(new OpenIDAttribute("a", "b"),
new OpenIDAttribute("b", "b", Arrays.asList("c")));
@SuppressWarnings("deprecation")
@Test
public void beginConsumptionCreatesExpectedSessionData() throws Exception {
ConsumerManager mgr = mock(ConsumerManager.class);
AuthRequest authReq = mock(AuthRequest.class);
DiscoveryInformation di = mock(DiscoveryInformation.class);
when(mgr.authenticate(any(DiscoveryInformation.class), any(), any()))
.thenReturn(authReq);
when(mgr.associate(any())).thenReturn(di);
OpenID4JavaConsumer consumer = new OpenID4JavaConsumer(mgr,
new MockAttributesFactory());
MockHttpServletRequest request = new MockHttpServletRequest();
consumer.beginConsumption(request, "", "", "");
// The consumer must stash both the attribute fetch list and the discovery
// information in the HTTP session for endConsumption to pick up later.
assertThat(request.getSession().getAttribute(
"SPRING_SECURITY_OPEN_ID_ATTRIBUTES_FETCH_LIST")).isEqualTo(attributes);
assertThat(
request.getSession().getAttribute(DiscoveryInformation.class.getName())).isEqualTo(di);
// Check with empty attribute fetch list
consumer = new OpenID4JavaConsumer(mgr, new NullAxFetchListFactory());
request = new MockHttpServletRequest();
consumer.beginConsumption(request, "", "", "");
}
@Test(expected = OpenIDConsumerException.class)
public void discoveryExceptionRaisesOpenIDException() throws Exception {
ConsumerManager mgr = mock(ConsumerManager.class);
OpenID4JavaConsumer consumer = new OpenID4JavaConsumer(mgr,
new NullAxFetchListFactory());
when(mgr.discover(any())).thenThrow(new DiscoveryException("msg"));
consumer.beginConsumption(new MockHttpServletRequest(), "", "", "");
}
@Test
public void messageOrConsumerAuthenticationExceptionRaisesOpenIDException()
throws Exception {
ConsumerManager mgr = mock(ConsumerManager.class);
OpenID4JavaConsumer consumer = new OpenID4JavaConsumer(mgr,
new NullAxFetchListFactory());
// Consecutive stubbing: the first call throws MessageException, the second
// ConsumerException; both must be translated to OpenIDConsumerException.
when(mgr.authenticate(ArgumentMatchers.<DiscoveryInformation>any(), any(), any()))
.thenThrow(new MessageException("msg"), new ConsumerException("msg"));
try {
consumer.beginConsumption(new MockHttpServletRequest(), "", "", "");
fail("OpenIDConsumerException was not thrown");
}
catch (OpenIDConsumerException expected) {
}
try {
consumer.beginConsumption(new MockHttpServletRequest(), "", "", "");
fail("OpenIDConsumerException was not thrown");
}
catch (OpenIDConsumerException expected) {
}
}
@Test
public void failedVerificationReturnsFailedAuthenticationStatus() throws Exception {
ConsumerManager mgr = mock(ConsumerManager.class);
OpenID4JavaConsumer consumer = new OpenID4JavaConsumer(mgr,
new NullAxFetchListFactory());
VerificationResult vr = mock(VerificationResult.class);
DiscoveryInformation di = mock(DiscoveryInformation.class);
// vr.getVerifiedId() is not stubbed, so it returns null -> verification failed.
when(
mgr.verify(any(), any(ParameterList.class),
any(DiscoveryInformation.class))).thenReturn(vr);
MockHttpServletRequest request = new MockHttpServletRequest();
request.getSession().setAttribute(DiscoveryInformation.class.getName(), di);
OpenIDAuthenticationToken auth = consumer.endConsumption(request);
assertThat(auth.getStatus()).isEqualTo(OpenIDAuthenticationStatus.FAILURE);
}
@Test
public void verificationExceptionsRaiseOpenIDException() throws Exception {
ConsumerManager mgr = mock(ConsumerManager.class);
OpenID4JavaConsumer consumer = new OpenID4JavaConsumer(mgr,
new NullAxFetchListFactory());
// Consecutive stubbing: each of the three openid4java exception types must be
// wrapped in an OpenIDConsumerException by endConsumption.
when(
mgr.verify(any(), any(ParameterList.class),
any(DiscoveryInformation.class)))
.thenThrow(new MessageException(""))
.thenThrow(new AssociationException(""))
.thenThrow(new DiscoveryException(""));
MockHttpServletRequest request = new MockHttpServletRequest();
request.setQueryString("x=5");
try {
consumer.endConsumption(request);
fail("OpenIDConsumerException was not thrown");
}
catch (OpenIDConsumerException expected) {
}
try {
consumer.endConsumption(request);
fail("OpenIDConsumerException was not thrown");
}
catch (OpenIDConsumerException expected) {
}
try {
consumer.endConsumption(request);
fail("OpenIDConsumerException was not thrown");
}
catch (OpenIDConsumerException expected) {
}
}
@SuppressWarnings("serial")
@Test
public void successfulVerificationReturnsExpectedAuthentication() throws Exception {
ConsumerManager mgr = mock(ConsumerManager.class);
OpenID4JavaConsumer consumer = new OpenID4JavaConsumer(mgr,
new NullAxFetchListFactory());
VerificationResult vr = mock(VerificationResult.class);
DiscoveryInformation di = mock(DiscoveryInformation.class);
// Non-null verified Identifier marks the verification as successful.
Identifier id = (Identifier) () -> "id";
Message msg = mock(Message.class);
when(
mgr.verify(any(), any(ParameterList.class),
any(DiscoveryInformation.class))).thenReturn(vr);
when(vr.getVerifiedId()).thenReturn(id);
when(vr.getAuthResponse()).thenReturn(msg);
MockHttpServletRequest request = new MockHttpServletRequest();
request.getSession().setAttribute(DiscoveryInformation.class.getName(), di);
request.getSession().setAttribute(
"SPRING_SECURITY_OPEN_ID_ATTRIBUTES_FETCH_LIST", attributes);
OpenIDAuthenticationToken auth = consumer.endConsumption(request);
assertThat(auth.getStatus()).isEqualTo(OpenIDAuthenticationStatus.SUCCESS);
}
@Test
public void fetchAttributesReturnsExpectedValues() throws Exception {
OpenID4JavaConsumer consumer = new OpenID4JavaConsumer(
new NullAxFetchListFactory());
Message msg = mock(Message.class);
FetchResponse fr = mock(FetchResponse.class);
when(msg.hasExtension(AxMessage.OPENID_NS_AX)).thenReturn(true);
when(msg.getExtension(AxMessage.OPENID_NS_AX)).thenReturn(fr);
// Only attribute "a" has values in the response, so only one attribute is fetched.
when(fr.getAttributeValues("a")).thenReturn(Arrays.asList("x", "y"));
List<OpenIDAttribute> fetched = consumer.fetchAxAttributes(msg, attributes);
assertThat(fetched).hasSize(1);
assertThat(fetched.get(0).getValues()).hasSize(2);
}
@Test(expected = OpenIDConsumerException.class)
public void messageExceptionFetchingAttributesRaisesOpenIDException()
throws Exception {
OpenID4JavaConsumer consumer = new OpenID4JavaConsumer(
new NullAxFetchListFactory());
Message msg = mock(Message.class);
FetchResponse fr = mock(FetchResponse.class);
when(msg.hasExtension(AxMessage.OPENID_NS_AX)).thenReturn(true);
when(msg.getExtension(AxMessage.OPENID_NS_AX))
.thenThrow(new MessageException(""));
when(fr.getAttributeValues("a")).thenReturn(Arrays.asList("x", "y"));
consumer.fetchAxAttributes(msg, attributes);
}
@Test(expected = OpenIDConsumerException.class)
public void missingDiscoveryInformationThrowsException() throws Exception {
OpenID4JavaConsumer consumer = new OpenID4JavaConsumer(
new NullAxFetchListFactory());
consumer.endConsumption(new MockHttpServletRequest());
}
@Test
public void additionalConstructorsWork() throws Exception {
// Smoke test: the no-arg and single-arg constructors must not throw.
new OpenID4JavaConsumer();
new OpenID4JavaConsumer(new MockAttributesFactory());
}
// Fetch-list factory stub that always returns the shared test attribute list.
// Intentionally non-static: it reads the enclosing test's "attributes" field.
private class MockAttributesFactory implements AxFetchListFactory {
public List<OpenIDAttribute> createAttributeList(String identifier) {
return attributes;
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.synapse.core.axis2;
import org.apache.axiom.om.OMException;
import org.apache.axis2.AxisFault;
import org.apache.axis2.Constants;
import org.apache.axis2.addressing.AddressingConstants;
import org.apache.axis2.addressing.EndpointReference;
import org.apache.axis2.addressing.RelatesTo;
import org.apache.axis2.client.Options;
import org.apache.axis2.client.async.AxisCallback;
import org.apache.axis2.context.MessageContext;
import org.apache.axis2.transport.http.HTTPConstants;
import org.apache.axis2.util.CallbackReceiver;
import org.apache.axis2.wsdl.WSDLConstants;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.http.nio.NHttpServerConnection;
import org.apache.sandesha2.client.SandeshaClientConstants;
import org.apache.synapse.ContinuationState;
import org.apache.synapse.FaultHandler;
import org.apache.synapse.ServerContextInformation;
import org.apache.synapse.SynapseConstants;
import org.apache.synapse.SynapseException;
import org.apache.synapse.aspects.statistics.ErrorLogFactory;
import org.apache.synapse.aspects.statistics.StatisticsReporter;
import org.apache.synapse.carbonext.TenantInfoConfigurator;
import org.apache.synapse.config.SynapseConfigUtils;
import org.apache.synapse.config.SynapseConfiguration;
import org.apache.synapse.continuation.ContinuationStackManager;
import org.apache.synapse.endpoints.AbstractEndpoint;
import org.apache.synapse.endpoints.Endpoint;
import org.apache.synapse.endpoints.FailoverEndpoint;
import org.apache.synapse.endpoints.dispatch.Dispatcher;
import org.apache.synapse.transport.nhttp.NhttpConstants;
import org.apache.synapse.transport.passthru.PassThroughConstants;
import org.apache.synapse.transport.passthru.Pipe;
import org.apache.synapse.transport.passthru.config.SourceConfiguration;
import org.apache.synapse.transport.passthru.util.RelayUtils;
import org.apache.synapse.util.ResponseAcceptEncodingProcessor;
import java.util.Stack;
import java.util.Timer;
/**
* This is the message receiver that receives the responses for outgoing messages sent out
* by Synapse. It holds a callbackStore that maps the [unique] messageID of each message to
* a callback object that gets executed on timeout or when a response is received (before timeout)
*
* The AnonymousServiceFactory uses this MessageReceiver for all Anonymous services created by it.
* This however - effectively - is a singleton class
*/
public class SynapseCallbackReceiver extends CallbackReceiver {
private static final Log log = LogFactory.getLog(SynapseCallbackReceiver.class);
/** The synchronized callbackStore mapping outgoing messageIDs to callback objects is
 * inherited from {@link CallbackReceiver}; the local declaration below was retired. */
// private final Map<String, AxisCallback> callbackStore; // will made thread safe in the constructor
/**
 * Create the *single* instance of this class that would be used by all anonymous services
 * used for outgoing messaging. Also starts the background timeout task that reaps
 * callbacks whose responses never arrive.
 * @param synCfg the Synapse configuration (supplies the shared Synapse Timer)
 * @param contextInformation server runtime information passed to the timeout handler
 */
public SynapseCallbackReceiver(SynapseConfiguration synCfg,
ServerContextInformation contextInformation) {
// callbackStore = Collections.synchronizedMap(new HashMap<String, AxisCallback>());
// create the Timer object and a TimeoutHandler task over the inherited callbackStore
TimeoutHandler timeoutHandler = new TimeoutHandler(callbackStore, contextInformation);
Timer timeOutTimer = synCfg.getSynapseTimer();
long timeoutHandlerInterval = SynapseConfigUtils.getTimeoutHandlerInterval();
// schedule timeout handler to run every n seconds (n : specified or defaults to 15s)
timeOutTimer.schedule(timeoutHandler, 0, timeoutHandlerInterval);
}
/** @return the number of callbacks currently waiting for a response (or timeout) */
public int getCallbackCount() {
return callbackStore.size();
}
/**
 * Registers a callback to be invoked when the response for the given outgoing
 * message arrives (or the request times out).
 * @param MsgID the messageID of the outgoing request, used as the store key
 * @param callback the callback to execute for the matching response
 */
public void addCallback(String MsgID, AxisCallback callback) {
callbackStore.put(MsgID, callback);
if (log.isDebugEnabled()) {
log.debug("Callback added. Total callbacks waiting for : " + callbackStore.size());
}
}
/**
 * Everytime a response message is received this method gets invoked. It will then select
 * the outgoing *Synapse* message context for the reply we received, and determine what action
 * to take at the Synapse level
 *
 * @param messageCtx the Axis2 message context of the reply received
 * @throws AxisFault if handling the matched response fails
 */
public void receive(MessageContext messageCtx) throws AxisFault {
// The request messageID this response correlates to; resolved from WS-Addressing
// RelatesTo when present, else from the POX correlation property.
String messageID = null;
/**
 * In an Out-only scenario if the client receives a HTTP 202 accepted we need to
 * remove the call back/s registered for that request.
 * This if will check weather this is a message sent in a that scenario and remove the callback
 */
if (messageCtx.getProperty(NhttpConstants.HTTP_202_RECEIVED) != null && "true".equals(
messageCtx.getProperty(NhttpConstants.HTTP_202_RECEIVED))) {
if (callbackStore.containsKey(messageCtx.getMessageID())) {
callbackStore.remove(messageCtx.getMessageID());
if (log.isDebugEnabled()) {
log.debug("CallBack registered with Message id : " + messageCtx.getMessageID() +
" removed from the " +
"callback store since we got an accepted Notification");
}
}
// 202 Accepted carries no response payload to dispatch; nothing more to do.
return;
}
if (messageCtx.getOptions() != null && messageCtx.getOptions().getRelatesTo() != null) {
// never take a chance with a NPE at this stage.. so check at each level :-)
Options options = messageCtx.getOptions();
if (options != null) {
RelatesTo relatesTo = options.getRelatesTo();
if (relatesTo != null) {
messageID = relatesTo.getValue();
}
}
} else if (messageCtx.getProperty(SandeshaClientConstants.SEQUENCE_KEY) == null) {
// No WS-Addressing RelatesTo and not a Sandesha (WS-RM) sequence message:
// fall back to the POX correlation id stored when the request went out.
messageID = (String) messageCtx.getProperty(SynapseConstants.RELATES_TO_FOR_POX);
}
if (messageID != null) {
// Remove-and-get: each callback is consumed exactly once per response.
AsyncCallback callback = (AsyncCallback)callbackStore.remove(messageID);
if (log.isDebugEnabled()) {
log.debug("Callback removed for request message id : " + messageID +
". Pending callbacks count : " + callbackStore.size());
}
RelatesTo[] relates = messageCtx.getRelationships();
if (relates != null && relates.length > 1) {
// we set a relates to to the response message so that if WSA is not used, we
// could still link back to the original message. But if WSA was used, this
// gets duplicated, and we should remove it
removeDuplicateRelatesTo(messageCtx, relates);
}
if (callback != null) {
handleMessage(messageID, messageCtx, ((AsyncCallback) callback).getSynapseOutMsgCtx(),
(AsyncCallback)callback);
} else {
// TODO invoke a generic synapse error handler for this message
log.warn("Synapse received a response for the request with message Id : " +
messageID + " But a callback is not registered (anymore) to process this response");
}
} else if (!messageCtx.isPropertyTrue(NhttpConstants.SC_ACCEPTED)){
// TODO invoke a generic synapse error handler for this message
log.warn("Synapse received a response message without a message Id");
}
}
    /**
     * Handle the response or error (during a failed send) message received for an outgoing request.
     *
     * <p>Two paths exist: if the response carries {@code SENDING_FAULT}, the error details are
     * copied onto the outgoing Synapse context and its fault-handler stack is invoked (or the
     * message is discarded on a timeout with a DISCARD action). Otherwise a new Synapse in-message
     * context is built from the Axis2 response, wired up with the transport/addressing state of
     * the original out message, and injected into the mediation flow.
     *
     * @param messageID        Request message ID
     * @param response         the Axis2 MessageContext that has been received and has to be handled
     * @param synapseOutMsgCtx the corresponding (outgoing) Synapse MessageContext for the above
     *                         Axis2 MC, that holds Synapse specific information such as the error
     *                         handler stack and local properties etc.
     * @param callback         the callback registered for the original request; consulted for the
     *                         timeout action when a connection-timeout fault is received
     * @throws AxisFault if the message cannot be processed
     */
    private void handleMessage(String messageID, MessageContext response,
        org.apache.synapse.MessageContext synapseOutMsgCtx, AsyncCallback callback) throws AxisFault {
        // apply the tenant information to the out message context
        TenantInfoConfigurator configurator = synapseOutMsgCtx.getEnvironment()
                .getTenantInfoConfigurator();
        if (configurator != null) {
            configurator.applyTenantInfo(synapseOutMsgCtx);
        }
        Object o = response.getProperty(SynapseConstants.SENDING_FAULT);
        if (o != null && Boolean.TRUE.equals(o)) {
            StatisticsReporter.reportFaultForAll(synapseOutMsgCtx,
                    ErrorLogFactory.createErrorLog(response));
            // there is a sending fault. propagate the fault to fault handlers.
            Stack faultStack = synapseOutMsgCtx.getFaultStack();
            if (faultStack != null && !faultStack.isEmpty()) {
                // if we have access to the full synapseOutMsgCtx.getEnvelope(), then let
                // it flow with the error details. Else, replace its envelope with the
                // fault envelope
                try {
                    synapseOutMsgCtx.getEnvelope().build();
                } catch (OMException x) {
                    synapseOutMsgCtx.setEnvelope(response.getEnvelope());
                }
                // copy the error details from the (faulty) response onto the out context so
                // fault sequences can read them via the standard ERROR_* properties
                Exception e = (Exception) response.getProperty(SynapseConstants.ERROR_EXCEPTION);
                synapseOutMsgCtx.setProperty(SynapseConstants.SENDING_FAULT, Boolean.TRUE);
                synapseOutMsgCtx.setProperty(SynapseConstants.ERROR_CODE,
                        response.getProperty(SynapseConstants.ERROR_CODE));
                synapseOutMsgCtx.setProperty(SynapseConstants.ERROR_MESSAGE,
                        response.getProperty(SynapseConstants.ERROR_MESSAGE));
                synapseOutMsgCtx.setProperty(SynapseConstants.ERROR_DETAIL,
                        response.getProperty(SynapseConstants.ERROR_DETAIL));
                synapseOutMsgCtx.setProperty(SynapseConstants.ERROR_EXCEPTION, e);
                if (synapseOutMsgCtx.getEnvironment().isContinuationEnabled()) {
                    synapseOutMsgCtx.setContinuationEnabled(true);
                    ContinuationStackManager.clearStack(synapseOutMsgCtx);
                }
                if (log.isDebugEnabled()) {
                    log.debug("[Failed Request Message ID : " + messageID + "]" +
                            " [New to be Retried Request Message ID : " +
                            synapseOutMsgCtx.getMessageID() + "]");
                }
                // NOTE(review): unboxing here throws NPE if ERROR_CODE was never set on the
                // response; appears to rely on senders always setting it on a fault — confirm
                int errorCode = (Integer) response.getProperty(SynapseConstants.ERROR_CODE);
                // If a timeout has occurred and the timeout action of the callback is to
                // discard the message
                if (errorCode == SynapseConstants.NHTTP_CONNECTION_TIMEOUT &&
                        callback.getTimeOutAction() == SynapseConstants.DISCARD) {
                    // Do not execute any fault sequences. Discard message
                    if (log.isWarnEnabled()) {
                        log.warn("Synapse timed out for the request with Message ID : " + messageID +
                                ". Ignoring fault handlers since the timeout action is DISCARD");
                    }
                    faultStack.removeAllElements();
                } else {
                    ((FaultHandler) faultStack.pop()).handleFault(synapseOutMsgCtx, null);
                }
            }
        } else {
            // === successful send: build the Synapse response context ===
            // there can always be only one instance of an Endpoint in the faultStack of a message
            // if the send was successful, so remove it before we proceed any further
            Stack faultStack = synapseOutMsgCtx.getFaultStack();
            Endpoint successfulEndpoint = null;
            if (faultStack != null && !faultStack.isEmpty()
                    && faultStack.peek() instanceof Endpoint) {
                successfulEndpoint = (Endpoint) faultStack.pop();
            }
            if (log.isDebugEnabled()) {
                log.debug("Synapse received an asynchronous response message");
                log.debug("Received To: " +
                        (response.getTo() != null ? response.getTo().getAddress() : "null"));
                log.debug("SOAPAction: " +
                        (response.getSoapAction() != null ? response.getSoapAction() : "null"));
                log.debug("WSA-Action: " +
                        (response.getWSAAction() != null ? response.getWSAAction() : "null"));
                String[] cids = response.getAttachmentMap().getAllContentIDs();
                if (cids != null && cids.length > 0) {
                    for (String cid : cids) {
                        log.debug("Attachment : " + cid);
                    }
                }
                log.debug("Body : \n" + response.getEnvelope());
            }
            MessageContext axisOutMsgCtx =
                    ((Axis2MessageContext) synapseOutMsgCtx).getAxis2MessageContext();
            // Processes 'Accept-Encoding'
            ResponseAcceptEncodingProcessor.process(response, axisOutMsgCtx);
            // rewire the response into the operation context of the original out message
            response.setServiceContext(null);
            response.setOperationContext(axisOutMsgCtx.getOperationContext());
            response.setAxisMessage(axisOutMsgCtx.getAxisOperation().getMessage(
                    WSDLConstants.MESSAGE_LABEL_OUT_VALUE));
            // set properties on response
            response.setServerSide(true);
            response.setProperty(SynapseConstants.ISRESPONSE_PROPERTY, Boolean.TRUE);
            response.setProperty(MessageContext.TRANSPORT_OUT,
                    axisOutMsgCtx.getProperty(MessageContext.TRANSPORT_OUT));
            response.setProperty(org.apache.axis2.Constants.OUT_TRANSPORT_INFO,
                    axisOutMsgCtx.getProperty(org.apache.axis2.Constants.OUT_TRANSPORT_INFO));
            response.setTransportIn(axisOutMsgCtx.getTransportIn());
            response.setTransportOut(axisOutMsgCtx.getTransportOut());
            // If request is REST assume that the response is REST too
            response.setDoingREST(axisOutMsgCtx.isDoingREST());
            if (axisOutMsgCtx.isDoingMTOM()) {
                response.setDoingMTOM(true);
                response.setProperty(
                        org.apache.axis2.Constants.Configuration.ENABLE_MTOM,
                        org.apache.axis2.Constants.VALUE_TRUE);
            }
            if (axisOutMsgCtx.isDoingSwA()) {
                response.setDoingSwA(true);
                response.setProperty(
                        org.apache.axis2.Constants.Configuration.ENABLE_SWA,
                        org.apache.axis2.Constants.VALUE_TRUE);
            }
            // When Axis2 receives a SOAP message without addressing headers, it uses the
            // DISABLE_ADDRESSING_FOR_OUT_MESSAGES property to avoid adding addressing headers
            // to the response. This is a problem for Synapse when the original client message
            // HAS addressing but the service invocation does not engage it: the server response
            // then lacks addressing headers, the addressing-in handler disables addressing on
            // this (reused) message context, and the response sent back to the client would
            // silently lose its addressing headers. To avoid that, restore the property to the
            // state it had on the original out message.
            if (axisOutMsgCtx.getProperty(
                    AddressingConstants.DISABLE_ADDRESSING_FOR_OUT_MESSAGES) != null) {
                response.setProperty(AddressingConstants.DISABLE_ADDRESSING_FOR_OUT_MESSAGES,
                        axisOutMsgCtx.getProperty(
                                AddressingConstants.DISABLE_ADDRESSING_FOR_OUT_MESSAGES));
            } else {
                response.removeProperty(AddressingConstants.DISABLE_ADDRESSING_FOR_OUT_MESSAGES);
            }
            Object messageType = axisOutMsgCtx.getProperty(
                    org.apache.axis2.Constants.Configuration.MESSAGE_TYPE);
            if (!HTTPConstants.MEDIA_TYPE_X_WWW_FORM.equals(messageType)) {
                // copy the message type property that's used by the out message to the
                // response message
                response.setProperty(org.apache.axis2.Constants.Configuration.MESSAGE_TYPE,
                        messageType);
            }
            // compare original received message (axisOutMsgCtx) soap version with the response
            // if they are different change to original version
            if (axisOutMsgCtx.isSOAP11() != response.isSOAP11()) {
                if (axisOutMsgCtx.isSOAP11()) {
                    SOAPUtils.convertSOAP12toSOAP11(response);
                } else {
                    SOAPUtils.convertSOAP11toSOAP12(response);
                }
            }
            if (axisOutMsgCtx.getMessageID() != null) {
                response.setRelationships(
                        new RelatesTo[]{new RelatesTo(axisOutMsgCtx.getMessageID())});
            }
            response.setReplyTo(axisOutMsgCtx.getReplyTo());
            response.setFaultTo(axisOutMsgCtx.getFaultTo());
            if (axisOutMsgCtx.isPropertyTrue(NhttpConstants.IGNORE_SC_ACCEPTED)) {
                response.setProperty(NhttpConstants.FORCE_SC_ACCEPTED, Constants.VALUE_TRUE);
            }
            // create the synapse message context for the response
            Axis2MessageContext synapseInMessageContext =
                    new Axis2MessageContext(
                            response,
                            synapseOutMsgCtx.getConfiguration(),
                            synapseOutMsgCtx.getEnvironment());
            synapseInMessageContext.setResponse(true);
            // === optional FORCE_ERROR_ON_SOAP_FAULT handling: treat a SOAP fault in the
            // response body as an endpoint failure instead of a normal response ===
            Object obj = synapseOutMsgCtx.getProperty(SynapseConstants.FORCE_ERROR_PROPERTY);
            String errorOnSOAPFault = (String) obj;
            if (Constants.VALUE_TRUE.equals(errorOnSOAPFault) && successfulEndpoint != null) {
                if (log.isDebugEnabled()) {
                    log.debug("FORCE_ERROR_ON_SOAP_FAULT is true, checking for SOAPFault");
                }
                try {
                    // force-build the whole envelope so hasFault() below is reliable
                    RelayUtils.buildMessage(((Axis2MessageContext) synapseInMessageContext).getAxis2MessageContext(), true);
                } catch (Exception e) {
                    // best-effort build: a failure here simply means no fault can be detected
                    // handleException("Error while building message", e, synapseInMessageContext);
                }
                if ((synapseInMessageContext.getEnvelope() != null) && synapseInMessageContext.getEnvelope().hasFault()) {
                    if (log.isDebugEnabled()) {
                        log.debug("SOAPFault found in response message, forcing endpoint " +
                                successfulEndpoint.getName() + " to fail");
                    }
                    // setup new pipe configuration..if failure happens (this will be setup as the
                    // source writer and during the TargetContext clean up operation the writer
                    // will be reset and pull to the buffer
                    MessageContext axis2OUTMC = ((Axis2MessageContext) synapseOutMsgCtx).getAxis2MessageContext();
                    NHttpServerConnection conn = (NHttpServerConnection) axis2OUTMC.getProperty("pass-through.Source-Connection");
                    if (conn != null) {
                        SourceConfiguration sourceConfiguration = (SourceConfiguration) axis2OUTMC.getProperty("PASS_THROUGH_SOURCE_CONFIGURATION");
                        Pipe pipe = new Pipe(conn, sourceConfiguration.getBufferFactory().getBuffer(), "source",
                                sourceConfiguration);
                        axis2OUTMC.setProperty(PassThroughConstants.PASS_THROUGH_PIPE, pipe);
                    }
                    StatisticsReporter.reportFaultForAll(synapseOutMsgCtx,
                            ErrorLogFactory.createErrorLog(response));
                    synapseOutMsgCtx.setProperty(SynapseConstants.SENDING_FAULT, Boolean.TRUE);
                    synapseOutMsgCtx.setProperty(SynapseConstants.ERROR_CODE, SynapseConstants.ENDPOINT_CUSTOM_ERROR);
                    // detect whether this endpoint sits under a failover parent; failover is
                    // handled on the ORIGINAL out context, everything else on the new in context
                    boolean failOver = false;
                    if (successfulEndpoint instanceof AbstractEndpoint) {
                        Endpoint endpoint = ((AbstractEndpoint) successfulEndpoint).getParentEndpoint();
                        if (endpoint != null && (endpoint instanceof FailoverEndpoint)) {
                            failOver = true;
                        }
                    }
                    // set the properties of the original MC to the new MC
                    for (Object key : synapseOutMsgCtx.getPropertyKeySet()) {
                        synapseInMessageContext.setProperty(
                                (String) key, synapseOutMsgCtx.getProperty((String) key));
                    }
                    if (failOver) {
                        // we may be required to handle the same message for failover cases only;
                        // other than that, treat it based on the incoming message
                        ((FaultHandler) successfulEndpoint).handleFault(synapseOutMsgCtx, null);
                    } else {
                        faultStack = synapseOutMsgCtx.getFaultStack();
                        if (faultStack != null) {
                            synapseInMessageContext.getFaultStack().addAll(faultStack);
                            ((FaultHandler) successfulEndpoint).handleFault(synapseInMessageContext,
                                    null);
                        }
                    }
                    return;
                } else {
                    successfulEndpoint.onSuccess();
                }
            } else if (successfulEndpoint != null) {
                successfulEndpoint.onSuccess();
            }
            synapseInMessageContext.setTo(
                    new EndpointReference(AddressingConstants.Final.WSA_ANONYMOUS_URL));
            synapseInMessageContext.setTracingState(synapseOutMsgCtx.getTracingState());
            // set the properties of the original MC to the new MC
            for (Object key : synapseOutMsgCtx.getPropertyKeySet()) {
                synapseInMessageContext.setProperty(
                        (String) key, synapseOutMsgCtx.getProperty((String) key));
            }
            // Copy SequenceCallStack from original MC to the new MC
            Boolean isContinuationCall =
                    (Boolean) synapseOutMsgCtx.getProperty(SynapseConstants.CONTINUATION_CALL);
            if (isContinuationCall != null && isContinuationCall) {
                // Set the message direction
                if (!synapseOutMsgCtx.isResponse()) {
                    synapseInMessageContext.setResponse(false);
                }
                Stack<ContinuationState> seqContinuationStates =
                        synapseOutMsgCtx.getContinuationStateStack();
                // copy bottom-up so the new stack preserves the original order
                for (int i = 0; i < seqContinuationStates.size(); i++) {
                    synapseInMessageContext.pushContinuationState(seqContinuationStates.get(i));
                }
            }
            // If this response is related to session affinity endpoints - Server initiated session
            Dispatcher dispatcher =
                    (Dispatcher) synapseOutMsgCtx.getProperty(
                            SynapseConstants.PROP_SAL_ENDPOINT_CURRENT_DISPATCHER);
            if (dispatcher != null && dispatcher.isServerInitiatedSession()) {
                dispatcher.updateSession(synapseInMessageContext);
            }
            StatisticsReporter.reportForAllOnResponseReceived(synapseInMessageContext);
            // send the response message through the synapse mediation flow
            try {
                synapseOutMsgCtx.getEnvironment().injectMessage(synapseInMessageContext);
            } catch (SynapseException syne) {
                Stack stack = synapseInMessageContext.getFaultStack();
                if (stack != null &&
                        !stack.isEmpty()) {
                    ((FaultHandler) stack.pop()).handleFault(synapseInMessageContext, syne);
                } else {
                    log.error("Synapse encountered an exception, " +
                            "No error handlers found - [Message Dropped]\n" + syne.getMessage());
                }
            }
        }
    }
/**
* It is possible for us (Synapse) to cause the creation of a duplicate relatesTo as we
* try to hold onto the outgoing message ID even for POX messages using the relates to
* Now once we get a response, make sure we remove any trace of this before we proceed any
* further
* @param mc the message context from which a possibly duplicated relatesTo should be removed
* @param relates the existing relatedTo array of the message
*/
private void removeDuplicateRelatesTo(MessageContext mc, RelatesTo[] relates) {
int insertPos = 0;
RelatesTo[] newRelates = new RelatesTo[relates.length];
for (RelatesTo current : relates) {
boolean found = false;
for (int j = 0; j < newRelates.length && j < insertPos; j++) {
if (newRelates[j].equals(current) ||
newRelates[j].getValue().equals(current.getValue())) {
found = true;
break;
}
}
if (!found) {
newRelates[insertPos++] = current;
}
}
RelatesTo[] trimmedRelates = new RelatesTo[insertPos];
System.arraycopy(newRelates, 0, trimmedRelates, 0, insertPos);
mc.setRelationships(trimmedRelates);
}
}
| |
// //////////////////////////////////////////////////////////////////////////////
//
// RMG - Reaction Mechanism Generator
//
// Copyright (c) 2002-2011 Prof. William H. Green (whgreen@mit.edu) and the
// RMG Team (rmg_dev@mit.edu)
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation
// the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
//
// //////////////////////////////////////////////////////////////////////////////
package jing.rxnSys;
import java.util.*;
import jing.param.*;
import jing.rxn.NegativeRateException;
import jing.rxn.Reaction;
import jing.rxn.TemplateReaction;
import jing.chem.ChemGraph;
import jing.chem.Species;
// import RMG;
// ## package jing::rxnSys
// ----------------------------------------------------------------------------
// jing\rxnSys\RateBasedRME.java
// ----------------------------------------------------------------------------
// ## class RateBasedRME
/**
 * Rate-based reaction model enlarger. For each invalid reaction system, picks the edge
 * (unreacted) species with the highest absolute flux, promotes it into the model core, and
 * generates the new reactions of the enlarged core (via the reaction library, when present,
 * plus the reaction family templates).
 */
public class RateBasedRME implements ReactionModelEnlarger {
    // Constructors
    public RateBasedRME() {
    }

    // History: 9/25/07 gmagoon: added ReactionModel parameter;
    // 10/30/07 gmagoon: updated parameters to match ReactionModelEnlarger.
    /**
     * Enlarges the reaction model core by one species per invalid reaction system.
     *
     * @param p_reactionSystemList list of ReactionSystem instances (one per reaction condition)
     * @param rm                   the reaction model; must be a CoreEdgeReactionModel
     * @param p_validList          list of Booleans parallel to p_reactionSystemList; an entry of
     *                             false marks a system that is not yet valid and needs enlarging
     * @throws InvalidReactionModelTypeException if rm is not a CoreEdgeReactionModel
     */
    public void enlargeReactionModel(LinkedList p_reactionSystemList,
            ReactionModel rm, LinkedList p_validList) {
        if (!(rm instanceof CoreEdgeReactionModel))
            throw new InvalidReactionModelTypeException();
        CoreEdgeReactionModel cerm = (CoreEdgeReactionModel) rm;
        // Pass 1: for every invalid reaction system, pick the next candidate species.
        // NOTE(review): the loop index is a boxed Integer; a primitive int would avoid
        // repeated autoboxing (behavior is unchanged either way).
        LinkedList nextList = new LinkedList();
        double startTime = System.currentTimeMillis();
        for (Integer i = 0; i < p_reactionSystemList.size(); i++) {
            if (!(Boolean) p_validList.get(i)) {
                PresentStatus ps = ((ReactionSystem) p_reactionSystemList
                        .get(i)).getPresentStatus();
                // NOTE(review): maxflux is passed by value — Java strings are immutable, so
                // the assignment inside getNextCandidateSpecies never reaches this variable.
                // The max-flux value is effectively only reported via logging.
                String maxflux = "";
                Species next = getNextCandidateSpecies(cerm, ps, maxflux);
                nextList.add(next);
            } else {
                // null placeholder keeps nextList index-aligned with p_reactionSystemList
                nextList.add(null);
            }
        }
        // Pass 2: promote each candidate into the core and generate its reactions.
        startTime = System.currentTimeMillis();
        for (Integer i = 0; i < p_reactionSystemList.size(); i++) {
            if (!(Boolean) p_validList.get(i)) {
                Species newCoreSpecies = (Species) nextList.get(i);
                if (cerm.containsAsReactedSpecies(newCoreSpecies)) {
                    // Already promoted by an earlier reaction condition in this same call.
                    Logger.warning("Tried to add species "
                            + newCoreSpecies.getFullName()
                            + ", but is already present in reaction model (this should be OK if you are running multiple reaction conditions)");
                } else {
                    double findSpeciesTime = (System.currentTimeMillis() - startTime) / 1000 / 60;
                    Logger.info("\nAdd a new species to the model core: "
                            + newCoreSpecies.getFullName());
                    // Log standard-state thermochemistry of the new core species at 298 K.
                    Temperature temp = new Temperature(298, "K");
                    double H = newCoreSpecies.calculateH(temp);
                    double S = newCoreSpecies.calculateS(temp);
                    double G = newCoreSpecies.calculateG(temp);
                    double Cp = newCoreSpecies.calculateCp(temp);
                    Logger.debug("Thermo of species at 298K (H, S, G, Cp, respectively)\t"
                            + String.valueOf(H)
                            + '\t'
                            + String.valueOf(S)
                            + '\t'
                            + String.valueOf(G)
                            + '\t'
                            + String.valueOf(Cp));
                    cerm.moveFromUnreactedToReactedSpecies(newCoreSpecies);
                    cerm.moveFromUnreactedToReactedReaction();
                    Global.moveUnreactedToReacted = (System.currentTimeMillis() - startTime) / 1000 / 60;
                    // add species status to reaction system
                    SpeciesStatus speciesStatus = new SpeciesStatus(
                            newCoreSpecies, 1, 0.0, 0.0); // (species, type (reacted=1), concentration, flux)
                    PresentStatus ps = ((ReactionSystem) p_reactionSystemList
                            .get(i)).getPresentStatus();
                    ps.putSpeciesStatus(speciesStatus);
                    // generate new reaction set
                    startTime = System.currentTimeMillis();
                    // The species list is first reacted by the Library Reaction Generator and
                    // then sent to the RMG (template) reaction generator.
                    LinkedHashSet newReactionSet_nodup;
                    ReactionSystem rxnSystem = (ReactionSystem) p_reactionSystemList
                            .get(i);
                    // Check Reaction Library
                    if (rxnSystem.getLibraryReactionGenerator()
                            .getReactionLibrary() != null) {
                        Logger.info("Checking Reaction Library "
                                + rxnSystem.getLibraryReactionGenerator()
                                        .getReactionLibrary().getName()
                                + " for reactions of "
                                + newCoreSpecies.getFullName()
                                + " with the core.");
                        // At this point the core (cerm.getReactedSpeciesSet()) already contains
                        // newCoreSpecies, so we can just react the entire core.
                        LinkedHashSet newReactionSet = rxnSystem
                                .getLibraryReactionGenerator().react(
                                        cerm.getReactedSpeciesSet());
                        // Report only those reactions that involve the new species.
                        Iterator ReactionIter = newReactionSet.iterator();
                        while (ReactionIter.hasNext()) {
                            Reaction current_reaction = (Reaction) ReactionIter
                                    .next();
                            if (current_reaction.contains(newCoreSpecies)) {
                                Logger.info("Library Reaction: "
                                        + current_reaction.toString());
                            }
                        }
                        Logger.info("Generating reactions using reaction family templates.");
                        // Add reactions found from the reaction family templates to the set.
                        newReactionSet
                                .addAll(((ReactionSystem) p_reactionSystemList
                                        .get(i)).getReactionGenerator().react(
                                                cerm.getReactedSpeciesSet(),
                                                newCoreSpecies, "All"));
                        // Remove duplicate entries, i.e. the same reaction arriving from both
                        // the seed/reaction library and a reaction template. "Same" means same
                        // family — not merely the same structure from different families.
                        newReactionSet_nodup = rxnSystem
                                .getLibraryReactionGenerator()
                                .RemoveDuplicateReac(newReactionSet);
                    } else {
                        // When no Reaction Library is present
                        Logger.info("Generating reactions using reaction family templates.");
                        newReactionSet_nodup = rxnSystem.getReactionGenerator()
                                .react(cerm.getReactedSpeciesSet(),
                                        newCoreSpecies, "All");
                    }
                    double enlargeTime = (System.currentTimeMillis() - startTime) / 1000 / 60;
                    startTime = System.currentTimeMillis();
                    // NOTE(review): restartTime is computed immediately after resetting
                    // startTime, so it is effectively ~0 — presumably a leftover of removed
                    // restart-file bookkeeping; kept for the diagnostics column layout.
                    double restartTime = (System.currentTimeMillis() - startTime) / 1000 / 60;
                    Global.diagnosticInfo.append(Global.moveUnreactedToReacted
                            + "\t" + enlargeTime + "\t" + restartTime + "\t");
                    // Partition the new reactions into core/edge of the CoreEdgeReactionModel.
                    cerm.addReactionSet(newReactionSet_nodup);
                }
            }
        }
        return;
    }

    /**
     * Returns the unreacted (edge) species with the largest absolute flux, and logs the
     * significant edge reactions (flux > 1% of the maximum) involving it.
     *
     * @param p_reactionModel the core/edge reaction model to search
     * @param p_presentStatus current system status supplying fluxes, T, P and concentrations
     * @param maxflux         NOTE(review): intended as an out-parameter, but Java passes the
     *                        reference by value and String is immutable, so the caller never
     *                        sees the assignment below; the value is dead on return
     * @throws NullPointerException            if no unreacted species has a positive flux
     *                                         (NOTE(review): a bare NPE; an explicit message or a
     *                                         domain exception would aid debugging)
     * @throws NegativeRateException           if a candidate reaction has a negative total rate
     * @throws NegativeConcentrationException  if a reactant concentration is below -100*atol
     */
    public Species getNextCandidateSpecies(
            CoreEdgeReactionModel p_reactionModel,
            PresentStatus p_presentStatus, String maxflux) {
        LinkedHashSet unreactedSpecies = p_reactionModel
                .getUnreactedSpeciesSet();
        Species maxSpecies = null;
        double maxFlux = 0;
        // Scan the edge for the species with the largest |flux|.
        Iterator iter = unreactedSpecies.iterator();
        while (iter.hasNext()) {
            Species us = (Species) iter.next();
            double thisFlux = Math.abs(p_presentStatus.unreactedSpeciesFlux[us
                    .getID()]);
            if (thisFlux > maxFlux) {
                maxFlux = thisFlux;
                maxSpecies = us;
            }
        }
        // No-op for the caller: see the @param note on maxflux above.
        maxflux = "" + maxFlux;
        if (maxSpecies == null)
            throw new NullPointerException();
        // Collect edge reactions involving the chosen species whose flux is significant
        // (> 1% of the species' maximum flux).
        LinkedHashSet ur = p_reactionModel.getUnreactedReactionSet();
        LinkedHashMap significantReactions = new LinkedHashMap();
        int reactionWithSpecies = 0;
        for (Iterator iur = ur.iterator(); iur.hasNext();) {
            Reaction r = (Reaction) iur.next();
            double flux = 0;
            Temperature p_temperature = p_presentStatus.temperature;
            Pressure p_pressure = p_presentStatus.pressure; // 10/30/07 gmagoon: added
            if (r.contains(maxSpecies)) {
                reactionWithSpecies++;
                if (r instanceof TemplateReaction) {
                    // Pressure-dependent rate for template reactions.
                    flux = ((TemplateReaction) r).calculateTotalPDepRate(
                            p_temperature, p_pressure);
                } else {
                    flux = r.calculateTotalRate(p_temperature);
                }
                if (flux >= 0) {
                    // Multiply the rate constant by each reactant concentration; a missing
                    // species status zeroes the flux.
                    for (Iterator rIter = r.getReactants(); rIter.hasNext();) {
                        Species spe = (Species) rIter.next();
                        SpeciesStatus status = p_presentStatus
                                .getSpeciesStatus(spe);
                        if (status == null)
                            flux = 0;
                        else {
                            double conc = status.getConcentration();
                            if (conc < 0) {
                                double aTol = ReactionModelGenerator.getAtol();
                                // Tolerate small negative concentrations from the ODE solver;
                                // anything below -100*atol is a genuine error.
                                if (conc < -100.0 * aTol)
                                    throw new NegativeConcentrationException(
                                            "Species "
                                                    + spe.getFullName()
                                                    + " has negative concentration: "
                                                    + String.valueOf(conc));
                            }
                            flux *= conc;
                        }
                    }
                } else {
                    throw new NegativeRateException(
                            r.toChemkinString(p_temperature) + ": "
                                    + String.valueOf(flux)); // avoids use of Global.temperature
                }
                if (flux > 0.01 * maxFlux)
                    significantReactions.put(r, flux);
            }
        }
        Logger.info(String.format("Time: %10.4e s", p_presentStatus.getTime()
                .getTime()));
        Logger.info(String.format(
                "Edge species %s has highest flux: %10.4e mol/cm^3*s",
                maxSpecies.getFullName(), maxFlux));
        Logger.verbose("The total number of edge reactions with this species is "
                + reactionWithSpecies + ". Significant ones are:");
        Iterator reactionIter = significantReactions.keySet().iterator();
        while (reactionIter.hasNext()) {
            Reaction r = (Reaction) reactionIter.next();
            Logger.verbose(String.format(" %s %9.3e mol/cm^3*s", r
                    .getStructure().toChemkinString(r.hasReverseReaction()),
                    significantReactions.get(r)));
        }
        return maxSpecies;
    }
}
/*********************************************************************
* File Path : RMG\RMG\jing\rxnSys\RateBasedRME.java
*********************************************************************/
| |
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.plugins.groovy.lang.folding;
import com.intellij.codeInsight.folding.JavaCodeFoldingSettings;
import com.intellij.codeInsight.folding.impl.JavaFoldingBuilderBase;
import com.intellij.lang.ASTNode;
import com.intellij.lang.folding.CustomFoldingBuilder;
import com.intellij.lang.folding.FoldingDescriptor;
import com.intellij.lang.folding.NamedFoldingDescriptor;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.FoldingGroup;
import com.intellij.openapi.project.DumbAware;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.*;
import com.intellij.psi.impl.source.tree.LeafPsiElement;
import com.intellij.psi.tree.IElementType;
import com.intellij.util.containers.hash.HashSet;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.plugins.groovy.lang.groovydoc.parser.GroovyDocElementTypes;
import org.jetbrains.plugins.groovy.lang.lexer.GroovyTokenTypes;
import org.jetbrains.plugins.groovy.lang.lexer.TokenSets;
import org.jetbrains.plugins.groovy.lang.parser.GroovyElementTypes;
import org.jetbrains.plugins.groovy.lang.psi.GroovyFile;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.blocks.GrCodeBlock;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.expressions.literals.GrString;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.expressions.literals.GrStringInjection;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.GrTypeDefinition;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.GrTypeDefinitionBody;
import org.jetbrains.plugins.groovy.lang.psi.api.toplevel.imports.GrImportStatement;
import org.jetbrains.plugins.groovy.lang.psi.impl.PsiImplUtil;
import org.jetbrains.plugins.groovy.lang.psi.util.GrStringUtil;
import java.util.List;
import java.util.Set;
/**
* @author ilyas
*/
public class GroovyFoldingBuilder extends CustomFoldingBuilder implements DumbAware {
@Override
protected void buildLanguageFoldRegions(@NotNull List<FoldingDescriptor> descriptors,
@NotNull PsiElement root,
@NotNull Document document,
boolean quick) {
appendDescriptors(root, descriptors, new HashSet<PsiElement>());
}
  /**
   * Recursively collects folding descriptors for {@code element} and its subtree.
   * Handles four region kinds: code blocks/closures, well-formed multiline comments,
   * runs of consecutive single-line comments, and multiline string literals; at the
   * file level it additionally folds import runs.
   *
   * @param element      PSI element to process
   * @param descriptors  output list of folding regions
   * @param usedComments single-line comments already consumed into a comment run at this
   *                     sibling level, so they are not folded twice
   */
  private void appendDescriptors(PsiElement element, List<FoldingDescriptor> descriptors, Set<PsiElement> usedComments) {
    ASTNode node = element.getNode();
    if (node == null) return;
    IElementType type = node.getElementType();
    // Code blocks and closures — but never the body of the file's single top-level class
    // (folding it would collapse essentially the whole file).
    if (TokenSets.BLOCK_SET.contains(type) && !isSingleHighLevelClassBody(element) || type == GroovyElementTypes.CLOSABLE_BLOCK) {
      if (isMultiline(element)) {
        collapseBlock(descriptors, element);
      }
    }
    // Multiline /* */ and /** */ comments; only folded when properly terminated.
    if ((type.equals(GroovyTokenTypes.mML_COMMENT) || type.equals(GroovyDocElementTypes.GROOVY_DOC_COMMENT)) &&
        isMultiline(element) &&
        isWellEndedComment(element)) {
      descriptors.add(new FoldingDescriptor(node, node.getTextRange()));
    }
    // A run of consecutive single-line comments (separated only by whitespace/newlines)
    // folds into one region — unless any of them is a custom folding region marker.
    if (type.equals(GroovyTokenTypes.mSL_COMMENT) && !usedComments.contains(element)) {
      boolean containsCustomRegionMarker = isCustomRegionElement(element);
      usedComments.add(element);
      PsiElement end = null;
      for (PsiElement current = element.getNextSibling(); current != null; current = current.getNextSibling()) {
        if (PsiImplUtil.isWhiteSpaceOrNls(current)) continue;
        IElementType elementType = current.getNode().getElementType();
        if (elementType == GroovyTokenTypes.mSL_COMMENT) {
          // Absorb this comment into the run so the sibling pass skips it.
          end = current;
          usedComments.add(current);
          containsCustomRegionMarker |= isCustomRegionElement(current);
          continue;
        }
        break;
      }
      if (end != null && !containsCustomRegionMarker) {
        final TextRange range = new TextRange(element.getTextRange().getStartOffset(), end.getTextRange().getEndOffset());
        descriptors.add(new FoldingDescriptor(element, range));
      }
    }
    // multiline strings
    addFoldingForStrings(descriptors, node);
    // Children get a fresh set: comment runs are grouped among siblings, and all children
    // of this element share the one set created here.
    Set<PsiElement> newUsedComments = new HashSet<PsiElement>();
    for (PsiElement child = element.getFirstChild(); child != null; child = child.getNextSibling()) {
      appendDescriptors(child, descriptors, newUsedComments);
    }
    if (element instanceof GroovyFile) {
      processImports(descriptors, ((GroovyFile)element).getImportStatements());
    }
  }
private static void collapseBlock(List<FoldingDescriptor> descriptors, PsiElement psi) {
if (psi instanceof GrCodeBlock) {
final int lineFeedCount = StringUtil.countChars(psi.getText(), '\n');
if (lineFeedCount <= 2) {
final PsiElement lbrace = ((GrCodeBlock)psi).getLBrace();
final PsiElement rbrace = ((GrCodeBlock)psi).getRBrace();
if (lbrace != null && rbrace != null) {
final PsiElement next = lbrace.getNextSibling();
final PsiElement prev = rbrace.getPrevSibling();
if (next != null && PsiImplUtil.isWhiteSpaceOrNls(next) &&
prev != null && PsiImplUtil.isWhiteSpaceOrNls(prev)) {
final FoldingGroup group = FoldingGroup.newGroup("block_group");
descriptors.add(new NamedFoldingDescriptor(psi, lbrace.getTextRange().getStartOffset(), next.getTextRange().getEndOffset(), group, "{"));
descriptors.add(new NamedFoldingDescriptor(psi, prev.getTextRange().getStartOffset(), rbrace.getTextRange().getEndOffset(), group, "}"));
return;
}
}
}
}
descriptors.add(new FoldingDescriptor(psi, psi.getTextRange()));
}
private static boolean isSingleHighLevelClassBody(PsiElement element) {
if (!(element instanceof GrTypeDefinitionBody)) return false;
final PsiElement parent = element.getParent();
if (!(parent instanceof GrTypeDefinition)) return false;
final GrTypeDefinition clazz = (GrTypeDefinition)parent;
if (clazz.isAnonymous() || clazz.getContainingClass() != null) return false;
final PsiFile file = element.getContainingFile();
return file instanceof GroovyFile && ((GroovyFile)file).getClasses().length == 1;
}
private static void addFoldingForStrings(List<FoldingDescriptor> descriptors, ASTNode node) {
if (!isMultiLineStringLiteral(node)) return;
if (!node.getElementType().equals(GroovyElementTypes.GSTRING) && !node.getElementType().equals(GroovyElementTypes.REGEX)) {
descriptors.add(new FoldingDescriptor(node, node.getTextRange()));
return;
}
final GrString grString = (GrString)node.getPsi();
if (grString == null) return;
final GrStringInjection[] injections = grString.getInjections();
if (injections.length == 0) {
descriptors.add(new FoldingDescriptor(node, node.getTextRange()));
return;
}
final String start_quote = GrStringUtil.getStartQuote(node.getText());
final String end_quote = GrStringUtil.getEndQuote(node.getText());
final FoldingGroup group = FoldingGroup.newGroup("GString");
final TextRange nodeRange = node.getTextRange();
int startOffset = nodeRange.getStartOffset();
GrStringInjection injection = injections[0];
TextRange injectionRange = injection.getTextRange();
if (startOffset + 1 < injectionRange.getStartOffset()) {
descriptors.add(new NamedFoldingDescriptor(node, startOffset, injectionRange.getStartOffset(), group, start_quote));
}
final String placeholder = " ";
startOffset = injectionRange.getEndOffset();
for (int i = 1; i < injections.length; i++) {
injection = injections[i];
injectionRange = injection.getTextRange();
final int endOffset = injectionRange.getStartOffset();
if (endOffset - startOffset >= 2) {
descriptors.add(new NamedFoldingDescriptor(injection.getNode().getTreePrev(), startOffset, endOffset, group, placeholder));
}
startOffset = injectionRange.getEndOffset();
}
if (startOffset + 1 < nodeRange.getEndOffset()) {
descriptors.add(new NamedFoldingDescriptor(node.getLastChildNode(), startOffset, nodeRange.getEndOffset(), group, end_quote));
}
}
/**
 * Folds each contiguous run of import statements into a single region,
 * keeping the first statement's "import " prefix visible.
 */
private static void processImports(final List<FoldingDescriptor> descriptors, GrImportStatement[] imports) {
  if (imports.length < 2) return;
  PsiElement first = imports[0];
  while (first != null) {
    // 'marker' tracks the last import statement in the current contiguous run.
    PsiElement marker = first;
    PsiElement next = first.getNextSibling();
    // Extend the run across imports and interleaving leaf elements
    // (whitespace, separators); stop at the first "real" other element.
    while (next instanceof GrImportStatement || next instanceof LeafPsiElement) {
      if (next instanceof GrImportStatement) marker = next;
      next = next.getNextSibling();
    }
    // Only fold runs of two or more imports (marker advanced past 'first').
    if (marker != first) {
      int start = first.getTextRange().getStartOffset();
      int end = marker.getTextRange().getEndOffset();
      // Leave the leading "import " of the first statement outside the fold.
      int tail = "import ".length();
      if (start + tail < end && !JavaFoldingBuilderBase.hasErrorElementsNearby(first.getContainingFile(), start, end)) {
        FoldingDescriptor descriptor = new FoldingDescriptor(first.getNode(), new TextRange(start + tail, end));
        // imports are often added/removed automatically, so we enable autoupdate of folded region for foldings even if it's collapsed
        descriptor.setCanBeRemovedWhenCollapsed(true);
        descriptors.add(descriptor);
      }
    }
    // Skip ahead to the start of the next import run, if any remains.
    while (!(next instanceof GrImportStatement) && next != null) next = next.getNextSibling();
    first = next;
  }
}
// True when the comment element is properly closed, i.e. its text ends with
// the block-comment terminator.
private static boolean isWellEndedComment(PsiElement element) {
  final String text = element.getText();
  return text.endsWith("*/");
}
/**
 * Returns whether the element's text spans more than one line.
 * Either a LF or a CR counts; the original's extra {@code contains("\r\n")}
 * check was redundant, since any text containing CRLF already contains '\r'.
 */
private static boolean isMultiline(PsiElement element) {
  String text = element.getText();
  return text.contains("\n") || text.contains("\r");
}
/**
 * Chooses the placeholder text shown for a folded region, keyed off the
 * node's element type: braces for blocks/closures, comment markers for
 * comments, an ellipsis for imports, and the literal's own quotes for
 * multi-line strings. Returns null when no placeholder applies.
 */
@Nullable
@Override
protected String getLanguagePlaceholderText(@NotNull ASTNode node, @NotNull TextRange range) {
  final IElementType type = node.getElementType();
  if (TokenSets.BLOCK_SET.contains(type) || type == GroovyElementTypes.CLOSABLE_BLOCK) {
    return "{...}";
  }
  if (type.equals(GroovyTokenTypes.mML_COMMENT)) {
    return "/*...*/";
  }
  if (type.equals(GroovyDocElementTypes.GROOVY_DOC_COMMENT)) {
    return "/**...*/";
  }
  if (GroovyElementTypes.IMPORT_STATEMENT.equals(type)) {
    return "...";
  }
  if (isMultiLineStringLiteral(node)) {
    // Surround the ellipsis with the literal's own quotes, e.g. """...""".
    final String text = node.getText();
    return GrStringUtil.getStartQuote(text) + "..." + GrStringUtil.getEndQuote(text);
  }
  return null;
}
/**
 * Decides whether a folding region starts out collapsed when a file is
 * opened, mapping each foldable element type to the corresponding user
 * preference in {@link JavaCodeFoldingSettings}.
 */
@Override
protected boolean isRegionCollapsedByDefault(@NotNull ASTNode node) {
  final JavaCodeFoldingSettings settings = JavaCodeFoldingSettings.getInstance();
  if (node.getElementType() == GroovyElementTypes.IMPORT_STATEMENT) {
    return settings.isCollapseImports();
  }
  if (node.getElementType() == GroovyDocElementTypes.GROOVY_DOC_COMMENT || node.getElementType() == GroovyTokenTypes.mML_COMMENT) {
    PsiElement element = node.getPsi();
    PsiElement parent = element.getParent();
    if (parent instanceof GroovyFile) {
      PsiElement firstChild = parent.getFirstChild();
      if (firstChild instanceof PsiWhiteSpace) {
        firstChild = firstChild.getNextSibling();
      }
      // A doc/block comment that is the first element of the file is treated
      // as the file header.
      if (element.equals(firstChild)) {
        return settings.isCollapseFileHeader();
      }
    }
    if (node.getElementType() == GroovyDocElementTypes.GROOVY_DOC_COMMENT) {
      return settings.isCollapseJavadocs();
    }
    // NOTE(review): a plain block comment that is not the file header falls
    // through to the checks below and ultimately returns false — confirm
    // this is intended rather than a missing isCollapse...() mapping.
  }
  if ((node.getElementType() == GroovyElementTypes.OPEN_BLOCK || node.getElementType() == GroovyElementTypes.CONSTRUCTOR_BODY) && node.getTreeParent().getElementType() ==
      GroovyElementTypes.METHOD_DEFINITION) {
    return settings.isCollapseMethods();
  }
  if (node.getElementType() == GroovyElementTypes.CLOSABLE_BLOCK) {
    // Closures follow the anonymous-class preference.
    return settings.isCollapseAnonymousClasses();
  }
  if (node.getElementType() == GroovyElementTypes.CLASS_BODY) {
    final PsiElement parent = node.getPsi().getParent();
    if (parent instanceof PsiClass) {
      if (parent instanceof PsiAnonymousClass) {
        return settings.isCollapseAnonymousClasses();
      }
      if (((PsiClass)parent).getContainingClass() != null) {
        return settings.isCollapseInnerClasses();
      }
    }
  }
  if (node.getElementType() == GroovyTokenTypes.mSL_COMMENT) {
    return settings.isCollapseEndOfLineComments();
  }
  return false;
}
/**
 * A node is a foldable multi-line string literal when it is a plain string,
 * GString, or regex literal that spans several lines and is properly
 * terminated.
 */
private static boolean isMultiLineStringLiteral(ASTNode node) {
  final IElementType type = node.getElementType();
  final boolean stringLike = TokenSets.STRING_LITERAL_SET.contains(type)
                             || type.equals(GroovyElementTypes.GSTRING)
                             || type.equals(GroovyElementTypes.REGEX);
  if (!stringLike) return false;
  final PsiElement psi = node.getPsi();
  return isMultiline(psi) && GrStringUtil.isWellEndedString(psi);
}
// Only end-of-line comments may carry custom (user-defined) folding markers.
@Override
protected boolean isCustomFoldingCandidate(ASTNode node) {
  final IElementType type = node.getElementType();
  return type == GroovyTokenTypes.mSL_COMMENT;
}
// Custom folding regions are scoped to class definitions and open blocks.
@Override
protected boolean isCustomFoldingRoot(ASTNode node) {
  final IElementType type = node.getElementType();
  return type == GroovyElementTypes.CLASS_DEFINITION || type == GroovyElementTypes.OPEN_BLOCK;
}
}
| |
package org.bouncycastle.crypto.digests;
import org.bouncycastle.crypto.util.Pack;
import org.bouncycastle.util.Memoable;
/**
* Implementation of Chinese SM3 digest as described at
* http://tools.ietf.org/html/draft-shen-sm3-hash-00
* and at .... ( Chinese PDF )
* <p>
* The specification says "process a bit stream",
* but this is written to process bytes in blocks of 4,
* meaning this will process 32-bit word groups.
* But so do also most other digest specifications,
* including the SHA-256 which was an origin for
* this specification.
*/
public class SM3Digest
    extends GeneralDigest
{
    private static final int DIGEST_LENGTH = 32; // digest size in bytes (256 bits)
    private static final int BLOCK_SIZE = 64 / 4; // block size in 32-bit words (16 ints = 64 bytes)

    private int[] V = new int[DIGEST_LENGTH / 4]; // chaining state: 8 x 32-bit words
    private int[] inwords = new int[BLOCK_SIZE];  // current block buffer, big-endian words
    private int xOff;                             // number of words currently filled in 'inwords'

    // Work-bufs used within processBlock()
    private int[] W = new int[68];  // expanded message schedule
    private int[] W1 = new int[64]; // W'[j] = W[j] ^ W[j + 4]

    // Round constant T for processBlock() which is 32 bit integer rolled left up to (63 MOD 32) bit positions.
    private static final int[] T = new int[64];

    static
    {
        // Precompute T[j] = Tj <<< (j mod 32), where Tj = 0x79CC4519 for
        // rounds 0..15 and Tj = 0x7A879D8A for rounds 16..63 (see 3.4.2).
        for (int i = 0; i < 16; ++i)
        {
            int t = 0x79CC4519;
            T[i] = (t << i) | (t >>> (32 - i));
        }
        for (int i = 16; i < 64; ++i)
        {
            int n = i % 32;
            int t = 0x7A879D8A;
            T[i] = (t << n) | (t >>> (32 - n));
        }
    }

    /**
     * Standard constructor
     */
    public SM3Digest()
    {
        reset();
    }

    /**
     * Copy constructor. This will copy the state of the provided
     * message digest.
     */
    public SM3Digest(SM3Digest t)
    {
        super(t);
        copyIn(t);
    }

    // Copies the SM3-specific state (chaining words, block buffer, word offset).
    private void copyIn(SM3Digest t)
    {
        System.arraycopy(t.V, 0, this.V, 0, this.V.length);
        System.arraycopy(t.inwords, 0, this.inwords, 0, this.inwords.length);
        xOff = t.xOff;
    }

    public String getAlgorithmName()
    {
        return "SM3";
    }

    public int getDigestSize()
    {
        return DIGEST_LENGTH;
    }

    /** Returns a full-state snapshot of this digest (see {@link Memoable}). */
    public Memoable copy()
    {
        return new SM3Digest(this);
    }

    /** Restores state previously captured via {@link #copy()}. */
    public void reset(Memoable other)
    {
        SM3Digest d = (SM3Digest)other;
        super.copyIn(d);
        copyIn(d);
    }

    /**
     * reset the chaining variables
     */
    public void reset()
    {
        super.reset();
        // Initial value IV of SM3 (8 chaining words), per the specification.
        this.V[0] = 0x7380166F;
        this.V[1] = 0x4914B2B9;
        this.V[2] = 0x172442D7;
        this.V[3] = 0xDA8A0600;
        this.V[4] = 0xA96F30BC;
        this.V[5] = 0x163138AA;
        this.V[6] = 0xE38DEE4D;
        this.V[7] = 0xB0FB0E4E;
        this.xOff = 0;
    }

    /**
     * Finalizes padding via finish(), writes the 32-byte digest big-endian
     * at outOff, resets the digest, and returns the digest length.
     */
    public int doFinal(byte[] out,
                       int outOff)
    {
        finish();
        Pack.intToBigEndian(this.V[0], out, outOff + 0);
        Pack.intToBigEndian(this.V[1], out, outOff + 4);
        Pack.intToBigEndian(this.V[2], out, outOff + 8);
        Pack.intToBigEndian(this.V[3], out, outOff + 12);
        Pack.intToBigEndian(this.V[4], out, outOff + 16);
        Pack.intToBigEndian(this.V[5], out, outOff + 20);
        Pack.intToBigEndian(this.V[6], out, outOff + 24);
        Pack.intToBigEndian(this.V[7], out, outOff + 28);
        reset();
        return DIGEST_LENGTH;
    }

    /**
     * Consumes 4 input bytes as one big-endian word; triggers block
     * processing once 16 words (one full block) have accumulated.
     */
    protected void processWord(byte[] in,
                               int inOff)
    {
        // Note: Inlined for performance
        // this.inwords[xOff] = Pack.bigEndianToInt(in, inOff);
        int n = (((in[inOff] & 0xff) << 24) |
                 ((in[++inOff] & 0xff) << 16) |
                 ((in[++inOff] & 0xff) << 8) |
                 ((in[++inOff] & 0xff)));
        this.inwords[this.xOff] = n;
        ++this.xOff;
        if (this.xOff >= 16)
        {
            processBlock();
        }
    }

    /**
     * Pads the final block and appends the 64-bit message length (in bits)
     * in the last two word slots.
     */
    protected void processLength(long bitLength)
    {
        if (this.xOff > (BLOCK_SIZE - 2))
        {
            // xOff == 15 --> can't fit the 64 bit length field at tail..
            this.inwords[this.xOff] = 0; // fill with zero
            ++this.xOff;
            processBlock();
        }
        // Fill with zero words, until reach 2nd to last slot
        while (this.xOff < (BLOCK_SIZE - 2))
        {
            this.inwords[this.xOff] = 0;
            ++this.xOff;
        }
        // Store input data length in BITS
        this.inwords[this.xOff++] = (int)(bitLength >>> 32);
        this.inwords[this.xOff++] = (int)(bitLength);
    }

    /*
    3.4.2. Constants
    Tj = 79cc4519 when 0 <= j <= 15
    Tj = 7a879d8a when 16 <= j <= 63

    3.4.3. Boolean function
    FFj(X;Y;Z) = X XOR Y XOR Z                       when 0 <= j <= 15
               = (X AND Y) OR (X AND Z) OR (Y AND Z) when 16 <= j <= 63
    GGj(X;Y;Z) = X XOR Y XOR Z                       when 0 <= j <= 15
               = (X AND Y) OR (NOT X AND Z)          when 16 <= j <= 63
    The X, Y, Z in the formula are words.

    3.4.4. Permutation function
    P0(X) = X XOR (X <<< 9) XOR (X <<< 17)   ## ROLL, not SHIFT
    P1(X) = X XOR (X <<< 15) XOR (X <<< 23)  ## ROLL, not SHIFT
    The X in the formula is a word.

    ----------
    Each ROLL converted to Java expression:
    ROLL 9  : ((x << 9)  | (x >>> (32-9)))
    ROLL 17 : ((x << 17) | (x >>> (32-17)))
    ROLL 15 : ((x << 15) | (x >>> (32-15)))
    ROLL 23 : ((x << 23) | (x >>> (32-23)))
    */

    // Permutation P0, applied in the state update (3.4.4).
    private int P0(final int x)
    {
        final int r9 = ((x << 9) | (x >>> (32 - 9)));
        final int r17 = ((x << 17) | (x >>> (32 - 17)));
        return (x ^ r9 ^ r17);
    }

    // Permutation P1, applied in the message expansion (3.4.4).
    private int P1(final int x)
    {
        final int r15 = ((x << 15) | (x >>> (32 - 15)));
        final int r23 = ((x << 23) | (x >>> (32 - 23)));
        return (x ^ r15 ^ r23);
    }

    // Boolean function FF for rounds 0..15 (XOR form).
    private int FF0(final int x, final int y, final int z)
    {
        return (x ^ y ^ z);
    }

    // Boolean function FF for rounds 16..63 (majority form).
    private int FF1(final int x, final int y, final int z)
    {
        return ((x & y) | (x & z) | (y & z));
    }

    // Boolean function GG for rounds 0..15 (XOR form).
    private int GG0(final int x, final int y, final int z)
    {
        return (x ^ y ^ z);
    }

    // Boolean function GG for rounds 16..63 (choice form).
    private int GG1(final int x, final int y, final int z)
    {
        return ((x & y) | ((~x) & z));
    }

    /**
     * Compression function CF: consumes the 16-word block in 'inwords' and
     * updates the chaining state V, then rewinds the block buffer.
     */
    protected void processBlock()
    {
        // Message expansion: W[0..67].
        for (int j = 0; j < 16; ++j)
        {
            this.W[j] = this.inwords[j];
        }
        for (int j = 16; j < 68; ++j)
        {
            int wj3 = this.W[j - 3];
            int r15 = ((wj3 << 15) | (wj3 >>> (32 - 15)));
            int wj13 = this.W[j - 13];
            int r7 = ((wj13 << 7) | (wj13 >>> (32 - 7)));
            this.W[j] = P1(this.W[j - 16] ^ this.W[j - 9] ^ r15) ^ r7 ^ this.W[j - 6];
        }
        // Derived schedule W'[0..63].
        for (int j = 0; j < 64; ++j)
        {
            this.W1[j] = this.W[j] ^ this.W[j + 4];
        }
        int A = this.V[0];
        int B = this.V[1];
        int C = this.V[2];
        int D = this.V[3];
        int E = this.V[4];
        int F = this.V[5];
        int G = this.V[6];
        int H = this.V[7];
        // Rounds 0..15 use the XOR forms FF0/GG0.
        for (int j = 0; j < 16; ++j)
        {
            int a12 = ((A << 12) | (A >>> (32 - 12)));
            int s1_ = a12 + E + T[j];
            int SS1 = ((s1_ << 7) | (s1_ >>> (32 - 7)));
            int SS2 = SS1 ^ a12;
            int TT1 = FF0(A, B, C) + D + SS2 + this.W1[j];
            int TT2 = GG0(E, F, G) + H + SS1 + this.W[j];
            D = C;
            C = ((B << 9) | (B >>> (32 - 9)));
            B = A;
            A = TT1;
            H = G;
            G = ((F << 19) | (F >>> (32 - 19)));
            F = E;
            E = P0(TT2);
        }
        // Different FF,GG functions on rounds 16..63
        for (int j = 16; j < 64; ++j)
        {
            int a12 = ((A << 12) | (A >>> (32 - 12)));
            int s1_ = a12 + E + T[j];
            int SS1 = ((s1_ << 7) | (s1_ >>> (32 - 7)));
            int SS2 = SS1 ^ a12;
            int TT1 = FF1(A, B, C) + D + SS2 + this.W1[j];
            int TT2 = GG1(E, F, G) + H + SS1 + this.W[j];
            D = C;
            C = ((B << 9) | (B >>> (32 - 9)));
            B = A;
            A = TT1;
            H = G;
            G = ((F << 19) | (F >>> (32 - 19)));
            F = E;
            E = P0(TT2);
        }
        // Feed-forward: XOR the round output back into the chaining state.
        this.V[0] ^= A;
        this.V[1] ^= B;
        this.V[2] ^= C;
        this.V[3] ^= D;
        this.V[4] ^= E;
        this.V[5] ^= F;
        this.V[6] ^= G;
        this.V[7] ^= H;
        this.xOff = 0;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.cache.spring;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Objects;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.IgniteLock;
import org.apache.ignite.IgniteSpring;
import org.apache.ignite.Ignition;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.configuration.NearCacheConfiguration;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.springframework.cache.Cache;
import org.springframework.cache.CacheManager;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.context.ApplicationListener;
import org.springframework.context.event.ContextRefreshedEvent;
import org.springframework.context.event.EventListener;
/**
* Implementation of Spring cache abstraction based on Ignite cache.
* <h1 class="header">Overview</h1>
* Spring cache abstraction allows to enable caching for Java methods
* so that the result of a method execution is stored in some storage. If
* later the same method is called with the same set of parameters,
* the result will be retrieved from that storage instead of actually
* executing the method. For more information, refer to
* <a href="http://docs.spring.io/spring/docs/current/spring-framework-reference/html/cache.html">
* Spring Cache Abstraction documentation</a>.
* <h1 class="header">How To Enable Caching</h1>
* To enable caching based on Ignite cache in your Spring application,
* you will need to do the following:
* <ul>
* <li>
* Start an Ignite node with proper configuration in embedded mode
* (i.e., in the same JVM where the application is running). It can
* already have predefined caches, but it's not required - caches
* will be created automatically on first access if needed.
* </li>
* <li>
* Configure {@code SpringCacheManager} as a cache provider
* in the Spring application context.
* </li>
* </ul>
* {@code SpringCacheManager} can start a node itself on its startup
* based on provided Ignite configuration. You can provide path to a
* Spring configuration XML file, like below (path can be absolute or
* relative to {@code IGNITE_HOME}):
* <pre name="code" class="xml">
* <beans xmlns="http://www.springframework.org/schema/beans"
* xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
* xmlns:cache="http://www.springframework.org/schema/cache"
* xsi:schemaLocation="
* http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd
* http://www.springframework.org/schema/cache http://www.springframework.org/schema/cache/spring-cache.xsd">
* <-- Provide configuration file path. -->
* <bean id="cacheManager" class="org.apache.ignite.cache.spring.SpringCacheManager">
* <property name="configurationPath" value="examples/config/spring-cache.xml"/>
* </bean>
*
* <-- Use annotation-driven caching configuration. -->
* <cache:annotation-driven/>
* </beans>
* </pre>
* Or you can provide a {@link IgniteConfiguration} bean, like below:
* <pre name="code" class="xml">
* <beans xmlns="http://www.springframework.org/schema/beans"
* xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
* xmlns:cache="http://www.springframework.org/schema/cache"
* xsi:schemaLocation="
* http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd
* http://www.springframework.org/schema/cache http://www.springframework.org/schema/cache/spring-cache.xsd">
* <-- Provide configuration bean. -->
* <bean id="cacheManager" class="org.apache.ignite.cache.spring.SpringCacheManager">
* <property name="configuration">
* <bean id="gridCfg" class="org.apache.ignite.configuration.IgniteConfiguration">
* ...
* </bean>
* </property>
* </bean>
*
* <-- Use annotation-driven caching configuration. -->
* <cache:annotation-driven/>
* </beans>
* </pre>
* Note that providing both configuration path and configuration bean is illegal
* and results in {@link IllegalArgumentException}.
* <p>
* If you already have Ignite node running within your application,
* simply provide correct Ignite instance name, like below (if there is no Grid
* instance with such name, exception will be thrown):
* <pre name="code" class="xml">
* <beans xmlns="http://www.springframework.org/schema/beans"
* xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
* xmlns:cache="http://www.springframework.org/schema/cache"
* xsi:schemaLocation="
* http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd
* http://www.springframework.org/schema/cache http://www.springframework.org/schema/cache/spring-cache.xsd">
* <-- Provide Ignite instance name. -->
* <bean id="cacheManager" class="org.apache.ignite.cache.spring.SpringCacheManager">
* <property name="igniteInstanceName" value="myGrid"/>
* </bean>
*
* <-- Use annotation-driven caching configuration. -->
* <cache:annotation-driven/>
* </beans>
* </pre>
* This can be used, for example, when you are running your application
* in a J2EE Web container and use {@ignitelink org.apache.ignite.startup.servlet.ServletContextListenerStartup}
* for node startup.
* <p>
* If neither {@link #setConfigurationPath(String) configurationPath},
* {@link #setConfiguration(IgniteConfiguration) configuration}, nor
* {@link #setIgniteInstanceName(String) igniteInstanceName} are provided, cache manager
* will try to use default Grid instance (the one with the {@code null}
* name). If it doesn't exist, exception will be thrown.
* <h1>Starting Remote Nodes</h1>
* Keep in mind that the node started inside your application is an entry point
* to the whole topology it connects to. You can start as many remote standalone
* nodes as you need using {@code bin/ignite.{sh|bat}} scripts provided in
* Ignite distribution, and all these nodes will participate
* in caching the data.
*/
public class SpringCacheManager implements CacheManager, ApplicationListener<ContextRefreshedEvent>, ApplicationContextAware {
    /** Default locks count. */
    private static final int DEFAULT_LOCKS_COUNT = 512;

    /** IgniteLock name prefix. */
    private static final String SPRING_LOCK_NAME_PREFIX = "springSync";

    /** Caches map. */
    private final ConcurrentMap<String, SpringCache> caches = new ConcurrentHashMap<>();

    /** Grid configuration file path. */
    private String cfgPath;

    /** Ignite configuration. */
    private IgniteConfiguration cfg;

    /** Ignite instance name. */
    private String igniteInstanceName;

    /** Count of IgniteLocks that are used for synchronized value loading. */
    private int locksCnt = DEFAULT_LOCKS_COUNT;

    /** Dynamic cache configuration template. */
    private CacheConfiguration<Object, Object> dynamicCacheCfg;

    /** Dynamic near cache configuration template. */
    private NearCacheConfiguration<Object, Object> dynamicNearCacheCfg;

    /** Ignite instance. */
    private Ignite ignite;

    /** Spring context. */
    private ApplicationContext springCtx;

    /** Locks for value loading to support the sync option, keyed by lock index. */
    private final ConcurrentHashMap<Integer, IgniteLock> locks = new ConcurrentHashMap<>();

    /** {@inheritDoc} */
    @Override public void setApplicationContext(ApplicationContext ctx) {
        this.springCtx = ctx;
    }

    /**
     * Gets configuration file path.
     *
     * @return Grid configuration file path.
     */
    public String getConfigurationPath() {
        return cfgPath;
    }

    /**
     * Sets configuration file path.
     *
     * @param cfgPath Grid configuration file path.
     */
    public void setConfigurationPath(String cfgPath) {
        this.cfgPath = cfgPath;
    }

    /**
     * Gets configuration bean.
     *
     * @return Grid configuration bean.
     */
    public IgniteConfiguration getConfiguration() {
        return cfg;
    }

    /**
     * Sets configuration bean.
     *
     * @param cfg Grid configuration bean.
     */
    public void setConfiguration(IgniteConfiguration cfg) {
        this.cfg = cfg;
    }

    /**
     * Gets grid name.
     *
     * @return Grid name.
     * @deprecated Use {@link #getIgniteInstanceName()}.
     */
    @Deprecated
    public String getGridName() {
        return getIgniteInstanceName();
    }

    /**
     * Sets grid name.
     *
     * @param gridName Grid name.
     * @deprecated Use {@link #setIgniteInstanceName(String)}.
     */
    @Deprecated
    public void setGridName(String gridName) {
        setIgniteInstanceName(gridName);
    }

    /**
     * Gets Ignite instance name.
     *
     * @return Ignite instance name.
     */
    public String getIgniteInstanceName() {
        return igniteInstanceName;
    }

    /**
     * Sets Ignite instance name.
     *
     * @param igniteInstanceName Ignite instance name.
     */
    public void setIgniteInstanceName(String igniteInstanceName) {
        this.igniteInstanceName = igniteInstanceName;
    }

    /**
     * Gets locks count.
     *
     * @return Locks count.
     */
    public int getLocksCount() {
        return locksCnt;
    }

    /**
     * Sets locks count.
     *
     * @param locksCnt Locks count.
     */
    public void setLocksCount(int locksCnt) {
        this.locksCnt = locksCnt;
    }

    /**
     * Gets dynamic cache configuration template.
     *
     * @return Dynamic cache configuration template.
     */
    public CacheConfiguration<Object, Object> getDynamicCacheConfiguration() {
        return dynamicCacheCfg;
    }

    /**
     * Sets dynamic cache configuration template.
     *
     * @param dynamicCacheCfg Dynamic cache configuration template.
     */
    public void setDynamicCacheConfiguration(CacheConfiguration<Object, Object> dynamicCacheCfg) {
        this.dynamicCacheCfg = dynamicCacheCfg;
    }

    /**
     * Gets dynamic near cache configuration template.
     *
     * @return Dynamic near cache configuration template.
     */
    public NearCacheConfiguration<Object, Object> getDynamicNearCacheConfiguration() {
        return dynamicNearCacheCfg;
    }

    /**
     * Sets dynamic near cache configuration template.
     *
     * @param dynamicNearCacheCfg Dynamic near cache configuration template.
     */
    public void setDynamicNearCacheConfiguration(NearCacheConfiguration<Object, Object> dynamicNearCacheCfg) {
        this.dynamicNearCacheCfg = dynamicNearCacheCfg;
    }

    /** {@inheritDoc} */
    @Override public void onApplicationEvent(ContextRefreshedEvent event) {
        assert ignite == null;

        // Supplying both a configuration path and a configuration bean is ambiguous.
        if (cfgPath != null && cfg != null) {
            throw new IllegalArgumentException("Both 'configurationPath' and 'configuration' are " +
                "provided. Set only one of these properties if you need to start a Ignite node inside of " +
                "SpringCacheManager. If you already have a node running, omit both of them and set " +
                "'igniteInstanceName' property.");
        }

        try {
            // Start an embedded node from a config path or bean, otherwise attach
            // to an already-running instance by name.
            if (cfgPath != null)
                ignite = IgniteSpring.start(cfgPath, springCtx);
            else if (cfg != null)
                ignite = IgniteSpring.start(cfg, springCtx);
            else
                ignite = Ignition.ignite(igniteInstanceName);
        }
        catch (IgniteCheckedException e) {
            throw U.convertException(e);
        }
    }

    /** {@inheritDoc} */
    @Override public Cache getCache(String name) {
        assert ignite != null;

        SpringCache cache = caches.get(name);

        if (cache == null) {
            CacheConfiguration<Object, Object> cacheCfg = dynamicCacheCfg != null ?
                new CacheConfiguration<>(dynamicCacheCfg) : new CacheConfiguration<>();

            NearCacheConfiguration<Object, Object> nearCacheCfg = dynamicNearCacheCfg != null ?
                new NearCacheConfiguration<>(dynamicNearCacheCfg) : null;

            cacheCfg.setName(name);

            cache = new SpringCache(nearCacheCfg != null ? ignite.getOrCreateCache(cacheCfg, nearCacheCfg) :
                ignite.getOrCreateCache(cacheCfg), this);

            // Another thread may have registered the cache concurrently; keep the first one.
            SpringCache old = caches.putIfAbsent(name, cache);

            if (old != null)
                cache = old;
        }

        return cache;
    }

    /** {@inheritDoc} */
    @Override public Collection<String> getCacheNames() {
        assert ignite != null;

        return new ArrayList<>(caches.keySet());
    }

    /**
     * Provides {@link org.apache.ignite.IgniteLock} for specified cache name and key.
     *
     * @param name Cache name.
     * @param key Key.
     * @return {@link org.apache.ignite.IgniteLock}
     */
    IgniteLock getSyncLock(String name, Object key) {
        int hash = Objects.hash(name, key);

        // Math.floorMod keeps the index in [0, locksCnt) even for negative hashes.
        // Plain '%' could yield a negative index, silently allocating up to twice
        // the configured number of locks under names like "springSync-5".
        final int idx = Math.floorMod(hash, getLocksCount());

        return locks.computeIfAbsent(idx, i -> ignite.reentrantLock(SPRING_LOCK_NAME_PREFIX + idx, true, false, true));
    }
}
| |
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package androidx.leanback.app;
import android.animation.Animator;
import android.animation.ValueAnimator;
import android.app.Activity;
import android.content.Context;
import android.content.res.Resources;
import android.content.res.TypedArray;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.ColorFilter;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.graphics.PixelFormat;
import android.graphics.drawable.ColorDrawable;
import android.graphics.drawable.Drawable;
import android.graphics.drawable.LayerDrawable;
import android.os.Build;
import android.os.Handler;
import android.util.Log;
import android.view.View;
import android.view.Window;
import android.view.animation.AnimationUtils;
import android.view.animation.Interpolator;
import androidx.annotation.ColorInt;
import androidx.annotation.NonNull;
import androidx.core.content.ContextCompat;
import androidx.core.graphics.drawable.DrawableCompat;
import androidx.interpolator.view.animation.FastOutLinearInInterpolator;
import androidx.leanback.R;
import androidx.leanback.widget.BackgroundHelper;
import java.lang.ref.WeakReference;
/**
* Supports background image continuity between multiple Activities.
*
* <p>An Activity should instantiate a BackgroundManager and {@link #attach}
* to the Activity's window. When the Activity is started, the background is
* initialized to the current background values stored in a continuity service.
* The background continuity service is updated as the background is updated.
*
* <p>At some point, for example when it is stopped, the Activity may release
* its background state.
*
* <p>When an Activity is resumed, if the BackgroundManager has not been
* released, the continuity service is updated from the BackgroundManager state.
* If the BackgroundManager was released, the BackgroundManager inherits the
* current state from the continuity service.
*
* <p>When the last Activity is destroyed, the background state is reset.
*
* <p>Backgrounds consist of several layers, from back to front:
* <ul>
* <li>the background Drawable of the theme</li>
* <li>a solid color (set via {@link #setColor})</li>
* <li>two Drawables, previous and current (set via {@link #setBitmap} or
* {@link #setDrawable}), which may be in transition</li>
* </ul>
*
* <p>BackgroundManager holds references to potentially large bitmap Drawables.
* Call {@link #release} to release these references when the Activity is not
* visible.
*/
// TODO: support for multiple app processes requires a proper android service
// instead of the shared memory "service" implemented here. Such a service could
// support continuity between fragments of different applications if desired.
public final class BackgroundManager {
static final String TAG = "BackgroundManager";
static final boolean DEBUG = false;
// Alpha value of a fully opaque drawable (0..255 scale).
static final int FULL_ALPHA = 255;
// Delay in ms before a deferred background change runs — presumably paired
// with postChangeRunnable(); the scheduling code is outside this chunk, confirm.
private static final int CHANGE_BG_DELAY_MS = 500;
// Duration in ms of the background transition animation — TODO confirm usage.
private static final int FADE_DURATION = 500;
private static final String FRAGMENT_TAG = BackgroundManager.class.getCanonicalName();
// Host Activity this manager is attached to.
Activity mContext;
Handler mHandler;
private View mBgView;
// Shared in-process continuity "service" carrying state across Activities.
private BackgroundContinuityService mService;
// Resource id of the theme-provided background drawable.
private int mThemeDrawableResourceId;
// Headless fragment tracking the host Activity's lifecycle.
private BackgroundFragment mFragmentState;
private boolean mAutoReleaseOnStop = true;
// NOTE(review): presumably display dimensions used to size background bitmaps — verify.
private int mHeightPx;
private int mWidthPx;
// Current solid-color background layer.
int mBackgroundColor;
// Current background drawable layer.
Drawable mBackgroundDrawable;
private boolean mAttached;
// Timestamp of the last background change — presumably used for throttling; confirm.
private long mLastSetTime;
private final Interpolator mAccelerateInterpolator;
private final Interpolator mDecelerateInterpolator;
final ValueAnimator mAnimator;
/**
 * Drawable that renders a Bitmap through a Matrix, with a shareable
 * ConstantState so framework-created copies share the underlying bitmap.
 */
static class BitmapDrawable extends Drawable {
    static final class ConstantState extends Drawable.ConstantState {
        final Bitmap mBitmap;
        final Matrix mMatrix;
        final Paint mPaint = new Paint();

        ConstantState(Bitmap bitmap, Matrix matrix) {
            mBitmap = bitmap;
            // Fall back to the identity matrix when none is supplied.
            mMatrix = matrix != null ? matrix : new Matrix();
            mPaint.setFilterBitmap(true);
        }

        ConstantState(ConstantState copyFrom) {
            mBitmap = copyFrom.mBitmap;
            // Deep-copy the matrix so a mutated copy doesn't affect the source.
            mMatrix = copyFrom.mMatrix != null ? new Matrix(copyFrom.mMatrix) : new Matrix();
            if (copyFrom.mPaint.getAlpha() != FULL_ALPHA) {
                mPaint.setAlpha(copyFrom.mPaint.getAlpha());
            }
            if (copyFrom.mPaint.getColorFilter() != null) {
                mPaint.setColorFilter(copyFrom.mPaint.getColorFilter());
            }
            mPaint.setFilterBitmap(true);
        }

        @Override
        public Drawable newDrawable() {
            return new BitmapDrawable(this);
        }

        @Override
        public int getChangingConfigurations() {
            return 0;
        }
    }

    ConstantState mState;
    // Set once mutate() has cloned the shared state (copy-on-write flag).
    boolean mMutated;

    BitmapDrawable(Resources resources, Bitmap bitmap) {
        this(resources, bitmap, null);
    }

    BitmapDrawable(Resources resources, Bitmap bitmap, Matrix matrix) {
        mState = new ConstantState(bitmap, matrix);
    }

    BitmapDrawable(ConstantState state) {
        mState = state;
    }

    Bitmap getBitmap() {
        return mState.mBitmap;
    }

    @Override
    public void draw(Canvas canvas) {
        if (mState.mBitmap == null) {
            return;
        }
        // Translucent alpha and a color filter cannot be combined in one pass.
        if (mState.mPaint.getAlpha() < FULL_ALPHA && mState.mPaint.getColorFilter() != null) {
            throw new IllegalStateException("Can't draw with translucent alpha and color filter");
        }
        canvas.drawBitmap(mState.mBitmap, mState.mMatrix, mState.mPaint);
    }

    @Override
    public int getOpacity() {
        return android.graphics.PixelFormat.TRANSLUCENT;
    }

    @Override
    public void setAlpha(int alpha) {
        mutate();
        if (mState.mPaint.getAlpha() != alpha) {
            mState.mPaint.setAlpha(alpha);
            invalidateSelf();
        }
    }

    /**
     * Applies the color filter to this drawable's (mutated) state and invalidates.
     * NOTE(review): the previous comment claimed invalidateSelf() was NOT called
     * here, contradicting the code below; the comment was stale, code unchanged.
     */
    @Override
    public void setColorFilter(ColorFilter cf) {
        mutate();
        mState.mPaint.setColorFilter(cf);
        invalidateSelf();
    }

    @Override
    public ColorFilter getColorFilter() {
        return mState.mPaint.getColorFilter();
    }

    @Override
    public ConstantState getConstantState() {
        return mState;
    }

    @NonNull
    @Override
    public Drawable mutate() {
        // Copy-on-write: first mutation clones the shared state.
        if (!mMutated) {
            mMutated = true;
            mState = new ConstantState(mState);
        }
        return this;
    }
}
/**
 * Pairs a Drawable with the alpha the manager wants it drawn at; the
 * drawable's own alpha is only overridden temporarily during drawing.
 */
static final class DrawableWrapper {
    final Drawable mDrawable;
    int mAlpha = FULL_ALPHA;

    public DrawableWrapper(Drawable drawable) {
        this.mDrawable = drawable;
    }

    // Copy constructor that keeps the previous wrapper's alpha for a new drawable.
    public DrawableWrapper(DrawableWrapper wrapper, Drawable drawable) {
        this(drawable);
        this.mAlpha = wrapper.mAlpha;
    }

    public Drawable getDrawable() {
        return mDrawable;
    }

    // Valid only when the wrapped drawable is a ColorDrawable.
    public void setColor(int color) {
        final ColorDrawable colorLayer = (ColorDrawable) mDrawable;
        colorLayer.setColor(color);
    }
}
/**
 * LayerDrawable that tracks a per-layer alpha (via {@link DrawableWrapper})
 * in addition to its own overall alpha, multiplying both with each child's
 * own alpha at draw time so background layers can be cross-faded.
 */
static final class TranslucentLayerDrawable extends LayerDrawable {
    DrawableWrapper[] mWrapper;

    int mAlpha = FULL_ALPHA;

    // Set while draw() temporarily overrides a child's alpha, so the resulting
    // invalidation callbacks are swallowed (see invalidateDrawable()).
    boolean mSuspendInvalidation;

    WeakReference<BackgroundManager> mManagerWeakReference;

    TranslucentLayerDrawable(BackgroundManager manager, Drawable[] drawables) {
        super(drawables);

        // Parameterized WeakReference (the original used the raw type, an
        // unchecked assignment); weak so the drawable never pins the
        // Activity-scoped manager in memory.
        mManagerWeakReference = new WeakReference<>(manager);

        int count = drawables.length;
        mWrapper = new DrawableWrapper[count];
        for (int i = 0; i < count; i++) {
            mWrapper[i] = new DrawableWrapper(drawables[i]);
        }
    }

    @Override
    public void setAlpha(int alpha) {
        if (mAlpha != alpha) {
            mAlpha = alpha;
            invalidateSelf();
            // Notify the owning manager, if it is still alive.
            BackgroundManager manager = mManagerWeakReference.get();
            if (manager != null) {
                manager.postChangeRunnable();
            }
        }
    }

    // Sets the per-layer alpha for a single wrapper slot.
    void setWrapperAlpha(int wrapperIndex, int alpha) {
        if (mWrapper[wrapperIndex] != null) {
            mWrapper[wrapperIndex].mAlpha = alpha;
            invalidateSelf();
        }
    }

    // Queried by system transitions
    @Override
    public int getAlpha() {
        return mAlpha;
    }

    @Override
    public Drawable mutate() {
        Drawable drawable = super.mutate();
        int count = getNumberOfLayers();
        for (int i = 0; i < count; i++) {
            if (mWrapper[i] != null) {
                // Re-wrap the (possibly replaced) mutated child, preserving its alpha.
                mWrapper[i] = new DrawableWrapper(mWrapper[i], getDrawable(i));
            }
        }
        return drawable;
    }

    @Override
    public int getOpacity() {
        return PixelFormat.TRANSLUCENT;
    }

    @Override
    public boolean setDrawableByLayerId(int id, Drawable drawable) {
        return updateDrawable(id, drawable) != null;
    }

    /**
     * Replaces the drawable of the layer with the given id and resets its
     * wrapper. Returns the new wrapper, or null if no layer has that id.
     */
    public DrawableWrapper updateDrawable(int id, Drawable drawable) {
        super.setDrawableByLayerId(id, drawable);
        for (int i = 0; i < getNumberOfLayers(); i++) {
            if (getId(i) == id) {
                mWrapper[i] = new DrawableWrapper(drawable);
                // Must come after mWrapper was updated so it can be seen by updateColorFilter
                invalidateSelf();
                return mWrapper[i];
            }
        }
        return null;
    }

    /**
     * Clears the layer with the given id, substituting a transparent
     * placeholder drawable if one is not already in place.
     */
    public void clearDrawable(int id, Context context) {
        for (int i = 0; i < getNumberOfLayers(); i++) {
            if (getId(i) == id) {
                mWrapper[i] = null;
                if (!(getDrawable(i) instanceof EmptyDrawable)) {
                    super.setDrawableByLayerId(id, createEmptyDrawable(context));
                }
                break;
            }
        }
    }

    // Returns the layer index for the given id, or -1 if absent.
    public int findWrapperIndexById(int id) {
        for (int i = 0; i < getNumberOfLayers(); i++) {
            if (getId(i) == id) {
                return i;
            }
        }
        return -1;
    }

    @Override
    public void invalidateDrawable(Drawable who) {
        // Prevent invalidate when temporarily change child drawable's alpha in draw()
        if (!mSuspendInvalidation) {
            super.invalidateDrawable(who);
        }
    }

    @Override
    public void draw(Canvas canvas) {
        for (int i = 0; i < mWrapper.length; i++) {
            final Drawable d;
            // For each child drawable, multiply the wrapper's alpha and this
            // LayerDrawable's alpha into the child's own alpha, temporarily
            // using mSuspendInvalidation to suppress invalidate events.
            if (mWrapper[i] != null && (d = mWrapper[i].getDrawable()) != null) {
                int alpha = Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT
                        ? DrawableCompat.getAlpha(d) : FULL_ALPHA;
                final int savedAlpha = alpha;
                // Count how many extra 0..255 factors were multiplied in, so we
                // normalize by FULL_ALPHA the matching number of times.
                int multiple = 0;
                if (mAlpha < FULL_ALPHA) {
                    alpha = alpha * mAlpha;
                    multiple++;
                }
                if (mWrapper[i].mAlpha < FULL_ALPHA) {
                    alpha = alpha * mWrapper[i].mAlpha;
                    multiple++;
                }
                if (multiple == 0) {
                    d.draw(canvas);
                } else {
                    if (multiple == 1) {
                        alpha = alpha / FULL_ALPHA;
                    } else if (multiple == 2) {
                        alpha = alpha / (FULL_ALPHA * FULL_ALPHA);
                    }
                    try {
                        mSuspendInvalidation = true;
                        d.setAlpha(alpha);
                        d.draw(canvas);
                        d.setAlpha(savedAlpha);
                    } finally {
                        mSuspendInvalidation = false;
                    }
                }
            }
        }
    }
}
/**
 * Clones the layer structure of {@code layerDrawable} into a new
 * {@link TranslucentLayerDrawable} owned by this manager, preserving each
 * layer's child drawable and id.
 */
TranslucentLayerDrawable createTranslucentLayerDrawable(
        LayerDrawable layerDrawable) {
    final int layerCount = layerDrawable.getNumberOfLayers();
    final Drawable[] children = new Drawable[layerCount];
    for (int index = 0; index < layerCount; index++) {
        children[index] = layerDrawable.getDrawable(index);
    }
    final TranslucentLayerDrawable translucent =
            new TranslucentLayerDrawable(this, children);
    for (int index = 0; index < layerCount; index++) {
        translucent.setId(index, layerDrawable.getId(index));
    }
    return translucent;
}
// Composite background drawable; null until lazyInit() runs and again after release().
TranslucentLayerDrawable mLayerDrawable;
// Layer indices of the fade-in / fade-out image layers inside mLayerDrawable.
int mImageInWrapperIndex;
int mImageOutWrapperIndex;
// Pending background-change task, if any; managed by setDrawableInternal()/release().
ChangeBackgroundRunnable mChangeRunnable;
private boolean mChangeRunnablePending;
// When the fade-in animation ends: clears the fade-out layer and re-posts any
// pending change runnable (posted via mHandler, off the animation callback).
private final Animator.AnimatorListener mAnimationListener = new Animator.AnimatorListener() {
final Runnable mRunnable = new Runnable() {
@Override
public void run() {
postChangeRunnable();
}
};
@Override
public void onAnimationStart(Animator animation) {
}
@Override
public void onAnimationRepeat(Animator animation) {
}
@Override
public void onAnimationEnd(Animator animation) {
if (mLayerDrawable != null) {
mLayerDrawable.clearDrawable(R.id.background_imageout, mContext);
}
mHandler.post(mRunnable);
}
@Override
public void onAnimationCancel(Animator animation) {
}
};
// Drives the alpha of the fade-in image layer from the running ValueAnimator.
private final ValueAnimator.AnimatorUpdateListener mAnimationUpdateListener =
new ValueAnimator.AnimatorUpdateListener() {
@Override
public void onAnimationUpdate(ValueAnimator animation) {
int fadeInAlpha = (Integer) animation.getAnimatedValue();
// -1 means no image-in layer exists (see findWrapperIndexById).
if (mImageInWrapperIndex != -1) {
mLayerDrawable.setWrapperAlpha(mImageInWrapperIndex, fadeInAlpha);
}
}
};
/**
 * Shared memory continuity service.
 * Process-wide singleton that remembers the last background color/drawable so
 * the background carries over between Activities. Reference counted: each
 * BackgroundManager acquires via getInstance() and releases via unref();
 * state is reset when the count drops to zero.
 * NOTE(review): not synchronized - presumably only touched on the main
 * thread; confirm before using from background threads.
 */
private static class BackgroundContinuityService {
private static final String TAG = "BackgroundContinuity";
private static final boolean DEBUG = BackgroundManager.DEBUG;
private static BackgroundContinuityService sService = new BackgroundContinuityService();
private int mColor;
private Drawable mDrawable;
// Number of live BackgroundManager references; see getInstance()/unref().
private int mCount;
/** Single cache of theme drawable */
private int mLastThemeDrawableId;
private WeakReference<Drawable.ConstantState> mLastThemeDrawableState;
private BackgroundContinuityService() {
reset();
}
// Clears the remembered background; the theme-drawable cache is kept.
private void reset() {
mColor = Color.TRANSPARENT;
mDrawable = null;
}
// Returns the singleton and takes a reference on it.
public static BackgroundContinuityService getInstance() {
final int count = sService.mCount++;
if (DEBUG) Log.v(TAG, "Returning instance with new count " + count);
return sService;
}
// Drops a reference; resets shared state when the last holder releases.
public void unref() {
if (mCount <= 0) throw new IllegalStateException("Can't unref, count " + mCount);
if (--mCount == 0) {
if (DEBUG) Log.v(TAG, "mCount is zero, resetting");
reset();
}
}
public int getColor() {
return mColor;
}
public Drawable getDrawable() {
return mDrawable;
}
// Setting a color clears any remembered drawable; they are mutually exclusive.
public void setColor(int color) {
mColor = color;
mDrawable = null;
}
public void setDrawable(Drawable drawable) {
mDrawable = drawable;
}
// Loads the theme drawable, reusing the cached ConstantState when the same
// resource id is requested again and the state has not been collected.
public Drawable getThemeDrawable(Context context, int themeDrawableId) {
Drawable drawable = null;
if (mLastThemeDrawableState != null && mLastThemeDrawableId == themeDrawableId) {
Drawable.ConstantState drawableState = mLastThemeDrawableState.get();
if (DEBUG) Log.v(TAG, "got cached theme drawable state " + drawableState);
if (drawableState != null) {
drawable = drawableState.newDrawable();
}
}
if (drawable == null) {
drawable = ContextCompat.getDrawable(context, themeDrawableId);
if (DEBUG) Log.v(TAG, "loaded theme drawable " + drawable);
mLastThemeDrawableState = new WeakReference<Drawable.ConstantState>(
drawable.getConstantState());
mLastThemeDrawableId = themeDrawableId;
}
// No mutate required because this drawable is never manipulated.
return drawable;
}
}
/** Returns the fallback background: the solid color when one is set, else the theme drawable. */
Drawable getDefaultDrawable() {
    return mBackgroundColor != Color.TRANSPARENT
            ? new ColorDrawable(mBackgroundColor)
            : getThemeDrawable();
}

/**
 * Returns the window-background drawable from the theme, or an empty
 * placeholder when no theme resource is configured or it fails to load.
 */
private Drawable getThemeDrawable() {
    Drawable themed = null;
    if (mThemeDrawableResourceId != -1) {
        themed = mService.getThemeDrawable(mContext, mThemeDrawableResourceId);
    }
    return themed != null ? themed : createEmptyDrawable(mContext);
}
/**
 * Returns the BackgroundManager associated with the given Activity, creating
 * one on demand. Subsequent calls for the same Activity return the manager
 * held by its retained BackgroundFragment.
 */
public static BackgroundManager getInstance(Activity activity) {
    BackgroundFragment fragment = (BackgroundFragment) activity.getFragmentManager()
            .findFragmentByTag(FRAGMENT_TAG);
    BackgroundManager manager =
            fragment == null ? null : fragment.getBackgroundManager();
    if (manager != null) {
        return manager;
    }
    // Either no fragment exists yet, or it was restored by the FragmentManager
    // without a manager; create one that attaches itself to the fragment.
    return new BackgroundManager(activity);
}
// Private: obtain instances via getInstance(Activity). Wires up the continuity
// service, animator, interpolators, theme lookup, and the retained fragment
// that ties this manager to the Activity lifecycle.
private BackgroundManager(Activity activity) {
mContext = activity;
mService = BackgroundContinuityService.getInstance();
mHeightPx = mContext.getResources().getDisplayMetrics().heightPixels;
mWidthPx = mContext.getResources().getDisplayMetrics().widthPixels;
mHandler = new Handler();
Interpolator defaultInterpolator = new FastOutLinearInInterpolator();
mAccelerateInterpolator = AnimationUtils.loadInterpolator(mContext,
android.R.anim.accelerate_interpolator);
mDecelerateInterpolator = AnimationUtils.loadInterpolator(mContext,
android.R.anim.decelerate_interpolator);
// The animator drives the image-in layer's alpha from 0 up to FULL_ALPHA.
mAnimator = ValueAnimator.ofInt(0, FULL_ALPHA);
mAnimator.addListener(mAnimationListener);
mAnimator.addUpdateListener(mAnimationUpdateListener);
mAnimator.setInterpolator(defaultInterpolator);
// Default background falls back to the theme's windowBackground attribute.
TypedArray ta = activity.getTheme().obtainStyledAttributes(new int[] {
android.R.attr.windowBackground });
mThemeDrawableResourceId = ta.getResourceId(0, -1);
if (mThemeDrawableResourceId < 0) {
if (DEBUG) Log.v(TAG, "BackgroundManager no window background resource!");
}
ta.recycle();
createFragment(activity);
}
private void createFragment(Activity activity) {
// Use a fragment to ensure the background manager gets detached properly.
BackgroundFragment fragment = (BackgroundFragment) activity.getFragmentManager()
.findFragmentByTag(FRAGMENT_TAG);
if (fragment == null) {
fragment = new BackgroundFragment();
activity.getFragmentManager().beginTransaction().add(fragment, FRAGMENT_TAG).commit();
} else {
// A restored fragment may exist, but it must not already own a manager.
if (fragment.getBackgroundManager() != null) {
throw new IllegalStateException("Created duplicated BackgroundManager for same "
+ "activity, please use getInstance() instead");
}
}
fragment.setBackgroundManager(this);
mFragmentState = fragment;
}
/** Returns the wrapper of the fade-in image layer, or null before lazyInit() / after release(). */
DrawableWrapper getImageInWrapper() {
    if (mLayerDrawable == null) {
        return null;
    }
    return mLayerDrawable.mWrapper[mImageInWrapperIndex];
}

/** Returns the wrapper of the fade-out image layer, or null before lazyInit() / after release(). */
DrawableWrapper getImageOutWrapper() {
    if (mLayerDrawable == null) {
        return null;
    }
    return mLayerDrawable.mWrapper[mImageOutWrapperIndex];
}
/**
 * Synchronizes state when the owning Activity is started.
 * At that point the view becomes visible.
 */
void onActivityStart() {
updateImmediate();
}
// Activity onStop(): optionally drop drawable references to reduce memory use.
void onStop() {
if (isAutoReleaseOnStop()) {
release();
}
}
// Activity onResume(): retry any change runnable deferred while not resumed.
void onResume() {
if (DEBUG) Log.v(TAG, "onResume " + this);
postChangeRunnable();
}
// Pulls the shared color/drawable from the continuity service into this
// manager's fields and repaints immediately.
private void syncWithService() {
int color = mService.getColor();
Drawable drawable = mService.getDrawable();
if (DEBUG) Log.v(TAG, "syncWithService color " + Integer.toHexString(color)
+ " drawable " + drawable);
mBackgroundColor = color;
// A fresh mutated copy is made so this Activity's background can change
// without affecting other holders of the same constant state.
// NOTE(review): getConstantState() may return null for some drawables and
// would NPE here - confirm only state-backed drawables reach the service.
mBackgroundDrawable = drawable == null ? null :
drawable.getConstantState().newDrawable().mutate();
updateImmediate();
}
/**
 * Makes the background visible on the given Window. The background manager must be attached
 * when the background is set.
 */
public void attach(Window window) {
attachToViewInternal(window.getDecorView());
}
/**
 * Sets the resource id for the drawable to be shown when there is no background set.
 * Overrides the window background drawable from the theme. This should
 * be called before attaching.
 */
public void setThemeDrawableResourceId(int resourceId) {
mThemeDrawableResourceId = resourceId;
}
/**
 * Adds the composite drawable to the given view.
 */
public void attachToView(View sceneRoot) {
attachToViewInternal(sceneRoot);
// clear background to reduce overdraw since the View will act as background.
// Activity transition below O has ghost effect for null window background where we
// need set a transparent background to force redraw the whole window.
mContext.getWindow().getDecorView().setBackground(
Build.VERSION.SDK_INT >= 26 ? null : new ColorDrawable(Color.TRANSPARENT));
}
// Common attach path: remembers the target view, marks attached, and pulls
// the shared background state from the continuity service. Throws when
// already attached - callers must detach/release first.
void attachToViewInternal(View sceneRoot) {
if (mAttached) {
throw new IllegalStateException("Already attached to " + mBgView);
}
mBgView = sceneRoot;
mAttached = true;
syncWithService();
}
/**
 * Returns true if the background manager is currently attached; false otherwise.
 */
public boolean isAttached() {
return mAttached;
}
/**
 * Release references to Drawables and put the BackgroundManager into the
 * detached state. Called when the associated Activity is destroyed.
 */
void detach() {
if (DEBUG) Log.v(TAG, "detach " + this);
release();
mBgView = null;
mAttached = false;
// Drop our reference on the shared continuity service; it resets its state
// when the last manager releases.
if (mService != null) {
mService.unref();
mService = null;
}
}
/**
 * Release references to Drawable/Bitmap. Typically called in Activity onStop() to reduce memory
 * overhead when not visible. It's app's responsibility to restore the drawable/bitmap in
 * Activity onStart(). The method is automatically called in onStop() when
 * {@link #isAutoReleaseOnStop()} is true.
 * @see #setAutoReleaseOnStop(boolean)
 */
public void release() {
if (DEBUG) Log.v(TAG, "release " + this);
// Cancel any queued change and any in-flight fade animation before clearing
// the layers, so no callback touches a released drawable.
if (mChangeRunnable != null) {
mHandler.removeCallbacks(mChangeRunnable);
mChangeRunnable = null;
}
if (mAnimator.isStarted()) {
mAnimator.cancel();
}
if (mLayerDrawable != null) {
mLayerDrawable.clearDrawable(R.id.background_imagein, mContext);
mLayerDrawable.clearDrawable(R.id.background_imageout, mContext);
mLayerDrawable = null;
}
mBackgroundDrawable = null;
}
/**
 * Sets the drawable used as a dim layer.
 * @deprecated No longer support dim layer.
 */
@Deprecated
public void setDimLayer(Drawable drawable) {
}
/**
 * Returns the drawable used as a dim layer.
 * @deprecated No longer support dim layer.
 */
@Deprecated
public Drawable getDimLayer() {
return null;
}
/**
 * Returns the default drawable used as a dim layer.
 * @deprecated No longer support dim layer.
 */
@Deprecated
public Drawable getDefaultDimLayer() {
return ContextCompat.getDrawable(mContext, R.color.lb_background_protection);
}
// Posts the pending background-change runnable once all gating conditions
// below hold; otherwise it stays pending and is retried from onResume(),
// the animation-end callback, or the drawable's alpha-change callback.
void postChangeRunnable() {
if (mChangeRunnable == null || !mChangeRunnablePending) {
return;
}
// Postpone a pending change runnable until: no existing change animation in progress &&
// activity is resumed (in the foreground) && layerdrawable fully opaque.
// If the layerdrawable is translucent then an activity transition is in progress
// and we want to use the optimized drawing path for performance reasons (see
// OptimizedTranslucentLayerDrawable).
if (mAnimator.isStarted()) {
if (DEBUG) Log.v(TAG, "animation in progress");
} else if (!mFragmentState.isResumed()) {
if (DEBUG) Log.v(TAG, "not resumed");
} else if (mLayerDrawable.getAlpha() < FULL_ALPHA) {
if (DEBUG) Log.v(TAG, "in transition, alpha " + mLayerDrawable.getAlpha());
} else {
// Throttle: at most one background change per CHANGE_BG_DELAY_MS.
long delayMs = getRunnableDelay();
if (DEBUG) Log.v(TAG, "posting runnable delayMs " + delayMs);
mLastSetTime = System.currentTimeMillis();
mHandler.postDelayed(mChangeRunnable, delayMs);
mChangeRunnablePending = false;
}
}
// Builds the layered background drawable on first use, caches the image
// layer indices, and installs it on the attached view. No-op once built.
private void lazyInit() {
if (mLayerDrawable != null) {
return;
}
LayerDrawable layerDrawable = (LayerDrawable)
ContextCompat.getDrawable(mContext, R.drawable.lb_background).mutate();
mLayerDrawable = createTranslucentLayerDrawable(layerDrawable);
mImageInWrapperIndex = mLayerDrawable.findWrapperIndexById(R.id.background_imagein);
mImageOutWrapperIndex = mLayerDrawable.findWrapperIndexById(R.id.background_imageout);
BackgroundHelper.setBackgroundPreservingAlpha(mBgView, mLayerDrawable);
}
/**
 * Immediately shows the current background (the stored drawable, or the
 * default color/theme drawable when none is set) without animating, and
 * clears any leftover fade-out layer. No-op when not attached.
 */
private void updateImmediate() {
    if (!mAttached) {
        return;
    }
    lazyInit();
    if (mBackgroundDrawable == null) {
        // Fixed log-message typo: "defefault" -> "default".
        if (DEBUG) Log.v(TAG, "Use default background");
        mLayerDrawable.updateDrawable(R.id.background_imagein, getDefaultDrawable());
    } else {
        if (DEBUG) Log.v(TAG, "Background drawable is available " + mBackgroundDrawable);
        mLayerDrawable.updateDrawable(R.id.background_imagein, mBackgroundDrawable);
    }
    mLayerDrawable.clearDrawable(R.id.background_imageout, mContext);
}
/**
 * Sets the background to the given color. The timing for when this becomes
 * visible in the app is undefined and may take place after a small delay.
 */
public void setColor(@ColorInt int color) {
if (DEBUG) Log.v(TAG, "setColor " + Integer.toHexString(color));
// Record in the shared service so the color survives Activity changes.
mService.setColor(color);
mBackgroundColor = color;
mBackgroundDrawable = null;
// Not yet lazily initialized: updateImmediate()/lazyInit() will pick the
// color up later.
if (mLayerDrawable == null) {
return;
}
setDrawableInternal(getDefaultDrawable());
}
/**
 * Sets the given drawable into the background. The provided Drawable will be
 * used unmodified as the background, without any scaling or cropping
 * applied to it. The timing for when this becomes visible in the app is
 * undefined and may take place after a small delay.
 */
public void setDrawable(Drawable drawable) {
if (DEBUG) Log.v(TAG, "setBackgroundDrawable " + drawable);
mService.setDrawable(drawable);
mBackgroundDrawable = drawable;
if (mLayerDrawable == null) {
return;
}
if (drawable == null) {
setDrawableInternal(getDefaultDrawable());
} else {
setDrawableInternal(drawable);
}
}
/**
 * Clears the Drawable set by {@link #setDrawable(Drawable)} or {@link #setBitmap(Bitmap)}.
 * BackgroundManager will show a solid color set by {@link #setColor(int)} or theme drawable
 * if color is not provided.
 */
public void clearDrawable() {
setDrawable(null);
}
// Queues a background change, replacing any still-pending change unless the
// pending one targets an equivalent drawable. Requires prior attach().
private void setDrawableInternal(Drawable drawable) {
if (!mAttached) {
throw new IllegalStateException("Must attach before setting background drawable");
}
if (mChangeRunnable != null) {
if (sameDrawable(drawable, mChangeRunnable.mDrawable)) {
if (DEBUG) Log.v(TAG, "new drawable same as pending");
return;
}
mHandler.removeCallbacks(mChangeRunnable);
mChangeRunnable = null;
}
mChangeRunnable = new ChangeBackgroundRunnable(drawable);
mChangeRunnablePending = true;
postChangeRunnable();
}
// Remaining millis until CHANGE_BG_DELAY_MS has elapsed since the last
// change was posted; never negative.
private long getRunnableDelay() {
return Math.max(0, mLastSetTime + CHANGE_BG_DELAY_MS - System.currentTimeMillis());
}
/**
 * Sets the given bitmap into the background. When using setCoverImageBitmap to set the
 * background, the provided bitmap will be scaled and cropped to correctly
 * fit within the dimensions of the view. The timing for when this becomes
 * visible in the app is undefined and may take place after a small delay.
 */
public void setBitmap(Bitmap bitmap) {
if (DEBUG) {
Log.v(TAG, "setCoverImageBitmap " + bitmap);
}
if (bitmap == null) {
setDrawable(null);
return;
}
// Reject degenerate bitmaps rather than building an invalid drawable.
if (bitmap.getWidth() <= 0 || bitmap.getHeight() <= 0) {
if (DEBUG) {
Log.v(TAG, "invalid bitmap width or height");
}
return;
}
Matrix matrix = null;
// Bitmap does not match the screen exactly: scale uniformly to fill both
// dimensions, then center-crop the horizontal excess.
if ((bitmap.getWidth() != mWidthPx || bitmap.getHeight() != mHeightPx)) {
int dwidth = bitmap.getWidth();
int dheight = bitmap.getHeight();
float scale;
// Scale proportionately to fit width and height.
if (dwidth * mHeightPx > mWidthPx * dheight) {
scale = (float) mHeightPx / (float) dheight;
} else {
scale = (float) mWidthPx / (float) dwidth;
}
// subX is the source width that fits on screen after scaling; dx centers it.
int subX = Math.min((int) (mWidthPx / scale), dwidth);
int dx = Math.max(0, (dwidth - subX) / 2);
matrix = new Matrix();
matrix.setScale(scale, scale);
matrix.preTranslate(-dx, 0);
if (DEBUG) {
Log.v(TAG, "original image size " + bitmap.getWidth() + "x" + bitmap.getHeight()
+ " scale " + scale + " dx " + dx);
}
}
BitmapDrawable bitmapDrawable = new BitmapDrawable(mContext.getResources(), bitmap, matrix);
setDrawable(bitmapDrawable);
}
/**
 * Enable or disable call release() in Activity onStop(). Default is true.
 * @param autoReleaseOnStop True to call release() in Activity onStop(), false otherwise.
 */
public void setAutoReleaseOnStop(boolean autoReleaseOnStop) {
mAutoReleaseOnStop = autoReleaseOnStop;
}
/**
 * @return True if release() is called automatically in Activity.onStop(), false otherwise.
 */
public boolean isAutoReleaseOnStop() {
return mAutoReleaseOnStop;
}
/**
 * Returns the current background color.
 */
@ColorInt
public final int getColor() {
return mBackgroundColor;
}
/**
 * Returns the current background {@link Drawable}.
 */
public Drawable getDrawable() {
return mBackgroundDrawable;
}
/**
 * Returns true when the two drawables are interchangeable as backgrounds:
 * the same instance, BitmapDrawables over pixel-identical bitmaps, or
 * ColorDrawables of the same color. Null never matches anything.
 */
boolean sameDrawable(Drawable first, Drawable second) {
    if (first == null || second == null) {
        return false;
    }
    if (first == second) {
        return true;
    }
    if (first instanceof BitmapDrawable && second instanceof BitmapDrawable) {
        Bitmap firstBitmap = ((BitmapDrawable) first).getBitmap();
        Bitmap secondBitmap = ((BitmapDrawable) second).getBitmap();
        if (firstBitmap.sameAs(secondBitmap)) {
            return true;
        }
    }
    if (first instanceof ColorDrawable && second instanceof ColorDrawable) {
        return ((ColorDrawable) first).getColor() == ((ColorDrawable) second).getColor();
    }
    return false;
}
/**
 * Task which changes the background: moves the current image-in layer to the
 * image-out layer, installs the new drawable, and starts the fade-in animation.
 */
final class ChangeBackgroundRunnable implements Runnable {
final Drawable mDrawable;
ChangeBackgroundRunnable(Drawable drawable) {
mDrawable = drawable;
}
@Override
public void run() {
runTask();
// This task is done; clear the manager's pending reference.
mChangeRunnable = null;
}
private void runTask() {
if (mLayerDrawable == null) {
if (DEBUG) Log.v(TAG, "runTask while released - should not happen");
return;
}
DrawableWrapper imageInWrapper = getImageInWrapper();
if (imageInWrapper != null) {
// Skip the whole change when the new drawable is equivalent to the
// one already showing.
if (sameDrawable(mDrawable, imageInWrapper.getDrawable())) {
if (DEBUG) Log.v(TAG, "new drawable same as current");
return;
}
if (DEBUG) Log.v(TAG, "moving image in to image out");
// Order is important! Setting a drawable "removes" the
// previous one from the view
mLayerDrawable.clearDrawable(R.id.background_imagein, mContext);
mLayerDrawable.updateDrawable(R.id.background_imageout,
imageInWrapper.getDrawable());
}
applyBackgroundChanges();
}
void applyBackgroundChanges() {
if (!mAttached) {
return;
}
if (DEBUG) Log.v(TAG, "applyBackgroundChanges drawable " + mDrawable);
DrawableWrapper imageInWrapper = getImageInWrapper();
if (imageInWrapper == null && mDrawable != null) {
if (DEBUG) Log.v(TAG, "creating new imagein drawable");
imageInWrapper = mLayerDrawable.updateDrawable(
R.id.background_imagein, mDrawable);
if (DEBUG) Log.v(TAG, "imageInWrapper animation starting");
// Start the new layer fully transparent; the animator fades it in.
mLayerDrawable.setWrapperAlpha(mImageInWrapperIndex, 0);
}
mAnimator.setDuration(FADE_DURATION);
mAnimator.start();
}
}
// Placeholder for cleared layers: a BitmapDrawable over a null bitmap, so it
// draws nothing but keeps the layer slot occupied.
static class EmptyDrawable extends BitmapDrawable {
EmptyDrawable(Resources res) {
super(res, (Bitmap) null);
}
}
// Creates the placeholder used by TranslucentLayerDrawable.clearDrawable().
static Drawable createEmptyDrawable(Context context) {
return new EmptyDrawable(context.getResources());
}
}
| |
package com.intellij.openapi.externalSystem.service.project.manage;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.externalSystem.model.DataNode;
import com.intellij.openapi.externalSystem.model.Key;
import com.intellij.openapi.externalSystem.model.ProjectKeys;
import com.intellij.openapi.externalSystem.model.project.LibraryData;
import com.intellij.openapi.externalSystem.model.project.LibraryPathType;
import com.intellij.openapi.externalSystem.service.project.ExternalLibraryPathTypeMapper;
import com.intellij.openapi.externalSystem.service.project.PlatformFacade;
import com.intellij.openapi.externalSystem.service.project.ProjectStructureHelper;
import com.intellij.openapi.externalSystem.util.DisposeAwareProjectChange;
import com.intellij.openapi.externalSystem.util.ExternalSystemApiUtil;
import com.intellij.openapi.externalSystem.util.ExternalSystemConstants;
import com.intellij.openapi.externalSystem.util.Order;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.OrderRootType;
import com.intellij.openapi.roots.libraries.Library;
import com.intellij.openapi.roots.libraries.LibraryTable;
import com.intellij.openapi.vfs.JarFileSystem;
import com.intellij.openapi.vfs.LocalFileSystem;
import com.intellij.openapi.vfs.VfsUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.NotNullFunction;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.ContainerUtilRt;
import org.jetbrains.annotations.NotNull;
import java.io.File;
import java.util.Collection;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
/**
* @author Denis Zhdanov
* @since 2/15/12 11:32 AM
*/
@Order(ExternalSystemConstants.BUILTIN_SERVICE_ORDER)
public class LibraryDataService implements ProjectDataService<LibraryData, Library> {

  private static final Logger LOG = Logger.getInstance("#" + LibraryDataService.class.getName());

  /** Maps a file-system path to a {@link File}; shared by the import and sync code paths. */
  @NotNull public static final NotNullFunction<String, File> PATH_TO_FILE = new NotNullFunction<String, File>() {
    @NotNull
    @Override
    public File fun(String path) {
      return new File(path);
    }
  };

  @NotNull private final PlatformFacade myPlatformFacade;
  @NotNull private final ProjectStructureHelper myProjectStructureHelper;
  @NotNull private final ExternalLibraryPathTypeMapper myLibraryPathTypeMapper;

  public LibraryDataService(@NotNull PlatformFacade platformFacade,
                            @NotNull ProjectStructureHelper helper,
                            @NotNull ExternalLibraryPathTypeMapper mapper)
  {
    myPlatformFacade = platformFacade;
    myProjectStructureHelper = helper;
    myLibraryPathTypeMapper = mapper;
  }

  @NotNull
  @Override
  public Key<LibraryData> getTargetDataKey() {
    return ProjectKeys.LIBRARY;
  }

  @Override
  public void importData(@NotNull Collection<DataNode<LibraryData>> toImport, @NotNull Project project, boolean synchronous) {
    for (DataNode<LibraryData> dataNode : toImport) {
      importLibrary(dataNode.getData(), project, synchronous);
    }
  }

  /**
   * Imports a single external library: when a matching IDE library already
   * exists its roots are synchronized, otherwise a new project-level library
   * is created.
   */
  public void importLibrary(@NotNull final LibraryData toImport, @NotNull final Project project, boolean synchronous) {
    Library library = myProjectStructureHelper.findIdeLibrary(toImport, project);
    if (library != null) {
      syncPaths(toImport, library, project, synchronous);
      return;
    }
    // Build the root map only when a new library must be created; previously it
    // was computed before the lookup above and discarded on the sync path.
    Map<OrderRootType, Collection<File>> libraryFiles = prepareLibraryFiles(toImport);
    importLibrary(toImport.getName(), libraryFiles, project, synchronous);
  }

  /**
   * Converts the external library's per-{@link LibraryPathType} path sets into
   * IDE {@link OrderRootType} buckets of {@link File}s, skipping empty sets.
   */
  @NotNull
  public Map<OrderRootType, Collection<File>> prepareLibraryFiles(@NotNull LibraryData data) {
    Map<OrderRootType, Collection<File>> result = ContainerUtilRt.newHashMap();
    for (LibraryPathType pathType : LibraryPathType.values()) {
      final Set<String> paths = data.getPaths(pathType);
      if (paths.isEmpty()) {
        continue;
      }
      result.put(myLibraryPathTypeMapper.map(pathType), ContainerUtil.map(paths, PATH_TO_FILE));
    }
    return result;
  }

  /**
   * Creates a new project-level library with the given name and registers the
   * given roots on it, inside a project change action.
   */
  public void importLibrary(@NotNull final String libraryName,
                            @NotNull final Map<OrderRootType, Collection<File>> libraryFiles,
                            @NotNull final Project project,
                            boolean synchronous)
  {
    ExternalSystemApiUtil.executeProjectChangeAction(synchronous, new DisposeAwareProjectChange(project) {
      @Override
      public void execute() {
        // Is assumed to be called from the EDT.
        final LibraryTable libraryTable = myPlatformFacade.getProjectLibraryTable(project);
        final LibraryTable.ModifiableModel projectLibraryModel = libraryTable.getModifiableModel();
        final Library intellijLibrary;
        try {
          intellijLibrary = projectLibraryModel.createLibrary(libraryName);
        }
        finally {
          // Commit even on failure so the table model is never left open.
          projectLibraryModel.commit();
        }
        final Library.ModifiableModel libraryModel = intellijLibrary.getModifiableModel();
        try {
          registerPaths(libraryFiles, libraryModel, libraryName);
        }
        finally {
          libraryModel.commit();
        }
      }
    });
  }

  /**
   * Adds every file in {@code libraryFiles} as a root of the corresponding
   * type to {@code model}. Files that are not (yet) on disk are registered by
   * URL so they resolve once they appear; jar files are registered through
   * their jar root.
   */
  @SuppressWarnings("MethodMayBeStatic")
  public void registerPaths(@NotNull final Map<OrderRootType, Collection<File>> libraryFiles,
                            @NotNull Library.ModifiableModel model,
                            @NotNull String libraryName)
  {
    for (Map.Entry<OrderRootType, Collection<File>> entry : libraryFiles.entrySet()) {
      for (File file : entry.getValue()) {
        VirtualFile virtualFile = LocalFileSystem.getInstance().refreshAndFindFileByIoFile(file);
        if (virtualFile == null) {
          if (ExternalSystemConstants.VERBOSE_PROCESSING && entry.getKey() == OrderRootType.CLASSES) {
            LOG.warn(
              String.format("Can't find %s of the library '%s' at path '%s'", entry.getKey(), libraryName, file.getAbsolutePath())
            );
          }
          String url = VfsUtil.getUrlForLibraryRoot(file);
          model.addRoot(url, entry.getKey());
          continue;
        }
        if (virtualFile.isDirectory()) {
          model.addRoot(virtualFile, entry.getKey());
        }
        else {
          VirtualFile jarRoot = JarFileSystem.getInstance().getJarRootForLocalFile(virtualFile);
          if (jarRoot == null) {
            // Fixed doubled trailing apostrophe in the message ("...'%s''").
            LOG.warn(String.format(
              "Can't parse contents of the jar file at path '%s' for the library '%s'", file.getAbsolutePath(), libraryName
            ));
            continue;
          }
          model.addRoot(jarRoot, entry.getKey());
        }
      }
    }
  }

  /** Removes the given libraries (matched by name) from the project library table. */
  public void removeData(@NotNull final Collection<? extends Library> libraries, @NotNull final Project project, boolean synchronous) {
    if (libraries.isEmpty()) {
      return;
    }
    ExternalSystemApiUtil.executeProjectChangeAction(synchronous, new DisposeAwareProjectChange(project) {
      @Override
      public void execute() {
        final LibraryTable libraryTable = myPlatformFacade.getProjectLibraryTable(project);
        final LibraryTable.ModifiableModel model = libraryTable.getModifiableModel();
        try {
          for (Library library : libraries) {
            String libraryName = library.getName();
            if (libraryName != null) {
              Library libraryToRemove = model.getLibraryByName(libraryName);
              if (libraryToRemove != null) {
                model.removeLibrary(libraryToRemove);
              }
            }
          }
        }
        finally {
          model.commit();
        }
      }
    });
  }

  /**
   * Diffs the IDE library's roots against the external library's paths and
   * applies the minimal set of additions/removals. No-op for unresolved
   * external libraries.
   */
  public void syncPaths(@NotNull final LibraryData externalLibrary, @NotNull final Library ideLibrary, @NotNull final Project project, boolean synchronous) {
    if (externalLibrary.isUnresolved()) {
      return;
    }
    final Map<OrderRootType, Set<String>> toRemove = ContainerUtilRt.newHashMap();
    final Map<OrderRootType, Set<String>> toAdd = ContainerUtilRt.newHashMap();
    for (LibraryPathType pathType : LibraryPathType.values()) {
      OrderRootType ideType = myLibraryPathTypeMapper.map(pathType);
      HashSet<String> toAddPerType = ContainerUtilRt.newHashSet(externalLibrary.getPaths(pathType));
      toAdd.put(ideType, toAddPerType);
      HashSet<String> toRemovePerType = ContainerUtilRt.newHashSet();
      toRemove.put(ideType, toRemovePerType);
      for (VirtualFile ideFile : ideLibrary.getFiles(ideType)) {
        String idePath = ExternalSystemApiUtil.getLocalFileSystemPath(ideFile);
        // Paths present on both sides need no action and are removed from the
        // to-add set; anything left only on the IDE side is stale.
        if (!toAddPerType.remove(idePath)) {
          toRemovePerType.add(ideFile.getUrl());
        }
      }
    }
    if (toRemove.isEmpty() && toAdd.isEmpty()) {
      return;
    }
    ExternalSystemApiUtil.executeProjectChangeAction(synchronous, new DisposeAwareProjectChange(project) {
      @Override
      public void execute() {
        Library.ModifiableModel model = ideLibrary.getModifiableModel();
        try {
          for (Map.Entry<OrderRootType, Set<String>> entry : toRemove.entrySet()) {
            for (String path : entry.getValue()) {
              model.removeRoot(path, entry.getKey());
            }
          }
          for (Map.Entry<OrderRootType, Set<String>> entry : toAdd.entrySet()) {
            Map<OrderRootType, Collection<File>> roots = ContainerUtilRt.newHashMap();
            roots.put(entry.getKey(), ContainerUtil.map(entry.getValue(), PATH_TO_FILE));
            registerPaths(roots, model, externalLibrary.getName());
          }
        }
        finally {
          model.commit();
        }
      }
    });
  }
}
| |
/**
* Copyright 2005-2014 Red Hat, Inc.
*
* Red Hat licenses this file to you under the Apache License, version
* 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package io.fabric8.forge.camel.commands.project.helper;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
import io.fabric8.forge.camel.commands.project.CamelComponentDetails;
import io.fabric8.forge.camel.commands.project.completer.CamelComponentsCompleter;
import io.fabric8.forge.camel.commands.project.completer.CamelComponentsLabelCompleter;
import org.apache.camel.catalog.CamelCatalog;
import org.apache.camel.catalog.DefaultCamelCatalog;
import org.apache.camel.catalog.JSonSchemaHelper;
import org.jboss.forge.addon.dependencies.Dependency;
import org.jboss.forge.addon.dependencies.builder.DependencyBuilder;
import org.jboss.forge.addon.projects.Project;
import org.jboss.forge.addon.projects.dependencies.DependencyInstaller;
import org.jboss.forge.addon.ui.input.UISelectOne;
import org.jboss.forge.addon.ui.result.Result;
import org.jboss.forge.addon.ui.result.Results;
import org.jboss.forge.roaster.model.source.JavaClassSource;
import org.jboss.forge.roaster.model.source.MethodSource;
import org.jboss.forge.roaster.model.util.Strings;
public final class CamelCommandsHelper {
// to speed up performance on command line completion lets not perform a full classpath validation of the project until its being used on a command
private static final boolean validateClassPathForProjectValidation = false;
// Simple variant: all component name choices for the project, unfiltered.
public static Iterable<String> createComponentNameValues(Project project) {
return new CamelComponentsLabelCompleter(project).getValueChoices();
}
// Deferred variant: returns a Callable so the choices are recomputed from the
// current category filter value every time the UI asks for them.
public static Callable<Iterable<String>> createComponentNameValues(final Project project,
final UISelectOne<String> componentCategoryFilter,
final boolean excludeComponentsOnClasspath) {
// use callable so we can live update the filter
return new Callable<Iterable<String>>() {
@Override
public Iterable<String> call() throws Exception {
String label = componentCategoryFilter.getValue();
return new CamelComponentsCompleter(project, null, excludeComponentsOnClasspath).getValueChoices(label);
}
};
}
/**
 * Populates the details for the given component from the Camel catalog.
 *
 * @return a failure {@link Result} when the component or its fully qualified
 *         class name cannot be resolved, or {@code null} on success.
 */
public static Result loadCamelComponentDetails(String camelComponentName, CamelComponentDetails details) {
    CamelCatalog catalog = new DefaultCamelCatalog();
    String json = catalog.componentJSonSchema(camelComponentName);
    if (json == null) {
        return Results.fail("Could not find catalog entry for component name: " + camelComponentName);
    }
    List<Map<String, String>> rows = JSonSchemaHelper.parseJsonSchema("component", json, false);
    for (Map<String, String> row : rows) {
        String javaType = row.get("javaType");
        String artifactId = row.get("artifactId");
        if (!Strings.isNullOrEmpty(javaType)) {
            details.setComponentClassQName(javaType);
        }
        if (!Strings.isNullOrEmpty(artifactId)) {
            details.setArtifactId(artifactId);
        }
    }
    if (Strings.isNullOrEmpty(details.getComponentClassQName())) {
        return Results.fail("Could not find fully qualified class name in catalog for component name: " + camelComponentName);
    }
    return null;
}
public static Result ensureCamelArtifactIdAdded(Project project, CamelComponentDetails details, DependencyInstaller dependencyInstaller) {
String artifactId = details.getArtifactId();
Dependency core = CamelProjectHelper.findCamelCoreDependency(project);
if (core == null) {
return Results.fail("The project does not include camel-core");
}
DependencyBuilder component = DependencyBuilder.create().setGroupId("org.apache.camel")
.setArtifactId(artifactId).setVersion(core.getCoordinate().getVersion());
// install the component
dependencyInstaller.install(project, component);
return null;
}
public static boolean isCdiProject(Project project) {
return (!validateClassPathForProjectValidation || JavaHelper.projectHasClassOnClassPath(project, "javax.enterprise.inject.Produces")) &&
CamelProjectHelper.findCamelCDIDependency(project) != null;
}
public static boolean isSpringProject(Project project) {
return (!validateClassPathForProjectValidation || JavaHelper.projectHasClassOnClassPath(project, "org.springframework.context.ApplicationContext")) &&
CamelProjectHelper.findCamelSpringDependency(project) != null;
}
public static boolean isBlueprintProject(Project project) {
return CamelProjectHelper.findCamelBlueprintDependency(project) != null;
}
public static void createCdiComponentProducerClass(JavaClassSource javaClass, CamelComponentDetails details, String camelComponentName, String componentInstanceName, String configurationCode) {
javaClass.addImport("javax.enterprise.inject.Produces");
javaClass.addImport("javax.inject.Singleton");
javaClass.addImport("javax.inject.Named");
javaClass.addImport(details.getComponentClassQName());
String componentClassName = details.getComponentClassName();
String methodName = "create" + Strings.capitalize(componentInstanceName) + "Component";
String body = componentClassName + " component = new " + componentClassName + "();" + configurationCode + "\nreturn component;";
MethodSource<JavaClassSource> method = javaClass.addMethod()
.setPublic()
.setReturnType(componentClassName)
.setName(methodName)
.setBody(body)
.addThrows(Exception.class);
method.addAnnotation("Named").setStringValue(camelComponentName);
method.addAnnotation("Produces");
method.addAnnotation("Singleton");
}
public static void createSpringComponentFactoryClass(JavaClassSource javaClass, CamelComponentDetails details, String camelComponentName, String componentInstanceName, String configurationCode) {
javaClass.addAnnotation("Component");
javaClass.addImport("org.springframework.beans.factory.config.BeanDefinition");
javaClass.addImport("org.springframework.beans.factory.annotation.Qualifier");
javaClass.addImport("org.springframework.context.annotation.Bean");
javaClass.addImport("org.springframework.context.annotation.Scope");
javaClass.addImport("org.springframework.stereotype.Component");
javaClass.addImport(details.getComponentClassQName());
String componentClassName = details.getComponentClassName();
String methodName = "create" + Strings.capitalize(componentInstanceName) + "Component";
String body = componentClassName + " component = new " + componentClassName + "();" + configurationCode + "\nreturn component;";
MethodSource<JavaClassSource> method = javaClass.addMethod()
.setPublic()
.setReturnType(componentClassName)
.setName(methodName)
.setBody(body)
.addThrows(Exception.class);
method.addAnnotation("Qualifier").setStringValue(camelComponentName);
method.addAnnotation("Bean");
method.addAnnotation("Scope").setLiteralValue("BeanDefinition.SCOPE_SINGLETON");
}
/**
* Converts a java type as a string to a valid input type and returns the class or null if its not supported
*/
public static Class<Object> loadValidInputTypes(String javaType, String type) {
try {
Class<Object> clazz = getPrimitiveClassType(type);
if (clazz == null) {
clazz = loadPrimitiveType(javaType);
}
if (clazz == null) {
clazz = loadStringSupportedType(javaType);
}
if (clazz == null) {
clazz = (Class<Object>) Class.forName(javaType);
}
if (clazz.equals(String.class) || clazz.equals(Date.class)
|| clazz.isPrimitive() || Number.class.isAssignableFrom(clazz)) {
return clazz;
}
} catch (ClassNotFoundException e) {
// ignore errors
}
return null;
}
private static Class loadStringSupportedType(String javaType) {
if ("java.io.File".equals(javaType)) {
return String.class;
} else if ("java.net.URL".equals(javaType)) {
return String.class;
} else if ("java.net.URI".equals(javaType)) {
return String.class;
}
return null;
}
/**
* Gets the JSon schema primitive type.
*
* @param name the json type
* @return the primitive Java Class type
*/
public static Class getPrimitiveClassType(String name) {
if ("string".equals(name)) {
return String.class;
} else if ("boolean".equals(name)) {
return boolean.class;
} else if ("integer".equals(name)) {
return int.class;
} else if ("number".equals(name)) {
return float.class;
}
return null;
}
private static Class loadPrimitiveType(String name) {
// special for byte[] or Object[] as its common to use
if ("java.lang.byte[]".equals(name) || "byte[]".equals(name)) {
return byte[].class;
} else if ("java.lang.Byte[]".equals(name) || "Byte[]".equals(name)) {
return Byte[].class;
} else if ("java.lang.Object[]".equals(name) || "Object[]".equals(name)) {
return Object[].class;
} else if ("java.lang.String[]".equals(name) || "String[]".equals(name)) {
return String[].class;
// and these is common as well
} else if ("java.lang.String".equals(name) || "String".equals(name)) {
return String.class;
} else if ("java.lang.Boolean".equals(name) || "Boolean".equals(name)) {
return Boolean.class;
} else if ("boolean".equals(name)) {
return boolean.class;
} else if ("java.lang.Integer".equals(name) || "Integer".equals(name)) {
return Integer.class;
} else if ("int".equals(name)) {
return int.class;
} else if ("java.lang.Long".equals(name) || "Long".equals(name)) {
return Long.class;
} else if ("long".equals(name)) {
return long.class;
} else if ("java.lang.Short".equals(name) || "Short".equals(name)) {
return Short.class;
} else if ("short".equals(name)) {
return short.class;
} else if ("java.lang.Byte".equals(name) || "Byte".equals(name)) {
return Byte.class;
} else if ("byte".equals(name)) {
return byte.class;
} else if ("java.lang.Float".equals(name) || "Float".equals(name)) {
return Float.class;
} else if ("float".equals(name)) {
return float.class;
} else if ("java.lang.Double".equals(name) || "Double".equals(name)) {
return Double.class;
} else if ("double".equals(name)) {
return double.class;
} else if ("java.lang.Character".equals(name) || "Character".equals(name)) {
return Character.class;
} else if ("char".equals(name)) {
return char.class;
}
return null;
}
}
| |
// Copyright 2018 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* TargetingIdeaServiceSoapBindingStub.java
*
* This file was auto-generated from WSDL
* by the Apache Axis 1.4 Mar 02, 2009 (07:08:06 PST) WSDL2Java emitter.
*/
package com.google.api.ads.adwords.axis.v201809.o;
public class TargetingIdeaServiceSoapBindingStub extends org.apache.axis.client.Stub implements com.google.api.ads.adwords.axis.v201809.o.TargetingIdeaServiceInterface {
    // Parallel lists registering type mappings: for each index, the XML QName,
    // its Java class, and the serializer/deserializer factories to use.
    private java.util.Vector cachedSerClasses = new java.util.Vector();
    private java.util.Vector cachedSerQNames = new java.util.Vector();
    private java.util.Vector cachedSerFactories = new java.util.Vector();
    private java.util.Vector cachedDeserFactories = new java.util.Vector();
    // Descriptions of the service's WSDL operations, built once at class load.
    static org.apache.axis.description.OperationDesc [] _operations;
    static {
        // this service exposes a single operation ("get"), described below
        _operations = new org.apache.axis.description.OperationDesc[1];
        _initOperationDesc1();
    }
private static void _initOperationDesc1(){
org.apache.axis.description.OperationDesc oper;
org.apache.axis.description.ParameterDesc param;
oper = new org.apache.axis.description.OperationDesc();
oper.setName("get");
param = new org.apache.axis.description.ParameterDesc(new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/o/v201809", "selector"), org.apache.axis.description.ParameterDesc.IN, new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/o/v201809", "TargetingIdeaSelector"), com.google.api.ads.adwords.axis.v201809.o.TargetingIdeaSelector.class, false, false);
param.setOmittable(true);
oper.addParameter(param);
oper.setReturnType(new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/o/v201809", "TargetingIdeaPage"));
oper.setReturnClass(com.google.api.ads.adwords.axis.v201809.o.TargetingIdeaPage.class);
oper.setReturnQName(new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/o/v201809", "rval"));
oper.setStyle(org.apache.axis.constants.Style.WRAPPED);
oper.setUse(org.apache.axis.constants.Use.LITERAL);
oper.addFault(new org.apache.axis.description.FaultDesc(
new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/o/v201809", "ApiExceptionFault"),
"com.google.api.ads.adwords.axis.v201809.cm.ApiException",
new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "ApiException"),
true
));
_operations[0] = oper;
}
    /**
     * Creates a stub with no pre-configured JAX-RPC service; a default Axis
     * service is created by the delegated constructor.
     */
    public TargetingIdeaServiceSoapBindingStub() throws org.apache.axis.AxisFault {
         this(null);
    }
    /**
     * Creates a stub bound to the given endpoint URL.
     *
     * @param endpointURL the SOAP endpoint, stored on the underlying Axis stub; may be null
     * @param service the JAX-RPC service to use, or null for a default Axis service
     */
    public TargetingIdeaServiceSoapBindingStub(java.net.URL endpointURL, javax.xml.rpc.Service service) throws org.apache.axis.AxisFault {
         this(service);
         super.cachedEndpoint = endpointURL;
    }
public TargetingIdeaServiceSoapBindingStub(javax.xml.rpc.Service service) throws org.apache.axis.AxisFault {
if (service == null) {
super.service = new org.apache.axis.client.Service();
} else {
super.service = service;
}
((org.apache.axis.client.Service)super.service).setTypeMappingVersion("1.2");
java.lang.Class cls;
javax.xml.namespace.QName qName;
javax.xml.namespace.QName qName2;
java.lang.Class beansf = org.apache.axis.encoding.ser.BeanSerializerFactory.class;
java.lang.Class beandf = org.apache.axis.encoding.ser.BeanDeserializerFactory.class;
java.lang.Class enumsf = org.apache.axis.encoding.ser.EnumSerializerFactory.class;
java.lang.Class enumdf = org.apache.axis.encoding.ser.EnumDeserializerFactory.class;
java.lang.Class arraysf = org.apache.axis.encoding.ser.ArraySerializerFactory.class;
java.lang.Class arraydf = org.apache.axis.encoding.ser.ArrayDeserializerFactory.class;
java.lang.Class simplesf = org.apache.axis.encoding.ser.SimpleSerializerFactory.class;
java.lang.Class simpledf = org.apache.axis.encoding.ser.SimpleDeserializerFactory.class;
java.lang.Class simplelistsf = org.apache.axis.encoding.ser.SimpleListSerializerFactory.class;
java.lang.Class simplelistdf = org.apache.axis.encoding.ser.SimpleListDeserializerFactory.class;
addBindings0();
addBindings1();
}
private void addBindings0() {
java.lang.Class cls;
javax.xml.namespace.QName qName;
javax.xml.namespace.QName qName2;
java.lang.Class beansf = org.apache.axis.encoding.ser.BeanSerializerFactory.class;
java.lang.Class beandf = org.apache.axis.encoding.ser.BeanDeserializerFactory.class;
java.lang.Class enumsf = org.apache.axis.encoding.ser.EnumSerializerFactory.class;
java.lang.Class enumdf = org.apache.axis.encoding.ser.EnumDeserializerFactory.class;
java.lang.Class arraysf = org.apache.axis.encoding.ser.ArraySerializerFactory.class;
java.lang.Class arraydf = org.apache.axis.encoding.ser.ArrayDeserializerFactory.class;
java.lang.Class simplesf = org.apache.axis.encoding.ser.SimpleSerializerFactory.class;
java.lang.Class simpledf = org.apache.axis.encoding.ser.SimpleDeserializerFactory.class;
java.lang.Class simplelistsf = org.apache.axis.encoding.ser.SimpleListSerializerFactory.class;
java.lang.Class simplelistdf = org.apache.axis.encoding.ser.SimpleListDeserializerFactory.class;
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "AdGroupCriterionError");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.AdGroupCriterionError.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "AdGroupCriterionError.Reason");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.AdGroupCriterionErrorReason.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(enumsf);
cachedDeserFactories.add(enumdf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "AdGroupCriterionLimitExceeded");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.AdGroupCriterionLimitExceeded.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "AdGroupCriterionLimitExceeded.CriteriaLimitType");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.AdGroupCriterionLimitExceededCriteriaLimitType.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(enumsf);
cachedDeserFactories.add(enumdf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "AdxError");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.AdxError.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "AdxError.Reason");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.AdxErrorReason.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(enumsf);
cachedDeserFactories.add(enumdf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "ApiError");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.ApiError.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "ApiException");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.ApiException.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "ApplicationException");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.ApplicationException.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "AuthenticationError");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.AuthenticationError.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "AuthenticationError.Reason");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.AuthenticationErrorReason.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(enumsf);
cachedDeserFactories.add(enumdf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "AuthorizationError");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.AuthorizationError.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "AuthorizationError.Reason");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.AuthorizationErrorReason.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(enumsf);
cachedDeserFactories.add(enumdf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "BudgetError");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.BudgetError.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "BudgetError.Reason");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.BudgetErrorReason.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(enumsf);
cachedDeserFactories.add(enumdf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "ClientTermsError");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.ClientTermsError.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "ClientTermsError.Reason");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.ClientTermsErrorReason.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(enumsf);
cachedDeserFactories.add(enumdf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "CollectionSizeError");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.CollectionSizeError.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "CollectionSizeError.Reason");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.CollectionSizeErrorReason.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(enumsf);
cachedDeserFactories.add(enumdf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "ComparableValue");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.ComparableValue.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "Criterion");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.Criterion.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "Criterion.Type");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.CriterionType.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(enumsf);
cachedDeserFactories.add(enumdf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "CriterionError");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.CriterionError.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "CriterionError.Reason");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.CriterionErrorReason.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(enumsf);
cachedDeserFactories.add(enumdf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "CriterionPolicyError");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.CriterionPolicyError.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "CriterionUserInterest");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.CriterionUserInterest.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "CriterionUserList");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.CriterionUserList.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "CriterionUserList.MembershipStatus");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.CriterionUserListMembershipStatus.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(enumsf);
cachedDeserFactories.add(enumdf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "DatabaseError");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.DatabaseError.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "DatabaseError.Reason");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.DatabaseErrorReason.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(enumsf);
cachedDeserFactories.add(enumdf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "DateError");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.DateError.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "DateError.Reason");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.DateErrorReason.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(enumsf);
cachedDeserFactories.add(enumdf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "DistinctError");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.DistinctError.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "DistinctError.Reason");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.DistinctErrorReason.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(enumsf);
cachedDeserFactories.add(enumdf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "DoubleValue");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.DoubleValue.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "EntityCountLimitExceeded");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.EntityCountLimitExceeded.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "EntityCountLimitExceeded.Reason");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.EntityCountLimitExceededReason.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(enumsf);
cachedDeserFactories.add(enumdf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "EntityNotFound");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.EntityNotFound.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "EntityNotFound.Reason");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.EntityNotFoundReason.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(enumsf);
cachedDeserFactories.add(enumdf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "FieldPathElement");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.FieldPathElement.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "IdError");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.IdError.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "IdError.Reason");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.IdErrorReason.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(enumsf);
cachedDeserFactories.add(enumdf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "InternalApiError");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.InternalApiError.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "InternalApiError.Reason");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.InternalApiErrorReason.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(enumsf);
cachedDeserFactories.add(enumdf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "Keyword");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.Keyword.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "KeywordMatchType");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.KeywordMatchType.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(enumsf);
cachedDeserFactories.add(enumdf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "Language");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.Language.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "Location");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.Location.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "LocationTargetingStatus");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.LocationTargetingStatus.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(enumsf);
cachedDeserFactories.add(enumdf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "LongValue");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.LongValue.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "MobileAppCategory");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.MobileAppCategory.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "MobileApplication");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.MobileApplication.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "Money");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.Money.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "NetworkSetting");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.NetworkSetting.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "NotEmptyError");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.NotEmptyError.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "NotEmptyError.Reason");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.NotEmptyErrorReason.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(enumsf);
cachedDeserFactories.add(enumdf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "NullError");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.NullError.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "NullError.Reason");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.NullErrorReason.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(enumsf);
cachedDeserFactories.add(enumdf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "NumberValue");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.NumberValue.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "OperationAccessDenied");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.OperationAccessDenied.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "OperationAccessDenied.Reason");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.OperationAccessDeniedReason.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(enumsf);
cachedDeserFactories.add(enumdf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "OperatorError");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.OperatorError.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "OperatorError.Reason");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.OperatorErrorReason.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(enumsf);
cachedDeserFactories.add(enumdf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "Paging");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.Paging.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "Placement");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.Placement.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "Platform");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.Platform.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "PolicyViolationError");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.PolicyViolationError.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "PolicyViolationError.Part");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.PolicyViolationErrorPart.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "PolicyViolationKey");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.PolicyViolationKey.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "QuotaCheckError");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.QuotaCheckError.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "QuotaCheckError.Reason");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.QuotaCheckErrorReason.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(enumsf);
cachedDeserFactories.add(enumdf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "RangeError");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.RangeError.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "RangeError.Reason");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.RangeErrorReason.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(enumsf);
cachedDeserFactories.add(enumdf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "RateExceededError");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.RateExceededError.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "RateExceededError.Reason");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.RateExceededErrorReason.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(enumsf);
cachedDeserFactories.add(enumdf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "ReadOnlyError");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.ReadOnlyError.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "ReadOnlyError.Reason");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.ReadOnlyErrorReason.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(enumsf);
cachedDeserFactories.add(enumdf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "RegionCodeError");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.RegionCodeError.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "RegionCodeError.Reason");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.RegionCodeErrorReason.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(enumsf);
cachedDeserFactories.add(enumdf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "RejectedError");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.RejectedError.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "RejectedError.Reason");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.RejectedErrorReason.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(enumsf);
cachedDeserFactories.add(enumdf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "RequestError");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.RequestError.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "RequestError.Reason");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.RequestErrorReason.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(enumsf);
cachedDeserFactories.add(enumdf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "RequiredError");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.RequiredError.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "RequiredError.Reason");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.RequiredErrorReason.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(enumsf);
cachedDeserFactories.add(enumdf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "SizeLimitError");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.SizeLimitError.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "SizeLimitError.Reason");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.SizeLimitErrorReason.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(enumsf);
cachedDeserFactories.add(enumdf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "SoapHeader");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.SoapHeader.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "SoapResponseHeader");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.SoapResponseHeader.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "StatsQueryError");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.StatsQueryError.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "StatsQueryError.Reason");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.StatsQueryErrorReason.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(enumsf);
cachedDeserFactories.add(enumdf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "StringFormatError");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.StringFormatError.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "StringFormatError.Reason");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.StringFormatErrorReason.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(enumsf);
cachedDeserFactories.add(enumdf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "StringLengthError");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.StringLengthError.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "StringLengthError.Reason");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.StringLengthErrorReason.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(enumsf);
cachedDeserFactories.add(enumdf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "Vertical");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.cm.Vertical.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/o/v201809", "Attribute");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.o.Attribute.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/o/v201809", "AttributeType");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.o.AttributeType.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(enumsf);
cachedDeserFactories.add(enumdf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/o/v201809", "BooleanAttribute");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.o.BooleanAttribute.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
qName = new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/o/v201809", "CategoryProductsAndServicesSearchParameter");
cachedSerQNames.add(qName);
cls = com.google.api.ads.adwords.axis.v201809.o.CategoryProductsAndServicesSearchParameter.class;
cachedSerClasses.add(cls);
cachedSerFactories.add(beansf);
cachedDeserFactories.add(beandf);
}
/**
 * Registers the second batch of XML-type &lt;-&gt; Java-class (de)serialization
 * bindings for this stub (the TargetingIdea "o" namespace).
 *
 * <p>Each entry appends, in lock step, to the four parallel caches
 * ({@code cachedSerQNames}, {@code cachedSerClasses}, {@code cachedSerFactories},
 * {@code cachedDeserFactories}); {@code createCall()} later walks these lists by
 * index, so the per-type registration order must not change.
 *
 * <p>NOTE(review): the unused generated locals ({@code qName2} and the
 * array/simple/simple-list factory class references) were removed — none of
 * them was referenced anywhere in this method.
 */
private void addBindings1() {
    final java.lang.String ns = "https://adwords.google.com/api/adwords/o/v201809";
    // Factories for complex (bean) schema types.
    java.lang.Class beansf = org.apache.axis.encoding.ser.BeanSerializerFactory.class;
    java.lang.Class beandf = org.apache.axis.encoding.ser.BeanDeserializerFactory.class;
    // Factories for enumeration (restricted simple) schema types.
    java.lang.Class enumsf = org.apache.axis.encoding.ser.EnumSerializerFactory.class;
    java.lang.Class enumdf = org.apache.axis.encoding.ser.EnumDeserializerFactory.class;
    registerBinding(ns, "CompetitionSearchParameter", com.google.api.ads.adwords.axis.v201809.o.CompetitionSearchParameter.class, beansf, beandf);
    registerBinding(ns, "CompetitionSearchParameter.Level", com.google.api.ads.adwords.axis.v201809.o.CompetitionSearchParameterLevel.class, enumsf, enumdf);
    registerBinding(ns, "CriterionAttribute", com.google.api.ads.adwords.axis.v201809.o.CriterionAttribute.class, beansf, beandf);
    registerBinding(ns, "CurrencyCodeError", com.google.api.ads.adwords.axis.v201809.o.CurrencyCodeError.class, beansf, beandf);
    registerBinding(ns, "CurrencyCodeError.Reason", com.google.api.ads.adwords.axis.v201809.o.CurrencyCodeErrorReason.class, enumsf, enumdf);
    registerBinding(ns, "DoubleAttribute", com.google.api.ads.adwords.axis.v201809.o.DoubleAttribute.class, beansf, beandf);
    registerBinding(ns, "IdeaTextFilterSearchParameter", com.google.api.ads.adwords.axis.v201809.o.IdeaTextFilterSearchParameter.class, beansf, beandf);
    registerBinding(ns, "IdeaType", com.google.api.ads.adwords.axis.v201809.o.IdeaType.class, enumsf, enumdf);
    registerBinding(ns, "IdeaTypeAttribute", com.google.api.ads.adwords.axis.v201809.o.IdeaTypeAttribute.class, beansf, beandf);
    registerBinding(ns, "IncludeAdultContentSearchParameter", com.google.api.ads.adwords.axis.v201809.o.IncludeAdultContentSearchParameter.class, beansf, beandf);
    registerBinding(ns, "IntegerAttribute", com.google.api.ads.adwords.axis.v201809.o.IntegerAttribute.class, beansf, beandf);
    registerBinding(ns, "IntegerSetAttribute", com.google.api.ads.adwords.axis.v201809.o.IntegerSetAttribute.class, beansf, beandf);
    registerBinding(ns, "KeywordAttribute", com.google.api.ads.adwords.axis.v201809.o.KeywordAttribute.class, beansf, beandf);
    registerBinding(ns, "LanguageSearchParameter", com.google.api.ads.adwords.axis.v201809.o.LanguageSearchParameter.class, beansf, beandf);
    registerBinding(ns, "LocationSearchParameter", com.google.api.ads.adwords.axis.v201809.o.LocationSearchParameter.class, beansf, beandf);
    registerBinding(ns, "LongAttribute", com.google.api.ads.adwords.axis.v201809.o.LongAttribute.class, beansf, beandf);
    registerBinding(ns, "LongComparisonOperation", com.google.api.ads.adwords.axis.v201809.o.LongComparisonOperation.class, beansf, beandf);
    registerBinding(ns, "LongRangeAttribute", com.google.api.ads.adwords.axis.v201809.o.LongRangeAttribute.class, beansf, beandf);
    registerBinding(ns, "MoneyAttribute", com.google.api.ads.adwords.axis.v201809.o.MoneyAttribute.class, beansf, beandf);
    registerBinding(ns, "MonthlySearchVolume", com.google.api.ads.adwords.axis.v201809.o.MonthlySearchVolume.class, beansf, beandf);
    registerBinding(ns, "MonthlySearchVolumeAttribute", com.google.api.ads.adwords.axis.v201809.o.MonthlySearchVolumeAttribute.class, beansf, beandf);
    registerBinding(ns, "NetworkSearchParameter", com.google.api.ads.adwords.axis.v201809.o.NetworkSearchParameter.class, beansf, beandf);
    registerBinding(ns, "Range", com.google.api.ads.adwords.axis.v201809.o.Range.class, beansf, beandf);
    registerBinding(ns, "RelatedToQuerySearchParameter", com.google.api.ads.adwords.axis.v201809.o.RelatedToQuerySearchParameter.class, beansf, beandf);
    registerBinding(ns, "RelatedToUrlSearchParameter", com.google.api.ads.adwords.axis.v201809.o.RelatedToUrlSearchParameter.class, beansf, beandf);
    registerBinding(ns, "RequestType", com.google.api.ads.adwords.axis.v201809.o.RequestType.class, enumsf, enumdf);
    registerBinding(ns, "SearchParameter", com.google.api.ads.adwords.axis.v201809.o.SearchParameter.class, beansf, beandf);
    registerBinding(ns, "SearchVolumeSearchParameter", com.google.api.ads.adwords.axis.v201809.o.SearchVolumeSearchParameter.class, beansf, beandf);
    registerBinding(ns, "SeedAdGroupIdSearchParameter", com.google.api.ads.adwords.axis.v201809.o.SeedAdGroupIdSearchParameter.class, beansf, beandf);
    registerBinding(ns, "StringAttribute", com.google.api.ads.adwords.axis.v201809.o.StringAttribute.class, beansf, beandf);
    registerBinding(ns, "TargetingIdea", com.google.api.ads.adwords.axis.v201809.o.TargetingIdea.class, beansf, beandf);
    registerBinding(ns, "TargetingIdeaError", com.google.api.ads.adwords.axis.v201809.o.TargetingIdeaError.class, beansf, beandf);
    registerBinding(ns, "TargetingIdeaError.Reason", com.google.api.ads.adwords.axis.v201809.o.TargetingIdeaErrorReason.class, enumsf, enumdf);
    registerBinding(ns, "TargetingIdeaPage", com.google.api.ads.adwords.axis.v201809.o.TargetingIdeaPage.class, beansf, beandf);
    registerBinding(ns, "TargetingIdeaSelector", com.google.api.ads.adwords.axis.v201809.o.TargetingIdeaSelector.class, beansf, beandf);
    registerBinding(ns, "TrafficEstimatorError", com.google.api.ads.adwords.axis.v201809.o.TrafficEstimatorError.class, beansf, beandf);
    registerBinding(ns, "TrafficEstimatorError.Reason", com.google.api.ads.adwords.axis.v201809.o.TrafficEstimatorErrorReason.class, enumsf, enumdf);
    registerBinding(ns, "Type_AttributeMapEntry", com.google.api.ads.adwords.axis.v201809.o.Type_AttributeMapEntry.class, beansf, beandf);
    registerBinding(ns, "WebpageDescriptor", com.google.api.ads.adwords.axis.v201809.o.WebpageDescriptor.class, beansf, beandf);
    registerBinding(ns, "WebpageDescriptorAttribute", com.google.api.ads.adwords.axis.v201809.o.WebpageDescriptorAttribute.class, beansf, beandf);
}
/**
 * Appends a single XML-type binding to the four parallel serialization caches.
 *
 * @param namespace the XML namespace URI of the schema type
 * @param localPart the local name of the schema type
 * @param type the Java class the type maps to
 * @param serFactory the serializer factory class for the type
 * @param deserFactory the deserializer factory class for the type
 */
private void registerBinding(java.lang.String namespace, java.lang.String localPart,
        java.lang.Class type, java.lang.Class serFactory, java.lang.Class deserFactory) {
    cachedSerQNames.add(new javax.xml.namespace.QName(namespace, localPart));
    cachedSerClasses.add(type);
    cachedSerFactories.add(serFactory);
    cachedDeserFactories.add(deserFactory);
}
/**
 * Creates and configures an Axis {@code Call} for an operation on this stub.
 *
 * <p>Copies the cached session/credential/endpoint/timeout/port settings from
 * the stub onto the new call, then — on the first call only — registers all
 * cached type mappings (populated by the {@code addBindingsN()} methods).
 *
 * @return the configured call object
 * @throws java.rmi.RemoteException (as {@code AxisFault}) if the call cannot
 *         be created or configured
 */
protected org.apache.axis.client.Call createCall() throws java.rmi.RemoteException {
    try {
        org.apache.axis.client.Call _call = super._createCall();
        // Propagate only the settings that were explicitly cached on the stub.
        if (super.maintainSessionSet) {
            _call.setMaintainSession(super.maintainSession);
        }
        if (super.cachedUsername != null) {
            _call.setUsername(super.cachedUsername);
        }
        if (super.cachedPassword != null) {
            _call.setPassword(super.cachedPassword);
        }
        if (super.cachedEndpoint != null) {
            _call.setTargetEndpointAddress(super.cachedEndpoint);
        }
        if (super.cachedTimeout != null) {
            _call.setTimeout(super.cachedTimeout);
        }
        if (super.cachedPortName != null) {
            _call.setPortName(super.cachedPortName);
        }
        // Copy every cached property onto the call.
        java.util.Enumeration keys = super.cachedProperties.keys();
        while (keys.hasMoreElements()) {
            java.lang.String key = (java.lang.String) keys.nextElement();
            _call.setProperty(key, super.cachedProperties.get(key));
        }
        // All the type mapping information is registered
        // when the first call is made.
        // The type mapping information is actually registered in
        // the TypeMappingRegistry of the service, which
        // is the reason why registration is only needed for the first call.
        synchronized (this) {
            if (firstCall()) {
                // must set encoding style before registering serializers
                _call.setEncodingStyle(null);
                // The four caches are parallel lists: index i describes one
                // type binding (class, QName, serializer, deserializer).
                for (int i = 0; i < cachedSerFactories.size(); ++i) {
                    java.lang.Class cls = (java.lang.Class) cachedSerClasses.get(i);
                    javax.xml.namespace.QName qName =
                            (javax.xml.namespace.QName) cachedSerQNames.get(i);
                    java.lang.Object x = cachedSerFactories.get(i);
                    if (x instanceof Class) {
                        // Factories cached as Class objects (the common case
                        // for generated bindings).
                        java.lang.Class sf = (java.lang.Class)
                                 cachedSerFactories.get(i);
                        java.lang.Class df = (java.lang.Class)
                                 cachedDeserFactories.get(i);
                        _call.registerTypeMapping(cls, qName, sf, df, false);
                    }
                    else if (x instanceof javax.xml.rpc.encoding.SerializerFactory) {
                        // Factories cached as pre-built factory instances.
                        org.apache.axis.encoding.SerializerFactory sf = (org.apache.axis.encoding.SerializerFactory)
                                 cachedSerFactories.get(i);
                        org.apache.axis.encoding.DeserializerFactory df = (org.apache.axis.encoding.DeserializerFactory)
                                 cachedDeserFactories.get(i);
                        _call.registerTypeMapping(cls, qName, sf, df, false);
                    }
                }
            }
        }
        return _call;
    }
    catch (java.lang.Throwable _t) {
        // Wrap any failure (including Errors) in an AxisFault, preserving the cause.
        throw new org.apache.axis.AxisFault("Failure trying to get the Call object", _t);
    }
}
/**
 * Invokes the remote {@code get} operation of the TargetingIdea service.
 *
 * @param selector the selector describing which targeting ideas to fetch
 * @return the page of targeting ideas returned by the service
 * @throws org.apache.axis.NoEndPointException if no endpoint address is configured
 * @throws com.google.api.ads.adwords.axis.v201809.cm.ApiException if the
 *         service reports an API-level fault
 * @throws java.rmi.RemoteException for any other transport or SOAP failure
 */
public com.google.api.ads.adwords.axis.v201809.o.TargetingIdeaPage get(com.google.api.ads.adwords.axis.v201809.o.TargetingIdeaSelector selector) throws java.rmi.RemoteException, com.google.api.ads.adwords.axis.v201809.cm.ApiException {
    if (super.cachedEndpoint == null) {
        throw new org.apache.axis.NoEndPointException();
    }
    // Configure the call for operation 0 ("get") with document/literal style
    // (no encoding, no multi-refs, no xsi:type attributes), SOAP 1.1.
    org.apache.axis.client.Call _call = createCall();
    _call.setOperation(_operations[0]);
    _call.setUseSOAPAction(true);
    _call.setSOAPActionURI("");
    _call.setEncodingStyle(null);
    _call.setProperty(org.apache.axis.client.Call.SEND_TYPE_ATTR, Boolean.FALSE);
    _call.setProperty(org.apache.axis.AxisEngine.PROP_DOMULTIREFS, Boolean.FALSE);
    _call.setSOAPVersion(org.apache.axis.soap.SOAPConstants.SOAP11_CONSTANTS);
    _call.setOperationName(new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/o/v201809", "get"));
    setRequestHeaders(_call);
    setAttachments(_call);
    try { java.lang.Object _resp = _call.invoke(new java.lang.Object[] {selector});
        if (_resp instanceof java.rmi.RemoteException) {
            throw (java.rmi.RemoteException)_resp;
        }
        else {
            extractAttachments(_call);
            try {
                // Normal case: the response already has the expected type.
                return (com.google.api.ads.adwords.axis.v201809.o.TargetingIdeaPage) _resp;
            } catch (java.lang.Exception _exception) {
                // Fallback: let Axis coerce the response to the expected type.
                return (com.google.api.ads.adwords.axis.v201809.o.TargetingIdeaPage) org.apache.axis.utils.JavaUtils.convert(_resp, com.google.api.ads.adwords.axis.v201809.o.TargetingIdeaPage.class);
            }
        }
    } catch (org.apache.axis.AxisFault axisFaultException) {
        // Unwrap declared fault types carried in the AxisFault detail so callers
        // can catch them directly; anything else propagates as the raw fault.
        if (axisFaultException.detail != null) {
            if (axisFaultException.detail instanceof java.rmi.RemoteException) {
                throw (java.rmi.RemoteException) axisFaultException.detail;
            }
            if (axisFaultException.detail instanceof com.google.api.ads.adwords.axis.v201809.cm.ApiException) {
                throw (com.google.api.ads.adwords.axis.v201809.cm.ApiException) axisFaultException.detail;
            }
        }
        throw axisFaultException;
    }
}
}
| |
package de.uni_hildesheim.sse.monitoring.runtime.utils;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import de.uni_hildesheim.sse.monitoring.runtime.boot.ArrayList;
/**
 * A hash map mapping primitive <code>long</code> keys to primitive
 * <code>long</code> values, avoiding the boxing overhead of
 * <code>java.util.Map&lt;Long, Long&gt;</code>.
 */
public class LongLongHashMap {
/**
 * The default capacity for hash map instances. Odd (and prime) so that
 * modulo-based bucket selection spreads keys reasonably well.
 */
public static final int DEFAULT_CAPACITY = 17;
/**
 * The maximum allowed capacity for hash map instances.
 */
public static final int MAXIMUM_CAPACITY = 1 << 30;
/**
 * The default load factor for hash map instances.
 */
public static final float DEFAULT_LOADFACTOR = 0.75f;
/**
 * Stores the {@link MapElement} pool.
 * NOTE(review): this pool is static and therefore shared by all instances;
 * no synchronization is visible here — confirm single-threaded use.
 */
private static ArrayList<MapElement> mapElementPool
    = new ArrayList<MapElement>(5);
/**
 * Stores the first bucket element for each hash slot
 * (separate chaining via {@code MapElement.getNext()}).
 */
private MapElement[] map = null;
/**
 * Stores the number of key-value mappings currently in the map.
 */
private int contents = 0;
/**
 * Stores the counter for objects created (book-keeping/statistics only;
 * does not affect map behavior).
 */
private int objectCounter = 0; // Counter for objects created
/**
 * Stores the current capacity (number of buckets).
 */
private int capacity = DEFAULT_CAPACITY;
/**
 * Stores the initial capacity.
 */
//private int initialCap = DEFAULT_CAPACITY;
/**
 * Stores the load factor (fill ratio that triggers a rehash).
 */
private float loadFactor = DEFAULT_LOADFACTOR;
/**
 * Stores the maximum load (element count at which a rehash occurs,
 * derived from capacity * loadFactor).
 */
private int maxLoad = 0;
/**
 * Stores if rehashing (automatic capacity growth) is permitted.
 */
private boolean rehashing = true;
/**
 * Constructs an empty instance with the default initial capacity
 * ({@link #DEFAULT_CAPACITY}) and the default load factor
 * ({@link #DEFAULT_LOADFACTOR}).
 */
public LongLongHashMap() {
    this(DEFAULT_CAPACITY, DEFAULT_LOADFACTOR);
}
/**
 * Constructs an empty instance with the given initial capacity and the
 * default load factor ({@link #DEFAULT_LOADFACTOR}).
 *
 * @param initialCapacity The initial capacity for this hash map.
 */
public LongLongHashMap(int initialCapacity) {
    this(initialCapacity, DEFAULT_LOADFACTOR);
}
/**
 * Constructs an empty instance with the given initial capacity and the
 * given load factor. Delegates the actual initialization to
 * {@code construct} (defined elsewhere in this class).
 *
 * @param initialCapacity The initial capacity for this hash map.
 * @param loadFactor The load factor for this hash map.
 */
public LongLongHashMap(int initialCapacity, float loadFactor) {
    construct(initialCapacity, loadFactor);
}
/**
 * Constructs a new LongLongHashMap with the same mappings as the specified
 * map. The LongLongHashMap is created with default load factor and an
 * initial capacity sufficient to hold the mappings in the specified map.
 *
 * @param map The map whose mappings are to be placed in this map.
 * @throws IllegalArgumentException if the specified map is
 *     <code>null</code>.
 */
public LongLongHashMap(LongLongHashMap map) {
    if (map == null) {
        throw new IllegalArgumentException("m may not be null");
    }
    //.... Determine parameters
    loadFactor = DEFAULT_LOADFACTOR;
    // Size the table so the source fits without an immediate rehash.
    capacity = (int) (map.size() / loadFactor);
    if (capacity < DEFAULT_CAPACITY) {
        // Avoid underflow
        capacity = DEFAULT_CAPACITY;
    } else if (capacity % 2 == 0) {
        // Make sure we have an odd value (better key spread for
        // modulo-based bucket selection)
        capacity++;
    }
    //.... Standard initialization for the internal map elements
    maxLoad = (int) (loadFactor * capacity + 0.5f);
    //initialCap = capacity;
    objectCounter += 2;
    this.map = new MapElement[capacity];
    //.... Copy the elements to the new map (same loop as putAll)
    long[] keys = map.keySet();
    for (int i = 0; i < map.size(); i++) {
        put(keys[i], map.get(keys[i]));
    }
}
/**
 * Copies every key-value mapping from the given hash map into this one.
 * Keys already present in this map get their values overwritten.
 *
 * @param map the source map to copy mappings from
 *
 * @since SugiBib 1.30
 */
public void putAll(LongLongHashMap map) {
    // keySet() returns a snapshot array of exactly size() keys, so a
    // for-each over it visits every mapping once.
    for (long key : map.keySet()) {
        put(key, map.get(key));
    }
}
/**
 * Returns the number of key/value mappings currently stored.
 *
 * @return The current number of mappings in the hash map.
 */
public int size() {
    return this.contents;
}
/**
 * Returns <code>true</code> if this map contains no key-value mappings.
 *
 * @return <code>true</code> in case of no elements, <code>false</code> else
 */
public boolean isEmpty() {
    // The comparison already yields the boolean; the former
    // "cond ? true : false" was redundant.
    return contents == 0;
}
/**
 * Returns the bookkeeping count of objects created in / by this instance.
 *
 * @return The number of objects created
 */
public int getObjectCounter() {
    return this.objectCounter;
}
/**
 * Returns the current capacity of the instance. If rehashing is enabled
 * (which it is per default), the capacity may have grown beyond the
 * initial value.
 *
 * @return The current capacity for this hash map.
 */
public int getCapacity() {
    return this.capacity;
}
/**
 * Returns the load factor of the instance.
 *
 * @return The load factor for this hash map.
 */
public float getLoadFactor() {
    return this.loadFactor;
}
/**
 * Returns the keys stored in this map.
 *
 * @return a newly allocated array holding every key for which a mapping
 *     exists in this hash map
 */
public long[] keySet() {
    objectCounter++; // statistics: one new array
    long[] result = new long[contents];
    int pos = 0;
    for (int bucket = 0; bucket < capacity; bucket++) {
        // Walk the collision chain of each non-empty bucket.
        for (MapElement elt = map[bucket]; elt != null; elt = elt.getNext()) {
            result[pos++] = elt.getKey();
        }
    }
    return result;
}
/**
 * Serializes this map to the given stream: first the entry count, then
 * each entry as a key/value pair of longs (readable by
 * {@code read(DataInputStream)}).
 *
 * @param out the stream to write to
 * @throws IOException in case of any I/O problem
 */
public void write(DataOutputStream out) throws IOException {
    out.writeInt(size());
    for (int bucket = 0; bucket < capacity; bucket++) {
        for (MapElement elt = map[bucket]; elt != null; elt = elt.getNext()) {
            out.writeLong(elt.getKey());
            out.writeLong(elt.getValue());
        }
    }
}
/**
 * Reads entries from the given stream (format produced by
 * {@code write(DataOutputStream)}) and puts them into this map.
 *
 * @param in the stream to read from
 * @throws IOException in case of any I/O problem
 *
 * @since 1.00
 */
public void read(DataInputStream in) throws IOException {
    final int count = in.readInt();
    for (int i = 0; i < count; i++) {
        // Java evaluates arguments left-to-right, so the key is read first.
        put(in.readLong(), in.readLong());
    }
}
/**
 * Enables or disables automatic rehashing (defaults to <code>true</code>).
 *
 * @param enabled A boolean indicating the desired rehashing status.
 */
public void setRehash(boolean enabled) {
    this.rehashing = enabled;
}
/**
 * Associates the specified value with the specified key in this map. If
 * the map previously contained a mapping for this key, the old value is
 * replaced.
 *
 * @param key The key with which the specified value is to be associated.
 * @param value The value to be associated with the specified key.
 */
public void put(long key, long value) {
    // Bucket index; key % capacity can be negative for negative keys,
    // so fold it into the non-negative range.
    int index = (int) (key % capacity);
    if (index < 0) {
        index = -index;
    }
    //.... This is a new key since no bucket exists
    if (map[index] == null) {
        objectCounter++; // statistics: one new chain element
        map[index] = getMapElementFromPool(key, value);
        contents++;
        // Grow the table once the load threshold is exceeded.
        if (contents > maxLoad) {
            rehash();
        }
    //.... A bucket already exists for this index: check whether
    // we already have a mapping for this key
    } else {
        MapElement me = map[index];
        while (true) {
            if (me.getKey() == key) {
                // We have a mapping: just replace the value for this elt
                me.setValue(value);
                return;
            } else {
                if (me.getNext() == null) {
                    // No next element: so we have no mapping for this key
                    // -> append a new element to the end of the chain.
                    objectCounter++;
                    me.setNext(getMapElementFromPool(key, value));
                    contents++;
                    if (contents > maxLoad) {
                        rehash();
                    }
                    return;
                } else {
                    me = me.getNext();
                }
            }
        }
    }
}
/**
 * Returns the value to which the specified key is mapped.
 * <p>
 * NOTE(review): contrary to what the former javadoc claimed, this method
 * cannot return <code>null</code> (the value type is primitive); it throws
 * {@link IllegalArgumentException} when no mapping exists. Use
 * {@code containsKey(long)} or {@code get(long, long)} to avoid the
 * exception.
 *
 * @param key The key whose associated value is to be returned.
 *
 * @return The value to which this map maps the specified key.
 * @throws IllegalArgumentException if this map contains no mapping for
 *     the given key.
 */
public long get(long key) {
    MapElement me = exists(key);
    if (me == null) {
        throw new IllegalArgumentException("not found");
    } else {
        return me.getValue();
    }
}
/**
 * Returns the value to which the specified key is mapped, or the given
 * default when this map contains no mapping for the key. (The value type
 * is primitive, so <code>null</code> is never involved.)
 *
 * @param key The key whose associated value is to be returned.
 * @param deflt the default value to be returned if <code>key</code> is not
 *     found
 *
 * @return The value to which this map maps the specified key, or
 *     <code>deflt</code> if no mapping exists.
 */
public long get(long key, long deflt) {
    MapElement me = exists(key);
    return me == null ? deflt : me.getValue();
}
/**
 * Tells whether this map contains a mapping for the specified key.
 *
 * @param key The key whose presence in this map is to be tested.
 *
 * @return <code>true</code> if this map contains a mapping for the
 *     specified key.
 */
public boolean containsKey(long key) {
    return null != exists(key);
}
/**
 * Removes the mapping for this key from this map if present; the freed
 * chain element is returned to the shared pool. Does nothing when the key
 * is absent.
 *
 * @param key The key whose mapping is to be removed from the map.
 */
public void remove(long key) {
    int index = (int) (key % capacity);
    if (index < 0) {
        index = -index;
    }
    // Walk the bucket chain, remembering the predecessor so the matching
    // element can be unlinked. An empty bucket skips the loop entirely.
    MapElement prev = null;
    for (MapElement elt = map[index]; elt != null; prev = elt, elt = elt.getNext()) {
        if (elt.getKey() == key) {
            if (prev == null) {
                // Head of the chain matches.
                map[index] = elt.getNext();
            } else {
                // Interior element matches - splice it out.
                prev.setNext(elt.getNext());
            }
            releaseMapElement(elt);
            contents--;
            return;
        }
    }
}
/**
 * Helper method: returns the chain element matching the key, or
 * <code>null</code> if no such element exists.
 *
 * @param key the key to search for
 * @return the element matching the key, or <code>null</code>
 */
private MapElement exists(long key) {
    int index = (int) (key % capacity);
    if (index < 0) {
        index = -index;
    }
    // Linear scan of the bucket's collision chain.
    for (MapElement elt = map[index]; elt != null; elt = elt.getNext()) {
        if (elt.getKey() == key) {
            return elt;
        }
    }
    return null;
}
/**
 * Grows the table to (2 * capacity + 1) slots and redistributes every
 * element, keeping chains short as the map fills up. No-op when rehashing
 * has been disabled via {@code setRehash(boolean)} or when growing would
 * exceed MAXIMUM_CAPACITY.
 */
private void rehash() {
    if (rehashing) {
        int newCapacity = 2 * capacity + 1;
        if (newCapacity > MAXIMUM_CAPACITY) {
            // Refuse to grow beyond the hard limit; chains simply get
            // longer from here on.
            return;
        }
        objectCounter += 2;
        MapElement[] newMap = new MapElement[newCapacity];
        MapElement me = null;
        MapElement t = null;
        MapElement next = null;
        int newIndex = 0;
        for (int index = 0; index < capacity; index++) {
            me = map[index];
            while (me != null) {
                // Remember the successor BEFORE relinking: the setNext()
                // calls below would otherwise lose the rest of the chain.
                next = me.getNext();
                newIndex = (int) (me.getKey() % newCapacity);
                if (newIndex < 0) {
                    newIndex = -newIndex;
                }
                if (newMap[newIndex] == null) {
                    // No element yet for this new index
                    newMap[newIndex] = me;
                    me.setNext(null);
                } else {
                    // Hook the element into the beginning of the chain
                    t = newMap[newIndex];
                    newMap[newIndex] = me;
                    me.setNext(t);
                }
                me = next;
            }
        }
        map = newMap;
        capacity = newCapacity;
        // Max. number of elements before a rehash occurs
        maxLoad = (int) (loadFactor * capacity + 0.5f);
        newMap = null;
    }
}
/**
 * Construction helper: validates the parameters, clamps the capacity into
 * [DEFAULT_CAPACITY, MAXIMUM_CAPACITY] and allocates the bucket array.
 *
 * @param initialCapacity The initial capacity for this hash map.
 * @param loadFactor The load factor for this hash map.
 * @throws IllegalArgumentException on a negative capacity or a
 *     non-positive / NaN load factor
 */
private void construct(int initialCapacity, float loadFactor) {
    if (initialCapacity < 0) {
        throw new IllegalArgumentException("Invalid initial capacity: "
                + initialCapacity);
    }
    // Clamp into the supported range.
    final int cap = Math.min(MAXIMUM_CAPACITY,
            Math.max(DEFAULT_CAPACITY, initialCapacity));
    if (loadFactor <= 0.0f || Float.isNaN(loadFactor)) {
        throw new IllegalArgumentException("Invalid load factor: "
                + loadFactor);
    }
    this.capacity = cap;
    this.loadFactor = loadFactor;
    objectCounter += 2;
    // Max. number of elements before a rehash occurs
    maxLoad = (int) (loadFactor * cap + 0.5f);
    map = new MapElement[cap];
    contents = 0;
}
/**
 * A singly linked bucket entry holding one key/value pair of the
 * enclosing map's collision chains.
 */
static class MapElement {
    /** The hash key. */
    private long key = 0;
    /** The mapped value. */
    private long value = 0;
    /** The successor in the bucket chain, or <code>null</code> at the end. */
    private MapElement next = null;

    /**
     * Creates a map element.
     *
     * @param key the hash key of the element
     * @param value the value to be stored
     */
    public MapElement(long key, long value) {
        this.key = key;
        this.value = value;
    }

    /** @return the key of this element */
    public long getKey() {
        return key;
    }

    /** @param key the new key for this element */
    void setKey(long key) {
        this.key = key;
    }

    /** @return the value of this element */
    public long getValue() {
        return value;
    }

    /** @param value the new value for this element */
    void setValue(long value) {
        this.value = value;
    }

    /** @return the next element in the chain, or <code>null</code> */
    public MapElement getNext() {
        return next;
    }

    /** @param next the new chain successor (may be <code>null</code>) */
    void setNext(MapElement next) {
        this.next = next;
    }
}
/**
 * Removes all entries from this map, returning every chain element to the
 * shared pool.
 *
 * @since 1.00
 */
public void clear() {
    for (int i = 0; i < capacity; i++) {
        MapElement me = map[i];
        while (me != null) {
            // BUG FIX: releaseMapElement() clears the element's next
            // pointer, so the successor must be fetched BEFORE releasing.
            // The old code released first and read getNext() afterwards,
            // which returned null - only the head of each chain ever made
            // it back into the pool.
            MapElement next = me.getNext();
            releaseMapElement(me);
            me = next;
        }
        map[i] = null;
    }
    contents = 0;
}
/**
 * Clean and finalize: returns all elements to the pool via {@code clear()}.
 * NOTE(review): finalize() is deprecated (for removal in recent JDKs);
 * consider migrating to java.lang.ref.Cleaner.
 *
 * @throws Throwable any exception that may occur during finalization
 */
@SuppressWarnings("deprecation")
protected void finalize() throws Throwable {
    clear();
    super.finalize();
}
/**
 * Returns a {@link MapElement} from the shared pool, initialized with the
 * given key and value; allocates a fresh element when the pool is empty.
 * The returned instance has to be given back via
 * {@link #releaseMapElement(MapElement)}.
 *
 * @param key the key value the resulting element should be initialized with
 * @param value the value the resulting element should be initialized with
 * @return the initialized instance
 */
private static final synchronized MapElement getMapElementFromPool(long key,
        long value) {
    final int poolSize = mapElementPool.size();
    if (poolSize == 0) {
        // Pool exhausted: the constructor already sets key and value.
        return new MapElement(key, value);
    }
    MapElement result = mapElementPool.remove(poolSize - 1);
    result.setKey(key);
    result.setValue(value);
    return result;
}
/**
 * Releases and clears the specified {@link MapElement}
 * to the shared pool.
 *
 * @param mapElement the {@link MapElement} to be released (must not
 * be <b>null</b>)
 */
private static final synchronized void releaseMapElement(
    MapElement mapElement) {
    // Detach from its chain so pooled elements do not keep other
    // (possibly live) elements reachable.
    mapElement.setNext(null);
    mapElementPool.add(mapElement);
}
}
| |
package expands;
import java.awt.BasicStroke;
import java.awt.Color;
//import java.awt.Dimension;
//import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.Shape;
import java.awt.geom.Ellipse2D;
import java.awt.geom.Point2D;
import java.awt.geom.Rectangle2D;
//import java.awt.geom.Rectangle2D;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import javax.imageio.IIOException;
import javax.imageio.ImageIO;
import javax.imageio.stream.FileImageOutputStream;
import javax.imageio.stream.ImageOutputStream;
//import javax.swing.JFrame;
//import javax.swing.JPanel;
/**
 * Generates animated "circle packing" GIF art: random points are scattered
 * in a rectangle, then repeatedly grown into non-overlapping colored
 * circles, one GIF frame per accepted circle.
 */
public class ArtGif2 {

    // A list of cool settings follows:
    // 300, 300, 1000, N/A, true, false (rectangular)
    // 500, 300, 1000, N/A, true, true (ellipse)

    /**
     * Renders {@code numImages} GIFs into a hard-coded output folder.
     * NOTE(review): the folder path is machine-specific - consider taking it
     * from {@code args}.
     */
    public static void main(String[] args) throws Exception {
        int width = 1000, height = 1000;
        int dots = 1000;
        int numImages = 1;
        String folderPath = "c:\\Users\\Davis\\computerArtGif2\\test6\\";
        File f = new File(folderPath);
        f.mkdirs();
        // Create a bunch of images (the constructor does all the work and
        // writes the GIF as a side effect). Dead commented-out Swing/preview
        // code removed.
        for (int i = 0; i < numImages; i++) {
            new ArtGif2(width, height, dots, getColorScheme(), true, false, new File(folderPath + "test" + i + ".gif"));
        }
    }

    /**
     * A circle with center (x, y), radius r and fill color. Made a static
     * nested class: it never used the enclosing instance, which the old
     * non-static inner class kept alive via a hidden reference.
     */
    private static class Circle {
        public float x, y, r;
        public Color color;

        public Circle(float x, float y, float r, Color color) {
            this.x = x;
            this.y = y;
            this.r = r;
            this.color = color;
        }
    }

    private static Random random = new Random();

    // Code for generating a nice random color scheme

    /** Returns a uniformly random opaque color. */
    public static Color randColor() {
        // BUG FIX: nextInt(255) yields 0..254, so a fully saturated channel
        // (255) could never occur; nextInt(256) covers the whole range.
        int r = random.nextInt(256);
        int g = random.nextInt(256);
        int b = random.nextInt(256);
        return new Color(r, g, b);
    }

    public static final int NUM_COLORS = 20;

    /** Builds a {@link #NUM_COLORS}-step linear gradient between two random colors. */
    public static Color[] getColorScheme() {
        Color[] colors = new Color[NUM_COLORS];
        Color c1 = randColor();
        Color c2 = randColor();
        for (int i = 0; i < NUM_COLORS; i++) {
            float percent2 = i / ((float) NUM_COLORS);
            float percent1 = 1 - percent2;
            int newR = (int) (c1.getRed() * percent1 + c2.getRed() * percent2);
            int newG = (int) (c1.getGreen() * percent1 + c2.getGreen() * percent2);
            int newB = (int) (c1.getBlue() * percent1 + c2.getBlue() * percent2);
            colors[i] = new Color(newR, newG, newB);
        }
        return colors;
    }

    /** Euclidean distance from point p to the center of circle c. */
    public static float dist(Point2D.Float p, Circle c) {
        return (float) Math.sqrt(((p.x - c.x) * (p.x - c.x)) + ((p.y - c.y) * (p.y - c.y)));
    }

    private ArrayList<Circle> circles;
    public static final float TOO_SMALL_RADII = 5.0f;
    public static final float CONTAINS_TOLERANCE = 30.0f;

    // Functions for generating the points in interesting patterns.

    // Rectangular.
    /** @return true when (x, y) lies inside the width x height rectangle. */
    public static boolean rectangleValid(float width, float height, float x, float y) {
        return x >= 0 && y >= 0 && x <= width && y <= height;
    }

    /** @return true when the circle fits inside the rectangle, within tolerance. */
    public static boolean rectangleContainsCircle(float width, float height, float x, float y, float r, float tolerance) {
        if (x - r <= (0 - tolerance) || x + r >= (width + tolerance) || y - r <= (0 - tolerance) || y + r >= (height + tolerance)) {
            return false;
        }
        return true;
    }

    /** Generates num uniformly random points inside the rectangle. */
    public static ArrayList<Point2D.Float> rectanglePoints(float width, float height, int num) {
        ArrayList<Point2D.Float> points = new ArrayList<Point2D.Float>();
        for (int i = 0; i < num; i++) {
            float x = random.nextFloat() * width;
            float y = random.nextFloat() * height;
            // Don't bother to check for point validity.
            points.add(new Point2D.Float(x, y));
        }
        return points;
    }

    // Ellipse.
    /** @return true when (x, y) lies inside the axis-aligned inscribed ellipse. */
    public static boolean ellipseValid(float width, float height, float x, float y) {
        float testValX = (x - width * 0.5f) * (x - width * 0.5f) / ((width * 0.5f) * (width * 0.5f));
        float testValY = (y - height * 0.5f) * (y - height * 0.5f) / ((height * 0.5f) * (height * 0.5f));
        return testValX + testValY <= 1;
    }

    /**
     * Approximate containment test: checks the four axis-extreme points of
     * the circle against the ellipse. The tolerance parameter is kept for
     * signature compatibility but unused.
     */
    public static boolean ellipseContainsCircle(float width, float height, float x, float y, float r, float tolerance) {
        return ellipseValid(width, height, x + r, y)
                && ellipseValid(width, height, x - r, y)
                && ellipseValid(width, height, x, y + r)
                && ellipseValid(width, height, x, y - r);
    }

    /** Generates num uniformly random points inside the ellipse (rejection sampling). */
    public ArrayList<Point2D.Float> ellipsePoints(float width, float height, int num) {
        ArrayList<Point2D.Float> points = new ArrayList<Point2D.Float>();
        while (points.size() < num) {
            float x = random.nextFloat() * width;
            float y = random.nextFloat() * height;
            if (ellipseValid(width, height, x, y)) {
                points.add(new Point2D.Float(x, y));
            }
        }
        return points;
    }

    /**
     * Builds the animated GIF: scatters {@code num} random points, grows
     * non-overlapping circles around randomly chosen points until no points
     * remain, and records one frame per accepted circle.
     *
     * @param width half the image width (the canvas is width * 2)
     * @param height half the image height (the canvas is height * 2)
     * @param num number of seed points
     * @param colorScheme gradient cycled through for circle colors
     * @param tooSmall when true, circles with radius &lt;= TOO_SMALL_RADII are skipped
     * @param withinBounds when true, circles must fit the rectangle (within CONTAINS_TOLERANCE)
     * @param write the GIF file to create
     * @throws Exception on any I/O or encoding failure
     */
    public ArtGif2(int width, int height, int num, Color[] colorScheme, boolean tooSmall, boolean withinBounds, File write) throws Exception {
        // Get ready to make a cool gif - ART STUFF
        List<GifFrame> gifFrames = new ArrayList<GifFrame>();
        // FIX: try-with-resources - the old code never closed the stream and
        // leaked it when an exception was thrown mid-way.
        try (OutputStream output = new FileOutputStream(write)) {
            int transparantColor = 0xFF00FF; // purple
            BufferedImage bi = new BufferedImage(width * 2, height * 2, BufferedImage.TYPE_INT_ARGB);
            Graphics2D ig2 = bi.createGraphics();
            ig2.setColor(Color.WHITE);
            ig2.fillRect(0, 0, width * 2, height * 2);
            BufferedImage gif = ImageUtil.convertRGBAToGIF(bi, transparantColor);
            // every frame takes 100ms
            long delay = 100;
            // make transparent pixels not 'shine through'
            String disposal = GifFrame.RESTORE_TO_PREVIOUS;
            // add frame to sequence
            gifFrames.add(new GifFrame(gif, delay, disposal));
            // Maximum circle radius: a quarter of the rectangle's diagonal.
            float maxRadius = (float) Math.sqrt((width * width) + (height * height)) * 0.25f;
            // Create randomly distributed points within the rectangle defined by width and height.
            ArrayList<Point2D.Float> points = rectanglePoints(width, height, num);
            // Randomly select points and expand them into circles, removing any points that would be enclosed by the circle.
            // ROBUSTNESS FIX: start "done" when there are no points at all;
            // random.nextInt(0) below would otherwise throw for num <= 0.
            boolean done = points.isEmpty();
            int colorIndex = 0; // Index for which color in the gradient has been assigned already.
            circles = new ArrayList<Circle>();
            int indexCounter = 1;
            while (!done) {
                System.out.println("Image " + indexCounter);
                indexCounter++;
                int index = random.nextInt(points.size());
                Point2D.Float p1 = points.get(index);
                boolean rGood = false;
                float radius = 0;
                // Re-roll the radius until the circle overlaps nothing
                // (and, optionally, stays inside the rectangle).
                while (!rGood) {
                    radius = random.nextFloat() * maxRadius;
                    boolean passed = true;
                    for (Circle check : circles) {
                        if (dist(p1, check) < radius + check.r) {
                            passed = false;
                            break;
                        }
                    }
                    // Optional check to remove circles that pass outside of the boundary.
                    if (withinBounds) {
                        if (!rectangleContainsCircle(width, height, p1.x, p1.y, radius, CONTAINS_TOLERANCE)) {
                            passed = false;
                        }
                    }
                    if (passed) {
                        rGood = true;
                    }
                }
                Circle c = new Circle(p1.x, p1.y, radius, colorScheme[colorIndex]);
                colorIndex++;
                if (colorIndex >= NUM_COLORS) {
                    colorIndex = 0;
                }
                // Optional check to remove circles with really small radii
                if (c.r > TOO_SMALL_RADII || (!tooSmall && c.r <= TOO_SMALL_RADII)) {
                    circles.add(c);
                    // Draw the circle - ART STUFF
                    ig2.setColor(c.color);
                    ig2.fill(new Ellipse2D.Float(c.x - c.r + width * 0.5f, c.y - c.r + height * 0.5f, 2 * c.r, 2 * c.r));
                    gif = ImageUtil.convertRGBAToGIF(bi, transparantColor);
                    // add frame to sequence
                    gifFrames.add(new GifFrame(gif, delay, disposal));
                }
                // Iterate over the points still existing and remove ones within the new circle
                for (int i = 0; i < points.size(); i++) {
                    if (dist(points.get(i), c) <= radius) {
                        points.remove(i);
                        i--;
                    }
                }
                if (points.size() <= 0) {
                    done = true;
                }
            }
            // save the gif
            int loopCount = 0; // loop indefinitely
            ImageUtil.saveAnimatedGIF(output, gifFrames, loopCount);
        }
    }

    /** @return the circles accepted during construction, in creation order */
    public ArrayList<Circle> getCircles() {
        return circles;
    }
}
| |
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.rest.service.api.history;
import static net.javacrumbs.jsonunit.assertj.JsonAssertions.assertThatJson;
import java.util.HashMap;
import org.apache.http.HttpStatus;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.StringEntity;
import org.flowable.common.engine.impl.identity.Authentication;
import org.flowable.engine.runtime.ProcessInstance;
import org.flowable.engine.test.Deployment;
import org.flowable.rest.service.BaseSpringRestTestCase;
import org.flowable.rest.service.api.RestUrls;
import org.flowable.task.api.Task;
import org.junit.Test;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import net.javacrumbs.jsonunit.core.Option;
/**
* Test for REST-operation related to the historic process instance query resource.
*
* @author Tijs Rademakers
*/
public class HistoricProcessInstanceQueryResourceTest extends BaseSpringRestTestCase {

    /**
     * Test querying historic process instance based on variables. POST query/historic-process-instances
     */
    @Test
    @Deployment
    public void testQueryProcessInstancesWithVariables() throws Exception {
        // Start two instances with identical variables; complete the task of
        // the first so one instance is finished and one is still running.
        HashMap<String, Object> processVariables = new HashMap<>();
        processVariables.put("stringVar", "Azerty");
        processVariables.put("intVar", 67890);
        processVariables.put("booleanVar", false);
        Authentication.setAuthenticatedUserId("historyQueryAndSortUser");
        ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("oneTaskProcess", processVariables);
        Task task = taskService.createTaskQuery().processInstanceId(processInstance.getId()).singleResult();
        taskService.complete(task.getId());
        ProcessInstance processInstance2 = runtimeService.startProcessInstanceByKey("oneTaskProcess", processVariables);
        String url = RestUrls.createRelativeResourceUrl(RestUrls.URL_HISTORIC_PROCESS_INSTANCE_QUERY);
        // Process variables
        ObjectNode requestNode = objectMapper.createObjectNode();
        ArrayNode variableArray = objectMapper.createArrayNode();
        ObjectNode variableNode = objectMapper.createObjectNode();
        variableArray.add(variableNode);
        requestNode.set("variables", variableArray);
        // String equals
        variableNode.put("name", "stringVar");
        variableNode.put("value", "Azerty");
        variableNode.put("operation", "equals");
        assertResultsPresentInPostDataResponse(url, requestNode, processInstance.getId(), processInstance2.getId());
        // Integer equals
        variableNode.removeAll();
        variableNode.put("name", "intVar");
        variableNode.put("value", 67890);
        variableNode.put("operation", "equals");
        assertResultsPresentInPostDataResponse(url, requestNode, processInstance.getId(), processInstance2.getId());
        // Boolean equals
        variableNode.removeAll();
        variableNode.put("name", "booleanVar");
        variableNode.put("value", false);
        variableNode.put("operation", "equals");
        assertResultsPresentInPostDataResponse(url, requestNode, processInstance.getId(), processInstance2.getId());
        // String not equals
        variableNode.removeAll();
        variableNode.put("name", "stringVar");
        variableNode.put("value", "ghijkl");
        variableNode.put("operation", "notEquals");
        assertResultsPresentInPostDataResponse(url, requestNode, processInstance.getId(), processInstance2.getId());
        // Integer not equals
        variableNode.removeAll();
        variableNode.put("name", "intVar");
        variableNode.put("value", 45678);
        variableNode.put("operation", "notEquals");
        assertResultsPresentInPostDataResponse(url, requestNode, processInstance.getId(), processInstance2.getId());
        // Boolean not equals
        variableNode.removeAll();
        variableNode.put("name", "booleanVar");
        variableNode.put("value", true);
        variableNode.put("operation", "notEquals");
        assertResultsPresentInPostDataResponse(url, requestNode, processInstance.getId(), processInstance2.getId());
        // String equals ignore case
        variableNode.removeAll();
        variableNode.put("name", "stringVar");
        variableNode.put("value", "azeRTY");
        variableNode.put("operation", "equalsIgnoreCase");
        assertResultsPresentInPostDataResponse(url, requestNode, processInstance.getId(), processInstance2.getId());
        // String not equals ignore case (not supported) - expect 400
        variableNode.removeAll();
        variableNode.put("name", "stringVar");
        variableNode.put("value", "HIJKLm");
        variableNode.put("operation", "notEqualsIgnoreCase");
        assertErrorResult(url, requestNode, HttpStatus.SC_BAD_REQUEST);
        // String equals without value
        variableNode.removeAll();
        variableNode.put("value", "Azerty");
        variableNode.put("operation", "equals");
        assertResultsPresentInPostDataResponse(url, requestNode, processInstance.getId(), processInstance2.getId());
        // String equals with non existing value - expect empty result
        variableNode.removeAll();
        variableNode.put("value", "Azerty2");
        variableNode.put("operation", "equals");
        assertResultsPresentInPostDataResponse(url, requestNode);
        // String like ignore case
        variableNode.removeAll();
        variableNode.put("name", "stringVar");
        variableNode.put("value", "azerty");
        variableNode.put("operation", "likeIgnoreCase");
        assertResultsPresentInPostDataResponse(url, requestNode, processInstance.getId(), processInstance2.getId());
        variableNode.removeAll();
        variableNode.put("name", "stringVar");
        variableNode.put("value", "azerty2");
        variableNode.put("operation", "likeIgnoreCase");
        assertResultsPresentInPostDataResponse(url, requestNode);
        // Only the completed instance matches "finished"
        requestNode = objectMapper.createObjectNode();
        requestNode.put("finished", true);
        assertResultsPresentInPostDataResponse(url, requestNode, processInstance.getId());
        // Only the running instance matches "finished = false"
        requestNode = objectMapper.createObjectNode();
        requestNode.put("finished", false);
        assertResultsPresentInPostDataResponse(url, requestNode, processInstance2.getId());
        requestNode = objectMapper.createObjectNode();
        requestNode.put("processDefinitionId", processInstance.getProcessDefinitionId());
        assertResultsPresentInPostDataResponse(url, requestNode, processInstance.getId(), processInstance2.getId());
        requestNode = objectMapper.createObjectNode();
        requestNode.put("processDefinitionKey", "oneTaskProcess");
        assertResultsPresentInPostDataResponse(url, requestNode, processInstance.getId(), processInstance2.getId());
        // Sorted query: verify payload fields of the first (oldest) instance
        requestNode = objectMapper.createObjectNode();
        requestNode.put("processDefinitionKey", "oneTaskProcess");
        HttpPost httpPost = new HttpPost(SERVER_URL_PREFIX + url + "?sort=startTime");
        httpPost.setEntity(new StringEntity(requestNode.toString()));
        CloseableHttpResponse response = executeRequest(httpPost, HttpStatus.SC_OK);
        // Check status and size
        JsonNode dataNode = objectMapper.readTree(response.getEntity().getContent()).get("data");
        closeResponse(response);
        assertThatJson(dataNode)
                .when(Option.IGNORING_EXTRA_FIELDS)
                .isEqualTo("["
                        + "{"
                        + " id: '" + processInstance.getId() + "',"
                        + " processDefinitionName: 'The One Task Process',"
                        + " processDefinitionDescription: 'One task process description',"
                        + " startTime: '${json-unit.any-string}',"
                        + " startUserId: '" + processInstance.getStartUserId() + "'"
                        + "},"
                        + "{"
                        + " id: '" + processInstance2.getId() + "'"
                        + "}"
                        + "]");
    }

    /**
     * Test querying historic process instances filtered by a single active
     * activity id. POST query/historic-process-instances
     */
    @Test
    @Deployment(resources = { "org/flowable/rest/service/api/twoTaskProcess.bpmn20.xml" })
    public void testQueryProcessInstancesByActiveActivityId() throws Exception {
        ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("oneTaskProcess");
        ObjectNode requestNode = objectMapper.createObjectNode();
        requestNode.put("activeActivityId", "processTask");
        String url = RestUrls.createRelativeResourceUrl(RestUrls.URL_HISTORIC_PROCESS_INSTANCE_QUERY);
        HttpPost httpPost = new HttpPost(SERVER_URL_PREFIX + url);
        httpPost.setEntity(new StringEntity(requestNode.toString()));
        CloseableHttpResponse response = executeRequest(httpPost, HttpStatus.SC_OK);
        JsonNode rootNode = objectMapper.readTree(response.getEntity().getContent());
        closeResponse(response);
        // First task is active: the instance must be found
        assertThatJson(rootNode)
                .when(Option.IGNORING_EXTRA_FIELDS)
                .isEqualTo("{"
                        + "data: [ {"
                        + " id: '" + processInstance.getId() + "',"
                        + " processDefinitionId: '" + processInstance.getProcessDefinitionId() + "'"
                        + "} ]"
                        + "}");
        // Second task is not active yet: expect an empty result
        requestNode = objectMapper.createObjectNode();
        requestNode.put("activeActivityId", "processTask2");
        httpPost.setEntity(new StringEntity(requestNode.toString()));
        response = executeRequest(httpPost, HttpStatus.SC_OK);
        rootNode = objectMapper.readTree(response.getEntity().getContent());
        closeResponse(response);
        assertThatJson(rootNode)
                .when(Option.IGNORING_EXTRA_FIELDS)
                .isEqualTo("{"
                        + "data: []"
                        + "}");
        // Completing the first task activates the second one
        Task task = taskService.createTaskQuery().processInstanceId(processInstance.getId()).singleResult();
        taskService.complete(task.getId());
        requestNode = objectMapper.createObjectNode();
        requestNode.put("activeActivityId", "processTask2");
        httpPost.setEntity(new StringEntity(requestNode.toString()));
        response = executeRequest(httpPost, HttpStatus.SC_OK);
        rootNode = objectMapper.readTree(response.getEntity().getContent());
        closeResponse(response);
        assertThatJson(rootNode)
                .when(Option.IGNORING_EXTRA_FIELDS)
                .isEqualTo("{"
                        + "data: [ {"
                        + " id: '" + processInstance.getId() + "',"
                        + " processDefinitionId: '" + processInstance.getProcessDefinitionId() + "'"
                        + "} ]"
                        + "}");
    }

    /**
     * Test querying historic process instances filtered by a set of active
     * activity ids (match on any). POST query/historic-process-instances
     */
    @Test
    @Deployment(resources = { "org/flowable/rest/service/api/twoTaskProcess.bpmn20.xml" })
    public void testQueryProcessInstancesByActiveActivityIds() throws Exception {
        ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("oneTaskProcess");
        ObjectNode requestNode = objectMapper.createObjectNode();
        ArrayNode activityIdArray = requestNode.putArray("activeActivityIds");
        activityIdArray.add("processTask");
        activityIdArray.add("processTask3");
        String url = RestUrls.createRelativeResourceUrl(RestUrls.URL_HISTORIC_PROCESS_INSTANCE_QUERY);
        HttpPost httpPost = new HttpPost(SERVER_URL_PREFIX + url);
        httpPost.setEntity(new StringEntity(requestNode.toString()));
        CloseableHttpResponse response = executeRequest(httpPost, HttpStatus.SC_OK);
        JsonNode rootNode = objectMapper.readTree(response.getEntity().getContent());
        closeResponse(response);
        // "processTask" is active, so the any-of filter matches
        assertThatJson(rootNode)
                .when(Option.IGNORING_EXTRA_FIELDS)
                .isEqualTo("{"
                        + "data: [ {"
                        + " id: '" + processInstance.getId() + "',"
                        + " processDefinitionId: '" + processInstance.getProcessDefinitionId() + "'"
                        + "} ]"
                        + "}");
        // Neither processTask2 nor processTask3 is active yet
        requestNode = objectMapper.createObjectNode();
        activityIdArray = requestNode.putArray("activeActivityIds");
        activityIdArray.add("processTask2");
        activityIdArray.add("processTask3");
        httpPost.setEntity(new StringEntity(requestNode.toString()));
        response = executeRequest(httpPost, HttpStatus.SC_OK);
        rootNode = objectMapper.readTree(response.getEntity().getContent());
        closeResponse(response);
        assertThatJson(rootNode)
                .when(Option.IGNORING_EXTRA_FIELDS)
                .isEqualTo("{"
                        + "data: []"
                        + "}");
        // Completing the first task activates processTask2
        Task task = taskService.createTaskQuery().processInstanceId(processInstance.getId()).singleResult();
        taskService.complete(task.getId());
        requestNode = objectMapper.createObjectNode();
        activityIdArray = requestNode.putArray("activeActivityIds");
        activityIdArray.add("processTask2");
        activityIdArray.add("processTask3");
        httpPost.setEntity(new StringEntity(requestNode.toString()));
        response = executeRequest(httpPost, HttpStatus.SC_OK);
        rootNode = objectMapper.readTree(response.getEntity().getContent());
        closeResponse(response);
        assertThatJson(rootNode)
                .when(Option.IGNORING_EXTRA_FIELDS)
                .isEqualTo("{"
                        + "data: [ {"
                        + " id: '" + processInstance.getId() + "',"
                        + " processDefinitionId: '" + processInstance.getProcessDefinitionId() + "'"
                        + "} ]"
                        + "}");
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.vault.rcp.impl;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.Collections;
import java.util.Dictionary;
import java.util.Hashtable;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.function.Function;
import java.util.stream.Collectors;
import javax.jcr.Credentials;
import javax.jcr.RepositoryException;
import javax.jcr.SimpleCredentials;
import org.apache.jackrabbit.spi2dav.ConnectionOptions;
import org.apache.jackrabbit.vault.fs.api.RepositoryAddress;
import org.apache.jackrabbit.vault.fs.api.WorkspaceFilter;
import org.apache.jackrabbit.vault.fs.config.ConfigurationException;
import org.apache.jackrabbit.vault.fs.config.DefaultWorkspaceFilter;
import org.apache.jackrabbit.vault.rcp.RcpTask;
import org.apache.jackrabbit.vault.rcp.RcpTaskManager;
import org.apache.jackrabbit.vault.util.RepositoryCopier;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.osgi.framework.BundleContext;
import org.osgi.service.cm.Configuration;
import org.osgi.service.cm.ConfigurationAdmin;
import org.osgi.service.component.annotations.Activate;
import org.osgi.service.component.annotations.Component;
import org.osgi.service.component.annotations.Deactivate;
import org.osgi.service.component.annotations.Modified;
import org.osgi.service.component.annotations.Reference;
import org.osgi.service.component.propertytypes.ServiceVendor;
import org.osgi.service.metatype.annotations.AttributeDefinition;
import org.osgi.service.metatype.annotations.Designate;
import org.osgi.service.metatype.annotations.ObjectClassDefinition;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.core.JsonGenerationException;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.MapperFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.module.SimpleModule;
/**
 * {@code RcpTaskManager} implementation.
 * <p>
 * Tasks are serialized to JSON and persisted in an OSGi configuration via
 * {@link ConfigurationAdmin}. The sensitive source credentials are deliberately
 * kept out of that configuration and stored separately in a bundle-context
 * data file ({@value #TASKS_DATA_FILE_NAME}).
 */
@Component
@ServiceVendor("The Apache Software Foundation")
@Designate(ocd = RcpTaskManagerImpl.ComponentPropertyType.class)
public class RcpTaskManagerImpl implements RcpTaskManager {

    @ObjectClassDefinition(name = "Apache Jackrabbit FileVault RCP Task Manager", description = "Manages tasks for RCP (remote copy)")
    public static @interface ComponentPropertyType {
        @AttributeDefinition(name = "Serialized Tasks", description = "The JSON serialization of all tasks. Credentials are not stored in here, but rather in the bundle context data file.")
        String serialized_tasks_json() default "";
    }

    /** name of the bundle-context data file holding the per-task credentials */
    private static final String TASKS_DATA_FILE_NAME = "tasks";

    /** configuration property key under which the JSON serialization of all tasks is stored */
    private static final String PROP_TASKS_SERIALIZATION = "serialized.tasks.json";

    /** PID of the OSGi configuration used for persistence */
    private static final String PID = RcpTaskManagerImpl.class.getName();

    /** default logger */
    private static final Logger log = LoggerFactory.getLogger(RcpTaskManagerImpl.class);

    /** all known tasks, keyed by task id */
    SortedMap<String, RcpTaskImpl> tasks;

    /** data file for the credentials; may be null if the platform provides no file system support */
    private File dataFile;

    /** mapper used for (de)serializing tasks and credentials; configured once in activate() */
    private final ObjectMapper mapper = new ObjectMapper();

    /** the OSGi configuration backing the task persistence */
    private Configuration configuration;

    @Reference
    ConfigurationAdmin configurationAdmin;

    /** the serialized tasks which have been processed (for detecting relevant updates) */
    private String serializedTasks;

    /**
     * Activates the component: configures the Jackson mapper (mix-ins for the
     * non-Jackson-aware JCR/vault types) and restores previously persisted tasks.
     *
     * @param bundleContext the bundle context (used for the credentials data file)
     * @param newConfigProperties the component's configuration properties
     * @throws IOException if the OSGi configuration cannot be retrieved
     */
    @Activate
    void activate(BundleContext bundleContext, Map<String, Object> newConfigProperties) throws IOException {
        mapper.configure(MapperFeature.PROPAGATE_TRANSIENT_MARKER, true);
        mapper.addMixIn(RepositoryAddress.class, RepositoryAddressMixin.class);
        SimpleModule module = new SimpleModule();
        module.addSerializer(DefaultWorkspaceFilter.class, new DefaultWorkspaceFilterSerializer());
        module.addDeserializer(WorkspaceFilter.class, new WorkspaceFilterDeserializer());
        mapper.registerModule(module);
        mapper.addMixIn(SimpleCredentials.class, SimpleCredentialsMixin.class);
        mapper.addMixIn(ConnectionOptions.class, ConnectionOptionsMixin.class);
        mapper.addMixIn(RepositoryCopier.class, RepositoryCopierMixin.class);
        this.dataFile = bundleContext.getDataFile(TASKS_DATA_FILE_NAME);
        this.configuration = configurationAdmin.getConfiguration(PID);
        try {
            tasks = loadTasks((String) newConfigProperties.get(PROP_TASKS_SERIALIZATION), dataFile);
        } catch (IOException e) {
            // start with an empty task list rather than failing component activation
            log.error("Could not restore previous tasks", e);
            tasks = new TreeMap<>();
        }
    }

    // default constructor, used by DS 1.3
    public RcpTaskManagerImpl() {
    }

    // alternative constructor, currently only used for testing
    public RcpTaskManagerImpl(BundleContext bundleContext, ConfigurationAdmin configurationAdmin, Map<String, Object> newConfigProperties) throws IOException {
        this.configurationAdmin = configurationAdmin;
        activate(bundleContext, newConfigProperties);
    }

    /**
     * Deactivates the component: stops all running tasks and persists their state.
     */
    @Deactivate
    void deactivate() throws IOException, RepositoryException {
        log.info("RcpTaskManager deactivated. Stopping running tasks...");
        for (RcpTask task : tasks.values()) {
            task.stop();
        }
        // necessary again, because tasks are not fully immutable (i.e. may be modified after addTask or editTask has been called)
        persistTasks();
        log.info("RcpTaskManager deactivated. Stopping running tasks...done.");
    }

    /**
     * Called on configuration changes. The manager itself writes to its own
     * configuration (see {@link #persistTasks()}), so only changes whose
     * serialization differs from the last one processed are treated as external
     * and trigger a reload.
     */
    @Modified
    void modified(Map<String, Object> newConfigProperties) throws IOException {
        this.configuration = configurationAdmin.getConfiguration(PID);
        // might be triggered internally or externally
        // only external events are relevant
        if (serializedTasks == null || !serializedTasks.equals(newConfigProperties.get(PROP_TASKS_SERIALIZATION))) {
            log.info("Detected external properties change");
            tasks = loadTasks((String) newConfigProperties.get(PROP_TASKS_SERIALIZATION), null);
        }
    }

    /** Copies all entries of the given dictionary into a regular {@link Map}. */
    static Map<String, Object> createMapFromDictionary(Dictionary<String, Object> dictionary) {
        List<String> keys = Collections.list(dictionary.keys());
        return keys.stream().collect(Collectors.toMap(Function.identity(), dictionary::get));
    }

    /**
     * Deserializes the tasks from the given JSON string and, if available,
     * enriches them with the credentials stored in the given data file.
     *
     * @param serializedTasks the JSON serialization of the tasks (may be null or empty)
     * @param dataFile the credentials data file (may be null, e.g. on external reloads)
     * @return the restored tasks, or an empty map if nothing was persisted
     * @throws IOException if deserialization or reading the data file fails
     */
    private SortedMap<String, RcpTaskImpl> loadTasks(String serializedTasks, File dataFile) throws IOException {
        // null and empty both mean: nothing has been persisted yet
        if (serializedTasks == null || serializedTasks.isEmpty()) {
            log.info("No previously persisted tasks found in OSGi configuration");
            return new TreeMap<>();
        }
        SortedMap<String, RcpTaskImpl> tasks = mapper.readValue(serializedTasks, new TypeReference<SortedMap<String, RcpTaskImpl>>() {});
        validateTasks(tasks);
        // additionally load credentials data from bundle context
        if (dataFile != null && dataFile.exists()) {
            loadTasksCredentials(tasks, dataFile);
        } else {
            log.info("No previously persisted task credentials found at '{}'", dataFile);
        }
        this.serializedTasks = serializedTasks;
        return tasks;
    }

    /**
     * Validates the deserialized tasks and wires in the class loader needed to
     * resolve the repository factory.
     *
     * @throws IllegalArgumentException if a map key does not match its task's id
     */
    void validateTasks(SortedMap<String, RcpTaskImpl> tasks) {
        for (Map.Entry<String, RcpTaskImpl> entry : tasks.entrySet()) {
            // make sure id of map entry is task id
            if (!entry.getKey().equals(entry.getValue().getId())) {
                throw new IllegalArgumentException("Id of entry " + entry.getKey() + " does not match its task id " + entry.getValue().getId());
            }
            // set classloader to use for retrieving the RepositoryImpl
            entry.getValue().setClassLoader(getClassLoaderForRepositoryFactory());
        }
    }

    /**
     * Restores the source credentials of each task from the given properties
     * file; tasks without a stored entry are left untouched.
     */
    private void loadTasksCredentials(Map<String, RcpTaskImpl> tasks, File dataFile) throws IOException {
        Properties props = new Properties();
        try (FileInputStream inputStream = new FileInputStream(dataFile)) {
            props.load(inputStream);
        }
        for (RcpTaskImpl task : tasks.values()) {
            String serializedCredentials = props.getProperty(task.getId());
            if (serializedCredentials != null) {
                Credentials credentials = mapper.readValue(serializedCredentials, SimpleCredentials.class);
                task.setSourceCredentials(credentials);
            }
        }
    }

    /**
     * Persists all tasks in the OSGi configuration (and their credentials in the
     * data file). Wraps checked exceptions, since callers are simple mutators.
     *
     * @throws IllegalStateException if persisting fails
     */
    private void persistTasks() {
        Dictionary<String, Object> configProperties = new Hashtable<>();
        try {
            persistTasks(configProperties, dataFile);
            configuration.updateIfDifferent(configProperties);
            log.info("Persisted RCP tasks in OSGi configuration");
        } catch (RepositoryException | IOException e) {
            throw new IllegalStateException("Could not persist tasks", e);
        }
    }

    private void persistTasks(Dictionary<String, Object> configProperties, File dataFile) throws RepositoryException, JsonGenerationException, JsonMappingException, IOException {
        serializedTasks = mapper.writerWithDefaultPrettyPrinter().writeValueAsString(tasks);
        configProperties.put(PROP_TASKS_SERIALIZATION, serializedTasks);
        // additionally persist the sensitive data in a data file
        if (dataFile != null) {
            persistTasksCredentials(dataFile);
            // only log success when the credentials were actually written
            log.info("Persisted sensitive part of RCP tasks in '{}'", dataFile);
        }
    }

    private void persistTasksCredentials() {
        try {
            persistTasksCredentials(dataFile);
        } catch (IOException e) {
            throw new IllegalStateException("Could not persist tasks credentials", e);
        }
    }

    /** Writes the source credentials of all tasks to the given data file. */
    private void persistTasksCredentials(File dataFile) throws IOException {
        // persist credentials of tasks to data file
        Properties props = new Properties();
        for (RcpTaskImpl task : tasks.values()) {
            // include type information
            String value = mapper.writeValueAsString(task.getSourceCredentials());
            props.setProperty(task.getId(), value);
        }
        try (FileOutputStream output = new FileOutputStream(dataFile)) {
            props.store(output, "Credentials used for Apache Jackrabbit FileVault RCP");
        }
    }

    public RcpTask getTask(String taskId) {
        return tasks.get(taskId);
    }

    public Map<String, RcpTask> getTasks() {
        return Collections.unmodifiableMap(tasks);
    }

    @Override
    public RcpTask addTask(RepositoryAddress src, ConnectionOptions connectionOptions, Credentials srcCreds, String dst, String id, List<String> excludes, @Nullable Boolean recursive)
            throws ConfigurationException {
        if (id != null && id.length() > 0 && tasks.containsKey(id)) {
            throw new IllegalArgumentException("Task with id " + id + " already exists.");
        }
        RcpTaskImpl task = new RcpTaskImpl(getClassLoaderForRepositoryFactory(), src, connectionOptions, srcCreds, dst, id, excludes, recursive);
        tasks.put(task.getId(), task);
        persistTasks();
        return task;
    }

    @Override
    public RcpTask addTask(RepositoryAddress src, ConnectionOptions connectionOptions, Credentials srcCreds, String dst, String id, WorkspaceFilter srcFilter,
            @Nullable Boolean recursive) {
        if (id != null && id.length() > 0 && tasks.containsKey(id)) {
            throw new IllegalArgumentException("Task with id " + id + " already exists.");
        }
        RcpTaskImpl task = new RcpTaskImpl(getClassLoaderForRepositoryFactory(), src, connectionOptions, srcCreds, dst, id, srcFilter, recursive);
        tasks.put(task.getId(), task);
        persistTasks();
        return task;
    }

    @Override
    public RcpTask editTask(@NotNull String taskId, @Nullable RepositoryAddress src, @Nullable ConnectionOptions connectionOptions, @Nullable Credentials srcCreds, @Nullable String dst, @Nullable List<String> excludes,
            @Nullable WorkspaceFilter srcFilter, @Nullable Boolean recursive) throws ConfigurationException {
        RcpTaskImpl oldTask = tasks.get(taskId);
        if (oldTask == null) {
            throw new IllegalArgumentException("No such task with id='" + taskId + "'");
        }
        // tasks are replaced rather than mutated; null arguments keep the old task's values
        RcpTaskImpl newTask = new RcpTaskImpl(oldTask, src, connectionOptions, srcCreds, dst, excludes, srcFilter, recursive);
        tasks.put(taskId, newTask);
        persistTasks();
        return newTask;
    }

    @Override
    public boolean removeTask(String taskId) {
        RcpTask rcpTask = tasks.remove(taskId);
        if (rcpTask != null) {
            rcpTask.stop();
            persistTasks();
            return true;
        }
        return false;
    }

    @Override
    public void setSourceCredentials(String taskId, Credentials srcCreds) {
        RcpTaskImpl task = tasks.get(taskId);
        if (task == null) {
            throw new IllegalArgumentException("No such task with id='" + taskId + "'");
        }
        task.setSourceCredentials(srcCreds);
        // credentials live only in the data file, not in the OSGi configuration
        persistTasksCredentials();
    }

    protected ClassLoader getClassLoaderForRepositoryFactory() {
        // everything is embedded in the current bundle, therefore just take the bundle classloader
        return this.getClass().getClassLoader();
    }
}
| |
/*
* Copyright 2012 GitHub Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.mobile.ui.code;
import static android.app.Activity.RESULT_OK;
import static com.github.mobile.Intents.EXTRA_REPOSITORY;
import static com.github.mobile.RequestCodes.REF_UPDATE;
import android.app.Activity;
import android.content.Context;
import android.os.Bundle;
import android.text.method.LinkMovementMethod;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemClickListener;
import android.widget.ListView;
import android.widget.ProgressBar;
import android.widget.TextView;
import com.github.kevinsawicki.wishlist.ViewUtils;
import com.github.mobile.R;
import com.github.mobile.core.code.FullTree;
import com.github.mobile.core.code.FullTree.Entry;
import com.github.mobile.core.code.FullTree.Folder;
import com.github.mobile.core.code.RefreshTreeTask;
import com.github.mobile.core.ref.RefUtils;
import com.github.mobile.ui.DialogFragment;
import com.github.mobile.ui.DialogFragmentActivity;
import com.github.mobile.ui.HeaderFooterListAdapter;
import com.github.mobile.ui.StyledText;
import com.github.mobile.ui.ref.BranchFileViewActivity;
import com.github.mobile.ui.ref.CodeTreeAdapter;
import com.github.mobile.ui.ref.RefDialog;
import com.github.mobile.ui.ref.RefDialogFragment;
import com.github.mobile.util.ToastUtils;
import com.github.mobile.util.TypefaceUtils;
import com.google.inject.Inject;
import java.util.LinkedList;
import org.eclipse.egit.github.core.Reference;
import org.eclipse.egit.github.core.Repository;
import org.eclipse.egit.github.core.service.DataService;
/**
 * Fragment to display a repository's source code tree.
 * <p>
 * Shows the entries of the currently selected folder in a list, with an
 * optional path header (while inside a subfolder) and a footer for switching
 * branches/tags. Tapping a folder descends into it; tapping a file opens it in
 * {@link BranchFileViewActivity}.
 */
public class RepositoryCodeFragment extends DialogFragment implements
        OnItemClickListener {

    // currently loaded tree (null until the first refresh succeeds)
    private FullTree tree;

    private ListView listView;

    private ProgressBar progressView;

    // octicon showing either a branch or a tag glyph, depending on the current ref
    private TextView branchIconView;

    private TextView branchView;

    // breadcrumb text inside the path header
    private TextView pathView;

    // header row shown only while browsing a subfolder (see pathShowing)
    private View pathHeaderView;

    // footer row that opens the branch/tag switcher
    private View branchFooterView;

    private HeaderFooterListAdapter<CodeTreeAdapter> adapter;

    // whether pathHeaderView is currently attached to the adapter
    private boolean pathShowing;

    // currently displayed folder within tree
    private Folder folder;

    private Repository repository;

    @Inject
    private DataService service;

    // lazily created branch/tag selection dialog
    private RefDialog dialog;

    @Override
    public void onAttach(Context context) {
        super.onAttach(context);
        // NOTE(review): getSerializableExtra is presumably a base-class helper
        // reading the hosting activity's intent — confirm in DialogFragment
        repository = getSerializableExtra(EXTRA_REPOSITORY);
    }

    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        // load the tree on first display, otherwise restore the previous view
        if (tree == null || folder == null)
            refreshTree(null);
        else
            setFolder(tree, folder);
    }

    @Override
    public void onCreateOptionsMenu(Menu optionsMenu, MenuInflater inflater) {
        inflater.inflate(R.menu.refresh, optionsMenu);
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()) {
        case R.id.m_refresh:
            // keep the currently selected ref when refreshing, if one is loaded
            if (tree != null)
                refreshTree(new Reference().setRef(tree.reference.getRef()));
            else
                refreshTree(null);
            return true;
        default:
            return super.onOptionsItemSelected(item);
        }
    }

    /**
     * Toggle between the progress spinner and the list/footer views.
     *
     * @param loading true to show the spinner, false to show the content
     */
    private void showLoading(final boolean loading) {
        ViewUtils.setGone(progressView, !loading);
        ViewUtils.setGone(listView, loading);
        ViewUtils.setGone(branchFooterView, loading);
    }

    /**
     * Reload the full tree for the given ref (null for the default branch),
     * attempting to stay in the currently viewed folder if it still exists.
     */
    private void refreshTree(final Reference reference) {
        showLoading(true);
        new RefreshTreeTask(repository, reference, getActivity()) {

            @Override
            protected void onSuccess(final FullTree fullTree) throws Exception {
                super.onSuccess(fullTree);

                if (folder == null || folder.parent == null)
                    setFolder(fullTree, fullTree.root);
                else {
                    // Look for current folder in new tree or else reset to root
                    Folder current = folder;
                    LinkedList<Folder> stack = new LinkedList<Folder>();
                    // record the path from the current folder up to the root
                    while (current != null && current.parent != null) {
                        stack.addFirst(current);
                        current = current.parent;
                    }
                    // walk the recorded path down the freshly loaded tree
                    Folder refreshed = fullTree.root;
                    while (!stack.isEmpty()) {
                        refreshed = refreshed.folders
                                .get(stack.removeFirst().name);
                        if (refreshed == null)
                            break;
                    }
                    if (refreshed != null)
                        setFolder(fullTree, refreshed);
                    else
                        setFolder(fullTree, fullTree.root);
                }
            }

            @Override
            protected void onException(Exception e) throws RuntimeException {
                super.onException(e);

                showLoading(false);
                ToastUtils.show(getActivity(), e, R.string.error_code_load);
            }
        }.execute();
    }

    /** Show the branch/tag selection dialog for the loaded tree, if any. */
    private void switchBranches() {
        if (tree == null)
            return;

        if (dialog == null)
            dialog = new RefDialog((DialogFragmentActivity) getActivity(),
                    REF_UPDATE, repository, service);
        dialog.show(tree.reference);
    }

    @Override
    public void onDialogResult(int requestCode, int resultCode, Bundle arguments) {
        if (RESULT_OK != resultCode)
            return;

        switch (requestCode) {
        case REF_UPDATE:
            // user picked a new branch/tag: reload the tree for it
            refreshTree(RefDialogFragment.getSelected(arguments));
            break;
        }
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
            Bundle savedInstanceState) {
        return inflater.inflate(R.layout.repo_code, null);
    }

    @Override
    public void onViewCreated(View view, Bundle savedInstanceState) {
        super.onViewCreated(view, savedInstanceState);

        // NOTE(review): 'finder' appears to be a view-lookup helper inherited
        // from the base fragment — confirm in DialogFragment
        progressView = finder.find(R.id.pb_loading);
        listView = finder.find(android.R.id.list);
        listView.setOnItemClickListener(this);
        Activity activity = getActivity();
        adapter = new HeaderFooterListAdapter<CodeTreeAdapter>(listView,
                new CodeTreeAdapter(activity));
        branchFooterView = finder.find(R.id.rl_branch);
        branchView = finder.find(R.id.tv_branch);
        branchIconView = finder.find(R.id.tv_branch_icon);
        branchIconView.setText(TypefaceUtils.ICON_GIT_BRANCH);
        branchFooterView.setOnClickListener(new OnClickListener() {

            @Override
            public void onClick(View v) {
                switchBranches();
            }
        });
        pathHeaderView = activity.getLayoutInflater().inflate(R.layout.path_item, null);
        pathView = (TextView) pathHeaderView.findViewById(R.id.tv_path);
        pathView.setMovementMethod(LinkMovementMethod.getInstance());
        // re-attach the header if it was showing before the view was recreated;
        // headers must be added before setAdapter is called below
        if (pathShowing)
            adapter.addHeader(pathHeaderView);
        TextView folderIcon = (TextView) pathHeaderView.findViewById(R.id.tv_folder_icon);
        folderIcon.setText(TypefaceUtils.ICON_FILE_SUBMODULE);
        TypefaceUtils.setOcticons(branchIconView, folderIcon);
        listView.setAdapter(adapter);
    }

    /**
     * Back up the currently viewed folder to its parent
     *
     * @return true if directory changed, false otherwise
     */
    public boolean onBackPressed() {
        if (folder != null && folder.parent != null) {
            setFolder(tree, folder.parent);
            return true;
        } else
            return false;
    }

    /**
     * Display the given folder of the given tree: update the branch footer,
     * build the clickable breadcrumb header (only for non-root folders), and
     * fill the list with the folder's entries.
     */
    private void setFolder(final FullTree tree, final Folder folder) {
        this.folder = folder;
        this.tree = tree;

        showLoading(false);

        branchView.setText(tree.branch);
        if (RefUtils.isTag(tree.reference))
            branchIconView.setText(TypefaceUtils.ICON_TAG);
        else
            branchIconView.setText(TypefaceUtils.ICON_GIT_BRANCH);
        adapter.getWrappedAdapter().setIndented(folder.entry != null);
        if (folder.entry != null) {
            int textLightColor = getResources().getColor(R.color.text_light);
            final String[] segments = folder.entry.getPath().split("/");
            StyledText text = new StyledText();
            // each ancestor segment becomes a link that navigates back up
            for (int i = 0; i < segments.length - 1; i++) {
                final int index = i;
                text.url(segments[i], new OnClickListener() {

                    @Override
                    public void onClick(View v) {
                        // climb (segments.length - 1 - index) levels up from here
                        Folder clicked = folder;
                        for (int i = index; i < segments.length - 1; i++) {
                            clicked = clicked.parent;
                            if (clicked == null)
                                return;
                        }
                        setFolder(tree, clicked);
                    }
                }).append(' ').foreground('/', textLightColor).append(' ');
            }
            // last segment is the current folder: bold, not clickable
            text.bold(segments[segments.length - 1]);
            pathView.setText(text);
            if (!pathShowing) {
                adapter.addHeader(pathHeaderView);
                pathShowing = true;
            }
        } else if (pathShowing) {
            // root folder: no breadcrumb header
            adapter.removeHeader(pathHeaderView);
            pathShowing = false;
        }
        adapter.getWrappedAdapter().setItems(folder);
        listView.setSelection(0);
    }

    @Override
    public void onItemClick(AdapterView<?> parent, View view, int position,
            long id) {
        Entry entry = (Entry) parent.getItemAtPosition(position);
        if (tree == null || entry == null)
            return;

        if (entry instanceof Folder)
            setFolder(tree, (Folder) entry);
        else
            startActivity(BranchFileViewActivity.createIntent(repository,
                    tree.branch, entry.entry.getPath(), entry.entry.getSha()));
    }
}
| |
/**
* Copyright 2015-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ch.rasc.bsoncodec.codegen;
import java.util.Arrays;
import java.util.EnumMap;
import java.util.HashMap;
import java.util.HashSet;
import java.util.IdentityHashMap;
import java.util.LinkedHashMap;
import java.util.Set;
import java.util.TreeMap;
import javax.lang.model.element.ElementKind;
import javax.lang.model.element.TypeElement;
import javax.lang.model.type.DeclaredType;
import javax.lang.model.type.TypeMirror;
import org.bson.BsonType;
import com.squareup.javapoet.ClassName;
import com.squareup.javapoet.MethodSpec.Builder;
import com.squareup.javapoet.TypeName;
import ch.rasc.bsoncodec.Util;
import ch.rasc.bsoncodec.model.FieldModel;
/**
 * Generates the encode/decode statements for a {@code Map}-typed field of a
 * BSON codec: maps are written as BSON documents whose element names are the
 * (stringified) map keys.
 */
public class MapCodeGen extends CompoundCodeGen {

    /** explicitly configured concrete Map implementation to instantiate; may be null */
    private TypeMirror implementationType;

    /** declared type of the map keys */
    private final TypeMirror keyType;

    public MapCodeGen(CompoundCodeGen parent, TypeMirror type, TypeMirror keyType) {
        super(parent, type);
        this.keyType = keyType;
    }

    public void setImplementationType(TypeMirror implementationType) {
        this.implementationType = implementationType;
    }

    /**
     * Determines the concrete Map class instantiated by the generated decode
     * code. Falls back to {@link LinkedHashMap} when the field is declared with
     * a Map interface and no implementation type was configured explicitly.
     */
    protected TypeName getImplementationType() {
        if (this.implementationType == null) {
            if (((DeclaredType) getType()).asElement()
                    .getKind() == ElementKind.INTERFACE) {
                return ClassName.get(LinkedHashMap.class);
            }
            return ClassName.get((TypeElement) Util.typeUtils.asElement(this.getType()));
        }
        return ClassName
                .get((TypeElement) Util.typeUtils.asElement(this.implementationType));
    }

    @Override
    public void addEncodeStatements(CodeGeneratorContext ctx) {
        FieldModel field = ctx.field();
        Builder builder = ctx.builder();

        // top-level fields get a null guard unless explicitly disabled
        if (!field.disableEncodeNullCheck() && !hasParent()) {
            builder.beginControlFlow("if ($L != null)", ctx.getter());
        }

        // optionally skip empty maps entirely
        if (!field.storeEmptyCollection()) {
            builder.beginControlFlow("if (!$L.isEmpty())", ctx.getter());
        }

        if (!hasParent()) {
            builder.addStatement("writer.writeName($S)", field.name());
        }

        TypeMirror childType = this.getChildCodeGen().getType();
        builder.addStatement("writer.writeStartDocument()");
        // raw Object keys require an unparameterized entry-set loop with a cast
        if (!Util.isSameType(this.keyType, Object.class)) {
            builder.beginControlFlow("for (Map.Entry<$T, $T> $L : $L.entrySet())",
                    this.keyType, childType, ctx.getLoopVar(), ctx.getter());
        }
        else {
            builder.beginControlFlow("for (Map.Entry $L : (Set<Map.Entry>)$L.entrySet())",
                    ctx.getLoopVar(), ctx.getter());
        }

        // element names must be strings: use the key directly, the enum name,
        // or String.valueOf for any other key type
        if (Util.isSameType(this.keyType, String.class)) {
            builder.addStatement("writer.writeName($L.getKey())", ctx.getLoopVar());
        }
        else {
            if (Util.isSameType(getType(), EnumMap.class)) {
                builder.addStatement("writer.writeName($L.getKey().name())",
                        ctx.getLoopVar());
            }
            else {
                builder.addStatement("writer.writeName($T.valueOf($L.getKey()))",
                        String.class, ctx.getLoopVar());
            }
        }

        // null values are only round-tripped for implementations that accept them
        boolean permitNullElements = permitNullElements();
        if (!field.disableEncodeNullCheck() && permitNullElements) {
            builder.beginControlFlow("if ($L != null)", ctx.getLoopVar() + ".getValue()");
        }

        this.getChildCodeGen().addEncodeStatements(
                ctx.createEncodeChildContext(ctx.getLoopVar() + ".getValue()"));

        if (!field.disableEncodeNullCheck() && permitNullElements) {
            builder.nextControlFlow("else").addStatement("writer.writeNull()");
            builder.endControlFlow();
        }

        builder.endControlFlow();
        builder.addStatement("writer.writeEndDocument()");

        if (!field.storeEmptyCollection()) {
            builder.endControlFlow();
        }

        if (!field.disableEncodeNullCheck() && !hasParent()) {
            if (field.storeNullValue()) {
                builder.nextControlFlow("else").addStatement("writer.writeNull($S)",
                        field.name());
            }
            builder.endControlFlow();
        }
    }

    @Override
    public void addDecodeStatements(CodeGeneratorContext ctx) {
        FieldModel field = ctx.field();
        Builder builder = ctx.builder();
        char lv = ctx.getLoopVar();

        if (!field.disableDecodeNullCheck() && !hasParent()) {
            builder.beginControlFlow("if (bsonType != $T.NULL)", BsonType.class);
        }

        builder.addStatement("reader.readStartDocument()");
        // EnumMap has no no-arg constructor; it needs the key class
        if (Util.isSameType(getType(), EnumMap.class)) {
            builder.addStatement("$T $L = new $T<>($T.class)", getType(), lv,
                    EnumMap.class, this.keyType);
        }
        else {
            builder.addStatement("$T $L = new $T<>()", getType(), lv,
                    this.getImplementationType());
        }
        builder.beginControlFlow(
                "while ((bsonType = reader.readBsonType()) != $T.END_OF_DOCUMENT)",
                BsonType.class);

        // reconstruct the key from the element name
        if (Util.isSameType(this.keyType, String.class)
                || Util.isSameType(this.keyType, Object.class)) {
            builder.addStatement("String $LKey = reader.readName()", lv);
        }
        else {
            builder.addStatement("$T $LKey = $T.valueOf(reader.readName())", this.keyType,
                    lv, this.keyType);
        }

        boolean permitNullElements = permitNullElements();
        if (permitNullElements) {
            builder.beginControlFlow("if (bsonType != $T.NULL)", BsonType.class);
        }

        CodeGeneratorContext childCtx = ctx
                .createDecodeChildContext(lv + ".put(" + lv + "Key, %s)");
        this.getChildCodeGen().addDecodeStatements(childCtx);

        if (permitNullElements) {
            builder.nextControlFlow("else").addStatement("reader.readNull()");
            builder.addStatement("$L.put($LKey, null)", lv, lv);
            builder.endControlFlow();
        }

        builder.endControlFlow();
        builder.addStatement("reader.readEndDocument()");
        builder.addStatement(ctx.setter("$L"), lv);

        if (!field.disableDecodeNullCheck() && !hasParent()) {
            builder.nextControlFlow("else").addStatement("reader.readNull()");
            if (!ctx.field().disableSetNullStatement()) {
                this.getChildCodeGen().addSetNullStatements(ctx);
            }
            builder.endControlFlow();
        }
    }

    /** Map implementations known to tolerate null values. */
    private static Set<String> permitNullCollections = new HashSet<>();

    static {
        permitNullCollections.addAll(Arrays.asList(HashMap.class.getCanonicalName(),
                LinkedHashMap.class.getCanonicalName(), TreeMap.class.getCanonicalName(),
                IdentityHashMap.class.getCanonicalName()));
        // permit null: EnumMap (values)
        // not permit null: EnumMap (keys)
        // not permit null: ConcurrentHashMap, ConcurrentSkipListMap
    }

    private boolean permitNullElements() {
        return permitNullCollections.contains(this.getImplementationType().toString());
    }
}
| |
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package android.net.nsd;
import android.annotation.SdkConstant;
import android.annotation.SdkConstant.SdkConstantType;
import android.content.Context;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
import android.os.Message;
import android.os.RemoteException;
import android.os.Messenger;
import android.text.TextUtils;
import android.util.Log;
import android.util.SparseArray;
import java.util.concurrent.CountDownLatch;
import com.android.internal.util.AsyncChannel;
import com.android.internal.util.Protocol;
/**
* The Network Service Discovery Manager class provides the API to discover services
* on a network. As an example, if device A and device B are connected over a Wi-Fi
* network, a game registered on device A can be discovered by a game on device
* B. Another example use case is an application discovering printers on the network.
*
* <p> The API currently supports DNS based service discovery and discovery is currently
* limited to a local network over Multicast DNS. DNS service discovery is described at
* http://files.dns-sd.org/draft-cheshire-dnsext-dns-sd.txt
*
* <p> The API is asynchronous and responses to requests from an application are on listener
 * callbacks on a separate thread.
*
* <p> There are three main operations the API supports - registration, discovery and resolution.
* <pre>
* Application start
* |
* |
* | onServiceRegistered()
* Register any local services /
* to be advertised with \
* registerService() onRegistrationFailed()
* |
* |
* discoverServices()
* |
* Maintain a list to track
* discovered services
* |
* |--------->
* | |
* | onServiceFound()
* | |
* | add service to list
* | |
* |<----------
* |
* |--------->
* | |
* | onServiceLost()
* | |
* | remove service from list
* | |
* |<----------
* |
* |
* | Connect to a service
* | from list ?
* |
* resolveService()
* |
* onServiceResolved()
* |
* Establish connection to service
* with the host and port information
*
* </pre>
* An application that needs to advertise itself over a network for other applications to
* discover it can do so with a call to {@link #registerService}. If Example is a http based
* application that can provide HTML data to peer services, it can register a name "Example"
* with service type "_http._tcp". A successful registration is notified with a callback to
* {@link RegistrationListener#onServiceRegistered} and a failure to register is notified
* over {@link RegistrationListener#onRegistrationFailed}
*
* <p> A peer application looking for http services can initiate a discovery for "_http._tcp"
* with a call to {@link #discoverServices}. A service found is notified with a callback
* to {@link DiscoveryListener#onServiceFound} and a service lost is notified on
* {@link DiscoveryListener#onServiceLost}.
*
 * <p> Once the peer application discovers the "Example" http service, and needs to receive data
* from the "Example" application, it can initiate a resolve with {@link #resolveService} to
* resolve the host and port details for the purpose of establishing a connection. A successful
* resolve is notified on {@link ResolveListener#onServiceResolved} and a failure is notified
* on {@link ResolveListener#onResolveFailed}.
*
* Applications can reserve for a service type at
* http://www.iana.org/form/ports-service. Existing services can be found at
* http://www.iana.org/assignments/service-names-port-numbers/service-names-port-numbers.xml
*
* Get an instance of this class by calling {@link android.content.Context#getSystemService(String)
* Context.getSystemService(Context.NSD_SERVICE)}.
*
* {@see NsdServiceInfo}
*/
public final class NsdManager {
    private static final String TAG = "NsdManager";

    /** Binder proxy to the system's network service discovery service. */
    INsdManager mService;

    /**
     * Broadcast intent action to indicate whether network service discovery is
     * enabled or disabled. An extra {@link #EXTRA_NSD_STATE} provides the state
     * information as int.
     *
     * @see #EXTRA_NSD_STATE
     */
    @SdkConstant(SdkConstantType.BROADCAST_INTENT_ACTION)
    public static final String ACTION_NSD_STATE_CHANGED =
        "android.net.nsd.STATE_CHANGED";

    /**
     * The lookup key for an int that indicates whether network service discovery is enabled
     * or disabled. Retrieve it with {@link android.content.Intent#getIntExtra(String,int)}.
     *
     * @see #NSD_STATE_DISABLED
     * @see #NSD_STATE_ENABLED
     */
    public static final String EXTRA_NSD_STATE = "nsd_state";

    /**
     * Network service discovery is disabled
     *
     * @see #ACTION_NSD_STATE_CHANGED
     */
    public static final int NSD_STATE_DISABLED = 1;

    /**
     * Network service discovery is enabled
     *
     * @see #ACTION_NSD_STATE_CHANGED
     */
    public static final int NSD_STATE_ENABLED = 2;

    // Message codes exchanged over the AsyncChannel with the NSD system service.
    private static final int BASE = Protocol.BASE_NSD_MANAGER;
    /** @hide */
    public static final int DISCOVER_SERVICES = BASE + 1;
    /** @hide */
    public static final int DISCOVER_SERVICES_STARTED = BASE + 2;
    /** @hide */
    public static final int DISCOVER_SERVICES_FAILED = BASE + 3;
    /** @hide */
    public static final int SERVICE_FOUND = BASE + 4;
    /** @hide */
    public static final int SERVICE_LOST = BASE + 5;
    /** @hide */
    public static final int STOP_DISCOVERY = BASE + 6;
    /** @hide */
    public static final int STOP_DISCOVERY_FAILED = BASE + 7;
    /** @hide */
    public static final int STOP_DISCOVERY_SUCCEEDED = BASE + 8;
    /** @hide */
    public static final int REGISTER_SERVICE = BASE + 9;
    /** @hide */
    public static final int REGISTER_SERVICE_FAILED = BASE + 10;
    /** @hide */
    public static final int REGISTER_SERVICE_SUCCEEDED = BASE + 11;
    /** @hide */
    public static final int UNREGISTER_SERVICE = BASE + 12;
    /** @hide */
    public static final int UNREGISTER_SERVICE_FAILED = BASE + 13;
    /** @hide */
    public static final int UNREGISTER_SERVICE_SUCCEEDED = BASE + 14;
    /** @hide */
    public static final int RESOLVE_SERVICE = BASE + 18;
    /** @hide */
    public static final int RESOLVE_SERVICE_FAILED = BASE + 19;
    /** @hide */
    public static final int RESOLVE_SERVICE_SUCCEEDED = BASE + 20;
    /** @hide */
    public static final int ENABLE = BASE + 24;
    /** @hide */
    public static final int DISABLE = BASE + 25;
    /** @hide */
    public static final int NATIVE_DAEMON_EVENT = BASE + 26;

    /** Dns based service discovery protocol */
    public static final int PROTOCOL_DNS_SD = 0x0001;

    private Context mContext;

    // Sentinel listener keys: 0 means "no listener"; -1 signals that the
    // listener object is already in use for an active request.
    private static final int INVALID_LISTENER_KEY = 0;
    private static final int BUSY_LISTENER_KEY = -1;

    private int mListenerKey = 1;
    // Maps a listener key to its listener object / pending NsdServiceInfo.
    // Both maps are guarded by mMapLock and always updated together.
    private final SparseArray mListenerMap = new SparseArray();
    private final SparseArray<NsdServiceInfo> mServiceMap = new SparseArray<NsdServiceInfo>();
    private final Object mMapLock = new Object();

    private final AsyncChannel mAsyncChannel = new AsyncChannel();
    private ServiceHandler mHandler;
    // Released once the AsyncChannel reports a full connection; init() blocks on it.
    private final CountDownLatch mConnected = new CountDownLatch(1);

    /**
     * Create a new Nsd instance. Applications use
     * {@link android.content.Context#getSystemService Context.getSystemService()} to retrieve
     * {@link android.content.Context#NSD_SERVICE Context.NSD_SERVICE}.
     * @param service the Binder interface
     * @hide - hide this because it takes in a parameter of type INsdManager, which
     * is a system private class.
     */
    public NsdManager(Context context, INsdManager service) {
        mService = service;
        mContext = context;
        init();
    }

    /**
     * Failures are passed with {@link RegistrationListener#onRegistrationFailed},
     * {@link RegistrationListener#onUnregistrationFailed},
     * {@link DiscoveryListener#onStartDiscoveryFailed},
     * {@link DiscoveryListener#onStopDiscoveryFailed} or {@link ResolveListener#onResolveFailed}.
     *
     * Indicates that the operation failed due to an internal error.
     */
    public static final int FAILURE_INTERNAL_ERROR = 0;

    /**
     * Indicates that the operation failed because it is already active.
     */
    public static final int FAILURE_ALREADY_ACTIVE = 3;

    /**
     * Indicates that the operation failed because the maximum outstanding
     * requests from the applications have reached.
     */
    public static final int FAILURE_MAX_LIMIT = 4;

    /** Interface for callback invocation for service discovery */
    public interface DiscoveryListener {
        public void onStartDiscoveryFailed(String serviceType, int errorCode);
        public void onStopDiscoveryFailed(String serviceType, int errorCode);
        public void onDiscoveryStarted(String serviceType);
        public void onDiscoveryStopped(String serviceType);
        public void onServiceFound(NsdServiceInfo serviceInfo);
        public void onServiceLost(NsdServiceInfo serviceInfo);
    }

    /** Interface for callback invocation for service registration */
    public interface RegistrationListener {
        public void onRegistrationFailed(NsdServiceInfo serviceInfo, int errorCode);
        public void onUnregistrationFailed(NsdServiceInfo serviceInfo, int errorCode);
        public void onServiceRegistered(NsdServiceInfo serviceInfo);
        public void onServiceUnregistered(NsdServiceInfo serviceInfo);
    }

    /** Interface for callback invocation for service resolution */
    public interface ResolveListener {
        public void onResolveFailed(NsdServiceInfo serviceInfo, int errorCode);
        public void onServiceResolved(NsdServiceInfo serviceInfo);
    }

    /**
     * Dispatches responses from the system service to the registered listeners.
     * Terminal events (failures, stop/unregister successes, resolve results)
     * also remove the listener so its object may be reused.
     */
    private class ServiceHandler extends Handler {
        ServiceHandler(Looper looper) {
            super(looper);
        }

        @Override
        public void handleMessage(Message message) {
            // Channel lifecycle events carry no listener key; handle them first.
            switch (message.what) {
                case AsyncChannel.CMD_CHANNEL_HALF_CONNECTED:
                    mAsyncChannel.sendMessage(AsyncChannel.CMD_CHANNEL_FULL_CONNECTION);
                    return;
                case AsyncChannel.CMD_CHANNEL_FULLY_CONNECTED:
                    mConnected.countDown();
                    return;
                case AsyncChannel.CMD_CHANNEL_DISCONNECTED:
                    Log.e(TAG, "Channel lost");
                    return;
                default:
                    break;
            }
            // arg2 is the listener key assigned in putListener().
            Object listener = getListener(message.arg2);
            if (listener == null) {
                Log.d(TAG, "Stale key " + message.arg2);
                return;
            }
            NsdServiceInfo ns = getNsdService(message.arg2);
            switch (message.what) {
                case DISCOVER_SERVICES_STARTED:
                    String s = getNsdServiceInfoType((NsdServiceInfo) message.obj);
                    ((DiscoveryListener) listener).onDiscoveryStarted(s);
                    break;
                case DISCOVER_SERVICES_FAILED:
                    removeListener(message.arg2);
                    ((DiscoveryListener) listener).onStartDiscoveryFailed(getNsdServiceInfoType(ns),
                            message.arg1);
                    break;
                case SERVICE_FOUND:
                    ((DiscoveryListener) listener).onServiceFound((NsdServiceInfo) message.obj);
                    break;
                case SERVICE_LOST:
                    ((DiscoveryListener) listener).onServiceLost((NsdServiceInfo) message.obj);
                    break;
                case STOP_DISCOVERY_FAILED:
                    removeListener(message.arg2);
                    ((DiscoveryListener) listener).onStopDiscoveryFailed(getNsdServiceInfoType(ns),
                            message.arg1);
                    break;
                case STOP_DISCOVERY_SUCCEEDED:
                    removeListener(message.arg2);
                    ((DiscoveryListener) listener).onDiscoveryStopped(getNsdServiceInfoType(ns));
                    break;
                case REGISTER_SERVICE_FAILED:
                    removeListener(message.arg2);
                    ((RegistrationListener) listener).onRegistrationFailed(ns, message.arg1);
                    break;
                case REGISTER_SERVICE_SUCCEEDED:
                    ((RegistrationListener) listener).onServiceRegistered(
                            (NsdServiceInfo) message.obj);
                    break;
                case UNREGISTER_SERVICE_FAILED:
                    removeListener(message.arg2);
                    ((RegistrationListener) listener).onUnregistrationFailed(ns, message.arg1);
                    break;
                case UNREGISTER_SERVICE_SUCCEEDED:
                    removeListener(message.arg2);
                    ((RegistrationListener) listener).onServiceUnregistered(ns);
                    break;
                case RESOLVE_SERVICE_FAILED:
                    removeListener(message.arg2);
                    ((ResolveListener) listener).onResolveFailed(ns, message.arg1);
                    break;
                case RESOLVE_SERVICE_SUCCEEDED:
                    removeListener(message.arg2);
                    ((ResolveListener) listener).onServiceResolved((NsdServiceInfo) message.obj);
                    break;
                default:
                    Log.d(TAG, "Ignored " + message);
                    break;
            }
        }
    }

    // if the listener is already in the map, reject it. Otherwise, add it and
    // return its key.
    private int putListener(Object listener, NsdServiceInfo s) {
        if (listener == null) return INVALID_LISTENER_KEY;
        int key;
        synchronized (mMapLock) {
            int valueIndex = mListenerMap.indexOfValue(listener);
            if (valueIndex != -1) {
                return BUSY_LISTENER_KEY;
            }
            // Skip the INVALID_LISTENER_KEY value if mListenerKey ever wraps.
            do {
                key = mListenerKey++;
            } while (key == INVALID_LISTENER_KEY);
            mListenerMap.put(key, listener);
            mServiceMap.put(key, s);
        }
        return key;
    }

    private Object getListener(int key) {
        if (key == INVALID_LISTENER_KEY) return null;
        synchronized (mMapLock) {
            return mListenerMap.get(key);
        }
    }

    private NsdServiceInfo getNsdService(int key) {
        synchronized (mMapLock) {
            return mServiceMap.get(key);
        }
    }

    private void removeListener(int key) {
        if (key == INVALID_LISTENER_KEY) return;
        synchronized (mMapLock) {
            mListenerMap.remove(key);
            mServiceMap.remove(key);
        }
    }

    private int getListenerKey(Object listener) {
        synchronized (mMapLock) {
            int valueIndex = mListenerMap.indexOfValue(listener);
            if (valueIndex != -1) {
                return mListenerMap.keyAt(valueIndex);
            }
        }
        return INVALID_LISTENER_KEY;
    }

    // Returns "?" for a null service so log/callback strings stay well-formed.
    private String getNsdServiceInfoType(NsdServiceInfo s) {
        if (s == null) return "?";
        return s.getServiceType();
    }

    /**
     * Initialize AsyncChannel
     */
    private void init() {
        final Messenger messenger = getMessenger();
        if (messenger == null) throw new RuntimeException("Failed to initialize");
        HandlerThread t = new HandlerThread("NsdManager");
        t.start();
        mHandler = new ServiceHandler(t.getLooper());
        mAsyncChannel.connect(mContext, mHandler, messenger);
        try {
            // Block until the channel is fully connected so that requests sent
            // immediately after construction are not lost.
            mConnected.await();
        } catch (InterruptedException e) {
            Log.e(TAG, "interrupted wait at init");
        }
    }

    /**
     * Register a service to be discovered by other services.
     *
     * <p> The function call immediately returns after sending a request to register service
     * to the framework. The application is notified of a successful registration
     * through the callback {@link RegistrationListener#onServiceRegistered} or a failure
     * through {@link RegistrationListener#onRegistrationFailed}.
     *
     * <p> The application should call {@link #unregisterService} when the service
     * registration is no longer required, and/or whenever the application is stopped.
     *
     * @param serviceInfo The service being registered
     * @param protocolType The service discovery protocol
     * @param listener The listener notifies of a successful registration and is used to
     * unregister this service through a call on {@link #unregisterService}. Cannot be null.
     * Cannot be in use for an active service registration.
     * @throws IllegalArgumentException if the arguments fail validation
     */
    public void registerService(NsdServiceInfo serviceInfo, int protocolType,
            RegistrationListener listener) {
        if (TextUtils.isEmpty(serviceInfo.getServiceName()) ||
                TextUtils.isEmpty(serviceInfo.getServiceType())) {
            throw new IllegalArgumentException("Service name or type cannot be empty");
        }
        if (serviceInfo.getPort() <= 0) {
            throw new IllegalArgumentException("Invalid port number");
        }
        if (listener == null) {
            throw new IllegalArgumentException("listener cannot be null");
        }
        if (protocolType != PROTOCOL_DNS_SD) {
            throw new IllegalArgumentException("Unsupported protocol");
        }
        int key = putListener(listener, serviceInfo);
        if (key == BUSY_LISTENER_KEY) {
            throw new IllegalArgumentException("listener already in use");
        }
        mAsyncChannel.sendMessage(REGISTER_SERVICE, 0, key, serviceInfo);
    }

    /**
     * Unregister a service registered through {@link #registerService}. A successful
     * unregister is notified to the application with a call to
     * {@link RegistrationListener#onServiceUnregistered}.
     *
     * @param listener This should be the listener object that was passed to
     * {@link #registerService}. It identifies the service that should be unregistered
     * and notifies of a successful or unsuccessful unregistration via the listener
     * callbacks. In API versions 20 and above, the listener object may be used for
     * another service registration once the callback has been called. In API versions <= 19,
     * there is no entirely reliable way to know when a listener may be re-used, and a new
     * listener should be created for each service registration request.
     * @throws IllegalArgumentException if the listener is null or not registered
     */
    public void unregisterService(RegistrationListener listener) {
        // Validate the argument before looking it up; previously the null check
        // came after getListenerKey(), so a null listener produced the
        // misleading "listener not registered" message.
        if (listener == null) {
            throw new IllegalArgumentException("listener cannot be null");
        }
        int id = getListenerKey(listener);
        if (id == INVALID_LISTENER_KEY) {
            throw new IllegalArgumentException("listener not registered");
        }
        mAsyncChannel.sendMessage(UNREGISTER_SERVICE, 0, id);
    }

    /**
     * Initiate service discovery to browse for instances of a service type. Service discovery
     * consumes network bandwidth and will continue until the application calls
     * {@link #stopServiceDiscovery}.
     *
     * <p> The function call immediately returns after sending a request to start service
     * discovery to the framework. The application is notified of a success to initiate
     * discovery through the callback {@link DiscoveryListener#onDiscoveryStarted} or a failure
     * through {@link DiscoveryListener#onStartDiscoveryFailed}.
     *
     * <p> Upon successful start, application is notified when a service is found with
     * {@link DiscoveryListener#onServiceFound} or when a service is lost with
     * {@link DiscoveryListener#onServiceLost}.
     *
     * <p> Upon failure to start, service discovery is not active and application does
     * not need to invoke {@link #stopServiceDiscovery}
     *
     * <p> The application should call {@link #stopServiceDiscovery} when discovery of this
     * service type is no longer required, and/or whenever the application is paused or
     * stopped.
     *
     * @param serviceType The service type being discovered. Examples include "_http._tcp" for
     * http services or "_ipp._tcp" for printers
     * @param protocolType The service discovery protocol
     * @param listener The listener notifies of a successful discovery and is used
     * to stop discovery on this serviceType through a call on {@link #stopServiceDiscovery}.
     * Cannot be null. Cannot be in use for an active service discovery.
     * @throws IllegalArgumentException if the arguments fail validation
     */
    public void discoverServices(String serviceType, int protocolType, DiscoveryListener listener) {
        if (listener == null) {
            throw new IllegalArgumentException("listener cannot be null");
        }
        if (TextUtils.isEmpty(serviceType)) {
            throw new IllegalArgumentException("Service type cannot be empty");
        }
        if (protocolType != PROTOCOL_DNS_SD) {
            throw new IllegalArgumentException("Unsupported protocol");
        }
        NsdServiceInfo s = new NsdServiceInfo();
        s.setServiceType(serviceType);
        int key = putListener(listener, s);
        if (key == BUSY_LISTENER_KEY) {
            throw new IllegalArgumentException("listener already in use");
        }
        mAsyncChannel.sendMessage(DISCOVER_SERVICES, 0, key, s);
    }

    /**
     * Stop service discovery initiated with {@link #discoverServices}. An active service
     * discovery is notified to the application with {@link DiscoveryListener#onDiscoveryStarted}
     * and it stays active until the application invokes a stop service discovery. A successful
     * stop is notified to with a call to {@link DiscoveryListener#onDiscoveryStopped}.
     *
     * <p> Upon failure to stop service discovery, application is notified through
     * {@link DiscoveryListener#onStopDiscoveryFailed}.
     *
     * @param listener This should be the listener object that was passed to {@link #discoverServices}.
     * It identifies the discovery that should be stopped and notifies of a successful or
     * unsuccessful stop. In API versions 20 and above, the listener object may be used for
     * another service discovery once the callback has been called. In API versions <= 19,
     * there is no entirely reliable way to know when a listener may be re-used, and a new
     * listener should be created for each service discovery request.
     * @throws IllegalArgumentException if the listener is null or not active
     */
    public void stopServiceDiscovery(DiscoveryListener listener) {
        // Null check first for the same reason as in unregisterService().
        if (listener == null) {
            throw new IllegalArgumentException("listener cannot be null");
        }
        int id = getListenerKey(listener);
        if (id == INVALID_LISTENER_KEY) {
            throw new IllegalArgumentException("service discovery not active on listener");
        }
        mAsyncChannel.sendMessage(STOP_DISCOVERY, 0, id);
    }

    /**
     * Resolve a discovered service. An application can resolve a service right before
     * establishing a connection to fetch the IP and port details on which to setup
     * the connection.
     *
     * @param serviceInfo service to be resolved
     * @param listener to receive callback upon success or failure. Cannot be null.
     * Cannot be in use for an active service resolution.
     * @throws IllegalArgumentException if the arguments fail validation
     */
    public void resolveService(NsdServiceInfo serviceInfo, ResolveListener listener) {
        if (TextUtils.isEmpty(serviceInfo.getServiceName()) ||
                TextUtils.isEmpty(serviceInfo.getServiceType())) {
            throw new IllegalArgumentException("Service name or type cannot be empty");
        }
        if (listener == null) {
            throw new IllegalArgumentException("listener cannot be null");
        }
        int key = putListener(listener, serviceInfo);
        if (key == BUSY_LISTENER_KEY) {
            throw new IllegalArgumentException("listener already in use");
        }
        mAsyncChannel.sendMessage(RESOLVE_SERVICE, 0, key, serviceInfo);
    }

    /** Internal use only @hide */
    public void setEnabled(boolean enabled) {
        try {
            mService.setEnabled(enabled);
        } catch (RemoteException e) {
            throw e.rethrowFromSystemServer();
        }
    }

    /**
     * Get a reference to NetworkService handler. This is used to establish
     * an AsyncChannel communication with the service
     *
     * @return Messenger pointing to the NetworkService handler
     */
    private Messenger getMessenger() {
        try {
            return mService.getMessenger();
        } catch (RemoteException e) {
            throw e.rethrowFromSystemServer();
        }
    }
}
| |
package thaumcraft.api.aspects;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedHashMap;
import net.minecraft.util.ResourceLocation;
import net.minecraft.util.StatCollector;
import org.apache.commons.lang3.text.WordUtils;
import thaumcraft.api.research.ScanAspect;
import thaumcraft.api.research.ScanningManager;
public class Aspect {
	// Unique key for this aspect; also serves as its Latin display name.
	String tag;
	// The two aspects this compound is mixed from; null for primal aspects.
	Aspect[] components;
	// Display colour as 0xRRGGBB.
	int color;
	// Chat formatting code for the name; set only on primal aspects.
	private String chatcolor;
	// 32x32 icon texture for the aspect.
	ResourceLocation image;
	// GL11 blend mode used when rendering nodes (1 or 771).
	int blend;
	/**
	 * For easy reference of what compounds are made up of
	 */
	public static HashMap<Integer,Aspect> mixList = new HashMap<Integer,Aspect>();
	/**
	 * Use this constructor to register your own aspects.
	 * @param tag the key that will be used to reference this aspect, as well as its latin display name
	 * @param color color to display the tag in
	 * @param components the aspects this one is formed from
	 * @param image ResourceLocation pointing to a 32x32 icon of the aspect
	 * @param blend GL11 blendmode (1 or 771). Used for rendering nodes. Default is 1
	 * @throws IllegalArgumentException if an aspect with this tag is already registered
	 */
	public Aspect(String tag, int color, Aspect[] components, ResourceLocation image, int blend) {
		if (aspects.containsKey(tag)) throw new IllegalArgumentException(tag+" already registered!");
		this.tag = tag;
		this.components = components;
		this.color = color;
		this.image = image;
		this.blend = blend;
		aspects.put(tag, this);
		// Every registered aspect also becomes a scannable research target.
		ScanningManager.addScannableThing(new ScanAspect("!"+tag,this));
		if (components!=null) {
			// mixList is keyed by the hash of the two component tags concatenated
			// in declaration order, so lookups must use the same ordering.
			int h = (components[0].getTag()+components[1].getTag()).hashCode();
			mixList.put(h, this);
		}
	}
	/**
	 * Shortcut constructor I use for the default aspects - you shouldn't be using this.
	 */
	public Aspect(String tag, int color, Aspect[] components) {
		this(tag,color,components,new ResourceLocation("thaumcraft","textures/aspects/"+tag.toLowerCase()+".png"),1);
	}
	/**
	 * Shortcut constructor I use for the default aspects - you shouldn't be using this.
	 */
	public Aspect(String tag, int color, Aspect[] components, int blend) {
		this(tag,color,components,new ResourceLocation("thaumcraft","textures/aspects/"+tag.toLowerCase()+".png"),blend);
	}
	/**
	 * Shortcut constructor I use for the primal aspects -
	 * you shouldn't use this as making your own primal aspects will break all the things.
	 */
	public Aspect(String tag, int color, String chatcolor, int blend) {
		this(tag,color,(Aspect[])null, blend);
		this.setChatcolor(chatcolor);
	}
	/** Returns the display colour as 0xRRGGBB. */
	public int getColor() {
		return color;
	}
	/** Returns the capitalized Latin display name derived from the tag. */
	public String getName() {
		return WordUtils.capitalizeFully(tag);
	}
	/** Returns the localized description looked up under "tc.aspect.<tag>". */
	public String getLocalizedDescription() {
		return StatCollector.translateToLocal("tc.aspect."+tag);
	}
	/** Returns the unique registry key of this aspect. */
	public String getTag() {
		return tag;
	}
	// NOTE(review): changing the tag after registration does not update the
	// aspects or mixList maps - confirm callers never rely on this.
	public void setTag(String tag) {
		this.tag = tag;
	}
	/** Returns the component aspects, or null for primal aspects. */
	public Aspect[] getComponents() {
		return components;
	}
	public void setComponents(Aspect[] components) {
		this.components = components;
	}
	/** Returns the 32x32 icon texture location. */
	public ResourceLocation getImage() {
		return image;
	}
	/** Looks up a registered aspect by tag; returns null if unknown. */
	public static Aspect getAspect(String tag) {
		return aspects.get(tag);
	}
	/** Returns the GL11 blend mode (1 or 771). */
	public int getBlend() {
		return blend;
	}
	public void setBlend(int blend) {
		this.blend = blend;
	}
	/** An aspect is primal when it is not composed of exactly two components. */
	public boolean isPrimal() {
		return getComponents()==null || getComponents().length!=2;
	}
	///////////////////////////////
	/** Returns all registered primal aspects, in registration order. */
	public static ArrayList<Aspect> getPrimalAspects() {
		ArrayList<Aspect> primals = new ArrayList<Aspect>();
		Collection<Aspect> pa = aspects.values();
		for (Aspect aspect:pa) {
			if (aspect.isPrimal()) primals.add(aspect);
		}
		return primals;
	}
	/** Returns all registered compound (non-primal) aspects, in registration order. */
	public static ArrayList<Aspect> getCompoundAspects() {
		ArrayList<Aspect> compounds = new ArrayList<Aspect>();
		Collection<Aspect> pa = aspects.values();
		for (Aspect aspect:pa) {
			if (!aspect.isPrimal()) compounds.add(aspect);
		}
		return compounds;
	}
	/** Returns the chat formatting code, or null for non-primal aspects. */
	public String getChatcolor() {
		return chatcolor;
	}
	public void setChatcolor(String chatcolor) {
		this.chatcolor = chatcolor;
	}
	///////////////////////////////
	// Registry of all aspects by tag. Declared before the constants below so it
	// is initialized before any Aspect constructor runs; do not reorder.
	public static LinkedHashMap<String,Aspect> aspects = new LinkedHashMap<String,Aspect>();
	//PRIMAL
	public static final Aspect AIR = new Aspect("aer",0xffff7e,"e",1);
	public static final Aspect EARTH = new Aspect("terra",0x56c000,"2",1);
	public static final Aspect FIRE = new Aspect("ignis",0xff5a01,"c",1);
	public static final Aspect WATER = new Aspect("aqua",0x3cd4fc,"3",1);
	public static final Aspect ORDER = new Aspect("ordo",0xd5d4ec,"7",1);
	public static final Aspect ENTROPY = new Aspect("perditio",0x404040,"8",771);
	//SECONDARY (PRIMAL + PRIMAL)
	public static final Aspect VOID = new Aspect("vacuos",0x888888, new Aspect[] {AIR, ENTROPY},771);
	public static final Aspect LIGHT = new Aspect("lux",0xffd585, new Aspect[] {AIR, FIRE});
	public static final Aspect MOTION = new Aspect("motus",0xcdccf4, new Aspect[] {AIR, ORDER});
	public static final Aspect COLD = new Aspect("gelum",0xe1ffff, new Aspect[] {FIRE, ENTROPY});
	public static final Aspect CRYSTAL = new Aspect("vitreus",0x80ffff, new Aspect[] {EARTH, AIR});
	public static final Aspect METAL = new Aspect("metallum",0xb5b5cd, new Aspect[] {EARTH, ORDER});
	public static final Aspect LIFE = new Aspect("victus",0xde0005, new Aspect[] {EARTH, WATER});
	public static final Aspect DEATH = new Aspect("mortuus",0x887788, new Aspect[] {WATER, ENTROPY});
	public static final Aspect ENERGY = new Aspect("potentia",0xc0ffff, new Aspect[] {ORDER, FIRE});
	public static final Aspect EXCHANGE = new Aspect("permutatio",0x578357, new Aspect[] {ENTROPY, ORDER});
	// public static final Aspect WEATHER = new Aspect("tempestas",0xFFFFFF, new Aspect[] {AIR, WATER});
	// public static final Aspect ?? = new Aspect("??",0xcdccf4, new Aspect[] {FIRE, EARTH});
	// public static final Aspect ?? = new Aspect("??",0xcdccf4, new Aspect[] {FIRE, WATER});
	// public static final Aspect ?? = new Aspect("??",0xcdccf4, new Aspect[] {ORDER, WATER});
	// public static final Aspect ?? = new Aspect("??",0xcdccf4, new Aspect[] {EARTH, ENTROPY});
	//TERTIARY
	public static final Aspect AURA = new Aspect("auram",0xffc0ff, new Aspect[] {ENERGY, AIR});
	public static final Aspect FLUX = new Aspect("vitium",0x800080, new Aspect[] {ENTROPY, ENERGY});
	public static final Aspect DARKNESS = new Aspect("tenebrae",0x222222, new Aspect[] {VOID, LIGHT});
	public static final Aspect ELDRITCH = new Aspect("alienis",0x805080, new Aspect[] {VOID, DARKNESS});
	public static final Aspect FLIGHT = new Aspect("volatus",0xe7e7d7, new Aspect[] {AIR, MOTION});
	public static final Aspect PLANT = new Aspect("herba",0x01ac00, new Aspect[] {LIFE, EARTH});
	public static final Aspect TOOL = new Aspect("instrumentum",0x4040ee, new Aspect[] {METAL, ENERGY});
	public static final Aspect CRAFT = new Aspect("fabrico",0x809d80, new Aspect[] {EXCHANGE, TOOL});
	public static final Aspect MECHANISM = new Aspect("machina",0x8080a0, new Aspect[] {MOTION, TOOL});
	public static final Aspect TRAP = new Aspect("vinculum",0x9a8080, new Aspect[] {MOTION, ENTROPY});
	public static final Aspect SOUL = new Aspect("spiritus",0xebebfb, new Aspect[] {LIFE, DEATH});
	public static final Aspect MIND = new Aspect("cognitio",0xffc2b3, new Aspect[] {FIRE, SOUL});
	public static final Aspect SENSES = new Aspect("sensus",0x0fd9ff, new Aspect[] {AIR, SOUL});
	public static final Aspect AVERSION = new Aspect("aversio",0xc05050, new Aspect[] {SOUL, ENTROPY});
	public static final Aspect PROTECT = new Aspect("praemunio",0x00c0c0, new Aspect[] {SOUL, EARTH});
	public static final Aspect DESIRE = new Aspect("desiderium",0xe6be44, new Aspect[] {SOUL, VOID});
	public static final Aspect UNDEAD = new Aspect("exanimis",0x3a4000, new Aspect[] {MOTION, DEATH});
	public static final Aspect BEAST = new Aspect("bestia",0x9f6409, new Aspect[] {MOTION, LIFE});
	public static final Aspect MAN = new Aspect("humanus",0xffd7c0, new Aspect[] {SOUL, LIFE});
}
| |
package igx.bots;
// RobotArena.java
import igx.shared.GameInstance;
import java.util.*;
import java.io.*;
import java.awt.Color;
/**
* This is the testing grounds for your 'bot. The RobotArena will run a game or
* multiple games with your robot(s) and report the outcome. You can interface
* this class in two ways. The standard use of the Arena is to call the
* {@link #main main} method with the appropriate parameters from the
* command-line. For more advanced techniques (such as genetic algorithms or
* other kinds of statistical testing) you can write your own testing code that
* calls the {@link #runGame runGame} method.
* <P>
* Unlike the igx server, the RobotArena is kinder to your 'bots with respect to
* time. Your 'bots each run in their own thread asynchronously to the game
* thread. Thus, if your 'bots are taking too long processing, they can miss
* their chance to make moves if the game thread advances. The RobotArena won't
* do this, however. It will always wait until all 'bot threads are finished
* processing before it advances to the next segment. The Arena can return
* information on how long it took the 'bots to process, however, so be sure to
* make sure your 'bot isn't sleeping on the job.
*/
public class RobotArena implements RobotServer, igx.shared.UI {
    /**
     * The 'bots themselves.
     */
    public Bot[] bot = new Bot[Constants.MAXIMUM_PLAYERS];
    /**
     * The classes of the bots that will compete.
     */
    private Class[] botClass = new Class[Constants.MAXIMUM_PLAYERS];
    /**
     * The skill level of each competing 'bot.
     */
    private int[] skill = new int[Constants.MAXIMUM_PLAYERS];
    /**
     * The number of bots competing.
     */
    private int numBots = 0;
    /**
     * The amount of robot activity to report during a game (between 0 and 4).
     */
    public int activityLevel = 0;
    /**
     * Indicates whether a full report should follow each game.
     */
    public boolean fullReport = false;
    /**
     * Indicates whether random events should be reported.
     */
    public boolean reportEvents = false;
    /**
     * Indicates whether the bots' debug info should be output.
     */
    public boolean debugMode = false;
    /**
     * Indicates whether the bots should be timed or not.
     */
    public boolean timeRobots = false;
    /**
     * The random number seed to be used.
     */
    public long seed = System.currentTimeMillis();
    // Number of games to play in sequence; set via the -games option.
    private int numGames = 1;
    /**
     * The number of segments to play.
     */
    public int numSegments = 1000;
    /**
     * Number of segments before updates.
     */
    public int updatePeriod = 0;
    // Per-bot pending message queues; drained (and cleared) by getMessages().
    private Vector[] messages;
    // NOTE(review): presumably fleets in transit / arriving fleets - confirm
    // against the parts of this class not shown here.
    private Vector dispatches;
    private Vector arrivals;
    private boolean robotDone = false;
    private GameInstance gameInstance;
    // The state of the game currently being played.
    private GameState game;
/**
* Adds a robot to the arena given a {@link Bot Bot} object and a skill.
*/
public void addRobot(Bot bot, int skill) {
addRobot(bot.getClass(), skill);
}
/**
* Adds a robot to the arena given the class name and skill. You can also
* add a robot with {@link #addRobot(igx.bots.Bot,int) the other addRobot}.
*/
public void addRobot(Class robotClass, int skill) {
if (numBots == Constants.MAXIMUM_PLAYERS) {
return;
}
Bot robot;
try {
// Try to instantiate as a robot
robot = (Bot) robotClass.newInstance();
botClass[numBots] = robotClass;
this.skill[numBots] = skill;
numBots++;
} catch (ClassCastException e) {
complain("This... thing... is not a 'bot: " + robotClass + "!");
} catch (Exception e) {
complain("Problem with your 'bot, " + robotClass + ". " + e + ".");
}
}
    /**
     * Prints a complaint to standard output, then prints the usage text.
     * NOTE(review): showUsage() is defined elsewhere in this class and may
     * terminate the process - confirm before relying on code after complain().
     */
    private static void complain(String complaint) {
        System.out.println(complaint);
        showUsage();
    }
/**
* Given a {@link GameState GameState}, outputs the state of the galaxy to
* the screen.
*/
public void displayGalaxy(GameState game) {
System.out.println("Time: " + game.getTurn() + ":" + game.getSegment());
for (int y = 0; y < Constants.MAP_HEIGHT; y++) {
for (int x = 0; x < Constants.MAP_WIDTH; x++) {
int point = game.getXY(x, y);
char c = '.';
if (point != Constants.EMPTY_SPACE) {
c = Planet.planetChar(point);
}
System.out.print(new Character(c).toString());
}
System.out.println("");
}
for (int i = 0; i < Constants.PLANETS; i++) {
System.out.print(new Character(Planet.planetChar(i)).toString()
+ "(" + i + ") - owner: ");
Planet p = game.getPlanet(i);
if (p.getOwner() == Constants.NEUTRAL) {
System.out.println("Neutral");
} else {
System.out.print(game.getPlayer(p.getOwner()).getName() + "(" + p.getOwner() + ")");
System.out.print(" - ships: " + p.getShips());
System.out.print(" - production: " + p.getProduction());
System.out.print(" - ratio: " + p.getRatio());
for (int j = 0; j < numBots; j++) {
if (p.getAttackers(j) != 0) {
System.out.print(" - " + game.getPlayer(j).getName() + "(" + j + "): " + p.getAttackers(j));
}
}
System.out.println("");
}
}
}
/**
* Called by main to run the command-line Arena.
*/
public void doArena() {
if (numGames == 1) {
runGame();
} else {
int[] stats = new int[numBots];
for (int i = 0; i < numBots; i++) {
stats[i] = 0;
}
for (int i = 0; i < numGames; i++) {
System.out.println("----------");
System.out.println("Game #" + (i + 1));
int winner = runGame().winner;
stats[winner]++;
seed++;
}
// Output final results
System.out.println("-------------");
System.out.println("Final Results");
for (int i = 0; i < numBots; i++) {
robotReport(bot[i], ": Won " + stats[i] + " games.");
}
}
}
/**
* Gets a particular bot (by number)
*/
public Bot getBot(int i) {
return bot[i];
}
private static String[] getFileArgs(String file) {
Vector result = new Vector();
try {
BufferedReader br = new BufferedReader(new FileReader(file));
String line = br.readLine();
while (line != null) {
if (line.charAt(0) == '#') {
line = br.readLine();
continue;
}
int end = line.length();
int begin = 0;
int space = line.indexOf(' ', begin);
while (space != -1) {
result.addElement(line.substring(begin, space));
begin = space + 1;
space = line.indexOf(' ', begin);
}
result.addElement(line.substring(begin));
line = br.readLine();
}
} catch (FileNotFoundException e) {
complain("Couldn't find parameter file: " + file + ".");
} catch (IOException e) {
complain("IO Error: " + e);
}
String[] retVal = new String[result.size()];
for (int i = 0; i < result.size(); i++) {
retVal[i] = (String) (result.elementAt(i));
}
return retVal;
}
private Message[] getMessages(int botNum) {
Vector queue = messages[botNum];
Message[] retVal = new Message[queue.size()];
for (int i = 0; i < queue.size(); i++) {
retVal[i] = (Message) queue.elementAt(i);
}
// Empty queue
queue = new Vector();
return retVal;
}
private static int getNumberParam(String s) {
int val = 0;
try {
val = Integer.parseInt(s);
} catch (NumberFormatException e) {
System.out.println("Invalid number format: " + s + ".");
showUsage();
}
return val;
}
/**
* The command-line interface to the Arena. Here's the usage:
* <P>
* <CODE>java igx.bots.RobotArena <I>bot<B>1</B>[skill]</I>
     * <I>bot<B>2</B>[skill]</I> ... <I>bot<B>n</B>[skill]</I>
* -<I>option</I> -<I>option</I> ...</CODE>
* <P>
* Where <I>bot<B>i</B></I> is the <B>class name</B> of the 'bot who should
* be the <B>i</B>th player. Put the skill level in square brackets after
* each 'bot class name. There can be no more than 9 robots in a game. For
* example, let's say the class name of the class that extends
* {@link Bot Bot} is igx.bots.MoonBot. Then if I wanted the top 3
* skill-level MoonDroids to face off, here's the command line:
* <P>
* <CODE>java igx.bots.RobotArena igx.bots.MoonBot[8] igx.bots.MoonBot[9] igx.bots.MoonBot[10]</CODE>
* <P>
* By default, a game with a random number seed will be played for 1000
* segments, then the winner will be reported. The options can override this
* behaviour, and they are as follows:
* <UL>
* <LI><B>-file <I>filename</I></B> - This opens the specified file and
* reads the contents as if it was the command line. You'll need to use this
* since Windows only allows a maximum of eight parameters on the command
* line. (Micky Mouse!) You can have linefeeds in the file, but make sure
* there are no unnecessary spaces at the end of any line.
* <LI><B>-seed <I>#</I></B> - This allows you to specify the random number
* seed. Very handy for debugging. If the option is not specified, the seed
* will be "randomly" selected.
* <LI><B>-games <I>#</I></B> - Specifies the number of games to play in
* sequence. The Arena will report basic statistics over all the games at
* the end.
* <LI><B>-time <I>#</I></B> - This specifies the number of segments to
* play. The default is 1000.
* <LI><B>-update <I>#</I></B> - This specifies how often you want the Arena
* to output the state of the galaxy. If you specify 0, then no reports will
* occur. Otherwise, every # segments you'll see a report.
* <LI><B>-report</B> - This will give a detailed report at the end of the
* game (similar to the updates).
* <LI><B>-events</B> - This will cause random events to be reported when
* they occur.
* <LI><B>-activity <I>n</I></B> - This reports the activities of the
* droids.
* <UL>
* <LI>n=0 is the default, and this reports none of their activity.
* <LI>n=1 reports all fleets and messages that the droids send.
* <LI>n=2 also reports invasions.
* <LI>n=3 also reports all attacking and repelled fleets.
* <LI>n=4 also reports all reinforcements.
* </UL>
* <LI><B>-stopwatch</B> - This will cause the arena to time the robots each
* segment and report how long they spent processing.
* <LI><B>-debug</B> - This will cause <B>your</B> debugging information to
* be reported. See the {@link Bot#debug Bot.debug} method for more
* information.
* </UL>
*/
public static void main(String[] args) {
System.out.println("The intergalactics RobotArena: It's like the Turing Test... only better!");
System.out.println("Version " + Constants.VERSION + ", HiVE Software.");
System.out.println("");
RobotArena arena = new RobotArena();
int numArgs = args.length;
if (numArgs == 0) {
showUsage();
}
for (int i = 0; i < numArgs; i++) {
String arg = args[i];
if (arg.charAt(0) == '-') {
// Option
arg = arg.substring(1);
if (arg.equals("file")) {
if (i == numArgs - 1) {
complain("Parameter file not specified.");
} else {
i++;
arg = args[i];
args = getFileArgs(arg);
numArgs = args.length;
i = -1;
}
} else if (arg.equals("seed")) {
if (i == numArgs - 1) {
complain("Random seed not specified.");
} else {
i++;
arg = args[i];
arena.seed = (long) getNumberParam(arg);
}
} else if (arg.equals("games")) {
if (i == numArgs - 1) {
complain("Number of games not specified.");
} else {
i++;
arg = args[i];
arena.numGames = getNumberParam(arg);
if (arena.numGames <= 0) {
System.out.println("Must play at least 1 game.");
showUsage();
}
}
} else if (arg.equals("time")) {
if (i == numArgs - 1) {
complain("Number of segments not specified.");
} else {
i++;
arg = args[i];
arena.numSegments = getNumberParam(arg);
if (arena.numSegments < 0) {
complain("Can't play negative time.");
}
}
} else if (arg.equals("update")) {
if (i == numArgs - 1) {
complain("Update period not specified.");
} else {
i++;
arg = args[i];
arena.updatePeriod = getNumberParam(arg);
if (arena.updatePeriod < 0) {
complain("Negative update period makes no sense.");
}
}
} else if (arg.equals("activity")) {
if (i == numArgs - 1) {
complain("Activity level not specified.");
} else {
i++;
arg = args[i];
arena.activityLevel = getNumberParam(arg);
if ((arena.activityLevel < 0) || (arena.activityLevel > 4)) {
complain("Activity level must be between 0 and 4");
}
}
} else if (arg.equals("report")) {
arena.fullReport = true;
} else if (arg.equals("events")) {
arena.reportEvents = true;
} else if (arg.equals("debug")) {
arena.debugMode = true;
} else if (arg.equals("stopwatch")) {
arena.timeRobots = true;
} else {
complain("Invalid option: " + arg + ".");
}
} else {
// robot class specification
int leftBracket = arg.indexOf('[');
int rightBracket = arg.indexOf(']');
if ((leftBracket < 0) || (rightBracket < leftBracket)) {
complain("Robot improperly specified:" + arg + ".");
}
String className = arg.substring(0, leftBracket);
int skill = getNumberParam(arg.substring(leftBracket + 1, rightBracket));
if ((skill < 0) || (skill > 9)) {
complain("Skill levels must be between 0 and 9.");
}
Class robotClass;
Bot robot;
try {
robotClass = Class.forName(className);
// Try to instantiate as a robot
robot = (Bot) robotClass.newInstance();
if (skill >= robot.numberOfBots()) {
complain("Skill for robot " + className + " must be between 0 and "
+ (robot.numberOfBots() - 1) + ".");
}
arena.botClass[arena.numBots] = robotClass;
arena.skill[arena.numBots] = skill;
arena.numBots++;
} catch (ClassNotFoundException e) {
complain("'bot class name: " + className + " not found.");
} catch (ClassCastException e) {
complain("This... thing... is not a 'bot: " + className + "!");
} catch (Exception e) {
complain("Problem with your 'bot, " + className + ". " + e + ".");
}
}
}
if (arena.numBots == 0) {
complain("Get some 'bots in here!");
} else if (arena.numBots > Constants.MAXIMUM_PLAYERS) {
complain("Too many 'bots for this arena! Maximum is " + Constants.MAXIMUM_PLAYERS + ".");
}
arena.doArena();
}
// Attack
public void postAttack(igx.shared.Fleet fleet, igx.shared.Planet planet) {
if (activityLevel >= 3) {
int botNum = fleet.owner.number;
robotReport(bot[botNum], " Attacks " + planet.planetChar + " with " + fleet.ships + " ships.");
}
arrivals.addElement(new ArrivedFleet(fleet.ships, Planet.planetNumber(planet.planetChar), fleet.owner.number));
}
    // Black hole event: a fleet was lost to a black hole. The arena ignores it.
    public void postBlackHole(igx.shared.Fleet fleet) {
    }
    // Error reported by the game engine. Surfaced on the console only.
    // NOTE(review): errorMessage itself is not printed — confirm that is intended.
    public void postError(String errorMessage) {
        System.out.println("OUCH! Error in the game engine... report this to HiVE Software!");
    }
    // Game end notification. runGame() computes the winner itself, so nothing to do here.
    public void postGameEnd(int winnerNumber) {
    }
    //// Post game events
    // Game start notification. No arena-side work needed.
    public void postGameStart(GameInstance game) {
    }
// Invasion
public void postInvasion(igx.shared.Fleet fleet, igx.shared.Planet planet) {
if (activityLevel >= 2) {
int botNum = fleet.owner.number;
robotReport(bot[botNum], " Invades " + planet.planetChar + ".");
}
}
    // Player-to-player message. Robot messages flow through robotSendMessage() instead.
    public void postMessage(igx.shared.Player sender, igx.shared.Player recipient, String message) {
    }
    // Next-turn tick. The arena drives its own segment loop in runGame(), so ignored.
    public void postNextTurn() {
    }
    // Planet moved. No UI to update, so ignored.
    public void postPlanetMove(int oldX, int oldY, igx.shared.Planet planet) {
    }
    // Player quit. Robots don't quit mid-game, so ignored.
    public void postPlayerQuit(igx.shared.Player player) {
    }
    // Redraw request for the whole galaxy. Headless arena — nothing to draw.
    public void postRedrawGalaxy() {
    }
// Reinforcements
public void postReinforcements(int numberOfShips, igx.shared.Planet planet) {
if (activityLevel >= 4) {
int botNum = planet.owner.number;
robotReport(bot[botNum], " " + numberOfShips + " reinforcements arrive at " + planet.planetChar + ".");
}
arrivals.addElement(new ArrivedFleet(numberOfShips, Planet.planetNumber(planet.planetChar), planet.owner.number));
}
// Repulsion
public void postRepulsion(igx.shared.Player attacker, igx.shared.Planet planet) {
if (activityLevel >= 3) {
int botNum = attacker.number;
robotReport(bot[botNum], " Repelled from " + planet.planetChar + ".");
}
}
// Special Event
public void postSpecial(String[] text, Color[] color) {
if (reportEvents) {
String output = "";
for (int i = 0; i < text.length; i++) {
output += text[i];
}
System.out.println(output);
}
}
    // UI STUFF
    // Redraw all planets. Headless arena — nothing to draw.
    public void redrawAll() {
    }
    // Redraw a single planet. Headless arena — nothing to draw.
    public void redrawPlanet(int planetNum) {
    }
    /**
     * Called when a robot is done processing its turn. Sets the completion
     * flag and wakes the arena thread blocked in runGame()'s wait() loop.
     *
     * @param botNum index of the finished robot (currently unused here)
     */
    public synchronized void robotDone(int botNum) {
        robotDone = true;
        notify();
    }
private void robotReport(Bot robot, String text) {
System.out.println(robot.getBotName() + "(" + robot.getNumber() + "): " + text);
}
    /**
     * Called to send a fleet on behalf of a robot. Validates ownership and
     * ship count, then queues the fleet for dispatch at the end of the
     * current segment. Logged at activity level 1+.
     *
     * NOTE(review): the ownership check reads the robot-visible GameState
     * ("game") while the ship-count check reads the engine state
     * ("gameInstance") directly — confirm the two are always in sync here.
     */
    public synchronized void robotSendFleet(int botNum, Fleet fleet) {
        // Check for source ownership
        Planet source = game.getPlanet(fleet.source);
        if (botNum != source.getOwner()) {
            if (debugMode) {
                robotReport(bot[botNum], " Tried to send a fleet from a planet it didn't own: " + Planet.planetChar(fleet.source) + ".");
            }
            return;
        } else if (fleet.ships > gameInstance.planet[fleet.source].ships) {
            if (debugMode) {
                robotReport(bot[botNum], " Tried to send " + fleet.ships + " from planet " + Planet.planetChar(fleet.source) + ", where there are only " + gameInstance.planet[fleet.source].ships + " ships.");
            }
            // NOTE(review): the request is clamped to the available ships but
            // then dropped (return without dispatching) — confirm intended.
            fleet.ships = source.getShips();
            return;
        }
        // Handle useless fleets (empty, or sent to the planet of origin)
        if ((fleet.ships <= 0) || (fleet.source == fleet.destination)) {
            return;
        }
        // Translate the robot's Fleet into an engine fleet and queue it; the
        // queue is handed to gameInstance.update() in runGame().
        igx.shared.Fleet gameFleet = new igx.shared.Fleet(gameInstance,
            gameInstance.planet[fleet.source],
            gameInstance.planet[fleet.destination],
            fleet.ships);
        dispatches.addElement(gameFleet);
        if (activityLevel >= 1) {
            robotReport(bot[botNum], " Sent " + fleet.ships + " from " + Planet.planetChar(fleet.source) + " to "
                + Planet.planetChar(fleet.destination) + ".");
        }
    }
/**
* Called to send a message on behalf of a robot.
*/
public synchronized void robotSendMessage(int botNum, int recipient, String text) {
Message message = new Message(botNum, recipient, text);
if (recipient == Constants.MESSAGE_TO_ALL) {
for (int i = 0; i < numBots; i++) {
messages[i].addElement(message);
}
} else {
messages[recipient].addElement(message);
}
if (activityLevel >= 1) {
String output = "Message to ";
if (recipient == Constants.MESSAGE_TO_ALL) {
output += "ALL";
} else {
output += bot[recipient].getBotName();
}
output += ": " + text;
robotReport(bot[botNum], output);
}
}
    /**
     * Runs a game of igx for the robots. Returns a
     * {@link Statistics Statistics} object with info about how the 'bots did.
     * This is called by {@link #main main}, but if you want to use it (say for
     * a genetic algorithm) then here's how:
     * <UL>
     * <LI>Create an instance of {@link RobotArena RobotArena}.
     * <LI>Set the parameters you want for the game by modifying the public
     * variables of the RobotArena object. In particular, make sure that the
     * random number {@link #seed seed} differs between games (if you want the
     * games to differ).
     * <LI>Call {@link #addRobot addRobot} for each 'bot you want in the game.
     * <LI>You're ready to call <I>runGame</I>. May the best 'bot win!
     * </UL>
     *
     * @return a set of statistics giving the scores and average running times
     *         of the bots.
     */
    public Statistics runGame() {
        // Set up the random seed.
        // NOTE(review): the bots are seeded with seed + 1 while the game
        // engine below uses seed directly — presumably to decorrelate the two
        // random streams; confirm.
        Bot.setRandomSeed(seed + 1);
        // Make some robots (one fresh instance per registered bot class)
        try {
            for (int i = 0; i < numBots; i++) {
                bot[i] = (Bot) botClass[i].newInstance();
            }
        } catch (Exception e) {
            System.out.println("Hardcore error: " + e);
            System.exit(1);
        }
        // Set up message repositories (one mailbox per robot)
        messages = new Vector[numBots];
        for (int i = 0; i < numBots; i++) {
            messages[i] = new Vector();
        }
        // Fleet dispatching queue
        dispatches = new Vector();
        // Fleet arrivals queue
        arrivals = new Vector();
        // Initialize go-between messenger
        RobotMessenger rm = new RobotMessenger(this, numBots);
        // Generate player structures...
        igx.shared.Player player[] = new igx.shared.Player[numBots];
        for (int i = 0; i < numBots; i++) {
            // NOTE(review): this uses ">" while main() rejects skills with
            // ">= numberOfBots()" — one of the two bounds is off by one.
            if (skill[i] > bot[i].numberOfBots()) {
                complain("Invalid skill number for bot " + i + ".");
            }
            player[i] = new igx.shared.Player(bot[i].createName(skill[i]), i);
        }
        // Generate game
        gameInstance = new GameInstance(seed, numBots, player);
        gameInstance.registerUI(this);
        // Init stats structure
        Statistics stats = new Statistics(numBots);
        // Initialize game structure for robots
        game = new GameState(gameInstance, null);
        GameState oldState = game;
        // Initialize robots with game
        for (int i = 0; i < numBots; i++) {
            bot[i].initializeBot(game, rm, i, skill[i], debugMode);
        }
        // Let's play igx! One iteration per time segment.
        for (int i = 0; i < numSegments; i++) {
            // Display galaxy if required
            if ((updatePeriod > 0) && ((i % updatePeriod) == 0)) {
                displayGalaxy(game);
            }
            for (int j = 0; j < numBots; j++) {
                // Call and time user's update code; the bot signals completion
                // through robotDone(), which sets the flag and notify()s us.
                long startTime = System.currentTimeMillis();
                bot[j].updateBot(game, oldState, getMessages(j));
                synchronized (this) {
                    try {
                        while (!robotDone) {
                            wait();
                        }
                    } catch (InterruptedException e) {
                        System.out.println("Hardcore error: " + e);
                        System.exit(1);
                    }
                }
                long totalTime = System.currentTimeMillis() - startTime;
                stats.reportTime(j, totalTime);
                if (timeRobots) {
                    robotReport(bot[j], "Running Time: " + totalTime);
                }
                // Reset the handshake flag for the next bot.
                robotDone = false;
            }
            // Apply all queued fleet dispatches, then snapshot the new state.
            gameInstance.update(dispatches);
            oldState = game;
            ArrivedFleet[] arrival = new ArrivedFleet[arrivals.size()];
            for (int j = 0; j < arrivals.size(); j++) {
                arrival[j] = (ArrivedFleet) (arrivals.elementAt(j));
            }
            game = new GameState(gameInstance, arrival);
            arrivals = new Vector();
        }
        // Game over: find the highest score and let each bot clean up.
        int winner = 0;
        int bestScore = 0;
        for (int i = 0; i < numBots; i++) {
            stats.gameOver(i, game.getPlayer(i).getScore(), numSegments);
            if (game.getPlayer(i).getScore() > bestScore) {
                bestScore = game.getPlayer(i).getScore();
                winner = i;
            }
            bot[i].gameEnding();
        }
        stats.setWinner(winner);
        // End game report if needed
        if (fullReport) {
            System.out.println("Game ends...");
            displayGalaxy(game);
            for (int i = 0; i < numBots; i++) {
                System.out.println(game.getPlayer(i).getName() + "(" + i + "): " + game.getPlayer(i).getScore());
            }
            System.out.println("----------------");
            System.out.println("Winner: " + bot[winner].getBotName() + "(" + winner + ").");
        }
        return stats;
    }
/**
* Shows how to use this damn thing.
*/
protected static void showUsage() {
System.out.println("Usage:");
System.out.println("");
System.out.println("java igx.bots.RobotArena bot1[skill] bot2[skill] ... botn[skill] -option -option ...");
System.out.println("");
System.out.println("The options are as follows:");
System.out.println("-file filename - This opens the specified file and reads the contents as if it was");
System.out.println("the command line. You'll need to use this since Windows only allows a maximum of eight parameters on");
System.out.println("the command line. (Micky Mouse!) You can have linefeeds in the file, but make sure there are no");
System.out.println("unnecessary spaces at the end of any line.");
System.out.println("-seed # - This allows you to specify the random number seed. Very handy for debugging. If the option is not specified, the seed will be \"randomly\" selected.");
System.out.println("-games # - Specifies the number of games to play in sequence. The Arena will report basic statistics over all the games at the end.");
System.out.println("-time # - This specifies the number of segments to play. The default is 1000.");
System.out.println("-update # - This specifies how often you want the Arena to output the state of the galaxy. If you specify 0, then no reports will occur. Otherwise, every # segments you'll see a report.");
System.out.println("-report - This will give a detailed report at the end of the game (similar to the updates).");
System.out.println("-events - This will cause random events to be reported when they occur.");
System.out.println("-activity n - This reports the activities of the droids, where:");
System.out.println(" n=0 is the default, and this reports none of their activity.");
System.out.println(" n=1 reports all fleets and messages that the droids send.");
System.out.println(" n=2 also reports invasions.");
System.out.println(" n=3 also reports all attacking and repelled fleets.");
System.out.println(" n=4 also reports all reinforcements.");
System.out.println("-stopwatch - This will cause the arena to time the robots each segment and report how long they spent processing.");
System.out.println("-debug - This will cause your debugging information to be reported. See the Bot.Debug method for more information.");
System.out.println("");
System.out.println("See the javadocs for more details on using the Robot Arena.");
System.exit(0);
}
}
| |
/*
* Copyright (c) 2017, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.apimgt.impl.workflow;
import org.apache.axiom.om.OMElement;
import org.apache.axiom.om.impl.builder.StAXOMBuilder;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
import org.wso2.carbon.apimgt.impl.APIConstants;
import org.wso2.carbon.apimgt.impl.dto.WorkflowDTO;
import org.wso2.carbon.apimgt.impl.internal.ServiceReferenceHolder;
import org.wso2.carbon.registry.core.Resource;
import org.wso2.carbon.registry.core.ResourceImpl;
import org.wso2.carbon.registry.core.exceptions.RegistryException;
import org.wso2.carbon.registry.core.service.RegistryService;
import org.wso2.carbon.registry.core.session.UserRegistry;
import javax.xml.stream.XMLStreamException;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.InputStream;
import java.lang.reflect.Constructor;
import java.util.List;
/**
* TenantWorkflowConfigHolder test cases
*/
@RunWith(PowerMockRunner.class)
@PrepareForTest({ServiceReferenceHolder.class, TenantWorkflowConfigHolder.class})
public class TenantWorkflowConfigHolderTest {
private int tenantID = -1234;
private String tenantDomain = "carbon.super";
private UserRegistry registry;
@Before
public void init() throws RegistryException {
ServiceReferenceHolder serviceReferenceHolder = Mockito.mock(ServiceReferenceHolder.class);
RegistryService registryService = Mockito.mock(RegistryService.class);
registry = Mockito.mock(UserRegistry.class);
PowerMockito.mockStatic(ServiceReferenceHolder.class);
Mockito.when(ServiceReferenceHolder.getInstance()).thenReturn(serviceReferenceHolder);
Mockito.when(serviceReferenceHolder.getRegistryService()).thenReturn(registryService);
Mockito.when(registryService.getGovernanceSystemRegistry(Mockito.anyInt())).thenReturn(registry);
}
@Test
public void testLoadingDefaultTenantWorkflowConfig() throws FileNotFoundException, XMLStreamException,
RegistryException {
TenantWorkflowConfigHolder tenantWorkflowConfigHolder = new TenantWorkflowConfigHolder(tenantDomain, tenantID);
File defaultWFConfigFile = new File(Thread.currentThread().getContextClassLoader().
getResource("workflow-configs/default-workflow-extensions.xml").getFile());
InputStream defaultWFConfigContent = new FileInputStream(defaultWFConfigFile);
Resource defaultWFConfigResource = new ResourceImpl();
defaultWFConfigResource.setContentStream(defaultWFConfigContent);
Mockito.when(registry.get(APIConstants.WORKFLOW_EXECUTOR_LOCATION)).thenReturn(defaultWFConfigResource);
try {
tenantWorkflowConfigHolder.load();
Assert.assertNotNull(tenantWorkflowConfigHolder.getWorkflowExecutor("AM_APPLICATION_CREATION"));
Assert.assertNotNull(tenantWorkflowConfigHolder.getWorkflowExecutor
("AM_APPLICATION_REGISTRATION_PRODUCTION"));
Assert.assertNotNull(tenantWorkflowConfigHolder.getWorkflowExecutor
("AM_APPLICATION_REGISTRATION_SANDBOX"));
Assert.assertNotNull(tenantWorkflowConfigHolder.getWorkflowExecutor("AM_USER_SIGNUP"));
Assert.assertNotNull(tenantWorkflowConfigHolder.getWorkflowExecutor("AM_SUBSCRIPTION_CREATION"));
Assert.assertNotNull(tenantWorkflowConfigHolder.getWorkflowExecutor("AM_SUBSCRIPTION_DELETION"));
Assert.assertNotNull(tenantWorkflowConfigHolder.getWorkflowExecutor("AM_API_STATE"));
} catch (WorkflowException e) {
Assert.fail("Unexpected WorkflowException occurred while loading default tenant workflow configuration");
}
}
@Test
public void testLoadingExtendedTenantWorkflowConfig() throws FileNotFoundException, XMLStreamException,
RegistryException {
TenantWorkflowConfigHolder tenantWorkflowConfigHolder = new TenantWorkflowConfigHolder(tenantDomain, tenantID);
File defaultWFConfigFile = new File(Thread.currentThread().getContextClassLoader().
getResource("workflow-configs/workflow-extensions.xml").getFile());
InputStream defaultWFConfigContent = new FileInputStream(defaultWFConfigFile);
Resource defaultWFConfigResource = new ResourceImpl();
defaultWFConfigResource.setContentStream(defaultWFConfigContent);
Mockito.when(registry.get(APIConstants.WORKFLOW_EXECUTOR_LOCATION)).thenReturn(defaultWFConfigResource);
try {
tenantWorkflowConfigHolder.load();
Assert.assertNotNull(tenantWorkflowConfigHolder.getWorkflowExecutor("AM_APPLICATION_CREATION"));
Assert.assertNotNull(tenantWorkflowConfigHolder.getWorkflowExecutor
("AM_APPLICATION_REGISTRATION_PRODUCTION"));
Assert.assertNotNull(tenantWorkflowConfigHolder.getWorkflowExecutor
("AM_APPLICATION_REGISTRATION_SANDBOX"));
Assert.assertNotNull(tenantWorkflowConfigHolder.getWorkflowExecutor("AM_USER_SIGNUP"));
Assert.assertNotNull(tenantWorkflowConfigHolder.getWorkflowExecutor("AM_SUBSCRIPTION_CREATION"));
Assert.assertNotNull(tenantWorkflowConfigHolder.getWorkflowExecutor("AM_SUBSCRIPTION_DELETION"));
Assert.assertNotNull(tenantWorkflowConfigHolder.getWorkflowExecutor("AM_API_STATE"));
} catch (WorkflowException e) {
Assert.fail("Unexpected WorkflowException occurred while loading extended tenant workflow configuration");
}
}
@Test
public void testFailureToLoadTenantWFConfigWhenErrorWhileLoadingRegistryResource() throws FileNotFoundException,
XMLStreamException, RegistryException {
TenantWorkflowConfigHolder tenantWorkflowConfigHolder = new TenantWorkflowConfigHolder(tenantDomain, tenantID);
Mockito.when(registry.get(APIConstants.WORKFLOW_EXECUTOR_LOCATION)).thenThrow(new RegistryException("Error " +
"loading Workflow Resource"));
try {
tenantWorkflowConfigHolder.load();
Assert.fail("Expected WorkflowException has not been thrown when registry resource loading failed");
} catch (WorkflowException e) {
Assert.assertEquals(e.getMessage(), "Error loading Resource from path" + APIConstants
.WORKFLOW_EXECUTOR_LOCATION);
}
}
@Test
public void testFailureToLoadTenantWFConfigWhenWFExecutorClassNotFound() throws Exception {
//Workflow executor is an non existing class so that ClassNotFoundException will be thrown
String invalidWFExecutor =
"<WorkFlowExtensions>\n" +
" <ApplicationCreation executor=\"org.wso2.carbon.apimgt.impl.workflow" +
".TestExecutor\"/></WorkFlowExtensions>";
InputStream invalidInputStream = new ByteArrayInputStream(invalidWFExecutor.getBytes("UTF-8"));
TenantWorkflowConfigHolder tenantWorkflowConfigHolder = new TenantWorkflowConfigHolder(tenantDomain, tenantID);
Resource defaultWFConfigResource = new ResourceImpl();
defaultWFConfigResource.setContentStream(invalidInputStream);
Mockito.when(registry.get(APIConstants.WORKFLOW_EXECUTOR_LOCATION)).thenReturn(defaultWFConfigResource);
try {
tenantWorkflowConfigHolder.load();
Assert.fail("Expected WorkflowException has not been thrown when workflow executor class not found");
} catch (WorkflowException e) {
Assert.assertEquals(e.getMessage(), "Unable to find class");
}
}
@Test
public void testFailureToLoadTenantWFConfigWhenWFExecutorClassCannotBeInstantiated() throws Exception {
//Workflow executor is an abstract class so that InstantiationException will be thrown
String invalidWFExecutor =
"<WorkFlowExtensions>\n" +
" <ApplicationCreation executor=\"org.wso2.carbon.apimgt.impl.workflow" +
".WorkflowExecutor\"/></WorkFlowExtensions>";
InputStream invalidInputStream = new ByteArrayInputStream(invalidWFExecutor.getBytes("UTF-8"));
TenantWorkflowConfigHolder tenantWorkflowConfigHolder = new TenantWorkflowConfigHolder(tenantDomain, tenantID);
Resource defaultWFConfigResource = new ResourceImpl();
defaultWFConfigResource.setContentStream(invalidInputStream);
Mockito.when(registry.get(APIConstants.WORKFLOW_EXECUTOR_LOCATION)).thenReturn(defaultWFConfigResource);
try {
tenantWorkflowConfigHolder.load();
Assert.fail("Expected WorkflowException has not been thrown when workflow executor class cannot be " +
"instantiate");
} catch (WorkflowException e) {
Assert.assertEquals(e.getMessage(), "Unable to instantiate class");
}
}
@Test
public void testFailureToLoadTenantWFConfigWhenXMLStreamExceptionOccurredWhileParsingConfig() throws Exception {
TenantWorkflowConfigHolder tenantWorkflowConfigHolder = new TenantWorkflowConfigHolder(tenantDomain, tenantID);
File defaultWFConfigFile = new File(Thread.currentThread().getContextClassLoader().
getResource("workflow-configs/workflow-extensions.xml").getFile());
InputStream defaultWFConfigContent = new FileInputStream(defaultWFConfigFile);
Resource defaultWFConfigResource = Mockito.mock(Resource.class);
Mockito.when(registry.get(APIConstants.WORKFLOW_EXECUTOR_LOCATION)).thenReturn(defaultWFConfigResource);
Mockito.when(defaultWFConfigResource.getContentStream()).thenReturn(defaultWFConfigContent);
//XMLStreamException will be thrown while building workflow config
PowerMockito.whenNew(StAXOMBuilder.class).withArguments(defaultWFConfigContent).thenThrow(new
XMLStreamException(""));
try {
tenantWorkflowConfigHolder.load();
Assert.fail("Expected WorkflowException has not been thrown when XMLStreamException occurred while " +
"processing workflow config");
} catch (WorkflowException e) {
Assert.assertEquals(e.getMessage(), "Error building xml from Resource at " + APIConstants
.WORKFLOW_EXECUTOR_LOCATION);
}
}
@Test
public void testFailureToLoadTenantWFConfigWhenWFExecutorClassCannotAccessible() throws Exception {
//Workflow executor class is a singleton class with private constructor, so that IllegalAccessException will
// be thrown while instantiation
String invalidWFExecutor =
"<WorkFlowExtensions>\n" +
" <ApplicationCreation executor=\"org.wso2.carbon.apimgt.impl.workflow" +
".InvalidWorkFlowExecutor1\"/></WorkFlowExtensions>";
InputStream invalidInputStream = new ByteArrayInputStream(invalidWFExecutor.getBytes("UTF-8"));
TenantWorkflowConfigHolder tenantWorkflowConfigHolder = new TenantWorkflowConfigHolder(tenantDomain, tenantID);
Resource defaultWFConfigResource = new ResourceImpl();
defaultWFConfigResource.setContentStream(invalidInputStream);
Mockito.when(registry.get(APIConstants.WORKFLOW_EXECUTOR_LOCATION)).thenReturn(defaultWFConfigResource);
try {
tenantWorkflowConfigHolder.load();
Assert.fail("Expected WorkflowException has not been thrown when workflow executor class cannot be " +
"accessible");
} catch (WorkflowException e) {
Assert.assertEquals(e.getMessage(), "Illegal attempt to invoke class methods");
}
}
@Test
public void testFailureToLoadTenantWFConfigWhenWFExecutorPropertyNameNotFound() throws Exception {
//Workflow executor class is a singleton class with private constructor, so that IllegalAccessException will
// be thrown while instantiation
String invalidWFExecutor =
"<WorkFlowExtensions>\n" +
" <ApplicationCreation executor=\"org.wso2.carbon.apimgt.impl.workflow" +
".ApplicationCreationWSWorkflowExecutor\">\n" +
" <Property/>\n" +
" </ApplicationCreation>\n" +
"</WorkFlowExtensions>\n";
InputStream invalidInputStream = new ByteArrayInputStream(invalidWFExecutor.getBytes("UTF-8"));
TenantWorkflowConfigHolder tenantWorkflowConfigHolder = new TenantWorkflowConfigHolder(tenantDomain, tenantID);
Resource defaultWFConfigResource = new ResourceImpl();
defaultWFConfigResource.setContentStream(invalidInputStream);
Mockito.when(registry.get(APIConstants.WORKFLOW_EXECUTOR_LOCATION)).thenReturn(defaultWFConfigResource);
try {
tenantWorkflowConfigHolder.load();
Assert.fail("Expected WorkflowException has not been thrown when workflow executor property 'name' " +
"attribute not found");
} catch (WorkflowException e) {
Assert.assertEquals(e.getMessage(), "Unable to load workflow executor class");
}
}
@Test
public void testFailureToLoadTenantWFConfigWhenWFExecutorPropertySetterNotDefined() throws Exception {
//Workflow executor class does not have setter method for 'testParam'
String invalidWFExecutor =
"<WorkFlowExtensions>\n" +
" <ApplicationCreation executor=\"org.wso2.carbon.apimgt.impl.workflow" +
".ApplicationCreationWSWorkflowExecutor\">\n" +
" <Property name=\"testParam\">test</Property>\n" +
" </ApplicationCreation>\n" +
"</WorkFlowExtensions>\n";
InputStream invalidInputStream = new ByteArrayInputStream(invalidWFExecutor.getBytes("UTF-8"));
TenantWorkflowConfigHolder tenantWorkflowConfigHolder = new TenantWorkflowConfigHolder(tenantDomain, tenantID);
Resource defaultWFConfigResource = new ResourceImpl();
defaultWFConfigResource.setContentStream(invalidInputStream);
Mockito.when(registry.get(APIConstants.WORKFLOW_EXECUTOR_LOCATION)).thenReturn(defaultWFConfigResource);
try {
tenantWorkflowConfigHolder.load();
Assert.fail("Expected WorkflowException has not been thrown when workflow executor property setter method" +
" cannot be found");
} catch (WorkflowException e) {
Assert.assertEquals(e.getMessage(), "Unable to load workflow executor class");
Assert.assertEquals(e.getCause().getMessage(), "Error invoking setter method named : setTestParam() " +
"that takes a single String, int, long, float, double or boolean parameter");
}
}
@Test
public void testFailureToLoadTenantWFConfigWhenWFExecutorPropertySetterInInvalid() throws Exception {
//Workflow executor class setter method is invalid since it has multiple parameter types
String invalidWFExecutor =
"<WorkFlowExtensions>\n" +
" <ApplicationCreation executor=\"org.wso2.carbon.apimgt.impl.workflow" +
".InvalidWorkFlowExecutor2\">\n" +
" <Property name=\"username\">admin</Property>\n" +
" </ApplicationCreation>\n" +
"</WorkFlowExtensions>\n";
InputStream invalidInputStream = new ByteArrayInputStream(invalidWFExecutor.getBytes("UTF-8"));
TenantWorkflowConfigHolder tenantWorkflowConfigHolder = new TenantWorkflowConfigHolder(tenantDomain, tenantID);
Resource defaultWFConfigResource = new ResourceImpl();
defaultWFConfigResource.setContentStream(invalidInputStream);
Mockito.when(registry.get(APIConstants.WORKFLOW_EXECUTOR_LOCATION)).thenReturn(defaultWFConfigResource);
try {
tenantWorkflowConfigHolder.load();
Assert.fail("Expected WorkflowException has not been thrown when workflow executor property setter method" +
" is invalid");
} catch (WorkflowException e) {
Assert.assertEquals(e.getMessage(), "Unable to load workflow executor class");
Assert.assertEquals(e.getCause().getMessage(), "Error invoking setter method named : setUsername() " +
"that takes a single String, int, long, float, double or boolean parameter");
}
}
@Test
public void testFailureToLoadTenantWFConfigWhenWFExecutorHasMultipleParamTypes() throws Exception {
//Workflow executor class setter methods are available for different parameter types
String invalidWFExecutor =
"<WorkFlowExtensions>\n" +
" <ApplicationCreation executor=\"org.wso2.carbon.apimgt.impl.workflow" +
".WorkflowExecutorWithMultipleParamTypes\">\n" +
" <Property name=\"stringParam\">admin</Property>\n" +
" <Property name=\"intParam\">1</Property>\n" +
" <Property name=\"booleanParam\">true</Property>\n" +
" <Property name=\"longParam\">10000000</Property>\n" +
" <Property name=\"doubleParam\">10.1000000000</Property>\n" +
" <Property name=\"floatParam\">10.1</Property>\n" +
" <Property name=\"omElement\">" +
" <omElement>test</omElement>" +
" </Property>\n" +
" </ApplicationCreation>\n" +
" <ProductionApplicationRegistration executor=\"org.wso2.carbon.apimgt.impl.workflow" +
".ApplicationRegistrationSimpleWorkflowExecutor\"/>" +
" <SandboxApplicationRegistration executor=\"org.wso2.carbon.apimgt.impl.workflow" +
".ApplicationRegistrationSimpleWorkflowExecutor\"/>\n" +
" <SubscriptionCreation executor=\"org.wso2.carbon.apimgt.impl.workflow" +
".SubscriptionCreationSimpleWorkflowExecutor\"/>\n"+
" <SubscriptionUpdate executor=\"org.wso2.carbon.apimgt.impl.workflow" +
".SubscriptionUpdateSimpleWorkflowExecutor\"/>\n"+
" <UserSignUp executor=\"org.wso2.carbon.apimgt.impl.workflow" +
".UserSignUpSimpleWorkflowExecutor\"/>\n"+
" <SubscriptionDeletion executor=\"org.wso2.carbon.apimgt.impl.workflow" +
".SubscriptionDeletionSimpleWorkflowExecutor\"/>\n"+
" <ApplicationDeletion executor=\"org.wso2.carbon.apimgt.impl.workflow" +
".ApplicationDeletionSimpleWorkflowExecutor\"/>\n"+
"</WorkFlowExtensions>\n";
InputStream invalidInputStream = new ByteArrayInputStream(invalidWFExecutor.getBytes("UTF-8"));
TenantWorkflowConfigHolder tenantWorkflowConfigHolder = new TenantWorkflowConfigHolder(tenantDomain, tenantID);
Resource defaultWFConfigResource = new ResourceImpl();
defaultWFConfigResource.setContentStream(invalidInputStream);
Mockito.when(registry.get(APIConstants.WORKFLOW_EXECUTOR_LOCATION)).thenReturn(defaultWFConfigResource);
try {
tenantWorkflowConfigHolder.load();
Assert.assertNotNull(tenantWorkflowConfigHolder.getWorkflowExecutor("AM_APPLICATION_CREATION"));
} catch (WorkflowException e) {
Assert.fail("Unexpected WorkflowException has been thrown while loading workflow executor for different " +
"param types");
}
}
}
/**
 * Test fixture: a class whose only constructor is private, so reflective
 * instantiation by TenantWorkflowConfigHolder throws IllegalAccessException.
 * Intentionally invalid — do not "fix" the visibility.
 */
class InvalidWorkFlowExecutor1 {
    private InvalidWorkFlowExecutor1() {
    }
}
/**
 * Test fixture: a WorkflowExecutor whose setUsername setter takes two
 * parameters, which the property-injection code cannot invoke. Intentionally
 * invalid — do not "fix" the setter signature.
 */
class InvalidWorkFlowExecutor2 extends WorkflowExecutor{
    // Invalid setter: property injection only supports single-parameter setters.
    public void setUsername(String username, int id){
    }
    @Override
    public String getWorkflowType() {
        return null;
    }
    @Override
    public List<WorkflowDTO> getWorkflowDetails(String workflowStatus) throws WorkflowException {
        return null;
    }
}
/**
 * Test fixture: a WorkflowExecutor exposing one no-op setter per supported
 * property type (String, int, long, float, double, boolean, OMElement) so the
 * property-injection code can be exercised for each type.
 */
class WorkflowExecutorWithMultipleParamTypes extends WorkflowExecutor{
    @Override
    public String getWorkflowType() {
        return null;
    }
    @Override
    public List<WorkflowDTO> getWorkflowDetails(String workflowStatus) throws WorkflowException {
        return null;
    }
    // No-op setters, one per supported property parameter type.
    public void setStringParam(String stringParam){}
    public void setIntParam(int intParam){}
    public void setLongParam(long longParam){}
    public void setFloatParam(float floatParam){}
    public void setDoubleParam(double doubleParam){}
    public void setBooleanParam(boolean booleanParam){}
    public void setOmElement(OMElement omElement){}
}
| |
package com.solderbyte.openfit;
import android.util.Log;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import java.util.TimeZone;
import com.solderbyte.openfit.protocol.OpenFitNotificationProtocol;
import com.solderbyte.openfit.util.OpenFitData;
import com.solderbyte.openfit.util.OpenFitDataType;
import com.solderbyte.openfit.util.OpenFitDataTypeAndString;
import com.solderbyte.openfit.util.OpenFitTimeZoneUtil;
import com.solderbyte.openfit.util.OpenFitVariableDataComposer;
public class OpenFitApi {
    /**
     * Builds the initial "ready" handshake packet: byte 0, a 4-byte payload
     * length, then the int 3. Hex trace of the expected bytes is below.
     */
    public static byte[] getReady() {
        //000400000003000000
        OpenFitVariableDataComposer oVariableDataComposer = new OpenFitVariableDataComposer();
        oVariableDataComposer.writeByte((byte)0);
        oVariableDataComposer.writeInt(OpenFitData.SIZE_OF_INT);
        oVariableDataComposer.writeInt(3);
        return oVariableDataComposer.toByteArray();
    }
    /**
     * Builds the firmware-update (FOTA) packet carrying the ASCII marker "ODIN".
     * NOTE(review): getBytes() uses the platform charset; safe here only because
     * "ODIN" is pure ASCII.
     */
    public static byte[] getUpdate() {
        OpenFitVariableDataComposer oVariableDataComposer = new OpenFitVariableDataComposer();
        oVariableDataComposer.writeByte(OpenFitData.PORT_FOTA);
        oVariableDataComposer.writeInt(OpenFitData.SIZE_OF_INT);
        oVariableDataComposer.writeBytes("ODIN".getBytes());
        //oVariableDataComposer.writeByte((byte)79); // O
        //oVariableDataComposer.writeByte((byte)68); // D
        //oVariableDataComposer.writeByte((byte)73); // I
        //oVariableDataComposer.writeByte((byte)78); // N
        return oVariableDataComposer.toByteArray();
    }
    /** Builds the FOTA follow-up packet: an 8-byte fixed command sequence. */
    public static byte[] getUpdateFollowUp() {
        //640800000004020501
        OpenFitVariableDataComposer oVariableDataComposer = new OpenFitVariableDataComposer();
        oVariableDataComposer.writeByte(OpenFitData.OPENFIT_DATA);
        // SIZE_OF_DOUBLE (8) == number of payload bytes written below.
        oVariableDataComposer.writeInt(OpenFitData.SIZE_OF_DOUBLE);
        oVariableDataComposer.writeByte((byte)4);
        oVariableDataComposer.writeByte((byte)2);
        oVariableDataComposer.writeByte((byte)1);
        oVariableDataComposer.writeByte((byte)1);
        oVariableDataComposer.writeByte((byte)4);
        oVariableDataComposer.writeByte((byte)2);
        oVariableDataComposer.writeByte((byte)5);
        oVariableDataComposer.writeByte((byte)1);
        return oVariableDataComposer.toByteArray();
    }
    /** Builds the FOTA command packet: port byte, length 2, then two 0x01 bytes. */
    public static byte[] getFotaCommand() {
        //4E020000000101
        OpenFitVariableDataComposer oVariableDataComposer = new OpenFitVariableDataComposer();
        oVariableDataComposer.writeByte(OpenFitData.PORT_FOTA_COMMAND);
        oVariableDataComposer.writeInt(OpenFitData.SIZE_OF_SHORT);
        oVariableDataComposer.writeByte((byte)1);
        oVariableDataComposer.writeByte((byte)1);
        return oVariableDataComposer.toByteArray();
    }
    /** Builds the "find my device" start packet (port 5). */
    public static byte[] getFindStart() {
        //05020000000100
        OpenFitVariableDataComposer oVariableDataComposer = new OpenFitVariableDataComposer();
        oVariableDataComposer.writeByte((byte)5);
        oVariableDataComposer.writeInt(2);
        oVariableDataComposer.writeByte((byte)1);
        oVariableDataComposer.writeByte((byte)OpenFitData.FIND_START);
        return oVariableDataComposer.toByteArray();
    }
    /** Builds the "find my device" stop packet (port 5). */
    public static byte[] getFindStop() {
        //05020000000101
        OpenFitVariableDataComposer oVariableDataComposer = new OpenFitVariableDataComposer();
        oVariableDataComposer.writeByte((byte)5);
        oVariableDataComposer.writeInt(2);
        oVariableDataComposer.writeByte((byte)1);
        oVariableDataComposer.writeByte((byte)OpenFitData.FIND_STOP);
        return oVariableDataComposer.toByteArray();
    }
    /**
     * Media-control packets (port 6). Each is: port byte, 4-byte payload length
     * (always 2), the CONTROL marker, then the specific action byte.
     */
    /** Previous-track command. */
    public static byte[] getMediaPrev() {
        //06020000000005
        OpenFitVariableDataComposer oVariableDataComposer = new OpenFitVariableDataComposer();
        oVariableDataComposer.writeByte((byte)6);
        oVariableDataComposer.writeInt(2);
        oVariableDataComposer.writeByte((byte)OpenFitData.CONTROL);
        oVariableDataComposer.writeByte((byte)OpenFitData.REWIND);
        return oVariableDataComposer.toByteArray();
    }
    /** Next-track command. */
    public static byte[] getMediaNext() {
        //06020000000004
        OpenFitVariableDataComposer oVariableDataComposer = new OpenFitVariableDataComposer();
        oVariableDataComposer.writeByte((byte)6);
        oVariableDataComposer.writeInt(2);
        oVariableDataComposer.writeByte((byte)OpenFitData.CONTROL);
        oVariableDataComposer.writeByte((byte)OpenFitData.FORWARD);
        return oVariableDataComposer.toByteArray();
    }
    /** Play command. */
    public static byte[] getMediaPlay() {
        //06020000000001
        OpenFitVariableDataComposer oVariableDataComposer = new OpenFitVariableDataComposer();
        oVariableDataComposer.writeByte((byte)6);
        oVariableDataComposer.writeInt(2);
        oVariableDataComposer.writeByte((byte)OpenFitData.CONTROL);
        oVariableDataComposer.writeByte((byte)OpenFitData.PLAY);
        return oVariableDataComposer.toByteArray();
    }
    /** Pause command. */
    public static byte[] getMediaPause() {
        //06020000000002
        OpenFitVariableDataComposer oVariableDataComposer = new OpenFitVariableDataComposer();
        oVariableDataComposer.writeByte((byte)6);
        oVariableDataComposer.writeInt(2);
        oVariableDataComposer.writeByte((byte)OpenFitData.CONTROL);
        oVariableDataComposer.writeByte((byte)OpenFitData.PAUSE);
        return oVariableDataComposer.toByteArray();
    }
    /** Stop command. */
    public static byte[] getMediaStop() {
        //06020000000003
        OpenFitVariableDataComposer oVariableDataComposer = new OpenFitVariableDataComposer();
        oVariableDataComposer.writeByte((byte)6);
        oVariableDataComposer.writeInt(2);
        oVariableDataComposer.writeByte((byte)OpenFitData.CONTROL);
        oVariableDataComposer.writeByte((byte)OpenFitData.STOP);
        return oVariableDataComposer.toByteArray();
    }
    /**
     * Volume query packet.
     * NOTE(review): the length field says 2 but only one payload byte (VOLUME)
     * follows — compare getMediaSetVolume, which writes two. Confirm whether the
     * device tolerates the short payload or this should be writeInt(1).
     */
    public static byte[] getMediaVolume() {
        //060200000001XX
        OpenFitVariableDataComposer oVariableDataComposer = new OpenFitVariableDataComposer();
        oVariableDataComposer.writeByte((byte)6);
        oVariableDataComposer.writeInt(2);
        oVariableDataComposer.writeByte((byte)OpenFitData.VOLUME);
        return oVariableDataComposer.toByteArray();
    }
    /** Set-volume packet: VOLUME marker followed by the requested level. */
    public static byte[] getMediaSetVolume(byte vol) {
        //060200000001XX
        OpenFitVariableDataComposer oVariableDataComposer = new OpenFitVariableDataComposer();
        oVariableDataComposer.writeByte((byte)6);
        oVariableDataComposer.writeInt(2);
        oVariableDataComposer.writeByte((byte)OpenFitData.VOLUME);
        oVariableDataComposer.writeByte((byte)vol);
        return oVariableDataComposer.toByteArray();
    }
    /** Start media status updates from the device. */
    public static byte[] getMediaReqStart() {
        //060100000003
        OpenFitVariableDataComposer oVariableDataComposer = new OpenFitVariableDataComposer();
        oVariableDataComposer.writeByte((byte)6);
        oVariableDataComposer.writeInt(1);
        oVariableDataComposer.writeByte((byte)OpenFitData.REQUEST_START);
        return oVariableDataComposer.toByteArray();
    }
    /** Stop media status updates from the device. */
    public static byte[] getMediaReqStop() {
        //060100000004
        OpenFitVariableDataComposer oVariableDataComposer = new OpenFitVariableDataComposer();
        oVariableDataComposer.writeByte((byte)6);
        oVariableDataComposer.writeInt(1);
        oVariableDataComposer.writeByte((byte)OpenFitData.REQUEST_STOP);
        return oVariableDataComposer.toByteArray();
    }
    //06020000000006
    //06020000000007
    /**
     * Fitness channel marker (port 2).
     * NOTE(review): unlike every other packet here, no length header or payload
     * is written — confirm this single byte is a valid frame on its own.
     */
    public static byte[] getFitness() {
        OpenFitVariableDataComposer oVariableDataComposer = new OpenFitVariableDataComposer();
        oVariableDataComposer.writeByte((byte)2);
        return oVariableDataComposer.toByteArray();
    }
    /** Fitness sync response: length 8, then ints 3 and 1. */
    public static byte[] getFitnessSyncRes() {
        //02080000000300000001000000
        OpenFitVariableDataComposer oVariableDataComposer = new OpenFitVariableDataComposer();
        oVariableDataComposer.writeByte((byte)2);
        oVariableDataComposer.writeInt(8);
        oVariableDataComposer.writeInt(3);
        oVariableDataComposer.writeInt(1);
        return oVariableDataComposer.toByteArray();
    }
    /** Fitness data request: length 5, a zero byte, then int 1. */
    public static byte[] getFitnessRequest() {
        //02050000000001000000
        OpenFitVariableDataComposer oVariableDataComposer = new OpenFitVariableDataComposer();
        oVariableDataComposer.writeByte((byte)2);
        oVariableDataComposer.writeInt(5);
        oVariableDataComposer.writeByte((byte)0);
        oVariableDataComposer.writeInt(1);
        return oVariableDataComposer.toByteArray();
    }
    /** Fitness menu command (opcode 27). */
    public static byte[] getFitnessMenu() {
        //02040000001b000000
        OpenFitVariableDataComposer oVariableDataComposer = new OpenFitVariableDataComposer();
        oVariableDataComposer.writeByte((byte)2);
        oVariableDataComposer.writeInt(4);
        oVariableDataComposer.writeInt(27);
        return oVariableDataComposer.toByteArray();
    }
    /**
     * Fitness menu response (opcode 28).
     * NOTE(review): the hex trace comment below (length 01, single byte 0x1c)
     * does not match the code, which writes length 4 + a 4-byte int — the
     * comment looks stale; the code is taken as authoritative.
     */
    public static byte[] getFitnessMenuResponse() {
        //02010000001c
        OpenFitVariableDataComposer oVariableDataComposer = new OpenFitVariableDataComposer();
        oVariableDataComposer.writeByte((byte)2);
        oVariableDataComposer.writeInt(4);
        oVariableDataComposer.writeInt(28);
        return oVariableDataComposer.toByteArray();
    }
    /** Cycling exercise start: opcode 18, then a 16-byte block with opcode 19 and exercise type 3. */
    public static byte[] getFitnessCycling() {
        //02040000001200000002100000001300000003000000 3D8A2C4359DAC742
        OpenFitVariableDataComposer oVariableDataComposer = new OpenFitVariableDataComposer();
        oVariableDataComposer.writeByte((byte)2);
        oVariableDataComposer.writeInt(4);
        oVariableDataComposer.writeInt(18);
        oVariableDataComposer.writeByte((byte)2);
        oVariableDataComposer.writeInt(16);
        oVariableDataComposer.writeInt(19);
        oVariableDataComposer.writeInt(3);
        return oVariableDataComposer.toByteArray();
    }
    /** Health/heart-rate app command (opcode 21). */
    public static byte[] getHealthApp() {
        //02040000001200000002100000001300000003000000 3D8A2C4359DAC742
        OpenFitVariableDataComposer oVariableDataComposer = new OpenFitVariableDataComposer();
        oVariableDataComposer.writeByte((byte)2);
        oVariableDataComposer.writeInt(4);
        oVariableDataComposer.writeInt(21);
        return oVariableDataComposer.toByteArray();
    }
    /** Health app response (opcode 20). */
    public static byte[] getHealthAppResponse() {
        //02040000001200000002100000001300000003000000 3D8A2C4359DAC742
        OpenFitVariableDataComposer oVariableDataComposer = new OpenFitVariableDataComposer();
        oVariableDataComposer.writeByte((byte)2);
        oVariableDataComposer.writeInt(4);
        oVariableDataComposer.writeInt(20);
        return oVariableDataComposer.toByteArray();
    }
    /** Heart-rate response (opcode 2). */
    public static byte[] getHealthHeartResponse() {
        //02040000001200000002100000001300000003000000 3D8A2C4359DAC742
        OpenFitVariableDataComposer oVariableDataComposer = new OpenFitVariableDataComposer();
        oVariableDataComposer.writeByte((byte)2);
        oVariableDataComposer.writeInt(4);
        oVariableDataComposer.writeInt(2);
        return oVariableDataComposer.toByteArray();
    }
    /**
     * GPS-ready packet for the given exercise type
     * (1=walking, 2=running, 3=cycling, 4=hiking per the traces below).
     */
    public static byte[] getGPSReady(int exerciseType) {
        //0204000000120000000210000000130000000100000000003B430000D642 -- walking
        //0204000000120000000210000000130000000200000000003B430000D642 -- running
        //0204000000120000000210000000130000000300000000003B430000D642 -- cycling
        //0204000000120000000210000000130000000400000000003B430000D642 -- hiking
        OpenFitVariableDataComposer oVariableDataComposer = new OpenFitVariableDataComposer();
        oVariableDataComposer.writeByte((byte)2);
        oVariableDataComposer.writeInt(4);
        oVariableDataComposer.writeInt(OpenFitData.DATA_TYPE_WINGTIP_TO_HOST_GPS_READY);
        oVariableDataComposer.writeByte((byte)2);
        oVariableDataComposer.writeInt(16);
        oVariableDataComposer.writeInt(19);
        oVariableDataComposer.writeInt(exerciseType);
        return oVariableDataComposer.toByteArray();
    }
    /** Host acknowledgement for GPS-ready. */
    public static byte[] getResponseGPSReady() {
        //02240000001600000000000000268fbb42b4ecb6420000000000000000000000000000000000000000
        OpenFitVariableDataComposer oVariableDataComposer = new OpenFitVariableDataComposer();
        oVariableDataComposer.writeByte((byte)2);
        oVariableDataComposer.writeInt(4);
        oVariableDataComposer.writeInt(OpenFitData.DATA_TYPE_HOST_TO_WINGTIP_GPS_READY);
        return oVariableDataComposer.toByteArray();
    }
    /** Subscribe to GPS updates. */
    public static byte[] getGPSSubscribe() {
        //0204000000120000000210000000130000000400000000003B430000D642
        OpenFitVariableDataComposer oVariableDataComposer = new OpenFitVariableDataComposer();
        oVariableDataComposer.writeByte((byte)2);
        oVariableDataComposer.writeInt(4);
        oVariableDataComposer.writeInt(OpenFitData.DATA_TYPE_WINGTIP_TO_HOST_GPS_SUBSCRIBE);
        return oVariableDataComposer.toByteArray();
    }
    /** Unsubscribe from GPS updates. */
    public static byte[] getGPSUnSubscribe() {
        //0204000000120000000210000000130000000400000000003B430000D642
        OpenFitVariableDataComposer oVariableDataComposer = new OpenFitVariableDataComposer();
        oVariableDataComposer.writeByte((byte)2);
        oVariableDataComposer.writeInt(4);
        oVariableDataComposer.writeInt(OpenFitData.DATA_TYPE_WINGTIP_TO_HOST_GPS_UNSUBSCRIBE);
        return oVariableDataComposer.toByteArray();
    }
    /**
     * Live GPS data push: total distance, current speed, current cadence(?),
     * current altitude. Length 24 = opcode int (4) + three floats (12) + one
     * double (8); cA is deliberately widened to a double on the wire.
     */
    public static byte[] getResponseGPSData(float tD, float cS, float cC, float cA) {
        //02040000000E000000
        OpenFitVariableDataComposer oVariableDataComposer = new OpenFitVariableDataComposer();
        oVariableDataComposer.writeByte((byte)2);
        oVariableDataComposer.writeInt(24);
        oVariableDataComposer.writeInt(OpenFitData.DATA_TYPE_HOST_TO_WINGTIP_GPS_DATA);
        oVariableDataComposer.writeFloat(tD);
        oVariableDataComposer.writeFloat(cS);
        oVariableDataComposer.writeFloat(cC);
        oVariableDataComposer.writeDouble(cA);
        return oVariableDataComposer.toByteArray();
    }
    /**
     * Final GPS result summary. Length 36 = opcode int (4) + eight floats (32).
     * Field meanings inferred from parameter names — confirm against the
     * device protocol before relying on them.
     */
    public static byte[] getResponseGPSResult(float tD, float maxA, float minA, float maxS, float avgS, float cC, float iD, float dD) {
        //02040000000E000000
        OpenFitVariableDataComposer oVariableDataComposer = new OpenFitVariableDataComposer();
        oVariableDataComposer.writeByte((byte)2);
        oVariableDataComposer.writeInt(36);
        oVariableDataComposer.writeInt(OpenFitData.DATA_TYPE_HOST_TO_WINGTIP_GPS_RESULT);
        oVariableDataComposer.writeFloat(tD);
        oVariableDataComposer.writeFloat(maxA);
        oVariableDataComposer.writeFloat(minA);
        oVariableDataComposer.writeFloat(maxS);
        oVariableDataComposer.writeFloat(avgS);
        oVariableDataComposer.writeFloat(cC);
        oVariableDataComposer.writeFloat(iD);
        oVariableDataComposer.writeFloat(dD);
        return oVariableDataComposer.toByteArray();
    }
    /** Sync request from the device side. */
    public static byte[] getSync() {
        //020400000005000000
        OpenFitVariableDataComposer oVariableDataComposer = new OpenFitVariableDataComposer();
        oVariableDataComposer.writeByte((byte)2);
        oVariableDataComposer.writeInt(4);
        oVariableDataComposer.writeInt(OpenFitData.DATA_TYPE_WINGTIP_TO_HOST_SYNC_REQUEST);
        return oVariableDataComposer.toByteArray();
    }
    /** Host acknowledgement that the sync completed. */
    public static byte[] getResponseSyncDone() {
        //020400000005000000
        OpenFitVariableDataComposer oVariableDataComposer = new OpenFitVariableDataComposer();
        oVariableDataComposer.writeByte((byte)2);
        oVariableDataComposer.writeInt(4);
        oVariableDataComposer.writeInt(OpenFitData.DATA_TYPE_HOST_TO_WINGTIP_SYNC_DONE);
        return oVariableDataComposer.toByteArray();
    }
    /** Device signals the end of a GPS session. */
    public static byte[] getGPSEnd() {
        //02040000000A00000002040000000A00000002040000001B000000
        OpenFitVariableDataComposer oVariableDataComposer = new OpenFitVariableDataComposer();
        oVariableDataComposer.writeByte((byte)2);
        oVariableDataComposer.writeInt(4);
        oVariableDataComposer.writeInt(OpenFitData.DATA_TYPE_WINGTIP_TO_HOST_GPS_END);
        return oVariableDataComposer.toByteArray();
    }
    /** Host reports GPS is off. */
    public static byte[] getResponseGPSOFF() {
        //02040000000A000000
        OpenFitVariableDataComposer oVariableDataComposer = new OpenFitVariableDataComposer();
        oVariableDataComposer.writeByte((byte)2);
        oVariableDataComposer.writeInt(4);
        oVariableDataComposer.writeInt(OpenFitData.DATA_TYPE_HOST_TO_WINGTIP_GPS_GPSOFF);
        return oVariableDataComposer.toByteArray();
    }
    /** Host reports GPS is on. */
    public static byte[] getResponseGPSON() {
        //02040000000A000000
        OpenFitVariableDataComposer oVariableDataComposer = new OpenFitVariableDataComposer();
        oVariableDataComposer.writeByte((byte)2);
        oVariableDataComposer.writeInt(4);
        oVariableDataComposer.writeInt(OpenFitData.DATA_TYPE_HOST_TO_WINGTIP_GPS_GPSON);
        return oVariableDataComposer.toByteArray();
    }
    /**
     * Running exercise start.
     * NOTE(review): the composer built below is dead code — the method actually
     * returns a hard-coded hex frame (see the final line). Either the composer
     * sequence or the literal should be removed once the correct frame is
     * confirmed against hardware.
     */
    public static byte[] getFitnessRunning() {
        //02040000001200000002100000001300000002000000 3D8A2C4359DAC742
        OpenFitVariableDataComposer oVariableDataComposer = new OpenFitVariableDataComposer();
        oVariableDataComposer.writeByte((byte)2);
        oVariableDataComposer.writeInt(4);
        oVariableDataComposer.writeInt(18);
        oVariableDataComposer.writeByte((byte)2);
        oVariableDataComposer.writeInt(16);
        oVariableDataComposer.writeInt(19);
        oVariableDataComposer.writeInt(2);
        //return oVariableDataComposer.toByteArray();
        return hexStringToByteArray("02040000000c000000");
    }
public static byte[] getCurrentTimeInfo(boolean is24Hour) {
//011E0000000141CB3555F8FFFFFF000000000101010201A01DFC5490D43556100E0000
//01
//1e000000
//01
//41cb3555
//f8ffffff
//00000000
//01
//01
//01
//02
//01
//a01dfc54
//90d43556
//100e0000
// build time data
int millis = (int)(System.currentTimeMillis() / 1000L);
Calendar oCalendar = Calendar.getInstance();
TimeZone oTimeZone = oCalendar.getTimeZone();
int i = oTimeZone.getRawOffset() / 60000;
int j = i / 60;
int k = i % 60;
Date oDate = oCalendar.getTime();
boolean inDaylightTime = oTimeZone.inDaylightTime(oDate);
boolean useDaylightTime = oTimeZone.useDaylightTime();
long l = oCalendar.getTimeInMillis();
int m = (int)(OpenFitTimeZoneUtil.prevTransition(oTimeZone, l) / 1000L);
int n = (int)(OpenFitTimeZoneUtil.nextTransition(oTimeZone, l) / 1000L);
int dst = oTimeZone.getDSTSavings() / 1000;
// write time data
OpenFitVariableDataComposer oVDC = new OpenFitVariableDataComposer();
oVDC.writeByte((byte)1);
oVDC.writeInt(millis);
oVDC.writeInt(j);
oVDC.writeInt(k);
oVDC.writeByte(OpenFitData.TEXT_DATE_FORMAT_TYPE);
//oVDC.writeBoolean(OpenFitData.IS_TIME_DISPLAY_24);
oVDC.writeBoolean(is24Hour);
oVDC.writeBoolean(inDaylightTime);
oVDC.writeByte(OpenFitData.NUMBER_DATE_FORMAT_TYPE);
oVDC.writeBoolean(useDaylightTime);
oVDC.writeInt(m);
oVDC.writeInt(n);
oVDC.writeInt(dst);
int length = oVDC.toByteArray().length;
// write time byte array
OpenFitVariableDataComposer oVariableDataComposer = new OpenFitVariableDataComposer();
oVariableDataComposer.writeByte((byte)1);
oVariableDataComposer.writeInt(length);
oVariableDataComposer.writeBytes(oVDC.toByteArray());
return oVariableDataComposer.toByteArray();
}
    /**
     * Builds the canned "Welcome to OpenFit!" notification shown when the app
     * first connects. The hex breakdown below documents the wire layout.
     */
    public static byte[] getOpenFitWelcomeNotification() {
        //03
        //71000000 = size of msg
        //04 = DATA_TYPE_MESSAGE
        //0400000000000000 = id
        //10 = sender name size + 2
        //FF
        //FE
        //4F00700065006E00460069007400 = OpenFit
        //16 = sender number size + 2
        //FF
        //FE
        //3500350035003100320033003400350036003700 = 5551234567
        //10 = msg title + 2
        //FF
        //FE
        //4E004F005400490054004C004500 = NOTITLE
        //28 = msg data + 2
        //00
        //FF
        //FE
        //570065006C0063006F006D006500200074006F0020004F00700065006E004600690074002100 = Welcome to OpenFit!
        //00
        //5E0E8955 = time stamp
        List<OpenFitDataTypeAndString> mDataList = new ArrayList<OpenFitDataTypeAndString>();
        mDataList.add(new OpenFitDataTypeAndString(OpenFitDataType.BYTE, "OpenFit"));
        mDataList.add(new OpenFitDataTypeAndString(OpenFitDataType.BYTE, "5551234567"));
        mDataList.add(new OpenFitDataTypeAndString(OpenFitDataType.BYTE, "NOTITLE"));
        mDataList.add(new OpenFitDataTypeAndString(OpenFitDataType.SHORT, "Welcome to OpenFit!"));
        // Derive a pseudo-unique notification id from the current time.
        long id = System.currentTimeMillis() / 1000L;
        byte[] msg = OpenFitNotificationProtocol.createNotificationProtocol(OpenFitData.DATA_TYPE_MESSAGE, id, mDataList, System.currentTimeMillis());
        OpenFitVariableDataComposer oDatacomposer = new OpenFitVariableDataComposer();
        oDatacomposer.writeByte((byte)3);
        oDatacomposer.writeInt(msg.length);
        oDatacomposer.writeBytes(msg);
        return oDatacomposer.toByteArray();
    }
    /**
     * Builds a generic notification packet. Null/empty fields are replaced with
     * placeholder defaults; title and message are truncated to the device's
     * limits (50 / 250 chars).
     */
    public static byte[] getOpenNotification(String sender, String number, String title, String message, long id) {
        //03
        //71000000 = size of msg
        //04 = DATA_TYPE_MESSAGE
        //0400000000000000 = id
        //10 = sender name size + 2
        //FF
        //FE
        //4F00700065006E00460069007400 = OpenFit
        //16 = sender number size + 2
        //FF
        //FE
        //3500350035003100320033003400350036003700 = 5551234567
        //10 = msg title + 2
        //FF
        //FE
        //4E004F005400490054004C004500 = NOTITLE
        //28 = msg data + 2
        //00
        //FF
        //FE
        //570065006C0063006F006D006500200074006F0020004F00700065006E004600690074002100 = Welcome to OpenFit!
        //00
        //5E0E8955 = time stamp
        if(sender == null || sender.isEmpty()) {
            sender = "OpenFit";
        }
        if(number == null || number.isEmpty()) {
            number = "OpenFit";
        }
        if(title == null || title.isEmpty()) {
            title = "OpenFit Title";
        }
        if(message == null || message.isEmpty()) {
            message = "OpenFit Message";
        }
        List<OpenFitDataTypeAndString> mDataList = new ArrayList<OpenFitDataTypeAndString>();
        mDataList.add(new OpenFitDataTypeAndString(OpenFitDataType.BYTE, sender));
        mDataList.add(new OpenFitDataTypeAndString(OpenFitDataType.BYTE, number));
        mDataList.add(new OpenFitDataTypeAndString(OpenFitDataType.BYTE, trimTitle(title)));
        mDataList.add(new OpenFitDataTypeAndString(OpenFitDataType.SHORT, trimMessage(message)));
        byte[] msg = OpenFitNotificationProtocol.createNotificationProtocol(OpenFitData.DATA_TYPE_MESSAGE, id, mDataList, System.currentTimeMillis());
        OpenFitVariableDataComposer oDatacomposer = new OpenFitVariableDataComposer();
        oDatacomposer.writeByte((byte)3);
        oDatacomposer.writeInt(msg.length);
        oDatacomposer.writeBytes(msg);
        return oDatacomposer.toByteArray();
    }
    /**
     * Builds an e-mail notification packet. Same defaulting and truncation
     * rules as getOpenNotification, but framed via createEmailProtocol.
     */
    public static byte[] getOpenEmail(String sender, String number, String title, String message, long id) {
        if(sender == null || sender.isEmpty()) {
            sender = "OpenFit";
        }
        if(number == null || number.isEmpty()) {
            number = "OpenFit";
        }
        if(title == null || title.isEmpty()) {
            title = "OpenFit Title";
        }
        if(message == null || message.isEmpty()) {
            message = "OpenFit Email";
        }
        List<OpenFitDataTypeAndString> mDataList = new ArrayList<OpenFitDataTypeAndString>();
        mDataList.add(new OpenFitDataTypeAndString(OpenFitDataType.BYTE, sender));
        mDataList.add(new OpenFitDataTypeAndString(OpenFitDataType.BYTE, number));
        mDataList.add(new OpenFitDataTypeAndString(OpenFitDataType.BYTE, trimTitle(title)));
        mDataList.add(new OpenFitDataTypeAndString(OpenFitDataType.SHORT, trimMessage(message)));
        byte[] msg = OpenFitNotificationProtocol.createEmailProtocol(OpenFitData.DATA_TYPE_EMAIL, id, mDataList, System.currentTimeMillis());
        OpenFitVariableDataComposer oDatacomposer = new OpenFitVariableDataComposer();
        oDatacomposer.writeByte((byte)3);
        oDatacomposer.writeInt(msg.length);
        oDatacomposer.writeBytes(msg);
        return oDatacomposer.toByteArray();
    }
    /**
     * Builds an incoming-call notification packet (port 9). Null/empty sender
     * or number fall back to "OpenFit".
     */
    public static byte[] getOpenIncomingCall(String sender, String number, long id) {
        //09
        //30000000 = size of msg
        //00 = DATA_TYPE_INCOMING_CALL
        //fb73770c4f010000 = id
        //00 = call flag
        //0a = size + 2
        //ff
        //fe
        //48006f006d006500 = sender
        //16 = size + 2
        //ff
        //fe
        //0000000000000000000000000000000000000000 = phone number
        //5fc0c555
        if(sender == null || sender.isEmpty()) {
            sender = "OpenFit";
        }
        if(number == null || number.isEmpty()) {
            number = "OpenFit";
        }
        List<OpenFitDataTypeAndString> mDataList = new ArrayList<OpenFitDataTypeAndString>();
        mDataList.add(new OpenFitDataTypeAndString(OpenFitDataType.BYTE, sender));
        mDataList.add(new OpenFitDataTypeAndString(OpenFitDataType.BYTE, number));
        byte[] msg = OpenFitNotificationProtocol.createIncomingCallProtocol(OpenFitData.DATA_TYPE_INCOMING_CALL, id, mDataList, System.currentTimeMillis());
        OpenFitVariableDataComposer oDatacomposer = new OpenFitVariableDataComposer();
        oDatacomposer.writeByte((byte)9);
        oDatacomposer.writeInt(msg.length);
        oDatacomposer.writeBytes(msg);
        return oDatacomposer.toByteArray();
    }
    /** Reject-call command (action byte 1). */
    public static byte[] getOpenRejectCall() {
        //090600000003013FE1CA55
        OpenFitVariableDataComposer oVariableDataComposer = new OpenFitVariableDataComposer();
        oVariableDataComposer.writeByte((byte)9);
        oVariableDataComposer.writeInt(6);
        oVariableDataComposer.writeByte((byte)3);
        oVariableDataComposer.writeByte((byte)1);
        return oVariableDataComposer.toByteArray();
    }
    /** Incoming-call-ended notification. */
    public static byte[] getOpenIncomingCallEnd() {
        //090100000002
        OpenFitVariableDataComposer oVariableDataComposer = new OpenFitVariableDataComposer();
        oVariableDataComposer.writeByte((byte)9);
        oVariableDataComposer.writeInt(1);
        oVariableDataComposer.writeByte((byte)2);
        return oVariableDataComposer.toByteArray();
    }
    /** Reject-call-with-message command (action byte 2). */
    public static byte[] getOpenRejectCallMessage() {
        // 0906000000030201000000
        //all msg size _|     |_ index of message
        OpenFitVariableDataComposer oVariableDataComposer = new OpenFitVariableDataComposer();
        oVariableDataComposer.writeByte((byte)9);
        oVariableDataComposer.writeInt(6);
        oVariableDataComposer.writeByte((byte)3);
        oVariableDataComposer.writeByte((byte)2);
        return oVariableDataComposer.toByteArray();
    }
    /**
     * Sends one canned reject message to a bracelet-style device.
     *
     * @param allCount total number of canned messages being sent
     * @param index    position of this message within that set
     * @param msg      the message text
     */
    public static byte[] getOpenRejectCallMessageForBracelet(int allCount, int index, String msg) {
        OpenFitVariableDataComposer oVariableDataComposer = new OpenFitVariableDataComposer();
        byte[] arr = OpenFitVariableDataComposer.convertToByteArray(msg);
        oVariableDataComposer.writeByte((byte)9);
        // Length = opcode + count + index + 2-byte size prefix, plus the text.
        oVariableDataComposer.writeInt(5 + arr.length);
        oVariableDataComposer.writeByte((byte)4);
        oVariableDataComposer.writeByte((byte)allCount);
        oVariableDataComposer.writeByte((byte)index);
        oVariableDataComposer.writeShort((short)arr.length);
        oVariableDataComposer.writeBytes(arr);
        return oVariableDataComposer.toByteArray();
    }
    /**
     * Builds the now-playing track-name packet (port 6). The hex breakdown
     * below shows the UTF-16LE string framing produced by the protocol helper.
     */
    public static byte[] getOpenMediaTrack(String track) {
        //06
        //26000000
        //02
        //24 = size + 2
        //ff
        //fe
        //44006100660074002000500075006e006b0020002d00200046007200650073006800 = track name
        byte[] msg = OpenFitNotificationProtocol.createMediaTrackProtocol(OpenFitData.DATA_TYPE_MEDIATRACK, track);
        OpenFitVariableDataComposer oDatacomposer = new OpenFitVariableDataComposer();
        oDatacomposer.writeByte((byte)6);
        oDatacomposer.writeInt(msg.length);
        oDatacomposer.writeBytes(msg);
        return oDatacomposer.toByteArray();
    }
public static byte[] getOpenAlarm(long id) {
//0a
//1e000000 = size of mg
//01 = msg type
//0100000000000000 = msg id
//0c = size of string
//ff
//fe
//41006c00610072006d00 = string
//c1040000 = little endian odd time stamp
//00000000 = snooze = false
List<OpenFitDataTypeAndString> mDataList = new ArrayList<OpenFitDataTypeAndString>();
mDataList.add(new OpenFitDataTypeAndString(OpenFitDataType.BYTE, "Alarm"));
Calendar c = Calendar.getInstance();
int hour = c.get(Calendar.HOUR);
int minute = c.get(Calendar.MINUTE);
String timeString = Integer.toString(hour)+Integer.toString(minute);
int time = Integer.parseInt(timeString);
byte[] msg = OpenFitNotificationProtocol.createAlarmProtocol(OpenFitData.DATA_TYPE_ALARMCLOCK, id, mDataList, time);
OpenFitVariableDataComposer oDatacomposer = new OpenFitVariableDataComposer();
oDatacomposer.writeByte((byte)10);
oDatacomposer.writeInt(msg.length);
oDatacomposer.writeBytes(msg);
return oDatacomposer.toByteArray();
}
    /** Clears alarms from the phone side. */
    public static byte[] getOpenAlarmClear() {
        //0a0100000000 clear from phone
        OpenFitVariableDataComposer oDatacomposer = new OpenFitVariableDataComposer();
        oDatacomposer.writeByte((byte)10);
        oDatacomposer.writeInt(1);
        oDatacomposer.writeByte((byte)0);
        return oDatacomposer.toByteArray();
    }
    /** Acknowledges an alarm dismissed on the watch (action 3, flag 0). */
    public static byte[] getOpenAlarmCleared() {
        //0A020000000300 clear from gear
        OpenFitVariableDataComposer oDatacomposer = new OpenFitVariableDataComposer();
        oDatacomposer.writeByte((byte)10);
        oDatacomposer.writeInt(2);
        oDatacomposer.writeByte((byte)3);
        oDatacomposer.writeByte((byte)0);
        return oDatacomposer.toByteArray();
    }
    /** Acknowledges an alarm snoozed on the watch (action 3, flag 1). */
    public static byte[] getOpenAlarmSnoozed() {
        //0A020000000301 snooze from gear
        OpenFitVariableDataComposer oDatacomposer = new OpenFitVariableDataComposer();
        oDatacomposer.writeByte((byte)10);
        oDatacomposer.writeInt(2);
        oDatacomposer.writeByte((byte)3);
        oDatacomposer.writeByte((byte)1);
        return oDatacomposer.toByteArray();
    }
    /** Weather-data request (opcode 12). */
    public static byte[] getOpenWeatherReq() {
        //01010000000C
        OpenFitVariableDataComposer oDatacomposer = new OpenFitVariableDataComposer();
        oDatacomposer.writeByte((byte)1);
        oDatacomposer.writeInt(1);
        oDatacomposer.writeByte((byte)12);
        return oDatacomposer.toByteArray();
    }
    /**
     * Pushes a weather notification: the icon code is mapped to an OpenFit
     * weather-type constant via getOpenWeatherIcon.
     */
    public static byte[] getOpenWeather(String weather, String icon, long id) {
        int i = getOpenWeatherIcon(icon);
        byte[] msg = OpenFitNotificationProtocol.createWeatherProtocol(OpenFitData.DATA_TYPE_WEATHER, id, weather, i, System.currentTimeMillis());
        OpenFitVariableDataComposer oDatacomposer = new OpenFitVariableDataComposer();
        oDatacomposer.writeByte((byte)3);
        oDatacomposer.writeInt(msg.length);
        oDatacomposer.writeBytes(msg);
        return oDatacomposer.toByteArray();
    }
public static byte[] getOpenWeatherClock(String location, String temp, String unit, String icon) {
//01
//3d000000
//09
//14 = size + 2?
//ff
//fe
//4e0069006500640065007200720061006400 = city name
//06
//40060000
//01 = units 01 C, 00 F
//00
//c944e055 = time stamp
//06
//98080000
//14050000
//06000000
//00000000
//00
//0600
//00000000
//00000000
//usage 9 = 0, 90 = 1, 900 = 9, 9000 = 90
List<OpenFitDataTypeAndString> mDataList = new ArrayList<OpenFitDataTypeAndString>();
mDataList.add(new OpenFitDataTypeAndString(OpenFitDataType.BYTE, location));
if(temp == null) {
temp = "0";
}
float t = Float.parseFloat(temp);
int tempInt = Math.round(t);
if(tempInt < 10) {
tempInt = tempInt * 100;
}
else if(tempInt < 100) {
tempInt = tempInt * 100;
}
else if(tempInt < 1000) {
tempInt = tempInt * 10;
}
int tempUnit = 1;
if(unit != null && unit.contains("F")) {
tempUnit = 0;
}
int i = getOpenWeatherClockIcon(icon);
byte[] msg = OpenFitNotificationProtocol.createWeatherClockProtocol(9, mDataList, tempInt, tempUnit, i, System.currentTimeMillis());
OpenFitVariableDataComposer oDatacomposer = new OpenFitVariableDataComposer();
oDatacomposer.writeByte((byte)1);
oDatacomposer.writeInt(msg.length);
oDatacomposer.writeBytes(msg);
return oDatacomposer.toByteArray();
}
public static int getOpenWeatherIcon(String icon) {
int i = 0;
if(icon == null) {
icon = "01";
}
if(icon.contains("01")) {
i = OpenFitData.WEATHER_TYPE_SUNNY;
}
else if(icon.contains("02")) {
i = OpenFitData.WEATHER_TYPE_MOSTLY_CLEAR;
}
else if(icon.contains("03")) {
i = OpenFitData.WEATHER_TYPE_MOSTLY_CLOUDY;
}
else if(icon.contains("04")) {
i = OpenFitData.WEATHER_TYPE_MOSTLY_CLOUDY;
}
else if(icon.contains("09")) {
i = OpenFitData.WEATHER_TYPE_HEAVY_RAIN;
}
else if(icon.contains("10")) {
i = OpenFitData.WEATHER_TYPE_PARTLY_SUNNY_SHOWERS;
}
else if(icon.contains("11")) {
i = OpenFitData.WEATHER_TYPE_THUNDERSTORMS;
}
else if(icon.contains("13")) {
i = OpenFitData.WEATHER_TYPE_SNOW;
}
else if(icon.contains("50")) {
i = OpenFitData.WEATHER_TYPE_FOG;
}
return i;
}
public static int getOpenWeatherClockIcon(String icon) {
int i = 0;
if(icon == null) {
icon = "01";
}
if(icon.contains("01")) {
i = OpenFitData.WEATHER_CLOCK_SUNNY;
}
else if(icon.contains("02")) {
i = OpenFitData.WEATHER_CLOCK_CLEAR;
}
else if(icon.contains("03")) {
i = OpenFitData.WEATHER_CLOCK_MOSTLY_CLOUDY;
}
else if(icon.contains("04")) {
i = OpenFitData.WEATHER_CLOCK_MOSTLY_CLOUDY;
}
else if(icon.contains("09")) {
i = OpenFitData.WEATHER_CLOCK_SHOWERS;
}
else if(icon.contains("10")) {
i = OpenFitData.WEATHER_CLOCK_PARTLY_SUNNY_SHOWERS;
}
else if(icon.contains("11")) {
i = OpenFitData.WEATHER_CLOCK_THUNDERSTORMS;
}
else if(icon.contains("13")) {
i = OpenFitData.WEATHER_CLOCK_SNOW;
}
else if(icon.contains("50")) {
i = OpenFitData.WEATHER_CLOCK_FOG;
}
return i;
}
public static String trimTitle(String s) {
s = s.substring(0, Math.min(s.length(), 50));
return s;
}
public static String trimMessage(String s) {
s = s.substring(0, Math.min(s.length(), 250));
return s;
}
public static byte[] hexStringToByteArray(String s) {
int len = s.length();
byte[] data = new byte[len / 2];
for(int i = 0; i < len; i += 2) {
data[i / 2] = (byte) ((Character.digit(s.charAt(i), 16) << 4) + Character.digit(s.charAt(i+1), 16));
}
return data;
}
public static String hexStringToString(String hex){
StringBuilder sb = new StringBuilder();
StringBuilder temp = new StringBuilder();
for(int i=0; i<hex.length()-1; i+=2 ) {
String output = hex.substring(i, (i + 2));
int decimal = Integer.parseInt(output, 16);
sb.append((char)decimal);
temp.append(decimal);
}
return sb.toString();
}
final protected static char[] hexArray = "0123456789ABCDEF".toCharArray();
public static String byteArrayToHexString(byte[] bytes) {
char[] hexChars = new char[bytes.length * 2];
for ( int j = 0; j < bytes.length; j++ ) {
int v = bytes[j] & 0xFF;
hexChars[j * 2] = hexArray[v >>> 4];
hexChars[j * 2 + 1] = hexArray[v & 0x0F];
}
return new String(hexChars);
}
public static int[] byteArrayToIntArray(byte[] bArray) {
int[] iarray = new int[bArray.length];
int i = 0;
for(byte b : bArray) {
iarray[i++] = b & 0xff;
}
return iarray;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.config;
import java.io.File;
import java.io.IOException;
import java.net.*;
import java.util.*;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableSet;
import com.google.common.primitives.Longs;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.cassandra.auth.*;
import org.apache.cassandra.config.Config.CommitLogSync;
import org.apache.cassandra.config.Config.RequestSchedulerId;
import org.apache.cassandra.config.EncryptionOptions.ClientEncryptionOptions;
import org.apache.cassandra.config.EncryptionOptions.ServerEncryptionOptions;
import org.apache.cassandra.db.ColumnFamilyStore;
import org.apache.cassandra.db.SystemKeyspace;
import org.apache.cassandra.dht.IPartitioner;
import org.apache.cassandra.exceptions.ConfigurationException;
import org.apache.cassandra.io.FSWriteError;
import org.apache.cassandra.io.sstable.format.SSTableFormat;
import org.apache.cassandra.io.util.FileUtils;
import org.apache.cassandra.locator.*;
import org.apache.cassandra.net.MessagingService;
import org.apache.cassandra.scheduler.IRequestScheduler;
import org.apache.cassandra.scheduler.NoScheduler;
import org.apache.cassandra.service.CacheService;
import org.apache.cassandra.thrift.ThriftServer;
import org.apache.cassandra.utils.ByteBufferUtil;
import org.apache.cassandra.utils.FBUtilities;
import org.apache.cassandra.utils.memory.*;
public class DatabaseDescriptor
{
private static final Logger logger = LoggerFactory.getLogger(DatabaseDescriptor.class);
/**
 * Tokens are serialized in a Gossip VersionedValue String. VV are restricted to 64KB
 * when we send them over the wire, which works out to about 1700 tokens.
 */
private static final int MAX_NUM_TOKENS = 1536;
// Fields below are populated by applyConfig() (directly or via applyAddressConfig()).
private static IEndpointSnitch snitch;
private static InetAddress listenAddress; // leave null so we can fall through to getLocalHost
private static InetAddress broadcastAddress;
private static InetAddress rpcAddress;
private static InetAddress broadcastRpcAddress;
private static SeedProvider seedProvider;
private static IInternodeAuthenticator internodeAuthenticator;
/* Hashing strategy Random or OPHF */
private static IPartitioner partitioner;
// NOTE(review): field name has a typo ("paritioner"); left as-is because getPartitionerName() reads it.
private static String paritionerName;
private static Config.DiskAccessMode indexAccessMode;
// The loaded configuration; a default Config in client mode (see static initializer).
private static Config conf;
private static SSTableFormat.Type sstable_format = SSTableFormat.Type.BIG;
// Auth backends default to permissive implementations until applyConfig() overrides them.
private static IAuthenticator authenticator = new AllowAllAuthenticator();
private static IAuthorizer authorizer = new AllowAllAuthorizer();
private static IRoleManager roleManager = new CassandraRoleManager();
private static IRequestScheduler requestScheduler;
private static RequestSchedulerId requestSchedulerId;
private static RequestSchedulerOptions requestSchedulerOptions;
// Cache/summary capacities resolved from config or heap-based defaults in applyConfig().
private static long keyCacheSizeInMB;
private static long counterCacheSizeInMB;
private static long indexSummaryCapacityInMB;
// Datacenter of the local node, and a comparator that orders local-DC endpoints first.
private static String localDC;
private static Comparator<InetAddress> localComparator;
// Intentionally empty: calling it forces this class's static initializer (config loading) to run.
public static void forceStaticInitialization() {}
static
{
    // In client mode, we use a default configuration. Note that the fields of this class will be
    // left unconfigured however (the partitioner or localDC will be null for instance) so this
    // should be used with care.
    try
    {
        if (Config.isClientMode())
        {
            conf = new Config();
        }
        else
        {
            // Server mode: load cassandra.yaml (or the configured loader) and apply it.
            applyConfig(loadConfig());
        }
    }
    catch (Exception e)
    {
        // Surface configuration failures as class-initialization errors so startup fails fast.
        throw new ExceptionInInitializerError(e);
    }
}
/**
 * Loads the node configuration using the loader class named by the
 * "cassandra.config.loader" system property, falling back to the default
 * YAML loader when the property is unset.
 *
 * @return the loaded Config
 * @throws ConfigurationException if the loader cannot produce a configuration
 */
@VisibleForTesting
public static Config loadConfig() throws ConfigurationException
{
    String loaderClass = System.getProperty("cassandra.config.loader");
    ConfigurationLoader loader;
    if (loaderClass == null)
        loader = new YamlConfigurationLoader();
    else
        loader = FBUtilities.<ConfigurationLoader>construct(loaderClass, "configuration loading");
    return loader.loadConfig();
}
/**
 * Resolves an address bound to the named network interface. The first address
 * of the preferred IP family wins; otherwise the interface's first address is
 * returned.
 *
 * @param intf       interface name (e.g. "eth0")
 * @param configName config key being resolved, used in error messages
 * @param preferIPv6 whether to prefer an IPv6 address over IPv4
 * @throws ConfigurationException if the interface is unknown, has no addresses,
 *         or probing it fails
 */
private static InetAddress getNetworkInterfaceAddress(String intf, String configName, boolean preferIPv6) throws ConfigurationException
{
    try
    {
        NetworkInterface iface = NetworkInterface.getByName(intf);
        if (iface == null)
            throw new ConfigurationException("Configured " + configName + " \"" + intf + "\" could not be found", false);
        Enumeration<InetAddress> candidates = iface.getInetAddresses();
        if (!candidates.hasMoreElements())
            throw new ConfigurationException("Configured " + configName + " \"" + intf + "\" was found, but had no addresses", false);
        // Remember the very first address as a fallback while scanning for the preferred family.
        InetAddress fallback = null;
        while (candidates.hasMoreElements())
        {
            InetAddress candidate = candidates.nextElement();
            if (preferIPv6 ? candidate instanceof Inet6Address : candidate instanceof Inet4Address)
                return candidate;
            if (fallback == null)
                fallback = candidate;
        }
        return fallback;
    }
    catch (SocketException e)
    {
        throw new ConfigurationException("Configured " + configName + " \"" + intf + "\" caused an exception", e);
    }
}
/**
 * Resolves the four node addresses (listen, broadcast, rpc, broadcast_rpc) from
 * the given config, enforcing that address/interface settings are mutually
 * exclusive and rejecting wildcard addresses where they are not allowed.
 */
@VisibleForTesting
static void applyAddressConfig(Config config) throws ConfigurationException
{
    // Reset so repeated application (e.g. from tests) starts from a clean slate.
    listenAddress = null;
    rpcAddress = null;
    broadcastAddress = null;
    broadcastRpcAddress = null;
    /* Local IP, hostname or interface to bind services to */
    if (config.listen_address != null && config.listen_interface != null)
    {
        throw new ConfigurationException("Set listen_address OR listen_interface, not both", false);
    }
    else if (config.listen_address != null)
    {
        try
        {
            listenAddress = InetAddress.getByName(config.listen_address);
        }
        catch (UnknownHostException e)
        {
            throw new ConfigurationException("Unknown listen_address '" + config.listen_address + "'", false);
        }
        if (listenAddress.isAnyLocalAddress())
            throw new ConfigurationException("listen_address cannot be a wildcard address (" + config.listen_address + ")!", false);
    }
    else if (config.listen_interface != null)
    {
        listenAddress = getNetworkInterfaceAddress(config.listen_interface, "listen_interface", config.listen_interface_prefer_ipv6);
    }
    /* Gossip Address to broadcast */
    if (config.broadcast_address != null)
    {
        try
        {
            broadcastAddress = InetAddress.getByName(config.broadcast_address);
        }
        catch (UnknownHostException e)
        {
            throw new ConfigurationException("Unknown broadcast_address '" + config.broadcast_address + "'", false);
        }
        if (broadcastAddress.isAnyLocalAddress())
            throw new ConfigurationException("broadcast_address cannot be a wildcard address (" + config.broadcast_address + ")!", false);
    }
    /* Local IP, hostname or interface to bind RPC server to */
    if (config.rpc_address != null && config.rpc_interface != null)
    {
        throw new ConfigurationException("Set rpc_address OR rpc_interface, not both", false);
    }
    else if (config.rpc_address != null)
    {
        try
        {
            rpcAddress = InetAddress.getByName(config.rpc_address);
        }
        catch (UnknownHostException e)
        {
            throw new ConfigurationException("Unknown host in rpc_address " + config.rpc_address, false);
        }
    }
    else if (config.rpc_interface != null)
    {
        rpcAddress = getNetworkInterfaceAddress(config.rpc_interface, "rpc_interface", config.rpc_interface_prefer_ipv6);
    }
    else
    {
        // Neither rpc_address nor rpc_interface given: default to the local address.
        rpcAddress = FBUtilities.getLocalAddress();
    }
    /* RPC address to broadcast */
    if (config.broadcast_rpc_address != null)
    {
        try
        {
            broadcastRpcAddress = InetAddress.getByName(config.broadcast_rpc_address);
        }
        catch (UnknownHostException e)
        {
            throw new ConfigurationException("Unknown broadcast_rpc_address '" + config.broadcast_rpc_address + "'", false);
        }
        if (broadcastRpcAddress.isAnyLocalAddress())
            throw new ConfigurationException("broadcast_rpc_address cannot be a wildcard address (" + config.broadcast_rpc_address + ")!", false);
    }
    else
    {
        // Fall back to the rpc address, which must then be concrete (non-wildcard).
        if (rpcAddress.isAnyLocalAddress())
            throw new ConfigurationException("If rpc_address is set to a wildcard address (" + config.rpc_address + "), then " +
                                             "you must set broadcast_rpc_address to a value other than " + config.rpc_address, false);
        broadcastRpcAddress = rpcAddress;
    }
}
/**
 * Validates the given Config and applies it to this class's static state
 * (partitioner, snitch, auth backends, directories, cache sizes, seed provider,
 * request scheduler, ...). Called once from the static initializer in server
 * mode; also invoked directly by tests.
 *
 * Fixes: corrected the inverted error message for a missing
 * data_file_directories setting ("is not missing" -> "is missing"), and the
 * grammar of the user_defined_function_warn_timeout message.
 *
 * @throws ConfigurationException when any setting is missing, malformed or inconsistent
 */
public static void applyConfig(Config config) throws ConfigurationException
{
    conf = config;
    // Commitlog sync mode: batch and periodic are mutually exclusive and each
    // requires its own window/period setting.
    if (conf.commitlog_sync == null)
    {
        throw new ConfigurationException("Missing required directive CommitLogSync", false);
    }
    if (conf.commitlog_sync == Config.CommitLogSync.batch)
    {
        if (conf.commitlog_sync_batch_window_in_ms == null)
        {
            throw new ConfigurationException("Missing value for commitlog_sync_batch_window_in_ms: Double expected.", false);
        }
        else if (conf.commitlog_sync_period_in_ms != null)
        {
            throw new ConfigurationException("Batch sync specified, but commitlog_sync_period_in_ms found. Only specify commitlog_sync_batch_window_in_ms when using batch sync", false);
        }
        logger.debug("Syncing log with a batch window of {}", conf.commitlog_sync_batch_window_in_ms);
    }
    else
    {
        if (conf.commitlog_sync_period_in_ms == null)
        {
            throw new ConfigurationException("Missing value for commitlog_sync_period_in_ms: Integer expected", false);
        }
        else if (conf.commitlog_sync_batch_window_in_ms != null)
        {
            throw new ConfigurationException("commitlog_sync_period_in_ms specified, but commitlog_sync_batch_window_in_ms found. Only specify commitlog_sync_period_in_ms when using periodic sync.", false);
        }
        logger.debug("Syncing log with a period of {}", conf.commitlog_sync_period_in_ms);
    }
    if (conf.commitlog_total_space_in_mb == null)
        conf.commitlog_total_space_in_mb = 8192;
    /* evaluate the DiskAccessMode Config directive, which also affects indexAccessMode selection */
    if (conf.disk_access_mode == Config.DiskAccessMode.auto)
    {
        conf.disk_access_mode = hasLargeAddressSpace() ? Config.DiskAccessMode.mmap : Config.DiskAccessMode.standard;
        indexAccessMode = conf.disk_access_mode;
        logger.info("DiskAccessMode 'auto' determined to be {}, indexAccessMode is {}", conf.disk_access_mode, indexAccessMode);
    }
    else if (conf.disk_access_mode == Config.DiskAccessMode.mmap_index_only)
    {
        conf.disk_access_mode = Config.DiskAccessMode.standard;
        indexAccessMode = Config.DiskAccessMode.mmap;
        logger.info("DiskAccessMode is {}, indexAccessMode is {}", conf.disk_access_mode, indexAccessMode);
    }
    else
    {
        indexAccessMode = conf.disk_access_mode;
        logger.info("DiskAccessMode is {}, indexAccessMode is {}", conf.disk_access_mode, indexAccessMode);
    }
    /* Authentication, authorization and role management backend, implementing IAuthenticator, IAuthorizer & IRoleMapper*/
    if (conf.authenticator != null)
        authenticator = FBUtilities.newAuthenticator(conf.authenticator);
    if (conf.authorizer != null)
        authorizer = FBUtilities.newAuthorizer(conf.authorizer);
    if (authenticator instanceof AllowAllAuthenticator && !(authorizer instanceof AllowAllAuthorizer))
        throw new ConfigurationException("AllowAllAuthenticator can't be used with " + conf.authorizer, false);
    if (conf.role_manager != null)
        roleManager = FBUtilities.newRoleManager(conf.role_manager);
    if (authenticator instanceof PasswordAuthenticator && !(roleManager instanceof CassandraRoleManager))
        throw new ConfigurationException("CassandraRoleManager must be used with PasswordAuthenticator", false);
    if (conf.internode_authenticator != null)
        internodeAuthenticator = FBUtilities.construct(conf.internode_authenticator, "internode_authenticator");
    else
        internodeAuthenticator = new AllowAllInternodeAuthenticator();
    // Give each backend a chance to reject its own configuration early.
    authenticator.validateConfiguration();
    authorizer.validateConfiguration();
    roleManager.validateConfiguration();
    internodeAuthenticator.validateConfiguration();
    /* Hashing strategy */
    if (conf.partitioner == null)
    {
        throw new ConfigurationException("Missing directive: partitioner", false);
    }
    try
    {
        // The cassandra.partitioner system property overrides the yaml setting.
        partitioner = FBUtilities.newPartitioner(System.getProperty("cassandra.partitioner", conf.partitioner));
    }
    catch (Exception e)
    {
        throw new ConfigurationException("Invalid partitioner class " + conf.partitioner, false);
    }
    paritionerName = partitioner.getClass().getCanonicalName();
    if (conf.max_hint_window_in_ms == null)
    {
        throw new ConfigurationException("max_hint_window_in_ms cannot be set to null", false);
    }
    /* phi convict threshold for FailureDetector */
    if (conf.phi_convict_threshold < 5 || conf.phi_convict_threshold > 16)
    {
        throw new ConfigurationException("phi_convict_threshold must be between 5 and 16, but was " + conf.phi_convict_threshold, false);
    }
    /* Thread per pool */
    if (conf.concurrent_reads != null && conf.concurrent_reads < 2)
    {
        throw new ConfigurationException("concurrent_reads must be at least 2, but was " + conf.concurrent_reads, false);
    }
    if (conf.concurrent_writes != null && conf.concurrent_writes < 2)
    {
        throw new ConfigurationException("concurrent_writes must be at least 2, but was " + conf.concurrent_writes, false);
    }
    if (conf.concurrent_counter_writes != null && conf.concurrent_counter_writes < 2)
        throw new ConfigurationException("concurrent_counter_writes must be at least 2, but was " + conf.concurrent_counter_writes, false);
    if (conf.concurrent_replicates != null)
        logger.warn("concurrent_replicates has been deprecated and should be removed from cassandra.yaml");
    // Memory-derived defaults: file cache and memtable space scale with max heap.
    if (conf.file_cache_size_in_mb == null)
        conf.file_cache_size_in_mb = Math.min(512, (int) (Runtime.getRuntime().maxMemory() / (4 * 1048576)));
    if (conf.memtable_offheap_space_in_mb == null)
        conf.memtable_offheap_space_in_mb = (int) (Runtime.getRuntime().maxMemory() / (4 * 1048576));
    if (conf.memtable_offheap_space_in_mb < 0)
        throw new ConfigurationException("memtable_offheap_space_in_mb must be positive, but was " + conf.memtable_offheap_space_in_mb, false);
    // for the moment, we default to twice as much on-heap space as off-heap, as heap overhead is very large
    if (conf.memtable_heap_space_in_mb == null)
        conf.memtable_heap_space_in_mb = (int) (Runtime.getRuntime().maxMemory() / (4 * 1048576));
    if (conf.memtable_heap_space_in_mb <= 0)
        throw new ConfigurationException("memtable_heap_space_in_mb must be positive, but was " + conf.memtable_heap_space_in_mb, false);
    logger.info("Global memtable on-heap threshold is enabled at {}MB", conf.memtable_heap_space_in_mb);
    if (conf.memtable_offheap_space_in_mb == 0)
        logger.info("Global memtable off-heap threshold is disabled, HeapAllocator will be used instead");
    else
        logger.info("Global memtable off-heap threshold is enabled at {}MB", conf.memtable_offheap_space_in_mb);
    applyAddressConfig(config);
    if (conf.thrift_framed_transport_size_in_mb <= 0)
        throw new ConfigurationException("thrift_framed_transport_size_in_mb must be positive, but was " + conf.thrift_framed_transport_size_in_mb, false);
    if (conf.native_transport_max_frame_size_in_mb <= 0)
        throw new ConfigurationException("native_transport_max_frame_size_in_mb must be positive, but was " + conf.native_transport_max_frame_size_in_mb, false);
    // fail early instead of OOMing (see CASSANDRA-8116)
    if (ThriftServer.HSHA.equals(conf.rpc_server_type) && conf.rpc_max_threads == Integer.MAX_VALUE)
        throw new ConfigurationException("The hsha rpc_server_type is not compatible with an rpc_max_threads " +
                                         "setting of 'unlimited'. Please see the comments in cassandra.yaml " +
                                         "for rpc_server_type and rpc_max_threads.",
                                         false);
    if (ThriftServer.HSHA.equals(conf.rpc_server_type) && conf.rpc_max_threads > (FBUtilities.getAvailableProcessors() * 2 + 1024))
        logger.warn("rpc_max_threads setting of {} may be too high for the hsha server and cause unnecessary thread contention, reducing performance", conf.rpc_max_threads);
    /* end point snitch */
    if (conf.endpoint_snitch == null)
    {
        throw new ConfigurationException("Missing endpoint_snitch directive", false);
    }
    snitch = createEndpointSnitch(conf.endpoint_snitch);
    EndpointSnitchInfo.create();
    localDC = snitch.getDatacenter(FBUtilities.getBroadcastAddress());
    // Orders endpoints so that those in the local datacenter sort first.
    localComparator = new Comparator<InetAddress>()
    {
        public int compare(InetAddress endpoint1, InetAddress endpoint2)
        {
            boolean local1 = localDC.equals(snitch.getDatacenter(endpoint1));
            boolean local2 = localDC.equals(snitch.getDatacenter(endpoint2));
            if (local1 && !local2)
                return -1;
            if (local2 && !local1)
                return 1;
            return 0;
        }
    };
    /* Request Scheduler setup */
    requestSchedulerOptions = conf.request_scheduler_options;
    if (conf.request_scheduler != null)
    {
        try
        {
            if (requestSchedulerOptions == null)
            {
                requestSchedulerOptions = new RequestSchedulerOptions();
            }
            Class<?> cls = Class.forName(conf.request_scheduler);
            requestScheduler = (IRequestScheduler) cls.getConstructor(RequestSchedulerOptions.class).newInstance(requestSchedulerOptions);
        }
        catch (ClassNotFoundException e)
        {
            throw new ConfigurationException("Invalid Request Scheduler class " + conf.request_scheduler, false);
        }
        catch (Exception e)
        {
            throw new ConfigurationException("Unable to instantiate request scheduler", e);
        }
    }
    else
    {
        requestScheduler = new NoScheduler();
    }
    if (conf.request_scheduler_id == RequestSchedulerId.keyspace)
    {
        requestSchedulerId = conf.request_scheduler_id;
    }
    else
    {
        // Default to Keyspace
        requestSchedulerId = RequestSchedulerId.keyspace;
    }
    // if data dirs, commitlog dir, or saved caches dir are set in cassandra.yaml, use that. Otherwise,
    // use -Dcassandra.storagedir (set in cassandra-env.sh) as the parent dir for data/, commitlog/, and saved_caches/
    if (conf.commitlog_directory == null)
    {
        conf.commitlog_directory = System.getProperty("cassandra.storagedir", null);
        if (conf.commitlog_directory == null)
            throw new ConfigurationException("commitlog_directory is missing and -Dcassandra.storagedir is not set", false);
        conf.commitlog_directory += File.separator + "commitlog";
    }
    if (conf.saved_caches_directory == null)
    {
        conf.saved_caches_directory = System.getProperty("cassandra.storagedir", null);
        if (conf.saved_caches_directory == null)
            throw new ConfigurationException("saved_caches_directory is missing and -Dcassandra.storagedir is not set", false);
        conf.saved_caches_directory += File.separator + "saved_caches";
    }
    if (conf.data_file_directories == null || conf.data_file_directories.length == 0)
    {
        String defaultDataDir = System.getProperty("cassandra.storagedir", null);
        if (defaultDataDir == null)
            throw new ConfigurationException("data_file_directories is missing and -Dcassandra.storagedir is not set", false);
        conf.data_file_directories = new String[]{ defaultDataDir + File.separator + "data" };
    }
    /* data file and commit log directories. they get created later, when they're needed. */
    for (String datadir : conf.data_file_directories)
    {
        if (datadir.equals(conf.commitlog_directory))
            throw new ConfigurationException("commitlog_directory must not be the same as any data_file_directories", false);
        if (datadir.equals(conf.saved_caches_directory))
            throw new ConfigurationException("saved_caches_directory must not be the same as any data_file_directories", false);
    }
    if (conf.commitlog_directory.equals(conf.saved_caches_directory))
        throw new ConfigurationException("saved_caches_directory must not be the same as the commitlog_directory", false);
    if (conf.memtable_flush_writers == null)
        conf.memtable_flush_writers = Math.min(8, Math.max(2, Math.min(FBUtilities.getAvailableProcessors(), conf.data_file_directories.length)));
    if (conf.memtable_flush_writers < 1)
        throw new ConfigurationException("memtable_flush_writers must be at least 1, but was " + conf.memtable_flush_writers, false);
    if (conf.memtable_cleanup_threshold == null)
        conf.memtable_cleanup_threshold = (float) (1.0 / (1 + conf.memtable_flush_writers));
    if (conf.memtable_cleanup_threshold < 0.01f)
        throw new ConfigurationException("memtable_cleanup_threshold must be >= 0.01, but was " + conf.memtable_cleanup_threshold, false);
    if (conf.memtable_cleanup_threshold > 0.99f)
        throw new ConfigurationException("memtable_cleanup_threshold must be <= 0.99, but was " + conf.memtable_cleanup_threshold, false);
    if (conf.memtable_cleanup_threshold < 0.1f)
        logger.warn("memtable_cleanup_threshold is set very low [{}], which may cause performance degradation", conf.memtable_cleanup_threshold);
    if (conf.concurrent_compactors == null)
        conf.concurrent_compactors = Math.min(8, Math.max(2, Math.min(FBUtilities.getAvailableProcessors(), conf.data_file_directories.length)));
    if (conf.concurrent_compactors <= 0)
        throw new ConfigurationException("concurrent_compactors should be strictly greater than 0, but was " + conf.concurrent_compactors, false);
    if (conf.initial_token != null)
        for (String token : tokensFromString(conf.initial_token))
            partitioner.getTokenFactory().validate(token);
    if (conf.num_tokens == null)
        conf.num_tokens = 1;
    else if (conf.num_tokens > MAX_NUM_TOKENS)
        throw new ConfigurationException(String.format("A maximum number of %d tokens per node is supported", MAX_NUM_TOKENS), false);
    try
    {
        // if key_cache_size_in_mb option was set to "auto" then size of the cache should be "min(5% of Heap (in MB), 100MB)
        keyCacheSizeInMB = (conf.key_cache_size_in_mb == null)
            ? Math.min(Math.max(1, (int) (Runtime.getRuntime().totalMemory() * 0.05 / 1024 / 1024)), 100)
            : conf.key_cache_size_in_mb;
        if (keyCacheSizeInMB < 0)
            throw new NumberFormatException(); // to escape duplicating error message
    }
    catch (NumberFormatException e)
    {
        throw new ConfigurationException("key_cache_size_in_mb option was set incorrectly to '"
                + conf.key_cache_size_in_mb + "', supported values are <integer> >= 0.", false);
    }
    try
    {
        // if counter_cache_size_in_mb option was set to "auto" then size of the cache should be "min(2.5% of Heap (in MB), 50MB)
        counterCacheSizeInMB = (conf.counter_cache_size_in_mb == null)
            ? Math.min(Math.max(1, (int) (Runtime.getRuntime().totalMemory() * 0.025 / 1024 / 1024)), 50)
            : conf.counter_cache_size_in_mb;
        if (counterCacheSizeInMB < 0)
            throw new NumberFormatException(); // to escape duplicating error message
    }
    catch (NumberFormatException e)
    {
        throw new ConfigurationException("counter_cache_size_in_mb option was set incorrectly to '"
                + conf.counter_cache_size_in_mb + "', supported values are <integer> >= 0.", false);
    }
    // if set to empty/"auto" then use 5% of Heap size
    indexSummaryCapacityInMB = (conf.index_summary_capacity_in_mb == null)
        ? Math.max(1, (int) (Runtime.getRuntime().totalMemory() * 0.05 / 1024 / 1024))
        : conf.index_summary_capacity_in_mb;
    if (indexSummaryCapacityInMB < 0)
        throw new ConfigurationException("index_summary_capacity_in_mb option was set incorrectly to '"
                + conf.index_summary_capacity_in_mb + "', it should be a non-negative integer.", false);
    if(conf.encryption_options != null)
    {
        logger.warn("Please rename encryption_options as server_encryption_options in the yaml");
        //operate under the assumption that server_encryption_options is not set in yaml rather than both
        conf.server_encryption_options = conf.encryption_options;
    }
    // load the seeds for node contact points
    if (conf.seed_provider == null)
    {
        throw new ConfigurationException("seeds configuration is missing; a minimum of one seed is required.", false);
    }
    try
    {
        Class<?> seedProviderClass = Class.forName(conf.seed_provider.class_name);
        seedProvider = (SeedProvider)seedProviderClass.getConstructor(Map.class).newInstance(conf.seed_provider.parameters);
    }
    // there are about 5 checked exceptions that could be thrown here.
    catch (Exception e)
    {
        throw new ConfigurationException(e.getMessage() + "\nFatal configuration error; unable to start server. See log for stacktrace.", false);
    }
    if (seedProvider.getSeeds().size() == 0)
        throw new ConfigurationException("The seed provider lists no seeds.", false);
    // UDF timeouts: both must be non-negative and warn must not exceed fail.
    if (conf.user_defined_function_fail_timeout < 0)
        throw new ConfigurationException("user_defined_function_fail_timeout must not be negative", false);
    if (conf.user_defined_function_warn_timeout < 0)
        throw new ConfigurationException("user_defined_function_warn_timeout must not be negative", false);
    if (conf.user_defined_function_fail_timeout < conf.user_defined_function_warn_timeout)
        throw new ConfigurationException("user_defined_function_warn_timeout must be less than user_defined_function_fail_timeout", false);
}
/**
 * Instantiates the configured snitch (qualifying bare class names with the
 * locator package) and wraps it in a DynamicEndpointSnitch when dynamic
 * snitching is enabled in the config.
 */
private static IEndpointSnitch createEndpointSnitch(String snitchClassName) throws ConfigurationException
{
    String qualifiedName = snitchClassName.contains(".")
                         ? snitchClassName
                         : "org.apache.cassandra.locator." + snitchClassName;
    IEndpointSnitch snitch = FBUtilities.construct(qualifiedName, "snitch");
    if (conf.dynamic_snitch)
        return new DynamicEndpointSnitch(snitch);
    return snitch;
}
public static IAuthenticator getAuthenticator()
{
    return authenticator;
}
public static IAuthorizer getAuthorizer()
{
    return authorizer;
}
public static IRoleManager getRoleManager()
{
    return roleManager;
}
/** Validity period (ms) for entries in the permissions cache. */
public static int getPermissionsValidity()
{
    return conf.permissions_validity_in_ms;
}
public static void setPermissionsValidity(int timeout)
{
    conf.permissions_validity_in_ms = timeout;
}
public static int getPermissionsCacheMaxEntries()
{
    return conf.permissions_cache_max_entries;
}
/** Refresh interval (ms) for the permissions cache; -1 means "same as validity". */
public static int getPermissionsUpdateInterval()
{
    return conf.permissions_update_interval_in_ms == -1
         ? conf.permissions_validity_in_ms
         : conf.permissions_update_interval_in_ms;
}
/** Validity period (ms) for entries in the roles cache. */
public static int getRolesValidity()
{
    return conf.roles_validity_in_ms;
}
public static void setRolesValidity(int validity)
{
    conf.roles_validity_in_ms = validity;
}
public static int getRolesCacheMaxEntries()
{
    return conf.roles_cache_max_entries;
}
/** Refresh interval (ms) for the roles cache; -1 means "same as validity". */
public static int getRolesUpdateInterval()
{
    return conf.roles_update_interval_in_ms == -1
         ? conf.roles_validity_in_ms
         : conf.roles_update_interval_in_ms;
}
public static void setRolesUpdateInterval(int interval)
{
    conf.roles_update_interval_in_ms = interval;
}
public static void setPermissionsUpdateInterval(int updateInterval)
{
    conf.permissions_update_interval_in_ms = updateInterval;
}
/** Thrift framed transport size, converted from MB (config) to bytes. */
public static int getThriftFramedTransportSize()
{
    return conf.thrift_framed_transport_size_in_mb * 1024 * 1024;
}
/**
 * Creates all storage-related directories: every data file directory, the
 * commitlog directory and the saved caches directory, failing fast when the
 * configuration is incomplete or the filesystem rejects a directory.
 *
 * @throws IllegalArgumentException on bad configuration
 * @throws IllegalStateException when a directory cannot be created
 */
public static void createAllDirectories()
{
    try
    {
        String[] dataDirs = conf.data_file_directories;
        if (dataDirs.length == 0)
            throw new ConfigurationException("At least one DataFileDirectory must be specified", false);
        for (String dataDir : dataDirs)
            FileUtils.createDirectory(dataDir);
        if (conf.commitlog_directory == null)
            throw new ConfigurationException("commitlog_directory must be specified", false);
        FileUtils.createDirectory(conf.commitlog_directory);
        if (conf.saved_caches_directory == null)
            throw new ConfigurationException("saved_caches_directory must be specified", false);
        FileUtils.createDirectory(conf.saved_caches_directory);
    }
    catch (ConfigurationException e)
    {
        throw new IllegalArgumentException("Bad configuration; unable to start server: " + e.getMessage());
    }
    catch (FSWriteError e)
    {
        throw new IllegalStateException(e.getCause().getMessage() + "; unable to start server");
    }
}
public static IPartitioner getPartitioner()
{
    return partitioner;
}
/** @return the partitioner's canonical class name (stored in the typo'd paritionerName field). */
public static String getPartitionerName()
{
    return paritionerName;
}
/* For tests ONLY, don't use otherwise or all hell will break loose */
public static void setPartitioner(IPartitioner newPartitioner)
{
    partitioner = newPartitioner;
}
public static IEndpointSnitch getEndpointSnitch()
{
    return snitch;
}
public static void setEndpointSnitch(IEndpointSnitch eps)
{
    snitch = eps;
}
public static IRequestScheduler getRequestScheduler()
{
    return requestScheduler;
}
public static RequestSchedulerOptions getRequestSchedulerOptions()
{
    return requestSchedulerOptions;
}
public static RequestSchedulerId getRequestSchedulerId()
{
    return requestSchedulerId;
}
/** Column index size, converted from KB (config) to bytes. */
public static int getColumnIndexSize()
{
    return conf.column_index_size_in_kb * 1024;
}
/** Batch size warn threshold, converted from KB (config) to bytes. */
public static int getBatchSizeWarnThreshold()
{
    return conf.batch_size_warn_threshold_in_kb * 1024;
}
/** Batch size fail threshold in bytes; long arithmetic avoids int overflow. */
public static long getBatchSizeFailThreshold()
{
    return conf.batch_size_fail_threshold_in_kb * 1024L;
}
public static int getBatchSizeFailThresholdInKB()
{
    return conf.batch_size_fail_threshold_in_kb;
}
public static void setBatchSizeWarnThresholdInKB(int threshold)
{
    conf.batch_size_warn_threshold_in_kb = threshold;
}
public static void setBatchSizeFailThresholdInKB(int threshold)
{
    conf.batch_size_fail_threshold_in_kb = threshold;
}
/** Initial tokens; the cassandra.initial_token system property overrides the yaml value. */
public static Collection<String> getInitialTokens()
{
    return tokensFromString(System.getProperty("cassandra.initial_token", conf.initial_token));
}
/** Keyspace used for token allocation; overridable via cassandra.allocate_tokens_keyspace. */
public static String getAllocateTokensKeyspace()
{
    return System.getProperty("cassandra.allocate_tokens_keyspace", conf.allocate_tokens_for_keyspace);
}
/**
 * Splits a comma-separated token string into individual tokens with leading and
 * trailing whitespace removed. A null input yields an empty list.
 */
public static Collection<String> tokensFromString(String tokenString)
{
    List<String> tokens = new ArrayList<String>();
    if (tokenString == null)
        return tokens;
    for (String rawToken : tokenString.split(","))
    {
        String cleaned = rawToken.replaceAll("^\\s+", "").replaceAll("\\s+$", "");
        tokens.add(cleaned);
    }
    return tokens;
}
/** Number of vnode tokens per node; defaulted to 1 by applyConfig() when unset. */
public static Integer getNumTokens()
{
    return conf.num_tokens;
}
/**
 * @return the address of the node this node should replace, taken from the
 *         cassandra.replace_address system property (or, failing that,
 *         cassandra.replace_address_first_boot); null when neither property is
 *         set or the name cannot be resolved.
 */
public static InetAddress getReplaceAddress()
{
    String replace = System.getProperty("cassandra.replace_address", null);
    if (replace == null)
        replace = System.getProperty("cassandra.replace_address_first_boot", null);
    if (replace == null)
        return null;
    try
    {
        return InetAddress.getByName(replace);
    }
    catch (UnknownHostException e)
    {
        return null;
    }
}
/** Tokens to claim when replacing a node, from the cassandra.replace_token property (empty when unset). */
public static Collection<String> getReplaceTokens()
{
    return tokensFromString(System.getProperty("cassandra.replace_token", null));
}
/**
 * @return the host ID of the node to replace, parsed from the
 *         cassandra.replace_node system property, or null when the property is
 *         unset. A malformed UUID still propagates IllegalArgumentException,
 *         as before.
 *
 * Fix: replaced the catch of NullPointerException (exception-as-control-flow
 * around UUID.fromString(null)) with an explicit null check.
 */
public static UUID getReplaceNode()
{
    String replaceNode = System.getProperty("cassandra.replace_node", null);
    if (replaceNode == null)
        return null;
    return UUID.fromString(replaceNode);
}
/**
 * @return true when this node is configured to replace another node; a
 *         first-boot-only replace request is ignored (with an info log) once
 *         bootstrap has already completed.
 */
public static boolean isReplacing()
{
    boolean firstBootOnly = System.getProperty("cassandra.replace_address_first_boot", null) != null;
    if (firstBootOnly && SystemKeyspace.bootstrapComplete())
    {
        logger.info("Replace address on first boot requested; this node is already bootstrapped");
        return false;
    }
    return getReplaceAddress() != null;
}
public static String getClusterName()
{
    return conf.cluster_name;
}
public static int getMaxStreamingRetries()
{
    return conf.max_streaming_retries;
}
/** Storage (internode) port; overridable via the cassandra.storage_port system property. */
public static int getStoragePort()
{
    return Integer.parseInt(System.getProperty("cassandra.storage_port", conf.storage_port.toString()));
}
/** SSL storage port; overridable via the cassandra.ssl_storage_port system property. */
public static int getSSLStoragePort()
{
    return Integer.parseInt(System.getProperty("cassandra.ssl_storage_port", conf.ssl_storage_port.toString()));
}
/** Thrift RPC port; overridable via the cassandra.rpc_port system property. */
public static int getRpcPort()
{
    return Integer.parseInt(System.getProperty("cassandra.rpc_port", conf.rpc_port.toString()));
}
public static int getRpcListenBacklog()
{
    return conf.rpc_listen_backlog;
}
/** Generic request timeout (ms); the fallback for verbs without a dedicated timeout. */
public static long getRpcTimeout()
{
    return conf.request_timeout_in_ms;
}
public static void setRpcTimeout(Long timeOutInMillis)
{
    conf.request_timeout_in_ms = timeOutInMillis;
}
public static long getReadRpcTimeout()
{
    return conf.read_request_timeout_in_ms;
}
public static void setReadRpcTimeout(Long timeOutInMillis)
{
    conf.read_request_timeout_in_ms = timeOutInMillis;
}
public static long getRangeRpcTimeout()
{
    return conf.range_request_timeout_in_ms;
}
public static void setRangeRpcTimeout(Long timeOutInMillis)
{
    conf.range_request_timeout_in_ms = timeOutInMillis;
}
public static long getWriteRpcTimeout()
{
    return conf.write_request_timeout_in_ms;
}
public static void setWriteRpcTimeout(Long timeOutInMillis)
{
    conf.write_request_timeout_in_ms = timeOutInMillis;
}
public static long getCounterWriteRpcTimeout()
{
    return conf.counter_write_request_timeout_in_ms;
}
public static void setCounterWriteRpcTimeout(Long timeOutInMillis)
{
    conf.counter_write_request_timeout_in_ms = timeOutInMillis;
}
/** Timeout (ms) for CAS (lightweight transaction) contention back-off. */
public static long getCasContentionTimeout()
{
    return conf.cas_contention_timeout_in_ms;
}
public static void setCasContentionTimeout(Long timeOutInMillis)
{
    conf.cas_contention_timeout_in_ms = timeOutInMillis;
}
public static long getTruncateRpcTimeout()
{
    return conf.truncate_request_timeout_in_ms;
}
public static void setTruncateRpcTimeout(Long timeOutInMillis)
{
    conf.truncate_request_timeout_in_ms = timeOutInMillis;
}
public static boolean hasCrossNodeTimeout()
{
    return conf.cross_node_timeout;
}
// not part of the Verb enum so we can change timeouts easily via JMX
/**
 * Maps a messaging verb to its configured timeout in milliseconds; verbs
 * without a dedicated setting fall through to the generic request timeout.
 */
public static long getTimeout(MessagingService.Verb verb)
{
    switch (verb)
    {
        case READ:
            return getReadRpcTimeout();
        case RANGE_SLICE:
            return getRangeRpcTimeout();
        case TRUNCATE:
            return getTruncateRpcTimeout();
        // All write-path verbs (including Paxos phases) share the write timeout.
        case READ_REPAIR:
        case MUTATION:
        case PAXOS_COMMIT:
        case PAXOS_PREPARE:
        case PAXOS_PROPOSE:
            return getWriteRpcTimeout();
        case COUNTER_MUTATION:
            return getCounterWriteRpcTimeout();
        default:
            return getRpcTimeout();
    }
}
/**
 * @return the minimum configured {read, write, range, truncate, misc} timeout
 */
public static long getMinRpcTimeout()
{
    long minTimeout = getRpcTimeout();
    minTimeout = Math.min(minTimeout, getReadRpcTimeout());
    minTimeout = Math.min(minTimeout, getRangeRpcTimeout());
    minTimeout = Math.min(minTimeout, getWriteRpcTimeout());
    minTimeout = Math.min(minTimeout, getCounterWriteRpcTimeout());
    minTimeout = Math.min(minTimeout, getTruncateRpcTimeout());
    return minTimeout;
}
// --- Failure detector and stage-concurrency settings ---

/** @return phi threshold for the accrual failure detector */
public static double getPhiConvictThreshold()
{
    return conf.phi_convict_threshold;
}

public static void setPhiConvictThreshold(double phiConvictThreshold)
{
    conf.phi_convict_threshold = phiConvictThreshold;
}

/** @return configured number of concurrent reads */
public static int getConcurrentReaders()
{
    return conf.concurrent_reads;
}

/** @return configured number of concurrent writes */
public static int getConcurrentWriters()
{
    return conf.concurrent_writes;
}

/** @return configured number of concurrent counter writes */
public static int getConcurrentCounterWriters()
{
    return conf.concurrent_counter_writes;
}

/** @return configured number of memtable flush writers */
public static int getFlushWriters()
{
    return conf.memtable_flush_writers;
}

/** @return configured number of concurrent compactors */
public static int getConcurrentCompactors()
{
    return conf.concurrent_compactors;
}

/** @return compaction throughput cap, in MB/s */
public static int getCompactionThroughputMbPerSec()
{
    return conf.compaction_throughput_mb_per_sec;
}

public static void setCompactionThroughputMbPerSec(int value)
{
    conf.compaction_throughput_mb_per_sec = value;
}
/**
 * @return the large-partition warning threshold converted from MB to bytes.
 *         Conversion is done in long arithmetic: the previous int multiply
 *         silently overflowed (went negative) for thresholds >= 2048 MB, so
 *         the result is saturated at Integer.MAX_VALUE instead.
 */
public static int getCompactionLargePartitionWarningThreshold()
{
    long bytes = conf.compaction_large_partition_warning_threshold_mb * 1024L * 1024L;
    return (int) Math.min(Integer.MAX_VALUE, bytes);
}
/** @return true iff the cassandra.disable_stcs_in_l0 system property is set to "true" */
public static boolean getDisableSTCSInL0()
{
    return Boolean.getBoolean("cassandra.disable_stcs_in_l0");
}

/** @return outbound streaming throughput cap, in megabits/s */
public static int getStreamThroughputOutboundMegabitsPerSec()
{
    return conf.stream_throughput_outbound_megabits_per_sec;
}

public static void setStreamThroughputOutboundMegabitsPerSec(int value)
{
    conf.stream_throughput_outbound_megabits_per_sec = value;
}

/** @return inter-datacenter outbound streaming throughput cap, in megabits/s */
public static int getInterDCStreamThroughputOutboundMegabitsPerSec()
{
    return conf.inter_dc_stream_throughput_outbound_megabits_per_sec;
}

public static void setInterDCStreamThroughputOutboundMegabitsPerSec(int value)
{
    conf.inter_dc_stream_throughput_outbound_megabits_per_sec = value;
}

/** @return all configured data file directories */
public static String[] getAllDataFileLocations()
{
    return conf.data_file_directories;
}

/** @return the commit log directory */
public static String getCommitLogLocation()
{
    return conf.commitlog_directory;
}

/** @return the configured commit log compressor, if any */
public static ParameterizedClass getCommitLogCompression()
{
    return conf.commitlog_compression;
}

public static void setCommitLogCompression(ParameterizedClass compressor)
{
    conf.commitlog_compression = compressor;
}

/** @return maximum number of pooled commit log compression buffers */
public static int getCommitLogMaxCompressionBuffersInPool()
{
    return conf.commitlog_max_compression_buffers_in_pool;
}

/** @return tombstone warn threshold */
public static int getTombstoneWarnThreshold()
{
    return conf.tombstone_warn_threshold;
}

public static void setTombstoneWarnThreshold(int threshold)
{
    conf.tombstone_warn_threshold = threshold;
}

/** @return tombstone failure threshold */
public static int getTombstoneFailureThreshold()
{
    return conf.tombstone_failure_threshold;
}

public static void setTombstoneFailureThreshold(int threshold)
{
    conf.tombstone_failure_threshold = threshold;
}
/**
 * size of commitlog segments to allocate
 */
public static int getCommitLogSegmentSize()
{
    // MB -> bytes. NOTE(review): int math overflows for segment sizes
    // >= 2048 MB — presumably rejected by config validation elsewhere; confirm.
    return conf.commitlog_segment_size_in_mb * 1024 * 1024;
}

public static void setCommitLogSegmentSize(int sizeMegabytes)
{
    conf.commitlog_segment_size_in_mb = sizeMegabytes;
}
/** @return directory where caches are persisted */
public static String getSavedCachesLocation()
{
    return conf.saved_caches_directory;
}

/** @return an immutable snapshot of the current seed addresses from the seed provider */
public static Set<InetAddress> getSeeds()
{
    return ImmutableSet.<InetAddress>builder().addAll(seedProvider.getSeeds()).build();
}

public static InetAddress getListenAddress()
{
    return listenAddress;
}

public static InetAddress getBroadcastAddress()
{
    return broadcastAddress;
}

public static IInternodeAuthenticator getInternodeAuthenticator()
{
    return internodeAuthenticator;
}

public static void setBroadcastAddress(InetAddress broadcastAdd)
{
    broadcastAddress = broadcastAdd;
}

/** @return whether the RPC server should be started */
public static boolean startRpc()
{
    return conf.start_rpc;
}

public static InetAddress getRpcAddress()
{
    return rpcAddress;
}

public static void setBroadcastRpcAddress(InetAddress broadcastRPCAddr)
{
    broadcastRpcAddress = broadcastRPCAddr;
}

public static InetAddress getBroadcastRpcAddress()
{
    return broadcastRpcAddress;
}

public static String getRpcServerType()
{
    return conf.rpc_server_type;
}

public static boolean getRpcKeepAlive()
{
    return conf.rpc_keepalive;
}

// NOTE(review): the accessors below return boxed Integer — presumably null
// means "not configured, use the default"; confirm against Config.
public static Integer getRpcMinThreads()
{
    return conf.rpc_min_threads;
}

public static Integer getRpcMaxThreads()
{
    return conf.rpc_max_threads;
}

public static Integer getRpcSendBufferSize()
{
    return conf.rpc_send_buff_size_in_bytes;
}

public static Integer getRpcRecvBufferSize()
{
    return conf.rpc_recv_buff_size_in_bytes;
}

public static Integer getInternodeSendBufferSize()
{
    return conf.internode_send_buff_size_in_bytes;
}

public static Integer getInternodeRecvBufferSize()
{
    return conf.internode_recv_buff_size_in_bytes;
}
// --- Native transport (CQL protocol) settings ---

public static boolean startNativeTransport()
{
    return conf.start_native_transport;
}

/** Native transport port; overridable via the cassandra.native_transport_port system property. */
public static int getNativeTransportPort()
{
    return Integer.parseInt(System.getProperty("cassandra.native_transport_port", conf.native_transport_port.toString()));
}

public static Integer getNativeTransportMaxThreads()
{
    return conf.native_transport_max_threads;
}

/**
 * @return maximum native transport frame size, in bytes.
 *         NOTE(review): int math overflows for config values >= 2048 MB.
 */
public static int getNativeTransportMaxFrameSize()
{
    return conf.native_transport_max_frame_size_in_mb * 1024 * 1024;
}

public static Long getNativeTransportMaxConcurrentConnections()
{
    return conf.native_transport_max_concurrent_connections;
}

public static void setNativeTransportMaxConcurrentConnections(long nativeTransportMaxConcurrentConnections)
{
    conf.native_transport_max_concurrent_connections = nativeTransportMaxConcurrentConnections;
}

public static Long getNativeTransportMaxConcurrentConnectionsPerIp()
{
    return conf.native_transport_max_concurrent_connections_per_ip;
}

public static void setNativeTransportMaxConcurrentConnectionsPerIp(long native_transport_max_concurrent_connections_per_ip)
{
    conf.native_transport_max_concurrent_connections_per_ip = native_transport_max_concurrent_connections_per_ip;
}
// --- Commit log sync settings ---

/** @return batch-mode sync window, in ms */
public static double getCommitLogSyncBatchWindow()
{
    return conf.commitlog_sync_batch_window_in_ms;
}

public static void setCommitLogSyncBatchWindow(double windowMillis)
{
    conf.commitlog_sync_batch_window_in_ms = windowMillis;
}

/** @return periodic-mode sync period, in ms */
public static int getCommitLogSyncPeriod()
{
    return conf.commitlog_sync_period_in_ms;
}

public static void setCommitLogSyncPeriod(int periodMillis)
{
    conf.commitlog_sync_period_in_ms = periodMillis;
}

public static Config.CommitLogSync getCommitLogSync()
{
    return conf.commitlog_sync;
}

public static void setCommitLogSync(CommitLogSync sync)
{
    conf.commitlog_sync = sync;
}

// --- Disk access mode and failure policies ---

public static Config.DiskAccessMode getDiskAccessMode()
{
    return conf.disk_access_mode;
}

// Do not use outside unit tests.
@VisibleForTesting
public static void setDiskAccessMode(Config.DiskAccessMode mode)
{
    conf.disk_access_mode = mode;
}

public static Config.DiskAccessMode getIndexAccessMode()
{
    return indexAccessMode;
}

// Do not use outside unit tests.
@VisibleForTesting
public static void setIndexAccessMode(Config.DiskAccessMode mode)
{
    indexAccessMode = mode;
}

public static void setDiskFailurePolicy(Config.DiskFailurePolicy policy)
{
    conf.disk_failure_policy = policy;
}

public static Config.DiskFailurePolicy getDiskFailurePolicy()
{
    return conf.disk_failure_policy;
}

public static void setCommitFailurePolicy(Config.CommitFailurePolicy policy)
{
    conf.commit_failure_policy = policy;
}

public static Config.CommitFailurePolicy getCommitFailurePolicy()
{
    return conf.commit_failure_policy;
}
/** @return whether a snapshot is taken before each compaction */
public static boolean isSnapshotBeforeCompaction()
{
    return conf.snapshot_before_compaction;
}

public static boolean isAutoSnapshot()
{
    return conf.auto_snapshot;
}

@VisibleForTesting
public static void setAutoSnapshot(boolean autoSnapshot)
{
    conf.auto_snapshot = autoSnapshot;
}

// NOTE(review): duplicates isAutoSnapshot(); presumably kept for test callers.
@VisibleForTesting
public static boolean getAutoSnapshot()
{
    return conf.auto_snapshot;
}

/** Auto-bootstrap flag; overridable via the cassandra.auto_bootstrap system property. */
public static boolean isAutoBootstrap()
{
    return Boolean.parseBoolean(System.getProperty("cassandra.auto_bootstrap", conf.auto_bootstrap.toString()));
}

// --- Hinted handoff settings ---

public static void setHintedHandoffEnabled(boolean hintedHandoffEnabled)
{
    conf.hinted_handoff_enabled = hintedHandoffEnabled;
}

public static boolean hintedHandoffEnabled()
{
    return conf.hinted_handoff_enabled;
}

/** @return the live (mutable) set of datacenters with hinted handoff disabled */
public static Set<String> hintedHandoffDisabledDCs()
{
    return conf.hinted_handoff_disabled_datacenters;
}

public static void enableHintsForDC(String dc)
{
    conf.hinted_handoff_disabled_datacenters.remove(dc);
}

public static void disableHintsForDC(String dc)
{
    conf.hinted_handoff_disabled_datacenters.add(dc);
}

public static void setMaxHintWindow(int ms)
{
    conf.max_hint_window_in_ms = ms;
}

public static int getMaxHintWindow()
{
    return conf.max_hint_window_in_ms;
}

/**
 * Builds the file path used to persist a cache of the given type:
 * "&lt;ksName&gt;-&lt;cfName&gt;-&lt;hex cfId&gt;-&lt;cacheType&gt;[-&lt;version&gt;.db]"
 * under the saved caches directory.
 */
public static File getSerializedCachePath(String ksName, String cfName, UUID cfId, CacheService.CacheType cacheType, String version)
{
    StringBuilder builder = new StringBuilder();
    builder.append(ksName).append('-');
    builder.append(cfName).append('-');
    builder.append(ByteBufferUtil.bytesToHex(ByteBufferUtil.bytes(cfId))).append('-');
    builder.append(cacheType);
    // NOTE(review): when version is null the name gets neither the version
    // suffix nor the ".db" extension — confirm this is intentional.
    builder.append((version == null ? "" : "-" + version + ".db"));
    return new File(conf.saved_caches_directory, builder.toString());
}
// --- Dynamic snitch settings ---

public static int getDynamicUpdateInterval()
{
    return conf.dynamic_snitch_update_interval_in_ms;
}

public static void setDynamicUpdateInterval(Integer dynamicUpdateInterval)
{
    conf.dynamic_snitch_update_interval_in_ms = dynamicUpdateInterval;
}

public static int getDynamicResetInterval()
{
    return conf.dynamic_snitch_reset_interval_in_ms;
}

public static void setDynamicResetInterval(Integer dynamicResetInterval)
{
    conf.dynamic_snitch_reset_interval_in_ms = dynamicResetInterval;
}

public static double getDynamicBadnessThreshold()
{
    return conf.dynamic_snitch_badness_threshold;
}

public static void setDynamicBadnessThreshold(Double dynamicBadnessThreshold)
{
    conf.dynamic_snitch_badness_threshold = dynamicBadnessThreshold;
}

// --- Encryption, hint throttling, backups, disk optimization ---

public static ServerEncryptionOptions getServerEncryptionOptions()
{
    return conf.server_encryption_options;
}

public static ClientEncryptionOptions getClientEncryptionOptions()
{
    return conf.client_encryption_options;
}

public static int getHintedHandoffThrottleInKB()
{
    return conf.hinted_handoff_throttle_in_kb;
}

public static int getBatchlogReplayThrottleInKB()
{
    return conf.batchlog_replay_throttle_in_kb;
}

public static void setHintedHandoffThrottleInKB(Integer throttleInKB)
{
    conf.hinted_handoff_throttle_in_kb = throttleInKB;
}

public static int getMaxHintsThread()
{
    return conf.max_hints_delivery_threads;
}

public static boolean isIncrementalBackupsEnabled()
{
    return conf.incremental_backups;
}

public static void setIncrementalBackupsEnabled(boolean value)
{
    conf.incremental_backups = value;
}

public static int getFileCacheSizeInMB()
{
    return conf.file_cache_size_in_mb;
}

public static boolean getBufferPoolUseHeapIfExhausted()
{
    return conf.buffer_pool_use_heap_if_exhausted;
}

public static Config.DiskOptimizationStrategy getDiskOptimizationStrategy()
{
    return conf.disk_optimization_strategy;
}

@VisibleForTesting
public static void setDiskOptimizationStrategy(Config.DiskOptimizationStrategy strategy)
{
    conf.disk_optimization_strategy = strategy;
}

public static double getDiskOptimizationEstimatePercentile()
{
    return conf.disk_optimization_estimate_percentile;
}

public static double getDiskOptimizationPageCrossChance()
{
    return conf.disk_optimization_page_cross_chance;
}

@VisibleForTesting
public static void setDiskOptimizationPageCrossChance(double chance)
{
    conf.disk_optimization_page_cross_chance = chance;
}
public static long getTotalCommitlogSpaceInMB()
{
    return conf.commitlog_total_space_in_mb;
}

/** @return preemptive-open interval in MB, or -1 on Windows (feature disabled there) */
public static int getSSTablePreempiveOpenIntervalInMB()
{
    // NOTE(review): method name carries a typo ("Preempive"); renaming would
    // break callers, so it is kept as-is.
    return FBUtilities.isWindows() ? -1 : conf.sstable_preemptive_open_interval_in_mb;
}

public static boolean getTrickleFsync()
{
    return conf.trickle_fsync;
}

public static int getTrickleFsyncIntervalInKb()
{
    return conf.trickle_fsync_interval_in_kb;
}

// --- Cache sizing / persistence settings ---

public static long getKeyCacheSizeInMB()
{
    return keyCacheSizeInMB;
}

public static long getIndexSummaryCapacityInMB()
{
    return indexSummaryCapacityInMB;
}

public static int getKeyCacheSavePeriod()
{
    return conf.key_cache_save_period;
}

public static void setKeyCacheSavePeriod(int keyCacheSavePeriod)
{
    conf.key_cache_save_period = keyCacheSavePeriod;
}

public static int getKeyCacheKeysToSave()
{
    return conf.key_cache_keys_to_save;
}

public static void setKeyCacheKeysToSave(int keyCacheKeysToSave)
{
    conf.key_cache_keys_to_save = keyCacheKeysToSave;
}

public static String getRowCacheClassName()
{
    return conf.row_cache_class_name;
}

public static long getRowCacheSizeInMB()
{
    return conf.row_cache_size_in_mb;
}

@VisibleForTesting
public static void setRowCacheSizeInMB(long val)
{
    conf.row_cache_size_in_mb = val;
}

public static int getRowCacheSavePeriod()
{
    return conf.row_cache_save_period;
}

public static void setRowCacheSavePeriod(int rowCacheSavePeriod)
{
    conf.row_cache_save_period = rowCacheSavePeriod;
}

public static int getRowCacheKeysToSave()
{
    return conf.row_cache_keys_to_save;
}

public static long getCounterCacheSizeInMB()
{
    return counterCacheSizeInMB;
}

public static void setRowCacheKeysToSave(int rowCacheKeysToSave)
{
    conf.row_cache_keys_to_save = rowCacheKeysToSave;
}

public static int getCounterCacheSavePeriod()
{
    return conf.counter_cache_save_period;
}

public static void setCounterCacheSavePeriod(int counterCacheSavePeriod)
{
    conf.counter_cache_save_period = counterCacheSavePeriod;
}

public static int getCounterCacheKeysToSave()
{
    return conf.counter_cache_keys_to_save;
}

public static void setCounterCacheKeysToSave(int counterCacheKeysToSave)
{
    conf.counter_cache_keys_to_save = counterCacheKeysToSave;
}

public static int getStreamingSocketTimeout()
{
    return conf.streaming_socket_timeout_in_ms;
}

// --- Locality / internode settings ---

public static String getLocalDataCenter()
{
    return localDC;
}

public static Comparator<InetAddress> getLocalComparator()
{
    return localComparator;
}

public static Config.InternodeCompression internodeCompression()
{
    return conf.internode_compression;
}

public static boolean getInterDCTcpNoDelay()
{
    return conf.inter_dc_tcp_nodelay;
}

public static SSTableFormat.Type getSSTableFormat()
{
    return sstable_format;
}
/**
 * Creates the memtable allocator pool selected by memtable_allocation_type,
 * sized from memtable_heap_space_in_mb / memtable_offheap_space_in_mb.
 */
public static MemtablePool getMemtableAllocatorPool()
{
    // MB -> bytes via a long shift (avoids int overflow).
    long heapLimit = ((long) conf.memtable_heap_space_in_mb) << 20;
    long offHeapLimit = ((long) conf.memtable_offheap_space_in_mb) << 20;
    switch (conf.memtable_allocation_type)
    {
        case unslabbed_heap_buffers:
            return new HeapPool(heapLimit, conf.memtable_cleanup_threshold, new ColumnFamilyStore.FlushLargestColumnFamily());
        case heap_buffers:
            // Slab pool with a zero off-heap component.
            return new SlabPool(heapLimit, 0, conf.memtable_cleanup_threshold, new ColumnFamilyStore.FlushLargestColumnFamily());
        case offheap_buffers:
            // Off-heap buffers require the ability to free direct buffers
            // eagerly; without a cleaner the node refuses to start.
            if (!FileUtils.isCleanerAvailable())
            {
                throw new IllegalStateException("Could not free direct byte buffer: offheap_buffers is not a safe memtable_allocation_type without this ability, please adjust your config. This feature is only guaranteed to work on an Oracle JVM. Refusing to start.");
            }
            return new SlabPool(heapLimit, offHeapLimit, conf.memtable_cleanup_threshold, new ColumnFamilyStore.FlushLargestColumnFamily());
        case offheap_objects:
            return new NativePool(heapLimit, offHeapLimit, conf.memtable_cleanup_threshold, new ColumnFamilyStore.FlushLargestColumnFamily());
        default:
            // Unreachable unless a new allocation type is added without a case here.
            throw new AssertionError();
    }
}
/** @return index summary resize interval, in minutes */
public static int getIndexSummaryResizeIntervalInMinutes()
{
    return conf.index_summary_resize_interval_in_minutes;
}
/**
 * Best-effort check for a large (64-bit) address space.
 */
public static boolean hasLargeAddressSpace()
{
    // We only really care whether the address space is large; a 64-bit data
    // model / architecture is the closest practical proxy.
    String dataModel = System.getProperty("sun.arch.data.model");
    if ("64".equals(dataModel))
    {
        return true;
    }
    if ("32".equals(dataModel))
    {
        return false;
    }
    // Data model property absent or unrecognised: fall back to os.arch.
    String osArch = System.getProperty("os.arch");
    return osArch.contains("64") || osArch.contains("sparcv9");
}
// --- Tracing TTLs and coalescing settings ---

public static int getTracetypeRepairTTL()
{
    return conf.tracetype_repair_ttl;
}

public static int getTracetypeQueryTTL()
{
    return conf.tracetype_query_ttl;
}

public static String getOtcCoalescingStrategy()
{
    return conf.otc_coalescing_strategy;
}

/** @return outbound TCP coalescing window, in microseconds */
public static int getOtcCoalescingWindow()
{
    return conf.otc_coalescing_window_us;
}

public static int getWindowsTimerInterval()
{
    return conf.windows_timer_interval;
}

// --- User-defined function (UDF) execution settings ---

public static boolean enableUserDefinedFunctions()
{
    return conf.enable_user_defined_functions;
}

public static boolean enableUserDefinedFunctionsThreads()
{
    return conf.enable_user_defined_functions_threads;
}

public static long getUserDefinedFunctionWarnTimeout()
{
    return conf.user_defined_function_warn_timeout;
}

public static void setUserDefinedFunctionWarnTimeout(long userDefinedFunctionWarnTimeout)
{
    conf.user_defined_function_warn_timeout = userDefinedFunctionWarnTimeout;
}

public static long getUserDefinedFunctionFailTimeout()
{
    return conf.user_defined_function_fail_timeout;
}

public static void setUserDefinedFunctionFailTimeout(long userDefinedFunctionFailTimeout)
{
    conf.user_defined_function_fail_timeout = userDefinedFunctionFailTimeout;
}

public static Config.UserFunctionTimeoutPolicy getUserFunctionTimeoutPolicy()
{
    return conf.user_function_timeout_policy;
}

public static void setUserFunctionTimeoutPolicy(Config.UserFunctionTimeoutPolicy userFunctionTimeoutPolicy)
{
    conf.user_function_timeout_policy = userFunctionTimeoutPolicy;
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.processor;
import org.apache.camel.CamelException;
import org.apache.camel.CamelExecutionException;
import org.apache.camel.ContextTestSupport;
import org.apache.camel.LoggingLevel;
import org.apache.camel.builder.RouteBuilder;
/**
* @version
*/
/**
 * Tests the logging-related options of the redelivery error handler
 * (logHandled, logRetryAttempted, logRetryStackTrace, logExhausted,
 * logStackTrace and the retry/exhausted log levels), both on onException
 * definitions and on the default error handler. The options must not change
 * routing semantics: handled exceptions still route the original body to
 * mock:handled, while unhandled exhausted exceptions still propagate back to
 * the producer.
 */
public class RedeliveryErrorHandlerLogHandledTest extends ContextTestSupport {

    @Override
    public boolean isUseRouteBuilder() {
        // Each test method registers its own routes and starts the context itself.
        return false;
    }

    /** onException with handled(true) and default logging options. */
    public void testRedeliveryErrorHandlerOnExceptionLogHandledDefault() throws Exception {
        context.addRoutes(new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                onException(IllegalArgumentException.class)
                    .maximumRedeliveries(3)
                    .redeliveryDelay(0)
                    .handled(true)
                    .to("mock:handled");

                from("direct:foo")
                    .throwException(new IllegalArgumentException("Damn"));
            }
        });
        context.start();

        // The exception is handled, so the original body reaches mock:handled.
        getMockEndpoint("mock:handled").expectedBodiesReceived("Hello World");

        template.sendBody("direct:foo", "Hello World");

        assertMockEndpointsSatisfied();
    }

    /** onException with logHandled(true) enabled. */
    public void testRedeliveryErrorHandlerOnExceptionLogHandled() throws Exception {
        context.addRoutes(new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                onException(IllegalArgumentException.class)
                    .maximumRedeliveries(3)
                    .redeliveryDelay(0)
                    .logHandled(true)
                    .handled(true)
                    .to("mock:handled");

                from("direct:foo")
                    .throwException(new IllegalArgumentException("Damn"));
            }
        });
        context.start();

        getMockEndpoint("mock:handled").expectedBodiesReceived("Hello World");

        template.sendBody("direct:foo", "Hello World");

        assertMockEndpointsSatisfied();
    }

    /** onException with logHandled and logRetryAttempted enabled. */
    public void testRedeliveryErrorHandlerOnExceptionLogRetryAttempted() throws Exception {
        context.addRoutes(new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                onException(IllegalArgumentException.class)
                    .maximumRedeliveries(3)
                    .redeliveryDelay(0)
                    .logHandled(true)
                    .logRetryAttempted(true)
                    .handled(true)
                    .to("mock:handled");

                from("direct:foo")
                    .throwException(new IllegalArgumentException("Damn"));
            }
        });
        context.start();

        getMockEndpoint("mock:handled").expectedBodiesReceived("Hello World");

        template.sendBody("direct:foo", "Hello World");

        assertMockEndpointsSatisfied();
    }

    /** Default error handler with logExhausted(false): exception still propagates. */
    public void testRedeliveryErrorHandlerDoNotLogExhausted() throws Exception {
        context.addRoutes(new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                errorHandler(defaultErrorHandler().logExhausted(false));

                from("direct:bar")
                    .throwException(new CamelException("Camel rocks"));
            }
        });
        context.start();

        getMockEndpoint("mock:handled").expectedMessageCount(0);

        try {
            template.sendBody("direct:bar", "Hello World");
            fail("Should have thrown an exception");
        } catch (CamelExecutionException e) {
            CamelException cause = assertIsInstanceOf(CamelException.class, e.getCause());
            assertEquals("Camel rocks", cause.getMessage());
        }

        assertMockEndpointsSatisfied();
    }

    /** Default error handler with default logging: exception still propagates. */
    public void testRedeliveryErrorHandlerLogExhaustedDefault() throws Exception {
        context.addRoutes(new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                errorHandler(defaultErrorHandler());

                from("direct:bar")
                    .throwException(new CamelException("Camel rocks"));
            }
        });
        context.start();

        getMockEndpoint("mock:handled").expectedMessageCount(0);

        try {
            template.sendBody("direct:bar", "Hello World");
            fail("Should have thrown an exception");
        } catch (CamelExecutionException e) {
            CamelException cause = assertIsInstanceOf(CamelException.class, e.getCause());
            assertEquals("Camel rocks", cause.getMessage());
        }

        assertMockEndpointsSatisfied();
    }

    /** Default error handler with every logging option set at once. */
    public void testRedeliveryErrorHandlerAllOptions() throws Exception {
        context.addRoutes(new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                errorHandler(defaultErrorHandler()
                    .redeliveryDelay(0)
                    .maximumRedeliveries(3)
                    .logExhausted(true).logHandled(true).logRetryStackTrace(true).logStackTrace(true)
                    .retryAttemptedLogLevel(LoggingLevel.WARN).retriesExhaustedLogLevel(LoggingLevel.ERROR));

                from("direct:bar")
                    .throwException(new CamelException("Camel rocks"));
            }
        });
        context.start();

        getMockEndpoint("mock:handled").expectedMessageCount(0);

        try {
            template.sendBody("direct:bar", "Hello World");
            fail("Should have thrown an exception");
        } catch (CamelExecutionException e) {
            CamelException cause = assertIsInstanceOf(CamelException.class, e.getCause());
            assertEquals("Camel rocks", cause.getMessage());
        }

        assertMockEndpointsSatisfied();
    }

    /** onException with every logging option set at once; exception handled. */
    public void testRedeliveryErrorHandlerOnExceptionAllOptions() throws Exception {
        context.addRoutes(new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                onException(IllegalArgumentException.class)
                    .redeliveryDelay(0)
                    .maximumRedeliveries(3)
                    .logHandled(true)
                    .logRetryAttempted(true)
                    .logRetryStackTrace(true)
                    .logExhausted(true)
                    .logStackTrace(true)
                    .handled(true)
                    .retryAttemptedLogLevel(LoggingLevel.WARN)
                    .retriesExhaustedLogLevel(LoggingLevel.ERROR)
                    .to("mock:handled");

                from("direct:foo")
                    .throwException(new IllegalArgumentException("Damn"));
            }
        });
        context.start();

        getMockEndpoint("mock:handled").expectedBodiesReceived("Hello World");

        template.sendBody("direct:foo", "Hello World");

        assertMockEndpointsSatisfied();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.wicket.protocol.http;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.util.Collection;
import java.util.TimeZone;
import jakarta.servlet.http.Cookie;
import org.apache.wicket.markup.html.pages.BrowserInfoPage;
import org.apache.wicket.request.IRequestParameters;
import org.apache.wicket.request.cycle.RequestCycle;
import org.apache.wicket.request.http.WebRequest;
import org.apache.wicket.util.io.IClusterable;
import org.apache.wicket.util.string.AppendingStringBuffer;
/**
* Description of various user agent (browser) properties. To fill the properties with values from
* the user agent you need to probe the browser using javascript and request header analysis. Wicket
* provides a default implementation of this in {@link BrowserInfoPage}.
* <p>
* A convenient way of letting Wicket do a sneaky redirect to {@link BrowserInfoPage} (and back
* again) is to put this in your Application's init method:
*
* <pre>
* getRequestCycleSettings().setGatherExtendedBrowserInfo(true);
* </pre>
*
* <p>
*
* WARNING: Be sure you think about the dangers of depending on information you pull from the client
* too much. They may be easily spoofed or inaccurate in other ways, and properties like window and
* browser size are all too easy to be used naively.
*
* @see BrowserInfoPage
* @author Frank Bille (frankbille)
*/
public class ClientProperties implements IClusterable
{
private static final long serialVersionUID = 1L;

// Browser window size; -1 means "not measured yet".
private int browserHeight = -1;
private int browserWidth = -1;

// navigator.* properties probed on the client.
private boolean navigatorCookieEnabled;
private boolean navigatorJavaEnabled;
private String navigatorAppCodeName;
private String navigatorAppName;
private String navigatorAppVersion;
private String navigatorLanguage;
private String navigatorPlatform;
private String navigatorUserAgent;

// The client's remote/IP address.
private String remoteAddress;

// Screen properties; -1 means "not measured yet".
private int screenColorDepth = -1;
private int screenHeight = -1;
private int screenWidth = -1;

// Raw time zone data reported by the client; parsed by getTimeZone().
private String utcDSTOffset;
private String utcOffset;
private String jsTimeZone;

// Hostname shown in the client's browser.
private String hostname;

// Whether JavaScript was detected as enabled on the client.
private boolean javaScriptEnabled;

/** Cached timezone for repeating calls to {@link #getTimeZone()} */
private transient TimeZone timeZone;
/**
 * @return The browser height at the time it was measured
 */
public int getBrowserHeight()
{
    return browserHeight;
}

/**
 * @return The browser width at the time it was measured
 */
public int getBrowserWidth()
{
    return browserWidth;
}

/**
 * @return The client's navigator.appCodeName property.
 */
public String getNavigatorAppCodeName()
{
    return navigatorAppCodeName;
}

/**
 * @return The client's navigator.appName property.
 */
public String getNavigatorAppName()
{
    return navigatorAppName;
}

/**
 * @return The client's navigator.appVersion property.
 */
public String getNavigatorAppVersion()
{
    return navigatorAppVersion;
}

/**
 * @return The client's navigator.language (or navigator.userLanguage) property.
 */
public String getNavigatorLanguage()
{
    return navigatorLanguage;
}

/**
 * @return The client's navigator.platform property.
 */
public String getNavigatorPlatform()
{
    return navigatorPlatform;
}

/**
 * @return The client's navigator.userAgent property.
 */
public String getNavigatorUserAgent()
{
    return navigatorUserAgent;
}

/**
 * @return The client's remote/ip address.
 */
public String getRemoteAddress()
{
    return remoteAddress;
}

/**
 * @return The clients hostname shown in the browser
 */
public String getHostname()
{
    return hostname;
}

/**
 * @return Color depth of the screen in bits (integer).
 */
public int getScreenColorDepth()
{
    return screenColorDepth;
}

/**
 * @return Height of the screen in pixels (integer).
 */
public int getScreenHeight()
{
    return screenHeight;
}

/**
 * @return Width of the screen in pixels (integer).
 */
public int getScreenWidth()
{
    return screenWidth;
}
/**
 * Get the client's time zone if that could be detected.
 * <p>
 * Resolution order: (1) the JavaScript-reported zone id (jsTimeZone) when the
 * JVM resolves it exactly; (2) the raw UTC offset, optionally refined with the
 * DST offset to guess a concrete zone id whose DST savings match. The result
 * is cached in {@link #timeZone}. The previously duplicated offset-parsing
 * code is factored out into {@link #offsetToTimeZone(String)}.
 *
 * @return The client's time zone
 */
public TimeZone getTimeZone()
{
    if (timeZone == null && jsTimeZone != null)
    {
        // Trust the reported zone id only if the JVM resolves it exactly
        // (TimeZone.getTimeZone falls back to GMT for unknown ids).
        TimeZone temptimeZone = TimeZone.getTimeZone(jsTimeZone);
        if (jsTimeZone.equals(temptimeZone.getID()))
        {
            timeZone = temptimeZone;
        }
    }
    if (timeZone == null)
    {
        String utc = getUtcOffset();
        if (utc != null)
        {
            timeZone = offsetToTimeZone(utc);

            String dstOffset = getUtcDSTOffset();
            if (timeZone != null && dstOffset != null)
            {
                TimeZone dstTimeZone = offsetToTimeZone(dstOffset);

                // If the DST zone (1 July) has a different offset than the
                // raw zone (1 January), try to combine the two by finding a
                // concrete zone id with matching DST savings.
                if (dstTimeZone != null &&
                    dstTimeZone.getRawOffset() != timeZone.getRawOffset())
                {
                    int dstSaving = Math.abs(dstTimeZone.getRawOffset() - timeZone.getRawOffset());
                    int rawOffset = Math.min(dstTimeZone.getRawOffset(), timeZone.getRawOffset());
                    for (String availableID : TimeZone.getAvailableIDs(rawOffset))
                    {
                        TimeZone zone = TimeZone.getTimeZone(availableID);
                        if (zone.getDSTSavings() == dstSaving)
                        {
                            // This is a best guess... still the start and end of the DST should
                            // be needed to know to be completely correct, or better yet
                            // not just the GMT offset but the TimeZone ID should be transfered
                            // from the browser.
                            timeZone = zone;
                            break;
                        }
                    }
                }
            }
        }
    }
    return timeZone;
}

/**
 * Converts a client-reported UTC offset string into a fixed-offset GMT time
 * zone. Whether the browser reports the offset in integral ("2", "-1") or
 * decimal ("+2.5", "-1.2") form is platform dependent; both are handled.
 *
 * @param offset
 *            offset in hours, optionally with a fractional part
 * @return the corresponding GMT time zone
 */
private static TimeZone offsetToTimeZone(String offset)
{
    int dotPos = offset.indexOf('.');
    if (dotPos >= 0)
    {
        String hours = offset.substring(0, dotPos);
        String fraction = offset.substring(dotPos + 1);
        if (hours.startsWith("+"))
        {
            hours = hours.substring(1);
        }
        int offsetHours = Integer.parseInt(hours);
        // A single fractional digit is tenths of an hour: digit * 6 minutes.
        // NOTE(review): more than one fractional digit would be mis-scaled
        // here — behavior preserved from the original implementation.
        int offsetMins = (int)(Double.parseDouble(fraction) * 6);

        // Build a "GMT+h:mm" style id that TimeZone.getTimeZone can parse.
        AppendingStringBuffer sb = new AppendingStringBuffer("GMT");
        sb.append(offsetHours > 0 ? '+' : '-');
        sb.append(Math.abs(offsetHours));
        sb.append(':');
        if (offsetMins < 10)
        {
            sb.append('0');
        }
        sb.append(offsetMins);
        return TimeZone.getTimeZone(sb.toString());
    }

    int hours = Integer.parseInt(offset);
    String magnitude = hours < 0 ? offset.substring(1) : offset;
    return TimeZone.getTimeZone("GMT" + ((hours > 0) ? '+' : '-') + magnitude);
}
/**
 * Raw DST offset string reported by the client; consumed by getTimeZone().
 *
 * @return The client's time DST offset from UTC in hours (note: if you do this yourself, use
 *         'new Date(new Date().getFullYear(), 0, 6, 0, 0, 0, 0).getTimezoneOffset() / -60'
 *         (note the -)).
 */
public String getUtcDSTOffset()
{
    return utcDSTOffset;
}

/**
 * Raw UTC offset string reported by the client; consumed by getTimeZone().
 *
 * @return The client's time offset from UTC in hours (note: if you do this yourself, use 'new
 *         Date(new Date().getFullYear(), 0, 1, 0, 0, 0, 0).getTimezoneOffset() / -60' (note the
 *         -)).
 */
public String getUtcOffset()
{
    return utcOffset;
}

/**
 * Flag indicating support of JavaScript in the browser.
 *
 * @return True if JavaScript is enabled
 */
public boolean isJavaScriptEnabled()
{
    return javaScriptEnabled;
}
/**
 * Returns the client's navigator.cookieEnabled property. When the flag is
 * still false and a request cycle is active, additionally sniffs the current
 * request: the presence of at least one cookie implies cookies are enabled,
 * and the result is cached in the flag.
 *
 * @return The client's navigator.cookieEnabled property.
 */
public boolean isNavigatorCookieEnabled()
{
    RequestCycle cycle = RequestCycle.get();
    if (!navigatorCookieEnabled && cycle != null)
    {
        WebRequest request = (WebRequest)cycle.getRequest();
        Collection<Cookie> cookies = request.getCookies();
        navigatorCookieEnabled = cookies != null && !cookies.isEmpty();
    }
    return navigatorCookieEnabled;
}
/**
 * @return The client's navigator.javaEnabled property.
 */
public boolean isNavigatorJavaEnabled()
{
    return navigatorJavaEnabled;
}
/**
 * @param browserHeight
 *            The height of the browser, in pixels (-1 when unknown; see {@code read}).
 */
public void setBrowserHeight(int browserHeight)
{
    this.browserHeight = browserHeight;
}
/**
 * @param browserWidth
 *            The browser width, in pixels (-1 when unknown; see {@code read}).
 */
public void setBrowserWidth(int browserWidth)
{
    this.browserWidth = browserWidth;
}
/**
 * @param cookiesEnabled
 *            The client's navigator.cookieEnabled property.
 */
public void setNavigatorCookieEnabled(boolean cookiesEnabled)
{
    this.navigatorCookieEnabled = cookiesEnabled;
}
/**
 * @param navigatorJavaEnabled
 *            The client's navigator.javaEnabled property.
 */
public void setNavigatorJavaEnabled(boolean navigatorJavaEnabled)
{
    this.navigatorJavaEnabled = navigatorJavaEnabled;
}
/**
 * @param navigatorAppCodeName
 *            The client's navigator.appCodeName property.
 */
public void setNavigatorAppCodeName(String navigatorAppCodeName)
{
    this.navigatorAppCodeName = navigatorAppCodeName;
}
/**
 * @param navigatorAppName
 *            The client's navigator.appName property.
 */
public void setNavigatorAppName(String navigatorAppName)
{
    this.navigatorAppName = navigatorAppName;
}
/**
 * @param navigatorAppVersion
 *            The client's navigator.appVersion property.
 */
public void setNavigatorAppVersion(String navigatorAppVersion)
{
    this.navigatorAppVersion = navigatorAppVersion;
}
/**
 * @param navigatorLanguage
 *            The client's navigator.language (or navigator.userLanguage) property.
 */
public void setNavigatorLanguage(String navigatorLanguage)
{
    this.navigatorLanguage = navigatorLanguage;
}
/**
 * @param navigatorPlatform
 *            The client's navigator.platform property.
 */
public void setNavigatorPlatform(String navigatorPlatform)
{
    this.navigatorPlatform = navigatorPlatform;
}
/**
 * @param navigatorUserAgent
 *            The client's navigator.userAgent property.
 */
public void setNavigatorUserAgent(String navigatorUserAgent)
{
    this.navigatorUserAgent = navigatorUserAgent;
}
/**
 * @param remoteAddress
 *            The client's remote/ip address.
 */
public void setRemoteAddress(String remoteAddress)
{
    this.remoteAddress = remoteAddress;
}
/**
 * @param hostname
 *            the hostname shown in the browser.
 */
public void setHostname(String hostname)
{
    this.hostname = hostname;
}
/**
 * @param screenColorDepth
 *            Color depth of the screen in bits (integer; -1 when unknown, see {@code read}).
 */
public void setScreenColorDepth(int screenColorDepth)
{
    this.screenColorDepth = screenColorDepth;
}
/**
 * @param screenHeight
 *            Height of the screen in pixels (integer; -1 when unknown, see {@code read}).
 */
public void setScreenHeight(int screenHeight)
{
    this.screenHeight = screenHeight;
}
/**
 * @param screenWidth
 *            Width of the screen in pixels (integer; -1 when unknown, see {@code read}).
 */
public void setScreenWidth(int screenWidth)
{
    this.screenWidth = screenWidth;
}
/**
 * Sets time zone.
 *
 * @param timeZone
 *            the client's time zone (overrides any value derived from the UTC offsets).
 */
public void setTimeZone(TimeZone timeZone)
{
    this.timeZone = timeZone;
}
/**
 * @param utcDSTOffset
 *            The client's DST time offset from UTC in hours (see {@code getUtcDSTOffset}).
 */
public void setUtcDSTOffset(String utcDSTOffset)
{
    this.utcDSTOffset = utcDSTOffset;
}
/**
 * @param utcOffset
 *            The client's time offset from UTC in hours (note: if you do this yourself, use
 *            'new Date().getTimezoneOffset() / -60' (note the -)).
 */
public void setUtcOffset(String utcOffset)
{
    this.utcOffset = utcOffset;
}
/**
 * @param jsTimeZone
 *            the time-zone string reported by the browser's JavaScript (if any).
 */
public void setJsTimeZone(String jsTimeZone)
{
    this.jsTimeZone = jsTimeZone;
}
/**
 * @param javaScriptEnabled
 *            is JavaScript supported in the browser
 */
public void setJavaScriptEnabled(boolean javaScriptEnabled) {
    this.javaScriptEnabled = javaScriptEnabled;
}
/**
 * Debug representation: reflectively dumps every non-static, non-transient, non-synthetic
 * field of this class and its superclasses (up to, but excluding, {@link Object}) as one
 * {@code name=value} pair per line. {@code int} fields still at the -1 "unset" sentinel
 * and null values are omitted.
 */
@Override
public String toString()
{
    StringBuilder b = new StringBuilder();
    Class<?> clazz = getClass();
    while (clazz != Object.class) {
        Field[] fields = clazz.getDeclaredFields();
        for (Field field : fields)
        {
            // Ignore fields that are not client properties.
            if (Modifier.isStatic(field.getModifiers()) ||
                Modifier.isTransient(field.getModifiers()) ||
                field.isSynthetic())
            {
                continue;
            }
            field.setAccessible(true);
            Object value;
            try
            {
                value = field.get(this);
            }
            catch (IllegalArgumentException | IllegalAccessException e)
            {
                // Both are programming errors here; rethrow unchecked with the cause preserved.
                throw new RuntimeException(e);
            }
            // int fields default to -1 meaning "not set"; treat them as absent.
            if (field.getType().equals(Integer.TYPE))
            {
                if (Integer.valueOf(-1).equals(value))
                {
                    value = null;
                }
            }
            if (value != null)
            {
                b.append(field.getName());
                b.append('=');
                b.append(value);
                b.append('\n');
            }
        }
        clazz = clazz.getSuperclass();
    }
    return b.toString();
}
/**
 * Read parameters.
 *
 * Populates this object from the form parameters posted back by the browser-info script.
 * String properties default to "N/A", numeric properties to -1 ("unknown"), and the
 * time-zone related values to null when absent.
 *
 * @param parameters
 *            parameters sent from browser
 */
public void read(IRequestParameters parameters)
{
    setNavigatorAppCodeName(parameters.getParameterValue("navigatorAppCodeName").toString("N/A"));
    setNavigatorAppName(parameters.getParameterValue("navigatorAppName").toString("N/A"));
    setNavigatorAppVersion(parameters.getParameterValue("navigatorAppVersion").toString("N/A"));
    setNavigatorCookieEnabled(parameters.getParameterValue("navigatorCookieEnabled").toBoolean(false));
    setNavigatorJavaEnabled(parameters.getParameterValue("navigatorJavaEnabled").toBoolean(false));
    setNavigatorLanguage(parameters.getParameterValue("navigatorLanguage").toString("N/A"));
    setNavigatorPlatform(parameters.getParameterValue("navigatorPlatform").toString("N/A"));
    setNavigatorUserAgent(parameters.getParameterValue("navigatorUserAgent").toString("N/A"));
    setScreenWidth(parameters.getParameterValue("screenWidth").toInt(-1));
    setScreenHeight(parameters.getParameterValue("screenHeight").toInt(-1));
    setScreenColorDepth(parameters.getParameterValue("screenColorDepth").toInt(-1));
    setUtcOffset(parameters.getParameterValue("utcOffset").toString(null));
    setUtcDSTOffset(parameters.getParameterValue("utcDSTOffset").toString(null));
    setJsTimeZone(parameters.getParameterValue("jsTimeZone").toString(null));
    setBrowserWidth(parameters.getParameterValue("browserWidth").toInt(-1));
    setBrowserHeight(parameters.getParameterValue("browserHeight").toInt(-1));
    setHostname(parameters.getParameterValue("hostname").toString("N/A"));
}
}
| |
package tld.testmod.network.client;
import com.esotericsoftware.kryo.Kryo;
import com.esotericsoftware.kryo.io.Input;
import com.esotericsoftware.kryo.io.Output;
import com.esotericsoftware.minlog.Log;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.network.PacketBuffer;
import net.minecraftforge.common.capabilities.Capability;
import net.minecraftforge.common.capabilities.CapabilityInject;
import net.minecraftforge.fml.relauncher.Side;
import tld.testmod.common.storage.capability.IMusicDB;
import tld.testmod.common.storage.capability.MusicDBCapability;
import tld.testmod.common.storage.capability.SyncType;
import tld.testmod.common.storage.models.*;
import tld.testmod.network.AbstractMessage;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.List;
/**
 * Client-bound packet that synchronizes the music database capability from server to
 * client. Depending on {@link SyncType} it carries either the full capability NBT, the
 * session-open flag, or exactly one of the model tables (play lists, play list entries,
 * songs, tags, users) serialized with Kryo into a byte array.
 */
public class SyncMusicDBMessage extends AbstractMessage.AbstractClientMessage<SyncMusicDBMessage>
{
    @CapabilityInject(IMusicDB.class)
    private static final Capability<IMusicDB> MUSIC_DB_CAP = MusicDBCapability.nonNullInjected();

    private SyncType syncType;
    private NBTTagCompound data;
    private boolean session;
    private byte[] byteBuffer;
    // Kryo instances are not documented as thread-safe, so each thread gets its own
    // pre-registered instance from the thread-local pool below.
    private final Kryo kryo = kryoThreadPool.get();

    // What's better Object[] array or List<Object> ?? Both are possible
    private PlayList[] playLists;
    private PlayListEntry[] playListEntries;
    private Song[] songs;
    private Tag[] tags;
    // private List<User> users;
    private User[] users;

    public SyncMusicDBMessage() { /* Required by the PacketDispatcher */ }

    /**
     * Builds a packet snapshotting one slice of the given capability.
     *
     * @param musicDB  source capability to snapshot
     * @param syncType which slice of the database this packet carries
     */
    public SyncMusicDBMessage(IMusicDB musicDB, SyncType syncType)
    {
        this.syncType = syncType;
        switch (syncType)
        {
            case ALL_NBT:
                // writeNBT produces the full compound directly; pre-allocating one was a dead store.
                this.data = (NBTTagCompound) MUSIC_DB_CAP.writeNBT(musicDB, null);
                break;
            case SESSION_STATE:
                this.session = musicDB.isSessionOpen();
                break;
            case PLAY_LISTS:
                playLists = musicDB.getPlaylists();
                break;
            case PLAY_LIST_ENTRIES:
                playListEntries = musicDB.getPlayListEntries();
                break;
            case SONGS:
                songs = musicDB.getSongs();
                break;
            case TAGS:
                tags = musicDB.getTags();
                break;
            case USERS:
                //users = Arrays.asList(musicDB.getUsers());
                users = musicDB.getUsers();
                break;
            default:
        }
    }

    /** Deserializes the field matching {@link #syncType}; all other fields stay null/default. */
    @Override
    protected void read(PacketBuffer buffer) throws IOException
    {
        syncType = buffer.readEnumValue(SyncType.class);
        switch (syncType)
        {
            case ALL_NBT:
                this.data = buffer.readCompoundTag();
                break;
            case SESSION_STATE:
                session = buffer.readBoolean();
                break;
            case PLAY_LISTS:
                playLists = readArray(buffer, PlayList[].class);
                break;
            case PLAY_LIST_ENTRIES:
                playListEntries = readArray(buffer, PlayListEntry[].class);
                break;
            case SONGS:
                songs = readArray(buffer, Song[].class);
                break;
            case TAGS:
                tags = readArray(buffer, Tag[].class);
                break;
            case USERS:
                //users = readList(buffer, Arrays.asList( new User[1] ).getClass());
                users = readArray(buffer, User[].class);
                break;
            default:
        }
    }

    /** Writes the sync type followed by the single field that type selects (mirror of read). */
    @Override
    protected void write(PacketBuffer buffer)
    {
        buffer.writeEnumValue(syncType);
        switch (syncType)
        {
            case ALL_NBT:
                buffer.writeCompoundTag(this.data);
                break;
            case SESSION_STATE:
                buffer.writeBoolean(session);
                break;
            case PLAY_LISTS:
                writeArray(buffer, playLists);
                break;
            case PLAY_LIST_ENTRIES:
                writeArray(buffer, playListEntries);
                break;
            case SONGS:
                writeArray(buffer, songs);
                break;
            case TAGS:
                writeArray(buffer, tags);
                break;
            case USERS:
                writeArray(buffer, users);
                break;
            default:
        }
    }

    /** Applies the received slice to the player's music DB capability, if present. */
    @Override
    public void process(EntityPlayer player, Side side)
    {
        if (player.hasCapability(MUSIC_DB_CAP, null))
        {
            final IMusicDB musicDB = player.getCapability(MUSIC_DB_CAP, null);
            if (musicDB != null)
            {
                switch (syncType)
                {
                    case ALL_NBT:
                        MUSIC_DB_CAP.readNBT(musicDB, null, data);
                        break;
                    case SESSION_STATE:
                        if (session)
                            musicDB.openSession();
                        else
                            musicDB.closeSession();
                        break;
                    case PLAY_LISTS:
                        musicDB.setPlaylists(playLists);
                        break;
                    case PLAY_LIST_ENTRIES:
                        musicDB.setPlayListEntries(playListEntries);
                        break;
                    case SONGS:
                        musicDB.setSongs(songs);
                        break;
                    case TAGS:
                        musicDB.setTags(tags);
                        break;
                    case USERS:
                        musicDB.setUsers(users);
                        break;
                    default:
                }
            }
        }
    }

    // Currently unused List-based alternative to readArray; kept while the author decides
    // between List<T> and T[] transport (see comment above the array fields).
    @SuppressWarnings("unchecked")
    private <T> List<T> readList(PacketBuffer buffer, Class<?> clazz)
    {
        List<T> list;
        // Deserialize data object from a byte array
        byteBuffer = buffer.readByteArray();
        ByteArrayInputStream bis = new ByteArrayInputStream(byteBuffer) ;
        Input input = new Input(bis);
        list = (List<T>) kryo.readObject(input, clazz);
        input.close();
        return list;
    }

    // Currently unused List-based alternative to writeArray (see readList).
    private <T> void writeList(PacketBuffer buffer, List<T> list)
    {
        // Serialize data object to a byte array
        ByteArrayOutputStream bos = new ByteArrayOutputStream() ;
        Output output = new Output(bos) ;
        kryo.writeObject(output, list);
        // Get the bytes of the serialized object
        byteBuffer = output.toBytes();
        output.close();
        buffer.writeByteArray(byteBuffer);
    }

    // At the moment I prefer the Object[] array generic methods. Their use is cleaner.
    /** Kryo-deserializes an array of the given class from the packet's byte-array payload. */
    private <T> T[] readArray(PacketBuffer buffer, Class<T[]> clazz)
    {
        T[] array;
        // Deserialize data object from a byte array
        byteBuffer = buffer.readByteArray();
        ByteArrayInputStream bis = new ByteArrayInputStream(byteBuffer) ;
        Input input = new Input(bis);
        array = kryo.readObject(input, clazz);
        input.close();
        return array;
    }

    /** Kryo-serializes the given array into the packet as a byte-array payload. */
    private <T> void writeArray(PacketBuffer buffer, T[] array)
    {
        // Serialize data object to a byte array
        ByteArrayOutputStream bos = new ByteArrayOutputStream() ;
        Output output = new Output(bos) ;
        kryo.writeObject(output, array);
        // Get the bytes of the serialized object
        byteBuffer = output.toBytes();
        output.close();
        buffer.writeByteArray(byteBuffer);
    }

    // One pre-registered Kryo per thread; registration order must match on both sides
    // of the connection for the serialized ids to line up.
    static private final ThreadLocal<Kryo> kryoThreadPool = ThreadLocal.withInitial(() ->
    {
        // TODO: Remove Log.DEBUG or make Log.WARN for production use
        Log.DEBUG();
        Kryo kryo = new Kryo();
        kryo.register(PlayList.class);
        kryo.register(PlayListEntry.class);
        kryo.register(Song.class);
        kryo.register(Tag.class);
        kryo.register(User.class);
        kryo.register(PlayList[].class);
        kryo.register(PlayListEntry[].class);
        kryo.register(Song[].class);
        kryo.register(Tag[].class);
        kryo.register(User[].class);
        kryo.register(Object[].class);
        return kryo;
    });
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.eagle.service.metadata.resource;
import com.google.common.base.Preconditions;
import com.google.inject.Inject;
import org.apache.commons.lang3.StringUtils;
import org.apache.eagle.alert.coordination.model.Kafka2TupleMetadata;
import org.apache.eagle.alert.coordination.model.ScheduleState;
import org.apache.eagle.alert.coordination.model.internal.PolicyAssignment;
import org.apache.eagle.alert.coordination.model.internal.Topology;
import org.apache.eagle.alert.engine.coordinator.*;
import org.apache.eagle.alert.engine.interpreter.PolicyInterpreter;
import org.apache.eagle.alert.engine.interpreter.PolicyParseResult;
import org.apache.eagle.alert.engine.interpreter.PolicyValidationResult;
import org.apache.eagle.alert.engine.model.AlertPublishEvent;
import org.apache.eagle.alert.engine.publisher.PublishementTypeLoader;
import org.apache.eagle.alert.metadata.IMetadataDao;
import org.apache.eagle.alert.metadata.impl.MetadataDaoFactory;
import org.apache.eagle.alert.metadata.resource.Models;
import org.apache.eagle.alert.metadata.resource.OpResult;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.validation.Valid;
import javax.ws.rs.*;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
 * REST endpoints for managing alert-engine metadata: streaming clusters, stream
 * definitions, datasources, policies, publishments, schedule states, policy
 * assignments, topologies and alert publish events. All operations delegate to
 * the configured {@link IMetadataDao}.
 *
 * @since Apr 11, 2016.
 */
@Path("/metadata")
@Produces("application/json")
@Consumes("application/json")
public class MetadataResource {
    private static final Logger LOG = LoggerFactory.getLogger(MetadataResource.class);

    private final IMetadataDao dao;

    /** Fallback constructor resolving the DAO from the factory (non-DI deployments). */
    public MetadataResource() {
        this.dao = MetadataDaoFactory.getInstance().getMetadataDao();
    }

    @Inject
    public MetadataResource(IMetadataDao dao) {
        this.dao = dao;
    }

    @Path("/clusters")
    @GET
    public List<StreamingCluster> listClusters() {
        return dao.listClusters();
    }

    @Path("/clear")
    @POST
    public OpResult clear() {
        return dao.clear();
    }

    @Path("/clear/schedulestates")
    @POST
    public OpResult clearScheduleStates(int capacity) {
        return dao.clearScheduleState(capacity);
    }

    @Path("/export")
    @POST
    public Models export() {
        return dao.export();
    }

    @Path("/import")
    @POST
    public OpResult importModels(Models model) {
        return dao.importModels(model);
    }

    @Path("/clusters")
    @POST
    public OpResult addCluster(StreamingCluster cluster) {
        return dao.addCluster(cluster);
    }

    @Path("/clusters/batch")
    @POST
    public List<OpResult> addClusters(List<StreamingCluster> clusters) {
        return clusters.stream().map(dao::addCluster).collect(Collectors.toList());
    }

    @Path("/clusters/{clusterId}")
    @DELETE
    public OpResult removeCluster(@PathParam("clusterId") String clusterId) {
        return dao.removeCluster(clusterId);
    }

    @Path("/clusters")
    @DELETE
    public List<OpResult> removeClusters(List<String> clusterIds) {
        return clusterIds.stream().map(dao::removeCluster).collect(Collectors.toList());
    }

    /**
     * Lists stream definitions, optionally restricted to one site.
     *
     * @param siteId optional site filter; null returns all streams
     */
    @Path("/streams")
    @GET
    public List<StreamDefinition> listStreams(@QueryParam("siteId") String siteId) {
        if (siteId == null) {
            return dao.listStreams();
        } else {
            return dao.listStreams().stream()
                .filter((streamDefinition -> streamDefinition.getSiteId().equals(siteId)))
                .collect(Collectors.toList());
        }
    }

    @Path("/streams")
    @POST
    public OpResult createStream(StreamDefinition stream) {
        return dao.createStream(stream);
    }

    /**
     * Creates a stream definition together with its backing datasource in one call.
     * Succeeds only if both inserts succeed; otherwise both failure messages are reported.
     */
    @Path("/streams/create")
    @POST
    public OpResult createStream(StreamDefinitionWrapper stream) {
        Preconditions.checkNotNull(stream.getStreamDefinition(),"Stream definition is null");
        Preconditions.checkNotNull(stream.getStreamSource(),"Stream source is null");
        stream.validateAndEnsureDefault();
        OpResult createStreamResult = dao.createStream(stream.getStreamDefinition());
        OpResult createDataSourceResult = dao.addDataSource(stream.getStreamSource());
        // TODO: Check kafka topic exist or not.
        if (createStreamResult.code == OpResult.SUCCESS
            && createDataSourceResult.code == OpResult.SUCCESS) {
            return OpResult.success("Successfully create stream "
                + stream.getStreamDefinition().getStreamId()
                + ", and datasource "
                + stream.getStreamSource().getName());
        } else {
            // Bug fix: previously joined createDataSourceResult.message twice,
            // dropping the stream-creation failure message.
            return OpResult.fail("Error: "
                + StringUtils.join(new String[]{createStreamResult.message, createDataSourceResult.message},","));
        }
    }

    @Path("/streams/batch")
    @POST
    public List<OpResult> addStreams(List<StreamDefinition> streams) {
        return streams.stream().map(dao::createStream).collect(Collectors.toList());
    }

    @Path("/streams/{streamId}")
    @DELETE
    public OpResult removeStream(@PathParam("streamId") String streamId) {
        return dao.removeStream(streamId);
    }

    @Path("/streams")
    @DELETE
    public List<OpResult> removeStreams(List<String> streamIds) {
        return streamIds.stream().map(dao::removeStream).collect(Collectors.toList());
    }

    @Path("/datasources")
    @GET
    public List<Kafka2TupleMetadata> listDataSources() {
        return dao.listDataSources();
    }

    @Path("/datasources")
    @POST
    public OpResult addDataSource(Kafka2TupleMetadata dataSource) {
        return dao.addDataSource(dataSource);
    }

    @Path("/datasources/batch")
    @POST
    public List<OpResult> addDataSources(List<Kafka2TupleMetadata> datasources) {
        return datasources.stream().map(dao::addDataSource).collect(Collectors.toList());
    }

    @Path("/datasources/{datasourceId}")
    @DELETE
    public OpResult removeDataSource(@PathParam("datasourceId") String datasourceId) {
        return dao.removeDataSource(datasourceId);
    }

    @Path("/datasources")
    @DELETE
    public List<OpResult> removeDataSources(List<String> datasourceIds) {
        return datasourceIds.stream().map(dao::removeDataSource).collect(Collectors.toList());
    }

    @Path("/policies")
    @GET
    public List<PolicyDefinition> listPolicies(@QueryParam("siteId") String siteId) {
        if (siteId != null) {
            return dao.getPoliciesBySiteId(siteId);
        } else {
            return dao.listPolicies();
        }
    }

    /** Validates the policy against all known streams before persisting it. */
    @Path("/policies")
    @POST
    public OpResult addPolicy(@Valid PolicyDefinition policy) {
        PolicyValidationResult validationResult = this.validatePolicy(policy);
        if (validationResult.isSuccess()) {
            return dao.addPolicy(policy);
        } else {
            return OpResult.fail(validationResult.getMessage());
        }
    }

    @Path("/policies/validate")
    @POST
    public PolicyValidationResult validatePolicy(PolicyDefinition policy) {
        // Plain loop (not Collectors.toMap) so a duplicate streamId overwrites
        // instead of throwing, matching previous behavior.
        Map<String, StreamDefinition> allDefinitions = new HashMap<>();
        for (StreamDefinition definition : dao.listStreams()) {
            allDefinitions.put(definition.getStreamId(), definition);
        }
        return PolicyInterpreter.validate(policy, allDefinitions);
    }

    @Path("/policies/parse")
    @POST
    public PolicyParseResult parsePolicy(String policyDefinition) {
        return PolicyInterpreter.parse(policyDefinition);
    }

    @Path("/policies/batch")
    @POST
    public List<OpResult> addPolicies(List<PolicyDefinition> policies) {
        return policies.stream().map(dao::addPolicy).collect(Collectors.toList());
    }

    @Path("/policies/{policyId}")
    @DELETE
    public OpResult removePolicy(@PathParam("policyId") String policyId) {
        return dao.removePolicy(policyId);
    }

    @Path("/policies/{policyId}/alerts")
    @GET
    public List<AlertPublishEvent> getAlertPublishEventByPolicyId(@PathParam("policyId") String policyId,
                                                                  @QueryParam("size") int size) {
        return dao.getAlertPublishEventsByPolicyId(policyId, size);
    }

    @Path("/policies/{policyId}/publishments")
    @GET
    public List<Publishment> getPolicyPublishments(@PathParam("policyId") String policyId) {
        return dao.getPublishmentsByPolicyId(policyId);
    }

    @Path("/policies/{policyId}/publishments")
    @POST
    public OpResult addPublishmentsToPolicy(@PathParam("policyId") String policyId, List<String> publishmentIds) {
        return dao.addPublishmentsToPolicy(policyId, publishmentIds);
    }

    @Path("/policies/{policyId}")
    @GET
    public PolicyDefinition getPolicyById(@PathParam("policyId") String policyId) {
        Preconditions.checkNotNull(policyId, "policyId");
        return dao.getPolicyById(policyId);
    }

    /** Loads the policy, flips its status, and re-saves it through the validating addPolicy. */
    @Path("/policies/{policyId}/status/{status}")
    @POST
    public OpResult updatePolicyStatusByID(@PathParam("policyId") String policyId, @PathParam("status") PolicyDefinition.PolicyStatus status) {
        OpResult result = new OpResult();
        try {
            PolicyDefinition policyDefinition = getPolicyById(policyId);
            policyDefinition.setPolicyStatus(status);
            OpResult updateResult = addPolicy(policyDefinition);
            result.code = updateResult.code;
            if (result.code == OpResult.SUCCESS) {
                result.message = "Successfully updated status of " + policyId + " as " + status;
                LOG.info(result.message);
            } else {
                result.message = updateResult.message;
                LOG.error(result.message);
            }
        } catch (Exception e) {
            LOG.error("Error: " + e.getMessage(), e);
            result.code = OpResult.FAILURE;
            result.message = e.getMessage();
        }
        return result;
    }

    @Path("/policies")
    @DELETE
    public List<OpResult> removePolicies(List<String> policies) {
        return policies.stream().map(dao::removePolicy).collect(Collectors.toList());
    }

    @Path("/publishments")
    @GET
    public List<Publishment> listPublishment() {
        return dao.listPublishment();
    }

    @Path("/publishments")
    @POST
    public OpResult addPublishment(Publishment publishment) {
        return dao.addPublishment(publishment);
    }

    @Path("/publishments/batch")
    @POST
    public List<OpResult> addPublishments(List<Publishment> publishments) {
        return publishments.stream().map(dao::addPublishment).collect(Collectors.toList());
    }

    @Path("/publishments/{name}")
    @DELETE
    public OpResult removePublishment(@PathParam("name") String pubId) {
        return dao.removePublishment(pubId);
    }

    @Path("/publishments")
    @DELETE
    public List<OpResult> removePublishments(List<String> pubIds) {
        return pubIds.stream().map(dao::removePublishment).collect(Collectors.toList());
    }

    /** Publishment types are loaded from the classpath, not from the DAO. */
    @Path("/publishmentTypes")
    @GET
    public List<PublishmentType> listPublishmentType() {
        return PublishementTypeLoader.loadPublishmentTypes();
    }

    @Path("/publishmentTypes")
    @POST
    @Deprecated
    public OpResult addPublishmentType(PublishmentType publishmentType) {
        return dao.addPublishmentType(publishmentType);
    }

    @Path("/publishmentTypes/batch")
    @POST
    @Deprecated
    public List<OpResult> addPublishmentTypes(List<PublishmentType> publishmentTypes) {
        return publishmentTypes.stream().map(dao::addPublishmentType).collect(Collectors.toList());
    }

    @Path("/publishmentTypes/{name}")
    @DELETE
    @Deprecated
    public OpResult removePublishmentType(@PathParam("name") String name) {
        return dao.removePublishmentType(name);
    }

    @Path("/publishmentTypes")
    @DELETE
    @Deprecated
    public List<OpResult> removePublishmentTypes(List<String> pubTypes) {
        return pubTypes.stream().map(dao::removePublishmentType).collect(Collectors.toList());
    }

    @Path("/schedulestates/{versionId}")
    @GET
    public ScheduleState listScheduleState(@PathParam("versionId") String versionId) {
        return dao.getScheduleState(versionId);
    }

    @Path("/schedulestates")
    @GET
    public ScheduleState latestScheduleState() {
        return dao.getScheduleState();
    }

    @Path("/schedulestates")
    @POST
    public OpResult addScheduleState(ScheduleState state) {
        return dao.addScheduleState(state);
    }

    @Path("/assignments")
    @GET
    public List<PolicyAssignment> listAssignmenets() {
        return dao.listAssignments();
    }

    @Path("/assignments")
    @POST
    public OpResult addAssignmenet(PolicyAssignment pa) {
        return dao.addAssignment(pa);
    }

    @Path("/topologies")
    @GET
    public List<Topology> listTopologies() {
        return dao.listTopologies();
    }

    @Path("/topologies")
    @POST
    public OpResult addTopology(Topology t) {
        return dao.addTopology(t);
    }

    @Path("/topologies/batch")
    @POST
    public List<OpResult> addTopologies(List<Topology> topologies) {
        return topologies.stream().map(dao::addTopology).collect(Collectors.toList());
    }

    @Path("/alerts")
    @POST
    public OpResult addAlertPublishEvent(AlertPublishEvent event) {
        return dao.addAlertPublishEvent(event);
    }

    @Path("/alerts/batch")
    @POST
    public List<OpResult> addAlertPublishEvents(List<AlertPublishEvent> events) {
        return events.stream().map(dao::addAlertPublishEvent).collect(Collectors.toList());
    }

    @Path("/alerts")
    @GET
    public List<AlertPublishEvent> listAlertPublishEvents(@QueryParam("size") int size) {
        return dao.listAlertPublishEvent(size);
    }

    @Path("/alerts/{alertId}")
    @GET
    public AlertPublishEvent getAlertPublishEvent(@PathParam("alertId") String alertId) {
        return dao.getAlertPublishEvent(alertId);
    }

    @Path("/topologies/{topologyName}")
    @DELETE
    public OpResult removeTopology(@PathParam("topologyName") String topologyName) {
        return dao.removeTopology(topologyName);
    }

    @Path("/topologies")
    @DELETE
    public List<OpResult> removeTopologies(List<String> topologies) {
        return topologies.stream().map(dao::removeTopology).collect(Collectors.toList());
    }
}
| |
/*
* Copyright (c) 2015, Nordic Semiconductor
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this
* software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
* USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.nordicsemi.nrfUARTv2.activity;
import android.app.Service;
import android.bluetooth.BluetoothAdapter;
import android.bluetooth.BluetoothDevice;
import android.bluetooth.BluetoothGatt;
import android.bluetooth.BluetoothGattCallback;
import android.bluetooth.BluetoothGattCharacteristic;
import android.bluetooth.BluetoothGattDescriptor;
import android.bluetooth.BluetoothGattService;
import android.bluetooth.BluetoothManager;
import android.bluetooth.BluetoothProfile;
import android.content.Context;
import android.content.Intent;
import android.os.Binder;
import android.os.IBinder;
import android.support.v4.content.LocalBroadcastManager;
import android.util.Log;
import java.util.List;
import java.util.UUID;
/**
* Service for managing connection and data communication with a GATT server hosted on a
* given Bluetooth LE device.
*/
public class UartService extends Service {
private final static String TAG = UartService.class.getSimpleName();
private BluetoothManager mBluetoothManager;
private BluetoothAdapter mBluetoothAdapter;
private String mBluetoothDeviceAddress;
private BluetoothGatt mBluetoothGatt;
private int mConnectionState = STATE_DISCONNECTED;
private static final int STATE_DISCONNECTED = 0;
private static final int STATE_CONNECTING = 1;
private static final int STATE_CONNECTED = 2;
public final static String ACTION_GATT_CONNECTED =
"com.nordicsemi.nrfUART.ACTION_GATT_CONNECTED";
public final static String ACTION_GATT_DISCONNECTED =
"com.nordicsemi.nrfUART.ACTION_GATT_DISCONNECTED";
public final static String ACTION_GATT_SERVICES_DISCOVERED =
"com.nordicsemi.nrfUART.ACTION_GATT_SERVICES_DISCOVERED";
public final static String ACTION_DATA_AVAILABLE =
"com.nordicsemi.nrfUART.ACTION_DATA_AVAILABLE";
public final static String EXTRA_DATA =
"com.nordicsemi.nrfUART.EXTRA_DATA";
public final static String DEVICE_DOES_NOT_SUPPORT_UART =
"com.nordicsemi.nrfUART.DEVICE_DOES_NOT_SUPPORT_UART";
public static final UUID TX_POWER_UUID = UUID.fromString("00001804-0000-1000-8000-00805f9b34fb");
public static final UUID TX_POWER_LEVEL_UUID = UUID.fromString("00002a07-0000-1000-8000-00805f9b34fb");
public static final UUID CCCD = UUID.fromString("00002902-0000-1000-8000-00805f9b34fb");
public static final UUID FIRMWARE_REVISON_UUID = UUID.fromString("00002a26-0000-1000-8000-00805f9b34fb");
public static final UUID DIS_UUID = UUID.fromString("0000180a-0000-1000-8000-00805f9b34fb");
public static final UUID RX_SERVICE_UUID = UUID.fromString("6e400001-b5a3-f393-e0a9-e50e24dcca9e");
public static final UUID RX_CHAR_UUID = UUID.fromString("6e400002-b5a3-f393-e0a9-e50e24dcca9e");
public static final UUID TX_CHAR_UUID = UUID.fromString("6e400003-b5a3-f393-e0a9-e50e24dcca9e");
// Implements callback methods for GATT events that the app cares about. For example,
// connection change and services discovered. All results are forwarded to the UI
// layer as local broadcasts via broadcastUpdate().
private final BluetoothGattCallback mGattCallback = new BluetoothGattCallback() {
    @Override
    public void onConnectionStateChange(BluetoothGatt gatt, int status, int newState) {
        String intentAction;
        if (newState == BluetoothProfile.STATE_CONNECTED) {
            intentAction = ACTION_GATT_CONNECTED;
            mConnectionState = STATE_CONNECTED;
            broadcastUpdate(intentAction);
            Log.i(TAG, "Connected to GATT server.");
            // Attempts to discover services after successful connection.
            Log.i(TAG, "Attempting to start service discovery:" +
                    mBluetoothGatt.discoverServices());
        } else if (newState == BluetoothProfile.STATE_DISCONNECTED) {
            intentAction = ACTION_GATT_DISCONNECTED;
            mConnectionState = STATE_DISCONNECTED;
            Log.i(TAG, "Disconnected from GATT server.");
            broadcastUpdate(intentAction);
        }
    }

    @Override
    public void onServicesDiscovered(BluetoothGatt gatt, int status) {
        if (status == BluetoothGatt.GATT_SUCCESS) {
            Log.w(TAG, "mBluetoothGatt = " + mBluetoothGatt );
            broadcastUpdate(ACTION_GATT_SERVICES_DISCOVERED);
        } else {
            // Discovery failure is only logged; no broadcast is sent to the UI.
            Log.w(TAG, "onServicesDiscovered received: " + status);
        }
    }

    @Override
    public void onCharacteristicRead(BluetoothGatt gatt,
                                     BluetoothGattCharacteristic characteristic,
                                     int status) {
        // Forward successful reads to listeners; failed reads are silently dropped.
        if (status == BluetoothGatt.GATT_SUCCESS) {
            broadcastUpdate(ACTION_DATA_AVAILABLE, characteristic);
        }
    }

    @Override
    public void onCharacteristicChanged(BluetoothGatt gatt,
                                        BluetoothGattCharacteristic characteristic) {
        // Notifications arrive here; there is no status to check.
        broadcastUpdate(ACTION_DATA_AVAILABLE, characteristic);
    }
};
/** Sends a payload-free local broadcast carrying only the given action string. */
private void broadcastUpdate(final String action) {
    LocalBroadcastManager.getInstance(this).sendBroadcast(new Intent(action));
}
/**
 * Sends a local broadcast for the given action, attaching the characteristic's raw
 * value under {@link #EXTRA_DATA} when the characteristic is the NUS TX characteristic.
 * All other characteristics are broadcast without a payload.
 */
private void broadcastUpdate(final String action,
                             final BluetoothGattCharacteristic characteristic) {
    final Intent intent = new Intent(action);
    // This is handling for the notification on TX Character of NUS service
    if (TX_CHAR_UUID.equals(characteristic.getUuid())) {
        // Log.d(TAG, String.format("Received TX: %d",characteristic.getValue() ));
        intent.putExtra(EXTRA_DATA, characteristic.getValue());
    }
    LocalBroadcastManager.getInstance(this).sendBroadcast(intent);
}
/**
 * Binder handed to bound clients; exposes the enclosing UartService instance
 * so bound components can call its public API directly.
 */
public class LocalBinder extends Binder {
    UartService getService() {
        return UartService.this;
    }
}
// Hands bound clients the shared LocalBinder, giving them direct access to this service.
@Override
public IBinder onBind(Intent intent) {
    return mBinder;
}
@Override
public boolean onUnbind(Intent intent) {
    // After using a given device, you should make sure that BluetoothGatt.close() is called
    // such that resources are cleaned up properly. In this particular example, close() is
    // invoked when the UI is disconnected from the Service.
    close();
    return super.onUnbind(intent);
}
// Single binder instance shared across all bind requests.
private final IBinder mBinder = new LocalBinder();
/**
 * Initializes a reference to the local Bluetooth adapter.
 *
 * @return Return true if the initialization is successful.
 */
public boolean initialize() {
    // For API level 18 and above, the BluetoothAdapter is obtained through
    // the BluetoothManager system service; resolve it lazily on first use.
    if (mBluetoothManager == null) {
        mBluetoothManager = (BluetoothManager) getSystemService(Context.BLUETOOTH_SERVICE);
    }
    if (mBluetoothManager == null) {
        Log.e(TAG, "Unable to initialize BluetoothManager.");
        return false;
    }
    mBluetoothAdapter = mBluetoothManager.getAdapter();
    if (mBluetoothAdapter == null) {
        Log.e(TAG, "Unable to obtain a BluetoothAdapter.");
        return false;
    }
    return true;
}
/**
 * Connects to the GATT server hosted on the Bluetooth LE device.
 *
 * @param address The device address of the destination device.
 *
 * @return Return true if the connection is initiated successfully. The connection result
 * is reported asynchronously through the
 * {@code BluetoothGattCallback#onConnectionStateChange(android.bluetooth.BluetoothGatt, int, int)}
 * callback.
 */
public boolean connect(final String address) {
    if (mBluetoothAdapter == null || address == null) {
        Log.w(TAG, "BluetoothAdapter not initialized or unspecified address.");
        return false;
    }

    // Previously connected device: reuse the existing GATT client instead of
    // building a new connection. (String.equals handles a null stored address.)
    if (mBluetoothGatt != null && address.equals(mBluetoothDeviceAddress)) {
        Log.d(TAG, "Trying to use an existing mBluetoothGatt for connection.");
        if (!mBluetoothGatt.connect()) {
            return false;
        }
        mConnectionState = STATE_CONNECTING;
        return true;
    }

    final BluetoothDevice device = mBluetoothAdapter.getRemoteDevice(address);
    if (device == null) {
        Log.w(TAG, "Device not found. Unable to connect.");
        return false;
    }

    // autoConnect=false: connect directly rather than waiting in the
    // background for the device to become available.
    mBluetoothGatt = device.connectGatt(this, false, mGattCallback);
    Log.d(TAG, "Trying to create a new connection.");
    mBluetoothDeviceAddress = address;
    mConnectionState = STATE_CONNECTING;
    return true;
}
/**
 * Disconnects an existing connection or cancel a pending connection. The disconnection result
 * is reported asynchronously through the
 * {@code BluetoothGattCallback#onConnectionStateChange(android.bluetooth.BluetoothGatt, int, int)}
 * callback.
 */
public void disconnect() {
    if (mBluetoothAdapter == null || mBluetoothGatt == null) {
        Log.w(TAG, "BluetoothAdapter not initialized");
        return;
    }
    mBluetoothGatt.disconnect();
    // NOTE(review): close() is intentionally NOT called here — the GATT client
    // is kept so connect() can reuse it; see close() / onUnbind for teardown.
    // mBluetoothGatt.close();
}
/**
 * After using a given BLE device, the app must call this method to ensure
 * resources are released properly; drops the cached device address and the
 * GATT client.
 */
public void close() {
    if (mBluetoothGatt != null) {
        Log.w(TAG, "mBluetoothGatt closed");
        mBluetoothDeviceAddress = null;
        mBluetoothGatt.close();
        mBluetoothGatt = null;
    }
}
/**
 * Request a read on a given {@code BluetoothGattCharacteristic}. The read result is reported
 * asynchronously through the {@code BluetoothGattCallback#onCharacteristicRead(android.bluetooth.BluetoothGatt, android.bluetooth.BluetoothGattCharacteristic, int)}
 * callback.
 *
 * @param characteristic The characteristic to read from.
 */
public void readCharacteristic(BluetoothGattCharacteristic characteristic) {
    if (mBluetoothAdapter != null && mBluetoothGatt != null) {
        mBluetoothGatt.readCharacteristic(characteristic);
    } else {
        Log.w(TAG, "BluetoothAdapter not initialized");
    }
}
/**
 * Enables notifications on the TX characteristic of the Nordic UART Service,
 * so data from the peer is delivered through
 * {@code BluetoothGattCallback#onCharacteristicChanged}. Broadcasts
 * {@code DEVICE_DOES_NOT_SUPPORT_UART} when the remote device does not expose
 * the expected service, characteristic, or CCCD descriptor.
 */
public void enableTXNotification() {
    BluetoothGattService rxService = mBluetoothGatt.getService(RX_SERVICE_UUID);
    if (rxService == null) {
        showMessage("Rx service not found!");
        broadcastUpdate(DEVICE_DOES_NOT_SUPPORT_UART);
        return;
    }
    BluetoothGattCharacteristic txChar = rxService.getCharacteristic(TX_CHAR_UUID);
    if (txChar == null) {
        showMessage("Tx characteristic not found!");
        broadcastUpdate(DEVICE_DOES_NOT_SUPPORT_UART);
        return;
    }
    mBluetoothGatt.setCharacteristicNotification(txChar, true);
    // The Client Characteristic Configuration Descriptor must be written so
    // the peer actually starts sending notifications.
    BluetoothGattDescriptor descriptor = txChar.getDescriptor(CCCD);
    if (descriptor == null) {
        // Fix: previously dereferenced without a check — NPE when the
        // characteristic lacks a CCCD descriptor.
        showMessage("CCCD descriptor not found!");
        broadcastUpdate(DEVICE_DOES_NOT_SUPPORT_UART);
        return;
    }
    descriptor.setValue(BluetoothGattDescriptor.ENABLE_NOTIFICATION_VALUE);
    mBluetoothGatt.writeDescriptor(descriptor);
}
/**
 * Writes {@code value} to the RX characteristic of the Nordic UART Service
 * (the channel the peer reads from). Broadcasts
 * {@code DEVICE_DOES_NOT_SUPPORT_UART} when the GATT client, service, or
 * characteristic is unavailable.
 *
 * @param value the raw bytes to send to the peer
 */
public void writeRXCharacteristic(byte[] value) {
    // Fix: guard the GATT client (it was dereferenced unconditionally, unlike
    // every sibling method) and only emit the "null" diagnostic when true —
    // previously it was logged as an error on every single write.
    if (mBluetoothGatt == null) {
        showMessage("mBluetoothGatt null" + mBluetoothGatt);
        broadcastUpdate(DEVICE_DOES_NOT_SUPPORT_UART);
        return;
    }
    BluetoothGattService rxService = mBluetoothGatt.getService(RX_SERVICE_UUID);
    if (rxService == null) {
        showMessage("Rx service not found!");
        broadcastUpdate(DEVICE_DOES_NOT_SUPPORT_UART);
        return;
    }
    BluetoothGattCharacteristic rxChar = rxService.getCharacteristic(RX_CHAR_UUID);
    if (rxChar == null) {
        showMessage("Rx characteristic not found!");
        broadcastUpdate(DEVICE_DOES_NOT_SUPPORT_UART);
        return;
    }
    rxChar.setValue(value);
    boolean status = mBluetoothGatt.writeCharacteristic(rxChar);
    Log.d(TAG, "write TXchar - status=" + status);
}
// Logs a diagnostic message at error level (no UI is shown despite the name).
private void showMessage(String msg) {
    Log.e(TAG, msg);
}
/**
 * Retrieves a list of supported GATT services on the connected device. This should be
 * invoked only after {@code BluetoothGatt#discoverServices()} completes successfully.
 *
 * @return A {@code List} of supported services, or {@code null} when no GATT
 *         client exists.
 */
public List<BluetoothGattService> getSupportedGattServices() {
    return (mBluetoothGatt == null) ? null : mBluetoothGatt.getServices();
}
}