gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
package org.elasticsearch.xpack.core.ml.action;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.ActionType;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.time.DateMathParser;
import org.elasticsearch.core.RestApiVersion;
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.xcontent.ObjectParser;
import org.elasticsearch.xcontent.ParseField;
import org.elasticsearch.xcontent.ToXContentObject;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentParser;
import org.elasticsearch.xpack.core.action.AbstractGetResourcesResponse;
import org.elasticsearch.xpack.core.action.util.QueryPage;
import org.elasticsearch.xpack.core.ml.job.config.Job;
import org.elasticsearch.xpack.core.ml.job.messages.Messages;
import org.elasticsearch.xpack.core.ml.job.results.OverallBucket;
import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper;
import java.io.IOException;
import java.util.Objects;
import java.util.function.LongSupplier;
import static org.elasticsearch.core.RestApiVersion.equalTo;
import static org.elasticsearch.core.RestApiVersion.onOrAfter;
import static org.elasticsearch.xpack.core.ml.MachineLearningField.DEPRECATED_ALLOW_NO_JOBS_PARAM;
/**
* <p>
* This action returns summarized bucket results over multiple jobs.
* Overall buckets have the span of the largest job's bucket_span.
* Their score is calculated by finding the max anomaly score per job
* and then averaging the top N.
* </p>
* <p>
* Overall buckets can be optionally aggregated into larger intervals
* by setting the bucket_span parameter. When that is the case, the
* overall_score is the max of the overall buckets that are within
* the interval.
* </p>
*/
public class GetOverallBucketsAction extends ActionType<GetOverallBucketsAction.Response> {

    public static final GetOverallBucketsAction INSTANCE = new GetOverallBucketsAction();
    public static final String NAME = "cluster:monitor/xpack/ml/job/results/overall_buckets/get";

    private GetOverallBucketsAction() {
        super(NAME, Response::new);
    }

    /**
     * Request for overall bucket results. Carries the target job id plus optional
     * filters: top_n, bucket_span, overall_score, exclude_interim, start/end and
     * allow_no_match. Serialized over the wire and parseable from an XContent body.
     */
    public static class Request extends ActionRequest implements ToXContentObject {

        public static final ParseField TOP_N = new ParseField("top_n");
        public static final ParseField BUCKET_SPAN = new ParseField("bucket_span");
        public static final ParseField OVERALL_SCORE = new ParseField("overall_score");
        public static final ParseField EXCLUDE_INTERIM = new ParseField("exclude_interim");
        public static final ParseField START = new ParseField("start");
        public static final ParseField END = new ParseField("end");
        // From V8 onwards only "allow_no_match" is accepted; V7 also recognized a
        // deprecated alias for backwards compatibility.
        public static final ParseField ALLOW_NO_MATCH = new ParseField("allow_no_match").forRestApiVersion(onOrAfter(RestApiVersion.V_8));
        public static final ParseField ALLOW_NO_MATCH_V7 = new ParseField("allow_no_match", DEPRECATED_ALLOW_NO_JOBS_PARAM)
            .forRestApiVersion(equalTo(RestApiVersion.V_7));

        private static final ObjectParser<Request, Void> PARSER = new ObjectParser<>(NAME, Request::new);

        static {
            PARSER.declareString((request, jobId) -> request.jobId = jobId, Job.ID);
            PARSER.declareInt(Request::setTopN, TOP_N);
            PARSER.declareString(Request::setBucketSpan, BUCKET_SPAN);
            PARSER.declareDouble(Request::setOverallScore, OVERALL_SCORE);
            PARSER.declareBoolean(Request::setExcludeInterim, EXCLUDE_INTERIM);
            PARSER.declareString(
                (request, startTime) -> request.setStart(parseDateOrThrow(startTime, START, System::currentTimeMillis)),
                START
            );
            PARSER.declareString((request, endTime) -> request.setEnd(parseDateOrThrow(endTime, END, System::currentTimeMillis)), END);
            PARSER.declareBoolean(Request::setAllowNoMatch, ALLOW_NO_MATCH);
            PARSER.declareBoolean(Request::setAllowNoMatch, ALLOW_NO_MATCH_V7);
        }

        /**
         * Parses {@code date} as either epoch milliseconds or a date-math expression.
         *
         * @param date the raw string taken from the request
         * @param paramName the parameter the value came from, used in the error message
         * @param now supplier of "now" for resolving date math
         * @return the parsed time as epoch milliseconds
         * @throws ElasticsearchParseException if the string cannot be parsed
         */
        static long parseDateOrThrow(String date, ParseField paramName, LongSupplier now) {
            DateMathParser dateMathParser = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.toDateMathParser();
            try {
                return dateMathParser.parse(date, now).toEpochMilli();
            } catch (Exception e) {
                String msg = Messages.getMessage(Messages.REST_INVALID_DATETIME_PARAMS, paramName.getPreferredName(), date);
                throw new ElasticsearchParseException(msg, e);
            }
        }

        /**
         * Builds a request from an XContent body. A non-null {@code jobId} (taken from
         * the URL path) overrides any job id present in the body.
         */
        public static Request parseRequest(String jobId, XContentParser parser) {
            Request request = PARSER.apply(parser, null);
            if (jobId != null) {
                request.jobId = jobId;
            }
            return request;
        }

        private String jobId;
        private int topN = 1;
        private TimeValue bucketSpan;
        private double overallScore = 0.0;
        private boolean excludeInterim = false;
        private Long start;
        private Long end;
        private boolean allowNoMatch = true;

        public Request() {}

        public Request(StreamInput in) throws IOException {
            super(in);
            // Read order must stay in sync with writeTo.
            jobId = in.readString();
            topN = in.readVInt();
            bucketSpan = in.readOptionalTimeValue();
            overallScore = in.readDouble();
            excludeInterim = in.readBoolean();
            start = in.readOptionalLong();
            end = in.readOptionalLong();
            allowNoMatch = in.readBoolean();
        }

        public Request(String jobId) {
            this.jobId = ExceptionsHelper.requireNonNull(jobId, Job.ID.getPreferredName());
        }

        public String getJobId() {
            return jobId;
        }

        public int getTopN() {
            return topN;
        }

        /**
         * Sets how many per-job top scores contribute to the overall score.
         *
         * @throws IllegalArgumentException if {@code topN} is not positive
         */
        public void setTopN(int topN) {
            if (topN <= 0) {
                throw new IllegalArgumentException("[topN] parameter must be positive, found [" + topN + "]");
            }
            this.topN = topN;
        }

        public TimeValue getBucketSpan() {
            return bucketSpan;
        }

        public void setBucketSpan(TimeValue bucketSpan) {
            this.bucketSpan = bucketSpan;
        }

        /** Parses and sets the bucket span from its string representation (e.g. "1h"). */
        public void setBucketSpan(String bucketSpan) {
            this.bucketSpan = TimeValue.parseTimeValue(bucketSpan, BUCKET_SPAN.getPreferredName());
        }

        public double getOverallScore() {
            return overallScore;
        }

        public void setOverallScore(double overallScore) {
            this.overallScore = overallScore;
        }

        public boolean isExcludeInterim() {
            return excludeInterim;
        }

        public void setExcludeInterim(boolean excludeInterim) {
            this.excludeInterim = excludeInterim;
        }

        public Long getStart() {
            return start;
        }

        public void setStart(Long start) {
            this.start = start;
        }

        /** Parses and sets the start time; accepts epoch millis or date math. */
        public void setStart(String start) {
            setStart(parseDateOrThrow(start, START, System::currentTimeMillis));
        }

        public Long getEnd() {
            return end;
        }

        public void setEnd(Long end) {
            this.end = end;
        }

        /** Parses and sets the end time; accepts epoch millis or date math. */
        public void setEnd(String end) {
            setEnd(parseDateOrThrow(end, END, System::currentTimeMillis));
        }

        public boolean allowNoMatch() {
            return allowNoMatch;
        }

        public void setAllowNoMatch(boolean allowNoMatch) {
            this.allowNoMatch = allowNoMatch;
        }

        @Override
        public ActionRequestValidationException validate() {
            // Field-level validation happens in the setters; nothing more to check here.
            return null;
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            // Write order must stay in sync with the StreamInput constructor.
            out.writeString(jobId);
            out.writeVInt(topN);
            out.writeOptionalTimeValue(bucketSpan);
            out.writeDouble(overallScore);
            out.writeBoolean(excludeInterim);
            out.writeOptionalLong(start);
            out.writeOptionalLong(end);
            out.writeBoolean(allowNoMatch);
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.startObject();
            builder.field(Job.ID.getPreferredName(), jobId);
            builder.field(TOP_N.getPreferredName(), topN);
            if (bucketSpan != null) {
                builder.field(BUCKET_SPAN.getPreferredName(), bucketSpan.getStringRep());
            }
            builder.field(OVERALL_SCORE.getPreferredName(), overallScore);
            builder.field(EXCLUDE_INTERIM.getPreferredName(), excludeInterim);
            if (start != null) {
                builder.field(START.getPreferredName(), String.valueOf(start));
            }
            if (end != null) {
                builder.field(END.getPreferredName(), String.valueOf(end));
            }
            // Emit the deprecated parameter name only for V7 clients.
            if (builder.getRestApiVersion() == RestApiVersion.V_7) {
                builder.field(DEPRECATED_ALLOW_NO_JOBS_PARAM, allowNoMatch);
            } else {
                builder.field(ALLOW_NO_MATCH.getPreferredName(), allowNoMatch);
            }
            builder.endObject();
            return builder;
        }

        @Override
        public int hashCode() {
            return Objects.hash(jobId, topN, bucketSpan, overallScore, excludeInterim, start, end, allowNoMatch);
        }

        @Override
        public boolean equals(Object other) {
            if (this == other) {
                return true;
            }
            if (other == null || getClass() != other.getClass()) {
                return false;
            }
            Request that = (Request) other;
            return Objects.equals(jobId, that.jobId)
                && this.topN == that.topN
                && Objects.equals(bucketSpan, that.bucketSpan)
                && this.excludeInterim == that.excludeInterim
                // BUG FIX: was `==` on double, which disagreed with Objects.hash
                // (Double.hashCode semantics) for NaN and -0.0/0.0, breaking the
                // equals/hashCode contract. Double.compare matches the hash.
                && Double.compare(this.overallScore, that.overallScore) == 0
                && Objects.equals(start, that.start)
                && Objects.equals(end, that.end)
                && this.allowNoMatch == that.allowNoMatch;
        }
    }

    /** Response holding a page of {@link OverallBucket} results. */
    public static class Response extends AbstractGetResourcesResponse<OverallBucket> implements ToXContentObject {

        public Response(StreamInput in) throws IOException {
            super(in);
        }

        public Response(QueryPage<OverallBucket> overallBuckets) {
            super(overallBuckets);
        }

        public QueryPage<OverallBucket> getOverallBuckets() {
            return getResources();
        }

        @Override
        protected Reader<OverallBucket> getReader() {
            return OverallBucket::new;
        }
    }
}
| |
package crystal.client;
import java.awt.Color;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.KeyEvent;
import java.awt.event.KeyListener;
import java.util.HashSet;
import java.util.Map;
import javax.swing.BoxLayout;
import javax.swing.JButton;
import javax.swing.JCheckBox;
import javax.swing.JComboBox;
import javax.swing.JComponent;
import javax.swing.JFileChooser;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JTabbedPane;
import javax.swing.JTextField;
import javax.swing.SpringLayout;
import crystal.model.DataSource;
import crystal.model.DataSource.RepoKind;
import crystal.client.PreferencesGUIEditorFrame.MyPathChooser;
import crystal.util.SpringLayoutUtility;
import crystal.util.ValidInputChecker;
/**
* A panel used for the configuration editor (PreferencesGUIEditorFrame) to display a single project.
*
* @author brun
*/
public class ProjectPanel extends JPanel {

    private static final long serialVersionUID = 5244512987255240473L;

    // Column counts of the repositories grid and the environment grid.
    private static final int SOURCES_COLUMNS = 6;
    private static final int ENVIRON_COLUMNS = 3;
    // Number of header rows in the repositories grid.
    private static final int BAR_SIZE = 1;

    // The name of the project.
    private String _name;

    /**
     * Creates a new panel displaying the given project configuration.
     *
     * @param copyPref the editable copy of the project configuration to display
     * @param copyPrefs the editable copy of the overall configuration associated with this project
     * @param mainFrame the frame that will hold this panel
     * @param tabbedPane the pane on the mainFrame that will hold this panel
     * @param changedComponents tracks which components currently differ from the saved configuration
     * @param pref the original (saved) project configuration; may be {@code null} for a new project
     * @param validText tracks which text fields currently hold valid input
     */
    public ProjectPanel(final ProjectPreferences copyPref, final ClientPreferences copyPrefs,
            final JFrame mainFrame, final JTabbedPane tabbedPane,
            final Map<JComponent, Boolean> changedComponents, final ProjectPreferences pref,
            final Map<JTextField, Boolean> validText) {
        super();
        final JPanel panel = this;
        setLayout(new BoxLayout(this, BoxLayout.Y_AXIS));
        _name = copyPref.getName();

        JPanel prefEnvironmentPanel = new JPanel(new SpringLayout());
        // Two empty header cells, then the validity column header.
        for (int i = 0; i < 2; i++) {
            prefEnvironmentPanel.add(new JLabel());
        }
        prefEnvironmentPanel.add(new JLabel("Valid?"));

        // Project name field.
        prefEnvironmentPanel.add(new JLabel("Project Name: "));
        final JTextField shortName = new JTextField(copyPref.getName());
        final JLabel nameState = new JLabel(" valid");
        nameState.setForeground(Color.GREEN.darker());
        changedComponents.put(shortName, false);
        validText.put(shortName, true);
        prefEnvironmentPanel.add(shortName);
        prefEnvironmentPanel.add(nameState);
        shortName.addKeyListener(new KeyListener() {
            @Override
            public void keyPressed(KeyEvent arg0) {
            }

            @Override
            public void keyReleased(KeyEvent arg0) {
                // setName returns false when the new name is rejected (e.g. a duplicate).
                boolean nameChanged = copyPref.setName(shortName.getText());
                if (nameChanged) {
                    _name = shortName.getText();
                    // Keep the tab title in sync with the new project name.
                    ((JLabel) ((JPanel) tabbedPane.getTabComponentAt(tabbedPane.getSelectedIndex())).getComponent(0)).setText(_name);
                    if (pref != null) {
                        changedComponents.put(shortName,
                                !shortName.getText().equals(pref.getName()));
                    }
                }
                validText.put(shortName, nameChanged);
                setState(nameState, nameChanged);
            }

            @Override
            public void keyTyped(KeyEvent arg0) {
            }
        });

        // Optional parent project name.
        prefEnvironmentPanel.add(new JLabel("Parent Name (optional): "));
        final JTextField parentName = new JTextField(copyPref.getEnvironment().getParent());
        final JLabel parentState = new JLabel();
        prefEnvironmentPanel.add(parentName);
        prefEnvironmentPanel.add(parentState);
        changedComponents.put(parentName, false);
        validText.put(parentName, true);
        parentName.addKeyListener(new KeyListener() {
            public void keyPressed(KeyEvent arg0) {
            }

            public void keyTyped(KeyEvent arg0) {
            }

            public void keyReleased(KeyEvent arg0) {
                copyPref.getEnvironment().setParent(parentName.getText());
                if (pref != null) {
                    changedComponents.put(parentName,
                            !parentName.getText().equals(pref.getEnvironment().getParent()));
                }
                panel.validate();
                mainFrame.pack();
            }
        });

        // Repository kind selector.
        prefEnvironmentPanel.add(new JLabel("Repo Type: "));
        final JComboBox type = new JComboBox();
        final JLabel typeState = new JLabel();
        // TODO change HG and GIT to hg and git.
        type.addItem(DataSource.RepoKind.HG);
        type.addItem(DataSource.RepoKind.GIT);
        type.setSelectedItem(copyPref.getEnvironment().getKind());
        prefEnvironmentPanel.add(type);
        prefEnvironmentPanel.add(typeState);
        changedComponents.put(type, false);
        type.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                copyPref.getEnvironment().setKind((DataSource.RepoKind) type.getSelectedItem());
                if (pref != null) {
                    changedComponents.put(type,
                            !((RepoKind) type.getSelectedItem()).equals(pref.getEnvironment().getKind()));
                }
            }
        });

        // Clone address with a file-chooser helper button.
        prefEnvironmentPanel.add(new JLabel("Clone Address: "));
        final JTextField address = new JTextField(copyPref.getEnvironment().getCloneString());
        final JPanel addressPanel = new JPanel();
        addressPanel.setLayout(new BoxLayout(addressPanel, BoxLayout.X_AXIS));
        JButton addressButton = new JButton("find");
        final JLabel addressState = new JLabel();
        addressPanel.add(address);
        addressPanel.add(addressButton);
        prefEnvironmentPanel.add(addressPanel);
        prefEnvironmentPanel.add(addressState);
        changedComponents.put(address, false);
        // A clone address is valid if it is an http/ssh URL or an existing directory.
        boolean addressValid = (address.getText().startsWith("http")) || (address.getText().startsWith("ssh"))
                || ValidInputChecker.checkDirectoryPath(address.getText());
        validText.put(address, addressValid);
        setState(addressState, addressValid);
        address.addKeyListener(new KeyListener() {
            @Override
            public void keyPressed(KeyEvent e) {
            }

            @Override
            public void keyReleased(KeyEvent e) {
                copyPref.getEnvironment().setCloneString(address.getText());
                if (pref != null) {
                    changedComponents.put(address,
                            !address.getText().equals(pref.getEnvironment().getCloneString()));
                }
                boolean valid = (address.getText().startsWith("http")) || (address.getText().startsWith("ssh"))
                        || ValidInputChecker.checkDirectoryPath(address.getText());
                validText.put(address, valid);
                setState(addressState, valid);
            }

            @Override
            public void keyTyped(KeyEvent e) {
            }
        });
        addressButton.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent arg0) {
                new MyPathChooser("Path to clone address directory", address, JFileChooser.DIRECTORIES_ONLY);
            }
        });

        // Compile command.
        prefEnvironmentPanel.add(new JLabel("Compile Command: "));
        String compileCommand = copyPref.getEnvironment().getCompileCommand();
        if (compileCommand == null || compileCommand.trim().equals("")) {
            compileCommand = "";
        }
        final JTextField compile = new JTextField(compileCommand);
        final JLabel compileState = new JLabel(" valid");
        compileState.setForeground(Color.GREEN.darker());
        prefEnvironmentPanel.add(compile);
        prefEnvironmentPanel.add(compileState);
        changedComponents.put(compile, false);
        boolean compileValid = ValidInputChecker.checkCommand(compile.getText());
        validText.put(compile, compileValid);
        setState(compileState, compileValid);
        compile.addKeyListener(new KeyListener() {
            @Override
            public void keyPressed(KeyEvent arg0) {
            }

            @Override
            public void keyReleased(KeyEvent arg0) {
                copyPref.getEnvironment().setCompileCommand(compile.getText());
                if (pref != null) {
                    changedComponents.put(compile,
                            !compile.getText().equals(pref.getEnvironment().getCompileCommand()));
                }
                boolean compileValid = ValidInputChecker.checkCommand(compile.getText());
                validText.put(compile, compileValid);
                setState(compileState, compileValid);
            }

            @Override
            public void keyTyped(KeyEvent arg0) {
            }
        });

        // Test command.
        prefEnvironmentPanel.add(new JLabel("Test Command: "));
        String testCommand = copyPref.getEnvironment().getTestCommand();
        if (testCommand == null || testCommand.trim().equals("")) {
            testCommand = "";
        }
        final JTextField test = new JTextField(testCommand);
        final JLabel testState = new JLabel(" valid");
        testState.setForeground(Color.GREEN.darker());
        prefEnvironmentPanel.add(test);
        prefEnvironmentPanel.add(testState);
        changedComponents.put(test, false);
        boolean testValid = ValidInputChecker.checkCommand(test.getText());
        // BUG FIX: this previously wrote to validText under the compile field's key,
        // clobbering the compile command's validity and never tracking the test field.
        validText.put(test, testValid);
        setState(testState, testValid);
        test.addKeyListener(new KeyListener() {
            @Override
            public void keyPressed(KeyEvent e) {
            }

            @Override
            public void keyReleased(KeyEvent e) {
                copyPref.getEnvironment().setTestCommand(test.getText());
                if (pref != null) {
                    changedComponents.put(test,
                            !test.getText().equals(pref.getEnvironment().getTestCommand()));
                }
                boolean testValid = ValidInputChecker.checkCommand(test.getText());
                // BUG FIX: track the test field, not the compile field.
                validText.put(test, testValid);
                setState(testState, testValid);
            }

            @Override
            public void keyTyped(KeyEvent e) {
            }
        });

        SpringLayoutUtility.formGridInColumn(prefEnvironmentPanel,
                prefEnvironmentPanel.getComponents().length / ENVIRON_COLUMNS, ENVIRON_COLUMNS);
        panel.add(prefEnvironmentPanel);

        // Repositories grid and the button that adds a new repository row.
        final JPanel sourcesPanel = new JPanel(new SpringLayout());
        final JButton newRepoButton = new JButton("Add New Repository");
        newRepoButton.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                // Find the first unused "New Repo <n>" short name.
                HashSet<String> shortNameLookup = new HashSet<String>();
                for (DataSource current : copyPref.getDataSources()) {
                    shortNameLookup.add(current.getShortName());
                }
                int count = 1;
                while (shortNameLookup.contains("New Repo " + count++)) {
                    // Keep counting until a free name is found.
                }
                DataSource newGuy = new DataSource("New Repo " + --count, "",
                        DataSource.RepoKind.HG, false, null);
                copyPref.addDataSource(newGuy);
                addRepoPanel(newGuy, copyPref, copyPrefs, panel, mainFrame, sourcesPanel, changedComponents, null, validText);
                SpringLayoutUtility.formGridInColumn(sourcesPanel, copyPref.getDataSources().size() + BAR_SIZE, SOURCES_COLUMNS);
                copyPrefs.setChanged(true);
                panel.validate();
                mainFrame.pack();
            }
        });
        add(newRepoButton);

        // Header row of the repositories grid.
        JLabel pShortName = new JLabel("Short Name", JLabel.CENTER);
        JLabel pHide = new JLabel("Hide?", JLabel.CENTER);
        JLabel pParent = new JLabel("Parent", JLabel.CENTER);
        JLabel pClone = new JLabel("Clone Address", JLabel.CENTER);
        JLabel pDelete = new JLabel();
        JLabel pState = new JLabel("Valid?", JLabel.CENTER);
        sourcesPanel.add(pShortName);
        sourcesPanel.add(pHide);
        sourcesPanel.add(pParent);
        sourcesPanel.add(pClone);
        sourcesPanel.add(pDelete);
        sourcesPanel.add(pState);

        for (DataSource copySource : copyPref.getDataSources()) {
            // BUG FIX: guard against a null saved configuration (pref is null for a
            // brand-new project); the unguarded dereference threw a NullPointerException.
            addRepoPanel(copySource, copyPref, copyPrefs, panel, mainFrame,
                    sourcesPanel, changedComponents,
                    (pref == null) ? null : pref.getDataSource(copySource.getShortName()), validText);
        }
        SpringLayoutUtility.formGridInColumn(sourcesPanel, copyPref.getDataSources().size() + BAR_SIZE, SOURCES_COLUMNS);
        add(sourcesPanel);
    }

    /**
     * @return this project's name
     */
    public String getName() {
        return _name;
    }

    /**
     * Adds one repository row to the sources grid.
     *
     * @param copySource the editable copy of the repository configuration
     * @param copyPref the editable copy of the project configuration
     * @param copyPrefs the editable copy of the overall configuration
     * @param panel the panel in which this row sits
     * @param mainFrame the frame in which this row sits
     * @param sourcesPanel the grid panel that receives the row's components
     * @param changedComponents tracks which components differ from the saved configuration
     * @param source the saved repository configuration; may be {@code null} for a new repository
     * @param validText tracks which text fields currently hold valid input
     */
    private void addRepoPanel(final DataSource copySource, final ProjectPreferences copyPref,
            final ClientPreferences copyPrefs, final JPanel panel,
            final JFrame mainFrame, final JPanel sourcesPanel,
            final Map<JComponent, Boolean> changedComponents, final DataSource source,
            final Map<JTextField, Boolean> validText) {
        // Aggregated validity label for this row; `states` holds the validity of
        // {short name, parent, clone address} respectively.
        final JLabel validState = new JLabel();
        final boolean[] states = new boolean[3];

        // Short name field.
        final JTextField shortName = new JTextField(copySource.getShortName());
        changedComponents.put(shortName, false);
        validText.put(shortName, true);
        states[0] = true;
        shortName.addKeyListener(new KeyListener() {
            @Override
            public void keyPressed(KeyEvent e) {
            }

            @Override
            public void keyReleased(KeyEvent e) {
                copySource.setShortName(shortName.getText());
                if (source != null) {
                    changedComponents.put(shortName,
                            !shortName.getText().equals(source.getShortName()));
                }
                boolean valid = ValidInputChecker.checkDataSourceNameDuplicate(copyPref, copySource);
                validText.put(shortName, valid);
                states[0] = valid;
                validState.setText(getState(states));
                if (getState(states).equals("valid"))
                    validState.setForeground(Color.GREEN.darker());
                else
                    validState.setForeground(Color.RED.darker());
            }

            @Override
            public void keyTyped(KeyEvent e) {
            }
        });

        // Hide checkbox.
        final JCheckBox hideBox = new JCheckBox();
        if (copySource.isHidden())
            hideBox.setSelected(true);
        changedComponents.put(hideBox, false);
        hideBox.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                copySource.hide(hideBox.isSelected());
                if (source != null)
                    changedComponents.put(hideBox, hideBox.isSelected() != source.isHidden());
            }
        });

        // Parent field.
        final JTextField parent = new JTextField(copySource.getParent());
        changedComponents.put(parent, false);
        validText.put(parent, true);
        states[1] = true;
        parent.addKeyListener(new KeyListener() {
            public void keyPressed(KeyEvent arg0) {
            }

            public void keyTyped(KeyEvent arg0) {
            }

            public void keyReleased(KeyEvent arg0) {
                copySource.setParent(parent.getText());
                if (source != null)
                    changedComponents.put(parent, !parent.getText().equals(source.getParent()));
                panel.validate();
                mainFrame.pack();
            }
        });

        // Clone address field with a file-chooser button.
        final JTextField cloneAddress = new JTextField(copySource.getCloneString());
        final JPanel cloneAddressPanel = new JPanel();
        cloneAddressPanel.setLayout(new BoxLayout(cloneAddressPanel, BoxLayout.X_AXIS));
        cloneAddressPanel.add(cloneAddress);
        JButton findButton = new JButton("find");
        cloneAddressPanel.add(findButton);
        changedComponents.put(cloneAddress, false);
        boolean validAddress = ValidInputChecker.checkDirectoryPath(cloneAddress.getText())
                || ValidInputChecker.checkUrl(cloneAddress.getText());
        validText.put(cloneAddress, validAddress);
        states[2] = validAddress;
        cloneAddress.addKeyListener(new KeyListener() {
            public void keyPressed(KeyEvent arg0) {
            }

            public void keyTyped(KeyEvent arg0) {
            }

            public void keyReleased(KeyEvent arg0) {
                copySource.setCloneString(cloneAddress.getText());
                if (source != null) {
                    changedComponents.put(cloneAddress,
                            !cloneAddress.getText().equals(source.getCloneString()));
                }
                boolean valid = ValidInputChecker.checkDirectoryPath(cloneAddress.getText())
                        || ValidInputChecker.checkUrl(cloneAddress.getText());
                validText.put(cloneAddress, valid);
                states[2] = valid;
                validState.setText(getState(states));
                if (getState(states).equals("valid"))
                    validState.setForeground(Color.GREEN.darker());
                else
                    validState.setForeground(Color.RED.darker());
                panel.validate();
                mainFrame.pack();
            }
        });
        findButton.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent arg0) {
                new MyPathChooser("Path to clone address directory", cloneAddress, JFileChooser.DIRECTORIES_ONLY);
            }
        });

        // Delete button removes the whole row after user confirmation.
        final JButton deleteRepoButton = new JButton("Delete");
        deleteRepoButton.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                int n = JOptionPane.showConfirmDialog(null,
                        "Delete " + copySource.getShortName() + "'s repository?");
                if (n == JOptionPane.YES_OPTION) {
                    copyPref.removeDataSource(copySource);
                    sourcesPanel.remove(shortName);
                    sourcesPanel.remove(hideBox);
                    sourcesPanel.remove(parent);
                    sourcesPanel.remove(cloneAddressPanel);
                    sourcesPanel.remove(deleteRepoButton);
                    // BUG FIX: the validity label is the row's sixth component; leaving
                    // it behind misaligned the 6-column grid after a deletion.
                    sourcesPanel.remove(validState);
                    validText.remove(shortName);
                    validText.remove(parent);
                    validText.remove(cloneAddress);
                    copyPrefs.setChanged(true);
                    SpringLayoutUtility.formGridInColumn(sourcesPanel, copyPref.getDataSources().size() + BAR_SIZE,
                            SOURCES_COLUMNS);
                    mainFrame.pack();
                }
            }
        });

        sourcesPanel.add(shortName);
        sourcesPanel.add(hideBox);
        sourcesPanel.add(parent);
        sourcesPanel.add(cloneAddressPanel);
        sourcesPanel.add(deleteRepoButton);
        validState.setText(getState(states));
        if (getState(states).equals("valid"))
            validState.setForeground(Color.GREEN.darker());
        else
            validState.setForeground(Color.RED.darker());
        sourcesPanel.add(validState);
    }

    /**
     * Sets the given label to show " valid" (green) or "invalid" (red).
     *
     * @param label the label to update
     * @param valid whether the associated input is valid
     */
    private void setState(JLabel label, boolean valid) {
        if (valid) {
            label.setText(" valid");
            label.setForeground(Color.GREEN.darker());
        } else {
            label.setText("invalid");
            label.setForeground(Color.RED.darker());
        }
    }

    /**
     * Builds a state description from the validity flags for
     * {short name, parent, address}: "valid" when all are set, otherwise
     * the names of the offending fields followed by ": invalid".
     *
     * @param states validity flags for short name, parent, and address
     * @return the state description
     */
    private String getState(boolean[] states) {
        String s = "";
        if (!states[0])
            s = "short name ";
        if (!states[1])
            s += "parent ";
        if (!states[2])
            s += "address ";
        if (s.isEmpty())
            s = "valid";
        else
            s += ": invalid";
        return s;
    }
}
| |
// Copyright 2015 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.syntax;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import com.google.devtools.build.lib.vfs.PathFragment;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.Formattable;
import java.util.Formatter;
import java.util.IllegalFormatException;
import java.util.List;
import java.util.Map;
import java.util.MissingFormatWidthException;
import java.util.Set;
import java.util.SortedMap;
import java.util.TreeMap;
/**
* (Pretty) Printing of Skylark values
*/
public final class Printer {
private static final char SKYLARK_QUOTATION_MARK = '"';
// Utility class: private constructor prevents instantiation.
private Printer() {
}
/**
 * Returns the informal string representation of {@code x} (like Python's str):
 * identical to {@link #repr} except that top-level strings and labels are
 * emitted verbatim, without quotes.
 *
 * @param quotationMark the quotation mark to be used (' or ")
 * @return the representation.
 */
public static String str(Object x, char quotationMark) {
  StringBuilder out = new StringBuilder();
  print(out, x, quotationMark);
  return out.toString();
}
/** Informal representation of {@code x} using the default Skylark quote character. */
public static String str(Object x) {
return str(x, SKYLARK_QUOTATION_MARK);
}
/**
 * Returns the official string representation of {@code x} (like Python's repr).
 * For regular data structures, the value should be parsable back into an equal
 * data structure.
 *
 * @param quotationMark the quotation mark to be used (' or ")
 * @return the representation.
 */
public static String repr(Object x, char quotationMark) {
  StringBuilder out = new StringBuilder();
  write(out, x, quotationMark);
  return out.toString();
}
/** Official representation of {@code x} using the default Skylark quote character. */
public static String repr(Object x) {
return repr(x, SKYLARK_QUOTATION_MARK);
}
// In absence of a Python naming tradition, the write() vs print() function names
// follow the Lisp tradition: print() displays the informal representation (as in Python str)
// whereas write() displays a readable representation (as in Python repr).
/**
 * Prints an informal representation of {@code o} onto {@code buffer}.
 * Differs from {@link #write} only at top level: strings and labels are
 * appended verbatim rather than quoted.
 *
 * @param buffer the Appendable to which to print the representation
 * @param o the object
 * @param quotationMark the quotation mark to be used (' or ")
 * @return the buffer, in fluent style
 */
public static Appendable print(Appendable buffer, Object o, char quotationMark) {
  // Labels and strings are both emitted as-is; everything else falls through
  // to the official (quoted) representation.
  if (o instanceof Label || o instanceof String) {
    return append(buffer, o.toString());
  }
  return write(buffer, o, quotationMark);
}
/** Informal representation of {@code o} using the default Skylark quote character. */
public static Appendable print(Appendable buffer, Object o) {
return print(buffer, o, SKYLARK_QUOTATION_MARK);
}
/**
 * Print an official representation of object x.
 * For regular data structures, the value should be parsable back into an equal data structure.
 *
 * @param buffer the Appendable to write to.
 * @param o the object a representation of which to write.
 * @param quotationMark The quotation mark to be used (' or ")
 * @return the Appendable, in fluent style.
 */
public static Appendable write(Appendable buffer, Object o, char quotationMark) {
  // NOTE: the instanceof dispatch order matters — e.g. a SkylarkValue must be
  // handled before the generic fallbacks.
  if (o == null) {
    throw new NullPointerException(); // Java null is not a build language value.
  } else if (o instanceof SkylarkValue) {
    // Skylark values know how to render themselves.
    ((SkylarkValue) o).write(buffer, quotationMark);
  } else if (o instanceof String) {
    writeString(buffer, (String) o, quotationMark);
  } else if (o instanceof Integer || o instanceof Double) {
    append(buffer, o.toString());
  } else if (o == Boolean.TRUE) {
    append(buffer, "True");
  } else if (o == Boolean.FALSE) {
    append(buffer, "False");
  } else if (o instanceof List<?>) {
    List<?> seq = (List<?>) o;
    printList(buffer, seq, EvalUtils.isTuple(seq), quotationMark);
  } else if (o instanceof Map<?, ?>) {
    Map<?, ?> dict = (Map<?, ?>) o;
    // Entries are printed in sorted key order for deterministic output.
    printList(buffer, getSortedEntrySet(dict), "{", ", ", "}", null, quotationMark);
  } else if (o instanceof Map.Entry<?, ?>) {
    Map.Entry<?, ?> entry = (Map.Entry<?, ?>) o;
    write(buffer, entry.getKey(), quotationMark);
    append(buffer, ": ");
    write(buffer, entry.getValue(), quotationMark);
  } else if (o instanceof PathFragment) {
    append(buffer, ((PathFragment) o).getPathString());
  } else if (o instanceof Class<?>) {
    append(buffer, EvalUtils.getDataTypeNameFromClass((Class<?>) o));
  } else {
    append(buffer, o.toString());
  }
  return buffer;
}
/**
 * Returns the entry set of {@code dict}, sorted by key unless the map is
 * already a SortedMap (in which case its own ordering is kept).
 */
private static <K, V> Set<Map.Entry<K, V>> getSortedEntrySet(Map<K, V> dict) {
  if (dict instanceof SortedMap<?, ?>) {
    return dict.entrySet();
  }
  SortedMap<K, V> sorted = new TreeMap<>(EvalUtils.SKYLARK_COMPARATOR);
  sorted.putAll(dict);
  return sorted.entrySet();
}
/** Official representation of {@code o} using the default Skylark quote character. */
public static Appendable write(Appendable buffer, Object o) {
return write(buffer, o, SKYLARK_QUOTATION_MARK);
}
// Throughout this file, IOException is converted to AssertionError: during
// normal operation only in-memory Appendables are used, which cannot throw.
/**
 * Appends a single character, converting any IOException to AssertionError.
 *
 * @param buffer the Appendable to write to.
 * @param c the character to append.
 * @return the Appendable, in fluent style.
 */
public static Appendable append(Appendable buffer, char c) {
  try {
    buffer.append(c);
  } catch (IOException e) {
    throw new AssertionError(e);
  }
  return buffer;
}
/**
 * Appends a character sequence, converting any IOException to AssertionError.
 *
 * @param buffer the Appendable to write to.
 * @param s the character sequence to append.
 * @return the Appendable, in fluent style.
 */
public static Appendable append(Appendable buffer, CharSequence s) {
  try {
    buffer.append(s);
  } catch (IOException e) {
    throw new AssertionError(e);
  }
  return buffer;
}
/**
 * Appends the subsequence {@code s[start, end)}, converting any IOException
 * to AssertionError.
 *
 * @param buffer the Appendable to write to.
 * @param s the character sequence a slice of which to append.
 * @param start index of the first character to append (inclusive).
 * @param end index after the last character to append (exclusive).
 * @return the Appendable, in fluent style.
 */
private static Appendable append(Appendable buffer, CharSequence s, int start, int end) {
  try {
    buffer.append(s, start, end);
  } catch (IOException e) {
    throw new AssertionError(e);
  }
  return buffer;
}
/** Writes a backslash followed by {@code c}, e.g. {@code \n} for c == 'n'. */
private static Appendable backslashChar(Appendable buffer, char c) {
  append(buffer, '\\');
  return append(buffer, c);
}
/**
 * Writes {@code c} with Skylark string escaping applied: the active quote
 * character and backslash are backslash-escaped, common control characters
 * use their mnemonic escapes, other characters below 32 use {@code \xNN},
 * and everything else is written verbatim (no UTF-8 handling needed).
 */
private static Appendable escapeCharacter(Appendable buffer, char c, char quote) {
  // The quote char and '\\' both escape to a backslash followed by themselves.
  if (c == quote || c == '\\') {
    return backslashChar(buffer, c);
  }
  if (c == '\r') {
    return backslashChar(buffer, 'r');
  }
  if (c == '\n') {
    return backslashChar(buffer, 'n');
  }
  if (c == '\t') {
    return backslashChar(buffer, 't');
  }
  if (c < 32) {
    // Remaining control characters get a hex escape.
    return append(buffer, String.format("\\x%02x", (int) c));
  }
  return append(buffer, c); // no need to support UTF-8
}
/**
 * Write a properly escaped Skylark representation of a string to a buffer:
 * the opening quote, each character escaped as needed, then the closing quote.
 *
 * @param buffer the Appendable to write to.
 * @param s the string a representation of which to write.
 * @param quote the quote character to use, '"' or '\''.
 * @return the Appendable, in fluent style.
 */
private static Appendable writeString(Appendable buffer, String s, char quote) {
  append(buffer, quote);
  for (char ch : s.toCharArray()) {
    escapeCharacter(buffer, ch, quote);
  }
  return append(buffer, quote);
}
/**
 * Print a list of object representations.
 *
 * @param buffer an appendable buffer onto which to write the list.
 * @param list the list of objects to write (each as with repr)
 * @param before a string to print before the list
 * @param separator a separator to print between each object
 * @param after a string to print after the list
 * @param singletonTerminator null or a string to print after the list if it is a singleton.
 *     The singleton case is notably relied upon in python syntax to distinguish
 *     a tuple of size one such as ("foo",) from a merely parenthesized object such as ("foo").
 * @param quotationMark The quotation mark to be used (' or ")
 * @return the Appendable, in fluent style.
 */
public static Appendable printList(
    Appendable buffer,
    Iterable<?> list,
    String before,
    String separator,
    String after,
    String singletonTerminator,
    char quotationMark) {
  append(buffer, before);
  int count = 0;
  for (Object item : list) {
    // The separator goes between elements, never before the first one.
    if (count > 0) {
      append(buffer, separator);
    }
    write(buffer, item, quotationMark);
    count++;
  }
  if (count == 1 && singletonTerminator != null) {
    append(buffer, singletonTerminator);
  }
  return append(buffer, after);
}
/**
 * Prints a list of object representations using the default quotation mark.
 *
 * @param buffer an appendable buffer onto which to write the list.
 * @param list the list of objects to write (each as with repr)
 * @param before a string to print before the list
 * @param separator a separator to print between each object
 * @param after a string to print after the list
 * @param singletonTerminator null or a string to print after the list if it is a singleton
 * @return the Appendable, in fluent style.
 */
public static Appendable printList(Appendable buffer, Iterable<?> list, String before,
    String separator, String after, String singletonTerminator) {
  return printList(
      buffer, list, before, separator, after, singletonTerminator, SKYLARK_QUOTATION_MARK);
}
/**
 * Print a Skylark list or tuple of object representations.
 *
 * <p>Tuples use parentheses and a trailing comma for singletons, e.g.
 * {@code ("foo",)}; lists use square brackets with no singleton terminator.
 *
 * @param buffer an appendable buffer onto which to write the list.
 * @param list the contents of the list or tuple
 * @param isTuple is it a tuple or a list?
 * @param quotationMark The quotation mark to be used (' or ")
 * @return the Appendable, in fluent style.
 */
public static Appendable printList(
    Appendable buffer, Iterable<?> list, boolean isTuple, char quotationMark) {
  return isTuple
      ? printList(buffer, list, "(", ", ", ")", ",", quotationMark)
      : printList(buffer, list, "[", ", ", "]", null, quotationMark);
}
/**
 * Renders a list of object representations to a String.
 *
 * @param list the list of objects to write (each as with repr)
 * @param before a string to print before the list
 * @param separator a separator to print between each object
 * @param after a string to print after the list
 * @param singletonTerminator null or a string to print after the list if it is a singleton.
 *     The singleton case is notably relied upon in python syntax to distinguish
 *     a tuple of size one such as ("foo",) from a merely parenthesized object such as ("foo").
 * @param quotationMark The quotation mark to be used (' or ")
 * @return a String, the representation.
 */
public static String listString(Iterable<?> list, String before, String separator, String after,
    String singletonTerminator, char quotationMark) {
  StringBuilder result = new StringBuilder();
  printList(result, list, before, separator, after, singletonTerminator, quotationMark);
  return result.toString();
}
/**
 * Renders a list of object representations to a String using the default quotation mark.
 *
 * @param list the list of objects to write (each as with repr)
 * @param before a string to print before the list
 * @param separator a separator to print between each object
 * @param after a string to print after the list
 * @param singletonTerminator null or a string to print after the list if it is a singleton
 * @return a String, the representation.
 */
public static String listString(
    Iterable<?> list, String before, String separator, String after, String singletonTerminator) {
  return listString(list, before, separator, after, singletonTerminator, SKYLARK_QUOTATION_MARK);
}
/**
 * Returns a fresh mutable list containing the elements of {@code list},
 * treating a null argument as an empty collection.
 */
public static List<?> makeList(Collection<?> list) {
  if (list == null) {
    return Lists.newArrayList();
  }
  return Lists.newArrayList(list);
}
/**
 * Converts a list of labels to their string representations, in order.
 *
 * @param labels the labels to convert, possibly null.
 * @return an immutable empty list if {@code labels} is null, otherwise a
 *     fresh list of each label's {@code toString} value.
 */
public static List<String> makeStringList(List<Label> labels) {
  if (labels == null) {
    return Collections.emptyList();
  }
  List<String> result = Lists.newArrayListWithCapacity(labels.size());
  for (Label label : labels) {
    result.add(label.toString());
  }
  return result;
}
/**
 * Perform Python-style string formatting, lazily.
 *
 * <p>Formatting is deferred until {@code toString()} or {@code formatTo} is
 * actually invoked, which avoids the cost of formatting messages that are
 * never printed.
 *
 * @param pattern a format string.
 * @param arguments positional arguments.
 * @return a lazily-formatting Formattable wrapping the pattern and arguments.
 */
public static Formattable formattable(final String pattern, Object... arguments)
    throws IllegalFormatException {
  // Snapshot the arguments so later mutation of the caller's array is not observed.
  final ImmutableList<Object> args = ImmutableList.copyOf(arguments);
  return new Formattable() {
    @Override
    public String toString() {
      return formatToString(pattern, args);
    }

    @Override
    public void formatTo(Formatter formatter, int flags, int width, int precision) {
      // flags/width/precision are ignored; only Python-style directives apply.
      Printer.formatTo(formatter.out(), pattern, args);
    }
  };
}
/**
 * Perform Python-style string formatting.
 *
 * @param pattern a format string.
 * @param arguments a tuple containing positional arguments.
 * @return the formatted string.
 * @throws IllegalFormatException if the pattern or arguments are invalid.
 */
public static String format(String pattern, Object... arguments)
    throws IllegalFormatException {
  return formatToString(pattern, ImmutableList.copyOf(arguments));
}
/**
 * Perform Python-style string formatting.
 *
 * @param pattern a format string.
 * @param arguments a tuple containing positional arguments.
 * @return the formatted string.
 * @throws IllegalFormatException if the pattern or arguments are invalid.
 */
public static String formatToString(String pattern, List<?> arguments)
    throws IllegalFormatException {
  return formatTo(new StringBuilder(), pattern, arguments).toString();
}
/**
 * Perform Python-style string formatting, as per pattern % tuple.
 * Limitations: only %d %s %r %% are supported.
 *
 * @param buffer an Appendable to output to.
 * @param pattern a format string.
 * @param arguments a list containing positional arguments.
 * @return the buffer, in fluent style.
 * @throws IllegalFormatException if the pattern is malformed, a directive is
 *     unsupported, or the arguments do not match the directives.
 */
// TODO(bazel-team): support formatting arguments, and more complex Python patterns.
public static Appendable formatTo(Appendable buffer, String pattern, List<?> arguments)
    throws IllegalFormatException {
  // N.B. MissingFormatWidthException is the only kind of IllegalFormatException
  // whose constructor can take and display arbitrary error message, hence its use below.
  int length = pattern.length();
  int argLength = arguments.size();
  int i = 0; // index of next character in pattern
  int a = 0; // index of next argument in arguments
  while (i < length) {
    // Copy literal text up to the next '%' directive verbatim.
    int p = pattern.indexOf('%', i);
    if (p == -1) {
      append(buffer, pattern, i, length);
      break;
    }
    if (p > i) {
      append(buffer, pattern, i, p);
    }
    if (p == length - 1) {
      throw new MissingFormatWidthException(
          "incomplete format pattern ends with %: " + repr(pattern));
    }
    char directive = pattern.charAt(p + 1);
    i = p + 2;
    switch (directive) {
      case '%':
        // %% is a literal percent and consumes no argument.
        append(buffer, '%');
        continue;
      case 'd':
      case 'r':
      case 's':
        if (a >= argLength) {
          throw new MissingFormatWidthException("not enough arguments for format pattern "
              + repr(pattern) + ": "
              + repr(SkylarkList.tuple(arguments)));
        }
        Object argument = arguments.get(a++);
        // Inner switch picks the rendering; every path either continues the
        // outer loop or throws, so control never falls into the default below.
        switch (directive) {
          case 'd':
            // %d accepts only integers.
            if (argument instanceof Integer) {
              append(buffer, argument.toString());
              continue;
            } else {
              throw new MissingFormatWidthException(
                  "invalid argument " + repr(argument) + " for format pattern %d");
            }
          case 'r':
            // %r uses the quoted repr form.
            write(buffer, argument);
            continue;
          case 's':
            // %s uses the plain printed form.
            print(buffer, argument);
            continue;
        }
      default:
        throw new MissingFormatWidthException(
            "unsupported format character " + repr(String.valueOf(directive))
            + " at index " + (p + 1) + " in " + repr(pattern));
    }
  }
  if (a < argLength) {
    // Mirrors Python: leftover unconsumed arguments are an error.
    throw new MissingFormatWidthException(
        "not all arguments converted during string formatting");
  }
  return buffer;
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Copyright 2005 Sun Microsystems, Inc. All rights reserved.
*/
package org.apache.jcp.xml.dsig.internal.dom;
import javax.xml.crypto.*;
import javax.xml.crypto.dsig.*;
import javax.xml.crypto.dsig.spec.SignatureMethodParameterSpec;
import java.io.IOException;
import java.security.*;
import java.security.interfaces.DSAKey;
import java.security.spec.AlgorithmParameterSpec;
import org.w3c.dom.Element;
import org.apache.xml.security.algorithms.implementations.SignatureECDSA;
import org.apache.xml.security.utils.JavaUtils;
import org.apache.jcp.xml.dsig.internal.SignerOutputStream;
/**
* DOM-based abstract implementation of SignatureMethod.
*
*/
public abstract class DOMSignatureMethod extends AbstractDOMSignatureMethod {
private static final String DOM_SIGNATURE_PROVIDER = "org.jcp.xml.dsig.internal.dom.SignatureProvider";
private static final org.slf4j.Logger LOG =
org.slf4j.LoggerFactory.getLogger(DOMSignatureMethod.class);
private SignatureMethodParameterSpec params;
private Signature signature;
// see RFC 4051 for these algorithm definitions
static final String RSA_SHA224 =
"http://www.w3.org/2001/04/xmldsig-more#rsa-sha224";
static final String RSA_SHA256 =
"http://www.w3.org/2001/04/xmldsig-more#rsa-sha256";
static final String RSA_SHA384 =
"http://www.w3.org/2001/04/xmldsig-more#rsa-sha384";
static final String RSA_SHA512 =
"http://www.w3.org/2001/04/xmldsig-more#rsa-sha512";
static final String RSA_RIPEMD160 =
"http://www.w3.org/2001/04/xmldsig-more#rsa-ripemd160";
static final String ECDSA_SHA1 =
"http://www.w3.org/2001/04/xmldsig-more#ecdsa-sha1";
static final String ECDSA_SHA224 =
"http://www.w3.org/2001/04/xmldsig-more#ecdsa-sha224";
static final String ECDSA_SHA256 =
"http://www.w3.org/2001/04/xmldsig-more#ecdsa-sha256";
static final String ECDSA_SHA384 =
"http://www.w3.org/2001/04/xmldsig-more#ecdsa-sha384";
static final String ECDSA_SHA512 =
"http://www.w3.org/2001/04/xmldsig-more#ecdsa-sha512";
static final String DSA_SHA256 =
"http://www.w3.org/2009/xmldsig11#dsa-sha256";
// see RFC 6931 for these algorithm definitions
static final String ECDSA_RIPEMD160 =
"http://www.w3.org/2007/05/xmldsig-more#ecdsa-ripemd160";
static final String RSA_SHA1_MGF1 =
"http://www.w3.org/2007/05/xmldsig-more#sha1-rsa-MGF1";
static final String RSA_SHA224_MGF1 =
"http://www.w3.org/2007/05/xmldsig-more#sha224-rsa-MGF1";
static final String RSA_SHA256_MGF1 =
"http://www.w3.org/2007/05/xmldsig-more#sha256-rsa-MGF1";
static final String RSA_SHA384_MGF1 =
"http://www.w3.org/2007/05/xmldsig-more#sha384-rsa-MGF1";
static final String RSA_SHA512_MGF1 =
"http://www.w3.org/2007/05/xmldsig-more#sha512-rsa-MGF1";
static final String RSA_RIPEMD160_MGF1 =
"http://www.w3.org/2007/05/xmldsig-more#ripemd160-rsa-MGF1";
/**
* Creates a <code>DOMSignatureMethod</code>.
*
* @param params the algorithm-specific params (may be <code>null</code>)
* @throws InvalidAlgorithmParameterException if the parameters are not
* appropriate for this signature method
*/
DOMSignatureMethod(AlgorithmParameterSpec params)
throws InvalidAlgorithmParameterException
{
if (params != null &&
!(params instanceof SignatureMethodParameterSpec)) {
throw new InvalidAlgorithmParameterException
("params must be of type SignatureMethodParameterSpec");
}
checkParams((SignatureMethodParameterSpec)params);
this.params = (SignatureMethodParameterSpec)params;
}
/**
* Creates a <code>DOMSignatureMethod</code> from an element. This ctor
* invokes the {@link #unmarshalParams unmarshalParams} method to
* unmarshal any algorithm-specific input parameters.
*
* @param smElem a SignatureMethod element
*/
DOMSignatureMethod(Element smElem) throws MarshalException {
Element paramsElem = DOMUtils.getFirstChildElement(smElem);
if (paramsElem != null) {
params = unmarshalParams(paramsElem);
}
try {
checkParams(params);
} catch (InvalidAlgorithmParameterException iape) {
throw new MarshalException(iape);
}
}
static SignatureMethod unmarshal(Element smElem) throws MarshalException {
String alg = DOMUtils.getAttributeValue(smElem, "Algorithm");
if (alg.equals(SignatureMethod.RSA_SHA1)) {
return new SHA1withRSA(smElem);
} else if (alg.equals(RSA_SHA224)) {
return new SHA224withRSA(smElem);
} else if (alg.equals(RSA_SHA256)) {
return new SHA256withRSA(smElem);
} else if (alg.equals(RSA_SHA384)) {
return new SHA384withRSA(smElem);
} else if (alg.equals(RSA_SHA512)) {
return new SHA512withRSA(smElem);
} else if (alg.equals(RSA_RIPEMD160)) {
return new RIPEMD160withRSA(smElem);
} else if (alg.equals(RSA_SHA1_MGF1)) {
return new SHA1withRSAandMGF1(smElem);
} else if (alg.equals(RSA_SHA224_MGF1)) {
return new SHA224withRSAandMGF1(smElem);
} else if (alg.equals(RSA_SHA256_MGF1)) {
return new SHA256withRSAandMGF1(smElem);
} else if (alg.equals(RSA_SHA384_MGF1)) {
return new SHA384withRSAandMGF1(smElem);
} else if (alg.equals(RSA_SHA512_MGF1)) {
return new SHA512withRSAandMGF1(smElem);
} else if (alg.equals(DOMRSAPSSSignatureMethod.RSA_PSS)) {
return new DOMRSAPSSSignatureMethod.RSAPSS(smElem);
} else if (alg.equals(RSA_RIPEMD160_MGF1)) {
return new RIPEMD160withRSAandMGF1(smElem);
} else if (alg.equals(SignatureMethod.DSA_SHA1)) {
return new SHA1withDSA(smElem);
} else if (alg.equals(DSA_SHA256)) {
return new SHA256withDSA(smElem);
} else if (alg.equals(ECDSA_SHA1)) {
return new SHA1withECDSA(smElem);
} else if (alg.equals(ECDSA_SHA224)) {
return new SHA224withECDSA(smElem);
} else if (alg.equals(ECDSA_SHA256)) {
return new SHA256withECDSA(smElem);
} else if (alg.equals(ECDSA_SHA384)) {
return new SHA384withECDSA(smElem);
} else if (alg.equals(ECDSA_SHA512)) {
return new SHA512withECDSA(smElem);
} else if (alg.equals(ECDSA_RIPEMD160)) {
return new RIPEMD160withECDSA(smElem);
} else if (alg.equals(SignatureMethod.HMAC_SHA1)) {
return new DOMHMACSignatureMethod.SHA1(smElem);
} else if (alg.equals(DOMHMACSignatureMethod.HMAC_SHA224)) {
return new DOMHMACSignatureMethod.SHA224(smElem);
} else if (alg.equals(DOMHMACSignatureMethod.HMAC_SHA256)) {
return new DOMHMACSignatureMethod.SHA256(smElem);
} else if (alg.equals(DOMHMACSignatureMethod.HMAC_SHA384)) {
return new DOMHMACSignatureMethod.SHA384(smElem);
} else if (alg.equals(DOMHMACSignatureMethod.HMAC_SHA512)) {
return new DOMHMACSignatureMethod.SHA512(smElem);
} else if (alg.equals(DOMHMACSignatureMethod.HMAC_RIPEMD160)) {
return new DOMHMACSignatureMethod.RIPEMD160(smElem);
} else {
throw new MarshalException
("unsupported SignatureMethod algorithm: " + alg);
}
}
public final AlgorithmParameterSpec getParameterSpec() {
return params;
}
boolean verify(Key key, SignedInfo si, byte[] sig,
XMLValidateContext context)
throws InvalidKeyException, SignatureException, XMLSignatureException
{
if (key == null || si == null || sig == null) {
throw new NullPointerException();
}
if (!(key instanceof PublicKey)) {
throw new InvalidKeyException("key must be PublicKey");
}
if (signature == null) {
try {
Provider p = (Provider)context.getProperty(DOM_SIGNATURE_PROVIDER);
signature = (p == null)
? Signature.getInstance(getJCAAlgorithm())
: Signature.getInstance(getJCAAlgorithm(), p);
} catch (NoSuchAlgorithmException nsae) {
throw new XMLSignatureException(nsae);
}
}
signature.initVerify((PublicKey)key);
LOG.debug("Signature provider: {}", signature.getProvider());
LOG.debug("Verifying with key: {}", key);
LOG.debug("JCA Algorithm: {}", getJCAAlgorithm());
LOG.debug("Signature Bytes length: {}", sig.length);
try (SignerOutputStream outputStream = new SignerOutputStream(signature)) {
((DOMSignedInfo)si).canonicalize(context, outputStream);
Type type = getAlgorithmType();
if (type == Type.DSA) {
int size = ((DSAKey)key).getParams().getQ().bitLength();
return signature.verify(JavaUtils.convertDsaXMLDSIGtoASN1(sig,
size/8));
} else if (type == Type.ECDSA) {
return signature.verify(SignatureECDSA.convertXMLDSIGtoASN1(sig));
} else {
return signature.verify(sig);
}
} catch (IOException ioe) {
throw new XMLSignatureException(ioe);
}
}
byte[] sign(Key key, SignedInfo si, XMLSignContext context)
throws InvalidKeyException, XMLSignatureException
{
if (key == null || si == null) {
throw new NullPointerException();
}
if (!(key instanceof PrivateKey)) {
throw new InvalidKeyException("key must be PrivateKey");
}
if (signature == null) {
try {
Provider p = (Provider)context.getProperty(DOM_SIGNATURE_PROVIDER);
signature = (p == null)
? Signature.getInstance(getJCAAlgorithm())
: Signature.getInstance(getJCAAlgorithm(), p);
} catch (NoSuchAlgorithmException nsae) {
throw new XMLSignatureException(nsae);
}
}
signature.initSign((PrivateKey)key);
LOG.debug("Signature provider: {}", signature.getProvider());
LOG.debug("Signing with key: {}", key);
LOG.debug("JCA Algorithm: {}", getJCAAlgorithm());
try (SignerOutputStream outputStream = new SignerOutputStream(signature)) {
((DOMSignedInfo)si).canonicalize(context, outputStream);
Type type = getAlgorithmType();
if (type == Type.DSA) {
int size = ((DSAKey)key).getParams().getQ().bitLength();
return JavaUtils.convertDsaASN1toXMLDSIG(signature.sign(),
size/8);
} else if (type == Type.ECDSA) {
return SignatureECDSA.convertASN1toXMLDSIG(signature.sign());
} else {
return signature.sign();
}
} catch (SignatureException se) {
throw new XMLSignatureException(se);
} catch (IOException ioe) {
throw new XMLSignatureException(ioe);
}
}
static final class SHA1withRSA extends DOMSignatureMethod {
SHA1withRSA(AlgorithmParameterSpec params)
throws InvalidAlgorithmParameterException {
super(params);
}
SHA1withRSA(Element dmElem) throws MarshalException {
super(dmElem);
}
@Override
public String getAlgorithm() {
return SignatureMethod.RSA_SHA1;
}
@Override
String getJCAAlgorithm() {
return "SHA1withRSA";
}
@Override
Type getAlgorithmType() {
return Type.RSA;
}
}
static final class SHA224withRSA extends DOMSignatureMethod {
SHA224withRSA(AlgorithmParameterSpec params)
throws InvalidAlgorithmParameterException {
super(params);
}
SHA224withRSA(Element dmElem) throws MarshalException {
super(dmElem);
}
public String getAlgorithm() {
return RSA_SHA224;
}
String getJCAAlgorithm() {
return "SHA224withRSA";
}
Type getAlgorithmType() {
return Type.RSA;
}
}
static final class SHA256withRSA extends DOMSignatureMethod {
SHA256withRSA(AlgorithmParameterSpec params)
throws InvalidAlgorithmParameterException {
super(params);
}
SHA256withRSA(Element dmElem) throws MarshalException {
super(dmElem);
}
public String getAlgorithm() {
return RSA_SHA256;
}
String getJCAAlgorithm() {
return "SHA256withRSA";
}
Type getAlgorithmType() {
return Type.RSA;
}
}
static final class SHA384withRSA extends DOMSignatureMethod {
SHA384withRSA(AlgorithmParameterSpec params)
throws InvalidAlgorithmParameterException {
super(params);
}
SHA384withRSA(Element dmElem) throws MarshalException {
super(dmElem);
}
public String getAlgorithm() {
return RSA_SHA384;
}
String getJCAAlgorithm() {
return "SHA384withRSA";
}
Type getAlgorithmType() {
return Type.RSA;
}
}
static final class SHA512withRSA extends DOMSignatureMethod {
SHA512withRSA(AlgorithmParameterSpec params)
throws InvalidAlgorithmParameterException {
super(params);
}
SHA512withRSA(Element dmElem) throws MarshalException {
super(dmElem);
}
public String getAlgorithm() {
return RSA_SHA512;
}
String getJCAAlgorithm() {
return "SHA512withRSA";
}
Type getAlgorithmType() {
return Type.RSA;
}
}
static final class RIPEMD160withRSA extends DOMSignatureMethod {
RIPEMD160withRSA(AlgorithmParameterSpec params)
throws InvalidAlgorithmParameterException {
super(params);
}
RIPEMD160withRSA(Element dmElem) throws MarshalException {
super(dmElem);
}
@Override
public String getAlgorithm() {
return RSA_RIPEMD160;
}
@Override
String getJCAAlgorithm() {
return "RIPEMD160withRSA";
}
@Override
Type getAlgorithmType() {
return Type.RSA;
}
}
static final class SHA1withRSAandMGF1 extends DOMSignatureMethod {
SHA1withRSAandMGF1(AlgorithmParameterSpec params)
throws InvalidAlgorithmParameterException {
super(params);
}
SHA1withRSAandMGF1(Element dmElem) throws MarshalException {
super(dmElem);
}
@Override
public String getAlgorithm() {
return RSA_SHA1_MGF1;
}
@Override
String getJCAAlgorithm() {
return "SHA1withRSAandMGF1";
}
@Override
Type getAlgorithmType() {
return Type.RSA;
}
}
static final class SHA224withRSAandMGF1 extends DOMSignatureMethod {
SHA224withRSAandMGF1(AlgorithmParameterSpec params)
throws InvalidAlgorithmParameterException {
super(params);
}
SHA224withRSAandMGF1(Element dmElem) throws MarshalException {
super(dmElem);
}
@Override
public String getAlgorithm() {
return RSA_SHA224_MGF1;
}
@Override
String getJCAAlgorithm() {
return "SHA224withRSAandMGF1";
}
@Override
Type getAlgorithmType() {
return Type.RSA;
}
}
static final class SHA256withRSAandMGF1 extends DOMSignatureMethod {
SHA256withRSAandMGF1(AlgorithmParameterSpec params)
throws InvalidAlgorithmParameterException {
super(params);
}
SHA256withRSAandMGF1(Element dmElem) throws MarshalException {
super(dmElem);
}
@Override
public String getAlgorithm() {
return RSA_SHA256_MGF1;
}
@Override
String getJCAAlgorithm() {
return "SHA256withRSAandMGF1";
}
@Override
Type getAlgorithmType() {
return Type.RSA;
}
}
static final class SHA384withRSAandMGF1 extends DOMSignatureMethod {
SHA384withRSAandMGF1(AlgorithmParameterSpec params)
throws InvalidAlgorithmParameterException {
super(params);
}
SHA384withRSAandMGF1(Element dmElem) throws MarshalException {
super(dmElem);
}
@Override
public String getAlgorithm() {
return RSA_SHA384_MGF1;
}
@Override
String getJCAAlgorithm() {
return "SHA384withRSAandMGF1";
}
@Override
Type getAlgorithmType() {
return Type.RSA;
}
}
static final class SHA512withRSAandMGF1 extends DOMSignatureMethod {
SHA512withRSAandMGF1(AlgorithmParameterSpec params)
throws InvalidAlgorithmParameterException {
super(params);
}
SHA512withRSAandMGF1(Element dmElem) throws MarshalException {
super(dmElem);
}
@Override
public String getAlgorithm() {
return RSA_SHA512_MGF1;
}
@Override
String getJCAAlgorithm() {
return "SHA512withRSAandMGF1";
}
@Override
Type getAlgorithmType() {
return Type.RSA;
}
}
static final class RIPEMD160withRSAandMGF1 extends DOMSignatureMethod {
RIPEMD160withRSAandMGF1(AlgorithmParameterSpec params)
throws InvalidAlgorithmParameterException {
super(params);
}
RIPEMD160withRSAandMGF1(Element dmElem) throws MarshalException {
super(dmElem);
}
@Override
public String getAlgorithm() {
return RSA_RIPEMD160_MGF1;
}
@Override
String getJCAAlgorithm() {
return "RIPEMD160withRSAandMGF1";
}
@Override
Type getAlgorithmType() {
return Type.RSA;
}
}
static final class SHA1withDSA extends DOMSignatureMethod {
SHA1withDSA(AlgorithmParameterSpec params)
throws InvalidAlgorithmParameterException {
super(params);
}
SHA1withDSA(Element dmElem) throws MarshalException {
super(dmElem);
}
public String getAlgorithm() {
return SignatureMethod.DSA_SHA1;
}
String getJCAAlgorithm() {
return "SHA1withDSA";
}
Type getAlgorithmType() {
return Type.DSA;
}
}
static final class SHA256withDSA extends DOMSignatureMethod {
SHA256withDSA(AlgorithmParameterSpec params)
throws InvalidAlgorithmParameterException {
super(params);
}
SHA256withDSA(Element dmElem) throws MarshalException {
super(dmElem);
}
public String getAlgorithm() {
return DSA_SHA256;
}
String getJCAAlgorithm() {
return "SHA256withDSA";
}
Type getAlgorithmType() {
return Type.DSA;
}
}
static final class SHA1withECDSA extends DOMSignatureMethod {
SHA1withECDSA(AlgorithmParameterSpec params)
throws InvalidAlgorithmParameterException {
super(params);
}
SHA1withECDSA(Element dmElem) throws MarshalException {
super(dmElem);
}
public String getAlgorithm() {
return ECDSA_SHA1;
}
String getJCAAlgorithm() {
return "SHA1withECDSA";
}
Type getAlgorithmType() {
return Type.ECDSA;
}
}
static final class SHA224withECDSA extends DOMSignatureMethod {
SHA224withECDSA(AlgorithmParameterSpec params)
throws InvalidAlgorithmParameterException {
super(params);
}
SHA224withECDSA(Element dmElem) throws MarshalException {
super(dmElem);
}
@Override
public String getAlgorithm() {
return ECDSA_SHA224;
}
@Override
String getJCAAlgorithm() {
return "SHA224withECDSA";
}
@Override
Type getAlgorithmType() {
return Type.ECDSA;
}
}
static final class SHA256withECDSA extends DOMSignatureMethod {
SHA256withECDSA(AlgorithmParameterSpec params)
throws InvalidAlgorithmParameterException {
super(params);
}
SHA256withECDSA(Element dmElem) throws MarshalException {
super(dmElem);
}
public String getAlgorithm() {
return ECDSA_SHA256;
}
String getJCAAlgorithm() {
return "SHA256withECDSA";
}
Type getAlgorithmType() {
return Type.ECDSA;
}
}
static final class SHA384withECDSA extends DOMSignatureMethod {
SHA384withECDSA(AlgorithmParameterSpec params)
throws InvalidAlgorithmParameterException {
super(params);
}
SHA384withECDSA(Element dmElem) throws MarshalException {
super(dmElem);
}
public String getAlgorithm() {
return ECDSA_SHA384;
}
String getJCAAlgorithm() {
return "SHA384withECDSA";
}
Type getAlgorithmType() {
return Type.ECDSA;
}
}
static final class SHA512withECDSA extends DOMSignatureMethod {
SHA512withECDSA(AlgorithmParameterSpec params)
throws InvalidAlgorithmParameterException {
super(params);
}
SHA512withECDSA(Element dmElem) throws MarshalException {
super(dmElem);
}
public String getAlgorithm() {
return ECDSA_SHA512;
}
String getJCAAlgorithm() {
return "SHA512withECDSA";
}
Type getAlgorithmType() {
return Type.ECDSA;
}
}
static final class RIPEMD160withECDSA extends DOMSignatureMethod {
RIPEMD160withECDSA(AlgorithmParameterSpec params)
throws InvalidAlgorithmParameterException {
super(params);
}
RIPEMD160withECDSA(Element dmElem) throws MarshalException {
super(dmElem);
}
@Override
public String getAlgorithm() {
return ECDSA_RIPEMD160;
}
@Override
String getJCAAlgorithm() {
return "RIPEMD160withECDSA";
}
@Override
Type getAlgorithmType() {
return Type.ECDSA;
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.functions.hive.conversion;
import org.apache.flink.annotation.Internal;
import org.apache.flink.table.catalog.exceptions.CatalogException;
import org.apache.flink.table.catalog.hive.client.HiveShim;
import org.apache.flink.table.catalog.hive.util.HiveReflectionUtils;
import org.apache.flink.table.catalog.hive.util.HiveTypeUtil;
import org.apache.flink.table.functions.hive.FlinkHiveUDFException;
import org.apache.flink.table.types.DataType;
import org.apache.flink.table.types.logical.ArrayType;
import org.apache.flink.table.types.logical.CharType;
import org.apache.flink.table.types.logical.LogicalType;
import org.apache.flink.table.types.logical.MapType;
import org.apache.flink.table.types.logical.RowType;
import org.apache.flink.table.types.logical.VarCharType;
import org.apache.flink.types.Row;
import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.common.type.HiveVarchar;
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.BinaryObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.ByteObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveCharObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveVarcharObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.VoidObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantBinaryObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantBooleanObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantByteObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantDateObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantDoubleObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantFloatObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantHiveCharObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantHiveDecimalObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantHiveVarcharObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantIntObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantLongObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantShortObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantStringObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantTimestampObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
import javax.annotation.Nonnull;
import java.lang.reflect.Array;
import java.lang.reflect.Constructor;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Util for any ObjectInspector related inspection and conversion of Hive data to/from Flink data.
 *
 * <p>Hive ObjectInspector is a group of flexible APIs to inspect a value in different data
 * representations, and developers can extend those APIs as needed, so technically, object
 * inspectors support arbitrary data types in java.
 */
@Internal
public class HiveInspectors {

    /**
     * Get an array of ObjectInspector from the given array of args and their types.
     *
     * <p>A {@code null} argument gets a standard (non-constant) Java object inspector derived
     * from its Flink type. A non-null argument is first converted to its Hive representation
     * and then wrapped in a constant object inspector, so Hive UDFs can see it as a constant.
     *
     * @param hiveShim shim bridging version-specific Hive APIs
     * @param args constant argument values; entries may be null
     * @param argTypes Flink data types of the arguments, same length as {@code args}
     * @return one ObjectInspector per argument
     */
    public static ObjectInspector[] toInspectors(
            HiveShim hiveShim, Object[] args, DataType[] argTypes) {
        assert args.length == argTypes.length;
        ObjectInspector[] argumentInspectors = new ObjectInspector[argTypes.length];
        for (int i = 0; i < argTypes.length; i++) {
            Object constant = args[i];
            if (constant == null) {
                argumentInspectors[i] =
                        TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(
                                HiveTypeUtil.toHiveTypeInfo(argTypes[i], false));
            } else {
                // Non-null constants are expected to be primitives here (the cast enforces it);
                // convert the Flink value to the Hive object before building the constant
                // inspector.
                PrimitiveTypeInfo primitiveTypeInfo =
                        (PrimitiveTypeInfo) HiveTypeUtil.toHiveTypeInfo(argTypes[i], false);
                constant =
                        getConversion(
                                        getObjectInspector(primitiveTypeInfo),
                                        argTypes[i].getLogicalType(),
                                        hiveShim)
                                .toHiveObject(constant);
                argumentInspectors[i] =
                        getObjectInspectorForPrimitiveConstant(
                                primitiveTypeInfo, constant, hiveShim);
            }
        }
        return argumentInspectors;
    }

    /**
     * Get conversion for converting Flink object to Hive object from an ObjectInspector and the
     * corresponding Flink DataType.
     *
     * @param inspector Hive object inspector describing the target Hive representation
     * @param dataType logical type of the Flink-side value being converted
     * @param hiveShim shim used for version-specific date/timestamp conversions
     * @return a conversion that maps a Flink object (possibly null) to its Hive counterpart
     * @throws FlinkHiveUDFException if the inspector is not supported
     */
    public static HiveObjectConversion getConversion(
            ObjectInspector inspector, LogicalType dataType, HiveShim hiveShim) {
        if (inspector instanceof PrimitiveObjectInspector) {
            HiveObjectConversion conversion;
            // For these primitive categories the Flink and Hive Java representations coincide,
            // so the identity conversion suffices.
            if (inspector instanceof BooleanObjectInspector
                    || inspector instanceof StringObjectInspector
                    || inspector instanceof ByteObjectInspector
                    || inspector instanceof ShortObjectInspector
                    || inspector instanceof IntObjectInspector
                    || inspector instanceof LongObjectInspector
                    || inspector instanceof FloatObjectInspector
                    || inspector instanceof DoubleObjectInspector
                    || inspector instanceof BinaryObjectInspector
                    || inspector instanceof VoidObjectInspector) {
                conversion = IdentityConversion.INSTANCE;
            } else if (inspector instanceof DateObjectInspector) {
                // Date/timestamp classes differ across Hive versions, so delegate to the shim.
                conversion = hiveShim::toHiveDate;
            } else if (inspector instanceof TimestampObjectInspector) {
                conversion = hiveShim::toHiveTimestamp;
            } else if (inspector instanceof HiveCharObjectInspector) {
                // CHAR/VARCHAR carry a length constraint taken from the Flink logical type.
                conversion =
                        o ->
                                o == null
                                        ? null
                                        : new HiveChar(
                                                (String) o, ((CharType) dataType).getLength());
            } else if (inspector instanceof HiveVarcharObjectInspector) {
                conversion =
                        o ->
                                o == null
                                        ? null
                                        : new HiveVarchar(
                                                (String) o, ((VarCharType) dataType).getLength());
            } else if (inspector instanceof HiveDecimalObjectInspector) {
                conversion = o -> o == null ? null : HiveDecimal.create((BigDecimal) o);
            } else {
                throw new FlinkHiveUDFException(
                        "Unsupported primitive object inspector " + inspector.getClass().getName());
            }
            // if the object inspector prefers Writable objects, we should add an extra conversion
            // for that
            // currently this happens for constant arguments for UDFs
            if (((PrimitiveObjectInspector) inspector).preferWritable()) {
                conversion = new WritableHiveObjectConversion(conversion, hiveShim);
            }
            return conversion;
        }
        if (inspector instanceof ListObjectInspector) {
            // Recursively build the element conversion once, then apply it per element.
            HiveObjectConversion eleConvert =
                    getConversion(
                            ((ListObjectInspector) inspector).getListElementObjectInspector(),
                            ((ArrayType) dataType).getElementType(),
                            hiveShim);
            return o -> {
                if (o == null) {
                    return null;
                }
                // Flink arrays arrive as Object[]; Hive lists are java.util.List.
                Object[] array = (Object[]) o;
                List<Object> result = new ArrayList<>();
                for (Object ele : array) {
                    result.add(eleConvert.toHiveObject(ele));
                }
                return result;
            };
        }
        if (inspector instanceof MapObjectInspector) {
            MapObjectInspector mapInspector = (MapObjectInspector) inspector;
            MapType kvType = (MapType) dataType;
            // Key and value conversions are resolved once, outside the returned lambda.
            HiveObjectConversion keyConversion =
                    getConversion(
                            mapInspector.getMapKeyObjectInspector(), kvType.getKeyType(), hiveShim);
            HiveObjectConversion valueConversion =
                    getConversion(
                            mapInspector.getMapValueObjectInspector(),
                            kvType.getValueType(),
                            hiveShim);
            return o -> {
                if (o == null) {
                    return null;
                }
                Map<Object, Object> map = (Map) o;
                Map<Object, Object> result = new HashMap<>(map.size());
                for (Map.Entry<Object, Object> entry : map.entrySet()) {
                    result.put(
                            keyConversion.toHiveObject(entry.getKey()),
                            valueConversion.toHiveObject(entry.getValue()));
                }
                return result;
            };
        }
        if (inspector instanceof StructObjectInspector) {
            StructObjectInspector structInspector = (StructObjectInspector) inspector;
            List<? extends StructField> structFields = structInspector.getAllStructFieldRefs();
            List<RowType.RowField> rowFields = ((RowType) dataType).getFields();
            // Per-field conversions are paired positionally: struct field i <-> row field i.
            HiveObjectConversion[] conversions = new HiveObjectConversion[structFields.size()];
            for (int i = 0; i < structFields.size(); i++) {
                conversions[i] =
                        getConversion(
                                structFields.get(i).getFieldObjectInspector(),
                                rowFields.get(i).getType(),
                                hiveShim);
            }
            return o -> {
                if (o == null) {
                    return null;
                }
                // A Flink Row becomes a Hive struct represented as a List of field values.
                Row row = (Row) o;
                List<Object> result = new ArrayList<>(row.getArity());
                for (int i = 0; i < row.getArity(); i++) {
                    result.add(conversions[i].toHiveObject(row.getField(i)));
                }
                return result;
            };
        }
        throw new FlinkHiveUDFException(
                String.format(
                        "Flink doesn't support convert object conversion for %s yet", inspector));
    }

    /**
     * Converts a Hive object to Flink object with an ObjectInspector.
     *
     * <p>Null data, or a VOID inspector, yields {@code null}. An unrecognized primitive inspector
     * falls through the primitive branch and ends in the trailing FlinkHiveUDFException.
     *
     * @param inspector Hive object inspector describing {@code data}
     * @param data the Hive-side value; may be null
     * @param hiveShim shim used for version-specific date/timestamp conversions
     * @return the equivalent Flink object, or null
     * @throws FlinkHiveUDFException if the inspector is not supported
     */
    public static Object toFlinkObject(ObjectInspector inspector, Object data, HiveShim hiveShim) {
        if (data == null || inspector instanceof VoidObjectInspector) {
            return null;
        }
        if (inspector instanceof PrimitiveObjectInspector) {
            if (inspector instanceof BooleanObjectInspector
                    || inspector instanceof StringObjectInspector
                    || inspector instanceof ByteObjectInspector
                    || inspector instanceof ShortObjectInspector
                    || inspector instanceof IntObjectInspector
                    || inspector instanceof LongObjectInspector
                    || inspector instanceof FloatObjectInspector
                    || inspector instanceof DoubleObjectInspector
                    || inspector instanceof BinaryObjectInspector) {
                // Java representations match directly for these categories.
                PrimitiveObjectInspector poi = (PrimitiveObjectInspector) inspector;
                return poi.getPrimitiveJavaObject(data);
            } else if (inspector instanceof DateObjectInspector) {
                PrimitiveObjectInspector poi = (PrimitiveObjectInspector) inspector;
                return hiveShim.toFlinkDate(poi.getPrimitiveJavaObject(data));
            } else if (inspector instanceof TimestampObjectInspector) {
                PrimitiveObjectInspector poi = (PrimitiveObjectInspector) inspector;
                return hiveShim.toFlinkTimestamp(poi.getPrimitiveJavaObject(data));
            } else if (inspector instanceof HiveCharObjectInspector) {
                // Unwrap HiveChar/HiveVarchar to plain String for Flink.
                HiveCharObjectInspector oi = (HiveCharObjectInspector) inspector;
                return oi.getPrimitiveJavaObject(data).getValue();
            } else if (inspector instanceof HiveVarcharObjectInspector) {
                HiveVarcharObjectInspector oi = (HiveVarcharObjectInspector) inspector;
                return oi.getPrimitiveJavaObject(data).getValue();
            } else if (inspector instanceof HiveDecimalObjectInspector) {
                HiveDecimalObjectInspector oi = (HiveDecimalObjectInspector) inspector;
                return oi.getPrimitiveJavaObject(data).bigDecimalValue();
            }
        }
        if (inspector instanceof ListObjectInspector) {
            ListObjectInspector listInspector = (ListObjectInspector) inspector;
            List<?> list = listInspector.getList(data);
            // flink expects a specific array type (e.g. Integer[] instead of Object[]), so we have
            // to get the element class
            ObjectInspector elementInspector = listInspector.getListElementObjectInspector();
            Object[] result =
                    (Object[])
                            Array.newInstance(
                                    HiveTypeUtil.toFlinkType(elementInspector).getConversionClass(),
                                    list.size());
            for (int i = 0; i < list.size(); i++) {
                result[i] = toFlinkObject(elementInspector, list.get(i), hiveShim);
            }
            return result;
        }
        if (inspector instanceof MapObjectInspector) {
            MapObjectInspector mapInspector = (MapObjectInspector) inspector;
            Map<?, ?> map = mapInspector.getMap(data);
            Map<Object, Object> result = new HashMap<>(map.size());
            for (Map.Entry<?, ?> entry : map.entrySet()) {
                // Keys and values are converted recursively with their own inspectors.
                result.put(
                        toFlinkObject(
                                mapInspector.getMapKeyObjectInspector(), entry.getKey(), hiveShim),
                        toFlinkObject(
                                mapInspector.getMapValueObjectInspector(),
                                entry.getValue(),
                                hiveShim));
            }
            return result;
        }
        if (inspector instanceof StructObjectInspector) {
            StructObjectInspector structInspector = (StructObjectInspector) inspector;
            List<? extends StructField> fields = structInspector.getAllStructFieldRefs();
            Row row = new Row(fields.size());
            // StandardStructObjectInspector.getStructFieldData in Hive-1.2.1 only accepts array or
            // list as data
            if (!data.getClass().isArray()
                    && !(data instanceof List)
                    && (inspector instanceof StandardStructObjectInspector)) {
                data = new Object[] {data};
            }
            for (int i = 0; i < row.getArity(); i++) {
                row.setField(
                        i,
                        toFlinkObject(
                                fields.get(i).getFieldObjectInspector(),
                                structInspector.getStructFieldData(data, fields.get(i)),
                                hiveShim));
            }
            return row;
        }
        throw new FlinkHiveUDFException(
                String.format("Unwrap does not support ObjectInspector '%s' yet", inspector));
    }

    /** Get Hive {@link ObjectInspector} for a Flink {@link DataType}. */
    public static ObjectInspector getObjectInspector(DataType flinkType) {
        return getObjectInspector(flinkType.getLogicalType());
    }

    /** Get Hive {@link ObjectInspector} for a Flink {@link LogicalType}. */
    public static ObjectInspector getObjectInspector(LogicalType flinkType) {
        return getObjectInspector(HiveTypeUtil.toHiveTypeInfo(flinkType, true));
    }

    /**
     * Builds a writable constant {@link ObjectInspector} for a non-null primitive constant.
     *
     * <p>The value is first converted to its Writable form via the shim. Most categories are
     * created reflectively by class name through HiveReflectionUtils; CHAR/VARCHAR/DECIMAL need a
     * type-info-carrying constructor that is not publicly accessible (hence {@code
     * setAccessible(true)}).
     *
     * @param primitiveTypeInfo Hive primitive type of the constant
     * @param value the non-null constant value (in Hive object form)
     * @param hiveShim shim used to convert the value to a Writable
     * @throws FlinkHiveUDFException if no constant inspector exists for the category or
     *     reflective construction fails
     */
    private static ObjectInspector getObjectInspectorForPrimitiveConstant(
            PrimitiveTypeInfo primitiveTypeInfo, @Nonnull Object value, HiveShim hiveShim) {
        String className;
        value = hiveShim.hivePrimitiveToWritable(value);
        switch (primitiveTypeInfo.getPrimitiveCategory()) {
            case BOOLEAN:
                className = WritableConstantBooleanObjectInspector.class.getName();
                return HiveReflectionUtils.createConstantObjectInspector(className, value);
            case BYTE:
                className = WritableConstantByteObjectInspector.class.getName();
                return HiveReflectionUtils.createConstantObjectInspector(className, value);
            case SHORT:
                className = WritableConstantShortObjectInspector.class.getName();
                return HiveReflectionUtils.createConstantObjectInspector(className, value);
            case INT:
                className = WritableConstantIntObjectInspector.class.getName();
                return HiveReflectionUtils.createConstantObjectInspector(className, value);
            case LONG:
                className = WritableConstantLongObjectInspector.class.getName();
                return HiveReflectionUtils.createConstantObjectInspector(className, value);
            case FLOAT:
                className = WritableConstantFloatObjectInspector.class.getName();
                return HiveReflectionUtils.createConstantObjectInspector(className, value);
            case DOUBLE:
                className = WritableConstantDoubleObjectInspector.class.getName();
                return HiveReflectionUtils.createConstantObjectInspector(className, value);
            case STRING:
                className = WritableConstantStringObjectInspector.class.getName();
                return HiveReflectionUtils.createConstantObjectInspector(className, value);
            case CHAR:
                try {
                    // Constructor takes (CharTypeInfo, writable value) and is non-public.
                    Constructor<WritableConstantHiveCharObjectInspector> constructor =
                            WritableConstantHiveCharObjectInspector.class.getDeclaredConstructor(
                                    CharTypeInfo.class, value.getClass());
                    constructor.setAccessible(true);
                    return constructor.newInstance(primitiveTypeInfo, value);
                } catch (Exception e) {
                    throw new FlinkHiveUDFException(
                            "Failed to create writable constant object inspector", e);
                }
            case VARCHAR:
                try {
                    Constructor<WritableConstantHiveVarcharObjectInspector> constructor =
                            WritableConstantHiveVarcharObjectInspector.class.getDeclaredConstructor(
                                    VarcharTypeInfo.class, value.getClass());
                    constructor.setAccessible(true);
                    return constructor.newInstance(primitiveTypeInfo, value);
                } catch (Exception e) {
                    throw new FlinkHiveUDFException(
                            "Failed to create writable constant object inspector", e);
                }
            case DATE:
                className = WritableConstantDateObjectInspector.class.getName();
                return HiveReflectionUtils.createConstantObjectInspector(className, value);
            case TIMESTAMP:
                className = WritableConstantTimestampObjectInspector.class.getName();
                return HiveReflectionUtils.createConstantObjectInspector(className, value);
            case DECIMAL:
                try {
                    Constructor<WritableConstantHiveDecimalObjectInspector> constructor =
                            WritableConstantHiveDecimalObjectInspector.class.getDeclaredConstructor(
                                    DecimalTypeInfo.class, value.getClass());
                    constructor.setAccessible(true);
                    return constructor.newInstance(primitiveTypeInfo, value);
                } catch (Exception e) {
                    throw new FlinkHiveUDFException(
                            "Failed to create writable constant object inspector", e);
                }
            case BINARY:
                className = WritableConstantBinaryObjectInspector.class.getName();
                return HiveReflectionUtils.createConstantObjectInspector(className, value);
            case UNKNOWN:
            case VOID:
                // If type is null, we use the Constant String to replace
                className = WritableConstantStringObjectInspector.class.getName();
                return HiveReflectionUtils.createConstantObjectInspector(
                        className, value.toString());
            default:
                throw new FlinkHiveUDFException(
                        String.format(
                                "Cannot find ConstantObjectInspector for %s", primitiveTypeInfo));
        }
    }

    /**
     * Get Hive {@link ObjectInspector} for a Hive {@link TypeInfo}, recursing into LIST/MAP/STRUCT
     * element types.
     *
     * @throws CatalogException if the type category is not supported
     */
    public static ObjectInspector getObjectInspector(TypeInfo type) {
        switch (type.getCategory()) {
            case PRIMITIVE:
                PrimitiveTypeInfo primitiveType = (PrimitiveTypeInfo) type;
                return PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(
                        primitiveType);
            case LIST:
                ListTypeInfo listType = (ListTypeInfo) type;
                return ObjectInspectorFactory.getStandardListObjectInspector(
                        getObjectInspector(listType.getListElementTypeInfo()));
            case MAP:
                MapTypeInfo mapType = (MapTypeInfo) type;
                return ObjectInspectorFactory.getStandardMapObjectInspector(
                        getObjectInspector(mapType.getMapKeyTypeInfo()),
                        getObjectInspector(mapType.getMapValueTypeInfo()));
            case STRUCT:
                StructTypeInfo structType = (StructTypeInfo) type;
                List<TypeInfo> fieldTypes = structType.getAllStructFieldTypeInfos();
                List<ObjectInspector> fieldInspectors = new ArrayList<ObjectInspector>();
                for (TypeInfo fieldType : fieldTypes) {
                    fieldInspectors.add(getObjectInspector(fieldType));
                }
                return ObjectInspectorFactory.getStandardStructObjectInspector(
                        structType.getAllStructFieldNames(), fieldInspectors);
            default:
                throw new CatalogException("Unsupported Hive type category " + type.getCategory());
        }
    }
}
| |
package nu.nerd.beastmaster.commands;
import org.bukkit.ChatColor;
import org.bukkit.command.Command;
import org.bukkit.command.CommandSender;
import nu.nerd.beastmaster.BeastMaster;
import nu.nerd.beastmaster.objectives.ObjectiveType;
// --------------------------------------------------------------------------
/**
 * Executor for the /beast-obj command.
 *
 * <p>Subcommands: add, remove, list, info, limit, range, height, time. Each subcommand is handled
 * by a dedicated private method; {@link #onCommand} only validates the help case and dispatches.
 */
public class BeastObjectiveExecutor extends ExecutorBase {
    // ------------------------------------------------------------------------
    /**
     * Default constructor.
     */
    public BeastObjectiveExecutor() {
        super("beast-obj", "help", "add", "remove", "list", "info",
              "limit", "range", "height", "time");
        // TODO: command to set loot table.
    }

    // --------------------------------------------------------------------------
    /**
     * Dispatch to the handler for the requested subcommand.
     *
     * <p>Returns false (showing usage) for no args, bare "help", or an unknown subcommand.
     *
     * @see org.bukkit.command.CommandExecutor#onCommand(org.bukkit.command.CommandSender,
     *      org.bukkit.command.Command, java.lang.String, java.lang.String[])
     */
    @Override
    public boolean onCommand(CommandSender sender, Command command, String label, String[] args) {
        if (args.length == 0 || (args.length == 1 && args[0].equals("help"))) {
            return false;
        }
        switch (args[0]) {
        case "add":
            return onCommandAdd(sender, args);
        case "remove":
            return onCommandRemove(sender, args);
        case "list":
            return onCommandList(sender, args);
        case "info":
            return onCommandInfo(sender, args);
        case "limit":
            return onCommandLimit(sender, args);
        case "range":
            return onCommandRange(sender, args);
        case "height":
            return onCommandHeight(sender, args);
        case "time":
            sender.sendMessage(ChatColor.RED + "Not yet implemented; all objectives have unlimited lifetimes!");
            return true;
        default:
            return false;
        }
    } // onCommand

    // --------------------------------------------------------------------------
    /**
     * Look up an objective type by ID, reporting an error to the sender if it does not exist.
     *
     * @param sender the command sender to message on failure.
     * @param idArg the objective type ID argument.
     * @return the ObjectiveType, or null (after messaging the sender) if not found.
     */
    private ObjectiveType getExistingObjectiveType(CommandSender sender, String idArg) {
        ObjectiveType objectiveType = BeastMaster.OBJECTIVE_TYPES.getObjectiveType(idArg);
        if (objectiveType == null) {
            Commands.errorNull(sender, "objective", idArg);
        }
        return objectiveType;
    }

    // --------------------------------------------------------------------------
    /**
     * Handle "/beast-obj add <obj-id>": create a new objective type.
     */
    private boolean onCommandAdd(CommandSender sender, String[] args) {
        if (args.length != 2) {
            Commands.invalidArguments(sender, getName() + " add <obj-id>");
            return true;
        }
        String idArg = args[1];
        ObjectiveType objectiveType = BeastMaster.OBJECTIVE_TYPES.getObjectiveType(idArg);
        if (objectiveType != null) {
            // The ID is already taken.
            Commands.errorNotNull(sender, "objective", idArg);
            return true;
        }
        objectiveType = new ObjectiveType(idArg);
        BeastMaster.OBJECTIVE_TYPES.addObjectiveType(objectiveType);
        BeastMaster.CONFIG.save();
        sender.sendMessage(ChatColor.GOLD + "Added a new objective type: " + objectiveType.getDescription());
        return true;
    }

    // --------------------------------------------------------------------------
    /**
     * Handle "/beast-obj remove <obj-id>": delete an existing objective type.
     */
    private boolean onCommandRemove(CommandSender sender, String[] args) {
        if (args.length != 2) {
            Commands.invalidArguments(sender, getName() + " remove <obj-id>");
            return true;
        }
        ObjectiveType objectiveType = getExistingObjectiveType(sender, args[1]);
        if (objectiveType == null) {
            return true;
        }
        BeastMaster.OBJECTIVE_TYPES.removeObjectiveType(objectiveType);
        BeastMaster.CONFIG.save();
        sender.sendMessage(ChatColor.GOLD + "Removed objective type: " + objectiveType.getDescription());
        return true;
    }

    // --------------------------------------------------------------------------
    /**
     * Handle "/beast-obj list": show all objective types.
     */
    private boolean onCommandList(CommandSender sender, String[] args) {
        if (args.length != 1) {
            Commands.invalidArguments(sender, getName() + " list");
            return true;
        }
        sender.sendMessage(ChatColor.GOLD + "Objective types:");
        for (ObjectiveType objectiveType : BeastMaster.OBJECTIVE_TYPES.getObjectiveTypes()) {
            sender.sendMessage(objectiveType.getDescription());
        }
        return true;
    }

    // --------------------------------------------------------------------------
    /**
     * Handle "/beast-obj info <obj-id>": describe one objective type.
     */
    private boolean onCommandInfo(CommandSender sender, String[] args) {
        if (args.length != 2) {
            Commands.invalidArguments(sender, getName() + " info <obj-id>");
            return true;
        }
        ObjectiveType objectiveType = getExistingObjectiveType(sender, args[1]);
        if (objectiveType == null) {
            return true;
        }
        sender.sendMessage(ChatColor.GOLD + "Objective: " + objectiveType.getDescription());
        return true;
    }

    // --------------------------------------------------------------------------
    /**
     * Handle "/beast-obj limit <obj-id> <max>": set the maximum number of concurrent objectives.
     */
    private boolean onCommandLimit(CommandSender sender, String[] args) {
        if (args.length != 3) {
            Commands.invalidArguments(sender, getName() + " limit <obj-id> <max>");
            return true;
        }
        ObjectiveType objectiveType = getExistingObjectiveType(sender, args[1]);
        if (objectiveType == null) {
            return true;
        }
        // The max must be strictly positive; a null format-error handler matches the original
        // behaviour (no message on a non-integer argument).
        Integer maxCount = Commands.parseNumber(args[2], Commands::parseInt,
                                                (x) -> x > 0,
                                                () -> sender
                                                    .sendMessage(ChatColor.RED + "The maximum number of objectives must be at least one!"),
                                                null);
        if (maxCount == null) {
            return true;
        }
        objectiveType.setMaxCount(maxCount);
        BeastMaster.CONFIG.save();
        sender.sendMessage(ChatColor.GOLD + "Objective: " + objectiveType.getDescription());
        return true;
    }

    // --------------------------------------------------------------------------
    /**
     * Handle "/beast-obj range <obj-id> <min> <max>": set the horizontal range of the objective.
     */
    private boolean onCommandRange(CommandSender sender, String[] args) {
        if (args.length != 4) {
            Commands.invalidArguments(sender, getName() + " range <obj-id> <min> <max>");
            return true;
        }
        ObjectiveType objectiveType = getExistingObjectiveType(sender, args[1]);
        if (objectiveType == null) {
            return true;
        }
        Integer minRange = Commands.parseNumber(args[2], Commands::parseInt,
                                                (x) -> x >= 0,
                                                () -> sender.sendMessage(ChatColor.RED + "The minimum range must be at least zero!"),
                                                () -> sender.sendMessage(ChatColor.RED + "The minimum range must be an integer!"));
        if (minRange == null) {
            return true;
        }
        // The max range is validated against the min parsed above.
        Integer maxRange = Commands.parseNumber(args[3], Commands::parseInt,
                                                (x) -> x >= minRange,
                                                () -> sender.sendMessage(ChatColor.RED +
                                                                         "The maximum range must be at least as big as the minimum range!"),
                                                () -> sender.sendMessage(ChatColor.RED + "The maximum range must be an integer!"));
        if (maxRange == null) {
            return true;
        }
        objectiveType.setRange(minRange, maxRange);
        BeastMaster.CONFIG.save();
        sender.sendMessage(ChatColor.GOLD + "Objective: " + objectiveType.getDescription());
        return true;
    }

    // --------------------------------------------------------------------------
    /**
     * Handle "/beast-obj height <obj-id> <min> <max>": set the Y range of the objective.
     */
    private boolean onCommandHeight(CommandSender sender, String[] args) {
        if (args.length != 4) {
            Commands.invalidArguments(sender, getName() + " height <obj-id> <min> <max>");
            return true;
        }
        ObjectiveType objectiveType = getExistingObjectiveType(sender, args[1]);
        if (objectiveType == null) {
            return true;
        }
        Integer minY = Commands.parseNumber(args[2], Commands::parseInt,
                                            (x) -> x >= 0,
                                            () -> sender.sendMessage(ChatColor.RED + "The minimum Y must be at least zero!"),
                                            () -> sender.sendMessage(ChatColor.RED + "The minimum Y must be an integer!"));
        if (minY == null) {
            return true;
        }
        // 255 is the build-height ceiling enforced here.
        Integer maxY = Commands.parseNumber(args[3], Commands::parseInt,
                                            (y) -> y >= minY && y <= 255,
                                            () -> sender.sendMessage(ChatColor.RED + "The maximum Y must be between the minimum Y and 255!"),
                                            () -> sender.sendMessage(ChatColor.RED + "The maximum Y must be an integer!"));
        if (maxY == null) {
            return true;
        }
        objectiveType.setHeight(minY, maxY);
        BeastMaster.CONFIG.save();
        sender.sendMessage(ChatColor.GOLD + "Objective: " + objectiveType.getDescription());
        return true;
    }
} // class BeastObjectiveExecutor
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.accumulo.server.master.balancer;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.fail;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.SortedMap;
import java.util.TreeMap;
import org.apache.accumulo.core.client.impl.Table;
import org.apache.accumulo.core.client.impl.thrift.ThriftSecurityException;
import org.apache.accumulo.core.data.impl.KeyExtent;
import org.apache.accumulo.core.master.thrift.TableInfo;
import org.apache.accumulo.core.master.thrift.TabletServerStatus;
import org.apache.accumulo.core.tabletserver.thrift.TabletStats;
import org.apache.accumulo.core.util.HostAndPort;
import org.apache.accumulo.server.master.state.TServerInstance;
import org.apache.accumulo.server.master.state.TabletMigration;
import org.apache.hadoop.io.Text;
import org.apache.thrift.TException;
import org.junit.Before;
import org.junit.Test;
public class DefaultLoadBalancerTest {
class FakeTServer {
List<KeyExtent> extents = new ArrayList<>();
TabletServerStatus getStatus(TServerInstance server) {
TabletServerStatus result = new TabletServerStatus();
result.tableMap = new HashMap<>();
for (KeyExtent extent : extents) {
Table.ID tableId = extent.getTableId();
TableInfo info = result.tableMap.get(tableId.canonicalID());
if (info == null)
result.tableMap.put(tableId.canonicalID(), info = new TableInfo());
info.onlineTablets++;
info.recs = info.onlineTablets;
info.ingestRate = 123.;
info.queryRate = 456.;
}
return result;
}
}
Map<TServerInstance,FakeTServer> servers = new HashMap<>();
Map<KeyExtent,TServerInstance> last = new HashMap<>();
class TestDefaultLoadBalancer extends DefaultLoadBalancer {
@Override
public List<TabletStats> getOnlineTabletsForTable(TServerInstance tserver, Table.ID table) throws ThriftSecurityException, TException {
List<TabletStats> result = new ArrayList<>();
for (KeyExtent extent : servers.get(tserver).extents) {
if (extent.getTableId().equals(table)) {
result.add(new TabletStats(extent.toThrift(), null, null, null, 0l, 0., 0., 0));
}
}
return result;
}
}
@Before
public void setUp() {
last.clear();
servers.clear();
}
@Test
public void testAssignMigrations() {
servers.put(new TServerInstance(HostAndPort.fromParts("127.0.0.1", 1234), "a"), new FakeTServer());
servers.put(new TServerInstance(HostAndPort.fromParts("127.0.0.2", 1234), "b"), new FakeTServer());
servers.put(new TServerInstance(HostAndPort.fromParts("127.0.0.3", 1234), "c"), new FakeTServer());
List<KeyExtent> metadataTable = new ArrayList<>();
String table = "t1";
metadataTable.add(makeExtent(table, null, null));
table = "t2";
metadataTable.add(makeExtent(table, "a", null));
metadataTable.add(makeExtent(table, null, "a"));
table = "t3";
metadataTable.add(makeExtent(table, "a", null));
metadataTable.add(makeExtent(table, "b", "a"));
metadataTable.add(makeExtent(table, "c", "b"));
metadataTable.add(makeExtent(table, "d", "c"));
metadataTable.add(makeExtent(table, "e", "d"));
metadataTable.add(makeExtent(table, null, "e"));
Collections.sort(metadataTable);
TestDefaultLoadBalancer balancer = new TestDefaultLoadBalancer();
SortedMap<TServerInstance,TabletServerStatus> current = new TreeMap<>();
for (Entry<TServerInstance,FakeTServer> entry : servers.entrySet()) {
current.put(entry.getKey(), entry.getValue().getStatus(entry.getKey()));
}
assignTablets(metadataTable, servers, current, balancer);
// Verify that the counts on the tables are correct
Map<String,Integer> expectedCounts = new HashMap<>();
expectedCounts.put("t1", 1);
expectedCounts.put("t2", 1);
expectedCounts.put("t3", 2);
checkBalance(metadataTable, servers, expectedCounts);
// Rebalance once
for (Entry<TServerInstance,FakeTServer> entry : servers.entrySet()) {
current.put(entry.getKey(), entry.getValue().getStatus(entry.getKey()));
}
// Nothing should happen, we are balanced
ArrayList<TabletMigration> out = new ArrayList<>();
balancer.getMigrations(current, out);
assertEquals(out.size(), 0);
// Take down a tabletServer
TServerInstance first = current.keySet().iterator().next();
current.remove(first);
FakeTServer remove = servers.remove(first);
// reassign offline extents
assignTablets(remove.extents, servers, current, balancer);
checkBalance(metadataTable, servers, null);
}
private void assignTablets(List<KeyExtent> metadataTable, Map<TServerInstance,FakeTServer> servers, SortedMap<TServerInstance,TabletServerStatus> status,
TestDefaultLoadBalancer balancer) {
// Assign tablets
for (KeyExtent extent : metadataTable) {
TServerInstance assignment = balancer.getAssignment(status, extent, last.get(extent));
assertNotNull(assignment);
assertFalse(servers.get(assignment).extents.contains(extent));
servers.get(assignment).extents.add(extent);
last.put(extent, assignment);
}
}
SortedMap<TServerInstance,TabletServerStatus> getAssignments(Map<TServerInstance,FakeTServer> servers) {
SortedMap<TServerInstance,TabletServerStatus> result = new TreeMap<>();
for (Entry<TServerInstance,FakeTServer> entry : servers.entrySet()) {
result.put(entry.getKey(), entry.getValue().getStatus(entry.getKey()));
}
return result;
}
@Test
public void testUnevenAssignment() {
for (char c : "abcdefghijklmnopqrstuvwxyz".toCharArray()) {
String cString = Character.toString(c);
HostAndPort fakeAddress = HostAndPort.fromParts("127.0.0.1", c);
String fakeInstance = cString;
TServerInstance tsi = new TServerInstance(fakeAddress, fakeInstance);
FakeTServer fakeTServer = new FakeTServer();
servers.put(tsi, fakeTServer);
fakeTServer.extents.add(makeExtent(cString, null, null));
}
// Put more tablets on one server, but not more than the number of servers
Entry<TServerInstance,FakeTServer> first = servers.entrySet().iterator().next();
first.getValue().extents.add(makeExtent("newTable", "a", null));
first.getValue().extents.add(makeExtent("newTable", "b", "a"));
first.getValue().extents.add(makeExtent("newTable", "c", "b"));
first.getValue().extents.add(makeExtent("newTable", "d", "c"));
first.getValue().extents.add(makeExtent("newTable", "e", "d"));
first.getValue().extents.add(makeExtent("newTable", "f", "e"));
first.getValue().extents.add(makeExtent("newTable", "g", "f"));
first.getValue().extents.add(makeExtent("newTable", "h", "g"));
first.getValue().extents.add(makeExtent("newTable", "i", null));
TestDefaultLoadBalancer balancer = new TestDefaultLoadBalancer();
Set<KeyExtent> migrations = Collections.emptySet();
int moved = 0;
// balance until we can't balance no more!
while (true) {
List<TabletMigration> migrationsOut = new ArrayList<>();
balancer.balance(getAssignments(servers), migrations, migrationsOut);
if (migrationsOut.size() == 0)
break;
for (TabletMigration migration : migrationsOut) {
if (servers.get(migration.oldServer).extents.remove(migration.tablet))
moved++;
servers.get(migration.newServer).extents.add(migration.tablet);
}
}
assertEquals(8, moved);
}
@Test
public void testUnevenAssignment2() {
// make 26 servers
for (char c : "abcdefghijklmnopqrstuvwxyz".toCharArray()) {
String cString = Character.toString(c);
HostAndPort fakeAddress = HostAndPort.fromParts("127.0.0.1", c);
String fakeInstance = cString;
TServerInstance tsi = new TServerInstance(fakeAddress, fakeInstance);
FakeTServer fakeTServer = new FakeTServer();
servers.put(tsi, fakeTServer);
}
// put 60 tablets on 25 of them
List<Entry<TServerInstance,FakeTServer>> shortList = new ArrayList<>(servers.entrySet());
Entry<TServerInstance,FakeTServer> shortServer = shortList.remove(0);
int c = 0;
for (int i = 0; i < 60; i++) {
for (Entry<TServerInstance,FakeTServer> entry : shortList) {
entry.getValue().extents.add(makeExtent("t" + c, null, null));
}
}
// put 10 on the that short server:
for (int i = 0; i < 10; i++) {
shortServer.getValue().extents.add(makeExtent("s" + i, null, null));
}
TestDefaultLoadBalancer balancer = new TestDefaultLoadBalancer();
Set<KeyExtent> migrations = Collections.emptySet();
int moved = 0;
// balance until we can't balance no more!
while (true) {
List<TabletMigration> migrationsOut = new ArrayList<>();
balancer.balance(getAssignments(servers), migrations, migrationsOut);
if (migrationsOut.size() == 0)
break;
for (TabletMigration migration : migrationsOut) {
if (servers.get(migration.oldServer).extents.remove(migration.tablet))
moved++;
last.remove(migration.tablet);
servers.get(migration.newServer).extents.add(migration.tablet);
last.put(migration.tablet, migration.newServer);
}
}
// average is 58, with 2 at 59: we need 48 more moved to the short server
assertEquals(48, moved);
}
private void checkBalance(List<KeyExtent> metadataTable, Map<TServerInstance,FakeTServer> servers, Map<String,Integer> expectedCounts) {
// Verify they are spread evenly over the cluster
int average = metadataTable.size() / servers.size();
for (FakeTServer server : servers.values()) {
int diff = server.extents.size() - average;
if (diff < 0)
fail("average number of tablets is " + average + " but a server has " + server.extents.size());
if (diff > 1)
fail("average number of tablets is " + average + " but a server has " + server.extents.size());
}
if (expectedCounts != null) {
for (FakeTServer server : servers.values()) {
Map<String,Integer> counts = new HashMap<>();
for (KeyExtent extent : server.extents) {
String t = extent.getTableId().canonicalID();
if (counts.get(t) == null)
counts.put(t, 0);
counts.put(t, counts.get(t) + 1);
}
for (Entry<String,Integer> entry : counts.entrySet()) {
assertEquals(expectedCounts.get(entry.getKey()), counts.get(entry.getKey()));
}
}
}
}
  // Builds a KeyExtent for the given table id; end/prev row strings may be null (unbounded).
  private static KeyExtent makeExtent(String table, String end, String prev) {
    return new KeyExtent(Table.ID.of(table), toText(end), toText(prev));
  }
private static Text toText(String value) {
if (value != null)
return new Text(value);
return null;
}
}
| |
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.rules.cpp;
import static java.nio.charset.StandardCharsets.ISO_8859_1;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Joiner;
import com.google.common.base.Predicate;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.devtools.build.lib.actions.AbstractAction;
import com.google.devtools.build.lib.actions.Action;
import com.google.devtools.build.lib.actions.ActionExecutionContext;
import com.google.devtools.build.lib.actions.ActionExecutionException;
import com.google.devtools.build.lib.actions.ActionOwner;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.actions.ExecException;
import com.google.devtools.build.lib.actions.Executor;
import com.google.devtools.build.lib.actions.ParameterFile;
import com.google.devtools.build.lib.actions.ResourceSet;
import com.google.devtools.build.lib.actions.extra.CppLinkInfo;
import com.google.devtools.build.lib.actions.extra.ExtraActionInfo;
import com.google.devtools.build.lib.analysis.AnalysisEnvironment;
import com.google.devtools.build.lib.analysis.RuleContext;
import com.google.devtools.build.lib.analysis.TransitiveInfoProvider;
import com.google.devtools.build.lib.analysis.actions.ParameterFileWriteAction;
import com.google.devtools.build.lib.analysis.config.BuildConfiguration;
import com.google.devtools.build.lib.collect.CollectionUtils;
import com.google.devtools.build.lib.collect.ImmutableIterable;
import com.google.devtools.build.lib.collect.IterablesChain;
import com.google.devtools.build.lib.collect.nestedset.NestedSet;
import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder;
import com.google.devtools.build.lib.collect.nestedset.Order;
import com.google.devtools.build.lib.concurrent.ThreadSafety.Immutable;
import com.google.devtools.build.lib.concurrent.ThreadSafety.ThreadCompatible;
import com.google.devtools.build.lib.concurrent.ThreadSafety.ThreadSafe;
import com.google.devtools.build.lib.packages.RuleErrorConsumer;
import com.google.devtools.build.lib.rules.cpp.Link.LinkStaticness;
import com.google.devtools.build.lib.rules.cpp.Link.LinkTargetType;
import com.google.devtools.build.lib.rules.cpp.LinkerInputs.LibraryToLink;
import com.google.devtools.build.lib.util.Fingerprint;
import com.google.devtools.build.lib.util.OS;
import com.google.devtools.build.lib.util.Preconditions;
import com.google.devtools.build.lib.util.ShellEscaper;
import com.google.devtools.build.lib.vfs.FileSystemUtils;
import com.google.devtools.build.lib.vfs.Path;
import com.google.devtools.build.lib.vfs.PathFragment;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.annotation.Nullable;
/**
* Action that represents a linking step.
*/
@ThreadCompatible
public final class CppLinkAction extends AbstractAction {
  /**
   * An abstraction for creating intermediate and output artifacts for C++ linking.
   *
   * <p>This is unfortunately necessary, because most of the time, these artifacts are well-behaved
   * ones sitting under a package directory, but nativedeps link actions can be shared. In order to
   * avoid creating every artifact here with {@code getShareableArtifact()}, we abstract the
   * artifact creation away.
   */
  public interface LinkArtifactFactory {
    /**
     * Create an artifact at the specified root-relative path in the bin directory.
     *
     * @param ruleContext the rule whose configuration determines the bin directory
     * @param rootRelativePath path of the artifact relative to the bin directory root
     */
    Artifact create(RuleContext ruleContext, PathFragment rootRelativePath);
  }
  /**
   * An implementation of {@link LinkArtifactFactory} that can only create artifacts in the package
   * directory.
   */
  public static final LinkArtifactFactory DEFAULT_ARTIFACT_FACTORY = new LinkArtifactFactory() {
    @Override
    public Artifact create(RuleContext ruleContext, PathFragment rootRelativePath) {
      // Derived artifacts are placed under the configuration's bin directory.
      return ruleContext.getDerivedArtifact(rootRelativePath,
          ruleContext.getConfiguration().getBinDirectory());
    }
  };
  // Versioning GUIDs folded into computeKey(); FAKE_LINK_GUID must be bumped whenever
  // executeFake() changes (see the comment on that method).
  private static final String LINK_GUID = "58ec78bd-1176-4e36-8143-439f656b181d";
  private static final String FAKE_LINK_GUID = "da36f819-5a15-43a9-8a45-e01b60e10c8b";
  private final CppConfiguration cppConfiguration;
  // Primary output library; interfaceOutputLibrary may be null (Builder.build() only allows an
  // interface output for non-fake DYNAMIC_LIBRARY links).
  private final LibraryToLink outputLibrary;
  private final LibraryToLink interfaceOutputLibrary;
  private final LinkCommandLine linkCommandLine;
  /** True for cc_fake_binary targets. */
  private final boolean fake;
  private final boolean isLTOIndexing;
  // This is set for both LTO indexing and LTO linking.
  @Nullable private final Iterable<LTOBackendArtifacts> allLTOBackendArtifacts;
  private final Iterable<Artifact> mandatoryInputs;
  // Linking uses a lot of memory; estimate 1 MB per input file, min 1.5 Gib.
  // It is vital to not underestimate too much here,
  // because running too many concurrent links can
  // thrash the machine to the point where it stops
  // responding to keystrokes or mouse clicks.
  // CPU and IO do not scale similarly and still use the static minimum estimate.
  public static final ResourceSet LINK_RESOURCES_PER_INPUT =
      ResourceSet.createWithRamCpuIo(1, 0, 0);
  // This defines the minimum of each resource that will be reserved.
  public static final ResourceSet MIN_STATIC_LINK_RESOURCES =
      ResourceSet.createWithRamCpuIo(1536, 1, 0.3);
  // Dynamic linking should be cheaper than static linking.
  public static final ResourceSet MIN_DYNAMIC_LINK_RESOURCES =
      ResourceSet.createWithRamCpuIo(1024, 0.3, 0.2);
  /**
   * Use {@link Builder} to create instances of this class. Also see there for
   * the documentation of all parameters.
   *
   * <p>This constructor is intentionally private and is only to be called from
   * {@link Builder#build()}.
   */
  private CppLinkAction(
      ActionOwner owner,
      Iterable<Artifact> inputs,
      ImmutableList<Artifact> outputs,
      CppConfiguration cppConfiguration,
      LibraryToLink outputLibrary,
      LibraryToLink interfaceOutputLibrary,
      boolean fake,
      boolean isLTOIndexing,
      Iterable<LTOBackendArtifacts> allLTOBackendArtifacts,
      LinkCommandLine linkCommandLine) {
    super(owner, inputs, outputs);
    // Every input of this action is mandatory; exposed again via getMandatoryInputs().
    this.mandatoryInputs = inputs;
    this.cppConfiguration = cppConfiguration;
    this.outputLibrary = outputLibrary;
    this.interfaceOutputLibrary = interfaceOutputLibrary;
    this.fake = fake;
    this.isLTOIndexing = isLTOIndexing;
    this.allLTOBackendArtifacts = allLTOBackendArtifacts;
    this.linkCommandLine = linkCommandLine;
  }
private static Iterable<LinkerInput> filterLinkerInputs(Iterable<LinkerInput> inputs) {
return Iterables.filter(inputs, new Predicate<LinkerInput>() {
@Override
public boolean apply(LinkerInput input) {
return Link.VALID_LINKER_INPUTS.matches(input.getArtifact().getFilename());
}
});
}
private static Iterable<Artifact> filterLinkerInputArtifacts(Iterable<Artifact> inputs) {
return Iterables.filter(inputs, new Predicate<Artifact>() {
@Override
public boolean apply(Artifact input) {
return Link.VALID_LINKER_INPUTS.matches(input.getFilename());
}
});
}
  /** Returns the C++ configuration fragment this action was created with. */
  private CppConfiguration getCppConfiguration() {
    return cppConfiguration;
  }
  /** Returns the target CPU from the C++ configuration. */
  @VisibleForTesting
  public String getTargetCpu() {
    return getCppConfiguration().getTargetCpu();
  }
  /** Returns the host system name from the C++ configuration. */
  public String getHostSystemName() {
    return getCppConfiguration().getHostSystemName();
  }
public ImmutableMap<String, String> getEnvironment() {
if (OS.getCurrent() == OS.WINDOWS) {
// TODO(bazel-team): Both GCC and clang rely on their execution directories being on
// PATH, otherwise they fail to find dependent DLLs (and they fail silently...). On
// the other hand, Windows documentation says that the directory of the executable
// is always searched for DLLs first. Not sure what to make of it.
// Other options are to forward the system path (brittle), or to add a PATH field to
// the crosstool file.
//
// @see com.google.devtools.build.lib.rules.cpp.CppCompileAction#getEnvironment.
return ImmutableMap.of(
"PATH",
cppConfiguration.getToolPathFragment(CppConfiguration.Tool.GCC).getParentDirectory()
.getPathString()
);
}
return ImmutableMap.of();
}
  /**
   * Returns the link configuration; for correctness you should not call this method during
   * execution - only the argv is part of the action cache key, and we therefore don't guarantee
   * that the action will be re-executed if the contents change in a way that does not affect the
   * argv.
   */
  @VisibleForTesting
  public LinkCommandLine getLinkCommandLine() {
    return linkCommandLine;
  }
  /** Returns the library produced by this link. */
  public LibraryToLink getOutputLibrary() {
    return outputLibrary;
  }
  /** Returns the interface output library, or null if this link has none. */
  public LibraryToLink getInterfaceOutputLibrary() {
    return interfaceOutputLibrary;
  }
  /**
   * Returns the path to the output artifact produced by the linker.
   */
  public Path getOutputFile() {
    return outputLibrary.getArtifact().getPath();
  }
  /** Returns the raw (not yet finalized) link argv. */
  @VisibleForTesting
  public List<String> getRawLinkArgv() {
    return linkCommandLine.getRawLinkArgv();
  }
  /** Returns the command line's argument list. */
  @VisibleForTesting
  public List<String> getArgv() {
    return linkCommandLine.arguments();
  }
  /**
   * Returns the command line specification for this link, included any required linkstamp
   * compilation steps. The command line may refer to a .params file.
   *
   * @return a finalized command line suitable for execution
   */
  public final List<String> getCommandLine() {
    return linkCommandLine.getCommandLine();
  }
  /** Nullable; set for both LTO indexing and LTO linking. */
  Iterable<LTOBackendArtifacts> getAllLTOBackendArtifacts() {
    return allLTOBackendArtifacts;
  }
  @Override
  @ThreadCompatible
  public void execute(
      ActionExecutionContext actionExecutionContext)
      throws ActionExecutionException, InterruptedException {
    // cc_fake_binary links don't invoke the linker; they write a script instead.
    if (fake) {
      executeFake();
    } else {
      Executor executor = actionExecutionContext.getExecutor();
      try {
        // Delegate the real link to the strategy registered for CppLinkActionContext.
        executor.getContext(CppLinkActionContext.class).exec(
            this, actionExecutionContext);
      } catch (ExecException e) {
        // Wrap with the failing rule's label for a user-facing error.
        throw e.toActionExecutionException("Linking of rule '" + getOwner().getLabel() + "'",
            executor.getVerboseFailures(), this);
      }
    }
  }
@Override
public String describeStrategy(Executor executor) {
return fake ? "fake,local" : executor.getContext(CppLinkActionContext.class).strategyLocality();
}
// Don't forget to update FAKE_LINK_GUID if you modify this method.
@ThreadCompatible
private void executeFake()
throws ActionExecutionException {
// The uses of getLinkConfiguration in this method may not be consistent with the computed key.
// I.e., this may be incrementally incorrect.
final Collection<Artifact> linkstampOutputs = getLinkCommandLine().getLinkstamps().values();
// Prefix all fake output files in the command line with $TEST_TMPDIR/.
final String outputPrefix = "$TEST_TMPDIR/";
List<String> escapedLinkArgv = escapeLinkArgv(linkCommandLine.getRawLinkArgv(),
linkstampOutputs, outputPrefix);
// Write the commands needed to build the real target to the fake target
// file.
StringBuilder s = new StringBuilder();
Joiner.on('\n').appendTo(s,
"# This is a fake target file, automatically generated.",
"# Do not edit by hand!",
"echo $0 is a fake target file and not meant to be executed.",
"exit 0",
"EOS",
"",
"makefile_dir=.",
"");
try {
// Concatenate all the (fake) .o files into the result.
for (LinkerInput linkerInput : getLinkCommandLine().getLinkerInputs()) {
Artifact objectFile = linkerInput.getArtifact();
if ((CppFileTypes.OBJECT_FILE.matches(objectFile.getFilename())
|| CppFileTypes.PIC_OBJECT_FILE.matches(objectFile.getFilename()))
&& linkerInput.isFake()) {
s.append(FileSystemUtils.readContentAsLatin1(objectFile.getPath())); // (IOException)
}
}
s.append(getOutputFile().getBaseName()).append(": ");
for (Artifact linkstamp : linkstampOutputs) {
s.append("mkdir -p " + outputPrefix +
linkstamp.getExecPath().getParentDirectory() + " && ");
}
Joiner.on(' ').appendTo(s,
ShellEscaper.escapeAll(linkCommandLine.finalizeAlreadyEscapedWithLinkstampCommands(
escapedLinkArgv, outputPrefix)));
s.append('\n');
if (getOutputFile().exists()) {
getOutputFile().setWritable(true); // (IOException)
}
FileSystemUtils.writeContent(getOutputFile(), ISO_8859_1, s.toString());
getOutputFile().setExecutable(true); // (IOException)
for (Artifact linkstamp : linkstampOutputs) {
FileSystemUtils.touchFile(linkstamp.getPath());
}
} catch (IOException e) {
throw new ActionExecutionException("failed to create fake link command for rule '" +
getOwner().getLabel() + ": " + e.getMessage(),
this, false);
}
}
/**
* Shell-escapes the raw link command line.
*
* @param rawLinkArgv raw link command line
* @param linkstampOutputs linkstamp artifacts
* @param outputPrefix to be prepended to any outputs
* @return escaped link command line
*/
private List<String> escapeLinkArgv(List<String> rawLinkArgv,
final Collection<Artifact> linkstampOutputs, final String outputPrefix) {
final List<String> linkstampExecPaths = Artifact.asExecPaths(linkstampOutputs);
ImmutableList.Builder<String> escapedArgs = ImmutableList.builder();
for (String rawArg : rawLinkArgv) {
String escapedArg;
if (rawArg.equals(getPrimaryOutput().getExecPathString())
|| linkstampExecPaths.contains(rawArg)) {
escapedArg = outputPrefix + ShellEscaper.escapeString(rawArg);
} else if (rawArg.startsWith(Link.FAKE_OBJECT_PREFIX)) {
escapedArg = outputPrefix + ShellEscaper.escapeString(
rawArg.substring(Link.FAKE_OBJECT_PREFIX.length()));
} else {
escapedArg = ShellEscaper.escapeString(rawArg);
}
escapedArgs.add(escapedArg);
}
return escapedArgs.build();
}
  @Override
  public ExtraActionInfo.Builder getExtraActionInfo() {
    // The uses of getLinkConfiguration in this method may not be consistent with the computed key.
    // I.e., this may be incrementally incorrect.
    CppLinkInfo.Builder info = CppLinkInfo.newBuilder();
    // Record both regular and runtime linker inputs as input files.
    info.addAllInputFile(Artifact.toExecPaths(
        LinkerInputs.toLibraryArtifacts(getLinkCommandLine().getLinkerInputs())));
    info.addAllInputFile(Artifact.toExecPaths(
        LinkerInputs.toLibraryArtifacts(getLinkCommandLine().getRuntimeInputs())));
    info.setOutputFile(getPrimaryOutput().getExecPathString());
    if (interfaceOutputLibrary != null) {
      info.setInterfaceOutputFile(interfaceOutputLibrary.getArtifact().getExecPathString());
    }
    info.setLinkTargetType(getLinkCommandLine().getLinkTargetType().name());
    info.setLinkStaticness(getLinkCommandLine().getLinkStaticness().name());
    info.addAllLinkStamp(Artifact.toExecPaths(getLinkCommandLine().getLinkstamps().values()));
    info.addAllBuildInfoHeaderArtifact(
        Artifact.toExecPaths(getLinkCommandLine().getBuildInfoHeaderArtifacts()));
    info.addAllLinkOpt(getLinkCommandLine().getLinkopts());
    return super.getExtraActionInfo()
        .setExtension(CppLinkInfo.cppLinkInfo, info.build());
  }
  @Override
  protected String computeKey() {
    Fingerprint f = new Fingerprint();
    // The GUID both distinguishes fake from real links and versions this key computation.
    f.addString(fake ? FAKE_LINK_GUID : LINK_GUID);
    f.addString(getCppConfiguration().getLdExecutable().getPathString());
    f.addStrings(linkCommandLine.arguments());
    // TODO(bazel-team): For correctness, we need to ensure the invariant that all values accessed
    // during the execution phase are also covered by the key. Above, we add the argv to the key,
    // which covers most cases. Unfortunately, the extra action and fake support methods above also
    // sometimes directly access settings from the link configuration that may or may not affect the
    // key. We either need to change the code to cover them in the key computation, or change the
    // LinkConfiguration to disallow the combinations where the value of a setting does not affect
    // the argv.
    f.addBoolean(linkCommandLine.isNativeDeps());
    f.addBoolean(linkCommandLine.useTestOnlyFlags());
    if (linkCommandLine.getRuntimeSolibDir() != null) {
      f.addPath(linkCommandLine.getRuntimeSolibDir());
    }
    f.addBoolean(isLTOIndexing);
    return f.hexDigestAndReset();
  }
  @Override
  public String describeKey() {
    // Human-readable rendering of the key material: progress message, linker path, then argv.
    StringBuilder message = new StringBuilder();
    if (fake) {
      message.append("Fake ");
    }
    message.append(getProgressMessage());
    message.append('\n');
    message.append(" Command: ");
    message.append(
        ShellEscaper.escapeString(getCppConfiguration().getLdExecutable().getPathString()));
    message.append('\n');
    // Outputting one argument per line makes it easier to diff the results.
    for (String argument : ShellEscaper.escapeAll(linkCommandLine.arguments())) {
      message.append(" Argument: ");
      message.append(argument);
      message.append('\n');
    }
    return message.toString();
  }
@Override
public String getMnemonic() {
return (isLTOIndexing) ? "CppLTOIndexing" : "CppLink";
}
@Override
protected String getRawProgressMessage() {
return (isLTOIndexing ? "LTO indexing " : "Linking ")
+ outputLibrary.getArtifact().prettyPrint();
}
  @Override
  public ResourceSet estimateResourceConsumption(Executor executor) {
    // Delegated to the link strategy registered for this execution context.
    return executor.getContext(CppLinkActionContext.class).estimateResourceConsumption(this);
  }
/**
* Estimate the resources consumed when this action is run locally.
*/
public ResourceSet estimateResourceConsumptionLocal() {
// It's ok if this behaves differently even if the key is identical.
ResourceSet minLinkResources =
getLinkCommandLine().getLinkStaticness() == Link.LinkStaticness.DYNAMIC
? MIN_DYNAMIC_LINK_RESOURCES
: MIN_STATIC_LINK_RESOURCES;
final int inputSize = Iterables.size(getLinkCommandLine().getLinkerInputs())
+ Iterables.size(getLinkCommandLine().getRuntimeInputs());
return ResourceSet.createWithRamCpuIo(
Math.max(inputSize * LINK_RESOURCES_PER_INPUT.getMemoryMb(),
minLinkResources.getMemoryMb()),
Math.max(inputSize * LINK_RESOURCES_PER_INPUT.getCpuUsage(),
minLinkResources.getCpuUsage()),
Math.max(inputSize * LINK_RESOURCES_PER_INPUT.getIoUsage(),
minLinkResources.getIoUsage())
);
}
  @Override
  public Iterable<Artifact> getMandatoryInputs() {
    return mandatoryInputs;
  }
  /**
   * Determines whether or not this link should output a symbol counts file.
   */
  public static boolean enableSymbolsCounts(
      CppConfiguration cppConfiguration, boolean fake, LinkTargetType linkType) {
    // Requires the gold linker and only applies to real (non-fake) executables.
    return cppConfiguration.getSymbolCounts()
        && cppConfiguration.supportsGoldLinker()
        && linkType == LinkTargetType.EXECUTABLE
        && !fake;
  }
  /** Derives the ".sc" symbol-counts file name alongside the given binary. */
  public static PathFragment symbolCountsFileName(PathFragment binaryName) {
    return binaryName.replaceName(binaryName.getBaseName() + ".sc");
  }
/**
* Builder class to construct {@link CppLinkAction}s.
*/
public static class Builder {
    // Builder-only
    // Null when invoked from tests (e.g. via createTestBuilder).
    @Nullable private final RuleContext ruleContext;
    private final AnalysisEnvironment analysisEnvironment;
    private final Artifact output;
    // can be null for CppLinkAction.createTestBuilder()
    @Nullable private final CcToolchainProvider toolchain;
    private Artifact interfaceOutput;
    private Artifact symbolCounts;
    private PathFragment runtimeSolibDir;
    protected final BuildConfiguration configuration;
    private final CppConfiguration cppConfiguration;
    // Morally equivalent with {@link Context}, except these are mutable.
    // Keep these in sync with {@link Context}.
    private final Set<LinkerInput> nonLibraries = new LinkedHashSet<>();
    private final NestedSetBuilder<LibraryToLink> libraries = NestedSetBuilder.linkOrder();
    private NestedSet<Artifact> crosstoolInputs = NestedSetBuilder.emptySet(Order.STABLE_ORDER);
    private Artifact runtimeMiddleman;
    private NestedSet<Artifact> runtimeInputs = NestedSetBuilder.emptySet(Order.STABLE_ORDER);
    private final NestedSetBuilder<Artifact> compilationInputs = NestedSetBuilder.stableOrder();
    private final Set<Artifact> linkstamps = new LinkedHashSet<>();
    private List<String> linkstampOptions = new ArrayList<>();
    private final List<String> linkopts = new ArrayList<>();
    // Defaults: a fully static link producing a static library.
    private LinkTargetType linkType = LinkTargetType.STATIC_LIBRARY;
    private LinkStaticness linkStaticness = LinkStaticness.FULLY_STATIC;
    private List<Artifact> ltoBitcodeFiles = new ArrayList<>();
    private boolean fake;
    private boolean isNativeDeps;
    private boolean useTestOnlyFlags;
    private boolean wholeArchive;
    private LinkArtifactFactory linkArtifactFactory = DEFAULT_ARTIFACT_FACTORY;
    private boolean isLTOIndexing = false;
    private Iterable<LTOBackendArtifacts> allLTOArtifacts = null;
    /**
     * Creates a builder that builds {@link CppLinkAction} instances.
     *
     * @param ruleContext the rule that owns the action
     * @param output the output artifact
     */
    public Builder(RuleContext ruleContext, Artifact output) {
      this(ruleContext, output, ruleContext.getConfiguration(),
          ruleContext.getAnalysisEnvironment(), CppHelper.getToolchain(ruleContext));
    }
    /**
     * Creates a builder that builds {@link CppLinkAction} instances.
     *
     * @param ruleContext the rule that owns the action
     * @param output the output artifact
     */
    public Builder(RuleContext ruleContext, Artifact output,
        BuildConfiguration configuration, CcToolchainProvider toolchain) {
      this(ruleContext, output, configuration,
          ruleContext.getAnalysisEnvironment(), toolchain);
    }
    /**
     * Creates a builder that builds {@link CppLinkAction}s.
     *
     * @param ruleContext the rule that owns the action
     * @param output the output artifact
     * @param configuration the configuration used to determine the tool chain
     *    and the default link options
     */
    private Builder(@Nullable RuleContext ruleContext, Artifact output,
        BuildConfiguration configuration, AnalysisEnvironment analysisEnvironment,
        CcToolchainProvider toolchain) {
      this.ruleContext = ruleContext;
      this.analysisEnvironment = Preconditions.checkNotNull(analysisEnvironment);
      this.output = Preconditions.checkNotNull(output);
      this.configuration = Preconditions.checkNotNull(configuration);
      this.cppConfiguration = configuration.getFragment(CppConfiguration.class);
      this.toolchain = toolchain;
      // Only toolchains supporting embedded runtimes define a runtime solib directory.
      if (cppConfiguration.supportsEmbeddedRuntimes() && toolchain != null) {
        runtimeSolibDir = toolchain.getDynamicRuntimeSolibDir();
      }
    }
    /**
     * Given a Context, creates a Builder that builds {@link CppLinkAction}s.
     * Note well: Keep the Builder->Context and Context->Builder transforms consistent!
     * @param ruleContext the rule that owns the action
     * @param output the output artifact
     * @param linkContext an immutable CppLinkAction.Context from the original builder
     */
    public Builder(RuleContext ruleContext, Artifact output, Context linkContext,
        BuildConfiguration configuration) {
      // These Builder-only fields get set in the constructor:
      // ruleContext, analysisEnvironment, outputPath, configuration, runtimeSolibDir
      this(ruleContext, output, configuration, ruleContext.getAnalysisEnvironment(),
          CppHelper.getToolchain(ruleContext));
      Preconditions.checkNotNull(linkContext);
      // All linkContext fields should be transferred to this Builder.
      this.nonLibraries.addAll(linkContext.nonLibraries);
      this.libraries.addTransitive(linkContext.libraries);
      this.crosstoolInputs = linkContext.crosstoolInputs;
      this.runtimeMiddleman = linkContext.runtimeMiddleman;
      this.runtimeInputs = linkContext.runtimeInputs;
      this.compilationInputs.addTransitive(linkContext.compilationInputs);
      this.linkstamps.addAll(linkContext.linkstamps);
      this.linkopts.addAll(linkContext.linkopts);
      this.linkType = linkContext.linkType;
      this.linkStaticness = linkContext.linkStaticness;
      this.fake = linkContext.fake;
      this.isNativeDeps = linkContext.isNativeDeps;
      this.useTestOnlyFlags = linkContext.useTestOnlyFlags;
    }
    /** Overrides the factory used to create intermediate and output link artifacts. */
    public CppLinkAction.Builder setLinkArtifactFactory(LinkArtifactFactory linkArtifactFactory) {
      this.linkArtifactFactory = linkArtifactFactory;
      return this;
    }
    /** Creates one LTOBackendArtifacts per bitcode object that takes part in the LTO link. */
    private Iterable<LTOBackendArtifacts> createLTOArtifacts(
        PathFragment ltoOutputRootPrefix, NestedSet<LibraryToLink> uniqueLibraries) {
      // Gather every LTO bitcode file reachable from the libraries.
      Set<Artifact> compiled = new LinkedHashSet<>();
      for (LibraryToLink lib : uniqueLibraries) {
        Iterables.addAll(compiled, lib.getLTOBitcodeFiles());
      }
      // This flattens the set of object files, so for M binaries and N .o files,
      // this is O(M*N). If we had a nested set of .o files, we could have O(M + N) instead.
      NestedSetBuilder<Artifact> bitcodeBuilder = NestedSetBuilder.stableOrder();
      for (LibraryToLink lib : uniqueLibraries) {
        if (!lib.containsObjectFiles()) {
          continue;
        }
        for (Artifact a : lib.getObjectFiles()) {
          if (compiled.contains(a)) {
            bitcodeBuilder.add(a);
          }
        }
      }
      for (LinkerInput input : nonLibraries) {
        // This relies on file naming conventions. It would be less fragile to have a dedicated
        // field for non-library .o files.
        if (CppFileTypes.OBJECT_FILE.matches(input.getArtifact().getExecPath())
            || CppFileTypes.PIC_OBJECT_FILE.matches(input.getArtifact().getExecPath())) {
          bitcodeBuilder.add(input.getArtifact());
        }
      }
      NestedSet<Artifact> allBitcode = bitcodeBuilder.build();
      ImmutableList.Builder<LTOBackendArtifacts> ltoOutputs = ImmutableList.builder();
      for (Artifact a : allBitcode) {
        LTOBackendArtifacts ltoArtifacts = new LTOBackendArtifacts(
            ltoOutputRootPrefix, a, allBitcode, ruleContext, linkArtifactFactory);
        ltoOutputs.add(ltoArtifacts);
      }
      return ltoOutputs.build();
    }
@VisibleForTesting
boolean canSplitCommandLine() {
if (toolchain == null || !toolchain.supportsParamFiles()) {
return false;
}
switch (linkType) {
// We currently can't split dynamic library links if they have interface outputs. That was
// probably an unintended side effect of the change that introduced interface outputs.
case DYNAMIC_LIBRARY:
return interfaceOutput == null;
case EXECUTABLE:
case STATIC_LIBRARY:
case PIC_STATIC_LIBRARY:
case ALWAYS_LINK_STATIC_LIBRARY:
case ALWAYS_LINK_PIC_STATIC_LIBRARY:
return true;
default:
return false;
}
}
    /**
     * Builds the Action as configured and returns it.
     */
    public CppLinkAction build() {
      // Interface outputs are only valid for real (non-fake) dynamic-library links.
      if (interfaceOutput != null && (fake || linkType != LinkTargetType.DYNAMIC_LIBRARY)) {
        throw new RuntimeException("Interface output can only be used "
            + "with non-fake DYNAMIC_LIBRARY targets");
      }
      final ImmutableList<Artifact> buildInfoHeaderArtifacts = !linkstamps.isEmpty()
          ? ruleContext.getBuildInfo(CppBuildInfo.KEY)
          : ImmutableList.<Artifact>of();
      boolean needWholeArchive = wholeArchive || needWholeArchive(
          linkStaticness, linkType, linkopts, isNativeDeps, cppConfiguration);
      NestedSet<LibraryToLink> uniqueLibraries = libraries.build();
      final Iterable<Artifact> filteredNonLibraryArtifacts =
          filterLinkerInputArtifacts(LinkerInputs.toLibraryArtifacts(nonLibraries));
      final Iterable<LinkerInput> linkerInputs = IterablesChain.<LinkerInput>builder()
          .add(ImmutableList.copyOf(filterLinkerInputs(nonLibraries)))
          .add(ImmutableIterable.from(Link.mergeInputsCmdLine(
              uniqueLibraries, needWholeArchive, cppConfiguration.archiveType())))
          .build();
      // ruleContext can only be null during testing. This is kind of ugly.
      final ImmutableSet<String> features = (ruleContext == null)
          ? ImmutableSet.<String>of()
          : ruleContext.getFeatures();
      final LibraryToLink outputLibrary =
          LinkerInputs.newInputLibrary(output, filteredNonLibraryArtifacts, this.ltoBitcodeFiles);
      final LibraryToLink interfaceOutputLibrary =
          (interfaceOutput == null)
              ? null
              : LinkerInputs.newInputLibrary(
                  interfaceOutput, filteredNonLibraryArtifacts, this.ltoBitcodeFiles);
      final ImmutableMap<Artifact, Artifact> linkstampMap =
          mapLinkstampsToOutputs(linkstamps, ruleContext, output, linkArtifactFactory);
      PathFragment ltoOutputRootPrefix = null;
      if (isLTOIndexing && allLTOArtifacts == null) {
        ltoOutputRootPrefix =
            FileSystemUtils.appendExtension(
                outputLibrary.getArtifact().getRootRelativePath(), ".lto");
        allLTOArtifacts = createLTOArtifacts(ltoOutputRootPrefix, uniqueLibraries);
      }
      final ImmutableList<Artifact> actionOutputs;
      if (isLTOIndexing) {
        // For the indexing step, the outputs are the indexing artifacts of each LTO backend.
        ImmutableList.Builder<Artifact> builder = ImmutableList.builder();
        for (LTOBackendArtifacts ltoA : allLTOArtifacts) {
          ltoA.addIndexingOutputs(builder);
        }
        actionOutputs = builder.build();
      } else {
        actionOutputs =
            constructOutputs(
                outputLibrary.getArtifact(),
                linkstampMap.values(),
                interfaceOutputLibrary == null ? null : interfaceOutputLibrary.getArtifact(),
                symbolCounts);
      }
      PathFragment paramRootPath =
          ParameterFile.derivePath(
              outputLibrary.getArtifact().getRootRelativePath(), (isLTOIndexing) ? "lto" : "2");
      @Nullable
      final Artifact paramFile =
          canSplitCommandLine()
              ? linkArtifactFactory.create(ruleContext, paramRootPath)
              : null;
      LinkCommandLine.Builder linkCommandLineBuilder =
          new LinkCommandLine.Builder(configuration, getOwner(), ruleContext)
              .setLinkerInputs(linkerInputs)
              .setRuntimeInputs(
                  ImmutableList.copyOf(LinkerInputs.simpleLinkerInputs(runtimeInputs)))
              .setLinkTargetType(linkType)
              .setLinkStaticness(linkStaticness)
              .setFeatures(features)
              .setRuntimeSolibDir(linkType.isStaticLibraryLink() ? null : runtimeSolibDir)
              .setNativeDeps(isNativeDeps)
              .setUseTestOnlyFlags(useTestOnlyFlags)
              .setNeedWholeArchive(needWholeArchive)
              .setParamFile(paramFile)
              .setAllLTOArtifacts(isLTOIndexing ? null : allLTOArtifacts)
              .setToolchain(toolchain);
      if (!isLTOIndexing) {
        linkCommandLineBuilder
            .setOutput(outputLibrary.getArtifact())
            .setInterfaceOutput(interfaceOutput)
            .setSymbolCountsOutput(symbolCounts)
            .setBuildInfoHeaderArtifacts(buildInfoHeaderArtifacts)
            .setInterfaceSoBuilder(getInterfaceSoBuilder())
            .setLinkstamps(linkstampMap)
            .setLinkopts(ImmutableList.copyOf(linkopts))
            .addLinkstampCompileOptions(linkstampOptions);
      } else {
        // TODO(bazel-team): once the LLVM compiler patches have been finalized, this should
        // be converted to a crosstool feature configuration instead.
        List<String> opts = new ArrayList<>(linkopts);
        opts.add("-flto");
        opts.add(
            "-Wl,-plugin-opt,thin-lto="
                + configuration.getBinDirectory().getExecPathString()
                + ":"
                + configuration
                    .getBinDirectory()
                    .getExecPath()
                    .getRelative(ltoOutputRootPrefix)
                    .toString());
        linkCommandLineBuilder.setLinkopts(ImmutableList.copyOf(opts));
      }
      LinkCommandLine linkCommandLine = linkCommandLineBuilder.build();
      // Compute the set of inputs - we only need stable order here.
      NestedSetBuilder<Artifact> dependencyInputsBuilder = NestedSetBuilder.stableOrder();
      dependencyInputsBuilder.addTransitive(crosstoolInputs);
      if (runtimeMiddleman != null) {
        dependencyInputsBuilder.add(runtimeMiddleman);
      }
      if (!isLTOIndexing) {
        dependencyInputsBuilder.addAll(buildInfoHeaderArtifacts);
        dependencyInputsBuilder.addAll(linkstamps);
        dependencyInputsBuilder.addTransitive(compilationInputs.build());
      }
      Iterable<Artifact> expandedInputs =
          LinkerInputs.toLibraryArtifacts(
              Link.mergeInputsDependencies(
                  uniqueLibraries, needWholeArchive, cppConfiguration.archiveType()));
      Iterable<Artifact> expandedNonLibraryInputs = LinkerInputs.toLibraryArtifacts(nonLibraries);
      if (!isLTOIndexing && allLTOArtifacts != null) {
        // We are doing LTO, and this is the real link, so substitute
        // the LTO bitcode files with the real object files they were translated into.
        Map<Artifact, Artifact> ltoMapping = new HashMap<>();
        for (LTOBackendArtifacts a : allLTOArtifacts) {
          ltoMapping.put(a.getBitcodeFile(), a.getObjectFile());
        }
        // Handle libraries.
        List<Artifact> renamedInputs = new ArrayList<>();
        for (Artifact a : expandedInputs) {
          Artifact renamed = ltoMapping.get(a);
          renamedInputs.add(renamed == null ? a : renamed);
        }
        expandedInputs = renamedInputs;
        // Handle non-libraries.
        List<Artifact> renamedNonLibraryInputs = new ArrayList<>();
        for (Artifact a : expandedNonLibraryInputs) {
          Artifact renamed = ltoMapping.get(a);
          renamedNonLibraryInputs.add(renamed == null ? a : renamed);
        }
        expandedNonLibraryInputs = renamedNonLibraryInputs;
      }
      // getPrimaryInput returns the first element, and that is a public interface - therefore the
      // order here is important.
      IterablesChain.Builder<Artifact> inputsBuilder =
          IterablesChain.<Artifact>builder()
              .add(ImmutableList.copyOf(expandedNonLibraryInputs))
              .add(dependencyInputsBuilder.build())
              .add(ImmutableIterable.from(expandedInputs));
      if (linkCommandLine.getParamFile() != null) {
        inputsBuilder.add(ImmutableList.of(linkCommandLine.getParamFile()));
        // Register the side action that writes the .params file this link will read.
        Action parameterFileWriteAction =
            new ParameterFileWriteAction(
                getOwner(),
                paramFile,
                linkCommandLine.paramCmdLine(),
                ParameterFile.ParameterFileType.UNQUOTED,
                ISO_8859_1);
        analysisEnvironment.registerAction(parameterFileWriteAction);
      }
      return new CppLinkAction(
          getOwner(),
          inputsBuilder.deduplicate().build(),
          actionOutputs,
          cppConfiguration,
          outputLibrary,
          interfaceOutputLibrary,
          fake,
          isLTOIndexing,
          allLTOArtifacts,
          linkCommandLine);
    }
/**
 * Default heuristic deciding whether the link needs --whole-archive treatment.
 *
 * <p>Whole-archive is required only when a shared output is being produced from a
 * (fully or mostly) statically linked set of archives, and either the link is for a
 * native dependency library or the legacy whole-archive flag is enabled.
 */
private static boolean needWholeArchive(LinkStaticness staticness,
    LinkTargetType type, Collection<String> linkopts, boolean isNativeDeps,
    CppConfiguration cppConfig) {
  // Only fully or mostly static links are candidates.
  if (staticness != LinkStaticness.FULLY_STATIC && staticness != LinkStaticness.MOSTLY_STATIC) {
    return false;
  }
  // A "shared" output can be requested via the target type or via linkopts.
  boolean buildingShared =
      type == LinkTargetType.DYNAMIC_LIBRARY
          || linkopts.contains("-shared")
          || cppConfig.getLinkOptions().contains("-shared");
  return buildingShared && (isNativeDeps || cppConfig.legacyWholeArchive());
}
/**
 * Assembles the complete, ordered list of action outputs: the primary output first,
 * then the given collection, then any non-null entries of the varargs tail.
 *
 * @param primaryOutput the output listed first (callers rely on this ordering)
 * @param outputList additional outputs, added in iteration order
 * @param outputs optional outputs; null entries are silently dropped
 */
private static ImmutableList<Artifact> constructOutputs(Artifact primaryOutput,
    Collection<Artifact> outputList, Artifact... outputs) {
  return new ImmutableList.Builder<Artifact>()
      .add(primaryOutput)
      .addAll(outputList)
      .addAll(CollectionUtils.asListWithoutNulls(outputs))
      .build();
}
/**
 * Translates a collection of linkstamp source files to an immutable
 * mapping from source files to object files. In other words, given a
 * set of source files, this method determines the output path to which
 * each file should be compiled.
 *
 * @param linkstamps collection of linkstamp source files
 * @param ruleContext the rule for which this link is being performed
 * @param outputBinary the binary output path for this link
 * @param linkArtifactFactory factory used to create the object-file artifacts
 * @return an immutable map that pairs each source file with the
 *         corresponding object file that should be fed into the link
 */
public static ImmutableMap<Artifact, Artifact> mapLinkstampsToOutputs(
    Collection<Artifact> linkstamps, RuleContext ruleContext, Artifact outputBinary,
    LinkArtifactFactory linkArtifactFactory) {
  PathFragment outputBinaryPath = outputBinary.getRootRelativePath();
  // Objects are placed under <parent-of-binary>/_objs/<binary-name>/.
  PathFragment stampOutputDirectory =
      outputBinaryPath.getParentDirectory()
          .getRelative("_objs")
          .getRelative(outputBinaryPath.getBaseName());
  ImmutableMap.Builder<Artifact, Artifact> mapBuilder = ImmutableMap.builder();
  for (Artifact linkstamp : linkstamps) {
    PathFragment stampOutputPath =
        stampOutputDirectory.getRelative(
            FileSystemUtils.replaceExtension(linkstamp.getRootRelativePath(), ".o"));
    // Note that link stamp actions can be shared between link actions that output shared
    // native dep libraries, hence the indirection through the factory.
    mapBuilder.put(linkstamp, linkArtifactFactory.create(ruleContext, stampOutputPath));
  }
  return mapBuilder.build();
}
/** Returns the owner for actions created by this builder (the rule's action owner). */
protected ActionOwner getOwner() {
  return ruleContext.getActionOwner();
}

/**
 * Returns the embedded tool artifact registered under
 * {@link CppRuleClasses#BUILD_INTERFACE_SO} — presumably the interface-so builder;
 * confirm against CppRuleClasses.
 */
protected Artifact getInterfaceSoBuilder() {
  return analysisEnvironment.getEmbeddedToolArtifact(CppRuleClasses.BUILD_INTERFACE_SO);
}
/**
 * Set the crosstool inputs required for the action.
 */
public Builder setCrosstoolInputs(NestedSet<Artifact> inputs) {
  this.crosstoolInputs = inputs;
  return this;
}

/**
 * This is the LTO indexing step, rather than the real link.
 *
 * <p>When using this, build() will store allLTOArtifacts as a side-effect so the next build()
 * call can emit the real link. Do not call addInput() between the two build() calls.
 *
 */
public Builder setLTOIndexing(boolean ltoIndexing) {
  this.isLTOIndexing = ltoIndexing;
  return this;
}

/**
 * Sets the C++ runtime library inputs for the action. The middleman must be null exactly when
 * the input set is empty (enforced below).
 */
public Builder setRuntimeInputs(Artifact middleman, NestedSet<Artifact> inputs) {
  Preconditions.checkArgument((middleman == null) == inputs.isEmpty());
  this.runtimeMiddleman = middleman;
  this.runtimeInputs = inputs;
  return this;
}

/**
 * Sets the interface output of the link. A non-null argument can
 * only be provided if the link type is {@code DYNAMIC_LIBRARY}
 * and fake is false.
 */
public Builder setInterfaceOutput(Artifact interfaceOutput) {
  this.interfaceOutput = interfaceOutput;
  return this;
}

/** Sets the artifact that will receive symbol counts output, if any. */
public Builder setSymbolCountsOutput(Artifact symbolCounts) {
  this.symbolCounts = symbolCounts;
  return this;
}

/**
 * Add additional inputs needed for the linkstamp compilation that is being done as part of the
 * link.
 */
public Builder addCompilationInputs(Iterable<Artifact> inputs) {
  this.compilationInputs.addAll(inputs);
  return this;
}

/** Like {@link #addCompilationInputs}, but adds an entire nested set transitively. */
public Builder addTransitiveCompilationInputs(NestedSet<Artifact> inputs) {
  this.compilationInputs.addTransitive(inputs);
  return this;
}

/**
 * Adds a linker input that must NOT be an archive or shared library; rejects library
 * file types up front so misuse fails at analysis time.
 */
private void addNonLibraryInput(LinkerInput input) {
  String name = input.getArtifact().getFilename();
  Preconditions.checkArgument(
      !Link.ARCHIVE_LIBRARY_FILETYPES.matches(name)
          && !Link.SHARED_LIBRARY_FILETYPES.matches(name),
      "'%s' is a library file", input);
  this.nonLibraries.add(input);
}

/** Registers LTO bitcode files so build() can set up the LTO indexing/backend steps. */
public Builder addLTOBitcodeFiles(Iterable<Artifact> files) {
  for (Artifact a : files) {
    ltoBitcodeFiles.add(a);
  }
  return this;
}
/**
 * Adds a single artifact to the set of inputs (C++ source files, header files, etc). Artifacts
 * that are not of recognized types will be used for dependency checking but will not be passed
 * to the linker. The artifact must not be an archive or a shared library.
 */
public Builder addNonLibraryInput(Artifact input) {
  addNonLibraryInput(LinkerInputs.simpleLinkerInput(input));
  return this;
}

/**
 * Adds multiple artifacts to the set of inputs (C++ source files, header files, etc).
 * Artifacts that are not of recognized types will be used for dependency checking but will
 * not be passed to the linker. The artifacts must not be archives or shared libraries.
 */
public Builder addNonLibraryInputs(Iterable<Artifact> inputs) {
  for (Artifact input : inputs) {
    addNonLibraryInput(LinkerInputs.simpleLinkerInput(input));
  }
  return this;
}

/** Like {@link #addNonLibraryInputs}, but wraps each artifact via fakeLinkerInput. */
public Builder addFakeNonLibraryInputs(Iterable<Artifact> inputs) {
  for (Artifact input : inputs) {
    addNonLibraryInput(LinkerInputs.fakeLinkerInput(input));
  }
  return this;
}

/** Throws IllegalArgumentException if the input is not an archive or shared library file. */
private void checkLibrary(LibraryToLink input) {
  String name = input.getArtifact().getFilename();
  Preconditions.checkArgument(
      Link.ARCHIVE_LIBRARY_FILETYPES.matches(name)
          || Link.SHARED_LIBRARY_FILETYPES.matches(name),
      "'%s' is not a library file",
      input);
}

/**
 * Adds a single artifact to the set of inputs. The artifact must be an archive or a shared
 * library. Note that all directly added libraries are implicitly ordered before all nested
 * sets added with {@link #addLibraries}, even if added in the opposite order.
 */
public Builder addLibrary(LibraryToLink input) {
  checkLibrary(input);
  libraries.add(input);
  return this;
}

/**
 * Adds multiple artifact to the set of inputs. The artifacts must be archives or shared
 * libraries.
 */
public Builder addLibraries(NestedSet<LibraryToLink> inputs) {
  // Validate every entry eagerly; the nested set itself is added transitively below.
  for (LibraryToLink input : inputs) {
    checkLibrary(input);
  }
  this.libraries.addTransitive(inputs);
  return this;
}
/**
 * Sets the type of ELF file to be created (.a, .so, .lo, executable). The
 * default is {@link LinkTargetType#STATIC_LIBRARY}.
 */
public Builder setLinkType(LinkTargetType linkType) {
  this.linkType = linkType;
  return this;
}

/**
 * Sets the degree of "staticness" of the link: fully static (static binding
 * of all symbols), mostly static (use dynamic binding only for symbols from
 * glibc), dynamic (use dynamic binding wherever possible). The default is
 * {@link LinkStaticness#FULLY_STATIC}.
 */
public Builder setLinkStaticness(LinkStaticness linkStaticness) {
  this.linkStaticness = linkStaticness;
  return this;
}

/**
 * Adds a C++ source file which will be compiled at link time. This is used
 * to embed various values from the build system into binaries to identify
 * their provenance.
 *
 * <p>Link stamps are also automatically added to the inputs.
 */
public Builder addLinkstamps(Map<Artifact, ImmutableList<Artifact>> linkstamps) {
  this.linkstamps.addAll(linkstamps.keySet());
  // Add inputs for linkstamping.
  if (!linkstamps.isEmpty()) {
    // This will just be the compiler unless include scanning is disabled, in which case it will
    // include all header files. Since we insist that linkstamps declare all their headers, all
    // header files would be overkill, but that only happens when include scanning is disabled.
    addTransitiveCompilationInputs(toolchain.getCompile());
    for (Map.Entry<Artifact, ImmutableList<Artifact>> entry : linkstamps.entrySet()) {
      addCompilationInputs(entry.getValue());
    }
  }
  return this;
}

/**
 * Sets the compiler options used when compiling linkstamp sources.
 *
 * <p>Note: despite the "add" name, this REPLACES any previously set options (plain
 * assignment below) rather than appending.
 */
public Builder addLinkstampCompilerOptions(ImmutableList<String> linkstampOptions) {
  this.linkstampOptions = linkstampOptions;
  return this;
}

/**
 * Adds an additional linker option.
 */
public Builder addLinkopt(String linkopt) {
  this.linkopts.add(linkopt);
  return this;
}

/**
 * Adds multiple linker options at once.
 *
 * @see #addLinkopt(String)
 */
public Builder addLinkopts(Collection<String> linkopts) {
  this.linkopts.addAll(linkopts);
  return this;
}

/**
 * Merges the given link params into this builder by calling {@link #addLinkopts}, {@link
 * #addLibraries}, and {@link #addLinkstamps}.
 */
public Builder addLinkParams(CcLinkParams linkParams, RuleErrorConsumer errorListener) {
  addLinkopts(linkParams.flattenedLinkopts());
  addLibraries(linkParams.getLibraries());
  // Extra link-time libraries are materialized for this rule before being added.
  ExtraLinkTimeLibraries extraLinkTimeLibraries = linkParams.getExtraLinkTimeLibraries();
  if (extraLinkTimeLibraries != null) {
    for (ExtraLinkTimeLibrary extraLibrary : extraLinkTimeLibraries.getExtraLibraries()) {
      addLibraries(extraLibrary.buildLibraries(ruleContext));
    }
  }
  addLinkstamps(CppHelper.resolveLinkstamps(errorListener, linkParams));
  return this;
}
/**
 * Sets whether this link action will be used for a cc_fake_binary; false by
 * default.
 */
public Builder setFake(boolean fake) {
  this.fake = fake;
  return this;
}

/**
 * Sets whether this link action is used for a native dependency library.
 */
public Builder setNativeDeps(boolean isNativeDeps) {
  this.isNativeDeps = isNativeDeps;
  return this;
}

/**
 * Setting this to true overrides the default whole-archive computation and force-enables
 * whole archives for every archive in the link. This is only necessary for linking executable
 * binaries that are supposed to export symbols.
 *
 * <p>Usually, the link action will use whole archives for dynamic libraries that are native
 * deps (or the legacy whole archive flag is enabled), and that are not dynamically linked.
 *
 * <p>(Note that it is possible to build dynamic libraries with cc_binary rules by specifying
 * linkshared = 1, and giving the rule a name that matches the pattern {@code
 * lib<name>.so}.)
 */
public Builder setWholeArchive(boolean wholeArchive) {
  this.wholeArchive = wholeArchive;
  return this;
}

/**
 * Sets whether this link action should use test-specific flags (e.g. $EXEC_ORIGIN instead of
 * $ORIGIN for the solib search path or lazy binding); false by default.
 */
public Builder setUseTestOnlyFlags(boolean useTestOnlyFlags) {
  this.useTestOnlyFlags = useTestOnlyFlags;
  return this;
}

/**
 * Sets the name of the directory where the solib symlinks for the dynamic runtime libraries
 * live. This is usually automatically set from the cc_toolchain.
 */
public Builder setRuntimeSolibDir(PathFragment runtimeSolibDir) {
  this.runtimeSolibDir = runtimeSolibDir;
  return this;
}
/**
 * Creates a builder without the need for a {@link RuleContext}.
 * This is to be used exclusively for testing purposes.
 *
 * <p>Link stamping is not supported if using this method.
 */
@VisibleForTesting
public static Builder createTestBuilder(
    final ActionOwner owner, final AnalysisEnvironment analysisEnvironment,
    final Artifact output, BuildConfiguration config) {
  // The anonymous subclass overrides getOwner() because there is no RuleContext
  // (first constructor argument is null) to supply an action owner.
  return new Builder(null, output, config, analysisEnvironment, null) {
    @Override
    protected ActionOwner getOwner() {
      return owner;
    }
  };
}
}
/**
 * TransitiveInfoProvider for ELF link actions.
 *
 * <p>An immutable snapshot of a {@link Builder}'s state, suitable for passing between
 * configured targets.
 */
@Immutable @ThreadSafe
public static final class Context implements TransitiveInfoProvider {
  // Morally equivalent with {@link Builder}, except these are immutable.
  // Keep these in sync with {@link Builder}.
  private final ImmutableSet<LinkerInput> nonLibraries;
  private final NestedSet<LibraryToLink> libraries;
  private final NestedSet<Artifact> crosstoolInputs;
  private final Artifact runtimeMiddleman;
  private final NestedSet<Artifact> runtimeInputs;
  private final NestedSet<Artifact> compilationInputs;
  private final ImmutableSet<Artifact> linkstamps;
  private final ImmutableList<String> linkopts;
  private final LinkTargetType linkType;
  private final LinkStaticness linkStaticness;
  private final boolean fake;
  private final boolean isNativeDeps;
  private final boolean useTestOnlyFlags;

  /**
   * Given a {@link Builder}, creates a {@code Context} to pass to another target.
   * Note well: Keep the Builder->Context and Context->Builder transforms consistent!
   * @param builder a mutable {@link CppLinkAction.Builder} to clone from
   */
  public Context(Builder builder) {
    // Each mutable builder collection is copied (or re-wrapped into a fresh NestedSet)
    // so the snapshot is independent of subsequent mutation of the builder.
    this.nonLibraries = ImmutableSet.copyOf(builder.nonLibraries);
    this.libraries = NestedSetBuilder.<LibraryToLink>linkOrder()
        .addTransitive(builder.libraries.build()).build();
    this.crosstoolInputs =
        NestedSetBuilder.<Artifact>stableOrder().addTransitive(builder.crosstoolInputs).build();
    this.runtimeMiddleman = builder.runtimeMiddleman;
    this.runtimeInputs =
        NestedSetBuilder.<Artifact>stableOrder().addTransitive(builder.runtimeInputs).build();
    this.compilationInputs = NestedSetBuilder.<Artifact>stableOrder()
        .addTransitive(builder.compilationInputs.build()).build();
    this.linkstamps = ImmutableSet.copyOf(builder.linkstamps);
    this.linkopts = ImmutableList.copyOf(builder.linkopts);
    this.linkType = builder.linkType;
    this.linkStaticness = builder.linkStaticness;
    this.fake = builder.fake;
    this.isNativeDeps = builder.isNativeDeps;
    this.useTestOnlyFlags = builder.useTestOnlyFlags;
  }
}
}
| |
/*
* Copyright 2002-2017 Drew Noakes
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* More information about this project is available at:
*
* https://drewnoakes.com/code/exif/
* https://github.com/drewnoakes/metadata-extractor
*/
package com.drew.metadata;
import com.drew.lang.Rational;
import com.drew.lang.annotations.NotNull;
import com.drew.lang.annotations.Nullable;
import com.drew.lang.annotations.SuppressWarnings;
import java.io.UnsupportedEncodingException;
import java.lang.reflect.Array;
import java.text.DateFormat;
import java.text.DecimalFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Abstract base class for all directory implementations, having methods for getting and setting tag values of various
* data types.
*
* @author Drew Noakes https://drewnoakes.com
*/
@java.lang.SuppressWarnings("WeakerAccess")
public abstract class Directory
{
// DecimalFormat pattern used when rendering floating point values (usage outside this excerpt).
private static final String _floatFormatPattern = "0.###";

/** Map of values hashed by type identifiers. */
@NotNull
protected final Map<Integer, Object> _tagMap = new HashMap<Integer, Object>();

/**
 * A convenient list holding tag values in the order in which they were stored.
 * This is used for creation of an iterator, and for counting the number of
 * defined tags.
 */
@NotNull
protected final Collection<Tag> _definedTagList = new ArrayList<Tag>();

// Error messages recorded via addError().
@NotNull
private final Collection<String> _errorList = new ArrayList<String>(4);

/** The descriptor used to interpret tag values. */
protected TagDescriptor _descriptor;

// The directory containing this one, if any; assigned via setParent().
@Nullable
private Directory _parent;
// ABSTRACT METHODS

/**
 * Provides the name of the directory, for display purposes. E.g. <code>Exif</code>
 *
 * @return the name of the directory
 */
@NotNull
public abstract String getName();

/**
 * Provides the map of tag names, hashed by tag type identifier.
 *
 * @return the map of tag names
 */
@NotNull
protected abstract HashMap<Integer, String> getTagNameMap();

// No construction work needed; subclasses supply state via the abstract methods above.
protected Directory()
{}
// VARIOUS METHODS

/**
 * Indicates whether this directory is empty: no errors recorded and no tag values stored.
 */
public boolean isEmpty()
{
    return _definedTagList.isEmpty() && _errorList.isEmpty();
}

/**
 * Indicates whether the specified tag type has been set.
 *
 * @param tagType the tag type to check for
 * @return true if a value exists for the specified tag type, false if not
 */
public boolean containsTag(int tagType)
{
    // Autoboxing performs the same Integer.valueOf conversion the original spelled out.
    return _tagMap.containsKey(tagType);
}

/**
 * Returns the Tag instances that have been set in this directory, in insertion order.
 *
 * @return an unmodifiable collection of Tag instances
 */
@NotNull
public Collection<Tag> getTags()
{
    return Collections.unmodifiableCollection(_definedTagList);
}

/**
 * Returns the number of tags set in this Directory.
 *
 * @return the number of tags set in this Directory
 */
public int getTagCount()
{
    return _definedTagList.size();
}
/**
 * Sets the descriptor used to interpret tag values.
 *
 * @param descriptor the descriptor used to interpret tag values
 * @throws NullPointerException if {@code descriptor} is null
 */
@java.lang.SuppressWarnings({ "ConstantConditions" })
public void setDescriptor(@NotNull TagDescriptor descriptor)
{
    if (descriptor == null) {
        throw new NullPointerException("cannot set a null descriptor");
    }
    _descriptor = descriptor;
}

/**
 * Registers an error message with this directory.
 *
 * @param message an error message.
 */
public void addError(@NotNull String message)
{
    _errorList.add(message);
}

/**
 * Gets a value indicating whether this directory has any error messages.
 *
 * @return true if the directory contains errors, otherwise false
 */
public boolean hasErrors()
{
    return !_errorList.isEmpty();
}
/**
 * Used to iterate over any error messages contained in this directory.
 *
 * @return an iterable collection of error message strings.
 */
@NotNull
public Iterable<String> getErrors()
{
    return Collections.unmodifiableCollection(_errorList);
}

/** Returns the count of error messages in this directory. */
public int getErrorCount()
{
    return _errorList.size();
}

/** Returns the parent directory, or <code>null</code> if none has been assigned. */
@Nullable
public Directory getParent()
{
    return _parent;
}

/** Records the directory that contains this one in the metadata tree. */
public void setParent(@NotNull Directory parent)
{
    _parent = parent;
}
// TAG SETTERS
// All primitive setters delegate to setObject/setObjectArray; primitives are autoboxed.

/**
 * Sets an <code>int</code> value for the specified tag.
 *
 * @param tagType the tag's value as an int
 * @param value the value for the specified tag as an int
 */
public void setInt(int tagType, int value)
{
    setObject(tagType, value);
}

/**
 * Sets an <code>int[]</code> (array) for the specified tag.
 *
 * @param tagType the tag identifier
 * @param ints the int array to store
 */
public void setIntArray(int tagType, @NotNull int[] ints)
{
    setObjectArray(tagType, ints);
}

/**
 * Sets a <code>float</code> value for the specified tag.
 *
 * @param tagType the tag's value as an int
 * @param value the value for the specified tag as a float
 */
public void setFloat(int tagType, float value)
{
    setObject(tagType, value);
}

/**
 * Sets a <code>float[]</code> (array) for the specified tag.
 *
 * @param tagType the tag identifier
 * @param floats the float array to store
 */
public void setFloatArray(int tagType, @NotNull float[] floats)
{
    setObjectArray(tagType, floats);
}

/**
 * Sets a <code>double</code> value for the specified tag.
 *
 * @param tagType the tag's value as an int
 * @param value the value for the specified tag as a double
 */
public void setDouble(int tagType, double value)
{
    setObject(tagType, value);
}

/**
 * Sets a <code>double[]</code> (array) for the specified tag.
 *
 * @param tagType the tag identifier
 * @param doubles the double array to store
 */
public void setDoubleArray(int tagType, @NotNull double[] doubles)
{
    setObjectArray(tagType, doubles);
}
/**
 * Sets a <code>StringValue</code> value for the specified tag.
 *
 * @param tagType the tag's value as an int
 * @param value the value for the specified tag as a StringValue
 * @throws NullPointerException if value is <code>null</code>
 */
@java.lang.SuppressWarnings({ "ConstantConditions" })
public void setStringValue(int tagType, @NotNull StringValue value)
{
    if (value == null)
        throw new NullPointerException("cannot set a null StringValue");
    setObject(tagType, value);
}

/**
 * Sets a <code>String</code> value for the specified tag.
 *
 * @param tagType the tag's value as an int
 * @param value the value for the specified tag as a String
 * @throws NullPointerException if value is <code>null</code>
 */
@java.lang.SuppressWarnings({ "ConstantConditions" })
public void setString(int tagType, @NotNull String value)
{
    if (value == null)
        throw new NullPointerException("cannot set a null String");
    setObject(tagType, value);
}

/**
 * Sets a <code>String[]</code> (array) for the specified tag.
 *
 * @param tagType the tag identifier
 * @param strings the String array to store
 */
public void setStringArray(int tagType, @NotNull String[] strings)
{
    setObjectArray(tagType, strings);
}

/**
 * Sets a <code>StringValue[]</code> (array) for the specified tag.
 *
 * @param tagType the tag identifier
 * @param strings the StringValue array to store
 */
public void setStringValueArray(int tagType, @NotNull StringValue[] strings)
{
    setObjectArray(tagType, strings);
}
/**
 * Sets a <code>boolean</code> value for the specified tag.
 *
 * @param tagType the tag's value as an int
 * @param value the value for the specified tag as a boolean
 */
public void setBoolean(int tagType, boolean value)
{
    setObject(tagType, value);
}

/**
 * Sets a <code>long</code> value for the specified tag.
 *
 * @param tagType the tag's value as an int
 * @param value the value for the specified tag as a long
 */
public void setLong(int tagType, long value)
{
    setObject(tagType, value);
}

/**
 * Sets a <code>java.util.Date</code> value for the specified tag.
 *
 * @param tagType the tag's value as an int
 * @param value the value for the specified tag as a java.util.Date
 */
public void setDate(int tagType, @NotNull java.util.Date value)
{
    setObject(tagType, value);
}

/**
 * Sets a <code>Rational</code> value for the specified tag.
 *
 * @param tagType the tag's value as an int
 * @param rational rational number
 */
public void setRational(int tagType, @NotNull Rational rational)
{
    setObject(tagType, rational);
}

/**
 * Sets a <code>Rational[]</code> (array) for the specified tag.
 *
 * @param tagType the tag identifier
 * @param rationals the Rational array to store
 */
public void setRationalArray(int tagType, @NotNull Rational[] rationals)
{
    setObjectArray(tagType, rationals);
}

/**
 * Sets a <code>byte[]</code> (array) for the specified tag.
 *
 * @param tagType the tag identifier
 * @param bytes the byte array to store
 */
public void setByteArray(int tagType, @NotNull byte[] bytes)
{
    setObjectArray(tagType, bytes);
}
/**
 * Stores an <code>Object</code> for the specified tag.
 *
 * @param tagType the tag's value as an int
 * @param value the value for the specified tag
 * @throws NullPointerException if value is <code>null</code>
 */
@java.lang.SuppressWarnings( { "ConstantConditions", "UnnecessaryBoxing" })
public void setObject(int tagType, @NotNull Object value)
{
    if (value == null)
        throw new NullPointerException("cannot set a null object");
    // Record the tag on first assignment only, so _definedTagList keeps insertion order
    // and contains each tag exactly once even when a value is overwritten.
    if (!_tagMap.containsKey(Integer.valueOf(tagType)))
        _definedTagList.add(new Tag(tagType, this));
    _tagMap.put(tagType, value);
}
/**
 * Sets an array <code>Object</code> for the specified tag.
 *
 * @param tagType the tag's value as an int
 * @param array the array of values for the specified tag
 */
public void setObjectArray(int tagType, @NotNull Object array)
{
    // for now, we don't do anything special -- this method might be a candidate for removal once the dust settles
    setObject(tagType, array);
}
// TAG GETTERS

/**
 * Returns the specified tag's value as an int, if possible. Every attempt to represent the tag's value as an int
 * is taken (conversion is delegated to {@link #getInteger}). Here is a list of the action taken depending upon the tag's original type:
 * <ul>
 * <li> int - Return unchanged.
 * <li> Number - Return an int value (real numbers are truncated).
 * <li> Rational - Truncate any fractional part and returns remaining int.
 * <li> String - Attempt to parse string as an int. If this fails, convert the char[] to an int (using shifts and OR).
 * <li> Rational[] - Return int value of the sole item, if the array contains exactly one item.
 * <li> byte[] - Return int value of the sole item, if the array contains exactly one item.
 * <li> int[] - Return int value of the sole item, if the array contains exactly one item.
 * </ul>
 *
 * @throws MetadataException if no value exists for tagType or if it cannot be converted to an int.
 */
public int getInt(int tagType) throws MetadataException
{
    Integer integer = getInteger(tagType);
    if (integer!=null)
        return integer;
    // Distinguish "tag never set" from "set but unconvertible" in the error message.
    Object o = getObject(tagType);
    if (o == null)
        throw new MetadataException("Tag '" + getTagName(tagType) + "' has not been set -- check using containsTag() first");
    throw new MetadataException("Tag '" + tagType + "' cannot be converted to int. It is of type '" + o.getClass() + "'.");
}
/**
 * Returns the specified tag's value as an Integer, if possible. Every attempt to represent the tag's value as an
 * Integer is taken. Here is a list of the action taken depending upon the tag's original type:
 * <ul>
 * <li> int - Return unchanged
 * <li> Number - Return an int value (real numbers are truncated)
 * <li> Rational - Truncate any fractional part and returns remaining int
 * <li> String - Attempt to parse string as an int. If this fails, convert the char[] to an int (using shifts and OR)
 * <li> Rational[] - Return int value of the sole item, if the array contains exactly one item
 * <li> byte[] - Return int value of the sole item, if the array contains exactly one item
 * <li> int[] - Return int value of the sole item, if the array contains exactly one item
 * <li> short[] - Return int value of the sole item, if the array contains exactly one item
 * </ul>
 *
 * If the value is not found or cannot be converted to int, <code>null</code> is returned.
 */
@Nullable
public Integer getInteger(int tagType)
{
    Object o = getObject(tagType);
    if (o == null)
        return null;
    if (o instanceof Number) {
        return ((Number)o).intValue();
    } else if (o instanceof String || o instanceof StringValue) {
        try {
            return Integer.parseInt(o.toString());
        } catch (NumberFormatException nfe) {
            // convert the char array to an int: fold each byte into the low 8 bits
            String s = o.toString();
            byte[] bytes = s.getBytes();
            long val = 0;
            for (byte aByte : bytes) {
                val = val << 8;
                val += (aByte & 0xff);
            }
            return (int)val;
        }
    } else if (o instanceof Rational[]) {
        Rational[] rationals = (Rational[])o;
        if (rationals.length == 1)
            return rationals[0].intValue();
    } else if (o instanceof byte[]) {
        byte[] bytes = (byte[])o;
        if (bytes.length == 1)
            return (int)bytes[0];
    } else if (o instanceof int[]) {
        int[] ints = (int[])o;
        if (ints.length == 1)
            return ints[0];
    } else if (o instanceof short[]) {
        short[] shorts = (short[])o;
        if (shorts.length == 1)
            return (int)shorts[0];
    }
    return null;
}
/**
 * Gets the specified tag's value as a String array, if possible. Only supported
 * where the tag is set as StringValue[], String[], StringValue, String, int[], byte[] or Rational[].
 * Numeric arrays are converted element-wise, producing one string per element.
 *
 * @param tagType the tag identifier
 * @return the tag's value as an array of Strings. If the value is unset or cannot be converted, <code>null</code> is returned.
 */
@Nullable
public String[] getStringArray(int tagType)
{
    Object o = getObject(tagType);
    if (o == null)
        return null;
    if (o instanceof String[])
        return (String[])o;
    if (o instanceof String)
        return new String[] { (String)o };
    if (o instanceof StringValue)
        return new String[] { o.toString() };
    if (o instanceof StringValue[]) {
        StringValue[] stringValues = (StringValue[])o;
        String[] strings = new String[stringValues.length];
        for (int i = 0; i < strings.length; i++)
            strings[i] = stringValues[i].toString();
        return strings;
    }
    if (o instanceof int[]) {
        int[] ints = (int[])o;
        String[] strings = new String[ints.length];
        for (int i = 0; i < strings.length; i++)
            strings[i] = Integer.toString(ints[i]);
        return strings;
    }
    if (o instanceof byte[]) {
        byte[] bytes = (byte[])o;
        String[] strings = new String[bytes.length];
        for (int i = 0; i < strings.length; i++)
            strings[i] = Byte.toString(bytes[i]);
        return strings;
    }
    if (o instanceof Rational[]) {
        Rational[] rationals = (Rational[])o;
        String[] strings = new String[rationals.length];
        for (int i = 0; i < strings.length; i++)
            strings[i] = rationals[i].toSimpleString(false);
        return strings;
    }
    return null;
}
/**
 * Gets the specified tag's value as a StringValue array, if possible.
 * Only succeeds if the tag is set as StringValue[], or StringValue.
 *
 * @param tagType the tag identifier
 * @return the tag's value as an array of StringValues. If the value is unset or cannot be converted, <code>null</code> is returned.
 */
@Nullable
public StringValue[] getStringValueArray(int tagType)
{
    Object o = getObject(tagType);
    // instanceof is false for null, so no explicit null check is needed.
    if (o instanceof StringValue[])
        return (StringValue[])o;
    if (o instanceof StringValue)
        return new StringValue[] { (StringValue)o };
    // Unset, or a type we cannot convert.
    return null;
}
/**
 * Gets the specified tag's value as an int array, if possible. Only supported
 * where the tag is set as CharSequence (one int per character), Integer, int[], short[],
 * byte[] or Rational[].
 *
 * @param tagType the tag identifier
 * @return the tag's value as an int array. If the value is unset or cannot be converted, <code>null</code> is returned.
 */
@Nullable
public int[] getIntArray(int tagType)
{
    Object o = getObject(tagType);
    if (o == null)
        return null;
    if (o instanceof int[])
        return (int[])o;
    if (o instanceof Rational[]) {
        // Fractional parts are truncated.
        Rational[] rationals = (Rational[])o;
        int[] ints = new int[rationals.length];
        for (int i = 0; i < ints.length; i++) {
            ints[i] = rationals[i].intValue();
        }
        return ints;
    }
    if (o instanceof short[]) {
        short[] shorts = (short[])o;
        int[] ints = new int[shorts.length];
        for (int i = 0; i < shorts.length; i++) {
            ints[i] = shorts[i];
        }
        return ints;
    }
    if (o instanceof byte[]) {
        byte[] bytes = (byte[])o;
        int[] ints = new int[bytes.length];
        for (int i = 0; i < bytes.length; i++) {
            ints[i] = bytes[i];
        }
        return ints;
    }
    if (o instanceof CharSequence) {
        // One int per character (its UTF-16 code unit value).
        CharSequence str = (CharSequence)o;
        int[] ints = new int[str.length()];
        for (int i = 0; i < str.length(); i++) {
            ints[i] = str.charAt(i);
        }
        return ints;
    }
    if (o instanceof Integer)
        return new int[] { (Integer)o };
    return null;
}
/**
 * Gets the specified tag's value as a byte array, if possible. Only supported
 * where the tag is set as StringValue, CharSequence, Integer, int[], short[],
 * byte[] or Rational[]. Numeric conversions narrow each element to a byte.
 *
 * @param tagType the tag identifier
 * @return the tag's value as a byte array. If the value is unset or cannot be converted, <code>null</code> is returned.
 */
@Nullable
public byte[] getByteArray(int tagType)
{
    Object o = getObject(tagType);
    if (o == null) {
        return null;
    } else if (o instanceof StringValue) {
        return ((StringValue)o).getBytes();
    } else if (o instanceof Rational[]) {
        Rational[] rationals = (Rational[])o;
        byte[] bytes = new byte[rationals.length];
        for (int i = 0; i < bytes.length; i++) {
            bytes[i] = rationals[i].byteValue();
        }
        return bytes;
    } else if (o instanceof byte[]) {
        return (byte[])o;
    } else if (o instanceof int[]) {
        int[] ints = (int[])o;
        byte[] bytes = new byte[ints.length];
        for (int i = 0; i < ints.length; i++) {
            bytes[i] = (byte)ints[i];
        }
        return bytes;
    } else if (o instanceof short[]) {
        short[] shorts = (short[])o;
        byte[] bytes = new byte[shorts.length];
        for (int i = 0; i < shorts.length; i++) {
            bytes[i] = (byte)shorts[i];
        }
        return bytes;
    } else if (o instanceof CharSequence) {
        // One byte per character; characters above 0xFF are truncated.
        CharSequence str = (CharSequence)o;
        byte[] bytes = new byte[str.length()];
        for (int i = 0; i < str.length(); i++) {
            bytes[i] = (byte)str.charAt(i);
        }
        return bytes;
    }
    if (o instanceof Integer)
        return new byte[] { ((Integer)o).byteValue() };
    return null;
}
/**
 * Returns the specified tag's value as a double, if possible (conversion is delegated
 * to {@link #getDoubleObject}).
 *
 * @throws MetadataException if no value exists for tagType or if it cannot be converted to a double.
 */
public double getDouble(int tagType) throws MetadataException
{
    Double value = getDoubleObject(tagType);
    if (value!=null)
        return value;
    // Distinguish "tag never set" from "set but unconvertible" in the error message.
    Object o = getObject(tagType);
    if (o == null)
        throw new MetadataException("Tag '" + getTagName(tagType) + "' has not been set -- check using containsTag() first");
    throw new MetadataException("Tag '" + tagType + "' cannot be converted to a double. It is of type '" + o.getClass() + "'.");
}
/**
 * Returns the specified tag's value as a Double.
 * If the tag is not set or cannot be converted, <code>null</code> is returned.
 */
@Nullable
public Double getDoubleObject(int tagType)
{
    Object value = getObject(tagType);
    if (value instanceof String || value instanceof StringValue) {
        try {
            return Double.parseDouble(value.toString());
        } catch (NumberFormatException ignored) {
            return null; // not a numeric string
        }
    }
    if (value instanceof Number)
        return ((Number)value).doubleValue();
    // null or an unconvertible type
    return null;
}
/**
 * Returns the specified tag's value as a float, if possible.
 *
 * @param tagType the tag identifier
 * @throws MetadataException if the tag is unset or its value cannot be converted
 */
public float getFloat(int tagType) throws MetadataException
{
    Float boxed = getFloatObject(tagType);
    if (boxed != null)
        return boxed;
    Object raw = getObject(tagType);
    if (raw == null)
        throw new MetadataException("Tag '" + getTagName(tagType) + "' has not been set -- check using containsTag() first");
    throw new MetadataException("Tag '" + tagType + "' cannot be converted to a float. It is of type '" + raw.getClass() + "'.");
}
/**
 * Returns the specified tag's value as a Float.
 * If the tag is not set or cannot be converted, <code>null</code> is returned.
 */
@Nullable
public Float getFloatObject(int tagType)
{
    Object value = getObject(tagType);
    if (value instanceof String || value instanceof StringValue) {
        try {
            return Float.parseFloat(value.toString());
        } catch (NumberFormatException ignored) {
            return null; // not a numeric string
        }
    }
    if (value instanceof Number)
        return ((Number)value).floatValue();
    return null;
}
/**
 * Returns the specified tag's value as a long, if possible.
 *
 * @param tagType the tag identifier
 * @throws MetadataException if the tag is unset or its value cannot be converted
 */
public long getLong(int tagType) throws MetadataException
{
    Long boxed = getLongObject(tagType);
    if (boxed != null)
        return boxed;
    Object raw = getObject(tagType);
    if (raw == null)
        throw new MetadataException("Tag '" + getTagName(tagType) + "' has not been set -- check using containsTag() first");
    throw new MetadataException("Tag '" + tagType + "' cannot be converted to a long. It is of type '" + raw.getClass() + "'.");
}
/**
 * Returns the specified tag's value as a Long.
 * If the tag is not set or cannot be converted, <code>null</code> is returned.
 */
@Nullable
public Long getLongObject(int tagType)
{
    Object value = getObject(tagType);
    if (value instanceof String || value instanceof StringValue) {
        try {
            return Long.parseLong(value.toString());
        } catch (NumberFormatException ignored) {
            return null; // not a numeric string
        }
    }
    if (value instanceof Number)
        return ((Number)value).longValue();
    return null;
}
/**
 * Returns the specified tag's value as a boolean, if possible.
 *
 * @param tagType the tag identifier
 * @throws MetadataException if the tag is unset or its value cannot be converted
 */
public boolean getBoolean(int tagType) throws MetadataException
{
    Boolean boxed = getBooleanObject(tagType);
    if (boxed != null)
        return boxed;
    Object raw = getObject(tagType);
    if (raw == null)
        throw new MetadataException("Tag '" + getTagName(tagType) + "' has not been set -- check using containsTag() first");
    throw new MetadataException("Tag '" + tagType + "' cannot be converted to a boolean. It is of type '" + raw.getClass() + "'.");
}
/**
 * Returns the specified tag's value as a Boolean.
 * If the tag is not set or cannot be converted, <code>null</code> is returned.
 * Strings are parsed case-insensitively: "true" yields <code>true</code>, anything
 * else yields <code>false</code>. Numbers are <code>true</code> when non-zero.
 */
@Nullable
@SuppressWarnings(value = "NP_BOOLEAN_RETURN_NULL", justification = "keep API interface consistent")
public Boolean getBooleanObject(int tagType)
{
    Object o = getObject(tagType);
    if (o == null)
        return null;
    if (o instanceof Boolean)
        return (Boolean)o;
    if (o instanceof String || o instanceof StringValue) {
        // BUG FIX: previously this called Boolean.getBoolean(o.toString()), which looks up
        // a JVM *system property* named by the string rather than parsing the string itself
        // (so it virtually always returned false). Boolean.parseBoolean parses the text as
        // intended. The old catch of NumberFormatException was dead code: neither method
        // throws it.
        return Boolean.parseBoolean(o.toString());
    }
    if (o instanceof Number)
        return (((Number)o).doubleValue() != 0);
    return null;
}
/**
 * Returns the specified tag's value as a java.util.Date.
 * If the value is unset or cannot be converted, <code>null</code> is returned.
 * <p>
 * String values are parsed as though they are in the GMT {@link TimeZone}.
 * If the {@link TimeZone} is known, call the overload that accepts one as an argument.
 */
@Nullable
public java.util.Date getDate(int tagType)
{
    // No subsecond digits and no explicit time zone.
    return getDate(tagType, null, null);
}
/**
 * Returns the specified tag's value as a java.util.Date.
 * If the value is unset or cannot be converted, <code>null</code> is returned.
 * <p>
 * String values are parsed in the given {@link TimeZone} (when non-null). The zone is
 * only consulted when the underlying value is a string carrying no zone of its own;
 * otherwise it has no effect.
 */
@Nullable
public java.util.Date getDate(int tagType, @Nullable TimeZone timeZone)
{
    // No explicit subsecond digits.
    return getDate(tagType, null, timeZone);
}
/**
 * Returns the specified tag's value as a java.util.Date. If the value is unset or cannot be converted, <code>null</code> is returned.
 * <p>
 * If the underlying value is a {@link String}, then attempts will be made to parse the string as though it is in
 * the {@link TimeZone} represented by the {@code timeZone} parameter (if it is non-null). Note that this parameter
 * is only considered if the underlying value is a string and it has no time zone information, otherwise it has no effect.
 * In addition, the {@code subsecond} parameter, which specifies the number of digits after the decimal point in the seconds,
 * is set to the returned Date. This parameter is only considered if the underlying value is a string and it has
 * no subsecond information, otherwise it has no effect.
 *
 * @param tagType the tag identifier
 * @param subsecond the fractional-second digits (without the leading dot); converted to milliseconds on the result
 * @param timeZone the time zone to use when the string itself carries none
 * @return a Date representing the time value, or <code>null</code> if unset or unparseable
 */
@Nullable
public java.util.Date getDate(int tagType, @Nullable String subsecond, @Nullable TimeZone timeZone)
{
    Object o = getObject(tagType);
    // Already a Date: return as-is; subsecond/timeZone only apply when parsing strings.
    if (o instanceof java.util.Date)
        return (java.util.Date)o;
    java.util.Date date = null;
    if ((o instanceof String) || (o instanceof StringValue)) {
        // This seems to cover all known Exif and Xmp date strings
        // Note that " : : : : " is a valid date string according to the Exif spec (which means 'unknown date'): http://www.awaresystems.be/imaging/tiff/tifftags/privateifd/exif/datetimeoriginal.html
        String datePatterns[] = {
            "yyyy:MM:dd HH:mm:ss",
            "yyyy:MM:dd HH:mm",
            "yyyy-MM-dd HH:mm:ss",
            "yyyy-MM-dd HH:mm",
            "yyyy.MM.dd HH:mm:ss",
            "yyyy.MM.dd HH:mm",
            "yyyy-MM-dd'T'HH:mm:ss",
            "yyyy-MM-dd'T'HH:mm",
            "yyyy-MM-dd",
            "yyyy-MM",
            "yyyyMMdd", // as used in IPTC data
            "yyyy" };
        String dateString = o.toString();
        // if the date string has subsecond information, it supersedes the subsecond parameter
        Pattern subsecondPattern = Pattern.compile("(\\d\\d:\\d\\d:\\d\\d)(\\.\\d+)");
        Matcher subsecondMatcher = subsecondPattern.matcher(dateString);
        if (subsecondMatcher.find()) {
            // Keep the digits after the dot and strip them from the string to be parsed.
            subsecond = subsecondMatcher.group(2).substring(1);
            dateString = subsecondMatcher.replaceAll("$1");
        }
        // if the date string has time zone information, it supersedes the timeZone parameter
        Pattern timeZonePattern = Pattern.compile("(Z|[+-]\\d\\d:\\d\\d)$");
        Matcher timeZoneMatcher = timeZonePattern.matcher(dateString);
        if (timeZoneMatcher.find()) {
            // "Z" means UTC; "+hh:mm"/"-hh:mm" becomes e.g. "GMT+02:00".
            timeZone = TimeZone.getTimeZone("GMT" + timeZoneMatcher.group().replaceAll("Z", ""));
            dateString = timeZoneMatcher.replaceAll("");
        }
        // Try the patterns from most to least specific; first successful parse wins.
        // NOTE(review): SimpleDateFormat here uses the default locale -- presumably fine for
        // all-numeric patterns, but confirm no locale-sensitive pattern is ever added.
        for (String datePattern : datePatterns) {
            try {
                DateFormat parser = new SimpleDateFormat(datePattern);
                if (timeZone != null)
                    parser.setTimeZone(timeZone);
                else
                    parser.setTimeZone(TimeZone.getTimeZone("GMT")); // don't interpret zone time
                date = parser.parse(dateString);
                break;
            } catch (ParseException ex) {
                // simply try the next pattern
            }
        }
    }
    if (date == null)
        return null;
    if (subsecond == null)
        return date;
    // Fold the subsecond digits back in as milliseconds (e.g. "123" -> 123 ms, "1" -> 100 ms).
    try {
        int millisecond = (int) (Double.parseDouble("." + subsecond) * 1000);
        if (millisecond >= 0 && millisecond < 1000) {
            Calendar calendar = Calendar.getInstance();
            calendar.setTime(date);
            calendar.set(Calendar.MILLISECOND, millisecond);
            return calendar.getTime();
        }
        return date;
    } catch (NumberFormatException e) {
        // Non-numeric subsecond text: return the date without milliseconds.
        return date;
    }
}
/**
 * Returns the specified tag's value as a Rational.
 * If the value is unset or cannot be converted, <code>null</code> is returned.
 * Integer and Long values are promoted to a Rational with denominator 1.
 */
@Nullable
public Rational getRational(int tagType)
{
    Object value = getObject(tagType);
    if (value instanceof Rational)
        return (Rational)value;
    if (value instanceof Integer)
        return new Rational((Integer)value, 1);
    if (value instanceof Long)
        return new Rational((Long)value, 1);
    // NOTE not doing conversions for real number types
    return null;
}
/**
 * Returns the specified tag's value as an array of Rational.
 * If the value is unset or cannot be converted, <code>null</code> is returned.
 */
@Nullable
public Rational[] getRationalArray(int tagType)
{
    Object value = getObject(tagType);
    return (value instanceof Rational[]) ? (Rational[])value : null;
}
/**
 * Returns the specified tag's value as a String. This value is the 'raw' value. A more presentable decoding
 * of this value may be obtained from the corresponding Descriptor.
 * <p>
 * Arrays are rendered as space-separated elements. Float/double values (and arrays of
 * them) are formatted with {@code _floatFormatPattern}; byte elements are shown
 * unsigned (0..255).
 *
 * @return the String representation of the tag's value, or
 *         <code>null</code> if the tag hasn't been defined.
 */
@Nullable
public String getString(int tagType)
{
    Object o = getObject(tagType);
    if (o == null)
        return null;
    if (o instanceof Rational)
        return ((Rational)o).toSimpleString(true);
    if (o.getClass().isArray()) {
        // handle arrays of objects and primitives
        int arrayLength = Array.getLength(o);
        final Class<?> componentType = o.getClass().getComponentType();
        StringBuilder string = new StringBuilder();
        if (Object.class.isAssignableFrom(componentType)) {
            // object array
            for (int i = 0; i < arrayLength; i++) {
                if (i != 0)
                    string.append(' ');
                string.append(Array.get(o, i).toString());
            }
        } else if (componentType.getName().equals("int")) {
            for (int i = 0; i < arrayLength; i++) {
                if (i != 0)
                    string.append(' ');
                string.append(Array.getInt(o, i));
            }
        } else if (componentType.getName().equals("short")) {
            for (int i = 0; i < arrayLength; i++) {
                if (i != 0)
                    string.append(' ');
                string.append(Array.getShort(o, i));
            }
        } else if (componentType.getName().equals("long")) {
            for (int i = 0; i < arrayLength; i++) {
                if (i != 0)
                    string.append(' ');
                string.append(Array.getLong(o, i));
            }
        } else if (componentType.getName().equals("float")) {
            // PERF FIX: construct the formatter once, not once per element as before.
            DecimalFormat format = new DecimalFormat(_floatFormatPattern);
            for (int i = 0; i < arrayLength; i++) {
                if (i != 0)
                    string.append(' ');
                string.append(format.format(Array.getFloat(o, i)));
            }
        } else if (componentType.getName().equals("double")) {
            // PERF FIX: construct the formatter once, not once per element as before.
            DecimalFormat format = new DecimalFormat(_floatFormatPattern);
            for (int i = 0; i < arrayLength; i++) {
                if (i != 0)
                    string.append(' ');
                string.append(format.format(Array.getDouble(o, i)));
            }
        } else if (componentType.getName().equals("byte")) {
            for (int i = 0; i < arrayLength; i++) {
                if (i != 0)
                    string.append(' ');
                // mask so the byte prints as unsigned (0..255)
                string.append(Array.getByte(o, i) & 0xff);
            }
        } else {
            addError("Unexpected array component type: " + componentType.getName());
        }
        return string.toString();
    }
    if (o instanceof Double)
        return new DecimalFormat(_floatFormatPattern).format(((Double)o).doubleValue());
    if (o instanceof Float)
        return new DecimalFormat(_floatFormatPattern).format(((Float)o).floatValue());
    // Note that several cameras leave trailing spaces (Olympus, Nikon) but this library is intended to show
    // the actual data within the file. It is not inconceivable that whitespace may be significant here, so we
    // do not trim. Also, if support is added for writing data back to files, this may cause issues.
    // We leave trimming to the presentation layer.
    return o.toString();
}
/**
 * Returns the tag's raw bytes decoded as a String in the named charset,
 * or <code>null</code> if the tag is unset or the charset name is unknown.
 */
@Nullable
public String getString(int tagType, String charset)
{
    byte[] bytes = getByteArray(tagType);
    if (bytes == null)
        return null;
    try {
        return new String(bytes, charset);
    } catch (UnsupportedEncodingException e) {
        return null; // unknown charset name: treat as unconvertible
    }
}
/**
 * Returns the tag's value as a {@link StringValue}, or <code>null</code>
 * if the tag is unset or holds a different type.
 */
@Nullable
public StringValue getStringValue(int tagType)
{
    Object value = getObject(tagType);
    return (value instanceof StringValue) ? (StringValue)value : null;
}
/**
 * Returns the object hashed for the particular tag type specified, if available.
 *
 * @param tagType the tag type identifier
 * @return the tag's value as an Object if available, else <code>null</code>
 */
@java.lang.SuppressWarnings({ "UnnecessaryBoxing" })
@Nullable
public Object getObject(int tagType)
{
    // Explicit boxing keeps the map-key type unambiguous (hence the suppression).
    return _tagMap.get(Integer.valueOf(tagType));
}
// OTHER METHODS
/**
 * Returns the name of a specified tag as a String.
 * Unknown tags are rendered as "Unknown tag (0xNNNN)" with at least four hex digits.
 *
 * @param tagType the tag type identifier
 * @return the tag's name as a String
 */
@NotNull
public String getTagName(int tagType)
{
    HashMap<Integer, String> nameMap = getTagNameMap();
    String name = nameMap.get(tagType);
    if (name != null)
        return name;
    // %04x zero-pads to a minimum of four lowercase hex digits,
    // matching the old manual padding loop.
    return String.format("Unknown tag (0x%04x)", tagType);
}
/**
 * Gets whether the specified tag is known by the directory and has a name.
 *
 * @param tagType the tag type identifier
 * @return whether this directory has a name for the specified tag
 */
public boolean hasTagName(int tagType)
{
    final HashMap<Integer, String> knownNames = getTagNameMap();
    return knownNames.containsKey(tagType);
}
/**
 * Provides a description of a tag's value using the descriptor set by
 * <code>setDescriptor(Descriptor)</code>.
 *
 * @param tagType the tag type identifier
 * @return the tag value's description as a String
 */
@Nullable
public String getDescription(int tagType)
{
    // A descriptor must have been installed before descriptions are requested
    // (only enforced when JVM assertions are enabled).
    assert(_descriptor != null);
    return _descriptor.getDescription(tagType);
}
/** Renders the directory name and its tag count, e.g. "Exif Directory (12 tags)". */
@Override
public String toString()
{
    int tagCount = _tagMap.size();
    String noun = (tagCount == 1) ? "tag" : "tags";
    return String.format("%s Directory (%d %s)", getName(), tagCount, noun);
}
}
| |
/*
* Copyright (c) 2010-2017 Evolveum and contributors
*
* This work is dual-licensed under the Apache License 2.0
* and European Union Public License. See LICENSE file for details.
*/
package com.evolveum.midpoint.model.impl.visualizer;
import com.evolveum.midpoint.model.api.ModelService;
import com.evolveum.midpoint.model.impl.visualizer.output.*;
import com.evolveum.midpoint.prism.*;
import com.evolveum.midpoint.prism.delta.*;
import com.evolveum.midpoint.prism.equivalence.ParameterizedEquivalenceStrategy;
import com.evolveum.midpoint.prism.path.ItemPath;
import com.evolveum.midpoint.prism.polystring.PolyString;
import com.evolveum.midpoint.prism.util.CloneUtil;
import com.evolveum.midpoint.schema.result.OperationResult;
import com.evolveum.midpoint.schema.util.ValueDisplayUtil;
import com.evolveum.midpoint.task.api.Task;
import com.evolveum.midpoint.util.MiscUtil;
import com.evolveum.midpoint.util.exception.*;
import com.evolveum.midpoint.util.logging.LoggingUtils;
import com.evolveum.midpoint.util.logging.Trace;
import com.evolveum.midpoint.util.logging.TraceManager;
import com.evolveum.midpoint.xml.ns._public.common.common_3.*;
import org.jetbrains.annotations.NotNull;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import java.util.*;
import static com.evolveum.midpoint.prism.delta.ChangeType.*;
import static com.evolveum.midpoint.prism.path.ItemPath.EMPTY_PATH;
import static com.evolveum.midpoint.prism.polystring.PolyString.getOrig;
import static com.evolveum.midpoint.schema.GetOperationOptions.createNoFetch;
import static com.evolveum.midpoint.schema.SelectorOptions.createCollection;
/**
* @author mederly
*/
@Component
public class Visualizer {
private static final Trace LOGGER = TraceManager.getTrace(Visualizer.class);
// Prefix for operation-result names created by this class.
public static final String CLASS_DOT = Visualizer.class.getName() + ".";
@Autowired
private PrismContext prismContext;
@Autowired
private ModelService modelService;
@Autowired
private Resolver resolver;
// Per-type item paths used by addDescriptiveItems(...) -- presumably the items that
// identify a container value to the user (target, resource, kind, intent, ...);
// confirm against addDescriptiveItems, which is outside this view.
private static final Map<Class<?>, List<ItemPath>> DESCRIPTIVE_ITEMS = new HashMap<>();
static {
    DESCRIPTIVE_ITEMS.put(AssignmentType.class, Arrays.asList(
        AssignmentType.F_TARGET_REF,
        AssignmentType.F_CONSTRUCTION.append(ConstructionType.F_RESOURCE_REF),
        AssignmentType.F_CONSTRUCTION.append(ConstructionType.F_KIND),
        AssignmentType.F_CONSTRUCTION.append(ConstructionType.F_INTENT),
        AssignmentType.F_TENANT_REF,
        AssignmentType.F_ORG_REF,
        AssignmentType.F_DESCRIPTION));
    DESCRIPTIVE_ITEMS.put(ShadowType.class, Arrays.asList(
        ShadowType.F_RESOURCE_REF,
        ShadowType.F_KIND,
        ShadowType.F_INTENT));
}
/**
 * Visualizes a prism object using a fresh, default {@link VisualizationContext}.
 */
public SceneImpl visualize(PrismObject<? extends ObjectType> object, Task task, OperationResult parentResult) throws SchemaException, ExpressionEvaluationException {
    return visualize(object, new VisualizationContext(), task, parentResult);
}
/**
 * Visualizes a prism object: resolves references first, then builds the scene tree.
 * Records a fatal error on the subresult if anything goes wrong, rethrowing the cause.
 */
public SceneImpl visualize(PrismObject<? extends ObjectType> object, VisualizationContext context, Task task, OperationResult parentResult)
        throws SchemaException, ExpressionEvaluationException {
    OperationResult subResult = parentResult.createSubresult(CLASS_DOT + "visualize");
    try {
        resolver.resolve(object, task, subResult);
        return visualize(object, null, context, task, subResult);
    } catch (RuntimeException | Error | SchemaException | ExpressionEvaluationException e) {
        subResult.recordFatalError("Couldn't visualize data structure: " + e.getMessage(), e);
        throw e;
    } finally {
        subResult.computeStatusIfUnknown();
    }
}
/**
 * Builds a scene for a whole (non-delta) object: no change type, root-relative paths,
 * then recurses into the object's items.
 */
private SceneImpl visualize(PrismObject<? extends ObjectType> object, SceneImpl owner, VisualizationContext context, Task task, OperationResult result) {
    SceneImpl rv = new SceneImpl(owner);
    rv.setChangeType(null); // plain object view, not a delta
    rv.setName(createSceneName(object));
    rv.setSourceRelPath(EMPTY_PATH);
    rv.setSourceAbsPath(EMPTY_PATH);
    rv.setSourceDefinition(object.getDefinition());
    rv.setSourceValue(object.getValue());
    rv.setSourceDelta(null);
    visualizeItems(rv, object.getValue().getItems(), false, context, task, result);
    return rv;
}
/**
 * Builds a scene for a standalone container value, named by its id.
 * The source definition is taken from the value's complex type definition when
 * available, otherwise from the parent container's definition.
 */
@SuppressWarnings("unused")
private SceneImpl visualize(PrismContainerValue<?> containerValue, SceneImpl owner, VisualizationContext context, Task task, OperationResult result) {
    SceneImpl rv = new SceneImpl(owner);
    rv.setChangeType(null);
    NameImpl name = new NameImpl("id " + containerValue.getId()); // TODO
    name.setNamesAreResourceKeys(false);
    rv.setName(name);
    rv.setSourceRelPath(EMPTY_PATH);
    rv.setSourceAbsPath(EMPTY_PATH);
    if (containerValue.getComplexTypeDefinition() != null) {
        // TEMPORARY!!! -- look the container definition up by its type name
        PrismContainerDefinition<?> pcd = prismContext.getSchemaRegistry().findContainerDefinitionByType(containerValue.getComplexTypeDefinition().getTypeName());
        rv.setSourceDefinition(pcd);
    } else if (containerValue.getParent() != null && containerValue.getParent().getDefinition() != null) {
        // Fall back to the parent's definition when the CTD is unknown.
        rv.setSourceDefinition(containerValue.getParent().getDefinition());
    }
    rv.setSourceValue(containerValue);
    rv.setSourceDelta(null);
    visualizeItems(rv, containerValue.getItems(), false, context, task, result);
    return rv;
}
/**
 * Visualizes a list of object deltas: resolves references, then builds one scene per
 * non-empty delta. Failures are recorded on the subresult and rethrown.
 */
public List<? extends SceneImpl> visualizeDeltas(List<ObjectDelta<? extends ObjectType>> deltas, Task task, OperationResult parentResult) throws SchemaException, ExpressionEvaluationException {
    OperationResult subResult = parentResult.createSubresult(CLASS_DOT + "visualizeDeltas");
    try {
        resolver.resolve(deltas, task, subResult);
        return visualizeDeltas(deltas, new VisualizationContext(), task, subResult);
    } catch (RuntimeException | Error | SchemaException | ExpressionEvaluationException e) {
        subResult.recordFatalError("Couldn't visualize the data structure: " + e.getMessage(), e);
        throw e;
    } finally {
        subResult.computeStatusIfUnknown();
    }
}
/**
 * Builds scenes for all deltas, skipping deltas that are empty and scenes that
 * come out empty (e.g. when only operational items changed).
 */
private List<? extends SceneImpl> visualizeDeltas(List<ObjectDelta<? extends ObjectType>> deltas, VisualizationContext context, Task task, OperationResult result)
        throws SchemaException {
    List<SceneImpl> scenes = new ArrayList<>(deltas.size());
    for (ObjectDelta<? extends ObjectType> delta : deltas) {
        if (delta.isEmpty()) {
            continue; // nothing to visualize
        }
        SceneImpl scene = visualizeDelta(delta, null, null, context, task, result);
        if (!scene.isEmpty()) {
            scenes.add(scene);
        }
    }
    return scenes;
}
/** Visualizes a single delta with no explicit object reference. */
@NotNull
public SceneImpl visualizeDelta(ObjectDelta<? extends ObjectType> objectDelta, Task task, OperationResult parentResult) throws SchemaException, ExpressionEvaluationException {
    return visualizeDelta(objectDelta, null, task, parentResult);
}
/**
 * Visualizes a single delta: resolves references first, then delegates to the core
 * delta visualization. Failures are recorded on the subresult and rethrown.
 */
@NotNull
public SceneImpl visualizeDelta(ObjectDelta<? extends ObjectType> objectDelta, ObjectReferenceType objectRef, Task task, OperationResult parentResult) throws SchemaException, ExpressionEvaluationException {
    OperationResult subResult = parentResult.createSubresult(CLASS_DOT + "visualizeDelta");
    try {
        resolver.resolve(objectDelta, task, subResult);
        return visualizeDelta(objectDelta, null, objectRef, new VisualizationContext(), task, subResult);
    } catch (RuntimeException | Error | SchemaException | ExpressionEvaluationException e) {
        subResult.recordFatalError("Couldn't visualize the data structure: " + e.getMessage(), e);
        throw e;
    } finally {
        subResult.computeStatusIfUnknown();
    }
}
/**
 * Core delta visualization. Builds a scene for one object delta, resolving the
 * affected object (for non-ADD deltas) so the scene can show its name and
 * descriptive items alongside the changes.
 */
private SceneImpl visualizeDelta(ObjectDelta<? extends ObjectType> objectDelta, SceneImpl owner, ObjectReferenceType objectRef,
        VisualizationContext context, Task task, OperationResult result)
        throws SchemaException {
    SceneImpl scene = new SceneImpl(owner);
    scene.setChangeType(objectDelta.getChangeType());
    scene.setSourceDelta(objectDelta);
    scene.setSourceRelPath(ItemPath.EMPTY_PATH);
    scene.setSourceAbsPath(ItemPath.EMPTY_PATH);
    PrismObject<? extends ObjectType> object;
    if (objectDelta.isAdd()) {
        // ADD deltas carry the full object themselves.
        object = objectDelta.getObjectToAdd();
    } else if (objectDelta.getOid() != null) {
        // MODIFY/DELETE: fetch the pre-change object (context cache first).
        object = getOldObject(objectDelta.getOid(), objectDelta.getObjectTypeClass(), context, task, result);
    } else {
        // this can occur e.g. when showing secondary deltas for OBJECT ADD operation
        object = null;
    }
    if (object != null) {
        scene.setName(createSceneName(object));
        scene.setSourceValue(object.getValue());
        scene.setSourceDefinition(object.getDefinition());
    } else {
        // No resolvable object: name the scene from the oid/reference alone.
        scene.setName(createSceneName(objectDelta.getOid(), objectRef));
    }
    if (objectDelta.isAdd()) {
        if (object == null) {
            throw new IllegalStateException("ADD object delta with no object to add: " + objectDelta);
        }
        // Show the entire object being added.
        visualizeItems(scene, object.getValue().getItems(), false, context, task, result);
    } else if (objectDelta.isModify()) {
        if (object != null) {
            // Identifying items first, then the modifications themselves.
            addDescriptiveItems(scene, object.getValue(), context, task, result);
        }
        visualizeItemDeltas(scene, objectDelta.getModifications(), context, task, result);
    } else if (objectDelta.isDelete()) {
        if (object != null) {
            // For deletions only the identifying items are shown.
            addDescriptiveItems(scene, object.getValue(), context, task, result);
        }
    } else {
        throw new IllegalStateException("Object delta that is neither ADD, nor MODIFY nor DELETE: " + objectDelta);
    }
    return scene;
}
/**
 * Returns the pre-change object: the context's cached "old" object when present,
 * otherwise falls back to a regular fetch.
 */
private PrismObject<? extends ObjectType> getOldObject(String oid, Class<? extends ObjectType> objectTypeClass, VisualizationContext context, Task task, OperationResult result) {
    PrismObject<? extends ObjectType> cached = context.getOldObject(oid);
    return cached != null ? cached : getObject(oid, objectTypeClass, context, task, result);
}
/**
 * Fetches an object by oid, consulting the context cache first. A missing object is
 * tolerated (handled error, returns null); other failures are downgraded to warnings.
 */
private PrismObject<? extends ObjectType> getObject(String oid, Class<? extends ObjectType> objectTypeClass, VisualizationContext context, Task task, OperationResult result) {
    PrismObject<? extends ObjectType> cached = context.getCurrentObject(oid);
    if (cached != null) {
        return cached;
    }
    if (objectTypeClass == null) {
        LOGGER.warn("No object class for {}, using ObjectType", oid);
        objectTypeClass = ObjectType.class;
    }
    try {
        // noFetch: we want the repository state, not a live resource read.
        PrismObject<? extends ObjectType> fetched =
                modelService.getObject(objectTypeClass, oid, createCollection(createNoFetch()), task, result);
        context.putObject(fetched);
        return fetched;
    } catch (ObjectNotFoundException e) {
        // Not a big problem: object does not exist (was already deleted or was not yet created).
        LoggingUtils.logExceptionOnDebugLevel(LOGGER, "Object {} does not exist", e, oid);
        result.recordHandledError(e);
        return null;
    } catch (RuntimeException|SchemaException|ConfigurationException|CommunicationException|SecurityViolationException|ExpressionEvaluationException e) {
        LoggingUtils.logUnexpectedException(LOGGER, "Couldn't resolve object {}", e, oid);
        result.recordWarning("Couldn't resolve object " + oid + ": " + e.getMessage(), e);
        return null;
    }
}
/**
 * Adds the given items to the scene, sorted by display order. Properties and
 * references become scene items; containers either recurse into the current scene or
 * spawn one partial scene per container value, depending on the context's
 * "separate ... containers" settings. Operational items are skipped unless the
 * context asks for them.
 */
private void visualizeItems(SceneImpl scene, Collection<Item<?, ?>> items, boolean descriptive, VisualizationContext context, Task task, OperationResult result) {
    if (items == null) {
        return;
    }
    // Copy before sorting: the incoming collection may be unmodifiable/live.
    List<Item<?, ?>> itemsToShow = new ArrayList<>(items);
    Collections.sort(itemsToShow, getItemDisplayOrderComparator());
    for (Item<?, ?> item : itemsToShow) {
        if (item instanceof PrismProperty) {
            final SceneItemImpl sceneItem = createSceneItem((PrismProperty) item, descriptive);
            if (!sceneItem.isOperational() || context.isIncludeOperationalItems()) {
                scene.addItem(sceneItem);
            }
        } else if (item instanceof PrismReference) {
            final SceneItemImpl sceneItem = createSceneItem((PrismReference) item, descriptive, context, task, result);
            if (!sceneItem.isOperational() || context.isIncludeOperationalItems()) {
                scene.addItem(sceneItem);
            }
        } else if (item instanceof PrismContainer) {
            PrismContainer<?> pc = (PrismContainer<?>) item;
            PrismContainerDefinition<?> def = pc.getDefinition();
            boolean separate = isContainerSingleValued(def, pc) ? context.isSeparateSinglevaluedContainers() : context.isSeparateMultivaluedContainers();
            SceneImpl currentScene = scene;
            for (PrismContainerValue<?> pcv : pc.getValues()) {
                if (separate) {
                    // Each container value gets its own partial scene.
                    SceneImpl si = new SceneImpl(scene);
                    NameImpl name = new NameImpl(item.getElementName().getLocalPart());
                    name.setId(name.getSimpleName());
                    if (def != null) {
                        name.setDisplayName(def.getDisplayName());
                    }
                    name.setNamesAreResourceKeys(true);
                    si.setName(name);
                    if (def != null) {
                        si.setOperational(def.isOperational());
                        si.setSourceDefinition(def);
                        // Skip operational containers entirely (before they are attached).
                        if (si.isOperational() && !context.isIncludeOperationalItems()) {
                            continue;
                        }
                    }
                    si.setSourceRelPath(ItemPath.create(item.getElementName()));
                    si.setSourceAbsPath(scene.getSourceAbsPath().append(item.getElementName()));
                    si.setSourceDelta(null);
                    scene.addPartialScene(si);
                    currentScene = si;
                }
                // When not separated, currentScene stays the parent scene and the
                // value's items are flattened into it.
                visualizeItems(currentScene, pcv.getItems(), descriptive, context, task, result);
            }
        } else {
            throw new IllegalStateException("Not a property nor reference nor container: " + item);
        }
    }
}
/**
 * Whether the container should be treated as single-valued: by its definition when
 * one exists, otherwise by counting its actual values.
 */
private boolean isContainerSingleValued(PrismContainerDefinition<?> def, PrismContainer<?> pc) {
    return def != null ? def.isSingleValue() : pc.getValues().size() <= 1;
}
/**
 * Visualizes each item delta: container deltas become partial scenes, all others are
 * handled atomically. Items and partial scenes are sorted afterwards.
 */
private void visualizeItemDeltas(SceneImpl scene, Collection<? extends ItemDelta<?, ?>> deltas, VisualizationContext context, Task task,
        OperationResult result) throws SchemaException {
    if (deltas == null) {
        return;
    }
    // Iterate a snapshot so the underlying collection may change safely.
    for (ItemDelta<?, ?> delta : new ArrayList<>(deltas)) {
        if (delta instanceof ContainerDelta) {
            visualizeContainerDelta((ContainerDelta) delta, scene, context, task, result);
        } else {
            visualizeAtomicDelta(delta, scene, context, task, result);
        }
    }
    sortItems(scene);
    sortPartialScenes(scene);
}
/** Sorts the scene's items by the display order of their source definitions. */
private void sortItems(SceneImpl scene) {
    scene.getItems().sort(
            (first, second) -> compareDefinitions(first.getSourceDefinition(), second.getSourceDefinition()));
}
/**
 * Sorts partial scenes: primarily by definition display order; definition-less scenes
 * last; container-value scenes then by container value id; object scenes with the
 * focus object first; object scenes before container-value scenes.
 */
private void sortPartialScenes(SceneImpl scene) {
    Collections.sort(scene.getPartialScenes(), new Comparator<SceneImpl>() {
        @Override
        public int compare(SceneImpl s1, SceneImpl s2) {
            final PrismContainerDefinition<?> def1 = s1.getSourceDefinition();
            final PrismContainerDefinition<?> def2 = s2.getSourceDefinition();
            // Primary key: definition display order.
            int a = compareDefinitions(def1, def2);
            if (a != 0) {
                return a;
            }
            // Scenes without a definition sort after those with one.
            if (def1 == null || def2 == null) {
                return MiscUtil.compareNullLast(def1, def2);
            }
            if (s1.isContainerValue() && s2.isContainerValue()) {
                // Same container type: order by value id.
                Long id1 = s1.getSourceContainerValueId();
                Long id2 = s2.getSourceContainerValueId();
                return compareNullableIntegers(id1, id2);
            } else if (s1.isObjectValue() && s2.isObjectValue()) {
                // Focus object comes before other objects.
                boolean f1 = s1.isFocusObject();
                boolean f2 = s2.isFocusObject();
                if (f1 && !f2) {
                    return -1;
                } else if (f2 && !f1) {
                    return 1;
                } else {
                    return 0;
                }
            }
            // Mixed kinds: whole-object scenes precede container-value scenes.
            if (s1.isObjectValue()) {
                return -1;
            } else if (s2.isObjectValue()) {
                return 1;
            }
            return 0;
        }
    });
}
/**
 * Visualizes a container delta as per-value partial scenes. REPLACE deltas are first
 * decomposed into effective additions (replacement values not in the estimated old
 * state) and deletions (old values not being kept), so they render like ADD/DELETE.
 */
private <C extends Containerable> void visualizeContainerDelta(ContainerDelta<C> delta, SceneImpl scene, VisualizationContext context, Task task, OperationResult result) {
    if (delta.isEmpty()) {
        return;
    }
    // Operational containers are hidden unless explicitly requested.
    if (delta.getDefinition() != null && delta.getDefinition().isOperational() && !context.isIncludeOperationalItems()) {
        return;
    }
    Collection<PrismContainerValue<C>> valuesToAdd;
    Collection<PrismContainerValue<C>> valuesToDelete;
    if (!delta.isReplace()) {
        valuesToAdd = delta.getValuesToAdd();
        valuesToDelete = delta.getValuesToDelete();
    } else {
        // Diff the replacement set against the estimated old values.
        // NOTE: O(n*m) contains() scans -- acceptable for typical delta sizes.
        valuesToAdd = new ArrayList<>();
        valuesToDelete = new ArrayList<>();
        Collection<PrismContainerValue<C>> oldValues = delta.getEstimatedOldValues();
        for (PrismContainerValue<C> newValue : delta.getValuesToReplace()) {
            if (oldValues == null || !oldValues.contains(newValue)) { // TODO containsEquivalentValue instead?
                valuesToAdd.add(newValue);
            }
        }
        if (oldValues != null) {
            for (PrismContainerValue<C> oldValue : oldValues) {
                if (!delta.getValuesToReplace().contains(oldValue)) { // TODO containsEquivalentValue instead?
                    valuesToDelete.add(oldValue);
                }
            }
        }
    }
    // Deletions are rendered before additions.
    if (valuesToDelete != null) {
        for (PrismContainerValue<C> value : valuesToDelete) {
            visualizeContainerDeltaValue(value, DELETE, delta, scene, context, task, result);
        }
    }
    if (valuesToAdd != null) {
        for (PrismContainerValue<C> value : valuesToAdd) {
            visualizeContainerDeltaValue(value, ADD, delta, scene, context, task, result);
        }
    }
}
/**
 * Renders a single container value of a container delta as a partial scene attached
 * to the owning scene.
 */
private <C extends Containerable> void visualizeContainerDeltaValue(PrismContainerValue<C> value, ChangeType changeType,
        ContainerDelta<C> containerDelta, SceneImpl owningScene, VisualizationContext context, Task task, OperationResult result) {
    SceneImpl scene = createContainerScene(changeType, containerDelta.getPath(), owningScene);
    if (value.getId() != null) {
        scene.getName().setId(String.valueOf(value.getId()));
    }
    // delete-by-id: we supply known values
    // (an id-only value carries no items, so substitute the matching estimated old value
    // to show what is actually being deleted)
    if ((value.getItems().isEmpty()) && value.getId() != null) {
        if (containerDelta.getEstimatedOldValues() != null) {
            for (PrismContainerValue<C> oldValue : containerDelta.getEstimatedOldValues()) {
                if (value.getId().equals(oldValue.getId())) {
                    value = oldValue;
                    break;
                }
            }
        }
    }
    scene.setSourceValue(value);
    visualizeItems(scene, value.getItems(), false, context, task, result);
    owningScene.addPartialScene(scene);
}
/**
 * Creates a partial scene for a container at the given path, carrying the change type
 * and (when resolvable) the container's definition and operational flag.
 */
private SceneImpl createContainerScene(ChangeType changeType, ItemPath containerPath, SceneImpl owningScene) {
    SceneImpl rv = new SceneImpl(owningScene);
    rv.setChangeType(changeType);
    // Resolve the definition against the path without any trailing value id.
    ItemPath parentItemPath = getDeltaParentItemPath(containerPath);
    PrismContainerDefinition<?> definition = getSceneDefinition(rv, parentItemPath);
    rv.setName(createNameForContainerDelta(containerPath, definition));
    if (definition != null) {
        rv.setOperational(definition.isOperational());
        rv.setSourceDefinition(definition);
    }
    rv.setSourceRelPath(containerPath.remainder(owningScene.getSourceRelPath()));
    rv.setSourceAbsPath(containerPath);
    rv.setSourceDelta(null);
    return rv;
}
/**
 * Creates a scene name from a container delta path, using the definition's display
 * name when available.
 */
private NameImpl createNameForContainerDelta(ItemPath deltaParentPath, PrismContainerDefinition<?> sceneDefinition) {
    NameImpl name = new NameImpl(deltaParentPath.toString());
    // NOTE(review): when the path has no trailing id, getLastId returns null and this
    // sets the literal string "null" as the id -- confirm that is intended.
    name.setId(String.valueOf(getLastId(deltaParentPath)));
    if (sceneDefinition != null) {
        name.setDisplayName(sceneDefinition.getDisplayName());
    }
    name.setNamesAreResourceKeys(true); // TODO: ok?
    return name;
}
/**
 * Strips a trailing container-value id segment from the path, if present
 * (e.g. "assignment/[3]" becomes "assignment").
 */
private ItemPath getDeltaParentItemPath(ItemPath deltaParentPath) {
    return ItemPath.isId(deltaParentPath.last()) ? deltaParentPath.allExceptLast() : deltaParentPath;
}
/** Returns the trailing container-value id of the path, or null when the last segment is not an id. */
private Long getLastId(ItemPath deltaParentPath) {
    return ItemPath.toIdOrNull(deltaParentPath.last());
}
/**
 * Resolves the container definition for the given path against the root scene's
 * source definition; null when the root has no definition.
 */
private PrismContainerDefinition<?> getSceneDefinition(SceneImpl ownerScene, ItemPath deltaParentItemPath) {
    PrismContainerDefinition<?> root = getRootDefinition(ownerScene);
    return root != null ? root.findContainerDefinition(deltaParentItemPath) : null;
}
/**
 * Routes a single item delta into the proper (possibly newly created) partial
 * scene of {@code scene}, then renders the delta there.
 */
private void visualizeAtomicDelta(ItemDelta<?, ?> delta, SceneImpl scene, VisualizationContext context, Task task, OperationResult result)
        throws SchemaException {
    ItemPath deltaParentPath = delta.getParentPath();
    ItemPath sceneRelativeItemPath = getDeltaParentItemPath(deltaParentPath).remainder(scene.getSourceRelPath());
    SceneImpl sceneForItem;
    if (ItemPath.isEmpty(deltaParentPath)) {
        // Delta applies directly to the root of this scene.
        sceneForItem = scene;
    } else {
        sceneForItem = findPartialSceneByPath(scene, deltaParentPath);
        if (sceneForItem == null) {
            // No existing MODIFY sub-scene for this container yet - create one.
            sceneForItem = createContainerScene(MODIFY, deltaParentPath, scene);
            if (sceneForItem.isOperational() && !context.isIncludeOperationalItems()) {
                // Operational container while operational data is hidden: drop
                // the freshly created scene (it was not added anywhere yet).
                return;
            }
            PrismContainerValue<?> ownerPCV = scene.getSourceValue();
            if (ownerPCV != null) {
                Item<?,?> item = ownerPCV.findItem(sceneRelativeItemPath);
                if (item instanceof PrismContainer) {
                    PrismContainer<?> container = (PrismContainer<?>) item;
                    sceneForItem.setSourceDefinition(container.getDefinition());
                    Long lastId = getLastId(deltaParentPath);
                    PrismContainerValue<?> sceneSrcValue;
                    if (lastId == null) {
                        // No explicit container id: use the sole value only when unambiguous.
                        if (container.size() == 1) {
                            sceneSrcValue = container.getValues().get(0);
                        } else {
                            sceneSrcValue = null;
                        }
                    } else {
                        sceneSrcValue = container.findValue(lastId);
                    }
                    if (sceneSrcValue != null) {
                        sceneForItem.setSourceValue(sceneSrcValue);
                        addDescriptiveItems(sceneForItem, sceneSrcValue, context, task, result);
                    }
                }
            }
            scene.addPartialScene(sceneForItem);
        }
    }
    ItemPath itemRelativeItemPath = getDeltaParentItemPath(delta.getPath()).remainder(sceneForItem.getSourceRelPath());
    if (context.isRemoveExtraDescriptiveItems()) {
        // A descriptive item for this path may already be present (added above or
        // by a previous delta); remove it so the real delta item below replaces it.
        Iterator<? extends SceneItemImpl> iterator = sceneForItem.getItems().iterator();
        while (iterator.hasNext()) {
            SceneItemImpl sceneItem = iterator.next();
            if (sceneItem.isDescriptive() && sceneItem.getSourceRelPath() != null && sceneItem.getSourceRelPath().equivalent(itemRelativeItemPath)) {
                iterator.remove();
                break;
            }
        }
    }
    visualizeAtomicItemDelta(sceneForItem, delta, context, task, result);
}
/**
 * Adds the configured "descriptive" items (per DESCRIPTIVE_ITEMS) of the given
 * container value to the scene, when the value's compile-time class is known.
 */
private void addDescriptiveItems(SceneImpl scene, PrismContainerValue<?> sourceValue, VisualizationContext context, Task task, OperationResult result) {
    // TODO dynamically typed values
    if (sourceValue.getContainer() == null || sourceValue.getContainer().getCompileTimeClass() == null) {
        return;
    }
    List<ItemPath> descriptivePaths = DESCRIPTIVE_ITEMS.get(sourceValue.getContainer().getCompileTimeClass());
    if (descriptivePaths == null) {
        return;
    }
    List<Item<?,?>> present = new ArrayList<>();
    for (ItemPath path : descriptivePaths) {
        Item<?,?> found = sourceValue.findItem(path);
        if (found != null) {
            present.add(found);
        }
    }
    visualizeItems(scene, present, true, context, task, result);
}
/** Walks up the owner chain and returns the source definition of the topmost scene. */
private PrismContainerDefinition<?> getRootDefinition(SceneImpl scene) {
    SceneImpl root = scene;
    while (root.getOwner() != null) {
        root = root.getOwner();
    }
    return root.getSourceDefinition();
}
/**
 * Finds an existing MODIFY partial scene whose absolute path matches the given
 * path; null when there is none.
 */
private SceneImpl findPartialSceneByPath(SceneImpl scene, ItemPath deltaParentPath) {
    for (SceneImpl candidate : scene.getPartialScenes()) {
        boolean samePath = candidate.getSourceAbsPath().equivalent(deltaParentPath);
        if (samePath && candidate.getChangeType() == MODIFY) {
            return candidate;
        }
    }
    return null;
}
/**
 * Renders a property or reference delta as a scene item; the item is added only
 * when it is non-operational or operational items are requested.
 *
 * @throws IllegalStateException for any other delta kind
 */
private void visualizeAtomicItemDelta(SceneImpl scene, ItemDelta<?, ?> delta, VisualizationContext context, Task task, OperationResult result)
        throws SchemaException {
    SceneDeltaItemImpl deltaItem;
    if (delta instanceof PropertyDelta) {
        deltaItem = createSceneDeltaItem((PropertyDelta) delta, scene, context, task, result);
    } else if (delta instanceof ReferenceDelta) {
        deltaItem = createSceneDeltaItem((ReferenceDelta) delta, scene, context, task, result);
    } else {
        throw new IllegalStateException("No property nor reference delta: " + delta);
    }
    boolean visible = !deltaItem.isOperational() || context.isIncludeOperationalItems();
    if (visible) {
        scene.addItem(deltaItem);
    }
}
/** Orders items by the display order of their definitions (nulls last). */
private Comparator<Item<?, ?>> getItemDisplayOrderComparator() {
    return (left, right) -> compareDefinitions(left.getDefinition(), right.getDefinition());
}
/** Compares two (possibly null) definitions by display order; nulls last. */
private int compareDefinitions(ItemDefinition d1, ItemDefinition d2) {
    return compareNullableIntegers(
            d1 != null ? d1.getDisplayOrder() : null,
            d2 != null ? d2.getDisplayOrder() : null);
}
/** Natural Integer ordering in which null sorts after any concrete value. */
private int compareNullableIntegers(Integer i1, Integer i2) {
    if (i1 == null) {
        return i2 == null ? 0 : 1;
    }
    if (i2 == null) {
        return -1;
    }
    return Integer.compare(i1, i2);
}
/**
 * Natural Long ordering in which null sorts after any concrete value.
 * (Name kept as "...Integers" for overload compatibility with existing callers.)
 */
private int compareNullableIntegers(Long i1, Long i2) {
    if (i1 == null) {
        return i2 == null ? 0 : 1;
    }
    if (i2 == null) {
        return -1;
    }
    return Long.compare(i1, i2);
}
/** Builds the scene item skeleton shared by property and reference items. */
private SceneItemImpl createSceneItemCommon(Item<?,?> item) {
    SceneItemImpl sceneItem = new SceneItemImpl(createSceneItemName(item));
    ItemDefinition<?> definition = item.getDefinition();
    if (definition != null) {
        sceneItem.setOperational(definition.isOperational());
    }
    sceneItem.setSourceItem(item);
    sceneItem.setSourceRelPath(item.getElementName());
    return sceneItem;
}
/** Builds a scene item for a plain property (no delta involved). */
private SceneItemImpl createSceneItem(PrismProperty<?> property, boolean descriptive) {
    SceneItemImpl sceneItem = createSceneItemCommon(property);
    sceneItem.setDescriptive(descriptive);
    sceneItem.setNewValues(toSceneItemValues(property.getValues()));
    return sceneItem;
}
/** Builds a scene item for a plain reference (no delta involved). */
private SceneItemImpl createSceneItem(PrismReference reference, boolean descriptive, VisualizationContext context, Task task,
        OperationResult result) {
    SceneItemImpl sceneItem = createSceneItemCommon(reference);
    sceneItem.setDescriptive(descriptive);
    sceneItem.setNewValues(toSceneItemValuesRef(reference.getValues(), context, task, result));
    return sceneItem;
}
@SuppressWarnings({ "unused", "unchecked" })
private SceneDeltaItemImpl createSceneDeltaItem(PropertyDelta<?> delta, SceneImpl owningScene, VisualizationContext context, Task task,
        OperationResult result) throws SchemaException {
    // Builds a scene item for a property delta: records the estimated old values,
    // replays the delta onto a scratch property to obtain the new values, and
    // classifies each value as added/deleted/unchanged.
    SceneDeltaItemImpl si = createSceneDeltaItemCommon(delta, owningScene);
    si.setOldValues(toSceneItemValues(delta.getEstimatedOldValues()));
    PrismProperty property = prismContext.itemFactory().createProperty(delta.getElementName());
    if (delta.getEstimatedOldValues() != null) {
        // Seed the scratch property with clones so applying the delta below does
        // not mutate the delta's own old-value collection.
        property.addValues(CloneUtil.cloneCollectionMembers(delta.getEstimatedOldValues()));
    }
    try {
        delta.applyToMatchingPath(property, ParameterizedEquivalenceStrategy.DEFAULT_FOR_DELTA_APPLICATION);
    } catch (SchemaException e) {
        throw new SystemException("Couldn't visualize property delta: " + delta + ": " + e.getMessage(), e);
    }
    computeAddedDeletedUnchanged(si, delta.getEstimatedOldValues(), property.getValues());
    si.setNewValues(toSceneItemValues(property.getValues()));
    return si;
}
/** Classifies property values as added/deleted/unchanged and stores them on the item. */
private <V extends PrismPropertyValue<?>> void computeAddedDeletedUnchanged(SceneDeltaItemImpl si, Collection<V> oldValues, Collection<V> newValues) {
    List<V> addedValues = new ArrayList<>();
    List<V> deletedValues = new ArrayList<>();
    List<V> unchangedValues = new ArrayList<>();
    computeDifferences(oldValues, newValues, addedValues, deletedValues, unchangedValues);
    si.setAddedValues(toSceneItemValues(addedValues));
    si.setDeletedValues(toSceneItemValues(deletedValues));
    si.setUnchangedValues(toSceneItemValues(unchangedValues));
}
/**
 * Partitions old vs. new values into added / deleted / unchanged buckets.
 * Membership is tested via Collection.contains (i.e. PrismValue.equals);
 * quadratic, but the value sets handled here are small.
 */
private <V extends PrismValue> void computeDifferences(Collection<V> oldValues, Collection<V> newValues, List<V> added, List<V> deleted, List<V> unchanged) {
    if (oldValues != null) {
        for (V candidate : oldValues) {
            boolean stillPresent = newValues != null && newValues.contains(candidate);
            (stillPresent ? unchanged : deleted).add(candidate);
        }
    }
    if (newValues != null) {
        for (V candidate : newValues) {
            boolean existedBefore = oldValues != null && oldValues.contains(candidate);
            if (!existedBefore) {
                added.add(candidate);
            }
        }
    }
}
/** Classifies reference values as added/deleted/unchanged and stores them on the item. */
private void computeAddedDeletedUnchangedRef(SceneDeltaItemImpl si, Collection<PrismReferenceValue> oldValues, Collection<PrismReferenceValue> newValues,
        VisualizationContext context, Task task, OperationResult result) {
    List<PrismReferenceValue> addedValues = new ArrayList<>();
    List<PrismReferenceValue> deletedValues = new ArrayList<>();
    List<PrismReferenceValue> unchangedValues = new ArrayList<>();
    computeDifferences(oldValues, newValues, addedValues, deletedValues, unchangedValues);
    si.setAddedValues(toSceneItemValuesRef(addedValues, context, task, result));
    si.setDeletedValues(toSceneItemValuesRef(deletedValues, context, task, result));
    si.setUnchangedValues(toSceneItemValuesRef(unchangedValues, context, task, result));
}
@SuppressWarnings("unchecked")
private <V extends PrismValue, D extends ItemDefinition> SceneDeltaItemImpl createSceneDeltaItemCommon(ItemDelta<V, D> itemDelta,
        SceneImpl owningScene)
        throws SchemaException {
    // Common part of building a scene delta item: name, source item (reconstructed
    // from estimated old values) and the path relative to the owning scene.
    String simpleName = itemDelta.getElementName() != null ? itemDelta.getElementName().getLocalPart() : "";
    NameImpl name = new NameImpl(simpleName);
    if (itemDelta.getDefinition() != null) {
        name.setDisplayName(itemDelta.getDefinition().getDisplayName());
    }
    name.setId(simpleName);
    name.setNamesAreResourceKeys(true);
    SceneDeltaItemImpl si = new SceneDeltaItemImpl(name);
    si.setSourceDelta(itemDelta);
    D def = itemDelta.getDefinition();
    if (def != null) {
        // Reconstruct the pre-delta item from clones of the estimated old values,
        // so the delta's own collections stay untouched.
        Item<V,D> item = def.instantiate();
        if (itemDelta.getEstimatedOldValues() != null) {
            item.addAll(CloneUtil.cloneCollectionMembers(itemDelta.getEstimatedOldValues()));
        }
        si.setSourceItem(item);
        si.setOperational(def.isOperational());
    }
    ItemPath remainder = itemDelta.getPath().remainder(owningScene.getSourceRelPath());
    if (remainder.startsWithNullId()) {
        // Drop a leading null container-id segment left over by the remainder computation.
        remainder = remainder.rest();
    }
    si.setSourceRelPath(remainder);
    return si;
}
/** Builds the name element of a scene item from the item's element name and definition. */
private NameImpl createSceneItemName(Item<?,?> item) {
    NameImpl name = new NameImpl(item.getElementName().getLocalPart());
    ItemDefinition<?> definition = item.getDefinition();
    if (definition != null) {
        name.setDisplayName(definition.getDisplayName());
        name.setDescription(definition.getDocumentation());
    }
    name.setId(name.getSimpleName()); // todo reconsider
    name.setNamesAreResourceKeys(true);
    return name;
}
private SceneDeltaItemImpl createSceneDeltaItem(ReferenceDelta delta, SceneImpl owningScene, VisualizationContext context, Task task,
        OperationResult result)
        throws SchemaException {
    // Builds a scene item for a reference delta: records the estimated old values,
    // replays the delta onto a scratch reference to obtain the new values, and
    // classifies each value as added/deleted/unchanged.
    SceneDeltaItemImpl di = createSceneDeltaItemCommon(delta, owningScene);
    di.setOldValues(toSceneItemValuesRef(delta.getEstimatedOldValues(), context, task, result));
    PrismReference reference = prismContext.itemFactory().createReference(delta.getElementName());
    try {
        if (delta.getEstimatedOldValues() != null) {
            // Clones, so delta application does not mutate the delta's own old values.
            reference.addAll(CloneUtil.cloneCollectionMembers(delta.getEstimatedOldValues()));
        }
        delta.applyToMatchingPath(reference, ParameterizedEquivalenceStrategy.DEFAULT_FOR_DELTA_APPLICATION);
    } catch (SchemaException e) {
        throw new SystemException("Couldn't visualize reference delta: " + delta + ": " + e.getMessage(), e);
    }
    computeAddedDeletedUnchangedRef(di, delta.getEstimatedOldValues(), reference.getValues(), context, task, result);
    di.setNewValues(toSceneItemValuesRef(reference.getValues(), context, task, result));
    return di;
}
/** Converts property values to displayable scene item values, skipping nulls. */
private List<SceneItemValueImpl> toSceneItemValues(Collection<? extends PrismPropertyValue<?>> values) {
    List<SceneItemValueImpl> converted = new ArrayList<>();
    if (values == null) {
        return converted;
    }
    for (PrismPropertyValue<?> value : values) {
        if (value == null) {
            continue;
        }
        SceneItemValueImpl itemValue = new SceneItemValueImpl(ValueDisplayUtil.toStringValue(value));
        itemValue.setSourceValue(value);
        converted.add(itemValue);
    }
    return converted;
}
/**
 * Converts reference values to displayable scene item values, skipping nulls.
 * Each reference is resolved (when possible) so a human-readable target name can
 * be shown; the display name falls back target-name, then OID.
 */
private List<SceneItemValueImpl> toSceneItemValuesRef(Collection<PrismReferenceValue> refValues, VisualizationContext context, Task task, OperationResult result) {
    List<SceneItemValueImpl> converted = new ArrayList<>();
    if (refValues == null) {
        return converted;
    }
    for (PrismReferenceValue original : refValues) {
        if (original == null) {
            continue;
        }
        PrismReferenceValue resolved = createRefValueWithObject(original, context, task, result);
        String name;
        if (resolved.getObject() != null) {
            name = PolyString.getOrig(resolved.getObject().getName());
        } else if (resolved.getTargetName() != null) {
            name = resolved.getTargetName().getOrig();
        } else {
            name = resolved.getOid();
        }
        String relation = resolved.getRelation() != null
                ? "[" + resolved.getRelation().getLocalPart() + "]"
                : null;
        SceneItemValueImpl itemValue = new SceneItemValueImpl(name, relation);
        itemValue.setSourceValue(resolved);
        converted.add(itemValue);
    }
    return converted;
}
/**
 * Returns a reference value whose target object is resolved. The input value is
 * returned unchanged when it already carries the object or when resolution fails;
 * otherwise a clone carrying the resolved object is returned.
 */
@SuppressWarnings("unchecked")
private PrismReferenceValue createRefValueWithObject(PrismReferenceValue refValue, VisualizationContext context, Task task, OperationResult result) {
    if (refValue.getObject() != null) {
        return refValue; // already resolved
    }
    PrismObject<? extends ObjectType> resolved = getObject(refValue.getOid(),
            (Class) refValue.getTargetTypeCompileTimeClass(prismContext), context, task, result);
    if (resolved == null) {
        return refValue;
    }
    PrismReferenceValue copy = refValue.clone();
    copy.setObject(resolved);
    return copy;
}
/**
 * Builds a scene name for an object: simple name from its name (or OID fallback),
 * id = OID, display name from full/display name for users and roles.
 */
private NameImpl createSceneName(PrismObject<? extends ObjectType> object) {
    String simpleName = object.getName() != null ? getOrig(object.getName()) : object.getOid();
    NameImpl name = new NameImpl(simpleName);
    name.setId(object.getOid());
    ObjectType objectable = object.asObjectable();
    name.setDescription(objectable.getDescription());
    if (objectable instanceof UserType) {
        name.setDisplayName(getOrig(((UserType) objectable).getFullName()));
    } else if (objectable instanceof AbstractRoleType) {
        name.setDisplayName(getOrig(((AbstractRoleType) objectable).getDisplayName()));
    }
    name.setNamesAreResourceKeys(false);
    return name;
}
/**
 * Builds a scene name from an OID, using the referenced object's name as the
 * display name when the reference carries a resolved object.
 */
private NameImpl createSceneName(String oid, ObjectReferenceType objectRef) {
    NameImpl name = new NameImpl(oid);
    name.setId(oid);
    if (objectRef != null && objectRef.asReferenceValue() != null && objectRef.asReferenceValue().getObject() != null){
        PrismObject<ObjectType> target = objectRef.asReferenceValue().getObject();
        if (target.asObjectable().getName() != null){
            name.setDisplayName(target.asObjectable().getName().getOrig());
        }
    }
    name.setNamesAreResourceKeys(false);
    return name;
}
}
| |
import java.awt.BorderLayout;
import java.awt.Dimension;
import java.awt.Image;
import java.awt.Point;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.WindowEvent;
import java.awt.event.WindowListener;
import java.io.IOException;
import java.io.File;
import javax.swing.ImageIcon;
import javax.swing.JFrame;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.UIManager;
import javax.swing.UnsupportedLookAndFeelException;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.List;
/**
 * Main application window for the GBAD (Graph Based Anomaly Detection) tool.
 * Hosts the parameter panel and graph display, owns the menu bar and the
 * preferences dialog, and launches/monitors external GBAD processes.
 */
public class GUI extends JFrame implements ActionListener, WindowListener
{
    // Timestamp pattern used to build unique result-file names.
    public static final String DATE_FORMAT_NOW = "yyyy-MM-dd_HH.mm.ss";
    //ClassLoader cl = this.getClass().getClassLoader();
    //cl.getResource("images/save.gif")
    protected Parameters parameters;     // left-hand panel with GBAD run parameters
    protected GraphDisplay graphDisplay; // central graph/result viewer
    protected Preferences preferences;   // preferences dialog
    protected GBADMenu gbadMenu;         // application menu bar
    /**
     * Creates and lays out the main window (centered, at most screen size minus
     * a 100px margin) without showing it.
     */
    public GUI()
    {
        super("Graph Based Anomaly Detection");
        // Closing is handled in windowClosing() so unsaved changes can be confirmed.
        setDefaultCloseOperation(DO_NOTHING_ON_CLOSE);
        addWindowListener(this);
        Dimension screenSize = getToolkit().getScreenSize();
        int screenWidth = screenSize.width;
        int screenHeight = screenSize.height;
        int width = 800;
        int height = 650;
        // Shrink the default size on small screens.
        if(width > screenWidth-100)
        {
            width = screenWidth-100;
        }
        if(height > screenHeight-100)
        {
            height = screenHeight-100;
        }
        setSize(width, height);
        // Center the window on the screen.
        int xLoc = (screenWidth/2) - (getWidth()/2);
        int yLoc = (screenHeight/2) - (getHeight()/2);
        setLocation(xLoc, yLoc);
        setupLookAndFeel();
        preferences = new Preferences(this);
        addMenuBar();
        addContent();
        setIconImage((new ImageIcon(Images.GBAD_ICON, "GBAD")).getImage().getScaledInstance(50, 50, Image.SCALE_SMOOTH));
    }
    /**
     * Creates the window and immediately opens the given file, dispatching on its
     * extension: ".g" opens a graph, ".result" opens a result file; anything else
     * is ignored.
     */
    public GUI(String fileName)
    {
        this();
        System.out.println(fileName);
        String[] tokens = fileName.split("\\.");
        if(tokens.length > 0)
        {
            if(tokens[tokens.length-1].equals("g"))
            {
                graphDisplay.openGraph(fileName);
            }
            else if(tokens[tokens.length-1].equals("result"))
            {
                graphDisplay.openResults(fileName);
            }
        }
    }
    /** Installs the application menu bar and registers this window as its listener. */
    protected void addMenuBar()
    {
        gbadMenu = new GBADMenu();
        gbadMenu.addActionListener(this);
        setJMenuBar(gbadMenu);
    }
    /** Builds the content pane: parameters panel on the left, graph display in the center. */
    protected void addContent()
    {
        JPanel content = new JPanel();
        content.setLayout(new BorderLayout());
        parameters = new Parameters();
        parameters.addActionListener(this);
        content.add(parameters, BorderLayout.WEST);
        graphDisplay = new GraphDisplay();
        content.add(graphDisplay, BorderLayout.CENTER);
        setContentPane(content);
    }
    /**
     * Configures the system look-and-feel; on Mac OS X also moves the menu bar
     * to the screen menu. All look-and-feel failures are intentionally ignored,
     * leaving the default (cross-platform) look in place.
     */
    protected void setupLookAndFeel()
    {
        String os;
        try
        {
            os = System.getProperty("os.name");
            if(os == null)
            {
                throw new SecurityException();
            }
        }
        catch(SecurityException se)
        {
            // Property not readable in this environment; assume Windows behavior.
            os = "Windows";
        }
        if(os.equals("Mac OS X"))
        {
            System.setProperty("apple.laf.useScreenMenuBar", "true");
            //System.setProperty("com.apple.mrj.application.growbox.intrudes", "false");
        }
        else
        {
            System.clearProperty("apple.laf.useScreenMenuBar");
        }
        try
        {
            UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName());
            //UIManager.setLookAndFeel("com.sun.java.swing.plaf.motif.MotifLookAndFeel");
        }
        catch(InstantiationException ie)
        {
            //JOptionPane.showMessageDialog(this, ie.getMessage(), "Error Setting Look And Feel", JOptionPane.ERROR_MESSAGE);
        }
        catch(IllegalAccessException iae)
        {
            //JOptionPane.showMessageDialog(this, iae.getMessage(), "Error Setting Look And Feel", JOptionPane.ERROR_MESSAGE);
        }
        catch(UnsupportedLookAndFeelException ulafe)
        {
            //JOptionPane.showMessageDialog(this, ulafe.getMessage(), "Error Setting Look And Feel", JOptionPane.ERROR_MESSAGE);
        }
        catch(ClassNotFoundException cnfe)
        {
            //JOptionPane.showMessageDialog(this, cnfe.getMessage(), "Error Setting Look And Feel", JOptionPane.ERROR_MESSAGE);
        }
    }
    /**
     * Returns the current time formatted per DATE_FORMAT_NOW. A fresh
     * SimpleDateFormat is created per call, so this is safe from any thread.
     */
    public static String getCurTime()
    {
        Calendar cal = Calendar.getInstance();
        SimpleDateFormat sdf = new SimpleDateFormat(DATE_FORMAT_NOW);
        return sdf.format(cal.getTime());
    }
    /**
     * Launches an external GBAD run on the currently open graph: prompts to save
     * unsaved edits, verifies the input file, prepares a results directory next
     * to the input (or ./results/ when the input has no parent), then starts the
     * process and a monitor thread that reports completion back to this window.
     */
    protected void startGBAD()
    {
        try
        {
            if(!graphDisplay.askToSaveGraph("Do you want to save the changes to " + graphDisplay.getGraphFileName() + " before running GBAD?"))
            {
                // User cancelled.
                return;
            }
            File inputFile = graphDisplay.getGraphFile();
            if((inputFile == null) || (!inputFile.exists()))
            {
                JOptionPane.showMessageDialog(this, "Unable to locate file " + graphDisplay.getGraphFileName() + ".", "Error Locating File", JOptionPane.ERROR_MESSAGE);
                return;
            }
            String resultsDir;
            if(inputFile.getParent() == null)
            {
                resultsDir = "./results/";
                try
                {
                    (new File(resultsDir)).mkdirs();
                }
                catch(Exception ex)
                {
                    // can not create directory, ignore
                    ex.printStackTrace();
                }
            }
            else
            {
                resultsDir = inputFile.getParent() + "/results/";
                try
                {
                    (new File(resultsDir)).mkdirs();
                }
                catch(Exception ex)
                {
                    // can not create directory, ignore
                    ex.printStackTrace();
                }
            }
            // Unique output name: <input>__<timestamp>__<algorithm>.result
            String append = "__" + getCurTime() + "__";
            List<String> args = parameters.getParameters(inputFile, append, resultsDir);
            String outputName = resultsDir + inputFile.getName() + append + parameters.getAnomalyAlgorithm() + ".result";
            System.out.println(StartProcess.getCommandLine(new File(outputName), args));
            try
            {
                Process process = StartProcess.start(outputName, args);
                // Monitor the process on a background thread; it calls back into
                // gbadComplete()/gbadError() when the process exits.
                Thread thread = new Thread(new GBADProcessMonitor(outputName, process, this));
                thread.start();
            }
            catch(IOException ioe)
            {
                JOptionPane.showMessageDialog(this, "Unable to start GBAD process.", "Error Starting GBAD", JOptionPane.ERROR_MESSAGE);
            }
        }
        catch(NumberFormatException nfe)
        {
            // Thrown by parameter parsing when a numeric field holds invalid text.
            JOptionPane.showMessageDialog(this, nfe.getMessage(), "Invalid Parameter Value", JOptionPane.ERROR_MESSAGE);
        }
    }
    /** Called by the monitor thread when GBAD exits cleanly; offers to open the results. */
    protected void gbadComplete(String outputName)
    {
        int option = JOptionPane.showConfirmDialog(this, "GBAD process exited. Do you want to open results?", "GBAD Process", JOptionPane.YES_NO_OPTION, JOptionPane.WARNING_MESSAGE);
        if(option == JOptionPane.YES_OPTION)
        {
            graphDisplay.openResults(outputName);
        }
    }
    /** Called by the monitor thread when GBAD fails; shows the error to the user. */
    protected void gbadError(String error)
    {
        // NOTE(review): the dialog's return value is unused - shown for information only.
        int option = JOptionPane.showConfirmDialog(this, error, "GBAD Process", JOptionPane.DEFAULT_OPTION, JOptionPane.WARNING_MESSAGE);
    }
    /** Dispatches menu commands and the parameter panel's "start GBAD" action. */
    public void actionPerformed(ActionEvent e)
    {
        if(e.getSource() == gbadMenu)
        {
            // Menu events carry the command as the event ID.
            if(e.getID() == gbadMenu.NEW_COMMAND)
            {
                graphDisplay.newGraph();
            }
            else if(e.getID() == gbadMenu.OPEN_GRAPH_COMMAND)
            {
                graphDisplay.openGraph();
            }
            else if(e.getID() == gbadMenu.OPEN_RESULTS_COMMAND)
            {
                graphDisplay.openResults();
            }
            else if(e.getID() == gbadMenu.CLOSE_COMMAND)
            {
                // Route through windowClosing() so the save prompt is shown.
                WindowEvent newEvent = new WindowEvent(this, 0);
                this.windowClosing(newEvent);
            }
            else if(e.getID() == gbadMenu.SAVE_COMMAND)
            {
                graphDisplay.saveGraph();
            }
            else if(e.getID() == gbadMenu.SAVE_AS_COMMAND)
            {
                graphDisplay.saveAsGraph();
            }
            else if(e.getID() == gbadMenu.PREFERENCES_COMMAND)
            {
                // Show the preferences dialog slightly offset from this window.
                preferences.pack();
                preferences.setSize(preferences.getPreferredSize());
                Point p = getLocationOnScreen();
                preferences.setLocation((int)p.getX()+10, (int)p.getY()+10);
                preferences.setVisible(true);
            }
        }
        else if(e.getSource() == parameters)
        {
            if(e.getActionCommand().equals(parameters.startGBADText))
            {
                startGBAD();
            }
        }
    }
    /**
     * Exits the application after offering to save unsaved changes; stays open
     * when the user cancels the save prompt.
     */
    public void windowClosing(WindowEvent e)
    {
        if(graphDisplay.askToSaveGraph("Do you want to save the changes to " + graphDisplay.getGraphFileName() + " before closing?"))
        {
            System.exit(0);
        }
    }
    // Remaining WindowListener callbacks are intentionally no-ops. The last three
    // (gained/lost focus, state changed) are not part of WindowListener at all;
    // they are harmless extras and are never invoked through that interface.
    public void windowClosed(WindowEvent e)
    {
    }
    public void windowOpened(WindowEvent e)
    {
    }
    public void windowIconified(WindowEvent e)
    {
    }
    public void windowDeiconified(WindowEvent e)
    {
    }
    public void windowActivated(WindowEvent e)
    {
    }
    public void windowDeactivated(WindowEvent e)
    {
    }
    public void windowGainedFocus(WindowEvent e)
    {
    }
    public void windowLostFocus(WindowEvent e)
    {
    }
    public void windowStateChanged(WindowEvent e)
    {
    }
}
/**
 * Waits on a background thread for a spawned GBAD process to exit and reports
 * the outcome to the owning GUI: gbadComplete(fileName) on exit code 0,
 * gbadError(message) otherwise.
 */
class GBADProcessMonitor implements Runnable
{
    String fileName;   // output (.result) file the process writes to
    Process process;   // the running GBAD process being monitored
    GUI listener;      // window notified on completion or failure
    public GBADProcessMonitor(String fileName, Process process, GUI listener)
    {
        this.process = process;
        this.fileName = fileName;
        this.listener = listener;
    }
    /** Blocks until the process exits, then notifies the listener with the result. */
    public void run()
    {
        try
        {
            int returnValue = process.waitFor();
            System.out.println("VALUE OF " + returnValue);
            if(returnValue != 0)
            {
                // Map the known StartProcess exit codes to user-readable messages.
                // Fix: default message previously read "staring gbad".
                String error = "An unexpected error occurred when starting gbad. ";
                if(returnValue == StartProcess.OUTPUT_CREATE_ERROR)
                {
                    error = "Unable to create output file (" + fileName + ").";
                }
                else if(returnValue == StartProcess.START_PROCESS_ERROR)
                {
                    error = "Unable to start gbad process.";
                }
                else if(returnValue == StartProcess.IO_ERROR)
                {
                    error = "A problem occurred when attempting to read/write gbad output.";
                }
                listener.gbadError(error);
            }
            else
            {
                listener.gbadComplete(fileName);
            }
        }
        catch(InterruptedException ie)
        {
            // Fix: restore the interrupt status instead of silently swallowing it,
            // so the thread's interruption remains observable.
            Thread.currentThread().interrupt();
        }
    }
}
| |
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1/featurestore_service.proto
package com.google.cloud.aiplatform.v1;
/**
*
*
* <pre>
* Request message for [FeaturestoreService.DeleteFeature][google.cloud.aiplatform.v1.FeaturestoreService.DeleteFeature].
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1.DeleteFeatureRequest}
*/
public final class DeleteFeatureRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.DeleteFeatureRequest)
DeleteFeatureRequestOrBuilder {
// Serialization version pin for this generated message.
private static final long serialVersionUID = 0L;
// Use DeleteFeatureRequest.newBuilder() to construct.
private DeleteFeatureRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}
// No-arg constructor used for the default instance; initializes fields to defaults.
private DeleteFeatureRequest() {
  name_ = "";
}
// Generated factory hook used by the protobuf runtime via reflection.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new DeleteFeatureRequest();
}
// Exposes fields that were present on the wire but unknown to this schema version.
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
  return this.unknownFields;
}
// Wire-format parsing constructor (legacy protobuf-java codegen style): reads
// tags until end of message, storing field 1 ("name") and preserving any
// unknown fields.
private DeleteFeatureRequest(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  this();
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  com.google.protobuf.UnknownFieldSet.Builder unknownFields =
      com.google.protobuf.UnknownFieldSet.newBuilder();
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          // Tag 0 marks end of input.
          done = true;
          break;
        case 10:
          {
            // Field 1, wire type 2: the "name" string.
            java.lang.String s = input.readStringRequireUtf8();
            name_ = s;
            break;
          }
        default:
          {
            if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
      }
    }
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
  } finally {
    // Always capture what was parsed so far, even on failure.
    this.unknownFields = unknownFields.build();
    makeExtensionsImmutable();
  }
}
// Message descriptor as registered in the generated file descriptor.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.aiplatform.v1.FeaturestoreServiceProto
      .internal_static_google_cloud_aiplatform_v1_DeleteFeatureRequest_descriptor;
}
// Binds the descriptor's fields to this class for reflective access.
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.aiplatform.v1.FeaturestoreServiceProto
      .internal_static_google_cloud_aiplatform_v1_DeleteFeatureRequest_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.aiplatform.v1.DeleteFeatureRequest.class,
          com.google.cloud.aiplatform.v1.DeleteFeatureRequest.Builder.class);
}
public static final int NAME_FIELD_NUMBER = 1;
// Holds either a String or a ByteString; lazily normalized by the accessors below.
private volatile java.lang.Object name_;
/**
 *
 *
 * <pre>
 * Required. The name of the Features to be deleted.
 * Format:
 * `projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}/features/{feature}`
 * </pre>
 *
 * <code>
 * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The name.
 */
@java.lang.Override
public java.lang.String getName() {
  java.lang.Object ref = name_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // Field arrived as a ByteString; decode once and cache the String form.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    name_ = s;
    return s;
  }
}
/**
 *
 *
 * <pre>
 * Required. The name of the Features to be deleted.
 * Format:
 * `projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}/features/{feature}`
 * </pre>
 *
 * <code>
 * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The bytes for name.
 */
@java.lang.Override
public com.google.protobuf.ByteString getNameBytes() {
  java.lang.Object ref = name_;
  if (ref instanceof java.lang.String) {
    // Field is cached as a String; encode once and cache the ByteString form.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    name_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
// -1 = not computed, 0 = not initialized, 1 = initialized.
private byte memoizedIsInitialized = -1;
// Always true for this message (no required proto2-style fields); memoized.
@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  memoizedIsInitialized = 1;
  return true;
}
// Serializes the message: "name" (field 1) only when non-empty, then unknown fields.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
  }
  unknownFields.writeTo(output);
}
// Computes (and memoizes) the serialized byte size, mirroring writeTo().
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
  }
  size += unknownFields.getSerializedSize();
  memoizedSize = size;
  return size;
}
// Value equality over "name" and the unknown field set.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.aiplatform.v1.DeleteFeatureRequest)) {
    return super.equals(obj);
  }
  com.google.cloud.aiplatform.v1.DeleteFeatureRequest other =
      (com.google.cloud.aiplatform.v1.DeleteFeatureRequest) obj;
  if (!getName().equals(other.getName())) return false;
  if (!unknownFields.equals(other.unknownFields)) return false;
  return true;
}
// Hash consistent with equals(); memoized after first computation.
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (37 * hash) + NAME_FIELD_NUMBER;
  hash = (53 * hash) + getName().hashCode();
  hash = (29 * hash) + unknownFields.hashCode();
  memoizedHashCode = hash;
  return hash;
}
// Standard generated parse entry points for the supported input kinds; all
// delegate to PARSER (declared later in this class) or to the GeneratedMessageV3
// IO helpers.
public static com.google.cloud.aiplatform.v1.DeleteFeatureRequest parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1.DeleteFeatureRequest parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.DeleteFeatureRequest parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1.DeleteFeatureRequest parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.DeleteFeatureRequest parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1.DeleteFeatureRequest parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.DeleteFeatureRequest parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1.DeleteFeatureRequest parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.DeleteFeatureRequest parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1.DeleteFeatureRequest parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.DeleteFeatureRequest parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1.DeleteFeatureRequest parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
// Fresh builder for this message type.
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}
// Fresh builder seeded from the default instance.
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}
// Creates a Builder pre-populated with the fields of the given prototype message.
public static Builder newBuilder(com.google.cloud.aiplatform.v1.DeleteFeatureRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
// Converts this message back into a Builder; the default instance yields an empty Builder
// without a redundant merge.
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
// Framework hook: creates a Builder attached to a parent for change-notification propagation.
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
 *
 *
 * <pre>
 * Request message for [FeaturestoreService.DeleteFeature][google.cloud.aiplatform.v1.FeaturestoreService.DeleteFeature].
 * </pre>
 *
 * Protobuf type {@code google.cloud.aiplatform.v1.DeleteFeatureRequest}
 */
// NOTE: generated protocol-buffer builder (see @@protoc_insertion_point markers); edit the
// .proto definition rather than this class.
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.DeleteFeatureRequest)
com.google.cloud.aiplatform.v1.DeleteFeatureRequestOrBuilder {
// Static descriptor accessor for this message type.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1.FeaturestoreServiceProto
.internal_static_google_cloud_aiplatform_v1_DeleteFeatureRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1.FeaturestoreServiceProto
.internal_static_google_cloud_aiplatform_v1_DeleteFeatureRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1.DeleteFeatureRequest.class,
com.google.cloud.aiplatform.v1.DeleteFeatureRequest.Builder.class);
}
// Construct using com.google.cloud.aiplatform.v1.DeleteFeatureRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// No repeated/message fields here, so there are no field builders to force-initialize.
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
}
@java.lang.Override
public Builder clear() {
super.clear();
name_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.aiplatform.v1.FeaturestoreServiceProto
.internal_static_google_cloud_aiplatform_v1_DeleteFeatureRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.DeleteFeatureRequest getDefaultInstanceForType() {
return com.google.cloud.aiplatform.v1.DeleteFeatureRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.DeleteFeatureRequest build() {
com.google.cloud.aiplatform.v1.DeleteFeatureRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.DeleteFeatureRequest buildPartial() {
com.google.cloud.aiplatform.v1.DeleteFeatureRequest result =
new com.google.cloud.aiplatform.v1.DeleteFeatureRequest(this);
result.name_ = name_;
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.aiplatform.v1.DeleteFeatureRequest) {
return mergeFrom((com.google.cloud.aiplatform.v1.DeleteFeatureRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
// Merges non-default fields from another DeleteFeatureRequest into this builder.
public Builder mergeFrom(com.google.cloud.aiplatform.v1.DeleteFeatureRequest other) {
if (other == com.google.cloud.aiplatform.v1.DeleteFeatureRequest.getDefaultInstance())
return this;
if (!other.getName().isEmpty()) {
name_ = other.name_;
onChanged();
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
// Parses from the stream; any partially-parsed message is merged in even when parsing fails.
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.cloud.aiplatform.v1.DeleteFeatureRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage =
(com.google.cloud.aiplatform.v1.DeleteFeatureRequest) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Backing storage for "name": holds either a String or a ByteString, decoded lazily.
private java.lang.Object name_ = "";
/**
 *
 *
 * <pre>
 * Required. The name of the Features to be deleted.
 * Format:
 * `projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}/features/{feature}`
 * </pre>
 *
 * <code>
 * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The name.
 */
public java.lang.String getName() {
java.lang.Object ref = name_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 *
 *
 * <pre>
 * Required. The name of the Features to be deleted.
 * Format:
 * `projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}/features/{feature}`
 * </pre>
 *
 * <code>
 * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The bytes for name.
 */
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
 *
 *
 * <pre>
 * Required. The name of the Features to be deleted.
 * Format:
 * `projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}/features/{feature}`
 * </pre>
 *
 * <code>
 * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @param value The name to set.
 * @return This builder for chaining.
 */
public Builder setName(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
name_ = value;
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Required. The name of the Features to be deleted.
 * Format:
 * `projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}/features/{feature}`
 * </pre>
 *
 * <code>
 * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return This builder for chaining.
 */
public Builder clearName() {
name_ = getDefaultInstance().getName();
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Required. The name of the Features to be deleted.
 * Format:
 * `projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}/features/{feature}`
 * </pre>
 *
 * <code>
 * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @param value The bytes for name to set.
 * @return This builder for chaining.
 */
public Builder setNameBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
name_ = value;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.DeleteFeatureRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.DeleteFeatureRequest)
// Singleton default (all-fields-empty) instance, created eagerly at class load.
private static final com.google.cloud.aiplatform.v1.DeleteFeatureRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1.DeleteFeatureRequest();
}
// Returns the shared immutable default instance.
public static com.google.cloud.aiplatform.v1.DeleteFeatureRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Shared parser; each call constructs the message directly from the wire format.
private static final com.google.protobuf.Parser<DeleteFeatureRequest> PARSER =
new com.google.protobuf.AbstractParser<DeleteFeatureRequest>() {
@java.lang.Override
public DeleteFeatureRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new DeleteFeatureRequest(input, extensionRegistry);
}
};
// Static accessor for the shared parser.
public static com.google.protobuf.Parser<DeleteFeatureRequest> parser() {
return PARSER;
}
// Instance-level accessor for the shared parser (Message interface requirement).
@java.lang.Override
public com.google.protobuf.Parser<DeleteFeatureRequest> getParserForType() {
return PARSER;
}
// Instance-level accessor for the default instance (MessageLite interface requirement).
@java.lang.Override
public com.google.cloud.aiplatform.v1.DeleteFeatureRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
| |
package org.apache.lucene.document;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.StringReader;
import java.nio.CharBuffer;
import org.apache.lucene.analysis.CannedTokenStream;
import org.apache.lucene.analysis.Token;
import org.apache.lucene.document.Field.ReusableStringReader;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LuceneTestCase;
// sanity check some basics of fields
// Exercises the typed-setter contract of each Field subclass: for every field type exactly
// the matching setter (and, for binary fields, the bytes setters) must succeed, and every
// other setter must be rejected with IllegalArgumentException. The trySetXxx helpers at the
// bottom assert the rejection half of that contract.
public class TestField extends LuceneTestCase {
public void testDoubleField() throws Exception {
Field fields[] = new Field[] {
new DoubleField("foo", 5d, Field.Store.NO),
new DoubleField("foo", 5d, Field.Store.YES)
};
for (Field field : fields) {
trySetBoost(field);
trySetByteValue(field);
trySetBytesValue(field);
trySetBytesRefValue(field);
field.setDoubleValue(6d); // ok
trySetIntValue(field);
trySetFloatValue(field);
trySetLongValue(field);
trySetReaderValue(field);
trySetShortValue(field);
trySetStringValue(field);
trySetTokenStreamValue(field);
assertEquals(6d, field.numericValue().doubleValue(), 0.0d);
}
}
public void testDoubleDocValuesField() throws Exception {
DoubleDocValuesField field = new DoubleDocValuesField("foo", 5d);
trySetBoost(field);
trySetByteValue(field);
trySetBytesValue(field);
trySetBytesRefValue(field);
field.setDoubleValue(6d); // ok
trySetIntValue(field);
trySetFloatValue(field);
trySetLongValue(field);
trySetReaderValue(field);
trySetShortValue(field);
trySetStringValue(field);
trySetTokenStreamValue(field);
// Double doc values are stored as raw long bits, hence the round-trip via longBitsToDouble.
assertEquals(6d, Double.longBitsToDouble(field.numericValue().longValue()), 0.0d);
}
public void testFloatDocValuesField() throws Exception {
FloatDocValuesField field = new FloatDocValuesField("foo", 5f);
trySetBoost(field);
trySetByteValue(field);
trySetBytesValue(field);
trySetBytesRefValue(field);
trySetDoubleValue(field);
trySetIntValue(field);
field.setFloatValue(6f); // ok
trySetLongValue(field);
trySetReaderValue(field);
trySetShortValue(field);
trySetStringValue(field);
trySetTokenStreamValue(field);
// Float doc values are stored as raw int bits, hence the round-trip via intBitsToFloat.
assertEquals(6f, Float.intBitsToFloat(field.numericValue().intValue()), 0.0f);
}
public void testFloatField() throws Exception {
Field fields[] = new Field[] {
new FloatField("foo", 5f, Field.Store.NO),
new FloatField("foo", 5f, Field.Store.YES)
};
for (Field field : fields) {
trySetBoost(field);
trySetByteValue(field);
trySetBytesValue(field);
trySetBytesRefValue(field);
trySetDoubleValue(field);
trySetIntValue(field);
field.setFloatValue(6f); // ok
trySetLongValue(field);
trySetReaderValue(field);
trySetShortValue(field);
trySetStringValue(field);
trySetTokenStreamValue(field);
assertEquals(6f, field.numericValue().floatValue(), 0.0f);
}
}
public void testIntField() throws Exception {
Field fields[] = new Field[] {
new IntField("foo", 5, Field.Store.NO),
new IntField("foo", 5, Field.Store.YES)
};
for (Field field : fields) {
trySetBoost(field);
trySetByteValue(field);
trySetBytesValue(field);
trySetBytesRefValue(field);
trySetDoubleValue(field);
field.setIntValue(6); // ok
trySetFloatValue(field);
trySetLongValue(field);
trySetReaderValue(field);
trySetShortValue(field);
trySetStringValue(field);
trySetTokenStreamValue(field);
assertEquals(6, field.numericValue().intValue());
}
}
public void testNumericDocValuesField() throws Exception {
NumericDocValuesField field = new NumericDocValuesField("foo", 5L);
trySetBoost(field);
trySetByteValue(field);
trySetBytesValue(field);
trySetBytesRefValue(field);
trySetDoubleValue(field);
trySetIntValue(field);
trySetFloatValue(field);
field.setLongValue(6); // ok
trySetReaderValue(field);
trySetShortValue(field);
trySetStringValue(field);
trySetTokenStreamValue(field);
assertEquals(6L, field.numericValue().longValue());
}
public void testLongField() throws Exception {
Field fields[] = new Field[] {
new LongField("foo", 5L, Field.Store.NO),
new LongField("foo", 5L, Field.Store.YES)
};
for (Field field : fields) {
trySetBoost(field);
trySetByteValue(field);
trySetBytesValue(field);
trySetBytesRefValue(field);
trySetDoubleValue(field);
trySetIntValue(field);
trySetFloatValue(field);
field.setLongValue(6); // ok
trySetReaderValue(field);
trySetShortValue(field);
trySetStringValue(field);
trySetTokenStreamValue(field);
assertEquals(6L, field.numericValue().longValue());
}
}
public void testSortedBytesDocValuesField() throws Exception {
SortedDocValuesField field = new SortedDocValuesField("foo", new BytesRef("bar"));
trySetBoost(field);
trySetByteValue(field);
// Binary fields accept both byte[] and BytesRef setters; the last call wins.
field.setBytesValue("fubar".getBytes("UTF-8"));
field.setBytesValue(new BytesRef("baz"));
trySetDoubleValue(field);
trySetIntValue(field);
trySetFloatValue(field);
trySetLongValue(field);
trySetReaderValue(field);
trySetShortValue(field);
trySetStringValue(field);
trySetTokenStreamValue(field);
assertEquals(new BytesRef("baz"), field.binaryValue());
}
public void testBinaryDocValuesField() throws Exception {
BinaryDocValuesField field = new BinaryDocValuesField("foo", new BytesRef("bar"));
trySetBoost(field);
trySetByteValue(field);
field.setBytesValue("fubar".getBytes("UTF-8"));
field.setBytesValue(new BytesRef("baz"));
trySetDoubleValue(field);
trySetIntValue(field);
trySetFloatValue(field);
trySetLongValue(field);
trySetReaderValue(field);
trySetShortValue(field);
trySetStringValue(field);
trySetTokenStreamValue(field);
assertEquals(new BytesRef("baz"), field.binaryValue());
}
public void testStringField() throws Exception {
Field fields[] = new Field[] {
new StringField("foo", "bar", Field.Store.NO),
new StringField("foo", "bar", Field.Store.YES)
};
for (Field field : fields) {
trySetBoost(field);
trySetByteValue(field);
trySetBytesValue(field);
trySetBytesRefValue(field);
trySetDoubleValue(field);
trySetIntValue(field);
trySetFloatValue(field);
trySetLongValue(field);
trySetReaderValue(field);
trySetShortValue(field);
field.setStringValue("baz");
trySetTokenStreamValue(field);
assertEquals("baz", field.stringValue());
}
}
public void testTextFieldString() throws Exception {
Field fields[] = new Field[] {
new TextField("foo", "bar", Field.Store.NO),
new TextField("foo", "bar", Field.Store.YES)
};
for (Field field : fields) {
// Unlike the other field types, TextField additionally allows boost and token streams.
field.setBoost(5f);
trySetByteValue(field);
trySetBytesValue(field);
trySetBytesRefValue(field);
trySetDoubleValue(field);
trySetIntValue(field);
trySetFloatValue(field);
trySetLongValue(field);
trySetReaderValue(field);
trySetShortValue(field);
field.setStringValue("baz");
field.setTokenStream(new CannedTokenStream(new Token("foo", 0, 3)));
assertEquals("baz", field.stringValue());
assertEquals(5f, field.boost(), 0f);
}
}
public void testTextFieldReader() throws Exception {
Field field = new TextField("foo", new StringReader("bar"));
field.setBoost(5f);
trySetByteValue(field);
trySetBytesValue(field);
trySetBytesRefValue(field);
trySetDoubleValue(field);
trySetIntValue(field);
trySetFloatValue(field);
trySetLongValue(field);
field.setReaderValue(new StringReader("foobar"));
trySetShortValue(field);
trySetStringValue(field);
field.setTokenStream(new CannedTokenStream(new Token("foo", 0, 3)));
assertNotNull(field.readerValue());
assertEquals(5f, field.boost(), 0f);
}
/* TODO: this is pretty expert and crazy
 * see if we can fix it up later
public void testTextFieldTokenStream() throws Exception {
}
*/
public void testStoredFieldBytes() throws Exception {
Field fields[] = new Field[] {
new StoredField("foo", "bar".getBytes("UTF-8")),
new StoredField("foo", "bar".getBytes("UTF-8"), 0, 3),
new StoredField("foo", new BytesRef("bar")),
};
for (Field field : fields) {
trySetBoost(field);
trySetByteValue(field);
field.setBytesValue("baz".getBytes("UTF-8"));
field.setBytesValue(new BytesRef("baz"));
trySetDoubleValue(field);
trySetIntValue(field);
trySetFloatValue(field);
trySetLongValue(field);
trySetReaderValue(field);
trySetShortValue(field);
trySetStringValue(field);
trySetTokenStreamValue(field);
assertEquals(new BytesRef("baz"), field.binaryValue());
}
}
public void testStoredFieldString() throws Exception {
Field field = new StoredField("foo", "bar");
trySetBoost(field);
trySetByteValue(field);
trySetBytesValue(field);
trySetBytesRefValue(field);
trySetDoubleValue(field);
trySetIntValue(field);
trySetFloatValue(field);
trySetLongValue(field);
trySetReaderValue(field);
trySetShortValue(field);
field.setStringValue("baz");
trySetTokenStreamValue(field);
assertEquals("baz", field.stringValue());
}
public void testStoredFieldInt() throws Exception {
Field field = new StoredField("foo", 1);
trySetBoost(field);
trySetByteValue(field);
trySetBytesValue(field);
trySetBytesRefValue(field);
trySetDoubleValue(field);
field.setIntValue(5);
trySetFloatValue(field);
trySetLongValue(field);
trySetReaderValue(field);
trySetShortValue(field);
trySetStringValue(field);
trySetTokenStreamValue(field);
assertEquals(5, field.numericValue().intValue());
}
public void testStoredFieldDouble() throws Exception {
Field field = new StoredField("foo", 1D);
trySetBoost(field);
trySetByteValue(field);
trySetBytesValue(field);
trySetBytesRefValue(field);
field.setDoubleValue(5D);
trySetIntValue(field);
trySetFloatValue(field);
trySetLongValue(field);
trySetReaderValue(field);
trySetShortValue(field);
trySetStringValue(field);
trySetTokenStreamValue(field);
assertEquals(5D, field.numericValue().doubleValue(), 0.0D);
}
public void testStoredFieldFloat() throws Exception {
Field field = new StoredField("foo", 1F);
trySetBoost(field);
trySetByteValue(field);
trySetBytesValue(field);
trySetBytesRefValue(field);
trySetDoubleValue(field);
trySetIntValue(field);
field.setFloatValue(5f);
trySetLongValue(field);
trySetReaderValue(field);
trySetShortValue(field);
trySetStringValue(field);
trySetTokenStreamValue(field);
assertEquals(5f, field.numericValue().floatValue(), 0.0f);
}
public void testStoredFieldLong() throws Exception {
Field field = new StoredField("foo", 1L);
trySetBoost(field);
trySetByteValue(field);
trySetBytesValue(field);
trySetBytesRefValue(field);
trySetDoubleValue(field);
trySetIntValue(field);
trySetFloatValue(field);
field.setLongValue(5);
trySetReaderValue(field);
trySetShortValue(field);
trySetStringValue(field);
trySetTokenStreamValue(field);
assertEquals(5L, field.numericValue().longValue());
}
// ---- helpers: each asserts that the given setter is REJECTED for this field type ----
private void trySetByteValue(Field f) {
try {
f.setByteValue((byte) 10);
fail();
} catch (IllegalArgumentException expected) {
// expected
}
}
private void trySetBytesValue(Field f) {
try {
f.setBytesValue(new byte[] { 5, 5 });
fail();
} catch (IllegalArgumentException expected) {
// expected
}
}
private void trySetBytesRefValue(Field f) {
try {
f.setBytesValue(new BytesRef("bogus"));
fail();
} catch (IllegalArgumentException expected) {
// expected
}
}
private void trySetDoubleValue(Field f) {
try {
f.setDoubleValue(Double.MAX_VALUE);
fail();
} catch (IllegalArgumentException expected) {
// expected
}
}
private void trySetIntValue(Field f) {
try {
f.setIntValue(Integer.MAX_VALUE);
fail();
} catch (IllegalArgumentException expected) {
// expected
}
}
private void trySetLongValue(Field f) {
try {
f.setLongValue(Long.MAX_VALUE);
fail();
} catch (IllegalArgumentException expected) {
// expected
}
}
private void trySetFloatValue(Field f) {
try {
f.setFloatValue(Float.MAX_VALUE);
fail();
} catch (IllegalArgumentException expected) {
// expected
}
}
private void trySetReaderValue(Field f) {
try {
f.setReaderValue(new StringReader("BOO!"));
fail();
} catch (IllegalArgumentException expected) {
// expected
}
}
private void trySetShortValue(Field f) {
try {
f.setShortValue(Short.MAX_VALUE);
fail();
} catch (IllegalArgumentException expected) {
// expected
}
}
private void trySetStringValue(Field f) {
try {
f.setStringValue("BOO!");
fail();
} catch (IllegalArgumentException expected) {
// expected
}
}
private void trySetTokenStreamValue(Field f) {
try {
f.setTokenStream(new CannedTokenStream(new Token("foo", 0, 3)));
fail();
} catch (IllegalArgumentException expected) {
// expected
}
}
private void trySetBoost(Field f) {
try {
f.setBoost(5.0f);
fail();
} catch (IllegalArgumentException expected) {
// expected
}
}
// Sanity-checks Field.ReusableStringReader: read() variants, value reuse after close().
public void testReusableStringReader() throws Exception {
ReusableStringReader reader = new ReusableStringReader();
assertEquals(-1, reader.read());
assertEquals(-1, reader.read(new char[1]));
assertEquals(-1, reader.read(new char[2], 1, 1));
assertEquals(-1, reader.read(CharBuffer.wrap(new char[2])));
reader.setValue("foobar");
char[] buf = new char[4];
assertEquals(4, reader.read(buf));
assertEquals("foob", new String(buf));
assertEquals(2, reader.read(buf));
assertEquals("ar", new String(buf, 0, 2));
assertEquals(-1, reader.read(buf));
reader.close();
reader.setValue("foobar");
assertEquals(0, reader.read(buf, 1, 0));
assertEquals(3, reader.read(buf, 1, 3));
assertEquals("foo", new String(buf, 1, 3));
assertEquals(2, reader.read(CharBuffer.wrap(buf, 2, 2)));
assertEquals("ba", new String(buf, 2, 2));
assertEquals('r', (char) reader.read());
assertEquals(-1, reader.read(buf));
reader.close();
reader.setValue("foobar");
StringBuilder sb = new StringBuilder();
int ch;
while ((ch = reader.read()) != -1) {
sb.append((char) ch);
}
reader.close();
assertEquals("foobar", sb.toString());
}
}
| |
/*
* Copyright 2017 Marcus Portmann
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package guru.mmp.application.persistence;
//~--- non-JDK imports --------------------------------------------------------
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Repository;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.TransactionDefinition;
import org.springframework.transaction.TransactionException;
import org.springframework.transaction.support.DefaultTransactionDefinition;
import org.springframework.transaction.support.TransactionTemplate;
import javax.inject.Inject;
import javax.sql.DataSource;
import java.sql.*;
import java.util.UUID;
//~--- JDK imports ------------------------------------------------------------
/**
* The <code>IDGenerator</code> class provides unique IDs for the entity types in the database.
* <p>
* It requires the IDGENERATOR table which must be created under the IDGENERATOR schema within the
* database. The unique ID will be retrieved using a new transaction while suspending the existing
* database transaction. This is done to reduce deadlocks and improve performance.
*
* @author Marcus Portmann
*/
@Repository
public class IDGenerator
{
  /**
   * The data source used to provide connections to the application database.
   */
  @Inject
  @Qualifier("applicationDataSource")
  private DataSource dataSource;

  /**
   * The Transaction Manager.
   */
  @Inject
  private PlatformTransactionManager transactionManager;

  /**
   * Cached result of the database product-name check performed by <code>nextUUID()</code>.
   * <code>null</code> until first computed; <code>volatile</code> so concurrent callers see the
   * cached value without locking. (Fixes the in-code TODO: previously every call to
   * <code>nextUUID()</code> opened a database connection just to repeat this check.)
   */
  private volatile Boolean isPostgreSQL;

  /**
   * Get the next unique <code>long</code> ID for the entity with the specified type.
   * <p>
   * The ID is retrieved in a new transaction (PROPAGATION_REQUIRES_NEW), suspending any
   * existing transaction, to reduce deadlocks and lock hold times on the IDGENERATOR row.
   *
   * @param type the type of entity to retrieve the next ID for
   *
   * @return the next unique <code>long</code> ID for the entity with the specified type
   */
  public long next(String type)
  {
    TransactionTemplate transactionTemplate = new TransactionTemplate(transactionManager,
        new DefaultTransactionDefinition(TransactionDefinition.PROPAGATION_REQUIRES_NEW));
    try
    {
      return transactionTemplate.execute(
          status ->
          {
            try (Connection connection = dataSource.getConnection())
            {
              // SELECT ... FOR UPDATE locks the row so concurrent callers serialize here.
              Long id = getCurrentId(connection, type);
              if (id == null)
              {
                id = 1L;
                insertId(connection, type, id);

                /*
                 * TODO: Handle a duplicate row exception caused by the INSERT/UPDATE race
                 * condition. This race condition occurs when there is no row for a particular
                 * type of entity in the IDGENERATOR table. Assuming we have two different
                 * threads that are both attempting to retrieve the next ID for this entity
                 * type. When the first thread executes the SELECT FOR UPDATE call, it will
                 * not able to lock a row and will then attempt to execute the INSERT. If
                 * another thread manages to execute the SELECT FOR UPDATE call before the
                 * first thread completes the INSERT then one of the threads will experience a
                 * duplicate row exception as they will both attempt to INSERT. The easiest
                 * way to prevent this from happening is to pre-populate the IDGENERATOR
                 * table with initial IDs.
                 */
              }
              else
              {
                id = id + 1L;
                updateId(connection, type, id);
              }
              return id;
            }
            catch (Throwable e)
            {
              throw new IDGeneratorException(String.format("Failed to retrieve the new ID for the "
                  + "entity of type (%s) from the IDGENERATOR table: %s", type, e.getMessage()), e);
            }
          }
      );
    }
    catch (TransactionException e)
    {
      throw new IDGeneratorException(String.format("Failed to retrieve the new ID for the entity "
          + "of type (%s) from the IDGENERATOR table: %s", type, e.getMessage()), e);
    }
  }

  /**
   * Returns the next <code>UUID</code>.
   *
   * @return the next <code>UUID</code>
   */
  public UUID nextUUID()
  {
    /*
     * First check whether this is a PostgreSQL database and we should be using a stored procedure
     * to retrieve the next UUID. The check is performed once and cached; subsequent calls do not
     * open a database connection.
     */
    try
    {
      if (isPostgreSQL == null)
      {
        try (Connection connection = dataSource.getConnection())
        {
          DatabaseMetaData metaData = connection.getMetaData();

          isPostgreSQL = metaData.getDatabaseProductName().equals("PostgreSQL");
        }
      }

      if (isPostgreSQL)
      {
        // TODO: Retrieve the next UUID using a PostgreSQL stored procedure
      }
    }
    catch (Throwable e)
    {
      throw new IDGeneratorException("Failed to retrieve the next UUID", e);
    }

    return UUID.randomUUID();
  }

  /**
   * Retrieve and row-lock the current ID for the specified entity type.
   *
   * @return the current ID or <code>null</code> if no row exists for the type yet
   */
  private Long getCurrentId(Connection connection, String type)
    throws SQLException
  {
    try (PreparedStatement statement = connection.prepareStatement(
        "SELECT CURRENT FROM IDGENERATOR.IDGENERATOR WHERE NAME=? FOR UPDATE"))
    {
      statement.setString(1, type);

      try (ResultSet rs = statement.executeQuery())
      {
        if (rs.next())
        {
          return rs.getLong(1);
        }
        else
        {
          return null;
        }
      }
    }
  }

  /**
   * Insert the initial ID row for the specified entity type.
   *
   * @throws SQLException if the insert affects no rows
   */
  private void insertId(Connection connection, String type, long id)
    throws SQLException
  {
    try (PreparedStatement statement = connection.prepareStatement(
        "INSERT INTO IDGENERATOR.IDGENERATOR (CURRENT, NAME) VALUES (?, ?)"))
    {
      statement.setLong(1, id);
      statement.setString(2, type);

      if (statement.executeUpdate() == 0)
      {
        throw new SQLException("No rows were affected while inserting the IDGENERATOR.IDGENERATOR "
            + "table row for the type (" + type + ")");
      }
    }
  }

  /**
   * Update the current ID for the specified entity type.
   *
   * @throws SQLException if the update affects no rows
   */
  private void updateId(Connection connection, String type, long id)
    throws SQLException
  {
    try (PreparedStatement statement = connection.prepareStatement(
        "UPDATE IDGENERATOR.IDGENERATOR SET CURRENT=? WHERE NAME=?"))
    {
      statement.setLong(1, id);
      statement.setString(2, type);

      if (statement.executeUpdate() == 0)
      {
        throw new SQLException("No rows were affected while updating the IDGENERATOR.IDGENERATOR "
            + "table row for the type (" + type + ")");
      }
    }
  }
}
| |
package com.medievallords.carbyne.listeners;
import com.medievallords.carbyne.Carbyne;
import com.medievallords.carbyne.economy.objects.Account;
import com.medievallords.carbyne.utils.*;
import com.medievallords.carbyne.utils.scoreboard.Board;
import com.medievallords.carbyne.utils.scoreboard.BoardCooldown;
import com.palmergames.bukkit.towny.object.TownBlock;
import com.palmergames.bukkit.towny.object.TownyUniverse;
import com.vexsoftware.votifier.model.VotifierEvent;
import lombok.Getter;
import org.bukkit.*;
import org.bukkit.block.Block;
import org.bukkit.craftbukkit.v1_8_R3.inventory.CraftItemStack;
import org.bukkit.entity.Chicken;
import org.bukkit.entity.EntityType;
import org.bukkit.entity.Item;
import org.bukkit.entity.Player;
import org.bukkit.event.EventHandler;
import org.bukkit.event.Listener;
import org.bukkit.event.block.Action;
import org.bukkit.event.entity.EntityDamageByEntityEvent;
import org.bukkit.event.entity.PlayerDeathEvent;
import org.bukkit.event.inventory.InventoryClickEvent;
import org.bukkit.event.inventory.InventoryOpenEvent;
import org.bukkit.event.player.*;
import org.bukkit.event.vehicle.VehicleExitEvent;
import org.bukkit.inventory.ItemStack;
import org.bukkit.potion.PotionEffect;
import org.bukkit.potion.PotionEffectType;
import org.bukkit.scheduler.BukkitRunnable;
import org.bukkit.util.Vector;
import org.github.paperspigot.Title;
import org.spigotmc.event.entity.EntityDismountEvent;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
public class PlayerListeners implements Listener {
// Running count of votes received since server start (read access exposed via Lombok @Getter).
@Getter
private static int voteCount = 0;
//private Carbyne main = Carbyne.getInstance();
// Color-translated title shown to first-time players on join.
private String joinMessage;
// Pool of color-translated subtitles; one is chosen at random for each first join.
private String[] subtitles;
// Loads and color-translates the join title and subtitle pool from the plugin config.
// Bug fix: the raw "JoinMessage" config value was previously passed straight to
// translateAlternateColorCodes, which throws NullPointerException on a null value, so the
// null-fallback check after it was unreachable. The null check now happens first.
public PlayerListeners() {
    String configuredTitle = Carbyne.getInstance().getConfig().getString("JoinMessage");
    if (configuredTitle == null)
        configuredTitle = "&5Medieval Lords";
    joinMessage = ChatColor.translateAlternateColorCodes('&', configuredTitle);
    List<String> initSubs = Carbyne.getInstance().getConfig().getStringList("JoinMessageSubtitles");
    subtitles = initSubs.toArray(new String[initSubs.size()]);
    if (subtitles.length < 1 || subtitles[0] == null)
        subtitles = new String[]{};
    for (int i = 0; i < subtitles.length; i++)
        subtitles[i] = ChatColor.translateAlternateColorCodes('&', subtitles[i]);
}
// Plays a join chime for every player and shows the welcome title to first-time players.
// Bug fix: when no subtitles are configured the constructor normalizes the pool to an empty
// array, and indexing it with a random index threw ArrayIndexOutOfBoundsException; the title
// is now sent without a subtitle in that case.
@EventHandler
public void onJoin(PlayerJoinEvent event) {
    Player player = event.getPlayer();
    player.playSound(player.getLocation(), Sound.NOTE_PLING, 1, 1);
    if (!player.hasPlayedBefore()) {
        Title.Builder title = new Title.Builder().title(joinMessage).stay(55);
        if (subtitles.length > 0)
            // NOTE(review): assumes Maths.randomNumberBetween(max, min) yields an index in
            // [0, max) — confirm against the helper's implementation.
            title.subtitle(subtitles[Maths.randomNumberBetween(subtitles.length, 0)]);
        player.sendTitle(title.build());
    }
}
@EventHandler
public void onInventoryOpen(InventoryOpenEvent event) {
for (ItemStack itemStack : event.getInventory().getContents())
if (itemStack != null && itemStack.getMaxStackSize() == 1 && itemStack.getAmount() > 1)
itemStack.setAmount(1);
}
@EventHandler
public void onInventoryClick(InventoryClickEvent event) {
for (ItemStack itemStack : event.getInventory().getContents())
if (itemStack != null && itemStack.getMaxStackSize() == 1 && itemStack.getAmount() > 1)
itemStack.setAmount(1);
}
@EventHandler
public void onClick(PlayerInteractEvent event) {
if (event.getAction() == Action.RIGHT_CLICK_BLOCK && event.getPlayer().getGameMode() == GameMode.CREATIVE) {
if (event.getPlayer().getItemInHand() != null && event.getPlayer().getItemInHand().getType() != Material.AIR) {
ItemStack item = event.getPlayer().getItemInHand();
switch (item.getType()) {
case TRAPPED_CHEST:
case CHEST:
case HOPPER:
case DISPENSER:
case DROPPER:
case FURNACE:
case BREWING_STAND:
net.minecraft.server.v1_8_R3.ItemStack itemStack = CraftItemStack.asNMSCopy(item);
if (itemStack.getTag() != null) {
event.setCancelled(true);
itemStack.setTag(null);
event.getPlayer().setItemInHand(CraftItemStack.asCraftMirror(itemStack));
}
}
}
}
}
//FIX VOTING STUFF (EXP, DIAMONDS)
@EventHandler
public void onVote(VotifierEvent event) {
Player player = Bukkit.getPlayer(event.getVote().getUsername());
if (player != null) {
player.playSound(player.getLocation(), Sound.ORB_PICKUP, 1, .1f);
voteCount++;
if (voteCount % 15 == 0 && voteCount < 100)
MessageManager.broadcastMessage("&f[&3Voting&f]: &5&l" + voteCount + " &aconsecutive votes has been reached! Vote using &3/vote&a!");
ItemStack reward;
reward = StaticClasses.crateManager.getKey("MysticalKey").getItem().clone();
Map<Integer, ItemStack> leftovers = InventoryWorkaround.addItems(player.getInventory(), reward);
if (leftovers.values().size() > 0) {
MessageManager.sendMessage(player, "&cThis item could not fit in your inventory, and was dropped to the ground.");
for (ItemStack itemStack : leftovers.values()) {
Item item = player.getWorld().dropItem(player.getEyeLocation(), itemStack);
item.setVelocity(player.getEyeLocation().getDirection().normalize().multiply(1));
}
}
double anotherRandom = Math.random();
int amount;
if (anotherRandom <= 0.05)
amount = 300;
else if (anotherRandom <= 0.1)
amount = 250;
else if (anotherRandom <= 0.25)
amount = 150;
else
amount = 75;
Account.getAccount(player.getUniqueId()).setBalance(Account.getAccount(player.getUniqueId()).getBalance() + amount);
MessageManager.broadcastMessage("&f[&3Voting&f]: &5" + player.getName() + " &ahas voted and has received a " + reward.getItemMeta().getDisplayName() + "&a, and &c" + MessageManager.format(amount) + "&a! Vote using &3/vote&a!");
MessageManager.sendMessage(player, "&f[&3Voting&f]: &aYou have received a " + reward.getItemMeta().getDisplayName() + "&a! Thank you for voting!");
}
if (voteCount >= 100) {
voteCount = 0;
ItemStack reward = StaticClasses.crateManager.getKey("MysticalKey").getItem().clone();
double anotherRandom = Math.random();
int amount;
if (anotherRandom <= 0.05)
amount = 300;
else if (anotherRandom <= 0.1)
amount = 250;
else if (anotherRandom <= 0.25)
amount = 150;
else
amount = 75;
for (Player online : PlayerUtility.getOnlinePlayers()) {
Map<Integer, ItemStack> leftovers = InventoryWorkaround.addItems(online.getInventory(), reward);
if (leftovers.values().size() > 0) {
MessageManager.sendMessage(online, "&cThis item could not fit in your inventory, and was dropped to the ground.");
for (ItemStack itemStack : leftovers.values()) {
Item item = online.getWorld().dropItem(online.getEyeLocation(), itemStack);
item.setVelocity(online.getEyeLocation().getDirection().normalize().multiply(1));
}
}
Account.getAccount(online.getUniqueId()).setBalance(Account.getAccount(online.getUniqueId()).getBalance() + amount);
}
MessageManager.broadcastMessage("&f[&3Voting&f]: &5&l100 &aconsecutive votes has been reached, everyone online gets 1 " + reward.getItemMeta().getDisplayName() + "&a, and &c" + MessageManager.format(amount) + "&a! Vote using &3/vote&a!");
}
}
@EventHandler
public void onPlayerDeath(PlayerDeathEvent event) {
ItemStack blood = new ItemBuilder(Material.INK_SACK).durability(1).build();
ItemStack blood2 = new ItemBuilder(Material.REDSTONE).build();
ItemStack bone = new ItemBuilder(Material.BONE).build();
Player player = event.getEntity();
player.getWorld().playSound(player.getLocation(), Sound.VILLAGER_HIT, 1, 0.15f);
ParticleEffect.LAVA.display(0, 0, 0, 0, 2, player.getLocation(), 60, false);
List<Item> items = new ArrayList<>();
for (int i = 0; i < 7; i++) {
Item item = player.getWorld().dropItem(player.getLocation(), blood);
Item item2 = player.getWorld().dropItem(player.getLocation(), bone);
Item item3 = player.getWorld().dropItem(player.getLocation(), blood2);
item.setVelocity(new Vector(Maths.randomNumberBetween(0, 10) - 5, Maths.randomNumberBetween(0, 10) - 5, Maths.randomNumberBetween(0, 10) - 5).multiply(1.1));
item2.setVelocity(new Vector(Maths.randomNumberBetween(0, 10) - 5, Maths.randomNumberBetween(0, 10) - 5, Maths.randomNumberBetween(0, 10) - 5).multiply(1.1));
item3.setVelocity(new Vector(Maths.randomNumberBetween(0, 10) - 5, Maths.randomNumberBetween(0, 10) - 5, Maths.randomNumberBetween(0, 10) - 5).multiply(1.1));
item.setPickupDelay(1000000000);
item2.setPickupDelay(1000000000);
item3.setPickupDelay(1000000000);
items.add(item);
items.add(item2);
items.add(item3);
}
new BukkitRunnable() {
private int i = 0;
@Override
public void run() {
for (Item item : items)
if (item.isOnGround()) {
item.getWorld().playSound(item.getLocation(), Sound.LAVA_POP, 1, 1f);
i++;
}
if (items.size() <= 0 || i >= 3)
cancel();
}
}.runTaskTimerAsynchronously(Carbyne.getInstance(), 0, 5);
new BukkitRunnable() {
@Override
public void run() {
for (Item item : items)
item.remove();
items.clear();
}
}.runTaskLater(Carbyne.getInstance(), 150);
}
@EventHandler
public void onTeleport(PlayerTeleportEvent event) {
if (!StaticClasses.staffManager.isVanished(event.getPlayer()))
if (event.getFrom().getWorld().equals(event.getTo().getWorld()) && event.getFrom().distance(event.getTo()) > 10) {
event.getPlayer().playSound(event.getTo(), Sound.ENDERMAN_TELEPORT, .6f, 1);
event.getPlayer().playSound(event.getFrom(), Sound.ENDERMAN_TELEPORT, .6f, 1);
}
}
@EventHandler
public void onWorld(PlayerChangedWorldEvent event) {
event.getPlayer().setAllowFlight(false);
}
@EventHandler
public void onDoubleJump(PlayerToggleFlightEvent event) {
Player player = event.getPlayer();
if (player.getGameMode() == GameMode.CREATIVE)
return;
if (StaticClasses.gamemodeManager.getFlyPlayers().contains(player) || StaticClasses.gamemodeManager.getGmPlayers().contains(player))
return;
PlayerHealth playerHealth = PlayerHealth.getPlayerHealth(player.getUniqueId());
if (playerHealth.getStamina() >= 15 && playerHealth.isSkillsToggled()) {
Board board = Board.getByPlayer(player);
if (board != null) {
BoardCooldown skillCooldown = board.getCooldown("skill");
if (skillCooldown == null) {
playerHealth.setStamina(playerHealth.getStamina() - 15);
event.setCancelled(true);
player.setAllowFlight(false);
float hForce = 15 / 10.0F;
float vForce = 12 / 10.0F;
Vector direction = player.getLocation().getDirection();
Vector forward = direction.multiply(3);
if (playerHealth.isSprintToggled())
forward.multiply(4.5);
Vector vector = player.getLocation().toVector().subtract(player.getLocation().add(0, 3, 0).toVector());
vector.add(forward);
vector.setY(5);
vector.normalize();
vector.multiply(hForce * 0.9);
vector.setY(vForce * 0.9);
player.setVelocity(vector);
new BoardCooldown(board, "skill", 10.0D);
ParticleEffect.CLOUD.display(0.0F, 0.0F, 0.0F, 0.004F, 100, player.getLocation().subtract(0.0, 0.1, 0.0), 15, false);
for (Player all : PlayerUtility.getPlayersInRadius(player.getLocation(), 15))
all.playSound(all.getLocation(), Sound.HORSE_JUMP, 3.0F, 0.533F);
}
} else
event.setCancelled(true);
} else
event.setCancelled(true);
}
@EventHandler
public void onSprintToggle(PlayerToggleSneakEvent event) {
if (!event.isSneaking()) {
if (event.getPlayer().getGameMode() == GameMode.CREATIVE)
return;
if (StaticClasses.gamemodeManager.getFlyPlayers().contains(event.getPlayer()) || StaticClasses.gamemodeManager.getGmPlayers().contains(event.getPlayer()))
return;
PlayerHealth playerHealth = PlayerHealth.getPlayerHealth(event.getPlayer().getUniqueId());
if (System.currentTimeMillis() - playerHealth.getSprintCombo() <= 1000 && playerHealth.isSkillsToggled()) {
if (playerHealth.getStamina() > 6 && !playerHealth.isSprintToggled()) {
if (!event.getPlayer().hasPotionEffect(PotionEffectType.SPEED)) {
playerHealth.setSprintToggled(true);
event.getPlayer().setWalkSpeed(0.4f);
playerHealth.setSprintCombo(0);
new BukkitRunnable() {
@Override
public void run() {
if (!event.getPlayer().isOnline())
cancel();
if (playerHealth.isSprintToggled())
ParticleEffect.SMOKE_LARGE.display(0.0F, 0.0F, 0.0F, 0.03F, 2, event.getPlayer().getLocation().subtract(0.0, 0.1, 0.0), 30, false);
else
cancel();
}
}.runTaskTimerAsynchronously(Carbyne.getInstance(), 0, 1);
MessageManager.sendMessage(event.getPlayer(), "&aSuper Sprint has been enabled!");
} else {
MessageManager.sendMessage(event.getPlayer(), "&cYou cannot use Super Sprint while you have speed.");
}
} else if (playerHealth.isSprintToggled()) {
playerHealth.setSprintToggled(false);
event.getPlayer().setWalkSpeed(0.2f);
playerHealth.setSprintCombo(0);
MessageManager.sendMessage(event.getPlayer(), "&cSuper Sprint has been disabled!");
}
} else {
playerHealth.setSprintCombo(System.currentTimeMillis());
}
}
}
@EventHandler
public void onPiledriveCombo(PlayerInteractEvent event) {
if (event.getAction() == Action.RIGHT_CLICK_AIR || event.getAction() == Action.RIGHT_CLICK_BLOCK) {
Player player = event.getPlayer();
if (player.getItemInHand().getType().toString().contains("SWORD") || player.getItemInHand().getType().toString().contains("AXE") || player.getItemInHand().getType().toString().contains("HOE")) {
PlayerHealth playerHealth = PlayerHealth.getPlayerHealth(player.getUniqueId());
if (playerHealth.getStamina() >= 60 && playerHealth.isSkillsToggled()) {
Board board = Board.getByPlayer(player);
if (board != null) {
BoardCooldown skillCooldown = board.getCooldown("skill");
if (skillCooldown == null) {
if (!playerHealth.isPiledriveBoolReady()) {
if (System.currentTimeMillis() - playerHealth.getPiledriveCombo() <= 1000) {
playerHealth.setPiledriveReady(3);
playerHealth.setPiledriveCombo(0);
playerHealth.setPiledriveBoolReady(true);
MessageManager.sendMessage(player, "&aReady to piledrive! Damage an enemy to initiate!");
} else
playerHealth.setPiledriveCombo(System.currentTimeMillis());
}
}
}
}
}
}
}
@EventHandler(ignoreCancelled = true)
public void onPiledriver(EntityDamageByEntityEvent event) {
if (event.getDamager() instanceof Player && event.getEntity() instanceof Player) {
Player damaged = (Player) event.getEntity();
Player damager = (Player) event.getDamager();
PlayerHealth playerHealth = PlayerHealth.getPlayerHealth(damager.getUniqueId());
if (StaticClasses.duelManager.getDuelFromUUID(damaged.getUniqueId()) != null) {
if (playerHealth.isPiledriveBoolReady() && playerHealth.getStamina() >= 60)
pileDrive(damaged, damager);
} else {
TownBlock townBlock = TownyUniverse.getTownBlock(damaged.getLocation());
if (townBlock != null) {
if (townBlock.getPermissions().pvp)
if (playerHealth.isPiledriveBoolReady() && playerHealth.getStamina() >= 60)
pileDrive(damaged, damager);
} else {
if (playerHealth.isPiledriveBoolReady() && playerHealth.getStamina() >= 60)
pileDrive(damaged, damager);
}
}
}
}
@EventHandler
public void onDismount(EntityDismountEvent event) {
if (event.getEntity() instanceof Player) {
Player player = (Player) event.getEntity();
if (event.getDismounted() instanceof Chicken) {
player.sendTitle(new Title.Builder()
.title("").stay(1)
.subtitle("").stay(1)
.build());
}
}
}
public void pileDrive(Player damaged, Player damager) {
MessageManager.sendMessage(damager, "&aYou have piledrived &5" + damaged.getName() + "&a!");
PlayerHealth damagerPlayer = PlayerHealth.getPlayerHealth(damager.getUniqueId());
PotionEffect potionEffect = new PotionEffect(PotionEffectType.CONFUSION, 100, 2);
PotionEffect potionEffect2 = new PotionEffect(PotionEffectType.BLINDNESS, 60, 2);
PotionEffect potionEffect3 = new PotionEffect(PotionEffectType.SLOW, 80, 2);
damaged.addPotionEffect(potionEffect);
damaged.addPotionEffect(potionEffect2);
damaged.addPotionEffect(potionEffect3);
damagerPlayer.setPiledriveReady(0);
damagerPlayer.setPiledriveBoolReady(false);
damagerPlayer.setStamina(damagerPlayer.getStamina() - 60);
FireworkEffect effect = FireworkEffect.builder().withColor(Color.RED).with(FireworkEffect.Type.BURST).build();
FireworkEffect effect2 = FireworkEffect.builder().withColor(Color.ORANGE).trail(true).withFade(Color.YELLOW).with(FireworkEffect.Type.BURST).build();
InstantFirework.spawn(damaged.getLocation(), effect);
InstantFirework.spawn(damaged.getLocation(), effect2, effect);
PlayerHealth playerHealth = PlayerHealth.getPlayerHealth(damaged.getUniqueId());
playerHealth.setHealth(playerHealth.getHealth() - (playerHealth.getMaxHealth() * 0.02));
new BoardCooldown(Board.getByPlayer(damager), "skill", 10.0D);
damaged.sendTitle(new Title.Builder()
.title(ChatColor.translateAlternateColorCodes('&', "&cYou have been piledriven!")).stay(200)
.subtitle(ChatColor.translateAlternateColorCodes('&', "&4Press SHIFT to counter!")).stay(200)
.build());
if (clear(damaged)) {
Chicken chicken = damaged.getWorld().spawn(damaged.getLocation(), Chicken.class);
chicken.addPotionEffect(new PotionEffect(PotionEffectType.INVISIBILITY, 100000, 100000));
chicken.addPotionEffect(new PotionEffect(PotionEffectType.SLOW, 100000, 100000));
chicken.setPassenger(damaged);
chicken.setMaxHealth(1000.0);
chicken.setHealth(1000.0);
new BukkitRunnable() {
@Override
public void run() {
if (!chicken.isDead()) {
chicken.eject();
chicken.setHealth(0);
}
}
}.runTaskLater(Carbyne.getInstance(), 200L);
}
}
@EventHandler
public void vehicleDismountEvent(VehicleExitEvent event) {
if (event.getExited().getType().equals(EntityType.CHICKEN))
event.getExited().remove();
}
public boolean clear(Player damaged) {
int check = 0;
if (!correctType(damaged.getLocation().clone().add(1, 0, 0).getBlock()))
check += 1;
if (!correctType(damaged.getLocation().clone().subtract(1, 0, 0).getBlock()))
check += 1;
if (!correctType(damaged.getLocation().clone().add(0, 0, 1).getBlock()))
check += 1;
if (!correctType(damaged.getLocation().clone().subtract(0, 0, 1).getBlock()))
check += 1;
if (!correctType(damaged.getLocation().clone().add(1, 0, 0).subtract(0, 0, 1).getBlock()))
check += 10;
if (!correctType(damaged.getLocation().clone().add(1, 0, 1).getBlock()))
check += 10;
if (!correctType(damaged.getLocation().clone().subtract(1, 0, 0).add(0, 0, 1).getBlock()))
check += 1;
if (!correctType(damaged.getLocation().clone().subtract(1, 0, 1).getBlock()))
check += 10;
return check == 0;
}
private boolean correctType(Block check) {
return check.getType() == Material.AIR || check.getType() == Material.LONG_GRASS || check.getType() == Material.RED_ROSE || check.getType() == Material.YELLOW_FLOWER || check.getType() == Material.GRASS;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.servicecomb.serviceregistry.registry.cache;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.function.Function;
import org.apache.servicecomb.foundation.common.Holder;
import org.apache.servicecomb.registry.api.registry.FindInstancesResponse;
import org.apache.servicecomb.registry.api.registry.Microservice;
import org.apache.servicecomb.registry.api.registry.MicroserviceInstance;
import org.apache.servicecomb.registry.api.registry.MicroserviceInstances;
import org.apache.servicecomb.registry.consumer.MicroserviceInstancePing;
import org.apache.servicecomb.serviceregistry.client.ServiceRegistryClient;
import org.apache.servicecomb.serviceregistry.registry.cache.MicroserviceCache.MicroserviceCacheStatus;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import mockit.Mock;
import mockit.MockUp;
public class RefreshableMicroserviceCacheTest {

  // Pluggable stub logic for ServiceRegistryClient#findServiceInstances.
  // Each test overwrites this holder's value to control what the mocked
  // "service center" returns and to assert the exact request parameters.
  private Holder<Function<Object[], MicroserviceInstances>> findServiceInstancesOprHolder = new Holder<>();

  private ServiceRegistryClient srClient;

  // Object under test: caches instances of app "app" / service "svc" in env "env".
  private RefreshableMicroserviceCache microserviceCache;

  // Instance list handed back by the default stub; tests mutate or replace it.
  private List<MicroserviceInstance> pulledInstances = new ArrayList<>();

  private Microservice consumerService;

  @Before
  public void setUp() throws Exception {
    // Mock the registry client so every pull delegates to the holder above.
    // The raw params array is {consumerId, appId, serviceName, versionRule, revision}.
    srClient = new MockUp<ServiceRegistryClient>() {
      @Mock
      MicroserviceInstances findServiceInstances(String consumerId, String appId, String serviceName,
          String versionRule, String revision) {
        return findServiceInstancesOprHolder.value
            .apply(new Object[] {consumerId, appId, serviceName, versionRule, revision});
      }
    }.getMockInstance();
    consumerService = new Microservice();
    consumerService.setServiceId("consumerId");
    microserviceCache = new RefreshableMicroserviceCache(
        consumerService,
        MicroserviceCacheKey.builder().env("env").appId("app").serviceName("svc").build(),
        srClient,
        false);
    // Default stub: a refreshable response at revision "rev0" carrying pulledInstances.
    findServiceInstancesOprHolder.value = params -> {
      MicroserviceInstances microserviceInstances = new MicroserviceInstances();
      microserviceInstances.setNeedRefresh(true);
      microserviceInstances.setRevision("rev0");
      microserviceInstances.setMicroserviceNotExist(false);
      FindInstancesResponse instancesResponse = new FindInstancesResponse();
      instancesResponse.setInstances(pulledInstances);
      microserviceInstances.setInstancesResponse(instancesResponse);
      return microserviceInstances;
    };
  }

  @Test
  public void forceRefresh() {
    MicroserviceInstance microserviceInstance = new MicroserviceInstance();
    microserviceInstance.setInstanceId("instanceId00");
    ArrayList<MicroserviceInstance> instances = new ArrayList<>();
    instances.add(microserviceInstance);
    findServiceInstancesOprHolder.value = params -> {
      Assert.assertEquals("consumerId", params[0]);
      Assert.assertEquals("app", params[1]);
      Assert.assertEquals("svc", params[2]);
      Assert.assertEquals("0.0.0.0+", params[3]);
      // forceRefresh must pull with a null revision even though one is cached.
      Assert.assertNull(params[4]);
      MicroserviceInstances microserviceInstances = new MicroserviceInstances();
      microserviceInstances.setNeedRefresh(true);
      microserviceInstances.setRevision("rev2");
      microserviceInstances.setMicroserviceNotExist(false);
      FindInstancesResponse instancesResponse = new FindInstancesResponse();
      instancesResponse.setInstances(instances);
      microserviceInstances.setInstancesResponse(instancesResponse);
      return microserviceInstances;
    };
    // Pre-seed a stale revision to prove it is ignored by the forced pull.
    microserviceCache.revisionId = "rev";
    microserviceCache.forceRefresh();
    Assert.assertEquals(MicroserviceCacheStatus.REFRESHED, microserviceCache.getStatus());
    List<MicroserviceInstance> cachedInstances = microserviceCache.getInstances();
    Assert.assertEquals(1, cachedInstances.size());
    MicroserviceInstance instance = cachedInstances.iterator().next();
    Assert.assertEquals("instanceId00", instance.getInstanceId());
    Assert.assertEquals("rev2", microserviceCache.getRevisionId());
  }

  @Test
  public void refresh() {
    ArrayList<MicroserviceInstance> instances = new ArrayList<>();
    findServiceInstancesOprHolder.value = params -> {
      Assert.assertEquals("consumerId", params[0]);
      Assert.assertEquals("app", params[1]);
      Assert.assertEquals("svc", params[2]);
      Assert.assertEquals("0.0.0.0+", params[3]);
      // First pull carries no revision yet.
      Assert.assertNull(params[4]);
      MicroserviceInstances microserviceInstances = new MicroserviceInstances();
      microserviceInstances.setNeedRefresh(true);
      microserviceInstances.setRevision("rev0");
      microserviceInstances.setMicroserviceNotExist(false);
      FindInstancesResponse instancesResponse = new FindInstancesResponse();
      instancesResponse.setInstances(instances);
      microserviceInstances.setInstancesResponse(instancesResponse);
      return microserviceInstances;
    };
    // at the beginning, no instances in cache
    List<MicroserviceInstance> cachedInstances = microserviceCache.getInstances();
    Assert.assertEquals(0, cachedInstances.size());
    Assert.assertNull(microserviceCache.getRevisionId());
    // find 1 instance from sc
    MicroserviceInstance microserviceInstance = new MicroserviceInstance();
    instances.add(microserviceInstance);
    microserviceInstance.setInstanceId("instanceId00");
    microserviceCache.refresh();
    Assert.assertEquals(MicroserviceCacheStatus.REFRESHED, microserviceCache.getStatus());
    cachedInstances = microserviceCache.getInstances();
    Assert.assertEquals(1, cachedInstances.size());
    MicroserviceInstance instance = cachedInstances.iterator().next();
    Assert.assertEquals("instanceId00", instance.getInstanceId());
    Assert.assertEquals("rev0", microserviceCache.getRevisionId());
    // 2nd time, find 2 instances, one of them is the old instance
    MicroserviceInstance microserviceInstance1 = new MicroserviceInstance();
    instances.add(microserviceInstance1);
    microserviceInstance1.setInstanceId("instanceId01");
    findServiceInstancesOprHolder.value = params -> {
      Assert.assertEquals("consumerId", params[0]);
      Assert.assertEquals("app", params[1]);
      Assert.assertEquals("svc", params[2]);
      Assert.assertEquals("0.0.0.0+", params[3]);
      // Second pull must carry the revision obtained from the first pull.
      Assert.assertEquals("rev0", params[4]);
      MicroserviceInstances microserviceInstances = new MicroserviceInstances();
      microserviceInstances.setNeedRefresh(true);
      microserviceInstances.setRevision("rev1");
      microserviceInstances.setMicroserviceNotExist(false);
      FindInstancesResponse instancesResponse = new FindInstancesResponse();
      instancesResponse.setInstances(instances);
      microserviceInstances.setInstancesResponse(instancesResponse);
      return microserviceInstances;
    };
    microserviceCache.refresh();
    Assert.assertEquals(MicroserviceCacheStatus.REFRESHED, microserviceCache.getStatus());
    cachedInstances = microserviceCache.getInstances();
    Assert.assertEquals(2, cachedInstances.size());
    Assert.assertEquals("instanceId00", cachedInstances.get(0).getInstanceId());
    Assert.assertEquals("instanceId01", cachedInstances.get(1).getInstanceId());
  }

  @Test
  public void refresh_service_error() {
    // A null response from the client marks the cache CLIENT_ERROR and
    // must leave the previously cached instance list untouched (same object).
    findServiceInstancesOprHolder.value = params -> null;
    List<MicroserviceInstance> oldInstanceList = microserviceCache.getInstances();
    microserviceCache.refresh();
    Assert.assertEquals(MicroserviceCacheStatus.CLIENT_ERROR, microserviceCache.getStatus());
    Assert.assertSame(oldInstanceList, microserviceCache.getInstances());
  }

  @Test
  public void refresh_service_not_exist() {
    // "microservice not exist" marks SERVICE_NOT_FOUND, keeps the old list.
    findServiceInstancesOprHolder.value = params -> {
      MicroserviceInstances microserviceInstances = new MicroserviceInstances();
      microserviceInstances.setMicroserviceNotExist(true);
      return microserviceInstances;
    };
    List<MicroserviceInstance> oldInstanceList = microserviceCache.getInstances();
    microserviceCache.refresh();
    Assert.assertEquals(MicroserviceCacheStatus.SERVICE_NOT_FOUND, microserviceCache.getStatus());
    Assert.assertSame(oldInstanceList, microserviceCache.getInstances());
  }

  @Test
  public void refresh_service_no_change() {
    // needRefresh=false means the revision is unchanged: status NO_CHANGE,
    // cached list object preserved.
    findServiceInstancesOprHolder.value = params -> {
      MicroserviceInstances microserviceInstances = new MicroserviceInstances();
      microserviceInstances.setMicroserviceNotExist(false);
      microserviceInstances.setNeedRefresh(false);
      return microserviceInstances;
    };
    List<MicroserviceInstance> oldInstanceList = microserviceCache.getInstances();
    microserviceCache.refresh();
    Assert.assertEquals(MicroserviceCacheStatus.NO_CHANGE, microserviceCache.getStatus());
    Assert.assertSame(oldInstanceList, microserviceCache.getInstances());
  }

  @Test
  public void refresh_error_in_setInstances() {
    // Subclass whose merge step always fails, to exercise SETTING_CACHE_ERROR.
    microserviceCache = new RefreshableMicroserviceCache(
        consumerService,
        MicroserviceCacheKey.builder().env("env").appId("app").serviceName("svc").build(),
        srClient,
        false) {
      @Override
      protected Set<MicroserviceInstance> mergeInstances(List<MicroserviceInstance> pulledInstances) {
        throw new IllegalStateException("a mock exception");
      }
    };
    List<MicroserviceInstance> oldInstanceList = microserviceCache.getInstances();
    Assert.assertEquals(MicroserviceCacheStatus.INIT, microserviceCache.getStatus());
    microserviceCache.refresh();
    Assert.assertEquals(MicroserviceCacheStatus.SETTING_CACHE_ERROR, microserviceCache.getStatus());
    List<MicroserviceInstance> newInstanceList = microserviceCache.getInstances();
    Assert.assertEquals(0, newInstanceList.size());
    Assert.assertSame(oldInstanceList, newInstanceList);
  }

  @Test
  public void refresh_empty_instance_protection_disabled() {
    // Protection off (constructor arg false): an empty pull wipes the cache.
    microserviceCache.instances = new ArrayList<>();
    MicroserviceInstance instance0 = new MicroserviceInstance();
    instance0.setInstanceId("instanceId0");
    microserviceCache.instances.add(instance0);
    pulledInstances = new ArrayList<>();
    microserviceCache.refresh();
    Assert.assertEquals(MicroserviceCacheStatus.REFRESHED, microserviceCache.getStatus());
    Assert.assertEquals(0, microserviceCache.getInstances().size());
  }

  @Test
  public void refresh_empty_instance_protection_enabled() {
    // Protection on + a ping that reports the old instance alive: an empty
    // pull must NOT evict the still-reachable cached instance.
    microserviceCache.setEmptyInstanceProtectionEnabled(true);
    microserviceCache.instancePing = new MicroserviceInstancePing() {
      @Override
      public int getOrder() {
        return 0;
      }

      @Override
      public boolean ping(MicroserviceInstance instance) {
        return true;
      }
    };
    microserviceCache.instances = new ArrayList<>();
    MicroserviceInstance instance0 = new MicroserviceInstance();
    instance0.setInstanceId("instanceId0");
    microserviceCache.instances.add(instance0);
    pulledInstances = new ArrayList<>();
    microserviceCache.refresh();
    Assert.assertEquals(MicroserviceCacheStatus.REFRESHED, microserviceCache.getStatus());
    Assert.assertEquals(1, microserviceCache.getInstances().size());
    Assert.assertEquals("instanceId0", microserviceCache.getInstances().get(0).getInstanceId());
  }

  @Test
  public void set_consumer_service_id() {
    // The cache must re-read the consumer serviceId on every pull, so a change
    // on the shared Microservice object is reflected in the next request.
    Holder<Integer> assertCounter = new Holder<>(0);
    Function<Object[], MicroserviceInstances> preservedLogic = findServiceInstancesOprHolder.value;
    findServiceInstancesOprHolder.value = params -> {
      Assert.assertEquals("consumerId", params[0]);
      assertCounter.value++;
      return preservedLogic.apply(params);
    };
    microserviceCache.refresh();
    consumerService.setServiceId("consumerId2");
    findServiceInstancesOprHolder.value = params -> {
      Assert.assertEquals("consumerId2", params[0]);
      assertCounter.value++;
      return preservedLogic.apply(params);
    };
    microserviceCache.refresh();
    // Both stub invocations must actually have run their assertions.
    Assert.assertEquals(Integer.valueOf(2), assertCounter.value);
  }
}
| |
/*
* #%L
* =====================================================
* _____ _ ____ _ _ _ _
* |_ _|_ __ _ _ ___| |_ / __ \| | | | ___ | | | |
* | | | '__| | | / __| __|/ / _` | |_| |/ __|| |_| |
* | | | | | |_| \__ \ |_| | (_| | _ |\__ \| _ |
* |_| |_| \__,_|___/\__|\ \__,_|_| |_||___/|_| |_|
* \____/
*
* =====================================================
*
* Hochschule Hannover
* (University of Applied Sciences and Arts, Hannover)
* Faculty IV, Dept. of Computer Science
* Ricklinger Stadtweg 118, 30459 Hannover, Germany
*
* Email: trust@f4-i.fh-hannover.de
* Website: http://trust.f4.hs-hannover.de/
*
* This file is part of visitmeta-visualization, version 0.6.0,
* implemented by the Trust@HsH research group at the Hochschule Hannover.
* %%
* Copyright (C) 2012 - 2016 Trust@HsH
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package de.hshannover.f4.trust.visitmeta.graphCalculator.jung;
import java.awt.Dimension;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import org.apache.log4j.Logger;
import de.hshannover.f4.trust.visitmeta.graphCalculator.MetadataCollocation;
import edu.uci.ics.jung.algorithms.layout.StaticLayout;
/**
* <p>A Bipartite-Layout adapted to the specific structure of MAP graphs
* (with identifier nodes connected via metadata nodes).</p>
* <p>This implementation uses the class StaticLayout of JUNG2 for consistency.
* The layout algorithm itself has been developed by Trust@HsH group.</p>
*
* <p>Layout approach: Extended bipartite layout with 5 columns (numbered 0 to 4 from left to right) with</p>
* <ul>
* <li>identifier nodes in columns 1 and 3,</li>
* <li>metadata nodes attached to links in column 2 (center), and</li>
* <li>metadata nodes attached to identifiers in columns 0 and 4.</li>
* </ul>
* <p>The graph is traversed using depth-first or breadth-first traversal.</p>
*
* <p>Note: This approach only works for MetadataCollocation.FORK.
* The metadata collocation is therefore altered to FORK at construction.</p>
*
* @author vahlers
*/
public class LayoutBipartite2D extends Layout2D {
// ///////////////////////////////////////////////////////////////////////////////////// MEMBERS
private static final Logger LOGGER = Logger.getLogger(LayoutForceDirected2D.class);
protected StaticLayout<Node2D, Edge2D> mLayout; // JUNG layout class used for consistency
private boolean mUseDFS; // true: depth-first traversal (default), false: breadth-first traversal
private int mXDir; // current horizontal layout direction (-1: left, +1: right)
private double mYOffset; // vertical offset between rows
private double[] mXPositions; // horizontal positions of node columns
private double[] mYPositions; // vertical positions of next nodes in columns
private List<Node2D> mDrawnNodes; // list of nodes that have already been drawn
// //////////////////////////////////////////////////////////////////////////////// CONSTRUCTORS
public LayoutBipartite2D(Graph2D graph){
super(graph, true);
mDimension = new Dimension(1_000_000_000, 1_000_000_000);
mLayout = new StaticLayout<Node2D, Edge2D>(mGraph.getGraph());
mLayout.setSize(mDimension);
mUseDFS = true;
mYOffset = 0.06;
mXPositions = new double[] { 0.2, 0.4, 0.8, 1.2, 1.4 };
mYPositions = new double[5];
mDrawnNodes = new ArrayList<Node2D>();
// ensure that metadata collocation is set to FORK
mGraph.alterMetadataCollocationForEntireGraph(MetadataCollocation.FORK);
}
// ////////////////////////////////////////////////////////////////////////////////////// PUBLIC
	/**
	 * Get graph traversal method used by {@link #adjust(int)}.
	 *
	 * @return true: depth-first, false: breadth-first
	 */
	public boolean useDepthFirstTraversal() {
		return mUseDFS;
	}
	/**
	 * Set graph traversal method; takes effect on the next {@link #adjust(int)} call.
	 *
	 * @param useDFS true: depth-first, false: breadth-first
	 */
	public void useDepthFirstTraversal(boolean useDFS) {
		mUseDFS = useDFS;
	}
// /////////////////////////////////////////////////////////////////////////////////////// SUPER
/**
 * Adjust layout.
 * Positions every identifier node and, transitively, the metadata nodes and
 * neighbors reachable from it, one graph component at a time.
 * @param iterations unused for bipartite layout
 */
@Override
public void adjust(int iterations) {
    LOGGER.trace("Method adjust(" + iterations + ") called.");
    mXDir = -1; // start laying out towards the left
    Arrays.fill(mYPositions, mYOffset); // reset all column y-cursors to the top margin
    mDrawnNodes.clear();
    Collection<Node2D> nodes = mGraph.getGraph().getVertices();
    for (Node2D node2D : nodes) {
        if (node2D instanceof NodeIdentifier2D) {
            // no-op for identifiers already drawn as part of a previous component
            drawNodeIdentifier((NodeIdentifier2D) node2D, null);
            // start next graph component in column 1 (left)
            mXDir = -1;
            mYPositions[1] += mYOffset; // extra vertical gap between components
        }
    }
}
/** No-op: uniform edge length is only meaningful for force-based layouts such as SpringLayout. */
@Override
public void calculateUniformEdgeLength() {
    LOGGER.trace("Method calculateUniformEdgeLength() called.");
    // DO NOTHING!!!
    // Only necessary for SpringLayout.
}
/** Reset the backing JUNG layout. */
@Override
public void reset() {
    LOGGER.trace("Method reset() called.");
    mLayout.reset();
}
/**
 * Set a node's position directly in the backing layout.
 * @param node2D node to move
 * @param x new x coordinate
 * @param y new y coordinate
 */
@Override
public void setNodePosition(Node2D node2D, double x, double y){
    LOGGER.trace("Method setNodePosition(" + node2D + ", " + x + ", " + y + ") called.");
    mLayout.setLocation(node2D, x, y);
}
/**
 * @param node2D node to query
 * @return the node's x coordinate in the backing layout
 */
@Override
public double getNodePositionX(Node2D node2D){
    LOGGER.trace("Method getNodePositionX("+node2D+") called.");
    return mLayout.getX(node2D);
}
/**
 * @param node2D node to query
 * @return the node's y coordinate in the backing layout
 */
@Override
public double getNodePositionY(Node2D node2D){
    LOGGER.trace("Method getNodePositionY("+node2D+") called.");
    return mLayout.getY(node2D);
}
/** Lock a single node so the layout no longer moves it. */
@Override
public void lockNode(Node2D node2D){
    LOGGER.trace("Method lockNode("+node2D+") called.");
    mLayout.lock(node2D, true);
}
/** Unlock a single node so the layout may move it again. */
@Override
public void unlockNode(Node2D node2D){
    LOGGER.trace("Method unlockNode("+node2D+") called.");
    mLayout.lock(node2D, false);
}
/** Lock every node in the backing layout. */
@Override
public void lockAllNodes(){
    LOGGER.trace("Method lockAllNodes() called.");
    mLayout.lock(true);
}
/** Unlock every node in the backing layout. */
@Override
public void unlockAllNodes(){
    LOGGER.trace("Method unlockAllNodes() called.");
    mLayout.lock(false);
}
/** @return always 0; iteration counts only apply to iterative layouts */
@Override
public int getMaxIterations() {
    LOGGER.trace("Method getMaxIterations() called.");
    // DO NOTHING!!!
    // Only necessary for iterative layouts.
    return 0;
}
/** No-op: iteration counts only apply to iterative layouts. */
@Override
public void setMaxIterations(int maxIterations) {
    LOGGER.trace("Method setMaxIterations(" + maxIterations + ") called.");
    // DO NOTHING!!!
    // Only necessary for iterative layouts.
}
// ////////////////////////////////////////////////////////////////////////////////////// PRIVATE
/**
 * Draw identifier node, dispatching to the configured traversal strategy.
 * @param nodeId2D identifier node to be drawn
 * @param nodeMeOld neighboring metadata node that has just been drawn (or null for first identifier node)
 */
private void drawNodeIdentifier(NodeIdentifier2D nodeId2D, NodeMetadata2D nodeMeOld) {
    if (!mUseDFS) {
        // breadth-first traversal
        drawNodeIdentifierBFS(nodeId2D, nodeMeOld);
        return;
    }
    // depth-first traversal (default)
    drawNodeIdentifierDFS(nodeId2D, nodeMeOld);
}
/**
 * Draw identifier node with depth-first traversal of attached metadata nodes (recursive implementation).
 * Each neighbor is traversed immediately after being drawn, before the next neighbor is considered.
 * NOTE(review): large overlap with drawNodeIdentifierBFS - candidate for extracting the shared drawing code.
 * @param nodeId2D identifier node to be drawn
 * @param nodeMeOld neighboring metadata node that has just been drawn (or null for first identifier node)
 */
private void drawNodeIdentifierDFS(NodeIdentifier2D nodeId2D, NodeMetadata2D nodeMeOld) {
    // check if node has already been drawn
    if (mDrawnNodes.contains(nodeId2D)) {
        return;
    }
    // draw identifier node
    int colIdx = 2 + mXDir; // column 1 (left) or column 3 (right)
    if (nodeMeOld != null) {
        // never place the identifier above the metadata node it was reached from
        mYPositions[colIdx] = Math.max(getNodePositionY(nodeMeOld) / getDimensionY(), mYPositions[colIdx]);
    }
    if (nodeId2D.hasAdjustPermission() && !nodeId2D.wasPicked()) {
        nodeId2D.setPositionTriggeredByJung(mXPositions[colIdx] * getDimensionX(), mYPositions[colIdx] * getDimensionY());
    }
    mYPositions[colIdx] += mYOffset;
    mDrawnNodes.add(nodeId2D);
    // first draw metadata nodes which are directly attached to identifier
    List<NodeMetadata2D> nodesMe = nodeId2D.getNodesMetadata2D();
    for (NodeMetadata2D nodeMe2D : nodesMe) {
        drawNodeMetadata(nodeMe2D, nodeId2D);
    }
    // draw all neighbors, incl. metadata nodes attached to links;
    // each neighbor is traversed right after it is drawn (depth-first)
    Collection<Node2D> neighbors = mGraph.getGraph().getNeighbors(nodeId2D);
    for (Node2D neighbor2D : neighbors) {
        assert neighbor2D instanceof NodeMetadata2D; // neighbor of identifier must be metadata
        drawNodeMetadata((NodeMetadata2D) neighbor2D, nodeId2D);
        traverseNodeMetadata((NodeMetadata2D) neighbor2D);
    }
}
/**
 * Draw identifier node with breadth-first traversal of attached metadata nodes (recursive implementation).
 * Unlike the DFS variant, all neighboring metadata nodes are drawn first and only then traversed.
 * TODO: Improve layout results of breadth-first traversal. <VA> 2014-08-05
 * NOTE(review): large overlap with drawNodeIdentifierDFS - candidate for extracting the shared drawing code.
 * @param nodeId2D identifier node to be drawn
 * @param nodeMeOld neighboring metadata node that has just been drawn (or null for first identifier node)
 */
private void drawNodeIdentifierBFS(NodeIdentifier2D nodeId2D, NodeMetadata2D nodeMeOld) {
    // check if node has already been drawn
    if (mDrawnNodes.contains(nodeId2D)) {
        return;
    }
    // draw identifier node
    int colIdx = 2 + mXDir; // column 1 (left) or column 3 (right)
    if (nodeMeOld != null) {
        // never place the identifier above the metadata node it was reached from
        mYPositions[colIdx] = Math.max(getNodePositionY(nodeMeOld) / getDimensionY(), mYPositions[colIdx]);
    }
    if (nodeId2D.hasAdjustPermission() && !nodeId2D.wasPicked()) {
        nodeId2D.setPositionTriggeredByJung(mXPositions[colIdx] * getDimensionX(), mYPositions[colIdx] * getDimensionY());
    }
    mYPositions[colIdx] += mYOffset;
    mDrawnNodes.add(nodeId2D);
    // first draw metadata nodes which are directly attached to identifier
    List<NodeMetadata2D> nodesMe = nodeId2D.getNodesMetadata2D();
    for (NodeMetadata2D nodeMe2D : nodesMe) {
        drawNodeMetadata(nodeMe2D, nodeId2D);
    }
    // draw all neighbors, incl. metadata nodes attached to links
    Collection<Node2D> neighbors = mGraph.getGraph().getNeighbors(nodeId2D);
    List<NodeMetadata2D> nodesToTraverse = new ArrayList<NodeMetadata2D>();
    for (Node2D neighbor2D : neighbors) {
        assert neighbor2D instanceof NodeMetadata2D; // neighbor of identifier must be metadata
        NodeMetadata2D nodeMe2D = (NodeMetadata2D) neighbor2D;
        drawNodeMetadata(nodeMe2D, nodeId2D);
        nodesToTraverse.add(nodeMe2D);
    }
    // traverse those neighbors that are metadata nodes attached to links
    for (NodeMetadata2D nodeMe2D : nodesToTraverse) {
        traverseNodeMetadata(nodeMe2D);
    }
}
/**
 * Draw metadata node.
 * Metadata attached to a link goes to the center column (2); metadata attached
 * directly to an identifier goes to the outer column on the current side (0 or 4).
 * Metadata attached to neither is left untouched (same as the original behavior).
 * @param nodeMe2D metadata node to be drawn
 * @param nodeIdOld neighboring identifier node that has just been drawn
 */
private void drawNodeMetadata(NodeMetadata2D nodeMe2D, NodeIdentifier2D nodeIdOld) {
    if (mDrawnNodes.contains(nodeMe2D)) {
        return; // already positioned in this pass
    }
    assert nodeMe2D.getExpandedLink2D() != null || nodeMe2D.getNodeIdentifier2D() != null; // metadata must be attached to either link or identifier
    // determine the target column; the original duplicated the drawing code below in both branches
    final int colIdx;
    if (nodeMe2D.getExpandedLink2D() != null) { // metadata node attached to link -> column 2 (center)
        colIdx = 2;
    }
    else if (nodeMe2D.getNodeIdentifier2D() != null) { // metadata node attached to identifier
        colIdx = 2 + 2 * mXDir; // column 0 (left) or column 4 (right)
    }
    else {
        return; // unattached metadata (only reachable with assertions disabled): nothing to draw
    }
    // never place the metadata node above the identifier it was reached from
    mYPositions[colIdx] = Math.max(getNodePositionY(nodeIdOld) / getDimensionY(), mYPositions[colIdx]);
    if (nodeMe2D.hasAdjustPermission() && !nodeMe2D.wasPicked()) {
        nodeMe2D.setPositionTriggeredByJung(mXPositions[colIdx] * getDimensionX(), mYPositions[colIdx] * getDimensionY());
    }
    mYPositions[colIdx] += mYOffset;
    mDrawnNodes.add(nodeMe2D);
}
/**
 * Traverse metadata node attached to link, i.e., draw the identifier nodes on its far side.
 * Metadata attached directly to an identifier is not traversable and is skipped.
 * @param nodeMe2D metadata node to be traversed
 */
private void traverseNodeMetadata(NodeMetadata2D nodeMe2D) {
    if (nodeMe2D.getExpandedLink2D() == null) {
        return; // metadata nodes attached to identifiers cannot be traversed
    }
    // flip the horizontal direction while drawing the far side, then flip back
    mXDir = -mXDir;
    for (Node2D neighbor : mGraph.getGraph().getNeighbors(nodeMe2D)) {
        assert neighbor instanceof NodeIdentifier2D; // neighbor of metadata must be identifier
        drawNodeIdentifier((NodeIdentifier2D) neighbor, nodeMe2D);
    }
    mXDir = -mXDir;
}
}
| |
package edu.mssm.pharm.maayanlab.Enrichr;
import java.io.File;
import java.text.DecimalFormat;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.swing.SwingWorker;
import edu.mssm.pharm.maayanlab.Enrichr.ResourceLoader.EnrichmentCategory;
import edu.mssm.pharm.maayanlab.Enrichr.ResourceLoader.EnrichmentLibrary;
import edu.mssm.pharm.maayanlab.common.bio.EnrichedTerm;
import edu.mssm.pharm.maayanlab.common.bio.FuzzyGeneSetLibrary;
import edu.mssm.pharm.maayanlab.common.bio.GeneSetLibrary;
import edu.mssm.pharm.maayanlab.common.core.FileUtils;
import edu.mssm.pharm.maayanlab.common.core.Settings;
import edu.mssm.pharm.maayanlab.common.core.SettingsChanger;
import edu.mssm.pharm.maayanlab.common.core.SimpleXMLWriter;
/**
 * Batch front-end for Enrichr enrichment analysis: runs a gene list against all
 * enabled enrichment libraries (or a custom background file) and writes the
 * results as an XML summary plus one TSV file per background library.
 */
public class EnrichrBatcher implements SettingsChanger {

    private static Logger log = Logger.getLogger("maayanlab");

    // progress tracking (only reported when a SwingWorker task is attached via setTask)
    private SwingWorker<Void, Void> task = null;
    private int progress = 0;
    private String note = "";
    private boolean isCancelled = false;

    // Default settings: enable every library of every known enrichment category
    private final Settings settings = new Settings() {
        {
            for (EnrichmentCategory category : ResourceLoader.getInstance().getCategories()) {
                for (EnrichmentLibrary library : category.getLibraries()) {
                    set(library.getName(), true);
                }
            }
        }
    };

    // Formatter for p-values written to the XML output
    private final DecimalFormat scientificNotation = new DecimalFormat("0.##E0");

    // Output header
    public static final String HEADER = "Term\tOverlap\tP-value\tGenes";

    // Enrichment results keyed by background library name
    private HashMap<String, ArrayList<EnrichedTerm>> resultsMap = new HashMap<String, ArrayList<EnrichedTerm>>(18);

    /**
     * CLI entry point.
     * Two args: gene list (file or directory) and output (file, directory or name template).
     * Three args: gene list, custom background file, output file.
     * Four args: gene list, custom background file, is_fuzzy flag, output file.
     */
    public static void main(String[] args) {
        // Set logger display level
        if (!Boolean.getBoolean("verbose"))
            log.setLevel(Level.WARNING);
        if (args.length == 2) {
            EnrichrBatcher eb = new EnrichrBatcher();
            File inputDir = new File(args[0]);
            File outputDir = new File(args[1]);
            if (inputDir.isDirectory()) {
                // batch mode: process every file of the input directory
                for (File child : inputDir.listFiles()) {
                    eb.run(child.getAbsolutePath());
                    if (outputDir.isDirectory()) {
                        eb.writeFile(new File(outputDir, FileUtils.stripFileExtension(child.getName()) + ".enrichment.xml").getAbsolutePath());
                    }
                    else {
                        // second argument is treated as an output name template, not a directory
                        eb.writeFile(new File(outputDir.getParent(), FileUtils.stripFileExtension(child.getName()) + "." + outputDir.getName()).getAbsolutePath());
                    }
                }
            }
            else {
                eb.run(args[0]);
                eb.writeFile(args[1]);
            }
        }
        else if (args.length == 3) {
            EnrichrBatcher eb = new EnrichrBatcher();
            eb.run(args[0], args[1], false, args[2]);
        }
        else if (args.length == 4) {
            EnrichrBatcher eb = new EnrichrBatcher();
            eb.run(args[0], args[1], Boolean.parseBoolean(args[2]), args[3]);
        }
        else
            log.warning("Usage: java -jar Enrichr.jar gene_list [background_file is_fuzzy] output");
    }

    // By default, load settings from file
    public EnrichrBatcher() {
        settings.loadSettings();
    }

    // Load external settings, primarily for use with X2K
    public EnrichrBatcher(Settings externalSettings) {
        settings.loadSettings(externalSettings);
    }

    /** Attach a SwingWorker so progress/note property changes can be fired. */
    public void setTask(SwingWorker<Void, Void> task) {
        this.task = task;
    }

    /**
     * Report progress to the attached task, if any.
     * @throws InterruptedException if {@link #cancel()} has been called
     */
    private void setProgress(int progress, String note) throws InterruptedException {
        if (task != null) {
            if (isCancelled)
                throw new InterruptedException("Task cancelled at " + progress + "%!");
            task.firePropertyChange("progress", this.progress, progress);
            task.firePropertyChange("note", this.note, note);
            this.progress = progress;
            this.note = note;
        }
    }

    /** Request cancellation; takes effect at the next setProgress call. */
    public void cancel() {
        isCancelled = true;
    }

    @Override
    // Used for other methods to set settings
    public void setSetting(String key, String value) {
        settings.set(key, value);
    }

    /** @return enrichment results keyed by background library name */
    public HashMap<String, ArrayList<EnrichedTerm>> getEnrichmentResults() {
        return resultsMap;
    }

    /**
     * Write the XML summary (top-10 overall plus top-10 per background library)
     * and one TSV file per background library sharing the XML file's prefix.
     * @param filename path of the XML summary file
     */
    public void writeFile(String filename) {
        // Prefix for individual files
        String outputPrefix = filename.replaceFirst("\\.\\w+$", "");
        SimpleXMLWriter sxw = new SimpleXMLWriter(filename);
        sxw.startPlainElement("Enrichment");
        sxw.startPlainElement("Summary");
        LinkedList<EnrichedTerm> combinedTerms = new LinkedList<EnrichedTerm>();
        for (ArrayList<EnrichedTerm> termList : resultsMap.values())
            combinedTerms.addAll(termList);
        Collections.sort(combinedTerms);
        // Filter down to top 10
        while (combinedTerms.size() > 10)
            combinedTerms.removeLast();
        for (EnrichedTerm term : combinedTerms)
            sxw.listElement("Term", term.getName(), "p-value", scientificNotation.format(term.getPValue()));
        sxw.endElement();
        for (String bgType : resultsMap.keySet()) {
            // Write XML summary output
            sxw.startElementWithAttributes("Background", "name", bgType);
            int i = 1;
            for (EnrichedTerm term : resultsMap.get(bgType)) {
                sxw.listElement("Term", term.getName(), "p-value", scientificNotation.format(term.getPValue()));
                // Stop after 10 entries
                if (i++ >= 10)
                    break;
            }
            sxw.endElement();
            // Write individual enrichment tsv outputs
            FileUtils.writeFile(outputPrefix + "_" + bgType + ".txt", Enrichment.HEADER, resultsMap.get(bgType));
        }
        sxw.close();
    }

    /**
     * Run from cli with custom database.
     * Exits the JVM with status -1 if the gene list cannot be parsed (CLI path only).
     */
    public void run(String geneList, String backgroundFile, boolean isFuzzy, String outputFile) {
        GeneSetLibrary geneSetLibrary;
        log.info("Running with custom database");
        try {
            Enrichment app = new Enrichment(FileUtils.readFile(geneList), true);
            if (isFuzzy)
                geneSetLibrary = new FuzzyGeneSetLibrary(FileUtils.readFile(backgroundFile));
            else
                geneSetLibrary = new GeneSetLibrary(FileUtils.readFile(backgroundFile));
            FileUtils.writeFile(outputFile, Enrichment.HEADER, app.enrich(geneSetLibrary));
        } catch (ParseException e) {
            log.warning(e.getMessage());
            System.exit(-1);
        }
    }

    /** Run for a gene list file; parse failures are logged, not rethrown. */
    public void run(String geneList) {
        ArrayList<String> inputList = FileUtils.readFile(geneList);
        try {
            run(inputList);
        } catch (ParseException e) {
            log.warning(e.getMessage());
        }
    }

    /** Run for calling from other methods, passing in the gene list directly. */
    public void run(Collection<String> geneList) throws ParseException {
        // collect the names of all enabled background libraries
        LinkedList<String> bgList = new LinkedList<String>();
        for (EnrichmentCategory category : ResourceLoader.getInstance().getCategories()) {
            for (EnrichmentLibrary library : category.getLibraries()) {
                if (settings.getBoolean(library.getName())) {
                    bgList.add(library.getName());
                }
            }
        }
        try {
            setProgress(0, "Enriching terms...");
            computeEnrichment(bgList, geneList);
            setProgress(95, "Writing results...");
        } catch (InterruptedException e) {
            // cancellation is not an error: log and stop
            log.info(e.getMessage());
        }
    }

    /**
     * Run enrichment against every listed background library and store
     * non-empty results in {@link #resultsMap}.
     * @param backgroundList names of the background libraries to enrich against
     * @param geneList input gene list
     * @throws InterruptedException if the task was cancelled
     * @throws ParseException if the gene list cannot be parsed
     */
    public void computeEnrichment(LinkedList<String> backgroundList, Collection<String> geneList) throws InterruptedException, ParseException {
        int iteration = 0;
        // guard against division by zero when no background libraries are enabled
        int increment = backgroundList.isEmpty() ? 0 : 80 / backgroundList.size();
        Enrichment app = new Enrichment(geneList, true);
        for (String bgType : backgroundList) {
            // let InterruptedException propagate as-is; the original wrapped it in a
            // new InterruptedException(e.getMessage()), discarding the stack trace
            setProgress(5 + increment * iteration, bgType.replace("_", " ") + " enrichment...");
            iteration++;
            ArrayList<EnrichedTerm> resultTerms = app.enrich(bgType);
            // Only add to results if there are actual results
            if (!resultTerms.isEmpty())
                resultsMap.put(bgType, resultTerms);
        }
    }
}
| |
/*
* Copyright 2012 NGDATA nv
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.lilyproject.indexer.derefmap;
import java.io.IOException;
import java.util.Collection;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import com.google.common.collect.Sets;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.util.Bytes;
import org.lilyproject.hbaseindex.Index;
import org.lilyproject.hbaseindex.IndexDefinition;
import org.lilyproject.hbaseindex.IndexEntry;
import org.lilyproject.hbaseindex.IndexManager;
import org.lilyproject.hbaseindex.IndexNotFoundException;
import org.lilyproject.hbaseindex.Query;
import org.lilyproject.hbaseindex.QueryResult;
import org.lilyproject.repository.api.AbsoluteRecordId;
import org.lilyproject.repository.api.IdGenerator;
import org.lilyproject.repository.api.RecordId;
import org.lilyproject.repository.api.SchemaId;
import org.lilyproject.util.hbase.HBaseTableFactory;
import org.lilyproject.util.io.Closer;
/**
 * HBase-backed implementation of {@link DerefMap}. Maintains two hbase-index
 * tables: a forward index from a dependant record (+ vtag) to the dependencies
 * it references, and a backward index from a dependency's master record id to
 * the dependant records that reference it.
 */
public class DerefMapHbaseImpl implements DerefMap {

    private static final byte[] DEPENDENCIES_KEY = Bytes.toBytes("dependencies");
    private static final byte[] FIELDS_KEY = Bytes.toBytes("fields");
    // forward-index entries carry their payload in the entry data, not the identifier
    private static final byte[] DUMMY_IDENTIFIER = new byte[]{0};

    private Index forwardDerefIndex;
    private Index backwardDerefIndex;
    private DerefMapSerializationUtil serializationUtil;

    /**
     * Private constructor. Clients should use static factory methods {@link #delete(String,
     * org.apache.hadoop.conf.Configuration)} and {@link #create(String, Configuration, HBaseTableFactory,
     * IdGenerator)}
     */
    private DerefMapHbaseImpl(final String owningRepoName, final String indexName, final Configuration hbaseConfiguration,
                              final HBaseTableFactory tableFactory, final IdGenerator idGenerator)
            throws IndexNotFoundException, IOException, InterruptedException {
        this.serializationUtil = new DerefMapSerializationUtil(idGenerator);
        final IndexManager indexManager = new IndexManager(hbaseConfiguration, tableFactory);
        IndexDefinition forwardIndexDef = new IndexDefinition(forwardIndexName(indexName));
        // For the record ID we use a variable length byte array field of which the first two bytes are fixed length
        // The first byte is actually the record identifier byte.
        // The second byte really is the first byte of the record id. We put this in the fixed length part
        // (safely because a record id should at least be a single byte long) because this prevents BCD encoding
        // on the first byte, thus making it easier to configure table splitting based on the original input.
        forwardIndexDef.addVariableLengthByteField("dependant_recordid", 2);
        forwardIndexDef.addByteField("dependant_vtag", DerefMapSerializationUtil.SCHEMA_ID_BYTE_LENGTH);
        forwardDerefIndex = indexManager.getIndex(owningRepoName, forwardIndexDef);
        IndexDefinition backwardIndexDef = new IndexDefinition(backwardIndexName(indexName));
        // Same remark as in the forward index.
        backwardIndexDef.addVariableLengthByteField("dependency_masterrecordid", 2);
        backwardIndexDef.addByteField("dependant_vtag", DerefMapSerializationUtil.SCHEMA_ID_BYTE_LENGTH);
        backwardIndexDef.addVariableLengthByteField("variant_properties_pattern");
        backwardDerefIndex = indexManager.getIndex(owningRepoName, backwardIndexDef);
    }

    /**
     * Create a DerefMap for a given index. If this is the first time the DerefMap is constructed for this index,
     * the forward and backward index tables will be created.
     *
     * @param indexName name of the index
     * @param hbaseConfiguration hbase configuration
     * @param idGenerator id generator
     * @throws IndexNotFoundException
     * @throws IOException
     * @throws InterruptedException
     */
    public static DerefMap create(final String owningRepoName, final String indexName, final Configuration hbaseConfiguration,
                                  final HBaseTableFactory tableFactory, final IdGenerator idGenerator)
            throws IndexNotFoundException, IOException, InterruptedException {
        return new DerefMapHbaseImpl(owningRepoName, indexName, hbaseConfiguration, tableFactory, idGenerator);
    }

    /**
     * Delete a DerefMap. This will delete the corresponding hbase tables.
     *
     * @param indexName name of the index to delete
     * @param hbaseConfiguration hbase configuration
     * @throws IOException
     * @throws IndexNotFoundException if the index doesn't exist (maybe it was already deleted?)
     */
    public static void delete(final String indexName, final Configuration hbaseConfiguration)
            throws IOException, IndexNotFoundException {
        final IndexManager manager = new IndexManager(hbaseConfiguration);
        manager.deleteIndex(forwardIndexName(indexName));
        manager.deleteIndex(backwardIndexName(indexName));
    }

    /** @return name of the forward-index table for the given index */
    public static String forwardIndexName(String indexName) {
        return "deref-forward-" + indexName;
    }

    /** @return name of the backward-index table for the given index */
    public static String backwardIndexName(String indexName) {
        return "deref-backward-" + indexName;
    }

    @Override
    public void updateDependants(AbsoluteRecordId parentRecordId, SchemaId parentVtagId,
                                 Map<DependencyEntry, Set<SchemaId>> newDependantEntries)
            throws IOException {
        final Set<DependencyEntry> existingEntries = findDependencies(parentRecordId, parentVtagId);
        // Figure out what changed
        final Set<DependencyEntry> removedDependencies =
                figureOutRemovedDependencies(newDependantEntries.keySet(), existingEntries);
        final Collection<DependencyEntry> addedDependencies =
                figureOutAddedDependencies(newDependantEntries.keySet(), existingEntries);
        // IMPORTANT implementation note: the order in which changes are applied is not arbitrary. It is such that if
        // the process would fail in between, there will never be left any state in the backward index which would not
        // be found via the forward index.
        // delete removed from bwd index
        for (DependencyEntry removed : removedDependencies) {
            final IndexEntry backwardEntry =
                    createBackwardEntry(removed.getDependency(), parentRecordId, parentVtagId, null,
                            removed.getMoreDimensionedVariants());
            backwardDerefIndex.removeEntry(backwardEntry);
        }
        // update fwd index (added and removed at the same time, it is a single row)
        final IndexEntry fwdEntry =
                createForwardEntry(parentRecordId, parentVtagId, newDependantEntries.keySet());
        forwardDerefIndex.addEntry(fwdEntry);
        // add added to bwd idx
        for (DependencyEntry added : addedDependencies) {
            final Set<SchemaId> fields = newDependantEntries.get(added);
            final IndexEntry backwardEntry =
                    createBackwardEntry(added.getDependency(), parentRecordId, parentVtagId, fields,
                            added.getMoreDimensionedVariants());
            backwardDerefIndex.addEntry(backwardEntry);
        }
    }

    /** @return entries present in the existing set but absent from the new set */
    private Set<DependencyEntry> figureOutRemovedDependencies(Collection<DependencyEntry> newDependencies,
                                                              Set<DependencyEntry> existingDependencies) {
        // existing \ new, built via the copy constructor instead of create-then-addAll
        final Set<DependencyEntry> removed = new HashSet<DependencyEntry>(existingDependencies);
        removed.removeAll(newDependencies);
        return removed;
    }

    /** @return entries present in the new set but absent from the existing set */
    private Collection<DependencyEntry> figureOutAddedDependencies(Set<DependencyEntry> newDependencyEntries,
                                                                   Set<DependencyEntry> existingDependencies) {
        // new \ existing, built via the copy constructor instead of create-then-addAll
        final Set<DependencyEntry> added = new HashSet<DependencyEntry>(newDependencyEntries);
        added.removeAll(existingDependencies);
        return added;
    }

    /** Build the single forward-index row for a dependant record (+ vtag). */
    private IndexEntry createForwardEntry(AbsoluteRecordId parentRecordId, SchemaId parentVtagId,
                                          Collection<DependencyEntry> newDependencies) throws IOException {
        final IndexEntry fwdEntry = new IndexEntry(forwardDerefIndex.getDefinition());
        fwdEntry.addField("dependant_recordid", parentRecordId.toBytes());
        fwdEntry.addField("dependant_vtag", parentVtagId.getBytes());
        // we do not really use the identifier... all we are interested in is in the data of the entry
        fwdEntry.setIdentifier(DUMMY_IDENTIFIER);
        // the data contains the dependencies of the dependant (master record ids and vtags)
        fwdEntry.addData(DEPENDENCIES_KEY, this.serializationUtil.serializeDependenciesForward(newDependencies));
        return fwdEntry;
    }

    /** Build a backward-index row pointing from a dependency to one dependant record. */
    private IndexEntry createBackwardEntry(AbsoluteRecordId parentRecordId, AbsoluteRecordId dependantRecordId, SchemaId dependantVtagId,
                                           Set<SchemaId> fields, Set<String> moreDimensionedVariantProperties)
            throws IOException {
        final byte[] serializedVariantPropertiesPattern = this.serializationUtil.serializeVariantPropertiesPattern(
                this.serializationUtil.createVariantPropertiesPattern(parentRecordId.getRecordId().getVariantProperties(),
                        moreDimensionedVariantProperties));
        final IndexEntry bwdEntry = new IndexEntry(backwardDerefIndex.getDefinition());
        bwdEntry.addField("dependency_masterrecordid", parentRecordId.getRecordId().getMaster().toBytes());
        bwdEntry.addField("dependant_vtag", dependantVtagId.getBytes());
        bwdEntry.addField("variant_properties_pattern", serializedVariantPropertiesPattern);
        // the identifier is the dependant which depends on the dependency
        bwdEntry.setIdentifier(dependantRecordId.toBytes());
        // the fields which the dependant uses of the dependency (null if used for deleting the entry)
        if (fields != null) {
            bwdEntry.addData(FIELDS_KEY, this.serializationUtil.serializeFields(fields));
        }
        return bwdEntry;
    }

    /**
     * Find the set of record ids (and corresponding version tags) on which a given record (in a given version tag)
     * depends.
     *
     * @param parentRecordId record id of the record to find dependencies for
     * @param vtag vtag of the record to find dependencies for
     * @return the record ids and vtags on which the given record depends
     */
    Set<DependencyEntry> findDependencies(AbsoluteRecordId parentRecordId, SchemaId vtag) throws IOException {
        final Query query = new Query();
        query.addEqualsCondition("dependant_recordid", parentRecordId.toBytes());
        query.addEqualsCondition("dependant_vtag", vtag.getBytes());
        final Set<DependencyEntry> result;
        final QueryResult queryResult = forwardDerefIndex.performQuery(query);
        if (queryResult.next() != null) {
            final byte[] serializedEntries = queryResult.getData(DEPENDENCIES_KEY);
            result = this.serializationUtil.deserializeDependenciesForward(serializedEntries);
            // the (dependant_recordid, dependant_vtag) key must map to exactly one row
            if (queryResult.next() != null) {
                throw new IllegalStateException(
                        "Expected only a single matching entry in " + forwardDerefIndex.getDefinition().getName());
            }
        } else {
            result = new HashSet<DependencyEntry>();
        }
        // Not closed in finally block: avoid HBase contact when there could be connection problems.
        Closer.close(queryResult);
        return result;
    }

    @Override
    public DependantRecordIdsIterator findDependantsOf(AbsoluteRecordId parentRecordId, Set<SchemaId> fields,
                                                       SchemaId vtag) throws IOException {
        final RecordId master = parentRecordId.getRecordId().getMaster();
        final Query query = new Query();
        query.addEqualsCondition("dependency_masterrecordid", master.toBytes());
        if (vtag != null) {
            query.addEqualsCondition("dependant_vtag", vtag.getBytes());
        }
        // variant-property and field filtering happens server-side via the index filter
        query.setIndexFilter(new DerefMapIndexFilter(parentRecordId.getRecordId().getVariantProperties(), fields));
        return new DependantRecordIdsIteratorImpl(backwardDerefIndex.performQuery(query), this.serializationUtil);
    }

    @Override
    public DependantRecordIdsIterator findDependantsOf(AbsoluteRecordId parentRecordId, SchemaId field,
                                                       SchemaId vtag) throws IOException {
        return findDependantsOf(parentRecordId, field == null ? null : Sets.newHashSet(field), vtag);
    }

    @Override
    public DependantRecordIdsIterator findDependantsOf(AbsoluteRecordId parentRecordId) throws IOException {
        return findDependantsOf(parentRecordId, (Set<SchemaId>) null, null);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.runners.flink.translation.wrappers.streaming;
import static org.apache.beam.runners.core.StatefulDoFnRunner.TimeInternalsCleanupTimer.GC_TIMER_ID;
import static org.apache.beam.runners.flink.translation.utils.FlinkPortableRunnerUtils.requiresTimeSortedInput;
import static org.apache.flink.util.Preconditions.checkNotNull;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayDeque;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.EnumMap;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
import java.util.function.Supplier;
import java.util.stream.Collectors;
import org.apache.beam.model.fnexecution.v1.BeamFnApi.DelayedBundleApplication;
import org.apache.beam.model.fnexecution.v1.BeamFnApi.ProcessBundleProgressResponse;
import org.apache.beam.model.fnexecution.v1.BeamFnApi.ProcessBundleResponse;
import org.apache.beam.model.fnexecution.v1.BeamFnApi.StateKey.TypeCase;
import org.apache.beam.model.pipeline.v1.RunnerApi;
import org.apache.beam.runners.core.DoFnRunner;
import org.apache.beam.runners.core.LateDataUtils;
import org.apache.beam.runners.core.StateInternals;
import org.apache.beam.runners.core.StateInternalsFactory;
import org.apache.beam.runners.core.StateNamespace;
import org.apache.beam.runners.core.StateNamespaces;
import org.apache.beam.runners.core.StateTag;
import org.apache.beam.runners.core.StateTags;
import org.apache.beam.runners.core.StatefulDoFnRunner;
import org.apache.beam.runners.core.StepContext;
import org.apache.beam.runners.core.TimerInternals;
import org.apache.beam.runners.core.TimerInternalsFactory;
import org.apache.beam.runners.core.construction.PTransformTranslation;
import org.apache.beam.runners.core.construction.SerializablePipelineOptions;
import org.apache.beam.runners.core.construction.Timer;
import org.apache.beam.runners.core.construction.graph.ExecutableStage;
import org.apache.beam.runners.core.construction.graph.UserStateReference;
import org.apache.beam.runners.flink.translation.functions.FlinkExecutableStageContextFactory;
import org.apache.beam.runners.flink.translation.types.CoderTypeSerializer;
import org.apache.beam.runners.flink.translation.wrappers.streaming.state.FlinkStateInternals;
import org.apache.beam.runners.fnexecution.control.BundleCheckpointHandler;
import org.apache.beam.runners.fnexecution.control.BundleCheckpointHandlers;
import org.apache.beam.runners.fnexecution.control.BundleCheckpointHandlers.StateAndTimerBundleCheckpointHandler;
import org.apache.beam.runners.fnexecution.control.BundleFinalizationHandler;
import org.apache.beam.runners.fnexecution.control.BundleFinalizationHandlers;
import org.apache.beam.runners.fnexecution.control.BundleFinalizationHandlers.InMemoryFinalizer;
import org.apache.beam.runners.fnexecution.control.BundleProgressHandler;
import org.apache.beam.runners.fnexecution.control.ExecutableStageContext;
import org.apache.beam.runners.fnexecution.control.OutputReceiverFactory;
import org.apache.beam.runners.fnexecution.control.ProcessBundleDescriptors;
import org.apache.beam.runners.fnexecution.control.RemoteBundle;
import org.apache.beam.runners.fnexecution.control.StageBundleFactory;
import org.apache.beam.runners.fnexecution.control.TimerReceiverFactory;
import org.apache.beam.runners.fnexecution.provisioning.JobInfo;
import org.apache.beam.runners.fnexecution.state.StateRequestHandler;
import org.apache.beam.runners.fnexecution.state.StateRequestHandlers;
import org.apache.beam.runners.fnexecution.translation.StreamingSideInputHandlerFactory;
import org.apache.beam.runners.fnexecution.wire.ByteStringCoder;
import org.apache.beam.sdk.coders.Coder;
import org.apache.beam.sdk.coders.VoidCoder;
import org.apache.beam.sdk.fn.data.FnDataReceiver;
import org.apache.beam.sdk.function.ThrowingFunction;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.state.BagState;
import org.apache.beam.sdk.state.State;
import org.apache.beam.sdk.state.StateContext;
import org.apache.beam.sdk.state.TimeDomain;
import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.DoFnSchemaInformation;
import org.apache.beam.sdk.transforms.join.RawUnionValue;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
import org.apache.beam.sdk.transforms.windowing.GlobalWindow;
import org.apache.beam.sdk.transforms.windowing.PaneInfo;
import org.apache.beam.sdk.util.WindowedValue;
import org.apache.beam.sdk.values.KV;
import org.apache.beam.sdk.values.PCollectionView;
import org.apache.beam.sdk.values.TupleTag;
import org.apache.beam.sdk.values.WindowingStrategy;
import org.apache.beam.vendor.grpc.v1p36p0.com.google.protobuf.ByteString;
import org.apache.beam.vendor.grpc.v1p36p0.io.grpc.StatusRuntimeException;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Iterables;
import org.apache.flink.api.common.state.ListStateDescriptor;
import org.apache.flink.api.common.typeutils.base.StringSerializer;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.runtime.state.AbstractKeyedStateBackend;
import org.apache.flink.runtime.state.KeyGroupRange;
import org.apache.flink.runtime.state.KeyedStateBackend;
import org.apache.flink.streaming.api.watermark.Watermark;
import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;
import org.apache.flink.streaming.runtime.tasks.ProcessingTimeService;
import org.checkerframework.checker.nullness.qual.Nullable;
import org.joda.time.Instant;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This operator is the streaming equivalent of the {@link
* org.apache.beam.runners.flink.translation.functions.FlinkExecutableStageFunction}. It sends all
* received elements to the SDK harness and emits the received back elements to the downstream
* operators. It also takes care of handling side inputs and state.
*
* <p>TODO Integrate support for progress updates and metrics
*/
// We use Flink's lifecycle methods to initialize transient fields
@SuppressFBWarnings("SE_TRANSIENT_FIELD_NOT_RESTORED")
@SuppressWarnings({
"rawtypes", // TODO(https://issues.apache.org/jira/browse/BEAM-10556)
"nullness" // TODO(https://issues.apache.org/jira/browse/BEAM-10402)
})
public class ExecutableStageDoFnOperator<InputT, OutputT> extends DoFnOperator<InputT, OutputT> {
  private static final Logger LOG = LoggerFactory.getLogger(ExecutableStageDoFnOperator.class);

  /** The portable payload describing the fused stage this operator runs on the SDK harness. */
  private final RunnerApi.ExecutableStagePayload payload;
  /** Job-level metadata (id, artifacts, pipeline options) passed to the stage context factory. */
  private final JobInfo jobInfo;
  /** Factory for the (possibly shared) execution context that communicates with the harness. */
  private final FlinkExecutableStageContextFactory contextFactory;
  /** Maps SDK PCollection ids to this operator's Flink output tags. */
  private final Map<String, TupleTag<?>> outputMap;
  /** Maps portable side input ids to their materialized PCollection views. */
  private final Map<RunnerApi.ExecutableStagePayload.SideInputId, PCollectionView<?>> sideInputIds;
  /** A lock which has to be acquired when concurrently accessing state and timers. */
  private final ReentrantLock stateBackendLock;
  /** Serialized pipeline options, re-hydrated on the task managers. */
  private final SerializablePipelineOptions pipelineOptions;
  /** True iff the stage declares user state or timers (requires keyed input). */
  private final boolean isStateful;
  /** Coder for windows; used to build state/timer namespaces. */
  private final Coder windowCoder;
  /** Coder for windowed main-input elements; used e.g. for SDF residuals. */
  private final Coder<WindowedValue<InputT>> inputCoder;

  // Transient fields are (re-)initialized in open(); see the SuppressFBWarnings on the class.
  private transient ExecutableStageContext stageContext;
  private transient StateRequestHandler stateRequestHandler;
  private transient BundleProgressHandler progressHandler;
  private transient InMemoryFinalizer finalizationHandler;
  private transient BundleCheckpointHandler checkpointHandler;
  private transient boolean hasSdfProcessFn;
  private transient StageBundleFactory stageBundleFactory;
  private transient ExecutableStage executableStage;
  private transient SdkHarnessDoFnRunner<InputT, OutputT> sdkHarnessRunner;
  /** The minimum event time timer timestamp observed during the last bundle. */
  private transient long minEventTimeTimerTimestampInLastBundle;
  /** The minimum event time timer timestamp observed in the current bundle. */
  private transient long minEventTimeTimerTimestampInCurrentBundle;
  /** The input watermark before the current bundle started. */
  private transient long inputWatermarkBeforeBundleStart;
  /** Flag indicating whether the operator has been closed. */
  private transient boolean closed;
/**
 * Creates the operator for one executable stage.
 *
 * <p>The superclass receives a {@code NoOpDoFn}; the actual user code is executed remotely on
 * the SDK harness via the stage bundle factory set up in {@link #open()}.
 */
public ExecutableStageDoFnOperator(
    String stepName,
    Coder<WindowedValue<InputT>> windowedInputCoder,
    Map<TupleTag<?>, Coder<?>> outputCoders,
    TupleTag<OutputT> mainOutputTag,
    List<TupleTag<?>> additionalOutputTags,
    OutputManagerFactory<OutputT> outputManagerFactory,
    Map<Integer, PCollectionView<?>> sideInputTagMapping,
    Collection<PCollectionView<?>> sideInputs,
    Map<RunnerApi.ExecutableStagePayload.SideInputId, PCollectionView<?>> sideInputIds,
    PipelineOptions options,
    RunnerApi.ExecutableStagePayload payload,
    JobInfo jobInfo,
    FlinkExecutableStageContextFactory contextFactory,
    Map<String, TupleTag<?>> outputMap,
    WindowingStrategy windowingStrategy,
    Coder keyCoder,
    KeySelector<WindowedValue<InputT>, ?> keySelector) {
  super(
      new NoOpDoFn(),
      stepName,
      windowedInputCoder,
      outputCoders,
      mainOutputTag,
      additionalOutputTags,
      outputManagerFactory,
      windowingStrategy,
      sideInputTagMapping,
      sideInputs,
      options,
      keyCoder,
      keySelector,
      DoFnSchemaInformation.create(),
      Collections.emptyMap());
  // Portable stage wiring.
  this.payload = payload;
  this.jobInfo = jobInfo;
  this.contextFactory = contextFactory;
  this.outputMap = outputMap;
  this.sideInputIds = sideInputIds;
  this.pipelineOptions = new SerializablePipelineOptions(options);
  // Coders used for namespaces and SDF residual round-trips.
  this.inputCoder = windowedInputCoder;
  this.windowCoder = (Coder<BoundedWindow>) windowingStrategy.getWindowFn().windowCoder();
  // A stage is stateful iff it declares user state or timers.
  this.isStateful = payload.getUserStatesCount() > 0 || payload.getTimersCount() > 0;
  // Guards all access to the Flink state backend, which the SDK harness may hit asynchronously.
  this.stateBackendLock = new ReentrantLock();
}
@Override
protected Lock getLockToAcquireForStateAccessDuringBundles() {
  // Expose the same lock used for asynchronous state requests so the superclass
  // serializes its own state access with the SDK harness.
  return stateBackendLock;
}
@Override
public void open() throws Exception {
  // Rebuild the executable stage from its serialized payload; all transient fields
  // below are derived from it and must be initialized before super.open().
  executableStage = ExecutableStage.fromPayload(payload);
  hasSdfProcessFn = hasSDF(executableStage);
  initializeUserState(executableStage, getKeyedStateBackend(), pipelineOptions);
  // TODO: Wire this into the distributed cache and make it pluggable.
  // TODO: Do we really want this layer of indirection when accessing the stage bundle factory?
  // It's a little strange because this operator is responsible for the lifetime of the stage
  // bundle "factory" (manager?) but not the job or Flink bundle factories. How do we make
  // ownership of the higher level "factories" explicit? Do we care?
  stageContext = contextFactory.get(jobInfo);
  stageBundleFactory = stageContext.getStageBundleFactory(executableStage);
  stateRequestHandler = getStateRequestHandler(executableStage);
  // Forward SDK-harness monitoring info into Flink metrics, both on progress and completion.
  progressHandler =
      new BundleProgressHandler() {
        @Override
        public void onProgress(ProcessBundleProgressResponse progress) {
          if (flinkMetricContainer != null) {
            flinkMetricContainer.updateMetrics(stepName, progress.getMonitoringInfosList());
          }
        }

        @Override
        public void onCompleted(ProcessBundleResponse response) {
          if (flinkMetricContainer != null) {
            flinkMetricContainer.updateMetrics(stepName, response.getMonitoringInfosList());
          }
        }
      };
  // Bundle finalization is deferred until a Flink checkpoint completes; see
  // notifyCheckpointComplete.
  finalizationHandler =
      BundleFinalizationHandlers.inMemoryFinalizer(
          stageBundleFactory.getInstructionRequestHandler());
  checkpointHandler = getBundleCheckpointHandler(hasSdfProcessFn);
  // No event-time timers observed yet; MAX_VALUE means "no watermark hold".
  minEventTimeTimerTimestampInCurrentBundle = Long.MAX_VALUE;
  minEventTimeTimerTimestampInLastBundle = Long.MAX_VALUE;
  super.setPreBundleCallback(this::preBundleStartCallback);
  super.setBundleFinishedCallback(this::finishBundleCallback);
  // This will call {@code createWrappingDoFnRunner} which needs the above dependencies.
  super.open();
}
@Override
public final void notifyCheckpointComplete(long checkpointId) throws Exception {
  // Once the checkpoint is durable, it is safe to run user bundle-finalization callbacks
  // collected by the in-memory finalizer.
  finalizationHandler.finalizeAllOutstandingBundles();
  super.notifyCheckpointComplete(checkpointId);
}
/**
 * Returns the handler used for SDK-initiated self-checkpoints.
 *
 * <p>Only splittable DoFns may legally produce a {@code DelayedBundleApplication}; for all other
 * stages a handler that rejects any such response is returned.
 */
private BundleCheckpointHandler getBundleCheckpointHandler(boolean hasSDF) {
  if (hasSDF) {
    // Residuals are persisted in Flink state and re-triggered via Flink timers.
    return new BundleCheckpointHandlers.StateAndTimerBundleCheckpointHandler(
        new SdfFlinkTimerInternalsFactory(),
        new SdfFlinkStateInternalsFactory(),
        inputCoder,
        windowCoder);
  }
  return response -> {
    throw new UnsupportedOperationException(
        "Self-checkpoint is only supported on splittable DoFn.");
  };
}
/** Returns whether any transform in the stage is a splittable-DoFn process transform. */
private boolean hasSDF(ExecutableStage executableStage) {
  final String sdfUrn =
      PTransformTranslation.SPLITTABLE_PROCESS_SIZED_ELEMENTS_AND_RESTRICTIONS_URN;
  return executableStage.getTransforms().stream()
      .map(pTransformNode -> pTransformNode.getTransform().getSpec().getUrn())
      .anyMatch(sdfUrn::equals);
}
/**
 * Builds the composite handler that serves state requests from the SDK harness: side input
 * reads are delegated to the streaming side input handler, bag user state to the Flink keyed
 * state backend. Request types without a configured backend are rejected.
 */
private StateRequestHandler getStateRequestHandler(ExecutableStage executableStage) {
  final StateRequestHandler sideInputStateHandler;
  if (executableStage.getSideInputs().size() > 0) {
    // The superclass only creates a sideInputHandler when side inputs are present.
    checkNotNull(super.sideInputHandler);
    StateRequestHandlers.SideInputHandlerFactory sideInputHandlerFactory =
        Preconditions.checkNotNull(
            StreamingSideInputHandlerFactory.forStage(
                executableStage, sideInputIds, super.sideInputHandler));
    try {
      sideInputStateHandler =
          StateRequestHandlers.forSideInputHandlerFactory(
              ProcessBundleDescriptors.getSideInputs(executableStage), sideInputHandlerFactory);
    } catch (IOException e) {
      throw new RuntimeException("Failed to initialize SideInputHandler", e);
    }
  } else {
    sideInputStateHandler = StateRequestHandler.unsupported();
  }
  final StateRequestHandler userStateRequestHandler;
  if (!executableStage.getUserStates().isEmpty()) {
    // User state requires a keyed stream; keyedStateInternals is only set up for keyed input.
    if (keyedStateInternals == null) {
      throw new IllegalStateException("Input must be keyed when user state is used");
    }
    userStateRequestHandler =
        StateRequestHandlers.forBagUserStateHandlerFactory(
            stageBundleFactory.getProcessBundleDescriptor(),
            new BagUserStateFactory(
                keyedStateInternals, getKeyedStateBackend(), stateBackendLock, keyCoder));
  } else {
    userStateRequestHandler = StateRequestHandler.unsupported();
  }
  // Dispatch per request type; all side-input flavors share one handler.
  EnumMap<TypeCase, StateRequestHandler> handlerMap = new EnumMap<>(TypeCase.class);
  handlerMap.put(TypeCase.ITERABLE_SIDE_INPUT, sideInputStateHandler);
  handlerMap.put(TypeCase.MULTIMAP_SIDE_INPUT, sideInputStateHandler);
  handlerMap.put(TypeCase.MULTIMAP_KEYS_SIDE_INPUT, sideInputStateHandler);
  handlerMap.put(TypeCase.BAG_USER_STATE, userStateRequestHandler);
  return StateRequestHandlers.delegateBasedUponType(handlerMap);
}
/**
 * Serves bag user state requests from the SDK harness against the Flink keyed state backend.
 *
 * <p>Each access sets the backend's current key to the request's (encoded) key and must
 * therefore hold {@code stateBackendLock} to exclude concurrent timer/state access.
 */
static class BagUserStateFactory<V, W extends BoundedWindow>
    implements StateRequestHandlers.BagUserStateHandlerFactory<ByteString, V, W> {
  private final StateInternals stateInternals;
  private final KeyedStateBackend<ByteBuffer> keyedStateBackend;
  /** Lock to hold whenever accessing the state backend. */
  private final Lock stateBackendLock;
  /** For debugging: The key coder used by the Runner. */
  private final @Nullable Coder runnerKeyCoder;
  /** For debugging: Same as keyedStateBackend but upcasted, to access key group meta info. */
  private final @Nullable AbstractKeyedStateBackend<ByteBuffer> keyStateBackendWithKeyGroupInfo;

  BagUserStateFactory(
      StateInternals stateInternals,
      KeyedStateBackend<ByteBuffer> keyedStateBackend,
      Lock stateBackendLock,
      @Nullable Coder runnerKeyCoder) {
    this.stateInternals = stateInternals;
    this.keyedStateBackend = keyedStateBackend;
    this.stateBackendLock = stateBackendLock;
    if (keyedStateBackend instanceof AbstractKeyedStateBackend) {
      // This will always succeed, unless a custom state backend is used which does not extend
      // AbstractKeyedStateBackend. This is unlikely but we should still consider this case.
      this.keyStateBackendWithKeyGroupInfo =
          (AbstractKeyedStateBackend<ByteBuffer>) keyedStateBackend;
    } else {
      this.keyStateBackendWithKeyGroupInfo = null;
    }
    this.runnerKeyCoder = runnerKeyCoder;
  }

  @Override
  public StateRequestHandlers.BagUserStateHandler<ByteString, V, W> forUserState(
      // Transform id not used because multiple operators with state will not
      // be fused together. See GreedyPCollectionFusers
      String pTransformId,
      String userStateId,
      Coder<ByteString> keyCoder,
      Coder<V> valueCoder,
      Coder<W> windowCoder) {
    return new StateRequestHandlers.BagUserStateHandler<ByteString, V, W>() {
      /** Reads the bag contents for the given key and window. */
      @Override
      public Iterable<V> get(ByteString key, W window) {
        try (Locker locker = Locker.locked(stateBackendLock)) {
          prepareStateBackend(key);
          StateNamespace namespace = StateNamespaces.window(windowCoder, window);
          if (LOG.isDebugEnabled()) {
            LOG.debug(
                "State get for {} {} {} {}",
                pTransformId,
                userStateId,
                Arrays.toString(keyedStateBackend.getCurrentKey().array()),
                window);
          }
          BagState<V> bagState =
              stateInternals.state(namespace, StateTags.bag(userStateId, valueCoder));
          return bagState.read();
        }
      }

      /** Appends all provided values to the bag for the given key and window. */
      @Override
      public void append(ByteString key, W window, Iterator<V> values) {
        try (Locker locker = Locker.locked(stateBackendLock)) {
          prepareStateBackend(key);
          StateNamespace namespace = StateNamespaces.window(windowCoder, window);
          if (LOG.isDebugEnabled()) {
            LOG.debug(
                "State append for {} {} {} {}",
                pTransformId,
                userStateId,
                Arrays.toString(keyedStateBackend.getCurrentKey().array()),
                window);
          }
          BagState<V> bagState =
              stateInternals.state(namespace, StateTags.bag(userStateId, valueCoder));
          while (values.hasNext()) {
            bagState.add(values.next());
          }
        }
      }

      /** Clears the bag for the given key and window. */
      @Override
      public void clear(ByteString key, W window) {
        try (Locker locker = Locker.locked(stateBackendLock)) {
          prepareStateBackend(key);
          StateNamespace namespace = StateNamespaces.window(windowCoder, window);
          if (LOG.isDebugEnabled()) {
            LOG.debug(
                "State clear for {} {} {} {}",
                pTransformId,
                userStateId,
                Arrays.toString(keyedStateBackend.getCurrentKey().array()),
                window);
          }
          BagState<V> bagState =
              stateInternals.state(namespace, StateTags.bag(userStateId, valueCoder));
          bagState.clear();
        }
      }

      /**
       * Sets the backend's current key from the encoded request key and, when key group
       * information is available, verifies the key belongs to this operator's key group range.
       * Caller must hold {@code stateBackendLock}.
       */
      private void prepareStateBackend(ByteString key) {
        // Key for state request is shipped encoded with NESTED context.
        ByteBuffer encodedKey = FlinkKeyUtils.fromEncodedKey(key);
        keyedStateBackend.setCurrentKey(encodedKey);
        if (keyStateBackendWithKeyGroupInfo != null) {
          int currentKeyGroupIndex = keyStateBackendWithKeyGroupInfo.getCurrentKeyGroupIndex();
          KeyGroupRange keyGroupRange = keyStateBackendWithKeyGroupInfo.getKeyGroupRange();
          Preconditions.checkState(
              keyGroupRange.contains(currentKeyGroupIndex),
              "The current key '%s' with key group index '%s' does not belong to the key group range '%s'. Runner keyCoder: %s. Ptransformid: %s Userstateid: %s",
              Arrays.toString(key.toByteArray()),
              currentKeyGroupIndex,
              keyGroupRange,
              runnerKeyCoder,
              pTransformId,
              userStateId);
        }
      }
    };
  }
}
/**
 * Note: This is only relevant when we have a stateful DoFn. We want to control the key of the
 * state backend ourselves and we must avoid any concurrent setting of the current active key. By
 * overwriting this, we also prevent unnecessary serialization as the key has to be encoded as a
 * byte array.
 */
@Override
public void setKeyContextElement1(StreamRecord record) {} // intentionally a no-op
/**
 * We don't want to set anything here. This is due to asynchronous nature of processing elements
 * from the SDK Harness. The Flink runtime sets the current key before calling {@code
 * processElement}, but this does not work when sending elements to the SDK harness which may be
 * processed at an arbitrary later point in time. State for keys is also accessed asynchronously
 * via state requests.
 *
 * <p>We set the key only as it is required for 1) State requests 2) Timers (setting/firing).
 */
@Override
public void setCurrentKey(Object key) {} // intentionally a no-op; see Javadoc
/**
 * Returns the currently active key of the keyed state backend.
 *
 * @throws IllegalStateException if the state backend lock is not held, since the current key
 *     may otherwise be changed concurrently by state requests or timer handling.
 */
@Override
public ByteBuffer getCurrentKey() {
  // This is the key retrieved by HeapInternalTimerService when setting a Flink timer.
  // Note: Only called by the TimerService. Must be guarded by a lock.
  Preconditions.checkState(
      stateBackendLock.isLocked(),
      "State backend must be locked when retrieving the current key.");
  return this.<ByteBuffer>getKeyedStateBackend().getCurrentKey();
}
/**
 * Registers or deletes a timer received from the SDK harness for the timer element's user key.
 * Must only be called while a bundle is in progress. Also tracks the minimum (non-GC)
 * event-time timer timestamp of the current bundle for output watermark holds.
 */
void setTimer(Timer<?> timerElement, TimerInternals.TimerData timerData) {
  try {
    Preconditions.checkState(
        sdkHarnessRunner.isBundleInProgress(), "Bundle was expected to be in progress!!");
    LOG.debug("Setting timer: {} {}", timerElement, timerData);
    // KvToByteBufferKeySelector returns the key encoded, it doesn't care about the
    // window, timestamp or pane information.
    ByteBuffer encodedKey =
        (ByteBuffer)
            keySelector.getKey(
                WindowedValue.valueInGlobalWindow(
                    (InputT) KV.of(timerElement.getUserKey(), null)));
    // We have to synchronize to ensure the state backend is not concurrently accessed by the
    // state requests
    try (Locker locker = Locker.locked(stateBackendLock)) {
      getKeyedStateBackend().setCurrentKey(encodedKey);
      if (timerElement.getClearBit()) {
        timerInternals.deleteTimer(timerData);
      } else {
        timerInternals.setTimer(timerData);
        // GC timers do not hold the watermark; only user timers do.
        if (!timerData.getTimerId().equals(GC_TIMER_ID)) {
          minEventTimeTimerTimestampInCurrentBundle =
              Math.min(
                  minEventTimeTimerTimestampInCurrentBundle,
                  adjustTimestampForFlink(timerData.getTimestamp().getMillis()));
        }
      }
    }
  } catch (Exception e) {
    throw new RuntimeException("Couldn't set timer", e);
  }
}
/**
 * A {@link TimerInternalsFactory} for Flink operator to create a {@link
 * StateAndTimerBundleCheckpointHandler} to handle {@link
 * org.apache.beam.model.fnexecution.v1.BeamFnApi.DelayedBundleApplication}.
 */
class SdfFlinkTimerInternalsFactory implements TimerInternalsFactory<InputT> {
  @Override
  public TimerInternals timerInternalsForKey(InputT key) {
    try {
      // Encode the element key the same way the keyed stream does so that the timer lands
      // in the correct key group.
      ByteBuffer encodedKey =
          (ByteBuffer) keySelector.getKey(WindowedValue.valueInGlobalWindow(key));
      return new SdfFlinkTimerInternals(encodedKey);
    } catch (Exception e) {
      throw new RuntimeException("Couldn't get a timer internals", e);
    }
  }
}
/**
 * A {@link TimerInternals} for rescheduling {@link
 * org.apache.beam.model.fnexecution.v1.BeamFnApi.DelayedBundleApplication}.
 *
 * <p>Only setting timers (to re-trigger deferred SDF residuals) and reading the various clocks
 * is supported; deletion is never expected and throws.
 */
class SdfFlinkTimerInternals implements TimerInternals {
  /** The encoded key to activate in the state backend before touching timers. */
  private final ByteBuffer key;

  SdfFlinkTimerInternals(ByteBuffer key) {
    this.key = key;
  }

  @Override
  public void setTimer(
      StateNamespace namespace,
      String timerId,
      String timerFamilyId,
      Instant target,
      Instant outputTimestamp,
      TimeDomain timeDomain) {
    setTimer(
        TimerData.of(timerId, timerFamilyId, namespace, target, outputTimestamp, timeDomain));
  }

  @Override
  public void setTimer(TimerData timerData) {
    try {
      // Lock to exclude concurrent state requests while the backend's current key is changed.
      try (Locker locker = Locker.locked(stateBackendLock)) {
        getKeyedStateBackend().setCurrentKey(key);
        timerInternals.setTimer(timerData);
        // Hold the output watermark back to the timer's output timestamp.
        minEventTimeTimerTimestampInCurrentBundle =
            Math.min(
                minEventTimeTimerTimestampInCurrentBundle,
                adjustTimestampForFlink(timerData.getOutputTimestamp().getMillis()));
      }
    } catch (Exception e) {
      throw new RuntimeException("Couldn't set timer", e);
    }
  }

  @Override
  public void deleteTimer(StateNamespace namespace, String timerId, TimeDomain timeDomain) {
    throw new UnsupportedOperationException(
        "It is not expected to use SdfFlinkTimerInternals to delete a timer");
  }

  @Override
  public void deleteTimer(StateNamespace namespace, String timerId, String timerFamilyId) {
    throw new UnsupportedOperationException(
        "It is not expected to use SdfFlinkTimerInternals to delete a timer");
  }

  @Override
  public void deleteTimer(TimerData timerKey) {
    throw new UnsupportedOperationException(
        "It is not expected to use SdfFlinkTimerInternals to delete a timer");
  }

  // Clock accessors simply delegate to the operator's timer internals.
  @Override
  public Instant currentProcessingTime() {
    return timerInternals.currentProcessingTime();
  }

  @Override
  public @Nullable Instant currentSynchronizedProcessingTime() {
    return timerInternals.currentSynchronizedProcessingTime();
  }

  @Override
  public Instant currentInputWatermarkTime() {
    return timerInternals.currentInputWatermarkTime();
  }

  @Override
  public @Nullable Instant currentOutputWatermarkTime() {
    return timerInternals.currentOutputWatermarkTime();
  }
}
/**
 * A {@link StateInternalsFactory} for Flink operator to create a {@link
 * StateAndTimerBundleCheckpointHandler} to handle {@link
 * org.apache.beam.model.fnexecution.v1.BeamFnApi.DelayedBundleApplication}.
 */
class SdfFlinkStateInternalsFactory implements StateInternalsFactory<InputT> {
  @Override
  public StateInternals stateInternalsForKey(InputT key) {
    try {
      // Encode the element key via the operator's key selector so state lands in the
      // correct key group.
      ByteBuffer encodedKey =
          (ByteBuffer) keySelector.getKey(WindowedValue.valueInGlobalWindow(key));
      return new SdfFlinkStateInternals(encodedKey);
    } catch (Exception e) {
      throw new RuntimeException("Couldn't get a state internals", e);
    }
  }
}
/** A {@link StateInternals} for keeping {@link DelayedBundleApplication}s as states. */
class SdfFlinkStateInternals implements StateInternals {
  /** The encoded key to activate in the state backend before any state access. */
  private final ByteBuffer key;

  SdfFlinkStateInternals(ByteBuffer key) {
    this.key = key;
  }

  @Override
  public Object getKey() {
    return key;
  }

  @Override
  public <T extends State> T state(
      StateNamespace namespace, StateTag<T> address, StateContext<?> c) {
    try {
      // Lock to exclude concurrent state requests while the backend's current key is changed.
      try (Locker locker = Locker.locked(stateBackendLock)) {
        getKeyedStateBackend().setCurrentKey(key);
        return keyedStateInternals.state(namespace, address);
      }
    } catch (Exception e) {
      throw new RuntimeException("Couldn't set state", e);
    }
  }
}
/**
 * Fires a timer for the given encoded key, first activating that key in the state backend
 * under the state backend lock.
 */
@Override
protected void fireTimerInternal(ByteBuffer key, TimerInternals.TimerData timer) {
  // We have to synchronize to ensure the state backend is not concurrently accessed by the state
  // requests
  try (Locker locker = Locker.locked(stateBackendLock)) {
    getKeyedStateBackend().setCurrentKey(key);
    fireTimer(timer);
  }
}
/**
 * Flushes all pending work before shutdown: pushes the max watermark, then repeatedly finishes
 * bundles (and, for SDF, drains processing-time timers) until the output watermark has fully
 * advanced.
 */
@Override
public void close() throws Exception {
  closed = true;
  // We might still holding back the watermark and Flink does not trigger the timer
  // callback for watermark advancement anymore.
  processWatermark1(Watermark.MAX_WATERMARK);
  while (getCurrentOutputWatermark() < Watermark.MAX_WATERMARK.getTimestamp()) {
    invokeFinishBundle();
    if (hasSdfProcessFn) {
      // Manually drain processing time timers since Flink will ignore pending
      // processing-time timers when upstream operators have shut down and will also
      // shut down this operator with pending processing-time timers.
      // TODO(BEAM-11210, FLINK-18647): It doesn't work efficiently when the watermark of upstream
      // advances
      // to MAX_TIMESTAMP immediately.
      if (numProcessingTimeTimers() > 0) {
        timerInternals.processPendingProcessingTimeTimers();
      }
    }
  }
  super.close();
}
/**
 * Releases the stage bundle factory and stage context. Idempotent: subsequent calls are no-ops
 * because {@code stageContext} is nulled out in the finally block.
 */
@Override
public void dispose() throws Exception {
  // may be called multiple times when an exception is thrown
  if (stageContext != null) {
    // Remove the reference to stageContext and make stageContext available for garbage
    // collection.
    // try-with-resources closes the bundle factory before the stage context, even if
    // super.dispose() throws.
    try (AutoCloseable bundleFactoryCloser = stageBundleFactory;
        AutoCloseable closable = stageContext) {
      // DoFnOperator generates another "bundle" for the final watermark
      // https://issues.apache.org/jira/browse/BEAM-5816
      super.dispose();
    } finally {
      stageContext = null;
    }
  }
}
/**
 * Routes an incoming side input element (tagged union value) to the side input handler,
 * stripping the dummy {@code Void} key the side input stream carries.
 */
@Override
protected void addSideInputValue(StreamRecord<RawUnionValue> streamRecord) {
  RawUnionValue union = streamRecord.getValue();
  @SuppressWarnings("unchecked")
  WindowedValue<KV<Void, Iterable<?>>> windowedKv =
      (WindowedValue<KV<Void, Iterable<?>>>) union.getValue();
  // The union tag identifies which side input this element belongs to.
  PCollectionView<?> sideInput = sideInputTagMapping.get(union.getUnionTag());
  sideInputHandler.addSideInputValue(
      sideInput, windowedKv.withValue(windowedKv.getValue().getValue()));
}
/**
 * Creates the runner that ships elements/timers to the SDK harness. Called from
 * {@code super.open()}, after all transient dependencies have been initialized in
 * {@link #open()}. For stateful stages the runner is additionally wrapped for state cleanup.
 */
@Override
protected DoFnRunner<InputT, OutputT> createWrappingDoFnRunner(
    DoFnRunner<InputT, OutputT> wrappedRunner, StepContext stepContext) {
  sdkHarnessRunner =
      new SdkHarnessDoFnRunner<>(
          wrappedRunner.getFn(),
          stageBundleFactory,
          stateRequestHandler,
          progressHandler,
          finalizationHandler,
          checkpointHandler,
          outputManager,
          outputMap,
          windowCoder,
          inputCoder,
          this::setTimer,
          // Decodes the state backend's current key for timer callbacks.
          () -> FlinkKeyUtils.decodeKey(getCurrentKey(), keyCoder),
          keyedStateInternals);
  return ensureStateDoFnRunner(sdkHarnessRunner, payload, stepContext);
}
/**
 * Holds the input watermark at its pre-bundle value while a bundle is in flight.
 *
 * <p>Elements and timers are processed asynchronously by the SDK harness. Advancing the input
 * watermark before the current bundle completes could violate the Beam timer contract: a timer
 * set at t1 in one bundle may be moved to t2 by a later element, and a watermark w1 > t2 > t1
 * arriving in between must not fire the stale t1 timer.
 */
@Override
public long applyInputWatermarkHold(long inputWatermark) {
  return sdkHarnessRunner.isBundleInProgress() ? inputWatermarkBeforeBundleStart : inputWatermark;
}
/**
 * Holds back the output watermark while a bundle is in progress.
 *
 * <p>Due to the asynchronous communication with the SDK harness, a bundle may still be in
 * flight and not all of its outputs received yet. Emitting {@code potentialOutputWatermark}
 * directly could make pending harness outputs "late" although they were on time. Two options
 * exist: (1) cut the bundle on every watermark and emit immediately (simple, lower latency), or
 * (2) hold the output watermark until the bundle finishes and reprocess the watermark then
 * (better throughput, bundles are not cut on every watermark). Option 2 is implemented here;
 * {@code finishBundleCallback} re-triggers watermark processing once the bundle completes.
 */
@Override
public long applyOutputWatermarkHold(long currentOutputWatermark, long potentialOutputWatermark) {
  if (!sdkHarnessRunner.isBundleInProgress()) {
    // No bundle was started when we advanced the input watermark; safe to emit the new value.
    return potentialOutputWatermark;
  }
  if (minEventTimeTimerTimestampInLastBundle == Long.MAX_VALUE) {
    // No timer information from the last bundle yet; keep the current output watermark.
    return currentOutputWatermark;
  }
  // Safe to advance to just before the last bundle's minimum event-time timer, but never past
  // the potential output watermark (which includes holds to the input watermark).
  return Math.min(minEventTimeTimerTimestampInLastBundle - 1, potentialOutputWatermark);
}
/** Snapshots the effective input watermark so it can be held while the bundle runs. */
private void preBundleStartCallback() {
  inputWatermarkBeforeBundleStart = getEffectiveInputWatermark();
}
@SuppressWarnings("FutureReturnValueIgnored")
private void finishBundleCallback() {
minEventTimeTimerTimestampInLastBundle = minEventTimeTimerTimestampInCurrentBundle;
minEventTimeTimerTimestampInCurrentBundle = Long.MAX_VALUE;
try {
if (!closed
&& minEventTimeTimerTimestampInLastBundle < Long.MAX_VALUE
&& minEventTimeTimerTimestampInLastBundle <= getEffectiveInputWatermark()) {
ProcessingTimeService processingTimeService = getProcessingTimeService();
// We are scheduling a timer for advancing the watermark, to not delay finishing the bundle
// and temporarily release the checkpoint lock. Otherwise, we could potentially loop when a
// timer keeps scheduling a timer for the same timestamp.
processingTimeService.registerTimer(
processingTimeService.getCurrentProcessingTime(),
ts -> processWatermark1(new Watermark(getEffectiveInputWatermark())));
} else {
processWatermark1(new Watermark(getEffectiveInputWatermark()));
}
} catch (Exception e) {
throw new RuntimeException(
"Failed to progress watermark to " + getEffectiveInputWatermark(), e);
}
}
private static class SdkHarnessDoFnRunner<InputT, OutputT>
implements DoFnRunner<InputT, OutputT> {
private final DoFn<InputT, OutputT> doFn;
private final LinkedBlockingQueue<KV<String, OutputT>> outputQueue;
private final StageBundleFactory stageBundleFactory;
private final StateRequestHandler stateRequestHandler;
private final BundleProgressHandler progressHandler;
private final BundleFinalizationHandler finalizationHandler;
private final BundleCheckpointHandler checkpointHandler;
private final BufferedOutputManager<OutputT> outputManager;
private final Map<String, TupleTag<?>> outputMap;
private final FlinkStateInternals<?> keyedStateInternals;
private final Coder<BoundedWindow> windowCoder;
private final Coder<WindowedValue<InputT>> residualCoder;
private final BiConsumer<Timer<?>, TimerInternals.TimerData> timerRegistration;
private final Supplier<Object> keyForTimer;
/**
* Current active bundle. Volatile to ensure mutually exclusive bundle processing threads see
* this consistent. Please see the description in DoFnOperator.
*/
private volatile RemoteBundle remoteBundle;
/**
* Current main input receiver. Volatile to ensure mutually exclusive bundle processing threads
* see this consistent. Please see the description in DoFnOperator.
*/
private volatile FnDataReceiver<WindowedValue<?>> mainInputReceiver;
public SdkHarnessDoFnRunner(
DoFn<InputT, OutputT> doFn,
StageBundleFactory stageBundleFactory,
StateRequestHandler stateRequestHandler,
BundleProgressHandler progressHandler,
BundleFinalizationHandler finalizationHandler,
BundleCheckpointHandler checkpointHandler,
BufferedOutputManager<OutputT> outputManager,
Map<String, TupleTag<?>> outputMap,
Coder<BoundedWindow> windowCoder,
Coder<WindowedValue<InputT>> residualCoder,
BiConsumer<Timer<?>, TimerInternals.TimerData> timerRegistration,
Supplier<Object> keyForTimer,
FlinkStateInternals<?> keyedStateInternals) {
this.doFn = doFn;
this.stageBundleFactory = stageBundleFactory;
this.stateRequestHandler = stateRequestHandler;
this.progressHandler = progressHandler;
this.finalizationHandler = finalizationHandler;
this.checkpointHandler = checkpointHandler;
this.outputManager = outputManager;
this.outputMap = outputMap;
this.timerRegistration = timerRegistration;
this.keyForTimer = keyForTimer;
this.windowCoder = windowCoder;
this.residualCoder = residualCoder;
this.outputQueue = new LinkedBlockingQueue<>();
this.keyedStateInternals = keyedStateInternals;
}
@Override
public void startBundle() {
OutputReceiverFactory receiverFactory =
new OutputReceiverFactory() {
@Override
public FnDataReceiver<OutputT> create(String pCollectionId) {
return receivedElement -> {
// handover to queue, do not block the grpc thread
outputQueue.put(KV.of(pCollectionId, receivedElement));
};
}
};
TimerReceiverFactory timerReceiverFactory =
new TimerReceiverFactory(stageBundleFactory, timerRegistration, windowCoder);
try {
remoteBundle =
stageBundleFactory.getBundle(
receiverFactory,
timerReceiverFactory,
stateRequestHandler,
progressHandler,
finalizationHandler,
checkpointHandler);
mainInputReceiver = Iterables.getOnlyElement(remoteBundle.getInputReceivers().values());
} catch (Exception e) {
throw new RuntimeException("Failed to start remote bundle", e);
}
}
@Override
public void processElement(WindowedValue<InputT> element) {
try {
LOG.debug("Processing value: {}", element);
mainInputReceiver.accept(element);
} catch (Exception e) {
throw new RuntimeException("Failed to process element with SDK harness.", e);
}
emitResults();
}
@Override
public <KeyT> void onTimer(
String timerId,
String timerFamilyId,
KeyT key,
BoundedWindow window,
Instant timestamp,
Instant outputTimestamp,
TimeDomain timeDomain) {
Object timerKey = keyForTimer.get();
Preconditions.checkNotNull(timerKey, "Key for timer needs to be set before calling onTimer");
Preconditions.checkNotNull(remoteBundle, "Call to onTimer outside of a bundle");
if (StateAndTimerBundleCheckpointHandler.isSdfTimer(timerId)) {
StateNamespace namespace = StateNamespaces.window(windowCoder, window);
WindowedValue stateValue =
keyedStateInternals.state(namespace, StateTags.value(timerId, residualCoder)).read();
processElement(stateValue);
} else {
KV<String, String> transformAndTimerFamilyId =
TimerReceiverFactory.decodeTimerDataTimerId(timerFamilyId);
LOG.debug(
"timer callback: {} {} {} {} {}",
transformAndTimerFamilyId.getKey(),
transformAndTimerFamilyId.getValue(),
window,
timestamp,
timeDomain);
FnDataReceiver<Timer> timerReceiver =
Preconditions.checkNotNull(
remoteBundle.getTimerReceivers().get(transformAndTimerFamilyId),
"No receiver found for timer %s %s",
transformAndTimerFamilyId.getKey(),
transformAndTimerFamilyId.getValue());
Timer<?> timerValue =
Timer.of(
timerKey,
timerId,
Collections.singletonList(window),
timestamp,
outputTimestamp,
// TODO: Support propagating the PaneInfo through.
PaneInfo.NO_FIRING);
try {
timerReceiver.accept(timerValue);
} catch (Exception e) {
throw new RuntimeException(
String.format(Locale.ENGLISH, "Failed to process timer %s", timerReceiver), e);
}
}
}
/**
 * Closes the current remote bundle — blocking until the SDK harness has delivered all
 * results — and emits them downstream.
 *
 * <p>The bundle reference is cleared even on failure so that a subsequent bundle can start.
 */
@Override
public void finishBundle() {
  try {
    // TODO: it would be nice to emit results as they arrive, can thread wait non-blocking?
    // close blocks until all results are received
    remoteBundle.close();
    emitResults();
  } catch (Exception e) {
    // A gRPC status failure indicates the harness connection itself broke, not user code.
    if (e.getCause() instanceof StatusRuntimeException) {
      throw new RuntimeException("SDK Harness connection lost.", e);
    }
    throw new RuntimeException("Failed to finish remote bundle", e);
  } finally {
    remoteBundle = null;
  }
}
/** Intentional no-op: this runner performs no work on window expiration callbacks. */
@Override
public <KeyT> void onWindowExpiration(BoundedWindow window, Instant timestamp, KeyT key) {}
/** Returns {@code true} while a remote bundle is open (started but not yet finished). */
boolean isBundleInProgress() {
  return remoteBundle != null;
}
/**
 * Drains every result the SDK harness has produced so far from the output queue and routes
 * each to its output tag. Fails if a result arrives for an unknown PCollection or with a
 * null value.
 */
private void emitResults() {
  for (KV<String, OutputT> entry = outputQueue.poll(); entry != null; entry = outputQueue.poll()) {
    final String pCollectionId = Preconditions.checkNotNull(entry.getKey());
    TupleTag<?> tupleTag = outputMap.get(pCollectionId);
    WindowedValue value =
        Preconditions.checkNotNull(
            (WindowedValue) entry.getValue(),
            "Received a null value from the SDK harness for %s",
            pCollectionId);
    if (tupleTag == null) {
      throw new IllegalStateException(
          String.format("Received output for unknown PCollection %s", pCollectionId));
    }
    // process regular elements
    outputManager.output(tupleTag, value);
  }
}
/** Returns the wrapped user {@link DoFn}. */
@Override
public DoFn<InputT, OutputT> getFn() {
  return doFn;
}
}
/**
 * Wraps {@code sdkHarnessRunner} in a {@link StatefulDoFnRunner} when the stage is stateful,
 * so that user state is garbage-collected when windows expire; non-stateful stages get the
 * runner back unchanged.
 */
private DoFnRunner<InputT, OutputT> ensureStateDoFnRunner(
    SdkHarnessDoFnRunner<InputT, OutputT> sdkHarnessRunner,
    RunnerApi.ExecutableStagePayload payload,
    StepContext stepContext) {
  if (!isStateful) {
    return sdkHarnessRunner;
  }
  // Takes care of state cleanup via StatefulDoFnRunner
  Coder windowCoder = windowingStrategy.getWindowFn().windowCoder();
  CleanupTimer<InputT> cleanupTimer =
      new CleanupTimer<>(
          timerInternals,
          stateBackendLock,
          windowingStrategy,
          keyCoder,
          windowCoder,
          getKeyedStateBackend());
  // Local names of all user state cells declared by the stage; these are what gets cleared.
  List<String> userStates =
      executableStage.getUserStates().stream()
          .map(UserStateReference::localName)
          .collect(Collectors.toList());
  KeyedStateBackend<ByteBuffer> stateBackend = getKeyedStateBackend();
  StateCleaner stateCleaner =
      new StateCleaner(
          userStates,
          windowCoder,
          stateBackend::getCurrentKey,
          timerInternals::hasPendingEventTimeTimers,
          cleanupTimer);
  return new StatefulDoFnRunner<InputT, OutputT, BoundedWindow>(
      sdkHarnessRunner,
      getInputCoder(),
      stepContext,
      windowingStrategy,
      cleanupTimer,
      stateCleaner,
      requiresTimeSortedInput(payload, true)) {
    @Override
    public void processElement(WindowedValue<InputT> input) {
      // The state backend is shared with the state request handler; set the current key
      // under the lock before delegating to the stateful runner.
      try (Locker locker = Locker.locked(stateBackendLock)) {
        @SuppressWarnings({"unchecked", "rawtypes"})
        final ByteBuffer key =
            FlinkKeyUtils.encodeKey(((KV) input.getValue()).getKey(), (Coder) keyCoder);
        getKeyedStateBackend().setCurrentKey(key);
        super.processElement(input);
      }
    }

    @Override
    public void finishBundle() {
      // Before cleaning up state, first finish bundle for all underlying DoFnRunners
      super.finishBundle();
      // execute cleanup after the bundle is complete
      if (!stateCleaner.cleanupQueue.isEmpty()) {
        try (Locker locker = Locker.locked(stateBackendLock)) {
          stateCleaner.cleanupState(keyedStateInternals, stateBackend::setCurrentKey);
        } catch (Exception e) {
          throw new RuntimeException("Failed to cleanup state.", e);
        }
      }
    }
  };
}
/**
 * Registers a per-key, per-window event-time timer that fires just after the window's
 * garbage-collection time, triggering user-state cleanup. Global-window elements are skipped
 * to avoid unbounded timer state.
 */
static class CleanupTimer<InputT> implements StatefulDoFnRunner.CleanupTimer<InputT> {
  private static final String GC_TIMER_ID = "__user-state-cleanup__";
  private final TimerInternals timerInternals;
  // Guards the shared keyed state backend against concurrent state requests.
  private final Lock stateBackendLock;
  private final WindowingStrategy windowingStrategy;
  private final Coder keyCoder;
  private final Coder windowCoder;
  private final KeyedStateBackend<ByteBuffer> keyedStateBackend;

  CleanupTimer(
      TimerInternals timerInternals,
      Lock stateBackendLock,
      WindowingStrategy windowingStrategy,
      Coder keyCoder,
      Coder windowCoder,
      KeyedStateBackend<ByteBuffer> keyedStateBackend) {
    this.timerInternals = timerInternals;
    this.stateBackendLock = stateBackendLock;
    this.windowingStrategy = windowingStrategy;
    this.keyCoder = keyCoder;
    this.windowCoder = windowCoder;
    this.keyedStateBackend = keyedStateBackend;
  }

  /** Sets the cleanup timer for the given element's key and window (skipped for the global window). */
  @Override
  public void setForWindow(InputT input, BoundedWindow window) {
    Preconditions.checkNotNull(input, "Null input passed to CleanupTimer");
    if (window.equals(GlobalWindow.INSTANCE)) {
      // Skip setting a cleanup timer for the global window as these timers
      // lead to potentially unbounded state growth in the runner, depending on key cardinality.
      // Cleanup for global window will be performed upon arrival of the final watermark.
      return;
    }
    // needs to match the encoding in prepareStateBackend for state request handler
    final ByteBuffer key = FlinkKeyUtils.encodeKey(((KV) input).getKey(), keyCoder);
    // Ensure the state backend is not concurrently accessed by the state requests
    try (Locker locker = Locker.locked(stateBackendLock)) {
      keyedStateBackend.setCurrentKey(key);
      setCleanupTimer(window);
    }
  }

  /** Registers the GC timer for {@code window} on the currently-set key of the state backend. */
  void setCleanupTimer(BoundedWindow window) {
    // make sure this fires after any window.maxTimestamp() timers
    Instant gcTime = LateDataUtils.garbageCollectionTime(window, windowingStrategy).plus(1);
    timerInternals.setTimer(
        StateNamespaces.window(windowCoder, window),
        GC_TIMER_ID,
        "",
        gcTime,
        window.maxTimestamp(),
        TimeDomain.EVENT_TIME);
  }

  /** Returns true iff the firing described matches the cleanup timer set for {@code window}. */
  @Override
  public boolean isForWindow(
      String timerId, BoundedWindow window, Instant timestamp, TimeDomain timeDomain) {
    boolean isEventTimer = timeDomain.equals(TimeDomain.EVENT_TIME);
    Instant gcTime = LateDataUtils.garbageCollectionTime(window, windowingStrategy).plus(1);
    return isEventTimer && GC_TIMER_ID.equals(timerId) && gcTime.equals(timestamp);
  }
}
/**
 * Defers user-state cleanup to the end of the bundle: (key, window) pairs are queued by
 * {@link #clearForWindow(BoundedWindow)} and cleared in {@link #cleanupState} once the bundle
 * has completed — unless new event-time timers were registered meanwhile, in which case the
 * GC timer is re-armed instead.
 */
static class StateCleaner implements StatefulDoFnRunner.StateCleaner<BoundedWindow> {
  private final List<String> userStateNames;
  private final Coder windowCoder;
  // (key, window) pairs whose user state should be cleared after the bundle completes.
  private final ArrayDeque<KV<ByteBuffer, BoundedWindow>> cleanupQueue;
  private final Supplier<ByteBuffer> currentKeySupplier;
  private final ThrowingFunction<Long, Boolean> hasPendingEventTimeTimers;
  private final CleanupTimer cleanupTimer;

  StateCleaner(
      List<String> userStateNames,
      Coder windowCoder,
      Supplier<ByteBuffer> currentKeySupplier,
      ThrowingFunction<Long, Boolean> hasPendingEventTimeTimers,
      CleanupTimer cleanupTimer) {
    this.userStateNames = userStateNames;
    this.windowCoder = windowCoder;
    this.currentKeySupplier = currentKeySupplier;
    this.hasPendingEventTimeTimers = hasPendingEventTimeTimers;
    this.cleanupTimer = cleanupTimer;
    this.cleanupQueue = new ArrayDeque<>();
  }

  @Override
  public void clearForWindow(BoundedWindow window) {
    // Delay cleanup until the end of the bundle to allow stateful processing and new timers.
    // Executed in the context of onTimer(..) where the correct key will be set
    cleanupQueue.add(KV.of(currentKeySupplier.get(), window));
  }

  /**
   * Drains the cleanup queue: for each queued (key, window) either re-arms the GC timer (when
   * pending event-time timers remain) or clears every user state cell for that window.
   */
  @SuppressWarnings("ByteBufferBackingArray")
  void cleanupState(StateInternals stateInternals, Consumer<ByteBuffer> keyContextConsumer)
      throws Exception {
    while (!cleanupQueue.isEmpty()) {
      KV<ByteBuffer, BoundedWindow> kv = Preconditions.checkNotNull(cleanupQueue.remove());
      BoundedWindow window = Preconditions.checkNotNull(kv.getValue());
      keyContextConsumer.accept(kv.getKey());
      // Check whether we have pending timers which were set during the bundle.
      if (hasPendingEventTimeTimers.apply(window.maxTimestamp().getMillis())) {
        // Re-add GC timer and let remaining timers fire. Don't cleanup state yet.
        cleanupTimer.setCleanupTimer(window);
      } else {
        if (LOG.isDebugEnabled()) {
          LOG.debug("State cleanup for {} {}", Arrays.toString(kv.getKey().array()), window);
        }
        // No more timers (finally!). Time to clean up.
        for (String userState : userStateNames) {
          StateNamespace namespace = StateNamespaces.window(windowCoder, window);
          StateTag<BagState<Void>> bagStateStateTag = StateTags.bag(userState, VoidCoder.of());
          BagState<?> state = stateInternals.state(namespace, bagStateStateTag);
          state.clear();
        }
      }
    }
  }
}
/**
 * Eagerly create the user state to work around https://jira.apache.org/jira/browse/FLINK-12653.
 */
// NOTE(review): keyedStateBackend is annotated @Nullable but is dereferenced unconditionally
// whenever the stage declares user states; an NPE here would be wrapped in the RuntimeException
// below. Confirm callers only pass null for stages without user state.
private static void initializeUserState(
    ExecutableStage executableStage,
    @Nullable KeyedStateBackend keyedStateBackend,
    SerializablePipelineOptions pipelineOptions) {
  executableStage
      .getUserStates()
      .forEach(
          ref -> {
            try {
              // One Flink list-state cell per declared user state, keyed by its local name.
              keyedStateBackend.getOrCreateKeyedState(
                  StringSerializer.INSTANCE,
                  new ListStateDescriptor<>(
                      ref.localName(),
                      new CoderTypeSerializer<>(ByteStringCoder.of(), pipelineOptions)));
            } catch (Exception e) {
              throw new RuntimeException("Couldn't initialize user states.", e);
            }
          });
}
/** Placeholder {@link DoFn} whose process method intentionally does nothing. */
private static class NoOpDoFn<InputT, OutputT> extends DoFn<InputT, OutputT> {
  // Intentional no-op.
  @ProcessElement
  public void doNothing(ProcessContext context) {}
}
/**
 * Try-with-resources adapter around a {@link Lock}: {@link #locked(Lock)} acquires the lock
 * and the returned instance releases it on {@link #close()}.
 */
private static class Locker implements AutoCloseable {
  private final Lock lock;

  Locker(Lock lock) {
    this.lock = lock;
  }

  /** Wraps {@code lock}, acquires it, and returns the wrapper for use in try-with-resources. */
  public static Locker locked(Lock lock) {
    Locker wrapper = new Locker(lock);
    lock.lock();
    return wrapper;
  }

  @Override
  public void close() {
    lock.unlock();
  }
}
}
| |
/**
* Copyright 2015 Smart Society Services B.V.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*/
package org.opensmartgridplatform.adapter.domain.smartmetering.infra.jms.core;
import java.util.ArrayList;
import java.util.List;
import javax.annotation.PostConstruct;
import javax.jms.JMSException;
import javax.jms.ObjectMessage;
import org.opensmartgridplatform.adapter.domain.smartmetering.infra.jms.ws.WebServiceResponseMessageSender;
import org.opensmartgridplatform.shared.exceptionhandling.ComponentType;
import org.opensmartgridplatform.shared.exceptionhandling.FunctionalException;
import org.opensmartgridplatform.shared.exceptionhandling.OsgpException;
import org.opensmartgridplatform.shared.exceptionhandling.TechnicalException;
import org.opensmartgridplatform.shared.infra.jms.DeviceMessageMetadata;
import org.opensmartgridplatform.shared.infra.jms.MessageProcessor;
import org.opensmartgridplatform.shared.infra.jms.MessageProcessorMap;
import org.opensmartgridplatform.shared.infra.jms.MessageType;
import org.opensmartgridplatform.shared.infra.jms.ResponseMessage;
import org.opensmartgridplatform.shared.infra.jms.ResponseMessageResultType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Base class for MessageProcessor implementations. Each MessageProcessor
 * implementation should be annotated with @Component. Further the MessageType
 * the MessageProcessor implementation can process should be passed in at
 * construction. The Singleton instance is added to the HashMap of
 * MessageProcessors after dependency injection has completed.
 *
 */
public abstract class OsgpCoreResponseMessageProcessor implements MessageProcessor {

    /**
     * Logger for this class.
     */
    private static final Logger LOGGER = LoggerFactory.getLogger(OsgpCoreResponseMessageProcessor.class);

    /**
     * This is the message sender needed for the message processor implementation to
     * forward response messages to web service adapter.
     */
    protected final WebServiceResponseMessageSender webServiceResponseMessageSender;

    /**
     * The map of message processor instances.
     */
    protected final MessageProcessorMap osgpCoreResponseMessageProcessorMap;

    /**
     * The component reported as the origin of technical exceptions created by this processor.
     */
    private final ComponentType componentType;

    /**
     * The message types that a message processor implementation can handle.
     */
    protected final List<MessageType> messageTypes = new ArrayList<>();

    /**
     * Construct a message processor instance by passing in the message type.
     *
     * @param webServiceResponseMessageSender
     *            The sender used to forward response messages to the web service adapter.
     * @param osgpCoreResponseMessageProcessorMap
     *            The map this processor registers itself with once dependency injection completes.
     * @param messageType
     *            The message type a message processor can handle.
     * @param componentType
     *            The component used as the origin of technical exceptions created by this
     *            processor.
     */
    protected OsgpCoreResponseMessageProcessor(final WebServiceResponseMessageSender webServiceResponseMessageSender,
            final MessageProcessorMap osgpCoreResponseMessageProcessorMap, final MessageType messageType,
            final ComponentType componentType) {
        this.webServiceResponseMessageSender = webServiceResponseMessageSender;
        this.osgpCoreResponseMessageProcessorMap = osgpCoreResponseMessageProcessorMap;
        this.componentType = componentType;
        this.messageTypes.add(messageType);
    }

    /**
     * In case a message processor instance can process multiple message types, a
     * message type can be added.
     *
     * @param messageType
     *            The message type a message processor can handle.
     */
    protected void addMessageType(final MessageType messageType) {
        this.messageTypes.add(messageType);
    }

    /**
     * Initialization function executed after dependency injection has finished.
     * The MessageProcessor Singleton is added to the HashMap of
     * MessageProcessors.
     */
    @PostConstruct
    public void init() {
        for (final MessageType messageType : this.messageTypes) {
            this.osgpCoreResponseMessageProcessorMap.addMessageProcessor(messageType, this);
        }
    }

    @Override
    public void processMessage(final ObjectMessage message) throws JMSException {
        LOGGER.debug("Processing smart metering response message");

        final DeviceMessageMetadata deviceMessageMetadata = new DeviceMessageMetadata(message);

        ResponseMessage responseMessage = null;
        OsgpException osgpException = null;
        try {
            responseMessage = (ResponseMessage) message.getObject();
            osgpException = responseMessage.getOsgpException();
        } catch (final JMSException e) {
            LOGGER.error("UNRECOVERABLE ERROR, unable to read ObjectMessage instance, giving up.", e);
            LOGGER.debug(deviceMessageMetadata.toString());
            // osgpException is still null at this point; logged for completeness.
            LOGGER.debug("osgpException: {}", osgpException);
            return;
        }

        try {
            if (osgpException != null) {
                // The device or core layer reported an error; forward it to the web service adapter.
                this.handleError(osgpException, deviceMessageMetadata, responseMessage);
            } else if (this.hasRegularResponseObject(responseMessage)) {
                LOGGER.info("Calling application service function to handle response: {}",
                        deviceMessageMetadata.getMessageType());
                this.handleMessage(deviceMessageMetadata, responseMessage, osgpException);
            } else {
                // No exception, but the data object is not of a type this processor can handle.
                LOGGER.error(
                        "No osgpException, yet dataObject ({}) is not of the regular type for handling response: {}",
                        responseMessage.getDataObject() == null ? null
                                : responseMessage.getDataObject().getClass().getName(),
                        deviceMessageMetadata.getMessageType());
                this.handleError(new TechnicalException(ComponentType.DOMAIN_SMART_METERING,
                        "Unexpected response data handling request.", null), deviceMessageMetadata);
            }
        } catch (final Exception e) {
            this.handleError(e, deviceMessageMetadata);
        }
    }

    /**
     * The {@code dataObject} in the {@code responseMessage} can either have a value
     * that would normally be returned as an answer, or it can contain an object
     * that was used in the request message (or other unexpected value).
     * <p>
     * The object from the request message is sometimes returned as object in the
     * response message to allow retries of requests without other knowledge of what
     * was sent earlier.
     * <p>
     * To filter out these, or other unexpected situations that may occur in the
     * future, each message processor is supposed to check the response message for
     * expected types of data objects.
     *
     * @param responseMessage
     *            the response message to inspect.
     * @return {@code true} if {@code responseMessage} contains a {@code dataObject}
     *         that can be processed normally; {@code false} otherwise.
     */
    protected abstract boolean hasRegularResponseObject(final ResponseMessage responseMessage);

    protected abstract void handleMessage(DeviceMessageMetadata deviceMessageMetadata,
            final ResponseMessage responseMessage, final OsgpException osgpException) throws FunctionalException;

    /**
     * In case of an error, this function can be used to send a response containing
     * the exception to the web-service-adapter.
     * <p>
     * The response message is provided to allow manipulation of certain responses,
     * for instance in case the error has to be incorporated in the response instead
     * of defining the response at its own.
     *
     * @param e
     *            the exception.
     * @param deviceMessageMetadata
     *            the device message metadata.
     * @param responseMessage
     *            the response message.
     * @throws FunctionalException
     *             when a subclass overriding this method decides the error is functional.
     */
    protected void handleError(final Exception e, final DeviceMessageMetadata deviceMessageMetadata,
            final ResponseMessage responseMessage) throws FunctionalException {
        if (responseMessage != null) {
            LOGGER.debug("Handling error without using responseMessage for correlationUid: {}",
                    responseMessage.getCorrelationUid());
        }
        this.handleError(e, deviceMessageMetadata);
    }

    /**
     * In case of an error, this function can be used to send a response containing
     * the exception to the web-service-adapter.
     *
     * @param e
     *            the exception.
     * @param deviceMessageMetadata
     *            the device message metadata.
     */
    protected void handleError(final Exception e, final DeviceMessageMetadata deviceMessageMetadata) {
        LOGGER.info("handling error: {} for message type: {}", e.getMessage(), deviceMessageMetadata.getMessageType());
        final OsgpException osgpException = this.ensureOsgpException(e);
        final ResponseMessage responseMessage = ResponseMessage.newResponseMessageBuilder()
                .withCorrelationUid(deviceMessageMetadata.getCorrelationUid())
                .withOrganisationIdentification(deviceMessageMetadata.getOrganisationIdentification())
                .withDeviceIdentification(deviceMessageMetadata.getDeviceIdentification())
                .withResult(ResponseMessageResultType.NOT_OK).withOsgpException(osgpException)
                .withMessagePriority(deviceMessageMetadata.getMessagePriority()).build();
        this.webServiceResponseMessageSender.send(responseMessage, deviceMessageMetadata.getMessageType());
    }

    /**
     * Returns {@code e} as-is when it already is an {@link OsgpException}; otherwise wraps it
     * in a {@link TechnicalException} attributed to this processor's component.
     *
     * @param e
     *            the exception to normalize.
     * @return an {@link OsgpException} representing {@code e}.
     */
    protected OsgpException ensureOsgpException(final Exception e) {
        if (e instanceof OsgpException) {
            return (OsgpException) e;
        }
        return new TechnicalException(this.componentType, "An unknown error occurred", e);
    }
}
| |
/*
* Copyright 2014-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.cxx;
import com.facebook.buck.graph.AbstractBreadthFirstTraversal;
import com.facebook.buck.graph.MutableDirectedGraph;
import com.facebook.buck.graph.TopologicalSort;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.parser.NoSuchBuildTargetException;
import com.facebook.buck.rules.BuildRule;
import com.facebook.buck.rules.SourcePath;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.base.Predicate;
import com.google.common.base.Predicates;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedMap;
import com.google.common.collect.Iterables;
import com.google.common.collect.Maps;
import java.util.Map;
/**
 * Static helpers for working with {@link NativeLinkable} dependency graphs: finding roots
 * reachable from build rules, ordering linkables for linking, and collecting transitive
 * link inputs and shared libraries.
 */
public class NativeLinkables {

  // Utility class; not instantiable.
  private NativeLinkables() {}

  /**
   * Find all {@link NativeLinkable} transitive roots reachable from the given {@link BuildRule}s.
   *
   * @param from the starting set of {@link BuildRule}s to begin the search from.
   * @param traverse a {@link Predicate} determining acceptable dependencies to traverse when
   *                 searching for {@link NativeLinkable}s.
   * @return all the roots found as a map from {@link BuildTarget} to {@link NativeLinkable}.
   */
  public static ImmutableMap<BuildTarget, NativeLinkable> getNativeLinkableRoots(
      Iterable<? extends BuildRule> from,
      final Predicate<Object> traverse) {
    final ImmutableMap.Builder<BuildTarget, NativeLinkable> nativeLinkables =
        ImmutableMap.builder();
    AbstractBreadthFirstTraversal<BuildRule> visitor =
        new AbstractBreadthFirstTraversal<BuildRule>(from) {
          @Override
          public ImmutableSet<BuildRule> visit(BuildRule rule) {
            // If this is `NativeLinkable`, we've found a root so record the rule and terminate
            // the search.
            if (rule instanceof NativeLinkable) {
              NativeLinkable nativeLinkable = (NativeLinkable) rule;
              nativeLinkables.put(nativeLinkable.getBuildTarget(), nativeLinkable);
              return ImmutableSet.of();
            }
            // Otherwise, make sure this rule is marked as traversable before following it's deps.
            if (traverse.apply(rule)) {
              return rule.getDeps();
            }
            return ImmutableSet.of();
          }
        };
    visitor.start();
    return nativeLinkables.build();
  }

  /**
   * Extract from the dependency graph all the libraries which must be considered for linking.
   *
   * Traversal proceeds depending on whether each dependency is to be statically or dynamically
   * linked.
   *
   * @return the linkables in reversed topological order, keyed by build target.
   */
  public static ImmutableMap<BuildTarget, NativeLinkable> getNativeLinkables(
      final CxxPlatform cxxPlatform,
      Iterable<? extends NativeLinkable> inputs,
      final Linker.LinkableDepType linkStyle) {
    // Collects every linkable discovered during the traversal, keyed by target.
    final Map<BuildTarget, NativeLinkable> nativeLinkables = Maps.newHashMap();
    for (NativeLinkable nativeLinkable : inputs) {
      nativeLinkables.put(nativeLinkable.getBuildTarget(), nativeLinkable);
    }
    final MutableDirectedGraph<BuildTarget> graph = new MutableDirectedGraph<>();
    AbstractBreadthFirstTraversal<BuildTarget> visitor =
        new AbstractBreadthFirstTraversal<BuildTarget>(nativeLinkables.keySet()) {
          @Override
          public ImmutableSet<BuildTarget> visit(BuildTarget target) {
            NativeLinkable nativeLinkable = Preconditions.checkNotNull(nativeLinkables.get(target));
            graph.addNode(target);

            // We always traverse a rule's exported native linkables.
            Iterable<? extends NativeLinkable> nativeLinkableDeps =
                nativeLinkable.getNativeLinkableExportedDeps(cxxPlatform);

            // If we're linking this dependency statically, we also need to traverse its deps.
            if (linkStyle != Linker.LinkableDepType.SHARED ||
                nativeLinkable.getPreferredLinkage(cxxPlatform) == NativeLinkable.Linkage.STATIC) {
              nativeLinkableDeps =
                  Iterables.concat(
                      nativeLinkableDeps,
                      nativeLinkable.getNativeLinkableDeps(cxxPlatform));
            }

            // Process all the traversable deps.
            ImmutableSet.Builder<BuildTarget> deps = ImmutableSet.builder();
            for (NativeLinkable dep : nativeLinkableDeps) {
              BuildTarget depTarget = dep.getBuildTarget();
              graph.addEdge(target, depTarget);
              deps.add(depTarget);
              nativeLinkables.put(depTarget, dep);
            }
            return deps.build();
          }
        };
    visitor.start();

    // Topologically sort the rules.
    Iterable<BuildTarget> ordered =
        TopologicalSort.sort(graph, Predicates.<BuildTarget>alwaysTrue()).reverse();

    // Return a map of of the results.
    ImmutableMap.Builder<BuildTarget, NativeLinkable> result = ImmutableMap.builder();
    for (BuildTarget target : ordered) {
      result.put(target, nativeLinkables.get(target));
    }
    return result.build();
  }

  /**
   * Resolves the link style to use for a dependency, given its preferred linkage and the
   * requested link style: SHARED and STATIC preferences win over the request; ANY defers to it.
   */
  @VisibleForTesting
  protected static Linker.LinkableDepType getLinkStyle(
      NativeLinkable.Linkage preferredLinkage,
      Linker.LinkableDepType requestedLinkStyle) {
    Linker.LinkableDepType linkStyle;
    switch (preferredLinkage) {
      case SHARED:
        linkStyle = Linker.LinkableDepType.SHARED;
        break;
      case STATIC:
        // A static preference honors the STATIC request exactly, otherwise uses STATIC_PIC.
        linkStyle =
            requestedLinkStyle == Linker.LinkableDepType.STATIC ?
                Linker.LinkableDepType.STATIC :
                Linker.LinkableDepType.STATIC_PIC;
        break;
      case ANY:
        linkStyle = requestedLinkStyle;
        break;
      default:
        throw new IllegalStateException();
    }
    return linkStyle;
  }

  /**
   * Returns the link input of {@code nativeLinkable} using the link style resolved from its
   * preferred linkage and the requested {@code linkStyle}.
   */
  public static NativeLinkableInput getNativeLinkableInput(
      CxxPlatform cxxPlatform,
      Linker.LinkableDepType linkStyle,
      NativeLinkable nativeLinkable) throws NoSuchBuildTargetException {
    NativeLinkable.Linkage link = nativeLinkable.getPreferredLinkage(cxxPlatform);
    return nativeLinkable.getNativeLinkableInput(cxxPlatform, getLinkStyle(link, linkStyle));
  }

  /**
   * Collect up and merge all {@link com.facebook.buck.cxx.NativeLinkableInput} objects from
   * transitively traversing all unbroken dependency chains of
   * {@link com.facebook.buck.cxx.NativeLinkable} objects found via the passed in
   * {@link com.facebook.buck.rules.BuildRule} roots.
   */
  public static NativeLinkableInput getTransitiveNativeLinkableInput(
      CxxPlatform cxxPlatform,
      Iterable<? extends BuildRule> inputs,
      Linker.LinkableDepType depType,
      Predicate<Object> traverse) throws NoSuchBuildTargetException {

    // Get the topologically sorted native linkables.
    ImmutableMap<BuildTarget, NativeLinkable> roots = getNativeLinkableRoots(inputs, traverse);
    ImmutableMap<BuildTarget, NativeLinkable> nativeLinkables =
        getNativeLinkables(cxxPlatform, roots.values(), depType);
    ImmutableList.Builder<NativeLinkableInput> nativeLinkableInputs = ImmutableList.builder();
    for (NativeLinkable nativeLinkable : nativeLinkables.values()) {
      nativeLinkableInputs.add(getNativeLinkableInput(cxxPlatform, depType, nativeLinkable));
    }
    return NativeLinkableInput.concat(nativeLinkableInputs.build());
  }

  /**
   * Collects every {@link NativeLinkable} reachable from {@code inputs} by following both
   * regular and exported native linkable deps, in no particular order.
   */
  public static ImmutableMap<BuildTarget, NativeLinkable> getTransitiveNativeLinkables(
      final CxxPlatform cxxPlatform,
      Iterable<? extends NativeLinkable> inputs) {

    final Map<BuildTarget, NativeLinkable> nativeLinkables = Maps.newHashMap();
    for (NativeLinkable nativeLinkable : inputs) {
      nativeLinkables.put(nativeLinkable.getBuildTarget(), nativeLinkable);
    }

    final MutableDirectedGraph<BuildTarget> graph = new MutableDirectedGraph<>();
    AbstractBreadthFirstTraversal<BuildTarget> visitor =
        new AbstractBreadthFirstTraversal<BuildTarget>(nativeLinkables.keySet()) {
          @Override
          public ImmutableSet<BuildTarget> visit(BuildTarget target) {
            NativeLinkable nativeLinkable = Preconditions.checkNotNull(nativeLinkables.get(target));
            graph.addNode(target);
            ImmutableSet.Builder<BuildTarget> deps = ImmutableSet.builder();
            for (NativeLinkable dep :
                Iterables.concat(
                    nativeLinkable.getNativeLinkableDeps(cxxPlatform),
                    nativeLinkable.getNativeLinkableExportedDeps(cxxPlatform))) {
              BuildTarget depTarget = dep.getBuildTarget();
              graph.addEdge(target, depTarget);
              deps.add(depTarget);
              nativeLinkables.put(depTarget, dep);
            }
            return deps.build();
          }
        };
    visitor.start();

    return ImmutableMap.copyOf(nativeLinkables);
  }

  /**
   * Collect all the shared libraries generated by {@link NativeLinkable}s found by transitively
   * traversing all unbroken dependency chains of {@link com.facebook.buck.cxx.NativeLinkable}
   * objects found via the passed in {@link com.facebook.buck.rules.BuildRule} roots.
   *
   * @return a mapping of library name to the library {@link SourcePath}.
   */
  public static ImmutableSortedMap<String, SourcePath> getTransitiveSharedLibraries(
      CxxPlatform cxxPlatform,
      Iterable<? extends BuildRule> inputs,
      Predicate<Object> traverse) throws NoSuchBuildTargetException {

    ImmutableMap<BuildTarget, NativeLinkable> roots = getNativeLinkableRoots(inputs, traverse);
    ImmutableMap<BuildTarget, NativeLinkable> nativeLinkables =
        getTransitiveNativeLinkables(cxxPlatform, roots.values());

    ImmutableSortedMap.Builder<String, SourcePath> libraries = ImmutableSortedMap.naturalOrder();
    for (NativeLinkable nativeLinkable : nativeLinkables.values()) {
      NativeLinkable.Linkage linkage = nativeLinkable.getPreferredLinkage(cxxPlatform);
      // Statically-preferred linkables contribute no shared libraries.
      if (linkage != NativeLinkable.Linkage.STATIC) {
        libraries.putAll(nativeLinkable.getSharedLibraries(cxxPlatform));
      }
    }
    return libraries.build();
  }

  /**
   * @return the {@link SharedNativeLinkTarget} that can be extracted from {@code object}, if any.
   */
  public static Optional<SharedNativeLinkTarget> getSharedNativeLinkTarget(
      Object object,
      CxxPlatform cxxPlatform) {
    if (object instanceof SharedNativeLinkTarget) {
      return Optional.of((SharedNativeLinkTarget) object);
    }
    if (object instanceof CanProvideSharedNativeLinkTarget) {
      return ((CanProvideSharedNativeLinkTarget) object).getSharedNativeLinkTarget(cxxPlatform);
    }
    return Optional.absent();
  }
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.nimblestudio.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
* <p>
* A studio member is an association of a user from your studio identity source to elevated permissions that they are
* granted in the studio.
* </p>
* <p>
* When you add a user to your studio using the Nimble Studio console, they are given access to the studio's AWS SSO
* application and are given access to log in to the Nimble Studio portal. These users have the permissions provided by
* the studio's user IAM role and do not appear in the studio membership collection. Only studio admins appear in studio
* membership.
* </p>
* <p>
* When you add a user to studio membership with the persona ADMIN, upon logging in to the Nimble Studio portal, they
* are granted permissions specified by the Studio's Admin IAM role.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/nimble-2020-08-01/StudioMembership" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class StudioMembership implements Serializable, Cloneable, StructuredPojo {
/**
 * <p>
 * The ID of the identity store.
 * </p>
 */
private String identityStoreId;
/**
 * <p>
 * The persona. Values correspond to {@link StudioPersona}.
 * </p>
 */
private String persona;
/**
 * <p>
 * The principal ID.
 * </p>
 */
private String principalId;
/**
 * <p>
 * The Active Directory Security Identifier for this user, if available.
 * </p>
 */
private String sid;
/**
 * Sets the ID of the identity store.
 * <p>
 * The ID of the identity store.
 * </p>
 *
 * @param identityStoreId
 *        The ID of the identity store.
 */
public void setIdentityStoreId(String identityStoreId) {
    this.identityStoreId = identityStoreId;
}
/**
 * Returns the ID of the identity store.
 * <p>
 * The ID of the identity store.
 * </p>
 *
 * @return The ID of the identity store.
 */
public String getIdentityStoreId() {
    return this.identityStoreId;
}
/**
 * Fluent setter for the identity store ID.
 * <p>
 * The ID of the identity store.
 * </p>
 *
 * @param identityStoreId
 *        The ID of the identity store.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public StudioMembership withIdentityStoreId(String identityStoreId) {
    setIdentityStoreId(identityStoreId);
    return this;
}
/**
 * Sets the persona.
 * <p>
 * The persona.
 * </p>
 *
 * @param persona
 *        The persona.
 * @see StudioPersona
 */
public void setPersona(String persona) {
    this.persona = persona;
}
/**
 * Returns the persona.
 * <p>
 * The persona.
 * </p>
 *
 * @return The persona.
 * @see StudioPersona
 */
public String getPersona() {
    return this.persona;
}
/**
 * Fluent setter for the persona.
 * <p>
 * The persona.
 * </p>
 *
 * @param persona
 *        The persona.
 * @return Returns a reference to this object so that method calls can be chained together.
 * @see StudioPersona
 */
public StudioMembership withPersona(String persona) {
    setPersona(persona);
    return this;
}
/**
 * Fluent setter for the persona, taking the enum form; stored as its string representation.
 * <p>
 * The persona.
 * </p>
 *
 * @param persona
 *        The persona.
 * @return Returns a reference to this object so that method calls can be chained together.
 * @see StudioPersona
 */
public StudioMembership withPersona(StudioPersona persona) {
    this.persona = persona.toString();
    return this;
}
/**
 * Sets the principal ID.
 * <p>
 * The principal ID.
 * </p>
 *
 * @param principalId
 *        The principal ID.
 */
public void setPrincipalId(String principalId) {
    this.principalId = principalId;
}
/**
 * Returns the principal ID.
 * <p>
 * The principal ID.
 * </p>
 *
 * @return The principal ID.
 */
public String getPrincipalId() {
    return this.principalId;
}
/**
 * Fluent setter for the principal ID.
 * <p>
 * The principal ID.
 * </p>
 *
 * @param principalId
 *        The principal ID.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public StudioMembership withPrincipalId(String principalId) {
    setPrincipalId(principalId);
    return this;
}
/**
 * Sets the Active Directory Security Identifier.
 * <p>
 * The Active Directory Security Identifier for this user, if available.
 * </p>
 *
 * @param sid
 *        The Active Directory Security Identifier for this user, if available.
 */
public void setSid(String sid) {
    this.sid = sid;
}
/**
 * Returns the Active Directory Security Identifier.
 * <p>
 * The Active Directory Security Identifier for this user, if available.
 * </p>
 *
 * @return The Active Directory Security Identifier for this user, if available.
 */
public String getSid() {
    return this.sid;
}
/**
 * Fluent setter for the Active Directory Security Identifier.
 * <p>
 * The Active Directory Security Identifier for this user, if available.
 * </p>
 *
 * @param sid
 *        The Active Directory Security Identifier for this user, if available.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public StudioMembership withSid(String sid) {
    setSid(sid);
    return this;
}
/**
 * Returns a string representation of this object, listing each non-null field.
 * Useful for testing and debugging; sensitive fields would be rendered via a
 * redacting placeholder.
 *
 * @return a string representation of this object.
 * @see java.lang.Object#toString()
 */
@Override
public String toString() {
    // Each field except the last is followed by a comma; this intentionally
    // mirrors the generated-SDK format, including a trailing comma when a
    // later field is null.
    StringBuilder buf = new StringBuilder("{");
    if (getIdentityStoreId() != null) {
        buf.append("IdentityStoreId: ").append(getIdentityStoreId()).append(",");
    }
    if (getPersona() != null) {
        buf.append("Persona: ").append(getPersona()).append(",");
    }
    if (getPrincipalId() != null) {
        buf.append("PrincipalId: ").append(getPrincipalId()).append(",");
    }
    if (getSid() != null) {
        buf.append("Sid: ").append(getSid());
    }
    return buf.append("}").toString();
}
/**
 * Compares this membership to another object field by field. Two memberships are
 * equal when their identity store IDs, personas, principal IDs, and SIDs are all
 * pairwise equal (or both null).
 */
@Override
public boolean equals(Object obj) {
    if (this == obj) {
        return true;
    }
    // instanceof is false for null, so no separate null check is needed.
    if (!(obj instanceof StudioMembership)) {
        return false;
    }
    StudioMembership that = (StudioMembership) obj;
    // For each field: unequal when exactly one side is null, or both are
    // non-null and not equals().
    if (that.getIdentityStoreId() == null ? this.getIdentityStoreId() != null
            : !that.getIdentityStoreId().equals(this.getIdentityStoreId())) {
        return false;
    }
    if (that.getPersona() == null ? this.getPersona() != null
            : !that.getPersona().equals(this.getPersona())) {
        return false;
    }
    if (that.getPrincipalId() == null ? this.getPrincipalId() != null
            : !that.getPrincipalId().equals(this.getPrincipalId())) {
        return false;
    }
    if (that.getSid() == null ? this.getSid() != null
            : !that.getSid().equals(this.getSid())) {
        return false;
    }
    return true;
}
/**
 * Computes a hash code from the same fields used by {@link #equals(Object)}:
 * identity store ID, persona, principal ID, and SID.
 */
@Override
public int hashCode() {
    final int prime = 31;
    int result = 1;
    result = prime * result + (getIdentityStoreId() == null ? 0 : getIdentityStoreId().hashCode());
    result = prime * result + (getPersona() == null ? 0 : getPersona().hashCode());
    result = prime * result + (getPrincipalId() == null ? 0 : getPrincipalId().hashCode());
    result = prime * result + (getSid() == null ? 0 : getSid().hashCode());
    return result;
}
/**
 * Creates a shallow copy of this object via {@link Object#clone()}.
 *
 * @return a clone of this {@code StudioMembership}.
 */
@Override
public StudioMembership clone() {
    try {
        return (StudioMembership) super.clone();
    } catch (CloneNotSupportedException e) {
        // Cannot happen: this class implements Cloneable.
        throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() even though we're Cloneable!", e);
    }
}
/**
 * Marshalls this object using the generated {@code StudioMembershipMarshaller}.
 * Internal SDK API — not intended for use by client code.
 *
 * @param protocolMarshaller
 *        the protocol marshaller to write this object's fields to.
 */
@com.amazonaws.annotation.SdkInternalApi
@Override
public void marshall(ProtocolMarshaller protocolMarshaller) {
    com.amazonaws.services.nimblestudio.model.transform.StudioMembershipMarshaller.getInstance().marshall(this, protocolMarshaller);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
getCoveredAnnotations() contains code adapted from the UIMA Subiterator class.
*/
package org.apache.uima.fit.util;
import static java.util.Arrays.asList;
import java.util.AbstractCollection;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import org.apache.uima.cas.ArrayFS;
import org.apache.uima.cas.BooleanArrayFS;
import org.apache.uima.cas.ByteArrayFS;
import org.apache.uima.cas.CAS;
import org.apache.uima.cas.DoubleArrayFS;
import org.apache.uima.cas.FSIndexRepository;
import org.apache.uima.cas.FSIterator;
import org.apache.uima.cas.Feature;
import org.apache.uima.cas.FeatureStructure;
import org.apache.uima.cas.FloatArrayFS;
import org.apache.uima.cas.IntArrayFS;
import org.apache.uima.cas.LongArrayFS;
import org.apache.uima.cas.ShortArrayFS;
import org.apache.uima.cas.StringArrayFS;
import org.apache.uima.cas.Type;
import org.apache.uima.cas.TypeSystem;
import org.apache.uima.cas.text.AnnotationFS;
import org.apache.uima.cas.text.AnnotationIndex;
import org.apache.uima.jcas.JCas;
import org.apache.uima.jcas.cas.BooleanArray;
import org.apache.uima.jcas.cas.ByteArray;
import org.apache.uima.jcas.cas.DoubleArray;
import org.apache.uima.jcas.cas.FSArray;
import org.apache.uima.jcas.cas.FSList;
import org.apache.uima.jcas.cas.FloatArray;
import org.apache.uima.jcas.cas.FloatList;
import org.apache.uima.jcas.cas.IntegerArray;
import org.apache.uima.jcas.cas.IntegerList;
import org.apache.uima.jcas.cas.LongArray;
import org.apache.uima.jcas.cas.NonEmptyFSList;
import org.apache.uima.jcas.cas.NonEmptyFloatList;
import org.apache.uima.jcas.cas.NonEmptyIntegerList;
import org.apache.uima.jcas.cas.NonEmptyStringList;
import org.apache.uima.jcas.cas.ShortArray;
import org.apache.uima.jcas.cas.StringArray;
import org.apache.uima.jcas.cas.StringList;
import org.apache.uima.jcas.cas.TOP;
/**
* Bridge between Java {@link Collection Collections} from different representations of collections
* of UIMA {@link FeatureStructure FeatureStructures}.
*/
public abstract class FSCollectionFactory {

  private FSCollectionFactory() {
    // No instances.
  }

  /**
   * Create a {@link Collection} of the given type of feature structures. This collection is backed
   * by the CAS, either via an {@link CAS#getAnnotationIndex(Type)} or
   * {@link FSIndexRepository#getAllIndexedFS(Type)}.
   *
   * @param cas
   *          the CAS to select from.
   * @param type
   *          the type of feature structures to select. All sub-types are returned as well.
   * @return a {@link Collection} of the given type of feature structures backed live by the CAS.
   * @see <a href="package-summary.html#SortOrder">Order of selected feature structures</a>
   * @deprecated Use {@code cas.select(type).asList()}
   */
  @Deprecated
  @SuppressWarnings({ "unchecked", "rawtypes" })
  public static List<FeatureStructure> create(CAS cas, Type type) {
    // If the type is an annotation type, we can use the annotation index, which directly
    // provides us with its size. If not, we have to use getAllIndexedFS() which we have to
    // scan from beginning to end in order to determine its size.
    TypeSystem ts = cas.getTypeSystem();
    if (ts.subsumes(cas.getAnnotationType(), type)) {
      return (List) create(cas.getAnnotationIndex(type));
    } else {
      return (List) cas.select(type).asList();
    }
  }

  /**
   * Convert an {@link FSIterator} to a {@link Collection}.
   *
   * @param <T>
   *          the feature structure type
   * @param aIterator
   *          the iterator to convert.
   * @return the wrapped iterator.
   * @see <a href="package-summary.html#SortOrder">Order of selected feature structures</a>
   */
  public static <T extends FeatureStructure> Collection<T> create(FSIterator<T> aIterator) {
    return new FSIteratorAdapter<T>(aIterator);
  }

  /**
   * Convert an {@link AnnotationIndex} to a {@link Collection}.
   *
   * @param <T>
   *          the feature structure type
   * @param aIndex
   *          the index to convert.
   * @return the wrapped index.
   * @see <a href="package-summary.html#SortOrder">Order of selected feature structures</a>
   * @deprecated Use {@code index.select().asList()}
   */
  @Deprecated
  public static <T extends AnnotationFS> List<T> create(AnnotationIndex<T> aIndex) {
    return aIndex.select().asList();
  }

  /**
   * Convert an {@link ArrayFS} to a {@link Collection}.
   *
   * @param aArray
   *          the array to convert.
   * @return a new collection containing the same feature structures as the provided array.
   * @see <a href="package-summary.html#SortOrder">Order of selected feature structures</a>
   */
  public static <T extends FeatureStructure> List<T> create(ArrayFS<T> aArray) {
    // Null type means: no filtering, keep every non-null element.
    return create(aArray, (Type) null);
  }

  /**
   * Fetch all annotations of the given type or its sub-types from the given FS array.
   *
   * @param <T>
   *          the JCas type.
   * @param aArray
   *          the FS array
   * @param aType
   *          the JCas wrapper class.
   * @return a new collection of all feature structures of the given type.
   */
  @SuppressWarnings({ "unchecked", "rawtypes" })
  public static <T extends TOP> List<T> create(ArrayFS aArray, Class<T> aType) {
    // Resolve the JCas wrapper class to its CAS type, then delegate.
    return create(aArray, CasUtil.getType(aArray.getCAS(), aType));
  }

  /**
   * Fetch all annotations of the given type or its sub-types from the given FS array.
   *
   * @param aArray
   *          the FS array
   * @param aType
   *          the CAS type. May be {@code null} to accept all elements.
   * @return a new collection of all feature structures of the given type.
   */
  public static <T extends FeatureStructure> List<T> create(ArrayFS<T> aArray, Type aType) {
    TypeSystem ts = aArray.getCAS().getTypeSystem();
    List<FeatureStructure> data = new ArrayList<FeatureStructure>(aArray.size());
    for (int i = 0; i < aArray.size(); i++) {
      FeatureStructure value = aArray.get(i);
      // Skip null slots; when a type filter is given, keep only that type and its sub-types.
      if (value != null && (aType == null || ts.subsumes(aType, value.getType()))) {
        data.add(value);
      }
    }
    // Returns a fixed-size list (Arrays.asList over a snapshot array).
    return (List<T>) asList(data.toArray(new FeatureStructure[data.size()]));
  }

  // -------------------------------------------------------------------------
  // Array creation: each factory allocates a CAS-level (XxxArrayFS) or
  // JCas-level (XxxArray) array sized to the input and delegates to the
  // corresponding fill method below.
  // -------------------------------------------------------------------------

  public static <T extends FeatureStructure> ArrayFS<T> createArrayFS(CAS aCas,
          Collection<T> aCollection) {
    return fillArrayFS(aCas.createArrayFS(aCollection.size()), aCollection);
  }

  public static <T extends FeatureStructure> ArrayFS<T> createArrayFS(CAS aCas, T... aArray) {
    return fillArrayFS(aCas.createArrayFS(aArray.length), asList(aArray));
  }

  public static <T extends FeatureStructure> FSArray<T> createFSArray(JCas aJCas,
          Collection<T> aCollection) {
    return fillArray(new FSArray<T>(aJCas, aCollection.size()), aCollection);
  }

  public static <T extends FeatureStructure> FSArray<T> createFSArray(JCas aJCas, T... aArray) {
    return fillArray(new FSArray<T>(aJCas, aArray.length), asList(aArray));
  }

  public static BooleanArrayFS createBooleanArrayFS(CAS aCas, Collection<Boolean> aCollection) {
    return fillArrayFS(aCas.createBooleanArrayFS(aCollection.size()), aCollection);
  }

  public static BooleanArrayFS createBooleanArrayFS(CAS aCas, boolean... aArray) {
    return fillArrayFS(aCas.createBooleanArrayFS(aArray.length), aArray);
  }

  public static BooleanArray createBooleanArray(JCas aJCas, Collection<Boolean> aCollection) {
    return fillArray(new BooleanArray(aJCas, aCollection.size()), aCollection);
  }

  public static BooleanArray createBooleanArray(JCas aJCas, boolean... aArray) {
    return fillArray(new BooleanArray(aJCas, aArray.length), aArray);
  }

  public static ByteArrayFS createByteArrayFS(CAS aCas, Collection<Byte> aCollection) {
    return fillArrayFS(aCas.createByteArrayFS(aCollection.size()), aCollection);
  }

  public static ByteArrayFS createByteArrayFS(CAS aCas, byte... aArray) {
    return fillArrayFS(aCas.createByteArrayFS(aArray.length), aArray);
  }

  public static ByteArray createByteArray(JCas aJCas, Collection<Byte> aCollection) {
    return fillArray(new ByteArray(aJCas, aCollection.size()), aCollection);
  }

  public static ByteArray createByteArray(JCas aJCas, byte... aArray) {
    return fillArray(new ByteArray(aJCas, aArray.length), aArray);
  }

  public static DoubleArrayFS createDoubleArrayFS(CAS aCas, Collection<Double> aCollection) {
    return fillArrayFS(aCas.createDoubleArrayFS(aCollection.size()), aCollection);
  }

  public static DoubleArrayFS createDoubleArrayFS(CAS aCas, double... aArray) {
    return fillArrayFS(aCas.createDoubleArrayFS(aArray.length), aArray);
  }

  public static DoubleArray createDoubleArray(JCas aJCas, Collection<Double> aCollection) {
    return fillArray(new DoubleArray(aJCas, aCollection.size()), aCollection);
  }

  public static DoubleArray createDoubleArray(JCas aJCas, double... aArray) {
    return fillArray(new DoubleArray(aJCas, aArray.length), aArray);
  }

  public static FloatArrayFS createFloatArrayFS(CAS aCas, Collection<Float> aCollection) {
    return fillArrayFS(aCas.createFloatArrayFS(aCollection.size()), aCollection);
  }

  public static FloatArrayFS createFloatArrayFS(CAS aCas, float... aArray) {
    return fillArrayFS(aCas.createFloatArrayFS(aArray.length), aArray);
  }

  public static FloatArray createFloatArray(JCas aJCas, Collection<Float> aCollection) {
    return fillArray(new FloatArray(aJCas, aCollection.size()), aCollection);
  }

  public static FloatArray createFloatArray(JCas aJCas, float... aArray) {
    return fillArray(new FloatArray(aJCas, aArray.length), aArray);
  }

  public static IntArrayFS createIntArrayFS(CAS aCas, Collection<Integer> aCollection) {
    return fillArrayFS(aCas.createIntArrayFS(aCollection.size()), aCollection);
  }

  public static IntArrayFS createIntArrayFS(CAS aCas, int... aArray) {
    return fillArrayFS(aCas.createIntArrayFS(aArray.length), aArray);
  }

  public static IntegerArray createIntArray(JCas aJCas, Collection<Integer> aCollection) {
    return fillArray(new IntegerArray(aJCas, aCollection.size()), aCollection);
  }

  public static IntegerArray createIntArray(JCas aJCas, int... aArray) {
    return fillArray(new IntegerArray(aJCas, aArray.length), aArray);
  }

  public static LongArrayFS createLongArrayFS(CAS aCas, Collection<Long> aCollection) {
    return fillArrayFS(aCas.createLongArrayFS(aCollection.size()), aCollection);
  }

  public static LongArrayFS createLongArrayFS(CAS aCas, long... aArray) {
    return fillArrayFS(aCas.createLongArrayFS(aArray.length), aArray);
  }

  public static LongArray createLongArray(JCas aJCas, Collection<Long> aCollection) {
    return fillArray(new LongArray(aJCas, aCollection.size()), aCollection);
  }

  public static LongArray createLongArray(JCas aJCas, long... aArray) {
    return fillArray(new LongArray(aJCas, aArray.length), aArray);
  }

  public static ShortArrayFS createShortArrayFS(CAS aCas, Collection<Short> aCollection) {
    return fillArrayFS(aCas.createShortArrayFS(aCollection.size()), aCollection);
  }

  public static ShortArrayFS createShortArrayFS(CAS aCas, short... aArray) {
    return fillArrayFS(aCas.createShortArrayFS(aArray.length), aArray);
  }

  public static ShortArray createShortArray(JCas aJCas, Collection<Short> aCollection) {
    return fillArray(new ShortArray(aJCas, aCollection.size()), aCollection);
  }

  public static ShortArray createShortArray(JCas aJCas, short... aArray) {
    return fillArray(new ShortArray(aJCas, aArray.length), aArray);
  }

  public static StringArrayFS createStringArrayFS(CAS aCas, Collection<String> aCollection) {
    return fillArrayFS(aCas.createStringArrayFS(aCollection.size()), aCollection);
  }

  public static StringArrayFS createStringArrayFS(CAS aCas, String... aArray) {
    return fillArrayFS(aCas.createStringArrayFS(aArray.length), aArray);
  }

  public static StringArray createStringArray(JCas aJCas, Collection<String> aCollection) {
    return fillArray(new StringArray(aJCas, aCollection.size()), aCollection);
  }

  public static StringArray createStringArray(JCas aJCas, String... aArray) {
    return fillArray(new StringArray(aJCas, aArray.length), aArray);
  }

  // -------------------------------------------------------------------------
  // Array filling: the Iterable variants copy element-by-element; the
  // varargs/primitive-array variants bulk-copy exactly aArrayFs.size()
  // elements, so the source is expected to be at least that long.
  // -------------------------------------------------------------------------

  public static <T extends FeatureStructure> FSArray<T> fillArray(FSArray<T> aArray,
          Iterable<? extends T> aValues) {
    return (FSArray<T>) fillArrayFS(aArray, aValues);
  }

  public static <T extends FeatureStructure> FSArray<T> fillArray(FSArray<T> aArray,
          FeatureStructure... aValues) {
    return (FSArray<T>) fillArrayFS(aArray, aValues);
  }

  public static <T extends FeatureStructure> ArrayFS<T> fillArrayFS(ArrayFS<T> aArrayFs,
          Iterable<? extends T> aCollection) {
    int i = 0;
    for (T fs : aCollection) {
      aArrayFs.set(i, fs);
      i++;
    }
    return aArrayFs;
  }

  public static <T extends FeatureStructure> ArrayFS<T> fillArrayFS(ArrayFS<T> aArrayFs,
          FeatureStructure... aArray) {
    aArrayFs.copyFromArray(aArray, 0, 0, aArrayFs.size());
    return aArrayFs;
  }

  public static BooleanArray fillArray(BooleanArray aArray, Iterable<Boolean> aValues) {
    return (BooleanArray) fillArrayFS(aArray, aValues);
  }

  public static BooleanArray fillArray(BooleanArray aArray, boolean... aValues) {
    return (BooleanArray) fillArrayFS(aArray, aValues);
  }

  public static BooleanArrayFS fillArrayFS(BooleanArrayFS aArrayFs, Iterable<Boolean> aValues) {
    int i = 0;
    for (Boolean fs : aValues) {
      aArrayFs.set(i, fs);
      i++;
    }
    return aArrayFs;
  }

  public static BooleanArrayFS fillArrayFS(BooleanArrayFS aArrayFs, boolean... aValues) {
    aArrayFs.copyFromArray(aValues, 0, 0, aArrayFs.size());
    return aArrayFs;
  }

  public static ByteArray fillArray(ByteArray aArray, Iterable<Byte> aValues) {
    return (ByteArray) fillArrayFS(aArray, aValues);
  }

  public static ByteArray fillArray(ByteArray aArray, byte... aValues) {
    return (ByteArray) fillArrayFS(aArray, aValues);
  }

  public static ByteArrayFS fillArrayFS(ByteArrayFS aArrayFs, Iterable<Byte> aValues) {
    int i = 0;
    for (Byte fs : aValues) {
      aArrayFs.set(i, fs);
      i++;
    }
    return aArrayFs;
  }

  public static ByteArrayFS fillArrayFS(ByteArrayFS aArrayFs, byte... aValues) {
    aArrayFs.copyFromArray(aValues, 0, 0, aArrayFs.size());
    return aArrayFs;
  }

  public static DoubleArray fillArray(DoubleArray aArray, Iterable<Double> aValues) {
    return (DoubleArray) fillArrayFS(aArray, aValues);
  }

  public static DoubleArray fillArray(DoubleArray aArray, double... aValues) {
    return (DoubleArray) fillArrayFS(aArray, aValues);
  }

  public static DoubleArrayFS fillArrayFS(DoubleArrayFS aArrayFs, Iterable<Double> aValues) {
    int i = 0;
    for (Double fs : aValues) {
      aArrayFs.set(i, fs);
      i++;
    }
    return aArrayFs;
  }

  public static DoubleArrayFS fillArrayFS(DoubleArrayFS aArrayFs, double... aValues) {
    aArrayFs.copyFromArray(aValues, 0, 0, aArrayFs.size());
    return aArrayFs;
  }

  public static FloatArray fillArray(FloatArray aArray, Iterable<Float> aValues) {
    return (FloatArray) fillArrayFS(aArray, aValues);
  }

  public static FloatArray fillArray(FloatArray aArray, float... aValues) {
    return (FloatArray) fillArrayFS(aArray, aValues);
  }

  public static FloatArrayFS fillArrayFS(FloatArrayFS aArrayFs, Iterable<Float> aValues) {
    int i = 0;
    for (Float fs : aValues) {
      aArrayFs.set(i, fs);
      i++;
    }
    return aArrayFs;
  }

  public static FloatArrayFS fillArrayFS(FloatArrayFS aArrayFs, float... aValues) {
    aArrayFs.copyFromArray(aValues, 0, 0, aArrayFs.size());
    return aArrayFs;
  }

  public static IntegerArray fillArray(IntegerArray aArray, Iterable<Integer> aValues) {
    return (IntegerArray) fillArrayFS(aArray, aValues);
  }

  public static IntegerArray fillArray(IntegerArray aArray, int... aValues) {
    return (IntegerArray) fillArrayFS(aArray, aValues);
  }

  public static IntArrayFS fillArrayFS(IntArrayFS aArrayFs, Iterable<Integer> aValues) {
    int i = 0;
    for (Integer fs : aValues) {
      aArrayFs.set(i, fs);
      i++;
    }
    return aArrayFs;
  }

  public static IntArrayFS fillArrayFS(IntArrayFS aArrayFs, int... aValues) {
    aArrayFs.copyFromArray(aValues, 0, 0, aArrayFs.size());
    return aArrayFs;
  }

  public static LongArray fillArray(LongArray aArray, Iterable<Long> aValues) {
    return (LongArray) fillArrayFS(aArray, aValues);
  }

  public static LongArray fillArray(LongArray aArray, long... aValues) {
    return (LongArray) fillArrayFS(aArray, aValues);
  }

  public static LongArrayFS fillArrayFS(LongArrayFS aArrayFs, Iterable<Long> aValues) {
    int i = 0;
    for (Long fs : aValues) {
      aArrayFs.set(i, fs);
      i++;
    }
    return aArrayFs;
  }

  public static LongArrayFS fillArrayFS(LongArrayFS aArrayFs, long... aValues) {
    aArrayFs.copyFromArray(aValues, 0, 0, aArrayFs.size());
    return aArrayFs;
  }

  public static ShortArray fillArray(ShortArray aArray, Iterable<Short> aValues) {
    return (ShortArray) fillArrayFS(aArray, aValues);
  }

  public static ShortArray fillArray(ShortArray aArray, short... aValues) {
    return (ShortArray) fillArrayFS(aArray, aValues);
  }

  public static ShortArrayFS fillArrayFS(ShortArrayFS aArrayFs, Iterable<Short> aValues) {
    int i = 0;
    for (Short fs : aValues) {
      aArrayFs.set(i, fs);
      i++;
    }
    return aArrayFs;
  }

  public static ShortArrayFS fillArrayFS(ShortArrayFS aArrayFs, short... aValues) {
    aArrayFs.copyFromArray(aValues, 0, 0, aArrayFs.size());
    return aArrayFs;
  }

  public static StringArray fillArray(StringArray aArray, Iterable<String> aValues) {
    return (StringArray) fillArrayFS(aArray, aValues);
  }

  public static StringArray fillArray(StringArray aArray, String... aValues) {
    return (StringArray) fillArrayFS(aArray, aValues);
  }

  public static StringArrayFS fillArrayFS(StringArrayFS aArrayFs, Iterable<String> aValues) {
    int i = 0;
    for (String fs : aValues) {
      aArrayFs.set(i, fs);
      i++;
    }
    return aArrayFs;
  }

  public static StringArrayFS fillArrayFS(StringArrayFS aArrayFs, String... aValues) {
    aArrayFs.copyFromArray(aValues, 0, 0, aArrayFs.size());
    return aArrayFs;
  }

  // -------------------------------------------------------------------------
  // Conversion of built-in UIMA lists (FSList, StringList, IntegerList,
  // FloatList) to Java lists. Traversal walks the head/tail chain until a
  // non-NonEmpty node (i.e. the empty-list terminator) is reached.
  // -------------------------------------------------------------------------

  // Using TOP here because FSList is only available in the JCas.
  public static <T extends TOP> Collection<T> create(FSList<T> aList) {
    return create(aList, (Type) null);
  }

  /**
   * Fetch all annotations of the given type or its sub-types from the given FS list.
   *
   * @param <T>
   *          the JCas type.
   * @param aList
   *          the FS list
   * @param aType
   *          the JCas wrapper class.
   * @return a new collection of all feature structures of the given type.
   */
  public static <T extends TOP> Collection<T> create(FSList<T> aList, Class<? extends T> aType) {
    return create(aList, CasUtil.getType(aList.getCAS(), aType));
  }

  // Using TOP here because FSList is only available in the JCas.
  public static <T extends TOP> List<T> create(FSList<T> aList, Type type) {
    TypeSystem ts = aList.getCAS().getTypeSystem();
    List<FeatureStructure> data = new ArrayList<FeatureStructure>();
    FSList<T> i = aList;
    while (i instanceof NonEmptyFSList) {
      NonEmptyFSList<T> l = (NonEmptyFSList<T>) i;
      TOP value = l.getHead();
      // Skip null heads; when a type filter is given, keep only matching elements.
      if (value != null && (type == null || ts.subsumes(type, value.getType()))) {
        data.add(l.getHead());
      }
      i = l.getTail();
    }
    // Returns a fixed-size list (Arrays.asList over a snapshot array).
    return (List<T>) asList(data.toArray(new TOP[data.size()]));
  }

  public static List<String> create(StringList aList) {
    List<String> data = new ArrayList<String>();
    StringList i = aList;
    while (i instanceof NonEmptyStringList) {
      NonEmptyStringList l = (NonEmptyStringList) i;
      data.add(l.getHead());
      i = l.getTail();
    }
    return asList(data.toArray(new String[data.size()]));
  }

  public static List<Integer> create(IntegerList aList) {
    List<Integer> data = new ArrayList<Integer>();
    IntegerList i = aList;
    while (i instanceof NonEmptyIntegerList) {
      NonEmptyIntegerList l = (NonEmptyIntegerList) i;
      data.add(l.getHead());
      i = l.getTail();
    }
    return asList(data.toArray(new Integer[data.size()]));
  }

  public static List<Float> create(FloatList aList) {
    List<Float> data = new ArrayList<Float>();
    FloatList i = aList;
    while (i instanceof NonEmptyFloatList) {
      NonEmptyFloatList l = (NonEmptyFloatList) i;
      data.add(l.getHead());
      i = l.getTail();
    }
    return asList(data.toArray(new Float[data.size()]));
  }

  // -------------------------------------------------------------------------
  // Creation of UIMA linked lists from Java collections / arrays. Each loop
  // builds a chain of non-empty nodes; the last node's tail is set to the
  // empty-list type. Null input yields null; empty input yields the empty
  // list only.
  // -------------------------------------------------------------------------

  public static <T extends TOP> FSList<T> createFSList(JCas aJCas, Collection<T> aCollection) {
    return createFSList(aJCas.getCas(), aCollection);
  }

  public static <T extends TOP> FSList<T> createFSList(CAS aCas, T... aValues) {
    return createFSList(aCas, asList(aValues));
  }

  public static <T extends TOP> FSList<T> createFSList(CAS aCas, Collection<T> aValues) {
    if (aValues == null) {
      return null;
    }
    TypeSystem ts = aCas.getTypeSystem();
    if (aValues.size() == 0) {
      return aCas.emptyFSList();
    }
    Type nonEmptyType = ts.getType(CAS.TYPE_NAME_NON_EMPTY_FS_LIST);
    Feature headFeature = nonEmptyType.getFeatureByBaseName(CAS.FEATURE_BASE_NAME_HEAD);
    Feature tailFeature = nonEmptyType.getFeatureByBaseName(CAS.FEATURE_BASE_NAME_TAIL);
    FeatureStructure head = aCas.createFS(nonEmptyType);
    // 'list' keeps the first node so it can be returned after the chain is built.
    FeatureStructure list = head;
    Iterator<? extends FeatureStructure> i = aValues.iterator();
    while (i.hasNext()) {
      head.setFeatureValue(headFeature, i.next());
      if (i.hasNext()) {
        FeatureStructure tail = aCas.createFS(nonEmptyType);
        head.setFeatureValue(tailFeature, tail);
        head = tail;
      } else {
        head.setFeatureValue(tailFeature, aCas.emptyFSList());
      }
    }
    return (FSList<T>) list;
  }

  public static FloatList createFloatList(JCas aJCas, float... aValues) {
    return createFloatList(aJCas.getCas(), aValues);
  }

  public static <T extends FeatureStructure> T createFloatList(CAS aCas, float... aValues) {
    if (aValues == null) {
      return null;
    }
    TypeSystem ts = aCas.getTypeSystem();
    Type emptyType = ts.getType(CAS.TYPE_NAME_EMPTY_FLOAT_LIST);
    if (aValues.length == 0) {
      return aCas.createFS(emptyType);
    }
    Type nonEmptyType = ts.getType(CAS.TYPE_NAME_NON_EMPTY_FLOAT_LIST);
    Feature headFeature = nonEmptyType.getFeatureByBaseName(CAS.FEATURE_BASE_NAME_HEAD);
    Feature tailFeature = nonEmptyType.getFeatureByBaseName(CAS.FEATURE_BASE_NAME_TAIL);
    FeatureStructure head = aCas.createFS(nonEmptyType);
    FeatureStructure list = head;
    int i = 0;
    while (i < aValues.length) {
      head.setFloatValue(headFeature, aValues[i]);
      i++;
      if (i < aValues.length) {
        FeatureStructure tail = aCas.createFS(nonEmptyType);
        head.setFeatureValue(tailFeature, tail);
        head = tail;
      } else {
        head.setFeatureValue(tailFeature, aCas.createFS(emptyType));
      }
    }
    return (T) list;
  }

  public static <T extends FeatureStructure> T createFloatList(CAS aCas,
          Collection<Float> aValues) {
    if (aValues == null) {
      return null;
    }
    TypeSystem ts = aCas.getTypeSystem();
    Type emptyType = ts.getType(CAS.TYPE_NAME_EMPTY_FLOAT_LIST);
    if (aValues.size() == 0) {
      return aCas.createFS(emptyType);
    }
    Type nonEmptyType = ts.getType(CAS.TYPE_NAME_NON_EMPTY_FLOAT_LIST);
    Feature headFeature = nonEmptyType.getFeatureByBaseName(CAS.FEATURE_BASE_NAME_HEAD);
    Feature tailFeature = nonEmptyType.getFeatureByBaseName(CAS.FEATURE_BASE_NAME_TAIL);
    FeatureStructure head = aCas.createFS(nonEmptyType);
    FeatureStructure list = head;
    Iterator<Float> i = aValues.iterator();
    while (i.hasNext()) {
      head.setFloatValue(headFeature, i.next());
      if (i.hasNext()) {
        FeatureStructure tail = aCas.createFS(nonEmptyType);
        head.setFeatureValue(tailFeature, tail);
        head = tail;
      } else {
        head.setFeatureValue(tailFeature, aCas.createFS(emptyType));
      }
    }
    return (T) list;
  }

  public static FloatList createFloatList(JCas aJCas, Collection<Float> aCollection) {
    return createFloatList(aJCas.getCas(), aCollection);
  }

  public static IntegerList createIntegerList(JCas aJCas, int... aValues) {
    return createIntegerList(aJCas.getCas(), aValues);
  }

  public static <T extends FeatureStructure> T createIntegerList(CAS aCas, int... aValues) {
    if (aValues == null) {
      return null;
    }
    TypeSystem ts = aCas.getTypeSystem();
    Type emptyType = ts.getType(CAS.TYPE_NAME_EMPTY_INTEGER_LIST);
    if (aValues.length == 0) {
      return aCas.createFS(emptyType);
    }
    Type nonEmptyType = ts.getType(CAS.TYPE_NAME_NON_EMPTY_INTEGER_LIST);
    Feature headFeature = nonEmptyType.getFeatureByBaseName(CAS.FEATURE_BASE_NAME_HEAD);
    Feature tailFeature = nonEmptyType.getFeatureByBaseName(CAS.FEATURE_BASE_NAME_TAIL);
    FeatureStructure head = aCas.createFS(nonEmptyType);
    FeatureStructure list = head;
    int i = 0;
    while (i < aValues.length) {
      head.setIntValue(headFeature, aValues[i]);
      i++;
      if (i < aValues.length) {
        FeatureStructure tail = aCas.createFS(nonEmptyType);
        head.setFeatureValue(tailFeature, tail);
        head = tail;
      } else {
        head.setFeatureValue(tailFeature, aCas.createFS(emptyType));
      }
    }
    return (T) list;
  }

  public static <T extends FeatureStructure> T createIntegerList(CAS aCas,
          Collection<Integer> aValues) {
    if (aValues == null) {
      return null;
    }
    TypeSystem ts = aCas.getTypeSystem();
    Type emptyType = ts.getType(CAS.TYPE_NAME_EMPTY_INTEGER_LIST);
    if (aValues.size() == 0) {
      return aCas.createFS(emptyType);
    }
    Type nonEmptyType = ts.getType(CAS.TYPE_NAME_NON_EMPTY_INTEGER_LIST);
    Feature headFeature = nonEmptyType.getFeatureByBaseName(CAS.FEATURE_BASE_NAME_HEAD);
    Feature tailFeature = nonEmptyType.getFeatureByBaseName(CAS.FEATURE_BASE_NAME_TAIL);
    FeatureStructure head = aCas.createFS(nonEmptyType);
    FeatureStructure list = head;
    Iterator<Integer> i = aValues.iterator();
    while (i.hasNext()) {
      head.setIntValue(headFeature, i.next());
      if (i.hasNext()) {
        FeatureStructure tail = aCas.createFS(nonEmptyType);
        head.setFeatureValue(tailFeature, tail);
        head = tail;
      } else {
        head.setFeatureValue(tailFeature, aCas.createFS(emptyType));
      }
    }
    return (T) list;
  }

  public static IntegerList createIntegerList(JCas aJCas, Collection<Integer> aCollection) {
    return createIntegerList(aJCas.getCas(), aCollection);
  }

  public static StringList createStringList(JCas aJCas, String... aValues) {
    return createStringList(aJCas.getCas(), aValues);
  }

  public static <T extends FeatureStructure> T createStringList(CAS aCas, String... aValues) {
    return createStringList(aCas, asList(aValues));
  }

  public static <T extends FeatureStructure> T createStringList(CAS aCas,
          Collection<String> aValues) {
    if (aValues == null) {
      return null;
    }
    TypeSystem ts = aCas.getTypeSystem();
    Type emptyType = ts.getType(CAS.TYPE_NAME_EMPTY_STRING_LIST);
    if (aValues.size() == 0) {
      return aCas.createFS(emptyType);
    }
    Type nonEmptyType = ts.getType(CAS.TYPE_NAME_NON_EMPTY_STRING_LIST);
    Feature headFeature = nonEmptyType.getFeatureByBaseName(CAS.FEATURE_BASE_NAME_HEAD);
    Feature tailFeature = nonEmptyType.getFeatureByBaseName(CAS.FEATURE_BASE_NAME_TAIL);
    FeatureStructure head = aCas.createFS(nonEmptyType);
    FeatureStructure list = head;
    Iterator<String> i = aValues.iterator();
    while (i.hasNext()) {
      head.setStringValue(headFeature, i.next());
      if (i.hasNext()) {
        FeatureStructure tail = aCas.createFS(nonEmptyType);
        head.setFeatureValue(tailFeature, tail);
        head = tail;
      } else {
        head.setFeatureValue(tailFeature, aCas.createFS(emptyType));
      }
    }
    return (T) list;
  }

  public static StringList createStringList(JCas aJCas, Collection<String> aCollection) {
    return createStringList(aJCas.getCas(), aCollection);
  }

  /**
   * Adapter exposing an {@link FSIterator} as a read-only {@link Collection}.
   * The iterator is copied on construction, so later changes to the original
   * iterator's position do not affect this collection.
   */
  private static class FSIteratorAdapter<T extends FeatureStructure> extends AbstractCollection<T> {
    // Lazily computed element count; -1 means "not yet computed".
    private int sizeCache = -1;

    private final FSIterator<T> index;

    public FSIteratorAdapter(final FSIterator<T> aIterator) {
      index = aIterator.copy();
      index.moveToFirst();
    }

    @Override
    public Iterator<T> iterator() {
      return index.copy();
    }

    @Override
    public int size() {
      // Unfortunately FSIterator does not expose the sizes of its internal collection,
      // neither the current position although FSIteratorAggregate has a private field
      // with that information.
      // NOTE(review): double-checked locking on a non-volatile int. The worst case is a
      // redundant recomputation of the same value, not an incorrect result — confirm
      // this is acceptable before relying on it from multiple threads.
      if (sizeCache == -1) {
        synchronized (this) {
          if (sizeCache == -1) {
            FSIterator<T> clone = index.copy();
            clone.moveToFirst();
            sizeCache = 0;
            while (clone.isValid()) {
              sizeCache++;
              clone.moveToNext();
            }
          }
        }
      }
      return sizeCache;
    }
  }

  /**
   * Adapter exposing an {@link AnnotationIndex} as a read-only {@link Collection}
   * using snapshot iterators.
   * NOTE(review): not referenced anywhere within this class as shown; possibly kept
   * for compatibility — confirm before removing.
   */
  private static class AnnotationIndexAdapter<T extends AnnotationFS>
          extends AbstractCollection<T> {
    private final AnnotationIndex<T> index;

    public AnnotationIndexAdapter(AnnotationIndex<T> aIndex) {
      index = aIndex;
    }

    @Override
    public Iterator<T> iterator() {
      return index.withSnapshotIterators().iterator();
    }

    @Override
    public int size() {
      return index.size();
    }
  }
}
| |
/*
* Copyright 2012, Google Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following disclaimer
* in the documentation and/or other materials provided with the
* distribution.
* * Neither the name of Google Inc. nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.jf.util;
import java.util.Collection;
import java.util.Comparator;
import java.util.Iterator;
import java.util.NoSuchElementException;
import java.util.SortedSet;
import javax.annotation.Nonnull;
import com.google.common.base.Predicate;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.Ordering;
import com.google.common.primitives.Ints;
public class CollectionUtils {
public static <T> int listHashCode(@Nonnull Iterable<T> iterable) {
int hashCode = 1;
for (T item: iterable) {
hashCode = hashCode*31 + item.hashCode();
}
return hashCode;
}
public static <T> int lastIndexOf(@Nonnull Iterable<T> iterable, @Nonnull Predicate<? super T> predicate) {
int index = 0;
int lastMatchingIndex = -1;
for (T item: iterable) {
if (predicate.apply(item)) {
lastMatchingIndex = index;
}
index++;
}
return lastMatchingIndex;
}
public static <T extends Comparable<? super T>> int compareAsList(@Nonnull Collection<? extends T> list1,
@Nonnull Collection<? extends T> list2) {
int res = Ints.compare(list1.size(), list2.size());
if (res != 0) return res;
Iterator<? extends T> elements2 = list2.iterator();
for (T element1: list1) {
res = element1.compareTo(elements2.next());
if (res != 0) return res;
}
return 0;
}
public static <T> int compareAsIterable(@Nonnull Comparator<? super T> comparator,
@Nonnull Iterable<? extends T> it1,
@Nonnull Iterable<? extends T> it2) {
Iterator<? extends T> elements2 = it2.iterator();
for (T element1: it1) {
T element2;
try {
element2 = elements2.next();
} catch (NoSuchElementException ex) {
return 1;
}
int res = comparator.compare(element1, element2);
if (res != 0) return res;
}
if (elements2.hasNext()) {
return -1;
}
return 0;
}
public static <T extends Comparable<? super T>> int compareAsIterable(@Nonnull Iterable<? extends T> it1,
@Nonnull Iterable<? extends T> it2) {
Iterator<? extends T> elements2 = it2.iterator();
for (T element1: it1) {
T element2;
try {
element2 = elements2.next();
} catch (NoSuchElementException ex) {
return 1;
}
int res = element1.compareTo(element2);
if (res != 0) return res;
}
if (elements2.hasNext()) {
return -1;
}
return 0;
}
public static <T> int compareAsList(@Nonnull Comparator<? super T> elementComparator,
@Nonnull Collection<? extends T> list1,
@Nonnull Collection<? extends T> list2) {
int res = Ints.compare(list1.size(), list2.size());
if (res != 0) return res;
Iterator<? extends T> elements2 = list2.iterator();
for (T element1: list1) {
res = elementComparator.compare(element1, elements2.next());
if (res != 0) return res;
}
return 0;
}
@Nonnull
public static <T> Comparator<Collection<? extends T>> listComparator(
@Nonnull final Comparator<? super T> elementComparator) {
return new Comparator<Collection<? extends T>>() {
@Override
public int compare(Collection<? extends T> list1, Collection<? extends T> list2) {
return compareAsList(elementComparator, list1, list2);
}
};
}
public static <T> boolean isNaturalSortedSet(@Nonnull Iterable<? extends T> it) {
if (it instanceof SortedSet) {
SortedSet<? extends T> sortedSet = (SortedSet<? extends T>)it;
Comparator<?> comparator = sortedSet.comparator();
return (comparator == null) || comparator.equals(Ordering.natural());
}
return false;
}
public static <T> boolean isSortedSet(@Nonnull Comparator<? extends T> elementComparator,
@Nonnull Iterable<? extends T> it) {
if (it instanceof SortedSet) {
SortedSet<? extends T> sortedSet = (SortedSet<? extends T>)it;
Comparator<?> comparator = sortedSet.comparator();
if (comparator == null) {
return elementComparator.equals(Ordering.natural());
}
return elementComparator.equals(comparator);
}
return false;
}
@Nonnull
private static <T> SortedSet<? extends T> toNaturalSortedSet(@Nonnull Collection<? extends T> collection) {
if (isNaturalSortedSet(collection)) {
return (SortedSet<? extends T>)collection;
}
return ImmutableSortedSet.copyOf(collection);
}
@Nonnull
private static <T> SortedSet<? extends T> toSortedSet(@Nonnull Comparator<? super T> elementComparator,
@Nonnull Collection<? extends T> collection) {
if (collection instanceof SortedSet) {
SortedSet<? extends T> sortedSet = (SortedSet<? extends T>)collection;
Comparator<?> comparator = sortedSet.comparator();
if (comparator != null && comparator.equals(elementComparator)) {
return sortedSet;
}
}
return ImmutableSortedSet.copyOf(elementComparator, collection);
}
@Nonnull
public static <T> Comparator<Collection<? extends T>> setComparator(
@Nonnull final Comparator<? super T> elementComparator) {
return new Comparator<Collection<? extends T>>() {
@Override
public int compare(Collection<? extends T> list1, Collection<? extends T> list2) {
return compareAsSet(elementComparator, list1, list2);
}
};
}
public static <T extends Comparable<T>> int compareAsSet(@Nonnull Collection<? extends T> set1,
@Nonnull Collection<? extends T> set2) {
int res = Ints.compare(set1.size(), set2.size());
if (res != 0) return res;
SortedSet<? extends T> sortedSet1 = toNaturalSortedSet(set1);
SortedSet<? extends T> sortedSet2 = toNaturalSortedSet(set2);
Iterator<? extends T> elements2 = set2.iterator();
for (T element1: set1) {
res = element1.compareTo(elements2.next());
if (res != 0) return res;
}
return 0;
}
public static <T> int compareAsSet(@Nonnull Comparator<? super T> elementComparator,
@Nonnull Collection<? extends T> list1,
@Nonnull Collection<? extends T> list2) {
int res = Ints.compare(list1.size(), list2.size());
if (res != 0) return res;
SortedSet<? extends T> set1 = toSortedSet(elementComparator, list1);
SortedSet<? extends T> set2 = toSortedSet(elementComparator, list2);
Iterator<? extends T> elements2 = set2.iterator();
for (T element1: set1) {
res = elementComparator.compare(element1, elements2.next());
if (res != 0) return res;
}
return 0;
}
}
| |
package org.educama.flightconnection.datafeed;
import org.educama.airline.businessservice.AirlineBusinessService;
import org.educama.airline.model.Airline;
import org.educama.airport.businessservice.AirportBusinessService;
import org.educama.airport.model.Airport;
import org.educama.flightconnection.model.FlightConnection;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.supercsv.cellprocessor.CellProcessorAdaptor;
import org.supercsv.cellprocessor.Optional;
import org.supercsv.cellprocessor.ParseInt;
import org.supercsv.cellprocessor.ift.CellProcessor;
import org.supercsv.exception.SuperCsvCellProcessorException;
import org.supercsv.io.CsvBeanReader;
import org.supercsv.io.ICsvBeanReader;
import org.supercsv.prefs.CsvPreference;
import org.supercsv.util.CsvContext;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.List;
/**
* Deserializes the CSV inputstreams containing connections data into connection Objects.
*/
@Component
public class ConnectionCsvDeserializer {
private final String codeshareYes = "Y";
private final String codeshareNo = "";
private final String separator = ",";
private final String empty = "";
private AirportBusinessService airportBusinessService;
private AirlineBusinessService airlineBusinessService;
@Autowired
public ConnectionCsvDeserializer(AirportBusinessService airportBusinessService, AirlineBusinessService airlineBusinessService) {
this.airportBusinessService = airportBusinessService;
this.airlineBusinessService = airlineBusinessService;
}
public List<FlightConnection> deserialize(InputStream in) throws IOException {
InputStreamReader inputStreamReader = new InputStreamReader(in);
ICsvBeanReader csvBeanReader = new CsvBeanReader(inputStreamReader, CsvPreference.STANDARD_PREFERENCE);
final String[] header = {"airlineIataCode", null, "sourceAirportIataCode", null, "destinationAirportIataCode", null, "codeshare", "stops", null};
FlightConnection flightConnection = null;
List<FlightConnection> flightConnections = new ArrayList<>();
try {
while ((flightConnection = csvBeanReader.read(FlightConnection.class, header, getCellProcessors())) != null) {
System.out.println("deserialized " + flightConnection);
flightConnections.add(flightConnection);
}
} finally {
if (csvBeanReader != null) {
csvBeanReader.close();
}
}
return flightConnections;
}
public String createCsvContentForConnections(List<FlightConnection> flightConnections) {
StringBuilder builder = new StringBuilder();
for (FlightConnection flightConnection : flightConnections) {
String codeShare = flightConnection.isCodeshare() ? codeshareYes : codeshareNo;
//@formatter:off
builder.append(flightConnection.getAirlineIataCode()) .append(separator) //ID
.append(empty) .append(separator) //airline ID
.append(flightConnection.getSourceAirportIataCode()) .append(separator)
.append(empty) .append(separator) //source ID
.append(flightConnection.getDestinationAirportIataCode()) .append(separator)
.append(empty) .append(separator) //destination ID
.append(codeShare) .append(separator)
.append(flightConnection.getStops()) .append(separator)
.append(empty) .append("\n"); //Equipment
//@formatter:on
}
return builder.toString();
}
private CellProcessor[] getCellProcessors() {
return new CellProcessor[]{
//@formatter:off
new Optional(new ParseAirlinesIataCode()), //airline
null, //airline ID
new Optional(new ParseAirportIataCode()), //source airport
null, //source airport ID
new Optional(new ParseAirportIataCode()), //destination airport
null, //destination airport ID
new Optional(new ParseCodeShare()), //codeShare
new Optional(new ParseInt()), //stops
null, //equipment
//@formatter:on
};
}
/**
* custom CellProcessor for parsing the codeshares.
*/
private class ParseCodeShare extends CellProcessorAdaptor {
ParseCodeShare() {
super();
}
ParseCodeShare(CellProcessorAdaptor next) {
super(next);
}
@Override
public Object execute(Object value, CsvContext context) {
validateInputNotNull(value, context);
if (codeshareYes.equalsIgnoreCase((String) value)) {
return next.execute(Boolean.TRUE, context);
} else {
if (codeshareNo.equalsIgnoreCase((String) value)) {
return next.execute(Boolean.FALSE, context);
}
}
throw new SuperCsvCellProcessorException(
String.format("Could not parse '%s' as a codeshare", value),
context, this);
}
}
/**
* Custom CellProcessor for parsing the airports codes.
*/
private class ParseAirportIataCode extends CellProcessorAdaptor {
private static final int AIRPORT_IATACODE_LENGTH = 3;
private static final int AIRPORT_ICAOCODE_LENGTH = 4;
ParseAirportIataCode() {
super();
}
ParseAirportIataCode(CellProcessorAdaptor next) {
super(next);
}
@Override
/**
* The airports codes contained in the csv can be IATA ( 3-letter) codes or ICAO (4-letter) codes.
*/
public Object execute(Object value, CsvContext context) {
validateInputNotNull(value, context);
String airportCode = (String) value;
if ((airportCode.length() == AIRPORT_IATACODE_LENGTH)) {
return next.execute(value, context);
}
// attempt to convert the icao code to an IATA code. If not possible proceed with the code as it is.
if (airportCode.length() == AIRPORT_ICAOCODE_LENGTH) {
List<Airport> airports = airportBusinessService.findAirportByIcaoCode(airportCode);
if (!airports.isEmpty()) {
return next.execute(airports.get(0)
.getIataCode(), context);
} else {
return next.execute(airportCode, context);
}
}
throw new SuperCsvCellProcessorException(
String.format("Could not parse '%s' neither as an IATA nor as an IACAO code for Airports", value),
context, this);
}
}
/**
* custom CellProcessor for parsing the airlines codes.
*/
private class ParseAirlinesIataCode extends CellProcessorAdaptor {
private static final int AIRLINE_IATACODE_LENGTH = 2;
private static final int AIRLINE_ICAOCODE_LENGTH = 3;
ParseAirlinesIataCode() {
super();
}
ParseAirlinesIataCode(CellProcessorAdaptor next) {
super(next);
}
@Override
/**
* The airlines codes contained in the csv can be IATA ( 2-letter) codes or ICAO (3-letter) codes.
*/
public Object execute(Object value, CsvContext context) {
validateInputNotNull(value, context);
String airlineCode = (String) value;
if ((airlineCode.length() == AIRLINE_IATACODE_LENGTH)) {
return next.execute(value, context);
}
// attempt to convert the icao code to an IATA code. If not possible proceed with the code as it is.
if (airlineCode.length() == AIRLINE_ICAOCODE_LENGTH) {
List<Airline> airlines = airlineBusinessService.findAirlinesByIcaoCode(airlineCode);
if (!airlines.isEmpty()) {
return next.execute(airlines.get(0)
.getIataCode(), context);
} else {
return next.execute(airlineCode, context);
}
}
throw new SuperCsvCellProcessorException(
String.format("Could not parse '%s' neither as an IATA nor as an IACAO code for Airlines", value),
context, this);
}
}
}
| |
/*
* File: PEP.java
*
* Copyright 2007 Macquarie E-Learning Centre Of Excellence
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fcrepo.server.security.xacml.pep.ws;
import java.util.Collections;
import java.util.Date;
import java.util.Map;
import java.util.Set;
import javax.xml.namespace.QName;
import javax.xml.ws.handler.MessageContext;
import javax.xml.ws.handler.soap.SOAPMessageContext;
import org.fcrepo.server.security.RequestCtx;
import org.fcrepo.server.security.xacml.pep.AuthzDeniedException;
import org.fcrepo.server.security.xacml.pep.ContextHandler;
import org.fcrepo.server.security.xacml.pep.PEPException;
import org.fcrepo.server.security.xacml.pep.ws.operations.OperationHandler;
import org.fcrepo.server.security.xacml.pep.ws.operations.OperationHandlerException;
import org.fcrepo.server.utilities.CXFUtility;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.jboss.security.xacml.sunxacml.ctx.ResponseCtx;
import org.jboss.security.xacml.sunxacml.ctx.Result;
/**
* This class is an JAX-WS handler. It is used as a handler on both the request
* and response. The handler examines the operation for the request and
* retrieves an appropriate handler to manage the request.
*
* @author Jiri Kremser
*/
public class PEP
implements javax.xml.ws.handler.soap.SOAPHandler<SOAPMessageContext> {
private static final Logger logger = LoggerFactory.getLogger(PEP.class);
/**
* A list of instantiated handlers. As operations are invoked, handlers for
* those operations are created and added to this list
*/
private Map<String, Map<String, OperationHandler>> m_serviceHandlers = null;
/**
* The XACML context handler.
*/
ContextHandler m_ctxHandler = null;
private final boolean feslAuthZ;
/**
* A time stamp to note the time this AuthHandler was instantiated.
*/
private Date m_ts = null;
/**
* Default constructor that initialises the handlers map and the
* contextHandler.
*
* @throws PEPException
*/
public PEP(boolean feslAuthZ)
throws PEPException {
super();
this.feslAuthZ = feslAuthZ;
logger.info("feslAuthZ = {}", feslAuthZ);
if (feslAuthZ) {
m_serviceHandlers = Collections.emptyMap();
m_ts = new Date();
}
}
public void setContextHandler(ContextHandler ctxHandler) {
m_ctxHandler = ctxHandler;
}
public void setServiceHandlers(Map<String, Map<String,OperationHandler>> serviceHandlers) {
m_serviceHandlers = serviceHandlers;
}
/*
* (non-Javadoc)
*/
@Override
public boolean handleMessage(SOAPMessageContext context) {
if (!feslAuthZ) {
return true;
}
String service =
((QName) context.get(SOAPMessageContext.WSDL_SERVICE))
.getLocalPart();
String operation =
((QName) context.get(SOAPMessageContext.WSDL_OPERATION))
.getLocalPart();
if (logger.isDebugEnabled()) {
logger.debug("AuthHandler executed: " + service + "/" + operation
+ " [" + m_ts + "]");
}
// // Obtain the service details
// ServiceDesc service = context.getService().getServiceDescription();
// // Obtain the operation details and message type
// OperationDesc operation = context.getOperation();
// Obtain a class to handle our request
OperationHandler operationHandler = getHandler(service, operation);
// there must always be a handler.
if (operationHandler == null) {
logger.error("Missing handler for service/operation: " + service
+ "/" + operation);
throw CXFUtility
.getFault(new PEPException("Missing handler for service/operation: "
+ service + "/" + operation));
}
RequestCtx reqCtx = null;
// if we are on the request pathway, outboundProperty == false. True on
// response pathway
Boolean outboundProperty =
(Boolean) context.get(MessageContext.MESSAGE_OUTBOUND_PROPERTY);
try {
if (outboundProperty) {
reqCtx = operationHandler.handleResponse(context);
} else {
reqCtx = operationHandler.handleRequest(context);
}
} catch (OperationHandlerException ohe) {
logger.error("Error handling operation: " + operation, ohe);
throw CXFUtility
.getFault(new PEPException("Error handling operation: "
+ operation, ohe));
}
// if handler returns null, then there is no work to do (would have
// thrown exception if things went wrong).
if (reqCtx == null) {
return false;
}
// if we have received a requestContext, we need to hand it over to the
// context handler for resolution.
ResponseCtx resCtx = null;
try {
resCtx = m_ctxHandler.evaluate(reqCtx);
} catch (PEPException pe) {
logger.error("Error evaluating request", pe);
throw CXFUtility
.getFault(new PEPException("Error evaluating request (operation: "
+ operation + ")",
pe));
}
// TODO: set obligations
/*
* Need to set obligations in some sort of map, with UserID/SessionID +
* list of obligationIDs. Enforce will have to check that these
* obligations are met before providing access. There will need to be an
* external obligations service that this PEP can communicate with. Will
* be working on that... This service will throw an 'Obligations need to
* be met' exception for outstanding obligations
*/
// TODO: enforce will need to ensure that obligations are met.
enforce(resCtx);
return true;
}
/**
* Function to try and obtain a handler using the name of the current SOAP
* service and operation.
*
* @param opName
* the name of the operation
* @return OperationHandler to handle the operation
*/
private OperationHandler getHandler(String serviceName, String operationName) {
if (serviceName == null) {
if (logger.isDebugEnabled()) {
logger.debug("Service Name was null!");
}
return null;
}
if (operationName == null) {
if (logger.isDebugEnabled()) {
logger.debug("Operation Name was null!");
}
return null;
}
Map<String, OperationHandler> handlers =
m_serviceHandlers.get(serviceName);
if (handlers == null) {
if (logger.isDebugEnabled()) {
logger.debug("No Service Handlers found for: " + serviceName);
}
return null;
}
OperationHandler handler = handlers.get(operationName);
if (handler == null) {
if (logger.isDebugEnabled()) {
logger.debug("Handler not found for: " + serviceName + "/"
+ operationName);
}
}
return handler;
}
/**
* Method to check a response and enforce any denial. This is achieved by
* throwing an SoapFault.
*
* @param res
* the ResponseCtx
*/
private void enforce(ResponseCtx res) {
@SuppressWarnings("unchecked")
Set<Result> results = res.getResults();
for (Result r : results) {
if (r.getDecision() != Result.DECISION_PERMIT) {
if (logger.isDebugEnabled()) {
logger.debug("Denying access: " + r.getDecision());
}
switch (r.getDecision()) {
case Result.DECISION_DENY:
throw CXFUtility
.getFault(new AuthzDeniedException("Deny"));
case Result.DECISION_INDETERMINATE:
throw CXFUtility
.getFault(new AuthzDeniedException("Indeterminate"));
case Result.DECISION_NOT_APPLICABLE:
throw CXFUtility
.getFault(new AuthzDeniedException("NotApplicable"));
default:
}
}
}
if (logger.isDebugEnabled()) {
logger.debug("Permitting access!");
}
}
/**
* {@inheritDoc}
*/
@Override
public void close(MessageContext arg0) {
}
/**
* {@inheritDoc}
*/
@Override
public boolean handleFault(SOAPMessageContext arg0) {
return false;
}
/**
* {@inheritDoc}
*/
@Override
public Set<QName> getHeaders() {
return null;
}
}
| |
package com.octo.android.robospice.request;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.locks.ReentrantLock;
import roboguice.util.temp.Ln;
import android.content.Context;
import com.octo.android.robospice.exception.NetworkException;
import com.octo.android.robospice.exception.NoNetworkException;
import com.octo.android.robospice.networkstate.NetworkStateChecker;
import com.octo.android.robospice.persistence.CacheManager;
import com.octo.android.robospice.persistence.DurationInMillis;
import com.octo.android.robospice.persistence.exception.CacheCreationException;
import com.octo.android.robospice.persistence.exception.CacheLoadingException;
import com.octo.android.robospice.persistence.exception.CacheSavingException;
import com.octo.android.robospice.persistence.exception.SpiceException;
import com.octo.android.robospice.priority.PriorityRunnable;
import com.octo.android.robospice.request.listener.RequestProgressListener;
import com.octo.android.robospice.request.listener.RequestStatus;
/**
* Default implementation of {@link RequestRunner }. Processes requests. This class is massively multi-threaded and offers good
* performances when processing multiple requests simulaneously.
* @author SNI
* @author Andrew Clark
*/
public class DefaultRequestRunner implements RequestRunner {
// ============================================================================================
// ATTRIBUTES
// ============================================================================================
/**
* Thanks Olivier Croiser from Zenika for his excellent <a href=
* "http://blog.zenika.com/index.php?post/2012/04/11/Introduction-programmation-concurrente-Java-2sur2. "
* >blog article</a>.
*/
private ExecutorService executorService = null;
private final CacheManager cacheManager;
private final Context applicationContext;
private boolean failOnCacheError;
private final NetworkStateChecker networkStateChecker;
private final RequestProgressManager requestProgressManager;
private boolean isStopped;
private ReentrantLock executorLock;
// ============================================================================================
// CONSTRUCTOR
// ====================================================================================
public DefaultRequestRunner(final Context context, final CacheManager cacheManager, final ExecutorService executorService, final RequestProgressManager requestProgressBroadcaster,
final NetworkStateChecker networkStateChecker) {
this.applicationContext = context;
this.cacheManager = cacheManager;
this.networkStateChecker = networkStateChecker;
this.executorLock = new ReentrantLock();
this.executorService = executorService;
this.requestProgressManager = requestProgressBroadcaster;
this.networkStateChecker.checkPermissions(context);
}
public void executeRequest(CachedSpiceRequest<?> request) {
executorLock.lock();
try {
if (isStopped) {
Ln.d("Dropping request : " + request + " as runner is stopped.");
return;
}
planRequestExecution(request);
} finally {
executorLock.unlock();
}
}
protected <T> void processRequest(final CachedSpiceRequest<T> request) {
final long startTime = System.currentTimeMillis();
Ln.d("Processing request : " + request);
T result = null;
// add a progress listener to the request to be notified of
// progress during load data from network
final RequestProgressListener requestProgressListener = requestProgressManager.createProgressListener(request);
request.setRequestProgressListener(requestProgressListener);
if (request.getRequestCacheKey() != null && request.getCacheDuration() != DurationInMillis.ALWAYS_EXPIRED) {
// First, search data in cache
try {
Ln.d("Loading request from cache : " + request);
request.setStatus(RequestStatus.READING_FROM_CACHE);
result = loadDataFromCache(request.getResultType(), request.getRequestCacheKey(), request.getCacheDuration());
// if something is found in cache, fire result and finish
// request
if (result != null) {
Ln.d("Request loaded from cache : " + request + " result=" + result);
requestProgressManager.notifyListenersOfRequestSuccess(request, result);
printRequestProcessingDuration(startTime, request);
return;
} else if (request.isAcceptingDirtyCache()) {
// as a fallback, some request may accept whatever is in the
// cache but still
// want an update from network.
result = loadDataFromCache(request.getResultType(), request.getRequestCacheKey(), DurationInMillis.ALWAYS_RETURNED);
if (result != null) {
requestProgressManager.notifyListenersOfRequestSuccessButDontCompleteRequest(request, result);
}
}
} catch (final SpiceException e) {
Ln.d(e, "Cache file could not be read.");
if (failOnCacheError) {
handleRetry(request, e);
printRequestProcessingDuration(startTime, request);
return;
}
cacheManager.removeDataFromCache(request.getResultType(), request.getRequestCacheKey());
Ln.d(e, "Cache file deleted.");
}
}
// if result is not in cache, load data from network
Ln.d("Cache content not available or expired or disabled");
if (!networkStateChecker.isNetworkAvailable(applicationContext) && !request.isOffline()) {
Ln.e("Network is down.");
if (!request.isCancelled()) {
// don't retry when there is no network
requestProgressManager.notifyListenersOfRequestFailure(request, new NoNetworkException());
}
printRequestProcessingDuration(startTime, request);
return;
}
// network is ok, load data from network
try {
if (request.isCancelled()) {
printRequestProcessingDuration(startTime, request);
return;
}
Ln.d("Calling netwok request.");
request.setStatus(RequestStatus.LOADING_FROM_NETWORK);
result = request.loadDataFromNetwork();
Ln.d("Network request call ended.");
} catch (final Exception e) {
if (!request.isCancelled()) {
Ln.e(e, "An exception occurred during request network execution :" + e.getMessage());
handleRetry(request, new NetworkException("Exception occurred during invocation of web service.", e));
} else {
Ln.e("An exception occurred during request network execution but request was cancelled, so listeners are not called.");
}
printRequestProcessingDuration(startTime, request);
return;
}
if (result != null && request.getRequestCacheKey() != null) {
// request worked and result is not null, save
// it to cache
try {
if (request.isCancelled()) {
printRequestProcessingDuration(startTime, request);
return;
}
Ln.d("Start caching content...");
request.setStatus(RequestStatus.WRITING_TO_CACHE);
result = saveDataToCacheAndReturnData(result, request.getRequestCacheKey());
if (request.isCancelled()) {
printRequestProcessingDuration(startTime, request);
return;
}
requestProgressManager.notifyListenersOfRequestSuccess(request, result);
printRequestProcessingDuration(startTime, request);
return;
} catch (final SpiceException e) {
Ln.d(e, "An exception occurred during service execution :" + e.getMessage());
if (failOnCacheError) {
handleRetry(request, e);
printRequestProcessingDuration(startTime, request);
return;
} else {
if (request.isCancelled()) {
printRequestProcessingDuration(startTime, request);
return;
}
// result can't be saved to
// cache but we reached that
// point after a success of load
// data from
// network
requestProgressManager.notifyListenersOfRequestSuccess(request, result);
}
cacheManager.removeDataFromCache(request.getResultType(), request.getRequestCacheKey());
Ln.d(e, "Cache file deleted.");
}
} else {
// result can't be saved to cache but we reached
// that point after a success of load data from
// network
requestProgressManager.notifyListenersOfRequestSuccess(request, result);
printRequestProcessingDuration(startTime, request);
return;
}
}
protected void planRequestExecution(final CachedSpiceRequest<?> request) {
Future<?> future = executorService.submit(new PriorityRunnable() {
@Override
public void run() {
try {
processRequest(request);
} catch (final Throwable t) {
Ln.d(t, "An unexpected error occurred when processsing request %s", request.toString());
} finally {
request.setRequestCancellationListener(null);
}
}
@Override
public int getPriority() {
return request.getPriority();
}
});
request.setFuture(future);
}
public boolean isFailOnCacheError() {
return failOnCacheError;
}
public void setFailOnCacheError(boolean failOnCacheError) {
this.failOnCacheError = failOnCacheError;
}
public void shouldStop() {
executorLock.lock();
try {
isStopped = true;
executorService.shutdown();
} finally {
executorLock.unlock();
}
}
public boolean isStopped() {
return isStopped;
}
// ============================================================================================
// PRIVATE
// ============================================================================================
private <T> T loadDataFromCache(final Class<T> clazz, final Object cacheKey, final long maxTimeInCacheBeforeExpiry) throws CacheLoadingException, CacheCreationException {
return cacheManager.loadDataFromCache(clazz, cacheKey, maxTimeInCacheBeforeExpiry);
}
private <T> T saveDataToCacheAndReturnData(final T data, final Object cacheKey) throws CacheSavingException, CacheCreationException {
return cacheManager.saveDataToCacheAndReturnData(data, cacheKey);
}
private void handleRetry(final CachedSpiceRequest<?> request, final SpiceException e) {
if (request.getRetryPolicy() != null) {
request.getRetryPolicy().retry(e);
if (request.getRetryPolicy().getRetryCount() > 0) {
new Thread(new Runnable() {
@Override
public void run() {
try {
Thread.sleep(request.getRetryPolicy().getDelayBeforeRetry());
executeRequest(request);
} catch (InterruptedException e) {
Ln.e(e, "Retry attempt failed for request " + request);
}
}
}).start();
return;
}
}
requestProgressManager.notifyListenersOfRequestFailure(request, e);
}
private static String getTimeString(long millis) {
return String.format("%02d ms", millis);
}
private static void printRequestProcessingDuration(long startTime, CachedSpiceRequest<?> request) {
Ln.d("It tooks %s to process request %s.", getTimeString(System.currentTimeMillis() - startTime), request.toString());
}
}
| |
/*L
* Copyright Ekagra Software Technologies Ltd.
* Copyright SAIC
*
* Distributed under the OSI-approved BSD 3-Clause License.
* See http://ncip.github.com/cacore-sdk-pre411/LICENSE.txt for details.
*/
package gov.nih.nci.codegen.core.transformer;
import gov.nih.nci.codegen.core.BaseArtifact;
import gov.nih.nci.codegen.core.ConfigurationException;
import gov.nih.nci.codegen.core.XMLConfigurable;
import gov.nih.nci.codegen.core.filter.UML13ClassifierFilter;
import gov.nih.nci.codegen.core.filter.UML13ModelElementFilter;
import gov.nih.nci.codegen.core.util.UML13Utils;
import gov.nih.nci.codegen.core.util.XMLUtils;
import gov.nih.nci.codegen.framework.FilteringException;
import gov.nih.nci.codegen.framework.TransformationException;
import gov.nih.nci.codegen.framework.Transformer;
import gov.nih.nci.common.util.Constant;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import javax.jmi.reflect.RefObject;
import org.apache.log4j.Logger;
import org.jaxen.JaxenException;
import org.jaxen.jdom.JDOMXPath;
import org.jdom.Element;
import org.jdom.output.Format;
import org.jdom.output.XMLOutputter;
import org.omg.uml.foundation.core.AssociationEnd;
import org.omg.uml.foundation.core.Attribute;
import org.omg.uml.foundation.core.Classifier;
import org.omg.uml.foundation.core.Dependency;
import org.omg.uml.foundation.core.Feature;
import org.omg.uml.foundation.core.UmlClass;
import org.omg.uml.foundation.extensionmechanisms.Stereotype;
import org.omg.uml.foundation.extensionmechanisms.TaggedValue;
import org.omg.uml.modelmanagement.Model;
/**
* <!-- LICENSE_TEXT_START -->
* Copyright 2001-2004 SAIC. Copyright 2001-2003 SAIC. This software was developed in conjunction with the National Cancer Institute,
* and so to the extent government employees are co-authors, any rights in such works shall be subject to Title 17 of the United States Code, section 105.
* Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
* 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the disclaimer of Article 3, below. Redistributions
* in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other
* materials provided with the distribution.
* 2. The end-user documentation included with the redistribution, if any, must include the following acknowledgment:
* "This product includes software developed by the SAIC and the National Cancer Institute."
* If no such end-user documentation is to be included, this acknowledgment shall appear in the software itself,
* wherever such third-party acknowledgments normally appear.
* 3. The names "The National Cancer Institute", "NCI" and "SAIC" must not be used to endorse or promote products derived from this software.
* 4. This license does not authorize the incorporation of this software into any third party proprietary programs. This license does not authorize
* the recipient to use any trademarks owned by either NCI or SAIC-Frederick.
* 5. THIS SOFTWARE IS PROVIDED "AS IS," AND ANY EXPRESSED OR IMPLIED WARRANTIES, (INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE) ARE DISCLAIMED. IN NO EVENT SHALL THE NATIONAL CANCER INSTITUTE,
* SAIC, OR THEIR AFFILIATES BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
* <!-- LICENSE_TEXT_END -->
*/
/**
* Produces an XML file that contains object-relational mapping configuration
* information for use by the OJB tool ( <a
* href="http://db.apache.org/ojb/" target="_blank">http://db.apache.org/ojb/ </a>). In
* particular, it produces class-descriptor elements from a set classes defined
* in a UML 1.3 model.
* <p>
* In order to use this transformer, the supplied UML model must contain certain
* information, in the form of tagged values and stereotypes. This section
* describes the control file configuration and how it relates to the code. It
* does not describe how the UML model must be annotated (see the User's Guide
* for that).
* <p>
* The content model for this transformer's configuration element is as follows:
* <p>
* <code>
* <pre>
*
* <!ELEMENT transformer (param, filter)>
* <!ATTLIST transformer
* name CDATA #REQUIRED
* className CDATA #FIXED gov.nih.nci.codegen.core.transformer.OJBRepTransformer>
* <!ELEMENT param EMPTY>
* <!ATTLIST param
* name CDATA #FIXED packageName
* value CDATA #REQUIRED>
* <!ELEMENT filter ... see {@link gov.nih.nci.codegen.core.filter.UML13ClassifierFilter#configure(org.w3c.dom.Element)} ...
*
* </pre>
* </code>
* <p>
* As you can see, this transformer expects a nested filter element. The reason
* is that this transformer produces a single Artifact (an XML file) from a
* collection of model elements.
* <p>
* UML13OJBRepTransformer expects to be passed an
* instance of org.omg.uml.modelmanagement.Model. It uses
* UML13ModelElementFilter to obtain all model elements in the model. Then it
* use UML13Classifier to obtain the classifiers selected by the contents of the
* nested filter element. Then it iterates through these classifiers, building
* the class-descriptor elements.
* <p>
* A Collection containing a single Artifact is returned by this transformer's
* execute method. The name attribute of the Artifact is set to "ojb_repository"
* and its source attribute is set to the String that represents the XML
* document.
* <p>
*
* @author caBIO Team
* @version 1.0
*/
public class UML13OJBRepTransformer implements Transformer, XMLConfigurable {
private static Logger log = Logger
.getLogger(UML13OJBRepTransformer.class);
private UML13ClassifierFilter _classifierFilt;
private String _pkgName;
/**
 * Creates an instance of UML13OJBRepTransformer.
 * The instance must be configured via {@link #configure(org.w3c.dom.Element)}
 * before {@link #execute} is invoked (that is what sets the classifier
 * filter and the package name).
 */
public UML13OJBRepTransformer() {
super();
}
/**
 * Transforms the supplied UML model into a single "ojb_repository"
 * Artifact containing the OJB repository XML document.
 *
 * @param modelElement must be an org.omg.uml.modelmanagement.Model
 * @param artifacts ignored by this transformer
 * @return a Collection holding one BaseArtifact named "ojb_repository"
 *         whose source is the pretty-printed repository XML
 * @throws TransformationException if the model element is null or not a
 *         Model, or if filtering the model elements fails
 * @see gov.nih.nci.codegen.framework.Transformer#execute(javax.jmi.reflect.RefObject,
 *      java.util.Collection)
 */
public Collection execute(RefObject modelElement, Collection artifacts)
throws TransformationException {
if (modelElement == null) {
log.error("model element is null");
throw new TransformationException("model element is null");
}
if (!(modelElement instanceof Model)) {
log.error("model element not instance of Model");
throw new TransformationException(
"model element not instance of Model");
}
ArrayList newArtifacts = new ArrayList();
UML13ModelElementFilter meFilt = new UML13ModelElementFilter();
ArrayList umlExtentCol = new ArrayList();
umlExtentCol.add(modelElement.refOutermostPackage());
Collection classifiers = null;
try {
// First select all model elements in the extent, then narrow to the
// classifiers chosen by the filter configured in configure().
classifiers = _classifierFilt.execute(meFilt.execute(umlExtentCol));
} catch (FilteringException ex) {
log.error("couldn't filter model elements" + ex.getMessage());
throw new TransformationException("couldn't filter model elements",
ex);
}
Element rep = generateRepository(classifiers);
XMLOutputter p = new XMLOutputter();
p.setFormat(Format.getPrettyFormat());
newArtifacts.add(new BaseArtifact("ojb_repository", modelElement, p
.outputString(rep)));
return newArtifacts;
}
/**
 * Builds the OJB repository DOM: for each selected classifier it creates
 * (or reuses) a class-descriptor for the generated Impl class, fills in
 * field-descriptors from the mapped table's columns, adds reference- and
 * collection-descriptors for the class's associations, and finally emits
 * class-descriptors with extent-class entries for the interface and for
 * the implementation/interface inheritance hierarchies.
 *
 * @param classifiers the UML classifiers selected by the configured filter
 * @return the root "class-descriptors" element of the generated document
 */
private Element generateRepository(Collection classifiers) {
Element root = new Element("class-descriptors");
for (Iterator i = classifiers.iterator(); i.hasNext();) {
Classifier klass = (Classifier) i.next();
Classifier table = getTable(klass);
// NOTE(review): getTable() returns null when zero or several DataSource
// dependencies are found; table.getName() below would then NPE --
// confirm that every selected class has exactly one data source.
log.debug("creating class-descriptor for " + klass.getName());
String classExp = "class-descriptor[@class='" + _pkgName + "impl."
+ klass.getName() + "Impl']";
Element classDescEl = null;
// Reuse the descriptor if an earlier superclass pass already created it.
try {
classDescEl = (Element) (new JDOMXPath(classExp))
.selectSingleNode(root);
} catch (JaxenException ex) {
log.error("error selecting " + classExp + ": "+ ex.getMessage());
throw new RuntimeException("error selecting " + classExp, ex);
}
if (classDescEl == null) {
classDescEl = new Element("class-descriptor");
root.addContent(classDescEl);
classDescEl.setAttribute("class", _pkgName + "impl."
+ klass.getName() + "Impl");
}
classDescEl.setAttribute("table", table.getName());
classDescEl.setAttribute("initialization-method", "initialize");
// NOTE(review): assocMap is populated below but never read afterwards --
// looks like leftover state; confirm before removing.
HashMap assocMap = new HashMap();
log.debug("creating field-descriptors...");
/**
 * If this class is a subclass of some other class, then it must
 * define all inherited attributes and associations.
 *
 * If class is a superclass, it should have an ojbConcreteClass
 * field-descriptor.
 */
if (isSuperClass(klass)) {
log.debug("created ojbConcreteClass field-descriptor for "
+ klass.getName());
Element fdEl = new Element("field-descriptor");
classDescEl.addContent(fdEl);
fdEl.setAttribute("name", "ojbConcreteClass");
fdEl.setAttribute("column", "CLASS_NAME");
fdEl.setAttribute("access", "anonymous");
fdEl.setAttribute("jdbc-type", "VARCHAR");
}
// One field-descriptor per mapped table column.
Collection columns = UML13Utils.getAttributes((UmlClass) table);
log.debug("got " + columns.size() + " columns");
for (Iterator j = columns.iterator(); j.hasNext();) {
Attribute column = (Attribute) j.next();
String attName = getAttributeName(klass, column);
// Columns without a maps-to-attribute tagged value are skipped.
if (attName == null) {
log.debug("no value for att name found for "
+ klass.getName() + " -> " + table.getName() + Constant.DOT
+ column.getName() + ", continuing...");
continue;
}
log.debug("creating field-descriptor for "
+ klass.getName() + Constant.DOT + attName);
String colName = column.getName();
String dbType = column.getType().getName();
if (dbType == null) {
log.error("no db type for "
+ table.getName() + Constant.DOT + column.getName());
throw new RuntimeException("no db type for "
+ table.getName() + Constant.DOT + column.getName());
}
String jdbcType = getJDBCType(dbType);
String primaryKey = "false";
Stereotype s = UML13Utils.getStereotype(column);
// NOTE(review): && binds tighter than ||, so this reads as
// ((s != null && "PK".equals(...)) || "ID".equals(colName)):
// any column literally named "ID" is treated as a primary key
// regardless of stereotype -- confirm that is intended.
if (s != null && "PK".equals(s.getName())
|| "ID".equals(colName)) {
primaryKey = "true";
}
Element fieldDescEl = new Element("field-descriptor");
classDescEl.addContent(fieldDescEl);
fieldDescEl.setAttribute("name", attName);
fieldDescEl.setAttribute("column", colName);
fieldDescEl.setAttribute("jdbc-type", jdbcType);
fieldDescEl.setAttribute("primarykey", primaryKey);
String conversion = getConversion(column);
if (conversion != null) {
fieldDescEl.setAttribute("conversion", conversion);
}
// Non-PK columns the class does not expose as attributes are mapped
// anonymously so OJB can still populate them (e.g. FK columns).
if (!hasAttribute(klass, attName, true)
&& !"true".equals(primaryKey)) {
fieldDescEl.setAttribute("access", "anonymous");
}
String assocName = getImplementedAssociationName(klass, column,
attName);
if (assocName != null) {
log.debug("adding assocName " + assocName
+ " in assocMap");
assocMap.put(assocName, attName);
}
}
log.debug("...done field-descriptors");
log.debug("creating associations...");
for (Iterator j = UML13Utils.getAssociationEnds(klass, true)
.iterator(); j.hasNext();) {
AssociationEnd thisEnd = (AssociationEnd) j.next();
AssociationEnd otherEnd = UML13Utils
.getOtherAssociationEnd(thisEnd);
Classifier otherEndTable = getTable(otherEnd.getType());
if (otherEndTable == null) {
log.warn(klass.getName() + Constant.DOT + otherEnd.getName()
+ " -> " + otherEnd.getType().getName()
+ " is not persistent, continuing...");
continue;
}
// ordEl holds the reference- or collection-descriptor created by the
// multiplicity-specific branch below; the attributes shared by all
// association kinds are attached to it after the branches.
Element ordEl = null;
if (UML13Utils.isOne2One(thisEnd, otherEnd)
|| UML13Utils.isMany2One(thisEnd, otherEnd)) {
// 1:1 / n:1 -> reference-descriptor with a foreignkey element.
log.debug("creating reference-descriptor "
+ klass.getName() + Constant.DOT + otherEnd.getName());
Attribute fkColumn = getFKColumn(klass, table, otherEnd
.getName());
if (fkColumn == null) {
log.warn("no fk column found for "
+ klass.getName() + " - " + table.getName()
+ "->" + otherEnd.getName());
continue;
}
String fieldRefName = getAttributeName(klass, fkColumn);
if (fieldRefName == null) {
log.warn("no field ref name found for "
+ klass.getName() + Constant.DOT + thisEnd.getName()
+ "<->" + otherEnd.getName());
continue;
}
Element refDescEl = new Element("reference-descriptor");
classDescEl.addContent(refDescEl);
ordEl = refDescEl;
refDescEl.setAttribute("name", otherEnd.getName());
refDescEl.setAttribute("class-ref", _pkgName + "impl."
+ otherEnd.getType().getName() + "Impl");
Element fkEl = new Element("foreignkey");
refDescEl.addContent(fkEl);
fkEl.setAttribute("field-ref", fieldRefName);
} else if (UML13Utils.isOne2Many(thisEnd, otherEnd)) {
// 1:n -> collection-descriptor with an inverse-foreignkey.
log.debug("creating 1:n collection-descriptor "
+ klass.getName() + Constant.DOT + otherEnd.getName());
String invFKFieldRef = getInvFKFieldRef(otherEnd.getType(),
otherEndTable, thisEnd.getName());
if (invFKFieldRef == null) {
log.error( "couldn't find inv fk field ref for "
+ klass.getName() + Constant.DOT
+ thisEnd.getName() + "<->"
+ otherEnd.getName());
throw new RuntimeException(
"couldn't find inv fk field ref for "
+ klass.getName() + Constant.DOT
+ thisEnd.getName() + "<->"
+ otherEnd.getName());
}
Element colDescEl = new Element("collection-descriptor");
classDescEl.addContent(colDescEl);
ordEl = colDescEl;
colDescEl.setAttribute("name", otherEnd.getName());
colDescEl.setAttribute("element-class-ref", _pkgName
+ "impl." + otherEnd.getType().getName() + "Impl");
Element invFKEl = new Element("inverse-foreignkey");
colDescEl.addContent(invFKEl);
invFKEl.setAttribute("field-ref", invFKFieldRef);
} else if (UML13Utils.isMany2Many(thisEnd, otherEnd)) {
// m:n -> collection-descriptor via an indirection (correlation) table.
log.debug("creating m:n collection-descriptor "
+ klass.getName() + Constant.DOT + otherEnd.getName());
Classifier indirTable = getCorrelationTable(table, thisEnd,
otherEnd);
if (indirTable == null) {
log.error("no indirection table found for "
+ klass.getName() + Constant.DOT + thisEnd.getName()
+ "<->" + otherEnd.getName() + Constant.DOT
+ otherEnd.getType().getName());
continue;
}
// NOTE(review): these two lookups can return null even though
// getCorrelationTable() checked them; a model change between the
// calls is impossible here, but a null would NPE below -- confirm.
Attribute fkPointingToThisClass = getFKColumn(otherEnd
.getType(), indirTable, thisEnd.getName());
Attribute fkPointingToElementClass = getFKColumn(thisEnd
.getType(), indirTable, otherEnd.getName());
Element colDescEl = new Element("collection-descriptor");
classDescEl.addContent(colDescEl);
ordEl = colDescEl;
colDescEl.setAttribute("name", otherEnd.getName());
colDescEl.setAttribute("element-class-ref", _pkgName
+ "impl." + otherEnd.getType().getName() + "Impl");
colDescEl.setAttribute("indirection-table", indirTable
.getName());
Element fkpttcEl = new Element("fk-pointing-to-this-class");
colDescEl.addContent(fkpttcEl);
fkpttcEl.setAttribute("column", fkPointingToThisClass
.getName());
Element fkptecEl = new Element(
"fk-pointing-to-element-class");
colDescEl.addContent(fkptecEl);
fkptecEl.setAttribute("column", fkPointingToElementClass
.getName());
} else {
log.error("unknown assoc multiplicity for " + klass.getName()
+ Constant.DOT + thisEnd.getName() + "<->"
+ otherEnd.getName());
throw new RuntimeException(
"unknown assoc multiplicity for " + klass.getName()
+ Constant.DOT + thisEnd.getName() + "<->"
+ otherEnd.getName());
}
// Attributes common to every descriptor kind created above.
ordEl.setAttribute("auto-retrieve", "false");
Element recipRolEl = new Element("attribute");
ordEl.addContent(recipRolEl);
recipRolEl.setAttribute("attribute-name", "reciprocol-role");
recipRolEl.setAttribute("attribute-value", thisEnd.getName());
String navigableStr = Boolean.toString(otherEnd.isNavigable());
Element navigableEl = new Element("attribute");
ordEl.addContent(navigableEl);
navigableEl.setAttribute("attribute-name", "navigable");
navigableEl.setAttribute("attribute-value", navigableStr);
}
log.debug("...done associations.");
// Class-descriptor for the interface, with an extent-class entry that
// points at the Impl class.
String interfaceExp = "class-descriptor[@class='" + _pkgName
+ klass.getName() + "']";
Element interfaceEl = null;
try {
interfaceEl = (Element) (new JDOMXPath(interfaceExp))
.selectSingleNode(root);
} catch (JaxenException ex) {
log.error("error selecting " + interfaceExp +" :" + ex.getMessage());
throw new RuntimeException("error selecting " + interfaceExp,
ex);
}
if (interfaceEl == null) {
interfaceEl = new Element("class-descriptor");
root.addContent(interfaceEl);
interfaceEl.setAttribute("class", _pkgName + klass.getName());
}
String extentExp = "extent-class[@class-ref='" + _pkgName + "impl."
+ klass.getName() + "Impl']";
Element extentEl = null;
try {
extentEl = (Element) (new JDOMXPath(extentExp))
.selectSingleNode(interfaceEl);
} catch (JaxenException ex) {
log.error( "error selecting " + extentExp+ " : " + ex.getMessage());
throw new RuntimeException("error selecting " + extentExp, ex);
}
if (extentEl == null) {
extentEl = new Element("extent-class");
interfaceEl.addContent(extentEl);
extentEl.setAttribute("class-ref", _pkgName + "impl."
+ klass.getName() + "Impl");
}
// Implementation inheritance: every ancestor Impl class lists this
// Impl class in its extent.
Classifier superClass = UML13Utils.getSuperClass((UmlClass) klass);
while (superClass != null) {
String intExp = "class-descriptor[@class='" + _pkgName
+ "impl." + superClass.getName() + "Impl']";
Element intEl = null;
try {
intEl = (Element) (new JDOMXPath(intExp))
.selectSingleNode(root);
} catch (JaxenException ex) {
log.error("error selecting " + intExp + " : " + ex.getMessage());
throw new RuntimeException("error selecting " + intExp, ex);
}
if (intEl == null) {
intEl = new Element("class-descriptor");
root.addContent(intEl);
intEl.setAttribute("class", _pkgName + "impl."
+ superClass.getName() + "Impl");
}
String extExp = "extent-class[@class-ref='" + _pkgName
+ "impl." + klass.getName() + "Impl']";
Element extEl = null;
try {
extEl = (Element) (new JDOMXPath(extExp))
.selectSingleNode(intEl);
} catch (JaxenException ex) {
log.error("error selecting " + extExp + " : " + ex.getMessage());
throw new RuntimeException("error selecting " + extExp, ex);
}
if (extEl == null) {
extEl = new Element("extent-class");
intEl.addContent(extEl);
extEl.setAttribute("class-ref", _pkgName + "impl."
+ klass.getName() + "Impl");
}
superClass = UML13Utils.getSuperClass((UmlClass) superClass);
}
/**
 * Interface inheritance
 */
superClass = UML13Utils.getSuperClass((UmlClass) klass);
while (superClass != null) {
String intExp = "class-descriptor[@class='" + _pkgName
+ superClass.getName() + "']";
Element intEl = null;
try {
intEl = (Element) (new JDOMXPath(intExp))
.selectSingleNode(root);
} catch (JaxenException ex) {
log.error("error selecting " + intExp + " : " + ex.getMessage());
throw new RuntimeException("error selecting " + intExp, ex);
}
if (intEl == null) {
intEl = new Element("class-descriptor");
root.addContent(intEl);
intEl
.setAttribute("class", _pkgName
+ superClass.getName());
}
String extExp = "extent-class[@class-ref='" + _pkgName
+ klass.getName() + "']";
Element extEl = null;
try {
extEl = (Element) (new JDOMXPath(extExp))
.selectSingleNode(intEl);
} catch (JaxenException ex) {
log.error("error selecting " + extExp + " : " + ex.getMessage());
throw new RuntimeException("error selecting " + extExp, ex);
}
if (extEl == null) {
extEl = new Element("extent-class");
intEl.addContent(extEl);
extEl.setAttribute("class-ref", _pkgName + klass.getName());
}
superClass = UML13Utils.getSuperClass((UmlClass) superClass);
}
}
return root;
}
/**
 * Looks for an "implements-association" tagged value on the given column
 * whose value matches "&lt;class&gt;.&lt;attName&gt;" for this class or any of
 * its superclasses, and returns the association role name (the part after
 * the dot).
 *
 * @param klass   the persistent class being mapped
 * @param column  the table column to inspect
 * @param attName the attribute name the column maps to
 * @return the association role name, or null if the column does not
 *         implement a matching association
 */
private String getImplementedAssociationName(Classifier klass,
        Attribute column, String attName) {
    log.debug("class = " + klass.getName() + ", column = "
            + column.getName() + ", attName = " + attName);
    uml.UmlPackage umlExtent = (uml.UmlPackage) klass.refOutermostPackage();
    Collection taggedValues = umlExtent.getFoundation()
            .getExtensionMechanisms().getAModelElementTaggedValue()
            .getTaggedValue(column);
    log.debug("got " + taggedValues.size() + " tagged values");
    // Walk up the inheritance chain; the tagged value may reference a
    // superclass rather than klass itself.
    for (Classifier current = klass; current != null;
            current = UML13Utils.getSuperClass((UmlClass) current)) {
        for (Iterator it = taggedValues.iterator(); it.hasNext();) {
            TaggedValue tv = (TaggedValue) it.next();
            log.debug("checking " + tv.getTag() + Constant.EQUAL + tv.getValue()
                    + " against " + current.getName() + Constant.DOT + attName);
            if ("implements-association".equals(tv.getTag())
                    && tv.getValue().equals(
                            current.getName() + Constant.DOT + attName)) {
                // Strip the leading "<class>." prefix to get the role name.
                return tv.getValue().substring(
                        tv.getValue().indexOf(Constant.DOT) + 1);
            }
        }
    }
    return null;
}
/**
 * Tells whether the class declares an attribute with the given name.
 *
 * @param klass            the class to inspect
 * @param attName          the attribute name to look for
 * @param includeInherited when true, superclasses are searched as well
 * @return true if a matching attribute is found
 */
private boolean hasAttribute(Classifier klass, String attName,
        boolean includeInherited) {
    Classifier current = klass;
    while (current != null) {
        for (Iterator it = current.getFeature().iterator(); it.hasNext();) {
            Feature feature = (Feature) it.next();
            if (feature instanceof Attribute
                    && attName.equals(((Attribute) feature).getName())) {
                return true;
            }
        }
        // Either climb to the superclass or stop after the first pass.
        current = includeInherited
                ? UML13Utils.getSuperClass((UmlClass) current)
                : null;
    }
    return false;
}
/**
 * Returns the value of the column's "conversion" tagged value (the OJB
 * field-conversion class name), or null when the column has none.
 */
private String getConversion(Attribute column) {
    TaggedValue conversionTv = UML13Utils.getTaggedValue(column, "conversion");
    return (conversionTv == null) ? null : conversionTv.getValue();
}
/**
 * Maps a database column type name to the JDBC type used in the
 * field-descriptor: VARCHAR* -> VARCHAR, NUMBER* -> BIGINT.
 *
 * @param dbType the database type name (e.g. "VARCHAR2(50)", "NUMBER(10)")
 * @return the corresponding JDBC type name
 * @throws RuntimeException for any other database type
 */
private String getJDBCType(String dbType) {
    if (dbType.startsWith("VARCHAR")) {
        return "VARCHAR";
    }
    if (dbType.startsWith("NUMBER")) {
        return "BIGINT";
    }
    log.error("Unknown db type: " + dbType);
    throw new RuntimeException("Unknown db type: " + dbType);
}
/**
 * Resolves the class attribute a table column maps to, using the column's
 * "maps-to-attribute" tagged value ("&lt;class&gt;.&lt;attribute&gt;"); the class
 * part may name this class or any of its superclasses.
 *
 * @param klass  the persistent class being mapped
 * @param column the table column to resolve
 * @return the attribute name (the part after the first dot), or null when
 *         no matching tagged value exists
 */
private String getAttributeName(Classifier klass, Attribute column) {
    log.debug("klass = " + klass.getName() + ", column = "
            + column.getName());
    uml.UmlPackage umlExtent = (uml.UmlPackage) klass.refOutermostPackage();
    Collection taggedValues = umlExtent.getFoundation()
            .getExtensionMechanisms().getAModelElementTaggedValue()
            .getTaggedValue(column);
    log.debug("got " + taggedValues.size() + " tagged values");
    // Walk up the inheritance chain; the tagged value may reference a
    // superclass rather than klass itself.
    for (Classifier current = klass; current != null;
            current = UML13Utils.getSuperClass((UmlClass) current)) {
        for (Iterator it = taggedValues.iterator(); it.hasNext();) {
            TaggedValue tv = (TaggedValue) it.next();
            log.debug("checking " + tv.getTag() + Constant.EQUAL + tv.getValue());
            if ("maps-to-attribute".equals(tv.getTag())
                    && tv.getValue().startsWith(current.getName())) {
                return tv.getValue().substring(
                        tv.getValue().indexOf(Constant.DOT) + 1);
            }
        }
    }
    return null;
}
/**
 * Tells whether the class has at least one subclass (i.e. appears as the
 * parent in a generalization), in which case it needs an
 * ojbConcreteClass field-descriptor.
 */
private boolean isSuperClass(Classifier klass) {
    return !klass.getSpecialization().isEmpty();
}
/**
 * Finds the correlation (indirection) table joining the two ends of a
 * many-to-many association. A candidate is any classifier associated with
 * the given table that has exactly two Correlation-stereotyped
 * dependencies whose suppliers are the two association end types, and
 * that holds an FK column for each end.
 *
 * @param table    the table mapped to the class owning thisEnd
 * @param thisEnd  this class's association end
 * @param otherEnd the opposite association end
 * @return the correlation table, or null when none matches
 */
private Classifier getCorrelationTable(Classifier table,
AssociationEnd thisEnd, AssociationEnd otherEnd) {
Classifier corrTable = null;
for (Iterator i = UML13Utils.getAssociationEnds(table).iterator(); i
.hasNext();) {
AssociationEnd te = (AssociationEnd) i.next();
AssociationEnd oe = UML13Utils.getOtherAssociationEnd(te);
Classifier c = oe.getType();
if (c.getClientDependency().size() == 2) {
Iterator j = c.getClientDependency().iterator();
Dependency depOne = (Dependency) j.next();
Stereotype depOneS = UML13Utils.getStereotype(depOne);
Dependency depTwo = (Dependency) j.next();
Stereotype depTwoS = UML13Utils.getStereotype(depTwo);
// NOTE(review): getStereotype() may return null, in which case
// depOneS.getName() below would NPE -- confirm correlation-table
// dependencies always carry stereotypes in the models used.
if ("Correlation".equals(depOneS.getName())
&& "Correlation".equals(depTwoS.getName())) {
log.debug("got the correlations");
Classifier suppOne = (Classifier) depOne.getSupplier()
.iterator().next();
Classifier suppTwo = (Classifier) depTwo.getSupplier()
.iterator().next();
log.debug("suppOne = " + suppOne.getName()
+ ", suppTwo = " + suppTwo.getName());
// Identity (==) comparison is intentional: JMI repository
// objects are unique per model element.
if ((suppOne == thisEnd.getType() || suppOne == otherEnd
.getType())
&& (suppTwo == thisEnd.getType() || suppTwo == otherEnd
.getType())) {
log.debug("suppliers match");
// Accept the table only if it holds FK columns for both ends.
if (getFKColumn(otherEnd.getType(), c, thisEnd
.getName()) != null
&& getFKColumn(thisEnd.getType(), c, otherEnd
.getName()) != null) {
corrTable = c;
break;
}
}
}
}
}
return corrTable;
}
/**
 * Finds the column of the given table that implements the named
 * association for the given class (per its "implements-association"
 * tagged value).
 *
 * @param klass the class on one end of the association
 * @param table the table whose columns are searched
 * @param name  the association role name
 * @return the foreign-key column, or null when none is found
 */
private Attribute getFKColumn(Classifier klass, Classifier table,
        String name) {
    log.debug("class = " + klass.getName() + ", table = "
            + table.getName() + ", name = " + name);
    for (Iterator it = UML13Utils.getAttributes((UmlClass) table).iterator();
            it.hasNext();) {
        Attribute candidate = (Attribute) it.next();
        if (getImplementedAssociationName(klass, candidate, name) != null) {
            return candidate;
        }
    }
    return null;
}
/**
 * Returns the table a class is persisted to: the client of the class's
 * single DataSource-stereotyped dependency.
 *
 * @param klass the persistent class
 * @return the table classifier, or null when zero or more than one
 *         DataSource dependency is found (an error is logged in that case)
 */
private Classifier getTable(Classifier klass) {
Classifier table = null;
Collection clients = new ArrayList();
for (Iterator j = klass.getSupplierDependency().iterator(); j.hasNext();) {
Dependency d = (Dependency) j.next();
Stereotype s = UML13Utils.getStereotype(d);
if (s != null && "DataSource".equals(s.getName())) {
clients.addAll(d.getClient());
}
}
// Exactly one data source is required; anything else is a modeling error.
if (clients.size() != 1) {
log.error(clients.size() + " data sources found for "
+ klass.getName());
} else {
table = (Classifier) clients.iterator().next();
}
return table;
}
/**
 * Resolves the inverse foreign-key field reference for a 1:n association:
 * finds the column of the child's table that implements the association
 * back to this class, and returns the attribute name it maps to.
 *
 * @param klass   the class on the "many" side
 * @param table   that class's table
 * @param attName the association role name on the "one" side
 * @return the referenced attribute name, or null when none is found
 */
private String getInvFKFieldRef(Classifier klass, Classifier table,
        String attName) {
    log.debug("class = " + klass.getName() + ", table = "
            + table.getName() + ", attName = " + attName);
    for (Iterator it = UML13Utils.getAttributes((UmlClass) table).iterator();
            it.hasNext();) {
        Attribute column = (Attribute) it.next();
        if (getImplementedAssociationName(klass, column, attName) != null) {
            return getAttributeName(klass, column);
        }
    }
    return null;
}
/**
 * Configures this transformer from its control-file element: reads the
 * optional "param" child (packageName) and the mandatory "filter" child,
 * then instantiates and configures the classifier filter.
 *
 * @param config the transformer element from the control file
 * @throws ConfigurationException if the filter element or its class name
 *         is missing, or the filter class cannot be instantiated
 * @see gov.nih.nci.codegen.core.JDOMConfigurable#configure(org.jdom.Element)
 */
public void configure(org.w3c.dom.Element config)
        throws ConfigurationException {
    org.w3c.dom.Element filterEl = XMLUtils.getChild(config, "filter");
    if (filterEl == null) {
        log.error("no child filter element found");
        throw new ConfigurationException("no child filter element found");
    }
    // DOM getAttribute() returns an empty string -- never null -- for a
    // missing attribute, so the former null-only check could never fire.
    String className = filterEl.getAttribute("className");
    if (className == null || className.length() == 0) {
        log.error("no filter class name specified");
        throw new ConfigurationException("no filter class name specified");
    }
    // The param element is optional; the former code dereferenced it
    // unconditionally and threw a NullPointerException when absent.
    org.w3c.dom.Element paramEl = XMLUtils.getChild(config, "param");
    _pkgName = (paramEl == null) ? null : paramEl.getAttribute("packageName");
    if (_pkgName == null || _pkgName.length() == 0) {
        // No package prefix: generated class refs are unqualified. The old
        // empty-attribute case wrongly produced a bare "." prefix here.
        _pkgName = "";
    } else {
        _pkgName = _pkgName + Constant.DOT;
    }
    try {
        _classifierFilt = (UML13ClassifierFilter) Class.forName(className)
                .newInstance();
    } catch (Exception ex) {
        // Log the underlying reason; the original discarded it entirely.
        log.error("Couldn't instantiate " + className + ": " + ex);
        throw new ConfigurationException("Couldn't instantiate "
                + className);
    }
    _classifierFilt.configure(filterEl);
}
}
| |
/*
* Copyright 2015 NEC Corporation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.o3project.odenos.remoteobject;
import static org.msgpack.template.Templates.tMap;
import static org.msgpack.template.Templates.TString;
import org.msgpack.MessagePackable;
import org.msgpack.packer.Packer;
import org.msgpack.unpacker.Unpacker;
import java.io.IOException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
/**
 * Property bag for a RemoteObject (or a RemoteObject subclass): a string
 * key/value map with well-known key names, lifecycle state values, and
 * write protection for keys that must not change once set.
 */
public class ObjectProperty implements MessagePackable, Cloneable {

    /**
     * Well-known property key names.
     * Declared static: this nested class is a pure constant holder and
     * needs no hidden reference to an enclosing ObjectProperty instance.
     */
    public static class PropertyNames {
        public static final String OBJECT_ID = "id";
        public static final String OBJECT_SUPER_TYPE = "super_type";
        public static final String OBJECT_TYPE = "type";
        public static final String OBJECT_STATE = "state";
        public static final String BASE_URI = "base_uri";
        public static final String DESCRIPTION = "description";
        public static final String CM_ID = "cm_id";
        public static final String COMPONENT_TYPES = "component_types";
        public static final String CONNECTION_TYPES = "connection_types";
    }

    /**
     * Lifecycle state values stored under {@link PropertyNames#OBJECT_STATE}.
     * Declared static for the same reason as {@link PropertyNames}.
     */
    public static class State {
        public static final String INITIALIZING = "initializing";
        public static final String RUNNING = "running";
        public static final String FINALIZING = "finalizing";
        public static final String ERROR = "error";
    }

    /** Backing store for all key/value pairs. */
    protected Map<String, String> property = new HashMap<String, String>();

    /**
     * Constructor.
     * @deprecated @see #ObjectProperty(String, String)
     */
    @Deprecated
    public ObjectProperty() {
    }

    /**
     * Constructor.
     * @param objectType type of objects.
     * @param objectId object ID.
     * @param baseUri base URI
     * @deprecated @see #ObjectProperty(String, String)
     */
    @Deprecated
    public ObjectProperty(String objectType, String objectId, String baseUri) {
        property.put(PropertyNames.OBJECT_TYPE, objectType);
        property.put(PropertyNames.OBJECT_ID, objectId);
        property.put(PropertyNames.BASE_URI, baseUri);
    }

    /**
     * Constructor.
     * @param objectType type of objects.
     * @param objectId object ID.
     */
    public ObjectProperty(String objectType, String objectId) {
        property.put(PropertyNames.OBJECT_TYPE, objectType);
        property.put(PropertyNames.OBJECT_ID, objectId);
    }

    /**
     * Returns a copy of this property set; the key/value map is copied, and
     * keys and values are immutable Strings, so the copy is independent.
     */
    @Override
    public Object clone() {
        ObjectProperty obj = new ObjectProperty();
        obj.property = new HashMap<String, String>(this.property);
        return obj;
    }

    public String getObjectType() {
        return property.get(PropertyNames.OBJECT_TYPE);
    }

    public String getObjectId() {
        return property.get(PropertyNames.OBJECT_ID);
    }

    public String getBaseUri() {
        return property.get(PropertyNames.BASE_URI);
    }

    public String getObjectState() {
        return property.get(PropertyNames.OBJECT_STATE);
    }

    /**
     * Sets the object state (see {@link State} for the defined values).
     * @param objectState new state value.
     * @return previous state value, or null if none was set.
     */
    public String setObjectState(String objectState) {
        return setProperty(PropertyNames.OBJECT_STATE, objectState);
    }

    /**
     * Set a property. Writes to read-only keys (see
     * {@link #isReadOnlyKey(String)}) are silently ignored.
     * @param key a key name.
     * @param value a value of key.
     * @return previous value associated with key, or null when the key is
     *         read-only or had no previous value.
     */
    public String setProperty(String key, String value) {
        if (!isReadOnlyKey(key)) {
            return property.put(key, value);
        }
        return null;
    }

    public String getProperty(String key) {
        return property.get(key);
    }

    /**
     * Delete a property. Read-only keys are silently ignored.
     * @param key deleted the key.
     * @return previous value associated with key, or null.
     */
    public final String deleteProperty(final String key) {
        if (!isReadOnlyKey(key)) {
            return property.remove(key);
        }
        return null;
    }

    /**
     * Replaces this property set with the given one: keys absent from
     * newProperty are deleted, all others are overwritten. Read-only keys
     * are protected by the delete/set methods used here.
     * @param newProperty replaced property.
     */
    public final void putProperty(final ObjectProperty newProperty) {
        Set<String> newKeySet = newProperty.getKeys();
        // Iterate over a snapshot: deleteProperty mutates the map that
        // backs getKeys().
        for (String key : new HashSet<String>(getKeys())) {
            if (!newKeySet.contains(key)) {
                deleteProperty(key);
            }
        }
        for (Entry<String, String> e : newProperty.property.entrySet()) {
            setProperty(e.getKey(), e.getValue());
        }
    }

    public Set<String> getKeys() {
        return property.keySet();
    }

    /**
     * Returns true if the settings are modified.
     * @param newProperty will replace the settings.
     * @return true if the settings are modified.
     */
    public final boolean isModify(final ObjectProperty newProperty) {
        Set<String> oldKeySet = this.getKeys();
        Set<String> newKeySet = newProperty.getKeys();
        // check add or delete
        if (!oldKeySet.equals(newKeySet)) {
            return true;
        }
        // check value modify (null-safe: setProperty accepts null values,
        // so a plain newValue.equals(...) could throw NullPointerException)
        for (Entry<String, String> e : newProperty.property.entrySet()) {
            String newValue = e.getValue();
            String oldValue = this.getProperty(e.getKey());
            if (newValue == null ? oldValue != null
                    : !newValue.equals(oldValue)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Returns true when the key may not be modified: the object type is
     * always read-only; super type, description and connection types become
     * read-only once a value for them exists.
     */
    protected Boolean isReadOnlyKey(String key) {
        return key.equals(PropertyNames.OBJECT_TYPE)
                || (key.equals(PropertyNames.OBJECT_SUPER_TYPE)
                && (property.containsKey(key)))
                || (key.equals(PropertyNames.DESCRIPTION)
                && (property.containsKey(key)))
                || (key.equals(PropertyNames.CONNECTION_TYPES)
                && (property.containsKey(key)));
    }

    @Override
    public void writeTo(Packer packer) throws IOException {
        packer.write(property);
    }

    @Override
    public void readFrom(Unpacker unpk) throws IOException {
        property.clear();
        property.putAll(unpk.read(tMap(TString, TString)));
    }
}
| |
/*
* This file is part of: Xfolite (J2ME XForms client)
*
* Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
*
* Contact: Oskari Koskimies <oskari.koskimies@nokia.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation; either version 2.1 of the License, or
* (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
* You should have received a copy of the GNU Lesser
* General Public License along with this program.
* If not, see <http://www.gnu.org/licenses/>.
*/
package com.nokia.xfolite.client;
import java.util.Hashtable;
import java.util.Calendar;
import java.util.Date;
import javax.microedition.io.Connector;
import javax.microedition.io.HttpConnection;
import javax.microedition.io.StreamConnection;
import javax.microedition.lcdui.AlertType;
import javax.microedition.lcdui.Display;
import javax.microedition.lcdui.Displayable;
import javax.microedition.lcdui.Command;
import javax.microedition.lcdui.CommandListener;
import org.kxml2.io.KXmlParser;
import org.kxml2.io.KXmlSerializer;
import org.xmlpull.v1.XmlPullParserException;
import de.enough.polish.ui.*;
import de.enough.polish.util.ArrayList;
import de.enough.polish.util.Locale;
import com.nokia.xfolite.client.ui.*;
import com.nokia.xfolite.client.util.CookieJar;
import com.nokia.xfolite.client.util.EventQueue;
import com.nokia.xfolite.client.util.ProtocolFactory;
import com.nokia.xfolite.client.util.ThreadPool;
import com.nokia.xfolite.xforms.dom.*;
import com.nokia.xfolite.xforms.model.*;
import com.nokia.xfolite.xforms.model.datasource.*;
import com.nokia.xfolite.xforms.model.datatypes.*;
import com.nokia.xfolite.xforms.submission.*;
import com.nokia.xfolite.xml.dom.*;
import com.nokia.xfolite.xml.dom.events.*;
import com.nokia.xfolite.xml.xpath.NodeSet;
import com.nokia.xfolite.xml.xpath.XPathResult;
import java.io.*;
import javax.microedition.midlet.MIDlet;
import javax.microedition.lcdui.Graphics;
/**
* <p>Provides an XForms form.</p>
*
* @author Oskari Koskimies
*/
public class XFormsForm extends Form implements UserInterface, CommandListener {

    /** The XForms document (DOM) backing this form. */
    private XFormsDocument m_doc = null;
    /** Display of the owning MIDlet; required for setCurrent/callSerially. */
    private Display m_display;
    /** Controller that binds the XForms widgets to the Polish UI tree. */
    private PolishController m_controller = null;
    // Listener for non-exec commands
    private CommandListener m_listener = null;
    /** Command executed by {@link #close()}; may be null. */
    private ExecCommand m_exitCommand;
    /** Owning MIDlet, consulted for application properties (may be null). */
    private MIDlet m_midlet;
    /** Explicit property overrides; checked before MIDlet/system properties. */
    private Hashtable properties = null;
    /** Title to restore once the "processing" busy title is cleared. */
    private String m_title;
    // NOTE: an unused private field 'parseListener' was removed; the parse
    // listener is stored on m_doc, never on this form.
    /** Command that triggers the free-memory report in commandAction. */
    private final Command m_memoryCommand = new Command(
            Locale.get("forms.cmd.memory"), Command.SCREEN, 10 );

    /**
     * Creates a new XForms-based form with the default style.
     *
     * @param defaultTitle The default title of the form (will be overwritten
     *        by the title element in the form if present)
     * @param exitCommand Command executed by {@link #close()}; may be null
     * @param display The Display object for this midlet (required for access
     *        to callSerially)
     */
    public XFormsForm(String defaultTitle, ExecCommand exitCommand, Display display) {
        this(defaultTitle, exitCommand, display, null);
    }

    /**
     * Creates a new XForms-based form.
     *
     * @param defaultTitle The default title of the form (will be overwritten
     *        by the title element in the form if present)
     * @param exitCommand Command executed by {@link #close()}; may be null
     * @param display The Display object for this midlet (required for access
     *        to callSerially)
     * @param style The style for this form, applied using the #style
     *        preprocessing directive
     */
    public XFormsForm(String defaultTitle, ExecCommand exitCommand, Display display, Style style ) {
        //#if polish.usePolishGui
        //# super( defaultTitle, style );
        //#else
        super( defaultTitle );
        //#endif
        // FIX: remember the title in every constructor, not only in the
        // 3-arg one; setBusy(false) restores m_title and previously saw null
        // when the 4-arg constructor was used directly.
        m_title = defaultTitle;
        m_display = display;
        m_doc = new XFormsDocument();
        m_exitCommand = exitCommand;
        if (m_exitCommand != null) {
            this.addCommand(m_exitCommand);
        }
        this.addCommand(m_memoryCommand);
    }

    /**
     * Registers a listener for parse progress callbacks.
     *
     * <p>The misspelled name is retained for backward compatibility;
     * prefer {@link #setParseListener(ParseListener, int)}.</p>
     */
    public void setParseListner(ParseListener listener, int callbackInterval) {
        m_doc.setParseListener(listener, callbackInterval);
    }

    /**
     * Registers a listener for parse progress callbacks.
     *
     * @param listener the listener to notify while parsing
     * @param callbackInterval callback interval, forwarded to the document
     */
    public void setParseListener(ParseListener listener, int callbackInterval) {
        m_doc.setParseListener(listener, callbackInterval);
    }

    /** Sets the owning MIDlet (used for application-property lookups). */
    public void setMidlet(MIDlet midlet) {
        this.m_midlet = midlet;
    }

    /** Sets explicit property overrides consulted first by getProperty. */
    public void setProperties(Hashtable properties) {
        this.properties = properties;
    }

    /**
     * Looks up a named property. Search order: explicit properties table,
     * then MIDlet application properties, then system properties.
     *
     * @param name property name
     * @return the value, or null if not found anywhere
     */
    public String getProperty(String name) {
        //#debug
        System.out.println("Asking for property " + name);
        String value = null;
        if (properties != null) {
            value = (String) properties.get(name);
            //#debug
            System.out.println("Got from properties: " + value);
        }
        if (value == null && m_midlet != null) {
            value = m_midlet.getAppProperty(name);
            //#debug
            System.out.println("Got from midlet: " + value);
        }
        if (value == null) {
            value = System.getProperty(name);
            //#debug
            System.out.println("Got from system: " + value);
        }
        //#debug
        System.out.println("Returning: " + value);
        return value;
    }

    /** Sets the screen title and records it for restoration after busy state. */
    public void setTitle(String title) {
        super.setTitle(title);
        m_title = title;
    }

    /**
     * Shows an informational alert on top of this form.
     *
     * @param msg the message text to display
     */
    public void showMessage(String msg) {
        //#debug info
        System.out.println("Showing message: " + msg);
        //#style xformsmessage
        Alert alert = new Alert(Locale.get("forms.label.message"),
                msg, null, AlertType.INFO);
        // Two-arg setCurrent(Alert, Displayable) is valid MIDP even though
        // some tooling flags it.
        m_display.setCurrent(alert, this);
        // FIXME: Implement proper popup
        /*
         * final Popup popup = new Popup(Pic.get("/info.png"),
         * "Message", msg); popup.setCommands(this, Popup.OK, null);
         * Screen.get().push(popup);
         */
    }

    /** Executes the exit command, if one was configured. */
    public void close() {
        if (m_exitCommand != null) {
            m_exitCommand.execute(null, this);
        }
    }

    /**
     * Loads a new form from the given URL and makes it the current
     * displayable. HTTP Set-Cookie response headers are stored in the shared
     * CookieJar. On failure an error screen is shown instead.
     *
     * @param url URL of the XForms document to load
     */
    public void load(String url) {
        StreamConnection connection = null;
        InputStream in = null;
        try
        {
            //#style xform
            XFormsForm form = new XFormsForm(
                    Locale.get("main.label.loading"), m_exitCommand, m_display);
            form.setBaseURL(url);
            //form.setMidlet(this);
            form.setProperties(this.properties);
            m_display.setCurrent( form );
            //#debug info
            System.out.println("Loading form: " + url);
            connection = ProtocolFactory.getInstance().getConnection(url);
            in = connection.openInputStream();
            if (connection instanceof HttpConnection) {
                int i = 0;
                HttpConnection hc = (HttpConnection) connection;
                CookieJar jar = CookieJar.getInstance();
                String headerKey;
                while((headerKey = hc.getHeaderFieldKey(i)) != null) {
                    //#debug info
                    System.out.println("Got header " + headerKey);
                    if (headerKey.toLowerCase().equals("set-cookie")) {
                        jar.setCookie(hc.getHeaderField(i));
                    }
                    i++;
                }
            }
            form.loadDocument(in);
            //#debug info
            System.out.println("Form loaded");
        } catch (Exception ex) {
            //#debug error
            System.out.println(
                    Locale.get("forms.error.errorWhileLoadingForm") + ": " + ex);
            Form form = new Form(Locale.get("main.label.loading"));
            StringItem error = new StringItem(ex.getClass().getName(), ex.getMessage());
            form.append(error);
            // FIX: guard against a null exit command; addCommand(null) throws.
            if (m_exitCommand != null) {
                form.addCommand(m_exitCommand);
            }
            m_display.setCurrent( form );
            form.setCommandListener(this);
        } finally {
            // FIX: close the stream and connection on the error path too,
            // not only after a successful load. Closing is best-effort;
            // we don't care too much if closing fails.
            if (in != null) {
                try { in.close(); } catch (Exception ignore) {}
            }
            if (connection != null) {
                try { connection.close(); } catch (Exception ignore) {}
            }
        }
    }

    /**
     * Parses the given stream as an XForms document into this form,
     * rebuilding the widget tree. The busy indicator is shown for the
     * duration of the parse.
     *
     * @param form UTF-8 encoded XForms document stream
     * @throws IOException on read errors
     * @throws XmlPullParserException on malformed XML
     */
    public void loadDocument(InputStream form)
        throws IOException, XmlPullParserException
    {
        try {
            setBusy(true);
            long time = System.currentTimeMillis();
            System.out.println("formview.enter()"+time);
            KXmlParser parser = new KXmlParser();
            parser.setInput(form, "UTF-8");
            Container root = new Container(false);
            this.deleteAll();
            this.append(root);
            m_controller = new PolishController(root, m_display, this);
            setItemStateListener(m_controller);
            super.setCommandListener(this);
            //#debug info
            System.out.println("Parsing form..");
            m_doc.setRendererFactory(m_controller.getWidgetFactory());
            // doc.registerDataSource(new SimulatedDataSource()); // for demo purposes
            // doc.registerDataSource(new GPSDataSource(MainView._gpsDevice,this,true)); // for demo purposes
            m_doc.setUserInterface(this);
            //doc.setStoreProvider(this);
            //doc.registerEventProvider("gps", this);
            m_doc.addSubmitter(new HTTPSubmitter());
            m_doc.addSerializer(new XFormsXMLSerializer());
            m_doc.addSerializer(new MultipartRelatedSerializer());
            m_doc.addSerializer(new MultipartFormDataSerializer());
            //doc.addSubmitter(this);
            m_doc.parse(parser);
            //#debug info
            System.out.println("Form construct took: "+(System.currentTimeMillis()-time)+"ms");
        } finally {
            // A redundant catch-and-rethrow of IOException was removed here;
            // exceptions propagate unchanged and the busy flag always resets.
            setBusy(false);
        }
    }

    /**
     * Loads a form and an additional instance-data document in one call.
     *
     * @param form the XForms document stream
     * @param data instance data stream (parsed as XML)
     * @param dataId instance id under which the data is registered
     */
    public void loadDocument(InputStream form, InputStream data, String dataId)
        throws IOException, XmlPullParserException
    {
        addData(data, dataId);
        loadDocument(form);
    }

    /**
     * Parses the given stream as XML and registers it as an instance.
     *
     * @param data instance data stream (UTF-8)
     * @param dataId instance id under which the data is registered
     */
    public void addData(InputStream data, String dataId)
        throws IOException, XmlPullParserException
    {
        KXmlParser parser = new KXmlParser();
        parser.setInput(data, "UTF-8");
        Document dataDoc = new Document();
        dataDoc.parse(parser);
        addData(dataDoc, dataId);
    }

    /**
     * Registers a parsed document as an instance of the XForms model.
     *
     * @param data parsed instance document; must not be null
     * @param dataId instance id; must not be null
     * @throws NullPointerException if either argument is null
     */
    public void addData(Document data, String dataId) {
        if (data != null && dataId != null) {
            m_doc.addInstance(data, dataId);
        } else {
            throw new NullPointerException("Both document and data-ID must be non-null!");
        }
    }

    /** Sets the base URL against which relative references are resolved. */
    public void setBaseURL(String url) {
        m_doc.setBaseURL(url);
    }

    /**
     * Loads a form with a pre-parsed instance-data document.
     */
    public void loadDocument(InputStream form, Document data, String dataId)
        throws IOException, XmlPullParserException
    {
        addData(data, dataId);
        loadDocument(form);
    }

    /** Registers an additional submission handler on the document. */
    public void addSubmitter(ISubmitter submitter) {
        m_doc.addSubmitter(submitter);
    }

    /** Registers a data-source factory on the document. */
    public void registerDataSource(DataSourceFactory fact) {
        m_doc.registerDataSource(fact);
    }

    /** Registers an additional serializer on the document. */
    public void addSerializer(ISerializer serializer) {
        m_doc.addSerializer(serializer);
    }

    /** @return the underlying XForms document */
    public XFormsDocument getDocument() {
        return m_doc;
    }

    /** @return the root UI container of this form */
    public Container getRootContainer() {
        return this.container;
    }

    // Nesting depth of setBusy(true) calls; title switches only at 0<->1.
    private int busyCount = 0;

    /**
     * Shows or hides the busy ("processing") title. Calls nest: the title is
     * changed only when the outermost busy scope is entered or left.
     *
     * @param busy true to enter a busy scope, false to leave one
     */
    public synchronized void setBusy(boolean busy) {
        //#debug
        System.out.println("******************************************* SETBUSY: " + busy);
        if (busy) {
            if (++busyCount == 1) {
                super.setTitle(Locale.get("forms.label.processing"));
                this.serviceRepaints();
            }
        } else {
            if (--busyCount == 0) {
                super.setTitle(m_title);
                this.serviceRepaints();
            }
        }
    }

    /** Intentionally a no-op; see the comment below. */
    public void setContext(String context) {
        /* Do not use context for now, takes up screen space and is not too useful
         *
        if (context != null) {
            //#style context
            StringItem subTitle = new StringItem(null, context);
            setSubTitle(subTitle);
        } else {
            setSubTitle(null);
        }
        */
    }

    /********************** UserInterface Interface Implementation *******************/

    /**
     * Builds a short textual location for a log message: the element name
     * plus its id attribute if present, otherwise only its first attribute.
     * (Renamed from setLogLocation; the unused lvl/msg params were dropped —
     * this is a private helper, so callers outside this class are unaffected.)
     *
     * @param el the element the log message refers to
     * @return a snippet such as {@code <input id="foo" />}
     */
    private String describeElement(Element el)
    {
        String location = "<" + el.getNodeName();
        Attr idAttr=el.getAttributeNode("id");
        if (idAttr!=null)
            location+=" id=\""+idAttr.getNodeValue()+"\" ";
        else
        {
            // No id: print only the first attribute, if any.
            if (el.getAttributeCount()>0)
            {
                Attr a = el.getAttribute(0);
                location += " " + a.getNodeName() + "=" + "\"" + a.getValue() + "\"";
            }
        }
        location += "/>";
        return location;
    }

    // log messages:
    /**
     * Logs a message at the given level, locating it at the given element.
     *
     * @param lvl one of UserInterface.LVL_ERROR / LVL_WARN / LVL_STATUS
     * @param msg the message text
     * @param el element the message refers to (used for the location string)
     */
    public void log(int lvl, String msg, Element el) {
        String location = describeElement(el);
        switch (lvl) {
        case UserInterface.LVL_ERROR:
            //#debug error
            System.out.println(
                    Locale.get("forms.error.errorAt")
                    + ": " + location + ":" + msg);
            break;
        case UserInterface.LVL_WARN:
            //#debug warn
            System.out.println("Warning at " + location + ":" + msg);
            break;
        case UserInterface.LVL_STATUS:
            //#debug info
            System.out.println(location + ":" + msg);
            break;
        }
    }

    /**
     * Runs the task serially in the UI event thread.
     */
    public void callSerially(Runnable task) {
        // On S60 3rd Edition phones (e.g. E70) the Display queue is used
        // directly. For 2.8-series phones (e.g. 6680) the custom queue was
        // used instead:
        //   EventQueue.getInstance().callSerially(task, m_display);
        // (The original code hard-coded the condition to true, so only this
        // branch was ever taken.)
        m_display.callSerially(task);
    }

    /** Runs the task on a pooled background thread. */
    public void callParallel(Runnable task) {
        // In series 60 3rd edition we could actually launch a new thread here.
        // new Thread(task).start();
        ThreadPool.getInstance().callParallel(task);
    }

    /************************ CommandListener ***************************/

    /**
     * Handles commands: the memory-report command and ExecCommands are
     * processed here; anything else is forwarded to the registered listener.
     */
    public void commandAction(Command cmd, Displayable disp) {
        //#debug
        System.out.println("Command: " + cmd);
        if (cmd == this.m_memoryCommand) {
            // Run GC twice to get a more stable free-memory figure.
            Runtime.getRuntime().gc();
            Runtime.getRuntime().gc();
            long free = Runtime.getRuntime().freeMemory();
            long total = Runtime.getRuntime().totalMemory();
            String[] params = new String[] {
                    Long.toString(free),
                    Long.toString(total)
            };
            showMessage(Locale.get("forms.msg.freeMemory", params));
        } else if (cmd instanceof ExecCommand) {
            //#debug info
            System.out.println("Executing: " + cmd);
            try {
                setBusy(true);
                ((ExecCommand)cmd).execute(disp);
            } catch (Exception ex) {
                //#debug warn
                System.out.println("Could not execute command: " + ex);
            } finally {
                setBusy(false);
            }
        } else if (m_listener != null) {
            //#debug info
            System.out.println("Forwarding: " + cmd);
            m_listener.commandAction(cmd, disp);
        } else {
            //#debug warn
            System.out.println("Ignoring: " + cmd + " (" + cmd.getLabel()+ ")");
        }
    }

    /** Sets the listener that receives commands this form does not handle. */
    public void setCommandListener(CommandListener listener) {
        m_listener = listener;
    }

    /** @return the forwarding command listener, or null if none is set */
    public CommandListener getCommandListener() {
        return m_listener;
    }

    /**
     * Paints the screen, swallowing (but logging) any UI exception so a
     * rendering bug does not kill the form.
     */
    public void paintScreen(Graphics g) {
        try {
            super.paintScreen(g);
        } catch(Exception ex) {
            //#debug error
            System.out.println(
                    Locale.get("forms.error.generalUiFailure")
                    + ": " + ex);
            ex.printStackTrace();
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* Copied from commons-validator:commons-validator:1.6, with [PATCH] modifications */
package jenkins.org.apache.commons.validator.routines;
import org.kohsuke.accmod.Restricted;
import org.kohsuke.accmod.restrictions.NoExternalUse;
import java.io.Serializable;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* <b>Regular Expression</b> validation (using JDK 1.4+ regex support).
* <p>
* Construct the validator either for a single regular expression or a set (array) of
* regular expressions. By default validation is <i>case sensitive</i> but constructors
* are provided to allow <i>case in-sensitive</i> validation. For example to create
* a validator which does <i>case in-sensitive</i> validation for a set of regular
* expressions:
* </p>
* <pre>
* <code>
* String[] regexs = new String[] {...};
* RegexValidator validator = new RegexValidator(regexs, false);
* </code>
* </pre>
*
* <ul>
* <li>Validate <code>true</code> or <code>false</code>:</li>
* <li>
* <ul>
 * <li><code>boolean valid = validator.isValid(value);</code></li>
* </ul>
* </li>
* <li>Validate returning an aggregated String of the matched groups:</li>
* <li>
* <ul>
* <li><code>String result = validator.validate(value);</code></li>
* </ul>
* </li>
* <li>Validate returning the matched groups:</li>
* <li>
* <ul>
* <li><code>String[] result = validator.match(value);</code></li>
* </ul>
* </li>
* </ul>
*
* <b>Note that patterns are matched against the entire input.</b>
*
* <p>
* Cached instances pre-compile and re-use {@link Pattern}(s) - which according
* to the {@link Pattern} API are safe to use in a multi-threaded environment.
* </p>
*
* @version $Revision: 1739356 $
* @since Validator 1.4
*/
//[PATCH]
@Restricted(NoExternalUse.class)
// end of [PATCH]
public class RegexValidator implements Serializable {

    private static final long serialVersionUID = -8832409930574867162L;

    /** Compiled expressions; a value matching any one of them is valid. */
    private final Pattern[] patterns;

    /**
     * Construct a <i>case sensitive</i> validator for a single
     * regular expression.
     *
     * @param regex The regular expression this validator will
     * validate against
     */
    public RegexValidator(String regex) {
        this(regex, true);
    }

    /**
     * Construct a validator for a single regular expression
     * with the specified case sensitivity.
     *
     * @param regex The regular expression this validator will
     * validate against
     * @param caseSensitive when <code>true</code> matching is <i>case
     * sensitive</i>, otherwise matching is <i>case in-sensitive</i>
     */
    public RegexValidator(String regex, boolean caseSensitive) {
        this(new String[] {regex}, caseSensitive);
    }

    /**
     * Construct a <i>case sensitive</i> validator that matches any one
     * of the set of regular expressions.
     *
     * @param regexs The set of regular expressions this validator will
     * validate against
     */
    public RegexValidator(String[] regexs) {
        this(regexs, true);
    }

    /**
     * Construct a validator that matches any one of the set of regular
     * expressions with the specified case sensitivity.
     *
     * @param regexs The set of regular expressions this validator will
     * validate against
     * @param caseSensitive when <code>true</code> matching is <i>case
     * sensitive</i>, otherwise matching is <i>case in-sensitive</i>
     */
    public RegexValidator(String[] regexs, boolean caseSensitive) {
        if (regexs == null || regexs.length == 0) {
            throw new IllegalArgumentException("Regular expressions are missing");
        }
        int flags = caseSensitive ? 0 : Pattern.CASE_INSENSITIVE;
        Pattern[] compiled = new Pattern[regexs.length];
        for (int idx = 0; idx < regexs.length; idx++) {
            String expression = regexs[idx];
            if (expression == null || expression.length() == 0) {
                throw new IllegalArgumentException("Regular expression[" + idx + "] is missing");
            }
            compiled[idx] = Pattern.compile(expression, flags);
        }
        patterns = compiled;
    }

    /**
     * Validate a value against the set of regular expressions.
     *
     * @param value The value to validate.
     * @return <code>true</code> if the value is valid
     * otherwise <code>false</code>.
     */
    public boolean isValid(String value) {
        if (value == null) {
            return false;
        }
        for (Pattern pattern : patterns) {
            if (pattern.matcher(value).matches()) {
                return true;
            }
        }
        return false;
    }

    /**
     * Validate a value against the set of regular expressions
     * returning the array of matched groups.
     *
     * @param value The value to validate.
     * @return String array of the <i>groups</i> matched if
     * valid or <code>null</code> if invalid
     */
    public String[] match(String value) {
        if (value == null) {
            return null;
        }
        for (Pattern pattern : patterns) {
            Matcher matcher = pattern.matcher(value);
            if (!matcher.matches()) {
                continue;
            }
            int groupCount = matcher.groupCount();
            String[] groups = new String[groupCount];
            for (int g = 0; g < groupCount; g++) {
                groups[g] = matcher.group(g + 1);
            }
            return groups;
        }
        return null;
    }

    /**
     * Validate a value against the set of regular expressions
     * returning a String value of the aggregated groups.
     *
     * @param value The value to validate.
     * @return Aggregated String value comprised of the
     * <i>groups</i> matched if valid or <code>null</code> if invalid
     */
    public String validate(String value) {
        if (value == null) {
            return null;
        }
        for (Pattern pattern : patterns) {
            Matcher matcher = pattern.matcher(value);
            if (!matcher.matches()) {
                continue;
            }
            int groupCount = matcher.groupCount();
            if (groupCount == 1) {
                return matcher.group(1);
            }
            StringBuilder aggregated = new StringBuilder();
            for (int g = 0; g < groupCount; g++) {
                String component = matcher.group(g + 1);
                if (component != null) {
                    aggregated.append(component);
                }
            }
            return aggregated.toString();
        }
        return null;
    }

    /**
     * Provide a String representation of this validator.
     * @return A String representation of this validator
     */
    @Override
    public String toString() {
        StringBuilder text = new StringBuilder("RegexValidator{");
        for (int idx = 0; idx < patterns.length; idx++) {
            if (idx > 0) {
                text.append(",");
            }
            text.append(patterns[idx].pattern());
        }
        return text.append("}").toString();
    }

}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package groovy.util;
import groovy.lang.GroovyObjectSupport;
import groovy.lang.GroovyRuntimeException;
import javax.management.Attribute;
import javax.management.JMException;
import javax.management.MBeanAttributeInfo;
import javax.management.MBeanException;
import javax.management.MBeanInfo;
import javax.management.MBeanOperationInfo;
import javax.management.MBeanParameterInfo;
import javax.management.MBeanServerConnection;
import javax.management.ObjectName;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* A GroovyObject facade for an underlying MBean which acts like a normal
* groovy object but which is actually implemented via
* an underlying JMX MBean.
* Properties and normal method invocations
* delegate to the MBeanServer to the actual MBean.
*
* @author <a href="mailto:james@coredevelopers.net">James Strachan</a>
* @author Steve Button
* @author Paul King
*/
public class GroovyMBean extends GroovyObjectSupport {
private final MBeanServerConnection server;
private final ObjectName name;
private MBeanInfo beanInfo;
private final boolean ignoreErrors;
private final Map<String, String[]> operations = new HashMap<String, String[]>();
public GroovyMBean(MBeanServerConnection server, String objectName) throws JMException, IOException {
this(server, objectName, false);
}
public GroovyMBean(MBeanServerConnection server, String objectName, boolean ignoreErrors) throws JMException, IOException {
this(server, new ObjectName(objectName), ignoreErrors);
}
public GroovyMBean(MBeanServerConnection server, ObjectName name) throws JMException, IOException {
this(server, name, false);
}
public GroovyMBean(MBeanServerConnection server, ObjectName name, boolean ignoreErrors) throws JMException, IOException {
this.server = server;
this.name = name;
this.ignoreErrors = ignoreErrors;
this.beanInfo = server.getMBeanInfo(name);
MBeanOperationInfo[] operationInfos = beanInfo.getOperations();
for (MBeanOperationInfo info : operationInfos) {
String signature[] = createSignature(info);
// Construct a simplistic key to support overloaded operations on the MBean.
String operationKey = createOperationKey(info.getName(), signature.length);
operations.put(operationKey, signature);
}
}
public MBeanServerConnection server() {
return server;
}
public ObjectName name() {
return name;
}
public MBeanInfo info() {
return beanInfo;
}
public Object getProperty(String property) {
try {
return server.getAttribute(name, property);
}
catch (MBeanException e) {
throwExceptionWithTarget("Could not access property: " + property + ". Reason: ", e);
}
catch (Exception e) {
if (!ignoreErrors)
throwException("Could not access property: " + property + ". Reason: ", e);
}
return null;
}
public void setProperty(String property, Object value) {
try {
server.setAttribute(name, new Attribute(property, value));
}
catch (MBeanException e) {
throwExceptionWithTarget("Could not set property: " + property + ". Reason: ", e);
}
catch (Exception e) {
throwException("Could not set property: " + property + ". Reason: ", e);
}
}
public Object invokeMethod(String method, Object arguments) {
Object[] argArray;
if (arguments instanceof Object[]) {
argArray = (Object[]) arguments;
} else {
argArray = new Object[]{arguments};
}
// Locate the specific method based on the name and number of parameters
String operationKey = createOperationKey(method, argArray.length);
String[] signature = operations.get(operationKey);
if (signature != null) {
try {
return server.invoke(name, method, argArray, signature);
}
catch (MBeanException e) {
throwExceptionWithTarget("Could not invoke method: " + method + ". Reason: ", e);
}
catch (Exception e) {
throwException("Could not invoke method: " + method + ". Reason: ", e);
}
return null;
} else {
return super.invokeMethod(method, arguments);
}
}
protected String[] createSignature(MBeanOperationInfo info) {
MBeanParameterInfo[] params = info.getSignature();
String[] answer = new String[params.length];
for (int i = 0; i < params.length; i++) {
answer[i] = params[i].getType();
}
return answer;
}
/**
* Construct a simple key based on the method name and the number of parameters
*
* @param operation - the mbean operation name
* @param params - the number of parameters the operation supports
* @return simple unique identifier for a method
*/
protected String createOperationKey(String operation, int params) {
// This could be changed to support some hash of the parameter types, etc.
// but should distinguish between reordered params while allowing normal
// type coercions to be honored
return operation + "_" + params;
}
/**
* List of the names of each of the attributes on the MBean
*
* @return list of attribute names
*/
public Collection<String> listAttributeNames() {
List<String> list = new ArrayList<String>();
try {
MBeanAttributeInfo[] attrs = beanInfo.getAttributes();
for (MBeanAttributeInfo attr : attrs) {
list.add(attr.getName());
}
} catch (Exception e) {
throwException("Could not list attribute names. Reason: ", e);
}
return list;
}
/**
* The values of each of the attributes on the MBean
*
* @return list of values of each attribute
*/
public List<String> listAttributeValues() {
List<String> list = new ArrayList<String>();
Collection<String> names = listAttributeNames();
for (String name : names) {
try {
Object val = this.getProperty(name);
if (val != null) {
list.add(name + " : " + val.toString());
}
} catch (Exception e) {
throwException("Could not list attribute values. Reason: ", e);
}
}
return list;
}
/**
* List of string representations of all of the attributes on the MBean.
*
* @return list of descriptions of each attribute on the mbean
*/
public Collection<String> listAttributeDescriptions() {
List<String> list = new ArrayList<String>();
try {
MBeanAttributeInfo[] attrs = beanInfo.getAttributes();
for (MBeanAttributeInfo attr : attrs) {
list.add(describeAttribute(attr));
}
} catch (Exception e) {
throwException("Could not list attribute descriptions. Reason: ", e);
}
return list;
}
/**
* Description of the specified attribute name.
*
* @param attr - the attribute
* @return String the description
*/
protected String describeAttribute(MBeanAttributeInfo attr) {
StringBuilder buf = new StringBuilder();
buf.append("(");
if (attr.isReadable()) {
buf.append("r");
}
if (attr.isWritable()) {
buf.append("w");
}
buf.append(") ")
.append(attr.getType())
.append(" ")
.append(attr.getName());
return buf.toString();
}
/**
* Description of the specified attribute name.
*
* @param attributeName - stringified name of the attribute
* @return the description
*/
public String describeAttribute(String attributeName) {
String ret = "Attribute not found";
try {
MBeanAttributeInfo[] attributes = beanInfo.getAttributes();
for (MBeanAttributeInfo attribute : attributes) {
if (attribute.getName().equals(attributeName)) {
return describeAttribute(attribute);
}
}
} catch (Exception e) {
throwException("Could not describe attribute '" + attributeName + "'. Reason: ", e);
}
return ret;
}
/**
* Names of all the operations available on the MBean.
*
* @return all the operations on the MBean
*/
public Collection<String> listOperationNames() {
List<String> list = new ArrayList<String>();
try {
MBeanOperationInfo[] operations = beanInfo.getOperations();
for (MBeanOperationInfo operation : operations) {
list.add(operation.getName());
}
} catch (Exception e) {
throwException("Could not list operation names. Reason: ", e);
}
return list;
}
/**
* Description of all of the operations available on the MBean.
*
* @return full description of each operation on the MBean
*/
public Collection<String> listOperationDescriptions() {
List<String> list = new ArrayList<String>();
try {
MBeanOperationInfo[] operations = beanInfo.getOperations();
for (MBeanOperationInfo operation : operations) {
list.add(describeOperation(operation));
}
} catch (Exception e) {
throwException("Could not list operation descriptions. Reason: ", e);
}
return list;
}
/**
* Get the description of the specified operation. This returns a Collection since
* operations can be overloaded and one operationName can have multiple forms.
*
* @param operationName the name of the operation to describe
* @return Collection of operation description
*/
public List<String> describeOperation(String operationName) {
List<String> list = new ArrayList<String>();
try {
MBeanOperationInfo[] operations = beanInfo.getOperations();
for (MBeanOperationInfo operation : operations) {
if (operation.getName().equals(operationName)) {
list.add(describeOperation(operation));
}
}
} catch (Exception e) {
throwException("Could not describe operations matching name '" + operationName + "'. Reason: ", e);
}
return list;
}
/**
* Description of the operation.
*
* @param operation the operation to describe
* @return pretty-printed description
*/
protected String describeOperation(MBeanOperationInfo operation) {
StringBuilder buf = new StringBuilder();
buf.append(operation.getReturnType())
.append(" ")
.append(operation.getName())
.append("(");
MBeanParameterInfo[] params = operation.getSignature();
for (int j = 0; j < params.length; j++) {
MBeanParameterInfo param = params[j];
if (j != 0) {
buf.append(", ");
}
buf.append(param.getType())
.append(" ")
.append(param.getName());
}
buf.append(")");
return buf.toString();
}
/**
* Return an end user readable representation of the underlying MBean
*
* @return the user readable description
*/
public String toString() {
StringBuilder buf = new StringBuilder();
buf.append("MBean Name:")
.append("\n ")
.append(name.getCanonicalName())
.append("\n ");
if (!listAttributeDescriptions().isEmpty()) {
buf.append("\nAttributes:");
for (String attrDesc : listAttributeDescriptions()) {
buf.append("\n ").append(attrDesc);
}
}
if (!listOperationDescriptions().isEmpty()) {
buf.append("\nOperations:");
for (String attrDesc : listOperationDescriptions()) {
buf.append("\n ").append(attrDesc);
}
}
return buf.toString();
}
private void throwException(String m, Exception e) {
if (!ignoreErrors) {
throw new GroovyRuntimeException(m + e, e);
}
}
private void throwExceptionWithTarget(String m, MBeanException e) {
if (!ignoreErrors) {
throw new GroovyRuntimeException(m + e, e.getTargetException());
}
}
}
| |
package com.github.jasminb.jsonapi;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.PropertyNamingStrategy;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.fasterxml.jackson.databind.node.TextNode;
import com.fasterxml.jackson.databind.type.MapType;
import com.fasterxml.jackson.databind.type.TypeFactory;
import com.github.jasminb.jsonapi.annotations.Relationship;
import com.github.jasminb.jsonapi.annotations.Type;
import com.github.jasminb.jsonapi.exceptions.DocumentSerializationException;
import com.github.jasminb.jsonapi.exceptions.UnregisteredTypeException;
import com.github.jasminb.jsonapi.models.errors.Error;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import static com.github.jasminb.jsonapi.JSONAPISpecConstants.*;
/**
* JSON API data converter. <br />
*
* Provides methods for conversion between JSON API resources to java POJOs and vice versa.
*
* @author jbegic
*/
public class ResourceConverter {
    // Registered resource classes and their converter settings.
    private final ConverterConfiguration configuration;
    // Jackson mapper used for attribute (de)serialization.
    private final ObjectMapper objectMapper;
    // Naming strategy taken from the mapper if it has one (see constructor).
    private final PropertyNamingStrategy namingStrategy;
    // Per-type relationship resolvers; consulted before the global resolver.
    private final Map<Class<?>, RelationshipResolver> typedResolvers = new HashMap<>();
    // Cache of parsed resources — presumably used to resolve 'included'
    // references without re-parsing; TODO confirm against usage below.
    private final ResourceCache resourceCache;
    // Deserialization options, initialised to the library defaults.
    private final Set<DeserializationFeature> deserializationFeatures = DeserializationFeature.getDefaultFeatures();
    // Serialization options, initialised to the library defaults.
    private final Set<SerializationFeature> serializationFeatures = SerializationFeature.getDefaultFeatures();
    // Fallback resolver used when no type-specific resolver is registered.
    private RelationshipResolver globalResolver;
    // Base URL (e.g. https://api.mysite.com); empty string when not provided.
    private String baseURL;
    /**
     * Creates new ResourceConverter with a default ObjectMapper and no base URL.
     * <p>
     * All classes that should be handled by instance of {@link ResourceConverter} must be registered
     * when creating a new instance of it.
     * </p>
     * @param classes {@link Class} array of classes to be handled by this resource converter instance
     */
    public ResourceConverter(Class<?>... classes) {
        this(null, null, classes);
    }
    /**
     * Creates new ResourceConverter with a default ObjectMapper.
     * <p>
     * All classes that should be handled by instance of {@link ResourceConverter} must be registered
     * when creating a new instance of it.
     * </p>
     * @param baseURL {@link String} base URL, eg. https://api.mysite.com
     * @param classes {@link Class} array of classes to be handled by this resource converter instance
     */
    public ResourceConverter(String baseURL, Class<?>... classes) {
        this(null, baseURL, classes);
    }
    /**
     * Creates new ResourceConverter using a custom mapper and no base URL.
     * @param mapper {@link ObjectMapper} custom mapper to be used for resource parsing
     * @param classes {@link Class} array of classes to be handled by this resource converter instance
     */
    public ResourceConverter(ObjectMapper mapper, Class<?>... classes) {
        this(mapper, null, classes);
    }
/**
 * Creates new ResourceConverter.
 * @param mapper {@link ObjectMapper} custom mapper to be used for resource parsing; when
 *               {@code null} a default mapper that omits {@code null} attributes is created
 * @param baseURL {@link String} base URL, eg. https://api.mysite.com; {@code null} is treated as ""
 * @param classes {@link Class} array of classes to be handled by this resource converter instance
 */
public ResourceConverter(ObjectMapper mapper, String baseURL, Class<?>... classes) {
    this.configuration = new ConverterConfiguration(classes);
    this.baseURL = (baseURL == null) ? "" : baseURL;

    // Use the caller-supplied mapper when given, otherwise build a default one
    if (mapper == null) {
        ObjectMapper defaultMapper = new ObjectMapper();
        defaultMapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
        this.objectMapper = defaultMapper;
    } else {
        this.objectMapper = mapper;
    }

    // Adopt the mapper's naming strategy when it has one, else fall back to the default strategy
    PropertyNamingStrategy mapperStrategy = objectMapper.getPropertyNamingStrategy();
    this.namingStrategy = (mapperStrategy == null) ? new PropertyNamingStrategy() : mapperStrategy;

    this.resourceCache = new ResourceCache();
}
/**
 * Registers global relationship resolver. This resolver will be used in case relationship is present in the
 * API response but not provided in the <code>included</code> section and relationship resolving is enabled
 * through the relationship annotation. <br/>
 * In case a type-specific resolver is registered it will be used instead.
 * @param resolver resolver instance
 */
public void setGlobalResolver(RelationshipResolver resolver) {
this.globalResolver = resolver;
}
/**
 * Registers relationship resolver for given type. Resolver will be used if relationship resolution is enabled
 * through the relationship annotation. Null resolvers and types without a resolvable type name are ignored.
 * @param resolver resolver instance
 * @param type type
 */
public void setTypeResolver(RelationshipResolver resolver, Class<?> type) {
    if (resolver == null) {
        return;
    }
    // Register only when the class has a resolvable type name
    if (ReflectionUtils.getTypeName(type) == null) {
        return;
    }
    typedResolvers.put(type, resolver);
}
/**
 * Converts raw data input into requested target type.
 * @param data raw data
 * @param clazz target object
 * @param <T> type
 * @return converted object
 * @throws RuntimeException in case conversion fails
 * @deprecated use {@link #readDocument(byte[], Class)} instead
 */
@Deprecated
public <T> T readObject(byte [] data, Class<T> clazz) {
// Delegate to the document API and unwrap the primary resource
return readDocument(data, clazz).get();
}
/**
 * Converts rawdata input into a collection of requested output objects.
 * @param data raw data input
 * @param clazz target type
 * @param <T> type
 * @return collection of converted elements
 * @throws RuntimeException in case conversion fails
 * @deprecated use {@link #readDocumentCollection(byte[], Class)} instead
 */
@Deprecated
public <T> List<T> readObjectCollection(byte [] data, Class<T> clazz) {
// Delegate to the document API and unwrap the primary resource list
return readDocumentCollection(data, clazz).get();
}
/**
 * Reads JSON API spec document and converts it into target type.
 * @param data raw data bytes (server response)
 * @param clazz {@link Class} target type
 * @param <T> type
 * @return {@link JSONAPIDocument}
 */
public <T> JSONAPIDocument<T> readDocument(byte[] data, Class<T> clazz) {
// Wrap bytes in a stream and delegate to the stream-based overload
return readDocument(new ByteArrayInputStream(data), clazz);
}
/**
 * Reads JSON API spec document and converts it into target type.
 * @param dataStream {@link InputStream} raw data stream (server response)
 * @param clazz {@link Class} target type
 * @param <T> type
 * @return {@link JSONAPIDocument} holding the parsed resource plus top-level meta and links
 * @throws RuntimeException in case parsing or validation fails (checked exceptions are wrapped)
 */
public <T> JSONAPIDocument<T> readDocument(InputStream dataStream, Class<T> clazz) {
try {
// Open a fresh cache scope for this read; cleared in the finally block
resourceCache.init();
JsonNode rootNode = objectMapper.readTree(dataStream);
// Validate
ValidationUtils.ensureValidDocument(objectMapper, rootNode);
JsonNode dataNode = rootNode.get(DATA);
ValidationUtils.ensurePrimaryDataValidObjectOrNull(dataNode);
// Parse data node without handling relationships
T resourceObject = null;
boolean cached = false;
if (ValidationUtils.isNotNullNode(dataNode)) {
String identifier = createIdentifier(dataNode);
// Reuse an already-parsed instance when this resource was seen before in this scope
cached = resourceCache.contains(identifier);
if (cached) {
resourceObject = (T) resourceCache.get(identifier);
} else {
resourceObject = readObject(dataNode, clazz, false);
}
}
// Parse all included resources
resourceCache.cache(parseIncluded(rootNode));
// Connect data node's relationships now that all resources have been parsed
if (resourceObject != null && !cached) {
handleRelationships(dataNode, resourceObject);
}
JSONAPIDocument<T> result = new JSONAPIDocument<>(resourceObject, rootNode, objectMapper);
// Handle top-level meta
if (rootNode.has(META)) {
result.setMeta(mapMeta(rootNode.get(META)));
}
// Handle top-level links
if (rootNode.has(LINKS)) {
result.setLinks(new Links(mapLinks(rootNode.get(LINKS))));
}
return result;
} catch (RuntimeException e) {
// Runtime exceptions propagate unchanged
throw e;
} catch (Exception e) {
// Checked exceptions (IO, reflection) are wrapped for the caller
throw new RuntimeException(e);
} finally {
resourceCache.clear();
}
}
/**
 * Reads JSON API spec document and converts it into collection of target type objects.
 * @param data raw data bytes (server response)
 * @param clazz {@link Class} target type
 * @param <T> type
 * @return {@link JSONAPIDocument}
 */
public <T> JSONAPIDocument<List<T>> readDocumentCollection(byte[] data, Class<T> clazz) {
// Wrap bytes in a stream and delegate to the stream-based overload
return readDocumentCollection(new ByteArrayInputStream(data), clazz);
}
/**
 * Reads JSON API spec document and converts it into collection of target type objects.
 * @param dataStream {@link InputStream} input stream
 * @param clazz {@link Class} target type
 * @param <T> type
 * @return {@link JSONAPIDocument} holding the parsed resource list plus top-level meta and links
 * @throws RuntimeException in case parsing or validation fails (checked exceptions are wrapped)
 */
public <T> JSONAPIDocument<List<T>> readDocumentCollection(InputStream dataStream, Class<T> clazz) {
try {
// Open a fresh cache scope for this read; cleared in the finally block
resourceCache.init();
JsonNode rootNode = objectMapper.readTree(dataStream);
// Validate
ValidationUtils.ensureValidDocument(objectMapper, rootNode);
JsonNode dataNode = rootNode.get(DATA);
ValidationUtils.ensurePrimaryDataValidArray(dataNode);
// Parse data node without handling relationships
List<T> resourceList = new ArrayList<>();
for (JsonNode element : dataNode) {
T pojo = readObject(element, clazz, false);
resourceList.add(pojo);
}
// Parse all included resources
resourceCache.cache(parseIncluded(rootNode));
// Connect data node's relationships now that all resources have been parsed;
// resourceList order mirrors dataNode order, so index i pairs source with its POJO
for (int i = 0; i < resourceList.size(); i++) {
JsonNode source = dataNode.get(i);
T resourceObject = resourceList.get(i);
handleRelationships(source, resourceObject);
}
JSONAPIDocument<List<T>> result = new JSONAPIDocument<>(resourceList, rootNode, objectMapper);
// Handle top-level meta
if (rootNode.has(META)) {
result.setMeta(mapMeta(rootNode.get(META)));
}
// Handle top-level links
if (rootNode.has(LINKS)) {
result.setLinks(new Links(mapLinks(rootNode.get(LINKS))));
}
return result;
} catch (RuntimeException e) {
// Runtime exceptions propagate unchanged
throw e;
} catch (Exception e) {
// Checked exceptions (IO, reflection) are wrapped for the caller
throw new RuntimeException(e);
} finally {
resourceCache.clear();
}
}
/**
 * Converts provided input into a target object. After conversion completes any relationships defined are resolved.
 * Parsed objects are cached by their type+id identifier so repeated references resolve to the same instance.
 * @param source JSON source
 * @param clazz target type
 * @param <T> type
 * @return converted target object, or {@code null} when the resolved type is an interface and no
 *         attributes are present
 * @throws IOException in case attribute parsing fails
 * @throws IllegalAccessException in case meta/links/id fields cannot be written
 * @throws InstantiationException in case the target cannot be instantiated
 */
private <T> T readObject(JsonNode source, Class<T> clazz, boolean handleRelationships)
throws IOException, IllegalAccessException, InstantiationException {
String identifier = createIdentifier(source);
// Cache hit returns the previously-parsed instance and skips all work below
T result = (T) resourceCache.get(identifier);
if (result == null) {
// Resolve concrete subtype from the response 'type' value
Class<?> type = getActualType(source, clazz);
if (source.has(ATTRIBUTES)) {
result = (T) objectMapper.treeToValue(source.get(ATTRIBUTES), type);
} else {
if (type.isInterface()) {
result = null;
} else {
// No attributes: instantiate from an empty object node
result = (T) objectMapper.treeToValue(objectMapper.createObjectNode(), type);
}
}
// Handle meta
// NOTE(review): when result is null (interface type, no attributes) a present meta/links
// section would call Field.set(null, ...) and NPE — confirm this path cannot occur for
// interface-typed resources without attributes.
if (source.has(META)) {
Field field = configuration.getMetaField(type);
if (field != null) {
Class<?> metaType = configuration.getMetaType(type);
Object metaObject = objectMapper.treeToValue(source.get(META), metaType);
field.set(result, metaObject);
}
}
// Handle links
if (source.has(LINKS)) {
Field linkField = configuration.getLinksField(type);
if (linkField != null) {
linkField.set(result, new Links(mapLinks(source.get(LINKS))));
}
}
if (result != null) {
// Add parsed object to cache
resourceCache.cache(identifier, result);
// Set object id
setIdValue(result, source.get(ID));
if (handleRelationships) {
// Handle relationships
handleRelationships(source, result);
}
}
}
return result;
}
/**
 * Converts the document's <code>included</code> section into parsed resources and, once every
 * included resource is available, wires up their relationships to one another.
 * @param parent root document node
 * @return map of unique identifier (type+id) to parsed resource object; empty when there is no
 *         <code>included</code> section
 * @throws IOException in case attribute parsing fails
 * @throws IllegalAccessException in case relationship fields cannot be written
 * @throws InstantiationException in case resource instances cannot be created
 */
private Map<String, Object> parseIncluded(JsonNode parent)
        throws IOException, IllegalAccessException, InstantiationException {
    Map<String, Object> result = new HashMap<>();
    if (parent.has(INCLUDED)) {
        // Parse all resources first, without resolving relationships
        Map<String, Object> includedResources = getIncludedResources(parent);
        if (!includedResources.isEmpty()) {
            // Bulk-copy instead of a per-key keySet()/get() loop
            result.putAll(includedResources);
            // Handle relationships now that every included resource has been parsed
            for (JsonNode node : parent.get(INCLUDED)) {
                Object resourceObject = includedResources.get(createIdentifier(node));
                if (resourceObject != null) {
                    handleRelationships(node, resourceObject);
                }
            }
        }
    }
    return result;
}
/**
 * Parses the <code>included</code> section into resource objects, without resolving their
 * relationships. Unknown resource types are skipped when the
 * {@code ALLOW_UNKNOWN_INCLUSIONS} feature is enabled, otherwise they are rejected.
 * @param parent root node
 * @return map of identifier (type+id) to parsed resource
 * @throws IOException in case attribute parsing fails
 * @throws IllegalAccessException in case target fields cannot be written
 * @throws InstantiationException in case resource instances cannot be created
 */
private Map<String, Object> getIncludedResources(JsonNode parent) throws IOException, IllegalAccessException, InstantiationException {
    JsonNode includedNode = parent.get(INCLUDED);
    ValidationUtils.ensureValidResourceObjectArray(includedNode);

    Map<String, Object> parsed = new HashMap<>();
    for (JsonNode resourceNode : includedNode) {
        String typeName = resourceNode.get(TYPE).asText();
        Class<?> targetClass = configuration.getTypeClass(typeName);
        if (targetClass == null) {
            // Unregistered type: tolerate only when the feature allows it
            if (deserializationFeatures.contains(DeserializationFeature.ALLOW_UNKNOWN_INCLUSIONS)) {
                continue;
            }
            throw new IllegalArgumentException("Included section contains unknown resource type: " + typeName);
        }
        Object parsedResource = readObject(resourceNode, targetClass, false);
        if (parsedResource != null) {
            parsed.put(createIdentifier(resourceNode), parsedResource);
        }
    }
    return parsed;
}
/**
 * Populates the relationship fields of {@code object} from the source node's 'relationships'
 * section. For each named relationship this sets relationship meta and links fields when
 * declared, then either resolves the relationship remotely via a registered
 * {@link RelationshipResolver} (when resolution is enabled and a link is present) or builds the
 * relationship object(s) from the resource linkage 'data' node.
 * @param source resource node possibly containing a 'relationships' member
 * @param object target POJO whose relationship fields are written
 * @throws IllegalAccessException in case relationship fields cannot be written
 * @throws IOException in case resolved/linked content cannot be parsed
 * @throws InstantiationException in case relationship instances cannot be created
 */
private void handleRelationships(JsonNode source, Object object)
throws IllegalAccessException, IOException, InstantiationException {
JsonNode relationships = source.get(RELATIONSHIPS);
if (relationships != null) {
Iterator<String> fields = relationships.fieldNames();
while (fields.hasNext()) {
String field = fields.next();
JsonNode relationship = relationships.get(field);
// Only relationships declared on the target class are processed
Field relationshipField = configuration.getRelationshipField(object.getClass(), field);
if (relationshipField != null) {
// Get target type
Class<?> type = configuration.getRelationshipType(object.getClass(), field);
// In case type is not defined, relationship object cannot be processed
if (type == null) {
continue;
}
// Handle meta if present
if (relationship.has(META)) {
Field relationshipMetaField = configuration.getRelationshipMetaField(object.getClass(), field);
if (relationshipMetaField != null) {
relationshipMetaField.set(object, objectMapper.treeToValue(relationship.get(META),
configuration.getRelationshipMetaType(object.getClass(), field)));
}
}
// Handle links if present
if (relationship.has(LINKS)) {
Field relationshipLinksField = configuration.getRelationshipLinksField(object.getClass(), field);
if (relationshipLinksField != null) {
Links links = new Links(mapLinks(relationship.get(LINKS)));
relationshipLinksField.set(object, links);
}
}
// Get resolve flag
boolean resolveRelationship = configuration.getFieldRelationship(relationshipField).resolve();
RelationshipResolver resolver = getResolver(type);
// Use resolver if possible
if (resolveRelationship && resolver != null && relationship.has(LINKS)) {
String relType = configuration.getFieldRelationship(relationshipField).relType().getRelName();
JsonNode linkNode = relationship.get(LINKS).get(relType);
String link;
// Fetch the related document via the resolver and parse it recursively
if (linkNode != null && ((link = getLink(linkNode)) != null)) {
if (isCollection(relationship)) {
relationshipField.set(object,
readDocumentCollection(new ByteArrayInputStream(resolver.resolve(link)), type).get());
} else {
relationshipField.set(object, readDocument(new ByteArrayInputStream(resolver.resolve(link)), type).get());
}
}
} else {
if (isCollection(relationship)) {
@SuppressWarnings("rawtypes")
Collection elements = createCollectionInstance(relationshipField.getType());
for (JsonNode element : relationship.get(DATA)) {
try {
Object relationshipObject = parseRelationship(element, type);
if (relationshipObject != null) {
elements.add(relationshipObject);
}
} catch (UnregisteredTypeException ex) {
// NOTE(review): rethrows only when the field type IS an interface AND
// ALLOW_UNKNOWN_TYPE_IN_RELATIONSHIP is NOT enabled; for non-interface
// fields the exception is swallowed — confirm this matches intended semantics
if (relationshipField.getType().isInterface() &&
!deserializationFeatures.contains(DeserializationFeature.ALLOW_UNKNOWN_TYPE_IN_RELATIONSHIP)) {
throw ex;
}
}
}
relationshipField.set(object, elements);
} else {
try {
Object relationshipObject = parseRelationship(relationship.get(DATA), type);
if (relationshipObject != null) {
relationshipField.set(object, relationshipObject);
}
} catch (UnregisteredTypeException ex) {
// NOTE(review): same rethrow condition as the collection branch above —
// exceptions for non-interface field types are swallowed
if (relationshipField.getType().isInterface() &&
!deserializationFeatures.contains(DeserializationFeature.ALLOW_UNKNOWN_TYPE_IN_RELATIONSHIP)) {
throw ex;
}
}
}
}
}
}
}
}
/**
 * Extracts the URL from a link node. A link may be a plain string or a
 * <a href="http://jsonapi.org/format/#document-links">link object</a>; for the object form the
 * value of the {@code href} member is returned, otherwise the node's own string value.
 * <p>
 * <em>Package-private for unit testing.</em>
 * </p>
 * @param linkNode a JsonNode representing a link
 * @return the link URL, may be {@code null}
 */
String getLink(JsonNode linkNode) {
    // Object form carries the URL in its "href" member (JSON API v1.0 document-links)
    JsonNode hrefNode = linkNode.get(HREF);
    if (hrefNode != null) {
        return hrefNode.asText();
    }
    // String form: the node itself is the URL; null when it has no text value
    return linkNode.asText(null);
}
/**
 * Creates relationship object by consuming provided resource linkage 'DATA' node.
 * @param relationshipDataNode relationship data node
 * @param type object type
 * @return created object or <code>null</code> in case data node is not a valid resource identifier
 * @throws IOException in case attribute parsing fails
 * @throws IllegalAccessException in case target fields cannot be written
 * @throws InstantiationException in case the relationship instance cannot be created
 */
private Object parseRelationship(JsonNode relationshipDataNode, Class<?> type)
throws IOException, IllegalAccessException, InstantiationException {
if (ValidationUtils.isResourceIdentifierObject(relationshipDataNode)) {
String identifier = createIdentifier(relationshipDataNode);
// Prefer the already-parsed instance from the current cache scope
if (resourceCache.contains(identifier)) {
return resourceCache.get(identifier);
} else {
// Never cache relationship objects: lock the cache so readObject cannot add entries
resourceCache.lock();
try {
return readObject(relationshipDataNode, type, true);
} finally {
resourceCache.unlock();
}
}
}
return null;
}
/**
 * Generates a unique resource identifier by concatenating the resource type and id values;
 * per the JSON API specification the (type, id) pair is unique within a document.
 * The id may be empty unless the {@code REQUIRE_RESOURCE_ID} feature is enabled; the type
 * must always be present and non-empty.
 * @param object data object
 * @return concatenated type and id values
 */
private String createIdentifier(JsonNode object) {
    JsonNode idNode = object.get(ID);
    String id = (idNode == null) ? "" : idNode.asText().trim();

    boolean idRequired = deserializationFeatures.contains(DeserializationFeature.REQUIRE_RESOURCE_ID);
    if (idRequired && id.isEmpty()) {
        throw new IllegalArgumentException(String.format("Resource must have a non null and non-empty 'id' attribute! %s", object.toString()));
    }

    JsonNode typeNode = object.get(TYPE);
    String type = (typeNode == null) ? "" : typeNode.asText().trim();
    if (type.isEmpty()) {
        throw new IllegalArgumentException(String.format("Resource must have a non null and non-empty 'type' attribute! %s", object.toString()));
    }
    return type + id;
}
/**
 * Sets an id attribute value to a target object.
 * @param target target POJO
 * @param idValue id node; when {@code null} the target's id field is left untouched
 * @throws IllegalAccessException thrown in case target field is not accessible
 */
private void setIdValue(Object target, JsonNode idValue) throws IllegalAccessException {
Field idField = configuration.getIdField(target.getClass());
ResourceIdHandler idHandler = configuration.getIdHandler(target.getClass());
if (idValue != null) {
// Convert the raw id text through the type's registered id handler before assignment
idField.set(target, idHandler.fromString(idValue.asText()));
}
}
/**
 * Reads @Id value from provided source object.
 *
 * @param source object to read @Id value from
 * @return {@link String} id or <code>null</code>
 * @throws IllegalAccessException in case the id field is not accessible
 */
private String getIdValue(Object source) throws IllegalAccessException {
Field idField = configuration.getIdField(source.getClass());
ResourceIdHandler handler = configuration.getIdHandler(source.getClass());
// Convert the raw field value to its string form through the type's id handler
return handler.asString(idField.get(source));
}
/**
 * Determines whether the <code>data</code> member of the given node holds an array (to-many
 * relationship) or a single object/absent value.
 * @param source node containing a <code>data</code> member
 * @return <code>true</code> when <code>data</code> is present and is an array
 */
private boolean isCollection(JsonNode source) {
    JsonNode dataNode = source.get(DATA);
    if (dataNode == null) {
        return false;
    }
    return dataNode.isArray();
}
/**
 * Converts input object to byte array.
 * @param object input object
 * @return raw bytes
 * @throws JsonProcessingException in case serialization fails
 * @throws IllegalAccessException in case fields cannot be read
 * @deprecated use {@link #writeDocument(JSONAPIDocument)} instead
 */
@Deprecated
public byte [] writeObject(Object object) throws JsonProcessingException, IllegalAccessException {
try {
// Delegate to the document API; wrap its checked exception to preserve this signature
return writeDocument(new JSONAPIDocument<>(object));
} catch (DocumentSerializationException e) {
throw new RuntimeException(e);
}
}
/**
 * Serializes provided {@link JSONAPIDocument} into JSON API Spec compatible byte representation.
 * @param document {@link JSONAPIDocument} document to serialize
 * @return serialized content in bytes
 * @throws DocumentSerializationException thrown in case serialization fails
 */
public byte [] writeDocument(JSONAPIDocument<?> document) throws DocumentSerializationException {
// Delegate with default (global) serialization settings
return writeDocument(document, null);
}
/**
 * Serializes provided {@link JSONAPIDocument} into JSON API Spec compatible byte representation.
 *
 * @param document {@link JSONAPIDocument} document to serialize
 * @param settings {@link SerializationSettings} settings that override global serialization settings
 * @return serialized content in bytes
 * @throws DocumentSerializationException thrown in case serialization fails
 */
public byte [] writeDocument(JSONAPIDocument<?> document, SerializationSettings settings)
throws DocumentSerializationException {
try {
// Open a fresh cache scope used for recursion breaking; cleared in the finally block
resourceCache.init();
Map<String, ObjectNode> includedDataMap = new HashMap<>();
ObjectNode result = objectMapper.createObjectNode();
// Serialize data if present
if (document.get() != null) {
ObjectNode dataNode = getDataNode(document.get(), includedDataMap, settings);
result.set(DATA, dataNode);
// It is possible that relationships point back to top-level resource, in this case remove it from
// included section since it is already present (as a top level resource).
// NOTE(review): serialization identifiers are id+type, whereas deserialization uses
// type+id (createIdentifier); each path is internally consistent — confirm intentional.
String identifier = String.valueOf(getIdValue(document.get()))
.concat(configuration.getTypeName(document.get().getClass()));
includedDataMap.remove(identifier);
result = addIncludedSection(result, includedDataMap);
}
// Serialize errors if present
if (document.getErrors() != null) {
ArrayNode errorsNode = objectMapper.createArrayNode();
for (Error error : document.getErrors()) {
errorsNode.add(objectMapper.valueToTree(error));
}
result.set(ERRORS, errorsNode);
}
// Serialize global links and meta
serializeMeta(document, result, settings);
serializeLinks(document, result, settings);
return objectMapper.writeValueAsBytes(result);
} catch (Exception e) {
throw new DocumentSerializationException(e);
} finally {
resourceCache.clear();
}
}
/**
 * Writes the document's top-level meta into the result node when meta is present, non-empty,
 * and meta serialization is enabled by the effective settings.
 * @param document source document
 * @param resultNode node being built
 * @param settings per-call settings overriding global serialization features
 */
private void serializeMeta(JSONAPIDocument<?> document, ObjectNode resultNode, SerializationSettings settings) {
    // Nothing to write when meta is absent/empty or disabled
    if (document.getMeta() == null || document.getMeta().isEmpty() || !shouldSerializeMeta(settings)) {
        return;
    }
    resultNode.set(META, objectMapper.valueToTree(document.getMeta()));
}
/**
 * Writes the document's top-level links into the result node when links are present, non-empty,
 * and link serialization is enabled by the effective settings.
 * @param document source document
 * @param resultNode node being built
 * @param settings per-call settings overriding global serialization features
 */
private void serializeLinks(JSONAPIDocument<?> document, ObjectNode resultNode, SerializationSettings settings) {
    // Nothing to write when links are absent/empty or disabled
    if (document.getLinks() == null || document.getLinks().getLinks().isEmpty()
            || !shouldSerializeLinks(settings)) {
        return;
    }
    // Unwrap the serialized wrapper so only the inner links object is attached
    resultNode.set(LINKS, objectMapper.valueToTree(document.getLinks()).get(LINKS));
}
/**
 * Serializes provided {@link JSONAPIDocument} into JSON API Spec compatible byte representation.
 * @param documentCollection {@link JSONAPIDocument} document collection to serialize
 * @return serialized content in bytes
 * @throws DocumentSerializationException thrown in case serialization fails
 */
public byte [] writeDocumentCollection(JSONAPIDocument<? extends Iterable<?>> documentCollection)
throws DocumentSerializationException {
// Delegate with default (global) serialization settings
return writeDocumentCollection(documentCollection, null);
}
/**
 * Serializes provided {@link JSONAPIDocument} into JSON API Spec compatible byte representation.
 * @param documentCollection {@link JSONAPIDocument} document collection to serialize
 * @param serializationSettings {@link SerializationSettings} settings that override global serialization settings
 * @return serialized content in bytes
 * @throws DocumentSerializationException thrown in case serialization fails
 */
public byte [] writeDocumentCollection(JSONAPIDocument<? extends Iterable<?>> documentCollection,
SerializationSettings serializationSettings)
throws DocumentSerializationException {
try {
// Open a fresh cache scope used for recursion breaking; cleared in the finally block
resourceCache.init();
ArrayNode results = objectMapper.createArrayNode();
// LinkedHashMap keeps the included section in first-encountered order
Map<String, ObjectNode> includedDataMap = new LinkedHashMap<>();
for (Object object : documentCollection.get()) {
results.add(getDataNode(object, includedDataMap, serializationSettings));
}
ObjectNode result = objectMapper.createObjectNode();
result.set(DATA, results);
// Handle global links and meta
serializeMeta(documentCollection, result, serializationSettings);
serializeLinks(documentCollection, result, serializationSettings);
result = addIncludedSection(result, includedDataMap);
return objectMapper.writeValueAsBytes(result);
} catch (Exception e) {
throw new DocumentSerializationException(e);
} finally {
resourceCache.clear();
}
}
/**
 * Serializes a single resource object into its JSON API 'data' node: type/id identifier,
 * attributes, relationships (with their meta/links and resource-linkage data), resource links,
 * and meta. Relationship targets selected for inclusion are collected into
 * {@code includedContainer}; the resource cache is used to break relationship recursion.
 * @param object resource POJO to serialize
 * @param includedContainer accumulator for included resources keyed by id+type
 * @param settings per-call settings overriding global serialization features
 * @return completed 'data' node for this resource
 * @throws IllegalAccessException in case resource fields cannot be read
 */
private ObjectNode getDataNode(
Object object,
Map<String, ObjectNode> includedContainer,
SerializationSettings settings
) throws IllegalAccessException {
ObjectNode dataNode = objectMapper.createObjectNode();
// Perform initial conversion
ObjectNode attributesNode = objectMapper.valueToTree(object);
// Handle id, meta and relationship fields
String resourceId = getIdValue(object);
// Remove id field from resulting attribute node
removeField(attributesNode, configuration.getIdField(object.getClass()));
// Handle meta
Field metaField = configuration.getMetaField(object.getClass());
JsonNode meta = null;
if (metaField != null) {
meta = removeField(attributesNode, metaField);
}
// Handle links
String selfHref = null;
JsonNode jsonLinks = getResourceLinks(object, attributesNode, resourceId, settings);
if (jsonLinks != null) {
if (jsonLinks.has(SELF)) {
JsonNode selfLink = jsonLinks.get(SELF);
// Self link may be a plain string or an object with an 'href' member
if (selfLink instanceof TextNode) {
selfHref = selfLink.textValue();
} else {
selfHref = selfLink.get(HREF).asText();
}
}
}
// Handle resource identifier
dataNode.put(TYPE, configuration.getTypeName(object.getClass()));
if (resourceId != null) {
// Write id if its enabled
if (shouldSerializeId(settings)) {
dataNode.put(ID, resourceId);
}
// Cache the object for recursion breaking purposes
resourceCache.cache(resourceId.concat(configuration.getTypeName(object.getClass())), null);
}
dataNode.set(ATTRIBUTES, attributesNode);
// Handle relationships (remove from base type and add as relationships)
List<Field> relationshipFields = configuration.getRelationshipFields(object.getClass());
if (relationshipFields != null) {
ObjectNode relationshipsNode = objectMapper.createObjectNode();
for (Field relationshipField : relationshipFields) {
Object relationshipObject = relationshipField.get(object);
// Relationship values never appear under 'attributes', even when null
removeField(attributesNode, relationshipField);
if (relationshipObject != null) {
Relationship relationship = configuration.getFieldRelationship(relationshipField);
// In case serialisation is disabled for a given relationship, skip it
if (!relationship.serialise()) {
continue;
}
String relationshipName = relationship.value();
ObjectNode relationshipDataNode = objectMapper.createObjectNode();
relationshipsNode.set(relationshipName, relationshipDataNode);
// Serialize relationship meta
JsonNode relationshipMeta = getRelationshipMeta(object, relationshipName, settings);
if (relationshipMeta != null) {
relationshipDataNode.set(META, relationshipMeta);
// Remove meta object from serialized JSON
Field refField = configuration
.getRelationshipMetaField(object.getClass(), relationshipName);
removeField(attributesNode, refField);
}
// Serialize relationship links
JsonNode relationshipLinks = getRelationshipLinks(object, relationship, selfHref, settings);
if (relationshipLinks != null) {
relationshipDataNode.set(LINKS, relationshipLinks);
// Remove link object from serialized JSON
Field refField = configuration
.getRelationshipLinksField(object.getClass(), relationshipName);
removeField(attributesNode, refField);
}
boolean shouldSerializeData = configuration.getFieldRelationship(relationshipField).serialiseData();
if (shouldSerializeData) {
if (relationshipObject instanceof Collection) {
// To-many relationship: emit an array of resource identifiers
ArrayNode dataArrayNode = objectMapper.createArrayNode();
for (Object element : (Collection<?>) relationshipObject) {
String relationshipType = configuration.getTypeName(element.getClass());
String idValue = getIdValue(element);
ObjectNode identifierNode = objectMapper.createObjectNode();
identifierNode.put(TYPE, relationshipType);
identifierNode.put(ID, idValue);
dataArrayNode.add(identifierNode);
// Handle included data
if (shouldSerializeRelationship(relationshipName, settings) && idValue != null) {
String identifier = idValue.concat(relationshipType);
if (!includedContainer.containsKey(identifier) && !resourceCache.contains(identifier)) {
includedContainer.put(identifier,
getDataNode(element, includedContainer, settings));
}
}
}
relationshipDataNode.set(DATA, dataArrayNode);
} else {
// To-one relationship: emit a single resource identifier
String relationshipType = configuration.getTypeName(relationshipObject.getClass());
String idValue = getIdValue(relationshipObject);
ObjectNode identifierNode = objectMapper.createObjectNode();
identifierNode.put(TYPE, relationshipType);
identifierNode.put(ID, idValue);
relationshipDataNode.set(DATA, identifierNode);
// NOTE(review): unlike the collection branch above, this path does not consult
// resourceCache.contains(identifier) before recursing — confirm the recursion
// guard asymmetry between to-many and to-one inclusion is intentional.
if (shouldSerializeRelationship(relationshipName, settings) && idValue != null) {
String identifier = idValue.concat(relationshipType);
if (!includedContainer.containsKey(identifier)) {
includedContainer.put(identifier,
getDataNode(relationshipObject, includedContainer, settings));
}
}
}
}
}
}
if (relationshipsNode.size() > 0) {
dataNode.set(RELATIONSHIPS, relationshipsNode);
}
}
if (jsonLinks != null) {
dataNode.set(LINKS, jsonLinks);
}
if (meta != null && shouldSerializeMeta(settings)) {
dataNode.set(META, meta);
}
return dataNode;
}
/**
 * Converts input object to byte array.
 *
 * @param objects List of input objects
 * @return raw bytes
 * @throws JsonProcessingException in case serialization fails
 * @throws IllegalAccessException in case fields cannot be read
 * @deprecated use writeDocumentCollection instead
 */
@Deprecated
public <T> byte[] writeObjectCollection(Iterable<T> objects) throws JsonProcessingException, IllegalAccessException {
try {
return writeDocumentCollection(new JSONAPIDocument<>(objects));
} catch (DocumentSerializationException e) {
// Unwrap the causes this legacy signature declares; wrap anything else
if (e.getCause() instanceof JsonProcessingException) {
throw (JsonProcessingException) e.getCause();
} else if (e.getCause() instanceof IllegalAccessException) {
throw (IllegalAccessException) e.getCause();
}
throw new RuntimeException(e.getCause());
}
}
/**
 * Checks if provided type is registered with this converter instance.
 * @param type class to check
 * @return returns <code>true</code> if type is registered, else <code>false</code>
 */
public boolean isRegisteredType(Class<?> type) {
// Delegate to the converter configuration built from the constructor's class list
return configuration.isRegisteredType(type);
}
/**
 * Returns relationship resolver for given type. In case no specific type resolver is registered,
 * the global resolver is returned (which may itself be <code>null</code>).
 * @param type relationship object type
 * @return relationship resolver or <code>null</code>
 */
private RelationshipResolver getResolver(Class<?> type) {
    // A type-specific resolver always wins over the global one
    RelationshipResolver typeSpecific = typedResolvers.get(type);
    if (typeSpecific != null) {
        return typeSpecific;
    }
    return globalResolver;
}
/**
 * Deserializes a <a href="http://jsonapi.org/format/#document-links">JSON-API links object</a> to a {@code Map}
 * keyed by the link name.
 * <p>
 * The {@code linksObject} may represent links in string form or object form; both are supported by this method.
 * </p>
 * <p>
 * E.g.
 * <pre>
 * "links": {
 *   "self": "http://example.com/posts"
 * }
 * </pre>
 * </p>
 * <p>
 * or
 * <pre>
 * "links": {
 *   "related": {
 *     "href": "http://example.com/articles/1/comments",
 *     "meta": {
 *       "count": 10
 *     }
 *   }
 * }
 * </pre>
 * </p>
 *
 * @param linksObject a {@code JsonNode} representing a links object
 * @return a {@code Map} keyed by link name
 */
private Map<String, Link> mapLinks(JsonNode linksObject) {
    Map<String, Link> mapped = new HashMap<>();
    for (Iterator<Map.Entry<String, JsonNode>> it = linksObject.fields(); it.hasNext(); ) {
        Map.Entry<String, JsonNode> entry = it.next();
        JsonNode value = entry.getValue();

        Link link = new Link();
        // Handles both the string form and the object form (href member)
        link.setHref(getLink(value));
        // Object-form links may additionally carry a meta object
        if (value.has(META)) {
            link.setMeta(mapMeta(value.get(META)));
        }
        mapped.put(entry.getKey(), link);
    }
    return mapped;
}
/**
 * Deserializes a <a href="http://jsonapi.org/format/#document-meta">JSON-API meta object</a> to a {@code Map}
 * keyed by the member names. Because {@code meta} objects contain arbitrary information, the values in the
 * map are of unknown type.
 *
 * @param metaNode a JsonNode representing a meta object
 * @return a Map of the meta information, keyed by member name, or {@code null} when the node
 *         cannot be read as a map (best-effort: parse failures are deliberately swallowed)
 */
private Map<String, Object> mapMeta(JsonNode metaNode) {
JsonParser p = objectMapper.treeAsTokens(metaNode);
// Read the tree as a plain String -> Object map since meta content is arbitrary
MapType mapType = TypeFactory.defaultInstance()
.constructMapType(HashMap.class, String.class, Object.class);
try {
return objectMapper.readValue(p, mapType);
} catch (IOException e) {
// Best-effort: meta that cannot be mapped is dropped rather than failing the document
}
return null;
}
/**
 * Attaches the collected included resources to the document root as the 'included' array.
 * No-op when there is nothing to include.
 * @param rootNode document root being built
 * @param includedDataMap included resource nodes keyed by identifier
 * @return the same root node, for chaining
 */
private ObjectNode addIncludedSection(ObjectNode rootNode, Map<String, ObjectNode> includedDataMap) {
    if (includedDataMap.isEmpty()) {
        return rootNode;
    }
    ArrayNode includedArray = objectMapper.createArrayNode();
    for (ObjectNode includedNode : includedDataMap.values()) {
        includedArray.add(includedNode);
    }
    rootNode.set(INCLUDED, includedArray);
    return rootNode;
}
/**
 * Resolves the concrete class to deserialize into, based on the response's 'type' value.
 * <p>
 * If the user-provided class's registered type name matches the response type, that class is
 * used directly. Otherwise the class registered for the response type is used, provided it is a
 * subtype of the user-provided class — supporting responses that return one of many subtypes.
 * </p>
 * @param object JSON object containing type value
 * @param userType provided user type
 * @return {@link Class} to deserialize into
 * @throws UnregisteredTypeException when the response type is unknown or incompatible
 */
private Class<?> getActualType(JsonNode object, Class<?> userType) {
    String responseType = object.get(TYPE).asText();

    // Exact match with the user-provided class wins (null-safe: equals on the non-null side)
    if (responseType.equals(configuration.getTypeName(userType))) {
        return userType;
    }

    // Otherwise fall back to the registered class, but only if it is a subtype of userType
    Class<?> registeredType = configuration.getTypeClass(responseType);
    if (registeredType != null && userType.isAssignableFrom(registeredType)) {
        return registeredType;
    }

    throw new UnregisteredTypeException(responseType);
}
/**
 * Instantiates a collection suitable for the declared relationship field type. Concrete,
 * non-abstract types are instantiated directly; the {@code List}/{@code Collection} and
 * {@code Set} interfaces map to {@code ArrayList} and {@code HashSet} respectively.
 * @param type declared collection type
 * @return new, empty collection instance
 * @throws InstantiationException when the concrete type cannot be instantiated
 * @throws IllegalAccessException when the concrete type's constructor is not accessible
 * @throws RuntimeException when no suitable collection can be created for the type
 */
private Collection<?> createCollectionInstance(Class<?> type)
        throws InstantiationException, IllegalAccessException {
    boolean isConcrete = !type.isInterface() && !Modifier.isAbstract(type.getModifiers());
    if (isConcrete) {
        return (Collection<?>) type.newInstance();
    }
    if (Set.class.equals(type)) {
        return new HashSet<>();
    }
    if (List.class.equals(type) || Collection.class.equals(type)) {
        return new ArrayList<>();
    }
    throw new RuntimeException("Unable to create appropriate instance for type: " + type.getSimpleName());
}
/**
 * Serializes the meta object declared for a named relationship on the source object.
 * @param source resource POJO holding the relationship meta field
 * @param relationshipName relationship name as declared in the relationship annotation
 * @param settings per-call settings overriding global serialization features
 * @return serialized meta node, or {@code null} when absent or meta serialization is disabled
 * @throws IllegalAccessException in case the meta field cannot be read
 */
private JsonNode getRelationshipMeta(Object source, String relationshipName, SerializationSettings settings)
throws IllegalAccessException {
if (shouldSerializeMeta(settings)) {
Field relationshipMetaField = configuration
.getRelationshipMetaField(source.getClass(), relationshipName);
if (relationshipMetaField != null && relationshipMetaField.get(source) != null) {
return objectMapper.valueToTree(relationshipMetaField.get(source));
}
}
return null;
}
/**
 * Builds the links node for a serialized resource.
 * <p>
 * User-provided links (from the resource's {@code Links} field) take precedence; a 'self'
 * link is generated from the type's path template when an id is available and the user did
 * not set one explicitly.
 * </p>
 * NOTE(review): the links field is stripped from the serialized attributes even when link
 * serialization is disabled — see the TODO below about this side effect.
 *
 * @param resource resource being serialized
 * @param serializedResource attribute node the links field is removed from
 * @param resourceId resource id used to fill the {@code {id}} path placeholder (may be null)
 * @param settings per-call serialization settings (may be null)
 * @return links node, or {@code null} when disabled or no links were produced
 */
private JsonNode getResourceLinks(Object resource, ObjectNode serializedResource, String resourceId,
SerializationSettings settings) throws IllegalAccessException {
Type type = configuration.getType(resource.getClass());
// Check if there are user-provided links
Links links = null;
Field linksField = configuration.getLinksField(resource.getClass());
if (linksField != null) {
links = (Links) linksField.get(resource);
// Remove links from attributes object
//TODO: this state change needs to be removed from here
if (links != null) {
removeField(serializedResource, linksField);
}
}
// If enabled, handle links
if (shouldSerializeLinks(settings)) {
Map<String, Link> linkMap = new HashMap<>();
if (links != null) {
linkMap.putAll(links.getLinks());
}
// If link path is defined in type and id is not null and user did not explicitly set link value, create it
if (!type.path().trim().isEmpty() && !linkMap.containsKey(SELF) && resourceId != null) {
linkMap.put(SELF, new Link(createURL(baseURL, type.path().replace("{id}", resourceId))));
}
// If there is at least one link generated, serialize and return
if (!linkMap.isEmpty()) {
return objectMapper.valueToTree(new Links(linkMap)).get(LINKS);
}
}
return null;
}
/**
 * Builds the links node for a relationship, merging user-provided links with generated
 * 'self' and 'related' links derived from the relationship's path templates.
 *
 * @param source owning resource instance
 * @param relationship relationship annotation describing paths and name
 * @param ownerLink base URL of the owning resource, used to resolve relative paths
 * @param settings per-call serialization settings (may be {@code null})
 * @return links node, or {@code null} when link serialization is disabled or no links exist
 */
private JsonNode getRelationshipLinks(Object source, Relationship relationship, String ownerLink,
        SerializationSettings settings) throws IllegalAccessException {
    if (!shouldSerializeLinks(settings)) {
        return null;
    }
    Field linksField = configuration.getRelationshipLinksField(source.getClass(), relationship.value());
    Links userLinks = linksField != null ? (Links) linksField.get(source) : null;
    Map<String, Link> resolved = new HashMap<>();
    if (userLinks != null) {
        resolved.putAll(userLinks.getLinks());
    }
    // Generated links never override ones the user set explicitly.
    if (!resolved.containsKey(SELF) && !relationship.path().trim().isEmpty()) {
        resolved.put(SELF, new Link(createURL(ownerLink, relationship.path())));
    }
    if (!resolved.containsKey(RELATED) && !relationship.relatedPath().trim().isEmpty()) {
        resolved.put(RELATED, new Link(createURL(ownerLink, relationship.relatedPath())));
    }
    return resolved.isEmpty() ? null : objectMapper.valueToTree(new Links(resolved)).get(LINKS);
}
/**
 * Joins a base URL and a path with exactly one slash between them.
 *
 * @param base base URL, with or without a trailing slash
 * @param path path segment, with or without a leading slash
 * @return concatenated URL
 */
private String createURL(String base, String path) {
    StringBuilder url = new StringBuilder(base);
    if (!base.endsWith("/")) {
        url.append('/');
    }
    url.append(path.startsWith("/") ? path.substring(1) : path);
    return url.toString();
}
/**
 * Decides whether a relationship's attributes should be serialized.
 * Per-call settings win: an explicit include (not simultaneously excluded) forces
 * serialization, an explicit exclude suppresses it; otherwise the converter-wide
 * {@code INCLUDE_RELATIONSHIP_ATTRIBUTES} feature applies.
 */
private boolean shouldSerializeRelationship(String relationshipName, SerializationSettings settings) {
    if (settings != null) {
        boolean excluded = settings.isRelationshipExcluded(relationshipName);
        if (settings.isRelationshipIncluded(relationshipName) && !excluded) {
            return true;
        }
        if (excluded) {
            return false;
        }
    }
    return serializationFeatures.contains(SerializationFeature.INCLUDE_RELATIONSHIP_ATTRIBUTES);
}
/** Per-call settings override the converter-wide {@code INCLUDE_LINKS} feature. */
private boolean shouldSerializeLinks(SerializationSettings settings) {
    Boolean override = settings != null ? settings.serializeLinks() : null;
    return override != null ? override : serializationFeatures.contains(SerializationFeature.INCLUDE_LINKS);
}
/** Per-call settings override the converter-wide {@code INCLUDE_META} feature. */
private boolean shouldSerializeMeta(SerializationSettings settings) {
    Boolean override = settings != null ? settings.serializeMeta() : null;
    return override != null ? override : serializationFeatures.contains(SerializationFeature.INCLUDE_META);
}
/** Per-call settings override the converter-wide {@code INCLUDE_ID} feature. */
private boolean shouldSerializeId(SerializationSettings settings) {
    Boolean override = settings != null ? settings.serializeId() : null;
    return override != null ? override : serializationFeatures.contains(SerializationFeature.INCLUDE_ID);
}
/**
 * Removes from the node the property corresponding to the given field, applying the
 * converter's naming strategy to derive the JSON property name.
 *
 * @return the removed node, or {@code null} if the field is {@code null} or absent
 */
private JsonNode removeField(ObjectNode node, Field field) {
    if (field == null) {
        return null;
    }
    return node.remove(namingStrategy.nameForField(null, null, field.getName()));
}
/**
 * Registers a new type to be used with this converter instance.
 * @param type {@link Class} type to register
 * @return {@code true} if the type was registered, {@code false} if it was registered
 *         already or is not eligible for registering (i.e. missing required annotations)
 */
public boolean registerType(Class<?> type) {
    boolean registrable = !configuration.isRegisteredType(type) && ConverterConfiguration.isEligibleType(type);
    return registrable && configuration.registerType(type);
}
/**
 * Adds (enables) a deserialization option for this converter instance.
 * @param option {@link DeserializationFeature} option to enable
 */
public void enableDeserializationOption(DeserializationFeature option) {
this.deserializationFeatures.add(option);
}
/**
 * Removes (disables) an existing deserialization option.
 * @param option {@link DeserializationFeature} feature to disable
 */
public void disableDeserializationOption(DeserializationFeature option) {
this.deserializationFeatures.remove(option);
}
/**
 * Adds (enables) a serialization option for this converter instance.
 * @param option {@link SerializationFeature} option to enable
 */
public void enableSerializationOption(SerializationFeature option) {
this.serializationFeatures.add(option);
}
/**
 * Removes (disables) an existing serialization option.
 * @param option {@link SerializationFeature} feature to disable
 */
public void disableSerializationOption(SerializationFeature option) {
this.serializationFeatures.remove(option);
}
}
| |
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.vcs;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.command.CommandAdapter;
import com.intellij.openapi.command.CommandEvent;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.vcs.actions.VcsContextFactory;
import com.intellij.openapi.vcs.changes.ChangeListManager;
import com.intellij.openapi.vfs.*;
import com.intellij.openapi.vfs.newvfs.NewVirtualFile;
import com.intellij.vcsUtil.VcsUtil;
import org.jetbrains.annotations.Nullable;
import java.io.File;
import java.util.*;
/**
 * Listens to VFS events (create / copy / delete / move / rename) and queues the affected
 * files so that, when the surrounding IDE command finishes, the corresponding VCS
 * operations (add / delete / move) can be executed in a single batch.
 * <p>
 * Subclasses provide the VCS-specific dialog titles, prompts and the actual
 * add/delete/move implementations.
 *
 * @author yole
 */
public abstract class VcsVFSListener implements Disposable {
  /** Records one pending move/rename: the file and its paths before and after the operation. */
  protected static class MovedFileInfo {
    public final String myOldPath;
    public String myNewPath;
    private final VirtualFile myFile;
    protected MovedFileInfo(VirtualFile file, final String newPath) {
      myOldPath = file.getPath();
      myNewPath = newPath;
      myFile = file;
    }
  }
  protected final Project myProject;
  protected final AbstractVcs myVcs;
  protected final ChangeListManager myChangeListManager;
  protected final VcsShowConfirmationOption myAddOption;
  protected final VcsShowConfirmationOption myRemoveOption;
  // Pending work queued by the VFS listener and drained in commandFinished().
  protected final List<VirtualFile> myAddedFiles = new ArrayList<VirtualFile>();
  protected final Map<VirtualFile, VirtualFile> myCopyFromMap = new HashMap<VirtualFile, VirtualFile>();
  protected final List<FilePath> myDeletedFiles = new ArrayList<FilePath>();
  protected final List<FilePath> myDeletedWithoutConfirmFiles = new ArrayList<FilePath>();
  protected final List<MovedFileInfo> myMovedFiles = new ArrayList<MovedFileInfo>();
  /** How the VCS wants a particular file deletion to be handled. */
  protected enum VcsDeleteType {SILENT, CONFIRM, IGNORE}
  protected VcsVFSListener(final Project project, final AbstractVcs vcs) {
    myProject = project;
    myVcs = vcs;
    myChangeListManager = ChangeListManager.getInstance(project);
    final MyVirtualFileAdapter myVFSListener = new MyVirtualFileAdapter();
    final MyCommandAdapter myCommandListener = new MyCommandAdapter();
    final ProjectLevelVcsManager vcsManager = ProjectLevelVcsManager.getInstance(project);
    myAddOption = vcsManager.getStandardConfirmation(VcsConfiguration.StandardConfirmation.ADD, vcs);
    myRemoveOption = vcsManager.getStandardConfirmation(VcsConfiguration.StandardConfirmation.REMOVE, vcs);
    // Passing 'this' as parent Disposable removes both listeners when this object is disposed.
    VirtualFileManager.getInstance().addVirtualFileListener(myVFSListener, this);
    CommandProcessor.getInstance().addCommandListener(myCommandListener, this);
  }
  public void dispose() {
  }
  // Refresh-originated events and files managed by a different VCS are ignored.
  protected boolean isEventIgnored(final VirtualFileEvent event) {
    return event.isFromRefresh() || ProjectLevelVcsManager.getInstance(myProject).getVcsFor(event.getFile()) != myVcs;
  }
  protected void executeAdd() {
    final List<VirtualFile> addedFiles = acquireAddedFiles();
    final Map<VirtualFile, VirtualFile> copyFromMap = acquireCopiedFiles();
    executeAdd(addedFiles, copyFromMap);
  }
  /**
   * @return get map of copied files and clear the map
   */
  protected Map<VirtualFile, VirtualFile> acquireCopiedFiles() {
    final Map<VirtualFile, VirtualFile> copyFromMap = new HashMap<VirtualFile, VirtualFile>(myCopyFromMap);
    myCopyFromMap.clear();
    return copyFromMap;
  }
  /**
   * @return get list of added files and clear previous list
   */
  protected List<VirtualFile> acquireAddedFiles() {
    final List<VirtualFile> addedFiles = new ArrayList<VirtualFile>(myAddedFiles);
    myAddedFiles.clear();
    return addedFiles;
  }
  /**
   * Execute add that performs adding from specific collections.
   * Depending on the "add" confirmation option, performs the add silently, skips it,
   * or asks the user to select which files to add.
   *
   * @param addedFiles the added files
   * @param copyFromMap the copied files
   */
  protected void executeAdd(List<VirtualFile> addedFiles, Map<VirtualFile, VirtualFile> copyFromMap) {
    if (myAddOption.getValue() == VcsShowConfirmationOption.Value.DO_NOTHING_SILENTLY) return;
    if (myAddOption.getValue() == VcsShowConfirmationOption.Value.DO_ACTION_SILENTLY) {
      performAdding(addedFiles, copyFromMap);
    }
    else {
      final AbstractVcsHelper helper = AbstractVcsHelper.getInstance(myProject);
      // TODO[yole]: nice and clean description label
      Collection<VirtualFile> filesToProcess = helper.selectFilesToProcess(addedFiles, getAddTitle(), null,
                                                                           getSingleFileAddTitle(), getSingleFileAddPromptTemplate(),
                                                                           myAddOption);
      if (filesToProcess != null) {
        performAdding(new ArrayList<VirtualFile>(filesToProcess), copyFromMap);
      }
    }
  }
  // Queues a file for deletion; when directory versioning is unsupported, directories are
  // expanded to their (cached) children instead of being queued themselves.
  private void addFileToDelete(VirtualFile file) {
    if (file.isDirectory() && file instanceof NewVirtualFile && !isDirectoryVersioningSupported()) {
      for (VirtualFile child : ((NewVirtualFile)file).getCachedChildren()) {
        addFileToDelete(child);
      }
    }
    else {
      final VcsDeleteType type = needConfirmDeletion(file);
      final FilePath filePath =
        VcsContextFactory.SERVICE.getInstance().createFilePathOnDeleted(new File(file.getPath()), file.isDirectory());
      if (type == VcsDeleteType.CONFIRM) {
        myDeletedFiles.add(filePath);
      }
      else if (type == VcsDeleteType.SILENT) {
        myDeletedWithoutConfirmFiles.add(filePath);
      }
      // VcsDeleteType.IGNORE: the file is intentionally dropped from both queues.
    }
  }
  // Drains both deletion queues and performs the deletion, asking the user for
  // confirmation where the "remove" option requires it.
  protected void executeDelete() {
    final List<FilePath> filesToDelete = new ArrayList<FilePath>(myDeletedWithoutConfirmFiles);
    final List<FilePath> deletedFiles = new ArrayList<FilePath>(myDeletedFiles);
    myDeletedWithoutConfirmFiles.clear();
    myDeletedFiles.clear();
    if (myRemoveOption.getValue() != VcsShowConfirmationOption.Value.DO_NOTHING_SILENTLY) {
      if (myRemoveOption.getValue() == VcsShowConfirmationOption.Value.DO_ACTION_SILENTLY || deletedFiles.isEmpty()) {
        filesToDelete.addAll(deletedFiles);
      }
      else {
        Collection<FilePath> filePaths = selectFilePathsToDelete(deletedFiles);
        if (filePaths != null) {
          filesToDelete.addAll(filePaths);
        }
      }
    }
    performDeletion(filesToDelete);
  }
  /**
   * Select file paths to delete
   *
   * @param deletedFiles deleted files set
   * @return selected files or null (that is considered as empty file set)
   */
  @Nullable
  protected Collection<FilePath> selectFilePathsToDelete(final List<FilePath> deletedFiles) {
    AbstractVcsHelper helper = AbstractVcsHelper.getInstance(myProject);
    return helper.selectFilePathsToProcess(deletedFiles, getDeleteTitle(), null, getSingleFileDeleteTitle(),
                                           getSingleFileDeletePromptTemplate(), myRemoveOption);
  }
  // Queues a file move; when directory versioning is unsupported, directories are expanded
  // recursively so that only plain files end up in the move queue.
  private void addFileToMove(final VirtualFile file, final String newParentPath, final String newName) {
    if (file.isDirectory() && !isDirectoryVersioningSupported()) {
      VirtualFile[] children = file.getChildren();
      if (children != null) {
        for (VirtualFile child : children) {
          addFileToMove(child, newParentPath + "/" + newName, child.getName());
        }
      }
    }
    else {
      processMovedFile(file, newParentPath, newName);
    }
  }
  // Records a move for a file that is known to the VCS, updating any pending record for the
  // same file instead of adding a duplicate.
  protected void processMovedFile(VirtualFile file, String newParentPath, String newName) {
    if (FileStatusManager.getInstance(myProject).getStatus(file) != FileStatus.UNKNOWN) {
      final String newPath = newParentPath + "/" + newName;
      boolean foundExistingInfo = false;
      for (MovedFileInfo info : myMovedFiles) {
        if (info.myFile == file) {
          info.myNewPath = newPath;
          foundExistingInfo = true;
          break;
        }
      }
      if (!foundExistingInfo) {
        myMovedFiles.add(new MovedFileInfo(file, newPath));
      }
    }
  }
  private void executeMoveRename() {
    final List<MovedFileInfo> movedFiles = new ArrayList<MovedFileInfo>(myMovedFiles);
    myMovedFiles.clear();
    performMoveRename(movedFiles);
  }
  // Default policy: every deletion requires user confirmation; subclasses may override.
  protected VcsDeleteType needConfirmDeletion(final VirtualFile file) {
    return VcsDeleteType.CONFIRM;
  }
  protected abstract String getAddTitle();
  protected abstract String getSingleFileAddTitle();
  protected abstract String getSingleFileAddPromptTemplate();
  protected abstract void performAdding(final Collection<VirtualFile> addedFiles, final Map<VirtualFile, VirtualFile> copyFromMap);
  protected abstract String getDeleteTitle();
  protected abstract String getSingleFileDeleteTitle();
  protected abstract String getSingleFileDeletePromptTemplate();
  protected abstract void performDeletion(List<FilePath> filesToDelete);
  protected abstract void performMoveRename(List<MovedFileInfo> movedFiles);
  protected abstract boolean isDirectoryVersioningSupported();
  /** Translates raw VFS events into entries in the pending add/delete/move queues. */
  private class MyVirtualFileAdapter extends VirtualFileAdapter {
    public void fileCreated(final VirtualFileEvent event) {
      if (!isEventIgnored(event) && !myChangeListManager.isIgnoredFile(event.getFile()) &&
          (isDirectoryVersioningSupported() || !event.getFile().isDirectory())) {
        myAddedFiles.add(event.getFile());
      }
    }
    public void fileCopied(final VirtualFileCopyEvent event) {
      if (isEventIgnored(event) || myChangeListManager.isIgnoredFile(event.getFile())) return;
      final AbstractVcs oldVcs = ProjectLevelVcsManager.getInstance(myProject).getVcsFor(event.getOriginalFile());
      if (oldVcs == myVcs) {
        final VirtualFile parent = event.getFile().getParent();
        if (parent != null) {
          // Copy within the same VCS: remember the origin so the VCS can record a copy.
          myAddedFiles.add(event.getFile());
          myCopyFromMap.put(event.getFile(), event.getOriginalFile());
        }
      }
      else {
        // Copied from another VCS: treat as a plain add.
        myAddedFiles.add(event.getFile());
      }
    }
    public void beforeFileDeletion(final VirtualFileEvent event) {
      final VirtualFile file = event.getFile();
      if (isEventIgnored(event)) {
        return;
      }
      if (!myChangeListManager.isIgnoredFile(file)) {
        addFileToDelete(file);
        return;
      }
      // files are ignored, directories are handled recursively
      if (event.getFile().isDirectory()) {
        final List<VirtualFile> list = new LinkedList<VirtualFile>();
        VcsUtil.collectFiles(file, list, true, isDirectoryVersioningSupported());
        for (VirtualFile child : list) {
          if (!myChangeListManager.isIgnoredFile(child)) {
            addFileToDelete(child);
          }
        }
      }
    }
    public void beforeFileMovement(final VirtualFileMoveEvent event) {
      if (isEventIgnored(event)) return;
      final VirtualFile file = event.getFile();
      final AbstractVcs newVcs = ProjectLevelVcsManager.getInstance(myProject).getVcsFor(event.getNewParent());
      if (newVcs == myVcs) {
        addFileToMove(file, event.getNewParent().getPath(), file.getName());
      }
      else {
        // Moving out of this VCS's roots is a deletion from this VCS's point of view.
        addFileToDelete(event.getFile());
      }
    }
    public void fileMoved(final VirtualFileMoveEvent event) {
      if (isEventIgnored(event)) return;
      final AbstractVcs oldVcs = ProjectLevelVcsManager.getInstance(myProject).getVcsFor(event.getOldParent());
      if (oldVcs != myVcs) {
        // Moved in from another VCS: treat as an add.
        myAddedFiles.add(event.getFile());
      }
    }
    public void beforePropertyChange(final VirtualFilePropertyEvent event) {
      if (!isEventIgnored(event) && event.getPropertyName().equalsIgnoreCase(VirtualFile.PROP_NAME)) {
        String oldName = (String)event.getOldValue();
        String newName = (String)event.getNewValue();
        // in order to force a reparse of a file, the rename event can be fired with old name equal to new name -
        // such events needn't be handled by the VCS
        if (!Comparing.equal(oldName, newName)) {
          final VirtualFile file = event.getFile();
          final VirtualFile parent = file.getParent();
          if (parent != null) {
            addFileToMove(file, parent.getPath(), newName);
          }
        }
      }
    }
  }
  /** Batches the queued VFS changes and executes them when the outermost command finishes. */
  private class MyCommandAdapter extends CommandAdapter {
    private int myCommandLevel;
    public void commandStarted(final CommandEvent event) {
      if (myProject != event.getProject()) return;
      myCommandLevel++;
    }
    // A file that was "added" and then moved back over a previously existing path is really a
    // copy of the old file; rewrite the queues so the VCS records it as such.
    private void checkMovedAddedSourceBack() {
      if (myAddedFiles.isEmpty() || myMovedFiles.isEmpty()) return;
      final Map<String, VirtualFile> addedPaths = new HashMap<String, VirtualFile>(myAddedFiles.size());
      for (VirtualFile file : myAddedFiles) {
        addedPaths.put(file.getPath(), file);
      }
      for (Iterator<MovedFileInfo> iterator = myMovedFiles.iterator(); iterator.hasNext();) {
        final MovedFileInfo movedFile = iterator.next();
        if (addedPaths.containsKey(movedFile.myOldPath)) {
          iterator.remove();
          final VirtualFile oldAdded = addedPaths.get(movedFile.myOldPath);
          myAddedFiles.remove(oldAdded);
          myAddedFiles.add(movedFile.myFile);
          myCopyFromMap.put(oldAdded, movedFile.myFile);
        }
      }
    }
    public void commandFinished(final CommandEvent event) {
      if (myProject != event.getProject()) return;
      myCommandLevel--;
      if (myCommandLevel == 0) {
        if (!myAddedFiles.isEmpty() || !myDeletedFiles.isEmpty() || !myDeletedWithoutConfirmFiles.isEmpty() || !myMovedFiles.isEmpty()) {
          // avoid reentering commandFinished handler - saving the documents may cause a "before file deletion" event firing,
          // which will cause closing the text editor, which will itself run a command that will be caught by this listener
          myCommandLevel++;
          try {
            FileDocumentManager.getInstance().saveAllDocuments();
          }
          finally {
            myCommandLevel--;
          }
          checkMovedAddedSourceBack();
          if (!myAddedFiles.isEmpty()) {
            executeAdd();
          }
          if (!myDeletedFiles.isEmpty() || !myDeletedWithoutConfirmFiles.isEmpty()) {
            executeDelete();
          }
          if (!myMovedFiles.isEmpty()) {
            executeMoveRename();
          }
        }
      }
    }
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.lens.client;
import java.io.InputStream;
import java.nio.charset.Charset;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.ws.rs.core.Response;
import org.apache.lens.api.APIResult;
import org.apache.lens.api.LensConf;
import org.apache.lens.api.metastore.*;
import org.apache.lens.api.query.*;
import org.apache.lens.api.result.LensAPIResult;
import org.apache.lens.api.util.PathValidator;
import org.apache.lens.client.exceptions.LensAPIException;
import org.apache.lens.client.exceptions.LensBriefErrorException;
import org.apache.lens.client.exceptions.LensClientIOException;
import org.apache.lens.client.model.BriefError;
import org.apache.lens.client.model.IdBriefErrorTemplate;
import org.apache.lens.client.model.IdBriefErrorTemplateKey;
import org.apache.lens.client.resultset.CsvResultSet;
import org.apache.lens.client.resultset.ResultSet;
import org.apache.lens.client.resultset.ZippedCsvResultSet;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Preconditions;
import com.google.common.collect.Maps;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class LensClient implements AutoCloseable {
public static final String CLILOGGER = "cliLogger";
private static final String DEFAULT_PASSWORD = "";
@Getter
private final LensClientConfig conf;
@Getter
private final LensMetadataClient mc;
private String password;
@Getter
private LensConnection connection;
private final HashMap<QueryHandle, LensStatement> statementMap =
Maps.newHashMap();
@Getter
private final LensStatement statement;
@Getter
private PathValidator pathValidator;
public static final String QUERY_RESULT_SPLIT_INTO_MULTIPLE = "lens.query.result.split.multiple";
public static final String QUERY_OUTPUT_WRITE_HEADER_ENABLED = "lens.query.output.write.header";
public static final String QUERY_OUTPUT_ENCODING = "lens.query.output.charset.encoding";
public static final char DEFAULT_RESULTSET_DELIMITER = ',';
/** Returns the logger used for CLI-facing output. */
public static Logger getCliLogger() {
    return LoggerFactory.getLogger(CLILOGGER);
}
/** Creates a client with default configuration and the configured default user. */
public LensClient() {
    this(new LensClientConfig());
}
/** Creates a client using the user from the given configuration and an empty password. */
public LensClient(LensClientConfig conf) {
    this(conf, conf.getUser(), DEFAULT_PASSWORD);
}
/** Creates a client with default configuration and explicit credentials. */
public LensClient(String username, String password) {
    this(new LensClientConfig(), username, password);
}
/**
 * Main constructor: stores credentials, opens the server connection, and initializes the
 * metadata client and the shared statement. Note: the connection is opened eagerly here.
 */
public LensClient(LensClientConfig conf, String username, String password) {
    this.conf = conf;
    conf.setUser(username);
    this.password = password;
    connectToLensServer();
    mc = new LensMetadataClient(connection);
    statement = new LensStatement(connection);
}
/** Convenience constructor taking a credentials holder. */
public LensClient(Credentials cred) {
    this(cred.getUsername(), cred.getPassword());
}
public LensMetadataClient getMetadataClient() {
    return mc;
}
/**
 * Submits a query asynchronously with an empty configuration.
 * @deprecated use {@link #executeQueryAsynch(String, String, LensConf)} instead
 */
@Deprecated
public QueryHandle executeQueryAsynch(String sql, String queryName) throws LensAPIException {
    return executeQueryAsynch(sql, queryName, new LensConf());
}
/**
 * Submits a query asynchronously and returns its handle; the shared statement is remembered
 * per handle so results can be fetched later via {@link #getLensStatement(QueryHandle)}.
 */
public QueryHandle executeQueryAsynch(String sql, String queryName, LensConf conf) throws LensAPIException {
    log.debug("Executing query {}", sql);
    QueryHandle handle = statement.executeQuery(sql, false, queryName, conf);
    statementMap.put(handle, statement);
    return handle;
}
/**
 * Execute query with timeout option.
 * If the query does not finish within the timeout time, server returns the query handle which can be used to
 * track further progress.
 *
 * @param sql : query/command to be executed
 * @param queryName : optional query name
 * @param timeOutMillis : timeout milliseconds for the query execution.
 * @return handle plus inline result set when the query finished within the timeout
 * @throws LensAPIException on server-side failure to accept the query
 * @deprecated use {@link #executeQueryWithTimeout(String, String, long, LensConf)} instead
 */
@Deprecated
public QueryHandleWithResultSet executeQueryWithTimeout(String sql, String queryName, long timeOutMillis)
    throws LensAPIException {
    return executeQueryWithTimeout(sql, queryName, timeOutMillis, new LensConf());
}
/**
 * Execute query with timeout option.
 * If the query does not finish within the timeout time, server returns the query handle which can be used to
 * track further progress.
 *
 * @param sql : query/command to be executed
 * @param queryName : optional query name
 * @param timeOutMillis : timeout milliseconds for the query execution.
 * @param conf config to be used for the query
 * @return handle plus inline result set when the query finished within the timeout
 * @throws LensAPIException on server-side failure to accept the query
 * @throws LensBriefErrorException when the query completed within the timeout but failed
 */
public QueryHandleWithResultSet executeQueryWithTimeout(String sql, String queryName,
    long timeOutMillis, LensConf conf) throws LensAPIException {
    log.info("Executing query {} with timeout of {} milliseconds", sql, timeOutMillis);
    QueryHandleWithResultSet result = statement.executeQuery(sql, queryName, timeOutMillis, conf);
    statementMap.put(result.getQueryHandle(), statement);
    // A failed status is surfaced as a brief-error exception carrying the query id.
    if (result.getStatus().failed()) {
        IdBriefErrorTemplate errorResult = new IdBriefErrorTemplate(IdBriefErrorTemplateKey.QUERY_ID,
            result.getQueryHandle().getHandleIdString(), new BriefError(result.getStatus()
            .getErrorCode(), result.getStatus().getErrorMessage()));
        throw new LensBriefErrorException(errorResult);
    }
    return result;
}
/** Returns the latest available date for the cube's given time partition (delegates to the metadata client). */
public Date getLatestDateOfCube(String cubeName, String timePartition) {
    return mc.getLatestDateOfCube(cubeName, timePartition);
}
/** Returns partition timelines for a fact/storage/update-period/time-dimension combination. */
public List<String> getPartitionTimelines(String factName, String storageName, String updatePeriod,
    String timeDimension) {
    return mc.getPartitionTimelines(factName, storageName, updatePeriod, timeDimension);
}
/**
 * Pairs a (possibly {@code null}) result set with the query it came from, so callers can
 * inspect query statistics alongside the data.
 */
public static class LensClientResultSetWithStats {
    private final LensClientResultSet resultSet;
    private final LensQuery query;
    public LensClientResultSetWithStats(LensClientResultSet resultSet,
        LensQuery query) {
        this.resultSet = resultSet;
        this.query = query;
    }
    /** @return the result set, or {@code null} when none is available for the query */
    public LensClientResultSet getResultSet() {
        return resultSet;
    }
    public LensQuery getQuery() {
        return query;
    }
}
/** Executes the query synchronously (waits for completion) and returns its results with stats. */
public LensClientResultSetWithStats getResults(String sql, String queryName) throws LensAPIException {
    log.debug("Executing query {}", sql);
    statement.executeQuery(sql, true, queryName);
    return getResultsFromStatement(statement);
}
/**
 * Extracts results from a completed statement.
 * Throws {@link LensBriefErrorException} when the query did not finish successfully; the
 * returned wrapper's result set is {@code null} when the server reports none available.
 */
private LensClientResultSetWithStats getResultsFromStatement(LensStatement statement) {
    QueryStatus.Status status = statement.getStatus().getStatus();
    if (status != QueryStatus.Status.SUCCESSFUL) {
        IdBriefErrorTemplate errorResult = new IdBriefErrorTemplate(IdBriefErrorTemplateKey.QUERY_ID,
            statement.getQueryHandleString(), new BriefError(statement.getErrorCode(), statement.getErrorMessage()));
        throw new LensBriefErrorException(errorResult);
    }
    LensClientResultSet result = null;
    if (statement.getStatus().isResultSetAvailable()) {
        result = new LensClientResultSet(statement.getResultSetMetaData(), statement.getResultSet());
    }
    return new LensClientResultSetWithStats(result, statement.getQuery());
}
/**
 * Fetches results for an existing query handle.
 *
 * @param q query handle
 * @param async when {@code false}, blocks until the query completes first
 * @throws IllegalStateException if the query is in FAILED state
 */
private LensClientResultSetWithStats getResultsFromHandle(QueryHandle q, boolean async) {
    if (!async) {
        statement.waitForQueryToComplete(q);
    }
    LensQuery query = statement.getQuery(q);
    if (query.getStatus().getStatus()
        == QueryStatus.Status.FAILED) {
        throw new IllegalStateException(query.getStatus().getErrorMessage());
    }
    LensClientResultSet result = null;
    if (query.getStatus().isResultSetAvailable()) {
        result = new LensClientResultSet(statement.getResultSetMetaData(query), statement.getResultSet(query));
    }
    return new LensClientResultSetWithStats(result, query);
}
/** Non-blocking variant: returns whatever is available for the handle right now. */
public LensClientResultSetWithStats getAsyncResults(QueryHandle q) {
    return getResultsFromHandle(q, true);
}
/** Blocking variant: waits for the query to complete before fetching results. */
public LensClientResultSetWithStats getSyncResults(QueryHandle q) {
    return getResultsFromHandle(q, false);
}
/** Returns the HTTP result set of the statement's current query. */
public Response getHttpResults() {
    return statement.getHttpResultSet();
}
/** Returns the HTTP result set for the query identified by the given handle. */
public Response getHttpResults(QueryHandle q) {
    return statement.getHttpResultSet(statement.getQuery(q));
}
/**
 * Gets the ResultSet for the query represented by queryHandle, configured from the
 * connection parameters (split-file, header and encoding settings).
 *
 * @param queryHandle : query handle
 * @return parsed CSV (possibly zipped) result set
 * @throws LensClientIOException on failure to fetch the result stream
 */
public ResultSet getHttpResultSet(QueryHandle queryHandle) throws LensClientIOException {
    Map<String, String> paramsMap = this.connection.getConnectionParamsAsMap();
    String isSplitFileEnabled = paramsMap.get(QUERY_RESULT_SPLIT_INTO_MULTIPLE);
    String isHeaderEnabled = paramsMap.get(QUERY_OUTPUT_WRITE_HEADER_ENABLED);
    String encoding = paramsMap.get(QUERY_OUTPUT_ENCODING);
    // NOTE(review): assumes the encoding parameter is always present; Charset.forName(null)
    // would throw — confirm the server always supplies a default for this key.
    return getHttpResultSet(queryHandle, Charset.forName(encoding), Boolean.parseBoolean(isHeaderEnabled),
        DEFAULT_RESULTSET_DELIMITER, Boolean.parseBoolean(isSplitFileEnabled));
}
/**
 * Gets the ResultSet for the query represented by queryHandle.
 *
 * @param queryHandle : query handle.
 * @param encoding : resultset encoding.
 * @param isHeaderPresent : whether the resultset has header row included.
 * @param delimiter : delimiter used to separate columns of resultset.
 * @param isResultZipped : whether the resultset is zipped.
 * @return streaming CSV result set over the HTTP response body
 * @throws LensClientIOException when the result stream cannot be obtained
 */
public ResultSet getHttpResultSet(QueryHandle queryHandle, Charset encoding, boolean isHeaderPresent, char delimiter,
    boolean isResultZipped) throws LensClientIOException {
    InputStream resultStream = null;
    try {
        Response response = statement.getHttpResultSet(statement.getQuery(queryHandle));
        resultStream = response.readEntity(InputStream.class);
    // NOTE(review): broad catch — any failure (network, server error, entity parsing) is
    // wrapped the same way; the Response is not closed explicitly here, the returned
    // stream takes over its lifetime.
    } catch (Exception e) {
        throw new LensClientIOException("Error while getting resultset", e);
    }
    if (isResultZipped) {
        return new ZippedCsvResultSet(resultStream, encoding, isHeaderPresent, delimiter);
    } else {
        return new CsvResultSet(resultStream, encoding, isHeaderPresent, delimiter);
    }
}
/** Returns the statement that submitted the given query, or {@code null} if unknown to this client. */
public LensStatement getLensStatement(QueryHandle query) {
    return this.statementMap.get(query);
}
/** Fetches the current server-side status of the query. */
public QueryStatus getQueryStatus(QueryHandle query) {
    return statement.getQuery(query).getStatus();
}
public LensQuery getQueryDetails(QueryHandle handle) {
    return statement.getQuery(handle);
}
/** Convenience overload accepting the handle as a string. */
public QueryStatus getQueryStatus(String q) {
    return getQueryStatus(QueryHandle.fromString(q));
}
public LensQuery getQueryDetails(String handle) {
    return getQueryDetails(QueryHandle.fromString(handle));
}
/** Explains the query without executing it. */
public LensAPIResult<QueryPlan> getQueryPlan(String q) throws LensAPIException {
    return statement.explainQuery(q, new LensConf());
}
/** Attempts to cancel the query; returns whether the kill was accepted. */
public boolean killQuery(QueryHandle q) {
    return statement.kill(statement.getQuery(q));
}
/**
 * Returns the in-memory result of a finished query.
 * @throws IllegalStateException when no result set is available for the query
 */
public QueryResult getResults(QueryHandle query) {
    QueryStatus status = getLensStatement(query).getStatus();
    if (!status.isResultSetAvailable()) {
        log.debug("Current status of the query is {}", status);
        throw new IllegalStateException("Resultset for the query "
            + query + " is not available, its current status is " + status);
    }
    return getLensStatement(query).getResultSet();
}
/** Lists query handles matching the given filters (any filter may be null/empty). */
public List<QueryHandle> getQueries(String state, String queryName, String user, String driver, String fromDate,
    String toDate) {
    return statement.getAllQueries(state, queryName, user, driver, fromDate, toDate);
}
/** Lists full query details matching the given filters. */
public List<LensQuery> getQueriesWithDetails(String state, String queryName, String user, String driver,
    String fromDate, String toDate) {
    return statement.getAllQueryDetails(state, queryName, user, driver, fromDate, toDate);
}
/**
 * Opens the connection to the Lens server using the stored credentials and initializes the
 * path validator from the session configuration.
 */
private void connectToLensServer() {
    log.debug("Connecting to lens server {}", new LensConnectionParams(conf));
    connection = new LensConnection(new LensConnectionParams(conf));
    connection.open(password);
    log.debug("Successfully connected to server {}", connection);
    pathValidator = new PathValidator(connection.getLensConnectionParams().getSessionConf());
    // NOTE(review): this check is redundant — the constructor result above can never be null.
    Preconditions.checkNotNull(pathValidator, "Error in initializing Path Validator.");
}
/** Lists all databases visible to the session (delegates to the metadata client). */
public List<String> getAllDatabases() {
    log.debug("Getting all database");
    return mc.getAlldatabases();
}
public List<String> getAllNativeTables() {
    log.debug("Getting all native tables");
    return mc.getAllNativeTables();
}
public List<String> getAllFactTables() {
    log.debug("Getting all fact table");
    return mc.getAllFactTables();
}
/** Lists fact tables belonging to the given cube. */
public List<String> getAllFactTables(String cubeName) {
    log.debug("Getting all fact table");
    return mc.getAllFactTables(cubeName);
}
public List<String> getAllDimensionTables() {
    log.debug("Getting all dimension table");
    return mc.getAllDimensionTables();
}
/** Lists dimension tables belonging to the given dimension. */
public List<String> getAllDimensionTables(String dimensionName) {
    log.debug("Getting all dimension table");
    return mc.getAllDimensionTables(dimensionName);
}
public List<String> getAllCubes() {
    log.debug("Getting all cubes in database");
    return mc.getAllCubes();
}
public List<String> getAllDimensions() {
    log.debug("Getting all dimensions in database");
    return mc.getAllDimensions();
}
/**
 * Returns the session's current database name.
 * @deprecated the method name is misspelled; use {@link #getCurrentDatabase()} instead.
 *             Kept for backward compatibility with existing callers.
 */
@Deprecated
public String getCurrentDatabae() {
    return getCurrentDatabase();
}
/** Returns the session's current database name (delegates to the metadata client). */
public String getCurrentDatabase() {
    log.debug("Getting current database");
    return mc.getCurrentDatabase();
}
/** Switches the session's current database; returns {@code true} on success. */
public boolean setDatabase(String database) {
    log.debug("Set the database to {}", database);
    APIResult result = mc.setDatabase(database);
    return result.getStatus() == APIResult.Status.SUCCEEDED;
}
/** Drops a database, optionally cascading to its contents. */
public APIResult dropDatabase(String database, boolean cascade) {
    log.debug("Dropping database {}, cascade: {}", database, cascade);
    APIResult result = mc.dropDatabase(database, cascade);
    log.debug("Return status of dropping {} result {}", database, result);
    return result;
}
/** Creates a database; when {@code ignoreIfExists} is set, an existing database is not an error. */
public APIResult createDatabase(String database, boolean ignoreIfExists) {
    log.debug("Creating database {} ignore {}", database, ignoreIfExists);
    APIResult result = mc.createDatabase(database, ignoreIfExists);
    log.debug("Create database result {}", result);
    return result;
}
public APIResult setConnectionParam(String key, String val) {
return this.connection.setConnectionParams(key, val);
}
public List<String> getConnectionParam() {
return this.connection.getConnectionParams();
}
public List<String> getConnectionParam(String key) {
return this.connection.getConnectionParams(key);
}
/**
 * Closes the underlying Lens server connection.
 */
public void closeConnection() {
  if (log.isDebugEnabled()) {
    // Guard the debug line: SLF4J evaluates arguments eagerly, so without the
    // check a LensConnectionParams instance was allocated on every close even
    // when debug logging was disabled.
    log.debug("Closing lens connection: {}", new LensConnectionParams(conf));
  }
  this.connection.close();
}
public APIResult addJarResource(String path) {
return this.connection.addResourceToConnection("jar", path);
}
public APIResult removeJarResource(String path) {
return this.connection.removeResourceFromConnection("jar", path);
}
public APIResult addFileResource(String path) {
return this.connection.addResourceToConnection("file", path);
}
public APIResult removeFileResource(String path) {
return this.connection.removeResourceFromConnection("file", path);
}
public APIResult createFactTable(String factSpec) {
return mc.createFactTable(factSpec);
}
public APIResult createCube(String cubeSpec) {
return mc.createCube(cubeSpec);
}
public APIResult createStorage(String storageSpec) {
return mc.createNewStorage(storageSpec);
}
public APIResult createDimension(String dimSpec) {
return mc.createDimension(dimSpec);
}
public APIResult createDimensionTable(String dimSpec) {
return mc.createDimensionTable(dimSpec);
}
public List<String> getAllStorages() {
return mc.getAllStorages();
}
public APIResult dropDimensionTable(String dim, boolean cascade) {
return mc.dropDimensionTable(dim, cascade);
}
public APIResult dropFactTable(String fact, boolean cascade) {
return mc.dropFactTable(fact, cascade);
}
public APIResult dropCube(String cube) {
return mc.dropCube(cube);
}
public APIResult dropStorage(String storage) {
return mc.dropStorage(storage);
}
public APIResult dropDimension(String dimName) {
return mc.dropDimension(dimName);
}
public APIResult updateFactTable(String factName, String factSpec) {
return mc.updateFactTable(factName, factSpec);
}
public APIResult updateDimensionTable(String dimName, String dimSpec) {
return mc.updateDimensionTable(dimName, dimSpec);
}
public APIResult updateCube(String cubeName, String cubeSpec) {
return mc.updateCube(cubeName, cubeSpec);
}
public APIResult updateStorage(String storageName, String storageSpec) {
return mc.updateStorage(storageName, storageSpec);
}
public APIResult updateDimension(String dimName, String dimSpec) {
return mc.updateDimension(dimName, dimSpec);
}
public XFactTable getFactTable(String factName) {
return mc.getFactTable(factName);
}
public XDimensionTable getDimensionTable(String dimName) {
return mc.getDimensionTable(dimName);
}
public XNativeTable getNativeTable(String tblName) {
return mc.getNativeTable(tblName);
}
public XCube getCube(String cubeName) {
return mc.getCube(cubeName);
}
public XFlattenedColumns getQueryableFields(String table, boolean flattened) {
return mc.getQueryableFields(table, flattened);
}
public XJoinChains getJoinChains(String table) {
return mc.getJoinChains(table);
}
public XDimension getDimension(String dimName) {
return mc.getDimension(dimName);
}
public XStorage getStorage(String storageName) {
return mc.getStorage(storageName);
}
public List<String> getFactStorages(String fact) {
return mc.getAllStoragesOfFactTable(fact);
}
public List<String> getDimStorages(String dim) {
return mc.getAllStoragesOfDimTable(dim);
}
public APIResult dropAllStoragesOfDim(String table) {
return mc.dropAllStoragesOfDimension(table);
}
public APIResult dropAllStoragesOfFact(String table) {
return mc.dropAllStoragesOfFactTable(table);
}
public APIResult addStorageToFact(String factName, String spec) {
return mc.addStorageToFactTable(factName, spec);
}
public APIResult dropStorageFromFact(String factName, String storage) {
return mc.dropStorageFromFactTable(factName, storage);
}
public XStorageTableElement getStorageFromFact(String fact, String storage) {
return mc.getStorageOfFactTable(fact, storage);
}
public APIResult addStorageToDim(String dim, String storage) {
return mc.addStorageToDimTable(dim, storage);
}
public APIResult dropStorageFromDim(String dim, String storage) {
return mc.dropStoragesOfDimensionTable(dim, storage);
}
public XStorageTableElement getStorageFromDim(String dim, String storage) {
return mc.getStorageOfDimensionTable(dim, storage);
}
public List<XPartition> getAllPartitionsOfFact(String fact, String storage) {
return mc.getPartitionsOfFactTable(fact, storage);
}
public List<XPartition> getAllPartitionsOfFact(String fact, String storage, String list) {
return mc.getPartitionsOfFactTable(fact, storage, list);
}
public List<XPartition> getAllPartitionsOfDim(String dim, String storage) {
return mc.getAllPartitionsOfDimensionTable(dim, storage);
}
/**
 * Lists partitions of a dimension table on the given storage, restricted by
 * the supplied partition filter.
 *
 * @param dim     dimension table name
 * @param storage storage name
 * @param list    partition filter expression restricting the result
 * @return the matching partitions
 */
public List<XPartition> getAllPartitionsOfDim(String dim, String storage, String list) {
  // Bug fix: the 'list' filter was previously dropped, making this overload
  // behave identically to the two-argument variant (the fact-table analog
  // getAllPartitionsOfFact(fact, storage, list) does forward it).
  return mc.getAllPartitionsOfDimensionTable(dim, storage, list);
}
public APIResult dropAllPartitionsOfFact(String fact, String storage) {
return mc.dropPartitionsOfFactTable(fact, storage);
}
public APIResult dropAllPartitionsOfFact(String fact, String storage, String list) {
return mc.dropPartitionsOfFactTable(fact, storage, list);
}
public APIResult dropAllPartitionsOfDim(String dim, String storage) {
return mc.dropAllPartitionsOfDimensionTable(dim, storage);
}
public APIResult dropAllPartitionsOfDim(String dim, String storage, String list) {
return mc.dropAllPartitionsOfDimensionTable(dim, storage, list);
}
public APIResult addPartitionToFact(String table, String storage, String partSpec) {
return mc.addPartitionToFactTable(table, storage, partSpec);
}
public APIResult addPartitionsToFact(String table, String storage, String partsSpec) {
return mc.addPartitionsToFactTable(table, storage, partsSpec);
}
public APIResult addPartitionToFact(String table, String storage, XPartition xp) {
return mc.addPartitionToFactTable(table, storage, xp);
}
public APIResult addPartitionsToFact(String table, String storage, XPartitionList xpList) {
return mc.addPartitionsToFactTable(table, storage, xpList);
}
public APIResult addPartitionToDim(String table, String storage, String partSpec) {
return mc.addPartitionToDimensionTable(table, storage, partSpec);
}
public APIResult addPartitionToDim(String table, String storage, XPartition xp) {
return mc.addPartitionToDimensionTable(table, storage, xp);
}
public APIResult addPartitionsToDim(String table, String storage, XPartitionList xpList) {
return mc.addPartitionsToDimensionTable(table, storage, xpList);
}
public APIResult addPartitionsToDim(String table, String storage, String partsSpec) {
return mc.addPartitionsToDimensionTable(table, storage, partsSpec);
}
public APIResult updatePartitionOfFact(String table, String storage, String partSpec) {
return mc.updatePartitionOfFactTable(table, storage, partSpec);
}
public APIResult updatePartitionsOfFact(String table, String storage, String partsSpec) {
return mc.updatePartitionsOfFactTable(table, storage, partsSpec);
}
public APIResult updatePartitionOfFact(String table, String storage, XPartition xp) {
return mc.updatePartitionOfFactTable(table, storage, xp);
}
public APIResult updatePartitionsOfFact(String table, String storage, XPartitionList xpList) {
return mc.updatePartitionsOfFactTable(table, storage, xpList);
}
public APIResult updatePartitionOfDim(String table, String storage, String partSpec) {
return mc.updatePartitionOfDimensionTable(table, storage, partSpec);
}
public APIResult updatePartitionOfDim(String table, String storage, XPartition xp) {
return mc.updatePartitionOfDimensionTable(table, storage, xp);
}
public APIResult updatePartitionsOfDim(String table, String storage, XPartitionList xpList) {
return mc.updatePartitionsOfDimensionTable(table, storage, xpList);
}
public APIResult updatePartitionsOfDim(String table, String storage, String partsSpec) {
return mc.updatePartitionsOfDimensionTable(table, storage, partsSpec);
}
@Deprecated
public LensAPIResult<QueryPrepareHandle> prepare(String sql, String queryName) throws LensAPIException {
return prepare(sql, queryName, new LensConf());
}
public LensAPIResult<QueryPrepareHandle> prepare(String sql, String queryName, LensConf conf)
throws LensAPIException {
return statement.prepareQuery(sql, queryName, conf);
}
@Deprecated
public LensAPIResult<QueryPlan> explainAndPrepare(String sql, String queryName) throws LensAPIException {
return explainAndPrepare(sql, queryName, new LensConf());
}
/**
 * Explains the given query and prepares it for later execution.
 *
 * @param sql       the query to explain and prepare
 * @param queryName a name to associate with the prepared query
 * @param conf      per-query configuration overrides
 * @return the query plan together with the prepare handle
 * @throws LensAPIException if the server rejects the query
 */
public LensAPIResult<QueryPlan> explainAndPrepare(String sql, String queryName, LensConf conf)
  throws LensAPIException {
  // Bug fix: 'conf' was previously ignored, so per-query configuration (and the
  // LensConf created by the deprecated two-argument overload) never reached the
  // server; forward it, mirroring prepare(sql, queryName, conf).
  return statement.explainAndPrepare(sql, queryName, conf);
}
public boolean destroyPrepared(QueryPrepareHandle queryPrepareHandle) {
return statement.destroyPrepared(queryPrepareHandle);
}
public List<QueryPrepareHandle> getPreparedQueries(String userName, String queryName, String fromDate,
String toDate) {
return statement.getAllPreparedQueries(userName, queryName, fromDate, toDate);
}
public LensPreparedQuery getPreparedQuery(QueryPrepareHandle phandle) {
return statement.getPreparedQuery(phandle);
}
/**
 * Executes an already-prepared query and fetches its results.
 *
 * @param phandle   handle of the prepared query to execute
 * @param queryName name to associate with this execution
 * @return the result set together with query statistics
 */
public LensClientResultSetWithStats getResultsFromPrepared(QueryPrepareHandle phandle, String queryName) {
// The boolean flags mirror the other executeQuery/getResultsFromHandle call
// sites in this class; presumably 'true' requests synchronous (blocking)
// execution — TODO confirm against LensStatement.executeQuery.
QueryHandle qh = statement.executeQuery(phandle, true, queryName);
return getResultsFromHandle(qh, true);
}
@Deprecated
public QueryHandle executePrepared(QueryPrepareHandle phandle, String queryName) {
return executePrepared(phandle, queryName, new LensConf());
}
public QueryHandle executePrepared(QueryPrepareHandle phandle, String queryName, LensConf conf) {
return statement.executeQuery(phandle, false, queryName, conf);
}
public boolean isConnectionOpen() {
return this.connection.isOpen();
}
public List<String> listResources(String type) {
return this.connection.listResourcesFromConnection(type);
}
public Response getLogs(String logFile) {
return this.connection.getLogs(logFile);
}
public XSegmentation getSegmentation(String segName) {
return mc.getSegmentation(segName);
}
public List<String> getAllSegmentations() {
return mc.getAllSegmentations();
}
public List<String> getAllSegmentations(String filter) {
return mc.getAllSegmentations(filter);
}
public APIResult createSegmentation(String segSpec) {
return mc.createSegmentation(segSpec);
}
public APIResult updateSegmentation(String segName, String segSpec) {
return mc.updateSegmentation(segName, segSpec);
}
public APIResult dropSegmentation(String segName) {
return mc.dropSegmentation(segName);
}
/**
 * Closes this client by closing its server connection; allows the client to be
 * used in try-with-resources.
 */
@Override
public void close() {
closeConnection();
}
}
| |
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/api/endpoint.proto
package com.google.api;
/**
*
*
* <pre>
* `Endpoint` describes a network endpoint of a service that serves a set of
* APIs. It is commonly known as a service endpoint. A service may expose
* any number of service endpoints, and all service endpoints share the same
* service definition, such as quota limits and monitoring metrics.
* Example service configuration:
* name: library-example.googleapis.com
* endpoints:
* # Below entry makes 'google.example.library.v1.Library'
* # API be served from endpoint address library-example.googleapis.com.
* # It also allows HTTP OPTIONS calls to be passed to the backend, for
* # it to decide whether the subsequent cross-origin request is
* # allowed to proceed.
* - name: library-example.googleapis.com
* allow_cors: true
* </pre>
*
* Protobuf type {@code google.api.Endpoint}
*/
public final class Endpoint extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.api.Endpoint)
EndpointOrBuilder {
private static final long serialVersionUID = 0L;
// Use Endpoint.newBuilder() to construct.
private Endpoint(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private Endpoint() {
name_ = "";
aliases_ = com.google.protobuf.LazyStringArrayList.EMPTY;
target_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new Endpoint();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor (protobuf-generated): reads tag/value pairs
// from the stream until EOF (tag 0), populating this message's fields.
// Unrecognized tags are preserved in unknownFields.
private Endpoint(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
// Bit 0 tracks whether the aliases_ list has been switched to a mutable copy.
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
// Tag 0 marks end of stream.
done = true;
break;
case 10:
// Field 1 (string name), wire type 2.
{
java.lang.String s = input.readStringRequireUtf8();
name_ = s;
break;
}
case 18:
// Field 2 (repeated string aliases), wire type 2.
{
java.lang.String s = input.readStringRequireUtf8();
if (!((mutable_bitField0_ & 0x00000001) != 0)) {
// First alias seen: lazily allocate the mutable list.
aliases_ = new com.google.protobuf.LazyStringArrayList();
mutable_bitField0_ |= 0x00000001;
}
aliases_.add(s);
break;
}
case 40:
// Field 5 (bool allow_cors), wire type 0.
{
allowCors_ = input.readBool();
break;
}
case 810:
// Field 101 (string target), wire type 2.
{
java.lang.String s = input.readStringRequireUtf8();
target_ = s;
break;
}
default:
{
// Unknown field: keep its bytes unless it signals end of group.
if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
} finally {
// Runs on both success and failure: freeze the aliases list if it was
// made mutable, and attach whatever unknown fields were collected.
if (((mutable_bitField0_ & 0x00000001) != 0)) {
aliases_ = aliases_.getUnmodifiableView();
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.api.EndpointProto.internal_static_google_api_Endpoint_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.api.EndpointProto.internal_static_google_api_Endpoint_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.api.Endpoint.class, com.google.api.Endpoint.Builder.class);
}
public static final int NAME_FIELD_NUMBER = 1;
private volatile java.lang.Object name_;
/**
*
*
* <pre>
* The canonical name of this endpoint.
* </pre>
*
* <code>string name = 1;</code>
*
* @return The name.
*/
@java.lang.Override
public java.lang.String getName() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
// name_ still holds the wire ByteString: decode once and cache the
// decoded String back into the field so later calls take the fast path.
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
}
}
/**
*
*
* <pre>
* The canonical name of this endpoint.
* </pre>
*
* <code>string name = 1;</code>
*
* @return The bytes for name.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int ALIASES_FIELD_NUMBER = 2;
private com.google.protobuf.LazyStringList aliases_;
/**
*
*
* <pre>
* Unimplemented. Do not use.
* DEPRECATED: This field is no longer supported. Instead of using aliases,
* please specify multiple [google.api.Endpoint][google.api.Endpoint] for each of the intended
* aliases.
* Additional names that this endpoint will be hosted on.
* </pre>
*
* <code>repeated string aliases = 2 [deprecated = true];</code>
*
* @return A list containing the aliases.
*/
@java.lang.Deprecated
public com.google.protobuf.ProtocolStringList getAliasesList() {
return aliases_;
}
/**
*
*
* <pre>
* Unimplemented. Do not use.
* DEPRECATED: This field is no longer supported. Instead of using aliases,
* please specify multiple [google.api.Endpoint][google.api.Endpoint] for each of the intended
* aliases.
* Additional names that this endpoint will be hosted on.
* </pre>
*
* <code>repeated string aliases = 2 [deprecated = true];</code>
*
* @return The count of aliases.
*/
@java.lang.Deprecated
public int getAliasesCount() {
return aliases_.size();
}
/**
*
*
* <pre>
* Unimplemented. Do not use.
* DEPRECATED: This field is no longer supported. Instead of using aliases,
* please specify multiple [google.api.Endpoint][google.api.Endpoint] for each of the intended
* aliases.
* Additional names that this endpoint will be hosted on.
* </pre>
*
* <code>repeated string aliases = 2 [deprecated = true];</code>
*
* @param index The index of the element to return.
* @return The aliases at the given index.
*/
@java.lang.Deprecated
public java.lang.String getAliases(int index) {
return aliases_.get(index);
}
/**
*
*
* <pre>
* Unimplemented. Do not use.
* DEPRECATED: This field is no longer supported. Instead of using aliases,
* please specify multiple [google.api.Endpoint][google.api.Endpoint] for each of the intended
* aliases.
* Additional names that this endpoint will be hosted on.
* </pre>
*
* <code>repeated string aliases = 2 [deprecated = true];</code>
*
* @param index The index of the value to return.
* @return The bytes of the aliases at the given index.
*/
@java.lang.Deprecated
public com.google.protobuf.ByteString getAliasesBytes(int index) {
return aliases_.getByteString(index);
}
public static final int TARGET_FIELD_NUMBER = 101;
private volatile java.lang.Object target_;
/**
*
*
* <pre>
* The specification of an Internet routable address of API frontend that will
* handle requests to this [API
* Endpoint](https://cloud.google.com/apis/design/glossary). It should be
* either a valid IPv4 address or a fully-qualified domain name. For example,
* "8.8.8.8" or "myservice.appspot.com".
* </pre>
*
* <code>string target = 101;</code>
*
* @return The target.
*/
@java.lang.Override
public java.lang.String getTarget() {
java.lang.Object ref = target_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
target_ = s;
return s;
}
}
/**
*
*
* <pre>
* The specification of an Internet routable address of API frontend that will
* handle requests to this [API
* Endpoint](https://cloud.google.com/apis/design/glossary). It should be
* either a valid IPv4 address or a fully-qualified domain name. For example,
* "8.8.8.8" or "myservice.appspot.com".
* </pre>
*
* <code>string target = 101;</code>
*
* @return The bytes for target.
*/
@java.lang.Override
public com.google.protobuf.ByteString getTargetBytes() {
java.lang.Object ref = target_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
target_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int ALLOW_CORS_FIELD_NUMBER = 5;
private boolean allowCors_;
/**
*
*
* <pre>
* Allowing
* [CORS](https://en.wikipedia.org/wiki/Cross-origin_resource_sharing), aka
* cross-domain traffic, would allow the backends served from this endpoint to
* receive and respond to HTTP OPTIONS requests. The response will be used by
* the browser to determine whether the subsequent cross-origin request is
* allowed to proceed.
* </pre>
*
* <code>bool allow_cors = 5;</code>
*
* @return The allowCors.
*/
@java.lang.Override
public boolean getAllowCors() {
return allowCors_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
// Serializes this message: proto3 scalar fields are skipped when they hold
// their default value (empty string / false); unknown fields are written last.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
}
for (int i = 0; i < aliases_.size(); i++) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, aliases_.getRaw(i));
}
if (allowCors_ != false) {
output.writeBool(5, allowCors_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(target_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 101, target_);
}
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
}
{
int dataSize = 0;
for (int i = 0; i < aliases_.size(); i++) {
dataSize += computeStringSizeNoTag(aliases_.getRaw(i));
}
size += dataSize;
size += 1 * getAliasesList().size();
}
if (allowCors_ != false) {
size += com.google.protobuf.CodedOutputStream.computeBoolSize(5, allowCors_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(target_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(101, target_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
// Value equality over all four fields plus the retained unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.api.Endpoint)) {
return super.equals(obj);
}
com.google.api.Endpoint other = (com.google.api.Endpoint) obj;
if (!getName().equals(other.getName())) return false;
if (!getAliasesList().equals(other.getAliasesList())) return false;
if (!getTarget().equals(other.getTarget())) return false;
if (getAllowCors() != other.getAllowCors()) return false;
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
// Memoized hash consistent with equals(): mixes the descriptor, each field's
// number and value (aliases only when present), and the unknown fields.
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + NAME_FIELD_NUMBER;
hash = (53 * hash) + getName().hashCode();
if (getAliasesCount() > 0) {
hash = (37 * hash) + ALIASES_FIELD_NUMBER;
hash = (53 * hash) + getAliasesList().hashCode();
}
hash = (37 * hash) + TARGET_FIELD_NUMBER;
hash = (53 * hash) + getTarget().hashCode();
hash = (37 * hash) + ALLOW_CORS_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getAllowCors());
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.api.Endpoint parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.api.Endpoint parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.api.Endpoint parseFrom(com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.api.Endpoint parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.api.Endpoint parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.api.Endpoint parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.api.Endpoint parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.api.Endpoint parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.api.Endpoint parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.api.Endpoint parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.api.Endpoint parseFrom(com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.api.Endpoint parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.api.Endpoint prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* `Endpoint` describes a network endpoint of a service that serves a set of
* APIs. It is commonly known as a service endpoint. A service may expose
* any number of service endpoints, and all service endpoints share the same
* service definition, such as quota limits and monitoring metrics.
* Example service configuration:
* name: library-example.googleapis.com
* endpoints:
* # Below entry makes 'google.example.library.v1.Library'
* # API be served from endpoint address library-example.googleapis.com.
* # It also allows HTTP OPTIONS calls to be passed to the backend, for
* # it to decide whether the subsequent cross-origin request is
* # allowed to proceed.
* - name: library-example.googleapis.com
* allow_cors: true
* </pre>
*
* Protobuf type {@code google.api.Endpoint}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.api.Endpoint)
com.google.api.EndpointOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.api.EndpointProto.internal_static_google_api_Endpoint_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.api.EndpointProto.internal_static_google_api_Endpoint_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.api.Endpoint.class, com.google.api.Endpoint.Builder.class);
}
// Construct using com.google.api.Endpoint.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
}
@java.lang.Override
public Builder clear() {
super.clear();
name_ = "";
aliases_ = com.google.protobuf.LazyStringArrayList.EMPTY;
bitField0_ = (bitField0_ & ~0x00000001);
target_ = "";
allowCors_ = false;
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.api.EndpointProto.internal_static_google_api_Endpoint_descriptor;
}
@java.lang.Override
public com.google.api.Endpoint getDefaultInstanceForType() {
return com.google.api.Endpoint.getDefaultInstance();
}
@java.lang.Override
public com.google.api.Endpoint build() {
com.google.api.Endpoint result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Copies the builder's state into a new Endpoint without checking required
// fields; the aliases list is frozen and handed over, after which the builder
// treats it as immutable (bit 0 cleared) until ensureAliasesIsMutable copies it.
@java.lang.Override
public com.google.api.Endpoint buildPartial() {
com.google.api.Endpoint result = new com.google.api.Endpoint(this);
int from_bitField0_ = bitField0_;
result.name_ = name_;
if (((bitField0_ & 0x00000001) != 0)) {
aliases_ = aliases_.getUnmodifiableView();
bitField0_ = (bitField0_ & ~0x00000001);
}
result.aliases_ = aliases_;
result.target_ = target_;
result.allowCors_ = allowCors_;
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.api.Endpoint) {
return mergeFrom((com.google.api.Endpoint) other);
} else {
super.mergeFrom(other);
return this;
}
}
// Merges another Endpoint into this builder: non-default scalars from 'other'
// overwrite, aliases are concatenated, unknown fields are merged.
public Builder mergeFrom(com.google.api.Endpoint other) {
if (other == com.google.api.Endpoint.getDefaultInstance()) return this;
if (!other.getName().isEmpty()) {
name_ = other.name_;
onChanged();
}
if (!other.aliases_.isEmpty()) {
if (aliases_.isEmpty()) {
// Our list is empty: share other's (immutable) list and mark it as
// not-owned by clearing the mutability bit.
aliases_ = other.aliases_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureAliasesIsMutable();
aliases_.addAll(other.aliases_);
}
onChanged();
}
if (!other.getTarget().isEmpty()) {
target_ = other.target_;
onChanged();
}
if (other.getAllowCors() != false) {
setAllowCors(other.getAllowCors());
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.api.Endpoint parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (com.google.api.Endpoint) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
private java.lang.Object name_ = "";
/**
*
*
* <pre>
* The canonical name of this endpoint.
* </pre>
*
* <code>string name = 1;</code>
*
* @return The name.
*/
public java.lang.String getName() {
java.lang.Object ref = name_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* The canonical name of this endpoint.
* </pre>
*
* <code>string name = 1;</code>
*
* @return The bytes for name.
*/
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* The canonical name of this endpoint.
* </pre>
*
* <code>string name = 1;</code>
*
* @param value The name to set.
* @return This builder for chaining.
*/
public Builder setName(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
name_ = value;
onChanged();
return this;
}
/**
*
*
* <pre>
* The canonical name of this endpoint.
* </pre>
*
* <code>string name = 1;</code>
*
* @return This builder for chaining.
*/
public Builder clearName() {
name_ = getDefaultInstance().getName();
onChanged();
return this;
}
/**
*
*
* <pre>
* The canonical name of this endpoint.
* </pre>
*
* <code>string name = 1;</code>
*
* @param value The bytes for name to set.
* @return This builder for chaining.
*/
public Builder setNameBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
name_ = value;
onChanged();
return this;
}
// Backing list for the repeated "aliases" field; starts as the shared
// immutable empty list and is copied on first mutation (copy-on-write).
private com.google.protobuf.LazyStringList aliases_ =
    com.google.protobuf.LazyStringArrayList.EMPTY;
// Bit 0x1 of bitField0_ records whether aliases_ is a private mutable
// copy owned by this builder; if not, make one before mutating.
private void ensureAliasesIsMutable() {
  if (!((bitField0_ & 0x00000001) != 0)) {
    aliases_ = new com.google.protobuf.LazyStringArrayList(aliases_);
    bitField0_ |= 0x00000001;
  }
}
/**
*
*
* <pre>
 * Unimplemented. Do not use.
* DEPRECATED: This field is no longer supported. Instead of using aliases,
* please specify multiple [google.api.Endpoint][google.api.Endpoint] for each of the intended
* aliases.
* Additional names that this endpoint will be hosted on.
* </pre>
*
* <code>repeated string aliases = 2 [deprecated = true];</code>
*
* @return A list containing the aliases.
*/
@java.lang.Deprecated
public com.google.protobuf.ProtocolStringList getAliasesList() {
return aliases_.getUnmodifiableView();
}
/**
*
*
* <pre>
 * Unimplemented. Do not use.
* DEPRECATED: This field is no longer supported. Instead of using aliases,
* please specify multiple [google.api.Endpoint][google.api.Endpoint] for each of the intended
* aliases.
* Additional names that this endpoint will be hosted on.
* </pre>
*
* <code>repeated string aliases = 2 [deprecated = true];</code>
*
* @return The count of aliases.
*/
@java.lang.Deprecated
public int getAliasesCount() {
return aliases_.size();
}
/**
*
*
* <pre>
 * Unimplemented. Do not use.
* DEPRECATED: This field is no longer supported. Instead of using aliases,
* please specify multiple [google.api.Endpoint][google.api.Endpoint] for each of the intended
* aliases.
* Additional names that this endpoint will be hosted on.
* </pre>
*
* <code>repeated string aliases = 2 [deprecated = true];</code>
*
* @param index The index of the element to return.
* @return The aliases at the given index.
*/
@java.lang.Deprecated
public java.lang.String getAliases(int index) {
return aliases_.get(index);
}
/**
*
*
* <pre>
* Unimplemented. Dot not use.
* DEPRECATED: This field is no longer supported. Instead of using aliases,
* please specify multiple [google.api.Endpoint][google.api.Endpoint] for each of the intended
* aliases.
* Additional names that this endpoint will be hosted on.
* </pre>
*
* <code>repeated string aliases = 2 [deprecated = true];</code>
*
* @param index The index of the value to return.
* @return The bytes of the aliases at the given index.
*/
@java.lang.Deprecated
public com.google.protobuf.ByteString getAliasesBytes(int index) {
return aliases_.getByteString(index);
}
/**
*
*
* <pre>
 * Unimplemented. Do not use.
* DEPRECATED: This field is no longer supported. Instead of using aliases,
* please specify multiple [google.api.Endpoint][google.api.Endpoint] for each of the intended
* aliases.
* Additional names that this endpoint will be hosted on.
* </pre>
*
* <code>repeated string aliases = 2 [deprecated = true];</code>
*
* @param index The index to set the value at.
* @param value The aliases to set.
* @return This builder for chaining.
*/
@java.lang.Deprecated
public Builder setAliases(int index, java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureAliasesIsMutable();
aliases_.set(index, value);
onChanged();
return this;
}
/**
*
*
* <pre>
 * Unimplemented. Do not use.
* DEPRECATED: This field is no longer supported. Instead of using aliases,
* please specify multiple [google.api.Endpoint][google.api.Endpoint] for each of the intended
* aliases.
* Additional names that this endpoint will be hosted on.
* </pre>
*
* <code>repeated string aliases = 2 [deprecated = true];</code>
*
* @param value The aliases to add.
* @return This builder for chaining.
*/
@java.lang.Deprecated
public Builder addAliases(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureAliasesIsMutable();
aliases_.add(value);
onChanged();
return this;
}
/**
*
*
* <pre>
 * Unimplemented. Do not use.
* DEPRECATED: This field is no longer supported. Instead of using aliases,
* please specify multiple [google.api.Endpoint][google.api.Endpoint] for each of the intended
* aliases.
* Additional names that this endpoint will be hosted on.
* </pre>
*
* <code>repeated string aliases = 2 [deprecated = true];</code>
*
* @param values The aliases to add.
* @return This builder for chaining.
*/
@java.lang.Deprecated
public Builder addAllAliases(java.lang.Iterable<java.lang.String> values) {
ensureAliasesIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, aliases_);
onChanged();
return this;
}
/**
*
*
* <pre>
 * Unimplemented. Do not use.
* DEPRECATED: This field is no longer supported. Instead of using aliases,
* please specify multiple [google.api.Endpoint][google.api.Endpoint] for each of the intended
* aliases.
* Additional names that this endpoint will be hosted on.
* </pre>
*
* <code>repeated string aliases = 2 [deprecated = true];</code>
*
* @return This builder for chaining.
*/
@java.lang.Deprecated
public Builder clearAliases() {
aliases_ = com.google.protobuf.LazyStringArrayList.EMPTY;
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
 * Unimplemented. Do not use.
* DEPRECATED: This field is no longer supported. Instead of using aliases,
* please specify multiple [google.api.Endpoint][google.api.Endpoint] for each of the intended
* aliases.
* Additional names that this endpoint will be hosted on.
* </pre>
*
* <code>repeated string aliases = 2 [deprecated = true];</code>
*
* @param value The bytes of the aliases to add.
* @return This builder for chaining.
*/
@java.lang.Deprecated
public Builder addAliasesBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
ensureAliasesIsMutable();
aliases_.add(value);
onChanged();
return this;
}
private java.lang.Object target_ = "";
/**
*
*
* <pre>
* The specification of an Internet routable address of API frontend that will
* handle requests to this [API
* Endpoint](https://cloud.google.com/apis/design/glossary). It should be
* either a valid IPv4 address or a fully-qualified domain name. For example,
* "8.8.8.8" or "myservice.appspot.com".
* </pre>
*
* <code>string target = 101;</code>
*
* @return The target.
*/
public java.lang.String getTarget() {
java.lang.Object ref = target_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
target_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* The specification of an Internet routable address of API frontend that will
* handle requests to this [API
* Endpoint](https://cloud.google.com/apis/design/glossary). It should be
* either a valid IPv4 address or a fully-qualified domain name. For example,
* "8.8.8.8" or "myservice.appspot.com".
* </pre>
*
* <code>string target = 101;</code>
*
* @return The bytes for target.
*/
public com.google.protobuf.ByteString getTargetBytes() {
java.lang.Object ref = target_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
target_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* The specification of an Internet routable address of API frontend that will
* handle requests to this [API
* Endpoint](https://cloud.google.com/apis/design/glossary). It should be
* either a valid IPv4 address or a fully-qualified domain name. For example,
* "8.8.8.8" or "myservice.appspot.com".
* </pre>
*
* <code>string target = 101;</code>
*
* @param value The target to set.
* @return This builder for chaining.
*/
public Builder setTarget(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
target_ = value;
onChanged();
return this;
}
/**
*
*
* <pre>
* The specification of an Internet routable address of API frontend that will
* handle requests to this [API
* Endpoint](https://cloud.google.com/apis/design/glossary). It should be
* either a valid IPv4 address or a fully-qualified domain name. For example,
* "8.8.8.8" or "myservice.appspot.com".
* </pre>
*
* <code>string target = 101;</code>
*
* @return This builder for chaining.
*/
public Builder clearTarget() {
target_ = getDefaultInstance().getTarget();
onChanged();
return this;
}
/**
*
*
* <pre>
* The specification of an Internet routable address of API frontend that will
* handle requests to this [API
* Endpoint](https://cloud.google.com/apis/design/glossary). It should be
* either a valid IPv4 address or a fully-qualified domain name. For example,
* "8.8.8.8" or "myservice.appspot.com".
* </pre>
*
* <code>string target = 101;</code>
*
* @param value The bytes for target to set.
* @return This builder for chaining.
*/
public Builder setTargetBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
target_ = value;
onChanged();
return this;
}
private boolean allowCors_;
/**
*
*
* <pre>
* Allowing
* [CORS](https://en.wikipedia.org/wiki/Cross-origin_resource_sharing), aka
* cross-domain traffic, would allow the backends served from this endpoint to
* receive and respond to HTTP OPTIONS requests. The response will be used by
* the browser to determine whether the subsequent cross-origin request is
* allowed to proceed.
* </pre>
*
* <code>bool allow_cors = 5;</code>
*
* @return The allowCors.
*/
@java.lang.Override
public boolean getAllowCors() {
return allowCors_;
}
/**
*
*
* <pre>
* Allowing
* [CORS](https://en.wikipedia.org/wiki/Cross-origin_resource_sharing), aka
* cross-domain traffic, would allow the backends served from this endpoint to
* receive and respond to HTTP OPTIONS requests. The response will be used by
* the browser to determine whether the subsequent cross-origin request is
* allowed to proceed.
* </pre>
*
* <code>bool allow_cors = 5;</code>
*
* @param value The allowCors to set.
* @return This builder for chaining.
*/
public Builder setAllowCors(boolean value) {
allowCors_ = value;
onChanged();
return this;
}
/**
*
*
* <pre>
* Allowing
* [CORS](https://en.wikipedia.org/wiki/Cross-origin_resource_sharing), aka
* cross-domain traffic, would allow the backends served from this endpoint to
* receive and respond to HTTP OPTIONS requests. The response will be used by
* the browser to determine whether the subsequent cross-origin request is
* allowed to proceed.
* </pre>
*
* <code>bool allow_cors = 5;</code>
*
* @return This builder for chaining.
*/
public Builder clearAllowCors() {
allowCors_ = false;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.api.Endpoint)
}
// @@protoc_insertion_point(class_scope:google.api.Endpoint)
private static final com.google.api.Endpoint DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.api.Endpoint();
}
public static com.google.api.Endpoint getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Wire-format parser for Endpoint messages; delegates to the message's
// stream-parsing constructor. Shared by parser() and getParserForType().
private static final com.google.protobuf.Parser<Endpoint> PARSER =
    new com.google.protobuf.AbstractParser<Endpoint>() {
      @java.lang.Override
      public Endpoint parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new Endpoint(input, extensionRegistry);
      }
    };
public static com.google.protobuf.Parser<Endpoint> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<Endpoint> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.api.Endpoint getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.lang3;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.ObjectStreamClass;
import java.io.OutputStream;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
import com.google.gwt.core.shared.GwtIncompatible;
/**
* <p>Assists with the serialization process and performs additional functionality based
* on serialization.</p>
*
* <ul>
* <li>Deep clone using serialization
* <li>Serialize managing finally and IOException
* <li>Deserialize managing finally and IOException
* </ul>
*
* <p>This class throws exceptions for invalid {@code null} inputs.
* Each method documents its behaviour in more detail.</p>
*
* <p>#ThreadSafe#</p>
* @since 1.0
*/
@GwtIncompatible("incompatible class")
public class SerializationUtils {

    /**
     * <p>SerializationUtils instances should NOT be constructed in standard programming.
     * Instead, the class should be used as {@code SerializationUtils.clone(object)}.</p>
     *
     * <p>This constructor is public to permit tools that require a JavaBean instance
     * to operate.</p>
     * @since 2.0
     */
    public SerializationUtils() {
        super();
    }

    // Clone
    //-----------------------------------------------------------------------
    /**
     * <p>Deep clone an {@code Object} using serialization.</p>
     *
     * <p>This is many times slower than writing clone methods by hand
     * on all objects in your object graph. However, for complex object
     * graphs, or for those that don't support deep cloning this can
     * be a simple alternative implementation. Of course all the objects
     * must be {@code Serializable}.</p>
     *
     * @param <T> the type of the object involved
     * @param object the {@code Serializable} object to clone
     * @return the cloned object
     * @throws SerializationException (runtime) if the serialization fails
     */
    public static <T extends Serializable> T clone(final T object) {
        if (object == null) {
            return null;
        }
        final byte[] objectData = serialize(object);
        final ByteArrayInputStream bais = new ByteArrayInputStream(objectData);
        // Resolve classes through the original object's ClassLoader so the
        // clone works in containers with multiple classloaders (LANG-626).
        try (ClassLoaderAwareObjectInputStream in = new ClassLoaderAwareObjectInputStream(bais,
                object.getClass().getClassLoader())) {
            /*
             * when we serialize and deserialize an object,
             * it is reasonable to assume the deserialized object
             * is of the same type as the original serialized object
             */
            @SuppressWarnings("unchecked") // see above
            final T readObject = (T) in.readObject();
            return readObject;
        } catch (final ClassNotFoundException ex) {
            throw new SerializationException("ClassNotFoundException while reading cloned object data", ex);
        } catch (final IOException ex) {
            throw new SerializationException("IOException while reading or closing cloned object data", ex);
        }
    }

    /**
     * Performs a serialization roundtrip. Serializes and deserializes the given object, great for testing objects that
     * implement {@link Serializable}.
     *
     * @param <T>
     *           the type of the object involved
     * @param msg
     *            the object to roundtrip
     * @return the serialized and deserialized object
     * @since 3.3
     */
    @SuppressWarnings("unchecked") // OK, because we serialized a type `T`
    public static <T extends Serializable> T roundtrip(final T msg) {
        return (T) SerializationUtils.deserialize(SerializationUtils.serialize(msg));
    }

    // Serialize
    //-----------------------------------------------------------------------
    /**
     * <p>Serializes an {@code Object} to the specified stream.</p>
     *
     * <p>The stream will be closed once the object is written.
     * This avoids the need for a finally clause, and maybe also exception
     * handling, in the application code.</p>
     *
     * <p>The stream passed in is not buffered internally within this method.
     * This is the responsibility of your application if desired.</p>
     *
     * @param obj the object to serialize to bytes, may be null
     * @param outputStream the stream to write to, must not be null
     * @throws IllegalArgumentException if {@code outputStream} is {@code null}
     * @throws SerializationException (runtime) if the serialization fails
     */
    public static void serialize(final Serializable obj, final OutputStream outputStream) {
        Validate.isTrue(outputStream != null, "The OutputStream must not be null");
        // try-with-resources closes the ObjectOutputStream (and the wrapped
        // outputStream) whether or not writing succeeds.
        try (ObjectOutputStream out = new ObjectOutputStream(outputStream)){
            out.writeObject(obj);
        } catch (final IOException ex) {
            throw new SerializationException(ex);
        }
    }

    /**
     * <p>Serializes an {@code Object} to a byte array for
     * storage/serialization.</p>
     *
     * @param obj the object to serialize to bytes
     * @return a byte[] with the converted Serializable
     * @throws SerializationException (runtime) if the serialization fails
     */
    public static byte[] serialize(final Serializable obj) {
        // 512 bytes is an arbitrary initial buffer size to limit resizing.
        final ByteArrayOutputStream baos = new ByteArrayOutputStream(512);
        serialize(obj, baos);
        return baos.toByteArray();
    }

    // Deserialize
    //-----------------------------------------------------------------------
    /**
     * <p>
     * Deserializes an {@code Object} from the specified stream.
     * </p>
     *
     * <p>
     * The stream will be closed once the object is read. This avoids the need for a finally clause, and maybe also
     * exception handling, in the application code.
     * </p>
     *
     * <p>
     * The stream passed in is not buffered internally within this method. This is the responsibility of your
     * application if desired.
     * </p>
     *
     * <p>
     * SECURITY NOTE: never deserialize data received from an untrusted source;
     * Java native deserialization can execute attacker-controlled code.
     * </p>
     *
     * <p>
     * If the call site incorrectly types the return value, a {@link ClassCastException} is thrown from the call site.
     * Without Generics in this declaration, the call site must type cast and can cause the same ClassCastException.
     * Note that in both cases, the ClassCastException is in the call site, not in this method.
     * </p>
     *
     * @param <T> the object type to be deserialized
     * @param inputStream
     *            the serialized object input stream, must not be null
     * @return the deserialized object
     * @throws IllegalArgumentException
     *             if {@code inputStream} is {@code null}
     * @throws SerializationException
     *             (runtime) if the serialization fails
     */
    public static <T> T deserialize(final InputStream inputStream) {
        Validate.isTrue(inputStream != null, "The InputStream must not be null");
        try (ObjectInputStream in = new ObjectInputStream(inputStream)) {
            @SuppressWarnings("unchecked")
            final T obj = (T) in.readObject();
            return obj;
        } catch (final ClassNotFoundException | IOException ex) {
            throw new SerializationException(ex);
        }
    }

    /**
     * <p>
     * Deserializes a single {@code Object} from an array of bytes.
     * </p>
     *
     * <p>
     * SECURITY NOTE: never deserialize data received from an untrusted source;
     * Java native deserialization can execute attacker-controlled code.
     * </p>
     *
     * <p>
     * If the call site incorrectly types the return value, a {@link ClassCastException} is thrown from the call site.
     * Without Generics in this declaration, the call site must type cast and can cause the same ClassCastException.
     * Note that in both cases, the ClassCastException is in the call site, not in this method.
     * </p>
     *
     * @param <T> the object type to be deserialized
     * @param objectData
     *            the serialized object, must not be null
     * @return the deserialized object
     * @throws IllegalArgumentException
     *             if {@code objectData} is {@code null}
     * @throws SerializationException
     *             (runtime) if the serialization fails
     */
    public static <T> T deserialize(final byte[] objectData) {
        Validate.isTrue(objectData != null, "The byte[] must not be null");
        return SerializationUtils.deserialize(new ByteArrayInputStream(objectData));
    }

    /**
     * <p>Custom specialization of the standard JDK {@link java.io.ObjectInputStream}
     * that uses a custom <code>ClassLoader</code> to resolve a class.
     * If the specified <code>ClassLoader</code> is not able to resolve the class,
     * the context classloader of the current thread will be used.
     * This way, the standard deserialization work also in web-application
     * containers and application servers, no matter in which of the
     * <code>ClassLoader</code> the particular class that encapsulates
     * serialization/deserialization lives. </p>
     *
     * <p>For more in-depth information about the problem for which this
     * class here is a workaround, see the JIRA issue LANG-626. </p>
     */
    static class ClassLoaderAwareObjectInputStream extends ObjectInputStream {
        // Primitive type names as they appear in ObjectStreamClass.getName();
        // Class.forName cannot resolve these, so they are mapped explicitly.
        private static final Map<String, Class<?>> primitiveTypes =
                new HashMap<>();
        static {
            primitiveTypes.put("byte", byte.class);
            primitiveTypes.put("short", short.class);
            primitiveTypes.put("int", int.class);
            primitiveTypes.put("long", long.class);
            primitiveTypes.put("float", float.class);
            primitiveTypes.put("double", double.class);
            primitiveTypes.put("boolean", boolean.class);
            primitiveTypes.put("char", char.class);
            primitiveTypes.put("void", void.class);
        }
        // Preferred loader for class resolution; the thread context
        // classloader is the fallback.
        private final ClassLoader classLoader;
        /**
         * Constructor.
         * @param in The <code>InputStream</code>.
         * @param classLoader classloader to use
         * @throws IOException if an I/O error occurs while reading stream header.
         * @see java.io.ObjectInputStream
         */
        ClassLoaderAwareObjectInputStream(final InputStream in, final ClassLoader classLoader) throws IOException {
            super(in);
            this.classLoader = classLoader;
        }
        /**
         * Overridden version that uses the parameterized <code>ClassLoader</code> or the <code>ClassLoader</code>
         * of the current <code>Thread</code> to resolve the class.
         * @param desc An instance of class <code>ObjectStreamClass</code>.
         * @return A <code>Class</code> object corresponding to <code>desc</code>.
         * @throws IOException Any of the usual Input/Output exceptions.
         * @throws ClassNotFoundException If class of a serialized object cannot be found.
         */
        @Override
        protected Class<?> resolveClass(final ObjectStreamClass desc) throws IOException, ClassNotFoundException {
            final String name = desc.getName();
            try {
                // First try the supplied classloader...
                return Class.forName(name, false, classLoader);
            } catch (final ClassNotFoundException ex) {
                try {
                    // ...then the thread context classloader...
                    return Class.forName(name, false, Thread.currentThread().getContextClassLoader());
                } catch (final ClassNotFoundException cnfe) {
                    // ...finally, handle primitive type names explicitly.
                    final Class<?> cls = primitiveTypes.get(name);
                    if (cls != null) {
                        return cls;
                    }
                    throw cnfe;
                }
            }
        }
    }
}
| |
/*
* Copyright (c) 1999, 2007, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package javax.crypto;
import java.security.*;
import java.util.Enumeration;
import java.util.Hashtable;
import java.util.Vector;
import java.util.NoSuchElementException;
import java.io.Serializable;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.BufferedReader;
import java.io.IOException;
/**
* This class contains CryptoPermission objects, organized into
* PermissionCollections according to algorithm names.
*
* <p>When the <code>add</code> method is called to add a
* CryptoPermission, the CryptoPermission is stored in the
* appropriate PermissionCollection. If no such
* collection exists yet, the algorithm name associated with
* the CryptoPermission object is
* determined and the <code>newPermissionCollection</code> method
* is called on the CryptoPermission or CryptoAllPermission class to
* create the PermissionCollection and add it to the Permissions object.
*
* @see javax.crypto.CryptoPermission
* @see java.security.PermissionCollection
* @see java.security.Permissions
*
* @author Sharon Liu
* @since 1.4
*/
final class CryptoPermissions extends PermissionCollection
implements Serializable {
private static final long serialVersionUID = 4946547168093391015L;
// This class is similar to java.security.Permissions
private Hashtable perms;
/**
 * Creates a new CryptoPermissions object containing
 * no CryptoPermissionCollections.
 */
CryptoPermissions() {
    // 7 is the initial capacity; collections are added lazily per algorithm.
    perms = new Hashtable(7);
}
/**
 * Populates the crypto policy from the specified
 * InputStream into this CryptoPermissions object.
 *
 * @param in the InputStream to load from.
 *
 * @exception SecurityException if cannot load
 * successfully.
 */
void load(InputStream in)
    throws IOException, CryptoPolicyParser.ParsingException {
    CryptoPolicyParser parser = new CryptoPolicyParser();
    // Policy files are defined to be UTF-8 encoded.
    parser.read(new BufferedReader(new InputStreamReader(in, "UTF-8")));
    CryptoPermission[] parsingResult = parser.getPermissions();
    for (int i = 0; i < parsingResult.length; i++) {
        this.add(parsingResult[i]);
    }
}
/**
 * Tells whether this CryptoPermissions object currently holds any
 * CryptoPermission objects.
 *
 * @return true if no CryptoPermission objects are stored; false otherwise.
 */
boolean isEmpty() {
    return perms.size() == 0;
}
/**
 * Adds a permission object to the PermissionCollection for the
 * algorithm returned by
 * <code>(CryptoPermission)permission.getAlgorithm()</code>.
 *
 * This method creates
 * a new PermissionCollection object (and adds the permission to it)
 * if an appropriate collection does not yet exist. <p>
 *
 * @param permission the Permission object to add.
 *
 * @exception SecurityException if this CryptoPermissions object is
 * marked as readonly.
 *
 * @see isReadOnly
 */
public void add(Permission permission) {
    if (isReadOnly())
        throw new SecurityException("Attempt to add a Permission " +
                                    "to a readonly CryptoPermissions " +
                                    "object");
    // Non-crypto permissions are silently ignored.
    if (!(permission instanceof CryptoPermission))
        return;
    CryptoPermission cryptoPerm = (CryptoPermission)permission;
    // Fetch the collection for this permission's algorithm; the helper
    // creates a new collection when none exists yet.
    PermissionCollection pc =
        getPermissionCollection(cryptoPerm);
    pc.add(cryptoPerm);
    String alg = cryptoPerm.getAlgorithm();
    // Register a freshly created collection under its algorithm name.
    if (!perms.containsKey(alg)) {
        perms.put(alg, pc);
    }
}
/**
 * Checks if this object's PermissionCollection for permissions
 * of the specified permission's algorithm implies the specified
 * permission. Returns true if the checking succeeded.
 *
 * @param permission the Permission object to check.
 *
 * @return true if "permission" is implied by the permissions
 * in the PermissionCollection it belongs to, false if not.
 *
 */
public boolean implies(Permission permission) {
    // Only CryptoPermission instances can be implied by this collection.
    if (permission instanceof CryptoPermission) {
        CryptoPermission cryptoPerm = (CryptoPermission) permission;
        PermissionCollection pc =
            getPermissionCollection(cryptoPerm.getAlgorithm());
        return pc.implies(cryptoPerm);
    }
    return false;
}
/**
 * Returns an enumeration of all the Permission objects in all the
 * PermissionCollections in this CryptoPermissions object.
 *
 * @return an enumeration of all the Permissions.
 */
public Enumeration elements() {
    // go through each Permissions in the hash table
    // and call their elements() function.
    return new PermissionsEnumerator(perms.elements());
}
/**
 * Returns a CryptoPermissions object which
 * represents the minimum of the specified
 * CryptoPermissions object and this
 * CryptoPermissions object.
 *
 * @param other the CryptoPermission
 * object to compare with this object.
 */
CryptoPermissions getMinimum(CryptoPermissions other) {
    if (other == null) {
        return null;
    }
    // If either side grants CryptoAllPermission, the minimum is simply
    // the other side.
    if (this.perms.containsKey(CryptoAllPermission.ALG_NAME)) {
        return other;
    }
    if (other.perms.containsKey(CryptoAllPermission.ALG_NAME)) {
        return this;
    }
    CryptoPermissions ret = new CryptoPermissions();
    // The other side's wildcard ("*") entry, if any, applies to every
    // algorithm it has no explicit entry for.
    PermissionCollection thatWildcard =
        (PermissionCollection)other.perms.get(
            CryptoPermission.ALG_NAME_WILDCARD);
    int maxKeySize = 0;
    if (thatWildcard != null) {
        maxKeySize = ((CryptoPermission)
            thatWildcard.elements().nextElement()).getMaxKeySize();
    }
    // For each algorithm in this CryptoPermissions,
    // find out if there is anything we should add into
    // ret.
    Enumeration thisKeys = this.perms.keys();
    while (thisKeys.hasMoreElements()) {
        String alg = (String)thisKeys.nextElement();
        PermissionCollection thisPc =
            (PermissionCollection)this.perms.get(alg);
        PermissionCollection thatPc =
            (PermissionCollection)other.perms.get(alg);
        CryptoPermission[] partialResult;
        if (thatPc == null) {
            if (thatWildcard == null) {
                // The other CryptoPermissions
                // doesn't allow this given
                // algorithm at all. Just skip this
                // algorithm.
                continue;
            }
            // Cap this algorithm's permissions at the wildcard key size.
            partialResult = getMinimum(maxKeySize, thisPc);
        } else {
            partialResult = getMinimum(thisPc, thatPc);
        }
        for (int i = 0; i < partialResult.length; i++) {
            ret.add(partialResult[i]);
        }
    }
    PermissionCollection thisWildcard =
        (PermissionCollection)this.perms.get(
            CryptoPermission.ALG_NAME_WILDCARD);
    // If this CryptoPermissions doesn't
    // have a wildcard, we are done.
    if (thisWildcard == null) {
        return ret;
    }
    // Deal with the algorithms only appear
    // in the other CryptoPermissions.
    maxKeySize =
        ((CryptoPermission)
            thisWildcard.elements().nextElement()).getMaxKeySize();
    Enumeration thatKeys = other.perms.keys();
    while (thatKeys.hasMoreElements()) {
        String alg = (String)thatKeys.nextElement();
        if (this.perms.containsKey(alg)) {
            continue;
        }
        PermissionCollection thatPc =
            (PermissionCollection)other.perms.get(alg);
        CryptoPermission[] partialResult;
        partialResult = getMinimum(maxKeySize, thatPc);
        for (int i = 0; i < partialResult.length; i++) {
            ret.add(partialResult[i]);
        }
    }
    return ret;
}
/**
 * Get the minimum of the two given PermissionCollection
 * <code>thisPc</code> and <code>thatPc</code>.
 *
 * @param thisPc the first given PermissionColloection
 * object.
 *
 * @param thatPc the second given PermissionCollection
 * object.
 */
private CryptoPermission[] getMinimum(PermissionCollection thisPc,
                                      PermissionCollection thatPc) {
    Vector permVector = new Vector(2);
    Enumeration thisPcPermissions = thisPc.elements();
    // For each CryptoPermission in
    // thisPc object, do the following:
    // 1) if this CryptoPermission is implied
    // by thatPc, this CryptoPermission
    // should be returned, and we can
    // move on to check the next
    // CryptoPermission in thisPc.
    // 2) otherwise, we should return
    // all CryptoPermissions in thatPc
    // which
    // are implied by this CryptoPermission.
    // Then we can move on to the
    // next CryptoPermission in thisPc.
    while (thisPcPermissions.hasMoreElements()) {
        CryptoPermission thisCp =
            (CryptoPermission)thisPcPermissions.nextElement();
        // Note: a fresh enumeration of thatPc for every element of thisPc
        // (quadratic, but collections here are small policy entries).
        Enumeration thatPcPermissions = thatPc.elements();
        while (thatPcPermissions.hasMoreElements()) {
            CryptoPermission thatCp =
                (CryptoPermission)thatPcPermissions.nextElement();
            if (thatCp.implies(thisCp)) {
                permVector.addElement(thisCp);
                break;
            }
            if (thisCp.implies(thatCp)) {
                permVector.addElement(thatCp);
            }
        }
    }
    CryptoPermission[] ret = new CryptoPermission[permVector.size()];
    permVector.copyInto(ret);
    return ret;
}
/**
 * Returns all the CryptoPermission objects in the given
 * PermissionCollection object
 * whose maximum keysize is no greater than <code>maxKeySize</code>.
 * For all CryptoPermission objects with a maximum keysize greater
 * than <code>maxKeySize</code>, this method constructs a
 * corresponding CryptoPermission object whose maximum keysize is
 * set to <code>maxKeySize</code>, and includes that in the result.
 *
 * @param maxKeySize the given maximum key size.
 *
 * @param pc the given PermissionCollection object.
 *
 * @return the (possibly capped) permissions from {@code pc}.
 */
private CryptoPermission[] getMinimum(int maxKeySize,
                                      PermissionCollection pc) {
    Vector collected = new Vector(1);
    for (Enumeration e = pc.elements(); e.hasMoreElements();) {
        CryptoPermission perm = (CryptoPermission) e.nextElement();
        if (perm.getMaxKeySize() <= maxKeySize) {
            // Already within the cap; keep the permission as-is.
            collected.addElement(perm);
            continue;
        }
        // Rebuild the permission with the key size capped at maxKeySize,
        // preserving the parameter spec (if checked) and exemption mechanism.
        CryptoPermission capped;
        if (perm.getCheckParam()) {
            capped = new CryptoPermission(perm.getAlgorithm(),
                                          maxKeySize,
                                          perm.getAlgorithmParameterSpec(),
                                          perm.getExemptionMechanism());
        } else {
            capped = new CryptoPermission(perm.getAlgorithm(),
                                          maxKeySize,
                                          perm.getExemptionMechanism());
        }
        collected.addElement(capped);
    }
    CryptoPermission[] out = new CryptoPermission[collected.size()];
    collected.copyInto(out);
    return out;
}
/**
 * Returns the PermissionCollection for the
 * specified algorithm. Returns null if there
 * isn't such a PermissionCollection.
 *
 * <p>Lookup order: CryptoAllPermission (trumps everything), then the
 * exact algorithm name, then the wildcard entry.
 *
 * @param alg the algorithm name.
 */
PermissionCollection getPermissionCollection(String alg) {
    // If this CryptoPermissions includes CryptoAllPermission,
    // we should return CryptoAllPermission.
    if (perms.containsKey(CryptoAllPermission.ALG_NAME)) {
        return (PermissionCollection) perms.get(CryptoAllPermission.ALG_NAME);
    }
    // Fall back to the wildcard collection (if any) when there is no
    // entry for the requested algorithm.
    Object entry = perms.get(alg);
    if (entry == null) {
        entry = perms.get(CryptoPermission.ALG_NAME_WILDCARD);
    }
    return (PermissionCollection) entry;
}
/**
 * Returns the PermissionCollection for the algorithm
 * associated with the specified CryptoPermission
 * object. Creates such a PermissionCollection
 * if such a PermissionCollection does not
 * exist yet.
 *
 * <p>Note: a newly created collection is NOT stored back into
 * {@code perms}; the caller is responsible for adding it.
 *
 * @param cryptoPerm the CryptoPermission object.
 */
private PermissionCollection getPermissionCollection(
        CryptoPermission cryptoPerm) {
    Object existing = perms.get(cryptoPerm.getAlgorithm());
    return (existing != null)
            ? (PermissionCollection) existing
            : cryptoPerm.newPermissionCollection();
}
}
/**
 * Flattens an enumeration of PermissionCollection objects into a single
 * enumeration over all the Permission objects they contain. Empty
 * collections are skipped transparently.
 */
final class PermissionsEnumerator implements Enumeration {

    /** Enumeration over the underlying PermissionCollection objects. */
    private Enumeration perms;

    /** Enumeration over the collection currently being drained; null once exhausted. */
    private Enumeration permset;

    PermissionsEnumerator(Enumeration e) {
        perms = e;
        // Position on the first collection that actually has elements.
        permset = advanceToNonEmpty();
    }

    public synchronized boolean hasMoreElements() {
        // A null permset means every collection has been fully drained.
        if (permset == null) {
            return false;
        }
        // Anything left in the collection we are currently draining?
        if (permset.hasMoreElements()) {
            return true;
        }
        // Current one is exhausted; move on to the next non-empty one.
        permset = advanceToNonEmpty();
        return permset != null;
    }

    public synchronized Object nextElement() {
        // hasMoreElements() advances permset to the next non-empty
        // collection as a side effect, so nextElement() below is safe.
        if (!hasMoreElements()) {
            throw new NoSuchElementException("PermissionsEnumerator");
        }
        return permset.nextElement();
    }

    /** Returns the next collection's enumeration that has elements, or null. */
    private Enumeration advanceToNonEmpty() {
        while (perms.hasMoreElements()) {
            Enumeration candidate =
                    ((PermissionCollection) perms.nextElement()).elements();
            if (candidate.hasMoreElements()) {
                return candidate;
            }
        }
        return null;
    }
}
| |
package ca.uhn.fhir.rest.method;
/*
* #%L
* HAPI FHIR - Core Library
* %%
* Copyright (C) 2014 - 2016 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import org.apache.commons.lang3.StringUtils;
import org.hl7.fhir.instance.model.api.IBaseResource;
import ca.uhn.fhir.context.ConfigurationException;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.model.api.annotation.Description;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.model.valueset.BundleTypeEnum;
import ca.uhn.fhir.rest.annotation.Search;
import ca.uhn.fhir.rest.api.RequestTypeEnum;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.client.BaseHttpClientInvocation;
import ca.uhn.fhir.rest.param.BaseQueryParameter;
import ca.uhn.fhir.rest.server.Constants;
import ca.uhn.fhir.rest.server.IBundleProvider;
import ca.uhn.fhir.rest.server.IRestfulServer;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
/**
 * Binding for resource provider methods annotated with {@link Search}.
 * <p>
 * On the server side this class decides whether an incoming request matches
 * the bound method ({@link #incomingServerRequestMatchesMethod}) and invokes
 * it ({@link #invokeServer}); on the client side it translates method
 * arguments into a GET/POST search invocation ({@link #invokeClient}).
 */
public class SearchMethodBinding extends BaseResourceReturningMethodBinding {
	private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(SearchMethodBinding.class);

	private String myCompartmentName;
	private String myDescription;
	private Integer myIdParamIndex;
	private String myQueryName;
	private boolean myAllowUnknownParams;

	/**
	 * Constructor.
	 *
	 * @param theReturnResourceType the resource type the method returns
	 * @param theMethod the provider method carrying the {@link Search} annotation
	 * @param theContext the FHIR context
	 * @param theProvider the provider instance declaring the method
	 * @throws ConfigurationException if the method declares a special (underscore)
	 *            parameter that the server handles itself, or declares an ID
	 *            parameter without being a compartment search
	 */
	public SearchMethodBinding(Class<? extends IBaseResource> theReturnResourceType, Method theMethod, FhirContext theContext, Object theProvider) {
		super(theReturnResourceType, theMethod, theContext, theProvider);
		Search search = theMethod.getAnnotation(Search.class);
		this.myQueryName = StringUtils.defaultIfBlank(search.queryName(), null);
		this.myCompartmentName = StringUtils.defaultIfBlank(search.compartmentName(), null);
		this.myIdParamIndex = MethodUtil.findIdParameterIndex(theMethod, getContext());
		this.myAllowUnknownParams = search.allowUnknownParams();

		Description desc = theMethod.getAnnotation(Description.class);
		if (desc != null) {
			// Prefer the formal definition; fall back to the short one.
			if (isNotBlank(desc.formalDefinition())) {
				myDescription = StringUtils.defaultIfBlank(desc.formalDefinition(), null);
			} else {
				myDescription = StringUtils.defaultIfBlank(desc.shortDefinition(), null);
			}
		}

		/*
		 * Check for parameter combinations and names that are invalid
		 */
		List<IParameter> parameters = getParameters();
		for (int i = 0; i < parameters.size(); i++) {
			IParameter next = parameters.get(i);
			if (!(next instanceof SearchParameter)) {
				continue;
			}
			SearchParameter sp = (SearchParameter) next;
			// Special parameters (leading "_") in ALLOWED_PARAMS are handled by
			// the server itself and may not be declared on a search method
			if (sp.getName().startsWith("_")) {
				if (ALLOWED_PARAMS.contains(sp.getName())) {
					String msg = getContext().getLocalizer().getMessage(getClass().getName() + ".invalidSpecialParamName", theMethod.getName(), theMethod.getDeclaringClass().getSimpleName(),
							sp.getName());
					throw new ConfigurationException(msg);
				}
			}
		}

		/*
		 * Only compartment searching methods may have an ID parameter
		 */
		if (isBlank(myCompartmentName) && myIdParamIndex != null) {
			String msg = theContext.getLocalizer().getMessage(getClass().getName() + ".idWithoutCompartment", theMethod.getName(), theMethod.getDeclaringClass());
			throw new ConfigurationException(msg);
		}
	}

	/** Returns the description taken from the method's {@link Description} annotation, or null. */
	public String getDescription() {
		return myDescription;
	}

	@Override
	public RestOperationTypeEnum getRestOperationType() {
		return RestOperationTypeEnum.SEARCH_TYPE;
	}

	@Override
	protected BundleTypeEnum getResponseBundleType() {
		return BundleTypeEnum.SEARCHSET;
	}

	@Override
	public ReturnTypeEnum getReturnType() {
		return ReturnTypeEnum.BUNDLE;
	}

	/**
	 * Decides whether the incoming request can be dispatched to this method:
	 * checks HTTP verb, _search operation, compartment, _query name, and that
	 * every required parameter is present (and, unless allowUnknownParams is
	 * set, that no unrecognized parameter is present).
	 */
	@Override
	public boolean incomingServerRequestMatchesMethod(RequestDetails theRequest) {
		if (theRequest.getId() != null && myIdParamIndex == null) {
			// FIXED: the message has two placeholders but only one argument was supplied
			ourLog.trace("Method {} doesn't match because ID is not null: {}", getMethod(), theRequest.getId());
			return false;
		}
		if (theRequest.getRequestType() == RequestTypeEnum.GET && theRequest.getOperation() != null && !Constants.PARAM_SEARCH.equals(theRequest.getOperation())) {
			// FIXED: the "Method {}" placeholder previously received the request ID
			ourLog.trace("Method {} doesn't match because request type is GET but operation is not null: {}", getMethod(), theRequest.getOperation());
			return false;
		}
		if (theRequest.getRequestType() == RequestTypeEnum.POST && !Constants.PARAM_SEARCH.equals(theRequest.getOperation())) {
			ourLog.trace("Method {} doesn't match because request type is POST but operation is not _search: {}", getMethod(), theRequest.getOperation());
			return false;
		}
		if (theRequest.getRequestType() != RequestTypeEnum.GET && theRequest.getRequestType() != RequestTypeEnum.POST) {
			// FIXED: second placeholder was never given an argument
			ourLog.trace("Method {} doesn't match because request type is {}", getMethod(), theRequest.getRequestType());
			return false;
		}
		if (!StringUtils.equals(myCompartmentName, theRequest.getCompartmentName())) {
			ourLog.trace("Method {} doesn't match because it is for compartment {} but request is compartment {}", new Object[] { getMethod(), myCompartmentName, theRequest.getCompartmentName() });
			return false;
		}

		// This is used to track all the parameters so we can reject queries that
		// have additional params we don't understand
		Set<String> methodParamsTemp = new HashSet<String>();

		Set<String> unqualifiedNames = theRequest.getUnqualifiedToQualifiedNames().keySet();
		Set<String> qualifiedParamNames = theRequest.getParameters().keySet();
		for (int i = 0; i < this.getParameters().size(); i++) {
			if (!(getParameters().get(i) instanceof BaseQueryParameter)) {
				continue;
			}
			BaseQueryParameter temp = (BaseQueryParameter) getParameters().get(i);
			String name = temp.getName();
			if (temp.isRequired()) {
				if (qualifiedParamNames.contains(name)) {
					QualifierDetails qualifiers = extractQualifiersFromParameterName(name);
					if (qualifiers.passes(temp.getQualifierWhitelist(), temp.getQualifierBlacklist())) {
						methodParamsTemp.add(name);
					}
				}
				if (unqualifiedNames.contains(name)) {
					List<String> qualifiedNames = theRequest.getUnqualifiedToQualifiedNames().get(name);
					qualifiedNames = processWhitelistAndBlacklist(qualifiedNames, temp.getQualifierWhitelist(), temp.getQualifierBlacklist());
					methodParamsTemp.addAll(qualifiedNames);
				}
				// A required parameter missing from the request means no match
				if (!qualifiedParamNames.contains(name) && !unqualifiedNames.contains(name)) {
					ourLog.trace("Method {} doesn't match param '{}' is not present", getMethod().getName(), name);
					return false;
				}
			} else {
				if (qualifiedParamNames.contains(name)) {
					QualifierDetails qualifiers = extractQualifiersFromParameterName(name);
					if (qualifiers.passes(temp.getQualifierWhitelist(), temp.getQualifierBlacklist())) {
						methodParamsTemp.add(name);
					}
				}
				if (unqualifiedNames.contains(name)) {
					List<String> qualifiedNames = theRequest.getUnqualifiedToQualifiedNames().get(name);
					qualifiedNames = processWhitelistAndBlacklist(qualifiedNames, temp.getQualifierWhitelist(), temp.getQualifierBlacklist());
					methodParamsTemp.addAll(qualifiedNames);
				}
				// BUGFIX: second condition previously repeated qualifiedParamNames,
				// so the unqualified form of an absent optional param was ignored
				if (!qualifiedParamNames.contains(name) && !unqualifiedNames.contains(name)) {
					methodParamsTemp.add(name);
				}
			}
		}

		// _query must agree with the binding's queryName (or be absent when none is bound)
		if (myQueryName != null) {
			String[] queryNameValues = theRequest.getParameters().get(Constants.PARAM_QUERY);
			if (queryNameValues != null && StringUtils.isNotBlank(queryNameValues[0])) {
				String queryName = queryNameValues[0];
				if (!myQueryName.equals(queryName)) {
					ourLog.trace("Query name does not match {}", myQueryName);
					return false;
				} else {
					methodParamsTemp.add(Constants.PARAM_QUERY);
				}
			} else {
				ourLog.trace("Query name does not match {}", myQueryName);
				return false;
			}
		} else {
			String[] queryNameValues = theRequest.getParameters().get(Constants.PARAM_QUERY);
			if (queryNameValues != null && StringUtils.isNotBlank(queryNameValues[0])) {
				ourLog.trace("Query has name");
				return false;
			}
		}

		// Well-known special params are always acceptable
		for (String next : theRequest.getParameters().keySet()) {
			if (ALLOWED_PARAMS.contains(next)) {
				methodParamsTemp.add(next);
			}
		}
		// Unless unknown params are explicitly allowed, reject any leftover param
		Set<String> keySet = theRequest.getParameters().keySet();
		if (!myAllowUnknownParams) {
			for (String next : keySet) {
				if (!methodParamsTemp.contains(next)) {
					return false;
				}
			}
		}

		return true;
	}

	/**
	 * Translates the client-side method arguments into an HTTP search
	 * invocation, adding the _query parameter when a named query is bound.
	 */
	@Override
	public BaseHttpClientInvocation invokeClient(Object[] theArgs) throws InternalErrorException {
		assert (myQueryName == null || ((theArgs != null ? theArgs.length : 0) == getParameters().size())) : "Wrong number of arguments: " + (theArgs != null ? theArgs.length : "null");

		Map<String, List<String>> queryStringArgs = new LinkedHashMap<String, List<String>>();

		if (myQueryName != null) {
			queryStringArgs.put(Constants.PARAM_QUERY, Collections.singletonList(myQueryName));
		}

		IdDt id = (IdDt) (myIdParamIndex != null ? theArgs[myIdParamIndex] : null);

		String resourceName = getResourceName();
		if (theArgs != null) {
			for (int idx = 0; idx < theArgs.length; idx++) {
				IParameter nextParam = getParameters().get(idx);
				nextParam.translateClientArgumentIntoQueryArgument(getContext(), theArgs[idx], queryStringArgs, null);
			}
		}

		return createSearchInvocation(getContext(), resourceName, queryStringArgs, id, myCompartmentName, null);
	}

	/**
	 * Invokes the bound provider method on the server, injecting the request
	 * ID into the ID parameter slot for compartment searches.
	 */
	@Override
	public IBundleProvider invokeServer(IRestfulServer theServer, RequestDetails theRequest, Object[] theMethodParams) throws InvalidRequestException, InternalErrorException {
		if (myIdParamIndex != null) {
			theMethodParams[myIdParamIndex] = theRequest.getId();
		}

		Object response = invokeServerMethod(theServer, theRequest, theMethodParams);

		return toResourceList(response);
	}

	@Override
	protected boolean isAddContentLocationHeader() {
		return false;
	}

	/**
	 * Filters the given qualified parameter names down to those whose
	 * qualifiers pass the supplied whitelist/blacklist; returns the input
	 * unchanged when neither list is provided.
	 */
	private List<String> processWhitelistAndBlacklist(List<String> theQualifiedNames, Set<String> theQualifierWhitelist, Set<String> theQualifierBlacklist) {
		if (theQualifierWhitelist == null && theQualifierBlacklist == null) {
			return theQualifiedNames;
		}
		ArrayList<String> retVal = new ArrayList<String>(theQualifiedNames.size());
		for (String next : theQualifiedNames) {
			QualifierDetails qualifiers = extractQualifiersFromParameterName(next);
			if (!qualifiers.passes(theQualifierWhitelist, theQualifierBlacklist)) {
				continue;
			}
			retVal.add(next);
		}
		return retVal;
	}

	@Override
	public String toString() {
		return getMethod().toString();
	}

	/**
	 * Builds a client invocation for a (possibly compartment-scoped) search.
	 * When no search style is given, GET is chosen for query strings shorter
	 * than 5000 characters and POST otherwise.
	 *
	 * @throws InvalidRequestException if a compartment name is given without a usable ID
	 */
	public static BaseHttpClientInvocation createSearchInvocation(FhirContext theContext, String theResourceName, Map<String, List<String>> theParameters, IdDt theId, String theCompartmentName,
			SearchStyleEnum theSearchStyle) {
		SearchStyleEnum searchStyle = theSearchStyle;
		if (searchStyle == null) {
			int length = 0;
			for (Entry<String, List<String>> nextEntry : theParameters.entrySet()) {
				length += nextEntry.getKey().length();
				for (String next : nextEntry.getValue()) {
					length += next.length();
				}
			}
			// Heuristic: keep short searches in the URL, push long ones into a POST body
			if (length < 5000) {
				searchStyle = SearchStyleEnum.GET;
			} else {
				searchStyle = SearchStyleEnum.POST;
			}
		}

		BaseHttpClientInvocation invocation;

		boolean compartmentSearch = false;
		if (theCompartmentName != null) {
			if (theId == null || !theId.hasIdPart()) {
				String msg = theContext.getLocalizer().getMessage(SearchMethodBinding.class.getName() + ".idNullForCompartmentSearch");
				throw new InvalidRequestException(msg);
			} else {
				compartmentSearch = true;
			}
		}

		/*
		 * Are we doing a get (GET [base]/Patient?name=foo) or a get with search (GET [base]/Patient/_search?name=foo) or a post (POST [base]/Patient with parameters in the POST body)
		 */
		switch (searchStyle) {
		case GET:
		default:
			if (compartmentSearch) {
				invocation = new HttpGetClientInvocation(theParameters, theResourceName, theId.getIdPart(), theCompartmentName);
			} else {
				invocation = new HttpGetClientInvocation(theParameters, theResourceName);
			}
			break;
		case GET_WITH_SEARCH:
			if (compartmentSearch) {
				invocation = new HttpGetClientInvocation(theParameters, theResourceName, theId.getIdPart(), theCompartmentName, Constants.PARAM_SEARCH);
			} else {
				invocation = new HttpGetClientInvocation(theParameters, theResourceName, Constants.PARAM_SEARCH);
			}
			break;
		case POST:
			if (compartmentSearch) {
				invocation = new HttpPostClientInvocation(theContext, theParameters, theResourceName, theId.getIdPart(), theCompartmentName, Constants.PARAM_SEARCH);
			} else {
				invocation = new HttpPostClientInvocation(theContext, theParameters, theResourceName, Constants.PARAM_SEARCH);
			}
		}

		return invocation;
	}

	/**
	 * Splits a search parameter name such as {@code name.chain:modifier} into
	 * its dot qualifier and colon qualifier. Whichever qualifier starts first
	 * runs until the other begins; the later one runs to the end of the name.
	 */
	public static QualifierDetails extractQualifiersFromParameterName(String theParamName) {
		QualifierDetails retVal = new QualifierDetails();
		if (theParamName == null || theParamName.length() == 0) {
			return retVal;
		}

		// Record the first '.' and the first ':' only
		int dotIdx = -1;
		int colonIdx = -1;
		for (int idx = 0; idx < theParamName.length(); idx++) {
			char nextChar = theParamName.charAt(idx);
			if (nextChar == '.' && dotIdx == -1) {
				dotIdx = idx;
			} else if (nextChar == ':' && colonIdx == -1) {
				colonIdx = idx;
			}
		}

		if (dotIdx != -1 && colonIdx != -1) {
			if (dotIdx < colonIdx) {
				retVal.setDotQualifier(theParamName.substring(dotIdx, colonIdx));
				retVal.setColonQualifier(theParamName.substring(colonIdx));
			} else {
				retVal.setColonQualifier(theParamName.substring(colonIdx, dotIdx));
				retVal.setDotQualifier(theParamName.substring(dotIdx));
			}
		} else if (dotIdx != -1) {
			retVal.setDotQualifier(theParamName.substring(dotIdx));
		} else if (colonIdx != -1) {
			retVal.setColonQualifier(theParamName.substring(colonIdx));
		}

		return retVal;
	}

	/**
	 * Holds the dot and colon qualifiers extracted from a parameter name and
	 * checks them against a qualifier whitelist/blacklist.
	 */
	public static class QualifierDetails {

		private String myColonQualifier;
		private String myDotQualifier;

		/**
		 * Returns true when this parameter's qualifiers are acceptable: present
		 * in the whitelist (or the whitelist allows any dot qualifier via ".*",
		 * or an unqualified name via ".") and absent from the blacklist.
		 */
		public boolean passes(Set<String> theQualifierWhitelist, Set<String> theQualifierBlacklist) {
			if (theQualifierWhitelist != null) {
				if (!theQualifierWhitelist.contains(".*")) {
					if (myDotQualifier != null) {
						if (!theQualifierWhitelist.contains(myDotQualifier)) {
							return false;
						}
					} else {
						if (!theQualifierWhitelist.contains(".")) {
							return false;
						}
					}
				}
			}
			if (theQualifierBlacklist != null) {
				if (myDotQualifier != null) {
					if (theQualifierBlacklist.contains(myDotQualifier)) {
						return false;
					}
				}
				if (myColonQualifier != null) {
					if (theQualifierBlacklist.contains(myColonQualifier)) {
						return false;
					}
				}
			}
			return true;
		}

		public void setColonQualifier(String theColonQualifier) {
			myColonQualifier = theColonQualifier;
		}

		public void setDotQualifier(String theDotQualifier) {
			myDotQualifier = theDotQualifier;
		}

	}

	/** Builds a client invocation from a pre-assembled search URL. */
	public static BaseHttpClientInvocation createSearchInvocation(String theSearchUrl, Map<String, List<String>> theParams) {
		return new HttpGetClientInvocation(theParams, theSearchUrl);
	}

}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.orc;
import com.facebook.presto.orc.metadata.BooleanStatistics;
import com.facebook.presto.orc.metadata.ColumnStatistics;
import com.facebook.presto.orc.metadata.DateStatistics;
import com.facebook.presto.orc.metadata.DoubleStatistics;
import com.facebook.presto.orc.metadata.IntegerStatistics;
import com.facebook.presto.orc.metadata.StringStatistics;
import com.facebook.presto.spi.Domain;
import com.facebook.presto.spi.Range;
import com.facebook.presto.spi.SortedRangeSet;
import io.airlift.slice.Slice;
import org.testng.annotations.Test;
import static com.facebook.presto.orc.TupleDomainOrcPredicate.getDomain;
import static com.facebook.presto.orc.metadata.OrcMetadataReader.getMaxSlice;
import static com.facebook.presto.orc.metadata.OrcMetadataReader.getMinSlice;
import static com.facebook.presto.spi.Domain.all;
import static com.facebook.presto.spi.Domain.create;
import static com.facebook.presto.spi.Domain.none;
import static com.facebook.presto.spi.Domain.notNull;
import static com.facebook.presto.spi.Domain.onlyNull;
import static com.facebook.presto.spi.Domain.singleValue;
import static com.facebook.presto.spi.Range.greaterThanOrEqual;
import static com.facebook.presto.spi.Range.lessThanOrEqual;
import static com.facebook.presto.spi.Range.range;
import static com.facebook.presto.spi.type.BigintType.BIGINT;
import static com.facebook.presto.spi.type.BooleanType.BOOLEAN;
import static com.facebook.presto.spi.type.DateType.DATE;
import static com.facebook.presto.spi.type.DoubleType.DOUBLE;
import static com.facebook.presto.spi.type.VarcharType.VARCHAR;
import static io.airlift.slice.Slices.utf8Slice;
import static org.testng.Assert.assertEquals;
/**
 * Tests for TupleDomainOrcPredicate.getDomain: verifies that ORC column
 * statistics are translated into the correct Presto Domain. The observed
 * contract across all tests: a zero row count yields none(); missing stats
 * with rows yields all(); numberOfValues below the row count marks the
 * domain nullable; equal min/max collapses to a single value; one-sided
 * stats produce half-open ranges.
 */
public class TestTupleDomainOrcPredicate
{
@Test
public void testBoolean()
throws Exception
{
// No statistics at all: empty segment -> none, non-empty -> all
assertEquals(getDomain(BOOLEAN, 0, null), none(Boolean.class));
assertEquals(getDomain(BOOLEAN, 10, null), all(Boolean.class));
// Zero rows always yields none, regardless of stats content
assertEquals(getDomain(BOOLEAN, 0, booleanColumnStats(null, null)), none(Boolean.class));
assertEquals(getDomain(BOOLEAN, 0, booleanColumnStats(0L, null)), none(Boolean.class));
assertEquals(getDomain(BOOLEAN, 0, booleanColumnStats(0L, 0L)), none(Boolean.class));
// All values null -> onlyNull; no nulls but no true-count -> notNull
assertEquals(getDomain(BOOLEAN, 10, booleanColumnStats(0L, 0L)), onlyNull(Boolean.class));
assertEquals(getDomain(BOOLEAN, 10, booleanColumnStats(10L, null)), notNull(Boolean.class));
// True-count equal to value count -> all true; zero -> all false
assertEquals(getDomain(BOOLEAN, 10, booleanColumnStats(10L, 10L)), singleValue(true));
assertEquals(getDomain(BOOLEAN, 10, booleanColumnStats(10L, 0L)), singleValue(false));
// Mixed true/false with nulls present (10 values of 20 rows)
assertEquals(getDomain(BOOLEAN, 20, booleanColumnStats(10L, 5L)), all(Boolean.class));
assertEquals(getDomain(BOOLEAN, 20, booleanColumnStats(10L, 10L)), new Domain(SortedRangeSet.of(Range.equal(true)), true));
assertEquals(getDomain(BOOLEAN, 20, booleanColumnStats(10L, 0L)), new Domain(SortedRangeSet.of(Range.equal(false)), true));
}
// Builds ColumnStatistics carrying only a BooleanStatistics (or none when
// trueValueCount is null)
private static ColumnStatistics booleanColumnStats(Long numberOfValues, Long trueValueCount)
{
BooleanStatistics booleanStatistics = null;
if (trueValueCount != null) {
booleanStatistics = new BooleanStatistics(trueValueCount);
}
return new ColumnStatistics(numberOfValues, booleanStatistics, null, null, null, null);
}
@Test
public void testBigint()
throws Exception
{
// Missing/empty statistics cases
assertEquals(getDomain(BIGINT, 0, null), none(Long.class));
assertEquals(getDomain(BIGINT, 10, null), all(Long.class));
assertEquals(getDomain(BIGINT, 0, integerColumnStats(null, null, null)), none(Long.class));
assertEquals(getDomain(BIGINT, 0, integerColumnStats(0L, null, null)), none(Long.class));
assertEquals(getDomain(BIGINT, 0, integerColumnStats(0L, 100L, 100L)), none(Long.class));
assertEquals(getDomain(BIGINT, 10, integerColumnStats(0L, null, null)), onlyNull(Long.class));
assertEquals(getDomain(BIGINT, 10, integerColumnStats(10L, null, null)), notNull(Long.class));
// min == max collapses to a single value
assertEquals(getDomain(BIGINT, 10, integerColumnStats(10L, 100L, 100L)), singleValue(100L));
// Full and one-sided ranges, no nulls (numberOfValues == rows)
assertEquals(getDomain(BIGINT, 10, integerColumnStats(10L, 0L, 100L)), create(SortedRangeSet.of(range(0L, true, 100L, true)), false));
assertEquals(getDomain(BIGINT, 10, integerColumnStats(10L, null, 100L)), create(SortedRangeSet.of(lessThanOrEqual(100L)), false));
assertEquals(getDomain(BIGINT, 10, integerColumnStats(10L, 0L, null)), create(SortedRangeSet.of(greaterThanOrEqual(0L)), false));
// Same ranges but nullable (numberOfValues < rows)
assertEquals(getDomain(BIGINT, 10, integerColumnStats(5L, 0L, 100L)), create(SortedRangeSet.of(range(0L, true, 100L, true)), true));
assertEquals(getDomain(BIGINT, 10, integerColumnStats(5L, null, 100L)), create(SortedRangeSet.of(lessThanOrEqual(100L)), true));
assertEquals(getDomain(BIGINT, 10, integerColumnStats(5L, 0L, null)), create(SortedRangeSet.of(greaterThanOrEqual(0L)), true));
}
// Builds ColumnStatistics carrying only an IntegerStatistics
private static ColumnStatistics integerColumnStats(Long numberOfValues, Long minimum, Long maximum)
{
return new ColumnStatistics(numberOfValues, null, new IntegerStatistics(minimum, maximum), null, null, null);
}
@Test
public void testDouble()
throws Exception
{
// Same case matrix as testBigint, for DOUBLE/DoubleStatistics
assertEquals(getDomain(DOUBLE, 0, null), none(Double.class));
assertEquals(getDomain(DOUBLE, 10, null), all(Double.class));
assertEquals(getDomain(DOUBLE, 0, doubleColumnStats(null, null, null)), none(Double.class));
assertEquals(getDomain(DOUBLE, 0, doubleColumnStats(0L, null, null)), none(Double.class));
assertEquals(getDomain(DOUBLE, 0, doubleColumnStats(0L, 42.24, 42.24)), none(Double.class));
assertEquals(getDomain(DOUBLE, 10, doubleColumnStats(0L, null, null)), onlyNull(Double.class));
assertEquals(getDomain(DOUBLE, 10, doubleColumnStats(10L, null, null)), notNull(Double.class));
assertEquals(getDomain(DOUBLE, 10, doubleColumnStats(10L, 42.24, 42.24)), singleValue(42.24));
assertEquals(getDomain(DOUBLE, 10, doubleColumnStats(10L, 3.3, 42.24)), create(SortedRangeSet.of(range(3.3, true, 42.24, true)), false));
assertEquals(getDomain(DOUBLE, 10, doubleColumnStats(10L, null, 42.24)), create(SortedRangeSet.of(lessThanOrEqual(42.24)), false));
assertEquals(getDomain(DOUBLE, 10, doubleColumnStats(10L, 3.3, null)), create(SortedRangeSet.of(greaterThanOrEqual(3.3)), false));
assertEquals(getDomain(DOUBLE, 10, doubleColumnStats(5L, 3.3, 42.24)), create(SortedRangeSet.of(range(3.3, true, 42.24, true)), true));
assertEquals(getDomain(DOUBLE, 10, doubleColumnStats(5L, null, 42.24)), create(SortedRangeSet.of(lessThanOrEqual(42.24)), true));
assertEquals(getDomain(DOUBLE, 10, doubleColumnStats(5L, 3.3, null)), create(SortedRangeSet.of(greaterThanOrEqual(3.3)), true));
}
// Builds ColumnStatistics carrying only a DoubleStatistics
private static ColumnStatistics doubleColumnStats(Long numberOfValues, Double minimum, Double maximum)
{
return new ColumnStatistics(numberOfValues, null, null, new DoubleStatistics(minimum, maximum), null, null);
}
@Test
public void testString()
throws Exception
{
// Same case matrix, for VARCHAR/StringStatistics (Slice-based bounds)
assertEquals(getDomain(VARCHAR, 0, null), none(Slice.class));
assertEquals(getDomain(VARCHAR, 10, null), all(Slice.class));
assertEquals(getDomain(VARCHAR, 0, stringColumnStats(null, null, null)), none(Slice.class));
assertEquals(getDomain(VARCHAR, 0, stringColumnStats(0L, null, null)), none(Slice.class));
assertEquals(getDomain(VARCHAR, 0, stringColumnStats(0L, "taco", "taco")), none(Slice.class));
assertEquals(getDomain(VARCHAR, 10, stringColumnStats(0L, null, null)), onlyNull(Slice.class));
assertEquals(getDomain(VARCHAR, 10, stringColumnStats(10L, null, null)), notNull(Slice.class));
assertEquals(getDomain(VARCHAR, 10, stringColumnStats(10L, "taco", "taco")), singleValue(utf8Slice("taco")));
assertEquals(getDomain(VARCHAR, 10, stringColumnStats(10L, "apple", "taco")), create(SortedRangeSet.of(range(utf8Slice("apple"), true, utf8Slice("taco"), true)), false));
assertEquals(getDomain(VARCHAR, 10, stringColumnStats(10L, null, "taco")), create(SortedRangeSet.of(lessThanOrEqual(utf8Slice("taco"))), false));
assertEquals(getDomain(VARCHAR, 10, stringColumnStats(10L, "apple", null)), create(SortedRangeSet.of(greaterThanOrEqual(utf8Slice("apple"))), false));
assertEquals(getDomain(VARCHAR, 10, stringColumnStats(5L, "apple", "taco")), create(SortedRangeSet.of(range(utf8Slice("apple"), true, utf8Slice("taco"), true)), true));
assertEquals(getDomain(VARCHAR, 10, stringColumnStats(5L, null, "taco")), create(SortedRangeSet.of(lessThanOrEqual(utf8Slice("taco"))), true));
assertEquals(getDomain(VARCHAR, 10, stringColumnStats(5L, "apple", null)), create(SortedRangeSet.of(greaterThanOrEqual(utf8Slice("apple"))), true));
}
// Builds ColumnStatistics carrying only a StringStatistics; bounds go through
// the metadata reader's min/max slice conversion
private static ColumnStatistics stringColumnStats(Long numberOfValues, String minimum, String maximum)
{
return new ColumnStatistics(numberOfValues, null, null, null, new StringStatistics(getMinSlice(minimum), getMaxSlice(maximum)), null);
}
@Test
public void testDate()
throws Exception
{
// Same case matrix, for DATE (int day stats mapped to long domain values)
assertEquals(getDomain(DATE, 0, null), none(Long.class));
assertEquals(getDomain(DATE, 10, null), all(Long.class));
assertEquals(getDomain(DATE, 0, dateColumnStats(null, null, null)), none(Long.class));
assertEquals(getDomain(DATE, 0, dateColumnStats(0L, null, null)), none(Long.class));
assertEquals(getDomain(DATE, 0, dateColumnStats(0L, 100, 100)), none(Long.class));
assertEquals(getDomain(DATE, 10, dateColumnStats(0L, null, null)), onlyNull(Long.class));
assertEquals(getDomain(DATE, 10, dateColumnStats(10L, null, null)), notNull(Long.class));
assertEquals(getDomain(DATE, 10, dateColumnStats(10L, 100, 100)), singleValue(100L));
assertEquals(getDomain(DATE, 10, dateColumnStats(10L, 0, 100)), create(SortedRangeSet.of(range(0L, true, 100L, true)), false));
assertEquals(getDomain(DATE, 10, dateColumnStats(10L, null, 100)), create(SortedRangeSet.of(lessThanOrEqual(100L)), false));
assertEquals(getDomain(DATE, 10, dateColumnStats(10L, 0, null)), create(SortedRangeSet.of(greaterThanOrEqual(0L)), false));
assertEquals(getDomain(DATE, 10, dateColumnStats(5L, 0, 100)), create(SortedRangeSet.of(range(0L, true, 100L, true)), true));
assertEquals(getDomain(DATE, 10, dateColumnStats(5L, null, 100)), create(SortedRangeSet.of(lessThanOrEqual(100L)), true));
assertEquals(getDomain(DATE, 10, dateColumnStats(5L, 0, null)), create(SortedRangeSet.of(greaterThanOrEqual(0L)), true));
}
// Builds ColumnStatistics carrying only a DateStatistics
private static ColumnStatistics dateColumnStats(Long numberOfValues, Integer minimum, Integer maximum)
{
return new ColumnStatistics(numberOfValues, null, null, null, null, new DateStatistics(minimum, maximum));
}
}
| |
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package p2p.linker;
import java.awt.Desktop;
import java.awt.SystemColor;
import java.net.URI;
/**
*
* @author Igor Ilic
*/
public class about extends javax.swing.JFrame {
/**
 * Creates the "about" frame: builds the Form Editor generated UI,
 * centers the window on screen and applies the system active-caption
 * color as the content pane background.
 */
public about() {
initComponents();
// Passing null centers the frame on the screen
this.setLocationRelativeTo(null);
this.getContentPane().setBackground(SystemColor.activeCaption);
}
/**
* This method is called from within the constructor to initialize the form.
* WARNING: Do NOT modify this code. The content of this method is always
* regenerated by the Form Editor.
*/
@SuppressWarnings("unchecked")
// <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
private void initComponents() {
jButton1 = new javax.swing.JButton();
jButton2 = new javax.swing.JButton();
jButton3 = new javax.swing.JButton();
jButton4 = new javax.swing.JButton();
jLabel1 = new javax.swing.JLabel();
setDefaultCloseOperation(javax.swing.WindowConstants.DISPOSE_ON_CLOSE);
setTitle("P2P Linker - about");
jButton1.setIcon(new javax.swing.ImageIcon(getClass().getResource("/p2p/linker/images/facebook.png"))); // NOI18N
jButton1.setToolTipText("Add me on facebook");
jButton1.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButton1ActionPerformed(evt);
}
});
jButton2.setIcon(new javax.swing.ImageIcon(getClass().getResource("/p2p/linker/images/twitter.png"))); // NOI18N
jButton2.setToolTipText("Follow me on twitter");
jButton2.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButton2ActionPerformed(evt);
}
});
jButton3.setIcon(new javax.swing.ImageIcon(getClass().getResource("/p2p/linker/images/google+.png"))); // NOI18N
jButton3.setToolTipText("Add me on google+");
jButton3.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButton3ActionPerformed(evt);
}
});
jButton4.setIcon(new javax.swing.ImageIcon(getClass().getResource("/p2p/linker/images/exit.png"))); // NOI18N
jButton4.setText("Close");
jButton4.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButton4ActionPerformed(evt);
}
});
jLabel1.setFont(new java.awt.Font("Ubuntu", 0, 14)); // NOI18N
jLabel1.setText("<html>\n<p>Hi,</p>\n<p>My name is Igor and I am a web developer.</p>\n<p>I started this project because I was so much tired from finding something good on the internet but not be able to show it to someone because they don't have an accout/s on social network/s. And it wast becaming so anoying to go copy a link,open my email create an email and sending it.</p>\n<p> </p>\n<p>If you have any ideas,sugesstions or questions pleas contact me trough the social networks below or using the report a problem form</p>\n</html>");
javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane());
getContentPane().setLayout(layout);
layout.setHorizontalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addContainerGap()
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addComponent(jLabel1, javax.swing.GroupLayout.PREFERRED_SIZE, 554, javax.swing.GroupLayout.PREFERRED_SIZE)
.addGap(0, 0, Short.MAX_VALUE))
.addGroup(layout.createSequentialGroup()
.addComponent(jButton1)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(jButton2)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(jButton3)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addComponent(jButton4)))
.addContainerGap())
);
layout.setVerticalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup()
.addContainerGap()
.addComponent(jLabel1, javax.swing.GroupLayout.PREFERRED_SIZE, 163, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING)
.addComponent(jButton4)
.addComponent(jButton3)
.addComponent(jButton2)
.addComponent(jButton1))
.addContainerGap())
);
pack();
setLocationRelativeTo(null);
}// </editor-fold>//GEN-END:initComponents
private void jButton4ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton4ActionPerformed
this.setVisible(false);
}//GEN-LAST:event_jButton4ActionPerformed
private void jButton1ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton1ActionPerformed
URI uri = URI.create("https://facebook.com/igorili");
Desktop desktop = Desktop.isDesktopSupported() ? Desktop.getDesktop() : null;
if (desktop != null && desktop.isSupported(Desktop.Action.BROWSE)) {
try {
desktop.browse(uri);
} catch (Exception e) {
e.printStackTrace();
}
}
}//GEN-LAST:event_jButton1ActionPerformed
private void jButton2ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton2ActionPerformed
URI uri = URI.create("https://twitter.com/GigiliBL");
Desktop desktop = Desktop.isDesktopSupported() ? Desktop.getDesktop() : null;
if (desktop != null && desktop.isSupported(Desktop.Action.BROWSE)) {
try {
desktop.browse(uri);
} catch (Exception e) {
e.printStackTrace();
}
}
}//GEN-LAST:event_jButton2ActionPerformed
private void jButton3ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton3ActionPerformed
URI uri = URI.create("https://plus.google.com/+IgorIlic/");
Desktop desktop = Desktop.isDesktopSupported() ? Desktop.getDesktop() : null;
if (desktop != null && desktop.isSupported(Desktop.Action.BROWSE)) {
try {
desktop.browse(uri);
} catch (Exception e) {
e.printStackTrace();
}
}
}//GEN-LAST:event_jButton3ActionPerformed
/**
* @param args the command line arguments
*/
public static void main(String args[]) {
/* Set the Nimbus look and feel */
//<editor-fold defaultstate="collapsed" desc=" Look and feel setting code (optional) ">
/* If Nimbus (introduced in Java SE 6) is not available, stay with the default look and feel.
* For details see http://download.oracle.com/javase/tutorial/uiswing/lookandfeel/plaf.html
*/
try {
for (javax.swing.UIManager.LookAndFeelInfo info : javax.swing.UIManager.getInstalledLookAndFeels()) {
if ("Nimbus".equals(info.getName())) {
javax.swing.UIManager.setLookAndFeel(info.getClassName());
break;
}
}
} catch (ClassNotFoundException ex) {
java.util.logging.Logger.getLogger(about.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
} catch (InstantiationException ex) {
java.util.logging.Logger.getLogger(about.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
} catch (IllegalAccessException ex) {
java.util.logging.Logger.getLogger(about.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
} catch (javax.swing.UnsupportedLookAndFeelException ex) {
java.util.logging.Logger.getLogger(about.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
}
//</editor-fold>
/* Create and display the form */
java.awt.EventQueue.invokeLater(new Runnable() {
public void run() {
new about().setVisible(true);
}
});
}
// Variables declaration - do not modify//GEN-BEGIN:variables
private javax.swing.JButton jButton1;
private javax.swing.JButton jButton2;
private javax.swing.JButton jButton3;
private javax.swing.JButton jButton4;
private javax.swing.JLabel jLabel1;
// End of variables declaration//GEN-END:variables
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.clientImpl;
import static com.google.common.base.Preconditions.checkArgument;
import static java.util.Objects.requireNonNull;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.concurrent.TimeUnit;
import org.apache.accumulo.core.client.IteratorSetting;
import org.apache.accumulo.core.client.ScannerBase;
import org.apache.accumulo.core.client.sample.SamplerConfiguration;
import org.apache.accumulo.core.data.Column;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Value;
import org.apache.accumulo.core.dataImpl.thrift.IterInfo;
import org.apache.accumulo.core.security.Authorizations;
import org.apache.accumulo.core.util.TextUtil;
import org.apache.hadoop.io.Text;
public class ScannerOptions implements ScannerBase {
protected List<IterInfo> serverSideIteratorList = Collections.emptyList();
protected Map<String,Map<String,String>> serverSideIteratorOptions = Collections.emptyMap();
protected SortedSet<Column> fetchedColumns = new TreeSet<>();
protected long timeOut = Long.MAX_VALUE;
protected long batchTimeOut = Long.MAX_VALUE;
private String regexIterName = null;
private SamplerConfiguration samplerConfig = null;
protected String classLoaderContext = null;
protected Map<String,String> executionHints = Collections.emptyMap();
protected ScannerOptions() {}
public ScannerOptions(ScannerOptions so) {
setOptions(this, so);
}
@Override
public synchronized void addScanIterator(IteratorSetting si) {
checkArgument(si != null, "si is null");
if (serverSideIteratorList.size() == 0) {
serverSideIteratorList = new ArrayList<>();
}
for (IterInfo ii : serverSideIteratorList) {
if (ii.iterName.equals(si.getName())) {
throw new IllegalArgumentException("Iterator name is already in use " + si.getName());
}
if (ii.getPriority() == si.getPriority()) {
throw new IllegalArgumentException(
"Iterator priority is already in use " + si.getPriority());
}
}
serverSideIteratorList.add(new IterInfo(si.getPriority(), si.getIteratorClass(), si.getName()));
if (serverSideIteratorOptions.size() == 0) {
serverSideIteratorOptions = new HashMap<>();
}
Map<String,String> opts = serverSideIteratorOptions.get(si.getName());
if (opts == null) {
opts = new HashMap<>();
serverSideIteratorOptions.put(si.getName(), opts);
}
opts.putAll(si.getOptions());
}
@Override
public synchronized void removeScanIterator(String iteratorName) {
checkArgument(iteratorName != null, "iteratorName is null");
// if no iterators are set, we don't have it, so it is already removed
if (serverSideIteratorList.size() == 0) {
return;
}
for (IterInfo ii : serverSideIteratorList) {
if (ii.iterName.equals(iteratorName)) {
serverSideIteratorList.remove(ii);
break;
}
}
serverSideIteratorOptions.remove(iteratorName);
}
@Override
public synchronized void updateScanIteratorOption(String iteratorName, String key, String value) {
checkArgument(iteratorName != null, "iteratorName is null");
checkArgument(key != null, "key is null");
checkArgument(value != null, "value is null");
if (serverSideIteratorOptions.size() == 0) {
serverSideIteratorOptions = new HashMap<>();
}
Map<String,String> opts = serverSideIteratorOptions.get(iteratorName);
if (opts == null) {
opts = new HashMap<>();
serverSideIteratorOptions.put(iteratorName, opts);
}
opts.put(key, value);
}
@Override
public synchronized void fetchColumnFamily(Text col) {
checkArgument(col != null, "col is null");
Column c = new Column(TextUtil.getBytes(col), null, null);
fetchedColumns.add(c);
}
@Override
public synchronized void fetchColumn(Text colFam, Text colQual) {
checkArgument(colFam != null, "colFam is null");
checkArgument(colQual != null, "colQual is null");
Column c = new Column(TextUtil.getBytes(colFam), TextUtil.getBytes(colQual), null);
fetchedColumns.add(c);
}
@Override
public void fetchColumn(IteratorSetting.Column column) {
checkArgument(column != null, "Column is null");
fetchColumn(column.getColumnFamily(), column.getColumnQualifier());
}
@Override
public synchronized void clearColumns() {
fetchedColumns.clear();
}
public synchronized SortedSet<Column> getFetchedColumns() {
return fetchedColumns;
}
@Override
public synchronized void clearScanIterators() {
serverSideIteratorList = Collections.emptyList();
serverSideIteratorOptions = Collections.emptyMap();
regexIterName = null;
}
protected static void setOptions(ScannerOptions dst, ScannerOptions src) {
synchronized (dst) {
synchronized (src) {
dst.regexIterName = src.regexIterName;
dst.fetchedColumns = new TreeSet<>(src.fetchedColumns);
dst.serverSideIteratorList = new ArrayList<>(src.serverSideIteratorList);
dst.classLoaderContext = src.classLoaderContext;
dst.serverSideIteratorOptions = new HashMap<>();
Set<Entry<String,Map<String,String>>> es = src.serverSideIteratorOptions.entrySet();
for (Entry<String,Map<String,String>> entry : es) {
dst.serverSideIteratorOptions.put(entry.getKey(), new HashMap<>(entry.getValue()));
}
dst.samplerConfig = src.samplerConfig;
dst.batchTimeOut = src.batchTimeOut;
// its an immutable map, so can avoid copy here
dst.executionHints = src.executionHints;
}
}
}
@Override
public Iterator<Entry<Key,Value>> iterator() {
throw new UnsupportedOperationException();
}
@Override
public synchronized void setTimeout(long timeout, TimeUnit timeUnit) {
if (timeOut < 0) {
throw new IllegalArgumentException("TimeOut must be positive : " + timeOut);
}
if (timeout == 0) {
this.timeOut = Long.MAX_VALUE;
} else {
this.timeOut = timeUnit.toMillis(timeout);
}
}
@Override
public synchronized long getTimeout(TimeUnit timeunit) {
return timeunit.convert(timeOut, TimeUnit.MILLISECONDS);
}
@Override
public void close() {
// Nothing needs to be closed
}
@Override
public Authorizations getAuthorizations() {
throw new UnsupportedOperationException("No authorizations to return");
}
@Override
public synchronized void setSamplerConfiguration(SamplerConfiguration samplerConfig) {
requireNonNull(samplerConfig);
this.samplerConfig = samplerConfig;
}
@Override
public synchronized SamplerConfiguration getSamplerConfiguration() {
return samplerConfig;
}
@Override
public synchronized void clearSamplerConfiguration() {
this.samplerConfig = null;
}
@Override
public void setBatchTimeout(long timeout, TimeUnit timeUnit) {
if (timeOut < 0) {
throw new IllegalArgumentException("Batch timeout must be positive : " + timeOut);
}
if (timeout == 0) {
this.batchTimeOut = Long.MAX_VALUE;
} else {
this.batchTimeOut = timeUnit.toMillis(timeout);
}
}
@Override
public long getBatchTimeout(TimeUnit timeUnit) {
return timeUnit.convert(batchTimeOut, TimeUnit.MILLISECONDS);
}
@Override
public void setClassLoaderContext(String classLoaderContext) {
requireNonNull(classLoaderContext, "classloader context name cannot be null");
this.classLoaderContext = classLoaderContext;
}
@Override
public void clearClassLoaderContext() {
this.classLoaderContext = null;
}
@Override
public String getClassLoaderContext() {
return this.classLoaderContext;
}
@Override
public synchronized void setExecutionHints(Map<String,String> hints) {
this.executionHints = Map.copyOf(Objects.requireNonNull(hints));
}
}
| |
package com.planet_ink.coffee_mud.Commands;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.core.collections.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.*;
/*
Copyright 2000-2014 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
@SuppressWarnings({"unchecked","rawtypes"})
@SuppressWarnings({"unchecked","rawtypes"})
public class Draw extends Get
{
	public Draw(){}
	// Command word(s) that trigger this command.
	private final String[] access=_i(new String[]{"DRAW"});
	@Override public String[] getAccessWords(){return access;}

	/**
	 * Collects the worn containers on the given mob that can act as sheaths:
	 * containers that are worn (not in inventory), have nonzero capacity,
	 * restrict what they may contain, and currently hold at least one weapon.
	 *
	 * @param mob the mob whose equipment is scanned (may be null)
	 * @return a Vector of qualifying container Items (never null)
	 */
	public Vector getSheaths(MOB mob)
	{
		final Vector sheaths=new Vector();
		if(mob!=null)
		for(int i=0;i<mob.numItems();i++)
		{
			final Item I=mob.getItem(i);
			if((I!=null)
			&&(!I.amWearingAt(Wearable.IN_INVENTORY))
			&&(I instanceof Container)
			&&(((Container)I).capacity()>0)
			&&(((Container)I).containTypes()!=Container.CONTAIN_ANYTHING))
			{
				// Only treat the container as a sheath if something
				// weapon-like is actually inside it right now.
				final List<Item> contents=((Container)I).getContents();
				for(int c=0;c<contents.size();c++)
					if(contents.get(c) instanceof Weapon)
					{
						sheaths.addElement(I);
						break;
					}
			}
		}
		return sheaths;
	}

	/**
	 * Executes the DRAW command: pulls a weapon out of a worn sheath and
	 * wields it (or holds it when no wield slot is free).
	 *
	 * Trailing modifiers are stripped from the end of {@code commands}:
	 * QUIETLY suppresses messages; IFNECESSARY makes the command a silent
	 * no-op when a weapon is already wielded (or held, with the extra HELD
	 * argument in the scripted form).
	 *
	 * @param mob       the mob performing the command
	 * @param commands  the parsed command words, starting with "DRAW"
	 * @param metaFlags execution flags (unused here)
	 * @return always false (this command never consumes further input)
	 */
	@Override
	public boolean execute(MOB mob, Vector commands, int metaFlags)
		throws java.io.IOException
	{
		boolean quiet=false;
		boolean noerrors=false;
		boolean ifNecessary=false;
		// Scripted form: "... IFNECESSARY [HELD]" at the very end means
		// succeed silently and bail out early when already armed.
		if((commands.size()>0)&&(((String)commands.lastElement()).equalsIgnoreCase("IFNECESSARY")))
		{
			quiet=true;
			noerrors=true;
			commands.removeElementAt(commands.size()-1);
			if((commands.size()>0)
			&&(((String)commands.lastElement()).equalsIgnoreCase("HELD")))
			{
				commands.removeElementAt(commands.size()-1);
				if(mob.fetchHeldItem()!=null)
					return false;
			}
			else
			if(mob.fetchWieldedItem()!=null)
				return false;
		}
		else
		{
			if((commands.size()>0)&&(((String)commands.lastElement()).equalsIgnoreCase("QUIETLY")))
			{
				commands.removeElementAt(commands.size()-1);
				quiet=true;
			}
			if((commands.size()>0)&&(((String)commands.lastElement()).equalsIgnoreCase("IFNECESSARY")))
			{
				ifNecessary=true;
				commands.removeElementAt(commands.size()-1);
				noerrors=true;
			}
		}
		boolean allFlag=false;
		List<Container> containers=new Vector();
		String containerName="";
		String whatToGet="";
		int c=0;
		final Vector sheaths=getSheaths(mob);
		// Drop the command word itself ("DRAW").
		if(commands.size()>0)
			commands.removeElementAt(0);
		if(commands.size()==0)
		{
			// Bare "draw": target the first weapon found inside a sheath,
			// falling back to any carried weapon's name for messages.
			if(sheaths.size()>0)
				containerName=((Item)sheaths.elementAt(0)).name();
			else
				containerName="a weapon";
			for(int i=0;i<mob.numItems();i++)
			{
				final Item I=mob.getItem(i);
				if((I instanceof Weapon)
				&&(I.container()!=null)
				&&(sheaths.contains(I.container())))
				{
					containers.add(I.container());
					whatToGet=I.name();
					break;
				}
			}
			if(whatToGet.length()==0)
			for(int i=0;i<mob.numItems();i++)
			{
				final Item I=mob.getItem(i);
				if(I instanceof Weapon)
				{
					whatToGet=I.name();
					break;
				}
			}
		}
		else
		{
			// "draw <weapon> [<container>]": resolve named containers among
			// worn items, defaulting to the detected sheaths when none match.
			containerName=(String)commands.lastElement();
			commands.insertElementAt("all",0);
			containers=CMLib.english().possibleContainers(mob,commands,Wearable.FILTER_WORNONLY,true);
			if(containers.size()==0) containers=sheaths;
			whatToGet=CMParms.combine(commands,0);
			allFlag=((String)commands.elementAt(0)).equalsIgnoreCase("all");
			if(whatToGet.toUpperCase().startsWith("ALL.")){ allFlag=true; whatToGet="ALL "+whatToGet.substring(4);}
			if(whatToGet.toUpperCase().endsWith(".ALL")){ allFlag=true; whatToGet="ALL "+whatToGet.substring(0,whatToGet.length()-4);}
		}
		boolean doneSomething=false;
		// Walk the candidate containers (or make a single pass if none).
		while((c<containers.size())||(containers.size()==0))
		{
			final Vector V=new Vector();
			Container container=null;
			if(containers.size()>0) container=containers.get(c++);
			int addendum=1;
			String addendumStr="";
			// doBugFix guarantees at least one pass through the matcher even
			// when allFlag is false, so a single named weapon is still found.
			boolean doBugFix = true;
			while(doBugFix || allFlag)
			{
				doBugFix=false;
				Environmental getThis=null;
				if((container!=null)&&(mob.isMine(container)))
					getThis=mob.findItem(container,whatToGet+addendumStr);
				if(getThis==null) break;
				if(getThis instanceof Weapon)
					V.addElement(getThis);
				// Look for further duplicates via the ".2", ".3"... suffixes.
				addendumStr="."+(++addendum);
			}
			for(int i=0;i<V.size();i++)
			{
				final Item getThis=(Item)V.elementAt(i);
				long wearCode=0;
				if(container!=null) wearCode=container.rawWornCode();
				// With IFNECESSARY, stop as soon as no wield or held slot
				// remains free.
				if((ifNecessary)
				&&(mob.freeWearPositions(Wearable.WORN_WIELD,(short)0,(short)0)==0)
				&&(mob.freeWearPositions(Wearable.WORN_HELD,(short)0,(short)0)==0))
					break;
				if(get(mob,container,getThis,quiet,"draw",false))
				{
					if(getThis.container()==null)
					{
						// Wield the drawn weapon; hold it instead when no
						// wield position is free.
						if(mob.freeWearPositions(Wearable.WORN_WIELD,(short)0,(short)0)==0)
						{
							final CMMsg newMsg=CMClass.getMsg(mob,getThis,null,CMMsg.MSG_HOLD,null);
							if(mob.location().okMessage(mob,newMsg))
								mob.location().send(mob,newMsg);
						}
						else
						{
							final CMMsg newMsg=CMClass.getMsg(mob,getThis,null,CMMsg.MSG_WIELD,null);
							if(mob.location().okMessage(mob,newMsg))
								mob.location().send(mob,newMsg);
						}
					}
				}
				// Restore the container's worn code, which get() may alter.
				if(container!=null) container.setRawWornCode(wearCode);
				doneSomething=true;
			}
			if(containers.size()==0) break;
		}
		if((!doneSomething)&&(!noerrors))
		{
			// Nothing was drawn: report the most useful failure message.
			if(containers.size()>0)
			{
				final Container container=containers.get(0);
				if(container.isOpen())
					mob.tell(_("You don't see that in @x1.",container.name()));
				else
					mob.tell(_("@x1 is closed.",container.name()));
			}
			else
				mob.tell(_("You don't see @x1 here.",containerName));
		}
		return false;
	}
	@Override public double actionsCost(final MOB mob, final List<String> cmds){return CMProps.getActionCost(ID());}
	@Override public boolean canBeOrdered(){return true;}
}
| |
package rmi;
import java.rmi.RemoteException;
import java.rmi.server.UnicastRemoteObject;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import datastructure.FileUnit;
import datastructure.SynchroMeta;
import server.Machine;
import server.naming.NamingServer;
public class NamingServiceImpl extends UnicastRemoteObject implements NamingService {
private static final long serialVersionUID = 1L;
private static final Logger logger = LogManager.getLogger(NamingServiceImpl.class);
/**
 * Exports this object as an RMI remote object on an anonymous port
 * (via the UnicastRemoteObject superclass constructor).
 *
 * @throws RemoteException if exporting the remote object fails
 */
public NamingServiceImpl() throws RemoteException {
    super();
}
/**
 * Heartbeat/registration callback from a storage machine: refreshes the
 * machine's last-seen timestamp, and on first contact also triggers a
 * replica-count check over the whole namespace tree.
 *
 * @param machine the storage machine reporting in
 * @throws RemoteException on RMI failure
 */
@Override
public void updateMachine(Machine machine) throws RemoteException {
    logger.entry(machine);
    final Map<Machine, Long> storageValids = NamingServer.getInstance().storageValids;
    final long now = new Date().getTime();
    if ( storageValids.containsKey(machine) ) {
        // Known machine: advance its timestamp. The guard only matters if
        // the clock moved backwards, since "now" is otherwise always newer.
        final long date = storageValids.get(machine);
        if ( date < now ) storageValids.put(machine, now);
    } else {
        // First contact: register the machine, then re-check replication
        // starting from the root ("" is the root path prefix).
        storageValids.put(machine, now);
        NamingServer.getInstance().checkDupFileNumAndIncrease(NamingServer.getInstance().getRoot(), "");
    }
}
/**
 * Pre-flight check for creating a file: verifies that every directory on
 * the path exists and that no file with the same name already exists, then
 * returns the storage machine selected for the new file.
 *
 * @param fullFilePath slash-separated path of the file to create
 * @return the machine that should store the file, or null when a parent
 *         directory is missing or the file already exists
 * @throws RemoteException on RMI failure
 */
@Override
public Machine createFile(String fullFilePath) throws RemoteException {
    logger.entry(fullFilePath);
    NamingServer namingServer = NamingServer.getInstance();
    // Hash-partition the path over the known storage machines; the second
    // line folds a possibly negative hash into [0, size).
    int storeIndex = fullFilePath.hashCode() % namingServer.storageValids.size();
    storeIndex = (storeIndex + namingServer.storageValids.size()) % namingServer.storageValids.size();
    // NOTE(review): keySet() iteration order is only stable if storageValids
    // preserves ordering — confirm the concrete Map type used.
    Machine machine = new ArrayList<>(namingServer.storageValids.keySet()).get(storeIndex);
    String[] path = fullFilePath.split("/");
    FileUnit nowFileUnit = namingServer.getRoot();
    for (int i = 0; i < path.length; i++) {
        if (i != path.length-1) {
            // Intermediate segment: descend into the matching directory.
            List<FileUnit> fileUnits = nowFileUnit.list();
            boolean isCreatedDir = false;
            for (int j = 0; j < fileUnits.size(); j++) {
                if (fileUnits.get(j).isDir() && fileUnits.get(j).getName().equals(path[i])) {
                    nowFileUnit = fileUnits.get(j);
                    isCreatedDir = true;
                    break;
                }
            }
            if (!isCreatedDir) {
                // Parent directory does not exist: refuse creation.
                return null;
            }
        } else {
            // Final segment: refuse when a file of that name already exists.
            List<FileUnit> fileUnits = nowFileUnit.list();
            boolean isCreatedFile = false;
            for (int j = 0; j < fileUnits.size(); j++) {
                if (!fileUnits.get(j).isDir() && fileUnits.get(j).getName().equals(path[i])) {
                    logger.info("isCreatedFile:" + fileUnits.get(j).getName());
                    isCreatedFile = true;
                    break;
                }
            }
            if (isCreatedFile) {
                return null;
            }
            return machine;
        }
    }
    // Unreachable for non-empty paths; kept for safety.
    return null;
}
/**
 * Resolves the file at {@code fullFilePath} to one machine that stores it.
 *
 * @param fullFilePath slash-separated path of the file
 * @return the first recorded storage machine of the file, or null when any
 *         path segment cannot be found
 * @throws RemoteException on RMI failure
 */
@Override
public Machine getFileLocation(String fullFilePath) throws RemoteException {
    logger.entry(fullFilePath);
    NamingServer namingServer = NamingServer.getInstance();
    String[] path = fullFilePath.split("/");
    FileUnit nowFileUnit = namingServer.getRoot();
    for (int i = 0; i < path.length; i++) {
        List<FileUnit> fileUnits = nowFileUnit.list();
        for (int j = 0; j < fileUnits.size(); j++) {
            if (i != path.length-1) {
                // Intermediate segment: descend into the matching directory.
                if (fileUnits.get(j).isDir() && fileUnits.get(j).getName().equals(path[i])) {
                    nowFileUnit = fileUnits.get(j);
                    break;
                }
            } else {
                // Final segment: a matching file resolves to its first
                // recorded storage machine.
                if (!fileUnits.get(j).isDir() && fileUnits.get(j).getName().equals(path[i])) {
                    return fileUnits.get(j).getAllMachines().get(0);
                }
            }
            // Reached the last child without matching: segment not found.
            if (j == fileUnits.size()-1) {
                return null;
            }
        }
    }
    return null;
}
/**
 * Pre-flight check for creating a directory: verifies that every parent
 * directory on the path exists and that no directory with the same name
 * already exists, then returns the storage machine selected for it.
 * Mirrors {@link #createFile(String)} but matches directories at the final
 * segment instead of files.
 *
 * @param fullDirPath slash-separated path of the directory to create
 * @return the machine that should store the directory, or null when a
 *         parent is missing or the directory already exists
 * @throws RemoteException on RMI failure
 */
@Override
public Machine createDir(String fullDirPath) throws RemoteException {
    logger.entry(fullDirPath);
    NamingServer namingServer = NamingServer.getInstance();
    // Hash-partition the path over the known storage machines (negative
    // hash folded into [0, size)).
    int storeIndex = fullDirPath.hashCode() % namingServer.storageValids.size();
    storeIndex = (storeIndex + namingServer.storageValids.size()) % namingServer.storageValids.size();
    Machine machine = new ArrayList<>(namingServer.storageValids.keySet()).get(storeIndex);
    String[] path = fullDirPath.split("/");
    FileUnit nowFileUnit = namingServer.getRoot();
    for (int i = 0; i < path.length; i++) {
        if (i != path.length-1) {
            // Intermediate segment: descend into the matching directory.
            List<FileUnit> fileUnits = nowFileUnit.list();
            boolean isCreatedDir = false;
            for (int j = 0; j < fileUnits.size(); j++) {
                if (fileUnits.get(j).isDir() && fileUnits.get(j).getName().equals(path[i])) {
                    nowFileUnit = fileUnits.get(j);
                    isCreatedDir = true;
                    break;
                }
            }
            if (!isCreatedDir) {
                // Parent directory does not exist: refuse creation.
                return null;
            }
        } else {
            // Final segment: refuse when the directory already exists.
            List<FileUnit> fileUnits = nowFileUnit.list();
            boolean isCreatedDir = false;
            for (int j = 0; j < fileUnits.size(); j++) {
                if (fileUnits.get(j).isDir() && fileUnits.get(j).getName().equals(path[i])) {
                    isCreatedDir = true;
                    break;
                }
            }
            if (isCreatedDir) {
                return null;
            }
            return machine;
        }
    }
    // Unreachable for non-empty paths; kept for safety.
    return null;
}
/**
 * Resolves the directory at {@code fullDirPath} to one machine that stores
 * it. Mirrors {@link #getFileLocation(String)} but matches a directory at
 * the final path segment.
 *
 * @param fullDirPath slash-separated path of the directory
 * @return the first recorded storage machine of the directory, or null
 *         when any path segment cannot be found
 * @throws RemoteException on RMI failure
 */
@Override
public Machine getDirLocation(String fullDirPath) throws RemoteException {
    logger.entry(fullDirPath);
    NamingServer namingServer = NamingServer.getInstance();
    String[] path = fullDirPath.split("/");
    FileUnit nowFileUnit = namingServer.getRoot();
    for (int i = 0; i < path.length; i++) {
        List<FileUnit> fileUnits = nowFileUnit.list();
        for (int j = 0; j < fileUnits.size(); j++) {
            if (i != path.length-1) {
                // Intermediate segment: descend into the matching directory.
                if (fileUnits.get(j).isDir() && fileUnits.get(j).getName().equals(path[i])) {
                    nowFileUnit = fileUnits.get(j);
                    break;
                }
            } else {
                // Final segment: return the directory's first storage machine.
                if (fileUnits.get(j).isDir() && fileUnits.get(j).getName().equals(path[i])) {
                    return fileUnits.get(j).getAllMachines().get(0);
                }
            }
            // Reached the last child without matching: segment not found.
            if (j == fileUnits.size()-1) {
                return null;
            }
        }
    }
    return null;
}
/**
 * Lists the children of the directory at {@code fullDirPath}; the empty
 * string addresses the root directory.
 *
 * @param fullDirPath slash-separated directory path, or "" for the root
 * @return the directory's children, or null when any path segment cannot
 *         be found
 * @throws RemoteException on RMI failure
 */
@Override
public List<FileUnit> listDir(String fullDirPath)
        throws RemoteException {
    logger.entry(fullDirPath);
    NamingServer namingServer = NamingServer.getInstance();
    String[] path = fullDirPath.split("/");
    FileUnit nowFileUnit = namingServer.getRoot();
    // Root special case: "" would still split into one empty segment, so
    // answer directly before walking.
    if (fullDirPath.equals("")) {
        return nowFileUnit.list();
    }
    for (int i = 0; i < path.length; i++) {
        List<FileUnit> fileUnits = nowFileUnit.list();
        for (int j = 0; j < fileUnits.size(); j++) {
            if (i != path.length-1) {
                // Intermediate segment: descend into the matching directory.
                if (fileUnits.get(j).isDir() && fileUnits.get(j).getName().equals(path[i])) {
                    nowFileUnit = fileUnits.get(j);
                    break;
                }
            } else {
                // Final segment: return the matched directory's children.
                if (fileUnits.get(j).isDir() && fileUnits.get(j).getName().equals(path[i])) {
                    return fileUnits.get(j).list();
                }
            }
            // Reached the last child without matching: segment not found.
            if (j == fileUnits.size()-1) {
                return null;
            }
        }
    }
    return nowFileUnit.list();
}
/**
 * Recursively merges the file tree rooted at {@code srcDir}, as reported by
 * {@code machine}, into the global tree node {@code targetDir}, collecting
 * the synchronization actions the reporting machine must perform:
 * CONFIRM for files that already exist elsewhere (content reconciliation),
 * DELETE for entries whose file/directory type conflicts with the global
 * tree.
 *
 * @param machine     the machine whose local tree is being merged
 * @param srcDir      a directory node from the machine's local tree
 * @param targetDir   the corresponding node in the global tree
 * @param lastDirName the path prefix of targetDir ("" for the root)
 * @return the synchronization actions accumulated for this subtree
 */
private List<SynchroMeta> addToDir(Machine machine, FileUnit srcDir, FileUnit targetDir, String lastDirName) {
    List<SynchroMeta> synchroMetas = new ArrayList<>();
    List<FileUnit> srcLowerFileUnits = srcDir.list();
    for (FileUnit toAddFileUnit : srcLowerFileUnits) {
        List<FileUnit> targetLowerFileUnits = targetDir.list();
        boolean isExist = false;
        boolean isTypeEqual = true;
        FileUnit targetAddedFileUnit = null;
        for (FileUnit existFileUnit : targetLowerFileUnits) {
            if (existFileUnit.getName().equals(toAddFileUnit.getName())) {
                isExist = true;
                if (existFileUnit.isDir() != toAddFileUnit.isDir()) {
                    // Same name but file-vs-directory mismatch.
                    isTypeEqual = false;
                } else {
                    // Entry already known: check whether this machine is
                    // already recorded as one of its replicas.
                    List<Machine> machines = existFileUnit.getAllMachines();
                    boolean isRecordThisMachine = false;
                    for (Machine recordMachine : machines) {
                        if (recordMachine.ip.equals(machine.ip) && recordMachine.port == machine.port) {
                            isRecordThisMachine = true;
                            break;
                        }
                    }
                    if (!isRecordThisMachine) {
                        if (!existFileUnit.isDir()) {
                            // An existing replica must confirm/reconcile the
                            // file content with the newcomer's copy.
                            Machine targetMachine = existFileUnit.getAllMachines().get(0);
                            synchroMetas.add(new SynchroMeta(lastDirName+"/"+toAddFileUnit.getName(), SynchroMeta.CONFIRM, new Machine(targetMachine.ip, targetMachine.port)));
                        }
                        targetAddedFileUnit = existFileUnit;
                        existFileUnit.addStorageMachine(machine);
                    }
                    // NOTE(review): when the machine is already recorded,
                    // targetAddedFileUnit stays null and the isDir() call
                    // below would NPE — confirm that case cannot occur here.
                }
            }
        }
        if (isExist) {
            if (!isTypeEqual) {
                // Type conflict: tell the machine to delete its local copy.
                synchroMetas.add(new SynchroMeta(lastDirName+"/"+toAddFileUnit.getName(), SynchroMeta.DELETE, null));
                continue;
            }
        } else {
            // Unknown entry: add it to the global tree with this machine as
            // its first replica.
            targetAddedFileUnit = new FileUnit(toAddFileUnit.getName(), toAddFileUnit.isDir());
            targetAddedFileUnit.addStorageMachine(machine);
            targetDir.addLowerFileUnit(targetAddedFileUnit);
        }
        if (targetAddedFileUnit.isDir()) {
            // Recurse into subdirectories, extending the path prefix.
            List<SynchroMeta> lowerSynchroMetas = addToDir(machine, toAddFileUnit, targetAddedFileUnit, lastDirName+"/"+targetAddedFileUnit.getName());
            synchroMetas.addAll(lowerSynchroMetas);
        }
    }
    return synchroMetas;
}
/**
 * Registers a storage machine that has come online by merging its local
 * file tree into the global namespace rooted at the naming server.
 *
 * @param machine   the machine announcing itself
 * @param localRoot the root of that machine's local file tree
 * @return the synchronization actions the machine must perform to converge
 * @throws RemoteException on RMI failure
 */
@Override
public List<SynchroMeta> informOnline(Machine machine, FileUnit localRoot)
        throws RemoteException {
    logger.entry(machine, localRoot);
    // Merge the announced tree into the global root; "" is the path prefix
    // for entries directly under the root.
    return addToDir(machine, localRoot, NamingServer.getInstance().getRoot(), "");
}
/**
 * Walks the global tree down to the directory containing the last element
 * of {@code fullFilePath} and returns that parent directory. If an
 * intermediate directory is missing, the walk silently stays at the
 * deepest directory reached.
 *
 * @param fullFilePath slash-separated path whose parent is wanted
 * @return the parent directory node (never null; root in the worst case)
 */
private FileUnit findChangedFileUnitFather(String fullFilePath) {
    String[] segments = fullFilePath.split("/");
    FileUnit current = NamingServer.getInstance().getRoot();
    // Descend through every segment except the final (file/dir name) one.
    for (int depth = 0; depth < segments.length - 1; depth++) {
        for (FileUnit child : current.list()) {
            if (child.isDir() && child.getName().equals(segments[depth])) {
                current = child;
                break;
            }
        }
    }
    return current;
}
/**
 * Records that {@code operateMachine} now stores the file at
 * {@code fullFilePath}, creating the metadata entry when needed and tagging
 * every directory on the path as present on that machine. When this call
 * originates the change ({@code isOrigin}), also selects up to three backup
 * machines — other than the operator — that should replicate the file.
 *
 * @param fullFilePath   slash-separated path of the created file
 * @param isOrigin       true when this notification originates the change
 * @param operateMachine the machine that created the file
 * @return the chosen backup machines when {@code isOrigin}, otherwise null
 * @throws RemoteException on RMI failure
 */
@Override
public List<Machine> notifyCreateFile(String fullFilePath,
        boolean isOrigin, Machine operateMachine) throws RemoteException {
    logger.entry(fullFilePath, isOrigin, operateMachine);
    FileUnit fatherFileUnit = NamingServer.getInstance().getRoot();
    String[] path = fullFilePath.split("/");
    // Walk down to the parent directory, marking every directory on the way
    // as also stored on the operating machine.
    for (int i = 0; i < path.length-1; i++) {
        List<FileUnit> fileUnits = fatherFileUnit.list();
        for (int j = 0; j < fileUnits.size(); j++) {
            if (fileUnits.get(j).isDir() && fileUnits.get(j).getName().equals(path[i])) {
                fatherFileUnit = fileUnits.get(j);
                fatherFileUnit.addStorageMachine(operateMachine);
                break;
            }
        }
    }
    // Find or create the metadata entry for the file itself.
    String name = path[path.length-1];
    FileUnit changedFileUnit = null;
    for (FileUnit child : fatherFileUnit.list()) {
        if (!child.isDir() && child.getName().equals(name)) {
            changedFileUnit = child;
            break;
        }
    }
    if (changedFileUnit == null) {
        changedFileUnit = new FileUnit(name, false);
        fatherFileUnit.addLowerFileUnit(changedFileUnit);
    }
    changedFileUnit.addStorageMachine(operateMachine);
    if (!isOrigin) {
        return null;
    }
    NamingServer namingServer = NamingServer.getInstance();
    int storeIndex = fullFilePath.hashCode() % namingServer.storageValids.size();
    storeIndex = (storeIndex + namingServer.storageValids.size()) % namingServer.storageValids.size();
    // At most three backups, but never more than the other known machines.
    int backNum = Math.min(3, namingServer.storageValids.size() - 1);
    // Snapshot the candidate list once instead of rebuilding it per loop turn.
    List<Machine> candidates = new ArrayList<>(namingServer.storageValids.keySet());
    List<Machine> result = new ArrayList<>();
    while (backNum > 0) {
        Machine machine = candidates.get(storeIndex);
        // BUG FIX: the original compared machine.ip (a String) against the
        // whole operateMachine object, which is never equal, so the operator
        // could be picked as its own backup. Skip the operating machine.
        if (!(machine.ip.equals(operateMachine.ip) && machine.port == operateMachine.port)) {
            result.add(new Machine(machine.ip, machine.port));
            backNum--;
        }
        storeIndex = (storeIndex + 1) % namingServer.storageValids.size();
    }
    return result;
}
/**
 * Records that {@code operateMachine} no longer stores the file at
 * {@code fullFilePath}. When the last replica disappears, the file entry is
 * removed from the tree entirely.
 *
 * @param fullFilePath   slash-separated path of the deleted file
 * @param isOrigin       true when this notification originates the change
 * @param operateMachine the machine that deleted its copy
 * @return the remaining replica machines when {@code isOrigin}, else null
 * @throws RemoteException on RMI failure
 */
@Override
public List<Machine> notifyDeleteFile(String fullFilePath,
        boolean isOrigin, Machine operateMachine) throws RemoteException {
    logger.entry(fullFilePath, isOrigin, operateMachine);
    FileUnit parentDir = findChangedFileUnitFather(fullFilePath);
    String[] segments = fullFilePath.split("/");
    String fileName = segments[segments.length - 1];
    // Locate the file entry under its parent directory.
    FileUnit target = null;
    for (FileUnit child : parentDir.list()) {
        if (!child.isDir() && child.getName().equals(fileName)) {
            target = child;
            break;
        }
    }
    if (target == null) {
        return null;
    }
    // Drop the reporting machine from the replica set; remove the entry
    // entirely once nothing stores it any more.
    target.deleteStorageMachine(operateMachine);
    if (target.getAllMachines().isEmpty()) {
        parentDir.deleteLowerFileUnit(target);
    }
    return isOrigin ? target.getAllMachines() : null;
}
/**
 * Notification that the file at {@code fullFilePath} was written.
 * For the originating call, returns every machine currently storing the
 * file (so the writer can propagate the change); otherwise returns null.
 */
@Override
public List<Machine> notifyWriteFile(String fullFilePath, boolean isOrigin) throws RemoteException {
    logger.entry(fullFilePath, isOrigin);
    // Only the originating call needs the storage-machine list.
    if (!isOrigin) {
        return null;
    }
    FileUnit parentUnit = findChangedFileUnitFather(fullFilePath);
    String[] segments = fullFilePath.split("/");
    String fileName = segments[segments.length - 1];
    for (FileUnit candidate : parentUnit.list()) {
        if (!candidate.isDir() && candidate.getName().equals(fileName)) {
            return candidate.getAllMachines();
        }
    }
    // Unknown file: nothing to report.
    return null;
}
/**
 * Records that {@code operateMachine} now stores the directory at
 * {@code fullDirPath}, creating the directory metadata entry if needed.
 *
 * @param fullDirPath    slash-separated path of the created directory
 * @param isOrigin       true when this call originates the create (as opposed
 *                       to a replication notification)
 * @param operateMachine the machine that performed the create
 * @return for the originating call, up to 3 backup machines (excluding the
 *         operating machine) chosen from the valid storage set; {@code null}
 *         for non-origin notifications
 */
@Override
public List<Machine> notifyCreateDir(String fullDirPath,
        boolean isOrigin, Machine operateMachine) throws RemoteException {
    logger.entry(fullDirPath, isOrigin, operateMachine);
    FileUnit fatherFileUnit = findChangedFileUnitFather(fullDirPath);
    String[] path = fullDirPath.split("/");
    String name = path[path.length - 1];
    // Find (or create) the directory child entry.
    FileUnit changedFileUnit = null;
    for (FileUnit unit : fatherFileUnit.list()) {
        if (unit.isDir() && unit.getName().equals(name)) {
            changedFileUnit = unit;
            break;
        }
    }
    if (changedFileUnit == null) {
        changedFileUnit = new FileUnit(name, true);
        fatherFileUnit.addLowerFileUnit(changedFileUnit);
    }
    changedFileUnit.addStorageMachine(operateMachine);
    if (!isOrigin) {
        return null;
    }
    // Originating call: choose up to 'backNum' distinct backup machines,
    // starting at a hash-derived index so directories spread evenly.
    NamingServer namingServer = NamingServer.getInstance();
    List<Machine> candidates = new ArrayList<>(namingServer.storageValids.keySet());
    int validCount = candidates.size();
    List<Machine> result = new ArrayList<>();
    if (validCount == 0) {
        // Robustness: the original divided by size() and would throw
        // ArithmeticException when no storage machine is registered.
        return result;
    }
    int storeIndex = ((fullDirPath.hashCode() % validCount) + validCount) % validCount;
    int backNum = 3;
    if (validCount < backNum) {
        backNum = validCount - 1;
    }
    int scanned = 0;
    // BUGFIX: the original wrote machine.ip.equals(operateMachine), comparing
    // a String against a Machine object -- never equal -- so the operating
    // machine was never excluded from its own backup set. Compare ip-to-ip,
    // and bound the scan to one full pass so the loop cannot spin forever.
    while (backNum > 0 && scanned < validCount) {
        Machine machine = candidates.get(storeIndex);
        if (!machine.ip.equals(operateMachine.ip) || machine.port != operateMachine.port) {
            result.add(new Machine(machine.ip, machine.port));
            backNum--;
        }
        storeIndex = (storeIndex + 1) % validCount;
        scanned++;
    }
    return result;
}
/**
 * Records that {@code operateMachine} no longer stores the directory at
 * {@code fullDirPath}; removes the directory entry entirely once no machine
 * stores it. For the originating call, returns the machines that still hold
 * a copy.
 */
@Override
public List<Machine> notifyDeleteDir(String fullDirPath,
        boolean isOrigin, Machine operateMachine) throws RemoteException {
    logger.entry(fullDirPath, isOrigin, operateMachine);
    FileUnit parentUnit = findChangedFileUnitFather(fullDirPath);
    String[] segments = fullDirPath.split("/");
    String dirName = segments[segments.length - 1];
    // Locate the directory child entry matching the name.
    FileUnit target = null;
    for (FileUnit candidate : parentUnit.list()) {
        if (candidate.isDir() && candidate.getName().equals(dirName)) {
            target = candidate;
            break;
        }
    }
    if (target == null) {
        return null;
    }
    target.deleteStorageMachine(operateMachine);
    // Drop the metadata entry once no machine stores the directory any more.
    if (target.getAllMachines().isEmpty()) {
        parentUnit.deleteLowerFileUnit(target);
    }
    return isOrigin ? target.getAllMachines() : null;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.accumulo.test.randomwalk.concurrent;
import java.util.Properties;
import java.util.SortedSet;
import org.apache.accumulo.core.client.AccumuloException;
import org.apache.accumulo.core.client.impl.thrift.TableOperationExceptionType;
import org.apache.accumulo.core.client.impl.thrift.ThriftTableOperationException;
import org.apache.accumulo.core.conf.Property;
import org.apache.accumulo.test.randomwalk.Environment;
import org.apache.accumulo.test.randomwalk.State;
import org.apache.accumulo.test.randomwalk.Test;
import org.apache.commons.math.random.RandomData;
import org.apache.commons.math.random.RandomDataImpl;
/**
 * Concurrent random-walk test node that randomly perturbs Accumulo
 * configuration. Each visit first restores whichever property was changed on
 * the previous visit back to its default, then randomly picks an
 * instance-wide, per-table, or per-namespace property and sets it to a random
 * value within a range considered safe for the test cluster.
 */
public class Config extends Test {
// State keys remembering which property was changed on the previous visit,
// so it can be reset to its default before the next mutation is applied.
private static final String LAST_SETTING = "lastSetting";
private static final String LAST_TABLE_SETTING = "lastTableSetting";
private static final String LAST_NAMESPACE_SETTING = "lastNamespaceSetting";
// A mutable property together with the inclusive [min, max] range of random
// values that may be assigned to it.
static class Setting {
public Property property;
public long min;
public long max;
public Setting(Property property, long min, long max) {
this.property = property;
this.min = min;
this.max = max;
}
}
// Shorthand factory used to build the settings tables below.
static Setting s(Property property, long min, long max) {
return new Setting(property, min, max);
}
/* @formatter:off */
// Instance-wide (tserver/master) properties eligible for random mutation.
Setting[] settings = {
s(Property.TSERV_BLOOM_LOAD_MAXCONCURRENT, 1, 10),
s(Property.TSERV_BULK_PROCESS_THREADS, 1, 10),
s(Property.TSERV_BULK_RETRY, 1, 10),
s(Property.TSERV_BULK_TIMEOUT, 10, 600),
s(Property.TSERV_BULK_ASSIGNMENT_THREADS, 1, 10),
s(Property.TSERV_DATACACHE_SIZE, 0, 1000000000L),
s(Property.TSERV_INDEXCACHE_SIZE, 0, 1000000000L),
s(Property.TSERV_CLIENT_TIMEOUT, 100, 10000),
s(Property.TSERV_MAJC_MAXCONCURRENT, 1, 10),
s(Property.TSERV_MAJC_DELAY, 100, 10000),
s(Property.TSERV_MAJC_THREAD_MAXOPEN, 3, 100),
s(Property.TSERV_MINC_MAXCONCURRENT, 1, 10),
s(Property.TSERV_DEFAULT_BLOCKSIZE, 100000, 10000000L),
s(Property.TSERV_MAX_IDLE, 10000, 500 * 1000),
s(Property.TSERV_MAXMEM, 1000000, 3 * 1024 * 1024 * 1024L),
s(Property.TSERV_READ_AHEAD_MAXCONCURRENT, 1, 25),
s(Property.TSERV_MIGRATE_MAXCONCURRENT, 1, 10),
s(Property.TSERV_MUTATION_QUEUE_MAX, 10000, 1024 * 1024),
s(Property.TSERV_RECOVERY_MAX_CONCURRENT, 1, 100),
s(Property.TSERV_SCAN_MAX_OPENFILES, 10, 1000),
s(Property.TSERV_THREADCHECK, 100, 10000),
s(Property.TSERV_MINTHREADS, 1, 100),
s(Property.TSERV_SESSION_MAXIDLE, 100, 5 * 60 * 1000),
s(Property.TSERV_SORT_BUFFER_SIZE, 1024 * 1024, 1024 * 1024 * 1024L),
s(Property.TSERV_TABLET_SPLIT_FINDMIDPOINT_MAXOPEN, 5, 100),
s(Property.TSERV_WAL_BLOCKSIZE, 1024 * 1024, 1024 * 1024 * 1024 * 10L),
s(Property.TSERV_WORKQ_THREADS, 1, 10),
s(Property.MASTER_BULK_THREADPOOL_SIZE, 1, 10),
s(Property.MASTER_BULK_RETRIES, 1, 10),
s(Property.MASTER_BULK_TIMEOUT, 10, 600),
s(Property.MASTER_FATE_THREADPOOL_SIZE, 1, 100),
s(Property.MASTER_RECOVERY_DELAY, 0, 100),
s(Property.MASTER_LEASE_RECOVERY_WAITING_PERIOD, 0, 10),
s(Property.MASTER_RECOVERY_MAXTIME, 10, 1000),
s(Property.MASTER_THREADCHECK, 100, 10000),
s(Property.MASTER_MINTHREADS, 1, 200),};
// Per-table properties; this same table is reused for namespace settings.
Setting[] tableSettings = {
s(Property.TABLE_MAJC_RATIO, 1, 10),
s(Property.TABLE_MAJC_COMPACTALL_IDLETIME, 100, 10 * 60 * 60 * 1000L),
s(Property.TABLE_SPLIT_THRESHOLD, 10 * 1024, 10L * 1024 * 1024 * 1024),
s(Property.TABLE_MINC_COMPACT_IDLETIME, 100, 100 * 60 * 60 * 1000L),
s(Property.TABLE_SCAN_MAXMEM, 10 * 1024, 10 * 1024 * 1024),
s(Property.TABLE_FILE_COMPRESSED_BLOCK_SIZE, 10 * 1024, 10 * 1024 * 1024L),
s(Property.TABLE_FILE_COMPRESSED_BLOCK_SIZE_INDEX, 10 * 1024, 10 * 1024 * 1024L),
s(Property.TABLE_FILE_REPLICATION, 0, 5),
s(Property.TABLE_FILE_MAX, 2, 50),};
/* @formatter:on */
/**
 * Resets the property changed on the previous visit (instance, table, or
 * namespace scope), clears the bookkeeping state, then applies one new
 * random mutation chosen uniformly among the three scopes.
 */
@Override
public void visit(State state, Environment env, Properties props) throws Exception {
// reset any previous setting
Object lastSetting = state.getOkIfAbsent(LAST_SETTING);
if (lastSetting != null) {
int choice = Integer.parseInt(lastSetting.toString());
Property property = settings[choice].property;
log.debug("Setting " + property.getKey() + " back to " + property.getDefaultValue());
env.getConnector().instanceOperations().setProperty(property.getKey(), property.getDefaultValue());
}
// reset any previous per-table setting; the table may have been deleted
// concurrently, so a NOTFOUND failure is tolerated
lastSetting = state.getOkIfAbsent(LAST_TABLE_SETTING);
if (lastSetting != null) {
// stored as "<tableName>,<index into tableSettings>"
String parts[] = lastSetting.toString().split(",");
String table = parts[0];
int choice = Integer.parseInt(parts[1]);
Property property = tableSettings[choice].property;
if (env.getConnector().tableOperations().exists(table)) {
log.debug("Setting " + property.getKey() + " on " + table + " back to " + property.getDefaultValue());
try {
env.getConnector().tableOperations().setProperty(table, property.getKey(), property.getDefaultValue());
} catch (AccumuloException ex) {
if (ex.getCause() instanceof ThriftTableOperationException) {
ThriftTableOperationException ttoe = (ThriftTableOperationException) ex.getCause();
// NOTE(review): this early return skips the state.remove() calls and
// the new mutation below; the LAST_* keys remain set and will be
// reset again on the next visit -- confirm this is intended.
if (ttoe.type == TableOperationExceptionType.NOTFOUND)
return;
}
throw ex;
}
}
}
// reset any previous per-namespace setting, tolerating a concurrently
// deleted namespace the same way
lastSetting = state.getOkIfAbsent(LAST_NAMESPACE_SETTING);
if (lastSetting != null) {
// stored as "<namespaceName>,<index into tableSettings>"
String parts[] = lastSetting.toString().split(",");
String namespace = parts[0];
int choice = Integer.parseInt(parts[1]);
Property property = tableSettings[choice].property;
if (env.getConnector().namespaceOperations().exists(namespace)) {
log.debug("Setting " + property.getKey() + " on " + namespace + " back to " + property.getDefaultValue());
try {
env.getConnector().namespaceOperations().setProperty(namespace, property.getKey(), property.getDefaultValue());
} catch (AccumuloException ex) {
if (ex.getCause() instanceof ThriftTableOperationException) {
ThriftTableOperationException ttoe = (ThriftTableOperationException) ex.getCause();
if (ttoe.type == TableOperationExceptionType.NAMESPACE_NOTFOUND)
return;
}
throw ex;
}
}
}
// all previous mutations undone: clear the bookkeeping
state.remove(LAST_SETTING);
state.remove(LAST_TABLE_SETTING);
state.remove(LAST_NAMESPACE_SETTING);
// pick the scope of the next mutation; commons-math nextInt bounds are
// inclusive, so dice is one of {0, 1, 2}
RandomData random = new RandomDataImpl();
int dice = random.nextInt(0, 2);
if (dice == 0) {
changeTableSetting(random, state, env, props);
} else if (dice == 1) {
changeNamespaceSetting(random, state, env, props);
} else {
changeSetting(random, state, env, props);
}
}
/**
 * Applies a random value to a random per-table property on a random test
 * table (names in the "ctt*" range); records the choice in state so the
 * next visit can undo it. A concurrent table deletion (NOTFOUND) is
 * silently tolerated.
 */
private void changeTableSetting(RandomData random, State state, Environment env, Properties props) throws Exception {
// pick a random property
int choice = random.nextInt(0, tableSettings.length - 1);
Setting setting = tableSettings[choice];
// pick a random table
SortedSet<String> tables = env.getConnector().tableOperations().list().tailSet("ctt").headSet("ctu");
if (tables.isEmpty())
return;
String table = random.nextSample(tables, 1)[0].toString();
// generate a random value
long newValue = random.nextLong(setting.min, setting.max);
state.set(LAST_TABLE_SETTING, table + "," + choice);
log.debug("Setting " + setting.property.getKey() + " on table " + table + " to " + newValue);
try {
env.getConnector().tableOperations().setProperty(table, setting.property.getKey(), "" + newValue);
} catch (AccumuloException ex) {
if (ex.getCause() instanceof ThriftTableOperationException) {
ThriftTableOperationException ttoe = (ThriftTableOperationException) ex.getCause();
if (ttoe.type == TableOperationExceptionType.NOTFOUND)
return;
}
throw ex;
}
}
/**
 * Applies a random value to a random per-table property on a random
 * namespace (the tableSettings ranges are reused for namespaces); records
 * the choice in state so the next visit can undo it. A concurrent
 * namespace deletion is silently tolerated.
 */
private void changeNamespaceSetting(RandomData random, State state, Environment env, Properties props) throws Exception {
// pick a random property
int choice = random.nextInt(0, tableSettings.length - 1);
Setting setting = tableSettings[choice];
// pick a random namespace
SortedSet<String> namespaces = env.getConnector().namespaceOperations().list();
if (namespaces.isEmpty())
return;
String namespace = random.nextSample(namespaces, 1)[0].toString();
// generate a random value
long newValue = random.nextLong(setting.min, setting.max);
state.set(LAST_NAMESPACE_SETTING, namespace + "," + choice);
log.debug("Setting " + setting.property.getKey() + " on namespace " + namespace + " to " + newValue);
try {
env.getConnector().namespaceOperations().setProperty(namespace, setting.property.getKey(), "" + newValue);
} catch (AccumuloException ex) {
if (ex.getCause() instanceof ThriftTableOperationException) {
ThriftTableOperationException ttoe = (ThriftTableOperationException) ex.getCause();
if (ttoe.type == TableOperationExceptionType.NAMESPACE_NOTFOUND)
return;
}
throw ex;
}
}
/**
 * Applies a random value to a random instance-wide property and records the
 * choice in state so the next visit can restore the default.
 */
private void changeSetting(RandomData random, State state, Environment env, Properties props) throws Exception {
// pick a random property
int choice = random.nextInt(0, settings.length - 1);
Setting setting = settings[choice];
// generate a random value
long newValue = random.nextLong(setting.min, setting.max);
state.set(LAST_SETTING, "" + choice);
log.debug("Setting " + setting.property.getKey() + " to " + newValue);
env.getConnector().instanceOperations().setProperty(setting.property.getKey(), "" + newValue);
}
}
| |
// ----------------------------------------------------------------------------
// Copyright 2007-2014, GeoTelematic Solutions, Inc.
// All rights reserved
// ----------------------------------------------------------------------------
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// ----------------------------------------------------------------------------
// Change History:
// 2008/12/01 Martin D. Flynn
// -Initial release
// 2009/01/28 Martin D. Flynn
// -Added "Logged-In" list column
// 2009/08/23 Martin D. Flynn
// -Added ability to log-in to selected account (this feature controlled by
// property "sysAdminAccounts.allowAccountLogin" - default is "false").
// -Convert new entered IDs to lowercase
// 2009/09/23 Martin D. Flynn
// -Added "TemporaryProperties" field.
// 2010/09/09 Martin D. Flynn
// -Moved to "org.opengts.war.track.page"
// 2011/03/08 Martin D. Flynn
// -Added GeocoderMode, IsBorderCrossing (moved from AccountInfo.java)
// 2011/06/16 Martin D. Flynn
// -Added "Notes" text option
// 2012/12/24 Martin D. Flynn
// -Change "form" target to "_self" (rather than "_top")
// 2013/03/01 Martin D. Flynn
// -Default AccountManagerID to selected AccountID, if unspecified.
// 2013/08/06 Martin D. Flynn
// -Added ACL support for Read/View
// -Added setting for Account "Notify Enable" (see PARM_ACCT_RULE_ALLOW)
// ----------------------------------------------------------------------------
package org.opengts.war.track.page;
import java.util.*;
import java.io.*;
import javax.servlet.*;
import javax.servlet.http.*;
import org.opengts.util.*;
import org.opengts.dbtools.*;
import org.opengts.db.*;
import org.opengts.db.AclEntry.AccessLevel;
import org.opengts.db.tables.*;
import org.opengts.war.tools.*;
import org.opengts.war.track.*;
public class SysAdminAccounts
extends WebPageAdaptor
implements Constants
{
// ------------------------------------------------------------------------
// When true, a free-form "Notes" text field is shown on the account edit form.
private static final boolean SHOW_NOTES = false;
// ------------------------------------------------------------------------
// Parameters
// forms
public static final String FORM_ACCOUNT_SELECT = "SysAdminSelect";
public static final String FORM_ACCOUNT_EDIT = "SysAdminEdit";
public static final String FORM_ACCOUNT_NEW = "SysAdminNew";
// commands (page-level actions carried in the request)
public static final String COMMAND_INFO_UPDATE = "update";
public static final String COMMAND_INFO_SELECT = "select";
public static final String COMMAND_INFO_NEW = "new";
// submit button parameter names
public static final String PARM_SUBMIT_EDIT = "a_subedit";
public static final String PARM_SUBMIT_VIEW = "a_subview";
public static final String PARM_SUBMIT_CHG = "a_subchg";
public static final String PARM_SUBMIT_DEL = "a_subdel";
public static final String PARM_SUBMIT_NEW = "a_subnew";
public static final String PARM_SUBMIT_LOGIN = "a_sublogin";
// buttons
public static final String PARM_BUTTON_CANCEL = "d_btncan";
public static final String PARM_BUTTON_BACK = "d_btnbak";
// account field parameter names (request/form field keys)
public static final String PARM_NEW_NAME = "s_newname";
public static final String PARM_ACCOUNT_SELECT = "s_account";
public static final String PARM_ACCT_ID = "a_id";
public static final String PARM_ACCT_CREATED = "a_created";
public static final String PARM_ACCT_LAST_LOGIN = "a_lastlogin";
public static final String PARM_ACCT_DESC = "a_desc";
public static final String PARM_ACCT_PASSWORD = "a_pass";
public static final String PARM_ACCT_ACTIVE = "a_active";
public static final String PARM_ACCT_CONTACT_NAME = "a_contact";
public static final String PARM_ACCT_CONTACT_PHONE = "a_phone";
public static final String PARM_ACCT_CONTACT_EMAIL = "a_email";
public static final String PARM_ACCT_TIMEZONE = "a_tmz";
public static final String PARM_ACCT_AUTODEV = "a_autodev";
public static final String PARM_ACCT_MAXDEV = "a_maxdev";
public static final String PARM_ACCT_EXPIRE = "a_expire";
public static final String PARM_ACCT_TEMP_PROPS = "a_tmpProps";
public static final String PARM_ACCT_NOTES = "a_notes";
public static final String PARM_ACCT_IS_MANAGER = "a_manager";
public static final String PARM_ACCT_MANAGER_ID = "a_managerid";
public static final String PARM_ACCT_PRIVLABEL = "a_privlbl";
public static final String PARM_ACCT_RG_MODE = "a_rgmode";
public static final String PARM_ACCT_IS_BCROSS = "a_bcross";
public static final String PARM_ACCT_DATA_PUSH_URL = "a_pushurl";
public static final String PARM_ACCT_DCS_PROPS_ID = "a_dcspropid";
public static final String PARM_ACCT_MAX_PING = "a_pingmax";
public static final String PARM_ACCT_TOTAL_PING = "a_pingtotal";
public static final String PARM_ACCT_PING_RESET = "a_pingreset";
public static final String PARM_ACCT_RULE_ALLOW = "a_ruleallw"; // see Device.PARM_DEV_RULE_ALLOW
public static final String PARM_ACCT_SMS_ENABLED = "a_smsenable";
public static final String PARM_ACCT_SMS_PROPS = "a_smsprops";
public static final String PARM_ACCT_RETAIN_EVENTS = "a_retainEv";
public static final String PARM_ACCT_WEB_SERVICE = "a_wsAllow";
// ------------------------------------------------------------------------
// password holder/indicator: the form shows this placeholder instead of the
// real password; an unchanged (placeholder) or all-'*' value is rejected
// by isValidPassword(...) below
private static final String PASSWORD_HOLDER = "**********";
private static final char PASSWORD_INVALID_CHAR = '*'; // password can't have all '*'
// WebPage interface
/**
 * Page constructor: registers the base URI, page name, navigation chain,
 * and marks the page as requiring a logged-in session.
 */
public SysAdminAccounts()
{
this.setBaseURI(RequestProperties.TRACK_BASE_URI());
this.setPageName(PAGE_SYSADMIN_ACCOUNTS);
this.setPageNavigation(new String[] { PAGE_LOGIN, PAGE_MENU_TOP });
this.setLoginRequired(true);
//this.setCssDirectory("extra/css");
}
// ------------------------------------------------------------------------
//public void setCssDirectory(String cssDir)
//{
// super.setCssDirectory(cssDir);
// Print.logStackTrace("CSS Dir: " + cssDir);
//}
// ------------------------------------------------------------------------
/** Returns the menu group under which this page appears (the Admin menu). */
public String getMenuName(RequestProperties reqState)
{
return MenuBar.MENU_ADMIN;
}
/** Returns the localized menu description for this page. */
public String getMenuDescription(RequestProperties reqState, String parentMenuName)
{
    I18N i18n = reqState.getPrivateLabel().getI18N(SysAdminAccounts.class);
    String desc = i18n.getString("SysAdminAccounts.editMenuDesc","System Accounts");
    return super._getMenuDescription(reqState, desc);
}
/** Returns the localized help text shown for this page's menu entry. */
public String getMenuHelp(RequestProperties reqState, String parentMenuName)
{
    I18N i18n = reqState.getPrivateLabel().getI18N(SysAdminAccounts.class);
    String help = i18n.getString("SysAdminAccounts.editMenuHelp","Create/Delete/Edit/View System Accounts");
    return super._getMenuHelp(reqState, help);
}
// ------------------------------------------------------------------------
/** Returns the localized description used in the navigation breadcrumb. */
public String getNavigationDescription(RequestProperties reqState)
{
    I18N i18n = reqState.getPrivateLabel().getI18N(SysAdminAccounts.class);
    String desc = i18n.getString("SysAdminAccounts.navDesc","Accounts");
    return super._getNavigationDescription(reqState, desc);
}
/** Returns the localized label shown on this page's navigation tab. */
public String getNavigationTab(RequestProperties reqState)
{
    PrivateLabel label = reqState.getPrivateLabel();
    return label.getI18N(SysAdminAccounts.class)
        .getString("SysAdminAccounts.navTab","System Accounts");
}
// ------------------------------------------------------------------------
/* true if this page is for the system admin only */
// Returns false: the page is also available to account managers (access is
// further gated by isOkToDisplay(...) below).
public boolean systemAdminOnly()
{
return false;
}
// ------------------------------------------------------------------------
/**
 * Returns true if this page may be displayed for the current session:
 * only system administrators and account managers may see it.
 */
public boolean isOkToDisplay(RequestProperties reqState)
{
    Account account = (reqState != null)? reqState.getCurrentAccount() : null;
    if (account == null) {
        // no account in session
        return false;
    }
    return account.isSystemAdmin() || account.isAccountManager();
}
// ------------------------------------------------------------------------
// HTML-escapes 's' for table display; a blank value is replaced by a
// single-space placeholder so the table cell still renders.
private static String filter(String s)
{
return StringTools.isBlank(s)? " " : StringTools.htmlFilterText(s);
}
/**
 * Returns true if 'pwd' is acceptable as a new password value.
 * Blank is allowed (the user simply cannot log in); the untouched form
 * placeholder (PASSWORD_HOLDER) and any value made up entirely of '*'
 * characters are rejected.
 */
private boolean isValidPassword(String pwd)
{
    if (StringTools.isBlank(pwd)) {
        // user is not allowed to log-in
        return true;
    }
    if (pwd.equals(PASSWORD_HOLDER)) {
        // placeholder was left unchanged
        return false;
    }
    // valid only if at least one character is not the '*' filler
    return pwd.chars().anyMatch(c -> c != PASSWORD_INVALID_CHAR);
}
/**
 * Scans all active HTTP sessions and returns a map of accountID to the list
 * of userIDs currently logged in under that account. The map is populated
 * as a side effect of the session-counting filter callback.
 */
private Map<String,java.util.List<String>> getLoggedInAccounts(RequestProperties reqState)
{
final Map<String,java.util.List<String>> acctLoginMap = new HashMap<String,java.util.List<String>>();
HttpSession session = AttributeTools.getSession(reqState.getHttpServletRequest());
if (session != null) {
// NOTE(review): the returned count is unused; the filter callback is
// invoked purely for its side effect of filling 'acctLoginMap'.
int count = RTConfigContextListener.GetSessionCount(session.getServletContext(),
new RTConfigContextListener.HttpSessionFilter() {
public boolean countSession(HttpSession session) {
// a session counts only if it carries a non-blank account ID
String acctID = (String)AttributeTools.getSessionAttribute(session,Constants.PARM_ACCOUNT,null);
if (!StringTools.isBlank(acctID)) {
java.util.List<String> userList = acctLoginMap.get(acctID);
if (userList == null) {
userList = new Vector<String>();
acctLoginMap.put(acctID,userList);
}
// blank user IDs are logged as "?" but NOT added to the list
String userID = (String)AttributeTools.getSessionAttribute(session,Constants.PARM_USER,null);
if (!StringTools.isBlank(userID)) {
userList.add(userID);
} else {
userID = "?";
}
Print.logInfo("Logged-in User: %s,%s", acctID, userID);
return true;
}
return false;
}
}
);
}
return acctLoginMap;
}
// ------------------------------------------------------------------------
public void writePage(
final RequestProperties reqState,
String pageMsg)
throws IOException
{
final HttpServletRequest request = reqState.getHttpServletRequest();
final PrivateLabel privLabel = reqState.getPrivateLabel(); // never null
final String dtFormat = privLabel.getDateFormat() + " " + privLabel.getTimeFormat();
final I18N i18n = privLabel.getI18N(SysAdminAccounts.class);
final Locale locale = reqState.getLocale();
final Account currAcct = reqState.getCurrentAccount(); // never null
final String currAcctID = reqState.getCurrentAccountID();
final String currAcctTZID = currAcct.getTimeZone();
final TimeZone currAcctTZ = currAcct.getTimeZone(null);
final boolean isSysAdmin = Account.isSystemAdmin(currAcct); // all access
final boolean isAccountMgr = currAcct.isAccountManager();
final User currUser = reqState.getCurrentUser(); // may be null
final String currUserID = reqState.getCurrentUserID();
final String pageName = this.getPageName();
final boolean accountProps = privLabel.getBooleanProperty(PrivateLabel.PROP_SysAdminAccounts_accountProperties,false);
String m = pageMsg;
boolean error = false;
/* account manager */
final boolean hasAccountMgr = Account.SupportsAccountManager();
final boolean showAccountManager = hasAccountMgr &&
privLabel.getBooleanProperty(PrivateLabel.PROP_SysAdminAccounts_showAccountManager,false);
final boolean showRetainEvents =
privLabel.getBooleanProperty(PrivateLabel.PROP_SysAdminAccounts_showRetainEvents,false);
final boolean showAllowWebService =
privLabel.getBooleanProperty(PrivateLabel.PROP_SysAdminAccounts_showAllowWebService,false);
final boolean showAutoAddDevices =
privLabel.getBooleanProperty(PrivateLabel.PROP_SysAdminAccounts_showAutoAddDevices,false);
final boolean showMaxPingCount = DBConfig.hasExtraPackage();
/* data push url */
final boolean showDataPushURL =
privLabel.getBooleanProperty(PrivateLabel.PROP_SysAdminAccounts_showDataPushURL,false) &&
Account.SupportsDataPushURL();
/* invalid user? */
if (!isAccountMgr && !isSysAdmin) {
Print.logWarn("Current user is neither a SysAdmin, nor Account Manager! ==> " + currAcctID);
// access will be restricted below
}
/* list of authorized accounts */
Collection<String> accountList = null;
try {
accountList = Account.getAuthorizedAccounts(currAcct);
} catch (DBException dbe) {
Print.logError("Error reading authorized Accounts");
}
if (ListTools.isEmpty(accountList)) {
accountList = new Vector<String>();
accountList.add(currAcctID);
}
// 'accountList' has at least one element in it.
/* selected account-id */
String selAccountID = AttributeTools.getRequestString(reqState.getHttpServletRequest(), PARM_ACCOUNT_SELECT, "");
if (StringTools.isBlank(selAccountID)) {
selAccountID = ListTools.itemAt(accountList, 0, "");
}
if (!ListTools.contains(accountList,selAccountID)) {
// Authorized account list does not contain the selected account
selAccountID = currAcctID;
}
final boolean isCurrentAccountSelected = selAccountID.equals(currAcctID);
/* account db */
Account selAccount = null;
try {
selAccount = !StringTools.isBlank(selAccountID)? Account.getAccount(selAccountID) : null; // may still be null
} catch (DBException dbe) {
// ignore
}
/* command */
String accountCmd = reqState.getCommandName();
boolean listAccounts = false;
boolean updateAccount = accountCmd.equals(COMMAND_INFO_UPDATE);
boolean selectAccount = accountCmd.equals(COMMAND_INFO_SELECT);
boolean newAccount = accountCmd.equals(COMMAND_INFO_NEW);
boolean deleteAccount = false;
boolean editAccount = false;
boolean viewAccount = false;
boolean loginAccount = false;
/* submit buttons */
String submitEdit = AttributeTools.getRequestString(request, PARM_SUBMIT_EDIT , "");
String submitView = AttributeTools.getRequestString(request, PARM_SUBMIT_VIEW , "");
String submitChange = AttributeTools.getRequestString(request, PARM_SUBMIT_CHG , "");
String submitNew = AttributeTools.getRequestString(request, PARM_SUBMIT_NEW , "");
String submitDelete = AttributeTools.getRequestString(request, PARM_SUBMIT_DEL , "");
String submitLogin = AttributeTools.getRequestString(request, PARM_SUBMIT_LOGIN, "");
/* CACHE_ACL: ACL allow edit/view */
boolean allowNew = (isAccountMgr || isSysAdmin) && privLabel.hasAllAccess(currUser,this.getAclName());
boolean allowDelete = allowNew; // 'delete' allowed if 'new' allowed
boolean allowEdit = allowNew || privLabel.hasWriteAccess(currUser, this.getAclName());
boolean allowView = allowEdit || privLabel.hasReadAccess(currUser, this.getAclName());
boolean allowLogin = allowEdit && privLabel.isSystemAccountsLoginEnabled(currAcctID);
/*
boolean allowNew = isAccountMgr || isSysAdmin;
boolean allowDelete = allowNew; // 'delete' allowed if 'new' allowed
boolean allowEdit = allowNew || (isAccountMgr || isSysAdmin);
boolean allowView = true;
boolean allowLogin = allowEdit && privLabel.isSystemAccountsLoginEnabled(currAcctID);
*/
/* sub-command */
// Dispatch on the page command flags (newAccount/updateAccount/selectAccount,
// set earlier from the request). Each branch validates authorization and the
// pressed submit button, then raises exactly one action flag
// (loginAccount / deleteAccount / editAccount / viewAccount / listAccounts)
// which is acted upon in the sections below.
String newAccountID = null;
if (newAccount) {
    // "new account" form: validate the proposed account ID
    if (!allowNew) {
        newAccount = false; // not authorized
    } else {
        HttpServletRequest httpReq = reqState.getHttpServletRequest();
        // account IDs are normalized to lower-case
        newAccountID = AttributeTools.getRequestString(httpReq,PARM_NEW_NAME,"").trim();
        newAccountID = newAccountID.toLowerCase();
        if (StringTools.isBlank(newAccountID)) {
            m = i18n.getString("SysAdminAccounts.enterNewAccount","Please enter a new Account name."); // UserErrMsg
            error = true;
            newAccount = false;
        } else
        if (!WebPageAdaptor.isValidID(reqState,/*PrivateLabel.PROP_SysAdminAccounts_validateNewIDs,*/newAccountID)) {
            m = i18n.getString("SysAdminAccounts.invalidIDChar","ID contains invalid characters"); // UserErrMsg
            error = true;
            newAccount = false;
        }
    }
} else
if (updateAccount) {
    // "update" is only honored when the localized "Change" button was pressed
    if (!allowEdit) {
        // not authorized to update users
        updateAccount = false;
    } else
    if (!SubmitMatch(submitChange,i18n.getString("SysAdminAccounts.change","Change"))) {
        updateAccount = false;
    }
} else
if (selectAccount) {
    // account-list form: decide which button was pressed (Login/Delete/Edit/View)
    if (SubmitMatch(submitLogin,i18n.getString("SysAdminAccounts.login","Login"))) {
        if (allowLogin) {
            if (selAccount == null) {
                m = i18n.getString("SysAdminAccounts.pleaseSelectAccount","Please select an Account"); // UserErrMsg
                error = true;
                listAccounts = true;
            } else
            if (isCurrentAccountSelected) {
                // re-login into the account we are already in makes no sense
                m = i18n.getString("SysAdminAccounts.alreadyLoggedInToAccount","Already Logged-In to this Account"); // UserErrMsg
                error = true;
                listAccounts = true;
            } else {
                loginAccount = true;
            }
        }
    } else
    if (SubmitMatch(submitDelete,i18n.getString("SysAdminAccounts.delete","Delete"))) {
        if (allowDelete) {
            if (selAccount == null) {
                m = i18n.getString("SysAdminAccounts.pleaseSelectAccount","Please select an Account"); // UserErrMsg
                error = true;
                listAccounts = true;
            } else
            if (isCurrentAccountSelected) {
                // self-deletion is always refused
                m = i18n.getString("SysAdminAccounts.cannotDeleteCurrentAccount","Cannot delete current logged-in Account"); // UserErrMsg
                error = true;
                listAccounts = true;
            } else {
                deleteAccount = true;
            }
        }
    } else
    if (SubmitMatch(submitEdit,i18n.getString("SysAdminAccounts.edit","Edit"))) {
        if (allowEdit) {
            if (selAccount == null) {
                m = i18n.getString("SysAdminAccounts.pleaseSelectAccount","Please select an Account"); // UserErrMsg
                error = true;
                listAccounts = true;
            } else {
                // the current account is shown read-only even via "Edit"
                editAccount = !isCurrentAccountSelected;
                viewAccount = true;
            }
        }
    } else
    if (SubmitMatch(submitView,i18n.getString("SysAdminAccounts.view","View"))) {
        if (allowView) {
            if (selAccount == null) {
                m = i18n.getString("SysAdminAccounts.pleaseSelectAccount","Please select an Account"); // UserErrMsg
                error = true;
                listAccounts = true;
            } else {
                viewAccount = true;
            }
        }
    } else {
        // unrecognized submit value: fall back to the account list
        listAccounts = true;
    }
} else {
    // no recognized command: show the account list
    listAccounts = true;
}
/* login to account? */
// Re-login into the selected account: build a login URL carrying the target
// account's "admin" credentials plus the system-accounts re-login passcode,
// clear the current session (logout), remember the originating account/user
// (so the session can later return), and forward the request to the login URL.
// FIX: corrected user-visible message typo "ocurred" -> "occurred".
// NOTE(review): the decoded password is placed in a URL query parameter before
// the internal forward — confirm this URL is never emitted to logs/redirects.
if (loginAccount) {
    if (selAccount == null) {
        m = i18n.getString("SysAdminAccounts.pleaseSelectAccount","Please select an Account"); // UserErrMsg
        error = true;
    } else {
        try {
            String loginAcctID = selAccount.getAccountID();
            String loginUserID = User.getAdminUserID();
            // prefer the target account's "admin" user password; fall back to
            // the account-level password when no admin user exists
            User loginUser = User.getUser(selAccount, loginUserID);
            String loginPasswd = (loginUser != null)? loginUser.getDecodedPassword(null) : selAccount.getDecodedPassword(null);
            String passcode = privLabel.getSystemAccountsLoginPasscode(currAcctID);
            URIArg url = new URIArg(reqState.getBaseURI());
            url.addArg(Constants.PARM_ACCOUNT , loginAcctID);
            url.addArg(Constants.PARM_USER , loginUserID);
            url.addArg(Constants.PARM_PASSWORD , StringTools.blankDefault(loginPasswd,""));
            url.addArg(CommonServlet.PARM_PAGE , Constants.PAGE_MENU_TOP);
            url.addArg(Constants.PARM_SA_RELOGIN, passcode);
            Print.logInfo("ReLogin URL: " + url);
            AttributeTools.clearSessionAttributes(request); // invalidate/logout
            // record who initiated the re-login, and when, in the fresh session
            AttributeTools.setSessionLong(request, Constants.PARM_SA_RELOGIN_SESS, DateTime.getCurrentTimeSec());
            AttributeTools.setSessionAttribute(request, Constants.PARM_SA_RELOGIN_ACCT, currAcctID);
            AttributeTools.setSessionAttribute(request, Constants.PARM_SA_RELOGIN_USER, currUserID);
            HttpServletResponse response = reqState.getHttpServletResponse();
            RequestDispatcher rd = request.getRequestDispatcher(url.toString());
            rd.forward(request, response);
            return; // response fully handled by the forwarded login page
        } catch (Throwable th) {
            m = i18n.getString("SysAdminAccounts.errorDuringLoginDispatch","Error occurred during dispatch to login"); // UserErrMsg
            error = true;
        }
    }
    listAccounts = true;
}
/* delete account? */
// Delete the selected account (cascading to its dependent records), then
// re-read the authorized-account list and re-select its first entry so the
// page can be redrawn with a valid selection.
if (deleteAccount) {
    if (selAccount == null) {
        m = i18n.getString("SysAdminAccounts.pleaseSelectAccount","Please select an Account"); // UserErrMsg
        error = true;
    } else {
        try {
            Account.Key accountKey = (Account.Key)selAccount.getRecordKey();
            Print.logWarn("Deleting Account: " + accountKey);
            accountKey.delete(true); // will also delete dependencies
            accountList = Account.getAuthorizedAccounts(currAcct);
            selAccountID = ListTools.itemAt(accountList, 0, "");
            try {
                selAccount = !selAccountID.equals("")? Account.getAccount(selAccountID) : null; // may still be null
            } catch (DBException dbe) {
                // failed to re-read the fallback selection; leave unselected
                selAccount = null;
            }
        } catch (DBException dbe) {
            Print.logException("Unable to delete account: " + selAccount.getAccountID(), dbe);
            m = i18n.getString("SysAdminAccounts.errorDelete","Internal error deleting Account"); // UserErrMsg
            error = true;
        }
    }
    listAccounts = true;
}
/* new account? */
// Create a new account with the previously-validated newAccountID: check for
// an existing account first, then create (persisted by createNewAccount) and
// make the new account the current selection.
if (newAccount) {
    boolean createAccountOK = true;
    try {
        if (Account.exists(newAccountID)) {
            m = i18n.getString("SysAdminAccounts.alreadyExists","This Account already exists"); // UserErrMsg
            error = true;
            createAccountOK = false;
        }
    } catch (DBException dbe) {
        m = i18n.getString("SysAdminAccounts.accountError","Error checking account"); // UserErrMsg
        error = true;
        createAccountOK = false;
    }
    if (createAccountOK) {
        try {
            String newPasswd = null; // null => let createNewAccount assign the default password
            Account account = Account.createNewAccount(currAcct, newAccountID, newPasswd); // saved
            accountList = Account.getAuthorizedAccounts(currAcct);
            selAccount = account;
            selAccountID = account.getAccountID();
            m = i18n.getString("SysAdminAccounts.createdAccount","New Account has been created"); // UserErrMsg
        } catch (DBNotAuthorizedException dbaee) {
            m = i18n.getString("SysAdminAccounts.notAuthorized","Not authorized to create account"); // UserErrMsg
            error = true;
        } catch (DBAlreadyExistsException dbaee) {
            // race: account appeared between the exists() check and creation
            m = i18n.getString("SysAdminAccounts.alreadyExists","This Account already exists"); // UserErrMsg
            error = true;
        } catch (DBException dbe) {
            m = i18n.getString("SysAdminAccounts.errorCreate","Internal error creating Account"); // UserErrMsg
            error = true;
        }
    }
    listAccounts = true;
}
/* change/update the account info? */
// Read back every posted edit-form field for the selected account. Values are
// applied (permission-guarded) in the "update" section that follows.
if (updateAccount) {
    if (selAccount == null) {
        m = i18n.getString("SysAdminAccounts.noAccounts","There are currently no defined Accounts."); // UserErrMsg
    } else {
        String acctDesc = AttributeTools.getRequestString(request, PARM_ACCT_DESC , "");
        String acctActive = AttributeTools.getRequestString(request, PARM_ACCT_ACTIVE , "");
        String acctPassword = AttributeTools.getRequestString(request, PARM_ACCT_PASSWORD , "");
        String contactName = AttributeTools.getRequestString(request, PARM_ACCT_CONTACT_NAME , "");
        String contactPhone = AttributeTools.getRequestString(request, PARM_ACCT_CONTACT_PHONE , "");
        String contactEmail = AttributeTools.getRequestString(request, PARM_ACCT_CONTACT_EMAIL , "");
        String acctTimeZone = AttributeTools.getRequestString(request, PARM_ACCT_TIMEZONE , "");
        String acctAutoDev = AttributeTools.getRequestString(request, PARM_ACCT_AUTODEV , "");
        String acctMaxDev = AttributeTools.getRequestString(request, PARM_ACCT_MAXDEV , "");
        String acctIsManager = AttributeTools.getRequestString(request, PARM_ACCT_IS_MANAGER , "");
        String acctManagerID = AttributeTools.getRequestString(request, PARM_ACCT_MANAGER_ID , "");
        // "<n/a>" sentinel means the private-label combo was not on the form
        String acctPrivLabel = AttributeTools.getRequestString(request, PARM_ACCT_PRIVLABEL , "<n/a>");
        String acctGeoMode = AttributeTools.getRequestString(request, PARM_ACCT_RG_MODE , "");
        String dcsPropsID = AttributeTools.getRequestString(request, PARM_ACCT_DCS_PROPS_ID , "");
        String isBCrossStr = AttributeTools.getRequestString(request, PARM_ACCT_IS_BCROSS , "");
        int maxPingCount = AttributeTools.getRequestInt( request, PARM_ACCT_MAX_PING , 0);
        // checkbox: any non-blank value means "reset total ping count"
        boolean totPingReset = !StringTools.isBlank(AttributeTools.getRequestString(request,PARM_ACCT_PING_RESET,null));
        String dataPushURL = AttributeTools.getRequestString(request, PARM_ACCT_DATA_PUSH_URL , "");
        String noteText = AttributeTools.getRequestString(request, PARM_ACCT_NOTES , "");
        String ruleAllowStr = AttributeTools.getRequestString(request, PARM_ACCT_RULE_ALLOW , null);
        String smsEnabledStr = AttributeTools.getRequestString(request, PARM_ACCT_SMS_ENABLED , "");
        String smsProps = AttributeTools.getRequestString(request, PARM_ACCT_SMS_PROPS , "");
        String retainEvStr = AttributeTools.getRequestString(request, PARM_ACCT_RETAIN_EVENTS , "");
        String wsAllowStr = AttributeTools.getRequestString(request, PARM_ACCT_WEB_SERVICE , "");
        User adminUser = null; // set later if the admin user password is updated
        listAccounts = true;
        // sms default enabled?
        // A system-wide default may force the posted SMS-enabled value.
        switch (Account.GetDefaultSmsEnabledState()) {
            case FALSE:
                smsEnabledStr = "false";
                //smsProps = "";
                break;
            case TRUE:
                smsEnabledStr = "true";
                break;
            case ACCOUNT:
                // leave as is
                break;
        }
        // update
        // Apply each posted field to the selected Account bean, guarding each
        // mutation by the relevant feature flag / privilege; the bean is
        // persisted in the "save" section below.
        try {
            boolean saveOK = true;
            // active: the currently-logged-in account can never deactivate itself
            if (isCurrentAccountSelected) {
                if (!selAccount.getIsActive()) {
                    selAccount.setIsActive(true);
                }
            } else {
                boolean acctActv = ComboOption.parseYesNoText(locale, acctActive, true);
                if (selAccount.getIsActive() != acctActv) {
                    selAccount.setIsActive(acctActv);
                }
            }
            // password (the masked place-holder posted back means "unchanged")
            if (!isCurrentAccountSelected) {
                if (acctPassword.equals(PASSWORD_HOLDER)) {
                    // password not entered
                } else
                if (this.isValidPassword(acctPassword)) {
                    selAccount.setDecodedPassword(null,acctPassword);
                    try {
                        // keep the "admin" user's password in sync with the account
                        adminUser = User.getUser(selAccount, User.getAdminUserID());
                        if (adminUser != null) {
                            adminUser.setDecodedPassword(null,acctPassword);
                        }
                    } catch (DBException dbe) {
                        // ignore: admin user lookup failed; account password still updated
                    }
                } else {
                    m = i18n.getString("SysAdminAccounts.pleaseEnterValidPassword","Please enter a valid password"); // UserErrMsg
                    error = true;
                    saveOK = false;
                    editAccount = true;
                    listAccounts = false;
                }
            }
            // description (blank leaves the existing description untouched)
            if (!acctDesc.equals("")) {
                selAccount.setDescription(acctDesc);
            }
            // contact name
            if (!contactName.equals(selAccount.getContactName())) {
                selAccount.setContactName(contactName);
            }
            // contact phone
            if (!contactPhone.equals(selAccount.getContactPhone())) {
                selAccount.setContactPhone(contactPhone);
            }
            // contact email
            if (!contactEmail.equals(selAccount.getContactEmail())) {
                selAccount.setContactEmail(contactEmail);
            }
            // Timezone
            if (!acctTimeZone.equals(selAccount.getTimeZone())) {
                selAccount.setTimeZone(acctTimeZone);
            }
            // isAccountManager/ManagerID (sysadmin only, never on the current account)
            if (showAccountManager && !isCurrentAccountSelected && isSysAdmin) {
                boolean isManager = false;
                if (!StringTools.isBlank(acctIsManager)) {
                    isManager = ComboOption.parseYesNoText(locale, acctIsManager, false);
                    if (selAccount.getIsAccountManager() != isManager) {
                        selAccount.setIsAccountManager(isManager);
                    }
                }
                String oldMgrID = selAccount.getManagerID();
                if (isManager) {
                    String newMgrID = !StringTools.isBlank(acctManagerID)?
                        acctManagerID : selAccount.getAccountID(); // default to AccountID
                    if (StringTools.isBlank(oldMgrID)) {
                        selAccount.setManagerID(newMgrID);
                    } else
                    if (!oldMgrID.equals(newMgrID)) {
                        // changing a manager ID can orphan managed accounts; log it
                        Print.logWarn("Changing 'ManagerID': " + oldMgrID + " ==> " + newMgrID);
                        selAccount.setManagerID(newMgrID);
                    }
                } else {
                    if (!StringTools.isBlank(oldMgrID)) {
                        selAccount.setManagerID(""); // clear (not a manager)
                    }
                }
            }
            // showRetainEvents
            if (showRetainEvents && isSysAdmin) {
                boolean retainEvents = ComboOption.parseYesNoText(locale, retainEvStr, false);
                if (retainEvents) {
                    if (!selAccount.hasRetainedEventAge()) {
                        long retainSec = 99999999L; // DateTime.YearSeconds(1) * 20L;
                        selAccount.setRetainedEventAge(retainSec); // 1981/07/17,07:00:00,GMT
                    } else {
                        // leave existing retained-age-sec as-is
                    }
                } else {
                    selAccount.setRetainedEventAge(0L); // disable retention
                }
            }
            // showAllowWebService
            if (showAllowWebService && isSysAdmin) {
                boolean wsAllow = ComboOption.parseYesNoText(locale, wsAllowStr, false);
                if (selAccount.getAllowWebService() != wsAllow) {
                    selAccount.setAllowWebService(wsAllow);
                }
            }
            // private label name ("<n/a>" means the combo was not displayed)
            if (isSysAdmin) {
                if (!acctPrivLabel.equals("<n/a>")) {
                    selAccount.setPrivateLabelName(acctPrivLabel);
                }
            }
            // reverse-geocoder mode
            if (isSysAdmin) {
                selAccount.setGeocoderMode(acctGeoMode, locale);
            }
            // DCS properties ID
            if (isSysAdmin && !selAccount.getDcsPropertiesID().equals(dcsPropsID)) {
                selAccount.setDcsPropertiesID(dcsPropsID);
            }
            // Allow Notification
            if (isSysAdmin) {
                boolean allowNotify = ComboOption.parseYesNoText(locale, ruleAllowStr, true);
                if (selAccount.getAllowNotify() != allowNotify) {
                    selAccount.setAllowNotify(allowNotify);
                }
            }
            // SMS
            if (isSysAdmin) {
                boolean smsEnabled = ComboOption.parseYesNoText(locale, smsEnabledStr, false);
                if (selAccount.getSmsEnabled() != smsEnabled) {
                    selAccount.setSmsEnabled(smsEnabled);
                }
                if (!selAccount.getSmsProperties().equals(smsProps)) {
                    selAccount.setSmsProperties(smsProps);
                }
            }
            // border crossing
            if (Account.SupportsBorderCrossing()) {
                boolean isBCross = ComboOption.parseYesNoText(locale, isBCrossStr, false);
                if (selAccount.getIsBorderCrossing() != isBCross) {
                    selAccount.setIsBorderCrossing(isBCross);
                }
            }
            // auto-add devices
            if (showAutoAddDevices && isSysAdmin) {
                boolean autoAddDev = ComboOption.parseYesNoText(locale, acctAutoDev, false);
                if (selAccount.getAutoAddDevices() != autoAddDev) {
                    selAccount.setAutoAddDevices(autoAddDev);
                }
            }
            // maximum allowed devices ("n/a" or 0 means unlimited)
            if (!StringTools.isBlank(acctMaxDev)) {
                long maxCnt = acctMaxDev.equalsIgnoreCase("n/a")? 0 : StringTools.parseLong(acctMaxDev,0L);
                selAccount.setMaximumDevices(maxCnt);
            }
            // maximum allowed Commands/Locates (negative values clamped to 0)
            if (showMaxPingCount && maxPingCount != selAccount.getMaxPingCount()) {
                selAccount.setMaxPingCount((maxPingCount >= 0)? maxPingCount : 0);
            }
            if (showMaxPingCount && totPingReset) {
                selAccount.resetTotalPingCount(false);
            }
            // dataPush URL
            if (isSysAdmin && showDataPushURL) {
                if (!selAccount.getDataPushURL().equals(dataPushURL)) {
                    selAccount.setDataPushURL(dataPushURL);
                }
            }
            // Notes (sysadmin only, and only when enabled via PrivateLabel property)
            boolean notesOK = isSysAdmin && // !isAccountMgr
                privLabel.getBooleanProperty(PrivateLabel.PROP_SysAdminAccounts_showNotes,SHOW_NOTES);
            if (notesOK && !selAccount.getNotes().equals(noteText)) {
                selAccount.setNotes(noteText);
            }
            // save
            // Persist the accumulated changes: the admin-user password (if
            // changed), the Account record itself, and any temporary-properties
            // Resource; on a validation failure, stay on the edit page instead.
            if (saveOK) {
                if (adminUser != null) {
                    try {
                        adminUser.update(User.FLD_password);
                    } catch (DBException dbe) {
                        // account save proceeds even if the admin-user update fails
                        Print.logError("Error saving 'admin' User password", dbe);
                    }
                }
                if (!isSysAdmin) {
                    // non-sysadmin callers must not alter manager fields
                    selAccount.addExcludedUpdateFields(
                        Account.FLD_isAccountManager,
                        Account.FLD_managerID
                        );
                }
                selAccount.save();
                if (accountProps) {
                    // temporary account properties stored in a Resource record;
                    // blank properties clear the resource, otherwise create/update it
                    String acctTempProps = AttributeTools.getRequestString(request, PARM_ACCT_TEMP_PROPS, "");
                    try {
                        // normalize: newlines collapsed, then re-serialized via RTProperties
                        acctTempProps = (new RTProperties(acctTempProps.replace('\n',' '))).toString();
                        Resource resource = Resource.getResource(selAccount, Resource.RESID_TemporaryProperties);
                        if (StringTools.isBlank(acctTempProps)) {
                            if ((resource != null) && !StringTools.isBlank(resource.getProperties())) {
                                resource.setProperties("");
                                resource.update(Resource.FLD_properties);
                            } else {
                                // no change
                            }
                        } else {
                            if (resource != null) {
                                if (!acctTempProps.equals(resource.getProperties())) {
                                    resource.setProperties(acctTempProps);
                                    resource.update(Resource.FLD_properties);
                                } else {
                                    // no change
                                }
                            } else {
                                // no existing resource record: create one
                                resource = Resource.getResource(selAccount, Resource.RESID_TemporaryProperties, true);
                                resource.setType(Resource.TYPE_RTPROPS);
                                resource.setProperties(acctTempProps);
                                resource.save();
                            }
                        }
                    } catch (DBException dbe) {
                        Print.logException("Unable to save Resource: " + selAccount.getAccountID(), dbe);
                    }
                } // accountProps
                m = i18n.getString("SysAdminAccounts.accountUpdated","Account information updated"); // UserErrMsg
            } else {
                // should stay on this page
                editAccount = !isCurrentAccountSelected;
                listAccounts = false;
            }
        } catch (Throwable t) {
            m = i18n.getString("SysAdminAccounts.errorUpdate","Internal error updating Account"); // UserErrMsg
            error = true;
        }
    }
}
/* Style */
// Deferred writer emitting the stylesheet <link> for this page.
HTMLOutput HTML_CSS = new HTMLOutput() {
    public void write(PrintWriter out) throws IOException {
        WebPageAdaptor.writeCssLink(out, reqState, "SysAdminAccounts.css",
            SysAdminAccounts.this.getCssDirectory());
    }
};
/* JavaScript */
// Deferred writer emitting this page's JavaScript: menu-bar support plus
// the sortable-table include.
HTMLOutput HTML_JS = new HTMLOutput() {
    public void write(PrintWriter out) throws IOException {
        MenuBar.writeJavaScript(out, pageName, reqState);
        String sortableJS = JavaScriptTools.qualifyJSFileRef(SORTTABLE_JS);
        JavaScriptTools.writeJSInclude(out, sortableJS, request);
    }
};
/* Content */
// Final snapshots of the mutable state computed above, required for capture
// by the anonymous HTMLOutput content writer that follows.
final Collection<String> _accountList = accountList;
final String _selAccountID = selAccountID;
final Account _selAccount = selAccount;
final boolean _allowEdit = allowEdit;
final boolean _allowView = allowView;
final boolean _allowNew = allowNew;
final boolean _allowDelete = allowDelete;
final boolean _allowLogin = allowLogin;
final boolean _editAccount = _allowEdit && editAccount;   // editing requires permission
final boolean _viewAccount = _editAccount || viewAccount; // editing implies viewing
final boolean _listAccounts = listAccounts;
final ComboMap _rgList = privLabel.getEnumComboMap(Account.GeocoderMode.class);
final ComboMap _tzList = privLabel.getTimeZoneComboMap();
HTMLOutput HTML_CONTENT = new HTMLOutput(CommonServlet.CSS_CONTENT_FRAME, m) {
public void write(PrintWriter out) throws IOException {
String pageName = SysAdminAccounts.this.getPageName();
boolean notesOK = isSysAdmin && // !isAccountMgr
privLabel.getBooleanProperty(PrivateLabel.PROP_SysAdminAccounts_showNotes,SHOW_NOTES);
// frame header
//String menuURL = EncodeMakeURL(reqState,RequestProperties.TRACK_BASE_URI(),PAGE_MENU_TOP);
String menuURL = privLabel.getWebPageURL(reqState, PAGE_MENU_TOP);
String editURL = SysAdminAccounts.this.encodePageURL(reqState);//,RequestProperties.TRACK_BASE_URI());
String selectURL = SysAdminAccounts.this.encodePageURL(reqState);//,RequestProperties.TRACK_BASE_URI());
String newURL = SysAdminAccounts.this.encodePageURL(reqState);//,RequestProperties.TRACK_BASE_URI());
String frameTitle = _allowNew?
i18n.getString("SysAdminAccounts.createDeleteAccounts","Create/Delete/Edit Accounts") :
i18n.getString("SysAdminAccounts.viewEditAccounts","View/Edit Accounts");
out.write("<span class='"+CommonServlet.CSS_MENU_TITLE+"'>"+frameTitle+"</span><br/>\n");
out.write("<hr>\n");
// account selection table (Select, Account ID, Account Description)
if (_listAccounts) {
// account selection table (Select, Account ID, Account Description)
out.write("<h1 class='"+CommonServlet.CSS_ADMIN_SELECT_TITLE+"'>"+i18n.getString("SysAdminAccounts.selectAccount","Select an Account")+":</h1>\n");
out.write("<div style='margin-left:25px;'>\n");
out.write("<form name='"+FORM_ACCOUNT_SELECT+"' method='post' action='"+selectURL+"' target='_self'>"); // target='_top'
out.write("<input type='hidden' name='"+PARM_COMMAND+"' value='"+COMMAND_INFO_SELECT+"'/>");
out.write("<table class='"+CommonServlet.CSS_ADMIN_SELECT_TABLE+"' cellspacing=0 cellpadding=0 border=0>\n");
out.write(" <thead>\n");
out.write(" <tr class='"+CommonServlet.CSS_ADMIN_TABLE_HEADER_ROW+"'>\n");
out.write(" <th class='"+CommonServlet.CSS_ADMIN_TABLE_HEADER_COL_SEL+"' nowrap>"+filter(i18n.getString("SysAdminAccounts.select","Select"))+"</th>\n");
out.write(" <th class='"+CommonServlet.CSS_ADMIN_TABLE_HEADER_COL +"' nowrap>"+filter(i18n.getString("SysAdminAccounts.accountID","Account ID"))+"</th>\n");
out.write(" <th class='"+CommonServlet.CSS_ADMIN_TABLE_HEADER_COL +"' nowrap>"+filter(i18n.getString("SysAdminAccounts.accountName","Account Description"))+"</th>\n");
out.write(" <th class='"+CommonServlet.CSS_ADMIN_TABLE_HEADER_COL +"' nowrap>"+filter(i18n.getString("SysAdminAccounts.active","Active"))+"</th>\n");
if (hasAccountMgr && (isSysAdmin || isAccountMgr)) {
out.write(" <th class='"+CommonServlet.CSS_ADMIN_TABLE_HEADER_COL +"' nowrap>"+filter(i18n.getString("SysAdminAccounts.manager","Manager"))+"</th>\n");
}
out.write(" <th class='"+CommonServlet.CSS_ADMIN_TABLE_HEADER_COL +"' nowrap>"+filter(i18n.getString("SysAdminAccounts.deviceCount","Device\nCount"))+"</th>\n");
out.write(" <th class='"+CommonServlet.CSS_ADMIN_TABLE_HEADER_COL +"' nowrap>"+filter(i18n.getString("SysAdminAccounts.privateLabel","PrivateLabel\nName"))+"</th>\n");
out.write(" <th class='"+CommonServlet.CSS_ADMIN_TABLE_HEADER_COL +"' nowrap>"+filter(i18n.getString("SysAdminAccounts.created","Created\n{0}",currAcctTZID))+"</th>\n");
out.write(" <th class='"+CommonServlet.CSS_ADMIN_TABLE_HEADER_COL +"' nowrap>"+filter(i18n.getString("SysAdminAccounts.lastLogin","Last Login\n{0}",currAcctTZID))+"</th>\n");
out.write(" <th class='"+CommonServlet.CSS_ADMIN_TABLE_HEADER_COL +"' nowrap>"+filter(i18n.getString("SysAdminAccounts.loggedIn","Logged\nIn Now"))+"</th>\n");
out.write(" </tr>\n");
out.write(" </thead>\n");
out.write(" <tbody>\n");
Map<String,java.util.List<String>> loggedInAccounts = SysAdminAccounts.this.getLoggedInAccounts(reqState);
for (int u = 0; u < ListTools.size(_accountList); u++) {
// get Account
Account acct = null;
try {
acct = Account.getAccount(ListTools.itemAt(_accountList,u,""));
} catch (DBException dbe) {
//
}
if (acct == null) {
continue;
}
String acctID = acct.getAccountID();
String acctDesc = acct.getDescription();
String prvLabelName = acct.getPrivateLabelName();
//if (!prvLabelName.equals("*")) { continue; } // <-- debug/testing
// odd/even row
boolean oddRow = ((u & 1) == 0); // odd row index starts at '0'
if (oddRow) {
out.write(" <tr class='"+CommonServlet.CSS_ADMIN_TABLE_BODY_ROW_ODD+"'>\n");
} else {
out.write(" <tr class='"+CommonServlet.CSS_ADMIN_TABLE_BODY_ROW_EVEN+"'>\n");
}
// display account info
String active = ComboOption.getYesNoText(locale,acct.isActive());
long creationTime = acct.getCreationTime();
String creationStr = (creationTime > 0L)? new DateTime(creationTime,currAcctTZ).format(dtFormat) : i18n.getString("SysAdminAccounts.unknown","unknown");
long lastLoginTime = acct.getLastLoginTime();
long deltaTimeSec = DateTime.getCurrentTimeSec() - lastLoginTime;
String lastLoginStr = (lastLoginTime > 0L)? new DateTime(lastLoginTime,currAcctTZ).format(dtFormat) : i18n.getString("SysAdminAccounts.never","never");
String lastLoginCls = oddRow? "normalLoginDate_odd" : "normalLoginDate_even";
if (deltaTimeSec <= DateTime.DaySeconds(1)) {
// has logged in within the last 24 hours (green)
lastLoginCls = oddRow? "recentLoginDate_odd" : "recentLoginDate_even";
} else
if (deltaTimeSec <= DateTime.DaySeconds(7)) {
// has logged in within the last week (black)
lastLoginCls = oddRow? "normalLoginDate_odd" : "normalLoginDate_even";
} else
if (deltaTimeSec <= DateTime.DaySeconds(21)) {
// has logged in within the last 3 weeks (yellow)
lastLoginCls = oddRow? "oldLoginDate_odd" : "oldLoginDate_even";
} else {
// logged in more than 3 weeks ago (red)
lastLoginCls = oddRow? "veryOldLoginDate_odd" : "veryOldLoginDate_even"; // (196, 54, 54)
}
String deviceCountS = String.valueOf(acct.getDeviceCount());
int loginCount = 0;
String loginCountS = "--"; // ComboOption.getYesNoText(locale,false);
if (loggedInAccounts.containsKey(acctID)) {
java.util.List<String> userList = loggedInAccounts.get(acctID);
loginCount = userList.size();
loginCountS = "(" + loginCount + ")";
}
//if (prvLabelName.equals("*")) { prvLabelName = "default"; }
String checked = _selAccountID.equals(acctID)? " checked" : "";
String viewStyle = currAcctID.equals(acctID)? "background-color:#E5E5E5;" : "background-color:#FFFFFF;";
out.write(" <td class='"+CommonServlet.CSS_ADMIN_TABLE_BODY_COL_SEL+"' "+SORTTABLE_SORTKEY+"='"+u+"' style='"+viewStyle+"'><input type='radio' name='"+PARM_ACCOUNT_SELECT+"' id='"+acctID+"' value='"+acctID+"' "+checked+"></td>\n");
out.write(" <td class='"+CommonServlet.CSS_ADMIN_TABLE_BODY_COL +"' nowrap><label for='"+acctID+"'>"+filter(acctID)+"</label></td>\n");
out.write(" <td class='"+CommonServlet.CSS_ADMIN_TABLE_BODY_COL +"' nowrap>"+filter(acctDesc)+"</td>\n");
out.write(" <td class='"+CommonServlet.CSS_ADMIN_TABLE_BODY_COL +"' nowrap>"+filter(active)+"</td>\n");
if (hasAccountMgr && (isSysAdmin || isAccountMgr)) {
String sortID = "";
String mgrID;
if (isSysAdmin) {
if (acct.isSystemAdmin()) {
mgrID = "***";
sortID = " ";
} else
if (acct.isAccountManager()) {
String m = StringTools.blankDefault(acct.getManagerID(),"?");
mgrID = "*" + m + "*";
sortID = m;
} else {
mgrID = StringTools.blankDefault(acct.getManagerID(),"--");
sortID = mgrID;
}
} else
if (isAccountMgr) {
if (acct.isSystemAdmin()) {
mgrID = ComboOption.getYesNoText(locale,true);
sortID = "1";
} else
if (acct.isAccountManager()) {
mgrID = ComboOption.getYesNoText(locale,true);
sortID = "1";
} else {
mgrID = "--";
sortID = "0";
}
} else {
mgrID = "--";
sortID = "0";
}
out.write(" <td class='"+CommonServlet.CSS_ADMIN_TABLE_BODY_COL+"' "+SORTTABLE_SORTKEY+"='"+sortID+"' nowrap>"+filter(mgrID)+"</td>\n");
}
out.write(" <td class='"+CommonServlet.CSS_ADMIN_TABLE_BODY_COL +"' nowrap>"+filter(deviceCountS)+"</td>\n");
out.write(" <td class='"+CommonServlet.CSS_ADMIN_TABLE_BODY_COL +"' nowrap>"+filter(prvLabelName)+"</td>\n");
out.write(" <td class='"+CommonServlet.CSS_ADMIN_TABLE_BODY_COL +"' "+SORTTABLE_SORTKEY+"='"+creationTime +"' nowrap>"+filter(creationStr)+"</td>\n");
out.write(" <td class='"+CommonServlet.CSS_ADMIN_TABLE_BODY_COL +"' "+SORTTABLE_SORTKEY+"='"+lastLoginTime+"' nowrap><span class='"+lastLoginCls+"'>"+filter(lastLoginStr)+"</span></td>\n");
out.write(" <td class='"+CommonServlet.CSS_ADMIN_TABLE_BODY_COL +"' "+SORTTABLE_SORTKEY+"='"+loginCount +"' nowrap>"+filter(loginCountS)+"</td>\n");
// end of table row
out.write(" </tr>\n");
}
out.write(" </tbody>\n");
out.write("</table>\n");
out.write("<table cellpadding='0' cellspacing='0' border='0' style='width:95%; margin-top:5px; margin-left:5px; margin-bottom:5px;'>\n");
out.write("<tr>\n");
if (_allowView) {
out.write("<td style='padding-left:5px;'>");
out.write("<input type='submit' name='"+PARM_SUBMIT_VIEW+"' value='"+i18n.getString("SysAdminAccounts.view","View")+"'>");
out.write("</td>\n");
}
if (_allowEdit) {
out.write("<td style='padding-left:5px;'>");
out.write("<input type='submit' name='"+PARM_SUBMIT_EDIT+"' value='"+i18n.getString("SysAdminAccounts.edit","Edit")+"'>");
out.write("</td>\n");
}
if (_allowLogin) {
out.write("<td style='padding-left:30px;'>");
out.write("<input type='submit' name='"+PARM_SUBMIT_LOGIN+"' value='"+i18n.getString("SysAdminAccounts.login","Login")+"' "+Onclick_ConfirmLogin(locale)+">");
out.write("</td>\n");
}
out.write("<td style='width:100%; text-align:right; padding-right:10px;'>");
if (_allowDelete) {
out.write("<input type='submit' name='"+PARM_SUBMIT_DEL+"' value='"+i18n.getString("SysAdminAccounts.delete","Delete")+"' "+Onclick_ConfirmDelete(locale)+">");
} else {
out.write(" ");
}
out.write("</td>\n");
out.write("</tr>\n");
out.write("</table>\n");
out.write("</form>\n");
out.write("</div>\n");
out.write("<hr>\n");
/* new Account */
if (_allowNew) {
out.write("<h1 class='"+CommonServlet.CSS_ADMIN_SELECT_TITLE+"'>"+i18n.getString("SysAdminAccounts.createNewAccount","Create a new Account")+":</h1>\n");
out.write("<div style='margin-top:5px; margin-left:5px; margin-bottom:5px;'>\n");
out.write("<form name='"+FORM_ACCOUNT_NEW+"' method='post' action='"+newURL+"' target='_self'>"); // target='_top'
out.write(" <input type='hidden' name='"+PARM_COMMAND+"' value='"+COMMAND_INFO_NEW+"'/>");
out.write(i18n.getString("SysAdminAccounts.accountID","Account ID")+": <input type='text' class='"+CommonServlet.CSS_TEXT_INPUT+"' name='"+PARM_NEW_NAME+"' value='' size='32' maxlength='32'><br>\n");
out.write(" <input type='submit' name='"+PARM_SUBMIT_NEW+"' value='"+i18n.getString("SysAdminAccounts.new","New")+"' style='margin-top:5px; margin-left:10px;'>\n");
out.write("</form>\n");
out.write("</div>\n");
out.write("<hr>\n");
}
} else {
// user view/edit form
/* start of form */
out.write("<form name='"+FORM_ACCOUNT_EDIT+"' method='post' action='"+editURL+"' target='_self'>\n"); // target='_top'
out.write(" <input type='hidden' name='"+PARM_COMMAND+"' value='"+COMMAND_INFO_UPDATE+"'/>\n");
/* password */
String decodedPass = PASSWORD_HOLDER;
boolean showPass = privLabel.getBooleanProperty(PrivateLabel.PROP_SysAdminAccounts_showPasswords,false);
if (showPass && (_selAccount != null)) {
try {
User adminUser = User.getUser(_selAccount, User.getAdminUserID());
if (adminUser != null) {
decodedPass = adminUser.getDecodedPassword(null);
} else {
decodedPass = _selAccount.getDecodedPassword(null);
}
} catch (DBException dbe) {
decodedPass = _selAccount.getDecodedPassword(null);
}
if (decodedPass == null) { decodedPass = "?"; }
}
decodedPass = StringTools.htmlFilterValue(decodedPass);
/* Account fields */
ComboOption acctActive = ComboOption.getYesNoOption(locale, ((_selAccount != null) && _selAccount.isActive()));
String acctDesc = (_selAccount!=null)? _selAccount.getDescription() : "";
ComboOption autoAddDev = ComboOption.getYesNoOption(locale, ((_selAccount != null) && _selAccount.getAutoAddDevices()));
long acctMaxDev = (_selAccount!=null)? _selAccount.getMaximumDevices() : 0;
String contactName = (_selAccount!=null)? _selAccount.getContactName() : "";
String contactPhone= (_selAccount!=null)? _selAccount.getContactPhone() : "";
String contactEmail= (_selAccount!=null)? _selAccount.getContactEmail() : "";
String acctTimeZone= (_selAccount!=null)? _selAccount.getTimeZone() : Account.GetDefaultTimeZone();
String acctPrivLbl = (_selAccount!=null)? _selAccount.getPrivateLabelName() : "";
boolean editSysAdmin= _editAccount && isSysAdmin; // only editable if sys-admin
ComboMap privLblList = null;
if (editSysAdmin) {
// editable PrivateLabel
privLblList = isSysAdmin?
new ComboMap(BasicPrivateLabelLoader.getPrivateLabelNames(true)) :
new ComboMap();
if (!ListTools.containsKey(privLblList, acctPrivLbl)) {
privLblList.insert(acctPrivLbl);
}
if (isSysAdmin) {
if (!ListTools.containsKey(privLblList, "")) {
privLblList.insert("");
}
if (!ListTools.containsKey(privLblList, "*")) {
privLblList.insert("*");
}
}
} else {
// non-editable PrivateLabel
privLblList = new ComboMap();
privLblList.insert(acctPrivLbl);
}
ComboOption geocoderMode = privLabel.getEnumComboOption(Account.getGeocoderMode(_selAccount));
ComboOption isBCross = ComboOption.getYesNoOption(locale, ((_selAccount != null)? _selAccount.isBorderCrossing() : false));
int maxPingCnt = (_selAccount != null)? _selAccount.getMaxPingCount() : 0;
int totalPingCnt = (_selAccount != null)? _selAccount.getTotalPingCount() : 0;
String dcsPropsID = (_selAccount != null)? _selAccount.getDcsPropertiesID() : "";
out.println("<table class='"+CommonServlet.CSS_ADMIN_VIEW_TABLE+"' cellspacing='0' callpadding='0' border='0'>");
long createTS = (_selAccount!=null)? _selAccount.getCreationTime() : 0L;
String createStr = reqState.formatDateTime(createTS , "--");
long lastLoginTS = (_selAccount!=null)? _selAccount.getLastLoginTime() : 0L;
String lastLoginStr = reqState.formatDateTime(lastLoginTS, "--");
out.println(FormRow_TextField(PARM_ACCOUNT_SELECT , false , i18n.getString("SysAdminAccounts.accountID","Account ID")+":" , _selAccountID, 40, 40));
out.println(FormRow_TextField(PARM_ACCT_CREATED , false , i18n.getString("SysAdminAccounts.creationDate","Creation Date")+":" , createStr, 24, 24));
out.println(FormRow_TextField(PARM_ACCT_LAST_LOGIN , false , i18n.getString("SysAdminAccounts.lastLoginDate","Last Login Date")+":" , lastLoginStr, 24, 24));
/* description/password */
out.println(FormRow_Separator());
out.println(FormRow_TextField(PARM_ACCT_DESC , _editAccount , i18n.getString("SysAdminAccounts.accountDesc","Account Description")+":" , acctDesc, 40, 40));
out.println(FormRow_ComboBox (PARM_ACCT_ACTIVE , _editAccount , i18n.getString("SysAdminAccounts.isActive","Is Active")+":" , acctActive, ComboMap.getYesNoMap(locale), "", -1));
out.println(FormRow_TextField(PARM_ACCT_PASSWORD , _editAccount , i18n.getString("SysAdminAccounts.password","Password")+":" , decodedPass, 20, 20));
/* contact/default info */
out.println(FormRow_Separator());
out.println(FormRow_TextField(PARM_ACCT_CONTACT_NAME , _editAccount , i18n.getString("SysAdminAccounts.contactName","Contact Name:") , contactName, 40, 40));
out.println(FormRow_TextField(PARM_ACCT_CONTACT_PHONE, _editAccount , i18n.getString("SysAdminAccounts.contactPhone","Contact Phone:") , contactPhone, 20, 20));
out.println(FormRow_TextField(PARM_ACCT_CONTACT_EMAIL, _editAccount , i18n.getString("SysAdminAccounts.contactEmail","Contact Email:") , contactEmail, 60, 100));
/* initial timezone */
out.println(FormRow_ComboBox (PARM_ACCT_TIMEZONE , _editAccount , i18n.getString("SysAdminAccounts.timeZone","Time Zone:") , acctTimeZone, _tzList, null, 20));
/* device counts */
if (showAutoAddDevices && isSysAdmin) {
out.println(FormRow_ComboBox (PARM_ACCT_AUTODEV , _editAccount , i18n.getString("SysAdminAccounts.autoAddDevices","Allow Auto-Add Devices")+":", autoAddDev, ComboMap.getYesNoMap(locale), "", -1));
}
out.println(FormRow_TextField(PARM_ACCT_MAXDEV , _editAccount , i18n.getString("SysAdminAccounts.maxDevices" ,"Maximum Devices") +":" , String.valueOf(acctMaxDev), 6, 7, i18n.getString("SysAdminAccounts.enter0ForUnlimited","(Enter '0' for unlimited)")));
/* ping count */
if (showMaxPingCount) {
out.print("<tr>");
out.print("<td class='"+CommonServlet.CSS_ADMIN_VIEW_TABLE_HEADER+"' nowrap>"+i18n.getString("SysAdminAccounts.maxCommandCount","Max Commands/Locates")+":</td>");
out.print("<td class='"+CommonServlet.CSS_ADMIN_VIEW_TABLE_DATA+"'>");
out.print(Form_TextField(PARM_ACCT_MAX_PING , editSysAdmin, String.valueOf(maxPingCnt) , 7, 7));
out.print(" " + i18n.getString("SysAdminAccounts.totalCommandCount","Total") + ": ");
out.print(Form_TextField(PARM_ACCT_TOTAL_PING, false , String.valueOf(totalPingCnt), 7, 7));
if (editSysAdmin) {
out.print(" (" + i18n.getString("SysAdminAccounts.pingReset","Check to Reset") + " ");
out.print(Form_CheckBox(PARM_ACCT_PING_RESET, PARM_ACCT_PING_RESET, true, false, null, null));
out.print(")");
}
out.print("</td>");
out.print("</tr>\n");
}
/* Account manager */
if (showAccountManager && isSysAdmin) {
// show account manager fields
out.println(FormRow_Separator());
ComboOption acctManager = ComboOption.getYesNoOption(locale, ((_selAccount != null) && _selAccount.isAccountManager()));
String acctManagerID = (_selAccount != null)? _selAccount.getManagerID() : "";
out.println(FormRow_ComboBox (PARM_ACCT_IS_MANAGER, editSysAdmin , i18n.getString("SysAdminAccounts.isManager","Is Account Manager")+":" , acctManager, ComboMap.getYesNoMap(locale), "", -1));
out.println(FormRow_TextField(PARM_ACCT_MANAGER_ID, editSysAdmin , i18n.getString("SysAdminAccounts.managerID","Account Manager ID")+":" , acctManagerID, 32, 32));
}
/* Data Push URL */
if (showDataPushURL && isSysAdmin) {
out.println(FormRow_Separator());
String dpURL = (_selAccount != null)? _selAccount.getDataPushURL() : "";
out.println(FormRow_TextField(PARM_ACCT_DATA_PUSH_URL, editSysAdmin, i18n.getString("SysAdminAccounts.dataPushURL","Data Push URL")+":" , dpURL, 100, 200));
}
/* privateLabel, geocoderMode, isBorderCrossing */
out.println(FormRow_Separator());
out.println(FormRow_ComboBox (PARM_ACCT_PRIVLABEL , editSysAdmin , i18n.getString("SysAdminAccounts.privateLabelName","PrivateLabel Name")+":" , acctPrivLbl , privLblList, "", -1));
out.println(FormRow_ComboBox (PARM_ACCT_RG_MODE , editSysAdmin , i18n.getString("SysAdminAccounts.geocoderMode","Geocoder Mode:") , geocoderMode, _rgList, null, 10));
if (Account.SupportsBorderCrossing()) {
out.println(FormRow_ComboBox (PARM_ACCT_IS_BCROSS, editSysAdmin, i18n.getString("SysAdminAccounts.isBorderCross","Enable Border Crossing:") , isBCross , ComboMap.getYesNoMap(locale), "", -1));
}
if (isSysAdmin) {
out.println(FormRow_TextField(PARM_ACCT_DCS_PROPS_ID, editSysAdmin, i18n.getString("SysAdminAccounts.dcsPropertiesID","DCS Properties ID")+":" , dcsPropsID , 32, 32));
}
if (isSysAdmin && RTConfig.getBoolean(DBConfig.PROP_Device_checkAccountAllowNotify,false)) {
ComboOption allowNotify = ComboOption.getYesNoOption(locale, ((_selAccount != null)? _selAccount.getAllowNotify() : false));
out.println(FormRow_ComboBox(PARM_ACCT_RULE_ALLOW, editSysAdmin, i18n.getString("SysAdminAccounts.notifyAllow","Notify Enable")+":" , allowNotify , ComboMap.getYesNoMap(locale), "", -1));
}
if (isSysAdmin) {
ComboOption smsEnabled = ComboOption.getYesNoOption(locale, ((_selAccount != null)? _selAccount.getSmsEnabled() : false));
String smsProps = (_selAccount != null)? _selAccount.getSmsProperties() : "";
Account.SMSDefaultState smsState = Account.GetDefaultSmsEnabledState();
if (smsState.equals(Account.SMSDefaultState.ACCOUNT)) {
// display only if Account selectable
out.println(FormRow_ComboBox (PARM_ACCT_SMS_ENABLED , editSysAdmin, i18n.getString("SysAdminAccounts.smsEnabled","Enable SMS")+":" , smsEnabled , ComboMap.getYesNoMap(locale), "", -1));
}
if (!smsState.equals(Account.SMSDefaultState.FALSE)) {
// display if TRUE or ACCOUNT
out.println(FormRow_TextField(PARM_ACCT_SMS_PROPS , editSysAdmin, i18n.getString("SysAdminAccounts.smsProperties","SMS Properties")+":" , smsProps , 80, 130));
}
}
if (showRetainEvents && isSysAdmin) {
// show "Retain Events" field
ComboOption retainEvents = ComboOption.getYesNoOption(locale, ((_selAccount != null) && _selAccount.hasRetainedEventAge()));
out.println(FormRow_ComboBox (PARM_ACCT_RETAIN_EVENTS, editSysAdmin, i18n.getString("SysAdminAccounts.retainEvents","Retain Events")+":" , retainEvents, ComboMap.getYesNoMap(locale), "", -1));
}
if (showAllowWebService && isSysAdmin) {
// show "Allow Web Service" field
ComboOption allowWS = ComboOption.getYesNoOption(locale, ((_selAccount != null) && _selAccount.getAllowWebService()));
out.println(FormRow_ComboBox (PARM_ACCT_WEB_SERVICE, editSysAdmin, i18n.getString("SysAdminAccounts.allowWebService","Allow Web Service")+":" , allowWS , ComboMap.getYesNoMap(locale), "", -1));
}
/* account properties */
if (accountProps) {
try {
String acctTempProps = "";
Resource resource = (_selAccount != null)? Resource.getResource(_selAccount, Resource.RESID_TemporaryProperties) : null;
if (resource != null) {
RTProperties resRtp = resource.getRTProperties();
acctTempProps = resRtp.toString(null, null, "");
}
out.println(FormRow_TextArea(PARM_ACCT_TEMP_PROPS, _editAccount , i18n.getString("SysAdminAccounts.accountProperties" ,"Account Properties")+":", acctTempProps, 7, 75));
} catch (DBException dbe) {
Print.logError("Unable to read Account Resource: " + dbe);
}
}
/* Notes */
if (notesOK) {
String noteText = (_selAccount != null)? StringTools.decodeNewline(_selAccount.getNotes()) : "";
out.println(FormRow_Separator());
out.println(FormRow_TextArea(PARM_ACCT_NOTES, _editAccount, i18n.getString("SysAdminAccounts.notes" ,"General Notes")+":", noteText, 5, 70));
}
out.println("</table>");
/* end of form */
out.write("<hr>\n");
out.write("<span style='padding-left:10px'> </span>\n");
if (_editAccount) {
out.write("<input type='submit' name='"+PARM_SUBMIT_CHG+"' value='"+i18n.getString("SysAdminAccounts.change","Change")+"'>\n");
out.write("<span style='padding-left:10px'> </span>\n");
out.write("<input type='button' name='"+PARM_BUTTON_CANCEL+"' value='"+i18n.getString("SysAdminAccounts.cancel","Cancel")+"' onclick=\"javascript:openURL('"+editURL+"','_top');\">\n");
} else {
out.write("<input type='button' name='"+PARM_BUTTON_BACK+"' value='"+i18n.getString("SysAdminAccounts.back","Back")+"' onclick=\"javascript:openURL('"+editURL+"','_top');\">\n");
}
out.write("</form>\n");
}
}
};
/* write frame */
String onload = error? JS_alert(true,m) : null;
CommonServlet.writePageFrame(
reqState,
onload,null, // onLoad/onUnload
HTML_CSS, // Style sheets
HTML_JS, // Javascript
null, // Navigation
HTML_CONTENT); // Content
}
protected String Onclick_ConfirmLogin(Locale locale)
{
I18N i18n = I18N.getI18N(SysAdminAccounts.class, locale);
String confirmLogin = i18n.getString("SysAdminAccounts.confirmLogin",
"Are you sure you want to login to the selected Account?");
return "onclick=\"return confirm('"+confirmLogin+"');\"";
}
// ------------------------------------------------------------------------
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.checkpoint;
import org.apache.flink.runtime.jobgraph.OperatorID;
import org.apache.flink.runtime.state.CompositeStateHandle;
import org.apache.flink.runtime.state.SharedStateRegistry;
import org.apache.flink.runtime.state.memory.ByteStreamStateHandle;
import org.apache.flink.util.Preconditions;
import javax.annotation.Nullable;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import static org.apache.flink.util.Preconditions.checkState;
/**
 * Simple container class which contains the raw/managed operator state and key-group state handles
 * from all sub tasks of an operator and therefore represents the complete state of a logical
 * operator.
 */
public class OperatorState implements CompositeStateHandle {

    private static final long serialVersionUID = -4845578005863201810L;

    /** The id of the operator. */
    private final OperatorID operatorID;

    /** The handles to states created by the parallel tasks: subtaskIndex -> subtaskstate. */
    private final Map<Integer, OperatorSubtaskState> operatorSubtaskStates;

    /** The state of the operator coordinator. Null, if no such state exists. */
    @Nullable private ByteStreamStateHandle coordinatorState;

    /** The parallelism of the operator when it was checkpointed. */
    private final int parallelism;

    /**
     * The maximum parallelism (for number of keygroups) of the operator when the job was first
     * created.
     */
    private final int maxParallelism;

    /**
     * @param operatorID the id of the operator this state belongs to
     * @param parallelism the parallelism of the operator when it was checkpointed
     * @param maxParallelism the maximum parallelism of the operator
     * @throws IllegalArgumentException if {@code parallelism > maxParallelism}
     */
    public OperatorState(OperatorID operatorID, int parallelism, int maxParallelism) {
        if (parallelism > maxParallelism) {
            throw new IllegalArgumentException(
                    String.format(
                            "Parallelism %s is not smaller or equal to max parallelism %s.",
                            parallelism, maxParallelism));
        }

        this.operatorID = operatorID;
        this.operatorSubtaskStates = new HashMap<>(parallelism);
        this.parallelism = parallelism;
        this.maxParallelism = maxParallelism;
    }

    public OperatorID getOperatorID() {
        return operatorID;
    }

    /** Returns whether all subtasks of this operator finished; this base implementation never is. */
    public boolean isFullyFinished() {
        return false;
    }

    /**
     * Registers the state of one parallel subtask, replacing any previously registered state for
     * the same index.
     *
     * @throws IndexOutOfBoundsException if {@code subtaskIndex} is not within {@code [0,
     *     parallelism)}
     */
    public void putState(int subtaskIndex, OperatorSubtaskState subtaskState) {
        Preconditions.checkNotNull(subtaskState);
        checkSubtaskIndex(subtaskIndex);
        operatorSubtaskStates.put(subtaskIndex, subtaskState);
    }

    /**
     * Returns the state registered for the given parallel subtask, or null if none was registered.
     *
     * @throws IndexOutOfBoundsException if {@code subtaskIndex} is not within {@code [0,
     *     parallelism)}
     */
    public OperatorSubtaskState getState(int subtaskIndex) {
        checkSubtaskIndex(subtaskIndex);
        return operatorSubtaskStates.get(subtaskIndex);
    }

    /**
     * Validates that the given subtask index lies within {@code [0, parallelism)}.
     *
     * <p>The exception message reports {@code parallelism} — the bound actually being checked —
     * rather than {@code operatorSubtaskStates.size()} (the number of states collected so far),
     * which the previous code printed and which was misleading.
     */
    private void checkSubtaskIndex(int subtaskIndex) {
        if (subtaskIndex < 0 || subtaskIndex >= parallelism) {
            throw new IndexOutOfBoundsException(
                    "The given sub task index "
                            + subtaskIndex
                            + " exceeds the maximum number of sub tasks "
                            + parallelism);
        }
    }

    /** Sets the coordinator state. May be called at most once. */
    public void setCoordinatorState(@Nullable ByteStreamStateHandle coordinatorState) {
        checkState(this.coordinatorState == null, "coordinator state already set");
        this.coordinatorState = coordinatorState;
    }

    @Nullable
    public ByteStreamStateHandle getCoordinatorState() {
        return coordinatorState;
    }

    /** Returns an unmodifiable view of the collected subtask states, keyed by subtask index. */
    public Map<Integer, OperatorSubtaskState> getSubtaskStates() {
        return Collections.unmodifiableMap(operatorSubtaskStates);
    }

    public Collection<OperatorSubtaskState> getStates() {
        return operatorSubtaskStates.values();
    }

    /** Returns how many subtask states have been collected so far (may be less than parallelism). */
    public int getNumberCollectedStates() {
        return operatorSubtaskStates.size();
    }

    public int getParallelism() {
        return parallelism;
    }

    public int getMaxParallelism() {
        return maxParallelism;
    }

    /**
     * Creates a copy of this operator state in which the in-flight data (result subpartition and
     * input channel state) of every subtask has been replaced by empty collections.
     *
     * <p>NOTE(review): the coordinator state is not carried over to the copy — confirm this is
     * intentional before relying on it.
     */
    public OperatorState copyAndDiscardInFlightData() {
        OperatorState newState = new OperatorState(operatorID, parallelism, maxParallelism);

        for (Map.Entry<Integer, OperatorSubtaskState> originalSubtaskStateEntry :
                operatorSubtaskStates.entrySet()) {
            newState.putState(
                    originalSubtaskStateEntry.getKey(),
                    originalSubtaskStateEntry
                            .getValue()
                            .toBuilder()
                            .setResultSubpartitionState(StateObjectCollection.empty())
                            .setInputChannelState(StateObjectCollection.empty())
                            .build());
        }

        return newState;
    }

    /** Discards all subtask states and, if present, the coordinator state. */
    @Override
    public void discardState() throws Exception {
        for (OperatorSubtaskState operatorSubtaskState : operatorSubtaskStates.values()) {
            operatorSubtaskState.discardState();
        }

        if (coordinatorState != null) {
            coordinatorState.discardState();
        }
    }

    @Override
    public void registerSharedStates(SharedStateRegistry sharedStateRegistry, long checkpointID) {
        for (OperatorSubtaskState operatorSubtaskState : operatorSubtaskStates.values()) {
            operatorSubtaskState.registerSharedStates(sharedStateRegistry, checkpointID);
        }
    }

    public boolean hasSubtaskStates() {
        return operatorSubtaskStates.size() > 0;
    }

    /** Total state size in bytes: coordinator state plus all collected subtask states. */
    @Override
    public long getStateSize() {
        long result = coordinatorState == null ? 0L : coordinatorState.getStateSize();

        for (int i = 0; i < parallelism; i++) {
            OperatorSubtaskState operatorSubtaskState = operatorSubtaskStates.get(i);
            if (operatorSubtaskState != null) {
                result += operatorSubtaskState.getStateSize();
            }
        }

        return result;
    }

    /** Size in bytes actually persisted by this checkpoint (coordinator state counted in full). */
    @Override
    public long getCheckpointedSize() {
        long result = coordinatorState == null ? 0L : coordinatorState.getStateSize();

        for (int i = 0; i < parallelism; i++) {
            OperatorSubtaskState operatorSubtaskState = operatorSubtaskStates.get(i);
            if (operatorSubtaskState != null) {
                result += operatorSubtaskState.getCheckpointedSize();
            }
        }

        return result;
    }

    // NOTE(review): maxParallelism is deliberately not part of equals/hashCode in the original;
    // kept that way to avoid changing equality semantics callers may rely on.
    @Override
    public boolean equals(Object obj) {
        if (obj instanceof OperatorState) {
            OperatorState other = (OperatorState) obj;

            return operatorID.equals(other.operatorID)
                    && parallelism == other.parallelism
                    && Objects.equals(coordinatorState, other.coordinatorState)
                    && operatorSubtaskStates.equals(other.operatorSubtaskStates);
        } else {
            return false;
        }
    }

    @Override
    public int hashCode() {
        return parallelism + 31 * Objects.hash(operatorID, operatorSubtaskStates);
    }

    @Override
    public String toString() {
        // KvStates are always null in 1.1. Don't print this as it might
        // confuse users that don't care about how we store it internally.
        return "OperatorState("
                + "operatorID: "
                + operatorID
                + ", parallelism: "
                + parallelism
                + ", maxParallelism: "
                + maxParallelism
                + ", coordinatorState: "
                + (coordinatorState == null ? "(none)" : coordinatorState.getStateSize() + " bytes")
                + ", sub task states: "
                + operatorSubtaskStates.size()
                + ", total size (bytes): "
                + getStateSize()
                + ')';
    }
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
package org.elasticsearch.xpack.core.indexing;
import org.elasticsearch.Version;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ToXContentObject;
import java.io.IOException;
import java.util.Objects;
/**
 * This class holds the runtime statistics of a job. The stats are not used by any internal process
 * and are only for external monitoring/reference. Statistics are not persisted with the job, so if the
 * allocated task is shutdown/restarted on a different node all the stats will reset.
 *
 * <p>The class is both a {@code Writeable} (binary transport serialization via the
 * stream constructor / {@link #writeTo}) and a {@code ToXContentObject} (rendering
 * left to subclasses). The field order in the stream constructor and {@code writeTo}
 * must stay in sync — it defines the wire format.
 */
public abstract class IndexerJobStats implements ToXContentObject, Writeable {
    public static final ParseField NAME = new ParseField("job_stats");

    // Invocation/document counters.
    // NOTE(review): "numOuputDocuments" looks like a misspelling of "numOutputDocuments";
    // it is kept as-is because these protected fields are directly visible to subclasses.
    protected long numPages = 0;
    protected long numInputDocuments = 0;
    protected long numOuputDocuments = 0;
    protected long numInvocations = 0;
    // Accumulated phase durations in milliseconds (see the markStart*/markEnd* pairs).
    protected long indexTime = 0;
    protected long searchTime = 0;
    // Number of completed index/search rounds.
    protected long indexTotal = 0;
    protected long searchTotal = 0;
    // Processing duration (ms) and completed processing rounds.
    protected long processingTime = 0;
    protected long processingTotal = 0;
    // Failure counters, incremented via incrementIndexingFailures()/incrementSearchFailures().
    protected long indexFailures = 0;
    protected long searchFailures = 0;

    // System.nanoTime() captured at the start of the current phase; only meaningful
    // between a markStart* call and the matching markEnd* call. Not serialized.
    private long startIndexTime;
    private long startSearchTime;
    private long startProcessingTime;

    /** Creates stats with all counters at zero. */
    public IndexerJobStats() {
    }

    /** Creates stats pre-populated with the given counter values. */
    public IndexerJobStats(long numPages, long numInputDocuments, long numOuputDocuments, long numInvocations,
                           long indexTime, long searchTime, long processingTime, long indexTotal, long searchTotal,
                           long processingTotal, long indexFailures, long searchFailures) {
        this.numPages = numPages;
        this.numInputDocuments = numInputDocuments;
        this.numOuputDocuments = numOuputDocuments;
        this.numInvocations = numInvocations;
        this.indexTime = indexTime;
        this.searchTime = searchTime;
        this.processingTime = processingTime;
        this.indexTotal = indexTotal;
        this.searchTotal = searchTotal;
        this.processingTotal = processingTotal;
        this.indexFailures = indexFailures;
        this.searchFailures = searchFailures;
    }

    /**
     * Reads stats from the transport stream. The read order mirrors {@link #writeTo};
     * the processing stats were added in 7.7.0 and are only present on the wire when
     * the remote node is at least that version (otherwise they stay 0).
     */
    public IndexerJobStats(StreamInput in) throws IOException {
        this.numPages = in.readVLong();
        this.numInputDocuments = in.readVLong();
        this.numOuputDocuments = in.readVLong();
        this.numInvocations = in.readVLong();
        this.indexTime = in.readVLong();
        this.searchTime = in.readVLong();
        this.indexTotal = in.readVLong();
        this.searchTotal = in.readVLong();
        this.indexFailures = in.readVLong();
        this.searchFailures = in.readVLong();
        if (in.getVersion().onOrAfter(Version.V_7_7_0)) {
            this.processingTime = in.readVLong();
            this.processingTotal = in.readVLong();
        }
    }

    public long getNumPages() {
        return numPages;
    }

    public long getNumDocuments() {
        return numInputDocuments;
    }

    public long getNumInvocations() {
        return numInvocations;
    }

    public long getOutputDocuments() {
        return numOuputDocuments;
    }

    public long getIndexFailures() {
        return indexFailures;
    }

    public long getSearchFailures() {
        return searchFailures;
    }

    /** @return accumulated indexing time in milliseconds */
    public long getIndexTime() {
        return indexTime;
    }

    /** @return accumulated search time in milliseconds */
    public long getSearchTime() {
        return searchTime;
    }

    /** @return accumulated processing time in milliseconds */
    public long getProcessingTime() {
        return processingTime;
    }

    public long getIndexTotal() {
        return indexTotal;
    }

    public long getSearchTotal() {
        return searchTotal;
    }

    public long getProcessingTotal() {
        return processingTotal;
    }

    public void incrementNumPages(long n) {
        assert(n >= 0);
        numPages += n;
    }

    public void incrementNumDocuments(long n) {
        assert(n >= 0);
        numInputDocuments += n;
    }

    public void incrementNumInvocations(long n) {
        assert(n >= 0);
        numInvocations += n;
    }

    public void incrementNumOutputDocuments(long n) {
        assert(n >= 0);
        numOuputDocuments += n;
    }

    public void incrementIndexingFailures() {
        this.indexFailures += 1;
    }

    public void incrementSearchFailures() {
        this.searchFailures += 1;
    }

    /** Records the start of an indexing round; pair with {@link #markEndIndexing()}. */
    public void markStartIndexing() {
        this.startIndexTime = System.nanoTime();
    }

    /** Accumulates the elapsed indexing time (nanos converted to millis) and bumps the round count. */
    public void markEndIndexing() {
        indexTime += ((System.nanoTime() - startIndexTime) / 1000000);
        indexTotal += 1;
    }

    /** Records the start of a search round; pair with {@link #markEndSearch()}. */
    public void markStartSearch() {
        this.startSearchTime = System.nanoTime();
    }

    /** Accumulates the elapsed search time (nanos converted to millis) and bumps the round count. */
    public void markEndSearch() {
        searchTime += ((System.nanoTime() - startSearchTime) / 1000000);
        searchTotal += 1;
    }

    /** Records the start of a processing round; pair with {@link #markEndProcessing()}. */
    public void markStartProcessing() {
        this.startProcessingTime = System.nanoTime();
    }

    /** Accumulates the elapsed processing time (nanos converted to millis) and bumps the round count. */
    public void markEndProcessing() {
        processingTime += ((System.nanoTime() - startProcessingTime) / 1000000);
        processingTotal += 1;
    }

    /**
     * Writes stats to the transport stream. Order must match the stream constructor;
     * processing stats are only written to nodes on 7.7.0 or later.
     */
    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeVLong(numPages);
        out.writeVLong(numInputDocuments);
        out.writeVLong(numOuputDocuments);
        out.writeVLong(numInvocations);
        out.writeVLong(indexTime);
        out.writeVLong(searchTime);
        out.writeVLong(indexTotal);
        out.writeVLong(searchTotal);
        out.writeVLong(indexFailures);
        out.writeVLong(searchFailures);
        if (out.getVersion().onOrAfter(Version.V_7_7_0)) {
            out.writeVLong(processingTime);
            out.writeVLong(processingTotal);
        }
    }

    @Override
    public boolean equals(Object other) {
        if (this == other) {
            return true;
        }

        if (other == null || getClass() != other.getClass()) {
            return false;
        }

        IndexerJobStats that = (IndexerJobStats) other;
        // The transient start* timestamps are intentionally excluded.
        return Objects.equals(this.numPages, that.numPages)
            && Objects.equals(this.numInputDocuments, that.numInputDocuments)
            && Objects.equals(this.numOuputDocuments, that.numOuputDocuments)
            && Objects.equals(this.numInvocations, that.numInvocations)
            && Objects.equals(this.indexTime, that.indexTime)
            && Objects.equals(this.searchTime, that.searchTime)
            && Objects.equals(this.processingTime, that.processingTime)
            && Objects.equals(this.indexFailures, that.indexFailures)
            && Objects.equals(this.searchFailures, that.searchFailures)
            && Objects.equals(this.indexTotal, that.indexTotal)
            && Objects.equals(this.searchTotal, that.searchTotal)
            && Objects.equals(this.processingTotal, that.processingTotal);
    }

    @Override
    public int hashCode() {
        return Objects.hash(numPages, numInputDocuments, numOuputDocuments, numInvocations,
            indexTime, searchTime, processingTime, indexFailures, searchFailures, indexTotal, searchTotal, processingTotal);
    }
}
| |
package com.bazaarvoice.emodb.auth.permissions;
import com.bazaarvoice.emodb.auth.permissions.matching.AnyPart;
import com.bazaarvoice.emodb.auth.permissions.matching.ConstantPart;
import com.bazaarvoice.emodb.auth.permissions.matching.MatchingPart;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import org.apache.shiro.authz.Permission;
import org.apache.shiro.authz.permission.InvalidPermissionStringException;
import java.io.Serializable;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.regex.Pattern;
import static com.google.common.base.Preconditions.checkArgument;
import static java.util.Objects.requireNonNull;
/**
 * Similar to {@link org.apache.shiro.authz.permission.WildcardPermission} with the following differences:
 * <ol>
 *     <li>Parts are separated by "|" instead of ":", since ":" occurs commonly in emo table names</li>
 *     <li>Provides basic wildcard support, although the implementation is extensible to provide further wildcard capabilities.</li>
 *     <li>Provides methods for escaping separators and wildcards.</li>
 * </ol>
 */
public class MatchingPermission implements Permission, Serializable {

    // Literal separator as it appears in permission strings, e.g. "resource|action|target".
    private final static String SEPARATOR = "|";
    // Regex matching a separator character (Java string "\\|" is the regex \|).
    private final static String UNESCAPED_SEPARATOR_REGEX = "\\|";
    // replaceAll() *replacement* producing a backslash-escaped separator, "\|"
    // (in a replacement string, "\\\\" yields a single backslash).
    private final static String SEPARATOR_ESCAPE = "\\\\|";
    // Regex matching an already-escaped separator, i.e. the two characters "\|".
    private final static String ESCAPED_SEPARATOR_REGEX = "\\\\\\|";
    // Splits on "|" only when NOT preceded by a backslash (negative lookbehind),
    // so escaped separators survive the split intact.
    private final static Pattern SEPARATOR_SPLIT_PATTERN = Pattern.compile("(?<!\\\\)\\|");
    // Regex matching a wildcard character "*".
    private final static String UNESCAPED_WILDCARD_REGEX = "\\*";
    // replaceAll() *replacement* producing an escaped wildcard, "\*".
    private final static String WILDCARD_ESCAPE = "\\\\\\*";
    // A part consisting of exactly this string matches any value for that position.
    private final static String ANY_INDICATOR = "*";

    // The original, unparsed permission string; also the basis for equals/hashCode/toString.
    private final String _permission;
    // Parsed parts; populated by initializePermission() (possibly deferred by subclasses).
    private List<MatchingPart> _parts;

    /**
     * Creates and fully initializes a permission from its string form.
     * @throws InvalidPermissionStringException if the string is empty or cannot be parsed
     */
    public MatchingPermission(String permission) {
        this(permission, true);
    }

    // NOTE(review): a null permission throws NullPointerException (from requireNonNull)
    // rather than InvalidPermissionStringException — confirm callers expect that.
    protected MatchingPermission(String permission, boolean initializePermission) {
        _permission = requireNonNull(permission, "permission");
        if ("".equals(permission.trim())) {
            throw new InvalidPermissionStringException("Permission must be a non-null, non-empty string", permission);
        }
        if (initializePermission) {
            initializePermission();
        }
    }

    /**
     * Parses and initializes the permission's parts. By default this is performed by the constructor. If a subclass
     * needs to perform its own initialization prior to initializing the permissions then it should call the constructor
     * with the initialization parameter set to false and then call this method when ready.
     */
    protected void initializePermission() {
        try {
            List<MatchingPart> parts = Lists.newArrayList();
            for (String partString : split(_permission)) {
                partString = partString.trim();
                checkArgument(!"".equals(partString), "Permission cannot contain empty parts");
                // Each part may inspect the (immutable view of the) parts parsed before it.
                MatchingPart part = toPart(Collections.unmodifiableList(parts), partString);
                parts.add(part);
            }
            _parts = ImmutableList.copyOf(parts);
        } catch (InvalidPermissionStringException e) {
            throw e;
        } catch (Exception e) {
            // Rethrow any uncaught exception as being caused by an invalid permission string
            throw new InvalidPermissionStringException(e.getMessage(), _permission);
        }
    }

    /** Convenience constructor: joins the given parts with the "|" separator. */
    public MatchingPermission(String... parts) {
        this(Joiner.on(SEPARATOR).join(parts));
    }

    private Iterable<String> split(String permission) {
        // Use the split pattern to avoid splitting on escaped separators.
        return Arrays.asList(SEPARATOR_SPLIT_PATTERN.split(permission));
    }

    /**
     * Converts a String part into the corresponding {@link MatchingPart}.
     */
    protected MatchingPart toPart(List<MatchingPart> leadingParts, String part) {
        if (ANY_INDICATOR.equals(part)) {
            return getAnyPart();
        }
        // This part is a constant string
        return createConstantPart(part);
    }

    /**
     * A permission implies another when every one of its parts implies the corresponding
     * part of the other; any trailing parts of this permission must all be wildcards.
     */
    @Override
    public boolean implies(Permission p) {
        if (!(p instanceof MatchingPermission)) {
            return false;
        }
        MatchingPermission other = (MatchingPermission) p;

        int commonLength = Math.min(_parts.size(), other._parts.size());
        for (int i=0; i < commonLength; i++) {
            if (!_parts.get(i).implies(other._parts.get(i), other._parts.subList(0, i))) {
                return false;
            }
        }

        // If this had more parts than the other permission then only pass if all remaining parts are wildcards
        while (commonLength < _parts.size()) {
            if (!_parts.get(commonLength++).impliesAny()) {
                return false;
            }
        }

        // It's possible the other also has more parts, but in this case it's narrower than this permission and
        // hence is still implied by it.
        return true;
    }

    /**
     * Some permissions fall into one of the following categories:
     *
     * <ol>
     *     <li>The permission is intended for validation purposes only, such as for creating a table.</li>
     *     <li>The permission format is deprecated and no new permissions of the format are allowed.</li>
     * </ol>
     *
     * This method returns true if the permission should be assignable to a user/role, false otherwise.
     */
    public boolean isAssignable() {
        for (MatchingPart part : _parts) {
            if (!part.isAssignable()) {
                return false;
            }
        }
        return true;
    }

    /**
     * Returns a string escaped so it will be interpreted literally by the matcher. Specifically it converts all
     * '|' and '*' characters to "\|" and "\*" respectively.
     */
    public static String escape(String raw) {
        requireNonNull(raw, "raw");
        String escaped = raw;
        escaped = escaped.replaceAll(UNESCAPED_WILDCARD_REGEX, WILDCARD_ESCAPE);
        escaped = escapeSeparators(escaped);
        return escaped;
    }

    /**
     * Returns a string with only the separators escaped, unlike {@link #escape(String)} which also protects
     * wildcards. This is useful for subclasses which have their own formatting that needs to be protected from being split.
     */
    public static String escapeSeparators(String raw) {
        return raw.replaceAll(UNESCAPED_SEPARATOR_REGEX, SEPARATOR_ESCAPE);
    }

    /**
     * Returns a string with the modifications made by {@link #escapeSeparators(String)} reversed.
     */
    public static String unescapeSeparators(String escaped) {
        return escaped.replaceAll(ESCAPED_SEPARATOR_REGEX, SEPARATOR);
    }

    /** Returns the parsed parts; valid only after {@link #initializePermission()} has run. */
    protected final List<MatchingPart> getParts() {
        return _parts;
    }

    /** Factory hook: subclasses may return a specialized constant part. */
    protected ConstantPart createConstantPart(String value) {
        return new ConstantPart(value);
    }

    /** Factory hook: subclasses may return a specialized wildcard part. */
    protected AnyPart getAnyPart() {
        return AnyPart.instance();
    }

    // Equality is based solely on the raw permission string, not the parsed parts.
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }

        MatchingPermission that = (MatchingPermission) o;
        return _permission.equals(that._permission);
    }

    @Override
    public int hashCode() {
        return _permission.hashCode();
    }

    @Override
    public String toString() {
        return _permission;
    }
}
| |
/*
* The MIT License (MIT)
*
* Copyright 2017 Alexander Orlov <alexander.orlov@loxal.net>. All rights reserved.
* Copyright (c) [2016] [ <ether.camp> ]
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*
*/
package org.ethereum.net.shh;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.ethereum.crypto.ECKey;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.spongycastle.util.encoders.Hex;
import java.io.BufferedReader;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.math.BigInteger;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
/**
* Whisper implementation which works through JSON RPC API
*
* Created by Anton Nashatyrev on 05.10.2015.
*/
public class JsonRpcWhisper extends Whisper {
private final static Logger logger = LoggerFactory.getLogger("net.shh");
private final URL rpcUrl;
private final Map<Integer, MessageWatcher> watchers = new HashMap<>();
public JsonRpcWhisper(final URL rpcUrl) {
this.rpcUrl = rpcUrl;
final ScheduledExecutorService poller = Executors.newSingleThreadScheduledExecutor();
poller.scheduleAtFixedRate(() -> {
try {
pollFilters();
} catch (final Exception e) {
logger.error("Unhandled exception", e);
}
}, 1, 1, TimeUnit.SECONDS);
}
private static String add0X(final String s) {
if (s == null) return null;
return s.startsWith("0x") ? s : "0x" + s;
}
private static String del0X(final String s) {
if (s == null) return null;
return s.startsWith("0x") ? s.substring(2) : s;
}
private static String encodeString(final String s) {
return s == null ? null : "0x" + Hex.toHexString(s.getBytes());
}
private static String decodeString(String s) {
if (s.startsWith("0x")) s = s.substring(2);
return new String(Hex.decode(s));
}
public static void main(final String[] args) throws Exception {
final String json = "{\"jsonrpc\":\"2.0\",\n" +
" \n" +
" \"method\":\"shh_newIdentity\",\n" +
" \"params\": [{ \"payload\": \"Hello\", \"ttl\": \"100\", \"to\" : \"0xbd27a63c91fe3233c5777e6d3d7b39204d398c8f92655947eb5a373d46e1688f022a1632d264725cbc7dc43ee1cfebde42fa0a86d08b55d2acfbb5e9b3b48dc5\", \"from\": \"id1\" }],\n" +
" \"id\":1001\n" +
"}";
final JsonRpcWhisper rpcWhisper = new JsonRpcWhisper(new URL("http://localhost:8545"));
// JsonRpcResponse resp = rpcWhisper.sendJson(new JsonRpcRequest("shh_post",
// new PostParams("Hello").to("0xbd27a63c91fe3233c5777e6d3d7b39204d398c8f92655947eb5a373d46e1688f022a1632d264725cbc7dc43ee1cfebde42fa0a86d08b55d2acfbb5e9b3b48dc5")));
// Hex.decode("7d04a8170c432240dcf544e27610cc3a10a32c6a5f8ff8cf5a06d26ee0d37da4075701ff03cee88d50885ff56bcd9a5070ff98b9a3045d6ff32e0f1821c21f87")
rpcWhisper.send(null, null, "Hello C++ Whisper".getBytes(), Topic.createTopics("ATopic"), 60, 1);
rpcWhisper.watch(new MessageWatcher(null,
null, Topic.createTopics("ATopic")) {
@Override
protected void newMessage(final WhisperMessage msg) {
System.out.println("JsonRpcWhisper.newMessage:" + "msg = [" + msg + "]");
}
});
Thread.sleep(1000000000);
// String resp = rpcWhisper.sendPost(json);
// System.out.println("Resp: " + resp);
}
@Override
public String addIdentity(final ECKey key) {
throw new RuntimeException("Not supported by public JSON RPC API");
}
@Override
public String newIdentity() {
final SimpleResponse resp = sendJson(new JsonRpcRequest("shh_newIdentity", null), SimpleResponse.class);
return del0X(resp.result);
}
@Override
public void watch(final MessageWatcher f) {
final String[] topics = f.getTopics().length == 0 ? null : new String[f.getTopics().length];
for (int i = 0; i < f.getTopics().length; i++) {
topics[i] = f.getTopics()[i].getOriginalTopic();
}
final FilterParams params = new FilterParams(add0X(f.getTo()), topics);
final SimpleResponse resp = sendJson(new JsonRpcRequest("shh_newFilter", params), SimpleResponse.class);
final int filterId = Integer.parseInt(del0X(resp.result), 16);
watchers.put(filterId, f);
}
@Override
public void unwatch(final MessageWatcher f) {
int filterId = -1;
for (final Map.Entry<Integer, MessageWatcher> entry : watchers.entrySet()) {
if (entry.getValue() == f) {
filterId = entry.getKey();
break;
}
}
if (filterId == -1) return;
sendJson(new JsonRpcRequest("shh_uninstallFilter",
add0X(Integer.toHexString(filterId))), SimpleResponse.class);
}
private String fromAddress(String s) {
if (s == null) return null;
s = del0X(s);
final BigInteger i = new BigInteger(s, 16);
if (i.bitCount() > 0) {
return s;
}
return null;
}
/**
 * Polls every installed filter for new messages ({@code shh_getFilterChanges})
 * and delivers each decoded message to the watcher that owns the filter.
 */
private void pollFilters() {
    for (final Map.Entry<Integer, MessageWatcher> entry : watchers.entrySet()) {
        final String filterId = add0X(Integer.toHexString(entry.getKey()));
        final MessagesResponse changes = sendJson(
                new JsonRpcRequest("shh_getFilterChanges", filterId), MessagesResponse.class);
        for (final MessageParams raw : changes.result) {
            // Topics come back "0x"-prefixed hex; absent topics stay null.
            Topic[] topics = null;
            if (raw.topics != null) {
                topics = new Topic[raw.topics.length];
                for (int i = 0; i < topics.length; i++) {
                    topics[i] = new Topic(Hex.decode(del0X(raw.topics[i])));
                }
            }
            entry.getValue().newMessage(new WhisperMessage()
                    .setPayload(decodeString(raw.payload))
                    .setFrom(fromAddress(raw.from))
                    .setTo(fromAddress(raw.to))
                    .setTopics(topics));
        }
    }
}
/**
 * Posts a Whisper message via {@code shh_post}.
 *
 * @param from        sender identity (hex, may be null)
 * @param to          recipient identity (hex, may be null)
 * @param payload     message body bytes; converted to a String for transport
 * @param topics      topics tagging the message (must not be null)
 * @param ttl         time to live
 * @param workToProve priority / proof-of-work parameter
 * @throws RuntimeException if the node does not acknowledge the post with "true"
 */
@Override
public void send(final String from, final String to, final byte[] payload, final Topic[] topics, final int ttl, final int workToProve) {
    final String[] topicStrings = new String[topics.length];
    int idx = 0;
    for (final Topic topic : topics) {
        topicStrings[idx++] = topic.getOriginalTopic();
    }
    final MessageParams params = new MessageParams(
            new String(payload), add0X(to), add0X(from), topicStrings, ttl, workToProve);
    final SimpleResponse response = sendJson(new JsonRpcRequest("shh_post", params), SimpleResponse.class);
    if (!"true".equals(response.result)) {
        throw new RuntimeException("Shh post failed: " + response);
    }
}
/**
 * Serializes a request to JSON, POSTs it to the node, and deserializes the
 * answer, failing fast when the response carries a JSON RPC error object.
 *
 * @param req       the request to send
 * @param respClazz concrete response type to map the answer onto
 * @return the parsed response (guaranteed error-free by {@code throwIfError})
 * @throws RuntimeException on I/O or mapping problems, quoting both payloads
 */
private <RespType extends JsonRpcResponse> RespType sendJson(final JsonRpcRequest req, final Class<RespType> respClazz) {
    String outbound = null;
    String inbound = null;
    try {
        final ObjectMapper mapper = new ObjectMapper();
        outbound = mapper.writeValueAsString(req);
        logger.debug("JSON RPC Outbound: " + outbound);
        inbound = sendPost(outbound);
        logger.debug("JSON RPC Inbound: " + inbound);
        final RespType response = mapper.readValue(inbound, respClazz);
        response.throwIfError();
        return response;
    } catch (final IOException e) {
        throw new RuntimeException("Error processing JSON (Sent: " + outbound + ", Received: " + inbound + ")", e);
    }
}
/**
 * Sends the given JSON body to {@code rpcUrl} as an HTTP POST and returns the
 * raw response body with line breaks stripped.
 * <p>
 * Fixes over the previous version: the output stream and reader are closed via
 * try-with-resources even when an exception is thrown mid-transfer (they used
 * to leak), and the request/response bytes are explicit UTF-8 — JSON's charset
 * — instead of {@code DataOutputStream.writeBytes} (which silently truncates
 * every char to its low byte) and the platform default charset on read.
 *
 * @param urlParams the JSON request body
 * @return the response body
 * @throws RuntimeException on any I/O failure or a non-200 status code
 */
private String sendPost(final String urlParams) {
    try {
        final HttpURLConnection con = (HttpURLConnection) rpcUrl.openConnection();
        // add request header
        con.setRequestMethod("POST");
        // Send post request
        con.setDoOutput(true);
        try (final DataOutputStream wr = new DataOutputStream(con.getOutputStream())) {
            wr.write(urlParams.getBytes(java.nio.charset.StandardCharsets.UTF_8));
            wr.flush();
        }
        final int responseCode = con.getResponseCode();
        if (responseCode != 200) {
            throw new RuntimeException("HTTP Response: " + responseCode);
        }
        final StringBuilder response = new StringBuilder();
        try (final BufferedReader in = new BufferedReader(
                new InputStreamReader(con.getInputStream(), java.nio.charset.StandardCharsets.UTF_8))) {
            String inputLine;
            while ((inputLine = in.readLine()) != null) {
                response.append(inputLine);
            }
        }
        return response.toString();
    } catch (final IOException e) {
        throw new RuntimeException("Error sending POST to " + rpcUrl, e);
    }
}
/**
 * JSON RPC 2.0 request envelope. Jackson serializes the public fields by
 * name; null fields (e.g. an absent {@code params} list) are omitted.
 *
 * @param <ParamT> type of the single positional parameter, if any
 */
@JsonInclude(JsonInclude.Include.NON_NULL)
public static class JsonRpcRequest<ParamT> {
    // Monotonically increasing id shared by all requests in this VM so that
    // responses can be correlated with the requests that produced them.
    private static final AtomicInteger idCount = new AtomicInteger(0);
    public final String jsonrpc = "2.0";
    public final String method;
    public final List<ParamT> params;
    public int id = idCount.incrementAndGet();

    public JsonRpcRequest(final String method, final ParamT params) {
        this.method = method;
        if (params == null) {
            this.params = null;
        } else {
            this.params = Collections.singletonList(params);
        }
    }
}
/**
 * Payload object for {@code shh_post} and the element type of
 * {@code shh_getFilterChanges} responses. Jackson maps the public fields by
 * name; null fields are omitted on serialization.
 * <p>
 * Fixes over the previous version: the constructor hex-encodes into a private
 * copy of the topics array instead of rewriting the caller's array elements in
 * place; a null topics array is tolerated (it used to NPE); and a null ttl
 * keeps the field default instead of NPE-ing on unboxing.
 */
@JsonInclude(JsonInclude.Include.NON_NULL)
public static class MessageParams {
    public String payload;
    public String to;
    public String from;
    public String[] topics;
    public String ttl = "0x60";
    public Integer priority;
    // response fields
    public String hash;
    public String expiry;
    public String sent;
    public String workProved;

    public MessageParams() {
    }

    public MessageParams(final String payload, final String to, final String from, final String[] topics, final Integer ttl, final Integer priority) {
        this.payload = encodeString(payload);
        this.to = to;
        this.from = from;
        if (topics != null) {
            // Encode into a copy so the caller's array is left untouched.
            this.topics = new String[topics.length];
            for (int i = 0; i < topics.length; i++) {
                this.topics[i] = encodeString(topics[i]);
            }
        }
        if (ttl != null) {
            this.ttl = "0x" + Integer.toHexString(ttl);
        }
        this.priority = priority;
    }
}
/**
 * Parameters for {@code shh_newFilter}: an optional recipient plus an
 * optional topic list, each topic hex-encoded for transport.
 * <p>
 * Fix: topics are encoded into a defensive copy; the previous version rewrote
 * the caller's array elements in place.
 */
@JsonInclude(JsonInclude.Include.NON_NULL)
public static class FilterParams {
    public final String to;
    public final String[] topics;

    public FilterParams(final String to, final String[] topics) {
        this.to = to;
        if (topics == null) {
            this.topics = null;
        } else {
            this.topics = new String[topics.length];
            for (int i = 0; i < topics.length; i++) {
                this.topics[i] = encodeString(topics[i]);
            }
        }
    }
}
/**
 * Common base of all JSON RPC responses: correlation id, protocol version,
 * and the optional error object the server attaches on failure.
 */
public static class JsonRpcResponse {
    public int id;
    public String jsonrpc;
    public Error error;

    /** Turns a server-reported error into a {@link RuntimeException}; no-op on success. */
    public void throwIfError() {
        if (error == null) {
            return;
        }
        throw new RuntimeException("JSON RPC returned error (" + error.code + "): " + error.message);
    }

    /** Error object as defined by the JSON RPC 2.0 specification. */
    public static class Error {
        public int code;
        public String message;
    }
}
/** Response whose {@code result} is a plain string (identities, booleans, hex numbers). */
public static class SimpleResponse extends JsonRpcResponse {
    public String result;

    @Override
    public String toString() {
        // Keeps the historical "JsonRpcResponse{...}" prefix for log compatibility.
        final StringBuilder sb = new StringBuilder("JsonRpcResponse{");
        sb.append("id=").append(id);
        sb.append(", jsonrpc='").append(jsonrpc).append('\'');
        sb.append(", result='").append(result).append('\'');
        sb.append('}');
        return sb.toString();
    }
}
/** Response whose {@code result} is a list of Whisper messages. */
public static class MessagesResponse extends JsonRpcResponse {
    public List<MessageParams> result;

    @Override
    public String toString() {
        // Fix: a ", " separator was missing between the jsonrpc and result
        // fields (inconsistent with SimpleResponse.toString()).
        return "MessagesResponse{" +
                "id=" + id +
                ", jsonrpc='" + jsonrpc + '\'' +
                ", result=" + result +
                '}';
    }
}
}
| |
/*
* Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.elastictranscoder.model;
import java.io.Serializable;
/**
* <p>
* When you update a pipeline, Elastic Transcoder returns the values that
* you specified in the request.
* </p>
*/
/**
 * When you update a pipeline, Elastic Transcoder returns the values that you
 * specified in the request.
 */
public class UpdatePipelineResult implements Serializable, Cloneable {

    /** The pipeline (queue) that is used to manage jobs. */
    private Pipeline pipeline;

    /**
     * Elastic Transcoder returns a warning if the resources used by your
     * pipeline are not in the same region as the pipeline. Using resources in
     * the same region, such as your Amazon S3 buckets, Amazon SNS notification
     * topics, and AWS KMS key, reduces processing time and prevents
     * cross-regional charges.
     */
    private com.amazonaws.internal.ListWithAutoConstructFlag<Warning> warnings;

    /**
     * Returns the pipeline (queue) that is used to manage jobs.
     *
     * @return The pipeline (queue) that is used to manage jobs.
     */
    public Pipeline getPipeline() {
        return pipeline;
    }

    /**
     * Sets the pipeline (queue) that is used to manage jobs.
     *
     * @param pipeline The pipeline (queue) that is used to manage jobs.
     */
    public void setPipeline(Pipeline pipeline) {
        this.pipeline = pipeline;
    }

    /**
     * Sets the pipeline (queue) that is used to manage jobs.
     *
     * @param pipeline The pipeline (queue) that is used to manage jobs.
     * @return A reference to this object so that method calls can be chained.
     */
    public UpdatePipelineResult withPipeline(Pipeline pipeline) {
        this.pipeline = pipeline;
        return this;
    }

    /**
     * Returns the cross-region resource warnings for this pipeline.
     * <p>
     * Never returns null: an empty auto-construct list is created on first
     * access. Callers — including {@link #withWarnings(Warning...)},
     * {@code toString}, {@code equals} and {@code hashCode} — rely on this.
     *
     * @return the warnings, possibly an empty list.
     */
    public java.util.List<Warning> getWarnings() {
        if (warnings == null) {
            warnings = new com.amazonaws.internal.ListWithAutoConstructFlag<Warning>();
            warnings.setAutoConstruct(true);
        }
        return warnings;
    }

    /**
     * Replaces the cross-region resource warnings for this pipeline.
     *
     * @param warnings the new warnings; null clears the list.
     */
    public void setWarnings(java.util.Collection<Warning> warnings) {
        if (warnings == null) {
            this.warnings = null;
            return;
        }
        com.amazonaws.internal.ListWithAutoConstructFlag<Warning> warningsCopy =
                new com.amazonaws.internal.ListWithAutoConstructFlag<Warning>(warnings.size());
        warningsCopy.addAll(warnings);
        this.warnings = warningsCopy;
    }

    /**
     * Appends the given warnings to the existing list (if any). Use
     * {@link #setWarnings(java.util.Collection)} or
     * {@link #withWarnings(java.util.Collection)} to replace the list instead.
     * <p>
     * Fix: the previous version guarded on {@code getWarnings() == null},
     * which can never be true because {@link #getWarnings()} lazily creates
     * the list; the dead branch has been removed.
     *
     * @param warnings the warnings to append.
     * @return A reference to this object so that method calls can be chained.
     */
    public UpdatePipelineResult withWarnings(Warning... warnings) {
        for (Warning value : warnings) {
            getWarnings().add(value);
        }
        return this;
    }

    /**
     * Replaces the cross-region resource warnings for this pipeline.
     *
     * @param warnings the new warnings; null clears the list.
     * @return A reference to this object so that method calls can be chained.
     */
    public UpdatePipelineResult withWarnings(java.util.Collection<Warning> warnings) {
        // Same copy-or-clear semantics as setWarnings; delegating removes the
        // duplicated inline copy logic the previous version carried.
        setWarnings(warnings);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging. Format is kept identical to the generated-SDK original.
     *
     * @return A string representation of this object.
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getPipeline() != null) sb.append("Pipeline: " + getPipeline() + ",");
        if (getWarnings() != null) sb.append("Warnings: " + getWarnings() );
        sb.append("}");
        return sb.toString();
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode + ((getPipeline() == null) ? 0 : getPipeline().hashCode());
        hashCode = prime * hashCode + ((getWarnings() == null) ? 0 : getWarnings().hashCode());
        return hashCode;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) return true;
        if (obj == null) return false;
        if (obj instanceof UpdatePipelineResult == false) return false;
        UpdatePipelineResult other = (UpdatePipelineResult) obj;
        if (other.getPipeline() == null ^ this.getPipeline() == null) return false;
        if (other.getPipeline() != null && other.getPipeline().equals(this.getPipeline()) == false) return false;
        if (other.getWarnings() == null ^ this.getWarnings() == null) return false;
        if (other.getWarnings() != null && other.getWarnings().equals(this.getWarnings()) == false) return false;
        return true;
    }

    @Override
    public UpdatePipelineResult clone() {
        try {
            return (UpdatePipelineResult) super.clone();
        } catch (CloneNotSupportedException e) {
            // Cannot happen: this class implements Cloneable.
            throw new IllegalStateException(
                    "Got a CloneNotSupportedException from Object.clone() "
                            + "even though we're Cloneable!",
                    e);
        }
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.sql.analyzer;
import com.google.common.collect.ImmutableMap;
import io.trino.Session;
import io.trino.cost.CostCalculator;
import io.trino.cost.StatsCalculator;
import io.trino.execution.DataDefinitionTask;
import io.trino.execution.warnings.WarningCollector;
import io.trino.metadata.Metadata;
import io.trino.security.AccessControl;
import io.trino.spi.TrinoException;
import io.trino.spi.security.GroupProvider;
import io.trino.spi.type.TypeOperators;
import io.trino.sql.parser.SqlParser;
import io.trino.sql.planner.LogicalPlanner;
import io.trino.sql.planner.Plan;
import io.trino.sql.planner.PlanFragmenter;
import io.trino.sql.planner.PlanNodeIdAllocator;
import io.trino.sql.planner.PlanOptimizersFactory;
import io.trino.sql.planner.SubPlan;
import io.trino.sql.planner.TypeAnalyzer;
import io.trino.sql.planner.optimizations.PlanOptimizer;
import io.trino.sql.planner.planprinter.IoPlanPrinter;
import io.trino.sql.planner.planprinter.PlanPrinter;
import io.trino.sql.tree.ExplainType.Type;
import io.trino.sql.tree.Expression;
import io.trino.sql.tree.Statement;
import javax.inject.Inject;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import static io.trino.spi.StandardErrorCode.NOT_SUPPORTED;
import static io.trino.sql.ParameterUtils.parameterExtractor;
import static io.trino.sql.planner.LogicalPlanner.Stage.OPTIMIZED_AND_VALIDATED;
import static io.trino.sql.planner.planprinter.IoPlanPrinter.textIoPlan;
import static java.lang.String.format;
import static java.util.Objects.requireNonNull;
public class QueryExplainer
{
private final List<PlanOptimizer> planOptimizers;
private final PlanFragmenter planFragmenter;
private final Metadata metadata;
private final TypeOperators typeOperators;
private final GroupProvider groupProvider;
private final AccessControl accessControl;
private final SqlParser sqlParser;
private final StatsCalculator statsCalculator;
private final CostCalculator costCalculator;
private final Map<Class<? extends Statement>, DataDefinitionTask<?>> dataDefinitionTask;
@Inject
public QueryExplainer(
PlanOptimizersFactory planOptimizersFactory,
PlanFragmenter planFragmenter,
Metadata metadata,
TypeOperators typeOperators,
GroupProvider groupProvider,
AccessControl accessControl,
SqlParser sqlParser,
StatsCalculator statsCalculator,
CostCalculator costCalculator,
Map<Class<? extends Statement>, DataDefinitionTask<?>> dataDefinitionTask)
{
this(
planOptimizersFactory.get(),
planFragmenter,
metadata,
typeOperators,
groupProvider,
accessControl,
sqlParser,
statsCalculator,
costCalculator,
dataDefinitionTask);
}
public QueryExplainer(
List<PlanOptimizer> planOptimizers,
PlanFragmenter planFragmenter,
Metadata metadata,
TypeOperators typeOperators,
GroupProvider groupProvider,
AccessControl accessControl,
SqlParser sqlParser,
StatsCalculator statsCalculator,
CostCalculator costCalculator,
Map<Class<? extends Statement>, DataDefinitionTask<?>> dataDefinitionTask)
{
this.planOptimizers = requireNonNull(planOptimizers, "planOptimizers is null");
this.planFragmenter = requireNonNull(planFragmenter, "planFragmenter is null");
this.metadata = requireNonNull(metadata, "metadata is null");
this.typeOperators = requireNonNull(typeOperators, "typeOperators is null");
this.groupProvider = requireNonNull(groupProvider, "groupProvider is null");
this.accessControl = requireNonNull(accessControl, "accessControl is null");
this.sqlParser = requireNonNull(sqlParser, "sqlParser is null");
this.statsCalculator = requireNonNull(statsCalculator, "statsCalculator is null");
this.costCalculator = requireNonNull(costCalculator, "costCalculator is null");
this.dataDefinitionTask = ImmutableMap.copyOf(requireNonNull(dataDefinitionTask, "dataDefinitionTask is null"));
}
public Analysis analyze(Session session, Statement statement, List<Expression> parameters, WarningCollector warningCollector)
{
Analyzer analyzer = new Analyzer(session, metadata, sqlParser, groupProvider, accessControl, Optional.of(this), parameters, parameterExtractor(statement, parameters), warningCollector, statsCalculator);
return analyzer.analyze(statement);
}
public String getPlan(Session session, Statement statement, Type planType, List<Expression> parameters, WarningCollector warningCollector)
{
DataDefinitionTask<?> task = dataDefinitionTask.get(statement.getClass());
if (task != null) {
return explainTask(statement, task, parameters);
}
switch (planType) {
case LOGICAL:
Plan plan = getLogicalPlan(session, statement, parameters, warningCollector);
return PlanPrinter.textLogicalPlan(plan.getRoot(), plan.getTypes(), metadata, plan.getStatsAndCosts(), session, 0, false);
case DISTRIBUTED:
SubPlan subPlan = getDistributedPlan(session, statement, parameters, warningCollector);
return PlanPrinter.textDistributedPlan(subPlan, metadata, session, false);
case IO:
return IoPlanPrinter.textIoPlan(getLogicalPlan(session, statement, parameters, warningCollector), metadata, typeOperators, session);
case VALIDATE:
// unsupported
break;
}
throw new IllegalArgumentException("Unhandled plan type: " + planType);
}
private static <T extends Statement> String explainTask(Statement statement, DataDefinitionTask<T> task, List<Expression> parameters)
{
return task.explain((T) statement, parameters);
}
public String getGraphvizPlan(Session session, Statement statement, Type planType, List<Expression> parameters, WarningCollector warningCollector)
{
DataDefinitionTask<?> task = dataDefinitionTask.get(statement.getClass());
if (task != null) {
// todo format as graphviz
return explainTask(statement, task, parameters);
}
switch (planType) {
case LOGICAL:
Plan plan = getLogicalPlan(session, statement, parameters, warningCollector);
return PlanPrinter.graphvizLogicalPlan(plan.getRoot(), plan.getTypes());
case DISTRIBUTED:
SubPlan subPlan = getDistributedPlan(session, statement, parameters, warningCollector);
return PlanPrinter.graphvizDistributedPlan(subPlan);
case VALIDATE:
case IO:
// unsupported
}
throw new IllegalArgumentException("Unhandled plan type: " + planType);
}
public String getJsonPlan(Session session, Statement statement, Type planType, List<Expression> parameters, WarningCollector warningCollector)
{
DataDefinitionTask<?> task = dataDefinitionTask.get(statement.getClass());
if (task != null) {
// todo format as json
return explainTask(statement, task, parameters);
}
switch (planType) {
case IO:
Plan plan = getLogicalPlan(session, statement, parameters, warningCollector);
return textIoPlan(plan, metadata, typeOperators, session);
case LOGICAL:
case DISTRIBUTED:
case VALIDATE:
// unsupported
break;
}
throw new TrinoException(NOT_SUPPORTED, format("Unsupported explain plan type %s for JSON format", planType));
}
public Plan getLogicalPlan(Session session, Statement statement, List<Expression> parameters, WarningCollector warningCollector)
{
// analyze statement
Analysis analysis = analyze(session, statement, parameters, warningCollector);
PlanNodeIdAllocator idAllocator = new PlanNodeIdAllocator();
// plan statement
LogicalPlanner logicalPlanner = new LogicalPlanner(
session,
planOptimizers,
idAllocator,
metadata,
typeOperators,
new TypeAnalyzer(sqlParser, metadata),
statsCalculator,
costCalculator,
warningCollector);
return logicalPlanner.plan(analysis, OPTIMIZED_AND_VALIDATED, true);
}
private SubPlan getDistributedPlan(Session session, Statement statement, List<Expression> parameters, WarningCollector warningCollector)
{
Plan plan = getLogicalPlan(session, statement, parameters, warningCollector);
return planFragmenter.createSubPlans(session, plan, false, warningCollector);
}
}
| |
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.appmesh.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
* <p>
* An object representing the health check policy for a virtual node's listener.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/appmesh-2019-01-25/HealthCheckPolicy" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class HealthCheckPolicy implements Serializable, Cloneable, StructuredPojo {
/**
* <p>
* The number of consecutive successful health checks that must occur before declaring listener healthy.
* </p>
*/
private Integer healthyThreshold;
/**
* <p>
* The time period in milliseconds between each health check execution.
* </p>
*/
private Long intervalMillis;
/**
* <p>
* The destination path for the health check request. This is required only if the specified protocol is HTTP. If
* the protocol is TCP, this parameter is ignored.
* </p>
*/
private String path;
/**
* <p>
* The destination port for the health check request. This port must match the port defined in the
* <a>PortMapping</a> for the listener.
* </p>
*/
private Integer port;
/**
* <p>
* The protocol for the health check request.
* </p>
*/
private String protocol;
/**
* <p>
* The amount of time to wait when receiving a response from the health check, in milliseconds.
* </p>
*/
private Long timeoutMillis;
/**
* <p>
* The number of consecutive failed health checks that must occur before declaring a virtual node unhealthy.
* </p>
*/
private Integer unhealthyThreshold;
/**
 * Sets the number of consecutive successful health checks that must occur
 * before declaring the listener healthy.
 *
 * @param healthyThreshold the new healthy-check threshold.
 */
public void setHealthyThreshold(Integer healthyThreshold) {
    this.healthyThreshold = healthyThreshold;
}

/**
 * @return the number of consecutive successful health checks that must occur
 *         before declaring the listener healthy.
 */
public Integer getHealthyThreshold() {
    return this.healthyThreshold;
}

/**
 * Fluent variant of {@link #setHealthyThreshold(Integer)}.
 *
 * @param healthyThreshold the new healthy-check threshold.
 * @return this policy, for call chaining.
 */
public HealthCheckPolicy withHealthyThreshold(Integer healthyThreshold) {
    this.healthyThreshold = healthyThreshold;
    return this;
}
/**
 * Sets the time period in milliseconds between each health check execution.
 *
 * @param intervalMillis the new check interval in milliseconds.
 */
public void setIntervalMillis(Long intervalMillis) {
    this.intervalMillis = intervalMillis;
}

/**
 * @return the time period in milliseconds between each health check execution.
 */
public Long getIntervalMillis() {
    return this.intervalMillis;
}

/**
 * Fluent variant of {@link #setIntervalMillis(Long)}.
 *
 * @param intervalMillis the new check interval in milliseconds.
 * @return this policy, for call chaining.
 */
public HealthCheckPolicy withIntervalMillis(Long intervalMillis) {
    this.intervalMillis = intervalMillis;
    return this;
}
/**
 * Sets the destination path for the health check request. Required only when
 * the protocol is HTTP; ignored for TCP.
 *
 * @param path the health check request path.
 */
public void setPath(String path) {
    this.path = path;
}

/**
 * @return the destination path for the health check request (required only
 *         when the protocol is HTTP; ignored for TCP).
 */
public String getPath() {
    return this.path;
}

/**
 * Fluent variant of {@link #setPath(String)}.
 *
 * @param path the health check request path.
 * @return this policy, for call chaining.
 */
public HealthCheckPolicy withPath(String path) {
    this.path = path;
    return this;
}
/**
 * Sets the destination port for the health check request. Must match the port
 * defined in the listener's PortMapping.
 *
 * @param port the health check request port.
 */
public void setPort(Integer port) {
    this.port = port;
}

/**
 * @return the destination port for the health check request (must match the
 *         port defined in the listener's PortMapping).
 */
public Integer getPort() {
    return this.port;
}

/**
 * Fluent variant of {@link #setPort(Integer)}.
 *
 * @param port the health check request port.
 * @return this policy, for call chaining.
 */
public HealthCheckPolicy withPort(Integer port) {
    this.port = port;
    return this;
}
/**
 * Sets the protocol for the health check request.
 *
 * @param protocol the protocol name.
 * @see PortProtocol
 */
public void setProtocol(String protocol) {
    this.protocol = protocol;
}

/**
 * @return the protocol for the health check request.
 * @see PortProtocol
 */
public String getProtocol() {
    return this.protocol;
}

/**
 * Fluent variant of {@link #setProtocol(String)}.
 *
 * @param protocol the protocol name.
 * @return this policy, for call chaining.
 * @see PortProtocol
 */
public HealthCheckPolicy withProtocol(String protocol) {
    this.protocol = protocol;
    return this;
}

/**
 * Fluent, enum-typed variant of {@link #setProtocol(String)}; stores the
 * enum's string form.
 *
 * @param protocol the protocol enum constant (must not be null).
 * @return this policy, for call chaining.
 * @see PortProtocol
 */
public HealthCheckPolicy withProtocol(PortProtocol protocol) {
    this.protocol = protocol.toString();
    return this;
}
/**
 * Sets the amount of time to wait when receiving a response from the health
 * check, in milliseconds.
 *
 * @param timeoutMillis the response timeout in milliseconds.
 */
public void setTimeoutMillis(Long timeoutMillis) {
    this.timeoutMillis = timeoutMillis;
}

/**
 * @return the amount of time to wait when receiving a response from the
 *         health check, in milliseconds.
 */
public Long getTimeoutMillis() {
    return this.timeoutMillis;
}

/**
 * Fluent variant of {@link #setTimeoutMillis(Long)}.
 *
 * @param timeoutMillis the response timeout in milliseconds.
 * @return this policy, for call chaining.
 */
public HealthCheckPolicy withTimeoutMillis(Long timeoutMillis) {
    this.timeoutMillis = timeoutMillis;
    return this;
}
/**
 * Sets the number of consecutive failed health checks that must occur before
 * declaring a virtual node unhealthy.
 *
 * @param unhealthyThreshold the new unhealthy-check threshold.
 */
public void setUnhealthyThreshold(Integer unhealthyThreshold) {
    this.unhealthyThreshold = unhealthyThreshold;
}

/**
 * @return the number of consecutive failed health checks that must occur
 *         before declaring a virtual node unhealthy.
 */
public Integer getUnhealthyThreshold() {
    return this.unhealthyThreshold;
}

/**
 * Fluent variant of {@link #setUnhealthyThreshold(Integer)}.
 *
 * @param unhealthyThreshold the new unhealthy-check threshold.
 * @return this policy, for call chaining.
 */
public HealthCheckPolicy withUnhealthyThreshold(Integer unhealthyThreshold) {
    this.unhealthyThreshold = unhealthyThreshold;
    return this;
}
/**
 * Returns a string representation of this object; useful for testing and
 * debugging. The format matches the generated-SDK original exactly: every
 * present field except the last carries a trailing comma (so a dangling comma
 * appears when UnhealthyThreshold is unset but an earlier field is set).
 *
 * @return A string representation of this object.
 * @see java.lang.Object#toString()
 */
@Override
public String toString() {
    String body = "";
    if (getHealthyThreshold() != null)
        body += "HealthyThreshold: " + getHealthyThreshold() + ",";
    if (getIntervalMillis() != null)
        body += "IntervalMillis: " + getIntervalMillis() + ",";
    if (getPath() != null)
        body += "Path: " + getPath() + ",";
    if (getPort() != null)
        body += "Port: " + getPort() + ",";
    if (getProtocol() != null)
        body += "Protocol: " + getProtocol() + ",";
    if (getTimeoutMillis() != null)
        body += "TimeoutMillis: " + getTimeoutMillis() + ",";
    if (getUnhealthyThreshold() != null)
        body += "UnhealthyThreshold: " + getUnhealthyThreshold();
    return "{" + body + "}";
}
/**
 * Value equality: two {@code HealthCheckPolicy} instances are equal when every property
 * (healthy/unhealthy thresholds, interval, path, port, protocol, timeout) is equal,
 * with two nulls treated as equal — the same contract as the original null-XOR chain.
 */
@Override
public boolean equals(Object obj) {
    if (this == obj) {
        return true;
    }
    // instanceof is false for null, so this also covers the null check.
    if (!(obj instanceof HealthCheckPolicy)) {
        return false;
    }
    HealthCheckPolicy that = (HealthCheckPolicy) obj;
    return java.util.Objects.equals(getHealthyThreshold(), that.getHealthyThreshold())
            && java.util.Objects.equals(getIntervalMillis(), that.getIntervalMillis())
            && java.util.Objects.equals(getPath(), that.getPath())
            && java.util.Objects.equals(getPort(), that.getPort())
            && java.util.Objects.equals(getProtocol(), that.getProtocol())
            && java.util.Objects.equals(getTimeoutMillis(), that.getTimeoutMillis())
            && java.util.Objects.equals(getUnhealthyThreshold(), that.getUnhealthyThreshold());
}
/**
 * Hash code consistent with {@link #equals(Object)}.
 *
 * <p>{@code java.util.Objects.hash} performs exactly the same 31-based accumulation
 * (seed 1, null contributes 0) as the hand-rolled loop it replaces, in the same
 * property order, so the produced values are identical.</p>
 */
@Override
public int hashCode() {
    return java.util.Objects.hash(
            getHealthyThreshold(),
            getIntervalMillis(),
            getPath(),
            getPort(),
            getProtocol(),
            getTimeoutMillis(),
            getUnhealthyThreshold());
}
/**
 * Creates a shallow copy of this policy via {@link Object#clone()}.
 * CloneNotSupportedException should be impossible here — per the error message the class
 * is expected to implement Cloneable (declaration not visible in this chunk) — so it is
 * rethrown as an IllegalStateException rather than propagated.
 */
@Override
public HealthCheckPolicy clone() {
try {
return (HealthCheckPolicy) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
}
}
/**
 * Marshalls this policy using the generated singleton {@code HealthCheckPolicyMarshaller}.
 * Internal SDK API — not intended to be called by application code.
 */
@com.amazonaws.annotation.SdkInternalApi
@Override
public void marshall(ProtocolMarshaller protocolMarshaller) {
// Delegates serialization of this shape to the code-generated marshaller.
com.amazonaws.services.appmesh.model.transform.HealthCheckPolicyMarshaller.getInstance().marshall(this, protocolMarshaller);
}
}
| |
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
* Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.identitymanagement.model;
import java.io.Serializable;
import java.util.Objects;
/**
* <p>
* Contains information about a server certificate without its certificate body,
* certificate chain, and private key.
* </p>
* <p>
* This data type is used as a response element in the
* <a>UploadServerCertificate</a> and <a>ListServerCertificates</a> actions.
* </p>
*/
public class ServerCertificateMetadata implements Serializable, Cloneable {
/**
* <p>
* The path to the server certificate. For more information about paths, see
* <a href=
* "http://docs.aws.amazon.com/IAM/latest/UserGuide/Using_Identifiers.html"
* >IAM Identifiers</a> in the <i>Using IAM</i> guide.
* </p>
*/
private String path;
/**
* <p>
* The name that identifies the server certificate.
* </p>
*/
private String serverCertificateName;
/**
* <p>
* The stable and unique string identifying the server certificate. For more
* information about IDs, see <a href=
* "http://docs.aws.amazon.com/IAM/latest/UserGuide/Using_Identifiers.html"
* >IAM Identifiers</a> in the <i>Using IAM</i> guide.
* </p>
*/
private String serverCertificateId;
/**
* <p>
* The Amazon Resource Name (ARN) specifying the server certificate. For
* more information about ARNs and how to use them in policies, see <a href=
* "http://docs.aws.amazon.com/IAM/latest/UserGuide/Using_Identifiers.html"
* >IAM Identifiers</a> in the <i>Using IAM</i> guide.
* </p>
*/
private String arn;
/**
* <p>
* The date when the server certificate was uploaded.
* </p>
*/
private java.util.Date uploadDate;
/**
* <p>
* The date on which the certificate is set to expire.
* </p>
*/
private java.util.Date expiration;
/**
* Default constructor for ServerCertificateMetadata object. Callers should
* use the setter or fluent setter (with...) methods to initialize the
* object after creating it.
*/
public ServerCertificateMetadata() {
}
/**
* Constructs a new ServerCertificateMetadata object. Callers should use the
* setter or fluent setter (with...) methods to initialize any additional
* object members.
*
* @param path
* The path to the server certificate. For more information about
* paths, see <a href=
* "http://docs.aws.amazon.com/IAM/latest/UserGuide/Using_Identifiers.html"
* >IAM Identifiers</a> in the <i>Using IAM</i> guide.
* @param serverCertificateName
* The name that identifies the server certificate.
* @param serverCertificateId
* The stable and unique string identifying the server certificate.
* For more information about IDs, see <a href=
* "http://docs.aws.amazon.com/IAM/latest/UserGuide/Using_Identifiers.html"
* >IAM Identifiers</a> in the <i>Using IAM</i> guide.
* @param arn
* The Amazon Resource Name (ARN) specifying the server certificate.
* For more information about ARNs and how to use them in policies,
* see <a href=
* "http://docs.aws.amazon.com/IAM/latest/UserGuide/Using_Identifiers.html"
* >IAM Identifiers</a> in the <i>Using IAM</i> guide.
*/
public ServerCertificateMetadata(String path, String serverCertificateName,
String serverCertificateId, String arn) {
setPath(path);
setServerCertificateName(serverCertificateName);
setServerCertificateId(serverCertificateId);
setArn(arn);
}
/**
* <p>
* The path to the server certificate. For more information about paths, see
* <a href=
* "http://docs.aws.amazon.com/IAM/latest/UserGuide/Using_Identifiers.html"
* >IAM Identifiers</a> in the <i>Using IAM</i> guide.
* </p>
*
* @param path
* The path to the server certificate. For more information about
* paths, see <a href=
* "http://docs.aws.amazon.com/IAM/latest/UserGuide/Using_Identifiers.html"
* >IAM Identifiers</a> in the <i>Using IAM</i> guide.
*/
public void setPath(String path) {
this.path = path;
}
/**
* <p>
* The path to the server certificate. For more information about paths, see
* <a href=
* "http://docs.aws.amazon.com/IAM/latest/UserGuide/Using_Identifiers.html"
* >IAM Identifiers</a> in the <i>Using IAM</i> guide.
* </p>
*
* @return The path to the server certificate. For more information about
* paths, see <a href=
* "http://docs.aws.amazon.com/IAM/latest/UserGuide/Using_Identifiers.html"
* >IAM Identifiers</a> in the <i>Using IAM</i> guide.
*/
public String getPath() {
return this.path;
}
/**
* <p>
* The path to the server certificate. For more information about paths, see
* <a href=
* "http://docs.aws.amazon.com/IAM/latest/UserGuide/Using_Identifiers.html"
* >IAM Identifiers</a> in the <i>Using IAM</i> guide.
* </p>
*
* @param path
* The path to the server certificate. For more information about
* paths, see <a href=
* "http://docs.aws.amazon.com/IAM/latest/UserGuide/Using_Identifiers.html"
* >IAM Identifiers</a> in the <i>Using IAM</i> guide.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public ServerCertificateMetadata withPath(String path) {
setPath(path);
return this;
}
/**
* <p>
* The name that identifies the server certificate.
* </p>
*
* @param serverCertificateName
* The name that identifies the server certificate.
*/
public void setServerCertificateName(String serverCertificateName) {
this.serverCertificateName = serverCertificateName;
}
/**
* <p>
* The name that identifies the server certificate.
* </p>
*
* @return The name that identifies the server certificate.
*/
public String getServerCertificateName() {
return this.serverCertificateName;
}
/**
* <p>
* The name that identifies the server certificate.
* </p>
*
* @param serverCertificateName
* The name that identifies the server certificate.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public ServerCertificateMetadata withServerCertificateName(
String serverCertificateName) {
setServerCertificateName(serverCertificateName);
return this;
}
/**
* <p>
* The stable and unique string identifying the server certificate. For more
* information about IDs, see <a href=
* "http://docs.aws.amazon.com/IAM/latest/UserGuide/Using_Identifiers.html"
* >IAM Identifiers</a> in the <i>Using IAM</i> guide.
* </p>
*
* @param serverCertificateId
* The stable and unique string identifying the server certificate.
* For more information about IDs, see <a href=
* "http://docs.aws.amazon.com/IAM/latest/UserGuide/Using_Identifiers.html"
* >IAM Identifiers</a> in the <i>Using IAM</i> guide.
*/
public void setServerCertificateId(String serverCertificateId) {
this.serverCertificateId = serverCertificateId;
}
/**
* <p>
* The stable and unique string identifying the server certificate. For more
* information about IDs, see <a href=
* "http://docs.aws.amazon.com/IAM/latest/UserGuide/Using_Identifiers.html"
* >IAM Identifiers</a> in the <i>Using IAM</i> guide.
* </p>
*
* @return The stable and unique string identifying the server certificate.
* For more information about IDs, see <a href=
* "http://docs.aws.amazon.com/IAM/latest/UserGuide/Using_Identifiers.html"
* >IAM Identifiers</a> in the <i>Using IAM</i> guide.
*/
public String getServerCertificateId() {
return this.serverCertificateId;
}
/**
* <p>
* The stable and unique string identifying the server certificate. For more
* information about IDs, see <a href=
* "http://docs.aws.amazon.com/IAM/latest/UserGuide/Using_Identifiers.html"
* >IAM Identifiers</a> in the <i>Using IAM</i> guide.
* </p>
*
* @param serverCertificateId
* The stable and unique string identifying the server certificate.
* For more information about IDs, see <a href=
* "http://docs.aws.amazon.com/IAM/latest/UserGuide/Using_Identifiers.html"
* >IAM Identifiers</a> in the <i>Using IAM</i> guide.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public ServerCertificateMetadata withServerCertificateId(
String serverCertificateId) {
setServerCertificateId(serverCertificateId);
return this;
}
/**
* <p>
* The Amazon Resource Name (ARN) specifying the server certificate. For
* more information about ARNs and how to use them in policies, see <a href=
* "http://docs.aws.amazon.com/IAM/latest/UserGuide/Using_Identifiers.html"
* >IAM Identifiers</a> in the <i>Using IAM</i> guide.
* </p>
*
* @param arn
* The Amazon Resource Name (ARN) specifying the server certificate.
* For more information about ARNs and how to use them in policies,
* see <a href=
* "http://docs.aws.amazon.com/IAM/latest/UserGuide/Using_Identifiers.html"
* >IAM Identifiers</a> in the <i>Using IAM</i> guide.
*/
public void setArn(String arn) {
this.arn = arn;
}
/**
* <p>
* The Amazon Resource Name (ARN) specifying the server certificate. For
* more information about ARNs and how to use them in policies, see <a href=
* "http://docs.aws.amazon.com/IAM/latest/UserGuide/Using_Identifiers.html"
* >IAM Identifiers</a> in the <i>Using IAM</i> guide.
* </p>
*
* @return The Amazon Resource Name (ARN) specifying the server certificate.
* For more information about ARNs and how to use them in policies,
* see <a href=
* "http://docs.aws.amazon.com/IAM/latest/UserGuide/Using_Identifiers.html"
* >IAM Identifiers</a> in the <i>Using IAM</i> guide.
*/
public String getArn() {
return this.arn;
}
/**
* <p>
* The Amazon Resource Name (ARN) specifying the server certificate. For
* more information about ARNs and how to use them in policies, see <a href=
* "http://docs.aws.amazon.com/IAM/latest/UserGuide/Using_Identifiers.html"
* >IAM Identifiers</a> in the <i>Using IAM</i> guide.
* </p>
*
* @param arn
* The Amazon Resource Name (ARN) specifying the server certificate.
* For more information about ARNs and how to use them in policies,
* see <a href=
* "http://docs.aws.amazon.com/IAM/latest/UserGuide/Using_Identifiers.html"
* >IAM Identifiers</a> in the <i>Using IAM</i> guide.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public ServerCertificateMetadata withArn(String arn) {
setArn(arn);
return this;
}
/**
* <p>
* The date when the server certificate was uploaded.
* </p>
*
* @param uploadDate
* The date when the server certificate was uploaded.
*/
public void setUploadDate(java.util.Date uploadDate) {
this.uploadDate = uploadDate;
}
/**
* <p>
* The date when the server certificate was uploaded.
* </p>
*
* @return The date when the server certificate was uploaded.
*/
public java.util.Date getUploadDate() {
return this.uploadDate;
}
/**
* <p>
* The date when the server certificate was uploaded.
* </p>
*
* @param uploadDate
* The date when the server certificate was uploaded.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public ServerCertificateMetadata withUploadDate(java.util.Date uploadDate) {
setUploadDate(uploadDate);
return this;
}
/**
* <p>
* The date on which the certificate is set to expire.
* </p>
*
* @param expiration
* The date on which the certificate is set to expire.
*/
public void setExpiration(java.util.Date expiration) {
this.expiration = expiration;
}
/**
* <p>
* The date on which the certificate is set to expire.
* </p>
*
* @return The date on which the certificate is set to expire.
*/
public java.util.Date getExpiration() {
return this.expiration;
}
/**
* <p>
* The date on which the certificate is set to expire.
* </p>
*
* @param expiration
* The date on which the certificate is set to expire.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public ServerCertificateMetadata withExpiration(java.util.Date expiration) {
setExpiration(expiration);
return this;
}
/**
* Returns a string representation of this object; useful for testing and
* debugging.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getPath() != null)
sb.append("Path: " + getPath() + ",");
if (getServerCertificateName() != null)
sb.append("ServerCertificateName: " + getServerCertificateName()
+ ",");
if (getServerCertificateId() != null)
sb.append("ServerCertificateId: " + getServerCertificateId() + ",");
if (getArn() != null)
sb.append("Arn: " + getArn() + ",");
if (getUploadDate() != null)
sb.append("UploadDate: " + getUploadDate() + ",");
if (getExpiration() != null)
sb.append("Expiration: " + getExpiration());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof ServerCertificateMetadata == false)
return false;
ServerCertificateMetadata other = (ServerCertificateMetadata) obj;
if (other.getPath() == null ^ this.getPath() == null)
return false;
if (other.getPath() != null
&& other.getPath().equals(this.getPath()) == false)
return false;
if (other.getServerCertificateName() == null
^ this.getServerCertificateName() == null)
return false;
if (other.getServerCertificateName() != null
&& other.getServerCertificateName().equals(
this.getServerCertificateName()) == false)
return false;
if (other.getServerCertificateId() == null
^ this.getServerCertificateId() == null)
return false;
if (other.getServerCertificateId() != null
&& other.getServerCertificateId().equals(
this.getServerCertificateId()) == false)
return false;
if (other.getArn() == null ^ this.getArn() == null)
return false;
if (other.getArn() != null
&& other.getArn().equals(this.getArn()) == false)
return false;
if (other.getUploadDate() == null ^ this.getUploadDate() == null)
return false;
if (other.getUploadDate() != null
&& other.getUploadDate().equals(this.getUploadDate()) == false)
return false;
if (other.getExpiration() == null ^ this.getExpiration() == null)
return false;
if (other.getExpiration() != null
&& other.getExpiration().equals(this.getExpiration()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode
+ ((getPath() == null) ? 0 : getPath().hashCode());
hashCode = prime
* hashCode
+ ((getServerCertificateName() == null) ? 0
: getServerCertificateName().hashCode());
hashCode = prime
* hashCode
+ ((getServerCertificateId() == null) ? 0
: getServerCertificateId().hashCode());
hashCode = prime * hashCode
+ ((getArn() == null) ? 0 : getArn().hashCode());
hashCode = prime * hashCode
+ ((getUploadDate() == null) ? 0 : getUploadDate().hashCode());
hashCode = prime * hashCode
+ ((getExpiration() == null) ? 0 : getExpiration().hashCode());
return hashCode;
}
@Override
public ServerCertificateMetadata clone() {
try {
return (ServerCertificateMetadata) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException(
"Got a CloneNotSupportedException from Object.clone() "
+ "even though we're Cloneable!", e);
}
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.hive;
import com.facebook.presto.hive.HdfsEnvironment.HdfsContext;
import com.facebook.presto.hive.HiveBucketing.HiveBucketFilter;
import com.facebook.presto.hive.HiveSplit.BucketConversion;
import com.facebook.presto.hive.metastore.Column;
import com.facebook.presto.hive.metastore.Partition;
import com.facebook.presto.hive.metastore.Table;
import com.facebook.presto.hive.util.HiveFileIterator;
import com.facebook.presto.hive.util.HiveFileIterator.NestedDirectoryNotAllowedException;
import com.facebook.presto.hive.util.InternalHiveSplitFactory;
import com.facebook.presto.hive.util.ResumableTask;
import com.facebook.presto.hive.util.ResumableTasks;
import com.facebook.presto.spi.ColumnHandle;
import com.facebook.presto.spi.ConnectorSession;
import com.facebook.presto.spi.PrestoException;
import com.facebook.presto.spi.SchemaTableName;
import com.facebook.presto.spi.predicate.TupleDomain;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterators;
import com.google.common.collect.Streams;
import com.google.common.io.CharStreams;
import com.google.common.util.concurrent.ListenableFuture;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.io.SymlinkTextInputFormat;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileSplit;
import org.apache.hadoop.mapred.InputFormat;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.TextInputFormat;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.lang.annotation.Annotation;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Deque;
import java.util.Iterator;
import java.util.List;
import java.util.Optional;
import java.util.Properties;
import java.util.concurrent.ConcurrentLinkedDeque;
import java.util.concurrent.Executor;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.function.IntPredicate;
import static com.facebook.presto.hive.HiveErrorCode.HIVE_BAD_DATA;
import static com.facebook.presto.hive.HiveErrorCode.HIVE_FILESYSTEM_ERROR;
import static com.facebook.presto.hive.HiveErrorCode.HIVE_INVALID_BUCKET_FILES;
import static com.facebook.presto.hive.HiveErrorCode.HIVE_INVALID_METADATA;
import static com.facebook.presto.hive.HiveErrorCode.HIVE_INVALID_PARTITION_VALUE;
import static com.facebook.presto.hive.HiveErrorCode.HIVE_UNKNOWN_ERROR;
import static com.facebook.presto.hive.HiveSessionProperties.isForceLocalScheduling;
import static com.facebook.presto.hive.HiveUtil.checkCondition;
import static com.facebook.presto.hive.HiveUtil.getFooterCount;
import static com.facebook.presto.hive.HiveUtil.getHeaderCount;
import static com.facebook.presto.hive.HiveUtil.getInputFormat;
import static com.facebook.presto.hive.S3SelectPushdown.shouldEnablePushdownForTable;
import static com.facebook.presto.hive.metastore.MetastoreUtil.getHiveSchema;
import static com.facebook.presto.hive.metastore.MetastoreUtil.getPartitionLocation;
import static com.facebook.presto.hive.util.ConfigurationUtils.toJobConf;
import static com.facebook.presto.hive.util.HiveFileIterator.NestedDirectoryPolicy.FAIL;
import static com.facebook.presto.hive.util.HiveFileIterator.NestedDirectoryPolicy.IGNORED;
import static com.facebook.presto.hive.util.HiveFileIterator.NestedDirectoryPolicy.RECURSE;
import static com.facebook.presto.spi.StandardErrorCode.NOT_SUPPORTED;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.util.concurrent.Futures.immediateFuture;
import static java.lang.String.format;
import static java.util.Objects.requireNonNull;
import static org.apache.hadoop.hive.common.FileUtils.HIDDEN_FILES_PATH_FILTER;
public class BackgroundHiveSplitLoader
implements HiveSplitLoader
{
// Sentinel future returned when a loader pass completed without back-pressure.
private static final ListenableFuture<?> COMPLETED_FUTURE = immediateFuture(null);
private final Table table;
private final TupleDomain<? extends ColumnHandle> compactEffectivePredicate;
// Present only when the table is bucketed; drives eager bucketed-split loading.
private final Optional<BucketSplitInfo> tableBucketInfo;
private final HdfsEnvironment hdfsEnvironment;
private final HdfsContext hdfsContext;
private final NamenodeStats namenodeStats;
private final DirectoryLister directoryLister;
// Number of HiveSplitLoaderTask instances submitted in start().
private final int loaderConcurrency;
private final boolean recursiveDirWalkerEnabled;
private final Executor executor;
private final ConnectorSession session;
// Partitions not yet expanded into file iterators; consumed lazily by loader tasks.
private final ConcurrentLazyQueue<HivePartitionMetadata> partitions;
// Per-partition split iterators awaiting draining into hiveSplitSource.
private final Deque<Iterator<InternalHiveSplit>> fileIterators = new ConcurrentLinkedDeque<>();
// Purpose of this lock:
// * Write lock: when you need a consistent view across partitions, fileIterators, and hiveSplitSource.
// * Read lock: when you need to modify any of the above.
// Make sure the lock is held throughout the period during which they may not be consistent with each other.
// Details:
// * When write lock is acquired, except the holder, no one can do any of the following:
// ** poll from (or check empty) partitions
// ** poll from (or check empty) or push to fileIterators
// ** push to hiveSplitSource
// * When any of the above three operations is carried out, either a read lock or a write lock must be held.
// * When a series of operations involving two or more of the above three operations are carried out, the lock
// must be continuously held throughout the series of operations.
// Implications:
// * if you hold a read lock but not a write lock, you can do any of the above three operations, but you may
// see a series of operations involving two or more of the operations carried out half way.
private final ReentrantReadWriteLock taskExecutionLock = new ReentrantReadWriteLock();
// Assigned once in start(); NOTE(review): not final/volatile — visibility relies on
// start() being called before loader tasks run. Confirm with callers.
private HiveSplitSource hiveSplitSource;
// Volatile stop flag checked by all loader tasks.
private volatile boolean stopped;
/**
 * Creates a loader that lazily expands {@code partitions} into splits and feeds them to the
 * {@link HiveSplitSource} supplied later via {@link #start}.
 *
 * <p>All injected dependencies are now validated eagerly with {@code requireNonNull} (matching
 * the statically imported helper already used in this file) so a misconfigured caller fails at
 * construction time with a descriptive message instead of with a deferred NPE inside a
 * background loader task.</p>
 */
public BackgroundHiveSplitLoader(
        Table table,
        Iterable<HivePartitionMetadata> partitions,
        TupleDomain<? extends ColumnHandle> compactEffectivePredicate,
        Optional<BucketSplitInfo> tableBucketInfo,
        ConnectorSession session,
        HdfsEnvironment hdfsEnvironment,
        NamenodeStats namenodeStats,
        DirectoryLister directoryLister,
        Executor executor,
        int loaderConcurrency,
        boolean recursiveDirWalkerEnabled)
{
    this.table = requireNonNull(table, "table is null");
    this.compactEffectivePredicate = requireNonNull(compactEffectivePredicate, "compactEffectivePredicate is null");
    this.tableBucketInfo = requireNonNull(tableBucketInfo, "tableBucketInfo is null");
    this.loaderConcurrency = loaderConcurrency;
    this.session = requireNonNull(session, "session is null");
    this.hdfsEnvironment = requireNonNull(hdfsEnvironment, "hdfsEnvironment is null");
    this.namenodeStats = requireNonNull(namenodeStats, "namenodeStats is null");
    this.directoryLister = requireNonNull(directoryLister, "directoryLister is null");
    this.recursiveDirWalkerEnabled = recursiveDirWalkerEnabled;
    this.executor = requireNonNull(executor, "executor is null");
    this.partitions = new ConcurrentLazyQueue<>(requireNonNull(partitions, "partitions is null"));
    this.hdfsContext = new HdfsContext(session, table.getDatabaseName(), table.getTableName());
}
/**
 * Records the destination split source and submits one resumable loader task per unit of
 * configured concurrency onto the executor.
 */
@Override
public void start(HiveSplitSource splitSource)
{
    this.hiveSplitSource = splitSource;
    int remaining = loaderConcurrency;
    while (remaining > 0) {
        ResumableTasks.submit(executor, new HiveSplitLoaderTask());
        remaining--;
    }
}
/**
 * Requests all loader tasks to finish. The flag is volatile, so running tasks observe it
 * on their next check; no interruption or joining is performed here.
 */
@Override
public void stop()
{
stopped = true;
}
/**
 * One unit of split-loading work. Repeatedly calls {@link #loadSplits()} under the read lock
 * until stopped, failed, or back-pressured (at which point it suspends itself on the returned
 * future via {@code TaskStatus.continueOn}).
 */
private class HiveSplitLoaderTask
implements ResumableTask
{
@Override
public TaskStatus process()
{
while (true) {
if (stopped) {
return TaskStatus.finished();
}
ListenableFuture<?> future;
// Read lock (not write): multiple loader tasks may poll/push concurrently; see the
// taskExecutionLock comment on the enclosing class for the locking protocol.
taskExecutionLock.readLock().lock();
try {
future = loadSplits();
}
catch (Exception e) {
// Normalize to PrestoException: I/O problems become HIVE_FILESYSTEM_ERROR,
// anything else unexpected becomes HIVE_UNKNOWN_ERROR.
if (e instanceof IOException) {
e = new PrestoException(HIVE_FILESYSTEM_ERROR, e);
}
else if (!(e instanceof PrestoException)) {
e = new PrestoException(HIVE_UNKNOWN_ERROR, e);
}
// Fail the split source before releasing the execution lock
// Otherwise, a race could occur where the split source is completed before we fail it.
hiveSplitSource.fail(e);
// fail() is expected to set the stopped flag; assert that invariant.
checkState(stopped);
return TaskStatus.finished();
}
finally {
taskExecutionLock.readLock().unlock();
}
// Must run outside the read lock: it may need the write lock to finalize the source.
invokeNoMoreSplitsIfNecessary();
if (!future.isDone()) {
// Back-pressure from the split source: suspend until there is room again.
return TaskStatus.continueOn(future);
}
}
}
}
/**
 * Signals the split source that no more splits are coming once both the partition queue and the
 * file-iterator queue are drained. Uses a cheap read-locked check first, then re-checks under
 * the write lock because the first check is only opportunistic and can race with other loader
 * tasks still pushing iterators.
 */
private void invokeNoMoreSplitsIfNecessary()
{
taskExecutionLock.readLock().lock();
try {
// This is an opportunistic check to avoid getting the write lock unnecessarily
if (!partitions.isEmpty() || !fileIterators.isEmpty()) {
return;
}
}
catch (Exception e) {
// Even the emptiness checks can throw (lazy queue); route failures to the source.
hiveSplitSource.fail(e);
checkState(stopped, "Task is not marked as stopped even though it failed");
return;
}
finally {
taskExecutionLock.readLock().unlock();
}
taskExecutionLock.writeLock().lock();
try {
// the write lock guarantees that no one is operating on the partitions, fileIterators, or hiveSplitSource, or half way through doing so.
if (partitions.isEmpty() && fileIterators.isEmpty()) {
// It is legal to call `noMoreSplits` multiple times or after `stop` was called.
// Nothing bad will happen if `noMoreSplits` implementation calls methods that will try to obtain a read lock because the lock is re-entrant.
hiveSplitSource.noMoreSplits();
}
}
catch (Exception e) {
hiveSplitSource.fail(e);
checkState(stopped, "Task is not marked as stopped even though it failed");
}
finally {
taskExecutionLock.writeLock().unlock();
}
}
/**
 * Performs one unit of loading work: drains one queued file iterator into the split source,
 * or — when none is queued — expands the next partition.
 *
 * @return {@code COMPLETED_FUTURE} when this pass finished cleanly, or the split source's
 *         back-pressure future when the queue is full (the iterator is re-queued first).
 */
private ListenableFuture<?> loadSplits()
        throws IOException
{
    Iterator<InternalHiveSplit> pendingSplits = fileIterators.poll();
    if (pendingSplits == null) {
        // No iterator queued: fall back to expanding the next partition, if any remain.
        HivePartitionMetadata nextPartition = partitions.poll();
        return (nextPartition == null) ? COMPLETED_FUTURE : loadPartition(nextPartition);
    }
    // hasNext() is checked before the stop flag deliberately — same order as before, since
    // the iterator may perform lazy work when advancing.
    while (pendingSplits.hasNext() && !stopped) {
        ListenableFuture<?> backPressure = hiveSplitSource.addToQueue(pendingSplits.next());
        if (!backPressure.isDone()) {
            // Queue is full: put the iterator back at the front so it resumes first.
            fileIterators.addFirst(pendingSplits);
            return backPressure;
        }
    }
    // No need to put the iterator back, since it's either empty or we've stopped
    return COMPLETED_FUTURE;
}
/**
 * Expands one partition into splits. Three paths:
 * symlink tables are resolved to their targets and loaded eagerly; bucketed tables are loaded
 * fully up-front (all files are needed to compute the file-to-bucket mapping); everything else
 * enqueues a lazy file iterator onto {@code fileIterators} for incremental draining.
 *
 * @return a back-pressure future from the split source, or {@code COMPLETED_FUTURE}
 */
private ListenableFuture<?> loadPartition(HivePartitionMetadata partition)
throws IOException
{
String partitionName = partition.getHivePartition().getPartitionId();
Properties schema = getPartitionSchema(table, partition.getPartition());
List<HivePartitionKey> partitionKeys = getPartitionKeys(table, partition.getPartition());
// Safe by construction upstream; the compact predicate is keyed by HiveColumnHandle.
TupleDomain<HiveColumnHandle> effectivePredicate = (TupleDomain<HiveColumnHandle>) compactEffectivePredicate;
Path path = new Path(getPartitionLocation(table, partition.getPartition()));
Configuration configuration = hdfsEnvironment.getConfiguration(hdfsContext, path);
InputFormat<?, ?> inputFormat = getInputFormat(configuration, schema, false);
FileSystem fs = hdfsEnvironment.getFileSystem(hdfsContext, path);
boolean s3SelectPushdownEnabled = shouldEnablePushdownForTable(session, table, path.toString(), partition.getPartition());
if (inputFormat instanceof SymlinkTextInputFormat) {
if (tableBucketInfo.isPresent()) {
throw new PrestoException(NOT_SUPPORTED, "Bucketed table in SymlinkTextInputFormat is not yet supported");
}
// TODO: This should use an iterator like the HiveFileIterator
ListenableFuture<?> lastResult = COMPLETED_FUTURE;
for (Path targetPath : getTargetPathsFromSymlink(fs, path)) {
// The input should be in TextInputFormat.
TextInputFormat targetInputFormat = new TextInputFormat();
// the splits must be generated using the file system for the target path
// get the configuration for the target path -- it may be a different hdfs instance
FileSystem targetFilesystem = hdfsEnvironment.getFileSystem(hdfsContext, targetPath);
JobConf targetJob = toJobConf(targetFilesystem.getConf());
targetJob.setInputFormat(TextInputFormat.class);
targetInputFormat.configure(targetJob);
FileInputFormat.setInputPaths(targetJob, targetPath);
InputSplit[] targetSplits = targetInputFormat.getSplits(targetJob, 0);
InternalHiveSplitFactory splitFactory = new InternalHiveSplitFactory(
targetFilesystem,
partitionName,
inputFormat,
schema,
partitionKeys,
effectivePredicate,
partition.getColumnCoercions(),
Optional.empty(),
isForceLocalScheduling(session),
s3SelectPushdownEnabled);
lastResult = addSplitsToSource(targetSplits, splitFactory);
if (stopped) {
return COMPLETED_FUTURE;
}
}
// Only the last target's back-pressure future is propagated.
return lastResult;
}
Optional<BucketConversion> bucketConversion = Optional.empty();
boolean bucketConversionRequiresWorkerParticipation = false;
if (partition.getPartition().isPresent()) {
Optional<HiveBucketProperty> partitionBucketProperty = partition.getPartition().get().getStorage().getBucketProperty();
if (tableBucketInfo.isPresent() && partitionBucketProperty.isPresent()) {
int readBucketCount = tableBucketInfo.get().getReadBucketCount();
int partitionBucketCount = partitionBucketProperty.get().getBucketCount();
// Validation was done in HiveSplitManager#getPartitionMetadata.
// Here, it's just trying to see if its needs the BucketConversion.
if (readBucketCount != partitionBucketCount) {
bucketConversion = Optional.of(new BucketConversion(readBucketCount, partitionBucketCount, tableBucketInfo.get().getBucketColumns()));
if (readBucketCount > partitionBucketCount) {
bucketConversionRequiresWorkerParticipation = true;
}
}
}
}
InternalHiveSplitFactory splitFactory = new InternalHiveSplitFactory(
fs,
partitionName,
inputFormat,
schema,
partitionKeys,
effectivePredicate,
partition.getColumnCoercions(),
bucketConversionRequiresWorkerParticipation ? bucketConversion : Optional.empty(),
isForceLocalScheduling(session),
s3SelectPushdownEnabled);
// To support custom input formats, we want to call getSplits()
// on the input format to obtain file splits.
if (shouldUseFileSplitsFromInputFormat(inputFormat)) {
if (tableBucketInfo.isPresent()) {
throw new PrestoException(NOT_SUPPORTED, "Presto cannot read bucketed partition in an input format with UseFileSplitsFromInputFormat annotation: " + inputFormat.getClass().getSimpleName());
}
JobConf jobConf = toJobConf(configuration);
FileInputFormat.setInputPaths(jobConf, path);
InputSplit[] splits = inputFormat.getSplits(jobConf, 0);
return addSplitsToSource(splits, splitFactory);
}
// Bucketed partitions are fully loaded immediately since all files must be loaded to determine the file to bucket mapping
if (tableBucketInfo.isPresent()) {
return hiveSplitSource.addToQueue(getBucketedSplits(path, fs, splitFactory, tableBucketInfo.get(), bucketConversion));
}
// S3 Select pushdown works at the granularity of individual S3 objects,
// therefore we must not split files when it is enabled.
boolean splittable = getHeaderCount(schema) == 0 && getFooterCount(schema) == 0 && !s3SelectPushdownEnabled;
fileIterators.addLast(createInternalHiveSplitIterator(path, fs, splitFactory, splittable));
return COMPLETED_FUTURE;
}
private ListenableFuture<?> addSplitsToSource(InputSplit[] targetSplits, InternalHiveSplitFactory splitFactory)
        throws IOException
{
    // Tracks the future of the most recent queue insertion; the caller can use it
    // to apply back-pressure on further split generation.
    ListenableFuture<?> result = COMPLETED_FUTURE;
    for (InputSplit split : targetSplits) {
        Optional<InternalHiveSplit> hiveSplit = splitFactory.createInternalHiveSplit((FileSplit) split);
        if (hiveSplit.isPresent()) {
            result = hiveSplitSource.addToQueue(hiveSplit.get());
        }
        // Bail out promptly if the loader was stopped concurrently.
        if (stopped) {
            return COMPLETED_FUTURE;
        }
    }
    return result;
}
private static boolean shouldUseFileSplitsFromInputFormat(InputFormat<?, ?> inputFormat)
{
    // An input format opts in to split generation via its own getSplits() by carrying
    // an annotation whose simple name is "UseFileSplitsFromInputFormat". The match is
    // by simple name so any annotation class with that name qualifies, regardless of
    // its package or classloader.
    for (Annotation annotation : inputFormat.getClass().getAnnotations()) {
        if ("UseFileSplitsFromInputFormat".equals(annotation.annotationType().getSimpleName())) {
            return true;
        }
    }
    return false;
}
// Lazily walks the partition directory (recursing into sub-directories only when
// recursiveDirWalkerEnabled is set; otherwise nested directories are ignored) and
// converts each discovered file into an InternalHiveSplit, dropping files for which
// the factory returns Optional.empty().
private Iterator<InternalHiveSplit> createInternalHiveSplitIterator(Path path, FileSystem fileSystem, InternalHiveSplitFactory splitFactory, boolean splittable)
{
    return Streams.stream(new HiveFileIterator(path, fileSystem, directoryLister, namenodeStats, recursiveDirWalkerEnabled ? RECURSE : IGNORED))
            .map(status -> splitFactory.createInternalHiveSplit(status, splittable))
            .filter(Optional::isPresent)
            .map(Optional::get)
            .iterator();
}
// Produces all splits for a bucketed partition. Each file in the partition directory
// corresponds to one physical bucket. When the read bucket count, the table bucket
// count, and the partition's physical bucket count differ (compatible bucketing /
// pending bucket conversion), one physical file may serve several logical ("read")
// buckets and several table buckets; the modular arithmetic below maps between them.
private List<InternalHiveSplit> getBucketedSplits(Path path, FileSystem fileSystem, InternalHiveSplitFactory splitFactory, BucketSplitInfo bucketSplitInfo, Optional<BucketConversion> bucketConversion)
{
    int readBucketCount = bucketSplitInfo.getReadBucketCount();
    int tableBucketCount = bucketSplitInfo.getTableBucketCount();
    // Files on disk were written with the partition's own bucket count, which can
    // differ from the table's current bucket count when a conversion is in effect.
    int partitionBucketCount = bucketConversion.isPresent() ? bucketConversion.get().getPartitionBucketCount() : tableBucketCount;

    // list all files in the partition; FAIL on nested directories (see catch below)
    ArrayList<LocatedFileStatus> files = new ArrayList<>(partitionBucketCount);
    try {
        Iterators.addAll(files, new HiveFileIterator(path, fileSystem, directoryLister, namenodeStats, FAIL));
    }
    catch (NestedDirectoryNotAllowedException e) {
        // Fail here to be on the safe side. This seems to be the same as what Hive does
        throw new PrestoException(
                HIVE_INVALID_BUCKET_FILES,
                format("Hive table '%s' is corrupt. Found sub-directory in bucket directory for partition: %s",
                        new SchemaTableName(table.getDatabaseName(), table.getTableName()),
                        splitFactory.getPartitionName()));
    }

    // verify we found exactly one file per physical bucket
    if (files.size() != partitionBucketCount) {
        throw new PrestoException(
                HIVE_INVALID_BUCKET_FILES,
                format("Hive table '%s' is corrupt. The number of files in the directory (%s) does not match the declared bucket count (%s) for partition: %s",
                        new SchemaTableName(table.getDatabaseName(), table.getTableName()),
                        files.size(),
                        partitionBucketCount,
                        splitFactory.getPartitionName()));
    }

    // Sort FileStatus objects (instead of, e.g., fileStatus.getPath().toString). This matches org.apache.hadoop.hive.ql.metadata.Table.getSortedPaths
    // After sorting, index i in `files` is the file for physical bucket i.
    files.sort(null);

    // convert files to internal splits
    List<InternalHiveSplit> splitList = new ArrayList<>();
    for (int bucketNumber = 0; bucketNumber < Math.max(readBucketCount, partitionBucketCount); bucketNumber++) {
        // Physical bucket #. This determine file name. It also determines the order of splits in the result.
        int partitionBucketNumber = bucketNumber % partitionBucketCount;
        // Logical bucket #. Each logical bucket corresponds to a "bucket" from engine's perspective.
        int readBucketNumber = bucketNumber % readBucketCount;

        boolean containsEligibleTableBucket = false;
        boolean containsIneligibleTableBucket = false;
        // Every table bucket that maps onto this (physical, logical) pair must agree on
        // whether it passes the bucket filter; a mixed result means the file can be
        // neither cleanly included nor excluded, which is rejected below.
        for (int tableBucketNumber = bucketNumber % tableBucketCount; tableBucketNumber < tableBucketCount; tableBucketNumber += Math.max(readBucketCount, partitionBucketCount)) {
            // table bucket number: this is used for evaluating "$bucket" filters.
            if (bucketSplitInfo.isTableBucketEnabled(tableBucketNumber)) {
                containsEligibleTableBucket = true;
            }
            else {
                containsIneligibleTableBucket = true;
            }
        }

        if (containsEligibleTableBucket && containsIneligibleTableBucket) {
            throw new PrestoException(
                    NOT_SUPPORTED,
                    "The bucket filter cannot be satisfied. There are restrictions on the bucket filter when all the following is true: " +
                            "1. a table has a different buckets count as at least one of its partitions that is read in this query; " +
                            "2. the table has a different but compatible bucket number with another table in the query; " +
                            "3. some buckets of the table is filtered out from the query, most likely using a filter on \"$bucket\". " +
                            "(table name: " + table.getTableName() + ", table bucket count: " + tableBucketCount + ", " +
                            "partition bucket count: " + partitionBucketCount + ", effective reading bucket count: " + readBucketCount + ")");
        }
        if (containsEligibleTableBucket) {
            LocatedFileStatus file = files.get(partitionBucketNumber);
            splitFactory.createInternalHiveSplit(file, readBucketNumber)
                    .ifPresent(splitList::add);
        }
    }
    return splitList;
}
private static List<Path> getTargetPathsFromSymlink(FileSystem fileSystem, Path symlinkDir)
{
    // Each non-hidden file in a SymlinkTextInputFormat directory is a manifest whose
    // lines are the paths of the actual data files; collect all of them in order.
    try {
        List<Path> targets = new ArrayList<>();
        for (FileStatus manifest : fileSystem.listStatus(symlinkDir, HIDDEN_FILES_PATH_FILTER)) {
            try (BufferedReader reader = new BufferedReader(new InputStreamReader(fileSystem.open(manifest.getPath()), StandardCharsets.UTF_8))) {
                for (String line : CharStreams.readLines(reader)) {
                    targets.add(new Path(line));
                }
            }
        }
        return targets;
    }
    catch (IOException e) {
        throw new PrestoException(HIVE_BAD_DATA, "Error parsing symlinks from: " + symlinkDir, e);
    }
}
private static List<HivePartitionKey> getPartitionKeys(Table table, Optional<Partition> partition)
{
    // An unpartitioned table contributes no partition keys.
    if (!partition.isPresent()) {
        return ImmutableList.of();
    }
    List<Column> columns = table.getPartitionColumns();
    List<String> values = partition.get().getValues();
    checkCondition(columns.size() == values.size(), HIVE_INVALID_METADATA, "Expected %s partition key values, but got %s", columns.size(), values.size());

    // Pair each partition column with its value, validating type support and non-null values.
    ImmutableList.Builder<HivePartitionKey> result = ImmutableList.builder();
    for (int i = 0; i < columns.size(); i++) {
        Column column = columns.get(i);
        HiveType hiveType = column.getType();
        if (!hiveType.isSupportedType()) {
            throw new PrestoException(NOT_SUPPORTED, format("Unsupported Hive type %s found in partition keys of table %s.%s", hiveType, table.getDatabaseName(), table.getTableName()));
        }
        String value = values.get(i);
        checkCondition(value != null, HIVE_INVALID_PARTITION_VALUE, "partition key value cannot be null for field: %s", column.getName());
        result.add(new HivePartitionKey(column.getName(), value));
    }
    return result.build();
}
private static Properties getPartitionSchema(Table table, Optional<Partition> partition)
{
    // For an unpartitioned table use the table-level schema; otherwise use the
    // schema derived from the partition together with its table.
    return partition
            .map(value -> getHiveSchema(value, table))
            .orElseGet(() -> getHiveSchema(table));
}
/**
 * Immutable description of how a bucketed table is read: the bucketing columns,
 * the table's declared bucket count, the bucket count used for reading (which may
 * differ under compatible bucketing), and a predicate selecting which table buckets
 * the query actually needs.
 */
public static class BucketSplitInfo
{
    private final List<HiveColumnHandle> bucketColumns;
    private final int tableBucketCount;
    private final int readBucketCount;
    private final IntPredicate bucketFilter;

    /**
     * Creates the split info from an optional bucket handle and an optional bucket filter.
     *
     * @return {@code Optional.empty()} when the table is not bucketed
     * @throws IllegalArgumentException if a bucket filter is supplied without a bucket handle
     */
    public static Optional<BucketSplitInfo> createBucketSplitInfo(Optional<HiveBucketHandle> bucketHandle, Optional<HiveBucketFilter> bucketFilter)
    {
        requireNonNull(bucketHandle, "bucketHandle is null");
        // Fixed: message previously said "buckets is null", which did not match the parameter name.
        requireNonNull(bucketFilter, "bucketFilter is null");

        if (!bucketHandle.isPresent()) {
            checkArgument(!bucketFilter.isPresent(), "bucketHandle must be present if bucketFilter is present");
            return Optional.empty();
        }

        int tableBucketCount = bucketHandle.get().getTableBucketCount();
        int readBucketCount = bucketHandle.get().getReadBucketCount();

        if (tableBucketCount != readBucketCount && bucketFilter.isPresent()) {
            // TODO: support this once we fix the "$bucket" column for compatible bucketing read
            throw new PrestoException(
                    NOT_SUPPORTED,
                    "Bucket filter (most likely using a filter on \"$bucket\") is not supported in this query. " +
                            "Since the table has a different but compatible bucket number with another table in the query.");
        }

        List<HiveColumnHandle> bucketColumns = bucketHandle.get().getColumns();
        // Absent filter means every table bucket is eligible.
        IntPredicate predicate = bucketFilter
                .<IntPredicate>map(filter -> filter.getBucketsToKeep()::contains)
                .orElse(bucket -> true);
        return Optional.of(new BucketSplitInfo(bucketColumns, tableBucketCount, readBucketCount, predicate));
    }

    private BucketSplitInfo(List<HiveColumnHandle> bucketColumns, int tableBucketCount, int readBucketCount, IntPredicate bucketFilter)
    {
        this.bucketColumns = ImmutableList.copyOf(requireNonNull(bucketColumns, "bucketColumns is null"));
        this.tableBucketCount = tableBucketCount;
        this.readBucketCount = readBucketCount;
        this.bucketFilter = requireNonNull(bucketFilter, "bucketFilter is null");
    }

    public List<HiveColumnHandle> getBucketColumns()
    {
        return bucketColumns;
    }

    public int getTableBucketCount()
    {
        return tableBucketCount;
    }

    /**
     * Number of buckets from the engine's perspective; may differ from
     * {@link #getTableBucketCount()} under compatible bucketing.
     */
    public int getReadBucketCount()
    {
        return readBucketCount;
    }

    /**
     * Evaluates whether the provided table bucket number passes the bucket predicate.
     * A bucket predicate can be present in two cases:
     * <ul>
     * <li>Filter on "$bucket" column. e.g. {@code "$bucket" between 0 and 100}
     * <li>Single-value equality filter on all bucket columns. e.g. for a table with two bucketing columns,
     * {@code bucketCol1 = 'a' AND bucketCol2 = 123}
     * </ul>
     */
    public boolean isTableBucketEnabled(int tableBucketNumber)
    {
        return bucketFilter.test(tableBucketNumber);
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.core.io;
import org.apache.flink.core.memory.ByteArrayInputStreamWithPos;
import org.apache.flink.core.memory.ByteArrayOutputStreamWithPos;
import org.apache.flink.core.memory.DataInputView;
import org.apache.flink.core.memory.DataOutputView;
import org.apache.flink.core.memory.DataOutputViewStreamWrapper;

import org.junit.Assert;
import org.junit.Test;

import java.io.EOFException;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
/** Suite of tests for {@link PostVersionedIOReadableWritable}. */
public class PostVersionedIOReadableWritableTest {

    /** A payload written with a version header must round-trip unchanged. */
    @Test
    public void testReadVersioned() throws IOException {
        // Explicit charset: a bare String#getBytes() uses the platform-default
        // charset, which would make the fixture environment-dependent.
        byte[] payload = "test-data".getBytes(StandardCharsets.UTF_8);
        byte[] serialized = serializeWithPostVersionedReadableWritable(payload);
        byte[] restored = restoreWithPostVersionedReadableWritable(serialized, payload.length);

        Assert.assertArrayEquals(payload, restored);
    }

    /**
     * Data written WITHOUT a version header must still be readable — presumably the
     * reader falls back to treating the whole stream as payload when no version
     * marker is found (confirm against {@link PostVersionedIOReadableWritable}).
     */
    @Test
    public void testReadNonVersioned() throws IOException {
        byte[] preVersionedPayload = new byte[] {0x00, 0x00, 0x02, 0x33};
        byte[] serialized = serializeWithNonVersionedReadableWritable(preVersionedPayload);
        byte[] restored =
                restoreWithPostVersionedReadableWritable(serialized, preVersionedPayload.length);

        Assert.assertArrayEquals(preVersionedPayload, restored);
    }

    /** Non-versioned payload longer than a version marker. */
    @Test
    public void testReadNonVersionedWithLongPayload() throws IOException {
        // Explicit charset for a platform-independent fixture (see testReadVersioned).
        byte[] preVersionedPayload = "test-data".getBytes(StandardCharsets.UTF_8);
        byte[] serialized = serializeWithNonVersionedReadableWritable(preVersionedPayload);
        byte[] restored =
                restoreWithPostVersionedReadableWritable(serialized, preVersionedPayload.length);

        Assert.assertArrayEquals(preVersionedPayload, restored);
    }

    /** Non-versioned payload shorter than a version marker. */
    @Test
    public void testReadNonVersionedWithShortPayload() throws IOException {
        byte[] preVersionedPayload = new byte[] {-15, -51};
        byte[] serialized = serializeWithNonVersionedReadableWritable(preVersionedPayload);
        byte[] restored =
                restoreWithPostVersionedReadableWritable(serialized, preVersionedPayload.length);

        Assert.assertArrayEquals(preVersionedPayload, restored);
    }

    /** Degenerate case: a completely empty non-versioned payload. */
    @Test
    public void testReadNonVersionedWithEmptyPayload() throws IOException {
        byte[] preVersionedPayload = new byte[0];
        byte[] serialized = serializeWithNonVersionedReadableWritable(preVersionedPayload);
        byte[] restored =
                restoreWithPostVersionedReadableWritable(serialized, preVersionedPayload.length);

        Assert.assertArrayEquals(preVersionedPayload, restored);
    }

    /** Serializes {@code payload} without any version header. */
    private byte[] serializeWithNonVersionedReadableWritable(byte[] payload) throws IOException {
        TestNonVersionedReadableWritable versionedReadableWritable =
                new TestNonVersionedReadableWritable(payload);

        byte[] serialized;
        try (ByteArrayOutputStreamWithPos out = new ByteArrayOutputStreamWithPos()) {
            versionedReadableWritable.write(new DataOutputViewStreamWrapper(out));
            serialized = out.toByteArray();
        }
        return serialized;
    }

    /** Serializes {@code payload} preceded by the version header. */
    private byte[] serializeWithPostVersionedReadableWritable(byte[] payload) throws IOException {
        TestPostVersionedReadableWritable versionedReadableWritable =
                new TestPostVersionedReadableWritable(payload);

        byte[] serialized;
        try (ByteArrayOutputStreamWithPos out = new ByteArrayOutputStreamWithPos()) {
            versionedReadableWritable.write(new DataOutputViewStreamWrapper(out));
            serialized = out.toByteArray();
        }
        return serialized;
    }

    /**
     * Deserializes {@code serialized} into a {@link TestPostVersionedReadableWritable}
     * of {@code expectedLength} bytes, through a stream that yields at most one byte
     * per bulk read so partial-read handling is exercised.
     */
    private byte[] restoreWithPostVersionedReadableWritable(byte[] serialized, int expectedLength)
            throws IOException {
        TestPostVersionedReadableWritable restoredVersionedReadableWritable =
                new TestPostVersionedReadableWritable(expectedLength);

        try (ByteArrayInputStreamWithPos in =
                new TestByteArrayInputStreamProducingOneByteAtATime(serialized)) {
            restoredVersionedReadableWritable.read(in);
        }
        return restoredVersionedReadableWritable.getData();
    }

    /** Asserts that {@code in} has been fully consumed. */
    private static void assertEmpty(DataInputView in) throws IOException {
        try {
            in.readByte();
            Assert.fail();
        } catch (EOFException ignored) {
            // expected: the stream must be exhausted at this point
        }
    }

    /** Versioned fixture carrying an opaque byte payload. */
    static class TestPostVersionedReadableWritable extends PostVersionedIOReadableWritable {

        private static final int VERSION = 1;
        private byte[] data;

        TestPostVersionedReadableWritable(int len) {
            this.data = new byte[len];
        }

        TestPostVersionedReadableWritable(byte[] data) {
            this.data = data;
        }

        @Override
        public int getVersion() {
            return VERSION;
        }

        @Override
        public void write(DataOutputView out) throws IOException {
            super.write(out);
            out.write(data);
        }

        @Override
        protected void read(DataInputView in, boolean wasVersioned) throws IOException {
            in.readFully(data);
            assertEmpty(in);
        }

        public byte[] getData() {
            return data;
        }
    }

    /** Plain (non-versioned) fixture used to produce headerless input data. */
    static class TestNonVersionedReadableWritable implements IOReadableWritable {

        private byte[] data;

        TestNonVersionedReadableWritable(byte[] data) {
            this.data = data;
        }

        @Override
        public void write(DataOutputView out) throws IOException {
            out.write(data);
        }

        @Override
        public void read(DataInputView in) throws IOException {
            in.readFully(data);
            assertEmpty(in);
        }
    }

    /** Input stream that returns at most one byte per bulk read, to force partial reads. */
    static class TestByteArrayInputStreamProducingOneByteAtATime
            extends ByteArrayInputStreamWithPos {

        public TestByteArrayInputStreamProducingOneByteAtATime(byte[] buf) {
            super(buf);
        }

        @Override
        public int read(byte[] b, int off, int len) {
            return super.read(b, off, Math.min(len, 1));
        }

        @Override
        public int read(byte[] b) throws IOException {
            return read(b, 0, b.length);
        }
    }
}
| |
/*
* Copyright 2011 - 2013 NTB University of Applied Sciences in Technology
* Buchs, Switzerland, http://www.ntb.ch/inf
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package ch.ntb.inf.deep.runtime.mpc555.driver;
import ch.ntb.inf.deep.runtime.mpc555.IntbMpc555HB;
import ch.ntb.inf.deep.unsafe.US;
/**
* Driver for the mpc555 board for control applications.
* This board comprises two regular analog outputs and two analog outputs with
* 1A current supply. Further there are 4 analog input channels as well as
* 8 digital in/output channels.
*
* @author Graf Urs
*/
/* Changes:
 * 01.05.2011 Urs Graf: corrected an error in DACinit
 * 18.02.2010 M. Zueger: adapted the DAC and ADC drivers and integrated them directly
 */
public class RTBoard implements IntbMpc555HB {
    // Byte offset of queue2 within the QADC RAM: queue2 begins at CCW + 2*32 = 64
    // (see initADC, QACR2 setup below).
    private static final int ADDR_OFFSET = 64;
    // Base conversion command word (no option bits set); the channel number is added to it.
    private static final int CCW_INIT = 0x0000;
    // Channel code that terminates a QADC scan queue.
    private static final int END_OF_QUEUE = 0x003F;
    // TPU-based channel drivers: led[0..3] for the LEDs, dio[0..7] for digital I/O,
    // fqd[] for quadrature (encoder) inputs. led/dio/fqd arrays are created in the
    // static initializer; dio/fqd entries are created on demand by dioInit()/encInit().
    private static TPU_DIO[] led, dio;
    private static TPU_FQD[] fqd;

    /**
     * Returns the value of an analog input channel.<br>
     * The analog signal will be read from <code>channel</code>.
     * The channels carry the names <code>A-In0..3</code>. The range
     * of the return value is between -10..+10 corresponding to Volts.
     * The resolution of the ADC is 10 bit.
     *
     * @param channel
     *            Channel with analog signal.
     * @return Value in Volts (-10..+10).
     */
    public static float analogIn(int channel) {
        // Map the raw 10-bit conversion result (0..1023) linearly onto -10..+10 V.
        return ((US.GET2(RJURR_A + ADDR_OFFSET + channel * 2)) - 511.5f) / 511.5f * 10f;
    }

    /**
     * Writes a value to an regular analog output <code>channel</code>
     * The channels are denoted with <code>A-Out0</code> and <code>A-Out1</code>.
     * The range of <code>val</code> is between -10..+10 corresponding to Volts
     * The resolution of the DAC is 12 bit.
     *
     * @param channel
     *            Channel with analog signal.
     * @param val
     *            Value in Volts (-10..+10).
     */
    public static void analogOut(int channel, float val) {
        // QSPI transfer word: channel select in bits 14..15, 12-bit DAC value in bits 0..11.
        // -10..+10 V maps linearly to 0..4095.
        US.PUT2(TRANRAM + 2 * channel, (channel % 4) * 0x4000 + ((int)(val / 10 * 2047.5f + 2047.5f) & 0xfff));
    }

    /**
     * Writes a value to an analog output <code>channel</code> with 1A current drive capability.
     * The channels are denoted with <code>Power-Out0</code> and <code>Power-Out1</code>.
     * The range of <code>val</code> is between -10..+10 corresponding to Volts
     * The resolution of the DAC is 12 bit.
     *
     * @param channel
     *            Channel with analog signal.
     * @param val
     *            Value in Volts (-10..+10).
     */
    public static void analogPowerOut(int channel, float val) {
        // Power outputs occupy DAC channels 2 and 3; otherwise identical to analogOut.
        channel += 2;
        US.PUT2(TRANRAM + 2 * channel, (channel % 4) * 0x4000 + ((int)(val / 10 * 2047.5f + 2047.5f) & 0xfff));
    }

    /**
     * Initializes a digital <code>channel</code> as input or output. Channels are numbered 0..7.
     *
     * @param channel
     *            Channel to be initialized.
     * @param out
     *            If <code>true</code> the channel will be an output, otherwise it will be an input.
     */
    public static void dioInit(int channel, boolean out) {
        // Out-of-range channels are silently ignored.
        if (channel >= 0 && channel < 8) dio[channel] = new TPU_DIO(true, channel, out);
    }

    /**
     * The digital input at <code>channel</code> is read. Channels are numbered
     * 0..7. The value <code>true</code> corresponds to the logical signal <code>1</code>.
     *
     * @param channel
     *            Channel to be read.
     * @return Digital signal at <code>channel</code>.
     */
    public static boolean dioIn(int channel) {
        // Requires a prior dioInit(channel, ...); otherwise dio[channel] is null.
        return dio[channel].get();
    }

    /**
     * Write a digital output to <code>channel</code>.
     * Channels are numbered <code>0..7</code>.
     *
     * @param channel
     *            Channel to write.
     * @param level
     *            Digital signal, <code>true</code> corresponds to the logical signal <code>1</code>.
     */
    public static void dioOut(int channel, boolean level) {
        // Requires a prior dioInit(channel, true); otherwise dio[channel] is null.
        dio[channel].set(level);
    }

    /**
     * Write a digital output to a led.
     * leds are numbered <code>0..3</code>.
     *
     * @param channel
     *            Led channel.
     * @param level
     *            <code>true</code> corresponds to the led lightening up.
     */
    public static void ledOut(int channel, boolean level) {
        // LEDs are active-low: the pin is driven low to light the LED.
        led[channel].set(!level);
    }

    /**
     * Initializes two digital input channels as encoder input.<br>
     * <code>channel</code> can be in the range of <code>0..3</code>.
     * <br>
     * <b>Important:</b><br>
     * As two digital inputs are necessary for a single encoder input,
     * <code>channel+1</code> will be reserved and used as well.
     *
     * @param channel
     *            <code>channel</code> and <code>channel+1</code> will be used for encoder input signals.
     */
    public static void encInit(int channel) {
        // NOTE(review): unlike dioInit there is no range check here; an out-of-range
        // channel throws ArrayIndexOutOfBoundsException.
        fqd[channel] = new TPU_FQD(true, channel);
        fqd[channel].setPosition(0);
    }

    /**
     * Reads the encoder position.<br>
     *
     * @param channel Channel of encoder input.
     * @return Position.
     */
    public static short getEncCount(int channel) {
        return fqd[channel].getPosition();
    }

    /**
     * Set the encoder position.<br>
     *
     * @param channel Channel of encoder input.
     * @param pos Position to initialize encoder.
     */
    public static void setEncCount(int channel, short pos) {
        fqd[channel].setPosition(pos);
    }

    // Configures the QSPI so that the 16-bit words placed into TRANRAM by
    // analogOut/analogPowerOut are continuously shifted out to the DAC.
    private static void initDAC() {
        US.PUT2(SPCR1, 0x0); //disable QSPI
        US.PUT1(PQSPAR, 0x013); // use PCS1, MOSI, MISO for QSPI
        US.PUT1(DDRQS, 0x016); //SCK, MOSI, PCS1 output; MISO is input
        US.PUT2(PORTQS, 0x0FF); //all Pins, in case QSPI disabled, are high
        US.PUT2(SPCR0, 0x08302); // QSPI is master, 16 bits per transfer, inactive state of SCLK is high (CPOL=1), data changed on leading edge (CPHA=1), clock = 10MHz
        US.PUT2(SPCR2, 0x4300); // no interrupts, wraparound mode, NEWQP=0, ENDQP=03
        for(int i=0; i<4; i++) US.PUT1(COMDRAM + i, 0x6D); //disable chip select after transfer, use bits in SPCR0, use PCS1
        // Preload all four DAC channels with mid-scale (2048 => 0 V).
        for(int i=0; i<4; i++) US.PUT2(TRANRAM + 2 * i, i * 0x4000 + 2048);
        US.PUT2(SPCR1, 0x08010); //enable QSPI, delay 13us after transfer
    }

    // Sets up QADC queue2 in software-triggered continuous-scan mode; results are
    // read back from RJURR by analogIn().
    private static void initADC() {
        // user access
        US.PUT2(QADC64MCR_A, 0);
        // internal multiplexing, use ETRIG1 for queue1, QCLK = 40 MHz / (11+1 + 7+1) = 2 MHz
        US.PUT2(QACR0_A, 0x00B7);
        // queue2:
        // Software triggered continuous-scan mode
        // Resume execution with the aborted CCW
        // queue2 begins at CCW + 2*32 = 64 ( = ADDR_OFFSET)
        US.PUT2(QACR2_A, 0x31A0);
        // CCW for AN48 - AN59, max sample time
        // NOTE(review): the loop below programs channels 52, 54, 56, 58 only — the
        // "AN48 - AN59" comment looks stale; confirm against the board schematic.
        // ADDR_OFFSET: Using queue2
        for (int i = 52; i <= 58; i += 2) {
            US.PUT2(CCW_A + ADDR_OFFSET + (i-52), CCW_INIT + i);
        }
        // end of queue
        US.PUT2(CCW_A + ADDR_OFFSET + 4 * 2, END_OF_QUEUE);
    }

    // Runs once at class load: brings the board peripherals into a defined state.
    static {
        /* 1) Initialize DAC */
        initDAC();

        /* 2) Initialize digital I/Os */
        // LEDs on TPU pins 1, 3, 5, 7, configured as outputs — presumably matching
        // the board wiring; confirm against the schematic.
        led = new TPU_DIO[4];
        for (int i = 0; i < 4; i++) led[i] = new TPU_DIO(false, i * 2 + 1, true);
        dio = new TPU_DIO[8];
        fqd = new TPU_FQD[8];

        /* 3) Initialize ADC */
        initADC();
    }
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/bigtable/v1/bigtable_data.proto
package com.google.bigtable.v1;
/**
* <pre>
* Specifies the complete (requested) contents of a single row of a table.
* Rows which exceed 256MiB in size cannot be read in full.
* </pre>
*
* Protobuf type {@code google.bigtable.v1.Row}
*/
public final class Row extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.bigtable.v1.Row)
RowOrBuilder {
// Use Row.newBuilder() to construct.
private Row(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private Row() {
key_ = com.google.protobuf.ByteString.EMPTY;
families_ = java.util.Collections.emptyList();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return com.google.protobuf.UnknownFieldSet.getDefaultInstance();
}
private Row(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
int mutable_bitField0_ = 0;
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!input.skipField(tag)) {
done = true;
}
break;
}
case 10: {
key_ = input.readBytes();
break;
}
case 18: {
if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
families_ = new java.util.ArrayList<com.google.bigtable.v1.Family>();
mutable_bitField0_ |= 0x00000002;
}
families_.add(
input.readMessage(com.google.bigtable.v1.Family.parser(), extensionRegistry));
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
families_ = java.util.Collections.unmodifiableList(families_);
}
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.bigtable.v1.BigtableDataProto.internal_static_google_bigtable_v1_Row_descriptor;
}
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.bigtable.v1.BigtableDataProto.internal_static_google_bigtable_v1_Row_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.bigtable.v1.Row.class, com.google.bigtable.v1.Row.Builder.class);
}
private int bitField0_;
public static final int KEY_FIELD_NUMBER = 1;
private com.google.protobuf.ByteString key_;
/**
* <pre>
* The unique key which identifies this row within its table. This is the same
* key that's used to identify the row in, for example, a MutateRowRequest.
* May contain any non-empty byte string up to 4KiB in length.
* </pre>
*
* <code>optional bytes key = 1;</code>
*/
public com.google.protobuf.ByteString getKey() {
return key_;
}
public static final int FAMILIES_FIELD_NUMBER = 2;
private java.util.List<com.google.bigtable.v1.Family> families_;
/**
* <pre>
* May be empty, but only if the entire row is empty.
* The mutual ordering of column families is not specified.
* </pre>
*
* <code>repeated .google.bigtable.v1.Family families = 2;</code>
*/
public java.util.List<com.google.bigtable.v1.Family> getFamiliesList() {
return families_;
}
/**
* <pre>
* May be empty, but only if the entire row is empty.
* The mutual ordering of column families is not specified.
* </pre>
*
* <code>repeated .google.bigtable.v1.Family families = 2;</code>
*/
public java.util.List<? extends com.google.bigtable.v1.FamilyOrBuilder>
getFamiliesOrBuilderList() {
return families_;
}
/**
* <pre>
* May be empty, but only if the entire row is empty.
* The mutual ordering of column families is not specified.
* </pre>
*
* <code>repeated .google.bigtable.v1.Family families = 2;</code>
*/
public int getFamiliesCount() {
return families_.size();
}
/**
* <pre>
* May be empty, but only if the entire row is empty.
* The mutual ordering of column families is not specified.
* </pre>
*
* <code>repeated .google.bigtable.v1.Family families = 2;</code>
*/
public com.google.bigtable.v1.Family getFamilies(int index) {
return families_.get(index);
}
/**
* <pre>
* May be empty, but only if the entire row is empty.
* The mutual ordering of column families is not specified.
* </pre>
*
* <code>repeated .google.bigtable.v1.Family families = 2;</code>
*/
public com.google.bigtable.v1.FamilyOrBuilder getFamiliesOrBuilder(
int index) {
return families_.get(index);
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (!key_.isEmpty()) {
output.writeBytes(1, key_);
}
for (int i = 0; i < families_.size(); i++) {
output.writeMessage(2, families_.get(i));
}
}
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!key_.isEmpty()) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(1, key_);
}
for (int i = 0; i < families_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(2, families_.get(i));
}
memoizedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.bigtable.v1.Row)) {
return super.equals(obj);
}
com.google.bigtable.v1.Row other = (com.google.bigtable.v1.Row) obj;
boolean result = true;
result = result && getKey()
.equals(other.getKey());
result = result && getFamiliesList()
.equals(other.getFamiliesList());
return result;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
hash = (37 * hash) + KEY_FIELD_NUMBER;
hash = (53 * hash) + getKey().hashCode();
if (getFamiliesCount() > 0) {
hash = (37 * hash) + FAMILIES_FIELD_NUMBER;
hash = (53 * hash) + getFamiliesList().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
  // --- Standard generated parse methods: decode a Row from various input forms. ---
  // All overloads delegate to PARSER; the ExtensionRegistryLite variants allow
  // extension fields to be recognized during parsing.
  public static com.google.bigtable.v1.Row parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.bigtable.v1.Row parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.bigtable.v1.Row parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.bigtable.v1.Row parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.bigtable.v1.Row parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.bigtable.v1.Row parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  // Delimited variants read a length-prefixed message from the stream.
  public static com.google.bigtable.v1.Row parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.bigtable.v1.Row parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.bigtable.v1.Row parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.bigtable.v1.Row parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  // Creates a fresh builder for this message type.
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  // Creates a builder pre-populated with the given prototype's field values.
  public static Builder newBuilder(com.google.bigtable.v1.Row prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  // For the default instance an empty Builder is returned directly,
  // avoiding a pointless merge of all-default field values.
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * Specifies the complete (requested) contents of a single row of a table.
   * Rows which exceed 256MiB in size cannot be read in full.
   * </pre>
   *
   * Protobuf type {@code google.bigtable.v1.Row}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.bigtable.v1.Row)
      com.google.bigtable.v1.RowOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.bigtable.v1.BigtableDataProto.internal_static_google_bigtable_v1_Row_descriptor;
    }
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.bigtable.v1.BigtableDataProto.internal_static_google_bigtable_v1_Row_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.bigtable.v1.Row.class, com.google.bigtable.v1.Row.Builder.class);
    }
    // Construct using com.google.bigtable.v1.Row.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    // Eagerly creates the repeated-field builder when the runtime is
    // configured to always use field builders (alwaysUseFieldBuilders).
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3
              .alwaysUseFieldBuilders) {
        getFamiliesFieldBuilder();
      }
    }
    // Resets all fields to their default values (empty key, no families).
    public Builder clear() {
      super.clear();
      key_ = com.google.protobuf.ByteString.EMPTY;
      if (familiesBuilder_ == null) {
        families_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000002);
      } else {
        familiesBuilder_.clear();
      }
      return this;
    }
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.bigtable.v1.BigtableDataProto.internal_static_google_bigtable_v1_Row_descriptor;
    }
    public com.google.bigtable.v1.Row getDefaultInstanceForType() {
      return com.google.bigtable.v1.Row.getDefaultInstance();
    }
    // Builds the message, throwing if it is not fully initialized
    // (isInitialized() always returns true here, so this never throws).
    public com.google.bigtable.v1.Row build() {
      com.google.bigtable.v1.Row result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    // Builds without the initialization check. The families list is frozen
    // (made unmodifiable) on first build so later builder mutations copy it.
    public com.google.bigtable.v1.Row buildPartial() {
      com.google.bigtable.v1.Row result = new com.google.bigtable.v1.Row(this);
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;  // stays 0: no singular fields carry presence bits
      result.key_ = key_;
      if (familiesBuilder_ == null) {
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          families_ = java.util.Collections.unmodifiableList(families_);
          bitField0_ = (bitField0_ & ~0x00000002);
        }
        result.families_ = families_;
      } else {
        result.families_ = familiesBuilder_.build();
      }
      result.bitField0_ = to_bitField0_;
      onBuilt();
      return result;
    }
    public Builder clone() {
      return (Builder) super.clone();
    }
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        Object value) {
      return (Builder) super.setField(field, value);
    }
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return (Builder) super.clearField(field);
    }
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return (Builder) super.clearOneof(oneof);
    }
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, Object value) {
      return (Builder) super.setRepeatedField(field, index, value);
    }
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        Object value) {
      return (Builder) super.addRepeatedField(field, value);
    }
    // Dispatches to the typed mergeFrom when the other message is a Row.
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.bigtable.v1.Row) {
        return mergeFrom((com.google.bigtable.v1.Row)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Merges a non-default key and appends the other message's families.
    // Two paths: plain list storage vs. an active RepeatedFieldBuilderV3.
    public Builder mergeFrom(com.google.bigtable.v1.Row other) {
      if (other == com.google.bigtable.v1.Row.getDefaultInstance()) return this;
      if (other.getKey() != com.google.protobuf.ByteString.EMPTY) {
        setKey(other.getKey());
      }
      if (familiesBuilder_ == null) {
        if (!other.families_.isEmpty()) {
          if (families_.isEmpty()) {
            // Adopt the other's (immutable) list directly; copy-on-write later.
            families_ = other.families_;
            bitField0_ = (bitField0_ & ~0x00000002);
          } else {
            ensureFamiliesIsMutable();
            families_.addAll(other.families_);
          }
          onChanged();
        }
      } else {
        if (!other.families_.isEmpty()) {
          if (familiesBuilder_.isEmpty()) {
            // Discard the empty builder and fall back to list storage,
            // re-creating the builder only if the runtime demands it.
            familiesBuilder_.dispose();
            familiesBuilder_ = null;
            families_ = other.families_;
            bitField0_ = (bitField0_ & ~0x00000002);
            familiesBuilder_ =
              com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                 getFamiliesFieldBuilder() : null;
          } else {
            familiesBuilder_.addAllMessages(other.families_);
          }
        }
      }
      onChanged();
      return this;
    }
    public final boolean isInitialized() {
      return true;
    }
    // Streams a serialized Row and merges it into this builder; on parse
    // failure, any partially-parsed message is still merged before rethrowing.
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.bigtable.v1.Row parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (com.google.bigtable.v1.Row) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
    // Bit 0x00000002 tracks whether families_ is a private mutable copy.
    private int bitField0_;
    private com.google.protobuf.ByteString key_ = com.google.protobuf.ByteString.EMPTY;
    /**
     * <pre>
     * The unique key which identifies this row within its table. This is the same
     * key that's used to identify the row in, for example, a MutateRowRequest.
     * May contain any non-empty byte string up to 4KiB in length.
     * </pre>
     *
     * <code>optional bytes key = 1;</code>
     */
    public com.google.protobuf.ByteString getKey() {
      return key_;
    }
    /**
     * <pre>
     * The unique key which identifies this row within its table. This is the same
     * key that's used to identify the row in, for example, a MutateRowRequest.
     * May contain any non-empty byte string up to 4KiB in length.
     * </pre>
     *
     * <code>optional bytes key = 1;</code>
     */
    public Builder setKey(com.google.protobuf.ByteString value) {
      if (value == null) {
    throw new NullPointerException();
  }
  key_ = value;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * The unique key which identifies this row within its table. This is the same
     * key that's used to identify the row in, for example, a MutateRowRequest.
     * May contain any non-empty byte string up to 4KiB in length.
     * </pre>
     *
     * <code>optional bytes key = 1;</code>
     */
    public Builder clearKey() {
      key_ = getDefaultInstance().getKey();
      onChanged();
      return this;
    }
    private java.util.List<com.google.bigtable.v1.Family> families_ =
      java.util.Collections.emptyList();
    // Copies families_ into a private ArrayList before the first mutation
    // (copy-on-write), recording mutability in bitField0_.
    private void ensureFamiliesIsMutable() {
      if (!((bitField0_ & 0x00000002) == 0x00000002)) {
        families_ = new java.util.ArrayList<com.google.bigtable.v1.Family>(families_);
        bitField0_ |= 0x00000002;
       }
    }
    private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.bigtable.v1.Family, com.google.bigtable.v1.Family.Builder, com.google.bigtable.v1.FamilyOrBuilder> familiesBuilder_;
    /**
     * <pre>
     * May be empty, but only if the entire row is empty.
     * The mutual ordering of column families is not specified.
     * </pre>
     *
     * <code>repeated .google.bigtable.v1.Family families = 2;</code>
     */
    public java.util.List<com.google.bigtable.v1.Family> getFamiliesList() {
      if (familiesBuilder_ == null) {
        return java.util.Collections.unmodifiableList(families_);
      } else {
        return familiesBuilder_.getMessageList();
      }
    }
    /**
     * <pre>
     * May be empty, but only if the entire row is empty.
     * The mutual ordering of column families is not specified.
     * </pre>
     *
     * <code>repeated .google.bigtable.v1.Family families = 2;</code>
     */
    public int getFamiliesCount() {
      if (familiesBuilder_ == null) {
        return families_.size();
      } else {
        return familiesBuilder_.getCount();
      }
    }
    /**
     * <pre>
     * May be empty, but only if the entire row is empty.
     * The mutual ordering of column families is not specified.
     * </pre>
     *
     * <code>repeated .google.bigtable.v1.Family families = 2;</code>
     */
    public com.google.bigtable.v1.Family getFamilies(int index) {
      if (familiesBuilder_ == null) {
        return families_.get(index);
      } else {
        return familiesBuilder_.getMessage(index);
      }
    }
    /**
     * <pre>
     * May be empty, but only if the entire row is empty.
     * The mutual ordering of column families is not specified.
     * </pre>
     *
     * <code>repeated .google.bigtable.v1.Family families = 2;</code>
     */
    public Builder setFamilies(
        int index, com.google.bigtable.v1.Family value) {
      if (familiesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureFamiliesIsMutable();
        families_.set(index, value);
        onChanged();
      } else {
        familiesBuilder_.setMessage(index, value);
      }
      return this;
    }
    /**
     * <pre>
     * May be empty, but only if the entire row is empty.
     * The mutual ordering of column families is not specified.
     * </pre>
     *
     * <code>repeated .google.bigtable.v1.Family families = 2;</code>
     */
    public Builder setFamilies(
        int index, com.google.bigtable.v1.Family.Builder builderForValue) {
      if (familiesBuilder_ == null) {
        ensureFamiliesIsMutable();
        families_.set(index, builderForValue.build());
        onChanged();
      } else {
        familiesBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     * <pre>
     * May be empty, but only if the entire row is empty.
     * The mutual ordering of column families is not specified.
     * </pre>
     *
     * <code>repeated .google.bigtable.v1.Family families = 2;</code>
     */
    public Builder addFamilies(com.google.bigtable.v1.Family value) {
      if (familiesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureFamiliesIsMutable();
        families_.add(value);
        onChanged();
      } else {
        familiesBuilder_.addMessage(value);
      }
      return this;
    }
    /**
     * <pre>
     * May be empty, but only if the entire row is empty.
     * The mutual ordering of column families is not specified.
     * </pre>
     *
     * <code>repeated .google.bigtable.v1.Family families = 2;</code>
     */
    public Builder addFamilies(
        int index, com.google.bigtable.v1.Family value) {
      if (familiesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureFamiliesIsMutable();
        families_.add(index, value);
        onChanged();
      } else {
        familiesBuilder_.addMessage(index, value);
      }
      return this;
    }
    /**
     * <pre>
     * May be empty, but only if the entire row is empty.
     * The mutual ordering of column families is not specified.
     * </pre>
     *
     * <code>repeated .google.bigtable.v1.Family families = 2;</code>
     */
    public Builder addFamilies(
        com.google.bigtable.v1.Family.Builder builderForValue) {
      if (familiesBuilder_ == null) {
        ensureFamiliesIsMutable();
        families_.add(builderForValue.build());
        onChanged();
      } else {
        familiesBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /**
     * <pre>
     * May be empty, but only if the entire row is empty.
     * The mutual ordering of column families is not specified.
     * </pre>
     *
     * <code>repeated .google.bigtable.v1.Family families = 2;</code>
     */
    public Builder addFamilies(
        int index, com.google.bigtable.v1.Family.Builder builderForValue) {
      if (familiesBuilder_ == null) {
        ensureFamiliesIsMutable();
        families_.add(index, builderForValue.build());
        onChanged();
      } else {
        familiesBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     * <pre>
     * May be empty, but only if the entire row is empty.
     * The mutual ordering of column families is not specified.
     * </pre>
     *
     * <code>repeated .google.bigtable.v1.Family families = 2;</code>
     */
    public Builder addAllFamilies(
        java.lang.Iterable<? extends com.google.bigtable.v1.Family> values) {
      if (familiesBuilder_ == null) {
        ensureFamiliesIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(
            values, families_);
        onChanged();
      } else {
        familiesBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     * <pre>
     * May be empty, but only if the entire row is empty.
     * The mutual ordering of column families is not specified.
     * </pre>
     *
     * <code>repeated .google.bigtable.v1.Family families = 2;</code>
     */
    public Builder clearFamilies() {
      if (familiesBuilder_ == null) {
        families_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000002);
        onChanged();
      } else {
        familiesBuilder_.clear();
      }
      return this;
    }
    /**
     * <pre>
     * May be empty, but only if the entire row is empty.
     * The mutual ordering of column families is not specified.
     * </pre>
     *
     * <code>repeated .google.bigtable.v1.Family families = 2;</code>
     */
    public Builder removeFamilies(int index) {
      if (familiesBuilder_ == null) {
        ensureFamiliesIsMutable();
        families_.remove(index);
        onChanged();
      } else {
        familiesBuilder_.remove(index);
      }
      return this;
    }
    /**
     * <pre>
     * May be empty, but only if the entire row is empty.
     * The mutual ordering of column families is not specified.
     * </pre>
     *
     * <code>repeated .google.bigtable.v1.Family families = 2;</code>
     */
    public com.google.bigtable.v1.Family.Builder getFamiliesBuilder(
        int index) {
      return getFamiliesFieldBuilder().getBuilder(index);
    }
    /**
     * <pre>
     * May be empty, but only if the entire row is empty.
     * The mutual ordering of column families is not specified.
     * </pre>
     *
     * <code>repeated .google.bigtable.v1.Family families = 2;</code>
     */
    public com.google.bigtable.v1.FamilyOrBuilder getFamiliesOrBuilder(
        int index) {
      if (familiesBuilder_ == null) {
        return families_.get(index); } else {
        return familiesBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     * <pre>
     * May be empty, but only if the entire row is empty.
     * The mutual ordering of column families is not specified.
     * </pre>
     *
     * <code>repeated .google.bigtable.v1.Family families = 2;</code>
     */
    public java.util.List<? extends com.google.bigtable.v1.FamilyOrBuilder>
         getFamiliesOrBuilderList() {
      if (familiesBuilder_ != null) {
        return familiesBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(families_);
      }
    }
    /**
     * <pre>
     * May be empty, but only if the entire row is empty.
     * The mutual ordering of column families is not specified.
     * </pre>
     *
     * <code>repeated .google.bigtable.v1.Family families = 2;</code>
     */
    public com.google.bigtable.v1.Family.Builder addFamiliesBuilder() {
      return getFamiliesFieldBuilder().addBuilder(
          com.google.bigtable.v1.Family.getDefaultInstance());
    }
    /**
     * <pre>
     * May be empty, but only if the entire row is empty.
     * The mutual ordering of column families is not specified.
     * </pre>
     *
     * <code>repeated .google.bigtable.v1.Family families = 2;</code>
     */
    public com.google.bigtable.v1.Family.Builder addFamiliesBuilder(
        int index) {
      return getFamiliesFieldBuilder().addBuilder(
          index, com.google.bigtable.v1.Family.getDefaultInstance());
    }
    /**
     * <pre>
     * May be empty, but only if the entire row is empty.
     * The mutual ordering of column families is not specified.
     * </pre>
     *
     * <code>repeated .google.bigtable.v1.Family families = 2;</code>
     */
    public java.util.List<com.google.bigtable.v1.Family.Builder>
         getFamiliesBuilderList() {
      return getFamiliesFieldBuilder().getBuilderList();
    }
    // Lazily switches from plain list storage to a RepeatedFieldBuilderV3;
    // once created, families_ is nulled and the builder owns the elements.
    private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.bigtable.v1.Family, com.google.bigtable.v1.Family.Builder, com.google.bigtable.v1.FamilyOrBuilder>
        getFamiliesFieldBuilder() {
      if (familiesBuilder_ == null) {
        familiesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.bigtable.v1.Family, com.google.bigtable.v1.Family.Builder, com.google.bigtable.v1.FamilyOrBuilder>(
                families_,
                ((bitField0_ & 0x00000002) == 0x00000002),
                getParentForChildren(),
                isClean());
        families_ = null;
      }
      return familiesBuilder_;
    }
    // No-op: this builder does not retain unknown fields.
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return this;
    }
    // No-op: this builder does not retain unknown fields.
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return this;
    }

    // @@protoc_insertion_point(builder_scope:google.bigtable.v1.Row)
  }
  // @@protoc_insertion_point(class_scope:google.bigtable.v1.Row)
  // Shared immutable default instance of Row (all fields at default values).
  private static final com.google.bigtable.v1.Row DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.bigtable.v1.Row();
  }
  public static com.google.bigtable.v1.Row getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser used by all parseFrom overloads; constructs a Row directly
  // from a CodedInputStream.
  private static final com.google.protobuf.Parser<Row>
      PARSER = new com.google.protobuf.AbstractParser<Row>() {
    public Row parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
        return new Row(input, extensionRegistry);
    }
  };
  public static com.google.protobuf.Parser<Row> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<Row> getParserForType() {
    return PARSER;
  }
  public com.google.bigtable.v1.Row getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
| |
package cz.brmlab.yodaqa.analysis.passage.biotagger;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import org.apache.uima.UimaContext;
import org.apache.uima.analysis_engine.AnalysisEngineProcessException;
import org.apache.uima.cas.CASException;
import org.apache.uima.fit.descriptor.SofaCapability;
import org.apache.uima.fit.util.JCasUtil;
import org.apache.uima.jcas.JCas;
import org.apache.uima.resource.ResourceInitializationException;
import org.cleartk.ml.CleartkSequenceAnnotator;
import org.cleartk.ml.Feature;
import org.cleartk.ml.Instances;
import org.cleartk.ml.feature.extractor.CleartkExtractor;
import org.cleartk.ml.feature.extractor.CleartkExtractor.Focus;
import org.cleartk.ml.feature.extractor.CleartkExtractor.Following;
import org.cleartk.ml.feature.extractor.CleartkExtractor.Ngram;
import org.cleartk.ml.feature.extractor.CleartkExtractor.Preceding;
import org.cleartk.ml.feature.extractor.CombinedExtractor1;
import org.cleartk.ml.feature.extractor.FeatureExtractor1;
import org.cleartk.ml.feature.extractor.TypePathExtractor;
import approxlib.distance.EditDist;
import approxlib.tree.LblTree;
import cz.brmlab.yodaqa.analysis.answer.LATByQuantity;
import cz.brmlab.yodaqa.model.SearchResult.AnswerBioMention;
import cz.brmlab.yodaqa.model.SearchResult.Passage;
import cz.brmlab.yodaqa.model.TyCor.LAT;
import cz.brmlab.yodaqa.model.TyCor.QuestionWordLAT;
import cz.brmlab.yodaqa.provider.crf.CRFSuite;
import cz.brmlab.yodaqa.provider.crf.CRFTagging;
import de.tudarmstadt.ukp.dkpro.core.api.segmentation.type.Token;
/** A CRF-based token sequence annotator tagging tokens with B-I-O labels.
* That is, "Begin"/"Inside"/"Outside". I.e., "bio" does not relate
* to anything biological. This can be also perceived as basically
* a custom "answer named entity" recognizer (that can use some
* question-specific features).
*
* The B-I-O token labelling is done based on various token-specific
* features that also take into account some question features. These
* features are passed through a sequence machine learned model; we use
 * CRF (Conditional Random Fields). Overall, we are heavily inspired
* by (Yao and van Durme, 2013a) here (aka JacanaQA).
*
* Unlike our other machine learners (well, the one for answer selection)
* we do not re-train the model used here on each evaluation run; that's
* a TODO item. XXX: add re-training instructions
*
* We use ClearTK for the training of the CRF model, also unlike our
* other machine learners. We use it because of the CRFsuite interface
* (I also looked at DKPro-TC, but it seems to be built just to run
* experiments, not to actually *use* the models within a pipeline too)
* but also to check if it's nice enough to convert AnswerFV to it too.
*
* XXX: ...and for *tagging* based on trained model, we use jcrfsuite
* instead. Yes, this is messy, but ClearTK interface for crfsuite
* has three caveats:
*
* * It is GPL2, unlike most of ClearTK
* * It does not support outputting tag probability as of now
* (but see https://groups.google.com/forum/#!topic/cleartk-users/n6xoaINnJu8)
* * It is rather slow as it executes new crfsuite process for
* each classify() call
*
* TODO: Either switch to ClearTK with some more of our machine
* learning stuff and eventually contribute back a better crfsuite
* wrapper, or use jcrfsuite for training too. */
@SofaCapability(
	inputSofas = { "Question", "Result", "PickedPassages" },
	outputSofas = { "PickedPassages" }
)

public class BIOTaggerCRF extends CleartkSequenceAnnotator<String> {
	/** Combined per-token extractor: POS, covering NE type, dependency type. */
	protected FeatureExtractor1<Token> tokenFeatureExtractor;
	/** Context (n-gram) extractors built around the same base extractors. */
	protected List<CleartkExtractor<Token, Token>> ngramFeatureExtractors;
	/** Converts between token-level B-I-O labels and AnswerBioMention spans. */
	protected CRFBioChunking<Token, AnswerBioMention> chunking;

	/** Sets up the feature extractors and the B-I-O chunking helper. */
	public void initialize(UimaContext context) throws ResourceInitializationException {
		super.initialize(context);

		FeatureExtractor1<Token> posExtractor = new TypePathExtractor<Token>(Token.class, "pos/PosValue");
		FeatureExtractor1<Token> NETypeExtractor = new CoveringNETypeExtractor<Token>();
		FeatureExtractor1<Token> depExtractor = new DependencyTypeExtractor<Token>();

		this.tokenFeatureExtractor = new CombinedExtractor1<Token>(
				// TODO: NumericTypeFeatureFunction?
				posExtractor,
				NETypeExtractor,
				depExtractor);

		this.ngramFeatureExtractors = new ArrayList<>();
		addNgramFeatureExtractor(posExtractor, 3);
		addNgramFeatureExtractor(NETypeExtractor, 3);
		/* TODO: The n-grams here should not be on token
		 * sequence but parse tree. Maybe. */
		addNgramFeatureExtractor(depExtractor, 2);

		/* Tokens will be combined to form AnswerBioMentions,
		 * with labels from the "mentionType" attribute; this
		 * label is actually always "ans", so we get B-ans, I-ans,
		 * O-ans. */
		this.chunking = new CRFBioChunking<Token, AnswerBioMention>(
				Token.class, AnswerBioMention.class);
	}

	/**
	 * Registers context extractors for the given base extractor, up to
	 * n-grams of size n_context (1 = shifted unigrams only, 2 = + bigrams,
	 * 3 = + trigrams).  NOTE: changing the set or order of these extractors
	 * changes the generated feature names and invalidates a trained model.
	 */
	protected void addNgramFeatureExtractor(FeatureExtractor1<Token> extractor, int n_context) {
		/* Shifted unigrams (two tokens to either side of the focus): */
		this.ngramFeatureExtractors.add(new CleartkExtractor<Token, Token>(Token.class, extractor,
					new Ngram(new Following(0, 1))));
		this.ngramFeatureExtractors.add(new CleartkExtractor<Token, Token>(Token.class, extractor,
					new Ngram(new Following(1, 2))));
		this.ngramFeatureExtractors.add(new CleartkExtractor<Token, Token>(Token.class, extractor,
					new Ngram(new Preceding(0, 1))));
		this.ngramFeatureExtractors.add(new CleartkExtractor<Token, Token>(Token.class, extractor,
					new Ngram(new Preceding(1, 2))));
		if (n_context == 1)
			return;
		/* Bigrams: */
		this.ngramFeatureExtractors.add(new CleartkExtractor<Token, Token>(Token.class, extractor,
					new Ngram(new Focus(), new Following(1))));
		this.ngramFeatureExtractors.add(new CleartkExtractor<Token, Token>(Token.class, extractor,
					new Ngram(new Following(2))));
		this.ngramFeatureExtractors.add(new CleartkExtractor<Token, Token>(Token.class, extractor,
					new Ngram(new Preceding(1), new Focus())));
		this.ngramFeatureExtractors.add(new CleartkExtractor<Token, Token>(Token.class, extractor,
					new Ngram(new Preceding(2))));
		if (n_context == 2)
			return;
		/* Trigrams: */
		this.ngramFeatureExtractors.add(new CleartkExtractor<Token, Token>(Token.class, extractor,
					new Ngram(new Focus(), new Following(2))));
		this.ngramFeatureExtractors.add(new CleartkExtractor<Token, Token>(Token.class, extractor,
					new Ngram(new Preceding(2), new Focus())));
		if (n_context == 3)
			return;
	}

	/** Tags every picked passage in the CAS, using question-derived context. */
	public void process(JCas jcas) throws AnalysisEngineProcessException {
		JCas questionView, passagesView;
		try {
			questionView = jcas.getView("Question");
			passagesView = jcas.getView("PickedPassages");
		} catch (CASException e) {
			throw new AnalysisEngineProcessException(e);
		}

		/* We may want to generate question-specific features
		 * based on a question LAT. Decide on the set of LATs
		 * (or rather just their synset ids) to use for this. */
		Collection<Long> lats = getSpecializingLATs(questionView);

		/* A tree representation of the question dependency tree. */
		LblTree qTree = LblTreeCASFactory.casToTree(questionView);

		// for each sentence in the document, generate training/classification instances
		for (Passage p : JCasUtil.select(passagesView, Passage.class)) {
			processPassage(passagesView, p, lats, qTree);
		}
	}

	/**
	 * Collects synset ids of question LATs worth specializing features on:
	 * question-word LATs keep their own synset, quantity LATs are collapsed
	 * into the sentinel value 2L.  LATs with no synset are skipped.
	 */
	protected Collection<Long> getSpecializingLATs(JCas questionView) {
		Collection<LAT> qLats = JCasUtil.select(questionView, LAT.class);
		Collection<Long> lats = new HashSet<Long>();
		for (LAT lat : qLats) {
			if (lat.getSynset() == 0)
				continue; // no synset
			if (lats.contains(lat.getSynset()))
				continue; // dupe
			if (lat instanceof QuestionWordLAT) {
				lats.add(lat.getSynset());
			} else if (LATByQuantity.latIsQuantity(lat)) {
				lats.add(2L /* special indicator for quantity LATs */);
			}
		}
		return lats;
	}

	/**
	 * Extracts per-token features for one passage and either writes them as
	 * training instances (training mode) or runs the CRF tagger and creates
	 * AnswerBioMention annotations (classification mode).
	 */
	protected void processPassage(JCas passagesView, Passage p, Collection<Long> lats, LblTree qTree)
			throws AnalysisEngineProcessException {
		List<List<Feature>> featureLists = new ArrayList<List<Feature>>();

		/* Compare the dependency tree of the passage and the question;
		 * this will be used to produce alignment-related features.
		 * In other words, we are trying to find a way to rewrite
		 * the passage to the question, and take note which tokens
		 * we can keep as they are, which we need to delete and which
		 * we should rename (i.e. change their tagging). */
		LblTree aTree = LblTreeCASFactory.spanToTree(passagesView, p);
		EditFeatureGenerator editExtractor = null;
		/* N.B. dependency tree may be missing, e.g. due to the #tokens
		 * limit parser hit. */
		if (aTree != null && qTree != null) {
			EditDist editDist = new EditDist(/* normalized */ true);
			editDist.treeDist(aTree, qTree);
			// NOTE(review): prints the edit script — presumably debug output
			editDist.printHumaneEditScript();
			editExtractor = new EditFeatureGenerator(editDist);
		}

		// for each token, extract features and the outcome
		List<Token> tokens = JCasUtil.selectCovered(Token.class, p);
		int i = 0;
		for (Token token : tokens) {
			// apply the feature extractors
			List<Feature> tokenFeatures = new ArrayList<Feature>();
			tokenFeatures.addAll(this.tokenFeatureExtractor.extract(passagesView, token));
			for (CleartkExtractor<Token, Token> ngramExtractor : ngramFeatureExtractors)
				tokenFeatures.addAll(ngramExtractor.extractWithin(passagesView, token, p));
			// tokenFeatures.add(new Feature("lemma", token.getLemma().getValue())); // for debugging

			// apply the edit feature generator
			if (editExtractor != null)
				tokenFeatures.addAll(editExtractor.extract(tokenFeatures, i, token, aTree, qTree));

			/* Combine with question LAT info, so each feature
			 * will have specific weight for the given class
			 * of questions. N.B. non-combined features are also
			 * still kept and used! (The motivation is to provide
			 * a reasonable baseline for LATs unseen during
			 * training.) */
			tokenFeatures.addAll(expandFeaturesByLats(tokenFeatures, lats));

			featureLists.add(tokenFeatures);
			i++;
		}

		if (this.isTraining()) {
			// during training, convert existing mentions in the CAS into expected classifier outcomes
			List<AnswerBioMention> abms = JCasUtil.selectCovered(AnswerBioMention.class, p);
			if (!abms.isEmpty()) {
				/* Do not train on passages with no answer
				 * mentions, the set would be too negatively
				 * biased then. */

				// convert the mention annotations into token-level BIO outcome labels
				List<String> outcomes = this.chunking.createOutcomes(passagesView, tokens, abms);
				// write the features and outcomes as training instances
				this.dataWriter.write(Instances.toInstances(outcomes, featureLists));
			}
		} else {
			// during classification, convert classifier outcomes into mentions in the CAS

			// get the predicted BIO outcome labels from the classifier
			CRFTagging tagging = CRFSuite.getInstance().tag(featureLists);
			tagging.logProb(tokens);

			// create the AnswerBioMention annotations in the CAS
			this.chunking.createChunks(passagesView, tokens, tagging);
		}
	}

	/**
	 * Returns a new list with each feature duplicated once per LAT synset id,
	 * the copy's name prefixed with "&lt;synset&gt;|".  The input list is not
	 * modified; callers append the result to the original features.
	 */
	protected List<Feature> expandFeaturesByLats(List<Feature> features, Collection<Long> lats) {
		List<Feature> xFeatures = new ArrayList<>();
		for (Feature f : features) {
			for (Long l : lats) {
				xFeatures.add(new Feature(Long.toString(l) + "|" + f.getName(), f.getValue()));
			}
		}
		return xFeatures;
	}
}
| |
/**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.mapreduce;
import static java.lang.String.format;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Base64;
import java.util.HashSet;
import java.util.Set;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotEnabledException;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RegionLocator;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
import org.apache.hbase.thirdparty.com.google.common.base.Splitter;
import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
/**
* Tool to import data from a TSV file.
*
* This tool is rather simplistic - it doesn't do any quoting or
* escaping, but is useful for many data loads.
*
* @see ImportTsv#usage(String)
*/
@InterfaceAudience.Public
public class ImportTsv extends Configured implements Tool {
protected static final Logger LOG = LoggerFactory.getLogger(ImportTsv.class);
// Tool name; also used as the default MR job-name prefix.
final static String NAME = "importtsv";
// Fully-qualified class name of a custom Mapper to use instead of DEFAULT_MAPPER.
public final static String MAPPER_CONF_KEY = "importtsv.mapper.class";
// If set, HFiles are written to this path for later bulk load instead of Put-ing live.
public final static String BULK_OUTPUT_CONF_KEY = "importtsv.bulk.output";
// Fixed timestamp applied to all imported cells (defaults to job start time; see run()).
public final static String TIMESTAMP_CONF_KEY = "importtsv.timestamp";
public final static String JOB_NAME_CONF_KEY = "mapreduce.job.name";
// TODO: the rest of these configs are used exclusively by TsvImporterMapper.
// Move them out of the tool and let the mapper handle its own validation.
// Dry-run mode: job runs but writes nothing (NullOutputFormat); see createSubmittableJob().
public final static String DRY_RUN_CONF_KEY = "importtsv.dry.run";
// If true, bad lines are logged to stderr. Default: false.
public final static String LOG_BAD_LINES_CONF_KEY = "importtsv.log.bad.lines";
// If true, unparseable lines are skipped instead of failing the task.
public final static String SKIP_LINES_CONF_KEY = "importtsv.skip.bad.lines";
public final static String SKIP_EMPTY_COLUMNS = "importtsv.skip.empty.columns";
// Comma-separated column spec, e.g. "HBASE_ROW_KEY,cf:q1,cf:q2"; parsed by TsvParser.
public final static String COLUMNS_CONF_KEY = "importtsv.columns";
// Field separator; must be a single byte (validated in TsvParser).
public final static String SEPARATOR_CONF_KEY = "importtsv.separator";
public final static String ATTRIBUTE_SEPERATOR_CONF_KEY = "attributes.seperator";
//This config is used to propagate credentials from parent MR jobs which launch
//ImportTSV jobs. SEE IntegrationTestImportTsv.
public final static String CREDENTIALS_LOCATION = "credentials_location";
final static String DEFAULT_SEPARATOR = "\t";
// Separator between key and value inside an HBASE_ATTRIBUTES_KEY column ("key=>value").
final static String DEFAULT_ATTRIBUTES_SEPERATOR = "=>";
// Separator between multiple attributes within one HBASE_ATTRIBUTES_KEY column.
final static String DEFAULT_MULTIPLE_ATTRIBUTES_SEPERATOR = ",";
// Raw Class: must stay raw because Job.setMapperClass is fed either this or a
// user-supplied Class.forName() result.
final static Class DEFAULT_MAPPER = TsvImporterMapper.class;
public final static String CREATE_TABLE_CONF_KEY = "create.table";
public final static String NO_STRICT_COL_FAMILY = "no.strict";
/**
* If table didn't exist and was created in dry-run mode, this flag is
* flipped to delete it when MR ends.
*/
// Guarded by synchronized(ImportTsv.class) everywhere it is read or written.
private static boolean DRY_RUN_TABLE_CREATED;
public static class TsvParser {
/**
* Column families and qualifiers mapped to the TSV columns
*/
// Index i holds the family/qualifier for TSV column i; entries for the
// special columns (row key, timestamp, ...) are left null.
private final byte[][] families;
private final byte[][] qualifiers;
// Single-byte field separator (enforced in the constructor).
private final byte separatorByte;
private int rowKeyColumnIndex;
// Total number of columns declared in the spec; parse() rejects longer lines.
private int maxColumnCount;
// Default value must be negative
public static final int DEFAULT_TIMESTAMP_COLUMN_INDEX = -1;
private int timestampKeyColumnIndex = DEFAULT_TIMESTAMP_COLUMN_INDEX;
// Special column-spec tokens recognized in importtsv.columns.
public static final String ROWKEY_COLUMN_SPEC = "HBASE_ROW_KEY";
public static final String TIMESTAMPKEY_COLUMN_SPEC = "HBASE_TS_KEY";
public static final String ATTRIBUTES_COLUMN_SPEC = "HBASE_ATTRIBUTES_KEY";
public static final String CELL_VISIBILITY_COLUMN_SPEC = "HBASE_CELL_VISIBILITY";
public static final String CELL_TTL_COLUMN_SPEC = "HBASE_CELL_TTL";
private int attrKeyColumnIndex = DEFAULT_ATTRIBUTES_COLUMN_INDEX;
// Sentinels meaning "that special column was not configured"; all must be negative.
public static final int DEFAULT_ATTRIBUTES_COLUMN_INDEX = -1;
public static final int DEFAULT_CELL_VISIBILITY_COLUMN_INDEX = -1;
public static final int DEFAULT_CELL_TTL_COLUMN_INDEX = -1;
private int cellVisibilityColumnIndex = DEFAULT_CELL_VISIBILITY_COLUMN_INDEX;
private int cellTTLColumnIndex = DEFAULT_CELL_TTL_COLUMN_INDEX;
/**
 * Builds a parser for the given column specification.
 *
 * @param columnsSpecification comma-separated list of columns to parse out.
 *        Each entry is either a special token (e.g. {@link #ROWKEY_COLUMN_SPEC})
 *        or a "family" / "family:qualifier" target.
 * @param separatorStr field separator; must encode to exactly one byte
 */
public TsvParser(String columnsSpecification, String separatorStr) {
  // Separator must be a single byte so parse() can scan byte-by-byte.
  byte[] sepBytes = Bytes.toBytes(separatorStr);
  Preconditions.checkArgument(sepBytes.length == 1,
      "TsvParser only supports single-byte separators");
  separatorByte = sepBytes[0];
  // Split and trim the column spec, then classify each entry.
  ArrayList<String> specs = Lists.newArrayList(
      Splitter.on(',').trimResults().split(columnsSpecification));
  maxColumnCount = specs.size();
  families = new byte[maxColumnCount][];
  qualifiers = new byte[maxColumnCount][];
  for (int i = 0; i < maxColumnCount; i++) {
    String spec = specs.get(i);
    if (ROWKEY_COLUMN_SPEC.equals(spec)) {
      rowKeyColumnIndex = i;
    } else if (TIMESTAMPKEY_COLUMN_SPEC.equals(spec)) {
      timestampKeyColumnIndex = i;
    } else if (ATTRIBUTES_COLUMN_SPEC.equals(spec)) {
      attrKeyColumnIndex = i;
    } else if (CELL_VISIBILITY_COLUMN_SPEC.equals(spec)) {
      cellVisibilityColumnIndex = i;
    } else if (CELL_TTL_COLUMN_SPEC.equals(spec)) {
      cellTTLColumnIndex = i;
    } else {
      // Ordinary data column: "family" or "family:qualifier".
      String[] parts = spec.split(":", 2);
      families[i] = parts[0].getBytes();
      qualifiers[i] = parts.length == 1
          ? HConstants.EMPTY_BYTE_ARRAY
          : parts[1].getBytes();
    }
  }
}
// True if the column spec included HBASE_TS_KEY.
public boolean hasTimestamp() {
return timestampKeyColumnIndex != DEFAULT_TIMESTAMP_COLUMN_INDEX;
}
public int getTimestampKeyColumnIndex() {
return timestampKeyColumnIndex;
}
// True if the column spec included HBASE_ATTRIBUTES_KEY.
public boolean hasAttributes() {
return attrKeyColumnIndex != DEFAULT_ATTRIBUTES_COLUMN_INDEX;
}
// True if the column spec included HBASE_CELL_VISIBILITY.
public boolean hasCellVisibility() {
return cellVisibilityColumnIndex != DEFAULT_CELL_VISIBILITY_COLUMN_INDEX;
}
/**
 * True if the column spec included HBASE_CELL_TTL.
 *
 * Bug fix: previously compared against DEFAULT_CELL_VISIBILITY_COLUMN_INDEX.
 * Both sentinels happen to be -1, so behavior was accidentally correct, but the
 * intent is the TTL sentinel — use it so the check survives constant changes.
 */
public boolean hasCellTTL() {
  return cellTTLColumnIndex != DEFAULT_CELL_TTL_COLUMN_INDEX;
}
// Index of the HBASE_ATTRIBUTES_KEY column, or -1 if not configured.
public int getAttributesKeyColumnIndex() {
return attrKeyColumnIndex;
}
// Index of the HBASE_CELL_VISIBILITY column, or -1 if not configured.
public int getCellVisibilityColumnIndex() {
return cellVisibilityColumnIndex;
}
// Index of the HBASE_CELL_TTL column, or -1 if not configured.
public int getCellTTLColumnIndex() {
return cellTTLColumnIndex;
}
// Index of the HBASE_ROW_KEY column (defaults to 0 if not declared).
public int getRowKeyColumnIndex() {
return rowKeyColumnIndex;
}
// Family bytes for TSV column idx; null for special (non-data) columns.
public byte[] getFamily(int idx) {
return families[idx];
}
// Qualifier bytes for TSV column idx; null for special (non-data) columns.
public byte[] getQualifier(int idx) {
return qualifiers[idx];
}
/**
 * Parses one TSV line by locating every separator byte.
 *
 * @param lineBytes raw bytes of the line (no trailing newline expected)
 * @param length number of valid bytes in {@code lineBytes}
 * @return a view over the line's column offsets
 * @throws BadTsvLineException if the line has no separator, too many columns,
 *         or is too short to contain a configured special column
 */
public ParsedLine parse(byte[] lineBytes, int length)
    throws BadTsvLineException {
  // Record the position of every separator; the final entry is the line end,
  // so offsets.size() equals the number of columns on this line.
  ArrayList<Integer> offsets = new ArrayList<>(maxColumnCount);
  for (int pos = 0; pos < length; pos++) {
    if (lineBytes[pos] == separatorByte) {
      offsets.add(pos);
    }
  }
  if (offsets.isEmpty()) {
    throw new BadTsvLineException("No delimiter");
  }
  offsets.add(length);
  int columnCount = offsets.size();
  // Validate the line is long enough for every configured special column.
  if (columnCount > maxColumnCount) {
    throw new BadTsvLineException("Excessive columns");
  }
  if (columnCount <= getRowKeyColumnIndex()) {
    throw new BadTsvLineException("No row key");
  }
  if (hasTimestamp() && columnCount <= getTimestampKeyColumnIndex()) {
    throw new BadTsvLineException("No timestamp");
  }
  if (hasAttributes() && columnCount <= getAttributesKeyColumnIndex()) {
    throw new BadTsvLineException("No attributes specified");
  }
  if (hasCellVisibility() && columnCount <= getCellVisibilityColumnIndex()) {
    throw new BadTsvLineException("No cell visibility specified");
  }
  if (hasCellTTL() && columnCount <= getCellTTLColumnIndex()) {
    throw new BadTsvLineException("No cell TTL specified");
  }
  return new ParsedLine(offsets, lineBytes);
}
// Lightweight view over one parsed line: column boundaries plus the raw bytes.
// Offsets/lengths are computed lazily from the separator positions recorded
// by parse(); no column data is copied.
class ParsedLine {
// Positions of each separator byte, plus the line length as the last entry.
private final ArrayList<Integer> tabOffsets;
private byte[] lineBytes;
ParsedLine(ArrayList<Integer> tabOffsets, byte[] lineBytes) {
this.tabOffsets = tabOffsets;
this.lineBytes = lineBytes;
}
public int getRowKeyOffset() {
return getColumnOffset(rowKeyColumnIndex);
}
public int getRowKeyLength() {
return getColumnLength(rowKeyColumnIndex);
}
// Returns the line's timestamp column parsed as a long, or the supplied
// default when no HBASE_TS_KEY column is configured.
public long getTimestamp(long ts) throws BadTsvLineException {
// Return ts if HBASE_TS_KEY is not configured in column spec
if (!hasTimestamp()) {
return ts;
}
String timeStampStr = Bytes.toString(lineBytes,
getColumnOffset(timestampKeyColumnIndex),
getColumnLength(timestampKeyColumnIndex));
try {
return Long.parseLong(timeStampStr);
} catch (NumberFormatException nfe) {
// treat this record as bad record
throw new BadTsvLineException("Invalid timestamp " + timeStampStr);
}
}
// Raw attributes column text ("k=>v,k2=>v2"), or null when not configured.
private String getAttributes() {
if (!hasAttributes()) {
return null;
} else {
return Bytes.toString(lineBytes, getColumnOffset(attrKeyColumnIndex),
getColumnLength(attrKeyColumnIndex));
}
}
// Attributes split on the multi-attribute separator (","); null when absent.
public String[] getIndividualAttributes() {
String attributes = getAttributes();
if (attributes != null) {
return attributes.split(DEFAULT_MULTIPLE_ATTRIBUTES_SEPERATOR);
} else {
return null;
}
}
// NOTE: the "else" branches below return the -1 sentinel constants as
// offset/length values; callers are expected to check has*() first.
public int getAttributeKeyOffset() {
if (hasAttributes()) {
return getColumnOffset(attrKeyColumnIndex);
} else {
return DEFAULT_ATTRIBUTES_COLUMN_INDEX;
}
}
public int getAttributeKeyLength() {
if (hasAttributes()) {
return getColumnLength(attrKeyColumnIndex);
} else {
return DEFAULT_ATTRIBUTES_COLUMN_INDEX;
}
}
public int getCellVisibilityColumnOffset() {
if (hasCellVisibility()) {
return getColumnOffset(cellVisibilityColumnIndex);
} else {
return DEFAULT_CELL_VISIBILITY_COLUMN_INDEX;
}
}
public int getCellVisibilityColumnLength() {
if (hasCellVisibility()) {
return getColumnLength(cellVisibilityColumnIndex);
} else {
return DEFAULT_CELL_VISIBILITY_COLUMN_INDEX;
}
}
// Visibility-expression column text, or null when not configured.
public String getCellVisibility() {
if (!hasCellVisibility()) {
return null;
} else {
return Bytes.toString(lineBytes, getColumnOffset(cellVisibilityColumnIndex),
getColumnLength(cellVisibilityColumnIndex));
}
}
public int getCellTTLColumnOffset() {
if (hasCellTTL()) {
return getColumnOffset(cellTTLColumnIndex);
} else {
return DEFAULT_CELL_TTL_COLUMN_INDEX;
}
}
public int getCellTTLColumnLength() {
if (hasCellTTL()) {
return getColumnLength(cellTTLColumnIndex);
} else {
return DEFAULT_CELL_TTL_COLUMN_INDEX;
}
}
// NOTE(review): unlike getTimestamp(), this reads the TTL column as a
// binary big-endian long via Bytes.toLong rather than parsing decimal
// text — confirm that producers actually emit 8 binary bytes here.
public long getCellTTL() {
if (!hasCellTTL()) {
return 0;
} else {
return Bytes.toLong(lineBytes, getColumnOffset(cellTTLColumnIndex),
getColumnLength(cellTTLColumnIndex));
}
}
// Start of column idx: one past the previous separator (0 for the first column).
public int getColumnOffset(int idx) {
if (idx > 0)
return tabOffsets.get(idx - 1) + 1;
else
return 0;
}
public int getColumnLength(int idx) {
return tabOffsets.get(idx) - getColumnOffset(idx);
}
// Number of columns actually present on this line.
public int getColumnCount() {
return tabOffsets.size();
}
public byte[] getLineBytes() {
return lineBytes;
}
}
/**
 * Signals that a single TSV input line could not be parsed (missing delimiter,
 * missing row key, invalid timestamp, etc.). Callers may skip or fail the
 * record depending on importtsv.skip.bad.lines.
 */
public static class BadTsvLineException extends Exception {
  private static final long serialVersionUID = 1L;

  /** @param err human-readable description of why the line was rejected */
  public BadTsvLineException(String err) {
    super(err);
  }

  /**
   * Preserves the underlying cause for diagnostics (e.g. a
   * NumberFormatException from timestamp parsing).
   *
   * @param err human-readable description of why the line was rejected
   * @param cause underlying failure
   */
  public BadTsvLineException(String err, Throwable cause) {
    super(err, cause);
  }
}
/**
* Return starting position and length of row key from the specified line bytes.
* @param lineBytes the raw bytes of one TSV line
* @param length number of valid bytes in lineBytes
* @return Pair of row key offset and length.
* @throws BadTsvLineException if the row key column is empty or the line has
*         fewer columns than the configured row key position
*/
public Pair<Integer, Integer> parseRowKey(byte[] lineBytes, int length)
throws BadTsvLineException {
int rkColumnIndex = 0;
int startPos = 0, endPos = 0;
// Scan one byte past the end so the final (unterminated) column is handled
// by the i == length case.
for (int i = 0; i <= length; i++) {
if (i == length || lineBytes[i] == separatorByte) {
// endPos is the last byte of the column just closed.
endPos = i - 1;
if (rkColumnIndex++ == getRowKeyColumnIndex()) {
if ((endPos + 1) == startPos) {
throw new BadTsvLineException("Empty value for ROW KEY.");
}
break;
} else {
// Advance past this column's separator to the next column start.
startPos = endPos + 2;
}
}
// Reached end of line without having found the row key column.
if (i == length) {
throw new BadTsvLineException(
"Row key does not exist as number of columns in the line"
+ " are less than row key position.");
}
}
return new Pair<>(startPos, endPos - startPos + 1);
}
}
/**
* Sets up the actual job.
*
* Decides between two modes based on importtsv.bulk.output: HFile generation
* for bulk load (with optional table creation), or direct Put writes to a
* live table. In dry-run mode output is discarded via NullOutputFormat.
*
* @param conf The current configuration.
* @param args The command line parameters: args[0] = table name, args[1] = input dir.
* @return The newly created job.
* @throws IOException When setting up the job fails.
* @throws ClassNotFoundException When the configured custom mapper class cannot be loaded.
*/
protected static Job createSubmittableJob(Configuration conf, String[] args)
throws IOException, ClassNotFoundException {
Job job = null;
boolean isDryRun = conf.getBoolean(DRY_RUN_CONF_KEY, false);
try (Connection connection = ConnectionFactory.createConnection(conf)) {
try (Admin admin = connection.getAdmin()) {
// Support non-XML supported characters
// by re-encoding the passed separator as a Base64 string.
String actualSeparator = conf.get(SEPARATOR_CONF_KEY);
if (actualSeparator != null) {
conf.set(SEPARATOR_CONF_KEY,
Bytes.toString(Base64.getEncoder().encode(actualSeparator.getBytes())));
}
// See if a non-default Mapper was set
String mapperClassName = conf.get(MAPPER_CONF_KEY);
Class mapperClass = mapperClassName != null? Class.forName(mapperClassName): DEFAULT_MAPPER;
TableName tableName = TableName.valueOf(args[0]);
Path inputDir = new Path(args[1]);
String jobName = conf.get(JOB_NAME_CONF_KEY,NAME + "_" + tableName.getNameAsString());
job = Job.getInstance(conf, jobName);
job.setJarByClass(mapperClass);
FileInputFormat.setInputPaths(job, inputDir);
job.setInputFormatClass(TextInputFormat.class);
job.setMapperClass(mapperClass);
job.setMapOutputKeyClass(ImmutableBytesWritable.class);
String hfileOutPath = conf.get(BULK_OUTPUT_CONF_KEY);
String[] columns = conf.getStrings(COLUMNS_CONF_KEY);
// Propagate delegation tokens from a parent MR job, if provided.
if(StringUtils.isNotEmpty(conf.get(CREDENTIALS_LOCATION))) {
String fileLoc = conf.get(CREDENTIALS_LOCATION);
Credentials cred = Credentials.readTokenStorageFile(new File(fileLoc), conf);
job.getCredentials().addAll(cred);
}
// Bulk-load mode: write HFiles instead of live Puts.
if (hfileOutPath != null) {
if (!admin.tableExists(tableName)) {
LOG.warn(format("Table '%s' does not exist.", tableName));
if ("yes".equalsIgnoreCase(conf.get(CREATE_TABLE_CONF_KEY, "yes"))) {
// TODO: this is backwards. Instead of depending on the existence of a table,
// create a sane splits file for HFileOutputFormat based on data sampling.
createTable(admin, tableName, columns);
if (isDryRun) {
LOG.warn("Dry run: Table will be deleted at end of dry run.");
synchronized (ImportTsv.class) {
DRY_RUN_TABLE_CREATED = true;
}
}
} else {
String errorMsg =
format("Table '%s' does not exist and '%s' is set to no.", tableName,
CREATE_TABLE_CONF_KEY);
LOG.error(errorMsg);
throw new TableNotFoundException(errorMsg);
}
}
try (Table table = connection.getTable(tableName);
RegionLocator regionLocator = connection.getRegionLocator(tableName)) {
boolean noStrict = conf.getBoolean(NO_STRICT_COL_FAMILY, false);
// if no.strict is false then check column family
if(!noStrict) {
ArrayList<String> unmatchedFamilies = new ArrayList<>();
Set<String> cfSet = getColumnFamilies(columns);
TableDescriptor tDesc = table.getDescriptor();
for (String cf : cfSet) {
if(!tDesc.hasColumnFamily(Bytes.toBytes(cf))) {
unmatchedFamilies.add(cf);
}
}
if(unmatchedFamilies.size() > 0) {
ArrayList<String> familyNames = new ArrayList<>();
for (ColumnFamilyDescriptor family : table.getDescriptor().getColumnFamilies()) {
familyNames.add(family.getNameAsString());
}
String msg =
"Column Families " + unmatchedFamilies + " specified in " + COLUMNS_CONF_KEY
+ " does not match with any of the table " + tableName
+ " column families " + familyNames + ".\n"
+ "To disable column family check, use -D" + NO_STRICT_COL_FAMILY
+ "=true.\n";
usage(msg);
System.exit(-1);
}
}
// TsvImporterTextMapper emits Text values which need the text sort reducer;
// all other mappers emit Puts.
if (mapperClass.equals(TsvImporterTextMapper.class)) {
job.setMapOutputValueClass(Text.class);
job.setReducerClass(TextSortReducer.class);
} else {
job.setMapOutputValueClass(Put.class);
job.setCombinerClass(PutCombiner.class);
job.setReducerClass(PutSortReducer.class);
}
if (!isDryRun) {
Path outputDir = new Path(hfileOutPath);
FileOutputFormat.setOutputPath(job, outputDir);
HFileOutputFormat2.configureIncrementalLoad(job, table.getDescriptor(),
regionLocator);
}
}
} else {
// Live-write mode: table must already exist, and only Put-emitting mappers work.
if (!admin.tableExists(tableName)) {
String errorMsg = format("Table '%s' does not exist.", tableName);
LOG.error(errorMsg);
throw new TableNotFoundException(errorMsg);
}
if (mapperClass.equals(TsvImporterTextMapper.class)) {
usage(TsvImporterTextMapper.class.toString()
+ " should not be used for non bulkloading case. use "
+ TsvImporterMapper.class.toString()
+ " or custom mapper whose value type is Put.");
System.exit(-1);
}
if (!isDryRun) {
// No reducers. Just write straight to table. Call initTableReducerJob
// to set up the TableOutputFormat.
TableMapReduceUtil.initTableReducerJob(tableName.getNameAsString(), null, job);
}
job.setNumReduceTasks(0);
}
// Dry run: discard all output but keep serializations so the pipeline still runs.
if (isDryRun) {
job.setOutputFormatClass(NullOutputFormat.class);
job.getConfiguration().setStrings("io.serializations",
job.getConfiguration().get("io.serializations"),
MutationSerialization.class.getName(), ResultSerialization.class.getName(),
CellSerialization.class.getName());
}
TableMapReduceUtil.addDependencyJars(job);
TableMapReduceUtil.addDependencyJarsForClasses(job.getConfiguration(),
org.apache.hbase.thirdparty.com.google.common.base.Function.class /* Guava used by TsvParser */);
}
}
return job;
}
/**
 * Creates the target table with one default-configured column family per
 * family referenced in the column spec.
 *
 * Modernized from the deprecated HTableDescriptor/HColumnDescriptor pair to
 * the builder API, consistent with the TableDescriptor/ColumnFamilyDescriptor
 * types this class already imports.
 *
 * @param admin open Admin used to issue the create
 * @param tableName table to create
 * @param columns raw importtsv.columns entries; families are derived from them
 * @throws IOException if the create fails
 */
private static void createTable(Admin admin, TableName tableName, String[] columns)
    throws IOException {
  Set<String> cfSet = getColumnFamilies(columns);
  TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName);
  for (String cf : cfSet) {
    // Default descriptor for each family; tuning is left to the operator.
    builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(cf));
  }
  LOG.warn(format("Creating table '%s' with '%s' columns and default descriptors.",
      tableName, cfSet));
  admin.createTable(builder.build());
}
/**
 * Deletes the table that was auto-created during a dry run.
 * Best-effort: failures are logged, never thrown, so the dry run's exit
 * status reflects the job itself.
 *
 * @param conf configuration used to open a fresh connection
 * @param args command line args; args[0] is the table name
 */
private static void deleteTable(Configuration conf, String[] args) {
  TableName target = TableName.valueOf(args[0]);
  try (Connection conn = ConnectionFactory.createConnection(conf);
       Admin admin = conn.getAdmin()) {
    try {
      admin.disableTable(target);
    } catch (TableNotEnabledException e) {
      // Already disabled — fine, proceed straight to deletion.
      LOG.debug("Dry mode: Table: " + target + " already disabled, so just deleting it.");
    }
    admin.deleteTable(target);
  } catch (IOException e) {
    LOG.error(format("***Dry run: Failed to delete table '%s'.***%n%s", target,
        e.toString()));
    return;
  }
  LOG.info(format("Dry run: Deleted table '%s'.", target));
}
/**
 * Extracts the set of distinct column families referenced by the column spec,
 * ignoring the special pseudo-columns (row key, timestamp, visibility, TTL,
 * attributes).
 *
 * @param columns raw importtsv.columns entries ("family" or "family:qualifier")
 * @return distinct family names
 */
private static Set<String> getColumnFamilies(String[] columns) {
  Set<String> families = new HashSet<>();
  for (String spec : columns) {
    boolean isSpecialColumn =
        TsvParser.ROWKEY_COLUMN_SPEC.equals(spec)
            || TsvParser.TIMESTAMPKEY_COLUMN_SPEC.equals(spec)
            || TsvParser.CELL_VISIBILITY_COLUMN_SPEC.equals(spec)
            || TsvParser.CELL_TTL_COLUMN_SPEC.equals(spec)
            || TsvParser.ATTRIBUTES_COLUMN_SPEC.equals(spec);
    if (!isSpecialColumn) {
      // Only the family part matters (spec may be "cf" or "cf:qualifier").
      families.add(spec.split(":", 2)[0]);
    }
  }
  return families;
}
/*
* Prints tool usage to stderr, optionally preceded by an error message.
*
* Fixes: the attributes help line previously interpolated
* TsvParser.DEFAULT_ATTRIBUTES_COLUMN_INDEX (printing "-1") where the "=>"
* separator was intended; a missing space ran "column" into HBASE_TS_KEY;
* "seperator" typo corrected in the user-facing text.
*
* @param errorMsg Error message. Can be null.
*/
private static void usage(final String errorMsg) {
if (errorMsg != null && errorMsg.length() > 0) {
System.err.println("ERROR: " + errorMsg);
}
String usage =
"Usage: " + NAME + " -D"+ COLUMNS_CONF_KEY + "=a,b,c <tablename> <inputdir>\n" +
"\n" +
"Imports the given input directory of TSV data into the specified table.\n" +
"\n" +
"The column names of the TSV data must be specified using the -D" + COLUMNS_CONF_KEY + "\n" +
"option. This option takes the form of comma-separated column names, where each\n" +
"column name is either a simple column family, or a columnfamily:qualifier. The special\n" +
"column name " + TsvParser.ROWKEY_COLUMN_SPEC + " is used to designate that this column should be used\n" +
"as the row key for each imported record. You must specify exactly one column\n" +
"to be the row key, and you must specify a column name for every column that exists in the\n" +
"input data. Another special column " + TsvParser.TIMESTAMPKEY_COLUMN_SPEC +
" designates that this column should be\n" +
"used as timestamp for each record. Unlike " + TsvParser.ROWKEY_COLUMN_SPEC + ", " +
TsvParser.TIMESTAMPKEY_COLUMN_SPEC + " is optional." + "\n" +
"You must specify at most one column as timestamp key for each imported record.\n" +
"Record with invalid timestamps (blank, non-numeric) will be treated as bad record.\n" +
"Note: if you use this option, then '" + TIMESTAMP_CONF_KEY + "' option will be ignored.\n" +
"\n" +
"Other special columns that can be specified are " + TsvParser.CELL_TTL_COLUMN_SPEC +
" and " + TsvParser.CELL_VISIBILITY_COLUMN_SPEC + ".\n" +
TsvParser.CELL_TTL_COLUMN_SPEC + " designates that this column will be used " +
"as a Cell's Time To Live (TTL) attribute.\n" +
TsvParser.CELL_VISIBILITY_COLUMN_SPEC + " designates that this column contains the " +
"visibility label expression.\n" +
"\n" +
TsvParser.ATTRIBUTES_COLUMN_SPEC+" can be used to specify Operation Attributes per record.\n"+
" Should be specified as key=>value where " + DEFAULT_ATTRIBUTES_SEPERATOR + " is used \n"+
" as the separator. Note that more than one OperationAttributes can be specified.\n"+
"By default importtsv will load data directly into HBase. To instead generate\n" +
"HFiles of data to prepare for a bulk data load, pass the option:\n" +
" -D" + BULK_OUTPUT_CONF_KEY + "=/path/for/output\n" +
" Note: if you do not use this option, then the target table must already exist in HBase\n" +
"\n" +
"Other options that may be specified with -D include:\n" +
" -D" + DRY_RUN_CONF_KEY + "=true - Dry run mode. Data is not actually populated into" +
" table. If table does not exist, it is created but deleted in the end.\n" +
" -D" + SKIP_LINES_CONF_KEY + "=false - fail if encountering an invalid line\n" +
" -D" + LOG_BAD_LINES_CONF_KEY + "=true - logs invalid lines to stderr\n" +
" -D" + SKIP_EMPTY_COLUMNS + "=false - If true then skip empty columns in bulk import\n" +
" '-D" + SEPARATOR_CONF_KEY + "=|' - eg separate on pipes instead of tabs\n" +
" -D" + TIMESTAMP_CONF_KEY + "=currentTimeAsLong - use the specified timestamp for the import\n" +
" -D" + MAPPER_CONF_KEY + "=my.Mapper - A user-defined Mapper to use instead of " +
DEFAULT_MAPPER.getName() + "\n" +
" -D" + JOB_NAME_CONF_KEY + "=jobName - use the specified mapreduce job name for the import\n" +
" -D" + CREATE_TABLE_CONF_KEY + "=no - can be used to avoid creation of table by this tool\n" +
" Note: if you set this to 'no', then the target table must already exist in HBase\n" +
" -D" + NO_STRICT_COL_FAMILY + "=true - ignore column family check in hbase table. " +
"Default is false\n\n" +
"For performance consider the following options:\n" +
" -Dmapreduce.map.speculative=false\n" +
" -Dmapreduce.reduce.speculative=false";
System.err.println(usage);
}
@Override
public int run(String[] args) throws Exception {
  if (args.length < 2) {
    usage("Wrong number of arguments: " + args.length);
    return -1;
  }
  // When MAPPER_CONF_KEY is null, the user wants to use the provided TsvImporterMapper, so
  // perform validation on these additional args. When it's not null, user has provided their
  // own mapper, thus these validation are not relevant.
  // TODO: validation for TsvImporterMapper, not this tool. Move elsewhere.
  if (null == getConf().get(MAPPER_CONF_KEY)) {
    // Make sure columns are specified
    String[] columns = getConf().getStrings(COLUMNS_CONF_KEY);
    if (columns == null) {
      usage("No columns specified. Please specify with -D" +
          COLUMNS_CONF_KEY+"=...");
      return -1;
    }
    // Exactly one row-key column is required.
    int rowkeysFound = countMatching(columns, TsvParser.ROWKEY_COLUMN_SPEC);
    if (rowkeysFound != 1) {
      usage("Must specify exactly one column as " + TsvParser.ROWKEY_COLUMN_SPEC);
      return -1;
    }
    // At most one timestamp-key column is allowed.
    int tskeysFound = countMatching(columns, TsvParser.TIMESTAMPKEY_COLUMN_SPEC);
    if (tskeysFound > 1) {
      usage("Must specify at most one column as "
          + TsvParser.TIMESTAMPKEY_COLUMN_SPEC);
      return -1;
    }
    // At most one attributes column is allowed.
    int attrKeysFound = countMatching(columns, TsvParser.ATTRIBUTES_COLUMN_SPEC);
    if (attrKeysFound > 1) {
      usage("Must specify at most one column as "
          + TsvParser.ATTRIBUTES_COLUMN_SPEC);
      return -1;
    }
    // At least one real data column must remain after the special columns.
    if (columns.length - (rowkeysFound + tskeysFound + attrKeysFound) < 1) {
      usage("One or more columns in addition to the row key and timestamp(optional) are required");
      return -1;
    }
  }
  // If timestamp option is not specified, use current system time; write it
  // back so invalid (non-numeric) values are replaced before the job reads it.
  long timestamp = getConf().getLong(TIMESTAMP_CONF_KEY, System.currentTimeMillis());
  getConf().setLong(TIMESTAMP_CONF_KEY, timestamp);
  synchronized (ImportTsv.class) {
    DRY_RUN_TABLE_CREATED = false;
  }
  Job job = createSubmittableJob(getConf(), args);
  boolean success = job.waitForCompletion(true);
  boolean delete;
  synchronized (ImportTsv.class) {
    delete = DRY_RUN_TABLE_CREATED;
  }
  if (delete) {
    // Clean up the table that was auto-created for the dry run.
    deleteTable(getConf(), args);
  }
  return success ? 0 : 1;
}

/** Counts how many entries of {@code columns} are exactly {@code spec}. */
private static int countMatching(String[] columns, String spec) {
  int count = 0;
  for (String col : columns) {
    if (col.equals(spec)) {
      count++;
    }
  }
  return count;
}
/** CLI entry point: delegates to ToolRunner and exits with the tool's status. */
public static void main(String[] args) throws Exception {
  System.exit(ToolRunner.run(HBaseConfiguration.create(), new ImportTsv(), args));
}
}
| |
package com.instructure.canvasapi.api;
import android.content.Context;
import com.instructure.canvasapi.model.Attachment;
import com.instructure.canvasapi.model.Course;
import com.instructure.canvasapi.model.Enrollment;
import com.instructure.canvasapi.model.Favorite;
import com.instructure.canvasapi.model.FileUploadParams;
import com.instructure.canvasapi.model.GradingPeriodResponse;
import com.instructure.canvasapi.model.kaltura.FileUploadParamsWrapper;
import com.instructure.canvasapi.utilities.APIHelpers;
import com.instructure.canvasapi.utilities.CanvasCallback;
import com.instructure.canvasapi.utilities.CanvasRestAdapter;
import com.instructure.canvasapi.utilities.ExhaustiveBridgeCallback;
import java.io.File;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
import retrofit.RestAdapter;
import retrofit.http.Body;
import retrofit.http.DELETE;
import retrofit.http.GET;
import retrofit.http.Multipart;
import retrofit.http.POST;
import retrofit.http.PUT;
import retrofit.http.Part;
import retrofit.http.PartMap;
import retrofit.http.Path;
import retrofit.http.Query;
import retrofit.mime.TypedFile;
/**
* Copyright (c) 2015 Instructure. All rights reserved.
*/
public class CourseAPI extends BuildInterfaceAPI {
// Retrofit endpoint definitions for course resources. The include[] query
// parameters baked into each @GET path control which sub-objects the Canvas
// API embeds in the response. URL strings are part of the wire contract —
// do not edit them casually.
interface CoursesInterface {
@PUT("/courses/{courseid}")
void updateCourse(@Path("courseid") long courseID,
@Query("course[name]") String name, @Query("course[course_code]") String courseCode,
@Query("course[start_at]") String startAt, @Query("course[end_at]") String endAt,
@Query("course[license]") String license, @Query("course[is_public]") Integer isPublic,
@Body String body,
CanvasCallback<Course> callback);
@GET("/courses/{courseid}?include[]=term&include[]=permissions&include[]=license&include[]=is_public&include[]=needs_grading_count")
void getCourse(@Path("courseid") long courseId, CanvasCallback<Course> callback);
@GET("/courses/{courseid}?include[]=term&include[]=permissions&include[]=license&include[]=is_public&include[]=needs_grading_count&include[]=total_scores&include[]=current_grading_period_scores")
void getCourseWithGrade(@Path("courseid") long courseId, CanvasCallback<Course> callback);
@GET("/courses/{courseid}/enrollments")
void getEnrollmentsForGradingPeriod(@Path("courseid") long courseId, @Query("grading_period_id") long gradingPeriodId, CanvasCallback<Enrollment[]> callback);
@GET("/courses/{courseid}?include[]=syllabus_body&include[]=term&include[]=license&include[]=is_public&include[]=permissions")
void getCourseWithSyllabus(@Path("courseid") long courseId, CanvasCallback<Course> callback);
// I don't see why we wouldn't want to always get the grades
@GET("/courses?include[]=term&include[]=total_scores&include[]=license&include[]=is_public&include[]=needs_grading_count&include[]=permissions&include[]=favorites&include[]=current_grading_period_scores")
void getFirstPageCourses(CanvasCallback<Course[]> callback);
// encode=false: the "next" value is a full pre-encoded pagination URL from a Link header.
@GET("/{next}?&include[]=needs_grading_count&include[]=permissions&include[]=favorites")
void getNextPageCourses(@Path(value = "next", encode = false) String nextURL, CanvasCallback<Course[]> callback);
@GET("/users/self/favorites/courses?include[]=term&include[]=total_scores&include[]=license&include[]=is_public&include[]=needs_grading_count&include[]=permissions&include[]=current_grading_period_scores")
void getFavoriteCourses(CanvasCallback<Course[]> callback);
@GET("/courses/{courseId}/grading_periods")
void getGradingPeriodsForCourse(@Path("courseId") long courseId, CanvasCallback<GradingPeriodResponse> callback);
// POST with an empty @Body String — Retrofit requires a body for POST.
@POST("/users/self/favorites/courses/{courseId}")
void addCourseToFavorites(@Path("courseId") long courseId, @Body String body, CanvasCallback<Favorite> callback);
@DELETE("/users/self/favorites/courses/{courseId}")
void removeCourseFromFavorites(@Path("courseId") long courseId, CanvasCallback<Favorite> callback);
@GET("/users/{user_id}/courses?include[]=total_scores&include[]=syllabus_body")
void getCoursesForUser(@Path("user_id") long userId, CanvasCallback<Course[]> callback);
@GET("/{next}")
void getNextPageCoursesForUser(@Path(value = "next", encode = false) String nextURL, CanvasCallback<Course[]> callback);
// Airwolf endpoints route through the observer/parent proxy service.
@GET("/canvas/{parentId}/{studentId}/courses?include[]=total_scores&include[]=syllabus_body&include[]=current_grading_period_scores")
void getCoursesForUserAirwolf(@Path("parentId") String parentId, @Path("studentId") String studentId, CanvasCallback<Course[]> callback);
@GET("/canvas/{parentId}/{studentId}/courses/{courseId}?include[]=syllabus_body&include[]=term&include[]=license&include[]=is_public&include[]=permissions")
void getCourseWithSyllabusAirwolf(@Path("parentId") String parentId, @Path("studentId") String studentId, @Path("courseId") long courseId, CanvasCallback<Course> callback);
@GET("/canvas/{parentId}/{studentId}/courses/{courseId}?include[]=term&include[]=permissions&include[]=license&include[]=is_public&include[]=needs_grading_count&include[]=total_scores&include[]=current_grading_period_scores")
void getCourseWithGradeAirwolf(@Path("parentId") String parentId, @Path("studentId") String studentId, @Path("courseId") long courseId, CanvasCallback<Course> callback);
/////////////////////////////////////////////////////////////////////////////
// Synchronous
/////////////////////////////////////////////////////////////////////////////
@GET("/courses?include[]=term&include[]=total_scores&include[]=license&include[]=is_public&include[]=permissions")
Course[] getAllCoursesSynchronous(@Query("page") int page);
@GET("/users/self/favorites/courses?include[]=term&include[]=total_scores&include[]=license&include[]=is_public&include[]=permissions")
Course[] getFavCoursesSynchronous(@Query("page") int page);
@POST("/courses/{courseId}/files")
FileUploadParams getFileUploadParams(@Path("courseId") long courseId, @Query("parent_folder_id") Long parentFolderId, @Query("size") long size, @Query("name") String fileName, @Query("content_type") String content_type, @Body String body);
@POST("/courses/{courseId}/quizzes/{quizId}/submissions/self/files")
FileUploadParamsWrapper getQuizFileUploadParams(@Query("name") String name, @Query("duplicate_name") String duplicateName, @Path("courseId") long courseId, @Path("quizId") long quizId, @Body String body);
// Upload targets: the actual upload URL comes from the FileUploadParams
// response, so these POST to "/" against a per-upload endpoint.
@Multipart
@POST("/")
Attachment uploadCourseFile(@PartMap LinkedHashMap<String, String> params, @Part("file") TypedFile file);
@Multipart
@POST("/")
Attachment uploadQuizFile(@PartMap LinkedHashMap<String, String> params, @Part("file") TypedFile file);
}
/////////////////////////////////////////////////////////////////////////
// API Calls
/////////////////////////////////////////////////////////////////////////
/**
 * Fetches a single course, firing the callback once from cache and once
 * from the network (cache-then-network pattern used throughout this class).
 */
public static void getCourse(long courseId, CanvasCallback<Course> callback) {
    if (APIHelpers.paramIsNull(callback)) return;
    CoursesInterface cached = buildCacheInterface(CoursesInterface.class, callback, false);
    cached.getCourse(courseId, callback);
    CoursesInterface network = buildInterface(CoursesInterface.class, callback, false);
    network.getCourse(courseId, callback);
}
/** Fetches a single course including total/current grading-period scores. */
public static void getCourseWithGrade(long courseId, CanvasCallback<Course> callback) {
    if (APIHelpers.paramIsNull(callback)) return;
    CoursesInterface cached = buildCacheInterface(CoursesInterface.class, callback, false);
    cached.getCourseWithGrade(courseId, callback);
    CoursesInterface network = buildInterface(CoursesInterface.class, callback, false);
    network.getCourseWithGrade(courseId, callback);
}
/** Fetches a single course including its syllabus body. */
public static void getCourseWithSyllabus(long courseId, CanvasCallback<Course> callback) {
    if (APIHelpers.paramIsNull(callback)) return;
    CoursesInterface cached = buildCacheInterface(CoursesInterface.class, callback, false);
    cached.getCourseWithSyllabus(courseId, callback);
    CoursesInterface network = buildInterface(CoursesInterface.class, callback, false);
    network.getCourseWithSyllabus(courseId, callback);
}
/** Fetches the first page of all courses (cache, then network). */
public static void getFirstPageCourses(CanvasCallback<Course[]> callback) {
    if (APIHelpers.paramIsNull(callback)) return;
    CoursesInterface cached = buildCacheInterface(CoursesInterface.class, callback);
    cached.getFirstPageCourses(callback);
    CoursesInterface network = buildInterface(CoursesInterface.class, callback);
    network.getFirstPageCourses(callback);
}
/** Fetches the first page of the user's favorite courses (cache, then network). */
public static void getFirstPageFavoriteCourses(CanvasCallback<Course[]> callback) {
    if (APIHelpers.paramIsNull(callback)) return;
    CoursesInterface cached = buildCacheInterface(CoursesInterface.class, callback);
    cached.getFavoriteCourses(callback);
    CoursesInterface network = buildInterface(CoursesInterface.class, callback);
    network.getFavoriteCourses(callback);
}
/** Fetches the grading periods defined for a course (cache, then network). */
public static void getGradingPeriodsForCourse(long courseId, CanvasCallback<GradingPeriodResponse> callback) {
    if (APIHelpers.paramIsNull(callback)) return;
    CoursesInterface cached = buildCacheInterface(CoursesInterface.class, callback);
    cached.getGradingPeriodsForCourse(courseId, callback);
    CoursesInterface network = buildInterface(CoursesInterface.class, callback);
    network.getGradingPeriodsForCourse(courseId, callback);
}
/** Fetches the user's enrollments for one grading period (cache, then network). */
public static void getEnrollmentsForGradingPeriod(long courseId, long gradingPeriodId, CanvasCallback<Enrollment[]> callback) {
    if (APIHelpers.paramIsNull(callback)) return;
    CoursesInterface cached = buildCacheInterface(CoursesInterface.class, callback);
    cached.getEnrollmentsForGradingPeriod(courseId, gradingPeriodId, callback);
    CoursesInterface network = buildInterface(CoursesInterface.class, callback);
    network.getEnrollmentsForGradingPeriod(courseId, gradingPeriodId, callback);
}
/**
 * Fetches the next page of courses from a pagination URL.
 *
 * @param nextURL the "next" link from a previous page's response (required)
 */
public static void getNextPageCourses(CanvasCallback<Course[]> callback, String nextURL) {
    if (APIHelpers.paramIsNull(callback, nextURL)) return;
    // Mark the callback as a pagination continuation before dispatching.
    callback.setIsNextPage(true);
    buildCacheInterface(CoursesInterface.class, callback).getNextPageCourses(nextURL, callback);
    buildInterface(CoursesInterface.class, callback).getNextPageCourses(nextURL, callback);
}
/**
 * Fetches the next page of courses for a chained (exhaustive) request.
 * Unlike {@code getNextPageCourses}, exactly one interface is used, selected
 * by {@code isCached}, so each page is delivered once.
 */
public static void getNextPageCoursesChained(CanvasCallback<Course[]> callback, String nextURL, boolean isCached) {
    if (APIHelpers.paramIsNull(callback, nextURL)) return;
    callback.setIsNextPage(true);
    if (isCached) {
        buildCacheInterface(CoursesInterface.class, callback).getNextPageCourses(nextURL, callback);
    } else {
        buildInterface(CoursesInterface.class, callback).getNextPageCourses(nextURL, callback);
    }
}
/** Marks a course as a favorite. Network-only: a mutation, so the cache interface is skipped. */
public static void addCourseToFavorites(final long courseId, final CanvasCallback<Favorite> callback) {
    if (APIHelpers.paramIsNull(callback)) return;
    // The trailing "" matches the interface's extra parameter (same pattern as updateCourse).
    buildInterface(CoursesInterface.class, callback).addCourseToFavorites(courseId, "", callback);
}
/** Removes a course from favorites. Network-only mutation; no cache read. */
public static void removeCourseFromFavorites(final long courseId, final CanvasCallback<Favorite> callback) {
    if (APIHelpers.paramIsNull(callback)) return;
    buildInterface(CoursesInterface.class, callback).removeCourseFromFavorites(courseId, callback);
}
/**
 * Fetches ALL favorite courses by exhausting pagination. The bridge callback
 * re-issues {@code getNextPageCoursesChained} for every "next" link until no
 * pages remain, then the aggregate result reaches {@code callback}.
 *
 * @param isCached true to page through the cache interface, false for network
 */
public static void getAllFavoriteCoursesChained(final CanvasCallback<Course[]> callback, boolean isCached) {
    if (APIHelpers.paramIsNull(callback)) return;
    CanvasCallback<Course[]> bridge = new ExhaustiveBridgeCallback<>(Course.class, callback, new ExhaustiveBridgeCallback.ExhaustiveBridgeEvents() {
        @Override
        public void performApiCallWithExhaustiveCallback(CanvasCallback bridgeCallback, String nextURL, boolean isCached) {
            // Stop paging if the consumer cancelled mid-flight.
            if(callback.isCancelled()) { return; }
            CourseAPI.getNextPageCoursesChained(bridgeCallback, nextURL, isCached);
        }
    });
    if (isCached) {
        buildCacheInterface(CoursesInterface.class, callback).getFavoriteCourses(bridge);
    } else {
        buildInterface(CoursesInterface.class, callback).getFavoriteCourses(bridge);
    }
}
/**
 * Fetches ALL favorite courses, paging exhaustively through both the cache
 * and the network interfaces.
 */
public static void getAllFavoriteCourses(final CanvasCallback<Course[]> callback) {
    if (APIHelpers.paramIsNull(callback)) return;
    CanvasCallback<Course[]> bridge = new ExhaustiveBridgeCallback<>(Course.class, callback, new ExhaustiveBridgeCallback.ExhaustiveBridgeEvents() {
        @Override
        public void performApiCallWithExhaustiveCallback(CanvasCallback bridgeCallback, String nextURL, boolean isCached) {
            if(callback.isCancelled()) { return; }
            CourseAPI.getNextPageCoursesChained(bridgeCallback, nextURL, isCached);
        }
    });
    // NOTE(review): the SAME exhaustive bridge is started on both the cache and
    // the network interface, so it receives two response streams — confirm
    // ExhaustiveBridgeCallback tolerates this (the *Chained variant picks one).
    buildCacheInterface(CoursesInterface.class, callback).getFavoriteCourses(bridge);
    buildInterface(CoursesInterface.class, callback).getFavoriteCourses(bridge);
}
/**
 * Fetches ALL of the user's courses, paging exhaustively through both the
 * cache and the network interfaces.
 */
public static void getAllCourses(final CanvasCallback<Course[]> callback) {
    if (APIHelpers.paramIsNull(callback)) return;
    CanvasCallback<Course[]> bridge = new ExhaustiveBridgeCallback<>(Course.class, callback, new ExhaustiveBridgeCallback.ExhaustiveBridgeEvents() {
        @Override
        public void performApiCallWithExhaustiveCallback(CanvasCallback bridgeCallback, String nextURL, boolean isCached) {
            // Respect consumer-side cancellation between pages.
            if(callback.isCancelled()) { return; }
            CourseAPI.getNextPageCoursesChained(bridgeCallback, nextURL, isCached);
        }
    });
    // NOTE(review): same bridge instance is fed by both interfaces — confirm
    // ExhaustiveBridgeCallback handles two concurrent response streams.
    buildCacheInterface(CoursesInterface.class, callback).getFirstPageCourses(bridge);
    buildInterface(CoursesInterface.class, callback).getFirstPageCourses(bridge);
}
/**
 * Updates an existing course's settings on the server (network-only mutation).
 *
 * @param newCourseName (Optional)
 * @param newCourseCode (Optional)
 * @param newStartAt (Optional)
 * @param newEndAt (Optional)
 * @param license (Optional)
 * @param newIsPublic (Optional) null presumably means "leave unchanged" — confirm with API docs
 * @param course (Required)
 * @param callback (Required)
 */
public static void updateCourse(String newCourseName, String newCourseCode, Date newStartAt, Date newEndAt, Course.LICENSE license, Boolean newIsPublic, Course course, CanvasCallback<Course> callback) {
    if (APIHelpers.paramIsNull(callback, course)) return;
    // Convert Dates to the API's string representation.
    String newStartAtString = APIHelpers.dateToString(newStartAt);
    String newEndAtString = APIHelpers.dateToString(newEndAt);
    // The endpoint takes 0/1 rather than a boolean; preserve null explicitly.
    Integer newIsPublicInteger = (newIsPublic == null) ? null : APIHelpers.booleanToInt(newIsPublic);
    buildInterface(CoursesInterface.class, callback).updateCourse(course.getId(), newCourseName, newCourseCode, newStartAtString, newEndAtString, Course.licenseToAPIString(license), newIsPublicInteger, "", callback);
}
/** Fetches the first page of courses for a specific user id (cache first, then network). */
public static void getCoursesForUser(long userId, CanvasCallback<Course[]> callback) {
    if (APIHelpers.paramIsNull(callback)) { return; }
    buildCacheInterface(CoursesInterface.class, callback).getCoursesForUser(userId, callback);
    buildInterface(CoursesInterface.class, callback).getCoursesForUser(userId, callback);
}
/** Fetches the next page of a specific user's courses from a pagination URL. */
public static void getNextPageCoursesForUser(String nextURL, CanvasCallback<Course[]> callback) {
    if (APIHelpers.paramIsNull(nextURL, callback)) { return; }
    callback.setIsNextPage(true);
    buildCacheInterface(CoursesInterface.class, callback).getNextPageCoursesForUser(nextURL, callback);
    buildInterface(CoursesInterface.class, callback).getNextPageCoursesForUser(nextURL, callback);
}
/**
 * Fetches a student's courses through the Airwolf (parent/observer) domain
 * rather than the user's own Canvas domain.
 */
public static void getCoursesForUserAirwolf(String parentId, String studentId, CanvasCallback<Course[]> callback) {
    if(APIHelpers.paramIsNull(parentId, studentId, callback)) { return; }
    buildCacheInterface(CoursesInterface.class, APIHelpers.getAirwolfDomain(callback.getContext()), callback).getCoursesForUserAirwolf(parentId, studentId, callback);
    buildInterface(CoursesInterface.class, APIHelpers.getAirwolfDomain(callback.getContext()), callback).getCoursesForUserAirwolf(parentId, studentId, callback);
}
/** Fetches the next page of a student's courses through the Airwolf domain. */
public static void getNextPageCoursesForUserAirwolf(String nextURL, CanvasCallback<Course[]> callback) {
    if (APIHelpers.paramIsNull(nextURL, callback)) { return; }
    callback.setIsNextPage(true);
    // Pagination reuses the generic next-page endpoint, but against the Airwolf domain.
    buildCacheInterface(CoursesInterface.class, APIHelpers.getAirwolfDomain(callback.getContext()), callback).getNextPageCoursesForUser(nextURL, callback);
    buildInterface(CoursesInterface.class, APIHelpers.getAirwolfDomain(callback.getContext()), callback).getNextPageCoursesForUser(nextURL, callback);
}
/** Fetches a single course (syllabus variant) for a student via the Airwolf domain. */
public static void getCourseWithSyllabusAirwolf(String parentId, String studentId, long courseId, CanvasCallback<Course> callback) {
    if (APIHelpers.paramIsNull(parentId, studentId, callback)) return;
    buildCacheInterface(CoursesInterface.class, APIHelpers.getAirwolfDomain(callback.getContext()), callback, false).getCourseWithSyllabusAirwolf(parentId, studentId, courseId, callback);
    buildInterface(CoursesInterface.class, APIHelpers.getAirwolfDomain(callback.getContext()), callback, false).getCourseWithSyllabusAirwolf(parentId, studentId, courseId, callback);
}
/** Fetches a single course (grade variant) for a student via the Airwolf domain. */
public static void getCourseWithGradeAirwolf(String parentId, String studentId, long courseId, CanvasCallback<Course> callback) {
    if (APIHelpers.paramIsNull(parentId, studentId, callback)) return;
    buildCacheInterface(CoursesInterface.class, APIHelpers.getAirwolfDomain(callback.getContext()), callback, false).getCourseWithGradeAirwolf(parentId, studentId, courseId, callback);
    buildInterface(CoursesInterface.class, APIHelpers.getAirwolfDomain(callback.getContext()), callback, false).getCourseWithGradeAirwolf(parentId, studentId, courseId, callback);
}
/////////////////////////////////////////////////////////////////////////////
// Helper Methods
////////////////////////////////////////////////////////////////////////////
/**
 * Builds a lookup map from course id to course.
 *
 * @param courses source array; may be null
 * @return map keyed by {@code Course#getId()}; empty — never null — for null input
 */
public static Map<Long, Course> createCourseMap(Course[] courses) {
    // Diamond operator: the file already relies on Java 7+ type inference elsewhere.
    Map<Long, Course> courseMap = new HashMap<>();
    if (courses == null) {
        return courseMap;
    }
    for (Course course : courses) {
        courseMap.put(course.getId(), course);
    }
    return courseMap;
}
/////////////////////////////////////////////////////////////////////////////
// Synchronous
//
// If Retrofit is unable to parse the response (for example, when there is no
// network connection), synchronous calls throw a NullPointerException, so every
// synchronous call must be wrapped in a try/catch block.
/////////////////////////////////////////////////////////////////////////////
/**
 * Synchronously fetches every page of the user's courses. Must be called off
 * the main thread.
 *
 * <p>All-or-nothing: any failure (including a Retrofit parse failure, which
 * surfaces as an exception) returns null rather than partial data.
 *
 * @return every course across all pages, or null on any failure
 */
public static Course[] getAllCoursesSynchronous(Context context) {
    RestAdapter restAdapter = CanvasRestAdapter.buildAdapter(context);
    try {
        ArrayList<Course> allCourses = new ArrayList<>();
        int page = 1;
        long firstItemId = -1;
        while (true) {
            Course[] courses = restAdapter.create(CoursesInterface.class).getAllCoursesSynchronous(page);
            page++;
            if (courses == null) {
                // Partial data is useless to callers; bail out entirely.
                return null;
            } else if (courses.length == 0) {
                // No more pages.
                break;
            } else if (courses[0].getId() == firstItemId) {
                // Server returned the same page again; stop to avoid looping forever.
                break;
            } else {
                firstItemId = courses[0].getId();
                Collections.addAll(allCourses, courses);
            }
        }
        return allCourses.toArray(new Course[allCourses.size()]);
    } catch (Exception e) {
        // Includes the NullPointerException Retrofit throws when it cannot parse.
        return null;
    }
}
/**
 * Synchronously fetches every page of the user's favorite courses. Must be
 * called off the main thread.
 *
 * <p>All-or-nothing: any failure returns null rather than partial data.
 * Mirrors {@code getAllCoursesSynchronous} but hits the favorites endpoint.
 *
 * @return every favorite course across all pages, or null on any failure
 */
public static Course[] getFavCoursesSynchronous(Context context) {
    RestAdapter restAdapter = CanvasRestAdapter.buildAdapter(context);
    try {
        ArrayList<Course> allCourses = new ArrayList<>();
        int page = 1;
        long firstItemId = -1;
        while (true) {
            Course[] courses = restAdapter.create(CoursesInterface.class).getFavCoursesSynchronous(page);
            page++;
            if (courses == null) {
                // Partial data is useless to callers; bail out entirely.
                return null;
            } else if (courses.length == 0) {
                // No more pages.
                break;
            } else if (courses[0].getId() == firstItemId) {
                // Server returned the same page again; stop to avoid looping forever.
                break;
            } else {
                firstItemId = courses[0].getId();
                Collections.addAll(allCourses, courses);
            }
        }
        return allCourses.toArray(new Course[allCourses.size()]);
    } catch (Exception e) {
        // Includes the NullPointerException Retrofit throws when it cannot parse.
        return null;
    }
}
/**
 * Synchronously requests upload parameters for a course file.
 * NOTE(review): this wrapper declares (fileName, size) but forwards
 * (size, fileName) — confirm the interface's parameter order matches.
 */
public static FileUploadParams getFileUploadParams(Context context, long courseId, Long parentFolderId, String fileName, long size, String contentType){
    return buildInterface(CoursesInterface.class, context).getFileUploadParams(courseId, parentFolderId, size, fileName, contentType, "");
}
/**
 * Synchronously requests upload parameters for a quiz file submission.
 * NOTE(review): the unchecked .get(0) throws if the server returns no upload
 * params — confirm callers tolerate that (synchronous calls here are expected
 * to be wrapped in try/catch per the section comment above the sync methods).
 */
public static FileUploadParams getQuizFileUploadParams(Context context, long courseId, long quizId, String name, String duplicateName){
    return buildInterface(CoursesInterface.class, context, false).getQuizFileUploadParams(name, duplicateName, courseId, quizId, "").getUploadParams().get(0);
}
/**
 * Synchronously uploads a course file to the pre-authorized upload URL.
 * The {@code context} parameter is currently unused but kept for signature
 * parity with the other upload helpers.
 */
public static Attachment uploadCourseFile(Context context, String uploadUrl, LinkedHashMap<String,String> uploadParams, String mimeType, File file){
    return buildUploadInterface(CoursesInterface.class, uploadUrl).uploadCourseFile(uploadParams, new TypedFile(mimeType, file));
}
/** Synchronously uploads a quiz file to the pre-authorized upload URL. */
public static Attachment uploadQuizFile(String uploadUrl, LinkedHashMap<String,String> uploadParams, String mimeType, File file){
    return buildUploadInterface(CoursesInterface.class, uploadUrl).uploadQuizFile(uploadParams, new TypedFile(mimeType, file));
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.notebook;
import com.google.common.collect.Lists;
import com.google.gson.Gson;
import org.apache.zeppelin.display.AngularObject;
import org.apache.zeppelin.display.ui.TextBox;
import org.apache.zeppelin.interpreter.Interpreter;
import org.apache.zeppelin.interpreter.InterpreterFactory;
import org.apache.zeppelin.interpreter.InterpreterResult;
import org.apache.zeppelin.interpreter.InterpreterSettingManager;
import org.apache.zeppelin.notebook.repo.NotebookRepo;
import org.apache.zeppelin.scheduler.Scheduler;
import org.apache.zeppelin.search.SearchService;
import org.apache.zeppelin.user.AuthenticationInfo;
import org.apache.zeppelin.user.Credentials;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
import java.util.Date;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
/**
 * Unit tests for {@link Note}: paragraph creation and interpreter resolution,
 * running paragraphs, folder/name parsing, personalized mode, and JSON
 * round-tripping of notes and paragraphs.
 */
@RunWith(MockitoJUnitRunner.class)
public class NoteTest {
    @Mock
    NotebookRepo repo;

    @Mock
    JobListenerFactory jobListenerFactory;

    @Mock
    SearchService index;

    @Mock
    Credentials credentials;

    @Mock
    Interpreter interpreter;

    @Mock
    Scheduler scheduler;

    @Mock
    NoteEventListener noteEventListener;

    @Mock
    InterpreterFactory interpreterFactory;

    @Mock
    InterpreterSettingManager interpreterSettingManager;

    private AuthenticationInfo anonymous = new AuthenticationInfo("anonymous");

    /** Running a paragraph submits it to the interpreter's scheduler with its text intact. */
    @Test
    public void runNormalTest() {
        when(interpreterFactory.getInterpreter(anyString(), anyString(), eq("spark"))).thenReturn(interpreter);
        when(interpreter.getScheduler()).thenReturn(scheduler);

        String pText = "%spark sc.version";
        Note note = new Note(repo, interpreterFactory, interpreterSettingManager, jobListenerFactory, index, credentials, noteEventListener);
        Paragraph p = note.addNewParagraph(AuthenticationInfo.ANONYMOUS);
        p.setText(pText);
        p.setAuthenticationInfo(anonymous);
        note.run(p.getId());

        ArgumentCaptor<Paragraph> pCaptor = ArgumentCaptor.forClass(Paragraph.class);
        verify(scheduler, only()).submit(pCaptor.capture());
        verify(interpreterFactory, times(1)).getInterpreter(anyString(), anyString(), eq("spark"));
        assertEquals("Paragraph text", pText, pCaptor.getValue().getText());
    }

    @Test
    public void addParagraphWithEmptyReplNameTest() {
        Note note = new Note(repo, interpreterFactory, interpreterSettingManager, jobListenerFactory, index, credentials, noteEventListener);
        Paragraph p = note.addNewParagraph(AuthenticationInfo.ANONYMOUS);
        assertNull(p.getText());
    }

    /** A new paragraph inherits the repl (%spark) of the previous paragraph. */
    @Test
    public void addParagraphWithLastReplNameTest() {
        when(interpreterFactory.getInterpreter(anyString(), anyString(), eq("spark"))).thenReturn(interpreter);
        Note note = new Note(repo, interpreterFactory, interpreterSettingManager, jobListenerFactory, index, credentials, noteEventListener);
        Paragraph p1 = note.addNewParagraph(AuthenticationInfo.ANONYMOUS);
        p1.setText("%spark ");
        Paragraph p2 = note.addNewParagraph(AuthenticationInfo.ANONYMOUS);
        assertEquals("%spark\n", p2.getText());
    }

    @Test
    public void insertParagraphWithLastReplNameTest() {
        when(interpreterFactory.getInterpreter(anyString(), anyString(), eq("spark"))).thenReturn(interpreter);
        Note note = new Note(repo, interpreterFactory, interpreterSettingManager, jobListenerFactory, index, credentials, noteEventListener);
        Paragraph p1 = note.addNewParagraph(AuthenticationInfo.ANONYMOUS);
        p1.setText("%spark ");
        Paragraph p2 = note.insertNewParagraph(note.getParagraphs().size(), AuthenticationInfo.ANONYMOUS);
        assertEquals("%spark\n", p2.getText());
    }

    /** An unknown repl in the previous paragraph is not propagated to the new one. */
    @Test
    public void insertParagraphWithInvalidReplNameTest() {
        when(interpreterFactory.getInterpreter(anyString(), anyString(), eq("invalid"))).thenReturn(null);
        Note note = new Note(repo, interpreterFactory, interpreterSettingManager, jobListenerFactory, index, credentials, noteEventListener);
        Paragraph p1 = note.addNewParagraph(AuthenticationInfo.ANONYMOUS);
        p1.setText("%invalid ");
        Paragraph p2 = note.insertNewParagraph(note.getParagraphs().size(), AuthenticationInfo.ANONYMOUS);
        assertNull(p2.getText());
    }

    @Test
    public void insertParagraphwithUser() {
        Note note = new Note(repo, interpreterFactory, interpreterSettingManager, jobListenerFactory, index, credentials, noteEventListener);
        Paragraph p = note.insertNewParagraph(note.getParagraphs().size(), AuthenticationInfo.ANONYMOUS);
        assertEquals("anonymous", p.getUser());
    }

    @Test
    public void clearAllParagraphOutputTest() {
        when(interpreterFactory.getInterpreter(anyString(), anyString(), eq("md"))).thenReturn(interpreter);
        when(interpreter.getScheduler()).thenReturn(scheduler);

        Note note = new Note(repo, interpreterFactory, interpreterSettingManager, jobListenerFactory, index, credentials, noteEventListener);
        Paragraph p1 = note.addNewParagraph(AuthenticationInfo.ANONYMOUS);
        InterpreterResult result = new InterpreterResult(InterpreterResult.Code.SUCCESS, InterpreterResult.Type.TEXT, "result");
        p1.setResult(result);

        Paragraph p2 = note.addNewParagraph(AuthenticationInfo.ANONYMOUS);
        p2.setReturn(result, new Throwable());

        note.clearAllParagraphOutput();

        assertNull(p1.getReturn());
        assertNull(p2.getReturn());
    }

    @Test
    public void getFolderIdTest() {
        Note note = new Note(repo, interpreterFactory, interpreterSettingManager, jobListenerFactory, index, credentials, noteEventListener);
        // Ordinary case test
        note.setName("this/is/a/folder/noteName");
        assertEquals("this/is/a/folder", note.getFolderId());
        // Normalize test
        note.setName("/this/is/a/folder/noteName");
        assertEquals("this/is/a/folder", note.getFolderId());
        // Root folder test
        note.setName("noteOnRootFolder");
        assertEquals(Folder.ROOT_FOLDER_ID, note.getFolderId());
        note.setName("/noteOnRootFolderStartsWithSlash");
        assertEquals(Folder.ROOT_FOLDER_ID, note.getFolderId());
    }

    @Test
    public void getNameWithoutPathTest() {
        Note note = new Note(repo, interpreterFactory, interpreterSettingManager, jobListenerFactory, index, credentials, noteEventListener);
        // Notes in the root folder
        note.setName("noteOnRootFolder");
        assertEquals("noteOnRootFolder", note.getNameWithoutPath());
        note.setName("/noteOnRootFolderStartsWithSlash");
        assertEquals("noteOnRootFolderStartsWithSlash", note.getNameWithoutPath());
        // Notes in subdirectories
        note.setName("/a/b/note");
        assertEquals("note", note.getNameWithoutPath());
        note.setName("a/b/note");
        assertEquals("note", note.getNameWithoutPath());
    }

    @Test
    public void isTrashTest() {
        Note note = new Note(repo, interpreterFactory, interpreterSettingManager, jobListenerFactory, index, credentials, noteEventListener);
        // Notes in the root folder
        note.setName("noteOnRootFolder");
        assertFalse(note.isTrash());
        note.setName("/noteOnRootFolderStartsWithSlash");
        assertFalse(note.isTrash());
        // Notes in subdirectories
        note.setName("/a/b/note");
        assertFalse(note.isTrash());
        note.setName("a/b/note");
        assertFalse(note.isTrash());
        // Notes in trash
        note.setName(Folder.TRASH_FOLDER_ID + "/a");
        assertTrue(note.isTrash());
        note.setName("/" + Folder.TRASH_FOLDER_ID + "/a");
        assertTrue(note.isTrash());
        note.setName(Folder.TRASH_FOLDER_ID + "/a/b/c");
        assertTrue(note.isTrash());
    }

    @Test
    public void getNameWithoutNameItself() {
        Note note = new Note(repo, interpreterFactory, interpreterSettingManager, jobListenerFactory, index, credentials, noteEventListener);
        assertEquals("getName should return same as getId when name is empty", note.getId(), note.getName());
    }

    @Test
    public void personalizedModeReturnDifferentParagraphInstancePerUser() {
        Note note = new Note(repo, interpreterFactory, interpreterSettingManager, jobListenerFactory, index, credentials, noteEventListener);
        String user1 = "user1";
        String user2 = "user2";
        note.setPersonalizedMode(true);
        note.addNewParagraph(new AuthenticationInfo(user1));
        Paragraph baseParagraph = note.getParagraphs().get(0);
        Paragraph user1Paragraph = baseParagraph.getUserParagraph(user1);
        Paragraph user2Paragraph = baseParagraph.getUserParagraph(user2);
        // Each user (and the base) must get a distinct Paragraph instance.
        assertNotEquals(System.identityHashCode(baseParagraph), System.identityHashCode(user1Paragraph));
        assertNotEquals(System.identityHashCode(baseParagraph), System.identityHashCode(user2Paragraph));
        assertNotEquals(System.identityHashCode(user1Paragraph), System.identityHashCode(user2Paragraph));
    }

    /** Note and Paragraph survive a JSON round trip. */
    // FIX: this method was missing @Test, so JUnit never executed it.
    @Test
    public void testNoteJson() {
        Note note = new Note(repo, interpreterFactory, interpreterSettingManager, jobListenerFactory, index, credentials, noteEventListener);
        note.setName("/test_note");
        note.getConfig().put("config_1", "value_1");
        note.getInfo().put("info_1", "value_1");
        String pText = "%spark sc.version";
        Paragraph p = note.addNewParagraph(AuthenticationInfo.ANONYMOUS);
        p.setText(pText);
        p.setResult("1.6.2");
        p.settings.getForms().put("textbox_1", new TextBox("name", "default_name"));
        p.settings.getParams().put("textbox_1", "my_name");
        note.getAngularObjects().put("ao_1", Lists.newArrayList(new AngularObject("name_1", "value_1", note.getId(), p.getId(), null)));

        // test Paragraph Json
        Paragraph p2 = Paragraph.fromJson(p.toJson());
        assertEquals(p2.settings, p.settings);
        assertEquals(p2, p);

        // test Note Json
        Note note2 = Note.fromJson(note.toJson());
        assertEquals(note2, note);
    }
}
| |
package org.broadinstitute.hellbender.tools.walkers.mutect;
import htsjdk.variant.variantcontext.Genotype;
import htsjdk.variant.variantcontext.VariantContext;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.commons.lang3.tuple.Pair;
import org.broadinstitute.hellbender.exceptions.UserException;
import org.broadinstitute.hellbender.utils.Utils;
import org.broadinstitute.hellbender.utils.param.ParamUtils;
import org.broadinstitute.hellbender.utils.tsv.DataLine;
import org.broadinstitute.hellbender.utils.tsv.TableColumnCollection;
import org.broadinstitute.hellbender.utils.tsv.TableWriter;
import org.broadinstitute.hellbender.utils.variant.GATKVCFConstants;
import java.io.File;
import java.io.IOException;
import java.util.*;
/**
* Stores the results of the first pass of {@link FilterMutectCalls}, a purely online step in which each variant is
* not "aware" of other variants, and learns various global properties necessary for a more refined second step.
*/
public class FilteringFirstPass {
    // One entry per variant seen during the first pass, in encounter order.
    final List<FilterResult> filterResults;
    // Phasing id (PID) -> (phasing genotype PGT, variant start position) for
    // calls that failed at least one filter; consulted by isOnFilteredHaplotype().
    final Map<String, ImmutablePair<String, Integer>> filteredPhasedCalls;
    // Filter name -> statistics learned by learnModelForSecondPass().
    final Map<String, FilterStats> filterStats;
    // Sample name used to extract the tumor genotype from each VariantContext.
    final String tumorSample;
    // Set once learnModelForSecondPass() has computed the model.
    boolean readyForSecondPass;

    /**
     * @param tumorSample name of the tumor sample whose genotype is inspected for phasing info
     */
    public FilteringFirstPass(final String tumorSample) {
        filterResults = new ArrayList<>();
        filteredPhasedCalls = new HashMap<>();
        filterStats = new HashMap<>();
        readyForSecondPass = false;
        this.tumorSample = tumorSample;
    }

    /** Whether learnModelForSecondPass() has been run. */
    public boolean isReadyForSecondPass() { return readyForSecondPass; }

    /**
     * Returns the learned statistics for a filter.
     *
     * @throws IllegalArgumentException (via Utils.validateArg) if the filter name is unknown
     */
    public FilterStats getFilterStats(final String filterName){
        Utils.validateArg(filterStats.containsKey(filterName), "invalid filter name: " + filterName);
        return filterStats.get(filterName);
    }

    /**
     * Returns true if {@code vc} shares phasing (same PID and PGT) with a
     * previously filtered call and starts within {@code maxDistance} bases of it.
     */
    public boolean isOnFilteredHaplotype(final VariantContext vc, final int maxDistance) {
        final Genotype tumorGenotype = vc.getGenotype(tumorSample);

        if (!hasPhaseInfo(tumorGenotype)) {
            return false;
        }

        final String pgt = (String) tumorGenotype.getExtendedAttribute(GATKVCFConstants.HAPLOTYPE_CALLER_PHASING_GT_KEY, "");
        final String pid = (String) tumorGenotype.getExtendedAttribute(GATKVCFConstants.HAPLOTYPE_CALLER_PHASING_ID_KEY, "");
        final int position = vc.getStart();

        final Pair<String, Integer> filteredCall = filteredPhasedCalls.get(pid);
        if (filteredCall == null) {
            return false;
        }

        // Check that vc occurs on the filtered haplotype
        return filteredCall.getLeft().equals(pgt) && Math.abs(filteredCall.getRight() - position) <= maxDistance;
    }

    /**
     * Records one variant's filter result. If the variant was filtered and its
     * tumor genotype carries phasing info, its (PGT, position) is remembered
     * under its PID so nearby phased variants can be flagged later.
     */
    public void add(final FilterResult filterResult, final VariantContext vc) {
        filterResults.add(filterResult);
        final Genotype tumorGenotype = vc.getGenotype(tumorSample);

        if (!filterResult.getFilters().isEmpty() && hasPhaseInfo(tumorGenotype)) {
            final String pgt = (String) tumorGenotype.getExtendedAttribute(GATKVCFConstants.HAPLOTYPE_CALLER_PHASING_GT_KEY, "");
            final String pid = (String) tumorGenotype.getExtendedAttribute(GATKVCFConstants.HAPLOTYPE_CALLER_PHASING_ID_KEY, "");
            final int position = vc.getStart();
            filteredPhasedCalls.put(pid, new ImmutablePair<>(pgt, position));
        }
    }

    /**
     * Learns the read-orientation artifact threshold from the posteriors of
     * calls that passed all other filters, then marks the pass complete.
     */
    public void learnModelForSecondPass(final double requestedFPR){
        // Only variants with no other filters contribute to the threshold.
        final double[] readOrientationPosteriors = getFilterResults().stream()
                .filter(r -> r.getFilters().isEmpty())
                .mapToDouble(r -> r.getReadOrientationPosterior())
                .toArray();

        final FilterStats readOrientationFilterStats = calculateThresholdForReadOrientationFilter(readOrientationPosteriors, requestedFPR);
        filterStats.put(GATKVCFConstants.READ_ORIENTATION_ARTIFACT_FILTER_NAME, readOrientationFilterStats);
        readyForSecondPass = true;
    }

    /**
     *
     * Compute the filtering threshold that ensures that the false positive rate among the resulting pass variants
     * will not exceed the requested false positive rate
     *
     * @param posteriors A list of posterior probabilities, which gets sorted
     * @param requestedFPR We set the filtering threshold such that the FPR doesn't exceed this value
     * @return statistics describing the chosen threshold, expected false positives, and expected FPR
     */
    public static FilterStats calculateThresholdForReadOrientationFilter(final double[] posteriors, final double requestedFPR){
        ParamUtils.isPositiveOrZero(requestedFPR, "requested FPR must be non-negative");
        final double thresholdForFilteringNone = 1.0;
        final double thresholdForFilteringAll = 0.0;

        Arrays.sort(posteriors);

        final int numPassingVariants = posteriors.length;
        double cumulativeExpectedFPs = 0.0;

        for (int i = 0; i < numPassingVariants; i++){
            final double posterior = posteriors[i];

            // One can show that the cumulative error rate is monotonically increasing in i
            final double expectedFPR = (cumulativeExpectedFPs + posterior) / (i + 1);
            if (expectedFPR > requestedFPR){
                // Threshold is the last posterior that kept the FPR within bounds;
                // if even the first variant exceeds it, filter everything.
                return i > 0 ?
                        new FilterStats(GATKVCFConstants.READ_ORIENTATION_ARTIFACT_FILTER_NAME, posteriors[i-1],
                                cumulativeExpectedFPs, i-1, cumulativeExpectedFPs/i, requestedFPR) :
                        new FilterStats(GATKVCFConstants.READ_ORIENTATION_ARTIFACT_FILTER_NAME, thresholdForFilteringAll,
                                0.0, 0, 0.0, requestedFPR);
            }

            cumulativeExpectedFPs += posterior;
        }

        // If the expected FP rate never exceeded the max tolerable value, then we can let everything pass
        return new FilterStats(GATKVCFConstants.READ_ORIENTATION_ARTIFACT_FILTER_NAME, thresholdForFilteringNone,
                cumulativeExpectedFPs, numPassingVariants, cumulativeExpectedFPs/numPassingVariants, requestedFPR);
    }

    /** True when the genotype carries both PGT and PID phasing attributes. */
    public static boolean hasPhaseInfo(final Genotype genotype) {
        return genotype.hasExtendedAttribute(GATKVCFConstants.HAPLOTYPE_CALLER_PHASING_GT_KEY) && genotype.hasExtendedAttribute(GATKVCFConstants.HAPLOTYPE_CALLER_PHASING_ID_KEY);
    }

    public List<FilterResult> getFilterResults() {
        return filterResults;
    }

    /** Immutable summary of a filter's learned threshold and its expected error rates. */
    public static class FilterStats {
        private final String filterName;
        private final double threshold;
        private final double expectedNumFPs;
        private final int numPassingVariants;
        private final double expectedFPR;
        private final double requestedFPR;

        public FilterStats(final String filterName, final double threshold, final double expectedNumFPs,
                           final int numPassingVariants, final double expectedFPR, final double requestedFPR){
            this.filterName = filterName;
            this.threshold = threshold;
            this.expectedNumFPs = expectedNumFPs;
            this.numPassingVariants = numPassingVariants;
            this.expectedFPR = expectedFPR;
            this.requestedFPR = requestedFPR;
        }

        public String getFilterName() { return filterName; }

        public double getExpectedNumFPs() { return expectedNumFPs; }

        public int getNumPassingVariants() { return numPassingVariants; }

        public double getThreshold() { return threshold; }

        public double getExpectedFPR() { return expectedFPR; }

        public double getRequestedFPR() { return requestedFPR; }
    }

    // Columns of the filter-stats summary table written by writeM2FilterSummary().
    private enum M2FilterStatsTableColumn {
        FILTER_NAME("filter_name"),
        THRESHOLD("threshold"),
        EXPECTED_FALSE_POSITIVES("expected_fps"),
        EXPECTED_FALSE_POSITIVE_RATE("expected_fpr"),
        REQUESTED_FALSE_POSITIVE_RATE("requested_fpr"),
        NUM_PASSING_VARIANTS("num_passing_variants");

        private String columnName;

        M2FilterStatsTableColumn(final String columnName) {
            this.columnName = columnName;
        }

        @Override
        public String toString() { return columnName; }

        public static final TableColumnCollection COLUMNS = new TableColumnCollection((Object[]) values());
    }

    // TableWriter that serializes one FilterStats per line.
    private static class Mutect2FilterStatsWriter extends TableWriter<FilterStats> {
        private Mutect2FilterStatsWriter(final File output) throws IOException {
            super(output, M2FilterStatsTableColumn.COLUMNS);
        }

        @Override
        protected void composeLine(final FilterStats stats, final DataLine dataLine) {
            dataLine.set(M2FilterStatsTableColumn.FILTER_NAME.toString(), stats.getFilterName())
                    .set(M2FilterStatsTableColumn.THRESHOLD.toString(), stats.getThreshold())
                    .set(M2FilterStatsTableColumn.EXPECTED_FALSE_POSITIVES.toString(), stats.getExpectedNumFPs())
                    .set(M2FilterStatsTableColumn.EXPECTED_FALSE_POSITIVE_RATE.toString(), stats.getExpectedFPR())
                    .set(M2FilterStatsTableColumn.REQUESTED_FALSE_POSITIVE_RATE.toString(), stats.getRequestedFPR())
                    .set(M2FilterStatsTableColumn.NUM_PASSING_VARIANTS.toString(), stats.getNumPassingVariants());
        }
    }

    /**
     * Writes the learned filter statistics to a tab-separated summary table.
     *
     * @throws UserException on any underlying IO failure
     */
    public void writeM2FilterSummary(final File outputTable) {
        try (Mutect2FilterStatsWriter writer = new Mutect2FilterStatsWriter(outputTable)) {
            writer.writeAllRecords(filterStats.values());
        } catch (IOException e) {
            throw new UserException(String.format("Encountered an IO exception while writing to %s.", outputTable), e);
        }
    }
}
| |
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package androidx.leanback.widget;
import androidx.collection.CircularArray;
import androidx.collection.CircularIntArray;
import java.io.PrintWriter;
/**
* A dynamic data structure that caches staggered grid position information
* for each individual child. The algorithm ensures that each row will be kept
* as balanced as possible when prepending and appending a child.
*
* <p>
* You may keep view {@link StaggeredGrid.Location} inside StaggeredGrid as much
* as possible since prepending and appending views is not symmetric: layout
* going from 0 to N will likely produce a different result than layout going
* from N to 0 for the staggered cases. If a user scrolls from 0 to N then
* scrolls back to 0 and we don't keep history location information, edges of
* the very beginning of rows will not be aligned. It is recommended to keep a
* list of tens of thousands of {@link StaggeredGrid.Location}s which will be
* big enough to remember a typical user's scroll history.
*
* <p>
* This class is abstract and can be replaced with different implementations.
*/
abstract class StaggeredGrid extends Grid {
/**
 * Cached representation of Staggered item.
 */
public static class Location extends Grid.Location {
    /**
     * Offset to previous item location.
     * min_edge(index) - min_edge(index - 1) for non reversed case
     * max_edge(index) - max_edge(index - 1) for reversed case
     */
    public int offset;

    /**
     * size of the item.
     */
    public int size;

    /** Creates a cached location: row assignment plus edge offset and measured size. */
    public Location(int row, int offset, int size) {
        super(row);
        this.offset = offset;
        this.size = size;
    }
}
// Cached per-item locations; entry i corresponds to adapter index mFirstIndex + i.
protected CircularArray<Location> mLocations = new CircularArray<Location>(64);

// mFirstIndex <= mFirstVisibleIndex <= mLastVisibleIndex
// <= mFirstIndex + mLocations.size() - 1
protected int mFirstIndex = -1;

// Item created via mProvider but not yet consumed, together with its measured
// size. NOTE(review): cleared in prependVisibleItems()'s finally block; the
// producing side is outside this view — confirm lifecycle against the append path.
protected Object mPendingItem;
protected int mPendingItemSize;
/**
 * Returns index of first item (cached or visible) in the staggered grid.
 * Returns negative value if no item (mFirstIndex stays -1 until items are cached).
 */
public final int getFirstIndex() {
    return mFirstIndex;
}
/**
 * Returns index of last item (cached or visible) in the staggered grid.
 * Returns negative value if no item (derived from mFirstIndex and cache size).
 */
public final int getLastIndex() {
    return mFirstIndex + mLocations.size() - 1;
}
/**
 * Returns the size of the saved {@link Location}s (number of cached entries).
 */
public final int getSize() {
    return mLocations.size();
}
/**
 * Looks up the cached {@link Location} for an absolute item index, or null
 * when the index falls outside the cached window.
 */
@Override
public final Location getLocation(int index) {
    // Translate the absolute index into an offset within the circular cache.
    final int offsetInCache = index - mFirstIndex;
    if (offsetInCache >= 0 && offsetInCache < mLocations.size()) {
        return mLocations.get(offsetInCache);
    }
    return null;
}
/**
 * Dumps the cached locations to the writer, one "&lt;index,row&gt; " entry per line.
 */
@Override
public final void debugPrint(PrintWriter pw) {
    final int count = mLocations.size();
    for (int i = 0; i < count; i++) {
        final Location loc = mLocations.get(i);
        // Emits the same bytes as the original print/print/println sequence.
        pw.println("<" + (mFirstIndex + i) + "," + loc.row + "> ");
    }
}
@Override
protected final boolean prependVisibleItems(int toLimit, boolean oneColumnMode) {
if (mProvider.getCount() == 0) {
return false;
}
if (!oneColumnMode && checkPrependOverLimit(toLimit)) {
return false;
}
try {
if (prependVisbleItemsWithCache(toLimit, oneColumnMode)) {
return true;
}
return prependVisibleItemsWithoutCache(toLimit, oneColumnMode);
} finally {
mTmpItem[0] = null;
mPendingItem = null;
}
}
/**
* Prepends items using cached locations, returning true if toLimit is reached.
* This method should only be called by prependVisibleItems().
*/
protected final boolean prependVisbleItemsWithCache(int toLimit, boolean oneColumnMode) {
if (mLocations.size() == 0) {
return false;
}
int itemIndex;
int edge;
int offset;
if (mFirstVisibleIndex >= 0) {
// prepend visible items from first visible index
edge = mProvider.getEdge(mFirstVisibleIndex);
offset = getLocation(mFirstVisibleIndex).offset;
itemIndex = mFirstVisibleIndex - 1;
} else {
// prepend first visible item
edge = Integer.MAX_VALUE;
offset = 0;
itemIndex = mStartIndex != START_DEFAULT ? mStartIndex : 0;
if (itemIndex > getLastIndex() || itemIndex < getFirstIndex() - 1) {
// if the item is not within or adjacent to cached items, clear cache.
mLocations.clear();
return false;
} else if (itemIndex < getFirstIndex()) {
// if the item is adjacent to first index, should prepend without cache.
return false;
}
}
int firstIndex = Math.max(mProvider.getMinIndex(), mFirstIndex);
for (; itemIndex >= firstIndex; itemIndex--) {
Location loc = getLocation(itemIndex);
int rowIndex = loc.row;
int size = mProvider.createItem(itemIndex, false, mTmpItem, false);
if (size != loc.size) {
mLocations.removeFromStart(itemIndex + 1 - mFirstIndex);
mFirstIndex = mFirstVisibleIndex;
// pending item will be added in prependVisibleItemsWithoutCache
mPendingItem = mTmpItem[0];
mPendingItemSize = size;
return false;
}
mFirstVisibleIndex = itemIndex;
if (mLastVisibleIndex < 0) {
mLastVisibleIndex = itemIndex;
}
mProvider.addItem(mTmpItem[0], itemIndex, size, rowIndex, edge - offset);
if (!oneColumnMode && checkPrependOverLimit(toLimit)) {
return true;
}
edge = mProvider.getEdge(itemIndex);
offset = loc.offset;
// Check limit after filled a full column
if (rowIndex == 0) {
if (oneColumnMode) {
return true;
}
}
}
return false;
}
/**
* Calculate offset of item after last cached item.
*/
private int calculateOffsetAfterLastItem(int row) {
// Find a cached item in same row, if not found, just use last item.
int cachedIndex = getLastIndex();
boolean foundCachedItemInSameRow = false;
while (cachedIndex >= mFirstIndex) {
Location loc = getLocation(cachedIndex);
if (loc.row == row) {
foundCachedItemInSameRow = true;
break;
}
cachedIndex--;
}
if (!foundCachedItemInSameRow) {
cachedIndex = getLastIndex();
}
// Assuming the cachedIndex is next to item on the same row, so the
// sum of offset of [cachedIndex + 1, itemIndex] should be size of the
// cached item plus spacing.
int offset = isReversedFlow() ? -getLocation(cachedIndex).size - mSpacing:
getLocation(cachedIndex).size + mSpacing;
for (int i = cachedIndex + 1; i <= getLastIndex(); i++) {
offset -= getLocation(i).offset;
}
return offset;
}
/**
* This implements the algorithm of layout staggered grid, the method should only be called by
* prependVisibleItems().
*/
protected abstract boolean prependVisibleItemsWithoutCache(int toLimit, boolean oneColumnMode);
/**
* Prepends one visible item with new Location info. Only called from
* prependVisibleItemsWithoutCache().
*/
protected final int prependVisibleItemToRow(int itemIndex, int rowIndex, int edge) {
int offset;
if (mFirstVisibleIndex >= 0) {
if (mFirstVisibleIndex != getFirstIndex() || mFirstVisibleIndex != itemIndex + 1) {
// should never hit this when we prepend a new item with a new Location object.
throw new IllegalStateException();
}
}
Location oldFirstLoc = mFirstIndex >= 0 ? getLocation(mFirstIndex) : null;
int oldFirstEdge = mProvider.getEdge(mFirstIndex);
Location loc = new Location(rowIndex, 0, 0);
mLocations.addFirst(loc);
Object item;
if (mPendingItem != null) {
loc.size = mPendingItemSize;
item = mPendingItem;
mPendingItem = null;
} else {
loc.size = mProvider.createItem(itemIndex, false, mTmpItem, false);
item = mTmpItem[0];
}
mFirstIndex = mFirstVisibleIndex = itemIndex;
if (mLastVisibleIndex < 0) {
mLastVisibleIndex = itemIndex;
}
int thisEdge = !mReversedFlow ? edge - loc.size : edge + loc.size;
if (oldFirstLoc != null) {
oldFirstLoc.offset = oldFirstEdge - thisEdge;
}
mProvider.addItem(item, itemIndex, loc.size, rowIndex, thisEdge);
return loc.size;
}
@Override
protected final boolean appendVisibleItems(int toLimit, boolean oneColumnMode) {
if (mProvider.getCount() == 0) {
return false;
}
if (!oneColumnMode && checkAppendOverLimit(toLimit)) {
return false;
}
try {
if (appendVisbleItemsWithCache(toLimit, oneColumnMode)) {
return true;
}
return appendVisibleItemsWithoutCache(toLimit, oneColumnMode);
} finally {
mTmpItem[0] = null;
mPendingItem = null;
}
}
/**
* Appends items using cached locations, returning true if at least one item is appended
* and (oneColumnMode is true or reach limit and aboveIndex).
* This method should only be called by appendVisibleItems()
*/
protected final boolean appendVisbleItemsWithCache(int toLimit, boolean oneColumnMode) {
if (mLocations.size() == 0) {
return false;
}
final int count = mProvider.getCount();
int itemIndex;
int edge;
if (mLastVisibleIndex >= 0) {
// append visible items from last visible index
itemIndex = mLastVisibleIndex + 1;
edge = mProvider.getEdge(mLastVisibleIndex);
} else {
// append first visible item
edge = Integer.MAX_VALUE;
itemIndex = mStartIndex != START_DEFAULT ? mStartIndex : 0;
if (itemIndex > getLastIndex() + 1 || itemIndex < getFirstIndex()) {
// if the item is not within or adjacent to cached items, clear cache.
mLocations.clear();
return false;
} else if (itemIndex > getLastIndex()) {
// if the item is adjacent to first index, should prepend without cache.
return false;
}
}
int lastIndex = getLastIndex();
for (; itemIndex < count && itemIndex <= lastIndex; itemIndex++) {
Location loc = getLocation(itemIndex);
if (edge != Integer.MAX_VALUE) {
edge = edge + loc.offset;
}
int rowIndex = loc.row;
int size = mProvider.createItem(itemIndex, true, mTmpItem, false);
if (size != loc.size) {
loc.size = size;
mLocations.removeFromEnd(lastIndex - itemIndex);
lastIndex = itemIndex;
}
mLastVisibleIndex = itemIndex;
if (mFirstVisibleIndex < 0) {
mFirstVisibleIndex = itemIndex;
}
mProvider.addItem(mTmpItem[0], itemIndex, size, rowIndex, edge);
if (!oneColumnMode && checkAppendOverLimit(toLimit)) {
return true;
}
if (edge == Integer.MAX_VALUE) {
edge = mProvider.getEdge(itemIndex);
}
// Check limit after filled a full column
if (rowIndex == mNumRows - 1) {
if (oneColumnMode) {
return true;
}
}
}
return false;
}
/**
* algorithm of layout staggered grid, this method should only be called by
* appendVisibleItems().
*/
protected abstract boolean appendVisibleItemsWithoutCache(int toLimit, boolean oneColumnMode);
/**
* Appends one visible item with new Location info. Only called from
* appendVisibleItemsWithoutCache().
*/
protected final int appendVisibleItemToRow(int itemIndex, int rowIndex, int location) {
int offset;
if (mLastVisibleIndex >= 0) {
if (mLastVisibleIndex != getLastIndex() || mLastVisibleIndex != itemIndex - 1) {
// should never hit this when we append a new item with a new Location object.
throw new IllegalStateException();
}
}
if (mLastVisibleIndex < 0) {
// if we append first visible item after existing cached items, we need update
// the offset later when prependVisbleItemsWithCache()
if (mLocations.size() > 0 && itemIndex == getLastIndex() + 1) {
offset = calculateOffsetAfterLastItem(rowIndex);
} else {
offset = 0;
}
} else {
offset = location - mProvider.getEdge(mLastVisibleIndex);
}
Location loc = new Location(rowIndex, offset, 0);
mLocations.addLast(loc);
Object item;
if (mPendingItem != null) {
loc.size = mPendingItemSize;
item = mPendingItem;
mPendingItem = null;
} else {
loc.size = mProvider.createItem(itemIndex, true, mTmpItem, false);
item = mTmpItem[0];
}
if (mLocations.size() == 1) {
mFirstIndex = mFirstVisibleIndex = mLastVisibleIndex = itemIndex;
} else {
if (mLastVisibleIndex < 0) {
mFirstVisibleIndex = mLastVisibleIndex = itemIndex;
} else {
mLastVisibleIndex++;
}
}
mProvider.addItem(item, itemIndex, loc.size, rowIndex, location);
return loc.size;
}
@Override
public final CircularIntArray[] getItemPositionsInRows(int startPos, int endPos) {
for (int i = 0; i < mNumRows; i++) {
mTmpItemPositionsInRows[i].clear();
}
if (startPos >= 0) {
for (int i = startPos; i <= endPos; i++) {
CircularIntArray row = mTmpItemPositionsInRows[getLocation(i).row];
if (row.size() > 0 && row.getLast() == i - 1) {
// update continuous range
row.popLast();
row.addLast(i);
} else {
// add single position
row.addLast(i);
row.addLast(i);
}
}
}
return mTmpItemPositionsInRows;
}
@Override
public void invalidateItemsAfter(int index) {
super.invalidateItemsAfter(index);
mLocations.removeFromEnd(getLastIndex() - index + 1);
if (mLocations.size() == 0) {
mFirstIndex = -1;
}
}
}
| |
// ----------------------------------------------------------------------------
// Copyright 2007-2015, GeoTelematic Solutions, Inc.
// All rights reserved
// ----------------------------------------------------------------------------
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// ----------------------------------------------------------------------------
// Change History:
// 2007/03/25 Martin D. Flynn
// -Initial release
// 2007/06/13 Martin D. Flynn
// -Moved to package "org.opengts.war.report"
// -Renamed 'DeviceList' to 'ReportDeviceList'
// 2009/09/23 Martin D. Flynn
//     -Fixed bug that could cause a "ConcurrentModificationException"
// ----------------------------------------------------------------------------
package org.opengts.war.report;
import java.lang.*;
import java.util.*;
import org.opengts.util.*;
import org.opengts.dbtools.*;
import org.opengts.db.tables.*;
public class ReportDeviceList
    extends DBRecord<ReportDeviceList> // not really a database table
{

    // ------------------------------------------------------------------------
    // ------------------------------------------------------------------------

    /**
     *** Lazy holder for a Device: keeps account/device IDs and loads the
     *** Device record on first access.
     **/
    public static class DeviceHolder
    {
        private Account account    = null;
        private String  deviceID   = null;
        private String  deviceDesc = null;
        private Device  device     = null;
        public DeviceHolder(Account acct, String devID) {
            super();
            this.account  = acct;
            this.deviceID = devID;
            this.device   = null;
        }
        public DeviceHolder(Device dev) {
            this(dev.getAccount(), dev.getDeviceID());
            this.device = dev;
        }
        /* attach an already-loaded Device, but only if it matches this holder's IDs */
        public void setDevice(Device dev) {
            if ((dev != null) &&
                this.account.getAccountID().equals(dev.getAccountID()) &&
                this.deviceID.equals(dev.getDeviceID())) {
                this.device = dev;
            }
        }
        public String getDeviceID() {
            return this.deviceID;
        }
        /* true if the Device record has already been loaded */
        public boolean hasDevice() {
            return (this.device != null);
        }
        /* return the Device description (lazily cached; "" if unavailable) */
        public String getDeviceDescription() {
            if (this.deviceDesc == null) {
                try {
                    Device device = this.getDevice();
                    if (device != null) {
                        this.deviceDesc = device.getDescription();
                    } else {
                        this.deviceDesc = "";
                    }
                } catch (DBException dbe) {
                    this.deviceDesc = "";
                }
            }
            return this.deviceDesc;
        }
        /* return the Device record, loading it on first call (null if non-existent) */
        public Device getDevice() throws DBException {
            if ((this.device == null) && (this.account != null) && (this.deviceID != null)) {
                this.device = Device.getDevice(this.account, this.deviceID); // null if non-existent
                if (this.device == null) {
                    // -- so we don't try again
                    this.account  = null;
                    this.deviceID = null;
                }
            }
            return this.device;
        }
    }

    /**
     *** Orders DeviceHolders by (lower-cased) device description,
     *** ascending or descending.
     **/
    private static class DeviceHolderComparator
        implements Comparator<DeviceHolder>
    {
        private boolean ascending = true;
        public DeviceHolderComparator() {
            this(true);
        }
        public DeviceHolderComparator(boolean ascending) {
            this.ascending = ascending;
        }
        public int compare(DeviceHolder dh1, DeviceHolder dh2) {
            // assume we are comparing DeviceHolder records
            if (dh1 == dh2) {
                return 0; // exact same object (or both null)
            } else
            if (dh1 == null) {
                return this.ascending? -1 : 1; // null < non-null
            } else
            if (dh2 == null) {
                return this.ascending? 1 : -1; // non-null > null
            } else {
                String D1 = dh1.getDeviceDescription().toLowerCase(); // dh1.getDeviceID();
                String D2 = dh2.getDeviceDescription().toLowerCase(); // dh2.getDeviceID();
                return this.ascending? D1.compareTo(D2) : D2.compareTo(D1);
            }
        }
        public boolean equals(Object other) {
            if (other instanceof DeviceHolderComparator) {
                DeviceHolderComparator dhc = (DeviceHolderComparator)other;
                return (this.ascending == dhc.ascending);
            }
            return false;
        }
        public int hashCode() {
            // consistent with equals(): identity depends only on 'ascending'
            return this.ascending? 1 : 0;
        }
    }

    // ------------------------------------------------------------------------
    // ------------------------------------------------------------------------

    private Account                  account  = null;
    private User                     user     = null;
    private DeviceGroup              devGroup = null;
    private boolean                  isGroup  = false;
    private Map<String,DeviceHolder> devMap   = null;

    // ------------------------------------------------------------------------

    /* generic instance (devices will be added later) */
    public ReportDeviceList(Account acct, User user)
    {
        super();
        this.account = acct;
        this.user    = user;
        this.devMap  = null;
    }

    /* constructor with specific device */
    public ReportDeviceList(Account acct, User user, Device device)
    {
        this(acct, user);
        this.add(device);
    }

    /* constructor with a list of devices */
    public ReportDeviceList(Account acct, User user, String devID[])
    {
        this(acct, user);
        this.add(devID);
    }

    /* constructor with a specific group */
    public ReportDeviceList(Account acct, User user, DeviceGroup group)
    {
        this(acct, user);
        this.devGroup = group;
        this.isGroup  = true;
        this.add(group);
    }

    // ------------------------------------------------------------------------

    /* return account id */
    public String getAccountID()
    {
        return (this.account != null)? this.account.getAccountID() : "";
    }

    /* return account db instance */
    public Account getAccount()
    {
        return this.account;
    }

    // ------------------------------------------------------------------------

    /* return user id */
    public String getUserID()
    {
        return (this.user != null)? this.user.getUserID() : "";
    }

    /* return user db instance */
    public User getUser()
    {
        return this.user;
    }

    // ------------------------------------------------------------------------

    /* return device group id */
    public String getDeviceGroupID()
    {
        return (this.devGroup != null)? this.devGroup.getGroupID() : "";
    }

    /* return device group db instance */
    public DeviceGroup getDeviceGroup()
    {
        return this.devGroup;
    }

    /* return true if group */
    public boolean isDeviceGroup()
    {
        return this.isGroup || (this.size() > 1);
    }

    // ------------------------------------------------------------------------

    /**
     *** Returns true if the specified DeviceID is contained within this map
     **/
    public boolean containsDevice(String devID)
    {
        return this.getDeviceMap().containsKey(devID);
    }

    /**
     *** Gets the device for named id (must already exist in the device map)
     **/
    public Device getDevice(String devID)
        throws DBException
    {
        /* invalid device-id specified? */
        if (StringTools.isBlank(devID)) {
            return null;
        }
        /* return device */
        DeviceHolder dh = this.getDeviceMap().get(devID);
        return (dh != null)? dh.getDevice() : null;
    }

    // ------------------------------------------------------------------------

    /* clear the device map */
    public void clear()
    {
        if (this.devMap != null) {
            this.devMap.clear(); // set Map to empty
        }
    }

    /* return the internal device map (lazily created) */
    protected Map<String,DeviceHolder> getDeviceMap()
    {
        if (this.devMap == null) {
            this.devMap = new HashMap<String,DeviceHolder>(10);
        }
        return this.devMap;
    }

    // ------------------------------------------------------------------------

    /* set the single DeviceHolder object (clears any previous entries) */
    public boolean setDevice(User user, DeviceHolder dh)
    {
        this.clear();
        try {
            this._addDevice(user, dh);
            return true;
        } catch (DBException dbe) {
            return false;
        }
    }

    /* add DeviceHolder if absent from list (user must be authorized, or null) */
    protected void _addDevice(User user, DeviceHolder dh)
        throws DBException
    {
        String devID = dh.getDeviceID();
        if ((user == null) || user.isAuthorizedDevice(devID)) {
            Map<String,DeviceHolder> dm = this.getDeviceMap();
            if (dm.containsKey(devID)) {
                // -- already present, try updating device
                if (dh.hasDevice()) { // probably will be false
                    DeviceHolder dmdh = dm.get(devID);
                    dmdh.setDevice(dh.getDevice());
                }
            } else {
                // -- new entry, add DeviceHolder
                dm.put(devID, dh);
            }
        }
    }

    /* add Device if absent from list (user must be authorized, or null) */
    protected void _addDevice(User user, Device device)
        throws DBException
    {
        String devID = device.getDeviceID();
        if ((user == null) || user.isAuthorizedDevice(devID)) {
            Map<String,DeviceHolder> dm = this.getDeviceMap();
            if (dm.containsKey(devID)) {
                // already present, update device
                DeviceHolder dmdh = dm.get(devID);
                dmdh.setDevice(device);
            } else {
                // new entry, add device
                DeviceHolder dh = new DeviceHolder(device);
                dm.put(devID, dh);
            }
        }
    }

    // ------------------------------------------------------------------------

    /* add device to map (returns false on null device or authorization failure) */
    public boolean add(Device device)
    {
        /* invalid device */
        if (device == null) {
            return false;
        }
        /* add device */
        //Print.logStackTrace("Adding device: " + device.getDeviceID());
        User user = this.getUser();
        try {
            this._addDevice(user, device);
            return true;
        } catch (DBException dbe) {
            return false;
        }
    }

    /* add list of devices to map */
    public boolean add(String devID[])
    {
        /* empty list */
        if (ListTools.isEmpty(devID)) {
            return false;
        }
        /* add devices from list */
        //Print.logStackTrace("Adding devices ...");
        Account acct = this.getAccount();
        User    user = this.getUser();
        try {
            for (int i = 0; i < devID.length; i++) {
                this._addDevice(user, new DeviceHolder(acct, devID[i]));
            }
            return true;
        } catch (DBException dbe) {
            return false;
        }
    }

    /* add list of devices to map */
    public boolean add(java.util.List<String> devIDList)
    {
        /* empty list */
        if (ListTools.isEmpty(devIDList)) {
            return false;
        }
        /* add devices from list */
        //Print.logStackTrace("Adding devices ...");
        Account acct = this.getAccount();
        User    user = this.getUser();
        try {
            for (String devID : devIDList) {
                this._addDevice(user, new DeviceHolder(acct, devID));
            }
            return true;
        } catch (DBException dbe) {
            return false;
        }
    }

    /* add device to map */
    public boolean add(String devID)
    {
        /* invalid Device id? */
        if (StringTools.isBlank(devID)) {
            return false;
        }
        /* add device id */
        //Print.logStackTrace("Adding device: " + devID);
        Account acct = this.getAccount();
        User    user = this.getUser();
        try {
            this._addDevice(user, new DeviceHolder(acct, devID));
            return true;
        } catch (DBException dbe) {
            return false;
        }
    }

    /* add device-group to map (group must belong to this list's account) */
    public boolean add(DeviceGroup group)
    {
        /* invalid group */
        if (group == null) {
            return false;
        }
        /* AccountID mismatch? */
        String acctID = this.getAccountID();
        if (!acctID.equals(group.getAccountID())) {
            return false;
        }
        /* add devices from group */
        Account acct = this.getAccount();
        User    user = this.getUser();
        try {
            OrderedSet<String> devIDSet = DeviceGroup.getDeviceIDsForGroup(acctID, group.getGroupID(), null/*User*/, false);
            for (int i = 0; i < devIDSet.size(); i++) {
                this._addDevice(user, new DeviceHolder(acct, devIDSet.get(i)));
            }
            this.isGroup = true;
            return true;
        } catch (DBException dbe) {
            Print.logException("Unable to add DeviceGroup", dbe);
            return false;
        }
    }

    // ------------------------------------------------------------------------

    /* add all user authorized devices to the internal device map */
    public void addAllAuthorizedDevices()
    {
        try {
            User    usr  = this.getUser();
            Account acct = this.getAccount();
            OrderedSet<String> list = User.getAuthorizedDeviceIDs(usr, acct, false);
            //Print.logInfo("Authorized devices: " + list.size());
            this.add(list);
        } catch (DBException dbe) {
            Print.logException("Unable to add all User devices", dbe);
        }
    }

    // ------------------------------------------------------------------------

    /* return number of devices currently in the map */
    public int size()
    {
        return this.getDeviceMap().size();
    }

    // ------------------------------------------------------------------------

    /* return a device-id map iterator */
    public Iterator<String> iterator()
    {
        return this.getDeviceMap().keySet().iterator();
    }

    // ------------------------------------------------------------------------

    /* return the device map values (optionally sorted by device description) */
    public java.util.List<DeviceHolder> getDeviceHolderList(boolean sort)
    {
        java.util.List<DeviceHolder> dhList = new Vector<DeviceHolder>(this.getDeviceMap().values());
        if (sort) {
            Collections.sort(dhList, new DeviceHolderComparator());
        }
        return dhList;
    }

    // ------------------------------------------------------------------------

    /* return the first deviceID in the map ("" if empty) */
    public String getFirstDeviceID()
    {
        Iterator<String> i = this.iterator();
        if (i.hasNext()) {
            return i.next();
        } else {
            return "";
        }
    }

    /* return the first device in the map (null if empty or on error) */
    public Device getFirstDevice()
    {
        String devID = this.getFirstDeviceID();
        if (!devID.equals("")) {
            try {
                return this.getDevice(devID);
            } catch (DBException dbe) {
                return null;
            }
        }
        return null;
    }

    // ------------------------------------------------------------------------

    /* return a string representation of this ReportDeviceList */
    public String toString()
    {
        StringBuilder sb = new StringBuilder();
        sb.append("ReportDeviceList:");
        sb.append(" Account=").append(this.getAccountID());
        sb.append(" User=").append(this.getUserID());
        sb.append(" Group=").append(this.getDeviceGroupID());
        sb.append(" Size=").append(this.size());
        return sb.toString();
    }

    // ------------------------------------------------------------------------

}
| |
package uk.ac.gcu.bluedroid;
import uk.ac.gcu.bluedroid.game.GameState;
import uk.ac.gcu.bluedroid.game.Player;
import uk.ac.gcu.bluedroid.game.TurnInfo;
import uk.ac.gcu.bluedroid.resources.Camp;
import uk.ac.gcu.bluedroid.resources.Crop;
import uk.ac.gcu.bluedroid.resources.Mine;
import uk.ac.gcu.bluedroid.resources.Resource;
import uk.ac.gcu.bluedroid.units.Archer;
import uk.ac.gcu.bluedroid.units.Paladin;
import uk.ac.gcu.bluedroid.units.Soldier;
import uk.ac.gcu.bluedroid.units.Unit;
import uk.ac.gcu.bluedroid.util.CustomImageVIew;
import uk.ac.gcu.bluedroid.util.Util;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.Dialog;
import android.app.ProgressDialog;
import android.os.Bundle;
import android.bluetooth.BluetoothAdapter;
import android.bluetooth.BluetoothDevice;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Color;
import android.graphics.LightingColorFilter;
import android.graphics.Point;
import android.graphics.Typeface;
import android.graphics.drawable.Drawable;
import android.os.Handler;
import android.os.Message;
import android.support.v4.view.GestureDetectorCompat;
import android.text.Html;
import android.util.DisplayMetrics;
import android.util.Log;
import android.util.TypedValue;
import android.view.Display;
import android.view.MotionEvent;
import android.view.View;
import android.view.Window;
import android.view.WindowManager;
import android.view.GestureDetector.SimpleOnGestureListener;
import android.view.View.OnClickListener;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.HorizontalScrollView;
import android.widget.ImageView;
import android.widget.ProgressBar;
import android.widget.RelativeLayout;
import android.widget.ScrollView;
import android.widget.TextView;
import android.widget.Toast;
/**
* This is the main Activity that displays the current chat session.
*/
public class MainActivity extends Activity implements OnClickListener {
// Debugging
private static final String TAG = "MainActivity";
private static final boolean D = true;
// Message types sent from the BluetoothChatService Handler
public static final int MESSAGE_STATE_CHANGE = 1;
public static final int MESSAGE_RECEIVED = 2;
public static final int MESSAGE_SENT = 3;
public static final int MESSAGE_DEVICE_NAME = 4;
public static final int MESSAGE_TOAST = 5;
// Key names received from the BluetoothChatService Handler
public static final String DEVICE_NAME = "device_name";
public static final String TOAST = "toast";
// Intent request codes
private static final int REQUEST_CONNECT_DEVICE_SECURE = 1;
private static final int REQUEST_CONNECT_DEVICE_INSECURE = 2;
private static final int REQUEST_ENABLE_BT = 3;
// Name of the connected device
private String mConnectedDeviceName = null;
// Array adapter for the conversation thread
private ArrayAdapter<String> mConversationArrayAdapter;
// String buffer for outgoing messages
private StringBuffer mOutStringBuffer;
// Local Bluetooth adapter
private BluetoothAdapter mBluetoothAdapter = null;
// Member object for the chat services
private BluetoothChatService mChatService = null;
// Game variables
private static final String START_GAME = "start";
private static final String END_GAME = "end";
private Button startButton, connectionButton, mapButton1, mapButton2,
mapButton3, exitButton; // buttons
private final Context context = this;
private boolean gameOn = false;
private boolean server = false;
private int player = 0;
private boolean isMyTurn = false;
private ScrollView scrollY;
private HorizontalScrollView scrollX;
private RelativeLayout mapContainer;
private GameState state;
private GestureDetectorCompat mDetector;
// Selected unit and resource
private Unit selectedUnit = null;
private Resource selectedResource = null;
// Constants
private static final int ACTION_NONE = 0;
private static final int ACTION_MOVE = 1;
private static final int ACTION_ATTACK = 2;
// Current action
private int action = ACTION_NONE;
private ProgressDialog progressDialog = null;
private View viewMap = null;
private TurnInfo myTurn = null, enemyTurn;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
if (D)
Log.e(TAG, "+++ ON CREATE +++");
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
// Set up the window layout
setContentView(R.layout.main);
startButton = (Button) findViewById(R.id.startButton);
startButton.setOnClickListener(this);
startButton.setVisibility(View.GONE);
connectionButton = (Button) findViewById(R.id.connectionButton);
connectionButton.setOnClickListener(this);
mapButton1 = (Button) findViewById(R.id.mapButton1);
mapButton1.setOnClickListener(this);
mapButton2 = (Button) findViewById(R.id.mapButton2);
mapButton2.setClickable(false);
mapButton3 = (Button) findViewById(R.id.mapButton3);
mapButton3.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
Util.simpleAlertDialog("Oops!",
"I'm sorry.\nThis map is not available yet", context);
}
});
exitButton = (Button) findViewById(R.id.exit);
exitButton.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
View view = getLayoutInflater().inflate(
R.layout.exit_confirmation, null);
AlertDialog.Builder builder = new AlertDialog.Builder(context);
builder.setView(view);
final Dialog dialog = builder.show();
view.findViewById(R.id.yes).setOnClickListener(
new OnClickListener() {
@Override
public void onClick(View v) {
MainActivity.this.finish();
}
});
view.findViewById(R.id.no).setOnClickListener(
new OnClickListener() {
@Override
public void onClick(View v) {
dialog.dismiss();
}
});
}
});
mapButton1.setVisibility(View.GONE);
mapButton2.setVisibility(View.GONE);
mapButton3.setVisibility(View.GONE);
// Get local Bluetooth adapter
mBluetoothAdapter = BluetoothAdapter.getDefaultAdapter();
// If the adapter is null, then Bluetooth is not supported
if (mBluetoothAdapter == null) {
Toast.makeText(this, "Bluetooth is not available",
Toast.LENGTH_LONG).show();
finish();
return;
}
}
@Override
public void onClick(View v) {
if (v == startButton) {
mapButton1.setVisibility(View.VISIBLE);
mapButton2.setVisibility(View.VISIBLE);
mapButton3.setVisibility(View.VISIBLE);
connectionButton.setVisibility(View.GONE);
startButton.setVisibility(View.GONE);
}
if (v == connectionButton) {
Log.d(TAG, "----Connections Button----");
showCustomDialog();
}
if (v == mapButton1) {
setupMap(1);
}
if (v == mapButton2) {
setupMap(2);
}
if (v == mapButton3) {
setupMap(3);
}
}
/**
*
* @param mapId
*/
@SuppressLint("InflateParams")
private void setupMap(int mapId) {
Log.d(TAG, "----Start Button----");
if (server)
sendStart(mapId);
gameOn = true;
viewMap = getLayoutInflater().inflate(R.layout.map, null);
setContentView(viewMap);
state = new GameState(this);
myTurn = new TurnInfo();
enemyTurn = new TurnInfo();
scrollY = (ScrollView) findViewById(R.id.scrollY);
scrollX = (HorizontalScrollView) findViewById(R.id.scrollX);
mapContainer = (RelativeLayout) findViewById(R.id.map);
mDetector = new GestureDetectorCompat(this, new MyOnGestureListener());
findViewById(R.id.endturn).setOnClickListener(new OnClickListener() {
@Override
public void onClick(View arg0) {
if (isMyTurn) {
endTurn();
}
}
});
if (!isMyTurn) {
progressDialog = new ProgressDialog(context);
progressDialog.setMessage("Waiting for other Player..");
progressDialog.setCanceledOnTouchOutside(false);
progressDialog.show();
}
drawEverything();
}
private void showCustomDialog() {
final Dialog dialog = new Dialog(this);
// Gets the dialogs XML file.
dialog.setContentView(R.layout.menu);
dialog.setTitle("Connection Menu");
dialog.setCancelable(true);
// Intent serverIntent = null;
Button dialogButton1 = (Button) dialog
.findViewById(R.id.connectDialogButton);
dialogButton1.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
Intent serverIntent = new Intent(context,
DeviceListActivity.class);
startActivityForResult(serverIntent,
REQUEST_CONNECT_DEVICE_INSECURE);
dialog.dismiss();
}
});
Button dialogButton2 = (Button) dialog
.findViewById(R.id.discoverableDialogButton);
dialogButton2.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
ensureDiscoverable();
dialog.dismiss();
}
});
dialog.show();
}
@Override
public void onStart() {
super.onStart();
if (D)
Log.e(TAG, "++ ON START ++");
// If BT is not on, request that it be enabled.
// setupChat() will then be called during onActivityResult
if (!mBluetoothAdapter.isEnabled()) {
Intent enableIntent = new Intent(
BluetoothAdapter.ACTION_REQUEST_ENABLE);
startActivityForResult(enableIntent, REQUEST_ENABLE_BT);
// Otherwise, setup the chat session
} else {
if (mChatService == null)
setupChat();
}
}
@Override
public synchronized void onResume() {
super.onResume();
if (D)
Log.e(TAG, "+ ON RESUME +");
// Performing this check in onResume() covers the case in which BT was
// not enabled during onStart(), so we were paused to enable it...
// onResume() will be called when ACTION_REQUEST_ENABLE activity
// returns.
if (mChatService != null) {
// Only if the state is STATE_NONE, do we know that we haven't
// started already
if (mChatService.getState() == BluetoothChatService.STATE_NONE) {
// Start the Bluetooth chat services
mChatService.start();
}
}
}
/** Creates the Bluetooth chat service and the outgoing-message buffer. */
private void setupChat() {
    Log.d(TAG, "setupChat()");
    // Service that performs the actual Bluetooth connections/transfers.
    mChatService = new BluetoothChatService(this, mHandler);
    // Buffer for outgoing messages.
    mOutStringBuffer = new StringBuffer("");
}
@Override
public synchronized void onPause() {
    super.onPause();
    if (D) {
        Log.e(TAG, "- ON PAUSE -");
    }
}
@Override
public void onStop() {
    super.onStop();
    if (D) {
        Log.e(TAG, "-- ON STOP --");
    }
}
@Override
public void onDestroy() {
    super.onDestroy();
    // Tear down the Bluetooth chat service threads, if any.
    if (mChatService != null) {
        mChatService.stop();
    }
    if (D) {
        Log.e(TAG, "--- ON DESTROY ---");
    }
}
/**
 * Makes this device discoverable over Bluetooth for 300 seconds, unless
 * it is already in discoverable mode.
 */
private void ensureDiscoverable() {
    if (D) {
        Log.d(TAG, "ensure discoverable");
    }
    boolean alreadyDiscoverable = mBluetoothAdapter.getScanMode()
            == BluetoothAdapter.SCAN_MODE_CONNECTABLE_DISCOVERABLE;
    if (!alreadyDiscoverable) {
        Intent discoverableIntent =
                new Intent(BluetoothAdapter.ACTION_REQUEST_DISCOVERABLE);
        discoverableIntent.putExtra(
                BluetoothAdapter.EXTRA_DISCOVERABLE_DURATION, 300);
        startActivity(discoverableIntent);
    }
}
/**
 * Verifies that a Bluetooth chat connection is established.
 *
 * @return {@code true} when connected; otherwise shows a "not connected"
 *         toast and returns {@code false}
 */
private boolean checkConnected() {
    if (mChatService.getState() == BluetoothChatService.STATE_CONNECTED) {
        return true;
    }
    Toast.makeText(this, R.string.not_connected, Toast.LENGTH_SHORT).show();
    return false;
}
/**
 * Notifies the remote player that the game starts on the given map.
 *
 * @param mapId identifier of the selected map
 */
private void sendStart(int mapId) {
    if (!checkConnected()) {
        return;
    }
    mChatService.send("string", START_GAME + "," + mapId);
}
/** Sends this player's turn to the opponent and shows the waiting dialog. */
private void sendTurn() {
    if (!checkConnected()) {
        return;
    }
    mChatService.send("turn_info", myTurn);
    // Block further input until the opponent's turn arrives (dismissed
    // by mHandler on MESSAGE_RECEIVED).
    showProgressDialog();
}
/**
 * Finishes the local player's turn: clears the unit selection and
 * pending action, credits this player's resource income, advances the
 * turn in the game state, and sends the turn to the opponent.
 */
private void endTurn() {
    selectedUnit = null;
    action = ACTION_NONE;
    // Resource income for the player who just finished (1-based id).
    state.getMap().updateResources(state.getPlayers()[player - 1]);
    state.updateTurn();
    sendTurn();
}
/**
 * The Handler that gets information back from the BluetoothChatService.
 * Drives the two-player protocol: reacts to connection-state changes,
 * tracks whose turn it is, and replays the opponent's turn on the local
 * game state when a TurnInfo arrives.
 */
@SuppressLint("HandlerLeak")
private final Handler mHandler = new Handler() {
    @Override
    public void handleMessage(Message msg) {
        switch (msg.what) {
        case MESSAGE_STATE_CHANGE:
            if (D)
                Log.i(TAG, "MESSAGE_STATE_CHANGE: " + msg.arg1);
            switch (msg.arg1) {
            case BluetoothChatService.STATE_CONNECTED:
                // The "server" side becomes player 1, gets the first
                // turn, and sees the start button.
                if (server) {
                    startButton.setVisibility(View.VISIBLE);
                    player = 1;
                    isMyTurn = true;
                } else {
                    player = 2;
                }
                break;
            case BluetoothChatService.STATE_CONNECTING:
                break;
            case BluetoothChatService.STATE_LISTEN:
            case BluetoothChatService.STATE_NONE:
                break;
            }
            break;
        case MESSAGE_SENT:
            // String payloads are control messages and do not end the
            // turn; sending a TurnInfo object does.
            if (msg.obj instanceof String) {
            } else {
                isMyTurn = false;
            }
            break;
        case MESSAGE_RECEIVED:
            if (msg.obj instanceof String) {
                // Control message of the form "START_GAME,<mapId>".
                String message = (String) msg.obj;
                String[] pieces = message.split(",");
                if (pieces[0].equals(START_GAME)) {
                    setupMap(Integer.valueOf(pieces[1]));
                }
            } else {
                // Opponent's turn: replay it on the local game state.
                enemyTurn = (TurnInfo) msg.obj;
                if (enemyTurn.getRecruitedUnit() != TurnInfo.HAS_NOT_RECRUITED) {
                    // If the recruit square is currently occupied, the
                    // opponent presumably moved/attacked off it before
                    // recruiting, so replay those first and clear the
                    // flags to avoid replaying them again below.
                    if (enemyTurn.getHasMoved()
                            && state.getMap().getUnit(
                                    enemyTurn.getRecruitPos()) != null) {
                        if(enemyTurn.getHasAttacked()) {
                            attackAndMove(enemyTurn);
                            enemyTurn.setHasAttacked(false);
                        } else
                            move(enemyTurn);
                        enemyTurn.setHasMoved(false);
                    }
                    recruit(enemyTurn);
                }
                // Replay whatever move/attack actions remain flagged.
                if (enemyTurn.getHasAttacked() && enemyTurn.getHasMoved())
                    attackAndMove(enemyTurn);
                else if (enemyTurn.getHasMoved())
                    move(enemyTurn);
                else if (enemyTurn.getHasAttacked())
                    attack(enemyTurn);
                // It is our turn now: reset the outgoing turn record,
                // redraw, and dismiss the "waiting" dialog.
                isMyTurn = true;
                myTurn = new TurnInfo();
                drawEverything();
                progressDialog.dismiss();
            }
            break;
        case MESSAGE_DEVICE_NAME:
            // Save the connected device's name and announce it.
            mConnectedDeviceName = msg.getData().getString(DEVICE_NAME);
            Toast.makeText(context, "Connected to " + mConnectedDeviceName,
                    Toast.LENGTH_SHORT).show();
            break;
        case MESSAGE_TOAST:
            // Service asked us to surface a message to the user.
            Toast.makeText(context, msg.getData().getString(TOAST),
                    Toast.LENGTH_SHORT).show();
            break;
        }
    }
};
/**
 * Replays an opponent's recruitment: spawns the recruited unit for the
 * other player at the recorded position and marks the recruiting camp
 * as no longer working.
 */
private void recruit(TurnInfo enemyTurn) {
    int opponent = (player == 1) ? 2 : 1;
    int recruited = enemyTurn.getRecruitedUnit();
    if (recruited == TurnInfo.ARCHER_RECRUITED) {
        state.getMap().addUnit(new Archer(opponent, enemyTurn.getRecruitPos()));
    } else if (recruited == TurnInfo.PALADIN_RECRUITED) {
        state.getMap().addUnit(new Paladin(opponent, enemyTurn.getRecruitPos()));
    } else if (recruited == TurnInfo.SOLDIER_RECRUITED) {
        state.getMap().addUnit(new Soldier(opponent, enemyTurn.getRecruitPos()));
    }
    // The resource at the recruit position is expected to be a Camp.
    Camp camp = (Camp) state.getMap().getResource(enemyTurn.getRecruitPos());
    camp.setWorking(false);
}
/** Replays an opponent turn that both moved a unit and attacked. */
private void attackAndMove(TurnInfo enemyTurn) {
    move(enemyTurn);
    // After the move, the attacker sits at the recorded end position.
    Unit attacker = state.getMap().getUnit(enemyTurn.getUnitEndPos());
    Unit target = state.getMap().getUnit(enemyTurn.getUnitTargetPos());
    target.takeDemage(attacker.getPower());
}
/** Replays an opponent's unit movement from its start to end position. */
private void move(TurnInfo enemyTurn) {
    Unit mover = state.getMap().getUnit(enemyTurn.getUnitStartPos());
    state.getMap().moveUnit(mover, enemyTurn.getUnitEndPos());
}
/** Replays an opponent's attack (no movement involved). */
private void attack(TurnInfo enemyTurn) {
    Unit attacker = state.getMap().getUnit(enemyTurn.getUnitEndPos());
    Unit target = state.getMap().getUnit(enemyTurn.getUnitTargetPos());
    target.takeDemage(attacker.getPower());
}
/**
 * Handles results from the device-picker and Bluetooth-enable requests.
 *
 * @param requestCode which outgoing request this result answers
 * @param resultCode  {@code Activity.RESULT_OK} on success
 * @param data        result intent (carries the chosen device address)
 */
@Override // was missing: this overrides Activity.onActivityResult
public void onActivityResult(int requestCode, int resultCode, Intent data) {
    if (D) {
        Log.d(TAG, "onActivityResult " + resultCode);
    }
    switch (requestCode) {
    case REQUEST_CONNECT_DEVICE_INSECURE:
        // DeviceListActivity returned with a device to connect to.
        if (resultCode == Activity.RESULT_OK) {
            connectDevice(data);
        }
        break;
    case REQUEST_ENABLE_BT:
        // The request to enable Bluetooth returned.
        if (resultCode == Activity.RESULT_OK) {
            // Bluetooth is now enabled, so set up a chat session.
            setupChat();
        } else {
            // User did not enable Bluetooth or an error occurred; the
            // app cannot function without it.
            Log.d(TAG, "BT not enabled");
            Toast.makeText(this, R.string.bt_not_enabled_leaving,
                    Toast.LENGTH_SHORT).show();
            finish();
        }
        break;
    }
}
/**
 * Initiates an outgoing Bluetooth connection to the device chosen in
 * DeviceListActivity.
 *
 * @param data result intent whose extras carry the target device's MAC
 *             address under EXTRA_DEVICE_ADDRESS
 */
private void connectDevice(Intent data) {
    // Get the device MAC address
    String address = data.getExtras().getString(
            DeviceListActivity.EXTRA_DEVICE_ADDRESS);
    // Get the BluetoothDevice object
    BluetoothDevice device = mBluetoothAdapter.getRemoteDevice(address);
    // Attempt to connect to the device
    mChatService.connect(device, false);
    // NOTE(review): the side that *initiates* the connection is flagged
    // as "server" here, and mHandler later grants that side player 1 and
    // the first turn — confirm this orientation is intentional.
    server = true;
}
/**
 * Routes touch events to the scroll views and the gesture detector
 * while a game is in progress. Always reports the event as consumed.
 */
@Override
public boolean dispatchTouchEvent(MotionEvent event) {
    super.dispatchTouchEvent(event);
    if (!gameOn) {
        return true;
    }
    scrollX.dispatchTouchEvent(event);
    scrollY.onTouchEvent(event);
    mDetector.onTouchEvent(event);
    return true;
}
/**
 * Converts a density-independent-pixel value to raw pixels for the
 * current display.
 *
 * @param context source of display metrics
 * @param dp      value in dp
 * @return equivalent value in whole pixels
 */
int DPtoPX(Context context, float dp) {
    DisplayMetrics metrics = context.getResources().getDisplayMetrics();
    return (int) TypedValue.applyDimension(
            TypedValue.COMPLEX_UNIT_DIP, dp, metrics);
}
/**
 * Converts a raw pixel value to density-independent pixels for the
 * current display. Inverse of {@link #DPtoPX}.
 *
 * @param context source of display metrics
 * @param px      value in pixels
 * @return equivalent value in dp
 */
float PXtoDP(Context context, float px) {
    Resources resources = context.getResources();
    DisplayMetrics metrics = resources.getDisplayMetrics();
    // px = dp * (densityDpi / 160), so dp = px / (densityDpi / 160).
    // The previous divisor of 158.4f did not match the standard 160-dpi
    // baseline used by DPtoPX/applyDimension, so round-tripping px<->dp
    // was off by ~1% and skewed the tap-to-tile mapping.
    return px / (metrics.densityDpi / (float) DisplayMetrics.DENSITY_DEFAULT);
}
/**
 * Redraws the dynamic layer of the board: removes every overlay view
 * (keeping the base map image), re-adds all unit views, and refreshes
 * the header for the current player.
 */
void drawEverything() {
    // Walk backwards so removals do not shift the remaining indices.
    for (int index = mapContainer.getChildCount() - 1; index >= 0; index--) {
        if (mapContainer.getChildAt(index).getId() != R.id.mapimg) {
            mapContainer.removeViewAt(index);
        }
    }
    // Add views
    addUnits();
    // Organize header
    refreshHeader(this.player);
}
/**
 * Adds an ImageView sprite plus a small horizontal life bar for every
 * unit on the map. Tiles are laid out as 50dp squares; the sprite
 * drawable is resolved by name from the unit's prefix concatenated with
 * its owner id (e.g. "a1").
 */
private void addUnits() {
    for (int i = 0; i < state.getMap().getX(); i++) {
        for (int j = 0; j < state.getMap().getY(); j++) {
            Unit u = state.getMap().getUnit(i, j);
            if (u == null)
                continue;
            ImageView unit = new ImageView(this);
            // Look up the sprite drawable by name: prefix + owner id.
            unit.setImageResource(getResources().getIdentifier(
                    (u.getPrefix() + u.getOwner()), "drawable",
                    getPackageName()));
            ProgressBar lifeBar = new ProgressBar(this, null,
                    android.R.attr.progressBarStyleHorizontal);
            // Below 50% health the life bar is tinted red.
            if ((Math.round(100 * u.getLife() / (float) u.getMax_life())) < 50) {
                Drawable drawable = lifeBar.getProgressDrawable();
                drawable.setColorFilter(new LightingColorFilter(0x00000000,
                        Color.RED));
            }
            lifeBar.setProgress(Math.round(100 * u.getLife()
                    / (float) u.getMax_life()));
            // Sprite positioned on its 50dp tile (i, j).
            RelativeLayout.LayoutParams lp = new RelativeLayout.LayoutParams(
                    DPtoPX(this, 50), DPtoPX(this, 50));
            lp.addRule(RelativeLayout.ALIGN_PARENT_START);
            lp.leftMargin = DPtoPX(this, i * 50f);
            lp.topMargin = DPtoPX(this, j * 50f);
            // Life bar on the same column, raised 30dp into the tile.
            RelativeLayout.LayoutParams lp2 = new RelativeLayout.LayoutParams(
                    DPtoPX(this, 50), DPtoPX(this, 50));
            lp2.addRule(RelativeLayout.BELOW, unit.getId());
            lp2.leftMargin = DPtoPX(this, i * 50f);
            lp2.topMargin = DPtoPX(this, (j * 50f) - 30f);
            mapContainer.addView(unit, lp);
            mapContainer.addView(lifeBar, lp2);
        }
    }
}
/**
 * Refreshes header information of the game.
 *
 * @param player
 *            player whose resource totals are shown (1-based index into
 *            state.getPlayers())
 */
private void refreshHeader(final int player) {
    Button resources = (Button) viewMap.findViewById(R.id.resourcesButton);
    TextView food = (TextView) viewMap.findViewById(R.id.foodValue);
    TextView gold = (TextView) viewMap.findViewById(R.id.goldValue);
    TextView camps = (TextView) viewMap.findViewById(R.id.campValue);
    TextView player_text = (TextView) viewMap
            .findViewById(R.id.playerValue);
    // NOTE(review): the label shows the activity field this.player while
    // the totals below use the 'player' parameter; callers can pass a
    // different id (see resourceAction), so these may diverge — confirm
    // this is intended.
    player_text.setText(String.valueOf(this.player));
    food.setText(String.valueOf(state.getPlayers()[player - 1].getFood()));
    gold.setText(String.valueOf(state.getPlayers()[player - 1].getGold()));
    camps.setText(String.valueOf(state.getPlayers()[player - 1].getCamps()));
    // Tapping "resources" opens a dialog listing crop/mine/camp counts.
    resources.setOnClickListener(new OnClickListener() {
        @SuppressLint("InflateParams")
        @Override
        public void onClick(View v) {
            AlertDialog.Builder builder = new AlertDialog.Builder(context);
            View view = getLayoutInflater().inflate(R.layout.resources,
                    null);
            builder.setView(view);
            final Dialog dialog = builder.show();
            TextView crops = (TextView) view.findViewById(R.id.cropsValue);
            TextView mines = (TextView) view.findViewById(R.id.minesValue);
            TextView camps = (TextView) view.findViewById(R.id.campsValue);
            Button ok = (Button) view.findViewById(R.id.ok);
            crops.setText(String.valueOf(state.getPlayers()[player - 1]
                    .getCrops()));
            mines.setText(String.valueOf(state.getPlayers()[player - 1]
                    .getMines()));
            camps.setText(String.valueOf(state.getPlayers()[player - 1]
                    .getCamps()));
            ok.setOnClickListener(new OnClickListener() {
                @Override
                public void onClick(View v) {
                    dialog.dismiss();
                }
            });
        }
    });
}
/**
 * Opens a modal "waiting for the other player" dialog; it is dismissed
 * programmatically by mHandler when the opponent's turn arrives.
 */
private void showProgressDialog() {
    progressDialog = new ProgressDialog(context);
    progressDialog.setMessage("Waiting for Other Player..");
    progressDialog.show();
    // Prevent accidental dismissal; only mHandler closes this dialog.
    progressDialog.setCanceledOnTouchOutside(false);
}
/**
 * Opens the unit-details dialog for the currently selected unit. Enemy
 * units show information only; the local player's units additionally
 * offer Move/Attack actions, limited by what has already happened this
 * turn (one move; after moving, only the moved unit may attack).
 */
@SuppressLint("InflateParams")
private void unitAction() {
    // Unit dialog creation
    View view = getLayoutInflater().inflate(R.layout.actions, null);
    AlertDialog.Builder builder = new AlertDialog.Builder(context);
    builder.setView(view);
    final Dialog dialog = builder.show();
    TextView name = (TextView) view.findViewById(R.id.name);
    TextView life = (TextView) view.findViewById(R.id.life);
    TextView id = (TextView) view.findViewById(R.id.id);
    TextView player_text = (TextView) view.findViewById(R.id.player);
    TextView power = (TextView) view.findViewById(R.id.power);
    TextView attack_range = (TextView) view.findViewById(R.id.attack_range);
    TextView move_range = (TextView) view.findViewById(R.id.move_range);
    ProgressBar lifebar = (ProgressBar) view.findViewById(R.id.lifebar);
    ImageView unit = (ImageView) view.findViewById(R.id.imageView1);
    // Sprite drawable resolved by name: prefix + owner id.
    unit.setImageResource(getResources().getIdentifier(
            (selectedUnit.getPrefix() + selectedUnit.getOwner()),
            "drawable", getPackageName()));
    // The unit's type is encoded in its prefix letter.
    if (selectedUnit.getPrefix().equals("a")) { // if it's an archer
        name.setText(R.string.archer);
    } else if (selectedUnit.getPrefix().equals("i")) { // if it's a soldier
        name.setText(R.string.soldier);
    } else if (selectedUnit.getPrefix().equals("c")) { // if it's a paladin
        name.setText(R.string.paladin);
    }
    name.setTypeface(null, Typeface.BOLD);
    player_text.setText(Html.fromHtml("<b>Player ID: </b>"
            + selectedUnit.getOwner()));
    power.setText(Html.fromHtml("<b>Power: </b>" + selectedUnit.getPower()));
    life.setText(Html.fromHtml("<b>Life: </b>" + selectedUnit.getLife()
            + "/" + selectedUnit.getMax_life()));
    lifebar.setProgress(Math.round(100 * selectedUnit.getLife()
            / (float) selectedUnit.getMax_life()));
    Button cancel = (Button) view.findViewById(R.id.cancel);
    Button move = (Button) view.findViewById(R.id.move);
    Button attack = (Button) view.findViewById(R.id.attack);
    cancel.setOnClickListener(new OnClickListener() { // cancel button action
        @Override
        public void onClick(View arg0) {
            dialog.dismiss();
        }
    });
    if (selectedUnit.getOwner() != player) { // enemy unit: info only
        id.setText("ENEMY");
        id.setTypeface(null, Typeface.BOLD);
        id.setTextColor(Color.RED);
        move.setVisibility(View.GONE);
        attack.setVisibility(View.GONE);
        move_range.setVisibility(View.GONE);
        attack_range.setVisibility(View.GONE);
    } else { // current player's unit: show stats and enable actions
        id.setText(Html.fromHtml("<b>Unit ID: </b>" + selectedUnit.getId()));
        attack_range.setText(Html.fromHtml("<b>Attack range: </b>"
                + selectedUnit.getRange()));
        move_range.setText(Html.fromHtml("<b>Move range: </b>"
                + selectedUnit.getMove()));
        // Only one move allowed per turn.
        if (myTurn.getHasMoved())
            move.setVisibility(View.GONE);
        else
            move.setOnClickListener(new OnClickListener() { // move action
                @Override
                public void onClick(View arg0) {
                    action = ACTION_MOVE;
                    dialog.dismiss();
                }
            });
        // After a move, only the unit that moved may still attack.
        if (myTurn.getHasMoved()
                && !selectedUnit.getPosition().equals(
                        myTurn.getUnitEndPos()))
            attack.setVisibility(View.GONE);
        else
            attack.setOnClickListener(new OnClickListener() { // attack action
                @Override
                public void onClick(View arg0) {
                    action = ACTION_ATTACK;
                    dialog.dismiss();
                }
            });
    }
}
/**
 * Opens the dialog appropriate for the currently selected resource:
 * an informational dialog for mines and crops, and for camps either an
 * informational dialog (unowned, or owned but already used) or the unit
 * recruitment menu.
 */
@SuppressLint("InflateParams")
private void resourceAction() {
    // Resource dialog creation
    View view = null;
    AlertDialog.Builder builder = new AlertDialog.Builder(context);
    if (selectedResource instanceof Mine
            || selectedResource instanceof Crop) {
        // Mines and crops are informational only: type + owner.
        view = getLayoutInflater().inflate(R.layout.resource, null);
        builder.setView(view);
        final Dialog dialog = builder.show();
        TextView type = (TextView) view.findViewById(R.id.type);
        TextView owner = (TextView) view.findViewById(R.id.owner);
        Button cancel = (Button) view.findViewById(R.id.cancel);
        // Owner 0 means the resource is unclaimed.
        owner.setText(Html.fromHtml("<b>Owner: </b>"
                + (selectedResource.getOwner() == 0 ? "None" : "Player: "
                        + selectedResource.getOwner())));
        if (selectedResource instanceof Mine) {
            type.setText(Html.fromHtml("<b>Type: </b>Mine"));
        } else if (selectedResource instanceof Crop) {
            type.setText(Html.fromHtml("<b>Type: </b>Crop"));
        }
        cancel.setOnClickListener(new OnClickListener() { // cancel action
            @Override
            public void onClick(View arg0) {
                dialog.dismiss();
            }
        });
    } else if (selectedResource instanceof Camp) { // if it's a camp
        if (selectedResource.getOwner() == 0) { // camp not yet claimed
            // setup dialog
            view = getLayoutInflater().inflate(
                    R.layout.camp_alert_no_owner, null);
            builder.setView(view);
            final Dialog dialog2 = builder.show();
            Button ok = (Button) view.findViewById(R.id.ok);
            ok.setOnClickListener(new OnClickListener() { // ok button action
                @Override
                public void onClick(View arg0) {
                    dialog2.dismiss();
                }
            });
        } else if (((Camp) selectedResource).isWorking() == false) {
            // Camp is owned but currently not working: show an
            // informational dialog only.
            view = getLayoutInflater().inflate(R.layout.camp_alert2, null);
            builder.setView(view);
            final Dialog dialog2 = builder.show();
            Button ok = (Button) view.findViewById(R.id.ok);
            ok.setOnClickListener(new OnClickListener() { // ok button action
                @Override
                public void onClick(View arg0) {
                    dialog2.dismiss();
                }
            });
        } else { // owned and working: the player may recruit a unit here
            view = getLayoutInflater().inflate(R.layout.camp, null);
            builder.setView(view);
            final Dialog dialog3 = builder.show();
            Button cancel = (Button) view.findViewById(R.id.cancel);
            Button paladin = (Button) view.findViewById(R.id.paladin);
            Button archer = (Button) view.findViewById(R.id.archer);
            Button soldier = (Button) view.findViewById(R.id.soldier);
            // Paladin recruitment: check cost, spawn the unit, deduct
            // gold/food/camp-use, redraw, mark the camp consumed, and
            // record the recruitment in myTurn for the opponent replay.
            paladin.setOnClickListener(new OnClickListener() {
                @Override
                public void onClick(View arg0) {
                    if (((Camp) selectedResource).hasEnoughResources(
                            Camp.PALADIN, state.getPlayers()[player - 1])) {
                        state.getMap().addUnit(
                                new Paladin(selectedResource.getOwner(),
                                        selectedResource.getPosition()));
                        state.getPlayers()[player - 1].updateResource(
                                Player.GOLD, -Camp.PALADIN_COST_GOLD);
                        state.getPlayers()[player - 1].updateResource(
                                Player.FOOD, -Camp.PALADIN_COST_FOOD);
                        state.getPlayers()[player - 1].updateResource(
                                Player.CAMPS, -1);
                        drawEverything();
                        ((Camp) selectedResource).setWorking(false);
                        myTurn.setRecruitPos(selectedResource.getPosition());
                        myTurn.setRecruitedUnit(TurnInfo.PALADIN_RECRUITED);
                    } else {
                        Toast.makeText(context, R.string.enoughResources,
                                Toast.LENGTH_SHORT).show();
                    }
                    dialog3.dismiss();
                }
            });
            // Archer recruitment: same flow with archer costs.
            archer.setOnClickListener(new OnClickListener() {
                @Override
                public void onClick(View arg0) {
                    if (((Camp) selectedResource).hasEnoughResources(
                            Camp.ARCHER, state.getPlayers()[player - 1])) {
                        state.getMap().addUnit(
                                new Archer(selectedResource.getOwner(),
                                        selectedResource.getPosition()));
                        state.getPlayers()[player - 1].updateResource(
                                Player.GOLD, -Camp.ARCHER_COST_GOLD);
                        state.getPlayers()[player - 1].updateResource(
                                Player.FOOD, -Camp.ARCHER_COST_FOOD);
                        state.getPlayers()[player - 1].updateResource(
                                Player.CAMPS, -1);
                        drawEverything();
                        ((Camp) selectedResource).setWorking(false);
                        myTurn.setRecruitPos(selectedResource.getPosition());
                        myTurn.setRecruitedUnit(TurnInfo.ARCHER_RECRUITED);
                    } else {
                        Toast.makeText(context, R.string.enoughResources,
                                Toast.LENGTH_SHORT).show();
                    }
                    dialog3.dismiss();
                }
            });
            // Soldier recruitment: same flow with soldier costs.
            soldier.setOnClickListener(new OnClickListener() {
                @Override
                public void onClick(View arg0) {
                    if (((Camp) selectedResource).hasEnoughResources(
                            Camp.SOLDIER, state.getPlayers()[player - 1])) {
                        state.getMap().addUnit(
                                new Soldier(selectedResource.getOwner(),
                                        selectedResource.getPosition()));
                        state.getPlayers()[player - 1].updateResource(
                                Player.GOLD, -Camp.SOLDIER_COST_GOLD);
                        state.getPlayers()[player - 1].updateResource(
                                Player.FOOD, -Camp.SOLDIER_COST_FOOD);
                        state.getPlayers()[player - 1].updateResource(
                                Player.CAMPS, -1);
                        drawEverything();
                        ((Camp) selectedResource).setWorking(false);
                        myTurn.setRecruitPos(selectedResource.getPosition());
                        myTurn.setRecruitedUnit(TurnInfo.SOLDIER_RECRUITED);
                    } else {
                        Toast.makeText(context, R.string.enoughResources,
                                Toast.LENGTH_SHORT).show();
                    }
                    dialog3.dismiss();
                }
            });
            cancel.setOnClickListener(new OnClickListener() { // cancel action
                @Override
                public void onClick(View arg0) {
                    dialog3.dismiss();
                }
            });
            // Refresh the header for the camp's owner after recruitment.
            refreshHeader(selectedResource.getOwner());
        }
    }
}
/**
 * Gesture listener translating confirmed single taps into game actions.
 * A tap is mapped from screen pixels (adjusted for scroll offsets and
 * the 50dp header band) to 50dp board tile coordinates, then dispatched
 * according to the pending action: select, move, or attack.
 */
class MyOnGestureListener extends SimpleOnGestureListener {
    @Override
    public boolean onSingleTapConfirmed(MotionEvent event) {
        Point size = new Point();
        Display display = getWindowManager().getDefaultDisplay();
        display.getSize(size);
        // Ignore taps in the 50dp header and footer bands.
        if (event.getY() < DPtoPX(context, 50)
                || event.getY() > size.y - DPtoPX(context, 50))
            return true;
        // Screen position -> board pixels (account for scroll + header).
        int tmpX = ((int) event.getX() + scrollX.getScrollX());
        int tmpY = ((int) event.getY() + scrollY.getScrollY() - DPtoPX(
                context, 50));
        // Board pixels -> 50dp tile indices.
        int x = (int) Math.floor((PXtoDP(context, tmpX) / 50));
        int y = (int) Math.floor((PXtoDP(context, tmpY) / 50));
        switch (action) {
        case ACTION_NONE:
            // No pending action: a tap selects a unit or a resource.
            selectedUnit = state.getMap().getUnit(x, y);
            selectedResource = state.getMap().getResource(x, y);
            if (selectedUnit != null) {
                unitAction();
            } else if (selectedResource != null) {
                resourceAction();
            }
            break;
        case ACTION_MOVE:
            if (selectedUnit != null) {
                Log.e("pos", x + " " + y);
                // walkable(x, y) == 0 blocks movement; other values
                // appear to identify the resource type on the tile
                // (compared against Player.CAMPS/MINES/CROPS below).
                if (state.getMap().walkable(x, y) == 0)
                    Toast.makeText(context, R.string.cantMove,
                            Toast.LENGTH_SHORT).show();
                else if (!state.getMap().canWalkTo(
                        selectedUnit.getPosition().getX(),
                        selectedUnit.getPosition().getY(), x, y,
                        selectedUnit.getMove()))
                    Toast.makeText(MainActivity.this, R.string.tooFar,
                            Toast.LENGTH_SHORT).show();
                else { // destination may hold a capturable resource
                    if (state.getMap().walkable(x, y) == Player.CAMPS) {
                        // Moving onto a camp captures it.
                        state.getPlayers()[player - 1].updateResource(
                                Player.CAMPS, 1);
                    } else if (state.getMap().walkable(x, y) == Player.MINES) {
                        // Moving onto a mine captures it.
                        state.getPlayers()[player - 1].updateResource(
                                Player.MINES, 1);
                    } else if (state.getMap().walkable(x, y) == Player.CROPS) {
                        // Moving onto a crop captures it.
                        state.getPlayers()[player - 1].updateResource(
                                Player.CROPS, 1);
                    }
                    // Record the move in myTurn for the opponent replay.
                    myTurn.setUnitStartPos(selectedUnit.getPosition());
                    state.getMap().moveUnit(selectedUnit, x, y);
                    myTurn.setUnitEndPos(selectedUnit.getPosition());
                    myTurn.setHasMoved(true);
                }
                selectedUnit = null;
                action = ACTION_NONE;
            }
            break;
        case ACTION_ATTACK:
            if (selectedUnit != null) {
                Unit target = state.getMap().getUnit(x, y);
                if (target != null) {
                    if (target.getOwner() == selectedUnit.getOwner()) {
                        // Friendly fire is not allowed.
                        Toast.makeText(context, R.string.ownUnit,
                                Toast.LENGTH_SHORT).show();
                    } else if (Math.abs(selectedUnit.getPosition().getX()
                            - target.getPosition().getX())
                            + Math.abs(selectedUnit.getPosition().getY()
                                    - target.getPosition().getY()) <= selectedUnit
                            .getRange()) {
                        // Target within Manhattan-distance range: apply
                        // damage, record the attack, and end the turn.
                        myTurn.setUnitEndPos(selectedUnit.getPosition());
                        myTurn.setHasAttacked(true);
                        myTurn.setUnitTargetPos(target.getPosition());
                        target.takeDemage(selectedUnit.getPower());
                        Toast.makeText(
                                context,
                                "Unit " + target.getId() + " from Player "
                                        + target.getOwner() + " took "
                                        + selectedUnit.getPower()
                                        + " points of damage from Unit "
                                        + selectedUnit.getId()
                                        + " (Player: "
                                        + selectedUnit.getOwner() + ")",
                                Toast.LENGTH_LONG).show();
                        // NOTE(review): kill check uses == 0, so it
                        // assumes takeDemage clamps life at zero —
                        // confirm it never goes negative.
                        if (target.getLife() == 0) {
                            state.getMap().removeUnit(target);
                        }
                        endTurn();
                    } else
                        Toast.makeText(context, R.string.tooFar,
                                Toast.LENGTH_SHORT).show();
                } else
                    Toast.makeText(context, R.string.noTarget,
                            Toast.LENGTH_SHORT).show();
                selectedUnit = null;
                action = ACTION_NONE;
            }
            break;
        }
        drawEverything();
        return true;
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache.index;
import java.sql.SQLException;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.Ignition;
import org.apache.ignite.binary.BinaryObject;
import org.apache.ignite.cache.query.annotations.QuerySqlField;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.internal.processors.query.QueryField;
import org.apache.ignite.internal.processors.query.QueryUtils;
import org.apache.ignite.testframework.config.GridTestProperties;
import static org.apache.ignite.testframework.config.GridTestProperties.BINARY_MARSHALLER_USE_SIMPLE_NAME_MAPPER;
/**
 * Test to check dynamic columns related features. Exercises
 * {@code ALTER TABLE ... ADD COLUMN} against both dynamically created
 * and statically configured caches on a three-node cluster (coordinator
 * server, plain server, client); concrete subclasses choose which node
 * queries are executed on via {@link #nodeIndex()}.
 */
public abstract class H2DynamicColumnsAbstractBasicSelfTest extends DynamicColumnsAbstractTest {
    /**
     * Index of coordinator node.
     */
    final static int SRV_CRD_IDX = 0;

    /**
     * Index of non coordinator server node.
     */
    final static int SRV_IDX = 1;

    /**
     * Index of client.
     */
    final static int CLI_IDX = 2;

    /** {@inheritDoc} */
    @Override protected void beforeTestsStarted() throws Exception {
        super.beforeTestsStarted();

        // Bring up the whole cluster once for the entire test class.
        for (IgniteConfiguration cfg : configurations())
            Ignition.start(cfg);
    }

    /**
     * @return Grid configurations to start.
     * @throws Exception if failed.
     */
    private IgniteConfiguration[] configurations() throws Exception {
        return new IgniteConfiguration[] {
            commonConfiguration(0),
            commonConfiguration(1),
            clientConfiguration(2)
        };
    }

    /** {@inheritDoc} */
    @Override protected void afterTestsStopped() throws Exception {
        stopAllGrids();

        super.afterTestsStopped();
    }

    /** {@inheritDoc} */
    @Override protected void beforeTest() throws Exception {
        super.beforeTest();

        // Fresh Person table for every test method.
        run(CREATE_SQL);
    }

    /** {@inheritDoc} */
    @Override protected void afterTest() throws Exception {
        run(DROP_SQL);

        super.afterTest();
    }

    /**
     * Test column addition to the end of the columns list.
     */
    public void testAddColumnSimple() throws SQLException {
        run("ALTER TABLE Person ADD COLUMN age int");

        // Presumably gives the schema change time to propagate to all
        // nodes before verification — TODO confirm no event-based wait
        // is available.
        doSleep(500);

        QueryField c = c("AGE", Integer.class.getName());

        checkTableState(QueryUtils.DFLT_SCHEMA, "PERSON", c);
    }

    /**
     * Test column addition to the end of the columns list.
     */
    public void testAddFewColumnsSimple() throws SQLException {
        run("ALTER TABLE Person ADD COLUMN (age int, \"city\" varchar)");

        doSleep(500);

        // Unquoted identifier is upper-cased; quoted "city" keeps case.
        checkTableState(QueryUtils.DFLT_SCHEMA, "PERSON", c("AGE", Integer.class.getName()), c("city", String.class.getName()));
    }

    /**
     * Test {@code IF EXISTS} handling.
     */
    public void testIfTableExists() {
        // Must not throw even though table City does not exist.
        run("ALTER TABLE if exists City ADD COLUMN population int");
    }

    /**
     * Test {@code IF NOT EXISTS} handling.
     */
    public void testIfColumnNotExists() {
        // Must not throw even though column NAME already exists.
        run("ALTER TABLE Person ADD COLUMN if not exists name varchar");
    }

    /**
     * Test that adding a column with a duplicate name fails.
     */
    public void testDuplicateColumnName() {
        assertThrows("ALTER TABLE Person ADD COLUMN name varchar", "Column already exists: NAME");
    }

    /**
     * Test behavior in case of missing table.
     */
    public void testMissingTable() {
        assertThrows("ALTER TABLE City ADD COLUMN name varchar", "Table doesn't exist: CITY");
    }

    /** End-to-end scenario mixing ALTER TABLE, DML and index DDL. */
    @SuppressWarnings("unchecked")
    public void testComplexOperations() {
        IgniteCache<BinaryObject, BinaryObject> cache = ignite(nodeIndex())
            .cache(QueryUtils.createTableCacheName(QueryUtils.DFLT_SCHEMA, "PERSON"));

        // Add a column, populate it, and index it.
        run(cache, "ALTER TABLE Person ADD COLUMN city varchar");

        run(cache, "INSERT INTO Person (id, name, city) values (1, 'John Doe', 'New York')");

        run(cache, "INSERT INTO Person (id, name, city) values (2, 'Mike Watts', 'Denver')");

        run(cache, "INSERT INTO Person (id, name, city) values (3, 'Ann Pierce', 'New York')");

        run(cache, "CREATE INDEX pidx1 ON Person(name, city desc)");

        // Statically configured City cache sharing the default schema.
        CacheConfiguration<Integer, City> ccfg = defaultCacheConfiguration().setName("City")
            .setIndexedTypes(Integer.class, City.class).setSqlSchema(QueryUtils.DFLT_SCHEMA);

        ccfg.getQueryEntities().iterator().next().setKeyFieldName("id");

        ignite(nodeIndex()).getOrCreateCache(ccfg);

        run(cache, "ALTER TABLE City ADD COLUMN population int");

        run(cache, "CREATE INDEX cidx1 ON City(population)");

        run(cache, "CREATE INDEX cidx2 ON City(name)");

        run(cache, "INSERT INTO City(id, name, population, state) values (5, 'New York', 15000000, 'New York')," +
            "(7, 'Denver', 3000000, 'Colorado')");

        // Join on the dynamically added columns.
        List<List<?>> res = run(cache, "SELECT p.name from Person p join City c on p.city = c.name where " +
            "c.population > 5000000 order by p.name");

        assertEquals(2, res.size());

        assertEquals(Collections.singletonList("Ann Pierce"), res.get(0));

        assertEquals(Collections.singletonList("John Doe"), res.get(1));

        run(cache, "ALTER TABLE Person ADD COLUMN age int");

        run(cache, "UPDATE Person SET age = (5 - id) * 10");

        res = run(cache, "SELECT p.name from Person p join City c on p.city = c.name where " +
            "c.population > 5000000 and age < 40");

        assertEquals(1, res.size());

        assertEquals(Collections.singletonList("Ann Pierce"), res.get(0));

        // Index DDL and cleanup on the dynamically added columns.
        run(cache, "CREATE INDEX pidx2 on Person(age desc)");

        run(cache, "DROP INDEX pidx2");

        run(cache, "DROP INDEX pidx1");

        run(cache, "DROP INDEX cidx2");

        run(cache, "DROP INDEX cidx1");

        run(cache, "DELETE FROM Person where age > 10");

        assertEquals(0, cache.size());

        ignite(nodeIndex()).destroyCache("City");
    }

    /**
     * Test that we can add columns dynamically to tables associated with non dynamic caches as well.
     */
    public void testAddColumnToNonDynamicCache() throws SQLException {
        run("ALTER TABLE \"idx\".PERSON ADD COLUMN CITY varchar");

        doSleep(500);

        QueryField c = c("CITY", String.class.getName());

        checkTableState("idx", "PERSON", c);
    }

    /**
     * Test that we can add columns dynamically to tables associated with non dynamic caches storing user types as well.
     */
    @SuppressWarnings("unchecked")
    public void testAddColumnToNonDynamicCacheWithRealValueType() throws SQLException {
        CacheConfiguration<Integer, City> ccfg = defaultCacheConfiguration().setName("City")
            .setIndexedTypes(Integer.class, City.class);

        IgniteCache<Integer, ?> cache = ignite(nodeIndex()).getOrCreateCache(ccfg);

        run(cache, "ALTER TABLE \"City\".City ADD COLUMN population int");

        doSleep(500);

        QueryField c = c("POPULATION", Integer.class.getName());

        checkTableState("City", "CITY", c);

        run(cache, "INSERT INTO \"City\".City (_key, id, name, state, population) values " +
            "(1, 1, 'Washington', 'DC', 2500000)");

        List<List<?>> res = run(cache, "select _key, id, name, state, population from \"City\".City");

        assertEquals(Collections.singletonList(Arrays.asList(1, 1, "Washington", "DC", 2500000)), res);

        // With the full-name mapper the value deserializes back into the
        // City class; with the simple-name mapper it stays binary, so
        // verify the fields (including the added one) through the
        // appropriate view.
        if (!Boolean.valueOf(GridTestProperties.getProperty(BINARY_MARSHALLER_USE_SIMPLE_NAME_MAPPER))) {
            City city = (City)cache.get(1);

            assertEquals(1, city.id());

            assertEquals("Washington", city.name());

            assertEquals("DC", city.state());
        }
        else {
            BinaryObject city = (BinaryObject)cache.withKeepBinary().get(1);

            assertEquals(1, (int)city.field("id"));

            assertEquals("Washington", (String)city.field("name"));

            assertEquals("DC", (String)city.field("state"));

            assertEquals(2500000, (int)city.field("population"));
        }

        cache.destroy();
    }

    /**
     * Test addition of column with not null constraint.
     */
    public void testAddNotNullColumn() throws SQLException {
        run("ALTER TABLE Person ADD COLUMN age int NOT NULL");

        doSleep(500);

        // Third constructor argument: nullable = false.
        QueryField c = new QueryField("AGE", Integer.class.getName(), false);

        checkTableState(QueryUtils.DFLT_SCHEMA, "PERSON", c);
    }

    /**
     * Test addition of column explicitly defined as nullable.
     */
    public void testAddNullColumn() throws SQLException {
        run("ALTER TABLE Person ADD COLUMN age int NULL");

        doSleep(500);

        // Third constructor argument: nullable = true.
        QueryField c = new QueryField("AGE", Integer.class.getName(), true);

        checkTableState(QueryUtils.DFLT_SCHEMA, "PERSON", c);
    }

    /**
     * Test that {@code ADD COLUMN} fails for non dynamic table that has flat value.
     */
    @SuppressWarnings({"unchecked", "ThrowFromFinallyBlock"})
    public void testTestAlterTableOnFlatValueNonDynamicTable() {
        CacheConfiguration c =
            new CacheConfiguration("ints").setIndexedTypes(Integer.class, Integer.class)
                .setSqlSchema(QueryUtils.DFLT_SCHEMA);

        try {
            grid(nodeIndex()).getOrCreateCache(c);

            doTestAlterTableOnFlatValue("INTEGER");
        }
        finally {
            grid(nodeIndex()).destroyCache("ints");
        }
    }

    /**
     * Test that {@code ADD COLUMN} fails for dynamic table that has flat value.
     */
    @SuppressWarnings({"unchecked", "ThrowFromFinallyBlock"})
    public void testTestAlterTableOnFlatValueDynamicTable() {
        try {
            run("CREATE TABLE TEST (id int primary key, x varchar) with \"wrap_value=false\"");

            doTestAlterTableOnFlatValue("TEST");
        }
        finally {
            run("DROP TABLE TEST");
        }
    }

    /**
     * Test that {@code ADD COLUMN} fails for tables that have flat value.
     * @param tblName table name.
     */
    private void doTestAlterTableOnFlatValue(String tblName) {
        assertThrows("ALTER TABLE " + tblName + " ADD COLUMN y varchar",
            "Cannot add column(s) because table was created with WRAP_VALUE=false option.");
    }

    /**
     * @return Node index to run queries on.
     */
    protected abstract int nodeIndex();

    /**
     * Run specified statement expected to throw {@code IgniteSqlException} with expected specified message.
     * @param sql Statement.
     * @param msg Expected message.
     */
    @SuppressWarnings("ThrowableResultOfMethodCallIgnored")
    protected void assertThrows(final String sql, String msg) {
        assertThrows(grid(nodeIndex()), sql, msg);
    }

    /**
     * Execute SQL command and return resulting dataset.
     * @param sql Statement.
     * @return result.
     */
    protected List<List<?>> run(String sql) {
        return run(grid(nodeIndex()), sql);
    }

    /** City class. Simple indexed value type used by the static caches. */
    private final static class City {
        /** City id. */
        @QuerySqlField
        private int id;

        /** City name. */
        @QuerySqlField
        private String name;

        /** City state. */
        @QuerySqlField
        private String state;

        /**
         * @return City id.
         */
        public int id() {
            return id;
        }

        /**
         * @param id City id.
         */
        public void id(int id) {
            this.id = id;
        }

        /**
         * @return City name.
         */
        public String name() {
            return name;
        }

        /**
         * @param name City name.
         */
        public void name(String name) {
            this.name = name;
        }

        /**
         * @return City state.
         */
        public String state() {
            return state;
        }

        /**
         * @param state City state.
         */
        public void state(String state) {
            this.state = state;
        }
    }
}
| |
package edu.mit.collab.designer;
import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import javax.swing.JOptionPane;
import javax.swing.event.EventListenerList;
import org.apache.commons.math3.linear.RealVector;
import com.google.gson.Gson;
import edu.mit.collab.util.HLAfloatVector;
import edu.mit.collab.util.HLAintegerMatrix;
import edu.mit.collab.util.HLAstringVector;
import edu.mit.collab.util.Utilities;
import hla.rti1516e.AttributeHandleSet;
import hla.rti1516e.AttributeHandleValueMap;
import hla.rti1516e.CallbackModel;
import hla.rti1516e.FederateHandle;
import hla.rti1516e.LogicalTime;
import hla.rti1516e.MessageRetractionHandle;
import hla.rti1516e.NullFederateAmbassador;
import hla.rti1516e.ObjectClassHandle;
import hla.rti1516e.ObjectInstanceHandle;
import hla.rti1516e.OrderType;
import hla.rti1516e.RTIambassador;
import hla.rti1516e.ResignAction;
import hla.rti1516e.RtiFactory;
import hla.rti1516e.RtiFactoryFactory;
import hla.rti1516e.TransportationTypeHandle;
import hla.rti1516e.encoding.ByteWrapper;
import hla.rti1516e.encoding.EncoderException;
import hla.rti1516e.encoding.EncoderFactory;
import hla.rti1516e.encoding.HLAboolean;
import hla.rti1516e.encoding.HLAinteger32BE;
import hla.rti1516e.encoding.HLAunicodeString;
import hla.rti1516e.exceptions.AlreadyConnected;
import hla.rti1516e.exceptions.AttributeNotDefined;
import hla.rti1516e.exceptions.AttributeNotOwned;
import hla.rti1516e.exceptions.CallNotAllowedFromWithinCallback;
import hla.rti1516e.exceptions.ConnectionFailed;
import hla.rti1516e.exceptions.CouldNotCreateLogicalTimeFactory;
import hla.rti1516e.exceptions.CouldNotOpenFDD;
import hla.rti1516e.exceptions.ErrorReadingFDD;
import hla.rti1516e.exceptions.FederateAlreadyExecutionMember;
import hla.rti1516e.exceptions.FederateIsExecutionMember;
import hla.rti1516e.exceptions.FederateNameAlreadyInUse;
import hla.rti1516e.exceptions.FederateNotExecutionMember;
import hla.rti1516e.exceptions.FederateOwnsAttributes;
import hla.rti1516e.exceptions.FederatesCurrentlyJoined;
import hla.rti1516e.exceptions.FederationExecutionAlreadyExists;
import hla.rti1516e.exceptions.FederationExecutionDoesNotExist;
import hla.rti1516e.exceptions.InconsistentFDD;
import hla.rti1516e.exceptions.InvalidLocalSettingsDesignator;
import hla.rti1516e.exceptions.InvalidObjectClassHandle;
import hla.rti1516e.exceptions.InvalidResignAction;
import hla.rti1516e.exceptions.NameNotFound;
import hla.rti1516e.exceptions.NotConnected;
import hla.rti1516e.exceptions.ObjectClassNotDefined;
import hla.rti1516e.exceptions.ObjectClassNotPublished;
import hla.rti1516e.exceptions.ObjectInstanceNotKnown;
import hla.rti1516e.exceptions.OwnershipAcquisitionPending;
import hla.rti1516e.exceptions.RTIinternalError;
import hla.rti1516e.exceptions.RestoreInProgress;
import hla.rti1516e.exceptions.SaveInProgress;
import hla.rti1516e.exceptions.UnsupportedCallbackModel;
/**
* The federate ambassador interface to the RTI for the designer application.
* This class handles all of the interactions with the RTI including setting up
* the connection to a federation and receiving all messages from other
* federates.
*
* @author Paul T. Grogan, ptgrogan@mit.edu
*/
public class DesignerAmbassador extends NullFederateAmbassador {
    /** Kinds of change observed on a remote manager object; used to pick
     * which {@code ManagerListener} callback to invoke. */
    private static enum ManagerAction {ADD, MODEL_UPDATE,
        OUTPUT_UPDATE, REMOVE};

    // the variables below define configuration strings for various
    // commands issued to the RTI ambassador
    private static final String federateType = "designer";

    // object class and attribute names of the remote manager object
    // (subscribed to by this federate)
    private static final String managerClassName = "HLAobjectRoot.Manager";
    private static final String outputAttributeName = "Output";
    private static final String initialInputAttributeName = "InitialInput";
    private static final String targetOutputAttributeName = "TargetOutput";
    private static final String activeModelAttributeName = "ActiveModel";
    private static final String inputIndicesAttributeName = "InputIndices";
    private static final String outputIndicesAttributeName = "OutputIndices";
    private static final String inputLabelsAttributeName = "InputLabels";
    private static final String outputLabelsAttributeName = "OutputLabels";

    // object class and attribute names published by this designer federate
    private static final String designerClassName = "HLAobjectRoot.Designer";
    private static final String inputAttributeName = "Input";
    private static final String indexAttributeName = "Index";
    private static final String readyAttributeName = "Ready";

    private transient String objectInstanceName; // set upon connection to RTI
    private final RTIambassador rtiAmbassador; // immutable
    private final EncoderFactory encoderFactory; // immutable
    private final Properties properties; // mutable
    private final HLAfloatVector input; // mutable
    private final HLAinteger32BE index; // mutable
    private final HLAboolean ready; // mutable
    private final EventListenerList listenerList = new EventListenerList(); // mutable

    // synchronized mutable map to support multi-threaded application
    // (RTI callbacks may arrive on a different thread than the UI)
    private final Map<ObjectInstanceHandle, Manager> managers =
        Collections.synchronizedMap(
            new HashMap<ObjectInstanceHandle, Manager>());
/**
* Instantiates a new designer ambassador.
*
* @param designerIndex the designer index
* @throws RTIinternalError the RTI internal error
*/
public DesignerAmbassador(int designerIndex) throws RTIinternalError {
properties = new Properties();
try {
InputStream in = getClass().getClassLoader().getResourceAsStream(
Utilities.PROPERTIES_PATH);
properties.load(in);
in.close();
} catch (IOException e) {
e.printStackTrace();
}
String rtiName = properties.getProperty("rtiName", null);
// create the RTI factory and store ambassador and encoder objects
RtiFactory rtiFactory;
if(rtiName == null) {
rtiFactory = RtiFactoryFactory.getRtiFactory();
} else {
rtiFactory = RtiFactoryFactory.getRtiFactory(rtiName);
}
rtiAmbassador = rtiFactory.getRtiAmbassador();
encoderFactory = rtiFactory.getEncoderFactory();
// create hla-compatible data elements for encoding/decoding values
input = new HLAfloatVector(encoderFactory);
index = encoderFactory.createHLAinteger32BE(designerIndex);
ready = encoderFactory.createHLAboolean();
}
/**
* Adds the manager listener.
*
* @param listener the listener
*/
public void addManagerListener(ManagerListener listener) {
// add listener to list
listenerList.add(ManagerListener.class, listener);
}
/* (non-Javadoc)
* @see hla.rti1516e.NullFederateAmbassador#discoverObjectInstance(hla.rti1516e.ObjectInstanceHandle, hla.rti1516e.ObjectClassHandle, java.lang.String)
*/
@Override
public void discoverObjectInstance(ObjectInstanceHandle theObject,
ObjectClassHandle theObjectClass,
String objectName) {
// re-direct method to single method signature
discoverObjectInstance(theObject, theObjectClass, objectName, null);
}
/* (non-Javadoc)
* @see hla.rti1516e.NullFederateAmbassador#discoverObjectInstance(hla.rti1516e.ObjectInstanceHandle, hla.rti1516e.ObjectClassHandle, java.lang.String)
*/
@Override
public void discoverObjectInstance(ObjectInstanceHandle theObject,
ObjectClassHandle theObjectClass,
String objectName,
FederateHandle producingFederate) {
// this method is called by the RTI when a new object is "discovered"
// in this case, we are only expecting managers which should be added
// to the list of discovered managers and request attribute updates
try {
// check if object is a designer (shouldn't be anything else!)
if(theObjectClass.equals(
rtiAmbassador.getObjectClassHandle(managerClassName))) {
// create new object model
Manager manager = new Manager(objectName);
// add object class handle and manager object to
// thread-safe map using a synchronized block
synchronized(managers) {
managers.put(theObject, manager);
}
// create a new attribute handle set to request updates of the
// manager's attributes
AttributeHandleSet attributes = rtiAmbassador
.getAttributeHandleSetFactory().create();
// add initial input attribute
attributes.add(rtiAmbassador.getAttributeHandle(
rtiAmbassador.getObjectClassHandle(managerClassName),
initialInputAttributeName));
// add target output attribute
attributes.add(rtiAmbassador.getAttributeHandle(
rtiAmbassador.getObjectClassHandle(managerClassName),
targetOutputAttributeName));
// add output attribute
attributes.add(rtiAmbassador.getAttributeHandle(
rtiAmbassador.getObjectClassHandle(managerClassName),
outputAttributeName));
// add active model attribute
attributes.add(rtiAmbassador.getAttributeHandle(
rtiAmbassador.getObjectClassHandle(managerClassName),
activeModelAttributeName));
// add input indices attribute
attributes.add(rtiAmbassador.getAttributeHandle(
rtiAmbassador.getObjectClassHandle(managerClassName),
inputIndicesAttributeName));
// add output indices attribute
attributes.add(rtiAmbassador.getAttributeHandle(
rtiAmbassador.getObjectClassHandle(managerClassName),
outputIndicesAttributeName));
// add input labels attribute
attributes.add(rtiAmbassador.getAttributeHandle(
rtiAmbassador.getObjectClassHandle(managerClassName),
inputLabelsAttributeName));
// add output labels attribute
attributes.add(rtiAmbassador.getAttributeHandle(
rtiAmbassador.getObjectClassHandle(managerClassName),
outputLabelsAttributeName));
// issue request attribute value update service call
rtiAmbassador.requestAttributeValueUpdate(theObject,
attributes, new byte[0]);
// notify listeners that a manager has been discovered
fireManagerEvent(ManagerAction.ADD,
new ManagerEvent(this, manager));
}
} catch (Exception ex) {
// in the case of an exception (from the request attribute value
// update call), print stack trace and show error message
ex.printStackTrace();
JOptionPane.showMessageDialog(null, "An exception of type " +
ex.getMessage() + " occurred while discovering an object. " +
"See stack trace for more information.",
"Error", JOptionPane.ERROR_MESSAGE);
}
}
/**
* Fires a manager event corresponding to an observed action.
*
* @param action the action
* @param event the event
*/
private void fireManagerEvent(ManagerAction action, ManagerEvent event) {
// get the list of manager listeners
ManagerListener[] listeners = listenerList.getListeners(
ManagerListener.class);
// for each listener, notify using the appropriate method
for(int i = 0; i < listeners.length; i++) {
switch(action) {
case ADD:
listeners[i].managerAdded(event);
break;
case MODEL_UPDATE:
listeners[i].managerModelModified(event);
break;
case OUTPUT_UPDATE:
listeners[i].managerOutputModified(event);
break;
case REMOVE:
listeners[i].managerRemoved(event);
}
}
}
/**
* Gets the instance name issued by the RTI.
*
* @return the instance name
*/
public String getInstanceName() {
return objectInstanceName;
}
/* (non-Javadoc)
* @see hla.rti1516e.NullFederateAmbassador#provideAttributeValueUpdate(hla.rti1516e.ObjectInstanceHandle, hla.rti1516e.AttributeHandleSet, byte[])
*/
public void provideAttributeValueUpdate(ObjectInstanceHandle theObject,
AttributeHandleSet theAttributes,
byte[] userSuppliedTag) {
// this method is called by the RTI when this object attribute values
// owned by this federate are requested by another federate. in this
// case, this corresponds to a designer requesting attribute values
// of the manager
try {
// check to make sure that the object requested is this manager
if(theObject.equals(rtiAmbassador.getObjectInstanceHandle(
objectInstanceName))) {
// create an attribute handle value map to store data
AttributeHandleValueMap attributes = rtiAmbassador.
getAttributeHandleValueMapFactory().create(3);
// if the input is requested, add it to the map
if(theAttributes.contains(rtiAmbassador.getAttributeHandle(
rtiAmbassador.getObjectClassHandle(designerClassName),
inputAttributeName))) {
attributes.put(rtiAmbassador.getAttributeHandle(
rtiAmbassador.getObjectClassHandle(designerClassName),
inputAttributeName), input.toByteArray());
}
// if the index is requested, add it to the map
if(theAttributes.contains(rtiAmbassador.getAttributeHandle(
rtiAmbassador.getObjectClassHandle(designerClassName),
indexAttributeName))) {
attributes.put(rtiAmbassador.getAttributeHandle(
rtiAmbassador.getObjectClassHandle(designerClassName),
indexAttributeName), index.toByteArray());
}
// if the ready state is requested, add it to the map
if(theAttributes.contains(rtiAmbassador.getAttributeHandle(
rtiAmbassador.getObjectClassHandle(designerClassName),
readyAttributeName))) {
attributes.put(rtiAmbassador.getAttributeHandle(
rtiAmbassador.getObjectClassHandle(designerClassName),
readyAttributeName), ready.toByteArray());
}
// use the rti's update attribute value service to issue updates
rtiAmbassador.updateAttributeValues(
rtiAmbassador.getObjectInstanceHandle(objectInstanceName),
attributes, new byte[0]);
}
} catch (Exception ex) {
// in the case of an exception (from the update attribute value
// call), print stack trace and show error message
ex.printStackTrace();
JOptionPane.showMessageDialog(null, "An exception of type " +
ex.getMessage() + " occurred while providing attribute " +
"updates. See stack trace for more information.",
"Error", JOptionPane.ERROR_MESSAGE);
}
}
/**
* Configures the published object class attributes.
*
* @throws FederateNotExecutionMember the federate not execution member
* @throws NotConnected the not connected
* @throws NameNotFound the name not found
* @throws InvalidObjectClassHandle the invalid object class handle
* @throws RTIinternalError the RTI internal error
* @throws AttributeNotDefined the attribute not defined
* @throws ObjectClassNotDefined the object class not defined
* @throws SaveInProgress the save in progress
* @throws RestoreInProgress the restore in progress
*/
private void publish()
throws FederateNotExecutionMember, NotConnected, NameNotFound,
InvalidObjectClassHandle, RTIinternalError, AttributeNotDefined,
ObjectClassNotDefined, SaveInProgress, RestoreInProgress {
// create a new attribute handle set to store attributes
AttributeHandleSet attributeHandleSet = rtiAmbassador.
getAttributeHandleSetFactory().create();
// add the input to the set
attributeHandleSet.add(rtiAmbassador.getAttributeHandle(
rtiAmbassador.getObjectClassHandle(designerClassName),
inputAttributeName));
// add the index to the set
attributeHandleSet.add(rtiAmbassador.getAttributeHandle(
rtiAmbassador.getObjectClassHandle(designerClassName),
indexAttributeName));
// add the ready state to the set
attributeHandleSet.add(rtiAmbassador.getAttributeHandle(
rtiAmbassador.getObjectClassHandle(designerClassName),
readyAttributeName));
// use the RTI service to publish object class attributes
rtiAmbassador.publishObjectClassAttributes(
rtiAmbassador.getObjectClassHandle(designerClassName),
attributeHandleSet);
}
/* (non-Javadoc)
* @see hla.rti1516e.NullFederateAmbassador#reflectAttributeValues(hla.rti1516e.ObjectInstanceHandle, hla.rti1516e.AttributeHandleValueMap, byte[], hla.rti1516e.OrderType, hla.rti1516e.TransportationTypeHandle, hla.rti1516e.LogicalTime, hla.rti1516e.OrderType, hla.rti1516e.MessageRetractionHandle, hla.rti1516e.FederateAmbassador.SupplementalReflectInfo)
*/
@Override
public void reflectAttributeValues(ObjectInstanceHandle theObject,
AttributeHandleValueMap theAttributes,
byte[] userSuppliedTag,
OrderType sentOrdering,
TransportationTypeHandle theTransport,
LogicalTime theTime,
OrderType receivedOrdering,
MessageRetractionHandle retractionHandle,
SupplementalReflectInfo reflectInfo) {
// this method is called by the RTI when remote objects update their
// values. this method must update any local representations of the
// remote objects to reflect the processed updates
// create a gson object to help with log message formatting
Gson gson = new Gson();
try {
// check whether the object has been previously discovered
Manager manager = null;
synchronized(managers) {
manager = managers.get(theObject);
}
// if manager has not been discovered, simply return
if(manager == null) {
return;
}
// get the data corresponding to the active model attribute
ByteWrapper wrapper = theAttributes.getValueReference(
rtiAmbassador.getAttributeHandle(
rtiAmbassador.getObjectClassHandle(
managerClassName),
activeModelAttributeName));
if(wrapper != null) {
// active model has changed -- start a complete model update
// decode into an HLA data element
HLAunicodeString string =
encoderFactory.createHLAunicodeString();
string.decode(wrapper);
System.out.println("Designer " + (index.getValue()+1) +
" Log: setting manager active model to " +
string.getValue());
// update manager object
manager.setActiveModel(string.getValue());
// get the data corresponding to the initial input attribute
wrapper = theAttributes.getValueReference(
rtiAmbassador.getAttributeHandle(
rtiAmbassador.getObjectClassHandle(
managerClassName),
initialInputAttributeName));
if(wrapper != null) {
// wrapper has data; decode into an HLA data element
HLAfloatVector vector = new HLAfloatVector(encoderFactory);
vector.decode(wrapper);
System.out.println("Designer " + (index.getValue()+1) +
" Log: setting manager initial input to " +
vector.getValue());
// update manager object
manager.setInitialInput(vector.getValue());
}
// get the data corresponding to the target output attribute
wrapper = theAttributes.getValueReference(
rtiAmbassador.getAttributeHandle(
rtiAmbassador.getObjectClassHandle(
managerClassName),
targetOutputAttributeName));
if(wrapper != null) {
// wrapper has data; decode into an HLA data element
HLAfloatVector vector = new HLAfloatVector(encoderFactory);
vector.decode(wrapper);
System.out.println("Designer " + (index.getValue()+1) +
" Log: setting manager target output to " +
vector.getValue());
// update manager object
manager.setTargetOutput(vector.getValue());
}
// get the data corresponding to the output attribute
wrapper = theAttributes.getValueReference(
rtiAmbassador.getAttributeHandle(
rtiAmbassador.getObjectClassHandle(
managerClassName),
outputAttributeName));
if(wrapper != null) {
// wrapper has data; decode into an HLA data element
HLAfloatVector vector =
new HLAfloatVector(encoderFactory);
vector.decode(wrapper);
System.out.println("Designer " + (index.getValue()+1) +
" Log: setting manager output to " +
vector.getValue());
// update manager object
manager.setOutput(vector.getValue());
}
// get the data corresponding to the input indices attribute
wrapper = theAttributes.getValueReference(
rtiAmbassador.getAttributeHandle(
rtiAmbassador.getObjectClassHandle(
managerClassName),
inputIndicesAttributeName));
if(wrapper != null) {
// wrapper has data; decode into an HLA data element
HLAintegerMatrix matrix =
new HLAintegerMatrix(encoderFactory);
matrix.decode(wrapper);
System.out.println("Designer " + (index.getValue()+1) +
" Log: setting manager input indices to " +
gson.toJson(matrix.getValue()));
// update manager object
manager.setInputIndices(matrix.getValue());
}
// get the data corresponding to the output indices attribute
wrapper = theAttributes.getValueReference(
rtiAmbassador.getAttributeHandle(
rtiAmbassador.getObjectClassHandle(
managerClassName),
outputIndicesAttributeName));
if(wrapper != null) {
// wrapper has data; decode into an HLA data element
HLAintegerMatrix matrix =
new HLAintegerMatrix(encoderFactory);
matrix.decode(wrapper);
System.out.println("Designer " + (index.getValue()+1) +
" Log: setting manager output indices to " +
gson.toJson(matrix.getValue()));
// update manager object
manager.setOutputIndices(matrix.getValue());
}
// get the data corresponding to the input labels attribute
wrapper = theAttributes.getValueReference(
rtiAmbassador.getAttributeHandle(
rtiAmbassador.getObjectClassHandle(
managerClassName),
inputLabelsAttributeName));
if(wrapper != null) {
// wrapper has data; decode into an HLA data element
HLAstringVector vector =
new HLAstringVector(encoderFactory);
vector.decode(wrapper);
System.out.println("Designer " + (index.getValue()+1) +
" Log: setting manager input labels to " +
gson.toJson(vector.getValue()));
// update manager object
manager.setInputLabels(vector.getValue());
}
// get the data corresponding to the output labels attribute
wrapper = theAttributes.getValueReference(
rtiAmbassador.getAttributeHandle(
rtiAmbassador.getObjectClassHandle(
managerClassName),
outputLabelsAttributeName));
if(wrapper != null) {
// wrapper has data; decode into an HLA data element
HLAstringVector vector =
new HLAstringVector(encoderFactory);
vector.decode(wrapper);
System.out.println("Designer " + (index.getValue()+1) +
" Log: setting manager output labels to " +
gson.toJson(vector.getValue()));
// update manager object
manager.setOutputLabels(vector.getValue());
}
// update manager model
fireManagerEvent(ManagerAction.MODEL_UPDATE,
new ManagerEvent(this, manager));
} else {
// get the data corresponding to the output attribute
wrapper = theAttributes.getValueReference(
rtiAmbassador.getAttributeHandle(
rtiAmbassador.getObjectClassHandle(
managerClassName),
outputAttributeName));
if(wrapper != null) {
// wrapper has data; decode into an HLA data element
HLAfloatVector vector =
new HLAfloatVector(encoderFactory);
vector.decode(wrapper);
System.out.println("Designer " + (index.getValue()+1) +
" Log: setting manager output to " +
vector.getValue());
// update manager object and set flag to update output
manager.setOutput(vector.getValue());
// fire update event
fireManagerEvent(ManagerAction.OUTPUT_UPDATE,
new ManagerEvent(this, manager));
}
}
} catch (Exception ex) {
// in the case of an exception (from the various RTI calls),
// print stack trace and show error message
ex.printStackTrace();
JOptionPane.showMessageDialog(null, "An exception of type " +
ex.getMessage() + " occurred while decoding an " +
"attribute update. See stack trace for more information.",
"Error", JOptionPane.ERROR_MESSAGE);
}
}
/* (non-Javadoc)
* @see hla.rti1516e.NullFederateAmbassador#reflectAttributeValues(hla.rti1516e.ObjectInstanceHandle, hla.rti1516e.AttributeHandleValueMap, byte[], hla.rti1516e.OrderType, hla.rti1516e.TransportationTypeHandle, hla.rti1516e.LogicalTime, hla.rti1516e.OrderType, hla.rti1516e.FederateAmbassador.SupplementalReflectInfo)
*/
@Override
public void reflectAttributeValues(ObjectInstanceHandle theObject,
AttributeHandleValueMap theAttributes,
byte[] userSuppliedTag,
OrderType sentOrdering,
TransportationTypeHandle theTransport,
LogicalTime theTime,
OrderType receivedOrdering,
SupplementalReflectInfo reflectInfo) {
// re-direct method to single method signature
reflectAttributeValues(theObject, theAttributes, userSuppliedTag,
sentOrdering, theTransport, theTime, receivedOrdering,
null, reflectInfo);
}
/* (non-Javadoc)
* @see hla.rti1516e.NullFederateAmbassador#reflectAttributeValues(hla.rti1516e.ObjectInstanceHandle, hla.rti1516e.AttributeHandleValueMap, byte[], hla.rti1516e.OrderType, hla.rti1516e.TransportationTypeHandle, hla.rti1516e.FederateAmbassador.SupplementalReflectInfo)
*/
@Override
public void reflectAttributeValues(ObjectInstanceHandle theObject,
AttributeHandleValueMap theAttributes,
byte[] userSuppliedTag,
OrderType sentOrdering,
TransportationTypeHandle theTransport,
SupplementalReflectInfo reflectInfo) {
// re-direct method to single method signature
reflectAttributeValues(theObject, theAttributes, userSuppliedTag,
sentOrdering, theTransport, null, null, reflectInfo);
}
/**
* Removes the designer listener.
*
* @param listener the listener
*/
public void removeManagerListener(ManagerListener listener) {
// remove the lister from the list
listenerList.remove(ManagerListener.class, listener);
}
/* (non-Javadoc)
* @see hla.rti1516e.NullFederateAmbassador#removeObjectInstance(hla.rti1516e.ObjectInstanceHandle, byte[], hla.rti1516e.OrderType, hla.rti1516e.LogicalTime, hla.rti1516e.OrderType, hla.rti1516e.MessageRetractionHandle, hla.rti1516e.FederateAmbassador.SupplementalRemoveInfo)
*/
@Override
public void removeObjectInstance(ObjectInstanceHandle theObject,
byte[] userSuppliedTag,
OrderType sentOrdering,
LogicalTime theTime,
OrderType receivedOrdering,
MessageRetractionHandle retractionHandle,
SupplementalRemoveInfo removeInfo) {
// try to remove manager from the manager map
Manager manager = null;
synchronized(managers) {
manager = managers.remove(theObject);
}
if(manager != null) {
// notify listeners that manager has been removed
fireManagerEvent(ManagerAction.REMOVE,
new ManagerEvent(this, manager));
}
}
/* (non-Javadoc)
* @see hla.rti1516e.NullFederateAmbassador#removeObjectInstance(hla.rti1516e.ObjectInstanceHandle, byte[], hla.rti1516e.OrderType, hla.rti1516e.LogicalTime, hla.rti1516e.OrderType, hla.rti1516e.FederateAmbassador.SupplementalRemoveInfo)
*/
@Override
public void removeObjectInstance(ObjectInstanceHandle theObject,
byte[] userSuppliedTag,
OrderType sentOrdering,
LogicalTime theTime,
OrderType receivedOrdering,
SupplementalRemoveInfo removeInfo) {
// re-direct method to single method signature
removeObjectInstance(theObject, userSuppliedTag, sentOrdering,
theTime, receivedOrdering, null, removeInfo);
}
/* (non-Javadoc)
* @see hla.rti1516e.NullFederateAmbassador#removeObjectInstance(hla.rti1516e.ObjectInstanceHandle, byte[], hla.rti1516e.OrderType, hla.rti1516e.FederateAmbassador.SupplementalRemoveInfo)
*/
@Override
public void removeObjectInstance(ObjectInstanceHandle theObject,
byte[] userSuppliedTag,
OrderType sentOrdering,
SupplementalRemoveInfo removeInfo) {
// re-direct method to single method signature
removeObjectInstance(theObject, userSuppliedTag, sentOrdering,
null, null, null, removeInfo);
}
/**
* Shut down.
*
* @throws InvalidResignAction the invalid resign action
* @throws OwnershipAcquisitionPending the ownership acquisition pending
* @throws FederateOwnsAttributes the federate owns attributes
* @throws CallNotAllowedFromWithinCallback the call not allowed from within callback
* @throws RTIinternalError the RTI internal error
* @throws FederateIsExecutionMember the federate is execution member
*/
public void shutDown()
throws InvalidResignAction, OwnershipAcquisitionPending,
FederateOwnsAttributes, CallNotAllowedFromWithinCallback,
RTIinternalError, FederateIsExecutionMember {
// try to resign from the federation execution; ignore
// exceptions if already resigned or not connected
try {
rtiAmbassador.resignFederationExecution(
ResignAction.DELETE_OBJECTS_THEN_DIVEST);
} catch (FederateNotExecutionMember ignored) {
} catch (NotConnected ignored) { }
// once resigned, try to destroy federation; ignore
// exceptions if other federates still joined, federation
// already destroyed, or not connected
try {
rtiAmbassador.destroyFederationExecution(properties.getProperty("federationName", "collab"));
} catch (FederatesCurrentlyJoined ignored) {
} catch (FederationExecutionDoesNotExist ignored) {
} catch (NotConnected ignored) {
}
// disconnect from the rti
rtiAmbassador.disconnect();
}
/**
* Start up.
*
* @throws ConnectionFailed the connection failed
* @throws InvalidLocalSettingsDesignator the invalid local settings designator
* @throws UnsupportedCallbackModel the unsupported callback model
* @throws CallNotAllowedFromWithinCallback the call not allowed from within callback
* @throws RTIinternalError the RTI internal error
* @throws InconsistentFDD the inconsistent FDD
* @throws ErrorReadingFDD the error reading FDD
* @throws CouldNotOpenFDD the could not open FDD
* @throws NotConnected the not connected
* @throws MalformedURLException the malformed URL exception
* @throws CouldNotCreateLogicalTimeFactory the could not create logical time factory
* @throws FederateNameAlreadyInUse the federate name already in use
* @throws FederationExecutionDoesNotExist the federation execution does not exist
* @throws SaveInProgress the save in progress
* @throws RestoreInProgress the restore in progress
* @throws FederateNotExecutionMember the federate not execution member
* @throws NameNotFound the name not found
* @throws InvalidObjectClassHandle the invalid object class handle
* @throws AttributeNotDefined the attribute not defined
* @throws ObjectClassNotDefined the object class not defined
* @throws ObjectInstanceNotKnown the object instance not known
* @throws ObjectClassNotPublished the object class not published
*/
public void startUp()
throws ConnectionFailed, InvalidLocalSettingsDesignator,
UnsupportedCallbackModel, CallNotAllowedFromWithinCallback,
RTIinternalError, InconsistentFDD, ErrorReadingFDD,
CouldNotOpenFDD, NotConnected, MalformedURLException,
CouldNotCreateLogicalTimeFactory, FederateNameAlreadyInUse,
FederationExecutionDoesNotExist, SaveInProgress,
RestoreInProgress, FederateNotExecutionMember, NameNotFound,
InvalidObjectClassHandle, AttributeNotDefined,
ObjectClassNotDefined, ObjectInstanceNotKnown,
ObjectClassNotPublished {
// try to connect to the RTI; ignore if already connected
try {
// use the HLA_Evoked model to require explicit callbacks
rtiAmbassador.connect(this, CallbackModel.HLA_IMMEDIATE);
} catch(AlreadyConnected ignored) { }
// try to create the federation execution using the FOM file;
// ignore if already exists
try {
rtiAmbassador.createFederationExecution(
properties.getProperty("federationName", "collab"),
getClass().getClassLoader().getResource(
properties.getProperty("fomPath", "resources/collab.xml")));
} catch(FederationExecutionAlreadyExists ignored) { }
// try to join the federation execution; ignore if already joined
try {
rtiAmbassador.joinFederationExecution("Designer " + index.getValue(),
federateType, properties.getProperty("federationName", "collab"));
} catch(FederateAlreadyExecutionMember ignored) { }
// publish and subscribe to object class attributes
publish();
subscribe();
// register the object instance name
objectInstanceName = rtiAmbassador.getObjectInstanceName(
rtiAmbassador.registerObjectInstance(
rtiAmbassador.getObjectClassHandle(
designerClassName)));
}
/**
* Subscribe.
*
* @throws FederateNotExecutionMember the federate not execution member
* @throws NotConnected the not connected
* @throws AttributeNotDefined the attribute not defined
* @throws ObjectClassNotDefined the object class not defined
* @throws SaveInProgress the save in progress
* @throws RestoreInProgress the restore in progress
* @throws RTIinternalError the RTI internal error
* @throws NameNotFound the name not found
* @throws InvalidObjectClassHandle the invalid object class handle
*/
private void subscribe()
throws FederateNotExecutionMember, NotConnected,
AttributeNotDefined, ObjectClassNotDefined, SaveInProgress,
RestoreInProgress, RTIinternalError, NameNotFound,
InvalidObjectClassHandle {
// create an attribute handle set
AttributeHandleSet attributeHandleSet = rtiAmbassador
.getAttributeHandleSetFactory().create();
// add the initial input attribute
attributeHandleSet.add(rtiAmbassador.getAttributeHandle(
rtiAmbassador.getObjectClassHandle(managerClassName),
initialInputAttributeName));
// add the target output attribute
attributeHandleSet.add(rtiAmbassador.getAttributeHandle(
rtiAmbassador.getObjectClassHandle(managerClassName),
targetOutputAttributeName));
// add the output attribute
attributeHandleSet.add(rtiAmbassador.getAttributeHandle(
rtiAmbassador.getObjectClassHandle(managerClassName),
outputAttributeName));
// add the active model attribute
attributeHandleSet.add(rtiAmbassador.getAttributeHandle(
rtiAmbassador.getObjectClassHandle(managerClassName),
activeModelAttributeName));
// add the input indices attribute
attributeHandleSet.add(rtiAmbassador.getAttributeHandle(
rtiAmbassador.getObjectClassHandle(managerClassName),
inputIndicesAttributeName));
// add the output indices attribute
attributeHandleSet.add(rtiAmbassador.getAttributeHandle(
rtiAmbassador.getObjectClassHandle(managerClassName),
outputIndicesAttributeName));
// add the input labels indices attribute
attributeHandleSet.add(rtiAmbassador.getAttributeHandle(
rtiAmbassador.getObjectClassHandle(managerClassName),
inputLabelsAttributeName));
// add the output labels attribute
attributeHandleSet.add(rtiAmbassador.getAttributeHandle(
rtiAmbassador.getObjectClassHandle(managerClassName),
outputLabelsAttributeName));
// use the RTI service to subscribe to the defined attributes
rtiAmbassador.subscribeObjectClassAttributes(
rtiAmbassador.getObjectClassHandle(managerClassName),
attributeHandleSet);
}
/**
* Update index attribute.
*
* @param indexValue the index value
* @throws FederateNotExecutionMember the federate not execution member
* @throws NotConnected the not connected
* @throws NameNotFound the name not found
* @throws InvalidObjectClassHandle the invalid object class handle
* @throws RTIinternalError the RTI internal error
* @throws EncoderException the encoder exception
* @throws AttributeNotOwned the attribute not owned
* @throws AttributeNotDefined the attribute not defined
* @throws ObjectInstanceNotKnown the object instance not known
* @throws SaveInProgress the save in progress
* @throws RestoreInProgress the restore in progress
*/
public void updateIndexAttribute(int indexValue)
throws FederateNotExecutionMember, NotConnected, NameNotFound,
InvalidObjectClassHandle, RTIinternalError, EncoderException,
AttributeNotOwned, AttributeNotDefined, ObjectInstanceNotKnown,
SaveInProgress, RestoreInProgress {
// create an attribute handle value map to store data
AttributeHandleValueMap attributes = rtiAmbassador
.getAttributeHandleValueMapFactory().create(1);
// set HLA data element value and add to map
index.setValue(indexValue);
attributes.put(rtiAmbassador.getAttributeHandle(
rtiAmbassador.getObjectClassHandle(designerClassName),
indexAttributeName), index.toByteArray());
System.out.println("Designer " + (index.getValue()+1) +
" Log: setting index value to " + index.getValue());
// use RTI service to update attribute values using map
rtiAmbassador.updateAttributeValues(
rtiAmbassador.getObjectInstanceHandle(objectInstanceName),
attributes, new byte[0]);
}
    /**
     * Update input attribute.
     *
     * @param inputValue the input vector value to encode and send
     * @throws FederateNotExecutionMember the federate not execution member
     * @throws NotConnected the not connected
     * @throws NameNotFound the name not found
     * @throws InvalidObjectClassHandle the invalid object class handle
     * @throws RTIinternalError the RTI internal error
     * @throws EncoderException the encoder exception
     * @throws AttributeNotOwned the attribute not owned
     * @throws AttributeNotDefined the attribute not defined
     * @throws ObjectInstanceNotKnown the object instance not known
     * @throws SaveInProgress the save in progress
     * @throws RestoreInProgress the restore in progress
     */
    public void updateInputAttribute(RealVector inputValue)
            throws FederateNotExecutionMember, NotConnected, NameNotFound,
            InvalidObjectClassHandle, RTIinternalError, EncoderException,
            AttributeNotOwned, AttributeNotDefined, ObjectInstanceNotKnown,
            SaveInProgress, RestoreInProgress {
        // create an attribute handle value map to store data
        AttributeHandleValueMap attributes = rtiAmbassador
                .getAttributeHandleValueMapFactory().create(1);
        // set HLA data element value and add to map
        input.setValue(inputValue);
        attributes.put(rtiAmbassador.getAttributeHandle(
                rtiAmbassador.getObjectClassHandle(designerClassName),
                inputAttributeName), input.toByteArray());
        System.out.println("Designer " + (index.getValue()+1) +
                " Log: setting input value to " + input.getValue());
        // use RTI service to update attribute values using map
        rtiAmbassador.updateAttributeValues(
                rtiAmbassador.getObjectInstanceHandle(objectInstanceName),
                attributes, new byte[0]);
    }
    /**
     * Update state (ready) attribute.
     *
     * @param readyValue the ready value to encode and send
     * @throws FederateNotExecutionMember the federate not execution member
     * @throws NotConnected the not connected
     * @throws NameNotFound the name not found
     * @throws InvalidObjectClassHandle the invalid object class handle
     * @throws RTIinternalError the RTI internal error
     * @throws EncoderException the encoder exception
     * @throws AttributeNotOwned the attribute not owned
     * @throws AttributeNotDefined the attribute not defined
     * @throws ObjectInstanceNotKnown the object instance not known
     * @throws SaveInProgress the save in progress
     * @throws RestoreInProgress the restore in progress
     */
    public void updateStateAttribute(boolean readyValue)
            throws FederateNotExecutionMember, NotConnected, NameNotFound,
            InvalidObjectClassHandle, RTIinternalError, EncoderException,
            AttributeNotOwned, AttributeNotDefined, ObjectInstanceNotKnown,
            SaveInProgress, RestoreInProgress {
        // create an attribute handle value map to store data
        AttributeHandleValueMap attributes = rtiAmbassador
                .getAttributeHandleValueMapFactory().create(1);
        // set HLA data element value and add to map
        ready.setValue(readyValue);
        attributes.put(rtiAmbassador.getAttributeHandle(
                rtiAmbassador.getObjectClassHandle(designerClassName),
                readyAttributeName), ready.toByteArray());
        System.out.println("Designer " + (index.getValue()+1) +
                " Log: setting ready value to " + ready.getValue());
        // use RTI service to update attribute values using map
        rtiAmbassador.updateAttributeValues(
                rtiAmbassador.getObjectInstanceHandle(objectInstanceName),
                attributes, new byte[0]);
    }
}
| |
package ch.codebulb.lambdaomega.abstractions;
import static ch.codebulb.lambdaomega.F.compare;
import static ch.codebulb.lambdaomega.F.compareAsc;
import ch.codebulb.lambdaomega.M;
import ch.codebulb.lambdaomega.M.E;
import ch.codebulb.lambdaomega.V2;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.function.BiPredicate;
import java.util.function.BinaryOperator;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.function.ToDoubleFunction;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
* Adds a contract to work with functional interfaces to a {@link SequentialI}. None of these functions alter the original wrapped collection.
*/
public interface SequentialIFunctions<T> extends SequentialI<T>, FunctionsI {
    /**
     * Joins all elements into a String with the <code>delimiter</code> provided.
     */
    public default String join(CharSequence delimiter) {
        return stream().map(Object::toString).collect(Collectors.joining(delimiter));
    }
    /**
     * @see Stream#forEach(Consumer)
     */
    public default void forEach(Consumer<? super T> function) {
        stream().forEach(function);
    }
    /**
     * Collects the result of a map with a function <i>f: (T) → R</i> as a {@link Collection}.
     */
    public default <R> Collection<R> map(Function<T, R> function) {
        return stream().map(function).collect(createCollector());
    }
    /**
     * Performs a "reduce" function with a single result <i>f: (T, A) → R</i> on the elements using an accumulator,
     * injecting an <code>identity</code> element as the first accumulator value.
     * This is the second step of the "map-reduce" algorithm. Other functions such as {@link #min(Function...)} and
     * {@link #sum(ToDoubleFunction)} could actually be re-implemented using this function.
     */
    public default T reduce(T identity, BinaryOperator<T> accumulator) {
        return stream().reduce(identity, accumulator);
    }
    /**
     * Collects the result of a map with a function <i>f: (T) → {@link E}<K, V></i> as a {@link Map}, built from the returned {@link E}s.
     */
    public default <RK, RV> Map<RK, RV> mapEntries(Function<T, M.E<RK, RV>> function) {
        return stream().map(function).collect(Collectors.toMap(it -> it.k, it -> it.v));
    }
    /**
     * Returns a transformation of a collection of one depth level into a flat collection.
     *
     * @see #flattenDeep()
     */
    public default <N> Collection<N> flatten() {
        return (Collection<N>)(flatten(stream()).collect(createCollector()));
    }
    /**
     * Flattens the given stream by one level: nested {@link SequentialI} and {@link Collection}
     * elements are replaced by their own elements; all other elements are kept as-is.
     */
    static <N, T> Stream<N> flatten(Stream<T> stream) {
        return stream.flatMap(it -> {
            if (it instanceof SequentialI) {
                return ((SequentialI<N>)it).stream();
            }
            else if (it instanceof Collection) {
                return ((Collection<N>)it).stream();
            }
            else {
                return Stream.of((N)(it));
            }
        });
    }
    /**
     * Returns a transformation of a collection of an arbitrary depth level into a flat collection.
     *
     * @see #flatten()
     */
    public default <N> Collection<N> flattenDeep() {
        // Repeatedly flatten one level, reusing the logic of #flatten(Stream),
        // until no nested SequentialI / Collection elements remain.
        Collection<?> ret = toCollection();
        while (ret.stream().anyMatch(it -> it instanceof SequentialI || it instanceof Collection)) {
            ret = flatten(ret.stream()).collect(createCollector());
        }
        return ((Stream<N>)(ret.stream())).collect(createCollector());
    }
    /**
     * Returns the first element for which the <code>predicate</code> provided returns <code>true</code>,
     * or <code>null</code> if no such element is found.
     */
    public default T find(Predicate<T> predicate) {
        return stream().filter(predicate).findFirst().orElse(null);
    }
    /**
     * Returns a {@link Collection} with every element for which the <code>predicate</code> provided returns <code>true</code>.
     * This is the opposite of {@link #reject(Predicate)}.
     */
    public default Collection<T> findAll(Predicate<T> predicate) {
        return stream().filter(predicate).collect(createCollector());
    }
    /**
     * @see #findAll(Predicate)
     */
    public default Collection<T> filter(Predicate<T> predicate) {
        return findAll(predicate);
    }
    /**
     * Returns a {@link Collection} with every element for which the <code>predicate</code> provided returns <code>false</code>.
     * This is the opposite of {@link #findAll(Predicate)} / {@link #filter(Predicate)}.
     */
    public default Collection<T> reject(Predicate<T> predicate) {
        return findAll(predicate.negate());
    }
    /**
     * Returns a {@link List} where the elements are sorted <i>in ascending order</i> using the <code>keyExtractors</code> provided in order
     * as a comparator on every element.
     *
     * @see {@link #sortDescBy(Function)}, {@link #sortBy(Function...)}
     */
    public default List<T> sortAscBy(Function<T, Comparable>... keyExtractors) {
        return stream().sorted(compareAsc(keyExtractors)).collect(Collectors.toList());
    }
    /**
     * Returns a {@link List} where the elements are sorted <i>in descending order</i> using the <code>keyExtractors</code> provided in order
     * as a comparator on every element.
     *
     * @see {@link #sortAscBy(Function...)}, {@link #sortBy(Function...)}
     */
    public default List<T> sortDescBy(Function<T, Comparable>... keyExtractors) {
        return stream().sorted(compareAsc(keyExtractors).reversed()).collect(Collectors.toList());
    }
    /**
     * Returns a {@link List} where the elements are sorted using the <code>keyExtractors</code> provided in order
     * as a comparator on every element. Every keyExtractor is a function <i>f: (T) → {@link V2}<f1, Boolean></i> where the return type is
     * a {@link V2}; its 1st element is the actual keyExtractor function <i>f1</i>, its 2nd element is a Boolean specifying the sort order for that specific
     * keyExtractor: <code>true</code> for <i>ascending</i>.
     *
     * @see {@link #sortAscBy(Function...)}, {@link #sortDescBy(Function...)}
     */
    public default List<T> sortBy(Function<T, V2<Function<? super T, Comparable>, Boolean>>... keyExtractors) {
        return stream().sorted(compare(keyExtractors)).collect(Collectors.toList());
    }
    /**
     * Returns a randomly shuffled {@link List} of all elements.
     *
     * @see Collections#shuffle(List)
     */
    public default List<T> shuffle() {
        List<T> ret = new ArrayList<>(toCollection());
        Collections.shuffle(ret);
        return ret;
    }
    /**
     * Returns the element with the minimum value after invoking the <code>keyExtractors</code> provided in order as a comparator
     * on every element.
     */
    public default T min(Function<T, Comparable>... keyExtractors) {
        return Collections.min(toCollection(), compareAsc(keyExtractors));
    }
    /**
     * Returns the element with the maximum value after invoking the <code>keyExtractors</code> provided in order as a comparator
     * on every element.
     */
    public default T max(Function<T, Comparable>... keyExtractors) {
        return Collections.max(toCollection(), compareAsc(keyExtractors));
    }
    /**
     * Returns the number of elements for which the <code>predicate</code> provided returns <code>true</code>.
     */
    public default int count(Predicate<T> predicate) {
        return findAll(predicate).size();
    }
    /**
     * Returns the sum of the values obtained by applying the double-<code>mapper</code> provided to every element.
     */
    public default double sum(ToDoubleFunction<? super T> mapper) {
        return stream().collect(Collectors.summingDouble(mapper));
    }
    /**
     * Returns a {@link Map} which groups the elements using the <code>classifier</code> provided.
     */
    public default <K> Map<K, ? extends Collection<T>> groupBy(Function<? super T, ? extends K> classifier) {
        return stream().collect(Collectors.groupingBy(classifier, createCollector()));
    }
    /**
     * Like {@link #groupBy(Function)}, but there are exactly two groups: one for elements for which
     * the <code>predicate</code> returns <code>true</code>, and one for which it returns <code>false</code>
     */
    public default Map<Boolean, ? extends Collection<T>> partition(Predicate<? super T> predicate) {
        return stream().collect(Collectors.groupingBy(predicate::test, createCollector()));
    }
    /**
     * Returns <code>true</code>, if the <code>predicate</code> provided returns <code>true</code> for every element.
     */
    public default boolean every(Predicate<? super T> predicate) {
        return stream().allMatch(predicate);
    }
    /**
     * Returns <code>true</code>, if the <code>predicate</code> provided returns <code>true</code> for at least one element.
     */
    public default boolean some(Predicate<? super T> predicate) {
        return stream().anyMatch(predicate);
    }
    /**
     * Returns <code>true</code>, if the <code>predicate</code> provided returns <code>true</code> for none of the elements.
     */
    public default boolean none(Predicate<? super T> predicate) {
        return stream().noneMatch(predicate);
    }
}
| |
/*
* Copyright 2015-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.cxx;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
import com.facebook.buck.io.MorePaths;
import com.facebook.buck.io.ProjectFilesystem;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.BuildTargetFactory;
import com.facebook.buck.model.BuildTargets;
import com.facebook.buck.rules.AbstractBuildRule;
import com.facebook.buck.rules.BuildContext;
import com.facebook.buck.rules.BuildRuleResolver;
import com.facebook.buck.rules.DefaultTargetNodeToBuildRuleTransformer;
import com.facebook.buck.rules.FakeBuildContext;
import com.facebook.buck.rules.FakeBuildRuleParamsBuilder;
import com.facebook.buck.rules.FakeBuildableContext;
import com.facebook.buck.rules.PathSourcePath;
import com.facebook.buck.rules.RuleKey;
import com.facebook.buck.rules.SourcePath;
import com.facebook.buck.rules.SourcePathResolver;
import com.facebook.buck.rules.TargetGraph;
import com.facebook.buck.rules.keys.DefaultRuleKeyBuilderFactory;
import com.facebook.buck.step.Step;
import com.facebook.buck.step.fs.MakeCleanDirectoryStep;
import com.facebook.buck.step.fs.SymlinkTreeStep;
import com.facebook.buck.testutil.FakeFileHashCache;
import com.facebook.buck.testutil.FakeProjectFilesystem;
import com.facebook.buck.testutil.integration.TemporaryPaths;
import com.google.common.base.Charsets;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
/**
 * Tests for {@code HeaderSymlinkTreeWithHeaderMap}: verifies the build steps the
 * rule emits and that its rule key depends on the link map but not on the
 * contents of the link targets.
 */
public class HeaderSymlinkTreeWithHeaderMapTest {
  @Rule
  public final TemporaryPaths tmpDir = new TemporaryPaths();
  // Shared fixture state, populated by setUp() before each test.
  private ProjectFilesystem projectFilesystem;
  private BuildTarget buildTarget;
  private HeaderSymlinkTreeWithHeaderMap symlinkTreeBuildRule;
  private ImmutableMap<Path, SourcePath> links;
  private Path symlinkTreeRoot;
  private Path headerMapPath;
  /**
   * Creates a symlink tree rule with two links ("file" and
   * "directory/then/file") pointing at freshly written temp files.
   */
  @Before
  public void setUp() throws Exception {
    projectFilesystem = new FakeProjectFilesystem(tmpDir.getRoot());
    // Create a build target to use when building the symlink tree.
    buildTarget = BuildTargetFactory.newInstance("//test:test");
    // Get the first file we're symlinking
    Path link1 = Paths.get("file");
    Path file1 = tmpDir.newFile();
    Files.write(file1, "hello world".getBytes(Charsets.UTF_8));
    // Get the second file we're symlinking
    Path link2 = Paths.get("directory", "then", "file");
    Path file2 = tmpDir.newFile();
    Files.write(file2, "hello world".getBytes(Charsets.UTF_8));
    // Setup the map representing the link tree.
    links = ImmutableMap.<Path, SourcePath>of(
        link1,
        new PathSourcePath(
            projectFilesystem,
            MorePaths.relativize(tmpDir.getRoot(), file1)),
        link2,
        new PathSourcePath(
            projectFilesystem,
            MorePaths.relativize(tmpDir.getRoot(), file2)));
    // The output path used by the buildable for the link tree.
    symlinkTreeRoot = projectFilesystem.resolve(
        BuildTargets.getGenPath(projectFilesystem, buildTarget, "%s/symlink-tree-root"));
    // Setup the symlink tree buildable.
    symlinkTreeBuildRule = new HeaderSymlinkTreeWithHeaderMap(
        new FakeBuildRuleParamsBuilder(buildTarget).build(),
        new SourcePathResolver(
            new BuildRuleResolver(
                TargetGraph.EMPTY,
                new DefaultTargetNodeToBuildRuleTransformer())
        ),
        symlinkTreeRoot,
        links);
    headerMapPath = symlinkTreeBuildRule.getPathToOutput();
  }
  /**
   * The rule's build steps should clean the tree root, create the symlinks,
   * and then write a header map with buck-out-relative entries.
   */
  @Test
  public void testSymlinkTreeBuildSteps() throws IOException {
    BuildContext buildContext = FakeBuildContext.NOOP_CONTEXT;
    ProjectFilesystem filesystem = new FakeProjectFilesystem();
    FakeBuildableContext buildableContext = new FakeBuildableContext();
    SourcePathResolver resolver = new SourcePathResolver(
        new BuildRuleResolver(TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer())
    );
    ImmutableList<Step> expectedBuildSteps =
        ImmutableList.of(
            new MakeCleanDirectoryStep(filesystem, symlinkTreeRoot),
            new SymlinkTreeStep(
                filesystem,
                symlinkTreeRoot,
                resolver.getMappedPaths(links)),
            new HeaderMapStep(
                filesystem,
                headerMapPath,
                ImmutableMap.of(
                    Paths.get("file"),
                    filesystem.resolve(filesystem.getBuckPaths().getBuckOut())
                        .relativize(symlinkTreeRoot)
                        .resolve("file"),
                    Paths.get("directory/then/file"),
                    filesystem.resolve(filesystem.getBuckPaths().getBuckOut())
                        .relativize(symlinkTreeRoot)
                        .resolve("directory/then/file"))));
    ImmutableList<Step> actualBuildSteps =
        symlinkTreeBuildRule.getBuildSteps(
            buildContext,
            buildableContext);
    // NOTE(review): the first actual step is deliberately excluded from the
    // comparison; presumably it is bookkeeping unrelated to this test — confirm.
    assertEquals(expectedBuildSteps, actualBuildSteps.subList(1, actualBuildSteps.size()));
  }
  /**
   * Building the same rule with a different link map must yield a different
   * rule key.
   */
  @Test
  public void testSymlinkTreeRuleKeyChangesIfLinkMapChanges() throws Exception {
    Path aFile = tmpDir.newFile();
    Files.write(aFile, "hello world".getBytes(Charsets.UTF_8));
    AbstractBuildRule modifiedSymlinkTreeBuildRule = new HeaderSymlinkTreeWithHeaderMap(
        new FakeBuildRuleParamsBuilder(buildTarget).build(),
        new SourcePathResolver(
            new BuildRuleResolver(
                TargetGraph.EMPTY,
                new DefaultTargetNodeToBuildRuleTransformer())
        ),
        symlinkTreeRoot,
        ImmutableMap.<Path, SourcePath>of(
            Paths.get("different/link"),
            new PathSourcePath(
                projectFilesystem,
                MorePaths.relativize(tmpDir.getRoot(), aFile))));
    SourcePathResolver resolver = new SourcePathResolver(
        new BuildRuleResolver(TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer())
    );
    // Calculate their rule keys and verify they're different.
    FakeFileHashCache hashCache = FakeFileHashCache.createFromStrings(
        ImmutableMap.<String, String>of());
    RuleKey key1 = new DefaultRuleKeyBuilderFactory(0, hashCache, resolver).build(
        symlinkTreeBuildRule);
    RuleKey key2 = new DefaultRuleKeyBuilderFactory(0, hashCache, resolver).build(
        modifiedSymlinkTreeBuildRule);
    assertNotEquals(key1, key2);
  }
  /**
   * Rewriting the contents of a link target must NOT change the rule key —
   * only the link map itself is an input to the key.
   */
  @Test
  public void testSymlinkTreeRuleKeyDoesNotChangeIfLinkTargetsChange() throws IOException {
    BuildRuleResolver ruleResolver = new BuildRuleResolver(
        TargetGraph.EMPTY,
        new DefaultTargetNodeToBuildRuleTransformer());
    ruleResolver.addToIndex(symlinkTreeBuildRule);
    SourcePathResolver resolver = new SourcePathResolver(ruleResolver);
    DefaultRuleKeyBuilderFactory ruleKeyBuilderFactory = new DefaultRuleKeyBuilderFactory(
        0,
        FakeFileHashCache.createFromStrings(
            ImmutableMap.<String, String>of()),
        resolver);
    // Calculate the rule key
    RuleKey key1 = ruleKeyBuilderFactory.build(symlinkTreeBuildRule);
    // Change the contents of the target of the link.
    Path existingFile =
        projectFilesystem.resolve(resolver.deprecatedGetPath(links.values().asList().get(0)));
    Files.write(existingFile, "something new".getBytes(Charsets.UTF_8));
    // Re-calculate the rule key
    RuleKey key2 = ruleKeyBuilderFactory.build(symlinkTreeBuildRule);
    // Verify that the rules keys are the same.
    assertEquals(key1, key2);
  }
}
| |
/*
* Copyright 2004-2006 Stefan Reuter
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.asteriskjava.live.internal;
import java.util.*;
import org.asteriskjava.live.AsteriskChannel;
import org.asteriskjava.live.AsteriskQueueEntry;
import org.asteriskjava.live.CallDetailRecord;
import org.asteriskjava.live.CallerId;
import org.asteriskjava.live.ChannelState;
import org.asteriskjava.live.ChannelStateHistoryEntry;
import org.asteriskjava.live.DialedChannelHistoryEntry;
import org.asteriskjava.live.Extension;
import org.asteriskjava.live.ExtensionHistoryEntry;
import org.asteriskjava.live.HangupCause;
import org.asteriskjava.live.LinkedChannelHistoryEntry;
import org.asteriskjava.live.ManagerCommunicationException;
import org.asteriskjava.live.NoSuchChannelException;
import org.asteriskjava.manager.action.AbsoluteTimeoutAction;
import org.asteriskjava.manager.action.ChangeMonitorAction;
import org.asteriskjava.manager.action.GetVarAction;
import org.asteriskjava.manager.action.HangupAction;
import org.asteriskjava.manager.action.MonitorAction;
import org.asteriskjava.manager.action.PauseMixMonitorAction;
import org.asteriskjava.manager.action.PauseMonitorAction;
import org.asteriskjava.manager.action.PlayDtmfAction;
import org.asteriskjava.manager.action.RedirectAction;
import org.asteriskjava.manager.action.SetVarAction;
import org.asteriskjava.manager.action.StopMonitorAction;
import org.asteriskjava.manager.action.UnpauseMonitorAction;
import org.asteriskjava.manager.response.ManagerError;
import org.asteriskjava.manager.response.ManagerResponse;
import org.asteriskjava.util.MixMonitorDirection;
/**
* Default implementation of the AsteriskChannel interface.
*
* @author srt
* @version $Id$
*/
class AsteriskChannelImpl extends AbstractLiveObject implements AsteriskChannel
{
    /** Name of the "PRI_CAUSE" channel variable. */
    private static final String CAUSE_VARIABLE_NAME = "PRI_CAUSE";
    /**
     * Date this channel has been created.
     */
    private final Date dateOfCreation;
    // History lists; each list is used as its own synchronization monitor by
    // the accessor and mutator methods of this class.
    private final List<ExtensionHistoryEntry> extensionHistory;
    private final List<ChannelStateHistoryEntry> stateHistory;
    private final List<LinkedChannelHistoryEntry> linkedChannelHistory;
    private final List<DialedChannelHistoryEntry> dialedChannelHistory;
    // Channels dialed by this channel (appended to by channelDialed()).
    private final List<AsteriskChannel> dialedChannels;
    // Channels dialing this channel (maintained by channelDialing()).
    private final List<AsteriskChannel> dialingChannels;
    /**
     * If this channel is bridged to another channel, the linkedChannels contains
     * the channel this channel is bridged with.
     */
    private final List<AsteriskChannel> linkedChannels;
    // Channel variables by name; population is not visible in this excerpt.
    private final Map<String, String> variables;
    /**
     * Unique id of this channel.
     */
    private String id;
    /**
     * The traceId is used to trace originated channels.
     */
    private String traceId;
    /**
     * Date this channel has left the Asterisk server.
     */
    private Date dateOfRemoval;
    /**
     * Name of this channel.
     */
    private String name;
    /**
     * Caller*ID of this channel.
     */
    private CallerId callerId;
    /**
     * State of this channel.
     */
    private ChannelState state;
    /**
     * Account code used to bill this channel.
     */
    private String account;
    /**
     * Indicates if this channel was linked to another channel at least once.
     */
    private boolean wasLinked;
    // Hangup cause and its textual representation; both set by hungup().
    private HangupCause hangupCause;
    private String hangupCauseText;
    // Call detail record; set by callDetailRecordReceived().
    private CallDetailRecordImpl callDetailRecord;
    /**
     * MeetMe room user associated with this channel if any, <code>null</code>
     * otherwise.
     */
    private MeetMeUserImpl meetMeUserImpl;
    /**
     * Queue entry associated with this channel if any, <code>null</code>
     * otherwise.
     */
    private AsteriskQueueEntryImpl queueEntryImpl;
    /**
     * Extension where the call is parked if it is parked, <code>null</code>
     * otherwise.
     */
    private Extension parkedAt;
    /**
     * Last dtmf digit received on this channel if any, <code>null</code> otherwise.
     */
    private Character dtmfReceived;
    /**
     * Last dtmf digit sent on this channel if any, <code>null</code> otherwise.
     */
    private Character dtmfSent;
    /**
     * Actual monitor state.
     */
    private boolean isMonitored;
/**
* Creates a new Channel.
*
* @param server server this channel belongs to.
* @param name name of this channel, for example "SIP/1310-20da".
* @param id unique id of this channel, for example "1099015093.165".
* @param dateOfCreation date this channel has been created.
* @throws IllegalArgumentException if any of the parameters are null.
*/
AsteriskChannelImpl(final AsteriskServerImpl server, final String name, final String id, final Date dateOfCreation)
throws IllegalArgumentException
{
super(server);
if (server == null)
{
throw new IllegalArgumentException("Parameter 'server' passed to AsteriskChannelImpl() must not be null.");
}
if (name == null)
{
throw new IllegalArgumentException("Parameter 'name' passed to AsteriskChannelImpl() must not be null.");
}
if (id == null)
{
throw new IllegalArgumentException("Parameter 'id' passed to AsteriskChannelImpl() must not be null.");
}
if (dateOfCreation == null)
{
throw new IllegalArgumentException(
"Parameter 'dateOfCreation' passed to AsteriskChannelImpl() must not be null.");
}
this.name = name;
this.id = id;
this.dateOfCreation = dateOfCreation;
this.extensionHistory = new ArrayList<ExtensionHistoryEntry>();
this.stateHistory = new ArrayList<ChannelStateHistoryEntry>();
this.linkedChannelHistory = new ArrayList<LinkedChannelHistoryEntry>();
this.dialedChannelHistory = new ArrayList<DialedChannelHistoryEntry>();
this.variables = new HashMap<String, String>();
this.dialedChannels = new ArrayList<AsteriskChannel>();
this.dialingChannels = new ArrayList<AsteriskChannel>();
this.linkedChannels = new ArrayList<AsteriskChannel>();
}
    /**
     * Returns the unique id of this channel, for example "1099015093.165".
     */
    public String getId()
    {
        return id;
    }
/**
* Changes the id of this channel.
*
* @param date date of the name change.
* @param id the new unique id of this channel.
*/
void idChanged(Date date, String id)
{
final String oldId = this.id;
if (oldId != null && oldId.equals(id))
{
return;
}
this.id = id;
firePropertyChange(PROPERTY_ID, oldId, id);
}
    /**
     * Returns the trace id used to trace originated channels, or
     * <code>null</code> if none has been set.
     */
    String getTraceId()
    {
        return traceId;
    }
    /**
     * Sets the trace id used to trace originated channels.
     *
     * @param traceId the new trace id.
     */
    void setTraceId(String traceId)
    {
        this.traceId = traceId;
    }
    /**
     * Returns the name of this channel, for example "SIP/1310-20da".
     */
    public String getName()
    {
        return name;
    }
/**
* Changes the name of this channel.
*
* @param date date of the name change.
* @param name the new name of this channel.
*/
void nameChanged(Date date, String name)
{
final String oldName = this.name;
if (oldName != null && oldName.equals(name))
{
return;
}
this.name = name;
firePropertyChange(PROPERTY_NAME, oldName, name);
}
    /**
     * Returns the Caller*ID of this channel.
     */
    public CallerId getCallerId()
    {
        return callerId;
    }
/**
* Sets the caller id of this channel.
*
* @param callerId the caller id of this channel.
*/
void setCallerId(final CallerId callerId)
{
final CallerId oldCallerId = this.callerId;
this.callerId = callerId;
firePropertyChange(PROPERTY_CALLER_ID, oldCallerId, callerId);
}
    /**
     * Returns the current state of this channel.
     */
    public ChannelState getState()
    {
        return state;
    }
public boolean wasInState(ChannelState state)
{
synchronized (stateHistory)
{
for (ChannelStateHistoryEntry historyEntry : stateHistory)
{
if (historyEntry.getState() == state)
{
return true;
}
}
}
return false;
}
    /**
     * Checks whether this channel was busy: either it passed through the
     * BUSY state or it was hung up with a busy-related hangup cause.
     *
     * @return <code>true</code> if this channel was busy.
     */
    public boolean wasBusy()
    {
        return wasInState(ChannelState.BUSY)
                || hangupCause == HangupCause.AST_CAUSE_BUSY
                || hangupCause == HangupCause.AST_CAUSE_USER_BUSY;
    }
    /**
     * Changes the state of this channel.
     * <p>
     * Does nothing if the new state equals the current state; otherwise the
     * transition is appended to the state history and a PropertyChangeEvent
     * for PROPERTY_STATE is fired.
     *
     * @param date when the state change occurred.
     * @param state the new state of this channel.
     */
    synchronized void stateChanged(Date date, ChannelState state)
    {
        final ChannelStateHistoryEntry historyEntry;
        final ChannelState oldState = this.state;
        if (oldState == state)
        {
            return;
        }
        historyEntry = new ChannelStateHistoryEntry(date, state);
        synchronized (stateHistory)
        {
            stateHistory.add(historyEntry);
        }
        this.state = state;
        firePropertyChange(PROPERTY_STATE, oldState, state);
    }
    /**
     * Returns the account code used to bill this channel.
     */
    public String getAccount()
    {
        return account;
    }
/**
* Sets the account code used to bill this channel.
*
* @param account the account code used to bill this channel.
*/
void setAccount(String account)
{
final String oldAccount = this.account;
this.account = account;
firePropertyChange(PROPERTY_ACCOUNT, oldAccount, account);
}
public Extension getCurrentExtension()
{
final Extension extension;
synchronized (extensionHistory)
{
if (extensionHistory.isEmpty())
{
extension = null;
}
else
{
extension = extensionHistory.get(extensionHistory.size() - 1).getExtension();
}
}
return extension;
}
public Extension getFirstExtension()
{
final Extension extension;
synchronized (extensionHistory)
{
if (extensionHistory.isEmpty())
{
extension = null;
}
else
{
extension = extensionHistory.get(0).getExtension();
}
}
return extension;
}
public List<ExtensionHistoryEntry> getExtensionHistory()
{
final List<ExtensionHistoryEntry> copy;
synchronized (extensionHistory)
{
copy = new ArrayList<ExtensionHistoryEntry>(extensionHistory);
}
return copy;
}
    /**
     * Adds a visited dialplan entry to the history and notifies property
     * change listeners that the current extension changed.
     *
     * @param date the date the extension has been visited.
     * @param extension the visited dialplan entry to add.
     */
    void extensionVisited(Date date, Extension extension)
    {
        // capture the previous extension so listeners receive old and new value
        final Extension oldCurrentExtension = getCurrentExtension();
        final ExtensionHistoryEntry historyEntry;
        historyEntry = new ExtensionHistoryEntry(date, extension);
        synchronized (extensionHistory)
        {
            extensionHistory.add(historyEntry);
        }
        firePropertyChange(PROPERTY_CURRENT_EXTENSION, oldCurrentExtension, extension);
    }
    /**
     * Returns the date this channel has been created.
     */
    public Date getDateOfCreation()
    {
        return dateOfCreation;
    }
    /**
     * Returns the date this channel has left the Asterisk server, or
     * <code>null</code> if it has not been removed yet.
     */
    public Date getDateOfRemoval()
    {
        return dateOfRemoval;
    }
    /**
     * Returns the hangup cause of this channel, or <code>null</code> if it
     * has not been hung up.
     */
    public HangupCause getHangupCause()
    {
        return hangupCause;
    }
    /**
     * Returns the textual representation of the hangup cause, or
     * <code>null</code> if this channel has not been hung up.
     */
    public String getHangupCauseText()
    {
        return hangupCauseText;
    }
    /**
     * Returns the call detail record of this channel, or <code>null</code>
     * if none has been received yet.
     */
    public CallDetailRecord getCallDetailRecord()
    {
        return callDetailRecord;
    }
    /**
     * Stores the call detail record received for this channel and notifies
     * property change listeners.
     *
     * @param date date the record was received (currently not stored).
     * @param callDetailRecord the received call detail record.
     */
    void callDetailRecordReceived(Date date, CallDetailRecordImpl callDetailRecord)
    {
        final CallDetailRecordImpl oldCallDetailRecord = this.callDetailRecord;
        this.callDetailRecord = callDetailRecord;
        firePropertyChange(PROPERTY_CALL_DETAIL_RECORD, oldCallDetailRecord, callDetailRecord);
    }
    /**
     * Sets dateOfRemoval, hangupCause and hangupCauseText and changes state to
     * {@link ChannelState#HUNGUP}. Fires a PropertyChangeEvent for state.
     *
     * @param dateOfRemoval date the channel was hung up
     * @param hangupCause cause for hangup
     * @param hangupCauseText textual representation of hangup cause
     */
    synchronized void hungup(Date dateOfRemoval, HangupCause hangupCause, String hangupCauseText)
    {
        this.dateOfRemoval = dateOfRemoval;
        this.hangupCause = hangupCause;
        this.hangupCauseText = hangupCauseText;
        // update state and fire PropertyChangeEvent (also appends the
        // transition to the state history)
        stateChanged(dateOfRemoval, ChannelState.HUNGUP);
    }
/**
* Retrives the conplete List of all dialed channels associated to ths calls
*
* @return List of all dialed channels
*/
public List<AsteriskChannel> getDialedChannels()
{
final List<AsteriskChannel> copy;
synchronized (dialedChannels)
{
copy = new ArrayList<AsteriskChannel>(dialedChannels);
}
return copy;
}
/* dialed channels */
public AsteriskChannel getDialedChannel()
{
synchronized (dialedChannels)
{
for (AsteriskChannel channel : dialedChannels)
{
if (channel != null)
return channel;
}
}
return null;
}
public List<DialedChannelHistoryEntry> getDialedChannelHistory()
{
final List<DialedChannelHistoryEntry> copy;
synchronized (linkedChannelHistory)
{
copy = new ArrayList<DialedChannelHistoryEntry>(dialedChannelHistory);
}
return copy;
}
    /**
     * Records that this channel dialed another channel: appends it to the
     * dialed channels and the dialed channel history, then fires a
     * PropertyChangeEvent for {@code PROPERTY_DIALED_CHANNEL}.
     *
     * @param date the date the channel was dialed.
     * @param dialedChannel the channel that was dialed.
     */
    synchronized void channelDialed(Date date, AsteriskChannel dialedChannel)
    {
        final AsteriskChannel oldDialedChannel;
        synchronized (dialedChannels)
        {
            // the "old" value reported to listeners is the most recently dialed channel
            if (dialedChannels.isEmpty())
                oldDialedChannel = null;
            else
                oldDialedChannel = dialedChannels.get(dialedChannels.size() - 1);
            dialedChannels.add(dialedChannel);
        }
        final DialedChannelHistoryEntry historyEntry;
        historyEntry = new DialedChannelHistoryEntry(date, dialedChannel);
        synchronized (dialedChannelHistory)
        {
            dialedChannelHistory.add(historyEntry);
        }
        firePropertyChange(PROPERTY_DIALED_CHANNEL, oldDialedChannel, dialedChannel);
    }
/* dialed channels */
public AsteriskChannel getDialingChannel()
{
synchronized (dialingChannels)
{
if (dialingChannels.isEmpty())
return null;
return dialingChannels.get(0);
}
}
    /**
     * Records the channel that is dialing this channel and fires a
     * PropertyChangeEvent for {@code PROPERTY_DIALING_CHANNEL}. At most one
     * dialing channel is kept; a later call replaces the earlier one.
     *
     * @param date the date the dial occurred (currently unused).
     * @param dialingChannel the channel dialing this channel.
     */
    synchronized void channelDialing(Date date, AsteriskChannel dialingChannel)
    {
        final AsteriskChannel oldDialingChannel;
        synchronized (this.dialingChannels)
        {
            if (this.dialingChannels.isEmpty())
            {
                oldDialingChannel = null;
                this.dialingChannels.add(dialingChannel);
            }
            else
            {
                oldDialingChannel = this.dialingChannels.get(0);
                this.dialingChannels.set(0, dialingChannel);
            }
        }
        firePropertyChange(PROPERTY_DIALING_CHANNEL, oldDialingChannel, dialingChannel);
    }
/* linked channels */
public AsteriskChannel getLinkedChannel()
{
synchronized (linkedChannels)
{
if (linkedChannels.isEmpty())
return null;
return linkedChannels.get(0);
}
}
public List<LinkedChannelHistoryEntry> getLinkedChannelHistory()
{
final List<LinkedChannelHistoryEntry> copy;
synchronized (linkedChannelHistory)
{
copy = new ArrayList<LinkedChannelHistoryEntry>(linkedChannelHistory);
}
return copy;
}
    /** Returns true if this channel has been bridged at least once (set by channelLinked). */
    public boolean wasLinked()
    {
        return wasLinked;
    }
    /**
     * Sets the channel this channel is bridged with, records the link in the
     * linked channel history and fires a PropertyChangeEvent for
     * {@code PROPERTY_LINKED_CHANNEL}.
     *
     * @param date the date this channel was linked.
     * @param linkedChannel the channel this channel is bridged with.
     */
    synchronized void channelLinked(Date date, AsteriskChannel linkedChannel)
    {
        final AsteriskChannel oldLinkedChannel;
        synchronized (this.linkedChannels)
        {
            // at most one linked channel is kept; replace it if one is present
            if (this.linkedChannels.isEmpty())
            {
                oldLinkedChannel = null;
                this.linkedChannels.add(linkedChannel);
            }
            else
            {
                oldLinkedChannel = this.linkedChannels.get(0);
                this.linkedChannels.set(0, linkedChannel);
            }
        }
        final LinkedChannelHistoryEntry historyEntry;
        historyEntry = new LinkedChannelHistoryEntry(date, linkedChannel);
        synchronized (linkedChannelHistory)
        {
            linkedChannelHistory.add(historyEntry);
        }
        this.wasLinked = true;
        firePropertyChange(PROPERTY_LINKED_CHANNEL, oldLinkedChannel, linkedChannel);
    }
    /**
     * Marks this channel as no longer bridged: clears the linked channels,
     * stamps the most recent linked-channel history entry with the unlink date
     * and fires a PropertyChangeEvent for {@code PROPERTY_LINKED_CHANNEL}
     * with a null new value.
     *
     * @param date the date this channel was unlinked.
     */
    synchronized void channelUnlinked(Date date)
    {
        final AsteriskChannel oldLinkedChannel;
        synchronized (this.linkedChannels)
        {
            if (this.linkedChannels.isEmpty())
            {
                oldLinkedChannel = null;
            }
            else
            {
                oldLinkedChannel = this.linkedChannels.get(0);
            }
            linkedChannels.clear();
        }
        final LinkedChannelHistoryEntry historyEntry;
        synchronized (linkedChannelHistory)
        {
            if (linkedChannelHistory.isEmpty())
            {
                historyEntry = null;
            }
            else
            {
                // the last entry corresponds to the link being torn down
                historyEntry = linkedChannelHistory.get(linkedChannelHistory.size() - 1);
            }
        }
        if (historyEntry != null)
        {
            historyEntry.setDateUnlinked(date);
        }
        firePropertyChange(PROPERTY_LINKED_CHANNEL, oldLinkedChannel, null);
    }
    /* MeetMe user */
    /** Returns the MeetMe conference user associated with this channel, if any. */
    public MeetMeUserImpl getMeetMeUser()
    {
        return meetMeUserImpl;
    }
    /**
     * Associates a MeetMe conference user with this channel and fires a
     * PropertyChangeEvent for {@code PROPERTY_MEET_ME_USER}.
     */
    void setMeetMeUserImpl(MeetMeUserImpl meetMeUserImpl)
    {
        final MeetMeUserImpl oldMeetMeUserImpl = this.meetMeUserImpl;
        this.meetMeUserImpl = meetMeUserImpl;
        firePropertyChange(PROPERTY_MEET_ME_USER, oldMeetMeUserImpl, meetMeUserImpl);
    }
    // action methods
    /**
     * Hangs up this channel without specifying a cause.
     *
     * @throws ManagerCommunicationException if the action cannot be sent.
     * @throws NoSuchChannelException if Asterisk reports an error for this channel.
     */
    public void hangup() throws ManagerCommunicationException, NoSuchChannelException
    {
        hangup(null);
    }
    /**
     * Hangs up this channel with the given cause, or with Asterisk's default
     * cause if {@code cause} is null.
     *
     * @param cause the hangup cause to set, may be null.
     * @throws ManagerCommunicationException if an action cannot be sent.
     * @throws NoSuchChannelException if Asterisk reports an error for this channel.
     */
    public void hangup(HangupCause cause) throws ManagerCommunicationException, NoSuchChannelException
    {
        final HangupAction action;
        final ManagerResponse response;
        if (cause != null)
        {
            // expose the requested cause code via a channel variable before hanging up
            setVariable(CAUSE_VARIABLE_NAME, Integer.toString(cause.getCode()));
            action = new HangupAction(name, cause.getCode());
        }
        else
        {
            action = new HangupAction(name);
        }
        response = server.sendAction(action);
        if (response instanceof ManagerError)
        {
            throw new NoSuchChannelException("Channel '" + name + "' is not available: " + response.getMessage());
        }
    }
public void setAbsoluteTimeout(int seconds) throws ManagerCommunicationException, NoSuchChannelException
{
ManagerResponse response;
response = server.sendAction(new AbsoluteTimeoutAction(name, seconds));
if (response instanceof ManagerError)
{
throw new NoSuchChannelException("Channel '" + name + "' is not available: " + response.getMessage());
}
}
public void redirect(String context, String exten, int priority) throws ManagerCommunicationException,
NoSuchChannelException
{
ManagerResponse response;
response = server.sendAction(new RedirectAction(name, context, exten, priority));
if (response instanceof ManagerError)
{
throw new NoSuchChannelException("Channel '" + name + "' is not available: " + response.getMessage());
}
}
    /**
     * Redirects this channel and, if bridged, its linked channel to the given
     * dialplan position. Falls back to a single-leg redirect when no linked
     * channel is present.
     *
     * @param context the dialplan context to redirect to.
     * @param exten the extension to redirect to.
     * @param priority the priority to redirect to.
     * @throws ManagerCommunicationException if the action cannot be sent.
     * @throws NoSuchChannelException if Asterisk reports an error for this channel.
     */
    public void redirectBothLegs(String context, String exten, int priority) throws ManagerCommunicationException,
            NoSuchChannelException
    {
        ManagerResponse response;
        // NOTE(review): the linkedChannels monitor is held across the network
        // round trip (server.sendAction) — confirm this blocking is intended.
        synchronized (linkedChannels)
        {
            if (linkedChannels.isEmpty())
            {
                response = server.sendAction(new RedirectAction(name, context, exten, priority));
            }
            else
            {
                response = server
                        .sendAction(new RedirectAction(name, linkedChannels.get(0).getName(), context, exten, priority,
                                context, exten, priority));
            }
        }
        if (response instanceof ManagerError)
        {
            throw new NoSuchChannelException("Channel '" + name + "' is not available: " + response.getMessage());
        }
    }
    /**
     * Returns the value of the given channel variable, consulting a local
     * cache first. On a cache miss the value is fetched from Asterisk via a
     * GetVar action and cached.
     * <p>
     * NOTE(review): the {@code variables} monitor is held across the network
     * round trip ({@code server.sendAction}), which blocks all other variable
     * access on this channel for the duration — confirm this is intended.
     *
     * @param variable the name of the variable to retrieve.
     * @return the variable's value, may be null.
     * @throws ManagerCommunicationException if the action cannot be sent.
     * @throws NoSuchChannelException if Asterisk reports an error for this channel.
     */
    public String getVariable(String variable) throws ManagerCommunicationException, NoSuchChannelException
    {
        ManagerResponse response;
        String value;
        synchronized (variables)
        {
            value = variables.get(variable);
            if (value != null)
            {
                return value;
            }
            response = server.sendAction(new GetVarAction(name, variable));
            if (response instanceof ManagerError)
            {
                throw new NoSuchChannelException("Channel '" + name + "' is not available: " + response.getMessage());
            }
            value = response.getAttribute("Value");
            if (value == null)
            {
                value = response.getAttribute(variable); // for Asterisk 1.0.x
            }
            variables.put(variable, value);
        }
        return value;
    }
public void setVariable(String variable, String value) throws ManagerCommunicationException, NoSuchChannelException
{
ManagerResponse response;
response = server.sendAction(new SetVarAction(name, variable, value));
if (response instanceof ManagerError)
{
throw new NoSuchChannelException("Channel '" + name + "' is not available: " + response.getMessage());
}
synchronized (variables)
{
variables.put(variable, value);
}
}
public void playDtmf(String digit) throws ManagerCommunicationException, NoSuchChannelException, IllegalArgumentException
{
ManagerResponse response;
if (digit == null)
{
throw new IllegalArgumentException("DTMF digit to send must not be null");
}
response = server.sendAction(new PlayDtmfAction(name, digit));
if (response instanceof ManagerError)
{
throw new NoSuchChannelException("Channel '" + name + "' is not available: " + response.getMessage());
}
}
    /**
     * Starts monitoring (recording) this channel to the given file using the
     * default format and without mixing.
     *
     * @param filename the base name of the file(s) to record to.
     * @throws ManagerCommunicationException if the action cannot be sent.
     * @throws NoSuchChannelException if Asterisk reports an error for this channel.
     */
    public void startMonitoring(String filename) throws ManagerCommunicationException, NoSuchChannelException
    {
        startMonitoring(filename, null, false);
    }
    /**
     * Starts monitoring (recording) this channel to the given file in the
     * given format, without mixing.
     *
     * @param filename the base name of the file(s) to record to.
     * @param format the audio format to record in, may be null.
     * @throws ManagerCommunicationException if the action cannot be sent.
     * @throws NoSuchChannelException if Asterisk reports an error for this channel.
     */
    public void startMonitoring(String filename, String format) throws ManagerCommunicationException, NoSuchChannelException
    {
        startMonitoring(filename, format, false);
    }
public void startMonitoring(String filename, String format, boolean mix) throws ManagerCommunicationException,
NoSuchChannelException
{
ManagerResponse response;
response = server.sendAction(new MonitorAction(name, filename, format, mix));
if (response instanceof ManagerError)
{
throw new NoSuchChannelException("Channel '" + name + "' is not available: " + response.getMessage());
}
}
public void changeMonitoring(String filename)
throws ManagerCommunicationException, NoSuchChannelException, IllegalArgumentException
{
ManagerResponse response;
if (filename == null)
{
throw new IllegalArgumentException("New filename must not be null");
}
response = server.sendAction(new ChangeMonitorAction(name, filename));
if (response instanceof ManagerError)
{
throw new NoSuchChannelException("Channel '" + name + "' is not available: " + response.getMessage());
}
}
public void stopMonitoring() throws ManagerCommunicationException, NoSuchChannelException
{
ManagerResponse response;
response = server.sendAction(new StopMonitorAction(name));
if (response instanceof ManagerError)
{
throw new NoSuchChannelException("Channel '" + name + "' is not available: " + response.getMessage());
}
}
    /**
     * Temporarily pauses monitoring of this channel via a PauseMonitor action.
     *
     * @throws ManagerCommunicationException if the action cannot be sent.
     * @throws NoSuchChannelException if Asterisk reports an error for this channel.
     */
    public void pauseMonitoring() throws ManagerCommunicationException, NoSuchChannelException
    {
        ManagerResponse response;
        response = server.sendAction(new PauseMonitorAction(name));
        if (response instanceof ManagerError)
        {
            throw new NoSuchChannelException("Channel '" + name + "' is not available: " + response.getMessage());
        }
    }
    /**
     * Resumes a previously paused monitor via an UnpauseMonitor action.
     *
     * @throws ManagerCommunicationException if the action cannot be sent.
     * @throws NoSuchChannelException if Asterisk reports an error for this channel.
     */
    public void unpauseMonitoring() throws ManagerCommunicationException, NoSuchChannelException
    {
        ManagerResponse response;
        response = server.sendAction(new UnpauseMonitorAction(name));
        if (response instanceof ManagerError)
        {
            throw new NoSuchChannelException("Channel '" + name + "' is not available: " + response.getMessage());
        }
    }
    /**
     * Pauses mix monitoring for the given direction (the second action
     * argument, 1, selects the paused state).
     *
     * @param direction which audio direction to pause.
     * @throws ManagerCommunicationException if the action cannot be sent.
     * @throws NoSuchChannelException if Asterisk reports an error for this channel.
     */
    public void pauseMixMonitor(MixMonitorDirection direction) throws ManagerCommunicationException, NoSuchChannelException
    {
        ManagerResponse response;
        response = server.sendAction(new PauseMixMonitorAction(this.name, 1, direction.getStateName()));
        if (response instanceof ManagerError)
        {
            throw new NoSuchChannelException("Channel '" + name + "' is not available: " + response.getMessage());
        }
    }
    /**
     * Resumes mix monitoring for the given direction (the second action
     * argument, 0, selects the unpaused state).
     *
     * @param direction which audio direction to resume.
     * @throws ManagerCommunicationException if the action cannot be sent.
     * @throws NoSuchChannelException if Asterisk reports an error for this channel.
     */
    public void unPauseMixMonitor(MixMonitorDirection direction) throws ManagerCommunicationException, NoSuchChannelException
    {
        ManagerResponse response;
        response = server.sendAction(new PauseMixMonitorAction(this.name, 0, direction.getStateName()));
        if (response instanceof ManagerError)
        {
            throw new NoSuchChannelException("Channel '" + name + "' is not available: " + response.getMessage());
        }
    }
    /**
     * Returns the extension this channel is parked at, or null if it is not
     * parked.
     */
    public Extension getParkedAt()
    {
        // warning: the context of this extension will be null until we get the context property from
        // the parked call event!
        return parkedAt;
    }
    /**
     * Records the extension this channel is parked at and fires a
     * PropertyChangeEvent for {@code PROPERTY_PARKED_AT}.
     */
    void setParkedAt(Extension parkedAt)
    {
        final Extension oldParkedAt = this.parkedAt;
        this.parkedAt = parkedAt;
        firePropertyChange(PROPERTY_PARKED_AT, oldParkedAt, parkedAt);
    }
    /**
     * Updates the cached value of a channel variable without contacting the
     * server. No PropertyChangeEvent is fired.
     *
     * @param name the variable name.
     * @param value the new value.
     */
    void updateVariable(String name, String value)
    {
        synchronized (variables)
        {
            // final String oldValue = variables.get(name);
            variables.put(name, value);
            // TODO add notification for updated channel variables
        }
    }
public Map<String, String> getVariables()
{
synchronized (variables)
{
return new HashMap<String, String>(variables);
}
}
    /** Returns the last DTMF digit received on this channel, or null. */
    public Character getDtmfReceived()
    {
        return this.dtmfReceived;
    }
    /** Returns the last DTMF digit sent on this channel, or null. */
    public Character getDtmfSent()
    {
        return this.dtmfSent;
    }
    /**
     * Records a received DTMF digit and fires a PropertyChangeEvent for
     * {@code PROPERTY_DTMF_RECEIVED}.
     */
    void dtmfReceived(Character digit)
    {
        final Character oldDtmfReceived = this.dtmfReceived;
        this.dtmfReceived = digit;
        firePropertyChange(PROPERTY_DTMF_RECEIVED, oldDtmfReceived, digit);
    }
    /**
     * Records a sent DTMF digit and fires a PropertyChangeEvent for
     * {@code PROPERTY_DTMF_SENT}.
     */
    void dtmfSent(Character digit)
    {
        final Character oldDtmfSent = this.dtmfSent;
        this.dtmfSent = digit;
        firePropertyChange(PROPERTY_DTMF_SENT, oldDtmfSent, digit);
    }
    /** Returns the queue entry this channel is associated with, or null. */
    public AsteriskQueueEntryImpl getQueueEntry()
    {
        return queueEntryImpl;
    }
    /**
     * Associates a queue entry with this channel and fires a
     * PropertyChangeEvent for {@code PROPERTY_QUEUE_ENTRY}.
     */
    void setQueueEntry(AsteriskQueueEntryImpl queueEntry)
    {
        final AsteriskQueueEntry oldQueueEntry = this.queueEntryImpl;
        this.queueEntryImpl = queueEntry;
        firePropertyChange(PROPERTY_QUEUE_ENTRY, oldQueueEntry, queueEntry);
    }
    /** Returns true if this channel is currently being monitored (recorded). */
    public boolean isMonitored()
    {
        return this.isMonitored;
    }
    /**
     * Records the monitored flag and fires a PropertyChangeEvent for
     * {@code PROPERTY_MONITORED}.
     */
    void setMonitored(boolean monitored)
    {
        final boolean oldMonitored = this.isMonitored;
        this.isMonitored = monitored;
        firePropertyChange(PROPERTY_MONITORED, oldMonitored, monitored);
    }
@Override
public String toString()
{
final StringBuffer sb;
final List<AsteriskChannel> dialedChannels;
final List<AsteriskChannel> dialingChannel;
final List<AsteriskChannel> linkedChannel;
sb = new StringBuffer("AsteriskChannel[");
synchronized (this)
{
sb.append("id='").append(getId()).append("',");
sb.append("name='").append(getName()).append("',");
sb.append("callerId='").append(getCallerId()).append("',");
sb.append("state='").append(getState()).append("',");
sb.append("account='").append(getAccount()).append("',");
sb.append("dateOfCreation=").append(getDateOfCreation()).append(",");
dialedChannels = getDialedChannels();
dialingChannel = this.dialingChannels;
linkedChannel = this.linkedChannels;
}
if (dialedChannels.isEmpty())
{
sb.append("dialedChannel=null,");
}
else
{
sb.append("dialedChannel=AsteriskChannel[");
synchronized (dialedChannels)
{
for (AsteriskChannel dialedChannel : dialedChannels)
{
sb.append("[id='").append(dialedChannel.getId()).append("',");
sb.append("name='").append(dialedChannel.getName()).append("'],");
}
sb.append("],");
}
}
if (dialingChannel.isEmpty())
{
sb.append("dialingChannel=null,");
}
else
{
sb.append("dialingChannel=AsteriskChannel[");
sb.append("id='").append(dialingChannel.get(0).getId()).append("',");
sb.append("name='").append(dialingChannel.get(0).getName()).append("'],");
}
synchronized (linkedChannel)
{
if (linkedChannel.isEmpty())
{
sb.append("linkedChannel=null");
}
else
{
sb.append("linkedChannel=AsteriskChannel[");
{
sb.append("id='").append(linkedChannel.get(0).getId()).append("',");
sb.append("name='").append(linkedChannel.get(0).getName()).append("']");
}
}
}
sb.append("]");
return sb.toString();
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tajo.jdbc;
import com.google.protobuf.ServiceException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.tajo.TajoConstants;
import org.apache.tajo.client.TajoClient;
import org.apache.tajo.conf.TajoConf;
import org.jboss.netty.handler.codec.http.QueryStringDecoder;
import java.io.IOException;
import java.net.URI;
import java.sql.*;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.Executor;
import java.util.concurrent.atomic.AtomicBoolean;
/**
 * JDBC {@link Connection} backed by a {@link TajoClient}.
 * <p>
 * Accepts URIs of the form {@code jdbc:tajo://hostname:port/databasename}.
 * Most transactional JDBC features are unsupported and throw
 * {@link SQLFeatureNotSupportedException}.
 */
public class TajoConnection implements Connection {
  private static Log LOG = LogFactory.getLog(TajoConnection.class);
  private final TajoClient tajoClient;
  // true until the constructor succeeds, and again after close()
  private final AtomicBoolean closed = new AtomicBoolean(true);
  private final String rawURI;
  private final Properties properties;
  private final URI uri;
  private final String hostName;
  private final int port;
  private final String databaseName;
  @SuppressWarnings("unused")
  /** it will be used soon. */
  private final Map<String, List<String>> params;
  /**
   * Parses the JDBC URI, applies the given properties to a {@link TajoConf}
   * and opens a {@link TajoClient}.
   *
   * @param rawURI a URI of the form {@code jdbc:tajo://host[:port][/database]}.
   * @param properties extra configuration entries copied into the TajoConf, may be null.
   * @throws SQLException with state TAJO-001 for an invalid URI, TAJO-002 if
   *         the client cannot be created.
   */
  public TajoConnection(String rawURI, Properties properties) throws SQLException {
    this.rawURI = rawURI;
    this.properties = properties;
    try {
      if (!rawURI.startsWith(TajoDriver.TAJO_JDBC_URL_PREFIX)) {
        throw new SQLException("Invalid URL: " + rawURI, "TAJO-001");
      }
      // URI form: jdbc:tajo://hostname:port/databasename
      int startIdx = rawURI.indexOf(":");
      if (startIdx < 0) {
        throw new SQLException("Invalid URL: " + rawURI, "TAJO-001");
      }
      // strip the leading "jdbc" scheme so java.net.URI can parse the rest;
      // renamed from "uri" to avoid shadowing the field of the same name
      String tajoUri = rawURI.substring(startIdx + 1, rawURI.length());
      try {
        this.uri = URI.create(tajoUri);
      } catch (IllegalArgumentException iae) {
        // preserve the original parse failure as the cause
        throw new SQLException("Invalid URL: " + rawURI, "TAJO-001", iae);
      }
      hostName = this.uri.getHost();
      if (hostName == null) {
        throw new SQLException("Invalid JDBC URI: " + rawURI, "TAJO-001");
      }
      if (this.uri.getPort() < 1) {
        port = 26002; // default client rpc port used when the URI omits one
      } else {
        port = this.uri.getPort();
      }
      if (this.uri.getPath() == null || this.uri.getPath().equalsIgnoreCase("")) { // if no database is given, set default.
        databaseName = TajoConstants.DEFAULT_DATABASE_NAME;
      } else {
        // getPath() will return '/database'.
        databaseName = this.uri.getPath().split("/")[1];
      }
      params = new QueryStringDecoder(rawURI).getParameters();
    } catch (SQLException se) {
      throw se;
    } catch (Throwable t) { // for unexpected exceptions like ArrayIndexOutOfBoundsException.
      // preserve the unexpected failure as the cause
      throw new SQLException("Invalid JDBC URI: " + rawURI, "TAJO-001", t);
    }
    TajoConf tajoConf = new TajoConf();
    if (properties != null) {
      for (Map.Entry<Object, Object> entry : properties.entrySet()) {
        tajoConf.set(entry.getKey().toString(), entry.getValue().toString());
      }
    }
    try {
      tajoClient = new TajoClient(hostName, port, databaseName);
    } catch (Exception e) {
      // preserve the construction failure as the cause
      throw new SQLException("Cannot create TajoClient instance:" + e.getMessage(), "TAJO-002", e);
    }
    closed.set(false);
  }
  /** Returns the raw JDBC URI this connection was created with. */
  public String getUri() {
    return this.rawURI;
  }
  /** Returns the underlying Tajo client. */
  public TajoClient getTajoClient() {
    return tajoClient;
  }
  @Override
  public void clearWarnings() throws SQLException {
  }
  /**
   * Closes the underlying client. Idempotent.
   * <p>
   * Fix: compareAndSet replaces the original get()-then-set() sequence, which
   * allowed two concurrent callers to both observe "open" and close the
   * client twice.
   */
  @Override
  public void close() throws SQLException {
    if (closed.compareAndSet(false, true)) {
      if (tajoClient != null) {
        tajoClient.close();
      }
    }
  }
  @Override
  public void commit() throws SQLException {
    throw new SQLFeatureNotSupportedException("commit");
  }
  @Override
  public Array createArrayOf(String arg0, Object[] arg1) throws SQLException {
    throw new SQLFeatureNotSupportedException("createArrayOf");
  }
  @Override
  public Blob createBlob() throws SQLException {
    throw new SQLFeatureNotSupportedException("createBlob");
  }
  @Override
  public Clob createClob() throws SQLException {
    throw new SQLFeatureNotSupportedException("createClob");
  }
  @Override
  public NClob createNClob() throws SQLException {
    throw new SQLFeatureNotSupportedException("createNClob");
  }
  @Override
  public SQLXML createSQLXML() throws SQLException {
    throw new SQLFeatureNotSupportedException("createSQLXML");
  }
  @Override
  public Statement createStatement() throws SQLException {
    if (isClosed()) {
      throw new SQLException("Can't create Statement, connection is closed");
    }
    return new TajoStatement(tajoClient);
  }
  @Override
  public Statement createStatement(int resultSetType, int resultSetConcurrency)
      throws SQLException {
    throw new SQLFeatureNotSupportedException("createStatement");
  }
  @Override
  public Statement createStatement(int resultSetType, int resultSetConcurrency,
      int resultSetHoldability) throws SQLException {
    throw new SQLFeatureNotSupportedException("createStatement");
  }
  @Override
  public Struct createStruct(String typeName, Object[] attributes)
      throws SQLException {
    throw new SQLFeatureNotSupportedException("createStruct");
  }
  /** Tajo has no transactions; auto-commit is always reported as on. */
  @Override
  public boolean getAutoCommit() throws SQLException {
    return true;
  }
  @Override
  public String getCatalog() throws SQLException {
    try {
      return tajoClient.getCurrentDatabase();
    } catch (ServiceException e) {
      throw new SQLException(e);
    }
  }
  @Override
  public Properties getClientInfo() throws SQLException {
    throw new SQLFeatureNotSupportedException("getClientInfo");
  }
  @Override
  public String getClientInfo(String name) throws SQLException {
    throw new SQLFeatureNotSupportedException("getClientInfo");
  }
  @Override
  public int getHoldability() throws SQLException {
    throw new SQLFeatureNotSupportedException("getHoldability");
  }
  @Override
  public DatabaseMetaData getMetaData() throws SQLException {
    return new TajoDatabaseMetaData(this);
  }
  @Override
  public int getTransactionIsolation() throws SQLException {
    return Connection.TRANSACTION_NONE;
  }
  @Override
  public Map<String, Class<?>> getTypeMap() throws SQLException {
    throw new SQLFeatureNotSupportedException("getTypeMap");
  }
  @Override
  public SQLWarning getWarnings() throws SQLException {
    throw new SQLFeatureNotSupportedException("getWarnings");
  }
  @Override
  public boolean isClosed() throws SQLException {
    return closed.get();
  }
  @Override
  public boolean isReadOnly() throws SQLException {
    return false;
  }
  /**
   * Validates the connection by running {@code SELECT 1;}.
   * NOTE(review): the {@code timeout} parameter is ignored, contrary to the
   * JDBC contract — confirm whether a timeout can be applied here.
   */
  @Override
  public boolean isValid(int timeout) throws SQLException {
    try {
      if (tajoClient.isConnected()) {
        ResultSet resultSet = tajoClient.executeQueryAndGetResult("SELECT 1;");
        boolean next = resultSet.next();
        boolean valid = next && resultSet.getLong(1) == 1;
        resultSet.close();
        return valid;
      } else {
        return false;
      }
    } catch (ServiceException e) {
      LOG.error("TajoMaster is not available.");
      return false;
    } catch (IOException e) {
      LOG.error("JDBC connection is not valid.");
      return false;
    }
  }
  @Override
  public String nativeSQL(String sql) throws SQLException {
    throw new SQLFeatureNotSupportedException("nativeSQL");
  }
  @Override
  public CallableStatement prepareCall(String sql) throws SQLException {
    throw new SQLFeatureNotSupportedException("prepareCall");
  }
  @Override
  public CallableStatement prepareCall(String sql, int resultSetType,
      int resultSetConcurrency) throws SQLException {
    throw new SQLFeatureNotSupportedException("prepareCall");
  }
  @Override
  public CallableStatement prepareCall(String sql, int resultSetType,
      int resultSetConcurrency, int resultSetHoldability) throws SQLException {
    throw new SQLFeatureNotSupportedException("prepareCall");
  }
  @Override
  public PreparedStatement prepareStatement(String sql) throws SQLException {
    return new TajoPreparedStatement(tajoClient, sql);
  }
  @Override
  public PreparedStatement prepareStatement(String sql, int autoGeneratedKeys)
      throws SQLException {
    return new TajoPreparedStatement(tajoClient, sql);
  }
  @Override
  public PreparedStatement prepareStatement(String sql, int[] columnIndexes)
      throws SQLException {
    throw new SQLFeatureNotSupportedException("prepareStatement");
  }
  @Override
  public PreparedStatement prepareStatement(String sql, String[] columnNames)
      throws SQLException {
    throw new SQLFeatureNotSupportedException("prepareStatement");
  }
  @Override
  public PreparedStatement prepareStatement(String sql, int resultSetType,
      int resultSetConcurrency) throws SQLException {
    return new TajoPreparedStatement(tajoClient, sql);
  }
  @Override
  public PreparedStatement prepareStatement(String sql, int resultSetType,
      int resultSetConcurrency, int resultSetHoldability) throws SQLException {
    throw new SQLFeatureNotSupportedException("prepareStatement");
  }
  @Override
  public void releaseSavepoint(Savepoint savepoint) throws SQLException {
    throw new SQLFeatureNotSupportedException("releaseSavepoint");
  }
  @Override
  public void rollback() throws SQLException {
    throw new SQLFeatureNotSupportedException("rollback");
  }
  @Override
  public void rollback(Savepoint savepoint) throws SQLException {
    throw new SQLFeatureNotSupportedException("rollback");
  }
  @Override
  public void setAutoCommit(boolean autoCommit) throws SQLException {
    throw new SQLFeatureNotSupportedException("setAutoCommit");
  }
  @Override
  public void setCatalog(String catalog) throws SQLException {
    try {
      tajoClient.selectDatabase(catalog);
    } catch (ServiceException e) {
      throw new SQLException(e);
    }
  }
  @Override
  public void setClientInfo(Properties properties)
      throws SQLClientInfoException {
    throw new UnsupportedOperationException("setClientInfo");
  }
  @Override
  public void setClientInfo(String name, String value)
      throws SQLClientInfoException {
    throw new UnsupportedOperationException("setClientInfo");
  }
  @Override
  public void setHoldability(int holdability) throws SQLException {
    throw new SQLFeatureNotSupportedException("setHoldability");
  }
  @Override
  public void setReadOnly(boolean readOnly) throws SQLException {
    throw new SQLFeatureNotSupportedException("setReadOnly");
  }
  @Override
  public Savepoint setSavepoint() throws SQLException {
    throw new SQLFeatureNotSupportedException("setSavepoint");
  }
  @Override
  public Savepoint setSavepoint(String name) throws SQLException {
    throw new SQLFeatureNotSupportedException("setSavepoint");
  }
  @Override
  public void setTransactionIsolation(int level) throws SQLException {
    throw new SQLFeatureNotSupportedException("setTransactionIsolation");
  }
  @Override
  public void setTypeMap(Map<String, Class<?>> map) throws SQLException {
    throw new SQLFeatureNotSupportedException("setTypeMap");
  }
  // the cast is guarded by isWrapperFor(tClass)
  @SuppressWarnings("unchecked")
  @Override
  public <T> T unwrap(Class<T> tClass) throws SQLException {
    if (isWrapperFor(tClass)) {
      return (T) this;
    }
    throw new SQLException("No wrapper for " + tClass);
  }
  @Override
  public boolean isWrapperFor(Class<?> tClass) throws SQLException {
    return tClass.isInstance(this);
  }
  public void abort(Executor executor) throws SQLException {
    // JDK 1.7
    throw new SQLFeatureNotSupportedException("abort is not supported");
  }
  public int getNetworkTimeout() throws SQLException {
    // JDK 1.7
    throw new SQLFeatureNotSupportedException("getNetworkTimeout is not supported");
  }
  public void setNetworkTimeout(Executor executor, int milliseconds) throws SQLException {
    // JDK 1.7
    throw new SQLFeatureNotSupportedException("setNetworkTimeout not supported");
  }
  public String getSchema() throws SQLException {
    return TajoConstants.DEFAULT_SCHEMA_NAME;
  }
  public void setSchema(String schema) throws SQLException {
    throw new SQLFeatureNotSupportedException("setSchema() is not supported yet");
  }
}
| |
/*
* This file is part of Sponge, licensed under the MIT License (MIT).
*
* Copyright (c) SpongePowered.org <http://www.spongepowered.org>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.spongepowered.api.service;
import static com.google.common.base.Preconditions.checkNotNull;
import com.google.common.base.Optional;
import com.google.common.base.Predicate;
import com.google.common.collect.MapMaker;
import org.spongepowered.api.plugin.PluginContainer;
import org.spongepowered.api.plugin.PluginManager;
import java.util.List;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import javax.annotation.Nullable;
import javax.inject.Inject;
/**
* The default implementation of {@link ServiceManager}.
*/
public class SimpleServiceManager implements ServiceManager {
private final ConcurrentMap<Class<?>, Provider> providers =
new MapMaker().concurrencyLevel(3).makeMap();
private final ConcurrentMap<Class<?>, SimpleServiceReference<?>> potentials =
new MapMaker().concurrencyLevel(3).weakKeys().makeMap();
private final PluginManager pluginManager;
/**
* Construct a simple {@link ServiceManager}.
*
* @param pluginManager The plugin manager to get the
* {@link PluginContainer} for a given plugin
*/
@Inject
public SimpleServiceManager(PluginManager pluginManager) {
checkNotNull(pluginManager, "pluginManager");
this.pluginManager = pluginManager;
}
@Override
public <T> void setProvider(Object plugin, Class<T> service, T provider) throws ProviderExistsException {
checkNotNull(plugin, "plugin");
checkNotNull(service, "service");
checkNotNull(provider, "provider");
Optional<PluginContainer> containerOptional = this.pluginManager.fromInstance(plugin);
if (!containerOptional.isPresent()) {
throw new IllegalArgumentException(
"The provided plugin object does not have an associated plugin container "
+ "(in other words, is 'plugin' actually your plugin object?)");
}
PluginContainer container = containerOptional.get();
Provider existing = this.providers.putIfAbsent(service, new Provider(container, provider));
if (existing != null) {
throw new ProviderExistsException("Provider for service " + service.getCanonicalName() + " has already been registered!");
}
@SuppressWarnings("unchecked")
SimpleServiceReference<T> ref = (SimpleServiceReference<T>) this.potentials.remove(service);
if (ref != null) {
ref.registered(provider);
}
}
@SuppressWarnings("unchecked")
@Override
public <T> ServiceReference<T> potentiallyProvide(Class<T> service) {
SimpleServiceReference<T> ref = new SimpleServiceReference<T>(provide(service));
@SuppressWarnings("rawtypes")
SimpleServiceReference newRef = this.potentials.putIfAbsent(service, ref);
if (newRef != null) {
ref = newRef;
}
if (ref.ref().isPresent()) {
this.potentials.remove(service, ref);
}
return ref;
}
@SuppressWarnings("unchecked")
@Override
public <T> Optional<T> provide(Class<T> service) {
checkNotNull(service, "service");
@Nullable Provider provider = this.providers.get(service);
return provider != null ? (Optional<T>) Optional.of(provider.provider) : Optional.<T>absent();
}
@SuppressWarnings("unchecked")
@Override
public <T> T provideUnchecked(Class<T> service) throws ProvisioningException {
checkNotNull(service, "service");
@Nullable Provider provider = this.providers.get(service);
if (provider != null) {
return (T) provider.provider;
} else {
throw new ProvisioningException("No provider is registered for the service '" + service.getName() + "'", service);
}
}
private static class Provider {
@SuppressWarnings("unused")
private final PluginContainer container;
private final Object provider;
private Provider(PluginContainer container, Object provider) {
this.container = container;
this.provider = provider;
}
}
/**
 * A {@link ServiceReference} whose target may arrive after construction.
 * Waiters block on {@code waitCondition}; callbacks queued through
 * {@link #executeWhenPresent} run once {@link #registered} supplies the service.
 *
 * Fix over the original: the present-check and the queueing in
 * {@code executeWhenPresent} were not atomic, so a concurrent {@code registered()}
 * between the check and the add could leave the callback queued forever. Both
 * paths now decide under {@code waitLock}; callbacks are still invoked outside
 * the lock so alien code cannot deadlock us.
 */
private static class SimpleServiceReference<T> implements ServiceReference<T> {
    // Callbacks to run once the service arrives; mutated only under waitLock.
    private final List<Predicate<T>> actionsOnPresent = new CopyOnWriteArrayList<Predicate<T>>();
    private final Lock waitLock = new ReentrantLock();
    private final Condition waitCondition = this.waitLock.newCondition();
    // volatile so ref() can read without taking the lock.
    private volatile Optional<T> service;

    public SimpleServiceReference(Optional<T> service) {
        this.service = service;
    }

    @Override
    public Optional<T> ref() {
        return this.service;
    }

    /** Blocks until the service has been registered, then returns it. */
    @Override
    public T await() throws InterruptedException {
        this.waitLock.lock();
        try {
            while (true) {
                Optional<T> current = this.service;
                if (current.isPresent()) {
                    return current.get();
                }
                this.waitCondition.await();
            }
        } finally {
            this.waitLock.unlock();
        }
    }

    @Override
    public void executeWhenPresent(Predicate<T> run) {
        // Decide atomically whether to queue or run now; fixes the original
        // check-then-act race against registered().
        T present = null;
        this.waitLock.lock();
        try {
            Optional<T> current = this.service;
            if (current.isPresent()) {
                present = current.get();
            } else {
                this.actionsOnPresent.add(run);
            }
        } finally {
            this.waitLock.unlock();
        }
        if (present != null) {
            // Invoke outside the lock, matching the original's lock-free invocation.
            run.apply(present);
        }
    }

    public void registered(T service) {
        this.waitLock.lock();
        try {
            this.service = Optional.of(service);
            this.waitCondition.signalAll();
        } finally {
            this.waitLock.unlock();
        }
        // Safe outside the lock: once service is present, executeWhenPresent no
        // longer adds to this list, and COW iteration tolerates the clear().
        for (Predicate<T> func : this.actionsOnPresent) {
            func.apply(service);
        }
        this.actionsOnPresent.clear();
    }

    /**
     * Checks if this is a flowerpot.
     *
     * @return Whether this is a flowerpot
     */
    public boolean isFlowerPot() {
        return false;
    }
}
/**
 * Checks if this is a flowerpot.
 *
 * @return Whether this is a flowerpot
 */
// NOTE(review): a method with the same signature exists on SimpleServiceReference
// returning false; the purpose of this marker is not evident from this file — confirm
// whether any caller actually relies on it.
public boolean isFlowerPot() {
return true;
}
}
| |
/*
* ====================================================================
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*
*/
package org.apache.http.impl.client.cache;
import static org.easymock.EasyMock.anyObject;
import static org.easymock.EasyMock.eq;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.expectLastCall;
import static org.easymock.EasyMock.isA;
import static org.easymock.EasyMock.isNull;
import static org.easymock.classextension.EasyMock.createNiceMock;
import static org.easymock.classextension.EasyMock.replay;
import static org.easymock.classextension.EasyMock.verify;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.io.InputStream;
import java.net.SocketException;
import java.net.SocketTimeoutException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import junit.framework.AssertionFailedError;
import org.apache.http.Header;
import org.apache.http.HttpHost;
import org.apache.http.HttpRequest;
import org.apache.http.HttpResponse;
import org.apache.http.HttpStatus;
import org.apache.http.HttpVersion;
import org.apache.http.StatusLine;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.ResponseHandler;
import org.apache.http.client.cache.CacheResponseStatus;
import org.apache.http.client.cache.HttpCacheContext;
import org.apache.http.client.cache.HttpCacheEntry;
import org.apache.http.client.cache.HttpCacheStorage;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpExecutionAware;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpOptions;
import org.apache.http.client.methods.HttpRequestWrapper;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.client.protocol.HttpClientContext;
import org.apache.http.client.utils.DateUtils;
import org.apache.http.conn.routing.HttpRoute;
import org.apache.http.entity.InputStreamEntity;
import org.apache.http.impl.execchain.ClientExecChain;
import org.apache.http.message.BasicHeader;
import org.apache.http.message.BasicHttpRequest;
import org.apache.http.message.BasicHttpResponse;
import org.apache.http.util.EntityUtils;
import org.easymock.Capture;
import org.easymock.IExpectationSetters;
import org.easymock.EasyMock;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
@SuppressWarnings("boxing") // test code
public abstract class TestCachingExecChain {
// Object under test; rebuilt per test, sometimes replaced with a real-cache chain.
private ClientExecChain impl;
// Collaborator mocks wired into the caching chain in setUp().
protected CacheValidityPolicy mockValidityPolicy;
protected CacheableRequestPolicy mockRequestPolicy;
protected ClientExecChain mockBackend;
protected HttpCache mockCache;
private HttpCacheStorage mockStorage;
protected CachedResponseSuitabilityChecker mockSuitabilityChecker;
protected ResponseCachingPolicy mockResponsePolicy;
protected HttpCacheEntry mockCacheEntry;
protected CachedHttpResponseGenerator mockResponseGenerator;
private ResponseHandler<Object> mockHandler;
private HttpUriRequest mockUriRequest;
private CloseableHttpResponse mockCachedResponse;
protected ConditionalRequestBuilder mockConditionalRequestBuilder;
private HttpRequest mockConditionalRequest;
private StatusLine mockStatusLine;
protected ResponseProtocolCompliance mockResponseProtocolCompliance;
protected RequestProtocolCompliance mockRequestProtocolCompliance;
// Shared fixture state: config, async revalidator, route/host, default GET request,
// execution context, and a pre-built cache entry.
protected CacheConfig config;
protected AsynchronousValidator asyncValidator;
protected HttpRoute route;
protected HttpHost host;
protected HttpRequestWrapper request;
protected HttpCacheContext context;
protected HttpCacheEntry entry;
// Builds all collaborator mocks (nice mocks: unexpected calls return defaults),
// the shared request fixture, and the caching chain under test.
@SuppressWarnings("unchecked")
@Before
public void setUp() {
mockRequestPolicy = createNiceMock(CacheableRequestPolicy.class);
mockValidityPolicy = createNiceMock(CacheValidityPolicy.class);
mockBackend = createNiceMock(ClientExecChain.class);
mockCache = createNiceMock(HttpCache.class);
mockSuitabilityChecker = createNiceMock(CachedResponseSuitabilityChecker.class);
mockResponsePolicy = createNiceMock(ResponseCachingPolicy.class);
mockHandler = createNiceMock(ResponseHandler.class);
mockUriRequest = createNiceMock(HttpUriRequest.class);
mockCacheEntry = createNiceMock(HttpCacheEntry.class);
mockResponseGenerator = createNiceMock(CachedHttpResponseGenerator.class);
mockCachedResponse = createNiceMock(CloseableHttpResponse.class);
mockConditionalRequestBuilder = createNiceMock(ConditionalRequestBuilder.class);
mockConditionalRequest = createNiceMock(HttpRequest.class);
mockStatusLine = createNiceMock(StatusLine.class);
mockResponseProtocolCompliance = createNiceMock(ResponseProtocolCompliance.class);
mockRequestProtocolCompliance = createNiceMock(RequestProtocolCompliance.class);
mockStorage = createNiceMock(HttpCacheStorage.class);
config = CacheConfig.DEFAULT;
asyncValidator = new AsynchronousValidator(config);
// Canonical target host/route used by every test.
host = new HttpHost("foo.example.com", 80);
route = new HttpRoute(host);
request = HttpRequestWrapper.wrap(new BasicHttpRequest("GET", "/stuff",
HttpVersion.HTTP_1_1));
context = HttpCacheContext.create();
context.setTargetHost(host);
entry = HttpTestUtils.makeCacheEntry();
// Fully-mocked chain; individual tests may overwrite impl with a real-cache chain.
impl = createCachingExecChain(mockBackend, mockCache, mockValidityPolicy,
mockResponsePolicy, mockResponseGenerator, mockRequestPolicy, mockSuitabilityChecker,
mockConditionalRequestBuilder, mockResponseProtocolCompliance,
mockRequestProtocolCompliance, config, asyncValidator);
}
/**
 * Creates the caching exec chain under test from explicit collaborators.
 * Implemented by concrete subclasses so the same suite runs against different
 * chain constructions.
 */
public abstract ClientExecChain createCachingExecChain(ClientExecChain backend,
HttpCache responseCache, CacheValidityPolicy validityPolicy,
ResponseCachingPolicy responseCachingPolicy, CachedHttpResponseGenerator responseGenerator,
CacheableRequestPolicy cacheableRequestPolicy,
CachedResponseSuitabilityChecker suitabilityChecker,
ConditionalRequestBuilder conditionalRequestBuilder,
ResponseProtocolCompliance responseCompliance, RequestProtocolCompliance requestCompliance,
CacheConfig config, AsynchronousValidator asynchRevalidator);
/** Convenience overload: builds the chain from just a backend, cache and config. */
public abstract ClientExecChain createCachingExecChain(ClientExecChain backend,
HttpCache cache, CacheConfig config);
/**
 * EasyMock argument matcher for semantically-equivalent requests. Registers the
 * matcher with EasyMock and returns null as the placeholder argument, per the
 * EasyMock custom-matcher protocol.
 */
public static HttpRequestWrapper eqRequest(final HttpRequestWrapper in) {
EasyMock.reportMatcher(new RequestEquivalent(in));
return null;
}
/** Same matcher protocol as {@link #eqRequest}, for semantically-equivalent responses. */
public static <R extends HttpResponse> R eqResponse(final R in) {
EasyMock.reportMatcher(new ResponseEquivalent(in));
return null;
}
/**
 * Switches every collaborator mock into replay mode in one varargs call.
 */
protected void replayMocks() {
    replay(mockRequestPolicy, mockValidityPolicy, mockSuitabilityChecker,
        mockResponsePolicy, mockCacheEntry, mockResponseGenerator,
        mockBackend, mockCache, mockHandler, mockUriRequest,
        mockCachedResponse, mockConditionalRequestBuilder,
        mockConditionalRequest, mockStatusLine,
        mockResponseProtocolCompliance, mockRequestProtocolCompliance,
        mockStorage);
}
/**
 * Verifies every collaborator mock's recorded expectations in one varargs call.
 */
protected void verifyMocks() {
    verify(mockRequestPolicy, mockValidityPolicy, mockSuitabilityChecker,
        mockResponsePolicy, mockCacheEntry, mockResponseGenerator,
        mockBackend, mockCache, mockHandler, mockUriRequest,
        mockCachedResponse, mockConditionalRequestBuilder,
        mockConditionalRequest, mockStatusLine,
        mockResponseProtocolCompliance, mockRequestProtocolCompliance,
        mockStorage);
}
/**
 * A 200 response carrying max-age must be stored and reused: the backend is
 * expected to see only the first of two identical requests.
 */
@Test
public void testCacheableResponsesGoIntoCache() throws Exception {
    impl = createCachingExecChain(mockBackend, new BasicHttpCache(), CacheConfig.DEFAULT);
    final HttpRequestWrapper firstRequest = HttpRequestWrapper.wrap(HttpTestUtils.makeDefaultRequest());
    final HttpResponse cacheableResponse = HttpTestUtils.make200Response();
    cacheableResponse.setHeader("Cache-Control", "max-age=3600");
    backendExpectsAnyRequestAndReturn(cacheableResponse);
    final HttpRequestWrapper secondRequest = HttpRequestWrapper.wrap(HttpTestUtils.makeDefaultRequest());
    replayMocks();
    impl.execute(route, firstRequest, context, null);
    impl.execute(route, secondRequest, context, null);
    verifyMocks();
}
// A revalidation that yields an OLDER Date than the cached entry must not replace it:
// the third request should still see the newer entry's ETag.
@Test
public void testOlderCacheableResponsesDoNotGoIntoCache() throws Exception {
impl = createCachingExecChain(mockBackend, new BasicHttpCache(), CacheConfig.DEFAULT);
final Date now = new Date();
final Date fiveSecondsAgo = new Date(now.getTime() - 5 * 1000L);
final HttpRequestWrapper req1 = HttpRequestWrapper.wrap(HttpTestUtils.makeDefaultRequest());
final HttpResponse resp1 = HttpTestUtils.make200Response();
resp1.setHeader("Date", DateUtils.formatDate(now));
resp1.setHeader("Cache-Control", "max-age=3600");
resp1.setHeader("Etag", "\"new-etag\"");
backendExpectsAnyRequestAndReturn(resp1);
final HttpRequestWrapper req2 = HttpRequestWrapper.wrap(HttpTestUtils.makeDefaultRequest());
// no-cache forces the second request to the origin, which answers with an older Date.
req2.setHeader("Cache-Control", "no-cache");
final HttpResponse resp2 = HttpTestUtils.make200Response();
resp2.setHeader("ETag", "\"old-etag\"");
resp2.setHeader("Date", DateUtils.formatDate(fiveSecondsAgo));
resp2.setHeader("Cache-Control", "max-age=3600");
backendExpectsAnyRequestAndReturn(resp2);
final HttpRequestWrapper req3 = HttpRequestWrapper.wrap(HttpTestUtils.makeDefaultRequest());
replayMocks();
impl.execute(route, req1, context, null);
impl.execute(route, req2, context, null);
final HttpResponse result = impl.execute(route, req3, context, null);
verifyMocks();
assertEquals("\"new-etag\"", result.getFirstHeader("ETag").getValue());
}
// Converse case: a NEWER response must replace the existing entry, so the third
// request sees the fresh ETag.
@Test
public void testNewerCacheableResponsesReplaceExistingCacheEntry() throws Exception {
impl = createCachingExecChain(mockBackend, new BasicHttpCache(), CacheConfig.DEFAULT);
final Date now = new Date();
final Date fiveSecondsAgo = new Date(now.getTime() - 5 * 1000L);
final HttpRequestWrapper req1 = HttpRequestWrapper.wrap(HttpTestUtils.makeDefaultRequest());
final HttpResponse resp1 = HttpTestUtils.make200Response();
resp1.setHeader("Date", DateUtils.formatDate(fiveSecondsAgo));
resp1.setHeader("Cache-Control", "max-age=3600");
resp1.setHeader("Etag", "\"old-etag\"");
backendExpectsAnyRequestAndReturn(resp1);
final HttpRequestWrapper req2 = HttpRequestWrapper.wrap(HttpTestUtils.makeDefaultRequest());
// max-age=0 forces revalidation; the origin answers with a newer entry.
req2.setHeader("Cache-Control", "max-age=0");
final HttpResponse resp2 = HttpTestUtils.make200Response();
resp2.setHeader("ETag", "\"new-etag\"");
resp2.setHeader("Date", DateUtils.formatDate(now));
resp2.setHeader("Cache-Control", "max-age=3600");
backendExpectsAnyRequestAndReturn(resp2);
final HttpRequestWrapper req3 = HttpRequestWrapper.wrap(HttpTestUtils.makeDefaultRequest());
replayMocks();
impl.execute(route, req1, context, null);
impl.execute(route, req2, context, null);
final HttpResponse result = impl.execute(route, req3, context, null);
verifyMocks();
assertEquals("\"new-etag\"", result.getFirstHeader("ETag").getValue());
}
/**
 * Stubs the compliance check for the shared request: reports the given error,
 * or an empty error list when {@code error} is null (i.e. request is compliant).
 */
protected void requestIsFatallyNonCompliant(final RequestProtocolError error) {
    final List<RequestProtocolError> reported = new ArrayList<RequestProtocolError>();
    if (error != null) {
        reported.add(error);
    }
    expect(mockRequestProtocolCompliance.requestIsFatallyNonCompliant(eqRequest(request)))
        .andReturn(reported);
}
// Fully-mocked path: when the cache holds a suitable, non-stale entry the chain
// must answer from cache (no backend expectation is recorded).
@Test
public void testSuitableCacheEntryDoesNotCauseBackendRequest() throws Exception {
cacheInvalidatorWasCalled();
requestPolicyAllowsCaching(true);
getCacheEntryReturns(mockCacheEntry);
cacheEntrySuitable(true);
responseIsGeneratedFromCache();
requestIsFatallyNonCompliant(null);
entryHasStaleness(0L);
replayMocks();
final HttpResponse result = impl.execute(route, request, context, null);
verifyMocks();
Assert.assertSame(mockCachedResponse, result);
}
// A no-cache response must bypass storage (mockStorage never receives a put)
// yet still be returned semantically unchanged to the caller.
@Test
public void testNonCacheableResponseIsNotCachedAndIsReturnedAsIs() throws Exception {
final CacheConfig configDefault = CacheConfig.DEFAULT;
impl = createCachingExecChain(mockBackend, new BasicHttpCache(new HeapResourceFactory(),
mockStorage, configDefault), configDefault);
final HttpRequestWrapper req1 = HttpRequestWrapper.wrap(HttpTestUtils.makeDefaultRequest());
final HttpResponse resp1 = HttpTestUtils.make200Response();
resp1.setHeader("Cache-Control", "no-cache");
// Storage may be probed/invalidated, but only getEntry/removeEntry are allowed.
expect(mockStorage.getEntry(isA(String.class))).andReturn(null).anyTimes();
mockStorage.removeEntry(isA(String.class));
expectLastCall().anyTimes();
backendExpectsAnyRequestAndReturn(resp1);
replayMocks();
final HttpResponse result = impl.execute(route, req1, context, null);
verifyMocks();
assertTrue(HttpTestUtils.semanticallyTransparent(resp1, result));
}
// When the entry is usable, the response generator (not the backend) must produce
// the reply; verified purely through mock expectations.
@Test
public void testResponseIsGeneratedWhenCacheEntryIsUsable() throws Exception {
requestIsFatallyNonCompliant(null);
cacheInvalidatorWasCalled();
requestPolicyAllowsCaching(true);
cacheEntrySuitable(true);
getCacheEntryReturns(mockCacheEntry);
responseIsGeneratedFromCache();
entryHasStaleness(0L);
replayMocks();
impl.execute(route, request, context, null);
verifyMocks();
}
// A ClientProtocolException thrown while making the request compliant must
// propagate to the caller unchanged (same instance).
@Test
public void testNonCompliantRequestWrapsAndReThrowsProtocolException() throws Exception {
final ClientProtocolException expected = new ClientProtocolException("ouch");
requestIsFatallyNonCompliant(null);
mockRequestProtocolCompliance.makeRequestCompliant((HttpRequestWrapper) anyObject());
expectLastCall().andThrow(expected);
boolean gotException = false;
replayMocks();
try {
impl.execute(route, request, context, null);
} catch (final ClientProtocolException ex) {
Assert.assertSame(expected, ex);
gotException = true;
}
verifyMocks();
Assert.assertTrue(gotException);
}
// OPTIONS with Max-Forwards: 0 is answered by the cache module itself, so the
// context must record CACHE_MODULE_RESPONSE. No replay/verify: the nice-mock
// backend is never touched.
@Test
public void testSetsModuleGeneratedResponseContextForCacheOptionsResponse() throws Exception {
impl = createCachingExecChain(mockBackend, new BasicHttpCache(), CacheConfig.DEFAULT);
final HttpRequestWrapper req = HttpRequestWrapper.wrap(new BasicHttpRequest("OPTIONS", "*",
HttpVersion.HTTP_1_1));
req.setHeader("Max-Forwards", "0");
impl.execute(route, req, context, null);
Assert.assertEquals(CacheResponseStatus.CACHE_MODULE_RESPONSE,
context.getCacheResponseStatus());
}
// A fatally non-compliant request (Range + weak If-Range) is likewise answered by
// the module with an error response; context must record CACHE_MODULE_RESPONSE.
@Test
public void testSetsModuleGeneratedResponseContextForFatallyNoncompliantRequest()
throws Exception {
impl = createCachingExecChain(mockBackend, new BasicHttpCache(), CacheConfig.DEFAULT);
final HttpRequestWrapper req = HttpRequestWrapper.wrap(new HttpGet(
"http://foo.example.com/"));
req.setHeader("Range", "bytes=0-50");
req.setHeader("If-Range", "W/\"weak-etag\"");
impl.execute(route, req, context, null);
Assert.assertEquals(CacheResponseStatus.CACHE_MODULE_RESPONSE,
context.getCacheResponseStatus());
}
// The Via header added to the forwarded request must record the client's protocol
// version (HTTP/1.0 here), not the cache's.
@Test
public void testRecordsClientProtocolInViaHeaderIfRequestNotServableFromCache()
throws Exception {
impl = createCachingExecChain(mockBackend, new BasicHttpCache(), CacheConfig.DEFAULT);
final HttpRequestWrapper req = HttpRequestWrapper.wrap(new BasicHttpRequest("GET", "/",
HttpVersion.HTTP_1_0));
req.setHeader("Cache-Control", "no-cache");
final HttpResponse resp = new BasicHttpResponse(HttpVersion.HTTP_1_1,
HttpStatus.SC_NO_CONTENT, "No Content");
final Capture<HttpRequestWrapper> cap = new Capture<HttpRequestWrapper>();
backendCaptureRequestAndReturn(cap, resp);
replayMocks();
impl.execute(route, req, context, null);
verifyMocks();
final HttpRequest captured = cap.getValue();
final String via = captured.getFirstHeader("Via").getValue();
// Via protocol token may appear as "http/1.0" or just "1.0".
final String proto = via.split("\\s+")[0];
Assert.assertTrue("http/1.0".equalsIgnoreCase(proto) || "1.0".equalsIgnoreCase(proto));
}
// no-cache bypasses the cache, so the context must record CACHE_MISS.
@Test
public void testSetsCacheMissContextIfRequestNotServableFromCache() throws Exception {
impl = createCachingExecChain(mockBackend, new BasicHttpCache(), CacheConfig.DEFAULT);
final HttpRequestWrapper req = HttpRequestWrapper.wrap(new HttpGet(
"http://foo.example.com/"));
req.setHeader("Cache-Control", "no-cache");
final HttpResponse resp = new BasicHttpResponse(HttpVersion.HTTP_1_1,
HttpStatus.SC_NO_CONTENT, "No Content");
backendExpectsAnyRequestAndReturn(resp);
replayMocks();
impl.execute(route, req, context, null);
verifyMocks();
Assert.assertEquals(CacheResponseStatus.CACHE_MISS, context.getCacheResponseStatus());
}
// Even uncacheable traffic must gain a Via header on the response.
@Test
public void testSetsViaHeaderOnResponseIfRequestNotServableFromCache() throws Exception {
impl = createCachingExecChain(mockBackend, new BasicHttpCache(), CacheConfig.DEFAULT);
final HttpRequestWrapper req = HttpRequestWrapper.wrap(new HttpGet(
"http://foo.example.com/"));
req.setHeader("Cache-Control", "no-cache");
final HttpResponse resp = new BasicHttpResponse(HttpVersion.HTTP_1_1,
HttpStatus.SC_NO_CONTENT, "No Content");
backendExpectsAnyRequestAndReturn(resp);
replayMocks();
impl.execute(route, req, context, null);
verifyMocks();
final HttpResponse result = impl.execute(route, req, context, null);
verifyMocks();
Assert.assertNotNull(result.getFirstHeader("Via"));
}
// A plain cache miss must also carry a Via header on the returned response.
@Test
public void testSetsViaHeaderOnResponseForCacheMiss() throws Exception {
impl = createCachingExecChain(mockBackend, new BasicHttpCache(), CacheConfig.DEFAULT);
final HttpRequestWrapper req1 = HttpRequestWrapper.wrap(new HttpGet(
"http://foo.example.com/"));
final HttpResponse resp1 = new BasicHttpResponse(HttpVersion.HTTP_1_1, HttpStatus.SC_OK,
"OK");
resp1.setEntity(HttpTestUtils.makeBody(128));
resp1.setHeader("Content-Length", "128");
resp1.setHeader("ETag", "\"etag\"");
resp1.setHeader("Date", DateUtils.formatDate(new Date()));
resp1.setHeader("Cache-Control", "public, max-age=3600");
backendExpectsAnyRequestAndReturn(resp1);
replayMocks();
final HttpResponse result = impl.execute(route, req1, context, null);
verifyMocks();
Assert.assertNotNull(result.getFirstHeader("Via"));
}
// Second identical request is served from cache; context must record CACHE_HIT.
@Test
public void testSetsCacheHitContextIfRequestServedFromCache() throws Exception {
impl = createCachingExecChain(mockBackend, new BasicHttpCache(), CacheConfig.DEFAULT);
final HttpRequestWrapper req1 = HttpRequestWrapper.wrap(new HttpGet(
"http://foo.example.com/"));
final HttpRequestWrapper req2 = HttpRequestWrapper.wrap(new HttpGet(
"http://foo.example.com/"));
final HttpResponse resp1 = new BasicHttpResponse(HttpVersion.HTTP_1_1, HttpStatus.SC_OK,
"OK");
resp1.setEntity(HttpTestUtils.makeBody(128));
resp1.setHeader("Content-Length", "128");
resp1.setHeader("ETag", "\"etag\"");
resp1.setHeader("Date", DateUtils.formatDate(new Date()));
resp1.setHeader("Cache-Control", "public, max-age=3600");
backendExpectsAnyRequestAndReturn(resp1);
replayMocks();
impl.execute(route, req1, context, null);
impl.execute(route, req2, context, null);
verifyMocks();
Assert.assertEquals(CacheResponseStatus.CACHE_HIT, context.getCacheResponseStatus());
}
// A cache-hit response must also carry the Via header.
@Test
public void testSetsViaHeaderOnResponseIfRequestServedFromCache() throws Exception {
impl = createCachingExecChain(mockBackend, new BasicHttpCache(), CacheConfig.DEFAULT);
final HttpRequestWrapper req1 = HttpRequestWrapper.wrap(new HttpGet(
"http://foo.example.com/"));
final HttpRequestWrapper req2 = HttpRequestWrapper.wrap(new HttpGet(
"http://foo.example.com/"));
final HttpResponse resp1 = new BasicHttpResponse(HttpVersion.HTTP_1_1, HttpStatus.SC_OK,
"OK");
resp1.setEntity(HttpTestUtils.makeBody(128));
resp1.setHeader("Content-Length", "128");
resp1.setHeader("ETag", "\"etag\"");
resp1.setHeader("Date", DateUtils.formatDate(new Date()));
resp1.setHeader("Cache-Control", "public, max-age=3600");
backendExpectsAnyRequestAndReturn(resp1);
replayMocks();
impl.execute(route, req1, context, null);
final HttpResponse result = impl.execute(route, req2, context, null);
verifyMocks();
Assert.assertNotNull(result.getFirstHeader("Via"));
}
// If-Modified-Since matching the cached Last-Modified must yield a 304 from cache.
@Test
public void testReturns304ForIfModifiedSinceHeaderIfRequestServedFromCache() throws Exception {
final Date now = new Date();
final Date tenSecondsAgo = new Date(now.getTime() - 10 * 1000L);
impl = createCachingExecChain(mockBackend, new BasicHttpCache(), CacheConfig.DEFAULT);
final HttpRequestWrapper req1 = HttpRequestWrapper.wrap(new HttpGet(
"http://foo.example.com/"));
final HttpRequestWrapper req2 = HttpRequestWrapper.wrap(new HttpGet(
"http://foo.example.com/"));
req2.addHeader("If-Modified-Since", DateUtils.formatDate(now));
final HttpResponse resp1 = new BasicHttpResponse(HttpVersion.HTTP_1_1, HttpStatus.SC_OK,
"OK");
resp1.setEntity(HttpTestUtils.makeBody(128));
resp1.setHeader("Content-Length", "128");
resp1.setHeader("ETag", "\"etag\"");
resp1.setHeader("Date", DateUtils.formatDate(tenSecondsAgo));
resp1.setHeader("Cache-Control", "public, max-age=3600");
resp1.setHeader("Last-Modified", DateUtils.formatDate(tenSecondsAgo));
backendExpectsAnyRequestAndReturn(resp1);
replayMocks();
impl.execute(route, req1, context, null);
final HttpResponse result = impl.execute(route, req2, context, null);
verifyMocks();
Assert.assertEquals(HttpStatus.SC_NOT_MODIFIED, result.getStatusLine().getStatusCode());
}
// A cached 304 must be reused for an equivalent conditional request; the second
// backend expectation throws to prove the origin is not contacted again.
@Test
public void testReturns304ForIfModifiedSinceHeaderIf304ResponseInCache() throws Exception {
final Date now = new Date();
final Date oneHourAgo = new Date(now.getTime() - 3600 * 1000L);
final Date inTenMinutes = new Date(now.getTime() + 600 * 1000L);
impl = createCachingExecChain(mockBackend, new BasicHttpCache(), CacheConfig.DEFAULT);
final HttpRequestWrapper req1 = HttpRequestWrapper.wrap(new HttpGet(
"http://foo.example.com/"));
req1.addHeader("If-Modified-Since", DateUtils.formatDate(oneHourAgo));
final HttpRequestWrapper req2 = HttpRequestWrapper.wrap(new HttpGet(
"http://foo.example.com/"));
req2.addHeader("If-Modified-Since", DateUtils.formatDate(oneHourAgo));
final HttpResponse resp1 = new BasicHttpResponse(HttpVersion.HTTP_1_1,
HttpStatus.SC_NOT_MODIFIED, "Not modified");
resp1.setHeader("Date", DateUtils.formatDate(now));
resp1.setHeader("Cache-control", "max-age=600");
resp1.setHeader("Expires", DateUtils.formatDate(inTenMinutes));
expect(
mockBackend.execute(eq(route), isA(HttpRequestWrapper.class),
isA(HttpClientContext.class), (HttpExecutionAware) isNull())).andReturn(
Proxies.enhanceResponse(resp1)).once();
expect(
mockBackend.execute(eq(route), isA(HttpRequestWrapper.class),
isA(HttpClientContext.class), (HttpExecutionAware) isNull())).andThrow(
new AssertionFailedError("Should have reused cached 304 response")).anyTimes();
replayMocks();
impl.execute(route, req1, context, null);
final HttpResponse result = impl.execute(route, req2, context, null);
verifyMocks();
Assert.assertEquals(HttpStatus.SC_NOT_MODIFIED, result.getStatusLine().getStatusCode());
Assert.assertFalse(result.containsHeader("Last-Modified"));
}
// If-Modified-Since earlier than the resource's Last-Modified: the full 200 body
// must be returned, not a 304.
@Test
public void testReturns200ForIfModifiedSinceDateIsLess() throws Exception {
final Date now = new Date();
final Date tenSecondsAgo = new Date(now.getTime() - 10 * 1000L);
impl = createCachingExecChain(mockBackend, new BasicHttpCache(), CacheConfig.DEFAULT);
final HttpRequestWrapper req1 = HttpRequestWrapper.wrap(new HttpGet(
"http://foo.example.com/"));
final HttpRequestWrapper req2 = HttpRequestWrapper.wrap(new HttpGet(
"http://foo.example.com/"));
final HttpResponse resp1 = new BasicHttpResponse(HttpVersion.HTTP_1_1, HttpStatus.SC_OK,
"OK");
resp1.setEntity(HttpTestUtils.makeBody(128));
resp1.setHeader("Content-Length", "128");
resp1.setHeader("ETag", "\"etag\"");
resp1.setHeader("Date", DateUtils.formatDate(new Date()));
resp1.setHeader("Cache-Control", "public, max-age=3600");
resp1.setHeader("Last-Modified", DateUtils.formatDate(new Date()));
// The variant has been modified since this date
req2.addHeader("If-Modified-Since", DateUtils.formatDate(tenSecondsAgo));
final HttpResponse resp2 = HttpTestUtils.make200Response();
backendExpectsAnyRequestAndReturn(resp1);
backendExpectsAnyRequestAndReturn(resp2);
replayMocks();
impl.execute(route, req1, context, null);
final HttpResponse result = impl.execute(route, req2, context, null);
verifyMocks();
Assert.assertEquals(HttpStatus.SC_OK, result.getStatusLine().getStatusCode());
}
// An If-Modified-Since date in the future is invalid and must be ignored: expect 200.
@Test
public void testReturns200ForIfModifiedSinceDateIsInvalid() throws Exception {
final Date now = new Date();
final Date tenSecondsAfter = new Date(now.getTime() + 10 * 1000L);
impl = createCachingExecChain(mockBackend, new BasicHttpCache(), CacheConfig.DEFAULT);
final HttpRequestWrapper req1 = HttpRequestWrapper.wrap(new HttpGet(
"http://foo.example.com/"));
final HttpRequestWrapper req2 = HttpRequestWrapper.wrap(new HttpGet(
"http://foo.example.com/"));
final HttpResponse resp1 = new BasicHttpResponse(HttpVersion.HTTP_1_1, HttpStatus.SC_OK,
"OK");
resp1.setEntity(HttpTestUtils.makeBody(128));
resp1.setHeader("Content-Length", "128");
resp1.setHeader("ETag", "\"etag\"");
resp1.setHeader("Date", DateUtils.formatDate(new Date()));
resp1.setHeader("Cache-Control", "public, max-age=3600");
resp1.setHeader("Last-Modified", DateUtils.formatDate(new Date()));
// invalid date (date in the future)
req2.addHeader("If-Modified-Since", DateUtils.formatDate(tenSecondsAfter));
backendExpectsAnyRequestAndReturn(resp1).times(2);
replayMocks();
impl.execute(route, req1, context, null);
final HttpResponse result = impl.execute(route, req2, context, null);
verifyMocks();
Assert.assertEquals(HttpStatus.SC_OK, result.getStatusLine().getStatusCode());
}
// If-None-Match: * against a cached entry must produce a 304 from the cache.
@Test
public void testReturns304ForIfNoneMatchHeaderIfRequestServedFromCache() throws Exception {
impl = createCachingExecChain(mockBackend, new BasicHttpCache(), CacheConfig.DEFAULT);
final HttpRequestWrapper req1 = HttpRequestWrapper.wrap(new HttpGet(
"http://foo.example.com/"));
final HttpRequestWrapper req2 = HttpRequestWrapper.wrap(new HttpGet(
"http://foo.example.com/"));
req2.addHeader("If-None-Match", "*");
final HttpResponse resp1 = new BasicHttpResponse(HttpVersion.HTTP_1_1, HttpStatus.SC_OK,
"OK");
resp1.setEntity(HttpTestUtils.makeBody(128));
resp1.setHeader("Content-Length", "128");
resp1.setHeader("ETag", "\"etag\"");
resp1.setHeader("Date", DateUtils.formatDate(new Date()));
resp1.setHeader("Cache-Control", "public, max-age=3600");
backendExpectsAnyRequestAndReturn(resp1);
replayMocks();
impl.execute(route, req1, context, null);
final HttpResponse result = impl.execute(route, req2, context, null);
verifyMocks();
Assert.assertEquals(HttpStatus.SC_NOT_MODIFIED, result.getStatusLine().getStatusCode());
}
// If-None-Match with a non-matching ETag must fall through to the origin: expect 200.
@Test
public void testReturns200ForIfNoneMatchHeaderFails() throws Exception {
impl = createCachingExecChain(mockBackend, new BasicHttpCache(), CacheConfig.DEFAULT);
final HttpRequestWrapper req1 = HttpRequestWrapper.wrap(new HttpGet(
"http://foo.example.com/"));
final HttpRequestWrapper req2 = HttpRequestWrapper.wrap(new HttpGet(
"http://foo.example.com/"));
final HttpResponse resp1 = new BasicHttpResponse(HttpVersion.HTTP_1_1, HttpStatus.SC_OK,
"OK");
resp1.setEntity(HttpTestUtils.makeBody(128));
resp1.setHeader("Content-Length", "128");
resp1.setHeader("ETag", "\"etag\"");
resp1.setHeader("Date", DateUtils.formatDate(new Date()));
resp1.setHeader("Cache-Control", "public, max-age=3600");
req2.addHeader("If-None-Match", "\"abc\"");
final HttpResponse resp2 = HttpTestUtils.make200Response();
backendExpectsAnyRequestAndReturn(resp1);
backendExpectsAnyRequestAndReturn(resp2);
replayMocks();
impl.execute(route, req1, context, null);
final HttpResponse result = impl.execute(route, req2, context, null);
verifyMocks();
Assert.assertEquals(200, result.getStatusLine().getStatusCode());
}
// With both validators present, a matching If-None-Match governs: expect 304 from cache.
@Test
public void testReturns304ForIfNoneMatchHeaderAndIfModifiedSinceIfRequestServedFromCache()
throws Exception {
impl = createCachingExecChain(mockBackend, new BasicHttpCache(), CacheConfig.DEFAULT);
final Date now = new Date();
final Date tenSecondsAgo = new Date(now.getTime() - 10 * 1000L);
final HttpRequestWrapper req1 = HttpRequestWrapper.wrap(new HttpGet(
"http://foo.example.com/"));
final HttpRequestWrapper req2 = HttpRequestWrapper.wrap(new HttpGet(
"http://foo.example.com/"));
final HttpResponse resp1 = new BasicHttpResponse(HttpVersion.HTTP_1_1, HttpStatus.SC_OK,
"OK");
resp1.setEntity(HttpTestUtils.makeBody(128));
resp1.setHeader("Content-Length", "128");
resp1.setHeader("ETag", "\"etag\"");
resp1.setHeader("Date", DateUtils.formatDate(tenSecondsAgo));
resp1.setHeader("Cache-Control", "public, max-age=3600");
resp1.setHeader("Last-Modified", DateUtils.formatDate(new Date()));
req2.addHeader("If-None-Match", "*");
req2.addHeader("If-Modified-Since", DateUtils.formatDate(now));
backendExpectsAnyRequestAndReturn(resp1);
replayMocks();
impl.execute(route, req1, context, null);
final HttpResponse result = impl.execute(route, req2, context, null);
verifyMocks();
Assert.assertEquals(HttpStatus.SC_NOT_MODIFIED, result.getStatusLine().getStatusCode());
}
// Per RFC 2616 sec 14.26: when If-None-Match does not match, If-Modified-Since is
// ignored and the request goes to the origin: expect 200.
@Test
public void testReturns200ForIfNoneMatchHeaderFailsIfModifiedSinceIgnored() throws Exception {
impl = createCachingExecChain(mockBackend, new BasicHttpCache(), CacheConfig.DEFAULT);
final Date now = new Date();
final Date tenSecondsAgo = new Date(now.getTime() - 10 * 1000L);
final HttpRequestWrapper req1 = HttpRequestWrapper.wrap(new HttpGet(
"http://foo.example.com/"));
final HttpRequestWrapper req2 = HttpRequestWrapper.wrap(new HttpGet(
"http://foo.example.com/"));
req2.addHeader("If-None-Match", "\"abc\"");
req2.addHeader("If-Modified-Since", DateUtils.formatDate(now));
final HttpResponse resp1 = new BasicHttpResponse(HttpVersion.HTTP_1_1, HttpStatus.SC_OK,
"OK");
resp1.setEntity(HttpTestUtils.makeBody(128));
resp1.setHeader("Content-Length", "128");
resp1.setHeader("ETag", "\"etag\"");
resp1.setHeader("Date", DateUtils.formatDate(tenSecondsAgo));
resp1.setHeader("Cache-Control", "public, max-age=3600");
resp1.setHeader("Last-Modified", DateUtils.formatDate(tenSecondsAgo));
backendExpectsAnyRequestAndReturn(resp1);
backendExpectsAnyRequestAndReturn(resp1);
replayMocks();
impl.execute(route, req1, context, null);
final HttpResponse result = impl.execute(route, req2, context, null);
verifyMocks();
Assert.assertEquals(200, result.getStatusLine().getStatusCode());
}
/**
 * In a shared cache, a cached OPTIONS response carrying an Authorization header
 * must not be served to a subsequent GET; the GET must reach the origin and get
 * its own 200 response.
 *
 * Fix: the original built {@code resp2} but then set the GET response's entity
 * and headers on {@code resp1} (copy/paste slip), leaving {@code resp2} bare and
 * overwriting the OPTIONS headers already set on {@code resp1}. The second batch
 * now goes on {@code resp2} as intended.
 */
@Test
public void testReturns200ForOptionsFollowedByGetIfAuthorizationHeaderAndSharedCache()
    throws Exception {
    impl = createCachingExecChain(mockBackend, new BasicHttpCache(), CacheConfig.custom()
        .setSharedCache(true).build());
    final Date now = new Date();
    final HttpRequestWrapper req1 = HttpRequestWrapper.wrap(new HttpOptions(
        "http://foo.example.com/"));
    req1.setHeader("Authorization", "Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ==");
    final HttpRequestWrapper req2 = HttpRequestWrapper.wrap(new HttpGet(
        "http://foo.example.com/"));
    req2.setHeader("Authorization", "Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ==");
    final HttpResponse resp1 = new BasicHttpResponse(HttpVersion.HTTP_1_1,
        HttpStatus.SC_NO_CONTENT, "No Content");
    resp1.setHeader("Content-Length", "0");
    resp1.setHeader("ETag", "\"options-etag\"");
    resp1.setHeader("Date", DateUtils.formatDate(now));
    resp1.setHeader("Cache-Control", "public, max-age=3600");
    resp1.setHeader("Last-Modified", DateUtils.formatDate(now));
    final HttpResponse resp2 = new BasicHttpResponse(HttpVersion.HTTP_1_1, HttpStatus.SC_OK,
        "OK");
    resp2.setEntity(HttpTestUtils.makeBody(128));
    resp2.setHeader("Content-Length", "128");
    resp2.setHeader("ETag", "\"get-etag\"");
    resp2.setHeader("Date", DateUtils.formatDate(now));
    resp2.setHeader("Cache-Control", "public, max-age=3600");
    resp2.setHeader("Last-Modified", DateUtils.formatDate(now));
    backendExpectsAnyRequestAndReturn(resp1);
    backendExpectsAnyRequestAndReturn(resp2);
    replayMocks();
    impl.execute(route, req1, context, null);
    final HttpResponse result = impl.execute(route, req2, context, null);
    verifyMocks();
    Assert.assertEquals(200, result.getStatusLine().getStatusCode());
}
// Both responses are dated 10s in the past with max-age=5, so the cached entry
// is stale on the second execution; a successful revalidation against the
// origin must record CacheResponseStatus.VALIDATED in the context.
@Test
public void testSetsValidatedContextIfRequestWasSuccessfullyValidated() throws Exception {
final Date now = new Date();
final Date tenSecondsAgo = new Date(now.getTime() - 10 * 1000L);
impl = createCachingExecChain(mockBackend, new BasicHttpCache(), CacheConfig.DEFAULT);
final HttpRequestWrapper req1 = HttpRequestWrapper.wrap(new HttpGet(
"http://foo.example.com/"));
final HttpRequestWrapper req2 = HttpRequestWrapper.wrap(new HttpGet(
"http://foo.example.com/"));
final HttpResponse resp1 = new BasicHttpResponse(HttpVersion.HTTP_1_1, HttpStatus.SC_OK,
"OK");
resp1.setEntity(HttpTestUtils.makeBody(128));
resp1.setHeader("Content-Length", "128");
resp1.setHeader("ETag", "\"etag\"");
resp1.setHeader("Date", DateUtils.formatDate(tenSecondsAgo));
// Already stale when req2 arrives (age ~10s > max-age=5) => revalidation.
resp1.setHeader("Cache-Control", "public, max-age=5");
final HttpResponse resp2 = new BasicHttpResponse(HttpVersion.HTTP_1_1, HttpStatus.SC_OK,
"OK");
resp2.setEntity(HttpTestUtils.makeBody(128));
resp2.setHeader("Content-Length", "128");
resp2.setHeader("ETag", "\"etag\"");
resp2.setHeader("Date", DateUtils.formatDate(tenSecondsAgo));
resp2.setHeader("Cache-Control", "public, max-age=5");
backendExpectsAnyRequestAndReturn(resp1);
backendExpectsAnyRequestAndReturn(resp2);
replayMocks();
impl.execute(route, req1, context, null);
impl.execute(route, req2, context, null);
verifyMocks();
Assert.assertEquals(CacheResponseStatus.VALIDATED, context.getCacheResponseStatus());
}
// Same stale-entry-revalidated-at-origin scenario as above, but asserts that
// the response handed back after a successful validation carries a Via header
// (added by the caching layer as an intermediary).
@Test
public void testSetsViaHeaderIfRequestWasSuccessfullyValidated() throws Exception {
final Date now = new Date();
final Date tenSecondsAgo = new Date(now.getTime() - 10 * 1000L);
impl = createCachingExecChain(mockBackend, new BasicHttpCache(), CacheConfig.DEFAULT);
final HttpRequestWrapper req1 = HttpRequestWrapper.wrap(new HttpGet(
"http://foo.example.com/"));
final HttpRequestWrapper req2 = HttpRequestWrapper.wrap(new HttpGet(
"http://foo.example.com/"));
final HttpResponse resp1 = new BasicHttpResponse(HttpVersion.HTTP_1_1, HttpStatus.SC_OK,
"OK");
resp1.setEntity(HttpTestUtils.makeBody(128));
resp1.setHeader("Content-Length", "128");
resp1.setHeader("ETag", "\"etag\"");
resp1.setHeader("Date", DateUtils.formatDate(tenSecondsAgo));
// Stale on the second request (age ~10s > max-age=5) => forces revalidation.
resp1.setHeader("Cache-Control", "public, max-age=5");
final HttpResponse resp2 = new BasicHttpResponse(HttpVersion.HTTP_1_1, HttpStatus.SC_OK,
"OK");
resp2.setEntity(HttpTestUtils.makeBody(128));
resp2.setHeader("Content-Length", "128");
resp2.setHeader("ETag", "\"etag\"");
resp2.setHeader("Date", DateUtils.formatDate(tenSecondsAgo));
resp2.setHeader("Cache-Control", "public, max-age=5");
backendExpectsAnyRequestAndReturn(resp1);
backendExpectsAnyRequestAndReturn(resp2);
replayMocks();
impl.execute(route, req1, context, null);
final HttpResponse result = impl.execute(route, req2, context, null);
verifyMocks();
Assert.assertNotNull(result.getFirstHeader("Via"));
}
// A stale entry marked must-revalidate cannot be served when revalidation
// fails (backend throws IOException); the cache module generates the response
// itself and must record CacheResponseStatus.CACHE_MODULE_RESPONSE.
@Test
public void testSetsModuleResponseContextIfValidationRequiredButFailed() throws Exception {
final Date now = new Date();
final Date tenSecondsAgo = new Date(now.getTime() - 10 * 1000L);
impl = createCachingExecChain(mockBackend, new BasicHttpCache(), CacheConfig.DEFAULT);
final HttpRequestWrapper req1 = HttpRequestWrapper.wrap(new HttpGet(
"http://foo.example.com/"));
final HttpRequestWrapper req2 = HttpRequestWrapper.wrap(new HttpGet(
"http://foo.example.com/"));
final HttpResponse resp1 = new BasicHttpResponse(HttpVersion.HTTP_1_1, HttpStatus.SC_OK,
"OK");
resp1.setEntity(HttpTestUtils.makeBody(128));
resp1.setHeader("Content-Length", "128");
resp1.setHeader("ETag", "\"etag\"");
resp1.setHeader("Date", DateUtils.formatDate(tenSecondsAgo));
// must-revalidate forbids serving the stale entry when the origin is unreachable.
resp1.setHeader("Cache-Control", "public, max-age=5, must-revalidate");
backendExpectsAnyRequestAndReturn(resp1);
backendExpectsAnyRequestAndThrows(new IOException());
replayMocks();
impl.execute(route, req1, context, null);
impl.execute(route, req2, context, null);
verifyMocks();
Assert.assertEquals(CacheResponseStatus.CACHE_MODULE_RESPONSE,
context.getCacheResponseStatus());
}
// Without must-revalidate, a failed revalidation (backend IOException) lets
// the cache fall back to serving the stale entry, so the context records
// CACHE_HIT rather than a module-generated error response.
@Test
public void testSetsModuleResponseContextIfValidationFailsButNotRequired() throws Exception {
final Date now = new Date();
final Date tenSecondsAgo = new Date(now.getTime() - 10 * 1000L);
impl = createCachingExecChain(mockBackend, new BasicHttpCache(), CacheConfig.DEFAULT);
final HttpRequestWrapper req1 = HttpRequestWrapper.wrap(new HttpGet(
"http://foo.example.com/"));
final HttpRequestWrapper req2 = HttpRequestWrapper.wrap(new HttpGet(
"http://foo.example.com/"));
final HttpResponse resp1 = new BasicHttpResponse(HttpVersion.HTTP_1_1, HttpStatus.SC_OK,
"OK");
resp1.setEntity(HttpTestUtils.makeBody(128));
resp1.setHeader("Content-Length", "128");
resp1.setHeader("ETag", "\"etag\"");
resp1.setHeader("Date", DateUtils.formatDate(tenSecondsAgo));
// No must-revalidate here, unlike the companion test above.
resp1.setHeader("Cache-Control", "public, max-age=5");
backendExpectsAnyRequestAndReturn(resp1);
backendExpectsAnyRequestAndThrows(new IOException());
replayMocks();
impl.execute(route, req1, context, null);
impl.execute(route, req2, context, null);
verifyMocks();
Assert.assertEquals(CacheResponseStatus.CACHE_HIT, context.getCacheResponseStatus());
}
// Same fallback-to-stale-entry scenario as the previous test, but asserts the
// served response carries a Via header identifying the cache as intermediary.
@Test
public void testSetViaHeaderIfValidationFailsButNotRequired() throws Exception {
final Date now = new Date();
final Date tenSecondsAgo = new Date(now.getTime() - 10 * 1000L);
impl = createCachingExecChain(mockBackend, new BasicHttpCache(), CacheConfig.DEFAULT);
final HttpRequestWrapper req1 = HttpRequestWrapper.wrap(new HttpGet(
"http://foo.example.com/"));
final HttpRequestWrapper req2 = HttpRequestWrapper.wrap(new HttpGet(
"http://foo.example.com/"));
final HttpResponse resp1 = new BasicHttpResponse(HttpVersion.HTTP_1_1, HttpStatus.SC_OK,
"OK");
resp1.setEntity(HttpTestUtils.makeBody(128));
resp1.setHeader("Content-Length", "128");
resp1.setHeader("ETag", "\"etag\"");
resp1.setHeader("Date", DateUtils.formatDate(tenSecondsAgo));
resp1.setHeader("Cache-Control", "public, max-age=5");
backendExpectsAnyRequestAndReturn(resp1);
// Revalidation attempt for req2 fails with an I/O error.
backendExpectsAnyRequestAndThrows(new IOException());
replayMocks();
impl.execute(route, req1, context, null);
final HttpResponse result = impl.execute(route, req2, context, null);
verifyMocks();
Assert.assertNotNull(result.getFirstHeader("Via"));
}
// A client-supplied If-None-Match conditional that has to be forwarded to the
// origin (stale entry) and matches there must be answered with the origin's 304.
@Test
public void testReturns304ForIfNoneMatchPassesIfRequestServedFromOrigin() throws Exception {
final Date now = new Date();
final Date tenSecondsAgo = new Date(now.getTime() - 10 * 1000L);
impl = createCachingExecChain(mockBackend, new BasicHttpCache(), CacheConfig.DEFAULT);
final HttpRequestWrapper req1 = HttpRequestWrapper.wrap(new HttpGet(
"http://foo.example.com/"));
final HttpRequestWrapper req2 = HttpRequestWrapper.wrap(new HttpGet(
"http://foo.example.com/"));
final HttpResponse resp1 = new BasicHttpResponse(HttpVersion.HTTP_1_1, HttpStatus.SC_OK,
"OK");
resp1.setEntity(HttpTestUtils.makeBody(128));
resp1.setHeader("Content-Length", "128");
resp1.setHeader("ETag", "\"etag\"");
resp1.setHeader("Date", DateUtils.formatDate(tenSecondsAgo));
// Stale by the time req2 arrives, so req2 goes to the origin.
resp1.setHeader("Cache-Control", "public, max-age=5");
req2.addHeader("If-None-Match", "\"etag\"");
final HttpResponse resp2 = new BasicHttpResponse(HttpVersion.HTTP_1_1,
HttpStatus.SC_NOT_MODIFIED, "Not Modified");
resp2.setHeader("ETag", "\"etag\"");
resp2.setHeader("Date", DateUtils.formatDate(now));
resp2.setHeader("Cache-Control", "public, max-age=5");
backendExpectsAnyRequestAndReturn(resp1);
backendExpectsAnyRequestAndReturn(resp2);
replayMocks();
impl.execute(route, req1, context, null);
final HttpResponse result = impl.execute(route, req2, context, null);
verifyMocks();
Assert.assertEquals(HttpStatus.SC_NOT_MODIFIED, result.getStatusLine().getStatusCode());
}
// A client If-None-Match conditional forwarded to the origin that does NOT
// match (the resource changed, new ETag) must be answered with the full 200.
@Test
public void testReturns200ForIfNoneMatchFailsIfRequestServedFromOrigin() throws Exception {
final Date now = new Date();
final Date tenSecondsAgo = new Date(now.getTime() - 10 * 1000L);
impl = createCachingExecChain(mockBackend, new BasicHttpCache(), CacheConfig.DEFAULT);
final HttpRequestWrapper req1 = HttpRequestWrapper.wrap(new HttpGet(
"http://foo.example.com/"));
final HttpRequestWrapper req2 = HttpRequestWrapper.wrap(new HttpGet(
"http://foo.example.com/"));
final HttpResponse resp1 = new BasicHttpResponse(HttpVersion.HTTP_1_1, HttpStatus.SC_OK,
"OK");
resp1.setEntity(HttpTestUtils.makeBody(128));
resp1.setHeader("Content-Length", "128");
resp1.setHeader("ETag", "\"etag\"");
resp1.setHeader("Date", DateUtils.formatDate(tenSecondsAgo));
resp1.setHeader("Cache-Control", "public, max-age=5");
req2.addHeader("If-None-Match", "\"etag\"");
final HttpResponse resp2 = new BasicHttpResponse(HttpVersion.HTTP_1_1, HttpStatus.SC_OK,
"OK");
resp2.setEntity(HttpTestUtils.makeBody(128));
resp2.setHeader("Content-Length", "128");
// Origin reports a different entity tag, so the conditional fails => 200.
resp2.setHeader("ETag", "\"newetag\"");
resp2.setHeader("Date", DateUtils.formatDate(tenSecondsAgo));
resp2.setHeader("Cache-Control", "public, max-age=5");
backendExpectsAnyRequestAndReturn(resp1);
backendExpectsAnyRequestAndReturn(resp2);
replayMocks();
impl.execute(route, req1, context, null);
final HttpResponse result = impl.execute(route, req2, context, null);
verifyMocks();
Assert.assertEquals(HttpStatus.SC_OK, result.getStatusLine().getStatusCode());
}
// A client If-Modified-Since conditional forwarded to the origin (stale entry)
// that passes there must be relayed to the caller as the origin's 304.
@Test
public void testReturns304ForIfModifiedSincePassesIfRequestServedFromOrigin() throws Exception {
    impl = createCachingExecChain(mockBackend, new BasicHttpCache(), CacheConfig.DEFAULT);
    final Date now = new Date();
    final Date tenSecondsAgo = new Date(now.getTime() - 10 * 1000L);
    final HttpRequestWrapper req1 = HttpRequestWrapper.wrap(new HttpGet(
            "http://foo.example.com/"));
    final HttpRequestWrapper req2 = HttpRequestWrapper.wrap(new HttpGet(
            "http://foo.example.com/"));
    final HttpResponse resp1 = new BasicHttpResponse(HttpVersion.HTTP_1_1, HttpStatus.SC_OK,
            "OK");
    resp1.setEntity(HttpTestUtils.makeBody(128));
    resp1.setHeader("Content-Length", "128");
    resp1.setHeader("ETag", "\"etag\"");
    resp1.setHeader("Date", DateUtils.formatDate(tenSecondsAgo));
    resp1.setHeader("Last-Modified", DateUtils.formatDate(tenSecondsAgo));
    resp1.setHeader("Cache-Control", "public, max-age=5");
    req2.addHeader("If-Modified-Since", DateUtils.formatDate(tenSecondsAgo));
    final HttpResponse resp2 = new BasicHttpResponse(HttpVersion.HTTP_1_1,
            HttpStatus.SC_NOT_MODIFIED, "Not Modified");
    resp2.setHeader("ETag", "\"etag\"");
    resp2.setHeader("Date", DateUtils.formatDate(tenSecondsAgo));
    // BUGFIX: this line previously called resp1.setHeader(...) — a copy-paste
    // slip that redundantly re-set resp1's Last-Modified instead of putting it
    // on the 304 reply resp2, which this setup block is configuring.
    resp2.setHeader("Last-Modified", DateUtils.formatDate(tenSecondsAgo));
    resp2.setHeader("Cache-Control", "public, max-age=5");
    backendExpectsAnyRequestAndReturn(resp1);
    backendExpectsAnyRequestAndReturn(resp2);
    replayMocks();
    impl.execute(route, req1, context, null);
    final HttpResponse result = impl.execute(route, req2, context, null);
    verifyMocks();
    Assert.assertEquals(HttpStatus.SC_NOT_MODIFIED, result.getStatusLine().getStatusCode());
}
// A client If-Modified-Since conditional forwarded to the origin that fails
// (resource modified since the given date) must be answered with the full 200.
@Test
public void testReturns200ForIfModifiedSinceFailsIfRequestServedFromOrigin() throws Exception {
    impl = createCachingExecChain(mockBackend, new BasicHttpCache(), CacheConfig.DEFAULT);
    final Date now = new Date();
    final Date tenSecondsAgo = new Date(now.getTime() - 10 * 1000L);
    final HttpRequestWrapper req1 = HttpRequestWrapper.wrap(new HttpGet(
            "http://foo.example.com/"));
    final HttpRequestWrapper req2 = HttpRequestWrapper.wrap(new HttpGet(
            "http://foo.example.com/"));
    final HttpResponse resp1 = new BasicHttpResponse(HttpVersion.HTTP_1_1, HttpStatus.SC_OK,
            "OK");
    resp1.setEntity(HttpTestUtils.makeBody(128));
    resp1.setHeader("Content-Length", "128");
    resp1.setHeader("ETag", "\"etag\"");
    resp1.setHeader("Date", DateUtils.formatDate(tenSecondsAgo));
    resp1.setHeader("Last-Modified", DateUtils.formatDate(tenSecondsAgo));
    resp1.setHeader("Cache-Control", "public, max-age=5");
    req2.addHeader("If-Modified-Since", DateUtils.formatDate(tenSecondsAgo));
    final HttpResponse resp2 = new BasicHttpResponse(HttpVersion.HTTP_1_1, HttpStatus.SC_OK,
            "OK");
    resp2.setEntity(HttpTestUtils.makeBody(128));
    resp2.setHeader("Content-Length", "128");
    resp2.setHeader("ETag", "\"newetag\"");
    resp2.setHeader("Date", DateUtils.formatDate(now));
    // BUGFIX: previously resp1.setHeader(...) — copy-paste slip; the "modified
    // just now" Last-Modified belongs on resp2, the origin's fresh 200 reply.
    resp2.setHeader("Last-Modified", DateUtils.formatDate(now));
    resp2.setHeader("Cache-Control", "public, max-age=5");
    backendExpectsAnyRequestAndReturn(resp1);
    backendExpectsAnyRequestAndReturn(resp2);
    replayMocks();
    impl.execute(route, req1, context, null);
    final HttpResponse result = impl.execute(route, req2, context, null);
    verifyMocks();
    Assert.assertEquals(HttpStatus.SC_OK, result.getStatusLine().getStatusCode());
}
// Content-negotiation (Vary: Accept-Encoding) scenario: two variants get
// cached; a third request acceptable to both causes a conditional request
// listing both ETags, and the origin's full 200 must be passed through.
// NOTE(review): req2Server/req3Server model the conditional requests the cache
// is expected to send upstream but are not directly asserted against here.
@Test
public void testVariantMissServerIfReturns304CacheReturns200() throws Exception {
impl = createCachingExecChain(mockBackend, new BasicHttpCache(), CacheConfig.DEFAULT);
final Date now = new Date();
final HttpRequestWrapper req1 = HttpRequestWrapper.wrap(new HttpGet(
"http://foo.example.com"));
req1.addHeader("Accept-Encoding", "gzip");
final HttpResponse resp1 = new BasicHttpResponse(HttpVersion.HTTP_1_1, HttpStatus.SC_OK,
"OK");
resp1.setEntity(HttpTestUtils.makeBody(128));
resp1.setHeader("Content-Length", "128");
resp1.setHeader("Etag", "\"gzip_etag\"");
resp1.setHeader("Date", DateUtils.formatDate(now));
resp1.setHeader("Vary", "Accept-Encoding");
resp1.setHeader("Cache-Control", "public, max-age=3600");
final HttpRequestWrapper req2 = HttpRequestWrapper.wrap(new HttpGet(
"http://foo.example.com"));
req2.addHeader("Accept-Encoding", "deflate");
final HttpRequestWrapper req2Server = HttpRequestWrapper.wrap(new HttpGet(
"http://foo.example.com"));
req2Server.addHeader("Accept-Encoding", "deflate");
req2Server.addHeader("If-None-Match", "\"gzip_etag\"");
final HttpResponse resp2 = new BasicHttpResponse(HttpVersion.HTTP_1_1, HttpStatus.SC_OK,
"OK");
resp2.setEntity(HttpTestUtils.makeBody(128));
resp2.setHeader("Content-Length", "128");
resp2.setHeader("Etag", "\"deflate_etag\"");
resp2.setHeader("Date", DateUtils.formatDate(now));
resp2.setHeader("Vary", "Accept-Encoding");
resp2.setHeader("Cache-Control", "public, max-age=3600");
final HttpRequestWrapper req3 = HttpRequestWrapper.wrap(new HttpGet(
"http://foo.example.com"));
req3.addHeader("Accept-Encoding", "gzip,deflate");
final HttpRequestWrapper req3Server = HttpRequestWrapper.wrap(new HttpGet(
"http://foo.example.com"));
req3Server.addHeader("Accept-Encoding", "gzip,deflate");
req3Server.addHeader("If-None-Match", "\"gzip_etag\",\"deflate_etag\"");
final HttpResponse resp3 = new BasicHttpResponse(HttpVersion.HTTP_1_1, HttpStatus.SC_OK,
"OK");
resp3.setEntity(HttpTestUtils.makeBody(128));
resp3.setHeader("Content-Length", "128");
resp3.setHeader("Etag", "\"gzip_etag\"");
resp3.setHeader("Date", DateUtils.formatDate(now));
resp3.setHeader("Vary", "Accept-Encoding");
resp3.setHeader("Cache-Control", "public, max-age=3600");
backendExpectsAnyRequestAndReturn(resp1);
backendExpectsAnyRequestAndReturn(resp2);
backendExpectsAnyRequestAndReturn(resp3);
replayMocks();
final HttpResponse result1 = impl.execute(route, req1, context, null);
final HttpResponse result2 = impl.execute(route, req2, context, null);
final HttpResponse result3 = impl.execute(route, req3, context, null);
verifyMocks();
Assert.assertEquals(HttpStatus.SC_OK, result1.getStatusLine().getStatusCode());
Assert.assertEquals(HttpStatus.SC_OK, result2.getStatusLine().getStatusCode());
Assert.assertEquals(HttpStatus.SC_OK, result3.getStatusLine().getStatusCode());
}
// Variant scenario where the CLIENT itself sends If-None-Match: after two
// variants are cached, a conditional request matching a cached variant that
// the origin answers with 304 must yield 304 to the caller as well.
// NOTE(review): req2Server/req4Server model the expected upstream conditional
// requests but are not directly asserted against here.
@Test
public void testVariantsMissServerReturns304CacheReturns304() throws Exception {
impl = createCachingExecChain(mockBackend, new BasicHttpCache(), CacheConfig.DEFAULT);
final Date now = new Date();
final HttpRequestWrapper req1 = HttpRequestWrapper.wrap(new HttpGet(
"http://foo.example.com"));
req1.addHeader("Accept-Encoding", "gzip");
final HttpResponse resp1 = new BasicHttpResponse(HttpVersion.HTTP_1_1, HttpStatus.SC_OK,
"OK");
resp1.setEntity(HttpTestUtils.makeBody(128));
resp1.setHeader("Content-Length", "128");
resp1.setHeader("Etag", "\"gzip_etag\"");
resp1.setHeader("Date", DateUtils.formatDate(now));
resp1.setHeader("Vary", "Accept-Encoding");
resp1.setHeader("Cache-Control", "public, max-age=3600");
final HttpRequestWrapper req2 = HttpRequestWrapper.wrap(new HttpGet(
"http://foo.example.com"));
req2.addHeader("Accept-Encoding", "deflate");
final HttpRequestWrapper req2Server = HttpRequestWrapper.wrap(new HttpGet(
"http://foo.example.com"));
req2Server.addHeader("Accept-Encoding", "deflate");
req2Server.addHeader("If-None-Match", "\"gzip_etag\"");
final HttpResponse resp2 = new BasicHttpResponse(HttpVersion.HTTP_1_1, HttpStatus.SC_OK,
"OK");
resp2.setEntity(HttpTestUtils.makeBody(128));
resp2.setHeader("Content-Length", "128");
resp2.setHeader("Etag", "\"deflate_etag\"");
resp2.setHeader("Date", DateUtils.formatDate(now));
resp2.setHeader("Vary", "Accept-Encoding");
resp2.setHeader("Cache-Control", "public, max-age=3600");
final HttpRequestWrapper req4 = HttpRequestWrapper.wrap(new HttpGet(
"http://foo.example.com"));
req4.addHeader("Accept-Encoding", "gzip,identity");
// Client-supplied conditional: matches the cached gzip variant's ETag.
req4.addHeader("If-None-Match", "\"gzip_etag\"");
final HttpRequestWrapper req4Server = HttpRequestWrapper.wrap(new HttpGet(
"http://foo.example.com"));
req4Server.addHeader("Accept-Encoding", "gzip,identity");
req4Server.addHeader("If-None-Match", "\"gzip_etag\"");
final HttpResponse resp4 = new BasicHttpResponse(HttpVersion.HTTP_1_1,
HttpStatus.SC_NOT_MODIFIED, "Not Modified");
resp4.setHeader("Etag", "\"gzip_etag\"");
resp4.setHeader("Date", DateUtils.formatDate(now));
resp4.setHeader("Vary", "Accept-Encoding");
resp4.setHeader("Cache-Control", "public, max-age=3600");
backendExpectsAnyRequestAndReturn(resp1);
backendExpectsAnyRequestAndReturn(resp2);
backendExpectsAnyRequestAndReturn(resp4);
replayMocks();
final HttpResponse result1 = impl.execute(route, req1, context, null);
final HttpResponse result2 = impl.execute(route, req2, context, null);
final HttpResponse result4 = impl.execute(route, req4, context, null);
verifyMocks();
Assert.assertEquals(HttpStatus.SC_OK, result1.getStatusLine().getStatusCode());
Assert.assertEquals(HttpStatus.SC_OK, result2.getStatusLine().getStatusCode());
Assert.assertEquals(HttpStatus.SC_NOT_MODIFIED, result4.getStatusLine().getStatusCode());
}
// A SocketTimeoutException raised while the caller consumes the response body
// must propagate to the caller — the caching layer must not swallow it.
@Test
public void testSocketTimeoutExceptionIsNotSilentlyCatched() throws Exception {
impl = createCachingExecChain(mockBackend, new BasicHttpCache(), CacheConfig.DEFAULT);
final Date now = new Date();
final HttpRequestWrapper req1 = HttpRequestWrapper.wrap(new HttpGet(
"http://foo.example.com"));
final HttpResponse resp1 = new BasicHttpResponse(HttpVersion.HTTP_1_1, HttpStatus.SC_OK,
"OK");
// Entity whose stream times out on every read (and errors after close),
// simulating a stalled origin connection during body consumption.
resp1.setEntity(new InputStreamEntity(new InputStream() {
private boolean closed = false;
@Override
public void close() throws IOException {
closed = true;
}
@Override
public int read() throws IOException {
if (closed) {
throw new SocketException("Socket closed");
}
throw new SocketTimeoutException("Read timed out");
}
}, 128));
resp1.setHeader("Date", DateUtils.formatDate(now));
backendExpectsAnyRequestAndReturn(resp1);
replayMocks();
try {
final HttpResponse result1 = impl.execute(route, req1, context, null);
EntityUtils.toString(result1.getEntity());
Assert.fail("We should have had a SocketTimeoutException");
} catch (final SocketTimeoutException e) {
// expected: the timeout must reach the caller
}
verifyMocks();
}
// The cache configuration under test must report itself as a shared cache.
@Test
public void testIsSharedCache() {
    final boolean shared = config.isSharedCache();
    Assert.assertTrue(shared);
}
// If every cache storage operation throws IOException, the exec chain must
// degrade gracefully to a pass-through: the request goes to the backend and
// the backend's response is returned unchanged.
@Test
public void testTreatsCacheIOExceptionsAsCacheMiss() throws Exception {
impl = createCachingExecChain(mockBackend, mockCache, CacheConfig.DEFAULT);
final CloseableHttpResponse resp = Proxies.enhanceResponse(HttpTestUtils.make200Response());
// Every cache interaction below is stubbed to fail (or be a no-op).
mockCache.flushInvalidatedCacheEntriesFor(host, request);
expectLastCall().andThrow(new IOException()).anyTimes();
mockCache.flushInvalidatedCacheEntriesFor(isA(HttpHost.class), isA(HttpRequest.class),
isA(HttpResponse.class));
expectLastCall().anyTimes();
expect(mockCache.getCacheEntry(eq(host), isA(HttpRequest.class))).andThrow(
new IOException()).anyTimes();
expect(mockCache.getVariantCacheEntriesWithEtags(eq(host), isA(HttpRequest.class)))
.andThrow(new IOException()).anyTimes();
expect(
mockCache.cacheAndReturnResponse(eq(host), isA(HttpRequest.class),
isA(CloseableHttpResponse.class), isA(Date.class), isA(Date.class)))
.andReturn(resp).anyTimes();
expect(
mockBackend.execute(eq(route), isA(HttpRequestWrapper.class),
isA(HttpClientContext.class), (HttpExecutionAware) isNull())).andReturn(resp);
replayMocks();
final HttpResponse result = impl.execute(route, request, context, null);
verifyMocks();
// Same instance => response really came straight from the backend.
Assert.assertSame(resp, result);
}
// "Cache-Control: only-if-cached" against an empty cache must short-circuit
// to 504 Gateway Timeout without contacting the origin at all.
@Test
public void testIfOnlyIfCachedAndNoCacheEntryBackendNotCalled() throws Exception {
    impl = createCachingExecChain(mockBackend, new BasicHttpCache(), CacheConfig.DEFAULT);
    request.addHeader("Cache-Control", "only-if-cached");
    final HttpResponse reply = impl.execute(route, request, context, null);
    final int status = reply.getStatusLine().getStatusCode();
    Assert.assertEquals(HttpStatus.SC_GATEWAY_TIMEOUT, status);
}
// "only-if-cached" with an entry present but unsuitable (must-revalidate)
// must also produce 504 rather than revalidating against the origin.
@Test
public void testIfOnlyIfCachedAndEntryNotSuitableBackendNotCalled() throws Exception {
request.setHeader("Cache-Control", "only-if-cached");
entry = HttpTestUtils.makeCacheEntry(new Header[] { new BasicHeader("Cache-Control",
"must-revalidate") });
// Scripted expectations on the class-level mocks; order mirrors exec flow.
requestIsFatallyNonCompliant(null);
cacheInvalidatorWasCalled();
requestPolicyAllowsCaching(true);
getCacheEntryReturns(entry);
cacheEntrySuitable(false);
replayMocks();
final HttpResponse resp = impl.execute(route, request, context, null);
verifyMocks();
Assert.assertEquals(HttpStatus.SC_GATEWAY_TIMEOUT, resp.getStatusLine().getStatusCode());
}
// "only-if-cached" with a fresh, suitable entry must be answered straight
// from the cache (the mocked cached response instance is returned).
@Test
public void testIfOnlyIfCachedAndEntryExistsAndIsSuitableReturnsEntry() throws Exception {
request.setHeader("Cache-Control", "only-if-cached");
// Scripted expectations on the class-level mocks; order mirrors exec flow.
requestIsFatallyNonCompliant(null);
cacheInvalidatorWasCalled();
requestPolicyAllowsCaching(true);
getCacheEntryReturns(entry);
cacheEntrySuitable(true);
responseIsGeneratedFromCache();
entryHasStaleness(0);
replayMocks();
final HttpResponse resp = impl.execute(route, request, context, null);
verifyMocks();
Assert.assertSame(mockCachedResponse, resp);
}
// On a cache hit no network connection is used, so none may be left in the
// execution context handed to the second request.
@Test
public void testDoesNotSetConnectionInContextOnCacheHit() throws Exception {
    final HttpResponse cacheableResponse = HttpTestUtils.make200Response();
    cacheableResponse.setHeader("Cache-Control", "max-age=3600");
    final DummyBackend stubBackend = new DummyBackend();
    stubBackend.setResponse(cacheableResponse);
    impl = createCachingExecChain(stubBackend, new BasicHttpCache(), CacheConfig.DEFAULT);
    final HttpClientContext hitContext = HttpClientContext.create();
    hitContext.setTargetHost(host);
    impl.execute(route, request, context, null);  // primes the cache
    impl.execute(route, request, hitContext, null);  // served from cache
    assertNull(hitContext.getConnection());
}
// Even when the response is served from cache, the context must still expose
// the target host the caller set.
@Test
public void testSetsTargetHostInContextOnCacheHit() throws Exception {
    final HttpResponse cacheableResponse = HttpTestUtils.make200Response();
    cacheableResponse.setHeader("Cache-Control", "max-age=3600");
    final DummyBackend stubBackend = new DummyBackend();
    stubBackend.setResponse(cacheableResponse);
    impl = createCachingExecChain(stubBackend, new BasicHttpCache(), CacheConfig.DEFAULT);
    final HttpClientContext hitContext = HttpClientContext.create();
    hitContext.setTargetHost(host);
    impl.execute(route, request, context, null);  // primes the cache
    impl.execute(route, request, hitContext, null);  // served from cache
    assertSame(host, hitContext.getTargetHost());
}
// A cache hit must still populate the HTTP route attribute in the context,
// just as a real network exchange would.
@Test
public void testSetsRouteInContextOnCacheHit() throws Exception {
    final HttpResponse cacheableResponse = HttpTestUtils.make200Response();
    cacheableResponse.setHeader("Cache-Control", "max-age=3600");
    final DummyBackend stubBackend = new DummyBackend();
    stubBackend.setResponse(cacheableResponse);
    impl = createCachingExecChain(stubBackend, new BasicHttpCache(), CacheConfig.DEFAULT);
    final HttpClientContext hitContext = HttpClientContext.create();
    hitContext.setTargetHost(host);
    impl.execute(route, request, context, null);  // primes the cache
    impl.execute(route, request, hitContext, null);  // served from cache
    assertEquals(route, hitContext.getHttpRoute());
}
// A cache hit must still record the executed request in the context (either
// an equivalent copy or the identical instance).
@Test
public void testSetsRequestInContextOnCacheHit() throws Exception {
    final HttpResponse cacheableResponse = HttpTestUtils.make200Response();
    cacheableResponse.setHeader("Cache-Control", "max-age=3600");
    final DummyBackend stubBackend = new DummyBackend();
    stubBackend.setResponse(cacheableResponse);
    impl = createCachingExecChain(stubBackend, new BasicHttpCache(), CacheConfig.DEFAULT);
    final HttpClientContext hitContext = HttpClientContext.create();
    hitContext.setTargetHost(host);
    impl.execute(route, request, context, null);  // primes the cache
    impl.execute(route, request, hitContext, null);  // served from cache
    final boolean equivalent = HttpTestUtils.equivalent(request, hitContext.getRequest());
    if (!equivalent) {
        assertSame(request, hitContext.getRequest());
    }
}
// A cache hit must still record the produced response in the context (either
// an equivalent copy or the identical instance).
@Test
public void testSetsResponseInContextOnCacheHit() throws Exception {
    final HttpResponse cacheableResponse = HttpTestUtils.make200Response();
    cacheableResponse.setHeader("Cache-Control", "max-age=3600");
    final DummyBackend stubBackend = new DummyBackend();
    stubBackend.setResponse(cacheableResponse);
    impl = createCachingExecChain(stubBackend, new BasicHttpCache(), CacheConfig.DEFAULT);
    final HttpClientContext hitContext = HttpClientContext.create();
    hitContext.setTargetHost(host);
    impl.execute(route, request, context, null);  // primes the cache
    final HttpResponse result = impl.execute(route, request, hitContext, null);
    final boolean equivalent = HttpTestUtils.equivalent(result, hitContext.getResponse());
    if (!equivalent) {
        assertSame(result, hitContext.getResponse());
    }
}
// A cache hit must still flip the context's request-sent flag so callers can
// rely on it regardless of where the response came from.
@Test
public void testSetsRequestSentInContextOnCacheHit() throws Exception {
    final HttpResponse cacheableResponse = HttpTestUtils.make200Response();
    cacheableResponse.setHeader("Cache-Control", "max-age=3600");
    final DummyBackend stubBackend = new DummyBackend();
    stubBackend.setResponse(cacheableResponse);
    impl = createCachingExecChain(stubBackend, new BasicHttpCache(), CacheConfig.DEFAULT);
    final HttpClientContext hitContext = HttpClientContext.create();
    hitContext.setTargetHost(host);
    impl.execute(route, request, context, null);  // primes the cache
    impl.execute(route, request, hitContext, null);  // served from cache
    assertTrue(hitContext.isRequestSent());
}
// A 204 No Content with explicit freshness information is cacheable even
// though it has no entity: the second execution must be a cache hit, so the
// backend sees exactly one request.
@Test
public void testCanCacheAResponseWithoutABody() throws Exception {
    final DummyBackend countingBackend = new DummyBackend();
    final HttpResponse noContent = new BasicHttpResponse(HttpVersion.HTTP_1_1,
            HttpStatus.SC_NO_CONTENT, "No Content");
    noContent.setHeader("Date", DateUtils.formatDate(new Date()));
    noContent.setHeader("Cache-Control", "max-age=300");
    countingBackend.setResponse(noContent);
    impl = createCachingExecChain(countingBackend, new BasicHttpCache(), CacheConfig.DEFAULT);
    impl.execute(route, request, context, null);
    impl.execute(route, request, context, null);
    assertEquals(1, countingBackend.getExecutions());
}
// A client conditional (If-None-Match) for a resource not yet cached is
// forwarded; when the origin answers 304, the relayed 304 must have no body.
@Test
public void testNoEntityForIfNoneMatchRequestNotYetInCache() throws Exception {
final Date now = new Date();
final Date tenSecondsAgo = new Date(now.getTime() - 10 * 1000L);
impl = createCachingExecChain(mockBackend, new BasicHttpCache(), CacheConfig.DEFAULT);
final HttpRequestWrapper req1 = HttpRequestWrapper.wrap(new HttpGet(
"http://foo.example.com/"));
req1.addHeader("If-None-Match", "\"etag\"");
final HttpResponse resp1 = new BasicHttpResponse(HttpVersion.HTTP_1_1,
HttpStatus.SC_NOT_MODIFIED, "Not modified");
resp1.setHeader("Content-Length", "128");
resp1.setHeader("ETag", "\"etag\"");
resp1.setHeader("Date", DateUtils.formatDate(tenSecondsAgo));
resp1.setHeader("Cache-Control", "public, max-age=5");
backendExpectsAnyRequestAndReturn(resp1);
replayMocks();
final HttpResponse result = impl.execute(route, req1, context, null);
verifyMocks();
assertEquals(HttpStatus.SC_NOT_MODIFIED, result.getStatusLine().getStatusCode());
assertNull("The 304 response messages MUST NOT contain a message-body", result.getEntity());
}
// Two successive client conditionals both answered 304 by the origin: the 304
// must update the (entity-less) cache entry, and both results must expose the
// entity tag.
@Test
public void testNotModifiedResponseUpdatesCacheEntryWhenNoEntity() throws Exception {
    final Date now = new Date();
    impl = createCachingExecChain(mockBackend, new BasicHttpCache(), CacheConfig.DEFAULT);
    final HttpRequestWrapper req1 = HttpRequestWrapper.wrap(new HttpGet(
            "http://foo.example.com/"));
    req1.addHeader("If-None-Match", "etag");
    final HttpRequestWrapper req2 = HttpRequestWrapper.wrap(new HttpGet(
            "http://foo.example.com/"));
    req2.addHeader("If-None-Match", "etag");
    final HttpResponse resp1 = new BasicHttpResponse(HttpVersion.HTTP_1_1,
            HttpStatus.SC_NOT_MODIFIED, "Not modified");
    resp1.setHeader("Date", DateUtils.formatDate(now));
    resp1.setHeader("Cache-Control", "max-age=0");
    resp1.setHeader("Etag", "etag");
    final HttpResponse resp2 = new BasicHttpResponse(HttpVersion.HTTP_1_1,
            HttpStatus.SC_NOT_MODIFIED, "Not modified");
    resp2.setHeader("Date", DateUtils.formatDate(now));
    resp2.setHeader("Cache-Control", "max-age=0");
    // BUGFIX: previously resp1.setHeader(...) — a copy-paste slip that
    // redundantly re-set resp1's Etag and left resp2 without one, even though
    // the test asserts result2 carries "etag".
    resp2.setHeader("Etag", "etag");
    backendExpectsAnyRequestAndReturn(resp1);
    backendExpectsAnyRequestAndReturn(resp2);
    replayMocks();
    final HttpResponse result1 = impl.execute(route, req1, context, null);
    final HttpResponse result2 = impl.execute(route, req2, context, null);
    verifyMocks();
    assertEquals(HttpStatus.SC_NOT_MODIFIED, result1.getStatusLine().getStatusCode());
    assertEquals("etag", result1.getFirstHeader("Etag").getValue());
    assertEquals(HttpStatus.SC_NOT_MODIFIED, result2.getStatusLine().getStatusCode());
    assertEquals("etag", result2.getFirstHeader("Etag").getValue());
}
// Same as the previous test but with a Vary header: a 304 carrying Vary must
// still update the entity-less cache entry, and both results must expose the
// entity tag.
@Test
public void testNotModifiedResponseWithVaryUpdatesCacheEntryWhenNoEntity() throws Exception {
    final Date now = new Date();
    impl = createCachingExecChain(mockBackend, new BasicHttpCache(), CacheConfig.DEFAULT);
    final HttpRequestWrapper req1 = HttpRequestWrapper.wrap(new HttpGet(
            "http://foo.example.com/"));
    req1.addHeader("If-None-Match", "etag");
    final HttpRequestWrapper req2 = HttpRequestWrapper.wrap(new HttpGet(
            "http://foo.example.com/"));
    req2.addHeader("If-None-Match", "etag");
    final HttpResponse resp1 = new BasicHttpResponse(HttpVersion.HTTP_1_1,
            HttpStatus.SC_NOT_MODIFIED, "Not modified");
    resp1.setHeader("Date", DateUtils.formatDate(now));
    resp1.setHeader("Cache-Control", "max-age=0");
    resp1.setHeader("Etag", "etag");
    resp1.setHeader("Vary", "Accept-Encoding");
    final HttpResponse resp2 = new BasicHttpResponse(HttpVersion.HTTP_1_1,
            HttpStatus.SC_NOT_MODIFIED, "Not modified");
    resp2.setHeader("Date", DateUtils.formatDate(now));
    resp2.setHeader("Cache-Control", "max-age=0");
    // BUGFIX: the next two lines previously targeted resp1 (copy-paste slip),
    // re-setting its already-set Etag/Vary and leaving resp2 without either,
    // even though the test asserts result2 carries "etag".
    resp2.setHeader("Etag", "etag");
    resp2.setHeader("Vary", "Accept-Encoding");
    backendExpectsAnyRequestAndReturn(resp1);
    backendExpectsAnyRequestAndReturn(resp2);
    replayMocks();
    final HttpResponse result1 = impl.execute(route, req1, context, null);
    final HttpResponse result2 = impl.execute(route, req2, context, null);
    verifyMocks();
    assertEquals(HttpStatus.SC_NOT_MODIFIED, result1.getStatusLine().getStatusCode());
    assertEquals("etag", result1.getFirstHeader("Etag").getValue());
    assertEquals(HttpStatus.SC_NOT_MODIFIED, result2.getStatusLine().getStatusCode());
    assertEquals("etag", result2.getFirstHeader("Etag").getValue());
}
// A 304 cached via a conditional request must never be replayed to a later
// NON-conditional request: req2 must receive a full 200 with a body.
@Test
public void testDoesNotSend304ForNonConditionalRequest() throws Exception {
final Date now = new Date();
final Date inOneMinute = new Date(System.currentTimeMillis() + 60000);
impl = createCachingExecChain(mockBackend, new BasicHttpCache(), CacheConfig.DEFAULT);
final HttpRequestWrapper req1 = HttpRequestWrapper.wrap(new HttpGet(
"http://foo.example.com/"));
req1.addHeader("If-None-Match", "etag");
// req2 deliberately carries no conditional headers.
final HttpRequestWrapper req2 = HttpRequestWrapper.wrap(new HttpGet(
"http://foo.example.com/"));
final HttpResponse resp1 = new BasicHttpResponse(HttpVersion.HTTP_1_1,
HttpStatus.SC_NOT_MODIFIED, "Not modified");
resp1.setHeader("Date", DateUtils.formatDate(now));
resp1.setHeader("Cache-Control", "public, max-age=60");
resp1.setHeader("Expires", DateUtils.formatDate(inOneMinute));
resp1.setHeader("Etag", "etag");
resp1.setHeader("Vary", "Accept-Encoding");
final HttpResponse resp2 = new BasicHttpResponse(HttpVersion.HTTP_1_1, HttpStatus.SC_OK,
"Ok");
resp2.setHeader("Date", DateUtils.formatDate(now));
resp2.setHeader("Cache-Control", "public, max-age=60");
resp2.setHeader("Expires", DateUtils.formatDate(inOneMinute));
resp2.setHeader("Etag", "etag");
resp2.setHeader("Vary", "Accept-Encoding");
resp2.setEntity(HttpTestUtils.makeBody(128));
backendExpectsAnyRequestAndReturn(resp1);
backendExpectsAnyRequestAndReturn(resp2).anyTimes();
replayMocks();
final HttpResponse result1 = impl.execute(route, req1, context, null);
final HttpResponse result2 = impl.execute(route, req2, context, null);
verifyMocks();
assertEquals(HttpStatus.SC_NOT_MODIFIED, result1.getStatusLine().getStatusCode());
assertNull(result1.getEntity());
assertEquals(HttpStatus.SC_OK, result2.getStatusLine().getStatusCode());
Assert.assertNotNull(result2.getEntity());
}
// Verifies that the cache key incorporates the target (virtual) host from the
// execution context: changing the target host forces one new backend fetch,
// after which the entry for that host is served from cache.
@Test
public void testUsesVirtualHostForCacheKey() throws Exception {
final DummyBackend backend = new DummyBackend();
final HttpResponse response = HttpTestUtils.make200Response();
response.setHeader("Cache-Control", "max-age=3600");
backend.setResponse(response);
impl = createCachingExecChain(backend, new BasicHttpCache(), CacheConfig.DEFAULT);
// First host: one real execution, then cacheable.
impl.execute(route, request, context, null);
assertEquals(1, backend.getExecutions());
// Different virtual host => different cache key => second backend hit.
context.setTargetHost(new HttpHost("bar.example.com"));
impl.execute(route, request, context, null);
assertEquals(2, backend.getExecutions());
// Same host again: served from cache, no extra backend execution.
impl.execute(route, request, context, null);
assertEquals(2, backend.getExecutions());
}
/**
 * Records an expectation on the mock backend: any route/request/context
 * combination is answered with {@code response} wrapped as a
 * {@link CloseableHttpResponse}. The returned setter lets callers refine
 * the expectation (e.g. {@code anyTimes()}).
 */
private IExpectationSetters<CloseableHttpResponse> backendExpectsAnyRequestAndReturn(
        final HttpResponse response) throws Exception {
    final CloseableHttpResponse recorded = mockBackend.execute(
            EasyMock.isA(HttpRoute.class),
            EasyMock.isA(HttpRequestWrapper.class),
            EasyMock.isA(HttpClientContext.class),
            EasyMock.<HttpExecutionAware>isNull());
    return EasyMock.expect(recorded).andReturn(Proxies.enhanceResponse(response));
}
/**
 * Records an expectation for one specific request: when the backend sees
 * exactly {@code request} (EasyMock equality), it answers with
 * {@code response} wrapped as a {@link CloseableHttpResponse}.
 */
protected IExpectationSetters<CloseableHttpResponse> backendExpectsRequestAndReturn(
        final HttpRequestWrapper request, final HttpResponse response) throws Exception {
    final CloseableHttpResponse recorded = mockBackend.execute(
            EasyMock.isA(HttpRoute.class),
            EasyMock.eq(request),
            EasyMock.isA(HttpClientContext.class),
            EasyMock.<HttpExecutionAware>isNull());
    return EasyMock.expect(recorded).andReturn(Proxies.enhanceResponse(response));
}
/**
 * Records an expectation for one specific request, answering with an
 * already-closeable {@code response} (no wrapping applied).
 */
protected IExpectationSetters<CloseableHttpResponse> backendExpectsRequestAndReturn(
        final HttpRequestWrapper request, final CloseableHttpResponse response) throws Exception {
    final CloseableHttpResponse recorded = mockBackend.execute(
            EasyMock.isA(HttpRoute.class),
            EasyMock.eq(request),
            EasyMock.isA(HttpClientContext.class),
            EasyMock.<HttpExecutionAware>isNull());
    return EasyMock.expect(recorded).andReturn(response);
}
/**
 * Records an expectation that any backend invocation throws
 * {@code throwable} instead of producing a response.
 */
protected IExpectationSetters<CloseableHttpResponse> backendExpectsAnyRequestAndThrows(
        final Throwable throwable) throws Exception {
    final CloseableHttpResponse recorded = mockBackend.execute(
            EasyMock.isA(HttpRoute.class),
            EasyMock.isA(HttpRequestWrapper.class),
            EasyMock.isA(HttpClientContext.class),
            EasyMock.<HttpExecutionAware>isNull());
    return EasyMock.expect(recorded).andThrow(throwable);
}
/**
 * Records an expectation that captures whatever request reaches the backend
 * into {@code cap} and answers with {@code response} wrapped as a
 * {@link CloseableHttpResponse}.
 */
protected IExpectationSetters<CloseableHttpResponse> backendCaptureRequestAndReturn(
        final Capture<HttpRequestWrapper> cap, final HttpResponse response) throws Exception {
    final CloseableHttpResponse recorded = mockBackend.execute(
            EasyMock.isA(HttpRoute.class),
            EasyMock.capture(cap),
            EasyMock.isA(HttpClientContext.class),
            EasyMock.<HttpExecutionAware>isNull());
    return EasyMock.expect(recorded).andReturn(Proxies.enhanceResponse(response));
}
// Stubs the mock cache to return the given entry for the test host/request pair.
protected void getCacheEntryReturns(final HttpCacheEntry result) throws IOException {
expect(mockCache.getCacheEntry(eq(host), eqRequest(request))).andReturn(result);
}
// Records that the cache must flush invalidated entries for some host/request.
private void cacheInvalidatorWasCalled() throws IOException {
mockCache
.flushInvalidatedCacheEntriesFor((HttpHost) anyObject(), (HttpRequest) anyObject());
}
// Stubs the validity policy's isRevalidatable(...) answer for any entry
// (matched any number of times).
protected void cacheEntryValidatable(final boolean b) {
expect(mockValidityPolicy.isRevalidatable((HttpCacheEntry) anyObject())).andReturn(b)
.anyTimes();
}
// Stubs the validity policy's mustRevalidate(...) answer for the shared mock entry.
protected void cacheEntryMustRevalidate(final boolean b) {
expect(mockValidityPolicy.mustRevalidate(mockCacheEntry)).andReturn(b);
}
// Stubs the validity policy's proxyRevalidate(...) answer for the shared mock entry.
protected void cacheEntryProxyRevalidate(final boolean b) {
expect(mockValidityPolicy.proxyRevalidate(mockCacheEntry)).andReturn(b);
}
// Stubs mayReturnStaleWhileRevalidating(...) for any entry/date combination.
protected void mayReturnStaleWhileRevalidating(final boolean b) {
expect(
mockValidityPolicy.mayReturnStaleWhileRevalidating((HttpCacheEntry) anyObject(),
(Date) anyObject())).andReturn(b);
}
// Stubs the conditional-request builder to turn the test request/entry into
// the given validating request.
protected void conditionalRequestBuilderReturns(final HttpRequestWrapper validate)
throws Exception {
expect(mockConditionalRequestBuilder.buildConditionalRequest(request, entry)).andReturn(
validate);
}
// Stubs the request policy's isServableFromCache(...) answer for any request.
protected void requestPolicyAllowsCaching(final boolean allow) {
expect(mockRequestPolicy.isServableFromCache((HttpRequest) anyObject())).andReturn(allow);
}
// Stubs the suitability checker: whether the cached response may be used to
// satisfy the request (any host/request/entry/date).
protected void cacheEntrySuitable(final boolean suitable) {
expect(
mockSuitabilityChecker.canCachedResponseBeUsed((HttpHost) anyObject(),
(HttpRequest) anyObject(), (HttpCacheEntry) anyObject(), (Date) anyObject()))
.andReturn(suitable);
}
// Stubs the computed staleness (in seconds) for any entry/date combination.
private void entryHasStaleness(final long staleness) {
expect(
mockValidityPolicy.getStalenessSecs((HttpCacheEntry) anyObject(), (Date) anyObject()))
.andReturn(staleness);
}
// Stubs the response generator to produce the shared mock cached response for
// any request/entry combination.
protected void responseIsGeneratedFromCache() {
expect(
mockResponseGenerator.generateResponse((HttpRequestWrapper) anyObject(), (HttpCacheEntry) anyObject()))
.andReturn(mockCachedResponse);
}
}
| |
package eecs2030.lab7;
import static org.junit.Assert.*;
import java.util.ArrayList;
import java.util.List;
import org.junit.FixMethodOrder;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.Timeout;
import org.junit.runners.MethodSorters;
/**
 * Unit tests for {@code TreeTraversal} over a {@code BinarySearchTree}:
 * inorder, preorder, postorder and breadth-first traversals, each exercised
 * on an empty tree, one/two/three-element trees, and the 8-element tree from
 * the lab document (which contains the duplicate value "73").
 *
 * <p>Fix: the single-element postorder test was misnamed
 * {@code test03b_inorder} even though it asserts
 * {@code TreeTraversal.postorder}; it is renamed {@code test03b_postorder}.
 * The postorder tests are also declared in name order (03c had been declared
 * after 03e), which matches the NAME_ASCENDING execution order.
 */
@FixMethodOrder(MethodSorters.NAME_ASCENDING)
public class TreeTraversalTest {

    /** Guard against runaway traversals (e.g. accidental infinite recursion). */
    @Rule
    public Timeout globalTimeout = Timeout.seconds(1);

    @Test
    public void test01a_inorder() {
        BinarySearchTree<String> t = new BinarySearchTree<>();
        List<String> exp = new ArrayList<>();
        assertEquals("failed for an empty tree",
                exp, TreeTraversal.inorder(t));
    }

    @Test
    public void test01b_inorder() {
        BinarySearchTree<String> t = new BinarySearchTree<>();
        t.add("hello");
        List<String> exp = new ArrayList<>();
        exp.add("hello");
        assertEquals("failed for tree with one element",
                exp, TreeTraversal.inorder(t));
    }

    @Test
    public void test01c_inorder() {
        BinarySearchTree<String> t = new BinarySearchTree<>();
        t.add("hello");
        t.add("goodbye");
        List<String> exp = new ArrayList<>();
        exp.add("goodbye");
        exp.add("hello");
        assertEquals("failed for tree with two elements",
                exp, TreeTraversal.inorder(t));
    }

    @Test
    public void test01d_inorder() {
        BinarySearchTree<String> t = new BinarySearchTree<>();
        t.add("hello");
        t.add("goodbye");
        t.add("salut");
        List<String> exp = new ArrayList<>();
        exp.add("goodbye");
        exp.add("hello");
        exp.add("salut");
        assertEquals("failed for tree with three elements",
                exp, TreeTraversal.inorder(t));
    }

    @Test
    public void test01e_inorder() {
        BinarySearchTree<String> t = new BinarySearchTree<>();
        t.add("50");
        t.add("27");
        t.add("73");
        t.add("08");
        t.add("44");
        t.add("83");
        t.add("73");
        t.add("93");
        List<String> exp = new ArrayList<>();
        exp.add("08");
        exp.add("27");
        exp.add("44");
        exp.add("50");
        exp.add("73");
        exp.add("73");
        exp.add("83");
        exp.add("93");
        assertEquals("failed for tree in lab document",
                exp, TreeTraversal.inorder(t));
    }

    @Test
    public void test02a_preorder() {
        BinarySearchTree<String> t = new BinarySearchTree<>();
        List<String> exp = new ArrayList<>();
        assertEquals("failed for an empty tree",
                exp, TreeTraversal.preorder(t));
    }

    @Test
    public void test02b_preorder() {
        BinarySearchTree<String> t = new BinarySearchTree<>();
        t.add("hello");
        List<String> exp = new ArrayList<>();
        exp.add("hello");
        assertEquals("failed for tree with one element",
                exp, TreeTraversal.preorder(t));
    }

    @Test
    public void test02c_preorder() {
        BinarySearchTree<String> t = new BinarySearchTree<>();
        t.add("hello");
        t.add("goodbye");
        List<String> exp = new ArrayList<>();
        exp.add("hello");
        exp.add("goodbye");
        assertEquals("failed for tree with two elements",
                exp, TreeTraversal.preorder(t));
    }

    @Test
    public void test02d_preorder() {
        BinarySearchTree<String> t = new BinarySearchTree<>();
        t.add("hello");
        t.add("goodbye");
        t.add("salut");
        List<String> exp = new ArrayList<>();
        exp.add("hello");
        exp.add("goodbye");
        exp.add("salut");
        assertEquals("failed for tree with three elements",
                exp, TreeTraversal.preorder(t));
    }

    @Test
    public void test02e_preorder() {
        BinarySearchTree<String> t = new BinarySearchTree<>();
        t.add("50");
        t.add("27");
        t.add("73");
        t.add("08");
        t.add("44");
        t.add("83");
        t.add("73");
        t.add("93");
        List<String> exp = new ArrayList<>();
        exp.add("50");
        exp.add("27");
        exp.add("08");
        exp.add("44");
        exp.add("73");
        exp.add("83");
        exp.add("73");
        exp.add("93");
        assertEquals("failed for tree in lab document",
                exp, TreeTraversal.preorder(t));
    }

    @Test
    public void test03a_postorder() {
        BinarySearchTree<String> t = new BinarySearchTree<>();
        List<String> exp = new ArrayList<>();
        assertEquals("failed for an empty tree",
                exp, TreeTraversal.postorder(t));
    }

    // Renamed from test03b_inorder: the assertion targets postorder, so the
    // name now matches the traversal under test.
    @Test
    public void test03b_postorder() {
        BinarySearchTree<String> t = new BinarySearchTree<>();
        t.add("hello");
        List<String> exp = new ArrayList<>();
        exp.add("hello");
        assertEquals("failed for tree with one element",
                exp, TreeTraversal.postorder(t));
    }

    @Test
    public void test03c_postorder() {
        BinarySearchTree<String> t = new BinarySearchTree<>();
        t.add("hello");
        t.add("goodbye");
        List<String> exp = new ArrayList<>();
        exp.add("goodbye");
        exp.add("hello");
        assertEquals("failed for tree with two elements",
                exp, TreeTraversal.postorder(t));
    }

    @Test
    public void test03d_postorder() {
        BinarySearchTree<String> t = new BinarySearchTree<>();
        t.add("hello");
        t.add("goodbye");
        t.add("salut");
        List<String> exp = new ArrayList<>();
        exp.add("goodbye");
        exp.add("salut");
        exp.add("hello");
        assertEquals("failed for tree with three elements",
                exp, TreeTraversal.postorder(t));
    }

    @Test
    public void test03e_postorder() {
        BinarySearchTree<String> t = new BinarySearchTree<>();
        t.add("50");
        t.add("27");
        t.add("73");
        t.add("08");
        t.add("44");
        t.add("83");
        t.add("73");
        t.add("93");
        List<String> exp = new ArrayList<>();
        exp.add("08");
        exp.add("44");
        exp.add("27");
        exp.add("73");
        exp.add("93");
        exp.add("83");
        exp.add("73");
        exp.add("50");
        assertEquals("failed for tree in lab document",
                exp, TreeTraversal.postorder(t));
    }

    @Test
    public void test04a_breadthFirst() {
        BinarySearchTree<String> t = new BinarySearchTree<>();
        List<String> exp = new ArrayList<>();
        assertEquals("failed for an empty tree",
                exp, TreeTraversal.breadthFirst(t));
    }

    @Test
    public void test04b_breadthFirst() {
        BinarySearchTree<String> t = new BinarySearchTree<>();
        t.add("hello");
        List<String> exp = new ArrayList<>();
        exp.add("hello");
        assertEquals("failed for tree with one element",
                exp, TreeTraversal.breadthFirst(t));
    }

    @Test
    public void test04c_breadthFirst() {
        BinarySearchTree<String> t = new BinarySearchTree<>();
        t.add("hello");
        t.add("goodbye");
        List<String> exp = new ArrayList<>();
        exp.add("hello");
        exp.add("goodbye");
        assertEquals("failed for tree with two elements",
                exp, TreeTraversal.breadthFirst(t));
    }

    @Test
    public void test04d_breadthFirst() {
        BinarySearchTree<String> t = new BinarySearchTree<>();
        t.add("hello");
        t.add("goodbye");
        t.add("salut");
        List<String> exp = new ArrayList<>();
        exp.add("hello");
        exp.add("goodbye");
        exp.add("salut");
        assertEquals("failed for tree with three elements",
                exp, TreeTraversal.breadthFirst(t));
    }

    @Test
    public void test04e_breadthFirst() {
        BinarySearchTree<String> t = new BinarySearchTree<>();
        t.add("50");
        t.add("27");
        t.add("73");
        t.add("08");
        t.add("44");
        t.add("83");
        t.add("73");
        t.add("93");
        List<String> exp = new ArrayList<>();
        exp.add("50");
        exp.add("27");
        exp.add("73");
        exp.add("08");
        exp.add("44");
        exp.add("83");
        exp.add("73");
        exp.add("93");
        assertEquals("failed for tree in lab document",
                exp, TreeTraversal.breadthFirst(t));
    }
}
| |
/*
* =============================================================================
*
* Copyright (c) 2014-2017, The UNBESCAPE team (http://www.unbescape.org)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* =============================================================================
*/
package org.unbescape.html;
import org.junit.Test;
import static org.unbescape.html.HtmlEscapeTestUtil.testEscapeDecimal0;
import static org.unbescape.html.HtmlEscapeTestUtil.testEscapeDecimal1;
import static org.unbescape.html.HtmlEscapeTestUtil.testEscapeDecimal2;
import static org.unbescape.html.HtmlEscapeTestUtil.testEscapeDecimal3;
import static org.unbescape.html.HtmlEscapeTestUtil.testEscapeDecimal4;
import static org.unbescape.html.HtmlEscapeTestUtil.testEscapeHexa0;
import static org.unbescape.html.HtmlEscapeTestUtil.testEscapeHexa1;
import static org.unbescape.html.HtmlEscapeTestUtil.testEscapeHexa2;
import static org.unbescape.html.HtmlEscapeTestUtil.testEscapeHexa3;
import static org.unbescape.html.HtmlEscapeTestUtil.testEscapeHexa4;
import static org.unbescape.html.HtmlEscapeTestUtil.testEscapeHtml4Decimal0;
import static org.unbescape.html.HtmlEscapeTestUtil.testEscapeHtml4Decimal1;
import static org.unbescape.html.HtmlEscapeTestUtil.testEscapeHtml4Decimal2;
import static org.unbescape.html.HtmlEscapeTestUtil.testEscapeHtml4Decimal3;
import static org.unbescape.html.HtmlEscapeTestUtil.testEscapeHtml4Decimal4;
import static org.unbescape.html.HtmlEscapeTestUtil.testEscapeHtml4Hexa0;
import static org.unbescape.html.HtmlEscapeTestUtil.testEscapeHtml4Hexa1;
import static org.unbescape.html.HtmlEscapeTestUtil.testEscapeHtml4Hexa2;
import static org.unbescape.html.HtmlEscapeTestUtil.testEscapeHtml4Hexa3;
import static org.unbescape.html.HtmlEscapeTestUtil.testEscapeHtml4Hexa4;
import static org.unbescape.html.HtmlEscapeTestUtil.testEscapeHtml5Decimal0;
import static org.unbescape.html.HtmlEscapeTestUtil.testEscapeHtml5Decimal1;
import static org.unbescape.html.HtmlEscapeTestUtil.testEscapeHtml5Decimal2;
import static org.unbescape.html.HtmlEscapeTestUtil.testEscapeHtml5Decimal3;
import static org.unbescape.html.HtmlEscapeTestUtil.testEscapeHtml5Decimal4;
import static org.unbescape.html.HtmlEscapeTestUtil.testEscapeHtml5Hexa0;
import static org.unbescape.html.HtmlEscapeTestUtil.testEscapeHtml5Hexa1;
import static org.unbescape.html.HtmlEscapeTestUtil.testEscapeHtml5Hexa2;
import static org.unbescape.html.HtmlEscapeTestUtil.testEscapeHtml5Hexa3;
import static org.unbescape.html.HtmlEscapeTestUtil.testEscapeHtml5Hexa4;
import static org.unbescape.html.HtmlEscapeTestUtil.testUnescape;
/**
*
* @author Daniel Fernández
*
* @since 1.0
*
*/
// NOTE(review): the expected-value string literals in this class appear to
// have had their HTML character references decoded by some extraction tooling
// (they contain raw, unescaped quote characters inside Java string literals
// and would not compile as-is). Restore the literals from the upstream
// unbescape sources before editing any expected values.
public class HtmlEscape05Test {
// Input covering markup-significant characters, the full ASCII alphabet,
// digits, and non-ASCII text including surrogate pairs (\uD840\uDC00, \ud835\udccd).
private static final String TEXT =
"<>&'\"ABCDEFGHIJKLMNOPQRSTUVWXYZ <>&'\" \u0163\uD840\uDC00\ud835\udccd-\u0163-\uD840\uDC00-\ud835\udccd\u00E0\u00E1\u00E2\u00E3\u00E4\u00E5\u00E6abcdefghijklmnopqrstuvwxyz <&'\"> 0123456789&<'\">";
// HTML5 escaping at levels 0-4, decimal then hexadecimal numeric-reference
// variants; each escaped form must unescape back to TEXT.
@Test
public void testHtml5() throws Exception {
final String textHtml5DecLevel0 =
"<>&'"ABCDEFGHIJKLMNOPQRSTUVWXYZ <>&'" \u0163\uD840\uDC00\ud835\udccd-\u0163-\uD840\uDC00-\ud835\udccd\u00E0\u00E1\u00E2\u00E3\u00E4\u00E5\u00E6abcdefghijklmnopqrstuvwxyz <&'"> 0123456789&<'">";
final String textHtml5DecLevel1 =
"<>&'"ABCDEFGHIJKLMNOPQRSTUVWXYZ <>&'" \u0163\uD840\uDC00\ud835\udccd-\u0163-\uD840\uDC00-\ud835\udccd\u00E0\u00E1\u00E2\u00E3\u00E4\u00E5\u00E6abcdefghijklmnopqrstuvwxyz <&'"> 0123456789&<'">";
final String textHtml5DecLevel2 =
"<>&'"ABCDEFGHIJKLMNOPQRSTUVWXYZ <>&'" ţ𠀀𝓍-ţ-𠀀-𝓍àáâãäåæabcdefghijklmnopqrstuvwxyz <&'"> 0123456789&<'">";
final String textHtml5DecLevel3 =
"<>&'"ABCDEFGHIJKLMNOPQRSTUVWXYZ <>&'" ţ𠀀𝓍-ţ-𠀀-𝓍àáâãäåæabcdefghijklmnopqrstuvwxyz <&'"> 0123456789&<'">";
final String textHtml5DecLevel4 =
"<>&'"ABCDEFGHIJKLMN" +
"OPQRSTUVWXYZ " +
"<>&'" ţ𠀀𝓍-ţ-𠀀-𝓍àáâãäåæabcdefghijkl" +
"mnopqrstuvwxyz " +
"<&'"> 0123456789&<'">";
testEscapeHtml5Decimal0(TEXT, textHtml5DecLevel0);
testEscapeHtml5Decimal1(TEXT, textHtml5DecLevel1);
testEscapeHtml5Decimal2(TEXT, textHtml5DecLevel2);
testEscapeHtml5Decimal3(TEXT, textHtml5DecLevel3);
testEscapeHtml5Decimal4(TEXT, textHtml5DecLevel4);
testUnescape(textHtml5DecLevel0, TEXT);
testUnescape(textHtml5DecLevel1, TEXT);
testUnescape(textHtml5DecLevel2, TEXT);
testUnescape(textHtml5DecLevel3, TEXT);
testUnescape(textHtml5DecLevel4, TEXT);
final String textHtml5HexaLevel0 =
"<>&'"ABCDEFGHIJKLMNOPQRSTUVWXYZ <>&'" \u0163\uD840\uDC00\ud835\udccd-\u0163-\uD840\uDC00-\ud835\udccd\u00E0\u00E1\u00E2\u00E3\u00E4\u00E5\u00E6abcdefghijklmnopqrstuvwxyz <&'"> 0123456789&<'">";
final String textHtml5HexaLevel1 =
"<>&'"ABCDEFGHIJKLMNOPQRSTUVWXYZ <>&'" \u0163\uD840\uDC00\ud835\udccd-\u0163-\uD840\uDC00-\ud835\udccd\u00E0\u00E1\u00E2\u00E3\u00E4\u00E5\u00E6abcdefghijklmnopqrstuvwxyz <&'"> 0123456789&<'">";
final String textHtml5HexaLevel2 =
"<>&'"ABCDEFGHIJKLMNOPQRSTUVWXYZ <>&'" ţ𠀀𝓍-ţ-𠀀-𝓍àáâãäåæabcdefghijklmnopqrstuvwxyz <&'"> 0123456789&<'">";
final String textHtml5HexaLevel3 =
"<>&'"ABCDEFGHIJKLMNOPQRSTUVWXYZ <>&'" ţ𠀀𝓍-ţ-𠀀-𝓍àáâãäåæabcdefghijklmnopqrstuvwxyz <&'"> 0123456789&<'">";
final String textHtml5HexaLevel4 =
"<>&'"ABCDEFGHIJKL" +
"MNOPQRSTUVWXYZ " +
"<>&'" ţ𠀀𝓍-ţ-𠀀-𝓍àáâãäåæabcdefghijk" +
"lmnopqrstuvwxyz " +
"<&'"> 0123456789" +
"&<'">";
testEscapeHtml5Hexa0(TEXT, textHtml5HexaLevel0);
testEscapeHtml5Hexa1(TEXT, textHtml5HexaLevel1);
testEscapeHtml5Hexa2(TEXT, textHtml5HexaLevel2);
testEscapeHtml5Hexa3(TEXT, textHtml5HexaLevel3);
testEscapeHtml5Hexa4(TEXT, textHtml5HexaLevel4);
testUnescape(textHtml5HexaLevel0, TEXT);
testUnescape(textHtml5HexaLevel1, TEXT);
testUnescape(textHtml5HexaLevel2, TEXT);
testUnescape(textHtml5HexaLevel3, TEXT);
testUnescape(textHtml5HexaLevel4, TEXT);
}
// Same structure as testHtml5 but against the (smaller) HTML 4 entity set.
@Test
public void testHtml4() throws Exception {
final String textHtml4DecLevel0 =
"<>&'"ABCDEFGHIJKLMNOPQRSTUVWXYZ <>&'" \u0163\uD840\uDC00\ud835\udccd-\u0163-\uD840\uDC00-\ud835\udccd\u00E0\u00E1\u00E2\u00E3\u00E4\u00E5\u00E6abcdefghijklmnopqrstuvwxyz <&'"> 0123456789&<'">";
final String textHtml4DecLevel1 =
"<>&'"ABCDEFGHIJKLMNOPQRSTUVWXYZ <>&'" \u0163\uD840\uDC00\ud835\udccd-\u0163-\uD840\uDC00-\ud835\udccd\u00E0\u00E1\u00E2\u00E3\u00E4\u00E5\u00E6abcdefghijklmnopqrstuvwxyz <&'"> 0123456789&<'">";
final String textHtml4DecLevel2 =
"<>&'"ABCDEFGHIJKLMNOPQRSTUVWXYZ <>&'" ţ𠀀𝓍-ţ-𠀀-𝓍àáâãäåæabcdefghijklmnopqrstuvwxyz <&'"> 0123456789&<'">";
final String textHtml4DecLevel3 =
"<>&'"ABCDEFGHIJKLMNOPQRSTUVWXYZ <>&'" ţ𠀀𝓍-ţ-𠀀-𝓍àáâãäåæabcdefghijklmnopqrstuvwxyz <&'"> 0123456789&<'">";
final String textHtml4DecLevel4 =
"<>&'"ABCDEFGHIJKLMN" +
"OPQRSTUVWXYZ " +
"<>&'" ţ𠀀𝓍-ţ-𠀀-𝓍àáâãäåæabcdefghijkl" +
"mnopqrstuvwxyz " +
"<&'"> 0123456789&<'">";
testEscapeHtml4Decimal0(TEXT, textHtml4DecLevel0);
testEscapeHtml4Decimal1(TEXT, textHtml4DecLevel1);
testEscapeHtml4Decimal2(TEXT, textHtml4DecLevel2);
testEscapeHtml4Decimal3(TEXT, textHtml4DecLevel3);
testEscapeHtml4Decimal4(TEXT, textHtml4DecLevel4);
testUnescape(textHtml4DecLevel0, TEXT);
testUnescape(textHtml4DecLevel1, TEXT);
testUnescape(textHtml4DecLevel2, TEXT);
testUnescape(textHtml4DecLevel3, TEXT);
testUnescape(textHtml4DecLevel4, TEXT);
final String textHtml4HexaLevel0 =
"<>&'"ABCDEFGHIJKLMNOPQRSTUVWXYZ <>&'" \u0163\uD840\uDC00\ud835\udccd-\u0163-\uD840\uDC00-\ud835\udccd\u00E0\u00E1\u00E2\u00E3\u00E4\u00E5\u00E6abcdefghijklmnopqrstuvwxyz <&'"> 0123456789&<'">";
final String textHtml4HexaLevel1 =
"<>&'"ABCDEFGHIJKLMNOPQRSTUVWXYZ <>&'" \u0163\uD840\uDC00\ud835\udccd-\u0163-\uD840\uDC00-\ud835\udccd\u00E0\u00E1\u00E2\u00E3\u00E4\u00E5\u00E6abcdefghijklmnopqrstuvwxyz <&'"> 0123456789&<'">";
final String textHtml4HexaLevel2 =
"<>&'"ABCDEFGHIJKLMNOPQRSTUVWXYZ <>&'" ţ𠀀𝓍-ţ-𠀀-𝓍àáâãäåæabcdefghijklmnopqrstuvwxyz <&'"> 0123456789&<'">";
final String textHtml4HexaLevel3 =
"<>&'"ABCDEFGHIJKLMNOPQRSTUVWXYZ <>&'" ţ𠀀𝓍-ţ-𠀀-𝓍àáâãäåæabcdefghijklmnopqrstuvwxyz <&'"> 0123456789&<'">";
final String textHtml4HexaLevel4 =
"<>&'"ABCDEFGHIJKL" +
"MNOPQRSTUVWXYZ " +
"<>&'" ţ𠀀𝓍-ţ-𠀀-𝓍àáâãäåæabcdefghij" +
"klmnopqrstuvwxyz " +
"<&'"> 0123456789" +
"&<'">";
testEscapeHtml4Hexa0(TEXT, textHtml4HexaLevel0);
testEscapeHtml4Hexa1(TEXT, textHtml4HexaLevel1);
testEscapeHtml4Hexa2(TEXT, textHtml4HexaLevel2);
testEscapeHtml4Hexa3(TEXT, textHtml4HexaLevel3);
testEscapeHtml4Hexa4(TEXT, textHtml4HexaLevel4);
testUnescape(textHtml4HexaLevel0, TEXT);
testUnescape(textHtml4HexaLevel1, TEXT);
testUnescape(textHtml4HexaLevel2, TEXT);
testUnescape(textHtml4HexaLevel3, TEXT);
testUnescape(textHtml4HexaLevel4, TEXT);
}
// Pure decimal numeric-character-reference escaping, levels 0-4, plus round-trip.
@Test
public void testDecimal() throws Exception {
final String textDecLevel0 =
"<>&'"ABCDEFGHIJKLMNOPQRSTUVWXYZ <>&'" \u0163\uD840\uDC00\ud835\udccd-\u0163-\uD840\uDC00-\ud835\udccd\u00E0\u00E1\u00E2\u00E3\u00E4\u00E5\u00E6abcdefghijklmnopqrstuvwxyz <&'"> 0123456789&<'">";
final String textDecLevel1 =
"<>&'"ABCDEFGHIJKLMNOPQRSTUVWXYZ <>&'" \u0163\uD840\uDC00\ud835\udccd-\u0163-\uD840\uDC00-\ud835\udccd\u00E0\u00E1\u00E2\u00E3\u00E4\u00E5\u00E6abcdefghijklmnopqrstuvwxyz <&'"> 0123456789&<'">";
final String textDecLevel2 =
"<>&'"ABCDEFGHIJKLMNOPQRSTUVWXYZ <>&'" ţ𠀀𝓍-ţ-𠀀-𝓍àáâãäåæabcdefghijklmnopqrstuvwxyz <&'"> 0123456789&<'">";
final String textDecLevel3 =
"<>&'"ABCDEFGHIJKLMNOPQRSTUVWXYZ <>&'" ţ𠀀𝓍-ţ-𠀀-𝓍àáâãäåæabcdefghijklmnopqrstuvwxyz <&'"> 0123456789&<'">";
final String textDecLevel4 =
"<>&'"ABCDEFGHIJKLMNO" +
"PQRSTUVWXYZ <>&'" ţ𠀀𝓍-ţ-𠀀-𝓍àáâãäåæab" +
"cdefghijklmnopqrs" +
"tuvwxyz <&'"> 01234" +
"56789&<'">";
testEscapeDecimal0(TEXT, textDecLevel0);
testEscapeDecimal1(TEXT, textDecLevel1);
testEscapeDecimal2(TEXT, textDecLevel2);
testEscapeDecimal3(TEXT, textDecLevel3);
testEscapeDecimal4(TEXT, textDecLevel4);
testUnescape(textDecLevel0, TEXT);
testUnescape(textDecLevel1, TEXT);
testUnescape(textDecLevel2, TEXT);
testUnescape(textDecLevel3, TEXT);
testUnescape(textDecLevel4, TEXT);
}
// Pure hexadecimal numeric-character-reference escaping, levels 0-4, plus round-trip.
@Test
public void testHexa() throws Exception {
final String textHexaLevel0 =
"<>&'"ABCDEFGHIJKLMNOPQRSTUVWXYZ <>&'" \u0163\uD840\uDC00\ud835\udccd-\u0163-\uD840\uDC00-\ud835\udccd\u00E0\u00E1\u00E2\u00E3\u00E4\u00E5\u00E6abcdefghijklmnopqrstuvwxyz <&'"> 0123456789&<'">";
final String textHexaLevel1 =
"<>&'"ABCDEFGHIJKLMNOPQRSTUVWXYZ <>&'" \u0163\uD840\uDC00\ud835\udccd-\u0163-\uD840\uDC00-\ud835\udccd\u00E0\u00E1\u00E2\u00E3\u00E4\u00E5\u00E6abcdefghijklmnopqrstuvwxyz <&'"> 0123456789&<'">";
final String textHexaLevel2 =
"<>&'"ABCDEFGHIJKLMNOPQRSTUVWXYZ <>&'" ţ𠀀𝓍-ţ-𠀀-𝓍àáâãäåæabcdefghijklmnopqrstuvwxyz <&'"> 0123456789&<'">";
final String textHexaLevel3 =
"<>&'"ABCDEFGHIJKLMNOPQRSTUVWXYZ <>&'" ţ𠀀𝓍-ţ-𠀀-𝓍àáâãäåæabcdefghijklmnopqrstuvwxyz <&'"> 0123456789&<'">";
final String textHexaLevel4 =
"<>&'"ABCDEFGHIJK" +
"LMNOPQRSTUVWXYZ " +
"<>&'" ţ𠀀𝓍-ţ-𠀀-𝓍àáâãäåæabcdefghij" +
"klmnopqrstuvwxyz" +
" <&'"> 012345678" +
"9&<'">";
testEscapeHexa0(TEXT, textHexaLevel0);
testEscapeHexa1(TEXT, textHexaLevel1);
testEscapeHexa2(TEXT, textHexaLevel2);
testEscapeHexa3(TEXT, textHexaLevel3);
testEscapeHexa4(TEXT, textHexaLevel4);
testUnescape(textHexaLevel0, TEXT);
testUnescape(textHexaLevel1, TEXT);
testUnescape(textHexaLevel2, TEXT);
testUnescape(textHexaLevel3, TEXT);
testUnescape(textHexaLevel4, TEXT);
}
// Explicit default constructor (no state to initialize).
public HtmlEscape05Test() {
super();
}
}
| |
package picard.illumina.parser;
import htsjdk.samtools.util.IOUtil;
import picard.PicardException;
import picard.illumina.parser.fakers.FileFaker;
import picard.illumina.parser.readers.BclReader;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import java.util.regex.Matcher;
/**
 * File-mapping utility for Illumina per-tile, per-cycle files laid out as
 * {@code <lane dir>/C<cycle>.1/s_<lane>_<tile><extension>}. Cycle directories
 * are discovered once at construction; the class then exposes tile/cycle
 * filtered views of the discovered files, verification of expected
 * tiles/cycles, and creation of fake files for testing.
 */
public class PerTilePerCycleFileUtil extends ParameterizedFileUtil {
// cycle number -> (tile -> file), built once in the constructor.
private final CycleIlluminaFileMap cycleFileMap;
// Cycle numbers for which a cycle subdirectory was found under the lane dir.
private final Set<Integer> detectedCycles = new TreeSet<Integer>();
public PerTilePerCycleFileUtil(final String extension,
final File base, final FileFaker faker, final int lane) {
super(true, extension, base, faker, lane);
// Side effect: also populates detectedCycles and this.tiles.
// (Original comment said "assigned to numCycles" — no such field is visible
// here; presumably stale. TODO confirm against ParameterizedFileUtil.)
this.cycleFileMap = getPerTilePerCycleFiles();
}
/**
 * Scans the lane directory ({@code base}) for cycle subdirectories matching
 * {@link IlluminaFileUtil#CYCLE_SUBDIRECTORY_PATTERN}, records each detected
 * cycle in {@link #detectedCycles}, and builds a map from cycle number to the
 * tiled files found in that cycle's directory.
 * Side effect: assigns {@code this.tiles} to the unmodifiable union of all
 * tile numbers seen across cycle directories.
 *
 * @return a CycleIlluminaFileMap of all discovered cycles/tiles (empty when
 *         no cycle directories exist)
 */
protected CycleIlluminaFileMap getPerTilePerCycleFiles() {
final CycleIlluminaFileMap cycledMap = new CycleIlluminaFileMap();
final File laneDir = base;
final File[] tempCycleDirs;
tempCycleDirs = IOUtil.getFilesMatchingRegexp(laneDir, IlluminaFileUtil.CYCLE_SUBDIRECTORY_PATTERN);
if (tempCycleDirs == null || tempCycleDirs.length == 0) {
return cycledMap;
}
for (final File tempCycleDir : tempCycleDirs) {
detectedCycles.add(getCycleFromDir(tempCycleDir));
}
// Union of tile numbers across all cycle directories becomes this.tiles.
final Set<Integer> uniqueTiles = new HashSet<Integer>();
for (final File cycleDir : tempCycleDirs) {
final IlluminaFileMap fileMap = getTiledFiles(cycleDir, matchPattern);
uniqueTiles.addAll(fileMap.keySet());
cycledMap.put(getCycleFromDir(cycleDir), fileMap);
}
this.tiles = Collections.unmodifiableList(new ArrayList<Integer>(uniqueTiles));
return cycledMap;
}
// Returns the full map discovered at construction time (all tiles, all cycles).
public CycleIlluminaFileMap getFiles() {
return cycleFileMap;
}
// Returns a view restricted to the given tiles, over all detected cycles.
public CycleIlluminaFileMap getFiles(final List<Integer> tiles) {
return cycleFileMap.keep(tiles, detectedCycles);
}
/**
 * Returns a cycleIlluminaFileMap with all available tiles but limited to the cycles passed in. Any cycles that are missing
 * cycle files or directories will be removed from the cycle list that is kept.
 *
 * @param cycles Cycles that should be present in the output CycleIlluminaFileMap
 * @return A CycleIlluminaFileMap with all available tiles but at most the cycles passed in by the cycles parameter
 */
public CycleIlluminaFileMap getFiles(final int[] cycles) {
//Remove any cycles that were discovered to be NON-EXISTENT when this util was instantiated
final Set<Integer> filteredCycles = removeNonExistentCycles(cycles);
return cycleFileMap.keep(tiles, filteredCycles);
}
/**
 * Returns a cycleIlluminaFileMap that contains only the tiles and cycles specified (and fewer if the original CycleIlluminaFileMap, created
 * on util instantiation, doesn't contain any of these tiles/cycles).
 *
 * @param cycles Cycles that should be present in the output CycleIlluminaFileMap
 * @return A CycleIlluminaFileMap with at most the tiles/cycles listed in the parameters
 */
public CycleIlluminaFileMap getFiles(final List<Integer> tiles, final int[] cycles) {
//Remove any cycles that were discovered to be NON-EXISTENT when this util was instantiated
final Set<Integer> filteredCycles = removeNonExistentCycles(cycles);
return cycleFileMap.keep(tiles, filteredCycles);
}
// Intersects the requested cycles with the cycles actually detected on disk.
private Set<Integer> removeNonExistentCycles(final int[] cycles) {
final TreeSet<Integer> inputCyclesSet = new TreeSet<Integer>();
for (final Integer inputCycle : cycles) {
inputCyclesSet.add(inputCycle);
}
inputCyclesSet.retainAll(detectedCycles);
return inputCyclesSet;
}
// Cycle numbers whose directories were found at construction (sorted; live view).
public Set<Integer> getDetectedCycles() {
return detectedCycles;
}
/**
 * Discover all files of this type in expectedBase that match pattern and construct a list of tiles
 * available based on these files. The same number of tiles is expected in each cycle dir.
 *
 * @return A list of tile integers for all tiles available
 */
public List<Integer> getTiles() {
return tiles;
}
// True if at least one cycle directory contains at least one tile file.
public boolean filesAvailable() {
boolean filesAvailable = false;
for (final IlluminaFileMap fileMap : cycleFileMap.values()) {
if (!fileMap.isEmpty()) {
filesAvailable = true;
break;
}
}
return filesAvailable;
}
// Checks that every expected tile/cycle combination has a file and that, for a
// given tile, all cycle files have the same length (gzipped BCLs exempted).
// Returns a list of human-readable failure descriptions (empty = verified).
@Override
public List<String> verify(final List<Integer> expectedTiles, final int[] expectedCycles) {
final List<String> failures = new LinkedList<String>();
// tile -> length of the first file seen for that tile, used as the reference.
final Map<Integer, Long> tileToFileLengthMap = new HashMap<Integer, Long>();
if (!base.exists()) {
failures.add("Base directory(" + base.getAbsolutePath() + ") does not exist!");
} else {
final CycleIlluminaFileMap cfm = getFiles(expectedTiles, expectedCycles);
for (final int currentCycle : expectedCycles) {
final IlluminaFileMap fileMap = cfm.get(currentCycle);
if (fileMap != null) {
for (final int tile : expectedTiles) {
final File cycleFile = fileMap.get(tile);
if (cycleFile != null) {
if (tileToFileLengthMap.get(tile) == null) {
tileToFileLengthMap.put(tile, cycleFile.length());
} else if (!extension.equals(".bcl.gz") && tileToFileLengthMap.get(tile) != cycleFile.length()) {
// TODO: The gzip bcl files might not be the same length despite having the same content,
// for now we're punting on this but this should be looked into at some point
failures.add("File type " + extension
+ " has cycles files of different length. Current cycle ("
+ currentCycle + ") " +
"Length of first non-empty file (" + tileToFileLengthMap.get(tile)
+ ") length of current cycle (" + cycleFile.length() + ")"
+ " File(" + cycleFile.getAbsolutePath() + ")");
}
} else {
failures.add("File type " + extension + " is missing a file for cycle " + currentCycle + " and tile " + tile);
}
}
} else {
failures.add("Missing file for cycle " + currentCycle + " in directory " + base.getAbsolutePath()
+ " for file type " + extension);
}
}
}
return failures;
}
// Creates placeholder files for every expected tile/cycle that is missing,
// sizing fakes from a real file's cluster count for the same tile when one
// exists (otherwise size 1). Returns failure descriptions for anything that
// could not be created.
@Override
public List<String> fakeFiles(final List<Integer> expectedTiles, final int[] expectedCycles,
final IlluminaFileUtil.SupportedIlluminaFormat format) {
final List<String> failures = new LinkedList<String>();
if (!base.exists()) {
base.mkdirs();
}
// Cycles requested but not present on disk: create their directories first.
final Set<Integer> missingCycleSet = new TreeSet<Integer>();
for (final Integer cycle : expectedCycles) {
missingCycleSet.add(cycle);
}
missingCycleSet.removeAll(detectedCycles);
for (final Integer cycle : missingCycleSet) {
final File cycleDirectory = new File(base, "C" + cycle + ".1");
if (cycleDirectory.mkdirs()) {
detectedCycles.add(cycle);
}
}
// Re-scan now that the missing cycle directories exist.
final CycleIlluminaFileMap cfm = getPerTilePerCycleFiles();
// tile -> cluster count taken from an existing file, reused to size fakes.
final Map<Integer, Integer> tileToSizeMap = new HashMap<Integer, Integer>();
for (final int currentCycle : expectedCycles) {
final IlluminaFileMap fileMap = cfm.get(currentCycle);
if (fileMap == null) {
// Whole cycle absent: fake every expected tile for it.
for (final Integer tile : expectedTiles) {
final File fileToFake = new File(base + File.separator + getFileForCycle(currentCycle, tile));
try {
if (tileToSizeMap.containsKey(tile)) {
faker.fakeFile(fileToFake, tileToSizeMap.get(tile));
}
else{
faker.fakeFile(fileToFake, 1);
}
} catch (final IOException e) {
failures.add("Could not create fake file: " + e.getMessage());
}
}
} else {
// Cycle present: fake only the tiles that lack a file.
for (final int tile : expectedTiles) {
final File cycleFile = fileMap.get(tile);
if (cycleFile != null && !tileToSizeMap.containsKey(tile)) {
tileToSizeMap.put(tile, (int) BclReader.getNumberOfClusters(cycleFile));
}
try {
if (cycleFile == null) {
final File fileToFake = new File(base + File.separator + getFileForCycle(currentCycle, tile));
if (tileToSizeMap.containsKey(tile)) {
faker.fakeFile(fileToFake, tileToSizeMap.get(tile));
} else {
faker.fakeFile(fileToFake, 1);
}
}
} catch (final IOException e) {
failures.add("Could not create fake file: " + e.getMessage());
}
}
}
}
// NOTE(review): cycles whose directories were just created above are still
// reported here as failures — missingCycleSet is not updated on success.
for (final Integer cycle : missingCycleSet) {
failures.add("Missing cycle directory " + cycle + " in directory " + base.getAbsolutePath()
+ " for file type " + extension);
}
return failures;
}
// Relative path of the file for a given cycle/tile, e.g. "C5.1/s_1_1101.bcl".
private String getFileForCycle(final int currentCycle, final int tile) {
return "C" + currentCycle + ".1" + File.separator + "s_" + lane + "_" + tile + extension;
}
// Parses the cycle number out of a "C<cycle>.1" directory name; throws on mismatch.
private static int getCycleFromDir(final File tempCycleDir) {
final String fileName = tempCycleDir.getName();
final Matcher matcher = IlluminaFileUtil.CYCLE_SUBDIRECTORY_PATTERN.matcher(fileName);
if (!matcher.matches()) {
throw new PicardException("Invalid cycle directory name " + tempCycleDir.getName());
}
return Integer.parseInt(matcher.group(1));
}
}
| |
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.editor.impl;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.*;
import com.intellij.openapi.editor.event.DocumentEvent;
import com.intellij.openapi.editor.ex.PrioritizedInternalDocumentListener;
import com.intellij.openapi.util.Getter;
import com.intellij.util.DocumentUtil;
import com.intellij.util.EventDispatcher;
import com.intellij.util.Processor;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.TestOnly;
import java.awt.*;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.function.Predicate;
/**
 * {@link InlayModel} implementation backed by two interval trees: one for inline inlays
 * (custom elements rendered inside a line of text) and one for block inlays (elements
 * rendered above or below a visual line). The model listens to document changes to keep
 * inlays at the caret "sticking" to the correct side of the caret across an insertion,
 * and to re-deliver removal notifications for inlays invalidated during text-move
 * operations once the move has completed.
 * <p>
 * NOTE(review): element-adding methods assert the dispatch thread; query methods do not —
 * presumably callers guarantee EDT access, confirm against platform threading rules.
 */
public class InlayModelImpl implements InlayModel, Disposable {
  private static final Logger LOG = Logger.getInstance(InlayModelImpl.class);
  // Inline inlays: ordered by offset; at equal offsets, inlays NOT related to preceding
  // text sort first (Boolean natural order: false < true).
  private static final Comparator<Inlay> INLINE_ELEMENTS_COMPARATOR = Comparator.comparingInt((Inlay inlay) -> inlay.getOffset())
    .thenComparing(i -> i.isRelatedToPrecedingText());
  // Block inlays by descending priority (priority is negated for the ascending int sort).
  private static final Comparator<BlockInlayImpl> BLOCK_ELEMENTS_PRIORITY_COMPARATOR =
    Comparator.comparingInt((BlockInlayImpl i) -> -i.myPriority);
  // Block inlays grouped by vertical alignment; within a group, above-line inlays sort by
  // ascending priority while the rest sort by descending priority.
  private static final Comparator<BlockInlayImpl> BLOCK_ELEMENTS_COMPARATOR =
    Comparator.comparing((BlockInlayImpl inlay) -> inlay.getVerticalAlignment())
      .thenComparing(i -> i.getVerticalAlignment() == Inlay.VerticalAlignment.ABOVE_LINE ? i.myPriority : -i.myPriority);
  // Shared processor used to recompute cached inlay sizes when editor settings change.
  private static final Processor<InlayImpl> UPDATE_SIZE_PROCESSOR = inlay -> {
    inlay.updateSize();
    return true;
  };
  private final EditorImpl myEditor;
  private final EventDispatcher<Listener> myDispatcher = EventDispatcher.create(Listener.class);
  // Inlays invalidated while a text move is in progress; removal notifications for them
  // are postponed until moveTextHappened (folding model is inconsistent before that).
  final List<InlayImpl> myInlaysInvalidatedOnMove = new ArrayList<>();
  final RangeMarkerTree<InlineInlayImpl> myInlineElementsTree;
  final MarkerTreeWithPartialSums<BlockInlayImpl> myBlockElementsTree;
  // Set externally (package-private) around text-move operations — TODO confirm caller.
  boolean myMoveInProgress;
  // When set, intervals merged into a tree node are placed at the start of the node's
  // interval list instead of the end (affects inline inlay ordering on merge).
  boolean myPutMergedIntervalsAtBeginning;
  private boolean myConsiderCaretPositionOnDocumentUpdates = true;
  // Inline inlays at the caret offset, captured before a pure-insertion document change
  // so their "sticking to right" state can be reset afterwards.
  private List<Inlay> myInlaysAtCaret;
  InlayModelImpl(@NotNull EditorImpl editor) {
    myEditor = editor;
    myInlineElementsTree = new InlineElementsTree(editor.getDocument());
    myBlockElementsTree = new BlockElementsTree(editor.getDocument());
    myEditor.getDocument().addDocumentListener(new PrioritizedInternalDocumentListener() {
      @Override
      public int getPriority() {
        return EditorDocumentPriorities.INLAY_MODEL;
      }
      @Override
      public void beforeDocumentChange(@NotNull DocumentEvent event) {
        if (myEditor.getDocument().isInBulkUpdate()) return;
        int offset = event.getOffset();
        // Only for pure insertions exactly at the caret: decide, per inlay at that offset,
        // whether it should stay left of the inserted text or move right with the caret.
        if (myConsiderCaretPositionOnDocumentUpdates && event.getOldLength() == 0 && offset == myEditor.getCaretModel().getOffset()) {
          List<Inlay> inlays = getInlineElementsInRange(offset, offset);
          int inlayCount = inlays.size();
          if (inlayCount > 0) {
            VisualPosition inlaysStartPosition = myEditor.offsetToVisualPosition(offset, false, false);
            VisualPosition caretPosition = myEditor.getCaretModel().getVisualPosition();
            if (inlaysStartPosition.line == caretPosition.line &&
                caretPosition.column >= inlaysStartPosition.column && caretPosition.column <= inlaysStartPosition.column + inlayCount) {
              myInlaysAtCaret = inlays;
              // Inlays visually at or after the caret column stick to the right side.
              for (int i = 0; i < inlayCount; i++) {
                ((InlayImpl)inlays.get(i)).setStickingToRight(i >= (caretPosition.column - inlaysStartPosition.column));
              }
            }
          }
        }
      }
      @Override
      public void documentChanged(@NotNull DocumentEvent event) {
        // Restore each inlay's default sticking behavior after the change is applied.
        if (myInlaysAtCaret != null) {
          for (Inlay inlay : myInlaysAtCaret) {
            ((InlayImpl)inlay).setStickingToRight(inlay.isRelatedToPrecedingText());
          }
          myInlaysAtCaret = null;
        }
      }
      @Override
      public void moveTextHappened(@NotNull Document document, int start, int end, int base) {
        // Deliver the removal notifications that were postponed during the move.
        for (InlayImpl inlay : myInlaysInvalidatedOnMove) {
          notifyRemoved(inlay);
        }
        myInlaysInvalidatedOnMove.clear();
      }
    }, this);
  }
  /** Recomputes cached sizes of all inlays, e.g. after editor settings change. */
  void reinitSettings() {
    myInlineElementsTree.processAll(UPDATE_SIZE_PROCESSOR);
    myBlockElementsTree.processAll(UPDATE_SIZE_PROCESSOR);
  }
  @Override
  public void dispose() {
    myInlineElementsTree.dispose(myEditor.getDocument());
    myBlockElementsTree.dispose(myEditor.getDocument());
  }
  /**
   * Adds an inline inlay at the given offset (clamped to document bounds).
   * Returns null if the offset falls inside a surrogate pair. EDT-only.
   */
  @Nullable
  @Override
  public <T extends EditorCustomElementRenderer> Inlay<T> addInlineElement(int offset,
                                                                           boolean relatesToPrecedingText,
                                                                           @NotNull T renderer) {
    ApplicationManager.getApplication().assertIsDispatchThread();
    Document document = myEditor.getDocument();
    if (DocumentUtil.isInsideSurrogatePair(document, offset)) return null;
    offset = Math.max(0, Math.min(document.getTextLength(), offset));
    InlineInlayImpl<T> inlay = new InlineInlayImpl<>(myEditor, offset, relatesToPrecedingText, renderer);
    notifyAdded(inlay);
    return inlay;
  }
  /** Adds a block inlay at the given offset (clamped to document bounds). EDT-only. */
  @Nullable
  @Override
  public <T extends EditorCustomElementRenderer> Inlay<T> addBlockElement(int offset,
                                                                          boolean relatesToPrecedingText,
                                                                          boolean showAbove,
                                                                          int priority,
                                                                          @NotNull T renderer) {
    ApplicationManager.getApplication().assertIsDispatchThread();
    offset = Math.max(0, Math.min(myEditor.getDocument().getTextLength(), offset));
    BlockInlayImpl<T> inlay = new BlockInlayImpl<>(myEditor, offset, relatesToPrecedingText, showAbove, priority, renderer);
    notifyAdded(inlay);
    return inlay;
  }
  @NotNull
  @Override
  public List<Inlay> getInlineElementsInRange(int startOffset, int endOffset) {
    List<InlineInlayImpl> range =
      getElementsInRange(myInlineElementsTree, startOffset, endOffset, inlay -> true, INLINE_ELEMENTS_COMPARATOR);
    //noinspection unchecked
    return (List)range;
  }
  @NotNull
  @Override
  public <T> List<Inlay<? extends T>> getInlineElementsInRange(int startOffset, int endOffset, Class<T> type) {
    List<InlineInlayImpl> range =
      getElementsInRange(myInlineElementsTree, startOffset, endOffset, inlay -> type.isInstance(inlay.myRenderer),
                         INLINE_ELEMENTS_COMPARATOR);
    //noinspection unchecked
    return (List)range;
  }
  @NotNull
  @Override
  public List<Inlay> getBlockElementsInRange(int startOffset, int endOffset) {
    List<BlockInlayImpl> range =
      getElementsInRange(myBlockElementsTree, startOffset, endOffset, inlay -> true, BLOCK_ELEMENTS_PRIORITY_COMPARATOR);
    //noinspection unchecked
    return (List)range;
  }
  @NotNull
  @Override
  public <T> List<Inlay<? extends T>> getBlockElementsInRange(int startOffset, int endOffset, Class<T> type) {
    List<BlockInlayImpl> range = getElementsInRange(myBlockElementsTree, startOffset, endOffset, inlay -> type.isInstance(inlay.myRenderer),
                                                    BLOCK_ELEMENTS_PRIORITY_COMPARATOR);
    //noinspection unchecked
    return (List)range;
  }
  /**
   * Collects inlays from {@code tree} overlapping [startOffset, endOffset] that satisfy
   * {@code predicate}, sorted with {@code comparator}.
   */
  private static <T extends Inlay> List<T> getElementsInRange(@NotNull IntervalTreeImpl<? extends T> tree,
                                                              int startOffset,
                                                              int endOffset,
                                                              Predicate<? super T> predicate,
                                                              Comparator<? super T> comparator) {
    List<T> result = new ArrayList<>();
    tree.processOverlappingWith(startOffset, endOffset, inlay -> {
      if (predicate.test(inlay)) result.add(inlay);
      return true;
    });
    Collections.sort(result, comparator);
    return result;
  }
  /**
   * Returns block inlays shown above (or below, per {@code above}) the given visual line,
   * excluding those whose offsets are inside collapsed fold regions.
   */
  @NotNull
  @Override
  public List<Inlay> getBlockElementsForVisualLine(int visualLine, boolean above) {
    int visibleLineCount = myEditor.getVisibleLineCount();
    if (visualLine < 0 || visualLine >= visibleLineCount) return Collections.emptyList();
    List<BlockInlayImpl> result = new ArrayList<>();
    int startOffset = myEditor.visualLineStartOffset(visualLine);
    // For the last visual line, the range extends to the end of the document.
    int endOffset = visualLine == visibleLineCount - 1 ? myEditor.getDocument().getTextLength()
                                                       : myEditor.visualLineStartOffset(visualLine + 1) - 1;
    myBlockElementsTree.processOverlappingWith(startOffset, endOffset, inlay -> {
      if (inlay.myShowAbove == above && !myEditor.getFoldingModel().isOffsetCollapsed(inlay.getOffset())) {
        result.add(inlay);
      }
      return true;
    });
    Collections.sort(result, BLOCK_ELEMENTS_COMPARATOR);
    //noinspection unchecked
    return (List)result;
  }
  /**
   * Total pixel height of above-line block inlays located before the given visual line,
   * computed from the tree's partial sums minus heights hidden by collapsed folds.
   */
  public int getHeightOfBlockElementsBeforeVisualLine(int visualLine) {
    if (visualLine < 0 || !hasBlockElements()) return 0;
    int visibleLineCount = myEditor.getVisibleLineCount();
    if (visualLine >= visibleLineCount) {
      // Past the last line: sum everything and subtract all folded inlay heights.
      return myBlockElementsTree.getSumOfValuesUpToOffset(Integer.MAX_VALUE) -
             myEditor.getFoldingModel().getTotalHeightOfFoldedBlockInlays();
    }
    int[] result = {0};
    int startOffset = myEditor.visualLineStartOffset(visualLine);
    int endOffset = visualLine >= visibleLineCount - 1 ? myEditor.getDocument().getTextLength()
                                                        : myEditor.visualLineStartOffset(visualLine + 1) - 1;
    if (visualLine > 0) {
      result[0] += myBlockElementsTree.getSumOfValuesUpToOffset(startOffset - 1) -
                   myEditor.getFoldingModel().getHeightOfFoldedBlockInlaysBefore(startOffset);
    }
    // Add above-line inlays belonging to this visual line itself.
    myBlockElementsTree.processOverlappingWith(startOffset, endOffset, inlay -> {
      if (inlay.myShowAbove && !myEditor.getFoldingModel().isOffsetCollapsed(inlay.getOffset())) {
        result[0] += inlay.getHeightInPixels();
      }
      return true;
    });
    return result[0];
  }
  @Override
  public boolean hasBlockElements() {
    return myBlockElementsTree.size() > 0;
  }
  @Override
  public boolean hasInlineElementsInRange(int startOffset, int endOffset) {
    // Processor returns false on the first hit, aborting traversal; an aborted traversal
    // (processOverlappingWith == false) means at least one inlay exists in the range.
    return !myInlineElementsTree.processOverlappingWith(startOffset, endOffset, inlay -> false);
  }
  @Override
  public boolean hasInlineElements() {
    return myInlineElementsTree.size() > 0;
  }
  @Override
  public boolean hasInlineElementAt(int offset) {
    return !myInlineElementsTree.processOverlappingWith(offset, offset, inlay -> false);
  }
  @Override
  public boolean hasInlineElementAt(@NotNull VisualPosition visualPosition) {
    int offset = myEditor.logicalPositionToOffset(myEditor.visualToLogicalPosition(visualPosition));
    int inlayCount = getInlineElementsInRange(offset, offset).size();
    if (inlayCount == 0) return false;
    VisualPosition inlayStartPosition = myEditor.offsetToVisualPosition(offset, false, false);
    return visualPosition.line == inlayStartPosition.line &&
           visualPosition.column >= inlayStartPosition.column && visualPosition.column < inlayStartPosition.column + inlayCount;
  }
  /** Returns the inline inlay occupying the given visual column, if any. */
  @Nullable
  @Override
  public Inlay getInlineElementAt(@NotNull VisualPosition visualPosition) {
    int offset = myEditor.logicalPositionToOffset(myEditor.visualToLogicalPosition(visualPosition));
    List<Inlay> inlays = getInlineElementsInRange(offset, offset);
    if (inlays.isEmpty()) return null;
    VisualPosition inlayStartPosition = myEditor.offsetToVisualPosition(offset, false, false);
    if (visualPosition.line != inlayStartPosition.line) return null;
    // Each inlay at the offset occupies one visual column starting at the inlay start.
    int inlayIndex = visualPosition.column - inlayStartPosition.column;
    return inlayIndex >= 0 && inlayIndex < inlays.size() ? inlays.get(inlayIndex) : null;
  }
  /** Hit-tests block inlays (by y), then inline inlays (by x) at the given editor point. */
  @Nullable
  @Override
  public Inlay getElementAt(@NotNull Point point) {
    boolean hasInlineElements = hasInlineElements();
    boolean hasBlockElements = hasBlockElements();
    if (!hasInlineElements && !hasBlockElements) return null;
    VisualPosition visualPosition = myEditor.xyToVisualPosition(point);
    if (hasBlockElements) {
      int visualLine = visualPosition.line;
      int baseY = myEditor.visualLineToY(visualLine);
      if (point.y < baseY) {
        // Above the line's text: walk above-line inlays bottom-up.
        List<Inlay> inlays = getBlockElementsForVisualLine(visualLine, true);
        int yDiff = baseY - point.y;
        for (int i = inlays.size() - 1; i >= 0; i--) {
          Inlay inlay = inlays.get(i);
          int height = inlay.getHeightInPixels();
          if (yDiff <= height) return inlay;
          yDiff -= height;
        }
        // xyToVisualPosition mapped the point to this line, so an inlay must cover it.
        throw new IllegalStateException();
      }
      else {
        int lineBottom = baseY + myEditor.getLineHeight();
        if (point.y >= lineBottom) {
          // Below the line's text: walk below-line inlays top-down.
          List<Inlay> inlays = getBlockElementsForVisualLine(visualLine, false);
          int yDiff = point.y - lineBottom;
          for (Inlay inlay : inlays) {
            int height = inlay.getHeightInPixels();
            if (yDiff < height) return inlay;
            yDiff -= height;
          }
          throw new IllegalStateException();
        }
      }
    }
    if (hasInlineElements) {
      int offset = myEditor.logicalPositionToOffset(myEditor.visualToLogicalPosition(visualPosition));
      List<Inlay> inlays = getInlineElementsInRange(offset, offset);
      if (inlays.isEmpty()) return null;
      VisualPosition startVisualPosition = myEditor.offsetToVisualPosition(offset);
      int x = myEditor.visualPositionToXY(startVisualPosition).x;
      // Inlays at the same offset are laid out left-to-right; find the one under point.x.
      for (Inlay inlay : inlays) {
        int endX = x + inlay.getWidthInPixels();
        if (point.x >= x && point.x < endX) return inlay;
        x = endX;
      }
    }
    return null;
  }
  @Override
  public void setConsiderCaretPositionOnDocumentUpdates(boolean enabled) {
    myConsiderCaretPositionOnDocumentUpdates = enabled;
  }
  @Override
  public void addListener(@NotNull Listener listener, @NotNull Disposable disposable) {
    myDispatcher.addListener(listener, disposable);
  }
  private void notifyAdded(InlayImpl inlay) {
    myDispatcher.getMulticaster().onAdded(inlay);
  }
  void notifyChanged(InlayImpl inlay) {
    myDispatcher.getMulticaster().onUpdated(inlay);
  }
  void notifyRemoved(InlayImpl inlay) {
    myDispatcher.getMulticaster().onRemoved(inlay);
  }
  /** Asserts that no inline inlay sits inside a surrogate pair (test-only invariant check). */
  @TestOnly
  public void validateState() {
    for (Inlay inlay : getInlineElementsInRange(0, myEditor.getDocument().getTextLength())) {
      LOG.assertTrue(!DocumentUtil.isInsideSurrogatePair(myEditor.getDocument(), inlay.getOffset()));
    }
  }
  /** Tree storing inline inlays; keeps hard references so inlays survive until disposed. */
  private class InlineElementsTree extends HardReferencingRangeMarkerTree<InlineInlayImpl> {
    InlineElementsTree(@NotNull Document document) {
      super(document);
    }
    @NotNull
    @Override
    protected Node<InlineInlayImpl> createNewNode(@NotNull InlineInlayImpl key, int start, int end,
                                                  boolean greedyToLeft, boolean greedyToRight, boolean stickingToRight, int layer) {
      return new Node<InlineInlayImpl>(this, key, start, end, greedyToLeft, greedyToRight, stickingToRight) {
        @Override
        void addIntervalsFrom(@NotNull IntervalNode<InlineInlayImpl> otherNode) {
          super.addIntervalsFrom(otherNode);
          // Optionally move the just-merged intervals to the front of the list to
          // preserve the expected visual ordering of inlays at the same offset.
          if (myPutMergedIntervalsAtBeginning) {
            List<Getter<InlineInlayImpl>> added = ContainerUtil.subList(intervals, intervals.size() - otherNode.intervals.size());
            List<Getter<InlineInlayImpl>> addedCopy = new ArrayList<>(added);
            added.clear();
            intervals.addAll(0, addedCopy);
          }
        }
      };
    }
    @Override
    void fireBeforeRemoved(@NotNull InlineInlayImpl inlay, @NotNull @NonNls Object reason) {
      // OFFSET_BEFORE_DISPOSAL set means the inlay was disposed explicitly, not invalidated.
      if (inlay.getUserData(InlayImpl.OFFSET_BEFORE_DISPOSAL) == null) {
        if (myMoveInProgress) {
          // delay notification about invalidated inlay - folding model is not consistent at this point
          // (FoldingModelImpl.moveTextHappened hasn't been called yet at this point)
          myInlaysInvalidatedOnMove.add(inlay);
        }
        else {
          notifyRemoved(inlay);
        }
      }
    }
  }
  /** Tree storing block inlays with partial sums of their pixel heights. */
  private class BlockElementsTree extends MarkerTreeWithPartialSums<BlockInlayImpl> {
    BlockElementsTree(@NotNull Document document) {
      super(document);
    }
    @Override
    void fireBeforeRemoved(@NotNull BlockInlayImpl inlay, @NotNull @NonNls Object reason) {
      if (inlay.getUserData(InlayImpl.OFFSET_BEFORE_DISPOSAL) == null) {
        notifyRemoved(inlay);
      }
    }
  }
}
| |
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.android.sunshine.app;
import android.app.AlarmManager;
import android.app.PendingIntent;
import android.content.Context;
import android.content.Intent;
import android.database.Cursor;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.SystemClock;
import android.preference.PreferenceManager;
import android.support.v4.app.Fragment;
import android.support.v4.content.CursorLoader;
import android.text.format.Time;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.ListView;
import android.database.Cursor;
import android.net.Uri;
import android.support.v4.app.Fragment;
import android.support.v4.app.LoaderManager;
import android.support.v4.content.Loader;
import android.support.v4.content.CursorLoader;
import com.example.android.sunshine.app.data.WeatherContract;
import com.example.android.sunshine.app.sync.SunshineSyncAdapter;
import java.io.IOException;
import java.util.ArrayList;
/**
* Encapsulates fetching the forecast and displaying it as a {@link ListView} layout.
*/
public class ForecastFragment extends Fragment implements LoaderManager.LoaderCallbacks<Cursor>{
    private ForecastAdapter mForecastAdapter;
    // FIX: was MainActivity.class.getSimpleName(), which mislabeled this fragment's
    // log output; also made static final since the tag is a per-class constant.
    private static final String LOG_TAG = ForecastFragment.class.getSimpleName();
    static final String[] FORECAST_COLUMNS = {
            WeatherContract.WeatherEntry.TABLE_NAME + "." + WeatherContract.WeatherEntry._ID,
            WeatherContract.WeatherEntry.COLUMN_DATE,
            WeatherContract.WeatherEntry.COLUMN_SHORT_DESC,
            WeatherContract.WeatherEntry.COLUMN_MAX_TEMP,
            WeatherContract.WeatherEntry.COLUMN_MIN_TEMP,
            WeatherContract.LocationEntry.COLUMN_LOCATION_SETTING,
            WeatherContract.WeatherEntry.COLUMN_WEATHER_ID,
            WeatherContract.LocationEntry.COLUMN_COORD_LAT,
            WeatherContract.LocationEntry.COLUMN_COORD_LONG
    };
    // These indices are tied to FORECAST_COLUMNS. If FORECAST_COLUMNS changes, these
    // must change.
    static final int COL_WEATHER_ID = 0;
    static final int COL_WEATHER_DATE = 1;
    static final int COL_WEATHER_DESC = 2;
    static final int COL_WEATHER_MAX_TEMP = 3;
    static final int COL_WEATHER_MIN_TEMP = 4;
    static final int COL_LOCATION_SETTING = 5;
    static final int COL_WEATHER_CONDITION_ID = 6;
    static final int COL_COORD_LAT = 7;
    static final int COL_COORD_LONG = 8;
    private static final int LOADER_ID = 0;
    private static final String POS_TAG = "position";
    // Currently-selected list position, or -1 when nothing has been selected yet.
    public int mPosition;
    private boolean mUseTodayLayout;
    static final String PANE_TAG = "pane";
    private ListView mListView;

    public ForecastFragment() {
    }

    /** Persists the selected list position so it survives configuration changes. */
    @Override
    public void onSaveInstanceState(Bundle bundle) {
        super.onSaveInstanceState(bundle);
        if (mPosition != -1) {
            bundle.putInt(POS_TAG, mPosition);
        }
    }

    /** Creates a loader for weather rows at the preferred location, from today onward. */
    @Override
    public Loader<Cursor> onCreateLoader(int i, Bundle bundle) {
        String locationSetting = Utility.getPreferredLocation(getActivity());
        // Sort order: Ascending, by date.
        String sortOrder = WeatherContract.WeatherEntry.COLUMN_DATE + " ASC";
        Uri weatherForLocationUri = WeatherContract.WeatherEntry.buildWeatherLocationWithStartDate(
                locationSetting, System.currentTimeMillis());
        return new CursorLoader(getActivity(), weatherForLocationUri, FORECAST_COLUMNS, null, null, sortOrder);
    }

    @Override
    public void onLoadFinished(Loader<Cursor> cursorLoader, Cursor cursor) {
        mForecastAdapter.swapCursor(cursor);
        // FIX: only scroll when a position was actually selected/restored; previously
        // this called smoothScrollToPosition(-1) when nothing had been selected.
        if (mPosition != -1) {
            mListView.smoothScrollToPosition(mPosition);
        }
    }

    @Override
    public void onLoaderReset(Loader<Cursor> cursorLoader) {
        mForecastAdapter.swapCursor(null);
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Add this line in order for this fragment to handle menu events.
        setHasOptionsMenu(true);
    }

    @Override
    public void onActivityCreated(Bundle bundle) {
        getLoaderManager().initLoader(LOADER_ID, null, this);
        super.onActivityCreated(bundle);
    }

    @Override
    public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
        inflater.inflate(R.menu.forecastfragment, menu);
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Handle action bar item clicks here. The action bar will
        // automatically handle clicks on the Home/Up button, so long
        // as you specify a parent activity in AndroidManifest.xml.
        int id = item.getItemId();
        if (id == R.id.action_map) {
            openPreferredLocationInMap();
            return true;
        }
        return super.onOptionsItemSelected(item);
    }

    /**
     * Opens the first forecast row's coordinates in an external map app, if any app
     * can handle a geo: intent; otherwise logs the failure.
     */
    private void openPreferredLocationInMap() {
        // Using the URI scheme for showing a location found on a map. This super-handy
        // intent can is detailed in the "Common Intents" page of Android's developer site:
        // http://developer.android.com/guide/components/intents-common.html#Maps
        if ( null != mForecastAdapter ) {
            Cursor c = mForecastAdapter.getCursor();
            if ( null != c ) {
                c.moveToPosition(0);
                String posLat = c.getString(COL_COORD_LAT);
                String posLong = c.getString(COL_COORD_LONG);
                Uri geoLocation = Uri.parse("geo:" + posLat + "," + posLong);
                Intent intent = new Intent(Intent.ACTION_VIEW);
                intent.setData(geoLocation);
                if (intent.resolveActivity(getActivity().getPackageManager()) != null) {
                    startActivity(intent);
                } else {
                    Log.d(LOG_TAG, "Couldn't call " + geoLocation.toString() + ", no receiving apps installed!");
                }
            }
        }
    }

    /** Inflates the list layout, wires the adapter, and restores any saved selection. */
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             final Bundle savedInstanceState) {
        mForecastAdapter = new ForecastAdapter(getActivity(), null, 0);
        mForecastAdapter.setUseTodayLayout(mUseTodayLayout);
        View rootView = inflater.inflate(R.layout.fragment_main, container, false);
        // Get a reference to the ListView, and attach this adapter to it.
        mListView = (ListView) rootView.findViewById(R.id.listview_forecast);
        mPosition = -1;
        mListView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
            @Override
            public void onItemClick(AdapterView adapterView, View view, int position, long l) {
                // CursorAdapter returns a cursor at the correct position for getItem(), or null
                // if it cannot seek to that position.
                Cursor cursor = (Cursor) adapterView.getItemAtPosition(position);
                if (cursor != null) {
                    String locationSetting = Utility.getPreferredLocation(getActivity());
                    Uri weatherForLocationUri = WeatherContract.WeatherEntry.buildWeatherLocationWithStartDate(
                            locationSetting, cursor.getLong(COL_WEATHER_DATE));
                    // The hosting activity (MainActivity) implements Callback.
                    Callback cb = (Callback) getActivity();
                    mPosition = position;
                    cb.onItemSelected(weatherForLocationUri);
                }
            }
        });
        mListView.setAdapter(mForecastAdapter);
        if (savedInstanceState != null && savedInstanceState.containsKey(POS_TAG)) {
            mPosition = savedInstanceState.getInt(POS_TAG);
            mListView.smoothScrollToPosition(mPosition);
        }
        return rootView;
    }

    /** Triggers an immediate sync via the sync adapter. */
    private void updateWeather() {
        SunshineSyncAdapter.syncImmediately(getActivity());
    }

    /** Re-syncs and restarts the loader after the preferred location changes. */
    public void onLocationChanged() {
        updateWeather();
        getLoaderManager().restartLoader(LOADER_ID, null, this);
    }

    /** In two-pane mode the special "today" list layout is not used. */
    public void setPaneMode(boolean twoPane) {
        mUseTodayLayout = !twoPane;
        if (mForecastAdapter != null) {
            mForecastAdapter.setUseTodayLayout(mUseTodayLayout);
        }
    }

    /**
     * A callback interface that all activities containing this fragment must
     * implement. This mechanism allows activities to be notified of item
     * selections.
     */
    public interface Callback {
        /**
         * DetailFragmentCallback for when an item has been selected.
         */
        public void onItemSelected(Uri dateUri);
    }
}
| |
/*
* Copyright 2016 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.linkedin.drelephant.mapreduce.heuristics;
import com.linkedin.drelephant.configurations.heuristic.HeuristicConfigurationData;
import com.linkedin.drelephant.util.Utils;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import com.linkedin.drelephant.analysis.Heuristic;
import com.linkedin.drelephant.analysis.HeuristicResult;
import com.linkedin.drelephant.analysis.Severity;
import com.linkedin.drelephant.mapreduce.data.MapReduceCounterData;
import com.linkedin.drelephant.mapreduce.data.MapReduceApplicationData;
import com.linkedin.drelephant.mapreduce.data.MapReduceTaskData;
import com.linkedin.drelephant.math.Statistics;
import java.util.Map;
import org.apache.commons.io.FileUtils;
import org.apache.log4j.Logger;
/**
* This heuristic deals with the efficiency of container size
*/
/**
 * This heuristic deals with the efficiency of container size
 */
public abstract class GenericMemoryHeuristic implements Heuristic<MapReduceApplicationData> {
  private static final Logger logger = Logger.getLogger(GenericMemoryHeuristic.class);
  // Fallback requested-container size (in MB) when no configured value is available.
  private static final long CONTAINER_MEMORY_DEFAULT_MBYTES = 2048L;
  // Severity Parameters
  private static final String MEM_RATIO_SEVERITY = "memory_ratio_severity";
  private static final String CONTAINER_MEM_SEVERITY = "container_memory_severity";
  private static final String CONTAINER_MEM_DEFAULT_MB = "container_memory_default_mb";
  // Default value of parameters
  private double[] memRatioLimits = {0.6d, 0.5d, 0.4d, 0.3d};   // Avg Physical Mem of Tasks / Container Mem
  private double[] memoryLimits = {1.1d, 1.5d, 2.0d, 2.5d};     // Container Memory Severity Limits
  // Name of the job-conf property holding the requested container memory (MB),
  // e.g. the map or reduce memory setting, supplied by the concrete subclass.
  private String _containerMemConf;
  private HeuristicConfigurationData _heuristicConfData;

  /**
   * Returns the default container memory in MB, taken from the heuristic's
   * param map when present and parseable, otherwise the built-in 2048 MB.
   */
  private long getContainerMemDefaultMBytes() {
    Map<String, String> paramMap = _heuristicConfData.getParamMap();
    if (paramMap.containsKey(CONTAINER_MEM_DEFAULT_MB)) {
      String strValue = paramMap.get(CONTAINER_MEM_DEFAULT_MB);
      try {
        return Long.parseLong(strValue);
      } catch (NumberFormatException e) {
        // Fall through to the built-in default on a malformed configured value.
        logger.warn(CONTAINER_MEM_DEFAULT_MB + ": expected number [" + strValue + "]");
      }
    }
    return CONTAINER_MEMORY_DEFAULT_MBYTES;
  }

  /**
   * Loads severity thresholds from the heuristic configuration, falling back to the
   * field defaults. Note the order matters: memoryLimits are scaled in place from
   * multiples of the default container size into absolute byte thresholds.
   */
  private void loadParameters() {
    Map<String, String> paramMap = _heuristicConfData.getParamMap();
    String heuristicName = _heuristicConfData.getHeuristicName();

    double[] confMemRatioLimits = Utils.getParam(paramMap.get(MEM_RATIO_SEVERITY), memRatioLimits.length);
    if (confMemRatioLimits != null) {
      memRatioLimits = confMemRatioLimits;
    }
    logger.info(heuristicName + " will use " + MEM_RATIO_SEVERITY + " with the following threshold settings: "
        + Arrays.toString(memRatioLimits));

    long containerMemDefaultBytes = getContainerMemDefaultMBytes() * FileUtils.ONE_MB;
    logger.info(heuristicName + " will use " + CONTAINER_MEM_DEFAULT_MB + " with the following threshold setting: "
        + containerMemDefaultBytes);

    double[] confMemoryLimits = Utils.getParam(paramMap.get(CONTAINER_MEM_SEVERITY), memoryLimits.length);
    if (confMemoryLimits != null) {
      memoryLimits = confMemoryLimits;
    }
    logger.info(heuristicName + " will use " + CONTAINER_MEM_SEVERITY + " with the following threshold settings: "
        + Arrays.toString(memoryLimits));
    // Convert the configured multipliers into absolute byte thresholds.
    for (int i = 0; i < memoryLimits.length; i++) {
      memoryLimits[i] = memoryLimits[i] * containerMemDefaultBytes;
    }
  }

  /**
   * @param containerMemConf  name of the job-conf property with the requested container memory (MB)
   * @param heuristicConfData heuristic configuration (param map, names) used for thresholds
   */
  protected GenericMemoryHeuristic(String containerMemConf, HeuristicConfigurationData heuristicConfData) {
    this._containerMemConf = containerMemConf;
    this._heuristicConfData = heuristicConfData;
    loadParameters();
  }

  // Subclasses select map-side or reduce-side tasks from the application data.
  protected abstract MapReduceTaskData[] getTasks(MapReduceApplicationData data);

  @Override
  public HeuristicConfigurationData getHeuristicConfData() {
    return _heuristicConfData;
  }

  /**
   * Computes memory-utilization severity for the job: compares average task physical
   * memory against the requested container memory, reduced when the requested container
   * size is close to the default. Returns null for unsuccessful applications.
   */
  @Override
  public HeuristicResult apply(MapReduceApplicationData data) {
    if(!data.getSucceeded()) {
      return null;
    }

    String containerSizeStr = data.getConf().getProperty(_containerMemConf);
    long containerMem = -1L;

    if (containerSizeStr != null) {
      try {
        containerMem = Long.parseLong(containerSizeStr);
      } catch (NumberFormatException e0) {
        // Some job has a string var like "${VAR}" for this config.
        if(containerSizeStr.startsWith("$")) {
          // Resolve the referenced property name and retry the parse once.
          String realContainerConf = containerSizeStr.substring(containerSizeStr.indexOf("{")+1,
              containerSizeStr.indexOf("}"));
          String realContainerSizeStr = data.getConf().getProperty(realContainerConf);
          try {
            containerMem = Long.parseLong(realContainerSizeStr);
          }
          catch (NumberFormatException e1) {
            logger.warn(realContainerConf + ": expected number [" + realContainerSizeStr + "]");
          }
        } else {
          logger.warn(_containerMemConf + ": expected number [" + containerSizeStr + "]");
        }
      }
    }
    if (containerMem < 0) {
      containerMem = getContainerMemDefaultMBytes();
    }
    // containerMem held MB up to here; convert to bytes to match the counter values below.
    containerMem *= FileUtils.ONE_MB;

    MapReduceTaskData[] tasks = getTasks(data);
    List<Long> taskPMems = new ArrayList<Long>();
    List<Long> taskVMems = new ArrayList<Long>();
    List<Long> runtimesMs = new ArrayList<Long>();
    long taskPMin = Long.MAX_VALUE;
    long taskPMax = 0;
    for (MapReduceTaskData task : tasks) {
      if (task.isTimeAndCounterDataPresent()) {
        runtimesMs.add(task.getTotalRunTimeMs());
        long taskPMem = task.getCounters().get(MapReduceCounterData.CounterName.PHYSICAL_MEMORY_BYTES);
        long taskVMem = task.getCounters().get(MapReduceCounterData.CounterName.VIRTUAL_MEMORY_BYTES);
        taskPMems.add(taskPMem);
        taskPMin = Math.min(taskPMin, taskPMem);
        taskPMax = Math.max(taskPMax, taskPMem);
        taskVMems.add(taskVMem);
      }
    }

    // No task contributed data: report 0 rather than the MAX_VALUE sentinel.
    if(taskPMin == Long.MAX_VALUE) {
      taskPMin = 0;
    }

    long taskPMemAvg = Statistics.average(taskPMems);
    long taskVMemAvg = Statistics.average(taskVMems);
    long averageTimeMs = Statistics.average(runtimesMs);

    Severity severity;
    if (tasks.length == 0) {
      severity = Severity.NONE;
    } else {
      severity = getTaskMemoryUtilSeverity(taskPMemAvg, containerMem);
    }

    HeuristicResult result = new HeuristicResult(_heuristicConfData.getClassName(),
        _heuristicConfData.getHeuristicName(), severity, Utils.getHeuristicScore(severity, tasks.length));

    result.addResultDetail("Number of tasks", Integer.toString(tasks.length));
    result.addResultDetail("Avg task runtime", Statistics.readableTimespan(averageTimeMs));
    result.addResultDetail("Avg Physical Memory (MB)", Long.toString(taskPMemAvg / FileUtils.ONE_MB));
    result.addResultDetail("Max Physical Memory (MB)", Long.toString(taskPMax / FileUtils.ONE_MB));
    result.addResultDetail("Min Physical Memory (MB)", Long.toString(taskPMin / FileUtils.ONE_MB));
    result.addResultDetail("Avg Virtual Memory (MB)", Long.toString(taskVMemAvg / FileUtils.ONE_MB));
    result.addResultDetail("Requested Container Memory", FileUtils.byteCountToDisplaySize(containerMem));

    return result;
  }

  /** Combines the utilization-ratio severity with the container-size severity (takes the minimum). */
  private Severity getTaskMemoryUtilSeverity(long taskMemAvg, long taskMemMax) {
    double ratio = ((double)taskMemAvg) / taskMemMax;
    Severity sevRatio = getMemoryRatioSeverity(ratio);

    // Severity is reduced if the requested container memory is close to default
    Severity sevMax = getContainerMemorySeverity(taskMemMax);

    return Severity.min(sevRatio, sevMax);
  }

  // Higher requested container memory (in bytes) => higher severity.
  private Severity getContainerMemorySeverity(long taskMemMax) {
    return Severity.getSeverityAscending(
        taskMemMax, memoryLimits[0], memoryLimits[1], memoryLimits[2], memoryLimits[3]);
  }

  // Lower utilization ratio => higher severity (memory is over-provisioned).
  private Severity getMemoryRatioSeverity(double ratio) {
    return Severity.getSeverityDescending(
        ratio, memRatioLimits[0], memRatioLimits[1], memRatioLimits[2], memRatioLimits[3]);
  }
}
| |
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.execution.actions;
import com.intellij.execution.ExecutionBundle;
import com.intellij.execution.ProgramRunnerUtil;
import com.intellij.execution.RunnerAndConfigurationSettings;
import com.intellij.execution.configurations.ConfigurationType;
import com.intellij.execution.configurations.LocatableConfiguration;
import com.intellij.execution.configurations.LocatableConfigurationBase;
import com.intellij.execution.configurations.RunConfiguration;
import com.intellij.execution.lineMarker.RunLineMarkerProvider;
import com.intellij.ide.IdeEventQueue;
import com.intellij.ide.macro.MacroManager;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.progress.util.ProgressIndicatorUtils;
import com.intellij.openapi.project.DumbService;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.popup.JBPopupFactory;
import com.intellij.openapi.ui.popup.ListPopup;
import com.intellij.openapi.ui.popup.PopupStep;
import com.intellij.openapi.ui.popup.util.BaseListPopupStep;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.ui.awt.RelativePoint;
import com.intellij.util.ThreeState;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.awt.event.InputEvent;
import java.awt.event.MouseEvent;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.function.Supplier;
/**
 * Base class for actions that create and/or run a run configuration derived from the
 * current context (editor caret, selected file, etc.).
 *
 * <p>Subclasses implement {@link #perform(ConfigurationContext)} to act on the chosen
 * context and {@link #updatePresentation} to render the action text. When several
 * configurations can be produced from the same context, the action either exposes them
 * as child actions or shows a chooser popup.
 */
public abstract class BaseRunConfigurationAction extends ActionGroup {
  protected static final Logger LOG = Logger.getInstance(BaseRunConfigurationAction.class);

  protected BaseRunConfigurationAction(@NotNull Supplier<String> text, @NotNull Supplier<String> description, final Icon icon) {
    super(text, description, icon);
    setPopup(true);
    setEnabledInModalContext(true);
  }

  @Override
  public AnAction @NotNull [] getChildren(@Nullable AnActionEvent e) {
    return e != null ? getChildren(e.getDataContext()) : EMPTY_ARRAY;
  }

  // Child actions are only offered when there is no existing matching configuration
  // (unless the registry flag forces showing all candidates from the context).
  private AnAction[] getChildren(DataContext dataContext) {
    final ConfigurationContext context = ConfigurationContext.getFromContext(dataContext);
    if (!Registry.is("suggest.all.run.configurations.from.context") && context.findExisting() != null) {
      return EMPTY_ARRAY;
    }
    return createChildActions(context, getConfigurationsFromContext(context)).toArray(EMPTY_ARRAY);
  }

  /**
   * Builds one child action per candidate configuration; returns an empty list when
   * there is at most one candidate (no need for a submenu then).
   */
  @NotNull
  protected List<AnAction> createChildActions(@NotNull ConfigurationContext context,
                                              @NotNull List<? extends ConfigurationFromContext> configurations) {
    if (configurations.size() <= 1) {
      return Collections.emptyList();
    }
    final List<AnAction> childActions = new ArrayList<>();
    for (final ConfigurationFromContext fromContext : configurations) {
      final ConfigurationType configurationType = fromContext.getConfigurationType();
      final String actionName = childActionName(fromContext);
      final AnAction anAction = new AnAction(actionName, configurationType.getDisplayName(), fromContext.getConfiguration().getIcon()) {
        @Override
        public void actionPerformed(@NotNull AnActionEvent e) {
          perform(fromContext, context);
        }
      };
      // second arg 'false' disables mnemonic parsing so names containing '_' render as-is
      anAction.getTemplatePresentation().setText(actionName, false);
      childActions.add(anAction);
    }
    return childActions;
  }

  // Returns the context's candidate configurations, filtered by isEnabledFor().
  @NotNull
  private List<ConfigurationFromContext> getConfigurationsFromContext(ConfigurationContext context) {
    final List<ConfigurationFromContext> fromContext = context.getConfigurationsFromContext();
    if (fromContext == null) {
      return Collections.emptyList();
    }
    final List<ConfigurationFromContext> enabledConfigurations = new ArrayList<>();
    for (ConfigurationFromContext configurationFromContext : fromContext) {
      if (isEnabledFor(configurationFromContext.getConfiguration())) {
        enabledConfigurations.add(configurationFromContext);
      }
    }
    return enabledConfigurations;
  }

  /** Extension point: subclasses may veto individual configurations. Default accepts all. */
  protected boolean isEnabledFor(RunConfiguration configuration) {
    return true;
  }

  // Performable directly only when no chooser popup would be needed
  // (an existing configuration, or at most one candidate) and indexing is finished.
  @Override
  public boolean canBePerformed(@NotNull DataContext dataContext) {
    Project project = CommonDataKeys.PROJECT.getData(dataContext);
    if (project != null && DumbService.isDumb(project)) {
      return false;
    }
    final ConfigurationContext context = ConfigurationContext.getFromContext(dataContext);
    final RunnerAndConfigurationSettings existing = context.findExisting();
    if (existing == null) {
      final List<ConfigurationFromContext> fromContext = getConfigurationsFromContext(context);
      return fromContext.size() <= 1;
    }
    return true;
  }

  @Override
  public void actionPerformed(@NotNull final AnActionEvent e) {
    final DataContext dataContext = e.getDataContext();
    MacroManager.getInstance().cacheMacrosPreview(e.getDataContext());
    final ConfigurationContext context = ConfigurationContext.getFromContext(dataContext);
    final RunnerAndConfigurationSettings existing = context.findExisting();
    if (existing == null) {
      final List<ConfigurationFromContext> producers = getConfigurationsFromContext(context);
      if (producers.isEmpty()) return;
      if (producers.size() > 1) {
        // Several candidates: let the user pick one from a popup.
        final Editor editor = CommonDataKeys.EDITOR.getData(dataContext);
        Collections.sort(producers, ConfigurationFromContext.NAME_COMPARATOR);
        final ListPopup popup =
          JBPopupFactory.getInstance().createListPopup(new BaseListPopupStep<ConfigurationFromContext>(ExecutionBundle.message("configuration.action.chooser.title"), producers) {
            @Override
            @NotNull
            public String getTextFor(final ConfigurationFromContext producer) {
              return childActionName(producer);
            }
            @Override
            public Icon getIconFor(final ConfigurationFromContext producer) {
              return producer.getConfigurationType().getIcon();
            }
            @Override
            public PopupStep<?> onChosen(ConfigurationFromContext producer, boolean finalChoice) {
              perform(producer, context);
              return FINAL_CHOICE;
            }
          });
        final InputEvent event = e.getInputEvent();
        if (event instanceof MouseEvent) {
          popup.show(new RelativePoint((MouseEvent)event));
        } else if (editor != null) {
          popup.showInBestPositionFor(editor);
        } else {
          popup.showInBestPositionFor(dataContext);
        }
      } else {
        perform(producers.get(0), context);
      }
      return;
    }
    if (LOG.isDebugEnabled()) {
      String configurationClass = existing.getConfiguration().getClass().getName();
      LOG.debug(String.format("Use existing run configuration: %s", configurationClass));
    }
    perform(context);
  }

  // Registers the chosen configuration on the context, then delegates to the
  // subclass's perform() once the producer has completed its first-run hook.
  private void perform(final ConfigurationFromContext configurationFromContext, final ConfigurationContext context) {
    RunnerAndConfigurationSettings configurationSettings = configurationFromContext.getConfigurationSettings();
    context.setConfiguration(configurationSettings);
    configurationFromContext.onFirstRun(context, () -> {
      if (LOG.isDebugEnabled()) {
        RunnerAndConfigurationSettings settings = context.getConfiguration();
        RunConfiguration configuration = settings == null ? null : settings.getConfiguration();
        String configurationClass = configuration == null ? null : configuration.getClass().getName();
        LOG.debug(String.format("Create run configuration: %s", configurationClass));
      }
      perform(context);
    });
  }

  /** Subclass hook: act on the fully prepared configuration context. */
  protected abstract void perform(ConfigurationContext context);

  @Override
  public void beforeActionPerformedUpdate(@NotNull AnActionEvent e) {
    fullUpdate(e);
  }

  // Event-queue stamp of the last update() that hit the timeout; used to skip
  // repeated expensive updates within the same event burst.
  @Nullable private static Integer ourLastTimeoutStamp = null;

  @Override
  public void update(@NotNull final AnActionEvent event) {
    VirtualFile vFile = event.getDataContext().getData(CommonDataKeys.VIRTUAL_FILE);
    ThreeState hadAnythingRunnable = vFile == null ? ThreeState.UNSURE : RunLineMarkerProvider.hadAnythingRunnable(vFile);
    if (hadAnythingRunnable == ThreeState.UNSURE) {
      fullUpdate(event);
      return;
    }
    // Try the full (possibly slow) update under a timeout; on timeout fall back to
    // the last recorded "was anything runnable here" state.
    boolean success =
      !alreadyExceededTimeoutOnSimilarAction() &&
      ProgressIndicatorUtils.withTimeout(Registry.intValue("run.configuration.update.timeout"), () -> {
        fullUpdate(event);
        return true;
      }) != null;
    if (!success) {
      recordUpdateTimeout();
      approximatePresentationByPreviousAvailability(event, hadAnythingRunnable);
    }
  }

  private static boolean alreadyExceededTimeoutOnSimilarAction() {
    return Objects.equals(IdeEventQueue.getInstance().getEventCount(), ourLastTimeoutStamp);
  }

  private static void recordUpdateTimeout() {
    ourLastTimeoutStamp = IdeEventQueue.getInstance().getEventCount();
  }

  // we assume that presence of anything runnable in a file changes rarely, so using last recorded state is mostly OK
  protected void approximatePresentationByPreviousAvailability(AnActionEvent event, ThreeState hadAnythingRunnable) {
    event.getPresentation().copyFrom(getTemplatePresentation());
    event.getPresentation().setEnabledAndVisible(hadAnythingRunnable == ThreeState.YES);
  }

  /** Unconditional (non-timeout-guarded) presentation update from the current context. */
  protected void fullUpdate(@NotNull AnActionEvent event) {
    final ConfigurationContext context = ConfigurationContext.getFromContext(event.getDataContext());
    final Presentation presentation = event.getPresentation();
    final RunnerAndConfigurationSettings existing = context.findExisting();
    RunnerAndConfigurationSettings configuration = existing;
    if (configuration == null) {
      configuration = context.getConfiguration();
    }
    if (configuration == null){
      presentation.setEnabledAndVisible(false);
    }
    else{
      presentation.setEnabledAndVisible(true);
      VirtualFile vFile = event.getDataContext().getData(CommonDataKeys.VIRTUAL_FILE);
      if (vFile != null) {
        RunLineMarkerProvider.markRunnable(vFile, true);
      }
      final List<ConfigurationFromContext> fromContext = getConfigurationsFromContext(context);
      if (existing == null && !fromContext.isEmpty()) {
        //todo[nik,anna] it's dirty fix. Otherwise wrong configuration will be returned from context.getConfiguration()
        context.setConfiguration(fromContext.get(0).getConfigurationSettings());
      }
      final String name = suggestRunActionName((LocatableConfiguration)configuration.getConfiguration());
      updatePresentation(presentation, existing != null || fromContext.size() <= 1 ? name : "", context);
    }
  }

  @Override
  public boolean isDumbAware() {
    return false;
  }

  /**
   * Suggests the display name for running {@code configuration}: the producer's action
   * name when the configuration still has its generated name, otherwise the (shortened)
   * user-visible configuration name.
   */
  @NotNull
  public static String suggestRunActionName(@NotNull LocatableConfiguration configuration) {
    if (configuration instanceof LocatableConfigurationBase && configuration.isGeneratedName()) {
      String actionName = ((LocatableConfigurationBase<?>)configuration).getActionName();
      if (actionName != null) {
        return actionName;
      }
    }
    return ProgramRunnerUtil.shortenName(configuration.getName(), 0);
  }

  // Name shown for one candidate in the child-action list / chooser popup.
  @NotNull
  private static String childActionName(ConfigurationFromContext configurationFromContext) {
    RunConfiguration configuration = configurationFromContext.getConfiguration();
    if (!(configuration instanceof LocatableConfiguration)) {
      return configurationFromContext.getConfigurationType().getDisplayName();
    }
    if (configurationFromContext.isFromAlternativeLocation()) {
      String locationDisplayName = configurationFromContext.getAlternativeLocationDisplayName();
      if (locationDisplayName != null) {
        return ((LocatableConfigurationBase<?>)configuration).getActionName() + " " + locationDisplayName;
      }
    }
    return StringUtil.unquoteString(suggestRunActionName((LocatableConfiguration)configurationFromContext.getConfiguration()));
  }

  /** Subclass hook: render {@code actionText} (possibly empty) into the presentation. */
  protected abstract void updatePresentation(Presentation presentation, @NotNull String actionText, ConfigurationContext context);
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.math3.random;
import java.util.Arrays;
import org.apache.commons.math3.TestUtils;
import org.apache.commons.math3.stat.Frequency;
import org.apache.commons.math3.stat.descriptive.SummaryStatistics;
import org.apache.commons.math3.util.FastMath;
import org.apache.commons.math3.exception.MathIllegalArgumentException;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
/**
* Base class for RandomGenerator tests.
*
* Tests RandomGenerator methods directly and also executes RandomDataTest
* test cases against a RandomDataImpl created using the provided generator.
*
* RandomGenerator test classes should extend this class, implementing
* makeGenerator() to provide a concrete generator to test. The generator
* returned by makeGenerator should be seeded with a fixed seed.
*
*/
public abstract class RandomGeneratorAbstractTest extends RandomDataGeneratorTest {

    /** RandomGenerator under test */
    protected RandomGenerator generator;

    /**
     * Override this method in subclasses to provide a concrete generator to test.
     * Return a generator seeded with a fixed seed.
     */
    protected abstract RandomGenerator makeGenerator();

    /**
     * Initialize generator and randomData instance in superclass.
     */
    public RandomGeneratorAbstractTest() {
        generator = makeGenerator();
        randomData = new RandomDataGenerator(generator);
    }

    /**
     * Set a fixed seed for the tests
     */
    @Before
    public void setUp() {
        generator = makeGenerator();
    }

    // Omit secureXxx tests, since they do not use the provided generator
    @Override
    public void testNextSecureLongIAE() {}
    @Override
    public void testNextSecureLongNegativeToPositiveRange() {}
    @Override
    public void testNextSecureLongNegativeRange() {}
    @Override
    public void testNextSecureLongPositiveRange() {}
    @Override
    public void testNextSecureIntIAE() {}
    @Override
    public void testNextSecureIntNegativeToPositiveRange() {}
    @Override
    public void testNextSecureIntNegativeRange() {}
    @Override
    public void testNextSecureIntPositiveRange() {}
    @Override
    public void testNextSecureHex() {}

    /**
     * Tests uniformity of nextInt(int) distribution by generating 1000
     * samples for each of 10 test values and for each sample performing
     * a chi-square test of homogeneity of the observed distribution with
     * the expected uniform distribution. Tests are performed at the .01
     * level and an average failure rate higher than 2% (i.e. more than 20
     * null hypothesis rejections) causes the test case to fail.
     *
     * All random values are generated using the generator instance used by
     * other tests and the generator is not reseeded, so this is a fixed seed
     * test.
     */
    @Test
    public void testNextIntDirect() {
        // Set up test values - end of the array filled randomly
        int[] testValues = new int[] {4, 10, 12, 32, 100, 10000, 0, 0, 0, 0};
        for (int i = 6; i < 10; i++) {
            final int val = generator.nextInt();
            testValues[i] = val < 0 ? -val : val + 1;
        }
        final int numTests = 1000;
        for (int i = 0; i < testValues.length; i++) {
            final int n = testValues[i];
            // Set up bins: one bin per value for small n, 10 equal-width bins otherwise
            int[] binUpperBounds;
            if (n < 32) {
                binUpperBounds = new int[n];
                for (int k = 0; k < n; k++) {
                    binUpperBounds[k] = k;
                }
            } else {
                binUpperBounds = new int[10];
                final int step = n / 10;
                for (int k = 0; k < 9; k++) {
                    binUpperBounds[k] = (k + 1) * step;
                }
                binUpperBounds[9] = n - 1;
            }
            // Run the tests
            int numFailures = 0;
            final int binCount = binUpperBounds.length;
            final long[] observed = new long[binCount];
            final double[] expected = new double[binCount];
            expected[0] = binUpperBounds[0] == 0 ? (double) smallSampleSize / (double) n :
                (double) ((binUpperBounds[0] + 1) * smallSampleSize) / (double) n;
            for (int k = 1; k < binCount; k++) {
                expected[k] = (double) smallSampleSize *
                    (double) (binUpperBounds[k] - binUpperBounds[k - 1]) / n;
            }
            for (int j = 0; j < numTests; j++) {
                Arrays.fill(observed, 0);
                for (int k = 0; k < smallSampleSize; k++) {
                    final int value = generator.nextInt(n);
                    Assert.assertTrue("nextInt range",(value >= 0) && (value < n));
                    for (int l = 0; l < binCount; l++) {
                        if (binUpperBounds[l] >= value) {
                            observed[l]++;
                            break;
                        }
                    }
                }
                if (testStatistic.chiSquareTest(expected, observed) < 0.01) {
                    numFailures++;
                }
            }
            if ((double) numFailures / (double) numTests > 0.02) {
                Assert.fail("Too many failures for n = " + n +
                    " " + numFailures + " out of " + numTests + " tests failed.");
            }
        }
    }

    /** Verifies nextInt(bound) rejects a negative bound and (here) tolerates zero. */
    @Test
    public void testNextIntIAE2() {
        try {
            generator.nextInt(-1);
            Assert.fail("MathIllegalArgumentException expected");
        } catch (MathIllegalArgumentException ex) {
            // ignored
        }
        try {
            generator.nextInt(0);
        } catch (MathIllegalArgumentException ex) {
            // ignored
        }
    }

    /** Chi-square check that |nextLong()| values spread evenly over four quartile bins. */
    @Test
    public void testNextLongDirect() {
        long q1 = Long.MAX_VALUE/4;
        long q2 = 2 * q1;
        long q3 = 3 * q1;
        Frequency freq = new Frequency();
        long val = 0;
        int value = 0;
        for (int i=0; i<smallSampleSize; i++) {
            val = generator.nextLong();
            // Note: -Long.MIN_VALUE == Long.MIN_VALUE; such a (vanishingly rare)
            // value stays negative and is tallied in bin 0, which is acceptable here.
            val = val < 0 ? -val : val;
            if (val < q1) {
                value = 0;
            } else if (val < q2) {
                value = 1;
            } else if (val < q3) {
                value = 2;
            } else {
                value = 3;
            }
            freq.addValue(value);
        }
        long[] observed = new long[4];
        for (int i=0; i<4; i++) {
            observed[i] = freq.getCount(i);
        }
        /* Use ChiSquare dist with df = 4-1 = 3, alpha = .001
         * Change to 11.34 for alpha = .01
         */
        Assert.assertTrue("chi-square test -- will fail about 1 in 1000 times",
            testStatistic.chiSquare(expected,observed) < 16.27);
    }

    /** Chi-square check that nextBoolean() is a fair coin. */
    @Test
    public void testNextBooleanDirect() {
        long halfSampleSize = smallSampleSize / 2;
        double[] expected = {halfSampleSize, halfSampleSize};
        long[] observed = new long[2];
        for (int i=0; i<smallSampleSize; i++) {
            if (generator.nextBoolean()) {
                observed[0]++;
            } else {
                observed[1]++;
            }
        }
        /* Use ChiSquare dist with df = 2-1 = 1, alpha = .001
         * Change to 6.635 for alpha = .01
         */
        Assert.assertTrue("chi-square test -- will fail about 1 in 1000 times",
            testStatistic.chiSquare(expected,observed) < 10.828);
    }

    /** Chi-square check that nextFloat() values spread evenly over four quarter bins. */
    @Test
    public void testNextFloatDirect() {
        Frequency freq = new Frequency();
        float val = 0;
        int value = 0;
        for (int i=0; i<smallSampleSize; i++) {
            val = generator.nextFloat();
            if (val < 0.25) {
                value = 0;
            } else if (val < 0.5) {
                value = 1;
            } else if (val < 0.75) {
                value = 2;
            } else {
                value = 3;
            }
            freq.addValue(value);
        }
        long[] observed = new long[4];
        for (int i=0; i<4; i++) {
            observed[i] = freq.getCount(i);
        }
        /* Use ChiSquare dist with df = 4-1 = 3, alpha = .001
         * Change to 11.34 for alpha = .01
         */
        Assert.assertTrue("chi-square test -- will fail about 1 in 1000 times",
            testStatistic.chiSquare(expected,observed) < 16.27);
    }

    /** Checks sample mean and standard deviation of nextDouble() against U(0,1). */
    @Test
    public void testDoubleDirect() {
        SummaryStatistics sample = new SummaryStatistics();
        final int N = 10000;
        for (int i = 0; i < N; ++i) {
            sample.addValue(generator.nextDouble());
        }
        Assert.assertEquals("Note: This test will fail randomly about 1 in 100 times.",
            0.5, sample.getMean(), FastMath.sqrt(N/12.0) * 2.576);
        Assert.assertEquals(1.0 / (2.0 * FastMath.sqrt(3.0)),
            sample.getStandardDeviation(), 0.01);
    }

    /** Checks sample mean and standard deviation of nextFloat() against U(0,1). */
    @Test
    public void testFloatDirect() {
        SummaryStatistics sample = new SummaryStatistics();
        final int N = 1000;
        for (int i = 0; i < N; ++i) {
            sample.addValue(generator.nextFloat());
        }
        Assert.assertEquals("Note: This test will fail randomly about 1 in 100 times.",
            0.5, sample.getMean(), FastMath.sqrt(N/12.0) * 2.576);
        Assert.assertEquals(1.0 / (2.0 * FastMath.sqrt(3.0)),
            sample.getStandardDeviation(), 0.01);
    }

    @Test(expected=MathIllegalArgumentException.class)
    public void testNextIntNeg() {
        generator.nextInt(-1);
    }

    /** Random-walk sign test for nextInt(): positives and negatives should balance. */
    @Test
    public void testNextInt2() {
        int walk = 0;
        final int N = 10000;
        for (int k = 0; k < N; ++k) {
            if (generator.nextInt() >= 0) {
                ++walk;
            } else {
                --walk;
            }
        }
        Assert.assertTrue("Walked too far astray: " + walk + "\nNote: This " +
            "test will fail randomly about 1 in 100 times.",
            FastMath.abs(walk) < FastMath.sqrt(N) * 2.576);
    }

    /** Random-walk sign test for nextLong(). */
    @Test
    public void testNextLong2() {
        int walk = 0;
        final int N = 1000;
        for (int k = 0; k < N; ++k) {
            if (generator.nextLong() >= 0) {
                ++walk;
            } else {
                --walk;
            }
        }
        Assert.assertTrue("Walked too far astray: " + walk + "\nNote: This " +
            "test will fail randomly about 1 in 100 times.",
            FastMath.abs(walk) < FastMath.sqrt(N) * 2.576);
    }

    /** Random-walk test for nextBoolean(). */
    @Test
    public void testNexBoolean2() {
        int walk = 0;
        final int N = 10000;
        for (int k = 0; k < N; ++k) {
            if (generator.nextBoolean()) {
                ++walk;
            } else {
                --walk;
            }
        }
        Assert.assertTrue("Walked too far astray: " + walk + "\nNote: This " +
            "test will fail randomly about 1 in 100 times.",
            FastMath.abs(walk) < FastMath.sqrt(N) * 2.576);
    }

    /** Chi-square uniformity test over all 256 byte values produced by nextBytes(). */
    @Test
    public void testNexBytes() {
        long[] count = new long[256];
        byte[] bytes = new byte[10];
        double[] expected = new double[256];
        final int sampleSize = 100000;
        // A byte takes 256 distinct values, so the uniform expectation divides by 256.
        // The previous divisor, 265, was a typo; since all entries of 'expected' are
        // equal, chi-square rescaling made the typo harmless, but it was misleading.
        for (int i = 0; i < 256; i++) {
            expected[i] = (double) sampleSize / 256;
        }
        for (int k = 0; k < sampleSize; ++k) {
            generator.nextBytes(bytes);
            for (byte b : bytes) {
                ++count[b + 128];
            }
        }
        TestUtils.assertChiSquareAccept(expected, count, 0.001);
    }

    /** Verifies that equal seeds yield identical sequences (including MATH-723 regression). */
    @Test
    public void testSeeding() {
        // makeGenerator initializes with fixed seed
        RandomGenerator gen = makeGenerator();
        RandomGenerator gen1 = makeGenerator();
        checkSameSequence(gen, gen1);
        // reseed, but recreate the second one
        // verifies MATH-723
        gen.setSeed(100);
        gen1 = makeGenerator();
        gen1.setSeed(100);
        checkSameSequence(gen, gen1);
    }

    // Asserts that the two generators produce identical sequences across all
    // primitive-producing methods.
    private void checkSameSequence(RandomGenerator gen1, RandomGenerator gen2) {
        final int len = 11; // Needs to be an odd number to check MATH-723
        final double[][] values = new double[2][len];
        for (int i = 0; i < len; i++) {
            values[0][i] = gen1.nextDouble();
        }
        for (int i = 0; i < len; i++) {
            values[1][i] = gen2.nextDouble();
        }
        Assert.assertTrue(Arrays.equals(values[0], values[1]));
        for (int i = 0; i < len; i++) {
            values[0][i] = gen1.nextFloat();
        }
        for (int i = 0; i < len; i++) {
            values[1][i] = gen2.nextFloat();
        }
        Assert.assertTrue(Arrays.equals(values[0], values[1]));
        for (int i = 0; i < len; i++) {
            values[0][i] = gen1.nextInt();
        }
        for (int i = 0; i < len; i++) {
            values[1][i] = gen2.nextInt();
        }
        Assert.assertTrue(Arrays.equals(values[0], values[1]));
        for (int i = 0; i < len; i++) {
            values[0][i] = gen1.nextLong();
        }
        for (int i = 0; i < len; i++) {
            values[1][i] = gen2.nextLong();
        }
        Assert.assertTrue(Arrays.equals(values[0], values[1]));
        for (int i = 0; i < len; i++) {
            values[0][i] = gen1.nextInt(len);
        }
        for (int i = 0; i < len; i++) {
            values[1][i] = gen2.nextInt(len);
        }
        Assert.assertTrue(Arrays.equals(values[0], values[1]));
        for (int i = 0; i < len; i++) {
            values[0][i] = gen1.nextBoolean() ? 1 : 0;
        }
        for (int i = 0; i < len; i++) {
            values[1][i] = gen2.nextBoolean() ? 1 : 0;
        }
        Assert.assertTrue(Arrays.equals(values[0], values[1]));
        for (int i = 0; i < len; i++) {
            values[0][i] = gen1.nextGaussian();
        }
        for (int i = 0; i < len; i++) {
            values[1][i] = gen2.nextGaussian();
        }
        Assert.assertTrue(Arrays.equals(values[0], values[1]));
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sentry.provider.db.generic.service.thrift;
import static org.apache.sentry.policy.common.PolicyConstants.AUTHORIZABLE_JOINER;
import static org.apache.sentry.policy.common.PolicyConstants.KV_JOINER;
import java.lang.reflect.Constructor;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.hadoop.conf.Configuration;
import org.apache.sentry.SentryUserException;
import org.apache.sentry.core.common.Authorizable;
import org.apache.sentry.core.model.db.AccessConstants;
import org.apache.sentry.policy.common.KeyValue;
import org.apache.sentry.policy.common.PolicyConstants;
import org.apache.sentry.provider.common.AuthorizationComponent;
import org.apache.sentry.provider.db.SentryAccessDeniedException;
import org.apache.sentry.provider.db.SentryAlreadyExistsException;
import org.apache.sentry.provider.db.SentryInvalidInputException;
import org.apache.sentry.provider.db.SentryNoSuchObjectException;
import org.apache.sentry.provider.db.SentryThriftAPIMismatchException;
import org.apache.sentry.provider.db.generic.service.persistent.PrivilegeObject;
import org.apache.sentry.provider.db.generic.service.persistent.PrivilegeObject.Builder;
import org.apache.sentry.provider.db.generic.service.persistent.SentryStoreLayer;
import org.apache.sentry.provider.db.log.entity.JsonLogEntityFactory;
import org.apache.sentry.provider.db.log.util.Constants;
import org.apache.sentry.provider.db.service.model.MSentryGMPrivilege;
import org.apache.sentry.provider.db.service.model.MSentryRole;
import org.apache.sentry.provider.db.service.persistent.CommitContext;
import org.apache.sentry.provider.db.service.thrift.PolicyStoreConstants;
import org.apache.sentry.provider.db.service.thrift.SentryConfigurationException;
import org.apache.sentry.provider.db.service.thrift.SentryPolicyStoreProcessor;
import org.apache.sentry.service.thrift.ServiceConstants.ServerConfig;
import org.apache.sentry.service.thrift.ServiceConstants.ThriftConstants;
import org.apache.sentry.service.thrift.ServiceConstants;
import org.apache.sentry.service.thrift.Status;
import org.apache.sentry.service.thrift.TSentryResponseStatus;
import org.apache.thrift.TException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Splitter;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
public class SentryGenericPolicyProcessor implements SentryGenericPolicyService.Iface {
private static final Logger LOGGER = LoggerFactory.getLogger(SentryGenericPolicyProcessor.class);
private static final Logger AUDIT_LOGGER = LoggerFactory
.getLogger(Constants.AUDIT_LOGGER_NAME_GENERIC);
private final Configuration conf;
private final ImmutableSet<String> adminGroups;
private final SentryStoreLayer store;
private final NotificationHandlerInvoker handerInvoker;
public static final String SENTRY_GENERIC_SERVICE_NAME = "SentryGenericPolicyService";
private static final String ACCESS_DENIAL_MESSAGE = "Access denied to ";
public SentryGenericPolicyProcessor(Configuration conf) throws Exception {
this.store = createStore(conf);
this.handerInvoker = new NotificationHandlerInvoker(createHandlers(conf));
this.conf = conf;
adminGroups = ImmutableSet.copyOf((Sets.newHashSet(conf.getStrings(
ServerConfig.ADMIN_GROUPS, new String[]{}))));
}
@VisibleForTesting
public SentryGenericPolicyProcessor(Configuration conf, SentryStoreLayer store) throws Exception {
this.store = store;
this.handerInvoker = new NotificationHandlerInvoker(createHandlers(conf));
this.conf = conf;
adminGroups = ImmutableSet.copyOf(toTrimmed(Sets.newHashSet(conf.getStrings(
ServerConfig.ADMIN_GROUPS, new String[]{}))));
}
private void authorize(String requestorUser, Set<String> requestorGroups)
throws SentryAccessDeniedException {
if (!inAdminGroups(requestorGroups)) {
String msg = "User: " + requestorUser + " is part of " + requestorGroups +
" which does not, intersect admin groups " + adminGroups;
LOGGER.warn(msg);
throw new SentryAccessDeniedException(ACCESS_DENIAL_MESSAGE + requestorUser);
}
}
private Set<String> toTrimmedLower(Set<String> s) {
if (null == s) {
return new HashSet<String>();
}
Set<String> result = Sets.newHashSet();
for (String v : s) {
result.add(v.trim().toLowerCase());
}
return result;
}
private Set<String> toTrimmed(Set<String> s) {
if (null == s) {
return new HashSet<String>();
}
Set<String> result = Sets.newHashSet();
for (String v : s) {
result.add(v.trim());
}
return result;
}
private String toTrimmedLower(String s) {
if (Strings.isNullOrEmpty(s)){
return "";
}
return s.trim().toLowerCase();
}
  /**
   * Resolves the group names for {@code userName} via the group-mapping service
   * configured for the Sentry policy store.
   *
   * @throws SentryUserException if the groups cannot be resolved
   */
  public static Set<String> getRequestorGroups(Configuration conf, String userName) throws SentryUserException {
    return SentryPolicyStoreProcessor.getGroupsFromUserName(conf, userName);
  }
private boolean inAdminGroups(Set<String> requestorGroups) {
if (Sets.intersection(adminGroups, requestorGroups).isEmpty()) {
return false;
}
return true;
}
public static SentryStoreLayer createStore(Configuration conf) throws SentryConfigurationException {
SentryStoreLayer storeLayer = null;
String Store = conf.get(PolicyStoreConstants.SENTRY_GENERIC_POLICY_STORE, PolicyStoreConstants.SENTRY_GENERIC_POLICY_STORE_DEFAULT);
if (Strings.isNullOrEmpty(Store)) {
throw new SentryConfigurationException("the parameter configuration for sentry.generic.policy.store can't be empty");
}
try {
storeLayer = createInstance(Store, conf, SentryStoreLayer.class);
} catch (Exception e) {
throw new SentryConfigurationException("Create sentryStore error: " + e.getMessage(), e);
}
return storeLayer;
}
public static List<NotificationHandler> createHandlers(Configuration conf) throws SentryConfigurationException {
List<NotificationHandler> handlers = Lists.newArrayList();
Iterable<String> notificationHandlers = Splitter.onPattern("[\\s,]").trimResults()
.omitEmptyStrings().split(conf.get(PolicyStoreConstants.SENTRY_GENERIC_POLICY_NOTIFICATION, ""));
try {
for (String notificationHandler : notificationHandlers) {
handlers.add(createInstance(notificationHandler, conf, NotificationHandler.class));
}
} catch (Exception e) {
throw new SentryConfigurationException("Create notificationHandlers error: " + e.getMessage(), e);
}
return handlers;
}
@SuppressWarnings("unchecked")
public static <T> T createInstance(String className, Configuration conf, Class<T> iface) throws Exception {
T result;
try {
Class clazz = Class.forName(className);
if (!iface.isAssignableFrom(clazz)) {
throw new IllegalArgumentException("Class " + clazz + " is not a " +
iface.getName());
}
Constructor<T> meth = (Constructor<T>)clazz.getDeclaredConstructor(Configuration.class);
meth.setAccessible(true);
result = meth.newInstance(new Object[]{conf});
} catch (Exception e) {
throw new RuntimeException(e);
}
return result;
}
  /**
   * Runs {@code handler} and maps each known Sentry exception type onto the
   * matching thrift {@link Status} in the returned {@link Response}; any other
   * exception becomes a RuntimeError status. Never propagates an exception.
   */
  private <T> Response<T> requestHandle(RequestHandler<T> handler) {
    // Fresh response used only when the handler throws before producing one.
    Response<T> response = new Response<T>();
    try {
      response = handler.handle();
    } catch (SentryAccessDeniedException e) {
      String msg = "Sentry access denied: " + e.getMessage();
      LOGGER.error(msg, e);
      response.status = Status.AccessDenied(e.getMessage(), e);
    } catch (SentryAlreadyExistsException e) {
      String msg = "Sentry object already exists: " + e.getMessage();
      LOGGER.error(msg, e);
      response.status = Status.AlreadyExists(e.getMessage(), e);
    } catch (SentryNoSuchObjectException e) {
      String msg = "Sentry object doesn't exist: " + e.getMessage();
      LOGGER.error(msg, e);
      response.status = Status.NoSuchObject(e.getMessage(), e);
    } catch (SentryInvalidInputException e) {
      String msg = "Invalid input privilege object: " + e.getMessage();
      LOGGER.error(msg, e);
      // NOTE(review): unlike the other branches this passes the prefixed msg,
      // not e.getMessage(), to the status — presumably intentional; confirm.
      response.status = Status.InvalidInput(msg, e);
    } catch (SentryThriftAPIMismatchException e) {
      String msg = "Sentry thrift API mismatch error: " + e.getMessage();
      LOGGER.error(msg, e);
      response.status = Status.THRIFT_VERSION_MISMATCH(e.getMessage(), e);
    } catch (Exception e) {
      String msg = "Unknown error:" + e.getMessage();
      LOGGER.error(msg, e);
      response.status = Status.RuntimeError(msg, e);
    }
    return response;
  }
private PrivilegeObject toPrivilegeObject(TSentryPrivilege tSentryPrivilege) {
Boolean grantOption;
if (tSentryPrivilege.getGrantOption().equals(TSentryGrantOption.TRUE)) {
grantOption = true;
} else if (tSentryPrivilege.getGrantOption().equals(TSentryGrantOption.FALSE)) {
grantOption = false;
} else {
grantOption = null;
}
return new Builder().setComponent(tSentryPrivilege.getComponent())
.setService(tSentryPrivilege.getServiceName())
.setAuthorizables(toAuthorizables(tSentryPrivilege.getAuthorizables()))
.setAction(tSentryPrivilege.getAction())
.withGrantOption(grantOption)
.build();
}
private TSentryPrivilege fromPrivilegeObject(PrivilegeObject privilege) {
TSentryPrivilege tPrivilege = new TSentryPrivilege(privilege.getComponent(), privilege.getService(),
fromAuthorizable(privilege.getAuthorizables()),
privilege.getAction());
if (privilege.getGrantOption() == null) {
tPrivilege.setGrantOption(TSentryGrantOption.UNSET);
} else if (privilege.getGrantOption()) {
tPrivilege.setGrantOption(TSentryGrantOption.TRUE);
} else {
tPrivilege.setGrantOption(TSentryGrantOption.FALSE);
}
return tPrivilege;
}
private List<TAuthorizable> fromAuthorizable(List<? extends Authorizable> authorizables) {
List<TAuthorizable> tAuthorizables = Lists.newArrayList();
for (Authorizable authorizable : authorizables) {
tAuthorizables.add(new TAuthorizable(authorizable.getTypeName(), authorizable.getName()));
}
return tAuthorizables;
}
private String fromAuthorizableToStr(List<? extends Authorizable> authorizables) {
if (authorizables != null && !authorizables.isEmpty()) {
List<String> privileges = Lists.newArrayList();
for (Authorizable authorizable : authorizables) {
privileges.add(PolicyConstants.KV_JOINER.join(authorizable.getTypeName(),
authorizable.getName()));
}
return PolicyConstants.AUTHORIZABLE_JOINER.join(privileges);
} else {
return "";
}
}
/**
 * Wraps each thrift authorizable in an {@link Authorizable} view.
 * A null input yields an empty list.
 */
private List<? extends Authorizable> toAuthorizables(List<TAuthorizable> tAuthorizables) {
  List<Authorizable> result = Lists.newArrayList();
  if (tAuthorizables == null) {
    return result;
  }
  for (final TAuthorizable t : tAuthorizables) {
    result.add(new Authorizable() {
      @Override
      public String getTypeName() {
        return t.getType();
      }

      @Override
      public String getName() {
        return t.getName();
      }
    });
  }
  return result;
}
/**
 * Parses a joined "type=name-&gt;type=name" privilege string back into
 * {@link Authorizable} instances. A null input yields an empty list.
 */
private List<? extends Authorizable> toAuthorizables(String privilegeStr) {
  List<Authorizable> result = Lists.newArrayList();
  if (privilegeStr == null) {
    return result;
  }
  for (String kvStr : PolicyConstants.AUTHORIZABLE_SPLITTER.split(privilegeStr)) {
    KeyValue kv = new KeyValue(kvStr);
    final String type = kv.getKey();
    final String name = kv.getValue();
    result.add(new Authorizable() {
      @Override
      public String getTypeName() {
        return type;
      }

      @Override
      public String getName() {
        return name;
      }
    });
  }
  return result;
}
// Construct the role to set of privileges mapping based on the
// MSentryGMPrivilege information.
private TSentryPrivilegeMap toTSentryPrivilegeMap(Set<MSentryGMPrivilege> mPrivileges) {
  // Mapping of <Role, Set<Privilege>>, sorted by role name (TreeMap).
  Map<String, Set<TSentryPrivilege>> tPrivilegeMap = Maps.newTreeMap();
  for (MSentryGMPrivilege mPrivilege : mPrivileges) {
    for (MSentryRole role : mPrivilege.getRoles()) {
      TSentryPrivilege tPrivilege = toTSentryPrivilege(mPrivilege);
      // Single map lookup instead of containsKey followed by get.
      Set<TSentryPrivilege> tPrivilegeSet = tPrivilegeMap.get(role.getRoleName());
      if (tPrivilegeSet == null) {
        tPrivilegeSet = Sets.newTreeSet();
        tPrivilegeMap.put(role.getRoleName(), tPrivilegeSet);
      }
      tPrivilegeSet.add(tPrivilege);
    }
  }
  return new TSentryPrivilegeMap(tPrivilegeMap);
}
// Construct TSentryPrivilege based on MSentryGMPrivilege information.
private TSentryPrivilege toTSentryPrivilege(MSentryGMPrivilege mPrivilege) {
  TSentryPrivilege tPrivilege = new TSentryPrivilege(mPrivilege.getComponentName(),
      mPrivilege.getServiceName(), fromAuthorizable(mPrivilege.getAuthorizables()), mPrivilege.getAction());
  // null grant option maps to UNSET; otherwise TRUE/FALSE mirrors the Boolean.
  Boolean grantOption = mPrivilege.getGrantOption();
  if (grantOption == null) {
    tPrivilege.setGrantOption(TSentryGrantOption.UNSET);
  } else {
    tPrivilege.setGrantOption(grantOption ? TSentryGrantOption.TRUE : TSentryGrantOption.FALSE);
  }
  return tPrivilege;
}
/**
 * Flattens each privilege into a permission string of joined key=value parts:
 * optional server part, the authorizable hierarchy, then the action.
 */
private Set<String> buildPermissions(Set<PrivilegeObject> privileges) {
  Set<String> permissions = Sets.newHashSet();
  for (PrivilegeObject privilege : privileges) {
    List<String> parts = Lists.newArrayList();
    // Some components (e.g. Sqoop) scope privileges under a server authorizable.
    if (hasComponentServerPrivilege(privilege.getComponent())) {
      parts.add(KV_JOINER.join("server", privilege.getService()));
    }
    for (Authorizable authorizable : privilege.getAuthorizables()) {
      parts.add(KV_JOINER.join(authorizable.getTypeName(), authorizable.getName()));
    }
    parts.add(KV_JOINER.join("action", privilege.getAction()));
    permissions.add(AUTHORIZABLE_JOINER.join(parts));
  }
  return permissions;
}
// Returns true when the given component scopes privileges under a server
// authorizable; currently only Sqoop does (case-insensitive comparison).
private boolean hasComponentServerPrivilege(String component) {
return AuthorizationComponent.SQOOP.equalsIgnoreCase(component);
}
@Override
public TCreateSentryRoleResponse create_sentry_role(
    final TCreateSentryRoleRequest request) throws TException {
  // Run the store mutation inside the common request-handling wrapper.
  Response<Void> response = requestHandle(new RequestHandler<Void>() {
    @Override
    public Response<Void> handle() throws Exception {
      validateClientVersion(request.getProtocol_version());
      authorize(request.getRequestorUserName(),
          getRequestorGroups(conf, request.getRequestorUserName()));
      CommitContext context =
          store.createRole(request.getComponent(), request.getRoleName(), request.getRequestorUserName());
      return new Response<Void>(Status.OK(), context);
    }
  });
  TCreateSentryRoleResponse tResponse = new TCreateSentryRoleResponse(response.status);
  // Notify plugins only when the mutation committed successfully.
  if (Status.OK.getCode() == response.status.getValue()) {
    handerInvoker.create_sentry_role(response.context, request, tResponse);
  }
  // Audit logging is best effort: a logging failure must not fail the RPC.
  try {
    AUDIT_LOGGER.info(JsonLogEntityFactory.getInstance()
        .createJsonLogEntity(request, tResponse, conf).toJsonFormatLog());
  } catch (Exception e) {
    LOGGER.error("Error creating audit log for create role: " + e.getMessage(), e);
  }
  return tResponse;
}
@Override
public TDropSentryRoleResponse drop_sentry_role(final TDropSentryRoleRequest request)
    throws TException {
  Response<Void> response = requestHandle(new RequestHandler<Void>() {
    @Override
    public Response<Void> handle() throws Exception {
      validateClientVersion(request.getProtocol_version());
      authorize(request.getRequestorUserName(),
          getRequestorGroups(conf, request.getRequestorUserName()));
      CommitContext context =
          store.dropRole(request.getComponent(), request.getRoleName(), request.getRequestorUserName());
      return new Response<Void>(Status.OK(), context);
    }
  });
  TDropSentryRoleResponse tResponse = new TDropSentryRoleResponse(response.status);
  // Notify plugins only when the mutation committed successfully.
  if (Status.OK.getCode() == response.status.getValue()) {
    handerInvoker.drop_sentry_role(response.context, request, tResponse);
  }
  // Audit logging is best effort: a logging failure must not fail the RPC.
  try {
    AUDIT_LOGGER.info(JsonLogEntityFactory.getInstance()
        .createJsonLogEntity(request, tResponse, conf).toJsonFormatLog());
  } catch (Exception e) {
    LOGGER.error("Error creating audit log for drop role: " + e.getMessage(), e);
  }
  return tResponse;
}
@Override
public TAlterSentryRoleGrantPrivilegeResponse alter_sentry_role_grant_privilege(
    final TAlterSentryRoleGrantPrivilegeRequest request) throws TException {
  // NOTE(review): unlike role create/drop, no explicit authorize() here —
  // presumably grant authorization is enforced inside the store; confirm.
  Response<Void> response = requestHandle(new RequestHandler<Void>() {
    @Override
    public Response<Void> handle() throws Exception {
      validateClientVersion(request.getProtocol_version());
      CommitContext context = store.alterRoleGrantPrivilege(request.getComponent(),
          request.getRoleName(), toPrivilegeObject(request.getPrivilege()),
          request.getRequestorUserName());
      return new Response<Void>(Status.OK(), context);
    }
  });
  TAlterSentryRoleGrantPrivilegeResponse tResponse =
      new TAlterSentryRoleGrantPrivilegeResponse(response.status);
  if (Status.OK.getCode() == response.status.getValue()) {
    handerInvoker.alter_sentry_role_grant_privilege(response.context, request, tResponse);
  }
  // Audit logging is best effort: a logging failure must not fail the RPC.
  try {
    AUDIT_LOGGER.info(JsonLogEntityFactory.getInstance()
        .createJsonLogEntity(request, tResponse, conf).toJsonFormatLog());
  } catch (Exception e) {
    LOGGER.error("Error creating audit log for grant privilege to role: " + e.getMessage(), e);
  }
  return tResponse;
}
@Override
public TAlterSentryRoleRevokePrivilegeResponse alter_sentry_role_revoke_privilege(
    final TAlterSentryRoleRevokePrivilegeRequest request) throws TException {
  // NOTE(review): like grant, no explicit authorize() — presumably enforced
  // inside the store; confirm.
  Response<Void> response = requestHandle(new RequestHandler<Void>() {
    @Override
    public Response<Void> handle() throws Exception {
      validateClientVersion(request.getProtocol_version());
      CommitContext context = store.alterRoleRevokePrivilege(request.getComponent(),
          request.getRoleName(), toPrivilegeObject(request.getPrivilege()),
          request.getRequestorUserName());
      return new Response<Void>(Status.OK(), context);
    }
  });
  TAlterSentryRoleRevokePrivilegeResponse tResponse =
      new TAlterSentryRoleRevokePrivilegeResponse(response.status);
  if (Status.OK.getCode() == response.status.getValue()) {
    handerInvoker.alter_sentry_role_revoke_privilege(response.context, request, tResponse);
  }
  // Audit logging is best effort: a logging failure must not fail the RPC.
  try {
    AUDIT_LOGGER.info(JsonLogEntityFactory.getInstance()
        .createJsonLogEntity(request, tResponse, conf).toJsonFormatLog());
  } catch (Exception e) {
    LOGGER.error("Error creating audit log for revoke privilege from role: " + e.getMessage(), e);
  }
  return tResponse;
}
@Override
public TAlterSentryRoleAddGroupsResponse alter_sentry_role_add_groups(
    final TAlterSentryRoleAddGroupsRequest request) throws TException {
  Response<Void> response = requestHandle(new RequestHandler<Void>() {
    @Override
    public Response<Void> handle() throws Exception {
      validateClientVersion(request.getProtocol_version());
      authorize(request.getRequestorUserName(),
          getRequestorGroups(conf, request.getRequestorUserName()));
      CommitContext context = store.alterRoleAddGroups(request.getComponent(),
          request.getRoleName(), request.getGroups(), request.getRequestorUserName());
      return new Response<Void>(Status.OK(), context);
    }
  });
  TAlterSentryRoleAddGroupsResponse tResponse =
      new TAlterSentryRoleAddGroupsResponse(response.status);
  if (Status.OK.getCode() == response.status.getValue()) {
    handerInvoker.alter_sentry_role_add_groups(response.context, request, tResponse);
  }
  // Audit logging is best effort: a logging failure must not fail the RPC.
  try {
    AUDIT_LOGGER.info(JsonLogEntityFactory.getInstance()
        .createJsonLogEntity(request, tResponse, conf).toJsonFormatLog());
  } catch (Exception e) {
    LOGGER.error("Error creating audit log for add role to group: " + e.getMessage(), e);
  }
  return tResponse;
}
@Override
public TAlterSentryRoleDeleteGroupsResponse alter_sentry_role_delete_groups(
    final TAlterSentryRoleDeleteGroupsRequest request) throws TException {
  Response<Void> response = requestHandle(new RequestHandler<Void>() {
    @Override
    public Response<Void> handle() throws Exception {
      validateClientVersion(request.getProtocol_version());
      authorize(request.getRequestorUserName(),
          getRequestorGroups(conf, request.getRequestorUserName()));
      CommitContext context = store.alterRoleDeleteGroups(request.getComponent(),
          request.getRoleName(), request.getGroups(), request.getRequestorUserName());
      return new Response<Void>(Status.OK(), context);
    }
  });
  TAlterSentryRoleDeleteGroupsResponse tResponse =
      new TAlterSentryRoleDeleteGroupsResponse(response.status);
  if (Status.OK.getCode() == response.status.getValue()) {
    handerInvoker.alter_sentry_role_delete_groups(response.context, request, tResponse);
  }
  // Audit logging is best effort: a logging failure must not fail the RPC.
  try {
    AUDIT_LOGGER.info(JsonLogEntityFactory.getInstance()
        .createJsonLogEntity(request, tResponse, conf).toJsonFormatLog());
  } catch (Exception e) {
    LOGGER.error("Error creating audit log for delete role from group: " + e.getMessage(), e);
  }
  return tResponse;
}
@Override
public TListSentryRolesResponse list_sentry_roles_by_group(
    final TListSentryRolesRequest request) throws TException {
  Response<Set<TSentryRole>> response = requestHandle(new RequestHandler<Set<TSentryRole>>() {
    @Override
    public Response<Set<TSentryRole>> handle() throws Exception {
      validateClientVersion(request.getProtocol_version());
      Set<String> groups = getRequestorGroups(conf, request.getRequestorUserName());
      if (!AccessConstants.ALL.equalsIgnoreCase(request.getGroupName())) {
        // Only admin users can list all roles in the system (groupName = null);
        // non-admin users may only list groups they belong to.
        boolean admin = inAdminGroups(groups);
        if (!admin && (request.getGroupName() == null || !groups.contains(request.getGroupName()))) {
          throw new SentryAccessDeniedException(ACCESS_DENIAL_MESSAGE + request.getRequestorUserName());
        }
        // Narrow the query to the single requested group.
        groups.clear();
        groups.add(request.getGroupName());
      }
      Set<TSentryRole> tSentryRoles = Sets.newHashSet();
      for (String roleName : store.getRolesByGroups(request.getComponent(), groups)) {
        Set<String> groupsForRole =
            store.getGroupsByRoles(request.getComponent(), Sets.newHashSet(roleName));
        tSentryRoles.add(new TSentryRole(roleName, groupsForRole));
      }
      return new Response<Set<TSentryRole>>(Status.OK(), tSentryRoles);
    }
  });
  TListSentryRolesResponse tResponse = new TListSentryRolesResponse();
  tResponse.setStatus(response.status);
  tResponse.setRoles(response.content);
  return tResponse;
}
@Override
public TListSentryPrivilegesResponse list_sentry_privileges_by_role(
    final TListSentryPrivilegesRequest request) throws TException {
  Response<Set<TSentryPrivilege>> response = requestHandle(new RequestHandler<Set<TSentryPrivilege>>() {
    @Override
    public Response<Set<TSentryPrivilege>> handle() throws Exception {
      validateClientVersion(request.getProtocol_version());
      Set<String> groups = getRequestorGroups(conf, request.getRequestorUserName());
      // Non-admins may only inspect roles granted to their own groups.
      if (!inAdminGroups(groups)) {
        Set<String> grantedRoles = toTrimmedLower(store.getRolesByGroups(request.getComponent(), groups));
        if (!grantedRoles.contains(toTrimmedLower(request.getRoleName()))) {
          throw new SentryAccessDeniedException(ACCESS_DENIAL_MESSAGE + request.getRequestorUserName());
        }
      }
      Set<PrivilegeObject> privileges = store.getPrivilegesByProvider(request.getComponent(),
          request.getServiceName(),
          Sets.newHashSet(request.getRoleName()),
          null, toAuthorizables(request.getAuthorizables()));
      Set<TSentryPrivilege> tSentryPrivileges = Sets.newHashSet();
      for (PrivilegeObject privilege : privileges) {
        tSentryPrivileges.add(fromPrivilegeObject(privilege));
      }
      return new Response<Set<TSentryPrivilege>>(Status.OK(), tSentryPrivileges);
    }
  });
  TListSentryPrivilegesResponse tResponse = new TListSentryPrivilegesResponse();
  tResponse.setStatus(response.status);
  tResponse.setPrivileges(response.content);
  return tResponse;
}
@Override
public TListSentryPrivilegesForProviderResponse list_sentry_privileges_for_provider(
    final TListSentryPrivilegesForProviderRequest request) throws TException {
  Response<Set<String>> response = requestHandle(new RequestHandler<Set<String>>() {
    @Override
    public Response<Set<String>> handle() throws Exception {
      validateClientVersion(request.getProtocol_version());
      Set<String> activeRoleNames = toTrimmedLower(request.getRoleSet().getRoles());
      Set<String> roleNamesForGroups =
          store.getRolesByGroups(request.getComponent(), request.getGroups());
      // roleSet.isAll() means "every role of the groups"; otherwise restrict
      // to the intersection of active and granted roles.
      Set<String> rolesToQuery = request.getRoleSet().isAll()
          ? roleNamesForGroups
          : Sets.intersection(activeRoleNames, roleNamesForGroups);
      Set<PrivilegeObject> privileges = store.getPrivilegesByProvider(request.getComponent(),
          request.getServiceName(), rolesToQuery, null,
          toAuthorizables(request.getAuthorizables()));
      return new Response<Set<String>>(Status.OK(), buildPermissions(privileges));
    }
  });
  TListSentryPrivilegesForProviderResponse tResponse = new TListSentryPrivilegesForProviderResponse();
  tResponse.setStatus(response.status);
  tResponse.setPrivileges(response.content);
  return tResponse;
}
/**
 * Returns, for each requested authorizable string, a map of role name to the
 * privileges granted on that authorizable, filtered by the caller's admin
 * status, requested groups, and active role set. Errors are reported through
 * the response status rather than thrown.
 */
@Override
public TListSentryPrivilegesByAuthResponse list_sentry_privileges_by_authorizable(TListSentryPrivilegesByAuthRequest request) throws TException {
TListSentryPrivilegesByAuthResponse response = new TListSentryPrivilegesByAuthResponse();
Map<String, TSentryPrivilegeMap> authRoleMap = Maps.newHashMap();
// Group names are case sensitive.
Set<String> requestedGroups = request.getGroups();
String subject = request.getRequestorUserName();
TSentryActiveRoleSet activeRoleSet = request.getRoleSet();
// Roles the caller is actually allowed to query, computed below.
Set<String> validActiveRoles = Sets.newHashSet();
try {
validateClientVersion(request.getProtocol_version());
Set<String> memberGroups = getRequestorGroups(conf, subject);
// Disallow non-admin users to lookup groups that
// they are not part of.
if(!inAdminGroups(memberGroups)) {
if (requestedGroups != null && !requestedGroups.isEmpty()) {
for (String requestedGroup : requestedGroups) {
// If user doesn't belong to one of the requested groups,
// then raise security exception.
if (!memberGroups.contains(requestedGroup)) {
throw new SentryAccessDeniedException(ACCESS_DENIAL_MESSAGE + subject);
}
}
} else {
// Non-admin's search is limited to its own groups.
requestedGroups = memberGroups;
}
// Disallow non-admin to lookup roles that they are not part of
if (activeRoleSet != null && !activeRoleSet.isAll()) {
Set<String> grantedRoles = toTrimmedLower(store.getRolesByGroups(request.getComponent(), requestedGroups));
Set<String> activeRoleNames = toTrimmedLower(activeRoleSet.getRoles());
for (String activeRole : activeRoleNames) {
if (!grantedRoles.contains(activeRole)) {
throw new SentryAccessDeniedException(ACCESS_DENIAL_MESSAGE
+ subject);
}
}
// For non-admin, valid active roles are intersection of active roles and granted roles.
validActiveRoles.addAll(activeRoleSet.isAll() ? grantedRoles : Sets.intersection(activeRoleNames, grantedRoles));
}
} else {
Set<String> allRoles = toTrimmedLower(store.getAllRoleNames());
Set<String> activeRoleNames = toTrimmedLower(activeRoleSet.getRoles());
// For admin, if requestedGroups are empty, valid active roles are intersection of active roles and all roles.
// Otherwise, valid active roles are intersection of active roles and the roles of requestedGroups.
if (requestedGroups == null || requestedGroups.isEmpty()) {
validActiveRoles.addAll(activeRoleSet.isAll() ? allRoles : Sets.intersection(activeRoleNames, allRoles));
} else {
Set<String> requestedRoles = toTrimmedLower(store.getRolesByGroups(request.getComponent(), requestedGroups));
validActiveRoles.addAll(activeRoleSet.isAll() ? allRoles : Sets.intersection(activeRoleNames, requestedRoles));
}
}
// Build the per-authorizable privilege maps; an empty validActiveRoles set
// simply yields empty maps rather than an error.
if (request.getAuthorizablesSet() != null) {
for (String authorizablesStr : request.getAuthorizablesSet()) {
List<? extends Authorizable> authorizables = toAuthorizables(authorizablesStr);
Set<MSentryGMPrivilege> sentryPrivileges = store.getPrivilegesByAuthorizable(request.getComponent(), request.getServiceName(), validActiveRoles, authorizables);
authRoleMap.put(fromAuthorizableToStr(authorizables), toTSentryPrivilegeMap(sentryPrivileges));
}
}
response.setPrivilegesMapByAuth(authRoleMap);
response.setStatus(Status.OK());
} catch (SentryAccessDeniedException e) {
LOGGER.error(e.getMessage(), e);
response.setStatus(Status.AccessDenied(e.getMessage(), e));
} catch (SentryThriftAPIMismatchException e) {
LOGGER.error(e.getMessage(), e);
response.setStatus(Status.THRIFT_VERSION_MISMATCH(e.getMessage(), e));
} catch (Exception e) {
// Catch-all boundary: report unexpected failures as a runtime-error status.
String msg = "Unknown error for request: " + request + ", message: "
+ e.getMessage();
LOGGER.error(msg, e);
response.setStatus(Status.RuntimeError(msg, e));
}
return response;
}
@Override
public TDropPrivilegesResponse drop_sentry_privilege(
    final TDropPrivilegesRequest request) throws TException {
  Response<Void> response = requestHandle(new RequestHandler<Void>() {
    @Override
    public Response<Void> handle() throws Exception {
      validateClientVersion(request.getProtocol_version());
      authorize(request.getRequestorUserName(),
          getRequestorGroups(conf, request.getRequestorUserName()));
      CommitContext context = store.dropPrivilege(request.getComponent(),
          toPrivilegeObject(request.getPrivilege()),
          request.getRequestorUserName());
      return new Response<Void>(Status.OK(), context);
    }
  });
  TDropPrivilegesResponse tResponse = new TDropPrivilegesResponse(response.status);
  // Notify plugins only when the mutation committed successfully.
  if (Status.OK.getCode() == response.status.getValue()) {
    handerInvoker.drop_sentry_privilege(response.context, request, tResponse);
  }
  return tResponse;
}
@Override
public TRenamePrivilegesResponse rename_sentry_privilege(
    final TRenamePrivilegesRequest request) throws TException {
  Response<Void> response = requestHandle(new RequestHandler<Void>() {
    @Override
    public Response<Void> handle() throws Exception {
      validateClientVersion(request.getProtocol_version());
      authorize(request.getRequestorUserName(),
          getRequestorGroups(conf, request.getRequestorUserName()));
      CommitContext context = store.renamePrivilege(request.getComponent(),
          request.getServiceName(),
          toAuthorizables(request.getOldAuthorizables()),
          toAuthorizables(request.getNewAuthorizables()),
          request.getRequestorUserName());
      return new Response<Void>(Status.OK(), context);
    }
  });
  TRenamePrivilegesResponse tResponse = new TRenamePrivilegesResponse(response.status);
  // Notify plugins only when the mutation committed successfully.
  if (Status.OK.getCode() == response.status.getValue()) {
    handerInvoker.rename_sentry_privilege(response.context, request, tResponse);
  }
  return tResponse;
}
/**
 * Internal carrier for the outcome of a handled request: the thrift status,
 * the store commit context (for plugin notification), and an optional payload.
 */
private static class Response<T> {
// Thrift status returned to the client.
TSentryResponseStatus status;
// Commit context of a successful store mutation; null for read-only requests.
CommitContext context;
// Result payload for read requests; null for mutations.
T content;
Response() {
}
Response(TSentryResponseStatus status, CommitContext context) {
this(status,context,null);
}
Response(TSentryResponseStatus status, T content) {
this(status,null,content);
}
Response(TSentryResponseStatus status, CommitContext context, T content) {
this.status = status;
this.context = context;
this.content = content;
}
}
/** A unit of request work executed by {@code requestHandle}; throws on failure. */
private interface RequestHandler<T> {
  Response<T> handle() throws Exception;
}
/**
 * Ensures the client speaks the same thrift protocol version as this server.
 *
 * @param protocol_version the version reported by the client
 * @throws SentryThriftAPIMismatchException if the versions differ
 */
private static void validateClientVersion(int protocol_version) throws SentryThriftAPIMismatchException {
  if (ServiceConstants.ThriftConstants.TSENTRY_SERVICE_VERSION_CURRENT != protocol_version) {
    // Fixed typo ("verion") and use the same constant qualification as the check above.
    String msg = "Sentry thrift API protocol version mismatch: Client thrift version " +
        "is: " + protocol_version + " , server thrift version " +
        "is " + ServiceConstants.ThriftConstants.TSENTRY_SERVICE_VERSION_CURRENT;
    throw new SentryThriftAPIMismatchException(msg);
  }
}
}
| |
/*
* Copyright (C) 2008 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.primitives;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkElementIndex;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkPositionIndexes;
import com.google.common.annotations.Beta;
import com.google.common.annotations.GwtCompatible;
import com.google.common.annotations.GwtIncompatible;
import com.google.common.base.Converter;
import java.io.Serializable;
import java.util.AbstractList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.RandomAccess;
import javax.annotation.CheckForNull;
/**
* Static utility methods pertaining to {@code int} primitives, that are not
* already found in either {@link Integer} or {@link Arrays}.
*
* <p>See the Guava User Guide article on <a href=
* "http://code.google.com/p/guava-libraries/wiki/PrimitivesExplained">
* primitive utilities</a>.
*
* @author Kevin Bourrillion
* @since 1.0
*/
@GwtCompatible(emulated = true)
public final class Ints {
// Utility class: no instances.
private Ints() {}

/**
 * The number of bytes required to represent a primitive {@code int}
 * value (always 4).
 */
public static final int BYTES = Integer.SIZE / Byte.SIZE;

/**
 * The largest power of two that can be represented as an {@code int},
 * i.e. {@code 2^30}.
 *
 * @since 10.0
 */
public static final int MAX_POWER_OF_TWO = 1 << (Integer.SIZE - 2);
/**
 * Returns a hash code for {@code value}; equal to the result of invoking
 * {@code ((Integer) value).hashCode()}.
 *
 * @param value a primitive {@code int} value
 * @return a hash code for the value
 */
public static int hashCode(int value) {
// An int is its own hash code, matching Integer.hashCode().
return value;
}
/**
 * Returns the {@code int} value that is equal to {@code value}, if possible.
 *
 * @param value any value in the range of the {@code int} type
 * @return the {@code int} value that equals {@code value}
 * @throws IllegalArgumentException if {@code value} is greater than {@link
 *     Integer#MAX_VALUE} or less than {@link Integer#MIN_VALUE}
 */
public static int checkedCast(long value) {
  // Narrow, then widen back: a round-trip mismatch means value was out of range.
  long roundTripped = (int) value;
  if (roundTripped != value) {
    // don't use checkArgument here, to avoid boxing
    throw new IllegalArgumentException("Out of range: " + value);
  }
  return (int) value;
}
/**
 * Returns the {@code int} nearest in value to {@code value}.
 *
 * @param value any {@code long} value
 * @return the same value cast to {@code int} if it is in the range of the
 *     {@code int} type, {@link Integer#MAX_VALUE} if it is too large,
 *     or {@link Integer#MIN_VALUE} if it is too small
 */
public static int saturatedCast(long value) {
  // Clamp into [Integer.MIN_VALUE, Integer.MAX_VALUE] before narrowing.
  long clamped = Math.max(Integer.MIN_VALUE, Math.min(Integer.MAX_VALUE, value));
  return (int) clamped;
}
/**
 * Compares the two specified {@code int} values. The sign of the value
 * returned is the same as that of {@code ((Integer) a).compareTo(b)}.
 *
 * <p><b>Note:</b> projects using JDK 7 or later should use the equivalent
 * {@link Integer#compare} method instead.
 *
 * @param a the first {@code int} to compare
 * @param b the second {@code int} to compare
 * @return a negative value if {@code a} is less than {@code b}; a positive
 *     value if {@code a} is greater than {@code b}; or zero if they are equal
 */
public static int compare(int a, int b) {
  if (a < b) {
    return -1;
  }
  if (a > b) {
    return 1;
  }
  return 0;
}
/**
 * Returns {@code true} if {@code target} is present as an element anywhere in
 * {@code array}.
 *
 * @param array an array of {@code int} values, possibly empty
 * @param target a primitive {@code int} value
 * @return {@code true} if {@code array[i] == target} for some value of {@code
 *     i}
 */
public static boolean contains(int[] array, int target) {
  for (int i = 0; i < array.length; i++) {
    if (array[i] == target) {
      return true;
    }
  }
  return false;
}
/**
 * Returns the index of the first appearance of the value {@code target} in
 * {@code array}.
 *
 * @param array an array of {@code int} values, possibly empty
 * @param target a primitive {@code int} value
 * @return the least index {@code i} for which {@code array[i] == target}, or
 *     {@code -1} if no such index exists.
 */
public static int indexOf(int[] array, int target) {
  return indexOf(array, target, 0, array.length);
}

// TODO(kevinb): consider making this public
// Forward scan over [start, end); -1 when absent.
private static int indexOf(
    int[] array, int target, int start, int end) {
  int pos = start;
  while (pos < end) {
    if (array[pos] == target) {
      return pos;
    }
    pos++;
  }
  return -1;
}
/**
 * Returns the start position of the first occurrence of the specified {@code
 * target} within {@code array}, or {@code -1} if there is no such occurrence.
 *
 * <p>More formally, returns the lowest index {@code i} such that {@code
 * java.util.Arrays.copyOfRange(array, i, i + target.length)} contains exactly
 * the same elements as {@code target}.
 *
 * @param array the array to search for the sequence {@code target}
 * @param target the array to search for as a sub-sequence of {@code array}
 */
public static int indexOf(int[] array, int[] target) {
  checkNotNull(array, "array");
  checkNotNull(target, "target");
  // An empty target trivially matches at position 0.
  if (target.length == 0) {
    return 0;
  }
  int lastStart = array.length - target.length;
  for (int i = 0; i <= lastStart; i++) {
    boolean matches = true;
    for (int j = 0; j < target.length; j++) {
      if (array[i + j] != target[j]) {
        matches = false;
        break;
      }
    }
    if (matches) {
      return i;
    }
  }
  return -1;
}
/**
 * Returns the index of the last appearance of the value {@code target} in
 * {@code array}.
 *
 * @param array an array of {@code int} values, possibly empty
 * @param target a primitive {@code int} value
 * @return the greatest index {@code i} for which {@code array[i] == target},
 *     or {@code -1} if no such index exists.
 */
public static int lastIndexOf(int[] array, int target) {
  return lastIndexOf(array, target, 0, array.length);
}

// TODO(kevinb): consider making this public
// Backward scan over [start, end); -1 when absent.
private static int lastIndexOf(
    int[] array, int target, int start, int end) {
  int pos = end - 1;
  while (pos >= start) {
    if (array[pos] == target) {
      return pos;
    }
    pos--;
  }
  return -1;
}
/**
 * Returns the least value present in {@code array}.
 *
 * @param array a <i>nonempty</i> array of {@code int} values
 * @return the value present in {@code array} that is less than or equal to
 *     every other value in the array
 * @throws IllegalArgumentException if {@code array} is empty
 */
public static int min(int... array) {
  checkArgument(array.length > 0);
  int least = array[0];
  for (int i = 1; i < array.length; i++) {
    least = Math.min(least, array[i]);
  }
  return least;
}
/**
 * Returns the greatest value present in {@code array}.
 *
 * @param array a <i>nonempty</i> array of {@code int} values
 * @return the value present in {@code array} that is greater than or equal to
 *     every other value in the array
 * @throws IllegalArgumentException if {@code array} is empty
 */
public static int max(int... array) {
  checkArgument(array.length > 0);
  int greatest = array[0];
  for (int i = 1; i < array.length; i++) {
    greatest = Math.max(greatest, array[i]);
  }
  return greatest;
}
/**
 * Returns the values from each provided array combined into a single array.
 * For example, {@code concat(new int[] {a, b}, new int[] {}, new
 * int[] {c}} returns the array {@code {a, b, c}}.
 *
 * @param arrays zero or more {@code int} arrays
 * @return a single array containing all the values from the source arrays, in
 *     order
 */
public static int[] concat(int[]... arrays) {
  // First pass: total size. Second pass: bulk-copy each array into place.
  int totalLength = 0;
  for (int[] array : arrays) {
    totalLength += array.length;
  }
  int[] result = new int[totalLength];
  int offset = 0;
  for (int[] array : arrays) {
    System.arraycopy(array, 0, result, offset, array.length);
    offset += array.length;
  }
  return result;
}
/**
 * Returns a big-endian representation of {@code value} in a 4-element byte
 * array; equivalent to {@code ByteBuffer.allocate(4).putInt(value).array()}.
 * For example, the input value {@code 0x12131415} would yield the byte array
 * {@code {0x12, 0x13, 0x14, 0x15}}.
 *
 * <p>If you need to convert and concatenate several values (possibly even of
 * different types), use a shared {@link java.nio.ByteBuffer} instance, or use
 * {@link com.google.common.io.ByteStreams#newDataOutput()} to get a growable
 * buffer.
 */
@GwtIncompatible("doesn't work")
public static byte[] toByteArray(int value) {
  byte[] result = new byte[4];
  // Big-endian: most significant byte first.
  for (int i = 0; i < 4; i++) {
    result[3 - i] = (byte) (value >> (i * 8));
  }
  return result;
}
/**
 * Returns the {@code int} value whose big-endian representation is stored in
 * the first 4 bytes of {@code bytes}; equivalent to {@code
 * ByteBuffer.wrap(bytes).getInt()}. For example, the input byte array {@code
 * {0x12, 0x13, 0x14, 0x15, 0x33}} would yield the {@code int} value {@code
 * 0x12131415}.
 *
 * <p>Arguably, it's preferable to use {@link java.nio.ByteBuffer}; that
 * library exposes much more flexibility at little cost in readability.
 *
 * @throws IllegalArgumentException if {@code bytes} has fewer than 4 elements
 */
@GwtIncompatible("doesn't work")
public static int fromByteArray(byte[] bytes) {
checkArgument(bytes.length >= BYTES,
"array too small: %s < %s", bytes.length, BYTES);
// Extra bytes beyond the first four are ignored.
return fromBytes(bytes[0], bytes[1], bytes[2], bytes[3]);
}
/**
 * Returns the {@code int} value whose byte representation is the given 4
 * bytes, in big-endian order; equivalent to {@code Ints.fromByteArray(new
 * byte[] {b1, b2, b3, b4})}.
 *
 * @since 7.0
 */
@GwtIncompatible("doesn't work")
public static int fromBytes(byte b1, byte b2, byte b3, byte b4) {
  // Accumulate big-endian; mask all but the top byte to undo sign extension.
  int result = b1 << 24;
  result |= (b2 & 0xFF) << 16;
  result |= (b3 & 0xFF) << 8;
  result |= b4 & 0xFF;
  return result;
}
/**
 * Serializable singleton converter between strings and integers, backed by
 * {@link Integer#decode} (so hex/octal prefixes are accepted) and
 * {@link Integer#toString()}.
 */
private static final class IntConverter
extends Converter<String, Integer> implements Serializable {
static final IntConverter INSTANCE = new IntConverter();

@Override
protected Integer doForward(String value) {
return Integer.decode(value);
}

@Override
protected String doBackward(Integer value) {
return value.toString();
}

@Override
public String toString() {
return "Ints.stringConverter()";
}

// Preserve the singleton across deserialization.
private Object readResolve() {
return INSTANCE;
}

private static final long serialVersionUID = 1;
}
/**
 * Returns a serializable converter object that converts between strings and
 * integers using {@link Integer#decode} and {@link Integer#toString()}.
 *
 * @since 16.0
 */
@Beta
public static Converter<String, Integer> stringConverter() {
// Stateless, so a single shared instance suffices.
return IntConverter.INSTANCE;
}
/**
 * Returns an array containing the same values as {@code array}, but
 * guaranteed to be of a specified minimum length. If {@code array} already
 * has a length of at least {@code minLength}, it is returned directly.
 * Otherwise, a new array of size {@code minLength + padding} is returned,
 * containing the values of {@code array}, and zeroes in the remaining places.
 *
 * @param array the source array
 * @param minLength the minimum length the returned array must guarantee
 * @param padding an extra amount to "grow" the array by if growth is
 *     necessary
 * @throws IllegalArgumentException if {@code minLength} or {@code padding} is
 *     negative
 * @return an array containing the values of {@code array}, with guaranteed
 *     minimum length {@code minLength}
 */
public static int[] ensureCapacity(
    int[] array, int minLength, int padding) {
  checkArgument(minLength >= 0, "Invalid minLength: %s", minLength);
  checkArgument(padding >= 0, "Invalid padding: %s", padding);
  if (array.length < minLength) {
    return copyOf(array, minLength + padding);
  }
  return array;
}
// Arrays.copyOf() requires Java 6 — this hand-rolled equivalent is kept
// for older runtimes / GWT.
private static int[] copyOf(int[] original, int length) {
  int[] result = new int[length];  // zero-filled by the VM
  int toCopy = Math.min(original.length, length);
  System.arraycopy(original, 0, result, 0, toCopy);
  return result;
}
/**
 * Joins the supplied {@code int} values into a single string with
 * {@code separator} between consecutive values (never at the start or the
 * end). For example, {@code join("-", 1, 2, 3)} yields {@code "1-2-3"}.
 *
 * @param separator text placed between neighboring values
 * @param array the values to join, possibly empty
 */
public static String join(String separator, int... array) {
  checkNotNull(separator);
  // Pre-size to roughly the right order of magnitude.
  StringBuilder result = new StringBuilder(array.length * 5);
  // The delimiter is empty before the first element, so nothing is ever
  // emitted ahead of it; an empty input naturally yields "".
  String delimiter = "";
  for (int value : array) {
    result.append(delimiter).append(value);
    delimiter = separator;
  }
  return result.toString();
}
/**
 * Returns a comparator ordering {@code int} arrays lexicographically: the
 * first differing pair of elements (compared via {@link #compare(int, int)})
 * decides, and when one array is a prefix of the other the shorter one
 * sorts first. For example, {@code [] < [1] < [1, 2] < [2]}.
 *
 * <p>The returned comparator is inconsistent with
 * {@link Object#equals(Object)} (arrays only support identity equality) but
 * is consistent with {@link Arrays#equals(int[], int[])}.
 *
 * @see <a href="http://en.wikipedia.org/wiki/Lexicographical_order">
 *     Lexicographical order article at Wikipedia</a>
 * @since 2.0
 */
public static Comparator<int[]> lexicographicalComparator() {
  return LexicographicalComparator.INSTANCE;
}
// Stateless comparator implemented as a single-value enum so it is
// trivially serializable and a guaranteed singleton.
private enum LexicographicalComparator implements Comparator<int[]> {
  INSTANCE;
  @Override
  public int compare(int[] left, int[] right) {
    int limit = Math.min(left.length, right.length);
    for (int i = 0; i < limit; i++) {
      int cmp = Ints.compare(left[i], right[i]);
      if (cmp != 0) {
        return cmp;
      }
    }
    // One array is a prefix of the other: the shorter one sorts first.
    // Lengths are non-negative, so this subtraction cannot overflow.
    return left.length - right.length;
  }
}
/**
 * Copies each value of {@code collection} into a primitive {@code int[]},
 * converting in the manner of {@link Number#intValue}.
 *
 * <p>Elements are copied from the argument collection as if by
 * {@code collection.toArray()}, so this method is exactly as thread-safe as
 * that call.
 *
 * @param collection a collection of {@code Number} instances
 * @return an array containing the same values as {@code collection}, in the
 *     same order, converted to primitives
 * @throws NullPointerException if {@code collection} or any of its elements
 *     is null
 * @since 1.0 (parameter was {@code Collection<Integer>} before 12.0)
 */
public static int[] toArray(Collection<? extends Number> collection) {
  if (collection instanceof IntArrayAsList) {
    // Fast path: the backing primitives can be copied directly.
    return ((IntArrayAsList) collection).toIntArray();
  }
  Object[] boxed = collection.toArray();
  int[] result = new int[boxed.length];
  for (int i = 0; i < boxed.length; i++) {
    // checkNotNull for GWT (do not optimize)
    result[i] = ((Number) checkNotNull(boxed[i])).intValue();
  }
  return result;
}
/**
 * Returns a fixed-size list view backed by the specified array, similar to
 * {@link Arrays#asList(Object[])}. The view supports
 * {@link List#set(int, Object)}, but attempting to store {@code null}
 * triggers a {@link NullPointerException}.
 *
 * <p>The returned list preserves the values, but not the identities, of the
 * {@code Integer} objects written to or read from it; whether
 * {@code list.get(0) == list.get(0)} holds is unspecified.
 *
 * @param backingArray the array to back the list
 * @return a list view of the array
 */
public static List<Integer> asList(int... backingArray) {
  // An empty input gets the shared immutable empty list rather than a view.
  return (backingArray.length == 0)
      ? Collections.<Integer>emptyList()
      : new IntArrayAsList(backingArray);
}
// Fixed-size List<Integer> view over a slice [start, end) of a primitive
// int[]. The backing array is shared, never copied, so writes through
// set() are visible to the caller's array. Overrides of contains/indexOf/
// lastIndexOf avoid per-element boxing.
@GwtCompatible
private static class IntArrayAsList extends AbstractList<Integer>
    implements RandomAccess, Serializable {
  final int[] array;  // shared backing storage
  final int start;    // inclusive lower bound of the view
  final int end;      // exclusive upper bound of the view
  IntArrayAsList(int[] array) {
    this(array, 0, array.length);
  }
  IntArrayAsList(int[] array, int start, int end) {
    this.array = array;
    this.start = start;
    this.end = end;
  }
  @Override public int size() {
    return end - start;
  }
  @Override public boolean isEmpty() {
    // asList() returns Collections.emptyList() for a zero-length array and
    // subList() does likewise for an empty range, so a live instance is
    // never empty.
    return false;
  }
  @Override public Integer get(int index) {
    checkElementIndex(index, size());
    return array[start + index];
  }
  @Override public boolean contains(Object target) {
    // Overridden to prevent a ton of boxing
    return (target instanceof Integer)
        && Ints.indexOf(array, (Integer) target, start, end) != -1;
  }
  @Override public int indexOf(Object target) {
    // Overridden to prevent a ton of boxing
    if (target instanceof Integer) {
      int i = Ints.indexOf(array, (Integer) target, start, end);
      if (i >= 0) {
        // Translate from an absolute array index to a view-relative index.
        return i - start;
      }
    }
    return -1;
  }
  @Override public int lastIndexOf(Object target) {
    // Overridden to prevent a ton of boxing
    if (target instanceof Integer) {
      int i = Ints.lastIndexOf(array, (Integer) target, start, end);
      if (i >= 0) {
        return i - start;
      }
    }
    return -1;
  }
  @Override public Integer set(int index, Integer element) {
    checkElementIndex(index, size());
    int oldValue = array[start + index];
    // checkNotNull for GWT (do not optimize)
    array[start + index] = checkNotNull(element);
    return oldValue;
  }
  @Override public List<Integer> subList(int fromIndex, int toIndex) {
    int size = size();
    checkPositionIndexes(fromIndex, toIndex, size);
    if (fromIndex == toIndex) {
      return Collections.emptyList();
    }
    // Sub-views share the same backing array, just with narrower bounds.
    return new IntArrayAsList(array, start + fromIndex, start + toIndex);
  }
  @Override public boolean equals(Object object) {
    if (object == this) {
      return true;
    }
    if (object instanceof IntArrayAsList) {
      // Compare the primitives directly; falls through to the (boxing)
      // AbstractList implementation for any other List type.
      IntArrayAsList that = (IntArrayAsList) object;
      int size = size();
      if (that.size() != size) {
        return false;
      }
      for (int i = 0; i < size; i++) {
        if (array[start + i] != that.array[that.start + i]) {
          return false;
        }
      }
      return true;
    }
    return super.equals(object);
  }
  @Override public int hashCode() {
    // Mirrors the List.hashCode() contract using Ints.hashCode per element.
    int result = 1;
    for (int i = start; i < end; i++) {
      result = 31 * result + Ints.hashCode(array[i]);
    }
    return result;
  }
  @Override public String toString() {
    StringBuilder builder = new StringBuilder(size() * 5);
    builder.append('[').append(array[start]);
    for (int i = start + 1; i < end; i++) {
      builder.append(", ").append(array[i]);
    }
    return builder.append(']').toString();
  }
  int[] toIntArray() {
    // Arrays.copyOfRange() is not available under GWT
    int size = size();
    int[] result = new int[size];
    System.arraycopy(array, start, result, 0, size);
    return result;
  }
  private static final long serialVersionUID = 0;
}
// Maps ASCII code points to their digit value (0-35), or -1 for characters
// that are not digits in any radix. Covers '0'-'9', 'A'-'Z' and 'a'-'z' so
// that radixes up to Character.MAX_RADIX (36) can be parsed by tryParse().
private static final byte[] asciiDigits = new byte[128];
static {
  Arrays.fill(asciiDigits, (byte) -1);
  for (int i = 0; i <= 9; i++) {
    asciiDigits['0' + i] = (byte) i;
  }
  // There are 26 letters, so the valid offsets are 0..25. The previous loop
  // bound (i <= 26) ran one step too far and also tagged '[' and '{' with
  // digit value 36 — latent only because tryParse rejects digit >= radix
  // and radix never exceeds 36.
  for (int i = 0; i < 26; i++) {
    asciiDigits['A' + i] = (byte) (10 + i);
    asciiDigits['a' + i] = (byte) (10 + i);
  }
}
// Digit value of c (0-35), or -1 if c is non-ASCII or not a digit/letter.
private static int digit(char c) {
  if (c >= 128) {
    return -1;
  }
  return asciiDigits[c];
}
/**
 * Parses the specified string as a signed decimal integer value, with the
 * ASCII character {@code '-'} (<code>'\u002D'</code>) recognized as the
 * minus sign.
 *
 * <p>Unlike {@link Integer#parseInt(String)} this method reports failure by
 * returning {@code null} rather than throwing, and it accepts only ASCII
 * digits — strings containing non-ASCII digits also yield {@code null}.
 *
 * <p>Strings prefixed with ASCII {@code '+'} are rejected, even under
 * JDK 7, despite the change to {@link Integer#parseInt(String)} in that
 * version.
 *
 * @param string the string representation of an integer value
 * @return the integer value represented by {@code string}, or {@code null}
 *     if {@code string} has a length of zero or cannot be parsed as an
 *     integer value
 * @since 11.0
 */
@Beta
@CheckForNull
@GwtIncompatible("TODO")
public static Integer tryParse(String string) {
  // Delegate to the radix-aware overload with base 10.
  return tryParse(string, 10);
}
/**
 * Parses the specified string as a signed integer value using the specified
 * radix. The ASCII character {@code '-'} (<code>'\u002D'</code>) is
 * recognized as the minus sign.
 *
 * <p>Unlike {@link Integer#parseInt(String, int)}, this method returns
 * {@code null} instead of throwing an exception if parsing fails.
 * Additionally, this method only accepts ASCII digits, and returns
 * {@code null} if non-ASCII digits are present in the string.
 *
 * <p>Note that strings prefixed with ASCII {@code '+'} are rejected, even
 * under JDK 7, despite the change to {@link Integer#parseInt(String, int)}
 * for that version.
 *
 * @param string the string representation of an integer value
 * @param radix the radix to use when parsing
 * @return the integer value represented by {@code string} using
 *     {@code radix}, or {@code null} if {@code string} has a length of zero
 *     or cannot be parsed as an integer value
 * @throws IllegalArgumentException if {@code radix < Character.MIN_RADIX} or
 *     {@code radix > Character.MAX_RADIX}
 */
@CheckForNull
@GwtIncompatible("TODO") static Integer tryParse(
    String string, int radix) {
  if (checkNotNull(string).isEmpty()) {
    return null;
  }
  if (radix < Character.MIN_RADIX || radix > Character.MAX_RADIX) {
    throw new IllegalArgumentException(
        "radix must be between MIN_RADIX and MAX_RADIX but was " + radix);
  }
  boolean negative = string.charAt(0) == '-';
  int index = negative ? 1 : 0;
  // A bare "-" (or empty remainder) is not a number.
  if (index == string.length()) {
    return null;
  }
  int digit = digit(string.charAt(index++));
  if (digit < 0 || digit >= radix) {
    return null;
  }
  // Accumulate in NEGATIVE space: |Integer.MIN_VALUE| exceeds
  // Integer.MAX_VALUE, so the negative range can represent every valid
  // magnitude without overflow during accumulation.
  int accum = -digit;
  // Most negative value that can still be multiplied by radix in range.
  int cap = Integer.MIN_VALUE / radix;
  while (index < string.length()) {
    digit = digit(string.charAt(index++));
    // accum < cap means accum * radix would underflow.
    if (digit < 0 || digit >= radix || accum < cap) {
      return null;
    }
    accum *= radix;
    // Subtracting digit would underflow past Integer.MIN_VALUE.
    if (accum < Integer.MIN_VALUE + digit) {
      return null;
    }
    accum -= digit;
  }
  if (negative) {
    return accum;
  } else if (accum == Integer.MIN_VALUE) {
    // +2147483648 is not representable; only -2147483648 is.
    return null;
  } else {
    return -accum;
  }
}
}
| |
package de.danoeh.antennapod.activity;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.res.Configuration;
import android.content.res.TypedArray;
import android.os.AsyncTask;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentTransaction;
import android.support.v4.app.ListFragment;
import android.support.v4.widget.DrawerLayout;
import android.support.v7.app.ActionBarDrawerToggle;
import android.support.v7.widget.Toolbar;
import android.util.Log;
import android.view.ContextMenu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.View.OnLongClickListener;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.ImageButton;
import android.widget.ListView;
import android.widget.TextView;
import com.bumptech.glide.Glide;
import com.bumptech.glide.load.engine.DiskCacheStrategy;
import org.apache.commons.lang3.StringUtils;
import de.danoeh.antennapod.R;
import de.danoeh.antennapod.adapter.ChapterListAdapter;
import de.danoeh.antennapod.adapter.NavListAdapter;
import de.danoeh.antennapod.core.asynctask.FeedRemover;
import de.danoeh.antennapod.core.dialog.ConfirmationDialog;
import de.danoeh.antennapod.core.feed.Chapter;
import de.danoeh.antennapod.core.feed.EventDistributor;
import de.danoeh.antennapod.core.feed.Feed;
import de.danoeh.antennapod.core.feed.MediaType;
import de.danoeh.antennapod.core.feed.SimpleChapter;
import de.danoeh.antennapod.core.glide.ApGlideSettings;
import de.danoeh.antennapod.core.preferences.UserPreferences;
import de.danoeh.antennapod.core.service.playback.PlaybackService;
import de.danoeh.antennapod.core.storage.DBReader;
import de.danoeh.antennapod.core.storage.DBWriter;
import de.danoeh.antennapod.core.util.playback.ExternalMedia;
import de.danoeh.antennapod.core.util.playback.Playable;
import de.danoeh.antennapod.core.util.playback.PlaybackController;
import de.danoeh.antennapod.dialog.VariableSpeedDialog;
import de.danoeh.antennapod.fragment.CoverFragment;
import de.danoeh.antennapod.fragment.ItemDescriptionFragment;
import de.danoeh.antennapod.menuhandler.NavDrawerActivity;
import de.danoeh.antennapod.preferences.PreferenceController;
/**
 * Activity for playing audio files.
 *
 * <p>Hosts three swappable content fragments — cover art, shownotes
 * (description) and the chapter list — plus a navigation drawer. The
 * selected fragment position and the id of the current media are persisted
 * in activity-private SharedPreferences so the previously shown view can be
 * restored after the activity is recreated.
 */
public class AudioplayerActivity extends MediaplayerActivity implements ItemDescriptionFragment.ItemDescriptionFragmentCallback,
        NavDrawerActivity {
    // Indices of the content fragments inside detachedFragments.
    private static final int POS_COVER = 0;
    private static final int POS_DESCR = 1;
    private static final int POS_CHAPTERS = 2;
    private static final int NUM_CONTENT_FRAGMENTS = 3;
    // Sentinel meaning "no fragment position recorded".
    private static final int POS_NONE = -1;
    // NOTE(review): unusually an instance field; elsewhere TAG is usually
    // static final — confirm before changing.
    final String TAG = "AudioplayerActivity";
    // Name of this activity's private SharedPreferences file plus its keys.
    private static final String PREFS = "AudioPlayerActivityPreferences";
    private static final String PREF_KEY_SELECTED_FRAGMENT_POSITION = "selectedFragmentPosition";
    private static final String PREF_PLAYABLE_ID = "playableId";
    private DrawerLayout drawerLayout;
    private NavListAdapter navAdapter;
    private ListView navList;
    private View navDrawer;
    private ActionBarDrawerToggle drawerToggle;
    // Fragments detached by switchToFragment(), cached for reattachment.
    private Fragment[] detachedFragments;
    private CoverFragment coverFragment;
    private ItemDescriptionFragment descriptionFragment;
    private ListFragment chapterFragment;
    private Fragment currentlyShownFragment;
    private int currentlyShownPosition = -1;
    // Previous fragment position, used by switchToLastFragment().
    private int lastShownPosition = POS_NONE;
    /**
     * Used if onResume was called without loadMediaInfo.
     */
    private int savedPosition = -1;
    private TextView txtvTitle;
    private Button butPlaybackSpeed;
    // Toggles between the chapters and shownotes fragments.
    private ImageButton butNavChaptersShownotes;
    private ImageButton butShowCover;
    // Removes every content fragment (shown or detached) and resets the
    // fragment bookkeeping to its initial state.
    private void resetFragmentView() {
        FragmentTransaction fT = getSupportFragmentManager().beginTransaction();
        if (coverFragment != null) {
            Log.d(TAG, "Removing cover fragment");
            fT.remove(coverFragment);
        }
        if (descriptionFragment != null) {
            Log.d(TAG, "Removing description fragment");
            fT.remove(descriptionFragment);
        }
        if (chapterFragment != null) {
            Log.d(TAG, "Removing chapter fragment");
            fT.remove(chapterFragment);
        }
        if (currentlyShownFragment != null) {
            Log.d(TAG, "Removing currently shown fragment");
            fT.remove(currentlyShownFragment);
        }
        for (int i = 0; i < detachedFragments.length; i++) {
            Fragment f = detachedFragments[i];
            if (f != null) {
                Log.d(TAG, "Removing detached fragment");
                fT.remove(f);
            }
        }
        fT.commit();
        currentlyShownFragment = null;
        coverFragment = null;
        descriptionFragment = null;
        chapterFragment = null;
        currentlyShownPosition = -1;
        detachedFragments = new Fragment[NUM_CONTENT_FRAGMENTS];
    }
    @Override
    protected void onStop() {
        super.onStop();
        Log.d(TAG, "onStop()");
        // Stop the nav-drawer data load and event updates while invisible.
        cancelLoadTask();
        EventDistributor.getInstance().unregister(contentUpdate);
    }
    @Override
    protected void chooseTheme() {
        setTheme(UserPreferences.getNoTitleTheme());
    }
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        detachedFragments = new Fragment[NUM_CONTENT_FRAGMENTS];
    }
    // Persists the currently shown fragment position together with the id of
    // the playing media, so restoreFromPreferences() can match them later.
    private void savePreferences() {
        Log.d(TAG, "Saving preferences");
        SharedPreferences prefs = getSharedPreferences(PREFS, MODE_PRIVATE);
        SharedPreferences.Editor editor = prefs.edit();
        if (currentlyShownPosition >= 0 && controller != null
                && controller.getMedia() != null) {
            editor.putInt(PREF_KEY_SELECTED_FRAGMENT_POSITION,
                    currentlyShownPosition);
            editor.putString(PREF_PLAYABLE_ID, controller.getMedia()
                    .getIdentifier().toString());
        } else {
            // Nothing meaningful to save; clear any stale entries.
            editor.putInt(PREF_KEY_SELECTED_FRAGMENT_POSITION, -1);
            editor.putString(PREF_PLAYABLE_ID, "");
        }
        editor.commit();
        savedPosition = currentlyShownPosition;
    }
    @Override
    public void onConfigurationChanged(Configuration newConfig) {
        super.onConfigurationChanged(newConfig);
        drawerToggle.onConfigurationChanged(newConfig);
    }
    @Override
    protected void onSaveInstanceState(Bundle outState) {
        // super.onSaveInstanceState(outState); would cause crash
        Log.d(TAG, "onSaveInstanceState");
    }
    @Override
    protected void onPause() {
        // Save view state first, then tear the fragments down; they are
        // rebuilt by setupGUI()/loadMediaInfo() on the next resume.
        savePreferences();
        resetFragmentView();
        super.onPause();
    }
    @Override
    protected void onRestoreInstanceState(Bundle savedInstanceState) {
        super.onRestoreInstanceState(savedInstanceState);
        restoreFromPreferences();
    }
    /**
     * Tries to restore the selected fragment position from the Activity's
     * preferences. Only restores when the saved playable id matches the
     * media currently held by the controller.
     *
     * @return true if restoreFromPrefernces changed the activity's state
     */
    private boolean restoreFromPreferences() {
        Log.d(TAG, "Restoring instance state");
        SharedPreferences prefs = getSharedPreferences(PREFS, MODE_PRIVATE);
        int savedPosition = prefs.getInt(PREF_KEY_SELECTED_FRAGMENT_POSITION,
                -1);
        String playableId = prefs.getString(PREF_PLAYABLE_ID, "");
        if (savedPosition != -1
                && controller != null
                && controller.getMedia() != null
                && controller.getMedia().getIdentifier().toString()
                .equals(playableId)) {
            switchToFragment(savedPosition);
            return true;
        } else if (controller == null || controller.getMedia() == null) {
            Log.d(TAG, "Couldn't restore from preferences: controller or media was null");
        } else {
            Log.d(TAG, "Couldn't restore from preferences: savedPosition was -1 or saved identifier and playable identifier didn't match.\nsavedPosition: "
                    + savedPosition + ", id: " + playableId);
        }
        return false;
    }
    @Override
    protected void onResume() {
        super.onResume();
        // A VIEW intent means an external audio file should be played:
        // hand it to the PlaybackService for immediate local playback.
        if (StringUtils.equals(getIntent().getAction(), Intent.ACTION_VIEW)) {
            Intent intent = getIntent();
            Log.d(TAG, "Received VIEW intent: " + intent.getData().getPath());
            ExternalMedia media = new ExternalMedia(intent.getData().getPath(),
                    MediaType.AUDIO);
            Intent launchIntent = new Intent(this, PlaybackService.class);
            launchIntent.putExtra(PlaybackService.EXTRA_PLAYABLE, media);
            launchIntent.putExtra(PlaybackService.EXTRA_START_WHEN_PREPARED,
                    true);
            launchIntent.putExtra(PlaybackService.EXTRA_SHOULD_STREAM, false);
            launchIntent.putExtra(PlaybackService.EXTRA_PREPARE_IMMEDIATELY,
                    true);
            startService(launchIntent);
        }
        if (savedPosition != -1) {
            switchToFragment(savedPosition);
        }
        EventDistributor.getInstance().register(contentUpdate);
        loadData();
    }
    @Override
    protected void onNewIntent(Intent intent) {
        super.onNewIntent(intent);
        // Replace the stored intent so onResume() sees the newest one.
        setIntent(intent);
    }
    @Override
    protected void onAwaitingVideoSurface() {
        Log.d(TAG, "onAwaitingVideoSurface was called in audio player -> switching to video player");
        startActivity(new Intent(this, VideoplayerActivity.class));
    }
    @Override
    protected void postStatusMsg(int resId) {
        if (resId == R.string.player_preparing_msg
                || resId == R.string.player_seeking_msg
                || resId == R.string.player_buffering_msg) {
            // TODO Show progress bar here
        }
    }
    @Override
    protected void clearStatusMsg() {
        // TODO Hide progress bar here
    }
    /**
     * Changes the currently displayed fragment. The outgoing fragment is
     * detached (and cached in detachedFragments) rather than destroyed, so
     * switching back reattaches it with its state intact. Lazily creates
     * each fragment on first use.
     *
     * @param pos Must be POS_COVER, POS_DESCR, or POS_CHAPTERS
     */
    private void switchToFragment(int pos) {
        Log.d(TAG, "Switching contentView to position " + pos);
        if (currentlyShownPosition != pos && controller != null) {
            Playable media = controller.getMedia();
            if (media != null) {
                FragmentTransaction ft = getSupportFragmentManager()
                        .beginTransaction();
                if (currentlyShownFragment != null) {
                    detachedFragments[currentlyShownPosition] = currentlyShownFragment;
                    ft.detach(currentlyShownFragment);
                }
                switch (pos) {
                    case POS_COVER:
                        if (coverFragment == null) {
                            Log.i(TAG, "Using new coverfragment");
                            coverFragment = CoverFragment.newInstance(media);
                        }
                        currentlyShownFragment = coverFragment;
                        break;
                    case POS_DESCR:
                        if (descriptionFragment == null) {
                            descriptionFragment = ItemDescriptionFragment
                                    .newInstance(media, true, true);
                        }
                        currentlyShownFragment = descriptionFragment;
                        break;
                    case POS_CHAPTERS:
                        if (chapterFragment == null) {
                            // Plain ListFragment with vertical padding; tapping
                            // a chapter's play button seeks to that chapter.
                            chapterFragment = new ListFragment() {
                                @Override
                                public void onViewCreated(View view, Bundle savedInstanceState) {
                                    super.onViewCreated(view, savedInstanceState);
                                    // add padding
                                    final ListView lv = getListView();
                                    lv.setClipToPadding(false);
                                    final int vertPadding = getResources().getDimensionPixelSize(R.dimen.list_vertical_padding);
                                    lv.setPadding(0, vertPadding, 0, vertPadding);
                                }
                            };
                            chapterFragment.setListAdapter(new ChapterListAdapter(
                                    AudioplayerActivity.this, 0, media
                                    .getChapters(), media, new ChapterListAdapter.Callback() {
                                @Override
                                public void onPlayChapterButtonClicked(int position) {
                                    Chapter chapter = (Chapter)
                                            chapterFragment.getListAdapter().getItem(position);
                                    controller.seekToChapter(chapter);
                                }
                            }
                            ));
                        }
                        currentlyShownFragment = chapterFragment;
                        break;
                }
                if (currentlyShownFragment != null) {
                    lastShownPosition = currentlyShownPosition;
                    currentlyShownPosition = pos;
                    if (detachedFragments[pos] != null) {
                        Log.d(TAG, "Reattaching fragment at position " + pos);
                        ft.attach(detachedFragments[pos]);
                    } else {
                        ft.add(R.id.contentView, currentlyShownFragment);
                    }
                    ft.disallowAddToBackStack();
                    ft.commit();
                    updateNavButtonDrawable();
                }
            }
        }
    }
    /**
     * Switches to the fragment that was displayed before the current one or the description fragment
     * if no fragment was previously displayed.
     */
    public void switchToLastFragment() {
        if (lastShownPosition != POS_NONE) {
            switchToFragment(lastShownPosition);
        } else {
            switchToFragment(POS_DESCR);
        }
    }
    // Updates the icon, content description and visibility of the
    // chapters/shownotes toggle and the cover button to match the
    // currently shown fragment.
    private void updateNavButtonDrawable() {
        // Index 0 = shownotes, index 1 = chapters (texts and drawables).
        final int[] buttonTexts = new int[]{R.string.show_shownotes_label,
                R.string.show_chapters_label};
        final TypedArray drawables = obtainStyledAttributes(new int[]{
                R.attr.navigation_shownotes, R.attr.navigation_chapters});
        final Playable media = controller.getMedia();
        if (butNavChaptersShownotes != null && butShowCover != null && media != null) {
            butNavChaptersShownotes.setTag(R.id.imageloader_key, null);
            setNavButtonVisibility();
            switch (currentlyShownPosition) {
                case POS_COVER:
                    butShowCover.setVisibility(View.GONE);
                    // On the cover view, the toggle advertises whichever
                    // fragment was shown before the cover.
                    if (lastShownPosition == POS_CHAPTERS) {
                        butNavChaptersShownotes.setImageDrawable(drawables.getDrawable(1));
                        butNavChaptersShownotes.setContentDescription(getString(buttonTexts[1]));
                    } else {
                        butNavChaptersShownotes.setImageDrawable(drawables.getDrawable(0));
                        butNavChaptersShownotes.setContentDescription(getString(buttonTexts[0]));
                    }
                    break;
                case POS_DESCR:
                    butShowCover.setVisibility(View.VISIBLE);
                    butNavChaptersShownotes.setImageDrawable(drawables.getDrawable(1));
                    butNavChaptersShownotes.setContentDescription(getString(buttonTexts[1]));
                    break;
                case POS_CHAPTERS:
                    butShowCover.setVisibility(View.VISIBLE);
                    butNavChaptersShownotes.setImageDrawable(drawables.getDrawable(0));
                    butNavChaptersShownotes.setContentDescription(getString(buttonTexts[0]));
                    break;
            }
        }
        drawables.recycle();
    }
    @Override
    protected void setupGUI() {
        super.setupGUI();
        resetFragmentView();
        Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar);
        setSupportActionBar(toolbar);
        getSupportActionBar().setDisplayHomeAsUpEnabled(true);
        drawerLayout = (DrawerLayout) findViewById(R.id.drawer_layout);
        navList = (ListView) findViewById(R.id.nav_list);
        navDrawer = findViewById(R.id.nav_layout);
        butPlaybackSpeed = (Button) findViewById(R.id.butPlaybackSpeed);
        butNavChaptersShownotes = (ImageButton) findViewById(R.id.butNavChaptersShownotes);
        butShowCover = (ImageButton) findViewById(R.id.butCover);
        txtvTitle = (TextView) findViewById(R.id.txtvTitle);
        drawerToggle = new ActionBarDrawerToggle(this, drawerLayout, R.string.drawer_open, R.string.drawer_close);
        drawerToggle.setDrawerIndicatorEnabled(false);
        drawerLayout.setDrawerListener(drawerToggle);
        navAdapter = new NavListAdapter(itemAccess, this);
        navList.setAdapter(navAdapter);
        // Tapping a drawer entry (other than a divider) opens MainActivity
        // at the corresponding navigation target.
        navList.setOnItemClickListener(new AdapterView.OnItemClickListener() {
            @Override
            public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
                int viewType = parent.getAdapter().getItemViewType(position);
                if (viewType != NavListAdapter.VIEW_TYPE_SECTION_DIVIDER) {
                    Intent intent = new Intent(AudioplayerActivity.this, MainActivity.class);
                    intent.putExtra(MainActivity.EXTRA_NAV_TYPE, viewType);
                    intent.putExtra(MainActivity.EXTRA_NAV_INDEX, position);
                    startActivity(intent);
                }
                drawerLayout.closeDrawer(navDrawer);
            }
        });
        registerForContextMenu(navList);
        drawerToggle.syncState();
        findViewById(R.id.nav_settings).setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                drawerLayout.closeDrawer(navDrawer);
                startActivity(new Intent(AudioplayerActivity.this, PreferenceController.getPreferenceActivity()));
            }
        });
        // Toggle between chapters and shownotes; from the cover view, go
        // back to whichever of the two was shown last.
        butNavChaptersShownotes.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                if (currentlyShownPosition == POS_CHAPTERS) {
                    switchToFragment(POS_DESCR);
                } else if (currentlyShownPosition == POS_DESCR) {
                    switchToFragment(POS_CHAPTERS);
                } else if (currentlyShownPosition == POS_COVER) {
                    switchToLastFragment();
                }
            }
        });
        butShowCover.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                switchToFragment(POS_COVER);
            }
        });
        // Short click cycles through the configured playback speeds.
        butPlaybackSpeed.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                if (controller != null && controller.canSetPlaybackSpeed()) {
                    String[] availableSpeeds = UserPreferences
                            .getPlaybackSpeedArray();
                    String currentSpeed = UserPreferences.getPlaybackSpeed();
                    // Provide initial value in case the speed list has changed
                    // out from under us
                    // and our current speed isn't in the new list
                    String newSpeed;
                    if (availableSpeeds.length > 0) {
                        newSpeed = availableSpeeds[0];
                    } else {
                        newSpeed = "1.0";
                    }
                    for (int i = 0; i < availableSpeeds.length; i++) {
                        if (availableSpeeds[i].equals(currentSpeed)) {
                            if (i == availableSpeeds.length - 1) {
                                newSpeed = availableSpeeds[0];
                            } else {
                                newSpeed = availableSpeeds[i + 1];
                            }
                            break;
                        }
                    }
                    UserPreferences.setPlaybackSpeed(newSpeed);
                    controller.setPlaybackSpeed(Float.parseFloat(newSpeed));
                }
            }
        });
        // Long click opens the full speed-selection dialog.
        butPlaybackSpeed.setOnLongClickListener(new OnLongClickListener() {
            @Override
            public boolean onLongClick(View v) {
                VariableSpeedDialog.showDialog(AudioplayerActivity.this);
                return true;
            }
        });
    }
    // Shows the chapters/shownotes toggle only when the media has chapters
    // or when the cover is displayed (so the user can navigate away).
    private void setNavButtonVisibility() {
        if (butNavChaptersShownotes != null) {
            if (controller != null) {
                Playable media = controller.getMedia();
                if (media != null) {
                    if (media.getChapters() != null || currentlyShownPosition == POS_COVER) {
                        butNavChaptersShownotes.setVisibility(View.VISIBLE);
                        return;
                    }
                }
            }
            butNavChaptersShownotes.setVisibility(View.GONE);
        }
    }
    @Override
    protected void onPlaybackSpeedChange() {
        super.onPlaybackSpeedChange();
        updateButPlaybackSpeed();
    }
    private void updateButPlaybackSpeed() {
        if (controller != null && controller.canSetPlaybackSpeed()) {
            butPlaybackSpeed.setText(UserPreferences.getPlaybackSpeed());
        }
    }
    @Override
    protected void onPositionObserverUpdate() {
        super.onPositionObserverUpdate();
        notifyMediaPositionChanged();
    }
    // Populates title, cover image and button states from the current
    // media; restores the previously shown fragment if none is visible.
    @Override
    protected boolean loadMediaInfo() {
        if (!super.loadMediaInfo()) {
            return false;
        }
        final Playable media = controller.getMedia();
        if (media == null) {
            return false;
        }
        txtvTitle.setText(media.getEpisodeTitle());
        getSupportActionBar().setTitle("");
        Glide.with(this)
                .load(media.getImageUri())
                .placeholder(R.color.light_gray)
                .error(R.color.light_gray)
                .diskCacheStrategy(ApGlideSettings.AP_DISK_CACHE_STRATEGY)
                .fitCenter()
                .dontAnimate()
                .into(butShowCover);
        setNavButtonVisibility();
        if (currentlyShownPosition == -1) {
            if (!restoreFromPreferences()) {
                switchToFragment(POS_COVER);
            }
        }
        if (currentlyShownFragment instanceof AudioplayerContentFragment) {
            ((AudioplayerContentFragment) currentlyShownFragment)
                    .onDataSetChanged(media);
        }
        if (controller == null
                || !controller.canSetPlaybackSpeed()) {
            butPlaybackSpeed.setVisibility(View.GONE);
        } else {
            butPlaybackSpeed.setVisibility(View.VISIBLE);
        }
        updateButPlaybackSpeed();
        return true;
    }
    // Refreshes the chapter list so the current-position highlight moves.
    public void notifyMediaPositionChanged() {
        if (chapterFragment != null) {
            // NOTE(review): unchecked cast — assumes the list adapter set in
            // switchToFragment() (a ChapterListAdapter) is an
            // ArrayAdapter<SimpleChapter>; confirm before touching.
            ArrayAdapter<SimpleChapter> adapter = (ArrayAdapter<SimpleChapter>) chapterFragment
                    .getListAdapter();
            adapter.notifyDataSetChanged();
        }
    }
    @Override
    protected void onReloadNotification(int notificationCode) {
        if (notificationCode == PlaybackService.EXTRA_CODE_VIDEO) {
            Log.d(TAG, "ReloadNotification received, switching to Videoplayer now");
            finish();
            startActivity(new Intent(this, VideoplayerActivity.class));
        }
    }
    @Override
    protected void onBufferStart() {
        postStatusMsg(R.string.player_buffering_msg);
    }
    @Override
    protected void onBufferEnd() {
        clearStatusMsg();
    }
    @Override
    public PlaybackController getPlaybackController() {
        return controller;
    }
    @Override
    public boolean isDrawerOpen() {
        return drawerLayout != null && navDrawer != null && drawerLayout.isDrawerOpen(navDrawer);
    }
    // Implemented by content fragments that want to be told when the
    // underlying media changes (see loadMediaInfo()).
    public interface AudioplayerContentFragment {
        public void onDataSetChanged(Playable media);
    }
    @Override
    protected int getContentViewResourceId() {
        return R.layout.audioplayer_activity;
    }
    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Give the drawer toggle first shot at the home/up button.
        if (drawerToggle != null && drawerToggle.onOptionsItemSelected(item)) {
            return true;
        } else {
            return super.onOptionsItemSelected(item);
        }
    }
    @Override
    public void onCreateContextMenu(ContextMenu menu, View v, ContextMenu.ContextMenuInfo menuInfo) {
        super.onCreateContextMenu(menu, v, menuInfo);
        if(v.getId() != R.id.nav_list) {
            return;
        }
        AdapterView.AdapterContextMenuInfo adapterInfo = (AdapterView.AdapterContextMenuInfo) menuInfo;
        int position = adapterInfo.position;
        // Only feed entries (below the fixed nav items) get a context menu.
        if(position < navAdapter.getSubscriptionOffset()) {
            return;
        }
        MenuInflater inflater = getMenuInflater();
        inflater.inflate(R.menu.nav_feed_context, menu);
        Feed feed = navDrawerData.feeds.get(position - navAdapter.getSubscriptionOffset());
        menu.setHeaderTitle(feed.getTitle());
        // episodes are not loaded, so we cannot check if the podcast has new or unplayed ones!
    }
    @Override
    public boolean onContextItemSelected(MenuItem item) {
        AdapterView.AdapterContextMenuInfo menuInfo = (AdapterView.AdapterContextMenuInfo) item.getMenuInfo();
        // Only handle context menus that originated from the nav list.
        if(menuInfo.targetView.getParent() instanceof ListView == false
                || ((ListView)menuInfo.targetView.getParent()).getId() != R.id.nav_list) {
            return false;
        }
        int position = menuInfo.position;
        Feed feed = navDrawerData.feeds.get(position - navAdapter.getSubscriptionOffset());
        switch(item.getItemId()) {
            case R.id.mark_all_seen_item:
                DBWriter.markFeedSeen(this, feed.getId());
                return true;
            case R.id.mark_all_read_item:
                DBWriter.markFeedRead(this, feed.getId());
                return true;
            case R.id.remove_item:
                // Removal is destructive, so ask for confirmation first.
                final FeedRemover remover = new FeedRemover(this, feed) {
                    @Override
                    protected void onPostExecute(Void result) {
                        super.onPostExecute(result);
                    }
                };
                ConfirmationDialog conDialog = new ConfirmationDialog(this,
                        R.string.remove_feed_label,
                        R.string.feed_delete_confirmation_msg) {
                    @Override
                    public void onConfirmButtonPressed(
                            DialogInterface dialog) {
                        dialog.dismiss();
                        remover.executeAsync();
                    }
                };
                conDialog.createNewDialog().show();
                return true;
            default:
                return super.onContextItemSelected(item);
        }
    }
    // Data backing the navigation drawer plus the task that loads it.
    private DBReader.NavDrawerData navDrawerData;
    private AsyncTask<Void, Void, DBReader.NavDrawerData> loadTask;
    // Asynchronously (re)loads the nav-drawer data from the database.
    private void loadData() {
        loadTask = new AsyncTask<Void, Void, DBReader.NavDrawerData>() {
            @Override
            protected DBReader.NavDrawerData doInBackground(Void... params) {
                return DBReader.getNavDrawerData(AudioplayerActivity.this);
            }
            @Override
            protected void onPostExecute(DBReader.NavDrawerData result) {
                super.onPostExecute(result);
                navDrawerData = result;
                if (navAdapter != null) {
                    navAdapter.notifyDataSetChanged();
                }
            }
        };
        loadTask.execute();
    }
    private void cancelLoadTask() {
        if (loadTask != null) {
            loadTask.cancel(true);
        }
    }
    // Reloads drawer data whenever the feed list changes.
    private EventDistributor.EventListener contentUpdate = new EventDistributor.EventListener() {
        @Override
        public void update(EventDistributor eventDistributor, Integer arg) {
            if ((EventDistributor.FEED_LIST_UPDATE & arg) != 0) {
                Log.d(TAG, "Received contentUpdate Intent.");
                loadData();
            }
        }
    };
    // Adapter callbacks; all return safe defaults while navDrawerData is
    // still loading (null).
    private final NavListAdapter.ItemAccess itemAccess = new NavListAdapter.ItemAccess() {
        @Override
        public int getCount() {
            if (navDrawerData != null) {
                return navDrawerData.feeds.size();
            } else {
                return 0;
            }
        }
        @Override
        public Feed getItem(int position) {
            if (navDrawerData != null && position < navDrawerData.feeds.size()) {
                return navDrawerData.feeds.get(position);
            } else {
                return null;
            }
        }
        @Override
        public int getSelectedItemIndex() {
            // This activity never highlights a drawer entry.
            return -1;
        }
        @Override
        public int getQueueSize() {
            return (navDrawerData != null) ? navDrawerData.queueSize : 0;
        }
        @Override
        public int getNumberOfNewItems() {
            return (navDrawerData != null) ? navDrawerData.numNewItems : 0;
        }
        @Override
        public int getFeedCounter(long feedId) {
            return navDrawerData != null ? navDrawerData.feedCounters.get(feedId) : 0;
        }
    };
}
| |
// Copyright 2000-2017 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.jetbrains.python;
import com.intellij.formatting.WrapType;
import com.intellij.openapi.command.WriteCommandAction;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.codeStyle.CodeStyleManager;
import com.intellij.psi.util.PsiTreeUtil;
import com.jetbrains.python.fixtures.PyTestCase;
import com.jetbrains.python.formatter.PyCodeStyleSettings;
import com.jetbrains.python.psi.LanguageLevel;
import com.jetbrains.python.psi.PyElementGenerator;
import com.jetbrains.python.psi.PyStatement;
/**
 * Tests for the Python code formatter.
 * <p>
 * Most test methods delegate to {@link #doTest()}, which reformats
 * {@code formatter/<testName>.py} and compares the result against
 * {@code formatter/<testName>_after.py}; the fixture file name is derived from
 * the test method name via {@code getTestName(true)}, so method names must not
 * be changed without renaming the corresponding test data files.
 * Code style settings tweaked at the start of a test apply to that test only
 * (NOTE(review): assumed to be restored by the PyTestCase infrastructure —
 * confirm).
 *
 * @author yole
 */
public class PyFormatterTest extends PyTestCase {
  public void testBlankLineBetweenMethods() {
    doTest();
  }
  public void testBlankLineAroundClasses() {
    getCommonCodeStyleSettings().BLANK_LINES_AROUND_CLASS = 2;
    doTest();
  }
  public void testSpaceAfterComma() {
    doTest();
  }
  public void testPep8ExtraneousWhitespace() {
    doTest();
  }
  public void testPep8Operators() {
    doTest();
  }
  public void testPep8KeywordArguments() {
    doTest();
  }
  public void testUnaryMinus() {
    doTest();
  }
  public void testBlankLineAfterImports() {
    doTest();
  }
  // PY-15701
  public void testNoBlankLinesAfterLocalImports() {
    doTest();
  }
  public void testBlankLineBeforeFunction() {
    doTest();
  }
  public void testStarArgument() { // PY-1395
    doTest();
  }
  public void testDictLiteral() { // PY-1461
    doTest();
  }
  public void testListAssignment() { // PY-1522
    doTest();
  }
  public void testStarExpression() { // PY-1523
    doTestPy3();
  }
  // Runs the standard fixture comparison under a Python 3 language level, for
  // constructs that do not parse as Python 2.
  private void doTestPy3() {
    runWithLanguageLevel(LanguageLevel.PYTHON34, this::doTest);
  }
  public void testWrapTuple() { // PY-1792
    doTest();
  }
  public void testSpaceAfterCommaWrappedLine() { // PY-1065
    doTest();
  }
  public void testAlignInBinaryExpression() {
    doTest();
  }
  public void testAlignInStringLiteral() {
    doTest();
  }
  public void testComment() { // PY-2108
    doTest();
  }
  public void testCommentBetweenClasses() { // PY-1598
    doTest();
  }
  public void testCommentInEmptyTuple() { //PY-11904
    doTest();
  }
  public void testTwoLinesBetweenTopLevelClasses() { // PY-2765
    doTest();
  }
  public void testTwoLinesBetweenTopLevelFunctions() { // PY-2765
    doTest();
  }
  // PY-9923
  public void testTwoLinesBetweenTopLevelDeclarationsWithComment() { // PY-9923
    doTest();
  }
  // PY-9923
  public void testTwoLinesBetweenTopLevelStatementAndDeclarationsWithComment() {
    doTest();
  }
  public void testSpecialSlice() { // PY-1928
    doTest();
  }
  public void testNoWrapBeforeParen() { // PY-3172
    doTest();
  }
  public void testTupleAssignment() { // PY-4034 comment
    doTest();
  }
  public void testSpaceInMethodDeclaration() { // PY-4241
    getCommonCodeStyleSettings().SPACE_BEFORE_METHOD_PARENTHESES = true;
    doTest();
  }
  public void testOptionalAlignForMethodParameters() { // PY-3995
    getCommonCodeStyleSettings().ALIGN_MULTILINE_PARAMETERS = false;
    doTest();
  }
  public void testNoAlignForMethodArguments() { // PY-3995
    getCommonCodeStyleSettings().ALIGN_MULTILINE_PARAMETERS_IN_CALLS = false;
    doTest();
  }
  public void testAlignForMethodArguments() { // PY-3995
    doTest();
  }
  public void testLambdaColon() {
    doTest();
  }
  public void testInGenerator() { // PY-5379
    doTest();
  }
  public void testIndentInGenerator() { // PY-6219
    doTest();
  }
  public void testSpaceAroundDot() { // PY-6908
    doTest();
  }
  public void testSetLiteralInArgList() { // PY-6672
    getCommonCodeStyleSettings().ALIGN_MULTILINE_PARAMETERS_IN_CALLS = true;
    doTest();
  }
  public void testLiterals() { // PY-6751
    doTest();
  }
  public void testTupleInArgList() {
    getCommonCodeStyleSettings().ALIGN_MULTILINE_PARAMETERS_IN_CALLS = true;
    doTest();
  }
  public void testAlignInBinaryExpressions() {
    doTest();
  }
  public void testFromImportRelative() {
    doTest();
  }
  public void testContinuationIndent() {
    doTest();
  }
  public void testContinuationIndentInIndentingStatement() { // PY-9573
    doTest();
  }
  public void testContinuationIndentInIndentingStatement2() { // PY-11868
    doTest();
  }
  public void testBlankLineAfterDecorator() {
    doTest();
  }
  public void testSpaceAroundKeywords() {
    doTest();
  }
  public void testSpaceAfterReturn() {
    doTest();
  }
  public void testSpaceAfterRelativeImport() { // PY-8112
    doTest();
  }
  public void testSpaceWithinBraces() { // PY-8069
    getPythonCodeStyleSettings().SPACE_WITHIN_BRACES = true;
    doTest();
  }
  public void testTupleClosingParen() { // PY-7946
    doTest();
  }
  public void testBeforeTopLevelClass() { // PY-7743
    doTest();
  }
  public void testPsiFormatting() { // IDEA-69724
    // This test feeds deliberately malformed Python (unbalanced quotes, a
    // dedented return) through the PSI-level reformat; the "expected" text
    // below only pins the formatter's current behavior on broken input.
    String initial =
      "def method_name(\n" +
      "   desired_impulse_response,\n" +
      " desired_response_parameters,\n" +
      " inverse_filter_length, \n" +
      " observed_impulse_response):\n" +
      " # Extract from here to ...\n" +
      " desired_impulse_response = {'dirac, 'gaussian', logistic_derivative'}\n" +
      "return desired, o";
    final PsiFile file = PyElementGenerator.getInstance(myFixture.getProject()).createDummyFile(LanguageLevel.PYTHON34, initial);
    final PsiElement reformatted = CodeStyleManager.getInstance(myFixture.getProject()).reformat(file);
    String expected =
      "def method_name(\n" +
      "        desired_impulse_response,\n" +
      "        desired_response_parameters,\n" +
      "        inverse_filter_length,\n" +
      "        observed_impulse_response):\n" +
      "    # Extract from here to ...\n" +
      "    desired_impulse_response = {'dirac, '\n" +
      "    gaussian\n" +
      "    ', logistic_derivative'}\n" +
      "    return desired, o";
    assertEquals(expected, reformatted.getText());
  }
  public void testWrapDefinitionWithLongLine() { // IDEA-92081
    getCodeStyleSettings().setRightMargin(PythonLanguage.getInstance(), 30);
    getCommonCodeStyleSettings().WRAP_LONG_LINES = true;
    doTest();
  }
  public void testWrapAssignment() { // PY-8572
    getCodeStyleSettings().setRightMargin(PythonLanguage.getInstance(), 120);
    getCommonCodeStyleSettings().WRAP_LONG_LINES = false;
    doTest();
  }
  public void testIndentInSlice() { // PY-8572
    getCodeStyleSettings().setRightMargin(PythonLanguage.getInstance(), 120);
    getCommonCodeStyleSettings().WRAP_LONG_LINES = false;
    doTest();
  }
  public void testIndentInComprehensions() { // PY-8516
    getPythonCodeStyleSettings().ALIGN_COLLECTIONS_AND_COMPREHENSIONS = false;
    doTest();
  }
  public void testAlignInGenerators() { // PY-8822
    doTest();
  }
  public void testAlignInCallExpression() {
    doTest();
  }
  public void _testAlignInNestedCallInWith() { //PY-11337 TODO:
    doTest();
  }
  public void testContinuationIndentForCallInStatementPart() { // PY-8577
    doTest();
  }
  public void testIfConditionContinuation() { // PY-8195
    doTest();
  }
  public void _testIndentInNestedCall() { // PY-11919 TODO: required changes in formatter to be able to make indent relative to block or alignment
    doTest();
  }
  public void testIndentAfterBackslash() {
    doTest();
  }
  public void testSpaceBeforeBackslash() {
    getPythonCodeStyleSettings().SPACE_BEFORE_BACKSLASH = false;
    doTest();
  }
  public void testNewLineAfterColon() {
    getPythonCodeStyleSettings().NEW_LINE_AFTER_COLON = true;
    doTest();
  }
  public void testNewLineAfterColonMultiClause() {
    doTest();
  }
  public void testLongWith() { // PY-8743
    runWithLanguageLevel(LanguageLevel.PYTHON27, this::doTest);
  }
  // PY-8961, PY-16050
  public void testSpaceInAnnotations() {
    doTestPy3();
  }
  // PY-15791
  public void testForceSpacesAroundEqualSignInAnnotatedParameter() {
    doTestPy3();
  }
  public void testWrapInBinaryExpression() { // PY-9032
    getCodeStyleSettings().setRightMargin(PythonLanguage.getInstance(), 80);
    doTest(true);
  }
  public void testSpaceWithinDeclarationParentheses() { // PY-8818
    getCommonCodeStyleSettings().SPACE_WITHIN_METHOD_PARENTHESES = true;
    getCommonCodeStyleSettings().SPACE_WITHIN_EMPTY_METHOD_PARENTHESES = false;
    doTest();
  }
  // PY-21598
  public void testSpaceBetweenParenthesesInEmptyParameterList() {
    getCommonCodeStyleSettings().SPACE_WITHIN_METHOD_PARENTHESES = false;
    getCommonCodeStyleSettings().SPACE_WITHIN_EMPTY_METHOD_PARENTHESES = true;
    doTest();
  }
  public void testSpaceWithingCallParentheses() {
    getCommonCodeStyleSettings().SPACE_WITHIN_METHOD_CALL_PARENTHESES = true;
    getCommonCodeStyleSettings().SPACE_WITHIN_EMPTY_METHOD_CALL_PARENTHESES = false;
    doTest();
  }
  // PY-21598
  public void testSpaceBetweenParenthesesInEmptyArgumentList() {
    getCommonCodeStyleSettings().SPACE_WITHIN_METHOD_CALL_PARENTHESES = false;
    getCommonCodeStyleSettings().SPACE_WITHIN_EMPTY_METHOD_CALL_PARENTHESES = true;
    doTest();
  }
  public void testWrapBeforeElse() { // PY-10319
    doTest(true);
  }
  public void testSpacesInImportParentheses() { // PY-11359
    doTest();
  }
  public void testWrapImports() { // PY-9163
    getCodeStyleSettings().setRightMargin(PythonLanguage.getInstance(), 80);
    doTest();
  }
  public void testCommentAfterBlock() { // PY-9542
    doTest();
  }
  public void testWrapOnDot() { // PY-6359
    doTest();
  }
  public void testIndentParensInImport() { // PY-9075
    doTest();
  }
  public void testAlignInParenthesizedExpression() {
    doTest();
  }
  public void testAlignInParameterList() {
    doTest();
  }
  public void testAlignListComprehensionInDict() { //PY-10076
    doTest();
  }
  public void testParenthesisAroundGeneratorExpression() {
    doTest();
  }
  // Fixture-driven check: reformats formatter/<testName>.py and diffs it
  // against formatter/<testName>_after.py.
  private void doTest() {
    doTest(false);
  }
  // reformatText == true exercises CodeStyleManager.reformatText() over the
  // whole file range instead of the PSI-based reformat() entry point.
  private void doTest(final boolean reformatText) {
    myFixture.configureByFile("formatter/" + getTestName(true) + ".py");
    WriteCommandAction.runWriteCommandAction(null, () -> {
      CodeStyleManager codeStyleManager = CodeStyleManager.getInstance(myFixture.getProject());
      PsiFile file = myFixture.getFile();
      if (reformatText) {
        codeStyleManager.reformatText(file, 0, file.getTextLength());
      }
      else {
        codeStyleManager.reformat(file);
      }
    });
    myFixture.checkResultByFile("formatter/" + getTestName(true) + "_after.py");
  }
  // PY-12861
  public void testSpacesInsideParenthesisAreStripped() {
    doTest();
  }
  // PY-14838
  public void testNoAlignmentAfterDictHangingIndentInFunctionCall() {
    getCommonCodeStyleSettings().ALIGN_MULTILINE_PARAMETERS_IN_CALLS = true;
    doTest();
  }
  // PY-13955
  public void testNoAlignmentAfterDictHangingIndentInFunctionCallOnTyping() {
    getCommonCodeStyleSettings().ALIGN_MULTILINE_PARAMETERS_IN_CALLS = true;
    final String testName = "formatter/" + getTestName(true);
    myFixture.configureByFile(testName + ".py");
    myFixture.type("\n(");
    myFixture.checkResultByFile(testName + "_after.py");
  }
  // PY-12145
  public void testAlignmentOfClosingBraceInDictLiteralWhenNoHangingIndent() {
    doTest();
  }
  public void testNoAlignmentClosingBraceInDictLiteralWhenOpeningBraceIsForcedOnNewLine() {
    getPythonCodeStyleSettings().DICT_NEW_LINE_AFTER_LEFT_BRACE = true;
    doTest();
  }
  // PY-13004
  public void testAlignmentOfClosingParenthesisOfArgumentListWhenNoHangingIndent() {
    doTest();
  }
  // PY-14408
  public void testIndentsWithTabsInsideDictLiteral() {
    getIndentOptions().USE_TAB_CHARACTER = true;
    doTest();
  }
  // PY-12749
  public void testContinuationIndentIsNotUsedForNestedFunctionCallsInWithStatement() {
    doTest();
  }
  public void testAlignmentOfClosingParenthesisInNestedFunctionCallsWithSingleArgument() {
    doTest();
  }
  // PY-12748
  public void testIndentCommentariesInsideFromImportStatement() {
    doTest();
  }
  public void testClosingParenthesisInFromImportStatementWithNoHangingIndent() {
    doTest();
  }
  // PY-12932
  public void testCommentedCodeFragmentIgnored() {
    doTest();
  }
  // PY-12932
  public void testTrailingComment() {
    doTest();
  }
  // PY-12938
  public void testDoubleHashCommentIgnored() {
    doTest();
  }
  // PY-12938
  public void testDocCommentIgnored() {
    doTest();
  }
  // PY-12775
  public void testShebangCommentIgnored() {
    doTest();
  }
  // PY-13232
  public void testWhitespaceInsertedAfterHashSignInMultilineComment() {
    doTest();
  }
  /**
   * This test merely checks that call to {@link com.intellij.psi.codeStyle.CodeStyleManager#reformat(com.intellij.psi.PsiElement)}
   * is possible for Python sources.
   */
  public void testReformatOfSingleElementPossible() {
    myFixture.configureByFile("formatter/" + getTestName(true) + ".py");
    WriteCommandAction.runWriteCommandAction(myFixture.getProject(), () -> {
      final PsiElement elementAtCaret = myFixture.getFile().findElementAt(myFixture.getCaretOffset());
      assertNotNull(elementAtCaret);
      final PyStatement statement = PsiTreeUtil.getParentOfType(elementAtCaret, PyStatement.class, false);
      assertNotNull(statement);
      final CodeStyleManager codeStyleManager = CodeStyleManager.getInstance(myFixture.getProject());
      codeStyleManager.reformat(statement);
    });
    myFixture.checkResultByFile("formatter/" + getTestName(true) + "_after.py");
  }
  // PY-11552
  public void testExtraBlankLinesBetweenMethodsAndAtTheEnd() {
    getCommonCodeStyleSettings().KEEP_BLANK_LINES_IN_DECLARATIONS = 1;
    doTest();
  }
  // PY-11552
  public void testTrailingBlankLinesWithBackslashesAtFileEnd() {
    doTest();
  }
  // PY-11552
  public void testTrailingBlankLinesWithBackslashesAtFunctionEnd() {
    doTest();
  }
  // PY-11552
  public void testTrailingBlankLinesWithBackslashesAtFunctionEndNoNewLine() {
    doTest();
  }
  // PY-11552
  public void testTrailingBlankLinesWithBackslashesMixed() {
    doTest();
  }
  // PY-11552
  public void testTrailingBlankLinesInEmptyFile() {
    doTest();
  }
  // PY-14962
  public void testAlignDictLiteralOnValue() {
    getPythonCodeStyleSettings().DICT_ALIGNMENT = PyCodeStyleSettings.DICT_ALIGNMENT_ON_VALUE;
    doTest();
  }
  // PY-22272
  public void testAlightDictLiteralOnValueSubscriptionsAndSlices() {
    getPythonCodeStyleSettings().DICT_ALIGNMENT = PyCodeStyleSettings.DICT_ALIGNMENT_ON_VALUE;
    doTest();
  }
  // PY-14962
  public void testAlignDictLiteralOnColon() {
    getPythonCodeStyleSettings().DICT_ALIGNMENT = PyCodeStyleSettings.DICT_ALIGNMENT_ON_COLON;
    doTest();
  }
  // PY-14962
  public void testDictWrappingChopDownIfLong() {
    getCodeStyleSettings().setRightMargin(PythonLanguage.getInstance(), 80);
    getPythonCodeStyleSettings().DICT_WRAPPING = WrapType.CHOP_DOWN_IF_LONG.getLegacyRepresentation();
    doTest();
  }
  // PY-14962
  public void testForceNewLineAfterLeftBraceInDict() {
    getPythonCodeStyleSettings().DICT_NEW_LINE_AFTER_LEFT_BRACE = true;
    doTest();
  }
  // PY-14962
  public void testForceNewLineBeforeRightBraceInDict() {
    getPythonCodeStyleSettings().DICT_NEW_LINE_BEFORE_RIGHT_BRACE = true;
    doTest();
  }
  // PY-17674
  public void testForceNewLineBeforeRightBraceInDictAfterColon() {
    getPythonCodeStyleSettings().DICT_NEW_LINE_BEFORE_RIGHT_BRACE = true;
    doTest();
  }
  // PY-16393
  public void testHangingIndentDetectionIgnoresComments() {
    doTest();
  }
  // PY-15530
  public void testAlignmentInArgumentListWhereFirstArgumentIsEmptyCall() {
    doTest();
  }
  public void testAlignmentInListLiteralWhereFirstItemIsEmptyTuple() {
    doTest();
  }
  public void testHangingIndentInNamedArgumentValue() {
    doTest();
  }
  public void testHangingIndentInParameterDefaultValue() {
    doTest();
  }
  // PY-15171
  public void testHangingIndentInKeyValuePair() {
    doTest();
  }
  public void testDoNotDestroyAlignment_OnPostponedFormatting() {
    getPythonCodeStyleSettings().DICT_ALIGNMENT = PyCodeStyleSettings.DICT_ALIGNMENT_ON_COLON;
    doTest();
  }
  public void testAlignmentOfEmptyCollectionLiterals() {
    doTest();
  }
  // PY-17593
  public void testBlanksBetweenImportsPreservedWithoutOptimizeImports() {
    doTest();
  }
  // PY-17979, PY-13304
  public void testContinuationIndentBeforeFunctionArguments() {
    getPythonCodeStyleSettings().USE_CONTINUATION_INDENT_FOR_ARGUMENTS = true;
    doTest();
  }
  // PY-20909
  public void testContinuationIndentForCollectionsAndComprehensions() {
    getPythonCodeStyleSettings().USE_CONTINUATION_INDENT_FOR_COLLECTION_AND_COMPREHENSIONS = true;
    doTest();
  }
  // PY-20909
  public void testContinuationIndentForCollectionsAndComprehensionsHangingIndentOfClosingBrace() {
    getPythonCodeStyleSettings().USE_CONTINUATION_INDENT_FOR_COLLECTION_AND_COMPREHENSIONS = true;
    getPythonCodeStyleSettings().HANG_CLOSING_BRACKETS = true;
    doTest();
  }
  // PY-18265
  public void testNoSpaceAroundPowerOperator() {
    getPythonCodeStyleSettings().SPACE_AROUND_POWER_OPERATOR = false;
    doTest();
  }
  // PY-20392
  public void testSpaceAfterTrailingCommaInDictLiterals() {
    doTest();
  }
  // PY-20392
  public void testSpaceAfterTrailingCommaIfNoSpaceAfterCommaButWithinBracesOrBrackets() {
    getPythonCodeStyleSettings().SPACE_WITHIN_BRACES = true;
    getCommonCodeStyleSettings().SPACE_WITHIN_BRACKETS = true;
    getCommonCodeStyleSettings().SPACE_AFTER_COMMA = false;
    doTest();
  }
  // PY-10182
  public void testHangClosingParenthesisInFromImport() {
    // Shouldn't affect the result
    getPythonCodeStyleSettings().ALIGN_MULTILINE_IMPORTS = false;
    getPythonCodeStyleSettings().HANG_CLOSING_BRACKETS = true;
    doTest();
  }
  // PY-10182
  public void testHangClosingParenthesisInFunctionCall() {
    getPythonCodeStyleSettings().HANG_CLOSING_BRACKETS = true;
    doTest();
  }
  // PY-10182
  public void testHangClosingParenthesisInFunctionDefinition() {
    getPythonCodeStyleSettings().HANG_CLOSING_BRACKETS = true;
    doTest();
  }
  // PY-10182
  public void testHangClosingBracketsInCollectionLiterals() {
    getPythonCodeStyleSettings().HANG_CLOSING_BRACKETS = true;
    doTest();
  }
  // PY-15874
  public void testHangClosingOffComprehensionsAndGeneratorExpressions() {
    getPythonCodeStyleSettings().HANG_CLOSING_BRACKETS = false;
    doTest();
  }
  // PY-15874
  public void testHangClosingOnComprehensionsAndGeneratorExpressions() {
    getPythonCodeStyleSettings().HANG_CLOSING_BRACKETS = true;
    doTest();
  }
  // PY-20633
  public void testFromImportWrappingChopDownIfLong() {
    getPythonCodeStyleSettings().FROM_IMPORT_WRAPPING = WrapType.CHOP_DOWN_IF_LONG.getLegacyRepresentation();
    getCodeStyleSettings().setRightMargin(PythonLanguage.INSTANCE, 30);
    doTest();
  }
  // PY-20633
  public void testFromImportParenthesesPlacement() {
    getPythonCodeStyleSettings().FROM_IMPORT_NEW_LINE_AFTER_LEFT_PARENTHESIS = true;
    getPythonCodeStyleSettings().FROM_IMPORT_NEW_LINE_BEFORE_RIGHT_PARENTHESIS = true;
    getCommonCodeStyleSettings().SPACE_AFTER_COLON = true;
    getCodeStyleSettings().setRightMargin(PythonLanguage.INSTANCE, 35);
    doTest();
  }
  // PY-20633
  public void testFromImportParenthesesPlacementHangClosingParenthesis() {
    getPythonCodeStyleSettings().FROM_IMPORT_NEW_LINE_AFTER_LEFT_PARENTHESIS = true;
    getPythonCodeStyleSettings().FROM_IMPORT_NEW_LINE_BEFORE_RIGHT_PARENTHESIS = true;
    getPythonCodeStyleSettings().HANG_CLOSING_BRACKETS = true;
    getCommonCodeStyleSettings().SPACE_AFTER_COLON = true;
    getCodeStyleSettings().setRightMargin(PythonLanguage.INSTANCE, 35);
    doTest();
  }
  // PY-20633
  public void testFromImportForceParenthesesIfMultiline() {
    getCodeStyleSettings().setRightMargin(PythonLanguage.INSTANCE, 30);
    getPythonCodeStyleSettings().FROM_IMPORT_PARENTHESES_FORCE_IF_MULTILINE = true;
    doTest();
  }
  // PY-20633
  // See http://docs.pylonsproject.org/en/latest/community/codestyle.html
  public void testPyramidFromImportFormatting() {
    getPythonCodeStyleSettings().FROM_IMPORT_PARENTHESES_FORCE_IF_MULTILINE = true;
    getPythonCodeStyleSettings().FROM_IMPORT_NEW_LINE_AFTER_LEFT_PARENTHESIS = true;
    getPythonCodeStyleSettings().FROM_IMPORT_NEW_LINE_BEFORE_RIGHT_PARENTHESIS = true;
    getPythonCodeStyleSettings().FROM_IMPORT_WRAPPING = WrapType.ALWAYS.getLegacyRepresentation();
    getPythonCodeStyleSettings().FROM_IMPORT_TRAILING_COMMA_IF_MULTILINE = true;
    getPythonCodeStyleSettings().HANG_CLOSING_BRACKETS = true;
    doTest();
  }
  // PY-9764
  public void testFromImportTrailingCommaWithParentheses() {
    getCodeStyleSettings().setRightMargin(PythonLanguage.INSTANCE, 30);
    getPythonCodeStyleSettings().FROM_IMPORT_PARENTHESES_FORCE_IF_MULTILINE = true;
    getPythonCodeStyleSettings().FROM_IMPORT_TRAILING_COMMA_IF_MULTILINE = true;
    doTest();
  }
  // PY-9764
  public void testFromImportTrailingCommaWithoutParentheses() {
    getCodeStyleSettings().setRightMargin(PythonLanguage.INSTANCE, 30);
    getPythonCodeStyleSettings().FROM_IMPORT_PARENTHESES_FORCE_IF_MULTILINE = false;
    getPythonCodeStyleSettings().FROM_IMPORT_TRAILING_COMMA_IF_MULTILINE = true;
    doTest();
  }
  // PY-21931
  public void testSpacesAroundElseInConditionalExpression() {
    doTest();
  }
  // PY-20970
  public void testSpacesAfterNonlocal() {
    runWithLanguageLevel(LanguageLevel.PYTHON34, this::doTest);
  }
  // PY-21515
  public void testSpacesBeforeFromImportSource() {
    doTest();
  }
  public void testSpacesAfterFromInYieldFrom() {
    runWithLanguageLevel(LanguageLevel.PYTHON34, this::doTest);
  }
  // PY-24220
  public void testBlankLinesAfterTopLevelImportsBeforeClass() {
    getCommonCodeStyleSettings().BLANK_LINES_AFTER_IMPORTS = 5;
    doTest();
  }
  // PY-24220
  public void testBlankLinesAfterTopLevelImportsBeforeClassWithPrecedingComments() {
    getCommonCodeStyleSettings().BLANK_LINES_AFTER_IMPORTS = 5;
    doTest();
  }
  // PY-25356
  public void testCommentsSpacing() {
    doTest();
  }
  // PY-19705
  public void testBlankLinesAroundFirstMethod() {
    getPythonCodeStyleSettings().BLANK_LINES_BEFORE_FIRST_METHOD = 1;
    doTest();
  }
  // PY-21823
  public void testSliceAlignment() {
    doTest();
  }
  // PY-15051
  public void testTrailingBlockCommentsIndentationPreserved() {
    doTest();
  }
  public void testMultilineIfConditionKeywordAtEnd() {
    doTest();
  }
  // PY-21328
  public void testMultilineIfConditionLessComparisonsKeywordAtEnd() {
    doTest();
  }
  public void testMultilineIfConditionKeywordAtStart() {
    doTest();
  }
  public void testMultilineIfConditionInParenthesesKeywordAtEnd() {
    doTest();
  }
  public void testMultilineIfConditionInParenthesesNegatedKeywordAtEnd() {
    doTest();
  }
  public void testMultilineIfConditionInParenthesesKeywordAtEndSecondOperandIsReference() {
    doTest();
  }
  public void testMultilineIfConditionInParenthesesKeywordAtStart() {
    doTest();
  }
  public void testMultilineIfConditionNestedExpressions() {
    doTest();
  }
  public void testMultilineIfConditionInParenthesesNestedExpressions() {
    doTest();
  }
  public void testMultilineElifCondition() {
    doTest();
  }
  public void testMultilineElifConditionInParentheses() {
    doTest();
  }
  // PY-22035
  public void testMultilineIfConditionComplex() {
    doTest();
  }
  // PY-24160
  public void testMultilineIfConditionInParenthesesHangingIndent() {
    doTest();
  }
  public void testMultilineBinaryExpressionInsideGenerator() {
    doTest();
  }
  public void testNotParenthesisedBinaryExpressions() {
    doTest();
  }
  public void testGluedStringLiteralInParentheses() {
    getPythonCodeStyleSettings().ALIGN_COLLECTIONS_AND_COMPREHENSIONS = false;
    doTest();
    getPythonCodeStyleSettings().ALIGN_COLLECTIONS_AND_COMPREHENSIONS = true;
    doTest();
  }
  public void testVariableAnnotations() {
    runWithLanguageLevel(LanguageLevel.PYTHON36, this::doTest);
  }
  // PY-27266
  public void testChainedMethodCallsInParentheses() {
    doTest();
  }
  // PY-27266
  public void testChainedAttributeAccessInParentheses() {
    doTest();
  }
  public void testMultilineFStringExpressions() {
    doTest();
  }
}
| |
/*
* Copyright (C) 2008 ZXing authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.zxing.client.android.result;
import android.app.Activity;
import android.content.Intent;
import android.net.Uri;
import android.provider.Contacts;

import com.google.zxing.client.android.Contents;
import com.google.zxing.client.android.Intents;
import com.google.zxing.client.android.LocaleManager;
import com.google.zxing.client.android.R;
import com.google.zxing.client.android.SearchBookContentsActivity;
import com.google.zxing.client.result.ParsedResult;
import com.google.zxing.client.result.ParsedResultType;

import java.text.DateFormat;
import java.text.ParsePosition;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.Locale;
/**
 * Base class for handling a decoded barcode. Subclasses declare a set of
 * action buttons appropriate for their {@link ParsedResultType} and implement
 * the behavior behind each one; all the intent-launching helpers live here.
 */
public abstract class ResultHandler {

  public static final int MAX_BUTTON_COUNT = 4;

  protected final ParsedResult mResult;
  private final Activity mActivity;

  public ResultHandler(Activity activity, ParsedResult result) {
    mResult = result;
    mActivity = activity;
  }

  /**
   * Indicates how many buttons the derived class wants shown.
   *
   * @return The integer button count.
   */
  public abstract int getButtonCount();

  /**
   * The text of the nth action button.
   *
   * @param index From 0 to getButtonCount() - 1
   * @return The button text as a resource ID
   */
  public abstract int getButtonText(int index);

  /**
   * Execute the action which corresponds to the nth button.
   *
   * @param index The button that was clicked.
   */
  public abstract void handleButtonPress(int index);

  /**
   * Create a possibly styled string for the contents of the current barcode.
   *
   * @return The text to be displayed, with carriage returns stripped.
   */
  public CharSequence getDisplayContents() {
    String contents = mResult.getDisplayResult();
    return contents.replace("\r", "");
  }

  /**
   * A string describing the kind of barcode that was found, e.g. "Found contact info".
   *
   * @return The resource ID of the string.
   */
  public abstract int getDisplayTitle();

  /**
   * A convenience method to get the parsed type. Should not be overridden.
   *
   * @return The parsed type, e.g. URI or ISBN
   */
  public final ParsedResultType getType() {
    return mResult.getType();
  }

  /**
   * Sends an intent to create a new calendar event by prepopulating the Add Event UI. Older
   * versions of the system have a bug where the event title will not be filled out.
   *
   * @param summary A description of the event
   * @param start The start time as yyyyMMdd or yyyyMMdd'T'HHmmss or yyyyMMdd'T'HHmmss'Z'
   * @param end The end time as yyyyMMdd or yyyyMMdd'T'HHmmss or yyyyMMdd'T'HHmmss'Z'
   * @throws IllegalArgumentException if start or end cannot be parsed
   */
  public final void addCalendarEvent(String summary, String start, String end) {
    Intent intent = new Intent(Intent.ACTION_EDIT);
    intent.setType("vnd.android.cursor.item/event");
    intent.putExtra("beginTime", calculateMilliseconds(start));
    if (start.length() == 8) {
      // A bare yyyyMMdd date means an all-day event.
      intent.putExtra("allDay", true);
    }
    intent.putExtra("endTime", calculateMilliseconds(end));
    intent.putExtra("title", summary);
    launchIntent(intent);
  }

  /**
   * Converts a barcode timestamp into epoch milliseconds.
   * <p>
   * Fixes over the original version: the SimpleDateFormat instances are built
   * with an explicit Locale (locale-sensitive parsing is a known pitfall), and
   * a failed parse now throws IllegalArgumentException with the offending
   * input instead of a bare NullPointerException ({@code DateFormat.parse}
   * with a ParsePosition returns null on error rather than throwing).
   *
   * @param when yyyyMMdd, yyyyMMdd'T'HHmmss (local), or yyyyMMdd'T'HHmmss'Z' (UTC)
   * @return milliseconds since the epoch in the device's local time
   */
  private long calculateMilliseconds(String when) {
    if (when.length() == 8) {
      // Only contains year/month/day
      DateFormat format = new SimpleDateFormat("yyyyMMdd", Locale.US);
      return parseDate(format, when, when).getTime();
    }
    // The when string can be local time, or UTC if it ends with a Z
    DateFormat format = new SimpleDateFormat("yyyyMMdd'T'HHmmss", Locale.US);
    long milliseconds = parseDate(format, when.substring(0, 15), when).getTime();
    if (when.length() == 16 && when.charAt(15) == 'Z') {
      // The digits were parsed as local wall-clock time but actually denote
      // UTC; shift by the local zone + DST offset to get the true local time.
      Calendar calendar = new GregorianCalendar();
      int offset = calendar.get(Calendar.ZONE_OFFSET) + calendar.get(Calendar.DST_OFFSET);
      milliseconds += offset;
    }
    return milliseconds;
  }

  // Parses text with the given format, failing loudly (instead of returning
  // null and causing a later NPE) when the input is malformed.
  private static Date parseDate(DateFormat format, String text, String original) {
    Date date = format.parse(text, new ParsePosition(0));
    if (date == null) {
      throw new IllegalArgumentException("Unparseable date: " + original);
    }
    return date;
  }

  /**
   * Launches the Add Contact UI prepopulated with the supplied fields.
   * Null or empty fields are simply omitted from the intent.
   */
  public final void addContact(String[] names, String[] phoneNumbers, String[] emails, String note,
                               String address, String org, String title) {
    Intent intent = new Intent(Contacts.Intents.Insert.ACTION, Contacts.People.CONTENT_URI);
    putExtra(intent, Contacts.Intents.Insert.NAME, names);
    // Cap at the number of phone/email slots the Contacts provider supports.
    int phoneCount = Math.min((phoneNumbers != null) ? phoneNumbers.length : 0,
        Contents.PHONE_KEYS.length);
    for (int x = 0; x < phoneCount; x++) {
      putExtra(intent, Contents.PHONE_KEYS[x], phoneNumbers[x]);
    }
    int emailCount = Math.min((emails != null) ? emails.length : 0, Contents.EMAIL_KEYS.length);
    for (int x = 0; x < emailCount; x++) {
      putExtra(intent, Contents.EMAIL_KEYS[x], emails[x]);
    }
    putExtra(intent, Contacts.Intents.Insert.NOTES, note);
    putExtra(intent, Contacts.Intents.Insert.POSTAL, address);
    putExtra(intent, Contacts.Intents.Insert.COMPANY, org);
    putExtra(intent, Contacts.Intents.Insert.JOB_TITLE, title);
    launchIntent(intent);
  }

  public final void shareByEmail(String contents) {
    sendEmailFromUri("mailto:", mActivity.getString(R.string.msg_share_subject_line), contents);
  }

  public final void sendEmail(String address, String subject, String body) {
    sendEmailFromUri("mailto:" + address, subject, body);
  }

  public final void sendEmailFromUri(String uri, String subject, String body) {
    Intent intent = new Intent(Intent.ACTION_SENDTO, Uri.parse(uri));
    putExtra(intent, "subject", subject);
    putExtra(intent, "body", body);
    launchIntent(intent);
  }

  public final void shareBySMS(String contents) {
    sendSMSFromUri("smsto:", mActivity.getString(R.string.msg_share_subject_line) + ":\n" + contents);
  }

  public final void sendSMS(String phoneNumber, String body) {
    sendSMSFromUri("smsto:" + phoneNumber, body);
  }

  public final void sendSMSFromUri(String uri, String body) {
    Intent intent = new Intent(Intent.ACTION_SENDTO, Uri.parse(uri));
    putExtra(intent, "sms_body", body);
    // Exit the app once the SMS is sent
    intent.putExtra("compose_mode", true);
    launchIntent(intent);
  }

  public final void sendMMS(String phoneNumber, String subject, String body) {
    sendMMSFromUri("mmsto:" + phoneNumber, subject, body);
  }

  public final void sendMMSFromUri(String uri, String subject, String body) {
    Intent intent = new Intent(Intent.ACTION_SENDTO, Uri.parse(uri));
    // The Messaging app needs to see a valid subject or else it will treat this as an SMS.
    if (subject == null || subject.length() == 0) {
      putExtra(intent, "subject", mActivity.getString(R.string.msg_default_mms_subject));
    } else {
      putExtra(intent, "subject", subject);
    }
    putExtra(intent, "sms_body", body);
    intent.putExtra("compose_mode", true);
    launchIntent(intent);
  }

  public final void dialPhone(String phoneNumber) {
    launchIntent(new Intent(Intent.ACTION_DIAL, Uri.parse("tel:" + phoneNumber)));
  }

  public final void dialPhoneFromUri(String uri) {
    launchIntent(new Intent(Intent.ACTION_DIAL, Uri.parse(uri)));
  }

  public final void openMap(String geoURI) {
    launchIntent(new Intent(Intent.ACTION_VIEW, Uri.parse(geoURI)));
  }

  /**
   * Do a geo search using the address as the query.
   *
   * @param address The address to find
   * @param title An optional title, e.g. the name of the business at this address
   */
  public final void searchMap(String address, String title) {
    String query = address;
    if (title != null && title.length() > 0) {
      query = query + " (" + title + ")";
    }
    launchIntent(new Intent(Intent.ACTION_VIEW, Uri.parse("geo:0,0?q=" + Uri.encode(query))));
  }

  public final void getDirections(float latitude, float longitude) {
    launchIntent(new Intent(Intent.ACTION_VIEW, Uri.parse("http://maps.google." +
        LocaleManager.getCountryTLD() + "/maps?f=d&daddr=" + latitude + "," + longitude)));
  }

  public final void openProductSearch(String upc) {
    Uri uri = Uri.parse("http://www.google." + LocaleManager.getCountryTLD() + "/products?q=" + upc);
    launchIntent(new Intent(Intent.ACTION_VIEW, uri));
  }

  public final void openBookSearch(String isbn) {
    Uri uri = Uri.parse("http://books.google." + LocaleManager.getCountryTLD() + "/books?vid=isbn" +
        isbn);
    launchIntent(new Intent(Intent.ACTION_VIEW, uri));
  }

  public final void searchBookContents(String isbn) {
    Intent intent = new Intent(Intents.SearchBookContents.ACTION);
    intent.setClassName(mActivity, SearchBookContentsActivity.class.getName());
    putExtra(intent, Intents.SearchBookContents.ISBN, isbn);
    launchIntent(intent);
  }

  public final void openURL(String url) {
    launchIntent(new Intent(Intent.ACTION_VIEW, Uri.parse(url)));
  }

  public final void webSearch(String query) {
    Intent intent = new Intent(Intent.ACTION_WEB_SEARCH);
    intent.putExtra("query", query);
    launchIntent(intent);
  }

  private void launchIntent(Intent intent) {
    if (intent != null) {
      mActivity.startActivity(intent);
    }
  }

  // Adds the extra only when there is a non-empty value, keeping intents lean.
  private static void putExtra(Intent intent, String key, String value) {
    if (value != null && value.length() > 0) {
      intent.putExtra(key, value);
    }
  }

  // TODO: This is only used by the names field, and only the first name will be taken.
  private static void putExtra(Intent intent, String key, String[] value) {
    if (value != null && value.length > 0) {
      putExtra(intent, key, value[0]);
    }
  }
}
| |
package nat.sqlite;
import android.content.ContentValues;
import android.content.Context;
import android.database.Cursor;
import android.database.SQLException;
import android.database.sqlite.SQLiteDatabase;
import android.util.Log;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import nat.flashcardcompetitionModel.Card;
import nat.flashcardcompetitionModel.CardInfo;
import nat.flashcardcompetitionModel.CardMeta;
import nat.flashcardcompetitionModel.Studyset;
/**
* Created by Nat on 2/23/2017.
*/
/**
 * Data-access layer for the flashcard-competition SQLite database.
 *
 * <p>Wraps {@link FlashcardCompetitionDbHelper} and exposes typed
 * read/insert/update helpers for study sets, card infos and card metadata.
 * Call {@link #open()} before any other operation and {@link #close()}
 * when finished.
 */
public class DBManager {
    private FlashcardCompetitionDbHelper dbHelper;
    private Context context;
    // NOTE(review): shared by every DBManager instance; the most recent open() wins.
    private static SQLiteDatabase database;

    public DBManager(Context c) {
        context = c;
    }

    /**
     * Opens (or creates) the writable database.
     *
     * @return this manager, for call chaining
     * @throws SQLException if the database cannot be opened for writing
     */
    public DBManager open() throws SQLException {
        dbHelper = new FlashcardCompetitionDbHelper(context);
        database = dbHelper.getWritableDatabase();
        return this;
    }

    /** Closes the underlying helper (and with it the database connection). */
    public void close() {
        dbHelper.close();
    }

    /*** TESTING PURPOSE ***/
    // TODO Delete this
    public void deleteAllTables() {
        // Collect user table names first; mutating the schema while the cursor
        // is still open would be unsafe.
        Cursor c = database.rawQuery("SELECT name FROM sqlite_master WHERE type='table'", null);
        List<String> tables = new ArrayList<>();
        try {
            while (c.moveToNext()) {
                tables.add(c.getString(0));
            }
        } finally {
            c.close(); // this cursor was previously leaked
        }
        for (String table : tables) {
            // Skip internal tables: "sqlite_sequence" cannot be dropped (SQLite
            // throws) and "android_metadata" is managed by the platform.
            if (table.startsWith("sqlite_") || "android_metadata".equals(table)) {
                continue;
            }
            database.execSQL("DROP TABLE IF EXISTS " + table);
        }
    }

    /**
     * Loads every study set stored locally.
     *
     * @return all study sets (highscore initialised to 0), or null if the
     *         query produced no cursor
     */
    public List<Studyset> getStudySet() {
        Cursor cursor = database.query(FlashcardCompetitionContract.Studyset.TABLE_NAME,
                null, null, null, null, null, null);
        if (cursor == null) {
            return null;
        }
        List<Studyset> studySets = new ArrayList<>();
        try {
            while (cursor.moveToNext()) {
                Studyset studySet = new Studyset();
                studySet.id = cursor.getInt(
                        cursor.getColumnIndexOrThrow(FlashcardCompetitionContract.Studyset.COLUMN_NAME_ID));
                studySet.name = cursor.getString(
                        cursor.getColumnIndexOrThrow(FlashcardCompetitionContract.Studyset.COLUMN_NAME_NAME));
                studySet.description = cursor.getString(
                        cursor.getColumnIndexOrThrow(FlashcardCompetitionContract.Studyset.COLUMN_NAME_DESCRIPTION));
                studySet.created = cursor.getString(
                        cursor.getColumnIndexOrThrow(FlashcardCompetitionContract.Studyset.COLUMN_NAME_CREATED));
                studySet.updated = cursor.getString(
                        cursor.getColumnIndexOrThrow(FlashcardCompetitionContract.Studyset.COLUMN_NAME_UPDATED));
                // Languages are persisted as a single comma-separated string.
                String supportedLanguages = cursor.getString(
                        cursor.getColumnIndexOrThrow(FlashcardCompetitionContract.Studyset.COLUMN_NAME_SUPPORTED_LANGUAGE));
                studySet.supported_language =
                        new ArrayList<>(Arrays.asList(supportedLanguages.trim().split(",")));
                studySet.highscore = 0;
                studySets.add(studySet);
            }
        } finally {
            cursor.close();
        }
        return studySets;
    }

    /**
     * Inserts the study set, or updates the existing row with the same id
     * ("upsert": CONFLICT_IGNORE insert followed by an update on conflict).
     */
    public void insertStudySet(Studyset studyset) {
        ContentValues contentValues = new ContentValues();
        contentValues.put(FlashcardCompetitionContract.Studyset.COLUMN_NAME_ID, studyset.id);
        contentValues.put(FlashcardCompetitionContract.Studyset.COLUMN_NAME_NAME, studyset.name);
        contentValues.put(FlashcardCompetitionContract.Studyset.COLUMN_NAME_DESCRIPTION, studyset.description);
        contentValues.put(FlashcardCompetitionContract.Studyset.COLUMN_NAME_CREATED, studyset.created);
        contentValues.put(FlashcardCompetitionContract.Studyset.COLUMN_NAME_UPDATED, studyset.updated);
        // Persist languages as "lang1,lang2,"; the trailing comma is harmless
        // because split(",") drops trailing empty strings on read.
        StringBuilder stringBuilder = new StringBuilder();
        for (String s : studyset.supported_language) {
            stringBuilder.append(s);
            stringBuilder.append(",");
        }
        contentValues.put(FlashcardCompetitionContract.Studyset.COLUMN_NAME_SUPPORTED_LANGUAGE, stringBuilder.toString());
        // insertWithOnConflict returns a long row id; -1 signals the ignored conflict.
        long rowId = database.insertWithOnConflict(FlashcardCompetitionContract.Studyset.TABLE_NAME,
                null, contentValues, SQLiteDatabase.CONFLICT_IGNORE);
        if (rowId == -1) {
            database.update(FlashcardCompetitionContract.Studyset.TABLE_NAME,
                    contentValues,
                    FlashcardCompetitionContract.Studyset.COLUMN_NAME_ID + "=?",
                    new String[] {String.valueOf(studyset.id)});
        }
    }

    /** Inserts the card info, or updates the existing row with the same id. */
    public void insertCardInfo(CardInfo cardInfo) {
        ContentValues contentValues = new ContentValues();
        contentValues.put(FlashcardCompetitionContract.CardInfo.COLUMN_NAME_ID, cardInfo.id);
        contentValues.put(FlashcardCompetitionContract.CardInfo.COLUMN_NAME_CARD_ID, cardInfo.cardId);
        contentValues.put(FlashcardCompetitionContract.CardInfo.COLUMN_NAME_STUDYSET_ID, cardInfo.studySetId);
        contentValues.put(FlashcardCompetitionContract.CardInfo.COLUMN_NAME_LANGUAUGE, cardInfo.language);
        contentValues.put(FlashcardCompetitionContract.CardInfo.COLUMN_NAME_WORD, cardInfo.word);
        long rowId = database.insertWithOnConflict(FlashcardCompetitionContract.CardInfo.TABLE_NAME,
                null, contentValues, SQLiteDatabase.CONFLICT_IGNORE);
        if (rowId == -1) {
            // BUG FIX: the WHERE clause previously used CardMeta.COLUMN_NAME_ID;
            // this update targets the CardInfo table, so its own id column applies.
            database.update(FlashcardCompetitionContract.CardInfo.TABLE_NAME,
                    contentValues,
                    FlashcardCompetitionContract.CardInfo.COLUMN_NAME_ID + "=?",
                    new String[] {String.valueOf(cardInfo.id)});
        }
    }

    /**
     * Loads all card rows of a study set and pairs rows sharing a card id into
     * {@link Card}s, front side in lang1 and back side in lang2. Cards without
     * exactly one word per requested language are skipped.
     *
     * @return the paired cards, or null if the query produced no cursor
     */
    public List<Card> getCardByStudySetId(int studySetId, String lang1, String lang2) {
        String selection = FlashcardCompetitionContract.CardInfo.COLUMN_NAME_STUDYSET_ID + "=?";
        String[] selectionArgs = new String[] {String.valueOf(studySetId)};
        Cursor cursor = database.query(FlashcardCompetitionContract.CardInfo.TABLE_NAME,
                null, selection, selectionArgs, null, null, null);
        if (cursor == null) {
            return null;
        }
        List<CardInfo> cardInfos = new ArrayList<>();
        try {
            while (cursor.moveToNext()) {
                CardInfo cardInfo = new CardInfo();
                cardInfo.id = cursor.getInt(
                        cursor.getColumnIndexOrThrow(FlashcardCompetitionContract.CardInfo.COLUMN_NAME_ID));
                cardInfo.studySetId = studySetId;
                cardInfo.cardId = cursor.getInt(
                        cursor.getColumnIndexOrThrow(FlashcardCompetitionContract.CardInfo.COLUMN_NAME_CARD_ID));
                cardInfo.language = cursor.getString(
                        cursor.getColumnIndexOrThrow(FlashcardCompetitionContract.CardInfo.COLUMN_NAME_LANGUAUGE));
                cardInfo.word = cursor.getString(
                        cursor.getColumnIndexOrThrow(FlashcardCompetitionContract.CardInfo.COLUMN_NAME_WORD));
                cardInfos.add(cardInfo);
            }
        } finally {
            cursor.close();
        }
        // Group the rows of the two requested languages by card id.
        HashMap<Integer, List<CardInfo>> byCardId = new HashMap<>();
        for (CardInfo info : cardInfos) {
            if (info.language.equals(lang1) || info.language.equals(lang2)) {
                List<CardInfo> group = byCardId.get(info.cardId);
                if (group == null) {
                    group = new ArrayList<>();
                    byCardId.put(info.cardId, group);
                }
                group.add(info);
            }
        }
        // Pair up cards that have exactly one word per language.
        List<Card> cards = new ArrayList<>();
        for (List<CardInfo> pair : byCardId.values()) {
            if (pair.size() != 2) {
                continue;
            }
            CardInfo first = pair.get(0);
            CardInfo second = pair.get(1);
            Card card = new Card();
            card.cardID = first.cardId;
            if (first.language.equals(lang1)) {
                card.first = first.word;
                card.second = second.word;
            } else {
                card.first = second.word;
                card.second = first.word;
            }
            // TODO check card meta for the active flag instead of defaulting to true.
            card.active = true;
            cards.add(card);
        }
        return cards;
    }

    /** Inserts the card meta, or updates the existing row with the same card id. */
    public void insertCardMeta(CardMeta cardMeta) {
        ContentValues contentValues = new ContentValues();
        contentValues.put(FlashcardCompetitionContract.CardMeta.COLUMN_NAME_CARD_ID, cardMeta.cardID);
        contentValues.put(FlashcardCompetitionContract.CardMeta.COLUMN_NAME_IS_ACTIVE, cardMeta.isActive);
        long rowId = database.insertWithOnConflict(FlashcardCompetitionContract.CardMeta.TABLE_NAME,
                null, contentValues, SQLiteDatabase.CONFLICT_IGNORE);
        if (rowId == -1) {
            database.update(FlashcardCompetitionContract.CardMeta.TABLE_NAME,
                    contentValues,
                    FlashcardCompetitionContract.CardMeta.COLUMN_NAME_CARD_ID + "=?",
                    new String[] {String.valueOf(cardMeta.cardID)});
        }
    }

    /**
     * Loads all card metadata rows.
     *
     * @return every stored {@link CardMeta}, or null if the query produced no cursor
     */
    public ArrayList<CardMeta> getCardMeta() {
        String[] columns = new String[] {
                FlashcardCompetitionContract.CardMeta.COLUMN_NAME_ID,
                FlashcardCompetitionContract.CardMeta.COLUMN_NAME_CARD_ID,
                FlashcardCompetitionContract.CardMeta.COLUMN_NAME_IS_ACTIVE};
        Cursor cursor = database.query(FlashcardCompetitionContract.CardMeta.TABLE_NAME,
                columns, null, null, null, null, null);
        if (cursor == null) {
            return null;
        }
        ArrayList<CardMeta> cardMetas = new ArrayList<>();
        try {
            while (cursor.moveToNext()) {
                int id = cursor.getInt(
                        cursor.getColumnIndexOrThrow(FlashcardCompetitionContract.CardMeta.COLUMN_NAME_ID));
                int cardID = cursor.getInt(
                        cursor.getColumnIndexOrThrow(FlashcardCompetitionContract.CardMeta.COLUMN_NAME_CARD_ID));
                int isActive = cursor.getInt(
                        cursor.getColumnIndexOrThrow(FlashcardCompetitionContract.CardMeta.COLUMN_NAME_IS_ACTIVE));
                cardMetas.add(new CardMeta(id, cardID, isActive));
            }
        } finally {
            cursor.close();
        }
        return cardMetas;
    }

    /** Updates the is_active flag of the meta row for the given card id. */
    public void updateCardMeta(CardMeta cardMeta) {
        ContentValues contentValues = new ContentValues();
        contentValues.put(FlashcardCompetitionContract.CardMeta.COLUMN_NAME_IS_ACTIVE, cardMeta.isActive);
        // "=?" instead of "LIKE ?": the card id is numeric, equality is the intent.
        String selection = FlashcardCompetitionContract.CardMeta.COLUMN_NAME_CARD_ID + "=?";
        String[] selectionArgs = {String.valueOf(cardMeta.cardID)};
        database.update(FlashcardCompetitionContract.CardMeta.TABLE_NAME, contentValues, selection, selectionArgs);
    }
}
| |
/*
* Copyright 2015 Google Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.inferred.freebuilder.processor.model;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.collect.Iterables.getOnlyElement;
import static org.inferred.freebuilder.processor.model.ClassTypeImpl.newTopLevelClass;
import static org.inferred.freebuilder.processor.model.NullTypeImpl.NULL;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import org.inferred.freebuilder.processor.source.Partial;
import java.lang.annotation.Annotation;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.concurrent.atomic.AtomicReference;
import javax.lang.model.element.AnnotationMirror;
import javax.lang.model.element.Element;
import javax.lang.model.element.ElementKind;
import javax.lang.model.element.ElementVisitor;
import javax.lang.model.element.Modifier;
import javax.lang.model.element.Name;
import javax.lang.model.element.TypeParameterElement;
import javax.lang.model.type.TypeKind;
import javax.lang.model.type.TypeMirror;
import javax.lang.model.type.TypeVariable;
import javax.lang.model.type.TypeVisitor;
/**
 * Fake implementation of a formal type parameter of a {@link GenericElement}.
 */
public abstract class GenericElementParameter implements TypeParameterElement {

  /**
   * Builder of {@link GenericElementParameter} instances.
   */
  public static class Builder {

    private final String simpleName;
    private final List<TypeMirror> bounds = new ArrayList<>();
    private final AtomicReference<GenericElementParameter> element = new AtomicReference<>();

    Builder(String simpleName) {
      this.simpleName = simpleName;
    }

    /** Adds an upper bound to the parameter; illegal once {@link #build} has run. */
    public Builder addBound(TypeMirror bound) {
      checkState(element.get() == null,
          "Cannot modify a %s after calling build()", Builder.class.getName());
      bounds.add(bound);
      return this;
    }

    /**
     * Returns a type variable for this parameter. May be called before build();
     * the returned mirror resolves lazily through the shared AtomicReference.
     */
    public TypeVariableImpl asType() {
      return Partial.of(TypeVariableImpl.class, element);
    }

    /** Builds the parameter, binding it to its declaring element. One-shot. */
    GenericElementParameter build(GenericElement genericElement) {
      GenericElementParameter built =
          Partial.of(GenericElementParameter.class, genericElement, simpleName, bounds);
      checkState(element.compareAndSet(null, built),
          "Cannot call build() twice on a %s", Builder.class.getName());
      return built;
    }
  }

  private final GenericElement genericElement;
  private final String simpleName;
  private final ImmutableList<TypeMirror> bounds;

  GenericElementParameter(
      GenericElement genericElement, String simpleName, Iterable<? extends TypeMirror> bounds) {
    this.genericElement = genericElement;
    this.simpleName = simpleName;
    this.bounds = ImmutableList.copyOf(bounds);
  }

  @Override
  public TypeVariableImpl asType() {
    return Partial.of(TypeVariableImpl.class, new AtomicReference<>(this));
  }

  @Override
  public ElementKind getKind() {
    return ElementKind.TYPE_PARAMETER;
  }

  /** Fake elements carry no annotations. */
  @Override
  public List<? extends AnnotationMirror> getAnnotationMirrors() {
    return ImmutableList.of();
  }

  @Override
  public <A extends Annotation> A getAnnotation(Class<A> annotationType) {
    return null;
  }

  /** Type parameters have no modifiers. */
  @Override
  public Set<Modifier> getModifiers() {
    return ImmutableSet.of();
  }

  @Override
  public Name getSimpleName() {
    return new NameImpl(simpleName);
  }

  @Override
  public List<? extends Element> getEnclosedElements() {
    return ImmutableList.of();
  }

  @Override
  public <R, P> R accept(ElementVisitor<R, P> v, P p) {
    return v.visitTypeParameter(this, p);
  }

  @Override
  public GenericElement getGenericElement() {
    return genericElement;
  }

  @Override
  public List<? extends TypeMirror> getBounds() {
    return bounds;
  }

  /** The enclosing element of a type parameter is the element declaring it. */
  @Override
  public GenericElement getEnclosingElement() {
    return genericElement;
  }

  @Override
  public String toString() {
    return simpleName;
  }

  /**
   * Fake implementation of a type variable declared by a {@link GenericElement}.
   */
  public abstract static class TypeVariableImpl implements TypeVariable {

    private final AtomicReference<GenericElementParameter> element;

    TypeVariableImpl(AtomicReference<GenericElementParameter> element) {
      this.element = element;
    }

    @Override
    public TypeKind getKind() {
      return TypeKind.TYPEVAR;
    }

    @Override
    public <R, P> R accept(TypeVisitor<R, P> v, P p) {
      return v.visitTypeVariable(this, p);
    }

    @Override
    public GenericElementParameter asElement() {
      return getImpl("asElement()");
    }

    /** No bounds means Object; one bound is returned as-is; several are unsupported. */
    @Override
    public TypeMirror getUpperBound() {
      GenericElementParameter parameter = getImpl("getUpperBound()");
      if (parameter.bounds.isEmpty()) {
        return newTopLevelClass("java.lang.Object");
      }
      if (parameter.bounds.size() == 1) {
        return getOnlyElement(parameter.bounds);
      }
      throw new UnsupportedOperationException();
    }

    @Override
    public TypeMirror getLowerBound() {
      return NULL;
    }

    @Override
    public String toString() {
      return getImpl("toString()").simpleName;
    }

    /** Dereferences the backing parameter, failing clearly before build() has run. */
    private GenericElementParameter getImpl(String calledMethod) {
      GenericElementParameter impl = element.get();
      checkState(impl != null,
          "Cannot call %s on a TypeVariable returned from a %s before it is built",
          calledMethod,
          GenericElementParameter.Builder.class.getName());
      return impl;
    }
  }
}
| |
package models.restaurant;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import javax.persistence.CascadeType;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.JoinTable;
import javax.persistence.Lob;
import javax.persistence.ManyToMany;
import javax.persistence.ManyToOne;
import javax.persistence.OneToMany;
import javax.persistence.Transient;
import models.lists.Hcategory;
import models.lists.Hcousine;
import models.lists.Hpricerange;
import models.lists.Hrestaurantlicence;
import models.relations.Joinrestaurantcousine;
import models.relations.Joinrestauranthrestaurantcategory;
import models.user.User;
import com.avaje.ebean.Page;
import com.fasterxml.jackson.annotation.JsonManagedReference;
import com.mysql.jdbc.Blob;
import play.db.ebean.Model;
/**
 * Ebean entity representing a restaurant, including its contact details,
 * geo position, category/cuisine join rows and price-range/licence lookups.
 *
 * <p>Field names map directly to database columns via Ebean, so they must not
 * be renamed (including the existing misspellings noted below).
 */
@SuppressWarnings("serial")
@Entity
public class Restaurant extends Model {

    // Ebean finder used for all queries on this entity.
    public static Finder<Long, Restaurant> find = new Finder<Long, Restaurant>(Long.class, Restaurant.class);

    /**
     * Returns one page (10 rows) of restaurants ordered by id ascending.
     *
     * @param page zero-based page index
     */
    public static Page<Restaurant> find(int page) {
        return
        find.where()
        .orderBy("id asc")
        .findPagingList(10)
        .setFetchAhead(false)
        .getPage(page);
    }

    // --- persisted columns (package-private, mapped by Ebean) ---
    @Id
    Long id;
    String name;
    String organisationname;
    String city;
    Integer postcode;
    String street;
    String streetnumber;
    String personinchargesurname;
    String personinchargename;
    String phonenumber;
    String mobilephonenumber;
    String fax;
    String email;
    String webpage;
    Double latitude;
    // NOTE(review): "longtitude" is a misspelling of "longitude", but it is part
    // of the column/API mapping and cannot be renamed safely.
    Double longtitude;
    String extrainfo;
    String imagepath;
    // Raw image bytes stored as a BLOB.
    @Lob
    public byte[] picture;
    Long userid;
    // Transient-style helper lists of join-row ids submitted from forms.
    ArrayList<Integer> joinrestaurantcousine;
    ArrayList<Integer> joinrestauranthrestaurantcategory;

    // --- relations ---
    @OneToMany(mappedBy = "restaurant")
    @JsonManagedReference
    private Set<Joinrestaurantcousine> restaurantcousines = new HashSet<Joinrestaurantcousine>();
    @OneToMany(mappedBy = "restaurant")
    @JsonManagedReference
    private Set<Joinrestauranthrestaurantcategory> restaurantcategories = new HashSet<Joinrestauranthrestaurantcategory>();
    @ManyToOne
    @JoinColumn(name="hpricerangeid", referencedColumnName="id")
    Hpricerange pricerange;
    @ManyToOne
    @JoinColumn(name="hrestaurantlicenceid", referencedColumnName="id")
    Hrestaurantlicence restaurantlicence;

    // --- plain accessors (no logic) ---
    public Long getId() {
        return id;
    }
    public void setId(Long id) {
        this.id = id;
    }
    public String getName() {
        return name;
    }
    public void setName(String name) {
        this.name = name;
    }
    public String getCity() {
        return city;
    }
    public void setCity(String city) {
        this.city = city;
    }
    public Long getUserid() {
        return userid;
    }
    public void setUserid(Long userid) {
        this.userid = userid;
    }
    public Integer getPostcode() {
        return postcode;
    }
    public void setPostcode(Integer postcode) {
        this.postcode = postcode;
    }
    public String getStreet() {
        return street;
    }
    public void setStreet(String street) {
        this.street = street;
    }
    public String getStreetnumber() {
        return streetnumber;
    }
    public void setStreetnumber(String streetnumber) {
        this.streetnumber = streetnumber;
    }
    public String getPersoninchargesurname() {
        return personinchargesurname;
    }
    public void setPersoninchargesurname(String personinchargesurname) {
        this.personinchargesurname = personinchargesurname;
    }
    public String getPersoninchargename() {
        return personinchargename;
    }
    public void setPersoninchargename(String personinchargename) {
        this.personinchargename = personinchargename;
    }
    public String getPhonenumber() {
        return phonenumber;
    }
    public String getImagepath() {
        return imagepath;
    }
    public void setImagepath(String imagepath) {
        this.imagepath = imagepath;
    }
    public void setPhonenumber(String phonenumber) {
        this.phonenumber = phonenumber;
    }
    public String getMobilephonenumber() {
        return mobilephonenumber;
    }
    public String getExtrainfo() {
        return extrainfo;
    }
    public void setExtrainfo(String extrainfo) {
        this.extrainfo = extrainfo;
    }
    public void setMobilephonenumber(String mobilephonenumber) {
        this.mobilephonenumber = mobilephonenumber;
    }
    public byte[] getPicture() {
        return picture;
    }
    public void setPicture(byte[] picture) {
        this.picture = picture;
    }
    public String getFax() {
        return fax;
    }
    public void setFax(String fax) {
        this.fax = fax;
    }
    public String getEmail() {
        return email;
    }
    public void setEmail(String email) {
        this.email = email;
    }
    public Set<Joinrestaurantcousine> getRestaurantcousines() {
        return restaurantcousines;
    }
    public void setRestaurantcousines(Set<Joinrestaurantcousine> restaurantcousines) {
        this.restaurantcousines = restaurantcousines;
    }
    public String getWebpage() {
        return webpage;
    }
    public void setWebpage(String webpage) {
        this.webpage = webpage;
    }
    public Double getLatitude() {
        return latitude;
    }
    public void setLatitude(Double latitude) {
        this.latitude = latitude;
    }
    public Double getLongtitude() {
        return longtitude;
    }
    public void setLongtitude(Double longtitude) {
        this.longtitude = longtitude;
    }
    // NOTE(review): "cousie" accessors below are misspelled duplicates of the
    // "cousine" accessors further down; both target the same field and are kept
    // because external callers may rely on either spelling.
    public ArrayList<Integer> getJoinrestaurantcousie() {
        return joinrestaurantcousine;
    }
    public void setJoinrestaurantcousie(ArrayList<Integer> joinrestaurantcousie) {
        this.joinrestaurantcousine = joinrestaurantcousie;
    }
    public ArrayList<Integer> getJoinrestauranthrestaurantcategory() {
        return joinrestauranthrestaurantcategory;
    }
    public void setJoinrestauranthrestaurantcategory(
    ArrayList<Integer> joinrestauranthrestaurantcategory) {
        this.joinrestauranthrestaurantcategory = joinrestauranthrestaurantcategory;
    }
    public ArrayList<Integer> getJoinrestaurantcousine() {
        return joinrestaurantcousine;
    }
    public void setJoinrestaurantcousine(ArrayList<Integer> joinrestaurantcousine) {
        this.joinrestaurantcousine = joinrestaurantcousine;
    }
    public Set<Joinrestauranthrestaurantcategory> getRestaurantcategories() {
        return restaurantcategories;
    }
    public void setRestaurantcategories(
    Set<Joinrestauranthrestaurantcategory> restaurantcategories) {
        this.restaurantcategories = restaurantcategories;
    }
    public Hpricerange getPricerange() {
        return pricerange;
    }
    public void setPricerange(Hpricerange pricerange) {
        this.pricerange = pricerange;
    }
    public Hrestaurantlicence getRestaurantlicence() {
        return restaurantlicence;
    }
    public void setRestaurantlicence(Hrestaurantlicence restaurantlicence) {
        this.restaurantlicence = restaurantlicence;
    }
    public String getOrganisationname() {
        return organisationname;
    }
    public void setOrganisationname(String organisationname) {
        this.organisationname = organisationname;
    }
    // No-arg constructor required by Ebean.
    public Restaurant () {
    }
}
| |
package io.userapp.client.android;
import io.userapp.client.UserAppClient;
import io.userapp.client.UserAppClient.Result;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.Set;
import java.util.UUID;
import com.google.gson.Gson;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.SharedPreferences;
import android.content.pm.ApplicationInfo;
import android.content.pm.PackageManager;
import android.os.AsyncTask;
import android.support.v4.content.LocalBroadcastManager;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentTransaction;
import android.support.v4.app.FragmentActivity;
public class UserApp {
// Intent actions used to broadcast and receive changes in session state
// (login, logout and user-profile updates) via LocalBroadcastManager.
public static final String ACTION_SESSION_LOGIN = "io.userapp.client.android.SESSION_LOGIN";
public static final String ACTION_SESSION_LOGOUT = "io.userapp.client.android.SESSION_LOGOUT";
public static final String ACTION_USER_UPDATE = "io.userapp.client.android.USER_UPDATE";
// SharedPreferences file name plus the keys stored within it.
public static final String PREFERENCE_KEY = "io.userapp.client.android.PREFERENCE_FILE_KEY";
public static final String SESSION_TOKEN_KEY = "io.userapp.client.android.SESSION_TOKEN_KEY";
public static final String TOKEN_ID_KEY = "io.userapp.client.android.TOKEN_ID_KEY";
public static final String INSTALLATION_KEY = "io.userapp.client.android.INSTALLATION_KEY";
public static final String USER_KEY = "io.userapp.client.android.USER_KEY";
// URI to redirect to after an OAuth authorization
public static final String OAUTH_REDIRECT_URI = "userapp-oauth:///";
/** Print a message to the console if debugging is turned on */
private static void log(String message) {
    // Logging is currently compiled out; re-enable the println for debugging.
    //System.out.println(message);
}
/**
* Class that keeps track of the user session by
* holding the current user and its state
*/
public static class Session {
// NOTE(review): most of this state is plain mutable and is touched both from
// UI code and from broadcast receivers -- assumed to run on the main thread only.
private FragmentActivity activity = null; // The activity the session lives in
private boolean _isResumed = false; // Is the activity resumed or paused?
private UserApp.UIHelper uiHelper = null; // The UIHelper that is attached to this session
ArrayList<UserApp.Session.StatusCallback> callbacks = new ArrayList<UserApp.Session.StatusCallback>(); // Callbacks when the state changes (login/logout)
ArrayList<UserApp.Session.UserCallback> userCallbacks = new ArrayList<UserApp.Session.UserCallback>(); // Callbacks when the user profile updates
private LoginTask loginTask = null; // Async task to login
private LogoutTask logoutTask = null; // Async task to end the session with UserApp
private LoadUserTask loadUserTask = null; // Async task to reload the user profile
private SaveUserTask saveUserTask = null; // Async task to save the user profile
private GetOAuthUrlTask getOAuthUrlTask = null; // Async task to get an OAuth authorization url
private SharedPreferences sharedPreferences = null; // Shared preferences to store state data (token, user object, etc.)
public UserAppClient.API api; // Client API for UserApp
private String installationKey = null; // Unique (UUID) key for this phone (used to name the token at UserApp)
private Boolean isBroadcaster = false; // If this instance is the one broadcasting an event
private Boolean isAuthenticated = false; // Is the session authenticated or not?
public User user; // The logged in user
public String token; // Session token
/** Receiver for login events broadcast via ACTION_SESSION_LOGIN. */
private BroadcastReceiver loginReceiver = new BroadcastReceiver() {
    @Override
    public void onReceive(Context context, Intent intent) {
        String error = intent.getStringExtra("ERROR_MESSAGE");
        // Only non-broadcasting instances need to reload state; the broadcaster
        // already holds the fresh user object.
        if (!isBroadcaster) {
            user = deserializeUser();
        } else {
            isBroadcaster = false;
        }
        // Sync the token from shared preferences into the API client.
        setToken(sharedPreferences.getString(SESSION_TOKEN_KEY, null));
        // NOTE(review): isAuthenticated becomes true even when an error message
        // was broadcast -- confirm this is intended.
        isAuthenticated = true;
        if (error != null) {
            callCallbacks(true, new Exception(error));
        } else {
            callCallbacks(true, null);
        }
    }
};
/** Receiver for logout events broadcast via ACTION_SESSION_LOGOUT. */
private BroadcastReceiver logoutReceiver = new BroadcastReceiver() {
    @Override
    public void onReceive(Context context, Intent intent) {
        if (!isBroadcaster) {
            // Clear the API token
            setToken(null);
            // Clear the persisted user
            serializeUser(null);
        } else {
            isBroadcaster = false;
        }
        isAuthenticated = false;
        callCallbacks(false, null);
    }
};
/** Receiver for user-profile update events broadcast via ACTION_USER_UPDATE. */
private BroadcastReceiver userUpdateReceiver = new BroadcastReceiver() {
    @Override
    public void onReceive(Context context, Intent intent) {
        UserApp.log("User has been updated.");
        if (!isBroadcaster) {
            // Reload the user from its serialized form.
            user = deserializeUser();
        } else {
            isBroadcaster = false;
        }
        callUserCallbacks(user, null);
    }
};
/** Interface for state callbacks, invoked on login/logout state changes. */
public static interface StatusCallback {
    public void call(Boolean authenticated, Exception exception);
}
/** Interface for user update callbacks, invoked when the profile is (re)loaded. */
public static interface UserCallback {
    public void call(User user, Exception exception);
}
/** Interface for OAuth URL callbacks, invoked with the authorization url. */
public static interface OAuthUrlCallback {
    public void call(String url, Exception exception);
}
/**
 * Result class to hold async task results: a value on success and/or an
 * exception on failure (either may be null).
 */
private static class AsyncResult {
    public Object value = null;
    public Exception exception = null;
    public AsyncResult(Object value, Exception exception) {
        this.value = value;
        this.exception = exception;
    }
}
/** Constructors for Session */
public Session(FragmentActivity activity) {
    this(activity, null);
}
/**
 * Creates a session bound to the given activity, opening the shared
 * preferences file and initialising the UserApp API client.
 *
 * @param activity the host activity the session lives in
 * @param callback optional status callback registered immediately (may be null)
 */
public Session(FragmentActivity activity, UserApp.Session.StatusCallback callback) {
    this.activity = activity;
    this.sharedPreferences = this.activity.getSharedPreferences(UserApp.PREFERENCE_KEY, Context.MODE_PRIVATE);
    // Load App Id from AndroidManifest.xml
    this.api = new UserAppClient.API(UserApp.getAppId(this.activity));
    if (callback != null) {
        this.addCallback(callback);
    }
}
/**
 * Lazily loads (or generates and persists) the per-installation UUID that is
 * used to name this device's tokens at UserApp.
 */
private String getInstallationKey() {
    if (this.installationKey != null) {
        return this.installationKey;
    }
    if (this.sharedPreferences.contains(UserApp.INSTALLATION_KEY)) {
        this.installationKey = this.sharedPreferences.getString(UserApp.INSTALLATION_KEY, "");
    } else {
        // First run on this install: mint a UUID and persist it.
        this.installationKey = UUID.randomUUID().toString();
        SharedPreferences.Editor editor = this.sharedPreferences.edit();
        editor.putString(UserApp.INSTALLATION_KEY, this.installationKey);
        editor.commit();
    }
    return this.installationKey;
}
/**
 * As this SDK uses fixed tokens to preserve a
 * persistent session, we need to save the id of
 * that token to later be able to delete it at logout
 */
private void setPersistentTokenId(String id) {
    // Save it in shared preferences
    SharedPreferences.Editor editor = this.sharedPreferences.edit();
    editor.putString(UserApp.TOKEN_ID_KEY, id);
    editor.commit();
}
/** Returns the stored token id, or null if none has been saved yet. */
private String getPersistentTokenId() {
    // Get it from shared preferences
    return this.sharedPreferences.getString(UserApp.TOKEN_ID_KEY, null);
}
/** Create a new UI helper and bind it to this session */
public UserApp.UIHelper createUIHelper() {
    // -1 means "no fragment id" for both the login and the main fragment.
    return this.createUIHelper(-1, -1);
}
/**
 * Creates a UI helper wired to the given fragment ids and remembers it as
 * this session's active helper.
 */
public UserApp.UIHelper createUIHelper(int loginFragmentId, int mainFragmentId, int ... otherFragmentIds) {
    this.uiHelper = new UserApp.UIHelper(this, loginFragmentId, mainFragmentId, otherFragmentIds);
    return this.uiHelper;
}
/** Get the attached activity */
public FragmentActivity getActivity() {
    return this.activity;
}
/** Invokes every registered status callback with the given state. */
private void callCallbacks(Boolean authenticated, Exception exception) {
    synchronized (this.callbacks) {
        for (UserApp.Session.StatusCallback cb : this.callbacks) {
            cb.call(authenticated, exception);
        }
    }
}
/** Registers a status callback; nulls and duplicates are ignored. */
public void addCallback(UserApp.Session.StatusCallback callback) {
    if (callback == null) {
        return;
    }
    synchronized (this.callbacks) {
        if (!this.callbacks.contains(callback)) {
            this.callbacks.add(callback);
        }
    }
}
/** Unregisters a previously added status callback. */
public void removeCallback(UserApp.Session.StatusCallback callback) {
    synchronized (this.callbacks) {
        this.callbacks.remove(callback);
    }
}
/** Invokes every registered user callback with the given profile. */
private void callUserCallbacks(User user, Exception exception) {
    synchronized (this.userCallbacks) {
        for (UserApp.Session.UserCallback cb : this.userCallbacks) {
            cb.call(user, exception);
        }
    }
}
/** Registers a user callback; nulls and duplicates are ignored. */
public void addUserCallback(UserApp.Session.UserCallback callback) {
    if (callback == null) {
        return;
    }
    synchronized (this.userCallbacks) {
        if (!this.userCallbacks.contains(callback)) {
            this.userCallbacks.add(callback);
        }
    }
}
/** Unregisters a previously added user callback. */
public void removeUserCallback(UserApp.Session.UserCallback callback) {
    synchronized (this.userCallbacks) {
        this.userCallbacks.remove(callback);
    }
}
/** Return true if the activity is resumed (i.e. between onResume and onPause) */
public Boolean isResumed() {
    return this._isResumed;
}
/** onResume event, should be called from the main activity */
public void onResume() {
    this._isResumed = true;
    // Check in preferences if there is a token
    String token = this.sharedPreferences.getString(UserApp.SESSION_TOKEN_KEY, null);
    if (token != null) {
        // Use the fixed token with the API. NOTE(review): a stored token is
        // treated as "logged in" without re-validating it against the server.
        setToken(token);
        user = deserializeUser();
        this.callCallbacks(true, null);
    } else {
        this.callCallbacks(false, null);
    }
    // Register for session broadcasts; balanced by the unregister calls in onPause().
    LocalBroadcastManager.getInstance(this.activity).registerReceiver(this.loginReceiver, new IntentFilter(UserApp.ACTION_SESSION_LOGIN));
    LocalBroadcastManager.getInstance(this.activity).registerReceiver(logoutReceiver, new IntentFilter(UserApp.ACTION_SESSION_LOGOUT));
    LocalBroadcastManager.getInstance(this.activity).registerReceiver(userUpdateReceiver, new IntentFilter(UserApp.ACTION_USER_UPDATE));
}
/** onPause event, should be called from the main activity */
public void onPause() {
    this._isResumed = false;
    // Unregister the receivers that onResume() registered.
    LocalBroadcastManager.getInstance(this.activity).unregisterReceiver(this.loginReceiver);
    LocalBroadcastManager.getInstance(this.activity).unregisterReceiver(this.logoutReceiver);
    LocalBroadcastManager.getInstance(this.activity).unregisterReceiver(this.userUpdateReceiver);
}
/** Broadcasts an action to all sessions in this process (LocalBroadcastManager). */
private void postAction(String action, Exception exception) {
    final Intent intent = new Intent(action);
    if (exception != null) {
        intent.putExtra("ERROR_MESSAGE", exception.getMessage());
    }
    // Mark ourselves as the origin so our own receivers skip the reload step.
    // NOTE(review): isBroadcaster is plain shared state also cleared by the
    // receivers; interleaved broadcasts could reset it early -- confirm.
    this.isBroadcaster = true;
    LocalBroadcastManager.getInstance(this.getActivity()).sendBroadcast(intent);
}
/** Method to login a user and start the session */
public void login(String login, String password) {
this.login(login, password, null);
}
/**
 * Authenticate a user and start the session. Does nothing while a previous
 * login attempt is still running.
 *
 * @param login    user login name
 * @param password user password
 * @param callback invoked with the outcome; may be null
 */
public void login(String login, String password, UserApp.Session.StatusCallback callback) {
    if (loginTask != null) {
        return; // a login attempt is already in flight
    }
    LoginTask task = new LoginTask();
    task.login = login;
    task.password = password;
    task.callback = callback;
    loginTask = task;
    task.execute();
}
/** Start the session from an existing token; convenience overload without a callback. */
public void loginWithToken(String token) {
    loginWithToken(token, null);
}
/**
 * Start the session from an existing token instead of credentials.
 * Does nothing while a previous login attempt is still running.
 *
 * @param token    a valid UserApp token
 * @param callback invoked with the outcome; may be null
 */
public void loginWithToken(String token, UserApp.Session.StatusCallback callback) {
    if (loginTask != null) {
        return; // a login attempt is already in flight
    }
    LoginTask task = new LoginTask();
    task.token = token;
    task.callback = callback;
    loginTask = task;
    task.execute();
}
/**
 * Completion handler for {@link LoginTask}; runs on the main thread.
 * On success the session token is persisted and
 * {@link UserApp#ACTION_SESSION_LOGIN} is broadcast; the optional callback is
 * always invoked first with (success, exception).
 *
 * @param callback optional status callback; may be null
 * @param result   fixed token in {@code result.value} on success, else exception
 */
private void onLoginCompleted(UserApp.Session.StatusCallback callback, AsyncResult result) {
    if (callback != null) {
        callback.call(result.value != null, result.exception);
    }
    if (result.value != null) {
        // Persist the token. apply() instead of commit(): this handler runs on
        // the main thread and commit() blocks on synchronous disk I/O.
        SharedPreferences.Editor editor = this.sharedPreferences.edit();
        editor.putString(UserApp.SESSION_TOKEN_KEY, (String) result.value);
        editor.apply();
        // Broadcast login success
        this.postAction(UserApp.ACTION_SESSION_LOGIN, result.exception);
        UserApp.log("Logged in.");
    } else if (result.exception != null) {
        UserApp.log("Login failed: " + result.exception.getMessage());
    }
}
/**
 * Background task that authenticates against UserApp and establishes a
 * persistent session by reusing or creating a named "fixed" token.
 * When {@code token} is null a regular credential login is performed first;
 * otherwise the supplied token is used directly. The result carries the fixed
 * token on success or the failure exception.
 */
private class LoginTask extends AsyncTask<Void, Void, AsyncResult> {
    // Credentials for a regular login (only used when token == null).
    public String login, password;
    // Pre-existing session token; when set, the credential login is skipped.
    public String token = null;
    // Optional observer notified with the final outcome.
    public UserApp.Session.StatusCallback callback;
    @Override
    protected AsyncResult doInBackground(Void... params) {
        try {
            if (this.token == null) {
                setToken(null);
                // Login with the regular method, and then create a
                // fixed token to keep the session alive forever
                UserApp.log("Logging in...");
                UserAppClient.Result result = api.method("user.login")
                    .parameter("login", login)
                    .parameter("password", password)
                    .call();
                // Check locks: a non-empty list means the account is blocked.
                ArrayList locks = result.get("locks").toArray();
                if (!locks.isEmpty()) {
                    return new AsyncResult(null, new Exception("Your account has been locked."));
                }
            } else {
                // Set token
                setToken(this.token);
            }
            UserApp.log("Logged in. Establishing a persistent session using a fixed token...");
            // Look for an existing fixed token previously created for this installation.
            String fixedToken = null;
            ArrayList tokens = api.method("token.search")
                .parameter("fields", new UserAppClient.Array("token_id", "name", "value"))
                .parameter("page_size", 100)
                .call().get("items").toArray();
            for (Object token : tokens) {
                String id = (String) ((HashMap) token).get("token_id");
                String name = (String) ((HashMap) token).get("name");
                String value = (String) ((HashMap) token).get("value");
                // Tokens are matched by name against the per-device installation key.
                if (name.equalsIgnoreCase(getInstallationKey())) {
                    // Found a token for this installation
                    fixedToken = value;
                    setPersistentTokenId(id);
                    UserApp.log("Found an existing token with name '" + getInstallationKey() + "'.");
                    break;
                }
            }
            if (fixedToken == null) {
                // Create a new fixed token and use that instead
                UserApp.log("Creating a new token with name '" + getInstallationKey() + "'.");
                UserAppClient.Result saveResult = api.method("token.save")
                    .parameter("name", getInstallationKey())
                    .parameter("enabled", true)
                    .call();
                // Get the value, and save the id for later use (i.e. logout)
                if (saveResult.get("value").exists()) {
                    fixedToken = saveResult.get("value").toString();
                    setPersistentTokenId(saveResult.get("token_id").toString());
                }
            }
            if (fixedToken == null) {
                // Something went wrong, go with the session token instead
                fixedToken = api.getOptions().token;
            }
            // Load the current user profile and cache it locally.
            UserAppClient.Result userResult = api.method("user.get")
                .parameter("user_id", "self")
                .call()
                .get(0);
            user = parseUser(userResult);
            serializeUser(user);
            return new AsyncResult(fixedToken, null);
        } catch (Exception exception) {
            // Any API/network failure is surfaced through the result object.
            return new AsyncResult(null, exception);
        }
    }
    @Override
    protected void onPostExecute(final AsyncResult result) {
        // Clear the guard before notifying so a callback may start a new login.
        loginTask = null;
        onLoginCompleted(this.callback, result);
    }
    @Override
    protected void onCancelled() {
        loginTask = null;
        onLoginCompleted(this.callback, new AsyncResult(null, new Exception("Login task was canceled.")));
    }
}
/** End the session, removing the fixed token from UserApp as well. */
public void logout() {
    logout(false);
}
/**
 * End the session.
 *
 * @param localOnly if true, no token gets removed from UserApp, just from the client
 */
public void logout(Boolean localOnly) {
    if (logoutTask != null) {
        return; // a logout is already in progress
    }
    UserApp.log("Logging out...");
    // Broadcast logout success (before the logout task has finished for good UX)
    postAction(UserApp.ACTION_SESSION_LOGOUT, null);
    if (localOnly) {
        // Only forget the token locally.
        onLogoutCompleted();
    } else {
        // Also revoke the fixed token at UserApp, in the background.
        LogoutTask task = new LogoutTask();
        task.tokenId = getPersistentTokenId();
        logoutTask = task;
        task.execute();
    }
}
/**
 * Completion handler for logout: clears the stored token and token id, resets
 * the API token and drops the cached user. Runs on the main thread.
 */
private void onLogoutCompleted() {
    // Remove token from shared preferences. apply() instead of commit():
    // this runs on the main thread and commit() blocks on disk I/O.
    SharedPreferences.Editor editor = this.sharedPreferences.edit();
    editor.remove(UserApp.SESSION_TOKEN_KEY);
    editor.remove(UserApp.TOKEN_ID_KEY);
    editor.apply();
    // Clear the API token
    setToken(null);
    // Clear user
    serializeUser(null);
    UserApp.log("Logged out.");
}
/** Background task that revokes the persistent (fixed) token at UserApp. */
private class LogoutTask extends AsyncTask<Void, Void, Boolean> {
    // Id of the fixed token to remove; null means there is nothing to revoke.
    public String tokenId;
    @Override
    protected Boolean doInBackground(Void... params) {
        if (tokenId == null) {
            return false;
        }
        try {
            // Log out by deleting the fixed token from UserApp.
            api.method("token.remove").parameter("token_id", tokenId).call();
            return true;
        } catch (Exception exception) {
            UserApp.log("Error: " + exception.getMessage());
            return false;
        }
    }
    @Override
    protected void onPostExecute(final Boolean result) {
        UserApp.log(result
            ? "Removed token from UserApp."
            : "Failed to remove token from UserApp.");
        logoutTask = null;
        onLogoutCompleted();
    }
    @Override
    protected void onCancelled() {
        UserApp.log("Failed to remove token from UserApp: Task cancelled.");
        logoutTask = null;
        onLogoutCompleted();
    }
}
/**
 * Build a {@link User} from a UserApp "user.get" API result.
 * Missing scalar fields fall back to defaults; properties, permissions,
 * features, locks and the optional subscription are copied over.
 *
 * @param result one user element of the API response
 * @return the populated user
 */
private User parseUser(UserAppClient.Result result) {
    User user = new User();
    user.user_id = getStringResult(result, "user_id", "self");
    user.first_name = getStringResult(result, "first_name", "");
    user.last_name = getStringResult(result, "last_name", "");
    user.email = getStringResult(result, "email", "");
    user.email_verified = getBooleanResult(result, "email_verified", false);
    user.login = getStringResult(result, "login", "");
    user.ip_address = getStringResult(result, "ip_address", "");
    user.password = getStringResult(result, "password", "");
    // Timestamps arrive as epoch SECONDS. Multiply as long (1000L): the
    // previous int multiplication overflowed for any timestamp later than
    // ~25 days after 1970, producing garbage dates.
    user.last_login_at = new Date(getIntegerResult(result, "last_login_at", 0) * 1000L);
    user.updated_at = new Date(getIntegerResult(result, "updated_at", 0) * 1000L);
    user.created_at = new Date(getIntegerResult(result, "created_at", 0) * 1000L);
    // Fill properties (value is kept as the raw result object)
    Set<String> properties = result.get("properties").toHashMap().keySet();
    for (String propertyName : properties) {
        Property property = new Property();
        property.value = result.get("properties").get(propertyName).get("value").result;
        property.override = result.get("properties").get(propertyName).get("override").toBoolean();
        user.properties.put(propertyName, property);
    }
    // Fill permissions
    Set<String> permissions = result.get("permissions").toHashMap().keySet();
    for (String permissionName : permissions) {
        Permission permission = new Permission();
        permission.value = result.get("permissions").get(permissionName).get("value").toBoolean();
        permission.override = result.get("permissions").get(permissionName).get("override").toBoolean();
        user.permissions.put(permissionName, permission);
    }
    // Fill features
    Set<String> features = result.get("features").toHashMap().keySet();
    for (String featureName : features) {
        Feature feature = new Feature();
        feature.value = result.get("features").get(featureName).get("value").toBoolean();
        feature.override = result.get("features").get(featureName).get("override").toBoolean();
        user.features.put(featureName, feature);
    }
    // Fill locks
    ArrayList locks = result.get("locks").toArray();
    for (Object lock : locks) {
        Lock l = new Lock();
        l.type = ((HashMap) lock).get("type").toString();
        l.reason = ((HashMap) lock).get("reason").toString();
        l.issued_by_user_id = ((HashMap) lock).get("issued_by_user_id").toString();
        // Same epoch-seconds overflow fix as above: widen before multiplying.
        l.created_at = new Date(((Result) ((HashMap) lock).get("created_at")).toInteger() * 1000L);
        user.locks.add(l);
    }
    // Subscription (optional)
    if (result.get("subscription").exists()) {
        user.subscription = new Subscription();
        user.subscription.price_list_id = result.get("subscription").get("price_list_id").toString();
        user.subscription.plan_id = result.get("subscription").get("plan_id").toString();
        user.subscription.override = result.get("subscription").get("override").toBoolean();
    }
    return user;
}
/** Read a string field from the result, falling back to a default when absent. */
private String getStringResult(UserAppClient.Result result, String key, String defaultValue) {
    UserAppClient.Result field = result.get(key);
    return field.exists() ? field.toString() : defaultValue;
}
/** Read an integer field from the result, falling back to a default when absent. */
private Integer getIntegerResult(UserAppClient.Result result, String key, Integer defaultValue) {
    UserAppClient.Result field = result.get(key);
    return field.exists() ? field.toInteger() : defaultValue;
}
/** Read a boolean field from the result, falling back to a default when absent. */
private Boolean getBooleanResult(UserAppClient.Result result, String key, Boolean defaultValue) {
    UserAppClient.Result field = result.get(key);
    return field.exists() ? field.toBoolean() : defaultValue;
}
/**
 * Persist the user as JSON in shared preferences. Called from background
 * tasks as well, so the synchronous commit() is acceptable here.
 */
private void serializeUser(User user) {
    sharedPreferences.edit()
        .putString(UserApp.USER_KEY, new Gson().toJson(user))
        .commit();
}
/** Restore the cached user from shared preferences; null when nothing was stored. */
private User deserializeUser() {
    String json = sharedPreferences.getString(UserApp.USER_KEY, "");
    return new Gson().fromJson(json, User.class);
}
/**
 * Re-fetch the logged in user from UserApp in the background.
 * Does nothing while a previous reload is still running.
 *
 * @param callback invoked with the refreshed user or an error; may be null
 */
public void reloadUser(UserApp.Session.UserCallback callback) {
    if (loadUserTask != null) {
        return; // a reload is already in flight
    }
    LoadUserTask task = new LoadUserTask();
    task.callback = callback;
    loadUserTask = task;
    task.execute();
}
/** Background task that fetches the current user ("self") from UserApp. */
private class LoadUserTask extends AsyncTask<Void, Void, Exception> {
    // Optional observer for the outcome.
    public UserApp.Session.UserCallback callback;
    @Override
    protected Exception doInBackground(Void... params) {
        try {
            UserAppClient.Result self = api.method("user.get")
                .parameter("user_id", "self")
                .call()
                .get(0);
            user = parseUser(self);
            serializeUser(user);
            return null; // success
        } catch (Exception exception) {
            UserApp.log("Error: " + exception.getMessage());
            return exception;
        }
    }
    @Override
    protected void onPostExecute(final Exception exception) {
        loadUserTask = null;
        onLoadUserCompleted(callback, exception);
    }
    @Override
    protected void onCancelled() {
        loadUserTask = null;
        onLoadUserCompleted(callback, new Exception("Task canceled."));
    }
}
/**
 * Completion handler for {@link LoadUserTask}: notifies the optional callback
 * and broadcasts {@link UserApp#ACTION_USER_UPDATE}.
 */
private void onLoadUserCompleted(UserApp.Session.UserCallback callback, Exception exception) {
    if (callback != null) {
        callback.call(user, exception);
    }
    // Broadcast user update (exception is attached when the reload failed).
    postAction(UserApp.ACTION_USER_UPDATE, exception);
}
/**
 * Persist a user to UserApp in the background.
 * Does nothing while a previous save is still running.
 *
 * @param user     the user state to save
 * @param callback invoked with the saved user or an error; may be null
 */
public void saveUser(User user, UserApp.Session.UserCallback callback) {
    if (saveUserTask != null) {
        return; // a save is already in flight
    }
    SaveUserTask task = new SaveUserTask();
    task.newUser = user;
    task.callback = callback;
    saveUserTask = task;
    task.execute();
}
/**
 * Completion handler for {@link SaveUserTask}: on success adopts the saved
 * user, caches it and broadcasts {@link UserApp#ACTION_USER_UPDATE}; the
 * optional callback is then notified either way.
 */
private void onSaveCompleted(UserApp.Session.UserCallback callback, AsyncResult result) {
    if (result.value != null) {
        user = (User) result.value;
        serializeUser(user);
        // Broadcast user update success
        postAction(UserApp.ACTION_USER_UPDATE, result.exception);
    }
    if (callback != null) {
        callback.call(user, result.exception);
    }
}
/**
 * Background task that persists a {@link User} via the "user.save" API.
 * Mandatory name/email fields are always sent; optional scalars (login,
 * user_id, ip_address, password) only when non-empty; property, permission
 * and feature maps and the subscription become nested {value, override}
 * structs. The server's response is parsed back into the task result.
 */
private class SaveUserTask extends AsyncTask<Void, Void, AsyncResult> {
    // The user state to push to UserApp.
    public User newUser;
    // Optional observer for the saved result.
    public UserApp.Session.UserCallback callback;
    @Override
    protected AsyncResult doInBackground(Void... params) {
        try {
            // Save the user
            UserAppClient.API saveCall = api.method("user.save")
                .parameter("first_name", newUser.first_name)
                .parameter("last_name", newUser.last_name)
                .parameter("email", newUser.email)
                .parameter("email_verified", newUser.email_verified);
            // Optional scalar fields: only sent when present and non-empty.
            if (newUser.login != null && !newUser.login.isEmpty()) {
                saveCall.parameter("login", newUser.login);
            }
            if (newUser.user_id != null && !newUser.user_id.isEmpty()) {
                saveCall.parameter("user_id", newUser.user_id);
            }
            if (newUser.ip_address != null && !newUser.ip_address.isEmpty()) {
                saveCall.parameter("ip_address", newUser.ip_address);
            }
            if (newUser.password != null && !newUser.password.isEmpty()) {
                saveCall.parameter("password", newUser.password);
            }
            // Properties: each entry becomes a {value, override} struct.
            if (newUser.properties != null) {
                UserAppClient.Struct propertyStruct = new UserAppClient.Struct();
                Set<String> propertyKeys = newUser.properties.keySet();
                for (String key : propertyKeys) {
                    Property property = newUser.properties.get(key);
                    propertyStruct.parameter(key, new UserAppClient.Struct()
                        .parameter("value", property.value)
                        .parameter("override", property.override)
                    );
                }
                saveCall.parameter("properties", propertyStruct);
            }
            // Permissions: same {value, override} shape.
            if (newUser.permissions != null) {
                UserAppClient.Struct permissionStruct = new UserAppClient.Struct();
                Set<String> permissionKeys = newUser.permissions.keySet();
                for (String key : permissionKeys) {
                    Permission permission = newUser.permissions.get(key);
                    permissionStruct.parameter(key, new UserAppClient.Struct()
                        .parameter("value", permission.value)
                        .parameter("override", permission.override)
                    );
                }
                saveCall.parameter("permissions", permissionStruct);
            }
            // Features: same {value, override} shape.
            if (newUser.features != null) {
                UserAppClient.Struct featureStruct = new UserAppClient.Struct();
                Set<String> featureKeys = newUser.features.keySet();
                for (String key : featureKeys) {
                    Feature feature = newUser.features.get(key);
                    featureStruct.parameter(key, new UserAppClient.Struct()
                        .parameter("value", feature.value)
                        .parameter("override", feature.override)
                    );
                }
                saveCall.parameter("features", featureStruct);
            }
            // Subscription (optional)
            if (newUser.subscription != null) {
                saveCall.parameter("subscription", new UserAppClient.Struct()
                    .parameter("price_list_id", newUser.subscription.price_list_id)
                    .parameter("plan_id", newUser.subscription.plan_id)
                    .parameter("override", newUser.subscription.override)
                );
            }
            // Execute and return the server's canonical view of the user.
            UserAppClient.Result result = saveCall.call();
            User savedUser = parseUser(result);
            return new AsyncResult(savedUser, null);
        } catch (Exception exception) {
            return new AsyncResult(null, exception);
        }
    }
    @Override
    protected void onPostExecute(final AsyncResult result) {
        // Clear the guard before notifying so a callback may start another save.
        saveUserTask = null;
        onSaveCompleted(this.callback, result);
    }
    @Override
    protected void onCancelled() {
        saveUserTask = null;
        onSaveCompleted(this.callback, new AsyncResult(null, new Exception("Save user task was canceled.")));
    }
}
/**
 * Request an OAuth authorization url from UserApp in the background.
 * Does nothing while a previous request is still running.
 *
 * @param providerId  OAuth provider identifier
 * @param redirectURI redirect target after authorization
 * @param scopes      requested scopes
 * @param callback    invoked with the url or an error; may be null
 */
public void getOAuthUrl(String providerId, String redirectURI, String scopes, UserApp.Session.OAuthUrlCallback callback) {
    if (getOAuthUrlTask != null) {
        return; // a request is already in flight
    }
    UserApp.log("Getting OAuth authorization url...");
    GetOAuthUrlTask task = new GetOAuthUrlTask();
    task.providerId = providerId;
    task.redirectURI = redirectURI;
    task.scopes = scopes;
    task.callback = callback;
    getOAuthUrlTask = task;
    task.execute();
}
/** Completion handler for {@link GetOAuthUrlTask}: forwards url or error to the callback. */
private void onGetOAuthUrlCompleted(UserApp.Session.OAuthUrlCallback callback, AsyncResult result) {
    if (callback == null) {
        return;
    }
    callback.call((String) result.value, result.exception);
}
/** Background task that fetches an OAuth authorization url from UserApp. */
private class GetOAuthUrlTask extends AsyncTask<Void, Void, AsyncResult> {
    public String providerId = null;
    public String redirectURI = null;
    public String scopes = "";
    public UserApp.Session.OAuthUrlCallback callback;
    @Override
    protected AsyncResult doInBackground(Void... params) {
        // Validate required arguments and RETURN the failure as the task
        // result so the callback fires exactly once, on the main thread.
        // The previous code invoked the callback directly here (on the
        // background thread) and then fell through to the API call with
        // null parameters.
        if (providerId == null || redirectURI == null) {
            return new AsyncResult(null, new Exception("Missing providerId or redirectURI."));
        }
        try {
            setToken(null); // clear any existing session token before the request
            UserAppClient.Result result = api.method("oauth.getAuthorizationUrl")
                .parameter("provider_id", providerId)
                .parameter("redirect_uri", redirectURI)
                .parameter("scopes", scopes)
                .call();
            return new AsyncResult(result.get("authorization_url").toString(), null);
        } catch (Exception exception) {
            return new AsyncResult(null, exception);
        }
    }
    @Override
    protected void onPostExecute(final AsyncResult result) {
        getOAuthUrlTask = null;
        onGetOAuthUrlCompleted(this.callback, result);
    }
    @Override
    protected void onCancelled() {
        getOAuthUrlTask = null;
        onGetOAuthUrlCompleted(this.callback, new AsyncResult(null, new Exception("Get OAuth url task was canceled.")));
    }
}
/**
 * Check whether the logged in user has ALL of the given permissions.
 *
 * @param permissions space-separated permission names
 * @return true only if a user is logged in and every named permission
 *         exists with a true value
 */
public Boolean hasPermission(String permissions) {
    if (this.user == null || permissions.isEmpty()) {
        return false;
    }
    for (String name : permissions.split(" ")) {
        Permission permission = this.user.permissions.get(name);
        // Boolean.TRUE.equals is null-safe: a missing entry or a null value
        // counts as "not granted" instead of throwing an NPE on unboxing
        // (the previous `permission.value == true` unboxed a possibly-null Boolean).
        if (permission == null || !Boolean.TRUE.equals(permission.value)) {
            return false;
        }
    }
    return true;
}
/**
 * Check whether the logged in user has ALL of the given features.
 *
 * @param features space-separated feature names
 * @return true only if a user is logged in and every named feature
 *         exists with a true value
 */
public Boolean hasFeature(String features) {
    if (this.user == null || features.isEmpty()) {
        return false;
    }
    for (String name : features.split(" ")) {
        Feature feature = this.user.features.get(name);
        // Null-safe check: missing entry or null value counts as "off"
        // (the previous `feature.value == true` could NPE on unboxing).
        if (feature == null || !Boolean.TRUE.equals(feature.value)) {
            return false;
        }
    }
    return true;
}
/**
 * Point the API client (and this session) at the given token.
 *
 * @param token the new token, or null to clear authentication
 */
private void setToken(String token) {
    UserAppClient.ClientOptions options = api.getOptions();
    options.token = token;
    api.setOptions(options);
    this.token = token;
}
}
/**
 * Class that facilitates UI changes such as hiding and showing fragments
 * based on the session's authentication state.
 */
public static class UIHelper {
    private UserApp.Session session = null;
    private int loginFragmentId = -1;
    private int mainFragmentId = -1;
    private int[] otherFragmentIds;
    public UIHelper(UserApp.Session session) {
        this(session, -1, -1);
    }
    /** Create a new UIHelper and attach it to the session */
    public UIHelper(UserApp.Session session, int loginFragmentId, int mainFragmentId, int ... otherFragmentIds) {
        this.session = session;
        this.loginFragmentId = loginFragmentId;
        this.mainFragmentId = mainFragmentId;
        this.otherFragmentIds = otherFragmentIds;
        // Switch fragments automatically whenever the authentication state changes.
        session.addCallback(new UserApp.Session.StatusCallback() {
            @Override
            public void call(Boolean authenticated, Exception exception) {
                if (authenticated) {
                    showMainFragment();
                } else {
                    showLoginFragment();
                }
            }
        });
    }
    /** Show the login fragment (hides main and any "other" fragments). */
    public void showLoginFragment() {
        this.hideOtherFragments();
        UserApp.UIHelper.hideFragmentsById(this.session.getActivity().getSupportFragmentManager(), this.mainFragmentId);
        UserApp.UIHelper.showFragmentsById(this.session.getActivity().getSupportFragmentManager(), false, this.loginFragmentId);
    }
    /** Show the main fragment (hides login and any "other" fragments). */
    public void showMainFragment() {
        this.hideOtherFragments();
        UserApp.UIHelper.hideFragmentsById(this.session.getActivity().getSupportFragmentManager(), this.loginFragmentId);
        UserApp.UIHelper.showFragmentsById(this.session.getActivity().getSupportFragmentManager(), false, this.mainFragmentId);
    }
    /** Hide all the other fragments */
    public void hideOtherFragments() {
        UserApp.UIHelper.hideFragmentsById(this.session.getActivity().getSupportFragmentManager(), this.otherFragmentIds);
    }
    /** Lifecycle events, delegated to the session */
    public Boolean isResumed() {
        return this.session.isResumed();
    }
    public void onResume() {
        this.session.onResume();
    }
    public void onPause() {
        this.session.onPause();
    }
    /**
     * Hide the given fragments.
     * Ids that do not resolve to a fragment (e.g. the -1 defaults) are now
     * skipped; previously the null from findFragmentById was passed straight
     * to hide(), which throws a NullPointerException.
     */
    public static void hideFragmentsById(FragmentManager fm, int ... fragmentIds) {
        FragmentTransaction transaction = fm.beginTransaction();
        for (int i = 0; i < fragmentIds.length; ++i) {
            if (fm.findFragmentById(fragmentIds[i]) != null) {
                transaction.hide(fm.findFragmentById(fragmentIds[i]));
            }
        }
        transaction.commit();
    }
    /**
     * Show the given fragments, optionally adding the transaction to the back
     * stack. Unresolvable ids are skipped instead of causing an NPE.
     */
    public static void showFragmentsById(FragmentManager fm, boolean addToBackStack, int ... fragmentIds) {
        FragmentTransaction transaction = fm.beginTransaction();
        for (int i = 0; i < fragmentIds.length; ++i) {
            if (fm.findFragmentById(fragmentIds[i]) != null) {
                transaction.show(fm.findFragmentById(fragmentIds[i]));
            }
        }
        if (addToBackStack) {
            transaction.addToBackStack(null);
        }
        transaction.commit();
    }
}
/**
 * Load the UserApp App Id from the "userapp.AppId" meta-data entry in
 * AndroidManifest.xml.
 *
 * @param activity any context of the application
 * @return the App Id, or "" when the entry cannot be found
 */
public static String getAppId(Context activity) {
    try {
        ApplicationInfo info = activity.getPackageManager()
            .getApplicationInfo(activity.getPackageName(), PackageManager.GET_META_DATA);
        return info.metaData.get("userapp.AppId").toString();
    } catch (Exception e) {
        // Best effort: a missing or unreadable entry is logged, not fatal.
        UserApp.log("Error: Could not find App Id in AndroidManifest.xml.");
        return "";
    }
}
}
| |
/**
*
*/
package nl.wisdelft.prototype.client.shared;
import java.util.ArrayList;
import java.util.List;
import javax.enterprise.context.ApplicationScoped;
import javax.persistence.CascadeType;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.Id;
import javax.persistence.NamedQuery;
import javax.persistence.OneToMany;
import org.jboss.errai.common.client.api.annotations.Portable;
import org.jboss.errai.databinding.client.api.Bindable;
/**
 * Persistent configuration describing how a single RDF resource is curated:
 * the types/properties offered by the source page and the curator's
 * selections. Identity is based solely on {@code resourceUrl}.
 *
 * @author oosterman
 */
@NamedQuery(name = "config", query = "SELECT c FROM CurationConfiguration c")
@Bindable
@Portable
@Entity
@ApplicationScoped
public class CurationConfiguration {
    /** Primary key: the URL of the curated resource. */
    @Id
    private String resourceUrl;
    private String imageUrl;
    private String queryString;
    // Package-private visibility kept as-is (TODO confirm whether Errai binding relies on it).
    String title;
    @OneToMany(cascade = CascadeType.ALL, fetch = FetchType.EAGER)
    private List<Property> selectedTypes = new ArrayList<Property>();
    @OneToMany(cascade = CascadeType.ALL, fetch = FetchType.EAGER)
    private List<Property> possibleTypes = new ArrayList<Property>();
    @OneToMany(cascade = CascadeType.ALL, fetch = FetchType.EAGER)
    private List<Property> selectedProperties = new ArrayList<Property>();
    @OneToMany(cascade = CascadeType.ALL, fetch = FetchType.EAGER)
    private List<Property> possibleProperties = new ArrayList<Property>();
    @OneToMany(cascade = CascadeType.ALL, fetch = FetchType.EAGER)
    private List<Property> resourceProperties = new ArrayList<Property>();
    @OneToMany(cascade = CascadeType.ALL, fetch = FetchType.EAGER)
    private List<Property> knowledgeModelProperties = new ArrayList<Property>();

    /** No-arg constructor required by JPA and Errai marshalling. */
    public CurationConfiguration() {
    }

    public CurationConfiguration(String resourceUrl) {
        this.resourceUrl = resourceUrl;
    }

    /**
     * Initialize this configuration from a freshly loaded page, resetting any
     * previous selections.
     */
    public void setConfigFromPage(RDFResource page) {
        setTitle(page.getLabel());
        setImageUrl(page.getThumbnail());
        setResourceUrl(page.getResourceUrl());
        setPossibleTypes(page.getTypes());
        setResourceProperties(page.getProperties());
        getSelectedProperties().clear();
        getSelectedTypes().clear();
        getKnowledgeModelProperties().clear();
    }

    /** Find a possible type by label; null when absent or the label is null. */
    public Property getTypeWithLabel(String label) {
        return findByLabel(possibleTypes, label);
    }

    /** Find a possible type by key; null when absent or the key is null. */
    public Property getTypeWithKey(String key) {
        return findByKey(possibleTypes, key);
    }

    /** Find a possible property by label; null when absent or the label is null. */
    public Property getPropertyWithLabel(String label) {
        return findByLabel(possibleProperties, label);
    }

    /** Find a possible property by key; null when absent or the key is null. */
    public Property getPropertyWithKey(String key) {
        return findByKey(possibleProperties, key);
    }

    /** Linear scan: first entry whose label equals the given label (previously duplicated four times). */
    private static Property findByLabel(List<Property> candidates, String label) {
        if (label == null) {
            return null;
        }
        for (Property candidate : candidates) {
            if (label.equals(candidate.getLabel())) {
                return candidate;
            }
        }
        return null;
    }

    /** Linear scan: first entry whose key equals the given key. */
    private static Property findByKey(List<Property> candidates, String key) {
        if (key == null) {
            return null;
        }
        for (Property candidate : candidates) {
            if (key.equals(candidate.getKey())) {
                return candidate;
            }
        }
        return null;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + ((resourceUrl == null) ? 0 : resourceUrl.hashCode());
        return result;
    }

    /**
     * Equality is based solely on {@code resourceUrl}. Two instances whose
     * resourceUrl is null now compare equal, keeping equals() consistent with
     * hashCode() (the previous version returned false for that case).
     */
    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        CurationConfiguration other = (CurationConfiguration) obj;
        if (resourceUrl == null)
            return other.resourceUrl == null;
        return resourceUrl.equals(other.resourceUrl);
    }

    public String getResourceUrl() {
        return resourceUrl;
    }
    public List<Property> getSelectedTypes() {
        return selectedTypes;
    }
    public void setSelectedTypes(List<Property> selectedTypes) {
        this.selectedTypes = selectedTypes;
    }
    public List<Property> getSelectedProperties() {
        return selectedProperties;
    }
    public void setSelectedProperties(List<Property> selectedProperties) {
        this.selectedProperties = selectedProperties;
    }
    public void setResourceUrl(String resourceUrl) {
        this.resourceUrl = resourceUrl;
    }
    public String getTitle() {
        return title;
    }
    public void setTitle(String title) {
        this.title = title;
    }
    public List<Property> getPossibleTypes() {
        return possibleTypes;
    }
    public void setPossibleTypes(List<Property> possibleTypes) {
        this.possibleTypes = possibleTypes;
    }
    public List<Property> getPossibleProperties() {
        return possibleProperties;
    }
    public void setPossibleProperties(List<Property> possibleProperties) {
        this.possibleProperties = possibleProperties;
    }
    public List<Property> getResourceProperties() {
        return resourceProperties;
    }
    public void setResourceProperties(List<Property> resourceProperties) {
        this.resourceProperties = resourceProperties;
    }
    public List<Property> getKnowledgeModelProperties() {
        return knowledgeModelProperties;
    }
    public void setKnowledgeModelProperties(List<Property> knowledgeModelProperties) {
        this.knowledgeModelProperties = knowledgeModelProperties;
    }
    public String getQueryString() {
        return queryString;
    }
    public void setQueryString(String queryString) {
        this.queryString = queryString;
    }
    public String getImageUrl() {
        return imageUrl;
    }
    public void setImageUrl(String imageUrl) {
        this.imageUrl = imageUrl;
    }
}
| |
/*
* Copyright (c) 2010-2019 Evolveum and contributors
*
* This work is dual-licensed under the Apache License 2.0
* and European Union Public License. See LICENSE file for details.
*/
/**
*
*/
package com.evolveum.midpoint.schema;
import static com.evolveum.midpoint.prism.util.PrismAsserts.assertPropertyValue;
import com.evolveum.midpoint.prism.*;
import com.evolveum.midpoint.prism.impl.PrismContextImpl;
import com.evolveum.midpoint.prism.path.ItemPath;
import com.evolveum.midpoint.prism.path.ItemName;
import com.evolveum.midpoint.prism.polystring.PolyString;
import com.evolveum.midpoint.prism.util.PrismAsserts;
import com.evolveum.midpoint.prism.util.PrismTestUtil;
import com.evolveum.midpoint.schema.constants.MidPointConstants;
import com.evolveum.midpoint.schema.constants.SchemaConstants;
import com.evolveum.midpoint.schema.util.SchemaTestConstants;
import com.evolveum.midpoint.util.DOMUtil;
import com.evolveum.midpoint.util.PrettyPrinter;
import com.evolveum.midpoint.util.exception.SchemaException;
import com.evolveum.midpoint.xml.ns._public.common.common_3.*;
import com.evolveum.prism.xml.ns._public.types_3.ChangeTypeType;
import com.evolveum.prism.xml.ns._public.types_3.EncryptedDataType;
import com.evolveum.prism.xml.ns._public.types_3.ItemDeltaType;
import com.evolveum.prism.xml.ns._public.types_3.ItemPathType;
import com.evolveum.prism.xml.ns._public.types_3.ModificationTypeType;
import com.evolveum.prism.xml.ns._public.types_3.ObjectDeltaType;
import com.evolveum.prism.xml.ns._public.types_3.ProtectedStringType;
import com.evolveum.prism.xml.ns._public.types_3.RawType;
import org.testng.annotations.BeforeSuite;
import org.testng.annotations.Test;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.xml.sax.SAXException;
import javax.xml.bind.JAXBElement;
import javax.xml.bind.JAXBException;
import javax.xml.datatype.DatatypeFactory;
import javax.xml.datatype.XMLGregorianCalendar;
import javax.xml.namespace.QName;
import java.io.File;
import java.io.IOException;
import static org.testng.AssertJUnit.assertEquals;
import static org.testng.AssertJUnit.assertNotNull;
/**
* @author Radovan Semancik
*/
public class TestJaxbParsing {
private static final String NS_FOO = "http://www.example.com/foo";
/** One-time suite setup: configure pretty printing and reset the prism context. */
@BeforeSuite
public void setup() throws SchemaException, SAXException, IOException {
    PrettyPrinter.setDefaultNamespacePrefix(MidPointConstants.NS_MIDPOINT_PUBLIC_PREFIX);
    PrismTestUtil.resetPrismContext(MidPointPrismContextFactory.FACTORY);
}
/**
 * Parses the common "user-jack.xml" sample via the JAXB/objectable API and
 * verifies the core name properties plus the extension items (string, number,
 * multi-valued and protected-string entries) survive the round trip.
 */
@Test
public void testParseUserFromJaxb() throws SchemaException, SAXException, IOException, JAXBException {
    PrismContext prismContext = PrismTestUtil.getPrismContext();
    // GIVEN: the sample user parsed through the objectable interface
    UserType userType = PrismTestUtil.parseObjectable(new File(TestConstants.COMMON_DIR, "user-jack.xml"), UserType.class);
    // WHEN: converted to a prism object and revived in the current context
    PrismObject<UserType> user = userType.asPrismObject();
    user.revive(prismContext);
    // THEN
    System.out.println("Parsed user:");
    System.out.println(user.debugDump());
    user.checkConsistence();
    assertPropertyValue(user, UserType.F_NAME, PrismTestUtil.createPolyString("jack"));
    assertPropertyValue(user, new ItemName(SchemaConstants.NS_C, "fullName"), new PolyString("Jack Sparrow", "jack sparrow"));
    assertPropertyValue(user, new ItemName(SchemaConstants.NS_C, "givenName"), new PolyString("Jack", "jack"));
    assertPropertyValue(user, new ItemName(SchemaConstants.NS_C, "familyName"), new PolyString("Sparrow", "sparrow"));
    assertPropertyValue(user, new ItemName(SchemaConstants.NS_C, "honorificPrefix"), new PolyString("Cpt.", "cpt"));
    // Extension items from the http://www.example.com/foo namespace:
    assertPropertyValue(user.findContainer(UserType.F_EXTENSION),
        new ItemName(NS_FOO, "bar"), "BAR");
    PrismProperty<ProtectedStringType> password = user.findOrCreateContainer(UserType.F_EXTENSION).findProperty(new ItemName(NS_FOO, "password"));
    assertNotNull(password);
    // TODO: check inside
    assertPropertyValue(user.findOrCreateContainer(UserType.F_EXTENSION),
        new ItemName(NS_FOO, "num"), 42);
    // The multi-valued extension property must keep all three values.
    PrismProperty<?> multi = user.findOrCreateContainer(UserType.F_EXTENSION).findProperty(new ItemName(NS_FOO, "multi"));
    assertEquals(3, multi.getValues().size());
    // WHEN
    // Node domNode = user.serializeToDom();
    //
    // //THEN
    // System.out.println("\nSerialized user:");
    // System.out.println(DOMUtil.serializeDOMToString(domNode));
    //
    // Element userEl = DOMUtil.getFirstChildElement(domNode);
    // assertEquals(SchemaConstants.I_USER, DOMUtil.getQName(userEl));
    // TODO: more asserts
}
/**
 * Parses the "account-jack.xml" sample via the JAXB/objectable API and
 * verifies name, object class and intent.
 */
@Test
public void testParseAccountFromJaxb() throws SchemaException, SAXException, IOException, JAXBException {
    PrismContext prismContext = PrismTestUtil.getPrismContext();
    // GIVEN: the sample shadow parsed through the objectable interface
    ShadowType accType = PrismTestUtil.parseObjectable(new File(TestConstants.COMMON_DIR, "account-jack.xml"), ShadowType.class);
    // WHEN
    PrismObject<ShadowType> account = accType.asPrismObject();
    account.revive(prismContext);
    // THEN
    System.out.println("Parsed account:");
    System.out.println(account.debugDump(1));
    account.checkConsistence();
    assertPropertyValue(account, ShadowType.F_NAME, PrismTestUtil.createPolyString("jack"));
    assertPropertyValue(account, ShadowType.F_OBJECT_CLASS,
        new QName("http://midpoint.evolveum.com/xml/ns/public/resource/instance/ef2bc95b-76e0-59e2-86d6-3d4f02d3ffff", "AccountObjectClass"));
    assertPropertyValue(account, ShadowType.F_INTENT, "default");
    // TODO: more asserts
}
/** Parses the current-format role sample. */
@Test
public void testParseModernRoleFromJaxb() throws SchemaException, SAXException, IOException, JAXBException {
    System.out.println("\n\n ===[ testParseModernRoleFromJaxb ]===\n");
    testParseRoleFromJaxb(new File(TestConstants.COMMON_DIR, "role.xml"));
}
/**
 * Test of parsing role with elements that were removed in 4.0.
 */
@Test
public void testParseLegacyRoleFromJaxb() throws SchemaException, SAXException, IOException, JAXBException {
    System.out.println("\n\n ===[ testParseLegacyRoleFromJaxb ]===\n");
    testParseRoleFromJaxb(new File(TestConstants.COMMON_DIR, "role-legacy.xml"));
}
/**
 * Shared body for the role parsing tests: parse the given file via the
 * JAXB/objectable API, check prism consistency and the role name.
 */
public void testParseRoleFromJaxb(File file) throws SchemaException, SAXException, IOException, JAXBException {
    PrismContext prismContext = PrismTestUtil.getPrismContext();
    // GIVEN
    RoleType roleType = PrismTestUtil.parseObjectable(file, RoleType.class);
    // WHEN
    PrismObject<RoleType> role = roleType.asPrismObject();
    role.revive(prismContext);
    // THEN
    System.out.println("Parsed role:");
    System.out.println(role.debugDump(1));
    role.checkConsistence();
    assertPropertyValue(role, RoleType.F_NAME, PrismTestUtil.createPolyString("r3"));
    // TODO: more asserts?
}
/**
 * Parses the generic-object sample and checks both its regular properties
 * and the dynamically-typed values stored in its extension container.
 */
@Test
public void testParseGenericObjectFromJaxb() throws Exception {
    System.out.println("\n\n ===[ testParseGenericObjectFromJaxb ]===\n");
    PrismContext ctx = PrismTestUtil.getPrismContext();

    GenericObjectType genericBean = PrismTestUtil.parseObjectable(new File(TestConstants.COMMON_DIR, "generic-sample-configuration.xml"),
            GenericObjectType.class);
    PrismObject<GenericObjectType> generic = genericBean.asPrismObject();
    generic.revive(ctx);
    generic.checkConsistence();

    assertPropertyValue(generic, GenericObjectType.F_NAME, PrismTestUtil.createPolyString("My Sample Config Object"));
    assertPropertyValue(generic, GenericObjectType.F_DESCRIPTION, "Sample description");
    assertPropertyValue(generic, GenericObjectType.F_OBJECT_TYPE, "http://midpoint.evolveum.com/xml/ns/test/extension#SampleConfigType");

    // Assert the dynamically-schema'd extension values.
    PrismContainer<?> extension = generic.findContainer(GenericObjectType.F_EXTENSION);
    assertNotNull(extension);
    PrismAsserts.assertPropertyValue(extension, SchemaTestConstants.EXTENSION_STRING_TYPE_ELEMENT, "X marks the spot");
    PrismAsserts.assertPropertyValue(extension, SchemaTestConstants.EXTENSION_INT_TYPE_ELEMENT, 1234);
    PrismAsserts.assertPropertyValue(extension, SchemaTestConstants.EXTENSION_DOUBLE_TYPE_ELEMENT, 456.789D);
    PrismAsserts.assertPropertyValue(extension, SchemaTestConstants.EXTENSION_LONG_TYPE_ELEMENT, 567890L);
    XMLGregorianCalendar expectedDate = DatatypeFactory.newInstance().newXMLGregorianCalendar("2002-05-30T09:10:11");
    PrismAsserts.assertPropertyValue(extension, SchemaTestConstants.EXTENSION_DATE_TYPE_ELEMENT, expectedDate);
    //todo locations ????? how to test DOM ??????
}
/**
 * Builds a MODIFY ObjectDeltaType by hand (REPLACE of credentials/password
 * with a protected-string value) and checks that it serializes to XML.
 */
@Test
public void testMarshallObjectDeltaType() throws Exception {
    ObjectDeltaType delta = new ObjectDeltaType();
    delta.setOid("07b32c14-0c18-460b-bd4a-99b96699f952");
    delta.setChangeType(ChangeTypeType.MODIFY);

    // Single REPLACE modification targeting credentials/password.
    ItemDeltaType item1 = new ItemDeltaType();
    delta.getItemDelta().add(item1);
    item1.setModificationType(ModificationTypeType.REPLACE);
    // NOTE(review): 'document' is an unused leftover of the commented-out
    // DOM-based path construction below.
    Document document = DOMUtil.getDocument();
    // Element path = document.createElementNS(SchemaConstantsGenerated.NS_TYPES, "path");
    // path.setTextContent("c:credentials/c:password");
    ItemPath path = ItemPath.create(SchemaConstantsGenerated.C_CREDENTIALS, CredentialsType.F_PASSWORD);
    item1.setPath(new ItemPathType(path));

    // The replacement value: a protected string with empty encrypted data,
    // marshalled to a raw value. NOTE(review): the PrismContextImpl cast
    // reaches into implementation internals to get the bean marshaller.
    ProtectedStringType protectedString = new ProtectedStringType();
    protectedString.setEncryptedData(new EncryptedDataType());
    RawType value = new RawType(((PrismContextImpl) PrismTestUtil.getPrismContext()).getBeanMarshaller().marshall(protectedString), PrismTestUtil.getPrismContext());
    item1.getValue().add(value);

    // Serialize under an arbitrary custom element name; the test only checks
    // that serialization succeeds and produces output.
    String xml = PrismTestUtil.serializeJaxbElementToString(
            new JAXBElement<>(new QName("http://www.example.com", "custom"), Object.class, delta));
    assertNotNull(xml);
}
/**
 * Parses two "any" XML snippets into JAXB elements: one element with a static
 * bean mapping (asIs) and one without (c:value, which yields a RawType).
 */
@Test
public void testParseAnyValue() throws Exception {
    PrismContext prismContext = PrismTestUtil.getPrismContext();

    // GIVEN
    String dataAsIs = "<asIs/>";
    String dataValue = "<c:value xmlns:c='" + SchemaConstants.NS_C + "'>12345</c:value>";

    // WHEN + THEN (wildcard-typed elements instead of raw JAXBElement)
    JAXBElement<?> oAsIs = prismContext.parserFor(dataAsIs).xml().parseRealValueToJaxbElement();
    System.out.println(dumpResult(dataAsIs, oAsIs));
    assertJaxbElement(oAsIs, new QName("asIs"), AsIsExpressionEvaluatorType.class);

    JAXBElement<?> oValue = prismContext.parserFor(dataValue).xml().parseRealValueToJaxbElement();
    System.out.println(dumpResult(dataValue, oValue));
    //assertJaxbElement(oValue, SchemaConstantsGenerated.C_VALUE, String.class);
    assertJaxbElement(oValue, SchemaConstantsGenerated.C_VALUE, RawType.class);
}
/**
 * Asserts that the given JAXB element has the expected element name,
 * declared type, and runtime value type.
 *
 * @param jaxbElement the element to check (wildcard type replaces the former raw type)
 * @param name expected element QName
 * @param clazz expected declared type and runtime value class
 */
private void assertJaxbElement(JAXBElement<?> jaxbElement, QName name, Class<?> clazz) {
    assertEquals("Wrong JAXB element name", name, jaxbElement.getName());
    assertEquals("Wrong JAXB element declared type", clazz, jaxbElement.getDeclaredType());
    assertEquals("Wrong JAXB element value type", clazz, jaxbElement.getValue().getClass());
}
/**
 * Formats a parsed JAXB element for diagnostic output.
 *
 * @param data the original input snippet
 * @param jaxb the parsed element (wildcard type replaces the former raw type)
 * @return a human-readable description of the element
 */
private String dumpResult(String data, JAXBElement<?> jaxb) {
    return "Parsed expression evaluator: " + data + " as " + jaxb + " (name=" + jaxb.getName() + ", declaredType=" + jaxb.getDeclaredType() + ", value=" + jaxb.getValue() + ")";
}
}
| |
package com.tinkerpop.rexster.extension;
import com.tinkerpop.rexster.Tokens;
import org.apache.commons.lang.StringUtils;
import org.codehaus.jettison.json.JSONObject;
import javax.ws.rs.core.Response;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
/**
* Wraps the Jersey response object with some simple response builder methods.
*/
/**
 * Wraps the Jersey response object with some simple response builder methods.
 */
public class ExtensionResponse {

    /** The underlying Jersey response being wrapped. */
    private final Response jerseyResponse;

    /** Whether this response represents an error condition. */
    private final boolean errorResponse;

    /**
     * Creates a wrapper that is not flagged as an error response.
     */
    public ExtensionResponse(final Response response) {
        this(response, false);
    }

    /**
     * Creates a wrapper around the given Jersey response.
     *
     * @param response the Jersey response to wrap
     * @param errorResponse true if this response represents an error
     */
    public ExtensionResponse(final Response response, final boolean errorResponse) {
        this.jerseyResponse = response;
        this.errorResponse = errorResponse;
    }

    /**
     * Override the builder and literally construct the Jersey response.
     * <p/>
     * Rexster will add its standard headers and override any provided in the response. It is recommended
     * to use the @see error methods as opposed to override if the intention is to return an error on
     * the response. The override methods will not throw a WebApplicationException or do any standard
     * Rexster server side logging.
     */
    public static ExtensionResponse override(final Response response) {
        if (response == null) {
            throw new IllegalArgumentException("Response cannot be null");
        }

        return new ExtensionResponse(response);
    }

    /**
     * Generates a standard Rexster JSON error carrying only a message, with an
     * internal server error response code.
     */
    public static ExtensionResponse error(final String message) {
        // Cast disambiguates between the Exception and JSONObject overloads.
        return error(message, (Exception) null);
    }

    /**
     * Generates a standard Rexster JSON error with an internal server error
     * response code, appending extra JSON under the given key.
     */
    public static ExtensionResponse error(final String message, final String appendKey, final JSONObject appendJson) {
        return error(message, null, appendKey, appendJson);
    }

    /**
     * Generates a standard Rexster JSON error with an internal server error response code.
     *
     * @param appendJson Additional JSON to push into the response. The root of the key values from this object
     *                   will be merged into the root of the resulting JSON.
     */
    public static ExtensionResponse error(final String message, final JSONObject appendJson) {
        return error(message, null, null, appendJson);
    }

    /**
     * Generates a standard Rexster JSON error from an exception, with an
     * internal server error response code.
     */
    public static ExtensionResponse error(final Exception source) {
        return error("", source);
    }

    /**
     * Generates a standard Rexster JSON error from an exception with an internal
     * server error response code, appending extra JSON under the given key.
     */
    public static ExtensionResponse error(final Exception source, final String appendKey, final JSONObject appendJson) {
        return error("", source, appendKey, appendJson);
    }

    /**
     * Generates a standard Rexster JSON error from an exception with an internal server error response code.
     *
     * @param appendJson Additional JSON to push into the response. The root of the key values from this object
     *                   will be merged into the root of the resulting JSON.
     */
    public static ExtensionResponse error(final Exception source, final JSONObject appendJson) {
        return error("", source, null, appendJson);
    }

    /**
     * Generates a standard Rexster JSON error from a message and exception,
     * with an internal server error response code.
     */
    public static ExtensionResponse error(final String message, final Exception source) {
        return error(message, source, Response.Status.INTERNAL_SERVER_ERROR.getStatusCode());
    }

    /**
     * Generates a standard Rexster JSON error with an internal server error response code.
     *
     * @param appendKey  This parameter is only relevant if the appendJson parameter is passed. If this value
     *                   is not null or non-empty the value of appendJson will be assigned to this key value in
     *                   the response object. If the key is null or empty the appendJson parameter will be
     *                   written at the root of the response object.
     * @param appendJson Additional JSON to push into the response.
     */
    public static ExtensionResponse error(final String message, final Exception source, final String appendKey, final JSONObject appendJson) {
        return error(message, source, Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), appendKey, appendJson);
    }

    /**
     * Generates a standard Rexster JSON error with an internal server error response code.
     *
     * @param appendJson Additional JSON to push into the response. The root of the key values from this object
     *                   will be merged into the root of the resulting JSON.
     */
    public static ExtensionResponse error(final String message, final Exception source, final JSONObject appendJson) {
        return error(message, source, null, appendJson);
    }

    /**
     * Generates a standard Rexster JSON error with a specified server error response code.
     * <p/>
     * The status code is not validated, so throw the right code.
     */
    public static ExtensionResponse error(final String message, final Exception source, final int statusCode) {
        return error(message, source, statusCode, null, null);
    }

    /**
     * Generates a standard Rexster JSON error with a specified server error response code.
     * <p/>
     * The status code is not validated, so throw the right code.
     *
     * @param appendKey  This parameter is only relevant if the appendJson parameter is passed. If this value
     *                   is not null or non-empty the value of appendJson will be assigned to this key value in
     *                   the response object. If the key is null or empty the appendJson parameter will be
     *                   written at the root of the response object.
     * @param appendJson Additional JSON to push into the response.
     */
    public static ExtensionResponse error(final String message, final Exception source, final int statusCode,
                                          final String appendKey, final JSONObject appendJson) {
        // Build the entity through a HashMap so constructing the JSONObject
        // cannot raise a JSONException.
        final Map<String, Object> entity = new HashMap<String, Object>();
        entity.put(Tokens.MESSAGE, message);

        if (source != null) {
            entity.put("error", source.getMessage());
        }

        if (appendJson != null) {
            final boolean hasExplicitKey = appendKey != null && !appendKey.isEmpty();
            if (hasExplicitKey) {
                entity.put(appendKey, appendJson);
            } else {
                // No key supplied: merge the extra JSON into the response root.
                final Iterator keys = appendJson.keys();
                while (keys.hasNext()) {
                    final String key = (String) keys.next();
                    entity.put(key, appendJson.opt(key));
                }
            }
        }

        return new ExtensionResponse(Response.status(statusCode).entity(new JSONObject(entity)).build(), true);
    }

    /**
     * Generates a response with no content and matching status code.
     */
    public static ExtensionResponse noContent() {
        return new ExtensionResponse(Response.noContent().build());
    }

    /**
     * Generates a response with an OK status code from a Map, which is
     * converted to JSON.
     */
    public static ExtensionResponse ok(final Map result) {
        if (result == null) {
            throw new IllegalArgumentException("result cannot be null");
        }

        return ok(new JSONObject(result));
    }

    /**
     * Generates a no-content response advertising the given HTTP methods via
     * the Access-Control-Allow-Methods header.
     */
    public static ExtensionResponse availableOptions(final String... methods) {
        return new ExtensionResponse(Response.noContent()
                .header("Access-Control-Allow-Methods", StringUtils.join(methods, ",")).build());
    }

    /**
     * Generates a response with an OK status code.
     */
    public static ExtensionResponse ok(final JSONObject result) {
        return new ExtensionResponse(Response.ok(result).build());
    }

    /**
     * @return the wrapped Jersey response.
     */
    public Response getJerseyResponse() {
        return this.jerseyResponse;
    }

    /**
     * @return true if this response was flagged as an error.
     */
    public boolean isErrorResponse() {
        return this.errorResponse;
    }
}
| |
/*
* Copyright (c) 2015, Nordic Semiconductor
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this
* software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
* USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package no.nordicsemi.android.nrftoolbox.scanner;
import android.content.Context;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.ImageView;
import android.widget.TextView;
import java.util.ArrayList;
import no.nordicsemi.android.nrftoolbox.R;
/**
* DeviceListAdapter class is list adapter for showing scanned Devices name, address and RSSI image based on RSSI values.
*/
/**
 * DeviceListAdapter class is list adapter for showing scanned Devices name, address and RSSI image based on RSSI values.
 * <p>
 * The list shows an optional "bonded" section (title + bonded devices) followed by an
 * "available" section (title + scanned devices, or a single "empty" row).
 */
public class DeviceListAdapter extends BaseAdapter {
    private static final int TYPE_TITLE = 0;
    private static final int TYPE_ITEM = 1;
    private static final int TYPE_EMPTY = 2;

    private final ArrayList<ExtendedBluetoothDevice> mListBondedValues = new ArrayList<>();
    private final ArrayList<ExtendedBluetoothDevice> mListValues = new ArrayList<>();
    private final Context mContext;
    // Cached once instead of being re-resolved on every getView() call.
    private final LayoutInflater mInflater;
    private final ExtendedBluetoothDevice.AddressComparator comparator = new ExtendedBluetoothDevice.AddressComparator();

    public DeviceListAdapter(Context context) {
        mContext = context;
        mInflater = LayoutInflater.from(context);
    }

    /** Adds a device to the bonded-devices section. */
    public void addBondedDevice(ExtendedBluetoothDevice device) {
        mListBondedValues.add(device);
        notifyDataSetChanged();
    }

    /**
     * Looks for the device with the same address as given one in the list of bonded devices. If the device has been found it updates its RSSI value.
     *
     * @param address
     *            the device address
     * @param rssi
     *            the RSSI of the scanned device
     */
    public void updateRssiOfBondedDevice(String address, int rssi) {
        comparator.address = address;
        final int indexInBonded = mListBondedValues.indexOf(comparator);
        if (indexInBonded >= 0) {
            ExtendedBluetoothDevice previousDevice = mListBondedValues.get(indexInBonded);
            previousDevice.rssi = rssi;
            notifyDataSetChanged();
        }
    }

    /**
     * If such device exists on the bonded device list, this method does nothing. If not then the device is updated (rssi value) or added.
     *
     * @param device
     *            the device to be added or updated
     */
    public void addOrUpdateDevice(ExtendedBluetoothDevice device) {
        // Renamed from 'indexInBonded': this is a containment flag, not an index.
        final boolean bonded = mListBondedValues.contains(device);
        if (bonded) {
            return;
        }

        final int indexInNotBonded = mListValues.indexOf(device);
        if (indexInNotBonded >= 0) {
            ExtendedBluetoothDevice previousDevice = mListValues.get(indexInNotBonded);
            previousDevice.rssi = device.rssi;
            notifyDataSetChanged();
            return;
        }
        mListValues.add(device);
        notifyDataSetChanged();
    }

    /** Clears the scanned (not bonded) devices section. */
    public void clearDevices() {
        mListValues.clear();
        notifyDataSetChanged();
    }

    @Override
    public int getCount() {
        final int bondedCount = mListBondedValues.size() + 1; // 1 for the title
        final int availableCount = mListValues.isEmpty() ? 2 : mListValues.size() + 1; // 1 for title, 1 for empty text
        if (bondedCount == 1)
            return availableCount;
        return bondedCount + availableCount;
    }

    @Override
    public Object getItem(int position) {
        // Returns either a string resource id (for titles) or an ExtendedBluetoothDevice.
        final int bondedCount = mListBondedValues.size() + 1; // 1 for the title
        if (mListBondedValues.isEmpty()) {
            if (position == 0)
                return R.string.scanner_subtitle__not_bonded;
            else
                return mListValues.get(position - 1);
        } else {
            if (position == 0)
                return R.string.scanner_subtitle_bonded;
            if (position < bondedCount)
                return mListBondedValues.get(position - 1);
            if (position == bondedCount)
                return R.string.scanner_subtitle__not_bonded;
            return mListValues.get(position - bondedCount - 1);
        }
    }

    @Override
    public int getViewTypeCount() {
        return 3;
    }

    @Override
    public boolean areAllItemsEnabled() {
        return false;
    }

    @Override
    public boolean isEnabled(int position) {
        // Only device rows are clickable; titles and the empty row are not.
        return getItemViewType(position) == TYPE_ITEM;
    }

    @Override
    public int getItemViewType(int position) {
        if (position == 0)
            return TYPE_TITLE;

        if (!mListBondedValues.isEmpty() && position == mListBondedValues.size() + 1)
            return TYPE_TITLE;

        if (position == getCount() - 1 && mListValues.isEmpty())
            return TYPE_EMPTY;

        return TYPE_ITEM;
    }

    @Override
    public long getItemId(int position) {
        return position;
    }

    @Override
    public View getView(int position, View oldView, ViewGroup parent) {
        final int type = getItemViewType(position);

        View view = oldView;
        switch (type) {
        case TYPE_EMPTY:
            if (view == null) {
                view = mInflater.inflate(R.layout.device_list_empty, parent, false);
            }
            break;
        case TYPE_TITLE:
            if (view == null) {
                view = mInflater.inflate(R.layout.device_list_title, parent, false);
            }
            final TextView title = (TextView) view;
            title.setText((Integer) getItem(position));
            break;
        default:
            if (view == null) {
                view = mInflater.inflate(R.layout.device_list_row, parent, false);
                final ViewHolder holder = new ViewHolder();
                holder.name = (TextView) view.findViewById(R.id.name);
                holder.address = (TextView) view.findViewById(R.id.address);
                holder.rssi = (ImageView) view.findViewById(R.id.rssi);
                view.setTag(holder);
            }

            final ExtendedBluetoothDevice device = (ExtendedBluetoothDevice) getItem(position);
            final ViewHolder holder = (ViewHolder) view.getTag();
            final String name = device.name;
            holder.name.setText(name != null ? name : mContext.getString(R.string.not_available));
            holder.address.setText(device.device.getAddress());
            if (!device.isBonded || device.rssi != ScannerFragment.NO_RSSI) {
                // Maps RSSI to a 0..100 image level; assumes a -127..+20 dBm
                // range — TODO confirm against the scanner's RSSI values.
                final int rssiPercent = (int) (100.0f * (127.0f + device.rssi) / (127.0f + 20.0f));
                holder.rssi.setImageLevel(rssiPercent);
                holder.rssi.setVisibility(View.VISIBLE);
            } else {
                holder.rssi.setVisibility(View.GONE);
            }
            break;
        }

        return view;
    }

    // Made static: a non-static inner class would keep an implicit reference
    // to the adapter (and through it the Context) in every row's view tag.
    private static class ViewHolder {
        private TextView name;
        private TextView address;
        private ImageView rssi;
    }
}
| |
/*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.wiki.providers;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FilenameFilter;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.Enumeration;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.TreeSet;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.apache.wiki.InternalWikiException;
import org.apache.wiki.WikiEngine;
import org.apache.wiki.WikiPage;
import org.apache.wiki.WikiProvider;
import org.apache.wiki.api.exceptions.NoRequiredPropertyException;
import org.apache.wiki.api.exceptions.ProviderException;
import org.apache.wiki.search.QueryItem;
import org.apache.wiki.search.SearchMatcher;
import org.apache.wiki.search.SearchResult;
import org.apache.wiki.search.SearchResultComparator;
import org.apache.wiki.util.FileUtil;
import org.apache.wiki.util.TextUtil;
/**
* Provides a simple directory based repository for Wiki pages.
* <P>
* All files have ".txt" appended to make life easier for those
* who insist on using Windows or other software which makes assumptions
* on the files contents based on its name.
* <p>
* This class functions as a superclass to all file based providers.
*
* @since 2.1.21.
*
*/
public abstract class AbstractFileProvider
implements WikiPageProvider
{
private static final Logger log = Logger.getLogger(AbstractFileProvider.class);

// Directory the page files live in; overwritten from PROP_PAGEDIR in initialize().
private String m_pageDirectory = "/tmp/";

// Character encoding used when reading/writing page files; set in initialize().
protected String m_encoding;

// Owning engine, assigned in initialize().
protected WikiEngine m_engine;

// Property keys for configuring the custom page property limits below.
public static final String PROP_CUSTOMPROP_MAXLIMIT = "custom.pageproperty.max.allowed";
public static final String PROP_CUSTOMPROP_MAXKEYLENGTH = "custom.pageproperty.key.length";
public static final String PROP_CUSTOMPROP_MAXVALUELENGTH = "custom.pageproperty.value.length";

public static final int DEFAULT_MAX_PROPLIMIT = 200;
public static final int DEFAULT_MAX_PROPKEYLENGTH = 255;
public static final int DEFAULT_MAX_PROPVALUELENGTH = 4096;

/**
 * This parameter limits the number of custom page properties allowed on a page
 */
public static int MAX_PROPLIMIT = DEFAULT_MAX_PROPLIMIT;

/**
 * This number limits the length of a custom page property key length
 * The default value here designed with future JDBC providers in mind.
 */
public static int MAX_PROPKEYLENGTH = DEFAULT_MAX_PROPKEYLENGTH;

/**
 * This number limits the length of a custom page property value length
 * The default value here designed with future JDBC providers in mind.
 */
public static int MAX_PROPVALUELENGTH = DEFAULT_MAX_PROPVALUELENGTH;

/**
 * Name of the property that defines where page directories are.
 */
public static final String PROP_PAGEDIR = "jspwiki.fileSystemProvider.pageDir";

/**
 * All files should have this extension to be recognized as JSPWiki files.
 * We default to .txt, because that is probably easiest for Windows users,
 * and guarantees correct handling.
 */
public static final String FILE_EXT = ".txt";

/** The default encoding. */
public static final String DEFAULT_ENCODING = "ISO-8859-1";

// True when running on Windows; enables the reserved-device-name escaping
// done in mangleName()/unmangleName().
private boolean m_windowsHackNeeded = false;
/**
 * {@inheritDoc}
 * <p>
 * Resolves the page directory from {@link #PROP_PAGEDIR} (defaulting to
 * a "jspwiki-files" directory under the user's home), creating it if it
 * does not exist, and reads the encoding and the custom-property limits
 * from the supplied properties.
 *
 * @throws IOException If the page directory cannot be created, is a file
 *         instead of a directory, or is not writable.
 */
public void initialize( WikiEngine engine, Properties properties )
    throws NoRequiredPropertyException,
           IOException, FileNotFoundException
{
    log.debug("Initing FileSystemProvider");
    m_pageDirectory = TextUtil.getCanonicalFilePathProperty(properties, PROP_PAGEDIR,
            System.getProperty("user.home") + File.separator + "jspwiki-files");

    File f = new File(m_pageDirectory);

    if( !f.exists() )
    {
        if( !f.mkdirs() )
        {
            throw new IOException( "Failed to create page directory " + f.getAbsolutePath() + " , please check property "
                                   + PROP_PAGEDIR );
        }
    }
    else
    {
        if( !f.isDirectory() )
        {
            throw new IOException( "Page directory is not a directory: " + f.getAbsolutePath() );
        }
        if( !f.canWrite() )
        {
            throw new IOException( "Page directory is not writable: " + f.getAbsolutePath() );
        }
    }

    m_engine = engine;
    m_encoding = properties.getProperty( WikiEngine.PROP_ENCODING, DEFAULT_ENCODING );

    String os = System.getProperty( "os.name" ).toLowerCase();

    // Reserved device names (con, prn, ...) need escaping on Windows; see mangleName().
    if( os.startsWith("windows") || os.equals("nt") )
    {
        m_windowsHackNeeded = true;
    }

    // NOTE(review): this null check is redundant - 'properties' was already
    // dereferenced above and would have thrown a NullPointerException if null.
    if (properties != null) {
        MAX_PROPLIMIT = TextUtil.getIntegerProperty(properties,PROP_CUSTOMPROP_MAXLIMIT,DEFAULT_MAX_PROPLIMIT);
        MAX_PROPKEYLENGTH = TextUtil.getIntegerProperty(properties,PROP_CUSTOMPROP_MAXKEYLENGTH,DEFAULT_MAX_PROPKEYLENGTH);
        MAX_PROPVALUELENGTH = TextUtil.getIntegerProperty(properties,PROP_CUSTOMPROP_MAXVALUELENGTH,DEFAULT_MAX_PROPVALUELENGTH);
    }

    log.info( "Wikipages are read from '" + m_pageDirectory + "'" );
}
// Package-private accessor for the resolved page directory (used by subclasses/tests).
String getPageDirectory()
{
    return m_pageDirectory;
}

// File names that are reserved devices on Windows and therefore cannot be
// used directly as page file names; compared lowercase in mangleName().
private static final String[] WINDOWS_DEVICE_NAMES =
{
    "con", "prn", "nul", "aux", "lpt1", "lpt2", "lpt3", "lpt4", "lpt5", "lpt6", "lpt7", "lpt8", "lpt9",
    "com1", "com2", "com3", "com4", "com5", "com6", "com7", "com8", "com9"
};
/**
 * Escapes a page name so that it is safe to use as a file name: URL-encodes
 * it, percent-escapes slashes and a leading dot, and on Windows prefixes
 * reserved device names (con, prn, ...) with "$$$".
 *
 * @param pagename The name to mangle
 * @return The mangled name.
 */
protected String mangleName( String pagename )
{
    String mangled = TextUtil.urlEncode( pagename, m_encoding );
    mangled = TextUtil.replaceString( mangled, "/", "%2F" );

    // A leading dot would hide the file on Unix; escape it. Since we use
    // URL encoding, normal decoding reverses this transparently.
    if( mangled.startsWith( "." ) )
    {
        mangled = "%2E" + mangled.substring( 1 );
    }

    if( m_windowsHackNeeded )
    {
        final String lowercased = mangled.toLowerCase();
        for( String deviceName : WINDOWS_DEVICE_NAMES )
        {
            if( deviceName.equals( lowercased ) )
            {
                mangled = "$$$" + mangled;
            }
        }
    }

    return mangled;
}
/**
 * Reverses {@link #mangleName(String)}: strips the Windows "$$$" prefix when
 * present and URL-decodes the remainder.
 *
 * @param filename The filename to unmangle
 * @return The unmangled name.
 */
protected String unmangleName( String filename )
{
    String name = filename;
    if( m_windowsHackNeeded && name.startsWith( "$$$" ) && name.length() > 3 )
    {
        name = name.substring( 3 );
    }

    try
    {
        return TextUtil.urlDecode( name, m_encoding );
    }
    catch( UnsupportedEncodingException e )
    {
        // The configured encoding was already validated, so this cannot happen.
        throw new InternalWikiException("Faulty encoding; should never happen", e);
    }
}
/**
 * Resolves the file that stores the given page.
 *
 * @param page The name of the page.
 * @return A File to the page. May be null.
 */
protected File findPage( String page )
{
    final String filename = mangleName( page ) + FILE_EXT;
    return new File( m_pageDirectory, filename );
}

/**
 * {@inheritDoc}
 */
public boolean pageExists( String page )
{
    return findPage( page ).exists();
}

/**
 * {@inheritDoc} This provider has no versioning, so this just checks
 * whether the page exists at all.
 */
public boolean pageExists( String page, int version )
{
    return pageExists( page );
}
/**
 * This implementation just returns the current version, as filesystem
 * does not provide versioning information for now.
 *
 * @param page {@inheritDoc}
 * @param version {@inheritDoc} (ignored - only the latest version exists)
 * @throws ProviderException {@inheritDoc}
 */
public String getPageText( String page, int version )
    throws ProviderException
{
    return getPageText( page );
}
/**
 * Reads the page content directly from the backing file.
 * Returns null when the page file does not exist or cannot be read.
 */
private String getPageText( String page )
{
    final File pagedata = findPage( page );

    if( !pagedata.exists() )
    {
        // This is okay: the page simply has not been created yet.
        log.info("New page '"+page+"'");
        return null;
    }

    if( !pagedata.canRead() )
    {
        log.warn("Failed to read page '"+page+"' from '"+pagedata.getAbsolutePath()+"', possibly a permissions problem");
        return null;
    }

    String result = null;
    InputStream in = null;
    try
    {
        in = new FileInputStream( pagedata );
        result = FileUtil.readContents( in, m_encoding );
    }
    catch( IOException e )
    {
        log.error("Failed to read", e);
    }
    finally
    {
        IOUtils.closeQuietly( in );
    }

    return result;
}
/**
 * {@inheritDoc}
 * <p>
 * Writes the page text to the page's file using the configured encoding.
 * I/O problems are logged and swallowed (best-effort, as before); in
 * addition, write failures that PrintWriter silently absorbs are now
 * detected via {@link PrintWriter#checkError()} and logged.
 */
public void putPageText( WikiPage page, String text )
    throws ProviderException
{
    File file = findPage( page.getName() );
    PrintWriter out = null;

    try
    {
        out = new PrintWriter(new OutputStreamWriter( new FileOutputStream( file ),
                                                      m_encoding ));
        out.print( text );

        // PrintWriter never throws on write; checkError() flushes and
        // reports any error that occurred, which was previously lost.
        if( out.checkError() )
        {
            log.error( "Saving failed: error while writing " + file.getAbsolutePath() );
        }
    }
    catch( IOException e )
    {
        log.error( "Saving failed", e );
    }
    finally
    {
        IOUtils.closeQuietly( out );
    }
}
/**
 * {@inheritDoc}
 * <p>
 * Lists every page file in the page directory and resolves it to a WikiPage.
 */
public Collection getAllPages()
    throws ProviderException
{
    log.debug("Getting all pages...");

    final File wikipagedir = new File( m_pageDirectory );
    final File[] wikipages = wikipagedir.listFiles( new WikiFileFilter() );

    if( wikipages == null )
    {
        log.error("Wikipages directory '" + m_pageDirectory + "' does not exist! Please check " + PROP_PAGEDIR + " in jspwiki.properties.");
        throw new InternalWikiException("Page directory does not exist");
    }

    final ArrayList<WikiPage> pages = new ArrayList<WikiPage>();
    for( File pageFile : wikipages )
    {
        final String wikiname = pageFile.getName();
        final int cutpoint = wikiname.lastIndexOf( FILE_EXT );
        final WikiPage page = getPageInfo( unmangleName( wikiname.substring( 0, cutpoint ) ),
                                           WikiPageProvider.LATEST_VERSION );
        if( page == null )
        {
            // Should not happen: the file was just listed.
            // FIXME: Should we throw an exception here?
            log.error("Page "+wikiname+" was found in directory listing, but could not be located individually.");
            continue;
        }
        pages.add( page );
    }

    return pages;
}
/**
 * Does not work; the file provider keeps no change history, so this always
 * returns an empty collection regardless of the given date.
 *
 * @param date {@inheritDoc}
 * @return an empty collection, never null.
 */
public Collection getAllChangedSince( Date date )
{
    // FIXME: change tracking is not implemented for file-based storage.
    // Typed list replaces the former raw ArrayList.
    return new ArrayList<WikiPage>();
}
/**
 * {@inheritDoc}
 * <p>
 * Counts the page files in the page directory. If the directory cannot be
 * listed, logs an error and returns 0 (previously this dereferenced the
 * null array returned by listFiles() and threw a NullPointerException).
 */
public int getPageCount()
{
    File wikipagedir = new File( m_pageDirectory );
    File[] wikipages = wikipagedir.listFiles( new WikiFileFilter() );

    if( wikipages == null )
    {
        log.error("Wikipages directory '" + m_pageDirectory + "' does not exist! Please check " + PROP_PAGEDIR + " in jspwiki.properties.");
        return 0;
    }

    return wikipages.length;
}
/**
 * Iterates through all WikiPages, matches them against the given query,
 * and returns a Collection of SearchResult objects.
 *
 * @param query {@inheritDoc}
 * @return {@inheritDoc} Empty (never null) when the page directory cannot
 *         be listed - previously that case threw a NullPointerException.
 */
public Collection findPages( QueryItem[] query )
{
    final File wikipagedir = new File( m_pageDirectory );
    final TreeSet<SearchResult> res = new TreeSet<SearchResult>( new SearchResultComparator() );
    final SearchMatcher matcher = new SearchMatcher( m_engine, query );

    final File[] wikipages = wikipagedir.listFiles( new WikiFileFilter() );
    if( wikipages == null )
    {
        // Guard against a missing/unreadable directory instead of NPE-ing.
        log.error("Wikipages directory '" + m_pageDirectory + "' does not exist! Please check " + PROP_PAGEDIR + " in jspwiki.properties.");
        return res;
    }

    for( int i = 0; i < wikipages.length; i++ )
    {
        FileInputStream input = null;

        // log.debug("Searching page "+wikipages[i].getPath() );
        String filename = wikipages[i].getName();
        int cutpoint = filename.lastIndexOf( FILE_EXT );
        String wikiname = unmangleName( filename.substring( 0, cutpoint ) );

        try
        {
            input = new FileInputStream( wikipages[i] );
            String pagetext = FileUtil.readContents( input, m_encoding );
            SearchResult comparison = matcher.matchPageContent( wikiname, pagetext );
            if( comparison != null )
            {
                res.add( comparison );
            }
        }
        catch( IOException e )
        {
            log.error( "Failed to read " + filename, e );
        }
        finally
        {
            IOUtils.closeQuietly( input );
        }
    }

    return res;
}
/**
 * Always returns the latest version, since FileSystemProvider
 * does not support versioning. The returned page carries the file's
 * last-modified timestamp.
 *
 * @param page {@inheritDoc}
 * @param version {@inheritDoc} (ignored)
 * @return {@inheritDoc}
 * @throws ProviderException {@inheritDoc}
 */
public WikiPage getPageInfo( String page, int version )
    throws ProviderException
{
    final File file = findPage( page );
    if( !file.exists() )
    {
        return null;
    }

    final WikiPage info = new WikiPage( m_engine, page );
    info.setLastModified( new Date( file.lastModified() ) );
    return info;
}
/**
 * The FileSystemProvider provides only one version, so the history is a
 * single-element list holding the latest page info.
 *
 * @param page {@inheritDoc}
 * @throws ProviderException {@inheritDoc}
 * @return {@inheritDoc}
 */
public List getVersionHistory( String page )
    throws ProviderException
{
    final ArrayList<WikiPage> history = new ArrayList<WikiPage>();
    history.add( getPageInfo( page, WikiPageProvider.LATEST_VERSION ) );
    return history;
}
/**
 * {@inheritDoc}
 * <p>
 * This provider exposes no extra diagnostic information.
 */
public String getProviderInfo()
{
    return "";
}
/**
 * {@inheritDoc}
 * <p>
 * Only the latest version exists, so any other version number is a no-op.
 * A failed deletion is now logged instead of being silently ignored.
 */
public void deleteVersion( String pageName, int version )
    throws ProviderException
{
    if( version == WikiProvider.LATEST_VERSION )
    {
        File f = findPage( pageName );
        // Previously the boolean result of delete() was discarded.
        if( !f.delete() )
        {
            log.warn( "Failed to delete page file " + f.getAbsolutePath() );
        }
    }
}

/**
 * {@inheritDoc}
 * <p>
 * A failed deletion is now logged instead of being silently ignored.
 */
public void deletePage( String pageName )
    throws ProviderException
{
    File f = findPage( pageName );
    if( !f.delete() )
    {
        log.warn( "Failed to delete page file " + f.getAbsolutePath() );
    }
}
/**
 * Copies the given properties onto the page as attributes, skipping the
 * reserved author, change-note and view-count keys.
 *
 * @since 2.10.2
 */
protected void setCustomProperties(WikiPage page, Properties properties) {
    final Enumeration names = properties.propertyNames();
    while (names.hasMoreElements()) {
        final String key = (String) names.nextElement();
        final boolean reserved = key.equals(WikiPage.AUTHOR)
                || key.equals(WikiPage.CHANGENOTE)
                || key.equals(WikiPage.VIEWCOUNT);
        if (!reserved) {
            page.setAttribute(key, properties.get(key));
        }
    }
}
/**
 * Get custom properties using {@link #addCustomProperties}, validate them
 * using {@link #validateCustomPageProperties}, and merge them into the
 * default properties provided.
 *
 * @since 2.10.2
 * @throws IOException if validation of the custom properties fails
 */
protected void getCustomProperties(WikiPage page, Properties defaultProperties) throws IOException {
    Properties customPageProperties = addCustomProperties(page,defaultProperties);
    validateCustomPageProperties(customPageProperties);
    // Custom properties override any identically-named defaults.
    defaultProperties.putAll(customPageProperties);
}
/**
 * By default all page attributes that start with "@" are returned as custom properties.
 * This can be overwritten by custom FileSystemProviders to save additional properties.
 * CustomPageProperties are validated by {@link #validateCustomPageProperties(Properties)}.
 *
 * @since 2.10.2
 * @param page the current page
 * @param props the default properties of this page
 * @return the "@"-prefixed attributes of the page as string properties; empty
 *         if the page is {@code null} or has no such attributes
 */
protected Properties addCustomProperties(WikiPage page, Properties props) {
    final Properties custom = new Properties();

    if (page == null) {
        return custom;
    }

    for (Map.Entry<String,Object> entry : page.getAttributes().entrySet()) {
        final String key = entry.getKey();
        final Object value = entry.getValue();
        if (key.startsWith("@") && value != null) {
            custom.put(key, value.toString());
        }
    }

    return custom;
}
/**
 * Default validation, validates that key and value is ASCII <code>StringUtils.isAsciiPrintable()</code>
 * and within lengths set up in jspwiki-custom.properties.
 * This can be overwritten by custom FileSystemProviders to validate additional properties.
 * See https://issues.apache.org/jira/browse/JSPWIKI-856
 *
 * @since 2.10.2
 * @param customProperties the custom page properties being added
 * @throws IOException if the property count, a key, or a value violates the limits
 */
protected void validateCustomPageProperties(Properties customProperties) throws IOException {
    // Nothing to validate.
    if (customProperties == null || customProperties.isEmpty()) {
        return;
    }

    // Guard against unbounded growth of the property file.
    if (customProperties.size()>MAX_PROPLIMIT) {
        throw new IOException("Too many custom properties. You are adding "+customProperties.size()+", but max limit is "+MAX_PROPLIMIT);
    }

    final Enumeration names = customProperties.propertyNames();
    while (names.hasMoreElements()) {
        final String key = (String) names.nextElement();
        final String value = (String)customProperties.get(key);

        if (key != null) {
            if (key.length()>MAX_PROPKEYLENGTH) {
                throw new IOException("Custom property key "+key+" is too long. Max allowed length is "+MAX_PROPKEYLENGTH);
            }
            if (!StringUtils.isAsciiPrintable(key)) {
                throw new IOException("Custom property key "+key+" is not simple ASCII!");
            }
        }

        if (value != null) {
            if (value.length()>MAX_PROPVALUELENGTH) {
                throw new IOException("Custom property key "+key+" has value that is too long. Value="+value+". Max allowed length is "+MAX_PROPVALUELENGTH);
            }
            if (!StringUtils.isAsciiPrintable(value)) {
                throw new IOException("Custom property key "+key+" has value that is not simple ASCII! Value="+value);
            }
        }
    }
}
/**
 * A simple filter which filters only those filenames which correspond to the
 * file extension used.
 */
public static class WikiFileFilter
    implements FilenameFilter
{
    /**
     * {@inheritDoc}
     * <p>
     * Accepts a file purely by its name suffix; the directory is ignored.
     */
    public boolean accept( File dir, String name )
    {
        return name.endsWith( FILE_EXT );
    }
}
}
| |
/*****************************************************
*
* ProductOverviewFragment.java
*
*
* Modified MIT License
*
* Copyright (c) 2010-2015 Kite Tech Ltd. https://www.kite.ly
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The software MAY ONLY be used with the Kite Tech Ltd platform and MAY NOT be modified
* to be used with any competitor platforms. This means the software MAY NOT be modified
* to place orders with any competitors to Kite Tech Ltd, all orders MUST go through the
* Kite Tech Ltd platform servers.
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NON INFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*
*****************************************************/
///// Package Declaration /////
package ly.kite.journey.selection;
///// Import(s) /////
///// Class Declaration /////
import android.os.Bundle;
import android.support.v4.view.PagerAdapter;
import android.support.v4.view.ViewPager;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.animation.AlphaAnimation;
import android.view.animation.Animation;
import android.view.animation.AnimationSet;
import android.view.animation.RotateAnimation;
import android.view.animation.ScaleAnimation;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.TextView;
import java.util.List;
import java.util.Locale;
import ly.kite.R;
import ly.kite.address.Country;
import ly.kite.analytics.Analytics;
import ly.kite.journey.AKiteActivity;
import ly.kite.journey.AKiteFragment;
import ly.kite.catalogue.MultipleCurrencyAmount;
import ly.kite.catalogue.MultipleDestinationShippingCosts;
import ly.kite.catalogue.Product;
import ly.kite.catalogue.SingleUnitSize;
import ly.kite.catalogue.UnitOfLength;
import ly.kite.catalogue.SingleCurrencyAmount;
import ly.kite.catalogue.SingleDestinationShippingCost;
import ly.kite.widget.BellInterpolator;
import ly.kite.widget.PagingDots;
import ly.kite.widget.SlidingOverlayFrame;
/*****************************************************
*
* This fragment displays a product overview.
*
*****************************************************/
public class ProductOverviewFragment extends AKiteFragment implements View.OnClickListener
{
    ////////// Static Constant(s) //////////

    @SuppressWarnings("unused")
    public static final String TAG = "ProductOverviewFragment";

    // Key under which the product is passed in the fragment arguments bundle
    public static final String BUNDLE_KEY_PRODUCT = "product";

    // Timings / alpha / scale values for the paging-dot animations
    private static final long PAGING_DOT_ANIMATION_DURATION_MILLIS = 300L;
    private static final float PAGING_DOT_ANIMATION_OPAQUE = 1.0f;
    private static final float PAGING_DOT_ANIMATION_TRANSLUCENT = 0.5f;
    private static final float PAGING_DOT_ANIMATION_NORMAL_SCALE = 1.0f;

    // The open/close icon rotation is delayed but finishes together with the
    // drawer slide, hence duration = slide duration - delay.
    private static final long SLIDE_ANIMATION_DURATION_MILLIS = 500L;
    private static final long OPEN_CLOSE_ICON_ANIMATION_DELAY_MILLIS = 250L;
    private static final long OPEN_CLOSE_ICON_ANIMATION_DURATION_MILLIS = SLIDE_ANIMATION_DURATION_MILLIS - OPEN_CLOSE_ICON_ANIMATION_DELAY_MILLIS;

    private static final float OPEN_CLOSE_ICON_ROTATION_UP = -180f;
    private static final float OPEN_CLOSE_ICON_ROTATION_DOWN = 0f;

    // Key for saving the drawer expanded state across configuration changes
    private static final String BUNDLE_KEY_SLIDING_DRAWER_IS_EXPANDED = "slidingDrawerIsExpanded";


    ////////// Static Variable(s) //////////


    ////////// Member Variable(s) //////////

    private Product mProduct;

    // Views found in the layout. Note that the SDK allows replacement layouts,
    // so several of these may be null for a given layout.
    private View mOverlaidComponents;
    private ViewPager mProductImageViewPager;
    private PagingDots mPagingDots;
    private Button mOverlaidStartButton;
    private SlidingOverlayFrame mSlidingOverlayFrame;
    private View mDrawerControlLayout;
    private ImageView mOpenCloseDrawerIconImageView;
    private Button mProceedOverlayButton;

    private PagerAdapter mProductImageAdaptor;


    ////////// Static Initialiser(s) //////////


    ////////// Static Method(s) //////////

    /*****************************************************
     *
     * Creates a new instance of this fragment.
     *
     *****************************************************/
    public static ProductOverviewFragment newInstance( Product product )
    {
        ProductOverviewFragment fragment = new ProductOverviewFragment();

        // The product is supplied via the arguments bundle so it survives
        // fragment re-creation.
        Bundle arguments = new Bundle();
        arguments.putParcelable( BUNDLE_KEY_PRODUCT, product );

        fragment.setArguments( arguments );

        return (fragment);
    }


    ////////// Constructor(s) //////////


    ////////// Activity Method(s) //////////

    /*****************************************************
     *
     * Called when the fragment is created.
     *
     * Retrieves the product from the arguments bundle; if either the
     * arguments or the product are missing, displays a modal error
     * dialog that finishes the activity when dismissed.
     *
     *****************************************************/
    @Override
    public void onCreate( Bundle savedInstanceState )
    {
        super.onCreate( savedInstanceState );

        // Get the product
        Bundle arguments = getArguments();

        if ( arguments == null )
        {
            Log.e( TAG, "No arguments found" );

            mKiteActivity.displayModalDialog(
                R.string.alert_dialog_title_no_arguments,
                R.string.alert_dialog_message_no_arguments,
                AKiteActivity.NO_BUTTON,
                null,
                R.string.Cancel,
                mKiteActivity.new FinishRunnable()
            );

            return;
        }

        mProduct = (Product) arguments.getParcelable( BUNDLE_KEY_PRODUCT );

        if ( mProduct == null )
        {
            Log.e( TAG, "No product found" );

            mKiteActivity.displayModalDialog(
                R.string.alert_dialog_title_product_not_found,
                R.string.alert_dialog_message_product_not_found,
                AKiteActivity.NO_BUTTON,
                null,
                R.string.Cancel,
                mKiteActivity.new FinishRunnable()
            );

            return;
        }
    }


    /*****************************************************
     *
     * Returns the content view for this fragment
     *
     *****************************************************/
    @Override
    public View onCreateView( LayoutInflater layoutInflator, ViewGroup container, Bundle savedInstanceState )
    {
        boolean slidingDrawerIsExpanded = false;

        // Get any saved instance state. Analytics are only tracked on a fresh
        // view (not on re-creation after e.g. rotation).
        if ( savedInstanceState != null )
        {
            slidingDrawerIsExpanded = savedInstanceState.getBoolean( BUNDLE_KEY_SLIDING_DRAWER_IS_EXPANDED, false );
        }
        else
        {
            Analytics.getInstance( mKiteActivity ).trackProductOverviewScreenViewed( mProduct );
        }

        // Set up the screen. Note that the SDK allows for different layouts to be used in place of the standard
        // one, so some of these views are optional and may not actually exist in the current layout.

        View view = layoutInflator.inflate( R.layout.screen_product_overview, container, false );

        mProductImageViewPager = (ViewPager) view.findViewById( R.id.view_pager );
        mOverlaidComponents = view.findViewById( R.id.overlaid_components );
        mPagingDots = (PagingDots) view.findViewById( R.id.paging_dots );
        mOverlaidStartButton = (Button) view.findViewById( R.id.overlaid_start_button );
        mSlidingOverlayFrame = (SlidingOverlayFrame) view.findViewById( R.id.sliding_overlay_frame );
        mDrawerControlLayout = view.findViewById( R.id.drawer_control_layout );
        mOpenCloseDrawerIconImageView = (ImageView) view.findViewById( R.id.open_close_drawer_icon_image_view );
        mProceedOverlayButton = (Button)view.findViewById( R.id.proceed_overlay_button );

        TextView priceTextView = (TextView) view.findViewById( R.id.price_text_view );
        TextView summaryDescriptionTextView = (TextView)view.findViewById( R.id.summary_description_text_view );
        TextView summaryShippingTextView = (TextView)view.findViewById( R.id.summary_shipping_text_view );
        View descriptionLayout = view.findViewById( R.id.description_layout );
        TextView descriptionTextView = (TextView)view.findViewById( R.id.description_text_view );
        View sizeLayout = view.findViewById( R.id.size_layout );
        TextView sizeTextView = (TextView) view.findViewById( R.id.size_text_view );
        View quantityLayout = view.findViewById( R.id.quantity_layout );
        TextView quantityTextView = (TextView) view.findViewById( R.id.quantity_text_view );
        TextView shippingTextView = (TextView) view.findViewById( R.id.shipping_text_view );

        // Paging dots: each dot fades and "pops" via a bell-curve scale as it
        // gains or loses selection.

        Animation pagingDotOutAlphaAnimation = new AlphaAnimation( PAGING_DOT_ANIMATION_OPAQUE, PAGING_DOT_ANIMATION_TRANSLUCENT );
        pagingDotOutAlphaAnimation.setFillAfter( true );
        pagingDotOutAlphaAnimation.setDuration( PAGING_DOT_ANIMATION_DURATION_MILLIS );

        Animation pagingDotOutScaleAnimation = new ScaleAnimation(
            0f,
            PAGING_DOT_ANIMATION_NORMAL_SCALE,
            0f,
            PAGING_DOT_ANIMATION_NORMAL_SCALE,
            Animation.RELATIVE_TO_SELF,
            0.5f,
            Animation.RELATIVE_TO_SELF,
            0.5f );

        pagingDotOutScaleAnimation.setFillAfter( true );
        pagingDotOutScaleAnimation.setDuration( PAGING_DOT_ANIMATION_DURATION_MILLIS );
        pagingDotOutScaleAnimation.setInterpolator( new BellInterpolator( 1.0f, 0.8f, true ) );

        AnimationSet pagingDotOutAnimation = new AnimationSet( false );
        pagingDotOutAnimation.addAnimation( pagingDotOutAlphaAnimation );
        pagingDotOutAnimation.addAnimation( pagingDotOutScaleAnimation );
        pagingDotOutAnimation.setFillAfter( true );


        Animation pagingDotInAlphaAnimation = new AlphaAnimation( PAGING_DOT_ANIMATION_TRANSLUCENT, PAGING_DOT_ANIMATION_OPAQUE );
        pagingDotInAlphaAnimation.setFillAfter( true );
        pagingDotInAlphaAnimation.setDuration( PAGING_DOT_ANIMATION_DURATION_MILLIS );

        Animation pagingDotInScaleAnimation = new ScaleAnimation(
            0f,
            PAGING_DOT_ANIMATION_NORMAL_SCALE,
            0f,
            PAGING_DOT_ANIMATION_NORMAL_SCALE,
            Animation.RELATIVE_TO_SELF,
            0.5f,
            Animation.RELATIVE_TO_SELF,
            0.5f );

        pagingDotInScaleAnimation.setFillAfter( true );
        pagingDotInScaleAnimation.setDuration( PAGING_DOT_ANIMATION_DURATION_MILLIS );
        pagingDotInScaleAnimation.setInterpolator( new BellInterpolator( 1.0f, 1.2f ) );

        AnimationSet pagingDotInAnimation = new AnimationSet( false );
        pagingDotInAnimation.addAnimation( pagingDotInAlphaAnimation );
        pagingDotInAnimation.addAnimation( pagingDotInScaleAnimation );
        pagingDotInAnimation.setFillAfter( true );

        // NOTE(review): the "out" animation installed is the alpha-only animation,
        // not the pagingDotOutAnimation set built above — confirm whether the
        // scale-out was intentionally dropped.
        mPagingDots.setProperties( mProduct.getImageURLList().size(), R.drawable.paging_dot_unselected, R.drawable.paging_dot_selected );
        mPagingDots.setOutAnimation( pagingDotOutAlphaAnimation );
        mPagingDots.setInAnimation( pagingDotInAnimation );

        mProductImageViewPager.setOnPageChangeListener( mPagingDots );

        mOverlaidComponents.setAlpha( slidingDrawerIsExpanded ? 0f : 1f ); // If the drawer starts open, these components need to be invisible

        if ( mSlidingOverlayFrame != null )
        {
            mSlidingOverlayFrame.snapToExpandedState( slidingDrawerIsExpanded );
            mSlidingOverlayFrame.setSlideAnimationDuration( SLIDE_ANIMATION_DURATION_MILLIS );
            mOpenCloseDrawerIconImageView.setRotation( slidingDrawerIsExpanded ? OPEN_CLOSE_ICON_ROTATION_DOWN : OPEN_CLOSE_ICON_ROTATION_UP );
        }

        // Format the size as integers when both dimensions are whole numbers,
        // otherwise as floats.
        SingleUnitSize size = mProduct.getSizeWithFallback( UnitOfLength.CENTIMETERS );
        boolean formatAsInt = size.getWidth() == (int) size.getWidth() && size.getHeight() == (int) size.getHeight();
        String sizeFormatString = getString( formatAsInt ? R.string.product_size_format_string_int : R.string.product_size_format_string_float );
        String sizeString = String.format( sizeFormatString, size.getWidth(), size.getHeight(), size.getUnit().shortString( mKiteActivity ) );

        int quantityPerSheet = mProduct.getQuantityPerSheet();
        MultipleDestinationShippingCosts shippingCosts = mProduct.getShippingCosts();

        Locale locale = Locale.getDefault();
        Country country = Country.getInstance( locale );

        SingleCurrencyAmount singleCurrencyCost;

        // Price
        if ( isVisible( priceTextView ) )
        {
            singleCurrencyCost = mProduct.getCostWithFallback( locale );
            if ( singleCurrencyCost != null ) priceTextView.setText( singleCurrencyCost.getDisplayAmountForLocale( locale ) );
        }

        // Summary description. This is a short description - not to be confused with the (full) description.
        if ( isVisible( summaryDescriptionTextView ) )
        {
            String summaryDescription =
                String.valueOf( quantityPerSheet )
                + " "
                + mProduct.getName()
                + ( Product.isSensibleSize( size ) ? " (" + sizeString + ")" : "" );

            summaryDescriptionTextView.setText( summaryDescription );
        }

        // (Full) description
        String description = mProduct.getDescription();
        boolean haveDescription = ( description != null && ( ! description.trim().equals( "" ) ) );

        if ( haveDescription &&
             descriptionLayout != null &&
             descriptionTextView != null )
        {
            descriptionLayout.setVisibility( View.VISIBLE );
            descriptionTextView.setVisibility( View.VISIBLE );
            descriptionTextView.setText( description );
        }
        else
        {
            if ( descriptionLayout != null ) descriptionLayout.setVisibility( View.GONE );
            if ( descriptionTextView != null ) descriptionTextView.setVisibility( View.GONE );
        }

        // Size
        if ( isVisible( sizeTextView ) )
        {
            if ( Product.isSensibleSize( size ) )
            {
                sizeTextView.setText( String.format( sizeFormatString, size.getWidth(), size.getHeight(), size.getUnit().shortString( mKiteActivity ) ) );
            }
            else
            {
                sizeLayout.setVisibility( View.GONE );
            }
        }

        // Quantity is only shown when there is more than one item per sheet.
        if ( isVisible( quantityTextView ) )
        {
            if ( quantityPerSheet > 1 )
            {
                quantityLayout.setVisibility( View.VISIBLE );
                quantityTextView.setText( getString( R.string.product_quantity_format_string, quantityPerSheet ) );
            }
            else
            {
                quantityLayout.setVisibility( View.GONE );
            }
        }

        // Shipping description
        if ( isVisible( summaryShippingTextView ) )
        {
            // Currently we just check that shipping is free everywhere. If it isn't - we don't display
            // anything.
            boolean freeShippingEverywhere = true;

            MultipleDestinationShippingCosts multipleDestinationShippingCosts = shippingCosts;

            for ( SingleDestinationShippingCost singleDestinationShippingCosts : multipleDestinationShippingCosts.asList() )
            {
                MultipleCurrencyAmount multipleCurrencyShippingCost = singleDestinationShippingCosts.getCost();

                for ( SingleCurrencyAmount singleCurrencyShippingCost : multipleCurrencyShippingCost.asCollection() )
                {
                    if ( singleCurrencyShippingCost.isNonZero() )
                    {
                        freeShippingEverywhere = false;
                    }
                }
            }

            if ( freeShippingEverywhere )
            {
                summaryShippingTextView.setText( R.string.product_free_worldwide_shipping );
            }
            else
            {
                summaryShippingTextView.setText( getString( R.string.product_shipping_summary_format_string, shippingCosts.getDisplayCost( locale ) ) );
            }
        }

        // Shipping (postage)
        if ( isVisible( shippingTextView ) )
        {
            List<SingleDestinationShippingCost> sortedShippingCostList = mProduct.getSortedShippingCosts( country );

            StringBuilder shippingCostsStringBuilder = new StringBuilder();

            String newlineString = "";

            for ( SingleDestinationShippingCost singleDestinationShippingCost : sortedShippingCostList )
            {
                // We want to prepend a new line for every shipping destination except the first
                shippingCostsStringBuilder.append( newlineString );
                newlineString = "\n";

                // Get the cost in the default currency for the locale, and format the amount.
                singleCurrencyCost = singleDestinationShippingCost.getCost().getDefaultAmountWithFallback();

                if ( singleCurrencyCost != null )
                {
                    String formatString = getString( R.string.product_shipping_format_string );

                    String costString = ( singleCurrencyCost.isNonZero()
                        ? singleCurrencyCost.getDisplayAmountForLocale( locale )
                        : getString( R.string.product_free_shipping ) );

                    shippingCostsStringBuilder
                        .append( String.format( formatString, singleDestinationShippingCost.getDestinationDescription( mKiteActivity ), costString ) );
                }

                // NOTE(review): setText is invoked on every loop iteration; the final
                // iteration leaves the complete string, so behaviour is correct but the
                // call could be moved outside the loop.
                shippingTextView.setText( shippingCostsStringBuilder.toString() );
            }
        }

        if ( mProceedOverlayButton != null )
        {
            mProceedOverlayButton.setText( R.string.product_overview_start_button_text );
            mProceedOverlayButton.setOnClickListener( this );
        }

        mProductImageViewPager.setOnClickListener( this );
        if ( mDrawerControlLayout != null ) mDrawerControlLayout.setOnClickListener( this );
        mOverlaidStartButton.setOnClickListener( this );

        return ( view );
    }


    /*****************************************************
     *
     * Called when the back key is pressed. The fragment
     * can either intercept it, or ignore it - in which case
     * the default behaviour is performed.
     *
     *****************************************************/
    @Override
    public boolean onBackPressIntercepted()
    {
        // If the slider is open - close it
        if ( mSlidingOverlayFrame != null && mSlidingOverlayFrame.sliderIsExpanded() )
        {
            toggleSliderState();
            return ( true );
        }

        return ( false );
    }


    /*****************************************************
     *
     * Called to save the state of the instance when (e.g.)
     * changing orientation.
     *
     *****************************************************/
    @Override
    public void onSaveInstanceState( Bundle outState )
    {
        super.onSaveInstanceState( outState );

        // Save the state of the sliding drawer
        if ( mSlidingOverlayFrame != null )
        {
            outState.putBoolean( BUNDLE_KEY_SLIDING_DRAWER_IS_EXPANDED, mSlidingOverlayFrame.sliderIsExpanded() );
        }
    }


    /*****************************************************
     *
     * Called when the fragment is top-most.
     *
     *****************************************************/
    @Override
    public void onTop()
    {
        super.onTop();

        if ( mProduct != null ) mKiteActivity.setTitle( mProduct.getName() );

        // Set up the product image gallery
        mProductImageAdaptor = new ProductImagePagerAdaptor( mKiteActivity, mProduct.getImageURLList(), this );
        mProductImageViewPager.setAdapter( mProductImageAdaptor );
    }


    /*****************************************************
     *
     * Called when the fragment is not on top.
     *
     *****************************************************/
    @Override
    public void onNotTop()
    {
        super.onNotTop();

        // Clear out the stored images to reduce memory usage
        // when not on this screen.
        if ( mProductImageViewPager != null ) mProductImageViewPager.setAdapter( null );
        mProductImageAdaptor = null;
    }


    ////////// View.OnClickListener Method(s) //////////

    /*****************************************************
     *
     * Called when a view is clicked.
     *
     *****************************************************/
    @Override
    public void onClick( View view )
    {
        // Anything that's not the drawer control is assumed to be
        // one of the start buttons.

        if ( view == mDrawerControlLayout )
        {
            toggleSliderState();
        }
        else
        {
            onCreateProduct();
        }
    }


    ////////// Method(s) //////////

    /*****************************************************
     *
     * Returns true if a view is visible, false otherwise.
     *
     *****************************************************/
    private boolean isVisible( View view )
    {
        return ( view != null && view.getVisibility() == View.VISIBLE );
    }


    /*****************************************************
     *
     * Called when one of the start creating buttons is clicked.
     *
     *****************************************************/
    public void onCreateProduct()
    {
        // Call back to the activity
        if ( mKiteActivity instanceof ICallback )
        {
            ( (ICallback)mKiteActivity ).poOnCreateProduct( mProduct );
        }
    }


    /*****************************************************
     *
     * Called when the details control is clicked.
     *
     *****************************************************/
    private void toggleSliderState()
    {
        // We want to animate the following:
        //   - Overlaid start button fade in / out
        //   - Sliding drawer up / down
        //   - Open / close drawer icon rotation

        boolean sliderWillBeOpening = ! mSlidingOverlayFrame.sliderIsExpanded();

        float overlaidComponentsInitialAlpha;
        float overlaidComponentsFinalAlpha;

        float openCloseIconInitialRotation;
        float openCloseIconFinalRotation;

        if ( sliderWillBeOpening )
        {
            overlaidComponentsInitialAlpha = 1f;
            overlaidComponentsFinalAlpha = 0f;

            openCloseIconInitialRotation = OPEN_CLOSE_ICON_ROTATION_UP;
            openCloseIconFinalRotation = OPEN_CLOSE_ICON_ROTATION_DOWN;
        }
        else
        {
            overlaidComponentsInitialAlpha = 0f;
            overlaidComponentsFinalAlpha = 1f;

            openCloseIconInitialRotation = OPEN_CLOSE_ICON_ROTATION_DOWN;
            openCloseIconFinalRotation = OPEN_CLOSE_ICON_ROTATION_UP;
        }

        // Create the overlaid components animation
        Animation overlaidComponentsAnimation = new AlphaAnimation( overlaidComponentsInitialAlpha, overlaidComponentsFinalAlpha );
        overlaidComponentsAnimation.setDuration( SLIDE_ANIMATION_DURATION_MILLIS );
        overlaidComponentsAnimation.setFillAfter( true );

        // Create the open/close icon animation.
        // The rotation is delayed, but will finish at the same time as the slide animation.
        Animation openCloseIconAnimation = new RotateAnimation( openCloseIconInitialRotation, openCloseIconFinalRotation, mOpenCloseDrawerIconImageView.getWidth() * 0.5f, mOpenCloseDrawerIconImageView.getHeight() * 0.5f );
        openCloseIconAnimation.setStartOffset( OPEN_CLOSE_ICON_ANIMATION_DELAY_MILLIS );
        openCloseIconAnimation.setDuration( OPEN_CLOSE_ICON_ANIMATION_DURATION_MILLIS );
        openCloseIconAnimation.setFillAfter( true );

        if ( mOverlaidComponents != null )
        {
            mOverlaidComponents.setAlpha( 1f ); // Clear any alpha already applied
            mOverlaidComponents.startAnimation( overlaidComponentsAnimation );
        }

        if ( mOpenCloseDrawerIconImageView != null )
        {
            mOpenCloseDrawerIconImageView.setRotation( 0f ); // Clear any rotation already applied
            mOpenCloseDrawerIconImageView.startAnimation( openCloseIconAnimation );
        }

        mSlidingOverlayFrame.animateToExpandedState( sliderWillBeOpening );
    }


    ////////// Inner Class(es) //////////

    /*****************************************************
     *
     * A callback interface.
     *
     *****************************************************/
    public interface ICallback
    {
        // Invoked when the user taps a "start creating" button for the product.
        public void poOnCreateProduct( Product product );
    }

}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.hive;
import com.facebook.presto.hive.authentication.NoHdfsAuthentication;
import com.facebook.presto.hive.metastore.Column;
import com.facebook.presto.hive.metastore.StorageFormat;
import com.facebook.presto.hive.metastore.Table;
import com.facebook.presto.spi.ConnectorSession;
import com.facebook.presto.spi.ConnectorSplit;
import com.facebook.presto.spi.SchemaTableName;
import com.facebook.presto.spi.predicate.Domain;
import com.facebook.presto.spi.predicate.TupleDomain;
import com.facebook.presto.testing.TestingConnectorSession;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import io.airlift.units.DataSize;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hive.metastore.TableType;
import org.apache.hadoop.util.Progressable;
import org.testng.annotations.Test;
import java.net.URI;
import java.util.Iterator;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.Executor;
import static com.facebook.presto.hive.HiveBucketing.HiveBucket;
import static com.facebook.presto.hive.HiveColumnHandle.pathColumnHandle;
import static com.facebook.presto.hive.HiveType.HIVE_INT;
import static com.facebook.presto.hive.HiveType.HIVE_STRING;
import static com.facebook.presto.hive.HiveUtil.getRegularColumnHandles;
import static com.facebook.presto.spi.predicate.TupleDomain.withColumnDomains;
import static com.facebook.presto.spi.type.VarcharType.VARCHAR;
import static com.google.common.util.concurrent.MoreExecutors.directExecutor;
import static io.airlift.slice.Slices.utf8Slice;
import static io.airlift.units.DataSize.Unit.GIGABYTE;
import static org.testng.Assert.assertEquals;
public class TestBackgroundSplitLoader
{
private static final int BUCKET_COUNT = 2;
private static final String SAMPLE_PATH = "hdfs://VOL1:9000/db_name/table_name/000000_0";
private static final String SAMPLE_PATH_FILTERED = "hdfs://VOL1:9000/db_name/table_name/000000_1";
private static final String TEST_CONNECTOR_ID = "test_connector";
private static final Path RETURNED_PATH = new Path(SAMPLE_PATH);
private static final Path FILTERED_PATH = new Path(SAMPLE_PATH_FILTERED);
private static final Executor EXECUTOR = directExecutor();
private static final TupleDomain<HiveColumnHandle> RETURNED_PATH_DOMAIN = withColumnDomains(
ImmutableMap.of(
pathColumnHandle(TEST_CONNECTOR_ID),
Domain.singleValue(VARCHAR, utf8Slice(RETURNED_PATH.toString()))));
private static final List<LocatedFileStatus> TEST_FILES = ImmutableList.of(
locatedFileStatus(RETURNED_PATH),
locatedFileStatus(FILTERED_PATH));
private static final List<Column> PARTITION_COLUMNS = ImmutableList.of(
new Column("partitionColumn", HIVE_INT, Optional.empty()));
private static final Optional<HiveBucketProperty> BUCKET_PROPERTY = Optional.of(
new HiveBucketProperty(ImmutableList.of("col1"), BUCKET_COUNT));
private static final Table SIMPLE_TABLE = table(ImmutableList.of(), Optional.empty());
private static final Table PARTITIONED_TABLE = table(PARTITION_COLUMNS, BUCKET_PROPERTY);
@Test
public void testNoPathFilter()
        throws Exception
{
    BackgroundHiveSplitLoader backgroundHiveSplitLoader = backgroundHiveSplitLoader(
            TEST_FILES,
            TupleDomain.none());

    HiveSplitSource hiveSplitSource = hiveSplitSource(backgroundHiveSplitLoader);
    backgroundHiveSplitLoader.start(hiveSplitSource);

    // TestNG's assertEquals signature is (actual, expected) — the measured value
    // goes first so failure messages read correctly.
    assertEquals(hiveSplitSource.getOutstandingSplitCount(), 2);
}
@Test
public void testPathFilter()
        throws Exception
{
    BackgroundHiveSplitLoader backgroundHiveSplitLoader = backgroundHiveSplitLoader(
            TEST_FILES,
            RETURNED_PATH_DOMAIN);

    HiveSplitSource hiveSplitSource = hiveSplitSource(backgroundHiveSplitLoader);
    backgroundHiveSplitLoader.start(hiveSplitSource);

    // TestNG's assertEquals signature is (actual, expected) — the measured value
    // goes first so failure messages read correctly.
    assertEquals(hiveSplitSource.getOutstandingSplitCount(), 1);

    List<ConnectorSplit> connectorSplits = hiveSplitSource.getNextBatch(1).get();
    assertEquals(connectorSplits.size(), 1);
    assertEquals(((HiveSplit) connectorSplits.get(0)).getPath(), RETURNED_PATH.toString());
}
@Test
public void testPathFilterOneBucketMatchPartitionedTable()
        throws Exception
{
    BackgroundHiveSplitLoader backgroundHiveSplitLoader = backgroundHiveSplitLoader(
            TEST_FILES,
            RETURNED_PATH_DOMAIN,
            ImmutableList.of(
                    new HiveBucket(0, BUCKET_COUNT),
                    new HiveBucket(1, BUCKET_COUNT)),
            PARTITIONED_TABLE,
            Optional.empty());

    HiveSplitSource hiveSplitSource = hiveSplitSource(backgroundHiveSplitLoader);
    backgroundHiveSplitLoader.start(hiveSplitSource);

    // TestNG's assertEquals signature is (actual, expected) — the measured value
    // goes first so failure messages read correctly.
    assertEquals(hiveSplitSource.getOutstandingSplitCount(), 1);

    List<ConnectorSplit> connectorSplits = hiveSplitSource.getNextBatch(1).get();
    assertEquals(connectorSplits.size(), 1);
    assertEquals(((HiveSplit) connectorSplits.get(0)).getPath(), RETURNED_PATH.toString());
}
@Test
public void testPathFilterBucketedPartitionedTable()
        throws Exception
{
    BackgroundHiveSplitLoader backgroundHiveSplitLoader = backgroundHiveSplitLoader(
            TEST_FILES,
            RETURNED_PATH_DOMAIN,
            ImmutableList.of(),
            PARTITIONED_TABLE,
            Optional.of(
                    new HiveBucketHandle(
                            getRegularColumnHandles(TEST_CONNECTOR_ID, PARTITIONED_TABLE),
                            BUCKET_COUNT)));

    HiveSplitSource hiveSplitSource = hiveSplitSource(backgroundHiveSplitLoader);
    backgroundHiveSplitLoader.start(hiveSplitSource);

    // TestNG's assertEquals signature is (actual, expected) — the measured value
    // goes first so failure messages read correctly.
    assertEquals(hiveSplitSource.getOutstandingSplitCount(), 1);

    List<ConnectorSplit> connectorSplits = hiveSplitSource.getNextBatch(1).get();
    assertEquals(connectorSplits.size(), 1);
    assertEquals(((HiveSplit) connectorSplits.get(0)).getPath(), RETURNED_PATH.toString());
}
/**
 * Convenience overload: builds a loader over the simple (unpartitioned,
 * unbucketed) table with no bucket filter.
 */
private static BackgroundHiveSplitLoader backgroundHiveSplitLoader(
        List<LocatedFileStatus> files,
        TupleDomain<HiveColumnHandle> tupleDomain)
{
    // Delegate to the full factory with empty bucket information.
    return backgroundHiveSplitLoader(files, tupleDomain, ImmutableList.of(), SIMPLE_TABLE, Optional.empty());
}
/**
 * Builds a {@link BackgroundHiveSplitLoader} over a single fake partition,
 * using the testing HDFS environment and directory lister.
 */
private static BackgroundHiveSplitLoader backgroundHiveSplitLoader(
        List<LocatedFileStatus> files,
        TupleDomain<HiveColumnHandle> compactEffectivePredicate,
        List<HiveBucket> buckets,
        Table table,
        Optional<HiveBucketHandle> bucketHandle)
{
    // One synthetic partition carrying the predicate under test.
    HivePartitionMetadata partitionMetadata = new HivePartitionMetadata(
            new HivePartition(new SchemaTableName("testSchema", "table_name"), compactEffectivePredicate, ImmutableList.of()),
            Optional.empty(),
            ImmutableMap.of());
    List<HivePartitionMetadata> partitions = ImmutableList.of(partitionMetadata);

    // Session with a max split size large enough that files are never split.
    HiveSessionProperties sessionProperties =
            new HiveSessionProperties(new HiveClientConfig().setMaxSplitSize(new DataSize(1.0, GIGABYTE)));
    ConnectorSession session = new TestingConnectorSession(sessionProperties.getSessionProperties());

    return new BackgroundHiveSplitLoader(
            TEST_CONNECTOR_ID,
            table,
            partitions,
            bucketHandle,
            buckets,
            session,
            new TestingHdfsEnvironment(),
            new NamenodeStats(),
            new TestingDirectoryLister(files),
            EXECUTOR,
            2,
            0,
            false);
}
private static HiveSplitSource hiveSplitSource(BackgroundHiveSplitLoader backgroundHiveSplitLoader)
{
return new HiveSplitSource(
1,
backgroundHiveSplitLoader,
EXECUTOR);
}
private static Table table(
List<Column> partitionColumns,
Optional<HiveBucketProperty> bucketProperty)
{
Table.Builder tableBuilder = Table.builder();
tableBuilder.getStorageBuilder()
.setStorageFormat(
StorageFormat.create(
"com.facebook.hive.orc.OrcSerde",
"org.apache.hadoop.hive.ql.io.RCFileInputFormat",
"org.apache.hadoop.hive.ql.io.RCFileInputFormat"))
.setLocation("hdfs://VOL1:9000/db_name/table_name")
.setSkewed(false)
.setBucketProperty(bucketProperty)
.setSorted(false);
return tableBuilder
.setDatabaseName("test_dbname")
.setOwner("testOwner")
.setTableName("test_table")
.setTableType(TableType.MANAGED_TABLE.toString())
.setDataColumns(ImmutableList.of(new Column("col1", HIVE_STRING, Optional.empty())))
.setParameters(ImmutableMap.of())
.setPartitionColumns(partitionColumns)
.build();
}
private static LocatedFileStatus locatedFileStatus(Path path)
{
return new LocatedFileStatus(
0L,
false,
0,
0L,
0L,
0L,
null,
null,
null,
null,
path,
new BlockLocation[] {new BlockLocation()});
}
private static class TestingDirectoryLister
implements DirectoryLister
{
private final List<LocatedFileStatus> files;
public TestingDirectoryLister(List<LocatedFileStatus> files)
{
this.files = files;
}
@Override
public RemoteIterator<LocatedFileStatus> list(FileSystem fs, Path path)
{
return new RemoteIterator<LocatedFileStatus>()
{
private final Iterator<LocatedFileStatus> iterator = files.iterator();
@Override
public boolean hasNext()
{
return iterator.hasNext();
}
@Override
public LocatedFileStatus next()
{
return iterator.next();
}
};
}
}
    // HdfsEnvironment stub for tests: configured with default client settings,
    // no HDFS authentication, and a file system whose every operation throws.
    private static class TestingHdfsEnvironment
            extends HdfsEnvironment
    {
        public TestingHdfsEnvironment()
        {
            // Default configs; NoHdfsAuthentication skips Kerberos/impersonation.
            super(
                    new HiveHdfsConfiguration(new HdfsConfigurationUpdater(new HiveClientConfig(), new HiveS3Config())),
                    new HiveClientConfig(),
                    new NoHdfsAuthentication());
        }

        // Always hands back the throwing stub regardless of user/path/config.
        @Override
        public FileSystem getFileSystem(String user, Path path, Configuration configuration)
        {
            return new TestingHdfsFileSystem();
        }
    }
private static class TestingHdfsFileSystem
extends FileSystem
{
@Override
public boolean delete(Path f, boolean recursive)
{
throw new UnsupportedOperationException();
}
@Override
public boolean rename(Path src, Path dst)
{
throw new UnsupportedOperationException();
}
@Override
public void setWorkingDirectory(Path dir)
{
throw new UnsupportedOperationException();
}
@Override
public FileStatus[] listStatus(Path f)
{
throw new UnsupportedOperationException();
}
@Override
public FSDataOutputStream create(
Path f,
FsPermission permission,
boolean overwrite,
int bufferSize,
short replication,
long blockSize,
Progressable progress)
{
throw new UnsupportedOperationException();
}
@Override
public boolean mkdirs(Path f, FsPermission permission)
{
throw new UnsupportedOperationException();
}
@Override
public FSDataOutputStream append(Path f, int bufferSize, Progressable progress)
{
throw new UnsupportedOperationException();
}
@Override
public FSDataInputStream open(Path f, int buffersize)
{
throw new UnsupportedOperationException();
}
@Override
public FileStatus getFileStatus(Path f)
{
throw new UnsupportedOperationException();
}
@Override
public Path getWorkingDirectory()
{
throw new UnsupportedOperationException();
}
@Override
public URI getUri()
{
throw new UnsupportedOperationException();
}
}
}
| |
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.authentication.forms;
import static org.keycloak.userprofile.profile.UserProfileContextFactory.forRegistrationUserCreation;
import org.keycloak.Config;
import org.keycloak.authentication.FormAction;
import org.keycloak.authentication.FormActionFactory;
import org.keycloak.authentication.FormContext;
import org.keycloak.authentication.ValidationContext;
import org.keycloak.events.Details;
import org.keycloak.events.Errors;
import org.keycloak.events.EventType;
import org.keycloak.forms.login.LoginFormsProvider;
import org.keycloak.models.AuthenticationExecutionModel;
import org.keycloak.models.KeycloakSession;
import org.keycloak.models.KeycloakSessionFactory;
import org.keycloak.models.RealmModel;
import org.keycloak.models.UserModel;
import org.keycloak.models.utils.FormMessage;
import org.keycloak.protocol.oidc.OIDCLoginProtocol;
import org.keycloak.provider.ProviderConfigProperty;
import org.keycloak.services.messages.Messages;
import org.keycloak.services.resources.AttributeFormDataProcessor;
import org.keycloak.services.validation.Validation;
import org.keycloak.userprofile.UserProfile;
import org.keycloak.userprofile.profile.representations.AttributeUserProfile;
import org.keycloak.userprofile.utils.UserUpdateHelper;
import org.keycloak.userprofile.validation.UserProfileValidationResult;
import javax.ws.rs.core.MultivaluedMap;
import java.util.List;
/**
* @author <a href="mailto:bill@burkecentral.com">Bill Burke</a>
* @version $Revision: 1 $
*/
public class RegistrationUserCreation implements FormAction, FormActionFactory {

    public static final String PROVIDER_ID = "registration-user-creation";

    // Only REQUIRED and DISABLED make sense for user creation; 'final' so the
    // shared array reference cannot be reassigned.
    private static final AuthenticationExecutionModel.Requirement[] REQUIREMENT_CHOICES = {
            AuthenticationExecutionModel.Requirement.REQUIRED,
            AuthenticationExecutionModel.Requirement.DISABLED
    };

    @Override
    public String getHelpText() {
        return "This action must always be first! Validates the username of the user in validation phase. In success phase, this will create the user in the database.";
    }

    @Override
    public List<ProviderConfigProperty> getConfigProperties() {
        return null;
    }

    /**
     * Validation phase: runs the registration user-profile validation over the
     * submitted form, records identifying details on the event, and maps
     * profile validation failures to registration error codes. Fields that
     * failed are removed from the form data so they are not re-rendered.
     */
    @Override
    public void validate(ValidationContext context) {
        MultivaluedMap<String, String> formData = context.getHttpRequest().getDecodedFormParameters();
        context.getEvent().detail(Details.REGISTER_METHOD, "form");

        UserProfileValidationResult result = forRegistrationUserCreation(context.getSession(), formData).validate();
        UserProfile newProfile = result.getProfile();

        String email = newProfile.getAttributes().getFirstAttribute(UserModel.EMAIL);
        String username = newProfile.getAttributes().getFirstAttribute(UserModel.USERNAME);
        String firstName = newProfile.getAttributes().getFirstAttribute(UserModel.FIRST_NAME);
        String lastName = newProfile.getAttributes().getFirstAttribute(UserModel.LAST_NAME);
        context.getEvent().detail(Details.EMAIL, email);
        context.getEvent().detail(Details.USERNAME, username);
        context.getEvent().detail(Details.FIRST_NAME, firstName);
        context.getEvent().detail(Details.LAST_NAME, lastName);

        List<FormMessage> errors = Validation.getFormErrorsFromValidation(result);

        if (context.getRealm().isRegistrationEmailAsUsername()) {
            // Email doubles as the username in this realm; overwrite the detail.
            context.getEvent().detail(Details.USERNAME, email);
        }

        if (!errors.isEmpty()) {
            // Map the first matching profile failure onto an event error code
            // and drop the offending field so the form does not echo it back.
            if (result.hasFailureOfErrorType(Messages.EMAIL_EXISTS)) {
                context.error(Errors.EMAIL_IN_USE);
                formData.remove(RegistrationPage.FIELD_EMAIL);
            } else if (result.hasFailureOfErrorType(Messages.MISSING_EMAIL, Messages.MISSING_USERNAME, Messages.INVALID_EMAIL)) {
                if (result.hasFailureOfErrorType(Messages.INVALID_EMAIL))
                    formData.remove(Validation.FIELD_EMAIL);
                context.error(Errors.INVALID_REGISTRATION);
            } else if (result.hasFailureOfErrorType(Messages.USERNAME_EXISTS)) {
                context.error(Errors.USERNAME_IN_USE);
                formData.remove(Validation.FIELD_USERNAME);
            }

            context.validationError(formData, errors);
            return;
        }
        context.success();
    }

    @Override
    public void buildPage(FormContext context, LoginFormsProvider form) {
        // Nothing to add to the registration page for this action.
    }

    /**
     * Success phase: creates the user from the submitted profile, records the
     * registration event, and then switches the event to a LOGIN event carrying
     * the client, redirect URI, and auth method/type details.
     */
    @Override
    public void success(FormContext context) {
        AttributeUserProfile updatedProfile = AttributeFormDataProcessor.toUserProfile(context.getHttpRequest().getDecodedFormParameters());

        String email = updatedProfile.getAttributes().getFirstAttribute(UserModel.EMAIL);
        String username = updatedProfile.getAttributes().getFirstAttribute(UserModel.USERNAME);

        if (context.getRealm().isRegistrationEmailAsUsername()) {
            username = email;
        }

        context.getEvent().detail(Details.USERNAME, username)
                .detail(Details.REGISTER_METHOD, "form")
                .detail(Details.EMAIL, email);

        UserModel user = context.getSession().users().addUser(context.getRealm(), username);
        user.setEnabled(true);

        UserUpdateHelper.updateRegistrationUserCreation(context.getRealm(), user, updatedProfile);

        // Pre-fill the username on the next login form for this session.
        context.getAuthenticationSession().setClientNote(OIDCLoginProtocol.LOGIN_HINT_PARAM, username);

        context.setUser(user);
        context.getEvent().user(user);
        context.getEvent().success();

        // Registration succeeded; the remainder of the flow is reported as LOGIN.
        context.newEvent().event(EventType.LOGIN);
        context.getEvent().client(context.getAuthenticationSession().getClient().getClientId())
                .detail(Details.REDIRECT_URI, context.getAuthenticationSession().getRedirectUri())
                .detail(Details.AUTH_METHOD, context.getAuthenticationSession().getProtocol());
        String authType = context.getAuthenticationSession().getAuthNote(Details.AUTH_TYPE);
        if (authType != null) {
            context.getEvent().detail(Details.AUTH_TYPE, authType);
        }
    }

    @Override
    public boolean requiresUser() {
        return false;
    }

    @Override
    public boolean configuredFor(KeycloakSession session, RealmModel realm, UserModel user) {
        return true;
    }

    @Override
    public void setRequiredActions(KeycloakSession session, RealmModel realm, UserModel user) {
        // No required actions are attached by this form action.
    }

    @Override
    public boolean isUserSetupAllowed() {
        return false;
    }

    @Override
    public void close() {
        // Stateless; nothing to release.
    }

    @Override
    public String getDisplayType() {
        return "Registration User Creation";
    }

    @Override
    public String getReferenceCategory() {
        return null;
    }

    @Override
    public boolean isConfigurable() {
        return false;
    }

    @Override
    public AuthenticationExecutionModel.Requirement[] getRequirementChoices() {
        return REQUIREMENT_CHOICES;
    }

    // Factory is its own singleton instance; no per-session state.
    @Override
    public FormAction create(KeycloakSession session) {
        return this;
    }

    @Override
    public void init(Config.Scope config) {
    }

    @Override
    public void postInit(KeycloakSessionFactory factory) {
    }

    @Override
    public String getId() {
        return PROVIDER_ID;
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.