gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/**
* Copyright (C) 2004-2011 Jive Software. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jivesoftware.spark.roar.gui;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.Insets;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.util.HashMap;
import java.util.List;
import javax.swing.BorderFactory;
import javax.swing.DefaultListModel;
import javax.swing.JCheckBox;
import javax.swing.JComboBox;
import javax.swing.JComponent;
import javax.swing.JLabel;
import javax.swing.JList;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JSlider;
import javax.swing.JTextField;
import javax.swing.event.ChangeEvent;
import org.jivesoftware.spark.component.VerticalFlowLayout;
import org.jivesoftware.spark.roar.RoarProperties;
import org.jivesoftware.spark.roar.RoarResources;
import org.jivesoftware.spark.roar.displaytype.RoarDisplayType;
import org.jivesoftware.spark.util.ColorPick;
/**
* Super Awesome Preference Panel
*
* @author wolf.posdorfer
*
*/
public class RoarPreferencePanel extends JPanel {
private static final long serialVersionUID = -5334936099931215962L;
// private Image _backgroundimage;
private final JTextField _duration;
private final JTextField _amount;
private final JCheckBox _enabledCheckbox;
private final JComboBox<String> _typelist;
private final JList<ColorTypes> _singleColorlist;
private final ColorPick _singleColorpicker;
private final HashMap<ColorTypes, Color> _colormap;
private final HashMap<String, Object> _components;
private final Insets INSETS = new Insets(5, 5, 5, 5);
public RoarPreferencePanel() {
_components = new HashMap<>();
_colormap = new HashMap<>();
for (ColorTypes e : ColorTypes.values()) {
_colormap.put(e, Color.BLACK);
}
this.setLayout(new BorderLayout());
// ClassLoader cl = getClass().getClassLoader();
// _backgroundimage = new ImageIcon(cl.getResource("background2.png")).getImage();
_duration = new JTextField();
_amount = new JTextField();
_enabledCheckbox = new JCheckBox(RoarResources.getString("roar.enabled"));
_singleColorpicker = new ColorPick(false);
_singleColorpicker.addChangeListener(this::stateChangedSingleColorPicker);
DefaultListModel<ColorTypes> listModel = new DefaultListModel<>();
listModel.addElement(ColorTypes.BACKGROUNDCOLOR);
listModel.addElement(ColorTypes.HEADERCOLOR);
listModel.addElement(ColorTypes.TEXTCOLOR);
_singleColorlist = new JList<>(listModel);
List<RoarDisplayType> roarDisplayTypes = RoarProperties.getInstance().getDisplayTypes();
String[] _typelistdata = new String[ roarDisplayTypes.size() ];
for (int i = 0; i < roarDisplayTypes.size(); i++) {
_typelistdata[i] = roarDisplayTypes.get(i).getLocalizedName();
}
_typelist = new JComboBox<>( _typelistdata );
_typelist.addActionListener( e -> updateWarningLabel(getDisplayTypeClass().getWarningMessage()) );
add(makeGeneralSettingsPanel());
_singleColorlist.addMouseListener(new MouseAdapter() {
public void mouseClicked(MouseEvent e) {
colorListMouseClicked(e);
}
});
}
private JComponent makeGeneralSettingsPanel() {
JPanel generalPanel = new JPanel();
generalPanel.setLayout(new GridBagLayout());
generalPanel.setBorder(BorderFactory.createTitledBorder(RoarResources.getString("roar.settings")));
int rowcount = 0;
generalPanel.add(_enabledCheckbox,
new GridBagConstraints(0, rowcount, 1, 1, 0.0, 0.0, GridBagConstraints.NORTHWEST, GridBagConstraints.BOTH, INSETS, 0, 0));
rowcount++;
generalPanel.add(new JLabel(RoarResources.getString("roar.amount")),
new GridBagConstraints(0, rowcount, 1, 1, 0.0, 0.0, GridBagConstraints.NORTHWEST, GridBagConstraints.BOTH, INSETS, 0, 0));
generalPanel.add(_amount,
new GridBagConstraints(1, rowcount, 1, 1, 1.0, 0.0, GridBagConstraints.NORTHWEST, GridBagConstraints.BOTH, INSETS, 0, 0));
rowcount++;
generalPanel.add(new JLabel(RoarResources.getString("roar.location")),
new GridBagConstraints(0, rowcount, 1, 1, 0.0, 0.0, GridBagConstraints.NORTHWEST, GridBagConstraints.BOTH, INSETS, 0, 0));
generalPanel.add(_typelist,
new GridBagConstraints(1, rowcount, 1, 1, 0.8, 0.0, GridBagConstraints.NORTHWEST, GridBagConstraints.BOTH, INSETS, 0, 0));
rowcount++;
JLabel warningLabel = new JLabel("<html>placeholder :-)</html>");
//warningLabel.setForeground(Color.RED);
generalPanel.add(warningLabel,
new GridBagConstraints(1, rowcount, 1, 1, 1.0, 0.0, GridBagConstraints.NORTHWEST, GridBagConstraints.BOTH, INSETS, 0, 0));
_components.put("label.warning", warningLabel);
JPanel panel = new JPanel(new VerticalFlowLayout());
panel.add(generalPanel);
panel.add(makeSinglePanel());
panel.add(makeGroupChatPanel());
panel.add(makeKeyWordPanel());
return new JScrollPane(panel);
}
private JPanel makeSinglePanel() {
JPanel singlePanel = new JPanel();
singlePanel.setLayout(new GridBagLayout());
singlePanel.setBorder(BorderFactory.createTitledBorder(RoarResources.getString("roar.single")));
JCheckBox disableSingle = new JCheckBox(RoarResources.getString("roar.single.disable"));
// row
int rowcount = 0;
singlePanel.add(_singleColorlist,
new GridBagConstraints(0, rowcount, 1, 1, 0.0, 0.0, GridBagConstraints.NORTHWEST, GridBagConstraints.BOTH, INSETS, 0, 0));
singlePanel.add(_singleColorpicker,
new GridBagConstraints(1, rowcount, 1, 1, 1.0, 0.0, GridBagConstraints.NORTHWEST, GridBagConstraints.BOTH, INSETS, 0, 0));
rowcount++;
singlePanel.add(new JLabel(RoarResources.getString("roar.duration")),
new GridBagConstraints(0, rowcount, 1, 1, 0.0, 0.0, GridBagConstraints.NORTHWEST, GridBagConstraints.BOTH, INSETS, 0, 0));
singlePanel.add(_duration,
new GridBagConstraints(1, rowcount, 1, 1, 1.0, 0.0, GridBagConstraints.NORTHWEST, GridBagConstraints.BOTH, INSETS, 0, 0));
rowcount++;
singlePanel.add(disableSingle,
new GridBagConstraints(0, rowcount, 1, 1, 0.0, 0.0, GridBagConstraints.NORTHWEST, GridBagConstraints.BOTH, INSETS, 0, 0));
_components.put("roar.disable.single", disableSingle);
return singlePanel;
}
private JPanel makeGroupChatPanel() {
JPanel groupPanel = new JPanel();
groupPanel.setLayout(new GridBagLayout());
groupPanel.setBorder(BorderFactory.createTitledBorder(RoarResources.getString("roar.group")));
final JCheckBox enableDifferentGroup = new JCheckBox(RoarResources.getString("roar.group.different"));
JCheckBox disableGroup = new JCheckBox(RoarResources.getString("roar.group.disable"));
JTextField durationGroup = new JTextField();
enableDifferentGroup.addActionListener( e -> toggleDifferentSettingsForGroup(enableDifferentGroup.isSelected()) );
int rowcount = 0;
groupPanel.add(enableDifferentGroup,
new GridBagConstraints(0, rowcount, 1, 1, 0.0, 0.0, GridBagConstraints.NORTHWEST, GridBagConstraints.BOTH, INSETS, 0, 0));
rowcount++;
groupPanel.add(new JLabel(RoarResources.getString("roar.duration")),
new GridBagConstraints(0, rowcount, 1, 1, 0.0, 0.0, GridBagConstraints.NORTHWEST, GridBagConstraints.BOTH, INSETS, 0, 0));
groupPanel.add(durationGroup,
new GridBagConstraints(1, rowcount, 1, 1, 1.0, 0.0, GridBagConstraints.NORTHWEST, GridBagConstraints.BOTH, INSETS, 0, 0));
rowcount++;
groupPanel.add(disableGroup,
new GridBagConstraints(0, rowcount, 1, 1, 0.0, 0.0, GridBagConstraints.NORTHWEST, GridBagConstraints.BOTH, INSETS, 0, 0));
_components.put("group.different.enabled", enableDifferentGroup);
_components.put("group.duration", durationGroup);
_components.put("group.disable", disableGroup);
return groupPanel;
}
private JPanel makeKeyWordPanel() {
JPanel panel = new JPanel();
panel.setLayout(new GridBagLayout());
panel.setBorder(BorderFactory.createTitledBorder(RoarResources.getString("roar.keyword")));
final JCheckBox differentKeyword = new JCheckBox(RoarResources.getString("roar.keyword.different"));
differentKeyword.addActionListener( e -> toggleDifferentSettingsForKeyword(differentKeyword.isSelected()) );
JTextField durationKeyword = new JTextField();
JTextField keywords = new JTextField();
int rowcount = 0;
panel.add(differentKeyword,
new GridBagConstraints(0, rowcount, 1, 1, 0.0, 0.0, GridBagConstraints.NORTHWEST, GridBagConstraints.BOTH, INSETS, 0, 0));
rowcount++;
panel.add(new JLabel(RoarResources.getString("roar.duration")),
new GridBagConstraints(0, rowcount, 1, 1, 0.0, 0.0, GridBagConstraints.NORTHWEST, GridBagConstraints.BOTH, INSETS, 0, 0));
panel.add(durationKeyword,
new GridBagConstraints(1, rowcount, 1, 1, 1.0, 0.0, GridBagConstraints.NORTHWEST, GridBagConstraints.BOTH, INSETS, 0, 0));
rowcount++;
panel.add(new JLabel(RoarResources.getString("roar.keyword.keyword")),
new GridBagConstraints(0, rowcount, 1, 1, 0.0, 0.0, GridBagConstraints.NORTHWEST, GridBagConstraints.BOTH, INSETS, 0, 0));
panel.add(keywords, new GridBagConstraints(1, rowcount, 1, 1, 1.0, 0.0, GridBagConstraints.NORTHWEST, GridBagConstraints.BOTH, INSETS, 0, 0));
_components.put("keyword.different.enabled", differentKeyword);
_components.put("keyword.duration", durationKeyword);
_components.put("keywords", keywords);
return panel;
}
public void initializeValues() {
RoarProperties props = RoarProperties.getInstance();
_enabledCheckbox.setSelected(props.getShowingPopups());
_amount.setText("" + props.getMaximumPopups());
setDisplayType(props.getDisplayType());
setColor(ColorTypes.BACKGROUNDCOLOR, props.getBackgroundColor());
setColor(ColorTypes.BACKGROUNDCOLOR_GROUP, props.getColor(RoarProperties.BACKGROUNDCOLOR_GROUP, props.getBackgroundColor()));
setColor(ColorTypes.BACKGROUNDCOLOR_KEYWORD, props.getColor(RoarProperties.BACKGROUNDCOLOR_KEYWORD, props.getBackgroundColor()));
setColor(ColorTypes.HEADERCOLOR, props.getHeaderColor());
setColor(ColorTypes.HEADERCOLOR_GROUP, props.getColor(RoarProperties.HEADERCOLOR_GROUP, props.getHeaderColor()));
setColor(ColorTypes.HEADERCOLOR_KEYWORD, props.getColor(RoarProperties.HEADERCOLOR_KEYWORD, props.getHeaderColor()));
setColor(ColorTypes.TEXTCOLOR, props.getTextColor());
setColor(ColorTypes.TEXTCOLOR_GROUP, props.getColor(RoarProperties.TEXTCOLOR_GROUP, props.getTextColor()));
setColor(ColorTypes.TEXTCOLOR_KEYWORD, props.getColor(RoarProperties.TEXTCOLOR_KEYWORD, props.getTextColor()));
retrieveComponent("roar.disable.single", JCheckBox.class).setSelected(props.getBoolean("roar.disable.single", false));
_duration.setText("" + props.getDuration());
retrieveComponent("keyword.duration", JTextField.class).setText("" + props.getInt("keyword.duration"));
retrieveComponent("group.duration", JTextField.class).setText("" + props.getInt("group.duration"));
retrieveComponent("keywords", JTextField.class).setText(props.getProperty("keywords"));
retrieveComponent("group.disable", JCheckBox.class).setSelected(props.getBoolean("group.disable", false));
boolean group_different_enabled = props.getBoolean("group.different.enabled", false);
retrieveComponent("group.different.enabled", JCheckBox.class).setSelected(group_different_enabled);
toggleDifferentSettingsForGroup(group_different_enabled);
boolean keyword_different_enabled = props.getBoolean("keyword.different.enabled", false);
retrieveComponent("keyword.different.enabled", JCheckBox.class).setSelected(keyword_different_enabled);
toggleDifferentSettingsForKeyword(keyword_different_enabled);
}
public void storeValues() {
RoarProperties props = RoarProperties.getInstance();
props.setShowingPopups(_enabledCheckbox.isSelected());
props.setDisplayType(this.getDisplayType());
props.setMaximumPopups(this.getAmount());
props.setDuration(this.getDuration());
props.setInt("group.duration", getIntFromTextField("group.duration"));
props.setInt("keyword.duration", getIntFromTextField("keyword.duration"));
props.setKeywords(retrieveComponent("keywords", JTextField.class).getText());
props.setBackgroundColor(getColor(ColorTypes.BACKGROUNDCOLOR));
props.setColor(RoarProperties.BACKGROUNDCOLOR_GROUP, getColor(ColorTypes.BACKGROUNDCOLOR_GROUP));
props.setColor(RoarProperties.BACKGROUNDCOLOR_KEYWORD, getColor(ColorTypes.BACKGROUNDCOLOR_KEYWORD));
props.setTextColor(getColor(ColorTypes.TEXTCOLOR));
props.setColor(RoarProperties.TEXTCOLOR_GROUP, getColor(ColorTypes.TEXTCOLOR_GROUP));
props.setColor(RoarProperties.TEXTCOLOR_KEYWORD, getColor(ColorTypes.TEXTCOLOR_KEYWORD));
props.setHeaderColor(getColor(ColorTypes.HEADERCOLOR));
props.setColor(RoarProperties.HEADERCOLOR_GROUP, getColor(ColorTypes.HEADERCOLOR_GROUP));
props.setColor(RoarProperties.HEADERCOLOR_KEYWORD, getColor(ColorTypes.HEADERCOLOR_KEYWORD));
props.setBoolean("roar.disable.single", retrieveComponent("roar.disable.single", JCheckBox.class).isSelected());
props.setBoolean("group.different.enabled", retrieveComponent("group.different.enabled", JCheckBox.class).isSelected());
props.setBoolean("keyword.different.enabled", retrieveComponent("keyword.different.enabled", JCheckBox.class).isSelected());
props.setBoolean("group.disable", retrieveComponent("group.disable", JCheckBox.class).isSelected());
props.save();
}
@SuppressWarnings("unchecked")
private <K> K retrieveComponent(String key, Class<K> classs) {
return (K) _components.get(key);
}
private int getIntFromTextField(String key) {
JTextField field = retrieveComponent(key, JTextField.class);
try {
return Integer.parseInt(field.getText());
} catch (Exception e) {
return 3000;
}
}
/**
* returns the popup duration
*
* @return int
*/
public int getDuration() {
try {
return Integer.parseInt(_duration.getText());
} catch (Exception e) {
return 3000;
}
}
/**
* Amount of Windows on Screen
*
* @return int
*/
public int getAmount() {
return Integer.parseInt(_amount.getText());
}
public Color getColor(ColorTypes type) {
return _colormap.get(type);
}
public void setColor(ColorTypes type, Color color) {
_colormap.put(type, color);
}
private void colorListMouseClicked(MouseEvent e) {
if (e.getSource() == _singleColorlist) {
ColorTypes key = _singleColorlist.getSelectedValue();
_singleColorpicker.setColor(_colormap.get(key));
}
}
public void setDisplayType(String t) {
for (RoarDisplayType type : RoarProperties.getInstance().getDisplayTypes()) {
if (type.getName().equals(t)) {
_typelist.setSelectedItem(type.getLocalizedName());
updateWarningLabel(type.getWarningMessage());
return;
}
}
}
public void updateWarningLabel(String text) {
retrieveComponent("label.warning", JLabel.class).setText("<html>" + text + "</html>");
}
public RoarDisplayType getDisplayTypeClass() {
String o = (String) _typelist.getSelectedItem();
for (RoarDisplayType type : RoarProperties.getInstance().getDisplayTypes()) {
if (type.getLocalizedName().equals(o)) {
return type;
}
}
return RoarProperties.getInstance().getDisplayTypes().get(0);
// topright is default
}
public String getDisplayType() {
return getDisplayTypeClass().getName();
}
private void toggleDifferentSettingsForKeyword(boolean isSelected) {
DefaultListModel<ColorTypes> model = (DefaultListModel<ColorTypes>) _singleColorlist.getModel();
JTextField duration = retrieveComponent("keyword.duration", JTextField.class);
if (isSelected) {
if (!model.contains(ColorTypes.BACKGROUNDCOLOR_KEYWORD)) {
model.addElement(ColorTypes.BACKGROUNDCOLOR_KEYWORD);
model.addElement(ColorTypes.HEADERCOLOR_KEYWORD);
model.addElement(ColorTypes.TEXTCOLOR_KEYWORD);
}
duration.setEnabled(true);
} else {
model.removeElement(ColorTypes.BACKGROUNDCOLOR_KEYWORD);
model.removeElement(ColorTypes.HEADERCOLOR_KEYWORD);
model.removeElement(ColorTypes.TEXTCOLOR_KEYWORD);
duration.setEnabled(false);
duration.setText(_duration.getText());
}
}
private void toggleDifferentSettingsForGroup(boolean isSelected) {
DefaultListModel<ColorTypes> model = (DefaultListModel<ColorTypes>) _singleColorlist.getModel();
JTextField duration = retrieveComponent("group.duration", JTextField.class);
if (isSelected) {
if (!model.contains(ColorTypes.BACKGROUNDCOLOR_GROUP)) {
model.addElement(ColorTypes.BACKGROUNDCOLOR_GROUP);
model.addElement(ColorTypes.HEADERCOLOR_GROUP);
model.addElement(ColorTypes.TEXTCOLOR_GROUP);
}
duration.setEnabled(true);
} else {
model.removeElement(ColorTypes.BACKGROUNDCOLOR_GROUP);
model.removeElement(ColorTypes.HEADERCOLOR_GROUP);
model.removeElement(ColorTypes.TEXTCOLOR_GROUP);
duration.setEnabled(false);
duration.setText(_duration.getText());
}
}
private void stateChangedSingleColorPicker(ChangeEvent e) {
if (e.getSource() instanceof JSlider) {
_colormap.put( _singleColorlist.getSelectedValue(), _singleColorpicker.getColor());
}
}
// ============================================================================================================
// ============================================================================================================
// ============================================================================================================
// public void paintComponent(Graphics g) {
// CENTER LOGO
// int imgwi = _backgroundimage.getWidth(null);
// int imghe = _backgroundimage.getHeight(null);
// int x = this.getSize().width;
// x = (x/2)-(imgwi/2) < 0 ? 0 : (x/2)-(imgwi/2) ;
//
// int y = this.getSize().height;
// y = (y/2) -(imghe/2)< 0 ? 0 : y/2-(imghe/2) ;
//
// LOGO in bottom right corner
//
// int x = this.getSize().width - _backgroundimage.getWidth(null);
// int y = this.getSize().height - _backgroundimage.getHeight(null);
//
// super.paintComponent(g);
// g.drawImage(_backgroundimage, x, y, this);
// }
// ============================================================================================================
// ============================================================================================================
// ============================================================================================================
public enum ColorTypes {
BACKGROUNDCOLOR(RoarResources.getString("roar.background")),
HEADERCOLOR(RoarResources.getString("roar.header")),
TEXTCOLOR(RoarResources.getString("roar.text")),
BACKGROUNDCOLOR_GROUP(RoarResources.getString("roar.background.group")),
HEADERCOLOR_GROUP(RoarResources.getString("roar.header.group")),
TEXTCOLOR_GROUP(RoarResources.getString("roar.text.group")),
BACKGROUNDCOLOR_KEYWORD(RoarResources.getString("roar.background.keyword")),
HEADERCOLOR_KEYWORD(RoarResources.getString("roar.header.keyword")),
TEXTCOLOR_KEYWORD(RoarResources.getString("roar.text.keyword"));
private final String string;
ColorTypes(String c) {
string = c;
}
public String toString() {
return string;
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.resourcemanager.utils;
import org.apache.flink.api.common.JobID;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.api.java.tuple.Tuple4;
import org.apache.flink.runtime.blob.TransientBlobKey;
import org.apache.flink.runtime.clusterframework.ApplicationStatus;
import org.apache.flink.runtime.clusterframework.types.AllocationID;
import org.apache.flink.runtime.clusterframework.types.ResourceID;
import org.apache.flink.runtime.clusterframework.types.SlotID;
import org.apache.flink.runtime.concurrent.FutureUtils;
import org.apache.flink.runtime.entrypoint.ClusterInformation;
import org.apache.flink.runtime.instance.InstanceID;
import org.apache.flink.runtime.io.network.partition.DataSetMetaInfo;
import org.apache.flink.runtime.jobgraph.IntermediateDataSetID;
import org.apache.flink.runtime.jobmaster.JobMasterId;
import org.apache.flink.runtime.jobmaster.JobMasterRegistrationSuccess;
import org.apache.flink.runtime.messages.Acknowledge;
import org.apache.flink.runtime.registration.RegistrationResponse;
import org.apache.flink.runtime.resourcemanager.ResourceManagerGateway;
import org.apache.flink.runtime.resourcemanager.ResourceManagerId;
import org.apache.flink.runtime.resourcemanager.ResourceOverview;
import org.apache.flink.runtime.resourcemanager.SlotRequest;
import org.apache.flink.runtime.resourcemanager.TaskExecutorRegistration;
import org.apache.flink.runtime.resourcemanager.exceptions.UnknownTaskExecutorException;
import org.apache.flink.runtime.rest.messages.LogInfo;
import org.apache.flink.runtime.rest.messages.taskmanager.TaskManagerInfo;
import org.apache.flink.runtime.taskexecutor.FileType;
import org.apache.flink.runtime.taskexecutor.SlotReport;
import org.apache.flink.runtime.taskexecutor.TaskExecutorHeartbeatPayload;
import org.apache.flink.runtime.taskexecutor.TaskExecutorRegistrationSuccess;
import org.apache.flink.util.Preconditions;
import java.util.Collection;
import java.util.Collections;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
import java.util.function.Function;
/**
* Implementation of the {@link ResourceManagerGateway} for testing purposes solely.
*/
public class TestingResourceManagerGateway implements ResourceManagerGateway {
private final ResourceManagerId resourceManagerId;
private final ResourceID ownResourceId;
private final String address;
private final String hostname;
private final AtomicReference<CompletableFuture<Acknowledge>> slotFutureReference;
private volatile Consumer<AllocationID> cancelSlotConsumer;
private volatile Consumer<SlotRequest> requestSlotConsumer;
private volatile Consumer<Tuple4<JobMasterId, ResourceID, String, JobID>> registerJobManagerConsumer;
private volatile Consumer<Tuple2<JobID, Throwable>> disconnectJobManagerConsumer;
private volatile Function<TaskExecutorRegistration, CompletableFuture<RegistrationResponse>> registerTaskExecutorFunction;
private volatile Function<Tuple2<ResourceID, FileType>, CompletableFuture<TransientBlobKey>> requestTaskManagerFileUploadByTypeFunction;
private volatile Function<Tuple2<ResourceID, String>, CompletableFuture<TransientBlobKey>> requestTaskManagerFileUploadByNameFunction;
private volatile Consumer<Tuple2<ResourceID, Throwable>> disconnectTaskExecutorConsumer;
private volatile Function<Tuple3<ResourceID, InstanceID, SlotReport>, CompletableFuture<Acknowledge>> sendSlotReportFunction;
private volatile BiConsumer<ResourceID, TaskExecutorHeartbeatPayload> taskExecutorHeartbeatConsumer;
private volatile Consumer<Tuple3<InstanceID, SlotID, AllocationID>> notifySlotAvailableConsumer;
private volatile Function<ResourceID, CompletableFuture<Collection<LogInfo>>> requestTaskManagerLogListFunction;
public TestingResourceManagerGateway() {
this(
ResourceManagerId.generate(),
ResourceID.generate(),
"localhost/" + UUID.randomUUID(),
"localhost");
}
public TestingResourceManagerGateway(
ResourceManagerId resourceManagerId,
ResourceID resourceId,
String address,
String hostname) {
this.resourceManagerId = Preconditions.checkNotNull(resourceManagerId);
this.ownResourceId = Preconditions.checkNotNull(resourceId);
this.address = Preconditions.checkNotNull(address);
this.hostname = Preconditions.checkNotNull(hostname);
this.slotFutureReference = new AtomicReference<>();
this.cancelSlotConsumer = null;
this.requestSlotConsumer = null;
}
public ResourceID getOwnResourceId() {
return ownResourceId;
}
public void setRequestSlotFuture(CompletableFuture<Acknowledge> slotFuture) {
this.slotFutureReference.set(slotFuture);
}
public void setCancelSlotConsumer(Consumer<AllocationID> cancelSlotConsumer) {
this.cancelSlotConsumer = cancelSlotConsumer;
}
public void setRequestSlotConsumer(Consumer<SlotRequest> slotRequestConsumer) {
this.requestSlotConsumer = slotRequestConsumer;
}
public void setRegisterJobManagerConsumer(Consumer<Tuple4<JobMasterId, ResourceID, String, JobID>> registerJobManagerConsumer) {
this.registerJobManagerConsumer = registerJobManagerConsumer;
}
public void setDisconnectJobManagerConsumer(Consumer<Tuple2<JobID, Throwable>> disconnectJobManagerConsumer) {
this.disconnectJobManagerConsumer = disconnectJobManagerConsumer;
}
public void setRegisterTaskExecutorFunction(Function<TaskExecutorRegistration, CompletableFuture<RegistrationResponse>> registerTaskExecutorFunction) {
this.registerTaskExecutorFunction = registerTaskExecutorFunction;
}
public void setRequestTaskManagerFileUploadByTypeFunction(Function<Tuple2<ResourceID, FileType>, CompletableFuture<TransientBlobKey>> requestTaskManagerFileUploadByTypeFunction) {
this.requestTaskManagerFileUploadByTypeFunction = requestTaskManagerFileUploadByTypeFunction;
}
public void setRequestTaskManagerFileUploadByNameFunction(Function<Tuple2<ResourceID, String>, CompletableFuture<TransientBlobKey>> requestTaskManagerFileUploadByNameFunction) {
this.requestTaskManagerFileUploadByNameFunction = requestTaskManagerFileUploadByNameFunction;
}
public void setRequestTaskManagerLogListFunction(Function<ResourceID, CompletableFuture<Collection<LogInfo>>> requestTaskManagerLogListFunction) {
this.requestTaskManagerLogListFunction = requestTaskManagerLogListFunction;
}
public void setDisconnectTaskExecutorConsumer(Consumer<Tuple2<ResourceID, Throwable>> disconnectTaskExecutorConsumer) {
this.disconnectTaskExecutorConsumer = disconnectTaskExecutorConsumer;
}
public void setSendSlotReportFunction(Function<Tuple3<ResourceID, InstanceID, SlotReport>, CompletableFuture<Acknowledge>> sendSlotReportFunction) {
this.sendSlotReportFunction = sendSlotReportFunction;
}
public void setTaskExecutorHeartbeatConsumer(BiConsumer<ResourceID, TaskExecutorHeartbeatPayload> taskExecutorHeartbeatConsumer) {
this.taskExecutorHeartbeatConsumer = taskExecutorHeartbeatConsumer;
}
public void setNotifySlotAvailableConsumer(Consumer<Tuple3<InstanceID, SlotID, AllocationID>> notifySlotAvailableConsumer) {
this.notifySlotAvailableConsumer = notifySlotAvailableConsumer;
}
@Override
public CompletableFuture<RegistrationResponse> registerJobManager(JobMasterId jobMasterId, ResourceID jobMasterResourceId, String jobMasterAddress, JobID jobId, Time timeout) {
final Consumer<Tuple4<JobMasterId, ResourceID, String, JobID>> currentConsumer = registerJobManagerConsumer;
if (currentConsumer != null) {
currentConsumer.accept(Tuple4.of(jobMasterId, jobMasterResourceId, jobMasterAddress, jobId));
}
return CompletableFuture.completedFuture(
new JobMasterRegistrationSuccess(
resourceManagerId,
ownResourceId));
}
@Override
public CompletableFuture<Acknowledge> requestSlot(JobMasterId jobMasterId, SlotRequest slotRequest, Time timeout) {
Consumer<SlotRequest> currentRequestSlotConsumer = requestSlotConsumer;
if (currentRequestSlotConsumer != null) {
currentRequestSlotConsumer.accept(slotRequest);
}
CompletableFuture<Acknowledge> slotFuture = slotFutureReference.getAndSet(null);
if (slotFuture != null) {
return slotFuture;
} else {
return CompletableFuture.completedFuture(Acknowledge.get());
}
}
@Override
public void cancelSlotRequest(AllocationID allocationID) {
Consumer<AllocationID> currentCancelSlotConsumer = cancelSlotConsumer;
if (currentCancelSlotConsumer != null) {
currentCancelSlotConsumer.accept(allocationID);
}
}
@Override
public CompletableFuture<Acknowledge> sendSlotReport(ResourceID taskManagerResourceId, InstanceID taskManagerRegistrationId, SlotReport slotReport, Time timeout) {
final Function<Tuple3<ResourceID, InstanceID, SlotReport>, CompletableFuture<Acknowledge>> currentSendSlotReportFunction = sendSlotReportFunction;
if (currentSendSlotReportFunction != null) {
return currentSendSlotReportFunction.apply(Tuple3.of(taskManagerResourceId, taskManagerRegistrationId, slotReport));
} else {
return CompletableFuture.completedFuture(Acknowledge.get());
}
}
@Override
public CompletableFuture<RegistrationResponse> registerTaskExecutor(TaskExecutorRegistration taskExecutorRegistration, Time timeout) {
final Function<TaskExecutorRegistration, CompletableFuture<RegistrationResponse>> currentFunction = registerTaskExecutorFunction;
if (currentFunction != null) {
return currentFunction.apply(taskExecutorRegistration);
} else {
return CompletableFuture.completedFuture(
new TaskExecutorRegistrationSuccess(
new InstanceID(),
ownResourceId,
new ClusterInformation("localhost", 1234)));
}
}
@Override
public void notifySlotAvailable(InstanceID instanceId, SlotID slotID, AllocationID oldAllocationId) {
final Consumer<Tuple3<InstanceID, SlotID, AllocationID>> currentNotifySlotAvailableConsumer = notifySlotAvailableConsumer;
if (currentNotifySlotAvailableConsumer != null) {
currentNotifySlotAvailableConsumer.accept(Tuple3.of(instanceId, slotID, oldAllocationId));
}
}
@Override
public CompletableFuture<Acknowledge> deregisterApplication(ApplicationStatus finalStatus, String diagnostics) {
return CompletableFuture.completedFuture(Acknowledge.get());
}
@Override
public CompletableFuture<Integer> getNumberOfRegisteredTaskManagers() {
return CompletableFuture.completedFuture(0);
}
@Override
public void heartbeatFromTaskManager(ResourceID heartbeatOrigin, TaskExecutorHeartbeatPayload heartbeatPayload) {
final BiConsumer<ResourceID, TaskExecutorHeartbeatPayload> currentTaskExecutorHeartbeatConsumer = taskExecutorHeartbeatConsumer;
if (currentTaskExecutorHeartbeatConsumer != null) {
currentTaskExecutorHeartbeatConsumer.accept(heartbeatOrigin, heartbeatPayload);
}
}
@Override
public void heartbeatFromJobManager(ResourceID heartbeatOrigin) {
}
@Override
public void disconnectTaskManager(ResourceID resourceID, Exception cause) {
final Consumer<Tuple2<ResourceID, Throwable>> currentConsumer = disconnectTaskExecutorConsumer;
if (currentConsumer != null) {
currentConsumer.accept(Tuple2.of(resourceID, cause));
}
}
@Override
public void disconnectJobManager(JobID jobId, Exception cause) {
final Consumer<Tuple2<JobID, Throwable>> currentConsumer = disconnectJobManagerConsumer;
if (currentConsumer != null) {
currentConsumer.accept(Tuple2.of(jobId, cause));
}
}
@Override
public CompletableFuture<Collection<TaskManagerInfo>> requestTaskManagerInfo(Time timeout) {
return CompletableFuture.completedFuture(Collections.emptyList());
}
@Override
public CompletableFuture<TaskManagerInfo> requestTaskManagerInfo(ResourceID resourceId, Time timeout) {
return FutureUtils.completedExceptionally(new UnsupportedOperationException("Not yet implemented"));
}
@Override
public CompletableFuture<ResourceOverview> requestResourceOverview(Time timeout) {
return CompletableFuture.completedFuture(new ResourceOverview(1, 1, 1));
}
@Override
public CompletableFuture<Collection<Tuple2<ResourceID, String>>> requestTaskManagerMetricQueryServiceAddresses(Time timeout) {
return CompletableFuture.completedFuture(Collections.emptyList());
}
@Override
public CompletableFuture<TransientBlobKey> requestTaskManagerFileUploadByType(ResourceID taskManagerId, FileType fileType, Time timeout) {
final Function<Tuple2<ResourceID, FileType>, CompletableFuture<TransientBlobKey>> function = requestTaskManagerFileUploadByTypeFunction;
if (function != null) {
return function.apply(Tuple2.of(taskManagerId, fileType));
} else {
return CompletableFuture.completedFuture(new TransientBlobKey());
}
}
@Override
public CompletableFuture<TransientBlobKey> requestTaskManagerFileUploadByName(ResourceID taskManagerId, String fileName, Time timeout) {
    // Delegate to the configured handler when present; otherwise hand back a
    // freshly created blob key.
    final Function<Tuple2<ResourceID, String>, CompletableFuture<TransientBlobKey>> handler =
        requestTaskManagerFileUploadByNameFunction;
    if (handler == null) {
        return CompletableFuture.completedFuture(new TransientBlobKey());
    }
    return handler.apply(Tuple2.of(taskManagerId, fileName));
}
@Override
public CompletableFuture<Collection<LogInfo>> requestTaskManagerLogList(ResourceID taskManagerId, Time timeout) {
    final Function<ResourceID, CompletableFuture<Collection<LogInfo>>> handler =
        this.requestTaskManagerLogListFunction;
    if (handler == null) {
        // No behaviour configured: report the task executor as unknown.
        return FutureUtils.completedExceptionally(
            new UnknownTaskExecutorException(taskManagerId));
    }
    return handler.apply(taskManagerId);
}
@Override
public ResourceManagerId getFencingToken() {
    // Token used to fence RPCs against this resource manager instance.
    return resourceManagerId;
}
@Override
public String getAddress() {
    // Address this gateway was constructed with.
    return address;
}
@Override
public String getHostname() {
    // Hostname this gateway was constructed with.
    return hostname;
}
@Override
public CompletableFuture<Map<IntermediateDataSetID, DataSetMetaInfo>> listDataSets() {
    // This gateway never tracks cluster data sets.
    final Map<IntermediateDataSetID, DataSetMetaInfo> dataSets = Collections.emptyMap();
    return CompletableFuture.completedFuture(dataSets);
}
@Override
public CompletableFuture<Void> releaseClusterPartitions(IntermediateDataSetID dataSetToRelease) {
    // Releasing partitions always "succeeds" immediately in this gateway.
    final CompletableFuture<Void> done = CompletableFuture.completedFuture(null);
    return done;
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.timelineservice.storage.flow;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.NavigableMap;
import java.util.TreeMap;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.coprocessor.BaseRegionObserver;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.regionserver.Region;
import org.apache.hadoop.hbase.regionserver.InternalScanner;
import org.apache.hadoop.hbase.regionserver.RegionScanner;
import org.apache.hadoop.hbase.regionserver.ScanType;
import org.apache.hadoop.hbase.regionserver.Store;
import org.apache.hadoop.hbase.regionserver.StoreFile;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.HBaseTimelineStorageUtils;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.TimestampGenerator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Coprocessor for flow run table.
 *
 * <p>On writes it decorates every cell of an incoming {@link Put} with the
 * operation tags carried in the put's attribute map and, for cells without an
 * explicit timestamp, with a region-unique timestamp. On reads, flushes and
 * compactions it interposes a {@link FlowScanner} so that all cell versions
 * are visible and flow-run metrics can be aggregated correctly.
 */
public class FlowRunCoprocessor extends BaseRegionObserver {
  private static final Logger LOG =
      LoggerFactory.getLogger(FlowRunCoprocessor.class);

  // True only when the observed region belongs to the flow run table; every
  // hook below becomes a pass-through when this is false.
  private boolean isFlowRunRegion = false;

  private Region region;
  /**
   * generate a timestamp that is unique per row in a region this is per region.
   */
  private final TimestampGenerator timestampGenerator =
      new TimestampGenerator();

  @Override
  public void start(CoprocessorEnvironment e) throws IOException {
    if (e instanceof RegionCoprocessorEnvironment) {
      RegionCoprocessorEnvironment env = (RegionCoprocessorEnvironment) e;
      this.region = env.getRegion();
      isFlowRunRegion = HBaseTimelineStorageUtils.isFlowRunTable(
          region.getRegionInfo(), env.getConfiguration());
    }
  }

  /** @return whether the observed region belongs to the flow run table. */
  public boolean isFlowRunRegion() {
    return isFlowRunRegion;
  }

  /*
   * (non-Javadoc)
   *
   * This method adds the tags onto the cells in the Put. It is presumed that
   * all the cells in one Put have the same set of Tags. The existing cell
   * timestamp is overwritten for non-metric cells and each such cell gets a new
   * unique timestamp generated by {@link TimestampGenerator}
   *
   * @see
   * org.apache.hadoop.hbase.coprocessor.BaseRegionObserver#prePut(org.apache
   * .hadoop.hbase.coprocessor.ObserverContext,
   * org.apache.hadoop.hbase.client.Put,
   * org.apache.hadoop.hbase.regionserver.wal.WALEdit,
   * org.apache.hadoop.hbase.client.Durability)
   */
  @Override
  public void prePut(ObserverContext<RegionCoprocessorEnvironment> e, Put put,
      WALEdit edit, Durability durability) throws IOException {
    if (!isFlowRunRegion) {
      return;
    }
    // Fetch the attributes only after the cheap region guard above; the
    // previous version fetched them unconditionally for every region.
    Map<String, byte[]> attributes = put.getAttributesMap();
    // Assumption is that all the cells in a put are the same operation.
    List<Tag> tags = new ArrayList<>();
    if ((attributes != null) && (attributes.size() > 0)) {
      for (Map.Entry<String, byte[]> attribute : attributes.entrySet()) {
        Tag t = HBaseTimelineStorageUtils.getTagFromAttribute(attribute);
        tags.add(t);
      }
      byte[] tagByteArray = Tag.fromList(tags);
      NavigableMap<byte[], List<Cell>> newFamilyMap = new TreeMap<>(
          Bytes.BYTES_COMPARATOR);
      for (Map.Entry<byte[], List<Cell>> entry : put.getFamilyCellMap()
          .entrySet()) {
        List<Cell> newCells = new ArrayList<>(entry.getValue().size());
        for (Cell cell : entry.getValue()) {
          // for each cell in the put add the tags
          // Assumption is that all the cells in
          // one put are the same operation
          // also, get a unique cell timestamp for non-metric cells
          // this way we don't inadvertently overwrite cell versions
          long cellTimestamp = getCellTimestamp(cell.getTimestamp());
          newCells.add(CellUtil.createCell(CellUtil.cloneRow(cell),
              CellUtil.cloneFamily(cell), CellUtil.cloneQualifier(cell),
              cellTimestamp, KeyValue.Type.Put, CellUtil.cloneValue(cell),
              tagByteArray));
        }
        newFamilyMap.put(entry.getKey(), newCells);
      } // for each entry
      // Update the family map for the Put
      put.setFamilyCellMap(newFamilyMap);
    }
  }

  /**
   * Determines if the current cell's timestamp is to be used or a new unique
   * cell timestamp is to be used. The reason this is done is to inadvertently
   * overwrite cells when writes come in very fast. But for metric cells, the
   * cell timestamp signifies the metric timestamp. Hence we don't want to
   * overwrite it.
   *
   * @param timestamp the timestamp carried by the incoming cell
   * @return the cell timestamp to use: the incoming one when it was explicitly
   *         set, otherwise a region-unique generated timestamp
   */
  private long getCellTimestamp(long timestamp) {
    // if ts not set (hbase sets to HConstants.LATEST_TIMESTAMP by default)
    // then use the generator
    if (timestamp == HConstants.LATEST_TIMESTAMP) {
      return timestampGenerator.getUniqueTimestamp();
    } else {
      return timestamp;
    }
  }

  /*
   * (non-Javadoc)
   *
   * Creates a {@link FlowScanner} Scan so that it can correctly process the
   * contents of {@link FlowRunTable}.
   *
   * @see
   * org.apache.hadoop.hbase.coprocessor.BaseRegionObserver#preGetOp(org.apache
   * .hadoop.hbase.coprocessor.ObserverContext,
   * org.apache.hadoop.hbase.client.Get, java.util.List)
   */
  @Override
  public void preGetOp(ObserverContext<RegionCoprocessorEnvironment> e,
      Get get, List<Cell> results) throws IOException {
    if (!isFlowRunRegion) {
      return;
    }
    // Turn the Get into a full-version Scan so the FlowScanner can aggregate
    // across all cell versions, then bypass the default Get handling.
    Scan scan = new Scan(get);
    scan.setMaxVersions();
    RegionScanner scanner = null;
    try {
      scanner = new FlowScanner(e.getEnvironment(), scan,
          region.getScanner(scan), FlowScannerOperation.READ);
      scanner.next(results);
      e.bypass();
    } finally {
      if (scanner != null) {
        scanner.close();
      }
    }
  }

  /*
   * (non-Javadoc)
   *
   * Ensures that max versions are set for the Scan so that metrics can be
   * correctly aggregated and min/max can be correctly determined.
   *
   * @see
   * org.apache.hadoop.hbase.coprocessor.BaseRegionObserver#preScannerOpen(org
   * .apache.hadoop.hbase.coprocessor.ObserverContext,
   * org.apache.hadoop.hbase.client.Scan,
   * org.apache.hadoop.hbase.regionserver.RegionScanner)
   */
  @Override
  public RegionScanner preScannerOpen(
      ObserverContext<RegionCoprocessorEnvironment> e, Scan scan,
      RegionScanner scanner) throws IOException {
    if (isFlowRunRegion) {
      // set max versions for scan to see all
      // versions to aggregate for metrics
      scan.setMaxVersions();
    }
    return scanner;
  }

  /*
   * (non-Javadoc)
   *
   * Creates a {@link FlowScanner} Scan so that it can correctly process the
   * contents of {@link FlowRunTable}.
   *
   * @see
   * org.apache.hadoop.hbase.coprocessor.BaseRegionObserver#postScannerOpen(
   * org.apache.hadoop.hbase.coprocessor.ObserverContext,
   * org.apache.hadoop.hbase.client.Scan,
   * org.apache.hadoop.hbase.regionserver.RegionScanner)
   */
  @Override
  public RegionScanner postScannerOpen(
      ObserverContext<RegionCoprocessorEnvironment> e, Scan scan,
      RegionScanner scanner) throws IOException {
    if (!isFlowRunRegion) {
      return scanner;
    }
    return new FlowScanner(e.getEnvironment(), scan,
        scanner, FlowScannerOperation.READ);
  }

  @Override
  public InternalScanner preFlush(
      ObserverContext<RegionCoprocessorEnvironment> c, Store store,
      InternalScanner scanner) throws IOException {
    if (!isFlowRunRegion) {
      return scanner;
    }
    logStoreDetails("preFlush", store);
    return new FlowScanner(c.getEnvironment(), scanner,
        FlowScannerOperation.FLUSH);
  }

  @Override
  public void postFlush(ObserverContext<RegionCoprocessorEnvironment> c,
      Store store, StoreFile resultFile) {
    if (!isFlowRunRegion) {
      return;
    }
    logStoreDetails("postFlush", store);
  }

  /**
   * Logs the verbose store statistics shared by the flush hooks. Emits
   * nothing unless debug logging is enabled and a store is supplied.
   *
   * @param prefix the hook name used as the message prefix
   * @param store the store whose statistics are logged; may be null
   */
  private static void logStoreDetails(String prefix, Store store) {
    if (LOG.isDebugEnabled() && store != null) {
      LOG.debug(prefix + " store = " + store.getColumnFamilyName()
          + " flushableSize=" + store.getFlushableSize()
          + " flushedCellsCount=" + store.getFlushedCellsCount()
          + " compactedCellsCount=" + store.getCompactedCellsCount()
          + " majorCompactedCellsCount="
          + store.getMajorCompactedCellsCount() + " memstoreFlushSize="
          + store.getMemstoreFlushSize() + " memstoreSize="
          + store.getMemStoreSize() + " size=" + store.getSize()
          + " storeFilesCount=" + store.getStorefilesCount());
    }
  }

  @Override
  public InternalScanner preCompact(
      ObserverContext<RegionCoprocessorEnvironment> e, Store store,
      InternalScanner scanner, ScanType scanType, CompactionRequest request)
      throws IOException {
    if (!isFlowRunRegion) {
      return scanner;
    }
    // Default to a minor compaction; upgrade when the request says major.
    FlowScannerOperation requestOp = FlowScannerOperation.MINOR_COMPACTION;
    if (request != null) {
      requestOp = (request.isMajor() ? FlowScannerOperation.MAJOR_COMPACTION
          : FlowScannerOperation.MINOR_COMPACTION);
      LOG.info("Compactionrequest= " + request.toString() + " "
          + requestOp.toString() + " RegionName=" + e.getEnvironment()
          .getRegion().getRegionInfo().getRegionNameAsString());
    }
    return new FlowScanner(e.getEnvironment(), scanner, requestOp);
  }
}
| |
/*
* Copyright (C) 2015-2016 Emanuel Moecklin
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.onegravity.rteditor;
import android.annotation.SuppressLint;
import android.text.Spannable;
import java.util.HashMap;
import java.util.Map;
import java.util.Stack;
/**
 * This class manages Operations for multiple rich text editors.
 * It's used by the RTManager to undo/redo operations.
 *
 * <p>All public entry points are synchronized. Undo and redo stacks are kept
 * per editor, keyed by the editor's id, so one manager can serve several
 * editors at once.
 */
@SuppressLint("UseSparseArrays")
class RTOperationManager {

    /*
     * Maximum number of operations to put in the undo/redo stack
     */
    private static final int MAX_NR_OF_OPERATIONS = 50;

    /*
     * two operations performed in this time frame (in ms) are considered one
     * operation
     */
    private static final int TIME_BETWEEN_OPERATIONS = 300;

    /*
     * The undo/redo stacks by editor Id
     */
    private final Map<Integer, Stack<Operation>> mUndoStacks = new HashMap<>();
    private final Map<Integer, Stack<Operation>> mRedoStacks = new HashMap<>();

    // ****************************************** Operation Classes *******************************************

    /**
     * An atomic operation in the rich text editor.
     * If two operations are performed within a certain time frame
     * they will be considered as one operation and un-done/re-done together.
     */
    private abstract static class Operation {
        protected long mTimestamp;

        // Selection and text snapshot before the operation was applied.
        private int mSelStartBefore;
        private int mSelEndBefore;
        private Spannable mBefore;

        // Selection and text snapshot after the operation was applied.
        private int mSelStartAfter;
        private int mSelEndAfter;
        private Spannable mAfter;

        Operation(Spannable before, Spannable after, int selStartBefore, int selEndBefore, int selStartAfter, int selEndAfter) {
            mSelStartBefore = selStartBefore;
            mSelEndBefore = selEndBefore;
            mSelStartAfter = selStartAfter;
            mSelEndAfter = selEndAfter;
            mBefore = before;
            mAfter = after;
            mTimestamp = System.currentTimeMillis();
        }

        /**
         * Two operations are merged into one when they happened within
         * TIME_BETWEEN_OPERATIONS milliseconds of each other.
         */
        boolean canMerge(Operation other) {
            return Math.abs(mTimestamp - other.mTimestamp) < TIME_BETWEEN_OPERATIONS;
        }

        /**
         * Merges this operation with an earlier one by adopting the earlier
         * operation's "before" state; the "after" state stays this one's.
         */
        Operation merge(Operation previousOp) {
            mBefore = previousOp.mBefore;
            mSelStartBefore = previousOp.mSelStartBefore;
            mSelEndBefore = previousOp.mSelEndBefore;
            return this;
        }

        /** Restores the editor's text and selection to the "before" snapshot. */
        void undo(RTEditText editor) {
            // Suppress change listeners so the restore isn't recorded as a new operation.
            editor.ignoreTextChanges();
            editor.setText(mBefore);
            editor.setSelection(mSelStartBefore, mSelEndBefore);
            editor.registerTextChanges();
        }

        /** Restores the editor's text and selection to the "after" snapshot. */
        void redo(RTEditText editor) {
            // Suppress change listeners so the restore isn't recorded as a new operation.
            editor.ignoreTextChanges();
            editor.setText(mAfter);
            editor.setSelection(mSelStartAfter, mSelEndAfter);
            editor.registerTextChanges();
        }
    }

    /** A plain text change captured as before/after snapshots. */
    static class TextChangeOperation extends Operation {
        TextChangeOperation(Spannable before, Spannable after, int selStartBefore, int selEndBefore, int selStartAfter, int selEndAfter) {
            super(before, after, selStartBefore, selEndBefore, selStartAfter, selEndAfter);
        }
    }

    // ****************************************** execute/undo/redo/flush *******************************************

    /**
     * Call this when an operation is performed to add it to the undo stack.
     *
     * @param editor The rich text editor the operation was performed on
     * @param op The Operation that was performed
     */
    synchronized void executed(RTEditText editor, Operation op) {
        Stack<Operation> undoStack = getUndoStack(editor);
        Stack<Operation> redoStack = getRedoStack(editor);
        // if operations are executed in a quick succession we "merge" them to have but one
        // -> saves memory and makes more sense from a user perspective (each key stroke an undo? -> no way)
        while (!undoStack.empty() && op.canMerge(undoStack.peek())) {
            Operation previousOp = undoStack.pop();
            op.merge(previousOp);
        }
        push(op, undoStack);
        // A new operation invalidates everything that was undone before it.
        redoStack.clear();
    }

    /**
     * Undo the last operation for a specific rich text editor
     *
     * @param editor Undo the last operation for this rich text editor
     */
    synchronized void undo(RTEditText editor) {
        Stack<Operation> undoStack = getUndoStack(editor);
        if (!undoStack.empty()) {
            Stack<Operation> redoStack = getRedoStack(editor);
            Operation op = undoStack.pop();
            push(op, redoStack);
            op.undo(editor);
            // Also undo any operations close enough in time to count as one.
            while (!undoStack.empty() && op.canMerge(undoStack.peek())) {
                op = undoStack.pop();
                push(op, redoStack);
                op.undo(editor);
            }
        }
    }

    /**
     * Re-do the last undone operation for a specific rich text editor
     *
     * @param editor Re-do an operation for this rich text editor
     */
    synchronized void redo(RTEditText editor) {
        Stack<Operation> redoStack = getRedoStack(editor);
        if (!redoStack.empty()) {
            Stack<Operation> undoStack = getUndoStack(editor);
            Operation op = redoStack.pop();
            push(op, undoStack);
            op.redo(editor);
            // Also redo any operations close enough in time to count as one.
            while (!redoStack.empty() && op.canMerge(redoStack.peek())) {
                op = redoStack.pop();
                push(op, undoStack);
                op.redo(editor);
            }
        }
    }

    /**
     * Flush all operations for a specific rich text editor (method unused at the moment)
     *
     * @param editor This rich text editor's operations will be flushed
     */
    synchronized void flushOperations(RTEditText editor) {
        Stack<Operation> undoStack = getUndoStack(editor);
        Stack<Operation> redoStack = getRedoStack(editor);
        undoStack.clear();
        redoStack.clear();
    }

    // ****************************************** Private Methods *******************************************

    /**
     * Pushes an operation, evicting the oldest entry once the stack is full so
     * it never grows beyond MAX_NR_OF_OPERATIONS.
     */
    private void push(Operation op, Stack<Operation> stack) {
        if (stack.size() >= MAX_NR_OF_OPERATIONS) {
            stack.remove(0);
        }
        stack.push(op);
    }

    private Stack<Operation> getUndoStack(RTEditText editor) {
        return getStack(mUndoStacks, editor);
    }

    private Stack<Operation> getRedoStack(RTEditText editor) {
        return getStack(mRedoStacks, editor);
    }

    /** Returns the editor's stack, lazily creating and registering it on first use. */
    private Stack<Operation> getStack(Map<Integer, Stack<Operation>> stacks, RTEditText editor) {
        Stack<Operation> stack = stacks.get(editor.getId());
        if (stack == null) {
            stack = new Stack<>();
            stacks.put(editor.getId(), stack);
        }
        return stack;
    }
}
| |
/*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.extractor.mp4;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.extractor.GaplessInfoHolder;
import com.google.android.exoplayer2.metadata.Metadata;
import com.google.android.exoplayer2.metadata.id3.ApicFrame;
import com.google.android.exoplayer2.metadata.id3.CommentFrame;
import com.google.android.exoplayer2.metadata.id3.Id3Frame;
import com.google.android.exoplayer2.metadata.id3.InternalFrame;
import com.google.android.exoplayer2.metadata.id3.TextInformationFrame;
import com.google.android.exoplayer2.util.Log;
import com.google.android.exoplayer2.util.ParsableByteArray;
import java.nio.ByteBuffer;
/** Utilities for handling metadata in MP4. */
/* package */ final class MetadataUtil {
private static final String TAG = "MetadataUtil";
// Codes that start with the copyright character (omitted) and have equivalent ID3 frames.
private static final int SHORT_TYPE_NAME_1 = 0x006e616d;
private static final int SHORT_TYPE_NAME_2 = 0x0074726b;
private static final int SHORT_TYPE_COMMENT = 0x00636d74;
private static final int SHORT_TYPE_YEAR = 0x00646179;
private static final int SHORT_TYPE_ARTIST = 0x00415254;
private static final int SHORT_TYPE_ENCODER = 0x00746f6f;
private static final int SHORT_TYPE_ALBUM = 0x00616c62;
private static final int SHORT_TYPE_COMPOSER_1 = 0x00636f6d;
private static final int SHORT_TYPE_COMPOSER_2 = 0x00777274;
private static final int SHORT_TYPE_LYRICS = 0x006c7972;
private static final int SHORT_TYPE_GENRE = 0x0067656e;
// Codes that have equivalent ID3 frames.
private static final int TYPE_COVER_ART = 0x636f7672;
private static final int TYPE_GENRE = 0x676e7265;
private static final int TYPE_GROUPING = 0x00677270;
private static final int TYPE_DISK_NUMBER = 0x6469736b;
private static final int TYPE_TRACK_NUMBER = 0x74726b6e;
private static final int TYPE_TEMPO = 0x746d706f;
private static final int TYPE_COMPILATION = 0x6370696c;
private static final int TYPE_ALBUM_ARTIST = 0x61415254;
private static final int TYPE_SORT_TRACK_NAME = 0x736f6e6d;
private static final int TYPE_SORT_ALBUM = 0x736f616c;
private static final int TYPE_SORT_ARTIST = 0x736f6172;
private static final int TYPE_SORT_ALBUM_ARTIST = 0x736f6161;
private static final int TYPE_SORT_COMPOSER = 0x736f636f;
// Types that do not have equivalent ID3 frames.
private static final int TYPE_RATING = 0x72746e67;
private static final int TYPE_GAPLESS_ALBUM = 0x70676170;
private static final int TYPE_TV_SORT_SHOW = 0x736f736e;
private static final int TYPE_TV_SHOW = 0x74767368;
// Type for items that are intended for internal use by the player.
private static final int TYPE_INTERNAL = 0x2d2d2d2d;
private static final int PICTURE_TYPE_FRONT_COVER = 3;
// Standard genres.
private static final String[] STANDARD_GENRES = new String[] {
// These are the official ID3v1 genres.
"Blues", "Classic Rock", "Country", "Dance", "Disco", "Funk", "Grunge", "Hip-Hop", "Jazz",
"Metal", "New Age", "Oldies", "Other", "Pop", "R&B", "Rap", "Reggae", "Rock", "Techno",
"Industrial", "Alternative", "Ska", "Death Metal", "Pranks", "Soundtrack", "Euro-Techno",
"Ambient", "Trip-Hop", "Vocal", "Jazz+Funk", "Fusion", "Trance", "Classical", "Instrumental",
"Acid", "House", "Game", "Sound Clip", "Gospel", "Noise", "AlternRock", "Bass", "Soul",
"Punk", "Space", "Meditative", "Instrumental Pop", "Instrumental Rock", "Ethnic", "Gothic",
"Darkwave", "Techno-Industrial", "Electronic", "Pop-Folk", "Eurodance", "Dream",
"Southern Rock", "Comedy", "Cult", "Gangsta", "Top 40", "Christian Rap", "Pop/Funk", "Jungle",
"Native American", "Cabaret", "New Wave", "Psychadelic", "Rave", "Showtunes", "Trailer",
"Lo-Fi", "Tribal", "Acid Punk", "Acid Jazz", "Polka", "Retro", "Musical", "Rock & Roll",
"Hard Rock",
// These were made up by the authors of Winamp and later added to the ID3 spec.
"Folk", "Folk-Rock", "National Folk", "Swing", "Fast Fusion", "Bebob", "Latin", "Revival",
"Celtic", "Bluegrass", "Avantgarde", "Gothic Rock", "Progressive Rock", "Psychedelic Rock",
"Symphonic Rock", "Slow Rock", "Big Band", "Chorus", "Easy Listening", "Acoustic", "Humour",
"Speech", "Chanson", "Opera", "Chamber Music", "Sonata", "Symphony", "Booty Bass", "Primus",
"Porn Groove", "Satire", "Slow Jam", "Club", "Tango", "Samba", "Folklore", "Ballad",
"Power Ballad", "Rhythmic Soul", "Freestyle", "Duet", "Punk Rock", "Drum Solo", "A capella",
"Euro-House", "Dance Hall",
// These were med up by the authors of Winamp but have not been added to the ID3 spec.
"Goa", "Drum & Bass", "Club-House", "Hardcore", "Terror", "Indie", "BritPop", "Negerpunk",
"Polsk Punk", "Beat", "Christian Gangsta Rap", "Heavy Metal", "Black Metal", "Crossover",
"Contemporary Christian", "Christian Rock", "Merengue", "Salsa", "Thrash Metal", "Anime",
"Jpop", "Synthpop"
};
private static final String LANGUAGE_UNDEFINED = "und";
private static final int TYPE_TOP_BYTE_COPYRIGHT = 0xA9;
private static final int TYPE_TOP_BYTE_REPLACEMENT = 0xFD; // Truncated value of \uFFFD.
private static final String MDTA_KEY_ANDROID_CAPTURE_FPS = "com.android.capture.fps";
private static final int MDTA_TYPE_INDICATOR_FLOAT = 23;
private MetadataUtil() {}
/**
* Returns a {@link Format} that is the same as the input format but includes information from the
* specified sources of metadata.
*/
public static Format getFormatWithMetadata(
int trackType,
Format format,
@Nullable Metadata udtaMetadata,
@Nullable Metadata mdtaMetadata,
GaplessInfoHolder gaplessInfoHolder) {
if (trackType == C.TRACK_TYPE_AUDIO) {
if (gaplessInfoHolder.hasGaplessInfo()) {
format =
format.copyWithGaplessInfo(
gaplessInfoHolder.encoderDelay, gaplessInfoHolder.encoderPadding);
}
// We assume all udta metadata is associated with the audio track.
if (udtaMetadata != null) {
format = format.copyWithMetadata(udtaMetadata);
}
} else if (trackType == C.TRACK_TYPE_VIDEO && mdtaMetadata != null) {
// Populate only metadata keys that are known to be specific to video.
for (int i = 0; i < mdtaMetadata.length(); i++) {
Metadata.Entry entry = mdtaMetadata.get(i);
if (entry instanceof MdtaMetadataEntry) {
MdtaMetadataEntry mdtaMetadataEntry = (MdtaMetadataEntry) entry;
if (MDTA_KEY_ANDROID_CAPTURE_FPS.equals(mdtaMetadataEntry.key)
&& mdtaMetadataEntry.typeIndicator == MDTA_TYPE_INDICATOR_FLOAT) {
try {
float fps = ByteBuffer.wrap(mdtaMetadataEntry.value).asFloatBuffer().get();
format = format.copyWithFrameRate(fps);
format = format.copyWithMetadata(new Metadata(mdtaMetadataEntry));
} catch (NumberFormatException e) {
Log.w(TAG, "Ignoring invalid framerate");
}
}
}
}
}
return format;
}
/**
* Parses a single userdata ilst element from a {@link ParsableByteArray}. The element is read
* starting from the current position of the {@link ParsableByteArray}, and the position is
* advanced by the size of the element. The position is advanced even if the element's type is
* unrecognized.
*
* @param ilst Holds the data to be parsed.
* @return The parsed element, or null if the element's type was not recognized.
*/
@Nullable
public static Metadata.Entry parseIlstElement(ParsableByteArray ilst) {
int position = ilst.getPosition();
int endPosition = position + ilst.readInt();
int type = ilst.readInt();
int typeTopByte = (type >> 24) & 0xFF;
try {
if (typeTopByte == TYPE_TOP_BYTE_COPYRIGHT || typeTopByte == TYPE_TOP_BYTE_REPLACEMENT) {
int shortType = type & 0x00FFFFFF;
if (shortType == SHORT_TYPE_COMMENT) {
return parseCommentAttribute(type, ilst);
} else if (shortType == SHORT_TYPE_NAME_1 || shortType == SHORT_TYPE_NAME_2) {
return parseTextAttribute(type, "TIT2", ilst);
} else if (shortType == SHORT_TYPE_COMPOSER_1 || shortType == SHORT_TYPE_COMPOSER_2) {
return parseTextAttribute(type, "TCOM", ilst);
} else if (shortType == SHORT_TYPE_YEAR) {
return parseTextAttribute(type, "TDRC", ilst);
} else if (shortType == SHORT_TYPE_ARTIST) {
return parseTextAttribute(type, "TPE1", ilst);
} else if (shortType == SHORT_TYPE_ENCODER) {
return parseTextAttribute(type, "TSSE", ilst);
} else if (shortType == SHORT_TYPE_ALBUM) {
return parseTextAttribute(type, "TALB", ilst);
} else if (shortType == SHORT_TYPE_LYRICS) {
return parseTextAttribute(type, "USLT", ilst);
} else if (shortType == SHORT_TYPE_GENRE) {
return parseTextAttribute(type, "TCON", ilst);
} else if (shortType == TYPE_GROUPING) {
return parseTextAttribute(type, "TIT1", ilst);
}
} else if (type == TYPE_GENRE) {
return parseStandardGenreAttribute(ilst);
} else if (type == TYPE_DISK_NUMBER) {
return parseIndexAndCountAttribute(type, "TPOS", ilst);
} else if (type == TYPE_TRACK_NUMBER) {
return parseIndexAndCountAttribute(type, "TRCK", ilst);
} else if (type == TYPE_TEMPO) {
return parseUint8Attribute(type, "TBPM", ilst, true, false);
} else if (type == TYPE_COMPILATION) {
return parseUint8Attribute(type, "TCMP", ilst, true, true);
} else if (type == TYPE_COVER_ART) {
return parseCoverArt(ilst);
} else if (type == TYPE_ALBUM_ARTIST) {
return parseTextAttribute(type, "TPE2", ilst);
} else if (type == TYPE_SORT_TRACK_NAME) {
return parseTextAttribute(type, "TSOT", ilst);
} else if (type == TYPE_SORT_ALBUM) {
return parseTextAttribute(type, "TSO2", ilst);
} else if (type == TYPE_SORT_ARTIST) {
return parseTextAttribute(type, "TSOA", ilst);
} else if (type == TYPE_SORT_ALBUM_ARTIST) {
return parseTextAttribute(type, "TSOP", ilst);
} else if (type == TYPE_SORT_COMPOSER) {
return parseTextAttribute(type, "TSOC", ilst);
} else if (type == TYPE_RATING) {
return parseUint8Attribute(type, "ITUNESADVISORY", ilst, false, false);
} else if (type == TYPE_GAPLESS_ALBUM) {
return parseUint8Attribute(type, "ITUNESGAPLESS", ilst, false, true);
} else if (type == TYPE_TV_SORT_SHOW) {
return parseTextAttribute(type, "TVSHOWSORT", ilst);
} else if (type == TYPE_TV_SHOW) {
return parseTextAttribute(type, "TVSHOW", ilst);
} else if (type == TYPE_INTERNAL) {
return parseInternalAttribute(ilst, endPosition);
}
Log.d(TAG, "Skipped unknown metadata entry: " + Atom.getAtomTypeString(type));
return null;
} finally {
ilst.setPosition(endPosition);
}
}
/**
* Parses an 'mdta' metadata entry starting at the current position in an ilst box.
*
* @param ilst The ilst box.
* @param endPosition The end position of the entry in the ilst box.
* @param key The mdta metadata entry key for the entry.
* @return The parsed element, or null if the entry wasn't recognized.
*/
@Nullable
public static MdtaMetadataEntry parseMdtaMetadataEntryFromIlst(
ParsableByteArray ilst, int endPosition, String key) {
int atomPosition;
while ((atomPosition = ilst.getPosition()) < endPosition) {
int atomSize = ilst.readInt();
int atomType = ilst.readInt();
if (atomType == Atom.TYPE_data) {
int typeIndicator = ilst.readInt();
int localeIndicator = ilst.readInt();
int dataSize = atomSize - 16;
byte[] value = new byte[dataSize];
ilst.readBytes(value, 0, dataSize);
return new MdtaMetadataEntry(key, value, localeIndicator, typeIndicator);
}
ilst.setPosition(atomPosition + atomSize);
}
return null;
}
@Nullable
private static TextInformationFrame parseTextAttribute(
int type, String id, ParsableByteArray data) {
int atomSize = data.readInt();
int atomType = data.readInt();
if (atomType == Atom.TYPE_data) {
data.skipBytes(8); // version (1), flags (3), empty (4)
String value = data.readNullTerminatedString(atomSize - 16);
return new TextInformationFrame(id, /* description= */ null, value);
}
Log.w(TAG, "Failed to parse text attribute: " + Atom.getAtomTypeString(type));
return null;
}
@Nullable
private static CommentFrame parseCommentAttribute(int type, ParsableByteArray data) {
int atomSize = data.readInt();
int atomType = data.readInt();
if (atomType == Atom.TYPE_data) {
data.skipBytes(8); // version (1), flags (3), empty (4)
String value = data.readNullTerminatedString(atomSize - 16);
return new CommentFrame(LANGUAGE_UNDEFINED, value, value);
}
Log.w(TAG, "Failed to parse comment attribute: " + Atom.getAtomTypeString(type));
return null;
}
@Nullable
private static Id3Frame parseUint8Attribute(
int type,
String id,
ParsableByteArray data,
boolean isTextInformationFrame,
boolean isBoolean) {
int value = parseUint8AttributeValue(data);
if (isBoolean) {
value = Math.min(1, value);
}
if (value >= 0) {
return isTextInformationFrame
? new TextInformationFrame(id, /* description= */ null, Integer.toString(value))
: new CommentFrame(LANGUAGE_UNDEFINED, id, Integer.toString(value));
}
Log.w(TAG, "Failed to parse uint8 attribute: " + Atom.getAtomTypeString(type));
return null;
}
@Nullable
private static TextInformationFrame parseIndexAndCountAttribute(
int type, String attributeName, ParsableByteArray data) {
int atomSize = data.readInt();
int atomType = data.readInt();
if (atomType == Atom.TYPE_data && atomSize >= 22) {
data.skipBytes(10); // version (1), flags (3), empty (4), empty (2)
int index = data.readUnsignedShort();
if (index > 0) {
String value = "" + index;
int count = data.readUnsignedShort();
if (count > 0) {
value += "/" + count;
}
return new TextInformationFrame(attributeName, /* description= */ null, value);
}
}
Log.w(TAG, "Failed to parse index/count attribute: " + Atom.getAtomTypeString(type));
return null;
}
@Nullable
private static TextInformationFrame parseStandardGenreAttribute(ParsableByteArray data) {
int genreCode = parseUint8AttributeValue(data);
String genreString = (0 < genreCode && genreCode <= STANDARD_GENRES.length)
? STANDARD_GENRES[genreCode - 1] : null;
if (genreString != null) {
return new TextInformationFrame("TCON", /* description= */ null, genreString);
}
Log.w(TAG, "Failed to parse standard genre code");
return null;
}
/**
 * Parses cover art into an {@link ApicFrame}.
 *
 * <p>The picture format is carried in the full-atom flags: 13 indicates JPEG, 14 indicates PNG.
 *
 * @param data Buffer positioned at the start of the child atom.
 * @return The parsed frame, or {@code null} if the attribute could not be parsed.
 */
@Nullable
private static ApicFrame parseCoverArt(ParsableByteArray data) {
  int childSize = data.readInt();
  int childType = data.readInt();
  if (childType != Atom.TYPE_data) {
    Log.w(TAG, "Failed to parse cover art attribute");
    return null;
  }
  int fullVersionInt = data.readInt();
  int flags = Atom.parseFullAtomFlags(fullVersionInt);
  String mimeType;
  switch (flags) {
    case 13:
      mimeType = "image/jpeg";
      break;
    case 14:
      mimeType = "image/png";
      break;
    default:
      Log.w(TAG, "Unrecognized cover art flags: " + flags);
      return null;
  }
  data.skipBytes(4); // empty (4)
  byte[] pictureData = new byte[childSize - 16];
  data.readBytes(pictureData, 0, pictureData.length);
  return new ApicFrame(
      mimeType,
      /* description= */ null,
      /* pictureType= */ PICTURE_TYPE_FRONT_COVER,
      pictureData);
}
/**
 * Parses a freeform metadata item (with 'mean', 'name' and 'data' children) into an
 * {@link InternalFrame}.
 *
 * @param data Buffer positioned at the first child atom.
 * @param endPosition Position in {@code data} at which the children end.
 * @return The parsed frame, or {@code null} if any of the three children is missing.
 */
@Nullable
private static Id3Frame parseInternalAttribute(ParsableByteArray data, int endPosition) {
  String domain = null; // Payload of the 'mean' child, once seen.
  String name = null; // Payload of the 'name' child, once seen.
  int dataAtomPosition = -1; // Start of the 'data' child, or -1 if not seen.
  int dataAtomSize = -1;
  // First pass: walk the children, reading 'mean'/'name' inline and remembering where the
  // 'data' child starts so its payload can be read afterwards.
  while (data.getPosition() < endPosition) {
    int atomPosition = data.getPosition();
    int atomSize = data.readInt();
    int atomType = data.readInt();
    data.skipBytes(4); // version (1), flags (3)
    if (atomType == Atom.TYPE_mean) {
      domain = data.readNullTerminatedString(atomSize - 12);
    } else if (atomType == Atom.TYPE_name) {
      name = data.readNullTerminatedString(atomSize - 12);
    } else {
      if (atomType == Atom.TYPE_data) {
        dataAtomPosition = atomPosition;
        dataAtomSize = atomSize;
      }
      data.skipBytes(atomSize - 12);
    }
  }
  // All three children are required to build a frame.
  if (domain == null || name == null || dataAtomPosition == -1) {
    return null;
  }
  // Second pass: jump back to the 'data' child and read its payload.
  data.setPosition(dataAtomPosition);
  data.skipBytes(16); // size (4), type (4), version (1), flags (3), empty (4)
  String value = data.readNullTerminatedString(dataAtomSize - 16);
  return new InternalFrame(domain, name, value);
}
/**
 * Reads the uint8 payload of a 'data' child atom.
 *
 * @param data Buffer positioned at the start of the child atom.
 * @return The unsigned byte value, or -1 if the child is not a 'data' atom.
 */
private static int parseUint8AttributeValue(ParsableByteArray data) {
  data.skipBytes(4); // atomSize
  if (data.readInt() == Atom.TYPE_data) {
    data.skipBytes(8); // version (1), flags (3), empty (4)
    return data.readUnsignedByte();
  }
  Log.w(TAG, "Failed to parse uint8 attribute value");
  return -1;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.cql3;
import java.nio.ByteBuffer;
import java.util.*;
import com.google.common.base.Joiner;
import com.google.common.base.Objects;
import com.google.common.collect.AbstractIterator;
import org.apache.cassandra.config.CFMetaData;
import org.apache.cassandra.config.ColumnDefinition;
import org.apache.cassandra.db.marshal.AbstractType;
import org.apache.cassandra.db.marshal.ColumnToCollectionType;
import org.apache.cassandra.db.marshal.CompositeType;
import org.apache.cassandra.db.marshal.UTF8Type;
import org.apache.cassandra.utils.ByteBufferUtil;
/**
 * Holds metadata on a CF preprocessed for use by CQL queries.
 *
 * The CQL-visible columns of a column family are split into four groups: partition key
 * components ({@code keys}), comparator components ({@code columns}), the optional compact
 * value column ({@code value}) and the remaining defined columns ({@code metadata}).
 */
public class CFDefinition implements Iterable<CFDefinition.Name>
{
    public static final AbstractType<?> definitionType = UTF8Type.instance;

    // Aliases used when the schema does not explicitly name the corresponding component.
    private static final String DEFAULT_KEY_ALIAS = "key";
    private static final String DEFAULT_COLUMN_ALIAS = "column";
    private static final String DEFAULT_VALUE_ALIAS = "value";

    public final CFMetaData cfm;
    // LinkedHashMap because the order does matter (it is the order in the composite type)
    public final LinkedHashMap<ColumnIdentifier, Name> keys = new LinkedHashMap<ColumnIdentifier, Name>();
    public final LinkedHashMap<ColumnIdentifier, Name> columns = new LinkedHashMap<ColumnIdentifier, Name>();
    public final Name value;
    // Keep metadata lexicographically ordered so that wildcard expansion has a deterministic order
    public final Map<ColumnIdentifier, Name> metadata = new TreeMap<ColumnIdentifier, Name>();

    public final boolean isComposite;
    public final boolean hasCompositeKey;
    // Note that isCompact means here that no component of the comparator corresponds to the column names
    // defined in the CREATE TABLE query. This is not exactly equivalent to the 'WITH COMPACT STORAGE'
    // option when creating a table in that "static CF" without a composite type will have isCompact == false
    // even though one must use 'WITH COMPACT STORAGE' to declare them.
    public final boolean isCompact;
    public final boolean hasCollections;

    /**
     * Builds the CQL view of {@code cfm}, classifying every column into keys, columns,
     * value and metadata, and computing the isComposite/isCompact/hasCollections flags.
     */
    public CFDefinition(CFMetaData cfm)
    {
        this.cfm = cfm;

        // Partition key: one Name per component of a composite key validator, otherwise a single Name.
        if (cfm.getKeyValidator() instanceof CompositeType)
        {
            this.hasCompositeKey = true;
            CompositeType keyComposite = (CompositeType)cfm.getKeyValidator();
            assert keyComposite.types.size() > 1;
            for (int i = 0; i < keyComposite.types.size(); i++)
            {
                ColumnIdentifier id = getKeyId(cfm, i);
                this.keys.put(id, new Name(cfm.ksName, cfm.cfName, id, Name.Kind.KEY_ALIAS, i, keyComposite.types.get(i)));
            }
        }
        else
        {
            this.hasCompositeKey = false;
            ColumnIdentifier id = getKeyId(cfm, 0);
            this.keys.put(id, new Name(cfm.ksName, cfm.cfName, id, Name.Kind.KEY_ALIAS, 0, cfm.getKeyValidator()));
        }

        if (cfm.comparator instanceof CompositeType)
        {
            this.isComposite = true;
            CompositeType composite = (CompositeType)cfm.comparator;
            /*
             * We are a "sparse" composite, i.e. a non-compact one, if either:
             *   - the last type of the composite is a ColumnToCollectionType
             *   - or we have one less alias than of composite types and the last type is UTF8Type.
             *
             * Note that this is not perfect: if someone upgrading from thrift "renames" all but
             * the last column alias, the cf will be considered "sparse" and he will be stuck with
             * that even though that might not be what he wants. But the simple workaround is
             * for that user to rename all the aliases at the same time in the first place.
             */
            int last = composite.types.size() - 1;
            AbstractType<?> lastType = composite.types.get(last);
            if (lastType instanceof ColumnToCollectionType
                || (cfm.getColumnAliases().size() == last && lastType instanceof UTF8Type))
            {
                // "sparse" composite: column names are real CQL columns, values live in metadata.
                this.isCompact = false;
                this.value = null;
                assert cfm.getValueAlias() == null;
                // check for collection type (it is always the last composite component when present)
                if (lastType instanceof ColumnToCollectionType)
                {
                    --last;
                    this.hasCollections = true;
                }
                else
                {
                    this.hasCollections = false;
                }

                for (int i = 0; i < last; i++)
                {
                    ColumnIdentifier id = getColumnId(cfm, i);
                    this.columns.put(id, new Name(cfm.ksName, cfm.cfName, id, Name.Kind.COLUMN_ALIAS, i, composite.types.get(i)));
                }

                for (Map.Entry<ByteBuffer, ColumnDefinition> def : cfm.getColumn_metadata().entrySet())
                {
                    ColumnIdentifier id = new ColumnIdentifier(def.getKey(), cfm.getColumnDefinitionComparator(def.getValue()));
                    this.metadata.put(id, new Name(cfm.ksName, cfm.cfName, id, Name.Kind.COLUMN_METADATA, def.getValue().getValidator()));
                }
            }
            else
            {
                // "dense" composite: every comparator component is a clustering column, plus one value.
                this.isCompact = true;
                this.hasCollections = false;
                for (int i = 0; i < composite.types.size(); i++)
                {
                    ColumnIdentifier id = getColumnId(cfm, i);
                    this.columns.put(id, new Name(cfm.ksName, cfm.cfName, id, Name.Kind.COLUMN_ALIAS, i, composite.types.get(i)));
                }
                this.value = createValue(cfm);
            }
        }
        else
        {
            this.isComposite = false;
            this.hasCollections = false;
            if (!cfm.getColumnAliases().isEmpty() || cfm.getColumn_metadata().isEmpty())
            {
                // dynamic CF: one clustering column (the comparator) and one value column.
                this.isCompact = true;
                ColumnIdentifier id = getColumnId(cfm, 0);
                Name name = new Name(cfm.ksName, cfm.cfName, id, Name.Kind.COLUMN_ALIAS, 0, cfm.comparator);
                this.columns.put(id, name);
                this.value = createValue(cfm);
            }
            else
            {
                // static CF: all defined columns are metadata, there is no value column.
                this.isCompact = false;
                this.value = null;
                assert cfm.getValueAlias() == null;
                assert cfm.getColumnAliases() == null || cfm.getColumnAliases().isEmpty();
                for (Map.Entry<ByteBuffer, ColumnDefinition> def : cfm.getColumn_metadata().entrySet())
                {
                    ColumnIdentifier id = new ColumnIdentifier(def.getKey(), cfm.getColumnDefinitionComparator(def.getValue()));
                    this.metadata.put(id, new Name(cfm.ksName, cfm.cfName, id, Name.Kind.COLUMN_METADATA, def.getValue().getValidator()));
                }
            }
        }
        // A compact value and metadata columns are mutually exclusive by construction above.
        assert value == null || metadata.isEmpty();
    }

    /** Returns the identifier for partition key component {@code i}, defaulting when unnamed. */
    private static ColumnIdentifier getKeyId(CFMetaData cfm, int i)
    {
        List<ByteBuffer> definedNames = cfm.getKeyAliases();
        // For compatibility sake, non-composite key default alias is 'key', not 'key1'.
        return definedNames == null || i >= definedNames.size() || cfm.getKeyAliases().get(i) == null
             ? new ColumnIdentifier(i == 0 ? DEFAULT_KEY_ALIAS : DEFAULT_KEY_ALIAS + (i + 1), false)
             : new ColumnIdentifier(cfm.getKeyAliases().get(i), definitionType);
    }

    /** Returns the identifier for clustering component {@code i}, defaulting to 'column<i+1>' when unnamed. */
    private static ColumnIdentifier getColumnId(CFMetaData cfm, int i)
    {
        List<ByteBuffer> definedNames = cfm.getColumnAliases();
        return definedNames == null || i >= definedNames.size() || cfm.getColumnAliases().get(i) == null
             ? new ColumnIdentifier(DEFAULT_COLUMN_ALIAS + (i + 1), false)
             : new ColumnIdentifier(cfm.getColumnAliases().get(i), definitionType);
    }

    /** Returns the identifier for the compact value column, defaulting to 'value' when unnamed. */
    private static ColumnIdentifier getValueId(CFMetaData cfm)
    {
        return cfm.getValueAlias() == null
             ? new ColumnIdentifier(DEFAULT_VALUE_ALIAS, false)
             : new ColumnIdentifier(cfm.getValueAlias(), definitionType);
    }

    /** Returns the collection map type (always the last comparator component), or null if none. */
    public ColumnToCollectionType getCollectionType()
    {
        if (!hasCollections)
            return null;

        CompositeType composite = (CompositeType)cfm.comparator;
        return (ColumnToCollectionType)composite.types.get(composite.types.size() - 1);
    }

    /** Builds the Name for the compact value column, or null if the value was explicitly suppressed. */
    private static Name createValue(CFMetaData cfm)
    {
        ColumnIdentifier alias = getValueId(cfm);
        // That's how we distinguish between 'no value alias because coming from thrift' and 'I explicitly did not
        // define a value' (see CreateColumnFamilyStatement)
        return alias.key.equals(ByteBufferUtil.EMPTY_BYTE_BUFFER)
               ? null
               : new Name(cfm.ksName, cfm.cfName, alias, Name.Kind.VALUE_ALIAS, cfm.getDefaultValidator());
    }

    /** Looks {@code name} up in keys, then value, then columns, then metadata; null if absent. */
    public Name get(ColumnIdentifier name)
    {
        CFDefinition.Name kdef = keys.get(name);
        if (kdef != null)
            return kdef;
        if (value != null && name.equals(value.name))
            return value;
        CFDefinition.Name def = columns.get(name);
        if (def != null)
            return def;
        return metadata.get(name);
    }

    /** Iterates in wildcard-expansion order: keys, columns, value (if any), then metadata. */
    public Iterator<Name> iterator()
    {
        return new AbstractIterator<Name>()
        {
            private final Iterator<Name> keyIter = keys.values().iterator();
            private final Iterator<Name> columnIter = columns.values().iterator();
            private boolean valueDone; // whether the value column has been emitted
            private final Iterator<Name> metadataIter = metadata.values().iterator();

            protected Name computeNext()
            {
                if (keyIter.hasNext())
                    return keyIter.next();

                if (columnIter.hasNext())
                    return columnIter.next();

                if (value != null && !valueDone)
                {
                    valueDone = true;
                    return value;
                }

                if (metadataIter.hasNext())
                    return metadataIter.next();

                return endOfData();
            }
        };
    }

    /** Returns a builder for serializing partition keys (composite or not). */
    public ColumnNameBuilder getKeyNameBuilder()
    {
        return hasCompositeKey
             ? new CompositeType.Builder((CompositeType)cfm.getKeyValidator())
             : new NonCompositeBuilder(cfm.getKeyValidator());
    }

    /** Returns a builder for serializing internal column names (composite or not). */
    public ColumnNameBuilder getColumnNameBuilder()
    {
        return isComposite
             ? new CompositeType.Builder((CompositeType)cfm.comparator)
             : new NonCompositeBuilder(cfm.comparator);
    }

    /** A column specification tagged with the kind of CQL column it represents and its position. */
    public static class Name extends ColumnSpecification
    {
        public static enum Kind
        {
            KEY_ALIAS, COLUMN_ALIAS, VALUE_ALIAS, COLUMN_METADATA
        }

        private Name(String ksName, String cfName, ColumnIdentifier name, Kind kind, AbstractType<?> type)
        {
            this(ksName, cfName, name, kind, -1, type);
        }

        private Name(String ksName, String cfName, ColumnIdentifier name, Kind kind, int position, AbstractType<?> type)
        {
            super(ksName, cfName, name, type);
            this.kind = kind;
            this.position = position;
        }

        public final Kind kind;
        public final int position; // only makes sense for KEY_ALIAS and COLUMN_ALIAS; -1 otherwise

        @Override
        public boolean equals(Object o)
        {
            if(!(o instanceof Name))
                return false;
            Name that = (Name)o;
            return Objects.equal(ksName, that.ksName)
                && Objects.equal(cfName, that.cfName)
                && Objects.equal(name, that.name)
                && Objects.equal(type, that.type)
                && kind == that.kind
                && position == that.position;
        }

        @Override
        public final int hashCode()
        {
            return Objects.hashCode(ksName, cfName, name, type, kind, position);
        }
    }

    @Override
    public String toString()
    {
        StringBuilder sb = new StringBuilder();
        sb.append(Joiner.on(", ").join(keys.values()));
        if (!columns.isEmpty())
            sb.append(", ").append(Joiner.on(", ").join(columns.values()));
        sb.append(" => ");
        if (value != null)
            sb.append(value.name);
        if (!metadata.isEmpty())
            sb.append("{").append(Joiner.on(", ").join(metadata.values())).append(" }");
        return sb.toString();
    }

    /** ColumnNameBuilder for non-composite comparators: holds at most one component. */
    private static class NonCompositeBuilder implements ColumnNameBuilder
    {
        private final AbstractType<?> type;
        private ByteBuffer columnName;

        private NonCompositeBuilder(AbstractType<?> type)
        {
            this.type = type;
        }

        public NonCompositeBuilder add(ByteBuffer bb)
        {
            if (columnName != null)
                throw new IllegalStateException("Column name is already constructed");

            columnName = bb;
            return this;
        }

        // Relational operators are irrelevant for a single component; delegate to add().
        public NonCompositeBuilder add(ByteBuffer bb, Relation.Type op)
        {
            return add(bb);
        }

        public int componentCount()
        {
            return columnName == null ? 0 : 1;
        }

        public int remainingCount()
        {
            return columnName == null ? 1 : 0;
        }

        public ByteBuffer build()
        {
            return columnName == null ? ByteBufferUtil.EMPTY_BYTE_BUFFER : columnName;
        }

        public ByteBuffer buildAsEndOfRange()
        {
            return build();
        }

        public NonCompositeBuilder copy()
        {
            NonCompositeBuilder newBuilder = new NonCompositeBuilder(type);
            newBuilder.columnName = columnName;
            return newBuilder;
        }
    }
}
| |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package io.flutter.view;
import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.app.Activity;
import android.content.Context;
import android.content.res.Configuration;
import android.graphics.Bitmap;
import android.graphics.Insets;
import android.graphics.PixelFormat;
import android.graphics.Rect;
import android.graphics.SurfaceTexture;
import android.os.Build;
import android.os.Handler;
import android.text.format.DateFormat;
import android.util.AttributeSet;
import android.util.SparseArray;
import android.view.DisplayCutout;
import android.view.KeyEvent;
import android.view.MotionEvent;
import android.view.PointerIcon;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.ViewConfiguration;
import android.view.ViewStructure;
import android.view.WindowInsets;
import android.view.WindowManager;
import android.view.accessibility.AccessibilityManager;
import android.view.accessibility.AccessibilityNodeProvider;
import android.view.autofill.AutofillValue;
import android.view.inputmethod.EditorInfo;
import android.view.inputmethod.InputConnection;
import android.view.inputmethod.InputMethodManager;
import androidx.annotation.NonNull;
import androidx.annotation.RequiresApi;
import androidx.annotation.UiThread;
import io.flutter.Log;
import io.flutter.app.FlutterPluginRegistry;
import io.flutter.embedding.android.AndroidTouchProcessor;
import io.flutter.embedding.android.KeyChannelResponder;
import io.flutter.embedding.android.KeyboardManager;
import io.flutter.embedding.engine.dart.DartExecutor;
import io.flutter.embedding.engine.renderer.FlutterRenderer;
import io.flutter.embedding.engine.renderer.SurfaceTextureWrapper;
import io.flutter.embedding.engine.systemchannels.AccessibilityChannel;
import io.flutter.embedding.engine.systemchannels.KeyEventChannel;
import io.flutter.embedding.engine.systemchannels.LifecycleChannel;
import io.flutter.embedding.engine.systemchannels.LocalizationChannel;
import io.flutter.embedding.engine.systemchannels.MouseCursorChannel;
import io.flutter.embedding.engine.systemchannels.NavigationChannel;
import io.flutter.embedding.engine.systemchannels.PlatformChannel;
import io.flutter.embedding.engine.systemchannels.SettingsChannel;
import io.flutter.embedding.engine.systemchannels.SystemChannel;
import io.flutter.embedding.engine.systemchannels.TextInputChannel;
import io.flutter.plugin.common.ActivityLifecycleListener;
import io.flutter.plugin.common.BinaryMessenger;
import io.flutter.plugin.editing.TextInputPlugin;
import io.flutter.plugin.localization.LocalizationPlugin;
import io.flutter.plugin.mouse.MouseCursorPlugin;
import io.flutter.plugin.platform.PlatformPlugin;
import io.flutter.plugin.platform.PlatformViewsController;
import io.flutter.util.ViewUtils;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicLong;
/**
* Deprecated Android view containing a Flutter app.
*
* @deprecated {@link io.flutter.embedding.android.FlutterView} is the new API that now replaces
* this class. See https://flutter.dev/go/android-project-migration for more migration details.
*/
@Deprecated
public class FlutterView extends SurfaceView
implements BinaryMessenger, TextureRegistry, MouseCursorPlugin.MouseCursorViewDelegate {
/**
* Interface for those objects that maintain and expose a reference to a {@code FlutterView} (such
* as a full-screen Flutter activity).
*
* <p>This indirection is provided to support applications that use an activity other than {@link
* io.flutter.app.FlutterActivity} (e.g. Android v4 support library's {@code FragmentActivity}).
* It allows Flutter plugins to deal in this interface and not require that the activity be a
* subclass of {@code FlutterActivity}.
*/
public interface Provider {
  /**
   * Returns a reference to the Flutter view maintained by this object. This may be {@code null}.
   *
   * @return a reference to the Flutter view maintained by this object.
   */
  FlutterView getFlutterView();
}
// Log tag for this class.
private static final String TAG = "FlutterView";

/**
 * Mutable snapshot of this view's geometry that is forwarded to the engine.
 *
 * <p>Dimensions and insets are in physical pixels (the view reports raw pixel sizes; see
 * onSizeChanged/onApplyWindowInsets). {@code physicalTouchSlop} is -1 until initialized.
 */
static final class ViewportMetrics {
  float devicePixelRatio = 1.0f;
  int physicalWidth = 0;
  int physicalHeight = 0;
  // Padding: area obscured by system UI that content should avoid but may draw under.
  int physicalViewPaddingTop = 0;
  int physicalViewPaddingRight = 0;
  int physicalViewPaddingBottom = 0;
  int physicalViewPaddingLeft = 0;
  // Insets: area fully consumed by system UI (e.g. the keyboard).
  int physicalViewInsetTop = 0;
  int physicalViewInsetRight = 0;
  int physicalViewInsetBottom = 0;
  int physicalViewInsetLeft = 0;
  // System gesture exclusion insets (populated on API 29 only; see onApplyWindowInsets).
  int systemGestureInsetTop = 0;
  int systemGestureInsetRight = 0;
  int systemGestureInsetBottom = 0;
  int systemGestureInsetLeft = 0;
  int physicalTouchSlop = -1;
}
// Engine access: Dart execution and rendering for this view.
private final DartExecutor dartExecutor;
private final FlutterRenderer flutterRenderer;

// Platform channels used to communicate with the Flutter framework.
private final NavigationChannel navigationChannel;
private final KeyEventChannel keyEventChannel;
private final LifecycleChannel lifecycleChannel;
private final LocalizationChannel localizationChannel;
private final PlatformChannel platformChannel;
private final SettingsChannel settingsChannel;
private final SystemChannel systemChannel;

// Input, localization and pointer plugins attached to this view.
private final InputMethodManager mImm;
private final TextInputPlugin mTextInputPlugin;
private final LocalizationPlugin mLocalizationPlugin;
private final MouseCursorPlugin mMouseCursorPlugin; // null below API N
private final KeyboardManager mKeyboardManager;
private final AndroidTouchProcessor androidTouchProcessor;
private AccessibilityBridge mAccessibilityNodeProvider;

// Surface lifecycle callback registered on this SurfaceView's holder.
private final SurfaceHolder.Callback mSurfaceCallback;
// Latest viewport geometry reported to the engine.
private final ViewportMetrics mMetrics;
private final List<ActivityLifecycleListener> mActivityLifecycleListeners;
private final List<FirstFrameListener> mFirstFrameListeners;
// Monotonic id source for registered surface textures.
private final AtomicLong nextTextureId = new AtomicLong(0L);
private FlutterNativeView mNativeView;
private boolean mIsSoftwareRenderingEnabled = false; // using the software renderer or not
private boolean didRenderFirstFrame = false;

// Toggles drawing behavior when accessibility services turn on/off.
private final AccessibilityBridge.OnAccessibilityChangeListener onAccessibilityChangeListener =
    new AccessibilityBridge.OnAccessibilityChangeListener() {
      @Override
      public void onAccessibilityChanged(
          boolean isAccessibilityEnabled, boolean isTouchExplorationEnabled) {
        resetWillNotDraw(isAccessibilityEnabled, isTouchExplorationEnabled);
      }
    };
/** Creates a FlutterView with no XML attributes and a freshly created native view. */
public FlutterView(Context context) {
  this(context, null);
}
/** Creates a FlutterView from XML inflation with a freshly created native view. */
public FlutterView(Context context, AttributeSet attrs) {
  this(context, attrs, null);
}
/**
 * Creates a FlutterView backed by {@code nativeView}, or by a newly constructed
 * {@link FlutterNativeView} when {@code nativeView} is null.
 *
 * <p>Wires up the surface callbacks, all platform channels, the input/keyboard/localization
 * plugins, and sends the initial locale and platform settings to Dart. Initialization order
 * matters: the native view must be attached before channels and plugins are created.
 *
 * @throws IllegalArgumentException if the context is not associated with an Activity.
 */
public FlutterView(Context context, AttributeSet attrs, FlutterNativeView nativeView) {
  super(context, attrs);

  Activity activity = ViewUtils.getActivity(getContext());
  if (activity == null) {
    throw new IllegalArgumentException("Bad context");
  }

  // Reuse the supplied native view if there is one; otherwise create a fresh one.
  if (nativeView == null) {
    mNativeView = new FlutterNativeView(activity.getApplicationContext());
  } else {
    mNativeView = nativeView;
  }

  dartExecutor = mNativeView.getDartExecutor();
  flutterRenderer = new FlutterRenderer(mNativeView.getFlutterJNI());
  mIsSoftwareRenderingEnabled = mNativeView.getFlutterJNI().getIsSoftwareRenderingEnabled();
  mMetrics = new ViewportMetrics();
  mMetrics.devicePixelRatio = context.getResources().getDisplayMetrics().density;
  mMetrics.physicalTouchSlop = ViewConfiguration.get(context).getScaledTouchSlop();
  setFocusable(true);
  setFocusableInTouchMode(true);

  mNativeView.attachViewAndActivity(this, activity);

  // Forward surface lifecycle events to the engine.
  mSurfaceCallback =
      new SurfaceHolder.Callback() {
        @Override
        public void surfaceCreated(SurfaceHolder holder) {
          assertAttached();
          mNativeView.getFlutterJNI().onSurfaceCreated(holder.getSurface());
        }

        @Override
        public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
          assertAttached();
          mNativeView.getFlutterJNI().onSurfaceChanged(width, height);
        }

        @Override
        public void surfaceDestroyed(SurfaceHolder holder) {
          assertAttached();
          mNativeView.getFlutterJNI().onSurfaceDestroyed();
        }
      };
  getHolder().addCallback(mSurfaceCallback);

  mActivityLifecycleListeners = new ArrayList<>();
  mFirstFrameListeners = new ArrayList<>();

  // Create all platform channels
  navigationChannel = new NavigationChannel(dartExecutor);
  keyEventChannel = new KeyEventChannel(dartExecutor);
  lifecycleChannel = new LifecycleChannel(dartExecutor);
  localizationChannel = new LocalizationChannel(dartExecutor);
  platformChannel = new PlatformChannel(dartExecutor);
  systemChannel = new SystemChannel(dartExecutor);
  settingsChannel = new SettingsChannel(dartExecutor);

  // Create and set up plugins
  PlatformPlugin platformPlugin = new PlatformPlugin(activity, platformChannel);
  addActivityLifecycleListener(
      new ActivityLifecycleListener() {
        @Override
        public void onPostResume() {
          platformPlugin.updateSystemUiOverlays();
        }
      });
  mImm = (InputMethodManager) getContext().getSystemService(Context.INPUT_METHOD_SERVICE);
  PlatformViewsController platformViewsController =
      mNativeView.getPluginRegistry().getPlatformViewsController();
  mTextInputPlugin =
      new TextInputPlugin(this, new TextInputChannel(dartExecutor), platformViewsController);
  mKeyboardManager =
      new KeyboardManager(
          this,
          mTextInputPlugin,
          new KeyChannelResponder[] {new KeyChannelResponder(keyEventChannel)});
  // Mouse cursor support requires API N.
  if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
    mMouseCursorPlugin = new MouseCursorPlugin(this, new MouseCursorChannel(dartExecutor));
  } else {
    mMouseCursorPlugin = null;
  }
  mLocalizationPlugin = new LocalizationPlugin(context, localizationChannel);
  androidTouchProcessor =
      new AndroidTouchProcessor(flutterRenderer, /*trackMotionEvents=*/ false);
  platformViewsController.attachToFlutterRenderer(flutterRenderer);
  mNativeView
      .getPluginRegistry()
      .getPlatformViewsController()
      .attachTextInputPlugin(mTextInputPlugin);
  mNativeView.getFlutterJNI().setLocalizationPlugin(mLocalizationPlugin);

  // Send initial platform information to Dart
  mLocalizationPlugin.sendLocalesToFlutter(getResources().getConfiguration());
  sendUserPlatformSettingsToDart();
}
/** Returns the executor running this view's Dart code. */
@NonNull
public DartExecutor getDartExecutor() {
  return dartExecutor;
}
/**
 * Routes hardware key events through the Flutter keyboard manager before falling back to the
 * standard Android dispatch.
 *
 * <p>Also keeps Android's {@link KeyEvent.DispatcherState} tracking in sync so long-press and
 * tracking semantics keep working.
 */
@Override
public boolean dispatchKeyEvent(KeyEvent event) {
  // NOTE: the previous code logged every key event at ERROR level
  // (Log.e(TAG, "dispatchKeyEvent: " + event)). That was debug leftover: it spams logcat and
  // leaks typed input into logs, so it has been removed.
  if (event.getAction() == KeyEvent.ACTION_DOWN && event.getRepeatCount() == 0) {
    // Tell Android to start tracking this event.
    getKeyDispatcherState().startTracking(event, this);
  } else if (event.getAction() == KeyEvent.ACTION_UP) {
    // Stop tracking the event.
    getKeyDispatcherState().handleUpEvent(event);
  }
  // If the key processor doesn't handle it, then send it on to the
  // superclass. The key processor will typically handle all events except
  // those where it has re-dispatched the event after receiving a reply from
  // the framework that the framework did not handle it.
  return (isAttached() && mKeyboardManager.handleEvent(event)) || super.dispatchKeyEvent(event);
}
/** Returns the native view backing this FlutterView; null after detach()/destroy(). */
public FlutterNativeView getFlutterNativeView() {
  return mNativeView;
}
/** Returns the plugin registry of the backing native view. */
public FlutterPluginRegistry getPluginRegistry() {
  return mNativeView.getPluginRegistry();
}
/** Returns the asset-bundle lookup key for {@code asset}; delegates to FlutterMain. */
public String getLookupKeyForAsset(String asset) {
  return FlutterMain.getLookupKeyForAsset(asset);
}

/** Returns the lookup key for {@code asset} owned by {@code packageName}; delegates to FlutterMain. */
public String getLookupKeyForAsset(String asset, String packageName) {
  return FlutterMain.getLookupKeyForAsset(asset, packageName);
}
/** Registers a listener that is notified from {@link #onPostResume()}. */
public void addActivityLifecycleListener(ActivityLifecycleListener listener) {
  mActivityLifecycleListeners.add(listener);
}
/**
 * Host activity started. Reports "inactive" (not "resumed") to the framework; "resumed" is
 * only sent from {@link #onPostResume()}.
 */
public void onStart() {
  lifecycleChannel.appIsInactive();
}
/** Host activity paused: the app is visible but not interactive. */
public void onPause() {
  lifecycleChannel.appIsInactive();
}
/**
 * Host activity finished resuming: notify registered lifecycle listeners first, then report
 * "resumed" to the framework.
 */
public void onPostResume() {
  for (int i = 0; i < mActivityLifecycleListeners.size(); i++) {
    mActivityLifecycleListeners.get(i).onPostResume();
  }
  lifecycleChannel.appIsResumed();
}
/** Host activity stopped: the app is no longer visible. */
public void onStop() {
  lifecycleChannel.appIsPaused();
}
/** Forwards a low-memory signal to both the engine and the Flutter framework. */
public void onMemoryPressure() {
  mNativeView.getFlutterJNI().notifyLowMemoryWarning();
  systemChannel.sendMemoryPressureWarning();
}
/**
 * Returns true if the Flutter experience associated with this {@code FlutterView} has rendered
 * its first frame, or false otherwise.
 */
public boolean hasRenderedFirstFrame() {
  return didRenderFirstFrame;
}
/**
 * Provide a listener that will be called once when the FlutterView renders its first frame to the
 * underlying SurfaceView.
 */
public void addFirstFrameListener(FirstFrameListener listener) {
  mFirstFrameListeners.add(listener);
}
/** Remove an existing first frame listener. */
public void removeFirstFrameListener(FirstFrameListener listener) {
  mFirstFrameListeners.remove(listener);
}
// Message buffering is not supported by this deprecated view; both are intentional no-ops.
@Override
public void enableBufferingIncomingMessages() {}

@Override
public void disableBufferingIncomingMessages() {}
/**
 * Reverts this back to the {@link SurfaceView} defaults, at the back of its window and opaque.
 */
public void disableTransparentBackground() {
  setZOrderOnTop(false);
  getHolder().setFormat(PixelFormat.OPAQUE);
}
/** Sets the route the Flutter framework navigates to on startup. */
public void setInitialRoute(String route) {
  navigationChannel.setInitialRoute(route);
}
/** Pushes {@code route} onto the Flutter framework's navigation stack. */
public void pushRoute(String route) {
  navigationChannel.pushRoute(route);
}
/** Pops the top route off the Flutter framework's navigation stack. */
public void popRoute() {
  navigationChannel.popRoute();
}
/** Pushes the current text scale, 24-hour clock setting, and platform brightness to Dart. */
private void sendUserPlatformSettingsToDart() {
  Configuration config = getResources().getConfiguration();
  // Night mode maps to the "dark" platform brightness; everything else maps to "light".
  boolean nightModeActive =
      (config.uiMode & Configuration.UI_MODE_NIGHT_MASK) == Configuration.UI_MODE_NIGHT_YES;
  settingsChannel
      .startMessage()
      .setTextScaleFactor(config.fontScale)
      .setUse24HourFormat(DateFormat.is24HourFormat(getContext()))
      .setPlatformBrightness(
          nightModeActive
              ? SettingsChannel.PlatformBrightness.dark
              : SettingsChannel.PlatformBrightness.light)
      .send();
}
/** Re-sends locales and platform settings to Dart whenever the Android configuration changes. */
@Override
protected void onConfigurationChanged(Configuration newConfig) {
  super.onConfigurationChanged(newConfig);
  mLocalizationPlugin.sendLocalesToFlutter(newConfig);
  sendUserPlatformSettingsToDart();
}
/** Returns the device pixel ratio captured at construction time. */
float getDevicePixelRatio() {
  return mMetrics.devicePixelRatio;
}
/**
 * Detaches this view from its native view without destroying it, returning the native view so it
 * can be reattached elsewhere. Returns null if already detached.
 */
public FlutterNativeView detach() {
  if (!isAttached()) return null;
  getHolder().removeCallback(mSurfaceCallback);
  mNativeView.detachFromFlutterView();

  FlutterNativeView view = mNativeView;
  mNativeView = null;
  return view;
}
/** Tears down the surface callback, accessibility bridge, and the native view itself. */
public void destroy() {
  if (!isAttached()) return;

  getHolder().removeCallback(mSurfaceCallback);

  releaseAccessibilityNodeProvider();

  mNativeView.destroy();
  mNativeView = null;
}
/** Delegates IME connection creation to the Flutter text input plugin. */
@Override
public InputConnection onCreateInputConnection(EditorInfo outAttrs) {
  return mTextInputPlugin.createInputConnection(this, mKeyboardManager, outAttrs);
}
/** Lets platform views proxy input connections through this view. */
@Override
public boolean checkInputConnectionProxy(View view) {
  return mNativeView
      .getPluginRegistry()
      .getPlatformViewsController()
      .checkInputConnectionProxy(view);
}
/** Forwards autofill structure requests to the text input plugin. */
@Override
public void onProvideAutofillVirtualStructure(ViewStructure structure, int flags) {
  super.onProvideAutofillVirtualStructure(structure, flags);
  mTextInputPlugin.onProvideAutofillVirtualStructure(structure, flags);
}
/** Forwards autofill values from the Android autofill framework to the text input plugin. */
@Override
public void autofill(SparseArray<AutofillValue> values) {
  mTextInputPlugin.autofill(values);
}
/** Forwards touch events to Flutter's touch processor once the view is attached. */
@Override
public boolean onTouchEvent(MotionEvent event) {
  if (!isAttached()) {
    return super.onTouchEvent(event);
  }

  // TODO(abarth): This version check might not be effective in some
  // versions of Android that statically compile code and will be upset
  // at the lack of |requestUnbufferedDispatch|. Instead, we should factor
  // version-dependent code into separate classes for each supported
  // version and dispatch dynamically.
  if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
    requestUnbufferedDispatch(event);
  }

  return androidTouchProcessor.onTouchEvent(event);
}
/**
 * Routes hover events to the accessibility bridge for touch exploration.
 *
 * <p>NOTE(review): {@code mAccessibilityNodeProvider} is a nullable field assigned outside this
 * view of the file — confirm hover events cannot arrive while it is still null, otherwise this
 * can NPE.
 */
@Override
public boolean onHoverEvent(MotionEvent event) {
  if (!isAttached()) {
    return super.onHoverEvent(event);
  }

  boolean handled = mAccessibilityNodeProvider.onAccessibilityHoverEvent(event);
  if (!handled) {
    // TODO(ianh): Expose hover events to the platform,
    // implementing ADD, REMOVE, etc.
  }
  return handled;
}
/**
 * Invoked by Android when a generic motion event occurs, e.g., joystick movement, mouse hover,
 * track pad touches, scroll wheel movements, etc.
 *
 * <p>Flutter handles all of its own gesture detection and processing, so all {@link MotionEvent}
 * data from Android is forwarded to Flutter; Android's default handling is used only when the
 * touch processor does not consume the event (or the view is not attached).
 */
@Override
public boolean onGenericMotionEvent(MotionEvent event) {
  if (isAttached() && androidTouchProcessor.onGenericMotionEvent(event)) {
    return true;
  }
  return super.onGenericMotionEvent(event);
}
/** Records the new physical size and pushes updated viewport metrics to the engine. */
@Override
protected void onSizeChanged(int width, int height, int oldWidth, int oldHeight) {
  mMetrics.physicalWidth = width;
  mMetrics.physicalHeight = height;
  updateViewportMetrics();
  super.onSizeChanged(width, height, oldWidth, oldHeight);
}
// TODO(garyq): Add support for notch cutout API
// Decide if we want to zero the padding of the sides. When in Landscape orientation,
// android may decide to place the software navigation bars on the side. When the nav
// bar is hidden, the reported insets should be removed to prevent extra useless space
// on the sides.
/** Which horizontal sides should have their reported insets zeroed. */
private enum ZeroSides {
  NONE,
  LEFT,
  RIGHT,
  BOTH
}
/** Determines, from orientation plus display rotation, which side insets to zero out. */
private ZeroSides calculateShouldZeroSides() {
  // We get both orientation and rotation because rotation is all 4
  // rotations relative to default rotation while orientation is portrait
  // or landscape. By combining both, we can obtain a more precise measure
  // of the rotation.
  Context context = getContext();
  int orientation = context.getResources().getConfiguration().orientation;
  int rotation =
      ((WindowManager) context.getSystemService(Context.WINDOW_SERVICE))
          .getDefaultDisplay()
          .getRotation();

  if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
    if (rotation == Surface.ROTATION_90) {
      return ZeroSides.RIGHT;
    } else if (rotation == Surface.ROTATION_270) {
      // In android API >= 23, the nav bar always appears on the "bottom" (USB) side.
      return Build.VERSION.SDK_INT >= 23 ? ZeroSides.LEFT : ZeroSides.RIGHT;
    }
    // Ambiguous orientation due to landscape left/right default. Zero both sides.
    else if (rotation == Surface.ROTATION_0 || rotation == Surface.ROTATION_180) {
      return ZeroSides.BOTH;
    }
  }
  // Square orientation deprecated in API 16, we will not check for it and return false
  // to be safe and not remove any unique padding for the devices that do use it.
  return ZeroSides.NONE;
}
// TODO(garyq): Use new Android R getInsets API
// TODO(garyq): The keyboard detection may interact strangely with
// https://github.com/flutter/flutter/issues/22061

// Uses inset heights and screen heights as a heuristic to determine if the insets should
// be padded. When the on-screen keyboard is detected, we want to include the full inset
// but when the inset is just the hidden nav bar, we want to provide a zero inset so the space
// can be used.
/** Heuristically returns the keyboard's bottom inset, or 0 when the inset is not a keyboard. */
@TargetApi(20)
@RequiresApi(20)
private int guessBottomKeyboardInset(WindowInsets insets) {
  int screenHeight = getRootView().getHeight();
  // Magic number due to this being a heuristic. This should be replaced, but we have not
  // found a clean way to do it yet (Sept. 2018)
  final double keyboardHeightRatioHeuristic = 0.18;
  if (insets.getSystemWindowInsetBottom() < screenHeight * keyboardHeightRatioHeuristic) {
    // Is not a keyboard, so return zero as inset.
    return 0;
  } else {
    // Is a keyboard, so return the full inset.
    return insets.getSystemWindowInsetBottom();
  }
}
// This callback is not present in API < 20, which means lower API devices will see
// the wider than expected padding when the status and navigation bars are hidden.
// The annotations to suppress "InlinedApi" and "NewApi" lints prevent lint warnings
// caused by usage of Android Q APIs. These calls are safe because they are
// guarded.
/**
 * Translates window insets into Flutter viewport metrics.
 *
 * <p>On API >= 30 (R) the typed {@code getInsets()} API is used: visible
 * status/navigation bars become view padding, the IME becomes the view inset,
 * and display-cutout / waterfall insets are merged into the padding via max().
 * On older releases the deprecated system-window-inset getters are used
 * together with the keyboard heuristic in {@code guessBottomKeyboardInset} and
 * the side-zeroing logic in {@code calculateShouldZeroSides}.
 */
@Override
@TargetApi(20)
@RequiresApi(20)
@SuppressLint({"InlinedApi", "NewApi"})
public final WindowInsets onApplyWindowInsets(WindowInsets insets) {
  // getSystemGestureInsets() was introduced in API 29 and immediately deprecated in 30,
  // so it is only consulted on exactly Q; R+ uses the typed API below.
  if (Build.VERSION.SDK_INT == Build.VERSION_CODES.Q) {
    Insets systemGestureInsets = insets.getSystemGestureInsets();
    mMetrics.systemGestureInsetTop = systemGestureInsets.top;
    mMetrics.systemGestureInsetRight = systemGestureInsets.right;
    mMetrics.systemGestureInsetBottom = systemGestureInsets.bottom;
    mMetrics.systemGestureInsetLeft = systemGestureInsets.left;
  }
  // A bar is considered visible when its "hide" system-UI flag is NOT set.
  boolean statusBarVisible = (SYSTEM_UI_FLAG_FULLSCREEN & getWindowSystemUiVisibility()) == 0;
  boolean navigationBarVisible =
      (SYSTEM_UI_FLAG_HIDE_NAVIGATION & getWindowSystemUiVisibility()) == 0;
  if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) {
    // Only include insets for bars that are actually visible.
    int mask = 0;
    if (navigationBarVisible) {
      mask = mask | android.view.WindowInsets.Type.navigationBars();
    }
    if (statusBarVisible) {
      mask = mask | android.view.WindowInsets.Type.statusBars();
    }
    Insets uiInsets = insets.getInsets(mask);
    mMetrics.physicalViewPaddingTop = uiInsets.top;
    mMetrics.physicalViewPaddingRight = uiInsets.right;
    mMetrics.physicalViewPaddingBottom = uiInsets.bottom;
    mMetrics.physicalViewPaddingLeft = uiInsets.left;
    Insets imeInsets = insets.getInsets(android.view.WindowInsets.Type.ime());
    mMetrics.physicalViewInsetTop = imeInsets.top;
    mMetrics.physicalViewInsetRight = imeInsets.right;
    mMetrics.physicalViewInsetBottom = imeInsets.bottom; // Typically, only bottom is non-zero
    mMetrics.physicalViewInsetLeft = imeInsets.left;
    Insets systemGestureInsets =
        insets.getInsets(android.view.WindowInsets.Type.systemGestures());
    mMetrics.systemGestureInsetTop = systemGestureInsets.top;
    mMetrics.systemGestureInsetRight = systemGestureInsets.right;
    mMetrics.systemGestureInsetBottom = systemGestureInsets.bottom;
    mMetrics.systemGestureInsetLeft = systemGestureInsets.left;
    // TODO(garyq): Expose the full rects of the display cutout.
    // Take the max of the display cutout insets and existing padding to merge them
    DisplayCutout cutout = insets.getDisplayCutout();
    if (cutout != null) {
      Insets waterfallInsets = cutout.getWaterfallInsets();
      mMetrics.physicalViewPaddingTop =
          Math.max(
              Math.max(mMetrics.physicalViewPaddingTop, waterfallInsets.top),
              cutout.getSafeInsetTop());
      mMetrics.physicalViewPaddingRight =
          Math.max(
              Math.max(mMetrics.physicalViewPaddingRight, waterfallInsets.right),
              cutout.getSafeInsetRight());
      mMetrics.physicalViewPaddingBottom =
          Math.max(
              Math.max(mMetrics.physicalViewPaddingBottom, waterfallInsets.bottom),
              cutout.getSafeInsetBottom());
      mMetrics.physicalViewPaddingLeft =
          Math.max(
              Math.max(mMetrics.physicalViewPaddingLeft, waterfallInsets.left),
              cutout.getSafeInsetLeft());
    }
  } else {
    // We zero the left and/or right sides to prevent the padding the
    // navigation bar would have caused.
    ZeroSides zeroSides = ZeroSides.NONE;
    if (!navigationBarVisible) {
      zeroSides = calculateShouldZeroSides();
    }
    // Status bar (top), navigation bar (bottom) and left/right system insets should
    // partially obscure the content (padding).
    mMetrics.physicalViewPaddingTop = statusBarVisible ? insets.getSystemWindowInsetTop() : 0;
    mMetrics.physicalViewPaddingRight =
        zeroSides == ZeroSides.RIGHT || zeroSides == ZeroSides.BOTH
            ? 0
            : insets.getSystemWindowInsetRight();
    mMetrics.physicalViewPaddingBottom =
        navigationBarVisible && guessBottomKeyboardInset(insets) == 0
            ? insets.getSystemWindowInsetBottom()
            : 0;
    mMetrics.physicalViewPaddingLeft =
        zeroSides == ZeroSides.LEFT || zeroSides == ZeroSides.BOTH
            ? 0
            : insets.getSystemWindowInsetLeft();
    // Bottom system inset (keyboard) should adjust scrollable bottom edge (inset).
    mMetrics.physicalViewInsetTop = 0;
    mMetrics.physicalViewInsetRight = 0;
    mMetrics.physicalViewInsetBottom = guessBottomKeyboardInset(insets);
    mMetrics.physicalViewInsetLeft = 0;
  }
  updateViewportMetrics();
  return super.onApplyWindowInsets(insets);
}
/**
 * Legacy (API <= 19) inset handling. KitKat and below have no
 * onApplyWindowInsets callback, so system insets arrive here instead and are
 * forwarded to the Flutter engine as viewport metrics.
 */
@Override
@SuppressWarnings("deprecation")
protected boolean fitSystemWindows(Rect insets) {
  if (Build.VERSION.SDK_INT > Build.VERSION_CODES.KITKAT) {
    return super.fitSystemWindows(insets);
  }
  // Status bar, left/right system insets partially obscure content (padding).
  mMetrics.physicalViewPaddingTop = insets.top;
  mMetrics.physicalViewPaddingRight = insets.right;
  mMetrics.physicalViewPaddingBottom = 0;
  mMetrics.physicalViewPaddingLeft = insets.left;
  // Bottom system inset (keyboard) should adjust scrollable bottom edge (inset).
  mMetrics.physicalViewInsetTop = 0;
  mMetrics.physicalViewInsetRight = 0;
  mMetrics.physicalViewInsetBottom = insets.bottom;
  mMetrics.physicalViewInsetLeft = 0;
  updateViewportMetrics();
  return true;
}
/** Whether a native view exists and is currently attached to the engine. */
private boolean isAttached() {
  return mNativeView != null && mNativeView.isAttached();
}
/** Throws an {@link AssertionError} when no native view is attached. */
void assertAttached() {
  if (!isAttached()) {
    throw new AssertionError("Platform view is not attached");
  }
}
// Invoked before running a bundle; clears any stale accessibility state.
private void preRun() {
  resetAccessibilityTree();
}
/** Resets the cached accessibility tree, if an accessibility bridge exists. */
void resetAccessibilityTree() {
  if (mAccessibilityNodeProvider == null) {
    return;
  }
  mAccessibilityNodeProvider.reset();
}
private void postRun() {}
/**
 * Runs the Flutter app described by {@code args} on the attached native view.
 *
 * @param args bundle/entrypoint arguments forwarded to the native view
 * @throws AssertionError if no native view is attached (see assertAttached)
 */
public void runFromBundle(FlutterRunArguments args) {
  assertAttached();
  preRun();
  mNativeView.runFromBundle(args);
  postRun();
}
/**
 * Return the most recent frame as a bitmap.
 *
 * <p>Requires an attached native view; otherwise an {@link AssertionError}
 * is thrown by {@code assertAttached()}.
 *
 * @return A bitmap.
 */
public Bitmap getBitmap() {
  assertAttached();
  return mNativeView.getFlutterJNI().getBitmap();
}
/**
 * Pushes the current viewport metrics (pixel ratio, size, padding, insets,
 * gesture insets, touch slop) through JNI to the Flutter engine. No-op when
 * no native view is attached.
 */
private void updateViewportMetrics() {
  if (!isAttached()) return;
  mNativeView
      .getFlutterJNI()
      .setViewportMetrics(
          mMetrics.devicePixelRatio,
          mMetrics.physicalWidth,
          mMetrics.physicalHeight,
          mMetrics.physicalViewPaddingTop,
          mMetrics.physicalViewPaddingRight,
          mMetrics.physicalViewPaddingBottom,
          mMetrics.physicalViewPaddingLeft,
          mMetrics.physicalViewInsetTop,
          mMetrics.physicalViewInsetRight,
          mMetrics.physicalViewInsetBottom,
          mMetrics.physicalViewInsetLeft,
          mMetrics.systemGestureInsetTop,
          mMetrics.systemGestureInsetRight,
          mMetrics.systemGestureInsetBottom,
          mMetrics.systemGestureInsetLeft,
          mMetrics.physicalTouchSlop,
          // No display features are reported by this view (empty arrays).
          new int[0],
          new int[0],
          new int[0]);
}
// Called by FlutterNativeView to notify first Flutter frame rendered.
public void onFirstFrame() {
  didRenderFirstFrame = true;
  // Iterate over a snapshot so listeners may remove themselves when called.
  for (FirstFrameListener listener : new ArrayList<>(mFirstFrameListeners)) {
    listener.onFirstFrame();
  }
}
@Override
protected void onAttachedToWindow() {
  super.onAttachedToWindow();
  // Stand up the accessibility bridge now that a window is available, and
  // keep willNotDraw in sync with the current accessibility state.
  PlatformViewsController platformViewsController =
      getPluginRegistry().getPlatformViewsController();
  mAccessibilityNodeProvider =
      new AccessibilityBridge(
          this,
          new AccessibilityChannel(dartExecutor, getFlutterNativeView().getFlutterJNI()),
          (AccessibilityManager) getContext().getSystemService(Context.ACCESSIBILITY_SERVICE),
          getContext().getContentResolver(),
          platformViewsController);
  mAccessibilityNodeProvider.setOnAccessibilityChangeListener(onAccessibilityChangeListener);
  resetWillNotDraw(
      mAccessibilityNodeProvider.isAccessibilityEnabled(),
      mAccessibilityNodeProvider.isTouchExplorationEnabled());
}
@Override
protected void onDetachedFromWindow() {
  super.onDetachedFromWindow();
  // Tear down the accessibility bridge created in onAttachedToWindow.
  releaseAccessibilityNodeProvider();
}
// TODO(mattcarroll): Confer with Ian as to why we need this method. Delete if possible, otherwise
// add comments.
private void resetWillNotDraw(boolean isAccessibilityEnabled, boolean isTouchExplorationEnabled) {
  // With software rendering the view must always draw; otherwise drawing is
  // only needed while accessibility or touch exploration is active.
  boolean skipDrawing =
      !mIsSoftwareRenderingEnabled && !isAccessibilityEnabled && !isTouchExplorationEnabled;
  setWillNotDraw(skipDrawing);
}
@Override
public AccessibilityNodeProvider getAccessibilityNodeProvider() {
  // TODO(goderbauer): when a11y is off this should return a one-off snapshot of
  // the a11y tree.
  if (mAccessibilityNodeProvider == null
      || !mAccessibilityNodeProvider.isAccessibilityEnabled()) {
    return null;
  }
  return mAccessibilityNodeProvider;
}
/** Releases the accessibility bridge, if any, and drops the reference. */
private void releaseAccessibilityNodeProvider() {
  if (mAccessibilityNodeProvider == null) {
    return;
  }
  mAccessibilityNodeProvider.release();
  mAccessibilityNodeProvider = null;
}
/** Resolves a platform pointer icon of the given {@link PointerIcon} type. */
@Override
@TargetApi(Build.VERSION_CODES.N)
@RequiresApi(Build.VERSION_CODES.N)
@NonNull
public PointerIcon getSystemPointerIcon(int type) {
  return PointerIcon.getSystemIcon(getContext(), type);
}
@Override
@UiThread
public TaskQueue makeBackgroundTaskQueue(TaskQueueOptions options) {
  // Background task queues are not supported by this view; callers get null.
  return null;
}
/** Sends {@code message} on {@code channel} with no reply callback. */
@Override
@UiThread
public void send(String channel, ByteBuffer message) {
  send(channel, message, null);
}
/**
 * Sends {@code message} on {@code channel}, invoking {@code callback} with
 * the reply. Silently drops the message (with a debug log) when the view is
 * detached.
 */
@Override
@UiThread
public void send(String channel, ByteBuffer message, BinaryReply callback) {
  if (isAttached()) {
    mNativeView.send(channel, message, callback);
    return;
  }
  Log.d(TAG, "FlutterView.send called on a detached view, channel=" + channel);
}
/** Registers {@code handler} for messages arriving on {@code channel}. */
@Override
@UiThread
public void setMessageHandler(String channel, BinaryMessageHandler handler) {
  mNativeView.setMessageHandler(channel, handler);
}
/** Registers {@code handler} for {@code channel}, dispatching on {@code taskQueue}. */
@Override
@UiThread
public void setMessageHandler(String channel, BinaryMessageHandler handler, TaskQueue taskQueue) {
  mNativeView.setMessageHandler(channel, handler, taskQueue);
}
/** Listener will be called on the Android UI thread once when Flutter renders the first frame. */
public interface FirstFrameListener {
  /** Invoked once, after the first Flutter frame has been rendered. */
  void onFirstFrame();
}
@Override
public TextureRegistry.SurfaceTextureEntry createSurfaceTexture() {
  // Texture name 0 is a placeholder; registerSurfaceTexture detaches it from
  // the GL context before handing it to the engine.
  final SurfaceTexture surfaceTexture = new SurfaceTexture(0);
  return registerSurfaceTexture(surfaceTexture);
}
/**
 * Registers an externally created SurfaceTexture with the engine, assigning
 * it the next texture id. The texture is detached from its GL context first.
 */
@Override
public TextureRegistry.SurfaceTextureEntry registerSurfaceTexture(
    @NonNull SurfaceTexture surfaceTexture) {
  surfaceTexture.detachFromGLContext();
  final SurfaceTextureRegistryEntry entry =
      new SurfaceTextureRegistryEntry(nextTextureId.getAndIncrement(), surfaceTexture);
  mNativeView.getFlutterJNI().registerTexture(entry.id(), entry.textureWrapper());
  return entry;
}
/**
 * TextureRegistry entry backed by a {@link SurfaceTexture}. Frame-available
 * notifications are forwarded to the engine via JNI; {@link #release()} must
 * detach the listener before releasing native resources (ordering matters —
 * see the comments in release()).
 */
final class SurfaceTextureRegistryEntry implements TextureRegistry.SurfaceTextureEntry {
  // Engine-side texture id assigned at registration time.
  private final long id;
  private final SurfaceTextureWrapper textureWrapper;
  // Set once in release(); checked in onFrameAvailable to ignore stale callbacks.
  private boolean released;
  SurfaceTextureRegistryEntry(long id, SurfaceTexture surfaceTexture) {
    this.id = id;
    this.textureWrapper = new SurfaceTextureWrapper(surfaceTexture);
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
      // The callback relies on being executed on the UI thread (unsynchronised read of
      // mNativeView
      // and also the engine code check for platform thread in
      // Shell::OnPlatformViewMarkTextureFrameAvailable),
      // so we explicitly pass a Handler for the current thread.
      this.surfaceTexture().setOnFrameAvailableListener(onFrameListener, new Handler());
    } else {
      // Android documentation states that the listener can be called on an arbitrary thread.
      // But in practice, versions of Android that predate the newer API will call the listener
      // on the thread where the SurfaceTexture was constructed.
      this.surfaceTexture().setOnFrameAvailableListener(onFrameListener);
    }
  }
  // Forwards frame-available events to the engine, unless already released.
  private SurfaceTexture.OnFrameAvailableListener onFrameListener =
      new SurfaceTexture.OnFrameAvailableListener() {
        @Override
        public void onFrameAvailable(SurfaceTexture texture) {
          if (released || mNativeView == null) {
            // Even though we make sure to unregister the callback before releasing, as of Android
            // O
            // SurfaceTexture has a data race when accessing the callback, so the callback may
            // still be called by a stale reference after released==true and mNativeView==null.
            return;
          }
          mNativeView
              .getFlutterJNI()
              .markTextureFrameAvailable(SurfaceTextureRegistryEntry.this.id);
        }
      };
  /** The wrapper around the underlying SurfaceTexture, as handed to the engine. */
  public SurfaceTextureWrapper textureWrapper() {
    return textureWrapper;
  }
  @Override
  public SurfaceTexture surfaceTexture() {
    return textureWrapper.surfaceTexture();
  }
  @Override
  public long id() {
    return id;
  }
  @Override
  public void release() {
    if (released) {
      return;
    }
    released = true;
    // The ordering of the next 3 calls is important:
    // First we remove the frame listener, then we release the SurfaceTexture, and only after we
    // unregister
    // the texture which actually deletes the GL texture.
    // Otherwise onFrameAvailableListener might be called after mNativeView==null
    // (https://github.com/flutter/flutter/issues/20951). See also the check in onFrameAvailable.
    surfaceTexture().setOnFrameAvailableListener(null);
    textureWrapper.release();
    mNativeView.getFlutterJNI().unregisterTexture(id);
  }
}
}
| |
/*
* Copyright 2009 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.gwt.uibinder.test.client;
import com.google.gwt.core.client.GWT;
import com.google.gwt.dom.client.DivElement;
import com.google.gwt.dom.client.Document;
import com.google.gwt.dom.client.Element;
import com.google.gwt.dom.client.ParagraphElement;
import com.google.gwt.dom.client.SpanElement;
import com.google.gwt.junit.DoNotRunWith;
import com.google.gwt.junit.Platform;
import com.google.gwt.junit.client.GWTTestCase;
import com.google.gwt.resources.client.ClientBundle;
import com.google.gwt.resources.client.CssResource.NotStrict;
import com.google.gwt.uibinder.test.client.EnumeratedLabel.Suffix;
import com.google.gwt.user.client.DOM;
import com.google.gwt.user.client.ui.DisclosurePanel;
import com.google.gwt.user.client.ui.DockPanel;
import com.google.gwt.user.client.ui.HTML;
import com.google.gwt.user.client.ui.HTMLPanel;
import com.google.gwt.user.client.ui.Label;
import com.google.gwt.user.client.ui.RadioButton;
import com.google.gwt.user.client.ui.RootPanel;
import com.google.gwt.user.client.ui.StackPanel;
import com.google.gwt.user.client.ui.Widget;
/**
 * Functional test of UiBinder.
 *
 * <p>Exercises a widget-based UI ({@code WidgetBasedUi}) and a DOM-based UI
 * ({@code DomBasedUi}), both rebuilt fresh in {@link #gwtSetUp}.
 */
public class UiBinderTest extends GWTTestCase {
  private WidgetBasedUi widgetUi;
  private DomBasedUi domUi;
  private DockPanel root;

  @Override
  public String getModuleName() {
    return "com.google.gwt.uibinder.test.UiBinderTestApp";
  }

  @Override
  public void gwtSetUp() throws Exception {
    super.gwtSetUp();
    RootPanel.get().clear();
    domUi = new DomBasedUi("Cherished User");
    Document.get().getBody().appendChild(domUi.root);
    widgetUi = new WidgetBasedUi();
    root = widgetUi.root;
    RootPanel.get().add(widgetUi);
  }

  @Override
  public void gwtTearDown() throws Exception {
    RootPanel.get().clear();
    super.gwtTearDown();
  }

  public void testAccessToNonStandardElement() {
    Element elm = widgetUi.nonStandardElement;
    assertEquals("I", elm.getTagName());
  }

  public void testAddStyleNamesAndDebugId() {
    Label l = widgetUi.lblDebugId;
    assertEquals("gwt-debug-joe", l.getElement().getId());
    WidgetBasedUiExternalResources resources = GWT.create(WidgetBasedUiExternalResources.class);
    assertTrue(l.getStyleName().contains("newStyle"));
    assertTrue(l.getStyleName().contains("anotherStyle"));
    assertTrue(l.getStyleName().contains("dependentStyle"));
    assertTrue(l.getStyleName().contains("anotherDependentStyle"));
    assertTrue(l.getStyleName().contains(resources.style().prettyText()));
  }

  // TODO(rjrjr) The direction stuff in these tests really belongs in
  // DockPanelParserTest

  public void testAllowIdOnDomElements() {
    Element elm = DOM.getElementById("shouldSayHTML");
    assertEquals("HTML", elm.getInnerHTML());
  }

  public void testBraceEscaping() {
    assertEquals("blah di blah {foo: \"bar\"} di blah",
        widgetUi.bracedParagraph.getAttribute("fnord"));
  }

  public void testBundle() {
    assertEquals(getCenter(), widgetUi.bundledLabel.getParent());
    assertEquals(new FakeBundle().helloText(), widgetUi.bundledLabel.getText());
    WidgetBasedUiExternalResources resources = GWT.create(WidgetBasedUiExternalResources.class);
    assertEquals(resources.style().prettyText(), widgetUi.bundledLabel.getStyleName());
    Element pretty = DOM.getElementById("prettyPara");
    assertEquals(resources.style().prettyText(), pretty.getClassName());
    ArbitraryPojo pojo = new ArbitraryPojo();
    FooLabel foo = new FooLabel();
    foo.setPojo(pojo);
    assertEquals(foo.getText(), widgetUi.theFoo.getText());
  }

  public void testCenter() {
    // TODO(rjrjr) More of a test of HTMLPanelParser
    Widget center = getCenter();
    assertEquals(DockPanel.CENTER, root.getWidgetDirection(center));
    assertEquals(HTMLPanel.class, center.getClass());
    String html = center.getElement().getInnerHTML();
    assertTrue(html.contains("main area"));
    assertTrue(html.contains("Button with"));
    assertTrue(html.contains("Of course"));
    assertEquals(center, widgetUi.myButton.getParent());
  }

  public void testComputedAttributeInPlaceholderedElement() {
    WidgetBasedUiExternalResources resources = GWT.create(WidgetBasedUiExternalResources.class);
    assertEquals(resources.style().prettyText(),
        widgetUi.spanInMsg.getClassName());
  }

  public void testComputedStyleInAPlaceholder() {
    WidgetBasedUiExternalResources resources = GWT.create(WidgetBasedUiExternalResources.class);
    assertEquals(resources.style().tmText(), widgetUi.tmElement.getClassName());
  }

  public void testDomAccessAndComputedAttributeOnPlaceholderedElement() {
    WidgetBasedUiExternalResources resources = GWT.create(WidgetBasedUiExternalResources.class);
    Element elem = DOM.getElementById("placeholdersSpan");
    assertEquals("bold", elem.getStyle().getProperty("fontWeight"));
    assertEquals(resources.style().prettyText(), elem.getClassName());
  }

  public void testDomAccessInHtml() {
    DivElement sideBar = widgetUi.sideBar;
    assertTrue("sideBar should start: \"This could\"",
        sideBar.getInnerText().startsWith("This could"));
    assertTrue("sideBar should end: \"example:\"",
        sideBar.getInnerText().endsWith("like that..."));
    assertEquals("Should have no id", "", sideBar.getAttribute("id"));
  }

  public void testDomAccessInHtmlPanel() {
    SpanElement messageInMain = widgetUi.messageInMain;
    String text = messageInMain.getInnerText().trim();
    assertTrue("sideBar should start: \"This is the main area\"",
        text.startsWith("This is the main area"));
    assertTrue("sideBar should end: \"example.\"", text.endsWith("example."));
  }

  public void testDomAttributeMessageWithFunnyChars() {
    ParagraphElement p = widgetUi.funnyCharsMessageDomAttributeParagraph;
    String t = p.getAttribute("title");
    assertEquals("funny characters \" ' ' & < > > { }", t);
  }

  public void testDomAttributeNoMessageWithFunnyChars() {
    ParagraphElement p = widgetUi.funnyCharsDomAttributeParagraph;
    String t = p.getAttribute("title");
    assertEquals("funny characters \" ' ' & < > > { }", t);
  }

  public void testDomTextMessageWithFunnyChars() {
    String t = widgetUi.funnyCharsMessageParagraph.getInnerText();
    assertEquals("They might show up in body text that has been marked for "
        + "translation: funny characters \" \" ' ' & < > > { }", t);
  }

  public void testMixOfWidgetsAndElementsInUiMsg() {
    assertEquals("single translatable message", widgetUi.mixedMessageWidget.getText());
    assertEquals("exciting and subtle", widgetUi.mixedMessageSpan.getInnerText());
  }

  public void testEnums() {
    Suffix expected = EnumeratedLabel.Suffix.tail;
    assertTrue("Should end with suffix \"" + expected + "\"",
        widgetUi.enumLabel.getText().endsWith(expected.toString()));
  }

  public void testCustomButtonParser() {
    // .toLowerCase normalization to keep IE happy
    assertEquals("<b>click me</b>",
        widgetUi.pushButton.getUpFace().getHTML().toLowerCase());
    assertTrue(widgetUi.pushButton.getUpHoveringFace().getHTML().contains(
        ">Click ME!<"));
    assertEquals("<b>click me!</b>",
        widgetUi.pushButton.getUpHoveringFace().getHTML().toLowerCase());
    // Can't test the images at all :-P
  }

  public void testProtectedDomTextMessageWithFunnyChars() {
    String t = widgetUi.funnyCharsProtectedMessageParagraph.getInnerText();
    assertEquals("Don't forget about protected untranslatable blocks: "
        + "funny characters \" \" ' ' & < > > { }", t);
  }

  public void testDomTextInNamedElementMessageWithFunnyChars() {
    String t = widgetUi.funnyCharsMessageChildSpan.getInnerText();
    assertEquals("funny characters \" \" ' ' & < > > { }", t);
  }

  public void suppressedForSafari3Fail_testDomTextNoMessageWithFunnyChars() {
    ParagraphElement p = widgetUi.funnyCharsParagraph;
    // WebKit does \n replace thing, so let's do it everywhere
    String t = p.getInnerHTML().replace("\n", " ").toLowerCase();
    String expected = "Templates can be marked up for <b>localization</b>, which presents alls "
        + "kinds of exciting opportunities for bugs related to character escaping. "
        + "Consider these funny characters \" \" ' ' & < > > { }, and "
        + "the various places they might make your life miserable, like this "
        + "untranslated paragraph.";
    expected = expected.toLowerCase();
    assertEquals(expected, t);
  }

  public void testFieldAttribute() {
    assertEquals(getCenter(), widgetUi.gwtFieldLabel.getParent());
  }

  public void testFieldInPlaceholderedElement() {
    assertEquals("named portions", widgetUi.spanInMsg.getInnerText());
  }

  public void testMenuAttributes() {
    assertEquals(widgetUi.dropdownMenuBar.getStyleName(),
        widgetUi.myStyle.menuBar());
  }

  public void testMenuItems() {
    // Has a legacy MenuItemHTML in its midst
    assertEquals("The pig's in a hurry",
        widgetUi.menuItemLegacy.getElement().getInnerText());
    assertTrue("Style should include \"moppy\"",
        widgetUi.menuItemMop.getStyleName().contains("moppy"));
  }

  public void testMessageTrimming() {
    assertEquals("Title area, specified largely in HTML.",
        widgetUi.trimmedMessage.getInnerHTML());
    assertEquals("Tommy can you hear me? Can you field me near you?",
        widgetUi.gwtFieldLabel.getText());
  }

  public void testMinimalDom() {
    assertEquals("Expect no wrapper div around root", widgetUi.getElement(),
        root.getElement());
  }

  public void testNamedElementInAPlaceholder() {
    assertEquals("TM", widgetUi.tmElement.getInnerText());
  }

  public void testNestedBundle() {
    DomBasedUi.Resources resources = GWT.create(DomBasedUi.Resources.class);
    assertEquals(resources.style().bodyColor() + " "
        + resources.style().bodyFont(), domUi.root.getClassName());
  }

  interface Bundle extends ClientBundle {
    @Source(value = {"WidgetBasedUi.css", "Menu.css"})
    @NotStrict
    WidgetBasedUi.Style style();
  }

  @DoNotRunWith(Platform.HtmlUnit)
  public void testNoOverrideInheritedSharedCssClasses() {
    Bundle bundle = GWT.create(Bundle.class);
    WidgetBasedUi ui = GWT.create(WidgetBasedUi.class);
    String publicStyle = bundle.style().menuBar();
    String privateStyle = ui.myStyle.menuBar();
    assertEquals(publicStyle, privateStyle);
  }

  public void suppressedForIEfail_testNonXmlEntities() {
    // This fragment includes both translated and non-translated strings
    ParagraphElement mainParagraph = widgetUi.main;
    final String innerHTML = mainParagraph.getInnerHTML().trim();
    assertTrue(innerHTML.contains(" \u261E \u2022 XHTML \u2022 \u261C"));
    assertTrue(innerHTML.startsWith("\u261E <span>"));
    assertTrue(innerHTML.endsWith("</span> \u261C"));
  }

  public void testNorth() {
    Widget north = root.getWidget(0);
    assertEquals(DockPanel.NORTH, root.getWidgetDirection(north));
    assertEquals(HTML.class, north.getClass());
    assertTrue(((HTML) north).getHTML().contains("Title area"));
  }

  public void testPrivateStyleFromExternalCss() {
    ParagraphElement p = widgetUi.privateStyleParagraph;
    assertTrue("Some kind of class should be set",
        p.getClassName().length() > 0);
  }

  public void testPrivateStylesFromInlineCss() {
    ParagraphElement p = widgetUi.reallyPrivateStyleParagraph;
    assertTrue("Some kind of class should be set",
        p.getClassName().length() > 0);
    assertFalse("Should be a different style than privateStyleParagraph's",
        widgetUi.privateStyleParagraph.getClassName().equals(p.getClassName()));
    assertTrue("Some kind of class should be set",
        widgetUi.totallyPrivateStyleSpan.getClassName().length() > 0);
  }

  public void testRadioButton() {
    RadioButton able = widgetUi.myRadioAble;
    RadioButton baker = widgetUi.myRadioBaker;
    assertTrue("able should be checked", able.getValue());
    assertFalse("baker should not be checked", baker.getValue());
    assertEquals("radios", able.getName());
    assertEquals("radios", baker.getName());
  }

  public void testStackPanel() {
    StackPanel p = widgetUi.myStackPanel;
    assertNotNull("Panel exists", p);
    Widget w = widgetUi.myStackPanelItem;
    assertNotNull("Widget exists", w);
    boolean containsWidget = false;
    for (int i = 0; i < p.getWidgetCount(); i++) {
      if (p.getWidget(i) == w) {
        containsWidget = true;
      }
    }
    assertTrue("Panel contains widget", containsWidget);
  }

  public void testDisclosurePanel() {
    DisclosurePanel p = widgetUi.myDisclosurePanel;
    assertNotNull("Panel exists", p);
    Widget w = widgetUi.myDisclosurePanelItem;
    assertNotNull("Widget exists", w);
    assertEquals("Panel contains widget", w, p.getContent());
  }

  public void testStringAttributeIgnoresStaticSetter() {
    // Assumes setPopupText() is overloaded such that there is a static
    // setPopupText(Foo, String) method.
    ClickyLink clicky = widgetUi.customLinkWidget;
    assertEquals("overloaded setter should have been called", "That tickles!",
        clicky.getPopupText());
  }

  public void testStringAttributeWithFormatChars() {
    assertEquals("100%", root.getElement().getStyle().getProperty("width"));
  }

  public void testWest() {
    Widget west = root.getWidget(1);
    assertEquals(DockPanel.WEST, root.getWidgetDirection(west));
    assertEquals(HTML.class, west.getClass());
    String html = ((HTML) west).getHTML();
    assertTrue(html.contains("side bar"));
  }

  public void testWidgetAttributeMessageWithFunnyChars() {
    ClickyLink clicky = widgetUi.funnyCharsMessageAttributeWidget;
    String t = clicky.getPopupText();
    assertEquals("funny characters \" ' ' & < > > { }", t);
  }

  public void testWidgetAttributeNoMessageWithFunnyChars() {
    ClickyLink clicky = widgetUi.funnyCharsAttributeWidget;
    String t = clicky.getPopupText();
    assertEquals("funny characters \" ' ' & < > > { }", t);
  }

  public void testImageResourceInImageWidget() {
    assertEquals(widgetUi.prettyImage.getWidth(),
        widgetUi.babyWidget.getOffsetWidth());
    assertEquals(widgetUi.prettyImage.getHeight(),
        widgetUi.babyWidget.getOffsetHeight());
    assertEquals(widgetUi.prettyImage.getTop(),
        widgetUi.babyWidget.getOriginTop());
    assertEquals(widgetUi.prettyImage.getLeft(),
        widgetUi.babyWidget.getOriginLeft());
  }

  public void testDataResource() {
    assertNotNull(widgetUi.heartCursorResource.getUrl());
  }

  @DoNotRunWith(Platform.HtmlUnit)
  public void testCssImportedScopes() {
    assertEquals(100, widgetUi.cssImportScopeSample.inner.getOffsetWidth());
  }

  public void testSpritedElement() {
    assertEquals(widgetUi.prettyImage.getWidth(),
        widgetUi.simpleSpriteParagraph.getOffsetWidth());
    assertEquals(widgetUi.prettyImage.getHeight(),
        widgetUi.simpleSpriteParagraph.getOffsetHeight());
  }

  public void suppressForIEfail_testBizarrelyElementedWidgets() {
    assertInOrder(widgetUi.widgetCrazyTable.getInnerHTML().toLowerCase(),
        "<td>they have been known</td>", "<td>to write widgets</td>",
        "<td>that masquerade</td>", "<td>as table cells,</td>",
        "<td>just like these.</td>", "<td>burma shave</td>");
    assertInOrder(widgetUi.widgetCrazyOrderedList.getInnerHTML(),
        "<li>similar</li>", "<li>things</li>");
    assertInOrder(widgetUi.widgetCrazyDefinitionList.getInnerHTML(),
        "<dt>Being</dt>", "<dd>done</dd>", "<dd>with</dd>", "<dd>lists</dd>");
  }

  public void testCustomHtmlPanelTag() {
    assertInOrder(widgetUi.customTagHtmlPanel.getElement().getInnerHTML(),
        "<td>Even HTMLPanel gets in on the game</td>",
        "<td>Lately, anyway.</td>");
  }

  /**
   * Assert that the expect strings are found in body, and in the order given.
   * WARNING: both body and expected are normalized to lower case, to get around
   * IE's habit of returning capitalized DOM elements.
   */
  private void assertInOrder(String body, String... expected) {
    body = body.toLowerCase();
    // Start at -1 so a legitimate match at index 0 of body is accepted
    // (previously lastIndex started at 0, wrongly rejecting it).
    int lastIndex = -1;
    String lastExpected = "";
    for (String next : expected) {
      next = next.toLowerCase();
      int index = body.indexOf(next);
      assertTrue(body + " should contain " + next, index > -1);
      assertTrue("Expect " + next + " after " + lastExpected, index > lastIndex);
      lastIndex = index;
      // Keep the failure message accurate (previously never updated).
      lastExpected = next;
    }
  }

  private Widget getCenter() {
    Widget center = root.getWidget(2);
    return center;
  }
}
| |
package com.speedcam;
import android.annotation.SuppressLint;
import android.content.Context;
import android.location.Location;
import android.location.LocationListener;
import android.location.LocationManager;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Handler;
import android.os.Looper;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.SurfaceView;
import android.view.WindowManager;
import android.widget.ListView;
import android.widget.TextView;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.JavaCameraView;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.MatOfRect;
import org.opencv.core.Rect;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;
import org.opencv.objdetect.CascadeClassifier;
import org.tensorflow.contrib.android.TensorFlowInferenceInterface;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Set;
import static com.speedcam.Constants.COLOR_CHANNELS;
import static com.speedcam.Constants.IMAGE_SIZE;
import static com.speedcam.Constants.INPUT_NODE;
import static com.speedcam.Constants.MIN_DISTANCE_UPDATE;
import static com.speedcam.Constants.MIN_TIME_UPDATE;
import static com.speedcam.Constants.NEGATIVE_SIGN;
import static com.speedcam.Constants.OUTPUT_NODE;
import static com.speedcam.Constants.SPEED_UNITS;
import static com.speedcam.Constants.TIME_POP_SIGN_LISTIVEW;
public class HomeActivity extends AppCompatActivity
    implements CameraBridgeViewBase.CvCameraViewListener2, LocationListener {
  private static final String TAG = HomeActivity.class.getName();
  // TensorFlow model used to classify detected signs; closed in onDestroy.
  private TensorFlowInferenceInterface signClassifier;
  // OpenCV cascade classifier; loaded once OpenCV is ready (loadCascadeClassifier).
  private CascadeClassifier cascadeClassifier;
  // Local copy of the cascade definition file. NOTE(review): written/read
  // outside this excerpt — confirm usage in loadCascadeClassifier.
  private File mCascadeFile;
  // ListView showing the recently recognized signs.
  private ListView signView;
  private JavaCameraView cameraView;
  // Current camera frame (RGBA); allocated/released with the camera view.
  private Mat frame;
  // presumably maps sign class ids to drawable resources; populated in initSignImages.
  private HashMap<Integer, Integer> signImages;
  // Backing data for signView, rendered through signAdapter.
  private ArrayList<Integer> signList;
  private SignAdapter signAdapter;
  // Main-thread handler driving the periodic sign-list trimming runnable.
  private Handler handler = new Handler(Looper.getMainLooper());
// Invoked when the OpenCV runtime becomes available: starts the camera
// preview and loads the cascade classifier; other statuses are delegated.
BaseLoaderCallback loaderCallback = new BaseLoaderCallback(this) {
  @Override
  public void onManagerConnected(int status) {
    if (status == BaseLoaderCallback.SUCCESS) {
      cameraView.enableView();
      loadCascadeClassifier();
    } else {
      super.onManagerConnected(status);
    }
  }
};
// Periodically trims the sign list so at most a few entries stay visible.
// NOTE(review): removes the LAST element (most recently appended index);
// confirm whether the oldest entry was intended instead.
private Runnable runnable = new Runnable() {
  public void run() {
    if (signList.size() > 2) {
      signList.remove(signList.size() - 1);
      signAdapter.notifyDataSetChanged();
    }
    // Re-arm so trimming repeats every TIME_POP_SIGN_LISTIVEW ms.
    handler.postDelayed(this, TIME_POP_SIGN_LISTIVEW);
  }
};
/**
 * Wires up the UI (sign list, camera preview), loads the TensorFlow model,
 * registers for GPS updates, and starts the periodic sign-list trimming.
 * Location permission is assumed granted (hence the MissingPermission lint
 * suppression) — NOTE(review): confirm a runtime permission check happens
 * before this activity starts.
 */
@SuppressLint("MissingPermission")
@Override
protected void onCreate(Bundle savedInstanceState) {
  super.onCreate(savedInstanceState);
  setContentView(R.layout.activity_home);
  // Keep the screen on and use the whole display while driving.
  getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
  getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);
  signList = new ArrayList<>();
  signView = findViewById(R.id.signList);
  signAdapter = new SignAdapter(HomeActivity.this, signList);
  signView.setAdapter(signAdapter);
  signClassifier = new TensorFlowInferenceInterface(getAssets(), Constants.TENSORFLOW_MODEL_FILE);
  initSignImages();
  cameraView = findViewById(R.id.camera_view);
  cameraView.setVisibility(SurfaceView.VISIBLE);
  cameraView.setCvCameraViewListener(this);
  LocationManager locationManager = (LocationManager) this.getSystemService(Context.LOCATION_SERVICE);
  locationManager.requestLocationUpdates(LocationManager.GPS_PROVIDER,
      MIN_TIME_UPDATE, MIN_DISTANCE_UPDATE, this);
  // Show 0 until the first GPS fix arrives.
  this.updateSpeed(0);
  handler.postDelayed(runnable, TIME_POP_SIGN_LISTIVEW);
}
@Override
protected void onPause() {
super.onPause();
if (cameraView != null) {
cameraView.disableView();
}
}
@Override
protected void onResume() {
super.onResume();
if (OpenCVLoader.initDebug()) {
loaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
} else {
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_3_0,
this, loaderCallback);
}
}
@Override
protected void onDestroy() {
super.onDestroy();
cameraView.disableView();
signClassifier.close();
}
@Override
public void onCameraViewStarted(int width, int height) {
frame = new Mat(height, width, CvType.CV_32FC3);
}
@Override
public void onCameraViewStopped() {
frame.release();
}
@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
frame = inputFrame.rgba();
new SignRecognition().execute(frame);
return frame;
}
@Override
public void onLocationChanged(Location location) {
if(location != null) {
this.updateSpeed(location.getSpeed());
}
}
@Override
public void onStatusChanged(String provider, int status, Bundle extras) {
// TODO Auto-generated method stub
}
@Override
public void onProviderEnabled(String provider) {
// TODO Auto-generated method stub
}
@Override
public void onProviderDisabled(String provider) {
// TODO Auto-generated method stub
}
private void updateSpeed(float currentSpeed) {
TextView speedView = this.findViewById(R.id.speed_view);
speedView.setText((int)currentSpeed + " " + SPEED_UNITS);
}
private class SignRecognition extends AsyncTask<Object, Void, Set<Integer>> {
@Override
protected Set doInBackground(Object[] objects) {
Mat frame = (Mat) objects[0];
MatOfRect detectedObjects = new MatOfRect();
cascadeClassifier.detectMultiScale(frame, detectedObjects, 1.3, 5, 0,
new Size(Math.round(frame.rows() * 0.2f), frame.cols() * 0.2f), new Size());
Set<Integer> detectedSigns = new HashSet<>();
for (Rect object : detectedObjects.toArray()) {
Rect objectCoordinates = new Rect(object.x, object.y, object.width, object.height);
Mat croppedObject = new Mat(frame, objectCoordinates);
Mat resizedImg = new Mat(IMAGE_SIZE, IMAGE_SIZE, CvType.CV_32FC3);
Imgproc.resize(croppedObject, resizedImg, new Size(IMAGE_SIZE, IMAGE_SIZE));
float[] floatArrImg = convertMatToFloatArray(resizedImg);
int prediction = predictSign(floatArrImg);
detectedSigns.add(prediction);
}
return detectedSigns;
}
@Override
protected void onPostExecute(Set<Integer> detectedSigns) {
if (detectedSigns.size() != 0) {
for (Integer detectedSign : detectedSigns) {
if (detectedSign != NEGATIVE_SIGN && !signList.contains(signImages.get(detectedSign))) {
signList.add(0, signImages.get(detectedSign));
}
}
signAdapter.notifyDataSetChanged();
}
}
private float[] convertMatToFloatArray(Mat image) {
float[] floatImage = new float[image.rows() * image.cols() * 3];
int index = 0;
for (int i = 0; i < image.rows(); i++) {
for (int j = 0; j < image.cols(); j++) {
floatImage[index] = (float) ((image.get(i, j)[0]) / 255.0);
floatImage[index + 1] = (float) (image.get(i, j)[1] / 255.0);
floatImage[index + 2] = (float) (image.get(i, j)[2] / 255.0);
index += 3;
}
}
return floatImage;
}
private int predictSign(float[] floatArrImg) {
long[] outputResult = {0, 0};
signClassifier.feed(INPUT_NODE, floatArrImg, 1,
IMAGE_SIZE, IMAGE_SIZE, COLOR_CHANNELS);
signClassifier.run(new String[] { OUTPUT_NODE }, false);
signClassifier.fetch("prediction", outputResult);
return (int) outputResult[0];
}
}
private void saveCascadeFile() {
final InputStream is;
FileOutputStream os;
try {
is = getResources().getAssets().open(Constants.CASCADE_FILE_NAME);
File cascadeDir = getDir("cascade", Context.MODE_PRIVATE);
mCascadeFile = new File(cascadeDir, Constants.CASCADE_FILE_NAME);
os = new FileOutputStream(mCascadeFile);
byte[] buffer = new byte[4096];
int bytesRead;
while ((bytesRead = is.read(buffer)) != -1) {
os.write(buffer, 0, bytesRead);
}
is.close();
os.close();
} catch (IOException e) {
Log.e(TAG, "Cascade classifier not found");
}
}
private void loadCascadeClassifier() {
saveCascadeFile();
cascadeClassifier = new CascadeClassifier(mCascadeFile.getAbsolutePath());
}
private void initSignImages() {
signImages = new HashMap<>();
int count = 0;
for (Field field : R.drawable.class.getFields()) {
if (field.getName().contains("sign_number")) {
try {
field.setAccessible(true);
signImages.put(count++, field.getInt(R.drawable.class));
} catch (IllegalAccessException e) {
e.printStackTrace();
}
}
}
}
}
| |
/*******************************************************************************
* PathVisio, a tool for data visualization and analysis using biological pathways
* Copyright 2006-2021 BiGCaT Bioinformatics, WikiPathways
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
******************************************************************************/
package org.pathvisio.model;
import java.awt.Color;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.xml.XMLConstants;
import javax.xml.transform.stream.StreamSource;
import javax.xml.validation.Schema;
import javax.xml.validation.SchemaFactory;
import javax.xml.validation.ValidatorHandler;
import org.apache.commons.collections4.BidiMap;
import org.apache.commons.collections4.bidimap.DualHashBidiMap;
import org.bridgedb.DataSource;
import org.bridgedb.Xref;
import org.jdom2.Document;
import org.jdom2.Element;
import org.jdom2.JDOMException;
import org.jdom2.Namespace;
import org.jdom2.output.Format;
import org.jdom2.output.SAXOutputter;
import org.jdom2.output.XMLOutputter;
import org.pathvisio.debug.Logger;
import org.pathvisio.model.type.ArrowHeadType;
import org.pathvisio.model.type.ShapeType;
import org.pathvisio.util.ColorUtils;
import org.pathvisio.util.MiscUtils;
import org.xml.sax.SAXException;
/**
* Abstract class for GPML2013a format. Contains static properties
* {@link String}, {@link Map}, {@link BidiMap}, {@link List}, and methods used
* in reading or writing GPML2013a.
*
* @author finterly
*/
public abstract class GPML2013aFormatAbstract {
	/** The GPML namespace this format instance reads and writes (fixed at construction). */
	private final Namespace nsGPML;

	/** Name of the XSD schema file used to validate documents of this format. */
	private final String xsdFile;
	/**
	 * Constructor for GPML2013aFormat Abstract.
	 *
	 * @param xsdFile the schema file.
	 * @param nsGPML  the GPML namespace.
	 */
	protected GPML2013aFormatAbstract(String xsdFile, Namespace nsGPML) {
		// NOTE(review): arguments are stored as-is; null validation is left to callers.
		this.xsdFile = xsdFile;
		this.nsGPML = nsGPML;
	}
	/**
	 * Returns the name of the GPML schema file used for validation.
	 *
	 * @return xsdFile the schema file.
	 */
	public String getSchemaFile() {
		return xsdFile;
	}
	/**
	 * Returns the GPML namespace of this format.
	 *
	 * @return nsGPML the GPML namespace.
	 */
	public Namespace getGpmlNamespace() {
		return nsGPML;
	}
// ================================================================================
// Static Variables
// ================================================================================
/**
* In GPML2013a, specific {@link Namespace} are defined for Biopax elements.
*/
public static final Namespace RDF_NAMESPACE = Namespace.getNamespace("rdf",
"http://www.w3.org/1999/02/22-rdf-syntax-ns#");
public static final Namespace RDFS_NAMESPACE = Namespace.getNamespace("rdfs",
"http://www.w3.org/2000/01/rdf-schema#");
public static final Namespace BIOPAX_NAMESPACE = Namespace.getNamespace("bp",
"http://www.biopax.org/release/biopax-level3.owl#");
public static final Namespace OWL_NAMESPACE = Namespace.getNamespace("owl", "http://www.w3.org/2002/07/owl#");
public final static String RDF_STRING = "http://www.w3.org/2001/XMLSchema#string";
/**
* Some GPML2013a properties are removed from GPML2021 and therefore cannot be
* mapped to the Java model. These deprecated properties are stored in dynamic
* properties with the following static strings as keys.
*/
public final static String PATHWAY_AUTHOR = "pathway_author_gpml2013a";
public final static String PATHWAY_MAINTAINER = "pathway_maintainer_gpml2013a";
public final static String PATHWAY_EMAIL = "pathway_email_gpml2013a";
public final static String PATHWAY_LASTMODIFIED = "pathway_lastModified_gpml2013a";
public final static String INFOBOX_CENTER_X = "pathway_infobox_centerX_gpml2013a";
public final static String INFOBOX_CENTER_Y = "pathway_infobox_centerY_gpml2013a";
public final static String LEGEND_CENTER_X = "pathway_legend_centerX_gpml2013a";
public final static String LEGEND_CENTER_Y = "pathway_legend_centerY_gpml2013a";
/**
* This {@link Set} stores the deprecated GPML2013a properties. Dynamic
* properties with these keys are ignored when writing GPML2013a
* {@link GPML2013aWriter#writePathwayDynamicProperties} and GPML2021
* {@link GPML2021Writer#writeDynamicProperties}.
*/
public static final Set<String> GPML2013A_KEY_SET = new HashSet<>(Arrays.asList(PATHWAY_AUTHOR, PATHWAY_MAINTAINER,
PATHWAY_EMAIL, PATHWAY_LASTMODIFIED, INFOBOX_CENTER_X, INFOBOX_CENTER_Y, LEGEND_CENTER_X, LEGEND_CENTER_Y));
/**
* In GPML2013a, {@link Pathway} description is written as a
* {@link PathwayElement.Comment} with source="WikiPathways-description".
*/
public final static String WP_DESCRIPTION = "WikiPathways-description";
/**
* In GPML2013a, Double LineStyleType, Cellular Component Shape Types, and State
* rotation were stored as a dynamic properties using the following String keys.
*/
public final static String DOUBLE_LINE_KEY = "org.pathvisio.DoubleLineProperty";
public final static String CELL_CMPNT_KEY = "org.pathvisio.CellularComponentProperty";
public final static String STATE_ROTATION = "org.pathvisio.core.StateRotation";
	/**
	 * This {@link BidiMap}is used for mapping {@link ShapeType} Strings to their
	 * new camelCase spelling for reading and writing GPML2013a. Only the five
	 * multi-word shape type names below need remapping; all other names are
	 * passed through unchanged by {@link #toCamelCase} and {@link #fromCamelCase}.
	 */
	public static final BidiMap<String, String> SHAPETYPE_TO_CAMELCASE = new DualHashBidiMap<>();
	static {
		SHAPETYPE_TO_CAMELCASE.put("Sarcoplasmic Reticulum", "SarcoplasmicReticulum");
		SHAPETYPE_TO_CAMELCASE.put("Endoplasmic Reticulum", "EndoplasmicReticulum");
		SHAPETYPE_TO_CAMELCASE.put("Golgi Apparatus", "GolgiApparatus");
		SHAPETYPE_TO_CAMELCASE.put("Cytosol region", "CytosolRegion");
		SHAPETYPE_TO_CAMELCASE.put("Extracellular region", "ExtracellularRegion");
	}
/**
* Converts shapeType {@link String} to UpperCamelCase convention. In GPML2013a,
* naming convention was inconsistent. Moving forward, enum types strings are
* all in UpperCamelCase.
*
* @param str the string.
* @return the string in camelCase format, or string as it was.
* @throws ConverterException
*/
protected String toCamelCase(String str) throws ConverterException {
if (SHAPETYPE_TO_CAMELCASE.containsKey(str)) {
return SHAPETYPE_TO_CAMELCASE.get(str);
} else
return str;
}
/**
* Converts shapeType {@link String} from UpperCamelCase convention back to its
* original appearance in GPML2013a.
*
* @param str the string.
* @return the string in its original format.
* @throws ConverterException
*/
protected String fromCamelCase(String str) throws ConverterException {
if (SHAPETYPE_TO_CAMELCASE.containsValue(str)) {
return SHAPETYPE_TO_CAMELCASE.getKey(str);
} else
return str;
}
	/**
	 * This {@link Map} maps deprecated {@link ShapeType} to the new shape types
	 * when reading GPML2013a {@link GPML2013aReader#readShapeStyleProperty}.
	 * However, if the pathway element has dynamic property with
	 * {@link #CELL_CMPNT_KEY}, shapeType may be overridden after reading dynamic
	 * properties.
	 */
	public static final Map<ShapeType, ShapeType> DEPRECATED_MAP = new HashMap<ShapeType, ShapeType>();
	static {
		// Rectangular deprecated types collapse to ROUNDED_RECTANGLE.
		DEPRECATED_MAP.put(ShapeType.CELL, ShapeType.ROUNDED_RECTANGLE);
		DEPRECATED_MAP.put(ShapeType.ORGANELLE, ShapeType.ROUNDED_RECTANGLE);
		DEPRECATED_MAP.put(ShapeType.MEMBRANE, ShapeType.ROUNDED_RECTANGLE);
		// Round deprecated types collapse to OVAL.
		DEPRECATED_MAP.put(ShapeType.CELLA, ShapeType.OVAL);
		DEPRECATED_MAP.put(ShapeType.NUCLEUS, ShapeType.OVAL);
		DEPRECATED_MAP.put(ShapeType.ORGANA, ShapeType.OVAL);
		DEPRECATED_MAP.put(ShapeType.ORGANB, ShapeType.OVAL);
		DEPRECATED_MAP.put(ShapeType.ORGANC, ShapeType.OVAL);
		DEPRECATED_MAP.put(ShapeType.VESICLE, ShapeType.OVAL);
		// Six-sided deprecated types collapse to HEXAGON.
		DEPRECATED_MAP.put(ShapeType.PROTEINB, ShapeType.HEXAGON);
		DEPRECATED_MAP.put(ShapeType.RIBOSOME, ShapeType.HEXAGON);
	}
	/**
	 * This cellular component {@link Map} maps {@link ShapeType}s. In GPML2013a,
	 * cellular component shapeTypes are written as dynamic properties
	 * {@link GPML2013aWriter#writeShapedOrStateDynamicProperties} with
	 * {@link #CELL_CMPNT_KEY} key and a value (e.g. "Nucleus"); and property
	 * shapeType in Graphics is written with a corresponding shapeType value (e.g.
	 * "Oval") {@link GPML2013aWriter#writeShapeStyleProperty}.
	 */
	public static final Map<ShapeType, ShapeType> CELL_CMPNT_MAP = new HashMap<ShapeType, ShapeType>();
	static {
		CELL_CMPNT_MAP.put(ShapeType.CELL, ShapeType.ROUNDED_RECTANGLE);
		CELL_CMPNT_MAP.put(ShapeType.NUCLEUS, ShapeType.OVAL);
		// The four organelle shapes with a dedicated drawing keep their own type.
		CELL_CMPNT_MAP.put(ShapeType.ENDOPLASMIC_RETICULUM, ShapeType.ENDOPLASMIC_RETICULUM);
		CELL_CMPNT_MAP.put(ShapeType.GOLGI_APPARATUS, ShapeType.GOLGI_APPARATUS);
		CELL_CMPNT_MAP.put(ShapeType.MITOCHONDRIA, ShapeType.MITOCHONDRIA);
		CELL_CMPNT_MAP.put(ShapeType.SARCOPLASMIC_RETICULUM, ShapeType.SARCOPLASMIC_RETICULUM);
		CELL_CMPNT_MAP.put(ShapeType.ORGANELLE, ShapeType.ROUNDED_RECTANGLE);
		CELL_CMPNT_MAP.put(ShapeType.LYSOSOME, ShapeType.OVAL);
		CELL_CMPNT_MAP.put(ShapeType.NUCLEOLUS, ShapeType.OVAL);
		CELL_CMPNT_MAP.put(ShapeType.VACUOLE, ShapeType.OVAL);
		CELL_CMPNT_MAP.put(ShapeType.VESICLE, ShapeType.OVAL);
		CELL_CMPNT_MAP.put(ShapeType.CYTOSOL, ShapeType.ROUNDED_RECTANGLE);
		CELL_CMPNT_MAP.put(ShapeType.EXTRACELLULAR, ShapeType.ROUNDED_RECTANGLE);
		CELL_CMPNT_MAP.put(ShapeType.MEMBRANE, ShapeType.ROUNDED_RECTANGLE);
	}
	/**
	 * This {@link BidiMap} maps GPML2013a openControlledVocabulary Ontology types
	 * to their {@link DataSource} Prefix for reading
	 * {@link GPML2013aReader#readOpenControlledVocabulary} and writing
	 * {@link GPML2013aWriter#writeOpenControlledVocabulary}.
	 */
	public static final BidiMap<String, String> OCV_ONTOLOGY_MAP = new DualHashBidiMap<>();
	static {
		// Ontology display name -> MIRIAM/identifiers.org prefix.
		OCV_ONTOLOGY_MAP.put("Disease", "DOID");
		OCV_ONTOLOGY_MAP.put("Pathway Ontology", "PW");
		OCV_ONTOLOGY_MAP.put("Cell Type", "CL");
	}
/**
* String values for {@link DataNode.State} phosphosite
* {@link PathwayElement.Comment} information in GPML2013a.
*/
public final static String PARENT = "parent";
public final static String POSITION = "position";
public final static String PARENTID = "parentid";
public final static String PARENTSYMBOL = "parentsymbol";
public final static String PTM = "ptm";
public final static String DIRECTION = "direction";
public final static String SITE = "site";
public final static String SITEGRPID = "sitegrpid";
public final static String PARENTID_DB = "uniprot";
public final static String PARENTSYMBOL_DB = "hgnc";
public final static String SITEGRPID_DB = "phosphositeplus";
/**
* This {@link Set} contains known phosphosite related annotation types for
* {@link DataNode.State} phosphosite {@link PathwayElement.Comment} in
* GPML2013a. This set is used in determining whether a state comment should be
* written as {@link Annotation}s and {@link Xref} in
* {@link GPML2013aReader#convertStateCommentToRefs}.
*/
Set<String> STATE_REF_LIST = new HashSet<>(
Arrays.asList(PARENT, POSITION, PTM, DIRECTION, PARENTID, PARENTSYMBOL, SITE, SITEGRPID));
	/**
	 * This {@link Map} for {@link DataNode.State} phosphosite
	 * {@link PathwayElement.Comment} maps PTM character to {@link Annotation} and
	 * {@link Xref} information. E.g. for ptm=p, Annotation value=Phosphorylation,
	 * Xref identifier=0000216, and dataSource = SBO. Used in writing state comments
	 * to annotations and xref {@link GPML2013aReader#convertStateCommentToRefs}.
	 * Each value list is [annotation name, identifier, data source].
	 */
	public static final Map<String, List<String>> STATE_PTM_MAP = new HashMap<String, List<String>>();
	static {
		STATE_PTM_MAP.put("p", new ArrayList<>(Arrays.asList("Phosphorylation", "0000216", "SBO")));
		STATE_PTM_MAP.put("m", new ArrayList<>(Arrays.asList("Methylation", "0000214", "SBO")));
		STATE_PTM_MAP.put("me", new ArrayList<>(Arrays.asList("Methylation", "0000214", "SBO")));
		// NOTE(review): "000022" has six digits while the other SBO ids have seven —
		// possibly truncated (ubiquitination appears to be SBO:0000224); confirm against SBO.
		STATE_PTM_MAP.put("u", new ArrayList<>(Arrays.asList("Ubiquitination", "000022", "SBO")));
		STATE_PTM_MAP.put("ub", new ArrayList<>(Arrays.asList("Ubiquitination", "000022", "SBO")));
	}
	/**
	 * Map for {@link DataNode.State} phosphosite {@link PathwayElement.Comment}
	 * direction character to {@link Annotation} and {@link Xref} information. "u"
	 * for up-regulated and "d" for down-regulated. Used in writing state comments
	 * to annotations and xref {@link GPML2013aReader#convertStateCommentToRefs}.
	 * Each value list is [annotation name, identifier, data source].
	 */
	public static final Map<String, List<String>> STATE_DIRECTION_MAP = new HashMap<String, List<String>>();
	static {
		STATE_DIRECTION_MAP.put("u",
				new ArrayList<>(Arrays.asList("positive regulation of biological process", "0048518", "GO")));
		STATE_DIRECTION_MAP.put("d",
				new ArrayList<>(Arrays.asList("negative regulation of biological process", "0048519", "GO")));
	}
	/**
	 * In GPML2021, we introduce a new Interaction Panel of {@link ArrowHeadType}.
	 * For each new arrowHead type we define a {@link List} of the old arrowHead
	 * types from GPML2013a which correspond to it. The first GPML2013a arrow head
	 * type string in the list is prioritized when writing from GPML2021 to
	 * GPML2013a.
	 */
	public static final List<String> UNDIRECTED_LIST = new ArrayList<>(Arrays.asList("Line"));
	public static final List<String> DIRECTED_LIST = new ArrayList<>(Arrays.asList("Arrow"));
	public static final List<String> CONVERSION_LIST = new ArrayList<>(Arrays.asList("mim-conversion",
			"mim-modification", "mim-cleavage", "mim-gap", "mim-branching-left", "mim-branching-right"));
	public static final List<String> INHIBITION_LIST = new ArrayList<>(Arrays.asList("mim-inhibition", "TBar"));
	public static final List<String> CATALYSIS_LIST = new ArrayList<>(Arrays.asList("mim-catalysis"));
	public static final List<String> STIMULATION_LIST = new ArrayList<>(
			Arrays.asList("mim-stimulation", "mim-necessary-stimulation"));
	public static final List<String> BINDING_LIST = new ArrayList<>(Arrays.asList("mim-binding", "mim-covalent-bond"));
	public static final List<String> TRANSLOCATION_LIST = new ArrayList<>(Arrays.asList("mim-translocation"));
	public static final List<String> TRANSCRIPTION_TRANSLATION_LIST = new ArrayList<>(
			Arrays.asList("mim-transcription-translation"));

	/**
	 * This {@link Map} maps new Interaction Panel arrow head types to the defined
	 * {@link List} for corresponding GPML2013a arrowHead types.
	 */
	public static final Map<ArrowHeadType, List<String>> IA_PANEL_MAP = new HashMap<ArrowHeadType, List<String>>();
	static {
		IA_PANEL_MAP.put(ArrowHeadType.UNDIRECTED, UNDIRECTED_LIST);
		IA_PANEL_MAP.put(ArrowHeadType.DIRECTED, DIRECTED_LIST);
		IA_PANEL_MAP.put(ArrowHeadType.CONVERSION, CONVERSION_LIST);
		IA_PANEL_MAP.put(ArrowHeadType.INHIBITION, INHIBITION_LIST);
		IA_PANEL_MAP.put(ArrowHeadType.CATALYSIS, CATALYSIS_LIST);
		IA_PANEL_MAP.put(ArrowHeadType.STIMULATION, STIMULATION_LIST);
		IA_PANEL_MAP.put(ArrowHeadType.BINDING, BINDING_LIST);
		IA_PANEL_MAP.put(ArrowHeadType.TRANSLOCATION, TRANSLOCATION_LIST);
		IA_PANEL_MAP.put(ArrowHeadType.TRANSCRIPTION_TRANSLATION, TRANSCRIPTION_TRANSLATION_LIST);
	}
/**
* Returns the GPML2021 Interaction Panel arrow head type for given GPML2013a
* arrowHead type string.
*
* @param arrowHeadStr the string for GPML2013a arrow head type.
* @return arrowHead the interaction panel arrow head type which corresponds to
* arrowHeadStr, or null if no corresponding type exists.
* @throws ConverterException
*/
protected ArrowHeadType getInteractionPanelType(String arrowHeadStr) throws ConverterException {
Set<ArrowHeadType> arrowHeads = IA_PANEL_MAP.keySet();
for (ArrowHeadType arrowHead : arrowHeads) {
List<String> arrowHeadStrs = IA_PANEL_MAP.get(arrowHead);
// case insensitive method for matching in list
if (MiscUtils.containsCaseInsensitive(arrowHeadStr, arrowHeadStrs)) {
return arrowHead;
}
}
return null;
}
/**
* Returns the prioritized GPML2013a arrowHead type string for given GPML2021
* Interaction Panel arrow head type.
*
* @param arrowHead the interaction panel arrow head type for GPML2021e.
* @return the first GPML2013a arrow head which corresponds to the interaction
* panel arrow head type, or null if no corresponding type exists.
* @throws ConverterException
*/
protected String getArrowHeadTypeStr(ArrowHeadType arrowHead) throws ConverterException {
List<String> arrowHeadStrs = IA_PANEL_MAP.get(arrowHead);
if (arrowHeadStrs != null && !arrowHeadStrs.isEmpty()) {
// first arrow head string is priority.
return arrowHeadStrs.get(0);
} else {
return null;
}
}
	/**
	 * Attribute info map is initiated with {@link #initAttributeInfo()}.
	 * Keyed by "Element@Attribute" paths; consulted by getAttr/setAttr for
	 * default values and required/optional handling.
	 */
	private static final Map<String, AttributeInfo> ATTRIBUTE_INFO = initAttributeInfo();
/**
* The {@link Map} initAttributeInfo maps {@link String} tag to
* {@link AttributeInfo}. For GPML2013a reading/writing, we often use
* {@link #getAttr} and {@link #setAttr} in place of standard jdom methods
* {@link Element#getAttributeValue} and {@link Element#setAttribute}
* respectively. If an attribute is null when reading, its default value is
* fetched from this map. When writing, if trying to set a default value or an
* optional value to null, the attribute is omitted which results in a leaner
* xml output.
*
* This map defines custom default values not in the GPML2013a schema such as
* default "Label.Graphics@FillColor" as "Transparent". We do not do this for
* GPML2021 as it can be confusing to have custom reading/writing resulting in
* xml which do not adhere to the schema.
*
* @return
*/
private static Map<String, AttributeInfo> initAttributeInfo() {
Map<String, AttributeInfo> result = new HashMap<String, AttributeInfo>();
result.put("Comment@Source", new AttributeInfo("xsd:string", null, "optional"));
result.put("PublicationXref@ID", new AttributeInfo("xsd:string", null, "required"));
result.put("PublicationXref@Database", new AttributeInfo("xsd:string", null, "required"));
result.put("Attribute@Key", new AttributeInfo("xsd:string", null, "required"));
result.put("Attribute@Value", new AttributeInfo("xsd:string", null, "required"));
result.put("Pathway.Graphics@BoardWidth", new AttributeInfo("gpml:Dimension", null, "required"));
result.put("Pathway.Graphics@BoardHeight", new AttributeInfo("gpml:Dimension", null, "required"));
result.put("Pathway@Name", new AttributeInfo("xsd:string", null, "required"));
result.put("Pathway@Organism", new AttributeInfo("xsd:string", null, "optional"));
result.put("Pathway@Data-Source", new AttributeInfo("xsd:string", null, "optional"));
result.put("Pathway@Version", new AttributeInfo("xsd:string", null, "optional"));
result.put("Pathway@Author", new AttributeInfo("xsd:string", null, "optional"));
result.put("Pathway@Maintainer", new AttributeInfo("xsd:string", null, "optional"));
result.put("Pathway@Email", new AttributeInfo("xsd:string", null, "optional"));
result.put("Pathway@License", new AttributeInfo("xsd:string", null, "optional"));
result.put("Pathway@Last-Modified", new AttributeInfo("xsd:string", null, "optional"));
result.put("Pathway@BiopaxRef", new AttributeInfo("xsd:string", null, "optional"));
result.put("DataNode.Graphics@CenterX", new AttributeInfo("xsd:float", null, "required"));
result.put("DataNode.Graphics@CenterY", new AttributeInfo("xsd:float", null, "required"));
result.put("DataNode.Graphics@Width", new AttributeInfo("gpml:Dimension", null, "required"));
result.put("DataNode.Graphics@Height", new AttributeInfo("gpml:Dimension", null, "required"));
result.put("DataNode.Graphics@FontName", new AttributeInfo("xsd:string", "Arial", "optional"));
result.put("DataNode.Graphics@FontStyle", new AttributeInfo("xsd:string", "Normal", "optional"));
result.put("DataNode.Graphics@FontDecoration", new AttributeInfo("xsd:string", "Normal", "optional"));
result.put("DataNode.Graphics@FontStrikethru", new AttributeInfo("xsd:string", "Normal", "optional"));
result.put("DataNode.Graphics@FontWeight", new AttributeInfo("xsd:string", "Normal", "optional"));
result.put("DataNode.Graphics@FontSize", new AttributeInfo("xsd:nonNegativeInteger", "12", "optional"));
result.put("DataNode.Graphics@Align", new AttributeInfo("xsd:string", "Center", "optional"));
result.put("DataNode.Graphics@Valign", new AttributeInfo("xsd:string", "Top", "optional"));
result.put("DataNode.Graphics@Color", new AttributeInfo("gpml:ColorType", "Black", "optional"));
result.put("DataNode.Graphics@LineStyle", new AttributeInfo("gpml:StyleType", "Solid", "optional"));
result.put("DataNode.Graphics@LineThickness", new AttributeInfo("xsd:float", "1.0", "optional"));
result.put("DataNode.Graphics@FillColor", new AttributeInfo("gpml:ColorType", "White", "optional"));
result.put("DataNode.Graphics@ShapeType", new AttributeInfo("xsd:string", "Rectangle", "optional"));
result.put("DataNode.Graphics@ZOrder", new AttributeInfo("xsd:integer", null, "optional"));
result.put("DataNode.Xref@Database", new AttributeInfo("xsd:string", null, "required"));
result.put("DataNode.Xref@ID", new AttributeInfo("xsd:string", null, "required"));
result.put("DataNode@BiopaxRef", new AttributeInfo("xsd:string", null, "optional"));
result.put("DataNode@GraphId", new AttributeInfo("xsd:ID", null, "optional"));
result.put("DataNode@GroupRef", new AttributeInfo("xsd:string", null, "optional"));
result.put("DataNode@TextLabel", new AttributeInfo("xsd:string", null, "required"));
result.put("DataNode@Type", new AttributeInfo("xsd:string", "Unknown", "optional"));
result.put("State.Graphics@RelX", new AttributeInfo("xsd:float", null, "required"));
result.put("State.Graphics@RelY", new AttributeInfo("xsd:float", null, "required"));
result.put("State.Graphics@Width", new AttributeInfo("gpml:Dimension", null, "required"));
result.put("State.Graphics@Height", new AttributeInfo("gpml:Dimension", null, "required"));
result.put("State.Graphics@Color", new AttributeInfo("gpml:ColorType", "Black", "optional"));
result.put("State.Graphics@LineStyle", new AttributeInfo("gpml:StyleType", "Solid", "optional"));
result.put("State.Graphics@LineThickness", new AttributeInfo("xsd:float", "1.0", "optional"));
result.put("State.Graphics@FillColor", new AttributeInfo("gpml:ColorType", "White", "optional"));
result.put("State.Graphics@ShapeType", new AttributeInfo("xsd:string", "Rectangle", "optional"));
result.put("State.Graphics@ZOrder", new AttributeInfo("xsd:integer", null, "optional"));
result.put("State.Xref@Database", new AttributeInfo("xsd:string", null, "required"));
result.put("State.Xref@ID", new AttributeInfo("xsd:string", null, "required"));
result.put("State@BiopaxRef", new AttributeInfo("xsd:string", null, "optional"));
result.put("State@GraphId", new AttributeInfo("xsd:ID", null, "optional"));
result.put("State@GraphRef", new AttributeInfo("xsd:IDREF", null, "optional"));
result.put("State@TextLabel", new AttributeInfo("xsd:string", null, "required"));
result.put("State@StateType", new AttributeInfo("xsd:string", "Unknown", "optional"));
result.put("GraphicalLine.Graphics.Point@X", new AttributeInfo("xsd:float", null, "required"));
result.put("GraphicalLine.Graphics.Point@Y", new AttributeInfo("xsd:float", null, "required"));
result.put("GraphicalLine.Graphics.Point@RelX", new AttributeInfo("xsd:float", null, "optional"));
result.put("GraphicalLine.Graphics.Point@RelY", new AttributeInfo("xsd:float", null, "optional"));
result.put("GraphicalLine.Graphics.Point@GraphRef", new AttributeInfo("xsd:IDREF", null, "optional"));
result.put("GraphicalLine.Graphics.Point@GraphId", new AttributeInfo("xsd:ID", null, "optional"));
result.put("GraphicalLine.Graphics.Point@ArrowHead", new AttributeInfo("xsd:string", "Line", "optional"));
result.put("GraphicalLine.Graphics.Anchor@Position", new AttributeInfo("xsd:float", null, "required"));
result.put("GraphicalLine.Graphics.Anchor@GraphId", new AttributeInfo("xsd:ID", null, "optional"));
result.put("GraphicalLine.Graphics.Anchor@Shape", new AttributeInfo("xsd:string", "ReceptorRound", "optional"));
result.put("GraphicalLine.Graphics@Color", new AttributeInfo("gpml:ColorType", "Black", "optional"));
result.put("GraphicalLine.Graphics@LineThickness", new AttributeInfo("xsd:float", null, "optional"));
result.put("GraphicalLine.Graphics@LineStyle", new AttributeInfo("gpml:StyleType", "Solid", "optional"));
result.put("GraphicalLine.Graphics@ConnectorType", new AttributeInfo("xsd:string", "Straight", "optional"));
result.put("GraphicalLine.Graphics@ZOrder", new AttributeInfo("xsd:integer", null, "optional"));
result.put("GraphicalLine@GroupRef", new AttributeInfo("xsd:string", null, "optional"));
result.put("GraphicalLine@BiopaxRef", new AttributeInfo("xsd:string", null, "optional"));
result.put("GraphicalLine@GraphId", new AttributeInfo("xsd:ID", null, "optional"));
result.put("GraphicalLine@Type", new AttributeInfo("xsd:string", null, "optional"));
result.put("Interaction.Graphics.Point@X", new AttributeInfo("xsd:float", null, "required"));
result.put("Interaction.Graphics.Point@Y", new AttributeInfo("xsd:float", null, "required"));
result.put("Interaction.Graphics.Point@RelX", new AttributeInfo("xsd:float", null, "optional"));
result.put("Interaction.Graphics.Point@RelY", new AttributeInfo("xsd:float", null, "optional"));
result.put("Interaction.Graphics.Point@GraphRef", new AttributeInfo("xsd:IDREF", null, "optional"));
result.put("Interaction.Graphics.Point@GraphId", new AttributeInfo("xsd:ID", null, "optional"));
result.put("Interaction.Graphics.Point@ArrowHead", new AttributeInfo("xsd:string", "Line", "optional"));
result.put("Interaction.Graphics.Anchor@Position", new AttributeInfo("xsd:float", null, "required"));
result.put("Interaction.Graphics.Anchor@GraphId", new AttributeInfo("xsd:ID", null, "optional"));
result.put("Interaction.Graphics.Anchor@Shape", new AttributeInfo("xsd:string", "ReceptorRound", "optional"));
result.put("Interaction.Graphics@Color", new AttributeInfo("gpml:ColorType", "Black", "optional"));
result.put("Interaction.Graphics@LineThickness", new AttributeInfo("xsd:float", null, "optional"));
result.put("Interaction.Graphics@LineStyle", new AttributeInfo("gpml:StyleType", "Solid", "optional"));
result.put("Interaction.Graphics@ConnectorType", new AttributeInfo("xsd:string", "Straight", "optional"));
result.put("Interaction.Graphics@ZOrder", new AttributeInfo("xsd:integer", null, "optional"));
result.put("Interaction.Xref@Database", new AttributeInfo("xsd:string", null, "required"));
result.put("Interaction.Xref@ID", new AttributeInfo("xsd:string", null, "required"));
result.put("Interaction@GroupRef", new AttributeInfo("xsd:string", null, "optional"));
result.put("Interaction@BiopaxRef", new AttributeInfo("xsd:string", null, "optional"));
result.put("Interaction@GraphId", new AttributeInfo("xsd:ID", null, "optional"));
result.put("Interaction@Type", new AttributeInfo("xsd:string", null, "optional"));
result.put("Label.Graphics@CenterX", new AttributeInfo("xsd:float", null, "required"));
result.put("Label.Graphics@CenterY", new AttributeInfo("xsd:float", null, "required"));
result.put("Label.Graphics@Width", new AttributeInfo("gpml:Dimension", null, "required"));
result.put("Label.Graphics@Height", new AttributeInfo("gpml:Dimension", null, "required"));
result.put("Label.Graphics@FontName", new AttributeInfo("xsd:string", "Arial", "optional"));
result.put("Label.Graphics@FontStyle", new AttributeInfo("xsd:string", "Normal", "optional"));
result.put("Label.Graphics@FontDecoration", new AttributeInfo("xsd:string", "Normal", "optional"));
result.put("Label.Graphics@FontStrikethru", new AttributeInfo("xsd:string", "Normal", "optional"));
result.put("Label.Graphics@FontWeight", new AttributeInfo("xsd:string", "Normal", "optional"));
result.put("Label.Graphics@FontSize", new AttributeInfo("xsd:nonNegativeInteger", "12", "optional"));
result.put("Label.Graphics@Align", new AttributeInfo("xsd:string", "Center", "optional"));
result.put("Label.Graphics@Valign", new AttributeInfo("xsd:string", "Top", "optional"));
result.put("Label.Graphics@Color", new AttributeInfo("gpml:ColorType", "Black", "optional"));
result.put("Label.Graphics@LineStyle", new AttributeInfo("gpml:StyleType", "Solid", "optional"));
result.put("Label.Graphics@LineThickness", new AttributeInfo("xsd:float", "1.0", "optional"));
result.put("Label.Graphics@FillColor", new AttributeInfo("gpml:ColorType", "Transparent", "optional"));
result.put("Label.Graphics@ShapeType", new AttributeInfo("xsd:string", "None", "optional"));
result.put("Label.Graphics@ZOrder", new AttributeInfo("xsd:integer", null, "optional"));
result.put("Label@Href", new AttributeInfo("xsd:string", null, "optional"));
result.put("Label@BiopaxRef", new AttributeInfo("xsd:string", null, "optional"));
result.put("Label@GraphId", new AttributeInfo("xsd:ID", null, "optional"));
result.put("Label@GroupRef", new AttributeInfo("xsd:string", null, "optional"));
result.put("Label@TextLabel", new AttributeInfo("xsd:string", null, "required"));
result.put("Shape.Graphics@CenterX", new AttributeInfo("xsd:float", null, "required"));
result.put("Shape.Graphics@CenterY", new AttributeInfo("xsd:float", null, "required"));
result.put("Shape.Graphics@Width", new AttributeInfo("gpml:Dimension", null, "required"));
result.put("Shape.Graphics@Height", new AttributeInfo("gpml:Dimension", null, "required"));
result.put("Shape.Graphics@FontName", new AttributeInfo("xsd:string", "Arial", "optional"));
result.put("Shape.Graphics@FontStyle", new AttributeInfo("xsd:string", "Normal", "optional"));
result.put("Shape.Graphics@FontDecoration", new AttributeInfo("xsd:string", "Normal", "optional"));
result.put("Shape.Graphics@FontStrikethru", new AttributeInfo("xsd:string", "Normal", "optional"));
result.put("Shape.Graphics@FontWeight", new AttributeInfo("xsd:string", "Normal", "optional"));
result.put("Shape.Graphics@FontSize", new AttributeInfo("xsd:nonNegativeInteger", "12", "optional"));
result.put("Shape.Graphics@Align", new AttributeInfo("xsd:string", "Center", "optional"));
result.put("Shape.Graphics@Valign", new AttributeInfo("xsd:string", "Top", "optional"));
result.put("Shape.Graphics@Color", new AttributeInfo("gpml:ColorType", "Black", "optional"));
result.put("Shape.Graphics@LineStyle", new AttributeInfo("gpml:StyleType", "Solid", "optional"));
result.put("Shape.Graphics@LineThickness", new AttributeInfo("xsd:float", "1.0", "optional"));
result.put("Shape.Graphics@FillColor", new AttributeInfo("gpml:ColorType", "Transparent", "optional"));
result.put("Shape.Graphics@ShapeType", new AttributeInfo("xsd:string", null, "required"));
result.put("Shape.Graphics@ZOrder", new AttributeInfo("xsd:integer", null, "optional"));
result.put("Shape.Graphics@Rotation", new AttributeInfo("gpml:RotationType", "Top", "optional"));
result.put("Shape@BiopaxRef", new AttributeInfo("xsd:string", null, "optional"));
result.put("Shape@GraphId", new AttributeInfo("xsd:ID", null, "optional"));
result.put("Shape@GroupRef", new AttributeInfo("xsd:string", null, "optional"));
result.put("Shape@TextLabel", new AttributeInfo("xsd:string", null, "optional"));
result.put("Group@BiopaxRef", new AttributeInfo("xsd:string", null, "optional"));
result.put("Group@GroupId", new AttributeInfo("xsd:string", null, "required"));
result.put("Group@GroupRef", new AttributeInfo("xsd:string", null, "optional"));
result.put("Group@Style", new AttributeInfo("xsd:string", "None", "optional"));
result.put("Group@TextLabel", new AttributeInfo("xsd:string", null, "optional"));
result.put("Group@GraphId", new AttributeInfo("xsd:ID", null, "optional"));
result.put("InfoBox@CenterX", new AttributeInfo("xsd:float", null, "required"));
result.put("InfoBox@CenterY", new AttributeInfo("xsd:float", null, "required"));
result.put("Legend@CenterX", new AttributeInfo("xsd:float", null, "required"));
result.put("Legend@CenterY", new AttributeInfo("xsd:float", null, "required"));
return result;
}
/**
 * Returns the attribute metadata {@link Map} used when reading and writing
 * GPML2013a. Keys are {@link String}s of the form {@code "Tag@AttributeName"}
 * (as built by {@code setAttr}/{@code getAttr}); values are the corresponding
 * {@link AttributeInfo} entries.
 */
protected Map<String, AttributeInfo> getAttributeInfo() {
	return ATTRIBUTE_INFO;
}
/**
 * Schema metadata for a single GPML attribute: its xsd-validated type, its
 * default value, and whether its use is "required" or "optional".
 * (The previous javadoc — "Name of resource containing the gpml schema
 * definition." — described an unrelated field and was a copy-paste error.)
 */
protected static class AttributeInfo {
	/**
	 * xsd validated type. Note that in the current implementation we don't do
	 * anything with restrictions, only with the base type.
	 */
	public String schemaType;

	/**
	 * default value for the attribute, or null if there is none
	 */
	public String def; // default

	/**
	 * use of the attribute: can be "required" or "optional"
	 */
	public String use;

	/**
	 * Creates an object containing the gpml schema definition of a given attribute.
	 *
	 * @param aSchemaType the xsd validated type of the attribute.
	 * @param aDef        the default value for the attribute.
	 * @param aUse        the use of the attribute.
	 */
	AttributeInfo(String aSchemaType, String aDef, String aUse) {
		schemaType = aSchemaType;
		def = aDef;
		use = aUse;
	}
}
/**
 * Returns true if the given string value matches the default value. A
 * {@code null} default is considered equal to both {@code null} and the
 * empty string (both mean "unset").
 *
 * @param def   the default string.
 * @param value the given string.
 * @return true if the two strings are considered equal.
 */
private boolean isEqualsString(String def, String value) {
	if (def == null) {
		// no default defined: null and "" are both treated as "unset"
		return value == null || value.isEmpty();
	}
	return def.equals(value);
}
/**
* Returns true if given string value and default value are numerically equal.
*
* @param def the string for default number value.
* @param value the string for given number value.
* @return true if absolute value of difference between def and value is less
* than 1e-6, and false otherwise.
*/
private boolean isEqualsNumber(String def, String value) {
if (def != null && value != null) {
Double x = Double.parseDouble(def);
Double y = Double.parseDouble(value);
if (Math.abs(x - y) < 1e-6)
return true;
}
return false;
}
/**
 * Returns true if given value and default value represent the same color.
 * "Transparent" only matches "Transparent", even if the underlying color
 * objects compare equal.
 *
 * @param def   the string for default color object.
 * @param value the string for given color object.
 * @return true if color is equal, false otherwise (including when exactly one
 *         argument is null).
 */
private boolean isEqualsColor(String def, String value) {
	if (def == null || value == null) {
		// equal only when both are absent
		return def == null && value == null;
	}
	boolean defTransparent = "Transparent".equals(def);
	boolean valueTransparent = "Transparent".equals(value);
	if (defTransparent != valueTransparent) {
		return false;
	}
	return ColorUtils.stringToColor(def).equals(ColorUtils.stringToColor(value));
}
/**
 * Sets a certain attribute value, performs a basic check for some types, and
 * throws an exception if trying to set an invalid value. If trying to set a
 * default value or an optional value to null, the attribute is omitted, which
 * results in a leaner xml output. This customized method is often used in place
 * of {@link Element#setAttribute} for writing GPML2013a.
 *
 * @param tag   used for lookup in the defaults table.
 * @param name  used for lookup in the defaults table.
 * @param el    jdom element where this attribute belongs in.
 * @param value value you want to check and set.
 * @throws ConverterException if value invalid.
 */
protected void setAttr(String tag, String name, Element el, String value) throws ConverterException {
	String key = tag + "@" + name;
	// throw exception for an attribute not registered in the defaults table
	if (!getAttributeInfo().containsKey(key))
		throw new ConverterException("Trying to set invalid attribute " + key);
	AttributeInfo aInfo = getAttributeInfo().get(key);
	boolean isDefault = false;
	// if attribute equal to the default value, leave out from the jdom
	if (aInfo.use.equals("optional")) {
		if (aInfo.schemaType.equals("xsd:string") || aInfo.schemaType.equals("xsd:ID")
				|| aInfo.schemaType.equals("gpml:StyleType")) {
			isDefault = isEqualsString(aInfo.def, value);
		} else if (aInfo.schemaType.equals("xsd:float") || aInfo.schemaType.equals("gpml:Dimension")) {
			// BUG FIX: was "Dimension", which never matched — the attribute
			// table registers this type as "gpml:Dimension"
			isDefault = isEqualsNumber(aInfo.def, value);
		} else if (aInfo.schemaType.equals("gpml:ColorType")) {
			isDefault = isEqualsColor(aInfo.def, value);
		}
	}
	if (!isDefault)
		el.setAttribute(name, value);
}
/**
 * Gets a certain attribute value, and replaces it with a suitable default under
 * certain conditions. This customized method is often used in place of
 * {@link Element#getAttributeValue} for reading GPML2013a.
 *
 * @param tag  used for lookup in the defaults table.
 * @param name used for lookup in the defaults table.
 * @param el   jdom element to get the attribute from; if null, the registered
 *             default is returned.
 * @return the attribute value, or the registered default when absent.
 * @throws ConverterException if {@link #getAttributeInfo} does not contain a
 *                            mapping for the specified key.
 */
protected String getAttr(String tag, String name, Element el) throws ConverterException {
	String key = tag + "@" + name;
	Map<String, AttributeInfo> attributes = getAttributeInfo();
	if (!attributes.containsKey(key))
		throw new ConverterException("Trying to get invalid attribute " + key);
	AttributeInfo aInfo = attributes.get(key);
	return (el == null) ? aInfo.def : el.getAttributeValue(name, aInfo.def);
}
/**
 * Removes every {@link Group} without member pathway elements from the given
 * pathway model. Check executed after reading and before writing.
 *
 * @param pathwayModel the pathway model to prune.
 * @throws ConverterException
 */
protected void removeEmptyGroups(PathwayModel pathwayModel) throws ConverterException {
	// collect first, then remove, so the group list is not mutated while iterating
	List<Group> emptyGroups = new ArrayList<Group>();
	for (Group candidate : pathwayModel.getGroups()) {
		if (candidate.getPathwayElements().isEmpty()) {
			emptyGroups.add(candidate);
		}
	}
	for (Group emptyGroup : emptyGroups) {
		Logger.log.trace("Warning: Removed empty group " + emptyGroup.getElementId());
		pathwayModel.removeGroup(emptyGroup);
	}
}
/**
 * Validates a JDOM document against the xml-schema definition specified by
 * 'xsdFile'.
 *
 * @param doc the document to validate.
 * @throws ConverterException if the schema definition cannot be found on the
 *                            classpath or parsed, or if the document does not
 *                            conform to it.
 */
public void validateDocument(Document doc) throws ConverterException {
	ClassLoader cl = PathwayModel.class.getClassLoader();
	InputStream is = cl.getResourceAsStream(xsdFile);
	if (is == null) {
		Logger.log.error("Document is not validated because the xml schema definition '" + xsdFile
				+ "' could not be found in classpath");
		throw new ConverterException("Document is not validated because the xml schema definition '" + xsdFile
				+ "' could not be found in classpath");
	}
	try {
		SchemaFactory factory = SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI);
		StreamSource ss = new StreamSource(is);
		Schema schema = factory.newSchema(ss);
		ValidatorHandler vh = schema.newValidatorHandler();
		SAXOutputter so = new SAXOutputter(vh);
		so.output(doc);
		// If no errors occur, the file is valid according to the gpml xml schema
		// definition
		Logger.log
				.info("Document is valid according to the xml schema definition '" + xsdFile.toString() + "'");
	} catch (SAXException se) {
		Logger.log.error("Could not parse the xml-schema definition", se);
		throw new ConverterException(se);
	} catch (JDOMException je) {
		Logger.log.error("Document is invalid according to the xml-schema definition!: " + je.getMessage(), je);
		XMLOutputter xmlcode = new XMLOutputter(Format.getPrettyFormat());
		Logger.log.error("The invalid XML code:\n" + xmlcode.outputString(doc));
		throw new ConverterException(je);
	} finally {
		// BUG FIX: the schema stream was never closed (resource leak)
		try {
			is.close();
		} catch (Exception ignored) {
			// best-effort close of a classpath stream; nothing useful to do here
		}
	}
}
}
| |
// Copyright 2011 Google Inc. All Rights Reserved.
package com.google.common.util.concurrent;
import static com.google.common.base.Preconditions.checkNotNull;
import com.google.common.annotations.Beta;
import com.google.common.base.Function;
import com.google.common.collect.Maps;
import java.util.Collections;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;
/**
* A map containing {@code long} values that can be atomically updated. While writes to a
* traditional {@code Map} rely on {@code put(K, V)}, the typical mechanism for writing to this map
* is {@code addAndGet(K, long)}, which adds a {@code long} to the value currently associated with
* {@code K}. If a key has not yet been associated with a value, its implicit value is zero.
*
* <p>Most methods in this class treat absent values and zero values identically, as individually
* documented. Exceptions to this are {@link #containsKey}, {@link #size}, {@link #isEmpty},
* {@link #asMap}, and {@link #toString}.
*
* <p>Instances of this class may be used by multiple threads concurrently. All operations are
* atomic unless otherwise noted.
*
* <p><b>Note:</b> If your values are always positive and less than 2^31, you may wish to use a
* {@link com.google.common.collect.Multiset} such as
* {@link com.google.common.collect.ConcurrentHashMultiset} instead.
*
* <b>Warning:</b> Unlike {@code Multiset}, entries whose values are zero are not automatically
* removed from the map. Instead they must be removed manually with {@link #removeAllZeros}.
*
* @author Charles Fry
* @since 11.0
*/
@Beta
public final class AtomicLongMap<K> {
  // Backing map: each key's current value lives inside a mutable AtomicLong.
  // Absent keys implicitly have the value zero.
  private final ConcurrentHashMap<K, AtomicLong> map;

  private AtomicLongMap(ConcurrentHashMap<K, AtomicLong> map) {
    this.map = checkNotNull(map);
  }

  /**
   * Creates an {@code AtomicLongMap}.
   */
  public static <K> AtomicLongMap<K> create() {
    return new AtomicLongMap<K>(new ConcurrentHashMap<K, AtomicLong>());
  }

  /**
   * Creates an {@code AtomicLongMap} with the same mappings as the specified {@code Map}.
   */
  public static <K> AtomicLongMap<K> create(Map<? extends K, ? extends Long> m) {
    AtomicLongMap<K> result = create();
    result.putAll(m);
    return result;
  }

  /**
   * Returns the value associated with {@code key}, or zero if there is no value associated with
   * {@code key}.
   */
  public long get(K key) {
    AtomicLong atomic = map.get(key);
    return atomic == null ? 0L : atomic.get();
  }

  /**
   * Increments by one the value currently associated with {@code key}, and returns the new value.
   */
  public long incrementAndGet(K key) {
    return addAndGet(key, 1);
  }

  /**
   * Decrements by one the value currently associated with {@code key}, and returns the new value.
   */
  public long decrementAndGet(K key) {
    return addAndGet(key, -1);
  }

  /**
   * Adds {@code delta} to the value currently associated with {@code key}, and returns the new
   * value.
   */
  public long addAndGet(K key, long delta) {
    outer: for (;;) {
      AtomicLong atomic = map.get(key);
      if (atomic == null) {
        // no entry yet: try to install a fresh holder containing delta
        atomic = map.putIfAbsent(key, new AtomicLong(delta));
        if (atomic == null) {
          return delta;
        }
        // lost the race; atomic is now the holder another thread installed — fall through
      }
      for (;;) {
        long oldValue = atomic.get();
        if (oldValue == 0L) {
          // don't compareAndSet a zero: a zero-valued holder may be concurrently
          // removed (see remove), so swap in a brand-new holder instead
          if (map.replace(key, atomic, new AtomicLong(delta))) {
            return delta;
          }
          // atomic replaced (or removed) by another thread — restart from the map lookup
          continue outer;
        }
        long newValue = oldValue + delta;
        if (atomic.compareAndSet(oldValue, newValue)) {
          return newValue;
        }
        // value changed concurrently — re-read and retry the CAS
      }
    }
  }

  /**
   * Increments by one the value currently associated with {@code key}, and returns the old value.
   */
  public long getAndIncrement(K key) {
    return getAndAdd(key, 1);
  }

  /**
   * Decrements by one the value currently associated with {@code key}, and returns the old value.
   */
  public long getAndDecrement(K key) {
    return getAndAdd(key, -1);
  }

  /**
   * Adds {@code delta} to the value currently associated with {@code key}, and returns the old
   * value.
   */
  public long getAndAdd(K key, long delta) {
    // same retry structure as addAndGet, but reports the pre-update value
    outer: for (;;) {
      AtomicLong atomic = map.get(key);
      if (atomic == null) {
        atomic = map.putIfAbsent(key, new AtomicLong(delta));
        if (atomic == null) {
          // key was absent, so its implicit old value is zero
          return 0L;
        }
        // atomic is now non-null; fall through
      }
      for (;;) {
        long oldValue = atomic.get();
        if (oldValue == 0L) {
          // don't compareAndSet a zero (it may be concurrently removed)
          if (map.replace(key, atomic, new AtomicLong(delta))) {
            return 0L;
          }
          // atomic replaced — retry from the map lookup
          continue outer;
        }
        long newValue = oldValue + delta;
        if (atomic.compareAndSet(oldValue, newValue)) {
          return oldValue;
        }
        // value changed — retry the CAS
      }
    }
  }

  /**
   * Associates {@code newValue} with {@code key} in this map, and returns the value previously
   * associated with {@code key}, or zero if there was no such value.
   */
  public long put(K key, long newValue) {
    outer: for (;;) {
      AtomicLong atomic = map.get(key);
      if (atomic == null) {
        atomic = map.putIfAbsent(key, new AtomicLong(newValue));
        if (atomic == null) {
          return 0L;
        }
        // atomic is now non-null; fall through
      }
      for (;;) {
        long oldValue = atomic.get();
        if (oldValue == 0L) {
          // don't compareAndSet a zero (it may be concurrently removed)
          if (map.replace(key, atomic, new AtomicLong(newValue))) {
            return 0L;
          }
          // atomic replaced — retry from the map lookup
          continue outer;
        }
        if (atomic.compareAndSet(oldValue, newValue)) {
          return oldValue;
        }
        // value changed — retry the CAS
      }
    }
  }

  /**
   * Copies all of the mappings from the specified map to this map. The effect of this call is
   * equivalent to that of calling {@code put(k, v)} on this map once for each mapping from key
   * {@code k} to value {@code v} in the specified map. The behavior of this operation is undefined
   * if the specified map is modified while the operation is in progress.
   */
  public void putAll(Map<? extends K, ? extends Long> m) {
    for (Map.Entry<? extends K, ? extends Long> entry : m.entrySet()) {
      put(entry.getKey(), entry.getValue());
    }
  }

  /**
   * Removes and returns the value associated with {@code key}. If {@code key} is not
   * in the map, this method has no effect and returns zero.
   */
  public long remove(K key) {
    AtomicLong atomic = map.get(key);
    if (atomic == null) {
      return 0L;
    }
    for (;;) {
      long oldValue = atomic.get();
      if (oldValue == 0L || atomic.compareAndSet(oldValue, 0L)) {
        // only remove after setting to zero, to avoid concurrent updates
        map.remove(key, atomic);
        // succeed even if the remove fails, since the value was already adjusted
        return oldValue;
      }
      // CAS lost against a concurrent update — re-read and retry
    }
  }

  /**
   * Removes all mappings from this map whose values are zero.
   *
   * <p>This method is not atomic: the map may be visible in intermediate states, where some
   * of the zero values have been removed and others have not.
   */
  public void removeAllZeros() {
    for (K key : map.keySet()) {
      AtomicLong atomic = map.get(key);
      if (atomic != null && atomic.get() == 0L) {
        // two-arg remove: only removes if the holder is still the same object
        map.remove(key, atomic);
      }
    }
  }

  /**
   * Returns the sum of all values in this map.
   *
   * <p>This method is not atomic: the sum may or may not include other concurrent operations.
   */
  public long sum() {
    long sum = 0L;
    for (AtomicLong value : map.values()) {
      sum = sum + value.get();
    }
    return sum;
  }

  // Lazily created view; see asMap() for the (benign) race on this field.
  private transient Map<K, Long> asMap;

  /**
   * Returns a live, read-only view of the map backing this {@code AtomicLongMap}.
   */
  public Map<K, Long> asMap() {
    // racy single-check lazy init: there is no synchronization, so several
    // threads may each create a view; at worst redundant equivalent views are
    // built — NOTE(review): presumably considered a benign race here; confirm
    Map<K, Long> result = asMap;
    return (result == null) ? asMap = createAsMap() : result;
  }

  private Map<K, Long> createAsMap() {
    return Collections.unmodifiableMap(
        Maps.transformValues(map, new Function<AtomicLong, Long>() {
          @Override
          public Long apply(AtomicLong atomic) {
            return atomic.get();
          }
        }));
  }

  /**
   * Returns true if this map contains a mapping for the specified key.
   */
  public boolean containsKey(Object key) {
    return map.containsKey(key);
  }

  /**
   * Returns the number of key-value mappings in this map. If the map contains more than
   * {@code Integer.MAX_VALUE} elements, returns {@code Integer.MAX_VALUE}.
   */
  public int size() {
    return map.size();
  }

  /**
   * Returns {@code true} if this map contains no key-value mappings.
   */
  public boolean isEmpty() {
    return map.isEmpty();
  }

  /**
   * Removes all of the mappings from this map. The map will be empty after this call returns.
   *
   * <p>This method is not atomic: the map may not be empty after returning if there were concurrent
   * writes.
   */
  public void clear() {
    map.clear();
  }

  @Override
  public String toString() {
    return map.toString();
  }

  /*
   * ConcurrentMap operations which we may eventually add.
   *
   * The problem with these is that remove(K, long) has to be done in two phases by definition ---
   * first decrementing to zero, and then removing. putIfAbsent or replace could observe the
   * intermediate zero-state. Ways we could deal with this are:
   *
   * - Don't define any of the ConcurrentMap operations. This is the current state of affairs.
   *
   * - Define putIfAbsent and replace as treating zero and absent identically (as currently
   * implemented below). This is a bit surprising with putIfAbsent, which really becomes
   * putIfZero.
   *
   * - Allow putIfAbsent and replace to distinguish between zero and absent, but don't implement
   * remove(K, long). Without any two-phase operations it becomes feasible for all remaining
   * operations to distinguish between zero and absent. If we do this, then perhaps we should add
   * replace(key, long).
   *
   * - Introduce a special-value private static final AtomicLong that would have the meaning of
   * removal-in-progress, and rework all operations to properly distinguish between zero and
   * absent.
   */

  /**
   * If {@code key} is not already associated with a value or if {@code key} is associated with
   * zero, associate it with {@code newValue}. Returns the previous value associated with
   * {@code key}, or zero if there was no mapping for {@code key}.
   */
  long putIfAbsent(K key, long newValue) {
    for (;;) {
      AtomicLong atomic = map.get(key);
      if (atomic == null) {
        atomic = map.putIfAbsent(key, new AtomicLong(newValue));
        if (atomic == null) {
          return 0L;
        }
        // atomic is now non-null; fall through
      }
      long oldValue = atomic.get();
      if (oldValue == 0L) {
        // don't compareAndSet a zero (it may be concurrently removed)
        if (map.replace(key, atomic, new AtomicLong(newValue))) {
          return 0L;
        }
        // atomic replaced — retry from the map lookup
        continue;
      }
      return oldValue;
    }
  }

  /**
   * If {@code (key, expectedOldValue)} is currently in the map, this method replaces
   * {@code expectedOldValue} with {@code newValue} and returns true; otherwise, this method
   * returns false.
   *
   * <p>If {@code expectedOldValue} is zero, this method will succeed if {@code (key, zero)}
   * is currently in the map, or if {@code key} is not in the map at all.
   */
  boolean replace(K key, long expectedOldValue, long newValue) {
    if (expectedOldValue == 0L) {
      // zero and absent are treated identically (see comment block above)
      return putIfAbsent(key, newValue) == 0L;
    } else {
      AtomicLong atomic = map.get(key);
      return (atomic == null) ? false : atomic.compareAndSet(expectedOldValue, newValue);
    }
  }

  /**
   * If {@code (key, value)} is currently in the map, this method removes it and returns
   * true; otherwise, this method returns false.
   */
  boolean remove(K key, long value) {
    AtomicLong atomic = map.get(key);
    if (atomic == null) {
      return false;
    }
    long oldValue = atomic.get();
    if (oldValue != value) {
      return false;
    }
    if (oldValue == 0L || atomic.compareAndSet(oldValue, 0L)) {
      // only remove after setting to zero, to avoid concurrent updates
      map.remove(key, atomic);
      // succeed even if the remove fails, since the value was already adjusted
      return true;
    }
    // value changed
    return false;
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.core.server;
import org.apache.activemq.artemis.api.core.RoutingType;
import org.apache.activemq.artemis.api.core.SimpleString;
/**
 * Immutable snapshot of a queue's state and settings, as returned by a queue
 * query. All fields are populated once by the constructor; only the address
 * can be rewritten afterwards via {@link #setAddress}, so every other field is
 * declared {@code final} (they were previously mutable without need).
 */
public class QueueQueryResult {

   private final SimpleString name;

   private final boolean exists;

   private final boolean durable;

   private final int consumerCount;

   private final long messageCount;

   private final SimpleString filterString;

   // not final: the only field with a setter (setAddress)
   private SimpleString address;

   private final boolean temporary;

   private final boolean autoCreateQueues;

   private final boolean autoCreated;

   private final boolean purgeOnNoConsumers;

   private final RoutingType routingType;

   private final int maxConsumers;

   // Boxed wrapper types below may be null, meaning "not specified".
   private final Boolean exclusive;

   private final Boolean groupRebalance;

   private final Boolean groupRebalancePauseDispatch;

   private final Integer groupBuckets;

   private final SimpleString groupFirstKey;

   private final Boolean lastValue;

   private final SimpleString lastValueKey;

   private final Boolean nonDestructive;

   private final Integer consumersBeforeDispatch;

   private final Long delayBeforeDispatch;

   private final Boolean autoDelete;

   private final Long autoDeleteDelay;

   private final Long autoDeleteMessageCount;

   private final Integer defaultConsumerWindowSize;

   private final Long ringSize;

   private final Boolean enabled;

   private final Boolean configurationManaged;

   /**
    * Creates a fully populated query result; each parameter maps one-to-one
    * onto the accessor of the same name.
    */
   public QueueQueryResult(final SimpleString name,
                           final SimpleString address,
                           final boolean durable,
                           final boolean temporary,
                           final SimpleString filterString,
                           final int consumerCount,
                           final long messageCount,
                           final boolean autoCreateQueues,
                           final boolean exists,
                           final boolean autoCreated,
                           final boolean purgeOnNoConsumers,
                           final RoutingType routingType,
                           final int maxConsumers,
                           final Boolean exclusive,
                           final Boolean groupRebalance,
                           final Boolean groupRebalancePauseDispatch,
                           final Integer groupBuckets,
                           final SimpleString groupFirstKey,
                           final Boolean lastValue,
                           final SimpleString lastValueKey,
                           final Boolean nonDestructive,
                           final Integer consumersBeforeDispatch,
                           final Long delayBeforeDispatch,
                           final Boolean autoDelete,
                           final Long autoDeleteDelay,
                           final Long autoDeleteMessageCount,
                           final Integer defaultConsumerWindowSize,
                           final Long ringSize,
                           final Boolean enabled,
                           final Boolean configurationManaged) {
      this.durable = durable;
      this.temporary = temporary;
      this.consumerCount = consumerCount;
      this.messageCount = messageCount;
      this.filterString = filterString;
      this.address = address;
      this.name = name;
      this.autoCreateQueues = autoCreateQueues;
      this.exists = exists;
      this.autoCreated = autoCreated;
      this.purgeOnNoConsumers = purgeOnNoConsumers;
      this.routingType = routingType;
      this.maxConsumers = maxConsumers;
      this.exclusive = exclusive;
      this.groupRebalance = groupRebalance;
      this.groupRebalancePauseDispatch = groupRebalancePauseDispatch;
      this.groupBuckets = groupBuckets;
      this.groupFirstKey = groupFirstKey;
      this.lastValue = lastValue;
      this.lastValueKey = lastValueKey;
      this.nonDestructive = nonDestructive;
      this.consumersBeforeDispatch = consumersBeforeDispatch;
      this.delayBeforeDispatch = delayBeforeDispatch;
      this.autoDelete = autoDelete;
      this.autoDeleteDelay = autoDeleteDelay;
      this.autoDeleteMessageCount = autoDeleteMessageCount;
      this.defaultConsumerWindowSize = defaultConsumerWindowSize;
      this.ringSize = ringSize;
      this.enabled = enabled;
      this.configurationManaged = configurationManaged;
   }

   public boolean isExists() {
      return exists;
   }

   public boolean isDurable() {
      return durable;
   }

   public int getConsumerCount() {
      return consumerCount;
   }

   public long getMessageCount() {
      return messageCount;
   }

   public SimpleString getFilterString() {
      return filterString;
   }

   public SimpleString getAddress() {
      return address;
   }

   public SimpleString getName() {
      return name;
   }

   public boolean isTemporary() {
      return temporary;
   }

   public boolean isAutoCreateQueues() {
      return autoCreateQueues;
   }

   public boolean isAutoCreated() {
      return autoCreated;
   }

   public boolean isPurgeOnNoConsumers() {
      return purgeOnNoConsumers;
   }

   public RoutingType getRoutingType() {
      return routingType;
   }

   public int getMaxConsumers() {
      return maxConsumers;
   }

   /**
    * Rewrites the address this result refers to; the only mutator on this class.
    */
   public void setAddress(SimpleString address) {
      this.address = address;
   }

   public Boolean isExclusive() {
      return exclusive;
   }

   public Boolean isLastValue() {
      return lastValue;
   }

   public SimpleString getLastValueKey() {
      return lastValueKey;
   }

   public Boolean isNonDestructive() {
      return nonDestructive;
   }

   public Integer getConsumersBeforeDispatch() {
      return consumersBeforeDispatch;
   }

   public Long getDelayBeforeDispatch() {
      return delayBeforeDispatch;
   }

   public Integer getDefaultConsumerWindowSize() {
      return defaultConsumerWindowSize;
   }

   public Boolean isGroupRebalance() {
      return groupRebalance;
   }

   public Boolean isGroupRebalancePauseDispatch() {
      return groupRebalancePauseDispatch;
   }

   public Integer getGroupBuckets() {
      return groupBuckets;
   }

   public SimpleString getGroupFirstKey() {
      return groupFirstKey;
   }

   public Boolean isAutoDelete() {
      return autoDelete;
   }

   public Long getAutoDeleteDelay() {
      return autoDeleteDelay;
   }

   public Long getAutoDeleteMessageCount() {
      return autoDeleteMessageCount;
   }

   public Long getRingSize() {
      return ringSize;
   }

   public Boolean isEnabled() {
      return enabled;
   }

   public Boolean isConfigurationManaged() {
      return configurationManaged;
   }
}
| |
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.engine.test.api.authorization.history;
import static org.camunda.bpm.engine.authorization.Authorization.ANY;
import static org.camunda.bpm.engine.authorization.Permissions.DELETE_HISTORY;
import static org.camunda.bpm.engine.authorization.Permissions.READ_HISTORY;
import static org.camunda.bpm.engine.authorization.Resources.PROCESS_DEFINITION;
import static org.camunda.bpm.engine.authorization.Resources.TASK;
import java.util.List;
import org.camunda.bpm.engine.AuthorizationException;
import org.camunda.bpm.engine.ProcessEngineConfiguration;
import org.camunda.bpm.engine.authorization.MissingAuthorization;
import org.camunda.bpm.engine.history.DurationReportResult;
import org.camunda.bpm.engine.history.HistoricProcessInstance;
import org.camunda.bpm.engine.history.HistoricTaskInstanceQuery;
import org.camunda.bpm.engine.history.HistoricTaskInstanceReportResult;
import org.camunda.bpm.engine.impl.AbstractQuery;
import org.camunda.bpm.engine.query.PeriodUnit;
import org.camunda.bpm.engine.task.Task;
import org.camunda.bpm.engine.test.RequiredHistoryLevel;
import org.camunda.bpm.engine.test.api.authorization.AuthorizationTest;
/**
* @author Roman Smirnov
*
*/
@RequiredHistoryLevel(ProcessEngineConfiguration.HISTORY_ACTIVITY)
public class HistoricTaskInstanceAuthorizationTest extends AuthorizationTest {
  // Process/case definition keys of the resources deployed in setUp().
  protected static final String PROCESS_KEY = "oneTaskProcess";
  protected static final String MESSAGE_START_PROCESS_KEY = "messageStartProcess";
  protected static final String CASE_KEY = "oneTaskCase";

  // Id of the deployment created in setUp(); removed again in tearDown().
  protected String deploymentId;
  @Override
  public void setUp() throws Exception {
    // Deploy the test process/case resources first and remember the deployment
    // id for tearDown(); base-class setup runs afterwards (order is deliberate).
    deploymentId = createDeployment(null,
        "org/camunda/bpm/engine/test/api/oneTaskProcess.bpmn20.xml",
        "org/camunda/bpm/engine/test/api/authorization/messageStartEventProcess.bpmn20.xml",
        "org/camunda/bpm/engine/test/api/authorization/oneTaskCase.cmmn").getId();
    super.setUp();
  }
  @Override
  public void tearDown() {
    // Base-class teardown first, then remove the deployment created in setUp().
    super.tearDown();
    deleteDeployment(deploymentId);
  }
// historic task instance query (standalone task) ///////////////////////////////////////
public void testQueryAfterStandaloneTask() {
// given
String taskId = "myTask";
createTask(taskId);
// when
HistoricTaskInstanceQuery query = historyService.createHistoricTaskInstanceQuery();
// then
verifyQueryResults(query, 1);
deleteTask(taskId, true);
}
// historic task instance query (process task) //////////////////////////////////////////
public void testSimpleQueryWithoutAuthorization() {
// given
startProcessInstanceByKey(PROCESS_KEY);
// when
HistoricTaskInstanceQuery query = historyService.createHistoricTaskInstanceQuery();
// then
verifyQueryResults(query, 0);
}
public void testSimpleQueryWithReadHistoryPermissionOnProcessDefinition() {
// given
startProcessInstanceByKey(PROCESS_KEY);
createGrantAuthorization(PROCESS_DEFINITION, PROCESS_KEY, userId, READ_HISTORY);
// when
HistoricTaskInstanceQuery query = historyService.createHistoricTaskInstanceQuery();
// then
verifyQueryResults(query, 1);
}
public void testSimpleQueryWithReadHistoryPermissionOnAnyProcessDefinition() {
// given
startProcessInstanceByKey(PROCESS_KEY);
createGrantAuthorization(PROCESS_DEFINITION, ANY, userId, READ_HISTORY);
// when
HistoricTaskInstanceQuery query = historyService.createHistoricTaskInstanceQuery();
// then
verifyQueryResults(query, 1);
}
public void testSimpleQueryWithMultiple() {
// given
startProcessInstanceByKey(PROCESS_KEY);
createGrantAuthorization(PROCESS_DEFINITION, ANY, userId, READ_HISTORY);
createGrantAuthorization(PROCESS_DEFINITION, PROCESS_KEY, userId, READ_HISTORY);
// when
HistoricTaskInstanceQuery query = historyService.createHistoricTaskInstanceQuery();
// then
verifyQueryResults(query, 1);
}
// historic task instance query (multiple process instances) ////////////////////////
public void testQueryWithoutAuthorization() {
// given
startProcessInstanceByKey(PROCESS_KEY);
startProcessInstanceByKey(PROCESS_KEY);
startProcessInstanceByKey(PROCESS_KEY);
startProcessInstanceByKey(MESSAGE_START_PROCESS_KEY);
startProcessInstanceByKey(MESSAGE_START_PROCESS_KEY);
startProcessInstanceByKey(MESSAGE_START_PROCESS_KEY);
startProcessInstanceByKey(MESSAGE_START_PROCESS_KEY);
// when
HistoricTaskInstanceQuery query = historyService.createHistoricTaskInstanceQuery();
// then
verifyQueryResults(query, 0);
}
public void testQueryWithReadHistoryPermissionOnProcessDefinition() {
// given
startProcessInstanceByKey(PROCESS_KEY);
startProcessInstanceByKey(PROCESS_KEY);
startProcessInstanceByKey(PROCESS_KEY);
startProcessInstanceByKey(MESSAGE_START_PROCESS_KEY);
startProcessInstanceByKey(MESSAGE_START_PROCESS_KEY);
startProcessInstanceByKey(MESSAGE_START_PROCESS_KEY);
startProcessInstanceByKey(MESSAGE_START_PROCESS_KEY);
createGrantAuthorization(PROCESS_DEFINITION, PROCESS_KEY, userId, READ_HISTORY);
// when
HistoricTaskInstanceQuery query = historyService.createHistoricTaskInstanceQuery();
// then
verifyQueryResults(query, 3);
}
public void testQueryWithReadHistoryPermissionOnAnyProcessDefinition() {
// given
startProcessInstanceByKey(PROCESS_KEY);
startProcessInstanceByKey(PROCESS_KEY);
startProcessInstanceByKey(PROCESS_KEY);
startProcessInstanceByKey(MESSAGE_START_PROCESS_KEY);
startProcessInstanceByKey(MESSAGE_START_PROCESS_KEY);
startProcessInstanceByKey(MESSAGE_START_PROCESS_KEY);
startProcessInstanceByKey(MESSAGE_START_PROCESS_KEY);
createGrantAuthorization(PROCESS_DEFINITION, ANY, userId, READ_HISTORY);
// when
HistoricTaskInstanceQuery query = historyService.createHistoricTaskInstanceQuery();
// then
verifyQueryResults(query, 7);
}
// historic task instance query (case task) ///////////////////////////////////////
public void testQueryAfterCaseTask() {
// given
createCaseInstanceByKey(CASE_KEY);
// when
HistoricTaskInstanceQuery query = historyService.createHistoricTaskInstanceQuery();
// then
verifyQueryResults(query, 1);
}
// historic task instance query (mixed tasks) ////////////////////////////////////
public void testMixedQueryWithoutAuthorization() {
// given
startProcessInstanceByKey(PROCESS_KEY);
startProcessInstanceByKey(PROCESS_KEY);
startProcessInstanceByKey(PROCESS_KEY);
startProcessInstanceByKey(MESSAGE_START_PROCESS_KEY);
startProcessInstanceByKey(MESSAGE_START_PROCESS_KEY);
startProcessInstanceByKey(MESSAGE_START_PROCESS_KEY);
startProcessInstanceByKey(MESSAGE_START_PROCESS_KEY);
createTask("one");
createTask("two");
createTask("three");
createTask("four");
createTask("five");
createCaseInstanceByKey(CASE_KEY);
createCaseInstanceByKey(CASE_KEY);
// when
HistoricTaskInstanceQuery query = historyService.createHistoricTaskInstanceQuery();
// then
verifyQueryResults(query, 7);
deleteTask("one", true);
deleteTask("two", true);
deleteTask("three", true);
deleteTask("four", true);
deleteTask("five", true);
}
public void testMixedQueryWithReadHistoryPermissionOnProcessDefinition() {
// given
startProcessInstanceByKey(PROCESS_KEY);
startProcessInstanceByKey(PROCESS_KEY);
startProcessInstanceByKey(PROCESS_KEY);
startProcessInstanceByKey(MESSAGE_START_PROCESS_KEY);
startProcessInstanceByKey(MESSAGE_START_PROCESS_KEY);
startProcessInstanceByKey(MESSAGE_START_PROCESS_KEY);
startProcessInstanceByKey(MESSAGE_START_PROCESS_KEY);
createTask("one");
createTask("two");
createTask("three");
createTask("four");
createTask("five");
createCaseInstanceByKey(CASE_KEY);
createCaseInstanceByKey(CASE_KEY);
createGrantAuthorization(PROCESS_DEFINITION, PROCESS_KEY, userId, READ_HISTORY);
// when
HistoricTaskInstanceQuery query = historyService.createHistoricTaskInstanceQuery();
// then
verifyQueryResults(query, 10);
deleteTask("one", true);
deleteTask("two", true);
deleteTask("three", true);
deleteTask("four", true);
deleteTask("five", true);
}
public void testMixedQueryWithReadHistoryPermissionOnAnyProcessDefinition() {
// given
startProcessInstanceByKey(PROCESS_KEY);
startProcessInstanceByKey(PROCESS_KEY);
startProcessInstanceByKey(PROCESS_KEY);
startProcessInstanceByKey(MESSAGE_START_PROCESS_KEY);
startProcessInstanceByKey(MESSAGE_START_PROCESS_KEY);
startProcessInstanceByKey(MESSAGE_START_PROCESS_KEY);
startProcessInstanceByKey(MESSAGE_START_PROCESS_KEY);
createTask("one");
createTask("two");
createTask("three");
createTask("four");
createTask("five");
createCaseInstanceByKey(CASE_KEY);
createCaseInstanceByKey(CASE_KEY);
createGrantAuthorization(PROCESS_DEFINITION, ANY, userId, READ_HISTORY);
// when
HistoricTaskInstanceQuery query = historyService.createHistoricTaskInstanceQuery();
// then
verifyQueryResults(query, 14);
deleteTask("one", true);
deleteTask("two", true);
deleteTask("three", true);
deleteTask("four", true);
deleteTask("five", true);
}
// delete deployment (cascade = false)
public void testQueryAfterDeletingDeployment() {
// given
startProcessInstanceByKey(PROCESS_KEY);
startProcessInstanceByKey(PROCESS_KEY);
startProcessInstanceByKey(PROCESS_KEY);
createGrantAuthorization(PROCESS_DEFINITION, PROCESS_KEY, userId, READ_HISTORY);
disableAuthorization();
List<Task> tasks = taskService.createTaskQuery().list();
for (Task task : tasks) {
taskService.complete(task.getId());
}
enableAuthorization();
disableAuthorization();
repositoryService.deleteDeployment(deploymentId);
enableAuthorization();
// when
HistoricTaskInstanceQuery query = historyService.createHistoricTaskInstanceQuery();
// then
verifyQueryResults(query, 3);
disableAuthorization();
List<HistoricProcessInstance> instances = historyService.createHistoricProcessInstanceQuery().list();
for (HistoricProcessInstance instance : instances) {
historyService.deleteHistoricProcessInstance(instance.getId());
}
enableAuthorization();
}
// delete historic task (standalone task) ///////////////////////
public void testDeleteStandaloneTask() {
// given
String taskId = "myTask";
createTask(taskId);
// when
historyService.deleteHistoricTaskInstance(taskId);
// then
disableAuthorization();
HistoricTaskInstanceQuery query = historyService
.createHistoricTaskInstanceQuery()
.taskId(taskId);
verifyQueryResults(query, 0);
enableAuthorization();
deleteTask(taskId, true);
}
// delete historic task (process task) ///////////////////////
public void testDeleteProcessTaskWithoutAuthorization() {
// given
startProcessInstanceByKey(PROCESS_KEY);
String taskId = selectSingleTask().getId();
try {
// when
historyService.deleteHistoricTaskInstance(taskId);
fail("Exception expected: It should not be possible to delete the historic task instance");
} catch (AuthorizationException e) {
// then
String message = e.getMessage();
assertTextPresent(userId, message);
assertTextPresent(DELETE_HISTORY.getName(), message);
assertTextPresent(PROCESS_KEY, message);
assertTextPresent(PROCESS_DEFINITION.resourceName(), message);
}
}
public void testDeleteProcessTaskWithDeleteHistoryPermissionOnProcessDefinition() {
// given
startProcessInstanceByKey(PROCESS_KEY);
String taskId = selectSingleTask().getId();
createGrantAuthorization(PROCESS_DEFINITION, PROCESS_KEY, userId, DELETE_HISTORY);
// when
historyService.deleteHistoricTaskInstance(taskId);
// then
disableAuthorization();
HistoricTaskInstanceQuery query = historyService
.createHistoricTaskInstanceQuery()
.taskId(taskId);
verifyQueryResults(query, 0);
enableAuthorization();
}
public void testDeleteProcessTaskWithDeleteHistoryPermissionOnAnyProcessDefinition() {
// given
startProcessInstanceByKey(PROCESS_KEY);
String taskId = selectSingleTask().getId();
createGrantAuthorization(PROCESS_DEFINITION, ANY, userId, DELETE_HISTORY);
// when
historyService.deleteHistoricTaskInstance(taskId);
// then
disableAuthorization();
HistoricTaskInstanceQuery query = historyService
.createHistoricTaskInstanceQuery()
.taskId(taskId);
verifyQueryResults(query, 0);
enableAuthorization();
}
public void testDeleteHistoricTaskInstanceAfterDeletingDeployment() {
// given
String processInstanceId = startProcessInstanceByKey(PROCESS_KEY).getId();
String taskId = selectSingleTask().getId();
disableAuthorization();
taskService.complete(taskId);
enableAuthorization();
createGrantAuthorization(PROCESS_DEFINITION, ANY, userId, DELETE_HISTORY);
disableAuthorization();
repositoryService.deleteDeployment(deploymentId);
enableAuthorization();
// when
historyService.deleteHistoricTaskInstance(taskId);
// then
disableAuthorization();
HistoricTaskInstanceQuery query = historyService
.createHistoricTaskInstanceQuery()
.taskId(taskId);
verifyQueryResults(query, 0);
enableAuthorization();
disableAuthorization();
historyService.deleteHistoricProcessInstance(processInstanceId);
enableAuthorization();
}
public void testHistoricTaskInstanceReportWithoutAuthorization() {
// given
startProcessInstanceByKey(PROCESS_KEY);
String taskId = selectSingleTask().getId();
disableAuthorization();
taskService.complete(taskId);
enableAuthorization();
try {
// when
historyService
.createHistoricTaskInstanceReport()
.duration(PeriodUnit.MONTH);
fail("Exception expected: It should not be possible to create a historic task instance report");
} catch (AuthorizationException e) {
// then
List<MissingAuthorization> missingAuthorizations = e.getMissingAuthorizations();
assertEquals(1, missingAuthorizations.size());
MissingAuthorization missingAuthorization = missingAuthorizations.get(0);
assertEquals(READ_HISTORY.toString(), missingAuthorization.getViolatedPermissionName());
assertEquals(PROCESS_DEFINITION.resourceName(), missingAuthorization.getResourceType());
assertEquals(ANY, missingAuthorization.getResourceId());
}
}
public void testHistoricTaskInstanceReportWithHistoryReadPermissionOnAny() {
// given
startProcessInstanceByKey(PROCESS_KEY);
String taskId = selectSingleTask().getId();
disableAuthorization();
taskService.complete(taskId);
enableAuthorization();
createGrantAuthorization(PROCESS_DEFINITION, ANY, userId, READ_HISTORY);
createGrantAuthorization(TASK, ANY, userId, READ_HISTORY);
// when
List<DurationReportResult> result = historyService
.createHistoricTaskInstanceReport()
.duration(PeriodUnit.MONTH);
// then
assertEquals(1, result.size());
}
public void testHistoricTaskInstanceReportGroupedByProcessDefinitionKeyWithHistoryReadPermissionOnAny() {
// given
startProcessInstanceByKey(PROCESS_KEY);
String taskId = selectSingleTask().getId();
disableAuthorization();
taskService.complete(taskId);
enableAuthorization();
createGrantAuthorization(PROCESS_DEFINITION, ANY, userId, READ_HISTORY);
createGrantAuthorization(TASK, ANY, userId, READ_HISTORY);
// when
List<HistoricTaskInstanceReportResult> result = historyService
.createHistoricTaskInstanceReport()
.countByProcessDefinitionKey();
// then
assertEquals(1, result.size());
}
public void testHistoricTaskInstanceReportGroupedByTaskNameWithHistoryReadPermissionOnAny() {
// given
startProcessInstanceByKey(PROCESS_KEY);
String taskId = selectSingleTask().getId();
disableAuthorization();
taskService.complete(taskId);
enableAuthorization();
createGrantAuthorization(PROCESS_DEFINITION, ANY, userId, READ_HISTORY);
createGrantAuthorization(TASK, ANY, userId, READ_HISTORY);
// when
List<HistoricTaskInstanceReportResult> result = historyService
.createHistoricTaskInstanceReport()
.countByTaskName();
// then
assertEquals(1, result.size());
}
// helper ////////////////////////////////////////////////////////
protected void verifyQueryResults(HistoricTaskInstanceQuery query, int countExpected) {
verifyQueryResults((AbstractQuery<?, ?>) query, countExpected);
}
}
| |
package org.broadinstitute.hellbender.engine.spark.datasources;
import htsjdk.samtools.*;
import htsjdk.samtools.util.FileExtensions;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.broadinstitute.hellbender.engine.GATKPath;
import org.broadinstitute.hellbender.engine.spark.SparkContextFactory;
import org.broadinstitute.hellbender.utils.gcs.BucketUtils;
import org.broadinstitute.hellbender.utils.io.IOUtils;
import org.broadinstitute.hellbender.utils.read.GATKRead;
import org.broadinstitute.hellbender.utils.read.ReadCoordinateComparator;
import org.broadinstitute.hellbender.utils.read.ReadsWriteFormat;
import org.broadinstitute.hellbender.GATKBaseTest;
import org.broadinstitute.hellbender.testutils.MiniClusterUtils;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
/**
 * Tests for writing reads out with {@code ReadsSparkSink}: single-sharded and sharded
 * BAM/CRAM/SAM output, HDFS output via a mini DFS cluster, index (BAI/SBI) creation,
 * and the (disabled) ADAM round trip.
 */
public class ReadsSparkSinkUnitTest extends GATKBaseTest {
    private MiniDFSCluster cluster;

    private static String testDataDir = publicTestDir + "org/broadinstitute/hellbender/";

    @BeforeClass(alwaysRun = true)
    private void setupMiniCluster() throws IOException {
        cluster = MiniClusterUtils.getMiniCluster();
    }

    @AfterClass(alwaysRun = true)
    private void shutdownMiniCluster() {
        MiniClusterUtils.stopCluster(cluster);
    }

    /**
     * Runs an external command and blocks until it completes.
     *
     * <p>The previous implementation used {@code Runtime.exec(String)} and never waited for
     * the child process, so the test body could race ahead of the {@code chmod}/{@code rm}
     * it depended on. {@link ProcessBuilder} also avoids the deprecated whitespace-splitting
     * single-string form.
     */
    private static void runAndWait(final String... command) throws IOException {
        try {
            new ProcessBuilder(command).inheritIO().start().waitFor();
        } catch (final InterruptedException e) {
            Thread.currentThread().interrupt(); // preserve the interrupt status
            throw new IOException("Interrupted while running: " + String.join(" ", command), e);
        }
    }

    @DataProvider(name = "loadReadsBAM")
    public Object[][] loadReadsBAM() {
        return new Object[][]{
                {testDataDir + "tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam", "ReadsSparkSinkUnitTest1", null, ".bam", true, true, 100L},
                {testDataDir + "tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam", "ReadsSparkSinkUnitTest1", null, ".bam", true, true, 1L}, // check SBI granularity setting
                {testDataDir + "tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam", "ReadsSparkSinkUnitTest1", null, ".bam", true, false, 100L}, // write BAI, don't write SBI
                {testDataDir + "tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam", "ReadsSparkSinkUnitTest1", null, ".bam", false, true, 100L}, // don't write BAI, write SBI
                {testDataDir + "tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam", "ReadsSparkSinkUnitTest1", null, ".bam", false, false, 100L}, // don't write BAI, don't write SBI

                {testDataDir + "tools/BQSR/expected.HiSeq.1mb.1RG.2k_lines.alternate.recalibrated.DIQ.bam", "ReadsSparkSinkUnitTest2", null, ".bam", true, true, 100L},

                // This file has unmapped reads that are set to the position of their mates -- the ordering check
                // in the tests below will fail if our ordering of these reads relative to the mapped reads
                // is not consistent with the definition of coordinate sorting as defined in
                // htsjdk.samtools.SAMRecordCoordinateComparator
                {testDataDir + "tools/BQSR/CEUTrio.HiSeq.WGS.b37.ch20.1m-1m1k.NA12878.bam", "ReadsSparkSinkUnitTest3", null, ".bam", true, true, 100L},
                {testDataDir + "tools/BQSR/NA12878.chr17_69k_70k.dictFix.cram", "ReadsSparkSinkUnitTest5",
                        publicTestDir + "human_g1k_v37.chr17_1Mb.fasta", ".cram", true, true, 100L},
                {testDataDir + "tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam", "ReadsSparkSinkUnitTest6", null, ".sam", true, true, 100L},
        };
    }

    @DataProvider(name = "loadReadsADAM")
    public Object[][] loadReadsADAM() {
        return new Object[][]{
                {testDataDir + "tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam", "ReadsSparkSinkUnitTest1_ADAM"},
                {testDataDir + "tools/BQSR/expected.HiSeq.1mb.1RG.2k_lines.alternate.recalibrated.DIQ.bam", "ReadsSparkSinkUnitTest2_ADAM"},

                // This file has unmapped reads that are set to the position of their mates -- the ordering check
                // in the tests below will fail if our ordering of these reads relative to the mapped reads
                // is not consistent with the definition of coordinate sorting as defined in
                // htsjdk.samtools.SAMRecordCoordinateComparator
                //
                // This test is currently disabled, as this test case doesn't pass for ADAM (we have an open ticket for this:
                // https://github.com/broadinstitute/gatk/issues/1254)
                // {testDataDir + "tools/BQSR/CEUTrio.HiSeq.WGS.b37.ch20.1m-1m1k.NA12878.bam", "ReadsSparkSinkUnitTest3_ADAM"},

                //This test is disabled because it fails on travis (passes locally though)
                //https://github.com/broadinstitute/gatk/issues/1254
                // {NA12878_chr17_1k_BAM, "ReadsSparkSinkUnitTest4_ADAM"}
        };
    }

    // This bam was samtools sorted queryname bam, we expect if this were sorted to match the header that this would no longer match read-for-read due to differences in queryname-sort definitions compared to htsjdk
    @Test
    public void testReadsSparkSinkNotSortingReadsToHeader() throws IOException {
        final GATKPath inputBam = new GATKPath(testDataDir + "engine/CEUTrio.HiSeq.WGS.b37.NA12878.20.21.10000000-10000020.with.unmapped.queryname.samtools.sam");
        final File outputFile = createTempFile("ReadsSparkSinkNotSorting", ".bam");
        JavaSparkContext ctx = SparkContextFactory.getTestSparkContext();

        ReadsSparkSource readSource = new ReadsSparkSource(ctx);
        JavaRDD<GATKRead> rddParallelReads = readSource.getParallelReads(inputBam, null);
        SAMFileHeader header = readSource.getHeader(inputBam, null);

        ReadsSparkSink.writeReads(ctx, outputFile.getPath(), null, rddParallelReads, header, ReadsWriteFormat.SINGLE, 0, null, true, true, false, SBIIndexWriter.DEFAULT_GRANULARITY);

        // read back both the written file and the input and compare them read-for-read
        JavaRDD<GATKRead> rddParallelReads2 = readSource.getParallelReads(new GATKPath(outputFile.getPath()), null);
        final List<GATKRead> writtenReads = rddParallelReads2.collect();

        JavaRDD<GATKRead> rddParallelReads3 = readSource.getParallelReads(inputBam, null);
        final List<GATKRead> inputReads = rddParallelReads3.collect();

        Assert.assertEquals(writtenReads.size(), inputReads.size());
        for (int i = 0; i < writtenReads.size(); i++) {
            Assert.assertEquals(writtenReads.get(i), inputReads.get(i), "These bams were likely out of order to eachother, which may have been caused by automatic sorting of the output bam");
        }
    }

    @Test(dataProvider = "loadReadsBAM", groups = "spark")
    public void readsSinkTest(String inputBam, String outputFileName, String referenceFile, String outputFileExtension, boolean writeBai, boolean writeSbi, long sbiGranularity) throws IOException {
        final File outputFile = createTempFile(outputFileName, outputFileExtension);
        assertSingleShardedWritingWorks(new GATKPath(inputBam), referenceFile, outputFile.getAbsolutePath(), null, writeBai, writeSbi, sbiGranularity);
    }

    @Test(dataProvider = "loadReadsBAM", groups = "spark")
    public void testSpecifyPartsDir(String inputBam, String outputFileName, String referenceFile, String outputFileExtension, boolean writeBai, boolean writeSbi, long sbiGranularity) throws IOException {
        final File outputFile = createTempFile(outputFileName, outputFileExtension);
        final File nonDefaultShardsDir = createTempDir(outputFileName + ".someOtherPlace");
        nonDefaultShardsDir.delete();
        final java.nio.file.Path defaultPartsDir = IOUtils.getPath(ReadsSparkSink.getDefaultPartsDirectory(outputFile.getAbsolutePath()));
        final java.nio.file.Path subpath = defaultPartsDir.resolve("subpath");
        try {
            // Make a directory with unusable permissions in place of where the default file will live
            Files.createDirectory(defaultPartsDir);
            Files.createFile(subpath);
            // wait for the chmod to finish; proceeding early would defeat the permission setup
            runAndWait("chmod", "-R", "a-w", defaultPartsDir.toString());

            //show this succeeds when specifying a different path for the parts directory
            assertSingleShardedWritingWorks(new GATKPath(inputBam), referenceFile, outputFile.getAbsolutePath(), nonDefaultShardsDir.getAbsolutePath(), writeBai, writeSbi, sbiGranularity);

            // Test that the file wasn't deleted when spark cleared its temp directory
            Assert.assertTrue(Files.exists(defaultPartsDir));
        } finally {
            // Remove the write-protected directory (rm can delete read-only entries we own)
            runAndWait("rm", "-r", defaultPartsDir.toString());
        }
    }

    @Test(dataProvider = "loadReadsBAM", groups = "spark")
    public void readsSinkHDFSTest(String inputBam, String outputFileName, String referenceFileName, String outputFileExtension, boolean writeBai, boolean writeSbi, long sbiGranularity) throws IOException {
        final String outputHDFSPath = MiniClusterUtils.getTempPath(cluster, outputFileName, outputFileExtension).toString();
        Assert.assertTrue(BucketUtils.isHadoopUrl(outputHDFSPath));
        assertSingleShardedWritingWorks(new GATKPath(inputBam), referenceFileName, outputHDFSPath, null, writeBai, writeSbi, sbiGranularity);
    }

    @Test(dataProvider = "loadReadsBAM", groups = "spark")
    public void testWritingToAnExistingFileHDFS(String inputBam, String outputFileName, String referenceFileName, String outputFileExtension, boolean writeBai, boolean writeSbi, long sbiGranularity) throws IOException {
        final Path outputPath = MiniClusterUtils.getTempPath(cluster, outputFileName, outputFileExtension);
        final FileSystem fs = outputPath.getFileSystem(new Configuration());
        Assert.assertTrue(fs.createNewFile(outputPath));
        Assert.assertTrue(fs.exists(outputPath));
        assertSingleShardedWritingWorks(new GATKPath(inputBam), referenceFileName, outputPath.toString(), null, writeBai, writeSbi, sbiGranularity);
    }

    @Test(groups = "spark")
    public void testWritingToFileURL() throws IOException {
        GATKPath inputBam = new GATKPath(testDataDir + "tools/BQSR/HiSeq.1mb.1RG.2k_lines.bam");
        String outputUrl = "file:///" + createTempFile("ReadsSparkSinkUnitTest1", ".bam").getAbsolutePath();
        assertSingleShardedWritingWorks(inputBam, null, outputUrl, null, true, true, 100L);
    }

    /**
     * Writes the input reads through {@code ReadsSparkSink} in SINGLE format and verifies:
     * index files exist when requested, SBI granularity matches, the output is
     * coordinate-sorted, and the read count round-trips.
     */
    private void assertSingleShardedWritingWorks(GATKPath inputBam, String referenceFile, String outputPath, String outputPartsPath, boolean writeBai, boolean writeSbi, long sbiGranularity) throws IOException {
        JavaSparkContext ctx = SparkContextFactory.getTestSparkContext();

        final GATKPath referencePath = referenceFile == null ? null : new GATKPath(referenceFile);
        ReadsSparkSource readSource = new ReadsSparkSource(ctx);
        JavaRDD<GATKRead> rddParallelReads = readSource.getParallelReads(inputBam, referencePath);
        SAMFileHeader header = readSource.getHeader(inputBam, referencePath);

        ReadsSparkSink.writeReads(ctx, outputPath, referencePath, rddParallelReads, header, ReadsWriteFormat.SINGLE, 0, outputPartsPath, writeBai, writeSbi, true, sbiGranularity);

        // check that a bai file is created
        if (new GATKPath(outputPath).isBam() && writeBai) {
            Assert.assertTrue(Files.exists(IOUtils.getPath(outputPath + FileExtensions.BAI_INDEX)));
        }

        // check that a splitting bai file is created with correct granularity
        if (new GATKPath(outputPath).isBam() && writeSbi) {
            final java.nio.file.Path sbiPath = IOUtils.getPath(outputPath + FileExtensions.SBI);
            Assert.assertTrue(Files.exists(sbiPath));
            final SBIIndex sbi = SBIIndex.load(sbiPath);
            Assert.assertEquals(sbi.getGranularity(), sbiGranularity);
        }

        JavaRDD<GATKRead> rddParallelReads2 = readSource.getParallelReads(new GATKPath(outputPath), referencePath);
        final List<GATKRead> writtenReads = rddParallelReads2.collect();

        assertReadsAreSorted(header, writtenReads);
        Assert.assertEquals(rddParallelReads.count(), rddParallelReads2.count());
    }

    /** Asserts that {@code writtenReads} are in strictly increasing coordinate order per the SAM spec comparator. */
    private static void assertReadsAreSorted(SAMFileHeader header, List<GATKRead> writtenReads) {
        final SAMRecordCoordinateComparator comparator = new SAMRecordCoordinateComparator();
        // Assert that the reads are sorted.
        final int size = writtenReads.size();
        for (int i = 0; i < size - 1; ++i) {
            final SAMRecord smaller = writtenReads.get(i).convertToSAMRecord(header);
            final SAMRecord larger = writtenReads.get(i + 1).convertToSAMRecord(header);
            final int compare = comparator.compare(smaller, larger);
            Assert.assertTrue(compare < 0, "Reads are out of order (compare=" + compare + "): " + smaller.getSAMString() + " and " + larger.getSAMString());
        }
    }

    @Test(dataProvider = "loadReadsBAM", groups = "spark")
    public void readsSinkShardedTest(String inputBam, String outputFileName, String referenceFile, String outputFileExtension, boolean writeBai, boolean writeSbi, long sbiGranularity) throws IOException {
        final GATKPath inputBamSpecifier = new GATKPath(inputBam);
        final File outputFile = createTempFile(outputFileName, outputFileExtension);
        JavaSparkContext ctx = SparkContextFactory.getTestSparkContext();

        final GATKPath referencePath = referenceFile == null ? null : new GATKPath(referenceFile);
        ReadsSparkSource readSource = new ReadsSparkSource(ctx);
        JavaRDD<GATKRead> rddParallelReads = readSource.getParallelReads(inputBamSpecifier, referencePath);
        rddParallelReads = rddParallelReads.repartition(2); // ensure that the output is in two shards
        SAMFileHeader header = readSource.getHeader(inputBamSpecifier, referencePath);

        ReadsSparkSink.writeReads(ctx, outputFile.getAbsolutePath(), referencePath, rddParallelReads, header, ReadsWriteFormat.SHARDED, 0, null, false, sbiGranularity);
        int shards = outputFile.listFiles((dir, name) -> !name.startsWith(".") && !name.startsWith("_")).length;
        Assert.assertEquals(shards, 2);

        // check that no local .crc files are created
        int crcs = outputFile.listFiles((dir, name) -> name.startsWith(".") && name.endsWith(".crc")).length;
        Assert.assertEquals(crcs, 0);

        JavaRDD<GATKRead> rddParallelReads2 = readSource.getParallelReads(new GATKPath(outputFile.getAbsolutePath()), referencePath);
        // reads are not globally sorted, so don't test that
        Assert.assertEquals(rddParallelReads.count(), rddParallelReads2.count());
    }

    @Test(enabled = false, dataProvider = "loadReadsADAM", groups = "spark")
    public void readsSinkADAMTest(String inputBam, String outputDirectoryName) throws IOException {
        // Since the test requires that we not create the actual output directory in advance,
        // we instead create its parent directory and mark it for deletion on exit. This protects
        // us from naming collisions across multiple instances of the test suite.
        final File outputParentDirectory = createTempDir(outputDirectoryName + "_parent");
        final File outputDirectory = new File(outputParentDirectory, outputDirectoryName);
        final GATKPath inputBamSpecifier = new GATKPath(inputBam);

        JavaSparkContext ctx = SparkContextFactory.getTestSparkContext();

        ReadsSparkSource readSource = new ReadsSparkSource(ctx);
        JavaRDD<GATKRead> rddParallelReads = readSource.getParallelReads(inputBamSpecifier, null)
                .filter(r -> !r.isUnmapped()); // filter out unmapped reads (see comment below)
        SAMFileHeader header = readSource.getHeader(inputBamSpecifier, null);

        ReadsSparkSink.writeReads(ctx, outputDirectory.getAbsolutePath(), null, rddParallelReads, header, ReadsWriteFormat.ADAM, 0, null, true, SBIIndexWriter.DEFAULT_GRANULARITY);

        JavaRDD<GATKRead> rddParallelReads2 = readSource.getADAMReads(new GATKPath(outputDirectory.getAbsolutePath()), null, header);
        Assert.assertEquals(rddParallelReads.count(), rddParallelReads2.count());

        // Test the round trip
        List<GATKRead> samList = new ArrayList<>(rddParallelReads.collect());  //make a mutable copy for sort
        List<GATKRead> adamList = new ArrayList<>(rddParallelReads2.collect());//make a mutable copy for sort
        Comparator<GATKRead> comparator = new ReadCoordinateComparator(header);
        samList.sort(comparator);
        adamList.sort(comparator);
        for (int i = 0; i < samList.size(); i++) {
            SAMRecord expected = samList.get(i).convertToSAMRecord(header);
            SAMRecord observed = adamList.get(i).convertToSAMRecord(header);
            // manually test equality of some fields, as there are issues with roundtrip BAM -> ADAM -> BAM
            // see https://github.com/bigdatagenomics/adam/issues/823
            Assert.assertEquals(observed.getReadName(), expected.getReadName(), "readname");
            Assert.assertEquals(observed.getAlignmentStart(), expected.getAlignmentStart(), "getAlignmentStart");
            Assert.assertEquals(observed.getAlignmentEnd(), expected.getAlignmentEnd(), "getAlignmentEnd");
            Assert.assertEquals(observed.getFlags(), expected.getFlags(), "getFlags");
            Assert.assertEquals(observed.getMappingQuality(), expected.getMappingQuality(), "getMappingQuality");
            Assert.assertEquals(observed.getMateAlignmentStart(), expected.getMateAlignmentStart(), "getMateAlignmentStart");
            Assert.assertEquals(observed.getCigar(), expected.getCigar(), "getCigar");
        }
    }
}
| |
/*
* Copyright 2015 The gRPC Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.grpc.benchmarks.netty;
import io.grpc.CallOptions;
import io.grpc.ClientCall;
import io.grpc.ManagedChannel;
import io.grpc.Metadata;
import io.grpc.MethodDescriptor;
import io.grpc.MethodDescriptor.MethodType;
import io.grpc.Server;
import io.grpc.ServerCall;
import io.grpc.ServerCallHandler;
import io.grpc.ServerServiceDefinition;
import io.grpc.ServiceDescriptor;
import io.grpc.Status;
import io.grpc.benchmarks.ByteBufOutputMarshaller;
import io.grpc.netty.NegotiationType;
import io.grpc.netty.NettyChannelBuilder;
import io.grpc.netty.NettyServerBuilder;
import io.grpc.stub.ClientCalls;
import io.grpc.stub.StreamObserver;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.PooledByteBufAllocator;
import io.netty.channel.local.LocalAddress;
import io.netty.channel.local.LocalChannel;
import io.netty.channel.local.LocalServerChannel;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.util.concurrent.DefaultThreadFactory;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.NetworkInterface;
import java.net.ServerSocket;
import java.net.SocketAddress;
import java.net.SocketException;
import java.net.UnknownHostException;
import java.util.Enumeration;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
 * Abstract base class for Netty end-to-end benchmarks.
 *
 * <p>Subclasses call {@link #setup} to stand up an in-process gRPC server plus a set of client
 * channels, drive load with {@link #startUnaryCalls}, {@link #startStreamingCalls} or
 * {@link #startFlowControlledStreamingCalls}, and release everything via {@link #teardown}.
 */
public abstract class AbstractBenchmark {

  private static final Logger logger = Logger.getLogger(AbstractBenchmark.class.getName());

  /**
   * Standard message sizes.
   */
  public enum MessageSize {
    // Max out at 1MB to avoid creating messages larger than Netty's buffer pool can handle
    // by default
    SMALL(10), MEDIUM(1024), LARGE(65536), JUMBO(1048576);

    private final int bytes;

    MessageSize(int bytes) {
      this.bytes = bytes;
    }

    /** Payload size in bytes. */
    public int bytes() {
      return bytes;
    }
  }

  /**
   * Standard flow-control window sizes.
   */
  public enum FlowWindowSize {
    SMALL(16383), MEDIUM(65535), LARGE(1048575), JUMBO(8388607);

    private final int bytes;

    FlowWindowSize(int bytes) {
      this.bytes = bytes;
    }

    /** Window size in bytes. */
    public int bytes() {
      return bytes;
    }
  }

  /**
   * Executor types used by Channel & Server.
   */
  public enum ExecutorType {
    DEFAULT, DIRECT;
  }

  /**
   * Support channel types.
   */
  public enum ChannelType {
    NIO, LOCAL;
  }

  private static final CallOptions CALL_OPTIONS = CallOptions.DEFAULT;

  private static final InetAddress BENCHMARK_ADDR = buildBenchmarkAddr();

  /**
   * Resolve the address bound to the benchmark interface. Currently we assume it's a
   * child interface of the loopback interface with the term 'benchmark' in its name.
   *
   * <p>This allows traffic shaping to be applied to an IP address and to have the benchmarks
   * detect its presence and use it. E.g for Linux we can apply netem to a specific IP to
   * do traffic shaping, bind that IP to the loopback adapter and then apply a label to that
   * binding so that it appears as a child interface.
   *
   * <pre>
   * sudo tc qdisc del dev lo root
   * sudo tc qdisc add dev lo root handle 1: prio
   * sudo tc qdisc add dev lo parent 1:1 handle 2: netem delay 0.1ms rate 10gbit
   * sudo tc filter add dev lo parent 1:0 protocol ip prio 1 \
   *            u32 match ip dst 127.127.127.127 flowid 2:1
   * sudo ip addr add dev lo 127.127.127.127/32 label lo:benchmark
   * </pre>
   */
  private static InetAddress buildBenchmarkAddr() {
    InetAddress tmp = null;
    try {
      Enumeration<NetworkInterface> networkInterfaces = NetworkInterface.getNetworkInterfaces();
      outer: while (networkInterfaces.hasMoreElements()) {
        NetworkInterface networkInterface = networkInterfaces.nextElement();
        if (!networkInterface.isLoopback()) {
          continue;
        }
        Enumeration<NetworkInterface> subInterfaces = networkInterface.getSubInterfaces();
        while (subInterfaces.hasMoreElements()) {
          NetworkInterface subLoopback = subInterfaces.nextElement();
          if (subLoopback.getDisplayName().contains("benchmark")) {
            // Guard against an address-less interface: an unconditional nextElement()
            // would throw NoSuchElementException and bypass the localhost fallback below.
            Enumeration<InetAddress> addresses = subLoopback.getInetAddresses();
            if (addresses.hasMoreElements()) {
              tmp = addresses.nextElement();
              System.out.println("\nResolved benchmark address to " + tmp + " on "
                  + subLoopback.getDisplayName() + "\n\n");
              break outer;
            }
          }
        }
      }
    } catch (SocketException se) {
      System.out.println("\nWARNING: Error trying to resolve benchmark interface \n" + se);
    }
    if (tmp == null) {
      try {
        System.out.println(
            "\nWARNING: Unable to resolve benchmark interface, defaulting to localhost");
        tmp = InetAddress.getLocalHost();
      } catch (UnknownHostException uhe) {
        throw new RuntimeException(uhe);
      }
    }
    return tmp;
  }

  protected Server server;
  protected ByteBuf request;
  protected ByteBuf response;
  protected MethodDescriptor<ByteBuf, ByteBuf> unaryMethod;
  private MethodDescriptor<ByteBuf, ByteBuf> pingPongMethod;
  private MethodDescriptor<ByteBuf, ByteBuf> flowControlledStreaming;
  protected ManagedChannel[] channels;

  public AbstractBenchmark() {
  }

  /**
   * Initialize the environment for the executor: start a server exposing the benchmark
   * methods and create {@code channelCount} client channels pointed at it.
   */
  public void setup(ExecutorType clientExecutor,
      ExecutorType serverExecutor,
      MessageSize requestSize,
      MessageSize responseSize,
      FlowWindowSize windowSize,
      ChannelType channelType,
      int maxConcurrentStreams,
      int channelCount) throws Exception {
    NettyServerBuilder serverBuilder;
    NettyChannelBuilder channelBuilder;
    if (channelType == ChannelType.LOCAL) {
      LocalAddress address = new LocalAddress("netty-e2e-benchmark");
      serverBuilder = NettyServerBuilder.forAddress(address);
      serverBuilder.channelType(LocalServerChannel.class);
      channelBuilder = NettyChannelBuilder.forAddress(address);
      channelBuilder.channelType(LocalChannel.class);
    } else {
      // Pick a port using an ephemeral socket. try-with-resources guarantees the probe
      // socket is released even if bind() throws (it previously leaked on exception).
      // There remains an inherent small race between closing the probe socket and the
      // server binding the same port.
      SocketAddress address;
      try (ServerSocket sock = new ServerSocket()) {
        sock.bind(new InetSocketAddress(BENCHMARK_ADDR, 0));
        address = sock.getLocalSocketAddress();
      }
      serverBuilder = NettyServerBuilder.forAddress(address);
      channelBuilder = NettyChannelBuilder.forAddress(address);
    }
    if (serverExecutor == ExecutorType.DIRECT) {
      serverBuilder.directExecutor();
    }
    if (clientExecutor == ExecutorType.DIRECT) {
      channelBuilder.directExecutor();
    }

    // Always use a different worker group from the client.
    ThreadFactory serverThreadFactory = new DefaultThreadFactory("STF pool", true /* daemon */);
    serverBuilder.workerEventLoopGroup(new NioEventLoopGroup(0, serverThreadFactory));

    // Always set connection and stream window size to same value
    serverBuilder.flowControlWindow(windowSize.bytes());
    channelBuilder.flowControlWindow(windowSize.bytes());

    channelBuilder.negotiationType(NegotiationType.PLAINTEXT);
    serverBuilder.maxConcurrentCallsPerConnection(maxConcurrentStreams);

    // Create buffers of the desired size for requests and responses.
    PooledByteBufAllocator alloc = PooledByteBufAllocator.DEFAULT;
    // Use a heap buffer for now, since MessageFramer doesn't know how to directly convert this
    // into a WritableBuffer
    // TODO(carl-mastrangelo): convert this into a regular buffer() call. See
    // https://github.com/grpc/grpc-java/issues/2062#issuecomment-234646216
    request = alloc.heapBuffer(requestSize.bytes());
    request.writerIndex(request.capacity() - 1);
    response = alloc.heapBuffer(responseSize.bytes());
    response.writerIndex(response.capacity() - 1);

    // Simple method that sends and receives NettyByteBuf
    unaryMethod = MethodDescriptor.<ByteBuf, ByteBuf>newBuilder()
        .setType(MethodType.UNARY)
        .setFullMethodName("benchmark/unary")
        .setRequestMarshaller(new ByteBufOutputMarshaller())
        .setResponseMarshaller(new ByteBufOutputMarshaller())
        .build();

    pingPongMethod = unaryMethod.toBuilder()
        .setType(MethodType.BIDI_STREAMING)
        .setFullMethodName("benchmark/pingPong")
        .build();
    flowControlledStreaming = pingPongMethod.toBuilder()
        .setFullMethodName("benchmark/flowControlledStreaming")
        .build();

    // Server implementation of unary & streaming methods
    serverBuilder.addService(
        ServerServiceDefinition.builder(
            new ServiceDescriptor("benchmark",
                unaryMethod,
                pingPongMethod,
                flowControlledStreaming))
            // Unary: one request in, one canned response out, then close.
            .addMethod(unaryMethod, new ServerCallHandler<ByteBuf, ByteBuf>() {
              @Override
              public ServerCall.Listener<ByteBuf> startCall(
                  final ServerCall<ByteBuf, ByteBuf> call,
                  Metadata headers) {
                call.sendHeaders(new Metadata());
                call.request(1);
                return new ServerCall.Listener<ByteBuf>() {
                  @Override
                  public void onMessage(ByteBuf message) {
                    // no-op
                    message.release();
                    call.sendMessage(response.slice());
                  }

                  @Override
                  public void onHalfClose() {
                    call.close(Status.OK, new Metadata());
                  }

                  @Override
                  public void onCancel() {
                  }

                  @Override
                  public void onComplete() {
                  }
                };
              }
            })
            // Ping-pong: echo one canned response per request, re-requesting each time.
            .addMethod(pingPongMethod, new ServerCallHandler<ByteBuf, ByteBuf>() {
              @Override
              public ServerCall.Listener<ByteBuf> startCall(
                  final ServerCall<ByteBuf, ByteBuf> call,
                  Metadata headers) {
                call.sendHeaders(new Metadata());
                call.request(1);
                return new ServerCall.Listener<ByteBuf>() {
                  @Override
                  public void onMessage(ByteBuf message) {
                    message.release();
                    call.sendMessage(response.slice());
                    // Request next message
                    call.request(1);
                  }

                  @Override
                  public void onHalfClose() {
                    call.close(Status.OK, new Metadata());
                  }

                  @Override
                  public void onCancel() {
                  }

                  @Override
                  public void onComplete() {
                  }
                };
              }
            })
            // Flow-controlled streaming: saturate the stream whenever it is writable.
            .addMethod(flowControlledStreaming, new ServerCallHandler<ByteBuf, ByteBuf>() {
              @Override
              public ServerCall.Listener<ByteBuf> startCall(
                  final ServerCall<ByteBuf, ByteBuf> call,
                  Metadata headers) {
                call.sendHeaders(new Metadata());
                call.request(1);
                return new ServerCall.Listener<ByteBuf>() {
                  @Override
                  public void onMessage(ByteBuf message) {
                    message.release();
                    while (call.isReady()) {
                      call.sendMessage(response.slice());
                    }
                    // Request next message
                    call.request(1);
                  }

                  @Override
                  public void onHalfClose() {
                    call.close(Status.OK, new Metadata());
                  }

                  @Override
                  public void onCancel() {
                  }

                  @Override
                  public void onComplete() {
                  }

                  @Override
                  public void onReady() {
                    while (call.isReady()) {
                      call.sendMessage(response.slice());
                    }
                  }
                };
              }
            })
            .build());

    // Build and start the clients and servers
    server = serverBuilder.build();
    server.start();
    channels = new ManagedChannel[channelCount];
    ThreadFactory clientThreadFactory = new DefaultThreadFactory("CTF pool", true /* daemon */);
    for (int i = 0; i < channelCount; i++) {
      // Use a dedicated event-loop for each channel
      channels[i] = channelBuilder
          .eventLoopGroup(new NioEventLoopGroup(1, clientThreadFactory))
          .build();
    }
  }

  /**
   * Start a continuously executing set of unary calls that will terminate when
   * {@code done.get()} is true. Each completed call will increment the counter by the specified
   * delta which benchmarks can use to measure QPS or bandwidth.
   */
  protected void startUnaryCalls(int callsPerChannel,
      final AtomicLong counter,
      final AtomicBoolean done,
      final long counterDelta) {
    for (final ManagedChannel channel : channels) {
      for (int i = 0; i < callsPerChannel; i++) {
        StreamObserver<ByteBuf> observer = new StreamObserver<ByteBuf>() {
          @Override
          public void onNext(ByteBuf value) {
            counter.addAndGet(counterDelta);
          }

          @Override
          public void onError(Throwable t) {
            done.set(true);
          }

          @Override
          public void onCompleted() {
            // Re-issue the next call unless the benchmark has been stopped.
            if (!done.get()) {
              ByteBuf slice = request.slice();
              ClientCalls.asyncUnaryCall(
                  channel.newCall(unaryMethod, CALL_OPTIONS), slice, this);
            }
          }
        };
        // Bootstrap the call loop: onCompleted() issues the first real call.
        observer.onCompleted();
      }
    }
  }

  /**
   * Start a continuously executing set of duplex streaming ping-pong calls that will terminate
   * when {@code done.get()} is true. Each completed call will increment the counter by the
   * specified delta which benchmarks can use to measure messages per second or bandwidth.
   */
  protected CountDownLatch startStreamingCalls(int callsPerChannel, final AtomicLong counter,
      final AtomicBoolean record, final AtomicBoolean done, final long counterDelta) {
    final CountDownLatch latch = new CountDownLatch(callsPerChannel * channels.length);
    for (final ManagedChannel channel : channels) {
      for (int i = 0; i < callsPerChannel; i++) {
        final ClientCall<ByteBuf, ByteBuf> streamingCall =
            channel.newCall(pingPongMethod, CALL_OPTIONS);
        final AtomicReference<StreamObserver<ByteBuf>> requestObserverRef =
            new AtomicReference<>();
        final AtomicBoolean ignoreMessages = new AtomicBoolean();
        StreamObserver<ByteBuf> requestObserver = ClientCalls.asyncBidiStreamingCall(
            streamingCall,
            new StreamObserver<ByteBuf>() {
              @Override
              public void onNext(ByteBuf value) {
                if (done.get()) {
                  // Complete the request stream exactly once.
                  if (!ignoreMessages.getAndSet(true)) {
                    requestObserverRef.get().onCompleted();
                  }
                  return;
                }
                requestObserverRef.get().onNext(request.slice());
                if (record.get()) {
                  counter.addAndGet(counterDelta);
                }
                // request is called automatically because the observer implicitly has auto
                // inbound flow control
              }

              @Override
              public void onError(Throwable t) {
                logger.log(Level.WARNING, "call error", t);
                latch.countDown();
              }

              @Override
              public void onCompleted() {
                latch.countDown();
              }
            });
        requestObserverRef.set(requestObserver);
        // Seed the ping-pong with two outstanding requests.
        requestObserver.onNext(request.slice());
        requestObserver.onNext(request.slice());
      }
    }
    return latch;
  }

  /**
   * Start a continuously executing set of duplex streaming calls where the server saturates
   * the stream; terminates when {@code done.get()} is true. Each received message increments
   * the counter by the specified delta which benchmarks can use to measure messages per second
   * or bandwidth.
   */
  protected CountDownLatch startFlowControlledStreamingCalls(int callsPerChannel,
      final AtomicLong counter, final AtomicBoolean record, final AtomicBoolean done,
      final long counterDelta) {
    final CountDownLatch latch = new CountDownLatch(callsPerChannel * channels.length);
    for (final ManagedChannel channel : channels) {
      for (int i = 0; i < callsPerChannel; i++) {
        final ClientCall<ByteBuf, ByteBuf> streamingCall =
            channel.newCall(flowControlledStreaming, CALL_OPTIONS);
        final AtomicReference<StreamObserver<ByteBuf>> requestObserverRef =
            new AtomicReference<>();
        final AtomicBoolean ignoreMessages = new AtomicBoolean();
        StreamObserver<ByteBuf> requestObserver = ClientCalls.asyncBidiStreamingCall(
            streamingCall,
            new StreamObserver<ByteBuf>() {
              @Override
              public void onNext(ByteBuf value) {
                StreamObserver<ByteBuf> obs = requestObserverRef.get();
                if (done.get()) {
                  // Complete the request stream exactly once.
                  if (!ignoreMessages.getAndSet(true)) {
                    obs.onCompleted();
                  }
                  return;
                }
                if (record.get()) {
                  counter.addAndGet(counterDelta);
                }
                // request is called automatically because the observer implicitly has auto
                // inbound flow control
              }

              @Override
              public void onError(Throwable t) {
                logger.log(Level.WARNING, "call error", t);
                latch.countDown();
              }

              @Override
              public void onCompleted() {
                latch.countDown();
              }
            });
        requestObserverRef.set(requestObserver);
        // Add some outstanding requests to ensure the server is filling the connection
        streamingCall.request(5);
        requestObserver.onNext(request.slice());
      }
    }
    return latch;
  }

  /**
   * Shutdown all the client channels and then shutdown the server.
   */
  protected void teardown() throws Exception {
    logger.fine("shutting down channels");
    for (ManagedChannel channel : channels) {
      channel.shutdown();
    }
    logger.fine("shutting down server");
    server.shutdown();
    if (!server.awaitTermination(5, TimeUnit.SECONDS)) {
      logger.warning("Failed to shutdown server");
    }
    logger.fine("server shut down");
    for (ManagedChannel channel : channels) {
      if (!channel.awaitTermination(1, TimeUnit.SECONDS)) {
        logger.warning("Failed to shutdown client");
      }
    }
    logger.fine("channels shut down");
  }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.util;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.primitives.Shorts;
import com.google.common.primitives.SignedBytes;
import io.airlift.slice.Slice;
import io.airlift.slice.SliceOutput;
import io.airlift.slice.Slices;
import io.prestosql.spi.PrestoException;
import io.prestosql.spi.block.Block;
import io.prestosql.spi.block.BlockBuilder;
import io.prestosql.spi.block.SingleRowBlockWriter;
import io.prestosql.spi.connector.ConnectorSession;
import io.prestosql.spi.type.ArrayType;
import io.prestosql.spi.type.BigintType;
import io.prestosql.spi.type.BooleanType;
import io.prestosql.spi.type.DateType;
import io.prestosql.spi.type.DecimalType;
import io.prestosql.spi.type.Decimals;
import io.prestosql.spi.type.DoubleType;
import io.prestosql.spi.type.IntegerType;
import io.prestosql.spi.type.LongTimestamp;
import io.prestosql.spi.type.MapType;
import io.prestosql.spi.type.RealType;
import io.prestosql.spi.type.RowType;
import io.prestosql.spi.type.RowType.Field;
import io.prestosql.spi.type.SmallintType;
import io.prestosql.spi.type.StandardTypes;
import io.prestosql.spi.type.TimestampType;
import io.prestosql.spi.type.TinyintType;
import io.prestosql.spi.type.Type;
import io.prestosql.spi.type.VarcharType;
import io.prestosql.type.BigintOperators;
import io.prestosql.type.BooleanOperators;
import io.prestosql.type.DoubleOperators;
import io.prestosql.type.JsonType;
import io.prestosql.type.UnknownType;
import io.prestosql.type.VarcharOperators;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.math.BigDecimal;
import java.time.ZoneId;
import java.time.ZoneOffset;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Optional;
import java.util.TreeMap;
import static com.fasterxml.jackson.core.JsonFactory.Feature.CANONICALIZE_FIELD_NAMES;
import static com.fasterxml.jackson.core.JsonToken.END_ARRAY;
import static com.fasterxml.jackson.core.JsonToken.END_OBJECT;
import static com.fasterxml.jackson.core.JsonToken.FIELD_NAME;
import static com.fasterxml.jackson.core.JsonToken.START_ARRAY;
import static com.fasterxml.jackson.core.JsonToken.START_OBJECT;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Verify.verify;
import static io.prestosql.spi.StandardErrorCode.GENERIC_INSUFFICIENT_RESOURCES;
import static io.prestosql.spi.StandardErrorCode.INVALID_CAST_ARGUMENT;
import static io.prestosql.spi.StandardErrorCode.INVALID_FUNCTION_ARGUMENT;
import static io.prestosql.spi.type.BigintType.BIGINT;
import static io.prestosql.spi.type.BooleanType.BOOLEAN;
import static io.prestosql.spi.type.DateType.DATE;
import static io.prestosql.spi.type.Decimals.decodeUnscaledValue;
import static io.prestosql.spi.type.Decimals.encodeUnscaledValue;
import static io.prestosql.spi.type.Decimals.isShortDecimal;
import static io.prestosql.spi.type.DoubleType.DOUBLE;
import static io.prestosql.spi.type.IntegerType.INTEGER;
import static io.prestosql.spi.type.RealType.REAL;
import static io.prestosql.spi.type.SmallintType.SMALLINT;
import static io.prestosql.spi.type.TinyintType.TINYINT;
import static io.prestosql.type.JsonType.JSON;
import static io.prestosql.type.Timestamps.formatTimestamp;
import static io.prestosql.type.Timestamps.scaleEpochMillisToMicros;
import static io.prestosql.type.TypeUtils.hashPosition;
import static io.prestosql.type.TypeUtils.positionEqualsPosition;
import static io.prestosql.util.DateTimeUtils.printDate;
import static io.prestosql.util.JsonUtil.ObjectKeyProvider.createObjectKeyProvider;
import static it.unimi.dsi.fastutil.HashCommon.arraySize;
import static java.lang.Float.floatToRawIntBits;
import static java.lang.Float.intBitsToFloat;
import static java.lang.Math.toIntExact;
import static java.lang.String.format;
import static java.math.RoundingMode.HALF_UP;
import static java.nio.charset.StandardCharsets.UTF_8;
import static java.util.Objects.requireNonNull;
public final class JsonUtil
{
public static final JsonFactory JSON_FACTORY = new JsonFactory().disable(CANONICALIZE_FIELD_NAMES);
// This object mapper is constructed without .configure(ORDER_MAP_ENTRIES_BY_KEYS, true) because
// `OBJECT_MAPPER.writeValueAsString(parser.readValueAsTree());` preserves input order.
// Be aware. Using it arbitrarily can produce invalid json (ordered by key is required in Presto).
private static final ObjectMapper OBJECT_MAPPED_UNORDERED = new ObjectMapper(JSON_FACTORY);
private static final int MAX_JSON_LENGTH_IN_ERROR_MESSAGE = 10_000;
// Static utility class; private constructor prevents instantiation.
private JsonUtil() {}
/**
 * Creates a streaming parser over the given UTF-8 JSON payload.
 * A Reader is used instead of an InputStream because Jackson attempts
 * automatic character-encoding detection on raw streams, which we do not want.
 */
public static JsonParser createJsonParser(JsonFactory factory, Slice json)
        throws IOException
{
    InputStreamReader reader = new InputStreamReader(json.getInput(), UTF_8);
    return factory.createParser(reader);
}
/**
 * Creates a streaming generator writing to the given output.
 * The widening to OutputStream selects Jackson's stream-based overload.
 */
public static JsonGenerator createJsonGenerator(JsonFactory factory, SliceOutput output)
        throws IOException
{
    OutputStream stream = output;
    return factory.createGenerator(stream);
}
/**
 * Renders the JSON payload for inclusion in an error message, cutting it off
 * at {@code MAX_JSON_LENGTH_IN_ERROR_MESSAGE} bytes so messages stay bounded.
 */
public static String truncateIfNecessaryForErrorMessage(Slice json)
{
    if (json.length() > MAX_JSON_LENGTH_IN_ERROR_MESSAGE) {
        return json.slice(0, MAX_JSON_LENGTH_IN_ERROR_MESSAGE).toStringUtf8() + "...(truncated)";
    }
    return json.toStringUtf8();
}
/**
 * Returns whether values of {@code type} can be cast to JSON.
 * Structural types are castable when all of their components are; map keys must
 * additionally have a string representation (or be of unknown type).
 */
public static boolean canCastToJson(Type type)
{
    // Structural types: recurse into component types.
    if (type instanceof ArrayType) {
        return canCastToJson(((ArrayType) type).getElementType());
    }
    if (type instanceof MapType) {
        MapType mapType = (MapType) type;
        boolean keySupported = mapType.getKeyType() instanceof UnknownType ||
                isValidJsonObjectKeyType(mapType.getKeyType());
        return keySupported && canCastToJson(mapType.getValueType());
    }
    if (type instanceof RowType) {
        return type.getTypeParameters().stream().allMatch(JsonUtil::canCastToJson);
    }
    // Scalar types with a defined JSON representation.
    return type instanceof UnknownType ||
            type instanceof BooleanType ||
            type instanceof TinyintType ||
            type instanceof SmallintType ||
            type instanceof IntegerType ||
            type instanceof BigintType ||
            type instanceof RealType ||
            type instanceof DoubleType ||
            type instanceof DecimalType ||
            type instanceof VarcharType ||
            type instanceof JsonType ||
            type instanceof TimestampType ||
            type instanceof DateType;
}
/**
 * Returns whether JSON values can be cast to {@code type}.
 * Note the supported set is narrower than {@link #canCastToJson}: unknown,
 * timestamp and date types cannot be produced from JSON.
 */
public static boolean canCastFromJson(Type type)
{
    // Structural types: recurse into component types.
    if (type instanceof ArrayType) {
        return canCastFromJson(((ArrayType) type).getElementType());
    }
    if (type instanceof MapType) {
        MapType mapType = (MapType) type;
        return isValidJsonObjectKeyType(mapType.getKeyType()) && canCastFromJson(mapType.getValueType());
    }
    if (type instanceof RowType) {
        return type.getTypeParameters().stream().allMatch(JsonUtil::canCastFromJson);
    }
    // Scalar types that can be parsed out of JSON.
    return type instanceof BooleanType ||
            type instanceof TinyintType ||
            type instanceof SmallintType ||
            type instanceof IntegerType ||
            type instanceof BigintType ||
            type instanceof RealType ||
            type instanceof DoubleType ||
            type instanceof DecimalType ||
            type instanceof VarcharType ||
            type instanceof JsonType;
}
/**
 * JSON object keys are strings, so only types with a canonical string
 * rendering are acceptable as map keys.
 */
private static boolean isValidJsonObjectKeyType(Type type)
{
    if (type instanceof BooleanType || type instanceof VarcharType) {
        return true;
    }
    if (type instanceof TinyintType || type instanceof SmallintType ||
            type instanceof IntegerType || type instanceof BigintType) {
        return true;
    }
    return type instanceof RealType || type instanceof DoubleType || type instanceof DecimalType;
}
// transform the map key into string for use as JSON object key
public interface ObjectKeyProvider
{
    String getObjectKey(Block block, int position);

    /**
     * Returns a provider that renders a map key of the given type as the string
     * used for the corresponding JSON object key.
     */
    static ObjectKeyProvider createObjectKeyProvider(Type type)
    {
        if (type instanceof UnknownType) {
            // Unknown-typed keys have no string form; callers observe a null key.
            return (b, p) -> null;
        }
        if (type instanceof BooleanType) {
            return (b, p) -> type.getBoolean(b, p) ? "true" : "false";
        }
        if (type instanceof TinyintType || type instanceof SmallintType || type instanceof IntegerType || type instanceof BigintType) {
            return (b, p) -> String.valueOf(type.getLong(b, p));
        }
        if (type instanceof RealType) {
            // REAL stores the float's raw bits in a long; decode before rendering.
            return (b, p) -> String.valueOf(intBitsToFloat(toIntExact(type.getLong(b, p))));
        }
        if (type instanceof DoubleType) {
            return (b, p) -> String.valueOf(type.getDouble(b, p));
        }
        if (type instanceof DecimalType) {
            DecimalType decimalType = (DecimalType) type;
            if (isShortDecimal(decimalType)) {
                return (b, p) -> Decimals.toString(decimalType.getLong(b, p), decimalType.getScale());
            }
            return (b, p) -> Decimals.toString(
                    decodeUnscaledValue(decimalType.getSlice(b, p)),
                    decimalType.getScale());
        }
        if (type instanceof VarcharType) {
            return (b, p) -> type.getSlice(b, p).toStringUtf8();
        }
        throw new PrestoException(INVALID_FUNCTION_ARGUMENT, format("Unsupported type: %s", type));
    }
}
// given block and position, write to JsonGenerator
public interface JsonGeneratorWriter
{
    // write a Json value into the JsonGenerator, provided by block and position
    void writeJsonValue(JsonGenerator jsonGenerator, Block block, int position, ConnectorSession session)
            throws IOException;

    /**
     * Returns a writer that serializes values of the given type into a JsonGenerator.
     */
    static JsonGeneratorWriter createJsonGeneratorWriter(Type type)
    {
        // Structural types: build writers for the component types and compose them.
        if (type instanceof ArrayType) {
            ArrayType arrayType = (ArrayType) type;
            return new ArrayJsonGeneratorWriter(
                    arrayType,
                    createJsonGeneratorWriter(arrayType.getElementType()));
        }
        if (type instanceof MapType) {
            MapType mapType = (MapType) type;
            return new MapJsonGeneratorWriter(
                    mapType,
                    createObjectKeyProvider(mapType.getKeyType()),
                    createJsonGeneratorWriter(mapType.getValueType()));
        }
        if (type instanceof RowType) {
            List<JsonGeneratorWriter> fieldWriters = new ArrayList<>();
            for (Type fieldType : type.getTypeParameters()) {
                fieldWriters.add(createJsonGeneratorWriter(fieldType));
            }
            return new RowJsonGeneratorWriter((RowType) type, fieldWriters);
        }
        // Scalar types.
        if (type instanceof UnknownType) {
            return new UnknownJsonGeneratorWriter();
        }
        if (type instanceof BooleanType) {
            return new BooleanJsonGeneratorWriter();
        }
        if (type instanceof TinyintType || type instanceof SmallintType || type instanceof IntegerType || type instanceof BigintType) {
            return new LongJsonGeneratorWriter(type);
        }
        if (type instanceof RealType) {
            return new RealJsonGeneratorWriter();
        }
        if (type instanceof DoubleType) {
            return new DoubleJsonGeneratorWriter();
        }
        if (type instanceof DecimalType) {
            if (isShortDecimal(type)) {
                return new ShortDecimalJsonGeneratorWriter((DecimalType) type);
            }
            // NOTE(review): the class name misspells "Decimal" as "Deicmal"; renaming
            // would also touch its declaration elsewhere, so the existing name is kept.
            return new LongDeicmalJsonGeneratorWriter((DecimalType) type);
        }
        if (type instanceof VarcharType) {
            return new VarcharJsonGeneratorWriter(type);
        }
        if (type instanceof JsonType) {
            return new JsonJsonGeneratorWriter();
        }
        if (type instanceof TimestampType) {
            return new TimestampJsonGeneratorWriter((TimestampType) type);
        }
        if (type instanceof DateType) {
            return new DateGeneratorWriter();
        }
        throw new PrestoException(INVALID_FUNCTION_ARGUMENT, format("Unsupported type: %s", type));
    }
}
// Writer for the unknown type: its only value is null, so always emit JSON null.
private static class UnknownJsonGeneratorWriter
        implements JsonGeneratorWriter
{
    @Override
    public void writeJsonValue(JsonGenerator jsonGenerator, Block block, int position, ConnectorSession session)
            throws IOException
    {
        jsonGenerator.writeNull();
    }
}
// Serializes BOOLEAN values; SQL NULL maps to JSON null.
private static class BooleanJsonGeneratorWriter
        implements JsonGeneratorWriter
{
    @Override
    public void writeJsonValue(JsonGenerator jsonGenerator, Block block, int position, ConnectorSession session)
            throws IOException
    {
        if (block.isNull(position)) {
            jsonGenerator.writeNull();
            return;
        }
        jsonGenerator.writeBoolean(BOOLEAN.getBoolean(block, position));
    }
}
// Serializes integral values (used for TINYINT/SMALLINT/INTEGER/BIGINT, all read
// via getLong); SQL NULL maps to JSON null.
private static class LongJsonGeneratorWriter
        implements JsonGeneratorWriter
{
    private final Type type;

    public LongJsonGeneratorWriter(Type type)
    {
        this.type = type;
    }

    @Override
    public void writeJsonValue(JsonGenerator jsonGenerator, Block block, int position, ConnectorSession session)
            throws IOException
    {
        if (block.isNull(position)) {
            jsonGenerator.writeNull();
            return;
        }
        jsonGenerator.writeNumber(type.getLong(block, position));
    }
}
// Serializes REAL values; the float is stored as raw bits inside a long and
// must be decoded before writing. SQL NULL maps to JSON null.
private static class RealJsonGeneratorWriter
        implements JsonGeneratorWriter
{
    @Override
    public void writeJsonValue(JsonGenerator jsonGenerator, Block block, int position, ConnectorSession session)
            throws IOException
    {
        if (block.isNull(position)) {
            jsonGenerator.writeNull();
            return;
        }
        int rawBits = toIntExact(REAL.getLong(block, position));
        jsonGenerator.writeNumber(intBitsToFloat(rawBits));
    }
}
// Serializes DOUBLE values; SQL NULL maps to JSON null.
private static class DoubleJsonGeneratorWriter
        implements JsonGeneratorWriter
{
    @Override
    public void writeJsonValue(JsonGenerator jsonGenerator, Block block, int position, ConnectorSession session)
            throws IOException
    {
        if (block.isNull(position)) {
            jsonGenerator.writeNull();
            return;
        }
        jsonGenerator.writeNumber(DOUBLE.getDouble(block, position));
    }
}
// Serializes short decimals, whose unscaled value fits in a long.
// SQL NULL maps to JSON null.
private static class ShortDecimalJsonGeneratorWriter
        implements JsonGeneratorWriter
{
    private final DecimalType type;

    public ShortDecimalJsonGeneratorWriter(DecimalType type)
    {
        this.type = type;
    }

    @Override
    public void writeJsonValue(JsonGenerator jsonGenerator, Block block, int position, ConnectorSession session)
            throws IOException
    {
        if (block.isNull(position)) {
            jsonGenerator.writeNull();
            return;
        }
        long unscaled = type.getLong(block, position);
        jsonGenerator.writeNumber(BigDecimal.valueOf(unscaled, type.getScale()));
    }
}
// Serializes long decimals, whose unscaled value is stored as a Slice.
// SQL NULL maps to JSON null. (Class name misspells "Decimal"; kept because
// it is referenced by the factory above.)
private static class LongDeicmalJsonGeneratorWriter
        implements JsonGeneratorWriter
{
    private final DecimalType type;

    public LongDeicmalJsonGeneratorWriter(DecimalType type)
    {
        this.type = type;
    }

    @Override
    public void writeJsonValue(JsonGenerator jsonGenerator, Block block, int position, ConnectorSession session)
            throws IOException
    {
        if (block.isNull(position)) {
            jsonGenerator.writeNull();
            return;
        }
        jsonGenerator.writeNumber(new BigDecimal(
                decodeUnscaledValue(type.getSlice(block, position)),
                type.getScale()));
    }
}
// Serializes VARCHAR values as JSON strings; SQL NULL maps to JSON null.
private static class VarcharJsonGeneratorWriter
        implements JsonGeneratorWriter
{
    private final Type type;

    public VarcharJsonGeneratorWriter(Type type)
    {
        this.type = type;
    }

    @Override
    public void writeJsonValue(JsonGenerator jsonGenerator, Block block, int position, ConnectorSession session)
            throws IOException
    {
        if (block.isNull(position)) {
            jsonGenerator.writeNull();
            return;
        }
        jsonGenerator.writeString(type.getSlice(block, position).toStringUtf8());
    }
}
// Serializes JSON-typed values. The stored slice is already JSON text, so it is
// emitted verbatim via writeRawValue rather than quoted as a string.
private static class JsonJsonGeneratorWriter
        implements JsonGeneratorWriter
{
    @Override
    public void writeJsonValue(JsonGenerator jsonGenerator, Block block, int position, ConnectorSession session)
            throws IOException
    {
        if (block.isNull(position)) {
            jsonGenerator.writeNull();
            return;
        }
        jsonGenerator.writeRawValue(JSON.getSlice(block, position).toStringUtf8());
    }
}
// Serializes TIMESTAMP values as formatted JSON strings; SQL NULL maps to JSON null.
private static class TimestampJsonGeneratorWriter
        implements JsonGeneratorWriter
{
    private final TimestampType type;

    public TimestampJsonGeneratorWriter(TimestampType type)
    {
        this.type = type;
    }

    @Override
    public void writeJsonValue(JsonGenerator jsonGenerator, Block block, int position, ConnectorSession session)
            throws IOException
    {
        if (block.isNull(position)) {
            jsonGenerator.writeNull();
        }
        else {
            long epochMicros;
            int fraction;
            if (type.isShort()) {
                // Short timestamps fit in a single long with no sub-microsecond part.
                epochMicros = type.getLong(block, position);
                if (type.getPrecision() <= 3) {
                    // Precision <= 3 values are stored at millisecond resolution;
                    // scale up so the formatter always receives microseconds.
                    epochMicros = scaleEpochMillisToMicros(epochMicros);
                }
                fraction = 0;
            }
            else {
                // Long timestamps carry micros plus picoseconds-of-microsecond.
                LongTimestamp timestamp = (LongTimestamp) type.getObject(block, position);
                epochMicros = timestamp.getEpochMicros();
                fraction = timestamp.getPicosOfMicro();
            }
            // Legacy-timestamp sessions render in the session time zone; otherwise UTC.
            ZoneId zoneId = ZoneOffset.UTC;
            if (session.isLegacyTimestamp()) {
                zoneId = session.getTimeZoneKey().getZoneId();
            }
            jsonGenerator.writeString(formatTimestamp(type.getPrecision(), epochMicros, fraction, zoneId));
        }
    }
}
// Serializes DATE values as formatted JSON strings; SQL NULL maps to JSON null.
private static class DateGeneratorWriter
        implements JsonGeneratorWriter
{
    @Override
    public void writeJsonValue(JsonGenerator jsonGenerator, Block block, int position, ConnectorSession session)
            throws IOException
    {
        if (block.isNull(position)) {
            jsonGenerator.writeNull();
            return;
        }
        int dateValue = toIntExact(DATE.getLong(block, position));
        jsonGenerator.writeString(printDate(dateValue));
    }
}
// Serializes ARRAY values as JSON arrays by delegating each element to the
// element writer; SQL NULL maps to JSON null.
private static class ArrayJsonGeneratorWriter
        implements JsonGeneratorWriter
{
    private final ArrayType type;
    private final JsonGeneratorWriter elementWriter;

    public ArrayJsonGeneratorWriter(ArrayType type, JsonGeneratorWriter elementWriter)
    {
        this.type = type;
        this.elementWriter = elementWriter;
    }

    @Override
    public void writeJsonValue(JsonGenerator jsonGenerator, Block block, int position, ConnectorSession session)
            throws IOException
    {
        if (block.isNull(position)) {
            jsonGenerator.writeNull();
            return;
        }
        Block elements = type.getObject(block, position);
        jsonGenerator.writeStartArray();
        for (int i = 0; i < elements.getPositionCount(); i++) {
            elementWriter.writeJsonValue(jsonGenerator, elements, i, session);
        }
        jsonGenerator.writeEndArray();
    }
}
// Serializes MAP values as JSON objects with keys rendered by the key provider
// and values delegated to the value writer; SQL NULL maps to JSON null.
private static class MapJsonGeneratorWriter
        implements JsonGeneratorWriter
{
    private final MapType type;
    private final ObjectKeyProvider keyProvider;
    private final JsonGeneratorWriter valueWriter;

    public MapJsonGeneratorWriter(MapType type, ObjectKeyProvider keyProvider, JsonGeneratorWriter valueWriter)
    {
        this.type = type;
        this.keyProvider = keyProvider;
        this.valueWriter = valueWriter;
    }

    @Override
    public void writeJsonValue(JsonGenerator jsonGenerator, Block block, int position, ConnectorSession session)
            throws IOException
    {
        if (block.isNull(position)) {
            jsonGenerator.writeNull();
        }
        else {
            // The map block interleaves entries: even positions hold keys,
            // the following odd position holds the corresponding value.
            Block mapBlock = type.getObject(block, position);
            // TreeMap sorts entries by key so the emitted JSON object is ordered
            // by key, as required in Presto (see the note on OBJECT_MAPPED_UNORDERED).
            Map<String, Integer> orderedKeyToValuePosition = new TreeMap<>();
            for (int i = 0; i < mapBlock.getPositionCount(); i += 2) {
                String objectKey = keyProvider.getObjectKey(mapBlock, i);
                // NOTE(review): a null key (e.g. from the UnknownType key provider)
                // would make TreeMap.put throw NullPointerException — confirm such
                // maps cannot reach this writer in practice.
                orderedKeyToValuePosition.put(objectKey, i + 1);
            }
            jsonGenerator.writeStartObject();
            for (Map.Entry<String, Integer> entry : orderedKeyToValuePosition.entrySet()) {
                jsonGenerator.writeFieldName(entry.getKey());
                valueWriter.writeJsonValue(jsonGenerator, mapBlock, entry.getValue(), session);
            }
            jsonGenerator.writeEndObject();
        }
    }
}
// Serializes ROW values as JSON arrays of field values in declaration order,
// one writer per field; SQL NULL maps to JSON null.
private static class RowJsonGeneratorWriter
        implements JsonGeneratorWriter
{
    private final RowType type;
    private final List<JsonGeneratorWriter> fieldWriters;

    public RowJsonGeneratorWriter(RowType type, List<JsonGeneratorWriter> fieldWriters)
    {
        this.type = type;
        this.fieldWriters = fieldWriters;
    }

    @Override
    public void writeJsonValue(JsonGenerator jsonGenerator, Block block, int position, ConnectorSession session)
            throws IOException
    {
        if (block.isNull(position)) {
            jsonGenerator.writeNull();
            return;
        }
        Block rowBlock = type.getObject(block, position);
        jsonGenerator.writeStartArray();
        for (int fieldIndex = 0; fieldIndex < rowBlock.getPositionCount(); fieldIndex++) {
            fieldWriters.get(fieldIndex).writeJsonValue(jsonGenerator, rowBlock, fieldIndex, session);
        }
        jsonGenerator.writeEndArray();
    }
}
// utility classes and functions for cast from JSON
/**
 * Casts the parser's current JSON token to a VARCHAR slice.
 * Returns {@code null} for JSON null; numbers and booleans are rendered as text.
 *
 * @throws JsonCastException for tokens that cannot be cast to VARCHAR
 */
public static Slice currentTokenAsVarchar(JsonParser parser)
        throws IOException
{
    JsonToken token = parser.currentToken();
    if (token == JsonToken.VALUE_NULL) {
        return null;
    }
    if (token == JsonToken.VALUE_STRING || token == JsonToken.FIELD_NAME) {
        return Slices.utf8Slice(parser.getText());
    }
    if (token == JsonToken.VALUE_NUMBER_FLOAT) {
        // Avoidance of loss of precision does not seem to be possible here because of Jackson implementation.
        return DoubleOperators.castToVarchar(parser.getDoubleValue());
    }
    if (token == JsonToken.VALUE_NUMBER_INT) {
        // An alternative is calling getLongValue and then BigintOperators.castToVarchar.
        // It doesn't work as well because it can result in overflow and underflow exceptions for large integral numbers.
        return Slices.utf8Slice(parser.getText());
    }
    if (token == JsonToken.VALUE_TRUE) {
        return BooleanOperators.castToVarchar(true);
    }
    if (token == JsonToken.VALUE_FALSE) {
        return BooleanOperators.castToVarchar(false);
    }
    throw new JsonCastException(format("Unexpected token when cast to %s: %s", StandardTypes.VARCHAR, parser.getText()));
}
/**
 * Casts the parser's current JSON token to a BIGINT.
 * Returns {@code null} for JSON null; strings, floats and booleans are coerced
 * through the corresponding cast operators.
 *
 * @throws JsonCastException for tokens that cannot be cast to BIGINT
 */
public static Long currentTokenAsBigint(JsonParser parser)
        throws IOException
{
    JsonToken token = parser.currentToken();
    if (token == JsonToken.VALUE_NULL) {
        return null;
    }
    if (token == JsonToken.VALUE_STRING || token == JsonToken.FIELD_NAME) {
        return VarcharOperators.castToBigint(Slices.utf8Slice(parser.getText()));
    }
    if (token == JsonToken.VALUE_NUMBER_FLOAT) {
        return DoubleOperators.castToLong(parser.getDoubleValue());
    }
    if (token == JsonToken.VALUE_NUMBER_INT) {
        return parser.getLongValue();
    }
    if (token == JsonToken.VALUE_TRUE) {
        return BooleanOperators.castToBigint(true);
    }
    if (token == JsonToken.VALUE_FALSE) {
        return BooleanOperators.castToBigint(false);
    }
    throw new JsonCastException(format("Unexpected token when cast to %s: %s", StandardTypes.BIGINT, parser.getText()));
}
/**
 * Casts the parser's current JSON token to an INTEGER (boxed as Long).
 * Returns {@code null} for JSON null; integral values are range-checked
 * via {@code toIntExact}.
 *
 * @throws JsonCastException for tokens that cannot be cast to INTEGER
 */
public static Long currentTokenAsInteger(JsonParser parser)
        throws IOException
{
    JsonToken token = parser.currentToken();
    if (token == JsonToken.VALUE_NULL) {
        return null;
    }
    if (token == JsonToken.VALUE_STRING || token == JsonToken.FIELD_NAME) {
        return VarcharOperators.castToInteger(Slices.utf8Slice(parser.getText()));
    }
    if (token == JsonToken.VALUE_NUMBER_FLOAT) {
        return DoubleOperators.castToInteger(parser.getDoubleValue());
    }
    if (token == JsonToken.VALUE_NUMBER_INT) {
        return (long) toIntExact(parser.getLongValue());
    }
    if (token == JsonToken.VALUE_TRUE) {
        return BooleanOperators.castToInteger(true);
    }
    if (token == JsonToken.VALUE_FALSE) {
        return BooleanOperators.castToInteger(false);
    }
    throw new JsonCastException(format("Unexpected token when cast to %s: %s", StandardTypes.INTEGER, parser.getText()));
}
/**
 * Casts the parser's current JSON token to a SMALLINT (boxed as Long).
 * Returns {@code null} for JSON null; integral values are range-checked
 * via {@code Shorts.checkedCast}.
 *
 * @throws JsonCastException for tokens that cannot be cast to SMALLINT
 */
public static Long currentTokenAsSmallint(JsonParser parser)
        throws IOException
{
    JsonToken token = parser.currentToken();
    if (token == JsonToken.VALUE_NULL) {
        return null;
    }
    if (token == JsonToken.VALUE_STRING || token == JsonToken.FIELD_NAME) {
        return VarcharOperators.castToSmallint(Slices.utf8Slice(parser.getText()));
    }
    if (token == JsonToken.VALUE_NUMBER_FLOAT) {
        return DoubleOperators.castToSmallint(parser.getDoubleValue());
    }
    if (token == JsonToken.VALUE_NUMBER_INT) {
        return (long) Shorts.checkedCast(parser.getLongValue());
    }
    if (token == JsonToken.VALUE_TRUE) {
        return BooleanOperators.castToSmallint(true);
    }
    if (token == JsonToken.VALUE_FALSE) {
        return BooleanOperators.castToSmallint(false);
    }
    throw new JsonCastException(format("Unexpected token when cast to %s: %s", StandardTypes.SMALLINT, parser.getText()));
}
/**
 * Casts the parser's current JSON token to a TINYINT (boxed as Long).
 * Returns {@code null} for JSON null; integral values are range-checked
 * via {@code SignedBytes.checkedCast}.
 *
 * @throws JsonCastException for tokens that cannot be cast to TINYINT
 */
public static Long currentTokenAsTinyint(JsonParser parser)
        throws IOException
{
    JsonToken token = parser.currentToken();
    if (token == JsonToken.VALUE_NULL) {
        return null;
    }
    if (token == JsonToken.VALUE_STRING || token == JsonToken.FIELD_NAME) {
        return VarcharOperators.castToTinyint(Slices.utf8Slice(parser.getText()));
    }
    if (token == JsonToken.VALUE_NUMBER_FLOAT) {
        return DoubleOperators.castToTinyint(parser.getDoubleValue());
    }
    if (token == JsonToken.VALUE_NUMBER_INT) {
        return (long) SignedBytes.checkedCast(parser.getLongValue());
    }
    if (token == JsonToken.VALUE_TRUE) {
        return BooleanOperators.castToTinyint(true);
    }
    if (token == JsonToken.VALUE_FALSE) {
        return BooleanOperators.castToTinyint(false);
    }
    throw new JsonCastException(format("Unexpected token when cast to %s: %s", StandardTypes.TINYINT, parser.getText()));
}
/**
 * Casts the parser's current JSON token to a DOUBLE.
 * Returns {@code null} for JSON null.
 *
 * @throws JsonCastException for tokens that cannot be cast to DOUBLE
 */
public static Double currentTokenAsDouble(JsonParser parser)
        throws IOException
{
    JsonToken token = parser.currentToken();
    if (token == JsonToken.VALUE_NULL) {
        return null;
    }
    if (token == JsonToken.VALUE_STRING || token == JsonToken.FIELD_NAME) {
        return VarcharOperators.castToDouble(Slices.utf8Slice(parser.getText()));
    }
    if (token == JsonToken.VALUE_NUMBER_FLOAT) {
        return parser.getDoubleValue();
    }
    if (token == JsonToken.VALUE_NUMBER_INT) {
        // An alternative is calling getLongValue and then BigintOperators.castToDouble.
        // It doesn't work as well because it can result in overflow and underflow exceptions for large integral numbers.
        return parser.getDoubleValue();
    }
    if (token == JsonToken.VALUE_TRUE) {
        return BooleanOperators.castToDouble(true);
    }
    if (token == JsonToken.VALUE_FALSE) {
        return BooleanOperators.castToDouble(false);
    }
    throw new JsonCastException(format("Unexpected token when cast to %s: %s", StandardTypes.DOUBLE, parser.getText()));
}
/**
 * Casts the parser's current JSON token to a REAL, returned as the raw float
 * bits widened to a long (Presto's in-block representation for REAL).
 * Returns {@code null} for JSON null.
 *
 * @throws JsonCastException for tokens that cannot be cast to REAL
 */
public static Long currentTokenAsReal(JsonParser parser)
        throws IOException
{
    JsonToken token = parser.currentToken();
    if (token == JsonToken.VALUE_NULL) {
        return null;
    }
    if (token == JsonToken.VALUE_STRING || token == JsonToken.FIELD_NAME) {
        return VarcharOperators.castToFloat(Slices.utf8Slice(parser.getText()));
    }
    if (token == JsonToken.VALUE_NUMBER_FLOAT) {
        return (long) floatToRawIntBits(parser.getFloatValue());
    }
    if (token == JsonToken.VALUE_NUMBER_INT) {
        // An alternative is calling getLongValue and then BigintOperators.castToReal.
        // It doesn't work as well because it can result in overflow and underflow exceptions for large integral numbers.
        return (long) floatToRawIntBits(parser.getFloatValue());
    }
    if (token == JsonToken.VALUE_TRUE) {
        return BooleanOperators.castToReal(true);
    }
    if (token == JsonToken.VALUE_FALSE) {
        return BooleanOperators.castToReal(false);
    }
    throw new JsonCastException(format("Unexpected token when cast to %s: %s", StandardTypes.REAL, parser.getText()));
}
/**
 * Casts the parser's current JSON token to a BOOLEAN.
 * Returns {@code null} for JSON null; strings and numbers go through the
 * corresponding cast operators.
 *
 * @throws JsonCastException for tokens that cannot be cast to BOOLEAN
 */
public static Boolean currentTokenAsBoolean(JsonParser parser)
        throws IOException
{
    JsonToken token = parser.currentToken();
    if (token == JsonToken.VALUE_NULL) {
        return null;
    }
    if (token == JsonToken.VALUE_STRING || token == JsonToken.FIELD_NAME) {
        return VarcharOperators.castToBoolean(Slices.utf8Slice(parser.getText()));
    }
    if (token == JsonToken.VALUE_NUMBER_FLOAT) {
        return DoubleOperators.castToBoolean(parser.getDoubleValue());
    }
    if (token == JsonToken.VALUE_NUMBER_INT) {
        return BigintOperators.castToBoolean(parser.getLongValue());
    }
    if (token == JsonToken.VALUE_TRUE) {
        return Boolean.TRUE;
    }
    if (token == JsonToken.VALUE_FALSE) {
        return Boolean.FALSE;
    }
    throw new JsonCastException(format("Unexpected token when cast to %s: %s", StandardTypes.BOOLEAN, parser.getText()));
}
/**
 * Casts the current JSON token to a short DECIMAL(precision, scale), returned
 * as the unscaled long value; {@code null} for JSON null.
 */
public static Long currentTokenAsShortDecimal(JsonParser parser, int precision, int scale)
        throws IOException
{
    BigDecimal decimal = currentTokenAsJavaDecimal(parser, precision, scale);
    if (decimal == null) {
        return null;
    }
    return decimal.unscaledValue().longValue();
}
/**
 * Casts the current JSON token to a long DECIMAL(precision, scale), returned
 * as the encoded unscaled-value slice; {@code null} for JSON null.
 */
public static Slice currentTokenAsLongDecimal(JsonParser parser, int precision, int scale)
        throws IOException
{
    BigDecimal decimal = currentTokenAsJavaDecimal(parser, precision, scale);
    return decimal == null ? null : encodeUnscaledValue(decimal.unscaledValue());
}
// TODO: Instead of having BigDecimal as an intermediate step,
// an alternative way is to make currentTokenAsShortDecimal and currentTokenAsLongDecimal
// directly return the Long or Slice representation of the cast result
// by calling the corresponding cast-to-decimal function, similar to other JSON cast function.
/**
 * Casts the current JSON token to a {@link BigDecimal} rescaled to
 * {@code scale} with HALF_UP rounding. JSON true/false map to 1/0;
 * JSON null yields {@code null}.
 *
 * @throws JsonCastException for tokens that cannot represent a decimal
 * @throws PrestoException (INVALID_CAST_ARGUMENT) if the rescaled value
 *         needs more than {@code precision} digits
 */
private static BigDecimal currentTokenAsJavaDecimal(JsonParser parser, int precision, int scale)
        throws IOException
{
    BigDecimal result;
    // Consistency fix: use currentToken() like every other currentTokenAs* helper
    // (getCurrentToken() is Jackson's legacy alias for the same accessor).
    switch (parser.currentToken()) {
        case VALUE_NULL:
            return null;
        case VALUE_STRING:
        case FIELD_NAME:
            result = new BigDecimal(parser.getText());
            result = result.setScale(scale, HALF_UP);
            break;
        case VALUE_NUMBER_FLOAT:
        case VALUE_NUMBER_INT:
            result = parser.getDecimalValue();
            result = result.setScale(scale, HALF_UP);
            break;
        case VALUE_TRUE:
            result = BigDecimal.ONE.setScale(scale, HALF_UP);
            break;
        case VALUE_FALSE:
            result = BigDecimal.ZERO.setScale(scale, HALF_UP);
            break;
        default:
            throw new JsonCastException(format("Unexpected token when cast to DECIMAL(%s,%s): %s", precision, scale, parser.getText()));
    }
    if (result.precision() > precision) {
        // TODO: Should we use NUMERIC_VALUE_OUT_OF_RANGE instead?
        throw new PrestoException(INVALID_CAST_ARGUMENT, format("Cannot cast input json to DECIMAL(%s,%s)", precision, scale));
    }
    return result;
}
// given a JSON parser, write to the BlockBuilder
public interface BlockBuilderAppender
{
    /**
     * Reads the value at the parser's current token and appends it to
     * {@code blockBuilder} (appending a null entry for JSON null).
     */
    void append(JsonParser parser, BlockBuilder blockBuilder)
            throws IOException;

    /**
     * Factory dispatching on the Presto type; recurses into element, key/value
     * and field types for ARRAY, MAP and ROW.
     *
     * @throws PrestoException (INVALID_FUNCTION_ARGUMENT) for unsupported types
     */
    static BlockBuilderAppender createBlockBuilderAppender(Type type)
    {
        if (type instanceof BooleanType) {
            return new BooleanBlockBuilderAppender();
        }
        if (type instanceof TinyintType) {
            return new TinyintBlockBuilderAppender();
        }
        if (type instanceof SmallintType) {
            return new SmallintBlockBuilderAppender();
        }
        if (type instanceof IntegerType) {
            return new IntegerBlockBuilderAppender();
        }
        if (type instanceof BigintType) {
            return new BigintBlockBuilderAppender();
        }
        if (type instanceof RealType) {
            return new RealBlockBuilderAppender();
        }
        if (type instanceof DoubleType) {
            return new DoubleBlockBuilderAppender();
        }
        if (type instanceof DecimalType) {
            // Decimals split by representation: short (long-backed) vs long (slice-backed).
            if (isShortDecimal(type)) {
                return new ShortDecimalBlockBuilderAppender((DecimalType) type);
            }
            return new LongDecimalBlockBuilderAppender((DecimalType) type);
        }
        if (type instanceof VarcharType) {
            return new VarcharBlockBuilderAppender(type);
        }
        if (type instanceof JsonType) {
            // JSON target: re-serialize the subtree with unordered mapper and store verbatim.
            return (parser, blockBuilder) -> {
                String json = OBJECT_MAPPED_UNORDERED.writeValueAsString(parser.readValueAsTree());
                JSON.writeSlice(blockBuilder, Slices.utf8Slice(json));
            };
        }
        if (type instanceof ArrayType) {
            return new ArrayBlockBuilderAppender(createBlockBuilderAppender(((ArrayType) type).getElementType()));
        }
        if (type instanceof MapType) {
            MapType mapType = (MapType) type;
            return new MapBlockBuilderAppender(
                    createBlockBuilderAppender(mapType.getKeyType()),
                    createBlockBuilderAppender(mapType.getValueType()),
                    mapType.getKeyType());
        }
        if (type instanceof RowType) {
            RowType rowType = (RowType) type;
            List<Field> rowFields = rowType.getFields();
            BlockBuilderAppender[] fieldAppenders = new BlockBuilderAppender[rowFields.size()];
            for (int i = 0; i < fieldAppenders.length; i++) {
                fieldAppenders[i] = createBlockBuilderAppender(rowFields.get(i).getType());
            }
            return new RowBlockBuilderAppender(fieldAppenders, getFieldNameToIndex(rowFields));
        }
        throw new PrestoException(INVALID_FUNCTION_ARGUMENT, format("Unsupported type: %s", type));
    }
}
/** Appends the current JSON token to a BOOLEAN block (null-safe). */
private static class BooleanBlockBuilderAppender
        implements BlockBuilderAppender
{
    @Override
    public void append(JsonParser parser, BlockBuilder blockBuilder)
            throws IOException
    {
        Boolean value = currentTokenAsBoolean(parser);
        if (value != null) {
            BOOLEAN.writeBoolean(blockBuilder, value);
        }
        else {
            blockBuilder.appendNull();
        }
    }
}
/** Appends the current JSON token to a TINYINT block (null-safe). */
private static class TinyintBlockBuilderAppender
        implements BlockBuilderAppender
{
    @Override
    public void append(JsonParser parser, BlockBuilder blockBuilder)
            throws IOException
    {
        Long value = currentTokenAsTinyint(parser);
        if (value != null) {
            TINYINT.writeLong(blockBuilder, value);
        }
        else {
            blockBuilder.appendNull();
        }
    }
}
/**
 * Appends the current JSON token to a SMALLINT block (null-safe).
 *
 * Bug fix: this previously delegated to {@code currentTokenAsInteger}, which
 * range-checks against INTEGER (toIntExact) rather than SMALLINT, so values
 * outside the short range were written into a SMALLINT block unchecked. It now
 * uses {@code currentTokenAsSmallint}, whose {@code Shorts.checkedCast}
 * enforces the correct range — matching how every sibling appender pairs with
 * its own-type cast helper.
 */
private static class SmallintBlockBuilderAppender
        implements BlockBuilderAppender
{
    @Override
    public void append(JsonParser parser, BlockBuilder blockBuilder)
            throws IOException
    {
        Long result = currentTokenAsSmallint(parser);
        if (result == null) {
            blockBuilder.appendNull();
        }
        else {
            SMALLINT.writeLong(blockBuilder, result);
        }
    }
}
/** Appends the current JSON token to an INTEGER block (null-safe). */
private static class IntegerBlockBuilderAppender
        implements BlockBuilderAppender
{
    @Override
    public void append(JsonParser parser, BlockBuilder blockBuilder)
            throws IOException
    {
        Long value = currentTokenAsInteger(parser);
        if (value != null) {
            INTEGER.writeLong(blockBuilder, value);
        }
        else {
            blockBuilder.appendNull();
        }
    }
}
/** Appends the current JSON token to a BIGINT block (null-safe). */
private static class BigintBlockBuilderAppender
        implements BlockBuilderAppender
{
    @Override
    public void append(JsonParser parser, BlockBuilder blockBuilder)
            throws IOException
    {
        Long value = currentTokenAsBigint(parser);
        if (value != null) {
            BIGINT.writeLong(blockBuilder, value);
        }
        else {
            blockBuilder.appendNull();
        }
    }
}
/** Appends the current JSON token to a REAL block (null-safe; value is raw float bits in a long). */
private static class RealBlockBuilderAppender
        implements BlockBuilderAppender
{
    @Override
    public void append(JsonParser parser, BlockBuilder blockBuilder)
            throws IOException
    {
        Long value = currentTokenAsReal(parser);
        if (value != null) {
            REAL.writeLong(blockBuilder, value);
        }
        else {
            blockBuilder.appendNull();
        }
    }
}
/** Appends the current JSON token to a DOUBLE block (null-safe). */
private static class DoubleBlockBuilderAppender
        implements BlockBuilderAppender
{
    @Override
    public void append(JsonParser parser, BlockBuilder blockBuilder)
            throws IOException
    {
        Double value = currentTokenAsDouble(parser);
        if (value != null) {
            DOUBLE.writeDouble(blockBuilder, value);
        }
        else {
            blockBuilder.appendNull();
        }
    }
}
/** Appends the current JSON token to a short DECIMAL block using the target type's precision/scale. */
private static class ShortDecimalBlockBuilderAppender
        implements BlockBuilderAppender
{
    final DecimalType type;

    ShortDecimalBlockBuilderAppender(DecimalType type)
    {
        this.type = type;
    }

    @Override
    public void append(JsonParser parser, BlockBuilder blockBuilder)
            throws IOException
    {
        Long unscaled = currentTokenAsShortDecimal(parser, type.getPrecision(), type.getScale());
        if (unscaled != null) {
            type.writeLong(blockBuilder, unscaled);
        }
        else {
            blockBuilder.appendNull();
        }
    }
}
/** Appends the current JSON token to a long DECIMAL block using the target type's precision/scale. */
private static class LongDecimalBlockBuilderAppender
        implements BlockBuilderAppender
{
    final DecimalType type;

    LongDecimalBlockBuilderAppender(DecimalType type)
    {
        this.type = type;
    }

    @Override
    public void append(JsonParser parser, BlockBuilder blockBuilder)
            throws IOException
    {
        Slice unscaled = currentTokenAsLongDecimal(parser, type.getPrecision(), type.getScale());
        if (unscaled != null) {
            type.writeSlice(blockBuilder, unscaled);
        }
        else {
            blockBuilder.appendNull();
        }
    }
}
/** Appends the current JSON token to a VARCHAR block (null-safe). */
private static class VarcharBlockBuilderAppender
        implements BlockBuilderAppender
{
    final Type type;

    VarcharBlockBuilderAppender(Type type)
    {
        this.type = type;
    }

    @Override
    public void append(JsonParser parser, BlockBuilder blockBuilder)
            throws IOException
    {
        Slice value = currentTokenAsVarchar(parser);
        if (value != null) {
            type.writeSlice(blockBuilder, value);
        }
        else {
            blockBuilder.appendNull();
        }
    }
}
// Appends a JSON array to an ARRAY block. Expects the parser positioned on
// START_ARRAY (or VALUE_NULL); consumes tokens through the matching END_ARRAY.
private static class ArrayBlockBuilderAppender
        implements BlockBuilderAppender
{
    final BlockBuilderAppender elementAppender;

    ArrayBlockBuilderAppender(BlockBuilderAppender elementAppender)
    {
        this.elementAppender = elementAppender;
    }

    @Override
    public void append(JsonParser parser, BlockBuilder blockBuilder)
            throws IOException
    {
        if (parser.getCurrentToken() == JsonToken.VALUE_NULL) {
            blockBuilder.appendNull();
            return;
        }
        if (parser.getCurrentToken() != START_ARRAY) {
            throw new JsonCastException(format("Expected a json array, but got %s", parser.getText()));
        }
        // Each nextToken() advances to the next element (or END_ARRAY);
        // the element appender reads the token it is positioned on.
        BlockBuilder entryBuilder = blockBuilder.beginBlockEntry();
        while (parser.nextToken() != END_ARRAY) {
            elementAppender.append(parser, entryBuilder);
        }
        blockBuilder.closeEntry();
    }
}
// Appends a JSON object to a MAP block. Expects the parser positioned on
// START_OBJECT (or VALUE_NULL). Uses a HashTable over the entry block to
// reject duplicate keys after SQL-side coercion (distinct JSON strings can
// cast to the same SQL key).
private static class MapBlockBuilderAppender
        implements BlockBuilderAppender
{
    final BlockBuilderAppender keyAppender;
    final BlockBuilderAppender valueAppender;
    final Type keyType;

    MapBlockBuilderAppender(BlockBuilderAppender keyAppender, BlockBuilderAppender valueAppender, Type keyType)
    {
        this.keyAppender = keyAppender;
        this.valueAppender = valueAppender;
        this.keyType = keyType;
    }

    @Override
    public void append(JsonParser parser, BlockBuilder blockBuilder)
            throws IOException
    {
        if (parser.getCurrentToken() == JsonToken.VALUE_NULL) {
            blockBuilder.appendNull();
            return;
        }
        if (parser.getCurrentToken() != START_OBJECT) {
            throw new JsonCastException(format("Expected a json object, but got %s", parser.getText()));
        }
        BlockBuilder entryBuilder = blockBuilder.beginBlockEntry();
        HashTable entryBuilderHashTable = new HashTable(keyType, entryBuilder);
        // Entries are written as alternating key/value positions, so `position`
        // (the key's position) advances by 2 per entry.
        int position = 0;
        while (parser.nextToken() != END_OBJECT) {
            keyAppender.append(parser, entryBuilder);
            parser.nextToken();
            valueAppender.append(parser, entryBuilder);
            // The key must already be written to entryBuilder before it is
            // registered in the hash table (the table hashes block positions).
            if (!entryBuilderHashTable.addIfAbsent(position)) {
                throw new JsonCastException("Duplicate keys are not allowed");
            }
            position += 2;
        }
        blockBuilder.closeEntry();
    }
}
// Appends a JSON array or object to a ROW block. Arrays are matched to fields
// positionally; objects are matched by (lowercased) field name via
// fieldNameToIndex — empty when the row type has anonymous fields.
private static class RowBlockBuilderAppender
        implements BlockBuilderAppender
{
    final BlockBuilderAppender[] fieldAppenders;
    final Optional<Map<String, Integer>> fieldNameToIndex;

    RowBlockBuilderAppender(BlockBuilderAppender[] fieldAppenders, Optional<Map<String, Integer>> fieldNameToIndex)
    {
        this.fieldAppenders = fieldAppenders;
        this.fieldNameToIndex = fieldNameToIndex;
    }

    @Override
    public void append(JsonParser parser, BlockBuilder blockBuilder)
            throws IOException
    {
        if (parser.getCurrentToken() == JsonToken.VALUE_NULL) {
            blockBuilder.appendNull();
            return;
        }
        if (parser.getCurrentToken() != START_ARRAY && parser.getCurrentToken() != START_OBJECT) {
            throw new JsonCastException(format("Expected a json array or object, but got %s", parser.getText()));
        }
        // Delegates to the shared row parser so JsonToRowCast can reuse it.
        parseJsonToSingleRowBlock(
                parser,
                (SingleRowBlockWriter) blockBuilder.beginBlockEntry(),
                fieldAppenders,
                fieldNameToIndex);
        blockBuilder.closeEntry();
    }
}
/**
 * Builds a field-name → field-index map for a row type, or
 * {@link Optional#empty()} when the row's fields are anonymous
 * (only the first field's name is checked, matching the original behavior).
 */
public static Optional<Map<String, Integer>> getFieldNameToIndex(List<Field> rowFields)
{
    if (rowFields.get(0).getName().isEmpty()) {
        return Optional.empty();
    }
    Map<String, Integer> nameToIndex = new HashMap<>(rowFields.size());
    int index = 0;
    for (Field field : rowFields) {
        nameToIndex.put(field.getName().get(), index);
        index++;
    }
    return Optional.of(nameToIndex);
}
// TODO: Once CAST function supports cachedInstanceFactory or directly write to BlockBuilder,
// JsonToRowCast::toRow can use RowBlockBuilderAppender::append to parse JSON and append to the block builder.
// Thus there will be single call to this method, so this method can be inlined.
/**
 * Parses a JSON array (positional fields) or JSON object (named fields) into
 * the given single-row writer. Expects the parser positioned on START_ARRAY
 * or START_OBJECT. Unknown object fields are skipped; missing fields are
 * filled with nulls; duplicate fields are rejected.
 */
public static void parseJsonToSingleRowBlock(
        JsonParser parser,
        SingleRowBlockWriter singleRowBlockWriter,
        BlockBuilderAppender[] fieldAppenders,
        Optional<Map<String, Integer>> fieldNameToIndex)
        throws IOException
{
    if (parser.getCurrentToken() == START_ARRAY) {
        // Positional: exactly one JSON element per row field, in order.
        for (int i = 0; i < fieldAppenders.length; i++) {
            parser.nextToken();
            fieldAppenders[i].append(parser, singleRowBlockWriter);
        }
        if (parser.nextToken() != JsonToken.END_ARRAY) {
            throw new JsonCastException(format("Expected json array ending, but got %s", parser.getText()));
        }
    }
    else {
        verify(parser.getCurrentToken() == START_OBJECT);
        // Named matching requires the row fields to have names.
        if (fieldNameToIndex.isEmpty()) {
            throw new JsonCastException("Cannot cast a JSON object to anonymous row type. Input must be a JSON array.");
        }
        boolean[] fieldWritten = new boolean[fieldAppenders.length];
        int numFieldsWritten = 0;
        while (parser.nextToken() != JsonToken.END_OBJECT) {
            if (parser.currentToken() != FIELD_NAME) {
                throw new JsonCastException(format("Expected a json field name, but got %s", parser.getText()));
            }
            // Field names are matched case-insensitively (row field names are
            // stored lowercase).
            String fieldName = parser.getText().toLowerCase(Locale.ENGLISH);
            Integer fieldIndex = fieldNameToIndex.get().get(fieldName);
            parser.nextToken();
            if (fieldIndex != null) {
                if (fieldWritten[fieldIndex]) {
                    throw new JsonCastException("Duplicate field: " + fieldName);
                }
                fieldWritten[fieldIndex] = true;
                numFieldsWritten++;
                fieldAppenders[fieldIndex].append(parser, singleRowBlockWriter.getFieldBlockBuilder(fieldIndex));
            }
            else {
                // Unknown field: skip its entire (possibly nested) value.
                parser.skipChildren();
            }
        }
        // Backfill nulls for row fields absent from the JSON object.
        if (numFieldsWritten != fieldAppenders.length) {
            for (int i = 0; i < fieldWritten.length; i++) {
                if (!fieldWritten[i]) {
                    singleRowBlockWriter.getFieldBlockBuilder(i).appendNull();
                }
            }
        }
    }
}
// TODO: This class might be useful to other Map functions (transform_key, cast map to map, map_concat, etc)
// It is caller's responsibility to make the block data synchronized with the hash table
/**
 * Open-addressing (linear probing) set of positions in a BlockBuilder, keyed
 * by value equality under {@code type}. Used to detect duplicate map keys.
 */
public static class HashTable
{
    private static final int EXPECTED_ENTRIES = 20;
    private static final float FILL_RATIO = 0.75f;
    private static final int EMPTY_SLOT = -1;
    private final Type type;
    private final BlockBuilder block;
    // Slot array: each slot holds a block position, or EMPTY_SLOT.
    private int[] positionByHash;
    private int hashCapacity;       // always a power of two
    private int maxFill;            // resize threshold derived from FILL_RATIO
    private int hashMask;           // hashCapacity - 1, for cheap modulo
    private int size;

    public HashTable(Type type, BlockBuilder block)
    {
        this.type = requireNonNull(type, "type is null");
        this.block = requireNonNull(block, "block is null");
        hashCapacity = arraySize(EXPECTED_ENTRIES, FILL_RATIO);
        this.maxFill = calculateMaxFill(hashCapacity);
        this.hashMask = hashCapacity - 1;
        positionByHash = new int[hashCapacity];
        Arrays.fill(positionByHash, EMPTY_SLOT);
    }

    /** Returns true if a value equal to {@code block[position]} is already stored. */
    public boolean contains(int position)
    {
        checkArgument(position >= 0, "position is negative");
        return positionByHash[getHashPosition(position)] != EMPTY_SLOT;
    }

    /**
     * Adds {@code position} unless an equal value is already present.
     * Returns true if the position was added (i.e. the value was absent).
     */
    public boolean addIfAbsent(int position)
    {
        checkArgument(position >= 0, "position is negative");
        int hashPosition = getHashPosition(position);
        if (positionByHash[hashPosition] == EMPTY_SLOT) {
            positionByHash[hashPosition] = position;
            size++;
            if (size >= maxFill) {
                rehash();
            }
            return true;
        }
        else {
            return false;
        }
    }

    // Probes linearly from the value's hash slot; returns either the slot
    // holding an equal value or the first empty slot.
    private int getHashPosition(int position)
    {
        int hashPosition = getMaskedHash(hashPosition(type, block, position));
        while (true) {
            if (positionByHash[hashPosition] == EMPTY_SLOT) {
                return hashPosition;
            }
            if (positionEqualsPosition(type, block, positionByHash[hashPosition], block, position)) {
                return hashPosition;
            }
            hashPosition = getMaskedHash(hashPosition + 1);
        }
    }

    // Doubles capacity and reinserts every stored position.
    private void rehash()
    {
        long newCapacityLong = hashCapacity * 2L;
        if (newCapacityLong > Integer.MAX_VALUE) {
            // NOTE(review): message says "1 billion" but the actual cap is Integer.MAX_VALUE slots.
            throw new PrestoException(GENERIC_INSUFFICIENT_RESOURCES, "Size of hash table cannot exceed 1 billion entries");
        }
        int newCapacity = (int) newCapacityLong;
        hashCapacity = newCapacity;
        hashMask = newCapacity - 1;
        maxFill = calculateMaxFill(newCapacity);
        int[] oldPositionByHash = positionByHash;
        positionByHash = new int[newCapacity];
        Arrays.fill(positionByHash, EMPTY_SLOT);
        for (int position : oldPositionByHash) {
            if (position != EMPTY_SLOT) {
                positionByHash[getHashPosition(position)] = position;
            }
        }
    }

    // maxFill = ceil(hashSize * FILL_RATIO), kept strictly below hashSize.
    private static int calculateMaxFill(int hashSize)
    {
        checkArgument(hashSize > 0, "hashSize must be greater than 0");
        int maxFill = (int) Math.ceil(hashSize * FILL_RATIO);
        if (maxFill == hashSize) {
            maxFill--;
        }
        checkArgument(hashSize > maxFill, "hashSize must be larger than maxFill");
        return maxFill;
    }

    private int getMaskedHash(long rawHash)
    {
        return (int) (rawHash & hashMask);
    }
}
}
| |
/*
* Copyright (C) 2008 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.pocketworkstation.pckeyboard;
import android.content.Context;
import android.content.res.Resources;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.graphics.Paint.Align;
import android.graphics.Rect;
import android.graphics.Typeface;
import android.graphics.drawable.Drawable;
import android.util.AttributeSet;
import android.view.GestureDetector;
import android.view.Gravity;
import android.view.LayoutInflater;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup.LayoutParams;
import android.widget.PopupWindow;
import android.widget.TextView;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
public class CandidateView extends View {
    private static final int OUT_OF_BOUNDS_WORD_INDEX = -1;
    private static final int OUT_OF_BOUNDS_X_COORD = -1;
    // Back-reference to the IME service; receives suggestion picks.
    private LatinIME mService;
    private final ArrayList<CharSequence> mSuggestions = new ArrayList<CharSequence>();
    private boolean mShowingCompletions;
    // Suggestion currently under the finger (set during onDraw hit-testing).
    private CharSequence mSelectedString;
    private int mSelectedIndex;
    private int mTouchX = OUT_OF_BOUNDS_X_COORD;
    private final Drawable mSelectionHighlight;
    private boolean mTypedWordValid;
    private boolean mHaveMinimalSuggestion;
    private Rect mBgPadding;
    private final TextView mPreviewText;
    private final PopupWindow mPreviewPopup;
    private int mCurrentWordIndex;
    private Drawable mDivider;
    private static final int MAX_SUGGESTIONS = 32;
    private static final int SCROLL_PIXELS = 20;
    // Per-suggestion layout caches, indexed by suggestion position.
    private final int[] mWordWidth = new int[MAX_SUGGESTIONS];
    private final int[] mWordX = new int[MAX_SUGGESTIONS];
    private int mPopupPreviewX;
    private int mPopupPreviewY;
    private static final int X_GAP = 10;
    private final int mColorNormal;
    private final int mColorRecommended;
    private final int mColorOther;
    private final Paint mPaint;
    private final int mDescent;
    // Scroll state: mScrolled flips once a drag exceeds the touch slop.
    private boolean mScrolled;
    private boolean mShowingAddToDictionary;
    private CharSequence mAddToDictionaryHint;
    private int mTargetScrollX;
    private final int mMinTouchableWidth;
    private int mTotalWidth;
    private final GestureDetector mGestureDetector;
/**
 * Construct a CandidateView for showing suggested words for completion.
 * Loads colors, divider and hint resources, builds the (initially hidden)
 * preview popup, and wires up gesture detection for the strip.
 *
 * @param context the context of the current IME
 * @param attrs XML attributes from the layout
 */
public CandidateView(Context context, AttributeSet attrs) {
    super(context, attrs);
    mSelectionHighlight = context.getResources().getDrawable(
            R.drawable.list_selector_background_pressed);
    LayoutInflater inflate =
        (LayoutInflater) context
        .getSystemService(Context.LAYOUT_INFLATER_SERVICE);
    Resources res = context.getResources();
    // Popup that previews the word under the finger.
    mPreviewPopup = new PopupWindow(context);
    mPreviewText = (TextView) inflate.inflate(R.layout.candidate_preview, null);
    mPreviewPopup.setWindowLayoutMode(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT);
    mPreviewPopup.setContentView(mPreviewText);
    mPreviewPopup.setBackgroundDrawable(null);
    mPreviewPopup.setAnimationStyle(R.style.KeyPreviewAnimation);
    mColorNormal = res.getColor(R.color.candidate_normal);
    mColorRecommended = res.getColor(R.color.candidate_recommended);
    mColorOther = res.getColor(R.color.candidate_other);
    mDivider = res.getDrawable(R.drawable.keyboard_suggest_strip_divider);
    mAddToDictionaryHint = res.getString(R.string.hint_add_to_dictionary);
    mPaint = new Paint();
    mPaint.setColor(mColorNormal);
    mPaint.setAntiAlias(true);
    // Text size scales with the user's candidate-scale preference.
    mPaint.setTextSize(mPreviewText.getTextSize() * LatinIME.sKeyboardSettings.candidateScalePref);
    mPaint.setStrokeWidth(0);
    mPaint.setTextAlign(Align.CENTER);
    mDescent = (int) mPaint.descent();
    mMinTouchableWidth = (int)res.getDimension(R.dimen.candidate_min_touchable_width);
    mGestureDetector = new GestureDetector(
            new CandidateStripGestureListener(mMinTouchableWidth));
    setWillNotDraw(false);
    setHorizontalScrollBarEnabled(false);
    setVerticalScrollBarEnabled(false);
    scrollTo(0, getScrollY());
}
/**
 * Gesture handling for the candidate strip: long-press on the leftmost word
 * and horizontal scrolling of the suggestion list.
 */
private class CandidateStripGestureListener extends GestureDetector.SimpleOnGestureListener {
    // Squared slop distance; scrolling only starts once the finger moves
    // farther than this, so taps still select a suggestion.
    private final int mTouchSlopSquare;

    public CandidateStripGestureListener(int touchSlop) {
        // Slightly reluctant to scroll to be able to easily choose the suggestion
        mTouchSlopSquare = touchSlop * touchSlop;
    }

    @Override
    public void onLongPress(MotionEvent me) {
        // Only the first word reacts to long-press, and only while the strip
        // is essentially unscrolled.
        if (mSuggestions.size() > 0) {
            if (me.getX() + getScrollX() < mWordWidth[0] && getScrollX() < 10) {
                longPressFirstWord();
            }
        }
    }

    @Override
    public boolean onDown(MotionEvent e) {
        mScrolled = false;
        return false;
    }

    @Override
    public boolean onScroll(MotionEvent e1, MotionEvent e2,
            float distanceX, float distanceY) {
        if (!mScrolled) {
            // This is applied only when we recognize that scrolling is starting.
            final int deltaX = (int) (e2.getX() - e1.getX());
            final int deltaY = (int) (e2.getY() - e1.getY());
            final int distance = (deltaX * deltaX) + (deltaY * deltaY);
            if (distance < mTouchSlopSquare) {
                return true;
            }
            mScrolled = true;
            // Fix: removed a second, unconditional `mScrolled = true;` below —
            // it was always redundant (either set just above, or already true).
        }
        // Clamp the new scroll position to the strip's content width.
        final int width = getWidth();
        int scrollX = getScrollX();
        scrollX += (int) distanceX;
        if (scrollX < 0) {
            scrollX = 0;
        }
        if (distanceX > 0 && scrollX + width > mTotalWidth) {
            scrollX -= (int) distanceX;
        }
        mTargetScrollX = scrollX;
        scrollTo(scrollX, getScrollY());
        hidePreview();
        invalidate();
        return true;
    }
}
/**
 * A connection back to the service to communicate with the text field.
 * The service receives suggestion picks and auto-completion state changes.
 *
 * @param listener the owning LatinIME instance
 */
public void setService(LatinIME listener) {
    mService = listener;
}
// The scrollable range equals the total laid-out width of all suggestions,
// computed by the last onDraw pass.
@Override
public int computeHorizontalScrollRange() {
    return mTotalWidth;
}
/**
 * If the canvas is null, then only touch calculations are performed to pick the target
 * candidate (setSuggestions calls onDraw(null) to lay out word widths/positions
 * without rendering).
 */
@Override
protected void onDraw(Canvas canvas) {
    if (canvas != null) {
        super.onDraw(canvas);
    }
    mTotalWidth = 0;
    final int height = getHeight();
    // Lazily compute background padding and size the divider once.
    if (mBgPadding == null) {
        mBgPadding = new Rect(0, 0, 0, 0);
        if (getBackground() != null) {
            getBackground().getPadding(mBgPadding);
        }
        mDivider.setBounds(0, 0, mDivider.getIntrinsicWidth(),
                mDivider.getIntrinsicHeight());
    }
    final int count = mSuggestions.size();
    final Rect bgPadding = mBgPadding;
    final Paint paint = mPaint;
    final int touchX = mTouchX;
    final int scrollX = getScrollX();
    final boolean scrolled = mScrolled;
    final boolean typedWordValid = mTypedWordValid;
    // Vertical text baseline, centered for the strip height.
    final int y = (int) (height + mPaint.getTextSize() - mDescent) / 2;
    boolean existsAutoCompletion = false;
    int x = 0;
    for (int i = 0; i < count; i++) {
        CharSequence suggestion = mSuggestions.get(i);
        if (suggestion == null) continue;
        final int wordLength = suggestion.length();
        paint.setColor(mColorNormal);
        // Highlight the "best" suggestion: index 1 when the typed word is
        // invalid, otherwise index 0.
        if (mHaveMinimalSuggestion
                && ((i == 1 && !typedWordValid) || (i == 0 && typedWordValid))) {
            paint.setTypeface(Typeface.DEFAULT_BOLD);
            paint.setColor(mColorRecommended);
            existsAutoCompletion = true;
        } else if (i != 0 || (wordLength == 1 && count > 1)) {
            // HACK: even if i == 0, we use mColorOther when this suggestion's length is 1 and
            // there are multiple suggestions, such as the default punctuation list.
            paint.setColor(mColorOther);
        }
        // Measure and cache the word width on first use.
        int wordWidth;
        if ((wordWidth = mWordWidth[i]) == 0) {
            float textWidth = paint.measureText(suggestion, 0, wordLength);
            wordWidth = Math.max(mMinTouchableWidth, (int) textWidth + X_GAP * 2);
            mWordWidth[i] = wordWidth;
        }
        mWordX[i] = x;
        // Hit-testing: record the word under the finger (unless scrolling).
        if (touchX != OUT_OF_BOUNDS_X_COORD && !scrolled
                && touchX + scrollX >= x && touchX + scrollX < x + wordWidth) {
            if (canvas != null && !mShowingAddToDictionary) {
                canvas.translate(x, 0);
                mSelectionHighlight.setBounds(0, bgPadding.top, wordWidth, height);
                mSelectionHighlight.draw(canvas);
                canvas.translate(-x, 0);
            }
            mSelectedString = suggestion;
            mSelectedIndex = i;
        }
        if (canvas != null) {
            canvas.drawText(suggestion, 0, wordLength, x + wordWidth / 2, y, paint);
            paint.setColor(mColorOther);
            canvas.translate(x + wordWidth, 0);
            // Draw a divider unless it's after the hint
            if (!(mShowingAddToDictionary && i == 1)) {
                mDivider.draw(canvas);
            }
            canvas.translate(-x - wordWidth, 0);
        }
        paint.setTypeface(Typeface.DEFAULT);
        x += wordWidth;
    }
    if (!isInEditMode())
        mService.onAutoCompletionStateChanged(existsAutoCompletion);
    mTotalWidth = x;
    // Continue animated scrolling toward the target, one step per draw.
    if (mTargetScrollX != scrollX) {
        scrollToTarget();
    }
}
/**
 * Advances the scroll position one SCROLL_PIXELS step toward mTargetScrollX,
 * clamping at the target. Requests layout once the target is reached;
 * invalidates so onDraw keeps the animation going until then.
 */
private void scrollToTarget() {
    int current = getScrollX();
    int next;
    if (mTargetScrollX > current) {
        next = Math.min(current + SCROLL_PIXELS, mTargetScrollX);
    } else {
        next = Math.max(current - SCROLL_PIXELS, mTargetScrollX);
    }
    scrollTo(next, getScrollY());
    if (next == mTargetScrollX) {
        requestLayout();
    }
    invalidate();
}
/**
 * Replaces the displayed suggestions (capped at MAX_SUGGESTIONS), resets the
 * scroll position, and triggers a layout pass. onDraw(null) is invoked
 * immediately to recompute word widths/positions without rendering.
 *
 * @param completions true if these came from the application (not the dictionary)
 * @param typedWordValid true if the typed word itself is a valid word
 * @param haveMinimalSuggestion true if a best suggestion should be highlighted
 */
public void setSuggestions(List<CharSequence> suggestions, boolean completions,
        boolean typedWordValid, boolean haveMinimalSuggestion) {
    clear();
    if (suggestions != null) {
        int limit = Math.min(suggestions.size(), MAX_SUGGESTIONS);
        for (int i = 0; i < limit; i++) {
            mSuggestions.add(suggestions.get(i));
        }
    }
    mShowingCompletions = completions;
    mTypedWordValid = typedWordValid;
    scrollTo(0, getScrollY());
    mTargetScrollX = 0;
    mHaveMinimalSuggestion = haveMinimalSuggestion;
    // Compute the total width
    onDraw(null);
    invalidate();
    requestLayout();
}
/** Returns true while the "touch to add to dictionary" hint strip is showing. */
public boolean isShowingAddToDictionaryHint() {
    return mShowingAddToDictionary;
}
/**
 * Shows the strip as "<word> <add-to-dictionary hint>" and flags hint mode
 * so the next tap adds the word to the user dictionary.
 */
public void showAddToDictionaryHint(CharSequence word) {
    ArrayList<CharSequence> hintRow = new ArrayList<CharSequence>();
    hintRow.add(word);
    hintRow.add(mAddToDictionaryHint);
    setSuggestions(hintRow, false, false, false);
    mShowingAddToDictionary = true;
}
public boolean dismissAddToDictionaryHint() {
if (!mShowingAddToDictionary) return false;
clear();
return true;
}
    /**
     * @return the live (mutable) list of currently displayed suggestions;
     *         package-private for use by the owning input method service
     */
    /* package */ List<CharSequence> getSuggestions() {
        return mSuggestions;
    }
    /** Resets the strip to an empty state: no suggestions, no selection, no hint. */
    public void clear() {
        // NOTE(review): the previous comment here said "Don't call
        // mSuggestions.clear() because it's being used for logging in
        // LatinIME.pickSuggestionManually()" — yet the very next line does
        // exactly that. Confirm whether the clear() call or the old comment
        // was the stale half.
        mSuggestions.clear();
        mTouchX = OUT_OF_BOUNDS_X_COORD;
        mSelectedString = null;
        mSelectedIndex = -1;
        mShowingAddToDictionary = false;
        invalidate();
        // Forget cached per-word layout metrics from the last draw pass.
        Arrays.fill(mWordWidth, 0);
        Arrays.fill(mWordX, 0);
    }
    /**
     * Handles touch input on the suggestion strip. Taps pick the touched
     * suggestion (or trigger the add-to-dictionary flow while the hint is
     * showing); an upward fling while a word is selected also picks it.
     * Gesture handling (scrolling) is delegated to {@code mGestureDetector}
     * first; all events are consumed.
     */
    @Override
    public boolean onTouchEvent(MotionEvent me) {
        if (mGestureDetector.onTouchEvent(me)) {
            // The gesture detector handled it (e.g. a scroll fling).
            return true;
        }
        int action = me.getAction();
        int x = (int) me.getX();
        int y = (int) me.getY();
        // Remember where the finger is; onDraw uses this to hit-test words.
        mTouchX = x;
        switch (action) {
        case MotionEvent.ACTION_DOWN:
            // Redraw so the touched word gets highlighted.
            invalidate();
            break;
        case MotionEvent.ACTION_MOVE:
            if (y <= 0) {
                // Fling up!?
                if (mSelectedString != null) {
                    // If there are completions from the application, we don't change the state to
                    // STATE_PICKED_SUGGESTION
                    if (!mShowingCompletions) {
                        // This "acceptedSuggestion" will not be counted as a word because
                        // it will be counted in pickSuggestion instead.
                        //TextEntryState.acceptedSuggestion(mSuggestions.get(0), mSelectedString);
                        //TextEntryState.manualTyped(mSelectedString);
                    }
                    mService.pickSuggestionManually(mSelectedIndex, mSelectedString);
                    mSelectedString = null;
                    mSelectedIndex = -1;
                }
            }
            break;
        case MotionEvent.ACTION_UP:
            // Only treat the release as a tap if no scroll happened in between.
            if (!mScrolled) {
                if (mSelectedString != null) {
                    if (mShowingAddToDictionary) {
                        // Tapping while the hint shows adds the word instead.
                        longPressFirstWord();
                        clear();
                    } else {
                        if (!mShowingCompletions) {
                            //TextEntryState.acceptedSuggestion(mSuggestions.get(0), mSelectedString);
                            //TextEntryState.manualTyped(mSelectedString);
                        }
                        mService.pickSuggestionManually(mSelectedIndex, mSelectedString);
                    }
                }
            }
            // Reset selection/preview state regardless of the outcome.
            mSelectedString = null;
            mSelectedIndex = -1;
            requestLayout();
            hidePreview();
            invalidate();
            break;
        }
        return true;
    }
    /** Dismisses the word-preview popup and resets the touch/word tracking state. */
    private void hidePreview() {
        mTouchX = OUT_OF_BOUNDS_X_COORD;
        mCurrentWordIndex = OUT_OF_BOUNDS_WORD_INDEX;
        mPreviewPopup.dismiss();
    }
    /**
     * Shows (or moves) the popup previewing the word at {@code wordIndex},
     * horizontally centered above that word in the strip.
     *
     * @param wordIndex index into the suggestion list, or
     *            {@code OUT_OF_BOUNDS_WORD_INDEX} to hide the preview
     * @param altText optional text to display instead of the suggestion
     *            itself (e.g. an "added to dictionary" confirmation); null to
     *            show the suggestion
     */
    private void showPreview(int wordIndex, String altText) {
        int oldWordIndex = mCurrentWordIndex;
        mCurrentWordIndex = wordIndex;
        // If index changed or changing text
        if (oldWordIndex != mCurrentWordIndex || altText != null) {
            if (wordIndex == OUT_OF_BOUNDS_WORD_INDEX) {
                hidePreview();
            } else {
                CharSequence word = altText != null? altText : mSuggestions.get(wordIndex);
                mPreviewText.setText(word);
                // Measure unconstrained so the popup sizes itself to the text.
                mPreviewText.measure(MeasureSpec.makeMeasureSpec(0, MeasureSpec.UNSPECIFIED),
                        MeasureSpec.makeMeasureSpec(0, MeasureSpec.UNSPECIFIED));
                int wordWidth = (int) (mPaint.measureText(word, 0, word.length()) + X_GAP * 2);
                final int popupWidth = wordWidth
                        + mPreviewText.getPaddingLeft() + mPreviewText.getPaddingRight();
                final int popupHeight = mPreviewText.getMeasuredHeight();
                //mPreviewText.setVisibility(INVISIBLE);
                // Center the popup over the word, compensating for scroll and padding.
                mPopupPreviewX = mWordX[wordIndex] - mPreviewText.getPaddingLeft() - getScrollX()
                        + (mWordWidth[wordIndex] - wordWidth) / 2;
                // Place it directly above the strip.
                mPopupPreviewY = - popupHeight;
                int [] offsetInWindow = new int[2];
                getLocationInWindow(offsetInWindow);
                if (mPreviewPopup.isShowing()) {
                    mPreviewPopup.update(mPopupPreviewX, mPopupPreviewY + offsetInWindow[1],
                            popupWidth, popupHeight);
                } else {
                    mPreviewPopup.setWidth(popupWidth);
                    mPreviewPopup.setHeight(popupHeight);
                    mPreviewPopup.showAtLocation(this, Gravity.NO_GRAVITY, mPopupPreviewX,
                            mPopupPreviewY + offsetInWindow[1]);
                }
                mPreviewText.setVisibility(VISIBLE);
            }
        }
    }
    /**
     * Long-press action on the first suggestion: adds it to the user
     * dictionary and shows an "added" confirmation preview on success.
     */
    private void longPressFirstWord() {
        CharSequence word = mSuggestions.get(0);
        // Single-character entries are not worth adding to the dictionary.
        if (word.length() < 2) return;
        if (mService.addWordToDictionary(word.toString())) {
            showPreview(0, getContext().getResources().getString(R.string.added_word, word));
        }
    }
    /** Ensures the preview popup does not leak when the view leaves the window. */
    @Override
    public void onDetachedFromWindow() {
        super.onDetachedFromWindow();
        hidePreview();
    }
}
| |
/*
* Muhimbi PDF
* Convert, Merge, Watermark, Secure and OCR files.
*
* OpenAPI spec version: 9.15
*
*
* NOTE: This class is auto generated by the swagger code generator program.
* https://github.com/swagger-api/swagger-codegen.git
* Do not edit the class manually.
*/
package com.muhimbi.online.client.api;
import com.muhimbi.online.client.ApiCallback;
import com.muhimbi.online.client.ApiClient;
import com.muhimbi.online.client.ApiException;
import com.muhimbi.online.client.ApiResponse;
import com.muhimbi.online.client.Configuration;
import com.muhimbi.online.client.Pair;
import com.muhimbi.online.client.ProgressRequestBody;
import com.muhimbi.online.client.ProgressResponseBody;
import com.google.gson.reflect.TypeToken;
import java.io.IOException;
import com.muhimbi.online.client.model.OperationResponse;
import com.muhimbi.online.client.model.SecureDocumentData;
import com.muhimbi.online.client.model.SecurePdfData;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class SecureApi {
    // Client used to build and execute every HTTP call of this API.
    private ApiClient apiClient;
    /** Creates the API bound to the globally configured default client. */
    public SecureApi() {
        this(Configuration.getDefaultApiClient());
    }
    /**
     * Creates the API bound to the given client.
     *
     * @param apiClient the client used to execute all requests
     */
    public SecureApi(ApiClient apiClient) {
        this.apiClient = apiClient;
    }
    /** @return the client used to execute all requests */
    public ApiClient getApiClient() {
        return apiClient;
    }
    /** @param apiClient the client to use for all subsequent requests */
    public void setApiClient(ApiClient apiClient) {
        this.apiClient = apiClient;
    }
    /* Build call for secureDocument */
    private com.squareup.okhttp.Call secureDocumentCall(SecureDocumentData inputData, final ProgressResponseBody.ProgressListener progressListener, final ProgressRequestBody.ProgressRequestListener progressRequestListener) throws ApiException {
        Object localVarPostBody = inputData;
        // create path and map variables
        String localVarPath = "/v1/operations/secure_document".replaceAll("\\{format\\}","json");
        List<Pair> localVarQueryParams = new ArrayList<Pair>();
        Map<String, String> localVarHeaderParams = new HashMap<String, String>();
        Map<String, Object> localVarFormParams = new HashMap<String, Object>();
        final String[] localVarAccepts = {
            "application/json"
        };
        final String localVarAccept = apiClient.selectHeaderAccept(localVarAccepts);
        if (localVarAccept != null) localVarHeaderParams.put("Accept", localVarAccept);
        final String[] localVarContentTypes = {
            "application/json"
        };
        final String localVarContentType = apiClient.selectHeaderContentType(localVarContentTypes);
        localVarHeaderParams.put("Content-Type", localVarContentType);
        if(progressListener != null) {
            // NOTE(review): this registers a network interceptor on the shared
            // OkHttp client for every call that supplies a listener; confirm
            // interceptors do not accumulate across calls.
            apiClient.getHttpClient().networkInterceptors().add(new com.squareup.okhttp.Interceptor() {
                @Override
                public com.squareup.okhttp.Response intercept(com.squareup.okhttp.Interceptor.Chain chain) throws IOException {
                    com.squareup.okhttp.Response originalResponse = chain.proceed(chain.request());
                    return originalResponse.newBuilder()
                            .body(new ProgressResponseBody(originalResponse.body(), progressListener))
                            .build();
                }
            });
        }
        // Endpoint accepts both OAuth2 and API-key authentication.
        String[] localVarAuthNames = new String[] { "oauth2_auth", "api_key" };
        return apiClient.buildCall(localVarPath, "POST", localVarQueryParams, localVarPostBody, localVarHeaderParams, localVarFormParams, localVarAuthNames, progressRequestListener);
    }
    /**
     * Validates required parameters before building the secureDocument call.
     *
     * @throws ApiException if the required {@code inputData} is null
     */
    @SuppressWarnings("rawtypes")
    private com.squareup.okhttp.Call secureDocumentValidateBeforeCall(SecureDocumentData inputData, final ProgressResponseBody.ProgressListener progressListener, final ProgressRequestBody.ProgressRequestListener progressRequestListener) throws ApiException {
        // verify the required parameter 'inputData' is set
        if (inputData == null) {
            throw new ApiException("Missing the required parameter 'inputData' when calling secureDocument(Async)");
        }
        com.squareup.okhttp.Call call = secureDocumentCall(inputData, progressListener, progressRequestListener);
        return call;
    }
    /**
     *
     * Secure document. Apply security and encryption settings to PDF and Office documents.
     * @param inputData (required)
     * @return OperationResponse
     * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
     */
    public OperationResponse secureDocument(SecureDocumentData inputData) throws ApiException {
        ApiResponse<OperationResponse> resp = secureDocumentWithHttpInfo(inputData);
        return resp.getData();
    }
    /**
     *
     * Secure document. Apply security and encryption settings to PDF and Office documents.
     * @param inputData (required)
     * @return ApiResponse&lt;OperationResponse&gt; including HTTP status and headers
     * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
     */
    public ApiResponse<OperationResponse> secureDocumentWithHttpInfo(SecureDocumentData inputData) throws ApiException {
        com.squareup.okhttp.Call call = secureDocumentValidateBeforeCall(inputData, null, null);
        Type localVarReturnType = new TypeToken<OperationResponse>(){}.getType();
        return apiClient.execute(call, localVarReturnType);
    }
    /**
     * (asynchronously)
     * Secure document. Apply security and encryption settings to PDF and Office documents.
     * @param inputData (required)
     * @param callback The callback to be executed when the API call finishes
     * @return The request call
     * @throws ApiException If fail to process the API call, e.g. serializing the request body object
     */
    public com.squareup.okhttp.Call secureDocumentAsync(SecureDocumentData inputData, final ApiCallback<OperationResponse> callback) throws ApiException {
        ProgressResponseBody.ProgressListener progressListener = null;
        ProgressRequestBody.ProgressRequestListener progressRequestListener = null;
        if (callback != null) {
            // Forward download/upload progress to the caller's callback.
            progressListener = new ProgressResponseBody.ProgressListener() {
                @Override
                public void update(long bytesRead, long contentLength, boolean done) {
                    callback.onDownloadProgress(bytesRead, contentLength, done);
                }
            };
            progressRequestListener = new ProgressRequestBody.ProgressRequestListener() {
                @Override
                public void onRequestProgress(long bytesWritten, long contentLength, boolean done) {
                    callback.onUploadProgress(bytesWritten, contentLength, done);
                }
            };
        }
        com.squareup.okhttp.Call call = secureDocumentValidateBeforeCall(inputData, progressListener, progressRequestListener);
        Type localVarReturnType = new TypeToken<OperationResponse>(){}.getType();
        apiClient.executeAsync(call, localVarReturnType, callback);
        return call;
    }
    /* Build call for securePdf */
    private com.squareup.okhttp.Call securePdfCall(SecurePdfData inputData, final ProgressResponseBody.ProgressListener progressListener, final ProgressRequestBody.ProgressRequestListener progressRequestListener) throws ApiException {
        Object localVarPostBody = inputData;
        // create path and map variables
        String localVarPath = "/v1/operations/secure_pdf".replaceAll("\\{format\\}","json");
        List<Pair> localVarQueryParams = new ArrayList<Pair>();
        Map<String, String> localVarHeaderParams = new HashMap<String, String>();
        Map<String, Object> localVarFormParams = new HashMap<String, Object>();
        final String[] localVarAccepts = {
            "application/json"
        };
        final String localVarAccept = apiClient.selectHeaderAccept(localVarAccepts);
        if (localVarAccept != null) localVarHeaderParams.put("Accept", localVarAccept);
        final String[] localVarContentTypes = {
            "application/json"
        };
        final String localVarContentType = apiClient.selectHeaderContentType(localVarContentTypes);
        localVarHeaderParams.put("Content-Type", localVarContentType);
        if(progressListener != null) {
            // NOTE(review): see the same interceptor-accumulation concern in
            // secureDocumentCall above.
            apiClient.getHttpClient().networkInterceptors().add(new com.squareup.okhttp.Interceptor() {
                @Override
                public com.squareup.okhttp.Response intercept(com.squareup.okhttp.Interceptor.Chain chain) throws IOException {
                    com.squareup.okhttp.Response originalResponse = chain.proceed(chain.request());
                    return originalResponse.newBuilder()
                            .body(new ProgressResponseBody(originalResponse.body(), progressListener))
                            .build();
                }
            });
        }
        // Endpoint accepts both OAuth2 and API-key authentication.
        String[] localVarAuthNames = new String[] { "oauth2_auth", "api_key" };
        return apiClient.buildCall(localVarPath, "POST", localVarQueryParams, localVarPostBody, localVarHeaderParams, localVarFormParams, localVarAuthNames, progressRequestListener);
    }
    /**
     * Validates required parameters before building the securePdf call.
     *
     * @throws ApiException if the required {@code inputData} is null
     */
    @SuppressWarnings("rawtypes")
    private com.squareup.okhttp.Call securePdfValidateBeforeCall(SecurePdfData inputData, final ProgressResponseBody.ProgressListener progressListener, final ProgressRequestBody.ProgressRequestListener progressRequestListener) throws ApiException {
        // verify the required parameter 'inputData' is set
        if (inputData == null) {
            throw new ApiException("Missing the required parameter 'inputData' when calling securePdf(Async)");
        }
        com.squareup.okhttp.Call call = securePdfCall(inputData, progressListener, progressRequestListener);
        return call;
    }
    /**
     * Secure document
     * Apply security and encryption settings.
     * @param inputData (required)
     * @return OperationResponse
     * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
     */
    public OperationResponse securePdf(SecurePdfData inputData) throws ApiException {
        ApiResponse<OperationResponse> resp = securePdfWithHttpInfo(inputData);
        return resp.getData();
    }
    /**
     * Secure document
     * Apply security and encryption settings.
     * @param inputData (required)
     * @return ApiResponse&lt;OperationResponse&gt; including HTTP status and headers
     * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
     */
    public ApiResponse<OperationResponse> securePdfWithHttpInfo(SecurePdfData inputData) throws ApiException {
        com.squareup.okhttp.Call call = securePdfValidateBeforeCall(inputData, null, null);
        Type localVarReturnType = new TypeToken<OperationResponse>(){}.getType();
        return apiClient.execute(call, localVarReturnType);
    }
    /**
     * Secure document (asynchronously)
     * Apply security and encryption settings.
     * @param inputData (required)
     * @param callback The callback to be executed when the API call finishes
     * @return The request call
     * @throws ApiException If fail to process the API call, e.g. serializing the request body object
     */
    public com.squareup.okhttp.Call securePdfAsync(SecurePdfData inputData, final ApiCallback<OperationResponse> callback) throws ApiException {
        ProgressResponseBody.ProgressListener progressListener = null;
        ProgressRequestBody.ProgressRequestListener progressRequestListener = null;
        if (callback != null) {
            // Forward download/upload progress to the caller's callback.
            progressListener = new ProgressResponseBody.ProgressListener() {
                @Override
                public void update(long bytesRead, long contentLength, boolean done) {
                    callback.onDownloadProgress(bytesRead, contentLength, done);
                }
            };
            progressRequestListener = new ProgressRequestBody.ProgressRequestListener() {
                @Override
                public void onRequestProgress(long bytesWritten, long contentLength, boolean done) {
                    callback.onUploadProgress(bytesWritten, contentLength, done);
                }
            };
        }
        com.squareup.okhttp.Call call = securePdfValidateBeforeCall(inputData, progressListener, progressRequestListener);
        Type localVarReturnType = new TypeToken<OperationResponse>(){}.getType();
        apiClient.executeAsync(call, localVarReturnType, callback);
        return call;
    }
}
| |
/**
* Copyright 2005-2014 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl2.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.rice.kew.docsearch;
import org.apache.commons.lang.StringUtils;
import org.hibernate.annotations.GenericGenerator;
import org.hibernate.annotations.Parameter;
import org.joda.time.DateTime;
import org.kuali.rice.core.api.CoreApiServiceLocator;
import org.kuali.rice.core.api.util.RiceConstants;
import org.kuali.rice.core.framework.persistence.jdbc.sql.SqlBuilder;
import org.kuali.rice.core.framework.persistence.jpa.OrmUtils;
import org.kuali.rice.kew.api.KewApiConstants;
import org.kuali.rice.kew.api.document.attribute.DocumentAttributeDateTime;
import org.kuali.rice.kew.api.document.attribute.DocumentAttributeFactory;
import org.kuali.rice.kew.routeheader.DocumentRouteHeaderValue;
import org.kuali.rice.kew.service.KEWServiceLocator;
import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.NamedQueries;
import javax.persistence.NamedQuery;
import javax.persistence.Table;
import javax.persistence.Transient;
import java.io.Serializable;
import java.sql.Date;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Calendar;
/**
*
* @author Kuali Rice Team (rice.collab@kuali.org)
*/
@Entity
@Table(name="KREW_DOC_HDR_EXT_DT_T")
//@Sequence(name="KREW_SRCH_ATTR_S",property="searchableAttributeValueId")
@NamedQueries({
    @NamedQuery(name="SearchableAttributeDateTimeValue.FindByDocumentId", query="select s from SearchableAttributeDateTimeValue as s where s.documentId = :documentId"),
    @NamedQuery(name="SearchableAttributeDateTimeValue.FindByKey", query="select s from SearchableAttributeDateTimeValue as s where s.documentId = :documentId and s.searchableAttributeKey = :searchableAttributeKey")
})
public class SearchableAttributeDateTimeValue implements SearchableAttributeValue, Serializable {
    private static final org.apache.log4j.Logger LOG = org.apache.log4j.Logger.getLogger(SearchableAttributeDateTimeValue.class);
    private static final long serialVersionUID = 3045621112943214772L;
    // Database table that stores date/time searchable attribute values.
    private static final String ATTRIBUTE_DATABASE_TABLE_NAME = "KREW_DOC_HDR_EXT_DT_T";
    // Date attributes do not support wildcard searches.
    private static final boolean DEFAULT_WILDCARD_ALLOWANCE_POLICY = false;
    // Date attributes support from/to range searches.
    private static final boolean ALLOWS_RANGE_SEARCH = true;
    // Case-insensitivity is meaningless for date values.
    private static final boolean ALLOWS_CASE_INSENSITIVE_SEARCH = false;
    // Data-type token reported through getAttributeDataType().
    private static final String ATTRIBUTE_XML_REPRESENTATION = KewApiConstants.SearchableAttributeConstants.DATA_TYPE_DATE;
    @Id
    @GeneratedValue(generator="KREW_SRCH_ATTR_S")
    @GenericGenerator(name="KREW_SRCH_ATTR_S",strategy="org.hibernate.id.enhanced.SequenceStyleGenerator",parameters={
            @Parameter(name="sequence_name",value="KREW_SRCH_ATTR_S"),
            @Parameter(name="value_column",value="id")
    })
    @Column(name="DOC_HDR_EXT_DT_ID")
    private String searchableAttributeValueId;
    // Attribute key (name) under which this value is stored for a document.
    @Column(name="KEY_CD")
    private String searchableAttributeKey;
    // The actual date/time value, persisted as a SQL timestamp.
    @Column(name="VAL")
    private Timestamp searchableAttributeValue;
    @Transient
    protected String ojbConcreteClass; // attribute needed for OJB polymorphism - do not alter!
    // Id of the document this attribute value belongs to.
    @Column(name="DOC_HDR_ID")
    private String documentId;
    @ManyToOne(fetch=FetchType.EAGER, cascade={CascadeType.PERSIST})
    @JoinColumn(name="DOC_HDR_ID", insertable=false, updatable=false)
    private DocumentRouteHeaderValue routeHeader;
    /**
     * Default constructor.
     */
    public SearchableAttributeDateTimeValue() {
        super();
        this.ojbConcreteClass = this.getClass().getName();
    }
    /* (non-Javadoc)
     * @see org.kuali.rice.kew.docsearch.SearchableAttributeValue#setupAttributeValue(java.lang.String)
     */
    public void setupAttributeValue(String value) {
        this.setSearchableAttributeValue(convertStringToTimestamp(value));
    }
    /**
     * Converts a user-entered date string to a timestamp via the core
     * date-time service. Returns null for an empty input; throws a
     * RuntimeException (after logging) when the value cannot be parsed.
     */
    private Timestamp convertStringToTimestamp(String value) {
        if (org.apache.commons.lang.StringUtils.isEmpty(value)) {
            return null;
        } else {
            Timestamp t;
            try {
                t = CoreApiServiceLocator.getDateTimeService().convertToSqlTimestamp(value);
            } catch (ParseException e) {
                // Treated the same as a null result below: reported as an error.
                t = null;
            }
            if (t == null) {
                String errorMsg = "Error converting timestamp value '" + value + "' to valid timestamp object.";
                LOG.error("setupAttributeValue() " + errorMsg);
                throw new RuntimeException(errorMsg);
            }
            return t;
        }
    }
    /* (non-Javadoc)
     * @see org.kuali.rice.kew.docsearch.SearchableAttributeValue#setupAttributeValue(java.sql.ResultSet, java.lang.String)
     */
    public void setupAttributeValue(ResultSet resultSet, String columnName) throws SQLException {
        // NOTE(review): this clears Calendar.HOUR (12-hour field) rather than
        // Calendar.HOUR_OF_DAY, and the calendar passed to getTimestamp only
        // influences time-zone interpretation — confirm these clear() calls
        // are actually needed/effective.
        Calendar c = Calendar.getInstance();
        c.clear(Calendar.HOUR);
        c.clear(Calendar.MINUTE);
        c.clear(Calendar.SECOND);
        c.clear(Calendar.MILLISECOND);
        this.setSearchableAttributeValue(resultSet.getTimestamp(columnName, c));
    }
    /* (non-Javadoc)
     * @see org.kuali.rice.kew.docsearch.SearchableAttributeValue#getSearchableAttributeDisplayValue()
     */
    public String getSearchableAttributeDisplayValue() {
        return formatAttributeValue(null);
    }
    /**
     * Formats the stored timestamp with the given pattern, or the Rice
     * default date format when the pattern is blank/null.
     * NOTE(review): throws NPE if searchableAttributeValue has not been set —
     * confirm callers guarantee a non-null value.
     */
    private String formatAttributeValue(String formatPattern) {
        DateFormat df = getDateFormatToUse(formatPattern);
        return df.format(new Date(getSearchableAttributeValue().getTime()));
    }
    /** Returns a formatter for the given pattern, or the Rice default. */
    private DateFormat getDateFormatToUse(String parameterFormatPattern) {
        if (StringUtils.isNotBlank(parameterFormatPattern)) {
            return new SimpleDateFormat(parameterFormatPattern);
        }
        return RiceConstants.getDefaultDateFormat();
    }
    /* (non-Javadoc)
     * @see org.kuali.rice.kew.docsearch.SearchableAttributeValue#getAttributeDataType()
     */
    public String getAttributeDataType() {
        return ATTRIBUTE_XML_REPRESENTATION;
    }
    /* (non-Javadoc)
     * @see org.kuali.rice.kew.docsearch.SearchableAttributeValue#getAttributeTableName()
     */
    public String getAttributeTableName() {
        return ATTRIBUTE_DATABASE_TABLE_NAME;
    }
    /* (non-Javadoc)
     * @see org.kuali.rice.kew.docsearch.SearchableAttributeValue#allowsWildcardsByDefault()
     */
    public boolean allowsWildcards() {
        return DEFAULT_WILDCARD_ALLOWANCE_POLICY;
    }
    /* (non-Javadoc)
     * @see org.kuali.rice.kew.docsearch.SearchableAttributeValue#allowsCaseInsensitivity()
     */
    public boolean allowsCaseInsensitivity() {
        return ALLOWS_CASE_INSENSITIVE_SEARCH;
    }
    /* (non-Javadoc)
     * @see org.kuali.rice.kew.docsearch.SearchableAttributeValue#allowsRangeSearches()
     */
    public boolean allowsRangeSearches() {
        return ALLOWS_RANGE_SEARCH;
    }
    /* (non-Javadoc)
     * @see org.kuali.rice.kew.docsearch.SearchableAttributeValue#isPassesDefaultValidation()
     */
    public boolean isPassesDefaultValidation(String valueEntered) {
        return new SqlBuilder().isValidDate(valueEntered);
        //return (DocSearchUtils.getEntryFormattedDate(valueEntered) != null);
    }
    /* (non-Javadoc)
     * @see org.kuali.rice.kew.docsearch.SearchableAttributeValue#isRangeValid(java.lang.String, java.lang.String)
     */
    public Boolean isRangeValid(String lowerValue, String upperValue) {
        if (allowsRangeSearches()) {
            Timestamp lowerTime = convertStringToTimestamp(lowerValue);
            Timestamp upperTime = convertStringToTimestamp(upperValue);
            if ( (lowerTime != null) && (upperTime != null) ) {
                // Valid only when the range is not inverted.
                return (lowerTime.compareTo(upperTime) <= 0);
            }
            // Open-ended ranges (either bound empty) are considered valid.
            return true;
        }
        // null signals that range validation does not apply to this type.
        return null;
    }
    public String getOjbConcreteClass() {
        return ojbConcreteClass;
    }
    public void setOjbConcreteClass(String ojbConcreteClass) {
        this.ojbConcreteClass = ojbConcreteClass;
    }
    public DocumentRouteHeaderValue getRouteHeader() {
        return routeHeader;
    }
    public void setRouteHeader(DocumentRouteHeaderValue routeHeader) {
        this.routeHeader = routeHeader;
    }
    public String getDocumentId() {
        return documentId;
    }
    public void setDocumentId(String documentId) {
        this.documentId = documentId;
    }
    public String getSearchableAttributeKey() {
        return searchableAttributeKey;
    }
    public void setSearchableAttributeKey(String searchableAttributeKey) {
        this.searchableAttributeKey = searchableAttributeKey;
    }
    public Timestamp getSearchableAttributeValue() {
        return searchableAttributeValue;
    }
    public void setSearchableAttributeValue(Timestamp searchableAttributeValue) {
        this.searchableAttributeValue = searchableAttributeValue;
    }
    public String getSearchableAttributeValueId() {
        return searchableAttributeValueId;
    }
    public void setSearchableAttributeValueId(String searchableAttributeValueId) {
        this.searchableAttributeValueId = searchableAttributeValueId;
    }
    // Populates the generated id when running on OJB-style persistence.
    //@PrePersist
    public void beforeInsert(){
        OrmUtils.populateAutoIncValue(this, KEWServiceLocator.getEntityManagerFactory().createEntityManager());
    }
    /** Converts this entity to its API-level document attribute representation. */
    @Override
    public DocumentAttributeDateTime toDocumentAttribute() {
        DateTime dateTime = null;
        if (getSearchableAttributeValue() != null) {
            dateTime = new DateTime(getSearchableAttributeValue().getTime());
        }
        return DocumentAttributeFactory.createDateTimeAttribute(getSearchableAttributeKey(), dateTime);
    }
}
| |
/*******************************************************************************
*
* Copyright FUJITSU LIMITED 2017
*
* Creation Date: 18.02.2009
*
*******************************************************************************/
package org.oscm.ui.beans;
import static org.oscm.ui.common.Constants.REQ_PARAM_TENANT_ID;
import static org.oscm.ui.common.Constants.SESSION_PARAM_SAML_LOGOUT_REQUEST;

import java.io.IOException;
import java.io.Serializable;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;

import javax.ejb.EJB;
import javax.faces.bean.ManagedBean;
import javax.faces.bean.SessionScoped;
import javax.faces.context.ExternalContext;
import javax.faces.context.FacesContext;
import javax.servlet.ServletRequest;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.apache.commons.lang3.StringUtils;
import org.oscm.billing.external.pricemodel.service.PriceModel;
import org.oscm.internal.intf.MarketplaceCacheService;
import org.oscm.internal.intf.MarketplaceService;
import org.oscm.internal.types.exception.MarketplaceRemovedException;
import org.oscm.internal.types.exception.ObjectNotFoundException;
import org.oscm.internal.types.exception.SaaSSystemException;
import org.oscm.logging.Log4jLogger;
import org.oscm.logging.LoggerFactory;
import org.oscm.types.enumtypes.LogMessageIdentifier;
import org.oscm.ui.common.*;
import org.oscm.ui.filter.AuthorizationRequestData;
/**
* Managed bean to store session specific values which are not persisted in the
* database.
*
*/
@SessionScoped
@ManagedBean(name = "sessionBean")
public class SessionBean implements Serializable {
    private static final Log4jLogger logger = LoggerFactory
            .getLogger(SessionBean.class);
    // HTTP header evaluated for browser detection (see isIe()).
    public static final String USER_AGENT_HEADER = "user-agent";
    private static final long serialVersionUID = -8453011510859899681L;
    // Cached browser-detection result; null until first evaluated by isIe().
    private Boolean ie;
    // Pixel dimensions reserved for the navigation area.
    private int navHeight = 580;
    private int navWidth = 240;
    // Key of the service the user wants to subscribe to.
    private Long subscribeToServiceKey;
    // transient: restored lazily after deserialization.
    private transient MarketplaceService marketplaceService = null;
    // Cached flag whether self-registration is enabled; null = not yet read.
    private Boolean selfRegistrationEnabled = null;
    @EJB
    private MarketplaceCacheService mkpCache;
    @EJB
    private MarketplaceService mkpService;
    /**
     * The key of the last edited user group.
     */
    private String selectedUserGroupId;
    /**
     * The key of the last edited user.
     */
    private String selectedUserIdToEdit;
    /**
     * The key of the last selected technical service - applies to technology
     * provider and supplier (operations on technical and marketable services).
     */
    private long selectedTechnicalServiceKey;
    /**
     * The key of the last selected marketable service - applies to supplier
     * (operations on price model and marketable services).
     */
    private Long selectedServiceKeyForSupplier;
    /**
     * The id of the last selected customer - applies to supplier (operations on
     * customer and price model).
     */
    private String selectedCustomerId;
    /**
     * The id of the last selected subscription id - applies to customer
     * (operations on subscriptions).
     */
    private String selectedSubscriptionId;
    /**
     * The key of the last selected subscription. (NOTE(review): javadoc was
     * missing — confirm intended semantics match selectedSubscriptionId.)
     */
    private long selectedSubscriptionKey;
    /**
     * The id of the last selected User id -
     */
    private String selectedUserId;
    /**
     * The key for the selected service
     */
    private Long serviceKeyForPayment;
    /**
     * The key of the last selected marketable service - applies to customer
     * (browsing in marketplace).
     */
    private long selectedServiceKeyForCustomer;
    /**
     * The marketplace brand URL which is currently used as a String.
     */
    private Map<String, String> brandUrlMidMapping = new HashMap<>();
    /**
     * Caching marketplace trackingCodes
     */
    private Map<String, String> trackingCodeMapping = new HashMap<>();
    /**
     * The name of selected user group
     */
    private String selectedGroupId;
    /**
     * The name of selected tab
     */
    private String selectedTab;
    /**
     * The status of check box "Display my operations only"
     */
    private boolean myOperationsOnly = true;
    /**
     * The status of check box "Display my processes only check box"
     */
    private boolean myProcessesOnly = true;
    // Price model selected from an external billing system.
    private PriceModel selectedExternalPriceModel;
    // Pending SAML logout request payload for the current session.
    private String samlLogoutRequest;
    // Tenant id the session was established for.
    private String tenantID;
    /** @return state of the "Display my operations only" check box */
    public boolean isMyOperationsOnly() {
        return myOperationsOnly;
    }
    /** @param myOperationsOnly new state of the "Display my operations only" check box */
    public void setMyOperationsOnly(boolean myOperationsOnly) {
        this.myOperationsOnly = myOperationsOnly;
    }
    /** @return state of the "Display my processes only" check box */
    public boolean isMyProcessesOnly() {
        return myProcessesOnly;
    }
    /** @param myProcessesOnly new state of the "Display my processes only" check box */
    public void setMyProcessesOnly(boolean myProcessesOnly) {
        this.myProcessesOnly = myProcessesOnly;
    }
    /**
     * Initial state - no service key set.
     * NOTE(review): "SERIVE" is a typo for "SERVICE"; the name is referenced
     * elsewhere, so renaming would need a coordinated change.
     */
    static int SERIVE_KEY_NOT_SET = 0;
    /**
     * A valid service key was given, but service could not be retrieved
     */
    static int SERIVE_KEY_ERROR = -1;
/**
* @return true if the browser of the current user is an internet explorer
*/
public boolean isIe() {
if (ie != null) {
return ie.booleanValue();
}
FacesContext context = FacesContext.getCurrentInstance();
if (context == null) {
return false;
}
Object obj = context.getExternalContext().getRequest();
if (!(obj instanceof HttpServletRequest)) {
return false;
}
HttpServletRequest request = (HttpServletRequest) obj;
// The user-agent string contains information about which
// browser is used to view the pages
String useragent = request.getHeader(USER_AGENT_HEADER);
if (useragent == null) {
ie = Boolean.FALSE;
return ie.booleanValue();
}
if (useragent.toLowerCase().contains("msie")) {
ie = Boolean.TRUE;
return ie.booleanValue();
}
// Check if browser is IE11
if (useragent.toLowerCase().contains("trident")
&& useragent.toLowerCase().contains("rv:11")) {
ie = Boolean.TRUE;
} else {
ie = Boolean.FALSE;
}
return ie.booleanValue();
}
/**
* @return true if the browser of the current user is an internet explorer
*/
public boolean isAutoOpenMpLogonDialog() {
final FacesContext context = getFacesContext();
if (context == null) {
return false;
}
final Object obj = context.getExternalContext().getRequest();
return obj instanceof HttpServletRequest && Boolean.TRUE.toString()
.equals(((ServletRequest) obj).getParameter(
Constants.REQ_PARAM_AUTO_OPEN_MP_LOGIN_DIALOG));
}
    /**
     * @return the servlet request of the current faces context; assumes a
     *         faces context exists and wraps an HTTP request
     */
    protected HttpServletRequest getRequest() {
        return (HttpServletRequest) FacesContext.getCurrentInstance()
                .getExternalContext().getRequest();
    }
    /** @return the width in pixels reserved for the navigation area */
    public int getNavWidth() {
        return navWidth;
    }
    /** @param width the width in pixels reserved for the navigation area */
    public void setNavWidth(int width) {
        this.navWidth = width;
    }
    /** @return the height in pixels reserved for the navigation area */
    public int getNavHeight() {
        return navHeight;
    }
    /** @param height the height in pixels reserved for the navigation area */
    public void setNavHeight(int height) {
        this.navHeight = height;
    }
    /**
     * @return a map resolving keys to table heights, computed by
     *         {@code TableHeightMap} from the navigation height and browser
     */
    public Map<Long, Long> getTableHeightMap() {
        return new TableHeightMap(navHeight, isIe());
    }
    /** @param selectedTechnicalServiceKey key of the last selected technical service */
    public void setSelectedTechnicalServiceKey(
            long selectedTechnicalServiceKey) {
        this.selectedTechnicalServiceKey = selectedTechnicalServiceKey;
    }
    /** @return key of the last selected technical service */
    public long getSelectedTechnicalServiceKey() {
        return selectedTechnicalServiceKey;
    }
    /** @param selectedServiceKey key of the last selected marketable service (supplier) */
    public void setSelectedServiceKeyForSupplier(Long selectedServiceKey) {
        this.selectedServiceKeyForSupplier = selectedServiceKey;
    }
    /** @return key of the last selected marketable service (supplier) */
    public Long getSelectedServiceKeyForSupplier() {
        return selectedServiceKeyForSupplier;
    }
    /** @param selectedCustomerId id of the last selected customer (supplier view) */
    public void setSelectedCustomerId(String selectedCustomerId) {
        this.selectedCustomerId = selectedCustomerId;
    }
    /** @return id of the last selected customer (supplier view) */
    public String getSelectedCustomerId() {
        return selectedCustomerId;
    }
    /**
     * Set the subscription id selected by the customer.
     *
     * @param selectedSubscriptionId
     *            the subscription id
     */
    public void setSelectedSubscriptionId(String selectedSubscriptionId) {
        this.selectedSubscriptionId = selectedSubscriptionId;
    }
    /**
     * Get the subscription id last selected by the customer.
     *
     * @return the subscription id
     */
    public String getSelectedSubscriptionId() {
        return selectedSubscriptionId;
    }
    /**
     * @return the selectedUserId
     */
    public String getSelectedUserId() {
        return selectedUserId;
    }
    /**
     * @param selectedUserId
     *            the selectedUserId to set
     */
    public void setSelectedUserId(String selectedUserId) {
        this.selectedUserId = selectedUserId;
    }
    /** @param subscribeToServiceKey key of the service the user wants to subscribe to */
    public void setSubscribeToServiceKey(Long subscribeToServiceKey) {
        this.subscribeToServiceKey = subscribeToServiceKey;
    }
    /** @return key of the service the user wants to subscribe to */
    public Long getSubscribeToServiceKey() {
        return subscribeToServiceKey;
    }
/**
 * Lazily resolves the service key selected by the customer. If the field is
 * still unset, the request parameter is inspected; when that is blank, the
 * key is restored from the temporary cookie written by
 * {@code setSelectedServiceKeyForCustomer} (session-timeout recovery).
 *
 * NOTE(review): when the request parameter IS present (non-blank) its value
 * is read but never parsed/assigned here — presumably it is applied by some
 * other component (e.g. a filter); confirm before changing this.
 *
 * @return the resolved service key, or SERIVE_KEY_NOT_SET if none was found
 */
public long determineSelectedServiceKeyForCustomer() {
    if (selectedServiceKeyForCustomer == SERIVE_KEY_NOT_SET) {
        HttpServletRequest httpRequest = getRequest();
        String key = httpRequest
                .getParameter(Constants.REQ_PARAM_SELECTED_SERVICE_KEY);
        if (ADMStringUtils.isBlank(key)) {
            // Bug 9466: Read the service key from temporary cookie
            // be able to continue a subscription in case of a possible
            // session timeout
            String serviceKeyVal = JSFUtils.getCookieValue(httpRequest,
                    Constants.REQ_PARAM_SERVICE_KEY);
            if (serviceKeyVal != null && serviceKeyVal.length() > 0)
                selectedServiceKeyForCustomer = Long
                        .parseLong(serviceKeyVal);
        }
    }
    return selectedServiceKeyForCustomer;
}
public void setServiceKeyForPayment(Long serviceKeyForPayment) {
    this.serviceKeyForPayment = serviceKeyForPayment;
}
public Long getServiceKeyForPayment() {
    return serviceKeyForPayment;
}
/** @return the plain field value; see determineSelectedServiceKeyForCustomer for lazy resolution */
public long getSelectedServiceKeyForCustomer() {
    return selectedServiceKeyForCustomer;
}
/**
 * Stores the service key selected by the customer and, when the key is valid,
 * also persists it in a temporary cookie so a subscription can be continued
 * after a session timeout (see Bug 9466 / determineSelectedServiceKeyForCustomer).
 */
public void setSelectedServiceKeyForCustomer(
        long selectedServiceKeyForCustomer) {
    this.selectedServiceKeyForCustomer = selectedServiceKeyForCustomer;
    if (isValidServiceKey(selectedServiceKeyForCustomer)) {
        try {
            final HttpServletResponse httpResponse = JSFUtils.getResponse();
            if (httpResponse != null) {
                // store the service key in a temporary cookie in order to
                // be able to continue a subscription in case of a possible
                // session timeout
                JSFUtils.setCookieValue(JSFUtils.getRequest(), httpResponse,
                        Constants.REQ_PARAM_SERVICE_KEY,
                        URLEncoder.encode(
                                Long.valueOf(selectedServiceKeyForCustomer)
                                        .toString(),
                                Constants.CHARACTER_ENCODING_UTF8),
                        -1); // -1: session cookie (not persisted on disk)
            }
        } catch (SaaSSystemException e) {
            // Faces context is not initialized, just return
            logger.logDebug(e.getMessage());
        } catch (UnsupportedEncodingException e) {
            // UTF-8 should always be available; log and continue without the cookie
            logger.logError(Log4jLogger.SYSTEM_LOG, e,
                    LogMessageIdentifier.ERROR_UNSUPPORTED_ENCODING);
        }
    }
}
/**
 * Returns the branding stylesheet URL for the current marketplace, resolving
 * it on first access and caching it per marketplace id. Falls back to the
 * white-label default when the marketplace defines no branding URL or no
 * longer exists.
 */
public String getMarketplaceBrandUrl() {
    String marketplaceBrandUrl = brandUrlMidMapping.get(getMarketplaceId());
    if (marketplaceBrandUrl == null) {
        try {
            marketplaceBrandUrl = getMarketplaceService()
                    .getBrandingUrl(getMarketplaceId());
            if (marketplaceBrandUrl == null) {
                marketplaceBrandUrl = getWhiteLabelBrandingUrl();
            }
        } catch (ObjectNotFoundException e) {
            // marketplace vanished: use the default branding
            marketplaceBrandUrl = getWhiteLabelBrandingUrl();
        }
        setMarketplaceBrandUrl(marketplaceBrandUrl);
    }
    return marketplaceBrandUrl;
}
/** Caches the branding URL under the current marketplace id. */
public void setMarketplaceBrandUrl(String marketplaceBrandUrl) {
    brandUrlMidMapping.put(getMarketplaceId(), marketplaceBrandUrl);
}
/** @return the tracking code cached for the current marketplace, or null */
public String getMarketplaceTrackingCode() {
    return trackingCodeMapping.get(getMarketplaceId());
}
public void setMarketplaceTrackingCode(String marketplaceTrackingCode) {
    trackingCodeMapping.put(getMarketplaceId(), marketplaceTrackingCode);
}
/** @return the marketplace id of the current request (static lookup on BaseBean) */
public String getMarketplaceId() {
    return BaseBean.getMarketplaceIdStatic();
}
// package-private, presumably to allow substitution in unit tests — confirm
FacesContext getFacesContext() {
    return FacesContext.getCurrentInstance();
}
/** @return the default (white-label) marketplace stylesheet path for this web app */
public String getWhiteLabelBrandingUrl() {
    return getFacesContext().getExternalContext().getRequestContextPath()
            + "/marketplace/css/mp.css";
}
/**
 * Checks if the error in the request header was also added to the faces
 * context. This method is used to avoid that the same error is rendered
 * twice.
 *
 * @return boolean
 */
public boolean isErrorMessageDuplicate() {
    FacesContext fc = FacesContext.getCurrentInstance();
    String errorKey = (String) getRequest()
            .getAttribute(Constants.REQ_ATTR_ERROR_KEY);
    // collect up to 5 positional message parameters (REQ_ATTR_ERROR_PARAM0..4)
    List<Object> params = new ArrayList<>();
    for (int i = 0; i < 5; i++) {
        Object param = getRequest()
                .getAttribute(Constants.REQ_ATTR_ERROR_PARAM + i);
        if (param != null) {
            params.add(param);
        }
    }
    // render the message the same way the header did, then look it up in the context
    String errorMessage = JSFUtils.getText(errorKey, params.toArray());
    return JSFUtils.existMessageInList(fc, errorMessage);
}
/** @return true if the current faces context contains warning messages */
public boolean isHasWarnings() {
    return JSFUtils.hasWarnings(FacesContext.getCurrentInstance());
}
/** Lazily looks up and caches the MarketplaceService for the current session. */
protected MarketplaceService getMarketplaceService() {
    if (marketplaceService == null) {
        marketplaceService = ServiceAccess
                .getServiceAcccessFor(JSFUtils.getRequest().getSession())
                .getService(MarketplaceService.class);
    }
    return marketplaceService;
}
/** Redirects the browser to the identity provider using the SAML logout request URL. */
public void redirectToIdpLogout() throws IOException {
    ExternalContext externalContext = getFacesContext().getExternalContext();
    externalContext.redirect(getSamlLogoutRequest());
}
public void setSelfRegistrationEnabled(Boolean selfRegistrationEnabled) {
    this.selfRegistrationEnabled = selfRegistrationEnabled;
}
public Boolean getSelfRegistrationEnabled() {
    return selfRegistrationEnabled;
}
/** @return whether the name display sequence is reversed (delegates to UiDelegate) */
public boolean getNameSequenceReversed() {
    return new UiDelegate().isNameSequenceReversed();
}
// a key is valid when it is neither the error nor the not-set sentinel
// ("SERIVE" [sic] matches the constants' existing names declared elsewhere)
static boolean isValidServiceKey(long key) {
    return ((key != SERIVE_KEY_ERROR) && (key != SERIVE_KEY_NOT_SET));
}
/**
 * @return the selectedGroupId
 */
public String getSelectedGroupId() {
    return selectedGroupId;
}
/**
 * @param selectedGroupId
 *            the selectedGroupId to set
 */
public void setSelectedGroupId(String selectedGroupId) {
    this.selectedGroupId = selectedGroupId;
}
/**
 * @return the selectedTab
 */
public String getSelectedTab() {
    return selectedTab;
}
/**
 * @param selectedTab
 *            the selectedTab to set
 */
public void setSelectedTab(String selectedTab) {
    this.selectedTab = selectedTab;
}
/**
 * @return - selected subscription key
 */
public long getSelectedSubscriptionKey() {
    return selectedSubscriptionKey;
}
/**
 * @param selectedSubscriptionKey
 *            - the selected subscription key
 */
public void setSelectedSubscriptionKey(long selectedSubscriptionKey) {
    this.selectedSubscriptionKey = selectedSubscriptionKey;
}
/** @return id of the user group currently selected in the UI */
public String getSelectedUserGroupId() {
    return selectedUserGroupId;
}
public void setSelectedUserGroupId(String selectedUserGroupId) {
    this.selectedUserGroupId = selectedUserGroupId;
}
/** @return id of the user currently selected for editing */
public String getSelectedUserIdToEdit() {
    return selectedUserIdToEdit;
}
public void setSelectedUserIdToEdit(String selectedUserIdToEdit) {
    this.selectedUserIdToEdit = selectedUserIdToEdit;
}
/** @return the external price model selected for import, or null */
public PriceModel getSelectedExternalPriceModel() {
    return selectedExternalPriceModel;
}
public void setSelectedExternalPriceModel(
        PriceModel selectedExternalPriceModel) {
    this.selectedExternalPriceModel = selectedExternalPriceModel;
}
public void setSamlLogoutRequest(String samlLogoutRequest) {
    this.samlLogoutRequest = samlLogoutRequest;
}
/**
 * Returns the SAML logout request, lazily restoring it from the session
 * attribute when it has not been cached on this bean yet.
 */
public String getSamlLogoutRequest() {
    if (samlLogoutRequest == null) {
        samlLogoutRequest = (String) new UiDelegate().getSession().getAttribute(SESSION_PARAM_SAML_LOGOUT_REQUEST);
    }
    return samlLogoutRequest;
}
/**
 * Resolves the tenant ID: first from the cached field, then from the
 * marketplace configuration/service, finally from the session attribute.
 *
 * @throws MarketplaceRemovedException
 *             if the current marketplace no longer exists
 */
public String getTenantID() throws MarketplaceRemovedException {
    if (StringUtils.isBlank(tenantID)) {
        tenantID = getTenantIDFromMarketplace();
    }
    if (StringUtils.isBlank(tenantID)) {
        tenantID = (String) new UiDelegate().getSession().getAttribute(REQ_PARAM_TENANT_ID);
    }
    return tenantID;
}
public void setTenantID(String tenantId) {
    this.tenantID = tenantId;
}
/**
 * Looks up the tenant ID for the current marketplace: first from the
 * marketplace cache, falling back to the marketplace service.
 * Note: the local variable shadows the field of the same name; only the
 * return value is used by the caller.
 *
 * @throws MarketplaceRemovedException
 *             if the marketplace id cannot be resolved any more
 */
private String getTenantIDFromMarketplace() throws MarketplaceRemovedException {
    String tenantID = null;
    String marketplaceId = getMarketplaceId();
    if (StringUtils.isNotBlank(marketplaceId)) {
        tenantID = mkpCache
                .getConfiguration(marketplaceId).getTenantId();
        if (StringUtils.isBlank(tenantID)) {
            try {
                tenantID = mkpService
                        .getMarketplaceById(marketplaceId).getTenantId();
            } catch (ObjectNotFoundException e) {
                throw new MarketplaceRemovedException();
            }
        }
    }
    return tenantID;
}
}
| |
package liquibase.datatype;
import liquibase.change.ColumnConfig;
import liquibase.database.Database;
import liquibase.database.core.OracleDatabase;
import liquibase.datatype.core.BigIntType;
import liquibase.datatype.core.CharType;
import liquibase.datatype.core.IntType;
import liquibase.datatype.core.UnknownType;
import liquibase.exception.ServiceNotFoundException;
import liquibase.exception.UnexpectedLiquibaseException;
import liquibase.servicelocator.ServiceLocator;
import liquibase.structure.core.DataType;
import liquibase.util.ObjectUtil;
import liquibase.util.StringUtils;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
/**
 * Singleton factory that maps data type names (and their aliases) to
 * {@link LiquibaseDataType} handler classes and converts textual or object
 * type descriptions into normalized Liquibase data types.
 */
public class DataTypeFactory {
    private static DataTypeFactory instance;
    // lower-cased type name -> handler classes, kept sorted by descending priority
    private Map<String, List<Class<? extends LiquibaseDataType>>> registry = new ConcurrentHashMap<>();
    /**
     * Build the factory registry from all classes in the classpath that implement
     * {@link LiquibaseDataType}
     */
    protected DataTypeFactory() {
        Class<? extends LiquibaseDataType>[] classes;
        try {
            classes = ServiceLocator.getInstance().findClasses(LiquibaseDataType.class);
            for (Class<? extends LiquibaseDataType> clazz : classes) {
                //noinspection unchecked
                register(clazz);
            }
        } catch (ServiceNotFoundException e) {
            throw new UnexpectedLiquibaseException(e);
        }
    }
    /**
     * Get this factory singleton
     * @return a reference to this factory
     */
    public static synchronized DataTypeFactory getInstance() {
        if (instance == null) {
            instance = new DataTypeFactory();
        }
        return instance;
    }
    /**
     * Discards the active factory and creates a new singleton instance.
     */
    public static synchronized void reset() {
        instance = new DataTypeFactory();
    }
    /**
     * Registers an implementation of {@link LiquibaseDataType} with both its name and all aliases for the data type
     * as a handler in the factory's registry. Classes implement the {@link LiquibaseDataType#getPriority()}, which will
     * cause the class with the highest priority to become the primary handler for the data type.
     * @param dataTypeClass the implementation to register
     */
    public void register(Class<? extends LiquibaseDataType> dataTypeClass) {
        try {
            LiquibaseDataType example = dataTypeClass.newInstance();
            List<String> names = new ArrayList<>();
            names.add(example.getName());
            names.addAll(Arrays.asList(example.getAliases()));
            // Sorts highest priority first (hence the negation). Note that both
            // classes are instantiated on every comparison just to read the priority.
            Comparator<Class<? extends LiquibaseDataType>> comparator = new Comparator<Class<? extends LiquibaseDataType>>() {
                @Override
                public int compare(Class<? extends LiquibaseDataType> o1, Class<? extends LiquibaseDataType> o2) {
                    try {
                        return -1 * Integer.valueOf(o1.newInstance().getPriority()).compareTo(o2.newInstance().getPriority());
                    } catch (Exception e) {
                        throw new UnexpectedLiquibaseException(e);
                    }
                }
            };
            // register the class under its primary name and every alias
            for (String name : names) {
                name = name.toLowerCase(Locale.US);
                if (registry.get(name) == null) {
                    registry.put(name, new ArrayList<>());
                }
                List<Class<? extends LiquibaseDataType>> classes = registry.get(name);
                classes.add(dataTypeClass);
                Collections.sort(classes, comparator);
            }
        } catch (Exception e) {
            throw new UnexpectedLiquibaseException(e);
        }
    }
    /**
     * Removes all handlers registered under the given (case-insensitive) type name.
     * @param name the type name to unregister
     */
    public void unregister(String name) {
        registry.remove(name.toLowerCase(Locale.US));
    }
    /**
     * Translates a column data type definition (e.g. varchar(255), java.sql.Types.NVARCHAR(10),
     * VARCHAR2(255 BYTE)... ) into a normalized data type in object form. Note that, due to variety of allowed ways
     * to specify a data type (SQL-Standard, Java type, native RDBMS type...), the dataTypeDefinition we receive for
     * processing may already be the native type for the target RDBMS.
     * @param dataTypeDefinition the definition from the changeSet
     * @param database the {@link Database} object from for which the native definition is to be generated
     * @return the corresponding Liquibase data type in object form.
     */
    public LiquibaseDataType fromDescription(String dataTypeDefinition, Database database) {
        if (dataTypeDefinition == null) {
            return null;
        }
        String dataTypeName = dataTypeDefinition;
        // Remove the first occurrence of (anything within parentheses). This will remove the size information from
        // most data types, e.g. VARCHAR2(255 CHAR) -> VARCHAR2. We will retrieve that length information again later,
        // but for the moment, we are only interested in the "naked" data type name.
        if (dataTypeName.matches(".+\\(.*\\).*")) {
            dataTypeName = dataTypeName.replaceFirst("\\s*\\(.*\\)", "");
        }
        // Remove everything { after the first opening curly bracket
        // e.g. int{autoIncrement:true}" -> "int"
        if (dataTypeName.matches(".+\\{.*")) {
            dataTypeName = dataTypeName.replaceFirst("\\s*\\{.*", "");
        }
        // If the remaining string ends with " identity", then remove the " identity" and remember than we want
        // to set the autoIncrement property later.
        boolean autoIncrement = false;
        if (dataTypeName.toLowerCase(Locale.US).endsWith(" identity")) {
            dataTypeName = dataTypeName.toLowerCase(Locale.US).replaceFirst(" identity$", "");
            autoIncrement = true;
        }
        // unquote delimited identifiers
        final String[][] quotePairs = new String[][] {
            { "\"", "\"" }, // double quotes
            { "[", "]" },   // square brackets (a la mssql)
            { "`", "`" },   // backticks (a la mysql)
            { "'", "'" }    // single quotes
        };
        for (String[] quotePair : quotePairs) {
            String openQuote = quotePair[0];
            String closeQuote = quotePair[1];
            if (dataTypeName.startsWith(openQuote)) {
                int indexOfCloseQuote = dataTypeName.indexOf(closeQuote, openQuote.length());
                // only strip the quotes when exactly one close quote exists after the open quote
                if ((indexOfCloseQuote != -1) && (dataTypeName.indexOf(closeQuote, indexOfCloseQuote + closeQuote
                    .length()) == -1)) {
                    dataTypeName = dataTypeName.substring(openQuote.length(), indexOfCloseQuote) +
                        dataTypeName.substring(indexOfCloseQuote + closeQuote.length(), dataTypeName.length());
                    break;
                }
            }
        }
        // record additional information that is still attached to the data type name
        String additionalInfo = null;
        if (dataTypeName.toLowerCase(Locale.US).startsWith("bit varying")
            || dataTypeName.toLowerCase(Locale.US).startsWith("character varying")) {
            // not going to do anything. Special case for postgres in our tests,
            // need to better support handling these types of differences
        } else {
            // Heuristic: from what we now have left of the data type name, everything after the first space
            // is counted as additional information.
            String[] splitTypeName = dataTypeName.trim().split("\\s+", 2);
            dataTypeName = splitTypeName[0];
            if (splitTypeName.length > 1) {
                additionalInfo = splitTypeName[1];
            }
        }
        // try to find matching classes for the data type name in our registry
        Collection<Class<? extends LiquibaseDataType>> classes = registry.get(dataTypeName.toLowerCase(Locale.US));
        LiquibaseDataType liquibaseDataType = null;
        if (classes == null) {
            // Map (date/time) INTERVAL types to the UnknownType
            if (dataTypeName.toUpperCase(Locale.US).startsWith("INTERVAL")) {
                liquibaseDataType = new UnknownType(dataTypeDefinition);
            } else {
                liquibaseDataType = new UnknownType(dataTypeName);
            }
        } else {
            // Iterate through the list (which is already sorted by priority) until we find a class
            // for this dataTypeName that supports the given database.
            Iterator<Class<? extends LiquibaseDataType>> iterator = classes.iterator();
            do {
                try {
                    liquibaseDataType = iterator.next().newInstance();
                } catch (Exception e) {
                    throw new UnexpectedLiquibaseException(e);
                }
            } while ((database != null) && !liquibaseDataType.supports(database) && iterator.hasNext());
            // after the loop, liquibaseDataType holds the last candidate tried,
            // whether or not it actually supports the database (checked below)
        }
        if ((database != null) && !liquibaseDataType.supports(database)) {
            throw new UnexpectedLiquibaseException("Could not find type for " + liquibaseDataType.toString() +
                " for DBMS "+database.getShortName());
        }
        // defensive: in practice the branches above always assign a value; kept as a safeguard
        if (liquibaseDataType == null) {
            liquibaseDataType = new UnknownType(dataTypeName);
        }
        liquibaseDataType.setAdditionalInformation(additionalInfo);
        // Does the type string have the form "some_data_type(additional,info,separated,by,commas)"?
        // If so, process these as additional data type parameters.
        if (dataTypeDefinition.matches(".+\\s*\\(.*")) {
            // Cut out the part between the first ()
            String paramStrings = dataTypeDefinition.replaceFirst(".*?\\(", "").replaceFirst("\\).*", "");
            String[] params = paramStrings.split(",");
            for (String param : params) {
                param = StringUtils.trimToNull(param);
                if (param != null) {
                    if ((liquibaseDataType instanceof CharType) && !(database instanceof OracleDatabase)) {
                        // TODO this might lead to wrong snapshot results in Oracle Database, because it assumes
                        // NLS_LENGTH_SEMANTICS=BYTE. If NLS_LENGTH_SEMANTICS=CHAR, we need to trim " CHAR" instead.
                        // not sure what else supports it:
                        param = param.replaceFirst(" BYTE", ""); //only use byte types on oracle,
                    }
                    liquibaseDataType.addParameter(param);
                }
            }
        }
        // Did the original definition have embedded information in curly braces, e.g.
        // "int{autoIncrement:true}"? If so, we will extract and process it now.
        if (dataTypeDefinition.matches(".*\\{.*")) {
            String paramStrings = dataTypeDefinition.replaceFirst(".*?\\{", "")
                .replaceFirst("\\}.*", "");
            String[] params = paramStrings.split(",");
            for (String param : params) {
                param = StringUtils.trimToNull(param);
                if (param != null) {
                    String[] paramAndValue = param.split(":", 2);
                    // TODO: A run-time exception will occur here if the user writes a property name into the
                    // data type which does not exist - but what else could we do in this case, except aborting?
                    ObjectUtil.setProperty(liquibaseDataType, paramAndValue[0], paramAndValue[1]);
                }
            }
        }
        // propagate the "identity" suffix detected earlier to the auto-increment capable int types
        if (autoIncrement && (liquibaseDataType instanceof IntType)) {
            ((IntType) liquibaseDataType).setAutoIncrement(true);
        }
        if (autoIncrement && (liquibaseDataType instanceof BigIntType)) {
            ((BigIntType) liquibaseDataType).setAutoIncrement(true);
        }
        liquibaseDataType.finishInitialization(dataTypeDefinition);
        return liquibaseDataType;
    }
    /**
     * Resolves the Liquibase data type matching the Java class of the given value.
     * ValueNumeric wrappers are unwrapped to their delegate first.
     */
    public LiquibaseDataType fromObject(Object object, Database database) {
        if (object instanceof ColumnConfig.ValueNumeric) {
            object = ((ColumnConfig.ValueNumeric) object).getDelegate();
        }
        return fromDescription(object.getClass().getName(), database);
    }
    /** Converts a snapshot {@link DataType} to a Liquibase data type (null-safe). */
    public LiquibaseDataType from(DataType type, Database database) {
        if (type == null) {
            return null;
        }
        return fromDescription(type.toString(), database);
    }
    /** Converts a {@link DatabaseDataType} to a Liquibase data type (null-safe). */
    public LiquibaseDataType from(DatabaseDataType type, Database database) {
        if (type == null) {
            return null;
        }
        return fromDescription(type.toString(), database);
    }
    /** @return the SQL literal for boolean TRUE on the given database */
    public String getTrueBooleanValue(Database database) {
        return fromDescription("boolean", database).objectToSql(true, database);
    }
    /** @return the SQL literal for boolean FALSE on the given database */
    public String getFalseBooleanValue(Database database) {
        return fromDescription("boolean", database).objectToSql(false, database);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kylin.engine.mr.common;
/**
* @author George Song (ysong1)
*
*/
import static org.apache.hadoop.util.StringUtils.formatTime;
import static org.apache.kylin.engine.mr.common.JobRelatedMetaUtil.collectCubeMetadata;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.Counters;
import org.apache.hadoop.mapreduce.InputFormat;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.util.ClassUtil;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.common.KylinConfig.SetAndUnsetThreadLocalConfig;
import org.apache.kylin.common.KylinConfigExt;
import org.apache.kylin.common.StorageURL;
import org.apache.kylin.common.util.CliCommandExecutor;
import org.apache.kylin.common.util.HadoopUtil;
import org.apache.kylin.common.util.OptionsHelper;
import org.apache.kylin.common.util.StringSplitter;
import org.apache.kylin.common.util.StringUtil;
import org.apache.kylin.cube.CubeInstance;
import org.apache.kylin.cube.CubeSegment;
import org.apache.kylin.cube.model.CubeDescTiretreeGlobalDomainDictUtil;
import org.apache.kylin.job.JobInstance;
import org.apache.kylin.job.exception.JobException;
import org.apache.kylin.metadata.model.TableDesc;
import org.apache.kylin.metadata.project.ProjectManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.kylin.shaded.com.google.common.collect.Maps;
@SuppressWarnings("static-access")
public abstract class AbstractHadoopJob extends Configured implements Tool {
private static final Logger logger = LoggerFactory.getLogger(AbstractHadoopJob.class);
// ---- command-line option definitions shared by the concrete MR job classes ----
protected static final Option OPTION_PROJECT = OptionBuilder.withArgName(BatchConstants.ARG_PROJECT).hasArg()
        .isRequired(true).withDescription("Project name.").create(BatchConstants.ARG_PROJECT);
protected static final Option OPTION_JOB_NAME = OptionBuilder.withArgName(BatchConstants.ARG_JOB_NAME).hasArg()
        .isRequired(true).withDescription("Job name. For example, Kylin_Cuboid_Builder-clsfd_v2_Step_22-D)")
        .create(BatchConstants.ARG_JOB_NAME);
protected static final Option OPTION_CUBE_NAME = OptionBuilder.withArgName(BatchConstants.ARG_CUBE_NAME).hasArg()
        .isRequired(true).withDescription("Cube name. For exmaple, flat_item_cube")
        .create(BatchConstants.ARG_CUBE_NAME);
protected static final Option OPTION_CUBING_JOB_ID = OptionBuilder.withArgName(BatchConstants.ARG_CUBING_JOB_ID)
        .hasArg().isRequired(false).withDescription("ID of cubing job executable")
        .create(BatchConstants.ARG_CUBING_JOB_ID);
// @Deprecated
protected static final Option OPTION_SEGMENT_NAME = OptionBuilder.withArgName(BatchConstants.ARG_SEGMENT_NAME)
        .hasArg().isRequired(true).withDescription("Cube segment name").create(BatchConstants.ARG_SEGMENT_NAME);
protected static final Option OPTION_SEGMENT_ID = OptionBuilder.withArgName(BatchConstants.ARG_SEGMENT_ID).hasArg()
        .isRequired(true).withDescription("Cube segment id").create(BatchConstants.ARG_SEGMENT_ID);
protected static final Option OPTION_INPUT_PATH = OptionBuilder.withArgName(BatchConstants.ARG_INPUT).hasArg()
        .isRequired(true).withDescription("Input path").create(BatchConstants.ARG_INPUT);
protected static final Option OPTION_INPUT_FORMAT = OptionBuilder.withArgName(BatchConstants.ARG_INPUT_FORMAT)
        .hasArg().isRequired(false).withDescription("Input format").create(BatchConstants.ARG_INPUT_FORMAT);
protected static final Option OPTION_OUTPUT_PATH = OptionBuilder.withArgName(BatchConstants.ARG_OUTPUT).hasArg()
        .isRequired(true).withDescription("Output path").create(BatchConstants.ARG_OUTPUT);
protected static final Option OPTION_DICT_PATH = OptionBuilder.withArgName(BatchConstants.ARG_DICT_PATH).hasArg()
        .isRequired(false).withDescription("Dict path").create(BatchConstants.ARG_DICT_PATH);
protected static final Option OPTION_NCUBOID_LEVEL = OptionBuilder.withArgName(BatchConstants.ARG_LEVEL).hasArg()
        .isRequired(true).withDescription("N-Cuboid build level, e.g. 1, 2, 3...").create(BatchConstants.ARG_LEVEL);
protected static final Option OPTION_PARTITION_FILE_PATH = OptionBuilder.withArgName(BatchConstants.ARG_PARTITION)
        .hasArg().isRequired(true).withDescription("Partition file path.").create(BatchConstants.ARG_PARTITION);
protected static final Option OPTION_HTABLE_NAME = OptionBuilder.withArgName(BatchConstants.ARG_HTABLE_NAME)
        .hasArg().isRequired(true).withDescription("HTable name").create(BatchConstants.ARG_HTABLE_NAME);
protected static final Option OPTION_DICTIONARY_SHRUNKEN_PATH = OptionBuilder
        .withArgName(BatchConstants.ARG_SHRUNKEN_DICT_PATH).hasArg().isRequired(false)
        .withDescription("Dictionary shrunken path").create(BatchConstants.ARG_SHRUNKEN_DICT_PATH);
protected static final Option OPTION_STATISTICS_OUTPUT = OptionBuilder.withArgName(BatchConstants.ARG_STATS_OUTPUT)
        .hasArg().isRequired(false).withDescription("Statistics output").create(BatchConstants.ARG_STATS_OUTPUT);
protected static final Option OPTION_STATISTICS_SAMPLING_PERCENT = OptionBuilder
        .withArgName(BatchConstants.ARG_STATS_SAMPLING_PERCENT).hasArg().isRequired(false)
        .withDescription("Statistics sampling percentage").create(BatchConstants.ARG_STATS_SAMPLING_PERCENT);
protected static final Option OPTION_CUBOID_MODE = OptionBuilder.withArgName(BatchConstants.ARG_CUBOID_MODE)
        .hasArg().isRequired(false).withDescription("Cuboid Mode").create(BatchConstants.ARG_CUBOID_MODE);
protected static final Option OPTION_NEED_UPDATE_BASE_CUBOID_SHARD = OptionBuilder
        .withArgName(BatchConstants.ARG_UPDATE_SHARD).hasArg().isRequired(false)
        .withDescription("If need to update base cuboid shard").create(BatchConstants.ARG_UPDATE_SHARD);
protected static final Option OPTION_TABLE_NAME = OptionBuilder.withArgName(BatchConstants.ARG_TABLE_NAME).hasArg().isRequired(true).withDescription("Table name. For exmaple, default.table1").create(BatchConstants.ARG_TABLE_NAME);
protected static final Option OPTION_LOOKUP_SNAPSHOT_ID = OptionBuilder.withArgName(BatchConstants.ARG_LOOKUP_SNAPSHOT_ID).hasArg()
        .isRequired(true).withDescription("Lookup table snapshotID")
        .create(BatchConstants.ARG_LOOKUP_SNAPSHOT_ID);
protected static final Option OPTION_META_URL = OptionBuilder.withArgName(BatchConstants.ARG_META_URL)
        .hasArg().isRequired(true).withDescription("HDFS metadata url").create(BatchConstants.ARG_META_URL);
public static final Option OPTION_HBASE_CONF_PATH = OptionBuilder.withArgName(BatchConstants.ARG_HBASE_CONF_PATH).hasArg()
        .isRequired(true).withDescription("HBase config file path").create(BatchConstants.ARG_HBASE_CONF_PATH);
// Hadoop configuration key holding the MR application classpath
private static final String MAP_REDUCE_CLASSPATH = "mapreduce.application.classpath";
// cache of KylinConfig instances keyed by metadata URL
private static final Map<String, KylinConfig> kylinConfigCache = Maps.newConcurrentMap();
/**
 * Runs the given tool via Hadoop's ToolRunner and terminates the JVM with the
 * tool's exit code, or 5 on any uncaught exception.
 */
protected static void runJob(Tool job, String[] args) {
    try {
        int exitCode = ToolRunner.run(job, args);
        System.exit(exitCode);
    } catch (Exception e) {
        e.printStackTrace(System.err);
        System.exit(5);
    }
}
// ============================================================================
// job name, used in usage output
protected String name;
// when true, submit the job and return without waiting for completion
protected boolean isAsync = false;
protected OptionsHelper optionsHelper = new OptionsHelper();
// the underlying MR job, populated by the concrete subclass
protected Job job;
/** Creates the job pre-configured with the current Hadoop configuration. */
public AbstractHadoopJob() {
    super(HadoopUtil.getCurrentConfiguration());
}
/** Parses the command-line args against the given options (delegates to OptionsHelper). */
protected void parseOptions(Options options, String[] args) throws ParseException {
    optionsHelper.parseOptions(options, args);
}
/** Prints a usage message for this job's options. */
public void printUsage(Options options) {
    optionsHelper.printUsage(getClass().getSimpleName(), options);
}
public Option[] getOptions() {
    return optionsHelper.getOptions();
}
public String getOptionsAsString() {
    return optionsHelper.getOptionsAsString();
}
protected String getOptionValue(Option option) {
    return optionsHelper.getOptionValue(option);
}
protected boolean hasOption(Option option) {
    return optionsHelper.hasOption(option);
}
/**
 * Submits the job; unless {@code isAsync} is set, blocks until it finishes.
 *
 * @return 0 on success (always 0 in async mode), 1 on failure
 */
protected int waitForCompletion(Job job) throws IOException, InterruptedException, ClassNotFoundException {
    int retVal = 0;
    long start = System.nanoTime();
    if (isAsync) {
        job.submit();
    } else {
        job.waitForCompletion(true);
        retVal = job.isSuccessful() ? 0 : 1;
        // nanoTime delta / 1e6 yields elapsed milliseconds, as formatTime expects
        logger.debug("Job '" + job.getJobName() + "' finished "
                + (job.isSuccessful() ? "successfully in " : "with failures. Time taken ")
                + formatTime((System.nanoTime() - start) / 1000000L));
    }
    return retVal;
}
/**
 * Configures the job's jar/classpath and registers extra runtime dependencies
 * (hive and kafka jars, the additional MR lib dir) as tmpjars/tmpfiles.
 * Dependency locations come from system properties when set, otherwise they
 * are discovered from the driver's classpath.
 */
protected void setJobClasspath(Job job, KylinConfig kylinConf) {
    String jarPath = kylinConf.getKylinJobJarPath();
    File jarFile = new File(jarPath);
    // prefer the explicitly configured job jar; fall back to the jar containing this class
    if (jarFile.exists()) {
        job.setJar(jarPath);
        logger.trace("append job jar: " + jarPath);
    } else {
        job.setJarByClass(this.getClass());
    }
    String kylinHiveDependency = System.getProperty("kylin.hive.dependency");
    String kylinKafkaDependency = System.getProperty("kylin.kafka.dependency");
    Configuration jobConf = job.getConfiguration();
    if (kylinConf.isUseLocalClasspathEnabled()) {
        String classpath = jobConf.get(MAP_REDUCE_CLASSPATH);
        if (classpath == null || classpath.length() == 0) {
            logger.info("Didn't find " + MAP_REDUCE_CLASSPATH
                    + " in job configuration, will run 'mapred classpath' to get the default value.");
            // getDefaultMapRedClasspath() is defined elsewhere in this class
            classpath = getDefaultMapRedClasspath();
            logger.info("The default mapred classpath is: " + classpath);
        }
        jobConf.set(MAP_REDUCE_CLASSPATH, classpath);
    }
    logger.trace("Hadoop job classpath is: " + job.getConfiguration().get(MAP_REDUCE_CLASSPATH));
    /*
     *  set extra dependencies as tmpjars & tmpfiles if configured
     */
    StringBuilder kylinDependency = new StringBuilder();
    // for hive dependencies
    if (kylinHiveDependency != null) {
        // yarn classpath is comma separated
        kylinHiveDependency = kylinHiveDependency.replace(":", ",");
        logger.trace("Hive Dependencies Before Filtered: " + kylinHiveDependency);
        String filteredHive = filterKylinHiveDependency(kylinHiveDependency, kylinConf);
        logger.trace("Hive Dependencies After Filtered: " + filteredHive);
        StringUtil.appendWithSeparator(kylinDependency, filteredHive);
    } else {
        logger.debug("No hive dependency jars set in the environment, will find them from classpath:");
        try {
            // locate the hive jars by well-known classes they contain
            String hiveExecJarPath = ClassUtil.findContainingJar(Class.forName("org.apache.hadoop.hive.ql.Driver"));
            StringUtil.appendWithSeparator(kylinDependency, hiveExecJarPath);
            logger.debug("hive-exec jar file: " + hiveExecJarPath);
            String hiveHCatJarPath = ClassUtil
                    .findContainingJar(Class.forName("org.apache.hive.hcatalog.mapreduce.HCatInputFormat"));
            StringUtil.appendWithSeparator(kylinDependency, hiveHCatJarPath);
            logger.debug("hive-catalog jar file: " + hiveHCatJarPath);
            String hiveMetaStoreJarPath = ClassUtil
                    .findContainingJar(Class.forName("org.apache.hadoop.hive.metastore.api.Table"));
            StringUtil.appendWithSeparator(kylinDependency, hiveMetaStoreJarPath);
            logger.debug("hive-metastore jar file: " + hiveMetaStoreJarPath);
        } catch (ClassNotFoundException e) {
            logger.error("Cannot found hive dependency jars: " + e);
        }
    }
    // for kafka dependencies
    if (kylinKafkaDependency != null) {
        kylinKafkaDependency = kylinKafkaDependency.replace(":", ",");
        logger.trace("Kafka Dependencies: " + kylinKafkaDependency);
        StringUtil.appendWithSeparator(kylinDependency, kylinKafkaDependency);
    } else {
        logger.debug("No Kafka dependency jar set in the environment, will find them from classpath:");
        try {
            String kafkaClientJarPath = ClassUtil
                    .findContainingJar(Class.forName("org.apache.kafka.clients.consumer.KafkaConsumer"));
            StringUtil.appendWithSeparator(kylinDependency, kafkaClientJarPath);
            logger.debug("kafka jar file: " + kafkaClientJarPath);
        } catch (ClassNotFoundException e) {
            // kafka is optional (only needed for streaming builds)
            logger.debug("Not found kafka client jar from classpath, it is optional for normal build: " + e);
        }
    }
    // for KylinJobMRLibDir
    String mrLibDir = kylinConf.getKylinJobMRLibDir();
    logger.trace("MR additional lib dir: " + mrLibDir);
    StringUtil.appendWithSeparator(kylinDependency, mrLibDir);
    setJobTmpJarsAndFiles(job, kylinDependency.toString());
}
/**
 * Keeps only the classpath entries of {@code kylinHiveDependency} that match
 * the configured hive dependency filter pattern.
 *
 * @param kylinHiveDependency comma-separated hive classpath entries
 * @param config              source of the filter regex
 * @return comma-separated list of accepted entries; empty string for blank input
 */
private String filterKylinHiveDependency(String kylinHiveDependency, KylinConfig config) {
    if (StringUtils.isBlank(kylinHiveDependency)) {
        return "";
    }
    Pattern filter = Pattern.compile(config.getHiveDependencyFilterList());
    Matcher entryMatcher = filter.matcher(kylinHiveDependency);
    StringBuilder accepted = new StringBuilder();
    while (entryMatcher.find()) {
        if (accepted.length() > 0) {
            accepted.append(",");
        }
        accepted.append(entryMatcher.group());
    }
    return accepted.toString();
}
    /**
     * Registers the given comma-separated dependency list on the job as
     * distributed-cache entries: *.jar paths go to "tmpjars", everything else
     * to "tmpfiles". Relative paths are skipped, directories are expanded
     * recursively, and each entry is resolved against HDFS first, then the
     * local file system.
     */
    private void setJobTmpJarsAndFiles(Job job, String kylinDependency) {
        if (StringUtils.isBlank(kylinDependency))
            return;
        logger.trace("setJobTmpJarsAndFiles: " + kylinDependency);
        try {
            Configuration jobConf = job.getConfiguration();
            FileSystem localfs = FileSystem.getLocal(jobConf);
            FileSystem hdfs = HadoopUtil.getWorkingFileSystem(jobConf);
            StringBuilder jarList = new StringBuilder();
            StringBuilder fileList = new StringBuilder();
            for (String fileName : StringUtil.splitAndTrim(kylinDependency, ",")) {
                Path p = new Path(fileName);
                if (p.isAbsolute() == false) {
                    logger.warn("The directory of kylin dependency '" + fileName + "' is not absolute, skip");
                    continue;
                }
                // Prefer HDFS; fall back to the local file system.
                FileSystem fs;
                if (exists(hdfs, p)) {
                    fs = hdfs;
                } else if (exists(localfs, p)) {
                    fs = localfs;
                } else {
                    logger.warn("The directory of kylin dependency '" + fileName + "' does not exist, skip");
                    continue;
                }
                // Directories are walked recursively into the two lists.
                if (fs.getFileStatus(p).isDirectory()) {
                    logger.trace("Expanding depedency directory: " + p);
                    appendTmpDir(job, fs, p, jarList, fileList);
                    continue;
                }
                // Jars and plain files feed separate distributed-cache settings.
                StringBuilder list = (p.getName().endsWith(".jar")) ? jarList : fileList;
                if (list.length() > 0)
                    list.append(",");
                list.append(fs.getFileStatus(p).getPath());
            }
            appendTmpFiles(fileList.toString(), jobConf);
            appendTmpJars(jarList.toString(), jobConf);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }
private void appendTmpDir(Job job, FileSystem fs, Path tmpDir, StringBuilder jarList, StringBuilder fileList) {
try {
FileStatus[] fList = fs.listStatus(tmpDir);
for (FileStatus file : fList) {
Path p = file.getPath();
if (fs.getFileStatus(p).isDirectory()) {
appendTmpDir(job, fs, p, jarList, fileList);
continue;
}
StringBuilder list = (p.getName().endsWith(".jar")) ? jarList : fileList;
if (list.length() > 0)
list.append(",");
list.append(fs.getFileStatus(p).getPath().toString());
}
} catch (IOException e) {
throw new RuntimeException(e);
}
}
private void appendTmpJars(String jarList, Configuration conf) {
if (StringUtils.isBlank(jarList))
return;
String tmpJars = conf.get("tmpjars", null);
if (tmpJars == null) {
tmpJars = jarList;
} else {
tmpJars += "," + jarList;
}
conf.set("tmpjars", tmpJars);
logger.trace("Job 'tmpjars' updated -- " + tmpJars);
}
private void appendTmpFiles(String fileList, Configuration conf) {
if (StringUtils.isBlank(fileList))
return;
String tmpFiles = conf.get("tmpfiles", null);
if (tmpFiles == null) {
tmpFiles = fileList;
} else {
tmpFiles += "," + fileList;
}
conf.set("tmpfiles", tmpFiles);
logger.trace("Job 'tmpfiles' updated -- " + tmpFiles);
}
private String getDefaultMapRedClasspath() {
String classpath = "";
try {
CliCommandExecutor executor = KylinConfig.getInstanceFromEnv().getCliCommandExecutor();
String output = executor.execute("mapred classpath").getSecond();
classpath = output.trim().replace(':', ',');
} catch (IOException e) {
logger.error("Failed to run: 'mapred classpath'.", e);
}
return classpath;
}
    /**
     * Existence check that returns false instead of propagating the
     * IllegalArgumentException thrown when the path belongs to a different
     * file system than {@code fs}.
     */
    private static boolean exists(FileSystem fs, Path p) throws IOException {
        try {
            return fs.exists(p);
        } catch (IllegalArgumentException ex) {
            // can happen when FS mismatch
            return false;
        }
    }
    /**
     * Splits the comma-separated input spec and adds each entry as a job
     * input directory.
     *
     * @return the number of folders actually added
     */
    public static int addInputDirs(String input, Job job) throws IOException {
        int folderNum = addInputDirs(StringSplitter.split(input, ","), job);
        logger.info("Number of added folders:" + folderNum);
        return folderNum;
    }
    /**
     * Adds the given paths as job inputs. A trailing "/*" means expand: every
     * sub-directory whose name does not start with "_" is added recursively;
     * if the folder contains only files (no sub-directories), the folder
     * itself is added. Non-existent paths are logged and skipped.
     *
     * @return the number of folders added
     */
    public static int addInputDirs(String[] inputs, Job job) throws IOException {
        int ret = 0;//return number of added folders
        for (String inp : inputs) {
            inp = inp.trim();
            if (inp.endsWith("/*")) {
                inp = inp.substring(0, inp.length() - 2);
                FileSystem fs = HadoopUtil.getWorkingFileSystem(job.getConfiguration());
                Path path = new Path(inp);
                if (!exists(fs, path)) {
                    logger.warn("Path not exist:" + path.toString());
                    continue;
                }
                FileStatus[] fileStatuses = fs.listStatus(path);
                boolean hasDir = false;
                for (FileStatus stat : fileStatuses) {
                    // "_" prefix marks Hadoop-internal folders (e.g. _SUCCESS side files).
                    if (stat.isDirectory() && !stat.getPath().getName().startsWith("_")) {
                        hasDir = true;
                        ret += addInputDirs(new String[] { stat.getPath().toString() }, job);
                    }
                }
                // Leaf folder containing only files: add the folder itself.
                if (fileStatuses.length > 0 && !hasDir) {
                    ret += addInputDirs(new String[] { path.toString() }, job);
                }
            } else {
                logger.trace("Add input " + inp);
                FileInputFormat.addInputPath(job, new Path(inp));
                ret++;
            }
        }
        return ret;
    }
    /**
     * Points KylinConfig at the local "meta" directory (shipped to the task's
     * working dir via the distributed cache). On first call this sets the
     * kylin.conf system property and switches the metadata URL to an "ifile"
     * store rooted at that directory — a process-wide side effect. Subsequent
     * calls (property already set) just return the environment instance.
     */
    public static KylinConfig loadKylinPropsAndMetadata() throws IOException {
        File metaDir = new File("meta");
        if (!metaDir.getAbsolutePath().equals(System.getProperty(KylinConfig.KYLIN_CONF))) {
            System.setProperty(KylinConfig.KYLIN_CONF, metaDir.getAbsolutePath());
            logger.info("The absolute path for meta dir is " + metaDir.getAbsolutePath());
            KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
            Map<String, String> paramsMap = new HashMap<>();
            paramsMap.put("path", metaDir.getAbsolutePath());
            StorageURL storageURL = new StorageURL(kylinConfig.getMetadataUrl().getIdentifier(), "ifile", paramsMap);
            kylinConfig.setMetadataUrl(storageURL.toString());
            return kylinConfig;
        } else {
            return KylinConfig.getInstanceFromEnv();
        }
    }
    /**
     * Installs the given Hadoop configuration as current, loads KylinConfig
     * from the HDFS meta url, and binds it as the thread-local config.
     * The returned auto-close handle is intentionally never closed — see the
     * inline comment below.
     */
    public static KylinConfig loadKylinConfigFromHdfs(SerializableConfiguration conf, String uri) {
        HadoopUtil.setCurrentConfiguration(conf.get());
        KylinConfig config = loadKylinConfigFromHdfs(uri);
        // This is a bad example where the thread local KylinConfig cannot be auto-closed due to
        // limitation of MR API. It works because MR task runs its own process. Do not copy.
        @SuppressWarnings("unused")
        SetAndUnsetThreadLocalConfig shouldAutoClose = KylinConfig.setAndUnsetThreadLocalConfig(config);
        return config;
    }
public static KylinConfig loadKylinConfigFromHdfs(String uri) {
if (uri == null)
throw new IllegalArgumentException("meta url should not be null");
if (!uri.contains("@hdfs"))
throw new IllegalArgumentException("meta url should like @hdfs schema");
if (kylinConfigCache.get(uri) != null) {
logger.info("KylinConfig cached for : {}", uri);
return kylinConfigCache.get(uri);
}
logger.info("Ready to load KylinConfig from uri: {}", uri);
KylinConfig config;
FileSystem fs;
String realHdfsPath = StorageURL.valueOf(uri).getParameter("path") + "/" + KylinConfig.KYLIN_CONF_PROPERTIES_FILE;
try {
fs = HadoopUtil.getFileSystem(realHdfsPath);
InputStream is = fs.open(new Path(realHdfsPath));
Properties prop = KylinConfig.streamToProps(is);
config = KylinConfig.createKylinConfig(prop);
} catch (IOException e) {
throw new RuntimeException(e);
}
kylinConfigCache.put(uri, config);
return config;
}
    /**
     * Dumps the metadata of a single table (just its resource path) into the
     * job's distributed cache via dumpKylinPropsAndMetadata.
     */
    protected void attachTableMetadata(TableDesc table, Configuration conf) throws IOException {
        Set<String> dumpList = new LinkedHashSet<>();
        dumpList.add(table.getResourcePath());
        dumpKylinPropsAndMetadata(table.getProject(), dumpList, KylinConfig.getInstanceFromEnv(), conf);
    }
    /**
     * Dumps the cube's metadata (no dictionaries, no statistics) into the
     * job's distributed cache.
     */
    protected void attachCubeMetadata(CubeInstance cube, Configuration conf) throws IOException {
        dumpKylinPropsAndMetadata(cube.getProject(), collectCubeMetadata(cube), cube.getConfig(),
                conf);
    }
    /**
     * Dumps the cube's metadata plus the dictionary paths of every segment
     * into the job's distributed cache.
     */
    protected void attachCubeMetadataWithDict(CubeInstance cube, Configuration conf) throws IOException {
        Set<String> dumpList = new LinkedHashSet<>(collectCubeMetadata(cube));
        for (CubeSegment segment : cube.getSegments()) {
            dumpList.addAll(segment.getDictionaryPaths());
        }
        dumpKylinPropsAndMetadata(cube.getProject(), dumpList, cube.getConfig(), conf);
    }
    /**
     * Dumps the metadata of the segments' cube plus each segment's dictionary
     * paths into the job's distributed cache. All segments are assumed to
     * belong to the same cube (taken from the first segment).
     */
    protected void attachSegmentsMetadataWithDict(List<CubeSegment> segments, Configuration conf) throws IOException {
        CubeInstance cube = segments.get(0).getCubeInstance();
        Set<String> dumpList = new LinkedHashSet<>(collectCubeMetadata(cube));
        for (CubeSegment segment : segments) {
            dumpList.addAll(segment.getDictionaryPaths());
        }
        dumpKylinPropsAndMetadata(cube.getProject(), dumpList, cube.getConfig(), conf);
    }
    /**
     * Variant that uploads cube metadata, dictionary paths and statistics of
     * the given segments directly to the meta url (instead of the job's
     * distributed cache). All segments are assumed to belong to the same cube.
     */
    protected void attachSegmentsMetadataWithDict(List<CubeSegment> segments, String metaUrl) throws IOException {
        Set<String> dumpList = new LinkedHashSet<>(JobRelatedMetaUtil.collectCubeMetadata(segments.get(0).getCubeInstance()));
        for (CubeSegment segment : segments) {
            dumpList.addAll(segment.getDictionaryPaths());
            dumpList.add(segment.getStatisticsResourcePath());
        }
        JobRelatedMetaUtil.dumpAndUploadKylinPropsAndMetadata(dumpList, (KylinConfigExt) segments.get(0).getConfig(), metaUrl);
    }
    /** Dumps segment metadata including dictionaries but not statistics. */
    protected void attachSegmentMetadataWithDict(CubeSegment segment, Configuration conf) throws IOException {
        attachSegmentMetadata(segment, conf, true, false);
    }
    /** Dumps segment metadata including both dictionaries and statistics. */
    protected void attachSegmentMetadataWithAll(CubeSegment segment, Configuration conf) throws IOException {
        attachSegmentMetadata(segment, conf, true, true);
    }
    /**
     * Dumps the segment's cube metadata into the job's distributed cache,
     * optionally including dictionary paths and the statistics resource, plus
     * any tiretree global domain dictionary resources.
     *
     * @param ifDictIncluded  include segment dictionary paths
     * @param ifStatsIncluded include the segment statistics resource
     */
    protected void attachSegmentMetadata(CubeSegment segment, Configuration conf, boolean ifDictIncluded,
            boolean ifStatsIncluded) throws IOException {
        Set<String> dumpList = new LinkedHashSet<>(collectCubeMetadata(segment.getCubeInstance()));
        if (ifDictIncluded) {
            dumpList.addAll(segment.getDictionaryPaths());
        }
        if (ifStatsIncluded) {
            dumpList.add(segment.getStatisticsResourcePath());
        }
        //tiretree global domain dic
        CubeDescTiretreeGlobalDomainDictUtil.cuboidJob(segment.getCubeDesc(), dumpList);
        dumpKylinPropsAndMetadata(segment.getProject(), dumpList, segment.getConfig(), conf);
    }
protected void dumpKylinPropsAndMetadata(String prj, Set<String> dumpList, KylinConfig kylinConfig,
Configuration conf) throws IOException {
File tmp = File.createTempFile("kylin_job_meta", "");
FileUtils.forceDelete(tmp); // we need a directory, so delete the file first
File metaDir = new File(tmp, "meta");
metaDir.mkdirs();
// write kylin.properties
File kylinPropsFile = new File(metaDir, "kylin.properties");
kylinConfig.exportToFile(kylinPropsFile);
if (prj != null) {
dumpList.add(ProjectManager.getInstance(kylinConfig).getProject(prj).getResourcePath());
}
if (prj != null) {
dumpList.add(ProjectManager.getInstance(kylinConfig).getProject(prj).getResourcePath());
}
// write resources
JobRelatedMetaUtil.dumpResources(kylinConfig, metaDir, dumpList);
// hadoop distributed cache
String hdfsMetaDir = OptionsHelper.convertToFileURL(metaDir.getAbsolutePath());
if (hdfsMetaDir.startsWith("/")) // note Path on windows is like "d:/../..."
hdfsMetaDir = "file://" + hdfsMetaDir;
else
hdfsMetaDir = "file:///" + hdfsMetaDir;
logger.info("HDFS meta dir is: " + hdfsMetaDir);
appendTmpFiles(hdfsMetaDir, conf);
}
protected void cleanupTempConfFile(Configuration conf) {
String[] tempfiles = StringUtils.split(conf.get("tmpfiles"), ",");
if (tempfiles == null) {
return;
}
for (String tempMetaFileString : tempfiles) {
logger.trace("tempMetaFileString is : " + tempMetaFileString);
if (tempMetaFileString != null) {
if (tempMetaFileString.startsWith("file://")) {
tempMetaFileString = tempMetaFileString.substring("file://".length());
File tempMetaFile = new File(tempMetaFileString);
if (tempMetaFile.exists()) {
try {
FileUtils.forceDelete(tempMetaFile.getParentFile());
} catch (IOException e) {
logger.warn("error when deleting " + tempMetaFile, e);
}
} else {
logger.info("" + tempMetaFileString + " does not exist");
}
} else {
logger.info("tempMetaFileString is not starting with file:// :" + tempMetaFileString);
}
}
}
}
    /** Deletes the given path via HadoopUtil (convenience wrapper). */
    protected void deletePath(Configuration conf, Path path) throws IOException {
        HadoopUtil.deletePath(conf, path);
    }
public static double getTotalMapInputMB(Job job)
throws ClassNotFoundException, IOException, InterruptedException, JobException {
if (job == null) {
throw new JobException("Job is null");
}
long mapInputBytes = 0;
InputFormat<?, ?> input = ReflectionUtils.newInstance(job.getInputFormatClass(), job.getConfiguration());
for (InputSplit split : input.getSplits(job)) {
mapInputBytes += split.getLength();
}
// 0 input bytes is possible when the segment range hits no partition on a partitioned hive table (KYLIN-2470)
if (mapInputBytes == 0) {
logger.warn("Map input splits are 0 bytes, something is wrong?");
}
double totalMapInputMB = (double) mapInputBytes / 1024 / 1024;
return totalMapInputMB;
}
    /** Instance convenience wrapper around {@link #getTotalMapInputMB(Job)}. */
    protected double getTotalMapInputMB()
            throws ClassNotFoundException, IOException, InterruptedException, JobException {
        return getTotalMapInputMB(job);
    }
    /**
     * Returns the number of map input splits for this job.
     *
     * @throws JobException when job is null
     */
    protected int getMapInputSplitCount()
            throws ClassNotFoundException, JobException, IOException, InterruptedException {
        if (job == null) {
            throw new JobException("Job is null");
        }
        InputFormat<?, ?> input = ReflectionUtils.newInstance(job.getInputFormatClass(), job.getConfiguration());
        return input.getSplits(job).size();
    }
    /**
     * Kills the underlying MR job if one has been created; no-op otherwise.
     *
     * @throws JobException wrapping any IOException from the kill request
     */
    public void kill() throws JobException {
        if (job != null) {
            try {
                job.killJob();
            } catch (IOException e) {
                throw new JobException(e);
            }
        }
    }
public Map<String, String> getInfo() throws JobException {
if (job != null) {
Map<String, String> status = new HashMap<String, String>();
if (null != job.getJobID()) {
status.put(JobInstance.MR_JOB_ID, job.getJobID().toString());
}
if (null != job.getTrackingURL()) {
status.put(JobInstance.YARN_APP_URL, job.getTrackingURL().toString());
}
return status;
} else {
throw new JobException("Job is null");
}
}
    /**
     * Returns the Hadoop counters of the running job.
     *
     * @throws JobException when job is null or the counters cannot be fetched
     */
    public Counters getCounters() throws JobException {
        if (job != null) {
            try {
                return job.getCounters();
            } catch (IOException e) {
                throw new JobException(e);
            }
        } else {
            throw new JobException("Job is null");
        }
    }
    /** Sets whether the job should be submitted asynchronously. */
    public void setAsync(boolean isAsync) {
        this.isAsync = isAsync;
    }
    /** @return the underlying Hadoop Job, or null if none was created yet */
    public Job getJob() {
        return this.job;
    }
    // tells MapReduceExecutable to skip this job
    /** @return false by default (job is executed) */
    public boolean isSkipped() {
        return false;
    }
    /**
     * Normalizes the configuration via HadoopUtil.healSickConfig before
     * storing it on the parent Configured.
     */
    @Override
    public void setConf(Configuration conf) {
        Configuration healSickConf = HadoopUtil.healSickConfig(conf);
        super.setConf(healSickConf);
    }
}
| |
/*
* Copyright (C) 2010 Klaus Reimer <k@ailis.de>
* See LICENSE.txt for licensing information.
*/
package de.ailis.threedee.scene.physics;
import java.io.Serializable;
import de.ailis.gramath.MutableMatrix4f;
import de.ailis.gramath.MutableVector3f;
import de.ailis.threedee.scene.SceneNode;
/**
 * Physics state (linear and angular velocity, acceleration and deceleration)
 * of a scene node, with per-frame integration methods.
 *
 * @author Klaus Reimer (k@ailis.de)
 */
public class Physics implements Serializable
{
    /** Serial version UID */
    private static final long serialVersionUID = 1L;

    /** The velocity. */
    private final MutableVector3f velocity = new MutableVector3f();

    /** The minimum velocity. */
    private final MutableVector3f minVelocity = new MutableVector3f(
        Float.NEGATIVE_INFINITY, Float.NEGATIVE_INFINITY,
        Float.NEGATIVE_INFINITY);

    /** The maximum velocity. */
    private final MutableVector3f maxVelocity = new MutableVector3f(
        Float.POSITIVE_INFINITY, Float.POSITIVE_INFINITY,
        Float.POSITIVE_INFINITY);

    /** The acceleration. */
    private final MutableVector3f acceleration = new MutableVector3f();

    /**
     * The deceleration.
     * NOTE(review): unlike spinDeceleration, this field is never applied in
     * updateVelocity() — only exposed via its getter. Preserved as-is;
     * confirm whether that omission is intentional.
     */
    private final MutableVector3f deceleration = new MutableVector3f();

    /** The spin velocity. */
    private final MutableVector3f spinVelocity = new MutableVector3f();

    /** The minimum spin velocity. */
    private final MutableVector3f minSpinVelocity = new MutableVector3f(
        Float.NEGATIVE_INFINITY, Float.NEGATIVE_INFINITY,
        Float.NEGATIVE_INFINITY);

    /** The maximum spin velocity. */
    private final MutableVector3f maxSpinVelocity = new MutableVector3f(
        Float.POSITIVE_INFINITY, Float.POSITIVE_INFINITY,
        Float.POSITIVE_INFINITY);

    /** The spin acceleration. */
    private final MutableVector3f spinAcceleration = new MutableVector3f();

    /** The spin deceleration. */
    private final MutableVector3f spinDeceleration = new MutableVector3f();

    /**
     * Returns the spin velocity.
     *
     * @return The spin velocity.
     */
    public MutableVector3f getSpinVelocity()
    {
        return this.spinVelocity;
    }

    /**
     * Returns the velocity.
     *
     * @return The velocity.
     */
    public MutableVector3f getVelocity()
    {
        return this.velocity;
    }

    /**
     * Returns the min velocity.
     *
     * @return The min velocity.
     */
    public MutableVector3f getMinVelocity()
    {
        return this.minVelocity;
    }

    /**
     * Returns the max velocity.
     *
     * @return The max velocity.
     */
    public MutableVector3f getMaxVelocity()
    {
        return this.maxVelocity;
    }

    /**
     * Returns the acceleration.
     *
     * @return The acceleration
     */
    public MutableVector3f getAcceleration()
    {
        return this.acceleration;
    }

    /**
     * Returns the deceleration.
     *
     * @return The deceleration
     */
    public MutableVector3f getDeceleration()
    {
        return this.deceleration;
    }

    /**
     * Returns the min spin velocity.
     *
     * @return The min spin velocity.
     */
    public MutableVector3f getMinSpinVelocity()
    {
        return this.minSpinVelocity;
    }

    /**
     * Returns the max spin velocity.
     *
     * @return The max spin velocity.
     */
    public MutableVector3f getMaxSpinVelocity()
    {
        return this.maxSpinVelocity;
    }

    /**
     * Returns the spin acceleration.
     *
     * @return The spin acceleration.
     */
    public MutableVector3f getSpinAcceleration()
    {
        return this.spinAcceleration;
    }

    /**
     * Returns the spin deceleration.
     *
     * @return The spin deceleration.
     */
    public MutableVector3f getSpinDeceleration()
    {
        return this.spinDeceleration;
    }

    /**
     * Applies acceleration to a velocity component and clamps the result to
     * [min, max]. Matches the original per-axis logic exactly: when the
     * acceleration is 0, the velocity is returned unchanged and NOT clamped.
     *
     * @return The new velocity component
     */
    private static float accelerate(final float velocity,
        final float acceleration, final float min, final float max,
        final float delta)
    {
        if (acceleration == 0) return velocity;
        return Math.min(max, Math.max(min, velocity + acceleration * delta));
    }

    /**
     * Applies deceleration to a velocity component, moving it towards zero
     * without overshooting. A deceleration of 0 leaves the component
     * unchanged; the sign of the deceleration is ignored (absolute value).
     *
     * @return The new velocity component
     */
    private static float decelerate(final float velocity,
        final float deceleration, final float delta)
    {
        final float amount = Math.abs(deceleration);
        if (amount == 0) return velocity;
        if (velocity < 0) return Math.min(0, velocity + amount * delta);
        if (velocity > 0) return Math.max(0, velocity - amount * delta);
        return velocity;
    }

    /**
     * Updates the spin physics: rotates the node by the current spin velocity
     * and then integrates spin acceleration (clamped) and spin deceleration.
     * Refactored to share the per-axis math with updateVelocity() via
     * accelerate()/decelerate(); the computed values are identical to the
     * original inline code.
     *
     * @param node
     *            The node to update
     * @param delta
     *            The time delta in seconds
     * @return True if the scene needs to be rendered again, false if not
     */
    public boolean updateSpin(final SceneNode node, final float delta)
    {
        final MutableMatrix4f matrix = node.getTransform();

        // Get the spin values
        float x = this.spinVelocity.getX();
        float y = this.spinVelocity.getY();
        float z = this.spinVelocity.getZ();

        // Apply the spin
        if (x != 0) matrix.rotateX(x * delta);
        if (y != 0) matrix.rotateY(y * delta);
        if (z != 0) matrix.rotateZ(z * delta);

        // Apply spin acceleration
        x = accelerate(x, this.spinAcceleration.getX(),
            this.minSpinVelocity.getX(), this.maxSpinVelocity.getX(), delta);
        y = accelerate(y, this.spinAcceleration.getY(),
            this.minSpinVelocity.getY(), this.maxSpinVelocity.getY(), delta);
        z = accelerate(z, this.spinAcceleration.getZ(),
            this.minSpinVelocity.getZ(), this.maxSpinVelocity.getZ(), delta);

        // Apply spin deceleration
        x = decelerate(x, this.spinDeceleration.getX(), delta);
        y = decelerate(y, this.spinDeceleration.getY(), delta);
        z = decelerate(z, this.spinDeceleration.getZ(), delta);

        // Set the values
        this.spinVelocity.set(x, y, z);

        // Check if there is still movement
        return !this.spinVelocity.isNull() || !this.spinAcceleration.isNull();
    }

    /**
     * Updates the velocity physics: translates the node by the current
     * velocity and then integrates the acceleration (clamped). As in the
     * original, the linear deceleration field is NOT applied here.
     *
     * @param node
     *            The node to update
     * @param delta
     *            The time delta in seconds
     * @return True if the scene needs to be rendered again, false if not
     */
    public boolean updateVelocity(final SceneNode node, final float delta)
    {
        final MutableMatrix4f matrix = node.getTransform();

        // Get velocity values
        float x = this.velocity.getX();
        float y = this.velocity.getY();
        float z = this.velocity.getZ();

        // Apply the velocity
        matrix.translate(x * delta, y * delta, z * delta);

        // Apply acceleration
        x = accelerate(x, this.acceleration.getX(), this.minVelocity.getX(),
            this.maxVelocity.getX(), delta);
        y = accelerate(y, this.acceleration.getY(), this.minVelocity.getY(),
            this.maxVelocity.getY(), delta);
        z = accelerate(z, this.acceleration.getZ(), this.minVelocity.getZ(),
            this.maxVelocity.getZ(), delta);

        // Set new velocity
        this.velocity.set(x, y, z);

        // Check if there is still movement
        return !this.velocity.isNull() || !this.acceleration.isNull();
    }

    /**
     * Updates the specified node with this physics data.
     *
     * @param node
     *            The scene node to update
     * @param delta
     *            The time delta in seconds
     * @return True if the scene needs to be rendered again, false if not
     */
    public boolean update(final SceneNode node, final float delta)
    {
        boolean changed = false;
        changed |= updateSpin(node, delta);
        changed |= updateVelocity(node, delta);
        return changed;
    }
}
| |
package com.athenatics.ruleengine.web.rest;
import com.codahale.metrics.annotation.Timed;
import com.athenatics.ruleengine.domain.PersistentToken;
import com.athenatics.ruleengine.domain.User;
import com.athenatics.ruleengine.repository.PersistentTokenRepository;
import com.athenatics.ruleengine.repository.UserRepository;
import com.athenatics.ruleengine.security.SecurityUtils;
import com.athenatics.ruleengine.service.MailService;
import com.athenatics.ruleengine.service.UserService;
import com.athenatics.ruleengine.service.dto.UserDTO;
import com.athenatics.ruleengine.web.rest.vm.KeyAndPasswordVM;
import com.athenatics.ruleengine.web.rest.vm.ManagedUserVM;
import com.athenatics.ruleengine.web.rest.util.HeaderUtil;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import javax.servlet.http.HttpServletRequest;
import javax.validation.Valid;
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.util.*;
/**
 * REST controller for managing the current user's account.
 *
 * Cleanups over the original: raw {@code ResponseEntity} return types are now
 * parameterized (raw generics defeat the compiler's type checking); behavior
 * is otherwise unchanged.
 */
@RestController
@RequestMapping("/api")
public class AccountResource {

    private final Logger log = LoggerFactory.getLogger(AccountResource.class);

    private final UserRepository userRepository;

    private final UserService userService;

    private final MailService mailService;

    private final PersistentTokenRepository persistentTokenRepository;

    public AccountResource(UserRepository userRepository, UserService userService,
            MailService mailService, PersistentTokenRepository persistentTokenRepository) {
        this.userRepository = userRepository;
        this.userService = userService;
        this.mailService = mailService;
        this.persistentTokenRepository = persistentTokenRepository;
    }

    /**
     * POST /register : register the user.
     *
     * @param managedUserVM the managed user View Model
     * @return the ResponseEntity with status 201 (Created) if the user is registered or 400 (Bad Request) if the login or email is already in use
     */
    @PostMapping(path = "/register",
        produces={MediaType.APPLICATION_JSON_VALUE, MediaType.TEXT_PLAIN_VALUE})
    @Timed
    public ResponseEntity<String> registerAccount(@Valid @RequestBody ManagedUserVM managedUserVM) {
        HttpHeaders textPlainHeaders = new HttpHeaders();
        textPlainHeaders.setContentType(MediaType.TEXT_PLAIN);
        // NOTE(review): the login is lowercased for the lookup but the email is
        // not, while createUser stores the email lowercased — confirm whether
        // findOneByEmail should also receive a lowercased value.
        return userRepository.findOneByLogin(managedUserVM.getLogin().toLowerCase())
            .map(user -> new ResponseEntity<>("login already in use", textPlainHeaders, HttpStatus.BAD_REQUEST))
            .orElseGet(() -> userRepository.findOneByEmail(managedUserVM.getEmail())
                .map(user -> new ResponseEntity<>("email address already in use", textPlainHeaders, HttpStatus.BAD_REQUEST))
                .orElseGet(() -> {
                    User user = userService
                        .createUser(managedUserVM.getLogin(), managedUserVM.getPassword(),
                            managedUserVM.getFirstName(), managedUserVM.getLastName(),
                            managedUserVM.getEmail().toLowerCase(), managedUserVM.getImageUrl(), managedUserVM.getLangKey());
                    mailService.sendActivationEmail(user);
                    return new ResponseEntity<>(HttpStatus.CREATED);
                })
            );
    }

    /**
     * GET /activate : activate the registered user.
     *
     * @param key the activation key
     * @return the ResponseEntity with status 200 (OK) and the activated user in body, or status 500 (Internal Server Error) if the user couldn't be activated
     */
    @GetMapping("/activate")
    @Timed
    public ResponseEntity<String> activateAccount(@RequestParam(value = "key") String key) {
        return userService.activateRegistration(key)
            .map(user -> new ResponseEntity<String>(HttpStatus.OK))
            .orElse(new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR));
    }

    /**
     * GET /authenticate : check if the user is authenticated, and return its login.
     *
     * @param request the HTTP request
     * @return the login if the user is authenticated
     */
    @GetMapping("/authenticate")
    @Timed
    public String isAuthenticated(HttpServletRequest request) {
        log.debug("REST request to check if the current user is authenticated");
        return request.getRemoteUser();
    }

    /**
     * GET /account : get the current user.
     *
     * @return the ResponseEntity with status 200 (OK) and the current user in body, or status 500 (Internal Server Error) if the user couldn't be returned
     */
    @GetMapping("/account")
    @Timed
    public ResponseEntity<UserDTO> getAccount() {
        return Optional.ofNullable(userService.getUserWithAuthorities())
            .map(user -> new ResponseEntity<>(new UserDTO(user), HttpStatus.OK))
            .orElse(new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR));
    }

    /**
     * POST /account : update the current user information.
     *
     * @param userDTO the current user information
     * @return the ResponseEntity with status 200 (OK), or status 400 (Bad Request) or 500 (Internal Server Error) if the user couldn't be updated
     */
    @PostMapping("/account")
    @Timed
    public ResponseEntity<?> saveAccount(@Valid @RequestBody UserDTO userDTO) {
        // Reject the update when the email already belongs to a different login.
        Optional<User> existingUser = userRepository.findOneByEmail(userDTO.getEmail());
        if (existingUser.isPresent() && (!existingUser.get().getLogin().equalsIgnoreCase(userDTO.getLogin()))) {
            return ResponseEntity.badRequest().headers(HeaderUtil.createFailureAlert("user-management", "emailexists", "Email already in use")).body(null);
        }
        return userRepository
            .findOneByLogin(SecurityUtils.getCurrentUserLogin())
            .map(u -> {
                userService.updateUser(userDTO.getFirstName(), userDTO.getLastName(), userDTO.getEmail(),
                    userDTO.getLangKey(), userDTO.getImageUrl());
                return new ResponseEntity<>(HttpStatus.OK);
            })
            .orElseGet(() -> new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR));
    }

    /**
     * POST /account/change_password : changes the current user's password
     *
     * @param password the new password
     * @return the ResponseEntity with status 200 (OK), or status 400 (Bad Request) if the new password is not strong enough
     */
    @PostMapping(path = "/account/change_password",
        produces = MediaType.TEXT_PLAIN_VALUE)
    @Timed
    public ResponseEntity<String> changePassword(@RequestBody String password) {
        if (!checkPasswordLength(password)) {
            return new ResponseEntity<>("Incorrect password", HttpStatus.BAD_REQUEST);
        }
        userService.changePassword(password);
        return new ResponseEntity<>(HttpStatus.OK);
    }

    /**
     * GET /account/sessions : get the current open sessions.
     *
     * @return the ResponseEntity with status 200 (OK) and the current open sessions in body,
     * or status 500 (Internal Server Error) if the current open sessions couldn't be retrieved
     */
    @GetMapping("/account/sessions")
    @Timed
    public ResponseEntity<List<PersistentToken>> getCurrentSessions() {
        return userRepository.findOneByLogin(SecurityUtils.getCurrentUserLogin())
            .map(user -> new ResponseEntity<>(
                persistentTokenRepository.findByUser(user),
                HttpStatus.OK))
            .orElse(new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR));
    }

    /**
     * DELETE /account/sessions?series={series} : invalidate an existing session.
     *
     * - You can only delete your own sessions, not any other user's session
     * - If you delete one of your existing sessions, and that you are currently logged in on that session, you will
     *   still be able to use that session, until you quit your browser: it does not work in real time (there is
     *   no API for that), it only removes the "remember me" cookie
     * - This is also true if you invalidate your current session: you will still be able to use it until you close
     *   your browser or that the session times out. But automatic login (the "remember me" cookie) will not work
     *   anymore.
     *   There is an API to invalidate the current session, but there is no API to check which session uses which
     *   cookie.
     *
     * @param series the series of an existing session
     * @throws UnsupportedEncodingException if the series couldnt be URL decoded
     */
    @DeleteMapping("/account/sessions/{series}")
    @Timed
    public void invalidateSession(@PathVariable String series) throws UnsupportedEncodingException {
        String decodedSeries = URLDecoder.decode(series, "UTF-8");
        // Only delete the token when it actually belongs to the current user.
        userRepository.findOneByLogin(SecurityUtils.getCurrentUserLogin()).ifPresent(u ->
            persistentTokenRepository.findByUser(u).stream()
                .filter(persistentToken -> StringUtils.equals(persistentToken.getSeries(), decodedSeries))
                .findAny().ifPresent(t -> persistentTokenRepository.delete(decodedSeries)));
    }

    /**
     * POST /account/reset_password/init : Send an email to reset the password of the user
     *
     * @param mail the mail of the user
     * @return the ResponseEntity with status 200 (OK) if the email was sent, or status 400 (Bad Request) if the email address is not registered
     */
    @PostMapping(path = "/account/reset_password/init",
        produces = MediaType.TEXT_PLAIN_VALUE)
    @Timed
    public ResponseEntity<String> requestPasswordReset(@RequestBody String mail) {
        return userService.requestPasswordReset(mail)
            .map(user -> {
                mailService.sendPasswordResetMail(user);
                return new ResponseEntity<>("email was sent", HttpStatus.OK);
            }).orElse(new ResponseEntity<>("email address not registered", HttpStatus.BAD_REQUEST));
    }

    /**
     * POST /account/reset_password/finish : Finish to reset the password of the user
     *
     * @param keyAndPassword the generated key and the new password
     * @return the ResponseEntity with status 200 (OK) if the password has been reset,
     * or status 400 (Bad Request) or 500 (Internal Server Error) if the password could not be reset
     */
    @PostMapping(path = "/account/reset_password/finish",
        produces = MediaType.TEXT_PLAIN_VALUE)
    @Timed
    public ResponseEntity<String> finishPasswordReset(@RequestBody KeyAndPasswordVM keyAndPassword) {
        if (!checkPasswordLength(keyAndPassword.getNewPassword())) {
            return new ResponseEntity<>("Incorrect password", HttpStatus.BAD_REQUEST);
        }
        return userService.completePasswordReset(keyAndPassword.getNewPassword(), keyAndPassword.getKey())
            .map(user -> new ResponseEntity<String>(HttpStatus.OK))
            .orElse(new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR));
    }

    /** Validates the password against the min/max length bounds from ManagedUserVM. */
    private boolean checkPasswordLength(String password) {
        return !StringUtils.isEmpty(password) &&
            password.length() >= ManagedUserVM.PASSWORD_MIN_LENGTH &&
            password.length() <= ManagedUserVM.PASSWORD_MAX_LENGTH;
    }
}
| |
/**
* Copyright (c) 2013-2014. Francisco Contreras, Holland Salazar.
* Copyright (c) 2015. Tobias Strebitzer, Francisco Contreras, Holland Salazar.
* All rights reserved.
* <p/>
* Redistribution and use in source and binary forms, with or without modification, are
* permitted provided that the following conditions are met:
* <p/>
* Redistributions of source code must retain the above copyright notice, this list of
* conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of
* conditions and the following disclaimer in the documentation and/or other materials
* provided with the distribution.
* Neither the name of the Baker Framework nor the names of its contributors may be used to
* endorse or promote products derived from this software without specific prior written
* permission.
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
* SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.bakerframework.baker.handler;
import android.util.Log;
import com.bakerframework.baker.BakerApplication;
import com.bakerframework.baker.R;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.net.ConnectException;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Date;
public class DownloadHandler {
    // Source URL plus a cache-busting "?v=<epoch-seconds>" suffix (see constructor).
    private final String url;
    // Destination file; only set by download(), stays null for read().
    private File targetFile;
    // True once the transfer finished or cancel() was called.
    private boolean completed = false;
    private int percentComplete;
    private HttpURLConnection connection = null;
    // assumed to be the server-reported content length set in prepareDownload() — TODO confirm
    private long totalBytes;
    private InputStream inputStream;
public DownloadHandler(String url) {
int i = (int) (new Date().getTime()/1000);
this.url = url+"?v="+i;
this.percentComplete = 0;
}
public void download(File targetFile) throws Exception {
this.targetFile = targetFile;
Log.i("MLC-APP "+this.getClass().getName(), targetFile.getAbsolutePath());
try {
createTargetFile();
prepareDownload();
downloadToFile();
}catch (ConnectException e) {
throw new Exception(BakerApplication.getInstance().getString(R.string.err_download_task_connect));
}catch (MalformedURLException e) {
throw new Exception(BakerApplication.getInstance().getString(R.string.err_download_task_malformed_url));
}catch (FileNotFoundException e) {
throw new Exception(BakerApplication.getInstance().getString(R.string.err_download_task_file_not_found));
}catch (IOException e) {
throw new Exception(BakerApplication.getInstance().getString(R.string.err_download_task_io));
}finally {
this.cleanup();
}
}
public String read() throws Exception {
try {
prepareDownload();
return readAsString();
}catch (ConnectException e) {
throw new Exception(BakerApplication.getInstance().getString(R.string.err_download_task_connect));
}catch (MalformedURLException e) {
throw new Exception(BakerApplication.getInstance().getString(R.string.err_download_task_malformed_url));
}catch (FileNotFoundException e) {
throw new Exception(BakerApplication.getInstance().getString(R.string.err_download_task_file_not_found));
}catch (IOException e) {
throw new Exception(BakerApplication.getInstance().getString(R.string.err_download_task_io));
}finally {
this.cleanup();
}
}
public void cancel() {
this.completed = true;
}
public boolean isCompleted() {
return completed;
}
private void prepareDownload() throws Exception {
// Prepare Download
URL url = new URL(this.url);
connection = (HttpURLConnection) url.openConnection();
connection.addRequestProperty("Cache-Control", "no-cache");
connection.setUseCaches(false);
// Prepare streams
totalBytes = connection.getContentLength();
inputStream = connection.getInputStream();
}
private void downloadToFile() throws IOException {
// Download to file
OutputStream output = new FileOutputStream(targetFile);
// Transfer variables
long bytesSoFar = 0;
int progress;
int bytesRead;
byte[] buffer = new byte[4096];
// Download stream
while ((bytesRead = inputStream.read(buffer)) != -1) {
// Check if the task was cancelled
if (this.isCompleted()) {
this.deleteTargetFile();
break;
}
// Check for read bytes
if (bytesRead > 0) {
// Update progress
if (totalBytes != -1) {
bytesSoFar = bytesSoFar + bytesRead;
progress = (int) ((float) bytesSoFar / totalBytes * 100);
if (progress > percentComplete) {
percentComplete = progress;
onDownloadProgress(percentComplete, bytesSoFar, totalBytes);
}
}
// Write to file
output.write(buffer, 0, bytesRead);
}
}
// Close output
output.close();
}
private String readAsString() throws IOException {
// Download to string
StringBuilder sb = new StringBuilder();
String line;
BufferedReader br = new BufferedReader( new InputStreamReader(inputStream));
while ((line = br.readLine()) != null) {
sb.append(line);
}
return sb.toString();
}
private boolean deleteTargetFile() {
if(targetFile != null && targetFile.exists()) {
return targetFile.delete();
}else{
return false;
}
}
private void createTargetFile() throws Exception {
// Create directory structure
if(!targetFile.getParentFile().exists() || !targetFile.getParentFile().isDirectory()) {
targetFile.getParentFile().mkdirs();
}
if(!targetFile.exists()) {
targetFile.createNewFile();
}
if(!targetFile.exists()) {
throw new Exception("Unable to create target file");
}
}
private void cleanup() {
if(connection != null) {
connection.disconnect();
connection = null;
}
}
public void onDownloadProgress(int percentComplete, long bytesSoFar, long totalBytes) {
}
}
| |
/**
* Appcelerator Titanium Mobile
* Copyright (c) 2009-2018 by Axway, Inc. All Rights Reserved.
* Licensed under the terms of the Apache Public License
* Please see the LICENSE included with this distribution for details.
*/
package org.appcelerator.titanium;
import java.util.Arrays;
import java.util.LinkedList;
import org.appcelerator.kroll.KrollDict;
import org.appcelerator.kroll.KrollExceptionHandler;
import org.appcelerator.kroll.KrollRuntime;
import org.appcelerator.kroll.common.AsyncResult;
import org.appcelerator.kroll.common.CurrentActivityListener;
import org.appcelerator.kroll.common.Log;
import org.appcelerator.kroll.common.TiMessenger;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.DialogInterface.OnClickListener;
import android.graphics.Rect;
import android.graphics.Typeface;
import android.os.Handler;
import android.os.Message;
import android.os.Process;
import android.text.InputType;
import android.text.method.ScrollingMovementMethod;
import android.view.Window;
import android.widget.LinearLayout;
import android.widget.RelativeLayout;
import android.widget.Scroller;
import android.widget.TextView;
/**
 * A utility class for creating a dialog that displays Javascript errors.
 * Errors are formatted into text via {@link #getError(KrollDict)}; dialogs are
 * always shown on the UI thread (marshalled through {@link #mainHandler}).
 */
public class TiExceptionHandler implements Handler.Callback, KrollExceptionHandler
{
    private static final String TAG = "TiExceptionHandler";
    private static final int MSG_OPEN_ERROR_DIALOG = 10011;
    // Errors that arrived while a dialog was already visible; drained one at a
    // time from the dialog's click listener. Accessed on the UI thread.
    private static LinkedList<ExceptionMessage> errorMessages = new LinkedList<ExceptionMessage>();
    private static boolean dialogShowing = false;
    private static Handler mainHandler;
    // Keys of the KrollDict produced by getErrorDict()
    public static final String ERROR_TITLE = "title";
    public static final String ERROR_MESSAGE = "message";
    public static final String ERROR_SOURCENAME = "sourceName";
    public static final String ERROR_LINE = "line";
    public static final String ERROR_LINESOURCE = "lineSource";
    public static final String ERROR_LINEOFFSET = "lineOffset";
    public static final String ERROR_JS_STACK = "javascriptStack";
    public static final String ERROR_JAVA_STACK = "javaStack";

    /** Returns a string of {@code count} spaces (used to position the caret). */
    private static final String fill(int count)
    {
        // Guard against negative counts (e.g. lineOffset of 0 in getError());
        // new char[-1] would throw NegativeArraySizeException.
        char[] string = new char[Math.max(count, 0)];
        Arrays.fill(string, ' ');
        return new String(string);
    }

    /** Flattens an ExceptionMessage into a KrollDict using the ERROR_* keys. */
    public static final KrollDict getErrorDict(ExceptionMessage error)
    {
        final KrollDict dict = new KrollDict();
        dict.put(ERROR_TITLE, error.title);
        dict.put(ERROR_MESSAGE, error.message);
        dict.put(ERROR_SOURCENAME, error.sourceName);
        dict.put(ERROR_LINE, error.line);
        dict.put(ERROR_LINESOURCE, error.lineSource);
        dict.put(ERROR_LINEOFFSET, error.lineOffset);
        dict.put(ERROR_JS_STACK, error.jsStack);
        dict.put(ERROR_JAVA_STACK, error.javaStack);
        return dict;
    }

    /**
     * Formats an error dict into the multi-line text shown in the dialog:
     * source location, offending line with a caret, message, and stack traces.
     */
    public static String getError(KrollDict error)
    {
        // Build with StringBuilder instead of repeated String concatenation.
        final StringBuilder output = new StringBuilder();
        final String sourceName = error.getString(ERROR_SOURCENAME);
        final int line = error.getInt(ERROR_LINE);
        final String lineSource = error.getString(ERROR_LINESOURCE);
        final int lineOffset = error.getInt(ERROR_LINEOFFSET);
        final String jsStack = error.getString(ERROR_JS_STACK);
        final String javaStack = error.getString(ERROR_JAVA_STACK);
        final String message = error.getString(ERROR_MESSAGE);
        if (sourceName != null) {
            output.append(sourceName).append(":").append(line).append("\n");
        }
        if (lineSource != null) {
            output.append(lineSource).append("\n");
            output.append(fill(lineOffset - 1)).append("^\n");
        }
        // sometimes the stacktrace can include the error
        // don't re-print the error if that is the case
        if (jsStack != null) {
            if (!jsStack.contains("Error:")) {
                output.append(message).append("\n");
            }
            output.append(jsStack).append("\n");
        } else {
            output.append(message).append("\n");
        }
        if (javaStack != null) {
            output.append(javaStack);
            // no java stack, attempt to obtain last ten stack entries
            // omitting our error handling entries
        } else {
            StackTraceElement[] trace = new Error().getStackTrace();
            int startIndex = 0;
            for (StackTraceElement e : trace) {
                startIndex++;
                if (e.getMethodName().equals("dispatchException")) {
                    break;
                }
            }
            int endIndex = startIndex + 10;
            for (int i = startIndex; trace.length >= endIndex && i < endIndex; i++) {
                output.append("\n ").append(trace[i].toString());
            }
        }
        return output.toString();
    }

    public TiExceptionHandler()
    {
        mainHandler = new Handler(TiMessenger.getMainMessenger().getLooper(), this);
    }

    /** Opens the error dialog, marshalling to the UI thread when necessary. */
    public void openErrorDialog(ExceptionMessage error)
    {
        if (TiApplication.isUIThread()) {
            handleOpenErrorDialog(error);
        } else {
            // Blocks until the UI thread has processed the message.
            TiMessenger.sendBlockingMainMessage(mainHandler.obtainMessage(MSG_OPEN_ERROR_DIALOG), error);
        }
    }

    /**
     * Fires the "uncaughtException" app event, logs the error, and — except in
     * production builds — shows (or queues) the error dialog.
     */
    protected static void handleOpenErrorDialog(final ExceptionMessage error)
    {
        final TiApplication tiApp = TiApplication.getInstance();
        if (tiApp == null) {
            return;
        }
        final Activity activity = tiApp.getRootOrCurrentActivity();
        if (activity == null || activity.isFinishing()) {
            return;
        }
        final KrollDict dict = getErrorDict(error);
        tiApp.fireAppEvent("uncaughtException", dict);
        Log.e(TAG, getError(dict));
        // Never show the developer dialog in production builds.
        if (tiApp.getDeployType().equals(TiApplication.DEPLOY_TYPE_PRODUCTION)) {
            return;
        }
        if (!dialogShowing) {
            dialogShowing = true;
            tiApp.waitForCurrentActivity(new CurrentActivityListener() {
                @Override
                public void onCurrentActivityReady(Activity activity)
                {
                    createDialog(dict);
                }
            });
        } else {
            // A dialog is already up; queue this error for later display.
            errorMessages.add(error);
        }
    }

    /** Builds and shows the scrollable error dialog for the given error dict. */
    protected static void createDialog(final KrollDict error)
    {
        final TiApplication tiApp = TiApplication.getInstance();
        if (tiApp == null) {
            return;
        }
        final Context context = tiApp.getCurrentActivity();
        if (context == null) {
            // No foreground activity to attach the dialog to. Clear the flag so
            // future errors are not blocked forever (original would NPE here).
            dialogShowing = false;
            return;
        }
        final TextView errorView = new TextView(context);
        errorView.setBackgroundColor(0xFFF5F5F5);
        errorView.setTextColor(0xFFE53935);
        errorView.setPadding(5, 5, 5, 5);
        errorView.setLayoutParams(new LinearLayout.LayoutParams(LinearLayout.LayoutParams.MATCH_PARENT,
                                                                LinearLayout.LayoutParams.MATCH_PARENT));
        errorView.setInputType(InputType.TYPE_TEXT_FLAG_MULTI_LINE);
        errorView.setSingleLine(false);
        errorView.setScroller(new Scroller(context));
        errorView.setVerticalScrollBarEnabled(true);
        errorView.setHorizontallyScrolling(true);
        errorView.setHorizontalScrollBarEnabled(true);
        errorView.setMovementMethod(new ScrollingMovementMethod());
        errorView.setTypeface(Typeface.MONOSPACE);
        errorView.setText(getError(error));
        final RelativeLayout layout = new RelativeLayout(context);
        layout.setPadding(0, 50, 0, 0);
        RelativeLayout.LayoutParams layoutParams = new RelativeLayout.LayoutParams(
            RelativeLayout.LayoutParams.MATCH_PARENT, RelativeLayout.LayoutParams.MATCH_PARENT);
        layout.setLayoutParams(layoutParams);
        layout.addView(errorView);
        final OnClickListener clickListener = new OnClickListener() {
            public void onClick(DialogInterface dialog, int which)
            {
                dialogShowing = false;
                // "Kill" terminates the app process outright.
                if (which == DialogInterface.BUTTON_POSITIVE) {
                    Process.killProcess(Process.myPid());
                }
                // Show the next queued error, if any.
                if (!errorMessages.isEmpty()) {
                    handleOpenErrorDialog(errorMessages.removeFirst());
                }
            }
        };
        final AlertDialog.Builder builder = new AlertDialog.Builder(context)
                                                .setTitle(error.getString("title"))
                                                .setView(layout)
                                                .setPositiveButton("Kill", clickListener)
                                                .setNeutralButton("Continue", clickListener)
                                                .setCancelable(false);
        final AlertDialog dialog = builder.create();
        dialog.show();
        // Size the dialog to (almost) fill the visible display area.
        final Window window = ((Activity) context).getWindow();
        Rect displayRect = new Rect();
        window.getDecorView().getWindowVisibleDisplayFrame(displayRect);
        dialog.getWindow().setLayout(displayRect.width(), (int) (displayRect.height() * 0.95));
    }

    /** Handler.Callback: unwraps the blocking message posted by openErrorDialog. */
    public boolean handleMessage(Message msg)
    {
        switch (msg.what) {
            case MSG_OPEN_ERROR_DIALOG:
                AsyncResult asyncResult = (AsyncResult) msg.obj;
                ExceptionMessage errorMessage = (ExceptionMessage) asyncResult.getArg();
                handleOpenErrorDialog(errorMessage);
                // Unblocks the sender waiting in sendBlockingMainMessage.
                asyncResult.setResult(null);
                return true;
            default:
                break;
        }
        return false;
    }

    /**
     * Handles the exception by opening an error dialog with an error message
     * @param error An error message containing line number, error title, message, etc
     * @module.api
     */
    public void handleException(ExceptionMessage error)
    {
        openErrorDialog(error);
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.apache.kafka.common.record;
import org.apache.kafka.test.TestUtils;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
@RunWith(value = Parameterized.class)
public class MemoryRecordsBuilderTest {
private final CompressionType compressionType;
private final int bufferOffset;
public MemoryRecordsBuilderTest(int bufferOffset, CompressionType compressionType) {
this.bufferOffset = bufferOffset;
this.compressionType = compressionType;
}
@Test
public void testCompressionRateV0() {
ByteBuffer buffer = ByteBuffer.allocate(1024);
buffer.position(bufferOffset);
Record[] records = new Record[] {
Record.create(Record.MAGIC_VALUE_V0, 0L, "a".getBytes(), "1".getBytes()),
Record.create(Record.MAGIC_VALUE_V0, 1L, "b".getBytes(), "2".getBytes()),
Record.create(Record.MAGIC_VALUE_V0, 2L, "c".getBytes(), "3".getBytes()),
};
MemoryRecordsBuilder builder = new MemoryRecordsBuilder(buffer, Record.MAGIC_VALUE_V0, compressionType,
TimestampType.CREATE_TIME, 0L, 0L, buffer.capacity());
int uncompressedSize = 0;
for (Record record : records) {
uncompressedSize += record.sizeInBytes() + Records.LOG_OVERHEAD;
builder.append(record);
}
MemoryRecords built = builder.build();
if (compressionType == CompressionType.NONE) {
assertEquals(1.0, builder.compressionRate(), 0.00001);
} else {
int compressedSize = built.sizeInBytes() - Records.LOG_OVERHEAD - Record.RECORD_OVERHEAD_V0;
double computedCompressionRate = (double) compressedSize / uncompressedSize;
assertEquals(computedCompressionRate, builder.compressionRate(), 0.00001);
}
}
@Test
public void testCompressionRateV1() {
ByteBuffer buffer = ByteBuffer.allocate(1024);
buffer.position(bufferOffset);
Record[] records = new Record[] {
Record.create(Record.MAGIC_VALUE_V1, 0L, "a".getBytes(), "1".getBytes()),
Record.create(Record.MAGIC_VALUE_V1, 1L, "b".getBytes(), "2".getBytes()),
Record.create(Record.MAGIC_VALUE_V1, 2L, "c".getBytes(), "3".getBytes()),
};
MemoryRecordsBuilder builder = new MemoryRecordsBuilder(buffer, Record.MAGIC_VALUE_V1, compressionType,
TimestampType.CREATE_TIME, 0L, 0L, buffer.capacity());
int uncompressedSize = 0;
for (Record record : records) {
uncompressedSize += record.sizeInBytes() + Records.LOG_OVERHEAD;
builder.append(record);
}
MemoryRecords built = builder.build();
if (compressionType == CompressionType.NONE) {
assertEquals(1.0, builder.compressionRate(), 0.00001);
} else {
int compressedSize = built.sizeInBytes() - Records.LOG_OVERHEAD - Record.RECORD_OVERHEAD_V1;
double computedCompressionRate = (double) compressedSize / uncompressedSize;
assertEquals(computedCompressionRate, builder.compressionRate(), 0.00001);
}
}
@Test
public void buildUsingLogAppendTime() {
ByteBuffer buffer = ByteBuffer.allocate(1024);
buffer.position(bufferOffset);
long logAppendTime = System.currentTimeMillis();
MemoryRecordsBuilder builder = new MemoryRecordsBuilder(buffer, Record.MAGIC_VALUE_V1, compressionType,
TimestampType.LOG_APPEND_TIME, 0L, logAppendTime, buffer.capacity());
builder.append(0L, "a".getBytes(), "1".getBytes());
builder.append(0L, "b".getBytes(), "2".getBytes());
builder.append(0L, "c".getBytes(), "3".getBytes());
MemoryRecords records = builder.build();
MemoryRecordsBuilder.RecordsInfo info = builder.info();
assertEquals(logAppendTime, info.maxTimestamp);
assertEquals(2L, info.shallowOffsetOfMaxTimestamp);
for (Record record : records.records()) {
assertEquals(TimestampType.LOG_APPEND_TIME, record.timestampType());
assertEquals(logAppendTime, record.timestamp());
}
}
@Test
public void convertUsingLogAppendTime() {
ByteBuffer buffer = ByteBuffer.allocate(1024);
buffer.position(bufferOffset);
long logAppendTime = System.currentTimeMillis();
MemoryRecordsBuilder builder = new MemoryRecordsBuilder(buffer, Record.MAGIC_VALUE_V1, compressionType,
TimestampType.LOG_APPEND_TIME, 0L, logAppendTime, buffer.capacity());
builder.convertAndAppend(Record.create(Record.MAGIC_VALUE_V0, 0L, "a".getBytes(), "1".getBytes()));
builder.convertAndAppend(Record.create(Record.MAGIC_VALUE_V0, 0L, "b".getBytes(), "2".getBytes()));
builder.convertAndAppend(Record.create(Record.MAGIC_VALUE_V0, 0L, "c".getBytes(), "3".getBytes()));
MemoryRecords records = builder.build();
MemoryRecordsBuilder.RecordsInfo info = builder.info();
assertEquals(logAppendTime, info.maxTimestamp);
assertEquals(2L, info.shallowOffsetOfMaxTimestamp);
for (Record record : records.records()) {
assertEquals(TimestampType.LOG_APPEND_TIME, record.timestampType());
assertEquals(logAppendTime, record.timestamp());
}
}
@Test
public void buildUsingCreateTime() {
ByteBuffer buffer = ByteBuffer.allocate(1024);
buffer.position(bufferOffset);
long logAppendTime = System.currentTimeMillis();
MemoryRecordsBuilder builder = new MemoryRecordsBuilder(buffer, Record.MAGIC_VALUE_V1, compressionType,
TimestampType.CREATE_TIME, 0L, logAppendTime, buffer.capacity());
builder.append(0L, "a".getBytes(), "1".getBytes());
builder.append(2L, "b".getBytes(), "2".getBytes());
builder.append(1L, "c".getBytes(), "3".getBytes());
MemoryRecords records = builder.build();
MemoryRecordsBuilder.RecordsInfo info = builder.info();
assertEquals(2L, info.maxTimestamp);
if (compressionType == CompressionType.NONE)
assertEquals(1L, info.shallowOffsetOfMaxTimestamp);
else
assertEquals(2L, info.shallowOffsetOfMaxTimestamp);
int i = 0;
long[] expectedTimestamps = new long[] {0L, 2L, 1L};
for (Record record : records.records()) {
assertEquals(TimestampType.CREATE_TIME, record.timestampType());
assertEquals(expectedTimestamps[i++], record.timestamp());
}
}
@Test
public void testSmallWriteLimit() {
// with a small write limit, we always allow at least one record to be added
byte[] key = "foo".getBytes();
byte[] value = "bar".getBytes();
int writeLimit = 0;
ByteBuffer buffer = ByteBuffer.allocate(512);
MemoryRecordsBuilder builder = new MemoryRecordsBuilder(buffer, Record.CURRENT_MAGIC_VALUE, compressionType,
TimestampType.CREATE_TIME, 0L, Record.NO_TIMESTAMP, writeLimit);
assertFalse(builder.isFull());
assertTrue(builder.hasRoomFor(key, value));
builder.append(0L, key, value);
assertTrue(builder.isFull());
assertFalse(builder.hasRoomFor(key, value));
MemoryRecords memRecords = builder.build();
List<Record> records = TestUtils.toList(memRecords.records());
assertEquals(1, records.size());
Record record = records.get(0);
assertEquals(ByteBuffer.wrap(key), record.key());
assertEquals(ByteBuffer.wrap(value), record.value());
}
@Test
public void writePastLimit() {
ByteBuffer buffer = ByteBuffer.allocate(64);
buffer.position(bufferOffset);
long logAppendTime = System.currentTimeMillis();
MemoryRecordsBuilder builder = new MemoryRecordsBuilder(buffer, Record.MAGIC_VALUE_V1, compressionType,
TimestampType.CREATE_TIME, 0L, logAppendTime, buffer.capacity());
builder.append(0L, "a".getBytes(), "1".getBytes());
builder.append(1L, "b".getBytes(), "2".getBytes());
assertFalse(builder.hasRoomFor("c".getBytes(), "3".getBytes()));
builder.append(2L, "c".getBytes(), "3".getBytes());
MemoryRecords records = builder.build();
MemoryRecordsBuilder.RecordsInfo info = builder.info();
assertEquals(2L, info.maxTimestamp);
assertEquals(2L, info.shallowOffsetOfMaxTimestamp);
long i = 0L;
for (Record record : records.records()) {
assertEquals(TimestampType.CREATE_TIME, record.timestampType());
assertEquals(i++, record.timestamp());
}
}
@Test(expected = IllegalArgumentException.class)
public void testAppendAtInvalidOffset() {
ByteBuffer buffer = ByteBuffer.allocate(1024);
buffer.position(bufferOffset);
long logAppendTime = System.currentTimeMillis();
MemoryRecordsBuilder builder = new MemoryRecordsBuilder(buffer, Record.MAGIC_VALUE_V1, compressionType,
TimestampType.CREATE_TIME, 0L, logAppendTime, buffer.capacity());
builder.appendWithOffset(0L, System.currentTimeMillis(), "a".getBytes(), null);
// offsets must increase monotonically
builder.appendWithOffset(0L, System.currentTimeMillis(), "b".getBytes(), null);
}
@Test(expected = IllegalArgumentException.class)
public void testAppendWithInvalidMagic() {
ByteBuffer buffer = ByteBuffer.allocate(1024);
buffer.position(bufferOffset);
long logAppendTime = System.currentTimeMillis();
MemoryRecordsBuilder builder = new MemoryRecordsBuilder(buffer, Record.MAGIC_VALUE_V1, compressionType,
TimestampType.CREATE_TIME, 0L, logAppendTime, buffer.capacity());
builder.append(Record.create(Record.MAGIC_VALUE_V0, 0L, "a".getBytes(), null));
}
@Test
public void convertUsingCreateTime() {
ByteBuffer buffer = ByteBuffer.allocate(1024);
buffer.position(bufferOffset);
long logAppendTime = System.currentTimeMillis();
MemoryRecordsBuilder builder = new MemoryRecordsBuilder(buffer, Record.MAGIC_VALUE_V1, compressionType,
TimestampType.CREATE_TIME, 0L, logAppendTime, buffer.capacity());
builder.convertAndAppend(Record.create(Record.MAGIC_VALUE_V0, 0L, "a".getBytes(), "1".getBytes()));
builder.convertAndAppend(Record.create(Record.MAGIC_VALUE_V0, 0L, "b".getBytes(), "2".getBytes()));
builder.convertAndAppend(Record.create(Record.MAGIC_VALUE_V0, 0L, "c".getBytes(), "3".getBytes()));
MemoryRecords records = builder.build();
MemoryRecordsBuilder.RecordsInfo info = builder.info();
assertEquals(Record.NO_TIMESTAMP, info.maxTimestamp);
assertEquals(2L, info.shallowOffsetOfMaxTimestamp);
for (Record record : records.records()) {
assertEquals(TimestampType.CREATE_TIME, record.timestampType());
assertEquals(Record.NO_TIMESTAMP, record.timestamp());
}
}
@Parameterized.Parameters
public static Collection<Object[]> data() {
List<Object[]> values = new ArrayList<>();
for (int bufferOffset : Arrays.asList(0, 15))
for (CompressionType compressionType : CompressionType.values())
values.add(new Object[] {bufferOffset, compressionType});
return values;
}
}
| |
/******************************************************************
Copyright (c) 2008 Andy Jefferson and others. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Contributors:
...
*****************************************************************/
package org.datanucleus.tests;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.util.HashSet;
import java.util.Set;
import jakarta.persistence.EntityManager;
import jakarta.persistence.EntityTransaction;
import org.datanucleus.samples.annotations.embedded.collection.EmbeddedCollElement;
import org.datanucleus.samples.annotations.embedded.collection.EmbeddedCollectionOwner;
import org.datanucleus.samples.annotations.embedded.map.EmbeddedMapKey;
import org.datanucleus.samples.annotations.embedded.map.EmbeddedMapValue;
import org.datanucleus.samples.annotations.embedded.pc.EmbeddedPC;
import org.datanucleus.samples.annotations.embedded.pc.EmbeddedPCOwner;
import org.datanucleus.samples.annotations.embedded.map.EmbeddedMapOwner;
import org.datanucleus.samples.annotations.inheritance.InheritA;
import org.datanucleus.samples.annotations.inheritance.InheritA1;
import org.datanucleus.samples.annotations.inheritance.InheritA2;
import org.datanucleus.samples.annotations.inheritance.InheritA2a;
import org.datanucleus.samples.annotations.inheritance.InheritB;
import org.datanucleus.samples.annotations.inheritance.InheritB1;
import org.datanucleus.samples.annotations.inheritance.InheritB2;
import org.datanucleus.samples.annotations.inheritance.InheritC;
import org.datanucleus.samples.annotations.inheritance.InheritC1;
import org.datanucleus.samples.annotations.inheritance.InheritC2;
import org.datanucleus.samples.annotations.many_one.unidir.CarRental;
import org.datanucleus.samples.annotations.many_one.unidir.HireCar;
import org.datanucleus.samples.annotations.one_many.collection.CollectionHolder1;
import org.datanucleus.samples.annotations.one_many.collection.CollectionHolder1Element;
import org.datanucleus.samples.annotations.one_many.map.MapHolder1;
import org.datanucleus.samples.annotations.one_many.map.MapHolder1Key;
import org.datanucleus.samples.annotations.one_many.map.MapHolder1Value;
import org.datanucleus.samples.annotations.types.basic.DateHolder;
import org.datanucleus.samples.xml.one_many.map.MapHolder1Xml;
import org.datanucleus.store.connection.ManagedConnection;
import org.datanucleus.store.rdbms.RDBMSStoreManager;
/**
* Tests for schema creation.
* These tests only apply to a datastore that has a notion of a schema, consequently it should not be run for other datastores.
*/
public class SchemaTest extends JakartaPersistenceTestCase
{
/**
 * Constructor.
 * @param name Name of the test to run (passed through to the superclass test case).
 */
public SchemaTest(String name)
{
    super(name);
}
/**
 * Test for JPA inheritance strategy "joined" on a hierarchy of classes.
 * We have 4 classes. A base class, 2 direct subclasses, and a subclass of a subclass.
 * This should create 4 tables, with each table only having the fields for that class.
 */
public void testInheritanceStrategyJoined()
    throws Exception
{
    addClassesToSchema(new Class[] {InheritA.class, InheritA1.class, InheritA2.class, InheritA2a.class});
    EntityManager em = emf.createEntityManager();
    EntityTransaction tx = em.getTransaction();
    RDBMSStoreManager databaseMgr = (RDBMSStoreManager) storeMgr;
    ManagedConnection mconn = null;
    Connection conn = null;
    try
    {
        tx.begin();
        mconn = databaseMgr.getConnectionManager().getConnection(0);
        conn = (Connection) mconn.getConnection();
        DatabaseMetaData dmd = conn.getMetaData();

        // With JOINED, the base table holds ID+NAME and each subclass table
        // holds ID plus only the column that subclass itself declares.
        String[][] tableAndExtraColumn = {
            {"JPA_INHERIT_A", "NAME"},
            {"JPA_INHERIT_A1", "NAME1"},
            {"JPA_INHERIT_A2", "NAME2"},
            {"JPA_INHERIT_A2A", "NAME2A"},
        };
        for (String[] expectation : tableAndExtraColumn)
        {
            HashSet<String> expectedColumns = new HashSet<String>();
            expectedColumns.add("ID");
            expectedColumns.add(expectation[1]);
            RDBMSTestHelper.checkColumnsForTable(storeMgr, dmd, expectation[0], expectedColumns);
        }
        tx.commit();
    }
    catch (Exception e)
    {
        LOG.error(e);
        fail("Specification of table and column names must have been ignored when creating the schema for " +
            "inheritance case where the fields were overridden. Exception was thrown : " + e.getMessage());
    }
    finally
    {
        if (conn != null)
        {
            mconn.close();
        }
        if (tx.isActive())
        {
            tx.rollback();
        }
        em.close();
    }
}
/**
 * Test for JPA inheritance strategy "table-per-class" on a hierarchy of classes.
 */
public void testInheritanceStrategyTablePerClass()
    throws Exception
{
    addClassesToSchema(new Class[] {InheritB.class, InheritB1.class, InheritB2.class});
    EntityManager em = emf.createEntityManager();
    EntityTransaction tx = em.getTransaction();
    RDBMSStoreManager databaseMgr = (RDBMSStoreManager) storeMgr;
    ManagedConnection mconn = null;
    Connection conn = null;
    try
    {
        tx.begin();
        mconn = databaseMgr.getConnectionManager().getConnection(0);
        conn = (Connection) mconn.getConnection();
        DatabaseMetaData dmd = conn.getMetaData();

        // With table-per-class, each subclass table repeats the base columns
        // (ID, NAME) and adds its own. Build the subclass expectations by
        // copying the base set before any check runs.
        HashSet<String> baseColumns = new HashSet<String>();
        baseColumns.add("ID");
        baseColumns.add("NAME");
        HashSet<String> b1Columns = new HashSet<String>(baseColumns);
        b1Columns.add("NAME1");
        HashSet<String> b2Columns = new HashSet<String>(baseColumns);
        b2Columns.add("NAME2");

        RDBMSTestHelper.checkColumnsForTable(storeMgr, dmd, "JPA_INHERIT_B", baseColumns);
        RDBMSTestHelper.checkColumnsForTable(storeMgr, dmd, "JPA_INHERIT_B1", b1Columns);
        RDBMSTestHelper.checkColumnsForTable(storeMgr, dmd, "JPA_INHERIT_B2", b2Columns);
        tx.commit();
    }
    catch (Exception e)
    {
        LOG.error(e);
        fail("Specification of table and column names must have been ignored when creating the schema for " +
            "inheritance case where the fields were overridden. Exception was thrown : " + e.getMessage());
    }
    finally
    {
        if (conn != null)
        {
            mconn.close();
        }
        if (tx.isActive())
        {
            tx.rollback();
        }
        em.close();
    }
}
/**
 * Test for JPA inheritance strategy "single-table" on a hierarchy of classes.
 */
public void testInheritanceStrategySingleTable()
    throws Exception
{
    addClassesToSchema(new Class[] {InheritC.class, InheritC1.class, InheritC2.class});
    EntityManager em = emf.createEntityManager();
    EntityTransaction tx = em.getTransaction();
    RDBMSStoreManager databaseMgr = (RDBMSStoreManager) storeMgr;
    ManagedConnection mconn = null;
    Connection conn = null;
    try
    {
        tx.begin();
        mconn = databaseMgr.getConnectionManager().getConnection(0);
        conn = (Connection) mconn.getConnection();
        DatabaseMetaData dmd = conn.getMetaData();

        // Single-table: the base table carries every subclass column plus the
        // DTYPE discriminator.
        HashSet<String> expectedColumns = new HashSet<String>();
        for (String column : new String[] {"ID", "NAME", "NAME1", "NAME2", "DTYPE"})
        {
            expectedColumns.add(column);
        }
        // Check base table column names
        RDBMSTestHelper.checkColumnsForTable(storeMgr, dmd, "JPA_INHERIT_C", expectedColumns);
        tx.commit();
    }
    catch (Exception e)
    {
        LOG.error(e);
        fail("Specification of table and column names must have been ignored when creating the schema for " +
            "inheritance case where the fields were overridden. Exception was thrown : " + e.getMessage());
    }
    finally
    {
        if (conn != null)
        {
            mconn.close();
        }
        if (tx.isActive())
        {
            tx.rollback();
        }
        em.close();
    }
}
/**
 * Test for allows null setting.
 * Checks that the two date columns are created as nullable and the id column as NOT NULL.
 */
public void testAllowsNull()
    throws Exception
{
    addClassesToSchema(new Class[] {DateHolder.class});
    EntityManager em = emf.createEntityManager();
    EntityTransaction tx = em.getTransaction();
    RDBMSStoreManager databaseMgr = (RDBMSStoreManager)storeMgr;
    Connection conn = null; ManagedConnection mconn = null;
    try
    {
        tx.begin();
        mconn = databaseMgr.getConnectionManager().getConnection(0); conn = (Connection) mconn.getConnection();
        DatabaseMetaData dmd = conn.getMetaData();
        HashSet<String> columnNames = new HashSet<String>();
        columnNames.add("ID");
        columnNames.add("DATEFIELD");
        columnNames.add("DATEFIELD2");
        // Check base table column names
        RDBMSTestHelper.checkColumnsForTable(storeMgr, dmd, "JPA_ANN_DATEHOLDER", columnNames);
        String insensitiveTableName = RDBMSTestHelper.getIdentifierInCaseOfAdapter(storeMgr, "JPA_ANN_DATEHOLDER", false);
        ResultSet rs = dmd.getColumns(null, null, insensitiveTableName, null);
        try
        {
            while (rs.next())
            {
                // Use the JDBC-specified column labels instead of the previous
                // positional indices (4, 11), and the DatabaseMetaData
                // nullability constants instead of magic numbers 1/0.
                String colName = rs.getString("COLUMN_NAME");
                int nullValue = rs.getInt("NULLABLE");
                if (colName.equalsIgnoreCase("DATEFIELD") || colName.equalsIgnoreCase("DATEFIELD2"))
                {
                    if (nullValue != DatabaseMetaData.columnNullable)
                    {
                        fail("Column " + colName + " should have allowed nulls but doesnt");
                    }
                }
                else if (colName.equalsIgnoreCase("ID"))
                {
                    if (nullValue != DatabaseMetaData.columnNoNulls)
                    {
                        fail("Column " + colName + " shouldnt have allowed nulls but does");
                    }
                }
            }
        }
        finally
        {
            // The ResultSet was previously left open; release it explicitly.
            rs.close();
        }
        tx.commit();
    }
    catch (Exception e)
    {
        LOG.error(e);
        fail("Specification of table and column names gave error when checking schema. Exception was thrown : " + e.getMessage());
    }
    finally
    {
        if (conn != null)
        {
            mconn.close();
        }
        if (tx.isActive())
        {
            tx.rollback();
        }
        em.close();
    }
}
/**
 * Test for JPA Map<NonPC, NonPC> using annotations.
 */
public void testMapOfSimpleSimpleViaAnnotations()
    throws Exception
{
    addClassesToSchema(new Class[] {MapHolder1.class});
    EntityManager em = emf.createEntityManager();
    EntityTransaction tx = em.getTransaction();
    RDBMSStoreManager databaseMgr = (RDBMSStoreManager) storeMgr;
    ManagedConnection mconn = null;
    Connection conn = null;
    try
    {
        tx.begin();
        mconn = databaseMgr.getConnectionManager().getConnection(0);
        conn = (Connection) mconn.getConnection();
        DatabaseMetaData dmd = conn.getMetaData();

        // Owner table: just the id column.
        HashSet<String> ownerColumns = new HashSet<String>();
        ownerColumns.add("JPA_AN_MAPHOLDER1_ID");
        RDBMSTestHelper.checkColumnsForTable(storeMgr, dmd, "JPA_AN_MAPHOLDER1", ownerColumns);

        // First map's join table: owner FK plus key/value columns.
        HashSet<String> propsColumns = new HashSet<String>();
        for (String column : new String[] {"MAPHOLDER1_ID", "PROP_NAME", "PROP_VALUE"})
        {
            propsColumns.add(column);
        }
        RDBMSTestHelper.checkColumnsForTable(storeMgr, dmd, "JPA_AN_MAPHOLDER1_PROPS", propsColumns);

        // Second map's join table: owner FK plus key/value columns.
        HashSet<String> props2Columns = new HashSet<String>();
        for (String column : new String[] {"MAPHOLDER1_JPA_AN_MAPHOLDER1_ID", "PROPERTIES2_KEY", "PROPERTIES2_VALUE"})
        {
            props2Columns.add(column);
        }
        RDBMSTestHelper.checkColumnsForTable(storeMgr, dmd, "MAPHOLDER1_PROPERTIES2", props2Columns);
        tx.commit();
    }
    catch (Exception e)
    {
        LOG.error("Exception thrown", e);
        fail("Exception thrown : " + e.getMessage());
    }
    finally
    {
        if (conn != null)
        {
            mconn.close();
        }
        if (tx.isActive())
        {
            tx.rollback();
        }
        em.close();
    }
}
/**
* Test for JPA Map<NonPC, NonPC> using xml.
*/
public void testMapOfSimpleSimpleViaXml()
throws Exception
{
addClassesToSchema(new Class[] {MapHolder1Xml.class});
EntityManager em = emf.createEntityManager();
EntityTransaction tx = em.getTransaction();
RDBMSStoreManager databaseMgr = (RDBMSStoreManager)storeMgr;
Connection conn = null; ManagedConnection mconn = null;
try
{
tx.begin();
mconn = databaseMgr.getConnectionManager().getConnection(0); conn = (Connection) mconn.getConnection();
DatabaseMetaData dmd = conn.getMetaData();
HashSet<String> columnNames = new HashSet<String>();
columnNames.add("JPA_XML_MAPHOLDER1_ID");
RDBMSTestHelper.checkColumnsForTable(storeMgr, dmd, "JPA_XML_MAPHOLDER1", columnNames);
HashSet<String> columnNames2 = new HashSet<String>();
columnNames2.add("MAPHOLDER1_ID");
columnNames2.add("PROP_NAME");
columnNames2.add("PROP_VALUE");
RDBMSTestHelper.checkColumnsForTable(storeMgr, dmd, "JPA_XML_MAPHOLDER1_PROPS", columnNames2);
HashSet<String> columnNames3 = new HashSet<String>();
columnNames3.add("MAPHOLDER1XML_JPA_XML_MAPHOLDER1_ID");
columnNames3.add("PROPERTIES2_KEY");
columnNames3.add("PROPERTIES2_VALUE");
RDBMSTestHelper.checkColumnsForTable(storeMgr, dmd, "MAPHOLDER1XML_PROPERTIES2", columnNames3);
tx.commit();
}
catch (Exception e)
{
LOG.error("Exception thrown", e);
fail("Exception thrown : " + e.getMessage());
}
finally
{
if (conn != null)
{
mconn.close();
}
if (tx.isActive())
{
tx.rollback();
}
em.close();
}
}
/**
* Test for JPA embedded map keys/values.
*/
public void testEmbeddedMap()
throws Exception
{
addClassesToSchema(new Class[] {EmbeddedMapOwner.class, EmbeddedMapKey.class, EmbeddedMapValue.class});
EntityManager em = emf.createEntityManager();
EntityTransaction tx = em.getTransaction();
RDBMSStoreManager databaseMgr = (RDBMSStoreManager)storeMgr;
Connection conn = null; ManagedConnection mconn = null;
try
{
tx.begin();
mconn = databaseMgr.getConnectionManager().getConnection(0); conn = (Connection) mconn.getConnection();
DatabaseMetaData dmd = conn.getMetaData();
// Map with embedded value taking default value column names
Set<String> columnNames = new HashSet<String>();
columnNames.add("JPA_MAP_EMB_OWNER_ID"); // FK to owner
columnNames.add("MAPEMBEDDEDVALUE_KEY"); // Key
columnNames.add("NAME"); // Value "name"
columnNames.add("VALUE"); // Value "value"
RDBMSTestHelper.checkColumnsForTable(storeMgr, dmd, "JPA_MAP_EMB_VALUE", columnNames);
// Map with embedded value overriding the value column names
Set<String> columnNames2 = new HashSet<String>();
columnNames2.add("JPA_MAP_EMB_OWNER_ID"); // FK to owner
columnNames2.add("MAP_KEY"); // Key "name"
columnNames2.add("MAP_VALUE_NAME"); // Value "name"
columnNames2.add("MAP_VALUE_VALUE"); // Value "value"
RDBMSTestHelper.checkColumnsForTable(storeMgr, dmd, "JPA_MAP_EMB_VALUE_OVERRIDE", columnNames2);
// Map with embedded key taking default key column names
Set<String> columnNames3 = new HashSet<String>();
columnNames3.add("JPA_MAP_EMB_OWNER_ID"); // FK to owner
columnNames3.add("NAME"); // Key "name"
columnNames3.add("VALUE"); // Key "value"
columnNames3.add("MAPEMBEDDEDKEY_VALUE"); // Value
RDBMSTestHelper.checkColumnsForTable(storeMgr, dmd, "JPA_MAP_EMB_KEY", columnNames3);
// Map with embedded key overriding the key column names
Set<String> columnNames4 = new HashSet<String>();
columnNames4.add("JPA_MAP_EMB_OWNER_ID"); // FK to owner
columnNames4.add("MAP_KEY_NAME"); // Key "name"
columnNames4.add("MAP_KEY_VALUE"); // Key "value"
columnNames4.add("MAPEMBEDDEDKEYOVERRIDE_VALUE"); // Value
RDBMSTestHelper.checkColumnsForTable(storeMgr, dmd, "JPA_MAP_EMB_KEY_OVERRIDE", columnNames4);
}
catch (Exception e)
{
LOG.error("Exception thrown", e);
fail("Exception thrown : " + e.getMessage());
}
finally
{
if (conn != null)
{
mconn.close();
}
if (tx.isActive())
{
tx.rollback();
}
em.close();
}
}
/**
* Test for JPA embedded collection elements.
*/
public void testEmbeddedCollection()
throws Exception
{
addClassesToSchema(new Class[] {EmbeddedCollectionOwner.class, EmbeddedCollElement.class});
EntityManager em = emf.createEntityManager();
EntityTransaction tx = em.getTransaction();
RDBMSStoreManager databaseMgr = (RDBMSStoreManager)storeMgr;
Connection conn = null; ManagedConnection mconn = null;
try
{
tx.begin();
mconn = databaseMgr.getConnectionManager().getConnection(0); conn = (Connection) mconn.getConnection();
DatabaseMetaData dmd = conn.getMetaData();
// Map with embedded value taking default value column names
Set<String> columnNames = new HashSet<String>();
columnNames.add("JPA_COLL_EMB_OWNER_ID"); // FK to owner
columnNames.add("NAME"); // Element "name"
columnNames.add("VALUE"); // Element "value"
RDBMSTestHelper.checkColumnsForTable(storeMgr, dmd, "JPA_COLL_EMB", columnNames);
// Map with embedded value overriding the value column names
Set<String> columnNames2 = new HashSet<String>();
columnNames2.add("JPA_COLL_EMB_OWNER_ID"); // FK to owner
columnNames2.add("COLL_ELEM_NAME"); // Element "name"
columnNames2.add("COLL_ELEM_VALUE"); // Element "value"
RDBMSTestHelper.checkColumnsForTable(storeMgr, dmd, "JPA_COLL_EMB_OVERRIDE", columnNames2);
}
catch (Exception e)
{
LOG.error("Exception thrown", e);
fail("Exception thrown : " + e.getMessage());
}
finally
{
if (conn != null)
{
mconn.close();
}
if (tx.isActive())
{
tx.rollback();
}
em.close();
}
}
/**
* Test for JPA embedded PC.
*/
public void testEmbeddedPC()
throws Exception
{
addClassesToSchema(new Class[] {EmbeddedPCOwner.class, EmbeddedPC.class});
EntityManager em = emf.createEntityManager();
EntityTransaction tx = em.getTransaction();
RDBMSStoreManager databaseMgr = (RDBMSStoreManager)storeMgr;
Connection conn = null; ManagedConnection mconn = null;
try
{
tx.begin();
mconn = databaseMgr.getConnectionManager().getConnection(0); conn = (Connection) mconn.getConnection();
DatabaseMetaData dmd = conn.getMetaData();
// Map with embedded value taking default value column names
Set<String> columnNames = new HashSet<String>();
columnNames.add("ID"); // Id
columnNames.add("EMB_NAME"); // PC "name"
columnNames.add("EMB_VALUE"); // PC "value"
columnNames.add("PC_EMB_NAME"); // PC "name" (overridden)
columnNames.add("PC_EMB_VALUE"); // PC "value" (overridden)
RDBMSTestHelper.checkColumnsForTable(storeMgr, dmd, "JPA_PC_EMBEDDED_OWNER", columnNames);
}
catch (Exception e)
{
LOG.error("Exception thrown", e);
fail("Exception thrown : " + e.getMessage());
}
finally
{
if (conn != null)
{
mconn.close();
}
if (tx.isActive())
{
tx.rollback();
}
em.close();
}
}
/**
* Test for JPA map using join table with entity keys/values.
*/
public void testMapJoinTableEntityEntity()
throws Exception
{
addClassesToSchema(new Class[] {MapHolder1.class, MapHolder1Key.class, MapHolder1Value.class});
EntityManager em = emf.createEntityManager();
EntityTransaction tx = em.getTransaction();
RDBMSStoreManager databaseMgr = (RDBMSStoreManager)storeMgr;
Connection conn = null; ManagedConnection mconn = null;
try
{
tx.begin();
mconn = databaseMgr.getConnectionManager().getConnection(0); conn = (Connection) mconn.getConnection();
DatabaseMetaData dmd = conn.getMetaData();
// Map with user-specified join table namings
Set<String> columnNames = new HashSet<String>();
columnNames.add("MAP4_OWNER_ID"); // FK to owner
columnNames.add("MAP4_KEY"); // Key
columnNames.add("MAP4_VALUE"); // Value
RDBMSTestHelper.checkColumnsForTable(storeMgr, dmd, "JPA_AN_MAPHOLDER1_MAP4", columnNames);
// Map with default join table namings
columnNames = new HashSet<String>();
columnNames.add("MAPHOLDER1_JPA_AN_MAPHOLDER1_ID"); // FK to owner
columnNames.add("MAP3_KEY"); // Key
columnNames.add("MAP3_ID"); // Value
RDBMSTestHelper.checkColumnsForTable(storeMgr, dmd, "JPA_AN_MAPHOLDER1_MAPHOLDER1VALUE", columnNames);
}
catch (Exception e)
{
LOG.error("Exception thrown", e);
fail("Exception thrown : " + e.getMessage());
}
finally
{
if (conn != null)
{
mconn.close();
}
if (tx.isActive())
{
tx.rollback();
}
em.close();
}
}
/**
* Test for JPA Collection<NonPC> using annotations.
*/
public void testCollectionOfSimpleViaAnnotations()
throws Exception
{
addClassesToSchema(new Class[] {MapHolder1.class});
EntityManager em = emf.createEntityManager();
EntityTransaction tx = em.getTransaction();
RDBMSStoreManager databaseMgr = (RDBMSStoreManager)storeMgr;
Connection conn = null; ManagedConnection mconn = null;
try
{
tx.begin();
mconn = databaseMgr.getConnectionManager().getConnection(0); conn = (Connection) mconn.getConnection();
DatabaseMetaData dmd = conn.getMetaData();
HashSet<String> columnNames = new HashSet<String>();
columnNames.add("JPA_AN_COLLHOLDER1_ID");
RDBMSTestHelper.checkColumnsForTable(storeMgr, dmd, "JPA_AN_COLLHOLDER1", columnNames);
HashSet<String> columnNames2 = new HashSet<String>();
columnNames2.add("COLLHOLDER1_ID");
columnNames2.add("PROP_VALUE");
RDBMSTestHelper.checkColumnsForTable(storeMgr, dmd, "JPA_AN_COLLHOLDER1_STRINGS", columnNames2);
HashSet<String> columnNames3 = new HashSet<String>();
columnNames3.add("COLLECTIONHOLDER1_JPA_AN_COLLHOLDER1_ID");
columnNames3.add("COLLBASIC2_ELEMENT");
RDBMSTestHelper.checkColumnsForTable(storeMgr, dmd, "COLLECTIONHOLDER1_COLLBASIC2", columnNames3);
tx.commit();
}
catch (Exception e)
{
LOG.error("Exception thrown", e);
fail("Exception thrown : " + e.getMessage());
}
finally
{
if (conn != null)
{
mconn.close();
}
if (tx.isActive())
{
tx.rollback();
}
em.close();
}
}
/**
* Test for JPA collection using join table with entity elements.
*/
public void testCollectionJoinTableEntity()
throws Exception
{
addClassesToSchema(new Class[] {CollectionHolder1.class, CollectionHolder1Element.class});
EntityManager em = emf.createEntityManager();
EntityTransaction tx = em.getTransaction();
RDBMSStoreManager databaseMgr = (RDBMSStoreManager)storeMgr;
Connection conn = null; ManagedConnection mconn = null;
try
{
tx.begin();
mconn = databaseMgr.getConnectionManager().getConnection(0); conn = (Connection) mconn.getConnection();
DatabaseMetaData dmd = conn.getMetaData();
// User-specified join table namings
Set<String> columnNames = new HashSet<String>();
columnNames.add("COLLECTIONHOLDER1_JPA_AN_COLLHOLDER1_ID"); // FK to owner
columnNames.add("COLL3_ID"); // Element FK
RDBMSTestHelper.checkColumnsForTable(storeMgr, dmd, "JPA_AN_COLLHOLDER1_COLLECTIONHOLDER1ELEMENT", columnNames);
// Default join table namings
columnNames = new HashSet<String>();
columnNames.add("COLL4_OWNER_ID"); // FK to owner
columnNames.add("COLL4_ELEMENT"); // Element FK
RDBMSTestHelper.checkColumnsForTable(storeMgr, dmd, "JPA_AN_COLLHOLDER1_COLL4", columnNames);
}
catch (Exception e)
{
LOG.error("Exception thrown", e);
fail("Exception thrown : " + e.getMessage());
}
finally
{
if (conn != null)
{
mconn.close();
}
if (tx.isActive())
{
tx.rollback();
}
em.close();
}
}
/**
* Test for JPA collection using join table with entity elements.
*/
public void testManyToOneUnidirJoin()
throws Exception
{
addClassesToSchema(new Class[] {CarRental.class, HireCar.class});
EntityManager em = emf.createEntityManager();
EntityTransaction tx = em.getTransaction();
RDBMSStoreManager databaseMgr = (RDBMSStoreManager)storeMgr;
Connection conn = null; ManagedConnection mconn = null;
try
{
tx.begin();
Set<String> columnNames = new HashSet<String>();
mconn = databaseMgr.getConnectionManager().getConnection(0); conn = (Connection) mconn.getConnection();
DatabaseMetaData dmd = conn.getMetaData();
// Check table column names
columnNames.clear();
columnNames.add("REGISTRATIONID");
columnNames.add("MAKE");
columnNames.add("MODEL");
RDBMSTestHelper.checkColumnsForTable(storeMgr, dmd, "JPA_HIRECAR", columnNames);
columnNames.clear();
columnNames.add("CUSTOMERID");
columnNames.add("STARTDATE");
columnNames.add("ENDDATE");
RDBMSTestHelper.checkColumnsForTable(storeMgr, dmd, "JPA_CARRENTAL", columnNames);
columnNames.clear();
columnNames.add("CARRENTAL_CUSTOMERID");
columnNames.add("HIRECAR_REGISTRATIONID");
RDBMSTestHelper.checkColumnsForTable(storeMgr, dmd, "JPA_CARRENTAL_HIRE_JOIN", columnNames);
}
catch (Exception e)
{
LOG.error("Exception thrown", e);
fail("Exception thrown : " + e.getMessage());
}
finally
{
if (conn != null)
{
mconn.close();
}
if (tx.isActive())
{
tx.rollback();
}
em.close();
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.fs.s3a.scale;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.contract.ContractTestUtils;
import org.apache.hadoop.fs.s3a.S3AFileSystem;
import org.apache.hadoop.fs.s3a.S3AInputPolicy;
import org.apache.hadoop.fs.s3a.S3AInputStream;
import org.apache.hadoop.fs.s3a.S3AInstrumentation;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionCodecFactory;
import org.apache.hadoop.util.LineReader;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.EOFException;
import java.io.IOException;
import static org.apache.hadoop.fs.contract.ContractTestUtils.*;
import static org.apache.hadoop.fs.s3a.Constants.*;
import static org.apache.hadoop.fs.s3a.S3ATestUtils.assume;
/**
 * Look at the performance of S3a operations.
 */
public class ITestS3AInputStreamPerformance extends S3AScaleTestBase {
  private static final Logger LOG = LoggerFactory.getLogger(
      ITestS3AInputStreamPerformance.class);

  private S3AFileSystem s3aFS;
  private Path testData;
  private FileStatus testDataStatus;
  private FSDataInputStream in;
  private S3AInstrumentation.InputStreamStatistics streamStatistics;
  public static final int BLOCK_SIZE = 32 * 1024;
  public static final int BIG_BLOCK_SIZE = 256 * 1024;

  /** Tests only run if there is a named test file that can be read. */
  private boolean testDataAvailable = true;
  private String assumptionMessage = "test file";

  /**
   * Open the FS and the test data. The input stream is always set up here.
   * @throws IOException IO Problems.
   */
  @Before
  public void openFS() throws IOException {
    Configuration conf = getConf();
    conf.setInt(SOCKET_SEND_BUFFER, 16 * 1024);
    conf.setInt(SOCKET_RECV_BUFFER, 16 * 1024);
    String testFile = conf.getTrimmed(KEY_CSVTEST_FILE, DEFAULT_CSVTEST_FILE);
    if (testFile.isEmpty()) {
      assumptionMessage = "Empty test property: " + KEY_CSVTEST_FILE;
      LOG.warn(assumptionMessage);
      testDataAvailable = false;
    } else {
      testData = new Path(testFile);
      LOG.info("Using {} as input stream source", testData);
      Path path = this.testData;
      bindS3aFS(path);
      try {
        testDataStatus = s3aFS.getFileStatus(this.testData);
      } catch (IOException e) {
        LOG.warn("Failed to read file {} specified in {}",
            testFile, KEY_CSVTEST_FILE, e);
        throw e;
      }
    }
  }

  /**
   * Bind {@link #s3aFS} to a new filesystem instance for the path's URI.
   * @param path path whose URI selects the FS
   * @throws IOException on instantiation failure
   */
  private void bindS3aFS(Path path) throws IOException {
    s3aFS = (S3AFileSystem) FileSystem.newInstance(path.toUri(), getConf());
  }

  /**
   * Cleanup: close the stream, close the FS.
   */
  @After
  public void cleanup() {
    describe("cleanup");
    IOUtils.closeStream(in);
    IOUtils.closeStream(s3aFS);
  }

  /**
   * Declare that the test requires the CSV test dataset.
   */
  private void requireCSVTestData() {
    assume(assumptionMessage, testDataAvailable);
  }

  /**
   * Open the test file with the read buffer specified in the setting
   * {@code KEY_READ_BUFFER_SIZE}; use the {@code Normal} policy.
   * @return the stream, wrapping an S3a one
   * @throws IOException IO problems
   */
  FSDataInputStream openTestFile() throws IOException {
    return openTestFile(S3AInputPolicy.Normal, 0);
  }

  /**
   * Open the test file with the read buffer specified in the setting
   * {@code KEY_READ_BUFFER_SIZE}.
   * This includes the {@link #requireCSVTestData()} assumption; so
   * if called before any FS op, will automatically skip the test
   * if the CSV file is absent.
   *
   * @param inputPolicy input policy to use
   * @param readahead readahead/buffer size
   * @return the stream, wrapping an S3a one
   * @throws IOException IO problems
   */
  FSDataInputStream openTestFile(S3AInputPolicy inputPolicy, long readahead)
      throws IOException {
    requireCSVTestData();
    return openDataFile(s3aFS, this.testData, inputPolicy, readahead);
  }

  /**
   * Open a test file with the read buffer specified in the setting
   * {@link org.apache.hadoop.fs.s3a.S3ATestConstants#KEY_READ_BUFFER_SIZE}.
   *
   * @param fs filesystem to open against
   * @param path path to open
   * @param inputPolicy input policy to use
   * @param readahead readahead/buffer size
   * @return the stream, wrapping an S3a one
   * @throws IOException IO problems
   */
  private FSDataInputStream openDataFile(S3AFileSystem fs,
      Path path,
      S3AInputPolicy inputPolicy,
      long readahead) throws IOException {
    int bufferSize = getConf().getInt(KEY_READ_BUFFER_SIZE,
        DEFAULT_READ_BUFFER_SIZE);
    // temporarily switch the FS-wide input policy, restoring it afterwards
    S3AInputPolicy policy = fs.getInputPolicy();
    fs.setInputPolicy(inputPolicy);
    try {
      FSDataInputStream stream = fs.open(path, bufferSize);
      if (readahead >= 0) {
        stream.setReadahead(readahead);
      }
      streamStatistics = getInputStreamStatistics(stream);
      return stream;
    } finally {
      fs.setInputPolicy(policy);
    }
  }

  /**
   * Assert that the stream was only ever opened once.
   */
  protected void assertStreamOpenedExactlyOnce() {
    assertOpenOperationCount(1);
  }

  /**
   * Make an assertion count about the number of open operations.
   * @param expected the expected number
   */
  private void assertOpenOperationCount(long expected) {
    assertEquals("open operations in\n" + in,
        expected, streamStatistics.openOperations);
  }

  /**
   * Log how long an IOP took, by dividing the total time by the
   * count of operations, printing in a human-readable form.
   * @param operation operation being measured
   * @param timer timing data
   * @param count IOP count.
   */
  protected void logTimePerIOP(String operation,
      NanoTimer timer,
      long count) {
    LOG.info("Time per {}: {} nS",
        operation, toHuman(timer.duration() / count));
  }

  @Test
  public void testTimeToOpenAndReadWholeFileBlocks() throws Throwable {
    requireCSVTestData();
    int blockSize = _1MB;
    describe("Open the test file %s and read it in blocks of size %d",
        testData, blockSize);
    long len = testDataStatus.getLen();
    in = openTestFile();
    byte[] block = new byte[blockSize];
    NanoTimer timer2 = new NanoTimer();
    long count = 0;
    // implicitly rounding down here
    long blockCount = len / blockSize;
    long totalToRead = blockCount * blockSize;
    long minimumBandwidth = 128 * 1024;
    int maxResetCount = 4;
    int resetCount = 0;
    for (long i = 0; i < blockCount; i++) {
      int offset = 0;
      int remaining = blockSize;
      long blockId = i + 1;
      NanoTimer blockTimer = new NanoTimer();
      int reads = 0;
      while (remaining > 0) {
        NanoTimer readTimer = new NanoTimer();
        int bytesRead = in.read(block, offset, remaining);
        reads++;
        if (bytesRead < 0) {
          // EOF. Previously this compared "== 1", which aborted the block on
          // a legitimate one-byte read and, at a real EOF, let
          // "remaining -= -1" grow remaining and loop forever.
          break;
        }
        remaining -= bytesRead;
        offset += bytesRead;
        count += bytesRead;
        readTimer.end();
        if (bytesRead != 0) {
          LOG.debug("Bytes in read #{}: {} , block bytes: {}," +
                  " remaining in block: {}" +
                  " duration={} nS; ns/byte: {}, bandwidth={} MB/s",
              reads, bytesRead, blockSize - remaining, remaining,
              readTimer.duration(),
              readTimer.nanosPerOperation(bytesRead),
              readTimer.bandwidthDescription(bytesRead));
        } else {
          LOG.warn("0 bytes returned by read() operation #{}", reads);
        }
      }
      blockTimer.end("Reading block %d in %d reads", blockId, reads);
      String bw = blockTimer.bandwidthDescription(blockSize);
      LOG.info("Bandwidth of block {}: {} MB/s: ", blockId, bw);
      if (bandwidth(blockTimer, blockSize) < minimumBandwidth) {
        LOG.warn("Bandwidth {} too low on block {}: resetting connection",
            bw, blockId);
        Assert.assertTrue("Bandwidth of " + bw + " too low after "
            + resetCount + " attempts", resetCount <= maxResetCount);
        resetCount++;
        // reset the connection
        getS3AInputStream(in).resetConnection();
      }
    }
    timer2.end("Time to read %d bytes in %d blocks", totalToRead, blockCount);
    LOG.info("Overall Bandwidth {} MB/s; reset connections {}",
        timer2.bandwidth(totalToRead), resetCount);
    logStreamStatistics();
  }

  /**
   * Work out the bandwidth in bytes/second.
   * @param timer timer measuring the duration
   * @param bytes bytes
   * @return the number of bytes/second of the recorded operation
   */
  public static double bandwidth(NanoTimer timer, long bytes) {
    return bytes * 1.0e9 / timer.duration();
  }

  @Test
  public void testLazySeekEnabled() throws Throwable {
    describe("Verify that seeks do not trigger any IO");
    in = openTestFile();
    long len = testDataStatus.getLen();
    NanoTimer timer = new NanoTimer();
    long blockCount = len / BLOCK_SIZE;
    for (long i = 0; i < blockCount; i++) {
      in.seek(in.getPos() + BLOCK_SIZE - 1);
    }
    in.seek(0);
    blockCount++;
    timer.end("Time to execute %d seeks", blockCount);
    logTimePerIOP("seek()", timer, blockCount);
    logStreamStatistics();
    assertOpenOperationCount(0);
    assertEquals("bytes read", 0, streamStatistics.bytesRead);
  }

  @Test
  public void testReadaheadOutOfRange() throws Throwable {
    try {
      in = openTestFile();
      in.setReadahead(-1L);
      fail("Stream should have rejected the request " + in);
    } catch (IllegalArgumentException e) {
      // expected
    }
  }

  @Test
  public void testReadWithNormalPolicy() throws Throwable {
    describe("Read big blocks with a big readahead");
    executeSeekReadSequence(BIG_BLOCK_SIZE, BIG_BLOCK_SIZE * 2,
        S3AInputPolicy.Normal);
    assertStreamOpenedExactlyOnce();
  }

  @Test
  public void testDecompressionSequential128K() throws Throwable {
    describe("Decompress with a 128K readahead");
    executeDecompression(128 * 1024, S3AInputPolicy.Sequential);
    assertStreamOpenedExactlyOnce();
  }

  /**
   * Execute a decompression + line read with the given input policy.
   * @param readahead byte readahead
   * @param inputPolicy read policy
   * @throws IOException IO Problems
   */
  private void executeDecompression(long readahead,
      S3AInputPolicy inputPolicy) throws IOException {
    CompressionCodecFactory factory
        = new CompressionCodecFactory(getConf());
    CompressionCodec codec = factory.getCodec(testData);
    long bytesRead = 0;
    int lines = 0;

    FSDataInputStream objectIn = openTestFile(inputPolicy, readahead);
    ContractTestUtils.NanoTimer timer = new ContractTestUtils.NanoTimer();
    try (LineReader lineReader = new LineReader(
        codec.createInputStream(objectIn), getConf())) {
      Text line = new Text();
      int read;
      while ((read = lineReader.readLine(line)) > 0) {
        bytesRead += read;
        lines++;
      }
    } catch (EOFException eof) {
      // done
    }
    timer.end("Time to read %d lines [%d bytes expanded, %d raw]" +
            " with readahead = %d",
        lines,
        bytesRead,
        testDataStatus.getLen(),
        readahead);
    logTimePerIOP("line read", timer, lines);
    logStreamStatistics();
  }

  private void logStreamStatistics() {
    // String.format supplies the %n newline; SLF4J then substitutes the {}
    LOG.info(String.format("Stream Statistics%n{}"), streamStatistics);
  }

  /**
   * Execute a seek+read sequence.
   * @param blockSize block size for seeks
   * @param readahead what the readahead value of the stream should be
   * @param policy input policy to use for the open
   * @throws IOException IO problems
   */
  protected void executeSeekReadSequence(long blockSize,
      long readahead,
      S3AInputPolicy policy) throws IOException {
    in = openTestFile(policy, readahead);
    long len = testDataStatus.getLen();
    NanoTimer timer = new NanoTimer();
    long blockCount = len / blockSize;
    LOG.info("Reading {} blocks, readahead = {}",
        blockCount, readahead);
    for (long i = 0; i < blockCount; i++) {
      in.seek(in.getPos() + blockSize - 1);
      // this is the read
      assertTrue(in.read() >= 0);
    }
    timer.end("Time to execute %d seeks of distance %d with readahead = %d",
        blockCount,
        blockSize,
        readahead);
    logTimePerIOP("seek(pos + " + blockCount + "); read()", timer, blockCount);
    LOG.info("Effective bandwidth {} MB/S",
        timer.bandwidthDescription(streamStatistics.bytesRead -
            streamStatistics.bytesSkippedOnSeek));
    logStreamStatistics();
  }

  public static final int _4K = 4 * 1024;
  public static final int _8K = 8 * 1024;
  public static final int _16K = 16 * 1024;
  public static final int _32K = 32 * 1024;
  public static final int _64K = 64 * 1024;
  public static final int _128K = 128 * 1024;
  public static final int _256K = 256 * 1024;
  public static final int _1MB = 1024 * 1024;
  public static final int _2MB = 2 * _1MB;
  public static final int _10MB = _1MB * 10;
  public static final int _5MB = _1MB * 5;

  /** Each entry is {position, read-length}. */
  private static final int[][] RANDOM_IO_SEQUENCE = {
      {_2MB, _128K},
      {_128K, _128K},
      {_5MB, _64K},
      {_1MB, _1MB},
  };

  @Test
  public void testRandomIORandomPolicy() throws Throwable {
    executeRandomIO(S3AInputPolicy.Random, (long) RANDOM_IO_SEQUENCE.length);
    assertEquals("streams aborted in " + streamStatistics,
        0, streamStatistics.aborted);
  }

  @Test
  public void testRandomIONormalPolicy() throws Throwable {
    long expectedOpenCount = RANDOM_IO_SEQUENCE.length;
    executeRandomIO(S3AInputPolicy.Normal, expectedOpenCount);
    assertEquals("streams aborted in " + streamStatistics,
        4, streamStatistics.aborted);
  }

  /**
   * Execute the random IO {@code readFully(pos, bytes[])} sequence defined by
   * {@link #RANDOM_IO_SEQUENCE}. The stream is closed afterwards; that's used
   * in the timing too.
   * @param policy read policy
   * @param expectedOpenCount expected number of stream openings
   * @return the timer
   * @throws IOException IO problems
   */
  private ContractTestUtils.NanoTimer executeRandomIO(S3AInputPolicy policy,
      long expectedOpenCount)
      throws IOException {
    describe("Random IO with policy \"%s\"", policy);
    byte[] buffer = new byte[_1MB];
    long totalBytesRead = 0;

    in = openTestFile(policy, 0);
    ContractTestUtils.NanoTimer timer = new ContractTestUtils.NanoTimer();
    for (int[] action : RANDOM_IO_SEQUENCE) {
      int position = action[0];
      int range = action[1];
      in.readFully(position, buffer, 0, range);
      totalBytesRead += range;
    }
    int reads = RANDOM_IO_SEQUENCE.length;
    timer.end("Time to execute %d reads of total size %d bytes",
        reads,
        totalBytesRead);
    in.close();
    assertOpenOperationCount(expectedOpenCount);
    logTimePerIOP("byte read", timer, totalBytesRead);
    LOG.info("Effective bandwidth {} MB/S",
        timer.bandwidthDescription(streamStatistics.bytesRead -
            streamStatistics.bytesSkippedOnSeek));
    logStreamStatistics();
    return timer;
  }

  /** @return the S3AInputStream wrapped by {@link #in}. */
  S3AInputStream getS3aStream() {
    return (S3AInputStream) in.getWrappedStream();
  }

  @Test
  public void testRandomReadOverBuffer() throws Throwable {
    describe("read over a buffer, making sure that the requests" +
        " spans readahead ranges");
    int datasetLen = _32K;
    S3AFileSystem fs = getFileSystem();
    Path dataFile = path("testReadOverBuffer.bin");
    byte[] sourceData = dataset(datasetLen, 0, 64);
    // relies on the field 'fs' referring to the R/W FS
    writeDataset(fs, dataFile, sourceData, datasetLen, _16K, true);
    byte[] buffer = new byte[datasetLen];
    int readahead = _8K;
    int halfReadahead = _4K;
    in = openDataFile(fs, dataFile, S3AInputPolicy.Random, readahead);

    LOG.info("Starting initial reads");
    S3AInputStream s3aStream = getS3aStream();
    assertEquals(readahead, s3aStream.getReadahead());
    byte[] oneByte = new byte[1];
    assertEquals(1, in.read(0, oneByte, 0, 1));
    // make some assertions about the current state
    assertEquals("remaining in\n" + in,
        readahead - 1, s3aStream.remainingInCurrentRequest());
    assertEquals("range start in\n" + in,
        0, s3aStream.getContentRangeStart());
    assertEquals("range finish in\n" + in,
        readahead, s3aStream.getContentRangeFinish());
    assertStreamOpenedExactlyOnce();

    describe("Starting sequence of positioned read calls over\n%s", in);
    NanoTimer readTimer = new NanoTimer();
    int currentPos = halfReadahead;
    int offset = currentPos;
    int bytesRead = 0;
    int readOps = 0;

    // make multiple read() calls
    while (bytesRead < halfReadahead) {
      int length = buffer.length - offset;
      int read = in.read(currentPos, buffer, offset, length);
      bytesRead += read;
      offset += read;
      readOps++;
      assertEquals("open operations on request #" + readOps
              + " after reading " + bytesRead
              + " current position in stream " + currentPos
              + " in\n" + fs
              + "\n " + in,
          1, streamStatistics.openOperations);
      for (int i = currentPos; i < currentPos + read; i++) {
        assertEquals("Wrong value from byte " + i,
            sourceData[i], buffer[i]);
      }
      currentPos += read;
    }
    assertStreamOpenedExactlyOnce();
    // assert at the end of the original block
    assertEquals(readahead, currentPos);
    readTimer.end("read %d in %d operations", bytesRead, readOps);
    bandwidth(readTimer, bytesRead);
    LOG.info("Time per byte(): {} nS",
        toHuman(readTimer.nanosPerOperation(bytesRead)));
    LOG.info("Time per read(): {} nS",
        toHuman(readTimer.nanosPerOperation(readOps)));

    describe("read last byte");
    // read one more
    int read = in.read(currentPos, buffer, bytesRead, 1);
    assertTrue("-1 from last read", read >= 0);
    assertOpenOperationCount(2);
    assertEquals("Wrong value from read ", sourceData[currentPos],
        (int) buffer[currentPos]);
    currentPos++;

    // now scan all the way to the end of the file, using single byte read()
    // calls
    describe("read() to EOF over \n%s", in);
    long readCount = 0;
    NanoTimer timer = new NanoTimer();
    LOG.info("seeking");
    in.seek(currentPos);
    LOG.info("reading");
    while (currentPos < datasetLen) {
      int r = in.read();
      assertTrue("Negative read() at position " + currentPos + " in\n" + in,
          r >= 0);
      buffer[currentPos] = (byte) r;
      assertEquals("Wrong value from read from\n" + in,
          sourceData[currentPos], r);
      currentPos++;
      readCount++;
    }
    timer.end("read %d bytes", readCount);
    bandwidth(timer, readCount);
    LOG.info("Time per read(): {} nS",
        toHuman(timer.nanosPerOperation(readCount)));
    assertEquals("last read in " + in, -1, in.read());
  }
}
| |
package org.yeastrc.xlink.www.searcher;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.util.List;
import org.slf4j.LoggerFactory; import org.slf4j.Logger;
import org.yeastrc.xlink.db.DBConnectionFactory;
import org.yeastrc.xlink.dto.AnnotationTypeDTO;
import org.yeastrc.xlink.dto.PsmDTO;
import org.yeastrc.xlink.enum_classes.FilterDirectionType;
import org.yeastrc.xlink.searcher_constants.SearcherGeneralConstants;
import org.yeastrc.xlink.searcher_psm_peptide_cutoff_objects.SearcherCutoffValuesAnnotationLevel;
import org.yeastrc.xlink.searcher_psm_peptide_cutoff_objects.SearcherCutoffValuesSearchLevel;
/**
 * Determines whether this PSM is the only PSM associated with its scan ids
 * for the current search that meets the current peptide and PSM cutoffs.
 *
 */
public class Psm_ScanCountForAssociatedScanId_From_PsmId_SearchId_Searcher {
private Psm_ScanCountForAssociatedScanId_From_PsmId_SearchId_Searcher() { }
public static Psm_ScanCountForAssociatedScanId_From_PsmId_SearchId_Searcher getInstance() { return new Psm_ScanCountForAssociatedScanId_From_PsmId_SearchId_Searcher(); }
private static final Logger log = LoggerFactory.getLogger( Psm_ScanCountForAssociatedScanId_From_PsmId_SearchId_Searcher.class);
private static final String SQL_MAIN = "SELECT COUNT(*) AS count FROM ( " ;
private static final String SQL_PSM_SUBSELECT =
" SELECT psm.id AS psm_id, psm.search_id, psm.reported_peptide_id FROM psm ";
private static final String SQL_PSM_SUBSELECT_WHERE_START =
" WHERE psm.scan_id = ? AND psm.id <> ? AND psm.search_id = ? ";
/**
* @param psmDTO
* @param searchId
* @param searcherCutoffValuesSearchLevel
* @return
* @throws Exception
*/
public int scanCountForAssociatedScanId( PsmDTO psmDTO, int searchId, SearcherCutoffValuesSearchLevel searcherCutoffValuesSearchLevel ) throws Exception {
if ( psmDTO == null ) {
throw new IllegalArgumentException( "psmDTO cannot be null" );
}
if ( psmDTO.getScanId() == null ) {
return 0;
}
int numPsms = 0;
Connection conn = null;
PreparedStatement pstmt = null;
ResultSet rs = null;
List<SearcherCutoffValuesAnnotationLevel> peptideCutoffValuesList =
searcherCutoffValuesSearchLevel.getPeptidePerAnnotationCutoffsList();
List<SearcherCutoffValuesAnnotationLevel> psmCutoffValuesList =
searcherCutoffValuesSearchLevel.getPsmPerAnnotationCutoffsList();
//////////////////////
///// Start building the SQL
StringBuilder sqlSB = new StringBuilder( 1000 );
sqlSB.append( SQL_MAIN );
sqlSB.append( SQL_PSM_SUBSELECT );
// Add inner join for each PSM cutoff
{
for ( int counter = 1; counter <= psmCutoffValuesList.size(); counter++ ) {
sqlSB.append( " INNER JOIN " );
// If slow, use psm_filterable_annotation__generic_lookup and put more limits in query on search, reported peptide, and maybe link type
sqlSB.append( " psm_filterable_annotation__generic_lookup AS psm_fltrbl_tbl_" );
sqlSB.append( Integer.toString( counter ) );
sqlSB.append( " ON " );
sqlSB.append( " psm.id = " );
sqlSB.append( "psm_fltrbl_tbl_" );
sqlSB.append( Integer.toString( counter ) );
sqlSB.append( ".psm_id" );
}
}
//// Start PSM Subselect WHERE
sqlSB.append( SQL_PSM_SUBSELECT_WHERE_START );
{
int counter = 0;
for ( SearcherCutoffValuesAnnotationLevel searcherCutoffValuesPsmAnnotationLevel : psmCutoffValuesList ) {
AnnotationTypeDTO srchPgmFilterablePsmAnnotationTypeDTO = searcherCutoffValuesPsmAnnotationLevel.getAnnotationTypeDTO();
counter++;
sqlSB.append( " AND " );
sqlSB.append( " ( " );
sqlSB.append( "psm_fltrbl_tbl_" );
sqlSB.append( Integer.toString( counter ) );
sqlSB.append( ".annotation_type_id = ? AND " );
sqlSB.append( "psm_fltrbl_tbl_" );
sqlSB.append( Integer.toString( counter ) );
sqlSB.append( ".value_double " );
if ( srchPgmFilterablePsmAnnotationTypeDTO.getAnnotationTypeFilterableDTO() == null ) {
String msg = "ERROR: Annotation type data must contain Filterable DTO data. Annotation type id: " + srchPgmFilterablePsmAnnotationTypeDTO.getId();
log.error( msg );
throw new Exception(msg);
}
if ( srchPgmFilterablePsmAnnotationTypeDTO.getAnnotationTypeFilterableDTO().getFilterDirectionType() == FilterDirectionType.ABOVE ) {
sqlSB.append( SearcherGeneralConstants.SQL_END_BIGGER_VALUE_BETTER );
} else {
sqlSB.append( SearcherGeneralConstants.SQL_END_SMALLER_VALUE_BETTER );
}
sqlSB.append( " ? " );
sqlSB.append( " ) " );
}
}
sqlSB.append( " ) AS psm_search_results " ); // Close PSM Subselect
// End PSM Subselect
// Peptide Cutoffs
{
// Add inner join for each Peptide cutoff
for ( int counter = 1; counter <= peptideCutoffValuesList.size(); counter++ ) {
sqlSB.append( " INNER JOIN " );
sqlSB.append( " srch__rep_pept__annotation AS srch__rep_pept_fltrbl_tbl_" );
sqlSB.append( Integer.toString( counter ) );
sqlSB.append( " ON " );
sqlSB.append( " psm_search_results.search_id = " );
sqlSB.append( "srch__rep_pept_fltrbl_tbl_" );
sqlSB.append( Integer.toString( counter ) );
sqlSB.append( ".search_id" );
sqlSB.append( " AND " );
sqlSB.append( " psm_search_results.reported_peptide_id = " );
sqlSB.append( "srch__rep_pept_fltrbl_tbl_" );
sqlSB.append( Integer.toString( counter ) );
sqlSB.append( ".reported_peptide_id" );
}
}
// Defer adding main " WHERE " until add a condition
boolean isMainWhereAdded = false;
// Process Peptide Cutoffs for WHERE
{
int counter = 0;
for ( SearcherCutoffValuesAnnotationLevel searcherCutoffValuesReportedPeptideAnnotationLevel : peptideCutoffValuesList ) {
AnnotationTypeDTO srchPgmFilterableReportedPeptideAnnotationTypeDTO = searcherCutoffValuesReportedPeptideAnnotationLevel.getAnnotationTypeDTO();
counter++;
if ( ! isMainWhereAdded ) {
isMainWhereAdded = true;
///// Main WHERE
sqlSB.append( " WHERE " );
}
if ( counter > 1 ) {
sqlSB.append( " AND " );
}
sqlSB.append( " ( " );
sqlSB.append( "srch__rep_pept_fltrbl_tbl_" );
sqlSB.append( Integer.toString( counter ) );
sqlSB.append( ".search_id = ? AND " );
sqlSB.append( "srch__rep_pept_fltrbl_tbl_" );
sqlSB.append( Integer.toString( counter ) );
sqlSB.append( ".annotation_type_id = ? AND " );
sqlSB.append( "srch__rep_pept_fltrbl_tbl_" );
sqlSB.append( Integer.toString( counter ) );
sqlSB.append( ".value_double " );
if ( srchPgmFilterableReportedPeptideAnnotationTypeDTO.getAnnotationTypeFilterableDTO() == null ) {
String msg = "ERROR: Annotation type data must contain Filterable DTO data. Annotation type id: " + srchPgmFilterableReportedPeptideAnnotationTypeDTO.getId();
log.error( msg );
throw new Exception(msg);
}
if ( srchPgmFilterableReportedPeptideAnnotationTypeDTO.getAnnotationTypeFilterableDTO().getFilterDirectionType() == FilterDirectionType.ABOVE ) {
sqlSB.append( SearcherGeneralConstants.SQL_END_BIGGER_VALUE_BETTER );
} else {
sqlSB.append( SearcherGeneralConstants.SQL_END_SMALLER_VALUE_BETTER );
}
sqlSB.append( "? " );
sqlSB.append( " ) " );
}
}
// If add more SQL to main WHERE clause, add this code before it:
// if ( ! isMainWhereAdded ) {
//
// isMainWhereAdded = true;
//
// ///// Main WHERE
//
// sqlSB.append( " WHERE " );
// }
String sql = sqlSB.toString();
try {
conn = DBConnectionFactory.getConnection( DBConnectionFactory.PROXL );
pstmt = conn.prepareStatement( sql );
int paramCounter = 0;
// PSM Subselect
paramCounter++;
pstmt.setInt( paramCounter, psmDTO.getScanId() );
paramCounter++;
pstmt.setInt( paramCounter, psmDTO.getId() );
paramCounter++;
pstmt.setInt( paramCounter, searchId );
// PSM cutoffs
for ( SearcherCutoffValuesAnnotationLevel searcherCutoffValuesPsmAnnotationLevel : psmCutoffValuesList ) {
AnnotationTypeDTO srchPgmFilterablePsmAnnotationTypeDTO = searcherCutoffValuesPsmAnnotationLevel.getAnnotationTypeDTO();
paramCounter++;
pstmt.setInt( paramCounter, srchPgmFilterablePsmAnnotationTypeDTO.getId() );
paramCounter++;
pstmt.setDouble( paramCounter, searcherCutoffValuesPsmAnnotationLevel.getAnnotationCutoffValue() );
}
// Process Peptide Cutoffs for WHERE
{
for ( SearcherCutoffValuesAnnotationLevel searcherCutoffValuesReportedPeptideAnnotationLevel : peptideCutoffValuesList ) {
AnnotationTypeDTO srchPgmFilterableReportedPeptideAnnotationTypeDTO = searcherCutoffValuesReportedPeptideAnnotationLevel.getAnnotationTypeDTO();
paramCounter++;
pstmt.setInt( paramCounter, searchId );
paramCounter++;
pstmt.setInt( paramCounter, srchPgmFilterableReportedPeptideAnnotationTypeDTO.getId() );
paramCounter++;
pstmt.setDouble( paramCounter, searcherCutoffValuesReportedPeptideAnnotationLevel.getAnnotationCutoffValue() );
}
}
rs = pstmt.executeQuery();
if( rs.next() ) {
numPsms = rs.getInt( "count" );
}
} catch ( Exception e ) {
log.error( "ERROR: SQL: " + sql, e );
throw e;
} finally {
// be sure database handles are closed
if( rs != null ) {
try { rs.close(); } catch( Throwable t ) { ; }
rs = null;
}
if( pstmt != null ) {
try { pstmt.close(); } catch( Throwable t ) { ; }
pstmt = null;
}
if( conn != null ) {
try { conn.close(); } catch( Throwable t ) { ; }
conn = null;
}
}
return numPsms;
}
}
| |
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
//
package com.cloud.hypervisor.hyperv.manager;
import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.HashMap;
import java.util.Map;
import java.util.Random;
import javax.inject.Inject;
import javax.naming.ConfigurationException;
import org.apache.cloudstack.engine.subsystem.api.storage.DataStore;
import org.apache.cloudstack.engine.subsystem.api.storage.DataStoreManager;
import org.apache.cloudstack.framework.config.dao.ConfigurationDao;
import org.apache.cloudstack.utils.identity.ManagementServerNode;
import org.apache.log4j.Logger;
import com.cloud.configuration.Config;
import com.cloud.storage.JavaStorageLayer;
import com.cloud.storage.StorageLayer;
import com.cloud.utils.FileUtil;
import com.cloud.utils.NumbersUtil;
import com.cloud.utils.db.GlobalLock;
import com.cloud.utils.exception.CloudRuntimeException;
import com.cloud.utils.script.Script;
import com.cloud.vm.dao.NicDao;
import com.cloud.vm.dao.VMInstanceDao;
/**
 * Hyper-V specific manager: prepares the systemvm patch ISO on secondary
 * storage, mounting CIFS shares locally as needed, and cleans up mount
 * points at startup/shutdown.  Mount/umount/chmod are performed via
 * external commands, so most methods here are environment-dependent.
 */
public class HypervManagerImpl implements HypervManager {
    public static final Logger s_logger = Logger.getLogger(HypervManagerImpl.class);

    private String name;
    private int runLevel;
    private Map<String, Object> params;

    // Timeout (ms) for external mount/umount/chmod commands.
    private int _timeout;

    Random _rand = new Random(System.currentTimeMillis());
    // Maps secondary storage URL -> local mount point created for it.
    // Guarded by synchronized(_storageMounts).
    Map<String, String> _storageMounts = new HashMap<String, String>();
    StorageLayer _storage;

    @Inject ConfigurationDao _configDao;
    @Inject DataStoreManager _dataStoreMgr;
    @Inject VMInstanceDao _vminstanceDao;
    @Inject NicDao _nicDao;

    int _routerExtraPublicNics = 2;

    /**
     * Reads scripts.timeout (seconds, default 30) and the storage layer from
     * the params, falling back to a new JavaStorageLayer when none is given.
     */
    @Override
    public boolean configure(String name, Map<String, Object> params) throws ConfigurationException {
        if (params != null) {
            String value = (String)params.get("scripts.timeout");
            _timeout = NumbersUtil.parseInt(value, 30) * 1000;
            _storage = (StorageLayer)params.get(StorageLayer.InstanceConfigKey);
        }
        if (_storage == null) {
            _storage = new JavaStorageLayer();
            _storage.configure("StorageLayer", params);
        }
        _routerExtraPublicNics = NumbersUtil.parseInt(_configDao.getValue(Config.RouterExtraPublicNics.key()), 2);
        return true;
    }

    @Override
    public boolean start() {
        // Remove stale mounts left over from a previous management server run.
        startupCleanup(getMountParent());
        return true;
    }

    @Override
    public boolean stop() {
        shutdownCleanup();
        return true;
    }

    @Override
    public void setName(String name) {
        this.name = name;
    }

    @Override
    public String getName() {
        return name;
    }

    @Override
    public void setConfigParams(Map<String, Object> params) {
        this.params = params;
    }

    @Override
    public Map<String, Object> getConfigParams() {
        return params;
    }

    @Override
    public int getRunLevel() {
        return runLevel;
    }

    @Override
    public void setRunLevel(int level) {
        runLevel = level;
    }

    /**
     * Resolves the secondary storage URL for the zone and ensures the
     * systemvm patch ISO is present on it.
     *
     * @return the secondary storage URL, or null when none could be found
     */
    @Override
    public String prepareSecondaryStorageStore(long zoneId) {
        String secondaryStorageUri = getSecondaryStorageStoreUrl(zoneId);
        if (secondaryStorageUri == null) {
            s_logger.debug("Secondary storage uri for dc " + zoneId + " couldn't be obtained");
        } else {
            prepareSecondaryStorageStore(secondaryStorageUri);
        }
        return secondaryStorageUri;
    }

    /** Returns the URI of an image store with free capacity, or null. */
    private String getSecondaryStorageStoreUrl(long zoneId) {
        String secUrl = null;
        DataStore secStore = _dataStoreMgr.getImageStoreWithFreeCapacity(zoneId);
        if (secStore != null) {
            secUrl = secStore.getUri();
        }
        if (secUrl == null) {
            s_logger.warn("Secondary storage uri couldn't be retrieved");
        }
        return secUrl;
    }

    /**
     * Mounts the store and copies the systemvm patch ISO into
     * {mount}/systemvm/ unless it is already there.  Serialized across
     * management servers via the "prepare.systemvm" global lock.
     */
    private void prepareSecondaryStorageStore(String storageUrl) {
        String mountPoint = getMountPoint(storageUrl);
        GlobalLock lock = GlobalLock.getInternLock("prepare.systemvm");
        try {
            // Wait up to one hour for another server doing the same work.
            if (lock.lock(3600)) {
                try {
                    File patchFolder = new File(mountPoint + "/systemvm");
                    if (!patchFolder.exists()) {
                        if (!patchFolder.mkdirs()) {
                            String msg = "Unable to create systemvm folder on secondary storage. location: " + patchFolder.toString();
                            s_logger.error(msg);
                            throw new CloudRuntimeException(msg);
                        }
                    }
                    File srcIso = getSystemVMPatchIsoFile();
                    File destIso = new File(mountPoint + "/systemvm/" + getSystemVMIsoFileNameOnDatastore());
                    if (!destIso.exists()) {
                        s_logger.info("Copy System VM patch ISO file to secondary storage. source ISO: " +
                            srcIso.getAbsolutePath() + ", destination: " + destIso.getAbsolutePath());
                        try {
                            FileUtil.copyfile(srcIso, destIso);
                        } catch (IOException e) {
                            s_logger.error("Unexpected exception ", e);
                            String msg = "Unable to copy systemvm ISO on secondary storage. src location: " + srcIso.toString() + ", dest location: " + destIso;
                            s_logger.error(msg);
                            throw new CloudRuntimeException(msg);
                        }
                    } else {
                        if (s_logger.isTraceEnabled()) {
                            s_logger.trace("SystemVM ISO file " + destIso.getPath() + " already exists");
                        }
                    }
                } finally {
                    lock.unlock();
                }
            }
        } finally {
            lock.releaseRef();
        }
    }

    /**
     * Returns (creating on demand) the local mount point for the storage URL.
     * NOTE(review): on mount failure this returns the hard-coded "/mnt/sec"
    * fallback instead of failing — confirm callers rely on this.
     */
    private String getMountPoint(String storageUrl) {
        String mountPoint = null;
        synchronized (_storageMounts) {
            mountPoint = _storageMounts.get(storageUrl);
            if (mountPoint != null) {
                return mountPoint;
            }
            URI uri;
            try {
                uri = new URI(storageUrl);
            } catch (URISyntaxException e) {
                s_logger.error("Invalid storage URL format ", e);
                throw new CloudRuntimeException("Unable to create mount point due to invalid storage URL format " + storageUrl);
            }
            // Build a UNC-style path (//host/path) for the mount command.
            mountPoint = mount(File.separator + File.separator + uri.getHost() + uri.getPath(), getMountParent(),
                    uri.getScheme(), uri.getQuery());
            if (mountPoint == null) {
                s_logger.error("Unable to create mount point for " + storageUrl);
                return "/mnt/sec";
            }
            _storageMounts.put(storageUrl, mountPoint);
            return mountPoint;
        }
    }

    /**
     * Mounts the given path under a freshly created mount point.  Only the
     * "cifs" scheme is actually mounted; the URL query string is passed as
     * comma-separated mount options.
     *
     * @return the mount point, or null when mounting failed
     */
    protected String mount(String path, String parent, String scheme, String query) {
        String mountPoint = setupMountPoint(parent);
        if (mountPoint == null) {
            s_logger.warn("Unable to create a mount point");
            return null;
        }
        Script script = null;
        String result = null;
        if (scheme.equals("cifs")) {
            String user = System.getProperty("user.name");
            Script command = new Script(true, "mount", _timeout, s_logger);
            command.add("-t", "cifs");
            command.add(path);
            command.add(mountPoint);
            if (user != null) {
                command.add("-o", "uid=" + user + ",gid=" + user);
            }
            if (query != null) {
                // URL query params become mount options: a=b&c=d -> a=b,c=d
                query = query.replace('&', ',');
                command.add("-o", query);
            }
            result = command.execute();
        }
        if (result != null) {
            s_logger.warn("Unable to mount " + path + " due to " + result);
            File file = new File(mountPoint);
            if (file.exists()) {
                file.delete();
            }
            return null;
        }
        // Change permissions for the mountpoint
        script = new Script(true, "chmod", _timeout, s_logger);
        script.add("-R", "777", mountPoint);
        result = script.execute();
        if (result != null) {
            s_logger.warn("Unable to set permissions for " + mountPoint + " due to " + result);
        }
        return mountPoint;
    }

    /**
     * Creates a unique directory "{parent}/{mshostId}.{randomHex}" to mount
     * under; tries up to 10 random suffixes before giving up.
     *
     * @return the created directory, or null on failure
     */
    private String setupMountPoint(String parent) {
        String mountPoint = null;
        long mshostId = ManagementServerNode.getManagementServerId();
        for (int i = 0; i < 10; i++) {
            String mntPt = parent + File.separator + String.valueOf(mshostId) + "." + Integer.toHexString(_rand.nextInt(Integer.MAX_VALUE));
            File file = new File(mntPt);
            if (!file.exists()) {
                if (_storage.mkdir(mntPt)) {
                    mountPoint = mntPt;
                    break;
                }
            }
            s_logger.error("Unable to create mount: " + mntPt);
        }
        return mountPoint;
    }

    /** ISO file name on the datastore; ':' is illegal there, swap for '-'. */
    private String getSystemVMIsoFileNameOnDatastore() {
        String version = this.getClass().getPackage().getImplementationVersion();
        String fileName = "systemvm-" + version + ".iso";
        return fileName.replace(':', '-');
    }

    /**
     * Locates systemvm.iso: first on the classpath, then at the packaged
     * default location.  Logs an error if neither exists.
     */
    private File getSystemVMPatchIsoFile() {
        // locate systemvm.iso
        URL url = this.getClass().getClassLoader().getResource("vms/systemvm.iso");
        File isoFile = null;
        if (url != null) {
            isoFile = new File(url.getPath());
        }
        if (isoFile == null || !isoFile.exists()) {
            isoFile = new File("/usr/share/cloudstack-common/vms/systemvm.iso");
        }
        assert (isoFile != null);
        if (!isoFile.exists()) {
            s_logger.error("Unable to locate systemvm.iso in your setup at " + isoFile.toString());
        }
        return isoFile;
    }

    /**
     * Parent directory for mount points:
     * "{mount.parent|/mnt}/{instance.name|DEFAULT}".
     */
    private String getMountParent() {
        String mountParent = _configDao.getValue(Config.MountParent.key());
        if (mountParent == null) {
            mountParent = File.separator + "mnt";
        }
        String instance = _configDao.getValue(Config.InstanceName.key());
        if (instance == null) {
            instance = "DEFAULT";
        }
        // FIX: removed the dead "if (instance != null)" guard — instance is
        // always non-null here because of the default assignment above.
        return mountParent + File.separator + instance;
    }

    /** Unmounts and deletes mount points left by a previous session. */
    private void startupCleanup(String parent) {
        s_logger.info("Cleanup mounted mount points used in previous session");
        long mshostId = ManagementServerNode.getManagementServerId();
        // cleanup left-over NFS mounts from previous session
        String[] mounts = _storage.listFiles(parent + File.separator + String.valueOf(mshostId) + ".*");
        if (mounts != null && mounts.length > 0) {
            for (String mountPoint : mounts) {
                s_logger.info("umount NFS mount from previous session: " + mountPoint);
                String result = null;
                Script command = new Script(true, "umount", _timeout, s_logger);
                command.add(mountPoint);
                result = command.execute();
                if (result != null) {
                    s_logger.warn("Unable to umount " + mountPoint + " due to " + result);
                }
                File file = new File(mountPoint);
                if (file.exists()) {
                    file.delete();
                }
            }
        }
    }

    /** Unmounts and deletes every mount point created in this session. */
    private void shutdownCleanup() {
        s_logger.info("Cleanup mounted mount points used in current session");
        synchronized (_storageMounts) {
            for (String mountPoint : _storageMounts.values()) {
                s_logger.info("umount NFS mount: " + mountPoint);
                String result = null;
                Script command = new Script(true, "umount", _timeout, s_logger);
                command.add(mountPoint);
                result = command.execute();
                if (result != null) {
                    s_logger.warn("Unable to umount " + mountPoint + " due to " + result);
                }
                File file = new File(mountPoint);
                if (file.exists()) {
                    file.delete();
                }
            }
        }
    }

    @Override
    public int getRouterExtraPublicNics() {
        return _routerExtraPublicNics;
    }
}
| |
/*
* Copyright 2000-2016 Vaadin Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.vaadin.v7.ui;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Stack;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.jsoup.nodes.Element;
import com.vaadin.server.PaintException;
import com.vaadin.server.PaintTarget;
import com.vaadin.server.Resource;
import com.vaadin.ui.declarative.DesignAttributeHandler;
import com.vaadin.ui.declarative.DesignContext;
import com.vaadin.ui.declarative.DesignException;
import com.vaadin.v7.data.Collapsible;
import com.vaadin.v7.data.Container;
import com.vaadin.v7.data.Container.Hierarchical;
import com.vaadin.v7.data.Container.ItemSetChangeEvent;
import com.vaadin.v7.data.util.ContainerHierarchicalWrapper;
import com.vaadin.v7.data.util.HierarchicalContainer;
import com.vaadin.v7.data.util.HierarchicalContainerOrderedWrapper;
import com.vaadin.v7.shared.ui.treetable.TreeTableConstants;
import com.vaadin.v7.shared.ui.treetable.TreeTableState;
import com.vaadin.v7.ui.Tree.CollapseEvent;
import com.vaadin.v7.ui.Tree.CollapseListener;
import com.vaadin.v7.ui.Tree.ExpandEvent;
import com.vaadin.v7.ui.Tree.ExpandListener;
/**
* TreeTable extends the {@link Table} component so that it can also visualize a
* hierarchy of its Items in a similar manner that {@link Tree} does. The tree
* hierarchy is always displayed in the first actual column of the TreeTable.
* <p>
* The TreeTable supports the usual {@link Table} features like lazy loading, so
* it should be no problem to display lots of items at once. Only required rows
* and some cache rows are sent to the client.
* <p>
* TreeTable supports standard {@link Hierarchical} container interfaces, but
* also a more fine tuned version - {@link Collapsible}. A container
* implementing the {@link Collapsible} interface stores the collapsed/expanded
* state internally and can this way scale better on the server side than with
* standard Hierarchical implementations. Developer must however note that
* {@link Collapsible} containers can not be shared among several users as they
* share UI state in the container.
*
* @deprecated As of 8.0. New hierarchical components are planned in an upcoming version of Vaadin Framework 8.
*/
@SuppressWarnings({ "serial" })
@Deprecated
public class TreeTable extends Table implements Hierarchical {
/**
 * Strategy used by TreeTable to answer hierarchy-related queries (visible
 * row order, open/collapsed state, depth) against the current container.
 * Implementations differ depending on whether the container is
 * {@link Collapsible} or merely {@link Hierarchical}.
 */
private interface ContainerStrategy extends Serializable {
    /** Number of currently visible (i.e. not hidden by collapse) rows. */
    public int size();

    /** True when the node's children are currently visible. */
    public boolean isNodeOpen(Object itemId);

    /** Depth of the item: 0 for root items, parent depth + 1 otherwise. */
    public int getDepth(Object itemId);

    /** Flips the expanded/collapsed state of the given node. */
    public void toggleChildVisibility(Object itemId);

    /** Item id at the given index in visible order. */
    public Object getIdByIndex(int index);

    /** Index of the item in visible order, or -1 when not visible. */
    public int indexOfId(Object id);

    public Object nextItemId(Object itemId);

    public Object lastItemId();

    public Object prevItemId(Object itemId);

    public boolean isLastId(Object itemId);

    /** All currently visible item ids, in visible order. */
    public Collection<?> getItemIds();

    /** Hook invoked when the underlying container's item set changes. */
    public void containerItemSetChange(ItemSetChangeEvent event);
}
/**
 * Shared base for both strategies: computes item depth by walking the
 * parent chain and provides a no-op item-set-change hook.
 */
private abstract class AbstractStrategy implements ContainerStrategy {
    /**
     * Consider adding getDepth to {@link Collapsible}, might help
     * scalability with some container implementations.
     */
    @Override
    public int getDepth(Object itemId) {
        int depth = 0;
        Hierarchical hierarchicalContainer = getContainerDataSource();
        // Walk towards the root; each hop is one level of depth.
        while (!hierarchicalContainer.isRoot(itemId)) {
            depth++;
            itemId = hierarchicalContainer.getParent(itemId);
        }
        return depth;
    }

    @Override
    public void containerItemSetChange(ItemSetChangeEvent event) {
        // Default: no cached state to invalidate.
    }
}
/**
 * This strategy is used if current container implements {@link Collapsible}
 * .
 *
 * open-collapsed logic diverted to container, otherwise use default
 * implementations. A Collapsible container already serves items in visible
 * order, so ordering/indexing calls delegate straight to Table.
 */
private class CollapsibleStrategy extends AbstractStrategy {

    /** The current container data source, cast to Collapsible. */
    private Collapsible c() {
        return (Collapsible) getContainerDataSource();
    }

    @Override
    public void toggleChildVisibility(Object itemId) {
        // Flip the collapsed flag stored inside the container itself.
        c().setCollapsed(itemId, !c().isCollapsed(itemId));
    }

    @Override
    public boolean isNodeOpen(Object itemId) {
        return !c().isCollapsed(itemId);
    }

    @Override
    public int size() {
        return TreeTable.super.size();
    }

    @Override
    public Object getIdByIndex(int index) {
        return TreeTable.super.getIdByIndex(index);
    }

    @Override
    public int indexOfId(Object id) {
        return TreeTable.super.indexOfId(id);
    }

    @Override
    public boolean isLastId(Object itemId) {
        // using the default impl
        return TreeTable.super.isLastId(itemId);
    }

    @Override
    public Object lastItemId() {
        // using the default impl
        return TreeTable.super.lastItemId();
    }

    @Override
    public Object nextItemId(Object itemId) {
        return TreeTable.super.nextItemId(itemId);
    }

    @Override
    public Object prevItemId(Object itemId) {
        return TreeTable.super.prevItemId(itemId);
    }

    @Override
    public Collection<?> getItemIds() {
        return TreeTable.super.getItemIds();
    }
}
/**
 * Strategy for Hierarchical but not Collapsible container like
 * {@link HierarchicalContainer}.
 *
 * Store collapsed/open states internally, fool Table to use preorder when
 * accessing items from container via Ordered/Indexed methods. The visible
 * preorder is cached in {@link #preOrder} and invalidated on toggle or
 * item set change.
 */
private class HierarchicalStrategy extends AbstractStrategy {

    // Item ids whose children are currently visible; everything else is
    // considered collapsed.
    private final HashSet<Object> openItems = new HashSet<Object>();

    @Override
    public boolean isNodeOpen(Object itemId) {
        return openItems.contains(itemId);
    }

    @Override
    public int size() {
        return getPreOrder().size();
    }

    @Override
    public Collection<Object> getItemIds() {
        return Collections.unmodifiableCollection(getPreOrder());
    }

    @Override
    public boolean isLastId(Object itemId) {
        if (itemId == null) {
            return false;
        }
        return itemId.equals(lastItemId());
    }

    @Override
    public Object lastItemId() {
        if (getPreOrder().size() > 0) {
            return getPreOrder().get(getPreOrder().size() - 1);
        } else {
            return null;
        }
    }

    @Override
    public Object nextItemId(Object itemId) {
        int indexOf = getPreOrder().indexOf(itemId);
        if (indexOf == -1) {
            // Not visible -> no successor.
            return null;
        }
        indexOf++;
        if (indexOf == getPreOrder().size()) {
            return null;
        } else {
            return getPreOrder().get(indexOf);
        }
    }

    @Override
    public Object prevItemId(Object itemId) {
        int indexOf = getPreOrder().indexOf(itemId);
        // NOTE: when itemId is not visible, indexOf is -1 and this returns
        // null via the indexOf < 0 branch below.
        indexOf--;
        if (indexOf < 0) {
            return null;
        } else {
            return getPreOrder().get(indexOf);
        }
    }

    @Override
    public void toggleChildVisibility(Object itemId) {
        // remove == true means the item was open and is now collapsed.
        boolean removed = openItems.remove(itemId);
        if (!removed) {
            openItems.add(itemId);
            getLogger().log(Level.FINEST, "Item {0} is now expanded",
                    itemId);
        } else {
            getLogger().log(Level.FINEST, "Item {0} is now collapsed",
                    itemId);
        }
        clearPreorderCache();
    }

    private void clearPreorderCache() {
        preOrder = null; // clear preorder cache
    }

    // Cached preorder of visible item ids; null means "rebuild on demand".
    List<Object> preOrder;

    /**
     * Preorder of ids currently visible
     *
     * @return the cached list, rebuilding it from the container's roots
     *         when the cache has been invalidated
     */
    private List<Object> getPreOrder() {
        if (preOrder == null) {
            preOrder = new ArrayList<Object>();
            Collection<?> rootItemIds = getContainerDataSource()
                    .rootItemIds();
            for (Object id : rootItemIds) {
                preOrder.add(id);
                addVisibleChildTree(id);
            }
        }
        return preOrder;
    }

    /** Depth-first append of the children of an open node to preOrder. */
    private void addVisibleChildTree(Object id) {
        if (isNodeOpen(id)) {
            Collection<?> children = getContainerDataSource()
                    .getChildren(id);
            if (children != null) {
                for (Object childId : children) {
                    preOrder.add(childId);
                    addVisibleChildTree(childId);
                }
            }
        }
    }

    @Override
    public int indexOfId(Object id) {
        return getPreOrder().indexOf(id);
    }

    @Override
    public Object getIdByIndex(int index) {
        return getPreOrder().get(index);
    }

    @Override
    public void containerItemSetChange(ItemSetChangeEvent event) {
        // preorder becomes invalid on sort, item additions etc.
        clearPreorderCache();
        super.containerItemSetChange(event);
    }
}
/**
 * Creates an empty TreeTable with a default container.
 */
public TreeTable() {
    // HierarchicalContainer is the default Hierarchical implementation.
    super(null, new HierarchicalContainer());
}
/**
 * Creates an empty TreeTable with a default container and the given
 * caption.
 *
 * @param caption
 *            the caption for the TreeTable
 */
public TreeTable(String caption) {
    this();
    setCaption(caption);
}
/**
 * Creates a TreeTable instance with given captions and data source.
 *
 * @param caption
 *            the caption for the component
 * @param dataSource
 *            the dataSource that is used to list items in the component
 */
public TreeTable(String caption, Container dataSource) {
    super(caption, dataSource);
}
// Lazily created strategy; reset to null whenever the container changes.
private ContainerStrategy cStrategy;

// Row the client should focus in the next paint, or null when none.
private Object focusedRowId = null;

// Column in which the hierarchy (tree) is rendered; null = first column.
private Object hierarchyColumnId;

/**
 * The item id that was expanded or collapsed during this request. Reset at
 * the end of paint and only used for determining if a partial or full paint
 * should be done.
 *
 * Can safely be reset to null whenever a change occurs that would prevent a
 * partial update from rendering the correct result, e.g. rows added or
 * removed during an expand operation.
 */
private Object toggledItemId;

// Whether expand/collapse animations are enabled on the client side.
private boolean animationsEnabled;

// True when a focusParent request resolved to no row, but the client must
// still be told the request was processed.
private boolean clearFocusedRowPending;

/**
 * If the container does not send item set change events, always do a full
 * repaint instead of a partial update when expanding/collapsing nodes.
 */
private boolean containerSupportsPartialUpdates;
/**
 * Returns the hierarchy strategy for the current container, creating it
 * lazily: Collapsible containers get the delegating strategy, everything
 * else the internal-state strategy.
 */
private ContainerStrategy getContainerStrategy() {
    if (cStrategy != null) {
        return cStrategy;
    }
    cStrategy = getContainerDataSource() instanceof Collapsible
            ? new CollapsibleStrategy()
            : new HierarchicalStrategy();
    return cStrategy;
}
@Override
protected void paintRowAttributes(PaintTarget target, Object itemId)
        throws PaintException {
    super.paintRowAttributes(target, itemId);
    // Depth drives the indentation of the tree column on the client.
    target.addAttribute("depth", getContainerStrategy().getDepth(itemId));
    if (getContainerDataSource().areChildrenAllowed(itemId)) {
        // "ca" = children allowed; "open" = current expanded state.
        target.addAttribute("ca", true);
        target.addAttribute("open",
                getContainerStrategy().isNodeOpen(itemId));
    }
}
@Override
protected void paintRowIcon(PaintTarget target, Object[][] cells,
        int indexInRowbuffer) throws PaintException {
    // always paint if present (in parent only if row headers visible)
    if (getRowHeaderMode() == ROW_HEADER_MODE_HIDDEN) {
        // Row headers hidden: fetch the icon directly from the item.
        Resource itemIcon = getItemIcon(
                cells[CELL_ITEMID][indexInRowbuffer]);
        if (itemIcon != null) {
            target.addAttribute("icon", itemIcon);
        }
    } else if (cells[CELL_ICON][indexInRowbuffer] != null) {
        // Otherwise reuse the icon already resolved into the row buffer.
        target.addAttribute("icon",
                (Resource) cells[CELL_ICON][indexInRowbuffer]);
    }
}
/**
 * Row headers are suppressed in ICON_ONLY mode (the icon is painted as
 * part of the tree column instead); otherwise the Table default applies.
 */
@Override
protected boolean rowHeadersAreEnabled() {
    return getRowHeaderMode() != RowHeaderMode.ICON_ONLY
            && super.rowHeadersAreEnabled();
}
/**
 * Handles the TreeTable-specific client variables: "toggleCollapsed"
 * (expand/collapse a node, optionally selecting it) and "focusParent"
 * (scroll/select/focus the parent of a row).
 */
@Override
public void changeVariables(Object source, Map<String, Object> variables) {
    super.changeVariables(source, variables);
    if (variables.containsKey("toggleCollapsed")) {
        String object = (String) variables.get("toggleCollapsed");
        Object itemId = itemIdMapper.get(object);
        // Remember the toggled row so paint can do a partial update.
        toggledItemId = itemId;
        toggleChildVisibility(itemId, false);
        if (variables.containsKey("selectCollapsed")) {
            // ensure collapsed is selected unless opened with selection
            // head
            if (isSelectable()) {
                select(itemId);
            }
        }
    } else if (variables.containsKey("focusParent")) {
        String key = (String) variables.get("focusParent");
        Object refId = itemIdMapper.get(key);
        Object itemId = getParent(refId);
        focusParent(itemId);
    }
}
/**
 * Scrolls the given (parent) item into view if needed, selects it when
 * the table is selectable and marks it as the row to focus on the client.
 *
 * BUG FIX: the loop previously incremented {@code i} both in the for
 * header and in the loop body, so only half of the page length was
 * scanned when checking whether the row is already in view, causing
 * unnecessary scrolling for rows in the lower half of the page.
 */
private void focusParent(Object itemId) {
    boolean inView = false;
    Object inPageId = getCurrentPageFirstItemId();
    for (int i = 0; inPageId != null && i < getPageLength(); i++) {
        if (inPageId.equals(itemId)) {
            inView = true;
            break;
        }
        inPageId = nextItemId(inPageId);
    }
    if (!inView) {
        setCurrentPageFirstItemId(itemId);
    }
    // Select the row if it is selectable.
    if (isSelectable()) {
        if (isMultiSelect()) {
            setValue(Collections.singleton(itemId));
        } else {
            setValue(itemId);
        }
    }
    setFocusedRow(itemId);
}
/**
 * Records the row to focus in the next paint. A null id still triggers a
 * repaint so the client learns its focusParent request was handled.
 */
private void setFocusedRow(Object itemId) {
    focusedRowId = itemId;
    if (focusedRowId == null) {
        // Must still inform the client that the focusParent request has
        // been processed
        clearFocusedRowPending = true;
    }
    markAsDirty();
}
/**
 * Paints TreeTable-specific attributes (focused row, animation flag,
 * hierarchy column index) before delegating to Table, then clears the
 * per-request toggle state.
 */
@Override
public void paintContent(PaintTarget target) throws PaintException {
    if (focusedRowId != null) {
        target.addAttribute("focusedRow", itemIdMapper.key(focusedRowId));
        // One-shot: consumed by this paint.
        focusedRowId = null;
    } else if (clearFocusedRowPending) {
        // Must still inform the client that the focusParent request has
        // been processed
        target.addAttribute("clearFocusPending", true);
        clearFocusedRowPending = false;
    }
    target.addAttribute("animate", animationsEnabled);
    if (hierarchyColumnId != null) {
        // Translate the hierarchy column id into its visible index.
        Object[] visibleColumns2 = getVisibleColumns();
        for (int i = 0; i < visibleColumns2.length; i++) {
            Object object = visibleColumns2[i];
            if (hierarchyColumnId.equals(object)) {
                target.addAttribute(
                        TreeTableConstants.ATTRIBUTE_HIERARCHY_COLUMN_INDEX,
                        i);
                break;
            }
        }
    }
    super.paintContent(target);
    // Partial-update bookkeeping is valid for a single paint only.
    toggledItemId = null;
}
/*
 * Override methods for partial row updates and additions when expanding /
 * collapsing nodes.
 */

/**
 * A partial update is possible only when a node was toggled this request,
 * the container reports item set changes, and the row cache is intact.
 */
@Override
protected boolean isPartialRowUpdate() {
    return toggledItemId != null && containerSupportsPartialUpdates
            && !isRowCacheInvalidated();
}
/**
 * Rows revealed by an expand are inserted immediately below the toggled
 * row, so the first added index is the toggled row's index plus one.
 */
@Override
protected int getFirstAddedItemIndex() {
    int toggledIndex = indexOfId(toggledItemId);
    return toggledIndex + 1;
}
/**
 * Number of rows revealed (or hidden) by the toggle: the toggled node's
 * visible descendant count.
 */
@Override
protected int getAddedRowCount() {
    return countSubNodesRecursively(getContainerDataSource(),
            toggledItemId);
}
/**
 * Counts the visible descendants of itemId: direct children plus,
 * recursively, the descendants of each expanded child.
 *
 * Cleanup: removed the redundant {@code children != null ? ... : 0}
 * ternary that sat inside a {@code children != null} guard.
 */
private int countSubNodesRecursively(Hierarchical hc, Object itemId) {
    int count = 0;
    // we need the number of children for toggledItemId no matter if its
    // collapsed or expanded. Other items' children are only counted if the
    // item is expanded.
    if (getContainerStrategy().isNodeOpen(itemId)
            || itemId == toggledItemId) {
        Collection<?> children = hc.getChildren(itemId);
        if (children != null) {
            count += children.size();
            for (Object id : children) {
                count += countSubNodesRecursively(hc, id);
            }
        }
    }
    return count;
}
@Override
protected int getFirstUpdatedItemIndex() {
    // The toggled node itself is repainted (its expand/collapse indicator
    // changes).
    return indexOfId(toggledItemId);
}
@Override
protected int getUpdatedRowCount() {
    // Only the toggled row is updated in a partial repaint.
    return 1;
}
@Override
protected boolean shouldHideAddedRows() {
    // A collapse (node no longer open) removes rows instead of adding them.
    return !getContainerStrategy().isNodeOpen(toggledItemId);
}
/**
 * Flips the expanded/collapsed state of the given node in the container
 * strategy, fires the matching event and triggers either a partial or a
 * full client repaint.
 *
 * @param itemId
 *            the node to toggle
 * @param forceFullRefresh
 *            when true, always refresh the whole row cache instead of
 *            attempting a partial row update
 */
private void toggleChildVisibility(Object itemId,
        boolean forceFullRefresh) {
    getContainerStrategy().toggleChildVisibility(itemId);
    // ensure that page still has first item in page, DON'T clear the
    // caches.
    setCurrentPageFirstItemIndex(getCurrentPageFirstItemIndex(), false);
    // State has already been toggled above, so isCollapsed() reflects the
    // NEW state here.
    if (isCollapsed(itemId)) {
        fireCollapseEvent(itemId);
    } else {
        fireExpandEvent(itemId);
    }
    if (containerSupportsPartialUpdates && !forceFullRefresh) {
        markAsDirty();
    } else {
        // For containers that do not send item set change events, always do
        // full repaint instead of partial row update.
        refreshRowCache();
    }
}
@Override
public int size() {
    // Visible row count: collapsed subtrees are excluded by the strategy.
    return getContainerStrategy().size();
}
@Override
public Hierarchical getContainerDataSource() {
    // Safe cast: setContainerDataSource() wraps any non-Hierarchical
    // container before handing it to super.
    return (Hierarchical) super.getContainerDataSource();
}
@Override
public void setContainerDataSource(Container newDataSource) {
    // Drop the cached strategy; it is rebuilt lazily for the new container.
    cStrategy = null;

    // FIXME: This disables partial updates until TreeTable is fixed so it
    // does not change component hierarchy during paint
    containerSupportsPartialUpdates = (newDataSource instanceof ItemSetChangeNotifier)
            && false;

    // Wrap so the table always sees a container that is both Hierarchical
    // and Ordered, regardless of what the caller supplied.
    if (newDataSource != null && !(newDataSource instanceof Hierarchical)) {
        newDataSource = new ContainerHierarchicalWrapper(newDataSource);
    }

    if (newDataSource != null && !(newDataSource instanceof Ordered)) {
        newDataSource = new HierarchicalContainerOrderedWrapper(
                (Hierarchical) newDataSource);
    }

    super.setContainerDataSource(newDataSource);
}
@Override
public void containerItemSetChange(Container.ItemSetChangeEvent event) {
    // Can't do partial repaints if items are added or removed during the
    // expand/collapse request
    toggledItemId = null;
    getContainerStrategy().containerItemSetChange(event);
    super.containerItemSetChange(event);
}
@Override
protected Object getIdByIndex(int index) {
    // Index is into the VISIBLE rows; the strategy skips collapsed nodes.
    return getContainerStrategy().getIdByIndex(index);
}
@Override
protected int indexOfId(Object itemId) {
    // Visible-row index as computed by the container strategy.
    return getContainerStrategy().indexOfId(itemId);
}
@Override
public Object nextItemId(Object itemId) {
    // Next VISIBLE item; children of collapsed nodes are skipped.
    return getContainerStrategy().nextItemId(itemId);
}
@Override
public Object lastItemId() {
    // Last VISIBLE item according to the strategy's flattened order.
    return getContainerStrategy().lastItemId();
}
@Override
public Object prevItemId(Object itemId) {
    // Previous VISIBLE item; children of collapsed nodes are skipped.
    return getContainerStrategy().prevItemId(itemId);
}
@Override
public boolean isLastId(Object itemId) {
    // "Last" in terms of the visible (expanded) item sequence.
    return getContainerStrategy().isLastId(itemId);
}
@Override
public Collection<?> getItemIds() {
    // All currently visible item ids in display order.
    return getContainerStrategy().getItemIds();
}
// Container.Hierarchical delegate: forwarded straight to the data source.
@Override
public boolean areChildrenAllowed(Object itemId) {
    return getContainerDataSource().areChildrenAllowed(itemId);
}
// Container.Hierarchical delegate: forwarded straight to the data source.
@Override
public Collection<?> getChildren(Object itemId) {
    return getContainerDataSource().getChildren(itemId);
}
// Container.Hierarchical delegate: forwarded straight to the data source.
@Override
public Object getParent(Object itemId) {
    return getContainerDataSource().getParent(itemId);
}
// Container.Hierarchical delegate: forwarded straight to the data source.
@Override
public boolean hasChildren(Object itemId) {
    return getContainerDataSource().hasChildren(itemId);
}
// Container.Hierarchical delegate: forwarded straight to the data source.
@Override
public boolean isRoot(Object itemId) {
    return getContainerDataSource().isRoot(itemId);
}
// Container.Hierarchical delegate: forwarded straight to the data source.
@Override
public Collection<?> rootItemIds() {
    return getContainerDataSource().rootItemIds();
}
// Container.Hierarchical delegate: forwarded straight to the data source.
@Override
public boolean setChildrenAllowed(Object itemId, boolean areChildrenAllowed)
        throws UnsupportedOperationException {
    return getContainerDataSource().setChildrenAllowed(itemId,
            areChildrenAllowed);
}
// Container.Hierarchical delegate: forwarded straight to the data source.
@Override
public boolean setParent(Object itemId, Object newParentId)
        throws UnsupportedOperationException {
    return getContainerDataSource().setParent(itemId, newParentId);
}
/**
 * Sets the Item specified by given identifier as collapsed or expanded. If
 * the Item is collapsed, its children are not displayed to the user.
 *
 * @param itemId
 *            the identifier of the Item
 * @param collapsed
 *            true if the Item should be collapsed, false if expanded
 */
public void setCollapsed(Object itemId, boolean collapsed) {
    // No-op when the state already matches.
    if (isCollapsed(itemId) != collapsed) {
        if (null == toggledItemId && !isRowCacheInvalidated()
                && getVisibleItemIds().contains(itemId)) {
            // optimization: partial refresh if only one item is
            // collapsed/expanded
            toggledItemId = itemId;
            toggleChildVisibility(itemId, false);
        } else {
            // make sure a full refresh takes place - otherwise neither
            // partial nor full repaint of table content is performed
            toggledItemId = null;
            toggleChildVisibility(itemId, true);
        }
    }
}
/**
 * Checks if Item with given identifier is collapsed in the UI.
 *
 * <p>
 *
 * @param itemId
 *            the identifier of the checked Item
 * @return true if the Item with given id is collapsed
 * @see Collapsible#isCollapsed(Object)
 */
public boolean isCollapsed(Object itemId) {
    // Collapsed is simply the inverse of the strategy's "open" state.
    return !getContainerStrategy().isNodeOpen(itemId);
}
/**
 * Explicitly sets the column in which the TreeTable visualizes the
 * hierarchy. If hierarchyColumnId is not set, the hierarchy is visualized
 * in the first visible column.
 *
 * @param hierarchyColumnId
 *            the id of the column that should render the tree indentation
 *            and expand/collapse controls
 */
public void setHierarchyColumn(Object hierarchyColumnId) {
    this.hierarchyColumnId = hierarchyColumnId;
}
/**
 * @return the identifier of column into which the hierarchy will be
 *         visualized or null if the column is not explicitly defined.
 */
public Object getHierarchyColumnId() {
    return hierarchyColumnId;
}
/**
 * Adds an expand listener, notified after a node has been expanded.
 *
 * @param listener
 *            the Listener to be added.
 */
public void addExpandListener(ExpandListener listener) {
    addListener(ExpandEvent.class, listener, ExpandListener.EXPAND_METHOD);
}
/**
 * @deprecated As of 7.0, replaced by
 *             {@link #addExpandListener(ExpandListener)}. Kept only for
 *             backwards compatibility; simply forwards.
 **/
@Deprecated
public void addListener(ExpandListener listener) {
    addExpandListener(listener);
}
/**
 * Removes an expand listener.
 *
 * @param listener
 *            the Listener to be removed.
 */
public void removeExpandListener(ExpandListener listener) {
    removeListener(ExpandEvent.class, listener,
            ExpandListener.EXPAND_METHOD);
}
/**
 * @deprecated As of 7.0, replaced by
 *             {@link #removeExpandListener(ExpandListener)}. Kept only for
 *             backwards compatibility; simply forwards.
 **/
@Deprecated
public void removeListener(ExpandListener listener) {
    removeExpandListener(listener);
}
/**
 * Emits an expand event to all registered {@code ExpandListener}s.
 *
 * @param itemId
 *            the item id.
 */
protected void fireExpandEvent(Object itemId) {
    fireEvent(new ExpandEvent(this, itemId));
}
/**
 * Adds a collapse listener, notified after a node has been collapsed.
 *
 * @param listener
 *            the Listener to be added.
 */
public void addCollapseListener(CollapseListener listener) {
    addListener(CollapseEvent.class, listener,
            CollapseListener.COLLAPSE_METHOD);
}
/**
 * @deprecated As of 7.0, replaced by
 *             {@link #addCollapseListener(CollapseListener)}. Kept only
 *             for backwards compatibility; simply forwards.
 **/
@Deprecated
public void addListener(CollapseListener listener) {
    addCollapseListener(listener);
}
/**
 * Removes a collapse listener.
 *
 * @param listener
 *            the Listener to be removed.
 */
public void removeCollapseListener(CollapseListener listener) {
    removeListener(CollapseEvent.class, listener,
            CollapseListener.COLLAPSE_METHOD);
}
/**
 * @deprecated As of 7.0, replaced by
 *             {@link #removeCollapseListener(CollapseListener)}. Kept only
 *             for backwards compatibility; simply forwards.
 **/
@Deprecated
public void removeListener(CollapseListener listener) {
    removeCollapseListener(listener);
}
/**
 * Emits a collapse event to all registered {@code CollapseListener}s.
 *
 * @param itemId
 *            the item id.
 */
protected void fireCollapseEvent(Object itemId) {
    fireEvent(new CollapseEvent(this, itemId));
}
/**
 * @return true if expand/collapse animations are enabled on the client
 */
public boolean isAnimationsEnabled() {
    return animationsEnabled;
}
/**
 * Animations can be enabled by passing true to this method. Currently
 * expanding rows slide in from the top and collapsing rows slide out the
 * same way. NOTE! not supported in Internet Explorer 6 or 7.
 *
 * @param animationsEnabled
 *            true or false whether to enable animations or not.
 */
public void setAnimationsEnabled(boolean animationsEnabled) {
    this.animationsEnabled = animationsEnabled;
    // Repaint so the flag reaches the client in the next paint phase.
    markAsDirty();
}
/**
 * Returns the logger for this class; fetched on demand instead of held in
 * a static field. (Dropped the redundant {@code final} modifier — a
 * {@code private static} method cannot be overridden anyway.)
 */
private static Logger getLogger() {
    return Logger.getLogger(TreeTable.class.getName());
}
/**
 * Returns the ids of the visible rows in the half-open range
 * {@code [firstIndex, firstIndex + rows)}.
 *
 * @param firstIndex
 *            index of the first visible row to include
 * @param rows
 *            number of row ids to return
 * @return a list containing exactly {@code rows} ids (empty when
 *         {@code rows <= 0})
 */
@Override
protected List<Object> getItemIds(int firstIndex, int rows) {
    // Presize: the result always holds exactly 'rows' elements; guard
    // against a negative count, which would make the constructor throw.
    List<Object> itemIds = new ArrayList<Object>(Math.max(rows, 0));
    for (int i = firstIndex; i < firstIndex + rows; i++) {
        itemIds.add(getIdByIndex(i));
    }
    return itemIds;
}
@Override
protected void readBody(Element design, DesignContext context) {
    Element tbody = design.select("> table > tbody").first();
    if (tbody == null) {
        // No rows declared in the design; nothing to read.
        return;
    }

    // itemId of the item that has been read last and the depth of that item.
    // Depth is used to find the parent for the next item.
    Set<String> selected = new HashSet<String>();
    // Stack of ancestors of the most recently read row, one per depth
    // level; parents.peek() is the parent for a row one level deeper.
    Stack<Object> parents = new Stack<Object>();
    int lastDepth = -1;

    for (Element tr : tbody.children()) {
        int depth = DesignAttributeHandler.readAttribute("depth",
                tr.attributes(), 0, int.class);
        // A row may be at most one level deeper than its predecessor;
        // anything else cannot form a valid tree.
        if (depth < 0 || depth > lastDepth + 1) {
            throw new DesignException(
                    "Malformed TreeTable item hierarchy at " + tr
                            + ": last depth was " + lastDepth);
        } else if (depth <= lastDepth) {
            // Popping back up: discard ancestors below the new depth.
            for (int d = depth; d <= lastDepth; d++) {
                parents.pop();
            }
        }

        Object itemId = readItem(tr, selected, context);
        setParent(itemId, !parents.isEmpty() ? parents.peek() : null);
        parents.push(itemId);
        lastDepth = depth;
    }
}
@Override
protected Object readItem(Element tr, Set<String> selected,
        DesignContext context) {
    // Let the Table implementation read cells/selection, then apply the
    // TreeTable-specific "collapsed" attribute if present.
    Object itemId = super.readItem(tr, selected, context);

    if (tr.hasAttr("collapsed")) {
        boolean collapsed = DesignAttributeHandler
                .readAttribute("collapsed", tr.attributes(), boolean.class);
        setCollapsed(itemId, collapsed);
    }

    return itemId;
}
@Override
protected void writeItems(Element design, DesignContext context) {
    if (getVisibleColumns().length == 0) {
        // Without columns there is nothing meaningful to serialize.
        return;
    }
    Element tbody = design.child(0).appendElement("tbody");
    // Depth-first from the roots so parents precede their children.
    writeItems(tbody, rootItemIds(), 0, context);
}
/**
 * Writes the given items as {@code <tr>} rows, then recurses into each
 * item's children with an incremented depth attribute.
 *
 * @param tbody
 *            the element rows are appended to
 * @param itemIds
 *            the item ids to write at this level
 * @param depth
 *            tree depth of this level (0 for roots)
 * @param context
 *            the design context used for attribute serialization
 */
protected void writeItems(Element tbody, Collection<?> itemIds, int depth,
        DesignContext context) {
    for (Object itemId : itemIds) {
        Element tr = writeItem(tbody, itemId, context);
        DesignAttributeHandler.writeAttribute("depth", tr.attributes(),
                depth, 0, int.class, context);

        // Fetch the children once instead of calling getChildren() twice.
        Collection<?> children = getChildren(itemId);
        if (children != null) {
            writeItems(tbody, children, depth + 1, context);
        }
    }
}
@Override
protected Element writeItem(Element tbody, Object itemId,
        DesignContext context) {
    // Write the standard Table row, then add the TreeTable-specific
    // "collapsed" attribute (default true, so only expanded rows emit it).
    Element tr = super.writeItem(tbody, itemId, context);
    DesignAttributeHandler.writeAttribute("collapsed", tr.attributes(),
            isCollapsed(itemId), true, boolean.class, context);
    return tr;
}
@Override
protected TreeTableState getState() {
    // Covariant narrowing of the shared state type.
    return (TreeTableState) super.getState();
}
}
| |
package com.example.root.margarita.util;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.BufferedReader;
import java.io.Reader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.StringReader;
/**
 * A JSONTokener takes a source string/reader/stream and extracts characters
 * and tokens from it, tracking line/column position for error messages. It is
 * used by {@link #nextValue()} to parse JSON values.
 *
 * <p>Bug fixed versus the previous revision: {@code nextValue()} used to build
 * objects via {@code new JSONObject(String.valueOf(this))} — i.e. it parsed
 * the tokener's <em>position string</em> (" at 0 [character 1 line 1]")
 * instead of the remaining input — and arrays via {@code new JSONArray(this)},
 * which handed this non-org.json tokener to {@code JSONArray(Object)}. Both
 * paths could never produce a correct container. Objects and arrays are now
 * parsed in place by {@link #nextObject()} and {@link #nextArray()} using the
 * standard org.json grammar.
 *
 * <p>Not thread-safe: each instance owns mutable cursor state.
 */
public class JSONTokener {

    // 1-based column within the current line (reset on newline).
    private long character;
    // True once the underlying reader has reported end-of-stream.
    private boolean eof;
    // Number of characters consumed so far.
    private long index;
    // 1-based line number.
    private long line;
    // Most recently read character; re-delivered after back().
    private char previous;
    private Reader reader;
    // True while back() has pushed 'previous' for re-reading.
    private boolean usePrevious;

    /**
     * Construct a JSONTokener from a Reader.
     *
     * @param reader A reader.
     */
    public JSONTokener(Reader reader) {
        // mark()/reset() support is required by skipTo(); wrap if needed.
        this.reader = reader.markSupported()
                ? reader
                : new BufferedReader(reader);
        this.eof = false;
        this.usePrevious = false;
        this.previous = 0;
        this.index = 0;
        this.character = 1;
        this.line = 1;
    }

    /**
     * Construct a JSONTokener from an InputStream.
     *
     * <p>NOTE(review): decodes with the platform default charset; JSON is
     * conventionally UTF-8 — confirm callers only pass UTF-8 streams.
     *
     * @param inputStream The source.
     */
    public JSONTokener(InputStream inputStream) throws JSONException {
        this(new InputStreamReader(inputStream));
    }

    /**
     * Construct a JSONTokener from a string.
     *
     * @param s A source string.
     */
    public JSONTokener(String s) {
        this(new StringReader(s));
    }

    /**
     * Back up one character. This provides a sort of lookahead capability,
     * so that you can test for a digit or letter before attempting to parse
     * the next number or identifier. Only a single character of pushback is
     * supported.
     *
     * @throws JSONException if already backed up, or at the very start
     */
    public void back() throws JSONException {
        if (this.usePrevious || this.index <= 0) {
            throw new JSONException("Stepping back two steps is not supported");
        }
        this.index -= 1;
        this.character -= 1;
        this.usePrevious = true;
        this.eof = false;
    }

    /**
     * Get the hex value of a character (base16).
     *
     * @param c A character between '0' and '9' or between 'A' and 'F' or
     *          between 'a' and 'f'.
     * @return An int between 0 and 15, or -1 if c was not a hex digit.
     */
    public static int dehexchar(char c) {
        if (c >= '0' && c <= '9') {
            return c - '0';
        }
        if (c >= 'A' && c <= 'F') {
            return c - ('A' - 10);
        }
        if (c >= 'a' && c <= 'f') {
            return c - ('a' - 10);
        }
        return -1;
    }

    /** @return true once end-of-stream has been reached and consumed. */
    public boolean end() {
        return this.eof && !this.usePrevious;
    }

    /**
     * Determine if the source string still contains characters that next()
     * can consume.
     *
     * @return true if not yet at the end of the source.
     */
    public boolean more() throws JSONException {
        // Peek by reading one character and pushing it back.
        this.next();
        if (this.end()) {
            return false;
        }
        this.back();
        return true;
    }

    /**
     * Get the next character in the source string.
     *
     * @return The next character, or 0 if past the end of the source string.
     */
    public char next() throws JSONException {
        int c = 0;
        if (this.usePrevious) {
            this.usePrevious = false;
            c = this.previous;
        } else {
            try {
                c = this.reader.read();
            } catch (IOException exception) {
                // NOTE(review): an IOException is logged and then treated as
                // end-of-stream (best-effort); canonical org.json throws a
                // JSONException here instead — confirm this degradation is
                // intended before changing it.
                exception.printStackTrace();
            }

            if (c <= 0) { // End of stream
                this.eof = true;
                c = 0;
            }
        }
        this.index += 1;
        // Track line/column; a lone '\r' or a "\r\n" pair both count as one
        // line break.
        if (this.previous == '\r') {
            this.line += 1;
            this.character = c == '\n' ? 0 : 1;
        } else if (c == '\n') {
            this.line += 1;
            this.character = 0;
        } else {
            this.character += 1;
        }
        this.previous = (char) c;
        return this.previous;
    }

    /**
     * Consume the next character, and check that it matches a specified
     * character.
     *
     * @param c The character to match.
     * @return The character.
     * @throws JSONException if the character does not match.
     */
    public char next(char c) throws JSONException {
        char n = this.next();
        if (n != c) {
            throw this.syntaxError("Expected '" + c + "' and instead saw '" +
                    n + "'");
        }
        return n;
    }

    /**
     * Get the next n characters.
     *
     * @param n The number of characters to take.
     * @return A string of n characters.
     * @throws JSONException Substring bounds error if there are not
     *                       n characters remaining in the source string.
     */
    public String next(int n) throws JSONException {
        if (n == 0) {
            return "";
        }

        char[] chars = new char[n];
        int pos = 0;

        while (pos < n) {
            chars[pos] = this.next();
            if (this.end()) {
                throw this.syntaxError("Substring bounds error");
            }
            pos += 1;
        }
        return new String(chars);
    }

    /**
     * Get the next char in the string, skipping whitespace.
     *
     * @return A character, or 0 if there are no more characters.
     * @throws JSONException on read errors
     */
    public char nextClean() throws JSONException {
        for (;;) {
            char c = this.next();
            // Everything at or below ' ' (space) counts as whitespace,
            // except 0 which signals end-of-input.
            if (c == 0 || c > ' ') {
                return c;
            }
        }
    }

    /**
     * Return the characters up to the next close quote character.
     * Backslash processing is done. The formal JSON format does not
     * allow strings in single quotes, but an implementation is allowed to
     * accept them.
     *
     * @param quote The quoting character, either
     *              <code>"</code> <small>(double quote)</small> or
     *              <code>'</code> <small>(single quote)</small>.
     * @return A String.
     * @throws JSONException Unterminated string.
     */
    public String nextString(char quote) throws JSONException {
        char c;
        StringBuilder sb = new StringBuilder();
        for (;;) {
            c = this.next();
            switch (c) {
            case 0:
            case '\n':
            case '\r':
                // Strings may not contain raw line breaks or run past EOF.
                throw this.syntaxError("Unterminated string");
            case '\\':
                c = this.next();
                switch (c) {
                case 'b':
                    sb.append('\b');
                    break;
                case 't':
                    sb.append('\t');
                    break;
                case 'n':
                    sb.append('\n');
                    break;
                case 'f':
                    sb.append('\f');
                    break;
                case 'r':
                    sb.append('\r');
                    break;
                case 'u':
                    // \ uXXXX: exactly four hex digits.
                    sb.append((char) Integer.parseInt(this.next(4), 16));
                    break;
                case '"':
                case '\'':
                case '\\':
                case '/':
                    sb.append(c);
                    break;
                default:
                    throw this.syntaxError("Illegal escape.");
                }
                break;
            default:
                if (c == quote) {
                    return sb.toString();
                }
                sb.append(c);
            }
        }
    }

    /**
     * Get the text up but not including the specified character or the
     * end of line, whichever comes first.
     *
     * @param delimiter A delimiter character.
     * @return A string, trimmed.
     */
    public String nextTo(char delimiter) throws JSONException {
        StringBuilder sb = new StringBuilder();
        for (;;) {
            char c = this.next();
            if (c == delimiter || c == 0 || c == '\n' || c == '\r') {
                if (c != 0) {
                    // Leave the delimiter/newline for the caller.
                    this.back();
                }
                return sb.toString().trim();
            }
            sb.append(c);
        }
    }

    /**
     * Get the text up but not including one of the specified delimiter
     * characters or the end of line, whichever comes first.
     *
     * @param delimiters A set of delimiter characters.
     * @return A string, trimmed.
     */
    public String nextTo(String delimiters) throws JSONException {
        char c;
        StringBuilder sb = new StringBuilder();
        for (;;) {
            c = this.next();
            if (delimiters.indexOf(c) >= 0 || c == 0 ||
                    c == '\n' || c == '\r') {
                if (c != 0) {
                    // Leave the delimiter/newline for the caller.
                    this.back();
                }
                return sb.toString().trim();
            }
            sb.append(c);
        }
    }

    /**
     * Get the next value. The value can be a Boolean, Double, Integer,
     * JSONArray, JSONObject, Long, or String, or the JSONObject.NULL object.
     *
     * @return An object.
     * @throws JSONException If syntax error.
     */
    public Object nextValue() throws JSONException {
        char c = this.nextClean();
        String string;

        switch (c) {
        case '"':
        case '\'':
            return this.nextString(c);
        case '{':
            // FIX: parse the remaining input in place. The previous code
            // did new JSONObject(String.valueOf(this)), which parsed the
            // tokener's toString() position report, not the JSON text.
            this.back();
            return this.nextObject();
        case '[':
            this.back();
            return this.nextArray();
        }

        /*
         * Handle unquoted text. This could be the values true, false, or
         * null, or it can be a number. An implementation (such as this one)
         * is allowed to also accept non-standard forms.
         *
         * Accumulate characters until we reach the end of the text or a
         * formatting character.
         */
        StringBuilder sb = new StringBuilder();
        while (c >= ' ' && ",:]}/\\\"[{;=#".indexOf(c) < 0) {
            sb.append(c);
            c = this.next();
        }
        this.back();

        string = sb.toString().trim();
        if ("".equals(string)) {
            throw this.syntaxError("Missing value");
        }
        return stringToValue(string);
    }

    /**
     * Parses a JSONObject starting at the current position (the next
     * non-whitespace character must be '{'). Implements the standard
     * org.json object grammar, including the lenient ';' separator and
     * unquoted keys accepted via {@link #nextValue()}.
     *
     * @return the parsed object
     * @throws JSONException on malformed input
     */
    private JSONObject nextObject() throws JSONException {
        JSONObject object = new JSONObject();
        if (this.nextClean() != '{') {
            throw this.syntaxError("A JSONObject text must begin with '{'");
        }
        for (;;) {
            char c = this.nextClean();
            String key;
            switch (c) {
            case 0:
                throw this.syntaxError("A JSONObject text must end with '}'");
            case '}':
                return object;
            default:
                this.back();
                key = this.nextValue().toString();
            }

            if (this.nextClean() != ':') {
                throw this.syntaxError("Expected a ':' after a key");
            }
            object.put(key, this.nextValue());

            // Pairs are separated by ',' (or lenient ';'); '}' ends the
            // object. A trailing separator before '}' is tolerated.
            switch (this.nextClean()) {
            case ';':
            case ',':
                if (this.nextClean() == '}') {
                    return object;
                }
                this.back();
                break;
            case '}':
                return object;
            default:
                throw this.syntaxError("Expected a ',' or '}'");
            }
        }
    }

    /**
     * Parses a JSONArray starting at the current position (the next
     * non-whitespace character must be '['). Implements the standard
     * org.json array grammar; an elided element ("[,1]") becomes
     * {@code JSONObject.NULL}.
     *
     * @return the parsed array
     * @throws JSONException on malformed input
     */
    private JSONArray nextArray() throws JSONException {
        JSONArray array = new JSONArray();
        if (this.nextClean() != '[') {
            throw this.syntaxError("A JSONArray text must start with '['");
        }
        if (this.nextClean() == ']') {
            return array;
        }
        this.back();
        for (;;) {
            if (this.nextClean() == ',') {
                // Elided value.
                this.back();
                array.put(JSONObject.NULL);
            } else {
                this.back();
                array.put(this.nextValue());
            }
            switch (this.nextClean()) {
            case ',':
                if (this.nextClean() == ']') {
                    return array;
                }
                this.back();
                break;
            case ']':
                return array;
            default:
                throw this.syntaxError("Expected a ',' or ']'");
            }
        }
    }

    /**
     * Try to convert an unquoted token to a Boolean, JSONObject.NULL,
     * Integer, Long or Double; otherwise return the string itself.
     *
     * @param string the raw, trimmed token (never null)
     * @return the converted value
     */
    public static Object stringToValue(String string) {
        Double d;
        if (string.equals("")) {
            return string;
        }
        if (string.equalsIgnoreCase("true")) {
            return Boolean.TRUE;
        }
        if (string.equalsIgnoreCase("false")) {
            return Boolean.FALSE;
        }
        if (string.equalsIgnoreCase("null")) {
            return JSONObject.NULL;
        }

        /*
         * If it might be a number, try converting it. If a number cannot be
         * produced, then the value will just be a string.
         */
        char b = string.charAt(0);
        if ((b >= '0' && b <= '9') || b == '-') {
            try {
                if (string.indexOf('.') > -1 || string.indexOf('e') > -1
                        || string.indexOf('E') > -1) {
                    d = Double.valueOf(string);
                    if (!d.isInfinite() && !d.isNaN()) {
                        return d;
                    }
                } else {
                    // Long.parseLong replaces the deprecated new Long(...).
                    long l = Long.parseLong(string);
                    // Round-trip guard: reject forms like "+5" or "007"
                    // whose canonical rendering differs from the input.
                    if (string.equals(Long.toString(l))) {
                        // Narrow to Integer when the value fits, matching
                        // the documented Integer/Long return types.
                        if (l == (int) l) {
                            return (int) l;
                        }
                        return l;
                    }
                }
            } catch (Exception ignore) {
                // Fall through: not a parseable number, treat as string.
            }
        }
        return string;
    }

    /**
     * Skip characters until the next character is the requested character.
     * If the requested character is not found, no characters are skipped.
     *
     * @param to A character to skip to.
     * @return The requested character, or zero if the requested character
     *         is not found.
     */
    public char skipTo(char to) throws JSONException {
        char c = 0;
        try {
            // Remember position so we can rewind when 'to' is absent.
            long startIndex = this.index;
            long startCharacter = this.character;
            long startLine = this.line;
            this.reader.mark(1000000);
            do {
                c = this.next();
                if (c == 0) {
                    // Not found: restore the reader and cursor state.
                    this.reader.reset();
                    this.index = startIndex;
                    this.character = startCharacter;
                    this.line = startLine;
                    return c;
                }
            } while (c != to);
        } catch (IOException exception) {
            // NOTE(review): best-effort, matches next(); see note there.
            exception.printStackTrace();
        }
        // Push the found character back so the caller consumes it.
        this.back();
        return c;
    }

    /**
     * Make a JSONException to signal a syntax error.
     *
     * @param message The error message.
     * @return A JSONException object, suitable for throwing
     */
    public JSONException syntaxError(String message) {
        return new JSONException(message + this.toString());
    }

    /**
     * Make a printable string of this JSONTokener.
     *
     * @return " at {index} [character {character} line {line}]"
     */
    public String toString() {
        return " at " + this.index + " [character " + this.character + " line " +
                this.line + "]";
    }
}
| |
/*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.refactoring;
import com.intellij.JavaTestUtil;
import com.intellij.codeInsight.TargetElementUtil;
import com.intellij.openapi.roots.LanguageLevelProjectExtension;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.pom.java.LanguageLevel;
import com.intellij.psi.PsiClass;
import com.intellij.psi.PsiElement;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.refactoring.safeDelete.SafeDeleteHandler;
import com.intellij.testFramework.PsiTestUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.jps.model.java.JavaSourceRootType;
import org.jetbrains.jps.model.java.JpsJavaExtensionService;
/**
 * Tests for the "Safe Delete" refactoring. Each positive test runs the
 * refactoring against test data under {@link #getTestRoot()} and compares
 * the result with a stored "_after" snapshot; negative tests expect a
 * {@link BaseRefactoringProcessor.ConflictsInTestsException} whose message
 * explains why the element is not safe to delete.
 */
public class SafeDeleteTest extends MultiFileTestCase {
  @Override
  protected String getTestDataPath() {
    return JavaTestUtil.getJavaTestDataPath();
  }

  @NotNull
  @Override
  protected String getTestRoot() {
    return "/refactoring/safeDelete/";
  }

  public void testImplicitCtrCall() throws Exception {
    try {
      doTest("Super");
      fail();
    }
    catch (BaseRefactoringProcessor.ConflictsInTestsException e) {
      String message = e.getMessage();
      assertTrue(message, message.startsWith("constructor <b><code>Super.Super()</code></b> has 1 usage that is not safe to delete"));
    }
  }

  public void testImplicitCtrCall2() throws Exception {
    try {
      doTest("Super");
      fail();
    }
    catch (BaseRefactoringProcessor.ConflictsInTestsException e) {
      String message = e.getMessage();
      assertTrue(message, message.startsWith("constructor <b><code>Super.Super()</code></b> has 1 usage that is not safe to delete"));
    }
  }

  public void testMultipleInterfacesImplementation() throws Exception {
    doTest("IFoo");
  }

  public void testMultipleInterfacesImplementationThroughCommonInterface() throws Exception {
    doTest("IFoo");
  }

  public void testUsageInExtendsList() throws Exception {
    doSingleFileTest();
  }

  public void testDeepDeleteParameterSimple() throws Exception {
    doSingleFileTest();
  }

  public void testDeepDeleteParameterOtherTypeInBinaryExpression() throws Exception {
    doSingleFileTest();
  }

  public void testImpossibleToDeepDeleteParameter() throws Exception {
    doSingleFileTest();
  }

  public void testNoDeepDeleteParameterUsedInCallQualifier() throws Exception {
    doSingleFileTest();
  }

  public void testNoDeepDeleteParameterUsedInNextArgumentExpression() throws Exception {
    doSingleFileTest();
  }

  public void testToDeepDeleteParameterOverriders() throws Exception {
    doSingleFileTest();
  }

  public void testDeleteMethodCascade() throws Exception {
    doSingleFileTest();
  }

  public void testDeleteMethodCascadeRecursive() throws Exception {
    doSingleFileTest();
  }

  public void testDeleteMethodCascadeOverridden() throws Exception {
    doSingleFileTest();
  }

  public void testDeleteParameterAndUpdateJavadocRef() throws Exception {
    doSingleFileTest();
  }

  public void testDeleteConstructorParameterWithAnonymousClassUsage() throws Exception {
    doSingleFileTest();
  }

  public void testParameterInHierarchy() throws Exception {
    doTest("C2");
  }

  public void testTopLevelDocComment() throws Exception {
    doTest("foo.C1");
  }

  public void testOverloadedMethods() throws Exception {
    doTest("foo.A");
  }

  public void testTopParameterInHierarchy() throws Exception {
    doTest("I");
  }

  public void testExtendsList() throws Exception {
    doTest("B");
  }

  public void testJavadocParamRef() throws Exception {
    doTest("Super");
  }

  public void testEnumConstructorParameter() throws Exception {
    doTest("UserFlags");
  }

  public void testSafeDeleteStaticImports() throws Exception {
    doTest("A");
  }

  public void testSafeDeleteImports() throws Exception {
    doTest("B");
  }

  public void testRemoveOverridersInspiteOfUnsafeUsages() throws Exception {
    // Conflicts are intentionally suppressed so the refactoring proceeds
    // and the resulting code can be compared with the snapshot.
    try {
      BaseRefactoringProcessor.ConflictsInTestsException.setTestIgnore(true);
      doTest("A");
    }
    finally {
      BaseRefactoringProcessor.ConflictsInTestsException.setTestIgnore(false);
    }
  }

  public void testLocalVariable() throws Exception {
    doTest("Super");
  }

  public void testOverrideAnnotation() throws Exception {
    doTest("Super");
  }

  public void testSuperCall() throws Exception {
    try {
      doTest("Super");
      fail("Conflict was not detected");
    }
    catch (BaseRefactoringProcessor.ConflictsInTestsException e) {
      String message = e.getMessage();
      assertEquals("method <b><code>Super.foo()</code></b> has 1 usage that is not safe to delete.", message);
    }
  }

  public void testParameterFromFunctionalInterface() throws Exception {
    // Lambdas require at least language level 8.
    try {
      LanguageLevelProjectExtension.getInstance(getProject()).setLanguageLevel(LanguageLevel.JDK_1_8);
      doSingleFileTest();
      fail("Conflict was not detected");
    }
    catch (BaseRefactoringProcessor.ConflictsInTestsException e) {
      String message = e.getMessage();
      assertEquals("class <b><code>SAM</code></b> has 1 usage that is not safe to delete.", message);
    }
  }

  public void testFunctionalInterfaceMethod() throws Exception {
    try {
      LanguageLevelProjectExtension.getInstance(getProject()).setLanguageLevel(LanguageLevel.JDK_1_8);
      doSingleFileTest();
      fail("Conflict was not detected");
    }
    catch (BaseRefactoringProcessor.ConflictsInTestsException e) {
      String message = e.getMessage();
      assertEquals("class <b><code>SAM</code></b> has 1 usage that is not safe to delete.", message);
    }
  }

  public void testAmbiguityAfterParameterDelete() throws Exception {
    try {
      doSingleFileTest();
      fail("Conflict was not detected");
    }
    catch (BaseRefactoringProcessor.ConflictsInTestsException e) {
      String message = e.getMessage();
      assertEquals("Method foo() is already defined in the class <b><code>Test</code></b>", message);
    }
  }

  public void testFunctionalInterfaceDefaultMethod() throws Exception {
    LanguageLevelProjectExtension.getInstance(getProject()).setLanguageLevel(LanguageLevel.JDK_1_8);
    doSingleFileTest();
  }

  public void testMethodDeepHierarchy() throws Exception {
    doTest("Super");
  }

  public void testInterfaceAsTypeParameterBound() throws Exception {
    doSingleFileTest();
  }

  public void testLocalVariableSideEffect() throws Exception {
    try {
      doTest("Super");
      fail("Side effect was ignored");
    }
    catch (BaseRefactoringProcessor.ConflictsInTestsException e) {
      String message = e.getMessage();
      assertEquals("local variable <b><code>varName</code></b> has 1 usage that is not safe to delete.", message);
    }
  }

  public void testUsageInGenerated() throws Exception {
    doTest("A");
  }

  public void testLastResourceVariable() throws Exception {
    // try-with-resources requires at least language level 7.
    LanguageLevelProjectExtension.getInstance(getProject()).setLanguageLevel(LanguageLevel.JDK_1_7);
    doSingleFileTest();
  }

  public void testLastResourceVariableWithFinallyBlock() throws Exception {
    LanguageLevelProjectExtension.getInstance(getProject()).setLanguageLevel(LanguageLevel.JDK_1_7);
    doSingleFileTest();
  }

  public void testLastTypeParam() throws Exception {
    LanguageLevelProjectExtension.getInstance(getProject()).setLanguageLevel(LanguageLevel.JDK_1_7);
    doSingleFileTest();
  }

  public void testTypeParamFromDiamond() throws Exception {
    LanguageLevelProjectExtension.getInstance(getProject()).setLanguageLevel(LanguageLevel.JDK_1_7);
    doSingleFileTest();
  }

  public void testStripOverride() throws Exception {
    doSingleFileTest();
  }

  public void testEmptyIf() throws Exception {
    doSingleFileTest();
  }

  public void testTypeParameterWithinMethodHierarchy() throws Exception {
    doSingleFileTest();
  }

  public void testTypeParameterNoMethodHierarchy() throws Exception {
    doSingleFileTest();
  }

  public void testClassWithInnerStaticImport() throws Exception {
    doTest("ClassWithInnerStaticImport");
  }

  public void testInnerClassUsedInTheSameFile() throws Exception {
    try {
      doSingleFileTest();
      fail("Side effect was ignored");
    }
    catch (BaseRefactoringProcessor.ConflictsInTestsException e) {
      String message = e.getMessage();
      assertEquals("class <b><code>Test.Foo</code></b> has 1 usage that is not safe to delete.", message);
    }
  }

  public void testParameterInMethodUsedInMethodReference() throws Exception {
    LanguageLevelProjectExtension.getInstance(getProject()).setLanguageLevel(LanguageLevel.JDK_1_8);
    doSingleFileTest();
  }

  public void testShowConflictsButRemoveAnnotationsIfAnnotationTypeIsDeleted() throws Exception {
    try {
      BaseRefactoringProcessor.ConflictsInTestsException.setTestIgnore(true);
      doSingleFileTest();
    }
    finally {
      BaseRefactoringProcessor.ConflictsInTestsException.setTestIgnore(false);
    }
  }

  /**
   * Multi-file variant: runs Safe Delete on the named class inside the
   * test's "before" project tree and diffs against the "after" tree.
   */
  private void doTest(@NonNls final String qClassName) throws Exception {
    doTest((rootDir, rootAfter) -> this.performAction(qClassName));
  }

  @Override
  protected void prepareProject(VirtualFile rootDir) {
    // Use an explicit "src" child as the source root when present;
    // otherwise fall back to the default project layout.
    VirtualFile src = rootDir.findChild("src");
    if (src == null) {
      super.prepareProject(rootDir);
    }
    else {
      PsiTestUtil.addContentRoot(myModule, rootDir);
      PsiTestUtil.addSourceRoot(myModule, src);
    }
    VirtualFile gen = rootDir.findChild("gen");
    if (gen != null) {
      // NOTE(review): the boolean argument presumably marks this root as
      // containing generated sources (cf. testUsageInGenerated) — confirm
      // against JpsJavaExtensionService.createSourceRootProperties.
      PsiTestUtil.addSourceRoot(myModule, gen, JavaSourceRootType.SOURCE, JpsJavaExtensionService.getInstance().createSourceRootProperties("", true));
    }
  }

  /**
   * Single-file variant: configures "<TestName>.java", runs Safe Delete on
   * the element at the caret, and compares with "<TestName>_after.java".
   */
  private void doSingleFileTest() throws Exception {
    configureByFile(getTestRoot() + getTestName(false) + ".java");
    performAction();
    checkResultByFile(getTestRoot() + getTestName(false) + "_after.java");
  }

  private void performAction(final String qClassName) throws Exception {
    final PsiClass aClass = myJavaFacade.findClass(qClassName, GlobalSearchScope.allScope(getProject()));
    assertNotNull("Class " + qClassName + " not found", aClass);
    configureByExistingFile(aClass.getContainingFile().getVirtualFile());

    performAction();
  }

  /**
   * Invokes Safe Delete on the element under the caret in the current
   * editor (name or reference), failing if no target element is found.
   */
  private void performAction() {
    final PsiElement psiElement = TargetElementUtil
      .findTargetElement(myEditor, TargetElementUtil.ELEMENT_NAME_ACCEPTED | TargetElementUtil.REFERENCED_ELEMENT_ACCEPTED);
    assertNotNull("No element found in text:\n" + getFile().getText(), psiElement);

    SafeDeleteHandler.invoke(getProject(), new PsiElement[]{psiElement}, true);
  }
}
| |
/*
* This file is part of Sponge, licensed under the MIT License (MIT).
*
* Copyright (c) SpongePowered <https://www.spongepowered.org>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package co.aikar.timings;
import static co.aikar.timings.TimingsManager.HISTORY;
import co.aikar.util.JSONUtil;
import co.aikar.util.JSONUtil.JsonObjectBuilder;
import com.google.common.base.Joiner;
import com.google.common.collect.Sets;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import net.minecraft.block.Block;
import net.minecraft.server.MinecraftServer;
import ninja.leaping.configurate.ConfigurationNode;
import org.spongepowered.api.block.BlockType;
import org.spongepowered.api.command.CommandSource;
import org.spongepowered.api.command.source.ConsoleSource;
import org.spongepowered.api.command.source.RconSource;
import org.spongepowered.api.entity.EntityType;
import org.spongepowered.api.text.Texts;
import org.spongepowered.api.text.action.TextActions;
import org.spongepowered.api.text.format.TextColors;
import org.spongepowered.common.SpongeImpl;
import org.spongepowered.common.entity.SpongeEntityType;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.lang.management.ManagementFactory;
import java.lang.management.RuntimeMXBean;
import java.net.HttpURLConnection;
import java.net.InetAddress;
import java.net.URL;
import java.util.Map.Entry;
import java.util.Set;
import java.util.zip.GZIPOutputStream;
class TimingsExport extends Thread {

    // private static final Joiner AUTHOR_LIST_JOINER = Joiner.on(", ");
    private static final Joiner RUNTIME_FLAG_JOINER = Joiner.on(" ");
    private static final Joiner CONFIG_PATH_JOINER = Joiner.on(".");

    // Sponge doesn't have a server-name property
    private static final String SERVER_NAME = "A Sponge Server";

    /** Command source that requested the report; receives progress and result messages. */
    private final CommandSource sender;
    /** Root JSON object of the report; the per-tick data is attached in {@link #run()}. */
    private final JsonObject out;
    /** Timing history windows (including the current snapshot) to serialize and upload. */
    private final TimingHistory[] history;

    TimingsExport(CommandSource sender, JsonObject out, TimingHistory[] history) {
        super("Timings paste thread");
        this.sender = sender;
        this.out = out;
        this.history = history;
    }

    /**
     * Builds a JSON report of the timings and kicks off the upload thread.
     *
     * @param sender Who to report to
     */
    static void reportTimings(CommandSource sender) {
        JsonObjectBuilder builder = JSONUtil.objectBuilder()
                // Get some basic system details about the server
                .add("version", SpongeImpl.getGame().getPlatform().getImplementation().getVersion())
                .add("maxplayers", SpongeImpl.getGame().getServer().getMaxPlayers())
                .add("start", TimingsManager.timingStart / 1000)
                .add("end", System.currentTimeMillis() / 1000)
                .add("sampletime", (System.currentTimeMillis() - TimingsManager.timingStart) / 1000);
        if (!TimingsManager.privacy) {
            // Identifying details are only included when privacy mode is off.
            builder.add("server", SERVER_NAME)
                    .add("motd", Texts.toPlain(SpongeImpl.getGame().getServer().getMotd()))
                    .add("online-mode", SpongeImpl.getGame().getServer().getOnlineMode())
                    .add("icon", MinecraftServer.getServer().getServerStatusResponse().getFavicon());
        }

        final Runtime runtime = Runtime.getRuntime();
        RuntimeMXBean runtimeBean = ManagementFactory.getRuntimeMXBean();
        builder.add("system", JSONUtil.objectBuilder()
                .add("timingcost", getCost())
                .add("name", System.getProperty("os.name"))
                .add("version", System.getProperty("os.version"))
                .add("jvmversion", System.getProperty("java.version"))
                .add("arch", System.getProperty("os.arch"))
                .add("maxmem", runtime.maxMemory())
                .add("cpu", runtime.availableProcessors())
                // Reuse the bean fetched above instead of asking the factory again.
                .add("runtime", runtimeBean.getUptime())
                .add("flags", RUNTIME_FLAG_JOINER.join(runtimeBean.getInputArguments()))
                .add("gc", JSONUtil.mapArrayToObject(ManagementFactory.getGarbageCollectorMXBeans(), (input) -> {
                    return JSONUtil.singleObjectPair(input.getName(), JSONUtil.arrayOf(input.getCollectionCount(), input.getCollectionTime()));
                })));

        // Collect every block/entity type referenced by any history window so the
        // viewer can resolve numeric ids back to names.
        Set<BlockType> blockTypeSet = Sets.newHashSet();
        Set<EntityType> entityTypeSet = Sets.newHashSet();

        int size = HISTORY.size();
        TimingHistory[] history = new TimingHistory[size + 1];
        int i = 0;
        for (TimingHistory timingHistory : HISTORY) {
            blockTypeSet.addAll(timingHistory.blockTypeSet);
            entityTypeSet.addAll(timingHistory.entityTypeSet);
            history[i++] = timingHistory;
        }
        history[i] = new TimingHistory(); // Current snapshot
        blockTypeSet.addAll(history[i].blockTypeSet);
        entityTypeSet.addAll(history[i].entityTypeSet);

        JsonObjectBuilder handlersBuilder = JSONUtil.objectBuilder();
        for (TimingIdentifier.TimingGroup group : TimingIdentifier.GROUP_MAP.values()) {
            for (TimingHandler id : group.handlers) {
                if (!id.timed && !id.isSpecial()) {
                    // Skip handlers that never recorded anything.
                    continue;
                }
                handlersBuilder.add(id.id, JSONUtil.arrayOf(
                        group.id,
                        id.name));
            }
        }

        // id -> name lookup tables used by the timings viewer.
        builder.add("idmap", JSONUtil.objectBuilder()
                .add("groups", JSONUtil.mapArrayToObject(TimingIdentifier.GROUP_MAP.values(), (group) -> {
                    return JSONUtil.singleObjectPair(group.id, group.name);
                }))
                .add("handlers", handlersBuilder)
                .add("worlds", JSONUtil.mapArrayToObject(TimingHistory.worldMap.entrySet(), (entry) -> {
                    return JSONUtil.singleObjectPair(entry.getValue(), entry.getKey());
                }))
                .add("tileentity", JSONUtil.mapArrayToObject(blockTypeSet, (blockType) -> {
                    return JSONUtil.singleObjectPair(Block.getIdFromBlock((Block) blockType), blockType.getId());
                }))
                .add("entity", JSONUtil.mapArrayToObject(entityTypeSet, (entityType) -> {
                    return JSONUtil.singleObjectPair(((SpongeEntityType) entityType).entityTypeId, entityType.getId());
                })));

        // Information about loaded plugins
        builder.add("plugins", JSONUtil.mapArrayToObject(SpongeImpl.getGame().getPluginManager().getPlugins(), (plugin) -> {
            // TODO This is only available on Forge
            // ModMetadata metadata = ((ModContainer) plugin).getMetadata();
            return JSONUtil.objectBuilder().add(plugin.getId(), JSONUtil.objectBuilder()
                    .add("version", plugin.getVersion())
            // .add("description", metadata.description)
            // .add("website", metadata.url)
            // .add("authors", AUTHOR_LIST_JOINER.join(metadata.authorList))
            ).build();
        }));

        // Information on the users Config
        builder.add("config", JSONUtil.objectBuilder()
                .add("sponge", serializeConfigNode(SpongeImpl.getGlobalConfig().getRootNode())));

        new TimingsExport(sender, builder.build(), history).start();
    }

    /**
     * Benchmarks the overhead of a start/stop timing pair on this machine so the
     * viewer can subtract it from the measurements.
     *
     * @return approximate cost in nanoseconds of one timed section
     */
    static long getCost() {
        // Benchmark the users System.nanotime() for cost basis
        int passes = 500000;
        TimingHandler sampler1 = SpongeTimingsFactory.ofSafe("Timings Sampler 1");
        TimingHandler sampler2 = SpongeTimingsFactory.ofSafe("Timings Sampler 2");
        TimingHandler sampler3 = SpongeTimingsFactory.ofSafe("Timings Sampler 3");
        TimingHandler sampler4 = SpongeTimingsFactory.ofSafe("Timings Sampler 4");
        TimingHandler sampler5 = SpongeTimingsFactory.ofSafe("Timings Sampler 5");
        TimingHandler sampler6 = SpongeTimingsFactory.ofSafe("Timings Sampler 6");

        long start = System.nanoTime();
        for (int i = 0; i < passes; i++) {
            sampler1.startTiming();
            sampler2.startTiming();
            sampler3.startTiming();
            sampler3.stopTiming();
            sampler4.startTiming();
            sampler5.startTiming();
            sampler6.startTiming();
            sampler6.stopTiming();
            sampler5.stopTiming();
            sampler4.stopTiming();
            sampler2.stopTiming();
            sampler1.stopTiming();
        }
        // Six timed sections are opened and closed per pass.
        long timingsCost = (System.nanoTime() - start) / passes / 6;
        sampler1.reset(true);
        sampler2.reset(true);
        sampler3.reset(true);
        sampler4.reset(true);
        sampler5.reset(true);
        sampler6.reset(true);
        return timingsCost;
    }

    /**
     * Recursively converts a configuration node into JSON, dropping hidden paths
     * and the sensitive {@code sponge.sql} section.
     */
    private static JsonElement serializeConfigNode(ConfigurationNode node) {
        if (node.hasMapChildren()) {
            JsonObject object = new JsonObject();
            for (Entry<Object, ? extends ConfigurationNode> entry : node.getChildrenMap().entrySet()) {
                String fullPath = CONFIG_PATH_JOINER.join(entry.getValue().getPath());
                if (fullPath.equals("sponge.sql") || TimingsManager.hiddenConfigs.contains(fullPath)) {
                    continue;
                }
                object.add(entry.getKey().toString(), serializeConfigNode(entry.getValue()));
            }
            return object;
        }
        if (node.hasListChildren()) {
            JsonArray array = new JsonArray();
            for (ConfigurationNode child : node.getChildrenList()) {
                array.add(serializeConfigNode(child));
            }
            return array;
        }
        return JSONUtil.toJsonElement(node.getValue());
    }

    @Override
    public synchronized void start() {
        // An upload started from RCON would run on the network thread, so warn the
        // caller and do the work synchronously instead of spawning a thread.
        if (this.sender instanceof RconSource) {
            this.sender.sendMessage(Texts.of(TextColors.RED, "Warning: Timings report done over RCON will cause lag spikes."));
            this.sender.sendMessage(Texts.of(TextColors.RED, "You should use ", TextColors.YELLOW,
                    "/sponge timings report" + TextColors.RED, " in game or console."));
            run();
        } else {
            super.start();
        }
    }

    @Override
    public void run() {
        this.sender.sendMessage(Texts.of(TextColors.GREEN, "Preparing Timings Report..."));

        this.out.add("data", JSONUtil.mapArray(this.history, TimingHistory::export));

        String response = null;
        try {
            HttpURLConnection con = (HttpURLConnection) new URL("http://timings.aikar.co/post").openConnection();
            con.setDoOutput(true);
            con.setRequestProperty("User-Agent", "Sponge/" + SERVER_NAME + "/" + InetAddress.getLocalHost().getHostName());
            con.setRequestMethod("POST");
            con.setInstanceFollowRedirects(false);

            // try-with-resources guarantees the stream is closed (and the gzip
            // trailer flushed) even if the write fails part-way through.
            try (OutputStream request = new GZIPOutputStream(con.getOutputStream()) {
                {
                    this.def.setLevel(7);
                }
            }) {
                request.write(JSONUtil.toString(this.out).getBytes("UTF-8"));
            }

            response = getResponse(con);

            // The paste service answers a successful upload with a redirect (302)
            // pointing at the generated report.
            if (con.getResponseCode() != 302) {
                this.sender.sendMessage(Texts.of(
                        TextColors.RED, "Upload Error: " + con.getResponseCode() + ": " + con.getResponseMessage()));
                this.sender.sendMessage(Texts.of(TextColors.RED, "Check your logs for more information"));
                if (response != null) {
                    SpongeImpl.getLogger().fatal(response);
                }
                return;
            }

            String location = con.getHeaderField("Location");
            this.sender.sendMessage(Texts.of(TextColors.GREEN, "View Timings Report: ", TextActions.openUrl(new URL(location)), location));
            if (!(this.sender instanceof ConsoleSource)) {
                SpongeImpl.getLogger().info("View Timings Report: " + location);
            }

            if (response != null && !response.isEmpty()) {
                SpongeImpl.getLogger().info("Timing Response: " + response);
            }
        } catch (IOException ex) {
            this.sender.sendMessage(Texts.of(TextColors.RED, "Error uploading timings, check your logs for more information"));
            if (response != null) {
                SpongeImpl.getLogger().fatal(response);
            }
            SpongeImpl.getLogger().fatal("Could not paste timings", ex);
        }
    }

    /**
     * Reads the full response body of the given connection.
     *
     * @param con connection whose input stream is consumed
     * @return the body decoded as UTF-8 (matching the request encoding), or
     *         {@code null} if reading failed
     * @throws IOException if closing the stream fails
     */
    private String getResponse(HttpURLConnection con) throws IOException {
        InputStream is = null;
        try {
            is = con.getInputStream();
            ByteArrayOutputStream bos = new ByteArrayOutputStream();
            byte[] b = new byte[1024];
            int bytesRead;
            while ((bytesRead = is.read(b)) != -1) {
                bos.write(b, 0, bytesRead);
            }
            // Decode with the same charset the request was encoded with rather
            // than the platform default.
            return bos.toString("UTF-8");
        } catch (IOException ex) {
            this.sender.sendMessage(Texts.of(TextColors.RED, "Error uploading timings, check your logs for more information"));
            SpongeImpl.getLogger().warn(con.getResponseMessage(), ex);
            return null;
        } finally {
            if (is != null) {
                is.close();
            }
        }
    }
}
| |
/*
* Copyright (C) 2015 Piotr Wittchen
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.pwittchen.kirai.library;
import com.github.pwittchen.kirai.library.html.HtmlPiece;
import com.github.pwittchen.kirai.library.html.HtmlSyntax;
import org.junit.Before;
import org.junit.Test;
import static com.google.common.truth.Truth.assertThat;
@SuppressWarnings("PMD")
public class HtmlPieceTest {

    private String key;
    private String value;
    private Syntax syntax;

    @Before
    public void setUp() {
        key = "testKey";
        value = "testValue";
        syntax = new HtmlSyntax();
    }

    /** Applies a single-argument HTML format string to the test value. */
    private String wrap(String format) {
        return String.format(format, value);
    }

    @Test
    public void testPut() throws Exception {
        Piece piece = HtmlPiece.put(key, value);

        assertThat(piece.getKey()).isEqualTo(key);
        assertThat(piece.getValue()).isEqualTo(value);
    }

    @Test(expected = IllegalArgumentException.class)
    public void testPutShouldThrowExceptionWhenKeyAndValueAreNull() throws Exception {
        String nullKey = null;
        String nullValue = null;

        HtmlPiece.put(nullKey, nullValue);
    }

    @Test(expected = IllegalArgumentException.class)
    public void testPutShouldThrowExceptionWhenKeyAndValueAreEmpty() throws Exception {
        String emptyKey = "";
        String emptyValue = "";

        HtmlPiece.put(emptyKey, emptyValue);
    }

    @Test
    public void testBoldHtml() throws Exception {
        Piece piece = HtmlPiece.put(key, value).bold();

        assertThat(piece.getValue()).isEqualTo(wrap(syntax.getBoldFormat()));
    }

    @Test
    public void testItalicHtml() throws Exception {
        Piece piece = HtmlPiece.put(key, value).italic();

        assertThat(piece.getValue()).isEqualTo(wrap(syntax.getItalicFormat()));
    }

    @Test
    public void testUnderlineHtml() throws Exception {
        Piece piece = HtmlPiece.put(key, value).underline();

        assertThat(piece.getValue()).isEqualTo(wrap(syntax.getUnderlineFormat()));
    }

    @Test
    public void testBigHtml() throws Exception {
        Piece piece = HtmlPiece.put(key, value).big();

        assertThat(piece.getValue()).isEqualTo(wrap(syntax.getBigFormat()));
    }

    @Test
    public void testSmallHtml() throws Exception {
        Piece piece = HtmlPiece.put(key, value).small();

        assertThat(piece.getValue()).isEqualTo(wrap(syntax.getSmallFormat()));
    }

    @Test
    public void testSubHtml() throws Exception {
        Piece piece = HtmlPiece.put(key, value).sub();

        assertThat(piece.getValue()).isEqualTo(wrap(syntax.getSubFormat()));
    }

    @Test
    public void testSupHtml() throws Exception {
        Piece piece = HtmlPiece.put(key, value).sup();

        assertThat(piece.getValue()).isEqualTo(wrap(syntax.getSupFormat()));
    }

    @Test
    public void testColorHtml() throws Exception {
        String hex = "#FF0000";
        String expected = String.format(syntax.getColorFormat(), hex, value);

        Piece piece = HtmlPiece.put(key, value).color(hex);

        assertThat(piece.getValue()).isEqualTo(expected);
    }

    @Test(expected = IllegalArgumentException.class)
    public void testColorShouldThrowExceptionWhenHexIsNullForHtml() throws Exception {
        String nullHex = null;

        HtmlPiece.put(key, value).color(nullHex);
    }

    @Test(expected = IllegalArgumentException.class)
    public void testColorShouldThrowExceptionWhenHexIsEmptyForHtml() throws Exception {
        String emptyHex = "";

        HtmlPiece.put(key, value).color(emptyHex);
    }

    @Test
    public void testColorHexShouldContainSevenCharactersForHtml() throws Exception {
        String validHex = "#FFFFFF";

        Piece piece = HtmlPiece.put(key, value).color(validHex);

        // No exception: a seven-character, hash-prefixed hex code is accepted.
        assertThat(piece).isNotNull();
    }

    @Test(expected = IllegalArgumentException.class)
    public void testColorShouldThrowExceptionWhenHexDoesNotContainSevenCharacters() throws Exception {
        String eightCharHex = "#FFFFFFF";

        HtmlPiece.put(key, value).color(eightCharHex);
    }

    @Test(expected = IllegalArgumentException.class)
    public void testColorShouldThrowExceptionWhenHexDoesNotStartFromHashForHtml() throws Exception {
        String misplacedHashHex = "F#FFFFF";

        HtmlPiece.put(key, value).color(misplacedHashHex);
    }

    @Test
    public void testAllFormatsHtml() {
        /**
         * Combining big with small (or sub with sup) makes no visual sense; the
         * point here is purely to exercise the full formatting chain and verify
         * that every modifier nests correctly.
         */
        String hex = "#FF0000";

        String expected = wrap(syntax.getBoldFormat());
        expected = String.format(syntax.getItalicFormat(), expected);
        expected = String.format(syntax.getUnderlineFormat(), expected);
        expected = String.format(syntax.getBigFormat(), expected);
        expected = String.format(syntax.getSmallFormat(), expected);
        expected = String.format(syntax.getSubFormat(), expected);
        expected = String.format(syntax.getSupFormat(), expected);
        expected = String.format(syntax.getColorFormat(), hex, expected);

        Piece piece = HtmlPiece.put(key, value)
                .bold()
                .italic()
                .underline()
                .big()
                .small()
                .sub()
                .sup()
                .color(hex);

        assertThat(piece.getValue()).isEqualTo(expected);
    }
}
| |
package com.jesusm.holocircleseekbar.lib;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Paint.Align;
import android.graphics.Paint.Style;
import android.graphics.Rect;
import android.graphics.RectF;
import android.graphics.SweepGradient;
import android.os.Bundle;
import android.os.Parcelable;
import android.util.AttributeSet;
import android.view.MotionEvent;
import android.view.View;
import com.example.jesus.lib.R;
public class HoloCircleSeekBar extends View {
    // Instance-state bundle keys used by onSaveInstanceState/onRestoreInstanceState.
    private static final String STATE_PARENT = "parent";
    private static final String STATE_ANGLE = "angle";
    // Fallback values used when the corresponding XML attribute is absent.
    private static final int TEXT_SIZE_DEFAULT_VALUE = 25;
    private static final int END_WHEEL_DEFAULT_VALUE = 360;
    public static final int COLOR_WHEEL_STROKE_WIDTH_DEF_VALUE = 16;
    public static final float POINTER_RADIUS_DEF_VALUE = 8;
    public static final int MAX_POINT_DEF_VALUE = 100;
    public static final int START_ANGLE_DEF_VALUE = 0;
    // Listener notified of progress changes and touch-tracking start/stop; may be null.
    private OnCircleSeekBarChangeListener mOnCircleSeekBarChangeListener;
    /**
     * {@code Paint} instance used to draw the color wheel.
     */
    private Paint mColorWheelPaint;
    /**
     * {@code Paint} instance used to draw the pointer's "halo".
     */
    private Paint mPointerHaloPaint;
    // Paint for the outer glow ring drawn behind the pointer halo.
    private Paint mPointerHaloPaint2;
    /**
     * {@code Paint} instance used to draw the pointer (the selected color).
     */
    private Paint mPointerColor;
    /**
     * The stroke width used to paint the color wheel (in pixels).
     */
    private float mColorWheelStrokeWidth;
    /**
     * The radius of the pointer (in pixels).
     */
    private float mPointerRadius;
    /**
     * The rectangle enclosing the color wheel.
     */
    private RectF mColorWheelRectangle = new RectF();
    /**
     * {@code true} if the user clicked on the pointer to start the move mode.
     * {@code false} once the user stops touching the screen.
     *
     * @see #onTouchEvent(MotionEvent)
     */
    private boolean mUserIsMovingPointer = false;
    /**
     * Number of pixels the origin of this view is moved in X- and Y-direction.
     * <p>
     * <p>
     * We use the center of this (quadratic) View as origin of our internal
     * coordinate system. Android uses the upper left corner as origin for the
     * View-specific coordinate system. So this is the value we use to translate
     * from one coordinate system to the other.
     * </p>
     * <p>
     * <p>
     * Note: (Re)calculated in {@link #onMeasure(int, int)}.
     * </p>
     *
     * @see #onDraw(Canvas)
     */
    private float mTranslationOffset;
    /**
     * Radius of the color wheel in pixels.
     * <p>
     * <p>
     * Note: (Re)calculated in {@link #onMeasure(int, int)}.
     * </p>
     */
    private float mColorWheelRadius;
    /**
     * The pointer's position expressed as angle (in rad).
     */
    private float mAngle;
    // Paint and current string for the centre progress label.
    private Paint textPaint;
    private String text;
    // Upper bound of the progress range (lower bound is 0).
    private int max = 100;
    // Optional shader for the wheel paint; only used if assigned elsewhere.
    private SweepGradient s;
    // Paint for the active (progress) arc.
    private Paint mArcColor;
    // Colors and text size resolved from XML attributes in initAttributes().
    private int wheel_color, unactive_wheel_color, pointer_color, pointer_halo_color, text_size, text_color;
    // Initial pointer position from XML; -1 until initAttributes() runs.
    private int init_position = -1;
    // Glow color from XML; -1 until resolved, TRANSPARENT when absent/invalid.
    private int glow_color = -1;
    // Clamp flags: true while the pointer is pinned at the end/start of the wheel.
    private boolean block_end = false;
    // Last touch X (internal coordinates), used to detect wrap-around direction.
    private float lastX;
    // Wheel position (degrees) seen on the previous touch event.
    private int last_radians = 0;
    private boolean block_start = false;
    // Current end of the active arc, in wheel degrees.
    private int arc_finish_radians = 360;
    // Start of the wheel sweep, in degrees (270 = top of the circle offset).
    private int start_arc = 270;
    // Pointer centre {x, y} in internal coordinates; refreshed by updatePointerPosition().
    private float[] pointerPosition;
    // Rectangle for the (currently unused) centre halo; sized in onMeasure().
    private RectF mColorCenterHaloRectangle = new RectF();
    // End of the wheel sweep, in degrees, from XML (default 360).
    private int end_wheel;
    // Whether the centre progress label is drawn.
    private boolean show_text = true;
    // Scratch rect for measuring the label text in onDraw().
    private Rect bounds = new Rect();
    // Paint for the glow arc drawn underneath the active arc.
    private Paint mArcColor2;
    /** Creates the seek bar programmatically with all-default attributes. */
    public HoloCircleSeekBar(Context context) {
        super(context);
        init(null, 0);
    }

    /** Creates the seek bar from XML. */
    public HoloCircleSeekBar(Context context, AttributeSet attrs) {
        super(context, attrs);
        init(attrs, 0);
    }

    /** Creates the seek bar from XML with a default style. */
    public HoloCircleSeekBar(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
        init(attrs, defStyle);
    }
    /**
     * One-time setup shared by all constructors: resolves XML attributes,
     * builds every {@code Paint}, and positions the pointer at the configured
     * initial value.
     */
    private void init(AttributeSet attrs, int defStyle) {
        final TypedArray a = getContext().obtainStyledAttributes(attrs,
                R.styleable.HoloCircleSeekBar, defStyle, 0);
        initAttributes(a);
        a.recycle();
        // mAngle = (float) (-Math.PI / 2);

        // Inactive wheel track. Note: setColor overrides the shader's effect when
        // 's' is null (it is never assigned in this class).
        mColorWheelPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
        mColorWheelPaint.setShader(s);
        mColorWheelPaint.setColor(unactive_wheel_color);
        mColorWheelPaint.setStyle(Style.STROKE);
        mColorWheelPaint.setStrokeWidth(mColorWheelStrokeWidth);
        mColorWheelPaint.setDither(true);

        // Centre halo paint is built but never stored in a field; kept for parity
        // with the commented-out centre-circle drawing in onDraw().
        Paint mColorCenterHalo = new Paint(Paint.ANTI_ALIAS_FLAG);
        mColorCenterHalo.setColor(Color.CYAN);
        mColorCenterHalo.setAlpha(0xCC);
        // mColorCenterHalo.setStyle(Paint.Style.STROKE);
        // mColorCenterHalo.setStrokeWidth(mColorCenterHaloRectangle.width() /
        // 2);

        // Pointer halo (inner ring around the pointer).
        mPointerHaloPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
        mPointerHaloPaint.setColor(pointer_halo_color);
        mPointerHaloPaint.setStrokeWidth(mPointerRadius + 10);
        mPointerHaloPaint.setDither(true);
        // mPointerHaloPaint.setAlpha(150);

        // Pointer glow (outer ring, drawn first in onDraw()).
        mPointerHaloPaint2 = new Paint(Paint.ANTI_ALIAS_FLAG);
        mPointerHaloPaint2.setColor(glow_color);
        mPointerHaloPaint2.setStrokeWidth(mPointerRadius + 10);
        mPointerHaloPaint2.setDither(true);
        // mPointerHaloPaint.setAlpha(150);

        // Centre progress label.
        textPaint = new Paint(Paint.ANTI_ALIAS_FLAG | Paint.LINEAR_TEXT_FLAG);
        textPaint.setColor(text_color);
        textPaint.setStyle(Style.FILL_AND_STROKE);
        textPaint.setTextAlign(Align.LEFT);
        // canvas.drawPaint(textPaint);
        textPaint.setTextSize(text_size);

        // Pointer dot.
        mPointerColor = new Paint(Paint.ANTI_ALIAS_FLAG);
        mPointerColor.setStrokeWidth(mPointerRadius);
        // mPointerColor.setColor(calculateColor(mAngle));
        mPointerColor.setColor(pointer_color);

        // Active (progress) arc and its glow.
        mArcColor = new Paint(Paint.ANTI_ALIAS_FLAG);
        mArcColor.setColor(wheel_color);
        mArcColor.setStyle(Style.STROKE);
        mArcColor.setStrokeWidth(mColorWheelStrokeWidth);
        mArcColor2 = new Paint(Paint.ANTI_ALIAS_FLAG);
        mArcColor2.setColor(glow_color);
        mArcColor2.setStyle(Style.STROKE);
        mArcColor2.setStrokeWidth(mColorWheelStrokeWidth * 1.6f);

        // Also local-only, matching the commented-out text-circle drawing.
        Paint mCircleTextColor = new Paint(Paint.ANTI_ALIAS_FLAG);
        mCircleTextColor.setColor(Color.WHITE);
        mCircleTextColor.setStyle(Style.FILL);

        // Derive the initial arc end, angle and label from init_position,
        // clamped to the configured end of the wheel.
        arc_finish_radians = (int) calculateAngleFromText(init_position) - 90;
        if (arc_finish_radians > end_wheel)
            arc_finish_radians = end_wheel;
        mAngle = calculateAngleFromRadians(arc_finish_radians > end_wheel ? end_wheel
                : arc_finish_radians);
        setTextFromAngle(calculateValueFromAngle(arc_finish_radians));
        invalidate();
    }
private void setTextFromAngle(int angleValue) {
this.text = String.valueOf(angleValue);
}
    /**
     * Reads every styleable attribute into fields, falling back to the
     * {@code *_DEF_VALUE} constants. Color attributes are parsed from strings;
     * an unparsable or missing color falls back to a per-attribute default.
     */
    private void initAttributes(TypedArray a) {
        mColorWheelStrokeWidth = a.getDimension(
                R.styleable.HoloCircleSeekBar_wheel_size, COLOR_WHEEL_STROKE_WIDTH_DEF_VALUE);
        mPointerRadius = a.getDimension(
                R.styleable.HoloCircleSeekBar_pointer_size, POINTER_RADIUS_DEF_VALUE);
        max = a.getInteger(R.styleable.HoloCircleSeekBar_max, MAX_POINT_DEF_VALUE);
        String wheel_color_attr = a
                .getString(R.styleable.HoloCircleSeekBar_wheel_active_color);
        String wheel_unactive_color_attr = a
                .getString(R.styleable.HoloCircleSeekBar_wheel_unactive_color);
        String pointer_color_attr = a
                .getString(R.styleable.HoloCircleSeekBar_pointer_color);
        String pointer_halo_color_attr = a
                .getString(R.styleable.HoloCircleSeekBar_pointer_halo_color);
        String text_color_attr = a.getString(R.styleable.HoloCircleSeekBar_text_color);
        String glow_color_attr = a.getString(R.styleable.HoloCircleSeekBar_glow_color);
        text_size = a.getDimensionPixelSize(R.styleable.HoloCircleSeekBar_text_size, TEXT_SIZE_DEFAULT_VALUE);
        init_position = a.getInteger(R.styleable.HoloCircleSeekBar_init_position, 0);
        start_arc = a.getInteger(R.styleable.HoloCircleSeekBar_start_angle, START_ANGLE_DEF_VALUE);
        end_wheel = a.getInteger(R.styleable.HoloCircleSeekBar_end_angle, END_WHEEL_DEFAULT_VALUE);
        show_text = a.getBoolean(R.styleable.HoloCircleSeekBar_show_text, true);
        last_radians = end_wheel;
        // An initial position before the wheel's start is snapped to the start.
        if (init_position < start_arc)
            init_position = calculateTextFromStartAngle(start_arc);
        if (wheel_color_attr != null) {
            try {
                wheel_color = Color.parseColor(wheel_color_attr);
            } catch (IllegalArgumentException e) {
                // Unparsable color string: fall back to the default.
                wheel_color = Color.DKGRAY;
            }
        } else {
            wheel_color = Color.DKGRAY;
        }
        if (wheel_unactive_color_attr != null) {
            try {
                unactive_wheel_color = Color
                        .parseColor(wheel_unactive_color_attr);
            } catch (IllegalArgumentException e) {
                unactive_wheel_color = Color.CYAN;
            }
        } else {
            unactive_wheel_color = Color.CYAN;
        }
        if (glow_color_attr != null) {
            try {
                glow_color = Color
                        .parseColor(glow_color_attr);
            } catch (IllegalArgumentException e) {
                glow_color = Color.TRANSPARENT;
            }
        } else {
            glow_color = Color.TRANSPARENT;
        }
        if (pointer_color_attr != null) {
            try {
                pointer_color = Color.parseColor(pointer_color_attr);
            } catch (IllegalArgumentException e) {
                pointer_color = Color.CYAN;
            }
        } else {
            pointer_color = Color.CYAN;
        }
        if (pointer_halo_color_attr != null) {
            try {
                pointer_halo_color = Color.parseColor(pointer_halo_color_attr);
            } catch (IllegalArgumentException e) {
                pointer_halo_color = Color.CYAN;
            }
        } else {
            // NOTE(review): the missing-attribute default (DKGRAY) differs from the
            // parse-failure default (CYAN) — confirm this asymmetry is intentional.
            pointer_halo_color = Color.DKGRAY;
        }
        if (text_color_attr != null) {
            try {
                text_color = Color.parseColor(text_color_attr);
            } catch (IllegalArgumentException e) {
                text_color = Color.CYAN;
            }
        } else {
            text_color = Color.CYAN;
        }
    }
    @Override
    protected void onDraw(Canvas canvas) {
        // All of our positions are using our internal coordinate system.
        // Instead of translating
        // them we let Canvas do the work for us.
        canvas.translate(mTranslationOffset, mTranslationOffset);
        // Outer glow ring behind the pointer (drawn first so everything else
        // layers on top of it).
        canvas.drawCircle(pointerPosition[0], pointerPosition[1],
                mColorWheelStrokeWidth * 1.70f, mPointerHaloPaint2);
        // Draw the color wheel: the full inactive track over the configured sweep.
        canvas.drawArc(mColorWheelRectangle, start_arc + 270, end_wheel
                - (start_arc), false, mColorWheelPaint);
        // Glow arc under the active portion; the sweep is clamped so it never
        // extends past the end of the wheel.
        canvas.drawArc(mColorWheelRectangle, start_arc + 270,
                (arc_finish_radians) > (end_wheel) ? end_wheel - (start_arc)
                        : arc_finish_radians - start_arc, false, mArcColor2);
        // Active (progress) arc drawn over its glow with the same clamping.
        canvas.drawArc(mColorWheelRectangle, start_arc + 270,
                (arc_finish_radians) > (end_wheel) ? end_wheel - (start_arc)
                        : arc_finish_radians - start_arc, false, mArcColor);
        // Draw the pointer's "halo"
        canvas.drawCircle(pointerPosition[0], pointerPosition[1],
                mColorWheelStrokeWidth * 1.40f, mPointerHaloPaint);
        // Draw the pointer (the currently selected color) slightly smaller on
        // top.
        canvas.drawCircle(pointerPosition[0], pointerPosition[1],
                mColorWheelStrokeWidth / 1.40f, mPointerColor);
        textPaint.getTextBounds(text, 0, text.length(), bounds);
        // canvas.drawCircle(mColorWheelRectangle.centerX(),
        // mColorWheelRectangle.centerY(), (bounds.width() / 2) + 5,
        // mCircleTextColor);
        // Centre the progress label horizontally and vertically in the wheel.
        if (show_text)
            canvas.drawText(
                    text,
                    (mColorWheelRectangle.centerX())
                            - (textPaint.measureText(text) / 2),
                    mColorWheelRectangle.centerY() + bounds.height() / 2,
                    textPaint);
        // last_radians = calculateRadiansFromAngle(mAngle);
    }
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
int height = getDefaultSize(getSuggestedMinimumHeight(),
heightMeasureSpec);
int width = getDefaultSize(getSuggestedMinimumWidth(), widthMeasureSpec);
int min = Math.min(width, height);
setMeasuredDimension(min, min);
mTranslationOffset = min * 0.5f;
mColorWheelRadius = mTranslationOffset - mPointerRadius;
mColorWheelRectangle.set(-mColorWheelRadius, -mColorWheelRadius,
mColorWheelRadius, mColorWheelRadius);
mColorCenterHaloRectangle.set(-mColorWheelRadius / 2,
-mColorWheelRadius / 2, mColorWheelRadius / 2,
mColorWheelRadius / 2);
updatePointerPosition();
}
private int calculateValueFromAngle(float angle) {
float m = angle - start_arc;
float f = (end_wheel - start_arc) / m;
return (int) (max / f);
}
private int calculateTextFromStartAngle(float angle) {
float f = (end_wheel - start_arc) / angle;
return (int) (max / f);
}
private double calculateAngleFromText(int position) {
if (position == 0 || position >= max)
return (float) 90;
double f = (double) max / (double) position;
double f_r = 360 / f;
return f_r + 90;
}
private int calculateRadiansFromAngle(float angle) {
float unit = (float) (angle / (2 * Math.PI));
if (unit < 0) {
unit += 1;
}
int radians = (int) ((unit * 360) - ((360 / 4) * 3));
if (radians < 0)
radians += 360;
return radians;
}
    // Inverse of calculateRadiansFromAngle(): converts a wheel position in
    // degrees back to the radian angle used by the atan2-based pointer maths;
    // the +270 undoes the top-of-wheel offset.
    private float calculateAngleFromRadians(int radians) {
        return (float) (((radians + 270) * (2 * Math.PI)) / 360);
    }
/**
* Get the selected value
*
* @return the value between 0 and max
*/
public int getValue() {
return Integer.valueOf(text);
}
    /**
     * Sets the upper bound of the progress range and refreshes the label and
     * pointer; the current arc position is kept and only re-interpreted
     * against the new maximum.
     *
     * @param max new maximum value
     */
    public void setMax(int max) {
        this.max = max;
        setTextFromAngle(calculateValueFromAngle(arc_finish_radians));
        updatePointerPosition();
        invalidate();
    }
    /**
     * Programmatically moves the pointer to the given progress value and
     * redraws the view.
     *
     * @param newValue value to display; 0 snaps to the wheel start and
     *                 {@code max} snaps to the wheel end
     */
    public void setValue(float newValue) {
        if (newValue == 0) {
            arc_finish_radians = start_arc;
        } else if (newValue == this.max) {
            arc_finish_radians = end_wheel;
        } else {
            // NOTE(review): newAngle is in degrees but is fed to
            // calculateRadiansFromAngle(), which expects radians — confirm the
            // round-trip (+1 truncation compensation) yields the intended arc.
            float newAngle = (float) (360.0 * (newValue / max));
            arc_finish_radians = (int) calculateAngleFromRadians(calculateRadiansFromAngle(newAngle)) + 1;
        }
        mAngle = calculateAngleFromRadians(arc_finish_radians);
        setTextFromAngle(calculateValueFromAngle(arc_finish_radians));
        updatePointerPosition();
        invalidate();
    }
    // Recomputes the pointer's {x, y} centre from the current angle; must be
    // called whenever mAngle or the wheel radius changes.
    private void updatePointerPosition() {
        pointerPosition = calculatePointerPosition(mAngle);
    }
    /**
     * Tracks the pointer while the finger is down. The block_start/block_end
     * flags pin the pointer at the wheel's ends so a drag cannot wrap around
     * from max back to 0 (or vice versa) in a single gesture.
     */
    @Override
    public boolean onTouchEvent(MotionEvent event) {
        // Convert coordinates to our internal coordinate system
        float x = event.getX() - mTranslationOffset;
        float y = event.getY() - mTranslationOffset;
        switch (event.getAction()) {
        case MotionEvent.ACTION_DOWN:
            // Check whether the user pressed on (or near) the pointer
            mAngle = (float) Math.atan2(y, x);
            block_end = false;
            block_start = false;
            mUserIsMovingPointer = true;
            arc_finish_radians = calculateRadiansFromAngle(mAngle);
            // A press beyond the wheel's end clamps to the end.
            if (arc_finish_radians > end_wheel) {
                arc_finish_radians = end_wheel;
                block_end = true;
            }
            if (!block_end) {
                setTextFromAngle(calculateValueFromAngle(arc_finish_radians));
                updatePointerPosition();
                invalidate();
            }
            if (mOnCircleSeekBarChangeListener != null) {
                mOnCircleSeekBarChangeListener.onStartTrackingTouch(this);
            }
            break;
        case MotionEvent.ACTION_MOVE:
            if (mUserIsMovingPointer) {
                mAngle = (float) Math.atan2(y, x);
                int radians = calculateRadiansFromAngle(mAngle);
                // Detect wrap-around past the end (moving clockwise through 0).
                if (last_radians > radians && radians < (360 / 6) && x > lastX
                        && last_radians > (360 / 6)) {
                    if (!block_end && !block_start)
                        block_end = true;
                    // if (block_start)
                    // block_start = false;
                // Detect wrap-around past the start (moving counter-clockwise).
                } else if (last_radians >= start_arc
                        && last_radians <= (360 / 4) && radians <= (360 - 1)
                        && radians >= ((360 / 4) * 3) && x < lastX) {
                    if (!block_start && !block_end)
                        block_start = true;
                    // if (block_end)
                    // block_end = false;
                // Crossing the end of the wheel while moving forward pins at max.
                } else if (radians >= end_wheel && !block_start
                        && last_radians < radians) {
                    block_end = true;
                // Moving back inside the wheel releases the end pin.
                } else if (radians < end_wheel && block_end
                        && last_radians > end_wheel) {
                    block_end = false;
                // Crossing the start while moving backwards pins at 0.
                } else if (radians < start_arc && last_radians > radians
                        && !block_end) {
                    block_start = true;
                // Moving forward inside the wheel releases the start pin.
                } else if (block_start && last_radians < radians
                        && radians > start_arc && radians < end_wheel) {
                    block_start = false;
                }
                if (block_end) {
                    // Pinned at the end: show max and hold the pointer there.
                    arc_finish_radians = end_wheel - 1;
                    setTextFromAngle(max);
                    mAngle = calculateAngleFromRadians(arc_finish_radians);
                    updatePointerPosition();
                } else if (block_start) {
                    // Pinned at the start: show 0 and hold the pointer there.
                    arc_finish_radians = start_arc;
                    mAngle = calculateAngleFromRadians(arc_finish_radians);
                    setTextFromAngle(0);
                    updatePointerPosition();
                } else {
                    // Free movement: follow the finger.
                    arc_finish_radians = calculateRadiansFromAngle(mAngle);
                    setTextFromAngle(calculateValueFromAngle(arc_finish_radians));
                    updatePointerPosition();
                }
                invalidate();
                if (mOnCircleSeekBarChangeListener != null)
                    mOnCircleSeekBarChangeListener.onProgressChanged(this,
                            Integer.parseInt(text), true);
                last_radians = radians;
            }
            break;
        case MotionEvent.ACTION_UP:
            mUserIsMovingPointer = false;
            if (mOnCircleSeekBarChangeListener != null) {
                mOnCircleSeekBarChangeListener.onStopTrackingTouch(this);
            }
            break;
        }
        // Fix scrolling
        if (event.getAction() == MotionEvent.ACTION_MOVE && getParent() != null) {
            getParent().requestDisallowInterceptTouchEvent(true);
        }
        lastX = x;
        return true;
    }
/**
* Calculate the pointer's coordinates on the color wheel using the supplied
* angle.
*
* @param angle The position of the pointer expressed as angle (in rad).
* @return The coordinates of the pointer's center in our internal
* coordinate system.
*/
private float[] calculatePointerPosition(float angle) {
// if (calculateRadiansFromAngle(angle) > end_wheel)
// angle = calculateAngleFromRadians(end_wheel);
float x = (float) (mColorWheelRadius * Math.cos(angle));
float y = (float) (mColorWheelRadius * Math.sin(angle));
return new float[]{x, y};
}
@Override
protected Parcelable onSaveInstanceState() {
Parcelable superState = super.onSaveInstanceState();
Bundle state = new Bundle();
state.putParcelable(STATE_PARENT, superState);
state.putFloat(STATE_ANGLE, mAngle);
return state;
}
@Override
protected void onRestoreInstanceState(Parcelable state) {
Bundle savedState = (Bundle) state;
Parcelable superState = savedState.getParcelable(STATE_PARENT);
super.onRestoreInstanceState(superState);
mAngle = savedState.getFloat(STATE_ANGLE);
arc_finish_radians = calculateRadiansFromAngle(mAngle);
setTextFromAngle(calculateValueFromAngle(arc_finish_radians));
updatePointerPosition();
}
public void setInitPosition(int init) {
init_position = init;
setTextFromAngle(init_position);
mAngle = calculateAngleFromRadians(init_position);
arc_finish_radians = calculateRadiansFromAngle(mAngle);
updatePointerPosition();
invalidate();
}
public void setOnSeekBarChangeListener(OnCircleSeekBarChangeListener l) {
mOnCircleSeekBarChangeListener = l;
}
public int getMaxValue() {
return max;
}
    /**
     * Callback interface for observing value changes and touch tracking on the
     * circular seek bar.
     */
    public interface OnCircleSeekBarChangeListener {
        // Notified whenever the displayed value changes; within this widget it is
        // invoked from onTouchEvent with fromUser == true.
        void onProgressChanged(HoloCircleSeekBar seekBar, int progress, boolean fromUser);
        // Notified when the user begins a touch gesture on the pointer.
        void onStartTrackingTouch(HoloCircleSeekBar seekBar);
        // Notified when the user lifts the finger (ACTION_UP).
        void onStopTrackingTouch(HoloCircleSeekBar seekBar);
    }
}
| |
/*
* Copyright (C) 2009 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.util.concurrent;
import static com.google.common.util.concurrent.InterruptionUtil.repeatedlyInterruptTestThread;
import static com.google.common.util.concurrent.Uninterruptibles.getUninterruptibly;
import static java.util.concurrent.TimeUnit.MINUTES;
import static java.util.concurrent.TimeUnit.SECONDS;
import com.google.common.testing.TearDown;
import com.google.common.testing.TearDownStack;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.FutureTask;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import junit.framework.TestCase;
// TODO(cpovirk): Should this be merged into UninterruptiblesTest?
/**
* Unit test for {@link Uninterruptibles#getUninterruptibly}
*
* @author Kevin Bourrillion
* @author Chris Povirk
*/
public class UninterruptibleFutureTest extends TestCase {
    // Task that sleeps for a fixed period; its `completed` flag lets tests tell
    // whether the future's work has actually finished.
    private SleepingRunnable sleeper;
    // Future wrapping `sleeper`, submitted in setUp(); yields `true` on completion.
    private Future<Boolean> delayedFuture;
    private final TearDownStack tearDownStack = new TearDownStack();
    @Override
    protected void setUp() {
        final ExecutorService executor = Executors.newSingleThreadExecutor();
        // Ensure the worker thread is stopped even if a test fails midway.
        tearDownStack.addTearDown(
            new TearDown() {
                @Override
                public void tearDown() {
                    executor.shutdownNow();
                }
            });
        sleeper = new SleepingRunnable(1000);
        delayedFuture = executor.submit(sleeper, true);
        // Clear the test thread's interrupt flag so interruption from one test
        // cannot leak into the next.
        tearDownStack.addTearDown(
            new TearDown() {
                @Override
                public void tearDown() {
                    Thread.interrupted();
                }
            });
    }
    @Override
    protected void tearDown() {
        tearDownStack.runTearDown();
    }
    /**
     * This first test doesn't test anything in Uninterruptibles, just demonstrates some normal
     * behavior of futures so that you can contrast the next test with it.
     */
    public void testRegularFutureInterrupted() throws ExecutionException {
        /*
         * Here's the order of events that we want.
         *
         * 1. The client thread begins to block on a get() call to a future.
         * 2. The client thread is interrupted sometime before the result would be
         * available.
         * 3. We expect the client's get() to throw an InterruptedException.
         * 4. We expect the client thread's interrupt state to be false.
         * 5. The client thread again makes a blocking call to get().
         * 6. Now the result becomes available.
         * 7. We expect get() to return this result.
         * 8. We expect the test thread's interrupt state to be false.
         */
        InterruptionUtil.requestInterruptIn(200, TimeUnit.MILLISECONDS);
        assertFalse(Thread.interrupted());
        try {
            delayedFuture.get(20000, TimeUnit.MILLISECONDS);
            fail("expected to be interrupted");
        } catch (InterruptedException expected) {
            // expected: the scheduled interrupt arrives while blocked in get()
        } catch (TimeoutException e) {
            throw new RuntimeException(e);
        }
        // we were interrupted, but it's been cleared now
        assertFalse(Thread.interrupted());
        assertFalse(sleeper.completed);
        try {
            assertTrue(delayedFuture.get());
        } catch (InterruptedException e) {
            throw new RuntimeException(e);
        }
        assertTrue(sleeper.completed);
    }
    // Verifies that getUninterruptibly honours its timeout even while the caller
    // is being interrupted repeatedly, and that the interrupt state is restored
    // afterwards rather than swallowed.
    public void testMakeUninterruptible_timeoutPreservedThroughInterruption()
            throws ExecutionException {
        repeatedlyInterruptTestThread(100, tearDownStack);
        try {
            getUninterruptibly(delayedFuture, 500, TimeUnit.MILLISECONDS);
            fail("expected to time out");
        } catch (TimeoutException expected) {
            // expected: the 500ms timeout fires before the 1000ms sleeper finishes
        }
        assertTrue(Thread.interrupted()); // clears the interrupt state, too
        assertFalse(sleeper.completed);
        assertTrue(getUninterruptibly(delayedFuture));
        assertTrue(Thread.interrupted()); // clears the interrupt state, too
        assertTrue(sleeper.completed);
    }
    // Sleeps for `millis` and then sets `completed`. The sleep is expected never
    // to be interrupted because the executor thread itself is not interrupted.
    private static class SleepingRunnable implements Runnable {
        final int millis;
        volatile boolean completed;
        public SleepingRunnable(int millis) {
            this.millis = millis;
        }
        @Override
        public void run() {
            try {
                Thread.sleep(millis);
            } catch (InterruptedException wontHappen) {
                throw new AssertionError();
            }
            completed = true;
        }
    }
    public void testMakeUninterruptible_untimed_uninterrupted() throws Exception {
        runUntimedInterruptsTest(0);
    }
    public void testMakeUninterruptible_untimed_interrupted() throws Exception {
        runUntimedInterruptsTest(1);
    }
    public void testMakeUninterruptible_untimed_multiplyInterrupted() throws Exception {
        runUntimedInterruptsTest(38);
    }
    public void testMakeUninterruptible_timed_uninterrupted() throws Exception {
        runTimedInterruptsTest(0);
    }
    public void testMakeUninterruptible_timed_interrupted() throws Exception {
        runTimedInterruptsTest(1);
    }
    public void testMakeUninterruptible_timed_multiplyInterrupted() throws Exception {
        runTimedInterruptsTest(38);
    }
    // Drives the untimed getUninterruptibly(Future) through `times` interrupts.
    private static void runUntimedInterruptsTest(int times)
            throws InterruptedException, ExecutionException, TimeoutException {
        SettableFuture<String> future = SettableFuture.create();
        FutureTask<Boolean> interruptReporter = untimedInterruptReporter(future, false);
        runNInterruptsTest(times, future, interruptReporter);
    }
    // Drives the timed getUninterruptibly(Future, long, TimeUnit) through `times` interrupts.
    private static void runTimedInterruptsTest(int times)
            throws InterruptedException, ExecutionException, TimeoutException {
        SettableFuture<String> future = SettableFuture.create();
        FutureTask<Boolean> interruptReporter = timedInterruptReporter(future);
        runNInterruptsTest(times, future, interruptReporter);
    }
    // Interrupts the waiting thread `times` times, then completes the future and
    // asserts that the waiter saw a pending interrupt iff it was interrupted at all.
    private static void runNInterruptsTest(
            int times, SettableFuture<String> future, FutureTask<Boolean> interruptReporter)
            throws InterruptedException, ExecutionException, TimeoutException {
        Thread waitingThread = new Thread(interruptReporter);
        waitingThread.start();
        for (int i = 0; i < times; i++) {
            waitingThread.interrupt();
        }
        future.set(RESULT);
        assertEquals(times > 0, (boolean) interruptReporter.get(20, SECONDS));
    }
    /**
     * Confirms that the test code triggers {@link InterruptedException} in a standard {@link Future}.
     */
    public void testMakeUninterruptible_plainFutureSanityCheck() throws Exception {
        SettableFuture<String> future = SettableFuture.create();
        FutureTask<Boolean> wasInterrupted = untimedInterruptReporter(future, true);
        Thread waitingThread = new Thread(wasInterrupted);
        waitingThread.start();
        waitingThread.interrupt();
        try {
            wasInterrupted.get();
            fail();
        } catch (ExecutionException expected) {
            assertTrue(
                expected.getCause().toString(), expected.getCause() instanceof InterruptedException);
        }
    }
    public void testMakeUninterruptible_timedGetZeroTimeoutAttempted()
            throws TimeoutException, ExecutionException {
        SettableFuture<String> future = SettableFuture.create();
        future.set(RESULT);
        /*
         * getUninterruptibly should call the timed get method once with a
         * wait of 0 seconds (and it should succeed, since the result is already
         * available).
         */
        assertEquals(RESULT, getUninterruptibly(future, 0, SECONDS));
    }
    public void testMakeUninterruptible_timedGetNegativeTimeoutAttempted()
            throws TimeoutException, ExecutionException {
        SettableFuture<String> future = SettableFuture.create();
        future.set(RESULT);
        /*
         * The getUninterruptibly should call the timed get method once with a
         * wait of -1 seconds (and it should succeed, since the result is already
         * available).
         */
        assertEquals(RESULT, getUninterruptibly(future, -1, SECONDS));
    }
    // Builds a task that blocks on the future (interruptibly or not, depending on
    // allowInterruption) and reports whether its thread ended with a pending interrupt.
    private static FutureTask<Boolean> untimedInterruptReporter(
            final Future<?> future, final boolean allowInterruption) {
        return new FutureTask<>(
            new Callable<Boolean>() {
                @Override
                public Boolean call() throws Exception {
                    Object actual;
                    if (allowInterruption) {
                        actual = future.get();
                    } else {
                        actual = getUninterruptibly(future);
                    }
                    assertEquals(RESULT, actual);
                    return Thread.interrupted();
                }
            });
    }
    // As untimedInterruptReporter, but blocks via the timed getUninterruptibly
    // with a generous (10 minute) timeout.
    private static FutureTask<Boolean> timedInterruptReporter(final Future<?> future) {
        return new FutureTask<>(
            new Callable<Boolean>() {
                @Override
                public Boolean call() throws Exception {
                    assertEquals(RESULT, getUninterruptibly(future, 10, MINUTES));
                    return Thread.interrupted();
                }
            });
    }
    // Sentinel value that all futures in this test complete with.
    private static final String RESULT = "result";
}
| |
/*
* Copyright 2014 Dan Haywood
*
* Licensed under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.isisaddons.module.security.dom.permission;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Callable;
import javax.inject.Inject;
import com.google.common.base.Splitter;
import com.google.common.collect.ImmutableListMultimap;
import com.google.common.collect.Lists;
import com.google.common.collect.Multimaps;
import com.google.common.collect.Sets;
import org.apache.isis.applib.DomainObjectContainer;
import org.apache.isis.applib.annotation.DomainService;
import org.apache.isis.applib.annotation.NatureOfService;
import org.apache.isis.applib.annotation.Programmatic;
import org.apache.isis.applib.query.QueryDefault;
import org.apache.isis.applib.services.appfeat.ApplicationMemberType;
import org.apache.isis.applib.services.queryresultscache.QueryResultsCache;
import org.apache.isis.core.metamodel.services.appfeat.ApplicationFeature;
import org.apache.isis.core.metamodel.services.appfeat.ApplicationFeatureId;
import org.apache.isis.core.metamodel.services.appfeat.ApplicationFeatureRepositoryDefault;
import org.apache.isis.core.metamodel.services.appfeat.ApplicationFeatureType;
import org.isisaddons.module.security.dom.role.ApplicationRole;
import org.isisaddons.module.security.dom.user.ApplicationUser;
@DomainService(
        nature = NatureOfService.DOMAIN,
        repositoryFor = ApplicationPermission.class
)
public class ApplicationPermissionRepository {
    //region > findByRole (programmatic)
    /**
     * As {@link #findByRole(ApplicationRole)}, but cached within the current
     * request/interaction via {@link QueryResultsCache}.
     */
    @Programmatic
    public List<ApplicationPermission> findByRoleCached(final ApplicationRole role) {
        return queryResultsCache.execute(new Callable<List<ApplicationPermission>>() {
            @Override
            public List<ApplicationPermission> call() throws Exception {
                return findByRole(role);
            }
        }, ApplicationPermissionRepository.class, "findByRoleCached", role);
    }
    /**
     * All permissions granted to the given role (delegates to the
     * "findByRole" named query).
     */
    @Programmatic
    public List<ApplicationPermission> findByRole(final ApplicationRole role) {
        return container.allMatches(
                new QueryDefault<>(
                        ApplicationPermission.class, "findByRole",
                        "role", role));
    }
    //endregion
    //region > findByUser (programmatic)
    /**
     * As {@link #findByUser(ApplicationUser)}, but cached within the current
     * request/interaction via {@link QueryResultsCache}.
     */
    @Programmatic
    public List<ApplicationPermission> findByUserCached(final ApplicationUser user) {
        return queryResultsCache.execute(new Callable<List<ApplicationPermission>>() {
            @Override public List<ApplicationPermission> call() throws Exception {
                return findByUser(user);
            }
        }, ApplicationPermissionRepository.class, "findByUserCached", user);
    }
    /**
     * All permissions of all roles held by the given user (via the
     * "findByUser" named query, keyed by username).
     */
    @Programmatic
    public List<ApplicationPermission> findByUser(final ApplicationUser user) {
        final String username = user.getUsername();
        return findByUser(username);
    }
    private List<ApplicationPermission> findByUser(final String username) {
        return container.allMatches(
                new QueryDefault<>(
                        ApplicationPermission.class, "findByUser",
                        "username", username));
    }
    //endregion
    //region > findByUserAndPermissionValue (programmatic)
    /**
     * Uses the {@link QueryResultsCache} in order to support
     * multiple lookups from <code>org.isisaddons.module.security.app.user.UserPermissionViewModel</code>.
     */
    @Programmatic
    public ApplicationPermission findByUserAndPermissionValue(final String username, final ApplicationPermissionValue permissionValue) {
        // obtain all permissions for this user, map by its value, and
        // put into query cache (so that this method can be safely called in a tight loop)
        final Map<ApplicationPermissionValue, List<ApplicationPermission>> permissions =
                queryResultsCache.execute(new Callable<Map<ApplicationPermissionValue, List<ApplicationPermission>>>() {
                    @Override
                    public Map<ApplicationPermissionValue, List<ApplicationPermission>> call() throws Exception {
                        final List<ApplicationPermission> applicationPermissions = findByUser(username);
                        final ImmutableListMultimap<ApplicationPermissionValue, ApplicationPermission> index = Multimaps
                                .index(applicationPermissions, ApplicationPermission.Functions.AS_VALUE);
                        return Multimaps.asMap(index);
                    }
                    // note: it is correct that only username (and not permissionValue) is the key
                    // (we are obtaining all the perms for this user)
                }, ApplicationPermissionRepository.class, "findByUserAndPermissionValue", username);
        // now simply return the permission from the required value (if it exists)
        final List<ApplicationPermission> applicationPermissions = permissions.get(permissionValue);
        return applicationPermissions != null && !applicationPermissions.isEmpty()
                ? applicationPermissions.get(0)
                : null;
    }
    //endregion
    //region > findByRoleAndRuleAndFeatureType (programmatic)
    /**
     * As {@link #findByRoleAndRuleAndFeatureType(ApplicationRole, ApplicationPermissionRule, ApplicationFeatureType)},
     * but cached within the current request/interaction.
     */
    @Programmatic
    public List<ApplicationPermission> findByRoleAndRuleAndFeatureTypeCached(
            final ApplicationRole role, final ApplicationPermissionRule rule,
            final ApplicationFeatureType type) {
        return queryResultsCache.execute(new Callable<List<ApplicationPermission>>() {
            @Override public List<ApplicationPermission> call() throws Exception {
                return findByRoleAndRuleAndFeatureType(role, rule, type);
            }
        }, ApplicationPermissionRepository.class, "findByRoleAndRuleAndFeatureTypeCached", role, rule, type);
    }
    /**
     * All permissions for the given role matching both the rule (allow/veto)
     * and the feature type (package/class/member).
     */
    @Programmatic
    public List<ApplicationPermission> findByRoleAndRuleAndFeatureType(
            final ApplicationRole role, final ApplicationPermissionRule rule,
            final ApplicationFeatureType type) {
        return container.allMatches(
                new QueryDefault<>(
                        ApplicationPermission.class, "findByRoleAndRuleAndFeatureType",
                        "role", role,
                        "rule", rule,
                        "featureType", type));
    }
    //endregion
    //region > findByRoleAndRuleAndFeature (programmatic)
    /**
     * As {@link #findByRoleAndRuleAndFeature(ApplicationRole, ApplicationPermissionRule, ApplicationFeatureType, String)},
     * but cached within the current request/interaction.
     */
    @Programmatic
    public ApplicationPermission findByRoleAndRuleAndFeatureCached(
            final ApplicationRole role,
            final ApplicationPermissionRule rule,
            final ApplicationFeatureType type,
            final String featureFqn) {
        return queryResultsCache.execute(new Callable<ApplicationPermission>() {
            @Override public ApplicationPermission call() throws Exception {
                return findByRoleAndRuleAndFeature(role, rule, type, featureFqn);
            }
        }, ApplicationPermissionRepository.class, "findByRoleAndRuleAndFeatureCached", role, rule, type, featureFqn);
    }
    /**
     * The single permission (if any) exactly matching role, rule, feature type
     * and fully-qualified feature name; first match wins.
     */
    @Programmatic
    public ApplicationPermission findByRoleAndRuleAndFeature(
            final ApplicationRole role,
            final ApplicationPermissionRule rule, final ApplicationFeatureType type, final String featureFqn) {
        return container.firstMatch(
                new QueryDefault<>(
                        ApplicationPermission.class, "findByRoleAndRuleAndFeature",
                        "role", role,
                        "rule", rule,
                        "featureType", type,
                        "featureFqn", featureFqn ));
    }
    //endregion
    //region > findByFeature (programmatic)
    /**
     * As {@link #findByFeature(ApplicationFeatureId)}, but cached within the
     * current request/interaction.
     */
    @Programmatic
    public List<ApplicationPermission> findByFeatureCached(final ApplicationFeatureId featureId) {
        return queryResultsCache.execute(new Callable<List<ApplicationPermission>>() {
            @Override public List<ApplicationPermission> call() throws Exception {
                return findByFeature(featureId);
            }
        }, ApplicationPermissionRepository.class, "findByFeatureCached", featureId);
    }
    /**
     * All permissions (across every role) that reference the given feature.
     */
    @Programmatic
    public List<ApplicationPermission> findByFeature(final ApplicationFeatureId featureId) {
        return container.allMatches(
                new QueryDefault<>(
                        ApplicationPermission.class, "findByFeature",
                        "featureType", featureId.getType(),
                        "featureFqn", featureId.getFullyQualifiedName()));
    }
    //endregion
    //region > newPermission (programmatic)
    /**
     * Creates (or returns the already-existing) permission for the given role
     * and feature, after first checking that the feature actually exists;
     * warns the user and returns null if it does not.
     */
    @Programmatic
    public ApplicationPermission newPermission(
            final ApplicationRole role,
            final ApplicationPermissionRule rule,
            final ApplicationPermissionMode mode,
            final ApplicationFeatureType featureType,
            final String featureFqn) {
        final ApplicationFeatureId featureId = ApplicationFeatureId.newFeature(featureType, featureFqn);
        final ApplicationFeature feature = applicationFeatureRepository.findFeature(featureId);
        if(feature == null) {
            container.warnUser("No such " + featureType.name().toLowerCase() + ": " + featureFqn);
            return null;
        }
        return newPermissionNoCheck(role, rule, mode, featureType, featureFqn);
    }
    /**
     * Creates and persists a permission without verifying that the feature
     * exists; idempotent — returns the existing permission if one already
     * matches role/rule/feature.
     */
    @Programmatic
    public ApplicationPermission newPermissionNoCheck(
            final ApplicationRole role,
            final ApplicationPermissionRule rule,
            final ApplicationPermissionMode mode,
            final ApplicationFeatureType featureType,
            final String featureFqn) {
        ApplicationPermission permission = findByRoleAndRuleAndFeature(role, rule, featureType, featureFqn);
        if (permission != null) {
            return permission;
        }
        permission = getApplicationPermissionFactory().newApplicationPermission();
        permission.setRole(role);
        permission.setRule(rule);
        permission.setMode(mode);
        permission.setFeatureType(featureType);
        permission.setFeatureFqn(featureFqn);
        container.persistIfNotAlready(permission);
        return permission;
    }
    /**
     * Overload that derives the feature from package/class/member names.
     *
     * NOTE(review): unlike the 5-arg overload this variant instantiates via
     * container.newTransientInstance (bypassing the pluggable factory) and does
     * not reuse an existing matching permission — confirm whether that
     * difference is intentional.
     */
    @Programmatic
    public ApplicationPermission newPermission(
            final ApplicationRole role,
            final ApplicationPermissionRule rule,
            final ApplicationPermissionMode mode,
            final String featurePackage,
            final String featureClassName,
            final String featureMemberName) {
        final ApplicationFeatureId featureId = ApplicationFeatureId.newFeature(featurePackage, featureClassName, featureMemberName);
        final ApplicationFeatureType featureType = featureId.getType();
        final String featureFqn = featureId.getFullyQualifiedName();
        final ApplicationFeature feature = applicationFeatureRepository.findFeature(featureId);
        if(feature == null) {
            container.warnUser("No such " + featureType.name().toLowerCase() + ": " + featureFqn);
            return null;
        }
        final ApplicationPermission permission = container.newTransientInstance(ApplicationPermission.class);
        permission.setRole(role);
        permission.setRule(rule);
        permission.setMode(mode);
        permission.setFeatureType(featureType);
        permission.setFeatureFqn(featureFqn);
        container.persistIfNotAlready(permission);
        return permission;
    }
    //endregion
    //region > allPermission (programmatic)
    /**
     * Every persisted permission, regardless of role or feature.
     */
    @Programmatic
    public List<ApplicationPermission> allPermissions() {
        return container.allInstances(ApplicationPermission.class);
    }
    //endregion
    //region > findOrphaned (programmatic)
    /**
     * Permissions whose referenced feature (package, class or member) no longer
     * exists in the application's feature repository.
     */
    @Programmatic
    public List<ApplicationPermission> findOrphaned() {
        final List<String> packageNames = applicationFeatureRepository.packageNames();
        // build the set of all fully-qualified class names known for any member type
        final Set<String> availableClasses = Sets.newTreeSet();
        for (String packageName : packageNames) {
            appendClasses(packageName, ApplicationMemberType.PROPERTY, availableClasses);
            appendClasses(packageName, ApplicationMemberType.COLLECTION, availableClasses);
            appendClasses(packageName, ApplicationMemberType.ACTION, availableClasses);
        }
        final List<ApplicationPermission> orphaned = Lists.newArrayList();
        final List<ApplicationPermission> permissions = allPermissions();
        for (ApplicationPermission permission : permissions) {
            final ApplicationFeatureType featureType = permission.getFeatureType();
            final String featureFqn = permission.getFeatureFqn();
            switch (featureType) {
                case PACKAGE:
                    if(!packageNames.contains(featureFqn)) {
                        orphaned.add(permission);
                    }
                    break;
                case CLASS:
                    if(!availableClasses.contains(featureFqn)) {
                        orphaned.add(permission);
                    }
                    break;
                case MEMBER:
                    // member FQNs are assumed to be of the form "pkg.Class#member";
                    // NOTE(review): a missing '#' would make split.get(1) throw — confirm invariant
                    final List<String> split = Splitter.on('#').splitToList(featureFqn);
                    final String fqClassName = split.get(0);
                    final String memberName = split.get(1);
                    final int lastDot = fqClassName.lastIndexOf('.');
                    final String packageName = fqClassName.substring(0, lastDot);
                    final String className = fqClassName.substring(lastDot + 1);
                    final List<String> memberNames = memberNamesOf(packageName, className);
                    if(!memberNames.contains(memberName)) {
                        orphaned.add(permission);
                    }
                    break;
            }
        }
        return orphaned;
    }
    // Adds "packageName.className" entries for every class in the package that
    // has at least one member of the given type.
    private void appendClasses(
            final String packageName, final ApplicationMemberType memberType, final Set<String> availableClasses) {
        final List<String> classNames = applicationFeatureRepository.classNamesContainedIn(packageName, memberType);
        for (String className : classNames) {
            availableClasses.add(packageName + "." + className);
        }
    }
    // All member names (properties, collections and actions) of the given class.
    private List<String> memberNamesOf(final String packageName, final String className) {
        final List<String> memberNames = Lists.newArrayList();
        appendMembers(packageName, className, ApplicationMemberType.PROPERTY, memberNames);
        appendMembers(packageName, className, ApplicationMemberType.COLLECTION, memberNames);
        appendMembers(packageName, className, ApplicationMemberType.ACTION, memberNames);
        return memberNames;
    }
    private void appendMembers(
            final String packageName,
            final String className,
            final ApplicationMemberType applicationMemberType,
            final List<String> memberNames) {
        final List<String> memberNamesOf =
                applicationFeatureRepository.memberNamesOf(packageName, className, applicationMemberType);
        memberNames.addAll(memberNamesOf);
    }
    //endregion
    //region > (injected)
    @Inject
    DomainObjectContainer container;
    @Inject
    ApplicationFeatureRepositoryDefault applicationFeatureRepository;
    /**
     * Will only be injected to if the programmer has supplied an implementation. Otherwise
     * this class will install a default implementation in the {@link #getApplicationPermissionFactory() accessor}.
     */
    @Inject
    ApplicationPermissionFactory applicationPermissionFactory;
    // Lazily falls back to the default factory when none was injected.
    private ApplicationPermissionFactory getApplicationPermissionFactory() {
        return applicationPermissionFactory != null
                ? applicationPermissionFactory
                : (applicationPermissionFactory = new ApplicationPermissionFactory.Default(container));
    }
    @Inject
    QueryResultsCache queryResultsCache;
    //endregion
}
| |
/**
* JBoss, Home of Professional Open Source.
* Copyright 2014 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.pnc.managers;
import org.commonjava.maven.atlas.ident.ref.ProjectVersionRef;
import org.commonjava.maven.atlas.ident.ref.SimpleProjectVersionRef;
import org.jboss.pnc.causewayclient.CausewayClient;
import org.jboss.pnc.causewayclient.remotespi.Build;
import org.jboss.pnc.causewayclient.remotespi.BuildImportRequest;
import org.jboss.pnc.causewayclient.remotespi.BuildRoot;
import org.jboss.pnc.causewayclient.remotespi.BuiltArtifact;
import org.jboss.pnc.causewayclient.remotespi.CallbackTarget;
import org.jboss.pnc.causewayclient.remotespi.Dependency;
import org.jboss.pnc.causewayclient.remotespi.Logfile;
import org.jboss.pnc.causewayclient.remotespi.MavenBuild;
import org.jboss.pnc.causewayclient.remotespi.MavenBuiltArtifact;
import org.jboss.pnc.common.maven.Gav;
import org.jboss.pnc.model.Artifact;
import org.jboss.pnc.model.BuildEnvironment;
import org.jboss.pnc.model.BuildRecord;
import org.jboss.pnc.model.BuildRecordPushResult;
import org.jboss.pnc.rest.restmodel.BuildRecordPushResultRest;
import org.jboss.pnc.spi.coordinator.ProcessException;
import org.jboss.pnc.spi.datastore.predicates.ArtifactPredicates;
import org.jboss.pnc.spi.datastore.repositories.ArtifactRepository;
import org.jboss.pnc.spi.datastore.repositories.BuildRecordPushResultRepository;
import org.jboss.pnc.spi.datastore.repositories.BuildRecordRepository;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.ejb.Stateless;
import javax.enterprise.event.Event;
import javax.inject.Inject;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
/**
* @author <a href="mailto:matejonnet@gmail.com">Matej Lazar</a>
*/
@Stateless
public class BuildResultPushManager {
private BuildRecordRepository buildRecordRepository;
private BuildRecordPushResultRepository buildRecordPushResultRepository;
private ArtifactRepository artifactRepository;
private InProgress inProgress;
private CausewayClient causewayClient;
private Event<BuildRecordPushResultRest> buildRecordPushResultRestEvent;
private Logger logger = LoggerFactory.getLogger(BuildResultPushManager.class);
private static final String PNC_BUILD_RECORD_PATH = "/pnc-rest/rest/build-records/%d";
private static final String PNC_BUILD_LOG_PATH = "/pnc-rest/rest/build-records/%d/log";
private static final String PNC_REPOUR_LOG_PATH = "/pnc-rest/rest/build-records/%d/repour-log";
    // No-arg constructor; never used directly, exists only so the EJB container
    // can proxy this stateless bean. Real wiring happens via the @Inject constructor.
    @Deprecated //required by EJB
    public BuildResultPushManager() {
    }
@Inject
public BuildResultPushManager(BuildRecordRepository buildRecordRepository,
BuildRecordPushResultRepository buildRecordPushResultRepository,
InProgress inProgress,
Event<BuildRecordPushResultRest> buildRecordPushResultRestEvent,
ArtifactRepository artifactRepository,
CausewayClient causewayClient
) {
this.buildRecordRepository = buildRecordRepository;
this.buildRecordPushResultRepository = buildRecordPushResultRepository;
this.inProgress = inProgress;
this.buildRecordPushResultRestEvent = buildRecordPushResultRestEvent;
this.artifactRepository = artifactRepository;
this.causewayClient = causewayClient;
}
/**
*
* @param buildRecordIds
* @param authToken
* @param callBackUrlTemplate %d in the template will be replaced with BuildRecord.id
* @param tagPrefix
* @return
* @throws ProcessException
*/
public Set<Result> push(
Set<Integer> buildRecordIds,
String authToken,
String callBackUrlTemplate, String tagPrefix) throws ProcessException {
Set<Result> result = new HashSet<>();
for (Integer buildRecordId : buildRecordIds) {
Result pushResult = pushToCauseway(
authToken,
buildRecordId,
String.format(callBackUrlTemplate, buildRecordId),
tagPrefix);
result.add(pushResult);
}
return result;
}
private Result pushToCauseway(String authToken, Integer buildRecordId, String callBackUrl, String tagPrefix) throws ProcessException {
logger.info("Pushing to causeway BR.id: {}", buildRecordId);
if (!inProgress.add(buildRecordId, tagPrefix)) {
logger.warn("Push for BR.id {} already running.", buildRecordId);
return new Result(buildRecordId.toString(), Result.Status.REJECTED, "A push for this buildRecord is already running.");
}
BuildRecord buildRecord = buildRecordRepository.findByIdFetchProperties(buildRecordId);
if (buildRecord == null) {
logger.warn("Did not find build record by id: " + buildRecordId);
return new Result(buildRecordId.toString(), Result.Status.REJECTED, "Did not find build record by given id.");
}
if (!buildRecord.getStatus().completedSuccessfully()) {
logger.warn("Not pushing record id: " + buildRecordId + " because it is a failed build.");
return new Result(buildRecordId.toString(), Result.Status.REJECTED, "Cannot push failed build.");
}
BuildImportRequest buildImportRequest = createCausewayPushRequest(buildRecord, tagPrefix, callBackUrl, authToken);
boolean successfullyPushed = causewayClient.importBuild(buildImportRequest, authToken);
if (!successfullyPushed) {
inProgress.remove(buildRecordId);
return new Result(buildRecordId.toString(), Result.Status.REJECTED, "Failed to push to Causeway.");
} else {
return new Result(buildRecordId.toString(), Result.Status.ACCEPTED);
}
}
    /**
     * Assembles the Causeway {@link BuildImportRequest} for one build record:
     * build root (environment), built artifacts, dependencies, logs, GAV of the
     * execution root, and the callback target.
     */
    private BuildImportRequest createCausewayPushRequest(
            BuildRecord buildRecord,
            String tagPrefix,
            String callBackUrl,
            String authToken) {
        BuildEnvironment buildEnvironment = buildRecord.getBuildConfigurationAudited().getBuildEnvironment();
        logger.debug("BuildRecord: {}", buildRecord.getId());
        logger.debug("BuildEnvironment: {}", buildEnvironment);
        // Hard-coded container/OS/arch values for now; only the env attributes vary.
        BuildRoot buildRoot = new BuildRoot(
                "DOCKER_IMAGE",
                "x86_64", //TODO set based on env, some env has native build tools
                "rhel",
                "x86_64",
                buildEnvironment.getAttributes()
        );
        List<Artifact> builtArtifactEntities = artifactRepository.queryWithPredicates(ArtifactPredicates.withBuildRecordId(buildRecord.getId()));
        List<Artifact> dependencyEntities = artifactRepository.queryWithPredicates(ArtifactPredicates.withDependantBuildRecordId(buildRecord.getId()));
        logger.debug("Preparing BuildImportRequest containing {} built artifacts and {} dependencies.", builtArtifactEntities.size(), dependencyEntities.size());
        Set<Dependency> dependencies = collectDependencies(dependencyEntities);
        Set<BuiltArtifact> builtArtifacts = collectBuiltArtifacts(builtArtifactEntities);
        CallbackTarget callbackTarget = CallbackTarget.callbackPost(callBackUrl, authToken);
        // Execution root name is expected to be "groupId:artifactId" (see buildRootToGAV).
        ProjectVersionRef projectVersionRef = buildRootToGAV(
                buildRecord.getExecutionRootName(),
                buildRecord.getExecutionRootVersion());
        Set<Logfile> logs = new HashSet<>();
        addLogs(buildRecord, logs);
        Build build = new MavenBuild(
                projectVersionRef.getGroupId(),
                projectVersionRef.getArtifactId(),
                projectVersionRef.getVersionString(),
                buildRecord.getExecutionRootName(),
                buildRecord.getExecutionRootVersion(),
                "PNC",
                buildRecord.getId(),
                String.format(PNC_BUILD_RECORD_PATH, buildRecord.getId()),
                buildRecord.getStartTime(),
                buildRecord.getEndTime(),
                buildRecord.getScmRepoURL(),
                buildRecord.getScmRevision(),
                buildRoot,
                logs,
                dependencies,
                builtArtifacts,
                tagPrefix
        );
        return new BuildImportRequest(callbackTarget, build);
    }
/**
 * Collects the available log files (build log and repour log) of the given
 * build record into the supplied set. For each log that is absent or empty
 * only a warning is emitted.
 *
 * @param buildRecord record whose logs are gathered
 * @param logs target set receiving the discovered log file descriptors
 */
private void addLogs(BuildRecord buildRecord, Set<Logfile> logs) {
    if (buildRecord.getBuildLogSize() == null || buildRecord.getBuildLogSize() <= 0) {
        logger.warn("Missing build log for BR.id: {}.", buildRecord.getId());
    } else {
        logs.add(new Logfile("build.log", getBuildLogPath(buildRecord.getId()), buildRecord.getBuildLogSize(), buildRecord.getBuildLogMd5()));
    }
    if (buildRecord.getRepourLogSize() == null || buildRecord.getRepourLogSize() <= 0) {
        logger.warn("Missing repour log for BR.id: {}.", buildRecord.getId());
    } else {
        logs.add(new Logfile("repour.log", getRepourLogPath(buildRecord.getId()), buildRecord.getRepourLogSize(), buildRecord.getRepourLogMd5()));
    }
    //TODO respond with error if logs are missing
}
/** Builds the repour-log download path for the given build record id. */
private String getRepourLogPath(Integer id) {
    String path = String.format(PNC_REPOUR_LOG_PATH, id);
    return path;
}
/** Builds the build-log download path for the given build record id. */
private String getBuildLogPath(Integer id) {
    String path = String.format(PNC_BUILD_LOG_PATH, id);
    return path;
}
/**
 * Parses an execution root name of the form {@code groupId:artifactId} into a
 * {@link ProjectVersionRef} combined with the given version.
 *
 * @param executionRootName maven "G:A" coordinate of the execution root
 * @param executionRootVersion version of the execution root
 * @return GAV reference of the execution root
 * @throws IllegalArgumentException if the name is not exactly "G:A"
 */
private ProjectVersionRef buildRootToGAV(String executionRootName, String executionRootVersion) {
    String[] splittedName = executionRootName.split(":");
    if (splittedName.length != 2) {
        throw new IllegalArgumentException("Execution root '" + executionRootName + "' doesn't seem to be maven G:A.");
    }
    // Length was validated above, so both groupId and artifactId are present.
    // (The previous "length < 2 ? null : ..." conditional was dead code.)
    return new SimpleProjectVersionRef(splittedName[0], splittedName[1], executionRootVersion);
}
/**
 * Converts the built-artifact entities of a build into Maven built-artifact
 * DTOs, parsing each entity's identifier as a GAV.
 *
 * @param builtArtifacts artifact entities produced by the build
 * @return set of Maven built-artifact descriptors
 */
private Set<BuiltArtifact> collectBuiltArtifacts(Collection<Artifact> builtArtifacts) {
    Set<BuiltArtifact> result = new HashSet<>();
    for (Artifact artifact : builtArtifacts) {
        Gav gav = Gav.parse(artifact.getIdentifier());
        result.add(new MavenBuiltArtifact(
                gav.getGroupId(),
                gav.getArtifactId(),
                gav.getVersion(),
                artifact.getId(),
                artifact.getFilename(),
                artifact.getTargetRepository().getRepositoryType().toString(),
                artifact.getMd5(),
                artifact.getDeployPath(),
                artifact.getTargetRepository().getRepositoryPath(),
                artifact.getSize().intValue()));
    }
    return result;
}
/**
 * Converts the dependency artifact entities of a build into dependency DTOs
 * (filename, md5 and size only).
 *
 * @param dependencies artifact entities the build depended on
 * @return set of dependency descriptors
 */
private Set<Dependency> collectDependencies(Collection<Artifact> dependencies) {
    Set<Dependency> result = new HashSet<>();
    for (Artifact artifact : dependencies) {
        result.add(new Dependency(artifact.getFilename(), artifact.getMd5(), artifact.getSize()));
    }
    return result;
}
/**
 * Completes an in-progress push for the given build record: removes the record
 * from the in-progress tracker, attaches the tag prefix that was registered
 * when the push started, persists the result and fires a REST event for it.
 *
 * @param buildRecordId id of the build record whose push completed
 * @param buildRecordPushResult result reported by the remote push service
 * @return id of the persisted push result
 * @throws ProcessException if the record is not tracked as in-progress
 */
public Integer complete(Integer buildRecordId, BuildRecordPushResult buildRecordPushResult) throws ProcessException {
    //accept only listed elements otherwise a new request might be wrongly completed from response of an older one
    String completedTag = inProgress.remove(buildRecordId);
    if (completedTag == null) {
        throw new ProcessException("Did not find the referenced element.");
    }
    buildRecordPushResult.setTagPrefix(completedTag);
    BuildRecordPushResult saved = buildRecordPushResultRepository.save(buildRecordPushResult);
    buildRecordPushResultRestEvent.fire(new BuildRecordPushResultRest(saved));
    return saved.getId();
}
/**
 * Cancels an in-progress push for the given build record id and fires a
 * CANCELED push-result event.
 *
 * NOTE(review): the CANCELED event is fired even when the id was not tracked
 * as in-progress (i.e. when this method returns false) — confirm whether
 * listeners expect events for non-tracked records.
 *
 * @param buildRecordId id of the build record whose push should be canceled
 * @return true if the id was actually tracked as in-progress and removed
 */
public boolean cancelInProgressPush(Integer buildRecordId) {
    BuildRecordPushResultRest buildRecordPushResultRest = BuildRecordPushResultRest.builder()
            .status(BuildRecordPushResult.Status.CANCELED)
            .buildRecordId(buildRecordId)
            .log("Canceled.")
            .build();
    boolean canceled = inProgress.remove(buildRecordId) != null;
    buildRecordPushResultRestEvent.fire(buildRecordPushResultRest);
    return canceled;
}
/** @return ids of the build records whose push is currently in progress */
public Set<Integer> getInProgress() {
    return inProgress.getAllIds();
}
}
| |
/*******************************************************************************
* Copyright 2011 See AUTHORS file.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.badlogic.gdx.backends.lwjgl;
import java.nio.Buffer;
import java.nio.ByteBuffer;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import java.nio.LongBuffer;
import java.nio.ShortBuffer;
import org.lwjgl.opengl.GL11;
import org.lwjgl.opengl.GL12;
import org.lwjgl.opengl.GL15;
import org.lwjgl.opengl.GL20;
import org.lwjgl.opengl.GL21;
import org.lwjgl.opengl.GL30;
import org.lwjgl.opengl.GL31;
import org.lwjgl.opengl.GL32;
import org.lwjgl.opengl.GL33;
import org.lwjgl.opengl.GL40;
import org.lwjgl.opengl.GL41;
import org.lwjgl.opengl.GL43;
import com.badlogic.gdx.utils.GdxRuntimeException;
/**
 * Desktop (LWJGL 2) implementation of the libGDX {@code GL30} interface.
 *
 * Every method delegates to the static LWJGL binding of the matching desktop
 * OpenGL class (GL11..GL43). Where LWJGL's NIO-buffer overloads derive counts,
 * sizes or types from the buffer itself, the explicit count/size/type
 * parameters of the GLES-style signature are intentionally ignored.
 *
 * NOTE(review): the buffer-consuming variants (e.g. {@code glGenQueries(int, IntBuffer)})
 * use relative puts/gets and therefore advance the buffer position — callers
 * presumably rewind/flip as needed; confirm against call sites.
 */
class LwjglGL30 extends LwjglGL20 implements com.badlogic.gdx.graphics.GL30 {

    @Override
    public void glReadBuffer (int mode) {
        GL11.glReadBuffer(mode);
    }

    // Indexed drawing: LWJGL infers the index type from the buffer class, so
    // the GLES "count"/"type" arguments are not forwarded for the buffer variant.
    @Override
    public void glDrawRangeElements (int mode, int start, int end, int count, int type, Buffer indices) {
        if(indices instanceof ByteBuffer) GL12.glDrawRangeElements(mode, start, end, (ByteBuffer)indices);
        else if(indices instanceof ShortBuffer) GL12.glDrawRangeElements(mode, start, end, (ShortBuffer)indices);
        else if(indices instanceof IntBuffer) GL12.glDrawRangeElements(mode, start, end, (IntBuffer)indices);
        else throw new GdxRuntimeException("indices must be byte, short or int buffer");
    }

    @Override
    public void glDrawRangeElements (int mode, int start, int end, int count, int type, int offset) {
        GL12.glDrawRangeElements(mode, start, end, count, type, offset);
    }

    // 3D texture upload: only byte-buffer pixel data is supported here.
    @Override
    public void glTexImage3D (int target, int level, int internalformat, int width, int height, int depth, int border, int format,
        int type, Buffer pixels) {
        if(pixels instanceof ByteBuffer) GL12.glTexImage3D(target, level, internalformat, width, height, depth, border, format, type, (ByteBuffer)pixels);
        else throw new GdxRuntimeException("pixels must be byte buffer");
    }

    @Override
    public void glTexImage3D (int target, int level, int internalformat, int width, int height, int depth, int border, int format,
        int type, int offset) {
        GL12.glTexImage3D(target, level, internalformat, width, height, depth, border, format, type, offset);
    }

    @Override
    public void glTexSubImage3D (int target, int level, int xoffset, int yoffset, int zoffset, int width, int height, int depth,
        int format, int type, Buffer pixels) {
        if(pixels instanceof ByteBuffer) GL12.glTexSubImage3D(target, level, xoffset, yoffset, zoffset, width, height, depth, format, type, (ByteBuffer)pixels);
        else throw new GdxRuntimeException("pixels must be byte buffer");
    }

    @Override
    public void glTexSubImage3D (int target, int level, int xoffset, int yoffset, int zoffset, int width, int height, int depth,
        int format, int type, int offset) {
        GL12.glTexSubImage3D(target, level, xoffset, yoffset, zoffset, width, height, depth, format, type, offset);
    }

    @Override
    public void glCopyTexSubImage3D (int target, int level, int xoffset, int yoffset, int zoffset, int x, int y, int width,
        int height) {
        GL12.glCopyTexSubImage3D(target, level, xoffset, yoffset, zoffset, x, y, width, height);
    }

    // Query objects: LWJGL generates one id per call, so array/buffer variants loop.
    @Override
    public void glGenQueries (int n, int[] ids, int offset) {
        for(int i = offset; i < offset + n; i++) {
            ids[i] = GL15.glGenQueries();
        }
    }

    @Override
    public void glGenQueries (int n, IntBuffer ids) {
        for(int i = 0; i < n; i++) {
            ids.put(GL15.glGenQueries());
        }
    }

    @Override
    public void glDeleteQueries (int n, int[] ids, int offset) {
        for(int i = offset; i < offset + n; i++) {
            GL15.glDeleteQueries(ids[i]);
        }
    }

    @Override
    public void glDeleteQueries (int n, IntBuffer ids) {
        for(int i = 0; i < n; i++) {
            GL15.glDeleteQueries(ids.get());
        }
    }

    @Override
    public boolean glIsQuery (int id) {
        return GL15.glIsQuery(id);
    }

    @Override
    public void glBeginQuery (int target, int id) {
        GL15.glBeginQuery(target, id);
    }

    @Override
    public void glEndQuery (int target) {
        GL15.glEndQuery(target);
    }

    @Override
    public void glGetQueryiv (int target, int pname, IntBuffer params) {
        GL15.glGetQuery(target, pname, params);
    }

    @Override
    public void glGetQueryObjectuiv (int id, int pname, IntBuffer params) {
        GL15.glGetQueryObjectu(id, pname, params);
    }

    @Override
    public boolean glUnmapBuffer (int target) {
        return GL15.glUnmapBuffer(target);
    }

    @Override
    public Buffer glGetBufferPointerv (int target, int pname) {
        return GL15.glGetBufferPointer(target, pname);
    }

    @Override
    public void glDrawBuffers (int n, IntBuffer bufs) {
        GL20.glDrawBuffers(bufs);
    }

    // Non-square uniform matrices (GL 2.1): count is implied by the buffer.
    @Override
    public void glUniformMatrix2x3fv (int location, int count, boolean transpose, FloatBuffer value) {
        GL21.glUniformMatrix2x3(location, transpose, value);
    }

    @Override
    public void glUniformMatrix3x2fv (int location, int count, boolean transpose, FloatBuffer value) {
        GL21.glUniformMatrix3x2(location, transpose, value);
    }

    @Override
    public void glUniformMatrix2x4fv (int location, int count, boolean transpose, FloatBuffer value) {
        GL21.glUniformMatrix2x4(location, transpose, value);
    }

    @Override
    public void glUniformMatrix4x2fv (int location, int count, boolean transpose, FloatBuffer value) {
        GL21.glUniformMatrix4x2(location, transpose, value);
    }

    @Override
    public void glUniformMatrix3x4fv (int location, int count, boolean transpose, FloatBuffer value) {
        GL21.glUniformMatrix3x4(location, transpose, value);
    }

    @Override
    public void glUniformMatrix4x3fv (int location, int count, boolean transpose, FloatBuffer value) {
        GL21.glUniformMatrix4x3(location, transpose, value);
    }

    // Framebuffer / renderbuffer objects (GL 3.0).
    @Override
    public void glBlitFramebuffer (int srcX0, int srcY0, int srcX1, int srcY1, int dstX0, int dstY0, int dstX1, int dstY1,
        int mask, int filter) {
        GL30.glBlitFramebuffer(srcX0, srcY0, srcX1, srcY1, dstX0, dstY0, dstX1, dstY1, mask, filter);
    }

    @Override
    public void glBindFramebuffer (int target, int framebuffer) {
        GL30.glBindFramebuffer(target, framebuffer);
    }

    @Override
    public void glBindRenderbuffer (int target, int renderbuffer) {
        GL30.glBindRenderbuffer(target, renderbuffer);
    }

    @Override
    public int glCheckFramebufferStatus (int target) {
        return GL30.glCheckFramebufferStatus(target);
    }

    @Override
    public void glDeleteFramebuffers (int n, IntBuffer framebuffers) {
        GL30.glDeleteFramebuffers(framebuffers);
    }

    @Override
    public void glDeleteFramebuffer (int framebuffer) {
        GL30.glDeleteFramebuffers(framebuffer);
    }

    @Override
    public void glDeleteRenderbuffers (int n, IntBuffer renderbuffers) {
        GL30.glDeleteRenderbuffers(renderbuffers);
    }

    @Override
    public void glDeleteRenderbuffer (int renderbuffer) {
        GL30.glDeleteRenderbuffers(renderbuffer);
    }

    @Override
    public void glGenerateMipmap (int target) {
        GL30.glGenerateMipmap(target);
    }

    @Override
    public void glGenFramebuffers (int n, IntBuffer framebuffers) {
        GL30.glGenFramebuffers(framebuffers);
    }

    @Override
    public int glGenFramebuffer () {
        return GL30.glGenFramebuffers();
    }

    @Override
    public void glGenRenderbuffers (int n, IntBuffer renderbuffers) {
        GL30.glGenRenderbuffers(renderbuffers);
    }

    @Override
    public int glGenRenderbuffer () {
        return GL30.glGenRenderbuffers();
    }

    @Override
    public void glGetRenderbufferParameteriv (int target, int pname, IntBuffer params) {
        GL30.glGetRenderbufferParameter(target, pname, params);
    }

    @Override
    public boolean glIsFramebuffer (int framebuffer) {
        return GL30.glIsFramebuffer(framebuffer);
    }

    @Override
    public boolean glIsRenderbuffer (int renderbuffer) {
        return GL30.glIsRenderbuffer(renderbuffer);
    }

    @Override
    public void glRenderbufferStorage (int target, int internalformat, int width, int height) {
        GL30.glRenderbufferStorage(target, internalformat, width, height);
    }

    @Override
    public void glRenderbufferStorageMultisample (int target, int samples, int internalformat, int width, int height) {
        GL30.glRenderbufferStorageMultisample(target, samples, internalformat, width, height);
    }

    @Override
    public void glFramebufferTexture2D (int target, int attachment, int textarget, int texture, int level) {
        GL30.glFramebufferTexture2D(target, attachment, textarget, texture, level);
    }

    @Override
    public void glFramebufferRenderbuffer (int target, int attachment, int renderbuffertarget, int renderbuffer) {
        GL30.glFramebufferRenderbuffer(target, attachment, renderbuffertarget, renderbuffer);
    }

    @Override
    public void glFramebufferTextureLayer (int target, int attachment, int texture, int level, int layer) {
        GL30.glFramebufferTextureLayer(target, attachment, texture, level, layer);
    }

    @Override
    public void glFlushMappedBufferRange (int target, int offset, int length) {
        GL30.glFlushMappedBufferRange(target, offset, length);
    }

    // Vertex array objects (GL 3.0).
    @Override
    public void glBindVertexArray (int array) {
        GL30.glBindVertexArray(array);
    }

    @Override
    public void glDeleteVertexArrays (int n, int[] arrays, int offset) {
        for(int i = offset; i < offset + n; i++) {
            GL30.glDeleteVertexArrays(arrays[i]);
        }
    }

    @Override
    public void glDeleteVertexArrays (int n, IntBuffer arrays) {
        GL30.glDeleteVertexArrays(arrays);
    }

    @Override
    public void glGenVertexArrays (int n, int[] arrays, int offset) {
        for(int i = offset; i < offset + n; i++) {
            arrays[i] = GL30.glGenVertexArrays();
        }
    }

    @Override
    public void glGenVertexArrays (int n, IntBuffer arrays) {
        GL30.glGenVertexArrays(arrays);
    }

    @Override
    public boolean glIsVertexArray (int array) {
        return GL30.glIsVertexArray(array);
    }

    // Transform feedback (GL 3.0 / 4.0).
    @Override
    public void glBeginTransformFeedback (int primitiveMode) {
        GL30.glBeginTransformFeedback(primitiveMode);
    }

    @Override
    public void glEndTransformFeedback () {
        GL30.glEndTransformFeedback();
    }

    @Override
    public void glBindBufferRange (int target, int index, int buffer, int offset, int size) {
        GL30.glBindBufferRange(target, index, buffer, offset, size);
    }

    @Override
    public void glBindBufferBase (int target, int index, int buffer) {
        GL30.glBindBufferBase(target, index, buffer);
    }

    @Override
    public void glTransformFeedbackVaryings (int program, String[] varyings, int bufferMode) {
        GL30.glTransformFeedbackVaryings(program, varyings, bufferMode);
    }

    @Override
    public void glVertexAttribIPointer (int index, int size, int type, int stride, int offset) {
        GL30.glVertexAttribIPointer(index, size, type, stride, offset);
    }

    @Override
    public void glGetVertexAttribIiv (int index, int pname, IntBuffer params) {
        GL30.glGetVertexAttribI(index, pname, params);
    }

    @Override
    public void glGetVertexAttribIuiv (int index, int pname, IntBuffer params) {
        GL30.glGetVertexAttribIu(index, pname, params);
    }

    @Override
    public void glVertexAttribI4i (int index, int x, int y, int z, int w) {
        GL30.glVertexAttribI4i(index, x, y, z, w);
    }

    @Override
    public void glVertexAttribI4ui (int index, int x, int y, int z, int w) {
        GL30.glVertexAttribI4ui(index, x, y, z, w);
    }

    @Override
    public void glGetUniformuiv (int program, int location, IntBuffer params) {
        GL30.glGetUniformu(program, location, params);
    }

    @Override
    public int glGetFragDataLocation (int program, String name) {
        return GL30.glGetFragDataLocation(program, name);
    }

    @Override
    public void glUniform1uiv (int location, int count, IntBuffer value) {
        GL30.glUniform1u(location, value);
    }

    @Override
    public void glUniform3uiv (int location, int count, IntBuffer value) {
        GL30.glUniform3u(location, value);
    }

    @Override
    public void glUniform4uiv (int location, int count, IntBuffer value) {
        GL30.glUniform4u(location, value);
    }

    @Override
    public void glClearBufferiv (int buffer, int drawbuffer, IntBuffer value) {
        GL30.glClearBuffer(buffer, drawbuffer, value);
    }

    @Override
    public void glClearBufferuiv (int buffer, int drawbuffer, IntBuffer value) {
        GL30.glClearBufferu(buffer, drawbuffer, value);
    }

    @Override
    public void glClearBufferfv (int buffer, int drawbuffer, FloatBuffer value) {
        GL30.glClearBuffer(buffer, drawbuffer, value);
    }

    @Override
    public void glClearBufferfi (int buffer, int drawbuffer, float depth, int stencil) {
        GL30.glClearBufferfi(buffer, drawbuffer, depth, stencil);
    }

    @Override
    public String glGetStringi (int name, int index) {
        return GL30.glGetStringi(name, index);
    }

    @Override
    public void glCopyBufferSubData (int readTarget, int writeTarget, int readOffset, int writeOffset, int size) {
        GL31.glCopyBufferSubData(readTarget, writeTarget, readOffset, writeOffset, size);
    }

    // Uniform buffer objects (GL 3.1).
    @Override
    public void glGetUniformIndices (int program, String[] uniformNames, IntBuffer uniformIndices) {
        GL31.glGetUniformIndices(program, uniformNames, uniformIndices);
    }

    @Override
    public void glGetActiveUniformsiv (int program, int uniformCount, IntBuffer uniformIndices, int pname, IntBuffer params) {
        GL31.glGetActiveUniforms(program, uniformIndices, pname, params);
    }

    @Override
    public int glGetUniformBlockIndex (int program, String uniformBlockName) {
        return GL31.glGetUniformBlockIndex(program, uniformBlockName);
    }

    @Override
    public void glGetActiveUniformBlockiv (int program, int uniformBlockIndex, int pname, IntBuffer params) {
        params.put(GL31.glGetActiveUniformBlocki(program, uniformBlockIndex, pname));
    }

    @Override
    public void glGetActiveUniformBlockName (int program, int uniformBlockIndex, Buffer length, Buffer uniformBlockName) {
        GL31.glGetActiveUniformBlockName(program, uniformBlockIndex, (IntBuffer)length, (ByteBuffer)uniformBlockName);
    }

    @Override
    public String glGetActiveUniformBlockName (int program, int uniformBlockIndex) {
        // 1024 is the maximum name length fetched; longer names are truncated.
        return GL31.glGetActiveUniformBlockName(program, uniformBlockIndex, 1024);
    }

    @Override
    public void glUniformBlockBinding (int program, int uniformBlockIndex, int uniformBlockBinding) {
        GL31.glUniformBlockBinding(program, uniformBlockIndex, uniformBlockBinding);
    }

    @Override
    public void glDrawArraysInstanced (int mode, int first, int count, int instanceCount) {
        GL31.glDrawArraysInstanced(mode, first, count, instanceCount);
    }

    @Override
    public void glDrawElementsInstanced (int mode, int count, int type, int indicesOffset, int instanceCount) {
        GL31.glDrawElementsInstanced(mode, count, type, indicesOffset, instanceCount);
    }

    @Override
    public void glGetInteger64v (int pname, LongBuffer params) {
        GL32.glGetInteger64(pname, params);
    }

    @Override
    public void glGetBufferParameteri64v (int target, int pname, LongBuffer params) {
        params.put(GL32.glGetBufferParameteri64(target, pname));
    }

    // Sampler objects (GL 3.3).
    @Override
    public void glGenSamplers (int count, int[] samplers, int offset) {
        for(int i = offset; i < offset + count; i++) {
            samplers[i] = GL33.glGenSamplers();
        }
    }

    @Override
    public void glGenSamplers (int count, IntBuffer samplers) {
        GL33.glGenSamplers(samplers);
    }

    @Override
    public void glDeleteSamplers (int count, int[] samplers, int offset) {
        for(int i = offset; i < offset + count; i++) {
            GL33.glDeleteSamplers(samplers[i]);
        }
    }

    @Override
    public void glDeleteSamplers (int count, IntBuffer samplers) {
        GL33.glDeleteSamplers(samplers);
    }

    @Override
    public boolean glIsSampler (int sampler) {
        return GL33.glIsSampler(sampler);
    }

    @Override
    public void glBindSampler (int unit, int sampler) {
        GL33.glBindSampler(unit, sampler);
    }

    @Override
    public void glSamplerParameteri (int sampler, int pname, int param) {
        GL33.glSamplerParameteri(sampler, pname, param);
    }

    @Override
    public void glSamplerParameteriv (int sampler, int pname, IntBuffer param) {
        GL33.glSamplerParameter(sampler, pname, param);
    }

    @Override
    public void glSamplerParameterf (int sampler, int pname, float param) {
        GL33.glSamplerParameterf(sampler, pname, param);
    }

    @Override
    public void glSamplerParameterfv (int sampler, int pname, FloatBuffer param) {
        GL33.glSamplerParameter(sampler, pname, param);
    }

    @Override
    public void glGetSamplerParameteriv (int sampler, int pname, IntBuffer params) {
        GL33.glGetSamplerParameterI(sampler, pname, params);
    }

    @Override
    public void glGetSamplerParameterfv (int sampler, int pname, FloatBuffer params) {
        GL33.glGetSamplerParameter(sampler, pname, params);
    }

    @Override
    public void glVertexAttribDivisor (int index, int divisor) {
        GL33.glVertexAttribDivisor(index, divisor);
    }

    // Transform feedback objects (GL 4.0).
    @Override
    public void glBindTransformFeedback (int target, int id) {
        GL40.glBindTransformFeedback(target, id);
    }

    @Override
    public void glDeleteTransformFeedbacks (int n, int[] ids, int offset) {
        for(int i = offset; i < offset + n; i++) {
            GL40.glDeleteTransformFeedbacks(ids[i]);
        }
    }

    @Override
    public void glDeleteTransformFeedbacks (int n, IntBuffer ids) {
        GL40.glDeleteTransformFeedbacks(ids);
    }

    @Override
    public void glGenTransformFeedbacks (int n, int[] ids, int offset) {
        for(int i = offset; i < offset + n; i++) {
            ids[i] = GL40.glGenTransformFeedbacks();
        }
    }

    @Override
    public void glGenTransformFeedbacks (int n, IntBuffer ids) {
        GL40.glGenTransformFeedbacks(ids);
    }

    @Override
    public boolean glIsTransformFeedback (int id) {
        return GL40.glIsTransformFeedback(id);
    }

    @Override
    public void glPauseTransformFeedback () {
        GL40.glPauseTransformFeedback();
    }

    @Override
    public void glResumeTransformFeedback () {
        GL40.glResumeTransformFeedback();
    }

    @Override
    public void glProgramParameteri (int program, int pname, int value) {
        GL41.glProgramParameteri(program, pname, value);
    }

    @Override
    public void glInvalidateFramebuffer (int target, int numAttachments, IntBuffer attachments) {
        GL43.glInvalidateFramebuffer(target, attachments);
    }

    @Override
    public void glInvalidateSubFramebuffer (int target, int numAttachments, IntBuffer attachments, int x, int y, int width,
        int height) {
        GL43.glInvalidateSubFramebuffer(target, attachments, x, y, width, height);
    }
}
| |
/*
* Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package sql.generic.ddl;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import sql.generic.GenericBBHelper;
/**
 * Metadata about a database table: schema, columns, constraints, DDL text and
 * the GemFireXD-specific clauses (partitioning, persistence, eviction, HDFS,
 * listeners) used to regenerate its CREATE TABLE statement.
 *
 * Equality and hash code are based solely on the fully qualified table name.
 */
public class TableInfo implements java.io.Serializable {
    private static final long serialVersionUID = 1L;

    // Identity
    String tableName;
    SchemaInfo schema;

    // Column metadata: lookup map keyed by plain column name plus ordered list.
    Map<String, ColumnInfo> columns;
    List<ColumnInfo> columnList;

    // Constraint metadata
    boolean hasConstraints = false;
    PKConstraint primaryKey;
    List<FKConstraint> foreignKeys;
    List<UniqueConstraint> uniqueKeys;
    List<CheckConstraint> checkConstraints;

    // DDL text for the Derby and GemFireXD backing stores.
    String derbyDDL;
    String gfxdDDL;

    int numPrForRecovery;
    TableInfo colocatedParent = null;
    String partitioningClause = "";
    List<ColumnInfo> partitioningColumns;
    String persistClause = "";
    String evictionClause = "";
    String serverGroups = "";
    boolean isOffHeap;
    boolean enableConcurrencyCheck = false;
    String hdfsClause = "";
    List<String> asyncEventListnerList = new ArrayList<String>();
    List<String> gatewaySenderList = new ArrayList<String>();

    /** Creates table info for {@code schemaName.tableName}, resolving the schema via the blackboard. */
    public TableInfo(String schemaName, String tableName) {
        this.tableName = tableName;
        this.schema = GenericBBHelper.getSchemaInfo(schemaName);
        this.foreignKeys = new ArrayList<FKConstraint>();
        this.uniqueKeys = new ArrayList<UniqueConstraint>();
        this.checkConstraints = new ArrayList<CheckConstraint>();
    }

    /** Creates table info from a "schema.table" qualified name. */
    public TableInfo(String fullyQualifiedtableName) {
        this(fullyQualifiedtableName.split("\\.")[0], fullyQualifiedtableName.split("\\.")[1]);
    }

    public int getNumPrForRecovery() {
        return numPrForRecovery;
    }

    public void setNumPrForRecovery(int numPrForRecovery) {
        this.numPrForRecovery = numPrForRecovery;
    }

    public String getPartitioningClause() {
        return partitioningClause;
    }

    public void setPartitioningClause(String partitioningClause) {
        this.partitioningClause = partitioningClause;
    }

    public List<ColumnInfo> getPartitioningColumns() {
        return partitioningColumns;
    }

    public void setPartitioningColumns(List<ColumnInfo> partitioningColumns) {
        this.partitioningColumns = partitioningColumns;
    }

    public String getTableName() {
        return tableName;
    }

    public void setTableName(String tableName) {
        this.tableName = tableName;
    }

    public SchemaInfo getSchemaInfo() {
        return this.schema;
    }

    public String getSchemaName() {
        return schema.getSchemaName();
    }

    public void setSchemaInfo(SchemaInfo schemaInfo) {
        this.schema = schemaInfo;
    }

    public Map<String, ColumnInfo> getColumns() {
        return columns;
    }

    public void setColumns(Map<String, ColumnInfo> columns) {
        this.columns = columns;
    }

    /**
     * Looks up a column by name. If the name contains the table name it is
     * assumed to be fully qualified as "schema.table.column" and the third
     * segment is used — TODO confirm this is the only qualified form passed in.
     */
    public ColumnInfo getColumn(String name) {
        if (name.contains(tableName)) {
            name = name.split("\\.")[2];
        }
        return columns.get(name);
    }

    public boolean isHasConstraints() {
        return hasConstraints;
    }

    public void setHasConstraints(boolean hasConstraints) {
        this.hasConstraints = hasConstraints;
    }

    /** @return "schema.table" qualified name, the identity used by equals/hashCode. */
    public String getFullyQualifiedTableName() {
        return schema.getSchemaName() + "." + tableName;
    }

    public PKConstraint getPrimaryKey() {
        return primaryKey;
    }

    public void setPrimaryKey(PKConstraint primaryKey) {
        this.primaryKey = primaryKey;
    }

    public List<FKConstraint> getForeignKeys() {
        return Collections.unmodifiableList(foreignKeys);
    }

    /**
     * Replaces this table's foreign keys and keeps the shared "dropped FK"
     * bookkeeping in {@code constraintInfoHolder} consistent: FKs that
     * disappeared are recorded as dropped, FKs that reappeared are cleared.
     *
     * @param foreignKeys the new complete foreign-key list
     * @param constraintInfoHolder shared holder whose dropped-FK map (keyed by
     *        fully qualified table name) is updated
     */
    public void setForeignKeys(List<FKConstraint> foreignKeys, ConstraintInfoHolder constraintInfoHolder) {
        List<FKConstraint> oldFK = new ArrayList<FKConstraint>(this.foreignKeys);
        List<FKConstraint> newFK = new ArrayList<FKConstraint>(foreignKeys);
        HashMap<String, List<FKConstraint>> droppedFKMap = new HashMap<String, List<FKConstraint>>(constraintInfoHolder.getDroppedFkList());
        List<FKConstraint> droppedFKs = new ArrayList<FKConstraint>();
        if (!droppedFKMap.isEmpty()) {
            droppedFKs = droppedFKMap.get(this.getFullyQualifiedTableName());
            if (droppedFKs == null) {
                droppedFKs = new ArrayList<FKConstraint>();
            }
        }
        if (oldFK.size() > newFK.size()) {
            // FKs were dropped: remember which ones.
            oldFK.removeAll(newFK);
            droppedFKs.addAll(oldFK);
            droppedFKMap.put(this.getFullyQualifiedTableName(), droppedFKs);
        } else {
            // FKs were (re-)added: they are no longer "dropped".
            newFK.removeAll(oldFK);
            droppedFKs.removeAll(newFK);
            if (droppedFKs.isEmpty()) {
                // BUG FIX: the map is keyed by the qualified table name, so remove by
                // that key. The previous remove(this) passed a TableInfo and never
                // matched a String key, leaving stale empty entries in the map.
                droppedFKMap.remove(this.getFullyQualifiedTableName());
            } else {
                droppedFKMap.put(this.getFullyQualifiedTableName(), droppedFKs);
            }
        }
        constraintInfoHolder.setDroppedFkList(droppedFKMap);
        this.foreignKeys = foreignKeys;
    }

    public List<UniqueConstraint> getUniqueKeys() {
        return Collections.unmodifiableList(uniqueKeys);
    }

    public void setUniqueKeys(List<UniqueConstraint> uniqueKeys) {
        this.uniqueKeys = uniqueKeys;
    }

    public List<ColumnInfo> getColumnList() {
        return columnList;
    }

    public String getHdfsClause() {
        return hdfsClause;
    }

    public void setHdfsClause(String hdfsClause) {
        this.hdfsClause = hdfsClause;
    }

    /** Sets the ordered column list and rebuilds the name-keyed lookup map. */
    public void setColumnList(List<ColumnInfo> columnList) {
        this.columnList = columnList;
        columns = new HashMap<String, ColumnInfo>();
        for (ColumnInfo column : columnList) {
            columns.put(column.getColumnName(), column);
        }
    }

    public String getServerGroups() {
        return serverGroups;
    }

    public void setServerGroups(String serverGroups) {
        this.serverGroups = serverGroups;
    }

    public TableInfo getColocatedParent() {
        return colocatedParent;
    }

    public void setColocatedParent(TableInfo colocatedParent) {
        this.colocatedParent = colocatedParent;
    }

    public boolean isOffHeap() {
        return isOffHeap;
    }

    public void setOffHeap(boolean isOffHeap) {
        this.isOffHeap = isOffHeap;
    }

    public String getPersistClause() {
        return persistClause;
    }

    public void setPersistClause(String persistClause) {
        this.persistClause = persistClause;
    }

    public List<String> getAsyncEventListnerList() {
        return asyncEventListnerList;
    }

    /** Adds the listener name if it is not already registered. */
    public void addAsyncEventListnerToList(String asyncEventListnerName) {
        if (!this.asyncEventListnerList.contains(asyncEventListnerName)) {
            this.asyncEventListnerList.add(asyncEventListnerName);
        }
    }

    public List<String> getGatewaySenderList() {
        return gatewaySenderList;
    }

    /** Adds the gateway sender name if it is not already registered. */
    public void addGatewaySenderToList(String gatewaySenderName) {
        if (!this.gatewaySenderList.contains(gatewaySenderName)) {
            this.gatewaySenderList.add(gatewaySenderName);
        }
    }

    public boolean isEnableConcurrencyCheck() {
        return enableConcurrencyCheck;
    }

    public void setEnableConcurrencyCheck(boolean enableConcurrencyCheck) {
        this.enableConcurrencyCheck = enableConcurrencyCheck;
    }

    public String getEvictionClause() {
        return evictionClause;
    }

    public void setEvictionClause(String evictionClause) {
        this.evictionClause = evictionClause;
    }

    /** Human-readable dump of columns, partitioning and all constraints (for logging). */
    public String toString() {
        StringBuilder finalInfo = new StringBuilder();
        String columnInfo = tableName + " " + getLoggingForColumnList(columnList, false);
        String partitionClause = "\n Partitioning Clause :: " + getPartitioningClause();
        StringBuilder pkInfo = new StringBuilder();
        if (primaryKey != null) {
            pkInfo = new StringBuilder("\n Primary Key: ")
                    .append(primaryKey.getConstraintName()).append(" ")
                    .append(getLoggingForColumnList(primaryKey.getColumns(), false));
        }
        StringBuilder fkInfo = new StringBuilder("\n Foreign Keys: ");
        for (FKConstraint foreignKey : foreignKeys) {
            fkInfo.append(foreignKey.getConstraintName()).append(" ")
                    .append(getLoggingForColumnList(foreignKey.getColumns(), false))
                    .append("\n");
        }
        StringBuilder ukInfo = new StringBuilder("\n Unique Keys: ");
        for (UniqueConstraint uniqueKey : uniqueKeys) {
            ukInfo.append(uniqueKey.getConstraintName()).append(" ")
                    .append(getLoggingForColumnList(uniqueKey.getColumns(), false))
                    .append("\n");
        }
        StringBuilder checkInfo = new StringBuilder("\n Check Constraints: ");
        for (CheckConstraint check : checkConstraints) {
            checkInfo.append(check.getConstraintName()).append(" ")
                    .append(check.definition)
                    .append(getLoggingForColumnList(check.getColumns(), false))
                    .append(" ").append("\n");
        }
        return finalInfo.append(columnInfo).append(partitionClause).append(pkInfo).append(fkInfo)
                .append(ukInfo).append(checkInfo).toString();
    }

    /**
     * Formats a column list for logging as "[name:type isNull:b,...]",
     * optionally including each column's value list.
     */
    public String getLoggingForColumnList(List<ColumnInfo> columns, boolean valueNeeded) {
        // StringBuilder instead of repeated String concatenation in the loop.
        StringBuilder columnInfo = new StringBuilder(" \n ColumnInformation [");
        for (ColumnInfo column : columns) {
            columnInfo.append(column.getColumnName()).append(":").append(column.getColumnType())
                    .append(" isNull:").append(column.isNull()).append(",");
            if (valueNeeded) {
                columnInfo.append(" ValueList: ");
                if (column.getValueList() != null) {
                    for (Object obj : column.getValueList()) {
                        columnInfo.append(" ").append(obj).append(" ");
                    }
                } else {
                    columnInfo.append((Object) column.getValueList()); // appends "null"
                }
            }
        }
        columnInfo.append("]");
        return columnInfo.toString();
    }

    public SchemaInfo getSchema() {
        return schema;
    }

    public void setSchema(SchemaInfo schema) {
        this.schema = schema;
    }

    public List<CheckConstraint> getCheckConstraints() {
        return Collections.unmodifiableList(checkConstraints);
    }

    public void setCheckconstraints(List<CheckConstraint> checkConstraints) {
        this.checkConstraints = checkConstraints;
    }

    public String getDerbyDDL() {
        return derbyDDL;
    }

    public void setDerbyDDL(String derbyDDL) {
        this.derbyDDL = derbyDDL;
    }

    public static long getSerialVersionUID() {
        return serialVersionUID;
    }

    /** Regenerates the GemFireXD DDL from this metadata (the gfxdDDL field is not used here). */
    public String getGfxdDDL() {
        return new CreateTableDDL(this).getDDL();
    }

    /**
     * Returns true when every primary-key column is contained in the given
     * unique constraint's columns.
     *
     * NOTE(review): assumes a primary key exists — NPE if primaryKey is null;
     * confirm callers guarantee that.
     */
    public boolean checkUniqIsPartOfPk(UniqueConstraint constraint) {
        List<ColumnInfo> uniqColumns = constraint.getColumns();
        List<ColumnInfo> pkColumns = primaryKey.getColumns();
        for (ColumnInfo column : pkColumns) {
            if (!uniqColumns.contains(column)) {
                return false;
            }
        }
        return true;
    }

    /** Equality is defined by the fully qualified table name only. */
    @Override
    public boolean equals(Object obj) {
        // instanceof already rejects null, so no separate null check is needed.
        if (obj instanceof TableInfo) {
            return ((TableInfo) obj).getFullyQualifiedTableName().equals(this.getFullyQualifiedTableName());
        }
        return false;
    }

    @Override
    public int hashCode() {
        return this.getFullyQualifiedTableName().hashCode();
    }
}
| |
package com.netflix.astyanax.recipes;
import java.util.concurrent.TimeUnit;
import junit.framework.Assert;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;
import com.google.common.collect.ImmutableMap;
import com.netflix.astyanax.AstyanaxContext;
import com.netflix.astyanax.Keyspace;
import com.netflix.astyanax.connectionpool.NodeDiscoveryType;
import com.netflix.astyanax.connectionpool.impl.ConnectionPoolConfigurationImpl;
import com.netflix.astyanax.connectionpool.impl.ConnectionPoolType;
import com.netflix.astyanax.connectionpool.impl.CountingConnectionPoolMonitor;
import com.netflix.astyanax.ddl.KeyspaceDefinition;
import com.netflix.astyanax.impl.AstyanaxConfigurationImpl;
import com.netflix.astyanax.model.ColumnFamily;
import com.netflix.astyanax.model.ConsistencyLevel;
import com.netflix.astyanax.recipes.locks.ColumnPrefixDistributedRowLock;
import com.netflix.astyanax.recipes.locks.StaleLockException;
import com.netflix.astyanax.serializers.LongSerializer;
import com.netflix.astyanax.serializers.StringSerializer;
import com.netflix.astyanax.test.EmbeddedCassandra;
import com.netflix.astyanax.thrift.ThriftFamilyFactory;
/**
* Ignore for now because of issues with running embedded cassandra from multiple unit tests
* @author elandau
*
*/
@Ignore
public class LockRecipeTest {
    /** Lock column family whose data columns are validated as longs. */
    private static ColumnFamily<String, String> LOCK_CF_LONG =
        ColumnFamily.newColumnFamily("LockCfLong", StringSerializer.get(), StringSerializer.get(), LongSerializer.get());

    /** Lock column family whose data columns are validated as strings. */
    private static ColumnFamily<String, String> LOCK_CF_STRING =
        ColumnFamily.newColumnFamily("LockCfString", StringSerializer.get(), StringSerializer.get(), StringSerializer.get());

    private static final int TTL = 20;
    private static final int TIMEOUT = 10;
    private static final String SEEDS = "localhost:9160";

    // Time (ms) to wait for the embedded Cassandra server to finish
    // bootstrapping before opening connections to it.
    private static final long CASSANDRA_WAIT_TIME = 3000;

    private static Keyspace keyspace;
    private static AstyanaxContext<Keyspace> keyspaceContext;
    private static EmbeddedCassandra cassandra;

    private static String TEST_CLUSTER_NAME = "cass_sandbox";
    private static String TEST_KEYSPACE_NAME = "LockUnitTest";

    /**
     * Starts an embedded Cassandra server and creates the test keyspace
     * plus the two lock column families.
     */
    @BeforeClass
    public static void setup() throws Exception {
        System.out.println("TESTING THRIFT KEYSPACE");
        cassandra = new EmbeddedCassandra();
        cassandra.start();
        // Give the server time to come up before connecting.
        Thread.sleep(CASSANDRA_WAIT_TIME);
        createKeyspace();
    }

    /** Shuts down the connection pool and the embedded server, if started. */
    @AfterClass
    public static void teardown() {
        if (keyspaceContext != null)
            keyspaceContext.shutdown();
        if (cassandra != null)
            cassandra.stop();
    }

    /**
     * Builds the Astyanax context, (re)creates the test keyspace and the
     * LOCK_CF_LONG / LOCK_CF_STRING column families.
     */
    public static void createKeyspace() throws Exception {
        keyspaceContext = new AstyanaxContext.Builder()
            .forCluster(TEST_CLUSTER_NAME)
            .forKeyspace(TEST_KEYSPACE_NAME)
            .withAstyanaxConfiguration(
                new AstyanaxConfigurationImpl()
                    .setDiscoveryType(NodeDiscoveryType.RING_DESCRIBE)
                    .setConnectionPoolType(ConnectionPoolType.TOKEN_AWARE))
            .withConnectionPoolConfiguration(
                new ConnectionPoolConfigurationImpl(TEST_CLUSTER_NAME
                    + "_" + TEST_KEYSPACE_NAME)
                    .setSocketTimeout(30000)
                    .setMaxTimeoutWhenExhausted(2000)
                    .setMaxConnsPerHost(10)
                    .setInitConnsPerHost(10)
                    .setSeeds(SEEDS))
            .withConnectionPoolMonitor(new CountingConnectionPoolMonitor())
            .buildKeyspace(ThriftFamilyFactory.getInstance());
        keyspaceContext.start();
        keyspace = keyspaceContext.getEntity();
        try {
            keyspace.dropKeyspace();
        }
        catch (Exception e) {
            // Best effort: the keyspace may not exist yet on a fresh server.
        }
        keyspace.createKeyspace(ImmutableMap.<String, Object>builder()
            .put("strategy_options", ImmutableMap.<String, Object>builder()
                .put("replication_factor", "1")
                .build())
            .put("strategy_class", "SimpleStrategy")
            .build()
        );
        keyspace.createColumnFamily(LOCK_CF_LONG, ImmutableMap.<String, Object>builder()
            .put("default_validation_class", "LongType")
            .put("key_validation_class", "UTF8Type")
            .put("comparator_type", "UTF8Type")
            .build());
        keyspace.createColumnFamily(LOCK_CF_STRING, ImmutableMap.<String, Object>builder()
            .put("default_validation_class", "UTF8Type")
            .put("key_validation_class", "UTF8Type")
            .put("comparator_type", "UTF8Type")
            .build());
        KeyspaceDefinition ki = keyspaceContext.getEntity().describeKeyspace();
        System.out.println("Describe Keyspace: " + ki.getName());
    }

    /**
     * Acquires a lock with a 2 second TTL and verifies that the lock column
     * exists while held and disappears after the TTL has expired.
     *
     * @param cf column family to take the lock in
     */
    private static void runTtlTest(ColumnFamily<String, String> cf) throws Exception {
        ColumnPrefixDistributedRowLock<String> lock =
            new ColumnPrefixDistributedRowLock<String>(keyspace, cf, "testTtl")
                .withTtl(2)
                .withConsistencyLevel(ConsistencyLevel.CL_ONE)
                .expireLockAfter(1, TimeUnit.SECONDS);
        try {
            lock.acquire();
            Assert.assertEquals(1, lock.readLockColumns().size());
            // Sleep past the 2 second TTL so Cassandra expires the column.
            Thread.sleep(3000);
            Assert.assertEquals(0, lock.readLockColumns().size());
        }
        catch (Exception e) {
            Assert.fail(e.getMessage());
        }
        finally {
            lock.release();
        }
        Assert.assertEquals(0, lock.readLockColumns().size());
    }

    @Test
    public void testTtl() throws Exception {
        runTtlTest(LOCK_CF_LONG);
    }

    @Test
    public void testTtlString() throws Exception {
        runTtlTest(LOCK_CF_STRING);
    }

    /**
     * Verifies that a second lock can successfully take over once the first
     * lock has gone stale (failOnStaleLock left at its default of false).
     *
     * @param cf column family to take the locks in
     */
    private static void runStaleLockTakeoverTest(ColumnFamily<String, String> cf) throws Exception {
        ColumnPrefixDistributedRowLock<String> lock1 =
            new ColumnPrefixDistributedRowLock<String>(keyspace, cf, "testStaleLock")
                .withTtl(TTL)
                .withConsistencyLevel(ConsistencyLevel.CL_ONE)
                .expireLockAfter(1, TimeUnit.SECONDS);
        ColumnPrefixDistributedRowLock<String> lock2 =
            new ColumnPrefixDistributedRowLock<String>(keyspace, cf, "testStaleLock")
                .withTtl(TTL)
                .withConsistencyLevel(ConsistencyLevel.CL_ONE)
                .expireLockAfter(9, TimeUnit.SECONDS);
        try {
            lock1.acquire();
            // Wait until lock1's 1 second expiration has long passed, making
            // it stale; lock2 should then acquire without error.
            Thread.sleep(5000);
            try {
                lock2.acquire();
            }
            catch (Exception e) {
                Assert.fail(e.getMessage());
            }
            finally {
                lock2.release();
            }
        }
        catch (Exception e) {
            Assert.fail(e.getMessage());
        }
        finally {
            lock1.release();
        }
    }

    @Test
    public void testStaleLockWithFail() throws Exception {
        runStaleLockTakeoverTest(LOCK_CF_LONG);
    }

    @Test
    public void testStaleLockWithFail_String() throws Exception {
        runStaleLockTakeoverTest(LOCK_CF_STRING);
    }

    /**
     * Verifies that a second lock configured with failOnStaleLock(true)
     * throws {@link StaleLockException} when it encounters a stale lock.
     *
     * @param cf column family to take the locks in
     */
    private static void runStaleLockFailureTest(ColumnFamily<String, String> cf) throws Exception {
        ColumnPrefixDistributedRowLock<String> lock1 =
            new ColumnPrefixDistributedRowLock<String>(keyspace, cf, "testStaleLock")
                .withTtl(TTL)
                .withConsistencyLevel(ConsistencyLevel.CL_ONE)
                .expireLockAfter(1, TimeUnit.SECONDS);
        ColumnPrefixDistributedRowLock<String> lock2 =
            new ColumnPrefixDistributedRowLock<String>(keyspace, cf, "testStaleLock")
                .failOnStaleLock(true)
                .withTtl(TTL)
                .withConsistencyLevel(ConsistencyLevel.CL_ONE)
                .expireLockAfter(9, TimeUnit.SECONDS);
        try {
            lock1.acquire();
            // Let lock1's 1 second expiration pass so it is stale.
            Thread.sleep(2000);
            try {
                lock2.acquire();
                Assert.fail();
            }
            catch (StaleLockException e) {
                // Expected: lock2 is configured to fail on stale locks.
            }
            catch (Exception e) {
                Assert.fail(e.getMessage());
            }
            finally {
                lock2.release();
            }
        }
        catch (Exception e) {
            e.printStackTrace();
            Assert.fail(e.getMessage());
        }
        finally {
            lock1.release();
        }
    }

    @Test
    public void testStaleLock() throws Exception {
        runStaleLockFailureTest(LOCK_CF_LONG);
    }

    @Test
    public void testStaleLock_String() throws Exception {
        runStaleLockFailureTest(LOCK_CF_STRING);
    }

    /**
     * Placeholder for the lock-and-mutate scenario; the original body is
     * retained below (commented out) until it can be re-enabled.
     */
    @Test
    public void testLockAndMutate() throws Exception {
//        String rowKey = "testLockAndMutate";
//        String dataColumn = "SomeDataColumn";
//        Integer value = 1;
//        // Write some data
//        try {
//            MutationBatch m = keyspace.prepareMutationBatch().setConsistencyLevel(ConsistencyLevel.CL_ONE);
//            m.withRow(LOCK_CF_LONG, rowKey)
//                .putColumn(dataColumn, value, null);
//            m.execute();
//        }
//        catch (Exception e) {
//            e.printStackTrace();
//            Assert.fail(e.getMessage());
//        }
//
//        // Take a lock
//        ColumnPrefixDistributedRowLock<String> lock =
//            new ColumnPrefixDistributedRowLock<String>(keyspace, LOCK_CF_LONG, rowKey)
//                .expireLockAfter(1, TimeUnit.SECONDS);
//
//        try {
//            ColumnMap<String> columns = lock
//                .withColumnPrefix("$lock$_")
//                .withLockId("myLockId")
//                .withConsistencyLevel(ConsistencyLevel.CL_ONE)
//                .acquireLockAndReadRow();
//
//            // Read data and update
//            Assert.assertNotNull(columns);
//            Assert.assertEquals(1, columns.size());
//
//            value = columns.get(dataColumn).getIntegerValue() + 1;
//            MutationBatch m = keyspace.prepareMutationBatch();
//            m.withRow(LOCK_CF_LONG, rowKey)
//                .putColumn(dataColumn, value, null);
//
//            // Write data and release the lock
//            lock.releaseWithMutation(m);
//        }
//        catch (Exception e) {
//            e.printStackTrace();
//            Assert.fail(e.getMessage());
//            lock.release();
//        }
//
//        ColumnList<String> columns = keyspace
//            .prepareQuery(LOCK_CF_LONG)
//            .setConsistencyLevel(ConsistencyLevel.CL_ONE)
//            .getKey(rowKey)
//            .execute()
//            .getResult();
//        Assert.assertEquals(1, columns.size());
//        Assert.assertEquals(value, columns.getIntegerValue(dataColumn, 0));
    }
}
| |
package com.xtremelabs.robolectric;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.Application;
import android.app.Dialog;
import android.app.ListActivity;
import android.appwidget.AppWidgetManager;
import android.bluetooth.BluetoothAdapter;
import android.bluetooth.BluetoothDevice;
import android.content.ContentValues;
import android.content.Context;
import android.content.ContextWrapper;
import android.content.Intent;
import android.content.res.AssetManager;
import android.content.res.Resources;
import android.database.sqlite.SQLiteCursor;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteOpenHelper;
import android.database.sqlite.SQLiteQueryBuilder;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.ColorMatrix;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.graphics.Path;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.Drawable;
import android.hardware.Camera;
import android.location.Geocoder;
import android.location.LocationManager;
import android.media.AudioManager;
import android.media.MediaRecorder;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
import android.os.Handler;
import android.os.Looper;
import android.view.Display;
import android.view.LayoutInflater;
import android.view.MenuInflater;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
import android.webkit.WebView;
import android.widget.AdapterView;
import android.widget.ExpandableListView;
import android.widget.ImageView;
import android.widget.ListView;
import android.widget.RemoteViews;
import android.widget.TextView;
import android.widget.Toast;
import android.widget.ZoomButtonsController;
import com.google.android.maps.GeoPoint;
import com.google.android.maps.ItemizedOverlay;
import com.google.android.maps.MapController;
import com.google.android.maps.MapView;
import com.xtremelabs.robolectric.bytecode.RobolectricInternals;
import com.xtremelabs.robolectric.bytecode.ShadowWrangler;
import com.xtremelabs.robolectric.internal.Implements;
import com.xtremelabs.robolectric.shadows.FakeHttpLayer;
import com.xtremelabs.robolectric.shadows.ShadowAbsSpinner;
import com.xtremelabs.robolectric.shadows.ShadowAbsoluteLayout;
import com.xtremelabs.robolectric.shadows.ShadowAbstractCursor;
import com.xtremelabs.robolectric.shadows.ShadowActivity;
import com.xtremelabs.robolectric.shadows.ShadowAdapterView;
import com.xtremelabs.robolectric.shadows.ShadowAddress;
import com.xtremelabs.robolectric.shadows.ShadowAlertDialog;
import com.xtremelabs.robolectric.shadows.ShadowAppWidgetManager;
import com.xtremelabs.robolectric.shadows.ShadowApplication;
import com.xtremelabs.robolectric.shadows.ShadowArrayAdapter;
import com.xtremelabs.robolectric.shadows.ShadowAssetManager;
import com.xtremelabs.robolectric.shadows.ShadowAsyncTask;
import com.xtremelabs.robolectric.shadows.ShadowAudioManager;
import com.xtremelabs.robolectric.shadows.ShadowBaseAdapter;
import com.xtremelabs.robolectric.shadows.ShadowBitmap;
import com.xtremelabs.robolectric.shadows.ShadowBitmapDrawable;
import com.xtremelabs.robolectric.shadows.ShadowBitmapFactory;
import com.xtremelabs.robolectric.shadows.ShadowBluetoothAdapter;
import com.xtremelabs.robolectric.shadows.ShadowBluetoothDevice;
import com.xtremelabs.robolectric.shadows.ShadowBundle;
import com.xtremelabs.robolectric.shadows.ShadowCamera;
import com.xtremelabs.robolectric.shadows.ShadowCameraParameters;
import com.xtremelabs.robolectric.shadows.ShadowCanvas;
import com.xtremelabs.robolectric.shadows.ShadowColorMatrix;
import com.xtremelabs.robolectric.shadows.ShadowColorMatrixColorFilter;
import com.xtremelabs.robolectric.shadows.ShadowComponentName;
import com.xtremelabs.robolectric.shadows.ShadowCompoundButton;
import com.xtremelabs.robolectric.shadows.ShadowConnectivityManager;
import com.xtremelabs.robolectric.shadows.ShadowContentResolver;
import com.xtremelabs.robolectric.shadows.ShadowContentValues;
import com.xtremelabs.robolectric.shadows.ShadowContext;
import com.xtremelabs.robolectric.shadows.ShadowContextThemeWrapper;
import com.xtremelabs.robolectric.shadows.ShadowContextWrapper;
import com.xtremelabs.robolectric.shadows.ShadowDefaultRequestDirector;
import com.xtremelabs.robolectric.shadows.ShadowDialog;
import com.xtremelabs.robolectric.shadows.ShadowDisplay;
import com.xtremelabs.robolectric.shadows.ShadowDrawable;
import com.xtremelabs.robolectric.shadows.ShadowEditText;
import com.xtremelabs.robolectric.shadows.ShadowExpandableListView;
import com.xtremelabs.robolectric.shadows.ShadowFloatMath;
import com.xtremelabs.robolectric.shadows.ShadowGeoPoint;
import com.xtremelabs.robolectric.shadows.ShadowGeocoder;
import com.xtremelabs.robolectric.shadows.ShadowHandler;
import com.xtremelabs.robolectric.shadows.ShadowImageView;
import com.xtremelabs.robolectric.shadows.ShadowIntent;
import com.xtremelabs.robolectric.shadows.ShadowIntentFilter;
import com.xtremelabs.robolectric.shadows.ShadowItemizedOverlay;
import com.xtremelabs.robolectric.shadows.ShadowLayoutInflater;
import com.xtremelabs.robolectric.shadows.ShadowLayoutParams;
import com.xtremelabs.robolectric.shadows.ShadowListActivity;
import com.xtremelabs.robolectric.shadows.ShadowListView;
import com.xtremelabs.robolectric.shadows.ShadowLocation;
import com.xtremelabs.robolectric.shadows.ShadowLocationManager;
import com.xtremelabs.robolectric.shadows.ShadowLooper;
import com.xtremelabs.robolectric.shadows.ShadowMapActivity;
import com.xtremelabs.robolectric.shadows.ShadowMapController;
import com.xtremelabs.robolectric.shadows.ShadowMapView;
import com.xtremelabs.robolectric.shadows.ShadowMatrix;
import com.xtremelabs.robolectric.shadows.ShadowMediaRecorder;
import com.xtremelabs.robolectric.shadows.ShadowMediaStore;
import com.xtremelabs.robolectric.shadows.ShadowMenuInflater;
import com.xtremelabs.robolectric.shadows.ShadowMotionEvent;
import com.xtremelabs.robolectric.shadows.ShadowNetworkInfo;
import com.xtremelabs.robolectric.shadows.ShadowOverlayItem;
import com.xtremelabs.robolectric.shadows.ShadowPaint;
import com.xtremelabs.robolectric.shadows.ShadowPath;
import com.xtremelabs.robolectric.shadows.ShadowPendingIntent;
import com.xtremelabs.robolectric.shadows.ShadowPoint;
import com.xtremelabs.robolectric.shadows.ShadowPointF;
import com.xtremelabs.robolectric.shadows.ShadowPreferenceManager;
import com.xtremelabs.robolectric.shadows.ShadowRect;
import com.xtremelabs.robolectric.shadows.ShadowRemoteViews;
import com.xtremelabs.robolectric.shadows.ShadowResources;
import com.xtremelabs.robolectric.shadows.ShadowSQLiteCursor;
import com.xtremelabs.robolectric.shadows.ShadowSQLiteDatabase;
import com.xtremelabs.robolectric.shadows.ShadowSQLiteOpenHelper;
import com.xtremelabs.robolectric.shadows.ShadowSQLiteQueryBuilder;
import com.xtremelabs.robolectric.shadows.ShadowService;
import com.xtremelabs.robolectric.shadows.ShadowSettings;
import com.xtremelabs.robolectric.shadows.ShadowSpannableStringBuilder;
import com.xtremelabs.robolectric.shadows.ShadowSurfaceView;
import com.xtremelabs.robolectric.shadows.ShadowTextUtils;
import com.xtremelabs.robolectric.shadows.ShadowTextView;
import com.xtremelabs.robolectric.shadows.ShadowToast;
import com.xtremelabs.robolectric.shadows.ShadowTypedValue;
import com.xtremelabs.robolectric.shadows.ShadowURLSpan;
import com.xtremelabs.robolectric.shadows.ShadowView;
import com.xtremelabs.robolectric.shadows.ShadowViewGroup;
import com.xtremelabs.robolectric.shadows.ShadowWebView;
import com.xtremelabs.robolectric.shadows.ShadowWifiManager;
import com.xtremelabs.robolectric.shadows.ShadowZoomButtonsController;
import com.xtremelabs.robolectric.util.HttpRequestInfo;
import com.xtremelabs.robolectric.util.Scheduler;
import org.apache.http.HttpRequest;
import org.apache.http.HttpResponse;
import org.apache.http.impl.client.DefaultRequestDirector;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.util.Arrays;
import java.util.List;
@SuppressWarnings({"UnusedDeclaration"})
public class Robolectric {
public static Application application;
public static <T> T newInstanceOf(Class<T> clazz) {
try {
Constructor<T> defaultConstructor = clazz.getDeclaredConstructor();
defaultConstructor.setAccessible(true);
return defaultConstructor.newInstance();
} catch (InstantiationException e) {
throw new RuntimeException(e);
} catch (IllegalAccessException e) {
throw new RuntimeException(e);
} catch (NoSuchMethodException e) {
throw new RuntimeException(e);
} catch (InvocationTargetException e) {
throw new RuntimeException(e);
}
}
public static void bindShadowClass(Class<?> shadowClass) {
Implements realClass = shadowClass.getAnnotation(Implements.class);
if (realClass == null) {
throw new IllegalArgumentException(shadowClass + " is not annotated with @Implements");
}
try {
ShadowWrangler.getInstance().bindShadowClass(realClass.value(), shadowClass);
} catch (TypeNotPresentException ignored) {
//this allows users of the robolectric.jar file to use the non-Google APIs version of the api
System.out.println("Warning: an error occurred while binding shadow class: " + shadowClass.getSimpleName());
}
}
public static void bindDefaultShadowClasses() {
bindShadowClasses(getDefaultShadowClasses());
}
public static void bindShadowClasses(List<Class<?>> shadowClasses) {
for (Class<?> shadowClass : shadowClasses) {
bindShadowClass(shadowClass);
}
}
/**
* Invoke this utility method in tests to reveal which Android api classes and methods are being invoked
* for which there are no shadows or shadow methods. This helps expose which methods are being invoked
* either by a third party library or application code which need new shadow methods to be written. Generates
* output for the current test only.
*/
public static void logMissingInvokedShadowMethods() {
ShadowWrangler.getInstance().logMissingInvokedShadowMethods();
}
public static List<Class<?>> getDefaultShadowClasses() {
return Arrays.asList(
ShadowAbsoluteLayout.class,
ShadowAbsSpinner.class,
ShadowAbstractCursor.class,
ShadowActivity.class,
ShadowAdapterView.class,
ShadowAddress.class,
ShadowAlertDialog.class,
ShadowAlertDialog.ShadowBuilder.class,
ShadowApplication.class,
ShadowAppWidgetManager.class,
ShadowArrayAdapter.class,
ShadowAssetManager.class,
ShadowAsyncTask.class,
ShadowAudioManager.class,
ShadowBaseAdapter.class,
ShadowBitmap.class,
ShadowBitmapDrawable.class,
ShadowBitmapFactory.class,
ShadowBluetoothAdapter.class,
ShadowBluetoothDevice.class,
ShadowBundle.class,
ShadowCamera.class,
ShadowCameraParameters.class,
ShadowCanvas.class,
ShadowColorMatrix.class,
ShadowColorMatrixColorFilter.class,
ShadowCompoundButton.class,
ShadowComponentName.class,
ShadowConnectivityManager.class,
ShadowContentResolver.class,
ShadowContentValues.class,
ShadowContext.class,
ShadowContextWrapper.class,
ShadowContextThemeWrapper.class,
ShadowDefaultRequestDirector.class,
ShadowDisplay.class,
ShadowDrawable.class,
ShadowDialog.class,
ShadowEditText.class,
ShadowExpandableListView.class,
ShadowFloatMath.class,
ShadowGeocoder.class,
ShadowGeoPoint.class,
ShadowHandler.class,
ShadowImageView.class,
ShadowIntent.class,
ShadowIntentFilter.class,
ShadowItemizedOverlay.class,
ShadowLayoutInflater.class,
ShadowLayoutParams.class,
ShadowListActivity.class,
ShadowListView.class,
ShadowLocation.class,
ShadowLocationManager.class,
ShadowLooper.class,
ShadowMapController.class,
ShadowMapActivity.class,
ShadowMapView.class,
ShadowMatrix.class,
ShadowMediaRecorder.class,
ShadowMediaStore.ShadowImages.ShadowMedia.class,
ShadowMenuInflater.class,
ShadowMotionEvent.class,
ShadowNetworkInfo.class,
ShadowOverlayItem.class,
ShadowPaint.class,
ShadowPath.class,
ShadowPendingIntent.class,
ShadowPoint.class,
ShadowPointF.class,
ShadowPreferenceManager.class,
ShadowRect.class,
ShadowRemoteViews.class,
ShadowResources.class,
ShadowService.class,
ShadowSettings.class,
ShadowSettings.ShadowSecure.class,
ShadowSettings.ShadowSystem.class,
ShadowSpannableStringBuilder.class,
ShadowSQLiteDatabase.class,
ShadowSQLiteCursor.class,
ShadowSQLiteOpenHelper.class,
ShadowSQLiteQueryBuilder.class,
ShadowSurfaceView.class,
ShadowTextUtils.class,
ShadowTextView.class,
ShadowToast.class,
ShadowTypedValue.class,
ShadowURLSpan.class,
ShadowView.class,
ShadowViewGroup.class,
ShadowWebView.class,
ShadowWifiManager.class,
ShadowZoomButtonsController.class
);
}
public static void resetStaticState() {
ShadowWrangler.getInstance().silence();
Robolectric.application = new Application();
ShadowBitmapFactory.reset();
}
public static <T> T directlyOn(T shadowedObject) {
return RobolectricInternals.directlyOn(shadowedObject);
}
public static ShadowDrawable shadowOf(Drawable instance) {
return (ShadowDrawable) shadowOf_(instance);
}
public static ShadowToast shadowOf(Toast instance) {
return (ShadowToast) shadowOf_(instance);
}
public static ShadowNetworkInfo shadowOf(NetworkInfo instance) {
return (ShadowNetworkInfo) shadowOf_(instance);
}
public static ShadowConnectivityManager shadowOf(ConnectivityManager instance) {
return (ShadowConnectivityManager) shadowOf_(instance);
}
public static ShadowBitmapDrawable shadowOf(BitmapDrawable instance) {
return (ShadowBitmapDrawable) shadowOf_(instance);
}
public static ShadowZoomButtonsController shadowOf(ZoomButtonsController instance) {
return (ShadowZoomButtonsController) shadowOf_(instance);
}
public static ShadowGeoPoint shadowOf(GeoPoint instance) {
return (ShadowGeoPoint) shadowOf_(instance);
}
public static ShadowMapView shadowOf(MapView instance) {
return (ShadowMapView) shadowOf_(instance);
}
public static ShadowMapController shadowOf(MapController instance) {
return (ShadowMapController) shadowOf_(instance);
}
public static ShadowItemizedOverlay shadowOf(ItemizedOverlay instance) {
return (ShadowItemizedOverlay) shadowOf_(instance);
}
public static ShadowListView shadowOf(ListView instance) {
return (ShadowListView) shadowOf_(instance);
}
public static ExpandableListView shadowOf(ExpandableListView instance) {
return (ExpandableListView) shadowOf_(instance);
}
public static ShadowActivity shadowOf(Activity instance) {
return (ShadowActivity) shadowOf_(instance);
}
public static ShadowContextWrapper shadowOf(ContextWrapper instance) {
return (ShadowContextWrapper) shadowOf_(instance);
}
public static ShadowApplication shadowOf(Application instance) {
return (ShadowApplication) shadowOf_(instance);
}
public static ShadowContext shadowOf(Context instance) {
return (ShadowContext) shadowOf_(instance);
}
public static ShadowPaint shadowOf(Paint instance) {
return (ShadowPaint) shadowOf_(instance);
}
public static ShadowPath shadowOf(Path instance) {
return (ShadowPath) shadowOf_(instance);
}
public static ShadowListActivity shadowOf(ListActivity instance) {
return (ShadowListActivity) shadowOf_(instance);
}
public static ShadowHandler shadowOf(Handler instance) {
return (ShadowHandler) shadowOf_(instance);
}
public static ShadowColorMatrix shadowOf(ColorMatrix instance) {
return (ShadowColorMatrix) shadowOf_(instance);
}
public static ShadowIntent shadowOf(Intent instance) {
return (ShadowIntent) shadowOf_(instance);
}
public static ShadowView shadowOf(View instance) {
return (ShadowView) shadowOf_(instance);
}
public static ShadowViewGroup shadowOf(ViewGroup instance) {
return (ShadowViewGroup) shadowOf_(instance);
}
public static ShadowWebView shadowOf(WebView instance) {
return (ShadowWebView) shadowOf_(instance);
}
public static ShadowAdapterView shadowOf(AdapterView instance) {
return (ShadowAdapterView) shadowOf_(instance);
}
public static ShadowTextView shadowOf(TextView instance) {
return (ShadowTextView) shadowOf_(instance);
}
public static ShadowImageView shadowOf(ImageView instance) {
return (ShadowImageView) shadowOf_(instance);
}
public static ShadowRemoteViews shadowOf(RemoteViews instance) {
return (ShadowRemoteViews) shadowOf_(instance);
}
public static ShadowDialog shadowOf(Dialog instance) {
return (ShadowDialog) shadowOf_(instance);
}
public static ShadowDefaultRequestDirector shadowOf(DefaultRequestDirector instance) {
return (ShadowDefaultRequestDirector) shadowOf_(instance);
}
public static ShadowAlertDialog shadowOf(AlertDialog instance) {
return (ShadowAlertDialog) shadowOf_(instance);
}
public static ShadowLooper shadowOf(Looper instance) {
return (ShadowLooper) shadowOf_(instance);
}
public static ShadowCanvas shadowOf(Canvas instance) {
return (ShadowCanvas) shadowOf_(instance);
}
public static ShadowLocationManager shadowOf(LocationManager instance) {
return (ShadowLocationManager) shadowOf_(instance);
}
public static ShadowAppWidgetManager shadowOf(AppWidgetManager instance) {
return (ShadowAppWidgetManager) shadowOf_(instance);
}
public static ShadowResources shadowOf(Resources instance) {
return (ShadowResources) shadowOf_(instance);
}
public static ShadowLayoutInflater shadowOf(LayoutInflater instance) {
return (ShadowLayoutInflater) shadowOf_(instance);
}
public static ShadowMenuInflater shadowOf(MenuInflater instance) {
return (ShadowMenuInflater) shadowOf_(instance);
}
public static ShadowDisplay shadowOf(Display instance) {
return (ShadowDisplay) shadowOf_(instance);
}
public static ShadowAudioManager shadowOf(AudioManager instance) {
return (ShadowAudioManager) shadowOf_(instance);
}
public static ShadowGeocoder shadowOf(Geocoder instance) {
return (ShadowGeocoder) shadowOf_(instance);
}
public static ShadowSQLiteDatabase shadowOf(SQLiteDatabase other) {
return (ShadowSQLiteDatabase) Robolectric.shadowOf_(other);
}
public static ShadowSQLiteCursor shadowOf(SQLiteCursor other) {
return (ShadowSQLiteCursor) Robolectric.shadowOf_(other);
}
public static ShadowSQLiteOpenHelper shadowOf(SQLiteOpenHelper other) {
return (ShadowSQLiteOpenHelper) Robolectric.shadowOf_(other);
}
public static ShadowSQLiteQueryBuilder shadowOf(SQLiteQueryBuilder other) {
return (ShadowSQLiteQueryBuilder) Robolectric.shadowOf_(other);
}
public static ShadowContentValues shadowOf(ContentValues other) {
return (ShadowContentValues) Robolectric.shadowOf_(other);
}
public static ShadowCamera shadowOf(Camera instance) {
return (ShadowCamera) shadowOf_(instance);
}
public static ShadowCameraParameters shadowOf(Camera.Parameters instance) {
return (ShadowCameraParameters) shadowOf_(instance);
}
public static ShadowMediaRecorder shadowOf(MediaRecorder instance) {
return (ShadowMediaRecorder) shadowOf_(instance);
}
public static ShadowAssetManager shadowOf(AssetManager instance) {
return (ShadowAssetManager) Robolectric.shadowOf_(instance);
}
public static ShadowBitmap shadowOf(Bitmap other) {
return (ShadowBitmap) Robolectric.shadowOf_(other);
}
public static ShadowBluetoothAdapter shadowOf(BluetoothAdapter other) {
return (ShadowBluetoothAdapter) Robolectric.shadowOf_(other);
}
public static ShadowBluetoothDevice shadowOf(BluetoothDevice other) {
return (ShadowBluetoothDevice) Robolectric.shadowOf_(other);
}
public static ShadowMatrix shadowOf(Matrix other) {
return (ShadowMatrix) Robolectric.shadowOf_(other);
}
public static ShadowMotionEvent shadowOf(MotionEvent other) {
return (ShadowMotionEvent) Robolectric.shadowOf_(other);
}
@SuppressWarnings({"unchecked"})
public static <P, R> P shadowOf_(R instance) {
return (P) ShadowWrangler.getInstance().shadowOf(instance);
}
/**
* Runs any background tasks previously queued by {@link android.os.AsyncTask#execute(Object[])}.
* <p/>
* <p/>
* Note: calling this method does not pause or un-pause the scheduler.
*/
public static void runBackgroundTasks() {
getBackgroundScheduler().advanceBy(0);
}
/**
* Runs any immediately runnable tasks previously queued on the UI thread,
* e.g. by {@link Activity#runOnUiThread(Runnable)} or {@link android.os.AsyncTask#onPostExecute(Object)}.
* <p/>
* <p/>
* Note: calling this method does not pause or un-pause the scheduler.
*/
public static void runUiThreadTasks() {
getUiThreadScheduler().advanceBy(0);
}
/**
* Sets up an HTTP response to be returned by calls to Apache's {@code HttpClient} implementers.
*
* @param statusCode the status code of the response
* @param responseBody the body of the response
*/
public static void addPendingHttpResponse(int statusCode, String responseBody) {
getFakeHttpLayer().addPendingHttpResponse(statusCode, responseBody);
}
/**
* Sets up an HTTP response to be returned by calls to Apache's {@code HttpClient} implementers.
*
* @param httpResponse the response
*/
public static void addPendingHttpResponse(HttpResponse httpResponse) {
getFakeHttpLayer().addPendingHttpResponse(httpResponse);
}
/**
* Accessor to obtain HTTP requests made during the current test in the order in which they were made.
*
* @param index index of the request to retrieve.
* @return the requested request.
*/
public static HttpRequest getSentHttpRequest(int index) {
return ShadowDefaultRequestDirector.getSentHttpRequest(index);
}
/**
* Accessor to obtain metadata for an HTTP request made during the current test in the order in which they were made.
*
* @param index index of the request to retrieve.
* @return the requested request metadata.
*/
public static HttpRequestInfo getSentHttpRequestInfo(int index) {
return ShadowDefaultRequestDirector.getSentHttpRequestInfo(index);
}
/**
* Adds an HTTP response rule. The response will be returned when the rule is matched.
*
* @param method method to match.
* @param uri uri to match.
* @param response response to return when a match is found.
*/
public static void addHttpResponseRule(String method, String uri, HttpResponse response) {
getFakeHttpLayer().addHttpResponseRule(method, uri, response);
}
/**
* Adds an HTTP response rule with a default method of GET. The response will be returned when the rule is matched.
*
* @param uri uri to match.
* @param response response to return when a match is found.
*/
public static void addHttpResponseRule(String uri, HttpResponse response) {
getFakeHttpLayer().addHttpResponseRule(uri, response);
}
/**
* Adds an HTTP response rule. The response will be returned when the rule is matched.
*
* @param uri uri to match.
* @param response response to return when a match is found.
*/
public static void addHttpResponseRule(String uri, String response) {
getFakeHttpLayer().addHttpResponseRule(uri, response);
}
/**
* Adds an HTTP response rule. The response will be returned when the rule is matched.
*
* @param requestMatcher custom {@code RequestMatcher}.
* @param response response to return when a match is found.
*/
public static void addHttpResponseRule(FakeHttpLayer.RequestMatcher requestMatcher, HttpResponse response) {
getFakeHttpLayer().addHttpResponseRule(requestMatcher, response);
}
public static FakeHttpLayer getFakeHttpLayer() {
return getShadowApplication().getFakeHttpLayer();
}
/**
* Sets the default http response. This response will be returned if no other rules are matched.
*
* @param defaultHttpResponse the {@code HttpResponse} to return.
*/
public static void setDefaultHttpResponse(HttpResponse defaultHttpResponse) {
getFakeHttpLayer().setDefaultHttpResponse(defaultHttpResponse);
}
/** Delegates to {@code ShadowLooper.pauseLooper} for the given looper. */
public static void pauseLooper(Looper looper) {
    ShadowLooper.pauseLooper(looper);
}
/** Delegates to {@code ShadowLooper.unPauseLooper} for the given looper. */
public static void unPauseLooper(Looper looper) {
    ShadowLooper.unPauseLooper(looper);
}
/** Pauses the main looper via {@code ShadowLooper}. */
public static void pauseMainLooper() {
    ShadowLooper.pauseMainLooper();
}
/** Un-pauses the main looper via {@code ShadowLooper}. */
public static void unPauseMainLooper() {
    ShadowLooper.unPauseMainLooper();
}
/** Returns the scheduler backing the main (UI) looper's shadow. */
public static Scheduler getUiThreadScheduler() {
    return shadowOf(Looper.getMainLooper()).getScheduler();
}
/** Returns the background scheduler held by the shadow application. */
public static Scheduler getBackgroundScheduler() {
    return getShadowApplication().getBackgroundScheduler();
}
/** Returns the shadow of the global {@code Robolectric.application} instance. */
public static ShadowApplication getShadowApplication() {
    return shadowOf(Robolectric.application);
}
/**
 * Calls {@code performClick()} on a {@code View} after ensuring that it and its ancestors are visible and that it
 * is enabled.
 *
 * @param view the view to click on
 * @return true if {@code View.OnClickListener}s were found and fired, false otherwise.
 * @throws RuntimeException if the preconditions are not met.
 */
public static boolean clickOn(View view) {
    return shadowOf(view).checkedPerformClick();
}
/**
 * Draws the view into a fresh {@code Canvas} and returns the shadow canvas's
 * textual description of what was drawn.
 */
public static String visualize(View view) {
    Canvas canvas = new Canvas();
    view.draw(canvas);
    return shadowOf(canvas).getDescription();
}
/** Returns the shadow canvas's textual description of its draw operations. */
public static String visualize(Canvas canvas) {
    return shadowOf(canvas).getDescription();
}
/** Returns the shadow bitmap's textual description. */
public static String visualize(Bitmap bitmap) {
    return shadowOf(bitmap).getDescription();
}
}
| |
/**
* Copyright (C) 2012 KRM Associates, Inc. healtheme@krminc.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package com.krminc.phr.api.converter;
import com.krminc.phr.domain.Visit;
import java.net.URI;
import java.util.Date;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlTransient;
import javax.xml.bind.annotation.XmlAttribute;
import javax.ws.rs.core.UriBuilder;
import javax.persistence.EntityManager;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
import org.joda.time.DateTime;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.krminc.phr.api.converter.util.ConverterUtils;
import com.krminc.phr.domain.HealthRecord;
/**
 * Converter between the {@link Visit} JPA entity and its XML ("visit") REST
 * representation.
 *
 * <p>Setters sanitize and validate their input; on failure they set
 * {@link #hasError} to {@code true} instead of throwing, so callers must check
 * that flag after unmarshalling. Getters return {@code null} when
 * {@code expandLevel} is 0 so a collapsed reference serializes only its URI.</p>
 *
 * @author cmccall
 */
@XmlRootElement(name = "visit")
public class VisitConverter {
    private Visit entity;
    private URI uri;
    private int expandLevel;
    final Logger logger = LoggerFactory.getLogger(VisitConverter.class);
    // Set to true when any setter rejects its input; checked by callers after unmarshalling.
    public boolean hasError = false;
    // Shared upper bound for the free-text columns (title, purpose, location, provider).
    private static final int MAX_TEXT_LENGTH = 255;
    /** Creates a new instance of VisitConverter */
    public VisitConverter() {
        entity = new Visit();
    }
    /**
     * Creates a new instance of VisitConverter.
     *
     * @param entity associated entity
     * @param uri associated uri
     * @param expandLevel indicates the number of levels the entity graph should be expanded
     * @param isUriExtendable indicates whether the uri can be extended
     */
    public VisitConverter(Visit entity, URI uri, int expandLevel, boolean isUriExtendable) {
        this.entity = entity;
        this.uri = (isUriExtendable) ? UriBuilder.fromUri(uri).path(entity.getVisitId() + "/").build() : uri;
        this.expandLevel = expandLevel;
    }
    /**
     * Creates a new instance of VisitConverter.
     *
     * @param entity associated entity
     * @param uri associated uri
     * @param expandLevel indicates the number of levels the entity graph should be expanded
     */
    public VisitConverter(Visit entity, URI uri, int expandLevel) {
        this(entity, uri, expandLevel, false);
    }
    /**
     * Sanitizes a free-text value and enforces the shared length limit.
     *
     * @param value raw input
     * @param required when true an empty value is invalid; when false an empty
     *        value means "leave the field unchanged"
     * @return the sanitized value, or {@code null} when an optional value is
     *         empty and the field should not be updated
     * @throws Exception when the value is required-but-empty or too long
     */
    private static String checkText(String value, boolean required) throws Exception {
        value = ConverterUtils.prepareInput(value);
        if (value.length() == 0) {
            if (required) {
                throw new Exception();
            }
            return null;
        }
        if (value.length() > MAX_TEXT_LENGTH) {
            throw new Exception();
        }
        return value;
    }
    /**
     * Getter for visitId.
     *
     * @return value for visitId
     */
    @XmlElement
    public Long getVisitId() {
        return (expandLevel > 0) ? entity.getVisitId() : null;
    }
    /**
     * Setter for visitId.
     *
     * @param value the value to set
     */
    public void setVisitId(Long value) {
        try {
            entity.setVisitId(value);
        } catch (Exception ex) {
            hasError = true;
        }
    }
    /**
     * Getter for title.
     *
     * @return value for title
     */
    @XmlElement
    public String getTitle() {
        return (expandLevel > 0) ? entity.getTitle() : null;
    }
    /**
     * Setter for title. An empty title is rejected (the field is mandatory).
     *
     * @param value the value to set
     */
    public void setTitle(String value) {
        try {
            entity.setTitle(checkText(value, true));
        } catch (Exception ex) {
            hasError = true;
        }
    }
    /**
     * Getter for visitDate.
     *
     * @return value for visitDate
     */
    @XmlElement
    @XmlJavaTypeAdapter(DateAdapter.class)
    public Date getVisitDate() {
        return (expandLevel > 0) ? entity.getVisitDate() : null;
    }
    /**
     * Setter for visitDate.
     *
     * @param value the value to set
     */
    public void setVisitDate(Date value) {
        try {
            entity.setVisitDate(value);
        } catch (Exception ex) {
            hasError = true;
        }
    }
    /**
     * Setter for visitDate from its string form; parsed with Joda-Time's
     * ISO-friendly {@code DateTime(Object)} constructor.
     *
     * @param value the value to set
     */
    public void setVisitDate(String value) {
        try {
            value = ConverterUtils.prepareInput(value);
            DateTime date = new DateTime(value);
            this.setVisitDate(date.toDate());
        } catch (Exception ex) {
            hasError = true;
        }
    }
    /**
     * Getter for visitTime.
     *
     * @return value for visitTime
     */
    @XmlElement
    @XmlJavaTypeAdapter(TimeAdapter.class)
    public Date getVisitTime() {
        return (expandLevel > 0) ? entity.getVisitTime() : null;
    }
    /**
     * Setter for visitTime.
     *
     * @param value the value to set
     */
    public void setVisitTime(Date value) {
        try {
            entity.setVisitTime(value);
        } catch (Exception ex) {
            hasError = true;
        }
    }
    /**
     * Setter for visitTime from its string form.
     *
     * @param value the value to set
     */
    public void setVisitTime(String value) {
        try {
            value = ConverterUtils.prepareInput(value);
            DateTime date = new DateTime(value);
            this.setVisitTime(date.toDate());
        } catch (Exception ex) {
            hasError = true;
        }
    }
    /**
     * Getter for purpose.
     *
     * @return value for purpose
     */
    @XmlElement
    public String getPurpose() {
        return (expandLevel > 0) ? entity.getPurpose() : null;
    }
    /**
     * Setter for purpose. Empty input leaves the field unchanged.
     *
     * @param value the value to set
     */
    public void setPurpose(String value) {
        try {
            String purpose = checkText(value, false);
            if (purpose != null) {
                entity.setPurpose(purpose);
            }
        } catch (Exception ex) {
            hasError = true;
        }
    }
    /**
     * Getter for location.
     *
     * @return value for location
     */
    @XmlElement
    public String getLocation() {
        return (expandLevel > 0) ? entity.getLocation() : null;
    }
    /**
     * Setter for location. Empty input leaves the field unchanged.
     *
     * @param value the value to set
     */
    public void setLocation(String value) {
        try {
            String location = checkText(value, false);
            if (location != null) {
                entity.setLocation(location);
            }
        } catch (Exception ex) {
            hasError = true;
        }
    }
    /**
     * Getter for provider.
     *
     * @return value for provider
     */
    @XmlElement
    public String getProvider() {
        return (expandLevel > 0) ? entity.getProvider() : null;
    }
    /**
     * Setter for provider. Empty input leaves the field unchanged.
     *
     * @param value the value to set
     */
    public void setProvider(String value) {
        try {
            String provider = checkText(value, false);
            if (provider != null) {
                entity.setProvider(provider);
            }
        } catch (Exception ex) {
            hasError = true;
        }
    }
    /**
     * Getter for comments.
     *
     * @return value for comments
     */
    @XmlElement
    public String getComments() {
        return (expandLevel > 0) ? entity.getComments() : null;
    }
    /**
     * Setter for comments. Sanitized but not length-limited here.
     *
     * @param value the value to set
     */
    public void setComments(String value) {
        try {
            value = ConverterUtils.prepareInput(value);
            entity.setComments(value);
        } catch (Exception ex) {
            hasError = true;
        }
    }
    /**
     * Getter for addedDate. Read-only: there is deliberately no setter.
     *
     * @return value for addedDate
     */
    @XmlElement
    @XmlJavaTypeAdapter(DateAdapter.class)
    public Date getDateAdded() {
        return (expandLevel > 0) ? entity.getDateAdded() : null;
    }
    /**
     * Getter for dataSourceId.
     *
     * @return value for dataSourceId
     */
    @XmlElement
    public Long getDataSourceId() {
        return (expandLevel > 0) ? entity.getDataSourceId() : null;
    }
    /**
     * Setter for dataSourceId. Only the value 1 is accepted through this API;
     * anything else (including null) flags an error.
     *
     * @param value the value to set
     */
    public void setDataSourceId(Long value) {
        try {
            if (value == null || value.longValue() != 1L) {
                throw new Exception();
            }
            entity.setDataSourceId(value);
        } catch (Exception ex) {
            hasError = true;
        }
    }
    /**
     * Getter for HealthRecordId.
     *
     * @return value for HealthRecordId
     */
    @XmlElement
    public Long getHealthRecordId() {
        return (expandLevel > 0) ? entity.getHealthRecordId() : null;
    }
    /**
     * Setter for HealthRecordId.
     *
     * @param value the value to set
     */
    public void setHealthRecordId(Long value) {
        try {
            entity.setHealthRecordId(value);
        } catch (Exception ex) {
            hasError = true;
        }
    }
    /**
     * Getter for sourceId.
     *
     * @return value for sourceId
     */
    @XmlElement
    public Long getSourceId() {
        return (expandLevel > 0) ? entity.getSourceId() : null;
    }
    /**
     * Setter for sourceId. Only the value 1 is accepted through this API;
     * anything else (including null) flags an error.
     *
     * @param value the value to set
     */
    public void setSourceId(Long value) {
        try {
            if (value == null || value.longValue() != 1L) {
                throw new Exception();
            }
            entity.setSourceId(value);
        } catch (Exception ex) {
            hasError = true;
        }
    }
    /**
     * Getter for mask.
     *
     * @return value for mask
     */
    @XmlElement
    public String getMask() {
        return (expandLevel > 0) ? entity.getMask() : null;
    }
    /**
     * Setter for mask. Validated by {@code ConverterUtils.isValidMask} and
     * stored trimmed.
     *
     * @param value the value to set
     */
    public void setMask(String value) {
        try {
            if (ConverterUtils.isValidMask(value)) {
                entity.setMask(value.trim());
            } else {
                throw new Exception();
            }
        } catch (Exception ex) {
            hasError = true;
        }
    }
    /**
     * Returns the URI associated with this converter.
     *
     * @return the uri
     */
    @XmlAttribute
    public URI getUri() {
        return uri;
    }
    /**
     * Sets the URI for this reference converter.
     *
     * @param uri the uri to associate with this converter
     */
    public void setUri(URI uri) {
        try {
            this.uri = uri;
        } catch (Exception ex) {
            hasError = true;
        }
    }
    /**
     * Returns the Visit entity, resolving it through {@code UriResolver} when
     * this converter only carries a URI reference (no visitId yet).
     *
     * @return an entity
     */
    @XmlTransient
    public Visit getEntity() {
        if (entity.getVisitId() == null) {
            VisitConverter converter = UriResolver.getInstance().resolve(VisitConverter.class, uri);
            if (converter != null) {
                entity = converter.getEntity();
            }
        }
        return entity;
    }
    /**
     * Returns the resolved Visit entity, replacing the detached HealthRecord
     * with a managed JPA reference so the entity can be persisted.
     *
     * @param em entity manager used to obtain the reference
     * @return a resolved entity
     */
    public Visit resolveEntity(EntityManager em) {
        HealthRecord healthRecord = entity.getHealthRecord();
        if (healthRecord != null) {
            entity.setHealthRecord(em.getReference(HealthRecord.class, healthRecord.getHealthRecordId()));
        }
        return entity;
    }
}
| |
/*
* Copyright 2012 Matt Corallo.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.woollysammoth.nubitj.store;
import com.woollysammoth.nubitj.core.*;
import com.google.common.collect.Lists;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.Nullable;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.math.BigInteger;
import java.sql.*;
import java.util.LinkedList;
import java.util.List;
// Originally written for Apache Derby, but its DELETE (and general) performance was awful
/**
* A full pruned block store using the H2 pure-java embedded database.
*
* Note that because of the heavy delete load on the database, during IBD,
* you may see the database files grow quite large (around 1.5G).
* H2 automatically frees some space at shutdown, so close()ing the database
* decreases the space usage somewhat (to only around 1.3G).
*/
public class H2FullPrunedBlockStore implements FullPrunedBlockStore {
private static final Logger log = LoggerFactory.getLogger(H2FullPrunedBlockStore.class);
// Cached best-chain head so getChainHead() never queries the database.
private Sha256Hash chainHeadHash;
private StoredBlock chainHeadBlock;
// Cached fully-verified chain head; may trail chainHead during initial download.
private Sha256Hash verifiedChainHeadHash;
private StoredBlock verifiedChainHeadBlock;
private NetworkParameters params;
// One JDBC connection per thread; every connection is also tracked in
// allConnections so close() can roll them all back.
private ThreadLocal<Connection> conn;
private List<Connection> allConnections;
private String connectionURL;
// Number of blocks of undo history kept; older undoableBlocks rows are pruned
// in setVerifiedChainHead().
private int fullStoreDepth;
static final String driver = "org.h2.Driver";
// Key/value table holding the chain head pointers and schema version.
static final String CREATE_SETTINGS_TABLE = "CREATE TABLE settings ( "
    + "name VARCHAR(32) NOT NULL CONSTRAINT settings_pk PRIMARY KEY,"
    + "value BLOB"
    + ")";
static final String CHAIN_HEAD_SETTING = "chainhead";
static final String VERIFIED_CHAIN_HEAD_SETTING = "verifiedchainhead";
static final String VERSION_SETTING = "version";
// Headers are keyed by a 28-byte slice of the block hash (see putUpdateStoredBlock).
static final String CREATE_HEADERS_TABLE = "CREATE TABLE headers ( "
    + "hash BINARY(28) NOT NULL CONSTRAINT headers_pk PRIMARY KEY,"
    + "chainWork BLOB NOT NULL,"
    + "height INT NOT NULL,"
    + "header BLOB NOT NULL,"
    + "wasUndoable BOOL NOT NULL"
    + ")";
// Exactly one of txOutChanges / transactions is non-NULL per row.
static final String CREATE_UNDOABLE_TABLE = "CREATE TABLE undoableBlocks ( "
    + "hash BINARY(28) NOT NULL CONSTRAINT undoableBlocks_pk PRIMARY KEY,"
    + "height INT NOT NULL,"
    + "txOutChanges BLOB,"
    + "transactions BLOB"
    + ")";
// Height index supports the bulk DELETE in removeUndoableBlocksWhereHeightIsLessThan().
static final String CREATE_UNDOABLE_TABLE_INDEX = "CREATE INDEX heightIndex ON undoableBlocks (height)";
// NOTE(review): the trailing comma after "PRIMARY KEY (hash, index)," leaves
// an empty position before ")" -- confirm H2 actually tolerates this DDL.
static final String CREATE_OPEN_OUTPUT_TABLE = "CREATE TABLE openOutputs ("
    + "hash BINARY(32) NOT NULL,"
    + "index INT NOT NULL,"
    + "height INT NOT NULL,"
    + "value BLOB NOT NULL,"
    + "scriptBytes BLOB NOT NULL,"
    + "PRIMARY KEY (hash, index),"
    + ")";
/**
* Creates a new H2FullPrunedBlockStore
* @param params A copy of the NetworkParameters used
* @param dbName The path to the database on disk
* @param fullStoreDepth The number of blocks of history stored in full (something like 1000 is pretty safe)
* @throws BlockStoreException if the database fails to open for any reason
*/
/**
 * Creates a new H2FullPrunedBlockStore.
 *
 * @param params A copy of the NetworkParameters used
 * @param dbName The path to the database on disk
 * @param fullStoreDepth The number of blocks of history stored in full (something like 1000 is pretty safe)
 * @throws BlockStoreException if the H2 driver is missing or the database fails to open for any reason
 */
public H2FullPrunedBlockStore(NetworkParameters params, String dbName, int fullStoreDepth) throws BlockStoreException {
    this.params = params;
    this.fullStoreDepth = fullStoreDepth;
    // We choose a very lax timeout to avoid the database throwing exceptions on complex operations, as time is not
    // a particularly precious resource when just keeping up with the chain.
    // NOTE(review): "create=true" looks like a Derby-era leftover (this store
    // was originally written for Derby) -- confirm H2 accepts/ignores it.
    connectionURL = "jdbc:h2:" + dbName + ";create=true;LOCK_TIMEOUT=60000";
    conn = new ThreadLocal<Connection>();
    allConnections = new LinkedList<Connection>();
    try {
        Class.forName(driver);
        log.info(driver + " loaded. ");
    } catch (java.lang.ClassNotFoundException e) {
        // Previously we only logged here and then failed later with a confusing
        // SQLException; without the driver nothing can work, so fail fast.
        log.error("check CLASSPATH for H2 jar ", e);
        throw new BlockStoreException(e);
    }
    maybeConnect();
    try {
        // Create tables if needed
        if (!tableExists("settings"))
            createTables();
        initFromDatabase();
    } catch (SQLException e) {
        throw new BlockStoreException(e);
    }
}
/**
* Creates a new H2FullPrunedBlockStore with the given cache size
* @param params A copy of the NetworkParameters used
* @param dbName The path to the database on disk
* @param fullStoreDepth The number of blocks of history stored in full (something like 1000 is pretty safe)
* @param cacheSize The number of kilobytes to dedicate to H2 Cache (the default value of 16MB (16384) is a safe bet
* to achieve good performance/cost when importing blocks from disk, past 32MB makes little sense,
* and below 4MB sees a sharp drop in performance)
* @throws BlockStoreException if the database fails to open for any reason
*/
public H2FullPrunedBlockStore(NetworkParameters params, String dbName, int fullStoreDepth, int cacheSize) throws BlockStoreException {
this(params, dbName, fullStoreDepth);
try {
Statement s = conn.get().createStatement();
s.executeUpdate("SET CACHE_SIZE " + cacheSize);
s.close();
} catch (SQLException e) {
throw new BlockStoreException(e);
}
}
/**
 * Ensures the calling thread has a JDBC connection, opening one on first use.
 * Connections are remembered in {@code allConnections} so close() can see them.
 */
private synchronized void maybeConnect() throws BlockStoreException {
    if (conn.get() != null)
        return;  // This thread already has a connection.
    try {
        Connection fresh = DriverManager.getConnection(connectionURL);
        conn.set(fresh);
        allConnections.add(fresh);
        log.info("Made a new connection to database " + connectionURL);
    } catch (SQLException ex) {
        throw new BlockStoreException(ex);
    }
}
/**
 * Rolls back and closes every connection this store has opened. Closing the
 * last connection lets H2 release its file handles and compact the database
 * (see the class javadoc); previously the connections were only rolled back
 * and leaked.
 */
public synchronized void close() {
    for (Connection c : allConnections) {
        try {
            // Abandon any half-finished batch write before disconnecting.
            c.rollback();
        } catch (SQLException ex) {
            throw new RuntimeException(ex);
        } finally {
            try {
                c.close();
            } catch (SQLException ex) {
                // Best effort: log and keep closing the remaining connections.
                log.error("could not close connection", ex);
            }
        }
    }
    allConnections.clear();
}
/**
 * Drops the entire schema, recreates it, and reinitializes from the fresh
 * genesis state.
 *
 * @throws BlockStoreException wrapping any SQL failure. (Previously SQL errors
 *         escaped as an undeclared RuntimeException, unlike every other method
 *         in this class.)
 */
public void resetStore() throws BlockStoreException {
    maybeConnect();
    try {
        Statement s = conn.get().createStatement();
        s.executeUpdate("DROP TABLE settings");
        s.executeUpdate("DROP TABLE headers");
        s.executeUpdate("DROP TABLE undoableBlocks");
        s.executeUpdate("DROP TABLE openOutputs");
        s.close();
        createTables();
        initFromDatabase();
    } catch (SQLException ex) {
        throw new BlockStoreException(ex);
    }
}
/**
 * Creates the four tables and the height index, seeds the settings rows
 * (the chain head pointers start out NULL), and stores the genesis block.
 */
private void createTables() throws SQLException, BlockStoreException {
    String[] logMessages = {
        "H2FullPrunedBlockStore : CREATE headers table",
        "H2FullPrunedBlockStore : CREATE settings table",
        "H2FullPrunedBlockStore : CREATE undoable block table",
        "H2FullPrunedBlockStore : CREATE undoable block index",
        "H2FullPrunedBlockStore : CREATE open output table",
    };
    String[] ddl = {
        CREATE_HEADERS_TABLE,
        CREATE_SETTINGS_TABLE,
        CREATE_UNDOABLE_TABLE,
        CREATE_UNDOABLE_TABLE_INDEX,
        CREATE_OPEN_OUTPUT_TABLE,
    };
    Statement statement = conn.get().createStatement();
    for (int i = 0; i < ddl.length; i++) {
        log.debug(logMessages[i]);
        statement.executeUpdate(ddl[i]);
    }
    // The head pointers are filled in by createNewStore() below.
    statement.executeUpdate("INSERT INTO settings(name, value) VALUES('" + CHAIN_HEAD_SETTING + "', NULL)");
    statement.executeUpdate("INSERT INTO settings(name, value) VALUES('" + VERIFIED_CHAIN_HEAD_SETTING + "', NULL)");
    statement.executeUpdate("INSERT INTO settings(name, value) VALUES('" + VERSION_SETTING + "', '03')");
    statement.close();
    createNewStore(params);
}
/**
 * Loads the chain head and verified chain head pointers from the settings
 * table into the in-memory caches, rejecting databases with the old schema
 * (detected by the presence of an "openOutputsIndex" table).
 */
private void initFromDatabase() throws SQLException, BlockStoreException {
    Statement s = conn.get().createStatement();
    // Old-schema databases had an openOutputsIndex table; refuse to open them.
    ResultSet rs = s.executeQuery("SHOW TABLES");
    while (rs.next())
        if (rs.getString(1).equalsIgnoreCase("openOutputsIndex"))
            throw new BlockStoreException("Attempted to open a H2 database with an old schema, please reset database.");
    rs = s.executeQuery("SELECT value FROM settings WHERE name = '" + CHAIN_HEAD_SETTING + "'");
    if (!rs.next()) {
        throw new BlockStoreException("corrupt H2 block store - no chain head pointer");
    }
    Sha256Hash hash = new Sha256Hash(rs.getBytes(1));
    rs.close();
    // Resolve the stored hash into a full StoredBlock via the headers table.
    this.chainHeadBlock = get(hash);
    this.chainHeadHash = hash;
    if (this.chainHeadBlock == null)
    {
        throw new BlockStoreException("corrupt H2 block store - head block not found");
    }
    rs = s.executeQuery("SELECT value FROM settings WHERE name = '" + VERIFIED_CHAIN_HEAD_SETTING + "'");
    if (!rs.next()) {
        throw new BlockStoreException("corrupt H2 block store - no verified chain head pointer");
    }
    hash = new Sha256Hash(rs.getBytes(1));
    rs.close();
    s.close();
    this.verifiedChainHeadBlock = get(hash);
    this.verifiedChainHeadHash = hash;
    if (this.verifiedChainHeadBlock == null)
    {
        throw new BlockStoreException("corrupt H2 block store - verified head block not found");
    }
}
/**
 * Seeds a freshly-created database: stores the genesis block header and an
 * (empty) undoable entry for it, then points both chain heads at it.
 */
private void createNewStore(NetworkParameters params) throws BlockStoreException {
    try {
        // When starting fresh, the genesis block is by definition the chain top.
        StoredBlock genesisHeader =
                new StoredBlock(params.getGenesisBlock().cloneAsHeader(), params.getGenesisBlock().getWork(), 0);
        // The genesis coinbase is not spendable: the reference client never puts
        // the genesis transaction in its db, so its spent flags can't be updated.
        List<Transaction> noTransactions = Lists.newLinkedList();
        StoredUndoableBlock undoableGenesis =
                new StoredUndoableBlock(params.getGenesisBlock().getHash(), noTransactions);
        put(genesisHeader, undoableGenesis);
        setChainHead(genesisHeader);
        setVerifiedChainHead(genesisHeader);
    } catch (VerificationException e) {
        throw new RuntimeException(e); // Cannot happen.
    }
}
/**
 * Probes for a table's existence by issuing a query guaranteed to match no
 * rows; a thrown SQLException is interpreted as "table missing".
 *
 * @param table table name to probe.
 * @return true when the table exists, false otherwise.
 */
private boolean tableExists(String table) throws SQLException {
    Statement probe = conn.get().createStatement();
    try {
        // "WHERE 1 = 2" never matches, so this only tests that the table parses.
        probe.executeQuery("SELECT * FROM " + table + " WHERE 1 = 2").close();
        return true;
    } catch (SQLException ex) {
        return false;
    } finally {
        probe.close();
    }
}
/**
 * Dumps information about the size of actual data in the database to standard output
 * The only truly useless data counted is printed in the form "N in id indexes"
 * This does not take database indexes into account
 */
public void dumpSizes() throws SQLException, BlockStoreException {
    maybeConnect();
    Statement s = conn.get().createStatement();
    long size = 0;
    long totalSize = 0;
    int count = 0;
    // settings: name string + value blob per row.
    ResultSet rs = s.executeQuery("SELECT name, value FROM settings");
    while (rs.next()) {
        size += rs.getString(1).length();
        size += rs.getBytes(2).length;
        count++;
    }
    rs.close();
    System.out.printf("Settings size: %d, count: %d, average size: %f%n", size, count, (double)size/count);
    totalSize += size; size = 0; count = 0;
    // headers: fixed-size key/height plus chainWork and serialized header blobs.
    rs = s.executeQuery("SELECT chainWork, header FROM headers");
    while (rs.next()) {
        size += 28; // hash
        size += rs.getBytes(1).length;
        size += 4; // height
        size += rs.getBytes(2).length;
        count++;
    }
    rs.close();
    System.out.printf("Headers size: %d, count: %d, average size: %f%n", size, count, (double)size/count);
    totalSize += size; size = 0; count = 0;
    // undoableBlocks: exactly one of txOutChanges / transactions is non-NULL.
    rs = s.executeQuery("SELECT txOutChanges, transactions FROM undoableBlocks");
    while (rs.next()) {
        size += 28; // hash
        size += 4; // height
        byte[] txOutChanges = rs.getBytes(1);
        byte[] transactions = rs.getBytes(2);
        if (txOutChanges == null)
            size += transactions.length;
        else
            size += txOutChanges.length;
        // size += the space to represent NULL
        count++;
    }
    rs.close();
    System.out.printf("Undoable Blocks size: %d, count: %d, average size: %f%n", size, count, (double)size/count);
    totalSize += size; size = 0; count = 0;
    // openOutputs: script bytes are tallied separately to show their share.
    long scriptSize = 0;
    rs = s.executeQuery("SELECT value, scriptBytes FROM openOutputs");
    while (rs.next()) {
        size += 32; // hash
        size += 4; // index
        size += 4; // height
        size += rs.getBytes(1).length;
        size += rs.getBytes(2).length;
        scriptSize += rs.getBytes(2).length;
        count++;
    }
    rs.close();
    System.out.printf("Open Outputs size: %d, count: %d, average size: %f, average script size: %f (%d in id indexes)%n",
        size, count, (double)size/count, (double)scriptSize/count, count * 8);
    totalSize += size;
    System.out.println("Total Size: " + totalSize);
    s.close();
}
/**
 * Inserts a header row for the given block; on a duplicate-key error (23505)
 * and when {@code wasUndoable} is set, upgrades the existing row's
 * wasUndoable flag instead.
 */
private void putUpdateStoredBlock(StoredBlock storedBlock, boolean wasUndoable) throws SQLException {
    try {
        PreparedStatement s =
            conn.get().prepareStatement("INSERT INTO headers(hash, chainWork, height, header, wasUndoable)"
                + " VALUES(?, ?, ?, ?, ?)");
        // Key is a 28-byte slice of the 32-byte hash, copied from offset 3.
        // NOTE(review): the inherited comment said "skip the first 4 bytes"
        // but the copy starts at offset 3 (dropping bytes 0-2 and byte 31) --
        // confirm which offset existing databases were written with.
        byte[] hashBytes = new byte[28];
        System.arraycopy(storedBlock.getHeader().getHash().getBytes(), 3, hashBytes, 0, 28);
        s.setBytes(1, hashBytes);
        s.setBytes(2, storedBlock.getChainWork().toByteArray());
        s.setInt(3, storedBlock.getHeight());
        s.setBytes(4, storedBlock.getHeader().unsafeNubitSerialize());
        s.setBoolean(5, wasUndoable);
        s.executeUpdate();
        s.close();
    } catch (SQLException e) {
        // It is possible we try to add a duplicate StoredBlock if we upgraded
        // In that case, we just update the entry to mark it wasUndoable
        if (e.getErrorCode() != 23505 || !wasUndoable)
            throw e;
        PreparedStatement s = conn.get().prepareStatement("UPDATE headers SET wasUndoable=? WHERE hash=?");
        s.setBoolean(1, true);
        // Same 28-byte key derivation as the insert path above.
        byte[] hashBytes = new byte[28];
        System.arraycopy(storedBlock.getHeader().getHash().getBytes(), 3, hashBytes, 0, 28);
        s.setBytes(2, hashBytes);
        s.executeUpdate();
        s.close();
    }
}
/**
 * Stores a header-only block (not undoable), wrapping SQL failures in
 * BlockStoreException.
 */
public void put(StoredBlock storedBlock) throws BlockStoreException {
    maybeConnect();
    try {
        putUpdateStoredBlock(storedBlock, false);
    } catch (SQLException e) {
        throw new BlockStoreException(e);
    }
}
/**
 * Stores a block together with its undo data. The undo data is serialized as
 * either the txOutChanges blob or, when unavailable, the full transaction
 * list prefixed by a little-endian 32-bit count; exactly one of the two
 * columns is non-NULL. A duplicate row (error 23505) is updated in place.
 */
public void put(StoredBlock storedBlock, StoredUndoableBlock undoableBlock) throws BlockStoreException {
    maybeConnect();
    // Key is a 28-byte slice of the 32-byte hash, copied from offset 3
    // (see the review note in putUpdateStoredBlock about the offset).
    byte[] hashBytes = new byte[28];
    System.arraycopy(storedBlock.getHeader().getHash().getBytes(), 3, hashBytes, 0, 28);
    int height = storedBlock.getHeight();
    byte[] transactions = null;
    byte[] txOutChanges = null;
    try {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        if (undoableBlock.getTxOutChanges() != null) {
            undoableBlock.getTxOutChanges().serializeToStream(bos);
            txOutChanges = bos.toByteArray();
        } else {
            // No compact undo data: serialize every transaction, preceded by a
            // little-endian 32-bit transaction count.
            int numTxn = undoableBlock.getTransactions().size();
            bos.write((int) (0xFF & (numTxn >> 0)));
            bos.write((int) (0xFF & (numTxn >> 8)));
            bos.write((int) (0xFF & (numTxn >> 16)));
            bos.write((int) (0xFF & (numTxn >> 24)));
            for (Transaction tx : undoableBlock.getTransactions())
                tx.nubitSerialize(bos);
            transactions = bos.toByteArray();
        }
        bos.close();
    } catch (IOException e) {
        throw new BlockStoreException(e);
    }
    try {
        try {
            PreparedStatement s =
                conn.get().prepareStatement("INSERT INTO undoableBlocks(hash, height, txOutChanges, transactions)"
                    + " VALUES(?, ?, ?, ?)");
            s.setBytes(1, hashBytes);
            s.setInt(2, height);
            if (transactions == null) {
                s.setBytes(3, txOutChanges);
                s.setNull(4, Types.BLOB);
            } else {
                s.setNull(3, Types.BLOB);
                s.setBytes(4, transactions);
            }
            s.executeUpdate();
            s.close();
            try {
                putUpdateStoredBlock(storedBlock, true);
            } catch (SQLException e) {
                throw new BlockStoreException(e);
            }
        } catch (SQLException e) {
            if (e.getErrorCode() != 23505)
                throw new BlockStoreException(e);
            // There is probably an update-or-insert statement, but it wasn't obvious from the docs
            PreparedStatement s =
                conn.get().prepareStatement("UPDATE undoableBlocks SET txOutChanges=?, transactions=?"
                    + " WHERE hash = ?");
            s.setBytes(3, hashBytes);
            if (transactions == null) {
                s.setBytes(1, txOutChanges);
                s.setNull(2, Types.BLOB);
            } else {
                s.setNull(1, Types.BLOB);
                s.setBytes(2, transactions);
            }
            s.executeUpdate();
            s.close();
        }
    } catch (SQLException ex) {
        throw new BlockStoreException(ex);
    }
}
@Nullable
/**
 * Looks a block up by hash in the headers table.
 *
 * @param hash block hash to find.
 * @param wasUndoableOnly when true, rows whose wasUndoable flag is false are
 *        treated as absent.
 * @return the StoredBlock, or null when not found (or filtered out).
 */
public StoredBlock get(Sha256Hash hash, boolean wasUndoableOnly) throws BlockStoreException {
    // Optimize for chain head
    if (chainHeadHash != null && chainHeadHash.equals(hash))
        return chainHeadBlock;
    if (verifiedChainHeadHash != null && verifiedChainHeadHash.equals(hash))
        return verifiedChainHeadBlock;
    maybeConnect();
    PreparedStatement s = null;
    try {
        s = conn.get().prepareStatement("SELECT chainWork, height, header, wasUndoable FROM headers WHERE hash = ?");
        // Key is a 28-byte slice of the 32-byte hash, copied from offset 3
        // (see the review note in putUpdateStoredBlock about the offset).
        byte[] hashBytes = new byte[28];
        System.arraycopy(hash.getBytes(), 3, hashBytes, 0, 28);
        s.setBytes(1, hashBytes);
        ResultSet results = s.executeQuery();
        if (!results.next()) {
            return null;
        }
        // Parse it.
        if (wasUndoableOnly && !results.getBoolean(4))
            return null;
        BigInteger chainWork = new BigInteger(results.getBytes(1));
        int height = results.getInt(2);
        Block b = new Block(params, results.getBytes(3));
        b.verifyHeader();
        return new StoredBlock(b, chainWork, height);
    } catch (SQLException ex) {
        throw new BlockStoreException(ex);
    } catch (ProtocolException e) {
        // Corrupted database.
        throw new BlockStoreException(e);
    } catch (VerificationException e) {
        // Should not be able to happen unless the database contains bad
        // blocks.
        throw new BlockStoreException(e);
    } finally {
        if (s != null) {
            try {
                s.close();
            } catch (SQLException e) {
                throw new BlockStoreException("Failed to close PreparedStatement");
            }
        }
    }
}
@Nullable
/** Looks up a block by hash regardless of its wasUndoable flag. */
public StoredBlock get(Sha256Hash hash) throws BlockStoreException {
    return get(hash, false);
}
@Nullable
/** Looks up a block by hash, returning it only if it was stored as undoable. */
public StoredBlock getOnceUndoableStoredBlock(Sha256Hash hash) throws BlockStoreException {
    return get(hash, true);
}
@Nullable
/**
 * Loads the undo data for a block: either deserializes the txOutChanges blob,
 * or reconstructs the transaction list from its serialized form (little-endian
 * 32-bit count followed by the transactions, mirroring put()).
 *
 * @return the StoredUndoableBlock, or null when no row exists for the hash.
 */
public StoredUndoableBlock getUndoBlock(Sha256Hash hash) throws BlockStoreException {
    maybeConnect();
    PreparedStatement s = null;
    try {
        s = conn.get()
            .prepareStatement("SELECT txOutChanges, transactions FROM undoableBlocks WHERE hash = ?");
        // Key is a 28-byte slice of the 32-byte hash, copied from offset 3
        // (see the review note in putUpdateStoredBlock about the offset).
        byte[] hashBytes = new byte[28];
        System.arraycopy(hash.getBytes(), 3, hashBytes, 0, 28);
        s.setBytes(1, hashBytes);
        ResultSet results = s.executeQuery();
        if (!results.next()) {
            return null;
        }
        // Parse it.
        byte[] txOutChanges = results.getBytes(1);
        byte[] transactions = results.getBytes(2);
        StoredUndoableBlock block;
        if (txOutChanges == null) {
            // Decode the little-endian transaction count written by put().
            int offset = 0;
            int numTxn = ((transactions[offset++] & 0xFF) << 0) |
                ((transactions[offset++] & 0xFF) << 8) |
                ((transactions[offset++] & 0xFF) << 16) |
                ((transactions[offset++] & 0xFF) << 24);
            List<Transaction> transactionList = new LinkedList<Transaction>();
            for (int i = 0; i < numTxn; i++) {
                Transaction tx = new Transaction(params, transactions, offset);
                transactionList.add(tx);
                offset += tx.getMessageSize();
            }
            block = new StoredUndoableBlock(hash, transactionList);
        } else {
            TransactionOutputChanges outChangesObject =
                new TransactionOutputChanges(new ByteArrayInputStream(txOutChanges));
            block = new StoredUndoableBlock(hash, outChangesObject);
        }
        return block;
    } catch (SQLException ex) {
        throw new BlockStoreException(ex);
    } catch (NullPointerException e) {
        // Corrupted database.
        throw new BlockStoreException(e);
    } catch (ClassCastException e) {
        // Corrupted database.
        throw new BlockStoreException(e);
    } catch (ProtocolException e) {
        // Corrupted database.
        throw new BlockStoreException(e);
    } catch (IOException e) {
        // Corrupted database.
        throw new BlockStoreException(e);
    } finally {
        if (s != null)
            try {
                s.close();
            } catch (SQLException e) { throw new BlockStoreException("Failed to close PreparedStatement"); }
    }
}
/** Returns the cached best-chain head; never queries the database. */
public StoredBlock getChainHead() throws BlockStoreException {
    return chainHeadBlock;
}
/**
 * Updates the in-memory chain head cache and persists the full 32-byte hash
 * into the settings row keyed by CHAIN_HEAD_SETTING.
 */
public void setChainHead(StoredBlock chainHead) throws BlockStoreException {
    Sha256Hash headHash = chainHead.getHeader().getHash();
    this.chainHeadHash = headHash;
    this.chainHeadBlock = chainHead;
    maybeConnect();
    try {
        PreparedStatement update = conn.get()
            .prepareStatement("UPDATE settings SET value = ? WHERE name = ?");
        update.setBytes(1, headHash.getBytes());
        update.setString(2, CHAIN_HEAD_SETTING);
        update.executeUpdate();
        update.close();
    } catch (SQLException ex) {
        throw new BlockStoreException(ex);
    }
}
/** Returns the cached verified chain head; never queries the database. */
public StoredBlock getVerifiedChainHead() throws BlockStoreException {
    return verifiedChainHeadBlock;
}
/**
 * Updates the verified chain head cache and its settings row, advances the
 * plain chain head if it fell behind, and prunes undo data older than
 * fullStoreDepth blocks below the new verified head.
 */
public void setVerifiedChainHead(StoredBlock chainHead) throws BlockStoreException {
    Sha256Hash hash = chainHead.getHeader().getHash();
    this.verifiedChainHeadHash = hash;
    this.verifiedChainHeadBlock = chainHead;
    maybeConnect();
    try {
        PreparedStatement s = conn.get()
            .prepareStatement("UPDATE settings SET value = ? WHERE name = ?");
        s.setString(2, VERIFIED_CHAIN_HEAD_SETTING);
        s.setBytes(1, hash.getBytes());
        s.executeUpdate();
        s.close();
    } catch (SQLException ex) {
        throw new BlockStoreException(ex);
    }
    // The best header chain can never be behind the fully-verified chain.
    if (this.chainHeadBlock.getHeight() < chainHead.getHeight())
        setChainHead(chainHead);
    // Prune undo data we will never need again (may pass a negative height
    // early in the chain, in which case the DELETE matches nothing).
    removeUndoableBlocksWhereHeightIsLessThan(chainHead.getHeight() - fullStoreDepth);
}
/**
 * Deletes every undoableBlocks row whose height is at or below the given
 * value; those blocks can no longer be disconnected afterwards.
 */
private void removeUndoableBlocksWhereHeightIsLessThan(int height) throws BlockStoreException {
    try {
        PreparedStatement prune = conn.get()
            .prepareStatement("DELETE FROM undoableBlocks WHERE height <= ?");
        prune.setInt(1, height);
        prune.executeUpdate();
        prune.close();
    } catch (SQLException ex) {
        throw new BlockStoreException(ex);
    }
}
@Nullable
/**
 * Looks up an unspent output by transaction hash and output index.
 *
 * @return the StoredTransactionOutput, or null when no such open output exists.
 */
public StoredTransactionOutput getTransactionOutput(Sha256Hash hash, long index) throws BlockStoreException {
    maybeConnect();
    PreparedStatement s = null;
    try {
        s = conn.get()
            .prepareStatement("SELECT height, value, scriptBytes FROM openOutputs " +
                "WHERE hash = ? AND index = ?");
        // openOutputs is keyed by the full 32-byte hash (unlike headers).
        s.setBytes(1, hash.getBytes());
        // index is actually an unsigned int
        s.setInt(2, (int)index);
        ResultSet results = s.executeQuery();
        if (!results.next()) {
            return null;
        }
        // Parse it.
        int height = results.getInt(1);
        BigInteger value = new BigInteger(results.getBytes(2));
        // Tell the StoredTransactionOutput that we are a coinbase, as that is encoded in height
        return new StoredTransactionOutput(hash, index, value, height, true, results.getBytes(3));
    } catch (SQLException ex) {
        throw new BlockStoreException(ex);
    } finally {
        if (s != null)
            try {
                s.close();
            } catch (SQLException e) { throw new BlockStoreException("Failed to close PreparedStatement"); }
    }
}
/**
 * Inserts an output into the unspent-output set.
 * An H2 duplicate-key error (code 23505) is deliberately swallowed: re-adding an
 * output that is already stored is treated as success.
 *
 * @param out the output to store
 * @throws BlockStoreException on any database error other than a duplicate key
 */
public void addUnspentTransactionOutput(StoredTransactionOutput out) throws BlockStoreException {
    maybeConnect();
    PreparedStatement s = null;
    try {
        s = conn.get().prepareStatement("INSERT INTO openOutputs (hash, index, height, value, scriptBytes) " +
                "VALUES (?, ?, ?, ?, ?)");
        s.setBytes(1, out.getHash().getBytes());
        // index is actually an unsigned int
        s.setInt(2, (int)out.getIndex());
        s.setInt(3, out.getHeight());
        s.setBytes(4, out.getValue().toByteArray());
        s.setBytes(5, out.getScriptBytes());
        s.executeUpdate();
        // Note: the statement is closed once, in the finally block below
        // (the original closed it both here and in finally).
    } catch (SQLException e) {
        // 23505 is H2's duplicate-key error: the output already exists, ignore.
        if (e.getErrorCode() != 23505)
            throw new BlockStoreException(e);
    } finally {
        if (s != null)
            try {
                s.close();
            } catch (SQLException e) { throw new BlockStoreException(e); }
    }
}
/**
 * Removes an output from the unspent-output set.
 *
 * @param out the output to remove
 * @throws BlockStoreException if the output was not present, or on database error
 */
public void removeUnspentTransactionOutput(StoredTransactionOutput out) throws BlockStoreException {
    maybeConnect();
    PreparedStatement s = null;
    try {
        s = conn.get()
                .prepareStatement("DELETE FROM openOutputs WHERE hash = ? AND index = ?");
        s.setBytes(1, out.getHash().getBytes());
        // index is actually an unsigned int
        s.setInt(2, (int)out.getIndex());
        // executeUpdate() already returns the affected-row count; the original's
        // extra getUpdateCount() call was redundant.
        int updateCount = s.executeUpdate();
        if (updateCount == 0)
            throw new BlockStoreException("Tried to remove a StoredTransactionOutput from H2FullPrunedBlockStore that it didn't have!");
    } catch (SQLException e) {
        throw new BlockStoreException(e);
    } finally {
        // Close in finally so the statement is not leaked when the delete fails.
        if (s != null)
            try {
                s.close();
            } catch (SQLException e) { throw new BlockStoreException(e); }
    }
}
/**
 * Starts a batch of writes by turning off auto-commit on this thread's connection.
 * Must be paired with commitDatabaseBatchWrite() or abortDatabaseBatchWrite().
 */
public void beginDatabaseBatchWrite() throws BlockStoreException {
    maybeConnect();
    try {
        conn.get().setAutoCommit(false);
    } catch (SQLException e) {
        throw new BlockStoreException(e);
    }
}
/**
 * Commits the current batch of writes and restores auto-commit mode.
 */
public void commitDatabaseBatchWrite() throws BlockStoreException {
    maybeConnect();
    try {
        conn.get().commit();
        conn.get().setAutoCommit(true);
    } catch (SQLException e) {
        throw new BlockStoreException(e);
    }
}
/**
 * Rolls back the current batch of writes and restores auto-commit mode.
 */
public void abortDatabaseBatchWrite() throws BlockStoreException {
    maybeConnect();
    try {
        conn.get().rollback();
        conn.get().setAutoCommit(true);
    } catch (SQLException e) {
        throw new BlockStoreException(e);
    }
}
/**
 * Returns true if the given transaction still has at least one unspent output.
 *
 * NOTE(review): the {@code numOutputs} parameter is accepted but never used here —
 * the COUNT(*) query only checks whether ANY output remains. Presumably the
 * parameter exists to satisfy a store interface; confirm against the interface.
 *
 * @param hash       hash of the transaction to check
 * @param numOutputs unused in this implementation
 * @throws BlockStoreException on database error or an empty COUNT(*) result
 */
public boolean hasUnspentOutputs(Sha256Hash hash, int numOutputs) throws BlockStoreException {
    maybeConnect();
    PreparedStatement s = null;
    try {
        s = conn.get()
                .prepareStatement("SELECT COUNT(*) FROM openOutputs WHERE hash = ?");
        s.setBytes(1, hash.getBytes());
        ResultSet results = s.executeQuery();
        if (!results.next()) {
            throw new BlockStoreException("Got no results from a COUNT(*) query");
        }
        int count = results.getInt(1);
        return count != 0;
    } catch (SQLException ex) {
        throw new BlockStoreException(ex);
    } finally {
        if (s != null)
            try {
                s.close();
            } catch (SQLException e) { throw new BlockStoreException("Failed to close PreparedStatement"); }
    }
}
}
| |
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.program.model.util;
import java.util.*;
import ghidra.program.model.address.*;
import ghidra.program.model.listing.*;
import ghidra.program.model.symbol.Reference;
import ghidra.program.model.symbol.ReferenceManager;
import ghidra.util.exception.CancelledException;
import ghidra.util.graph.AbstractDependencyGraph;
import ghidra.util.graph.DependencyGraph;
import ghidra.util.task.TaskMonitor;
/**
 * Class to build a DependencyGraph based on an acyclic function call graph. This is useful when
 * you want to process functions "bottom up".
 */
public class AcyclicCallGraphBuilder {

    private Program program;
    // Entry points of every function included in the graph (thunks resolved if killThunks).
    private Set<Address> functionSet;
    // If true, thunk functions are replaced by the function they thunk to.
    private boolean killThunks;

    /**
     * Creates a DependencyGraph of all functions in a program based on the call graph.
     * @param program the program to create an acyclic call graph
     * @param killThunks true if thunked functions should be eliminated from the graph
     */
    public AcyclicCallGraphBuilder(Program program, boolean killThunks) {
        this(program, program.getMemory(), killThunks);
    }

    /**
     * Creates a DependencyGraph of all functions in the given addressSet based on the call graph.
     * Calls to or from functions outside the given address set are ignored.
     * @param program the program to create an acyclic call graph
     * @param set the address to restrict the call graph.
     * @param killThunks true if thunked functions should be eliminated from the graph
     */
    public AcyclicCallGraphBuilder(Program program, AddressSetView set, boolean killThunks) {
        this.program = program;
        this.functionSet = findFunctions(program, set, killThunks);
        this.killThunks = killThunks;
    }

    /**
     * Creates a DependencyGraph of all functions in the given set of functions based on the call graph.
     * Calls to or from functions not in the given set are ignored.
     * @param program the program to create an acyclic call graph
     * @param functions the set of functions to include in the call graph.
     * @param killThunks true if thunked functions should be eliminated from the graph
     */
    public AcyclicCallGraphBuilder(Program program, Collection<Function> functions,
            boolean killThunks) {
        this.program = program;
        functionSet = new HashSet<>();
        for (Function function : functions) {
            if (killThunks) {
                if (function.isThunk()) {
                    // Follow the whole thunk chain to the ultimate target.
                    function = function.getThunkedFunction(true);
                }
            }
            functionSet.add(function.getEntryPoint());
        }
        this.killThunks = killThunks;
    }

    /**
     * Builds the DependencyGraph for the acyclic call graph represented by this object.
     * @param monitor the taskMonitor to use for reporting progress or cancelling.
     * @return the DependencyGraph for the acyclic call graph represented by this object.
     * @throws CancelledException if the monitor was cancelled.
     */
    public AbstractDependencyGraph<Address> getDependencyGraph(TaskMonitor monitor)
            throws CancelledException {
        AbstractDependencyGraph<Address> graph = new DependencyGraph<>();
        Deque<Address> startPoints = findStartPoints();
        Set<Address> unprocessed = new TreeSet<>(functionSet); // reliable processing order
        monitor.initialize(unprocessed.size());
        // Each iteration runs one depth-first traversal; loop until every function
        // has been visited (disconnected components each get their own traversal).
        while (!unprocessed.isEmpty()) {
            monitor.checkCanceled();
            Address functionEntry = getNextStartFunction(startPoints, unprocessed);
            processForward(graph, unprocessed, functionEntry, monitor);
        }
        return graph;
    }

    // Prefers an unprocessed start point (root of the call graph); when none remain,
    // falls back to an arbitrary unprocessed function (e.g. inside a call cycle).
    private Address getNextStartFunction(Deque<Address> startPoints, Set<Address> unProcessedSet) {
        while (!startPoints.isEmpty()) {
            Address address = startPoints.pop();
            if (unProcessedSet.contains(address)) {
                return address;
            }
        }
        return unProcessedSet.iterator().next();
    }

    private Deque<Address> findStartPoints() {
        Deque<Address> startPoints = new LinkedList<>();
        // populate startPoints with functions that have no callers or are an entry point
        for (Address address : functionSet) {
            if (isStartFunction(address)) {
                startPoints.add(address);
            }
        }
        return startPoints;
    }

    // Fills in node.children with the entry points of every in-set function this
    // node calls. A thunk gets exactly one child: the function it thunks to.
    private void initializeNode(StackNode node) {
        FunctionManager fmanage = program.getFunctionManager();
        Function function = fmanage.getFunctionAt(node.address);
        if (function.isThunk()) {
            Function thunkedfunc = function.getThunkedFunction(false);
            node.children = new Address[1];
            node.children[0] = thunkedfunc.getEntryPoint();
            return;
        }
        ArrayList<Address> children = new ArrayList<>();
        ReferenceManager referenceManager = program.getReferenceManager();
        AddressIterator referenceSourceIterator =
            referenceManager.getReferenceSourceIterator(function.getBody(), true);
        while (referenceSourceIterator.hasNext()) {
            Address fromAddr = referenceSourceIterator.next();
            for (Reference ref : referenceManager.getFlowReferencesFrom(fromAddr)) {
                Address toAddr = ref.getToAddress();
                if (ref.getReferenceType().isCall()) {
                    Function childfunc = fmanage.getFunctionAt(toAddr);
                    if (childfunc != null && killThunks) {
                        if (childfunc.isThunk()) {
                            childfunc = childfunc.getThunkedFunction(true);
                            toAddr = childfunc.getEntryPoint();
                        }
                    }
                    // Calls to functions outside the configured set are ignored.
                    if (functionSet.contains(toAddr)) {
                        children.add(toAddr);
                    }
                }
            }
        }
        node.children = new Address[children.size()];
        children.toArray(node.children);
    }

    // Iterative depth-first traversal from startFunction. Back-edges (children already
    // on the visit stack) are skipped, which is what makes the result acyclic.
    private void processForward(AbstractDependencyGraph<Address> graph, Set<Address> unprocessed,
            Address startFunction, TaskMonitor monitor) throws CancelledException {
        VisitStack stack = new VisitStack(startFunction);
        StackNode curnode = stack.peek();
        initializeNode(curnode);
        graph.addValue(curnode.address);
        while (!stack.isEmpty()) {
            monitor.checkCanceled();
            curnode = stack.peek();
            if (curnode.nextchild >= curnode.children.length) { // No more children to traverse for this node
                unprocessed.remove(curnode.address);
                monitor.incrementProgress(1);
                stack.pop();
            }
            else {
                Address childAddr = curnode.children[curnode.nextchild++];
                if (!stack.contains(childAddr)) {
                    if (unprocessed.contains(childAddr)) {
                        // First visit: descend into the child.
                        stack.push(childAddr);
                        StackNode nextnode = stack.peek();
                        initializeNode(nextnode);
                        childAddr = nextnode.address;
                        graph.addValue(nextnode.address);
                    }
                    graph.addDependency(curnode.address, childAddr);
                }
            }
        }
    }

    // A function is a traversal start point if it is an entry point or has no callers.
    private boolean isStartFunction(Address address) {
        ReferenceManager referenceManager = program.getReferenceManager();
        Iterable<Reference> referencesTo = referenceManager.getReferencesTo(address);
        for (Reference reference : referencesTo) {
            if (reference.isEntryPointReference()) {
                return true;
            }
            if (reference.getReferenceType().isCall()) {
                //Assume that any call implies that none of the references will be entry point reference.
                return false;
            }
        }
        return true;
    }

    private static Set<Address> findFunctions(Program program, AddressSetView set,
            boolean killThunks) {
        Set<Address> functionStarts = new HashSet<>();
        FunctionIterator functions = program.getFunctionManager().getFunctions(set, true);
        for (Function function : functions) {
            if (killThunks) {
                if (function.isThunk()) {
                    function = function.getThunkedFunction(true);
                }
            }
            functionStarts.add(function.getEntryPoint());
        }
        return functionStarts;
    }

    // One frame of the explicit DFS stack: a function plus its callees and a cursor
    // into the next child to visit.
    private static class StackNode {
        public Address address;
        public Address[] children;
        public int nextchild;

        @Override
        public String toString() {
            return address == null ? ""
                    : address.toString() +
                        (children == null ? " <no children>" : " " + Arrays.toString(children));
        }
    }

    // DFS stack with an auxiliary set for O(1) "is this address on the stack" checks.
    private static class VisitStack {
        private Set<Address> inStack = new HashSet<>();
        private Deque<StackNode> stack = new LinkedList<>();

        public VisitStack(Address functionEntry) {
            push(functionEntry);
        }

        public boolean isEmpty() {
            return stack.isEmpty();
        }

        public StackNode peek() {
            return stack.peek();
        }

        public boolean contains(Address address) {
            return inStack.contains(address);
        }

        public void push(Address address) {
            if (!inStack.add(address)) {
                throw new IllegalStateException(
                    "Attempted to visit an address that is already on the stack");
            }
            StackNode newnode = new StackNode();
            newnode.address = address;
            newnode.nextchild = 0;
            stack.push(newnode);
        }

        public void pop() {
            Address address = stack.pop().address;
            inStack.remove(address);
        }
    }
}
| |
package Main;
/**Class: Main
 * @author Kevin Stevens
 * @version 1.0
 * Course :
 * Written:
 *
 *
 * Purpose: JavaFX entry point and main game loop for the "Tales of Titans"
 * text-adventure game: renders room text, reads typed commands, and updates state.
 */
import inventory.Coin;
import inventory.Item;
import inventory.Usable;
import inventory.Weapon;
import java.util.ArrayList;
import java.util.Iterator;
import Entity.Player;
import Room.ItemPuzzle;
import Room.Riddle;
import Room.Room;
import Room.RoomControl;
import javafx.scene.Scene;
import javafx.stage.Stage;
import javafx.util.Duration;
import javafx.animation.KeyFrame;
import javafx.animation.Timeline;
import javafx.application.Application;
public class Main extends Application
{
    // Pane that displays game text and collects the player's typed input.
    private TextPane txt = new TextPane();
    // Timeline driving mainLoop() roughly every 5 ms.
    private Timeline mainLoopCheck;
    // ID of the room the player is currently in ("S1" = start menu).
    public static String currentRoom = "S1";
    // Room object matching currentRoom; resolved by getObject().
    private Room currentRoomObject;
    private Player player = new Player();
    // Full text currently on screen; appended by display(), wiped by clearScreen().
    private static String displayText = "";
    // All rooms in the game world.
    private ArrayList<Room> rooms = new ArrayList<Room>();
/**
 * JavaFX entry point: builds the window, creates the menu/death/victory rooms,
 * wires up the world via RoomControl, and starts the 5 ms game-loop Timeline.
 *
 * @param primaryStage the stage supplied by the JavaFX runtime
 */
public void start(Stage primaryStage)
{
    Scene scene = new Scene(txt, 500, 500);
    primaryStage.setTitle("Tales of Titans");
    primaryStage.setScene(scene);
    primaryStage.show();
    txt.setStyle("-fx-background-color: black");
    txt.requestFocus();
    // Meta rooms: start menu, death screen, victory screen.
    Room mainMenu = new Room("S1",
            "Welcome to Tales of Titans!\n\nType Start to begin!",
            false, null);
    rooms.add(mainMenu);
    Room deathScreen = new Room("D1",
            "-- You have died. Game Over.",
            false, null);
    rooms.add(deathScreen);
    Room victoryScreen = new Room("V1",
            "Congratulations! You have defeated Briarios! You are victorious!",
            false, null);
    rooms.add(victoryScreen);
    RoomControl rc = new RoomControl(rooms, player);
    // Start at the main menu. (A leftover debug override that jumped straight
    // to room "C3" has been removed; it made this assignment dead code and
    // skipped the menu entirely.)
    currentRoom = "S1";
    setRoom(currentRoom);
    mainLoopCheck = new Timeline(
            new KeyFrame(Duration.millis(5), e -> mainLoop()));
    mainLoopCheck.setCycleCount(Timeline.INDEFINITE);
    mainLoopCheck.play();
    // Starter inventory.
    Usable newPotion = new Usable(6);
    player.addItem(newPotion);
    player.addGold(3);
    //player.setHealth(9999999);
}
/**
 * One tick of the game loop (runs every ~5 ms):
 * re-renders the room if the screen is empty, converts coins to gold,
 * parses any pending input, trims the screen to 15 lines, and handles death.
 */
public void mainLoop()
{
    // If the screen was just cleared, redraw the current room.
    if (displayText.equals("") && currentRoomObject != null)
    {
        setRoom(currentRoom);
    }
    // Convert any Coin items in the inventory into gold.
    Iterator<Item> i = player.getInv().iterator();
    while(i.hasNext())
    {
        Item i2 = i.next();
        if (i2 instanceof Coin)
        {
            player.addGold(1);
            i.remove();
        }
    }
    getObject();
    // Soft-wrap long lines before display.
    if (displayText.length() > 0) displayText = cropText(displayText);
    // Dispatch typed input; the meta rooms (menu/death/victory) only accept "start".
    // NOTE(review): this dereferences currentRoomObject without a null check —
    // presumably getObject() always resolves a room by this point; confirm.
    if (!txt.getInput().equals(""))
    {
        if (!currentRoomObject.getID().equals("S1")
                && !currentRoomObject.getID().equals("D1")
                && !currentRoomObject.getID().equals("V1"))
        {
            parseText(txt.getInput());
        }
        else
        {
            parseTextStart(txt.getInput());
        }
        txt.resetInput();
    }
    // Scroll: keep at most 15 lines by dropping the oldest.
    while (countLines(displayText) > 15)
    {
        displayText = displayText.substring(displayText.indexOf("\n") + 1);
    }
    // Death check.
    if (player.getHealth() < 1 && !currentRoom.equals("D1"))
    {
        clearScreen();
        setRoom("D1");
    }
    txt.changeText(displayText);
}
/** Launches the JavaFX application. */
public static void main(String[] args)
{
    Application.launch(args);
}
/**
 * Appends a line of text to the on-screen buffer.
 *
 * @param newText text to show on its own line
 */
public static void display(String newText)
{
    displayText += "\n" + newText;
}
/** Wipes the on-screen text buffer; the next tick re-renders the room. */
private void clearScreen()
{
    displayText = "";
}
/**
 * Soft-wraps the buffer: whenever 50 consecutive characters contain no newline,
 * the last space in that window is replaced by a newline.
 *
 * NOTE(review): index arithmetic here is delicate (i-1/i+1 window, i+2..i+50
 * space search); presumably lastIndexOf always finds a space in that window —
 * if a 48+ character run has no space, lastSpace underflows. Confirm inputs.
 *
 * @param cropString text to wrap
 * @return the wrapped text
 */
private String cropText(String cropString)
{
    String newString = cropString;
    for (int i = 1; i < newString.length() - 50; i++)
    {
        // Only start a new wrap when we are not adjacent to an existing newline.
        if (!newString.substring(i-1, i+1).contains("\n"))
        {
            if (!newString.substring(i, i + 50).contains("\n"))
            {
                // Break at the last space inside the 50-char window.
                int lastSpace = newString.substring(i + 2, i + 50).lastIndexOf(" ") + i + 2;
                newString = newString.substring(0, lastSpace) + "\n" + newString.substring(lastSpace + 1);
            }
        }
    }
    return newString;
}
/**
 * Counts the newline characters in the given string (i.e. completed lines).
 *
 * @param testString text to scan
 * @return number of '\n' occurrences
 */
public int countLines(String testString)
{
    int lineCount = 0;
    int searchFrom = 0;
    int hit = testString.indexOf("\n", searchFrom);
    while (hit > -1)
    {
        lineCount++;
        searchFrom = hit + 1;
        hit = testString.indexOf("\n", searchFrom);
    }
    return lineCount;
}
/**
 * Prints the current room: its description, any monster, any puzzle,
 * and a one-line listing of visible items.
 */
private void look()
{
    display(currentRoomObject.getDescription());
    if (currentRoomObject.getMonster() != null)
    {
        display(currentRoomObject.getMonster().getDescription());
    }
    if (currentRoomObject.getPuzzle() != null)
    {
        display(currentRoomObject.getPuzzle().getDescription());
    }
    if (currentRoomObject.getInv().size() > 0)
    {
        display("-- Items you can see");
        // Build the item list on one line, e.g. "-Sword -Potion ".
        StringBuilder invLine = new StringBuilder();
        for (Item item : currentRoomObject.getInv())
        {
            invLine.append("-").append(item.getName()).append(" ");
        }
        display(invLine.toString());
    }
}
/**
 * Moves the player to the given room, renders it, and gives any monster
 * present a random (aggressiveness-weighted) chance to attack immediately.
 *
 * @param newRoom ID of the destination room
 */
private void setRoom(String newRoom)
{
    currentRoom = newRoom;
    getObject();
    look();
    if (currentRoomObject.getMonster() != null)
    {
        // Higher aggressiveness => higher chance of a free attack on entry.
        if (Math.random() * 100 < currentRoomObject.getMonster().getAggressiveness())
        {
            currentRoomObject.getMonster().attack(player);
        }
    }
}
/**
 * Resolves currentRoom (an ID string) into currentRoomObject.
 * Leaves currentRoomObject unchanged if no room matches.
 */
private void getObject()
{
    for (Room r : rooms)
    {
        // Compare by value: the original used ==, which tests reference identity
        // and only works for interned string literals.
        if (r.getID().equals(currentRoom))
        {
            currentRoomObject = r;
        }
    }
}
/**
 * Command parser for the meta rooms (menu/death/victory): only "start"
 * is accepted, which (re)enters the first playable room "M1".
 *
 * @param input the raw text the player typed
 */
private void parseTextStart(String input)
{
    if (input.toLowerCase().equals("start"))
    {
        currentRoom = "M1";
        setRoom(currentRoom);
        // NOTE(review): setRoom() already calls getObject(); this extra call
        // is redundant but harmless.
        getObject();
    }
}
/**
 * Command parser for normal gameplay rooms. Checks the input against each known
 * command in turn; the {@code recognized} flag ensures only the first match runs.
 * Falls through to riddle-answer matching, then "Command not recognized.".
 *
 * @param input the raw text the player typed
 */
private void parseText(String input)
{
    String tempInput = input;
    // (removed an unused local "tempInt" that was never read)
    boolean recognized = false;
    // attack / a — fight the room's monster, if any.
    if ((tempInput.toLowerCase().contains("attack")
            || tempInput.equalsIgnoreCase("a"))
            && !recognized)
    {
        if (currentRoomObject.getMonster() != null)
        {
            player.attack(currentRoomObject.getMonster());
            if (currentRoomObject.getMonster().getHealth() > 0)
            {
                // Monster survived; it strikes back.
                currentRoomObject.getMonster().attack(player);
            }
            else
            {
                // Monster ID 7 is the final boss; killing it wins the game.
                if (currentRoomObject.getMonster().getMonsterID() != 7)
                {
                    currentRoomObject.killMonster(player);
                }
                else
                {
                    clearScreen();
                    setRoom("V1");
                }
            }
        }
        else
        {
            display("-- There is nothing to attack.");
        }
        recognized = true;
    }
    String nothing = "-- Can't go that direction.";
    String roomLocked = "-- Something is preventing you from going that way.";
    // north / n — locked-direction indices are 0=N, 1=E, 2=S, 3=W.
    if ((tempInput.toLowerCase().contains("north")
            || tempInput.equalsIgnoreCase("n"))
            && !recognized)
    {
        if (currentRoomObject.getNorth() != null && !currentRoomObject.getLocked(0))
        {
            setRoom(currentRoomObject.getNorth().getID());
        }
        else if (currentRoomObject.getNorth() != null && currentRoomObject.getLocked(0))
        {
            display(roomLocked);
        }
        else
        {
            display(nothing);
        }
        recognized = true;
    }
    // east / e
    if ((tempInput.toLowerCase().contains("east")
            || tempInput.equalsIgnoreCase("e"))
            && !recognized)
    {
        if (currentRoomObject.getEast() != null && !currentRoomObject.getLocked(1))
        {
            setRoom(currentRoomObject.getEast().getID());
        }
        else if (currentRoomObject.getEast() != null && currentRoomObject.getLocked(1))
        {
            display(roomLocked);
        }
        else
        {
            display(nothing);
        }
        recognized = true;
    }
    // south / s
    if ((tempInput.toLowerCase().contains("south")
            || tempInput.equalsIgnoreCase("s"))
            && !recognized)
    {
        if (currentRoomObject.getSouth() != null && !currentRoomObject.getLocked(2))
        {
            setRoom(currentRoomObject.getSouth().getID());
        }
        else if (currentRoomObject.getSouth() != null && currentRoomObject.getLocked(2))
        {
            display(roomLocked);
        }
        else
        {
            display(nothing);
        }
        recognized = true;
    }
    // west / w
    if ((tempInput.toLowerCase().contains("west")
            || tempInput.equalsIgnoreCase("w"))
            && !recognized)
    {
        if (currentRoomObject.getWest() != null && !currentRoomObject.getLocked(3))
        {
            setRoom(currentRoomObject.getWest().getID());
        }
        else if (currentRoomObject.getWest() != null && currentRoomObject.getLocked(3))
        {
            display(roomLocked);
        }
        else
        {
            display(nothing);
        }
        recognized = true;
    }
    // look / l — re-describe the room.
    if ((tempInput.toLowerCase().contains("look")
            || tempInput.equalsIgnoreCase("l"))
            && !recognized)
    {
        look();
        recognized = true;
    }
    // help — list available commands.
    if (tempInput.toLowerCase().contains("help") && !recognized)
    {
        display("-- Try these commands: "
                + "\n - North/South/East/West "
                + "\n - Look"
                + "\n - Attack"
                + "\n - Get (item)"
                + "\n - Buy (item)"
                + "\n - Use (item)"
                + "\n - Equip (weapon)"
                + "\n - Inventory"
                + "\n - Hint");
        recognized = true;
    }
    // get <item> / take <item> — pick up an item (not allowed in shops).
    if ((tempInput.toLowerCase().contains("get ")
            || tempInput.toLowerCase().contains("take "))
            && !recognized)
    {
        int startIndex = tempInput.toLowerCase().indexOf(" ") + 1;
        String getItemName = tempInput.toLowerCase().substring(startIndex);
        boolean itemGotten = false;
        ArrayList<Item> currentInv = currentRoomObject.getInv();
        if (currentInv.size() > 0 && !currentRoomObject.getIsShop())
        {
            for (int i = 0; i < currentInv.size(); i++)
            {
                if (currentInv.get(i).getName().toLowerCase().equals(getItemName))
                {
                    player.getItem(currentInv.get(i));
                    currentInv.remove(i);
                    itemGotten = true;
                }
            }
        }
        if (!itemGotten && !currentRoomObject.getIsShop())
        {
            display("-- No item to get");
        }
        if (currentRoomObject.getIsShop())
        {
            display("-- This is a shop, ya gotta pay for that!");
        }
        recognized = true;
    }
    // buy <item> — purchase from a shop room.
    if (tempInput.toLowerCase().contains("buy ") && !recognized)
    {
        int startIndex = tempInput.toLowerCase().indexOf("buy") + 4;
        String getItemName = tempInput.toLowerCase().substring(startIndex);
        // NOTE(review): itemGotten is never set in this branch (the loop sets
        // itemBought instead), so "-- No item to buy." always fires alongside the
        // "don't need to pay" message outside shops, and an unknown item name in a
        // shop produces no feedback at all. Left as-is pending intent; verify.
        boolean itemGotten = false;
        ArrayList<Item> currentInv = currentRoomObject.getInv();
        if (currentInv.size() > 0 && currentRoomObject.getIsShop())
        {
            boolean itemBought = false;
            Iterator<Item> i = currentRoomObject.getInv().iterator();
            while(i.hasNext() && !itemBought)
            {
                Item i2 = i.next();
                if (i2.getName().toLowerCase().equals(getItemName))
                {
                    if (player.buyItem(i2))
                    {
                        i.remove();
                    }
                    itemBought = true;
                }
            }
        }
        if (!itemGotten && !currentRoomObject.getIsShop())
        {
            display("-- No item to buy.");
        }
        if (!currentRoomObject.getIsShop())
        {
            display("-- You don't need to pay for that. Just get it.");
        }
        recognized = true;
    }
    // inventory / inv / i — show health, gold, and carried items.
    if ((tempInput.toLowerCase().contains("inventory")
            || tempInput.toLowerCase().equals("inv")
            || tempInput.toLowerCase().equals("i"))
            && !recognized)
    {
        display("-- Inventory");
        display("- Health: " + player.getHealth());
        display("- " + player.getGold() + " gold");
        for (int i = 0; i < player.getInv().size(); i++)
        {
            display("- " + player.getInv().get(i).getName());
        }
        recognized = true;
    }
    // use <item> — healing potions, item-puzzle solutions, and the wraith's bane.
    if (tempInput.toLowerCase().contains("use ") && !recognized)
    {
        String useItem = tempInput.toLowerCase().substring(4);
        boolean itemUsed = false;
        boolean inInventory = false;
        Iterator<Item> i = player.getInv().iterator();
        while(i.hasNext() && !itemUsed)
        {
            Item i2 = i.next();
            if (i2 instanceof Usable && i2.getName().toLowerCase().equals(useItem))
            {
                Usable i3 = (Usable) i2;
                // Healing items are consumed on use.
                if (i3.getHealthRecov() > 0)
                {
                    player.heal(i3.getHealthRecov());
                    i.remove();
                    itemUsed = true;
                }
                // Item may solve this room's item puzzle.
                if (currentRoomObject.getPuzzle() != null
                        && currentRoomObject.getPuzzle() instanceof ItemPuzzle)
                {
                    ItemPuzzle ip = (ItemPuzzle) currentRoomObject.getPuzzle();
                    if (i3.getName().toLowerCase().equals(ip.getSolution().toLowerCase()))
                    {
                        ip.setSolved(player);
                        itemUsed = true;
                    }
                }
                // Special case: the Silver Dagger instantly kills the wraith (ID 0).
                if (currentRoomObject.getMonster() != null
                        && currentRoomObject.getMonster().getMonsterID() == 0
                        && i3.getName().equals("Silver Dagger"))
                {
                    display("-- The silver dagger vanquished the wraith!");
                    currentRoomObject.killMonster(player);
                    itemUsed = true;
                }
                inInventory = true;
            }
        }
        if (!inInventory)
        {
            display("-- Item name not recognized.");
        }
        else if (inInventory && !itemUsed)
        {
            display("-- Cannot use that item right now.");
        }
        recognized = true;
    }
    // equip <weapon>
    if (tempInput.toLowerCase().contains("equip ") && !recognized)
    {
        String useItem = tempInput.toLowerCase().substring(6);
        boolean inInventory = false;
        Iterator<Item> i = player.getInv().iterator();
        while(i.hasNext())
        {
            Item i2 = i.next();
            if (i2 instanceof Weapon && i2.getName().toLowerCase().equals(useItem))
            {
                Weapon i3 = (Weapon) i2;
                player.setEquipment(i3);
                display("-- Equipped " + i3.getName());
                inInventory = true;
            }
        }
        if (!inInventory)
        {
            display("-- Item name not recognized.");
        }
        recognized = true;
    }
    // hint — show the current puzzle's hint, if any.
    if (tempInput.toLowerCase().contains("hint") && !recognized)
    {
        if (currentRoomObject.getPuzzle() != null)
        {
            display(currentRoomObject.getPuzzle().getHint());
        }
        else
        {
            display("-- No puzzle is present.");
        }
        recognized = true;
    }
    // Fallback: treat the input as a riddle answer, else report unrecognized.
    if (!recognized)
    {
        if (currentRoomObject.getPuzzle() != null
                && currentRoomObject.getPuzzle() instanceof Riddle)
        {
            Riddle r = (Riddle) currentRoomObject.getPuzzle();
            if (tempInput.toLowerCase().equals(r.getSolution().toLowerCase()))
            {
                r.setSolved(player);
                recognized = true;
            }
        }
        if (!recognized)
        {
            display("-- Command not recognized.");
        }
    }
}
}
| |
/*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.testutil;
import android.os.Looper;
import com.google.android.exoplayer2.BasePlayer;
import com.google.android.exoplayer2.ExoPlaybackException;
import com.google.android.exoplayer2.ExoPlayer;
import com.google.android.exoplayer2.PlaybackParameters;
import com.google.android.exoplayer2.Player;
import com.google.android.exoplayer2.PlayerMessage;
import com.google.android.exoplayer2.SeekParameters;
import com.google.android.exoplayer2.Timeline;
import com.google.android.exoplayer2.source.MediaSource;
import com.google.android.exoplayer2.source.TrackGroupArray;
import com.google.android.exoplayer2.trackselection.TrackSelectionArray;
/**
* An abstract {@link ExoPlayer} implementation that throws {@link UnsupportedOperationException}
* from every method.
*/
public abstract class StubExoPlayer extends BasePlayer implements ExoPlayer {

    // --- Component accessors (audio/video/text/metadata) ---

    @Override
    public AudioComponent getAudioComponent() {
        throw new UnsupportedOperationException();
    }

    @Override
    public VideoComponent getVideoComponent() {
        throw new UnsupportedOperationException();
    }

    @Override
    public TextComponent getTextComponent() {
        throw new UnsupportedOperationException();
    }

    @Override
    public MetadataComponent getMetadataComponent() {
        throw new UnsupportedOperationException();
    }

    // --- Loopers ---

    @Override
    public Looper getPlaybackLooper() {
        throw new UnsupportedOperationException();
    }

    @Override
    public Looper getApplicationLooper() {
        throw new UnsupportedOperationException();
    }

    // --- Listener registration ---

    @Override
    public void addListener(Player.EventListener listener) {
        throw new UnsupportedOperationException();
    }

    @Override
    public void removeListener(Player.EventListener listener) {
        throw new UnsupportedOperationException();
    }

    // --- Playback state and control ---

    @Override
    public int getPlaybackState() {
        throw new UnsupportedOperationException();
    }

    @Override
    public ExoPlaybackException getPlaybackError() {
        throw new UnsupportedOperationException();
    }

    @Override
    public void retry() {
        throw new UnsupportedOperationException();
    }

    @Override
    public void prepare(MediaSource mediaSource) {
        throw new UnsupportedOperationException();
    }

    @Override
    public void prepare(MediaSource mediaSource, boolean resetPosition, boolean resetState) {
        throw new UnsupportedOperationException();
    }

    @Override
    public void setPlayWhenReady(boolean playWhenReady) {
        throw new UnsupportedOperationException();
    }

    @Override
    public boolean getPlayWhenReady() {
        throw new UnsupportedOperationException();
    }

    @Override
    public void setRepeatMode(@RepeatMode int repeatMode) {
        throw new UnsupportedOperationException();
    }

    @Override
    public int getRepeatMode() {
        throw new UnsupportedOperationException();
    }

    @Override
    public void setShuffleModeEnabled(boolean shuffleModeEnabled) {
        throw new UnsupportedOperationException();
    }

    @Override
    public boolean getShuffleModeEnabled() {
        throw new UnsupportedOperationException();
    }

    @Override
    public boolean isLoading() {
        throw new UnsupportedOperationException();
    }

    @Override
    public void seekTo(int windowIndex, long positionMs) {
        throw new UnsupportedOperationException();
    }

    @Override
    public void setPlaybackParameters(PlaybackParameters playbackParameters) {
        throw new UnsupportedOperationException();
    }

    @Override
    public PlaybackParameters getPlaybackParameters() {
        throw new UnsupportedOperationException();
    }

    @Override
    public void setSeekParameters(SeekParameters seekParameters) {
        throw new UnsupportedOperationException();
    }

    @Override
    public SeekParameters getSeekParameters() {
        throw new UnsupportedOperationException();
    }

    @Override
    public void stop(boolean resetStateAndPosition) {
        throw new UnsupportedOperationException();
    }

    @Override
    public void release() {
        throw new UnsupportedOperationException();
    }

    // --- Messages (createMessage is current; sendMessages variants are deprecated) ---

    @Override
    public PlayerMessage createMessage(PlayerMessage.Target target) {
        throw new UnsupportedOperationException();
    }

    @Override
    @Deprecated
    @SuppressWarnings("deprecation")
    public void sendMessages(ExoPlayerMessage... messages) {
        throw new UnsupportedOperationException();
    }

    @Override
    @Deprecated
    @SuppressWarnings("deprecation")
    public void blockingSendMessages(ExoPlayerMessage... messages) {
        throw new UnsupportedOperationException();
    }

    // --- Renderer and track information ---

    @Override
    public int getRendererCount() {
        throw new UnsupportedOperationException();
    }

    @Override
    public int getRendererType(int index) {
        throw new UnsupportedOperationException();
    }

    @Override
    public TrackGroupArray getCurrentTrackGroups() {
        throw new UnsupportedOperationException();
    }

    @Override
    public TrackSelectionArray getCurrentTrackSelections() {
        throw new UnsupportedOperationException();
    }

    @Override
    public Object getCurrentManifest() {
        throw new UnsupportedOperationException();
    }

    // --- Timeline and position ---

    @Override
    public Timeline getCurrentTimeline() {
        throw new UnsupportedOperationException();
    }

    @Override
    public int getCurrentPeriodIndex() {
        throw new UnsupportedOperationException();
    }

    @Override
    public int getCurrentWindowIndex() {
        throw new UnsupportedOperationException();
    }

    @Override
    public long getDuration() {
        throw new UnsupportedOperationException();
    }

    @Override
    public long getCurrentPosition() {
        throw new UnsupportedOperationException();
    }

    @Override
    public long getBufferedPosition() {
        throw new UnsupportedOperationException();
    }

    @Override
    public long getTotalBufferedDuration() {
        throw new UnsupportedOperationException();
    }

    // --- Ad playback ---

    @Override
    public boolean isPlayingAd() {
        throw new UnsupportedOperationException();
    }

    @Override
    public int getCurrentAdGroupIndex() {
        throw new UnsupportedOperationException();
    }

    @Override
    public int getCurrentAdIndexInAdGroup() {
        throw new UnsupportedOperationException();
    }

    @Override
    public long getContentPosition() {
        throw new UnsupportedOperationException();
    }

    @Override
    public long getContentBufferedPosition() {
        throw new UnsupportedOperationException();
    }

    @Override
    public void setForegroundMode(boolean foregroundMode) {
        throw new UnsupportedOperationException();
    }
}
| |
package nz.ac.auckland.morc.tests.orchestrated;
import nz.ac.auckland.morc.MorcMethods;
import nz.ac.auckland.morc.MorcTest;
import nz.ac.auckland.morc.MorcTestBuilder;
import nz.ac.auckland.morc.mock.MockDefinition;
import nz.ac.auckland.morc.specification.AsyncOrchestratedTestBuilder;
import nz.ac.auckland.morc.specification.OrchestratedTestSpecification;
import org.apache.camel.Endpoint;
import org.apache.camel.Exchange;
import org.apache.camel.Processor;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.seda.SedaEndpoint;
import org.apache.camel.test.junit4.CamelTestSupport;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.test.annotation.DirtiesContext;
import java.util.Collection;
public class EachCaseMultiExpectationSyncFailureTest extends CamelTestSupport implements MorcMethods {
Logger logger = LoggerFactory.getLogger(EachCaseMultiExpectationSyncFailureTest.class);
@Override
protected RouteBuilder createRouteBuilder() throws Exception {
    // Each route simulates a service that, on receiving a request, emits a fixed
    // sequence of messages to the "vm:s"/"vm:a"/"vm:x" mock endpoints with 1 s
    // gaps. The sequences are deliberately out of order (or wrong in count) so
    // the orchestrated tests in this class FAIL their ordering expectations.
    return new RouteBuilder() {
        @Override
        public void configure() throws Exception {
            // Totally-ordered expectation, but emits 1, 3, 2 — out of order.
            from("vm:totalOrderOrderedEndpoint")
                    .process(new Processor() {
                        @Override
                        public void process(Exchange exchange) throws Exception {
                            template.sendBody("vm:s", "1");
                            Thread.sleep(1000);
                            template.sendBody("vm:s", "3");
                            Thread.sleep(1000);
                            template.sendBody("vm:s", "2");
                        }
                    });
            // Total order across two endpoints, interleaved incorrectly.
            from("vm:totalOrderUnorderedEndpoint")
                    .process(new Processor() {
                        @Override
                        public void process(Exchange exchange) throws Exception {
                            template.sendBody("vm:s", "1");
                            Thread.sleep(1000);
                            template.sendBody("vm:a", "2");
                            Thread.sleep(1000);
                            template.sendBody("vm:s", "2");
                            Thread.sleep(1000);
                            template.sendBody("vm:a", "1");
                        }
                    });
            // Partial-order cases: "vm:a" messages arrive in the wrong relative order.
            from("vm:partialOrderOrderedEndpoint")
                    .process(new Processor() {
                        @Override
                        public void process(Exchange exchange) throws Exception {
                            template.sendBody("vm:s", "1");
                            Thread.sleep(1000);
                            template.sendBody("vm:s", "2");
                            Thread.sleep(1000);
                            template.sendBody("vm:a", "2");
                            Thread.sleep(1000);
                            template.sendBody("vm:a", "1");
                        }
                    });
            from("vm:partialOrderUnorderedEndpoint")
                    .process(new Processor() {
                        @Override
                        public void process(Exchange exchange) throws Exception {
                            template.sendBody("vm:a", "2");
                            Thread.sleep(1000);
                            template.sendBody("vm:a", "1");
                            Thread.sleep(1000);
                            template.sendBody("vm:s", "1");
                            Thread.sleep(1000);
                            template.sendBody("vm:s", "2");
                        }
                    });
            // Sends the same body twice where distinct bodies are expected.
            from("vm:partialOrderUnorderedEndpoint2")
                    .process(new Processor() {
                        @Override
                        public void process(Exchange exchange) throws Exception {
                            template.sendBody("vm:a", "1");
                            Thread.sleep(1000);
                            template.sendBody("vm:a", "1");
                        }
                    });
            from("vm:partialOrderUnorderedEndpoint3")
                    .process(new Processor() {
                        @Override
                        public void process(Exchange exchange) throws Exception {
                            template.sendBody("vm:s", "1");
                            Thread.sleep(1000);
                            template.sendBody("vm:s", "2");
                            Thread.sleep(1000);
                            template.sendBody("vm:s", "3");
                            Thread.sleep(1000);
                            template.sendBody("vm:a", "3");
                            Thread.sleep(1000);
                            template.sendBody("vm:a", "1");
                            Thread.sleep(1000);
                            template.sendBody("vm:a", "1");
                        }
                    });
            // No-order cases: duplicate or swapped bodies on "vm:x".
            from("vm:testNoOrderOrderedEndpoint")
                    .process(new Processor() {
                        @Override
                        public void process(Exchange exchange) throws Exception {
                            template.sendBody("vm:x", "1");
                            Thread.sleep(1000);
                            template.sendBody("vm:x", "1");
                        }
                    });
            from("vm:testNoOrderOrderedEndpoint2")
                    .process(new Processor() {
                        @Override
                        public void process(Exchange exchange) throws Exception {
                            template.sendBody("vm:x", "1");
                            Thread.sleep(1000);
                            template.sendBody("vm:x", "2");
                            Thread.sleep(1000);
                            template.sendBody("vm:s", "2");
                            Thread.sleep(1000);
                            template.sendBody("vm:s", "1");
                        }
                    });
            from("vm:testNoOrderUnorderedEndpoint")
                    .process(new Processor() {
                        @Override
                        public void process(Exchange exchange) throws Exception {
                            template.sendBody("vm:x", "2");
                            Thread.sleep(1000);
                            template.sendBody("vm:x", "1");
                            Thread.sleep(1000);
                            template.sendBody("vm:s", "2");
                            Thread.sleep(1000);
                            template.sendBody("vm:s", "1");
                        }
                    });
            from("vm:testNoOrderUnorderedEndpoint2")
                    .process(new Processor() {
                        @Override
                        public void process(Exchange exchange) throws Exception {
                            template.sendBody("vm:x", "1");
                            Thread.sleep(1000);
                            template.sendBody("vm:x", "1");
                            Thread.sleep(1000);
                        }
                    });
        }
    };
}
    /**
     * Creates a MorcTestBuilder whose configure() is a no-op; the tests below
     * only use it as a factory for syncMock()/asyncMock() definitions.
     */
    public MorcTestBuilder createMorcTestBuilder() {
        return new MorcTestBuilder() {
            @Override
            protected void configure() {
            }
        };
    }
    /**
     * Runs the given specification as a MorcTest and asserts that it FAILS:
     * each spec above declares an ordering the routes deliberately violate.
     *
     * Message-count assertion errors ("Received message count. Expected")
     * are filtered out so only genuine ordering failures satisfy the final
     * assertNotNull. If the test passes, or fails only on message count,
     * {@code e} stays null and this method itself fails.
     */
    private void runTest(OrchestratedTestSpecification spec) throws Exception {
        Collection<Endpoint> endpoints = context.getEndpoints();
        for (Endpoint endpoint : endpoints) {
            // Drop queued messages on shutdown so one test can't poison the next.
            if (endpoint instanceof SedaEndpoint) ((SedaEndpoint) endpoint).setPurgeWhenStopping(true);
        }
        AssertionError e = null;
        try {
            MorcTest test = new MorcTest(spec);
            test.setUp();
            test.runOrchestratedTest();
        } catch (AssertionError ex) {
            if (!ex.getMessage().contains("Received message count. Expected"))
                e = ex;
            // NOTE(review): logs e, which is still null for filtered errors.
            logger.info("Exception ({}): ", spec.getDescription(), e);
        }
        assertNotNull(e);
    }
@DirtiesContext
@Test
public void testTotalOrderOrderedEndpoint() throws Exception {
MorcTestBuilder morcMethods = createMorcTestBuilder();
OrchestratedTestSpecification spec = new AsyncOrchestratedTestBuilder("Total Order Ordered Endpoint", "vm:totalOrderOrderedEndpoint")
.input(text("0"))
.addMock(morcMethods.syncMock("vm:s").expectation(text("1")))
.addMock(morcMethods.syncMock("vm:s").expectation(text("2")))
.addMock(morcMethods.syncMock("vm:s").expectation(text("3")))
.build();
runTest(spec);
}
@DirtiesContext
@Test
public void testTotalOrderUnOrderedEndpoint() throws Exception {
MorcTestBuilder morcMethods = createMorcTestBuilder();
OrchestratedTestSpecification spec = new AsyncOrchestratedTestBuilder("Total Order Unordered Endpoint", "vm:totalOrderUnorderedEndpoint")
.input(text("0"))
.addMock(morcMethods.syncMock("vm:s").expectation(text("1")))
.addMock(morcMethods.syncMock("vm:a").expectation(text("1")).endpointNotOrdered())
.addMock(morcMethods.syncMock("vm:a").expectation(text("2")).endpointNotOrdered())
.addMock(morcMethods.syncMock("vm:s").expectation(text("2")))
.build();
runTest(spec);
}
@DirtiesContext
@Test
public void testPartialOrderOrderedEndpoint() throws Exception {
MorcTestBuilder morcMethods = createMorcTestBuilder();
OrchestratedTestSpecification spec = new AsyncOrchestratedTestBuilder("Partial Order Ordered Endpoint", "vm:partialOrderOrderedEndpoint")
.input(text("0"))
.addMock(morcMethods.syncMock("vm:s").expectation(text("1")))
.addMock(morcMethods.asyncMock("vm:a").expectation(text("1")))
.addMock(morcMethods.syncMock("vm:s").expectation(text("2")))
.addMock(morcMethods.asyncMock("vm:a").expectation(text("2")))
.build();
runTest(spec);
}
@DirtiesContext
@Test
public void testPartialOrderUnorderedEndpoint() throws Exception {
MorcTestBuilder morcMethods = createMorcTestBuilder();
OrchestratedTestSpecification spec = new AsyncOrchestratedTestBuilder("Partial Order Unordered Endpoint", "vm:partialOrderUnorderedEndpoint")
.input(text("0"))
.addMock(morcMethods.syncMock("vm:s").expectation(text("1")))
.addMock(morcMethods.syncMock("vm:s").expectation(text("2")))
.addMock(morcMethods.syncMock("vm:a").expectation(text("1")).endpointNotOrdered())
.addMock(morcMethods.syncMock("vm:a").expectation(text("2")).endpointNotOrdered())
.build();
runTest(spec);
}
@DirtiesContext
@Test
public void testPartialOrderUnorderedEndpoint2() throws Exception {
MorcTestBuilder morcMethods = createMorcTestBuilder();
OrchestratedTestSpecification spec = new AsyncOrchestratedTestBuilder("Partial Order Unordered Endpoint 2", "vm:partialOrderUnorderedEndpoint2")
.input(text("0"))
.addMock(morcMethods.syncMock("vm:a").expectation(text("1")).endpointNotOrdered())
.addMock(morcMethods.syncMock("vm:a").expectation(text("2")).endpointNotOrdered())
.build();
runTest(spec);
}
@DirtiesContext
@Test
public void testPartialOrderUnorderedEndpoint3() throws Exception {
MorcTestBuilder morcMethods = createMorcTestBuilder();
OrchestratedTestSpecification spec = new AsyncOrchestratedTestBuilder("Partial Order Unordered Endpoint 3", "vm:partialOrderUnorderedEndpoint3")
.input(text("0"))
.addMock(morcMethods.syncMock("vm:s").expectation(text("1")))
.addMock(morcMethods.syncMock("vm:a").expectation(text("1")).endpointNotOrdered().ordering(MockDefinition.OrderingType.PARTIAL))
.addMock(morcMethods.syncMock("vm:a").expectation(text("2")).endpointNotOrdered().ordering(MockDefinition.OrderingType.PARTIAL))
.addMock(morcMethods.syncMock("vm:s").expectation(text("2")))
.addMock(morcMethods.syncMock("vm:s").expectation(text("3")))
.addMock(morcMethods.syncMock("vm:a").expectation(text("3")).endpointNotOrdered().ordering(MockDefinition.OrderingType.PARTIAL))
.build();
runTest(spec);
}
@DirtiesContext
@Test
public void testNoOrderOrderedEndpoint() throws Exception {
MorcTestBuilder morcMethods = createMorcTestBuilder();
OrchestratedTestSpecification spec = new AsyncOrchestratedTestBuilder("No Order Ordered Endpoint", "vm:testNoOrderOrderedEndpoint")
.input(text("0"))
.addMock(morcMethods.syncMock("vm:x").expectation(text("1")).ordering(MockDefinition.OrderingType.NONE))
.addMock(morcMethods.syncMock("vm:x").expectation(text("2")).ordering(MockDefinition.OrderingType.NONE))
.build();
runTest(spec);
}
@DirtiesContext
@Test
public void testNoOrderOrderedEndpoint2() throws Exception {
MorcTestBuilder morcMethods = createMorcTestBuilder();
OrchestratedTestSpecification spec = new AsyncOrchestratedTestBuilder("No Order Ordered Endpoint 2", "vm:testNoOrderOrderedEndpoint2")
.input(text("0"))
.addMock(morcMethods.syncMock("vm:s").expectation(text("1")))
.addMock(morcMethods.syncMock("vm:x").expectation(text("1")).ordering(MockDefinition.OrderingType.NONE))
.addMock(morcMethods.syncMock("vm:s").expectation(text("2")))
.addMock(morcMethods.syncMock("vm:x").expectation(text("2")).ordering(MockDefinition.OrderingType.NONE))
.build();
runTest(spec);
}
@DirtiesContext
@Test
public void testNoOrderUnorderedEndpoint() throws Exception {
MorcTestBuilder morcMethods = createMorcTestBuilder();
OrchestratedTestSpecification spec = new AsyncOrchestratedTestBuilder("No Order Unordered Endpoint", "vm:testNoOrderUnorderedEndpoint")
.input(text("0"))
.addMock(morcMethods.syncMock("vm:s").expectation(text("1")))
.addMock(morcMethods.syncMock("vm:s").expectation(text("2")))
.addMock(morcMethods.syncMock("vm:x").expectation(text("1")).ordering(MockDefinition.OrderingType.NONE).endpointNotOrdered())
.addMock(morcMethods.syncMock("vm:x").expectation(text("2")).ordering(MockDefinition.OrderingType.NONE).endpointNotOrdered())
.build();
runTest(spec);
}
@DirtiesContext
@Test
public void testNoOrderUnorderedEndpoint2() throws Exception {
MorcTestBuilder morcMethods = createMorcTestBuilder();
OrchestratedTestSpecification spec = new AsyncOrchestratedTestBuilder("No Order Unordered Endpoint 2", "vm:testNoOrderUnorderedEndpoint2")
.input(text("0"))
.addMock(morcMethods.syncMock("vm:x").expectation(text("1")).ordering(MockDefinition.OrderingType.NONE).endpointNotOrdered())
.addMock(morcMethods.syncMock("vm:x").expectation(text("2")).ordering(MockDefinition.OrderingType.NONE).endpointNotOrdered())
.build();
runTest(spec);
}
}
| |
package com.wang.baseadapter.itemdecoration;
import android.graphics.Canvas;
import android.graphics.Rect;
import android.graphics.Region;
import android.graphics.drawable.Drawable;
import android.os.Build;
import android.util.Log;
import android.util.SparseArray;
import android.view.View;
import android.view.ViewGroup;
import androidx.annotation.NonNull;
import androidx.recyclerview.widget.LinearLayoutManager;
import androidx.recyclerview.widget.RecyclerView;
import com.wang.baseadapter.SwipeStickyAdapter;
/**
 * A RecyclerView ItemDecoration that pins ("sticks") the most recent header
 * item of a registered view type to the top of the list while scrolling.
 *
 * The header is rendered from a detached ViewHolder created/bound via the
 * RecyclerView's adapter; {@link #onDraw} computes its vertical offset and
 * {@link #onDrawOver} paints it above the list content.
 *
 * NOTE(review): createPinnedHeader casts the layout manager to
 * LinearLayoutManager, so this decoration only supports linear layouts —
 * confirm callers never attach it to a grid/staggered manager.
 */
public class StickyHeaderDecoration extends RecyclerView.ItemDecoration {

    /** Default creator: every position of a registered view type is sticky. */
    private static final StickyHeaderCreator DEFAULT_CREATOR = new StickyHeaderCreator() {
        @Override
        public boolean create(RecyclerView parent, int adapterPosition) {
            return true;
        }
    };

    // Adapter position of the header currently rendered sticky (-1 = none).
    private int mHeaderPosition;
    // View type of the current sticky header (-1 = none).
    private int mCurrentItemType;
    // Y translation of the sticky header; negative while pushed off-screen.
    private int mStickyHeaderTop;
    // Set by the data observer; currently consulted in updateStickyHeader.
    private boolean mIsAdapterDataChanged;
    // Clip rect computed in onDraw, reused in onDrawOver of the same pass.
    private Rect mClipBounds;
    private RecyclerView.Adapter mAdapter;
    // Detached view that is drawn as the sticky header.
    private View mStickyView;
    private Drawable mBackground;
    private boolean mBackgroundSet = false;
    // Whether header items carry a top margin (shifts header detection by one).
    private boolean mHeaderHaveMarginTop;
    // Registered sticky view types mapped to their creator callbacks.
    private final SparseArray<StickyHeaderCreator> mTypeStickyHeaderFactories = new SparseArray<>();
    private final RecyclerView.AdapterDataObserver mAdapterDataObserver = new RecyclerView.AdapterDataObserver() {
        @Override
        public void onChanged() {
            mIsAdapterDataChanged = true;
        }
    };

    public StickyHeaderDecoration(int... viewTypes) {
        this(false, viewTypes);
    }

    public StickyHeaderDecoration(boolean headerHaveMarginTop, int... viewTypes) {
        this(headerHaveMarginTop);
        if (viewTypes != null && viewTypes.length > 0) {
            for (int viewType : viewTypes) {
                registerTypePinnedHeader(viewType);
            }
        }
    }

    public StickyHeaderDecoration() {
        this(false);
    }

    public StickyHeaderDecoration(boolean headerHaveMarginTop) {
        this.mHeaderPosition = -1;
        this.mCurrentItemType = -1;
        mHeaderHaveMarginTop = headerHaveMarginTop;
    }

    @Override
    public void onDraw(@NonNull Canvas c, @NonNull RecyclerView parent, @NonNull RecyclerView.State state) {
        createPinnedHeader(parent);
        if (mStickyView != null) {
            // The child under the header's bottom edge; if it is itself a
            // sticky item, the current header is being pushed upward by it.
            View v = parent.findChildViewUnder(c.getWidth() / 2f, mStickyView.getHeight() + 0.5f);
            int position;
            if (v != null && isStickyView(parent, (position = parent.getChildAdapterPosition(v)))) {
                mStickyHeaderTop = v.getTop() - mStickyView.getHeight() - (position == 0 ? parent.getPaddingTop() : 0);
            } else {
                mStickyHeaderTop = 0;
            }
            mClipBounds = c.getClipBounds();
            mClipBounds.top = mStickyHeaderTop;
            mClipBounds.bottom = mStickyHeaderTop + mStickyView.getHeight();
        }
    }

    @Override
    public void onDrawOver(@NonNull Canvas c, @NonNull RecyclerView parent, @NonNull RecyclerView.State state) {
        if (mStickyView != null) {
            c.save();
            mClipBounds.top = 0;
            c.clipRect(mClipBounds);
            c.translate(0, mStickyHeaderTop);
            mStickyView.draw(c);
            c.restore();
        }
    }

    /**
     * Gets the position of the header under the specified (x, y) coordinates.
     *
     * @param x x-coordinate
     * @param y y-coordinate
     * @return position of header, or -1 if not found
     */
    public int findHeaderPositionUnder(int x, int y) {
        if (mStickyView != null) {
            if (x >= 0 && x < mStickyView.getRight() && y >= 0 && y < mStickyView.getBottom() + mStickyHeaderTop) {
                return mHeaderPosition;
            }
        }
        return -1;
    }

    /** @return view type of the current sticky header, or -1 when none. */
    public int findCurrentHeaderViewType() {
        return mCurrentItemType;
    }

    /**
     * Resolves which header (if any) should be sticky for the current scroll
     * position and, when it changed, creates/binds/measures a detached view
     * for it.
     */
    protected void createPinnedHeader(RecyclerView parent) {
        updateStickyHeader(parent);
        RecyclerView.LayoutManager layoutManager = parent.getLayoutManager();
        // Bug fix: guard against a RecyclerView without an adapter; the code
        // below dereferences mAdapter unconditionally.
        if (mAdapter == null || layoutManager == null || layoutManager.getChildCount() <= 0) {
            return;
        }
        int firstVisiblePosition = ((LinearLayoutManager) layoutManager).findFirstVisibleItemPosition();
        int firstCompletelyVisiblePosition = ((LinearLayoutManager) layoutManager).findFirstCompletelyVisibleItemPosition();
        int headerPosition = findStickyHeaderPosition(parent, firstVisiblePosition);
        if (mHeaderHaveMarginTop && isNotViewHolderTop(parent, headerPosition)) {
            // The header's top margin is still visible: treat the previous
            // header as the sticky one instead.
            firstCompletelyVisiblePosition--;
            headerPosition = findStickyHeaderPosition(parent, firstVisiblePosition - 1);
        }
        if (headerPosition == -1 || (headerPosition == firstCompletelyVisiblePosition)) {
            // No header above the viewport, or the header itself is fully
            // visible — nothing to pin.
            resetPinnedHeader();
            return;
        }
        if (headerPosition >= 0 && mHeaderPosition != headerPosition) {
            mHeaderPosition = headerPosition;
            int viewType = mAdapter.getItemViewType(headerPosition);
            mCurrentItemType = viewType;
            RecyclerView.ViewHolder stickyViewHolder = mAdapter.createViewHolder(parent, viewType);
            // SwipeStickyAdapter exposes a dedicated bind path for the
            // detached sticky copy.
            if (mAdapter instanceof SwipeStickyAdapter) {
                ((SwipeStickyAdapter) mAdapter).onBindSwipeViewHolder(stickyViewHolder, headerPosition);
            } else {
                mAdapter.bindViewHolder(stickyViewHolder, headerPosition);
            }
            mStickyView = stickyViewHolder.itemView;
            if (mBackgroundSet) {
                if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
                    mStickyView.setBackground(mBackground);
                } else {
                    mStickyView.setBackgroundDrawable(mBackground);
                }
            }
            // read layout parameters
            ViewGroup.LayoutParams layoutParams = mStickyView.getLayoutParams();
            if (layoutParams == null) {
                layoutParams = new ViewGroup.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.WRAP_CONTENT);
                mStickyView.setLayoutParams(layoutParams);
            }
            int heightMode = View.MeasureSpec.getMode(layoutParams.height);
            int heightSize = View.MeasureSpec.getSize(layoutParams.height);
            if (heightMode == View.MeasureSpec.UNSPECIFIED) {
                heightMode = View.MeasureSpec.EXACTLY;
            }
            // Cap the header at the RecyclerView's usable height.
            int maxHeight = parent.getHeight() - parent.getPaddingTop() - parent.getPaddingBottom();
            if (heightSize > maxHeight) {
                heightSize = maxHeight;
            }
            // measure & layout the detached header view
            int ws = View.MeasureSpec.makeMeasureSpec(parent.getWidth() - parent.getPaddingLeft() - parent.getPaddingRight(), View.MeasureSpec.EXACTLY);
            int hs = View.MeasureSpec.makeMeasureSpec(heightSize, heightMode);
            mStickyView.measure(ws, hs);
            mStickyView.layout(0, 0, mStickyView.getMeasuredWidth(), mStickyView.getMeasuredHeight());
        }
    }

    /**
     * Walks backwards from {@code fromPosition} to find the nearest sticky
     * header position, or -1 when there is none.
     */
    private int findStickyHeaderPosition(RecyclerView parent, int fromPosition) {
        // Bug fix: valid adapter positions are 0..itemCount-1; the previous
        // check used '>' and let fromPosition == itemCount through.
        if (fromPosition >= mAdapter.getItemCount() || fromPosition < 0) {
            return -1;
        }
        for (int position = fromPosition; position >= 0; position--) {
            final int viewType = mAdapter.getItemViewType(position);
            if (isStickyViewType(parent, position, viewType)) {
                return position;
            }
        }
        return -1;
    }

    /** True when the item's top margin is (partially) visible on screen. */
    private boolean isNotViewHolderTop(RecyclerView parent, int position) {
        RecyclerView.ViewHolder holder = parent.findViewHolderForAdapterPosition(position);
        ViewGroup.LayoutParams params = holder == null ? null : holder.itemView.getLayoutParams();
        if (params instanceof ViewGroup.MarginLayoutParams && ((ViewGroup.MarginLayoutParams) params).topMargin > 0) {
            return holder.itemView.getTop() > 0;
        }
        return false;
    }

    /** True when the view type is registered and its creator accepts the position. */
    private boolean isStickyViewType(RecyclerView parent, int adapterPosition, int viewType) {
        StickyHeaderCreator stickyHeaderCreator = mTypeStickyHeaderFactories.get(viewType);
        return stickyHeaderCreator != null && stickyHeaderCreator.create(parent, adapterPosition);
    }

    private boolean isStickyView(RecyclerView parent, View v) {
        if (v == null) {
            return false;
        }
        int position = parent.getChildAdapterPosition(v);
        return isStickyView(parent, position);
    }

    private boolean isStickyView(RecyclerView parent, int position) {
        if (position == RecyclerView.NO_POSITION) {
            return false;
        }
        return isStickyViewType(parent, position, mAdapter.getItemViewType(position));
    }

    /**
     * Re-reads the adapter from the RecyclerView, resetting cached header
     * state and re-registering the data observer when it changed.
     */
    private void updateStickyHeader(RecyclerView parent) {
        RecyclerView.Adapter adapter = parent.getAdapter();
        if (mAdapter != adapter || mIsAdapterDataChanged) {
            resetPinnedHeader();
            if (mAdapter != null) {
                mAdapter.unregisterAdapterDataObserver(mAdapterDataObserver);
            }
            mAdapter = adapter;
            if (mAdapter != null) {
                mAdapter.registerAdapterDataObserver(mAdapterDataObserver);
            }
        }
    }

    /** Clears the cached sticky header so it will be rebuilt on next draw. */
    private void resetPinnedHeader() {
        mHeaderPosition = -1;
        mCurrentItemType = -1;
        mStickyView = null;
    }

    /** @return the detached view currently drawn as the sticky header, or null. */
    public View getStickyView() {
        return mStickyView;
    }

    /** Sets a background drawable applied to every (current and future) sticky view. */
    public void setBackground(Drawable background) {
        mBackground = background;
        mBackgroundSet = true;
        if (mStickyView != null) {
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
                mStickyView.setBackground(mBackground);
            } else {
                mStickyView.setBackgroundDrawable(mBackground);
            }
        }
    }

    public void setHeaderHaveMarginTop(boolean headerHaveMarginTop) {
        mHeaderHaveMarginTop = headerHaveMarginTop;
    }

    /** Registers a view type whose items are always sticky. */
    public void registerTypePinnedHeader(int itemType) {
        this.registerTypePinnedHeader(itemType, DEFAULT_CREATOR);
    }

    /** Registers a view type with a custom per-position sticky decision. */
    public void registerTypePinnedHeader(int itemType, StickyHeaderCreator stickyHeaderCreator) {
        mTypeStickyHeaderFactories.put(itemType, stickyHeaderCreator);
    }

    /** Decides whether the item at an adapter position should act as a sticky header. */
    public interface StickyHeaderCreator {
        boolean create(RecyclerView parent, int adapterPosition);
    }
}
| |
package org.tigris.scarab.tools;
/* ================================================================
* Copyright (c) 2000-2001 CollabNet. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* 3. The end-user documentation included with the redistribution, if
* any, must include the following acknowlegement: "This product includes
* software developed by Collab.Net <http://www.Collab.Net/>."
* Alternately, this acknowlegement may appear in the software itself, if
* and wherever such third-party acknowlegements normally appear.
*
* 4. The hosted project names must not be used to endorse or promote
* products derived from this software without prior written
* permission. For written permission, please contact info@collab.net.
*
* 5. Products derived from this software may not use the "Tigris" or
* "Scarab" names nor may "Tigris" or "Scarab" appear in their names without
* prior written permission of Collab.Net.
*
* THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL COLLAB.NET OR ITS CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
* GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
* IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of Collab.Net.
*/
import java.util.List;
import java.util.ArrayList;
import java.util.Iterator;
// Turbine
import org.apache.turbine.Log;
import org.apache.torque.om.NumberKey;
import org.apache.torque.om.ObjectKey;
import org.apache.torque.om.ComboKey;
import org.apache.turbine.RunData;
import org.apache.turbine.modules.Module;
import org.apache.turbine.tool.IntakeTool;
import org.apache.fulcrum.intake.Intake;
import org.apache.fulcrum.intake.model.Group;
import org.apache.fulcrum.pool.RecyclableSupport;
import org.apache.fulcrum.util.parser.StringValueParser;
import org.apache.fulcrum.util.parser.ValueParser;
import org.apache.commons.util.SequencedHashtable;
// Scarab
import org.tigris.scarab.om.ScarabUser;
import org.tigris.scarab.services.user.UserManager;
import org.tigris.scarab.om.Issue;
import org.tigris.scarab.om.IssuePeer;
import org.tigris.scarab.om.IssueType;
import org.tigris.scarab.om.IssueTypePeer;
import org.tigris.scarab.om.Query;
import org.tigris.scarab.om.QueryPeer;
import org.tigris.scarab.om.IssueTemplateInfo;
import org.tigris.scarab.om.IssueTemplateInfoPeer;
import org.tigris.scarab.om.Depend;
import org.tigris.scarab.om.DependPeer;
import org.tigris.scarab.om.ScopePeer;
import org.tigris.scarab.om.FrequencyPeer;
import org.tigris.scarab.om.Attribute;
import org.tigris.scarab.om.AttributeGroup;
import org.tigris.scarab.om.AttributeGroupPeer;
import org.tigris.scarab.om.Attachment;
import org.tigris.scarab.om.AttachmentPeer;
import org.tigris.scarab.om.AttributeOption;
import org.tigris.scarab.om.ROptionOption;
import org.tigris.scarab.om.AttributeOptionPeer;
import org.tigris.scarab.om.RModuleAttribute;
import org.tigris.scarab.om.RModuleAttributePeer;
import org.tigris.scarab.om.AttributeValue;
import org.tigris.scarab.om.ParentChildAttributeOption;
import org.tigris.scarab.services.module.ModuleEntity;
import org.tigris.scarab.services.module.ModuleManager;
import org.tigris.scarab.util.ScarabConstants;
import org.tigris.scarab.util.word.IssueSearch;
import org.tigris.scarab.om.Report;
import org.tigris.scarab.om.ReportPeer;
/**
* This class is used by the Scarab API
*/
public class ScarabRequestTool
extends RecyclableSupport
implements ScarabRequestScope
{
    /** The object containing request-specific data. */
    private RunData data;
    /**
     * The User currently selected within the Scarab API.
     */
    private ScarabUser user = null;
    /**
     * The Issue currently selected within the Scarab API.
     */
    private Issue issue = null;
    /**
     * The Attribute currently selected within the Scarab API.
     */
    private Attribute attribute = null;
    /**
     * The Attachment currently selected within the Scarab API.
     */
    private Attachment attachment = null;
    /**
     * The Depend (issue dependency) currently selected within the Scarab API.
     */
    private Depend depend = null;
    /**
     * The Query currently selected within the Scarab API.
     */
    private Query query = null;
    /**
     * The IssueTemplateInfo currently selected within the Scarab API.
     */
    private IssueTemplateInfo templateInfo = null;
    /**
     * The IssueType currently selected within the Scarab API.
     */
    private IssueType issueType = null;
    /**
     * The AttributeGroup currently selected.
     */
    private AttributeGroup group = null;
    /**
     * The module selected by the user within this request
     * (resolved from the query string; see getCurrentModule()).
     */
    private ModuleEntity currentModule = null;
    /**
     * The issue type selected by the user within this request
     * (resolved from the query string; see getCurrentIssueType()).
     */
    private IssueType currentIssueType = null;
    /**
     * The issue that is currently being entered.
     */
    private Issue reportingIssue = null;
    /**
     * The most recent query.
     */
    private String currentQuery = null;
    /**
     * The module resolved from intake data; see getModule().
     */
    private ModuleEntity module = null;
    /**
     * The AttributeOption currently selected within the Scarab API.
     */
    private AttributeOption attributeOption = null;
    /**
     * The option-to-option relationship currently selected.
     */
    private ROptionOption roo = null;
    /**
     * The parent/child attribute option pairing currently selected.
     */
    private ParentChildAttributeOption pcao = null;
    /**
     * A list of Issues (e.g. search results).
     */
    private List issueList;
    /**
     * The report generator for the current request.
     */
    private Report reportGenerator = null;
    /**
     * Paging state for issue lists (number of pages, previous/next page).
     */
    private int nbrPages = 0;
    private int prevPage = 0;
    private int nextPage = 0;
    /**
     * Initializes this tool for the current request.
     *
     * @param data the current RunData, stored for later parameter lookups
     */
    public void init(Object data)
    {
        this.data = (RunData)data;
    }
    /**
     * Nulls out the cached user and issue objects.
     * NOTE(review): the many other cached fields (query, module, attachment,
     * ...) are NOT cleared here — confirm they are meant to survive a refresh.
     */
    public void refresh()
    {
        this.user = null;
        this.issue = null;
    }
    /**
     * Default constructor; no initialization is currently required.
     */
    public ScarabRequestTool()
    {
        //intake = new IntakeSystem();
    }
    /**
     * Sets the Attribute to be used within the Scarab API.
     *
     * @param attribute the attribute to cache in this tool
     */
    public void setAttribute (Attribute attribute)
    {
        this.attribute = attribute;
    }
    /**
     * Sets the Depend to be used within the Scarab API.
     *
     * @param depend the dependency to cache in this tool
     */
    public void setDepend (Depend depend)
    {
        this.depend = depend;
    }
    /**
     * Sets the Query to be used within the Scarab API.
     *
     * @param query the query to cache in this tool
     */
    public void setQuery (Query query)
    {
        this.query = query;
    }
    /**
     * Gets the intake tool from the template context.
     * FIXME: why is it getting it from the Module and not from the
     * IntakeService?
     */
    private IntakeTool getIntakeTool()
    {
        return (IntakeTool)Module.getTemplateContext(data)
            .get(ScarabConstants.INTAKE_TOOL);
    }
    /**
     * Gets an instance of a ROptionOption from this tool.
     * If it is null it will return a new instance of an
     * empty ROptionOption and cache it within this tool.
     *
     * @return the cached or newly created instance (never null)
     */
    public ROptionOption getROptionOption()
    {
        if (roo == null)
        {
            roo = ROptionOption.getInstance();
        }
        return roo;
    }
    /**
     * Sets an instance of a ROptionOption.
     *
     * @param roo the option-to-option relationship to cache in this tool
     */
    public void setROptionOption(ROptionOption roo)
    {
        this.roo = roo;
    }
    /**
     * Sets the IssueTemplateInfo to be used within the Scarab API.
     *
     * @param templateInfo the template info to cache in this tool
     */
    public void setIssueTemplateInfo (IssueTemplateInfo templateInfo)
    {
        this.templateInfo = templateInfo;
    }
/**
* A IssueType object for use within the Scarab API.
*/
public void setIssueType (IssueType issuetype)
{
this.issueType = issueType;
}
    /**
     * Gets an instance of a ParentChildAttributeOption from this tool.
     * If it is null it will return a new instance of an
     * empty ParentChildAttributeOption and cache it within this tool.
     *
     * @return the cached or newly created instance (never null)
     */
    public ParentChildAttributeOption getParentChildAttributeOption()
    {
        if (pcao == null)
        {
            pcao = ParentChildAttributeOption.getInstance();
        }
        return pcao;
    }
/**
* Sets an instance of a ParentChildAttributeOption
*/
public void setParentChildAttributeOption(ParentChildAttributeOption roo)
{
this.pcao = pcao;
}
    /**
     * Sets the AttributeOption to be used within the Scarab API.
     *
     * @param option the attribute option to cache in this tool
     */
    public void setAttributeOption (AttributeOption option)
    {
        this.attributeOption = option;
    }
    /**
     * Lazily resolves the AttributeOption: if none is cached, reads the
     * option id from the intake "AttributeOption" group and either retrieves
     * the option by primary key or creates a blank instance when no id was
     * submitted. Exceptions are printed and swallowed (best-effort), so the
     * result may be null on failure.
     *
     * @return the cached or newly resolved attribute option
     */
    public AttributeOption getAttributeOption()
        throws Exception
    {
        try{
            if (attributeOption == null)
            {
                String optId = getIntakeTool()
                    .get("AttributeOption", IntakeTool.DEFAULT_KEY)
                    .get("OptionId").toString();
                if ( optId == null || optId.length() == 0 )
                {
                    attributeOption = AttributeOption.getInstance();
                }
                else
                {
                    attributeOption = AttributeOptionPeer
                        .retrieveByPK(new NumberKey(optId));
                }
            }
        }catch(Exception e){e.printStackTrace();}
        return attributeOption;
    }
    /**
     * @see org.tigris.scarab.tools.ScarabRequestScope#setUser(ScarabUser)
     *
     * @param user the user to cache in this tool
     */
    public void setUser (ScarabUser user)
    {
        this.user = user;
    }
    /**
     * @see org.tigris.scarab.tools.ScarabRequestScope#getUser()
     *
     * @return the cached user, or null when none was set
     */
    public ScarabUser getUser()
    {
        return this.user;
    }
/**
* Return a specific User by ID from within the system.
* You can pass in either a NumberKey or something that
* will resolve to a String object as id.toString() is
* called on everything that isn't a NumberKey.
*/
public ScarabUser getUser(Object id)
throws Exception
{
ScarabUser su = null;
try
{
ObjectKey pk = null;
if (id instanceof NumberKey)
{
pk = (ObjectKey) id;
}
else
{
pk = (ObjectKey)new NumberKey(id.toString());
}
su = UserManager.getInstance(pk);
}
catch (Exception e)
{
e.printStackTrace();
}
return su;
}
    /**
     * Lazily resolves the Attribute: if none is cached, reads the id from
     * the intake "Attribute" group and either retrieves it by primary key or
     * creates a blank instance when no id was submitted. Exceptions are
     * printed and swallowed, so the result may be null on failure.
     *
     * @return the cached or newly resolved attribute
     */
    public Attribute getAttribute()
        throws Exception
    {
        try{
            if (attribute == null)
            {
                String attId = getIntakeTool()
                    .get("Attribute", IntakeTool.DEFAULT_KEY)
                    .get("Id").toString();
                if ( attId == null || attId.length() == 0 )
                {
                    attribute = Attribute.getInstance();
                }
                else
                {
                    attribute = Attribute.getInstance(new NumberKey(attId));
                }
            }
        }catch(Exception e){e.printStackTrace();}
        return attribute;
    }
    /**
     * Lazily resolves the Query: if none is cached, reads "queryId" from the
     * request parameters and either retrieves it by primary key or creates a
     * blank instance when no id was supplied. Exceptions are printed and
     * swallowed, so the result may be null on failure.
     *
     * @return the cached or newly resolved query
     */
    public Query getQuery()
        throws Exception
    {
        try
        {
            if (query == null)
            {
                String queryId = data.getParameters()
                    .getString("queryId");
                if ( queryId == null || queryId.length() == 0 )
                {
                    query = Query.getInstance();
                }
                else
                {
                    query = QueryPeer.retrieveByPK(new NumberKey(queryId));
                }
            }
        }
        catch (Exception e)
        {
            e.printStackTrace();
        }
        return query;
    }
    /**
     * Lazily resolves the IssueTemplateInfo: if none is cached, reads
     * "issue_id" from the request parameters and either retrieves it by
     * primary key or creates a blank instance when no id was supplied.
     * Exceptions are printed and swallowed, so the result may be null.
     *
     * @return the cached or newly resolved template info
     */
    public IssueTemplateInfo getIssueTemplateInfo()
        throws Exception
    {
        try
        {
            if (templateInfo == null)
            {
                String issueId = data.getParameters()
                    .getString("issue_id");
                if ( issueId == null || issueId.length() == 0 )
                {
                    templateInfo = IssueTemplateInfo.getInstance();
                }
                else
                {
                    templateInfo = IssueTemplateInfoPeer
                        .retrieveByPK(new NumberKey(issueId));
                }
            }
        }
        catch (Exception e)
        {
            e.printStackTrace();
        }
        return templateInfo;
    }
    /**
     * Lazily resolves the Depend: if none is cached, reads the id from the
     * intake "Depend" group and either retrieves it by primary key or
     * creates a blank instance when no id was submitted. Exceptions are
     * printed and swallowed, so the result may be null on failure.
     *
     * @return the cached or newly resolved dependency
     */
    public Depend getDepend()
        throws Exception
    {
        try
        {
            if (depend == null)
            {
                String dependId = getIntakeTool()
                    .get("Depend", IntakeTool.DEFAULT_KEY).get("Id").toString();
                if ( dependId == null || dependId.length() == 0 )
                {
                    depend = Depend.getInstance();
                }
                else
                {
                    depend = DependPeer.retrieveByPK(new NumberKey(dependId));
                }
            }
        }
        catch (Exception e)
        {
            e.printStackTrace();
        }
        return depend;
    }
    /**
     * Lazily resolves the Attachment from the intake "Attachment" group:
     * retrieves it by primary key when an id was submitted, otherwise
     * creates a new empty Attachment. Unlike the sibling getters, this one
     * RETHROWS after printing the stack trace.
     *
     * @return the cached or newly resolved attachment
     * @throws Exception on retrieval failure
     */
    public Attachment getAttachment()
        throws Exception
    {
        try{
            if (attachment == null)
            {
                Group att = getIntakeTool()
                    .get("Attachment", IntakeTool.DEFAULT_KEY, false);
                if ( att != null )
                {
                    String attId = att.get("Id").toString();
                    if ( attId == null || attId.length() == 0 )
                    {
                        attachment = new Attachment();
                    }
                    else
                    {
                        attachment = AttachmentPeer
                            .retrieveByPK(new NumberKey(attId));
                    }
                }
                else
                {
                    attachment = new Attachment();
                }
            }
        }catch(Exception e){e.printStackTrace(); throw e;}
        return attachment;
    }
    /**
     * Gets a new (empty) AttributeGroup object.
     *
     * @return a fresh AttributeGroup; the tool's cached group is not touched
     */
    public AttributeGroup getAttributeGroup()
    {
        return new AttributeGroup();
    }
    /**
     * Gets an AttributeGroup object by primary key.
     *
     * @param key the primary key as a string
     * @return the group, or null if retrieval failed (error is printed)
     */
    public AttributeGroup getAttributeGroup(String key)
    {
        AttributeGroup group = null;
        try
        {
            group = (AttributeGroup)
                AttributeGroupPeer.retrieveByPK(new NumberKey(key));
        }
        catch (Exception e)
        {
            e.printStackTrace();
        }
        return group;
    }
    /**
     * Get a specific issue type by key value. Returns null if
     * the Issue Type could not be found.
     *
     * @param key a <code>String</code> value
     * @return a <code>IssueType</code> value, or null when not found
     */
    public IssueType getIssueType(String key)
    {
        IssueType issueType = null;
        try
        {
            issueType = (IssueType)
                IssueTypePeer.retrieveByPK(new NumberKey(key));
        }
        catch (Exception e)
        {
            // Deliberately ignored: a missing or malformed key simply
            // yields a null return value.
        }
        return issueType;
    }
    /**
     * Lazily resolves the issue type: if none is cached, reads
     * "issuetypeid" from the request parameters and either retrieves it by
     * primary key or creates a new empty IssueType when no id was supplied.
     *
     * @return the cached or newly resolved issue type; may be null when
     *         retrieval failed (error is printed)
     */
    public IssueType getIssueType()
        throws Exception
    {
        if ( issueType == null )
        {
            String key = data.getParameters()
                .getString("issuetypeid");
            if ( key == null )
            {
                // get new issue type
                issueType = new IssueType();
            }
            else
            {
                try
                {
                    issueType = (IssueType) IssueTypePeer
                        .retrieveByPK(new NumberKey(key));
                }
                catch (Exception e)
                {
                    e.printStackTrace();
                }
            }
        }
        return issueType;
    }
    /**
     * Get an RModuleAttribute object: first tries the composite id from the
     * intake "RModuleAttribute" group; failing that, combines the intake
     * attribute id with the current module's id into a ComboKey; failing
     * both, returns a new empty instance. Exceptions are printed and
     * swallowed, so the result may be null on failure.
     *
     * @return the resolved or newly created RModuleAttribute
     */
    public RModuleAttribute getRModuleAttribute()
        throws Exception
    {
        RModuleAttribute rma = null;
        try{
            ComboKey rModAttId = (ComboKey)getIntakeTool()
                .get("RModuleAttribute", IntakeTool.DEFAULT_KEY)
                .get("Id").getValue();
            if ( rModAttId == null )
            {
                NumberKey attId = (NumberKey)getIntakeTool()
                    .get("Attribute", IntakeTool.DEFAULT_KEY)
                    .get("Id").getValue();
                ModuleEntity currentModule = getCurrentModule();
                if ( attId != null && currentModule != null )
                {
                    NumberKey[] nka = {attId, currentModule.getModuleId()};
                    rma = RModuleAttributePeer.retrieveByPK(new ComboKey(nka));
                }
                else
                {
                    rma = new RModuleAttribute();
                }
            }
            else
            {
                rma = RModuleAttributePeer.retrieveByPK(rModAttId);
            }
        }catch(Exception e){e.printStackTrace();}
        return rma;
    }
    /**
     * Sets the AttributeGroup to be used within the Scarab API.
     *
     * @param group the attribute group to cache in this tool
     */
    public void setAttributeGroup(AttributeGroup group)
    {
        this.group = group;
    }
    /**
     * Sets the Module to be used within the Scarab API.
     *
     * @param module the module to cache in this tool
     */
    public void setModule(ModuleEntity module)
    {
        this.module = module;
    }
    /**
     * Resolves the Module from the intake "Module" group: retrieves it by id
     * when one was submitted, otherwise returns a new blank instance.
     * NOTE(review): unlike the other getters this does not check the cached
     * field first — it overwrites {@code module} on every call; confirm
     * that is intended. Exceptions are printed and swallowed.
     *
     * @return a <code>ModuleEntity</code> value (may be null on failure)
     */
    public ModuleEntity getModule()
        throws Exception
    {
        try{
            String modId = getIntakeTool()
                .get("Module", IntakeTool.DEFAULT_KEY).get("Id").toString();
            if ( modId == null || modId.length() == 0 )
            {
                module = ModuleManager.getInstance();
            }
            else
            {
                module = ModuleManager.getInstance(new NumberKey(modId));
            }
        }catch(Exception e){e.printStackTrace();}
        return module;
    }
    /**
     * Get a specific module by key value. Returns null if
     * the Module could not be found.
     *
     * @param key a <code>String</code> value
     * @return a <code>Module</code> value, or null when not found
     */
    public ModuleEntity getModule(String key)
    {
        ModuleEntity me = null;
        try
        {
            me = ModuleManager.getInstance(new NumberKey(key));
        }
        catch (Exception e)
        {
            // Deliberately ignored: an unknown key simply yields null.
        }
        return me;
    }
/**
 * Gets the ModuleEntity associated with the information passed around in
 * the query string, resolving it lazily and caching the result.  Returns
 * null if the Module could not be found.
 */
public ModuleEntity getCurrentModule()
{
    if (currentModule != null)
    {
        return currentModule;
    }
    String key = data.getParameters()
        .getString(ScarabConstants.CURRENT_MODULE);
    currentModule = getModule(key);
    return currentModule;
}
/**
 * Gets the IssueType associated with the information passed around in
 * the query string, resolving it lazily and caching the result.  Returns
 * null if the IssueType could not be found.
 */
public IssueType getCurrentIssueType() throws Exception
{
    if (currentIssueType != null)
    {
        return currentIssueType;
    }
    String key = data.getParameters()
        .getString(ScarabConstants.CURRENT_ISSUE_TYPE);
    currentIssueType = getIssueType(key);
    return currentIssueType;
}
/**
 * The issue that is currently being entered.  Resolved from the
 * ScarabConstants.REPORTING_ISSUE request parameter; a missing or stale
 * key results in a brand-new issue being created and registered.
 *
 * @return an <code>Issue</code> value
 */
public Issue getReportingIssue()
    throws Exception
{
    if ( reportingIssue != null )
    {
        return reportingIssue;
    }
    String key = data.getParameters()
        .getString(ScarabConstants.REPORTING_ISSUE);
    if ( key != null )
    {
        // a stale key leaves reportingIssue null, handled below
        reportingIssue = ((ScarabUser)data.getUser())
            .getReportingIssue(key);
    }
    if ( reportingIssue == null )
    {
        getNewReportingIssue();
    }
    return reportingIssue;
}
/**
 * Creates a brand-new Issue for the current module/issue type, registers
 * it with the current user, and records the returned key in the request
 * parameters so subsequent requests can find the same issue again.
 */
private void getNewReportingIssue()
throws Exception
{
reportingIssue = getCurrentModule().getNewIssue(getCurrentIssueType());
String key = ((ScarabUser)data.getUser())
.setReportingIssue(reportingIssue);
data.getParameters().add(ScarabConstants.REPORTING_ISSUE, key);
}
/**
 * Sets the issue that is currently being entered.
 *
 * @param issue the <code>Issue</code> to treat as the reporting issue
 */
public void setReportingIssue(Issue issue)
{
reportingIssue = issue;
}
/**
* The most recent query entered.
*
* @return an <code>Issue</code> value
public String getCurrentQuery()
throws Exception
{
if ( currentQuery == null )
{
System.out.println("use default");
}
else
{
currentQuery = (String)((ScarabUser)data.getUser())
.getTemp(ScarabConstants.CURRENT_QUERY);
}
return currentQuery;
}
public void setCurrentQuery(String query)
{
currentQuery = query;
}
*/
/**
 * Sets the current ModuleEntity.
 *
 * @param me the module that subsequent calls should treat as current
 */
public void setCurrentModule(ModuleEntity me)
{
currentModule = me;
}
/**
 * Sets the current ArtifactType (issue type).
 *
 * @param issueType the issue type that subsequent calls treat as current
 */
public void setCurrentIssueType(IssueType issueType)
{
currentIssueType = issueType;
}
/**
 * Sets the Issue to be used within the Scarab API.
 *
 * @param issue the <code>Issue</code> to cache on this tool
 */
public void setIssue(Issue issue)
{
this.issue = issue;
}
/**
 * Get an Issue object.  If it is the first time calling, it will be a
 * new blank issue object.  The id is taken from the intake "Issue" group
 * when present, otherwise from the raw "issue_id" request parameter.
 *
 * @return a <code>Issue</code> value
 */
public Issue getIssue()
    throws Exception
{
    if (issue != null)
    {
        return issue;
    }
    String issueId = null;
    Group issueGroup = getIntakeTool()
        .get("Issue", IntakeTool.DEFAULT_KEY, false);
    if ( issueGroup != null )
    {
        issueId = issueGroup.get("Id").toString();
    }
    else
    {
        // may legitimately be null when the parameter is absent
        issueId = data.getParameters().getString("issue_id");
    }
    if ( issueId == null || issueId.length() == 0 )
    {
        issue = getCurrentModule()
            .getNewIssue(getCurrentIssueType());
    }
    else
    {
        issue = IssuePeer.retrieveByPK(new NumberKey(issueId));
    }
    return issue;
}
/**
 * The id may be a primary key or an issue id.
 *
 * First tries the key as a database primary key; when that fails, the
 * key is re-parsed as a federated issue id.  An invalid federated id
 * yields null.
 *
 * @param key a <code>String</code> value
 * @return a <code>Issue</code> value, or null when the key is not valid
 */
public Issue getIssue(String key)
{
Issue issue = null;
try
{
issue = IssuePeer.retrieveByPK(new NumberKey(key));
}
catch (Exception e)
{
// was not a primary key, try fid
try
{
Issue.FederatedId fid = new Issue.FederatedId(key);
if ( fid.getDomain() == null )
{
// handle null (always null right now)
}
issue = Issue.getIssueById(fid);
}
catch (NumberFormatException nfe)
{
// invalid id, just return null
// NOTE(review): only NumberFormatException is caught here; other
// failures from getIssueById would propagate -- confirm intended.
}
}
return issue;
}
/**
 * Get a list of Issue objects.  Ids are taken from the intake "Issue"
 * group when present, otherwise from the raw "issue_ids" request
 * parameter.
 *
 * @return a <code>List</code> of Issues, or null when neither source
 *         supplies any ids
 */
public List getIssues()
    throws Exception
{
    List issues = null;
    Group issueGroup = getIntakeTool()
        .get("Issue", IntakeTool.DEFAULT_KEY, false);
    if ( issueGroup != null )
    {
        // ids submitted through the intake group
        NumberKey[] ids = (NumberKey[])issueGroup.get("Ids").getValue();
        if ( ids != null )
        {
            issues = new ArrayList(ids.length);
            for ( int j = 0; j < ids.length; j++ )
            {
                issues.add(IssuePeer.retrieveByPK(ids[j]));
            }
        }
    }
    else if ( data.getParameters().getString("issue_ids") != null )
    {
        // fall back to the raw request parameter
        String[] idStrings = data.getParameters()
            .getStrings("issue_ids");
        issues = new ArrayList(idStrings.length);
        for ( int j = 0; j < idStrings.length; j++ )
        {
            issues.add(IssuePeer
                .retrieveByPK(new NumberKey(idStrings[j])));
        }
    }
    return issues;
}
/**
 * Get all scopes.
 *
 * @return a <code>List</code> of every scope known to ScopePeer
 */
public List getScopes()
throws Exception
{
return ScopePeer.getAllScopes();
}
/**
 * Get all frequencies.
 *
 * @return a <code>List</code> of every frequency known to FrequencyPeer
 */
public List getFrequencies()
throws Exception
{
return FrequencyPeer.getFrequencies();
}
/**
 * Returns an Intake built from the value of the named request parameter;
 * when that parameter is absent, the request's own intake tool is
 * returned instead.
 *
 * @param parameter the name of the request parameter holding an
 *        "a=b&amp;c=d" style query string
 * @return an <code>Intake</code> value
 */
public Intake getConditionalIntake(String parameter)
    throws Exception
{
    String param = data.getParameters().getString(parameter);
    if ( param == null )
    {
        // nothing to parse; reuse the request's intake tool
        return getIntakeTool();
    }
    // parseQuery performs exactly the new-Intake/parse/init sequence
    return parseQuery(param);
}
/**
 * Get a new SearchIssue object bound to the current module and current
 * issue type.
 *
 * @return a fresh <code>IssueSearch</code> value
 */
public IssueSearch getSearch()
throws Exception
{
return new IssueSearch(getCurrentModule(), getCurrentIssueType());
}
/**
 * Parses an "a=b&amp;c=d" style query string into a fresh Intake.
 *
 * @param query the query string to parse
 * @return a newly initialized <code>Intake</code>
 */
public Intake parseQuery(String query)
    throws Exception
{
    StringValueParser parser = new StringValueParser();
    parser.parse(query, '&', '=', true);
    Intake result = new Intake();
    result.init(parser);
    return result;
}
/**
 * Runs the user's current query (or, when the session has none, their
 * default query for the current module/issue type) and returns the
 * matching issues.
 *
 * @return a <code>List</code> of matching issues
 */
public List getCurrentSearchResults()
    throws Exception
{
    ScarabUser user = (ScarabUser)data.getUser();
    Intake intake;
    if (user.getTemp(ScarabConstants.CURRENT_QUERY) == null)
    {
        // No query stored in session
        // Check for default query.
        Query query = user.getDefaultQuery(getCurrentModule(),
                                           getCurrentIssueType());
        String defaultQuery = null;
        if (query == null)
        {
            // Use default query : all issues created by or
            // Assigned to this user.
            defaultQuery = user.getDefaultDefaultQuery();
        }
        else
        {
            defaultQuery = query.getValue();
        }
        intake = parseQuery(defaultQuery);
    }
    else
    {
        String currentQuery = user.getTemp(ScarabConstants
            .CURRENT_QUERY).toString();
        intake = parseQuery(currentQuery);
    }
    IssueSearch search = getSearch();
    // FIX: reuse the query key of the search we just built instead of
    // constructing a second IssueSearch merely to ask for its key
    Group searchGroup = intake.get("SearchIssue",
                                   search.getQueryKey() );
    searchGroup.setProperties(search);
    // copy any submitted attribute values onto the search criteria
    SequencedHashtable avMap = search.getModuleAttributeValuesMap();
    Iterator i = avMap.iterator();
    while (i.hasNext())
    {
        AttributeValue aval = (AttributeValue)avMap.get(i.next());
        Group group = intake.get("AttributeValue", aval.getQueryKey());
        if ( group != null )
        {
            group.setProperties(aval);
        }
    }
    return search.getMatchingIssues();
}
/**
 * Convert paths with slashes to commas.
 *
 * @param path a slash-delimited path
 * @return the same string with every '/' replaced by ','
 */
public String convertPath(String path)
    throws Exception
{
    return path.replace('/', ',');
}
/**
 * A report helper class: lazily builds (or retrieves by "report_id") the
 * report generator and primes it with the query string assembled from
 * the current request parameters.
 *
 * @return the cached <code>Report</code> generator
 */
public Report getReport()
    throws Exception
{
    if ( reportGenerator == null )
    {
        ValueParser parameters = data.getParameters();
        String id = parameters.getString("report_id");
        if ( id == null || id.length() == 0 )
        {
            reportGenerator = new Report();
            reportGenerator.setModule(getCurrentModule());
            reportGenerator.setGeneratedBy((ScarabUser)data.getUser());
            reportGenerator.setIssueType(getCurrentIssueType());
        }
        else
        {
            reportGenerator = ReportPeer.retrieveByPK(new NumberKey(id));
        }
        // both branches need the query string from the current request;
        // hoisted here to remove the duplicated call
        reportGenerator.setQueryString(getReportQueryString(parameters));
    }
    return reportGenerator;
}
/**
 * Sets the report helper returned by getReport().
 *
 * @param report the report generator to cache on this tool
 */
public void setReport(Report report)
{
this.reportGenerator = report;
}
/**
 * Rebuilds a query string from all request parameters whose key starts
 * with "rep" or "intake"; each key/value pair is appended as
 * "&amp;key=value".
 *
 * @param params the request's parameters
 * @return the assembled query string (possibly empty)
 */
private static String getReportQueryString(ValueParser params)
{
    StringBuffer query = new StringBuffer();
    Object[] keys = params.getKeys();
    for (int k = 0; k < keys.length; k++)
    {
        String name = keys[k].toString();
        boolean isReportKey =
            name.startsWith("rep") || name.startsWith("intake");
        if (!isReportKey)
        {
            continue;
        }
        String[] values = params.getStrings(name);
        for (int v = 0; v < values.length; v++)
        {
            query.append('&').append(name)
                 .append('=').append(values[v]);
        }
    }
    return query.toString();
}
/**
 * Return a subset of the passed-in list, also recording the total page
 * count and the next/previous page numbers as side effects.
 *
 * @param fullList the complete result list
 * @param pgNbr the 1-based page number to extract
 * @param nbrItemsPerPage page size (must be &gt; 0)
 * @return the sublist for the requested page (empty when the page is
 *         past the end of the list)
 */
public List getPaginatedList( List fullList, int pgNbr,
                              int nbrItemsPerPage)
{
    int size = fullList.size();
    // exact integer ceiling; the old float division could round wrongly
    // on very large lists
    this.nbrPages = (size + nbrItemsPerPage - 1) / nbrItemsPerPage;
    this.nextPage = pgNbr + 1;
    this.prevPage = pgNbr - 1;
    // clamp both ends so an out-of-range page yields an empty sublist
    // instead of an IndexOutOfBoundsException
    int fromIndex = Math.min((pgNbr - 1) * nbrItemsPerPage, size);
    int toIndex = Math.min(pgNbr * nbrItemsPerPage, size);
    return fullList.subList(fromIndex, toIndex);
}
/**
 * Get the cached list of issues resulting from a search, lazily running
 * the current search on first access.
 *
 * @return the (possibly cached) list of matching issues
 */
public List getIssueList()
throws Exception
{
if ( issueList == null )
{
issueList = getCurrentSearchResults();
}
return issueList;
}
/**
 * Set the value of issueList (replaces any cached search result).
 * @param v Value to assign to issueList.
 */
public void setIssueList(List v)
{
this.issueList = v;
}
/**
 * Return the number of paginated pages, as computed by the most recent
 * call to getPaginatedList().
 */
public int getNbrPages()
{
return nbrPages;
}
/**
 * Return the next page in the paginated list, or 0 when the current
 * page is already the last one.
 */
public int getNextPage()
{
    return (nextPage <= nbrPages) ? nextPage : 0;
}
/**
 * Return the previous page in the paginated list (0 when the current
 * page is the first one).
 */
public int getPrevPage()
{
return prevPage;
}
/**
 * Determine if the user currently interacting with the scarab
 * application has a permission within the user's currently
 * selected module.
 *
 * @param permission a <code>String</code> permission value, which should
 * be a constant in this interface.
 * @return true if the permission exists for the user within the
 * current module, false otherwise
 */
public boolean hasPermission(String permission)
{
    try
    {
        // delegate to the two-argument overload using the currently
        // selected module
        return hasPermission(permission, getCurrentModule());
    }
    catch (Exception e)
    {
        Log.error("Permission check failed on:" + permission, e);
        return false;
    }
}
/**
 * Determine if the user currently interacting with the scarab
 * application has a permission within a module.
 *
 * @param permission a <code>String</code> permission value, which should
 * be a constant in this interface.
 * @param module a <code>ModuleEntity</code> value
 * @return true if the permission exists for the user within the
 * given module, false otherwise
 */
public boolean hasPermission(String permission, ModuleEntity module)
{
    try
    {
        ScarabUser user = (ScarabUser)data.getUser();
        return user.hasPermission(permission, module);
    }
    catch (Exception e)
    {
        Log.error("Permission check failed on:" + permission, e);
        return false;
    }
}
// ****************** Recyclable implementation ************************
/**
 * Disposes the object after use. The method is called when the
 * object is returned to its pool. The dispose method must call
 * its super.
 */
public void dispose()
{
super.dispose();
// drop per-request references so the pooled instance holds no stale state
data = null;
user = null;
issue = null;
attribute = null;
}
}
| |
/*
* Copyright (C) 2016 Usman Akhtar.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
 * The Home Controller is responsible for mapping
 * HTTP requests onto handler methods.
*/
package org.uclab.mm.dcl.adr;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.text.DateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.LinkedHashMap;
import java.util.Locale;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import org.uclab.mm.dcl.adr.model.SLDataModel;
/**
 * Handles requests mapping to data access object with the service.
 *
 * <p>The {@code @Controller} annotation indicates that this class serves
 * the role of a web controller.</p>
 */
@Controller
public class HomeController {

    /** Class-scoped SLF4J logger for this controller. */
    private static final Logger logger = LoggerFactory.getLogger(HomeController.class);

    /**
     * JDBC driver class for Apache Hive.  If this class cannot be
     * resolved, check the pom.xml; the project depends on Apache Hive
     * version 1.2.1.
     */
    private static String driverName = "org.apache.hive.jdbc.HiveDriver";

    /**
     * Simply selects the home view to render by returning its name.
     *
     * @param locale the locale
     * @param model the model
     * @return the view name ("home")
     */
    @RequestMapping(value = "/", method = RequestMethod.GET)
    public String home(Locale locale, Model model) {
        logger.info("Welcome home! The client locale is {}.", locale);
        Date date = new Date();
        DateFormat dateFormat = DateFormat.getDateTimeInstance(DateFormat.LONG, DateFormat.LONG, locale);
        String formattedDate = dateFormat.format(date);
        model.addAttribute("serverTime", formattedDate);
        return "home";
    }

    /**
     * Location.
     *
     * @param userid the userid; "0" returns the per-label location counts,
     *        any other value returns the raw rows for that user
     * @return the location data stored in Apache Hive, wrapped in an
     *         {@link SLDataModel}
     * @throws SQLException the SQL exception
     */
    @RequestMapping(value = "/location")
    public @ResponseBody SLDataModel location(@RequestParam(value = "userid") String userid) throws SQLException {
        try {
            Class.forName(driverName);
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
            System.exit(1);
        }

        String data_name = "SCLData";
        String data_id = "1";
        ArrayList<Object> data_attributes = new ArrayList<Object>();
        ArrayList<Object> data_values = new ArrayList<Object>();
        LinkedHashMap<String, String> data_array = new LinkedHashMap<String, String>();

        // FIX: try-with-resources guarantees the Hive connection, statement
        // and result sets are closed; the previous version leaked them on
        // every request.  The JDBC URL/credentials are kept as-is (see
        // spring.xml); the thrift Hive port must be enabled.
        try (Connection con =
                 DriverManager.getConnection("jdbc:hive2://163.180.116.94:10000/test1", "hive", "hive");
             Statement stmt = con.createStatement()) {

            String tableName = "detectedlocation1";
            String sql = "show tables '" + tableName + "'";
            System.out.println("Running: " + sql);
            try (ResultSet res = stmt.executeQuery(sql)) {
                if (res.next()) {
                    System.out.println(res.getString(1));
                }
            }

            if (userid.equals("0")) {
                // Count the number of rows per location label.
                sql = "select * from detectedlocation1";
                int home = 0;
                int office = 0;
                int yard = 0;
                int gym = 0;
                int mall = 0;
                int restaurant = 0;
                int outdoors = 0;
                int transport = 0;

                LinkedHashMap<String, String> attributes = new LinkedHashMap<String, String>();
                attributes.put("attribute_name", "Count of Location");
                data_attributes.add(attributes);

                try (ResultSet res = stmt.executeQuery(sql)) {
                    while (res.next()) {
                        // column 3 holds the detected location label
                        String label = res.getString(3);
                        if ("Home".equals(label)) {
                            home++;
                        } else if ("Office".equals(label)) {
                            office++;
                        } else if ("Yard".equals(label)) {
                            yard++;
                        } else if ("Gym".equals(label)) {
                            gym++;
                        } else if ("Mall".equals(label)) {
                            mall++;
                        } else if ("Restaurant".equals(label)) {
                            restaurant++;
                        } else if ("Outdoors".equals(label)) {
                            outdoors++;
                        } else if ("Transport".equals(label)) {
                            transport++;
                        }
                    }
                }

                // NOTE(review): data_values is populated but never handed to
                // SLDataModel below -- confirm whether it should be included.
                data_values.add("Home:" + home);
                data_values.add("Office:" + office);
                data_values.add("Yard:" + yard);
                data_values.add("Gym:" + gym);
                data_values.add("Mall:" + mall);
                data_values.add("Restaurant:" + restaurant);
                data_values.add("Outdoors:" + outdoors);
                data_values.add("Transport:" + transport);

                data_array.put("Home", String.valueOf(home));
                data_array.put("Office", String.valueOf(office));
                data_array.put("Yard", String.valueOf(yard));
                // FIX: "Gym" was counted but never added to the result map
                data_array.put("Gym", String.valueOf(gym));
                data_array.put("Mall", String.valueOf(mall));
                data_array.put("Restaurant", String.valueOf(restaurant));
                data_array.put("Outdoors", String.valueOf(outdoors));
                data_array.put("Transport", String.valueOf(transport));
            } else {
                // NOTE(review): userid is concatenated directly into the SQL
                // (injection risk if this service is ever exposed), and this
                // branch queries "detectedlocation" while the count branch
                // reads "detectedlocation1" -- confirm both are intentional.
                sql = "select * from detectedlocation where userid=" + userid;
                System.out.println("Running: " + sql);
                try (ResultSet res = stmt.executeQuery(sql)) {
                    ResultSetMetaData rsmd = res.getMetaData();
                    int colcount = rsmd.getColumnCount();
                    // one attribute descriptor per column
                    for (int j = 1; j <= colcount; j++) {
                        LinkedHashMap<String, String> attributes = new LinkedHashMap<String, String>();
                        String attribute_name = rsmd.getColumnLabel(j).split("\\.")[1];
                        attributes.put("attribute_name", attribute_name);
                        String attribute_type = rsmd.getColumnTypeName(j);
                        attributes.put("Attribute_type", attribute_type);
                        data_attributes.add(attributes);
                    }
                    // one value list per row
                    while (res.next()) {
                        ArrayList<Object> values = new ArrayList<Object>();
                        for (int j = 1; j <= colcount; j++) {
                            values.add(res.getString(j));
                        }
                        data_values.add(values);
                    }
                }
            }
        }

        // Display the userid in the terminal.
        String line = "incoming userid = " + userid;
        System.out.println(line);

        // Finally the result is sent back to the model.
        return new SLDataModel(data_id, data_name, data_attributes, data_array);
    }
}
| |
/*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2013 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.trans.steps.salesforceupdate;
import java.util.ArrayList;
import org.apache.axis.message.MessageElement;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleStepException;
import org.pentaho.di.core.row.RowDataUtil;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.BaseStep;
import org.pentaho.di.trans.step.StepDataInterface;
import org.pentaho.di.trans.step.StepInterface;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.step.StepMetaInterface;
import org.pentaho.di.trans.steps.salesforceinput.SalesforceConnection;
import org.pentaho.di.trans.steps.salesforceutils.SalesforceUtils;
import com.google.common.annotations.VisibleForTesting;
import com.sforce.soap.partner.sobject.SObject;
/**
 * Writes incoming rows as updates to a Salesforce module, buffering rows
 * up to the configured batch size, and passes the rows to one or more
 * output streams.
 *
 * @author jstairs,Samatar
 * @since 10-06-2007
 */
public class SalesforceUpdate extends BaseStep implements StepInterface {
  private static Class<?> PKG = SalesforceUpdateMeta.class; // for i18n purposes, needed by Translator2!!

  private SalesforceUpdateMeta meta;
  private SalesforceUpdateData data;

  public SalesforceUpdate( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta,
    Trans trans ) {
    super( stepMeta, stepDataInterface, copyNr, transMeta, trans );
  }

  /**
   * Processes one incoming row.  The first row triggers buffer allocation
   * and the input-position to Salesforce-field mapping; every row is then
   * queued via {@link #writeToSalesForce(Object[])}.  A null row flushes
   * whatever remains in the buffer and signals that output is done.
   *
   * @return true while more rows are expected, false at end of stream
   * @throws KettleException if the field grid is empty, a mapped field is
   *         missing from the input stream, or the Salesforce call fails
   */
  public boolean processRow( StepMetaInterface smi, StepDataInterface sdi ) throws KettleException {
    // get one row ... This does some basic initialization of the objects, including loading the info coming in
    Object[] outputRowData = getRow();

    if ( outputRowData == null ) {
      // end of input: flush any partially filled batch before finishing
      if ( data.iBufferPos > 0 ) {
        flushBuffers();
      }
      setOutputDone();
      return false;
    }

    // If we haven't looked at a row before then do some basic setup.
    if ( first ) {
      first = false;

      data.sfBuffer = new SObject[meta.getBatchSizeInt()];
      data.outputBuffer = new Object[meta.getBatchSizeInt()][];

      // get total fields in the grid
      data.nrfields = meta.getUpdateLookup().length;

      // Check if field list is filled
      if ( data.nrfields == 0 ) {
        throw new KettleException( BaseMessages.getString( PKG,
          "SalesforceUpdateDialog.FieldsMissing.DialogMessage" ) );
      }

      // Create the output row meta-data
      data.inputRowMeta = getInputRowMeta().clone();
      data.outputRowMeta = data.inputRowMeta.clone();
      meta.getFields( data.outputRowMeta, getStepname(), null, null, this, repository, metaStore );

      // Build the mapping of input position to field name
      data.fieldnrs = new int[meta.getUpdateStream().length];
      for ( int i = 0; i < meta.getUpdateStream().length; i++ ) {
        data.fieldnrs[i] = getInputRowMeta().indexOfValue( meta.getUpdateStream()[i] );
        if ( data.fieldnrs[i] < 0 ) {
          throw new KettleException( "Field [" + meta.getUpdateStream()[i]
            + "] couldn't be found in the input stream!" );
        }
      }
    }

    try {
      writeToSalesForce( outputRowData );
    } catch ( Exception e ) {
      throw new KettleStepException( BaseMessages.getString( PKG, "SalesforceUpdate.log.Exception" ), e );
    }
    return true;
  }

  /**
   * Converts one row into an SObject (null input values are collected
   * into fieldsToNull) and appends it to the batch buffer; a full buffer
   * is immediately flushed to Salesforce.
   *
   * @param rowData the row to queue
   * @throws KettleException wrapping any conversion or flush failure
   */
  @VisibleForTesting
  void writeToSalesForce( Object[] rowData ) throws KettleException {
    try {
      if ( log.isDetailed() ) {
        logDetailed( "Called writeToSalesForce with " + data.iBufferPos + " out of " + meta.getBatchSizeInt() );
      }

      // if there is room in the buffer
      if ( data.iBufferPos < meta.getBatchSizeInt() ) {
        // Reserve for empty fields
        ArrayList<String> fieldsToNull = new ArrayList<String>();
        ArrayList<MessageElement> updatefields = new ArrayList<MessageElement>();

        // Add fields to update
        for ( int i = 0; i < data.nrfields; i++ ) {
          boolean valueIsNull = data.inputRowMeta.isNull( rowData, data.fieldnrs[i] );
          if ( valueIsNull ) {
            // The value is null: Salesforce must be told explicitly to
            // blank the field, so track it in fieldsToNull
            fieldsToNull.add( SalesforceUtils.getFieldToNullName( log, meta.getUpdateLookup()[i], meta
              .getUseExternalId()[i] ) );
          } else {
            updatefields.add( SalesforceConnection.createMessageElement( meta.getUpdateLookup()[i],
              rowData[data.fieldnrs[i]], meta.getUseExternalId()[i] ) );
          }
        }

        // build the SObject
        SObject sobjPass = new SObject();
        sobjPass.setType( data.realModule );
        if ( updatefields.size() > 0 ) {
          sobjPass.set_any( updatefields.toArray( new MessageElement[updatefields.size()] ) );
        }
        if ( fieldsToNull.size() > 0 ) {
          // Set Null to fields
          sobjPass.setFieldsToNull( fieldsToNull.toArray( new String[fieldsToNull.size()] ) );
        }

        // Load the buffer array
        data.sfBuffer[data.iBufferPos] = sobjPass;
        data.outputBuffer[data.iBufferPos] = rowData;
        data.iBufferPos++;
      }

      if ( data.iBufferPos >= meta.getBatchSizeInt() ) {
        if ( log.isDetailed() ) {
          logDetailed( "Calling flush buffer from writeToSalesForce" );
        }
        flushBuffers();
      }
    } catch ( Exception e ) {
      // FIX: keep the original exception as the cause instead of
      // flattening it to its message
      throw new KettleException( "\nFailed in writeToSalesForce: " + e.getMessage(), e );
    }
  }

  /**
   * Sends the buffered SObjects to Salesforce in one update call, emits
   * each successfully updated row downstream, routes failed rows to error
   * handling (or aborts when error handling is off), and resets the
   * buffers.
   */
  private void flushBuffers() throws KettleException {
    try {
      // create the object(s) by sending the array to the web service
      data.saveResult = data.connection.update( data.sfBuffer );
      int nr = data.saveResult.length;
      for ( int j = 0; j < nr; j++ ) {
        if ( data.saveResult[j].isSuccess() ) {
          // Row was updated
          String id = data.saveResult[j].getId();
          if ( log.isDetailed() ) {
            logDetailed( "Row updated with id: " + id );
          }

          // write out the row with the SalesForce ID
          Object[] newRow = RowDataUtil.resizeArray( data.outputBuffer[j], data.outputRowMeta.size() );

          if ( log.isDetailed() ) {
            logDetailed( "The new row has an id value of : " + newRow[0] );
          }

          putRow( data.outputRowMeta, newRow ); // copy row to output rowset(s);
          incrementLinesUpdated();

          if ( checkFeedback( getLinesInput() ) ) {
            if ( log.isDetailed() ) {
              logDetailed( BaseMessages.getString( PKG, "SalesforceUpdate.log.LineRow", "" + getLinesInput() ) );
            }
          }
        } else {
          // there were errors during the create call, go through the
          // errors array and write them to the screen
          if ( !getStepMeta().isDoingErrorHandling() ) {
            if ( log.isDetailed() ) {
              logDetailed( "Found error from SalesForce and raising the exception" );
            }

            // Only send the first error
            com.sforce.soap.partner.Error err = data.saveResult[j].getErrors()[0];
            throw new KettleException( BaseMessages.getString( PKG, "SalesforceUpdate.Error.FlushBuffer",
              Integer.valueOf( j ), err.getStatusCode(), err.getMessage() ) );
          }

          // concatenate every error for this row into one message
          String errorMessage = "";
          for ( int i = 0; i < data.saveResult[j].getErrors().length; i++ ) {
            // get the next error
            com.sforce.soap.partner.Error err = data.saveResult[j].getErrors()[i];
            errorMessage +=
              BaseMessages.getString( PKG, "SalesforceUpdate.Error.FlushBuffer", Integer.valueOf( j ), err
                .getStatusCode(), err.getMessage() );
          }

          // Simply add this row to the error row
          if ( log.isDebug() ) {
            logDebug( "Passing row to error step" );
          }
          putError( getInputRowMeta(), data.outputBuffer[j], 1, errorMessage, null, "SalesforceUpdate001" );
        }
      }

      // reset the buffers
      data.sfBuffer = new SObject[meta.getBatchSizeInt()];
      data.outputBuffer = new Object[meta.getBatchSizeInt()][];
      data.iBufferPos = 0;
    } catch ( Exception e ) {
      if ( !getStepMeta().isDoingErrorHandling() ) {
        // FIX: preserve the underlying exception as the cause
        throw new KettleException( "\nFailed to update object, error message was: \n" + e.getMessage(), e );
      }

      // Simply add this row to the error row
      if ( log.isDebug() ) {
        logDebug( "Passing row to error step" );
      }

      for ( int i = 0; i < data.iBufferPos; i++ ) {
        putError( data.inputRowMeta, data.outputBuffer[i], 1, e.getMessage(), null, "SalesforceUpdate002" );
      }
    } finally {
      if ( data.saveResult != null ) {
        data.saveResult = null;
      }
    }
  }

  /**
   * Initializes the step: validates that module and user name are
   * specified, then opens a Salesforce connection configured from the
   * step metadata.
   *
   * @return true when the connection was established, false otherwise
   */
  public boolean init( StepMetaInterface smi, StepDataInterface sdi ) {
    meta = (SalesforceUpdateMeta) smi;
    data = (SalesforceUpdateData) sdi;

    if ( super.init( smi, sdi ) ) {
      try {
        data.realModule = environmentSubstitute( meta.getModule() );
        // Check if module is specified
        if ( Const.isEmpty( data.realModule ) ) {
          log.logError( BaseMessages.getString( PKG, "SalesforceUpdateDialog.ModuleMissing.DialogMessage" ) );
          return false;
        }

        String realUser = environmentSubstitute( meta.getUserName() );
        // Check if username is specified
        if ( Const.isEmpty( realUser ) ) {
          log.logError( BaseMessages.getString( PKG, "SalesforceUpdateDialog.UsernameMissing.DialogMessage" ) );
          return false;
        }

        // initialize variables
        data.realURL = environmentSubstitute( meta.getTargetURL() );
        // create a Salesforce connection
        data.connection =
          new SalesforceConnection( log, data.realURL, realUser, environmentSubstitute( meta.getPassword() ) );
        // set timeout
        data.connection.setTimeOut( Const.toInt( environmentSubstitute( meta.getTimeOut() ), 0 ) );
        // Do we use compression?
        data.connection.setUsingCompression( meta.isUsingCompression() );
        // Do we need to rollback all changes on error
        data.connection.rollbackAllChangesOnError( meta.isRollbackAllChangesOnError() );

        // Now connect ...
        data.connection.connect();
        return true;
      } catch ( KettleException ke ) {
        logError( BaseMessages.getString( PKG, "SalesforceUpdate.Log.ErrorOccurredDuringStepInitialize" ) + ke
          .getMessage() );
        // FIX: a failed connection previously fell through to "return
        // true", reporting a successful initialization; report failure
        return false;
      }
    }
    return false;
  }

  /**
   * Releases the row buffers and closes the Salesforce connection.
   */
  public void dispose( StepMetaInterface smi, StepDataInterface sdi ) {
    meta = (SalesforceUpdateMeta) smi;
    data = (SalesforceUpdateData) sdi;
    try {
      if ( data.outputBuffer != null ) {
        data.outputBuffer = null;
      }
      if ( data.sfBuffer != null ) {
        data.sfBuffer = null;
      }
      if ( data.connection != null ) {
        data.connection.close();
      }
    } catch ( Exception e ) { /* Ignore */
    }
    super.dispose( smi, sdi );
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.calcite.avatica;
import org.apache.calcite.avatica.remote.TypedValue;
import org.apache.calcite.avatica.util.Cursor;
import java.io.InputStream;
import java.io.Reader;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.net.URL;
import java.sql.Array;
import java.sql.Blob;
import java.sql.Clob;
import java.sql.Date;
import java.sql.NClob;
import java.sql.Ref;
import java.sql.RowId;
import java.sql.SQLException;
import java.sql.SQLXML;
import java.sql.Time;
import java.sql.Timestamp;
import java.sql.Types;
import java.util.Calendar;
/**
 * A location that a value can be written to or read from.
 */
public class AvaticaSite {
/** Metadata for the parameter this site binds. */
final AvaticaParameter parameter;
/** Calendar is not thread-safe. But calendar is only used from within one
* thread, and we have to trust that clients are not modifying calendars
* that they pass to us in a method such as
* {@link java.sql.PreparedStatement#setTime(int, Time, Calendar)}, so we do
* not need to synchronize access. */
final Calendar calendar;
/** Position of this site's slot within {@link #slots}. */
private final int index;
/** Shared array of bound values; the setters write to {@code slots[index]}. */
final TypedValue[] slots;
/** Value that means the parameter has been set to null.
* If value is null, parameter has not been set. */
public static final Object DUMMY_VALUE = Dummy.INSTANCE;
/**
 * Creates a site bound to one parameter slot.
 *
 * @param parameter metadata for the parameter at {@code index}
 * @param calendar calendar used when converting temporal values
 * @param index slot position written by the setters
 * @param slots shared array that receives the bound values
 */
public AvaticaSite(AvaticaParameter parameter, Calendar calendar, int index,
TypedValue[] slots) {
assert calendar != null;
assert parameter != null;
assert slots != null;
this.parameter = parameter;
this.calendar = calendar;
this.index = index;
this.slots = slots;
}
/** Wraps a value as a {@link TypedValue}, using the supplied calendar for
 * any temporal conversion. */
private TypedValue wrap(ColumnMetaData.Rep rep, Object o,
Calendar calendar) {
return TypedValue.ofJdbc(rep, o, calendar);
}
/** Wraps a value as a {@link TypedValue}, using this site's calendar. */
private TypedValue wrap(ColumnMetaData.Rep rep, Object o) {
return TypedValue.ofJdbc(rep, o, calendar);
}
/** Returns whether the slot at the given index (not necessarily this
 * site's own index) has been assigned a value. */
public boolean isSet(int index) {
return slots[index] != null;
}
// Typed setters: each wraps its argument as a TypedValue tagged with the
// corresponding JDBC representation and stores it in this site's slot.
public void setByte(byte o) {
slots[index] = wrap(ColumnMetaData.Rep.BYTE, o);
}
public void setChar(char o) {
slots[index] = wrap(ColumnMetaData.Rep.CHARACTER, o);
}
public void setShort(short o) {
slots[index] = wrap(ColumnMetaData.Rep.SHORT, o);
}
public void setInt(int o) {
slots[index] = wrap(ColumnMetaData.Rep.INTEGER, o);
}
public void setLong(long o) {
slots[index] = wrap(ColumnMetaData.Rep.LONG, o);
}
public void setBoolean(boolean o) {
slots[index] = wrap(ColumnMetaData.Rep.BOOLEAN, o);
}
public void setRowId(RowId x) {
slots[index] = wrap(ColumnMetaData.Rep.OBJECT, x);
}
public void setNString(String o) {
slots[index] = wrap(ColumnMetaData.Rep.STRING, o);
}
// NOTE(review): the stream/reader-based setters below have empty bodies,
// so those values are silently discarded by this implementation.
public void setNCharacterStream(Reader value, long length) {
}
public void setNClob(NClob value) {
slots[index] = wrap(ColumnMetaData.Rep.OBJECT, value);
}
public void setClob(Reader reader, long length) {
}
public void setBlob(InputStream inputStream, long length) {
}
public void setNClob(Reader reader, long length) {
}
public void setSQLXML(SQLXML xmlObject) {
slots[index] = wrap(ColumnMetaData.Rep.OBJECT, xmlObject);
}
public void setAsciiStream(InputStream x, long length) {
}
public void setBinaryStream(InputStream x, long length) {
}
public void setCharacterStream(Reader reader, long length) {
}
public void setAsciiStream(InputStream x) {
}
public void setBinaryStream(InputStream x) {
}
public void setCharacterStream(Reader reader) {
}
public void setNCharacterStream(Reader value) {
}
public void setClob(Reader reader) {
}
public void setBlob(InputStream inputStream) {
}
public void setNClob(Reader reader) {
}
public void setUnicodeStream(InputStream x, int length) {
}
public void setFloat(float x) {
slots[index] = wrap(ColumnMetaData.Rep.FLOAT, x);
}
public void setDouble(double x) {
slots[index] = wrap(ColumnMetaData.Rep.DOUBLE, x);
}
public void setBigDecimal(BigDecimal x) {
slots[index] = wrap(ColumnMetaData.Rep.NUMBER, x);
}
public void setString(String x) {
slots[index] = wrap(ColumnMetaData.Rep.STRING, x);
}
public void setBytes(byte[] x) {
slots[index] = wrap(ColumnMetaData.Rep.BYTE_STRING, x);
}
// Temporal setters: the caller-supplied calendar drives the conversion.
public void setTimestamp(Timestamp x, Calendar calendar) {
slots[index] = wrap(ColumnMetaData.Rep.JAVA_SQL_TIMESTAMP, x, calendar);
}
public void setTime(Time x, Calendar calendar) {
slots[index] = wrap(ColumnMetaData.Rep.JAVA_SQL_TIME, x, calendar);
}
public void setDate(Date x, Calendar calendar) {
slots[index] = wrap(ColumnMetaData.Rep.JAVA_SQL_DATE, x, calendar);
}
public void setObject(Object x, int targetSqlType) {
if (x == null || Types.NULL == targetSqlType) {
setNull(targetSqlType);
return;
}
switch (targetSqlType) {
case Types.CLOB:
case Types.DATALINK:
case Types.NCLOB:
case Types.OTHER:
case Types.REF:
case Types.SQLXML:
case Types.STRUCT:
throw notImplemented();
case Types.ARRAY:
setArray(toArray(x));
break;
case Types.BIGINT:
setLong(toLong(x));
break;
case Types.BINARY:
case Types.LONGVARBINARY:
case Types.VARBINARY:
setBytes(toBytes(x));
break;
case Types.BIT:
case Types.BOOLEAN:
setBoolean(toBoolean(x));
break;
case Types.BLOB:
if (x instanceof Blob) {
setBlob((Blob) x);
break;
} else if (x instanceof InputStream) {
setBlob((InputStream) x);
}
throw unsupportedCast(x.getClass(), Blob.class);
case Types.DATE:
setDate(toDate(x), calendar);
break;
case Types.DECIMAL:
case Types.NUMERIC:
setBigDecimal(toBigDecimal(x));
break;
case Types.DISTINCT:
throw notImplemented();
case Types.DOUBLE:
case Types.FLOAT: // yes really; SQL FLOAT is up to 8 bytes
setDouble(toDouble(x));
break;
case Types.INTEGER:
setInt(toInt(x));
break;
case Types.JAVA_OBJECT:
setObject(x);
break;
case Types.LONGNVARCHAR:
case Types.LONGVARCHAR:
case Types.NVARCHAR:
case Types.VARCHAR:
case Types.CHAR:
case Types.NCHAR:
setString(toString(x));
break;
case Types.REAL:
setFloat(toFloat(x));
break;
case Types.ROWID:
if (x instanceof RowId) {
setRowId((RowId) x);
break;
}
throw unsupportedCast(x.getClass(), RowId.class);
case Types.SMALLINT:
setShort(toShort(x));
break;
case Types.TIME:
setTime(toTime(x), calendar);
break;
case Types.TIMESTAMP:
setTimestamp(toTimestamp(x), calendar);
break;
case Types.TINYINT:
setByte(toByte(x));
break;
default:
throw notImplemented();
}
}
/** Similar logic to {@link #setObject}. */
public static Object get(Cursor.Accessor accessor, int targetSqlType,
Calendar localCalendar) throws SQLException {
switch (targetSqlType) {
case Types.CLOB:
case Types.DATALINK:
case Types.NCLOB:
case Types.REF:
case Types.SQLXML:
case Types.STRUCT:
throw notImplemented();
case Types.ARRAY:
return accessor.getArray();
case Types.BIGINT:
final long aLong = accessor.getLong();
if (aLong == 0 && accessor.wasNull()) {
return null;
}
return aLong;
case Types.BINARY:
case Types.LONGVARBINARY:
case Types.VARBINARY:
return accessor.getBytes();
case Types.BIT:
case Types.BOOLEAN:
final boolean aBoolean = accessor.getBoolean();
if (!aBoolean && accessor.wasNull()) {
return null;
}
return aBoolean;
case Types.BLOB:
return accessor.getBlob();
case Types.DATE:
return accessor.getDate(localCalendar);
case Types.DECIMAL:
case Types.NUMERIC:
return accessor.getBigDecimal();
case Types.DISTINCT:
throw notImplemented();
case Types.DOUBLE:
case Types.FLOAT: // yes really; SQL FLOAT is up to 8 bytes
final double aDouble = accessor.getDouble();
if (aDouble == 0 && accessor.wasNull()) {
return null;
}
return aDouble;
case Types.INTEGER:
final int anInt = accessor.getInt();
if (anInt == 0 && accessor.wasNull()) {
return null;
}
return anInt;
case Types.JAVA_OBJECT:
case Types.OTHER:
return accessor.getObject();
case Types.LONGNVARCHAR:
case Types.LONGVARCHAR:
case Types.NVARCHAR:
case Types.VARCHAR:
case Types.CHAR:
case Types.NCHAR:
return accessor.getString();
case Types.REAL:
final float aFloat = accessor.getFloat();
if (aFloat == 0 && accessor.wasNull()) {
return null;
}
return aFloat;
case Types.ROWID:
throw notImplemented();
case Types.SMALLINT:
final short aShort = accessor.getShort();
if (aShort == 0 && accessor.wasNull()) {
return null;
}
return aShort;
case Types.TIME:
return accessor.getTime(localCalendar);
case Types.TIMESTAMP:
return accessor.getTimestamp(localCalendar);
case Types.TINYINT:
final byte aByte = accessor.getByte();
if (aByte == 0 && accessor.wasNull()) {
return null;
}
return aByte;
default:
throw notImplemented();
}
}
public void setObject(Object x) {
slots[index] = TypedValue.ofJdbc(x, calendar);
}
public void setNull(int sqlType) {
slots[index] = wrap(ColumnMetaData.Rep.OBJECT, null);
}
public void setRef(Ref x) {
}
public void setBlob(Blob x) {
}
public void setClob(Clob x) {
}
public void setArray(Array x) {
}
public void setNull(int sqlType, String typeName) {
}
public void setURL(URL x) {
}
public void setObject(Object x, int targetSqlType,
int scaleOrLength) {
}
private static RuntimeException unsupportedCast(Class<?> from, Class<?> to) {
return new UnsupportedOperationException("Cannot convert from "
+ from.getCanonicalName() + " to " + to.getCanonicalName());
}
private static RuntimeException notImplemented() {
return new RuntimeException("not implemented");
}
private static Array toArray(Object x) {
if (x instanceof Array) {
return (Array) x;
}
throw unsupportedCast(x.getClass(), Array.class);
}
public static BigDecimal toBigDecimal(Object x) {
if (x instanceof BigDecimal) {
return (BigDecimal) x;
} else if (x instanceof BigInteger) {
return new BigDecimal((BigInteger) x);
} else if (x instanceof Number) {
if (x instanceof Double || x instanceof Float) {
return new BigDecimal(((Number) x).doubleValue());
} else {
return new BigDecimal(((Number) x).longValue());
}
} else if (x instanceof Boolean) {
return (Boolean) x ? BigDecimal.ONE : BigDecimal.ZERO;
} else if (x instanceof String) {
return new BigDecimal((String) x);
}
throw unsupportedCast(x.getClass(), BigDecimal.class);
}
private static boolean toBoolean(Object x) {
if (x instanceof Boolean) {
return (Boolean) x;
} else if (x instanceof Number) {
return ((Number) x).intValue() != 0;
} else if (x instanceof String) {
String s = (String) x;
if (s.equalsIgnoreCase("true") || s.equalsIgnoreCase("yes")) {
return true;
} else if (s.equalsIgnoreCase("false") || s.equalsIgnoreCase("no")) {
return false;
}
}
throw unsupportedCast(x.getClass(), Boolean.TYPE);
}
private static byte toByte(Object x) {
if (x instanceof Number) {
return ((Number) x).byteValue();
} else if (x instanceof Boolean) {
return (Boolean) x ? (byte) 1 : (byte) 0;
} else if (x instanceof String) {
return Byte.parseByte((String) x);
} else {
throw unsupportedCast(x.getClass(), Byte.TYPE);
}
}
private static byte[] toBytes(Object x) {
if (x instanceof byte[]) {
return (byte[]) x;
}
if (x instanceof String) {
return ((String) x).getBytes();
}
throw unsupportedCast(x.getClass(), byte[].class);
}
private static Date toDate(Object x) {
if (x instanceof String) {
return Date.valueOf((String) x);
}
return new Date(toLong(x));
}
private static Time toTime(Object x) {
if (x instanceof String) {
return Time.valueOf((String) x);
}
return new Time(toLong(x));
}
private static Timestamp toTimestamp(Object x) {
if (x instanceof String) {
return Timestamp.valueOf((String) x);
}
return new Timestamp(toLong(x));
}
private static double toDouble(Object x) {
if (x instanceof Number) {
return ((Number) x).doubleValue();
} else if (x instanceof Boolean) {
return (Boolean) x ? 1D : 0D;
} else if (x instanceof String) {
return Double.parseDouble((String) x);
} else {
throw unsupportedCast(x.getClass(), Double.TYPE);
}
}
private static float toFloat(Object x) {
if (x instanceof Number) {
return ((Number) x).floatValue();
} else if (x instanceof Boolean) {
return (Boolean) x ? 1F : 0F;
} else if (x instanceof String) {
return Float.parseFloat((String) x);
} else {
throw unsupportedCast(x.getClass(), Float.TYPE);
}
}
private static int toInt(Object x) {
if (x instanceof Number) {
return ((Number) x).intValue();
} else if (x instanceof Boolean) {
return (Boolean) x ? 1 : 0;
} else if (x instanceof String) {
return Integer.parseInt((String) x);
} else {
throw unsupportedCast(x.getClass(), Integer.TYPE);
}
}
private static long toLong(Object x) {
if (x instanceof Number) {
return ((Number) x).longValue();
} else if (x instanceof Boolean) {
return (Boolean) x ? 1L : 0L;
} else if (x instanceof String) {
return Long.parseLong((String) x);
} else {
throw unsupportedCast(x.getClass(), Long.TYPE);
}
}
private static short toShort(Object x) {
if (x instanceof Number) {
return ((Number) x).shortValue();
} else if (x instanceof Boolean) {
return (Boolean) x ? (short) 1 : (short) 0;
} else if (x instanceof String) {
return Short.parseShort((String) x);
} else {
throw unsupportedCast(x.getClass(), Short.TYPE);
}
}
private static String toString(Object x) {
if (x instanceof String) {
return (String) x;
} else if (x instanceof Character
|| x instanceof Boolean) {
return x.toString();
}
throw unsupportedCast(x.getClass(), String.class);
}
/** Singleton value to denote parameters that have been set to null (as
* opposed to not set).
*
* <p>Not a valid value for a parameter.
*
* <p>As an enum, it is serializable by Jackson. */
private enum Dummy {
INSTANCE
}
}
// End AvaticaSite.java
| |
/*******************************************************************************
* Copyright 2011, 2012, 2013 fanfou.com, Xiaoke, Zhang
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
package com.fanfou.app.opensource.util;
import android.content.Context;
import android.content.SharedPreferences.Editor;
import com.fanfou.app.opensource.AppContext;
import com.fanfou.app.opensource.R;
import com.fanfou.app.opensource.api.bean.User;
import com.fanfou.app.opensource.auth.OAuthToken;
/**
* @author mcxiaoke
* @version 1.0 2011.06.01
* @version 1.1 2011.10.10
* @version 1.2 2011.10.26
* @version 2.0 2011.12.06
* @version 3.0 2011.12.26
*
*/
public final class OptionHelper {

    /** Removes every value from the shared preferences. */
    public static void clearSettings() {
        final Editor sp = AppContext.getPreferences().edit();
        sp.clear();
        sp.commit();
    }

    /**
     * Reads the string preference keyed by {@code resId} and parses it as an
     * int; "-1" is parsed when the preference is absent.
     * Throws {@link NumberFormatException} if the stored value is not numeric.
     */
    public static int parseInt(final Context context, final int resId) {
        final String res = AppContext.getPreferences().getString(
                context.getString(resId), "-1");
        return Integer.parseInt(res);
    }

    /** Same as {@link #parseInt(Context, int)} with an explicit default
     * (the default itself must be a parseable integer string). */
    public static int parseInt(final Context context, final int resId,
            final String defaultValue) {
        final String res = AppContext.getPreferences().getString(
                context.getString(resId), defaultValue);
        return Integer.parseInt(res);
    }

    /** Reads the string preference under {@code key} and parses it as an
     * int; "-1" is parsed when absent. */
    public static int parseInt(final String key) {
        final String res = AppContext.getPreferences().getString(key, "-1");
        return Integer.parseInt(res);
    }

    /** Reads the string preference under {@code key} and parses it as an
     * int, falling back to {@code defaultValue} (must be numeric). */
    public static int parseInt(final String key, final String defaultValue) {
        final String res = AppContext.getPreferences().getString(key,
                defaultValue);
        return Integer.parseInt(res);
    }

    /** Reads a boolean preference keyed by the string resource {@code resId}. */
    public static boolean readBoolean(final Context context,
            final int resId, final boolean defValue) {
        final boolean res = AppContext.getPreferences().getBoolean(
                context.getString(resId), defValue);
        return res;
    }

    /** Reads a boolean preference under {@code key}. */
    public static boolean readBoolean(final String key,
            final boolean defValue) {
        final boolean res = AppContext.getPreferences().getBoolean(key,
                defValue);
        return res;
    }

    /** Reads an int preference keyed by the string resource {@code resId}. */
    public static int readInt(final Context context, final int resId,
            final int defValue) {
        final int res = AppContext.getPreferences().getInt(
                context.getString(resId), defValue);
        return res;
    }

    /** Reads an int preference under {@code key}. */
    public static int readInt(final String key, final int defValue) {
        final int res = AppContext.getPreferences().getInt(key, defValue);
        return res;
    }

    /** Reads a long preference keyed by the string resource {@code resId}. */
    public static long readLong(final Context context, final int resId,
            final long defValue) {
        final long res = AppContext.getPreferences().getLong(
                context.getString(resId), defValue);
        return res;
    }

    /**
     * Reads a long preference under {@code key}. Kept for backward
     * compatibility: the {@code int} default is widened and delegated to
     * {@link #readLong(String, long)}.
     */
    public static long readLong(final String key, final int defValue) {
        return readLong(key, (long) defValue);
    }

    /**
     * Reads a long preference under {@code key} with a {@code long} default,
     * matching {@link #readLong(Context, int, long)}.
     */
    public static long readLong(final String key, final long defValue) {
        final long res = AppContext.getPreferences().getLong(key, defValue);
        return res;
    }

    /** Reads a string preference keyed by the string resource {@code resId}. */
    public static String readString(final Context context,
            final int resId, final String defValue) {
        final String res = AppContext.getPreferences().getString(
                context.getString(resId), defValue);
        return res;
    }

    /** Reads a string preference under {@code key}. */
    public static String readString(final String key,
            final String defValue) {
        final String res = AppContext.getPreferences().getString(key, defValue);
        return res;
    }

    /** Removes the preference keyed by the string resource {@code resId}. */
    public static void remove(final Context context, final int resId) {
        final Editor sp = AppContext.getPreferences().edit();
        sp.remove(context.getString(resId));
        sp.commit();
    }

    /** Removes the preference under {@code key}. */
    public static void remove(final String key) {
        final Editor sp = AppContext.getPreferences().edit();
        sp.remove(key);
        sp.commit();
    }

    /** Removes all account-related preferences (user id/name, avatar,
     * OAuth token and secret) in a single commit. */
    public static void removeAccountInfo(final Context context) {
        final Editor editor = AppContext.getPreferences().edit();
        editor.remove(context.getString(R.string.option_userid));
        editor.remove(context.getString(R.string.option_username));
        editor.remove(context.getString(R.string.option_profile_image));
        editor.remove(context.getString(R.string.option_oauth_token));
        editor.remove(context.getString(R.string.option_oauth_token_secret));
        editor.commit();
    }

    /** Stores a boolean preference keyed by the string resource {@code resId}. */
    public static void saveBoolean(final Context context,
            final int resId, final boolean value) {
        final Editor sp = AppContext.getPreferences().edit();
        sp.putBoolean(context.getString(resId), value);
        sp.commit();
    }

    /** Stores a boolean preference under {@code key}. */
    public static void saveBoolean(final String key, final boolean value) {
        final Editor sp = AppContext.getPreferences().edit();
        sp.putBoolean(key, value);
        sp.commit();
    }

    /** Stores an int preference keyed by the string resource {@code resId}. */
    public static void saveInt(final Context context, final int resId,
            final int value) {
        final Editor sp = AppContext.getPreferences().edit();
        sp.putInt(context.getString(resId), value);
        sp.commit();
    }

    /** Stores an int preference under {@code key}. */
    public static void saveInt(final String key, final int value) {
        final Editor sp = AppContext.getPreferences().edit();
        sp.putInt(key, value);
        sp.commit();
    }

    /** Stores a long preference keyed by the string resource {@code resId}. */
    public static void saveLong(final Context context, final int resId,
            final long value) {
        final Editor sp = AppContext.getPreferences().edit();
        sp.putLong(context.getString(resId), value);
        sp.commit();
    }

    /** Stores a long preference under {@code key}. */
    public static void saveLong(final String key, final long value) {
        final Editor sp = AppContext.getPreferences().edit();
        sp.putLong(key, value);
        sp.commit();
    }

    /** Stores a string preference keyed by the string resource {@code resId}. */
    public static void saveString(final Context context, final int resId,
            final String value) {
        final Editor sp = AppContext.getPreferences().edit();
        sp.putString(context.getString(resId), value);
        sp.commit();
    }

    /** Stores a string preference under {@code key}. */
    public static void saveString(final String key, final String value) {
        final Editor sp = AppContext.getPreferences().edit();
        sp.putString(key, value);
        sp.commit();
    }

    /**
     * Persists the signed-in account: user id, screen name, avatar URL and
     * the OAuth token pair, committed atomically in one editor transaction.
     */
    public static void updateAccountInfo(final Context context,
            final User u, final OAuthToken otoken) {
        final Editor editor = AppContext.getPreferences().edit();
        editor.putString(context.getString(R.string.option_userid), u.id);
        editor.putString(context.getString(R.string.option_username),
                u.screenName);
        editor.putString(context.getString(R.string.option_profile_image),
                u.profileImageUrl);
        editor.putString(context.getString(R.string.option_oauth_token),
                otoken.getToken());
        editor.putString(context.getString(R.string.option_oauth_token_secret),
                otoken.getTokenSecret());
        editor.commit();
    }

    /** Persists only the user profile fields (id, screen name, avatar),
     * leaving the OAuth token pair untouched. */
    public static void updateUserInfo(final Context context, final User u) {
        final Editor editor = AppContext.getPreferences().edit();
        editor.putString(context.getString(R.string.option_userid), u.id);
        editor.putString(context.getString(R.string.option_username),
                u.screenName);
        editor.putString(context.getString(R.string.option_profile_image),
                u.profileImageUrl);
        editor.commit();
    }
}
| |
/*
* Copyright (c) 2014 Haixing Hu
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.github.haixing_hu.text;
import com.github.haixing_hu.lang.CharUtils;
import com.github.haixing_hu.lang.LongUtils;
import static com.github.haixing_hu.text.ErrorCode.*;
/**
* Provides function for parsing text.
*
* @author Haixing Hu
*/
public final class ParseUtils {
/**
* Skips the leading whitespace and non-printable characters of a string,
* returns the first current of graph character.
*
* @param pos
* a {@link ParsingPosition} object indicate the current position in
* the character sequence to be parsed; after calling this function,
* the current of the position would be changed.
* @param str
* a character sequence.
* @param endIndex
* the skipping end at this current.
*/
public static void skipBlanks(final ParsingPosition pos,
final CharSequence str, final int endIndex) {
int index = pos.getIndex();
while (index < endIndex) {
final int codePoint = Utf16.getNext(pos, str, endIndex);
if (codePoint < 0) { // an error occurs
pos.setIndex(index);
return;
}
if (CharUtils.isGraph(codePoint)) { // find the first graph character
pos.setIndex(index);
return;
}
index = pos.getIndex();
}
}
/**
* Skips the leading non-blank characters of a string, returns the first
* current of blank character.
*
* @param pos
* a {@link ParsingPosition} object indicate the current position in
* the character sequence to be parsed; after calling this function,
* the current of the position would be changed.
* @param str
* a character sequence.
* @param endIndex
* the skipping end at this current.
*/
public static void skipNonBlanks(final ParsingPosition pos,
final CharSequence str, final int endIndex) {
int index = pos.getIndex();
while (index < endIndex) {
final int codePoint = Utf16.getNext(pos, str, endIndex);
if (codePoint < 0) { // an error occurs
pos.setIndex(index);
return;
}
if (CharUtils.isBlank(codePoint)) { // find the first blank character
pos.setIndex(index);
return;
}
index = pos.getIndex();
}
}
/**
* Skips the leading whitespace, non-printable characters and a specified
* separator of a string, returns the first current of printable character.
*
* @param pos
* a {@link ParsingPosition} object indicate the current position in
* the character sequence to be parsed; after calling this function,
* the current of the position would be changed.
* @param str
* a character sequence.
* @param endIndex
* the skipping end at this current.
* @param separator
* the specified separator to be skipped. Note that the separator is
* treated as a substring.
*/
// public static void skipSeparators(final ParsingPosition pos,
// final CharSequence str, final int endIndex, final CharSequence separator) {
// int index = pos.getIndex();
// while (index < endIndex) {
// final int codePoint = Utf16.getNext(pos, str, endIndex);
// if (codePoint < 0) { // an error occurs
// pos.setIndex(index);
// return;
// }
// if (CharUtils.isGraph(codePoint)) { // find the first graph character
// if (CharSequenceUtils.startsWith(str, index, endIndex, separator)) {
// index += separator.length();
// pos.setIndex(index);
// } else {
// pos.setIndex(index);
// return;
// }
// } else {
// index = pos.getIndex();
// }
// }
// }
/**
* Skips the optional prefix.
*
* @param pos
* a {@link ParsingPosition} object indicate the current position in
* the character sequence to be parsed; after calling this function,
* if the prefix is successfully skipped, this position will be set
* to the position after skipping the prefix; otherwise, this
* position will not be changed.
* @param str
* a character sequence.
* @param endIndex
* the skipping end at this current.
* @param prefix
* the optional prefix to be skipped.
* @return true if the prefix successfully skipped; false otherwise.
*/
public static boolean skipPrefix(final ParsingPosition pos,
final CharSequence str, final int endIndex, final String prefix) {
final int prefixLen = prefix.length();
final int startIndex = pos.getIndex();
if ((endIndex - startIndex) < prefixLen) {
return false;
}
for (int i = 0; i < prefixLen; ++i) {
if (str.charAt(startIndex + i) != prefix.charAt(i)) {
return false;
}
}
pos.setIndex(startIndex + prefixLen);
return true;
}
/**
* Gets the sign of the number to be parsed. After calling this function, the
* current.value may be forwarded to skip the possible sign symbol.
* <b>IMPORTANT NOTE:</b> this function does not check the validity of the
* arguments, therefore, the caller MUST make sure that the arguments are
* valid.
*
* @param pos
* a {@link ParsingPosition} object indicate the current position in
* the character sequence to be parsed; after calling this function,
* the current of the position would be changed.
* @param str
* a character sequence.
* @param endIndex
* the end current of the character sequence. It must satisfies that
* 0 <= current.value <= endIndex <= str.length(); The whole text
* segment is within the range [current.value, endIndex) of input.
* @param positiveSign
* the character of the positive sign symbol.
* @param negativeSign
* the character of the negative sign symbol.
* @return -1 if the input character sequence has a negative sign; +1
* otherwise.
*/
public static int getSign(final ParsingPosition pos, final CharSequence str,
final int endIndex, final char positiveSign, final char negativeSign) {
int sign = + 1;
final int index = pos.getIndex();
if (index < endIndex) {
final char ch = str.charAt(index);
if (ch == positiveSign) {
pos.setIndex(index + 1);
sign = + 1;
} else if (ch == negativeSign) {
pos.setIndex(index + 1);
sign = - 1;
}
}
return sign;
}
  /**
   * Gets the radix of the number to be parsed. After calling this function,
   * the position may be forwarded to skip a possible radix prefix ("0b"/"0B"
   * or "0x"/"0X") in the input text segment.
   *
   * <p>The octal prefix (a single '0') is never skipped, since it can also
   * serve as a leading zero of the number itself.
   * <b>IMPORTANT NOTE:</b> this function does not check the
   * validity of the arguments, therefore, the caller MUST make sure that the
   * arguments are valid.
   *
   * @param pos
   *          a {@link ParsingPosition} object indicating the current position
   *          in the character sequence to be parsed; after calling this
   *          function, the position may have been advanced past the prefix.
   * @param str
   *          a character sequence.
   * @param endIndex
   *          the end index (exclusive) of the character sequence. It must
   *          satisfy 0 <= pos.index <= endIndex <= str.length().
   * @param flags
   *          the combination of the format flags of the number to be parsed;
   *          only the bits in {@code FormatFlag.RADIX_MASK} are consulted.
   * @param defaultRadix
   *          the default radix used if the number to be parsed does not
   *          specify the radix nor has a radix prefix.
   * @return the radix of the number to be parsed.
   */
  public static int getRadix(final ParsingPosition pos, final CharSequence str,
      final int endIndex, final int flags, final int defaultRadix) {
    int index = pos.getIndex();
    switch (flags & FormatFlag.RADIX_MASK) {
    case FormatFlag.BINARY:
      if (index < endIndex) {
        char ch = str.charAt(index);
        if (ch == '0') {
          ++index;
          if (index < endIndex) {
            ch = str.charAt(index);
            if ((ch == 'b') || (ch == 'B')) {
              // skip the binary prefix "0b" or "0B"
              pos.setIndex(index + 1);
              return 2;
            }
          }
        }
      }
      // no prefix found: position left unchanged, radix forced to binary
      return 2;
    case FormatFlag.OCTAL:
      // since the octal prefix is a single '0', it could also be
      // treated as the leading zero, so don't skip it.
      return 8;
    case FormatFlag.HEX:
      if (index < endIndex) {
        char ch = str.charAt(index);
        if (ch == '0') {
          ++index;
          if (index < endIndex) {
            ch = str.charAt(index);
            if ((ch == 'x') || (ch == 'X')) {
              // skip the hex prefix "0x" or "0X"
              pos.setIndex(index + 1);
              return 16;
            }
          }
        }
      }
      // no prefix found: position left unchanged, radix forced to hex
      return 16;
    case FormatFlag.DECIMAL:
      return 10;
    default:
      // no explicit radix flag: detect the radix from the prefix
      if (index < endIndex) {
        char ch = str.charAt(index);
        if (ch == '0') {
          ++index;
          if (index < endIndex) {
            ch = str.charAt(index);
            if ((ch == 'b') || (ch == 'B')) {
              // skip the binary prefix "0b" or "0B"
              pos.setIndex(index + 1);
              return 2;
            } else if ((ch == 'x') || (ch == 'X')) {
              // skip the hex prefix "0x" or "0X"
              pos.setIndex(index + 1);
              return 16;
            }
          }
          // since there is a leading 0, the number is treated
          // as a octal number. Note that do NOT skip the leading
          // prefix '0'.
          return 8;
        }
      }
      return defaultRadix;
    }
  }
/**
* Parse an unsigned int value in a special radix (2, 4, 8, or 16). Since Java
* has no unsigned integral type, the returned value is still signed int, but
* the overflow condition is modified for the unsigned int. After calling this
* function, the pos.index is set to the position where the parsing stopped.
* If an error occurs during the parsing, the pos.errorCode is set to the
* error code, and the pos.errorIndex is set to the current where the error
* occurs; otherwise, pos.errorCode is set to {@link ErrorCode#NONE} and
* pos.errorIndex is set to -1. Note that this function does NOT skip the
* leading whitespace, nor does it geet the radix prefix and sign. In order to
* do that, call {@link #skipNonBlanks(ParsingPosition, CharSequence, int)},
* {@link #getRadix(ParsingPosition, CharSequence, int, int, int)} and
* {@link #getSign(ParsingPosition, CharSequence, int, char, char)} before
* calling this function. <b>IMPORTANT NOTE:</b> this function does not check
* the validity of the arguments, therefore, the caller MUST make sure that
* the arguments are valid.
*
* @param pos
* a {@link ParsingPosition} object indicate the current position in
* the character sequence to be parsed; after calling this function,
* the current of the position would be changed.
* @param str
* the text segment. It can't be null.
* @param endIndex
* the end current of the text segment. It must satisfies that 0 <=
* this.index <= endIndex <= input.length(); The whole text segment
* is within the range [this.index, endIndex) of input.
* @param sign
* the sign of the integer to be parsed.
* @param radix
* the radix of the integral value to be parsed. It must be one of 2,
* 8, or 16.
* @param maxValue
* the maximum allowed value of the number to be parsed. Note that
* this value is treated as an unsigned int.
* @param maxDigits
* the maximum number of digits could be parsed.
* @return the int value parsed by this function. If this.errorCode is set to
* ParseError.EMPTY, 0 is returned; If this.errorCode is set to
* ParseError.OVERFLOW, the maxValue is returned.
* @see #skipNonBlanks(ParsingPosition, CharSequence, int)
* @see #getRadix(ParsingPosition, CharSequence, int, int, int)
* @see #getSign(ParsingPosition, CharSequence, int, char, char)
*/
public static int getSpecialRadixInt(final ParsingPosition pos,
final CharSequence str, final int endIndex, final int sign,
final int radix, final int maxValue, final int maxDigits) {
if ((radix != 2) && (radix != 4) && (radix != 8) && (radix != 16)) {
throw new IllegalArgumentException("radix is non of 2, 4, 8, 16.");
}
if (sign == 0) {
throw new IllegalArgumentException("sign can't be zero.");
}
if (maxDigits <= 0) {
throw new IllegalArgumentException("Maximum digits must be positive.");
}
// let shift = floor(log2(radix))
final int shift = 31 - Integer.numberOfLeadingZeros(radix);
final int limit = (maxValue >>> shift);
// now perform the parsing
int digitsCount = 0;
int value = 0;
pos.clearError();
int index = pos.getIndex();
for (; index < endIndex; ++index) {
if (digitsCount >= maxDigits) {
break;
}
final char ch = str.charAt(index);
final int digit = Character.digit(ch, radix);
if (digit < 0) {
break;
}
++digitsCount; // remember the number of digits have been read
if (pos.success()) {
// note that since in Java int is signed, the following
// condition "value >= 0" is critical for binary radix.
if ((value >= 0) && (value <= limit)) {
value <<= shift;
value |= digit;
} else { // overflows
pos.setErrorCode(NUMBER_OVERFLOW);
pos.setErrorIndex(index);
}
}
}
pos.setIndex(index);
if (pos.success()) {
if (digitsCount == 0) { // no digit has been read
pos.setErrorCode(EMPTY_VALUE);
pos.setErrorIndex(index);
}
if (sign > 0) {
return value;
} else {
return (- value);
}
} else {
if (sign > 0) {
return maxValue;
} else {
return (- maxValue);
}
}
}
/**
* Parse an unsigned long value in a special radix (2, 4, 8, or 16). Since
* Java has no unsigned integral type, the returned value is still signed
* long, but the overflow condition is modified for the unsigned long. After
* calling this function, the pos.index is set to the position where the
* parsing stopped. If an error occurs during the parsing, the pos.errorCode
* is set to the error code, and the pos.errorIndex is set to the current
* where the error occurs; otherwise, pos.errorCode is set to
* {@link ErrorCode#NONE} and pos.errorIndex is set to -1. Note that this
* function does NOT skip the leading whitespace, nor does it geet the radix
* prefix and sign. In order to do that, call
* {@link #skipNonBlanks(ParsingPosition, CharSequence, int)},
* {@link #getRadix(ParsingPosition, CharSequence, int, int, int)} and
* {@link #getSign(ParsingPosition, CharSequence, int, char, char)} before
* calling this function. <b>IMPORTANT NOTE:</b> this function does not check
* the validity of the arguments, therefore, the caller MUST make sure that
* the arguments are valid.
*
* @param pos
* a {@link ParsingPosition} object indicate the current position in
* the character sequence to be parsed; after calling this function,
* the current of the position would be changed.
* @param str
* the text segment. It can't be null.
* @param endIndex
* the end current of the text segment. It must satisfies that 0 <=
* this.index <= endIndex <= input.length(); The whole text segment
* is within the range [this.index, endIndex) of input.
* @param radix
* the radix of the integral value to be parsed. It must be one of 2,
* 8, or 16.
* @return the long value parsed by this function. If this.errorCode is set to
* ParseError.EMPTY, 0 is returned; If this.errorCode is set to
* ParseError.OVERFLOW, the UNSIGNED_LONG_MAX is returned.
* @param maxDigits
* the maximum number of digits could be parsed.
* @see #skipNonBlanks(ParsingPosition, CharSequence, int)
* @see #getRadix(ParsingPosition, CharSequence, int, int, int)
* @see #getSign(ParsingPosition, CharSequence, int, char, char)
*/
public static long getSpecialRadixLong(final ParsingPosition pos,
    final CharSequence str, final int endIndex, final int sign,
    final int radix, final int maxDigits) {
  // Only power-of-two radixes are supported, since digits are accumulated
  // with shifts. (The Javadoc above mentions 2/8/16; 4 is accepted too.)
  if ((radix != 2) && (radix != 4) && (radix != 8) && (radix != 16)) {
    // Fixed message grammar: was "radix is non of 2, 4, 8, 16."
    throw new IllegalArgumentException("radix is none of 2, 4, 8, 16.");
  }
  if (sign == 0) {
    throw new IllegalArgumentException("sign can't be zero.");
  }
  if (maxDigits <= 0) {
    throw new IllegalArgumentException("Maximum digits must be positive.");
  }
  // let shift = floor(log2(radix)); each digit contributes `shift` bits
  final int shift = 31 - Integer.numberOfLeadingZeros(radix);
  // Largest value that can absorb one more digit without exceeding the
  // unsigned 64-bit range.
  final long limit = (LongUtils.UNSIGNED_MAX >>> shift);
  // now perform the parsing
  int digitsCount = 0;
  long value = 0;
  pos.clearError();
  int index = pos.getIndex();
  for (; index < endIndex; ++index) {
    if (digitsCount >= maxDigits) {
      break;
    }
    final char ch = str.charAt(index);
    final int digit = Character.digit(ch, radix);
    if (digit < 0) {
      break; // not a digit in this radix: stop parsing
    }
    ++digitsCount; // remember the number of digits have been read
    // Once overflow is flagged, the remaining digits are still consumed so
    // pos.index ends up after the whole number, but accumulation stops.
    if (pos.success()) {
      // note that since in Java long is signed, the following
      // condition "value >= 0" is critical for binary radix: a value with
      // the top bit set compares negative and must not be shifted again.
      if ((value >= 0) && (value <= limit)) {
        value <<= shift;
        value |= digit;
      } else { // overflows
        pos.setErrorCode(NUMBER_OVERFLOW);
        pos.setErrorIndex(index);
      }
    }
  }
  pos.setIndex(index);
  if (pos.success()) {
    if (digitsCount == 0) { // no digit has been read
      pos.setErrorCode(EMPTY_VALUE);
      pos.setErrorIndex(index);
    }
    if (sign > 0) {
      return value;
    } else {
      return (- value);
    }
  } else {
    // Overflow: report the saturated unsigned maximum with the caller's sign.
    if (sign > 0) {
      return LongUtils.UNSIGNED_MAX;
    } else {
      return (- LongUtils.UNSIGNED_MAX);
    }
  }
}
/**
 * Parses a signed int value in a decimal radix. After calling this function,
 * the pos.index is set to the position where the parsing stopped. If an error
 * occurs during the parsing, the pos.errorCode is set to the error code, and
 * the pos.errorIndex is set to the position where the error occurs; otherwise,
 * pos.errorCode is set to {@link ErrorCode#NONE} and pos.errorIndex is set to
 * -1. Note that this function does NOT skip the leading whitespace, nor does
 * it get the radix prefix and sign. In order to do that, call
 * {@link #skipNonBlanks(ParsingPosition, CharSequence, int)},
 * {@link #getRadix(ParsingPosition, CharSequence, int, int, int)} and
 * {@link #getSign(ParsingPosition, CharSequence, int, char, char)} before
 * calling this function. <b>IMPORTANT NOTE:</b> this function does not check
 * the validity of the arguments, therefore, the caller MUST make sure that
 * the arguments are valid. TODO: add support to digit grouping.
 *
 * @param pos
 *          a {@link ParsingPosition} object indicate the current position in
 *          the character sequence to be parsed; after calling this function,
 *          the index of the position would be changed.
 * @param str
 *          the text segment. It can't be null.
 * @param endIndex
 *          the end index of the text segment. It must satisfies that 0 <=
 *          this.index <= endIndex <= input.length(); The whole text segment
 *          is within the range [this.index, endIndex) of input.
 * @param sign
 *          the sign of the integral value to be parsed. A negative value
 *          indicate the value to be parsed is a negative value, otherwise the
 *          value to be parsed is a positive value.
 * @param maxValue
 *          the maximum allowed value of the number to be parsed. Note that
 *          this value is treated as a signed int, and the minimum allowed
 *          value of the number to be parsed is (- maxValue - 1).
 * @param maxDigits
 *          the maximum number of digits could be parsed.
 * @return the integral value parsed by this function. If this.errorCode is
 *         ParseError.EMPTY, returned value is 0; If this.errorCode is
 *         ParseError.OVERFLOW, returned value is maxValue if sign >= 0, or (-
 *         maxValue - 1) if sign < 0.
 * @see #skipNonBlanks(ParsingPosition, CharSequence, int)
 * @see #getRadix(ParsingPosition, CharSequence, int, int, int)
 * @see #getSign(ParsingPosition, CharSequence, int, char, char)
 */
public static int getDecimalInt(final ParsingPosition pos,
    final CharSequence str, final int endIndex, final int sign,
    final int maxValue, final int maxDigits) {
  if (sign == 0) {
    throw new IllegalArgumentException("sign can't be zero.");
  }
  if (maxDigits <= 0) {
    throw new IllegalArgumentException("Maximum digits must be positive.");
  }
  // Accumulation trick to handle overflow: since the absolute value of
  // minValue can not be represented as a positive int, digits are accumulated
  // as a NEGATIVE int, and the sign is fixed up at the end when the result is
  // really a positive int.
  final int minValue = (- maxValue - 1);
  final int limit = minValue / 10;
  int digitsCount = 0;
  int value = 0;
  pos.clearError();
  int index = pos.getIndex();
  for (; index < endIndex; ++index) {
    if (digitsCount >= maxDigits) {
      break;
    }
    final char ch = str.charAt(index);
    final int digit = Character.digit(ch, 10);
    if (digit < 0) {
      break; // not a decimal digit: stop parsing
    }
    // remember the number of digits has been read
    ++digitsCount;
    // note that if the value overflows, the remaining digits are still
    // consumed (so pos.index ends up after the whole number), but they are
    // no longer accumulated into the overflowed value.
    if (pos.success()) {
      if (value < limit) {
        // appending one more digit would certainly overflow
        pos.setErrorCode(NUMBER_OVERFLOW);
        pos.setErrorIndex(index);
      } else {
        value *= 10; // never overflow, since value >= limit
        value -= digit; // may cause overflow
        // check for overflow caused by the subtraction above;
        // note that the following checking will miss one special case:
        // sign >= 0 and the value is minValue.
        // so it should be fixed in the following code.
        if ((value > 0) || (value < minValue)) {
          pos.setErrorCode(NUMBER_OVERFLOW);
          pos.setErrorIndex(index);
        }
      }
    }
  }
  pos.setIndex(index);
  if (digitsCount == 0) { // no digits are read
    pos.setErrorCode(EMPTY_VALUE);
    pos.setErrorIndex(index);
    return 0;
  } else if (pos.getErrorCode() == NUMBER_OVERFLOW) {
    return (sign >= 0 ? maxValue : minValue);
  } else if (sign >= 0) {
    if (value == minValue) {
      // (- minValue) is not representable: it's also a case of overflow
      pos.setErrorCode(NUMBER_OVERFLOW);
      pos.setErrorIndex(index - 1);
      return maxValue;
    } else {
      return (- value); // return the fixed value
    }
  } else {
    return value;
  }
}
/**
 * Parses a signed long value in a decimal radix. After calling this function,
 * the pos.index is set to the position where the parsing stopped. If an error
 * occurs during the parsing, the pos.errorCode is set to the error code, and
 * the pos.errorIndex is set to the position where the error occurs; otherwise,
 * pos.errorCode is set to {@link ErrorCode#NONE} and pos.errorIndex is set to
 * -1. Note that this function does NOT skip the leading whitespace, nor does
 * it get the radix prefix and sign. In order to do that, call
 * {@link #skipNonBlanks(ParsingPosition, CharSequence, int)},
 * {@link #getRadix(ParsingPosition, CharSequence, int, int, int)} and
 * {@link #getSign(ParsingPosition, CharSequence, int, char, char)} before
 * calling this function. <b>IMPORTANT NOTE:</b> this function does not check
 * the validity of the arguments, therefore, the caller MUST make sure that
 * the arguments are valid. TODO: add support to digit grouping.
 *
 * @param pos
 *          a {@link ParsingPosition} object indicate the current position in
 *          the character sequence to be parsed; after calling this function,
 *          the index of the position would be changed.
 * @param str
 *          the text segment. It can't be null.
 * @param endIndex
 *          the end index of the text segment. It must satisfies that 0 <=
 *          this.index <= endIndex <= input.length(); The whole text segment
 *          is within the range [this.index, endIndex) of input.
 * @param sign
 *          the sign of the integral value to be parsed. A negative value
 *          indicate the value to be parsed is a negative value, otherwise the
 *          value to be parsed is a positive value.
 * @param maxDigits
 *          the maximum number of digits could be parsed.
 * @return the integral value parsed by this function. If this.errorCode is
 *         ParseError.EMPTY, returned value is 0; If this.errorCode is
 *         ParseError.OVERFLOW, returned value is Long.MAX_VALUE if sign >= 0,
 *         or Long.MIN_VALUE if sign < 0.
 * @see #skipNonBlanks(ParsingPosition, CharSequence, int)
 * @see #getRadix(ParsingPosition, CharSequence, int, int, int)
 * @see #getSign(ParsingPosition, CharSequence, int, char, char)
 */
public static long getDecimalLong(final ParsingPosition pos,
    final CharSequence str, final int endIndex, final int sign,
    final int maxDigits) {
  if (sign == 0) {
    throw new IllegalArgumentException("sign can't be zero.");
  }
  if (maxDigits <= 0) {
    throw new IllegalArgumentException("Maximum digits must be positive.");
  }
  // Digits are accumulated as a NEGATIVE long (Long.MIN_VALUE has no positive
  // counterpart); the sign is fixed up at the end for positive results.
  final long limit = Long.MIN_VALUE / 10;
  int digitsCount = 0;
  long value = 0;
  pos.clearError();
  int index = pos.getIndex();
  for (; index < endIndex; ++index) {
    if (digitsCount >= maxDigits) {
      break;
    }
    final char ch = str.charAt(index);
    final int digit = Character.digit(ch, 10);
    if (digit < 0) {
      break; // not a decimal digit: stop parsing
    }
    // remember the number of digits has been read
    ++digitsCount;
    // note that if the value overflows, the remaining digits are still
    // consumed (so pos.index ends up after the whole number), but they are
    // no longer accumulated into the overflowed value.
    if (pos.success()) {
      if (value < limit) {
        // appending one more digit would certainly overflow
        pos.setErrorCode(NUMBER_OVERFLOW);
        pos.setErrorIndex(index);
      } else {
        value *= 10; // never overflow, since value >= limit
        value -= digit; // may cause overflow
        // Overflow of the subtraction wraps past Long.MIN_VALUE into a
        // positive value, so "value > 0" detects it. (The former extra check
        // "value < Long.MIN_VALUE" was removed: it is always false for a
        // long.) This still misses one special case — sign >= 0 and value ==
        // Long.MIN_VALUE — which is fixed up after the loop.
        if (value > 0) {
          pos.setErrorCode(NUMBER_OVERFLOW);
          pos.setErrorIndex(index);
        }
      }
    }
  }
  pos.setIndex(index);
  if (digitsCount == 0) { // no digits are read
    pos.setErrorCode(EMPTY_VALUE);
    pos.setErrorIndex(index);
    return 0;
  } else if (pos.getErrorCode() == NUMBER_OVERFLOW) {
    return (sign >= 0 ? Long.MAX_VALUE : Long.MIN_VALUE);
  } else if (sign >= 0) {
    if (value == Long.MIN_VALUE) {
      // (- Long.MIN_VALUE) is not representable: it's also a case of overflow
      pos.setErrorCode(NUMBER_OVERFLOW);
      pos.setErrorIndex(index - 1);
      return Long.MAX_VALUE;
    } else {
      return (- value); // return the fixed value
    }
  } else {
    return value;
  }
}
/**
 * Parses a single char value.
 *
 * @param pos
 *          a {@link ParsingPosition} object indicating the current position
 *          in the character sequence to be parsed; on error the error code
 *          and error index of this object are set.
 * @param str
 *          the text segment to be parsed.
 * @param endIndex
 *          the end index of the text segment.
 * @param options
 *          the parsing options.
 * @return the parsed value, or (char) 0 on failure.
 */
public static char parseChar(final ParsingPosition pos, final CharSequence str,
    final int endIndex, final ParseOptions options) {
  // Optionally consume leading whitespace before reading the character.
  if (!options.isKeepBlank()) {
    skipBlanks(pos, str, endIndex);
    if (pos.fail()) {
      return (char) 0;
    }
  }
  final int cur = pos.getIndex();
  if ((cur >= endIndex) || (cur >= str.length())) {
    // Nothing left to read within the segment.
    pos.setErrorCode(EMPTY_VALUE);
    pos.setErrorIndex(cur);
    return (char) 0;
  }
  // NOTE(review): the parsing position is NOT advanced past the returned
  // character — confirm callers rely on this before changing it.
  return str.charAt(cur);
}
/**
 * Parses a char value.
 *
 * @param str
 *          the text segment to be parsed.
 * @param startIndex
 *          the start index of the text segment.
 * @param endIndex
 *          the end index of the text segment.
 * @param options
 *          the parse options.
 * @return the parsed value.
 * @throws TextParseException
 *           if any error occurs during parsing.
 * @throws IndexOutOfBoundsException
 *           if startIndex < 0 or startIndex > endIndex or endIndex >
 *           str.length().
 */
public static char parseChar(final CharSequence str, final int startIndex,
    final int endIndex, final ParseOptions options) throws TextParseException {
  final ParsingPosition pos = new ParsingPosition(startIndex);
  final char result = parseChar(pos, str, endIndex, options);
  if (pos.fail()) {
    throw new TextParseException(str, startIndex, endIndex, pos);
  }
  return result;
}
/**
 * Parses a char value, keeping leading blanks.
 *
 * @param str
 *          the text segment to be parsed.
 * @param startIndex
 *          the start index of the text segment.
 * @param endIndex
 *          the end index of the text segment.
 * @return the parsed value.
 * @throws TextParseException
 *           if any error occurs during parsing.
 * @throws IndexOutOfBoundsException
 *           if startIndex < 0 or startIndex > endIndex or endIndex >
 *           str.length().
 */
public static char parseChar(final CharSequence str, final int startIndex,
    final int endIndex) throws TextParseException {
  // The constructor already positions the ParsingPosition at startIndex;
  // the former redundant pos.reset(startIndex) call was removed.
  final ParsingPosition pos = new ParsingPosition(startIndex);
  final char result = parseChar(pos, str, endIndex,
      ParseOptions.DEFAULT_KEEP_BLANKS);
  if (pos.fail()) {
    throw new TextParseException(str, startIndex, endIndex, pos);
  }
  return result;
}
/**
 * Parses a char value from the whole character sequence, keeping leading
 * blanks.
 *
 * @param str
 *          the text segment to be parsed.
 * @return the parsed value.
 * @throws TextParseException
 *           if any error occurs during parsing.
 */
public static char parseChar(final CharSequence str)
    throws TextParseException {
  final ParsingPosition position = new ParsingPosition();
  final char parsed =
      parseChar(position, str, str.length(), ParseOptions.DEFAULT_KEEP_BLANKS);
  if (position.fail()) {
    throw new TextParseException(str, position);
  }
  return parsed;
}
//
// public static Date parseDate(final ParsingPosition pos, final CharSequence str,
// final int endIndex, final ParseOptions options,
// final NumberFormatSymbols symbols, final String formatPattern) {
// // skip the leading white space if necessary
// if (! options.isKeepBlank()) {
// skipBlanks(pos, str, endIndex);
// if (pos.fail()) {
// return null;
// }
// }
// // / TODO: use a better parsing method
// final SimpleDateFormat df = new SimpleDateFormat(formatPattern);
// final Date result = df.parse(str.toString(), pos);
// if (result == null) {
// pos.setErrorCode(ErrorCode.INVALID_SYNTAX);
// return null;
// } else if (pos.getIndex() > endIndex) {
// pos.setErrorCode(ErrorCode.INVALID_SYNTAX);
// return null;
// } else {
// return result;
// }
// }
//
// public static Date parseDate(final CharSequence str, final int startIndex,
// final int endIndex, final ParseOptions options) throws TextParseException {
// final ParsePositionPool pool = ParsePositionPool.getInstance();
// final ParsingPosition pos = pool.borrow();
// try {
// pos.reset(startIndex);
// final Date result = parseDate(pos, str, endIndex, options,
// NumberFormatSymbols.DEFAULT, DateUtils.DEFAULT_LOCAL_DATETIME_PATTERN);
// if (pos.fail()) {
// throw new TextParseException(str, startIndex, endIndex, pos);
// } else {
// assert (result != null);
// return result;
// }
// } finally {
// pool.restore(pos);
// }
// }
//
// public static Date parseDate(final CharSequence str, final int startIndex,
// final int endIndex) throws TextParseException {
// final ParsePositionPool pool = ParsePositionPool.getInstance();
// final ParsingPosition pos = pool.borrow();
// try {
// pos.reset(startIndex);
// final Date result = parseDate(pos, str, endIndex, ParseOptions.DEFAULT,
// NumberFormatSymbols.DEFAULT, DateUtils.DEFAULT_LOCAL_DATETIME_PATTERN);
// if (pos.fail()) {
// throw new TextParseException(str, startIndex, endIndex, pos);
// } else {
// assert (result != null);
// return result;
// }
// } finally {
// pool.restore(pos);
// }
// }
//
// public static Date parseDate(final CharSequence str)
// throws TextParseException {
// final ParsePositionPool pool = ParsePositionPool.getInstance();
// final ParsingPosition pos = pool.borrow();
// try {
// final Date result = parseDate(pos, str, str.length(),
// ParseOptions.DEFAULT, FormatSymbols.DEFAULT,
// DateUtils.DEFAULT_LOCAL_DATETIME_PATTERN);
// if (pos.fail()) {
// throw new TextParseException(str, pos);
// } else {
// assert (result != null);
// return result;
// }
// } finally {
// pool.restore(pos);
// }
// }
}
| |
package ca.teyssedre.restclient;
import android.util.Base64;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Reader;
import java.io.UnsupportedEncodingException;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.Charset;
import java.util.HashSet;
import java.util.Set;
import java.util.UUID;
import java.util.zip.GZIPInputStream;
import javax.net.ssl.HttpsURLConnection;
import javax.net.ssl.SSLSocketFactory;
/**
 * Builder-style object used to create an HTTP request to be executed through
 * the {@link HttpClient} instance.
 *
 * @author pteyssedre
 * @version 2
 */
@SuppressWarnings("unused")
public class HttpRequest {
    private static final String TAG = "HttpRequest";
    //<editor-fold desc="properties">
    private UUID id;
    private HttpRequestType type;
    private HttpContentType contentType;
    private Set<HttpHeader> headers;
    private boolean https;
    private boolean anonymous;
    private SSLSocketFactory sslFactory = (SSLSocketFactory) SSLSocketFactory.getDefault();
    private String url;
    private String data;
    private byte[] binary;
    private int readTimeout = 15 * 1000;      // milliseconds
    private int connectTimeout = 30 * 1000;   // milliseconds
    private boolean read = true;
    private boolean write;
    private HttpResponse response;
    private boolean processed;
    private HttpURLConnection connection;
    //</editor-fold>
    //<editor-fold desc="Constructors">
    /** Creates an empty GET request; the URL must be set before execution. */
    public HttpRequest() {
        id = UUID.randomUUID();
        headers = new HashSet<>();
        type = HttpRequestType.GET;
    }
    /**
     * Creates a GET request for the given URL.
     *
     * @throws Exception if the URL is null or malformed.
     */
    public HttpRequest(String url) throws Exception {
        id = UUID.randomUUID();
        headers = new HashSet<>();
        this.url = url;
        type = HttpRequestType.GET;
        validateUrl();
    }
    /**
     * Creates a request of the given type for the given URL.
     *
     * @throws Exception if the URL is null or malformed.
     */
    public HttpRequest(String url, HttpRequestType type) throws Exception {
        id = UUID.randomUUID();
        headers = new HashSet<>();
        this.url = url;
        setType(type);
        validateUrl();
    }
    /**
     * Creates a GET request with an explicit content type.
     *
     * @throws Exception if the URL is null or malformed.
     */
    public HttpRequest(String url, HttpContentType contentType) throws Exception {
        id = UUID.randomUUID();
        headers = new HashSet<>();
        this.url = url;
        setContentType(contentType);
        validateUrl();
    }
    /**
     * Creates a request with an explicit type and content type.
     * <p>
     * NOTE(review): this constructor installs {@code NoSSLValidation} as the
     * socket factory, disabling certificate validation — unlike the other
     * constructors. Confirm this is intentional; it is unsafe for production.
     *
     * @throws Exception if the URL is null or malformed.
     */
    public HttpRequest(String url, HttpRequestType type, HttpContentType contentType) throws Exception {
        id = UUID.randomUUID();
        headers = new HashSet<>();
        this.url = url;
        this.type = type;
        setContentType(contentType);
        sslFactory = new NoSSLValidation();
        validateUrl();
    }
    //</editor-fold>
    //<editor-fold desc="Public methods">
    /** Attaches a binary payload and switches the request to POST. */
    public HttpRequest addBinary(byte[] binary) {
        this.binary = binary;
        this.type = HttpRequestType.POST;
        return this;
    }
    /** Adds a custom header. */
    public HttpRequest addHeader(String key, String value) {
        headers.add(new HttpHeader(key, value));
        return this;
    }
    /**
     * Adds a Basic {@code Authorization} header built from the given
     * "user:password" credentials string.
     */
    public HttpRequest addBasic(String credentials) {
        String encoded = "Basic " + Base64.encodeToString((credentials).getBytes(), Base64.NO_WRAP);
        addAuthorization(encoded);
        return this;
    }
    /** Adds a pre-encoded {@code Authorization} header value. */
    public HttpRequest addAuthorization(String encoded) {
        headers.add(new HttpHeader("Authorization", encoded));
        return this;
    }
    /**
     * Serializes the given form and attaches it as
     * {@code application/x-www-form-urlencoded} POST data.
     */
    public HttpRequest addFormData(HttpForm data) {
        if (data != null) {
            try {
                String serialize = data.serialize();
                this.addData(serialize);
                this.contentType = HttpContentType.APPLICATION_WWW_FORM;
            } catch (UnsupportedEncodingException e) {
                e.printStackTrace();
            }
        }
        return this;
    }
    /**
     * Attaches a string payload, guesses its content type and switches the
     * request to POST.
     */
    public HttpRequest addData(String data) {
        this.data = data;
        determineType(data);
        this.type = HttpRequestType.POST;
        this.write = true;
        return this;
    }
    /**
     * Guesses the content type of the payload: valid JSON maps to
     * {@code application/json}, anything else to plain text.
     */
    private void determineType(String data) {
        try {
            new JSONObject(data); // parse only; the instance itself is unused
            setContentType(HttpContentType.APPLICATION_JSON);
        } catch (JSONException e) {
            // Expected path for non-JSON payloads — not an error, so no
            // stack trace is printed.
            setContentType(HttpContentType.PLAIN_TEXT);
        }
    }
    /** Sets whether the default {@code User-Agent} header is suppressed. */
    public HttpRequest addUserAgent(boolean anonymous) {
        this.anonymous = anonymous;
        return this;
    }
    /** Adds an explicit {@code User-Agent} header. */
    public HttpRequest addUserAgent(String userAgent) {
        headers.add(new HttpHeader("User-Agent", userAgent));
        return this;
    }
    //</editor-fold>
    //<editor-fold desc="Private methods">
    /**
     * Opens the connection, applies headers/timeouts/method and connects.
     * Any {@link IOException} is recorded on the response instead of thrown.
     */
    protected HttpRequest processRequest() {
        response = new HttpResponse();
        processed = true;
        connection = null;
        try {
            URL url = new URL(this.url);
            if (https) {
                connection = (HttpsURLConnection) url.openConnection();
                // The socket factory only applies to HTTPS connections.
                // (Previously it was applied unconditionally, which threw a
                // ClassCastException for plain http URLs because sslFactory
                // is non-null by default.)
                if (sslFactory != null) {
                    ((HttpsURLConnection) connection).setSSLSocketFactory(sslFactory);
                }
            } else {
                connection = (HttpURLConnection) url.openConnection();
            }
            if (!anonymous) {
                connection.setRequestProperty("User-Agent", System.getProperty("http.agent"));
            }
            if (contentType != null) {
                connection.setRequestProperty("Content-Type", contentType.getValue());
            }
            connection.setRequestProperty("Accept-Encoding", "gzip, deflate");
            connection.setRequestProperty("Connection", "keep-alive");
            // TODO: may change depending on content-type
            connection.setRequestProperty("Accept", "*/*");
            connection.setConnectTimeout(connectTimeout);
            switch (type) {
                case PUT:
                    connection.setRequestMethod("PUT");
                    break;
                case GET:
                    connection.setRequestMethod("GET");
                    break;
                case POST:
                    connection.setRequestMethod("POST");
                    if (data != null && data.length() > 0) {
                        connection.setRequestProperty("Content-Length", String.valueOf(data.getBytes().length));
                    } else if (binary != null) {
                        connection.setRequestProperty("Content-Length", String.valueOf(binary.length));
                    }
                    break;
                case DELETE:
                    connection.setRequestMethod("DELETE");
                    break;
            }
            if (headers != null) {
                for (HttpHeader header : headers) {
                    connection.setRequestProperty(header.getName(), header.getValue());
                }
            }
            connection.setReadTimeout(readTimeout);
            connection.connect();
        } catch (IOException e) {
            e.printStackTrace();
            response.setException(e);
        }
        return this;
    }
    /**
     * Reads the response body (transparently inflating gzip) into the
     * response as a UTF-8 string. Errors are recorded on the response.
     */
    protected void doRead() {
        if (connection == null) {
            return;
        }
        try {
            InputStream in = connection.getInputStream();
            if (in != null) {
                Charset charset = Charset.forName("UTF-8");
                Reader reader;
                if ("gzip".equals(connection.getContentEncoding())) {
                    reader = new InputStreamReader(new GZIPInputStream(in), charset);
                } else {
                    reader = new InputStreamReader(in, charset);
                }
                BufferedReader rd = new BufferedReader(reader);
                String line;
                StringBuilder sbt = new StringBuilder();
                while ((line = rd.readLine()) != null) {
                    sbt.append(line);
                }
                rd.close();
                addStringResult(sbt.toString());
            }
        } catch (IOException e) {
            response.setException(e);
        }
    }
    /** Stores the raw string body on the response, creating it if needed. */
    private void addStringResult(String data) {
        if (response == null) {
            response = new HttpResponse();
        }
        response.setStringResponse(data);
    }
    /**
     * Writes the string or binary payload to the connection, if any.
     * Errors are recorded on the response.
     */
    protected void doWrite() {
        if (connection == null) {
            // processRequest failed before a connection was opened
            // (mirrors the guard in doRead()).
            return;
        }
        try {
            if (write) {
                OutputStream os = connection.getOutputStream();
                if (data != null) {
                    BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(os, "UTF-8"));
                    writer.write(data);
                    writer.flush();
                    writer.close();
                } else if (binary != null) {
                    // binary to send could be very long ... should be able to stream
                    DataOutputStream writer = new DataOutputStream(os);
                    writer.write(binary);
                    writer.flush();
                    writer.close();
                }
                // If both data and binary are null there is nothing to write;
                // previously this dereferenced a null binary array.
            }
        } catch (IOException exception) {
            response.setException(exception);
        }
    }
    /** Copies status, headers and TLS details from the connection onto the response. */
    private void parseConnection() {
        if (connection != null) {
            if (response == null) {
                response = new HttpResponse();
            }
            response.setOrigin(url);
            try {
                response.setStatusCode(connection.getResponseCode());
                response.setContentType(connection.getContentType());
                response.setHeaders(connection.getHeaderFields());
                try {
                    if (https) {
                        HttpsURLConnection sslConnection = (HttpsURLConnection) connection;
                        response.setCertificates(sslConnection.getServerCertificates());
                        response.setCipherSuite(sslConnection.getCipherSuite());
                    }
                } catch (RuntimeException ignored) {
                    // TLS details are best-effort only.
                }
            } catch (IOException e) {
                // The exception is recorded, not ignored.
                response.setException(e);
            }
        }
    }
    /**
     * Validates the URL and refreshes the {@code https} flag.
     *
     * @throws Exception if the URL is null or malformed.
     */
    private void validateUrl() throws Exception {
        if (this.url == null) {
            throw new Exception("URL can't be null");
        }
        URL parsed = new URL(this.url);
        // Assign (not just set-on-true) so switching an https request to a
        // plain http URL via setUrl() clears the stale flag.
        https = "https".equalsIgnoreCase(parsed.getProtocol());
    }
    //</editor-fold>
    //<editor-fold desc="Getters & Setters">
    public Set<HttpHeader> getHeaders() {
        return headers;
    }
    public void setHeaders(Set<HttpHeader> headers) {
        this.headers = headers;
    }
    public String getUrl() {
        return url;
    }
    public void setUrl(String url) {
        this.url = url;
        try {
            validateUrl();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
    public String getData() {
        return data;
    }
    public void setData(String data) {
        this.data = data;
    }
    public byte[] getBinary() {
        return binary;
    }
    public void setBinary(byte[] binary) {
        this.binary = binary;
    }
    public int getReadTimeout() {
        return readTimeout;
    }
    public void setReadTimeout(int readTimeout) {
        this.readTimeout = readTimeout;
    }
    public int getConnectTimeout() {
        return connectTimeout;
    }
    public void setConnectTimeout(int connectTimeout) {
        this.connectTimeout = connectTimeout;
    }
    public boolean isRead() {
        return read;
    }
    public void setRead(boolean read) {
        this.read = read;
    }
    public boolean isWrite() {
        return write;
    }
    public void setWrite(boolean write) {
        this.write = write;
    }
    public HttpRequestType getType() {
        return type;
    }
    public void setType(HttpRequestType type) {
        this.type = type;
    }
    public HttpContentType getContentType() {
        return contentType;
    }
    public void setContentType(HttpContentType contentType) {
        this.contentType = contentType;
    }
    public boolean isAnonymous() {
        return anonymous;
    }
    public SSLSocketFactory getSslFactory() {
        return sslFactory;
    }
    public void setSslFactory(SSLSocketFactory sslFactory) {
        this.sslFactory = sslFactory;
    }
    /** Refreshes response metadata from the connection and returns it. */
    public HttpResponse getResponse() {
        parseConnection();
        return response;
    }
    public UUID getId() {
        return id;
    }
    public boolean hasBeenProcessed() {
        return processed;
    }
    public boolean shouldWrite() {
        return write;
    }
    public boolean shouldRead() {
        return read;
    }
    public HttpURLConnection getConnection() {
        return connection;
    }
    //</editor-fold>
}
| |
/*
* Copyright 2016 Kejun Xia
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.shipdream.lib.android.mvc.service;
import java.util.HashMap;
/**
* Map file extensions to MIME types. Based on the Apache mime.types file.
* http://www.iana.org/assignments/media-types/
*/
public class MimeTypes {
public static final String MIME_APPLICATION_ANDREW_INSET = "application/andrew-inset";
public static final String MIME_APPLICATION_JSON = "application/json";
public static final String MIME_APPLICATION_ZIP = "application/zip";
public static final String MIME_APPLICATION_X_GZIP = "application/x-gzip";
public static final String MIME_APPLICATION_TGZ = "application/tgz";
public static final String MIME_APPLICATION_MSWORD = "application/msword";
public static final String MIME_APPLICATION_POSTSCRIPT = "application/postscript";
public static final String MIME_APPLICATION_PDF = "application/pdf";
public static final String MIME_APPLICATION_JNLP = "application/jnlp";
public static final String MIME_APPLICATION_MAC_BINHEX40 = "application/mac-binhex40";
public static final String MIME_APPLICATION_MAC_COMPACTPRO = "application/mac-compactpro";
public static final String MIME_APPLICATION_MATHML_XML = "application/mathml+xml";
public static final String MIME_APPLICATION_OCTET_STREAM = "application/octet-stream";
public static final String MIME_APPLICATION_ODA = "application/oda";
public static final String MIME_APPLICATION_RDF_XML = "application/rdf+xml";
public static final String MIME_APPLICATION_JAVA_ARCHIVE = "application/java-archive";
public static final String MIME_APPLICATION_RDF_SMIL = "application/smil";
public static final String MIME_APPLICATION_SRGS = "application/srgs";
public static final String MIME_APPLICATION_SRGS_XML = "application/srgs+xml";
public static final String MIME_APPLICATION_VND_MIF = "application/vnd.mif";
public static final String MIME_APPLICATION_VND_MSEXCEL = "application/vnd.ms-excel";
public static final String MIME_APPLICATION_VND_MSPOWERPOINT= "application/vnd.ms-powerpoint";
public static final String MIME_APPLICATION_VND_RNREALMEDIA = "application/vnd.rn-realmedia";
public static final String MIME_APPLICATION_X_BCPIO = "application/x-bcpio";
public static final String MIME_APPLICATION_X_CDLINK = "application/x-cdlink";
public static final String MIME_APPLICATION_X_CHESS_PGN = "application/x-chess-pgn";
public static final String MIME_APPLICATION_X_CPIO = "application/x-cpio";
public static final String MIME_APPLICATION_X_CSH = "application/x-csh";
public static final String MIME_APPLICATION_X_DIRECTOR = "application/x-director";
public static final String MIME_APPLICATION_X_DVI = "application/x-dvi";
public static final String MIME_APPLICATION_X_FUTURESPLASH = "application/x-futuresplash";
public static final String MIME_APPLICATION_X_GTAR = "application/x-gtar";
public static final String MIME_APPLICATION_X_HDF = "application/x-hdf";
public static final String MIME_APPLICATION_X_JAVASCRIPT = "application/x-javascript";
public static final String MIME_APPLICATION_X_KOAN = "application/x-koan";
public static final String MIME_APPLICATION_X_LATEX = "application/x-latex";
public static final String MIME_APPLICATION_X_NETCDF = "application/x-netcdf";
public static final String MIME_APPLICATION_X_OGG = "application/x-ogg";
public static final String MIME_APPLICATION_X_SH = "application/x-sh";
public static final String MIME_APPLICATION_X_SHAR = "application/x-shar";
public static final String MIME_APPLICATION_X_SHOCKWAVE_FLASH = "application/x-shockwave-flash";
public static final String MIME_APPLICATION_X_STUFFIT = "application/x-stuffit";
public static final String MIME_APPLICATION_X_SV4CPIO = "application/x-sv4cpio";
public static final String MIME_APPLICATION_X_SV4CRC = "application/x-sv4crc";
public static final String MIME_APPLICATION_X_TAR = "application/x-tar";
public static final String MIME_APPLICATION_X_RAR_COMPRESSED= "application/x-rar-compressed";
public static final String MIME_APPLICATION_X_TCL = "application/x-tcl";
public static final String MIME_APPLICATION_X_TEX = "application/x-tex";
public static final String MIME_APPLICATION_X_TEXINFO = "application/x-texinfo";
public static final String MIME_APPLICATION_X_TROFF = "application/x-troff";
// --- application/* types (troff, archive, XML-related) ---
public static final String MIME_APPLICATION_X_TROFF_MAN = "application/x-troff-man";
public static final String MIME_APPLICATION_X_TROFF_ME = "application/x-troff-me";
public static final String MIME_APPLICATION_X_TROFF_MS = "application/x-troff-ms";
public static final String MIME_APPLICATION_X_USTAR = "application/x-ustar";
public static final String MIME_APPLICATION_X_WAIS_SOURCE = "application/x-wais-source";
public static final String MIME_APPLICATION_VND_MOZZILLA_XUL_XML = "application/vnd.mozilla.xul+xml";
public static final String MIME_APPLICATION_XHTML_XML = "application/xhtml+xml";
public static final String MIME_APPLICATION_XSLT_XML = "application/xslt+xml";
public static final String MIME_APPLICATION_XML = "application/xml";
public static final String MIME_APPLICATION_XML_DTD = "application/xml-dtd";
// --- image/* types ---
public static final String MIME_IMAGE_BMP = "image/bmp";
public static final String MIME_IMAGE_CGM = "image/cgm";
public static final String MIME_IMAGE_GIF = "image/gif";
public static final String MIME_IMAGE_IEF = "image/ief";
public static final String MIME_IMAGE_JPEG = "image/jpeg";
public static final String MIME_IMAGE_TIFF = "image/tiff";
public static final String MIME_IMAGE_PNG = "image/png";
public static final String MIME_IMAGE_SVG_XML = "image/svg+xml";
public static final String MIME_IMAGE_VND_DJVU = "image/vnd.djvu";
public static final String MIME_IMAGE_WAP_WBMP = "image/vnd.wap.wbmp";
public static final String MIME_IMAGE_X_CMU_RASTER = "image/x-cmu-raster";
public static final String MIME_IMAGE_X_ICON = "image/x-icon";
public static final String MIME_IMAGE_X_PORTABLE_ANYMAP = "image/x-portable-anymap";
public static final String MIME_IMAGE_X_PORTABLE_BITMAP = "image/x-portable-bitmap";
public static final String MIME_IMAGE_X_PORTABLE_GRAYMAP = "image/x-portable-graymap";
public static final String MIME_IMAGE_X_PORTABLE_PIXMAP = "image/x-portable-pixmap";
public static final String MIME_IMAGE_X_RGB = "image/x-rgb";
// --- audio/* types ---
public static final String MIME_AUDIO_BASIC = "audio/basic";
public static final String MIME_AUDIO_MIDI = "audio/midi";
public static final String MIME_AUDIO_MPEG = "audio/mpeg";
public static final String MIME_AUDIO_X_AIFF = "audio/x-aiff";
public static final String MIME_AUDIO_X_MPEGURL = "audio/x-mpegurl";
public static final String MIME_AUDIO_X_PN_REALAUDIO = "audio/x-pn-realaudio";
public static final String MIME_AUDIO_X_WAV = "audio/x-wav";
// --- chemical/* and model/* types ---
public static final String MIME_CHEMICAL_X_PDB = "chemical/x-pdb";
public static final String MIME_CHEMICAL_X_XYZ = "chemical/x-xyz";
public static final String MIME_MODEL_IGES = "model/iges";
public static final String MIME_MODEL_MESH = "model/mesh";
public static final String MIME_MODEL_VRLM = "model/vrml";
// --- text/* types ---
public static final String MIME_TEXT_PLAIN = "text/plain";
public static final String MIME_TEXT_RICHTEXT = "text/richtext";
public static final String MIME_TEXT_RTF = "text/rtf";
public static final String MIME_TEXT_HTML = "text/html";
public static final String MIME_TEXT_CALENDAR = "text/calendar";
public static final String MIME_TEXT_CSS = "text/css";
public static final String MIME_TEXT_SGML = "text/sgml";
public static final String MIME_TEXT_TAB_SEPARATED_VALUES = "text/tab-separated-values";
public static final String MIME_TEXT_VND_WAP_XML = "text/vnd.wap.wml";
public static final String MIME_TEXT_VND_WAP_WMLSCRIPT = "text/vnd.wap.wmlscript";
public static final String MIME_TEXT_X_SETEXT = "text/x-setext";
public static final String MIME_TEXT_X_COMPONENT = "text/x-component";
// --- video/* and x-conference/* types ---
public static final String MIME_VIDEO_QUICKTIME = "video/quicktime";
public static final String MIME_VIDEO_MPEG = "video/mpeg";
public static final String MIME_VIDEO_MP4 = "video/mp4";
public static final String MIME_VIDEO_VND_MPEGURL = "video/vnd.mpegurl";
public static final String MIME_VIDEO_X_MSVIDEO = "video/x-msvideo";
public static final String MIME_VIDEO_X_MS_WMV = "video/x-ms-wmv";
public static final String MIME_VIDEO_X_SGI_MOVIE = "video/x-sgi-movie";
public static final String MIME_X_CONFERENCE_X_COOLTALK = "x-conference/x-cooltalk";
private static HashMap<String, String> mimeTypeMapping;
static {
mimeTypeMapping = new HashMap<String, String>(200) {
private void put1(String key, String value) {
if (put(key, value) != null) {
throw new IllegalArgumentException("Duplicated extension: " + key);
}
}
{
put1("xul", MIME_APPLICATION_VND_MOZZILLA_XUL_XML);
put1("json", MIME_APPLICATION_JSON);
put1("ice", MIME_X_CONFERENCE_X_COOLTALK);
put1("movie", MIME_VIDEO_X_SGI_MOVIE);
put1("avi", MIME_VIDEO_X_MSVIDEO);
put1("wmv", MIME_VIDEO_X_MS_WMV);
put1("m4u", MIME_VIDEO_VND_MPEGURL);
put1("mxu", MIME_VIDEO_VND_MPEGURL);
put1("htc", MIME_TEXT_X_COMPONENT);
put1("etx", MIME_TEXT_X_SETEXT);
put1("wmls", MIME_TEXT_VND_WAP_WMLSCRIPT);
put1("wml", MIME_TEXT_VND_WAP_XML);
put1("tsv", MIME_TEXT_TAB_SEPARATED_VALUES);
put1("sgm", MIME_TEXT_SGML);
put1("sgml", MIME_TEXT_SGML);
put1("css", MIME_TEXT_CSS);
put1("ifb", MIME_TEXT_CALENDAR);
put1("ics", MIME_TEXT_CALENDAR);
put1("wrl", MIME_MODEL_VRLM);
put1("vrlm", MIME_MODEL_VRLM);
put1("silo", MIME_MODEL_MESH);
put1("mesh", MIME_MODEL_MESH);
put1("msh", MIME_MODEL_MESH);
put1("iges", MIME_MODEL_IGES);
put1("igs", MIME_MODEL_IGES);
put1("rgb", MIME_IMAGE_X_RGB);
put1("ppm", MIME_IMAGE_X_PORTABLE_PIXMAP);
put1("pgm", MIME_IMAGE_X_PORTABLE_GRAYMAP);
put1("pbm", MIME_IMAGE_X_PORTABLE_BITMAP);
put1("pnm", MIME_IMAGE_X_PORTABLE_ANYMAP);
put1("ico", MIME_IMAGE_X_ICON);
put1("ras", MIME_IMAGE_X_CMU_RASTER);
put1("wbmp", MIME_IMAGE_WAP_WBMP);
put1("djv", MIME_IMAGE_VND_DJVU);
put1("djvu", MIME_IMAGE_VND_DJVU);
put1("svg", MIME_IMAGE_SVG_XML);
put1("ief", MIME_IMAGE_IEF);
put1("cgm", MIME_IMAGE_CGM);
put1("bmp", MIME_IMAGE_BMP);
put1("xyz", MIME_CHEMICAL_X_XYZ);
put1("pdb", MIME_CHEMICAL_X_PDB);
put1("ra", MIME_AUDIO_X_PN_REALAUDIO);
put1("ram", MIME_AUDIO_X_PN_REALAUDIO);
put1("m3u", MIME_AUDIO_X_MPEGURL);
put1("aifc", MIME_AUDIO_X_AIFF);
put1("aif", MIME_AUDIO_X_AIFF);
put1("aiff", MIME_AUDIO_X_AIFF);
put1("mp3", MIME_AUDIO_MPEG);
put1("mp2", MIME_AUDIO_MPEG);
put1("mp1", MIME_AUDIO_MPEG);
put1("mpga", MIME_AUDIO_MPEG);
put1("kar", MIME_AUDIO_MIDI);
put1("mid", MIME_AUDIO_MIDI);
put1("midi", MIME_AUDIO_MIDI);
put1("dtd", MIME_APPLICATION_XML_DTD);
put1("xsl", MIME_APPLICATION_XML);
put1("xml", MIME_APPLICATION_XML);
put1("xslt", MIME_APPLICATION_XSLT_XML);
put1("xht", MIME_APPLICATION_XHTML_XML);
put1("xhtml", MIME_APPLICATION_XHTML_XML);
put1("src", MIME_APPLICATION_X_WAIS_SOURCE);
put1("ustar", MIME_APPLICATION_X_USTAR);
put1("ms", MIME_APPLICATION_X_TROFF_MS);
put1("me", MIME_APPLICATION_X_TROFF_ME);
put1("man", MIME_APPLICATION_X_TROFF_MAN);
put1("roff", MIME_APPLICATION_X_TROFF);
put1("tr", MIME_APPLICATION_X_TROFF);
put1("t", MIME_APPLICATION_X_TROFF);
put1("texi", MIME_APPLICATION_X_TEXINFO);
put1("texinfo", MIME_APPLICATION_X_TEXINFO);
put1("tex", MIME_APPLICATION_X_TEX);
put1("tcl", MIME_APPLICATION_X_TCL);
put1("sv4crc", MIME_APPLICATION_X_SV4CRC);
put1("sv4cpio", MIME_APPLICATION_X_SV4CPIO);
put1("sit", MIME_APPLICATION_X_STUFFIT);
put1("swf", MIME_APPLICATION_X_SHOCKWAVE_FLASH);
put1("shar", MIME_APPLICATION_X_SHAR);
put1("sh", MIME_APPLICATION_X_SH);
put1("cdf", MIME_APPLICATION_X_NETCDF);
put1("nc", MIME_APPLICATION_X_NETCDF);
put1("latex", MIME_APPLICATION_X_LATEX);
put1("skm", MIME_APPLICATION_X_KOAN);
put1("skt", MIME_APPLICATION_X_KOAN);
put1("skd", MIME_APPLICATION_X_KOAN);
put1("skp", MIME_APPLICATION_X_KOAN);
put1("js", MIME_APPLICATION_X_JAVASCRIPT);
put1("hdf", MIME_APPLICATION_X_HDF);
put1("gtar", MIME_APPLICATION_X_GTAR);
put1("spl", MIME_APPLICATION_X_FUTURESPLASH);
put1("dvi", MIME_APPLICATION_X_DVI);
put1("dxr", MIME_APPLICATION_X_DIRECTOR);
put1("dir", MIME_APPLICATION_X_DIRECTOR);
put1("dcr", MIME_APPLICATION_X_DIRECTOR);
put1("csh", MIME_APPLICATION_X_CSH);
put1("cpio", MIME_APPLICATION_X_CPIO);
put1("pgn", MIME_APPLICATION_X_CHESS_PGN);
put1("vcd", MIME_APPLICATION_X_CDLINK);
put1("bcpio", MIME_APPLICATION_X_BCPIO);
put1("rm", MIME_APPLICATION_VND_RNREALMEDIA);
put1("ppt", MIME_APPLICATION_VND_MSPOWERPOINT);
put1("mif", MIME_APPLICATION_VND_MIF);
put1("grxml", MIME_APPLICATION_SRGS_XML);
put1("gram", MIME_APPLICATION_SRGS);
put1("smil", MIME_APPLICATION_RDF_SMIL);
put1("smi", MIME_APPLICATION_RDF_SMIL);
put1("rdf", MIME_APPLICATION_RDF_XML);
put1("ogg", MIME_APPLICATION_X_OGG);
put1("oda", MIME_APPLICATION_ODA);
put1("dmg", MIME_APPLICATION_OCTET_STREAM);
put1("lzh", MIME_APPLICATION_OCTET_STREAM);
put1("so", MIME_APPLICATION_OCTET_STREAM);
put1("lha", MIME_APPLICATION_OCTET_STREAM);
put1("dms", MIME_APPLICATION_OCTET_STREAM);
put1("bin", MIME_APPLICATION_OCTET_STREAM);
put1("mathml", MIME_APPLICATION_MATHML_XML);
put1("cpt", MIME_APPLICATION_MAC_COMPACTPRO);
put1("hqx", MIME_APPLICATION_MAC_BINHEX40);
put1("jnlp", MIME_APPLICATION_JNLP);
put1("ez", MIME_APPLICATION_ANDREW_INSET);
put1("txt", MIME_TEXT_PLAIN);
put1("ini", MIME_TEXT_PLAIN);
put1("c", MIME_TEXT_PLAIN);
put1("h", MIME_TEXT_PLAIN);
put1("cpp", MIME_TEXT_PLAIN);
put1("cxx", MIME_TEXT_PLAIN);
put1("cc", MIME_TEXT_PLAIN);
put1("chh", MIME_TEXT_PLAIN);
put1("java", MIME_TEXT_PLAIN);
put1("csv", MIME_TEXT_PLAIN);
put1("bat", MIME_TEXT_PLAIN);
put1("cmd", MIME_TEXT_PLAIN);
put1("asc", MIME_TEXT_PLAIN);
put1("rtf", MIME_TEXT_RTF);
put1("rtx", MIME_TEXT_RICHTEXT);
put1("html", MIME_TEXT_HTML);
put1("htm", MIME_TEXT_HTML);
put1("zip", MIME_APPLICATION_ZIP);
put1("rar", MIME_APPLICATION_X_RAR_COMPRESSED);
put1("gzip", MIME_APPLICATION_X_GZIP);
put1("gz", MIME_APPLICATION_X_GZIP);
put1("tgz", MIME_APPLICATION_TGZ);
put1("tar", MIME_APPLICATION_X_TAR);
put1("gif", MIME_IMAGE_GIF);
put1("jpeg", MIME_IMAGE_JPEG);
put1("jpg", MIME_IMAGE_JPEG);
put1("jpe", MIME_IMAGE_JPEG);
put1("tiff", MIME_IMAGE_TIFF);
put1("tif", MIME_IMAGE_TIFF);
put1("png", MIME_IMAGE_PNG);
put1("au", MIME_AUDIO_BASIC);
put1("snd", MIME_AUDIO_BASIC);
put1("wav", MIME_AUDIO_X_WAV);
put1("mov", MIME_VIDEO_QUICKTIME);
put1("qt", MIME_VIDEO_QUICKTIME);
put1("mp4", MIME_VIDEO_MP4);
put1("mpeg", MIME_VIDEO_MPEG);
put1("mpg", MIME_VIDEO_MPEG);
put1("mpe", MIME_VIDEO_MPEG);
put1("abs", MIME_VIDEO_MPEG);
put1("doc", MIME_APPLICATION_MSWORD);
put1("xls", MIME_APPLICATION_VND_MSEXCEL);
put1("eps", MIME_APPLICATION_POSTSCRIPT);
put1("ai", MIME_APPLICATION_POSTSCRIPT);
put1("ps", MIME_APPLICATION_POSTSCRIPT);
put1("pdf", MIME_APPLICATION_PDF);
put1("exe", MIME_APPLICATION_OCTET_STREAM);
put1("dll", MIME_APPLICATION_OCTET_STREAM);
put1("class", MIME_APPLICATION_OCTET_STREAM);
put1("jar", MIME_APPLICATION_JAVA_ARCHIVE);
}};
}
/**
 * Debug entry point: prints the number of registered extension mappings.
 */
public static void main(String[] args) {
    final int registeredExtensions = mimeTypeMapping.size();
    System.out.println(registeredExtensions);
}
/**
 * Registers MIME type for provided extension. Existing extension type will be overridden.
 * <p>
 * The extension is normalized to lower case using an English locale so that
 * registrations made with upper- or mixed-case extensions stay reachable
 * through {@link #lookupMimeType(String)}, which lower-cases its argument.
 * Previously, a key registered as {@code "TXT"} could never be looked up.
 */
public static void registerMimeType(String ext, String mimeType) {
    mimeTypeMapping.put(ext.toLowerCase(java.util.Locale.ENGLISH), mimeType);
}
/**
 * Returns the corresponding MIME type to the given extension.
 * If no MIME type was found it returns 'application/octet-stream' type.
 */
public static String getMimeType(String ext) {
    final String resolved = lookupMimeType(ext);
    return resolved == null ? MIME_APPLICATION_OCTET_STREAM : resolved;
}
/**
 * Simply returns MIME type or <code>null</code> if no type is found.
 * <p>
 * The extension is lower-cased with an explicit English locale so lookups
 * behave identically on every JVM default locale: under a Turkish default
 * locale the bare {@code toLowerCase()} maps {@code 'I'} to a dotless
 * {@code 'ı'}, so e.g. {@code "ICO"} would miss the {@code "ico"} entry.
 */
public static String lookupMimeType(String ext) {
    return mimeTypeMapping.get(ext.toLowerCase(java.util.Locale.ENGLISH));
}
}
| |
package org.cdbookstore.view;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
import javax.annotation.Resource;
import javax.ejb.SessionContext;
import javax.ejb.Stateful;
import javax.enterprise.context.Conversation;
import javax.enterprise.context.ConversationScoped;
import javax.faces.application.FacesMessage;
import javax.faces.component.UIComponent;
import javax.faces.context.FacesContext;
import javax.faces.convert.Converter;
import javax.inject.Inject;
import javax.inject.Named;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.PersistenceContextType;
import javax.persistence.TypedQuery;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Predicate;
import javax.persistence.criteria.Root;
import org.cdbookstore.model.Author;
/**
* Backing bean for Author entities.
* <p/>
* This class provides CRUD functionality for all Author entities. It focuses
* purely on Java EE 6 standards (e.g. <tt>@ConversationScoped</tt> for
* state management, <tt>PersistenceContext</tt> for persistence,
* <tt>CriteriaBuilder</tt> for searches) rather than introducing a CRUD framework or
* custom base class.
*/
// NOTE(review): container-managed bean — CDI conversation scope plus an
// EXTENDED persistence context on a stateful EJB. The ordering of
// conversation begin/end relative to EntityManager calls is significant,
// so the code below is documented rather than restructured.
@Named
@Stateful
@ConversationScoped
public class AuthorBean implements Serializable {
private static final long serialVersionUID = 1L;
/*
* Support creating and retrieving Author entities
*/
// Identifier of the Author being viewed/edited; null means "new entity".
private Long id;
public Long getId() {
return this.id;
}
public void setId(Long id) {
this.id = id;
}
// The entity currently bound to the view.
private Author author;
public Author getAuthor() {
return this.author;
}
public void setAuthor(Author author) {
this.author = author;
}
@Inject
private Conversation conversation;
// EXTENDED: entities stay managed across requests for the whole conversation.
@PersistenceContext(unitName = "cdbookstore-persistence-unit", type = PersistenceContextType.EXTENDED)
private EntityManager entityManager;
// Starts a long-running conversation (30-minute timeout) and navigates to
// the create view.
public String create() {
this.conversation.begin();
this.conversation.setTimeout(1800000L);
return "create?faces-redirect=true";
}
// Loads the entity for the view phase. Skipped on JSF postbacks so user
// edits are not overwritten; with no id set, exposes the blank example
// instance for the create form.
public void retrieve() {
if (FacesContext.getCurrentInstance().isPostback()) {
return;
}
if (this.conversation.isTransient()) {
this.conversation.begin();
this.conversation.setTimeout(1800000L);
}
if (this.id == null) {
this.author = this.example;
} else {
this.author = findById(getId());
}
}
public Author findById(Long id) {
return this.entityManager.find(Author.class, id);
}
/*
* Support updating and deleting Author entities
*/
// Persists a new Author (id == null) or merges changes into an existing
// one, then navigates away. NOTE(review): the conversation is ended before
// the persistence call, so on failure the user is left on the page with a
// transient conversation — generated-scaffold behavior, kept as-is.
public String update() {
this.conversation.end();
try {
if (this.id == null) {
this.entityManager.persist(this.author);
return "search?faces-redirect=true";
} else {
this.entityManager.merge(this.author);
return "view?faces-redirect=true&id=" + this.author.getId();
}
} catch (Exception e) {
FacesContext.getCurrentInstance().addMessage(null,
new FacesMessage(e.getMessage()));
return null;
}
}
// Removes the entity identified by the current id; flush() forces the
// DELETE so constraint violations surface here and become faces messages.
public String delete() {
this.conversation.end();
try {
Author deletableEntity = findById(getId());
this.entityManager.remove(deletableEntity);
this.entityManager.flush();
return "search?faces-redirect=true";
} catch (Exception e) {
FacesContext.getCurrentInstance().addMessage(null,
new FacesMessage(e.getMessage()));
return null;
}
}
/*
* Support searching Author entities with pagination
*/
// Zero-based page index of the result list.
private int page;
// Total number of rows matching the current example filter.
private long count;
// Rows of the current page, refreshed by paginate().
private List<Author> pageItems;
// Query-by-example holder: populated from the search form.
private Author example = new Author();
public int getPage() {
return this.page;
}
public void setPage(int page) {
this.page = page;
}
// Fixed page size used by paginate().
public int getPageSize() {
return 10;
}
public Author getExample() {
return this.example;
}
public void setExample(Author example) {
this.example = example;
}
// Resets to the first page; returning null re-renders the same view.
public String search() {
this.page = 0;
return null;
}
// Runs two criteria queries: a COUNT for the total, then the page slice.
public void paginate() {
CriteriaBuilder builder = this.entityManager.getCriteriaBuilder();
// Populate this.count
CriteriaQuery<Long> countCriteria = builder.createQuery(Long.class);
Root<Author> root = countCriteria.from(Author.class);
countCriteria = countCriteria.select(builder.count(root)).where(
getSearchPredicates(root));
this.count = this.entityManager.createQuery(countCriteria)
.getSingleResult();
// Populate this.pageItems
CriteriaQuery<Author> criteria = builder.createQuery(Author.class);
root = criteria.from(Author.class);
TypedQuery<Author> query = this.entityManager.createQuery(criteria
.select(root).where(getSearchPredicates(root)));
query.setFirstResult(this.page * getPageSize()).setMaxResults(
getPageSize());
this.pageItems = query.getResultList();
}
// Builds case-insensitive LIKE predicates from the example instance.
// NOTE(review): only firstName is filtered here — confirm whether other
// Author properties should participate in the search.
private Predicate[] getSearchPredicates(Root<Author> root) {
CriteriaBuilder builder = this.entityManager.getCriteriaBuilder();
List<Predicate> predicatesList = new ArrayList<Predicate>();
String firstName = this.example.getFirstName();
if (firstName != null && !"".equals(firstName)) {
predicatesList.add(builder.like(
builder.lower(root.<String> get("firstName")),
'%' + firstName.toLowerCase() + '%'));
}
return predicatesList.toArray(new Predicate[predicatesList.size()]);
}
public List<Author> getPageItems() {
return this.pageItems;
}
public long getCount() {
return this.count;
}
/*
* Support listing and POSTing back Author entities (e.g. from inside an
* HtmlSelectOneMenu)
*/
public List<Author> getAll() {
CriteriaQuery<Author> criteria = this.entityManager
.getCriteriaBuilder().createQuery(Author.class);
return this.entityManager.createQuery(
criteria.select(criteria.from(Author.class))).getResultList();
}
@Resource
private SessionContext sessionContext;
// JSF converter mapping Author <-> String id. Uses the EJB business proxy
// (not "this") so container interceptors and transactions still apply when
// the converter calls findById.
public Converter getConverter() {
final AuthorBean ejbProxy = this.sessionContext
.getBusinessObject(AuthorBean.class);
return new Converter() {
@Override
public Object getAsObject(FacesContext context,
UIComponent component, String value) {
return ejbProxy.findById(Long.valueOf(value));
}
@Override
public String getAsString(FacesContext context,
UIComponent component, Object value) {
if (value == null) {
return "";
}
return String.valueOf(((Author) value).getId());
}
};
}
/*
* Support adding children to bidirectional, one-to-many tables
*/
private Author add = new Author();
public Author getAdd() {
return this.add;
}
// Hands out the staged child and immediately replaces it with a fresh
// blank instance, so each form submission adds a new row.
public Author getAdded() {
Author added = this.add;
this.add = new Author();
return added;
}
}
| |
package libshapedraw.shape;
import static org.junit.Assert.*;
import java.util.ArrayList;
import libshapedraw.MockMinecraftAccess;
import libshapedraw.SetupTestEnvironment;
import libshapedraw.primitive.Color;
import libshapedraw.primitive.LineStyle;
import libshapedraw.primitive.ReadonlyVector3;
import libshapedraw.primitive.Vector3;
import org.junit.Test;
/**
 * Unit tests for WireframeLinesBlend: constructor validation, point-list
 * accessors, render-cap/blend-endpoint interaction, line styles, origin, and
 * rendered-primitive counts via MockMinecraftAccess. Assertions pin exact
 * counts and string forms, so the code is left byte-identical.
 */
public class TestWireframeLinesBlend extends SetupTestEnvironment.TestCase {
// Accepts both empty and non-empty point lists.
@Test
public void testConstructor() {
ArrayList<ReadonlyVector3> arr;
arr = new ArrayList<ReadonlyVector3>();
new WireframeLinesBlend(arr);
arr = new ArrayList<ReadonlyVector3>();
arr.add(new Vector3(1.0, 2.0, 3.0));
new WireframeLinesBlend(arr);
}
// A null collection is rejected outright.
@Test(expected=IllegalArgumentException.class)
public void testConstructorInvalidNull() {
new WireframeLinesBlend(null);
}
// A null first element is detected by the constructor.
@Test(expected=IllegalArgumentException.class)
public void testConstructorInvalidNullItem() {
ArrayList<ReadonlyVector3> arr = new ArrayList<ReadonlyVector3>();
arr.add(null);
new WireframeLinesBlend(arr);
}
@Test
public void testConstructorHiddenNullItem() {
ArrayList<ReadonlyVector3> arr = new ArrayList<ReadonlyVector3>();
arr.add(new Vector3(1.0, 2.0, 3.0));
// this null is invalid and will break things later, but the constructor will accept it
arr.add(null);
new WireframeLinesBlend(arr);
}
// Raw (type-erased) collections are tolerated when contents are valid.
@SuppressWarnings({ "rawtypes", "unchecked" })
@Test
public void testConstructorTypeErased() {
ArrayList arr;
arr = new ArrayList();
new WireframeLinesBlend(arr);
arr = new ArrayList();
arr.add(new Vector3(1.0, 2.0, 3.0));
new WireframeLinesBlend(arr);
}
// A wrong-typed first element is detected by the constructor.
@SuppressWarnings({ "rawtypes", "unchecked" })
@Test(expected=IllegalArgumentException.class)
public void testConstructorTypeErasedInvalidWrongType() {
ArrayList arr = new ArrayList();
arr.add(new Object());
new WireframeLinesBlend(arr);
}
@SuppressWarnings({ "rawtypes", "unchecked" })
@Test
public void testConstructorTypeErasedHiddenWrongType() {
ArrayList arr = new ArrayList();
arr.add(new Vector3(1.0, 2.0, 3.0));
// this is invalid and will break things later, but the constructor will accept it
arr.add(new Object());
new WireframeLinesBlend(arr);
}
// getPoints returns the live backing list (same reference, not a copy).
@Test
public void testGetSetPoints() {
// setPoint is called from the constructor so most of its logic has already been tested above
ArrayList<ReadonlyVector3> arr0 = new ArrayList<ReadonlyVector3>();
arr0.add(new Vector3(1.0, 2.0, 3.0));
arr0.add(new Vector3(4.0, 5.0, 6.0));
assertEquals(2, arr0.size());
assertEquals("[(1.0,2.0,3.0), (4.0,5.0,6.0)]", arr0.toString());
ArrayList<ReadonlyVector3> arr1 = new ArrayList<ReadonlyVector3>();
arr1.add(new Vector3(-1.0, -2.0, -3.0));
arr1.add(new Vector3(-4.0, -5.0, -6.0));
arr1.add(new Vector3(-7.0, -8.0, -9.0));
assertEquals(3, arr1.size());
assertEquals("[(-1.0,-2.0,-3.0), (-4.0,-5.0,-6.0), (-7.0,-8.0,-9.0)]", arr1.toString());
assertNotSame(arr0, arr1);
WireframeLinesBlend shape = new WireframeLinesBlend(arr0);
assertSame(arr0, shape.getPoints());
assertNotSame(arr1, shape.getPoints());
assertEquals("[(1.0,2.0,3.0), (4.0,5.0,6.0)]", shape.getPoints().toString());
shape.setPoints(arr1);
assertNotSame(arr0, shape.getPoints());
assertSame(arr1, shape.getPoints());
assertEquals("[(-1.0,-2.0,-3.0), (-4.0,-5.0,-6.0), (-7.0,-8.0,-9.0)]", shape.getPoints().toString());
}
// Blend endpoint tracks the render cap: non-positive caps fall back to
// pointCount-2 (here 3); positive caps yield cap-1.
@Test
public void testGetSetRenderCapAndBlendEndpoint() {
ArrayList<ReadonlyVector3> arr = new ArrayList<ReadonlyVector3>();
arr.add(new Vector3(1.0, 2.0, 3.0));
arr.add(new Vector3(4.0, 5.0, 6.0));
arr.add(new Vector3(7.0, 8.0, 9.0));
arr.add(new Vector3(10.0, 11.0, 12.0));
arr.add(new Vector3(13.0, 14.0, 15.0));
WireframeLinesBlend shape = new WireframeLinesBlend(arr);
assertEquals(-1, shape.getRenderCap());
assertEquals(3, shape.getBlendEndpoint());
shape.setRenderCap(0);
assertEquals(0, shape.getRenderCap());
assertEquals(-1, shape.getBlendEndpoint());
shape.setRenderCap(2);
assertEquals(2, shape.getRenderCap());
assertEquals(1, shape.getBlendEndpoint());
shape.setRenderCap(50);
assertEquals(50, shape.getRenderCap());
assertEquals(49, shape.getBlendEndpoint());
shape.setRenderCap(-20);
assertEquals(-20, shape.getRenderCap());
assertEquals(3, shape.getBlendEndpoint());
shape.setRenderCap(Integer.MAX_VALUE);
assertEquals(Integer.MAX_VALUE, shape.getRenderCap());
assertEquals(Integer.MAX_VALUE-1, shape.getBlendEndpoint());
shape.setRenderCap(Integer.MIN_VALUE);
assertEquals(Integer.MIN_VALUE, shape.getRenderCap());
assertEquals(3, shape.getBlendEndpoint());
}
// With no explicit style, the effective style is the shared default.
@Test
public void testLineStyle() {
WireframeLinesBlend shape = new WireframeLinesBlend(new ArrayList<ReadonlyVector3>());
assertNull(shape.getLineStyle());
assertSame(LineStyle.DEFAULT, shape.getEffectiveLineStyle());
shape.setLineStyle(Color.BISQUE.copy(), 5.0F, true);
assertNotNull(shape.getLineStyle());
assertEquals("(0xffe4c4ff,5.0|0xffe4c43f,5.0)", shape.getLineStyle().toString());
assertSame(shape.getLineStyle(), shape.getEffectiveLineStyle());
}
// The blend-to style is independent of the main line style and nullable.
@Test
public void testBlendToLineStyle() {
WireframeLinesBlend shape = new WireframeLinesBlend(new ArrayList<ReadonlyVector3>());
assertNull(shape.getBlendToLineStyle());
shape.setBlendToLineStyle(Color.BISQUE.copy(), 5.0F, true);
assertNotNull(shape.getBlendToLineStyle());
assertEquals("(0xffe4c4ff,5.0|0xffe4c43f,5.0)", shape.getBlendToLineStyle().toString());
shape.setBlendToLineStyle(Color.CYAN.copy(), 3.0F, false);
assertNotNull(shape.getBlendToLineStyle());
assertEquals("(0x00ffffff,3.0)", shape.getBlendToLineStyle().toString());
assertNotSame(shape.getBlendToLineStyle(), shape.getLineStyle());
shape.setBlendToLineStyle(new LineStyle(Color.RED.copy(), 2.5F, false));
assertNotNull(shape.getBlendToLineStyle());
assertEquals("(0xff0000ff,2.5)", shape.getBlendToLineStyle().toString());
shape.setBlendToLineStyle(null);
assertNull(shape.getBlendToLineStyle());
}
@Test
public void testGetOrigin() {
ArrayList<ReadonlyVector3> arr = new ArrayList<ReadonlyVector3>();
WireframeLinesBlend shape = new WireframeLinesBlend(arr);
// no points at all: no origin
assertNull(shape.getOriginReadonly());
// the first point is the origin
arr.add(new Vector3(4.0, 5.5, -3.0));
assertEquals("(4.0,5.5,-3.0)", shape.getOriginReadonly().toString());
// additional points are ignored
arr.add(new Vector3(1.0, 2.0, 3.0));
arr.add(new Vector3(7.0, -9.0, 213.5));
assertEquals("(4.0,5.5,-3.0)", shape.getOriginReadonly().toString());
}
// Verifies rendered line/vertex counts as points are added and the render
// cap changes, for both see-through and occluded line styles.
@Test
public void testRender() {
MockMinecraftAccess mc = new MockMinecraftAccess();
ArrayList<ReadonlyVector3> arr = new ArrayList<ReadonlyVector3>();
for (boolean seeThru : new boolean[] {true, false}) {
arr.clear();
WireframeLinesBlend shape = new WireframeLinesBlend(arr);
shape.setLineStyle(Color.WHITE.copy(), 1.0F, seeThru);
shape.setBlendToLineStyle(Color.RED.copy().setAlpha(0.5), 5.0F, seeThru);
assertEquals(seeThru, shape.isVisibleThroughTerrain());
// No points == nothing to render
mc.reset();
shape.render(mc);
mc.assertCountsEqual(0, 0, seeThru);
shape.render(mc);
shape.render(mc);
shape.render(mc);
mc.assertCountsEqual(0, 0, seeThru);
// Only one point makes no lines
arr.add(new Vector3(0.0, 5.5, -12.5));
mc.reset();
shape.render(mc); // deferred to WireframeLines
mc.assertCountsEqual(1, 1, seeThru);
shape.render(mc);
shape.render(mc);
shape.render(mc);
mc.assertCountsEqual(4, 4, seeThru);
// Two points make one line
arr.add(new Vector3(7.0, 5.5, -12.5));
mc.reset();
shape.render(mc);
mc.assertCountsEqual(1, 2, seeThru);
shape.render(mc);
shape.render(mc);
shape.render(mc);
mc.assertCountsEqual(4, 8, seeThru);
// Three points make two lines
arr.add(new Vector3(7.0, 15.5, -12.5));
mc.reset();
shape.render(mc);
mc.assertCountsEqual(2, 4, seeThru);
shape.render(mc);
shape.render(mc);
shape.render(mc);
mc.assertCountsEqual(8, 16, seeThru);
// Eleven points make ten lines
arr.add(new Vector3(7.0, 15.5, -6.5));
arr.add(new Vector3(7.0, 12.5, -3.5));
arr.add(new Vector3(17.0, 12.5, -3.5));
arr.add(new Vector3(17.0, 6.5, -3.5));
arr.add(new Vector3(12.0, 7.5, -3.5));
arr.add(new Vector3(10.0, 7.5, 3.5));
arr.add(new Vector3(10.0, 7.5, 6.0));
arr.add(new Vector3(20.0, 17.5, 6.0));
assertEquals(11, arr.size());
mc.reset();
shape.render(mc);
mc.assertCountsEqual(10, 20, seeThru);
shape.render(mc);
shape.render(mc);
shape.render(mc);
mc.assertCountsEqual(40, 80, seeThru);
// Add a render cap, we only render that many lines
shape.setRenderCap(5);
mc.reset();
shape.render(mc);
mc.assertCountsEqual(5, 10, seeThru);
shape.render(mc);
shape.render(mc);
shape.render(mc);
mc.assertCountsEqual(20, 40, seeThru);
// Remove the render cap, we render everything again
shape.setRenderCap(-1);
mc.reset();
shape.render(mc);
mc.assertCountsEqual(10, 20, seeThru);
shape.render(mc);
shape.render(mc);
shape.render(mc);
mc.assertCountsEqual(40, 80, seeThru);
// A render cap that's larger than the number of line segments defined is fine too
shape.setRenderCap(9001);
mc.reset();
shape.render(mc);
mc.assertCountsEqual(10, 20, seeThru);
shape.render(mc);
shape.render(mc);
shape.render(mc);
mc.assertCountsEqual(40, 80, seeThru);
shape.setBlendToLineStyle(Color.YELLOW.copy().setAlpha(0.5), 5.0F, false);
shape.setRenderCap(-1);
mc.reset();
shape.render(mc); // secondary deferred to WireframeLines
if (seeThru) {
mc.assertCountsEqual(11, 31, false);
} else {
mc.assertCountsEqual(10, 20, false);
}
shape.render(mc);
shape.render(mc);
shape.render(mc);
if (seeThru) {
mc.assertCountsEqual(44, 124, false);
} else {
mc.assertCountsEqual(40, 80, false);
}
}
}
// Wrong-typed elements past index 0 surface as ClassCastException at render time.
@SuppressWarnings({ "rawtypes", "unchecked" })
@Test(expected=ClassCastException.class)
public void testRenderInvalidWrongType() {
ArrayList arr = new ArrayList();
arr.add(new Vector3(1.0, 2.0, 3.0));
arr.add(new Vector3(4.0, 5.0, 6.0));
arr.add("how did i get here");
arr.add(new Vector3(7.0, 8.0, 9.0));
WireframeLinesBlend shape = new WireframeLinesBlend(arr);
shape.setLineStyle(Color.WHITE.copy(), 1.0F, true);
shape.render(new MockMinecraftAccess());
}
// Null elements past index 0 surface as NullPointerException at render time.
@Test(expected=NullPointerException.class)
public void testRenderInvalidNull() {
ArrayList<ReadonlyVector3> arr = new ArrayList<ReadonlyVector3>();
arr.add(new Vector3(1.0, 2.0, 3.0));
arr.add(new Vector3(4.0, 5.0, 6.0));
arr.add(null);
arr.add(new Vector3(7.0, 8.0, 9.0));
WireframeLinesBlend shape = new WireframeLinesBlend(arr);
shape.setLineStyle(Color.WHITE.copy(), 1.0F, true);
shape.render(new MockMinecraftAccess());
}
}
| |
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.codeInsight.daemon.impl.quickfix;
import com.intellij.codeInsight.ExpectedTypeInfo;
import com.intellij.codeInsight.ExpectedTypesProvider;
import com.intellij.codeInsight.intention.impl.TypeExpression;
import com.intellij.codeInsight.template.TemplateBuilder;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.psi.*;
import com.intellij.psi.impl.source.PostprocessReformattingAspect;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.util.SmartList;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.List;
import java.util.Map;
import static com.intellij.codeInsight.ExpectedTypeInfo.*;
import static com.intellij.util.containers.ContainerUtil.map;
/**
* @author ven
*/
public class GuessTypeParameters {
private static final Logger LOG = Logger.getInstance(GuessTypeParameters.class);
// Project/PSI services captured at construction time.
private final Project myProject;
private final PsiManager myManager;
private final JVMElementFactory myFactory;
// Template builder that collects the type-expression slots created below.
private final TemplateBuilder myBuilder;
// Never null: normalized to PsiSubstitutor.EMPTY in the constructor.
private final PsiSubstitutor mySubstitutor;
/**
 * Creates a helper bound to the given project, element factory and template
 * builder. A null substitutor is normalized to {@link PsiSubstitutor#EMPTY}.
 */
public GuessTypeParameters(@NotNull Project project,
                           @NotNull JVMElementFactory factory,
                           @NotNull TemplateBuilder builder,
                           @Nullable PsiSubstitutor substitutor) {
    myProject = project;
    myFactory = factory;
    myBuilder = builder;
    myManager = PsiManager.getInstance(project);
    if (substitutor == null) {
        mySubstitutor = PsiSubstitutor.EMPTY;
    }
    else {
        mySubstitutor = substitutor;
    }
}
// Replaces the given type element with a template expression offering the
// expected types (and matching type parameters of mySubstitutor, if any).
// Requires write access; returns the (possibly replaced) type element that
// now carries the template. Statement order here is load-bearing (the type
// element is replaced in-place before substitution), so code is unchanged.
@NotNull
public PsiTypeElement setupTypeElement(@NotNull PsiTypeElement typeElement,
ExpectedTypeInfo @NotNull [] infos,
@Nullable PsiElement context,
@NotNull PsiClass targetClass) {
LOG.assertTrue(typeElement.isValid());
ApplicationManager.getApplication().assertWriteAccessAllowed();
GlobalSearchScope scope = typeElement.getResolveScope();
// Only attempt type-parameter matching for a single expectation with a
// non-trivial substitutor.
if (infos.length == 1 && mySubstitutor != PsiSubstitutor.EMPTY) {
ExpectedTypeInfo info = infos[0];
final PsiType expectedType = info.getType();
final List<PsiTypeParameter> matchedParameters = matchingTypeParameters(mySubstitutor, expectedType, info.getKind());
if (!matchedParameters.isEmpty()) {
// Offer the matched type parameters first, then the expected types.
final List<PsiType> types = new SmartList<>(map(matchedParameters, it -> myFactory.createType(it)));
ContainerUtil.addAll(types, ExpectedTypesProvider.processExpectedTypes(infos, new MyTypeVisitor(myManager, scope), myProject));
myBuilder.replaceElement(typeElement, new TypeExpression(myProject, types));
return typeElement;
}
typeElement = replaceTypeElement(typeElement, info.getType());
PsiSubstitutor rawingSubstitutor = getRawingSubstitutor(myProject, context, targetClass);
int substitionResult = hasNullSubstitutions(mySubstitutor)
? SUBSTITUTED_NONE
: substituteToTypeParameters(typeElement, expectedType, rawingSubstitutor, true);
if (substitionResult == SUBSTITUTED_IN_PARAMETERS) {
PsiJavaCodeReferenceElement refElement = typeElement.getInnermostComponentReferenceElement();
LOG.assertTrue(refElement != null);
PsiElement qualifier = refElement.getQualifier();
if (qualifier != null) {
// Consider type element `java.util.List<java.lang.String>`.
// At this point there is a template on `java.lang.String` type element already.
//
// We need java.util.List element to put the second template on,
// but there is no such element, because type element consists of:
// - qualifier `java.util`
// - dot
// - reference name element `List`
// - reference type parameters `<java.lang.String>`
//
// Removing the qualifier also removes the dot, so in the end we get `List<java.lang.String>`
// and we are safe to put the template on the reference name element `List`.
//
// Actual shortening or using of FQNs is then handled by com.intellij.codeInsight.template.impl.ShortenFQNamesProcessor.
qualifier.delete();
}
PsiElement referenceNameElement = refElement.getReferenceNameElement();
LOG.assertTrue(referenceNameElement != null);
// Put a second template on the outer reference, proposing the raw
// form of the default type (its arguments already carry templates).
PsiClassType defaultType = getComponentType(info.getDefaultType());
LOG.assertTrue(defaultType != null);
PsiClassType rawDefaultType = defaultType.rawType();
ExpectedTypeInfo info1 = ExpectedTypesProvider.createInfo(rawDefaultType, TYPE_STRICTLY, rawDefaultType, info.getTailType());
MyTypeVisitor visitor = new MyTypeVisitor(myManager, scope);
PsiType[] types = ExpectedTypesProvider.processExpectedTypes(new ExpectedTypeInfo[]{info1}, visitor, myProject);
myBuilder.replaceElement(referenceNameElement, new TypeExpression(myProject, types));
return typeElement;
}
else if (substitionResult != SUBSTITUTED_NONE) {
return typeElement;
}
}
// Fallback: offer the expected types directly (or the element's own type
// when there are no expectations at all).
PsiType[] types = infos.length == 0
? new PsiType[]{typeElement.getType()}
: ExpectedTypesProvider.processExpectedTypes(infos, new MyTypeVisitor(myManager, scope), myProject);
myBuilder.replaceElement(typeElement, new TypeExpression(myProject, types));
return typeElement;
}
/**
 * Replaces {@code templateElement} in the PSI tree with a freshly created
 * type element for {@code type}, suppressing post-processing reformatting
 * around the replacement. Returns the new element.
 */
private PsiTypeElement replaceTypeElement(@NotNull PsiTypeElement templateElement, @NotNull PsiType type) {
    final PsiTypeElement replacement = JavaPsiFacade.getElementFactory(myProject).createTypeElement(type);
    final PostprocessReformattingAspect reformatting = PostprocessReformattingAspect.getInstance(myProject);
    return reformatting.disablePostprocessFormattingInside(
        () -> (PsiTypeElement)templateElement.replace(replacement)
    );
}
/**
 * Builds a substitutor that maps to raw types the type parameters of every
 * type-parameter-list owner between {@code context} and {@code targetClass}
 * (exclusive). Returns the empty substitutor when either argument is null.
 */
private static PsiSubstitutor getRawingSubstitutor(Project project, PsiElement context, PsiClass targetClass) {
    if (context == null || targetClass == null) return PsiSubstitutor.EMPTY;
    PsiSubstitutor result = PsiSubstitutor.EMPTY;
    final PsiManager psiManager = context.getManager();
    for (PsiTypeParameterListOwner owner = PsiTreeUtil.getParentOfType(context, PsiTypeParameterListOwner.class);
         owner != null && !psiManager.areElementsEquivalent(owner, targetClass);
         owner = owner.getContainingClass()) {
        result = JavaPsiFacade.getElementFactory(project).createRawSubstitutor(result, owner.getTypeParameters());
    }
    return result;
}
/**
 * Unwraps array types and returns the deep component as a class type, or
 * null when the component is not a class type (e.g. a primitive).
 */
@Nullable
private static PsiClassType getComponentType(PsiType type) {
    final PsiType deepComponent = type.getDeepComponentType();
    return deepComponent instanceof PsiClassType ? (PsiClassType)deepComponent : null;
}
// Result codes for substituteToTypeParameters():
// nothing in the type element was replaced with a template expression.
private static final int SUBSTITUTED_NONE = 0;
// the top-level reference itself was replaced.
private static final int SUBSTITUTED_IN_REF = 1;
// only nested type arguments were replaced.
private static final int SUBSTITUTED_IN_PARAMETERS = 2;
/**
 * Replaces (parts of) {@code typeElement} with template expressions offering the
 * type parameters of {@code mySubstitutor} whose substitution matches
 * {@code expectedType} strictly; recurses into type arguments when the whole
 * type does not match.
 *
 * @param typeElement       type element in the generated code being templated
 * @param expectedType      type this element is expected to conform to
 * @param rawingSubstitutor substitutor erasing enclosing type parameters
 * @param toplevel          true when {@code typeElement} is the outermost reference
 * @return SUBSTITUTED_IN_REF when the whole reference was templated,
 *         SUBSTITUTED_IN_PARAMETERS when only nested arguments were,
 *         SUBSTITUTED_NONE otherwise
 */
private int substituteToTypeParameters(PsiTypeElement typeElement,
PsiType expectedType,
PsiSubstitutor rawingSubstitutor,
boolean toplevel) {
// Type parameters whose substitution equals the expected type are offered directly.
final List<PsiTypeParameter> matchedParameters = matchingTypeParameters(mySubstitutor, expectedType, TYPE_STRICTLY);
if (!matchedParameters.isEmpty()) {
List<PsiType> types = new SmartList<>(map(matchedParameters, it -> myFactory.createType(it)));
PsiType substituted = rawingSubstitutor.substitute(expectedType);
// Also offer the raw-erased expected type, unless erasure degenerated it to
// Object, and — in nested positions — unless erasure actually changed it.
if (!CommonClassNames.JAVA_LANG_OBJECT.equals(substituted.getCanonicalText()) && (toplevel || substituted.equals(expectedType))) {
types.add(substituted);
}
myBuilder.replaceElement(typeElement, new TypeExpression(myProject, types));
return toplevel ? SUBSTITUTED_IN_REF : SUBSTITUTED_IN_PARAMETERS;
}
// No direct match: recurse pairwise into the type arguments.
// NOTE(review): assumes both argument lists have the same length — confirm that
// callers only pass types resolved against the same reference.
final PsiTypeElement[] innerTypeElements = typeArguments(typeElement);
if (innerTypeElements == null) return SUBSTITUTED_NONE;
final PsiType[] expectedTypeArguments = typeArguments(expectedType);
if (expectedTypeArguments == null) return SUBSTITUTED_NONE;
boolean substituted = false;
for (int i = 0; i < innerTypeElements.length; i++) {
substituted |= substituteToTypeParameters(innerTypeElements[i], expectedTypeArguments[i],
rawingSubstitutor, false) != SUBSTITUTED_NONE;
}
return substituted ? SUBSTITUTED_IN_PARAMETERS : SUBSTITUTED_NONE;
}
/**
 * Strips array dimensions down to the innermost reference
 * (Foo&lt;String, Bar&gt;[][] -&gt; Foo&lt;String, Bar&gt;) and returns that
 * reference's type-argument elements, or null when there is no reference
 * or no argument list.
 */
private static PsiTypeElement @Nullable [] typeArguments(@NotNull PsiTypeElement typeElement) {
final PsiJavaCodeReferenceElement ref = typeElement.getInnermostComponentReferenceElement();
if (ref != null) {
final PsiReferenceParameterList argumentList = ref.getParameterList();
if (argumentList != null) {
return argumentList.getTypeParameterElements();
}
}
return null;
}
/** Returns the type arguments of the (array-unwrapped) class type, or null for non-class types. */
private static PsiType @Nullable [] typeArguments(@NotNull PsiType type) {
final PsiClassType classType = getComponentType(type);
if (classType == null) return null;
return classType.getParameters();
}
/**
 * Normalizes expected types before templating: the null type becomes
 * {@code java.lang.Object} and captured wildcards collapse to their upper bound.
 */
public static class MyTypeVisitor extends PsiTypeVisitor<PsiType> {
private final GlobalSearchScope myResolveScope;
private final PsiManager myManager;

public MyTypeVisitor(PsiManager manager, GlobalSearchScope resolveScope) {
myManager = manager;
myResolveScope = resolveScope;
}

@Override
public PsiType visitType(@NotNull PsiType type) {
// The null type carries no usable information; fall back to Object.
return type.equals(PsiType.NULL) ? PsiType.getJavaLangObject(myManager, myResolveScope) : type;
}

@Override
public PsiType visitCapturedWildcardType(@NotNull PsiCapturedWildcardType capturedWildcardType) {
// Recurse so the upper bound itself gets normalized too.
return capturedWildcardType.getUpperBound().accept(this);
}
}
/**
 * Collects the type parameters of {@code substitutor} whose substituted type
 * matches {@code expectedType} according to {@code kind}.
 *
 * @return list of type parameters which match expected type after substitution
 */
@NotNull
private static List<PsiTypeParameter> matchingTypeParameters(@NotNull PsiSubstitutor substitutor,
@NotNull PsiType expectedType,
@Type int kind) {
final List<PsiTypeParameter> matched = new SmartList<>();
substitutor.getSubstitutionMap().forEach((parameter, substitution) -> {
// Unsubstituted (null) parameters can never match.
if (substitution != null && matches(substitution, expectedType, kind)) {
matched.add(parameter);
}
});
return matched;
}
/**
 * Tests whether {@code type} matches {@code expectedType} for the given kind:
 * TYPE_STRICTLY — exact equality; TYPE_OR_SUBTYPE — {@code type} may be
 * narrower; TYPE_OR_SUPERTYPE — {@code type} may be wider. Any other kind
 * never matches.
 */
private static boolean matches(@NotNull PsiType type, @NotNull PsiType expectedType, @Type int kind) {
if (kind == TYPE_STRICTLY) {
return type.equals(expectedType);
}
if (kind == TYPE_OR_SUBTYPE) {
return expectedType.isAssignableFrom(type);
}
if (kind == TYPE_OR_SUPERTYPE) {
return type.isAssignableFrom(expectedType);
}
return false;
}
/** True when any type parameter in the substitutor has no concrete substitution (null value). */
private static boolean hasNullSubstitutions(@NotNull PsiSubstitutor substitutor) {
return substitutor.getSubstitutionMap().values().stream().anyMatch(substitution -> substitution == null);
}
}
| |
/**
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright 2011 Eric Haddad Koenig
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.all.download.manager.search;
import java.util.ArrayList;
import java.util.Collection;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.all.commons.IncrementalNamedThreadFactory;
import com.all.downloader.search.ManagedSearcher;
import com.all.downloader.search.SearchDataEvent;
import com.all.downloader.search.SearchErrorEvent;
import com.all.downloader.search.SearchException;
import com.all.downloader.search.SearchProgressEvent;
import com.all.downloader.search.SearcherListener;
/**
 * Fans a single keyword search out over a collection of {@link ManagedSearcher}s,
 * forwards their data and error events to one listener, and synthesizes periodic
 * progress events (one per second, up to 100) on a dedicated scheduler thread.
 */
public class ManagedSearch implements SearcherListener {
    // Fix: the original logged under SearcherManager.class (copy-paste); use this class.
    private static final Log log = LogFactory.getLog(ManagedSearch.class);
    private static final IncrementalNamedThreadFactory threadFactory = new IncrementalNamedThreadFactory(
            "ManagedSearch");
    // Receiver of forwarded search-data / error / synthesized progress events.
    private SearcherListener searcherListener;
    // All searchers this search may use (injected via setSearchers).
    private Collection<ManagedSearcher> searcherCollection;
    // Searchers that accepted the search and have not yet reported an error.
    private Collection<ManagedSearcher> workingSearchers = new ArrayList<ManagedSearcher>();
    private String keyword;
    private ScheduledExecutorService scheduledExecutorService = Executors
            .newSingleThreadScheduledExecutor(threadFactory);
    // Pending progress tick; remains null until the first tick is scheduled.
    private ScheduledFuture<?> updateTaskFuture;

    @Deprecated
    public ManagedSearch() {
    }

    public ManagedSearch(String keyword) {
        this.keyword = keyword;
    }

    public void setSearchers(Collection<ManagedSearcher> searcherCollection) {
        this.searcherCollection = searcherCollection;
    }

    public void setSearcherListener(SearcherListener searcherListener) {
        this.searcherListener = searcherListener;
    }

    /**
     * Starts the keyword search on every configured searcher. Searchers that
     * throw are logged and skipped; if none accept, the listener is notified of
     * the error and a SearchException is thrown. Otherwise the periodic
     * progress task is scheduled.
     *
     * @throws SearchException when no searcher could start the search
     */
    public void search() throws SearchException {
        for (ManagedSearcher searcher : searcherCollection) {
            try {
                // NOTE(review): the listener is attached after search() starts, so
                // events fired before attachment would be dropped — confirm searchers
                // only emit asynchronously.
                searcher.search(keyword);
                searcher.addSearcherListener(this);
                workingSearchers.add(searcher);
            } catch (Exception e) {
                log.error(String.format("Unable to perform search data, searcher[%s]", searcher), e);
            }
        }
        if (workingSearchers.isEmpty()) {
            notifyError();
            throw new SearchException("Searchers are unable to perform search now.");
        } else {
            updateTaskFuture = scheduledExecutorService.schedule(new UpdateProgressTask(), 1, TimeUnit.SECONDS);
        }
    }

    /** Logs the failure and forwards a SearchErrorEvent to the listener. */
    private void notifyError() {
        log.warn(String.format("Search failed for keyword[%s]", keyword));
        searcherListener.onError(new SearchErrorEvent(searcherListener, keyword));
    }

    @Override
    public void updateSearchData(SearchDataEvent searchDataEvent) {
        searcherListener.updateSearchData(searchDataEvent);
    }

    @Override
    public void updateProgress(SearchProgressEvent updateProgressEvent) {
        // omit these events as we'll be generating them here.
    }

    /**
     * Removes the failed searcher from the working set; when the last one
     * fails, the progress task is stopped and the listener is notified.
     */
    @Override
    public void onError(SearchErrorEvent searchErrorEvent) {
        Object source = searchErrorEvent.getSource();
        if (source instanceof ManagedSearcher) {
            ManagedSearcher searcher = (ManagedSearcher) source;
            searcher.removeSearcherListener(this);
            if (workingSearchers.remove(searcher)) {
                if (workingSearchers.isEmpty()) {
                    stopUpdateProgressTask();
                    notifyError();
                }
            } else {
                log.warn(String.format("Searcher[%s] for keyword[%s] not found in working searchers[%s] ", searcher,
                        keyword, workingSearchers.toString()));
            }
        }
    }

    /** Cancels the pending tick, shuts the scheduler down, and detaches from all searchers. */
    private void stopUpdateProgressTask() {
        // Fix: guard against NPE — the future is null if no tick was ever scheduled.
        if (updateTaskFuture != null) {
            updateTaskFuture.cancel(true);
        }
        scheduledExecutorService.shutdownNow();
        for (ManagedSearcher managedSearcher : workingSearchers) {
            managedSearcher.removeSearcherListener(this);
        }
    }

    /** Emits one SearchProgressEvent per run and reschedules itself until 100 ticks elapse. */
    class UpdateProgressTask implements Runnable {
        private static final int TOTAL_SEARCH_PROGRESS_TIME = 100;
        private int progress = 0;

        @Override
        public void run() {
            searcherListener.updateProgress(new SearchProgressEvent(searcherListener, keyword, progress++));
            if (progress <= TOTAL_SEARCH_PROGRESS_TIME) {
                updateTaskFuture = scheduledExecutorService.schedule(this, 1, TimeUnit.SECONDS);
            } else {
                // Fix: corrected typo'd log message ("becasue reach it's time limit").
                log.debug("Stopping search update task because it reached its time limit");
                stopUpdateProgressTask();
            }
        }
    }
}
| |
/*
* Copyright 2015-2016 Jeeva Kandasamy (jkandasa@gmail.com)
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mycontroller.standalone.db.dao;
import java.sql.SQLException;
import java.util.List;
import org.mycontroller.standalone.db.DB_TABLES;
import org.mycontroller.standalone.db.tables.MetricsDoubleTypeDevice;
import org.mycontroller.standalone.metrics.MetricsUtils.AGGREGATION_TYPE;
import org.mycontroller.standalone.utils.McUtils;
import com.j256.ormlite.stmt.DeleteBuilder;
import com.j256.ormlite.stmt.QueryBuilder;
import com.j256.ormlite.stmt.Where;
import com.j256.ormlite.support.ConnectionSource;
import lombok.extern.slf4j.Slf4j;
/**
 * ORMLite DAO for double-valued device metrics: deletion by age or by sensor
 * variable, filtered queries, and raw-SQL min/max/avg aggregation.
 *
 * @author Jeeva Kandasamy (jkandasa)
 * @since 0.0.1
 */
@Slf4j
public class MetricsDoubleTypeDeviceDaoImpl extends BaseAbstractDaoImpl<MetricsDoubleTypeDevice, Object> implements
MetricsDoubleTypeDeviceDao {
public MetricsDoubleTypeDeviceDaoImpl(ConnectionSource connectionSource)
throws SQLException {
super(connectionSource, MetricsDoubleTypeDevice.class);
}
// Deletes every row of the metric's aggregation type with timestamp <= the
// metric's timestamp. SQL errors are logged and swallowed (best-effort cleanup).
@Override
public void deletePrevious(MetricsDoubleTypeDevice metric) {
try {
DeleteBuilder<MetricsDoubleTypeDevice, Object> deleteBuilder = this.getDao().deleteBuilder();
deleteBuilder.where().eq(MetricsDoubleTypeDevice.KEY_AGGREGATION_TYPE, metric.getAggregationType())
.and().le(MetricsDoubleTypeDevice.KEY_TIMESTAMP, metric.getTimestamp());
int count = this.getDao().delete(deleteBuilder.prepare());
_logger.debug("Metric:[{}] deleted, Delete count:{}", metric, count);
} catch (SQLException ex) {
_logger.error("unable to delete metric:[{}]", metric, ex);
}
}
// Deletes all metric rows belonging to the given sensor variable. SQL errors
// are logged and swallowed.
@Override
public void deleteBySensorVariableRefId(int sensorValueRefId) {
try {
DeleteBuilder<MetricsDoubleTypeDevice, Object> deleteBuilder = this.getDao().deleteBuilder();
deleteBuilder.where().eq(MetricsDoubleTypeDevice.KEY_SENSOR_VARIABLE_ID, sensorValueRefId);
int count = deleteBuilder.delete();
_logger.debug("Metric-sensorValueRefId:[{}] deleted, Delete count:{}", sensorValueRefId, count);
} catch (SQLException ex) {
_logger.error("unable to delete metric-sensorValueRefId:[{}]", sensorValueRefId, ex);
}
}
// Returns the rows for the metric's sensor variable, optionally narrowed by
// aggregation type and a (from, to] timestamp window (all taken from the
// template metric). Returns null on SQL error.
@Override
public List<MetricsDoubleTypeDevice> getAll(MetricsDoubleTypeDevice metric) {
try {
QueryBuilder<MetricsDoubleTypeDevice, Object> queryBuilder = this.getDao().queryBuilder();
Where<MetricsDoubleTypeDevice, Object> whereBuilder = queryBuilder.where();
whereBuilder.eq(MetricsDoubleTypeDevice.KEY_SENSOR_VARIABLE_ID,
metric.getSensorVariable().getId());
if (metric.getAggregationType() != null) {
whereBuilder.and().eq(MetricsDoubleTypeDevice.KEY_AGGREGATION_TYPE,
metric.getAggregationType());
}
if (metric.getTimestampFrom() != null) {
whereBuilder.and().gt(MetricsDoubleTypeDevice.KEY_TIMESTAMP,
metric.getTimestampFrom());
}
if (metric.getTimestampTo() != null) {
whereBuilder.and().le(MetricsDoubleTypeDevice.KEY_TIMESTAMP,
metric.getTimestampTo());
}
return queryBuilder.query();
} catch (SQLException ex) {
_logger.error("unable to get, metric:{}", metric, ex);
}
return null;
}
// Fetches the single row matching sensor variable + aggregation type + exact
// timestamp. Returns null when absent or on SQL error.
@Override
public MetricsDoubleTypeDevice get(MetricsDoubleTypeDevice metric) {
try {
return this.getDao().queryForFirst(
this.getDao().queryBuilder()
.where()
.eq(MetricsDoubleTypeDevice.KEY_SENSOR_VARIABLE_ID, metric.getSensorVariable().getId())
.and().eq(MetricsDoubleTypeDevice.KEY_AGGREGATION_TYPE, metric.getAggregationType())
.and().eq(MetricsDoubleTypeDevice.KEY_TIMESTAMP, metric.getTimestamp()).prepare());
} catch (SQLException ex) {
_logger.error("unable to get, metric:{}", metric, ex);
}
return null;
}
// Not implemented: always returns null (present only to satisfy the DAO interface).
@Override
public List<MetricsDoubleTypeDevice> getAll(List<Object> ids) {
return null;
}
// Returns rows (distinct by sensor-variable id column) of the given aggregation
// type inside the (fromTimestamp, toTimestamp] window; null on SQL error.
@Override
public List<MetricsDoubleTypeDevice> getAggregationRequiredVariableIds(AGGREGATION_TYPE aggregationType,
Long fromTimestamp, Long toTimestamp) {
QueryBuilder<MetricsDoubleTypeDevice, Object> queryBuilder = this.getDao().queryBuilder();
try {
return queryBuilder.distinct().selectColumns(MetricsDoubleTypeDevice.KEY_SENSOR_VARIABLE_ID)
.where().eq(MetricsDoubleTypeDevice.KEY_AGGREGATION_TYPE, aggregationType).and()
.gt(MetricsDoubleTypeDevice.KEY_TIMESTAMP, fromTimestamp).and()
.le(MetricsDoubleTypeDevice.KEY_TIMESTAMP, toTimestamp)
.query();
} catch (SQLException ex) {
_logger.error("Exception,", ex);
return null;
}
}
// Computes the overall min, max, and sample-weighted average for the metric's
// sensor variable via three raw SQL queries. Min/max each UNION the stored
// MIN/MAX columns with the AVG column of RAW-aggregation rows; avg is
// SUM(avg*samples)/SUM(samples). Returns a builder-produced result holder, or
// null on SQL error.
@Override
public MetricsDoubleTypeDevice getMinMaxAvg(MetricsDoubleTypeDevice metric) {
StringBuilder query = new StringBuilder();
StringBuilder queryTimestamp = new StringBuilder();
// Shared " AND timestamp > from AND timestamp <= to" fragment, built once and
// appended to each sub-query below.
if (metric.getTimestampFrom() != null) {
queryTimestamp.append(" AND ").append(MetricsDoubleTypeDevice.KEY_TIMESTAMP).append(" > ")
.append(metric.getTimestampFrom());
}
if (metric.getTimestampTo() != null) {
queryTimestamp.append(" AND ").append(MetricsDoubleTypeDevice.KEY_TIMESTAMP).append(" <= ")
.append(metric.getTimestampTo());
}
try {
//Query sample
//SELECT MIN(MINREF) AS MIN FROM (SELECT MIN(MIN) AS MINREF FROM metrics_double_type_device WHERE
//sensorVariableId=7 UNION SELECT MIN(AVG) AS MINREF FROM metrics_double_type_device WHERE
//sensorVariableId=7 AND aggregationType=0 AND timestamp > fromTime AND timestamp <= toTime) AS TABLE_MIN
//Query to get minimum
query.append("SELECT MIN(MINREF) AS MIN FROM (SELECT MIN(MIN) AS MINREF FROM ")
.append(DB_TABLES.METRICS_DOUBLE_TYPE_DEVICE).append(" WHERE ")
.append(MetricsDoubleTypeDevice.KEY_SENSOR_VARIABLE_ID).append("=")
.append(metric.getSensorVariable().getId());
if (queryTimestamp.length() > 0) {
query.append(queryTimestamp);
}
query.append(" UNION ")
.append("SELECT MIN(AVG) AS MINREF FROM ").append(DB_TABLES.METRICS_DOUBLE_TYPE_DEVICE)
.append(" WHERE ")
.append(MetricsDoubleTypeDevice.KEY_SENSOR_VARIABLE_ID).append("=")
.append(metric.getSensorVariable().getId())
.append(" AND ").append(MetricsDoubleTypeDevice.KEY_AGGREGATION_TYPE).append("=")
.append(AGGREGATION_TYPE.RAW.ordinal());
if (queryTimestamp.length() > 0) {
query.append(queryTimestamp);
}
query.append(") AS TABLE_MIN");
if (_logger.isTraceEnabled()) {
_logger.trace("Minimum sql query:{}", query);
}
//Get minimum value
Double min = McUtils.getDouble(this.getDao().queryRaw(query.toString()).getResults().get(0)[0]);
//reset query
query.setLength(0);
//Query sample
//SELECT MAX(MAXREF) AS MAX FROM (SELECT MAX(MAX) AS MAXREF FROM metrics_double_type_device
//WHERE sensorVariableId=7 UNION SELECT MAX(AVG) AS MAXREF FROM metrics_double_type_device
//WHERE sensorVariableId=7 AND aggregationType=0
//AND timestamp > fromTime AND timestamp <= toTime) AS TABLE_MAX
//Query to get maximum
query.append("SELECT MAX(MAXREF) AS MAX FROM (SELECT MAX(MAX) AS MAXREF FROM ")
.append(DB_TABLES.METRICS_DOUBLE_TYPE_DEVICE).append(" WHERE ")
.append(MetricsDoubleTypeDevice.KEY_SENSOR_VARIABLE_ID).append("=")
.append(metric.getSensorVariable().getId());
if (queryTimestamp.length() > 0) {
query.append(queryTimestamp);
}
query.append(" UNION ")
.append("SELECT MAX(AVG) AS MAXREF FROM ").append(DB_TABLES.METRICS_DOUBLE_TYPE_DEVICE)
.append(" WHERE ")
.append(MetricsDoubleTypeDevice.KEY_SENSOR_VARIABLE_ID).append("=")
.append(metric.getSensorVariable().getId())
.append(" AND ").append(MetricsDoubleTypeDevice.KEY_AGGREGATION_TYPE).append("=")
.append(AGGREGATION_TYPE.RAW.ordinal());
//timestamp from / to
if (queryTimestamp.length() > 0) {
query.append(queryTimestamp);
}
query.append(") AS TABLE_MAX");
if (_logger.isTraceEnabled()) {
_logger.trace("Maximum sql query:{}", query);
}
//Get maximum value
Double max = McUtils.getDouble(this.getDao().queryRaw(query.toString()).getResults().get(0)[0]);
//reset query
query.setLength(0);
//Query sample
//SELECT ROUND(SUM(avg * samples) / SUM(samples), 2) AS AVG FROM metrics_double_type_device
//WHERE sensorVariableId=7 AND timestamp > fromTime AND timestamp <= toTime) AS MASTER_TABLE
//Query to get average (weighted by sample count so aggregated rows count correctly)
query.append("SELECT ROUND(SUM(").append(MetricsDoubleTypeDevice.KEY_AVG).append(" * ")
.append(MetricsDoubleTypeDevice.KEY_SAMPLES).append(") / SUM(")
.append(MetricsDoubleTypeDevice.KEY_SAMPLES).append("), 2) AS AVG FROM ")
.append(DB_TABLES.METRICS_DOUBLE_TYPE_DEVICE).append(" WHERE ")
.append(MetricsDoubleTypeDevice.KEY_SENSOR_VARIABLE_ID).append("=")
.append(metric.getSensorVariable().getId());
//timestamp from / to
if (queryTimestamp.length() > 0) {
query.append(queryTimestamp.toString());
}
if (_logger.isTraceEnabled()) {
_logger.trace("Average sql query:{}", query);
}
//Get average value
Double avg = McUtils.getDouble(this.getDao().queryRaw(query.toString()).getResults().get(0)[0]);
return MetricsDoubleTypeDevice.builder()
.min(min)
.max(max)
.avg(avg)
.build();
} catch (SQLException ex) {
_logger.error("Unable to execute query:{}", query, ex);
}
return null;
}
}
| |
/*
* Copyright 2018, EnMasse authors.
* License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html).
*/
package io.enmasse.controller.standard;
import static io.enmasse.address.model.Phase.Active;
import static io.enmasse.address.model.Phase.Configuring;
import static io.enmasse.address.model.Phase.Pending;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.not;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyInt;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.atLeast;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import io.enmasse.address.model.*;
import io.enmasse.admin.model.v1.StandardInfraConfig;
import io.enmasse.admin.model.v1.StandardInfraConfigSpec;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.mockito.internal.util.collections.Sets;
import io.enmasse.admin.model.v1.ResourceAllowance;
import io.enmasse.k8s.api.EventLogger;
import io.fabric8.kubernetes.api.model.KubernetesList;
import io.fabric8.kubernetes.api.model.KubernetesListBuilder;
import io.fabric8.kubernetes.api.model.ObjectMetaBuilder;
import io.fabric8.kubernetes.api.model.apps.StatefulSetBuilder;
public class AddressProvisionerTest {
// Mocked collaborators, recreated in setup() before each test.
private BrokerSetGenerator generator;
private Kubernetes kubernetes;
// Monotonically increasing broker id handed out by idGenerator; reset to 0 per test.
private int id = 0;
private BrokerIdGenerator idGenerator = () -> String.valueOf(id++);
private final StandardInfraConfig infraConfig = new StandardInfraConfig();
@BeforeEach
public void setup() {
// Fresh mocks, an empty infra-config spec, and a clean broker-id counter
// so each test starts from the same state.
id = 0;
infraConfig.setSpec(new StandardInfraConfigSpec());
kubernetes = mock(Kubernetes.class);
generator = mock(BrokerSetGenerator.class);
}
private class ProvisionerTestFixture {
StandardControllerSchema standardControllerSchema;
AddressResolver resolver;
AddressSpaceResolver addressSpaceResolver;
EventLogger logger = mock(EventLogger.class);
AddressProvisioner addressProvisioner;
public ProvisionerTestFixture() {
standardControllerSchema = new StandardControllerSchema();
resolver = new AddressResolver(standardControllerSchema.getType());
addressSpaceResolver = new AddressSpaceResolver(standardControllerSchema.getSchema());
logger = mock(EventLogger.class);
addressProvisioner = new AddressProvisioner(addressSpaceResolver, resolver, standardControllerSchema.getPlan(), generator, kubernetes, logger, "1234", idGenerator);
}
public ProvisionerTestFixture(List<ResourceAllowance> resourceAllowances) {
standardControllerSchema = new StandardControllerSchema(resourceAllowances);
resolver = new AddressResolver(standardControllerSchema.getType());
addressSpaceResolver = new AddressSpaceResolver(standardControllerSchema.getSchema());
logger = mock(EventLogger.class);
addressProvisioner = new AddressProvisioner(addressSpaceResolver, resolver, standardControllerSchema.getPlan(), generator, kubernetes, logger, "1234", idGenerator);
}
}
@Test
public void testUsageCheck() {
Set<Address> addresses = new HashSet<>();
addresses.add(new AddressBuilder()
.withMetadata(new ObjectMetaBuilder()
.withNamespace("ns")
.build())
.withNewSpec()
.withAddress("a1")
.withAddressSpace("myspace")
.withPlan("small-anycast")
.withType("anycast")
.endSpec()
.build());
AddressProvisioner provisioner = new ProvisionerTestFixture().addressProvisioner;
Map<String, Map<String, UsageInfo>> usageMap = provisioner.checkUsage(addresses);
assertThat(usageMap.size(), is(1));
assertThat(usageMap.get("router").size(), is(1));
assertEquals(0.2, usageMap.get("router").get("all").getUsed(), 0.01);
addresses.add(new AddressBuilder()
.withMetadata(new ObjectMetaBuilder()
.withNamespace("ns")
.build())
.withNewSpec()
.withAddress("q1")
.withAddressSpace("myspace")
.withPlan("small-queue")
.withType("queue")
.endSpec()
.withStatus(new AddressStatusBuilder()
.withReady(true)
.addToBrokerStatuses(new BrokerStatus("broker-0", "broker-0-0").setState(BrokerState.Active))
.build())
.build());
usageMap = provisioner.checkUsage(addresses);
assertThat(usageMap.size(), is(2));
assertThat(usageMap.get("router").size(), is(1));
assertThat(usageMap.get("broker").size(), is(1));
assertEquals(0.4, usageMap.get("router").get("all").getUsed(), 0.01);
assertEquals(0.4, usageMap.get("broker").get("broker-0").getUsed(), 0.01);
addresses.add(new AddressBuilder()
.withMetadata(new ObjectMetaBuilder()
.withNamespace("ns")
.build())
.withNewSpec()
.withAddress("q2")
.withAddressSpace("myspace")
.withPlan("small-queue")
.withType("queue")
.endSpec()
.withNewStatus()
.addNewBrokerStatus()
.withClusterId("broker-0")
.withContainerId("broker-0-0")
.withState(BrokerState.Active)
.endBrokerStatus()
.endStatus()
.build());
usageMap = provisioner.checkUsage(addresses);
assertThat(usageMap.size(), is(2));
assertThat(usageMap.get("router").size(), is(1));
assertThat(usageMap.get("broker").size(), is(1));
assertEquals(0.6, usageMap.get("router").get("all").getUsed(), 0.01);
assertEquals(0.8, usageMap.get("broker").get("broker-0").getUsed(), 0.01);
}
@Test
public void testQuotaCheck() {
Set<Address> addresses = new HashSet<>();
addresses.add(createQueue("q1", "small-queue", createPooledBrokerStatus("broker-1234-0")));
addresses.add(createQueue("q2", "small-queue", createPooledBrokerStatus("broker-1234-0")));
addresses.add(createQueue("q3", "small-queue", createPooledBrokerStatus("broker-1234-1")));
AddressProvisioner provisioner = new ProvisionerTestFixture().addressProvisioner;
Map<String, Map<String, UsageInfo>> usageMap = provisioner.checkUsage(addresses);
id = 2;
Address largeQueue = createQueue("q4", "xlarge-queue");
Map<String, Map<String, UsageInfo>> neededMap = provisioner.checkQuota(usageMap, Sets.newSet(largeQueue), Sets.newSet(largeQueue));
assertThat(neededMap, is(usageMap));
assertThat(largeQueue.getStatus().getPhase(), is(Pending));
assertTrue(largeQueue.getStatus().getMessages().contains("Quota exceeded"));
Address smallQueue = createQueue("q4", "small-queue");
neededMap = provisioner.checkQuota(usageMap, Sets.newSet(smallQueue), Sets.newSet(smallQueue));
assertThat(neededMap, is(not(usageMap)));
}
@Test
public void testQuotaCheckPartitionedQueues() {
ProvisionerTestFixture testFixture = new ProvisionerTestFixture(Arrays.asList(
new ResourceAllowance("broker", 1),
new ResourceAllowance("router", 1),
new ResourceAllowance("aggregate", 4)));
Address q1 = new AddressBuilder(createQueue("q1", "small-sharded-queue", createPooledBrokerStatus("broker-1234-0")))
.editStatus()
.withPhase(Active)
.withPlanStatus(AddressPlanStatus.fromAddressPlan(testFixture.standardControllerSchema.getType().findAddressType("queue").get().findAddressPlan("small-queue").get()))
.endStatus()
.build();
AddressProvisioner provisioner = testFixture.addressProvisioner;
Map<String, Map<String, UsageInfo>> usageMap = provisioner.checkUsage(Set.of(q1));
Map<String, Map<String, UsageInfo>> neededMap = provisioner.checkQuota(usageMap, Set.of(q1), Sets.newSet(q1));
assertEquals(usageMap, neededMap);
assertThat(q1.getStatus().getPhase(), is(Active));
assertTrue(q1.getStatus().getMessages().contains("Quota exceeded"));
}
@Test
public void testQuotaCheckMany() {
Map<String, Address> addresses = new HashMap<>();
for (int i = 0; i < 200; i++) {
addresses.put("a" + i, createAddress("a" + i, "anycast", "small-anycast"));
}
AddressProvisioner provisioner = new ProvisionerTestFixture().addressProvisioner;
Map<String, Map<String, UsageInfo>> usageMap = new HashMap<>();
Map<String, Map<String, UsageInfo>> provisionMap = provisioner.checkQuota(usageMap, new LinkedHashSet<>(addresses.values()), new LinkedHashSet<>(addresses.values()));
assertThat(provisionMap.get("router").get("all").getNeeded(), is(1));
int numConfiguring = 0;
for (Address address : addresses.values()) {
if (address.getStatus().getPhase().equals(Phase.Configuring)) {
numConfiguring++;
}
}
assertThat(numConfiguring, is(5));
}
@Test
public void testProvisioningColocated() throws Exception {
Set<Address> addresses = new HashSet<>();
addresses.add(createAddress("a1", "anycast", "small-anycast"));
addresses.add(createAddress("q1", "queue", "small-queue", createPooledBrokerStatus("broker-1234-0")));
AddressProvisioner provisioner = new ProvisionerTestFixture().addressProvisioner;
Map<String, Map<String, UsageInfo>> usageMap = provisioner.checkUsage(addresses);
Address queue = createAddress("q2", "queue", "small-queue");
Map<String, Map<String, UsageInfo>> neededMap = provisioner.checkQuota(usageMap, Sets.newSet(queue), Sets.newSet(queue));
List<BrokerCluster> clusterList = Arrays.asList(new BrokerCluster("broker-1234-0", new KubernetesList()));
provisioner.provisionResources(createDeployment(1), clusterList, neededMap, Sets.newSet(queue), infraConfig);
assertThat(clusterList.get(0).getResources().getItems().size(), is(0));
assertTrue(queue.getStatus().getMessages().isEmpty(), queue.getStatus().getMessages().toString());
assertThat(queue.getStatus().getPhase(), is(Phase.Configuring));
assertThat(queue.getStatus().getBrokerStatuses().get(0).getContainerId(), is("broker-1234-0-0"));
assertThat(queue.getStatus().getBrokerStatuses().get(0).getClusterId(), is("broker-1234-0"));
}
private RouterCluster createDeployment(int replicas) {
return new RouterCluster("router", replicas, infraConfig);
}
@Test
public void testScalingColocated() throws Exception {
Set<Address> addresses = new HashSet<>();
addresses.add(createAddress("a1", "anycast", "small-anycast"));
addresses.add(createAddress("q1", "queue", "small-queue", createPooledBrokerStatus("broker-1234-0")));
addresses.add(createAddress("q2", "queue", "small-queue", createPooledBrokerStatus("broker-1234-0")));
id = 1;
AddressProvisioner provisioner = new ProvisionerTestFixture().addressProvisioner;
Map<String, Map<String, UsageInfo>> usageMap = provisioner.checkUsage(addresses);
Address queue = createAddress("q3", "queue", "small-queue");
Map<String, Map<String, UsageInfo>> provisionMap = provisioner.checkQuota(usageMap, Sets.newSet(queue), Sets.newSet(queue));
List<BrokerCluster> clusterList = new ArrayList<>();
clusterList.add(new BrokerCluster("broker-1234-0", new KubernetesList()));
when(generator.generateCluster(eq("broker-1234-1"), anyInt(), any(), any(), any())).thenReturn(new BrokerCluster("broker-1234-1", new KubernetesList()));
provisioner.provisionResources(createDeployment(1), clusterList, provisionMap, Sets.newSet(queue), infraConfig);
assertTrue(queue.getStatus().getMessages().isEmpty(), queue.getStatus().getMessages().toString());
assertThat(queue.getStatus().getPhase(), is(Phase.Configuring));
assertThat(queue.getStatus().getBrokerStatuses().get(0).getClusterId(), is("broker-1234-1"));
assertThat(queue.getStatus().getBrokerStatuses().get(0).getContainerId(), is("broker-1234-1-0"));
}
@Test
public void testProvisionColocated() throws Exception {
AddressProvisioner provisioner = new ProvisionerTestFixture(Arrays.asList(
new ResourceAllowance("broker", 2),
new ResourceAllowance("router", 1),
new ResourceAllowance("aggregate", 2))).addressProvisioner;
Set<Address> addressSet = Sets.newSet(
createQueue("q9", "pooled-queue-tiny"),
createQueue("q8", "pooled-queue-tiny"),
createQueue("q11", "pooled-queue-tiny"),
createQueue("q12", "pooled-queue-tiny"),
createQueue("q10", "pooled-queue-tiny"),
createQueue("q1", "pooled-queue-large"),
createQueue("q7", "pooled-queue-tiny"),
createQueue("q6", "pooled-queue-small"),
createQueue("q5", "pooled-queue-small"),
createQueue("q4", "pooled-queue-small"),
createQueue("q3", "pooled-queue-small"),
createQueue("q2", "pooled-queue-large"));
Map<String, Map<String, UsageInfo>> usageMap = provisioner.checkUsage(Collections.emptySet());
Map<String, Map<String, UsageInfo>> neededMap = provisioner.checkQuota(usageMap, addressSet, addressSet);
assertThat(neededMap.keySet().size(), is(1));
assertThat(AddressProvisioner.sumTotalNeeded(neededMap), is(2));
List<BrokerCluster> brokerClusters = Arrays.asList(
createCluster("broker-1234-0", 1),
createCluster("broker-1234-1", 1));
provisioner.provisionResources(new RouterCluster("router", 1, null), brokerClusters, neededMap, addressSet, infraConfig);
for (Address address : addressSet) {
assertThat(address.getStatus().getPhase(), is(Phase.Configuring));
}
}
private BrokerCluster createCluster(String clusterId, int replicas) {
KubernetesListBuilder builder = new KubernetesListBuilder();
builder.addToItems(new StatefulSetBuilder()
.editOrNewMetadata()
.withName(clusterId)
.endMetadata()
.editOrNewSpec()
.withReplicas(replicas)
.endSpec()
.build());
return new BrokerCluster(clusterId, builder.build());
}
private Address createQueue(String address, String plan) {
return createQueue(address, plan, (BrokerStatus[])null);
}
private Address createQueue(String address, String plan, BrokerStatus ... brokerStatuses) {
return createAddress(address, "queue", plan, brokerStatuses);
}
private static Address createAddress(String address, String type, String plan) {
return createAddress(address, type, plan, (BrokerStatus[])null);
}
private BrokerStatus createPooledBrokerStatus(String clusterId) {
return new BrokerStatusBuilder()
.withClusterId(clusterId)
.withContainerId(clusterId + "-0")
.withState(BrokerState.Active)
.build();
}
private static Address createAddress(String address, String type, String plan, BrokerStatus ... brokerStatuses) {
final AddressBuilder addressBuilder = new AddressBuilder()
.withNewMetadata()
.withName("myspace." + address)
.withNamespace("ns")
.endMetadata()
.withNewSpec()
.withAddress(address)
.withAddressSpace("myspace")
.withPlan(plan)
.withType(type)
.endSpec();
if (brokerStatuses != null && brokerStatuses.length > 0) {
addressBuilder.withNewStatus()
.addToBrokerStatuses(brokerStatuses)
.endStatus();
}
return addressBuilder.build();
}
private Address createSubscription(String address, String topic, String plan) {
return new AddressBuilder()
.withNewMetadata()
.withNamespace("ns")
.endMetadata()
.withNewSpec()
.withAddress(address)
.withAddressSpace("myspace")
.withPlan(plan)
.withType("subscription")
.withTopic(topic)
.endSpec()
.build();
}
@Test
public void testShardedPooled() throws Exception {
Address q2 = createQueue("q1", "medium-sharded-queue");
AddressProvisioner provisioner = new ProvisionerTestFixture(Arrays.asList(
new ResourceAllowance("broker", 2),
new ResourceAllowance("router", 1),
new ResourceAllowance("aggregate", 4))).addressProvisioner;
Map<String, Map<String, UsageInfo>> usageMap = new HashMap<>();
Map<String, Map<String, UsageInfo>> neededMap = provisioner.checkQuota(usageMap, Set.of(q2), Set.of(q2));
assertThat(neededMap.size(), is(2));
assertThat(neededMap.get("broker").size(), is(2));
usageMap = provisioner.checkUsage(Set.of(q2));
assertEquals(neededMap, usageMap);
}
@Test
public void testProvisioningShardedTopic() throws Exception {
Set<Address> addresses = new HashSet<>();
addresses.add(createAddress("a1", "anycast", "small-anycast"));
AddressProvisioner provisioner = new ProvisionerTestFixture(Arrays.asList(
new ResourceAllowance("broker", 3),
new ResourceAllowance("router", 1),
new ResourceAllowance("aggregate", 4))).addressProvisioner;
Map<String, Map<String, UsageInfo>> usageMap = provisioner.checkUsage(addresses);
Address t1 = createAddress("t1", "topic", "xlarge-topic");
Address t2 = createAddress("t2", "topic", "large-topic");
Map<String, Map<String, UsageInfo>> neededMap = provisioner.checkQuota(usageMap, Sets.newSet(t1, t2), Sets.newSet(t1, t2));
when(generator.generateCluster(eq(provisioner.getShardedClusterId(t1)), anyInt(), eq(t1), any(), any())).thenReturn(new BrokerCluster(provisioner.getShardedClusterId(t1), new KubernetesList()));
when(generator.generateCluster(eq(provisioner.getShardedClusterId(t2)), anyInt(), eq(t2), any(), any())).thenReturn(new BrokerCluster(provisioner.getShardedClusterId(t2), new KubernetesList()));
provisioner.provisionResources(createDeployment(1), new ArrayList<>(), neededMap, Sets.newSet(t1, t2), infraConfig);
assertTrue(t1.getStatus().getMessages().isEmpty(), t1.getStatus().getMessages().toString());
assertThat(t1.getStatus().getPhase(), is(Phase.Configuring));
assertThat(t1.getStatus().getBrokerStatuses().get(0).getContainerId(), is("t1"));
verify(generator).generateCluster(eq(provisioner.getShardedClusterId(t1)), eq(2), eq(t1), any(), any());
assertTrue(t2.getStatus().getMessages().isEmpty(), t2.getStatus().getMessages().toString());
assertThat(t2.getStatus().getPhase(), is(Phase.Configuring));
assertThat(t2.getStatus().getBrokerStatuses().get(0).getContainerId(), is("t2"));
verify(generator).generateCluster(eq(provisioner.getShardedClusterId(t2)), eq(1), eq(t2), any(), any());
}
@Test
public void testProvisioningSharded() throws Exception {
Set<Address> addresses = new HashSet<>();
addresses.add(createAddress("a1", "anycast", "small-anycast"));
AddressProvisioner provisioner = new ProvisionerTestFixture(Arrays.asList(
new ResourceAllowance("broker", 3),
new ResourceAllowance("router", 1),
new ResourceAllowance("aggregate", 4))).addressProvisioner;
Map<String, Map<String, UsageInfo>> usageMap = provisioner.checkUsage(addresses);
Address q1 = createQueue("q1", "xlarge-queue");
Address q2 = createQueue("q2", "large-queue");
Map<String, Map<String, UsageInfo>> neededMap = provisioner.checkQuota(usageMap, Sets.newSet(q1, q2), Sets.newSet(q1, q2));
when(generator.generateCluster(eq("broker-1234-0"), anyInt(), any(), any(), any())).thenReturn(new BrokerCluster("broker-1234-0", new KubernetesList()));
when(generator.generateCluster(eq("broker-1234-1"), anyInt(), any(), any(), any())).thenReturn(new BrokerCluster("broker-1234-1", new KubernetesList()));
when(generator.generateCluster(eq("broker-1234-2"), anyInt(), any(), any(), any())).thenReturn(new BrokerCluster("broker-1234-2", new KubernetesList()));
provisioner.provisionResources(createDeployment(1), new ArrayList<>(), neededMap, Sets.newSet(q1, q2), infraConfig);
assertTrue(q1.getStatus().getMessages().isEmpty(), q1.getStatus().getMessages().toString());
assertThat(q1.getStatus().getPhase(), is(Phase.Configuring));
assertThat(q1.getStatus().getBrokerStatuses().size(), is(2));
assertTrue(q1.getStatus().getBrokerStatuses().stream().map(BrokerStatus::getContainerId).collect(Collectors.toSet()).contains("broker-1234-1-0"));
assertTrue(q1.getStatus().getBrokerStatuses().stream().map(BrokerStatus::getContainerId).collect(Collectors.toSet()).contains("broker-1234-2-0"));
verify(generator).generateCluster(eq("broker-1234-1"), eq(1), any(), any(), any());
verify(generator).generateCluster(eq("broker-1234-2"), eq(1), any(), any(), any());
assertTrue(q2.getStatus().getMessages().isEmpty(), q2.getStatus().getMessages().toString());
assertThat(q2.getStatus().getPhase(), is(Phase.Configuring));
assertThat(q2.getStatus().getBrokerStatuses().size(), is(1));
assertThat(q2.getStatus().getBrokerStatuses().get(0).getContainerId(), is("broker-1234-0-0"));
verify(generator).generateCluster(eq("broker-1234-0"), eq(1), any(), any(), any());
}
@Test
public void testUpgradeFromNoAppliedPlan() throws Exception {
AddressProvisioner provisioner = new ProvisionerTestFixture(Arrays.asList(
new ResourceAllowance("broker", 3),
new ResourceAllowance("router", 1),
new ResourceAllowance("aggregate", 4))).addressProvisioner;
Set<Address> addresses = new HashSet<>();
Address q1 = createQueue("q1", "xlarge-queue");
Address a1 = createAddress("a1", "anycast", "small-anycast");
addresses.add(q1);
addresses.add(a1);
Map<String, Map<String, UsageInfo>> usageMap = provisioner.checkUsage(addresses);
assertNotEquals(q1.getSpec().getPlan(), AppliedConfig.getCurrentAppliedPlanFromAddress(q1).orElse(null));
assertNotEquals(a1.getSpec().getPlan(), AppliedConfig.getCurrentAppliedPlanFromAddress(a1).orElse(null));
@SuppressWarnings("unused")
Map<String, Map<String, UsageInfo>> neededMap = provisioner.checkQuota(usageMap, Sets.newSet(a1, q1), Sets.newSet(a1, q1));
assertEquals(q1.getSpec().getPlan(), AppliedConfig.getCurrentAppliedPlanFromAddress(q1).orElse(null));
assertEquals(a1.getSpec().getPlan(), AppliedConfig.getCurrentAppliedPlanFromAddress(a1).orElse(null));
}
@Test
public void testSwitchShardedAddressPlan() throws Exception {
AddressProvisioner provisioner = new ProvisionerTestFixture(Arrays.asList(
new ResourceAllowance("broker", 2),
new ResourceAllowance("router", 1),
new ResourceAllowance("aggregate", 4))).addressProvisioner;
Address q1 = createQueue("q1", "large-queue");
Map<String, Map<String, UsageInfo>> usageMap = provisioner.checkUsage(Collections.emptySet());
provisioner.checkQuota(usageMap, Sets.newSet(q1), Sets.newSet(q1));
assertEquals(q1.getSpec().getPlan(), AppliedConfig.getCurrentAppliedPlanFromAddress(q1).orElse(null));
q1 = new AddressBuilder(q1)
.editOrNewSpec()
.withPlan("xlarge-queue")
.endSpec()
.build();
q1.getStatus().setPhase(Active);
usageMap = provisioner.checkUsage(Sets.newSet(q1));
@SuppressWarnings("unused")
Map<String, Map<String, UsageInfo>> neededMap = provisioner.checkQuota(usageMap, Sets.newSet(q1), Sets.newSet(q1));
assertEquals(q1.getSpec().getPlan(), AppliedConfig.getCurrentAppliedPlanFromAddress(q1).orElse(null));
}
@Test
public void testSwitchPooledToShardedQuotaCheck() throws Exception {
AddressProvisioner provisioner = new ProvisionerTestFixture(Arrays.asList(
new ResourceAllowance("broker", 1),
new ResourceAllowance("router", 1),
new ResourceAllowance("aggregate", 4))).addressProvisioner;
Address q1 = createQueue("q1", "small-queue");
Map<String, Map<String, UsageInfo>> usageMap = provisioner.checkUsage(Collections.emptySet());
provisioner.checkQuota(usageMap, Sets.newSet(q1), Sets.newSet(q1));
assertEquals(q1.getSpec().getPlan(), AppliedConfig.getCurrentAppliedPlanFromAddress(q1).orElse(null));
q1.getStatus().setPhase(Active);
q1 = new AddressBuilder(q1)
.editOrNewSpec()
.withPlan("large-queue")
.endSpec()
.build();
usageMap = provisioner.checkUsage(Sets.newSet(q1));
provisioner.checkQuota(usageMap, Sets.newSet(q1), Sets.newSet(q1));
assertTrue(q1.getStatus().getMessages().isEmpty());
assertThat(q1.getStatus().getPhase(), is(Configuring));
assertEquals(q1.getSpec().getPlan(), AppliedConfig.getCurrentAppliedPlanFromAddress(q1).orElse(null));
}
/*
@Test
public void testReuseExistingBrokerWhenSharding() throws Exception {
AddressProvisioner provisioner = new ProvisionerTestFixture(Arrays.asList(
new ResourceAllowance("broker", 2),
new ResourceAllowance("router", 1),
new ResourceAllowance("aggregate", 4))).addressProvisioner;
Address q1 = createQueue("q1", "large-queue");
Map<String, Map<String, UsageInfo>> usageMap = provisioner.checkUsage(Collections.emptySet());
provisioner.checkQuota(usageMap, Sets.newSet(q1), Sets.newSet(q1));
assertEquals(q1.getSpec().getPlan(), q1.getAnnotation(AnnotationKeys.APPLIED_PLAN));
q1.getStatus().setPhase(Active);
q1 = new AddressBuilder(q1)
.editOrNewSpec()
.withPlan("xlarge-queue")
.endSpec()
.build();
usageMap = provisioner.checkUsage(Sets.newSet(q1));
provisioner.checkQuota(usageMap, Sets.newSet(q1), Sets.newSet(q1));
assertTrue(q1.getStatus().getMessages().isEmpty());
assertThat(q1.getStatus().getPhase(), is(Configuring));
assertEquals(q1.getSpec().getPlan(), q1.getAnnotation(AnnotationKeys.APPLIED_PLAN));
}
*/
@Test
public void testScalingRouter() throws Exception {
Set<Address> addresses = new HashSet<>();
for (int i = 0; i < 199; i++) {
addresses.add(createAddress("a" + i, "anycast", "small-anycast"));
}
AddressProvisioner provisioner = new ProvisionerTestFixture(Arrays.asList(
new ResourceAllowance("broker", 0),
new ResourceAllowance("router", 100000),
new ResourceAllowance("aggregate", 100000))).addressProvisioner;
Map<String, Map<String, UsageInfo>> usageMap = new HashMap<>();
Map<String, Map<String, UsageInfo>> neededMap = provisioner.checkQuota(usageMap, addresses, addresses);
provisioner.provisionResources(createDeployment(1), new ArrayList<>(), neededMap, addresses, infraConfig);
verify(kubernetes, atLeast(1)).scaleStatefulSet(eq("router"), eq(40));
verify(kubernetes, never()).scaleStatefulSet(eq("router"), eq(41));
}
@Test
public void testDurableSubscriptionsColocated() {
AddressProvisioner provisioner = new ProvisionerTestFixture(Arrays.asList(
new ResourceAllowance("broker", 2),
new ResourceAllowance("router", 1),
new ResourceAllowance("aggregate", 3))).addressProvisioner;
Set<Address> addressSet = Sets.newSet(
createAddress("t1", "topic", "small-topic"),
createAddress("t2", "topic", "small-topic"),
createSubscription("s1", "t1", "small-subscription"));
Map<String, Map<String, UsageInfo>> usageMap = provisioner.checkUsage(Collections.emptySet());
Map<String, Map<String, UsageInfo>> neededMap = provisioner.checkQuota(usageMap, addressSet, addressSet);
assertThat(neededMap.keySet().size(), is(3));
assertThat(AddressProvisioner.sumTotalNeeded(neededMap), is(2));
assertThat(AddressProvisioner.sumNeeded(neededMap.get("router")), is(1));
assertThat(AddressProvisioner.sumNeeded(neededMap.get("broker")), is(1));
assertThat(AddressProvisioner.sumNeeded(neededMap.get("subscription")), is(1));
for (Address address : addressSet) {
assertThat(address.getStatus().getPhase(), is(Phase.Configuring));
}
}
@Test
public void testDurableSubscriptionsColocatedStaysOnTopicBroker() {
AddressProvisioner provisioner = new ProvisionerTestFixture(Arrays.asList(
new ResourceAllowance("broker", 2),
new ResourceAllowance("router", 1),
new ResourceAllowance("aggregate", 3))).addressProvisioner;
Set<Address> addressSet = Sets.newSet(
createAddress("t1", "topic", "small-topic"),
createSubscription("s1", "t1", "small-subscription"),
createSubscription("s2", "t1", "small-subscription"),
createSubscription("s3", "t1", "small-subscription"),
createSubscription("s4", "t1", "small-subscription"),
createSubscription("s5", "t1", "small-subscription"),
createSubscription("s6", "t1", "small-subscription"),
createSubscription("s7", "t1", "small-subscription"),
createSubscription("s8", "t1", "small-subscription"),
createSubscription("s9", "t1", "small-subscription"),
createSubscription("s10", "t1", "small-subscription"),
createSubscription("s11", "t1", "small-subscription"),
createSubscription("s12", "t1", "small-subscription"));
Map<String, Map<String, UsageInfo>> usageMap = provisioner.checkUsage(Collections.emptySet());
Map<String, Map<String, UsageInfo>> neededMap = provisioner.checkQuota(usageMap, addressSet, addressSet);
assertThat(AddressProvisioner.sumTotalNeeded(neededMap), is(2));
assertThat(AddressProvisioner.sumNeeded(neededMap.get("router")), is(1));
assertThat(AddressProvisioner.sumNeeded(neededMap.get("broker")), is(1));
assertThat(AddressProvisioner.sumNeeded(neededMap.get("subscription")), is(1));
Set<Address> configured = new HashSet<>();
Set<Address> unConfigured = new HashSet<>();
for (Address address : addressSet) {
if (address.getStatus().getPhase().equals(Phase.Pending)) {
unConfigured.add(address);
} else if (address.getStatus().getPhase().equals(Phase.Configuring)) {
configured.add(address);
}
}
assertEquals(2, unConfigured.size());
assertEquals(11, configured.size(), "contains topic + 10 subscriptions");
Iterator<Address> unconfiguredIterator = unConfigured.iterator();
assertFalse(configured.contains(unconfiguredIterator.next()));
assertFalse(configured.contains(unconfiguredIterator.next()));
}
@Test
public void testDurableSubscriptionsSharded() throws Exception {
AddressProvisioner provisioner = new ProvisionerTestFixture(Arrays.asList(
new ResourceAllowance("broker", 2),
new ResourceAllowance("router", 1),
new ResourceAllowance("aggregate", 3))).addressProvisioner;
Address t1 = createAddress("t1", "topic", "large-topic");
Address t2 = createAddress("t2", "topic", "large-topic");
Address s1 = createSubscription("s1", "t1", "small-subscription");
Set<Address> addressSet = Sets.newSet(
t1,
t2,
s1);
Map<String, Map<String, UsageInfo>> usageMap = provisioner.checkUsage(Collections.emptySet());
Map<String, Map<String, UsageInfo>> neededMap = provisioner.checkQuota(usageMap, addressSet, addressSet);
assertThat(neededMap.keySet().size(), is(3));
assertThat(AddressProvisioner.sumTotalNeeded(neededMap), is(3));
List<BrokerCluster> brokerClusters = new ArrayList<>(Arrays.asList(createCluster("broker", 1)));
when(generator.generateCluster(eq(provisioner.getShardedClusterId(t1)), anyInt(), eq(t1), any(), any())).thenReturn(new BrokerCluster(provisioner.getShardedClusterId(t1), new KubernetesList()));
when(generator.generateCluster(eq(provisioner.getShardedClusterId(t2)), anyInt(), eq(t2), any(), any())).thenReturn(new BrokerCluster(provisioner.getShardedClusterId(t2), new KubernetesList()));
provisioner.provisionResources(createDeployment(1), brokerClusters, neededMap, addressSet, infraConfig);
for (Address address : addressSet) {
assertThat(address.getStatus().getPhase(), is(Phase.Configuring));
}
verify(generator).generateCluster(eq(provisioner.getShardedClusterId(t2)), eq(1), eq(t2), any(), any());
verify(generator).generateCluster(eq(provisioner.getShardedClusterId(t1)), eq(1), eq(t1), any(), any());
}
@Test
public void testLargeSubscription() throws Exception {
AddressProvisioner provisioner = new ProvisionerTestFixture(Arrays.asList(
new ResourceAllowance("broker", 2),
new ResourceAllowance("router", 1),
new ResourceAllowance("aggregate", 3))).addressProvisioner;
Address t1 = createAddress("t1", "topic", "large-topic");
Address s1 = createSubscription("s1", "t1", "large-subscription");
Set<Address> addressSet = Sets.newSet(
t1,
s1);
Map<String, Map<String, UsageInfo>> usageMap = provisioner.checkUsage(Collections.emptySet());
Map<String, Map<String, UsageInfo>> neededMap = provisioner.checkQuota(usageMap, addressSet, addressSet);
assertThat(neededMap.keySet().size(), is(3));
assertThat(AddressProvisioner.sumTotalNeeded(neededMap), is(2));
List<BrokerCluster> brokerClusters = new ArrayList<>(Arrays.asList(createCluster("broker", 1)));
when(generator.generateCluster(eq(provisioner.getShardedClusterId(t1)), anyInt(), eq(t1), any(), any())).thenReturn(new BrokerCluster(provisioner.getShardedClusterId(t1), new KubernetesList()));
provisioner.provisionResources(createDeployment(1), brokerClusters, neededMap, addressSet, infraConfig);
for (Address address : addressSet) {
assertThat(address.getStatus().getPhase(), is(Configuring));
}
verify(generator).generateCluster(eq(provisioner.getShardedClusterId(t1)), eq(1), eq(t1), any(), any());
}
@Test
public void testDurableSubscriptionsShardedStaysOnTopicBroker() {
AddressProvisioner provisioner = new ProvisionerTestFixture(Arrays.asList(
new ResourceAllowance("broker", 2),
new ResourceAllowance("router", 1),
new ResourceAllowance("aggregate", 3))).addressProvisioner;
Address t1 = createAddress("t1", "topic", "small-topic");
Address t2 = createAddress("t2", "topic", "small-topic");
Set<Address> addressSet = Sets.newSet(
t1,
createSubscription("s1", "t1", "small-subscription"),
createSubscription("s2", "t1", "small-subscription"),
createSubscription("s3", "t1", "small-subscription"),
createSubscription("s4", "t1", "small-subscription"),
createSubscription("s5", "t1", "small-subscription"),
createSubscription("s6", "t1", "small-subscription"),
createSubscription("s7", "t1", "small-subscription"),
createSubscription("s8", "t1", "small-subscription"),
createSubscription("s9", "t1", "small-subscription"),
createSubscription("s10", "t1", "small-subscription"),
createSubscription("s11", "t1", "small-subscription"),
createSubscription("s12", "t1", "small-subscription"),
t2);
Map<String, Map<String, UsageInfo>> usageMap = provisioner.checkUsage(Collections.emptySet());
Map<String, Map<String, UsageInfo>> neededMap = provisioner.checkQuota(usageMap, addressSet, addressSet);
assertThat(neededMap.keySet().size(), is(3));
assertThat(AddressProvisioner.sumTotalNeeded(neededMap), is(2));
assertThat(AddressProvisioner.sumNeeded(neededMap.get("router")), is(1));
assertThat(AddressProvisioner.sumNeeded(neededMap.get("broker")), is(1));
assertThat(AddressProvisioner.sumNeeded(neededMap.get("subscription")), is(1));
Set<Address> configured = new HashSet<>();
Set<Address> unConfigured = new HashSet<>();
for (Address address : addressSet) {
if (address.getStatus().getPhase().equals(Phase.Pending)) {
unConfigured.add(address);
} else if (address.getStatus().getPhase().equals(Phase.Configuring)) {
configured.add(address);
}
}
assertEquals(2, unConfigured.size());
assertTrue(configured.contains(t1));
assertTrue(configured.contains(t2));
assertEquals(12, configured.size(), "contains 2 topic + 10 subscriptions");
Iterator<Address> unconfiguredIterator = unConfigured.iterator();
assertFalse(configured.contains(unconfiguredIterator.next()));
assertFalse(configured.contains(unconfiguredIterator.next()));
}
}
| |
package com.vladmihalcea.book.hpjp.util;
import java.lang.reflect.*;
import java.util.LinkedHashSet;
import java.util.Set;
/**
* <code>ReflectionUtils</code> - Reflection utilities holder.
*
* @author Vlad Mihalcea
*/
public final class ReflectionUtils {
    // Conventional JavaBean accessor prefixes used when resolving getters/setters.
    private static final String GETTER_PREFIX = "get";
    private static final String SETTER_PREFIX = "set";
    /**
     * Prevent any instantiation.
     */
    private ReflectionUtils() {
        // Static utility holder — fail fast if instantiation is forced via reflection.
        throw new UnsupportedOperationException("The " + getClass() + " is not instantiable!");
    }
/**
* Instantiate a new {@link Object} of the provided type.
*
* @param className The fully-qualified Java class name of the {@link Object} to instantiate
* @param <T> class type
* @return new Java {@link Object} of the provided type
*/
public static <T> T newInstance(String className) {
Class clazz = getClass(className);
return newInstance(clazz);
}
/**
* Instantiate a new {@link Object} of the provided type.
*
* @param clazz The Java {@link Class} of the {@link Object} to instantiate
* @param <T> class type
* @return new Java {@link Object} of the provided type
*/
@SuppressWarnings("unchecked")
public static <T> T newInstance(Class clazz) {
try {
return (T) clazz.newInstance();
} catch (InstantiationException e) {
throw handleException(e);
} catch (IllegalAccessException e) {
throw handleException(e);
}
}
    /**
     * Instantiate a new {@link Object} of the provided type.
     *
     * @param clazz The Java {@link Class} of the {@link Object} to instantiate
     * @param args The arguments that need to be passed to the constructor
     * @param argsTypes The argument types that need to be passed to the constructor
     * @param <T> class type
     * @return new Java {@link Object} of the provided type
     */
    @SuppressWarnings("unchecked")
    public static <T> T newInstance(Class clazz, Object[] args, Class[] argsTypes) {
        try {
            // Declared (possibly non-public) constructor matching the given signature;
            // made accessible so private constructors can be invoked too.
            Constructor<T> constructor = clazz.getDeclaredConstructor(argsTypes);
            constructor.setAccessible(true);
            return constructor.newInstance(args);
        } catch (InstantiationException e) {
            throw handleException(e);
        } catch (IllegalAccessException e) {
            throw handleException(e);
        } catch (NoSuchMethodException e) {
            throw handleException(e);
        } catch (InvocationTargetException e) {
            // Constructor body threw; rethrow via the common exception translator.
            throw handleException(e);
        }
    }
/**
 * Get the {@link Field} with the given name belonging to the provided Java {@link Class}.
 * <p>
 * Lookup order: the class' own declared fields first, then publicly visible fields
 * (which includes constants inherited from interfaces), then the superclass chain.
 * The returned {@link Field} is made accessible.
 *
 * @param targetClass the provided Java {@link Class} the field belongs to
 * @param fieldName the {@link Field} name
 * @return the {@link Field} matching the given name
 * @throws IllegalArgumentException if no field with the given name can be resolved
 */
public static Field getField(Class targetClass, String fieldName) {
    Field field = null;
    try {
        field = targetClass.getDeclaredField(fieldName);
    } catch (NoSuchFieldException e) {
        try {
            field = targetClass.getField(fieldName);
        } catch (NoSuchFieldException ignore) {
        }
        // BUGFIX: the original fell through to the superclass (or threw) even when
        // getField(fieldName) above had already found a publicly visible field, so
        // e.g. constants inherited from interfaces raised a spurious
        // IllegalArgumentException. Only walk up the hierarchy when nothing was
        // found here. Also guard against a null superclass (interfaces, Object).
        if (field == null) {
            Class superClass = targetClass.getSuperclass();
            if (superClass != null && !superClass.equals(Object.class)) {
                return getField(superClass, fieldName);
            } else {
                throw handleException(e);
            }
        }
    } finally {
        // Make private/protected fields usable by the reflective callers.
        if (field != null) {
            field.setAccessible(true);
        }
    }
    return field;
}
/**
 * Get the {@link Field} with the given name belonging to the provided Java {@link Class} or {@code null}
 * if no {@link Field} was found.
 *
 * @param targetClass the provided Java {@link Class} the field belongs to
 * @param fieldName the {@link Field} name
 * @return the {@link Field} matching the given name or {@code null}
 */
public static Field getFieldOrNull(Class targetClass, String fieldName) {
    try {
        return getField(targetClass, fieldName);
    } catch (IllegalArgumentException e) {
        // getField wraps NoSuchFieldException in IllegalArgumentException;
        // absence is reported to the caller as null instead.
        return null;
    }
}
/**
 * Read the value of the named field from the given target {@link Object}.
 *
 * @param target target {@link Object} whose field we are retrieving the value from
 * @param fieldName field name
 * @param <T> field type
 * @return field value
 * @throws IllegalArgumentException if the field is missing or cannot be read
 */
public static <T> T getFieldValue(Object target, String fieldName) {
    // Resolve (and make accessible) the field on the target's runtime class.
    Field resolved = getField(target.getClass(), fieldName);
    try {
        @SuppressWarnings("unchecked")
        T value = (T) resolved.get(target);
        return value;
    } catch (IllegalAccessException e) {
        throw handleException(e);
    }
}
/**
 * Read the value of the named field from the given target {@link Object},
 * returning {@code null} when the value cannot be accessed.
 *
 * @param target target {@link Object} whose field we are retrieving the value from
 * @param fieldName field name
 * @param <T> field type
 * @return field value matching the given name or {@code null}
 */
public static <T> T getFieldValueOrNull(Object target, String fieldName) {
    // Note: a missing field still propagates getField's IllegalArgumentException;
    // only access failures are converted to null, matching the original contract.
    Field resolved = getField(target.getClass(), fieldName);
    try {
        @SuppressWarnings("unchecked")
        T value = (T) resolved.get(target);
        return value;
    } catch (IllegalAccessException e) {
        return null;
    }
}
/**
 * Write {@code value} into the named field of the given target {@link Object}.
 *
 * @param target target object
 * @param fieldName field name
 * @param value field value
 * @throws IllegalArgumentException if the field is missing or cannot be written
 */
public static void setFieldValue(Object target, String fieldName, Object value) {
    // Field resolution happens outside the try: getField never throws
    // IllegalAccessException, so behavior is unchanged.
    Field resolved = getField(target.getClass(), fieldName);
    try {
        resolved.set(target, value);
    } catch (IllegalAccessException e) {
        throw handleException(e);
    }
}
/**
 * Get the {@link Method} with the given signature (name and parameter types) belonging to
 * the provided Java {@link Object}.
 *
 * @param target target {@link Object}
 * @param methodName method name
 * @param parameterTypes method parameter types
 * @return return {@link Method} matching the provided signature
 */
public static Method getMethod(Object target, String methodName, Class... parameterTypes) {
    // Delegate to the Class-based lookup using the target's runtime class.
    return getMethod(target.getClass(), methodName, parameterTypes);
}
/**
 * Get the {@link Method} with the given signature (name and parameter types) belonging to
 * the provided Java {@link Object}, or {@code null} if the lookup fails.
 *
 * @param target target {@link Object}
 * @param methodName method name
 * @param parameterTypes method parameter types
 * @return return {@link Method} matching the provided signature or {@code null}
 */
public static Method getMethodOrNull(Object target, String methodName, Class... parameterTypes) {
    try {
        return getMethod(target.getClass(), methodName, parameterTypes);
    } catch (RuntimeException lookupFailure) {
        // Any resolution failure (wrapped NoSuchMethodException etc.) means "absent".
        return null;
    }
}
/**
 * Get the {@link Method} with the given signature (name and parameter types) belonging to
 * the provided Java {@link Class}.
 *
 * @param targetClass target {@link Class}
 * @param methodName method name
 * @param parameterTypes method parameter types
 * @return the {@link Method} matching the provided signature
 */
@SuppressWarnings("unchecked")
public static Method getMethod(Class targetClass, String methodName, Class... parameterTypes) {
    try {
        // Declared methods first (any visibility, this class only).
        return targetClass.getDeclaredMethod(methodName, parameterTypes);
    } catch (NoSuchMethodException e) {
        try {
            // Fall back to publicly visible methods (including inherited ones);
            // note this returns immediately on success, unlike getField's original code.
            return targetClass.getMethod(methodName, parameterTypes);
        } catch (NoSuchMethodException ignore) {
        }
        // Walk up the superclass chain until just above Object.
        // NOTE(review): if targetClass is an interface, getSuperclass() is null and
        // this NPEs rather than throwing IllegalArgumentException -- TODO confirm
        // interfaces are never passed here.
        if (!targetClass.getSuperclass().equals(Object.class)) {
            return getMethod(targetClass.getSuperclass(), methodName, parameterTypes);
        } else {
            throw handleException(e);
        }
    }
}
/**
 * Get the {@link Method} with the given signature (name and parameter types) belonging to
 * the provided Java {@link Class}, or {@code null} if the lookup fails.
 *
 * @param targetClass target {@link Class}
 * @param methodName method name
 * @param parameterTypes method parameter types
 * @return return {@link Method} matching the provided signature or {@code null}
 */
public static Method getMethodOrNull(Class targetClass, String methodName, Class... parameterTypes) {
    try {
        return getMethod(targetClass, methodName, parameterTypes);
    } catch (RuntimeException lookupFailure) {
        // Resolution failure is reported as null rather than an exception.
        return null;
    }
}
/**
 * Look up a method declared directly on the given {@link Class} (inherited methods
 * are ignored), returning {@code null} when no such declaration exists.
 *
 * @param targetClass target {@link Class}
 * @param methodName method name
 * @param parameterTypes method parameter types
 * @return return {@link Method} matching the provided signature or {@code null}
 */
public static Method getDeclaredMethodOrNull(Class targetClass, String methodName, Class... parameterTypes) {
    try {
        return targetClass.getDeclaredMethod(methodName, parameterTypes);
    } catch (NoSuchMethodException ignored) {
        // Absence of the declaration is a normal outcome here, not an error.
        return null;
    }
}
/**
 * Tell whether the given {@link Class} exposes a public method with the provided
 * signature (name and parameter types), including inherited methods.
 *
 * @param targetClass target {@link Class}
 * @param methodName method name
 * @param parameterTypes method parameter types
 * @return the provided Java {@link Class} contains a method with the given signature
 */
public static boolean hasMethod(Class<?> targetClass, String methodName, Class... parameterTypes) {
    try {
        // We only care whether the lookup succeeds; the Method itself is discarded.
        targetClass.getMethod(methodName, parameterTypes);
    } catch (NoSuchMethodException absent) {
        return false;
    }
    return true;
}
/**
 * Resolve the JavaBean setter for the given property on the provided target
 * {@link Object} and make it accessible.
 *
 * @param target target {@link Object}
 * @param propertyName property name
 * @param parameterType setter property type
 * @return the setter {@link Method} matching the provided signature
 */
public static Method getSetter(Object target, String propertyName, Class<?> parameterType) {
    // NOTE(review): default-locale toUpperCase; unusual property names under e.g.
    // the Turkish locale could capitalize differently -- TODO confirm acceptable.
    String capitalized = propertyName.substring(0, 1).toUpperCase() + propertyName.substring(1);
    Method setter = getMethod(target, SETTER_PREFIX + capitalized, parameterType);
    setter.setAccessible(true);
    return setter;
}
/**
 * Resolve the JavaBean getter for the given property on the provided target
 * {@link Object} and make it accessible.
 *
 * @param target target {@link Object}
 * @param propertyName property name
 * @return the getter {@link Method} matching the provided name
 */
public static Method getGetter(Object target, String propertyName) {
    String capitalized = propertyName.substring(0, 1).toUpperCase() + propertyName.substring(1);
    Method getter = getMethod(target, GETTER_PREFIX + capitalized);
    getter.setAccessible(true);
    return getter;
}
/**
 * Invoke the provided {@link Method} on the given Java {@link Object}.
 *
 * @param target target {@link Object} whose method we are invoking
 * @param method method to invoke
 * @param parameters parameters passed to the method call
 * @param <T> return value object type
 * @return the value return by the {@link Method} invocation
 */
public static <T> T invokeMethod(Object target, Method method, Object... parameters) {
    // setAccessible never throws the exceptions caught below, so hoisting it
    // out of the try block does not change behavior.
    method.setAccessible(true);
    try {
        @SuppressWarnings("unchecked")
        T result = (T) method.invoke(target, parameters);
        return result;
    } catch (InvocationTargetException e) {
        throw handleException(e);
    } catch (IllegalAccessException e) {
        throw handleException(e);
    }
}
/**
 * Invoke the method with the provided signature (name and parameter types, inferred
 * from the arguments' runtime classes) on the given Java {@link Object}.
 *
 * @param target target {@link Object} whose method we are invoking
 * @param methodName method name to invoke
 * @param parameters parameters passed to the method call
 * @param <T> return value object type
 * @return the value return by the method invocation
 */
public static <T> T invokeMethod(Object target, String methodName, Object... parameters) {
    Class[] signature = new Class[parameters.length];
    for (int i = 0; i < parameters.length; i++) {
        // NOTE(review): a null argument NPEs here; assumes callers pass
        // non-null parameters -- TODO confirm.
        signature[i] = parameters[i].getClass();
    }
    // Lookup failures propagate unchanged (getMethod throws unchecked).
    Method method = getMethod(target, methodName, signature);
    method.setAccessible(true);
    try {
        @SuppressWarnings("unchecked")
        T result = (T) method.invoke(target, parameters);
        return result;
    } catch (InvocationTargetException e) {
        throw handleException(e);
    } catch (IllegalAccessException e) {
        throw handleException(e);
    }
}
/**
 * Invoke the property getter with the provided name on the given Java {@link Object}.
 *
 * @param target target {@link Object} whose property getter we are invoking
 * @param propertyName property name whose getter we are invoking
 * @param <T> return value object type
 * @return the value return by the getter invocation
 */
public static <T> T invokeGetter(Object target, String propertyName) {
    // Renamed the misleading local: this is the getter, not a setter.
    Method getter = getGetter(target, propertyName);
    try {
        @SuppressWarnings("unchecked")
        T value = (T) getter.invoke(target);
        return value;
    } catch (IllegalAccessException e) {
        throw handleException(e);
    } catch (InvocationTargetException e) {
        throw handleException(e);
    }
}
/**
 * Invoke the property setter matching the given name and the runtime class of
 * {@code parameter} on the given Java {@link Object}.
 *
 * @param target target {@link Object} whose property setter we are invoking
 * @param propertyName property name whose setter we are invoking
 * @param parameter parameter passed to the setter call
 */
public static void invokeSetter(Object target, String propertyName, Object parameter) {
    // The setter's parameter type is taken from the argument's runtime class.
    Method setter = getSetter(target, propertyName, parameter.getClass());
    try {
        setter.invoke(target, parameter);
    } catch (IllegalAccessException e) {
        throw handleException(e);
    } catch (InvocationTargetException e) {
        throw handleException(e);
    }
}
/**
 * Invoke the {@code boolean}-typed property setter with the provided name
 * on the given Java {@link Object}.
 *
 * @param target target {@link Object} whose property setter we are invoking
 * @param propertyName property name whose setter we are invoking
 * @param parameter {@link boolean} parameter passed to the setter call
 */
public static void invokeSetter(Object target, String propertyName, boolean parameter) {
    // Primitive overload: resolves setXxx(boolean) rather than setXxx(Boolean).
    Method setter = getSetter(target, propertyName, boolean.class);
    try {
        setter.invoke(target, parameter);
    } catch (IllegalAccessException e) {
        throw handleException(e);
    } catch (InvocationTargetException e) {
        throw handleException(e);
    }
}
/**
 * Invoke the {@code int}-typed property setter with the provided name
 * on the given Java {@link Object}.
 *
 * @param target target {@link Object} whose property setter we are invoking
 * @param propertyName property name whose setter we are invoking
 * @param parameter {@link int} parameter passed to the setter call
 */
public static void invokeSetter(Object target, String propertyName, int parameter) {
    // Primitive overload: resolves setXxx(int) rather than setXxx(Integer).
    Method setter = getSetter(target, propertyName, int.class);
    try {
        setter.invoke(target, parameter);
    } catch (IllegalAccessException e) {
        throw handleException(e);
    } catch (InvocationTargetException e) {
        throw handleException(e);
    }
}
/**
 * Invoke the {@code static} {@link Method} with the provided parameters.
 *
 * @param method target {@code static} {@link Method} to invoke
 * @param parameters parameters passed to the method call
 * @param <T> return value object type
 * @return the value return by the method invocation
 */
public static <T> T invokeStaticMethod(Method method, Object... parameters) {
    method.setAccessible(true);
    try {
        // Static invocation: the receiver argument is null by contract.
        @SuppressWarnings("unchecked")
        T result = (T) method.invoke(null, parameters);
        return result;
    } catch (InvocationTargetException e) {
        throw handleException(e);
    } catch (IllegalAccessException e) {
        throw handleException(e);
    }
}
/**
 * Get the Java {@link Class} with the given fully-qualified name.
 *
 * @param className the Java {@link Class} name to be retrieved
 * @param <T> {@link Class} type
 * @return the Java {@link Class} object
 */
@SuppressWarnings("unchecked")
public static <T> Class<T> getClass(String className) {
    try {
        // 'false' skips eager static initialization; the thread context class loader
        // is used so lookups also work where the defining loader differs (containers).
        return (Class<T>) Class.forName(className, false, Thread.currentThread().getContextClassLoader());
    } catch (ClassNotFoundException e) {
        throw handleException(e);
    }
}
/**
 * Get the Java {@link Class} with the given fully-qualified name or or {@code null}
 * if no {@link Class} was found matching the provided name.
 *
 * @param className the Java {@link Class} name to be retrieved
 * @param <T> {@link Class} type
 * @return the Java {@link Class} object or {@code null}
 */
@SuppressWarnings("unchecked")
public static <T> Class<T> getClassOrNull(String className) {
    try {
        return (Class<T>) getClass(className);
    } catch (Exception e) {
        // Any resolution failure (including linkage problems surfaced as
        // runtime exceptions) is treated as "not found".
        return null;
    }
}
/**
 * Map a primitive type to its Java wrapper {@link Class}; non-primitive types
 * (and unrecognized ones) are returned unchanged.
 *
 * @param clazz primitive class
 * @return the Java Wrapper {@link Class}
 */
public static Class<?> getWrapperClass(Class<?> clazz) {
    if (!clazz.isPrimitive()) {
        return clazz;
    }
    // The nine primitive types are mutually exclusive, so branch order is irrelevant.
    if (clazz == Boolean.TYPE) {
        return Boolean.class;
    }
    if (clazz == Byte.TYPE) {
        return Byte.class;
    }
    if (clazz == Short.TYPE) {
        return Short.class;
    }
    if (clazz == Character.TYPE) {
        return Character.class;
    }
    if (clazz == Integer.TYPE) {
        return Integer.class;
    }
    if (clazz == Long.TYPE) {
        return Long.class;
    }
    if (clazz == Float.TYPE) {
        return Float.class;
    }
    if (clazz == Double.TYPE) {
        return Double.class;
    }
    if (clazz == Void.TYPE) {
        return Void.class;
    }
    return clazz;
}
/**
 * Walk the class hierarchy (starting from {@code clazz} itself) and return the first
 * class whose package matches {@code packageName}, or {@code null} if none does
 * before reaching {@link Object}.
 *
 * @param clazz Java class
 * @param packageName package name
 * @param <T> class generic type
 * @return the first super class matching the provided package name or {@code null}.
 */
@SuppressWarnings("unchecked")
public static <T> Class<T> getFirstSuperClassFromPackage(Class clazz, String packageName) {
    // Iterative form of the original recursion; same visit order and stop condition.
    Class current = clazz;
    while (current != null) {
        if (current.getPackage().getName().equals(packageName)) {
            return (Class<T>) current;
        }
        Class parent = current.getSuperclass();
        // Stop just above Object, exactly like the recursive base case.
        current = (parent == null || parent.equals(Object.class)) ? null : parent;
    }
    return null;
}
/**
 * Collect the concrete {@link Class} type arguments of the given
 * {@link ParameterizedType}, preserving declaration order; non-{@link Class}
 * arguments (wildcards, type variables, nested parameterized types) are skipped.
 *
 * @param parameterizedType parameterized Type
 * @return generic types for the given Class.
 */
public static Set<Class> getGenericTypes(ParameterizedType parameterizedType) {
    Set<Class> classes = new LinkedHashSet<>();
    Type[] arguments = parameterizedType.getActualTypeArguments();
    for (int i = 0; i < arguments.length; i++) {
        Type argument = arguments[i];
        if (argument instanceof Class) {
            classes.add((Class) argument);
        }
    }
    return classes;
}
/**
 * Get class package name.
 *
 * @param className Class name.
 * @return class package name, or {@code null} when the class cannot be resolved
 *         or has no package information
 */
public static String getClassPackageName(String className) {
    try {
        Class clazz = getClassOrNull(className);
        if (clazz == null) {
            return null;
        }
        Package classPackage = clazz.getPackage();
        if (classPackage == null) {
            return null;
        }
        return classPackage.getName();
    } catch (Exception e) {
        // Defensive: any unexpected failure is reported as "no package name".
        return null;
    }
}
/**
 * Get the {@link Member} with the given name belonging to the provided Java {@link Class} or {@code null}
 * if no {@link Member} was found. Fields take precedence over methods.
 *
 * @param targetClass the provided Java {@link Class} the field or method belongs to
 * @param memberName the {@link Field} or {@link Method} name
 * @return the {@link Field} or {@link Method} matching the given name or {@code null}
 */
public static Member getMemberOrNull(Class targetClass, String memberName) {
    Field field = getFieldOrNull(targetClass, memberName);
    if (field != null) {
        return field;
    }
    // No field with that name: fall back to a no-arg method lookup.
    return getMethodOrNull(targetClass, memberName);
}
/**
 * Get the generic {@link Type} of the {@link Member} with the given name belonging to the provided Java {@link Class} or {@code null}
 * if no {@link Member} was found. Fields take precedence over methods.
 *
 * @param targetClass the provided Java {@link Class} the field or method belongs to
 * @param memberName the {@link Field} or {@link Method} name
 * @return the generic {@link Type} of the {@link Field} or {@link Method} matching the given name or {@code null}
 */
public static Type getMemberGenericTypeOrNull(Class targetClass, String memberName) {
    Field field = getFieldOrNull(targetClass, memberName);
    if (field != null) {
        return field.getGenericType();
    }
    // BUGFIX: the original dereferenced getMethodOrNull(...) unconditionally, so a
    // member that is neither a field nor a method threw NullPointerException
    // instead of returning null as the "OrNull" contract promises.
    Method method = getMethodOrNull(targetClass, memberName);
    return (method != null) ? method.getGenericReturnType() : null;
}
// Exception-translation helpers: each overload rethrows the corresponding checked
// reflection exception as an unchecked IllegalArgumentException (preserving the
// cause), so callers of this utility never deal with checked exceptions. The
// distinct overloads keep call sites statically resolvable per exception type.
/**
 * Handle the {@link NoSuchFieldException} by rethrowing it as an {@link IllegalArgumentException}.
 *
 * @param e the original {@link NoSuchFieldException}
 * @return the {@link IllegalArgumentException} wrapping exception
 */
private static IllegalArgumentException handleException(NoSuchFieldException e) {
    return new IllegalArgumentException(e);
}
/**
 * Handle the {@link NoSuchMethodException} by rethrowing it as an {@link IllegalArgumentException}.
 *
 * @param e the original {@link NoSuchMethodException}
 * @return the {@link IllegalArgumentException} wrapping exception
 */
private static IllegalArgumentException handleException(NoSuchMethodException e) {
    return new IllegalArgumentException(e);
}
/**
 * Handle the {@link IllegalAccessException} by rethrowing it as an {@link IllegalArgumentException}.
 *
 * @param e the original {@link IllegalAccessException}
 * @return the {@link IllegalArgumentException} wrapping exception
 */
private static IllegalArgumentException handleException(IllegalAccessException e) {
    return new IllegalArgumentException(e);
}
/**
 * Handle the {@link InvocationTargetException} by rethrowing it as an {@link IllegalArgumentException}.
 *
 * @param e the original {@link InvocationTargetException}
 * @return the {@link IllegalArgumentException} wrapping exception
 */
private static IllegalArgumentException handleException(InvocationTargetException e) {
    return new IllegalArgumentException(e);
}
/**
 * Handle the {@link ClassNotFoundException} by rethrowing it as an {@link IllegalArgumentException}.
 *
 * @param e the original {@link ClassNotFoundException}
 * @return the {@link IllegalArgumentException} wrapping exception
 */
private static IllegalArgumentException handleException(ClassNotFoundException e) {
    return new IllegalArgumentException(e);
}
/**
 * Handle the {@link InstantiationException} by rethrowing it as an {@link IllegalArgumentException}.
 *
 * @param e the original {@link InstantiationException}
 * @return the {@link IllegalArgumentException} wrapping exception
 */
private static IllegalArgumentException handleException(InstantiationException e) {
    return new IllegalArgumentException(e);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.calcite.adapter.jdbc;
import org.apache.calcite.adapter.enumerable.EnumerableRel;
import org.apache.calcite.adapter.enumerable.EnumerableRelImplementor;
import org.apache.calcite.adapter.enumerable.JavaRowFormat;
import org.apache.calcite.adapter.enumerable.PhysType;
import org.apache.calcite.adapter.enumerable.PhysTypeImpl;
import org.apache.calcite.adapter.java.JavaTypeFactory;
import org.apache.calcite.linq4j.tree.BlockBuilder;
import org.apache.calcite.linq4j.tree.Expression;
import org.apache.calcite.linq4j.tree.Expressions;
import org.apache.calcite.linq4j.tree.ParameterExpression;
import org.apache.calcite.linq4j.tree.Primitive;
import org.apache.calcite.linq4j.tree.UnaryExpression;
import org.apache.calcite.plan.ConventionTraitDef;
import org.apache.calcite.plan.RelOptCluster;
import org.apache.calcite.plan.RelOptCost;
import org.apache.calcite.plan.RelOptPlanner;
import org.apache.calcite.plan.RelTraitSet;
import org.apache.calcite.prepare.CalcitePrepareImpl;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.convert.ConverterImpl;
import org.apache.calcite.rel.metadata.RelMetadataQuery;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.runtime.Hook;
import org.apache.calcite.runtime.SqlFunctions;
import org.apache.calcite.schema.Schemas;
import org.apache.calcite.sql.SqlDialect;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.calcite.util.BuiltInMethod;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.List;
import java.util.TimeZone;
/**
 * Relational expression that converts its JDBC-convention input into
 * {@link EnumerableRel enumerable} calling convention: it generates the SQL for
 * the JDBC sub-tree, then emits Java code that runs that SQL over the schema's
 * {@code DataSource} and converts each {@link ResultSet} row into the physical
 * row format expected by the enumerable operators.
 */
public class JdbcToEnumerableConverter
    extends ConverterImpl
    implements EnumerableRel {
  protected JdbcToEnumerableConverter(
      RelOptCluster cluster,
      RelTraitSet traits,
      RelNode input) {
    super(cluster, ConventionTraitDef.INSTANCE, traits, input);
  }
  @Override public RelNode copy(RelTraitSet traitSet, List<RelNode> inputs) {
    return new JdbcToEnumerableConverter(
        getCluster(), traitSet, sole(inputs));
  }
  @Override public RelOptCost computeSelfCost(RelOptPlanner planner,
      RelMetadataQuery mq) {
    // Discount the converter's cost so the planner favors pushing work into JDBC.
    return super.computeSelfCost(planner, mq).multiplyBy(.1);
  }
  public Result implement(EnumerableRelImplementor implementor, Prefer pref) {
    // Generate:
    //   ResultSetEnumerable.of(schema.getDataSource(), "select ...")
    // builder0 accumulates the outer statements (SQL constant, enumerable);
    // 'builder' below accumulates the per-row conversion lambda body.
    final BlockBuilder builder0 = new BlockBuilder(false);
    final JdbcRel child = (JdbcRel) getInput();
    final PhysType physType =
        PhysTypeImpl.of(
            implementor.getTypeFactory(), getRowType(),
            pref.prefer(JavaRowFormat.CUSTOM));
    final JdbcConvention jdbcConvention =
        (JdbcConvention) child.getConvention();
    String sql = generateSql(jdbcConvention.dialect);
    if (CalcitePrepareImpl.DEBUG) {
      System.out.println("[" + sql + "]");
    }
    // Let test hooks observe the generated SQL.
    Hook.QUERY_PLAN.run(sql);
    final Expression sql_ =
        builder0.append("sql", Expressions.constant(sql));
    final int fieldCount = getRowType().getFieldCount();
    BlockBuilder builder = new BlockBuilder();
    final ParameterExpression resultSet_ =
        Expressions.parameter(Modifier.FINAL, ResultSet.class,
            builder.newName("resultSet"));
    // Some dialects need an explicit Calendar for date/time getters; see enum below.
    CalendarPolicy calendarPolicy = CalendarPolicy.of(jdbcConvention.dialect);
    final Expression calendar_;
    switch (calendarPolicy) {
    case LOCAL:
      calendar_ =
          builder0.append("calendar",
              Expressions.call(Calendar.class, "getInstance",
                  getTimeZoneExpression(implementor)));
      break;
    default:
      calendar_ = null;
    }
    if (fieldCount == 1) {
      // Single column: each row is represented as a bare Object, not an array.
      final ParameterExpression value_ =
          Expressions.parameter(Object.class, builder.newName("value"));
      builder.add(Expressions.declare(Modifier.FINAL, value_, null));
      generateGet(implementor, physType, builder, resultSet_, 0, value_,
          calendar_, calendarPolicy);
      builder.add(Expressions.return_(null, value_));
    } else {
      // Multiple columns: materialize each row as an Object[fieldCount].
      final Expression values_ =
          builder.append("values",
              Expressions.newArrayBounds(Object.class, 1,
                  Expressions.constant(fieldCount)));
      for (int i = 0; i < fieldCount; i++) {
        generateGet(implementor, physType, builder, resultSet_, i,
            Expressions.arrayIndex(values_, Expressions.constant(i)),
            calendar_, calendarPolicy);
      }
      builder.add(
          Expressions.return_(null, values_));
    }
    final ParameterExpression e_ =
        Expressions.parameter(SQLException.class, builder.newName("e"));
    // The row-builder is a function of the ResultSet returning a thunk that
    // converts the current row; SQLException is rethrown as RuntimeException.
    final Expression rowBuilderFactory_ =
        builder0.append("rowBuilderFactory",
            Expressions.lambda(
                Expressions.block(
                    Expressions.return_(null,
                        Expressions.lambda(
                            Expressions.block(
                                Expressions.tryCatch(
                                    builder.toBlock(),
                                    Expressions.catch_(
                                        e_,
                                        Expressions.throw_(
                                            Expressions.new_(
                                                RuntimeException.class,
                                                e_)))))))),
                resultSet_));
    final Expression enumerable =
        builder0.append(
            "enumerable",
            Expressions.call(
                BuiltInMethod.RESULT_SET_ENUMERABLE_OF.method,
                Expressions.call(
                    Schemas.unwrap(jdbcConvention.expression,
                        JdbcSchema.class),
                    BuiltInMethod.JDBC_SCHEMA_DATA_SOURCE.method),
                sql_,
                rowBuilderFactory_));
    builder0.add(
        Expressions.return_(null, enumerable));
    return implementor.result(physType, builder0.toBlock());
  }
  /** Expression reading the "timeZone" entry from the implementor's root map,
   * cast to {@link TimeZone}. */
  private UnaryExpression getTimeZoneExpression(
      EnumerableRelImplementor implementor) {
    return Expressions.convert_(
        Expressions.call(
            implementor.getRootExpression(),
            "get",
            Expressions.constant("timeZone")),
        TimeZone.class);
  }
  /** Emits, into {@code builder}, the statement(s) that read column {@code i}
   * (0-based) from {@code resultSet_} and assign it to {@code target}, applying
   * the dialect's calendar policy for date/time columns. */
  private void generateGet(EnumerableRelImplementor implementor,
      PhysType physType, BlockBuilder builder, ParameterExpression resultSet_,
      int i, Expression target, Expression calendar_,
      CalendarPolicy calendarPolicy) {
    final Primitive primitive = Primitive.ofBoxOr(physType.fieldClass(i));
    final RelDataType fieldType =
        physType.getRowType().getFieldList().get(i).getType();
    final List<Expression> dateTimeArgs = new ArrayList<Expression>();
    // JDBC column indexes are 1-based.
    dateTimeArgs.add(Expressions.constant(i + 1));
    SqlTypeName sqlTypeName = fieldType.getSqlTypeName();
    boolean offset = false;
    switch (calendarPolicy) {
    case LOCAL:
      dateTimeArgs.add(calendar_);
      break;
    case NULL:
      // We don't specify a calendar at all, so we don't add an argument and
      // instead use the version of the getXXX that doesn't take a Calendar
      break;
    case DIRECT:
      sqlTypeName = SqlTypeName.ANY;
      break;
    case SHIFT:
      switch (sqlTypeName) {
      case TIMESTAMP:
      case DATE:
        // Value must be shifted by the connection time zone after reading.
        offset = true;
      }
      break;
    }
    final Expression source;
    switch (sqlTypeName) {
    case DATE:
    case TIME:
    case TIMESTAMP:
      // Read the temporal value, then convert it to Calcite's internal
      // int/long representation (optionally applying the time-zone offset).
      source = Expressions.call(
          getMethod(sqlTypeName, fieldType.isNullable(), offset),
          Expressions.<Expression>list()
              .append(
                  Expressions.call(resultSet_,
                      getMethod2(sqlTypeName), dateTimeArgs))
              .appendIf(offset, getTimeZoneExpression(implementor)));
      break;
    case ARRAY:
      final Expression x = Expressions.convert_(
          Expressions.call(resultSet_, jdbcGetMethod(primitive),
              Expressions.constant(i + 1)),
          java.sql.Array.class);
      source = Expressions.call(BuiltInMethod.JDBC_ARRAY_TO_LIST.method, x);
      break;
    default:
      source = Expressions.call(
          resultSet_, jdbcGetMethod(primitive), Expressions.constant(i + 1));
    }
    builder.add(
        Expressions.statement(
            Expressions.assign(
                target, source)));
    // [CALCITE-596] If primitive type columns contain null value, returns null
    // object
    if (primitive != null) {
      builder.add(
          Expressions.ifThen(
              Expressions.call(resultSet_, "wasNull"),
              Expressions.statement(
                  Expressions.assign(target,
                      Expressions.constant(null)))));
    }
  }
  /** Conversion method from a JDBC temporal value to Calcite's internal
   * representation, chosen by SQL type, nullability and offset handling. */
  private Method getMethod(SqlTypeName sqlTypeName, boolean nullable,
      boolean offset) {
    switch (sqlTypeName) {
    case DATE:
      return (nullable
          ? BuiltInMethod.DATE_TO_INT_OPTIONAL
          : BuiltInMethod.DATE_TO_INT).method;
    case TIME:
      return (nullable
          ? BuiltInMethod.TIME_TO_INT_OPTIONAL
          : BuiltInMethod.TIME_TO_INT).method;
    case TIMESTAMP:
      return (nullable
          ? (offset
              ? BuiltInMethod.TIMESTAMP_TO_LONG_OPTIONAL_OFFSET
              : BuiltInMethod.TIMESTAMP_TO_LONG_OPTIONAL)
          : (offset
              ? BuiltInMethod.TIMESTAMP_TO_LONG_OFFSET
              : BuiltInMethod.TIMESTAMP_TO_LONG)).method;
    default:
      throw new AssertionError(sqlTypeName + ":" + nullable);
    }
  }
  /** ResultSet accessor for a temporal column ({@code getDate}/{@code getTime}/
   * {@code getTimestamp} variants that accept extra arguments). */
  private Method getMethod2(SqlTypeName sqlTypeName) {
    switch (sqlTypeName) {
    case DATE:
      return BuiltInMethod.RESULT_SET_GET_DATE2.method;
    case TIME:
      return BuiltInMethod.RESULT_SET_GET_TIME2.method;
    case TIMESTAMP:
      return BuiltInMethod.RESULT_SET_GET_TIMESTAMP2.method;
    default:
      throw new AssertionError(sqlTypeName);
    }
  }
  /** E.g., {@code jdbcGetMethod(int)} returns "getInt". */
  private String jdbcGetMethod(Primitive primitive) {
    return primitive == null
        ? "getObject"
        : "get" + SqlFunctions.initcap(primitive.primitiveName);
  }
  /** Renders this converter's JDBC input sub-tree as a SQL string in the
   * given dialect. */
  private String generateSql(SqlDialect dialect) {
    final JdbcImplementor jdbcImplementor =
        new JdbcImplementor(dialect,
            (JavaTypeFactory) getCluster().getTypeFactory());
    final JdbcImplementor.Result result =
        jdbcImplementor.visitChild(0, getInput());
    return result.asQuery().toSqlString(dialect).getSql();
  }
  /** Whether this JDBC driver needs you to pass a Calendar object to methods
   * such as {@link ResultSet#getTimestamp(int, java.util.Calendar)}. */
  private enum CalendarPolicy {
    NONE,
    NULL,
    LOCAL,
    DIRECT,
    SHIFT;
    static CalendarPolicy of(SqlDialect dialect) {
      switch (dialect.getDatabaseProduct()) {
      case MYSQL:
        return SHIFT;
      case HSQLDB:
      default:
        // NULL works for hsqldb-2.3; nothing worked for hsqldb-1.8.
        return NULL;
      }
    }
  }
}
// End JdbcToEnumerableConverter.java
| |
/*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2013 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.trans.steps.mapping;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import org.pentaho.di.core.CheckResult;
import org.pentaho.di.core.CheckResultInterface;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.ObjectLocationSpecificationMethod;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleStepException;
import org.pentaho.di.core.exception.KettleXMLException;
import org.pentaho.di.core.parameters.UnknownParamException;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.core.variables.VariableSpace;
import org.pentaho.di.core.xml.XMLHandler;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.repository.HasRepositoryInterface;
import org.pentaho.di.repository.ObjectId;
import org.pentaho.di.repository.Repository;
import org.pentaho.di.repository.RepositoryDirectory;
import org.pentaho.di.repository.RepositoryDirectoryInterface;
import org.pentaho.di.repository.RepositoryImportLocation;
import org.pentaho.di.repository.RepositoryObject;
import org.pentaho.di.repository.RepositoryObjectType;
import org.pentaho.di.repository.StringObjectId;
import org.pentaho.di.resource.ResourceDefinition;
import org.pentaho.di.resource.ResourceEntry;
import org.pentaho.di.resource.ResourceEntry.ResourceType;
import org.pentaho.di.resource.ResourceNamingInterface;
import org.pentaho.di.resource.ResourceReference;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.TransMeta.TransformationType;
import org.pentaho.di.trans.step.BaseStepMeta;
import org.pentaho.di.trans.step.StepDataInterface;
import org.pentaho.di.trans.step.StepIOMeta;
import org.pentaho.di.trans.step.StepIOMetaInterface;
import org.pentaho.di.trans.step.StepInterface;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.step.StepMetaInterface;
import org.pentaho.di.trans.step.errorhandling.Stream;
import org.pentaho.di.trans.step.errorhandling.StreamIcon;
import org.pentaho.di.trans.step.errorhandling.StreamInterface.StreamType;
import org.pentaho.di.trans.steps.mappinginput.MappingInputMeta;
import org.pentaho.di.trans.steps.mappingoutput.MappingOutputMeta;
import org.pentaho.metastore.api.IMetaStore;
import org.w3c.dom.Node;
/**
* Meta-data for the Mapping step: contains name of the (sub-)transformation to execute
*
* @since 22-nov-2005
* @author Matt
*
*/
public class MappingMeta extends BaseStepMeta implements StepMetaInterface, HasRepositoryInterface {
  private static Class<?> PKG = MappingMeta.class; // for i18n purposes, needed by Translator2!!
  // Name of the sub-transformation (used with REPOSITORY_BY_NAME lookups).
  private String transName;
  // Filename of the sub-transformation (used with FILENAME lookups).
  private String fileName;
  // Repository directory of the sub-transformation (used with REPOSITORY_BY_NAME lookups).
  private String directoryPath;
  // Repository object id of the sub-transformation (used with REPOSITORY_BY_REFERENCE lookups).
  private ObjectId transObjectId;
  // How the sub-transformation is referenced: filename, repository name or repository reference.
  private ObjectLocationSpecificationMethod specificationMethod;
  // Definitions of how parent rows are mapped onto the MappingInput steps of the sub-transformation.
  private List<MappingIODefinition> inputMappings;
  // Definitions of how MappingOutput rows are mapped back to the parent transformation.
  private List<MappingIODefinition> outputMappings;
  // Parameters/variables to push down into the sub-transformation.
  private MappingParameters mappingParameters;
  private boolean allowingMultipleInputs;
  private boolean allowingMultipleOutputs;
  /*
   * This repository object is injected from the outside at runtime or at design time. It comes from either Spoon or
   * Trans
   */
  private Repository repository;
  private IMetaStore metaStore;
  /**
   * Creates an empty mapping step definition; loadXML()/readRep() or setDefault() populate the mappings afterwards.
   */
  public MappingMeta() {
    super(); // allocate BaseStepMeta
    inputMappings = new ArrayList<MappingIODefinition>();
    outputMappings = new ArrayList<MappingIODefinition>();
    mappingParameters = new MappingParameters();
  }
private void checkObjectLocationSpecificationMethod() {
if ( specificationMethod == null ) {
// Backward compatibility
//
// Default = Filename
//
specificationMethod = ObjectLocationSpecificationMethod.FILENAME;
if ( !Const.isEmpty( fileName ) ) {
specificationMethod = ObjectLocationSpecificationMethod.FILENAME;
} else if ( transObjectId != null ) {
specificationMethod = ObjectLocationSpecificationMethod.REPOSITORY_BY_REFERENCE;
} else if ( !Const.isEmpty( transName ) ) {
specificationMethod = ObjectLocationSpecificationMethod.REPOSITORY_BY_NAME;
}
}
}
  /**
   * Reads this step's metadata from the given XML step node.
   * <p>
   * Handles two on-disk formats: the current one with a nested {@code <mappings>} element, and a legacy
   * format (pre-mapping-definitions) with flat {@code <input>}/{@code <output>} connector lists.
   *
   * @param stepnode  the XML node describing the step
   * @param databases the available database connections (unused here, part of the interface contract)
   * @param metaStore the metastore to optionally load from (unused here)
   * @throws KettleXMLException when the XML cannot be parsed
   */
  public void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ) throws KettleXMLException {
    try {
      String method = XMLHandler.getTagValue( stepnode, "specification_method" );
      specificationMethod = ObjectLocationSpecificationMethod.getSpecificationMethodByCode( method );
      String transId = XMLHandler.getTagValue( stepnode, "trans_object_id" );
      transObjectId = Const.isEmpty( transId ) ? null : new StringObjectId( transId );
      transName = XMLHandler.getTagValue( stepnode, "trans_name" );
      fileName = XMLHandler.getTagValue( stepnode, "filename" );
      directoryPath = XMLHandler.getTagValue( stepnode, "directory_path" );
      // Backward compatibility check for object specification
      //
      checkObjectLocationSpecificationMethod();
      Node mappingsNode = XMLHandler.getSubNode( stepnode, "mappings" );
      inputMappings.clear();
      outputMappings.clear();
      if ( mappingsNode != null ) {
        // Read all the input mapping definitions...
        //
        Node inputNode = XMLHandler.getSubNode( mappingsNode, "input" );
        int nrInputMappings = XMLHandler.countNodes( inputNode, MappingIODefinition.XML_TAG );
        for ( int i = 0; i < nrInputMappings; i++ ) {
          Node mappingNode = XMLHandler.getSubNodeByNr( inputNode, MappingIODefinition.XML_TAG, i );
          MappingIODefinition inputMappingDefinition = new MappingIODefinition( mappingNode );
          inputMappings.add( inputMappingDefinition );
        }
        Node outputNode = XMLHandler.getSubNode( mappingsNode, "output" );
        int nrOutputMappings = XMLHandler.countNodes( outputNode, MappingIODefinition.XML_TAG );
        for ( int i = 0; i < nrOutputMappings; i++ ) {
          Node mappingNode = XMLHandler.getSubNodeByNr( outputNode, MappingIODefinition.XML_TAG, i );
          MappingIODefinition outputMappingDefinition = new MappingIODefinition( mappingNode );
          outputMappings.add( outputMappingDefinition );
        }
        // Load the mapping parameters too..
        //
        Node mappingParametersNode = XMLHandler.getSubNode( mappingsNode, MappingParameters.XML_TAG );
        mappingParameters = new MappingParameters( mappingParametersNode );
      } else {
        // backward compatibility...
        // Legacy format: flat connector lists become a single main-path input and output definition.
        //
        Node inputNode = XMLHandler.getSubNode( stepnode, "input" );
        Node outputNode = XMLHandler.getSubNode( stepnode, "output" );
        int nrInput = XMLHandler.countNodes( inputNode, "connector" );
        int nrOutput = XMLHandler.countNodes( outputNode, "connector" );
        // null means: auto-detect
        //
        MappingIODefinition inputMappingDefinition = new MappingIODefinition();
        inputMappingDefinition.setMainDataPath( true );
        for ( int i = 0; i < nrInput; i++ ) {
          Node inputConnector = XMLHandler.getSubNodeByNr( inputNode, "connector", i );
          String inputField = XMLHandler.getTagValue( inputConnector, "field" );
          String inputMapping = XMLHandler.getTagValue( inputConnector, "mapping" );
          inputMappingDefinition.getValueRenames().add( new MappingValueRename( inputField, inputMapping ) );
        }
        // null means: auto-detect
        //
        MappingIODefinition outputMappingDefinition = new MappingIODefinition();
        outputMappingDefinition.setMainDataPath( true );
        for ( int i = 0; i < nrOutput; i++ ) {
          Node outputConnector = XMLHandler.getSubNodeByNr( outputNode, "connector", i );
          String outputField = XMLHandler.getTagValue( outputConnector, "field" );
          String outputMapping = XMLHandler.getTagValue( outputConnector, "mapping" );
          // Note: source/target order is reversed relative to the input side.
          outputMappingDefinition.getValueRenames().add( new MappingValueRename( outputMapping, outputField ) );
        }
        // Don't forget to add these to the input and output mapping
        // definitions...
        //
        inputMappings.add( inputMappingDefinition );
        outputMappings.add( outputMappingDefinition );
        // The default is to have no mapping parameters: the concept didn't
        // exist before.
        //
        mappingParameters = new MappingParameters();
      }
      String multiInput = XMLHandler.getTagValue( stepnode, "allow_multiple_input" );
      allowingMultipleInputs =
        Const.isEmpty( multiInput ) ? inputMappings.size() > 1 : "Y".equalsIgnoreCase( multiInput );
      String multiOutput = XMLHandler.getTagValue( stepnode, "allow_multiple_output" );
      allowingMultipleOutputs =
        Const.isEmpty( multiOutput ) ? outputMappings.size() > 1 : "Y".equalsIgnoreCase( multiOutput );
    } catch ( Exception e ) {
      throw new KettleXMLException( BaseMessages.getString(
        PKG, "MappingMeta.Exception.ErrorLoadingTransformationStepFromXML" ), e );
    }
  }
public Object clone() {
Object retval = super.clone();
return retval;
}
public String getXML() {
StringBuffer retval = new StringBuffer( 300 );
retval.append( " " ).append(
XMLHandler.addTagValue( "specification_method", specificationMethod == null ? null : specificationMethod
.getCode() ) );
retval.append( " " ).append(
XMLHandler.addTagValue( "trans_object_id", transObjectId == null ? null : transObjectId.toString() ) );
// Export a little bit of extra information regarding the reference since it doesn't really matter outside the same
// repository.
//
if ( repository != null && transObjectId != null ) {
try {
RepositoryObject objectInformation =
repository.getObjectInformation( transObjectId, RepositoryObjectType.TRANSFORMATION );
if ( objectInformation != null ) {
transName = objectInformation.getName();
directoryPath = objectInformation.getRepositoryDirectory().getPath();
}
} catch ( KettleException e ) {
// Ignore object reference problems. It simply means that the reference is no longer valid.
}
}
retval.append( " " ).append( XMLHandler.addTagValue( "trans_name", transName ) );
retval.append( " " ).append( XMLHandler.addTagValue( "filename", fileName ) );
retval.append( " " ).append( XMLHandler.addTagValue( "directory_path", directoryPath ) );
retval.append( " " ).append( XMLHandler.openTag( "mappings" ) ).append( Const.CR );
retval.append( " " ).append( XMLHandler.openTag( "input" ) ).append( Const.CR );
for ( int i = 0; i < inputMappings.size(); i++ ) {
retval.append( inputMappings.get( i ).getXML() );
}
retval.append( " " ).append( XMLHandler.closeTag( "input" ) ).append( Const.CR );
retval.append( " " ).append( XMLHandler.openTag( "output" ) ).append( Const.CR );
for ( int i = 0; i < outputMappings.size(); i++ ) {
retval.append( outputMappings.get( i ).getXML() );
}
retval.append( " " ).append( XMLHandler.closeTag( "output" ) ).append( Const.CR );
// Add the mapping parameters too
//
retval.append( " " ).append( mappingParameters.getXML() ).append( Const.CR );
retval.append( " " ).append( XMLHandler.closeTag( "mappings" ) ).append( Const.CR );
retval.append( " " ).append( XMLHandler.addTagValue( "allow_multiple_input", allowingMultipleInputs ) );
retval.append( " " ).append( XMLHandler.addTagValue( "allow_multiple_output", allowingMultipleOutputs ) );
return retval.toString();
}
  /**
   * Reads this step's metadata from the repository.
   * <p>
   * Handles the legacy flat "input_field"/"output_field" attribute format (converted into a single
   * main-path input/output definition) as well as the current indexed mapping-definition format.
   *
   * @param rep       the repository to read from
   * @param metaStore the metastore (unused here, part of the interface contract)
   * @param id_step   the object id of the step to read
   * @param databases the available database connections (unused here)
   * @throws KettleException when reading from the repository fails
   */
  public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ) throws KettleException {
    String method = rep.getStepAttributeString( id_step, "specification_method" );
    specificationMethod = ObjectLocationSpecificationMethod.getSpecificationMethodByCode( method );
    String transId = rep.getStepAttributeString( id_step, "trans_object_id" );
    transObjectId = Const.isEmpty( transId ) ? null : new StringObjectId( transId );
    transName = rep.getStepAttributeString( id_step, "trans_name" );
    fileName = rep.getStepAttributeString( id_step, "filename" );
    directoryPath = rep.getStepAttributeString( id_step, "directory_path" );
    // Backward compatibility check for object specification
    //
    checkObjectLocationSpecificationMethod();
    inputMappings.clear();
    outputMappings.clear();
    // Legacy attributes only exist in the old format, so their presence selects the code path.
    int nrInput = rep.countNrStepAttributes( id_step, "input_field" );
    int nrOutput = rep.countNrStepAttributes( id_step, "output_field" );
    // Backward compatibility...
    //
    if ( nrInput > 0 || nrOutput > 0 ) {
      MappingIODefinition inputMappingDefinition = new MappingIODefinition();
      inputMappingDefinition.setMainDataPath( true );
      for ( int i = 0; i < nrInput; i++ ) {
        String inputField = rep.getStepAttributeString( id_step, i, "input_field" );
        String inputMapping = rep.getStepAttributeString( id_step, i, "input_mapping" );
        inputMappingDefinition.getValueRenames().add( new MappingValueRename( inputField, inputMapping ) );
      }
      MappingIODefinition outputMappingDefinition = new MappingIODefinition();
      outputMappingDefinition.setMainDataPath( true );
      for ( int i = 0; i < nrOutput; i++ ) {
        String outputField = rep.getStepAttributeString( id_step, i, "output_field" );
        String outputMapping = rep.getStepAttributeString( id_step, i, "output_mapping" );
        // Note: source/target order is reversed relative to the input side.
        outputMappingDefinition.getValueRenames().add( new MappingValueRename( outputMapping, outputField ) );
      }
      // Don't forget to add these to the input and output mapping
      // definitions...
      //
      inputMappings.add( inputMappingDefinition );
      outputMappings.add( outputMappingDefinition );
      // The default is to have no mapping parameters: the concept didn't exist
      // before.
      mappingParameters = new MappingParameters();
    } else {
      nrInput = rep.countNrStepAttributes( id_step, "input_main_path" );
      nrOutput = rep.countNrStepAttributes( id_step, "output_main_path" );
      for ( int i = 0; i < nrInput; i++ ) {
        inputMappings.add( new MappingIODefinition( rep, id_step, "input_", i ) );
      }
      for ( int i = 0; i < nrOutput; i++ ) {
        outputMappings.add( new MappingIODefinition( rep, id_step, "output_", i ) );
      }
      mappingParameters = new MappingParameters( rep, id_step );
    }
    allowingMultipleInputs =
      rep.getStepAttributeBoolean( id_step, 0, "allow_multiple_input", inputMappings.size() > 1 );
    allowingMultipleOutputs =
      rep.getStepAttributeBoolean( id_step, 0, "allow_multiple_output", outputMappings.size() > 1 );
  }
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ) throws KettleException {
rep.saveStepAttribute( id_transformation, id_step, "specification_method", specificationMethod == null
? null : specificationMethod.getCode() );
rep.saveStepAttribute( id_transformation, id_step, "trans_object_id", transObjectId == null
? null : transObjectId.toString() );
rep.saveStepAttribute( id_transformation, id_step, "filename", fileName );
rep.saveStepAttribute( id_transformation, id_step, "trans_name", transName );
rep.saveStepAttribute( id_transformation, id_step, "directory_path", directoryPath );
for ( int i = 0; i < inputMappings.size(); i++ ) {
inputMappings.get( i ).saveRep( rep, metaStore, id_transformation, id_step, "input_", i );
}
for ( int i = 0; i < outputMappings.size(); i++ ) {
outputMappings.get( i ).saveRep( rep, metaStore, id_transformation, id_step, "output_", i );
}
// save the mapping parameters too
//
mappingParameters.saveRep( rep, metaStore, id_transformation, id_step );
rep.saveStepAttribute( id_transformation, id_step, 0, "allow_multiple_input", allowingMultipleInputs );
rep.saveStepAttribute( id_transformation, id_step, 0, "allow_multiple_output", allowingMultipleOutputs );
}
public void setDefault() {
specificationMethod = ObjectLocationSpecificationMethod.FILENAME;
MappingIODefinition inputDefinition = new MappingIODefinition( null, null );
inputDefinition.setMainDataPath( true );
inputDefinition.setRenamingOnOutput( true );
inputMappings.add( inputDefinition );
MappingIODefinition outputDefinition = new MappingIODefinition( null, null );
outputDefinition.setMainDataPath( true );
outputMappings.add( outputDefinition );
allowingMultipleInputs = false;
allowingMultipleOutputs = false;
}
  /**
   * Computes the output row layout of this mapping step.
   * <p>
   * Loads the sub-transformation, pushes parameters/variables and the incoming row layouts into its
   * MappingInput steps, then asks the relevant MappingOutput step for the resulting layout. The
   * incoming {@code row} is replaced in place by that result.
   *
   * @param row        on input the incoming row layout; on return the mapping's output layout
   * @param origin     the name of this step (unused here)
   * @param info       row layouts of the info steps feeding this step, in getInfoSteps() order
   * @param nextStep   the step this output goes to, or null for the main path
   * @param space      the variable space used for substitutions
   * @param repository the repository to load the sub-transformation from (may be null)
   * @param metaStore  the metastore to load shared objects from
   * @throws KettleStepException when the sub-transformation cannot be loaded or fields cannot be resolved
   */
  public void getFields( RowMetaInterface row, String origin, RowMetaInterface[] info, StepMeta nextStep,
    VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {
    // First load some interesting data...
    // Then see which fields get added to the row.
    //
    TransMeta mappingTransMeta = null;
    try {
      mappingTransMeta = loadMappingMeta( this, repository, metaStore, space );
    } catch ( KettleException e ) {
      throw new KettleStepException( BaseMessages.getString(
        PKG, "MappingMeta.Exception.UnableToLoadMappingTransformation" ), e );
    }
    // The field structure may depend on the input parameters as well (think of parameter replacements in MDX queries
    // for instance)
    if ( mappingParameters != null ) {
      // See if we need to pass all variables from the parent or not...
      //
      if ( mappingParameters.isInheritingAllVariables() ) {
        mappingTransMeta.copyVariablesFrom( space );
      }
      // Just set the variables in the transformation statically.
      // This just means: set a number of variables or parameter values:
      //
      List<String> subParams = Arrays.asList( mappingTransMeta.listParameters() );
      for ( int i = 0; i < mappingParameters.getVariable().length; i++ ) {
        String name = mappingParameters.getVariable()[i];
        String value = space.environmentSubstitute( mappingParameters.getInputField()[i] );
        if ( !Const.isEmpty( name ) && !Const.isEmpty( value ) ) {
          // Only declared parameters can be set as parameters; everything is also set as a variable.
          if ( subParams.contains( name ) ) {
            try {
              mappingTransMeta.setParameterValue( name, value );
            } catch ( UnknownParamException e ) {
              // this is explicitly checked for up front
            }
          }
          mappingTransMeta.setVariable( name, value );
        }
      }
    }
    // Keep track of all the fields that need renaming...
    //
    List<MappingValueRename> inputRenameList = new ArrayList<MappingValueRename>();
    /*
     * Before we ask the mapping outputs anything, we should teach the mapping input steps in the sub-transformation
     * about the data coming in...
     */
    for ( MappingIODefinition definition : inputMappings ) {
      RowMetaInterface inputRowMeta;
      if ( definition.isMainDataPath() || Const.isEmpty( definition.getInputStepname() ) ) {
        // The row metadata, what we pass to the mapping input step
        // definition.getOutputStep(), is "row"
        // However, we do need to re-map some fields...
        //
        inputRowMeta = row.clone();
        if ( !inputRowMeta.isEmpty() ) {
          for ( MappingValueRename valueRename : definition.getValueRenames() ) {
            ValueMetaInterface valueMeta = inputRowMeta.searchValueMeta( valueRename.getSourceValueName() );
            if ( valueMeta == null ) {
              throw new KettleStepException( BaseMessages.getString(
                PKG, "MappingMeta.Exception.UnableToFindField", valueRename.getSourceValueName() ) );
            }
            valueMeta.setName( valueRename.getTargetValueName() );
          }
        }
      } else {
        // The row metadata that goes to the info mapping input comes from the
        // specified step
        // In fact, it's one of the info steps that is going to contain this
        // information...
        //
        String[] infoSteps = getInfoSteps();
        int infoStepIndex = Const.indexOfString( definition.getInputStepname(), infoSteps );
        if ( infoStepIndex < 0 ) {
          throw new KettleStepException( BaseMessages.getString(
            PKG, "MappingMeta.Exception.UnableToFindMetadataInfo", definition.getInputStepname() ) );
        }
        if ( info[infoStepIndex] != null ) {
          inputRowMeta = info[infoStepIndex].clone();
        } else {
          inputRowMeta = null;
        }
      }
      // What is this mapping input step?
      //
      StepMeta mappingInputStep = mappingTransMeta.findMappingInputStep( definition.getOutputStepname() );
      // We're certain it's a MappingInput step...
      //
      MappingInputMeta mappingInputMeta = (MappingInputMeta) mappingInputStep.getStepMetaInterface();
      // Inform the mapping input step about what it's going to receive...
      //
      mappingInputMeta.setInputRowMeta( inputRowMeta );
      // What values are we changing names for?
      //
      mappingInputMeta.setValueRenames( definition.getValueRenames() );
      // Keep a list of the input rename values that need to be changed back at
      // the output
      //
      if ( definition.isRenamingOnOutput() ) {
        Mapping.addInputRenames( inputRenameList, definition.getValueRenames() );
      }
    }
    // All the mapping steps now know what they will be receiving.
    // That also means that the sub-transformation / mapping has everything it
    // needs.
    // So that means that the MappingOutput steps know exactly what the output
    // is going to be.
    // That could basically be anything.
    // It also could have absolutely no resemblance to what came in on the
    // input.
    // The relative old approach is therefore no longer suited.
    //
    // OK, but what we *can* do is have the MappingOutput step rename the
    // appropriate fields.
    // The mapping step will tell this step how it's done.
    //
    // Let's look for the mapping output step that is relevant for this actual
    // call...
    //
    MappingIODefinition mappingOutputDefinition = null;
    if ( nextStep == null ) {
      // This is the main step we read from...
      // Look up the main step to write to.
      // This is the output mapping definition with "main path" enabled.
      //
      for ( MappingIODefinition definition : outputMappings ) {
        if ( definition.isMainDataPath() || Const.isEmpty( definition.getOutputStepname() ) ) {
          // This is the definition to use...
          //
          mappingOutputDefinition = definition;
        }
      }
    } else {
      // Is there an output mapping definition for this step?
      // If so, we can look up the Mapping output step to see what has changed.
      //
      for ( MappingIODefinition definition : outputMappings ) {
        if ( nextStep.getName().equals( definition.getOutputStepname() )
          || definition.isMainDataPath() || Const.isEmpty( definition.getOutputStepname() ) ) {
          mappingOutputDefinition = definition;
        }
      }
    }
    if ( mappingOutputDefinition == null ) {
      throw new KettleStepException( BaseMessages.getString(
        PKG, "MappingMeta.Exception.UnableToFindMappingDefinition" ) );
    }
    // OK, now find the mapping output step in the mapping...
    // This method in TransMeta takes into account a number of things, such as
    // the step not specified, etc.
    // The method never returns null but throws an exception.
    //
    StepMeta mappingOutputStep =
      mappingTransMeta.findMappingOutputStep( mappingOutputDefinition.getInputStepname() );
    // We know it's a mapping output step...
    MappingOutputMeta mappingOutputMeta = (MappingOutputMeta) mappingOutputStep.getStepMetaInterface();
    // Change a few columns.
    mappingOutputMeta.setOutputValueRenames( mappingOutputDefinition.getValueRenames() );
    // Perhaps we need to change a few input columns back to the original?
    //
    mappingOutputMeta.setInputValueRenames( inputRenameList );
    // Now we know wat's going to come out of there...
    // This is going to be the full row, including all the remapping, etc.
    //
    RowMetaInterface mappingOutputRowMeta = mappingTransMeta.getStepFields( mappingOutputStep );
    row.clear();
    row.addRowMeta( mappingOutputRowMeta );
  }
public String[] getInfoSteps() {
String[] infoSteps = getStepIOMeta().getInfoStepnames();
// Return null instead of empty array to preserve existing behavior
return infoSteps.length == 0 ? null : infoSteps;
}
public String[] getTargetSteps() {
List<String> targetSteps = new ArrayList<String>();
// The infosteps are those steps that are specified in the input mappings
for ( MappingIODefinition definition : outputMappings ) {
if ( !definition.isMainDataPath() && !Const.isEmpty( definition.getOutputStepname() ) ) {
targetSteps.add( definition.getOutputStepname() );
}
}
if ( targetSteps.isEmpty() ) {
return null;
}
return targetSteps.toArray( new String[targetSteps.size()] );
}
  /**
   * Loads the sub-transformation referenced by the given mapping step.
   *
   * @deprecated use {@code loadMappingMeta(MappingMeta, Repository, IMetaStore, VariableSpace)} so a
   *             metastore can be supplied; this overload passes null for the metastore.
   */
  @Deprecated
  public static final synchronized TransMeta loadMappingMeta( MappingMeta mappingMeta, Repository rep,
    VariableSpace space ) throws KettleException {
    return loadMappingMeta( mappingMeta, rep, null, space );
  }
  /**
   * Loads the sub-transformation referenced by the given mapping step, sharing the parent's
   * variables with it (convenience overload with {@code share = true}).
   */
  public static final synchronized TransMeta loadMappingMeta( MappingMeta mappingMeta, Repository rep,
    IMetaStore metaStore, VariableSpace space ) throws KettleException {
    return loadMappingMeta( mappingMeta, rep, metaStore, space, true );
  }
public static final synchronized TransMeta loadMappingMeta( MappingMeta mappingMeta, Repository rep,
IMetaStore metaStore, VariableSpace space, boolean share ) throws KettleException {
TransMeta mappingTransMeta = null;
switch ( mappingMeta.getSpecificationMethod() ) {
case FILENAME:
String realFilename = space.environmentSubstitute( mappingMeta.getFileName() );
try {
// OK, load the meta-data from file...
//
// Don't set internal variables: they belong to the parent thread!
// PDI-3064 do not share with parent variable space
mappingTransMeta = new TransMeta( realFilename, metaStore, rep, true, null, null );
mappingTransMeta.getLogChannel().logDetailed(
"Loading Mapping from repository",
"Mapping transformation was loaded from XML file [" + realFilename + "]" );
} catch ( Exception e ) {
throw new KettleException( BaseMessages.getString( PKG, "MappingMeta.Exception.UnableToLoadMapping" ), e );
}
break;
case REPOSITORY_BY_NAME:
String realTransname = space.environmentSubstitute( mappingMeta.getTransName() );
String realDirectory = space.environmentSubstitute( mappingMeta.getDirectoryPath() );
if ( rep == null ) { // hardening because TransMeta.setRepositoryOnMappingSteps(); might be missing in special
// situations
throw new KettleException( BaseMessages.getString(
PKG, "MappingMeta.Exception.InternalErrorRepository.Message" ) );
}
if ( !Const.isEmpty( realTransname ) && !Const.isEmpty( realDirectory ) && rep != null ) {
RepositoryDirectoryInterface repdir = rep.findDirectory( realDirectory );
if ( repdir != null ) {
try {
// reads the last revision in the repository...
//
mappingTransMeta = rep.loadTransformation( realTransname, repdir, null, true, null ); // TODO: FIXME:
// Should we pass in
// external
// MetaStore into
// Repository
// methods?
mappingTransMeta.getLogChannel().logDetailed(
"Loading Mapping from repository",
"Mapping transformation [" + realTransname + "] was loaded from the repository" );
} catch ( Exception e ) {
throw new KettleException( "Unable to load transformation [" + realTransname + "]", e );
}
} else {
throw new KettleException( BaseMessages.getString(
PKG, "MappingMeta.Exception.UnableToLoadTransformation", realTransname )
+ realDirectory );
}
} else {
throw new KettleException( BaseMessages.getString(
PKG, "MappingMeta.Exception.UnableToLoadTransformationNameOrDirNotGiven" ) );
}
break;
case REPOSITORY_BY_REFERENCE:
// Read the last revision by reference...
if ( rep == null ) { // hardening because TransMeta.setRepositoryOnMappingSteps(); might be missing in special
// situations
throw new KettleException( BaseMessages.getString(
PKG, "MappingMeta.Exception.InternalErrorRepository.Message" ) );
}
mappingTransMeta = rep.loadTransformation( mappingMeta.getTransObjectId(), null );
break;
default:
break;
}
// Pass some important information to the mapping transformation metadata:
//
if ( mappingTransMeta == null ) { // hardening because TransMeta might have issues in special situations
throw new KettleException( BaseMessages.getString(
PKG, "MappingMeta.Exception.InternalErrorTransMetaIsNULL.Message" ) );
}
if ( share ) {
mappingTransMeta.copyVariablesFrom( space );
}
mappingTransMeta.setRepository( rep );
mappingTransMeta.setMetaStore( metaStore );
mappingTransMeta.setFilename( mappingTransMeta.getFilename() );
return mappingTransMeta;
}
  /**
   * Performs design-time validation of this step: checks that fields are arriving and that at least
   * one input stream is connected, adding the results to {@code remarks}. Deeper validation of the
   * mapping itself is currently disabled (see the TODO block below).
   */
  public void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta,
    RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space,
    Repository repository, IMetaStore metaStore ) {
    CheckResult cr;
    if ( prev == null || prev.size() == 0 ) {
      cr =
        new CheckResult( CheckResultInterface.TYPE_RESULT_WARNING, BaseMessages.getString(
          PKG, "MappingMeta.CheckResult.NotReceivingAnyFields" ), stepMeta );
      remarks.add( cr );
    } else {
      cr =
        new CheckResult( CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString(
          PKG, "MappingMeta.CheckResult.StepReceivingFields", prev.size() + "" ), stepMeta );
      remarks.add( cr );
    }
    // See if we have input streams leading to this step!
    if ( input.length > 0 ) {
      cr =
        new CheckResult( CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString(
          PKG, "MappingMeta.CheckResult.StepReceivingFieldsFromOtherSteps" ), stepMeta );
      remarks.add( cr );
    } else {
      cr =
        new CheckResult( CheckResultInterface.TYPE_RESULT_ERROR, BaseMessages.getString(
          PKG, "MappingMeta.CheckResult.NoInputReceived" ), stepMeta );
      remarks.add( cr );
    }
    /*
     * TODO re-enable validation code for mappings...
     *
     * // Change the names of the fields if this is required by the mapping. for (int i=0;i<inputField.length;i++) { if
     * (inputField[i]!=null && inputField[i].length()>0) { if (inputMapping[i]!=null && inputMapping[i].length()>0) { if
     * (!inputField[i].equals(inputMapping[i])) // rename these! { int idx = prev.indexOfValue(inputField[i]); if
     * (idx<0) { cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, BaseMessages.getString(PKG,
     * "MappingMeta.CheckResult.MappingTargetFieldNotPresent",inputField[i]), stepinfo); remarks.add(cr); } } } else {
     * cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, BaseMessages.getString(PKG,
     * "MappingMeta.CheckResult.MappingTargetFieldNotSepecified" ,i+"",inputField[i]), stepinfo);
     * remarks.add(cr); } } else { cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR,
     * BaseMessages.getString(PKG, "MappingMeta.CheckResult.InputFieldNotSpecified",i+""), stepinfo); remarks.add(cr); }
     * }
     *
     * // Then check the fields that get added to the row. //
     *
     * Repository repository = Repository.getCurrentRepository(); TransMeta mappingTransMeta = null; try {
     * mappingTransMeta = loadMappingMeta(fileName, transName, directoryPath, repository); } catch(KettleException e) {
     * cr = new CheckResult(CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString(PKG,
     * "MappingMeta.CheckResult.UnableToLoadMappingTransformation" )+":"+Const.getStackTracker(e), stepinfo);
     * remarks.add(cr); }
     *
     * if (mappingTransMeta!=null) { cr = new CheckResult(CheckResultInterface.TYPE_RESULT_OK,
     * BaseMessages.getString(PKG, "MappingMeta.CheckResult.MappingTransformationSpecified"), stepinfo);
     * remarks.add(cr);
     *
     * StepMeta stepMeta = mappingTransMeta.getMappingOutputStep();
     *
     * if (stepMeta!=null) { // See which fields are coming out of the mapping output step of the sub-transformation //
     * For these fields we check the existance // RowMetaInterface fields = null; try { fields =
     * mappingTransMeta.getStepFields(stepMeta);
     *
     * boolean allOK = true;
     *
     * // Check the fields... for (int i=0;i<outputMapping.length;i++) { ValueMetaInterface v =
     * fields.searchValueMeta(outputMapping[i]); if (v==null) // Not found! { cr = new
     * CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, BaseMessages.getString(PKG,
     * "MappingMeta.CheckResult.MappingOutFieldSpecifiedCouldNotFound" )+outputMapping[i], stepinfo); remarks.add(cr);
     * allOK=false; } }
     *
     * if (allOK) { cr = new CheckResult(CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString(PKG,
     * "MappingMeta.CheckResult.AllOutputMappingFieldCouldBeFound"), stepinfo); remarks.add(cr); } }
     * catch(KettleStepException e) { cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR,
     * BaseMessages.getString(PKG, "MappingMeta.CheckResult.UnableToGetStepOutputFields" )+stepMeta.getName()+"]",
     * stepinfo); remarks.add(cr); } } else { cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR,
     * BaseMessages.getString(PKG, "MappingMeta.CheckResult.NoMappingOutputStepSpecified"), stepinfo); remarks.add(cr);
     * } } else { cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, BaseMessages.getString(PKG,
     * "MappingMeta.CheckResult.NoMappingSpecified"), stepinfo); remarks.add(cr); }
     */
  }
  /**
   * Creates the runtime step instance that executes this mapping.
   */
  public StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta tr,
    Trans trans ) {
    return new Mapping( stepMeta, stepDataInterface, cnr, tr, trans );
  }
  /**
   * Creates a fresh per-execution data holder for the mapping step.
   */
  public StepDataInterface getStepData() {
    return new MappingData();
  }
  /**
   * @return the repository directory path of the referenced sub-transformation
   */
  public String getDirectoryPath() {
    return directoryPath;
  }
  /**
   * @param directoryPath
   *          the repository directory path to set
   */
  public void setDirectoryPath( String directoryPath ) {
    this.directoryPath = directoryPath;
  }
  /**
   * @return the filename of the referenced sub-transformation
   */
  public String getFileName() {
    return fileName;
  }
  /**
   * @param fileName
   *          the fileName to set
   */
  public void setFileName( String fileName ) {
    this.fileName = fileName;
  }
  /**
   * @return the name of the referenced sub-transformation
   */
  public String getTransName() {
    return transName;
  }
  /**
   * @param transName
   *          the transName to set
   */
  public void setTransName( String transName ) {
    this.transName = transName;
  }
  /**
   * @return the inputMappings
   */
  public List<MappingIODefinition> getInputMappings() {
    return inputMappings;
  }
  /**
   * @param inputMappings
   *          the inputMappings to set
   */
  public void setInputMappings( List<MappingIODefinition> inputMappings ) {
    this.inputMappings = inputMappings;
    // The cached StepIOMeta is built from the input mappings, so it must be rebuilt.
    resetStepIoMeta();
  }
  /**
   * @return the outputMappings
   */
  public List<MappingIODefinition> getOutputMappings() {
    return outputMappings;
  }
  /**
   * @param outputMappings
   *          the outputMappings to set
   */
  // NOTE(review): unlike setInputMappings this does not call resetStepIoMeta(); the visible
  // getStepIOMeta() only derives streams from inputMappings, so this looks intentional — confirm.
  public void setOutputMappings( List<MappingIODefinition> outputMappings ) {
    this.outputMappings = outputMappings;
  }
  /**
   * @return the mappingParameters
   */
  public MappingParameters getMappingParameters() {
    return mappingParameters;
  }
  /**
   * @param mappingParameters
   *          the mappingParameters to set
   */
  public void setMappingParameters( MappingParameters mappingParameters ) {
    this.mappingParameters = mappingParameters;
  }
@Override
public List<ResourceReference> getResourceDependencies( TransMeta transMeta, StepMeta stepInfo ) {
List<ResourceReference> references = new ArrayList<ResourceReference>( 5 );
String realFilename = transMeta.environmentSubstitute( fileName );
String realTransname = transMeta.environmentSubstitute( transName );
ResourceReference reference = new ResourceReference( stepInfo );
references.add( reference );
if ( !Const.isEmpty( realFilename ) ) {
// Add the filename to the references, including a reference to this step
// meta data.
//
reference.getEntries().add( new ResourceEntry( realFilename, ResourceType.ACTIONFILE ) );
} else if ( !Const.isEmpty( realTransname ) ) {
// Add the filename to the references, including a reference to this step
// meta data.
//
reference.getEntries().add( new ResourceEntry( realTransname, ResourceType.ACTIONFILE ) );
references.add( reference );
}
return references;
}
@Override
public String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions,
ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore ) throws KettleException {
try {
// Try to load the transformation from repository or file.
// Modify this recursively too...
//
// NOTE: there is no need to clone this step because the caller is
// responsible for this.
//
// First load the mapping metadata...
//
TransMeta mappingTransMeta = loadMappingMeta( this, repository, space );
// Also go down into the mapping transformation and export the files
// there. (mapping recursively down)
//
String proposedNewFilename =
mappingTransMeta.exportResources(
mappingTransMeta, definitions, resourceNamingInterface, repository, metaStore );
// To get a relative path to it, we inject
// ${Internal.Job.Filename.Directory}
//
String newFilename =
"${" + Const.INTERNAL_VARIABLE_TRANSFORMATION_FILENAME_DIRECTORY + "}/" + proposedNewFilename;
// Set the correct filename inside the XML.
//
mappingTransMeta.setFilename( newFilename );
// exports always reside in the root directory, in case we want to turn
// this into a file repository...
//
mappingTransMeta.setRepositoryDirectory( new RepositoryDirectory() );
// change it in the job entry
//
fileName = newFilename;
return proposedNewFilename;
} catch ( Exception e ) {
throw new KettleException( BaseMessages.getString(
PKG, "MappingMeta.Exception.UnableToLoadTransformation", fileName ) );
}
}
  /**
   * @return the repository this step metadata was loaded from (may be null for file-based use)
   */
  public Repository getRepository() {
    return repository;
  }

  /**
   * @param repository
   *          the repository to set
   */
  public void setRepository( Repository repository ) {
    this.repository = repository;
  }

  /**
   * @return the transObjectId: the repository object ID of the referenced mapping transformation
   */
  public ObjectId getTransObjectId() {
    return transObjectId;
  }

  /**
   * @param transObjectId
   *          the transObjectId to set
   */
  public void setTransObjectId( ObjectId transObjectId ) {
    this.transObjectId = transObjectId;
  }
  /**
   * @return the specificationMethod: how the mapping transformation is referenced
   *         (by file name, repository name, or repository object ID)
   */
  public ObjectLocationSpecificationMethod getSpecificationMethod() {
    return specificationMethod;
  }

  /**
   * @param specificationMethod
   *          the specificationMethod to set
   */
  public void setSpecificationMethod( ObjectLocationSpecificationMethod specificationMethod ) {
    this.specificationMethod = specificationMethod;
  }
  /**
   * Lazily builds and caches the step I/O metadata, registering one INFO stream for every input
   * mapping that reads from another step (i.e. is not the main data path).
   *
   * NOTE(review): the lazy initialization is not synchronized; presumably this is only called
   * from a single thread during transformation preparation -- confirm before relying on it.
   */
  @Override
  public StepIOMetaInterface getStepIOMeta() {
    if ( ioMeta == null ) {
      // TODO Create a dynamic StepIOMeta so that we can more easily manipulate the info streams?
      ioMeta = new StepIOMeta( true, true, true, false, true, false );

      for ( MappingIODefinition def : inputMappings ) {
        if ( isInfoMapping( def ) ) {
          Stream stream =
            new Stream( StreamType.INFO, def.getInputStep(), BaseMessages.getString(
              PKG, "MappingMeta.InfoStream.Description" ), StreamIcon.INFO, null );
          ioMeta.addStream( stream );
        }
      }
    }
    return ioMeta;
  }
private boolean isInfoMapping( MappingIODefinition def ) {
return !def.isMainDataPath() && !Const.isEmpty( def.getInputStepname() );
}
  /**
   * Remove the cached {@link StepIOMeta} so it is recreated when it is next accessed.
   */
  public void resetStepIoMeta() {
    ioMeta = null;
  }

  /**
   * @return always true: the row layout of a mapping step cannot be verified statically,
   *         so this step is excluded from row layout verification.
   */
  public boolean excludeFromRowLayoutVerification() {
    return true;
  }
  /**
   * Resolves the {@link StepMeta} reference for every INFO-type input mapping by looking up its
   * configured input step name in the supplied list of steps.
   *
   * @param steps the steps of the parent transformation to search in
   */
  @Override
  public void searchInfoAndTargetSteps( List<StepMeta> steps ) {
    // Assign all StepMeta references for Input Mappings that are INFO inputs
    for ( MappingIODefinition def : inputMappings ) {
      if ( isInfoMapping( def ) ) {
        def.setInputStep( StepMeta.findStep( steps, def.getInputStepname() ) );
      }
    }
  }
  /**
   * @return the transformation types this step supports; mapping steps only work in
   *         normal transformations
   */
  public TransformationType[] getSupportedTransformationTypes() {
    return new TransformationType[] { TransformationType.Normal, };
  }

  /**
   * @return true when the mapping transformation is referenced by repository object ID, in which
   *         case {@link #lookupRepositoryReferences(Repository)} must re-resolve the reference
   */
  @Override
  public boolean hasRepositoryReferences() {
    return specificationMethod == ObjectLocationSpecificationMethod.REPOSITORY_BY_REFERENCE;
  }
  /**
   * Re-resolves the repository object ID of the referenced transformation from its name and
   * directory path (e.g. after a repository import relocated objects).
   *
   * @param repository the repository to look the transformation up in
   * @throws KettleException when the directory or transformation cannot be found
   */
  @Override
  public void lookupRepositoryReferences( Repository repository ) throws KettleException {
    // The correct reference is stored in the trans name and directory attributes...
    //
    RepositoryDirectoryInterface repositoryDirectoryInterface =
      RepositoryImportLocation.getRepositoryImportLocation().findDirectory( directoryPath );
    transObjectId = repository.getTransformationID( transName, repositoryDirectoryInterface );
  }
  /**
   * @return the allowingMultipleInputs: whether this mapping step accepts more than one input
   *         mapping definition
   */
  public boolean isAllowingMultipleInputs() {
    return allowingMultipleInputs;
  }

  /**
   * @param allowingMultipleInputs
   *          the allowingMultipleInputs to set
   */
  public void setAllowingMultipleInputs( boolean allowingMultipleInputs ) {
    this.allowingMultipleInputs = allowingMultipleInputs;
  }

  /**
   * @return the allowingMultipleOutputs: whether this mapping step exposes more than one output
   *         mapping definition
   */
  public boolean isAllowingMultipleOutputs() {
    return allowingMultipleOutputs;
  }

  /**
   * @param allowingMultipleOutputs
   *          the allowingMultipleOutputs to set
   */
  public void setAllowingMultipleOutputs( boolean allowingMultipleOutputs ) {
    this.allowingMultipleOutputs = allowingMultipleOutputs;
  }
  /**
   * @return The objects referenced in the step, like a mapping, a transformation, a job, ...
   */
  public String[] getReferencedObjectDescriptions() {
    return new String[] { BaseMessages.getString( PKG, "MappingMeta.ReferencedObject.Description" ), };
  }

  // A mapping is considered defined when either a file name, a repository object ID, or a
  // directory path + transformation name combination has been configured.
  // NOTE(review): the method name contains a typo ("Mappping"); kept as-is because this is a
  // documentation-only pass and the name is private to this class.
  private boolean isMapppingDefined() {
    return !Const.isEmpty( fileName )
      || transObjectId != null || ( !Const.isEmpty( this.directoryPath ) && !Const.isEmpty( transName ) );
  }

  /**
   * @return one enabled-flag per referenced object (parallel to
   *         {@link #getReferencedObjectDescriptions()}): true when the mapping is fully specified
   */
  public boolean[] isReferencedObjectEnabled() {
    return new boolean[] { isMapppingDefined(), };
  }
  /**
   * Load the referenced object without a MetaStore.
   *
   * @deprecated use {@link #loadReferencedObject(int, Repository, IMetaStore, VariableSpace)}
   *             which also accepts a MetaStore
   */
  @Deprecated
  public Object loadReferencedObject( int index, Repository rep, VariableSpace space ) throws KettleException {
    return loadReferencedObject( index, rep, null, space );
  }

  /**
   * Load the referenced object
   *
   * @param index
   *          the object index to load
   * @param rep
   *          the repository
   * @param metaStore
   *          the MetaStore to use
   * @param space
   *          the variable space to use
   * @return the referenced object once loaded
   * @throws KettleException
   */
  public Object loadReferencedObject( int index, Repository rep, IMetaStore metaStore, VariableSpace space ) throws KettleException {
    return loadMappingMeta( this, rep, metaStore, space );
  }
  /**
   * @return the MetaStore used to resolve shared metadata
   */
  public IMetaStore getMetaStore() {
    return metaStore;
  }

  /**
   * @param metaStore
   *          the MetaStore to set
   */
  public void setMetaStore( IMetaStore metaStore ) {
    this.metaStore = metaStore;
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.timeline;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Function;
import com.google.common.base.Preconditions;
import com.google.common.collect.Iterators;
import com.google.errorprone.annotations.concurrent.GuardedBy;
import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.java.util.common.UOE;
import org.apache.druid.java.util.common.guava.Comparators;
import org.apache.druid.timeline.partition.ImmutablePartitionHolder;
import org.apache.druid.timeline.partition.PartitionChunk;
import org.apache.druid.timeline.partition.PartitionHolder;
import org.apache.druid.utils.CollectionUtils;
import org.joda.time.Interval;
import javax.annotation.Nullable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Comparator;
import java.util.HashMap;
import java.util.IdentityHashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.NavigableMap;
import java.util.Objects;
import java.util.Set;
import java.util.TreeMap;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;
/**
* VersionedIntervalTimeline is a data structure that manages objects on a specific timeline.
*
* It associates an {@link Interval} and a generically-typed version with the object that is being stored.
*
* In the event of overlapping timeline entries, timeline intervals may be chunked. The underlying data associated
* with a timeline entry remains unchanged when chunking occurs.
*
* After loading objects via the {@link #add} method, the {@link #lookup(Interval)} method can be used to get the list
* of the most recent objects (according to the version) that match the given interval. The intent is that objects
* represent a certain time period and when you do a {@link #lookup(Interval)}, you are asking for all of the objects
* that you need to look at in order to get a correct answer about that time period.
*
* The {@link #findFullyOvershadowed} method returns a list of objects that will never be returned by a call to {@link
* #lookup} because they are overshadowed by some other object. This can be used in conjunction with the {@link #add}
* and {@link #remove} methods to achieve "atomic" updates. First add new items, then check if those items caused
* anything to be overshadowed, if so, remove the overshadowed elements and you have effectively updated your data set
* without any user impact.
*/
public class VersionedIntervalTimeline<VersionType, ObjectType extends Overshadowable<ObjectType>>
    implements TimelineLookup<VersionType, ObjectType>
{
  /**
   * Builds a timeline of {@link DataSegment}s ordered by the natural (String) version order.
   */
  public static VersionedIntervalTimeline<String, DataSegment> forSegments(Iterable<DataSegment> segments)
  {
    return forSegments(segments.iterator());
  }

  /**
   * Builds a timeline of {@link DataSegment}s ordered by the natural (String) version order,
   * consuming the given iterator.
   */
  public static VersionedIntervalTimeline<String, DataSegment> forSegments(Iterator<DataSegment> segments)
  {
    final VersionedIntervalTimeline<String, DataSegment> timeline =
        new VersionedIntervalTimeline<>(Comparator.naturalOrder());
    addSegments(timeline, segments);
    return timeline;
  }

  // Guards all maps below. Constructed in fair mode so writers are not starved by readers.
  private final ReentrantReadWriteLock lock = new ReentrantReadWriteLock(true);

  // Below timelines stores only *visible* timelineEntries
  // adjusted interval -> timelineEntry
  private final NavigableMap<Interval, TimelineEntry> completePartitionsTimeline = new TreeMap<>(
      Comparators.intervalsByStartThenEnd()
  );
  // IncompletePartitionsTimeline also includes completePartitionsTimeline
  // adjusted interval -> timelineEntry
  @VisibleForTesting
  final NavigableMap<Interval, TimelineEntry> incompletePartitionsTimeline = new TreeMap<>(
      Comparators.intervalsByStartThenEnd()
  );
  // true interval -> version -> timelineEntry
  private final Map<Interval, TreeMap<VersionType, TimelineEntry>> allTimelineEntries = new HashMap<>();
  // Count of objects (partition chunks) currently stored, kept in sync with allTimelineEntries.
  private final AtomicInteger numObjects = new AtomicInteger();

  // Orders versions; higher versions overshadow lower ones on overlapping intervals.
  private final Comparator<? super VersionType> versionComparator;

  public VersionedIntervalTimeline(Comparator<? super VersionType> versionComparator)
  {
    this.versionComparator = versionComparator;
  }

  /**
   * Adds every segment from the iterator to the given timeline, creating one partition chunk
   * per segment via its shard spec.
   */
  public static void addSegments(
      VersionedIntervalTimeline<String, DataSegment> timeline,
      Iterator<DataSegment> segments
  )
  {
    timeline.addAll(
        Iterators.transform(segments, segment -> segment.getShardSpec().createChunk(segment)),
        DataSegment::getInterval,
        DataSegment::getVersion
    );
  }

  /**
   * Returns the internal (live, unsynchronized) map of all entries keyed by true interval and
   * version. Callers must not mutate it.
   */
  public Map<Interval, TreeMap<VersionType, TimelineEntry>> getAllTimelineEntries()
  {
    return allTimelineEntries;
  }

  /**
   * Returns a lazy collection with all objects (including partially AND fully overshadowed, see {@link
   * #findFullyOvershadowed}) in this VersionedIntervalTimeline to be used for iteration or {@link Collection#stream()}
   * transformation. The order of objects in this collection is unspecified.
   *
   * Note: iteration over the returned collection may not be as trivially cheap as, for example, iteration over an
   * ArrayList. Try (to some reasonable extent) to organize the code so that it iterates the returned collection only
   * once rather than several times.
   */
  public Collection<ObjectType> iterateAllObjects()
  {
    return CollectionUtils.createLazyCollectionFromStream(
        () -> allTimelineEntries
            .values()
            .stream()
            .flatMap((TreeMap<VersionType, TimelineEntry> entryMap) -> entryMap.values().stream())
            .flatMap((TimelineEntry entry) -> StreamSupport.stream(entry.getPartitionHolder().spliterator(), false))
            .map(PartitionChunk::getObject),
        numObjects.get()
    );
  }

  /**
   * @return the total number of objects (partition chunks) stored in this timeline
   */
  public int getNumObjects()
  {
    return numObjects.get();
  }

  /**
   * Computes a set with all objects falling within the specified interval which are at least partially "visible" in
   * this interval (that is, are not fully overshadowed within this interval).
   */
  public Set<ObjectType> findNonOvershadowedObjectsInInterval(Interval interval, Partitions completeness)
  {
    return lookup(interval, completeness)
        .stream()
        .flatMap(timelineObjectHolder -> timelineObjectHolder.getObject().stream())
        .map(PartitionChunk::getObject)
        .collect(Collectors.toSet());
  }

  /**
   * Adds a single object to the timeline under the given interval and version.
   */
  public void add(final Interval interval, VersionType version, PartitionChunk<ObjectType> object)
  {
    addAll(Iterators.singletonIterator(object), o -> interval, o -> version);
  }

  /**
   * Bulk-add: registers every chunk in allTimelineEntries, then recomputes visibility in the
   * complete/incomplete timelines once per touched entry at the end.
   */
  private void addAll(
      final Iterator<PartitionChunk<ObjectType>> objects,
      final Function<ObjectType, Interval> intervalFunction,
      final Function<ObjectType, VersionType> versionFunction
  )
  {
    lock.writeLock().lock();

    try {
      // IdentityHashMap: distinct TimelineEntry instances, each mapped to its true interval.
      final IdentityHashMap<TimelineEntry, Interval> allEntries = new IdentityHashMap<>();

      while (objects.hasNext()) {
        PartitionChunk<ObjectType> object = objects.next();
        Interval interval = intervalFunction.apply(object.getObject());
        VersionType version = versionFunction.apply(object.getObject());
        Map<VersionType, TimelineEntry> exists = allTimelineEntries.get(interval);
        TimelineEntry entry;

        if (exists == null) {
          // First entry for this interval: create the per-interval version map.
          entry = new TimelineEntry(interval, version, new PartitionHolder<>(object));
          TreeMap<VersionType, TimelineEntry> versionEntry = new TreeMap<>(versionComparator);
          versionEntry.put(version, entry);
          allTimelineEntries.put(interval, versionEntry);
          numObjects.incrementAndGet();
        } else {
          entry = exists.get(version);

          if (entry == null) {
            // First chunk at this (interval, version).
            entry = new TimelineEntry(interval, version, new PartitionHolder<>(object));
            exists.put(version, entry);
            numObjects.incrementAndGet();
          } else {
            // Existing (interval, version): add the chunk; only count it if it was new.
            PartitionHolder<ObjectType> partitionHolder = entry.getPartitionHolder();
            if (partitionHolder.add(object)) {
              numObjects.incrementAndGet();
            }
          }
        }

        allEntries.put(entry, interval);
      }

      // "isComplete" is O(objects in holder) so defer it to the end of addAll.
      for (Entry<TimelineEntry, Interval> entry : allEntries.entrySet()) {
        Interval interval = entry.getValue();

        if (entry.getKey().getPartitionHolder().isComplete()) {
          add(completePartitionsTimeline, interval, entry.getKey());
        }

        add(incompletePartitionsTimeline, interval, entry.getKey());
      }
    }
    finally {
      lock.writeLock().unlock();
    }
  }

  /**
   * Removes a single chunk. When the entry's holder becomes empty the whole entry (and, if it
   * was the last version, the interval) is dropped. Both visible timelines are recomputed for
   * the affected interval.
   *
   * @return the removed chunk, or null when no matching interval/version/chunk was found
   */
  @Nullable
  public PartitionChunk<ObjectType> remove(Interval interval, VersionType version, PartitionChunk<ObjectType> chunk)
  {
    lock.writeLock().lock();

    try {
      Map<VersionType, TimelineEntry> versionEntries = allTimelineEntries.get(interval);
      if (versionEntries == null) {
        return null;
      }

      TimelineEntry entry = versionEntries.get(version);
      if (entry == null) {
        return null;
      }

      PartitionChunk<ObjectType> removedChunk = entry.getPartitionHolder().remove(chunk);
      if (removedChunk == null) {
        return null;
      }
      numObjects.decrementAndGet();
      if (entry.getPartitionHolder().isEmpty()) {
        versionEntries.remove(version);
        if (versionEntries.isEmpty()) {
          allTimelineEntries.remove(interval);
        }

        remove(incompletePartitionsTimeline, interval, entry, true);
      }

      // The complete timeline is always recomputed: the holder may have just become incomplete.
      remove(completePartitionsTimeline, interval, entry, false);

      return removedChunk;
    }
    finally {
      lock.writeLock().unlock();
    }
  }

  /**
   * Finds the partition holder registered at the given version whose true interval equals or
   * contains the given interval, or null when none exists.
   */
  @Override
  public @Nullable PartitionHolder<ObjectType> findEntry(Interval interval, VersionType version)
  {
    lock.readLock().lock();
    try {
      // Linear scan over all entries; acceptable because this lookup is not on a hot path.
      for (Entry<Interval, TreeMap<VersionType, TimelineEntry>> entry : allTimelineEntries.entrySet()) {
        if (entry.getKey().equals(interval) || entry.getKey().contains(interval)) {
          TimelineEntry foundEntry = entry.getValue().get(version);
          if (foundEntry != null) {
            return new ImmutablePartitionHolder<>(foundEntry.getPartitionHolder());
          }
        }
      }

      return null;
    }
    finally {
      lock.readLock().unlock();
    }
  }

  /**
   * Does a lookup for the objects representing the given time interval. Will *only* return
   * PartitionHolders that are {@linkplain PartitionHolder#isComplete() complete}.
   *
   * @param interval interval to find objects for
   *
   * @return Holders representing the interval that the objects exist for, PartitionHolders
   *         are guaranteed to be complete
   */
  @Override
  public List<TimelineObjectHolder<VersionType, ObjectType>> lookup(Interval interval)
  {
    lock.readLock().lock();
    try {
      return lookup(interval, Partitions.ONLY_COMPLETE);
    }
    finally {
      lock.readLock().unlock();
    }
  }

  /**
   * Same as {@link #lookup(Interval)} but also returns holders whose partition sets are not
   * yet complete.
   */
  @Override
  public List<TimelineObjectHolder<VersionType, ObjectType>> lookupWithIncompletePartitions(Interval interval)
  {
    lock.readLock().lock();
    try {
      return lookup(interval, Partitions.INCOMPLETE_OK);
    }
    finally {
      lock.readLock().unlock();
    }
  }

  /**
   * @return true when no complete entries are visible in this timeline
   */
  public boolean isEmpty()
  {
    lock.readLock().lock();
    try {
      return completePartitionsTimeline.isEmpty();
    }
    finally {
      lock.readLock().unlock();
    }
  }

  /**
   * @return the earliest visible complete entry. Throws NullPointerException when the timeline
   *         is empty (firstEntry() returns null) -- callers should check {@link #isEmpty()} first.
   */
  public TimelineObjectHolder<VersionType, ObjectType> first()
  {
    lock.readLock().lock();
    try {
      return timelineEntryToObjectHolder(completePartitionsTimeline.firstEntry().getValue());
    }
    finally {
      lock.readLock().unlock();
    }
  }

  /**
   * @return the latest visible complete entry. Same empty-timeline caveat as {@link #first()}.
   */
  public TimelineObjectHolder<VersionType, ObjectType> last()
  {
    lock.readLock().lock();
    try {
      return timelineEntryToObjectHolder(completePartitionsTimeline.lastEntry().getValue());
    }
    finally {
      lock.readLock().unlock();
    }
  }

  // Wraps an internal entry into the public holder type; the holder's interval is the entry's
  // true (unchunked) interval.
  private TimelineObjectHolder<VersionType, ObjectType> timelineEntryToObjectHolder(TimelineEntry entry)
  {
    return new TimelineObjectHolder<>(
        entry.getTrueInterval(),
        entry.getTrueInterval(),
        entry.getVersion(),
        new PartitionHolder<>(entry.getPartitionHolder())
    );
  }

  /**
   * This method should be deduplicated with DataSourcesSnapshot.determineOvershadowedSegments(): see
   * https://github.com/apache/druid/issues/8070.
   */
  public Set<TimelineObjectHolder<VersionType, ObjectType>> findFullyOvershadowed()
  {
    lock.readLock().lock();
    try {
      // 1. Put all timelineEntries and remove all visible entries to find out only non-visible timelineEntries.
      final Map<Interval, Map<VersionType, TimelineEntry>> overshadowedPartitionsTimeline =
          computeOvershadowedPartitionsTimeline();

      final Set<TimelineObjectHolder<VersionType, ObjectType>> overshadowedObjects = overshadowedPartitionsTimeline
          .values()
          .stream()
          .flatMap(
              (Map<VersionType, TimelineEntry> entry) -> entry.values().stream().map(this::timelineEntryToObjectHolder)
          )
          .collect(Collectors.toSet());

      // 2. Visible timelineEntries can also have overshadowed objects. Add them to the result too.
      for (TimelineEntry entry : incompletePartitionsTimeline.values()) {
        final List<PartitionChunk<ObjectType>> overshadowedEntries = entry.partitionHolder.getOvershadowed();
        if (!overshadowedEntries.isEmpty()) {
          overshadowedObjects.add(
              new TimelineObjectHolder<>(
                  entry.trueInterval,
                  entry.version,
                  new PartitionHolder<>(overshadowedEntries)
              )
          );
        }
      }

      return overshadowedObjects;
    }
    finally {
      lock.readLock().unlock();
    }
  }

  // Returns all entries that are NOT visible in either timeline: start from a copy of
  // allTimelineEntries and subtract everything present in the complete/incomplete timelines.
  private Map<Interval, Map<VersionType, TimelineEntry>> computeOvershadowedPartitionsTimeline()
  {
    final Map<Interval, Map<VersionType, TimelineEntry>> overshadowedPartitionsTimeline = new HashMap<>();
    allTimelineEntries.forEach((Interval interval, TreeMap<VersionType, TimelineEntry> versionEntry) -> {
      @SuppressWarnings("unchecked")
      Map<VersionType, TimelineEntry> versionEntryCopy = (TreeMap) versionEntry.clone();
      overshadowedPartitionsTimeline.put(interval, versionEntryCopy);
    });

    for (TimelineEntry entry : completePartitionsTimeline.values()) {
      overshadowedPartitionsTimeline.computeIfPresent(
          entry.getTrueInterval(),
          (Interval interval, Map<VersionType, TimelineEntry> versionEntry) -> {
            versionEntry.remove(entry.getVersion());
            return versionEntry.isEmpty() ? null : versionEntry;
          }
      );
    }

    for (TimelineEntry entry : incompletePartitionsTimeline.values()) {
      overshadowedPartitionsTimeline.computeIfPresent(
          entry.getTrueInterval(),
          (Interval interval, Map<VersionType, TimelineEntry> versionEntry) -> {
            versionEntry.remove(entry.getVersion());
            return versionEntry.isEmpty() ? null : versionEntry;
          }
      );
    }
    return overshadowedPartitionsTimeline;
  }

  /**
   * Returns true when the given (interval, version, object) is fully covered by visible complete
   * entries with a higher version (or the same version with chunks that overshadow the object).
   */
  public boolean isOvershadowed(Interval interval, VersionType version, ObjectType object)
  {
    lock.readLock().lock();
    try {
      // Fast path: an entry with exactly this (adjusted) interval.
      TimelineEntry entry = completePartitionsTimeline.get(interval);
      if (entry != null) {
        final int majorVersionCompare = versionComparator.compare(version, entry.getVersion());
        if (majorVersionCompare == 0) {
          for (PartitionChunk<ObjectType> chunk : entry.partitionHolder) {
            if (chunk.getObject().overshadows(object)) {
              return true;
            }
          }
          return false;
        } else {
          return majorVersionCompare < 0;
        }
      }

      // Walk the visible entries from the last one starting at or before our start, making sure
      // they cover the whole interval contiguously with overshadowing versions.
      Interval lower = completePartitionsTimeline.floorKey(
          new Interval(interval.getStart(), DateTimes.MAX)
      );

      if (lower == null || !lower.overlaps(interval)) {
        return false;
      }

      Interval prev = null;
      Interval curr = lower;

      do {
        if (curr == null || //no further keys
            (prev != null && curr.getStartMillis() > prev.getEndMillis()) //a discontinuity
        ) {
          return false;
        }

        final TimelineEntry timelineEntry = completePartitionsTimeline.get(curr);
        final int versionCompare = versionComparator.compare(version, timelineEntry.getVersion());

        //lower or same version
        if (versionCompare > 0) {
          return false;
        } else if (versionCompare == 0) {
          if (timelineEntry.partitionHolder.stream().noneMatch(chunk -> chunk.getObject().overshadows(object))) {
            return false;
          }
        }

        prev = curr;
        curr = completePartitionsTimeline.higherKey(curr);

      } while (interval.getEndMillis() > prev.getEndMillis());

      return true;
    }
    finally {
      lock.readLock().unlock();
    }
  }

  // Inserts an entry into a visible timeline, chunking intervals as needed so that at every point
  // in time only the highest-version entry is visible.
  @GuardedBy("lock")
  private void add(
      NavigableMap<Interval, TimelineEntry> timeline,
      Interval interval,
      TimelineEntry entry
  )
  {
    TimelineEntry existsInTimeline = timeline.get(interval);

    if (existsInTimeline != null) {
      // Exact interval collision: the higher version wins outright.
      int compare = versionComparator.compare(entry.getVersion(), existsInTimeline.getVersion());
      if (compare > 0) {
        addIntervalToTimeline(interval, entry, timeline);
      }
      return;
    }

    // Try merging with the neighbors on either side before inserting as-is.
    Interval lowerKey = timeline.lowerKey(interval);

    if (lowerKey != null) {
      if (addAtKey(timeline, lowerKey, entry)) {
        return;
      }
    }

    Interval higherKey = timeline.higherKey(interval);

    if (higherKey != null) {
      if (addAtKey(timeline, higherKey, entry)) {
        return;
      }
    }

    addIntervalToTimeline(interval, entry, timeline);
  }

  /**
   * @return boolean flag indicating whether or not we inserted or discarded something
   */
  @GuardedBy("lock")
  private boolean addAtKey(
      NavigableMap<Interval, TimelineEntry> timeline,
      Interval key,
      TimelineEntry entry
  )
  {
    boolean retVal = false;
    Interval currKey = key;
    Interval entryInterval = entry.getTrueInterval();

    if (!currKey.overlaps(entryInterval)) {
      return false;
    }

    // Walk forward over every existing key that overlaps the entry, resolving each overlap by
    // version: lower-version portions of the entry are trimmed away, higher-version portions of
    // existing entries are split around the new entry.
    while (entryInterval != null && currKey != null && currKey.overlaps(entryInterval)) {
      final Interval nextKey = timeline.higherKey(currKey);

      final int versionCompare = versionComparator.compare(
          entry.getVersion(),
          timeline.get(currKey).getVersion()
      );

      if (versionCompare < 0) {
        // since the entry version is lower than the existing one, the existing one overwrites the given entry
        // if overlapped.
        if (currKey.contains(entryInterval)) {
          // the version of the entry of currKey is larger than that of the given entry. Discard it
          return true;
        } else if (currKey.getStart().isBefore(entryInterval.getStart())) {
          //       | entry |
          // | cur |
          // =>        |new|
          entryInterval = new Interval(currKey.getEnd(), entryInterval.getEnd());
        } else {
          // | entry |
          //     | cur |
          // => |new|
          addIntervalToTimeline(new Interval(entryInterval.getStart(), currKey.getStart()), entry, timeline);

          //     | entry |
          // | cur |
          // =>        |new|
          if (entryInterval.getEnd().isAfter(currKey.getEnd())) {
            entryInterval = new Interval(currKey.getEnd(), entryInterval.getEnd());
          } else {
            // Discard this entry since there is no portion of the entry interval that goes past the end of the curr
            // key interval.
            entryInterval = null;
          }
        }
      } else if (versionCompare > 0) {
        // since the entry version is greater than the existing one, the given entry overwrites the existing one
        // if overlapped.
        final TimelineEntry oldEntry = timeline.remove(currKey);

        if (currKey.contains(entryInterval)) {
          // |       cur      |
          //     | entry |
          // => |old| new |old|
          addIntervalToTimeline(new Interval(currKey.getStart(), entryInterval.getStart()), oldEntry, timeline);
          addIntervalToTimeline(new Interval(entryInterval.getEnd(), currKey.getEnd()), oldEntry, timeline);
          addIntervalToTimeline(entryInterval, entry, timeline);

          return true;
        } else if (currKey.getStart().isBefore(entryInterval.getStart())) {
          // | cur |
          //     | entry |
          // => |old|
          addIntervalToTimeline(new Interval(currKey.getStart(), entryInterval.getStart()), oldEntry, timeline);
        } else if (entryInterval.getEnd().isBefore(currKey.getEnd())) {
          //     | cur |
          // | entry |
          // =>      |old|
          addIntervalToTimeline(new Interval(entryInterval.getEnd(), currKey.getEnd()), oldEntry, timeline);
        }
      } else {
        if (timeline.get(currKey).equals(entry)) {
          // This occurs when restoring segments
          timeline.remove(currKey);
        } else {
          throw new UOE(
              "Cannot add overlapping segments [%s and %s] with the same version [%s]",
              currKey,
              entryInterval,
              entry.getVersion()
          );
        }
      }

      currKey = nextKey;
      retVal = true;
    }

    addIntervalToTimeline(entryInterval, entry, timeline);

    return retVal;
  }

  // Inserts only non-null, non-empty intervals; zero-length remainders from chunking are dropped.
  @GuardedBy("lock")
  private void addIntervalToTimeline(
      Interval interval,
      TimelineEntry entry,
      NavigableMap<Interval, TimelineEntry> timeline
  )
  {
    if (interval != null && interval.toDurationMillis() > 0) {
      timeline.put(interval, entry);
    }
  }

  // Removes every (possibly chunked) key that maps to the given entry, then lets the interval be
  // re-filled by whatever entry should now be visible there.
  @GuardedBy("lock")
  private void remove(
      NavigableMap<Interval, TimelineEntry> timeline,
      Interval interval,
      TimelineEntry entry,
      boolean incompleteOk
  )
  {
    List<Interval> intervalsToRemove = new ArrayList<>();
    TimelineEntry removed = timeline.get(interval);

    if (removed == null) {
      // The entry may be visible under one or more adjusted (chunked) intervals: scan for
      // identity matches.
      Iterator<Entry<Interval, TimelineEntry>> iter = timeline.entrySet().iterator();
      while (iter.hasNext()) {
        Entry<Interval, TimelineEntry> timelineEntry = iter.next();
        if (timelineEntry.getValue() == entry) {
          intervalsToRemove.add(timelineEntry.getKey());
        }
      }
    } else {
      intervalsToRemove.add(interval);
    }

    for (Interval i : intervalsToRemove) {
      remove(timeline, i, incompleteOk);
    }
  }

  // Removes one key and re-adds, for every overlapping true interval, the highest-version entry
  // that is eligible (any entry when incompleteOk, otherwise the highest complete one).
  @GuardedBy("lock")
  private void remove(
      NavigableMap<Interval, TimelineEntry> timeline,
      Interval interval,
      boolean incompleteOk
  )
  {
    timeline.remove(interval);

    for (Entry<Interval, TreeMap<VersionType, TimelineEntry>> versionEntry : allTimelineEntries.entrySet()) {
      if (versionEntry.getKey().overlap(interval) != null) {
        if (incompleteOk) {
          add(timeline, versionEntry.getKey(), versionEntry.getValue().lastEntry().getValue());
        } else {
          for (VersionType ver : versionEntry.getValue().descendingKeySet()) {
            TimelineEntry timelineEntry = versionEntry.getValue().get(ver);
            if (timelineEntry.getPartitionHolder().isComplete()) {
              add(timeline, versionEntry.getKey(), timelineEntry);
              break;
            }
          }
        }
      }
    }
  }

  // Core lookup: collects every visible entry overlapping the interval, then clips the first and
  // last holders so the returned intervals do not extend past the queried interval.
  @GuardedBy("lock")
  private List<TimelineObjectHolder<VersionType, ObjectType>> lookup(Interval interval, Partitions completeness)
  {
    List<TimelineObjectHolder<VersionType, ObjectType>> retVal = new ArrayList<>();
    NavigableMap<Interval, TimelineEntry> timeline;
    if (completeness == Partitions.INCOMPLETE_OK) {
      timeline = incompletePartitionsTimeline;
    } else {
      timeline = completePartitionsTimeline;
    }

    for (Entry<Interval, TimelineEntry> entry : timeline.entrySet()) {
      Interval timelineInterval = entry.getKey();
      TimelineEntry val = entry.getValue();

      if (timelineInterval.overlaps(interval)) {
        retVal.add(
            new TimelineObjectHolder<>(
                timelineInterval,
                val.getTrueInterval(),
                val.getVersion(),
                new PartitionHolder<>(val.getPartitionHolder())
            )
        );
      }
    }

    if (retVal.isEmpty()) {
      return retVal;
    }

    // Clip the first holder's interval to start no earlier than the queried interval.
    TimelineObjectHolder<VersionType, ObjectType> firstEntry = retVal.get(0);
    if (interval.overlaps(firstEntry.getInterval()) &&
        interval.getStart().isAfter(firstEntry.getInterval().getStart())) {
      retVal.set(
          0,
          new TimelineObjectHolder<>(
              new Interval(interval.getStart(), firstEntry.getInterval().getEnd()),
              firstEntry.getTrueInterval(),
              firstEntry.getVersion(),
              firstEntry.getObject()
          )
      );
    }

    // Clip the last holder's interval to end no later than the queried interval.
    TimelineObjectHolder<VersionType, ObjectType> lastEntry = retVal.get(retVal.size() - 1);
    if (interval.overlaps(lastEntry.getInterval()) && interval.getEnd().isBefore(lastEntry.getInterval().getEnd())) {
      retVal.set(
          retVal.size() - 1,
          new TimelineObjectHolder<>(
              new Interval(lastEntry.getInterval().getStart(), interval.getEnd()),
              lastEntry.getTrueInterval(),
              lastEntry.getVersion(),
              lastEntry.getObject()
          )
      );
    }

    return retVal;
  }

  /**
   * Internal record of one (true interval, version) pair and the partition chunks stored at it.
   * Instances are shared between allTimelineEntries and the visible timelines.
   */
  public class TimelineEntry
  {
    private final Interval trueInterval;
    private final VersionType version;
    private final PartitionHolder<ObjectType> partitionHolder;

    TimelineEntry(Interval trueInterval, VersionType version, PartitionHolder<ObjectType> partitionHolder)
    {
      this.trueInterval = Preconditions.checkNotNull(trueInterval);
      this.version = Preconditions.checkNotNull(version);
      this.partitionHolder = Preconditions.checkNotNull(partitionHolder);
    }

    Interval getTrueInterval()
    {
      return trueInterval;
    }

    public VersionType getVersion()
    {
      return version;
    }

    public PartitionHolder<ObjectType> getPartitionHolder()
    {
      return partitionHolder;
    }

    @Override
    public boolean equals(Object o)
    {
      if (this == o) {
        return true;
      }
      if (o == null || getClass() != o.getClass()) {
        return false;
      }

      final TimelineEntry that = (TimelineEntry) o;

      if (!this.trueInterval.equals(that.trueInterval)) {
        return false;
      }

      if (!this.version.equals(that.version)) {
        return false;
      }

      if (!this.partitionHolder.equals(that.partitionHolder)) {
        return false;
      }

      return true;
    }

    @Override
    public int hashCode()
    {
      return Objects.hash(trueInterval, version, partitionHolder);
    }
  }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.analysis;
import org.elasticsearch.index.analysis.compound.DictionaryCompoundWordTokenFilterFactory;
import org.elasticsearch.index.analysis.compound.HyphenationCompoundWordTokenFilterFactory;
import org.elasticsearch.test.ESTestCase;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
/**
 * Alerts us if new analyzers are added to lucene, so we don't miss them.
 * <p>
 * If we don't want to expose one for a specific reason, just map it to Void
 */
public class AnalysisFactoryTests extends ESTestCase {

    /**
     * Lucene tokenizer names mapped to the ES factory that exposes them,
     * {@link Deprecated} for entries we deliberately ignore, or {@link Void}
     * for ones intentionally not exposed.
     */
    static final Map<String, Class<?>> KNOWN_TOKENIZERS = knownTokenizers();

    // Built via a private factory method rather than double-brace initialization:
    // the anonymous HashMap subclass created by "new HashMap<>() {{ ... }}" pins
    // the enclosing class and breaks equals()/serialization expectations
    // (flagged by Error Prone's DoubleBraceInitialization check).
    private static Map<String, Class<?>> knownTokenizers() {
        Map<String, Class<?>> map = new HashMap<>();
        // deprecated ones, we dont care about these
        map.put("arabicletter", Deprecated.class);
        map.put("chinese", Deprecated.class);
        map.put("cjk", Deprecated.class);
        map.put("russianletter", Deprecated.class);
        // exposed in ES
        map.put("classic", ClassicTokenizerFactory.class);
        map.put("edgengram", EdgeNGramTokenizerFactory.class);
        map.put("keyword", KeywordTokenizerFactory.class);
        map.put("letter", LetterTokenizerFactory.class);
        map.put("lowercase", LowerCaseTokenizerFactory.class);
        map.put("ngram", NGramTokenizerFactory.class);
        map.put("pathhierarchy", PathHierarchyTokenizerFactory.class);
        map.put("pattern", PatternTokenizerFactory.class);
        map.put("standard", StandardTokenizerFactory.class);
        map.put("thai", ThaiTokenizerFactory.class);
        map.put("uax29urlemail", UAX29URLEmailTokenizerFactory.class);
        map.put("whitespace", WhitespaceTokenizerFactory.class);
        // this one "seems to mess up offsets". probably shouldn't be a tokenizer...
        map.put("wikipedia", Void.class);
        return map;
    }

    /** Fails when Lucene ships a tokenizer we have not yet classified above. */
    public void testTokenizers() {
        Set<String> missing =
            new TreeSet<>(org.apache.lucene.analysis.util.TokenizerFactory.availableTokenizers());
        missing.removeAll(KNOWN_TOKENIZERS.keySet());
        assertTrue("new tokenizers found, please update KNOWN_TOKENIZERS: " + missing.toString(), missing.isEmpty());
    }

    /**
     * Lucene token filter names mapped to the ES factory that exposes them,
     * {@link Deprecated} for ignored entries, or {@link Void} for ones
     * intentionally not exposed.
     */
    static final Map<String, Class<?>> KNOWN_TOKENFILTERS = knownTokenFilters();

    private static Map<String, Class<?>> knownTokenFilters() {
        Map<String, Class<?>> map = new HashMap<>();
        // deprecated ones, we dont care about these
        map.put("chinese", Deprecated.class);
        map.put("collationkey", Deprecated.class);
        map.put("position", Deprecated.class);
        map.put("thaiword", Deprecated.class);
        // exposed in ES
        map.put("apostrophe", ApostropheFilterFactory.class);
        map.put("arabicnormalization", ArabicNormalizationFilterFactory.class);
        map.put("arabicstem", ArabicStemTokenFilterFactory.class);
        map.put("asciifolding", ASCIIFoldingTokenFilterFactory.class);
        map.put("brazilianstem", BrazilianStemTokenFilterFactory.class);
        map.put("bulgarianstem", StemmerTokenFilterFactory.class);
        map.put("cjkbigram", CJKBigramFilterFactory.class);
        map.put("cjkwidth", CJKWidthFilterFactory.class);
        map.put("classic", ClassicFilterFactory.class);
        map.put("commongrams", CommonGramsTokenFilterFactory.class);
        map.put("commongramsquery", CommonGramsTokenFilterFactory.class);
        map.put("czechstem", CzechStemTokenFilterFactory.class);
        map.put("delimitedpayload", DelimitedPayloadTokenFilterFactory.class);
        map.put("dictionarycompoundword", DictionaryCompoundWordTokenFilterFactory.class);
        map.put("edgengram", EdgeNGramTokenFilterFactory.class);
        map.put("elision", ElisionTokenFilterFactory.class);
        map.put("englishminimalstem", StemmerTokenFilterFactory.class);
        map.put("englishpossessive", StemmerTokenFilterFactory.class);
        map.put("finnishlightstem", StemmerTokenFilterFactory.class);
        map.put("frenchlightstem", StemmerTokenFilterFactory.class);
        map.put("frenchminimalstem", StemmerTokenFilterFactory.class);
        map.put("galicianminimalstem", StemmerTokenFilterFactory.class);
        map.put("galicianstem", StemmerTokenFilterFactory.class);
        map.put("germanstem", GermanStemTokenFilterFactory.class);
        map.put("germanlightstem", StemmerTokenFilterFactory.class);
        map.put("germanminimalstem", StemmerTokenFilterFactory.class);
        map.put("germannormalization", GermanNormalizationFilterFactory.class);
        map.put("greeklowercase", LowerCaseTokenFilterFactory.class);
        map.put("greekstem", StemmerTokenFilterFactory.class);
        map.put("hindinormalization", HindiNormalizationFilterFactory.class);
        map.put("hindistem", StemmerTokenFilterFactory.class);
        map.put("hungarianlightstem", StemmerTokenFilterFactory.class);
        map.put("hunspellstem", HunspellTokenFilterFactory.class);
        map.put("hyphenationcompoundword", HyphenationCompoundWordTokenFilterFactory.class);
        map.put("indicnormalization", IndicNormalizationFilterFactory.class);
        map.put("irishlowercase", LowerCaseTokenFilterFactory.class);
        map.put("indonesianstem", StemmerTokenFilterFactory.class);
        map.put("italianlightstem", StemmerTokenFilterFactory.class);
        map.put("keepword", KeepWordFilterFactory.class);
        map.put("keywordmarker", KeywordMarkerTokenFilterFactory.class);
        map.put("kstem", KStemTokenFilterFactory.class);
        map.put("latvianstem", StemmerTokenFilterFactory.class);
        map.put("length", LengthTokenFilterFactory.class);
        map.put("limittokencount", LimitTokenCountFilterFactory.class);
        map.put("lowercase", LowerCaseTokenFilterFactory.class);
        map.put("ngram", NGramTokenFilterFactory.class);
        map.put("norwegianlightstem", StemmerTokenFilterFactory.class);
        map.put("norwegianminimalstem", StemmerTokenFilterFactory.class);
        map.put("patterncapturegroup", PatternCaptureGroupTokenFilterFactory.class);
        map.put("patternreplace", PatternReplaceTokenFilterFactory.class);
        map.put("persiannormalization", PersianNormalizationFilterFactory.class);
        map.put("porterstem", PorterStemTokenFilterFactory.class);
        map.put("portuguesestem", StemmerTokenFilterFactory.class);
        map.put("portugueselightstem", StemmerTokenFilterFactory.class);
        map.put("portugueseminimalstem", StemmerTokenFilterFactory.class);
        map.put("reversestring", ReverseTokenFilterFactory.class);
        map.put("russianlightstem", StemmerTokenFilterFactory.class);
        map.put("scandinavianfolding", ScandinavianFoldingFilterFactory.class);
        map.put("scandinaviannormalization", ScandinavianNormalizationFilterFactory.class);
        map.put("serbiannormalization", SerbianNormalizationFilterFactory.class);
        map.put("shingle", ShingleTokenFilterFactory.class);
        map.put("snowballporter", SnowballTokenFilterFactory.class);
        map.put("soraninormalization", SoraniNormalizationFilterFactory.class);
        map.put("soranistem", StemmerTokenFilterFactory.class);
        map.put("spanishlightstem", StemmerTokenFilterFactory.class);
        map.put("standard", StandardTokenFilterFactory.class);
        map.put("stemmeroverride", StemmerOverrideTokenFilterFactory.class);
        map.put("stop", StopTokenFilterFactory.class);
        map.put("swedishlightstem", StemmerTokenFilterFactory.class);
        map.put("synonym", SynonymTokenFilterFactory.class);
        map.put("trim", TrimTokenFilterFactory.class);
        map.put("truncate", TruncateTokenFilterFactory.class);
        map.put("turkishlowercase", LowerCaseTokenFilterFactory.class);
        map.put("type", KeepTypesFilterFactory.class);
        map.put("uppercase", UpperCaseTokenFilterFactory.class);
        map.put("worddelimiter", WordDelimiterTokenFilterFactory.class);
        // TODO: these tokenfilters are not yet exposed: useful?
        // suggest stop
        map.put("suggeststop", Void.class);
        // capitalizes tokens
        map.put("capitalization", Void.class);
        // like length filter (but codepoints)
        map.put("codepointcount", Void.class);
        // puts hyphenated words back together
        map.put("hyphenatedwords", Void.class);
        // repeats anything marked as keyword
        map.put("keywordrepeat", Void.class);
        // like limittokencount, but by offset
        map.put("limittokenoffset", Void.class);
        // like limittokencount, but by position
        map.put("limittokenposition", Void.class);
        // ???
        map.put("numericpayload", Void.class);
        // removes duplicates at the same position (this should be used by the existing factory)
        map.put("removeduplicates", Void.class);
        // ???
        map.put("tokenoffsetpayload", Void.class);
        // puts the type into the payload
        map.put("typeaspayload", Void.class);
        return map;
    }

    /** Fails when Lucene ships a token filter we have not yet classified above. */
    public void testTokenFilters() {
        Set<String> missing =
            new TreeSet<>(org.apache.lucene.analysis.util.TokenFilterFactory.availableTokenFilters());
        missing.removeAll(KNOWN_TOKENFILTERS.keySet());
        assertTrue("new tokenfilters found, please update KNOWN_TOKENFILTERS: " + missing.toString(), missing.isEmpty());
    }

    /**
     * Lucene char filter names mapped to the ES factory that exposes them,
     * or {@link Void} for ones intentionally not exposed.
     */
    static final Map<String, Class<?>> KNOWN_CHARFILTERS = knownCharFilters();

    private static Map<String, Class<?>> knownCharFilters() {
        Map<String, Class<?>> map = new HashMap<>();
        // exposed in ES
        map.put("htmlstrip", HtmlStripCharFilterFactory.class);
        map.put("mapping", MappingCharFilterFactory.class);
        map.put("patternreplace", PatternReplaceCharFilterFactory.class);
        // TODO: these charfilters are not yet exposed: useful?
        // handling of zwnj for persian
        map.put("persian", Void.class);
        return map;
    }

    /** Fails when Lucene ships a char filter we have not yet classified above. */
    public void testCharFilters() {
        Set<String> missing =
            new TreeSet<>(org.apache.lucene.analysis.util.CharFilterFactory.availableCharFilters());
        missing.removeAll(KNOWN_CHARFILTERS.keySet());
        assertTrue("new charfilters found, please update KNOWN_CHARFILTERS: " + missing.toString(), missing.isEmpty());
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.shuffle.sort;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import javax.annotation.Nullable;
import scala.None$;
import scala.Option;
import scala.Product2;
import scala.Tuple2;
import scala.collection.Iterator;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.io.Closeables;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.spark.Partitioner;
import org.apache.spark.ShuffleDependency;
import org.apache.spark.SparkConf;
import org.apache.spark.TaskContext;
import org.apache.spark.executor.ShuffleWriteMetrics;
import org.apache.spark.scheduler.MapStatus;
import org.apache.spark.scheduler.MapStatus$;
import org.apache.spark.serializer.Serializer;
import org.apache.spark.serializer.SerializerInstance;
import org.apache.spark.shuffle.IndexShuffleBlockResolver;
import org.apache.spark.shuffle.ShuffleWriter;
import org.apache.spark.storage.*;
import org.apache.spark.util.Utils;
/**
* This class implements sort-based shuffle's hash-style shuffle fallback path. This write path
* writes incoming records to separate files, one file per reduce partition, then concatenates these
* per-partition files to form a single output file, regions of which are served to reducers.
* Records are not buffered in memory. This is essentially identical to
* {@link org.apache.spark.shuffle.hash.HashShuffleWriter}, except that it writes output in a format
* that can be served / consumed via {@link org.apache.spark.shuffle.IndexShuffleBlockResolver}.
* <p>
* This write path is inefficient for shuffles with large numbers of reduce partitions because it
* simultaneously opens separate serializers and file streams for all partitions. As a result,
* {@link SortShuffleManager} only selects this write path when
* <ul>
* <li>no Ordering is specified,</li>
* <li>no Aggregator is specific, and</li>
* <li>the number of partitions is less than
* <code>spark.shuffle.sort.bypassMergeThreshold</code>.</li>
* </ul>
*
* This code used to be part of {@link org.apache.spark.util.collection.ExternalSorter} but was
* refactored into its own class in order to reduce code complexity; see SPARK-7855 for details.
* <p>
* There have been proposals to completely remove this code path; see SPARK-6026 for details.
*/
final class BypassMergeSortShuffleWriter<K, V> extends ShuffleWriter<K, V> {

  private final Logger logger = LoggerFactory.getLogger(BypassMergeSortShuffleWriter.class);

  // Per-writer disk buffer size, from "spark.shuffle.file.buffer" (kilobytes -> bytes).
  private final int fileBufferSize;
  private final boolean transferToEnabled;
  private final int numPartitions;
  private final BlockManager blockManager;
  private final Partitioner partitioner;
  private final ShuffleWriteMetrics writeMetrics;
  private final int shuffleId;
  private final int mapId;
  private final Serializer serializer;
  private final IndexShuffleBlockResolver shuffleBlockResolver;

  /** Array of file writers, one for each partition */
  private DiskBlockObjectWriter[] partitionWriters;
  /** Result of a successful write(); consumed by stop(true). Null until write() completes. */
  @Nullable private MapStatus mapStatus;
  /** Byte length of each partition region in the final data file. */
  private long[] partitionLengths;

  /**
   * Are we in the process of stopping? Because map tasks can call stop() with success = true
   * and then call stop() with success = false if they get an exception, we want to make sure
   * we don't try deleting files, etc twice.
   */
  private boolean stopping = false;

  BypassMergeSortShuffleWriter(
      BlockManager blockManager,
      IndexShuffleBlockResolver shuffleBlockResolver,
      BypassMergeSortShuffleHandle<K, V> handle,
      int mapId,
      TaskContext taskContext,
      SparkConf conf) {
    // Use getSizeAsKb (not bytes) to maintain backwards compatibility if no units are provided
    this.fileBufferSize = (int) conf.getSizeAsKb("spark.shuffle.file.buffer", "32k") * 1024;
    this.transferToEnabled = conf.getBoolean("spark.file.transferTo", true);
    this.blockManager = blockManager;
    final ShuffleDependency<K, V, V> dep = handle.dependency();
    this.mapId = mapId;
    this.shuffleId = dep.shuffleId();
    this.partitioner = dep.partitioner();
    this.numPartitions = partitioner.numPartitions();
    this.writeMetrics = taskContext.taskMetrics().shuffleWriteMetrics();
    this.serializer = dep.serializer();
    this.shuffleBlockResolver = shuffleBlockResolver;
  }

  /**
   * Writes all records to per-partition temp files, then concatenates them into the single
   * shuffle data file and commits the matching index file. Must be called at most once.
   */
  @Override
  public void write(Iterator<Product2<K, V>> records) throws IOException {
    assert (partitionWriters == null);
    if (!records.hasNext()) {
      // No input: commit an all-zero index so reducers observe empty partitions.
      partitionLengths = new long[numPartitions];
      shuffleBlockResolver.writeIndexFileAndCommit(shuffleId, mapId, partitionLengths, null);
      mapStatus = MapStatus$.MODULE$.apply(blockManager.shuffleServerId(), partitionLengths);
      return;
    }
    final SerializerInstance serInstance = serializer.newInstance();
    final long openStartTime = System.nanoTime();
    // One open temp-file writer per reduce partition — this is why this path is only chosen
    // when the partition count is small (see class javadoc).
    partitionWriters = new DiskBlockObjectWriter[numPartitions];
    for (int i = 0; i < numPartitions; i++) {
      final Tuple2<TempShuffleBlockId, File> tempShuffleBlockIdPlusFile =
        blockManager.diskBlockManager().createTempShuffleBlock();
      final File file = tempShuffleBlockIdPlusFile._2();
      final BlockId blockId = tempShuffleBlockIdPlusFile._1();
      partitionWriters[i] =
        blockManager.getDiskWriter(blockId, file, serInstance, fileBufferSize, writeMetrics);
    }
    // Creating the file to write to and creating a disk writer both involve interacting with
    // the disk, and can take a long time in aggregate when we open many files, so should be
    // included in the shuffle write time.
    writeMetrics.incWriteTime(System.nanoTime() - openStartTime);

    while (records.hasNext()) {
      final Product2<K, V> record = records.next();
      final K key = record._1();
      partitionWriters[partitioner.getPartition(key)].write(key, record._2());
    }

    for (DiskBlockObjectWriter writer : partitionWriters) {
      writer.commitAndClose();
    }

    // Concatenate into a temp file first; writeIndexFileAndCommit atomically promotes it.
    // The finally block cleans up the temp file if the commit did not consume it.
    File output = shuffleBlockResolver.getDataFile(shuffleId, mapId);
    File tmp = Utils.tempFileWith(output);
    try {
      partitionLengths = writePartitionedFile(tmp);
      shuffleBlockResolver.writeIndexFileAndCommit(shuffleId, mapId, partitionLengths, tmp);
    } finally {
      if (tmp.exists() && !tmp.delete()) {
        logger.error("Error while deleting temp file {}", tmp.getAbsolutePath());
      }
    }
    mapStatus = MapStatus$.MODULE$.apply(blockManager.shuffleServerId(), partitionLengths);
  }

  @VisibleForTesting
  long[] getPartitionLengths() {
    return partitionLengths;
  }

  /**
   * Concatenate all of the per-partition files into a single combined file.
   *
   * @return array of lengths, in bytes, of each partition of the file (used by map output tracker).
   */
  private long[] writePartitionedFile(File outputFile) throws IOException {
    // Track location of the partition starts in the output file
    final long[] lengths = new long[numPartitions];
    if (partitionWriters == null) {
      // We were passed an empty iterator
      return lengths;
    }
    final FileOutputStream out = new FileOutputStream(outputFile, true);
    final long writeStartTime = System.nanoTime();
    boolean threwException = true;
    try {
      for (int i = 0; i < numPartitions; i++) {
        final File file = partitionWriters[i].fileSegment().file();
        if (file.exists()) {
          final FileInputStream in = new FileInputStream(file);
          boolean copyThrewException = true;
          try {
            lengths[i] = Utils.copyStream(in, out, false, transferToEnabled);
            copyThrewException = false;
          } finally {
            // Guava's Closeables.close suppresses close() failures when the copy itself
            // already threw, so the original exception is never masked.
            Closeables.close(in, copyThrewException);
          }
          if (!file.delete()) {
            logger.error("Unable to delete file for partition {}", i);
          }
        }
      }
      threwException = false;
    } finally {
      Closeables.close(out, threwException);
      writeMetrics.incWriteTime(System.nanoTime() - writeStartTime);
    }
    partitionWriters = null;
    return lengths;
  }

  /**
   * Reports the map output on success; on failure deletes all partial output.
   * Idempotent: repeat invocations return None without re-deleting anything.
   */
  @Override
  public Option<MapStatus> stop(boolean success) {
    if (stopping) {
      return None$.empty();
    } else {
      stopping = true;
      if (success) {
        if (mapStatus == null) {
          throw new IllegalStateException("Cannot call stop(true) without having called write()");
        }
        return Option.apply(mapStatus);
      } else {
        // The map task failed, so delete our output data.
        if (partitionWriters != null) {
          try {
            for (DiskBlockObjectWriter writer : partitionWriters) {
              // This method explicitly does _not_ throw exceptions:
              File file = writer.revertPartialWritesAndClose();
              if (!file.delete()) {
                logger.error("Error while deleting file {}", file.getAbsolutePath());
              }
            }
          } finally {
            partitionWriters = null;
          }
        }
        shuffleBlockResolver.removeDataByMap(shuffleId, mapId);
        return None$.empty();
      }
    }
  }
}
| |
package org.apache.commons.jcs.auxiliary.disk.jdbc;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.io.IOException;
import java.io.Serializable;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.commons.jcs.auxiliary.AuxiliaryCacheAttributes;
import org.apache.commons.jcs.auxiliary.disk.AbstractDiskCache;
import org.apache.commons.jcs.engine.CacheConstants;
import org.apache.commons.jcs.engine.behavior.ICacheElement;
import org.apache.commons.jcs.engine.behavior.ICompositeCacheManager;
import org.apache.commons.jcs.engine.behavior.IElementSerializer;
import org.apache.commons.jcs.engine.logging.behavior.ICacheEvent;
import org.apache.commons.jcs.engine.logging.behavior.ICacheEventLogger;
import org.apache.commons.jcs.engine.stats.StatElement;
import org.apache.commons.jcs.engine.stats.behavior.IStatElement;
import org.apache.commons.jcs.engine.stats.behavior.IStats;
import org.apache.commons.jcs.utils.serialization.StandardSerializer;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/**
* This is the jdbc disk cache plugin.
* <p>
* It expects a table created by the following script. The table name is configurable.
* <p>
*
* <pre>
* drop TABLE JCS_STORE;
* CREATE TABLE JCS_STORE
* (
* CACHE_KEY VARCHAR(250) NOT NULL,
* REGION VARCHAR(250) NOT NULL,
* ELEMENT BLOB,
* CREATE_TIME TIMESTAMP,
* UPDATE_TIME_SECONDS BIGINT,
* MAX_LIFE_SECONDS BIGINT,
* SYSTEM_EXPIRE_TIME_SECONDS BIGINT,
* IS_ETERNAL CHAR(1),
* PRIMARY KEY (CACHE_KEY, REGION)
* );
* </pre>
* <p>
* The cleanup thread will delete non eternal items where (now - create time) > max life seconds *
* 1000
* <p>
* To speed up the deletion the SYSTEM_EXPIRE_TIME_SECONDS is used instead. It is recommended that
* an index be created on this column is you will have over a million records.
* <p>
* @author Aaron Smuts
*/
public class JDBCDiskCache<K extends Serializable, V extends Serializable>
extends AbstractDiskCache<K, V>
{
    /** The local logger. */
    private final static Log log = LogFactory.getLog( JDBCDiskCache.class );

    /** Don't change. */
    private static final long serialVersionUID = -7169488308515823492L;

    /** Serializer used to turn elements into the ELEMENT BLOB column and back. */
    private IElementSerializer elementSerializer = new StandardSerializer();

    /** configuration */
    private JDBCDiskCacheAttributes jdbcDiskCacheAttributes;

    /** # of times update was called */
    private int updateCount = 0;

    /** # of times get was called */
    private int getCount = 0;

    /** # of times getMatching was called */
    private int getMatchingCount = 0;

    /** if count % interval == 0 then log */
    private static final int LOG_INTERVAL = 100;

    /** db connection pool */
    private JDBCDiskCachePoolAccess poolAccess = null;

    /** tracks optimization (shared so deletes can be coordinated with table maintenance) */
    private TableState tableState;
    /**
     * Constructs a JDBC Disk Cache for the provided cache attributes. The table state object is
     * used to mark deletions.
     * <p>
     * @param cattr configuration for this disk cache region
     * @param tableState shared state used to coordinate deletes with table maintenance
     * @param compositeCacheManager supplies configuration properties for a named connection pool
     */
    public JDBCDiskCache( JDBCDiskCacheAttributes cattr, TableState tableState,
                          ICompositeCacheManager compositeCacheManager )
    {
        super( cattr );
        // Attributes must be set before initializePoolAccess, which reads them.
        setTableState( tableState );
        setJdbcDiskCacheAttributes( cattr );
        if ( log.isInfoEnabled() )
        {
            log.info( "jdbcDiskCacheAttributes = " + getJdbcDiskCacheAttributes() );
        }
        // This initializes the pool access.
        setPoolAccess( initializePoolAccess( cattr, compositeCacheManager ) );
        // Initialization finished successfully, so set alive to true.
        alive = true;
    }
/**
* Registers the driver and creates a poolAccess class.
* <p>
* @param cattr
* @param compositeCacheManager
* @return JDBCDiskCachePoolAccess for testing
*/
protected JDBCDiskCachePoolAccess initializePoolAccess( JDBCDiskCacheAttributes cattr,
ICompositeCacheManager compositeCacheManager )
{
JDBCDiskCachePoolAccess poolAccess = null;
if ( cattr.getConnectionPoolName() != null )
{
JDBCDiskCachePoolAccessManager manager = JDBCDiskCachePoolAccessManager.getInstance( compositeCacheManager
.getConfigurationProperties() );
poolAccess = manager.getJDBCDiskCachePoolAccess( cattr.getConnectionPoolName() );
}
else
{
try
{
poolAccess = JDBCDiskCachePoolAccessFactory.createPoolAccess( cattr );
}
catch ( Exception e )
{
logError( getAuxiliaryCacheAttributes().getName(), "initializePoolAccess", e.getMessage() + " URL: "
+ getDiskLocation() );
log.error( "Problem getting connection.", e );
}
}
return poolAccess;
}
    /**
     * Inserts or updates. By default it will try to insert. If the item exists we will get an
     * error. It will then update. This behavior is configurable. The cache can be configured to
     * check before inserting.
     * <p>
     * @param ce the element to persist
     */
    @Override
    protected void processUpdate( ICacheElement<K, V> ce )
    {
        incrementUpdateCount();
        if ( log.isDebugEnabled() )
        {
            log.debug( "updating, ce = " + ce );
        }
        Connection con;
        try
        {
            con = getPoolAccess().getConnection();
        }
        catch ( SQLException e )
        {
            log.error( "Problem getting connection.", e );
            return;
        }
        try
        {
            // Probe the connection by creating (and immediately closing) a throwaway
            // statement; the result drives the "alive" flag used below.
            Statement sStatement = null;
            try
            {
                sStatement = con.createStatement();
                alive = true;
            }
            catch ( SQLException e )
            {
                log.error( "Problem creating statement.", e );
                alive = false;
            }
            finally
            {
                try
                {
                    if (sStatement != null)
                    {
                        sStatement.close();
                    }
                }
                catch ( SQLException e )
                {
                    log.error( "Problem closing statement.", e );
                }
            }
            if ( !alive )
            {
                if ( log.isInfoEnabled() )
                {
                    log.info( "Disk is not alive, aborting put." );
                }
                return;
            }
            if ( log.isDebugEnabled() )
            {
                log.debug( "Putting [" + ce.getKey() + "] on disk." );
            }
            byte[] element;
            try
            {
                // Serialize before touching the table; a serialization failure aborts the put.
                element = getElementSerializer().serialize( ce );
            }
            catch ( IOException e )
            {
                log.error( "Could not serialize element", e );
                return;
            }
            insertOrUpdate( ce, con, element );
        }
        finally
        {
            // Always return the connection to the pool.
            try
            {
                con.close();
            }
            catch ( SQLException e )
            {
                log.error( "Problem closing connection.", e );
            }
        }
        if ( log.isInfoEnabled() )
        {
            if ( updateCount % LOG_INTERVAL == 0 )
            {
                // TODO make a log stats method
                log.info( "Update Count [" + updateCount + "]" );
            }
        }
    }
/**
* If test before insert it true, we check to see if the element exists. If the element exists
* we will update. Otherwise, we try inserting. If this fails because the item exists, we will
* update.
* <p>
* @param ce
* @param con
* @param element
*/
private void insertOrUpdate( ICacheElement<K, V> ce, Connection con, byte[] element )
{
boolean exists = false;
// First do a query to determine if the element already exists
if ( this.getJdbcDiskCacheAttributes().isTestBeforeInsert() )
{
exists = doesElementExist( ce );
}
// If it doesn't exist, insert it, otherwise update
if ( !exists )
{
exists = insertRow( ce, con, element );
}
// update if it exists.
if ( exists )
{
updateRow( ce, con, element );
}
}
/**
* This inserts a new row in the database.
* <p>
* @param ce
* @param con
* @param element
* @return true if the insertion fails because the record exists.
*/
private boolean insertRow( ICacheElement<K, V> ce, Connection con, byte[] element )
{
boolean exists = false;
try
{
String sqlI = "insert into "
+ getJdbcDiskCacheAttributes().getTableName()
+ " (CACHE_KEY, REGION, ELEMENT, MAX_LIFE_SECONDS, IS_ETERNAL, CREATE_TIME, UPDATE_TIME_SECONDS, SYSTEM_EXPIRE_TIME_SECONDS) "
+ " values (?, ?, ?, ?, ?, ?, ?, ?)";
PreparedStatement psInsert = con.prepareStatement( sqlI );
psInsert.setString( 1, (String) ce.getKey() );
psInsert.setString( 2, this.getCacheName() );
psInsert.setBytes( 3, element );
psInsert.setLong( 4, ce.getElementAttributes().getMaxLifeSeconds() );
if ( ce.getElementAttributes().getIsEternal() )
{
psInsert.setString( 5, "T" );
}
else
{
psInsert.setString( 5, "F" );
}
Timestamp createTime = new Timestamp( ce.getElementAttributes().getCreateTime() );
psInsert.setTimestamp( 6, createTime );
long now = System.currentTimeMillis() / 1000;
psInsert.setLong( 7, now );
long expireTime = now + ce.getElementAttributes().getMaxLifeSeconds();
psInsert.setLong( 8, expireTime );
psInsert.execute();
psInsert.close();
}
catch ( SQLException e )
{
if ( e.toString().indexOf( "Violation of unique index" ) != -1
|| e.getMessage().indexOf( "Duplicate entry" ) != -1
|| e.getMessage().indexOf( "duplicate key" ) != -1
|| e.getMessage().indexOf( "primary key constraint" ) != -1 )
{
exists = true;
}
else
{
log.error( "Could not insert element", e );
}
// see if it exists, if we didn't already
if ( !exists && !this.getJdbcDiskCacheAttributes().isTestBeforeInsert() )
{
exists = doesElementExist( ce );
}
}
return exists;
}
/**
* This updates a row in the database.
* <p>
* @param ce
* @param con
* @param element
*/
private void updateRow( ICacheElement<K, V> ce, Connection con, byte[] element )
{
String sqlU = null;
try
{
sqlU = "update " + getJdbcDiskCacheAttributes().getTableName()
+ " set ELEMENT = ?, CREATE_TIME = ?, UPDATE_TIME_SECONDS = ?, " + " SYSTEM_EXPIRE_TIME_SECONDS = ? "
+ " where CACHE_KEY = ? and REGION = ?";
PreparedStatement psUpdate = con.prepareStatement( sqlU );
psUpdate.setBytes( 1, element );
Timestamp createTime = new Timestamp( ce.getElementAttributes().getCreateTime() );
psUpdate.setTimestamp( 2, createTime );
long now = System.currentTimeMillis() / 1000;
psUpdate.setLong( 3, now );
long expireTime = now + ce.getElementAttributes().getMaxLifeSeconds();
psUpdate.setLong( 4, expireTime );
psUpdate.setString( 5, (String) ce.getKey() );
psUpdate.setString( 6, this.getCacheName() );
psUpdate.execute();
psUpdate.close();
if ( log.isDebugEnabled() )
{
log.debug( "ran update " + sqlU );
}
}
catch ( SQLException e2 )
{
log.error( "e2 sql [" + sqlU + "] Exception: ", e2 );
}
}
    /**
     * Does an element exist for this key?
     * <p>
     * Only the key column is selected, so this stays cheap even for large elements.
     * On any SQL error the element is reported as absent.
     * <p>
     * @param ce element whose key is checked
     * @return boolean true if a row exists for this key in this region
     */
    protected boolean doesElementExist( ICacheElement<K, V> ce )
    {
        boolean exists = false;
        Connection con;
        try
        {
            con = getPoolAccess().getConnection();
        }
        catch ( SQLException e )
        {
            log.error( "Problem getting connection.", e );
            return exists;
        }
        PreparedStatement psSelect = null;
        try
        {
            // don't select the element, since we want this to be fast.
            String sqlS = "select CACHE_KEY from " + getJdbcDiskCacheAttributes().getTableName()
                + " where REGION = ? and CACHE_KEY = ?";
            psSelect = con.prepareStatement( sqlS );
            psSelect.setString( 1, this.getCacheName() );
            psSelect.setString( 2, (String) ce.getKey() );
            ResultSet rs = psSelect.executeQuery();
            if ( rs.next() )
            {
                exists = true;
            }
            if ( log.isDebugEnabled() )
            {
                log.debug( "[" + ce.getKey() + "] existing status is " + exists );
            }
            // On the exception path rs is not closed here, but closing psSelect in the
            // finally block below also closes its ResultSet per the JDBC contract.
            rs.close();
        }
        catch ( SQLException e )
        {
            log.error( "Problem looking for item before insert.", e );
        }
        finally
        {
            try
            {
                if ( psSelect != null )
                {
                    psSelect.close();
                }
            }
            catch ( SQLException e1 )
            {
                log.error( "Problem closing statement.", e1 );
            }
            try
            {
                con.close();
            }
            catch ( SQLException e )
            {
                log.error( "Problem closing connection.", e );
            }
        }
        return exists;
    }
    /**
     * Queries the database for the value. If it gets a result, the value is deserialized.
     * <p>
     * @param key cache key; matched against CACHE_KEY via key.toString()
     * @return ICacheElement the deserialized element, or null when missing, dead, or on error
     * @see org.apache.commons.jcs.auxiliary.disk.AbstractDiskCache#doGet(java.io.Serializable)
     */
    @Override
    protected ICacheElement<K, V> processGet( K key )
    {
        incrementGetCount();
        if ( log.isDebugEnabled() )
        {
            log.debug( "Getting [" + key + "] from disk" );
        }
        if ( !alive )
        {
            return null;
        }
        ICacheElement<K, V> obj = null;
        byte[] data = null;
        try
        {
            // region, key
            String selectString = "select ELEMENT from " + getJdbcDiskCacheAttributes().getTableName()
                + " where REGION = ? and CACHE_KEY = ?";
            Connection con = getPoolAccess().getConnection();
            // Nested try/finally blocks guarantee ResultSet, Statement, and Connection are
            // released in that order regardless of where a failure occurs.
            try
            {
                PreparedStatement psSelect = null;
                try
                {
                    psSelect = con.prepareStatement( selectString );
                    psSelect.setString( 1, this.getCacheName() );
                    psSelect.setString( 2, key.toString() );
                    ResultSet rs = psSelect.executeQuery();
                    try
                    {
                        if ( rs.next() )
                        {
                            data = rs.getBytes( 1 );
                        }
                        if ( data != null )
                        {
                            try
                            {
                                // USE THE SERIALIZER
                                obj = getElementSerializer().deSerialize( data );
                            }
                            catch ( IOException ioe )
                            {
                                log.error( "Problem getting item for key [" + key + "]", ioe );
                            }
                            catch ( Exception e )
                            {
                                log.error( "Problem getting item for key [" + key + "]", e );
                            }
                        }
                    }
                    finally
                    {
                        if ( rs != null )
                        {
                            rs.close();
                        }
                    }
                }
                finally
                {
                    if ( psSelect != null )
                    {
                        psSelect.close();
                    }
                }
            }
            finally
            {
                if ( con != null )
                {
                    con.close();
                }
            }
        }
        catch ( SQLException sqle )
        {
            log.error( "Caught a SQL exception trying to get the item for key [" + key + "]", sqle );
        }
        if ( log.isInfoEnabled() )
        {
            if ( getCount % LOG_INTERVAL == 0 )
            {
                // TODO make a log stats method
                log.info( "Get Count [" + getCount + "]" );
            }
        }
        return obj;
    }
    /**
     * This will run a like query. It will try to construct a usable query but different
     * implementations will be needed to adjust the syntax.
     * <p>
     * @param pattern translated to a SQL LIKE argument by constructLikeParameterFromPattern
     * @return key,value map of all matching rows (empty on no match; null if the cache is dead)
     */
    @Override
    protected Map<K, ICacheElement<K, V>> processGetMatching( String pattern )
    {
        incrementGetMatchingCount();
        if ( log.isDebugEnabled() )
        {
            log.debug( "Getting [" + pattern + "] from disk" );
        }
        if ( !alive )
        {
            return null;
        }
        Map<K, ICacheElement<K, V>> results = new HashMap<K, ICacheElement<K, V>>();
        try
        {
            // region, key
            String selectString = "select CACHE_KEY, ELEMENT from " + getJdbcDiskCacheAttributes().getTableName()
                + " where REGION = ? and CACHE_KEY like ?";
            Connection con = getPoolAccess().getConnection();
            // Nested try/finally blocks guarantee ResultSet, Statement, and Connection are
            // released in that order regardless of where a failure occurs.
            try
            {
                PreparedStatement psSelect = null;
                try
                {
                    psSelect = con.prepareStatement( selectString );
                    psSelect.setString( 1, this.getCacheName() );
                    psSelect.setString( 2, constructLikeParameterFromPattern( pattern ) );
                    ResultSet rs = psSelect.executeQuery();
                    try
                    {
                        while ( rs.next() )
                        {
                            String key = rs.getString( 1 );
                            byte[] data = rs.getBytes( 2 );
                            if ( data != null )
                            {
                                try
                                {
                                    // USE THE SERIALIZER
                                    ICacheElement<K, V> value = getElementSerializer().deSerialize( data );
                                    // NOTE(review): unchecked cast — assumes K is String
                                    // (keys are stored via setString elsewhere); confirm.
                                    results.put( (K) key, value );
                                }
                                catch ( IOException ioe )
                                {
                                    log.error( "Problem getting items for pattern [" + pattern + "]", ioe );
                                }
                                catch ( Exception e )
                                {
                                    log.error( "Problem getting items for pattern [" + pattern + "]", e );
                                }
                            }
                        }
                    }
                    finally
                    {
                        if ( rs != null )
                        {
                            rs.close();
                        }
                    }
                }
                finally
                {
                    if ( psSelect != null )
                    {
                        psSelect.close();
                    }
                }
            }
            finally
            {
                if ( con != null )
                {
                    con.close();
                }
            }
        }
        catch ( SQLException sqle )
        {
            log.error( "Caught a SQL exception trying to get items for pattern [" + pattern + "]", sqle );
        }
        if ( log.isInfoEnabled() )
        {
            if ( getMatchingCount % LOG_INTERVAL == 0 )
            {
                // TODO make a log stats method
                log.info( "Get Matching Count [" + getMatchingCount + "]" );
            }
        }
        return results;
    }
/**
* @param pattern
* @return String to use in the like query.
*/
public String constructLikeParameterFromPattern( String pattern )
{
String likePattern = pattern.replaceAll( "\\.\\+", "%" );
likePattern = likePattern.replaceAll( "\\.", "_" );
if ( log.isDebugEnabled() )
{
log.debug( "pattern = [" + likePattern + "]" );
}
return likePattern;
}
    /**
     * Removes the row (or, for group keys ending with the name component delimiter, all rows
     * of the group) for this key. Current implementation always results in a disk orphan.
     * <p>
     * NOTE(review): despite the original contract ("true if the removal was successful"),
     * this implementation unconditionally returns false — even when rows were deleted.
     * Callers must not rely on the return value.
     * <p>
     * @param key the key to remove; a String key ending in the delimiter triggers a prefix delete
     * @return boolean always false in the current implementation
     */
    @Override
    protected boolean processRemove( K key )
    {
        // remove single item.
        String sql = "delete from " + getJdbcDiskCacheAttributes().getTableName()
            + " where REGION = ? and CACHE_KEY = ?";
        try
        {
            boolean partial = false;
            if ( key instanceof String && key.toString().endsWith( CacheConstants.NAME_COMPONENT_DELIMITER ) )
            {
                // remove all keys of the same name group.
                sql = "delete from " + getJdbcDiskCacheAttributes().getTableName()
                    + " where REGION = ? and CACHE_KEY like ?";
                partial = true;
            }
            Connection con = getPoolAccess().getConnection();
            PreparedStatement psSelect = null;
            try
            {
                psSelect = con.prepareStatement( sql );
                psSelect.setString( 1, this.getCacheName() );
                if ( partial )
                {
                    // Prefix match for group removal.
                    psSelect.setString( 2, key.toString() + "%" );
                }
                else
                {
                    psSelect.setString( 2, key.toString() );
                }
                psSelect.executeUpdate();
                alive = true;
            }
            catch ( SQLException e )
            {
                log.error( "Problem creating statement. sql [" + sql + "]", e );
                alive = false;
            }
            finally
            {
                try
                {
                    if ( psSelect != null )
                    {
                        psSelect.close();
                    }
                    con.close();
                }
                catch ( SQLException e1 )
                {
                    log.error( "Problem closing statement.", e1 );
                }
            }
        }
        catch ( Exception e )
        {
            log.error( "Problem updating cache.", e );
            reset();
        }
        return false;
    }
    /**
     * This should remove all elements. The auxiliary can be configured to forbid this behavior. If
     * remove all is not allowed, the method balks.
     */
    @Override
    protected void processRemoveAll()
    {
        // it should never get here from the abstract disk cache.
        if ( this.jdbcDiskCacheAttributes.isAllowRemoveAll() )
        {
            try
            {
                // Deletes every row belonging to this region only.
                String sql = "delete from " + getJdbcDiskCacheAttributes().getTableName() + " where REGION = ?";
                Connection con = getPoolAccess().getConnection();
                PreparedStatement psDelete = null;
                try
                {
                    psDelete = con.prepareStatement( sql );
                    psDelete.setString( 1, this.getCacheName() );
                    alive = true;
                    psDelete.executeUpdate();
                }
                catch ( SQLException e )
                {
                    log.error( "Problem creating statement.", e );
                    alive = false;
                }
                finally
                {
                    try
                    {
                        if ( psDelete != null )
                        {
                            psDelete.close();
                        }
                        con.close();
                    }
                    catch ( SQLException e1 )
                    {
                        log.error( "Problem closing statement.", e1 );
                    }
                }
            }
            catch ( Exception e )
            {
                log.error( "Problem removing all.", e );
                reset();
            }
        }
        else
        {
            if ( log.isInfoEnabled() )
            {
                log.info( "RemoveAll was requested but the request was not fulfilled: allowRemoveAll is set to false." );
            }
        }
    }
    /**
     * Removes expired rows: deletes every non-eternal ("IS_ETERNAL = 'F'") row of this
     * region whose SYSTEM_EXPIRE_TIME_SECONDS is in the past. Marks the table state
     * DELETE_RUNNING for the duration and restores it to FREE in all cases.
     * <p>
     * @return the number deleted
     */
    protected int deleteExpired()
    {
        int deleted = 0;
        try
        {
            getTableState().setState( TableState.DELETE_RUNNING );
            // Expiration times are stored in seconds, not milliseconds.
            long now = System.currentTimeMillis() / 1000;
            // This is to slow when we push over a million records
            // String sql = "delete from " +
            // getJdbcDiskCacheAttributes().getTableName() + " where REGION = '"
            // + this.getCacheName() + "' and IS_ETERNAL = 'F' and (" + now
            // + " - UPDATE_TIME_SECONDS) > MAX_LIFE_SECONDS";
            String sql = "delete from " + getJdbcDiskCacheAttributes().getTableName()
                + " where IS_ETERNAL = ? and REGION = ? and ? > SYSTEM_EXPIRE_TIME_SECONDS";
            Connection con = getPoolAccess().getConnection();
            PreparedStatement psDelete = null;
            try
            {
                psDelete = con.prepareStatement( sql );
                psDelete.setString( 1, "F" );
                psDelete.setString( 2, this.getCacheName() );
                psDelete.setLong( 3, now );
                alive = true;
                // executeUpdate returns the number of rows removed.
                deleted = psDelete.executeUpdate();
            }
            catch ( SQLException e )
            {
                log.error( "Problem creating statement.", e );
                alive = false;
            }
            finally
            {
                // Close statement and connection; log but never propagate close failures.
                try
                {
                    if ( psDelete != null )
                    {
                        psDelete.close();
                    }
                    con.close();
                }
                catch ( SQLException e1 )
                {
                    log.error( "Problem closing statement.", e1 );
                }
            }
            logApplicationEvent( getAuxiliaryCacheAttributes().getName(), "deleteExpired",
                                 "Deleted expired elements. URL: " + getDiskLocation() );
        }
        catch ( Exception e )
        {
            logError( getAuxiliaryCacheAttributes().getName(), "deleteExpired", e.getMessage() + " URL: "
                + getDiskLocation() );
            log.error( "Problem removing expired elements from the table.", e );
            reset();
        }
        finally
        {
            // Always release the table, even after a failure.
            getTableState().setState( TableState.FREE );
        }
        return deleted;
    }
    /**
     * Typically this is used to handle errors by last resort, force content update, or removeall.
     * <p>
     * No-op in this implementation; subclasses may override to recover from DB errors.
     */
    public void reset()
    {
        // nothing
    }
    /**
     * Shuts down the pool. A DISPOSE cache event is logged even when shutdown fails;
     * shutdown errors themselves are logged and swallowed.
     */
    @Override
    public void processDispose()
    {
        // Unchecked cast: "none" is a placeholder key for the event log only.
        ICacheEvent<K> cacheEvent = createICacheEvent( cacheName, (K)"none", ICacheEventLogger.DISPOSE_EVENT );
        try
        {
            try
            {
                getPoolAccess().shutdownDriver();
            }
            catch ( Exception e )
            {
                log.error( "Problem shutting down.", e );
            }
        }
        finally
        {
            logICacheEvent( cacheEvent );
        }
    }
    /**
     * Returns the current cache size. Just does a count(*) for the region.
     * <p>
     * Returns 0 when a connection cannot be obtained or the query fails
     * (the error is logged, not propagated).
     * <p>
     * @return The size value
     */
    @Override
    public int getSize()
    {
        int size = 0;
        // region, key
        String selectString = "select count(*) from " + getJdbcDiskCacheAttributes().getTableName()
            + " where REGION = ?";
        Connection con;
        try
        {
            con = getPoolAccess().getConnection();
        }
        catch ( SQLException e1 )
        {
            // No connection -> report size 0 rather than fail the caller.
            log.error( "Problem getting connection.", e1 );
            return size;
        }
        try
        {
            PreparedStatement psSelect = null;
            try
            {
                psSelect = con.prepareStatement( selectString );
                psSelect.setString( 1, this.getCacheName() );
                ResultSet rs = null;
                rs = psSelect.executeQuery();
                // Nested finally blocks guarantee ResultSet, statement and
                // connection are each closed regardless of where a failure occurs.
                try
                {
                    if ( rs.next() )
                    {
                        size = rs.getInt( 1 );
                    }
                }
                finally
                {
                    if ( rs != null )
                    {
                        rs.close();
                    }
                }
            }
            finally
            {
                if ( psSelect != null )
                {
                    psSelect.close();
                }
            }
        }
        catch ( SQLException e )
        {
            log.error( "Problem getting size.", e );
        }
        finally
        {
            try
            {
                con.close();
            }
            catch ( SQLException e )
            {
                log.error( "Problem closing connection.", e );
            }
        }
        return size;
    }
/**
* Return the keys in this cache.
* <p>
* @see org.apache.commons.jcs.auxiliary.disk.AbstractDiskCache#getKeySet()
*/
@Override
public Set<K> getKeySet() throws IOException
{
throw new UnsupportedOperationException( "Groups not implemented." );
// return null;
}
    /**
     * Sets the serializer used to convert elements to and from their stored byte form.
     * <p>
     * @param elementSerializer The elementSerializer to set.
     */
    @Override
    public void setElementSerializer( IElementSerializer elementSerializer )
    {
        this.elementSerializer = elementSerializer;
    }
    /**
     * Returns the serializer used to convert elements to and from their stored byte form.
     * <p>
     * @return Returns the elementSerializer.
     */
    @Override
    public IElementSerializer getElementSerializer()
    {
        return elementSerializer;
    }
    /** Safely increments the update counter; synchronized because ++ is not atomic. */
    private synchronized void incrementUpdateCount()
    {
        updateCount++;
    }
    /** Safely increments the get counter; synchronized because ++ is not atomic. */
    private synchronized void incrementGetCount()
    {
        getCount++;
    }
    /** Safely increments the getMatching counter; synchronized because ++ is not atomic. */
    private synchronized void incrementGetMatchingCount()
    {
        getMatchingCount++;
    }
    /**
     * Sets the configuration attributes for this JDBC disk cache.
     * <p>
     * @param jdbcDiskCacheAttributes The jdbcDiskCacheAttributes to set.
     */
    protected void setJdbcDiskCacheAttributes( JDBCDiskCacheAttributes jdbcDiskCacheAttributes )
    {
        this.jdbcDiskCacheAttributes = jdbcDiskCacheAttributes;
    }
    /**
     * Returns the configuration attributes for this JDBC disk cache.
     * <p>
     * @return Returns the jdbcDiskCacheAttributes.
     */
    protected JDBCDiskCacheAttributes getJdbcDiskCacheAttributes()
    {
        return jdbcDiskCacheAttributes;
    }
    /**
     * Returns the JDBC attributes viewed through the generic auxiliary interface.
     * <p>
     * @return Returns the AuxiliaryCacheAttributes.
     */
    public AuxiliaryCacheAttributes getAuxiliaryCacheAttributes()
    {
        return this.getJdbcDiskCacheAttributes();
    }
/**
* Extends the parent stats.
* <p>
* @return IStats
*/
@Override
public IStats getStatistics()
{
IStats stats = super.getStatistics();
stats.setTypeName( "JDBC/Abstract Disk Cache" );
stats.getStatElements();
ArrayList<IStatElement> elems = new ArrayList<IStatElement>();
IStatElement se = null;
se = new StatElement();
se.setName( "Update Count" );
se.setData( "" + updateCount );
elems.add( se );
se = new StatElement();
se.setName( "Get Count" );
se.setData( "" + getCount );
elems.add( se );
se = new StatElement();
se.setName( "Get Matching Count" );
se.setData( "" + getMatchingCount );
elems.add( se );
se = new StatElement();
se.setName( "Size" );
se.setData( "" + getSize() );
elems.add( se );
se = new StatElement();
se.setName( "Active DB Connections" );
se.setData( "" + getPoolAccess().getNumActiveInPool() );
elems.add( se );
se = new StatElement();
se.setName( "Idle DB Connections" );
se.setData( "" + getPoolAccess().getNumIdleInPool() );
elems.add( se );
se = new StatElement();
se.setName( "DB URL" );
if ( getPoolAccess() != null )
{
se.setData( "" + getPoolAccess().getPoolUrl() );
}
else
{
se.setData( "" + getJdbcDiskCacheAttributes().getUrl() );
}
elems.add( se );
// get the stats from the event queue too
// get as array, convert to list, add list to our outer list
IStatElement[] eqSEs = stats.getStatElements();
List<IStatElement> eqL = Arrays.asList( eqSEs );
elems.addAll( eqL );
// get an array and put them in the Stats object
IStatElement[] ses = elems.toArray( new StatElement[0] );
stats.setStatElements( ses );
return stats;
}
/**
* Returns the name of the table.
* <p>
* @return the table name or UNDEFINED
*/
protected String getTableName()
{
String name = "UNDEFINED";
if ( this.getJdbcDiskCacheAttributes() != null )
{
name = this.getJdbcDiskCacheAttributes().getTableName();
}
return name;
}
    /**
     * Sets the shared table state used to coordinate maintenance operations.
     * <p>
     * @param tableState The tableState to set.
     */
    public void setTableState( TableState tableState )
    {
        this.tableState = tableState;
    }
    /**
     * Returns the shared table state used to coordinate maintenance operations.
     * <p>
     * @return Returns the tableState.
     */
    public TableState getTableState()
    {
        return tableState;
    }
    /**
     * This is used by the event logging.
     * <p>
     * @return the location of the disk, either path or ip.
     */
    @Override
    protected String getDiskLocation()
    {
        // Reads the field directly; equivalent to getJdbcDiskCacheAttributes().getUrl().
        return this.jdbcDiskCacheAttributes.getUrl();
    }
    /**
     * Sets the connection-pool accessor used for all DB work.
     * <p>
     * @param poolAccess the poolAccess to set
     */
    protected void setPoolAccess( JDBCDiskCachePoolAccess poolAccess )
    {
        this.poolAccess = poolAccess;
    }
    /**
     * Public so managers can access it.
     * @return the poolAccess
     */
    public JDBCDiskCachePoolAccess getPoolAccess()
    {
        return poolAccess;
    }
    /**
     * For debugging.
     * <p>
     * @return this.getStats();
     */
    @Override
    public String toString()
    {
        return this.getStats();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache.distributed;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.locks.Lock;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.cache.CachePeekMode;
import org.apache.ignite.events.Event;
import org.apache.ignite.internal.util.tostring.GridToStringExclude;
import org.apache.ignite.internal.util.typedef.internal.S;
import org.apache.ignite.lang.IgniteFuture;
import org.apache.ignite.lang.IgnitePredicate;
import org.apache.ignite.testframework.MvccFeatureChecker;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
import org.junit.Test;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.apache.ignite.events.EventType.EVTS_CACHE;
import static org.apache.ignite.events.EventType.EVT_CACHE_OBJECT_LOCKED;
import static org.apache.ignite.events.EventType.EVT_CACHE_OBJECT_PUT;
import static org.apache.ignite.events.EventType.EVT_CACHE_OBJECT_UNLOCKED;
/**
* Multi-node cache test.
*/
public abstract class GridCacheMultiNodeAbstractTest extends GridCommonAbstractTest {
/** Grid 1. */
private static Ignite ignite1;
/** Grid 2. */
private static Ignite ignite2;
/** Grid 3. */
private static Ignite ignite3;
/** Cache 1. */
private static IgniteCache<Integer, String> cache1;
/** Cache 2. */
private static IgniteCache<Integer, String> cache2;
/** Cache 3. */
private static IgniteCache<Integer, String> cache3;
/** Listeners. */
private static Collection<CacheEventListener> lsnrs = new ArrayList<>();
/** {@inheritDoc} */
@Override protected void beforeTestsStarted() throws Exception {
ignite1 = startGrid(1);
ignite2 = startGrid(2);
ignite3 = startGrid(3);
cache1 = ignite1.cache(DEFAULT_CACHE_NAME);
cache2 = ignite2.cache(DEFAULT_CACHE_NAME);
cache3 = ignite3.cache(DEFAULT_CACHE_NAME);
}
/** {@inheritDoc} */
@Override protected void beforeTest() throws Exception {
MvccFeatureChecker.skipIfNotSupported(MvccFeatureChecker.Feature.CACHE_EVENTS);
}
/** {@inheritDoc} */
@Override protected void afterTestsStopped() throws Exception {
cache1 = null;
cache2 = null;
cache3 = null;
ignite1 = null;
ignite2 = null;
ignite3 = null;
}
/** {@inheritDoc} */
@Override protected void afterTest() throws Exception {
removeListeners(ignite1);
removeListeners(ignite2);
removeListeners(ignite3);
lsnrs.clear();
}
/**
* @param ignite Grid to remove listeners from.
*/
private void removeListeners(Ignite ignite) {
if (ignite != null)
for (CacheEventListener lsnr : lsnrs) {
assert lsnr.latch.getCount() == 0;
ignite.events().stopLocalListen(lsnr);
}
}
/**
*
* @param ignite Grid.
* @param lsnr Listener.
* @param type Event types.
*/
private void addListener(Ignite ignite, CacheEventListener lsnr, int... type) {
if (!lsnrs.contains(lsnr))
lsnrs.add(lsnr);
ignite.events().localListen(lsnr, type.length == 0 ? EVTS_CACHE : type);
}
/**
* @throws Exception If test failed.
*/
@Test
public void testBasicPut() throws Exception {
checkPuts(3, ignite1);
}
/**
* @throws Exception If test fails.
*/
@Test
public void testMultiNodePut() throws Exception {
checkPuts(1, ignite1, ignite2, ignite3);
checkPuts(1, ignite2, ignite1, ignite3);
checkPuts(1, ignite3, ignite1, ignite2);
}
/**
* @throws Exception If test fails.
*/
@Test
public void testMultiValuePut() throws Exception {
checkPuts(1, ignite1);
}
/**
* @throws Exception If test fails.
*/
@Test
public void testMultiValueMultiNodePut() throws Exception {
checkPuts(3, ignite1, ignite2, ignite3);
checkPuts(3, ignite2, ignite1, ignite3);
checkPuts(3, ignite3, ignite1, ignite2);
}
/**
* Checks cache puts.
*
* @param cnt Count of puts.
* @param ignites Grids.
* @throws Exception If check fails.
*/
private void checkPuts(int cnt, Ignite... ignites) throws Exception {
MvccFeatureChecker.skipIfNotSupported(MvccFeatureChecker.Feature.ENTRY_LOCK);
CountDownLatch latch = new CountDownLatch(ignites.length * cnt);
CacheEventListener lsnr = new CacheEventListener(latch, EVT_CACHE_OBJECT_PUT);
for (Ignite ignite : ignites)
addListener(ignite, lsnr);
IgniteCache<Integer, String> cache1 = ignites[0].cache(DEFAULT_CACHE_NAME);
for (int i = 1; i <= cnt; i++)
cache1.put(i, "val" + i);
for (int i = 1; i <= cnt; i++) {
String v = cache1.get(i);
assert v != null;
assert v.equals("val" + i);
}
latch.await(10, SECONDS);
for (Ignite ignite : ignites) {
IgniteCache<Integer, String> cache = ignite.cache(DEFAULT_CACHE_NAME);
if (cache == cache1)
continue;
for (int i = 1; i <= cnt; i++) {
String v = cache.get(i);
assert v != null;
assert v.equals("val" + i);
}
}
assert !cache1.isLocalLocked(1, false);
assert !cache1.isLocalLocked(2, false);
assert !cache1.isLocalLocked(3, false);
for (Ignite ignite : ignites)
ignite.events().stopLocalListen(lsnr);
}
/**
* @throws Exception If test failed.
*/
@Test
public void testLockUnlock() throws Exception {
MvccFeatureChecker.skipIfNotSupported(MvccFeatureChecker.Feature.ENTRY_LOCK);
CacheEventListener lockLsnr1 = new CacheEventListener(ignite1, new CountDownLatch(1), EVT_CACHE_OBJECT_LOCKED);
addListener(ignite1, lockLsnr1, EVT_CACHE_OBJECT_LOCKED);
CacheEventListener unlockLsnr = new CacheEventListener(new CountDownLatch(3), EVT_CACHE_OBJECT_UNLOCKED);
addListener(ignite1, unlockLsnr, EVT_CACHE_OBJECT_UNLOCKED);
addListener(ignite2, unlockLsnr, EVT_CACHE_OBJECT_UNLOCKED);
addListener(ignite3, unlockLsnr, EVT_CACHE_OBJECT_UNLOCKED);
Lock lock = cache1.lock(1);
assert lock.tryLock(10000, MILLISECONDS);
try {
assert cache1.isLocalLocked(1, false);
assert cache2.isLocalLocked(1, false);
assert cache3.isLocalLocked(1, false);
assert cache1.isLocalLocked(1, true);
assert !cache2.isLocalLocked(1, true);
assert !cache3.isLocalLocked(1, true);
info("Acquired lock for cache1.");
}
finally {
lock.unlock();
}
Thread.sleep(50);
unlockLsnr.latch.await(10, SECONDS);
assert !cache1.isLocalLocked(1, false);
assert !cache2.isLocalLocked(2, false);
assert !cache3.isLocalLocked(3, false);
assert !cache1.isLocalLocked(1, true);
assert !cache2.isLocalLocked(1, true);
assert !cache3.isLocalLocked(1, true);
}
/**
* @throws Exception If test failed.
*/
@Test
public void testConcurrentPutAsync() throws Exception {
CountDownLatch latch = new CountDownLatch(9);
CacheEventListener lsnr = new CacheEventListener(latch, EVT_CACHE_OBJECT_PUT);
addListener(ignite1, lsnr);
addListener(ignite2, lsnr);
addListener(ignite3, lsnr);
IgniteFuture<String> f1 = cache1.getAndPutAsync(2, "val1");
IgniteFuture<String> f2 = cache2.getAndPutAsync(2, "val2");
IgniteFuture<String> f3 = cache3.getAndPutAsync(2, "val3");
String v1 = f1.get(20000);
info("Got v1 from future1: " + v1);
String v2 = f2.get(20000);
info("Got v2 from future2: " + v2);
String v3 = f3.get(20000);
info("Got v3 from future3: " + v3);
latch.await(60, SECONDS);
info("Woke up from latch: " + latch);
v1 = cache1.get(1);
v2 = cache2.get(1);
v3 = cache3.get(1);
info("Cache1 value for key 1: " + v1);
info("Cache2 value for key 1: " + v2);
info("Cache3 value for key 1: " + v3);
assert v1 != null;
assert v2 != null;
assert v3 != null;
assert v1.equals(v2) : "Mismatch [v1=" + v1 + ", v2=" + v2 + ']';
assert v1.equals(v3) : "Mismatch [v1=" + v1 + ", v3=" + v3 + ']';
}
/**
* @throws Exception If test failed.
*/
@Test
public void testGlobalClearAll() throws Exception {
cache1.put(1, "val1");
cache2.put(2, "val2");
cache3.put(3, "val3");
assertEquals(3, cache1.localSize(CachePeekMode.ALL));
assertEquals(3, cache2.localSize(CachePeekMode.ALL));
assertEquals(3, cache3.localSize(CachePeekMode.ALL));
cache1.clear();
assertEquals(0, cache1.localSize(CachePeekMode.ALL));
assertEquals(0, cache2.localSize(CachePeekMode.ALL));
assertEquals(0, cache3.localSize(CachePeekMode.ALL));
}
/**
* Event listener.
*/
private class CacheEventListener implements IgnitePredicate<Event> {
/** */
@GridToStringExclude
private final Ignite ignite;
/** Wait latch. */
@GridToStringExclude
private CountDownLatch latch;
/** Events to accept. */
private final List<Integer> evts;
/**
* @param latch Wait latch.
* @param evts Events.
*/
CacheEventListener(CountDownLatch latch, Integer... evts) {
this.latch = latch;
ignite = null;
assert evts.length > 0;
this.evts = Arrays.asList(evts);
}
/**
* @param ignite Grid.
* @param latch Wait latch.
* @param evts Events.
*/
CacheEventListener(Ignite ignite, CountDownLatch latch, Integer... evts) {
this.ignite = ignite;
this.latch = latch;
assert evts.length > 0;
this.evts = Arrays.asList(evts);
}
/**
* @param latch New latch.
*/
void setLatch(CountDownLatch latch) {
this.latch = latch;
}
/** {@inheritDoc} */
@Override public boolean apply(Event evt) {
info("Grid cache event [type=" + evt.type() + ", latch=" + latch.getCount() + ", evt=" + evt + ']');
if (evts.contains(evt.type()))
if (ignite == null || evt.node().id().equals(ignite.cluster().localNode().id())) {
if (latch.getCount() > 0)
latch.countDown();
else
info("Received unexpected cache event: " + evt);
}
return true;
}
/** {@inheritDoc} */
@Override public String toString() {
return S.toString(CacheEventListener.class, this, "latchCount", latch.getCount(),
"grid", ignite != null ? ignite.name() : "N/A", "evts", evts);
}
}
}
| |
// Copyright (C) 2013 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.googlesource.gerrit.plugins.github.oauth;
import static com.google.gerrit.reviewdb.client.AccountExternalId.SCHEME_USERNAME;
import static javax.servlet.http.HttpServletResponse.SC_FORBIDDEN;
import static javax.servlet.http.HttpServletResponse.SC_UNAUTHORIZED;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URL;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.HashSet;
import java.util.TreeSet;
import java.util.concurrent.ExecutionException;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletRequestWrapper;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.lang.StringUtils;
import org.apache.http.HttpResponse;
import org.apache.http.HttpStatus;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity;
import org.slf4j.LoggerFactory;
import com.google.common.base.Objects;
import com.google.common.base.Strings;
import com.google.common.collect.Iterators;
import com.google.common.collect.Sets;
import com.google.gerrit.extensions.restapi.ResourceNotFoundException;
import com.google.gerrit.httpd.WebSession;
import com.google.gerrit.httpd.XGerritAuth;
import com.google.gerrit.reviewdb.client.AccountExternalId;
import com.google.gerrit.reviewdb.server.ReviewDb;
import com.google.gerrit.server.AccessPath;
import com.google.gerrit.server.account.AccountCache;
import com.google.gerrit.server.account.AccountException;
import com.google.gerrit.server.account.AccountManager;
import com.google.gerrit.server.account.AuthRequest;
import com.google.gerrit.server.account.AuthResult;
import com.google.gerrit.server.account.PutHttpPassword;
import com.google.gwtorm.server.OrmException;
import com.google.inject.Inject;
import com.google.inject.Provider;
import com.google.inject.Singleton;
import com.googlesource.gerrit.plugins.github.oauth.OAuthProtocol.AccessToken;
import com.googlesource.gerrit.plugins.github.oauth.OAuthProtocol.Scope;
/**
 * Servlet filter authenticating Git-over-HTTP requests with a GitHub OAuth
 * access token passed as HTTP Basic credentials of the form
 * {@code <token>:x-oauth-basic}. The token is resolved to a Gerrit user; if
 * that user has no Gerrit HTTP password yet, a random one is generated via
 * Gerrit's REST API so standard Basic authentication can proceed downstream.
 */
@Singleton
public class OAuthGitFilter implements Filter {
  // Fixed "password" GitHub expects alongside an OAuth token in Basic auth.
  private static final String GITHUB_X_OAUTH_BASIC = "x-oauth-basic";
  private static final org.slf4j.Logger log = LoggerFactory
      .getLogger(OAuthGitFilter.class);
  public static final String GIT_REALM_NAME =
      "GitHub authentication for Gerrit Code Review";
  private static final String GIT_AUTHORIZATION_HEADER = "Authorization";
  private static final String GIT_AUTHENTICATION_BASIC = "Basic ";
  private final OAuthCache oauthCache;
  private final AccountCache accountCache;
  private final GitHubHttpProvider httpClientProvider;
  private final GitHubOAuthConfig config;
  private final OAuthCookieProvider cookieProvider;
  private final XGerritAuth xGerritAuth;

  /**
   * Request wrapper that injects a Basic {@code Authorization} header built
   * from the given username/password, overriding any incoming value.
   */
  public static class BasicAuthHttpRequest extends HttpServletRequestWrapper {
    // Extra headers layered over the wrapped request (currently just Authorization).
    private HashMap<String, String> headers = new HashMap<String, String>();

    public BasicAuthHttpRequest(HttpServletRequest request, String username,
        String password) {
      super(request);

      try {
        headers.put(
            GIT_AUTHORIZATION_HEADER,
            GIT_AUTHENTICATION_BASIC
                + Base64.encodeBase64String((username + ":" + password)
                    .getBytes(OAuthGitFilter.encoding(request))));
      } catch (UnsupportedEncodingException e) {
        // This cannot really happen as we have already used the encoding for
        // decoding the request
      }
    }

    @Override
    public Enumeration<String> getHeaderNames() {
      // Union of injected header names and the wrapped request's header names.
      final Enumeration<String> wrappedHeaderNames = super.getHeaderNames();
      HashSet<String> headerNames = new HashSet<String>(headers.keySet());
      while (wrappedHeaderNames.hasMoreElements()) {
        headerNames.add(wrappedHeaderNames.nextElement());
      }
      return Iterators.asEnumeration(headerNames.iterator());
    }

    @Override
    public String getHeader(String name) {
      // Injected headers take precedence over the wrapped request's headers.
      String headerValue = headers.get(name);
      if (headerValue != null) {
        return headerValue;
      } else {
        return super.getHeader(name);
      }
    }
  }

  @Inject
  public OAuthGitFilter(OAuthCache oauthCache, AccountCache accountCache,
      GitHubHttpProvider httpClientProvider, GitHubOAuthConfig config,
      XGerritAuth xGerritAuth) {
    this.oauthCache = oauthCache;
    this.accountCache = accountCache;
    this.httpClientProvider = httpClientProvider;
    this.config = config;
    this.cookieProvider = new OAuthCookieProvider(TokenCipher.get(), config);
    this.xGerritAuth = xGerritAuth;
  }

  @Override
  public void init(FilterConfig filterConfig) throws ServletException {
    // No initialization required.
  }

  /**
   * Resolves the OAuth token in the request to an authentication cookie, then
   * either forwards the request anonymously, or re-wraps it with the user's
   * Gerrit HTTP password (generating one first if the account has none).
   * A null cookie means an error response was already sent.
   */
  @Override
  public void doFilter(ServletRequest request, ServletResponse response,
      FilterChain chain) throws IOException, ServletException {
    HttpServletRequest httpRequest = (HttpServletRequest) request;
    HttpServletResponse httpResponse =
        new OAuthGitWrappedResponse((HttpServletResponse) response);
    log.debug("OAuthGitFilter(" + httpRequest.getRequestURL() + ") code="
        + request.getParameter("code"));

    OAuthCookie oAuthCookie =
        getAuthenticationCookieFromGitRequestUsingOAuthToken(httpRequest,
            httpResponse);
    if (oAuthCookie == null) {
      // Error status already written by the cookie lookup.
      return;
    }

    String gerritPassword =
        oAuthCookie == OAuthCookie.ANONYMOUS ? null : accountCache
            .getByUsername(oAuthCookie.user).getPassword(oAuthCookie.user);

    if (gerritPassword == null && oAuthCookie != OAuthCookie.ANONYMOUS) {
      // Account has no HTTP password yet: create one, then redirect so the
      // client retries with credentials that will now authenticate.
      gerritPassword =
          generateRandomGerritPassword(oAuthCookie, httpRequest, httpResponse,
              chain);
      httpResponse.sendRedirect(getRequestPathWithQueryString(httpRequest));
      return;
    }

    if (oAuthCookie != OAuthCookie.ANONYMOUS) {
      httpRequest =
          new BasicAuthHttpRequest(httpRequest, oAuthCookie.user,
              gerritPassword);
    }

    chain.doFilter(httpRequest, httpResponse);
  }

  /** Rebuilds the request path plus query string, used as a redirect target. */
  private String getRequestPathWithQueryString(HttpServletRequest httpRequest) {
    String requestPathWithQueryString =
        httpRequest.getContextPath() + httpRequest.getServletPath()
            + Strings.nullToEmpty(httpRequest.getPathInfo()) + "?"
            + httpRequest.getQueryString();
    return requestPathWithQueryString;
  }

  /**
   * Generates a random Gerrit HTTP password for the user by calling Gerrit's
   * own {@code PUT /accounts/self/password.http} REST endpoint, authenticated
   * with the user's login cookie and XSRF token, then returns the new password.
   */
  private String generateRandomGerritPassword(OAuthCookie oAuthCookie,
      HttpServletRequest httpRequest, HttpServletResponse httpResponse,
      FilterChain chain) throws IOException, ServletException {
    log.warn("User " + oAuthCookie.user + " has not a Gerrit HTTP password: "
        + "generating a random one in order to be able to use Git over HTTP");
    Cookie gerritCookie =
        getGerritLoginCookie(oAuthCookie.user, httpRequest, httpResponse, chain);
    String xGerritAuthValue = xGerritAuth.getAuthValue(gerritCookie);

    HttpPut putRequest =
        new HttpPut(getRequestUrlWithAlternatePath(httpRequest,
            "/accounts/self/password.http"));
    putRequest.setHeader("Cookie",
        gerritCookie.getName() + "=" + gerritCookie.getValue() + "; "
            + oAuthCookie.getName() + "=" + oAuthCookie.getValue());
    putRequest.setHeader(XGerritAuth.X_GERRIT_AUTH, xGerritAuthValue);

    putRequest.setEntity(new StringEntity("{\"generate\":true}",
        ContentType.APPLICATION_JSON));
    HttpResponse putResponse = httpClientProvider.get().execute(putRequest);
    if (putResponse.getStatusLine().getStatusCode() != HttpStatus.SC_OK) {
      throw new ServletException(
          "Cannot generate HTTP password for authenticating user "
              + oAuthCookie.user);
    }

    return accountCache.getByUsername(oAuthCookie.user).getPassword(
        oAuthCookie.user);
  }

  /** Rewrites the request URL so its path is {@code alternatePath} under the same context. */
  private URI getRequestUrlWithAlternatePath(HttpServletRequest httpRequest,
      String alternatePath) throws MalformedURLException {
    URL originalUrl = new URL(httpRequest.getRequestURL().toString());
    String contextPath = httpRequest.getContextPath();
    return URI.create(originalUrl.getProtocol() + "://" + originalUrl.getHost()
        + ":" + getPort(originalUrl) + contextPath + alternatePath);
  }

  /** Returns the URL's port, defaulting to 443 for https and 80 otherwise. */
  private int getPort(URL originalUrl) {
    String protocol = originalUrl.getProtocol().toLowerCase();
    int port = originalUrl.getPort();
    if (port == -1) {
      return protocol.equals("https") ? 443 : 80;
    } else {
      return port;
    }
  }

  /**
   * Runs a synthetic authenticated login request through the rest of the
   * filter chain and captures the Gerrit account cookie it produces.
   */
  private Cookie getGerritLoginCookie(String username,
      HttpServletRequest httpRequest, HttpServletResponse httpResponse,
      FilterChain chain) throws IOException, ServletException {
    AuthenticatedPathHttpRequest loginRequest =
        new AuthenticatedLoginHttpRequest(httpRequest, config.httpHeader,
            username);
    AuthenticatedLoginHttpResponse loginResponse =
        new AuthenticatedLoginHttpResponse(httpResponse);
    chain.doFilter(loginRequest, loginResponse);
    return loginResponse.getGerritCookie();
  }

  /**
   * Maps the request's Basic credentials ("token:x-oauth-basic") to an OAuth
   * cookie. Returns ANONYMOUS when no/foreign credentials are present, or
   * null after sending 401/403 for malformed or unverifiable tokens.
   */
  private OAuthCookie getAuthenticationCookieFromGitRequestUsingOAuthToken(
      HttpServletRequest req, HttpServletResponse rsp) throws IOException {
    final String httpBasicAuth = getHttpBasicAuthenticationHeader(req);
    if (httpBasicAuth == null) {
      return OAuthCookie.ANONYMOUS;
    }

    if (isInvalidHttpAuthenticationHeader(httpBasicAuth)) {
      rsp.sendError(SC_UNAUTHORIZED);
      return null;
    }

    String oauthToken = StringUtils.substringBefore(httpBasicAuth, ":");
    String oauthKeyword = StringUtils.substringAfter(httpBasicAuth, ":");
    if (Strings.isNullOrEmpty(oauthToken)
        || Strings.isNullOrEmpty(oauthKeyword)) {
      rsp.sendError(SC_UNAUTHORIZED);
      return null;
    }

    if (!oauthKeyword.equalsIgnoreCase(GITHUB_X_OAUTH_BASIC)) {
      // Regular username:password credentials, not an OAuth token: let the
      // downstream authentication handle them.
      return OAuthCookie.ANONYMOUS;
    }

    boolean loginSuccessful = false;
    String oauthLogin = null;

    try {
      oauthLogin =
          oauthCache.getLoginByAccessToken(new AccessToken(oauthToken));
      loginSuccessful = !Strings.isNullOrEmpty(oauthLogin);
    } catch (ExecutionException e) {
      log.warn("Login failed for OAuth token " + oauthToken, e);
      loginSuccessful = false;
    }

    if (!loginSuccessful) {
      rsp.sendError(SC_FORBIDDEN);
      return null;
    }

    return cookieProvider.getFromUser(oauthLogin, "", "", new TreeSet<Scope>());
  }

  /** A valid Basic payload must contain a ':' with a non-empty user part. */
  private boolean isInvalidHttpAuthenticationHeader(String usernamePassword) {
    return usernamePassword.indexOf(':') < 1;
  }

  /** Returns the request's character encoding, defaulting to UTF-8. */
  static String encoding(HttpServletRequest req) {
    return Objects.firstNonNull(req.getCharacterEncoding(), "UTF-8");
  }

  /**
   * Extracts and Base64-decodes the Basic Authorization payload
   * ("user:password"), or returns null when the header is absent or not Basic.
   */
  private String getHttpBasicAuthenticationHeader(final HttpServletRequest req)
      throws UnsupportedEncodingException {
    String hdr = req.getHeader(GIT_AUTHORIZATION_HEADER);
    if (hdr == null || !hdr.startsWith(GIT_AUTHENTICATION_BASIC)) {
      return null;
    } else {
      return new String(Base64.decodeBase64(hdr
          .substring(GIT_AUTHENTICATION_BASIC.length())), encoding(req));
    }
  }

  @Override
  public void destroy() {
    log.info("Destroy");
  }
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.integration;
import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.CheckedRunnable;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.persistent.PersistentTasksCustomMetaData;
import org.elasticsearch.persistent.PersistentTasksCustomMetaData.PersistentTask;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder;
import org.elasticsearch.test.InternalTestCluster;
import org.elasticsearch.xpack.core.ml.MlTasks;
import org.elasticsearch.xpack.core.ml.action.CloseJobAction;
import org.elasticsearch.xpack.core.ml.action.GetDatafeedsStatsAction;
import org.elasticsearch.xpack.core.ml.action.GetJobsStatsAction;
import org.elasticsearch.xpack.core.ml.action.OpenJobAction;
import org.elasticsearch.xpack.core.ml.action.PostDataAction;
import org.elasticsearch.xpack.core.ml.action.PutDatafeedAction;
import org.elasticsearch.xpack.core.ml.action.PutJobAction;
import org.elasticsearch.xpack.core.ml.action.StartDatafeedAction;
import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig;
import org.elasticsearch.xpack.core.ml.datafeed.DatafeedState;
import org.elasticsearch.xpack.core.ml.job.config.AnalysisConfig;
import org.elasticsearch.xpack.core.ml.job.config.DataDescription;
import org.elasticsearch.xpack.core.ml.job.config.Detector;
import org.elasticsearch.xpack.core.ml.job.config.Job;
import org.elasticsearch.xpack.core.ml.job.config.JobState;
import org.elasticsearch.xpack.core.ml.job.config.JobTaskState;
import org.elasticsearch.xpack.ml.MachineLearning;
import org.elasticsearch.xpack.ml.support.BaseMlIntegTestCase;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.TimeUnit;
import static org.elasticsearch.persistent.PersistentTasksClusterService.needsReassignment;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.hasEntry;
/**
 * Integration tests for how ML job and datafeed persistent tasks behave in a
 * distributed cluster: fail-over when data nodes stop, automatic job close,
 * dedicated-ML-node allocation, the concurrent-job-allocation throttle, and
 * job opening when required ML indices are unavailable.
 */
public class BasicDistributedJobsIT extends BaseMlIntegTestCase {
/**
 * Opens a job on a 4-data-node cluster, then stops data nodes one at a time,
 * verifying after each stop that the job task is re-assigned and re-opened.
 */
public void testFailOverBasics() throws Exception {
internalCluster().ensureAtLeastNumDataNodes(4);
ensureStableCluster(4);
Job.Builder job = createJob("fail-over-basics-job", new ByteSizeValue(2, ByteSizeUnit.MB));
PutJobAction.Request putJobRequest = new PutJobAction.Request(job);
client().execute(PutJobAction.INSTANCE, putJobRequest).actionGet();
ensureYellow(); // at least the primary shards of the indices a job uses should be started
OpenJobAction.Request openJobRequest = new OpenJobAction.Request(job.getId());
client().execute(OpenJobAction.INSTANCE, openJobRequest).actionGet();
awaitJobOpenedAndAssigned(job.getId(), null);
ensureGreen(); // replicas must be assigned, otherwise we could lose a whole index
internalCluster().stopRandomDataNode();
ensureStableCluster(3);
awaitJobOpenedAndAssigned(job.getId(), null);
ensureGreen(); // replicas must be assigned, otherwise we could lose a whole index
internalCluster().stopRandomDataNode();
ensureStableCluster(2);
awaitJobOpenedAndAssigned(job.getId(), null);
}
/**
 * Like {@link #testFailOverBasics()} but with a started datafeed attached to
 * the job; after each node stop both the job and the datafeed must come back
 * to the OPENED / STARTED states on a surviving node.
 */
@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/44566")
public void testFailOverBasics_withDataFeeder() throws Exception {
internalCluster().ensureAtLeastNumDataNodes(4);
ensureStableCluster(4);
// Summarised count detector over hourly buckets; the datafeed supplies
// pre-aggregated doc_count values via the histogram aggregation below.
Detector.Builder d = new Detector.Builder("count", null);
AnalysisConfig.Builder analysisConfig = new AnalysisConfig.Builder(Collections.singletonList(d.build()));
analysisConfig.setSummaryCountFieldName("doc_count");
analysisConfig.setBucketSpan(TimeValue.timeValueHours(1));
Job.Builder job = new Job.Builder("fail-over-basics_with-data-feeder-job");
job.setAnalysisConfig(analysisConfig);
job.setDataDescription(new DataDescription.Builder());
PutJobAction.Request putJobRequest = new PutJobAction.Request(job);
client().execute(PutJobAction.INSTANCE, putJobRequest).actionGet();
DatafeedConfig.Builder configBuilder = createDatafeedBuilder("data_feed_id", job.getId(), Collections.singletonList("*"));
MaxAggregationBuilder maxAggregation = AggregationBuilders.max("time").field("time");
HistogramAggregationBuilder histogramAggregation = AggregationBuilders.histogram("time").interval(60000)
.subAggregation(maxAggregation).field("time");
configBuilder.setParsedAggregations(AggregatorFactories.builder().addAggregator(histogramAggregation));
configBuilder.setFrequency(TimeValue.timeValueMinutes(2));
DatafeedConfig config = configBuilder.build();
PutDatafeedAction.Request putDatafeedRequest = new PutDatafeedAction.Request(config);
client().execute(PutDatafeedAction.INSTANCE, putDatafeedRequest).actionGet();
ensureYellow(); // at least the primary shards of the indices a job uses should be started
OpenJobAction.Request openJobRequest = new OpenJobAction.Request(job.getId());
client().execute(OpenJobAction.INSTANCE, openJobRequest).actionGet();
awaitJobOpenedAndAssigned(job.getId(), null);
// Start the datafeed from epoch 0 with no end time (real-time mode).
StartDatafeedAction.Request startDataFeedRequest = new StartDatafeedAction.Request(config.getId(), 0L);
client().execute(StartDatafeedAction.INSTANCE, startDataFeedRequest);
assertBusy(() -> {
GetDatafeedsStatsAction.Response statsResponse =
client().execute(GetDatafeedsStatsAction.INSTANCE, new GetDatafeedsStatsAction.Request(config.getId())).actionGet();
assertEquals(1, statsResponse.getResponse().results().size());
assertEquals(DatafeedState.STARTED, statsResponse.getResponse().results().get(0).getDatafeedState());
});
ensureGreen(); // replicas must be assigned, otherwise we could lose a whole index
internalCluster().stopRandomDataNode();
ensureStableCluster(3);
awaitJobOpenedAndAssigned(job.getId(), null);
assertBusy(() -> {
GetDatafeedsStatsAction.Response statsResponse =
client().execute(GetDatafeedsStatsAction.INSTANCE, new GetDatafeedsStatsAction.Request(config.getId())).actionGet();
assertEquals(1, statsResponse.getResponse().results().size());
assertEquals(DatafeedState.STARTED, statsResponse.getResponse().results().get(0).getDatafeedState());
});
ensureGreen(); // replicas must be assigned, otherwise we could lose a whole index
internalCluster().stopRandomDataNode();
ensureStableCluster(2);
awaitJobOpenedAndAssigned(job.getId(), null);
assertBusy(() -> {
GetDatafeedsStatsAction.Response statsResponse =
client().execute(GetDatafeedsStatsAction.INSTANCE, new GetDatafeedsStatsAction.Request(config.getId())).actionGet();
assertEquals(1, statsResponse.getResponse().results().size());
assertEquals(DatafeedState.STARTED, statsResponse.getResponse().results().get(0).getDatafeedState());
});
}
/**
 * Verifies that a job whose datafeed has a fixed end time is automatically
 * closed once the datafeed finishes processing all three indexed documents.
 */
public void testJobAutoClose() throws Exception {
internalCluster().ensureAtMostNumDataNodes(0);
// One non-ML node (holds the data) and one ML node (runs the job).
internalCluster().startNode(Settings.builder().put(MachineLearning.ML_ENABLED.getKey(), false));
internalCluster().startNode(Settings.builder().put(MachineLearning.ML_ENABLED.getKey(), true));
client().admin().indices().prepareCreate("data")
.addMapping("type", "time", "type=date")
.get();
// Three documents with increasing epoch-seconds timestamps.
IndexRequest indexRequest = new IndexRequest("data");
indexRequest.source("time", 1407081600L);
client().index(indexRequest).get();
indexRequest = new IndexRequest("data");
indexRequest.source("time", 1407082600L);
client().index(indexRequest).get();
indexRequest = new IndexRequest("data");
indexRequest.source("time", 1407083600L);
client().index(indexRequest).get();
refresh();
Job.Builder job = createScheduledJob("job_id");
PutJobAction.Request putJobRequest = new PutJobAction.Request(job);
client().execute(PutJobAction.INSTANCE, putJobRequest).actionGet();
DatafeedConfig config = createDatafeed("data_feed_id", job.getId(), Collections.singletonList("data"));
PutDatafeedAction.Request putDatafeedRequest = new PutDatafeedAction.Request(config);
client().execute(PutDatafeedAction.INSTANCE, putDatafeedRequest).actionGet();
ensureYellow(); // at least the primary shards of the indices a job uses should be started
client().execute(OpenJobAction.INSTANCE, new OpenJobAction.Request(job.getId())).get();
StartDatafeedAction.Request startDatafeedRequest = new StartDatafeedAction.Request(config.getId(), 0L);
// An end time past all documents makes this a lookback-only datafeed,
// which should auto-close the job when it completes.
startDatafeedRequest.getParams().setEndTime(1492616844L);
client().execute(StartDatafeedAction.INSTANCE, startDatafeedRequest).get();
assertBusy(() -> {
GetJobsStatsAction.Response.JobStats jobStats = getJobStats(job.getId());
assertEquals(3L, jobStats.getDataCounts().getProcessedRecordCount());
assertEquals(JobState.CLOSED, jobStats.getState());
});
}
/**
 * Verifies a job is only ever assigned to the single ML-enabled node, fails
 * over to unassigned when that node stops, and re-opens when it returns.
 */
public void testDedicatedMlNode() throws Exception {
internalCluster().ensureAtMostNumDataNodes(0);
// start 2 non ml node that will never get a job allocated. (but ml apis are accessible from this node)
internalCluster().startNode(Settings.builder().put(MachineLearning.ML_ENABLED.getKey(), false));
internalCluster().startNode(Settings.builder().put(MachineLearning.ML_ENABLED.getKey(), false));
// start ml node
if (randomBoolean()) {
internalCluster().startNode(Settings.builder().put(MachineLearning.ML_ENABLED.getKey(), true));
} else {
// the default is based on 'xpack.ml.enabled', which is enabled in base test class.
internalCluster().startNode();
}
ensureStableCluster(3);
String jobId = "dedicated-ml-node-job";
Job.Builder job = createJob(jobId, new ByteSizeValue(2, ByteSizeUnit.MB));
PutJobAction.Request putJobRequest = new PutJobAction.Request(job);
client().execute(PutJobAction.INSTANCE, putJobRequest).actionGet();
ensureYellow(); // at least the primary shards of the indices a job uses should be started
OpenJobAction.Request openJobRequest = new OpenJobAction.Request(job.getId());
client().execute(OpenJobAction.INSTANCE, openJobRequest).actionGet();
assertBusy(() -> {
// The persistent task must be assigned to the ML node (identified by
// its max-open-jobs node attribute) and report an OPENED job state.
ClusterState clusterState = client().admin().cluster().prepareState().get().getState();
PersistentTasksCustomMetaData tasks = clusterState.getMetaData().custom(PersistentTasksCustomMetaData.TYPE);
PersistentTask<?> task = tasks.getTask(MlTasks.jobTaskId(jobId));
DiscoveryNode node = clusterState.nodes().resolveNode(task.getExecutorNode());
assertThat(node.getAttributes(), hasEntry(MachineLearning.MAX_OPEN_JOBS_NODE_ATTR, "20"));
JobTaskState jobTaskState = (JobTaskState) task.getState();
assertNotNull(jobTaskState);
assertEquals(JobState.OPENED, jobTaskState.getState());
});
logger.info("stop the only running ml node");
internalCluster().stopRandomNode(settings -> settings.getAsBoolean(MachineLearning.ML_ENABLED.getKey(), true));
ensureStableCluster(2);
assertBusy(() -> {
// job should get and remain in a failed state and
// the status remains to be opened as from ml we didn't had the chance to set the status to failed:
assertJobTask(jobId, JobState.OPENED, false);
});
logger.info("start ml node");
internalCluster().startNode(Settings.builder().put(MachineLearning.ML_ENABLED.getKey(), true));
ensureStableCluster(3);
assertBusy(() -> {
// job should be re-opened:
assertJobTask(jobId, JobState.OPENED, true);
});
}
/**
 * Opens many jobs at once while sampling every cluster-state update, and
 * asserts that no node ever holds more than the configured
 * {@code xpack.ml.node_concurrent_job_allocations} opening jobs.
 */
public void testMaxConcurrentJobAllocations() throws Exception {
int numMlNodes = 2;
internalCluster().ensureAtMostNumDataNodes(0);
// start non ml node, but that will hold the indices
logger.info("Start non ml node:");
String nonMlNode = internalCluster().startNode(Settings.builder()
.put(MachineLearning.ML_ENABLED.getKey(), false));
logger.info("Starting ml nodes");
internalCluster().startNodes(numMlNodes, Settings.builder()
.put("node.data", false)
.put("node.master", false)
.put(MachineLearning.ML_ENABLED.getKey(), true).build());
ensureStableCluster(numMlNodes + 1);
int maxConcurrentJobAllocations = randomIntBetween(1, 4);
client().admin().cluster().prepareUpdateSettings()
.setTransientSettings(Settings.builder()
.put(MachineLearning.CONCURRENT_JOB_ALLOCATIONS.getKey(), maxConcurrentJobAllocations))
.get();
// Sample each cs update and keep track each time a node holds more than `maxConcurrentJobAllocations` opening jobs.
List<String> violations = new CopyOnWriteArrayList<>();
internalCluster().clusterService(nonMlNode).addListener(event -> {
PersistentTasksCustomMetaData tasks = event.state().metaData().custom(PersistentTasksCustomMetaData.TYPE);
if (tasks == null) {
return;
}
for (DiscoveryNode node : event.state().nodes()) {
// A task counts as "opening" on a node when it is assigned there
// but its state is still null or stale (not yet OPENED).
Collection<PersistentTask<?>> foundTasks = tasks.findTasks(MlTasks.JOB_TASK_NAME, task -> {
JobTaskState jobTaskState = (JobTaskState) task.getState();
return node.getId().equals(task.getExecutorNode()) &&
(jobTaskState == null || jobTaskState.isStatusStale(task));
});
int count = foundTasks.size();
if (count > maxConcurrentJobAllocations) {
violations.add("Observed node [" + node.getName() + "] with [" + count + "] opening jobs on cluster state version [" +
event.state().version() + "]");
}
}
});
ensureYellow(); // at least the primary shards of the indices a job uses should be started
int numJobs = numMlNodes * 10;
for (int i = 0; i < numJobs; i++) {
Job.Builder job = createJob(Integer.toString(i), new ByteSizeValue(2, ByteSizeUnit.MB));
PutJobAction.Request putJobRequest = new PutJobAction.Request(job);
client().execute(PutJobAction.INSTANCE, putJobRequest).actionGet();
OpenJobAction.Request openJobRequest = new OpenJobAction.Request(job.getId());
client().execute(OpenJobAction.INSTANCE, openJobRequest).actionGet();
}
assertBusy(checkAllJobsAreAssignedAndOpened(numJobs));
logger.info("stopping ml nodes");
for (int i = 0; i < numMlNodes; i++) {
// fork so stopping all ml nodes proceeds quicker:
Runnable r = () -> {
try {
internalCluster()
.stopRandomNode(settings -> settings.getAsBoolean(MachineLearning.ML_ENABLED.getKey(), false));
} catch (IOException e) {
logger.error("error stopping node", e);
}
};
new Thread(r).start();
}
ensureStableCluster(1, nonMlNode);
assertBusy(() -> {
// With no ML nodes left, every task must still exist but be unassigned.
ClusterState state = client(nonMlNode).admin().cluster().prepareState().get().getState();
PersistentTasksCustomMetaData tasks = state.metaData().custom(PersistentTasksCustomMetaData.TYPE);
assertEquals(numJobs, tasks.taskMap().size());
for (PersistentTask<?> task : tasks.taskMap().values()) {
assertNull(task.getExecutorNode());
}
});
logger.info("re-starting ml nodes");
internalCluster().startNodes(numMlNodes, Settings.builder()
.put("node.data", false)
.put("node.master", false)
.put(MachineLearning.ML_ENABLED.getKey(), true).build());
ensureStableCluster(1 + numMlNodes);
assertBusy(checkAllJobsAreAssignedAndOpened(numJobs), 30, TimeUnit.SECONDS);
// The listener ran for the whole test; the throttle must never have been exceeded.
assertEquals("Expected no violations, but got [" + violations + "]", 0, violations.size());
}
// This test is designed to check that a job will not open when the .ml-state
// or .ml-anomalies-shared indices are not available. To do this those indices
// must be allocated on a node which is later stopped while .ml-config is
// allocated on a second node which remains active.
public void testMlStateAndResultsIndicesNotAvailable() throws Exception {
internalCluster().ensureAtMostNumDataNodes(0);
// start non ml node that will hold the state and results indices
logger.info("Start non ml node:");
String nonMLNode = internalCluster().startNode(Settings.builder()
.put("node.data", true)
.put("node.attr.ml-indices", "state-and-results")
.put(MachineLearning.ML_ENABLED.getKey(), false));
ensureStableCluster(1);
// start an ml node for the config index
logger.info("Starting ml node");
String mlNode = internalCluster().startNode(Settings.builder()
.put("node.data", true)
.put("node.attr.ml-indices", "config")
.put(MachineLearning.ML_ENABLED.getKey(), true));
ensureStableCluster(2);
// Create the indices (using installed templates) and set the routing to specific nodes
// State and results go on the state-and-results node, config goes on the config node
client().admin().indices().prepareCreate(".ml-anomalies-shared")
.setSettings(Settings.builder()
.put("index.routing.allocation.include.ml-indices", "state-and-results")
.put("index.routing.allocation.exclude.ml-indices", "config")
.build())
.get();
client().admin().indices().prepareCreate(".ml-state")
.setSettings(Settings.builder()
.put("index.routing.allocation.include.ml-indices", "state-and-results")
.put("index.routing.allocation.exclude.ml-indices", "config")
.build())
.get();
client().admin().indices().prepareCreate(".ml-config")
.setSettings(Settings.builder()
.put("index.routing.allocation.exclude.ml-indices", "state-and-results")
.put("index.routing.allocation.include.ml-indices", "config")
.build())
.get();
String jobId = "ml-indices-not-available-job";
Job.Builder job = createFareQuoteJob(jobId);
PutJobAction.Request putJobRequest = new PutJobAction.Request(job);
client().execute(PutJobAction.INSTANCE, putJobRequest).actionGet();
OpenJobAction.Request openJobRequest = new OpenJobAction.Request(job.getId());
client().execute(OpenJobAction.INSTANCE, openJobRequest).actionGet();
// Post two newline-delimited JSON records and then close the job cleanly.
PostDataAction.Request postDataRequest = new PostDataAction.Request(jobId);
postDataRequest.setContent(new BytesArray(
"{\"airline\":\"AAL\",\"responsetime\":\"132.2046\",\"sourcetype\":\"farequote\",\"time\":\"1403481600\"}\n" +
"{\"airline\":\"JZA\",\"responsetime\":\"990.4628\",\"sourcetype\":\"farequote\",\"time\":\"1403481700\"}"
), XContentType.JSON);
PostDataAction.Response response = client().execute(PostDataAction.INSTANCE, postDataRequest).actionGet();
assertEquals(2, response.getDataCounts().getProcessedRecordCount());
CloseJobAction.Request closeJobRequest = new CloseJobAction.Request(jobId);
client().execute(CloseJobAction.INSTANCE, closeJobRequest).actionGet();
assertBusy(() -> {
ClusterState clusterState = client().admin().cluster().prepareState().get().getState();
PersistentTasksCustomMetaData tasks = clusterState.getMetaData().custom(PersistentTasksCustomMetaData.TYPE);
assertEquals(0, tasks.taskMap().size());
});
logger.info("Stop non ml node");
Settings nonMLNodeDataPathSettings = internalCluster().dataPathSettings(nonMLNode);
internalCluster().stopRandomNode(InternalTestCluster.nameFilter(nonMLNode));
ensureStableCluster(1);
// With the state/results indices gone, opening must fail with a detailed
// allocation explanation naming the unavailable indices.
Exception e = expectThrows(ElasticsearchStatusException.class,
() -> client().execute(OpenJobAction.INSTANCE, openJobRequest).actionGet());
assertEquals("Could not open job because no ML nodes with sufficient capacity were found", e.getMessage());
IllegalStateException detail = (IllegalStateException) e.getCause();
assertNotNull(detail);
String detailedMessage = detail.getMessage();
assertTrue(detailedMessage,
detailedMessage.startsWith("Could not open job because no suitable nodes were found, allocation explanation"));
assertThat(detailedMessage, containsString("because not all primary shards are active for the following indices"));
assertThat(detailedMessage, containsString(".ml-state"));
assertThat(detailedMessage, containsString(".ml-anomalies-shared"));
logger.info("Start data node");
// Restart a data node reusing the stopped node's data path so the state
// and results indices become available again; the job should then open.
String nonMlNode = internalCluster().startNode(Settings.builder()
.put(nonMLNodeDataPathSettings)
.put("node.data", true)
.put(MachineLearning.ML_ENABLED.getKey(), false));
ensureStableCluster(2, mlNode);
ensureStableCluster(2, nonMlNode);
ensureYellow(); // at least the primary shards of the indices a job uses should be started
client().execute(OpenJobAction.INSTANCE, openJobRequest).actionGet();
assertBusy(() -> assertJobTask(jobId, JobState.OPENED, true));
}
/**
 * Asserts there is exactly one job persistent task for {@code jobId} and that
 * it either has an executor node (assigned, with the expected state) or none.
 */
private void assertJobTask(String jobId, JobState expectedState, boolean hasExecutorNode) {
ClusterState clusterState = client().admin().cluster().prepareState().get().getState();
PersistentTasksCustomMetaData tasks = clusterState.getMetaData().custom(PersistentTasksCustomMetaData.TYPE);
assertEquals(1, tasks.taskMap().size());
PersistentTask<?> task = MlTasks.getJobTask(jobId, tasks);
assertNotNull(task);
if (hasExecutorNode) {
assertNotNull(task.getExecutorNode());
assertFalse(needsReassignment(task.getAssignment(), clusterState.nodes()));
DiscoveryNode node = clusterState.nodes().resolveNode(task.getExecutorNode());
assertThat(node.getAttributes(), hasEntry(MachineLearning.MAX_OPEN_JOBS_NODE_ATTR, "20"));
JobTaskState jobTaskState = (JobTaskState) task.getState();
assertNotNull(jobTaskState);
assertEquals(expectedState, jobTaskState.getState());
} else {
assertNull(task.getExecutorNode());
}
}
/**
 * Returns a runnable (for {@code assertBusy}) that checks all {@code numJobs}
 * job tasks are assigned to a node and report the OPENED state.
 */
private CheckedRunnable<Exception> checkAllJobsAreAssignedAndOpened(int numJobs) {
return () -> {
ClusterState state = client().admin().cluster().prepareState().get().getState();
PersistentTasksCustomMetaData tasks = state.metaData().custom(PersistentTasksCustomMetaData.TYPE);
assertEquals(numJobs, tasks.taskMap().size());
for (PersistentTask<?> task : tasks.taskMap().values()) {
assertNotNull(task.getExecutorNode());
JobTaskState jobTaskState = (JobTaskState) task.getState();
assertNotNull(jobTaskState);
assertEquals(JobState.OPENED, jobTaskState.getState());
}
};
}
}
| |
/*
*
* Copyright 2015 the original author or authors.
* @https://github.com/scouter-project/scouter
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* The initial idea for this class is from "org.apache.commons.lang.IntHashMap";
* http://commons.apache.org/commons-lang-2.6-src.zip
*
*/
package scouter.util;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.Enumeration;
import java.util.NoSuchElementException;
/**
* @author Paul Kim (sjkim@whatap.io)
*/
/**
 * A hash map from primitive {@code int} keys to primitive {@code long} values
 * that additionally maintains insertion/access order through a circular
 * doubly-linked list threaded through the entries (the {@code header} sentinel
 * links to first and last). Supports an optional maximum size ({@link #setMax}),
 * evicting from the opposite end on overflow, and a configurable "null" value
 * ({@link #setNullValue}) returned for absent keys. Mutating methods are
 * synchronized; enumerators are not fail-safe (see {@code main} demo).
 */
public class IntLongLinkedMap {
private static final int DEFAULT_CAPACITY = 101;
private static final float DEFAULT_LOAD_FACTOR = 0.75f;
// Hash buckets; collisions chained via IntLongLinkedEntry.hash_next.
private IntLongLinkedEntry table[];
// Sentinel node of the circular doubly-linked order list.
private IntLongLinkedEntry header;
private int count;
// Resize trigger: capacity * loadFactor.
private int threshold;
private float loadFactor;
// Value reported for missing keys; 0 by default.
private int NONE = 0;
/** Sets the value returned when a key is absent (default 0). Returns this map for chaining. */
public IntLongLinkedMap setNullValue(int none) {
this.NONE = none;
return this;
}
/**
 * Creates a map with the given initial capacity and load factor.
 * A zero capacity is bumped to 1; negative capacity or non-positive
 * load factor raises a RuntimeException.
 */
public IntLongLinkedMap(int initCapacity, float loadFactor) {
if (initCapacity < 0)
throw new RuntimeException("Capacity Error: " + initCapacity);
if (loadFactor <= 0)
throw new RuntimeException("Load Count Error: " + loadFactor);
if (initCapacity == 0)
initCapacity = 1;
this.loadFactor = loadFactor;
this.table = new IntLongLinkedEntry[initCapacity];
this.header = new IntLongLinkedEntry(0, 0, null);
// Empty list: sentinel points at itself in both directions.
this.header.link_next = header.link_prev = header;
threshold = (int) (initCapacity * loadFactor);
}
public IntLongLinkedMap() {
this(DEFAULT_CAPACITY, DEFAULT_LOAD_FACTOR);
}
/** Number of key/value pairs currently stored. */
public int size() {
return count;
}
/** Snapshot of all keys in linked-list order. */
public int[] keyArray() {
int[] _keys = new int[this.size()];
IntEnumer en = this.keys();
for (int i = 0; i < _keys.length; i++)
_keys[i] = en.nextInt();
return _keys;
}
public synchronized IntEnumer keys() {
return new Enumer(TYPE.KEYS);
}
public synchronized LongEnumer values() {
return new Enumer(TYPE.VALUES);
}
public synchronized Enumeration<IntLongLinkedEntry> entries() {
return new Enumer<IntLongLinkedEntry>(TYPE.ENTRIES);
}
// NOTE(review): parameter is int although stored values are long, so values
// outside the int range can never be matched here — confirm whether a
// long-taking overload is wanted.
public synchronized boolean containsValue(int value) {
IntLongLinkedEntry tab[] = table;
for (int i = tab.length; i-- > 0;) {
for (IntLongLinkedEntry e = tab[i]; e != null; e = e.hash_next) {
if (CompareUtil.equals(e.value, value)) {
return true;
}
}
}
return false;
}
public synchronized boolean containsKey(int key) {
IntLongLinkedEntry tab[] = table;
int index = hash(key) % tab.length;
IntLongLinkedEntry e = tab[index];
while (e != null) {
if (CompareUtil.equals(e.key, key)) {
return true;
}
e = e.hash_next;
}
return false;
}
/** Returns the value for {@code key}, or the configured NONE value if absent. */
public synchronized long get(int key) {
IntLongLinkedEntry tab[] = table;
int index = hash(key) % tab.length;
for (IntLongLinkedEntry e = tab[index]; e != null; e = e.hash_next) {
if (CompareUtil.equals(e.key, key)) {
return e.value;
}
}
return NONE;
}
// The getFirst*/getLast* accessors read the sentinel's neighbours; on an
// empty map they return the sentinel's own key/value (0).
public synchronized int getFirstKey() {
return this.header.link_next.key;
}
public synchronized int getLastKey() {
return this.header.link_prev.key;
}
public synchronized long getFirstValue() {
return this.header.link_next.value;
}
public synchronized long getLastValue() {
return this.header.link_prev.value;
}
// Mask the sign bit so the bucket index from (hash % length) is never negative.
private int hash(int key) {
return key & Integer.MAX_VALUE;
}
/** Doubles the bucket table (2n+1) and redistributes all hash chains; link order is untouched. */
protected void rehash() {
int oldCapacity = table.length;
IntLongLinkedEntry oldMap[] = table;
int newCapacity = oldCapacity * 2 + 1;
IntLongLinkedEntry newMap[] = new IntLongLinkedEntry[newCapacity];
threshold = (int) (newCapacity * loadFactor);
table = newMap;
for (int i = oldCapacity; i-- > 0;) {
IntLongLinkedEntry old = oldMap[i];
while (old != null) {
IntLongLinkedEntry e = old;
old = old.hash_next;
int key = e.key;
int index = hash(key) % newCapacity;
e.hash_next = newMap[index];
newMap[index] = e;
}
}
}
// Maximum entry count; 0 (default) means unbounded.
private int max;
/** Caps the map at {@code max} entries, evicting on insert. Returns this map for chaining. */
public IntLongLinkedMap setMax(int max) {
this.max = max;
return this;
}
// FORCE_* variants also reposition an already-present key to the head/tail.
private static enum MODE {
FORCE_FIRST, FORCE_LAST, FIRST, LAST
};
/** Inserts/updates at the tail; an existing key keeps its list position. Returns the previous value or NONE. */
public long put(int key, long value) {
return _put(key, value, MODE.LAST);
}
/** Inserts/updates and moves the entry to the tail. Returns the previous value or NONE. */
public long putLast(int key, long value) {
return _put(key, value, MODE.FORCE_LAST);
}
/** Inserts/updates and moves the entry to the head. Returns the previous value or NONE. */
public long putFirst(int key, long value) {
return _put(key, value, MODE.FORCE_FIRST);
}
private synchronized long _put(int key, long value, MODE m) {
IntLongLinkedEntry tab[] = table;
int index = hash(key) % tab.length;
// Existing key: update value in place, repositioning only in FORCE_* modes.
for (IntLongLinkedEntry e = tab[index]; e != null; e = e.hash_next) {
if (CompareUtil.equals(e.key, key)) {
long old = e.value;
e.value = value;
switch (m) {
case FORCE_FIRST:
if (header.link_next != e) {
unchain(e);
chain(header, header.link_next, e);
}
break;
case FORCE_LAST:
if (header.link_prev != e) {
unchain(e);
chain(header.link_prev, header, e);
}
break;
}
return old;
}
}
// Capacity cap: evict from the end opposite the insertion point and
// notify subclasses via overflowed().
if (max > 0) {
switch (m) {
case FORCE_FIRST:
case FIRST:
while (count >= max) {
//removeLast();
int k = header.link_prev.key;
long v = remove(k);
overflowed(k, v);
}
break;
case FORCE_LAST:
case LAST:
while (count >= max) {
//removeFirst();
int k = header.link_next.key;
long v = remove(k);
overflowed(k, v);
}
break;
}
}
if (count >= threshold) {
rehash();
tab = table;
index = hash(key) % tab.length;
}
// New entry: prepend to the hash chain, then link at head or tail.
IntLongLinkedEntry e = new IntLongLinkedEntry(key, value, tab[index]);
tab[index] = e;
switch (m) {
case FORCE_FIRST:
case FIRST:
chain(header, header.link_next, e);
break;
case FORCE_LAST:
case LAST:
chain(header.link_prev, header, e);
break;
}
count++;
return NONE;
}
/** Eviction hook for subclasses; called with each entry removed by the max-size cap. */
protected void overflowed(int key, long value) {
}
/** Removes {@code key}, returning its value, or NONE if absent. */
public synchronized long remove(int key) {
IntLongLinkedEntry tab[] = table;
int index = hash(key) % tab.length;
IntLongLinkedEntry e = tab[index];
IntLongLinkedEntry prev = null;
while (e != null) {
if (CompareUtil.equals(e.key, key)) {
// Unlink from the hash chain first, then from the order list.
if (prev != null) {
prev.hash_next = e.hash_next;
} else {
tab[index] = e.hash_next;
}
count--;
long oldValue = e.value;
e.value = NONE;
//
unchain(e);
return oldValue;
}
prev = e;
e = e.hash_next;
}
return NONE;
}
/** Removes the head entry; returns 0 (not NONE) when the map is empty. */
public synchronized long removeFirst() {
if (isEmpty())
return 0;
return remove(header.link_next.key);
}
/** Removes the tail entry; returns 0 (not NONE) when the map is empty. */
public synchronized long removeLast() {
if (isEmpty())
return 0;
return remove(header.link_prev.key);
}
public boolean isEmpty() {
return size() == 0;
}
/** Removes all entries and resets the order list to just the sentinel. */
public synchronized void clear() {
IntLongLinkedEntry tab[] = table;
for (int index = tab.length; --index >= 0;)
tab[index] = null;
this.header.link_next = header;
this.header.link_prev = header;
count = 0;
}
/** e.g. {@code {1=10, 2=20}} in linked-list order. */
public String toString() {
StringBuffer buf = new StringBuffer();
Enumeration it = entries();
buf.append("{");
for (int i = 0; it.hasMoreElements(); i++) {
IntLongLinkedEntry e = (IntLongLinkedEntry) (it.nextElement());
if (i > 0)
buf.append(", ");
buf.append(e.getKey() + "=" + e.getValue());
}
buf.append("}");
return buf.toString();
}
/** Multi-line variant of {@link #toString()}, one tab-indented entry per line. */
public String toFormatString() {
StringBuffer buf = new StringBuffer();
Enumeration it = entries();
buf.append("{\n");
while (it.hasMoreElements()) {
IntLongLinkedEntry e = (IntLongLinkedEntry) it.nextElement();
buf.append("\t").append(e.getKey() + "=" + e.getValue()).append("\n");
}
buf.append("}");
return buf.toString();
}
private enum TYPE {
KEYS, VALUES, ENTRIES
}
/**
 * Single enumerator class serving keys, values, or entries depending on TYPE.
 * Walks the linked order list starting after the sentinel; not fail-safe
 * against concurrent modification. nextInt()/nextLong() throw when called on
 * the wrong TYPE.
 */
private class Enumer<V> implements Enumeration, IntEnumer, LongEnumer {
TYPE type;
IntLongLinkedEntry entry = IntLongLinkedMap.this.header.link_next;
Enumer(TYPE type) {
this.type = type;
}
public boolean hasMoreElements() {
return entry != null && header != entry;
}
public Object nextElement() {
if (hasMoreElements()) {
IntLongLinkedEntry e = entry;
entry = e.link_next;
switch (type) {
case KEYS:
return e.key;
case VALUES:
return e.value;
default:
return e;
}
}
throw new NoSuchElementException("no more next");
}
public int nextInt() {
if (hasMoreElements()) {
IntLongLinkedEntry e = entry;
entry = e.link_next;
switch (type) {
case KEYS:
return e.key;
}
}
throw new NoSuchElementException("no more next");
}
public long nextLong() {
if (hasMoreElements()) {
IntLongLinkedEntry e = entry;
entry = e.link_next;
switch (type) {
case VALUES:
return e.value;
}
}
throw new NoSuchElementException("no more next");
}
}
/** Inserts {@code e} into the order list between link_prev and link_next. */
private void chain(IntLongLinkedEntry link_prev, IntLongLinkedEntry link_next, IntLongLinkedEntry e) {
e.link_prev = link_prev;
e.link_next = link_next;
link_prev.link_next = e;
link_next.link_prev = e;
}
/** Removes {@code e} from the order list and clears its link pointers. */
private void unchain(IntLongLinkedEntry e) {
e.link_prev.link_next = e.link_next;
e.link_next.link_prev = e.link_prev;
e.link_prev = null;
e.link_next = null;
}
/** Map entry: participates in both a hash chain and the doubly-linked order list. */
public static class IntLongLinkedEntry {
int key;
long value;
// Next entry in the same hash bucket.
IntLongLinkedEntry hash_next;
// Neighbours in the insertion/access order list.
IntLongLinkedEntry link_next, link_prev;
protected IntLongLinkedEntry(int key, long value, IntLongLinkedEntry next) {
this.key = key;
this.value = value;
this.hash_next = next;
}
// Deep-copies the hash chain; link pointers are NOT copied.
protected Object clone() {
return new IntLongLinkedEntry(key, value,
(hash_next == null ? null : (IntLongLinkedEntry) hash_next.clone()));
}
public int getKey() {
return key;
}
public long getValue() {
return value;
}
public long setValue(long value) {
long oldValue = this.value;
this.value = value;
return oldValue;
}
public boolean equals(Object o) {
if (!(o instanceof IntLongLinkedEntry))
return false;
IntLongLinkedEntry e = (IntLongLinkedEntry) o;
return CompareUtil.equals(e.key, key) && CompareUtil.equals(e.value, value);
}
public int hashCode() {
return key ^ (int) (value ^ (value >>> 32));
}
public String toString() {
return key + "=" + value;
}
}
/**
 * Re-inserts all entries in the order given by {@code c}. Note: clears and
 * re-puts, so with a max cap the eviction hook may fire during the rebuild.
 */
public synchronized void sort(Comparator<IntLongLinkedEntry> c){
ArrayList<IntLongLinkedEntry> list = new ArrayList<IntLongLinkedEntry>(this.size());
Enumeration<IntLongLinkedEntry> en = this.entries();
while(en.hasMoreElements()){
list.add(en.nextElement());
}
Collections.sort(list, c);
this.clear();
for(int i = 0 ; i<list.size() ; i++){
IntLongLinkedEntry e = list.get(i);
this.put(e.getKey(), e.getValue());
}
}
// Ad-hoc demo of max-size eviction and enumeration during concurrent removal.
public static void main(String[] args) {
IntLongLinkedMap m = new IntLongLinkedMap().setMax(6);
for (int i = 0; i < 10; i++) {
m.put(i, i);
System.out.println(m);
}
System.out.println();
// m.putFirst(1, 0);
System.out.println(m);
System.out.println("==================================");
IntEnumer en = m.keys();
while (en.hasMoreElements()) {
m.remove(5);
System.out.println(en.nextInt());
}
// System.out.println("==================================");
// for (int i = 0; i < 10; i++) {
// m.putLast(i, i);
// System.out.println(m);
// }
// System.out.println("==================================");
// for (int i = 0; i < 10; i++) {
// m.putFirst(i, i);
// System.out.println(m);
// }
// System.out.println("==================================");
// for (int i = 0; i < 10; i++) {
// m.removeFirst();
// System.out.println(m);
// }
}
// NOTE(review): unused helper — candidate for removal if nothing else calls it.
private static void print(Object e) {
System.out.println(e);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.servicecomb.serviceregistry.config;
import java.net.URI;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import org.apache.servicecomb.deployment.Deployment;
import org.apache.servicecomb.foundation.auth.AuthHeaderProvider;
import org.apache.servicecomb.foundation.common.net.IpPort;
import org.apache.servicecomb.foundation.common.net.NetUtils;
import org.apache.servicecomb.foundation.common.utils.SPIServiceUtils;
import org.apache.servicecomb.foundation.vertx.VertxConst;
import org.apache.servicecomb.serviceregistry.client.http.RegistryHttpClientOptionsSPI;
import org.apache.servicecomb.serviceregistry.client.http.RegistryWatchHttpClientOptionsSPI;
import org.apache.servicecomb.serviceregistry.collect.ServiceCenterDefaultDeploymentProvider;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.netflix.config.DynamicBooleanProperty;
import com.netflix.config.DynamicIntProperty;
import com.netflix.config.DynamicPropertyFactory;
import com.netflix.config.DynamicStringProperty;
import io.vertx.core.http.HttpVersion;
class ServiceRegistryConfigBuilder {
  private static final Logger LOGGER = LoggerFactory.getLogger(ServiceRegistryConfigBuilder.class);

  // Side effect of getIpPort(): true when a configured service-center URI uses the "https" scheme.
  private boolean ssl;

  /**
   * Assembles a {@link ServiceRegistryConfig} from dynamic configuration properties.
   *
   * <p>Ordering note: {@code setIpPort(getIpPort())} must stay before
   * {@code setSsl(isSsl())} in this chain, because {@link #getIpPort()} computes
   * the ssl flag as a side effect of parsing the service-center addresses.
   */
  public ServiceRegistryConfig build() {
    return new ServiceRegistryConfig()
        .setHttpVersion(getHttpVersion())
        .setInstances(getInstances())
        .setIpPort(getIpPort())
        .setSsl(isSsl())
        .setClientName(RegistryHttpClientOptionsSPI.CLIENT_NAME)
        .setWatchClientName(RegistryWatchHttpClientOptionsSPI.CLIENT_NAME)
        .setConnectionTimeout(getConnectionTimeout())
        .setIdleConnectionTimeout(getIdleConnectionTimeout())
        .setRequestTimeout(getRequestTimeout())
        .setHeartBeatRequestTimeout(getHeartBeatRequestTimeout())
        .setHeartbeatInterval(getHeartbeatInterval())
        .setInstancePullInterval(getInstancePullInterval())
        .setRegistryAutoDiscovery(isRegistryAutoDiscovery())
        .setResendHeartBeatTimes(getResendHeartBeatTimes())
        .setAlwaysOverrideSchema(isAlwaysOverrideSchema())
        .setIgnoreSwaggerDifference(isIgnoreSwaggerDifference())
        .setPreferIpAddress(isPreferIpAddress())
        .setWatch(isWatch())
        .setRegistryApiVersion(getRegistryApiVersion())
        .setTenantName(getTenantName())
        .setDomainName(getDomainName())
        .setAccessKey(getAccessKey())
        .setSecretKey(getSecretKey())
        .setProxyEnable(isProxyEnable())
        .setProxyHost(getProxyHost())
        .setProxyPort(getProxyPort())
        .setProxyUsername(getProxyUsername())
        .setProxyPasswd(getProxyPasswd())
        .setAuthHeaderProviders(getAuthHeaderProviders());
  }

  /** HTTP version used to talk to service center; defaults to HTTP/1.1. */
  public HttpVersion getHttpVersion() {
    DynamicStringProperty property =
        DynamicPropertyFactory.getInstance()
            .getStringProperty("servicecomb.service.registry.client.httpVersion", "HTTP_1_1");
    return HttpVersion.valueOf(property.get());
  }

  /**
   * Number of registry-client verticle instances to deploy. A non-positive
   * configured value is rejected and replaced by the number of available
   * processors (with a warning).
   */
  public int getInstances() {
    int deployInstances = DynamicPropertyFactory.getInstance()
        .getIntProperty(ServiceRegistryConfig.VERTICLE_INSTANCES, 1)
        .get();
    if (deployInstances <= 0) {
      int nAvailableProcessors = Runtime.getRuntime().availableProcessors();
      LOGGER.warn("The property `{}` must be positive integer, fallback to use number of available processors: {}",
          ServiceRegistryConfig.VERTICLE_INSTANCES,
          nAvailableProcessors);
      return nAvailableProcessors;
    }
    return deployInstances;
  }

  /**
   * must be invoked after {@link #getIpPort()}
   */
  public boolean isSsl() {
    return this.ssl;
  }

  /**
   * Parses the configured service-center access URLs into ip/port pairs.
   * As a side effect, {@link #ssl} reflects the scheme of the last URI
   * processed. Invalid URIs are logged and skipped.
   *
   * @throws NullPointerException if no service-center address is configured
   */
  public ArrayList<IpPort> getIpPort() {
    List<String> uriList = Objects
        .requireNonNull(Deployment.getSystemBootStrapInfo(ServiceCenterDefaultDeploymentProvider.SYSTEM_KEY_SERVICE_CENTER),
            "no sc address found!")
        .getAccessURL();
    ArrayList<IpPort> ipPortList = new ArrayList<>();
    uriList.forEach(anUriList -> {
      try {
        URI uri = new URI(anUriList.trim());
        this.ssl = "https".equals(uri.getScheme());
        ipPortList.add(NetUtils.parseIpPort(uri));
      } catch (Exception e) {
        LOGGER.error("servicecomb.service.registry.address invalid : {}", anUriList, e);
      }
    });
    return ipPortList;
  }

  /** Transport used to reach service center; always "rest". */
  public String getTransport() {
    return "rest";
  }

  /** Connection timeout in ms; 0 is accepted, negative values fall back to 1000. */
  public int getConnectionTimeout() {
    return getValidatedIntProperty("servicecomb.service.registry.client.timeout.connection", 1000, 0);
  }

  /** Connection pool idle timeout based on client heart beat interval. Heart beat default value is 30. */
  public int getIdleConnectionTimeout() {
    return getValidatedIntProperty("servicecomb.service.registry.client.timeout.idle",
        ServiceRegistryConfig.DEFAULT_TIMEOUT_IN_SECONDS * 2, 1);
  }

  /** Watch idle timeout based on SC PING/PONG interval. SC default value is 30. */
  public int getIdleWatchTimeout() {
    return getValidatedIntProperty("servicecomb.service.registry.client.timeout.watch",
        ServiceRegistryConfig.DEFAULT_TIMEOUT_IN_SECONDS * 2, 1);
  }

  /** Request timeout in ms; values below 1 fall back to the default. */
  public int getRequestTimeout() {
    return getValidatedIntProperty("servicecomb.service.registry.client.timeout.request",
        ServiceRegistryConfig.DEFAULT_REQUEST_TIMEOUT_IN_MS, 1);
  }

  /** Timeout of the heartbeat request in ms; values below 1 fall back to the default. */
  public int getHeartBeatRequestTimeout() {
    return getValidatedIntProperty("servicecomb.service.registry.client.timeout.heartbeat",
        ServiceRegistryConfig.DEFAULT_REQUEST_HEARTBEAT_TIMEOUT_IN_MS, 1);
  }

  /** Heartbeat interval in seconds; 0 is accepted, negatives fall back to the default. */
  public int getHeartbeatInterval() {
    return getValidatedIntProperty("servicecomb.service.registry.instance.healthCheck.interval",
        ServiceRegistryConfig.DEFAULT_CHECK_INTERVAL_IN_S, 0);
  }

  /** Instance pull interval in seconds; 0 is accepted, negatives fall back to the default. */
  public int getInstancePullInterval() {
    return getValidatedIntProperty("servicecomb.service.registry.instance.pull.interval",
        ServiceRegistryConfig.DEFAULT_CHECK_INTERVAL_IN_S, 0);
  }

  public boolean isRegistryAutoDiscovery() {
    return getBooleanConfig("servicecomb.service.registry.autodiscovery", false);
  }

  /** Number of heartbeat retries before declaring an instance unhealthy. */
  public int getResendHeartBeatTimes() {
    return getValidatedIntProperty("servicecomb.service.registry.instance.healthCheck.times",
        ServiceRegistryConfig.DEFAULT_CHECK_TIMES, 0);
  }

  public boolean isAlwaysOverrideSchema() {
    return getBooleanConfig("servicecomb.service.registry.instance.alwaysOverrideSchema", false);
  }

  public boolean isIgnoreSwaggerDifference() {
    return getBooleanConfig("servicecomb.service.registry.instance.ignoreSwaggerDifference", false);
  }

  public boolean isPreferIpAddress() {
    return getBooleanConfig("servicecomb.service.registry.instance.preferIpAddress", false);
  }

  public boolean isWatch() {
    return getBooleanConfig("servicecomb.service.registry.instance.watch", true);
  }

  public boolean isClientAuthEnabled() {
    String isAuthEnabled = getProperty("false", ServiceRegistryConfig.AUTH_ENABLED);
    return Boolean.parseBoolean(isAuthEnabled);
  }

  public String getRegistryApiVersion() {
    return getProperty("v4", ServiceRegistryConfig.REGISTRY_API_VERSION);
  }

  public String getTenantName() {
    return getProperty(ServiceRegistryConfig.NO_TENANT, ServiceRegistryConfig.TENANT_NAME);
  }

  public String getDomainName() {
    return getProperty(ServiceRegistryConfig.NO_DOMAIN, ServiceRegistryConfig.DOMAIN_NAME);
  }

  public String getAccessKey() {
    return getProperty(null, ServiceRegistryConfig.TENANT_ACCESS_KEY);
  }

  public String getSecretKey() {
    return getProperty(null, ServiceRegistryConfig.TENANT_SECRET_KEY);
  }

  public Boolean isProxyEnable() {
    String enable = getProperty("false", VertxConst.PROXY_ENABLE);
    return Boolean.parseBoolean(enable);
  }

  public String getProxyHost() {
    return getProperty("127.0.0.1", VertxConst.PROXY_HOST);
  }

  public int getProxyPort() {
    String port = getProperty("8080", VertxConst.PROXY_PORT);
    return Integer.parseInt(port);
  }

  public String getProxyUsername() {
    return getProperty(null, VertxConst.PROXY_USERNAME);
  }

  public String getProxyPasswd() {
    return getProperty(null, VertxConst.PROXY_PASSWD);
  }

  /** All SPI-registered auth header providers. */
  public List<AuthHeaderProvider> getAuthHeaderProviders() {
    return SPIServiceUtils.getAllService(AuthHeaderProvider.class);
  }

  /**
   * Reads an int property; any configured value below {@code minimum} is
   * treated as invalid and replaced by {@code defaultValue}.
   */
  private int getValidatedIntProperty(String key, int defaultValue, int minimum) {
    int value = DynamicPropertyFactory.getInstance().getIntProperty(key, defaultValue).get();
    return value < minimum ? defaultValue : value;
  }

  /** Reads a boolean property with the given default. */
  private boolean getBooleanConfig(String key, boolean defaultValue) {
    return DynamicPropertyFactory.getInstance().getBooleanProperty(key, defaultValue).get();
  }

  /**
   * Returns the first non-null value configured under any of {@code keys}
   * (checked in order), or {@code defaultValue} when none is set.
   */
  private String getProperty(String defaultValue, String... keys) {
    for (String key : keys) {
      String property = DynamicPropertyFactory.getInstance().getStringProperty(key, null).get();
      if (property != null) {
        return property;
      }
    }
    return defaultValue;
  }
}
| |
/***
Copyright (c) 2015 CommonsWare, LLC
Licensed under the Apache License, Version 2.0 (the "License"); you may
not use this file except in compliance with the License. You may obtain
a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.commonsware.cwac.cam2;
import android.content.Context;
import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.util.Log;
import com.commonsware.cwac.cam2.util.Size;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
/**
 * Implementation of a CameraEngine that supports the
 * original android.hardware.Camera API.
 *
 * All camera work is performed on the engine's thread pool; results are
 * delivered asynchronously as events posted on the engine's bus.
 */
@SuppressWarnings("deprecation")
public class ClassicCameraEngine extends CameraEngine {
  // Lazily-built inventory of cameras, cached for the lifetime of the engine.
  private List<CameraDescriptor> descriptors=null;

  /**
   * {@inheritDoc}
   */
  @Override
  public CameraSession.Builder buildSession(Context ctxt, CameraDescriptor descriptor) {
    return(new SessionBuilder(ctxt, descriptor));
  }

  /**
   * {@inheritDoc}
   *
   * On first call, enumerates all cameras, briefly opening each one to read
   * its supported preview/picture sizes. Posts a CameraDescriptorsEvent with
   * the descriptors sorted best-match-first against the given criteria.
   */
  public void loadCameraDescriptors(final CameraSelectionCriteria criteria) {
    getThreadPool().execute(new Runnable() {
      @Override
      public void run() {
        if (descriptors == null) {
          int count=Camera.getNumberOfCameras();
          List<CameraDescriptor> result=new ArrayList<CameraDescriptor>();
          Camera.CameraInfo info=new Camera.CameraInfo();

          for (int cameraId=0; cameraId < count; cameraId++) {
            Camera.getCameraInfo(cameraId, info);

            Descriptor descriptor=new Descriptor(cameraId, info);

            result.add(descriptor);

            Camera camera=Camera.open(descriptor.getCameraId());

            try {
              Camera.Parameters params=camera.getParameters();
              ArrayList<Size> sizes=new ArrayList<Size>();

              for (Camera.Size size : params.getSupportedPreviewSizes()) {
                sizes.add(new Size(size.width, size.height));
              }

              descriptor.setPreviewSizes(sizes);

              sizes=new ArrayList<Size>();

              for (Camera.Size size : params.getSupportedPictureSizes()) {
                sizes.add(new Size(size.width, size.height));
              }

              descriptor.setPictureSizes(sizes);
            }
            finally {
              // release even if querying parameters throws, so the camera
              // is not left locked by this process
              camera.release();
            }
          }

          descriptors=result;
        }

        List<CameraDescriptor> result=new ArrayList<CameraDescriptor>(descriptors);

        Collections.sort(result, new Comparator<CameraDescriptor>() {
          @Override
          public int compare(CameraDescriptor descriptor, CameraDescriptor t1) {
            Descriptor lhs=(Descriptor)descriptor;
            Descriptor rhs=(Descriptor)t1;

            // descending, so invert normal side-ness
            int lhScore=rhs.getScore(criteria);
            int rhScore=lhs.getScore(criteria);

            // from Integer.compare(), which is new to API Level 19
            return(lhScore < rhScore ? -1 : (lhScore == rhScore ? 0 : 1));
          }
        });

        getBus().post(new CameraEngine.CameraDescriptorsEvent(result));
      }
    });
  }

  /**
   * {@inheritDoc}
   *
   * Stops the preview and releases the session's camera (if open), then
   * posts a ClosedEvent.
   */
  @Override
  public void close(final CameraSession session) {
    Descriptor descriptor=(Descriptor)session.getDescriptor();
    Camera camera=descriptor.getCamera();

    if (camera != null) {
      camera.stopPreview();
      camera.release();
      descriptor.setCamera(null);
    }

    getBus().post(new ClosedEvent());
  }

  /**
   * {@inheritDoc}
   *
   * Asynchronously takes a picture; failures are reported via a
   * PictureTakenEvent carrying the exception.
   */
  @Override
  public void takePicture(final CameraSession session, final PictureTransaction xact) {
    getThreadPool().execute(new Runnable() {
      @Override
      public void run() {
        Descriptor descriptor=(Descriptor)session.getDescriptor();
        Camera camera=descriptor.getCamera();

        try {
          camera.takePicture(new Camera.ShutterCallback() {
            @Override
            public void onShutter() {
              // empty plays a sound -- go figure
            }
          }, null,
              new TakePictureTransaction(session.getContext(), xact));
        }
        catch (Exception e) {
          getBus().post(new PictureTakenEvent(e));

          if (isDebug()) {
            Log.e(getClass().getSimpleName(), "Exception taking picture", e);
          }
        }
      }
    });
  }

  /**
   * {@inheritDoc}
   *
   * Opens the session's camera if needed, configures it via the session's
   * plugins, starts the preview on the given texture, and posts an
   * OpenedEvent (with the exception on failure).
   */
  @Override
  public void open(final CameraSession session,
                   final SurfaceTexture texture) {
    getThreadPool().execute(new Runnable() {
      @Override
      public void run() {
        Descriptor descriptor=(Descriptor)session.getDescriptor();
        Camera camera=descriptor.getCamera();

        if (camera == null) {
          camera=Camera.open(descriptor.getCameraId());
          descriptor.setCamera(camera);
        }

        try {
          camera.setParameters(((Session)session).configure());
          camera.setPreviewTexture(texture);
          camera.startPreview();
          getBus().post(new OpenedEvent());
        }
        catch (Exception e) {
          // configuration/preview failed: give the camera back so other
          // apps (or a retry) can use it
          camera.release();
          descriptor.setCamera(null);
          getBus().post(new OpenedEvent(e));

          if (isDebug()) {
            Log.e(getClass().getSimpleName(), "Exception opening camera", e);
          }
        }
      }
    });
  }

  /**
   * Picture callback that restarts the preview and hands the JPEG bytes to
   * the transaction for processing off the camera callback thread.
   */
  private class TakePictureTransaction implements Camera.PictureCallback {
    private final PictureTransaction xact;
    private final Context ctxt;

    TakePictureTransaction(Context ctxt, PictureTransaction xact) {
      // application context: outlives any activity that started the capture
      this.ctxt=ctxt.getApplicationContext();
      this.xact=xact;
    }

    @Override
    public void onPictureTaken(final byte[] bytes, final Camera camera) {
      getThreadPool().execute(new Runnable() {
        @Override
        public void run() {
          camera.startPreview();
          getBus().post(new PictureTakenEvent(xact.process(new ImageContext(ctxt, bytes))));
        }
      });
    }
  }

  /**
   * CameraDescriptor for the classic API: wraps a camera id plus its cached
   * capabilities and (while a session is open) the live Camera object.
   */
  static class Descriptor implements CameraDescriptor {
    private int cameraId;
    private Camera camera;
    private ArrayList<Size> pictureSizes;
    private ArrayList<Size> previewSizes;
    private final int facing;

    private Descriptor(int cameraId, Camera.CameraInfo info) {
      this.cameraId=cameraId;
      this.facing=info.facing;
    }

    public int getCameraId() {
      return (cameraId);
    }

    private void setCamera(Camera camera) {
      this.camera=camera;
    }

    private Camera getCamera() {
      return (camera);
    }

    @Override
    public ArrayList<Size> getPreviewSizes() {
      return (previewSizes);
    }

    private void setPreviewSizes(ArrayList<Size> sizes) {
      previewSizes=sizes;
    }

    @Override
    public ArrayList<Size> getPictureSizes() {
      return (pictureSizes);
    }

    @Override
    public boolean isPictureFormatSupported(int format) {
      // classic API output is always JPEG
      return (ImageFormat.JPEG == format);
    }

    private void setPictureSizes(ArrayList<Size> sizes) {
      pictureSizes=sizes;
    }

    /**
     * Scores this camera against the criteria: 10 when the facing matches
     * (or no criteria given), 0 otherwise.
     */
    private int getScore(CameraSelectionCriteria criteria) {
      int score=10;

      if (criteria != null) {
        if ((criteria.getFacing().isFront() &&
            facing != Camera.CameraInfo.CAMERA_FACING_FRONT) ||
            (!criteria.getFacing().isFront() &&
                facing != Camera.CameraInfo.CAMERA_FACING_BACK)) {
          score=0;
        }
      }

      return(score);
    }
  }

  /**
   * Classic-API session: lets each plugin's ClassicCameraConfigurator
   * contribute to the Camera.Parameters used for the preview/capture.
   */
  private static class Session extends CameraSession {
    private Session(Context ctxt, CameraDescriptor descriptor) {
      super(ctxt, descriptor);
    }

    /** Runs all plugin configurators over the camera's current parameters. */
    Camera.Parameters configure() {
      final Descriptor descriptor=(Descriptor)getDescriptor();
      final Camera camera=descriptor.getCamera();
      Camera.Parameters params=camera.getParameters();
      Camera.CameraInfo info=new Camera.CameraInfo();

      Camera.getCameraInfo(descriptor.getCameraId(), info);

      for (CameraPlugin plugin : getPlugins()) {
        ClassicCameraConfigurator configurator=plugin.buildConfigurator(ClassicCameraConfigurator.class);

        if (configurator != null) {
          params=configurator.configure(info, camera, params);
        }
      }

      return (params);
    }
  }

  private static class SessionBuilder extends CameraSession.Builder {
    private SessionBuilder(Context ctxt, CameraDescriptor descriptor) {
      super(new Session(ctxt, descriptor));
    }
  }
}
| |
package cgeo.geocaching.export;
import cgeo.geocaching.enumerations.LoadFlags;
import cgeo.geocaching.enumerations.WaypointType;
import cgeo.geocaching.files.GPX10Parser;
import cgeo.geocaching.files.ParserException;
import cgeo.geocaching.list.StoredList;
import cgeo.geocaching.models.Geocache;
import cgeo.geocaching.models.Waypoint;
import cgeo.geocaching.storage.DataStore;
import cgeo.geocaching.test.AbstractResourceInstrumentationTestCase;
import cgeo.geocaching.test.R;
import androidx.annotation.NonNull;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.StringWriter;
import java.nio.charset.StandardCharsets;
import java.util.Collection;
import java.util.Collections;
import java.util.concurrent.atomic.AtomicReference;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import static org.assertj.core.api.Java6Assertions.assertThat;
public class GpxSerializerTest extends AbstractResourceInstrumentationTestCase {

    /** Serializing an empty cache list must still emit a well-formed GPX root element. */
    public static void testWriteEmptyGPX() throws Exception {
        final StringWriter target = new StringWriter();
        new GpxSerializer().writeGPX(Collections.emptyList(), target, null);
        final String expected = "<?xml version='1.0' encoding='UTF-8' standalone='yes' ?>" +
                "<gpx version=\"1.0\" creator=\"c:geo - http://www.cgeo.org/\" " +
                "xsi:schemaLocation=\"http://www.topografix.com/GPX/1/0 http://www.topografix.com/GPX/1/0/gpx.xsd " +
                "http://www.groundspeak.com/cache/1/0/1 http://www.groundspeak.com/cache/1/0/1/cache.xsd " +
                "http://www.gsak.net/xmlv1/6 http://www.gsak.net/xmlv1/6/gsak.xsd\" " +
                "xmlns=\"http://www.topografix.com/GPX/1/0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" " +
                "xmlns:groundspeak=\"http://www.groundspeak.com/cache/1/0/1\" xmlns:gsak=\"http://www.gsak.net/xmlv1/6\" " +
                "xmlns:cgeo=\"http://www.cgeo.org/wptext/1/0\" />";
        assertThat(removeWhitespaces(target.getBuffer().toString())).isEqualTo(removeWhitespaces(expected));
    }

    /** Strips every whitespace character so comparisons ignore formatting. */
    private static String removeWhitespaces(final String txt) {
        return txt.replaceAll("\\s", "");
    }

    /** The progress callback must be invoked once for a single exported cache. */
    public void testProgressReporting() throws IOException, ParserException {
        final AtomicReference<Integer> reported = new AtomicReference<>(0);
        final StringWriter target = new StringWriter();
        final Geocache cache = loadCacheFromResource(R.raw.gc1bkp3_gpx101);
        assertThat(cache).isNotNull();
        new GpxSerializer().writeGPX(Collections.singletonList("GC1BKP3"), target, reported::set);
        assertEquals("Progress listener not called", 1, reported.get().intValue());
    }

    /**
     * This test verifies that a loop of import, export, import leads to the same cache information.
     */
    public void testStableExportImportExport() throws IOException, ParserException {
        final String geocode = "GC1BKP3";
        assertThat(loadCacheFromResource(R.raw.gc1bkp3_gpx101)).isNotNull();
        final String firstExport = getGPXFromCache(geocode);
        assertThat(firstExport.length()).isGreaterThan(0);
        final InputStream stream = new ByteArrayInputStream(firstExport.getBytes(StandardCharsets.UTF_8));
        final Collection<Geocache> reimported = new GPX10Parser(StoredList.TEMPORARY_LIST.id).parse(stream, null);
        assertThat(reimported).isNotNull();
        assertThat(reimported).hasSize(1);
        final String secondExport = getGPXFromCache(geocode);
        assertThat(replaceLogIds(secondExport)).isEqualTo(replaceLogIds(firstExport));
    }

    /** Log ids differ between exports, so blank them out before comparing. */
    private static String replaceLogIds(final String gpx) {
        return gpx.replaceAll("log id=\"\\d*\"", "");
    }

    /** Exports the given geocode from the data store and returns the raw GPX text. */
    private static String getGPXFromCache(final String geocode) throws IOException {
        final StringWriter target = new StringWriter();
        new GpxSerializer().writeGPX(Collections.singletonList(geocode), target, null);
        return target.toString();
    }

    public static void testStateFromStateCountry() throws Exception {
        assertThat(GpxSerializer.getState(withLocation("state, country"))).isEqualTo("state");
    }

    public static void testCountryFromStateCountry() throws Exception {
        assertThat(GpxSerializer.getCountry(withLocation("state, country"))).isEqualTo("country");
    }

    public static void testCountryFromCountryOnly() throws Exception {
        assertThat(GpxSerializer.getCountry(withLocation("somewhere"))).isEqualTo("somewhere");
    }

    public static void testStateFromCountryOnly() throws Exception {
        assertThat(GpxSerializer.getState(withLocation("somewhere"))).isEmpty();
    }

    public static void testCountryFromExternalCommaString() throws Exception {
        // this was not created by c:geo, therefore don't split it
        assertThat(GpxSerializer.getState(withLocation("first,second"))).isEmpty();
    }

    /** Builds a bare cache carrying only the given location string. */
    private static Geocache withLocation(final String location) {
        final Geocache result = new Geocache();
        result.setLocation(location);
        return result;
    }

    public void testWaypointSym() throws IOException, ParserException {
        final String geocode = "GC1BKP3";
        try {
            final Geocache cache = loadCacheFromResource(R.raw.gc1bkp3_gpx101);
            final Waypoint parking = new Waypoint("WP", WaypointType.PARKING, false);
            parking.setCoords(cache.getCoords());
            cache.addOrChangeWaypoint(parking, true);
            assertThat(getGPXFromCache(geocode)).contains("<sym>Parking Area</sym>").contains("<type>Waypoint|Parking Area</type>");
        } finally {
            DataStore.removeCache(geocode, LoadFlags.REMOVE_ALL);
        }
    }

    public void testDTNumbersAreIntegers() throws IOException, ParserException {
        loadCacheFromResource(R.raw.gc31j2h);
        final String exported = getGPXFromCache("GC31J2H");
        final String imported = IOUtils.toString(getResourceStream(R.raw.gc31j2h), StandardCharsets.UTF_8);
        assertEqualTags(imported, exported, "groundspeak:difficulty");
        assertEqualTags(imported, exported, "groundspeak:terrain");
    }

    public void testStatusSameCaseAfterExport() throws IOException, ParserException {
        loadCacheFromResource(R.raw.gc31j2h);
        final String exported = getGPXFromCache("GC31J2H");
        final String imported = IOUtils.toString(getResourceStream(R.raw.gc31j2h), StandardCharsets.UTF_8);
        assertEqualTags(imported, exported, "groundspeak:type");
    }

    public void testSameFieldsAfterExport() throws IOException, ParserException {
        loadCacheFromResource(R.raw.gc31j2h);
        final String exported = extractWaypoint(getGPXFromCache("GC31J2H"));
        final String imported = extractWaypoint(IOUtils.toString(getResourceStream(R.raw.gc31j2h), StandardCharsets.UTF_8));
        assertEqualTags(imported, exported, "time");
        assertEqualTags(imported, exported, "name");
        // desc is not the same, since imported files also contain owner and T/D there
        // url is different since we export direct urls, no redirection via coord.info
        assertEqualTags(imported, exported, "urlname");
        assertEqualTags(imported, exported, "sym");
        assertEqualTags(imported, exported, "type");
        assertEqualTags(imported, exported, "groundspeak:name");
        assertEqualTags(imported, exported, "groundspeak:placed_by");
        assertEqualTags(imported, exported, "groundspeak:type");
        assertEqualTags(imported, exported, "groundspeak:container");
        assertEqualTags(imported, exported, "groundspeak:difficulty");
        assertEqualTags(imported, exported, "groundspeak:terrain");
        assertEqualTags(imported, exported, "groundspeak:country");
        assertEqualTags(imported, exported, "groundspeak:state");
        // different newlines in hints (and all other text). that's okay
        assertEqualTags(imported, exported, "groundspeak:date");
    }

    public void testWaypointEmpty() throws IOException, ParserException {
        final String geocode = "GC31J2H";
        try {
            final Geocache cache = loadCacheFromResource(R.raw.gc31j2h);
            cache.addOrChangeWaypoint(new Waypoint("WP", WaypointType.FINAL, false), true);
            final String gpx = getGPXFromCache(geocode);
            assertThat(gpx).contains("<sym>Final Location</sym>").contains("<type>Waypoint|Final Location</type>");
            assertThat(gpx).contains("<cgeo:originalCoordsEmpty>true</cgeo:originalCoordsEmpty>");
        } finally {
            DataStore.removeCache(geocode, LoadFlags.REMOVE_ALL);
        }
    }

    @NonNull
    private static String extractWaypoint(final String gpx) {
        return StringUtils.substringBetween(gpx, "<wpt", "</wpt>");
    }

    /** Asserts both documents carry identical content for every occurrence of the given tag. */
    private static void assertEqualTags(final String imported, final String exported, final String tag) {
        final String[] expectedContent = StringUtils.substringsBetween(imported, "<" + tag + ">", "</" + tag + ">");
        final String[] actualContent = StringUtils.substringsBetween(exported, "<" + tag + ">", "</" + tag + ">");
        assertThat(expectedContent).isNotEmpty();
        assertThat(expectedContent).isEqualTo(actualContent);
    }
}
| |
/*
* Copyright (c) 2008-2017, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.client.test;
import com.hazelcast.client.HazelcastClient;
import com.hazelcast.client.config.ClientAwsConfig;
import com.hazelcast.client.config.ClientConfig;
import com.hazelcast.client.connection.AddressProvider;
import com.hazelcast.client.connection.AddressTranslator;
import com.hazelcast.client.connection.ClientConnectionManager;
import com.hazelcast.client.connection.nio.ClientConnection;
import com.hazelcast.client.connection.nio.ClientConnectionManagerImpl;
import com.hazelcast.client.impl.ClientConnectionManagerFactory;
import com.hazelcast.client.impl.HazelcastClientInstanceImpl;
import com.hazelcast.client.impl.protocol.ClientMessage;
import com.hazelcast.client.spi.impl.AwsAddressTranslator;
import com.hazelcast.client.spi.impl.DefaultAddressTranslator;
import com.hazelcast.client.spi.impl.discovery.DiscoveryAddressTranslator;
import com.hazelcast.client.spi.properties.ClientProperty;
import com.hazelcast.client.test.TwoWayBlockableExecutor.LockPair;
import com.hazelcast.core.HazelcastException;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.instance.Node;
import com.hazelcast.instance.NodeState;
import com.hazelcast.instance.TestUtil;
import com.hazelcast.internal.networking.OutboundFrame;
import com.hazelcast.internal.networking.nio.NioEventLoopGroup;
import com.hazelcast.logging.ILogger;
import com.hazelcast.logging.Logger;
import com.hazelcast.nio.Address;
import com.hazelcast.nio.ConnectionType;
import com.hazelcast.spi.discovery.integration.DiscoveryService;
import com.hazelcast.spi.exception.TargetDisconnectedException;
import com.hazelcast.spi.impl.NodeEngineImpl;
import com.hazelcast.test.mocknetwork.MockConnection;
import com.hazelcast.test.mocknetwork.TestNodeRegistry;
import com.hazelcast.util.ConcurrencyUtil;
import com.hazelcast.util.ConstructorFunction;
import com.hazelcast.util.ExceptionUtil;
import java.io.IOException;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.UnknownHostException;
import java.util.Collection;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.locks.ReentrantReadWriteLock;
class TestClientRegistry {
private static final ILogger LOGGER = Logger.getLogger(HazelcastClient.class);
private final TestNodeRegistry nodeRegistry;
    /**
     * Creates a registry that builds mock client connections against the
     * given {@link TestNodeRegistry} (the simulated cluster members).
     */
    TestClientRegistry(TestNodeRegistry nodeRegistry) {
        this.nodeRegistry = nodeRegistry;
    }
    /**
     * Returns a factory producing {@link MockClientConnectionManager}s whose
     * connections report {@code host} plus a port taken from the shared
     * {@code ports} counter as their local address.
     */
    ClientConnectionManagerFactory createClientServiceFactory(String host, AtomicInteger ports) {
        return new MockClientConnectionManagerFactory(host, ports);
    }
private class MockClientConnectionManagerFactory implements ClientConnectionManagerFactory {
private final String host;
private final AtomicInteger ports;
MockClientConnectionManagerFactory(String host, AtomicInteger ports) {
this.host = host;
this.ports = ports;
}
@Override
public ClientConnectionManager createConnectionManager(ClientConfig config, HazelcastClientInstanceImpl client,
DiscoveryService discoveryService, Collection<AddressProvider> addressProviders) {
final ClientAwsConfig awsConfig = config.getNetworkConfig().getAwsConfig();
AddressTranslator addressTranslator;
if (awsConfig != null && awsConfig.isEnabled()) {
try {
addressTranslator = new AwsAddressTranslator(awsConfig, client.getLoggingService());
} catch (NoClassDefFoundError e) {
LOGGER.warning("hazelcast-aws.jar might be missing!");
throw e;
}
} else if (discoveryService != null) {
addressTranslator = new DiscoveryAddressTranslator(discoveryService,
client.getProperties().getBoolean(ClientProperty.DISCOVERY_SPI_PUBLIC_IP_ENABLED));
} else {
addressTranslator = new DefaultAddressTranslator();
}
return new MockClientConnectionManager(client, addressTranslator, addressProviders, host, ports);
}
}
    /**
     * Connection manager whose "sockets" are in-memory mock connections to
     * nodes in the {@link TestNodeRegistry} rather than real network sockets.
     * Per-target-address traffic can be blocked/unblocked in each direction
     * to simulate network problems. No event loop group is started.
     */
    class MockClientConnectionManager extends ClientConnectionManagerImpl {
        // Host reported as the local address of created connections.
        private final String host;
        // Source of unique local ports for created connections.
        private final AtomicInteger ports;
        private final HazelcastClientInstanceImpl client;
        // One LockPair per target address, guarding incoming/outgoing traffic.
        private final ConcurrentHashMap<Address, LockPair> addressBlockMap = new ConcurrentHashMap<Address, LockPair>();
        MockClientConnectionManager(HazelcastClientInstanceImpl client, AddressTranslator addressTranslator, Collection<AddressProvider> addressProviders,
                                    String host, AtomicInteger ports) {
            super(client, addressTranslator, addressProviders);
            this.client = client;
            this.host = host;
            this.ports = ports;
        }
        // No real networking: no event loop group is created or started.
        @Override
        protected NioEventLoopGroup initEventLoopGroup(HazelcastClientInstanceImpl client) {
            return null;
        }
        @Override
        protected void startEventLoopGroup() {
        }
        @Override
        protected void stopEventLoopGroup() {
        }
        /**
         * Creates a mock connection to the member registered under
         * {@code address} in the node registry, wiring it to that member's
         * NodeEngine and to the address's block/unblock lock pair.
         *
         * @throws IOException if no member instance exists for the address
         */
        @Override
        protected ClientConnection createSocketConnection(Address address) throws IOException {
            if (!alive) {
                throw new HazelcastException("ConnectionManager is not active!!!");
            }
            try {
                HazelcastInstance instance = nodeRegistry.getInstance(address);
                if (instance == null) {
                    throw new IOException("Can not connected to " + address + ": instance does not exist");
                }
                Node node = TestUtil.getNode(instance);
                Address localAddress = new Address(host, ports.incrementAndGet());
                LockPair lockPair = getLockPair(address);
                MockedClientConnection connection = new MockedClientConnection(client,
                        connectionIdGen.incrementAndGet(), node.nodeEngine, address, localAddress, lockPair);
                LOGGER.info("Created connection to endpoint: " + address + ", connection: " + connection);
                return connection;
            } catch (Exception e) {
                throw ExceptionUtil.rethrow(e, IOException.class);
            }
        }
        // Lazily creates the per-address lock pair used for traffic blocking.
        private LockPair getLockPair(Address address) {
            return ConcurrencyUtil.getOrPutIfAbsent(addressBlockMap, address,
                    new ConstructorFunction<Address, LockPair>() {
                        @Override
                        public LockPair createNew(Address arg) {
                            return new LockPair(new ReentrantReadWriteLock(), new ReentrantReadWriteLock());
                        }
                    });
        }
        /**
         * Blocks incoming messages to client from given address
         */
        void blockFrom(Address address) {
            LOGGER.info("Blocked messages from " + address);
            LockPair lockPair = getLockPair(address);
            lockPair.blockIncoming();
        }
        /**
         * Unblocks incoming messages to client from given address
         */
        void unblockFrom(Address address) {
            LOGGER.info("Unblocked messages from " + address);
            LockPair lockPair = getLockPair(address);
            lockPair.unblockIncoming();
        }
        /**
         * Blocks outgoing messages from client to given address
         */
        void blockTo(Address address) {
            LOGGER.info("Blocked messages to " + address);
            LockPair lockPair = getLockPair(address);
            lockPair.blockOutgoing();
        }
        /**
         * Unblocks outgoing messages from client to given address
         */
        void unblockTo(Address address) {
            LOGGER.info("Unblocked messages to " + address);
            LockPair lockPair = getLockPair(address);
            lockPair.unblockOutgoing();
        }
    }
/**
 * Client-side half of a mocked connection. Instead of a real socket, frames are
 * routed through a {@link TwoWayBlockableExecutor} into the paired
 * {@link MockedNodeConnection}, so tests can block/unblock traffic per direction
 * via the shared {@code LockPair}.
 */
private class MockedClientConnection extends ClientConnection {

    // Timestamps backing lastReadTimeMillis()/lastWriteTimeMillis(); updated on
    // the executor threads, hence volatile.
    private volatile long lastReadTime;
    private volatile long lastWriteTime;

    private final NodeEngineImpl serverNodeEngine;
    private final Address remoteAddress;
    private final Address localAddress;
    // The member-side endpoint that receives everything written to this connection.
    private final MockedNodeConnection serverSideConnection;
    // Executor whose incoming and outgoing lanes can be blocked independently.
    private final TwoWayBlockableExecutor executor;

    MockedClientConnection(HazelcastClientInstanceImpl client,
            int connectionId, NodeEngineImpl serverNodeEngine,
            Address address, Address localAddress,
            LockPair lockPair) throws IOException {
        super(client, connectionId);
        this.serverNodeEngine = serverNodeEngine;
        this.remoteAddress = address;
        this.localAddress = localAddress;
        this.executor = new TwoWayBlockableExecutor(lockPair);
        // Creating the server side also registers it with the member's connection manager.
        this.serverSideConnection = new MockedNodeConnection(connectionId, remoteAddress,
                localAddress, serverNodeEngine, this);
    }

    /** Delivers a server->client message through the (blockable) incoming lane. */
    @Override
    public void handleClientMessage(final ClientMessage clientMessage) {
        executor.executeIncoming(new Runnable() {
            @Override
            public void run() {
                lastReadTime = System.currentTimeMillis();
                MockedClientConnection.super.handleClientMessage(clientMessage);
            }

            @Override
            public String toString() {
                return "Runnable message " + clientMessage + ", " + MockedClientConnection.this;
            }
        });
    }

    /**
     * Sends a client->server frame through the (blockable) outgoing lane.
     * Returns false without queueing anything when the target member is shut down.
     */
    @Override
    public boolean write(final OutboundFrame frame) {
        final Node node = serverNodeEngine.getNode();
        if (node.getState() == NodeState.SHUT_DOWN) {
            return false;
        }
        executor.executeOutgoing(new Runnable() {
            @Override
            public String toString() {
                return "Runnable message " + frame + ", " + MockedClientConnection.this;
            }

            @Override
            public void run() {
                // Re-decode into a fresh message to mimic a trip over the wire.
                ClientMessage newPacket = readFromPacket((ClientMessage) frame);
                lastWriteTime = System.currentTimeMillis();
                serverSideConnection.handleClientMessage(newPacket);
            }
        });
        return true;
    }

    // Decodes a copy of the packet from its underlying buffer.
    private ClientMessage readFromPacket(ClientMessage packet) {
        return ClientMessage.createForDecode(packet.buffer(), 0);
    }

    @Override
    public long lastReadTimeMillis() {
        return lastReadTime;
    }

    @Override
    public long lastWriteTimeMillis() {
        return lastWriteTime;
    }

    @Override
    public InetAddress getInetAddress() {
        try {
            return remoteAddress.getInetAddress();
        } catch (UnknownHostException e) {
            // Test-only harness: log and report "unknown" rather than failing the caller.
            e.printStackTrace();
            return null;
        }
    }

    @Override
    public InetSocketAddress getRemoteSocketAddress() {
        try {
            return remoteAddress.getInetSocketAddress();
        } catch (UnknownHostException e) {
            e.printStackTrace();
            return null;
        }
    }

    @Override
    public int getPort() {
        return remoteAddress.getPort();
    }

    @Override
    public InetSocketAddress getLocalSocketAddress() {
        try {
            return localAddress.getInetSocketAddress();
        } catch (UnknownHostException e) {
            e.printStackTrace();
        }
        return null;
    }

    /**
     * Client-initiated close: tells the server side to close (through the outgoing
     * lane so in-flight writes drain first), then stops accepting incoming work.
     */
    @Override
    protected void innerClose() throws IOException {
        executor.executeOutgoing((new Runnable() {
            @Override
            public void run() {
                serverSideConnection.close(null, null);
            }

            @Override
            public String toString() {
                return "Client Closed EOF. " + MockedClientConnection.this;
            }
        }));
        executor.shutdownIncoming();
    }

    /**
     * Server-initiated close: schedules the local close on the incoming lane
     * (mirroring a remote EOF), then stops accepting outgoing work.
     */
    void onServerClose(final String reason) {
        executor.executeIncoming(new Runnable() {
            @Override
            public String toString() {
                return "Server Closed EOF. " + MockedClientConnection.this;
            }

            @Override
            public void run() {
                MockedClientConnection.this.close(reason, new TargetDisconnectedException("Mocked Remote socket closed"));
            }
        });
        executor.shutdownOutgoing();
    }

    @Override
    public String toString() {
        return "MockedClientConnection{"
                + "localAddress=" + localAddress
                + ", super=" + super.toString()
                + '}';
    }
}
/**
 * Server-side half of a mocked connection. Frames written here are re-decoded
 * and handed straight to the paired {@link MockedClientConnection} in-process,
 * bypassing real sockets.
 */
private class MockedNodeConnection extends MockConnection {

    private final MockedClientConnection responseConnection;
    private final int connectionId;

    // Timestamps backing lastReadTimeMillis()/lastWriteTimeMillis().
    private volatile long lastReadTimeMillis;
    private volatile long lastWriteTimeMillis;
    // Guards close() so teardown runs exactly once. Declared final rather than
    // volatile: the AtomicBoolean reference itself is never reassigned, and the
    // atomic provides the needed cross-thread semantics on its own.
    private final AtomicBoolean alive = new AtomicBoolean(true);

    MockedNodeConnection(int connectionId, Address localEndpoint, Address remoteEndpoint, NodeEngineImpl nodeEngine,
                         MockedClientConnection responseConnection) {
        super(localEndpoint, remoteEndpoint, nodeEngine);
        this.responseConnection = responseConnection;
        this.connectionId = connectionId;
        // NOTE(review): publishes 'this' to the connection manager before the
        // constructor returns (escaping reference). Tolerable in test code, but
        // nothing must observe the instance concurrently during construction.
        register();
        lastReadTimeMillis = System.currentTimeMillis();
        lastWriteTimeMillis = System.currentTimeMillis();
    }

    // Registers this connection with the member's connection manager under the
    // remote (client) endpoint address.
    private void register() {
        Node node = remoteNodeEngine.getNode();
        node.getConnectionManager().registerConnection(getEndPoint(), this);
    }

    /**
     * Delivers a server->client frame to the paired client connection.
     * Returns false if this connection is no longer alive.
     */
    @Override
    public boolean write(OutboundFrame frame) {
        final ClientMessage packet = (ClientMessage) frame;
        if (isAlive()) {
            lastWriteTimeMillis = System.currentTimeMillis();
            // Re-decode into a fresh message to mimic a trip over the wire.
            ClientMessage newPacket = readFromPacket(packet);
            responseConnection.handleClientMessage(newPacket);
            return true;
        }
        return false;
    }

    /** Feeds a client->server message into the member's client engine. */
    void handleClientMessage(ClientMessage newPacket) {
        lastReadTimeMillis = System.currentTimeMillis();
        remoteNodeEngine.getNode().clientEngine.handleClientMessage(newPacket, this);
    }

    @Override
    public boolean isClient() {
        return true;
    }

    private ClientMessage readFromPacket(ClientMessage packet) {
        return ClientMessage.createForDecode(packet.buffer(), 0);
    }

    /** Identity: connection id plus remote endpoint (kept in sync with hashCode). */
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        MockedNodeConnection that = (MockedNodeConnection) o;
        if (connectionId != that.connectionId) {
            return false;
        }
        Address remoteEndpoint = getEndPoint();
        // Simplified from the original double negation: equal when both endpoints
        // are null, or when they compare equal.
        return remoteEndpoint != null ? remoteEndpoint.equals(that.getEndPoint()) : that.getEndPoint() == null;
    }

    /**
     * Idempotent close: the compareAndSet ensures the warning, the superclass
     * teardown and the client-side notification happen at most once.
     */
    @Override
    public void close(String reason, Throwable cause) {
        if (!alive.compareAndSet(true, false)) {
            return;
        }
        Logger.getLogger(MockedNodeConnection.class).warning("Server connection closed: " + reason, cause);
        super.close(reason, cause);
        responseConnection.onServerClose(reason);
    }

    @Override
    public int hashCode() {
        int result = connectionId;
        Address remoteEndpoint = getEndPoint();
        result = 31 * result + (remoteEndpoint != null ? remoteEndpoint.hashCode() : 0);
        return result;
    }

    @Override
    public long lastReadTimeMillis() {
        return lastReadTimeMillis;
    }

    @Override
    public long lastWriteTimeMillis() {
        return lastWriteTimeMillis;
    }

    @Override
    public ConnectionType getType() {
        return ConnectionType.JAVA_CLIENT;
    }

    @Override
    public String toString() {
        return "MockedNodeConnection{"
                + " remoteEndpoint = " + getEndPoint()
                + ", localEndpoint = " + localEndpoint
                + ", connectionId = " + connectionId
                + '}';
    }
}
}
| |
package de.fau.cs.mad.rpgpack.templatebrowser;
import java.io.File;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.util.Base64;
import de.fau.cs.mad.rpgpack.ThumbnailLoader;
import de.fau.cs.mad.rpgpack.jackson.CharacterSheet;
/**
 * Container for an RPG character template. An RPG character template is the set
 * of all possible character attributes and elements. A template is stored as a
 * json file at <code>fileAbsolutePath</code>. Once created, a template can be
 * reused to create another RPG character, just with other values or another
 * subset of the modeled elements.
 *
 */
@SuppressWarnings("serial")
public class Template implements Serializable {

    private String templateName;
    private String worldName;
    private String author;
    private String date;
    private String description;

    /** Absolute path of the json file backing this template; null until saved/loaded. */
    public String fileAbsolutePath = null;
    // lastModified() of the backing file at the last check; 0 = never checked.
    private long fileTimeStamp = 0;
    private String tagString = "";
    // Either a filesystem path to an image or a Base64-encoded image (see isIconBase64()).
    private String iconPath = "";

    private List<CharacterSheet> characters = new ArrayList<CharacterSheet>();

    /**
     * Creates a fully described template.
     *
     * @param templateName display name of the template
     * @param worldName    name of the game world it belongs to
     * @param author       template author
     * @param date         creation date (free-form string)
     * @param iconPath     icon as file path or Base64 string
     * @param description  human-readable description
     */
    public Template(String templateName, String worldName, String author,
            String date, String iconPath, String description) {
        this.templateName = templateName;
        this.worldName = worldName;
        this.author = author;
        this.date = date;
        this.iconPath = iconPath;
        this.description = description;
    }

    /**
     * Creates a template with a placeholder description.
     *
     * @param iconID NOTE(review): accepted but never stored anywhere — confirm
     *               whether it was meant to populate {@code iconPath}.
     */
    public Template(String templateName, String worldName, String author,
            String date, int iconID) {
        this.templateName = templateName;
        this.worldName = worldName;
        this.author = author;
        this.date = date;
        this.description = "No description found!";
    }

    /**
     * Minimal constructor; leaves {@code description} null and {@code iconPath} empty.
     */
    public Template(String templateName, String worldName, String author,
            String date) {
        this.templateName = templateName;
        this.worldName = worldName;
        this.author = author;
        this.date = date;
    }

    /**
     * Adds a character sheet that was created from this template.
     *
     * @return always true (mirrors {@link java.util.List#add})
     */
    public boolean addCharacter(CharacterSheet character) {
        characters.add(character);
        return true;
    }

    public String getTemplateName() {
        return templateName;
    }

    public void setTemplateName(String templateName) {
        this.templateName = templateName;
    }

    public String getWorldName() {
        return worldName;
    }

    public void setWorldName(String worldName) {
        this.worldName = worldName;
    }

    public String getAuthor() {
        return author;
    }

    public void setAuthor(String author) {
        this.author = author;
    }

    public String getDate() {
        return date;
    }

    public void setDate(String date) {
        this.date = date;
    }

    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    public String getIconPath() {
        return iconPath;
    }

    public void setIconPath(String iconPath) {
        this.iconPath = iconPath;
    }

    /**
     * Returns the file name portion of {@code fileAbsolutePath}, or "" if no
     * path is set. A path without any '/' separator is returned unchanged.
     */
    public String getFileName() {
        if (fileAbsolutePath == null || fileAbsolutePath.isEmpty()) {
            return "";
        }
        // lastIndexOf == -1 yields substring(0), i.e. the whole (separator-free) path.
        int lastSlashPos = fileAbsolutePath.lastIndexOf('/');
        return fileAbsolutePath.substring(lastSlashPos + 1);
    }

    public List<CharacterSheet> getCharacters() {
        return characters;
    }

    public void setCharacters(List<CharacterSheet> characters) {
        this.characters = characters;
    }

    public void clearCharacters() {
        this.characters.clear();
    }

    /**
     * Checks whether the file for this template has changed by comparing the
     * file's lastModified() against the cached time stamp. Updates the cached
     * stamp as a side effect when a change is detected.
     *
     * @return true if the file has been changed, false otherwise (including
     *         when no file path is set).
     */
    public boolean hasFileTimeStampChanged() {
        if (fileAbsolutePath == null) {
            return false;
        }
        final File templateFile = new File(fileAbsolutePath);
        final long newTimeStamp = templateFile.lastModified();
        if (newTimeStamp > fileTimeStamp) {
            fileTimeStamp = newTimeStamp;
            return true;
        }
        return false;
    }

    public void setFileTimeStamp(final long timeStamp) {
        fileTimeStamp = timeStamp;
    }

    public long getFileTimeStamp() {
        return fileTimeStamp;
    }

    /**
     * @return a File for the backing json, or null when no path is set.
     */
    public File getTemplateFile() {
        if (fileAbsolutePath == null) {
            return null;
        }
        return new File(fileAbsolutePath);
    }

    public String getTagString() {
        return tagString;
    }

    public void setTagString(String tagString) {
        this.tagString = tagString;
    }

    /**
     * @return true if an icon value (path or Base64 data) is present.
     */
    public boolean hasIcon() {
        return iconPath != null && !iconPath.isEmpty();
    }

    /**
     * Heuristically decides whether {@code iconPath} holds Base64 image data
     * rather than a file path: anything that does not resolve to an existing
     * regular file is treated as Base64.
     */
    public boolean isIconBase64() {
        if (!hasIcon()) {
            return false;
        }
        try {
            return !new File(iconPath).isFile();
        } catch (Exception e) {
            // An un-pathlike string (e.g. raw Base64) may throw; treat it as data.
            return true;
        }
    }

    /**
     * Decodes the icon into a Bitmap: Base64 data is decoded directly, file
     * paths are loaded via the thumbnail loader.
     *
     * @return the icon bitmap, or null if absent or undecodable.
     */
    public Bitmap getIcon(final Context context) {
        if (!hasIcon()) {
            return null;
        }
        if (isIconBase64()) {
            try {
                final byte[] decodedBase64 = Base64.decode(iconPath, Base64.DEFAULT);
                return BitmapFactory.decodeByteArray(decodedBase64, 0, decodedBase64.length);
            } catch (Exception e) {
                e.printStackTrace();
                return null;
            }
        } else {
            return ThumbnailLoader.loadThumbnail(iconPath, context);
        }
    }
}
| |
// Copyright (c) 2010 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.sync.notifier;
import android.accounts.Account;
import android.content.ContentResolver;
import android.content.Context;
import android.content.SharedPreferences;
import android.content.SyncStatusObserver;
import android.os.StrictMode;
import android.preference.PreferenceManager;
import android.util.Log;
import org.chromium.sync.signin.AccountManagerHelper;
import com.google.common.annotations.VisibleForTesting;
import java.util.ArrayList;
/**
* A helper class to handle the current status of sync for Chrome in Android-land.
*
* It also has a helper class to be used by observers whenever sync settings change.
*
* To retrieve an instance of this class, call SyncStatusHelper.get(someContext).
*/
public class SyncStatusHelper {

    /** Observer of sign-out events; see {@link #addListener}. */
    public interface Listener {
        /**
         * Called when the user signs out of Chrome.
         */
        void onClearSignedInUser();
    }

    // TODO(dsmyers): remove the downstream version of this constant.
    public static final String AUTH_TOKEN_TYPE_SYNC = "chromiumsync";

    @VisibleForTesting
    public static final String SIGNED_IN_ACCOUNT_KEY = "google.services.username";

    public static final String TAG = "SyncStatusHelper";

    private final Context mApplicationContext;
    private final SyncContentResolverDelegate mSyncContentResolverWrapper;

    // Guards lazy creation of the singleton.
    private static final Object lock = new Object();
    private static SyncStatusHelper sSyncStatusHelper;

    // NOTE(review): a plain ArrayList — add/remove and the iteration in
    // clearSignedInUser() are not synchronized. Confirm all listener access
    // happens on a single thread.
    private final ArrayList<Listener> mListeners;

    /**
     * @param context the context
     * @param syncContentResolverWrapper an implementation of SyncContentResolverDelegate
     */
    private SyncStatusHelper(Context context,
                             SyncContentResolverDelegate syncContentResolverWrapper) {
        mApplicationContext = context.getApplicationContext();
        mSyncContentResolverWrapper = syncContentResolverWrapper;
        mListeners = new ArrayList<Listener>();
    }

    /**
     * A factory method for the SyncStatusHelper.
     *
     * It is possible to override the SyncContentResolverDelegate to use in tests for the
     * instance of the SyncStatusHelper by calling overrideSyncStatusHelperForTests(...) with
     * your SyncContentResolverDelegate.
     *
     * @param context the ApplicationContext is retrieved from the context used as an argument.
     * @return a singleton instance of the SyncStatusHelper
     */
    public static SyncStatusHelper get(Context context) {
        synchronized (lock) {
            if (sSyncStatusHelper == null) {
                Context applicationContext = context.getApplicationContext();
                sSyncStatusHelper = new SyncStatusHelper(applicationContext,
                        new SystemSyncContentResolverDelegate());
            }
        }
        return sSyncStatusHelper;
    }

    /**
     * Tests might want to consider overriding the context and SyncContentResolverDelegate so they
     * do not use the real ContentResolver in Android.
     *
     * @param context the context to use
     * @param syncContentResolverWrapper the SyncContentResolverDelegate to use
     */
    @VisibleForTesting
    public static void overrideSyncStatusHelperForTests(Context context,
            SyncContentResolverDelegate syncContentResolverWrapper) {
        synchronized (lock) {
            if (sSyncStatusHelper != null) {
                throw new IllegalStateException("SyncStatusHelper already exists");
            }
            sSyncStatusHelper = new SyncStatusHelper(context, syncContentResolverWrapper);
        }
    }

    /**
     * Wrapper method for the ContentResolver.addStatusChangeListener(...) when we are only
     * interested in the settings type.
     */
    public Object registerContentResolverObserver(SyncStatusObserver observer) {
        return mSyncContentResolverWrapper.addStatusChangeListener(
                ContentResolver.SYNC_OBSERVER_TYPE_SETTINGS, observer);
    }

    /**
     * Wrapper method for the ContentResolver.removeStatusChangeListener(...).
     */
    public void unregisterContentResolverObserver(Object observerHandle) {
        mSyncContentResolverWrapper.removeStatusChangeListener(observerHandle);
    }

    /**
     * Checks whether sync is currently enabled from Chrome for a given account.
     *
     * It checks both the master sync for the device, and Chrome sync setting for the given account.
     *
     * @param account the account to check if Chrome sync is enabled on.
     * @return true if sync is on, false otherwise
     */
    public boolean isSyncEnabled(Account account) {
        // Settings reads may hit disk; temporarily relax StrictMode for this call.
        StrictMode.ThreadPolicy oldPolicy = temporarilyAllowDiskWritesAndDiskReads();
        String contractAuthority =
                InvalidationController.get(mApplicationContext).getContractAuthority();
        boolean enabled = account != null &&
                mSyncContentResolverWrapper.getMasterSyncAutomatically() &&
                mSyncContentResolverWrapper.getSyncAutomatically(account, contractAuthority);
        StrictMode.setThreadPolicy(oldPolicy);
        return enabled;
    }

    /**
     * Checks whether sync is currently enabled from Chrome for the currently signed in account.
     *
     * It checks both the master sync for the device, and Chrome sync setting for the given account.
     * If no user is currently signed in it returns false.
     *
     * @return true if sync is on, false otherwise
     */
    public boolean isSyncEnabled() {
        return isSyncEnabled(getSignedInUser());
    }

    /**
     * Checks whether sync is currently enabled from Chrome for a given account.
     *
     * It checks only Chrome sync setting for the given account,
     * and ignores the master sync setting.
     *
     * @param account the account to check if Chrome sync is enabled on.
     * @return true if sync is on, false otherwise
     */
    public boolean isSyncEnabledForChrome(Account account) {
        StrictMode.ThreadPolicy oldPolicy = temporarilyAllowDiskWritesAndDiskReads();
        String contractAuthority =
                InvalidationController.get(mApplicationContext).getContractAuthority();
        boolean enabled = account != null &&
                mSyncContentResolverWrapper.getSyncAutomatically(account, contractAuthority);
        StrictMode.setThreadPolicy(oldPolicy);
        return enabled;
    }

    /**
     * Checks whether the master sync flag for Android is currently set.
     *
     * @return true if the global master sync is on, false otherwise
     */
    public boolean isMasterSyncAutomaticallyEnabled() {
        StrictMode.ThreadPolicy oldPolicy = temporarilyAllowDiskWritesAndDiskReads();
        boolean enabled = mSyncContentResolverWrapper.getMasterSyncAutomatically();
        StrictMode.setThreadPolicy(oldPolicy);
        return enabled;
    }

    /**
     * Make sure Chrome is syncable, and enable sync.
     *
     * @param account the account to enable sync on
     */
    public void enableAndroidSync(Account account) {
        StrictMode.ThreadPolicy oldPolicy = temporarilyAllowDiskWritesAndDiskReads();
        makeSyncable(account);
        String contractAuthority =
                InvalidationController.get(mApplicationContext).getContractAuthority();
        if (!mSyncContentResolverWrapper.getSyncAutomatically(account, contractAuthority)) {
            mSyncContentResolverWrapper.setSyncAutomatically(account, contractAuthority, true);
        }
        StrictMode.setThreadPolicy(oldPolicy);
    }

    /**
     * Disables Android Chrome sync
     *
     * @param account the account to disable Chrome sync on
     */
    public void disableAndroidSync(Account account) {
        StrictMode.ThreadPolicy oldPolicy = temporarilyAllowDiskWritesAndDiskReads();
        String contractAuthority =
                InvalidationController.get(mApplicationContext).getContractAuthority();
        if (mSyncContentResolverWrapper.getSyncAutomatically(account, contractAuthority)) {
            mSyncContentResolverWrapper.setSyncAutomatically(account, contractAuthority, false);
        }
        StrictMode.setThreadPolicy(oldPolicy);
    }

    // TODO(nyquist) Move all these methods about signed in user to GoogleServicesManager.

    /**
     * @return the Account the user is signed in with, or null when signed out.
     */
    public Account getSignedInUser() {
        String syncAccountName = getSignedInAccountName();
        if (syncAccountName == null) {
            return null;
        }
        return AccountManagerHelper.createAccountFromName(syncAccountName);
    }

    /** @return whether a signed-in account name is stored in preferences. */
    public boolean isSignedIn() {
        return getSignedInAccountName() != null;
    }

    /** Persists the signed-in account name (null clears it). */
    public void setSignedInAccountName(String accountName) {
        getPreferences().edit()
                .putString(SIGNED_IN_ACCOUNT_KEY, accountName)
                .apply();
    }

    /** Clears the stored account name and notifies all registered listeners. */
    public void clearSignedInUser() {
        Log.d(TAG, "Clearing user signed in to Chrome");
        setSignedInAccountName(null);
        for (Listener listener : mListeners) {
            listener.onClearSignedInUser();
        }
    }

    private String getSignedInAccountName() {
        return getPreferences().getString(SIGNED_IN_ACCOUNT_KEY, null);
    }

    /**
     * Register with Android Sync Manager. This is what causes the "Chrome" option to appear in
     * Settings -> Accounts / Sync .
     *
     * @param account the account to enable Chrome sync on
     */
    private void makeSyncable(Account account) {
        String contractAuthority =
                InvalidationController.get(mApplicationContext).getContractAuthority();
        if (hasFinishedFirstSync(account)) {
            mSyncContentResolverWrapper.setIsSyncable(account, contractAuthority, 1);
        }
        // Disable the syncability of Chrome for all other accounts
        Account[] googleAccounts = AccountManagerHelper.get(mApplicationContext).
                getGoogleAccounts();
        for (Account accountToSetNotSyncable : googleAccounts) {
            if (!accountToSetNotSyncable.equals(account) &&
                    mSyncContentResolverWrapper.getIsSyncable(
                            accountToSetNotSyncable, contractAuthority) > 0) {
                mSyncContentResolverWrapper.setIsSyncable(accountToSetNotSyncable,
                        contractAuthority, 0);
            }
        }
    }

    /**
     * Returns whether the given account has ever been synced.
     *
     * NOTE(review): the implementation returns true when isSyncable <= 0 (i.e.
     * the account is currently NOT marked syncable) — confirm the name matches
     * the intended semantics before relying on it outside makeSyncable().
     */
    boolean hasFinishedFirstSync(Account account) {
        String contractAuthority =
                InvalidationController.get(mApplicationContext).getContractAuthority();
        return mSyncContentResolverWrapper.getIsSyncable(account, contractAuthority) <= 0;
    }

    /**
     * Helper class to be used by observers whenever sync settings change.
     *
     * To register the observer, call SyncStatusHelper.registerContentResolverObserver(...).
     */
    public static abstract class SyncSettingsChangedObserver implements SyncStatusObserver {
        @Override
        public void onStatusChanged(int which) {
            if (ContentResolver.SYNC_OBSERVER_TYPE_SETTINGS == which) {
                syncSettingsChanged();
            }
        }

        protected abstract void syncSettingsChanged();
    }

    /**
     * Returns the default shared preferences.
     */
    private SharedPreferences getPreferences() {
        return PreferenceManager.getDefaultSharedPreferences(mApplicationContext);
    }

    /**
     * Sets a new StrictMode.ThreadPolicy based on the current one, but allows disk reads
     * and disk writes.
     *
     * The return value is the old policy, which must be applied after the disk access is finished,
     * by using StrictMode.setThreadPolicy(oldPolicy).
     *
     * @return the policy before allowing reads and writes.
     */
    private static StrictMode.ThreadPolicy temporarilyAllowDiskWritesAndDiskReads() {
        StrictMode.ThreadPolicy oldPolicy = StrictMode.getThreadPolicy();
        StrictMode.ThreadPolicy.Builder newPolicy =
                new StrictMode.ThreadPolicy.Builder(oldPolicy);
        newPolicy.permitDiskReads();
        newPolicy.permitDiskWrites();
        StrictMode.setThreadPolicy(newPolicy.build());
        return oldPolicy;
    }

    /**
     * Adds a Listener.
     * @param listener Listener to add.
     */
    public void addListener(Listener listener) {
        mListeners.add(listener);
    }

    /**
     * Removes a Listener.
     * @param listener Listener to remove from the list.
     * @return whether or not the Listener was removed.
     */
    public boolean removeListener(Listener listener) {
        return mListeners.remove(listener);
    }
}
| |
package brooklyn.policy.basic;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertTrue;
import static org.testng.Assert.fail;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.concurrent.CountDownLatch;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import brooklyn.config.ConfigKey;
import brooklyn.entity.basic.ApplicationBuilder;
import brooklyn.entity.basic.Entities;
import brooklyn.event.basic.BasicConfigKey;
import brooklyn.event.basic.DependentConfiguration;
import brooklyn.test.entity.TestApplication;
import brooklyn.test.entity.TestEntity;
import brooklyn.util.collections.MutableMap;
import brooklyn.util.exceptions.Exceptions;
import brooklyn.util.flags.SetFromFlag;
import com.google.common.util.concurrent.Callables;
/**
* Test that configuration properties are usable and inherited correctly.
*/
public class PolicyConfigMapUsageTest {
    // Slack (ms) added to sleeps so the blocking test doesn't return early on fast machines.
    private static final int EARLY_RETURN_GRACE = 10;

    /**
     * Test policy exposing a mix of flag-named and plain config keys.
     * Note STR_KEY's flag name ("strKey") collides with STR_KEY_WITH_DEFAULT's
     * key name — testConfigFlagsPassedInAtConstructionIsAvailable relies on that.
     */
    public static class MyPolicy extends AbstractPolicy {
        @SetFromFlag("intKey")
        public static final BasicConfigKey<Integer> INT_KEY = new BasicConfigKey<Integer>(Integer.class, "bkey", "b key");

        @SetFromFlag("strKey")
        public static final ConfigKey<String> STR_KEY = new BasicConfigKey<String>(String.class, "akey", "a key");
        public static final ConfigKey<Integer> INT_KEY_WITH_DEFAULT = new BasicConfigKey<Integer>(Integer.class, "ckey", "c key", 1);
        public static final ConfigKey<String> STR_KEY_WITH_DEFAULT = new BasicConfigKey<String>(String.class, "strKey", "str key", "str key default");

        // Raw Map kept deliberately: this is the framework's untyped flag-map convention.
        MyPolicy(Map flags) {
            super(flags);
        }

        MyPolicy() {
            super();
        }
    }

    // A key that MyPolicy does not declare; used to test unknown-config handling.
    private BasicConfigKey<String> differentKey = new BasicConfigKey<String>(String.class, "differentkey", "diffval");

    private TestApplication app;

    @BeforeMethod(alwaysRun=true)
    public void setUp() {
        app = ApplicationBuilder.newManagedApp(TestApplication.class);
    }

    @AfterMethod(alwaysRun=true)
    public void tearDown() throws Exception {
        if (app != null) Entities.destroyAll(app.getManagementContext());
    }

    @Test
    public void testConfigFlagsPassedInAtConstructionIsAvailable() throws Exception {
        MyPolicy policy = new MyPolicy(MutableMap.builder()
                .put("strKey", "aval")
                .put("intKey", 2)
                .build());
        app.addPolicy(policy);

        assertEquals(policy.getConfig(MyPolicy.STR_KEY), "aval");
        assertEquals(policy.getConfig(MyPolicy.INT_KEY), (Integer)2);
        // this is set, because key name matches annotation on STR_KEY
        assertEquals(policy.getConfig(MyPolicy.STR_KEY_WITH_DEFAULT), "aval");
    }

    @Test
    public void testUnknownConfigPassedInAtConstructionIsWarnedAndIgnored() throws Exception {
        // TODO Also assert it's warned
        MyPolicy policy = new MyPolicy(MutableMap.builder()
                .put(differentKey, "aval")
                .build());
        app.addPolicy(policy);

        // Undeclared keys are dropped: neither the value nor the key is retained.
        assertEquals(policy.getConfig(differentKey), null);
        assertEquals(policy.getPolicyType().getConfigKey(differentKey.getName()), null);
    }

    @Test
    public void testConfigPassedInAtConstructionIsAvailable() throws Exception {
        MyPolicy policy = new MyPolicy(MutableMap.builder()
                .put(MyPolicy.STR_KEY, "aval")
                .put(MyPolicy.INT_KEY, 2)
                .build());
        app.addPolicy(policy);

        assertEquals(policy.getConfig(MyPolicy.STR_KEY), "aval");
        assertEquals(policy.getConfig(MyPolicy.INT_KEY), (Integer)2);
        // this is not set (contrast with above)
        assertEquals(policy.getConfig(MyPolicy.STR_KEY_WITH_DEFAULT), MyPolicy.STR_KEY_WITH_DEFAULT.getDefaultValue());
    }

    @Test
    public void testConfigSetToGroovyTruthFalseIsAvailable() throws Exception {
        // 0 is Groovy-falsy; verify it is not mistaken for "unset" (which would yield the default 1).
        MyPolicy policy = new MyPolicy(MutableMap.builder()
                .put(MyPolicy.INT_KEY_WITH_DEFAULT, 0)
                .build());
        app.addPolicy(policy);

        assertEquals(policy.getConfig(MyPolicy.INT_KEY_WITH_DEFAULT), (Integer)0);
    }

    @Test
    public void testConfigSetToNullIsAvailable() throws Exception {
        // An explicit null must override the key's default, not fall back to it.
        MyPolicy policy = new MyPolicy(MutableMap.builder()
                .put(MyPolicy.STR_KEY_WITH_DEFAULT, null)
                .build());
        app.addPolicy(policy);

        assertEquals(policy.getConfig(MyPolicy.STR_KEY_WITH_DEFAULT), null);
    }

    @Test
    public void testConfigCanBeSetOnPolicy() throws Exception {
        // setConfig before addPolicy is allowed (contrast with the post-start test below).
        MyPolicy policy = new MyPolicy();
        policy.setConfig(MyPolicy.STR_KEY, "aval");
        policy.setConfig(MyPolicy.INT_KEY, 2);
        app.addPolicy(policy);

        assertEquals(policy.getConfig(MyPolicy.STR_KEY), "aval");
        assertEquals(policy.getConfig(MyPolicy.INT_KEY), (Integer)2);
    }

    @Test
    public void testConfigSetterOverridesConstructorValue() throws Exception {
        MyPolicy policy = new MyPolicy(MutableMap.builder()
                .put(MyPolicy.STR_KEY, "aval")
                .build());
        policy.setConfig(MyPolicy.STR_KEY, "diffval");
        app.addPolicy(policy);

        assertEquals(policy.getConfig(MyPolicy.STR_KEY), "diffval");
    }

    @Test
    public void testConfigCannotBeSetAfterApplicationIsStarted() throws Exception {
        MyPolicy policy = new MyPolicy(MutableMap.builder()
                .put(MyPolicy.STR_KEY, "origval")
                .build());
        app.addPolicy(policy);

        // Config becomes immutable once the policy is attached to a managed app.
        try {
            policy.setConfig(MyPolicy.STR_KEY,"newval");
            fail();
        } catch (UnsupportedOperationException e) {
            // success
        }

        assertEquals(policy.getConfig(MyPolicy.STR_KEY), "origval");
    }

    @Test
    public void testConfigReturnsDefaultValueIfNotSet() throws Exception {
        MyPolicy policy = new MyPolicy();
        app.addPolicy(policy);
        assertEquals(policy.getConfig(MyPolicy.STR_KEY_WITH_DEFAULT), "str key default");
    }

    // FIXME Should we support this now?
    @Test(enabled=false)
    public void testGetFutureConfigWhenReady() throws Exception {
        MyPolicy policy = new MyPolicy(MutableMap.builder()
                .put(TestEntity.CONF_NAME, DependentConfiguration.whenDone(Callables.returning("aval")))
                .build());
        app.addPolicy(policy);

        assertEquals(policy.getConfig(TestEntity.CONF_NAME), "aval");
    }

    // FIXME Should we support this now?
    @Test(enabled=false)
    public void testGetFutureConfigBlocksUntilReady() throws Exception {
        // getConfig should block until the latch (released by the helper thread) opens.
        final CountDownLatch latch = new CountDownLatch(1);
        MyPolicy policy = new MyPolicy(MutableMap.builder()
                .put(TestEntity.CONF_NAME, DependentConfiguration.whenDone(new Callable<String>() {
                    public String call() {
                        try {
                            latch.await(); return "aval";
                        } catch (InterruptedException e) {
                            throw Exceptions.propagate(e);
                        }
                    }}))
                .build());
        app.addPolicy(policy);

        Thread t = new Thread(new Runnable() {
            public void run() {
                try {
                    Thread.sleep(10+EARLY_RETURN_GRACE); latch.countDown();
                } catch (InterruptedException e) {
                    throw Exceptions.propagate(e);
                }
            }});
        try {
            long starttime = System.currentTimeMillis();
            t.start();
            assertEquals(policy.getConfig(TestEntity.CONF_NAME), "aval");
            long endtime = System.currentTimeMillis();

            assertTrue((endtime - starttime) >= 10, "starttime="+starttime+"; endtime="+endtime);

        } finally {
            t.interrupt();
        }
    }
}
| |
/*
* Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.user.mgt.workflow.userstore;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.context.CarbonContext;
import org.wso2.carbon.context.PrivilegedCarbonContext;
import org.wso2.carbon.identity.workflow.mgt.WorkflowManagementService;
import org.wso2.carbon.identity.workflow.mgt.bean.Entity;
import org.wso2.carbon.identity.workflow.mgt.exception.InternalWorkflowException;
import org.wso2.carbon.identity.workflow.mgt.exception.WorkflowException;
import org.wso2.carbon.identity.workflow.mgt.extension.AbstractWorkflowRequestHandler;
import org.wso2.carbon.identity.workflow.mgt.util.WorkflowDataType;
import org.wso2.carbon.identity.workflow.mgt.util.WorkflowRequestStatus;
import org.wso2.carbon.user.api.UserRealm;
import org.wso2.carbon.user.api.UserStoreException;
import org.wso2.carbon.user.core.common.AbstractUserStoreManager;
import org.wso2.carbon.user.core.service.RealmService;
import org.wso2.carbon.user.core.util.UserCoreUtil;
import org.wso2.carbon.user.mgt.workflow.internal.IdentityWorkflowDataHolder;
import org.wso2.carbon.user.mgt.workflow.util.UserStoreWFConstants;
import java.util.Arrays;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
public class DeleteMultipleClaimsWFRequestHandler extends AbstractWorkflowRequestHandler {
private static final String FRIENDLY_NAME = "Delete User Claims";
private static final String FRIENDLY_DESCRIPTION = "Triggered when a user deletes his/her claims.";

private static final String USERNAME = "Username";
private static final String USER_STORE_DOMAIN = "User Store Domain";
private static final String CLAIMS = "Claims to Delete";
private static final String PROFILE_NAME = "Profile Name";

// Ordered parameter-name -> workflow-data-type map describing this handler's inputs.
private static final Map<String, String> PARAM_DEFINITION;

// Fixed copy-paste bug: the logger was obtained for SetMultipleClaimsWFRequestHandler.class,
// so this handler's log output was attributed to the wrong class.
private static Log log = LogFactory.getLog(DeleteMultipleClaimsWFRequestHandler.class);

static {
    // LinkedHashMap preserves the declaration order of the workflow parameters.
    PARAM_DEFINITION = new LinkedHashMap<>();
    PARAM_DEFINITION.put(USERNAME, WorkflowDataType.STRING_TYPE);
    PARAM_DEFINITION.put(USER_STORE_DOMAIN, WorkflowDataType.STRING_TYPE);
    PARAM_DEFINITION.put(CLAIMS, WorkflowDataType.STRING_LIST_TYPE);
    PARAM_DEFINITION.put(PROFILE_NAME, WorkflowDataType.STRING_TYPE);
}
/**
 * Starts the "delete multiple user claims" workflow for the given user.
 *
 * @param userStoreDomain user store domain of the user
 * @param userName        user whose claims are being deleted
 * @param claims          claim URIs to delete (null is treated as empty)
 * @param profileName     profile the claims belong to
 * @return the executor result state of the started workflow request
 * @throws WorkflowException if the operation is invalid for the affected entities
 */
public boolean startDeleteMultipleClaimsWorkflow(String userStoreDomain, String userName, String[] claims,
                                                 String profileName) throws WorkflowException {

    WorkflowManagementService workflowService = IdentityWorkflowDataHolder.getInstance().getWorkflowService();

    // Normalize so entity building and parameter lists never see null.
    if (claims == null) {
        claims = new String[0];
    }
    int tenant = CarbonContext.getThreadLocalCarbonContext().getTenantId();
    String fullyQualifiedName = UserCoreUtil.addDomainToName(userName, userStoreDomain);
    Map<String, Object> wfParams = new HashMap<>();
    Map<String, Object> nonWfParams = new HashMap<>();
    wfParams.put(USERNAME, userName);
    wfParams.put(USER_STORE_DOMAIN, userStoreDomain);
    wfParams.put(CLAIMS, Arrays.asList(claims));
    wfParams.put(PROFILE_NAME, profileName);
    String uuid = UUID.randomUUID().toString();
    // Entities affected by this request: the user plus one entity per claim.
    Entity[] entities = new Entity[claims.length + 1];
    entities[0] = new Entity(fullyQualifiedName, UserStoreWFConstants.ENTITY_TYPE_USER, tenant);
    for (int i = 0; i < claims.length; i++) {
        entities[i + 1] = new Entity(claims[i], UserStoreWFConstants.ENTITY_TYPE_CLAIM, tenant);
    }
    // Validate only on the initial call (not on the post-approval re-entry marked
    // by the getWorkFlowCompleted() thread-local) and only when a workflow is
    // actually associated with this event.
    if (workflowService.isEventAssociated(UserStoreWFConstants.DELETE_MULTIPLE_USER_CLAIMS_EVENT) &&
            !Boolean.TRUE.equals(getWorkFlowCompleted()) && !isValidOperation(entities)) {
        throw new WorkflowException("Operation is not valid.");
    }
    boolean state = startWorkFlow(wfParams, nonWfParams, uuid).getExecutorResultState().state();

    //WF_REQUEST_ENTITY_RELATIONSHIP table has foreign key to WF_REQUEST, so need to run this after WF_REQUEST is
    // updated
    if (!Boolean.TRUE.equals(getWorkFlowCompleted()) && !state) {
        try {
            workflowService.addRequestEntityRelationships(uuid, entities);
        } catch (InternalWorkflowException e) {
            //debug exception which occurs at DB level since no workflows associated with event
            if (log.isDebugEnabled()) {
                log.debug("No workflow associated with the operation.");
            }
        }
    }
    return state;
}
@Override
public void onWorkflowCompletion(String status, Map<String, Object> requestParams,
Map<String, Object> responseAdditionalParams, int tenantId)
throws WorkflowException {
String userName;
Object requestUsername = requestParams.get(USERNAME);
if (requestUsername == null || !(requestUsername instanceof String)) {
throw new WorkflowException("Callback request for Set User Claim received without the mandatory " +
"parameter 'username'");
}
String userStoreDomain = (String) requestParams.get(USER_STORE_DOMAIN);
if (StringUtils.isNotBlank(userStoreDomain)) {
userName = userStoreDomain + "/" + requestUsername;
} else {
userName = (String) requestUsername;
}
List<String> claims = (List<String>) requestParams.get(CLAIMS);
String profile = (String) requestParams.get(PROFILE_NAME);
if (WorkflowRequestStatus.APPROVED.toString().equals(status) ||
WorkflowRequestStatus.SKIPPED.toString().equals(status)) {
try {
RealmService realmService = IdentityWorkflowDataHolder.getInstance().getRealmService();
UserRealm userRealm = realmService.getTenantUserRealm(tenantId);
userRealm.getUserStoreManager().deleteUserClaimValues(userName,
claims.toArray(new String[claims.size()]), profile);
} catch (UserStoreException e) {
// Sending e.getMessage() since it is required to give error message to end user.
throw new WorkflowException(e.getMessage(), e);
}
} else {
if (retryNeedAtCallback()) {
//unset threadlocal variable
unsetWorkFlowCompleted();
}
if (log.isDebugEnabled()) {
log.debug("Deleting User Claims is aborted for user '" + userName + "', Reason: Workflow response " +
"was: " + status);
}
}
}
@Override
public boolean retryNeedAtCallback() {
return true;
}
@Override
public String getEventId() {
return UserStoreWFConstants.DELETE_MULTIPLE_USER_CLAIMS_EVENT;
}
@Override
public Map<String, String> getParamDefinitions() {
return PARAM_DEFINITION;
}
@Override
public String getFriendlyName() {
return FRIENDLY_NAME;
}
@Override
public String getDescription() {
return FRIENDLY_DESCRIPTION;
}
@Override
public String getCategory() {
return UserStoreWFConstants.CATEGORY_USERSTORE_OPERATIONS;
}
@Override
public boolean isValidOperation(Entity[] entities) throws WorkflowException {
WorkflowManagementService workflowService = IdentityWorkflowDataHolder.getInstance().getWorkflowService();
RealmService realmService = IdentityWorkflowDataHolder.getInstance().getRealmService();
UserRealm userRealm;
AbstractUserStoreManager userStoreManager;
try {
userRealm = realmService.getTenantUserRealm(PrivilegedCarbonContext.getThreadLocalCarbonContext()
.getTenantId());
userStoreManager = (AbstractUserStoreManager) userRealm.getUserStoreManager();
} catch (UserStoreException e) {
throw new WorkflowException("Error while retrieving user realm.", e);
}
for (int i = 0; i < entities.length; i++) {
try {
if (entities[i].getEntityType() == UserStoreWFConstants.ENTITY_TYPE_USER && workflowService
.entityHasPendingWorkflowsOfType(entities[i], UserStoreWFConstants.DELETE_USER_EVENT)) {
throw new WorkflowException("User has a delete operation pending.");
} else if (entities[i].getEntityType() == UserStoreWFConstants.ENTITY_TYPE_USER && !userStoreManager
.isExistingUser(entities[i].getEntityId())) {
throw new WorkflowException("User " + entities[i].getEntityId() + " does not exist.");
}
if (entities[i].getEntityType() == UserStoreWFConstants.ENTITY_TYPE_USER) {
for (int j = 0; j < entities.length; j++) {
if (entities[j].getEntityType() == UserStoreWFConstants.ENTITY_TYPE_CLAIM && workflowService
.areTwoEntitiesRelated(entities[i], entities[j])) {
throw new WorkflowException(entities[j].getEntityId() + " of user is already in a " +
"workflow to delete or update.");
}
}
}
} catch (InternalWorkflowException | org.wso2.carbon.user.core.UserStoreException e) {
throw new WorkflowException(e.getMessage(), e);
}
}
return true;
}
}
| |
package com.github.agfsapi4j;
import com.sun.jna.Pointer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.Closeable;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.List;
@SuppressWarnings("squid:S1191")
public class GlusterFsSession implements Closeable
{
private static final int DEFAULT_LOG_LEVEL = 10;
private Logger log = LoggerFactory.getLogger(GlusterFsSession.class);
private int maxPathLength = GlusterFsApi.DEFAULT_MAX_PATH_LENGTH;
private int maxFileNameLength = GlusterFsApi.DEFAULT_MAX_FILE_NAME_LENGTH;
private String charSet = GlusterFsApi.DEFAULT_CHAR_SET;
private LibGfapi lib = LibGfapiProvider.get();
private LogAccess logAccess = new LogAccess(this);
private ResourceTracker resourceTracker = new ResourceTracker();
private Throwable connectStackTrace;
long glFsPtr;
GlusterFsSession()
{
}
void connect(String hostName, int port, String volName)
{
this.connectStackTrace = new Throwable("Stack trace of allocation");
this.connectStackTrace.fillInStackTrace();
this.glFsPtr = lib.glfs_new(volName);
checkPtr(this.glFsPtr, "glfs_new failed (0).");
int error = lib.glfs_set_logging(glFsPtr, logAccess.getLogFilePath(), DEFAULT_LOG_LEVEL);
checkError(error, "glfs_set_logging failed (%d).");
try
{
this.logAccess.beforeOp();
error = lib.glfs_set_volfile(glFsPtr, volName);
checkError(error, "glfs_set_volfile failed (%d).");
}
finally
{
this.logAccess.afterOp();
}
try
{
this.logAccess.beforeOp();
error = lib.glfs_set_volfile_server(glFsPtr, "tcp", hostName, port);
checkError(error, "glfs_set_volfile_server failed (%d).");
}
finally
{
this.logAccess.afterOp();
}
try
{
this.logAccess.beforeOp();
error = lib.glfs_init(glFsPtr);
checkError(error, "glfs_init failed (%d).");
}
finally
{
this.logAccess.afterOp();
}
}
public String cwd()
{
checkConnected();
try
{
this.logAccess.beforeOp();
byte[] bbuf = new byte[maxPathLength * 2];
long ptr = lib.glfs_getcwd(glFsPtr, bbuf, bbuf.length);
checkPtr(ptr, "glfs_getcwd failed.");
return new CStringReader(maxPathLength, charSet).readFrom(ByteBuffer.wrap(bbuf), 0);
}
finally
{
this.logAccess.afterOp();
}
}
public void chown(String path, int userId, int groupId)
{
checkConnected();
try
{
this.logAccess.beforeOp();
int result = lib.glfs_chown(this.glFsPtr, path, userId, groupId);
checkError(result, "glfs_chown failed.");
}
finally
{
this.logAccess.afterOp();
}
}
public GlusterFsFile create(String path, int flags, int mode)
{
checkConnected();
try
{
this.logAccess.beforeOp();
long glFsFdPtr = lib.glfs_creat(this.glFsPtr, path, (short) flags, Mode.valueOf(mode));
checkPtr(glFsFdPtr, "glfs_create failed.");
return new GlusterFsFile(this, this.lib, this.logAccess, this.resourceTracker, glFsFdPtr);
}
finally
{
this.logAccess.afterOp();
}
}
public GlusterFsFile open(String path, int flags)
{
checkConnected();
try
{
this.logAccess.beforeOp();
long glFsFdPtr = lib.glfs_open(this.glFsPtr, path, (short) flags);
checkPtr(glFsFdPtr, "glfs_open failed.");
return new GlusterFsFile(this, this.lib, this.logAccess, this.resourceTracker, glFsFdPtr);
}
finally
{
this.logAccess.afterOp();
}
}
private boolean isClosed()
{
return this.glFsPtr == 0;
}
@SuppressWarnings("squid:ObjectFinalizeOverridenCheck")
@Override
protected void finalize() throws Throwable
{
if (!isClosed())
{
closeOnFinalize();
}
super.finalize();
}
private void closeOnFinalize()
{
try
{
log.warn("Session has not been closed.", this.connectStackTrace);
close();
}
catch (Exception ex)
{
log.warn("Error while closing session in finalize.", ex);
}
}
public void close()
{
this.resourceTracker.closeResources();
if (glFsPtr != 0)
{
lib.glfs_fini(glFsPtr);
glFsPtr = 0;
}
this.logAccess.close(true);
}
private void checkConnected()
{
checkPtr(this.glFsPtr, "Session closed.");
}
void checkPtr(long ptr, String message)
{
if (ptr == 0)
{
raiseError(message);
}
}
void checkPtr(Pointer ptr, String message)
{
if (ptr == Pointer.NULL)
{
raiseError(message);
}
}
void checkError(int error, String msg)
{
if (error != 0)
{
raiseError(String.format(msg, error));
}
}
void raiseError(String message)
{
List<String> errors = this.logAccess.getLogMessages();
throw new GlusterFsRuntimeException(message, errors);
}
public GlusterFsFileStats stat(String path)
{
checkConnected();
try
{
this.logAccess.beforeOp();
byte[] bbuf = new byte[512];
int result = this.lib.glfs_stat(this.glFsPtr, path, bbuf);
checkError(result, "glfs_stat failed.");
ByteBuffer buf = ByteBuffer.wrap(bbuf);
buf.order(ByteOrder.LITTLE_ENDIAN);
return new FileStatsImpl(buf);
}
finally
{
this.logAccess.afterOp();
}
}
public void symlink(String targetPath, String sourcePath)
{
checkConnected();
try
{
this.logAccess.beforeOp();
int result = this.lib.glfs_symlink(this.glFsPtr, targetPath, sourcePath);
checkError(result, "glfs_symlink failed.");
}
finally
{
this.logAccess.afterOp();
}
}
public void rename(String oldPath, String newPath)
{
checkConnected();
try
{
this.logAccess.beforeOp();
int result = this.lib.glfs_rename(this.glFsPtr, oldPath, newPath);
checkError(result, "glfs_rename failed.");
}
finally
{
this.logAccess.afterOp();
}
}
public void mkdir(String path, int mode)
{
checkConnected();
try
{
this.logAccess.beforeOp();
int result = this.lib.glfs_mkdir(this.glFsPtr, path, Mode.valueOf(mode));
checkError(result, "glfs_mkdir failed.");
}
finally
{
this.logAccess.afterOp();
}
}
public void rmdir(String path)
{
checkConnected();
try
{
this.logAccess.beforeOp();
int result = this.lib.glfs_rmdir(this.glFsPtr, path);
checkError(result, "glfs_rmdir failed.");
}
finally
{
this.logAccess.afterOp();
}
}
public void unlink(String path)
{
checkConnected();
try
{
this.logAccess.beforeOp();
int result = this.lib.glfs_unlink(this.glFsPtr, path);
checkError(result, "glfs_unlink failed.");
}
finally
{
this.logAccess.afterOp();
}
}
public void truncate(String path)
{
truncate(path, 0);
}
public void truncate(String path, int offset)
{
checkConnected();
try
{
this.logAccess.beforeOp();
int result = this.lib.glfs_truncate(this.glFsPtr, path, offset);
checkError(result, "glfs_truncate failed.");
}
finally
{
this.logAccess.afterOp();
}
}
public void chdir(String path)
{
checkConnected();
try
{
this.logAccess.beforeOp();
int result = this.lib.glfs_chdir(this.glFsPtr, path);
checkError(result, "glfs_chdir failed.");
}
finally
{
this.logAccess.afterOp();
}
}
public GlusterFsDirectoryIndex opendir(String path)
{
checkConnected();
try
{
this.logAccess.beforeOp();
Pointer result = this.lib.glfs_opendir(this.glFsPtr, path);
checkPtr(result, "glfs_opendir failed.");
return new DirectoryIndexImpl(this, this.lib,
this.logAccess, this.resourceTracker, result);
}
finally
{
this.logAccess.afterOp();
}
}
int getMaxFileNameLength()
{
return maxFileNameLength;
}
String getCharSet()
{
return charSet;
}
}
| |
package org.deeplearning4j.examples.rnn.strata.physionet.output.single;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.URL;
import java.text.DecimalFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Random;
import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream;
import org.apache.commons.io.FileUtils;
import org.deeplearning4j.eval.Evaluation;
import org.deeplearning4j.examples.rnn.strata.physionet.output.single.PhysioNet_ICU_SingleLabel_Iterator;
import org.deeplearning4j.examples.rnn.strata.physionet.utils.EvalScoreTracker;
import org.deeplearning4j.examples.rnn.strata.physionet.utils.PhysioNetDataUtils;
import org.deeplearning4j.nn.api.Layer;
import org.deeplearning4j.nn.api.OptimizationAlgorithm;
import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.Updater;
import org.deeplearning4j.nn.conf.distribution.UniformDistribution;
import org.deeplearning4j.nn.conf.layers.GravesLSTM;
import org.deeplearning4j.nn.conf.layers.RnnOutputLayer;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.deeplearning4j.nn.weights.WeightInit;
import org.deeplearning4j.optimize.listeners.ScoreIterationListener;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.dataset.DataSet;
import org.nd4j.linalg.lossfunctions.LossFunctions.LossFunction;
public class PhysioNet_LSTM_Model {
public static void main( String[] args ) throws Exception {
trainPhysioNetExample();
}
public static void scoreInputWithModel(String modelPath) throws Exception {
}
public static void resumeTrainingPhysioNetModel(String modelPath) throws Exception {
}
public static void trainPhysioNetExample() throws Exception {
String existingModelPath = null; //"/tmp/rnns/physionet/models/dl4j_model_run_2016-03-21_11_27_18/epoch_9_f1_0.7380/";
int lstmLayerSize = 300; //Number of units in each GravesLSTM layer
int miniBatchSize = 20; //Size of mini batch to use when training
//int totalExamplesToTrainWith = 1100;
int trainingExamples = 200; // 2800;
int testExamples = 100; //600;
int validateExamples = 100; //600;
double learningRate = 0.009;
int numEpochs = 5; //Total number of training + sample generation epochs
Random rng = new Random(12345);
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd_HH_mm_ss");
int nOut = 1; //iter.totalOutcomes();
PhysioNet_ICU_SingleLabel_Iterator iter = new PhysioNet_ICU_SingleLabel_Iterator( "/tmp/set-a-full-splits-1/train/", "src/test/resources/physionet_schema_zmzuv_0.txt", "src/test/resources/data/physionet/sample/set-a-labels/Outcomes-a.txt", miniBatchSize, trainingExamples);
PhysioNet_ICU_SingleLabel_Iterator iter_validate = new PhysioNet_ICU_SingleLabel_Iterator( "/tmp/set-a-full-splits-1/validate/", "src/test/resources/physionet_schema_zmzuv_0.txt", "src/test/resources/data/physionet/sample/set-a-labels/Outcomes-a.txt", 1, validateExamples);
PhysioNet_ICU_SingleLabel_Iterator test_iter = new PhysioNet_ICU_SingleLabel_Iterator( "/tmp/set-a-full-splits-1/test/", "src/test/resources/physionet_schema_zmzuv_0.txt", "src/test/resources/data/physionet/sample/set-a-labels/Outcomes-a.txt", testExamples, testExamples);
iter.reset();
test_iter.reset();
iter_validate.reset();
System.out.println( "We have " + iter.inputColumns() + " input columns." );
System.out.println( "We have " + nOut + " output columns." );
// *****************************
// TODO: Drop:
/*
dropout for rnns is applied on the input activations only, not recurrent activations
as is common in the literature
same as other layers
so .dropout(0.5) with .regularization(true)
*/
//Set up network configuration:
MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).iterations(1)
.learningRate( learningRate )
.rmsDecay(0.95)
.seed(12345)
.regularization(true)
.l2(0.001)
//.dropOut(0.5)
.list(3)
.layer(0, new GravesLSTM.Builder().nIn(iter.inputColumns()).nOut(lstmLayerSize)
.updater(Updater.RMSPROP)
.activation("tanh").weightInit(WeightInit.DISTRIBUTION)
.dist(new UniformDistribution(-0.08, 0.08)).build())
.layer(1, new GravesLSTM.Builder().nIn(lstmLayerSize).nOut(lstmLayerSize)
.updater(Updater.RMSPROP)
.activation("tanh").weightInit(WeightInit.DISTRIBUTION)
.dist(new UniformDistribution(-0.08, 0.08)).build())
.layer(2, new RnnOutputLayer.Builder(LossFunction.NEGATIVELOGLIKELIHOOD).activation("sigmoid") //MCXENT + softmax for classification
.updater(Updater.RMSPROP)
.nIn(lstmLayerSize).nOut(nOut).weightInit(WeightInit.DISTRIBUTION)
.dist(new UniformDistribution(-0.08, 0.08)).build())
.pretrain(false).backprop(true)
.build();
MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init();
net.setListeners(new ScoreIterationListener(1));
// DAVE: UNCOMMENT HERE AND REPLACE DIRS TO RESUME TRAINING...
// System.out.println( "Loading old parameters [test] >> " );
if (null != existingModelPath) {
PhysioNetDataUtils.loadDL4JNetworkParameters( net, existingModelPath );
iter_validate.reset();
Evaluation evaluation_validate = new Evaluation( nOut );
while(iter_validate.hasNext()){
DataSet t = iter_validate.next();
INDArray features = t.getFeatureMatrix();
INDArray lables = t.getLabels();
INDArray inMask = t.getFeaturesMaskArray();
INDArray outMask = t.getLabelsMaskArray();
INDArray predicted = net.output(features,false,inMask,outMask);
evaluation_validate.evalTimeSeries(lables,predicted,outMask);
}
System.out.println( "\nParameter Load --- Pre Check: Validate Evaluation: ");
System.out.println( evaluation_validate.stats() );
}
org.deeplearning4j.eval.Evaluation e = new Evaluation();
//Print the number of parameters in the network (and for each layer)
Layer[] layers = net.getLayers();
int totalNumParams = 0;
for( int i=0; i<layers.length; i++ ){
int nParams = layers[i].numParams();
System.out.println("Number of parameters in layer " + i + ": " + nParams);
totalNumParams += nParams;
}
System.out.println("Total number of network parameters: " + totalNumParams);
long startTime = System.currentTimeMillis();
EvalScoreTracker f1Tracker = new EvalScoreTracker( 20 );
Date now = new Date();
String strDate = sdf.format(now);
DecimalFormat df = new DecimalFormat("#.0000");
String runID = "dl4j_model_run_" + strDate;
//Do training, and then generate and print samples from network
for ( int i=0; i<numEpochs; i++ ){
iter.reset();
test_iter.reset();
iter_validate.reset();
net.fit(iter);
System.out.println("--------------------");
System.out.println("Completed epoch " + i );
long curTime = System.currentTimeMillis();
long progressElapsedTimeMS = (curTime - startTime);
long processElpasedTimeMin = progressElapsedTimeMS / 1000 / 60;
System.out.println("Elapsed Time So Far: " + processElpasedTimeMin + " minutes");
//Evaluation eval = new Evaluation( 2 );
//INDArray output = net.output( testInput );
iter.reset();
test_iter.reset();
iter_validate.reset();
Evaluation evaluation_train = new Evaluation( nOut );
while(iter.hasNext()){
DataSet t = iter.next();
INDArray features = t.getFeatureMatrix();
INDArray lables = t.getLabels();
INDArray inMask = t.getFeaturesMaskArray();
INDArray outMask = t.getLabelsMaskArray();
INDArray predicted = net.output(features,false,inMask,outMask);
evaluation_train.evalTimeSeries(lables,predicted,outMask);
}
Evaluation evaluation_validate = new Evaluation( nOut );
while(iter_validate.hasNext()){
DataSet t = iter_validate.next();
INDArray features = t.getFeatureMatrix();
INDArray lables = t.getLabels();
INDArray inMask = t.getFeaturesMaskArray();
INDArray outMask = t.getLabelsMaskArray();
INDArray predicted = net.output(features,false,inMask,outMask);
/*
System.out.println("predicted: ");
System.out.println( predicted.getRow(0) );
System.out.println("label: ");
System.out.println( lables.getRow(0) );
*/
evaluation_validate.evalTimeSeries(lables,predicted,outMask);
//evaluation_validate.ev
}
//test_iter.reset();
System.out.println( "\nTrain Evaluation: ");
System.out.println( evaluation_train.stats() );
System.out.println( "\nValidate Evaluation: ");
System.out.println( evaluation_validate.stats() );
f1Tracker.addF1( i, evaluation_train.f1(), evaluation_validate.f1() );
f1Tracker.printWindow();
String epochID = "epoch_" + i + "_f1_0" + df.format( evaluation_train.f1() );
String fileNamePathBase = "/tmp/rnns/physionet/models/" + runID + "/" + epochID + "/";
File dirs = new File(fileNamePathBase);
dirs.mkdirs();
PhysioNetDataUtils.saveDL4JNetwork( net, fileNamePathBase );
}
/*
also fyi for evaluation (using Evaluation class), you can just pass the full/padded output along with the label mask array, and it'll do the subsetting for you
i.e., it'll only do the evaluation where the real data is (according to mask array) and ignore the padded time steps
also you can do an iamax op along dimension 1 for the label mask array to work out where real output is
that only works for the many-to-one case though
*/
Evaluation evaluation_final_test = new Evaluation(2);
while(test_iter.hasNext()){
DataSet t = test_iter.next();
INDArray features = t.getFeatureMatrix();
INDArray lables = t.getLabels();
INDArray inMask = t.getFeaturesMaskArray();
INDArray outMask = t.getLabelsMaskArray();
INDArray predicted = net.output(features,false,inMask,outMask);
evaluation_final_test.evalTimeSeries(lables,predicted,outMask);
}
//test_iter.reset();
System.out.println( "\n\n\nFinal Test Evaluation: ");
System.out.println( evaluation_final_test.stats() );
System.out.println("\n\nExample complete");
long endTime = System.currentTimeMillis();
long elapsedTimeMS = (endTime - startTime);
long elpasedTimeSeconds = elapsedTimeMS / 1000;
long elapsedTimeMinutes = elpasedTimeSeconds / 60;
long elapsedTimeHours = elapsedTimeMinutes / 60;
System.out.println("Training took " + (elpasedTimeSeconds) + " seconds");
System.out.println("Training took " + elapsedTimeMinutes + " minutes");
System.out.println("Training took " + elapsedTimeHours + " hours");
}
}
| |
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.notification.impl;
import com.intellij.icons.AllIcons;
import com.intellij.ide.DataManager;
import com.intellij.ide.ui.UISettings;
import com.intellij.ide.ui.UISettingsListener;
import com.intellij.notification.EventLog;
import com.intellij.notification.LogModel;
import com.intellij.notification.Notification;
import com.intellij.notification.NotificationType;
import com.intellij.openapi.actionSystem.PlatformDataKeys;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.wm.CustomStatusBarWidget;
import com.intellij.openapi.wm.IconLikeCustomStatusBarWidget;
import com.intellij.openapi.wm.StatusBar;
import com.intellij.openapi.wm.ToolWindow;
import com.intellij.ui.ClickListener;
import com.intellij.ui.LayeredIcon;
import com.intellij.util.Alarm;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.awt.*;
import java.awt.event.MouseEvent;
import java.util.ArrayList;
import java.util.List;
/**
* @author spleaner
*/
public class IdeNotificationArea extends JLabel implements CustomStatusBarWidget, IconLikeCustomStatusBarWidget {
public static final String WIDGET_ID = "Notifications";
private StatusBar myStatusBar;
private final Alarm myLogAlarm = new Alarm(Alarm.ThreadToUse.SWING_THREAD);
public IdeNotificationArea() {
Disposer.register(this, myLogAlarm);
UISettings.getInstance().addUISettingsListener(new UISettingsListener() {
@Override
public void uiSettingsChanged(UISettings source) {
updateStatus();
}
}, this);
new ClickListener() {
@Override
public boolean onClick(MouseEvent e, int clickCount) {
EventLog.toggleLog(getProject());
return true;
}
}.installOn(this);
ApplicationManager.getApplication().getMessageBus().connect().subscribe(LogModel.LOG_MODEL_CHANGED, new Runnable() {
@Override
public void run() {
ApplicationManager.getApplication().invokeLater(new Runnable() {
@Override
public void run() {
updateStatus();
}
});
}
});
}
public WidgetPresentation getPresentation(@NotNull PlatformType type) {
return null;
}
public void dispose() {
}
public void install(@NotNull StatusBar statusBar) {
myStatusBar = statusBar;
updateStatus();
}
@Nullable
private Project getProject() {
return PlatformDataKeys.PROJECT.getData(DataManager.getInstance().getDataContext((Component) myStatusBar));
}
@NotNull
public String ID() {
return WIDGET_ID;
}
private void updateStatus() {
final Project project = getProject();
ArrayList<Notification> notifications = EventLog.getLogModel(project).getNotifications();
applyIconToStatusAndToolWindow(project, createIconWithNotificationCount(notifications));
int count = notifications.size();
setToolTipText(count > 0 ? String.format("%s notification%s pending", count, count == 1 ? "" : "s") : "No new notifications");
myStatusBar.updateWidget(ID());
}
private void applyIconToStatusAndToolWindow(Project project, LayeredIcon icon) {
if (UISettings.getInstance().HIDE_TOOL_STRIPES || UISettings.getInstance().PRESENTATION_MODE) {
setVisible(true);
setIcon(icon);
}
else {
ToolWindow eventLog = EventLog.getEventLog(project);
if (eventLog != null) {
eventLog.setIcon(icon);
}
setVisible(false);
}
}
private LayeredIcon createIconWithNotificationCount(ArrayList<Notification> notifications) {
LayeredIcon icon = new LayeredIcon(2);
Icon statusIcon = getPendingNotificationsIcon(AllIcons.Ide.Notifications, getMaximumType(notifications));
icon.setIcon(statusIcon, 0);
if (notifications.size() > 0) {
icon.setIcon(new TextIcon(this, String.valueOf(notifications.size())), 1, statusIcon.getIconWidth() - 2, 0);
}
return icon;
}
@Override
public JComponent getComponent() {
return this;
}
private static Icon getPendingNotificationsIcon(Icon defIcon, final NotificationType maximumType) {
if (maximumType != null) {
switch (maximumType) {
case WARNING: return AllIcons.Ide.Warning_notifications;
case ERROR: return AllIcons.Ide.Error_notifications;
case INFORMATION: return AllIcons.Ide.Info_notifications;
}
}
return defIcon;
}
@Nullable
private static NotificationType getMaximumType(List<Notification> notifications) {
NotificationType result = null;
for (Notification notification : notifications) {
if (NotificationType.ERROR == notification.getType()) {
return NotificationType.ERROR;
}
if (NotificationType.WARNING == notification.getType()) {
result = NotificationType.WARNING;
}
else if (result == null && NotificationType.INFORMATION == notification.getType()) {
result = NotificationType.INFORMATION;
}
}
return result;
}
private static class TextIcon implements Icon {
private final String myStr;
private final JComponent myComponent;
private final int myWidth;
public TextIcon(JComponent component, @NotNull String str) {
myStr = str;
myComponent = component;
myWidth = myComponent.getFontMetrics(calcFont()).stringWidth(myStr) + 1;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (!(o instanceof TextIcon)) return false;
TextIcon icon = (TextIcon)o;
if (myWidth != icon.myWidth) return false;
if (!myComponent.equals(icon.myComponent)) return false;
if (!myStr.equals(icon.myStr)) return false;
return true;
}
@Override
public int hashCode() {
int result = myStr.hashCode();
result = 31 * result + myComponent.hashCode();
result = 31 * result + myWidth;
return result;
}
@Override
public void paintIcon(Component c, Graphics g, int x, int y) {
Font originalFont = g.getFont();
Color originalColor = g.getColor();
g.setFont(calcFont());
y += getIconHeight() - g.getFontMetrics().getDescent();
g.setColor(UIUtil.getLabelForeground());
g.drawString(myStr, x, y);
g.setFont(originalFont);
g.setColor(originalColor);
}
private Font calcFont() {
return myComponent.getFont().deriveFont(Font.BOLD).deriveFont((float) getIconHeight() * 3 / 5);
}
@Override
public int getIconWidth() {
return myWidth;
}
@Override
public int getIconHeight() {
return AllIcons.Ide.Notifications.getIconHeight();
}
}
}
| |
package com.mapswithme.maps.bookmarks;
import android.content.Intent;
import android.os.Bundle;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import com.google.android.material.tabs.TabLayout;
import androidx.fragment.app.FragmentManager;
import androidx.viewpager.widget.ViewPager;
import android.text.TextUtils;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import com.mapswithme.maps.R;
import com.mapswithme.maps.auth.TargetFragmentCallback;
import com.mapswithme.maps.base.BaseMwmFragment;
import com.mapswithme.maps.dialog.AlertDialogCallback;
import com.mapswithme.maps.purchase.PurchaseUtils;
import com.mapswithme.util.SharedPropertiesUtils;
import com.mapswithme.util.statistics.Statistics;
import java.util.Arrays;
import java.util.List;
public class BookmarkCategoriesPagerFragment extends BaseMwmFragment
implements TargetFragmentCallback, AlertDialogCallback, AuthCompleteListener
{
final static String ARG_CATEGORIES_PAGE = "arg_categories_page";
final static String ARG_CATALOG_DEEPLINK = "arg_catalog_deeplink";
@SuppressWarnings("NullableProblems")
@NonNull
private BookmarksPagerAdapter mAdapter;
@SuppressWarnings("NullableProblems")
@Nullable
private String mCatalogDeeplink;
@SuppressWarnings("NullableProblems")
@NonNull
private BookmarksDownloadFragmentDelegate mDelegate;
@SuppressWarnings("NullableProblems")
@NonNull
private AlertDialogCallback mInvalidSubsDialogCallback;
@SuppressWarnings("NullableProblems")
@NonNull
private ViewPager mViewPager;
@Override
public void onCreate(@Nullable Bundle savedInstanceState)
{
super.onCreate(savedInstanceState);
mDelegate = new BookmarksDownloadFragmentDelegate(this);
mDelegate.onCreate(savedInstanceState);
Bundle args = getArguments();
if (args == null)
return;
mCatalogDeeplink = args.getString(ARG_CATALOG_DEEPLINK);
mInvalidSubsDialogCallback = new InvalidSubscriptionAlertDialogCallback(this);
}
@Override
public void onSaveInstanceState(Bundle outState)
{
super.onSaveInstanceState(outState);
mDelegate.onSaveInstanceState(outState);
}
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data)
{
super.onActivityResult(requestCode, resultCode, data);
mDelegate.onActivityResult(requestCode, resultCode, data);
}
@Nullable
@Override
public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container,
@Nullable Bundle savedInstanceState)
{
View root = inflater.inflate(R.layout.fragment_bookmark_categories_pager, container, false);
mViewPager = root.findViewById(R.id.viewpager);
TabLayout tabLayout = root.findViewById(R.id.sliding_tabs_layout);
FragmentManager fm = getActivity().getSupportFragmentManager();
List<BookmarksPageFactory> dataSet = getAdapterDataSet();
mAdapter = new BookmarksPagerAdapter(getContext(), fm, dataSet);
mViewPager.setAdapter(mAdapter);
mViewPager.setCurrentItem(saveAndGetInitialPage());
tabLayout.setupWithViewPager(mViewPager);
mViewPager.addOnPageChangeListener(new PageChangeListener());
mDelegate.onCreateView(savedInstanceState);
return root;
}
@Override
public void onStart()
{
super.onStart();
mDelegate.onStart();
if (TextUtils.isEmpty(mCatalogDeeplink))
return;
mDelegate.downloadBookmark(mCatalogDeeplink);
mCatalogDeeplink = null;
}
@Override
public void onResume()
{
super.onResume();
mDelegate.onResume();
}
@Override
public void onPause()
{
super.onPause();
mDelegate.onPause();
}
@Override
public void onStop()
{
super.onStop();
mDelegate.onStop();
}
@Override
public void onDestroyView()
{
super.onDestroyView();
mDelegate.onDestroyView();
}
private int saveAndGetInitialPage()
{
Bundle args = getArguments();
if (args != null && args.containsKey(ARG_CATEGORIES_PAGE))
{
int page = args.getInt(ARG_CATEGORIES_PAGE);
SharedPropertiesUtils.setLastVisibleBookmarkCategoriesPage(getActivity(), page);
return page;
}
return SharedPropertiesUtils.getLastVisibleBookmarkCategoriesPage(getActivity());
}
/** Builds the pager's data set: one page per {@link BookmarksPageFactory} value, in enum order. */
@NonNull
private static List<BookmarksPageFactory> getAdapterDataSet()
{
    return Arrays.asList(BookmarksPageFactory.values());
}
@Override
public void onTargetFragmentResult(int resultCode, @Nullable Intent data)
{
    // Results addressed to this fragment are handled by the delegate.
    mDelegate.onTargetFragmentResult(resultCode, data);
}
@Override
public boolean isTargetAdded()
{
    // Delegated: the delegate knows whether its target fragment is attached.
    return mDelegate.isTargetAdded();
}
@Override
public void onAlertDialogPositiveClick(int requestCode, int which)
{
    // Invalid-subscription dialog confirmations are handled by the dedicated callback.
    mInvalidSubsDialogCallback.onAlertDialogPositiveClick(requestCode, which);
}
@Override
public void onAlertDialogNegativeClick(int requestCode, int which)
{
    // Braces added around the guarded statement: the original unbraced 'if' only
    // covered the setAdapter call (which is the intended behavior — the callback
    // below must always run), but read ambiguously and invited edit mistakes.
    if (PurchaseUtils.REQ_CODE_CHECK_INVALID_SUBS_DIALOG == requestCode)
    {
        // Re-set the adapter to refresh the pages after the user dismisses
        // the invalid-subscription check dialog negatively.
        mViewPager.setAdapter(mAdapter);
    }
    mInvalidSubsDialogCallback.onAlertDialogNegativeClick(requestCode, which);
}
@Override
public void onAlertDialogCancel(int requestCode)
{
    // Dialog cancellation is handled by the dedicated callback.
    mInvalidSubsDialogCallback.onAlertDialogCancel(requestCode);
}
@Override
public void onAuthCompleted()
{
    // Re-set the adapter so pages rebuild with the authenticated state.
    mViewPager.setAdapter(mAdapter);
}
/**
 * Persists the selected page as the last visible one and reports the
 * selection to analytics.
 */
private class PageChangeListener extends ViewPager.SimpleOnPageChangeListener
{
    @Override
    public void onPageSelected(int position)
    {
        SharedPropertiesUtils.setLastVisibleBookmarkCategoriesPage(getActivity(), position);
        BookmarksPageFactory factory = mAdapter.getItemFactory(position);
        Statistics.INSTANCE.trackBookmarksTabEvent(factory.getAnalytics().getName());
    }
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.plugin.hive;
import com.google.common.collect.ImmutableList;
import io.prestosql.plugin.hive.HivePageSourceProvider.BucketAdaptation;
import io.prestosql.plugin.hive.HivePageSourceProvider.ColumnMapping;
import io.prestosql.plugin.hive.coercions.DoubleToFloatCoercer;
import io.prestosql.plugin.hive.coercions.FloatToDoubleCoercer;
import io.prestosql.plugin.hive.coercions.IntegerNumberToVarcharCoercer;
import io.prestosql.plugin.hive.coercions.IntegerNumberUpscaleCoercer;
import io.prestosql.plugin.hive.coercions.VarcharToIntegerNumberCoercer;
import io.prestosql.spi.Page;
import io.prestosql.spi.PrestoException;
import io.prestosql.spi.block.ArrayBlock;
import io.prestosql.spi.block.Block;
import io.prestosql.spi.block.ColumnarArray;
import io.prestosql.spi.block.ColumnarMap;
import io.prestosql.spi.block.ColumnarRow;
import io.prestosql.spi.block.DictionaryBlock;
import io.prestosql.spi.block.LazyBlock;
import io.prestosql.spi.block.LazyBlockLoader;
import io.prestosql.spi.block.RowBlock;
import io.prestosql.spi.block.RunLengthEncodedBlock;
import io.prestosql.spi.connector.ConnectorPageSource;
import io.prestosql.spi.type.DecimalType;
import io.prestosql.spi.type.MapType;
import io.prestosql.spi.type.Type;
import io.prestosql.spi.type.TypeManager;
import io.prestosql.spi.type.VarcharType;
import it.unimi.dsi.fastutil.ints.IntArrayList;
import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.joda.time.DateTimeZone;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.function.Function;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static io.prestosql.plugin.hive.HiveBucketing.getHiveBucket;
import static io.prestosql.plugin.hive.HiveErrorCode.HIVE_CURSOR_ERROR;
import static io.prestosql.plugin.hive.HiveErrorCode.HIVE_INVALID_BUCKET_FILES;
import static io.prestosql.plugin.hive.HivePageSourceProvider.ColumnMappingKind.PREFILLED;
import static io.prestosql.plugin.hive.HiveType.HIVE_BYTE;
import static io.prestosql.plugin.hive.HiveType.HIVE_DOUBLE;
import static io.prestosql.plugin.hive.HiveType.HIVE_FLOAT;
import static io.prestosql.plugin.hive.HiveType.HIVE_INT;
import static io.prestosql.plugin.hive.HiveType.HIVE_LONG;
import static io.prestosql.plugin.hive.HiveType.HIVE_SHORT;
import static io.prestosql.plugin.hive.HiveUtil.bigintPartitionKey;
import static io.prestosql.plugin.hive.HiveUtil.booleanPartitionKey;
import static io.prestosql.plugin.hive.HiveUtil.charPartitionKey;
import static io.prestosql.plugin.hive.HiveUtil.datePartitionKey;
import static io.prestosql.plugin.hive.HiveUtil.doublePartitionKey;
import static io.prestosql.plugin.hive.HiveUtil.extractStructFieldTypes;
import static io.prestosql.plugin.hive.HiveUtil.floatPartitionKey;
import static io.prestosql.plugin.hive.HiveUtil.integerPartitionKey;
import static io.prestosql.plugin.hive.HiveUtil.isArrayType;
import static io.prestosql.plugin.hive.HiveUtil.isHiveNull;
import static io.prestosql.plugin.hive.HiveUtil.isMapType;
import static io.prestosql.plugin.hive.HiveUtil.isRowType;
import static io.prestosql.plugin.hive.HiveUtil.longDecimalPartitionKey;
import static io.prestosql.plugin.hive.HiveUtil.shortDecimalPartitionKey;
import static io.prestosql.plugin.hive.HiveUtil.smallintPartitionKey;
import static io.prestosql.plugin.hive.HiveUtil.timestampPartitionKey;
import static io.prestosql.plugin.hive.HiveUtil.tinyintPartitionKey;
import static io.prestosql.plugin.hive.HiveUtil.varcharPartitionKey;
import static io.prestosql.plugin.hive.coercions.DecimalCoercers.createDecimalToDecimalCoercer;
import static io.prestosql.plugin.hive.coercions.DecimalCoercers.createDecimalToDoubleCoercer;
import static io.prestosql.plugin.hive.coercions.DecimalCoercers.createDecimalToRealCoercer;
import static io.prestosql.plugin.hive.coercions.DecimalCoercers.createDoubleToDecimalCoercer;
import static io.prestosql.plugin.hive.coercions.DecimalCoercers.createRealToDecimalCoercer;
import static io.prestosql.spi.StandardErrorCode.NOT_SUPPORTED;
import static io.prestosql.spi.block.ColumnarArray.toColumnarArray;
import static io.prestosql.spi.block.ColumnarMap.toColumnarMap;
import static io.prestosql.spi.block.ColumnarRow.toColumnarRow;
import static io.prestosql.spi.type.BigintType.BIGINT;
import static io.prestosql.spi.type.BooleanType.BOOLEAN;
import static io.prestosql.spi.type.Chars.isCharType;
import static io.prestosql.spi.type.DateType.DATE;
import static io.prestosql.spi.type.Decimals.isLongDecimal;
import static io.prestosql.spi.type.Decimals.isShortDecimal;
import static io.prestosql.spi.type.DoubleType.DOUBLE;
import static io.prestosql.spi.type.IntegerType.INTEGER;
import static io.prestosql.spi.type.RealType.REAL;
import static io.prestosql.spi.type.SmallintType.SMALLINT;
import static io.prestosql.spi.type.TimestampType.TIMESTAMP;
import static io.prestosql.spi.type.TinyintType.TINYINT;
import static io.prestosql.spi.type.Varchars.isVarcharType;
import static java.lang.String.format;
import static java.nio.charset.StandardCharsets.UTF_8;
import static java.util.Objects.requireNonNull;
/**
 * {@link ConnectorPageSource} decorator that adapts the raw pages produced by a
 * Hive file reader into the shape the engine expects for this split:
 * <ul>
 * <li>fills PREFILLED columns (e.g. partition keys) with per-split constant values,</li>
 * <li>applies per-column type coercions when the partition (file) schema differs
 *     from the table schema (schema evolution),</li>
 * <li>drops INTERIM columns that are only needed internally,</li>
 * <li>optionally filters out rows belonging to other buckets when the file's
 *     bucketing does not match the table's.</li>
 * </ul>
 */
public class HivePageSource
        implements ConnectorPageSource
{
    private final List<ColumnMapping> columnMappings;
    private final Optional<BucketAdapter> bucketAdapter;
    // Parsed constant value per output column; only meaningful for PREFILLED columns.
    private final Object[] prefilledValues;
    private final Type[] types;
    // Per-column coercion from the file's type to the table's type; empty when none is needed.
    private final List<Optional<Function<Block, Block>>> coercers;

    private final ConnectorPageSource delegate;

    public HivePageSource(
            List<ColumnMapping> columnMappings,
            Optional<BucketAdaptation> bucketAdaptation,
            DateTimeZone hiveStorageTimeZone,
            TypeManager typeManager,
            ConnectorPageSource delegate)
    {
        requireNonNull(columnMappings, "columnMappings is null");
        requireNonNull(hiveStorageTimeZone, "hiveStorageTimeZone is null");
        requireNonNull(typeManager, "typeManager is null");

        this.delegate = requireNonNull(delegate, "delegate is null");
        this.columnMappings = columnMappings;
        this.bucketAdapter = bucketAdaptation.map(BucketAdapter::new);

        int size = columnMappings.size();

        prefilledValues = new Object[size];
        types = new Type[size];
        ImmutableList.Builder<Optional<Function<Block, Block>>> coercers = ImmutableList.builder();

        for (int columnIndex = 0; columnIndex < size; columnIndex++) {
            ColumnMapping columnMapping = columnMappings.get(columnIndex);
            HiveColumnHandle column = columnMapping.getHiveColumnHandle();

            String name = column.getName();
            Type type = typeManager.getType(column.getTypeSignature());
            types[columnIndex] = type;

            if (columnMapping.getCoercionFrom().isPresent()) {
                coercers.add(Optional.of(createCoercer(typeManager, columnMapping.getCoercionFrom().get(), columnMapping.getHiveColumnHandle().getHiveType())));
            }
            else {
                coercers.add(Optional.empty());
            }

            if (columnMapping.getKind() == PREFILLED) {
                // Parse the textual partition value once, into the engine's native
                // representation for the column's type; rows are later filled with
                // a run-length-encoded block of this constant.
                String columnValue = columnMapping.getPrefilledValue();
                byte[] bytes = columnValue.getBytes(UTF_8);

                Object prefilledValue;
                if (isHiveNull(bytes)) {
                    prefilledValue = null;
                }
                else if (type.equals(BOOLEAN)) {
                    prefilledValue = booleanPartitionKey(columnValue, name);
                }
                else if (type.equals(BIGINT)) {
                    prefilledValue = bigintPartitionKey(columnValue, name);
                }
                else if (type.equals(INTEGER)) {
                    prefilledValue = integerPartitionKey(columnValue, name);
                }
                else if (type.equals(SMALLINT)) {
                    prefilledValue = smallintPartitionKey(columnValue, name);
                }
                else if (type.equals(TINYINT)) {
                    prefilledValue = tinyintPartitionKey(columnValue, name);
                }
                else if (type.equals(REAL)) {
                    prefilledValue = floatPartitionKey(columnValue, name);
                }
                else if (type.equals(DOUBLE)) {
                    prefilledValue = doublePartitionKey(columnValue, name);
                }
                else if (isVarcharType(type)) {
                    prefilledValue = varcharPartitionKey(columnValue, name, type);
                }
                else if (isCharType(type)) {
                    prefilledValue = charPartitionKey(columnValue, name, type);
                }
                else if (type.equals(DATE)) {
                    prefilledValue = datePartitionKey(columnValue, name);
                }
                else if (type.equals(TIMESTAMP)) {
                    prefilledValue = timestampPartitionKey(columnValue, hiveStorageTimeZone, name);
                }
                else if (isShortDecimal(type)) {
                    prefilledValue = shortDecimalPartitionKey(columnValue, (DecimalType) type, name);
                }
                else if (isLongDecimal(type)) {
                    prefilledValue = longDecimalPartitionKey(columnValue, (DecimalType) type, name);
                }
                else {
                    throw new PrestoException(NOT_SUPPORTED, format("Unsupported column type %s for prefilled column: %s", type.getDisplayName(), name));
                }

                prefilledValues[columnIndex] = prefilledValue;
            }
        }
        this.coercers = coercers.build();
    }

    @Override
    public long getCompletedBytes()
    {
        return delegate.getCompletedBytes();
    }

    @Override
    public long getReadTimeNanos()
    {
        return delegate.getReadTimeNanos();
    }

    @Override
    public boolean isFinished()
    {
        return delegate.isFinished();
    }

    @Override
    public Page getNextPage()
    {
        try {
            Page dataPage = delegate.getNextPage();
            if (dataPage == null) {
                return null;
            }

            if (bucketAdapter.isPresent()) {
                // Keep only the rows belonging to the bucket this split is
                // responsible for; loaded blocks are filtered eagerly, unloaded
                // blocks lazily so we never force a load we might not need.
                IntArrayList rowsToKeep = bucketAdapter.get().computeEligibleRowIds(dataPage);
                Block[] adaptedBlocks = new Block[dataPage.getChannelCount()];
                for (int i = 0; i < adaptedBlocks.length; i++) {
                    Block block = dataPage.getBlock(i);
                    if (!block.isLoaded()) {
                        adaptedBlocks[i] = new LazyBlock(rowsToKeep.size(), new RowFilterLazyBlockLoader(dataPage.getBlock(i), rowsToKeep));
                    }
                    else {
                        adaptedBlocks[i] = block.getPositions(rowsToKeep.elements(), 0, rowsToKeep.size());
                    }
                }
                dataPage = new Page(rowsToKeep.size(), adaptedBlocks);
            }

            int batchSize = dataPage.getPositionCount();
            List<Block> blocks = new ArrayList<>();
            for (int fieldId = 0; fieldId < columnMappings.size(); fieldId++) {
                ColumnMapping columnMapping = columnMappings.get(fieldId);
                switch (columnMapping.getKind()) {
                    case PREFILLED:
                        // Constant column: a single RLE block, no per-row storage.
                        blocks.add(RunLengthEncodedBlock.create(types[fieldId], prefilledValues[fieldId], batchSize));
                        break;
                    case REGULAR:
                        Block block = dataPage.getBlock(columnMapping.getIndex());
                        Optional<Function<Block, Block>> coercer = coercers.get(fieldId);
                        if (coercer.isPresent()) {
                            // Defer the coercion until the block is actually read.
                            block = new LazyBlock(batchSize, new CoercionLazyBlockLoader(block, coercer.get()));
                        }
                        blocks.add(block);
                        break;
                    case INTERIM:
                        // interim columns don't show up in output
                        break;
                    default:
                        throw new UnsupportedOperationException();
                }
            }
            return new Page(batchSize, blocks.toArray(new Block[0]));
        }
        catch (PrestoException e) {
            closeWithSuppression(e);
            throw e;
        }
        catch (RuntimeException e) {
            closeWithSuppression(e);
            // Wrap unexpected reader failures so the engine sees a Hive error code.
            throw new PrestoException(HIVE_CURSOR_ERROR, e);
        }
    }

    @Override
    public void close()
    {
        try {
            delegate.close();
        }
        catch (IOException e) {
            throw new UncheckedIOException(e);
        }
    }

    @Override
    public String toString()
    {
        return delegate.toString();
    }

    @Override
    public long getSystemMemoryUsage()
    {
        return delegate.getSystemMemoryUsage();
    }

    /**
     * Closes this page source, attaching any secondary failure to
     * {@code throwable} as a suppressed exception instead of losing it.
     */
    protected void closeWithSuppression(Throwable throwable)
    {
        requireNonNull(throwable, "throwable is null");
        try {
            close();
        }
        catch (RuntimeException e) {
            // Self-suppression not permitted
            if (throwable != e) {
                throwable.addSuppressed(e);
            }
        }
    }

    public ConnectorPageSource getPageSource()
    {
        return delegate;
    }

    /**
     * Returns a function translating a {@link Block} of {@code fromHiveType}
     * values into a block of {@code toHiveType} values. Used when the file's
     * schema and the table's schema disagree.
     *
     * @throws PrestoException with {@code NOT_SUPPORTED} when no coercion rule exists
     */
    private static Function<Block, Block> createCoercer(TypeManager typeManager, HiveType fromHiveType, HiveType toHiveType)
    {
        Type fromType = typeManager.getType(fromHiveType.getTypeSignature());
        Type toType = typeManager.getType(toHiveType.getTypeSignature());

        if (toType instanceof VarcharType && (fromHiveType.equals(HIVE_BYTE) || fromHiveType.equals(HIVE_SHORT) || fromHiveType.equals(HIVE_INT) || fromHiveType.equals(HIVE_LONG))) {
            return new IntegerNumberToVarcharCoercer<>(fromType, (VarcharType) toType);
        }
        if (fromType instanceof VarcharType && (toHiveType.equals(HIVE_BYTE) || toHiveType.equals(HIVE_SHORT) || toHiveType.equals(HIVE_INT) || toHiveType.equals(HIVE_LONG))) {
            return new VarcharToIntegerNumberCoercer<>((VarcharType) fromType, toType);
        }
        // BUG FIX: the alternatives on the "to" side must be parenthesized. Since
        // '&&' binds tighter than '||', the original condition parsed as
        // (fromByte && toShort) || toInt || toLong, which routed ANY coercion
        // targeting INT or LONG (e.g. double -> bigint, decimal -> bigint) to the
        // integer upscale coercer regardless of the source type. Same fix applies
        // to the HIVE_SHORT case below.
        if (fromHiveType.equals(HIVE_BYTE) && (toHiveType.equals(HIVE_SHORT) || toHiveType.equals(HIVE_INT) || toHiveType.equals(HIVE_LONG))) {
            return new IntegerNumberUpscaleCoercer<>(fromType, toType);
        }
        if (fromHiveType.equals(HIVE_SHORT) && (toHiveType.equals(HIVE_INT) || toHiveType.equals(HIVE_LONG))) {
            return new IntegerNumberUpscaleCoercer<>(fromType, toType);
        }
        if (fromHiveType.equals(HIVE_INT) && toHiveType.equals(HIVE_LONG)) {
            return new IntegerNumberUpscaleCoercer<>(fromType, toType);
        }
        if (fromHiveType.equals(HIVE_FLOAT) && toHiveType.equals(HIVE_DOUBLE)) {
            return new FloatToDoubleCoercer();
        }
        if (fromHiveType.equals(HIVE_DOUBLE) && toHiveType.equals(HIVE_FLOAT)) {
            return new DoubleToFloatCoercer();
        }
        if (fromType instanceof DecimalType && toType instanceof DecimalType) {
            return createDecimalToDecimalCoercer((DecimalType) fromType, (DecimalType) toType);
        }
        if (fromType instanceof DecimalType && toType == DOUBLE) {
            return createDecimalToDoubleCoercer((DecimalType) fromType);
        }
        if (fromType instanceof DecimalType && toType == REAL) {
            return createDecimalToRealCoercer((DecimalType) fromType);
        }
        if (fromType == DOUBLE && toType instanceof DecimalType) {
            return createDoubleToDecimalCoercer((DecimalType) toType);
        }
        if (fromType == REAL && toType instanceof DecimalType) {
            return createRealToDecimalCoercer((DecimalType) toType);
        }
        if (isArrayType(fromType) && isArrayType(toType)) {
            return new ListCoercer(typeManager, fromHiveType, toHiveType);
        }
        if (isMapType(fromType) && isMapType(toType)) {
            return new MapCoercer(typeManager, fromHiveType, toHiveType);
        }
        if (isRowType(fromType) && isRowType(toType)) {
            return new StructCoercer(typeManager, fromHiveType, toHiveType);
        }

        throw new PrestoException(NOT_SUPPORTED, format("Unsupported coercion from %s to %s", fromHiveType, toHiveType));
    }

    /** Coerces array blocks by coercing their element block; identity when element types match. */
    private static class ListCoercer
            implements Function<Block, Block>
    {
        // null means the element types already match and no work is needed.
        private final Function<Block, Block> elementCoercer;

        public ListCoercer(TypeManager typeManager, HiveType fromHiveType, HiveType toHiveType)
        {
            requireNonNull(typeManager, "typeManager is null");
            requireNonNull(fromHiveType, "fromHiveType is null");
            requireNonNull(toHiveType, "toHiveType is null");
            HiveType fromElementHiveType = HiveType.valueOf(((ListTypeInfo) fromHiveType.getTypeInfo()).getListElementTypeInfo().getTypeName());
            HiveType toElementHiveType = HiveType.valueOf(((ListTypeInfo) toHiveType.getTypeInfo()).getListElementTypeInfo().getTypeName());
            this.elementCoercer = fromElementHiveType.equals(toElementHiveType) ? null : createCoercer(typeManager, fromElementHiveType, toElementHiveType);
        }

        @Override
        public Block apply(Block block)
        {
            if (elementCoercer == null) {
                return block;
            }
            ColumnarArray arrayBlock = toColumnarArray(block);
            Block elementsBlock = elementCoercer.apply(arrayBlock.getElementsBlock());
            // Rebuild nulls and offsets from the columnar view; offsets are cumulative lengths.
            boolean[] valueIsNull = new boolean[arrayBlock.getPositionCount()];
            int[] offsets = new int[arrayBlock.getPositionCount() + 1];
            for (int i = 0; i < arrayBlock.getPositionCount(); i++) {
                valueIsNull[i] = arrayBlock.isNull(i);
                offsets[i + 1] = offsets[i] + arrayBlock.getLength(i);
            }
            return ArrayBlock.fromElementBlock(arrayBlock.getPositionCount(), Optional.of(valueIsNull), offsets, elementsBlock);
        }
    }

    /** Coerces map blocks by independently coercing their key and value blocks. */
    private static class MapCoercer
            implements Function<Block, Block>
    {
        private final Type toType;
        // null coercer means that side already matches and is passed through.
        private final Function<Block, Block> keyCoercer;
        private final Function<Block, Block> valueCoercer;

        public MapCoercer(TypeManager typeManager, HiveType fromHiveType, HiveType toHiveType)
        {
            requireNonNull(typeManager, "typeManager is null");
            requireNonNull(fromHiveType, "fromHiveType is null");
            this.toType = requireNonNull(toHiveType, "toHiveType is null").getType(typeManager);
            HiveType fromKeyHiveType = HiveType.valueOf(((MapTypeInfo) fromHiveType.getTypeInfo()).getMapKeyTypeInfo().getTypeName());
            HiveType fromValueHiveType = HiveType.valueOf(((MapTypeInfo) fromHiveType.getTypeInfo()).getMapValueTypeInfo().getTypeName());
            HiveType toKeyHiveType = HiveType.valueOf(((MapTypeInfo) toHiveType.getTypeInfo()).getMapKeyTypeInfo().getTypeName());
            HiveType toValueHiveType = HiveType.valueOf(((MapTypeInfo) toHiveType.getTypeInfo()).getMapValueTypeInfo().getTypeName());
            this.keyCoercer = fromKeyHiveType.equals(toKeyHiveType) ? null : createCoercer(typeManager, fromKeyHiveType, toKeyHiveType);
            this.valueCoercer = fromValueHiveType.equals(toValueHiveType) ? null : createCoercer(typeManager, fromValueHiveType, toValueHiveType);
        }

        @Override
        public Block apply(Block block)
        {
            ColumnarMap mapBlock = toColumnarMap(block);
            Block keysBlock = keyCoercer == null ? mapBlock.getKeysBlock() : keyCoercer.apply(mapBlock.getKeysBlock());
            Block valuesBlock = valueCoercer == null ? mapBlock.getValuesBlock() : valueCoercer.apply(mapBlock.getValuesBlock());
            // Rebuild nulls and offsets from the columnar view; offsets are cumulative entry counts.
            boolean[] valueIsNull = new boolean[mapBlock.getPositionCount()];
            int[] offsets = new int[mapBlock.getPositionCount() + 1];
            for (int i = 0; i < mapBlock.getPositionCount(); i++) {
                valueIsNull[i] = mapBlock.isNull(i);
                offsets[i + 1] = offsets[i] + mapBlock.getEntryCount(i);
            }
            return ((MapType) toType).createBlockFromKeyValue(Optional.of(valueIsNull), offsets, keysBlock, valuesBlock);
        }
    }

    /**
     * Coerces row (struct) blocks field by field. Fields added by schema evolution
     * (present in the target but not the source) are filled with nulls.
     */
    private static class StructCoercer
            implements Function<Block, Block>
    {
        private final List<Optional<Function<Block, Block>>> coercers;
        // For each added target field: a one-entry all-null block used to synthesize nulls.
        private final Block[] nullBlocks;

        public StructCoercer(TypeManager typeManager, HiveType fromHiveType, HiveType toHiveType)
        {
            requireNonNull(typeManager, "typeManager is null");
            requireNonNull(fromHiveType, "fromHiveType is null");
            requireNonNull(toHiveType, "toHiveType is null");
            List<HiveType> fromFieldTypes = extractStructFieldTypes(fromHiveType);
            List<HiveType> toFieldTypes = extractStructFieldTypes(toHiveType);
            ImmutableList.Builder<Optional<Function<Block, Block>>> coercers = ImmutableList.builder();
            this.nullBlocks = new Block[toFieldTypes.size()];
            for (int i = 0; i < toFieldTypes.size(); i++) {
                if (i >= fromFieldTypes.size()) {
                    // Field does not exist in the source struct: always null.
                    nullBlocks[i] = toFieldTypes.get(i).getType(typeManager).createBlockBuilder(null, 1).appendNull().build();
                    coercers.add(Optional.empty());
                }
                else if (!fromFieldTypes.get(i).equals(toFieldTypes.get(i))) {
                    coercers.add(Optional.of(createCoercer(typeManager, fromFieldTypes.get(i), toFieldTypes.get(i))));
                }
                else {
                    coercers.add(Optional.empty());
                }
            }
            this.coercers = coercers.build();
        }

        @Override
        public Block apply(Block block)
        {
            ColumnarRow rowBlock = toColumnarRow(block);
            Block[] fields = new Block[coercers.size()];
            // All-zero ids: every row of an added field dictionary-points at the
            // single null entry of the corresponding nullBlocks element.
            int[] ids = new int[rowBlock.getField(0).getPositionCount()];
            for (int i = 0; i < coercers.size(); i++) {
                Optional<Function<Block, Block>> coercer = coercers.get(i);
                if (coercer.isPresent()) {
                    fields[i] = coercer.get().apply(rowBlock.getField(i));
                }
                else if (i < rowBlock.getFieldCount()) {
                    fields[i] = rowBlock.getField(i);
                }
                else {
                    fields[i] = new DictionaryBlock(nullBlocks[i], ids);
                }
            }
            boolean[] valueIsNull = new boolean[rowBlock.getPositionCount()];
            for (int i = 0; i < rowBlock.getPositionCount(); i++) {
                valueIsNull[i] = rowBlock.isNull(i);
            }
            return RowBlock.fromFieldBlocks(valueIsNull.length, Optional.of(valueIsNull), fields);
        }
    }

    /** Applies a coercion lazily, only when the wrapping {@link LazyBlock} is first read. */
    private static final class CoercionLazyBlockLoader
            implements LazyBlockLoader<LazyBlock>
    {
        private final Function<Block, Block> coercer;
        private Block block;

        public CoercionLazyBlockLoader(Block block, Function<Block, Block> coercer)
        {
            this.block = requireNonNull(block, "block is null");
            this.coercer = requireNonNull(coercer, "coercer is null");
        }

        @Override
        public void load(LazyBlock lazyBlock)
        {
            if (block == null) {
                return;
            }

            lazyBlock.setBlock(coercer.apply(block.getLoadedBlock()));

            // clear reference to loader to free resources, since load was successful
            block = null;
        }
    }

    /** Applies the bucket row filter lazily, only when the wrapping {@link LazyBlock} is first read. */
    private static final class RowFilterLazyBlockLoader
            implements LazyBlockLoader<LazyBlock>
    {
        private Block block;
        private final IntArrayList rowsToKeep;

        public RowFilterLazyBlockLoader(Block block, IntArrayList rowsToKeep)
        {
            this.block = requireNonNull(block, "block is null");
            this.rowsToKeep = requireNonNull(rowsToKeep, "rowsToKeep is null");
        }

        @Override
        public void load(LazyBlock lazyBlock)
        {
            if (block == null) {
                return;
            }

            lazyBlock.setBlock(block.getPositions(rowsToKeep.elements(), 0, rowsToKeep.size()));

            // clear reference to loader to free resources, since load was successful
            block = null;
        }
    }

    /** Projects {@code page} down to the given channel indices, preserving row count. */
    private static Page extractColumns(Page page, int[] columns)
    {
        Block[] blocks = new Block[columns.length];
        for (int i = 0; i < columns.length; i++) {
            int dataColumn = columns[i];
            blocks[i] = page.getBlock(dataColumn);
        }
        return new Page(page.getPositionCount(), blocks);
    }

    /**
     * Recomputes each row's bucket from the bucketing columns and selects the rows
     * that belong to the bucket this split should produce. Used when the file's
     * bucket count differs from the table's.
     */
    public static class BucketAdapter
    {
        public final int[] bucketColumns;
        public final int bucketToKeep;
        public final int tableBucketCount;
        public final int partitionBucketCount; // for sanity check only
        private final List<TypeInfo> typeInfoList;

        public BucketAdapter(BucketAdaptation bucketAdaptation)
        {
            this.bucketColumns = bucketAdaptation.getBucketColumnIndices();
            this.bucketToKeep = bucketAdaptation.getBucketToKeep();
            this.typeInfoList = bucketAdaptation.getBucketColumnHiveTypes().stream()
                    .map(HiveType::getTypeInfo)
                    .collect(toImmutableList());
            this.tableBucketCount = bucketAdaptation.getTableBucketCount();
            this.partitionBucketCount = bucketAdaptation.getPartitionBucketCount();
        }

        /**
         * Returns the positions of rows whose computed bucket equals {@code bucketToKeep}.
         *
         * @throws PrestoException {@code HIVE_INVALID_BUCKET_FILES} when a row's bucket
         *         is inconsistent with the file's declared bucket (modulo the partition
         *         bucket count), indicating corrupt bucketing
         */
        public IntArrayList computeEligibleRowIds(Page page)
        {
            IntArrayList ids = new IntArrayList(page.getPositionCount());
            Page bucketColumnsPage = extractColumns(page, bucketColumns);
            for (int position = 0; position < page.getPositionCount(); position++) {
                int bucket = getHiveBucket(tableBucketCount, typeInfoList, bucketColumnsPage, position);
                if ((bucket - bucketToKeep) % partitionBucketCount != 0) {
                    throw new PrestoException(HIVE_INVALID_BUCKET_FILES, format(
                            "A row that is supposed to be in bucket %s is encountered. Only rows in bucket %s (modulo %s) are expected",
                            bucket, bucketToKeep % partitionBucketCount, partitionBucketCount));
                }
                if (bucket == bucketToKeep) {
                    ids.add(position);
                }
            }
            return ids;
        }
    }
}
| |
package edu.berkeley.nlp.mt;
import java.io.Serializable;
import java.io.StringReader;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import edu.berkeley.nlp.syntax.Tree;
import edu.berkeley.nlp.syntax.Trees;
import fig.basic.ListUtils;
import fig.basic.StrUtils;
/**
* A holder for a pair of sentences, each a list of strings. Sentences in the
 * test sets have integer IDs, as well, which are used to retrieve the gold
* standard alignments for those sentences.
*/
public class SentencePair implements Serializable {
  static final long serialVersionUID = 42;

  // ID used to look up gold-standard alignments for test-set sentences.
  public int ID;
  String sourceFile;
  int lineNumber;
  List<String> englishWords, englishTags;
  List<String> foreignWords, foreignTags;
  Tree<String> englishTree;
  Tree<String> foreignTree;
  Alignment alignment;

  /**
   * Returns a new pair with the English and foreign sides swapped (words, tags
   * and trees). ID, source file and line number are preserved. Note that the
   * alignment is not carried over to the reversed pair.
   */
  public SentencePair reverse() {
    SentencePair pair = new SentencePair(ID, sourceFile, lineNumber, foreignWords,
        englishWords);
    pair.foreignTags = englishTags;
    pair.englishTags = foreignTags;
    pair.foreignTree = englishTree;
    pair.englishTree = foreignTree;
    return pair;
  }

  public SentencePair(int sentenceID, String sourceFile, int lineNumber,
      List<String> englishWords, List<String> frenchWords) {
    this.ID = sentenceID;
    this.sourceFile = sourceFile;
    this.lineNumber = lineNumber;
    this.englishWords = englishWords;
    this.foreignWords = frenchWords;
  }

  public int getSentenceID() {
    return ID;
  }

  public String getSourceFile() {
    return sourceFile;
  }

  public List<String> getEnglishWords() {
    return englishWords;
  }

  public List<String> getForeignWords() {
    return foreignWords;
  }

  /** Length of the English sentence (conventionally I in alignment notation). */
  public int I() {
    return englishWords.size();
  }

  /** Length of the foreign sentence (conventionally J in alignment notation). */
  public int J() {
    return foreignWords.size();
  }

  /** The English word at position i. */
  public String en(int i) {
    return englishWords.get(i);
  }

  /** The foreign word at position j. */
  public String fr(int j) {
    return foreignWords.get(j);
  }

  /**
   * Renders both sentences, one per line, as space-separated
   * {@code position:word} tokens.
   */
  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    for (int englishPosition = 0; englishPosition < englishWords.size(); englishPosition++) {
      String englishWord = englishWords.get(englishPosition);
      sb.append(englishPosition);
      sb.append(":");
      sb.append(englishWord);
      sb.append(" ");
    }
    sb.append("\n");
    for (int frenchPosition = 0; frenchPosition < foreignWords.size(); frenchPosition++) {
      String frenchWord = foreignWords.get(frenchPosition);
      sb.append(frenchPosition);
      sb.append(":");
      sb.append(frenchWord);
      sb.append(" ");
    }
    sb.append("\n");
    return sb.toString();
  }

  // Return the set of words used in these sentences.
  public static Set<String> getWordSet(List<SentencePair> sentencePairs, boolean isForeign) {
    Set<String> set = new HashSet<>();
    for (SentencePair sp : sentencePairs) {
      List<String> words = isForeign ? sp.getForeignWords() : sp.getEnglishWords();
      for (String w : words)
        set.add(w);
    }
    return set;
  }

  /**
   * Returns a new pair restricted to the half-open word ranges [i1, i2) on the
   * English side and [j1, j2) on the foreign side. Tags, trees and alignment
   * are not carried over.
   */
  public SentencePair chop(int i1, int i2, int j1, int j2) {
    return new SentencePair(ID, sourceFile, lineNumber, englishWords.subList(i1, i2),
        foreignWords.subList(j1, j2));
  }

  public Tree<String> getEnglishTree() {
    return englishTree;
  }

  public void setEnglishTree(Tree<String> englishTree) {
    this.englishTree = englishTree;
  }

  public Tree<String> getForeignTree() {
    return foreignTree;
  }

  public void setForeignTree(Tree<String> frenchTree) {
    this.foreignTree = frenchTree;
  }

  public Alignment getAlignment() {
    return alignment;
  }

  public void setAlignment(Alignment referenceAlignment) {
    this.alignment = referenceAlignment;
  }

  public List<String> getEnglishTags() {
    return englishTags;
  }

  public void setEnglishTags(List<String> englishTags) {
    this.englishTags = englishTags;
  }

  public List<String> getForeignTags() {
    return foreignTags;
  }

  public void setForeignTags(List<String> foreignTags) {
    this.foreignTags = foreignTags;
  }

  /** Renders every field (ID, words, tags, trees, alignment) for debugging. */
  public String dump() {
    // StringBuilder instead of the legacy synchronized StringBuffer; output is identical.
    StringBuilder sbuf = new StringBuilder();
    sbuf.append("ID:\t").append(ID).append("\tSource file:\t").append(sourceFile).append("\n");
    sbuf.append("En:\t").append(StrUtils.join(englishWords, " ")).append("\n");
    sbuf.append("Fr:\t").append(StrUtils.join(foreignWords, " ")).append("\n");
    sbuf.append("EnTags:\t");
    sbuf.append(englishTags);
    sbuf.append("\n");
    sbuf.append("FrTags:\t");
    sbuf.append(foreignTags);
    sbuf.append("\n");
    sbuf.append("EnTree:\t");
    sbuf.append(englishTree);
    sbuf.append("\n");
    sbuf.append("FrTree:\t");
    sbuf.append(foreignTree);
    sbuf.append("\n");
    sbuf.append("Alignment:\n");
    sbuf.append(alignment);
    return sbuf.toString();
  }

  /**
   * Builds a small hand-aligned English/French example pair (with an English
   * parse tree) for tests and demos.
   */
  public static SentencePair getSampleSentencePair() {
    String p = "(S (NP (DT the) (NNS jobs)) (VP (VBP are) (ADJP (NN career) (VBN oriented))) (. .))";
    Trees.PennTreeReader treeReader = new Trees.PennTreeReader(new StringReader(p));
    Tree<String> tree = treeReader.next();
    List<String> en = tree.getYield();
    List<String> fr = ListUtils.newList("les", "emplois", "sont", "axes", "sur", "la",
        "carriere", ".");
    SentencePair sp = new SentencePair(0, "", 0, en, fr);
    sp.setEnglishTree(tree);
    Alignment a = new Alignment(en, fr);
    a.addAlignment(0, 0);
    a.addAlignment(1, 1);
    a.addAlignment(2, 2);
    a.addAlignment(3, 6);
    a.addAlignment(4, 3);
    a.addAlignment(5, 7);
    // a.addAlignment(0, 5);
    sp.setAlignment(a);
    return sp;
  }

  public void setLineNumber(int sent) {
    lineNumber = sent;
  }
}
| |
package org.spongycastle.jcajce.provider.asymmetric.x509;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.math.BigInteger;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.security.InvalidKeyException;
import java.security.NoSuchAlgorithmException;
import java.security.NoSuchProviderException;
import java.security.Principal;
import java.security.Provider;
import java.security.PublicKey;
import java.security.Security;
import java.security.Signature;
import java.security.SignatureException;
import java.security.cert.Certificate;
import java.security.cert.CertificateEncodingException;
import java.security.cert.CertificateException;
import java.security.cert.CertificateExpiredException;
import java.security.cert.CertificateNotYetValidException;
import java.security.cert.CertificateParsingException;
import java.security.cert.X509Certificate;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import javax.security.auth.x500.X500Principal;
import org.spongycastle.asn1.ASN1Encodable;
import org.spongycastle.asn1.ASN1Encoding;
import org.spongycastle.asn1.ASN1InputStream;
import org.spongycastle.asn1.ASN1ObjectIdentifier;
import org.spongycastle.asn1.ASN1OutputStream;
import org.spongycastle.asn1.ASN1Primitive;
import org.spongycastle.asn1.ASN1Sequence;
import org.spongycastle.asn1.ASN1String;
import org.spongycastle.asn1.DERBitString;
import org.spongycastle.asn1.DERIA5String;
import org.spongycastle.asn1.DERNull;
import org.spongycastle.asn1.DEROctetString;
import org.spongycastle.asn1.misc.MiscObjectIdentifiers;
import org.spongycastle.asn1.misc.NetscapeCertType;
import org.spongycastle.asn1.misc.NetscapeRevocationURL;
import org.spongycastle.asn1.misc.VerisignCzagExtension;
import org.spongycastle.asn1.util.ASN1Dump;
import org.spongycastle.asn1.x500.X500Name;
import org.spongycastle.asn1.x500.style.RFC4519Style;
import org.spongycastle.asn1.x509.AlgorithmIdentifier;
import org.spongycastle.asn1.x509.BasicConstraints;
import org.spongycastle.asn1.x509.Extension;
import org.spongycastle.asn1.x509.Extensions;
import org.spongycastle.asn1.x509.GeneralName;
import org.spongycastle.asn1.x509.KeyUsage;
import org.spongycastle.jcajce.provider.asymmetric.util.PKCS12BagAttributeCarrierImpl;
import org.spongycastle.jce.X509Principal;
import org.spongycastle.jce.interfaces.PKCS12BagAttributeCarrier;
import org.spongycastle.jce.provider.BouncyCastleProvider;
import org.spongycastle.jce.provider.RFC3280CertPathUtilities;
import org.spongycastle.util.Arrays;
import org.spongycastle.util.Integers;
import org.spongycastle.util.encoders.Hex;
/**
 * JCA {@link X509Certificate} implementation backed by a SpongyCastle ASN.1
 * {@code org.spongycastle.asn1.x509.Certificate} structure.
 * <p>
 * BasicConstraints (OID 2.5.29.19) and KeyUsage (OID 2.5.29.15) extensions are
 * parsed eagerly in the constructor; all other accessors decode lazily from the
 * wrapped structure. Instances also carry PKCS#12 bag attributes via the
 * delegated {@link PKCS12BagAttributeCarrier}.
 * <p>
 * NOTE(review): not thread-safe except for {@link #hashCode()}, which is
 * synchronized to cache its value.
 */
class X509CertificateObject
    extends X509Certificate
    implements PKCS12BagAttributeCarrier
{
    /** The underlying ASN.1 certificate structure this object wraps. */
    private org.spongycastle.asn1.x509.Certificate c;
    /** Parsed BasicConstraints extension, or null if absent. */
    private BasicConstraints basicConstraints;
    /** Decoded KeyUsage bits (padded to at least 9 entries), or null if absent. */
    private boolean[] keyUsage;
    /** True once {@link #hashValue} has been computed and cached. */
    private boolean hashValueSet;
    /** Cached hash code; valid only when {@link #hashValueSet} is true. */
    private int hashValue;
    /** Delegate for PKCS#12 bag attribute storage. */
    private PKCS12BagAttributeCarrier attrCarrier = new PKCS12BagAttributeCarrierImpl();

    /**
     * Wraps the given ASN.1 certificate, eagerly decoding the BasicConstraints
     * and KeyUsage extensions.
     *
     * @param c the ASN.1 certificate structure to wrap.
     * @throws CertificateParsingException if either extension is present but
     *         cannot be decoded.
     */
    public X509CertificateObject(
        org.spongycastle.asn1.x509.Certificate c)
        throws CertificateParsingException
    {
        this.c = c;
        try
        {
            byte[] bytes = this.getExtensionBytes("2.5.29.19");
            if (bytes != null)
            {
                basicConstraints = BasicConstraints.getInstance(ASN1Primitive.fromByteArray(bytes));
            }
        }
        catch (Exception e)
        {
            throw new CertificateParsingException("cannot construct BasicConstraints: " + e);
        }
        try
        {
            byte[] bytes = this.getExtensionBytes("2.5.29.15");
            if (bytes != null)
            {
                DERBitString bits = DERBitString.getInstance(ASN1Primitive.fromByteArray(bytes));
                bytes = bits.getBytes();
                int length = (bytes.length * 8) - bits.getPadBits();
                // Always allocate at least 9 slots so callers indexing the
                // standard KeyUsage bit positions never go out of bounds.
                keyUsage = new boolean[(length < 9) ? 9 : length];
                for (int i = 0; i != length; i++)
                {
                    keyUsage[i] = (bytes[i / 8] & (0x80 >>> (i % 8))) != 0;
                }
            }
            else
            {
                keyUsage = null;
            }
        }
        catch (Exception e)
        {
            throw new CertificateParsingException("cannot construct KeyUsage: " + e);
        }
    }

    /** Checks validity against the current time. */
    public void checkValidity()
        throws CertificateExpiredException, CertificateNotYetValidException
    {
        this.checkValidity(new Date());
    }

    /**
     * Checks that {@code date} falls within the certificate's validity period.
     *
     * @throws CertificateExpiredException if date is after notAfter.
     * @throws CertificateNotYetValidException if date is before notBefore.
     */
    public void checkValidity(
        Date date)
        throws CertificateExpiredException, CertificateNotYetValidException
    {
        if (date.getTime() > this.getNotAfter().getTime())  // for other VM compatibility
        {
            throw new CertificateExpiredException("certificate expired on " + c.getEndDate().getTime());
        }
        if (date.getTime() < this.getNotBefore().getTime())
        {
            throw new CertificateNotYetValidException("certificate not valid till " + c.getStartDate().getTime());
        }
    }

    /** @return the X.509 version number (1, 2 or 3). */
    public int getVersion()
    {
        return c.getVersionNumber();
    }

    /** @return the certificate serial number. */
    public BigInteger getSerialNumber()
    {
        return c.getSerialNumber().getValue();
    }

    /**
     * @return the issuer DN as an {@link X509Principal}, or null if the issuer
     *         name cannot be re-encoded.
     */
    public Principal getIssuerDN()
    {
        try
        {
            return new X509Principal(X500Name.getInstance(c.getIssuer().getEncoded()));
        }
        catch (IOException e)
        {
            return null;
        }
    }

    /** @return the issuer DN as a JCA {@link X500Principal}. */
    public X500Principal getIssuerX500Principal()
    {
        try
        {
            ByteArrayOutputStream bOut = new ByteArrayOutputStream();
            ASN1OutputStream aOut = new ASN1OutputStream(bOut);
            aOut.writeObject(c.getIssuer());
            return new X500Principal(bOut.toByteArray());
        }
        catch (IOException e)
        {
            throw new IllegalStateException("can't encode issuer DN");
        }
    }

    /** @return the subject DN as an {@link X509Principal}. */
    public Principal getSubjectDN()
    {
        return new X509Principal(X500Name.getInstance(c.getSubject().toASN1Primitive()));
    }

    /** @return the subject DN as a JCA {@link X500Principal}. */
    public X500Principal getSubjectX500Principal()
    {
        try
        {
            ByteArrayOutputStream bOut = new ByteArrayOutputStream();
            ASN1OutputStream aOut = new ASN1OutputStream(bOut);
            aOut.writeObject(c.getSubject());
            return new X500Principal(bOut.toByteArray());
        }
        catch (IOException e)
        {
            // Fixed copy-paste error: this method encodes the subject DN,
            // not the issuer DN.
            throw new IllegalStateException("can't encode subject DN");
        }
    }

    /** @return the start of the validity period. */
    public Date getNotBefore()
    {
        return c.getStartDate().getDate();
    }

    /** @return the end of the validity period. */
    public Date getNotAfter()
    {
        return c.getEndDate().getDate();
    }

    /**
     * @return the DER-encoded TBSCertificate portion.
     * @throws CertificateEncodingException on encoding failure.
     */
    public byte[] getTBSCertificate()
        throws CertificateEncodingException
    {
        try
        {
            return c.getTBSCertificate().getEncoded(ASN1Encoding.DER);
        }
        catch (IOException e)
        {
            throw new CertificateEncodingException(e.toString());
        }
    }

    /** @return the raw signature bytes. */
    public byte[] getSignature()
    {
        return c.getSignature().getBytes();
    }

    /**
     * return a more "meaningful" representation for the signature algorithm used in
     * the certficate.
     */
    public String getSigAlgName()
    {
        // Prefer the BC provider's alias table, then fall back to scanning
        // every installed provider; last resort is the bare OID string.
        Provider prov = Security.getProvider(BouncyCastleProvider.PROVIDER_NAME);
        if (prov != null)
        {
            String algName = prov.getProperty("Alg.Alias.Signature." + this.getSigAlgOID());
            if (algName != null)
            {
                return algName;
            }
        }
        Provider[] provs = Security.getProviders();
        //
        // search every provider looking for a real algorithm
        //
        for (int i = 0; i != provs.length; i++)
        {
            String algName = provs[i].getProperty("Alg.Alias.Signature." + this.getSigAlgOID());
            if (algName != null)
            {
                return algName;
            }
        }
        return this.getSigAlgOID();
    }

    /**
     * return the object identifier for the signature.
     */
    public String getSigAlgOID()
    {
        return c.getSignatureAlgorithm().getAlgorithm().getId();
    }

    /**
     * return the signature parameters, or null if there aren't any.
     */
    public byte[] getSigAlgParams()
    {
        if (c.getSignatureAlgorithm().getParameters() != null)
        {
            try
            {
                return c.getSignatureAlgorithm().getParameters().toASN1Primitive().getEncoded(ASN1Encoding.DER);
            }
            catch (IOException e)
            {
                return null;
            }
        }
        else
        {
            return null;
        }
    }

    /** @return the issuer unique ID bits, or null if absent. */
    public boolean[] getIssuerUniqueID()
    {
        DERBitString id = c.getTBSCertificate().getIssuerUniqueId();
        if (id != null)
        {
            byte[] bytes = id.getBytes();
            boolean[] boolId = new boolean[bytes.length * 8 - id.getPadBits()];
            for (int i = 0; i != boolId.length; i++)
            {
                boolId[i] = (bytes[i / 8] & (0x80 >>> (i % 8))) != 0;
            }
            return boolId;
        }
        return null;
    }

    /** @return the subject unique ID bits, or null if absent. */
    public boolean[] getSubjectUniqueID()
    {
        DERBitString id = c.getTBSCertificate().getSubjectUniqueId();
        if (id != null)
        {
            byte[] bytes = id.getBytes();
            boolean[] boolId = new boolean[bytes.length * 8 - id.getPadBits()];
            for (int i = 0; i != boolId.length; i++)
            {
                boolId[i] = (bytes[i / 8] & (0x80 >>> (i % 8))) != 0;
            }
            return boolId;
        }
        return null;
    }

    /**
     * @return the KeyUsage bits decoded in the constructor, or null if the
     *         extension is absent. NOTE: the internal array is returned
     *         directly (no defensive copy), matching the original behavior.
     */
    public boolean[] getKeyUsage()
    {
        return keyUsage;
    }

    /**
     * @return an unmodifiable list of extended-key-usage OID strings, or null
     *         if the extension (2.5.29.37) is absent.
     * @throws CertificateParsingException if the extension cannot be decoded.
     */
    public List getExtendedKeyUsage()
        throws CertificateParsingException
    {
        byte[] bytes = this.getExtensionBytes("2.5.29.37");
        if (bytes != null)
        {
            try
            {
                ASN1InputStream dIn = new ASN1InputStream(bytes);
                ASN1Sequence seq = (ASN1Sequence)dIn.readObject();
                List list = new ArrayList();
                for (int i = 0; i != seq.size(); i++)
                {
                    list.add(((ASN1ObjectIdentifier)seq.getObjectAt(i)).getId());
                }
                return Collections.unmodifiableList(list);
            }
            catch (Exception e)
            {
                throw new CertificateParsingException("error processing extended key usage extension");
            }
        }
        return null;
    }

    /**
     * @return the path-length constraint for a CA certificate
     *         ({@code Integer.MAX_VALUE} when unbounded), or -1 when this is
     *         not a CA or BasicConstraints is absent.
     */
    public int getBasicConstraints()
    {
        if (basicConstraints != null)
        {
            if (basicConstraints.isCA())
            {
                if (basicConstraints.getPathLenConstraint() == null)
                {
                    return Integer.MAX_VALUE;
                }
                else
                {
                    return basicConstraints.getPathLenConstraint().intValue();
                }
            }
            else
            {
                return -1;
            }
        }
        return -1;
    }

    /** @return decoded subjectAltName entries, or null if absent. */
    public Collection getSubjectAlternativeNames()
        throws CertificateParsingException
    {
        return getAlternativeNames(getExtensionBytes(Extension.subjectAlternativeName.getId()));
    }

    /** @return decoded issuerAltName entries, or null if absent. */
    public Collection getIssuerAlternativeNames()
        throws CertificateParsingException
    {
        return getAlternativeNames(getExtensionBytes(Extension.issuerAlternativeName.getId()));
    }

    /**
     * @return the OID strings of all critical extensions, or null when this is
     *         not a v3 certificate or no extensions are present.
     */
    public Set getCriticalExtensionOIDs()
    {
        if (this.getVersion() == 3)
        {
            Set set = new HashSet();
            Extensions extensions = c.getTBSCertificate().getExtensions();
            if (extensions != null)
            {
                Enumeration e = extensions.oids();
                while (e.hasMoreElements())
                {
                    ASN1ObjectIdentifier oid = (ASN1ObjectIdentifier)e.nextElement();
                    Extension ext = extensions.getExtension(oid);
                    if (ext.isCritical())
                    {
                        set.add(oid.getId());
                    }
                }
                return set;
            }
        }
        return null;
    }

    /**
     * Fetches the raw octets of the extension value for {@code oid}, or null
     * if the extension is absent.
     */
    private byte[] getExtensionBytes(String oid)
    {
        Extensions exts = c.getTBSCertificate().getExtensions();
        if (exts != null)
        {
            Extension ext = exts.getExtension(new ASN1ObjectIdentifier(oid));
            if (ext != null)
            {
                return ext.getExtnValue().getOctets();
            }
        }
        return null;
    }

    /**
     * @return the DER-encoded OCTET STRING of the extension value for
     *         {@code oid}, or null if absent (JCA contract).
     */
    public byte[] getExtensionValue(String oid)
    {
        Extensions exts = c.getTBSCertificate().getExtensions();
        if (exts != null)
        {
            Extension ext = exts.getExtension(new ASN1ObjectIdentifier(oid));
            if (ext != null)
            {
                try
                {
                    return ext.getExtnValue().getEncoded();
                }
                catch (Exception e)
                {
                    throw new IllegalStateException("error parsing " + e.toString());
                }
            }
        }
        return null;
    }

    /**
     * @return the OID strings of all non-critical extensions, or null when
     *         this is not a v3 certificate or no extensions are present.
     */
    public Set getNonCriticalExtensionOIDs()
    {
        if (this.getVersion() == 3)
        {
            Set set = new HashSet();
            Extensions extensions = c.getTBSCertificate().getExtensions();
            if (extensions != null)
            {
                Enumeration e = extensions.oids();
                while (e.hasMoreElements())
                {
                    ASN1ObjectIdentifier oid = (ASN1ObjectIdentifier)e.nextElement();
                    Extension ext = extensions.getExtension(oid);
                    if (!ext.isCritical())
                    {
                        set.add(oid.getId());
                    }
                }
                return set;
            }
        }
        return null;
    }

    /**
     * @return true if the certificate carries a critical extension whose OID
     *         is not in the RFC 3280 set this implementation understands.
     */
    public boolean hasUnsupportedCriticalExtension()
    {
        if (this.getVersion() == 3)
        {
            Extensions extensions = c.getTBSCertificate().getExtensions();
            if (extensions != null)
            {
                Enumeration e = extensions.oids();
                while (e.hasMoreElements())
                {
                    ASN1ObjectIdentifier oid = (ASN1ObjectIdentifier)e.nextElement();
                    String oidId = oid.getId();
                    // Known/handled extensions are never "unsupported",
                    // critical or not.
                    if (oidId.equals(RFC3280CertPathUtilities.KEY_USAGE)
                        || oidId.equals(RFC3280CertPathUtilities.CERTIFICATE_POLICIES)
                        || oidId.equals(RFC3280CertPathUtilities.POLICY_MAPPINGS)
                        || oidId.equals(RFC3280CertPathUtilities.INHIBIT_ANY_POLICY)
                        || oidId.equals(RFC3280CertPathUtilities.CRL_DISTRIBUTION_POINTS)
                        || oidId.equals(RFC3280CertPathUtilities.ISSUING_DISTRIBUTION_POINT)
                        || oidId.equals(RFC3280CertPathUtilities.DELTA_CRL_INDICATOR)
                        || oidId.equals(RFC3280CertPathUtilities.POLICY_CONSTRAINTS)
                        || oidId.equals(RFC3280CertPathUtilities.BASIC_CONSTRAINTS)
                        || oidId.equals(RFC3280CertPathUtilities.SUBJECT_ALTERNATIVE_NAME)
                        || oidId.equals(RFC3280CertPathUtilities.NAME_CONSTRAINTS))
                    {
                        continue;
                    }
                    Extension ext = extensions.getExtension(oid);
                    if (ext.isCritical())
                    {
                        return true;
                    }
                }
            }
        }
        return false;
    }

    /** @return the subject public key, or null if it cannot be decoded. */
    public PublicKey getPublicKey()
    {
        try
        {
            return BouncyCastleProvider.getPublicKey(c.getSubjectPublicKeyInfo());
        }
        catch (IOException e)
        {
            return null;   // should never happen...
        }
    }

    /**
     * @return the full DER encoding of the certificate.
     * @throws CertificateEncodingException on encoding failure.
     */
    public byte[] getEncoded()
        throws CertificateEncodingException
    {
        try
        {
            return c.getEncoded(ASN1Encoding.DER);
        }
        catch (IOException e)
        {
            throw new CertificateEncodingException(e.toString());
        }
    }

    /**
     * Equality is defined over the DER encodings of the two certificates, so
     * any {@link Certificate} implementation can compare equal to this one.
     */
    public boolean equals(
        Object o)
    {
        if (o == this)
        {
            return true;
        }
        if (!(o instanceof Certificate))
        {
            return false;
        }
        Certificate other = (Certificate)o;
        try
        {
            byte[] b1 = this.getEncoded();
            byte[] b2 = other.getEncoded();
            return Arrays.areEqual(b1, b2);
        }
        catch (CertificateEncodingException e)
        {
            return false;
        }
    }

    /** Lazily computes and caches the hash code; thread-safe via synchronization. */
    public synchronized int hashCode()
    {
        if (!hashValueSet)
        {
            hashValue = calculateHashCode();
            hashValueSet = true;
        }
        return hashValue;
    }

    /**
     * Hash over the DER encoding. NOTE(review): the loop intentionally starts
     * at index 1, preserving the historical BC hash; do not change without a
     * compatibility review. Returns 0 if the certificate cannot be encoded.
     */
    private int calculateHashCode()
    {
        try
        {
            int hashCode = 0;
            byte[] certData = this.getEncoded();
            for (int i = 1; i < certData.length; i++)
            {
                hashCode += certData[i] * i;
            }
            return hashCode;
        }
        catch (CertificateEncodingException e)
        {
            return 0;
        }
    }

    /** Stores a PKCS#12 bag attribute (delegated). */
    public void setBagAttribute(
        ASN1ObjectIdentifier oid,
        ASN1Encodable attribute)
    {
        attrCarrier.setBagAttribute(oid, attribute);
    }

    /** Retrieves a PKCS#12 bag attribute (delegated). */
    public ASN1Encodable getBagAttribute(
        ASN1ObjectIdentifier oid)
    {
        return attrCarrier.getBagAttribute(oid);
    }

    /** @return the OIDs of the stored PKCS#12 bag attributes (delegated). */
    public Enumeration getBagAttributeKeys()
    {
        return attrCarrier.getBagAttributeKeys();
    }

    /**
     * Multi-line human-readable dump of the certificate: header fields,
     * hex-formatted signature (20 bytes per line), and decoded extensions.
     */
    public String toString()
    {
        StringBuffer buf = new StringBuffer();
        String nl = System.getProperty("line.separator");
        buf.append("  [0]         Version: ").append(this.getVersion()).append(nl);
        buf.append("         SerialNumber: ").append(this.getSerialNumber()).append(nl);
        buf.append("             IssuerDN: ").append(this.getIssuerDN()).append(nl);
        buf.append("           Start Date: ").append(this.getNotBefore()).append(nl);
        buf.append("           Final Date: ").append(this.getNotAfter()).append(nl);
        buf.append("            SubjectDN: ").append(this.getSubjectDN()).append(nl);
        buf.append("           Public Key: ").append(this.getPublicKey()).append(nl);
        buf.append("  Signature Algorithm: ").append(this.getSigAlgName()).append(nl);
        byte[] sig = this.getSignature();
        buf.append("            Signature: ").append(new String(Hex.encode(sig, 0, 20))).append(nl);
        for (int i = 20; i < sig.length; i += 20)
        {
            if (i < sig.length - 20)
            {
                buf.append("                       ").append(new String(Hex.encode(sig, i, 20))).append(nl);
            }
            else
            {
                buf.append("                       ").append(new String(Hex.encode(sig, i, sig.length - i))).append(nl);
            }
        }
        Extensions extensions = c.getTBSCertificate().getExtensions();
        if (extensions != null)
        {
            Enumeration e = extensions.oids();
            if (e.hasMoreElements())
            {
                buf.append("       Extensions: \n");
            }
            while (e.hasMoreElements())
            {
                ASN1ObjectIdentifier oid = (ASN1ObjectIdentifier)e.nextElement();
                Extension ext = extensions.getExtension(oid);
                if (ext.getExtnValue() != null)
                {
                    byte[] octs = ext.getExtnValue().getOctets();
                    ASN1InputStream dIn = new ASN1InputStream(octs);
                    buf.append("                       critical(").append(ext.isCritical()).append(") ");
                    try
                    {
                        // Pretty-print the extensions we know; dump everything
                        // else generically via ASN1Dump.
                        if (oid.equals(Extension.basicConstraints))
                        {
                            buf.append(BasicConstraints.getInstance(dIn.readObject())).append(nl);
                        }
                        else if (oid.equals(Extension.keyUsage))
                        {
                            buf.append(KeyUsage.getInstance(dIn.readObject())).append(nl);
                        }
                        else if (oid.equals(MiscObjectIdentifiers.netscapeCertType))
                        {
                            buf.append(new NetscapeCertType((DERBitString)dIn.readObject())).append(nl);
                        }
                        else if (oid.equals(MiscObjectIdentifiers.netscapeRevocationURL))
                        {
                            buf.append(new NetscapeRevocationURL((DERIA5String)dIn.readObject())).append(nl);
                        }
                        else if (oid.equals(MiscObjectIdentifiers.verisignCzagExtension))
                        {
                            buf.append(new VerisignCzagExtension((DERIA5String)dIn.readObject())).append(nl);
                        }
                        else
                        {
                            buf.append(oid.getId());
                            buf.append(" value = ").append(ASN1Dump.dumpAsString(dIn.readObject())).append(nl);
                            //buf.append(" value = ").append("*****").append(nl);
                        }
                    }
                    catch (Exception ex)
                    {
                        // Undecodable extension value: show the OID but mask
                        // the value rather than fail the whole dump.
                        buf.append(oid.getId());
                        //     buf.append(" value = ").append(new String(Hex.encode(ext.getExtnValue().getOctets()))).append(nl);
                        buf.append(" value = ").append("*****").append(nl);
                    }
                }
                else
                {
                    buf.append(nl);
                }
            }
        }
        return buf.toString();
    }

    /**
     * Verifies the certificate's signature with {@code key}, preferring the
     * BC provider and falling back to any provider that supports the
     * algorithm.
     */
    public final void verify(
        PublicKey   key)
        throws CertificateException, NoSuchAlgorithmException,
        InvalidKeyException, NoSuchProviderException, SignatureException
    {
        Signature   signature;
        String      sigName = X509SignatureUtil.getSignatureName(c.getSignatureAlgorithm());
        try
        {
            signature = Signature.getInstance(sigName, BouncyCastleProvider.PROVIDER_NAME);
        }
        catch (Exception e)
        {
            signature = Signature.getInstance(sigName);
        }
        checkSignature(key, signature);
    }

    /**
     * Verifies the certificate's signature with {@code key} using the named
     * provider.
     */
    public final void verify(
        PublicKey   key,
        String      sigProvider)
        throws CertificateException, NoSuchAlgorithmException,
        InvalidKeyException, NoSuchProviderException, SignatureException
    {
        String sigName = X509SignatureUtil.getSignatureName(c.getSignatureAlgorithm());
        Signature signature = Signature.getInstance(sigName, sigProvider);
        checkSignature(key, signature);
    }

    /**
     * Shared verification logic: rejects certificates whose outer signature
     * algorithm differs from the TBS signature algorithm (RFC 5280 §4.1.1.2),
     * then verifies the TBS bytes against the signature.
     */
    private void checkSignature(
        PublicKey key,
        Signature signature)
        throws CertificateException, NoSuchAlgorithmException,
        SignatureException, InvalidKeyException
    {
        if (!isAlgIdEqual(c.getSignatureAlgorithm(), c.getTBSCertificate().getSignature()))
        {
            throw new CertificateException("signature algorithm in TBS cert not same as outer cert");
        }
        ASN1Encodable params = c.getSignatureAlgorithm().getParameters();
        // TODO This should go after the initVerify?
        X509SignatureUtil.setSignatureParameters(signature, params);
        signature.initVerify(key);
        signature.update(this.getTBSCertificate());
        if (!signature.verify(this.getSignature()))
        {
            throw new SignatureException("certificate does not verify with supplied key");
        }
    }

    /**
     * Compares two AlgorithmIdentifiers, treating absent parameters and an
     * explicit DER NULL as equivalent (a common real-world encoding variance).
     */
    private boolean isAlgIdEqual(AlgorithmIdentifier id1, AlgorithmIdentifier id2)
    {
        if (!id1.getAlgorithm().equals(id2.getAlgorithm()))
        {
            return false;
        }
        if (id1.getParameters() == null)
        {
            if (id2.getParameters() != null && !id2.getParameters().equals(DERNull.INSTANCE))
            {
                return false;
            }
            return true;
        }
        if (id2.getParameters() == null)
        {
            if (id1.getParameters() != null && !id1.getParameters().equals(DERNull.INSTANCE))
            {
                return false;
            }
            return true;
        }
        return id1.getParameters().equals(id2.getParameters());
    }

    /**
     * Decodes a GeneralNames extension value into the JCA collection-of-lists
     * form: each entry is {@code [tagNumber, value]} where the value type
     * depends on the tag (String for DNS/RFC822/URI/directory/registeredID/IP,
     * byte[] for otherName/ediPartyName/x400Address). Unresolvable IP
     * addresses are silently skipped.
     *
     * @param extVal the raw extension octets, may be null.
     * @return an unmodifiable collection, or null when extVal is null or
     *         decodes to no entries.
     * @throws CertificateParsingException if the structure cannot be decoded.
     */
    private static Collection getAlternativeNames(byte[] extVal)
        throws CertificateParsingException
    {
        if (extVal == null)
        {
            return null;
        }
        try
        {
            Collection temp = new ArrayList();
            Enumeration it = ASN1Sequence.getInstance(extVal).getObjects();
            while (it.hasMoreElements())
            {
                GeneralName genName = GeneralName.getInstance(it.nextElement());
                List list = new ArrayList();
                list.add(Integers.valueOf(genName.getTagNo()));
                switch (genName.getTagNo())
                {
                case GeneralName.ediPartyName:
                case GeneralName.x400Address:
                case GeneralName.otherName:
                    list.add(genName.getEncoded());
                    break;
                case GeneralName.directoryName:
                    list.add(X500Name.getInstance(RFC4519Style.INSTANCE, genName.getName()).toString());
                    break;
                case GeneralName.dNSName:
                case GeneralName.rfc822Name:
                case GeneralName.uniformResourceIdentifier:
                    list.add(((ASN1String)genName.getName()).getString());
                    break;
                case GeneralName.registeredID:
                    list.add(ASN1ObjectIdentifier.getInstance(genName.getName()).getId());
                    break;
                case GeneralName.iPAddress:
                    byte[] addrBytes = DEROctetString.getInstance(genName.getName()).getOctets();
                    final String addr;
                    try
                    {
                        addr = InetAddress.getByAddress(addrBytes).getHostAddress();
                    }
                    catch (UnknownHostException e)
                    {
                        continue;
                    }
                    list.add(addr);
                    break;
                default:
                    throw new IOException("Bad tag number: " + genName.getTagNo());
                }
                temp.add(Collections.unmodifiableList(list));
            }
            if (temp.size() == 0)
            {
                return null;
            }
            return Collections.unmodifiableCollection(temp);
        }
        catch (Exception e)
        {
            throw new CertificateParsingException(e.getMessage());
        }
    }
}
| |
import java.awt.BasicStroke;
import java.awt.Color;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.Polygon;
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
//The class representing each tile on the hexagonal map
// The class representing each tile on the hexagonal map.
// Holds axial coordinates (q, r), the pixel center (x, y), and three
// precomputed hexagon vertex sets: display (slightly shrunk for the tile
// gap), real (full size, used for hit testing), and indicator (shrunk
// further, used for ability indicators).
public class Hextile {
	private int q, r, x, y;
	// Vertex arrays indexed as [0]=x coords, [1]=y coords, 6 vertices each.
	private int[][] displayVerts;
	private int[][] realVerts;
	private int[][] indVerts;
	private Polygon displayhex;
	private Polygon hexagon;
	// Map-wide outline polygons shared by all tiles.
	private static Polygon bigContainHex, bigShakeHex;
	// Per-tile pixel dimensions, computed in fillHexGrid().
	private static double tiles_h;
	private static double tiles_w;
	public static final int n_padding = 130, s_padding = 50, h_padding = 100,
			screen_x = 1280, screen_y = 720, tileHGap = 2, tileVGap = 1;
	// Grid side length (number of rows/columns), set in fillHexGrid().
	public static int size;

	// Returns the map-encompassing shape
	public static Polygon getBigContainHex() {
		return bigContainHex;
	}

	// Algorithm to generate the map-encompassing boundary shape.
	// Walks the six edges of the hex-shaped grid and collects the outer
	// vertices of each border tile.
	public static void createBigContainHex(Hextile[][] hextiles) {
		bigContainHex = new Polygon();
		Polygon tempHex;
		for (int i = 0; i < size / 2 + 1; i++) {
			tempHex = hextiles[size / 2 - i][i].getHexagon();
			bigContainHex.addPoint(tempHex.xpoints[2], tempHex.ypoints[2]);
			bigContainHex.addPoint(tempHex.xpoints[3], tempHex.ypoints[3]);
		}
		for (int i = 0; i < size / 2; i++) {
			tempHex = hextiles[0][size / 2 + 1 + i].getHexagon();
			bigContainHex.addPoint(tempHex.xpoints[2], tempHex.ypoints[2]);
			bigContainHex.addPoint(tempHex.xpoints[3], tempHex.ypoints[3]);
		}
		for (int i = 0; i < size / 2 + 1; i++) {
			tempHex = hextiles[i][size - 1].getHexagon();
			bigContainHex.addPoint(tempHex.xpoints[4], tempHex.ypoints[4]);
			bigContainHex.addPoint(tempHex.xpoints[5], tempHex.ypoints[5]);
		}
		for (int i = 0; i < size / 2 + 1; i++) {
			tempHex = hextiles[size / 2 + i][size - 1 - i].getHexagon();
			bigContainHex.addPoint(tempHex.xpoints[4], tempHex.ypoints[4]);
			bigContainHex.addPoint(tempHex.xpoints[5], tempHex.ypoints[5]);
		}
		for (int i = 0; i < size / 2 + 1; i++) {
			tempHex = hextiles[size - 1][size / 2 - i].getHexagon();
			bigContainHex.addPoint(tempHex.xpoints[0], tempHex.ypoints[0]);
			bigContainHex.addPoint(tempHex.xpoints[1], tempHex.ypoints[1]);
		}
		for (int i = 0; i < size / 2; i++) {
			tempHex = hextiles[size - 2 - i][0].getHexagon();
			bigContainHex.addPoint(tempHex.xpoints[0], tempHex.ypoints[0]);
			bigContainHex.addPoint(tempHex.xpoints[1], tempHex.ypoints[1]);
		}
		// Start the shake polygon as a copy of the outline.
		bigShakeHex = new Polygon(bigContainHex.xpoints, bigContainHex.ypoints,
				bigContainHex.npoints);
	}

	// Calculates the three vertex sets (display, real, indicator) for this
	// tile from its center (x, y) and the shared tile dimensions. Vertices
	// run clockwise starting from the rightmost point.
	private void fillverts() {
		// Display hexagon: shrunk by the tile gap so neighbors don't touch.
		double h = tiles_h / 2 - tileVGap, w = tiles_w / 2 - tileHGap;
		displayVerts[0][0] = (int) (x + w);
		displayVerts[1][0] = (int) (y);
		displayVerts[0][1] = (int) (x + w / 2);
		displayVerts[1][1] = (int) (y - h);
		displayVerts[0][2] = (int) (x - w / 2);
		displayVerts[1][2] = (int) (y - h);
		displayVerts[0][3] = (int) (x - w);
		displayVerts[1][3] = (int) (y);
		displayVerts[0][4] = (int) (x - w / 2);
		displayVerts[1][4] = (int) (y + h);
		displayVerts[0][5] = (int) (x + w / 2);
		displayVerts[1][5] = (int) (y + h);
		displayhex = new Polygon(displayVerts[0], displayVerts[1], 6);
		// Real hexagon: full tile size, used for hit testing.
		h = tiles_h / 2;
		w = tiles_w / 2;
		realVerts[0][0] = (int) (x + w);
		realVerts[1][0] = (int) (y);
		realVerts[0][1] = (int) (x + w / 2);
		realVerts[1][1] = (int) (y - h);
		realVerts[0][2] = (int) (x - w / 2);
		realVerts[1][2] = (int) (y - h);
		realVerts[0][3] = (int) (x - w);
		realVerts[1][3] = (int) (y);
		realVerts[0][4] = (int) (x - w / 2);
		realVerts[1][4] = (int) (y + h);
		realVerts[0][5] = (int) (x + w / 2);
		realVerts[1][5] = (int) (y + h);
		hexagon = new Polygon(realVerts[0], realVerts[1], 6);
		// Indicator hexagon: shrunk further for the ability indicator edges.
		h = tiles_h / 2 - tileVGap * 4;
		w = tiles_w / 2 - tileHGap * 4;
		indVerts[0][0] = (int) (x + w);
		indVerts[1][0] = (int) (y);
		indVerts[0][1] = (int) (x + w / 2);
		indVerts[1][1] = (int) (y - h);
		indVerts[0][2] = (int) (x - w / 2);
		indVerts[1][2] = (int) (y - h);
		indVerts[0][3] = (int) (x - w);
		indVerts[1][3] = (int) (y);
		indVerts[0][4] = (int) (x - w / 2);
		indVerts[1][4] = (int) (y + h);
		indVerts[0][5] = (int) (x + w / 2);
		indVerts[1][5] = (int) (y + h);
	}

	// Fills a hexagon grid from the level file for the given level number.
	// "O" marks a tile, "B" a barrier (currently treated as empty), anything
	// else an empty space.
	public static Hextile[][] fillHexGrid(int lvl) throws IOException {
		Hextile[][] hextiles;
		String filename = "";
		String[] line;
		// Determines the level properties based on the number
		switch (lvl) {
		case 1:
			size = 21;
			filename = "levels/1.txt";
			break;
		}
		// Calculates the width and height of a single tile
		tiles_w = (screen_x - h_padding * 2) / (size * 3 + 1.0) * 4;
		tiles_h = (screen_y - n_padding - s_padding) / ((double) size);
		// Reads the text file and fills the grid. try-with-resources ensures
		// the reader is closed even if parsing throws (the original leaked
		// the file handle on error paths).
		try (BufferedReader br = new BufferedReader(new FileReader(filename))) {
			hextiles = new Hextile[size][size];
			for (int i = 0; i < size; i++) {
				line = br.readLine().split(" ");
				for (int j = 0; j < size; j++) {
					if (line[j].equals("O")) { // The space is a tile
						hextiles[i][j] = new Hextile(i, j);
					} else if (line[j].equals("B")) {
						hextiles[i][j] = null; // TODO add barrier tiles?
					} else {
						hextiles[i][j] = null; // The space is empty
					}
				}
			}
		}
		return hextiles;
	}

	// Creates a tile at grid position (i, j); axial coordinates are centered
	// so that the middle of the grid is (0, 0).
	public Hextile(int i, int j) {
		setQ(i - size / 2);
		setR(j - size / 2);
		// This part took so long >.> Calculates the coordinates
		x = (int) ((q + size / 2) * tiles_w * 3 / 4.0 + tiles_w / 2.0)
				+ h_padding;
		y = (int) ((Math.abs(q) + (r + Math.min(0, q) + size / 2) * 2)
				* tiles_h / 2.0 + tiles_h / 2.0)
				+ n_padding;
		displayVerts = new int[6][6];
		realVerts = new int[6][6];
		indVerts = new int[6][6];
		fillverts();
	}

	// Calculates where the point is on the grid. Null if not on grid.
	// Returns {q, r} of the containing tile. Narrows the search to the
	// column band the x coordinate falls into before doing polygon tests.
	public static int[] hexContainCal(Hextile[][] hextiles, int x, int y) {
		int[] qr = new int[2];
		int c;
		c = (int) ((x - h_padding) / (tiles_w / 4));
		int p = Math.min(c / 3, size - 1);
		if (c < size * 3 + 1 && c >= 0 && y > n_padding
				&& y < screen_y - s_padding && x > h_padding
				&& x < screen_x - h_padding) {
			if (c % 3 == 0) {
				// On a column boundary the point may belong to either of two
				// adjacent columns; test both.
				for (int i = Math.max(p - 1, 0); i < p + 1; i++) {
					for (int j = 0; j < hextiles[i].length; j++) {
						if (hextiles[i][j] != null
								&& hextiles[i][j].getHexagon().contains(x, y)) {
							qr[0] = hextiles[i][j].getQ();
							qr[1] = hextiles[i][j].getR();
							return qr;
						}
					}
				}
			} else {
				for (int j = 0; j < hextiles[p].length; j++) {
					if (hextiles[p][j] != null
							&& hextiles[p][j].getHexagon().contains(x, y)) {
						qr[0] = hextiles[p][j].getQ();
						qr[1] = hextiles[p][j].getR();
						return qr;
					}
				}
			}
		}
		return null;
	}

	// Draws the tile outline.
	public void draw(Graphics2D g) {
		g.drawPolygon(displayhex);
	}

	// Draws the map boundary outline.
	public static void drawBigContainHex(Graphics2D g) {
		g.draw(bigContainHex);
	}

	// Draws the map boundary with a random per-frame jitter (screen shake).
	public static void drawShakeHex(Graphics2D g) {
		int addX = (int) (Math.random() * 10) - 5, addY = (int) (Math.random() * 10) - 5;
		int[] newXPoints = new int[bigContainHex.xpoints.length];
		int[] newYPoints = new int[bigContainHex.ypoints.length];
		for (int i = 0; i < bigContainHex.xpoints.length; i++) {
			newXPoints[i] = bigContainHex.xpoints[i] + addX;
			newYPoints[i] = bigContainHex.ypoints[i] + addY;
		}
		bigShakeHex = new Polygon(newXPoints, newYPoints, newXPoints.length);
		g.drawPolygon(bigShakeHex);
	}

	// Draws the tile filled with the current color.
	public void drawFilled(Graphics2D g) {
		g.fillPolygon(displayhex);
	}

	// Draws the player-occupied state of the tile.
	public void drawPlayerOcc(Graphics2D g) {
		drawFilled(g);
		// TODO create actual animation
	}

	// Draws the enemy-occupied state with a cast-progress animation.
	public void drawEnemyOcc(Graphics2D g, double progress) {
		drawCasting(g, progress);
		// TODO create actual animation
	}

	// Fills the whole map boundary shape.
	public static void fillBigContainHex(Graphics2D g) {
		g.fillPolygon(bigContainHex);
	}

	// Draws the ability indicators: the tile outline in white plus a
	// highlighted edge chosen by the player's ability focus state.
	public void drawIndicatorOcc(Graphics2D g) {
		g.setColor(Color.white);
		draw(g);
		int state = (int) PlayerAbilities.getAbFocState();
		if (state == 5)
			// Last edge wraps around to vertex 0.
			g.drawLine(indVerts[0][state], indVerts[1][state], indVerts[0][0],
					indVerts[1][0]);
		else
			g.drawLine(indVerts[0][state], indVerts[1][state],
					indVerts[0][state + 1], indVerts[1][state + 1]);
		g.setColor(Color.gray);
	}

	// Draws a rotating, shrinking hexagon to visualize casting progress
	// (progress in [0, 1]; at 1 the hexagon collapses to the center).
	public void drawCasting(Graphics2D g, double progress) {
		double theta = Math.PI * (1 - progress);
		double radius = (tiles_w / 2 - tileHGap) * (1 - progress);
		int[][] tVerts = new int[2][6];
		for (int i = 0; i < 6; i++) {
			tVerts[0][i] = (int) (x + Math.cos(theta + i * Math.PI / 3)
					* radius);
			// Halved vertical radius keeps the hexagon's flattened aspect.
			tVerts[1][i] = (int) (y + Math.sin(theta + i * Math.PI / 3)
					* radius / 2);
		}
		g.drawPolygon(new Polygon(tVerts[0], tVerts[1], 6));
	}

	// Draws the player projectile occupation.
	public void drawPShot(Graphics2D g, int k) {
		drawFilled(g);
	}

	// Draws the enemy projectile occupation.
	public void drawEShot(Graphics2D g, int k) {
		drawFilled(g);
	}

	// Draws two counter-rotating triangles to visualize a spawn animation
	// (progress in [0, 1]).
	public void drawSpawning(Graphics2D g, double progress) {
		double theta = Math.PI * (1 - progress);
		double radius = (tiles_h * 2 / 2 - tileHGap) * (1 - progress) + Math.PI
				/ 2;
		int[][] tVerts = new int[2][3];
		int[][] tVerts2 = new int[2][3];
		for (int i = 0; i < 3; i++) {
			tVerts[0][i] = (int) (x + Math.cos(theta + i * 2 * Math.PI / 3)
					* radius);
			tVerts[1][i] = (int) (y + Math.sin(theta + i * 2 * Math.PI / 3)
					* radius / 2);
			tVerts2[0][i] = (int) (x + Math.cos(-theta - i * 2 * Math.PI / 3)
					* radius);
			tVerts2[1][i] = (int) (y + Math.sin(-theta - i * 2 * Math.PI / 3)
					* radius / 2);
		}
		g.drawPolygon(new Polygon(tVerts[0], tVerts[1], 3));
		g.drawPolygon(new Polygon(tVerts2[0], tVerts2[1], 3));
	}

	// Draws the sprite for spell k centered above the tile.
	// k == 1 and k == 3 currently use the same sprite.
	public void drawSpells(Graphics2D g, int k) {
		if (k == 1) {
			g.drawImage(PlayerSpells.getWrpic().getImage(), x - 12, y - 40, 25,
					50, null);
		} else if (k == 2) {
			g.drawImage(PlayerSpells.getEpic().getImage(), x - 25, y - 40, 50,
					50, null);
		} else if (k == 3) {
			g.drawImage(PlayerSpells.getWrpic().getImage(), x - 12, y - 40, 25,
					50, null);
		}
	}

	// Returns the full-size hexagon used for hit testing.
	public Polygon getHexagon() {
		return hexagon;
	}

	// The setter/getter methods
	public int getQ() {
		return q;
	}

	public void setQ(int q) {
		this.q = q;
	}

	public int getR() {
		return r;
	}

	public void setR(int r) {
		this.r = r;
	}

	public int getX() {
		return x;
	}

	public int getY() {
		return y;
	}

	public double getTilesh() {
		return tiles_h;
	}

	public double getTilesw() {
		return tiles_w;
	}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.snapshots;
import com.carrotsearch.randomizedtesting.LifecycleScope;
import com.google.common.base.Predicate;
import com.google.common.collect.ImmutableList;
import com.google.common.util.concurrent.ListenableFuture;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.action.ListenableActionFuture;
import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryResponse;
import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse;
import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotResponse;
import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsResponse;
import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse;
import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotStatus;
import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ProcessedClusterStateUpdateTask;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.store.support.AbstractIndexStore;
import org.elasticsearch.repositories.RepositoryMissingException;
import org.elasticsearch.snapshots.mockstore.MockRepositoryModule;
import org.elasticsearch.test.junit.annotations.TestLogging;
import org.elasticsearch.threadpool.ThreadPool;
import org.junit.Ignore;
import org.junit.Test;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.EnumSet;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.CountDownLatch;
import static com.google.common.collect.Lists.newArrayList;
import static org.elasticsearch.common.settings.ImmutableSettings.settingsBuilder;
import static org.elasticsearch.test.ElasticsearchIntegrationTest.ClusterScope;
import static org.elasticsearch.test.ElasticsearchIntegrationTest.Scope;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertThrows;
import static org.hamcrest.Matchers.*;
/**
*/
@ClusterScope(scope = Scope.TEST, numDataNodes = 0)
public class DedicatedClusterSnapshotRestoreTests extends AbstractSnapshotTests {
@Test
public void restorePersistentSettingsTest() throws Exception {
logger.info("--> start node");
internalCluster().startNode(settingsBuilder().put("gateway.type", "local"));
Client client = client();
// Add dummy persistent setting
logger.info("--> set test persistent setting");
String settingValue = "test-" + randomInt();
client.admin().cluster().prepareUpdateSettings().setPersistentSettings(ImmutableSettings.settingsBuilder().put(ThreadPool.THREADPOOL_GROUP + "dummy.value", settingValue)).execute().actionGet();
assertThat(client.admin().cluster().prepareState().setRoutingTable(false).setNodes(false).execute().actionGet().getState()
.getMetaData().persistentSettings().get(ThreadPool.THREADPOOL_GROUP + "dummy.value"), equalTo(settingValue));
logger.info("--> create repository");
PutRepositoryResponse putRepositoryResponse = client.admin().cluster().preparePutRepository("test-repo")
.setType("fs").setSettings(ImmutableSettings.settingsBuilder().put("location", newTempDir())).execute().actionGet();
assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true));
logger.info("--> start snapshot");
CreateSnapshotResponse createSnapshotResponse = client.admin().cluster().prepareCreateSnapshot("test-repo", "test-snap").setWaitForCompletion(true).execute().actionGet();
assertThat(createSnapshotResponse.getSnapshotInfo().totalShards(), equalTo(0));
assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), equalTo(0));
assertThat(client.admin().cluster().prepareGetSnapshots("test-repo").setSnapshots("test-snap").execute().actionGet().getSnapshots().get(0).state(), equalTo(SnapshotState.SUCCESS));
logger.info("--> clean the test persistent setting");
client.admin().cluster().prepareUpdateSettings().setPersistentSettings(ImmutableSettings.settingsBuilder().put(ThreadPool.THREADPOOL_GROUP + "dummy.value", "")).execute().actionGet();
assertThat(client.admin().cluster().prepareState().setRoutingTable(false).setNodes(false).execute().actionGet().getState()
.getMetaData().persistentSettings().get(ThreadPool.THREADPOOL_GROUP + "dummy.value"), equalTo(""));
logger.info("--> restore snapshot");
client.admin().cluster().prepareRestoreSnapshot("test-repo", "test-snap").setRestoreGlobalState(true).setWaitForCompletion(true).execute().actionGet();
assertThat(client.admin().cluster().prepareState().setRoutingTable(false).setNodes(false).execute().actionGet().getState()
.getMetaData().persistentSettings().get(ThreadPool.THREADPOOL_GROUP + "dummy.value"), equalTo(settingValue));
}
/**
 * Verifies that custom cluster metadata is filtered correctly on snapshot and restore:
 * only SNAPSHOT-scoped customs should be brought back by a restore with global state,
 * and only GATEWAY-scoped customs should survive a full cluster restart.
 */
@Test
public void restoreCustomMetadata() throws Exception {
    File tempDir = newTempDir();

    logger.info("--> start node");
    internalCluster().startNode(settingsBuilder().put("gateway.type", "local"));
    Client client = client();
    createIndex("test-idx");
    ensureYellow();

    logger.info("--> add custom persistent metadata");
    updateClusterState(new ClusterStateUpdater() {
        @Override
        public ClusterState execute(ClusterState currentState) throws Exception {
            ClusterState.Builder builder = ClusterState.builder(currentState);
            MetaData.Builder metadataBuilder = MetaData.builder(currentState.metaData());
            metadataBuilder.putCustom(SnapshottableMetadata.TYPE, new SnapshottableMetadata("before_snapshot_s"));
            metadataBuilder.putCustom(NonSnapshottableMetadata.TYPE, new NonSnapshottableMetadata("before_snapshot_ns"));
            metadataBuilder.putCustom(SnapshottableGatewayMetadata.TYPE, new SnapshottableGatewayMetadata("before_snapshot_s_gw"));
            metadataBuilder.putCustom(NonSnapshottableGatewayMetadata.TYPE, new NonSnapshottableGatewayMetadata("before_snapshot_ns_gw"));
            metadataBuilder.putCustom(SnapshotableGatewayNoApiMetadata.TYPE, new SnapshotableGatewayNoApiMetadata("before_snapshot_s_gw_noapi"));
            builder.metaData(metadataBuilder);
            return builder.build();
        }
    });

    logger.info("--> create repository");
    PutRepositoryResponse putRepositoryResponse = client.admin().cluster().preparePutRepository("test-repo")
            .setType("fs").setSettings(ImmutableSettings.settingsBuilder().put("location", tempDir)).execute().actionGet();
    assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true));

    logger.info("--> start snapshot");
    CreateSnapshotResponse createSnapshotResponse = client.admin().cluster().prepareCreateSnapshot("test-repo", "test-snap").setWaitForCompletion(true).execute().actionGet();
    assertThat(createSnapshotResponse.getSnapshotInfo().totalShards(), greaterThan(0));
    // BUG FIX: the original compared successfulShards() with itself (always true);
    // the intent is that every shard of the snapshot succeeded.
    assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), equalTo(createSnapshotResponse.getSnapshotInfo().totalShards()));
    assertThat(client.admin().cluster().prepareGetSnapshots("test-repo").setSnapshots("test-snap").execute().actionGet().getSnapshots().get(0).state(), equalTo(SnapshotState.SUCCESS));

    logger.info("--> change custom persistent metadata");
    updateClusterState(new ClusterStateUpdater() {
        @Override
        public ClusterState execute(ClusterState currentState) throws Exception {
            ClusterState.Builder builder = ClusterState.builder(currentState);
            MetaData.Builder metadataBuilder = MetaData.builder(currentState.metaData());
            // Randomly overwrite or remove the snapshottable customs - either way the
            // restore below must bring back the "before_snapshot_*" values.
            if (randomBoolean()) {
                metadataBuilder.putCustom(SnapshottableMetadata.TYPE, new SnapshottableMetadata("after_snapshot_s"));
            } else {
                metadataBuilder.removeCustom(SnapshottableMetadata.TYPE);
            }
            metadataBuilder.putCustom(NonSnapshottableMetadata.TYPE, new NonSnapshottableMetadata("after_snapshot_ns"));
            if (randomBoolean()) {
                metadataBuilder.putCustom(SnapshottableGatewayMetadata.TYPE, new SnapshottableGatewayMetadata("after_snapshot_s_gw"));
            } else {
                metadataBuilder.removeCustom(SnapshottableGatewayMetadata.TYPE);
            }
            metadataBuilder.putCustom(NonSnapshottableGatewayMetadata.TYPE, new NonSnapshottableGatewayMetadata("after_snapshot_ns_gw"));
            metadataBuilder.removeCustom(SnapshotableGatewayNoApiMetadata.TYPE);
            builder.metaData(metadataBuilder);
            return builder.build();
        }
    });

    logger.info("--> delete repository");
    assertAcked(client.admin().cluster().prepareDeleteRepository("test-repo"));

    logger.info("--> create repository");
    putRepositoryResponse = client.admin().cluster().preparePutRepository("test-repo-2")
            .setType("fs").setSettings(ImmutableSettings.settingsBuilder().put("location", tempDir)).execute().actionGet();
    assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true));

    logger.info("--> restore snapshot");
    // "-*" excludes all indices: only the global state is restored.
    client.admin().cluster().prepareRestoreSnapshot("test-repo-2", "test-snap").setRestoreGlobalState(true).setIndices("-*").setWaitForCompletion(true).execute().actionGet();

    logger.info("--> make sure old repository wasn't restored");
    assertThrows(client.admin().cluster().prepareGetRepositories("test-repo"), RepositoryMissingException.class);
    assertThat(client.admin().cluster().prepareGetRepositories("test-repo-2").get().repositories().size(), equalTo(1));

    logger.info("--> check that custom persistent metadata was restored");
    ClusterState clusterState = client.admin().cluster().prepareState().get().getState();
    logger.info("Cluster state: {}", clusterState);
    MetaData metaData = clusterState.getMetaData();
    // Snapshottable customs revert to pre-snapshot values; non-snapshottable ones keep
    // the post-snapshot values.
    assertThat(((SnapshottableMetadata) metaData.custom(SnapshottableMetadata.TYPE)).getData(), equalTo("before_snapshot_s"));
    assertThat(((NonSnapshottableMetadata) metaData.custom(NonSnapshottableMetadata.TYPE)).getData(), equalTo("after_snapshot_ns"));
    assertThat(((SnapshottableGatewayMetadata) metaData.custom(SnapshottableGatewayMetadata.TYPE)).getData(), equalTo("before_snapshot_s_gw"));
    assertThat(((NonSnapshottableGatewayMetadata) metaData.custom(NonSnapshottableGatewayMetadata.TYPE)).getData(), equalTo("after_snapshot_ns_gw"));

    logger.info("--> restart all nodes");
    internalCluster().fullRestart();
    ensureYellow();

    logger.info("--> check that gateway-persistent custom metadata survived full cluster restart");
    clusterState = client().admin().cluster().prepareState().get().getState();
    logger.info("Cluster state: {}", clusterState);
    metaData = clusterState.getMetaData();
    // Only GATEWAY-scoped customs survive the restart.
    assertThat(metaData.custom(SnapshottableMetadata.TYPE), nullValue());
    assertThat(metaData.custom(NonSnapshottableMetadata.TYPE), nullValue());
    assertThat(((SnapshottableGatewayMetadata) metaData.custom(SnapshottableGatewayMetadata.TYPE)).getData(), equalTo("before_snapshot_s_gw"));
    assertThat(((NonSnapshottableGatewayMetadata) metaData.custom(NonSnapshottableGatewayMetadata.TYPE)).getData(), equalTo("after_snapshot_ns_gw"));
    // Shouldn't be returned as part of API response
    assertThat(metaData.custom(SnapshotableGatewayNoApiMetadata.TYPE), nullValue());
    // But should still be in state
    metaData = internalCluster().getInstance(ClusterService.class).state().metaData();
    assertThat(((SnapshotableGatewayNoApiMetadata) metaData.custom(SnapshotableGatewayNoApiMetadata.TYPE)).getData(), equalTo("before_snapshot_s_gw_noapi"));
}
/**
 * Applies the given updater on the master's cluster-state thread and blocks the
 * calling test thread until the resulting state has been processed (or the update
 * failed - failures are not propagated, the latch is simply released).
 */
private void updateClusterState(final ClusterStateUpdater updater) throws InterruptedException {
    final ClusterService clusterService = internalCluster().getInstance(ClusterService.class);
    final CountDownLatch processed = new CountDownLatch(1);
    clusterService.submitStateUpdateTask("test", new ProcessedClusterStateUpdateTask() {
        @Override
        public ClusterState execute(ClusterState currentState) throws Exception {
            return updater.execute(currentState);
        }

        @Override
        public void onFailure(String source, @Nullable Throwable t) {
            processed.countDown();
        }

        @Override
        public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
            processed.countDown();
        }
    });
    processed.await();
}
private static interface ClusterStateUpdater {
public ClusterState execute(ClusterState currentState) throws Exception;
}
/**
 * Tests that a snapshot still completes (possibly with shard failures) when the node
 * executing a blocked shard snapshot is shut down mid-snapshot.
 */
@Test
public void snapshotDuringNodeShutdownTest() throws Exception {
    logger.info("--> start 2 nodes");
    Client client = client();

    assertAcked(prepareCreate("test-idx", 2, settingsBuilder().put("number_of_shards", 2).put("number_of_replicas", 0)));
    ensureGreen();

    logger.info("--> indexing some data");
    for (int i = 0; i < 100; i++) {
        index("test-idx", "doc", Integer.toString(i), "foo", "bar" + i);
    }
    refresh();
    assertThat(client.prepareCount("test-idx").get().getCount(), equalTo(100L));

    // Mock repository that lets the test block snapshot execution on a data node.
    // (The original logged "create repository" twice; the duplicate was removed.)
    logger.info("--> creating repository");
    PutRepositoryResponse putRepositoryResponse = client.admin().cluster().preparePutRepository("test-repo")
            .setType(MockRepositoryModule.class.getCanonicalName()).setSettings(
                    ImmutableSettings.settingsBuilder()
                            .put("location", newTempDir(LifecycleScope.TEST))
                            .put("random", randomAsciiOfLength(10))
                            .put("wait_after_unblock", 200)
            ).get();
    assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true));

    // Pick one node and block it
    String blockedNode = blockNodeWithIndex("test-idx");

    logger.info("--> snapshot");
    client.admin().cluster().prepareCreateSnapshot("test-repo", "test-snap").setWaitForCompletion(false).setIndices("test-idx").get();

    logger.info("--> waiting for block to kick in");
    waitForBlock(blockedNode, "test-repo", TimeValue.timeValueSeconds(60));

    logger.info("--> execution was blocked on node [{}], shutting it down", blockedNode);
    unblockNode(blockedNode);

    // BUG FIX: the original message had no "{}" placeholder, so the node name was
    // silently dropped from the log output.
    logger.info("--> stopping node [{}]", blockedNode);
    stopNode(blockedNode);

    logger.info("--> waiting for completion");
    SnapshotInfo snapshotInfo = waitForCompletion("test-repo", "test-snap", TimeValue.timeValueSeconds(60));
    logger.info("Number of failed shards [{}]", snapshotInfo.shardFailures().size());
    logger.info("--> done");
}
/**
 * Tests aborting a snapshot while one shard snapshot is blocked on a node that is then
 * stopped: the snapshot must end up deleted, either by the abort itself or by the
 * cleanup performed on master failover.
 */
@Test
public void snapshotWithStuckNodeTest() throws Exception {
    logger.info("--> start 2 nodes");
    ArrayList<String> nodes = newArrayList();
    nodes.add(internalCluster().startNode());
    nodes.add(internalCluster().startNode());
    Client client = client();

    assertAcked(prepareCreate("test-idx", 2, settingsBuilder().put("number_of_shards", 2).put("number_of_replicas", 0)));
    ensureGreen();

    logger.info("--> indexing some data");
    for (int i = 0; i < 100; i++) {
        index("test-idx", "doc", Integer.toString(i), "foo", "bar" + i);
    }
    refresh();
    assertThat(client.prepareCount("test-idx").get().getCount(), equalTo(100L));

    logger.info("--> creating repository");
    // Mock repository that lets the test block snapshot execution on a data node.
    PutRepositoryResponse putRepositoryResponse = client.admin().cluster().preparePutRepository("test-repo")
            .setType(MockRepositoryModule.class.getCanonicalName()).setSettings(
                    ImmutableSettings.settingsBuilder()
                            .put("location", newTempDir(LifecycleScope.TEST))
                            .put("random", randomAsciiOfLength(10))
                            .put("wait_after_unblock", 200)
            ).get();
    assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true));

    // Pick one node and block it
    String blockedNode = blockNodeWithIndex("test-idx");
    // Remove it from the list of available nodes
    nodes.remove(blockedNode);

    logger.info("--> snapshot");
    client.admin().cluster().prepareCreateSnapshot("test-repo", "test-snap").setWaitForCompletion(false).setIndices("test-idx").get();

    logger.info("--> waiting for block to kick in");
    waitForBlock(blockedNode, "test-repo", TimeValue.timeValueSeconds(60));

    logger.info("--> execution was blocked on node [{}], aborting snapshot", blockedNode);
    // Issue the delete through a node that stays up, since the blocked node is stopped below.
    ListenableActionFuture<DeleteSnapshotResponse> deleteSnapshotResponseFuture = internalCluster().client(nodes.get(0)).admin().cluster().prepareDeleteSnapshot("test-repo", "test-snap").execute();
    // Make sure that abort makes some progress
    Thread.sleep(100);
    unblockNode(blockedNode);

    // BUG FIX: the original message had no "{}" placeholder, so the node name was
    // silently dropped from the log output.
    logger.info("--> stopping node [{}]", blockedNode);
    stopNode(blockedNode);
    try {
        DeleteSnapshotResponse deleteSnapshotResponse = deleteSnapshotResponseFuture.actionGet();
        assertThat(deleteSnapshotResponse.isAcknowledged(), equalTo(true));
    } catch (SnapshotMissingException ex) {
        // When master node is closed during this test, it sometime manages to delete the snapshot files before
        // completely stopping. In this case the retried delete snapshot operation on the new master can fail
        // with SnapshotMissingException
    }

    logger.info("--> making sure that snapshot no longer exists");
    assertThrows(client().admin().cluster().prepareGetSnapshots("test-repo").setSnapshots("test-snap").execute(), SnapshotMissingException.class);
    logger.info("--> done");
}
/**
 * Tests snapshots and restores with unassigned primary shards: a default (non-partial)
 * snapshot must FAIL, a partial one must complete as PARTIAL, and restores succeed only
 * for shards that were actually snapshotted.
 */
@Test
@TestLogging("snapshots:TRACE")
public void restoreIndexWithMissingShards() throws Exception {
    logger.info("--> start 2 nodes");
    internalCluster().startNode(settingsBuilder().put("gateway.type", "local"));
    internalCluster().startNode(settingsBuilder().put("gateway.type", "local"));
    cluster().wipeIndices("_all");

    logger.info("--> create an index that will have some unallocated shards");
    assertAcked(prepareCreate("test-idx-some", 2, settingsBuilder().put("number_of_shards", 6)
            .put("number_of_replicas", 0)));
    ensureGreen();

    logger.info("--> indexing some data into test-idx-some");
    for (int i = 0; i < 100; i++) {
        index("test-idx-some", "doc", Integer.toString(i), "foo", "bar" + i);
    }
    refresh();
    assertThat(client().prepareCount("test-idx-some").get().getCount(), equalTo(100L));

    // Stopping a node leaves roughly half of test-idx-some's primaries unassigned.
    logger.info("--> shutdown one of the nodes");
    internalCluster().stopRandomDataNode();
    assertThat(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout("1m").setWaitForNodes("<2").execute().actionGet().isTimedOut(), equalTo(false));

    logger.info("--> create an index that will have all allocated shards");
    assertAcked(prepareCreate("test-idx-all", 1, settingsBuilder().put("number_of_shards", 6)
            .put("number_of_replicas", 0)));
    ensureGreen("test-idx-all");

    logger.info("--> indexing some data into test-idx-all");
    for (int i = 0; i < 100; i++) {
        index("test-idx-all", "doc", Integer.toString(i), "foo", "bar" + i);
    }
    refresh();
    assertThat(client().prepareCount("test-idx-all").get().getCount(), equalTo(100L));

    logger.info("--> create an index that will have no allocated shards");
    // The allocation filter matches no node, so all 6 shards stay unassigned.
    assertAcked(prepareCreate("test-idx-none", 1, settingsBuilder().put("number_of_shards", 6)
            .put("index.routing.allocation.include.tag", "nowhere")
            .put("number_of_replicas", 0)));

    // (The original logged "create repository" twice; the duplicate was removed.)
    logger.info("--> creating repository");
    PutRepositoryResponse putRepositoryResponse = client().admin().cluster().preparePutRepository("test-repo")
            .setType("fs").setSettings(ImmutableSettings.settingsBuilder().put("location", newTempDir())).execute().actionGet();
    assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true));

    logger.info("--> start snapshot with default settings - should fail");
    CreateSnapshotResponse createSnapshotResponse = client().admin().cluster().prepareCreateSnapshot("test-repo", "test-snap-1").setWaitForCompletion(true).execute().actionGet();
    assertThat(createSnapshotResponse.getSnapshotInfo().state(), equalTo(SnapshotState.FAILED));

    if (randomBoolean()) {
        logger.info("checking snapshot completion using status");
        client().admin().cluster().prepareCreateSnapshot("test-repo", "test-snap-2").setWaitForCompletion(false).setPartial(true).execute().actionGet();
        awaitBusy(new Predicate<Object>() {
            @Override
            public boolean apply(Object o) {
                SnapshotsStatusResponse snapshotsStatusResponse = client().admin().cluster().prepareSnapshotStatus("test-repo").setSnapshots("test-snap-2").get();
                ImmutableList<SnapshotStatus> snapshotStatuses = snapshotsStatusResponse.getSnapshots();
                if (snapshotStatuses.size() == 1) {
                    logger.trace("current snapshot status [{}]", snapshotStatuses.get(0));
                    return snapshotStatuses.get(0).getState().completed();
                }
                return false;
            }
        });
        SnapshotsStatusResponse snapshotsStatusResponse = client().admin().cluster().prepareSnapshotStatus("test-repo").setSnapshots("test-snap-2").get();
        ImmutableList<SnapshotStatus> snapshotStatuses = snapshotsStatusResponse.getSnapshots();
        assertThat(snapshotStatuses.size(), equalTo(1));
        SnapshotStatus snapshotStatus = snapshotStatuses.get(0);
        // NOTE(review): this logs the state of the earlier FAILED "test-snap-1" response,
        // not of "test-snap-2" - looks like copy/paste; kept as informational output.
        logger.info("State: [{}], Reason: [{}]", createSnapshotResponse.getSnapshotInfo().state(), createSnapshotResponse.getSnapshotInfo().reason());
        // 3 indices x 6 shards = 18 total; test-idx-all (6) plus part of test-idx-some done.
        assertThat(snapshotStatus.getShardsStats().getTotalShards(), equalTo(18));
        assertThat(snapshotStatus.getShardsStats().getDoneShards(), lessThan(12));
        assertThat(snapshotStatus.getShardsStats().getDoneShards(), greaterThan(6));

        // There is slight delay between snapshot being marked as completed in the cluster state and on the file system
        // After it was marked as completed in the cluster state - we need to check if it's completed on the file system as well
        awaitBusy(new Predicate<Object>() {
            @Override
            public boolean apply(Object o) {
                GetSnapshotsResponse response = client().admin().cluster().prepareGetSnapshots("test-repo").setSnapshots("test-snap-2").get();
                assertThat(response.getSnapshots().size(), equalTo(1));
                SnapshotInfo snapshotInfo = response.getSnapshots().get(0);
                if (snapshotInfo.state().completed()) {
                    assertThat(snapshotInfo.state(), equalTo(SnapshotState.PARTIAL));
                    return true;
                }
                return false;
            }
        });
    } else {
        logger.info("checking snapshot completion using wait_for_completion flag");
        createSnapshotResponse = client().admin().cluster().prepareCreateSnapshot("test-repo", "test-snap-2").setWaitForCompletion(true).setPartial(true).execute().actionGet();
        logger.info("State: [{}], Reason: [{}]", createSnapshotResponse.getSnapshotInfo().state(), createSnapshotResponse.getSnapshotInfo().reason());
        assertThat(createSnapshotResponse.getSnapshotInfo().totalShards(), equalTo(18));
        assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), lessThan(12));
        assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), greaterThan(6));
        assertThat(client().admin().cluster().prepareGetSnapshots("test-repo").setSnapshots("test-snap-2").execute().actionGet().getSnapshots().get(0).state(), equalTo(SnapshotState.PARTIAL));
    }
    // Close the indices so the restores below are allowed to replace them.
    assertAcked(client().admin().indices().prepareClose("test-idx-some", "test-idx-all").execute().actionGet());

    logger.info("--> restore incomplete snapshot - should fail");
    assertThrows(client().admin().cluster().prepareRestoreSnapshot("test-repo", "test-snap-2").setRestoreGlobalState(false).setWaitForCompletion(true).execute(), SnapshotRestoreException.class);

    logger.info("--> restore snapshot for the index that was snapshotted completely");
    RestoreSnapshotResponse restoreSnapshotResponse = client().admin().cluster().prepareRestoreSnapshot("test-repo", "test-snap-2").setRestoreGlobalState(false).setIndices("test-idx-all").setWaitForCompletion(true).execute().actionGet();
    assertThat(restoreSnapshotResponse.getRestoreInfo(), notNullValue());
    assertThat(restoreSnapshotResponse.getRestoreInfo().totalShards(), equalTo(6));
    assertThat(restoreSnapshotResponse.getRestoreInfo().successfulShards(), equalTo(6));
    assertThat(restoreSnapshotResponse.getRestoreInfo().failedShards(), equalTo(0));
    ensureGreen("test-idx-all");
    assertThat(client().prepareCount("test-idx-all").get().getCount(), equalTo(100L));

    logger.info("--> restore snapshot for the partial index");
    cluster().wipeIndices("test-idx-some");
    restoreSnapshotResponse = client().admin().cluster().prepareRestoreSnapshot("test-repo", "test-snap-2")
            .setRestoreGlobalState(false).setIndices("test-idx-some").setPartial(true).setWaitForCompletion(true).get();
    assertThat(restoreSnapshotResponse.getRestoreInfo(), notNullValue());
    assertThat(restoreSnapshotResponse.getRestoreInfo().totalShards(), equalTo(6));
    assertThat(restoreSnapshotResponse.getRestoreInfo().successfulShards(), allOf(greaterThan(0), lessThan(6)));
    assertThat(restoreSnapshotResponse.getRestoreInfo().failedShards(), greaterThan(0));
    ensureGreen("test-idx-some");
    assertThat(client().prepareCount("test-idx-some").get().getCount(), allOf(greaterThan(0L), lessThan(100L)));

    logger.info("--> restore snapshot for the index that didn't have any shards snapshotted successfully");
    cluster().wipeIndices("test-idx-none");
    restoreSnapshotResponse = client().admin().cluster().prepareRestoreSnapshot("test-repo", "test-snap-2")
            .setRestoreGlobalState(false).setIndices("test-idx-none").setPartial(true).setWaitForCompletion(true).get();
    assertThat(restoreSnapshotResponse.getRestoreInfo(), notNullValue());
    assertThat(restoreSnapshotResponse.getRestoreInfo().totalShards(), equalTo(6));
    assertThat(restoreSnapshotResponse.getRestoreInfo().successfulShards(), equalTo(0));
    assertThat(restoreSnapshotResponse.getRestoreInfo().failedShards(), equalTo(6));
    // NOTE(review): these final checks re-verify "test-idx-some" rather than "test-idx-none";
    // possibly intentional since test-idx-none has no restorable shards - confirm.
    ensureGreen("test-idx-some");
    assertThat(client().prepareCount("test-idx-some").get().getCount(), allOf(greaterThan(0L), lessThan(100L)));
}
/**
 * Chaos test: starts a partial snapshot over "test-*" and, while it runs, randomly
 * deletes indices, stops data nodes, and creates new indices, asserting only that the
 * snapshot eventually finishes (state does not matter). Currently {@code @Ignore}d.
 */
@Test
@TestLogging("snapshots:TRACE,repositories:TRACE")
@Ignore
public void chaosSnapshotTest() throws Exception {
    // CopyOnWriteArrayList: mutated concurrently by the async index-creation threads below.
    final List<String> indices = new CopyOnWriteArrayList<>();
    Settings settings = settingsBuilder().put("action.write_consistency", "one").build();
    int initialNodes = between(1, 3);
    logger.info("--> start {} nodes", initialNodes);
    for (int i = 0; i < initialNodes; i++) {
        internalCluster().startNode(settings);
    }
    logger.info("--> creating repository");
    assertAcked(client().admin().cluster().preparePutRepository("test-repo")
            .setType("fs").setSettings(ImmutableSettings.settingsBuilder()
                    .put("location", newTempDir(LifecycleScope.SUITE))
                    .put("compress", randomBoolean())
                    .put("chunk_size", randomIntBetween(100, 1000))));
    int initialIndices = between(1, 3);
    logger.info("--> create {} indices", initialIndices);
    for (int i = 0; i < initialIndices; i++) {
        createTestIndex("test-" + i);
        indices.add("test-" + i);
    }
    // Add nodes and indices concurrently with the snapshot to create churn.
    int asyncNodes = between(0, 5);
    logger.info("--> start {} additional nodes asynchronously", asyncNodes);
    ListenableFuture<List<String>> asyncNodesFuture = internalCluster().startNodesAsync(asyncNodes, settings);
    int asyncIndices = between(0, 10);
    logger.info("--> create {} additional indices asynchronously", asyncIndices);
    Thread[] asyncIndexThreads = new Thread[asyncIndices];
    for (int i = 0; i < asyncIndices; i++) {
        final int cur = i;
        asyncIndexThreads[i] = new Thread(new Runnable() {
            @Override
            public void run() {
                createTestIndex("test-async-" + cur);
                indices.add("test-async-" + cur);
            }
        });
        asyncIndexThreads[i].start();
    }
    logger.info("--> snapshot");
    // Partial snapshot so shard failures caused by the chaos below don't abort it.
    ListenableActionFuture<CreateSnapshotResponse> snapshotResponseFuture = client().admin().cluster().prepareCreateSnapshot("test-repo", "test-snap").setWaitForCompletion(true).setIndices("test-*").setPartial(true).execute();
    long start = System.currentTimeMillis();
    // Produce chaos for 30 sec or until snapshot is done whatever comes first
    int randomIndices = 0;
    while (System.currentTimeMillis() - start < 30000 && !snapshotIsDone("test-repo", "test-snap")) {
        Thread.sleep(100);
        int chaosType = randomInt(10);
        if (chaosType < 4) {
            // Randomly delete an index
            if (indices.size() > 0) {
                String index = indices.remove(randomInt(indices.size() - 1));
                logger.info("--> deleting random index [{}]", index);
                internalCluster().wipeIndices(index);
            }
        } else if (chaosType < 6) {
            // Randomly shutdown a node
            if (cluster().size() > 1) {
                logger.info("--> shutting down random node");
                internalCluster().stopRandomDataNode();
            }
        } else if (chaosType < 8) {
            // Randomly create an index
            String index = "test-rand-" + randomIndices;
            logger.info("--> creating random index [{}]", index);
            createTestIndex(index);
            randomIndices++;
        } else {
            // Take a break
            logger.info("--> noop");
        }
    }
    logger.info("--> waiting for async indices creation to finish");
    for (int i = 0; i < asyncIndices; i++) {
        asyncIndexThreads[i].join();
    }
    // Undo the store throttling that createTestIndex() applied to each index.
    logger.info("--> update index settings to back to normal");
    assertAcked(client().admin().indices().prepareUpdateSettings("test-*").setSettings(ImmutableSettings.builder()
            .put(AbstractIndexStore.INDEX_STORE_THROTTLE_TYPE, "node")
    ));
    // Make sure that snapshot finished - doesn't matter if it failed or succeeded
    try {
        CreateSnapshotResponse snapshotResponse = snapshotResponseFuture.get();
        SnapshotInfo snapshotInfo = snapshotResponse.getSnapshotInfo();
        assertNotNull(snapshotInfo);
        logger.info("--> snapshot is done with state [{}], total shards [{}], successful shards [{}]", snapshotInfo.state(), snapshotInfo.totalShards(), snapshotInfo.successfulShards());
    } catch (Exception ex) {
        logger.info("--> snapshot didn't start properly", ex);
    }
    // Make sure the async node startups finished before the test (and its cleanup) ends.
    asyncNodesFuture.get();
    logger.info("--> done");
}
/**
 * Checks whether the given snapshot has completed (successfully or otherwise).
 *
 * @param repository repository name
 * @param snapshot   snapshot name
 * @return true if at least one reported snapshot status is in a completed state;
 *         false if there is no status yet or the snapshot record is missing
 */
private boolean snapshotIsDone(String repository, String snapshot) {
    try {
        SnapshotsStatusResponse response = client().admin().cluster().prepareSnapshotStatus(repository).setSnapshots(snapshot).get();
        for (SnapshotStatus status : response.getSnapshots()) {
            if (status.getState().completed()) {
                return true;
            }
        }
    } catch (SnapshotMissingException ignored) {
        // Snapshot record may not exist yet - treat as "not done".
    }
    return false;
}
/**
 * Creates a test index with randomized shard/replica counts, fills it with a random
 * number of documents, and throttles its store so snapshotting it takes a while.
 *
 * @param name index name
 */
private void createTestIndex(String name) {
    assertAcked(prepareCreate(name, 0, settingsBuilder().put("number_of_shards", between(1, 6))
            .put("number_of_replicas", between(1, 6))));
    ensureYellow(name);

    logger.info("--> indexing some data into {}", name);
    // BUG FIX: the original evaluated between(10, 500) in the loop condition, drawing a
    // fresh random bound on every iteration; draw the document count once instead.
    int numDocs = between(10, 500);
    for (int i = 0; i < numDocs; i++) {
        index(name, "doc", Integer.toString(i), "foo", "bar" + i);
    }

    // Throttle the store so chaos operations overlap with the in-flight snapshot.
    assertAcked(client().admin().indices().prepareUpdateSettings(name).setSettings(ImmutableSettings.builder()
            .put(AbstractIndexStore.INDEX_STORE_THROTTLE_TYPE, "all")
            .put(AbstractIndexStore.INDEX_STORE_THROTTLE_MAX_BYTES_PER_SEC, between(100, 50000))
    ));
}
/**
 * Base class for the custom cluster-state metadata used by the restore tests.
 * Wraps a single string payload; equality and hashing are based solely on that payload.
 */
public static abstract class TestCustomMetaData implements MetaData.Custom {
    private final String data;

    protected TestCustomMetaData(String data) {
        this.data = data;
    }

    /** Returns the string payload carried by this metadata instance. */
    public String getData() {
        return data;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        // Simplified from the original "if (!...) return false; return true;" form.
        return data.equals(((TestCustomMetaData) o).data);
    }

    @Override
    public int hashCode() {
        return data.hashCode();
    }

    /**
     * Factory base that handles stream (de)serialization and XContent rendering of the
     * single "data" field; subclasses only supply the concrete metadata type.
     */
    public static abstract class TestCustomMetaDataFactory<T extends TestCustomMetaData> extends MetaData.Custom.Factory<T> {

        /** Creates the concrete metadata instance for the given payload. */
        protected abstract TestCustomMetaData newTestCustomMetaData(String data);

        @Override
        @SuppressWarnings("unchecked") // newTestCustomMetaData is expected to return a T
        public T readFrom(StreamInput in) throws IOException {
            return (T) newTestCustomMetaData(in.readString());
        }

        @Override
        public void writeTo(T metadata, StreamOutput out) throws IOException {
            out.writeString(metadata.getData());
        }

        @Override
        @SuppressWarnings("unchecked") // newTestCustomMetaData is expected to return a T
        public T fromXContent(XContentParser parser) throws IOException {
            XContentParser.Token token;
            String data = null;
            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                if (token == XContentParser.Token.FIELD_NAME) {
                    String currentFieldName = parser.currentName();
                    if ("data".equals(currentFieldName)) {
                        if (parser.nextToken() != XContentParser.Token.VALUE_STRING) {
                            throw new ElasticsearchParseException("failed to parse snapshottable metadata, invalid data type");
                        }
                        data = parser.text();
                    } else {
                        throw new ElasticsearchParseException("failed to parse snapshottable metadata, unknown field [" + currentFieldName + "]");
                    }
                } else {
                    throw new ElasticsearchParseException("failed to parse snapshottable metadata");
                }
            }
            if (data == null) {
                throw new ElasticsearchParseException("failed to parse snapshottable metadata, data not found");
            }
            return (T) newTestCustomMetaData(data);
        }

        @Override
        public void toXContent(T metadata, XContentBuilder builder, ToXContent.Params params) throws IOException {
            builder.field("data", metadata.getData());
        }
    }
}
// Register the custom metadata factories once per class load so the test metadata
// types above can be (de)serialized as part of the cluster state.
static {
    MetaData.registerFactory(SnapshottableMetadata.TYPE, SnapshottableMetadata.FACTORY);
    MetaData.registerFactory(NonSnapshottableMetadata.TYPE, NonSnapshottableMetadata.FACTORY);
    MetaData.registerFactory(SnapshottableGatewayMetadata.TYPE, SnapshottableGatewayMetadata.FACTORY);
    MetaData.registerFactory(NonSnapshottableGatewayMetadata.TYPE, NonSnapshottableGatewayMetadata.FACTORY);
    MetaData.registerFactory(SnapshotableGatewayNoApiMetadata.TYPE, SnapshotableGatewayNoApiMetadata.FACTORY);
}
/**
 * Custom metadata with API_AND_SNAPSHOT context: included in API responses and
 * snapshots, so it is restored from a snapshot but does not survive a restart.
 */
public static class SnapshottableMetadata extends TestCustomMetaData {
    public static final String TYPE = "test_snapshottable";

    public static final Factory FACTORY = new Factory();

    public SnapshottableMetadata(String data) {
        super(data);
    }

    private static class Factory extends TestCustomMetaDataFactory<SnapshottableMetadata> {

        @Override
        public String type() {
            return TYPE;
        }

        @Override
        protected SnapshottableMetadata newTestCustomMetaData(String data) {
            // Covariant return type (was TestCustomMetaData) for consistency with the
            // sibling factories in this file.
            return new SnapshottableMetadata(data);
        }

        @Override
        public EnumSet<MetaData.XContentContext> context() {
            return MetaData.API_AND_SNAPSHOT;
        }
    }
}
/**
 * Custom metadata that is not snapshot-persistent: after a restore with global state
 * it should keep its post-snapshot value rather than revert.
 */
public static class NonSnapshottableMetadata extends TestCustomMetaData {
    public static final String TYPE = "test_non_snapshottable";
    public static final Factory FACTORY = new Factory();
    public NonSnapshottableMetadata(String data) {
        super(data);
    }
    private static class Factory extends TestCustomMetaDataFactory<NonSnapshottableMetadata> {
        @Override
        public String type() {
            return TYPE;
        }
        @Override
        protected NonSnapshottableMetadata newTestCustomMetaData(String data) {
            return new NonSnapshottableMetadata(data);
        }
        // NOTE(review): no context() override - relies on the factory's default
        // persistence context (presumably API-only; confirm against MetaData.Custom.Factory).
    }
}
/**
 * Custom metadata persisted everywhere (API, snapshot, and gateway): restored from
 * snapshots and also survives a full cluster restart.
 */
public static class SnapshottableGatewayMetadata extends TestCustomMetaData {
    public static final String TYPE = "test_snapshottable_gateway";

    public static final Factory FACTORY = new Factory();

    public SnapshottableGatewayMetadata(String data) {
        super(data);
    }

    private static class Factory extends TestCustomMetaDataFactory<SnapshottableGatewayMetadata> {

        @Override
        public String type() {
            return TYPE;
        }

        @Override
        protected SnapshottableGatewayMetadata newTestCustomMetaData(String data) {
            // Covariant return type (was TestCustomMetaData) for consistency with the
            // sibling factories in this file.
            return new SnapshottableGatewayMetadata(data);
        }

        @Override
        public EnumSet<MetaData.XContentContext> context() {
            return EnumSet.of(MetaData.XContentContext.API, MetaData.XContentContext.SNAPSHOT, MetaData.XContentContext.GATEWAY);
        }
    }
}
/**
 * Custom metadata with API_AND_GATEWAY context: visible through the API and persisted
 * across full cluster restarts, but not included in (or restored from) snapshots.
 */
public static class NonSnapshottableGatewayMetadata extends TestCustomMetaData {
    public static final String TYPE = "test_non_snapshottable_gateway";
    public static final Factory FACTORY = new Factory();
    public NonSnapshottableGatewayMetadata(String data) {
        super(data);
    }
    private static class Factory extends TestCustomMetaDataFactory<NonSnapshottableGatewayMetadata> {
        @Override
        public String type() {
            return TYPE;
        }
        @Override
        protected NonSnapshottableGatewayMetadata newTestCustomMetaData(String data) {
            return new NonSnapshottableGatewayMetadata(data);
        }
        @Override
        public EnumSet<MetaData.XContentContext> context() {
            return MetaData.API_AND_GATEWAY;
        }
    }
}
/**
 * Custom metadata with GATEWAY and SNAPSHOT context but no API context: persisted in
 * snapshots and across restarts, yet absent from API cluster-state responses (the
 * tests fetch it directly from ClusterService instead).
 */
public static class SnapshotableGatewayNoApiMetadata extends TestCustomMetaData {
    public static final String TYPE = "test_snapshottable_gateway_no_api";
    public static final Factory FACTORY = new Factory();
    public SnapshotableGatewayNoApiMetadata(String data) {
        super(data);
    }
    private static class Factory extends TestCustomMetaDataFactory<SnapshotableGatewayNoApiMetadata> {
        @Override
        public String type() {
            return TYPE;
        }
        @Override
        protected SnapshotableGatewayNoApiMetadata newTestCustomMetaData(String data) {
            return new SnapshotableGatewayNoApiMetadata(data);
        }
        @Override
        public EnumSet<MetaData.XContentContext> context() {
            return EnumSet.of(MetaData.XContentContext.GATEWAY, MetaData.XContentContext.SNAPSHOT);
        }
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.commons;
import java.util.HashMap;
import java.util.Map;
import javax.jcr.NamespaceException;
import javax.jcr.NamespaceRegistry;
import javax.jcr.RepositoryException;
import javax.jcr.Session;
import org.apache.jackrabbit.util.XMLChar;
/**
* Helper class for working with JCR namespaces.
*
* @since Jackrabbit JCR Commons 1.5
*/
public class NamespaceHelper {
/**
* The <code>jcr</code> namespace URI.
*/
public static final String JCR = "http://www.jcp.org/jcr/1.0";
/**
* The <code>nt</code> namespace URI.
*/
public static final String NT = "http://www.jcp.org/jcr/nt/1.0";
/**
* The <code>mix</code> namespace URI.
*/
public static final String MIX = "http://www.jcp.org/jcr/mix/1.0";
/**
* Current session.
*/
private final Session session;
/**
 * Creates a namespace helper for the given session.
 * <p>
 * The session is used for all subsequent namespace lookups. It is not
 * null-checked here, so a <code>null</code> session only fails on first use.
 *
 * @param session current session
 */
public NamespaceHelper(Session session) {
    this.session = session;
}
/**
 * Returns a map containing all prefix to namespace URI mappings of
 * the current session. The returned map is newly allocated and can
 * be freely modified by the caller.
 *
 * @see Session#getNamespacePrefixes()
 * @return namespace mappings
 * @throws RepositoryException if the namespaces could not be retrieved
 */
public Map<String, String> getNamespaces() throws RepositoryException {
    Map<String, String> mappings = new HashMap<String, String>();
    for (String prefix : session.getNamespacePrefixes()) {
        mappings.put(prefix, session.getNamespaceURI(prefix));
    }
    return mappings;
}
/**
* Returns the prefix mapped to the given namespace URI in the current
* session, or <code>null</code> if the namespace does not exist.
*
* @see Session#getNamespacePrefix(String)
* @param uri namespace URI
* @return namespace prefix, or <code>null</code>
* @throws RepositoryException if the namespace could not be retrieved
*/
public String getPrefix(String uri) throws RepositoryException {
try {
return session.getNamespacePrefix(uri);
} catch (NamespaceException e) {
return null;
}
}
/**
* Returns the namespace URI mapped to the given prefix in the current
* session, or <code>null</code> if the namespace does not exist.
*
* @see Session#getNamespaceURI(String)
* @param prefix namespace prefix
* @return namespace prefix, or <code>null</code>
* @throws RepositoryException if the namespace could not be retrieved
*/
public String getURI(String prefix) throws RepositoryException {
try {
return session.getNamespaceURI(prefix);
} catch (NamespaceException e) {
return null;
}
}
/**
* Returns the prefixed JCR name for the given namespace URI and local
* name in the current session.
*
* @param uri namespace URI
* @param name local name
* @return prefixed JCR name
* @throws NamespaceException if the namespace does not exist
* @throws RepositoryException if the namespace could not be retrieved
*/
public String getJcrName(String uri, String name)
throws NamespaceException, RepositoryException {
if (uri != null && uri.length() > 0) {
return session.getNamespacePrefix(uri) + ":" + name;
} else {
return name;
}
}
/**
* Replaces the standard <code>jcr</code>, <code>nt</code>, or
* <code>mix</code> prefix in the given name with the prefix
* mapped to that namespace in the current session.
* <p>
* The purpose of this method is to make it easier to write
* namespace-aware code that uses names in the standard JCR namespaces.
* For example:
* <pre>
* node.getProperty(helper.getName("jcr:data"));
* </pre>
*
* @param name prefixed name using the standard JCR prefixes
* @return prefixed name using the current session namespace mappings
* @throws IllegalArgumentException if the prefix is unknown
* @throws RepositoryException if the namespace could not be retrieved
*/
public String getJcrName(String name)
throws IllegalArgumentException, RepositoryException {
String standardPrefix;
String currentPrefix;
if (name.startsWith("jcr:")) {
standardPrefix = "jcr";
currentPrefix = session.getNamespacePrefix(JCR);
} else if (name.startsWith("nt:")) {
standardPrefix = "nt";
currentPrefix = session.getNamespacePrefix(NT);
} else if (name.startsWith("mix:")) {
standardPrefix = "mix";
currentPrefix = session.getNamespacePrefix(MIX);
} else {
throw new IllegalArgumentException("Unknown prefix: " + name);
}
if (currentPrefix.equals(standardPrefix)) {
return name;
} else {
return currentPrefix + name.substring(standardPrefix.length());
}
}
/**
* Safely registers the given namespace. If the namespace already exists,
* then the prefix mapped to the namespace in the current session is
* returned. Otherwise the namespace is registered to the namespace
* registry. If the given prefix is already registered for some other
* namespace or otherwise invalid, then another prefix is automatically
* generated. After the namespace has been registered, the prefix mapped
* to it in the current session is returned.
*
* @see NamespaceRegistry#registerNamespace(String, String)
* @param prefix namespace prefix
* @param uri namespace URI
* @return namespace prefix in the current session
* @throws RepositoryException if the namespace could not be registered
*/
public String registerNamespace(String prefix, String uri)
throws RepositoryException {
NamespaceRegistry registry =
session.getWorkspace().getNamespaceRegistry();
try {
// Check if the namespace is registered
registry.getPrefix(uri);
} catch (NamespaceException e1) {
// Replace troublesome prefix hints
if (prefix == null || prefix.length() == 0
|| prefix.toLowerCase().startsWith("xml")
|| !XMLChar.isValidNCName(prefix)) {
prefix = "ns"; // ns, ns2, ns3, ns4, ...
}
// Loop until an unused prefix is found
try {
String base = prefix;
for (int i = 2; true; i++) {
registry.getURI(prefix);
prefix = base + i;
}
} catch (NamespaceException e2) {
// Exit the loop
}
// Register the namespace
registry.registerNamespace(prefix, uri);
}
return session.getNamespacePrefix(uri);
}
/**
* Safely registers all namespaces in the given map from
* prefixes to namespace URIs.
*
* @param namespaces namespace mappings
* @throws RepositoryException if the namespaces could not be registered
*/
public void registerNamespaces(Map<String,String> namespaces) throws RepositoryException {
for (Map.Entry<String, String> stringStringEntry : namespaces.entrySet()) {
Map.Entry<String, String> entry = stringStringEntry;
registerNamespace(entry.getKey(), entry.getValue());
}
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.impl;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.camel.CamelContext;
import org.apache.camel.Endpoint;
import org.apache.camel.NoSuchEndpointException;
import org.apache.camel.Processor;
import org.apache.camel.Route;
import org.apache.camel.RuntimeCamelException;
import org.apache.camel.ShutdownRoute;
import org.apache.camel.ShutdownRunningTask;
import org.apache.camel.model.FromDefinition;
import org.apache.camel.model.ProcessorDefinition;
import org.apache.camel.model.RouteDefinition;
import org.apache.camel.processor.CamelInternalProcessor;
import org.apache.camel.processor.Pipeline;
import org.apache.camel.spi.InterceptStrategy;
import org.apache.camel.spi.RouteContext;
import org.apache.camel.spi.RoutePolicy;
import org.apache.camel.util.CamelContextHelper;
import org.apache.camel.util.ObjectHelper;
/**
* The context used to activate new routing rules
*
* @version
*/
public class DefaultRouteContext implements RouteContext {
// Per-node reference counters backing getAndIncrement(); used to build
// unique indexes for processor definitions within this route.
private final Map<ProcessorDefinition<?>, AtomicInteger> nodeIndex = new HashMap<ProcessorDefinition<?>, AtomicInteger>();
private final RouteDefinition route;
private FromDefinition from;
// Shared collection that commit() adds the finished route into.
private final Collection<Route> routes;
// Lazily resolved input endpoint; see getEndpoint().
private Endpoint endpoint;
// Processors accumulated via addEventDrivenProcessor(); commit() joins
// them into a single pipeline.
private final List<Processor> eventDrivenProcessors = new ArrayList<Processor>();
private CamelContext camelContext;
private List<InterceptStrategy> interceptStrategies = new ArrayList<InterceptStrategy>();
private InterceptStrategy managedInterceptStrategy;
private boolean routeAdded;
// The Boolean/Long options below are tri-state: null means "not configured
// on this route", in which case the getters fall back to the CamelContext
// level setting.
private Boolean trace;
private Boolean messageHistory;
private Boolean streamCache;
private Boolean handleFault;
private Long delay;
private Boolean autoStartup = Boolean.TRUE;
private List<RoutePolicy> routePolicyList = new ArrayList<RoutePolicy>();
private ShutdownRoute shutdownRoute;
private ShutdownRunningTask shutdownRunningTask;
/**
 * Creates a route context for the given route definition.
 *
 * @param camelContext the camel context
 * @param route the route definition being built
 * @param from the input (consumer) definition of the route
 * @param routes collection that the created route is added to on commit
 */
public DefaultRouteContext(CamelContext camelContext, RouteDefinition route, FromDefinition from, Collection<Route> routes) {
this.camelContext = camelContext;
this.route = route;
this.from = from;
this.routes = routes;
}
/**
 * Only used for lazy construction from inside ExpressionType
 */
public DefaultRouteContext(CamelContext camelContext) {
this.camelContext = camelContext;
this.routes = new ArrayList<Route>();
this.route = new RouteDefinition("temporary");
}
// Resolves the input endpoint lazily on first access from the from definition.
public Endpoint getEndpoint() {
if (endpoint == null) {
endpoint = from.resolveEndpoint(this);
}
return endpoint;
}
public FromDefinition getFrom() {
return from;
}
public RouteDefinition getRoute() {
return route;
}
public CamelContext getCamelContext() {
return camelContext;
}
public Endpoint resolveEndpoint(String uri) {
return route.resolveEndpoint(getCamelContext(), uri);
}
/**
 * Resolves an endpoint either from a uri or from a registry reference;
 * exactly one of the two must be given. The ref lookup also validates the
 * endpoint belongs to the same CamelContext and registers it as a service.
 *
 * @param uri endpoint uri, or null
 * @param ref registry id of the endpoint, or null
 * @return the resolved endpoint (never null)
 * @throws NoSuchEndpointException if the uri/ref cannot be resolved
 * @throws IllegalArgumentException if neither uri nor ref was given
 */
public Endpoint resolveEndpoint(String uri, String ref) {
Endpoint endpoint = null;
if (uri != null) {
endpoint = resolveEndpoint(uri);
if (endpoint == null) {
throw new NoSuchEndpointException(uri);
}
}
if (ref != null) {
endpoint = lookup(ref, Endpoint.class);
if (endpoint == null) {
throw new NoSuchEndpointException("ref:" + ref, "check your camel registry with id " + ref);
}
// Check the endpoint has the right CamelContext
if (!this.getCamelContext().equals(endpoint.getCamelContext())) {
throw new NoSuchEndpointException("ref:" + ref, "make sure the endpoint has the same camel context as the route does.");
}
try {
// need add the endpoint into service
getCamelContext().addService(endpoint);
} catch (Exception ex) {
throw new RuntimeCamelException(ex);
}
}
if (endpoint == null) {
throw new IllegalArgumentException("Either 'uri' or 'ref' must be specified on: " + this);
} else {
return endpoint;
}
}
public <T> T lookup(String name, Class<T> type) {
return getCamelContext().getRegistry().lookupByNameAndType(name, type);
}
public <T> Map<String, T> lookupByType(Class<T> type) {
return getCamelContext().getRegistry().findByTypeWithName(type);
}
@Override
public <T> T mandatoryLookup(String name, Class<T> type) {
return CamelContextHelper.mandatoryLookup(getCamelContext(), name, type);
}
/**
 * Builds the final route from the accumulated event driven processors,
 * wraps it with the internal advices (unit of work, route policies,
 * inflight tracking, JMX instrumentation, lifecycle) and adds it to the
 * routes collection. Does nothing if no processors were added.
 * NOTE(review): the advice registration sequence below looks deliberate —
 * keep the order when modifying this method.
 */
public void commit() {
// now lets turn all of the event driven consumer processors into a single route
if (!eventDrivenProcessors.isEmpty()) {
Processor target = Pipeline.newInstance(getCamelContext(), eventDrivenProcessors);
// force creating the route id so its known ahead of the route is started
String routeId = route.idOrCreate(getCamelContext().getNodeIdFactory());
// and wrap it in a unit of work so the UoW is on the top, so the entire route will be in the same UoW
CamelInternalProcessor internal = new CamelInternalProcessor(target);
internal.addAdvice(new CamelInternalProcessor.UnitOfWorkProcessorAdvice(this));
// and then optionally add route policy processor if a custom policy is set
List<RoutePolicy> routePolicyList = getRoutePolicyList();
if (routePolicyList != null && !routePolicyList.isEmpty()) {
for (RoutePolicy policy : routePolicyList) {
// add policy as service if we have not already done that (eg possible if two routes have the same service)
// this ensures Camel can control the lifecycle of the policy
if (!camelContext.hasService(policy)) {
try {
camelContext.addService(policy);
} catch (Exception e) {
throw ObjectHelper.wrapRuntimeCamelException(e);
}
}
}
internal.addAdvice(new CamelInternalProcessor.RoutePolicyAdvice(routePolicyList));
}
// wrap in route inflight processor to track number of inflight exchanges for the route
internal.addAdvice(new CamelInternalProcessor.RouteInflightRepositoryAdvice(camelContext.getInflightRepository(), routeId));
// wrap in JMX instrumentation processor that is used for performance stats
internal.addAdvice(new CamelInternalProcessor.InstrumentationAdvice("route"));
// wrap in route lifecycle
internal.addAdvice(new CamelInternalProcessor.RouteLifecycleAdvice());
// and create the route that wraps the UoW
Route edcr = new EventDrivenConsumerRoute(this, getEndpoint(), internal);
edcr.getProperties().put(Route.ID_PROPERTY, routeId);
edcr.getProperties().put(Route.PARENT_PROPERTY, Integer.toHexString(route.hashCode()));
edcr.getProperties().put(Route.DESCRIPTION_PROPERTY, route.getDescriptionText());
if (route.getGroup() != null) {
edcr.getProperties().put(Route.GROUP_PROPERTY, route.getGroup());
}
String rest = "false";
if (route.isRest() != null && route.isRest()) {
rest = "true";
}
edcr.getProperties().put(Route.REST_PROPERTY, rest);
// after the route is created then set the route on the policy processor so we get hold of it
CamelInternalProcessor.RoutePolicyAdvice task = internal.getAdvice(CamelInternalProcessor.RoutePolicyAdvice.class);
if (task != null) {
task.setRoute(edcr);
}
CamelInternalProcessor.RouteLifecycleAdvice task2 = internal.getAdvice(CamelInternalProcessor.RouteLifecycleAdvice.class);
if (task2 != null) {
task2.setRoute(edcr);
}
// invoke init on route policy
if (routePolicyList != null && !routePolicyList.isEmpty()) {
for (RoutePolicy policy : routePolicyList) {
policy.onInit(edcr);
}
}
routes.add(edcr);
}
}
public void addEventDrivenProcessor(Processor processor) {
eventDrivenProcessors.add(processor);
}
public List<InterceptStrategy> getInterceptStrategies() {
return interceptStrategies;
}
public void setInterceptStrategies(List<InterceptStrategy> interceptStrategies) {
this.interceptStrategies = interceptStrategies;
}
public void addInterceptStrategy(InterceptStrategy interceptStrategy) {
getInterceptStrategies().add(interceptStrategy);
}
public void setManagedInterceptStrategy(InterceptStrategy interceptStrategy) {
this.managedInterceptStrategy = interceptStrategy;
}
public InterceptStrategy getManagedInterceptStrategy() {
return managedInterceptStrategy;
}
public boolean isRouteAdded() {
return routeAdded;
}
public void setIsRouteAdded(boolean routeAdded) {
this.routeAdded = routeAdded;
}
public void setTracing(Boolean tracing) {
this.trace = tracing;
}
public Boolean isTracing() {
if (trace != null) {
return trace;
} else {
// fallback to the option from camel context
return getCamelContext().isTracing();
}
}
public void setMessageHistory(Boolean messageHistory) {
this.messageHistory = messageHistory;
}
public Boolean isMessageHistory() {
if (messageHistory != null) {
return messageHistory;
} else {
// fallback to the option from camel context
return getCamelContext().isMessageHistory();
}
}
public void setStreamCaching(Boolean cache) {
this.streamCache = cache;
}
public Boolean isStreamCaching() {
if (streamCache != null) {
return streamCache;
} else {
// fallback to the option from camel context
return getCamelContext().isStreamCaching();
}
}
public void setHandleFault(Boolean handleFault) {
this.handleFault = handleFault;
}
public Boolean isHandleFault() {
if (handleFault != null) {
return handleFault;
} else {
// fallback to the option from camel context
return getCamelContext().isHandleFault();
}
}
public void setDelayer(Long delay) {
this.delay = delay;
}
public Long getDelayer() {
if (delay != null) {
return delay;
} else {
// fallback to the option from camel context
return getCamelContext().getDelayer();
}
}
public void setAutoStartup(Boolean autoStartup) {
this.autoStartup = autoStartup;
}
public Boolean isAutoStartup() {
if (autoStartup != null) {
return autoStartup;
}
// default to true
return true;
}
public void setShutdownRoute(ShutdownRoute shutdownRoute) {
this.shutdownRoute = shutdownRoute;
}
// This option is context-wide only; rejecting it here keeps the route-level
// API surface honest.
public void setAllowUseOriginalMessage(Boolean allowUseOriginalMessage) {
throw new IllegalArgumentException("This option can only be configured on CamelContext");
}
public Boolean isAllowUseOriginalMessage() {
return getCamelContext().isAllowUseOriginalMessage();
}
public ShutdownRoute getShutdownRoute() {
if (shutdownRoute != null) {
return shutdownRoute;
} else {
// fallback to the option from camel context
return getCamelContext().getShutdownRoute();
}
}
public void setShutdownRunningTask(ShutdownRunningTask shutdownRunningTask) {
this.shutdownRunningTask = shutdownRunningTask;
}
public ShutdownRunningTask getShutdownRunningTask() {
if (shutdownRunningTask != null) {
return shutdownRunningTask;
} else {
// fallback to the option from camel context
return getCamelContext().getShutdownRunningTask();
}
}
/**
 * Returns the current index for the given node and increments it,
 * starting at 0 the first time a node is seen.
 * NOTE(review): nodeIndex is a plain HashMap, so this is only safe when
 * routes are built single-threaded — confirm against callers.
 */
public int getAndIncrement(ProcessorDefinition<?> node) {
AtomicInteger count = nodeIndex.get(node);
if (count == null) {
count = new AtomicInteger();
nodeIndex.put(node, count);
}
return count.getAndIncrement();
}
public void setRoutePolicyList(List<RoutePolicy> routePolicyList) {
this.routePolicyList = routePolicyList;
}
public List<RoutePolicy> getRoutePolicyList() {
return routePolicyList;
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.resourcemanager;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyBoolean;
import static org.mockito.Matchers.anyString;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.when;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.security.PrivilegedExceptionAction;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.BrokenBarrierException;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CyclicBarrier;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authentication.util.KerberosName;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.yarn.MockApps;
import org.apache.hadoop.yarn.api.ApplicationClientProtocol;
import org.apache.hadoop.yarn.api.protocolrecords.ApplicationsRequestScope;
import org.apache.hadoop.yarn.api.protocolrecords.CancelDelegationTokenRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptReportRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptReportResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptsRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptsResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodeLabelsRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodeLabelsResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetContainerReportRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetContainerReportResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetContainersRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetContainersResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetNodesToLabelsRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetNodesToLabelsResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoResponse;
import org.apache.hadoop.yarn.api.protocolrecords.KillApplicationRequest;
import org.apache.hadoop.yarn.api.protocolrecords.KillApplicationResponse;
import org.apache.hadoop.yarn.api.protocolrecords.MoveApplicationAcrossQueuesRequest;
import org.apache.hadoop.yarn.api.protocolrecords.RenewDelegationTokenRequest;
import org.apache.hadoop.yarn.api.protocolrecords.ReservationDeleteRequest;
import org.apache.hadoop.yarn.api.protocolrecords.ReservationDeleteResponse;
import org.apache.hadoop.yarn.api.protocolrecords.ReservationSubmissionRequest;
import org.apache.hadoop.yarn.api.protocolrecords.ReservationSubmissionResponse;
import org.apache.hadoop.yarn.api.protocolrecords.ReservationUpdateRequest;
import org.apache.hadoop.yarn.api.protocolrecords.ReservationUpdateResponse;
import org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationRequest;
import org.apache.hadoop.yarn.api.records.ApplicationAccessType;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport;
import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
import org.apache.hadoop.yarn.api.records.ContainerState;
import org.apache.hadoop.yarn.api.records.ContainerStatus;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.api.records.NodeReport;
import org.apache.hadoop.yarn.api.records.NodeState;
import org.apache.hadoop.yarn.api.records.QueueACL;
import org.apache.hadoop.yarn.api.records.QueueInfo;
import org.apache.hadoop.yarn.api.records.ReservationDefinition;
import org.apache.hadoop.yarn.api.records.ReservationId;
import org.apache.hadoop.yarn.api.records.ReservationRequest;
import org.apache.hadoop.yarn.api.records.ReservationRequestInterpreter;
import org.apache.hadoop.yarn.api.records.ReservationRequests;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.ResourceRequest;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.event.Dispatcher;
import org.apache.hadoop.yarn.event.Event;
import org.apache.hadoop.yarn.event.EventHandler;
import org.apache.hadoop.yarn.exceptions.ApplicationNotFoundException;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
import org.apache.hadoop.yarn.ipc.YarnRPC;
import org.apache.hadoop.yarn.security.client.RMDelegationTokenIdentifier;
import org.apache.hadoop.yarn.server.resourcemanager.ahs.RMApplicationHistoryWriter;
import org.apache.hadoop.yarn.server.resourcemanager.metrics.SystemMetricsPublisher;
import org.apache.hadoop.yarn.server.resourcemanager.nodelabels.RMNodeLabelsManager;
import org.apache.hadoop.yarn.server.resourcemanager.recovery.NullRMStateStore;
import org.apache.hadoop.yarn.server.resourcemanager.recovery.RMStateStore;
import org.apache.hadoop.yarn.server.resourcemanager.reservation.ReservationSystemTestUtil;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppEvent;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppImpl;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttempt;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptImpl;
import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainer;
import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainerImpl;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerAppReport;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.YarnScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacityScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacitySchedulerConfiguration;
import org.apache.hadoop.yarn.server.resourcemanager.security.QueueACLsManager;
import org.apache.hadoop.yarn.server.resourcemanager.security.RMDelegationTokenSecretManager;
import org.apache.hadoop.yarn.server.security.ApplicationACLsManager;
import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.apache.hadoop.yarn.util.Clock;
import org.apache.hadoop.yarn.util.Records;
import org.apache.hadoop.yarn.util.UTCClock;
import org.apache.hadoop.yarn.util.resource.Resources;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
public class TestClientRMService {
private static final Log LOG = LogFactory.getLog(TestClientRMService.class);
private RecordFactory recordFactory = RecordFactoryProvider
.getRecordFactory(null);
private String appType = "MockApp";
private static RMDelegationTokenSecretManager dtsm;
private final static String QUEUE_1 = "Q-1";
private final static String QUEUE_2 = "Q-2";
private final static String kerberosRule = "RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*//\nDEFAULT";
// Install the Kerberos auth_to_local rules once for the whole class;
// KerberosName holds them statically, so tests relying on principal
// shortening (e.g. renewer name mapping) see consistent behavior.
static {
KerberosName.setRules(kerberosRule);
}
@BeforeClass
public static void setupSecretManager() throws IOException {
  // The secret manager requires an RMContext with a state store; a
  // NullRMStateStore is enough for the token operations in these tests.
  RMContext context = mock(RMContext.class);
  when(context.getStateStore()).thenReturn(new NullRMStateStore());
  dtsm =
      new RMDelegationTokenSecretManager(60000, 60000, 60000, 60000, context);
  dtsm.startThreads();
}
@AfterClass
public static void teardownSecretManager() {
  // Guard clause: nothing to stop if setup never ran (or failed early).
  if (dtsm == null) {
    return;
  }
  dtsm.stopThreads();
}
// End-to-end scenario against a real ClientRMService: registers/loses nodes,
// toggles health via heartbeats, and moves node labels around, verifying
// getClusterNodes filtering by NodeState and the reported labels at each step.
@Test
public void testGetClusterNodes() throws Exception {
// Use a real ClientRMService (instead of MockRM's default) so the RPC
// calls below exercise the production code path.
MockRM rm = new MockRM() {
protected ClientRMService createClientRMService() {
return new ClientRMService(this.rmContext, scheduler,
this.rmAppManager, this.applicationACLsManager, this.queueACLsManager,
this.getRMContext().getRMDelegationTokenSecretManager());
};
};
rm.start();
RMNodeLabelsManager labelsMgr = rm.getRMContext().getNodeLabelManager();
labelsMgr.addToCluserNodeLabels(ImmutableSet.of("x", "y"));
// Add a healthy node with label = x
MockNM node = rm.registerNode("host1:1234", 1024);
Map<NodeId, Set<String>> map = new HashMap<NodeId, Set<String>>();
map.put(node.getNodeId(), ImmutableSet.of("x"));
labelsMgr.replaceLabelsOnNode(map);
rm.sendNodeStarted(node);
node.nodeHeartbeat(true);
// Add and lose a node with label = y
MockNM lostNode = rm.registerNode("host2:1235", 1024);
rm.sendNodeStarted(lostNode);
lostNode.nodeHeartbeat(true);
rm.NMwaitForState(lostNode.getNodeId(), NodeState.RUNNING);
rm.sendNodeLost(lostNode);
// Create a client.
Configuration conf = new Configuration();
YarnRPC rpc = YarnRPC.create(conf);
InetSocketAddress rmAddress = rm.getClientRMService().getBindAddress();
LOG.info("Connecting to ResourceManager at " + rmAddress);
ApplicationClientProtocol client =
(ApplicationClientProtocol) rpc
.getProxy(ApplicationClientProtocol.class, rmAddress, conf);
// Make call
// Only the RUNNING node (host1) should be visible; the LOST node is
// filtered out by the state filter.
GetClusterNodesRequest request =
GetClusterNodesRequest.newInstance(EnumSet.of(NodeState.RUNNING));
List<NodeReport> nodeReports =
client.getClusterNodes(request).getNodeReports();
Assert.assertEquals(1, nodeReports.size());
Assert.assertNotSame("Node is expected to be healthy!", NodeState.UNHEALTHY,
nodeReports.get(0).getNodeState());
// Check node's label = x
Assert.assertTrue(nodeReports.get(0).getNodeLabels().contains("x"));
// Now make the node unhealthy.
node.nodeHeartbeat(false);
// Call again
nodeReports = client.getClusterNodes(request).getNodeReports();
Assert.assertEquals("Unhealthy nodes should not show up by default", 0,
nodeReports.size());
// Change label of host1 to y
map = new HashMap<NodeId, Set<String>>();
map.put(node.getNodeId(), ImmutableSet.of("y"));
labelsMgr.replaceLabelsOnNode(map);
// Now query for UNHEALTHY nodes
request = GetClusterNodesRequest.newInstance(EnumSet.of(NodeState.UNHEALTHY));
nodeReports = client.getClusterNodes(request).getNodeReports();
Assert.assertEquals(1, nodeReports.size());
Assert.assertEquals("Node is expected to be unhealthy!", NodeState.UNHEALTHY,
nodeReports.get(0).getNodeState());
Assert.assertTrue(nodeReports.get(0).getNodeLabels().contains("y"));
// Remove labels of host1
map = new HashMap<NodeId, Set<String>>();
map.put(node.getNodeId(), ImmutableSet.of("y"));
labelsMgr.removeLabelsFromNode(map);
// Query all states should return all nodes
rm.registerNode("host3:1236", 1024);
request = GetClusterNodesRequest.newInstance(EnumSet.allOf(NodeState.class));
nodeReports = client.getClusterNodes(request).getNodeReports();
Assert.assertEquals(3, nodeReports.size());
// All host1-3's label should be empty (instead of null)
for (NodeReport report : nodeReports) {
Assert.assertTrue(report.getNodeLabels() != null
&& report.getNodeLabels().isEmpty());
}
rpc.stopProxy(client, conf);
rm.close();
}
@Test
public void testNonExistingApplicationReport() throws YarnException {
  // An RM holding no applications must reject any report request with
  // ApplicationNotFoundException carrying a descriptive message.
  RMContext rmContext = mock(RMContext.class);
  when(rmContext.getRMApps()).thenReturn(
      new ConcurrentHashMap<ApplicationId, RMApp>());
  ClientRMService rmService = new ClientRMService(rmContext, null, null,
      null, null, null);
  // Local named 'factory' to avoid shadowing the instance field 'recordFactory'.
  RecordFactory factory = RecordFactoryProvider.getRecordFactory(null);
  GetApplicationReportRequest request = factory
      .newRecordInstance(GetApplicationReportRequest.class);
  request.setApplicationId(ApplicationId.newInstance(0, 0));
  try {
    rmService.getApplicationReport(request);
    Assert.fail("Expected ApplicationNotFoundException for unknown application");
  } catch (ApplicationNotFoundException ex) {
    // JUnit's assertEquals takes (expected, actual) in that order; the
    // original call had them swapped, producing misleading failure output.
    Assert.assertEquals(
        "Application with id '" + request.getApplicationId()
            + "' doesn't exist in RM.",
        ex.getMessage());
  }
}
@Test
public void testGetApplicationReport() throws Exception {
  // Mock an RM context containing a known application, grant the current
  // user VIEW access to it, then fetch its report and check the resource
  // usage figures supplied by the mocked context.
  YarnScheduler scheduler = mock(YarnScheduler.class);
  RMContext context = mock(RMContext.class);
  mockRMContext(scheduler, context);
  ApplicationId applicationId = getApplicationId(1);
  ApplicationACLsManager aclsManager = mock(ApplicationACLsManager.class);
  when(
      aclsManager.checkAccess(UserGroupInformation.getCurrentUser(),
          ApplicationAccessType.VIEW_APP, null, applicationId)).thenReturn(true);
  ClientRMService service = new ClientRMService(context, scheduler,
      null, aclsManager, null, null);
  try {
    RecordFactory factory = RecordFactoryProvider.getRecordFactory(null);
    GetApplicationReportRequest reportRequest = factory
        .newRecordInstance(GetApplicationReportRequest.class);
    reportRequest.setApplicationId(applicationId);
    GetApplicationReportResponse reportResponse =
        service.getApplicationReport(reportRequest);
    ApplicationReport applicationReport = reportResponse.getApplicationReport();
    ApplicationResourceUsageReport usage =
        applicationReport.getApplicationResourceUsageReport();
    Assert.assertEquals(10, usage.getMemorySeconds());
    Assert.assertEquals(3, usage.getVcoreSeconds());
  } finally {
    service.close();
  }
}
@Test
public void testGetApplicationAttemptReport() throws YarnException,
    IOException {
  // Request the report of a known attempt and verify the attempt id
  // round-trips through the service.
  ClientRMService service = createRMService();
  RecordFactory factory = RecordFactoryProvider.getRecordFactory(null);
  GetApplicationAttemptReportRequest reportRequest = factory
      .newRecordInstance(GetApplicationAttemptReportRequest.class);
  ApplicationAttemptId applicationAttemptId = ApplicationAttemptId.newInstance(
      ApplicationId.newInstance(123456, 1), 1);
  reportRequest.setApplicationAttemptId(applicationAttemptId);
  try {
    GetApplicationAttemptReportResponse reportResponse = service
        .getApplicationAttemptReport(reportRequest);
    Assert.assertEquals(applicationAttemptId,
        reportResponse.getApplicationAttemptReport().getApplicationAttemptId());
  } catch (ApplicationNotFoundException ex) {
    Assert.fail(ex.getMessage());
  }
}
// Verifies that a freshly constructed RMAppAttemptImpl (no scheduler app
// info available) reports the DUMMY usage placeholder rather than null
// or a partially-filled report.
@Test
public void testGetApplicationResourceUsageReportDummy() throws YarnException,
IOException {
ApplicationAttemptId attemptId = getApplicationAttemptId(1);
YarnScheduler yarnScheduler = mockYarnScheduler();
RMContext rmContext = mock(RMContext.class);
mockRMContext(yarnScheduler, rmContext);
// No-op event sink: the attempt's state transitions go nowhere.
when(rmContext.getDispatcher().getEventHandler()).thenReturn(
new EventHandler<Event>() {
public void handle(Event event) {
}
});
ApplicationSubmissionContext asContext =
mock(ApplicationSubmissionContext.class);
YarnConfiguration config = new YarnConfiguration();
RMAppAttemptImpl rmAppAttemptImpl = new RMAppAttemptImpl(attemptId,
rmContext, yarnScheduler, null, asContext, config, false, null);
ApplicationResourceUsageReport report = rmAppAttemptImpl
.getApplicationResourceUsageReport();
assertEquals(report, RMServerUtils.DUMMY_APPLICATION_RESOURCE_USAGE_REPORT);
}
// Lists the attempts of application 123456_1 and checks that exactly one
// attempt comes back, carrying the expected attempt id; an
// ApplicationNotFoundException fails the test.
@Test
public void testGetApplicationAttempts() throws YarnException, IOException {
  ClientRMService clientService = createRMService();
  RecordFactory factory = RecordFactoryProvider.getRecordFactory(null);
  GetApplicationAttemptsRequest attemptsRequest = factory
      .newRecordInstance(GetApplicationAttemptsRequest.class);
  ApplicationId appId = ApplicationId.newInstance(123456, 1);
  ApplicationAttemptId expectedAttemptId =
      ApplicationAttemptId.newInstance(appId, 1);
  attemptsRequest.setApplicationId(appId);
  try {
    GetApplicationAttemptsResponse attemptsResponse = clientService
        .getApplicationAttempts(attemptsRequest);
    Assert.assertEquals(1, attemptsResponse.getApplicationAttemptList().size());
    Assert.assertEquals(expectedAttemptId, attemptsResponse
        .getApplicationAttemptList().get(0).getApplicationAttemptId());
  } catch (ApplicationNotFoundException ex) {
    Assert.fail(ex.getMessage());
  }
}
// Asks for container 1 of attempt 1 / application 123456_1 and expects
// the mocked scheduler state to echo the same container id back.
@Test
public void testGetContainerReport() throws YarnException, IOException {
  ClientRMService clientService = createRMService();
  RecordFactory factory = RecordFactoryProvider.getRecordFactory(null);
  GetContainerReportRequest reportRequest = factory
      .newRecordInstance(GetContainerReportRequest.class);
  ApplicationAttemptId appAttemptId = ApplicationAttemptId.newInstance(
      ApplicationId.newInstance(123456, 1), 1);
  ContainerId expectedContainerId =
      ContainerId.newContainerId(appAttemptId, 1);
  reportRequest.setContainerId(expectedContainerId);
  try {
    GetContainerReportResponse reportResponse =
        clientService.getContainerReport(reportRequest);
    Assert.assertEquals(expectedContainerId,
        reportResponse.getContainerReport().getContainerId());
  } catch (ApplicationNotFoundException ex) {
    Assert.fail(ex.getMessage());
  }
}
// Lists the containers of attempt 1 / application 123456_1; the single
// live container wired up by the mocked scheduler must be first in the
// returned list.
@Test
public void testGetContainers() throws YarnException, IOException {
  ClientRMService clientService = createRMService();
  RecordFactory factory = RecordFactoryProvider.getRecordFactory(null);
  GetContainersRequest listRequest = factory
      .newRecordInstance(GetContainersRequest.class);
  ApplicationAttemptId appAttemptId = ApplicationAttemptId.newInstance(
      ApplicationId.newInstance(123456, 1), 1);
  ContainerId expectedContainerId =
      ContainerId.newContainerId(appAttemptId, 1);
  listRequest.setApplicationAttemptId(appAttemptId);
  try {
    GetContainersResponse listResponse =
        clientService.getContainers(listRequest);
    Assert.assertEquals(expectedContainerId,
        listResponse.getContainerList().get(0).getContainerId());
  } catch (ApplicationNotFoundException ex) {
    Assert.fail(ex.getMessage());
  }
}
// Builds a ClientRMService over a fully mocked RMContext: three pre-seeded
// RM apps, a no-op dispatcher, and queue ACLs that always allow access.
// Shared by the report/attempt/container lookup tests above.
public ClientRMService createRMService() throws IOException {
YarnScheduler yarnScheduler = mockYarnScheduler();
RMContext rmContext = mock(RMContext.class);
mockRMContext(yarnScheduler, rmContext);
ConcurrentHashMap<ApplicationId, RMApp> apps = getRMApps(rmContext,
yarnScheduler);
when(rmContext.getRMApps()).thenReturn(apps);
when(rmContext.getYarnConfiguration()).thenReturn(new Configuration());
RMAppManager appManager = new RMAppManager(rmContext, yarnScheduler, null,
mock(ApplicationACLsManager.class), new Configuration());
// Events raised during the tests are swallowed by this no-op handler.
when(rmContext.getDispatcher().getEventHandler()).thenReturn(
new EventHandler<Event>() {
public void handle(Event event) {
}
});
ApplicationACLsManager mockAclsManager = mock(ApplicationACLsManager.class);
QueueACLsManager mockQueueACLsManager = mock(QueueACLsManager.class);
// Every queue-ACL check passes, so lookups are never filtered out.
when(
mockQueueACLsManager.checkAccess(any(UserGroupInformation.class),
any(QueueACL.class), anyString())).thenReturn(true);
return new ClientRMService(rmContext, yarnScheduler, appManager,
mockAclsManager, mockQueueACLsManager, null);
}
// Killing an application id that the RM does not know about must raise
// ApplicationNotFoundException carrying a descriptive message.
@Test
public void testForceKillNonExistingApplication() throws YarnException {
  RMContext rmContext = mock(RMContext.class);
  // Empty app map: every lookup misses.
  when(rmContext.getRMApps()).thenReturn(
      new ConcurrentHashMap<ApplicationId, RMApp>());
  ClientRMService rmService = new ClientRMService(rmContext, null, null,
      null, null, null);
  ApplicationId applicationId =
      BuilderUtils.newApplicationId(System.currentTimeMillis(), 0);
  KillApplicationRequest request =
      KillApplicationRequest.newInstance(applicationId);
  try {
    rmService.forceKillApplication(request);
    Assert.fail();
  } catch (ApplicationNotFoundException ex) {
    // Fixed argument order: JUnit's assertEquals takes (expected, actual);
    // the original call had them swapped, producing misleading diagnostics
    // on failure.
    Assert.assertEquals(
        "Trying to kill an absent " +
            "application " + request.getApplicationId(),
        ex.getMessage());
  }
}
// End-to-end kill test against a real MockRM: a managed-AM app needs
// repeated kill requests until the RM finishes processing, while an
// unmanaged-AM app acknowledges the very first kill.
@Test
public void testForceKillApplication() throws Exception {
YarnConfiguration conf = new YarnConfiguration();
MockRM rm = new MockRM();
rm.init(conf);
rm.start();
ClientRMService rmService = rm.getClientRMService();
// Filter used throughout to count apps that reached the KILLED state.
GetApplicationsRequest getRequest = GetApplicationsRequest.newInstance(
EnumSet.of(YarnApplicationState.KILLED));
RMApp app1 = rm.submitApp(1024);
// Second arg 'true' submits app2 with an unmanaged AM.
RMApp app2 = rm.submitApp(1024, true);
assertEquals("Incorrect number of apps in the RM", 0,
rmService.getApplications(getRequest).getApplicationList().size());
KillApplicationRequest killRequest1 =
KillApplicationRequest.newInstance(app1.getApplicationId());
KillApplicationRequest killRequest2 =
KillApplicationRequest.newInstance(app2.getApplicationId());
int killAttemptCount = 0;
// Poll: managed-AM kill is asynchronous, so keep re-issuing the request
// until the RM reports the kill completed (bounded at 100 tries).
for (int i = 0; i < 100; i++) {
KillApplicationResponse killResponse1 =
rmService.forceKillApplication(killRequest1);
killAttemptCount++;
if (killResponse1.getIsKillCompleted()) {
break;
}
Thread.sleep(10);
}
assertTrue("Kill attempt count should be greater than 1 for managed AMs",
killAttemptCount > 1);
assertEquals("Incorrect number of apps in the RM", 1,
rmService.getApplications(getRequest).getApplicationList().size());
KillApplicationResponse killResponse2 =
rmService.forceKillApplication(killRequest2);
// Unmanaged AMs have no container to tear down, so the first kill acks.
assertTrue("Killing UnmanagedAM should falsely acknowledge true",
killResponse2.getIsKillCompleted());
// Wait (bounded) for the second app to also show up as KILLED.
for (int i = 0; i < 100; i++) {
if (2 ==
rmService.getApplications(getRequest).getApplicationList().size()) {
break;
}
Thread.sleep(10);
}
assertEquals("Incorrect number of apps in the RM", 2,
rmService.getApplications(getRequest).getApplicationList().size());
}
// With no apps registered, moving an application across queues must
// raise ApplicationNotFoundException (declared via 'expected').
@Test (expected = ApplicationNotFoundException.class)
public void testMoveAbsentApplication() throws YarnException {
  RMContext context = mock(RMContext.class);
  when(context.getRMApps())
      .thenReturn(new ConcurrentHashMap<ApplicationId, RMApp>());
  ClientRMService service =
      new ClientRMService(context, null, null, null, null, null);
  ApplicationId absentAppId =
      BuilderUtils.newApplicationId(System.currentTimeMillis(), 0);
  MoveApplicationAcrossQueuesRequest moveRequest =
      MoveApplicationAcrossQueuesRequest.newInstance(absentAppId, "newqueue");
  service.moveApplicationAcrossQueues(moveRequest);
}
// Exercises getQueueInfo(): with permissive ACLs the caller sees the apps
// in "testqueue"; a nonexistent queue must not throw; with denying ACLs
// the application list comes back empty.
@Test
public void testGetQueueInfo() throws Exception {
YarnScheduler yarnScheduler = mock(YarnScheduler.class);
RMContext rmContext = mock(RMContext.class);
mockRMContext(yarnScheduler, rmContext);
ApplicationACLsManager mockAclsManager = mock(ApplicationACLsManager.class);
QueueACLsManager mockQueueACLsManager = mock(QueueACLsManager.class);
// First service: all queue and application ACL checks pass.
when(mockQueueACLsManager.checkAccess(any(UserGroupInformation.class),
any(QueueACL.class), anyString())).thenReturn(true);
when(mockAclsManager.checkAccess(any(UserGroupInformation.class),
any(ApplicationAccessType.class), anyString(),
any(ApplicationId.class))).thenReturn(true);
ClientRMService rmService = new ClientRMService(rmContext, yarnScheduler,
null, mockAclsManager, mockQueueACLsManager, null);
GetQueueInfoRequest request = recordFactory
.newRecordInstance(GetQueueInfoRequest.class);
request.setQueueName("testqueue");
request.setIncludeApplications(true);
GetQueueInfoResponse queueInfo = rmService.getQueueInfo(request);
List<ApplicationReport> applications = queueInfo.getQueueInfo()
.getApplications();
// getSchedulerApps() places two apps in "testqueue".
Assert.assertEquals(2, applications.size());
request.setQueueName("nonexistentqueue");
request.setIncludeApplications(true);
// should not throw exception on nonexistent queue
queueInfo = rmService.getQueueInfo(request);
// Case where user does not have application access
ApplicationACLsManager mockAclsManager1 =
mock(ApplicationACLsManager.class);
QueueACLsManager mockQueueACLsManager1 =
mock(QueueACLsManager.class);
// Second service: every ACL check is denied.
when(mockQueueACLsManager1.checkAccess(any(UserGroupInformation.class),
any(QueueACL.class), anyString())).thenReturn(false);
when(mockAclsManager1.checkAccess(any(UserGroupInformation.class),
any(ApplicationAccessType.class), anyString(),
any(ApplicationId.class))).thenReturn(false);
ClientRMService rmService1 = new ClientRMService(rmContext, yarnScheduler,
null, mockAclsManager1, mockQueueACLsManager1, null);
request.setQueueName("testqueue");
request.setIncludeApplications(true);
GetQueueInfoResponse queueInfo1 = rmService1.getQueueInfo(request);
List<ApplicationReport> applications1 = queueInfo1.getQueueInfo()
.getApplications();
// Denied access means the queue's app list is filtered down to nothing.
Assert.assertEquals(0, applications1.size());
}
// Fixed identities for the delegation-token renewal/cancellation tests:
// short-name users ("owner", "other", "tester") ...
private static final UserGroupInformation owner =
UserGroupInformation.createRemoteUser("owner");
private static final UserGroupInformation other =
UserGroupInformation.createRemoteUser("other");
private static final UserGroupInformation tester =
UserGroupInformation.createRemoteUser("tester");
// ... and the same three as Kerberos-style principals, to cover both
// name formats in the authorization checks.
private static final String testerPrincipal = "tester@EXAMPLE.COM";
private static final String ownerPrincipal = "owner@EXAMPLE.COM";
private static final String otherPrincipal = "other@EXAMPLE.COM";
private static final UserGroupInformation testerKerb =
UserGroupInformation.createRemoteUser(testerPrincipal);
private static final UserGroupInformation ownerKerb =
UserGroupInformation.createRemoteUser(ownerPrincipal);
private static final UserGroupInformation otherKerb =
UserGroupInformation.createRemoteUser(otherPrincipal);
// A token owner may renew a token that names itself as the renewer;
// the renewal runs under the owner's own UGI.
@Test
public void testTokenRenewalByOwner() throws Exception {
  PrivilegedExceptionAction<Void> renewAsSelf =
      new PrivilegedExceptionAction<Void>() {
        @Override
        public Void run() throws Exception {
          checkTokenRenewal(owner, owner);
          return null;
        }
      };
  owner.doAs(renewAsSelf);
}
// Renewing a token whose designated renewer is a different user must be
// rejected with a message naming both parties.
@Test
public void testTokenRenewalWrongUser() throws Exception {
try {
owner.doAs(new PrivilegedExceptionAction<Void>() {
@Override
public Void run() throws Exception {
try {
checkTokenRenewal(owner, other);
return null;
} catch (YarnException ex) {
Assert.assertTrue(ex.getMessage().contains(owner.getUserName() +
" tries to renew a token with renewer " +
other.getUserName()));
// Rethrow so the outer catch registers the expected failure.
throw ex;
}
}
});
} catch (Exception e) {
// Expected path: any exception here means the renewal was refused.
// NOTE(review): an AssertionError from the inner check is an Error,
// not an Exception, so it still propagates and fails the test.
return;
}
Assert.fail("renew should have failed");
}
// The RM's login user is allowed to renew any token, regardless of the
// renewer named inside it — both self- and other-renewer tokens succeed.
@Test
public void testTokenRenewalByLoginUser() throws Exception {
  PrivilegedExceptionAction<Void> renewBoth =
      new PrivilegedExceptionAction<Void>() {
        @Override
        public Void run() throws Exception {
          checkTokenRenewal(owner, owner);
          checkTokenRenewal(owner, other);
          return null;
        }
      };
  UserGroupInformation.getLoginUser().doAs(renewBoth);
}
// Builds an RM delegation token for the given owner/renewer pair (signed
// by the shared dtsm secret manager) and asks a fresh ClientRMService to
// renew it; throws YarnException when the caller is not authorized.
private void checkTokenRenewal(UserGroupInformation owner,
UserGroupInformation renewer) throws IOException, YarnException {
RMDelegationTokenIdentifier tokenIdentifier =
new RMDelegationTokenIdentifier(
new Text(owner.getUserName()), new Text(renewer.getUserName()), null);
Token<?> token =
new Token<RMDelegationTokenIdentifier>(tokenIdentifier, dtsm);
// Convert the Hadoop token into the YARN wire-format record.
org.apache.hadoop.yarn.api.records.Token dToken = BuilderUtils.newDelegationToken(
token.getIdentifier(), token.getKind().toString(),
token.getPassword(), token.getService().toString());
RenewDelegationTokenRequest request =
Records.newRecord(RenewDelegationTokenRequest.class);
request.setDelegationToken(dToken);
RMContext rmContext = mock(RMContext.class);
ClientRMService rmService = new ClientRMService(
rmContext, null, null, null, null, dtsm);
rmService.renewDelegationToken(request);
}
// A token owner may always cancel its own token. Covered twice: once as
// a Kerberos-principal owner and once as a short-name owner.
@Test
public void testTokenCancellationByOwner() throws Exception {
// two tests required - one with a kerberos name
// and with a short name
RMContext rmContext = mock(RMContext.class);
final ClientRMService rmService =
new ClientRMService(rmContext, null, null, null, null, dtsm);
// Kerberos-name owner cancelling a token it owns.
testerKerb.doAs(new PrivilegedExceptionAction<Void>() {
@Override
public Void run() throws Exception {
checkTokenCancellation(rmService, testerKerb, other);
return null;
}
});
// Short-name owner; the overload builds its own service instance.
owner.doAs(new PrivilegedExceptionAction<Void>() {
@Override
public Void run() throws Exception {
checkTokenCancellation(owner, other);
return null;
}
});
}
// The designated renewer of a token may also cancel it. Covered twice:
// once with a Kerberos-principal renewer, once with a short-name one.
@Test
public void testTokenCancellationByRenewer() throws Exception {
// two tests required - one with a kerberos name
// and with a short name
RMContext rmContext = mock(RMContext.class);
final ClientRMService rmService =
new ClientRMService(rmContext, null, null, null, null, dtsm);
// Kerberos-name renewer (testerKerb) cancelling owner's token.
testerKerb.doAs(new PrivilegedExceptionAction<Void>() {
@Override
public Void run() throws Exception {
checkTokenCancellation(rmService, owner, testerKerb);
return null;
}
});
// Short-name renewer (other) cancelling owner's token.
other.doAs(new PrivilegedExceptionAction<Void>() {
@Override
public Void run() throws Exception {
checkTokenCancellation(owner, other);
return null;
}
});
}
// A user who is neither a token's owner nor its renewer must not be able
// to cancel it. Runs a full owner x renewer matrix twice: once acting as
// a Kerberos-principal caller (testerKerb) and once as a simple-auth
// caller (tester); every combination must be rejected with an
// authorization error naming the caller.
@Test
public void testTokenCancellationByWrongUser() {
// two sets to test -
// 1. try to cancel tokens of short and kerberos users as a kerberos UGI
// 2. try to cancel tokens of short and kerberos users as a simple auth UGI
RMContext rmContext = mock(RMContext.class);
final ClientRMService rmService =
new ClientRMService(rmContext, null, null, null, null, dtsm);
// Note: testerKerb is deliberately absent from the owner/renewer sets,
// so it is always a third party for these tokens.
UserGroupInformation[] kerbTestOwners =
{ owner, other, tester, ownerKerb, otherKerb };
UserGroupInformation[] kerbTestRenewers =
{ owner, other, ownerKerb, otherKerb };
for (final UserGroupInformation tokOwner : kerbTestOwners) {
for (final UserGroupInformation tokRenewer : kerbTestRenewers) {
try {
testerKerb.doAs(new PrivilegedExceptionAction<Void>() {
@Override
public Void run() throws Exception {
try {
checkTokenCancellation(rmService, tokOwner, tokRenewer);
Assert.fail("We should not reach here; token owner = "
+ tokOwner.getUserName() + ", renewer = "
+ tokRenewer.getUserName());
return null;
} catch (YarnException e) {
// Expected: cancellation refused for the unrelated caller.
Assert.assertTrue(e.getMessage().contains(
testerKerb.getUserName()
+ " is not authorized to cancel the token"));
return null;
}
}
});
} catch (Exception e) {
Assert.fail("Unexpected exception; " + e.getMessage());
}
}
}
// Same matrix again, now with the simple-auth short-name user 'tester'
// as the unrelated caller.
UserGroupInformation[] simpleTestOwners =
{ owner, other, ownerKerb, otherKerb, testerKerb };
UserGroupInformation[] simpleTestRenewers =
{ owner, other, ownerKerb, otherKerb };
for (final UserGroupInformation tokOwner : simpleTestOwners) {
for (final UserGroupInformation tokRenewer : simpleTestRenewers) {
try {
tester.doAs(new PrivilegedExceptionAction<Void>() {
@Override
public Void run() throws Exception {
try {
checkTokenCancellation(tokOwner, tokRenewer);
Assert.fail("We should not reach here; token owner = "
+ tokOwner.getUserName() + ", renewer = "
+ tokRenewer.getUserName());
return null;
} catch (YarnException ex) {
Assert.assertTrue(ex.getMessage().contains(
tester.getUserName()
+ " is not authorized to cancel the token"));
return null;
}
}
});
} catch (Exception e) {
Assert.fail("Unexpected exception; " + e.getMessage());
}
}
}
}
// Convenience overload: spins up a throwaway ClientRMService backed by
// the shared dtsm secret manager and delegates to the three-arg variant.
private void checkTokenCancellation(UserGroupInformation owner,
    UserGroupInformation renewer) throws IOException, YarnException {
  final ClientRMService throwawayService = new ClientRMService(
      mock(RMContext.class), null, null, null, null, dtsm);
  checkTokenCancellation(throwawayService, owner, renewer);
}
// Builds an RM delegation token for the given owner/renewer pair and asks
// the supplied service to cancel it; throws YarnException when the current
// caller is not authorized to cancel that token.
private void checkTokenCancellation(ClientRMService rmService,
UserGroupInformation owner, UserGroupInformation renewer)
throws IOException, YarnException {
RMDelegationTokenIdentifier tokenIdentifier =
new RMDelegationTokenIdentifier(new Text(owner.getUserName()),
new Text(renewer.getUserName()), null);
Token<?> token =
new Token<RMDelegationTokenIdentifier>(tokenIdentifier, dtsm);
// Convert to the YARN wire-format token record.
org.apache.hadoop.yarn.api.records.Token dToken =
BuilderUtils.newDelegationToken(token.getIdentifier(), token.getKind()
.toString(), token.getPassword(), token.getService().toString());
CancelDelegationTokenRequest request =
Records.newRecord(CancelDelegationTokenRequest.class);
request.setDelegationToken(dToken);
rmService.cancelDelegationToken(request);
}
// Submission behaviour of ClientRMService: defaults are applied when name
// and queue are omitted, explicit values are honoured, re-submitting an
// existing appId is a silent no-op, and applicationType filtering works.
@Test (timeout = 30000)
@SuppressWarnings ("rawtypes")
public void testAppSubmit() throws Exception {
YarnScheduler yarnScheduler = mockYarnScheduler();
RMContext rmContext = mock(RMContext.class);
mockRMContext(yarnScheduler, rmContext);
RMStateStore stateStore = mock(RMStateStore.class);
when(rmContext.getStateStore()).thenReturn(stateStore);
RMAppManager appManager = new RMAppManager(rmContext, yarnScheduler,
null, mock(ApplicationACLsManager.class), new Configuration());
when(rmContext.getDispatcher().getEventHandler()).thenReturn(
new EventHandler<Event>() {
public void handle(Event event) {}
});
ApplicationId appId1 = getApplicationId(100);
ApplicationACLsManager mockAclsManager = mock(ApplicationACLsManager.class);
when(
mockAclsManager.checkAccess(UserGroupInformation.getCurrentUser(),
ApplicationAccessType.VIEW_APP, null, appId1)).thenReturn(true);
QueueACLsManager mockQueueACLsManager = mock(QueueACLsManager.class);
when(mockQueueACLsManager.checkAccess(any(UserGroupInformation.class),
any(QueueACL.class), anyString())).thenReturn(true);
ClientRMService rmService =
new ClientRMService(rmContext, yarnScheduler, appManager,
mockAclsManager, mockQueueACLsManager, null);
// without name and queue
SubmitApplicationRequest submitRequest1 = mockSubmitAppRequest(
appId1, null, null);
try {
rmService.submitApplication(submitRequest1);
} catch (YarnException e) {
Assert.fail("Exception is not expected.");
}
RMApp app1 = rmContext.getRMApps().get(appId1);
Assert.assertNotNull("app doesn't exist", app1);
// Omitted name/queue fall back to the YARN defaults.
Assert.assertEquals("app name doesn't match",
YarnConfiguration.DEFAULT_APPLICATION_NAME, app1.getName());
Assert.assertEquals("app queue doesn't match",
YarnConfiguration.DEFAULT_QUEUE_NAME, app1.getQueue());
// with name and queue
String name = MockApps.newAppName();
String queue = MockApps.newQueue();
ApplicationId appId2 = getApplicationId(101);
SubmitApplicationRequest submitRequest2 = mockSubmitAppRequest(
appId2, name, queue);
// Distinct applicationType so the filter assertions below can match it.
submitRequest2.getApplicationSubmissionContext().setApplicationType(
"matchType");
try {
rmService.submitApplication(submitRequest2);
} catch (YarnException e) {
Assert.fail("Exception is not expected.");
}
RMApp app2 = rmContext.getRMApps().get(appId2);
Assert.assertNotNull("app doesn't exist", app2);
Assert.assertEquals("app name doesn't match", name, app2.getName());
Assert.assertEquals("app queue doesn't match", queue, app2.getQueue());
// duplicate appId
try {
rmService.submitApplication(submitRequest2);
} catch (YarnException e) {
Assert.fail("Exception is not expected.");
}
GetApplicationsRequest getAllAppsRequest =
GetApplicationsRequest.newInstance(new HashSet<String>());
GetApplicationsResponse getAllApplicationsResponse =
rmService.getApplications(getAllAppsRequest);
// 3 apps pre-seeded by mockRMContext + the 2 submitted here = 5 total;
// the duplicate submission must not add a sixth.
Assert.assertEquals(5,
getAllApplicationsResponse.getApplicationList().size());
Set<String> appTypes = new HashSet<String>();
appTypes.add("matchType");
getAllAppsRequest = GetApplicationsRequest.newInstance(appTypes);
getAllApplicationsResponse =
rmService.getApplications(getAllAppsRequest);
Assert.assertEquals(1,
getAllApplicationsResponse.getApplicationList().size());
Assert.assertEquals(appId2,
getAllApplicationsResponse.getApplicationList()
.get(0).getApplicationId());
}
// Exhaustive filter coverage for ClientRMService#getApplications():
// limit, start-time range, queue, user, tag, and scope. App counts
// include the 3 apps pre-seeded by mockRMContext plus 3 submitted here.
@Test
public void testGetApplications() throws IOException, YarnException {
/**
* 1. Submit 3 applications alternately in two queues
* 2. Test each of the filters
*/
// Basic setup
YarnScheduler yarnScheduler = mockYarnScheduler();
RMContext rmContext = mock(RMContext.class);
mockRMContext(yarnScheduler, rmContext);
RMStateStore stateStore = mock(RMStateStore.class);
when(rmContext.getStateStore()).thenReturn(stateStore);
RMAppManager appManager = new RMAppManager(rmContext, yarnScheduler,
null, mock(ApplicationACLsManager.class), new Configuration());
when(rmContext.getDispatcher().getEventHandler()).thenReturn(
new EventHandler<Event>() {
public void handle(Event event) {}
});
ApplicationACLsManager mockAclsManager = mock(ApplicationACLsManager.class);
QueueACLsManager mockQueueACLsManager = mock(QueueACLsManager.class);
when(mockQueueACLsManager.checkAccess(any(UserGroupInformation.class),
any(QueueACL.class), anyString())).thenReturn(true);
ClientRMService rmService =
new ClientRMService(rmContext, yarnScheduler, appManager,
mockAclsManager, mockQueueACLsManager, null);
// Initialize appnames and queues
String[] queues = {QUEUE_1, QUEUE_2};
String[] appNames =
{MockApps.newAppName(), MockApps.newAppName(), MockApps.newAppName()};
ApplicationId[] appIds =
{getApplicationId(101), getApplicationId(102), getApplicationId(103)};
List<String> tags = Arrays.asList("Tag1", "Tag2", "Tag3");
long[] submitTimeMillis = new long[3];
// Submit applications: app i carries tags[0..i], alternating queues,
// and its submission time is recorded for the range checks below.
for (int i = 0; i < appIds.length; i++) {
ApplicationId appId = appIds[i];
when(mockAclsManager.checkAccess(UserGroupInformation.getCurrentUser(),
ApplicationAccessType.VIEW_APP, null, appId)).thenReturn(true);
SubmitApplicationRequest submitRequest = mockSubmitAppRequest(
appId, appNames[i], queues[i % queues.length],
new HashSet<String>(tags.subList(0, i + 1)));
rmService.submitApplication(submitRequest);
submitTimeMillis[i] = System.currentTimeMillis();
}
// Test different cases of ClientRMService#getApplications()
GetApplicationsRequest request = GetApplicationsRequest.newInstance();
// 3 pre-seeded + 3 submitted = 6.
assertEquals("Incorrect total number of apps", 6,
rmService.getApplications(request).getApplicationList().size());
// Check limit
request.setLimit(1L);
assertEquals("Failed to limit applications", 1,
rmService.getApplications(request).getApplicationList().size());
// Check start range
request = GetApplicationsRequest.newInstance();
request.setStartRange(submitTimeMillis[0], System.currentTimeMillis());
// 2 applications are submitted after first timeMills
assertEquals("Incorrect number of matching start range",
2, rmService.getApplications(request).getApplicationList().size());
// 1 application is submitted after the second timeMills
request.setStartRange(submitTimeMillis[1], System.currentTimeMillis());
assertEquals("Incorrect number of matching start range",
1, rmService.getApplications(request).getApplicationList().size());
// no application is submitted after the third timeMills
request.setStartRange(submitTimeMillis[2], System.currentTimeMillis());
assertEquals("Incorrect number of matching start range",
0, rmService.getApplications(request).getApplicationList().size());
// Check queue
request = GetApplicationsRequest.newInstance();
Set<String> queueSet = new HashSet<String>();
request.setQueues(queueSet);
queueSet.add(queues[0]);
assertEquals("Incorrect number of applications in queue", 2,
rmService.getApplications(request).getApplicationList().size());
assertEquals("Incorrect number of applications in queue", 2,
rmService.getApplications(request, false).getApplicationList().size());
queueSet.add(queues[1]);
assertEquals("Incorrect number of applications in queue", 3,
rmService.getApplications(request).getApplicationList().size());
// Check user
request = GetApplicationsRequest.newInstance();
Set<String> userSet = new HashSet<String>();
request.setUsers(userSet);
userSet.add("random-user-name");
assertEquals("Incorrect number of applications for user", 0,
rmService.getApplications(request).getApplicationList().size());
userSet.add(UserGroupInformation.getCurrentUser().getShortUserName());
assertEquals("Incorrect number of applications for user", 3,
rmService.getApplications(request).getApplicationList().size());
// Check tags
request = GetApplicationsRequest.newInstance(
ApplicationsRequestScope.ALL, null, null, null, null, null, null,
null, null);
Set<String> tagSet = new HashSet<String>();
request.setApplicationTags(tagSet);
assertEquals("Incorrect number of matching tags", 6,
rmService.getApplications(request).getApplicationList().size());
// Tag1 is on all 3 submitted apps, Tag2 on two, Tag3 on one.
tagSet = Sets.newHashSet(tags.get(0));
request.setApplicationTags(tagSet);
assertEquals("Incorrect number of matching tags", 3,
rmService.getApplications(request).getApplicationList().size());
tagSet = Sets.newHashSet(tags.get(1));
request.setApplicationTags(tagSet);
assertEquals("Incorrect number of matching tags", 2,
rmService.getApplications(request).getApplicationList().size());
tagSet = Sets.newHashSet(tags.get(2));
request.setApplicationTags(tagSet);
assertEquals("Incorrect number of matching tags", 1,
rmService.getApplications(request).getApplicationList().size());
// Check scope
request = GetApplicationsRequest.newInstance(
ApplicationsRequestScope.VIEWABLE);
assertEquals("Incorrect number of applications for the scope", 6,
rmService.getApplications(request).getApplicationList().size());
request = GetApplicationsRequest.newInstance(
ApplicationsRequestScope.OWN);
assertEquals("Incorrect number of applications for the scope", 3,
rmService.getApplications(request).getApplicationList().size());
}
// Ensures submitApplication() is not serialized RM-wide: while app1's
// submission is blocked inside the dispatcher (held at a barrier), app2's
// submission must still complete on the main thread.
@Test(timeout=4000)
public void testConcurrentAppSubmit()
throws IOException, InterruptedException, BrokenBarrierException,
YarnException {
YarnScheduler yarnScheduler = mockYarnScheduler();
RMContext rmContext = mock(RMContext.class);
mockRMContext(yarnScheduler, rmContext);
RMStateStore stateStore = mock(RMStateStore.class);
when(rmContext.getStateStore()).thenReturn(stateStore);
RMAppManager appManager = new RMAppManager(rmContext, yarnScheduler,
null, mock(ApplicationACLsManager.class), new Configuration());
final ApplicationId appId1 = getApplicationId(100);
final ApplicationId appId2 = getApplicationId(101);
final SubmitApplicationRequest submitRequest1 = mockSubmitAppRequest(
appId1, null, null);
final SubmitApplicationRequest submitRequest2 = mockSubmitAppRequest(
appId2, null, null);
// Both barriers pair the test thread with the handler thread processing
// app1's submission event.
final CyclicBarrier startBarrier = new CyclicBarrier(2);
final CyclicBarrier endBarrier = new CyclicBarrier(2);
@SuppressWarnings("rawtypes")
EventHandler eventHandler = new EventHandler() {
@Override
public void handle(Event rawEvent) {
// Only app1's event blocks; all other events pass straight through.
if (rawEvent instanceof RMAppEvent) {
RMAppEvent event = (RMAppEvent) rawEvent;
if (event.getApplicationId().equals(appId1)) {
try {
startBarrier.await();
endBarrier.await();
} catch (BrokenBarrierException e) {
LOG.warn("Broken Barrier", e);
} catch (InterruptedException e) {
LOG.warn("Interrupted while awaiting barriers", e);
}
}
}
}
};
when(rmContext.getDispatcher().getEventHandler()).thenReturn(eventHandler);
final ClientRMService rmService =
new ClientRMService(rmContext, yarnScheduler, appManager, null, null,
null);
// submit an app and wait for it to block while in app submission
Thread t = new Thread() {
@Override
public void run() {
try {
rmService.submitApplication(submitRequest1);
} catch (YarnException e) {}
}
};
t.start();
// submit another app, so go through while the first app is blocked
startBarrier.await();
rmService.submitApplication(submitRequest2);
// Release app1's handler and wait for the submitter thread to finish.
endBarrier.await();
t.join();
}
/** Convenience overload: submission request without application tags. */
private SubmitApplicationRequest mockSubmitAppRequest(ApplicationId appId,
    String name, String queue) {
  Set<String> noTags = null;
  return mockSubmitAppRequest(appId, name, queue, noTags);
}
/** Convenience overload: submission request for a managed (non-unmanaged) AM. */
private SubmitApplicationRequest mockSubmitAppRequest(ApplicationId appId,
    String name, String queue, Set<String> tags) {
  final boolean managedAm = false;
  return mockSubmitAppRequest(appId, name, queue, tags, managedAm);
}
// Builds a complete SubmitApplicationRequest for the given id/name/queue,
// with optional tags and an unmanaged-AM flag; the AM container spec is a
// mock and the resource is the scheduler's minimum allocation.
@SuppressWarnings("deprecation")
private SubmitApplicationRequest mockSubmitAppRequest(ApplicationId appId,
String name, String queue, Set<String> tags, boolean unmanaged) {
ContainerLaunchContext amContainerSpec = mock(ContainerLaunchContext.class);
Resource resource = Resources.createResource(
YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_MB);
ApplicationSubmissionContext submissionContext =
recordFactory.newRecordInstance(ApplicationSubmissionContext.class);
submissionContext.setAMContainerSpec(amContainerSpec);
submissionContext.setApplicationName(name);
submissionContext.setQueue(queue);
submissionContext.setApplicationId(appId);
submissionContext.setResource(resource);
// appType is a test-class field shared by all submissions.
submissionContext.setApplicationType(appType);
submissionContext.setApplicationTags(tags);
submissionContext.setUnmanagedAM(unmanaged);
SubmitApplicationRequest submitRequest =
recordFactory.newRecordInstance(SubmitApplicationRequest.class);
submitRequest.setApplicationSubmissionContext(submissionContext);
return submitRequest;
}
// Wires the standard mock graph into rmContext: a dispatcher with a mock
// event handler, queue info for "testqueue" (IOException for
// "nonexistentqueue"), history/metrics writers, the three pre-seeded RM
// apps, and the scheduler's view of "testqueue".
private void mockRMContext(YarnScheduler yarnScheduler, RMContext rmContext)
throws IOException {
Dispatcher dispatcher = mock(Dispatcher.class);
when(rmContext.getDispatcher()).thenReturn(dispatcher);
EventHandler eventHandler = mock(EventHandler.class);
when(dispatcher.getEventHandler()).thenReturn(eventHandler);
QueueInfo queInfo = recordFactory.newRecordInstance(QueueInfo.class);
queInfo.setQueueName("testqueue");
when(yarnScheduler.getQueueInfo(eq("testqueue"), anyBoolean(), anyBoolean()))
.thenReturn(queInfo);
// Unknown queue lookups surface as IOException from the scheduler.
when(yarnScheduler.getQueueInfo(eq("nonexistentqueue"), anyBoolean(), anyBoolean()))
.thenThrow(new IOException("queue does not exist"));
RMApplicationHistoryWriter writer = mock(RMApplicationHistoryWriter.class);
when(rmContext.getRMApplicationHistoryWriter()).thenReturn(writer);
SystemMetricsPublisher publisher = mock(SystemMetricsPublisher.class);
when(rmContext.getSystemMetricsPublisher()).thenReturn(publisher);
ConcurrentHashMap<ApplicationId, RMApp> apps = getRMApps(rmContext,
yarnScheduler);
when(rmContext.getRMApps()).thenReturn(apps);
when(yarnScheduler.getAppsInQueue(eq("testqueue"))).thenReturn(
getSchedulerApps(apps));
ResourceScheduler rs = mock(ResourceScheduler.class);
when(rmContext.getScheduler()).thenReturn(rs);
}
// Pre-seeds three RM apps: apps 1 and 3 in "testqueue" (usage 10/3 and
// 40/5 memory-/vcore-seconds respectively), app 2 in queue "a" (20/2).
// testGetApplicationReport and testGetQueueInfo depend on these values.
private ConcurrentHashMap<ApplicationId, RMApp> getRMApps(
RMContext rmContext, YarnScheduler yarnScheduler) {
ConcurrentHashMap<ApplicationId, RMApp> apps =
new ConcurrentHashMap<ApplicationId, RMApp>();
ApplicationId applicationId1 = getApplicationId(1);
ApplicationId applicationId2 = getApplicationId(2);
ApplicationId applicationId3 = getApplicationId(3);
YarnConfiguration config = new YarnConfiguration();
apps.put(applicationId1, getRMApp(rmContext, yarnScheduler, applicationId1,
config, "testqueue", 10, 3));
apps.put(applicationId2, getRMApp(rmContext, yarnScheduler, applicationId2,
config, "a", 20, 2));
apps.put(applicationId3, getRMApp(rmContext, yarnScheduler, applicationId3,
config, "testqueue", 40, 5));
return apps;
}
// Scheduler's view of "testqueue": only apps 1 and 3 live there (see
// getRMApps), so exactly those two attempt ids are returned, in order.
private List<ApplicationAttemptId> getSchedulerApps(
    Map<ApplicationId, RMApp> apps) {
  List<ApplicationAttemptId> queueAttempts =
      new ArrayList<ApplicationAttemptId>();
  for (int id : new int[] { 1, 3 }) {
    queueAttempts.add(
        ApplicationAttemptId.newInstance(getApplicationId(id), 0));
  }
  return queueAttempts;
}
/** Application id with the fixed cluster timestamp (123456) shared by all test apps. */
private static ApplicationId getApplicationId(int id) {
  final long clusterTimestamp = 123456;
  return ApplicationId.newInstance(clusterTimestamp, id);
}
/** Attempt 1 of the test application with the given numeric id. */
private static ApplicationAttemptId getApplicationAttemptId(int id) {
  final int attemptNumber = 1;
  return ApplicationAttemptId.newInstance(getApplicationId(id), attemptNumber);
}
  /**
   * Builds a spied {@link RMAppImpl} whose application report is overridden to
   * advertise the given memory-seconds/vcore-seconds, with a single attempt
   * and a single completed container wired into the mocked scheduler on
   * {@code rmContext}.
   *
   * NOTE: this also (re)stubs rmContext.getScheduler(), so calling it
   * repeatedly replaces the scheduler stub each time.
   *
   * @param applicationId3 id of the app to build (name kept from original)
   * @param memorySeconds value returned in the usage report
   * @param vcoreSeconds value returned in the usage report
   */
  private RMAppImpl getRMApp(RMContext rmContext, YarnScheduler yarnScheduler,
      ApplicationId applicationId3, YarnConfiguration config, String queueName,
      final long memorySeconds, final long vcoreSeconds) {
    ApplicationSubmissionContext asContext = mock(ApplicationSubmissionContext.class);
    when(asContext.getMaxAppAttempts()).thenReturn(1);
    // Spy on an anonymous subclass so the generated report carries the
    // fixed usage numbers supplied by the caller.
    RMAppImpl app =
        spy(new RMAppImpl(applicationId3, rmContext, config, null, null,
            queueName, asContext, yarnScheduler, null,
            System.currentTimeMillis(), "YARN", null,
            BuilderUtils.newResourceRequest(
                RMAppAttemptImpl.AM_CONTAINER_PRIORITY, ResourceRequest.ANY,
                Resource.newInstance(1024, 1), 1)){
                  @Override
                  public ApplicationReport createAndGetApplicationReport(
                      String clientUserName, boolean allowAccess) {
                    ApplicationReport report = super.createAndGetApplicationReport(
                        clientUserName, allowAccess);
                    ApplicationResourceUsageReport usageReport =
                        report.getApplicationResourceUsageReport();
                    usageReport.setMemorySeconds(memorySeconds);
                    usageReport.setVcoreSeconds(vcoreSeconds);
                    report.setApplicationResourceUsageReport(usageReport);
                    return report;
                  }
                });
    // Single attempt (attempt 1 of a fixed app id) with an AM container.
    ApplicationAttemptId attemptId = ApplicationAttemptId.newInstance(
        ApplicationId.newInstance(123456, 1), 1);
    RMAppAttemptImpl rmAppAttemptImpl = spy(new RMAppAttemptImpl(attemptId,
        rmContext, yarnScheduler, null, asContext, config, false, null));
    Container container = Container.newInstance(
        ContainerId.newContainerId(attemptId, 1), null, "", null, null, null);
    RMContainerImpl containerimpl = spy(new RMContainerImpl(container,
        attemptId, null, "", rmContext));
    Map<ApplicationAttemptId, RMAppAttempt> attempts =
        new HashMap<ApplicationAttemptId, RMAppAttempt>();
    attempts.put(attemptId, rmAppAttemptImpl);
    when(app.getCurrentAppAttempt()).thenReturn(rmAppAttemptImpl);
    when(app.getAppAttempts()).thenReturn(attempts);
    when(rmAppAttemptImpl.getMasterContainer()).thenReturn(container);
    // Wire a mock scheduler into the context: it resolves any container id
    // to our container and reports it as the attempt's only live container.
    ResourceScheduler rs = mock(ResourceScheduler.class);
    when(rmContext.getScheduler()).thenReturn(rs);
    when(rmContext.getScheduler().getRMContainer(any(ContainerId.class)))
        .thenReturn(containerimpl);
    SchedulerAppReport sAppReport = mock(SchedulerAppReport.class);
    when(
        rmContext.getScheduler().getSchedulerAppInfo(
            any(ApplicationAttemptId.class))).thenReturn(sAppReport);
    List<RMContainer> rmContainers = new ArrayList<RMContainer>();
    rmContainers.add(containerimpl);
    when(
        rmContext.getScheduler().getSchedulerAppInfo(attemptId)
            .getLiveContainers()).thenReturn(rmContainers);
    // The container is presented as completed successfully.
    ContainerStatus cs = mock(ContainerStatus.class);
    when(containerimpl.getFinishedStatus()).thenReturn(cs);
    when(containerimpl.getDiagnosticsInfo()).thenReturn("N/A");
    when(containerimpl.getContainerExitStatus()).thenReturn(0);
    when(containerimpl.getContainerState()).thenReturn(ContainerState.COMPLETE);
    return app;
  }
  /**
   * Creates a mock {@link YarnScheduler} that reports the default min/max
   * allocation capabilities, two app attempts in QUEUE_1, one in QUEUE_2,
   * and a null usage report for attempt 1 (exercising the null-report path).
   */
  private static YarnScheduler mockYarnScheduler() {
    YarnScheduler yarnScheduler = mock(YarnScheduler.class);
    when(yarnScheduler.getMinimumResourceCapability()).thenReturn(
        Resources.createResource(
            YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_MB));
    when(yarnScheduler.getMaximumResourceCapability()).thenReturn(
        Resources.createResource(
            YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_MB));
    when(yarnScheduler.getAppsInQueue(QUEUE_1)).thenReturn(
        Arrays.asList(getApplicationAttemptId(101), getApplicationAttemptId(102)));
    when(yarnScheduler.getAppsInQueue(QUEUE_2)).thenReturn(
        Arrays.asList(getApplicationAttemptId(103)));
    ApplicationAttemptId attemptId = getApplicationAttemptId(1);
    // Deliberately null: callers must tolerate a missing usage report.
    when(yarnScheduler.getAppResourceUsageReport(attemptId)).thenReturn(null);
    return yarnScheduler;
  }
@Test
public void testReservationAPIs() {
// initialize
CapacitySchedulerConfiguration conf = new CapacitySchedulerConfiguration();
ReservationSystemTestUtil.setupQueueConfiguration(conf);
conf.setClass(YarnConfiguration.RM_SCHEDULER, CapacityScheduler.class,
ResourceScheduler.class);
conf.setBoolean(YarnConfiguration.RM_RESERVATION_SYSTEM_ENABLE, true);
MockRM rm = new MockRM(conf);
rm.start();
MockNM nm;
try {
nm = rm.registerNode("127.0.0.1:1", 102400, 100);
// allow plan follower to synchronize
Thread.sleep(1050);
} catch (Exception e) {
Assert.fail(e.getMessage());
}
// Create a client.
ClientRMService clientService = rm.getClientRMService();
// create a reservation
Clock clock = new UTCClock();
long arrival = clock.getTime();
long duration = 60000;
long deadline = (long) (arrival + 1.05 * duration);
ReservationSubmissionRequest sRequest =
createSimpleReservationRequest(4, arrival, deadline, duration);
ReservationSubmissionResponse sResponse = null;
try {
sResponse = clientService.submitReservation(sRequest);
} catch (Exception e) {
Assert.fail(e.getMessage());
}
Assert.assertNotNull(sResponse);
ReservationId reservationID = sResponse.getReservationId();
Assert.assertNotNull(reservationID);
LOG.info("Submit reservation response: " + reservationID);
// Update the reservation
ReservationDefinition rDef = sRequest.getReservationDefinition();
ReservationRequest rr =
rDef.getReservationRequests().getReservationResources().get(0);
rr.setNumContainers(5);
arrival = clock.getTime();
duration = 30000;
deadline = (long) (arrival + 1.05 * duration);
rr.setDuration(duration);
rDef.setArrival(arrival);
rDef.setDeadline(deadline);
ReservationUpdateRequest uRequest =
ReservationUpdateRequest.newInstance(rDef, reservationID);
ReservationUpdateResponse uResponse = null;
try {
uResponse = clientService.updateReservation(uRequest);
} catch (Exception e) {
Assert.fail(e.getMessage());
}
Assert.assertNotNull(sResponse);
LOG.info("Update reservation response: " + uResponse);
// Delete the reservation
ReservationDeleteRequest dRequest =
ReservationDeleteRequest.newInstance(reservationID);
ReservationDeleteResponse dResponse = null;
try {
dResponse = clientService.deleteReservation(dRequest);
} catch (Exception e) {
Assert.fail(e.getMessage());
}
Assert.assertNotNull(sResponse);
LOG.info("Delete reservation response: " + dResponse);
// clean-up
rm.stop();
nm = null;
rm = null;
}
private ReservationSubmissionRequest createSimpleReservationRequest(
int numContainers, long arrival, long deadline, long duration) {
// create a request with a single atomic ask
ReservationRequest r =
ReservationRequest.newInstance(Resource.newInstance(1024, 1),
numContainers, 1, duration);
ReservationRequests reqs =
ReservationRequests.newInstance(Collections.singletonList(r),
ReservationRequestInterpreter.R_ALL);
ReservationDefinition rDef =
ReservationDefinition.newInstance(arrival, deadline, reqs,
"testClientRMService#reservation");
ReservationSubmissionRequest request =
ReservationSubmissionRequest.newInstance(rDef,
ReservationSystemTestUtil.reservationQ);
return request;
}
  /**
   * Verifies that cluster node labels and node-to-label mappings registered
   * with the RMNodeLabelsManager are visible through the
   * ApplicationClientProtocol RPC interface.
   */
  @Test
  public void testGetNodeLabels() throws Exception {
    MockRM rm = new MockRM() {
      protected ClientRMService createClientRMService() {
        return new ClientRMService(this.rmContext, scheduler,
            this.rmAppManager, this.applicationACLsManager,
            this.queueACLsManager, this.getRMContext()
                .getRMDelegationTokenSecretManager());
      };
    };
    rm.start();
    // Register labels "x"/"y" and map one host to each.
    RMNodeLabelsManager labelsMgr = rm.getRMContext().getNodeLabelManager();
    labelsMgr.addToCluserNodeLabels(ImmutableSet.of("x", "y"));
    Map<NodeId, Set<String>> map = new HashMap<NodeId, Set<String>>();
    map.put(NodeId.newInstance("host1", 0), ImmutableSet.of("x"));
    map.put(NodeId.newInstance("host2", 0), ImmutableSet.of("y"));
    labelsMgr.replaceLabelsOnNode(map);

    // Create a client talking to the real RPC endpoint.
    Configuration conf = new Configuration();
    YarnRPC rpc = YarnRPC.create(conf);
    InetSocketAddress rmAddress = rm.getClientRMService().getBindAddress();
    LOG.info("Connecting to ResourceManager at " + rmAddress);
    ApplicationClientProtocol client =
        (ApplicationClientProtocol) rpc.getProxy(
            ApplicationClientProtocol.class, rmAddress, conf);

    // Get node labels collection
    GetClusterNodeLabelsResponse response =
        client.getClusterNodeLabels(GetClusterNodeLabelsRequest.newInstance());
    Assert.assertTrue(response.getNodeLabels().containsAll(
        Arrays.asList("x", "y")));

    // Get node labels mapping
    GetNodesToLabelsResponse response1 =
        client.getNodeToLabels(GetNodesToLabelsRequest.newInstance());
    Map<NodeId, Set<String>> nodeToLabels = response1.getNodeToLabels();
    Assert.assertTrue(nodeToLabels.keySet().containsAll(
        Arrays.asList(NodeId.newInstance("host1", 0),
            NodeId.newInstance("host2", 0))));
    Assert.assertTrue(nodeToLabels.get(NodeId.newInstance("host1", 0))
        .containsAll(Arrays.asList("x")));
    Assert.assertTrue(nodeToLabels.get(NodeId.newInstance("host2", 0))
        .containsAll(Arrays.asList("y")));

    rpc.stopProxy(client, conf);
    rm.close();
  }
}
| |
// Copyright (C) 2010 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.server.mail;
import com.google.gerrit.common.errors.EmailException;
import com.google.gerrit.reviewdb.client.Account;
import com.google.gerrit.reviewdb.client.AccountGroup;
import com.google.gerrit.reviewdb.client.AccountProjectWatch.NotifyType;
import com.google.gerrit.reviewdb.client.Change;
import com.google.gerrit.reviewdb.client.ChangeMessage;
import com.google.gerrit.reviewdb.client.Patch;
import com.google.gerrit.reviewdb.client.PatchSet;
import com.google.gerrit.reviewdb.client.PatchSetInfo;
import com.google.gerrit.reviewdb.client.StarredChange;
import com.google.gerrit.server.IdentifiedUser;
import com.google.gerrit.server.mail.ProjectWatch.Watchers;
import com.google.gerrit.server.notedb.ReviewerState;
import com.google.gerrit.server.patch.PatchList;
import com.google.gerrit.server.patch.PatchListEntry;
import com.google.gerrit.server.patch.PatchListNotAvailableException;
import com.google.gerrit.server.patch.PatchSetInfoNotAvailableException;
import com.google.gerrit.server.project.ProjectState;
import com.google.gerrit.server.query.change.ChangeData;
import com.google.gwtorm.server.OrmException;
import org.eclipse.jgit.diff.DiffFormatter;
import org.eclipse.jgit.internal.JGitText;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.util.RawParseUtils;
import org.eclipse.jgit.util.TemporaryBuffer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.text.MessageFormat;
import java.util.Collections;
import java.util.Date;
import java.util.HashSet;
import java.util.Set;
import java.util.TreeSet;
/** Sends an email to one or more interested parties. */
public abstract class ChangeEmail extends NotificationEmail {
  private static final Logger log = LoggerFactory.getLogger(ChangeEmail.class);

  protected final Change change;
  protected final ChangeData changeData;
  protected PatchSet patchSet;
  protected PatchSetInfo patchSetInfo;
  protected ChangeMessage changeMessage;

  protected ProjectState projectState;
  protected Set<Account.Id> authors;
  protected boolean emailOnlyAuthors;

  protected ChangeEmail(EmailArguments ea, Change c, String mc) {
    super(ea, mc, c.getProject(), c.getDest());
    change = c;
    changeData = ea.changeDataFactory.create(ea.db.get(), c);
    emailOnlyAuthors = false;
  }

  @Override
  public void setFrom(final Account.Id id) {
    super.setFrom(id);

    // If the from user is in an email-squelching group, restrict delivery
    // to the change authors only. (Was a misplaced /** */ doc comment.)
    final IdentifiedUser user = args.identifiedUserFactory.create(id);
    emailOnlyAuthors = !user.getCapabilities().canEmailReviewers();
  }

  public void setPatchSet(final PatchSet ps) {
    patchSet = ps;
  }

  public void setPatchSet(final PatchSet ps, final PatchSetInfo psi) {
    patchSet = ps;
    patchSetInfo = psi;
  }

  public void setChangeMessage(final ChangeMessage cm) {
    changeMessage = cm;
  }

  /** Format the message body by calling {@link #appendText(String)}. */
  @Override
  protected void format() throws EmailException {
    formatChange();
    appendText(velocifyFile("ChangeFooter.vm"));
    try {
      TreeSet<String> names = new TreeSet<>();
      for (Account.Id who : changeData.reviewers().values()) {
        names.add(getNameEmailFor(who));
      }
      for (String name : names) {
        appendText("Gerrit-Reviewer: " + name + "\n");
      }
    } catch (OrmException e) {
      // Don't fail the whole email; just omit the reviewer footer lines.
      // (Previously this exception was silently swallowed.)
      log.warn("Cannot list reviewers for change footer", e);
    }
    formatFooter();
  }

  /** Format the message body by calling {@link #appendText(String)}. */
  protected abstract void formatChange() throws EmailException;

  /**
   * Format the message footer by calling {@link #appendText(String)}.
   *
   * @throws EmailException if an error occurred.
   */
  protected void formatFooter() throws EmailException {
  }

  /** Setup the message headers and envelope (TO, CC, BCC). */
  @Override
  protected void init() throws EmailException {
    if (args.projectCache != null) {
      projectState = args.projectCache.get(change.getProject());
    } else {
      projectState = null;
    }

    if (patchSet == null) {
      try {
        patchSet = args.db.get().patchSets().get(change.currentPatchSetId());
      } catch (OrmException err) {
        patchSet = null;
      }
    }

    if (patchSet != null && patchSetInfo == null) {
      try {
        patchSetInfo = args.patchSetInfoFactory.get(args.db.get(), patchSet.getId());
      } catch (PatchSetInfoNotAvailableException err) {
        patchSetInfo = null;
      }
    }
    authors = getAuthors();

    super.init();

    if (changeMessage != null && changeMessage.getWrittenOn() != null) {
      setHeader("Date", new Date(changeMessage.getWrittenOn().getTime()));
    }
    setChangeSubjectHeader();
    setHeader("X-Gerrit-Change-Id", "" + change.getKey().get());
    setChangeUrlHeader();
    setCommitIdHeader();
  }

  private void setChangeUrlHeader() {
    final String u = getChangeUrl();
    if (u != null) {
      setHeader("X-Gerrit-ChangeURL", "<" + u + ">");
    }
  }

  private void setCommitIdHeader() {
    if (patchSet != null && patchSet.getRevision() != null
        && patchSet.getRevision().get() != null
        && patchSet.getRevision().get().length() > 0) {
      setHeader("X-Gerrit-Commit", patchSet.getRevision().get());
    }
  }

  private void setChangeSubjectHeader() throws EmailException {
    setHeader("Subject", velocifyFile("ChangeSubject.vm"));
  }

  /** Get a link to the change; null if the server doesn't know its own address. */
  public String getChangeUrl() {
    if (getGerritUrl() != null) {
      final StringBuilder r = new StringBuilder();
      r.append(getGerritUrl());
      r.append(change.getChangeId());
      return r.toString();
    }
    return null;
  }

  public String getChangeMessageThreadId() throws EmailException {
    return velocify("<gerrit.${change.createdOn.time}.$change.key.get()" +
        "@$email.gerritHost>");
  }

  /** Format the sender's "cover letter", {@link #getCoverLetter()}. */
  protected void formatCoverLetter() {
    final String cover = getCoverLetter();
    if (!"".equals(cover)) {
      appendText(cover);
      appendText("\n\n");
    }
  }

  /** Get the text of the "cover letter", from {@link ChangeMessage}. */
  public String getCoverLetter() {
    if (changeMessage != null) {
      final String txt = changeMessage.getMessage();
      if (txt != null) {
        return txt.trim();
      }
    }
    return "";
  }

  /** Format the change message and the affected file list. */
  protected void formatChangeDetail() {
    appendText(getChangeDetail());
  }

  /** Create the change message and the affected file list. */
  public String getChangeDetail() {
    try {
      StringBuilder detail = new StringBuilder();

      if (patchSetInfo != null) {
        detail.append(patchSetInfo.getMessage().trim()).append("\n");
      } else {
        detail.append(change.getSubject().trim()).append("\n");
      }

      if (patchSet != null) {
        detail.append("---\n");
        PatchList patchList = getPatchList();
        for (PatchListEntry p : patchList.getPatches()) {
          if (Patch.COMMIT_MSG.equals(p.getNewName())) {
            continue;
          }
          detail.append(p.getChangeType().getCode())
                .append(" ").append(p.getNewName()).append("\n");
        }
        // Size - 1 excludes the commit message pseudo-file skipped above.
        detail.append(MessageFormat.format("" //
            + "{0,choice,0#0 files|1#1 file|1<{0} files} changed, " //
            + "{1,choice,0#0 insertions|1#1 insertion|1<{1} insertions}(+), " //
            + "{2,choice,0#0 deletions|1#1 deletion|1<{2} deletions}(-)" //
            + "\n", patchList.getPatches().size() - 1, //
            patchList.getInsertions(), //
            patchList.getDeletions()));
        detail.append("\n");
      }
      return detail.toString();
    } catch (Exception err) {
      log.warn("Cannot format change detail", err);
      return "";
    }
  }

  /** Get the patch list corresponding to this patch set. */
  protected PatchList getPatchList() throws PatchListNotAvailableException {
    if (patchSet != null) {
      return args.patchListCache.get(change, patchSet);
    }
    throw new PatchListNotAvailableException("no patchSet specified");
  }

  /** Get the project entity the change is in; null if its been deleted. */
  protected ProjectState getProjectState() {
    return projectState;
  }

  /** Get the groups which own the project. */
  protected Set<AccountGroup.UUID> getProjectOwners() {
    final ProjectState r;

    r = args.projectCache.get(change.getProject());
    return r != null ? r.getOwners() : Collections.<AccountGroup.UUID> emptySet();
  }

  /** TO or CC all vested parties (change owner, patch set uploader, author). */
  protected void rcptToAuthors(final RecipientType rt) {
    for (final Account.Id id : authors) {
      add(rt, id);
    }
  }

  /** BCC any user who has starred this change. */
  protected void bccStarredBy() {
    try {
      // BCC anyone who has starred this change.
      //
      for (StarredChange w : args.db.get().starredChanges().byChange(
          change.getId())) {
        super.add(RecipientType.BCC, w.getAccountId());
      }
    } catch (OrmException err) {
      // Just don't BCC everyone. Better to send a partial message to those
      // we already have queued up then to fail deliver entirely to people
      // who have a lower interest in the change.
      log.warn("Cannot BCC users that starred updated change", err);
    }
  }

  @Override
  protected final Watchers getWatchers(NotifyType type) throws OrmException {
    ProjectWatch watch = new ProjectWatch(args, project, projectState, changeData);
    return watch.getWatchers(type);
  }

  /** Any user who has published comments on this change. */
  protected void ccAllApprovals() {
    try {
      for (Account.Id id : changeData.reviewers().values()) {
        add(RecipientType.CC, id);
      }
    } catch (OrmException err) {
      log.warn("Cannot CC users that reviewed updated change", err);
    }
  }

  /** Users who have non-zero approval codes on the change. */
  protected void ccExistingReviewers() {
    try {
      for (Account.Id id : changeData.reviewers().get(ReviewerState.REVIEWER)) {
        add(RecipientType.CC, id);
      }
    } catch (OrmException err) {
      log.warn("Cannot CC users that commented on updated change", err);
    }
  }

  @Override
  protected void add(final RecipientType rt, final Account.Id to) {
    // When squelching, only the change authors may receive the email.
    if (!emailOnlyAuthors || authors.contains(to)) {
      super.add(rt, to);
    }
  }

  @Override
  protected boolean isVisibleTo(final Account.Id to) throws OrmException {
    return projectState == null
        || projectState.controlFor(args.identifiedUserFactory.create(to))
            .controlFor(change).isVisible(args.db.get());
  }

  /** Find all users who are authors of any part of this change. */
  protected Set<Account.Id> getAuthors() {
    Set<Account.Id> authors = new HashSet<>();

    authors.add(change.getOwner());
    if (patchSet != null) {
      authors.add(patchSet.getUploader());
    }
    if (patchSetInfo != null) {
      if (patchSetInfo.getAuthor().getAccount() != null) {
        authors.add(patchSetInfo.getAuthor().getAccount());
      }
      if (patchSetInfo.getCommitter().getAccount() != null) {
        authors.add(patchSetInfo.getCommitter().getAccount());
      }
    }
    return authors;
  }

  @Override
  protected void setupVelocityContext() {
    super.setupVelocityContext();
    velocityContext.put("change", change);
    velocityContext.put("changeId", change.getKey());
    velocityContext.put("coverLetter", getCoverLetter());
    velocityContext.put("fromName", getNameFor(fromId));
    velocityContext.put("patchSet", patchSet);
    velocityContext.put("patchSetInfo", patchSetInfo);
  }

  public boolean getIncludeDiff() {
    return args.settings.includeDiff;
  }

  /** Show patch set as unified difference. */
  public String getUnifiedDiff() {
    PatchList patchList;
    try {
      patchList = getPatchList();
      if (patchList.getOldId() == null) {
        // Octopus merges are not well supported for diff output by Gerrit.
        // Currently these always have a null oldId in the PatchList.
        return "[Octopus merge; cannot be formatted as a diff.]\n";
      }
    } catch (PatchListNotAvailableException e) {
      log.error("Cannot format patch", e);
      return "";
    }

    TemporaryBuffer.Heap buf =
        new TemporaryBuffer.Heap(args.settings.maximumDiffSize);
    DiffFormatter fmt = new DiffFormatter(buf);
    Repository git;
    try {
      git = args.server.openRepository(change.getProject());
    } catch (IOException e) {
      log.error("Cannot open repository to format patch", e);
      return "";
    }
    try {
      fmt.setRepository(git);
      fmt.setDetectRenames(true);
      fmt.format(patchList.getOldId(), patchList.getNewId());
      return RawParseUtils.decode(buf.toByteArray());
    } catch (IOException e) {
      if (JGitText.get().inMemoryBufferLimitExceeded.equals(e.getMessage())) {
        // Diff exceeded the configured maximum size; send without it.
        return "";
      }
      log.error("Cannot format patch", e);
      return "";
    } finally {
      fmt.release();
      git.close();
    }
  }
}
| |
/*
* Copyright 2002-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.util;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import org.springframework.lang.Nullable;
/**
* Helper class that allows for specifying a method to invoke in a declarative
* fashion, be it static or non-static.
*
* <p>Usage: Specify "targetClass"/"targetMethod" or "targetObject"/"targetMethod",
* optionally specify arguments, prepare the invoker. Afterwards, you may
* invoke the method any number of times, obtaining the invocation result.
*
* @author Colin Sampaleanu
* @author Juergen Hoeller
* @since 19.02.2004
* @see #prepare
* @see #invoke
*/
public class MethodInvoker {

	private static final Object[] EMPTY_ARGUMENTS = new Object[0];


	@Nullable
	protected Class<?> targetClass;

	@Nullable
	private Object targetObject;

	@Nullable
	private String targetMethod;

	@Nullable
	private String staticMethod;

	@Nullable
	private Object[] arguments;

	/** The method we will call. */
	@Nullable
	private Method methodObject;


	/**
	 * Set the target class on which to call the target method.
	 * Only necessary when the target method is static; else,
	 * a target object needs to be specified anyway.
	 * @see #setTargetObject
	 * @see #setTargetMethod
	 */
	public void setTargetClass(@Nullable Class<?> targetClass) {
		this.targetClass = targetClass;
	}

	/**
	 * Return the target class on which to call the target method.
	 */
	@Nullable
	public Class<?> getTargetClass() {
		return this.targetClass;
	}

	/**
	 * Set the target object on which to call the target method.
	 * Only necessary when the target method is not static;
	 * else, a target class is sufficient.
	 * @see #setTargetClass
	 * @see #setTargetMethod
	 */
	public void setTargetObject(@Nullable Object targetObject) {
		this.targetObject = targetObject;
		if (targetObject != null) {
			this.targetClass = targetObject.getClass();
		}
	}

	/**
	 * Return the target object on which to call the target method.
	 */
	@Nullable
	public Object getTargetObject() {
		return this.targetObject;
	}

	/**
	 * Set the name of the method to be invoked.
	 * Refers to either a static method or a non-static method,
	 * depending on a target object being set.
	 * @see #setTargetClass
	 * @see #setTargetObject
	 */
	public void setTargetMethod(@Nullable String targetMethod) {
		this.targetMethod = targetMethod;
	}

	/**
	 * Return the name of the method to be invoked.
	 */
	@Nullable
	public String getTargetMethod() {
		return this.targetMethod;
	}

	/**
	 * Set a fully qualified static method name to invoke,
	 * e.g. "example.MyExampleClass.myExampleMethod".
	 * Convenient alternative to specifying targetClass and targetMethod.
	 * @see #setTargetClass
	 * @see #setTargetMethod
	 */
	public void setStaticMethod(String staticMethod) {
		this.staticMethod = staticMethod;
	}

	/**
	 * Set arguments for the method invocation. If this property is not set,
	 * or the Object array is of length 0, a method with no arguments is assumed.
	 */
	public void setArguments(Object... arguments) {
		this.arguments = arguments;
	}

	/**
	 * Return the arguments for the method invocation.
	 */
	public Object[] getArguments() {
		return (this.arguments != null ? this.arguments : EMPTY_ARGUMENTS);
	}


	/**
	 * Prepare the specified method.
	 * The method can be invoked any number of times afterwards.
	 * @see #getPreparedMethod
	 * @see #invoke
	 */
	public void prepare() throws ClassNotFoundException, NoSuchMethodException {
		if (this.staticMethod != null) {
			int lastDotIndex = this.staticMethod.lastIndexOf('.');
			// Fix: lastIndexOf can return at most length() - 1, so comparing
			// against length() made the trailing-dot guard unreachable.
			if (lastDotIndex == -1 || lastDotIndex == this.staticMethod.length() - 1) {
				throw new IllegalArgumentException(
						"staticMethod must be a fully qualified class plus method name: " +
						"e.g. 'example.MyExampleClass.myExampleMethod'");
			}
			String className = this.staticMethod.substring(0, lastDotIndex);
			String methodName = this.staticMethod.substring(lastDotIndex + 1);
			this.targetClass = resolveClassName(className);
			this.targetMethod = methodName;
		}

		Class<?> targetClass = getTargetClass();
		String targetMethod = getTargetMethod();
		Assert.notNull(targetClass, "Either 'targetClass' or 'targetObject' is required");
		Assert.notNull(targetMethod, "Property 'targetMethod' is required");

		Object[] arguments = getArguments();
		Class<?>[] argTypes = new Class<?>[arguments.length];
		for (int i = 0; i < arguments.length; ++i) {
			argTypes[i] = (arguments[i] != null ? arguments[i].getClass() : Object.class);
		}

		// Try to get the exact method first.
		try {
			this.methodObject = targetClass.getMethod(targetMethod, argTypes);
		}
		catch (NoSuchMethodException ex) {
			// Just rethrow exception if we can't get any match.
			this.methodObject = findMatchingMethod();
			if (this.methodObject == null) {
				throw ex;
			}
		}
	}

	/**
	 * Resolve the given class name into a Class.
	 * <p>The default implementations uses {@code ClassUtils.forName},
	 * using the thread context class loader.
	 * @param className the class name to resolve
	 * @return the resolved Class
	 * @throws ClassNotFoundException if the class name was invalid
	 */
	protected Class<?> resolveClassName(String className) throws ClassNotFoundException {
		return ClassUtils.forName(className, ClassUtils.getDefaultClassLoader());
	}

	/**
	 * Find a matching method with the specified name for the specified arguments.
	 * @return a matching method, or {@code null} if none
	 * @see #getTargetClass()
	 * @see #getTargetMethod()
	 * @see #getArguments()
	 */
	@Nullable
	protected Method findMatchingMethod() {
		String targetMethod = getTargetMethod();
		Object[] arguments = getArguments();
		int argCount = arguments.length;

		Class<?> targetClass = getTargetClass();
		Assert.state(targetClass != null, "No target class set");
		Method[] candidates = ReflectionUtils.getAllDeclaredMethods(targetClass);
		int minTypeDiffWeight = Integer.MAX_VALUE;
		Method matchingMethod = null;

		for (Method candidate : candidates) {
			if (candidate.getName().equals(targetMethod)) {
				if (candidate.getParameterCount() == argCount) {
					Class<?>[] paramTypes = candidate.getParameterTypes();
					int typeDiffWeight = getTypeDifferenceWeight(paramTypes, arguments);
					if (typeDiffWeight < minTypeDiffWeight) {
						minTypeDiffWeight = typeDiffWeight;
						matchingMethod = candidate;
					}
				}
			}
		}

		return matchingMethod;
	}

	/**
	 * Return the prepared Method object that will be invoked.
	 * <p>Can for example be used to determine the return type.
	 * @return the prepared Method object (never {@code null})
	 * @throws IllegalStateException if the invoker hasn't been prepared yet
	 * @see #prepare
	 * @see #invoke
	 */
	public Method getPreparedMethod() throws IllegalStateException {
		if (this.methodObject == null) {
			throw new IllegalStateException("prepare() must be called prior to invoke() on MethodInvoker");
		}
		return this.methodObject;
	}

	/**
	 * Return whether this invoker has been prepared already,
	 * i.e. whether it allows access to {@link #getPreparedMethod()} already.
	 */
	public boolean isPrepared() {
		return (this.methodObject != null);
	}

	/**
	 * Invoke the specified method.
	 * <p>The invoker needs to have been prepared before.
	 * @return the object (possibly null) returned by the method invocation,
	 * or {@code null} if the method has a void return type
	 * @throws InvocationTargetException if the target method threw an exception
	 * @throws IllegalAccessException if the target method couldn't be accessed
	 * @see #prepare
	 */
	@Nullable
	public Object invoke() throws InvocationTargetException, IllegalAccessException {
		// In the static case, target will simply be {@code null}.
		Object targetObject = getTargetObject();
		Method preparedMethod = getPreparedMethod();
		if (targetObject == null && !Modifier.isStatic(preparedMethod.getModifiers())) {
			throw new IllegalArgumentException("Target method must not be non-static without a target");
		}
		ReflectionUtils.makeAccessible(preparedMethod);
		return preparedMethod.invoke(targetObject, getArguments());
	}


	/**
	 * Algorithm that judges the match between the declared parameter types of a candidate method
	 * and a specific list of arguments that this method is supposed to be invoked with.
	 * <p>Determines a weight that represents the class hierarchy difference between types and
	 * arguments. A direct match, i.e. type Integer → arg of class Integer, does not increase
	 * the result - all direct matches means weight 0. A match between type Object and arg of
	 * class Integer would increase the weight by 2, due to the superclass 2 steps up in the
	 * hierarchy (i.e. Object) being the last one that still matches the required type Object.
	 * Type Number and class Integer would increase the weight by 1 accordingly, due to the
	 * superclass 1 step up the hierarchy (i.e. Number) still matching the required type Number.
	 * Therefore, with an arg of type Integer, a constructor (Integer) would be preferred to a
	 * constructor (Number) which would in turn be preferred to a constructor (Object).
	 * All argument weights get accumulated.
	 * <p>Note: This is the algorithm used by MethodInvoker itself and also the algorithm
	 * used for constructor and factory method selection in Spring's bean container (in case
	 * of lenient constructor resolution which is the default for regular bean definitions).
	 * @param paramTypes the parameter types to match
	 * @param args the arguments to match
	 * @return the accumulated weight for all arguments
	 */
	public static int getTypeDifferenceWeight(Class<?>[] paramTypes, Object[] args) {
		int result = 0;
		for (int i = 0; i < paramTypes.length; i++) {
			if (!ClassUtils.isAssignableValue(paramTypes[i], args[i])) {
				return Integer.MAX_VALUE;
			}
			if (args[i] != null) {
				Class<?> paramType = paramTypes[i];
				Class<?> superClass = args[i].getClass().getSuperclass();
				while (superClass != null) {
					if (paramType.equals(superClass)) {
						result = result + 2;
						superClass = null;
					}
					else if (ClassUtils.isAssignable(paramType, superClass)) {
						result = result + 2;
						superClass = superClass.getSuperclass();
					}
					else {
						superClass = null;
					}
				}
				if (paramType.isInterface()) {
					result = result + 1;
				}
			}
		}
		return result;
	}

}
| |
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, vJAXB 2.1.10 in JDK 6
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2011.06.04 at 07:58:30 PM BST
//
package pubmed.openAccess.jaxb.generated;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElementRef;
import javax.xml.bind.annotation.XmlElementRefs;
import javax.xml.bind.annotation.XmlMixed;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlType;
import elsevier.jaxb.math.mathml.Math;
/**
* <p>Java class for anonymous complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <choice maxOccurs="unbounded" minOccurs="0">
* <element ref="{}email"/>
* <element ref="{}ext-link"/>
* <element ref="{}uri"/>
* <element ref="{}inline-supplementary-material"/>
* <element ref="{}related-article"/>
* <element ref="{}related-object"/>
* <element ref="{}hr"/>
* <element ref="{}bold"/>
* <element ref="{}italic"/>
* <element ref="{}monospace"/>
* <element ref="{}overline"/>
* <element ref="{}overline-start"/>
* <element ref="{}overline-end"/>
* <element ref="{}roman"/>
* <element ref="{}sans-serif"/>
* <element ref="{}sc"/>
* <element ref="{}strike"/>
* <element ref="{}underline"/>
* <element ref="{}underline-start"/>
* <element ref="{}underline-end"/>
* <element ref="{}alternatives"/>
* <element ref="{}inline-graphic"/>
* <element ref="{}private-char"/>
* <element ref="{}chem-struct"/>
* <element ref="{}inline-formula"/>
* <element ref="{}tex-math"/>
* <element ref="{http://www.w3.org/1998/Math/MathML}math"/>
* <element ref="{}abbrev"/>
* <element ref="{}milestone-end"/>
* <element ref="{}milestone-start"/>
* <element ref="{}named-content"/>
* <element ref="{}styled-content"/>
* <element ref="{}fn"/>
* <element ref="{}target"/>
* <element ref="{}xref"/>
* <element ref="{}sub"/>
* <element ref="{}sup"/>
* <element ref="{}x"/>
* </choice>
* <attribute name="content-type" type="{http://www.w3.org/2001/XMLSchema}anySimpleType" />
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
    "content"
})
@XmlRootElement(name = "conf-loc")
public class ConfLoc {
    @XmlElementRefs({
        @XmlElementRef(name = "related-article", type = RelatedArticle.class),
        @XmlElementRef(name = "underline-start", type = UnderlineStart.class),
        @XmlElementRef(name = "hr", type = Hr.class),
        @XmlElementRef(name = "xref", type = Xref.class),
        @XmlElementRef(name = "inline-formula", type = InlineFormula.class),
        @XmlElementRef(name = "roman", type = Roman.class),
        @XmlElementRef(name = "fn", type = Fn.class),
        @XmlElementRef(name = "italic", type = Italic.class),
        @XmlElementRef(name = "private-char", type = PrivateChar.class),
        @XmlElementRef(name = "related-object", type = RelatedObject.class),
        @XmlElementRef(name = "overline", type = Overline.class),
        @XmlElementRef(name = "email", type = Email.class),
        @XmlElementRef(name = "alternatives", type = Alternatives.class),
        @XmlElementRef(name = "chem-struct", type = ChemStruct.class),
        @XmlElementRef(name = "inline-graphic", type = InlineGraphic.class),
        @XmlElementRef(name = "uri", type = Uri.class),
        @XmlElementRef(name = "x", type = X.class),
        @XmlElementRef(name = "styled-content", type = StyledContent.class),
        @XmlElementRef(name = "overline-end", type = OverlineEnd.class),
        @XmlElementRef(name = "strike", type = Strike.class),
        @XmlElementRef(name = "inline-supplementary-material", type = InlineSupplementaryMaterial.class),
        @XmlElementRef(name = "milestone-start", type = MilestoneStart.class),
        @XmlElementRef(name = "abbrev", type = Abbrev.class),
        @XmlElementRef(name = "target", type = Target.class),
        @XmlElementRef(name = "monospace", type = Monospace.class),
        @XmlElementRef(name = "sup", type = Sup.class),
        @XmlElementRef(name = "milestone-end", type = MilestoneEnd.class),
        @XmlElementRef(name = "ext-link", type = ExtLink.class),
        @XmlElementRef(name = "underline-end", type = UnderlineEnd.class),
        @XmlElementRef(name = "bold", type = Bold.class),
        @XmlElementRef(name = "sub", type = Sub.class),
        @XmlElementRef(name = "sans-serif", type = SansSerif.class),
        @XmlElementRef(name = "math", namespace = "http://www.w3.org/1998/Math/MathML", type = Math.class),
        @XmlElementRef(name = "named-content", type = NamedContent.class),
        @XmlElementRef(name = "underline", type = Underline.class),
        @XmlElementRef(name = "sc", type = Sc.class),
        @XmlElementRef(name = "tex-math", type = TexMath.class),
        @XmlElementRef(name = "overline-start", type = OverlineStart.class)
    })
    @XmlMixed
    protected List<Object> content;
    @XmlAttribute(name = "content-type")
    @XmlSchemaType(name = "anySimpleType")
    protected String contentType;
    /**
     * Returns the live, mutable list backing the mixed content of this
     * {@code conf-loc} element. The list is created lazily on first access,
     * and mutations made to the returned list are reflected directly in this
     * JAXB object — which is why no corresponding <CODE>set</CODE> method
     * exists. To add an item, simply do {@code getContent().add(newItem)}.
     *
     * <p>The list may contain {@link String} text nodes as well as instances
     * of any of the inline element types enumerated in the field's
     * {@code @XmlElementRefs} annotation (formatting elements such as
     * {@link Bold}, {@link Italic}, {@link Sub}, {@link Sup}; linking
     * elements such as {@link Xref}, {@link ExtLink}, {@link Uri},
     * {@link Email}; math elements such as {@link Math}, {@link TexMath},
     * {@link InlineFormula}; and milestone/structural elements such as
     * {@link MilestoneStart}, {@link MilestoneEnd}, {@link NamedContent},
     * {@link StyledContent}).
     *
     * @return the live content list, never {@code null}
     */
    public List<Object> getContent() {
        if (this.content == null) {
            this.content = new ArrayList<Object>();
        }
        return this.content;
    }
    /**
     * Returns the value of the {@code content-type} attribute.
     *
     * @return the content type, or {@code null} if the attribute is absent
     */
    public String getContentType() {
        return this.contentType;
    }
    /**
     * Sets the value of the {@code content-type} attribute.
     *
     * @param value the new content type; may be {@code null}
     */
    public void setContentType(String value) {
        this.contentType = value;
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.datatorrent.demos.mroperator;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import javax.validation.constraints.Min;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.serializer.Deserializer;
import org.apache.hadoop.io.serializer.SerializationFactory;
import org.apache.hadoop.io.serializer.Serializer;
import org.apache.hadoop.mapred.Counters;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.InputFormat;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.KeyValueTextInputFormat;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.RecordReader;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.TextInputFormat;
import com.datatorrent.api.Context.OperatorContext;
import com.datatorrent.api.DefaultOutputPort;
import com.datatorrent.api.DefaultPartition;
import com.datatorrent.api.InputOperator;
import com.datatorrent.api.Partitioner;
import com.datatorrent.demos.mroperator.ReporterImpl.ReporterType;
import com.datatorrent.lib.util.KeyHashValPair;
/**
* <p>
* MapOperator class.
* </p>
*
* @since 0.9.0
*/
@SuppressWarnings({ "unchecked"})
public class MapOperator<K1, V1, K2, V2> implements InputOperator, Partitioner<MapOperator<K1, V1, K2, V2>>
{
  private static final Logger logger = LoggerFactory.getLogger(MapOperator.class);
  // Input directory scanned for splits when partitions are defined.
  private String dirName;
  // Ensures the (operatorId, +1) bookkeeping tuple is emitted exactly once, in the first window.
  private boolean emitPartitioningCountOnce = false;
  // Ensures the (operatorId, -1) end-of-input tuple is emitted exactly once.
  private boolean emitLastCountOnce = false;
  private int operatorId;
  private Class<? extends InputFormat<K1, V1>> inputFormatClass;
  private transient InputFormat<K1, V1> inputFormat;
  private transient InputSplit inputSplit;
  private Class<? extends InputSplit> inputSplitClass;
  // Hadoop-serialized form of this partition's InputSplit. Written in definePartitions()
  // and deserialized again in setup(); deliberately non-transient so the assigned split
  // survives operator serialization when partitions are deployed.
  private ByteArrayOutputStream outstream = new ByteArrayOutputStream();
  private transient RecordReader<K1, V1> reader;
  // True once the record reader has been exhausted.
  private boolean emittedAll = false;
  /** Emits (operatorId, +1) on startup and (operatorId, -1) at end-of-input for downstream bookkeeping. */
  public final transient DefaultOutputPort<KeyHashValPair<Integer, Integer>> outputCount = new DefaultOutputPort<KeyHashValPair<Integer, Integer>>();
  /** Emits the key/value pairs produced by the map (and optional combine) function. */
  public final transient DefaultOutputPort<KeyHashValPair<K2, V2>> output = new DefaultOutputPort<KeyHashValPair<K2, V2>>();
  private transient JobConf jobConf;
  @Min(1)
  private int partitionCount = 1;
  public Class<? extends InputSplit> getInputSplitClass()
  {
    return inputSplitClass;
  }
  public void setInputSplitClass(Class<? extends InputSplit> inputSplitClass)
  {
    this.inputSplitClass = inputSplitClass;
  }
  public Class<? extends InputFormat<K1, V1>> getInputFormatClass()
  {
    return inputFormatClass;
  }
  public void setInputFormatClass(Class<? extends InputFormat<K1, V1>> inputFormatClass)
  {
    this.inputFormatClass = inputFormatClass;
  }
  public String getDirName()
  {
    return dirName;
  }
  public void setDirName(String dirName)
  {
    this.dirName = dirName;
  }
  public int getPartitionCount()
  {
    return partitionCount;
  }
  public void setPartitionCount(int partitionCount)
  {
    this.partitionCount = partitionCount;
  }
  @Override
  public void beginWindow(long windowId)
  {
    if (!emitPartitioningCountOnce) {
      // Announce this partition once so the counting operator knows how many mappers exist.
      outputCount.emit(new KeyHashValPair<Integer, Integer>(operatorId, 1));
      emitPartitioningCountOnce = true;
    }
    if (reader == null) {
      // Safety net: setup() normally creates the reader; retry here if it is still missing.
      // NOTE(review): this builds a fresh JobConf rather than reusing the configured
      // jobConf (which includes configFile resources) — confirm that is intended.
      try {
        reader = inputFormat.getRecordReader(inputSplit, new JobConf(new Configuration()), reporter);
      } catch (IOException e) {
        logger.info("error getting record reader {}", e.getMessage());
      }
    }
  }
  @Override
  public void teardown()
  {
  }
  /**
   * Deserializes the InputSplit assigned in definePartitions(), creates the record
   * reader, and instantiates/configures the map and optional combine functions.
   *
   * @throws RuntimeException if the input format, split, reader, mapper or combiner
   *         cannot be created — continuing without them would only defer the failure.
   */
  @Override
  public void setup(OperatorContext context)
  {
    if (context != null) {
      operatorId = context.getId();
    }
    reporter = new ReporterImpl(ReporterType.Mapper, new Counters());
    outputCollector = new OutputCollectorImpl<K2, V2>();
    Configuration conf = new Configuration();
    try {
      inputFormat = inputFormatClass.newInstance();
      SerializationFactory serializationFactory = new SerializationFactory(conf);
      // Recover the InputSplit that definePartitions() serialized into outstream.
      Deserializer splitDeserializer = serializationFactory.getDeserializer(inputSplitClass);
      splitDeserializer.open(new ByteArrayInputStream(outstream.toByteArray()));
      inputSplit = (InputSplit)splitDeserializer.deserialize(null);
      ((ReporterImpl)reporter).setInputSplit(inputSplit);
      reader = inputFormat.getRecordReader(inputSplit, new JobConf(conf), reporter);
    } catch (Exception e) {
      logger.info("failed to initialize inputformat obj {}", inputFormat);
      throw new RuntimeException(e);
    }
    // Optional extra Hadoop configuration, looked up on the classpath.
    InputStream stream = null;
    if (configFile != null && configFile.length() > 0) {
      stream = ClassLoader.getSystemResourceAsStream("/" + configFile);
      if (stream == null) {
        stream = ClassLoader.getSystemResourceAsStream(configFile);
      }
    }
    if (stream != null) {
      conf.addResource(stream);
    }
    jobConf = new JobConf(conf);
    if (mapClass != null) {
      try {
        mapObject = mapClass.newInstance();
      } catch (Exception e) {
        // Fail fast with the cause attached: the original code swallowed this and then
        // hit a NullPointerException on mapObject.configure(jobConf) below.
        logger.info("can't instantiate object {}", e.getMessage());
        throw new RuntimeException(e);
      }
      mapObject.configure(jobConf);
    }
    if (combineClass != null) {
      try {
        combineObject = combineClass.newInstance();
      } catch (Exception e) {
        // Same fail-fast rationale as for mapObject above.
        logger.info("can't instantiate object {}", e.getMessage());
        throw new RuntimeException(e);
      }
      combineObject.configure(jobConf);
    }
  }
  /**
   * Reads one record per call, feeds it to the map function and — when no combiner
   * is configured — flushes the collected output immediately. With a combiner, the
   * collected pairs are held back and reduced in endWindow().
   */
  @Override
  public void emitTuples()
  {
    if (!emittedAll) {
      try {
        K1 key = reader.createKey();
        V1 val = reader.createValue();
        emittedAll = !reader.next(key, val);
        if (!emittedAll) {
          KeyHashValPair<K1, V1> keyValue = new KeyHashValPair<K1, V1>(key, val);
          mapObject.map(keyValue.getKey(), keyValue.getValue(), outputCollector, reporter);
          if (combineObject == null) {
            List<KeyHashValPair<K2, V2>> list = ((OutputCollectorImpl<K2, V2>)outputCollector).getList();
            for (KeyHashValPair<K2, V2> e : list) {
              output.emit(e);
            }
            list.clear();
          }
        }
      } catch (IOException ex) {
        logger.debug(ex.toString());
        throw new RuntimeException(ex);
      }
    }
  }
  /**
   * Runs the combiner (if any) over the window's buffered map output, emits the
   * combined pairs, and signals end-of-input once the reader is exhausted.
   */
  @Override
  public void endWindow()
  {
    List<KeyHashValPair<K2, V2>> list = ((OutputCollectorImpl<K2, V2>)outputCollector).getList();
    if (combineObject != null) {
      // Group this window's map output by key so the combiner sees an iterator per key.
      Map<K2, List<V2>> cacheObject = new HashMap<K2, List<V2>>();
      for (KeyHashValPair<K2, V2> tuple : list) {
        List<V2> cacheList = cacheObject.get(tuple.getKey());
        if (cacheList == null) {
          cacheList = new ArrayList<V2>();
          cacheList.add(tuple.getValue());
          cacheObject.put(tuple.getKey(), cacheList);
        } else {
          cacheList.add(tuple.getValue());
        }
      }
      list.clear();
      OutputCollector<K2, V2> tempOutputCollector = new OutputCollectorImpl<K2, V2>();
      for (Map.Entry<K2, List<V2>> e : cacheObject.entrySet()) {
        try {
          combineObject.reduce(e.getKey(), e.getValue().iterator(), tempOutputCollector, reporter);
        } catch (IOException e1) {
          logger.info(e1.getMessage());
        }
      }
      list = ((OutputCollectorImpl<K2, V2>)tempOutputCollector).getList();
      for (KeyHashValPair<K2, V2> e : list) {
        output.emit(e);
      }
    }
    if (!emitLastCountOnce && emittedAll) {
      // Tell the counter operator this mapper is done.
      outputCount.emit(new KeyHashValPair<Integer, Integer>(operatorId, -1));
      logger.info("emitting end of file {}", new KeyHashValPair<Integer, Integer>(operatorId, -1));
      emitLastCountOnce = true;
    }
    list.clear();
  }
  /**
   * Computes the input splits for {@code path} using the configured input format.
   * TextInputFormat and KeyValueTextInputFormat additionally need configure() called.
   */
  private InputSplit[] getSplits(JobConf conf, int numSplits, String path) throws Exception
  {
    FileInputFormat.setInputPaths(conf, new Path(path));
    if (inputFormat == null) {
      inputFormat = inputFormatClass.newInstance();
      String inputFormatClassName = inputFormatClass.getName();
      if (inputFormatClassName.equals("org.apache.hadoop.mapred.TextInputFormat")) {
        ((TextInputFormat)inputFormat).configure(conf);
      } else if (inputFormatClassName.equals("org.apache.hadoop.mapred.KeyValueTextInputFormat")) {
        ((KeyValueTextInputFormat)inputFormat).configure(conf);
      }
    }
    return inputFormat.getSplits(conf, numSplits);
  }
  @Override
  public void partitioned(Map<Integer, Partition<MapOperator<K1, V1, K2, V2>>> partitions)
  {
  }
  /**
   * Creates one operator partition per input split (up to partitionCount splits),
   * serializing each split into the partition's {@code outstream} so setup() can
   * restore it after deployment. Reuses the incoming partition instances first and
   * creates new operators for any remaining splits.
   *
   * <p>NOTE(review): returns {@code null} when {@code outstream} is already
   * populated — presumably "already partitioned, keep as is"; confirm the engine
   * treats a null return that way.
   */
  @SuppressWarnings("rawtypes")
  @Override
  public Collection<Partition<MapOperator<K1, V1, K2, V2>>> definePartitions(Collection<Partition<MapOperator<K1, V1, K2, V2>>> partitions, PartitioningContext context)
  {
    int tempPartitionCount = partitionCount;
    Collection c = partitions;
    Collection<Partition<MapOperator<K1, V1, K2, V2>>> operatorPartitions = c;
    Partition<MapOperator<K1, V1, K2, V2>> template;
    Iterator<Partition<MapOperator<K1, V1, K2, V2>>> itr = operatorPartitions.iterator();
    template = itr.next();
    Configuration conf = new Configuration();
    SerializationFactory serializationFactory = new SerializationFactory(conf);
    if (outstream.size() == 0) {
      InputSplit[] splits;
      try {
        splits = getSplits(new JobConf(conf), tempPartitionCount, template.getPartitionedInstance().getDirName());
      } catch (Exception e1) {
        logger.info(" can't get splits {}", e1.getMessage());
        throw new RuntimeException(e1);
      }
      Collection<Partition<MapOperator<K1, V1, K2, V2>>> operList = new ArrayList<Partition<MapOperator<K1, V1, K2, V2>>>();
      itr = operatorPartitions.iterator();
      int size = splits.length;
      Serializer keySerializer = serializationFactory.getSerializer(splits[0].getClass());
      // First consume existing partitions, assigning one split to each.
      while (size > 0 && itr.hasNext()) {
        Partition<MapOperator<K1, V1, K2, V2>> p = itr.next();
        MapOperator<K1, V1, K2, V2> opr = p.getPartitionedInstance();
        opr.setInputFormatClass(inputFormatClass);
        opr.setMapClass(mapClass);
        opr.setCombineClass(combineClass);
        opr.setConfigFile(configFile);
        try {
          keySerializer.open(opr.getOutstream());
          keySerializer.serialize(splits[size - 1]);
          opr.setInputSplitClass(splits[size - 1].getClass());
        } catch (IOException e) {
          logger.info("error while serializing {}", e.getMessage());
        }
        size--;
        operList.add(p);
      }
      // Then create fresh operators for any splits left over.
      while (size > 0) {
        MapOperator<K1, V1, K2, V2> opr = new MapOperator<K1, V1, K2, V2>();
        opr.setInputFormatClass(inputFormatClass);
        opr.setMapClass(mapClass);
        opr.setCombineClass(combineClass);
        opr.setConfigFile(configFile);
        try {
          keySerializer.open(opr.getOutstream());
          keySerializer.serialize(splits[size - 1]);
          opr.setInputSplitClass(splits[size - 1].getClass());
        } catch (IOException e) {
          logger.info("error while serializing {}", e.getMessage());
        }
        size--;
        operList.add(new DefaultPartition<MapOperator<K1, V1, K2, V2>>(opr));
      }
      try {
        keySerializer.close();
      } catch (IOException e) {
        throw new RuntimeException(e);
      }
      return operList;
    }
    return null;
  }
  public ByteArrayOutputStream getOutstream()
  {
    return outstream;
  }
  public void setOutstream(ByteArrayOutputStream outstream)
  {
    this.outstream = outstream;
  }
  /**
   * adding map code
   */
  private Class<? extends Mapper<K1, V1, K2, V2>> mapClass;
  private Class<? extends Reducer<K2, V2, K2, V2>> combineClass;
  private transient Mapper<K1, V1, K2, V2> mapObject;
  private transient Reducer<K2, V2, K2, V2> combineObject;
  private transient Reporter reporter;
  // Optional classpath resource with additional Hadoop configuration.
  private String configFile;
  public String getConfigFile()
  {
    return configFile;
  }
  public void setConfigFile(String configFile)
  {
    this.configFile = configFile;
  }
  private transient OutputCollector<K2, V2> outputCollector;
  public Class<? extends Mapper<K1, V1, K2, V2>> getMapClass()
  {
    return mapClass;
  }
  public void setMapClass(Class<? extends Mapper<K1, V1, K2, V2>> mapClass)
  {
    this.mapClass = mapClass;
  }
  public Class<? extends Reducer<K2, V2, K2, V2>> getCombineClass()
  {
    return combineClass;
  }
  public void setCombineClass(Class<? extends Reducer<K2, V2, K2, V2>> combineClass)
  {
    this.combineClass = combineClass;
  }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.