gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/*
* Copyright 2013 Daniel Baumann
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package at.db.rc;
import static at.db.rc.Debugger.debug;
import static at.db.rc.Debugger.error;
import static at.db.rc.Debugger.info;
import static at.db.rc.Debugger.trace;
import static at.db.rc.Translations.MNI_CLOSE;
import static at.db.rc.Translations.MNI_INSTALL_ANDROID_APP;
import static at.db.rc.Translations.MNI_KEYBOARD_DEUTSCH;
import static at.db.rc.Translations.MNI_KEYBOARD_ENGLISH;
import static at.db.rc.Translations.MNI_KEYBOARD_LANGUAGE;
import static at.db.rc.Translations.MNI_SHOW_SETTINGS_QR_CODE;
import static at.db.rc.Translations.MSG_DESC_REMOTE_CONTROL_SETTINGS_SCAN_SETTINGS;
import static at.db.rc.Translations.MSG_ERROR_AN_ERROR_OCCURED;
import static at.db.rc.Translations.MSG_ERROR_COULD_NOT_CREATE_CONNECTION_URL;
import static at.db.rc.Translations.MSG_ERROR_PORT_IN_USE;
import static at.db.rc.Translations.MSG_INFO_0_CONNECTIONS;
import static at.db.rc.Translations.MSG_INFO_1_CONNECTION;
import static at.db.rc.Translations.MSG_INFO_APP_STARTING_UP;
import static at.db.rc.Translations.MSG_INFO_N_CONNECTIONS;
import java.awt.AWTException;
import java.awt.CheckboxMenuItem;
import java.awt.Color;
import java.awt.Font;
import java.awt.Graphics2D;
import java.awt.Menu;
import java.awt.MenuItem;
import java.awt.PopupMenu;
import java.awt.SystemTray;
import java.awt.TrayIcon;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.ItemEvent;
import java.awt.event.ItemListener;
import java.awt.image.BufferedImage;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.BindException;
import java.net.DatagramSocket;
import java.net.Inet4Address;
import java.net.InetAddress;
import java.net.NetworkInterface;
import java.net.ServerSocket;
import java.net.Socket;
import java.net.SocketException;
import java.net.URL;
import java.net.URLConnection;
import java.security.KeyPair;
import java.security.KeyPairGenerator;
import java.security.SecureRandom;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import javax.imageio.ImageIO;
import javax.net.ServerSocketFactory;
import javax.swing.JOptionPane;
import javax.swing.SwingUtilities;
import at.db.rc.FastClient.EndPointType;
import at.db.rc.keyboard.IKeyboardLayout;
import at.db.rc.keyboard.KeyboardLayoutLinuxDe;
import at.db.rc.keyboard.KeyboardLayoutLinuxEn;
import at.db.rc.keyboard.KeyboardLayoutWinDe;
import at.db.rc.keyboard.KeyboardLayoutWinEn;
import at.db.rc.server.discoverable.InteruptableSocketThread;
import at.db.rc.server.discoverable.ServerDiscovery;
import at.db.rc.server.discoverable.UdpClient;
import at.db.rc.ui.InitFrame;
import at.db.rc.ui.QrCodeWindow;
import com.google.zxing.qrcode.decoder.ErrorCorrectionLevel;
import com.google.zxing.qrcode.encoder.ByteMatrix;
import com.google.zxing.qrcode.encoder.Encoder;
import com.google.zxing.qrcode.encoder.QRCode;
public class RemoteControlServer implements Runnable, ITerminationListener {
public static final String serverVersion = "v0.3";
private TrayIcon trayIcon;
private ServerSocket serversocket;
private Set<FastClient> endPoints = new HashSet<FastClient>();
private UdpClient udpClient = null;
private int port = Parameters.DEFAULT_PORT;
private int maxConnections = 0;
private String keyMd5;
private KeyPair keypair;
private Integer connections = 0;
private BufferedImage originalImage;
private QrCodeWindow connectionUrlWindow;
private BufferedImage imgConnectionUrlQr;
private QrCodeWindow downloadUrlWindow;
private BufferedImage imgDownloadUrlQr;
private InitFrame initFrame = null;
private IKeyboardLayout keyboardLayout;
private Thread discoveryThread = null;
private Map<String, CheckboxMenuItem> keyboardCheckBoxes = new HashMap<String, CheckboxMenuItem>();
private Menu mniKeyboardLanguage;
private ServerDiscovery serverDiscovery;
public static void main(String[] args) {
Debugger.init(Debugger.NONE, false);
Translations.init();
info("RemoteControlServer.main()");
new Thread(new RemoteControlServer()).start();
}
public RemoteControlServer() {
info("RemoteControlServer.RemoteControlServer()");
try {
initFrame = new InitFrame(MSG_INFO_APP_STARTING_UP);
initFrame.showWindow();
} catch (Exception e) {
error(e);
}
try {
tryGetPort();
initializeUdpClient();
generateKeyPair();
initializeNetworking();
startServerDiscoveryThread();
SwingUtilities.invokeLater(new Runnable() {
public void run() {
createAndShowGUI();
if (initFrame != null) {
initFrame.closeWindow();
}
showUriQrCode();
}
});
} catch (BindException e) {
error(e);
initFrame = new InitFrame(MSG_ERROR_PORT_IN_USE);
initFrame.showWindow();
debug("exiting application");
try {
Thread.sleep(5000);
} catch (InterruptedException ie) {
error(ie);
}
System.exit(0);
} catch (Exception e) {
error(e);
}
}
private void tryGetPort() {
String portProperty = System.getProperty("port");
if (portProperty != null) {
try {
int portInt = Integer.parseInt(portProperty);
if ((portInt > 0) && (portInt <= Character.MAX_VALUE)) {
port = portInt;
} else {
Debugger.error("port property can not be used as port: " + portProperty);
}
} catch (NumberFormatException e) {
Debugger.error("could not parse port property " + portProperty);
}
}
}
private void initializeUdpClient() {
debug("RemoteControlServer.initializeUdpClient()");
ClientHandler clientHandler = new ClientHandler();
clientHandler.setKeyboardLayout(keyboardLayout);
udpClient = new UdpClient(clientHandler);
}
private void startServerDiscoveryThread() {
info("RemoteControlServer.startServerDiscoveryThread()");
try {
DatagramSocket socket = new DatagramSocket(port);
serverDiscovery = new ServerDiscovery(socket, udpClient);
discoveryThread = new InteruptableSocketThread(serverDiscovery);
discoveryThread.start();
debug("discoveryThread started");
} catch (SocketException e) {
error(e);
}
}
private void createAndShowGUI() {
info("RemoteControlServer.createAndShowGUI()");
try {
trayIcon = null;
if (SystemTray.isSupported()) {
SystemTray tray = SystemTray.getSystemTray();
PopupMenu ppmMainMenu = new PopupMenu();
URL imageUrl = RemoteControlServer.class.getResource("/at/db/rc/remote_control.png");
originalImage = ImageIO.read(imageUrl);
trayIcon = new TrayIcon(originalImage, MSG_INFO_0_CONNECTIONS, ppmMainMenu);
BufferedImage image = updateImage(originalImage, 0);
trayIcon.setImage(image);
debug("trayIcon created");
// create menu item for the default action
MenuItem mniClose = new MenuItem(MNI_CLOSE);
mniClose.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
try {
debug("RemoteControlServer.createAndShowGUI().new mniClose.actionPerformed()");
closeAllConnections();
closeAllWindows();
debug("exiting application");
} catch (Exception ex) {
error(ex);
}
System.exit(0);
}
});
ppmMainMenu.add(mniClose);
MenuItem mniUriQrCode = new MenuItem(MNI_SHOW_SETTINGS_QR_CODE);
mniUriQrCode.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
showUriQrCode();
}
});
ppmMainMenu.add(mniUriQrCode);
MenuItem mniAppQrCode = new MenuItem(MNI_INSTALL_ANDROID_APP);
mniAppQrCode.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
showAppDownloadUrl();
}
});
ppmMainMenu.add(mniAppQrCode);
mniKeyboardLanguage = new Menu(MNI_KEYBOARD_LANGUAGE);
ppmMainMenu.add(mniKeyboardLanguage);
CheckboxMenuItem mniKeyboardEn = new CheckboxMenuItem(MNI_KEYBOARD_ENGLISH);
mniKeyboardEn.addItemListener(new ItemListener() {
@Override
public void itemStateChanged(ItemEvent e) {
updateKeyboardLanguage("en");
}
});
mniKeyboardLanguage.add(mniKeyboardEn);
keyboardCheckBoxes.put("en", mniKeyboardEn);
CheckboxMenuItem mniKeyboardDe = new CheckboxMenuItem(MNI_KEYBOARD_DEUTSCH);
mniKeyboardDe.addItemListener(new ItemListener() {
@Override
public void itemStateChanged(ItemEvent e) {
updateKeyboardLanguage("de");
}
});
mniKeyboardLanguage.add(mniKeyboardDe);
keyboardCheckBoxes.put("de", mniKeyboardDe);
updateKeyboardLanguage(System.getProperty("user.language"));
try {
tray.add(trayIcon);
} catch (AWTException e) {
error(e);
}
debug("GUI created");
} else {
error("could not create tray icon ... but server should be up");
}
} catch (Exception e) {
error(e);
}
}
protected void updateKeyboardLanguage(String language) {
debug("RemoteControlServer.updateKeyboardLanguage()");
if (mniKeyboardLanguage != null) {
for (int i = 0; i < mniKeyboardLanguage.getItemCount(); i++) {
MenuItem item = mniKeyboardLanguage.getItem(i);
if (item instanceof CheckboxMenuItem) {
((CheckboxMenuItem) item).setState(false);
}
}
}
CheckboxMenuItem checkbox = keyboardCheckBoxes.get(language);
if (checkbox != null) {
debug("keyboardCheckBox for language [" + language + "] found");
checkbox.setState(true);
}
setKeyboardLanguage(language);
}
private void setKeyboardLanguage(String language) {
debug("RemoteControlServer.setKeyboardLanguage(" + language + ")");
debug("os.name is [" + System.getProperty("os.name") + "]");
if ("de".equals(language)) {
if (System.getProperty("os.name").contains("Windows")) {
keyboardLayout = new KeyboardLayoutWinDe();
} else {
keyboardLayout = new KeyboardLayoutLinuxDe();
}
} else {
if (System.getProperty("os.name").contains("Windows")) {
keyboardLayout = new KeyboardLayoutWinEn();
} else {
keyboardLayout = new KeyboardLayoutLinuxEn();
}
}
for (FastClient client : endPoints) {
IEventHandler handler = client.getEventHandler();
if (handler instanceof ClientHandler) {
ClientHandler clientHandler = (ClientHandler) handler;
clientHandler.setKeyboardLayout(keyboardLayout);
}
}
udpClient.getEventHandler().setKeyboardLayout(keyboardLayout);
// info("os.name: " + System.getProperty("os.name"));
// info("os.version: " + System.getProperty("os.version"));
// info("user.home: " + System.getProperty("user.home"));
// info("user.country: " +
// System.getProperty("user.country"));
// info("user.language: " +
// System.getProperty("user.language"));
}
protected void closeAllWindows() {
debug("RemoteControlServer.closeAllWindows()");
if (downloadUrlWindow != null) {
downloadUrlWindow.closeWindow();
}
if (connectionUrlWindow != null) {
connectionUrlWindow.closeWindow();
}
}
protected void closeAllConnections() {
info("RemoteControlServer.closeAllConnections()");
try {
if (discoveryThread != null) {
discoveryThread.interrupt();
}
trace("disconnectiong all clients");
for (FastClient endPoint : new ArrayList<FastClient>(endPoints)) {
endPoint.disconnect();
}
trace("closing server socket");
serversocket.close();
trace("joining discoveryThread");
discoveryThread.join(5000);
} catch (Exception e) {
error(e);
}
}
protected void showAppDownloadUrl() {
info("RemoteControlServer.showAppDownloadUrl()");
if (downloadUrlWindow != null) {
debug("downloadUrlWindow already exists ... show it");
downloadUrlWindow.showWindow();
} else {
debug("creating downloadUrlWindow");
BufferedImage image = getDownloadUrlQrCode();
if (image != null) {
downloadUrlWindow = new QrCodeWindow(image, MNI_INSTALL_ANDROID_APP, null);
downloadUrlWindow.showWindow();
} else {
JOptionPane.showMessageDialog(null, MSG_ERROR_COULD_NOT_CREATE_CONNECTION_URL, MSG_ERROR_AN_ERROR_OCCURED,
JOptionPane.ERROR_MESSAGE);
}
}
}
private void showUriQrCode() {
info("RemoteControlServer.showUriQrCode()");
if (connectionUrlWindow != null) {
debug("connectionUrlWindow already exists ... show it");
connectionUrlWindow.showWindow();
} else {
debug("creating connectionUrlWindow");
BufferedImage image = getConnectionQrCode();
if (image != null) {
String message = getMessage();
connectionUrlWindow = new QrCodeWindow(image, MSG_DESC_REMOTE_CONTROL_SETTINGS_SCAN_SETTINGS, message);
connectionUrlWindow.showWindow();
} else {
JOptionPane.showMessageDialog(null, MSG_ERROR_COULD_NOT_CREATE_CONNECTION_URL, MSG_ERROR_AN_ERROR_OCCURED,
JOptionPane.ERROR_MESSAGE);
}
}
}
private String getMessage() {
try {
debug("fetching update message");
URL url = new URL("http://www.daniel-baumann.at/RemoteControlServer/" + serverVersion);
URLConnection ucon = url.openConnection();
ucon.setConnectTimeout(3000);
InputStream inputStream = ucon.getInputStream();
BufferedReader br = new BufferedReader(new InputStreamReader(inputStream));
StringBuilder buffer = new StringBuilder();
String line;
while ((line = br.readLine()) != null) {
if (buffer.length() > 0) {
buffer.append(' ');
}
buffer.append(line);
}
br.close();
String message = buffer.toString().trim();
debug("returning message [" + message + "]");
return message;
} catch (Exception e) {
debug(e);
}
debug("returning message [null]");
return null;
}
private void closeUriQrCode() {
info("RemoteControlServer.closeUriQrCode()");
if (connectionUrlWindow != null) {
debug("hiding existing connectionUrlWindow");
connectionUrlWindow.hildeWindow();
}
}
@Override
public void onTerminated(FastClient endPoint) {
info("RemoteControlServer.onTerminated()");
serverDiscovery.removeAddress(endPoint.getSocket().getInetAddress().getAddress());
endPoints.remove(endPoint);
synchronized (connections) {
connections--;
if (connections < 0) {
connections = 0;
}
if (connections == 0) {
showUriQrCode();
}
}
updateTrayMenu(connections);
}
private void updateTrayMenu(int connections) {
info("RemoteControlServer.updateTrayMenu()");
if (trayIcon != null) {
trayIcon.setToolTip((connections != 1) ? String.format(MSG_INFO_N_CONNECTIONS, connections)
: MSG_INFO_1_CONNECTION);
if (originalImage != null) {
BufferedImage image = updateImage(originalImage, connections);
trayIcon.setImage(image);
}
}
}
private BufferedImage updateImage(BufferedImage image, int connections) {
info("RemoteControlServer.updateImage()");
int w = trayIcon.getSize().width - 3;
int h = trayIcon.getSize().height - 3;
BufferedImage img = new BufferedImage(w, h, BufferedImage.TYPE_INT_ARGB);
Graphics2D g2d = img.createGraphics();
g2d.drawImage(image, 0, 0, w, h, null);
g2d.setPaint(Color.white);
g2d.setFont(new Font("Arial", Font.BOLD, 10));
String text = "" + connections;
int x = 8;
int y = 15;
g2d.drawString(text, x, y);
g2d.setPaint(Color.black);
x = 9;
y = 16;
g2d.drawString(text, x, y);
g2d.dispose();
return img;
}
private BufferedImage getConnectionQrCode() {
info("RemoteControlServer.getConnectionQrCode()");
if (imgConnectionUrlQr == null) {
imgConnectionUrlQr = getQrCode(getConnectionUrl());
}
return imgConnectionUrlQr;
}
private BufferedImage getDownloadUrlQrCode() {
info("RemoteControlServer.getDownloadUrlQrCode()");
if (imgDownloadUrlQr == null) {
imgDownloadUrlQr = getQrCode("market://search?q=pname:at.db.rc.free");
}
return imgDownloadUrlQr;
}
private BufferedImage getQrCode(String connectionUrl) {
debug("creating QR code image for [" + connectionUrl + "]");
BufferedImage qrImage = null;
try {
if (connectionUrl != null) {
QRCode qrCode = Encoder.encode(connectionUrl, ErrorCorrectionLevel.L);
ByteMatrix matrix = qrCode.getMatrix();
// generate an imgConnectionUrlQr from the byte matrix
int width = matrix.getWidth();
int height = matrix.getHeight();
byte[][] array = matrix.getArray();
int pixelSize = 10;
// create buffered imgConnectionUrlQr to draw to
qrImage = new BufferedImage(width * pixelSize, height * pixelSize, BufferedImage.TYPE_INT_RGB);
// iterate through the matrix and draw the pixels to the
// imgConnectionUrlQr
for (int y = 0; y < height; y++) {
for (int x = 0; x < width; x++) {
int grayValue = array[y][x] & 0xff;
for (int i = 0; i < pixelSize; i++) {
for (int j = 0; j < pixelSize; j++) {
qrImage.setRGB(x * pixelSize + i, y * pixelSize + j, (grayValue == 1 ? 0 : 0xFFFFFF));
}
}
}
}
} else {
qrImage = null;
}
} catch (Exception e) {
error(e);
}
return qrImage;
}
private String getConnectionUrl() {
info("RemoteControlServer.getConnectionUrl()");
try {
String hostName = null;
String hostAddress = null;
Enumeration<NetworkInterface> interfaces = NetworkInterface.getNetworkInterfaces();
boolean found = false;
debug("* checking network interfaces:");
while (interfaces.hasMoreElements()) {
NetworkInterface current = interfaces.nextElement();
debug("** current: " + current.getDisplayName() + " | " + current.getName());
if (current.isUp() && !current.isLoopback() && !current.isVirtual() && !isVmWareInterface(current)) {
Enumeration<InetAddress> addresses = current.getInetAddresses();
while (!found && addresses.hasMoreElements()) {
InetAddress address = addresses.nextElement();
debug("*** adderss: " + address);
if (!address.isLoopbackAddress() && (address instanceof Inet4Address)) {
debug("**** set as found");
found = true;
hostName = address.getHostName();
hostAddress = address.getHostAddress();
}
}
}
}
String connectionUrl = "http://remotehost/" + ((hostAddress == null) ? hostName : hostAddress) + "/" + port + "/"
+ keyMd5;
debug("connection url: " + connectionUrl);
return connectionUrl;
} catch (Exception e) {
error(e);
}
debug("connection url: null");
return null;
}
private boolean isVmWareInterface(NetworkInterface networkInterface) {
return networkInterface.getName().toLowerCase().contains("vmnet");
}
private void generateKeyPair() {
info("RemoteControlServer.generateKeyPair()");
try {
KeyPairGenerator keyGen = KeyPairGenerator.getInstance(Parameters.KEY_ALGORITHM);
SecureRandom sr = new SecureRandom();
keyGen.initialize(Parameters.RSA_KEY_LENGTH, sr);
keypair = keyGen.generateKeyPair();
keyMd5 = Conv.toHex(Conv.toMD5(keypair.getPublic().getEncoded()));
debug("md5sum = " + keyMd5);
} catch (Exception e) {
error(e);
}
}
@Override
public void run() {
info("RemoteControlServer.run()");
try {
while ((connections + 1 < maxConnections) || (maxConnections == 0)) {
Socket socket = null;
try {
debug("RemoteControlServer.run() ... accepting connection");
socket = serversocket.accept();
} catch (Exception e) {
error(e);
return;
}
closeUriQrCode();
debug("creating endpoint");
serverDiscovery.addAddress(socket.getInetAddress().getAddress());
FastClient endpoint = new FastClient(socket, EndPointType.SERVER);
endpoint.setKeypair(keypair);
ClientHandler clientHandler = new ClientHandler();
clientHandler.setKeyboardLayout(keyboardLayout);
endpoint.setEventHandler(clientHandler);
endpoint.setTerminationListener(this);
endPoints.add(endpoint);
Thread thread = new Thread(endpoint);
thread.start();
synchronized (connections) {
connections++;
}
updateTrayMenu(connections);
}
} catch (InterruptedException e) {
error(e);
} catch (ExecutionException e) {
if (e.getCause() != null) {
error(e.getCause());
} else {
error(e);
}
}
}
private void initializeNetworking() throws IOException {
info("RemoteControlServer.initializeNetworking()");
ServerSocketFactory factory = ServerSocketFactory.getDefault();
serversocket = factory.createServerSocket(port);
debug("Server running at [" + serversocket.getInetAddress() + " : " + serversocket.getLocalPort() + "].");
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.connector.jmx;
import com.facebook.presto.common.type.Type;
import com.facebook.presto.spi.ColumnHandle;
import com.facebook.presto.spi.ConnectorSession;
import com.facebook.presto.spi.ConnectorSplit;
import com.facebook.presto.spi.InMemoryRecordSet;
import com.facebook.presto.spi.NodeManager;
import com.facebook.presto.spi.RecordSet;
import com.facebook.presto.spi.connector.ConnectorRecordSetProvider;
import com.facebook.presto.spi.connector.ConnectorTransactionHandle;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import io.airlift.slice.Slice;
import javax.inject.Inject;
import javax.management.Attribute;
import javax.management.JMException;
import javax.management.MBeanServer;
import javax.management.ObjectName;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static java.util.Objects.requireNonNull;
/**
 * Presto {@link ConnectorRecordSetProvider} for the JMX connector.
 *
 * <p>Produces rows either live (reading MBean attributes from the local
 * {@link MBeanServer}) or from previously recorded history held in
 * {@link JmxHistoricalData}. Special columns (node id, object name, dump
 * timestamp) are synthesized rather than read from the MBean.
 */
public class JmxRecordSetProvider
        implements ConnectorRecordSetProvider
{
    private final MBeanServer mbeanServer;
    private final String nodeId;
    private final JmxHistoricalData jmxHistoricalData;

    @Inject
    public JmxRecordSetProvider(MBeanServer mbeanServer, NodeManager nodeManager, JmxHistoricalData jmxHistoricalData)
    {
        this.mbeanServer = requireNonNull(mbeanServer, "mbeanServer is null");
        this.nodeId = requireNonNull(nodeManager, "nodeManager is null").getCurrentNode().getNodeIdentifier();
        this.jmxHistoricalData = requireNonNull(jmxHistoricalData, "jmxHistoryHolder is null");
    }

    /**
     * Builds one row for the given MBean by reading its attributes live.
     * Values are coerced to the column's declared Java type; MBeans that
     * report a value of an unexpected type yield {@code null} ("mbeans can
     * lie about types"). Columns whose declared Java type is none of
     * boolean/long/double/Slice contribute no value (original behavior,
     * preserved).
     *
     * @param objectName     canonical MBean object name
     * @param columns        columns to materialize, in output order
     * @param entryTimestamp value for the synthetic timestamp column
     * @throws JMException if the object name is invalid or attribute
     *                     retrieval fails
     */
    public List<Object> getLiveRow(String objectName, List<? extends ColumnHandle> columns, long entryTimestamp)
            throws JMException
    {
        ImmutableMap<String, Optional<Object>> attributes = getAttributes(getColumnNames(columns), objectName);
        List<Object> row = new ArrayList<>();
        for (ColumnHandle column : columns) {
            JmxColumnHandle jmxColumn = (JmxColumnHandle) column;
            if (jmxColumn.getColumnName().equals(JmxMetadata.NODE_COLUMN_NAME)) {
                row.add(nodeId);
            }
            else if (jmxColumn.getColumnName().equals(JmxMetadata.OBJECT_NAME_NAME)) {
                row.add(objectName);
            }
            else if (jmxColumn.getColumnName().equals(JmxMetadata.TIMESTAMP_COLUMN_NAME)) {
                row.add(entryTimestamp);
            }
            else {
                Optional<Object> optionalValue = attributes.get(jmxColumn.getColumnName());
                if (optionalValue == null || !optionalValue.isPresent()) {
                    row.add(null);
                }
                else {
                    Object value = optionalValue.get();
                    Class<?> javaType = jmxColumn.getColumnType().getJavaType();
                    if (javaType == boolean.class) {
                        if (value instanceof Boolean) {
                            row.add(value);
                        }
                        else {
                            // mbeans can lie about types
                            row.add(null);
                        }
                    }
                    else if (javaType == long.class) {
                        if (value instanceof Number) {
                            row.add(((Number) value).longValue());
                        }
                        else {
                            // mbeans can lie about types
                            row.add(null);
                        }
                    }
                    else if (javaType == double.class) {
                        if (value instanceof Number) {
                            row.add(((Number) value).doubleValue());
                        }
                        else {
                            // mbeans can lie about types
                            row.add(null);
                        }
                    }
                    else if (javaType == Slice.class) {
                        row.add(toStringValue(value));
                    }
                }
            }
        }
        return row;
    }

    /**
     * Renders an attribute value destined for a varchar (Slice) column as a
     * String. Primitive and object arrays become their
     * {@link Arrays#toString} representation; anything else uses
     * {@code toString()}. Extracted from getLiveRow for readability; behavior
     * is unchanged.
     */
    private static String toStringValue(Object value)
    {
        if (value.getClass().isArray()) {
            Class<?> componentType = value.getClass().getComponentType();
            if (componentType == boolean.class) {
                return Arrays.toString((boolean[]) value);
            }
            if (componentType == byte.class) {
                return Arrays.toString((byte[]) value);
            }
            if (componentType == char.class) {
                return Arrays.toString((char[]) value);
            }
            if (componentType == double.class) {
                return Arrays.toString((double[]) value);
            }
            if (componentType == float.class) {
                return Arrays.toString((float[]) value);
            }
            if (componentType == int.class) {
                return Arrays.toString((int[]) value);
            }
            if (componentType == long.class) {
                return Arrays.toString((long[]) value);
            }
            if (componentType == short.class) {
                return Arrays.toString((short[]) value);
            }
            return Arrays.toString((Object[]) value);
        }
        return value.toString();
    }

    /**
     * Returns the record set for a split: live MBean attribute reads when the
     * table is a live-data table, otherwise the recorded history filtered to
     * the selected columns. Any {@link JMException} yields an empty result
     * rather than failing the query.
     */
    @Override
    public RecordSet getRecordSet(ConnectorTransactionHandle transaction, ConnectorSession session, ConnectorSplit split, List<? extends ColumnHandle> columns)
    {
        JmxTableHandle tableHandle = ((JmxSplit) split).getTableHandle();
        requireNonNull(columns, "columns is null");
        checkArgument(!columns.isEmpty(), "must provide at least one column");
        List<List<Object>> rows;
        try {
            if (tableHandle.isLiveData()) {
                rows = getLiveRows(tableHandle, columns);
            }
            else {
                List<Integer> selectedColumns = calculateSelectedColumns(tableHandle.getColumnHandles(), getColumnNames(columns));
                rows = tableHandle.getObjectNames().stream()
                        .flatMap(objectName -> jmxHistoricalData.getRows(objectName, selectedColumns).stream())
                        .collect(toImmutableList());
            }
        }
        catch (JMException e) {
            rows = ImmutableList.of();
        }
        return new InMemoryRecordSet(getColumnTypes(columns), rows);
    }

    /** Maps selected column names to their indexes within the table's full column list. */
    private List<Integer> calculateSelectedColumns(List<JmxColumnHandle> columnHandles, Set<String> selectedColumnNames)
    {
        ImmutableList.Builder<Integer> selectedColumns = ImmutableList.builder();
        for (int i = 0; i < columnHandles.size(); i++) {
            JmxColumnHandle column = columnHandles.get(i);
            if (selectedColumnNames.contains(column.getColumnName())) {
                selectedColumns.add(i);
            }
        }
        return selectedColumns.build();
    }

    /** Extracts the distinct column names from a list of column handles. */
    private static Set<String> getColumnNames(List<? extends ColumnHandle> columnHandles)
    {
        return columnHandles.stream()
                .map(column -> (JmxColumnHandle) column)
                .map(JmxColumnHandle::getColumnName)
                .collect(Collectors.toSet());
    }

    /** Extracts the Presto types of the given column handles, in order. */
    private static List<Type> getColumnTypes(List<? extends ColumnHandle> columnHandles)
    {
        return columnHandles.stream()
                .map(column -> (JmxColumnHandle) column)
                .map(JmxColumnHandle::getColumnType)
                .collect(Collectors.toList());
    }

    /**
     * Fetches the named attributes of one MBean as a name-to-optional-value
     * map (absent when the attribute value is null).
     *
     * @throws JMException if the object name is malformed or the MBean server
     *                     rejects the request
     */
    private ImmutableMap<String, Optional<Object>> getAttributes(Set<String> uniqueColumnNames, String name)
            throws JMException
    {
        ObjectName objectName = new ObjectName(name);
        String[] columnNamesArray = uniqueColumnNames.toArray(new String[uniqueColumnNames.size()]);
        ImmutableMap.Builder<String, Optional<Object>> attributes = ImmutableMap.builder();
        for (Attribute attribute : mbeanServer.getAttributes(objectName, columnNamesArray).asList()) {
            attributes.put(attribute.getName(), Optional.ofNullable(attribute.getValue()));
        }
        return attributes.build();
    }

    /** Builds one live row per MBean in the table (timestamp column fixed at 0). */
    private List<List<Object>> getLiveRows(JmxTableHandle tableHandle, List<? extends ColumnHandle> columns)
            throws JMException
    {
        ImmutableList.Builder<List<Object>> rows = ImmutableList.builder();
        for (String objectName : tableHandle.getObjectNames()) {
            rows.add(getLiveRow(objectName, columns, 0));
        }
        return rows.build();
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.hadoop.serialization;
import java.util.ArrayList;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import org.elasticsearch.hadoop.serialization.Parser.Token;
import org.elasticsearch.hadoop.serialization.json.JacksonJsonGenerator;
import org.elasticsearch.hadoop.util.FastByteArrayOutputStream;
import org.elasticsearch.hadoop.util.StringUtils;
public abstract class ParsingUtils {
public static final String NOT_FOUND = "(not found)";
/**
* Seeks the field with the given name in the stream and positions (and returns) the parser to the next available token (value or not).
* Return null if no token is found.
*
* @param path
* @param parser
* @return token associated with the given path or null if not found
*/
public static Token seek(String path, Parser parser) {
// return current token if no path is given
if (!StringUtils.hasText(path)) {
return null;
}
List<String> tokens = StringUtils.tokenize(path, ".");
return seek(parser, tokens.toArray(new String[tokens.size()]));
}
public static Token seek(Parser parser, String[] path1) {
return seek(parser, path1, null);
}
public static Token seek(Parser parser, String[] path1, String[] path2) {
return doSeekToken(parser, path1, 0, path2, 0);
}
private static Token doSeekToken(Parser parser, String[] path1, int index1, String[] path2, int index2) {
Token token = null;
String currentName;
token = parser.currentToken();
if (token == null) {
token = parser.nextToken();
}
while ((token = parser.nextToken()) != null) {
if (token == Token.START_OBJECT) {
token = parser.nextToken();
}
if (token == Token.FIELD_NAME) {
// found a node, go one level deep
currentName = parser.currentName();
if (path1 != null && currentName.equals(path1[index1])) {
if (index1 + 1 < path1.length) {
return doSeekToken(parser, path1, index1 + 1, null, 0);
}
else {
return parser.nextToken();
}
}
else if (path2 != null && currentName.equals(path2[index2])) {
if (index2 + 1 < path2.length) {
return doSeekToken(parser, null, 0, path2, index2 + 1);
}
else {
return parser.nextToken();
}
}
else {
// get field token (can be value, object or array)
parser.nextToken();
parser.skipChildren();
}
}
else {
break;
}
}
return null;
}
private static class Matcher {
private final List<String> tokens;
private int tokenIndex = 0;
private boolean matched = false;
private Object value;
Matcher(String path) {
tokens = StringUtils.tokenize(path, ".");
}
boolean matches(String value) {
boolean match = tokens.get(tokenIndex).equals(value);
if (match) {
if (tokenIndex < tokens.size() - 1) {
tokenIndex++;
}
else {
matched = true;
this.value = value;
}
}
return match;
};
}
public static List<String> values(Parser parser, String... paths) {
List<Matcher> matchers = new ArrayList<Matcher>(paths.length);
for (String path : paths) {
matchers.add(new Matcher(path));
}
List<Matcher> active = new ArrayList<Matcher>(matchers);
Set<Matcher> inactive = new LinkedHashSet<Matcher>();
doFind(parser, new ArrayList<Matcher>(matchers), active, inactive);
List<String> matches = new ArrayList<String>();
for (Matcher matcher : matchers) {
matches.add(matcher.matched ? (matcher.value != null ? matcher.value.toString() : StringUtils.EMPTY) : NOT_FOUND);
}
return matches;
}
/**
 * Recursively walks the token stream one object level at a time, advancing the
 * matchers in {@code current} by one path segment per nesting level.
 * Matchers that complete their path capture the field's value; matchers that
 * matched a segment but did not finish descend with the recursion. When a
 * level's object ends, its partially-matched matchers are retired (moved from
 * {@code active} to {@code inactive}) since a path must match contiguously.
 *
 * @param parser   the JSON token stream being consumed
 * @param current  matchers still eligible to match fields at this level
 * @param active   matchers not yet retired anywhere in the traversal
 * @param inactive matchers that are done (fully matched or can no longer match)
 */
private static void doFind(Parser parser, List<Matcher> current, List<Matcher> active, Set<Matcher> inactive) {
Token token = null;
// Matchers that matched a field name at this level and await the nested object.
List<Matcher> matchingCurrentLevel = null;
String currentName;
// Prime the parser if it has not produced a token yet.
token = parser.currentToken();
if (token == null) {
token = parser.nextToken();
}
while ((token = parser.nextToken()) != null) {
if (token == Token.START_OBJECT) {
token = parser.nextToken();
// No matcher cares about this subtree: skip it wholesale.
if (matchingCurrentLevel == null) {
parser.skipChildren();
}
else {
doFind(parser, matchingCurrentLevel, active, inactive);
}
}
else if (token == Token.FIELD_NAME) {
currentName = parser.currentName();
Object value = null;
// Read the field's value at most once, even if several matchers want it.
boolean valueRead = false;
for (Matcher matcher : current) {
if (matcher.matches(currentName)) {
if (matcher.matched) {
// Path fully matched: capture the value and retire the matcher.
inactive.add(matcher);
if (!valueRead) {
valueRead = true;
switch (parser.nextToken()) {
case VALUE_NUMBER:
value = parser.numberValue();
break;
case VALUE_BOOLEAN:
value = Boolean.valueOf(parser.booleanValue());
break;
case VALUE_NULL:
value = null;
break;
case VALUE_STRING:
value = parser.text();
break;
default:
// Complex value (object/array): re-serialize it to JSON text.
value = readValueAsString(parser);
}
}
matcher.value = value;
}
else {
// Partial match: this matcher descends into the upcoming object.
if (matchingCurrentLevel == null) {
matchingCurrentLevel = new ArrayList<Matcher>(current.size());
}
matchingCurrentLevel.add(matcher);
}
}
}
}
else if (token == Token.END_OBJECT) {
// once matching, the matcher needs to match all the way - if it's not inactive (since it matched)
if (matchingCurrentLevel != null) {
for (Matcher matcher : matchingCurrentLevel) {
active.remove(matcher);
inactive.add(matcher);
}
}
}
// ignore other tokens
}
}
/**
 * Re-serializes the complex value (object or array) at the parser's current
 * position back into its JSON text form.
 *
 * @param parser stream positioned at the start of the value
 * @return the value rendered as a JSON string
 */
private static String readValueAsString(Parser parser) {
    final FastByteArrayOutputStream buffer = new FastByteArrayOutputStream(256);
    final JacksonJsonGenerator jsonGenerator = new JacksonJsonGenerator(buffer);
    traverse(parser, jsonGenerator);
    jsonGenerator.close();
    return buffer.toString();
}
/**
 * Copies the single JSON value at the parser's current token to the generator,
 * recursing for objects, arrays and field/value pairs. The parser is advanced
 * past the copied value before returning.
 *
 * @param parser    source token stream, positioned on the value to copy
 * @param generator sink that receives the equivalent JSON events
 */
private static void traverse(Parser parser, Generator generator) {
Token t = parser.currentToken();
switch (t) {
case START_OBJECT:
traverseMap(parser, generator);
break;
case START_ARRAY:
traverseArray(parser, generator);
break;
case FIELD_NAME:
// Emit the name, then recurse for the field's value.
generator.writeFieldName(parser.currentName());
parser.nextToken();
traverse(parser, generator);
break;
case VALUE_STRING:
generator.writeString(parser.text());
parser.nextToken();
break;
case VALUE_BOOLEAN:
generator.writeBoolean(parser.booleanValue());
parser.nextToken();
break;
case VALUE_NULL:
generator.writeNull();
parser.nextToken();
break;
case VALUE_NUMBER:
// Preserve the original numeric width/precision when copying.
switch (parser.numberType()) {
case INT:
generator.writeNumber(parser.intValue());
break;
case LONG:
generator.writeNumber(parser.longValue());
break;
case DOUBLE:
generator.writeNumber(parser.doubleValue());
break;
case FLOAT:
generator.writeNumber(parser.floatValue());
break;
}
parser.nextToken();
break;
}
}
/**
 * Copies an entire JSON object, field by field, then steps the parser past
 * the closing brace.
 */
private static void traverseMap(Parser parser, Generator generator) {
    generator.writeBeginObject();
    parser.nextToken();
    while (parser.currentToken() != Token.END_OBJECT) {
        traverse(parser, generator);
    }
    generator.writeEndObject();
    parser.nextToken();
}
/**
 * Copies an entire JSON array, element by element, then steps the parser past
 * the closing bracket.
 */
private static void traverseArray(Parser parser, Generator generator) {
    generator.writeBeginArray();
    parser.nextToken();
    while (parser.currentToken() != Token.END_ARRAY) {
        traverse(parser, generator);
    }
    generator.writeEndArray();
    parser.nextToken();
}
}
| |
package com.rainerschuster.sudoku.vaadin;
//import java.awt.GridLayout;
import java.util.ArrayList;
import java.util.List;
import com.rainerschuster.sudoku.SudokuField;
import com.rainerschuster.sudoku.SudokuProperties;
import com.rainerschuster.sudoku.SudokuValue;
import com.rainerschuster.sudoku.view.ViewProperties;
import com.vaadin.ui.CustomComponent;
import com.vaadin.ui.GridLayout;
/**
 * Vaadin component that renders a Sudoku field as a grid of
 * {@link SudokuCellVaadin} cells and maps puzzle data onto cell state and
 * CSS style names ("given", region borders, X/color highlighting).
 */
public class SudokuFieldVaadin extends CustomComponent {

    private static final long serialVersionUID = 1L;

    // Grid that lays out one SudokuCellVaadin per puzzle cell.
    private GridLayout mainLayout;
    // Structural description of the puzzle (edge length, regions, colors, ...).
    private SudokuProperties properties;
    // Cell components, indexed [row][column].
    private SudokuCellVaadin[][] cells;

    /**
     * Creates the field component for the given puzzle properties.
     *
     * @param properties structural description of the Sudoku variant to display
     */
    public SudokuFieldVaadin(SudokuProperties properties) {
        super();
        this.properties = properties;
        initialize();
    }

    /**
     * Builds the cell grid and sizes both the grid and this component to the
     * puzzle dimensions (edge length times the per-cell pixel size).
     */
    private void initialize() {
        final int edge = properties.getEdgeLength();
        mainLayout = new GridLayout(edge, edge);
        cells = new SudokuCellVaadin[edge][edge];
        for (int row = 0; row < edge; row++) {
            for (int col = 0; col < edge; col++) {
                cells[row][col] = new SudokuCellVaadin();
                // GridLayout addresses components as (column, row).
                mainLayout.addComponent(cells[row][col], col, row);
            }
        }
        final int size = ViewProperties.FIELD_SIZE * edge;
        mainLayout.setWidth(size, Unit.PIXELS);
        mainLayout.setHeight(size, Unit.PIXELS);
        setWidth(size, Unit.PIXELS);
        setHeight(size, Unit.PIXELS);
        setCompositionRoot(mainLayout);
    }

    public SudokuProperties getProperties() {
        return properties;
    }

    public void setProperties(SudokuProperties properties) {
        this.properties = properties;
    }

    /** Adds {@code style} to the cell when {@code on}, removes it otherwise. */
    private static void toggleStyle(SudokuCellVaadin cell, String style, boolean on) {
        if (on) {
            cell.addStyleName(style);
        } else {
            cell.removeStyleName(style);
        }
    }

    /** @return {@code true} when the two coordinates lie in different regions. */
    private boolean differentRegion(List<Integer> a, List<Integer> b) {
        return !properties.getRegion().get(a).equals(properties.getRegion().get(b));
    }

    /**
     * Resets every cell: clears its number, re-enables it, and removes the
     * style names applied by {@link #importData(SudokuField, boolean)}.
     */
    public void clear() {
        for (int row = 0; row < properties.getEdgeLength(); row++) {
            for (int col = 0; col < properties.getEdgeLength(); col++) {
                final SudokuCellVaadin cell = cells[row][col];
                cell.setNumber(null);
                cell.setEnabled(true);
                cell.removeStyleName("given");
                cell.removeStyleName("north");
                cell.removeStyleName("east");
                cell.removeStyleName("south");
                cell.removeStyleName("west");
            }
        }
    }

    /**
     * Populates the cells from the given puzzle data.
     *
     * @param field        puzzle to display (values, givens, regions)
     * @param showSolution when {@code true} all values are shown, otherwise
     *                     only the given cells
     */
    public void importData(final SudokuField field, final boolean showSolution) {
        for (int row = 0; row < properties.getEdgeLength(); row++) {
            for (int col = 0; col < properties.getEdgeLength(); col++) {
                final SudokuCellVaadin cell = cells[row][col];
                final List<Integer> coordinates = new ArrayList<Integer>(2);
                coordinates.add(row);
                coordinates.add(col);
                final boolean given = field.isGiven(coordinates);
                if (showSolution || given) {
                    cell.setNumber(field.getField().get(coordinates));
                }
                // Given cells are fixed and styled distinctly.
                cell.setEnabled(!given);
                toggleStyle(cell, "given", given);
                if (properties.isXSudoku()) {
                    // Cells on either diagonal belong to the X regions.
                    final boolean onDiagonal = row == col || row + col + 1 == properties.getEdgeLength();
                    toggleStyle(cell, "colorX", onDiagonal);
                }
                if (properties.isColorSudoku()) {
                    // Exactly one of the "color1".."colorN" styles is active per cell.
                    final int colorIndex = properties.getColor().get(coordinates);
                    for (int i = 0; i < properties.getEdgeLength(); i++) {
                        toggleStyle(cell, "color" + (i + 1), i == colorIndex);
                    }
                }
                cell.setMinNumber(1);
                cell.setMaxNumber(properties.getNumbers());
                // Region borders: draw an edge wherever the neighbouring cell lies
                // in a different region, or outside the field. Boundary checks are
                // evaluated first so out-of-range coordinates are never looked up.
                final List<Integer> neighborCoordinates = new ArrayList<Integer>(2);
                neighborCoordinates.add(row);
                neighborCoordinates.add(col);
                // north (top)
                neighborCoordinates.set(0, row - 1);
                toggleStyle(cell, "north",
                        row == 0 || differentRegion(coordinates, neighborCoordinates));
                // south (bottom)
                neighborCoordinates.set(0, row + 1);
                toggleStyle(cell, "south",
                        row == properties.getEdgeLength() - 1 || differentRegion(coordinates, neighborCoordinates));
                // east (right)
                neighborCoordinates.set(0, row);
                neighborCoordinates.set(1, col + 1);
                toggleStyle(cell, "east",
                        col == properties.getEdgeLength() - 1 || differentRegion(coordinates, neighborCoordinates));
                // west (left)
                neighborCoordinates.set(1, col - 1);
                toggleStyle(cell, "west",
                        col == 0 || differentRegion(coordinates, neighborCoordinates));
            }
        }
    }

    // TODO QUEST replace by exportData (which returns SudokuField)?
    /**
     * Collects all filled-in cell values.
     *
     * @return one {@link SudokuValue} (with its coordinates) per non-empty cell
     */
    public List<SudokuValue> exportValues() {
        final List<SudokuValue> list = new ArrayList<SudokuValue>();
        for (int row = 0; row < properties.getEdgeLength(); row++) {
            for (int col = 0; col < properties.getEdgeLength(); col++) {
                // Guard against null: clear() empties cells via setNumber(null),
                // so unboxing getNumber() directly could throw an NPE.
                final Integer number = cells[row][col].getNumber();
                if (number != null && number > 0) {
                    final List<Integer> coordinates = new ArrayList<Integer>(2);
                    coordinates.add(row);
                    coordinates.add(col);
                    final SudokuValue value = new SudokuValue(properties, number);
                    value.setCoordinates(coordinates);
                    list.add(value);
                }
            }
        }
        return list;
    }
}
| |
package org.semanticweb.drew.ldlp.reasoner;
import static org.junit.Assert.*;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.semanticweb.drew.ldlp.reasoner.LDLPClosure;
import org.semanticweb.drew.ldlp.reasoner.LDLPClosureBuilder;
import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.IRI;
import org.semanticweb.owlapi.model.OWLAxiom;
import org.semanticweb.owlapi.model.OWLClass;
import org.semanticweb.owlapi.model.OWLClassAssertionAxiom;
import org.semanticweb.owlapi.model.OWLDataFactory;
import org.semanticweb.owlapi.model.OWLIndividual;
import org.semanticweb.owlapi.model.OWLObjectAllValuesFrom;
import org.semanticweb.owlapi.model.OWLObjectIntersectionOf;
import org.semanticweb.owlapi.model.OWLObjectProperty;
import org.semanticweb.owlapi.model.OWLObjectPropertyAssertionAxiom;
import org.semanticweb.owlapi.model.OWLObjectSomeValuesFrom;
import org.semanticweb.owlapi.model.OWLObjectUnionOf;
import org.semanticweb.owlapi.model.OWLOntology;
import org.semanticweb.owlapi.model.OWLOntologyCreationException;
import org.semanticweb.owlapi.model.OWLOntologyManager;
//import at.ac.tuwien.kr.owlapi.model.ldl.LDLObjectPropertyIntersectionOf;
//import at.ac.tuwien.kr.owlapi.model.ldl.LDLObjectPropertyTransitiveClosureOf;
//
//import edu.stanford.db.lp.ProgramClause;
/**
 * Unit tests for {@link LDLPClosureBuilder}: each test builds a small ontology
 * and verifies the sizes/contents of the closure's named classes, complex
 * class expressions, properties and individuals.
 */
public class LDLPObjectClosureBuilderTest {
    // private List<ProgramClause> clauses;
    private OWLOntologyManager manager;
    private OWLDataFactory factory;
    private OWLIndividual a;
    private OWLClass A;
    private OWLClass B;
    private OWLClass C;
    private OWLObjectProperty E;
    private OWLObjectProperty F;
    private LDLPClosureBuilder builder;
    private OWLOntology ontology;

    @Before
    public void setUp() {
        // clauses = new ArrayList<ProgramClause>();
        manager = OWLManager.createOWLOntologyManager();
        factory = manager.getOWLDataFactory();
        A = factory.getOWLClass(IRI.create("A"));
        B = factory.getOWLClass(IRI.create("B"));
        C = factory.getOWLClass(IRI.create("C"));
        a = factory.getOWLNamedIndividual(IRI.create("a"));
        // b and c are only referenced by the commented-out LDL tests below.
        OWLIndividual b = factory.getOWLNamedIndividual(IRI.create("b"));
        OWLIndividual c = factory.getOWLNamedIndividual(IRI.create("c"));
        E = factory.getOWLObjectProperty(IRI.create("E"));
        F = factory.getOWLObjectProperty(IRI.create("F"));
        builder = new LDLPClosureBuilder();
    }

    // a:A
    @Test
    public void testVisitOWLClassAssertionAxiom1() throws OWLOntologyCreationException {
        final OWLClassAssertionAxiom a_is_A = factory.getOWLClassAssertionAxiom(A, a);
        Set<OWLAxiom> axioms = new HashSet<>();
        axioms.add(a_is_A);
        ontology = manager.createOntology(axioms);
        final LDLPClosure closure = builder.build(ontology);
        assertEquals(1, closure.getNamedClasses().size());
        assertTrue(closure.getNamedClasses().contains(A));
        assertEquals(0, closure.getComplexClassExpressions().size());
        assertEquals(0, closure.getComplexPropertyExpressions().size());
        assertEquals(0, closure.getNamedProperties().size());
        assertEquals(1, closure.getNamedIndividuals().size());
    }

    // a:A and B and C
    @Test
    public void testVisitOWLClassAssertionAxiom2() throws OWLOntologyCreationException {
        final OWLObjectIntersectionOf A_and_B_and_C = factory.getOWLObjectIntersectionOf(A, B, C);
        final OWLClassAssertionAxiom axiom = factory.getOWLClassAssertionAxiom(A_and_B_and_C, a);
        Set<OWLAxiom> axioms = new HashSet<>();
        axioms.add(axiom);
        ontology = manager.createOntology(axioms);
        final LDLPClosure closure = builder.build(ontology);
        assertEquals(3, closure.getNamedClasses().size());
        assertTrue(closure.getNamedClasses().contains(A));
        assertTrue(closure.getNamedClasses().contains(B));
        // Fixed: previously checked contains(B) twice and never checked C.
        assertTrue(closure.getNamedClasses().contains(C));
        assertEquals(1, closure.getComplexClassExpressions().size());
        assertTrue(closure.getComplexClassExpressions().contains(A_and_B_and_C));
        assertEquals(0, closure.getComplexPropertyExpressions().size());
        assertEquals(0, closure.getNamedProperties().size());
        assertEquals(1, closure.getNamedIndividuals().size());
    }

    // @Test
    // public void testVisitOWLObjectPropertyAssertionAxiom() throws OWLOntologyCreationException {
    // final LDLObjectPropertyIntersectionOf property = factory.getLDLObjectPropertyIntersectionOf(E, F);
    // final OWLAxiom axiom = factory.getOWLObjectPropertyAssertionAxiom(property, a, b);
    // Set<OWLAxiom> axioms = new HashSet<OWLAxiom>();
    // axioms.add(axiom);
    // ontology = manager.createOntology(axioms);
    // final LDLPClosure closure = builder.build(ontology);
    // assertEquals(0, closure.getNamedClasses().size());
    //
    // assertEquals(0, closure.getComplexClassExpressions().size());
    //
    // assertEquals(1, closure.getComplexPropertyExpressions().size());
    // assertTrue(closure.getComplexPropertyExpressions().contains(property));
    //
    // assertEquals(2, closure.getNamedProperties().size());
    // assertEquals(2, closure.getNamedIndividuals().size());
    // }

    // A subClassOf B
    @Test
    public void testVisitOWLSubClassOfAxiom() throws OWLOntologyCreationException {
        final OWLAxiom axiom = factory.getOWLSubClassOfAxiom(A, B);
        Set<OWLAxiom> axioms = new HashSet<>();
        axioms.add(axiom);
        ontology = manager.createOntology(axioms);
        final LDLPClosure closure = builder.build(ontology);
        assertEquals(2, closure.getNamedClasses().size());
        assertEquals(0, closure.getComplexClassExpressions().size());
        assertEquals(0, closure.getComplexPropertyExpressions().size());
        assertEquals(0, closure.getNamedProperties().size());
        assertEquals(0, closure.getNamedIndividuals().size());
    }

    // E subPropertyOf F
    @Test
    public void testVisitOWLSubObjectPropertyOfAxiom() throws OWLOntologyCreationException {
        final OWLAxiom axiom = factory.getOWLSubObjectPropertyOfAxiom(E, F);
        Set<OWLAxiom> axioms = new HashSet<>();
        axioms.add(axiom);
        ontology = manager.createOntology(axioms);
        final LDLPClosure closure = builder.build(ontology);
        assertEquals(0, closure.getNamedClasses().size());
        assertEquals(0, closure.getComplexClassExpressions().size());
        assertEquals(0, closure.getComplexPropertyExpressions().size());
        assertEquals(2, closure.getNamedProperties().size());
        assertEquals(0, closure.getNamedIndividuals().size());
    }

    // A or B or C subClassOf E some A
    @Test
    public void testVisitOWLObjectUnionOf() throws OWLOntologyCreationException {
        final OWLObjectUnionOf sub = factory.getOWLObjectUnionOf(A, B, C);
        final OWLObjectSomeValuesFrom sup = factory.getOWLObjectSomeValuesFrom(E, A);
        final OWLAxiom axiom = factory.getOWLSubClassOfAxiom(sub, sup);
        Set<OWLAxiom> axioms = new HashSet<>();
        axioms.add(axiom);
        ontology = manager.createOntology(axioms);
        final LDLPClosure closure = builder.build(ontology);
        assertEquals(3, closure.getNamedClasses().size());
        assertEquals(2, closure.getComplexClassExpressions().size());
        assertEquals(0, closure.getComplexPropertyExpressions().size());
        assertEquals(1, closure.getNamedProperties().size());
        assertEquals(0, closure.getNamedIndividuals().size());
    }

    // a:(E and E and F) some (E^+ all A)
    // @Test
    // public void testVisitOWLObjectAllValuesFrom() throws OWLOntologyCreationException {
    // final LDLObjectPropertyIntersectionOf E_and_E_and_F = factory.getLDLObjectPropertyIntersectionOf(E,E,F);
    // final LDLObjectPropertyTransitiveClosureOf Et = factory.getLDLObjectPropertyTransitiveClosureOf(E);
    // final OWLObjectAllValuesFrom Ep_all_A = factory.getOWLObjectAllValuesFrom(Et, A);
    // final OWLObjectSomeValuesFrom cls = factory.getOWLObjectSomeValuesFrom(E_and_E_and_F, Ep_all_A);
    // final OWLClassAssertionAxiom axiom = factory.getOWLClassAssertionAxiom(cls, a);
    // Set<OWLAxiom> axioms = new HashSet<OWLAxiom>();
    // axioms.add(axiom);
    // ontology = manager.createOntology(axioms);
    // final LDLPClosure closure = builder.build(ontology);
    // assertEquals(1, closure.getNamedClasses().size());
    //
    // assertEquals(2, closure.getComplexClassExpressions().size());
    //
    // assertEquals(2, closure.getComplexPropertyExpressions().size());
    //
    // assertEquals(2, closure.getNamedProperties().size());
    // assertEquals(1, closure.getNamedIndividuals().size());
    // }

    // The following visitor cases are not yet covered; placeholders kept so
    // the missing coverage stays visible in test reports.
    @Test
    public void testVisitOWLObjectHasValue() {
    }

    @Test
    public void testVisitOWLObjectMinCardinality() {
    }

    @Test
    public void testVisitOWLObjectExactCardinality() {
    }

    @Test
    public void testVisitOWLObjectOneOf() {
    }

    @Test
    public void testVisitOWLObjectInverseOf() {
    }

    @Test
    public void testVisitOWLDataProperty() {
    }

    @Test
    public void testVisitLDLObjectPropertyIntersectionOf() {
    }

    @Test
    public void testVisitLDLObjectPropertyUnionOf() {
    }

    @Test
    public void testVisitLDLObjectPropertyChainOf() {
    }

    @Test
    public void testVisitOWLNamedIndividual() {
    }

    @Test
    public void testVisitOWLAnonymousIndividual() {
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.ctakes.relationextractor.eval;
import java.io.File;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.annotation.Nullable;
import org.apache.ctakes.relationextractor.ae.DegreeOfRelationExtractorAnnotator;
import org.apache.ctakes.relationextractor.ae.LocationOfRelationExtractorAnnotator;
import org.apache.ctakes.relationextractor.ae.RelationExtractorAnnotator;
import org.apache.ctakes.typesystem.type.relation.BinaryTextRelation;
import org.apache.ctakes.typesystem.type.relation.DegreeOfTextRelation;
import org.apache.ctakes.typesystem.type.relation.LocationOfTextRelation;
import org.apache.ctakes.typesystem.type.relation.RelationArgument;
import org.apache.ctakes.typesystem.type.textsem.EntityMention;
import org.apache.ctakes.typesystem.type.textsem.EventMention;
import org.apache.ctakes.typesystem.type.textsem.IdentifiedAnnotation;
import org.apache.ctakes.typesystem.type.textsem.Modifier;
import org.apache.uima.UIMAFramework;
import org.apache.uima.analysis_engine.AnalysisEngine;
import org.apache.uima.analysis_engine.AnalysisEngineDescription;
import org.apache.uima.analysis_engine.AnalysisEngineProcessException;
import org.apache.uima.cas.CAS;
import org.apache.uima.cas.CASException;
import org.apache.uima.cas.Feature;
import org.apache.uima.collection.CollectionReader;
import org.apache.uima.fit.component.JCasAnnotator_ImplBase;
import org.apache.uima.fit.factory.AggregateBuilder;
import org.apache.uima.fit.factory.AnalysisEngineFactory;
import org.apache.uima.fit.factory.ConfigurationParameterFactory;
import org.apache.uima.fit.pipeline.JCasIterator;
import org.apache.uima.fit.pipeline.SimplePipeline;
import org.apache.uima.fit.util.JCasUtil;
import org.apache.uima.jcas.JCas;
import org.apache.uima.jcas.tcas.Annotation;
import org.apache.uima.util.CasCopier;
import org.apache.uima.util.Level;
import org.apache.uima.util.XMLInputSource;
import org.cleartk.eval.AnnotationStatistics;
import org.cleartk.ml.jar.DefaultDataWriterFactory;
import org.cleartk.ml.jar.DirectoryDataWriterFactory;
import org.cleartk.ml.jar.GenericJarClassifierFactory;
import org.cleartk.ml.jar.JarClassifierBuilder;
import org.cleartk.ml.liblinear.LibLinearStringOutcomeDataWriter;
import com.google.common.base.Function;
import com.google.common.base.Objects;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.lexicalscope.jewel.cli.CliFactory;
import com.lexicalscope.jewel.cli.Option;
public class RelationExtractorEvaluation extends SHARPXMI.Evaluation_ImplBase {
/**
 * Command-line options for the relation-extractor evaluation, parsed by
 * jewel-cli. Note: {@code longName} is the bare option name; jewel-cli adds
 * the {@code --} prefix itself.
 */
public static interface Options extends SHARPXMI.EvaluationOptions {
    /** Relation categories to evaluate, each in a separate run. */
    @Option(
        longName = "relations",
        description = "determines which relations to evaluate on (separately)",
        defaultValue = { "degree_of", "location_of" })
    public List<String> getRelations();

    @Option(
        longName = "test-on-ctakes",
        description = "evaluate test performance on ctakes entities, instead of gold standard "
            + "entities")
    public boolean getTestOnCTakes();

    @Option(
        longName = "allow-smaller-system-arguments",
        description = "for evaluation, allow system relation arguments to match gold relation "
            + "arguments that enclose them")
    public boolean getAllowSmallerSystemArguments();

    @Option(
        longName = "ignore-impossible-gold-relations",
        description = "for evaluation, ignore gold relations that would be impossible to find "
            + "because there are no corresponding system mentions")
    public boolean getIgnoreImpossibleGoldRelations();

    // Fixed: longName was "--print-errors"; the dashes must not be part of
    // the name (every other option here is dash-free).
    @Option(
        longName = "print-errors",
        description = "print relations that were incorrectly predicted")
    public boolean getPrintErrors();
}
// Maps each CLI relation name (e.g. "degree_of") to its relation type.
public static final Map<String, Class<? extends BinaryTextRelation>> RELATION_CLASSES =
Maps.newHashMap();
// Maps each relation type to the annotator that extracts it.
public static final Map<Class<? extends BinaryTextRelation>, Class<? extends RelationExtractorAnnotator>> ANNOTATOR_CLASSES =
Maps.newHashMap();
// Maps each relation type to the parameter settings found best for it.
public static final Map<Class<? extends BinaryTextRelation>, ParameterSettings> BEST_PARAMETERS =
Maps.newHashMap();
static {
// degree_of: LibLinear SVM (-s 1) with cost 0.5, keeping half the negative examples.
RELATION_CLASSES.put("degree_of", DegreeOfTextRelation.class);
ANNOTATOR_CLASSES.put(DegreeOfTextRelation.class, DegreeOfRelationExtractorAnnotator.class);
BEST_PARAMETERS.put(DegreeOfTextRelation.class, new ParameterSettings(
LibLinearStringOutcomeDataWriter.class,
new Object[] { RelationExtractorAnnotator.PARAM_PROBABILITY_OF_KEEPING_A_NEGATIVE_EXAMPLE,
0.5f },
new String[] { "-s", "1", "-c", "0.5" }));
// location_of: LibLinear logistic regression (-s 0) with cost 1.0.
RELATION_CLASSES.put("location_of", LocationOfTextRelation.class);
ANNOTATOR_CLASSES.put(LocationOfTextRelation.class, LocationOfRelationExtractorAnnotator.class);
BEST_PARAMETERS.put(LocationOfTextRelation.class, new ParameterSettings(
LibLinearStringOutcomeDataWriter.class,
new Object[] { RelationExtractorAnnotator.PARAM_PROBABILITY_OF_KEEPING_A_NEGATIVE_EXAMPLE,
0.5f },
new String[] { "-s", "0", "-c", "1.0" }));
}
/**
 * Entry point: parses CLI options, prepares XMI data, then runs one
 * evaluation per selected relation category using that relation's best
 * known parameters (and a grid of settings for parameter search).
 *
 * @param args command-line arguments, see {@link Options}
 * @throws Exception if option parsing, XMI generation or evaluation fails
 */
public static void main(String[] args) throws Exception {
// parse the options, validate them, and generate XMI if necessary
final Options options = CliFactory.parseArguments(Options.class, args);
SHARPXMI.validate(options);
SHARPXMI.generateXMI(options);
// determine the grid of parameters to search through
// for the full set of LibLinear parameters, see:
// https://github.com/bwaldvogel/liblinear-java/blob/master/src/main/java/de/bwaldvogel/liblinear/Train.java
List<ParameterSettings> gridOfSettings = Lists.newArrayList();
for (float probabilityOfKeepingANegativeExample : new float[] { 0.5f, 1.0f }) {
for (int solver : new int[] { 0 /* logistic regression */, 1 /* SVM */}) {
for (double svmCost : new double[] { 0.01, 0.05, 0.1, 0.5, 1, 5, 10, 50, 100 }) {
gridOfSettings.add(new ParameterSettings(
LibLinearStringOutcomeDataWriter.class,
new Object[] {
RelationExtractorAnnotator.PARAM_PROBABILITY_OF_KEEPING_A_NEGATIVE_EXAMPLE,
probabilityOfKeepingANegativeExample },
new String[] { "-s", String.valueOf(solver), "-c", String.valueOf(svmCost) }));
}
}
}
// run an evaluation for each selected relation
for (final String relationCategory : options.getRelations()) {
// get the best parameters for the relation
final Class<? extends BinaryTextRelation> relationClass =
RELATION_CLASSES.get(relationCategory);
ParameterSettings bestSettings = BEST_PARAMETERS.get(relationClass);
// run the evaluation
SHARPXMI.evaluate(
options,
bestSettings,
gridOfSettings,
// Factory that builds an evaluation per parameter setting; models are
// written under target/models/<relation>.
new Function<ParameterSettings, RelationExtractorEvaluation>() {
@Override
public RelationExtractorEvaluation apply(@Nullable ParameterSettings params) {
return new RelationExtractorEvaluation(
new File("target/models/" + relationCategory),
relationClass,
ANNOTATOR_CLASSES.get(relationClass),
params,
options.getTestOnCTakes(),
options.getAllowSmallerSystemArguments(),
options.getIgnoreImpossibleGoldRelations(),
options.getPrintErrors());
}
});
}
}
// Relation type being evaluated (e.g. DegreeOfTextRelation).
private Class<? extends BinaryTextRelation> relationClass;
// CleartkAnnotator implementation that learns/applies the extractor model.
private Class<? extends RelationExtractorAnnotator> classifierAnnotatorClass;
// Data writer class, configuration parameters and training arguments.
private ParameterSettings parameterSettings;
// When true, test against cTAKES-produced mentions instead of gold mentions.
private boolean testOnCTakes;
// When true, system arguments may match enclosing gold arguments.
private boolean allowSmallerSystemArguments;
// When true, gold relations with no corresponding system mentions are skipped.
private boolean ignoreImpossibleGoldRelations;
// When true, incorrectly predicted relations are printed for error analysis.
private boolean printErrors;
/**
 * An evaluation of a relation extractor.
 *
 * @param baseDirectory
 * The directory where models, etc. should be written
 * @param relationClass
 * The class of the relation to be predicted
 * @param classifierAnnotatorClass
 * The CleartkAnnotator class that learns a relation extractor model
 * @param parameterSettings
 * The parameters defining how to train a classifier
 * @param testOnCTakes
 * During testing, use annotations from cTAKES, not from the gold
 * standard
 * @param allowSmallerSystemArguments
 * During testing, allow system annotations to match gold annotations
 * that enclose them
 * @param ignoreImpossibleGoldRelations
 * During testing, ignore gold relations that would be impossible to
 * find because there are no corresponding system mentions
 * @param printErrors
 * During testing, print incorrectly predicted relations for error
 * analysis
 */
public RelationExtractorEvaluation(
File baseDirectory,
Class<? extends BinaryTextRelation> relationClass,
Class<? extends RelationExtractorAnnotator> classifierAnnotatorClass,
ParameterSettings parameterSettings,
boolean testOnCTakes,
boolean allowSmallerSystemArguments,
boolean ignoreImpossibleGoldRelations,
boolean printErrors) {
super(baseDirectory);
this.relationClass = relationClass;
this.classifierAnnotatorClass = classifierAnnotatorClass;
this.parameterSettings = parameterSettings;
this.testOnCTakes = testOnCTakes;
this.allowSmallerSystemArguments = allowSmallerSystemArguments;
this.ignoreImpossibleGoldRelations = ignoreImpossibleGoldRelations;
this.printErrors = printErrors;
}
/**
 * Convenience constructor with all evaluation flags (test-on-cTAKES,
 * smaller-system-arguments, ignore-impossible-gold-relations, print-errors)
 * disabled. See the full constructor for parameter documentation.
 */
public RelationExtractorEvaluation(
File baseDirectory,
Class<? extends BinaryTextRelation> relationClass,
Class<? extends RelationExtractorAnnotator> classifierAnnotatorClass,
ParameterSettings parameterSettings) {
this(
baseDirectory,
relationClass,
classifierAnnotatorClass,
parameterSettings,
false,
false,
false,
false);
}
/**
 * Trains the relation extractor: copies gold relations into the system view,
 * runs the data-writing pipeline over the training data, then trains and
 * packages the classifier model into {@code directory}.
 *
 * @param collectionReader source of training CASes
 * @param directory        output directory for training data and the model jar
 * @throws Exception if pipeline construction, processing or training fails
 */
@Override
public void train(CollectionReader collectionReader, File directory) throws Exception {
// Log which evaluation/relation/fold is being trained.
System.err.printf(
"%s: %s: %s:\n",
this.getClass().getSimpleName(),
this.relationClass.getSimpleName(),
directory.getName());
System.err.println(this.parameterSettings);
AggregateBuilder builder = new AggregateBuilder();
// remove cTAKES entity mentions and modifiers in the system view and copy
// in the gold relations
builder.add(AnalysisEngineFactory.createEngineDescription(RemoveCTakesMentionsAndCopyGoldRelations.class));
// add the relation extractor, configured for training mode
AnalysisEngineDescription classifierAnnotator =
AnalysisEngineFactory.createEngineDescription(
this.classifierAnnotatorClass,
this.parameterSettings.configurationParameters);
ConfigurationParameterFactory.addConfigurationParameters(
classifierAnnotator,
DefaultDataWriterFactory.PARAM_DATA_WRITER_CLASS_NAME,
this.parameterSettings.dataWriterClass,
DirectoryDataWriterFactory.PARAM_OUTPUT_DIRECTORY,
directory.getPath());
builder.add(classifierAnnotator);
// run the data-writing pipeline
SimplePipeline.runPipeline(collectionReader, builder.createAggregateDescription());
// train the classifier and package it into a .jar file
JarClassifierBuilder.trainAndPackage(directory, this.parameterSettings.trainingArguments);
@Override
protected AnnotationStatistics<String> test(CollectionReader collectionReader, File directory)
throws Exception {
AggregateBuilder builder = new AggregateBuilder();
if (this.testOnCTakes) {
// add the modifier extractor
File file = new File("desc/analysis_engine/ModifierExtractorAnnotator.xml");
XMLInputSource source = new XMLInputSource(file);
builder.add(UIMAFramework.getXMLParser().parseAnalysisEngineDescription(source));
// remove extraneous entity mentions
builder.add(AnalysisEngineFactory.createEngineDescription(RemoveSmallerEventMentions.class));
} else {
// replace cTAKES entity mentions and modifiers in the system view with
// the gold annotations
builder.add(AnalysisEngineFactory.createEngineDescription(ReplaceCTakesMentionsWithGoldMentions.class));
}
// add the relation extractor, configured for classification mode
AnalysisEngineDescription classifierAnnotator =
AnalysisEngineFactory.createEngineDescription(
this.classifierAnnotatorClass,
this.parameterSettings.configurationParameters);
ConfigurationParameterFactory.addConfigurationParameters(
classifierAnnotator,
GenericJarClassifierFactory.PARAM_CLASSIFIER_JAR_PATH,
JarClassifierBuilder.getModelJarFile(directory));
builder.add(classifierAnnotator);
// statistics will be based on the "category" feature of the
// BinaryTextRelations
AnnotationStatistics<String> stats = new AnnotationStatistics<String>();
Function<BinaryTextRelation, HashableArguments> getSpan =
new Function<BinaryTextRelation, HashableArguments>() {
@Override
public HashableArguments apply(BinaryTextRelation relation) {
return new HashableArguments(relation);
}
};
Function<BinaryTextRelation, String> getOutcome =
AnnotationStatistics.annotationToFeatureValue("category");
// calculate statistics, iterating over the results of the classifier
AnalysisEngine engine = builder.createAggregate();
for (Iterator<JCas> casIter = new JCasIterator(collectionReader, engine); casIter.hasNext();) {
JCas jCas = casIter.next();
// get the gold view
JCas goldView;
try {
goldView = jCas.getView(SHARPXMI.GOLD_VIEW_NAME);
} catch (CASException e) {
throw new AnalysisEngineProcessException(e);
}
// get the gold and system annotations
Collection<? extends BinaryTextRelation> goldBinaryTextRelations =
JCasUtil.select(goldView, this.relationClass);
Collection<? extends BinaryTextRelation> systemBinaryTextRelations =
JCasUtil.select(jCas, this.relationClass);
if (this.ignoreImpossibleGoldRelations) {
// collect only relations where both arguments have some possible system
// arguments
List<BinaryTextRelation> relations = Lists.newArrayList();
for (BinaryTextRelation relation : goldBinaryTextRelations) {
boolean hasSystemArgs = true;
for (RelationArgument relArg : Lists.newArrayList(relation.getArg1(), relation.getArg2())) {
IdentifiedAnnotation goldArg = (IdentifiedAnnotation) relArg.getArgument();
Class<? extends IdentifiedAnnotation> goldClass = goldArg.getClass();
boolean noSystemArg = JCasUtil.selectCovered(jCas, goldClass, goldArg).isEmpty();
hasSystemArgs = hasSystemArgs && !noSystemArg;
}
if (hasSystemArgs) {
relations.add(relation);
} else {
IdentifiedAnnotation arg1 = (IdentifiedAnnotation) relation.getArg1().getArgument();
IdentifiedAnnotation arg2 = (IdentifiedAnnotation) relation.getArg2().getArgument();
String messageFormat =
"removing relation between %s and %s which is impossible to "
+ "find with system mentions";
String message = String.format(messageFormat, format(arg1), format(arg2));
UIMAFramework.getLogger(this.getClass()).log(Level.WARNING, message);
}
}
goldBinaryTextRelations = relations;
}
if (this.allowSmallerSystemArguments) {
// collect all the arguments of the manually annotated relations
Set<IdentifiedAnnotation> goldArgs = Sets.newHashSet();
for (BinaryTextRelation relation : goldBinaryTextRelations) {
for (RelationArgument relArg : Lists.newArrayList(relation.getArg1(), relation.getArg2())) {
goldArgs.add((IdentifiedAnnotation) relArg.getArgument());
}
}
// collect all the arguments of system-predicted relations that don't
// match some gold argument
Set<IdentifiedAnnotation> unmatchedSystemArgs = Sets.newHashSet();
for (BinaryTextRelation relation : systemBinaryTextRelations) {
for (RelationArgument relArg : Lists.newArrayList(relation.getArg1(), relation.getArg2())) {
IdentifiedAnnotation systemArg = (IdentifiedAnnotation) relArg.getArgument();
Class<? extends IdentifiedAnnotation> systemClass = systemArg.getClass();
boolean matchesSomeGold = false;
for (IdentifiedAnnotation goldArg : JCasUtil.selectCovered(
goldView,
systemClass,
systemArg)) {
if (goldArg.getBegin() == systemArg.getBegin()
&& goldArg.getEnd() == systemArg.getEnd()) {
matchesSomeGold = true;
break;
}
}
if (!matchesSomeGold) {
unmatchedSystemArgs.add(systemArg);
}
}
}
// map each unmatched system argument to the gold argument that encloses
// it
Map<IdentifiedAnnotation, IdentifiedAnnotation> systemToGold = Maps.newHashMap();
for (IdentifiedAnnotation goldArg : goldArgs) {
Class<? extends IdentifiedAnnotation> goldClass = goldArg.getClass();
for (IdentifiedAnnotation systemArg : JCasUtil.selectCovered(jCas, goldClass, goldArg)) {
if (unmatchedSystemArgs.contains(systemArg)) {
// if there's no mapping yet for this system arg, map it to the
// enclosing gold arg
IdentifiedAnnotation oldGoldArg = systemToGold.get(systemArg);
if (oldGoldArg == null) {
systemToGold.put(systemArg, goldArg);
}
// if there's already a mapping for this system arg, only re-map
// it to match the type
else {
IdentifiedAnnotation current, other;
if (systemArg.getTypeID() == goldArg.getTypeID()) {
systemToGold.put(systemArg, goldArg);
current = goldArg;
other = oldGoldArg;
} else {
current = oldGoldArg;
other = goldArg;
}
// issue a warning since this re-mapping procedure is imperfect
String message =
"system argument %s mapped to gold argument %s, but could also be mapped to %s";
message = String.format(message, format(systemArg), format(current), format(other));
UIMAFramework.getLogger(this.getClass()).log(Level.WARNING, message);
}
}
}
}
// replace system arguments with gold arguments where necessary/possible
for (BinaryTextRelation relation : systemBinaryTextRelations) {
for (RelationArgument relArg : Lists.newArrayList(relation.getArg1(), relation.getArg2())) {
IdentifiedAnnotation systemArg = (IdentifiedAnnotation) relArg.getArgument();
IdentifiedAnnotation matchingGoldArg = systemToGold.get(systemArg);
if (matchingGoldArg != null) {
String messageFormat = "replacing system argument %s with gold argument %s";
String message =
String.format(messageFormat, format(systemArg), format(matchingGoldArg));
UIMAFramework.getLogger(this.getClass()).log(Level.WARNING, message);
relArg.setArgument(matchingGoldArg);
}
}
}
}
// update the statistics based on the argument spans of the relation
stats.add(goldBinaryTextRelations, systemBinaryTextRelations, getSpan, getOutcome);
// print errors if requested
if (this.printErrors) {
Map<HashableArguments, BinaryTextRelation> goldMap = Maps.newHashMap();
for (BinaryTextRelation relation : goldBinaryTextRelations) {
goldMap.put(new HashableArguments(relation), relation);
}
Map<HashableArguments, BinaryTextRelation> systemMap = Maps.newHashMap();
for (BinaryTextRelation relation : systemBinaryTextRelations) {
systemMap.put(new HashableArguments(relation), relation);
}
Set<HashableArguments> all = Sets.union(goldMap.keySet(), systemMap.keySet());
List<HashableArguments> sorted = Lists.newArrayList(all);
Collections.sort(sorted);
for (HashableArguments key : sorted) {
BinaryTextRelation goldRelation = goldMap.get(key);
BinaryTextRelation systemRelation = systemMap.get(key);
if (goldRelation == null) {
System.out.println("System added: " + formatRelation(systemRelation));
} else if (systemRelation == null) {
System.out.println("System dropped: " + formatRelation(goldRelation));
} else if (!systemRelation.getCategory().equals(goldRelation.getCategory())) {
String label = systemRelation.getCategory();
System.out.printf("System labeled %s for %s\n", label, formatRelation(systemRelation));
}
}
}
}
System.err.print(stats);
System.err.println();
return stats;
}
/**
 * Renders a relation as {@code category(arg1(type=N), arg2(type=N)) in ...context...},
 * where the context is the document text around both arguments (padded by 50 characters
 * on each side) with line breaks flattened to spaces.
 *
 * @param relation relation whose arguments are {@link IdentifiedAnnotation}s
 * @return a single-line, human-readable description of the relation
 */
private static String formatRelation(BinaryTextRelation relation) {
  IdentifiedAnnotation arg1 = (IdentifiedAnnotation) relation.getArg1().getArgument();
  IdentifiedAnnotation arg2 = (IdentifiedAnnotation) relation.getArg2().getArgument();
  String text = arg1.getCAS().getDocumentText();
  int begin = Math.min(arg1.getBegin(), arg2.getBegin());
  // BUG FIX: previously this was Math.max(arg1.getBegin(), arg2.getBegin()), so the
  // context window ended 50 characters after the later argument's *begin* offset and
  // could truncate that argument's covered text; the window must extend to the later
  // *end* offset, mirroring the begin computation above.
  int end = Math.max(arg1.getEnd(), arg2.getEnd());
  begin = Math.max(0, begin - 50);
  end = Math.min(text.length(), end + 50);
  return String.format(
      "%s(%s(type=%d), %s(type=%d)) in ...%s...",
      relation.getCategory(),
      arg1.getCoveredText(),
      arg1.getTypeID(),
      arg2.getCoveredText(),
      arg2.getTypeID(),
      text.substring(begin, end).replaceAll("[\r\n]", " "));
}
/**
 * Annotator that deletes the cTAKES-generated mention annotations from the system view
 * and then clones the manually annotated mentions and relations from the gold view into
 * the system view.
 */
public static class RemoveCTakesMentionsAndCopyGoldRelations extends JCasAnnotator_ImplBase {

  @Override
  public void process(JCas jCas) throws AnalysisEngineProcessException {
    JCas goldView;
    JCas systemView;
    try {
      goldView = jCas.getView(SHARPXMI.GOLD_VIEW_NAME);
      systemView = jCas.getView(CAS.NAME_DEFAULT_SOFA);
    } catch (CASException e) {
      throw new AnalysisEngineProcessException(e);
    }
    // drop every cTAKES-produced Mention and Modifier from the system view
    List<IdentifiedAnnotation> systemMentions = new ArrayList<IdentifiedAnnotation>();
    systemMentions.addAll(JCasUtil.select(systemView, EventMention.class));
    systemMentions.addAll(JCasUtil.select(systemView, EntityMention.class));
    systemMentions.addAll(JCasUtil.select(systemView, Modifier.class));
    for (IdentifiedAnnotation systemMention : systemMentions) {
      systemMention.removeFromIndexes();
    }
    // clone the gold Mentions and Modifiers into the system view, re-pointing each
    // copy's sofa feature at the system view's sofa before indexing it
    List<IdentifiedAnnotation> goldMentions = new ArrayList<IdentifiedAnnotation>();
    goldMentions.addAll(JCasUtil.select(goldView, EventMention.class));
    goldMentions.addAll(JCasUtil.select(goldView, EntityMention.class));
    goldMentions.addAll(JCasUtil.select(goldView, Modifier.class));
    CasCopier copier = new CasCopier(goldView.getCas(), systemView.getCas());
    Feature sofaFeature = jCas.getTypeSystem().getFeatureByFullName(CAS.FEATURE_FULL_NAME_SOFA);
    for (IdentifiedAnnotation goldMention : goldMentions) {
      Annotation mentionCopy = (Annotation) copier.copyFs(goldMention);
      mentionCopy.setFeatureValue(sofaFeature, systemView.getSofa());
      mentionCopy.addToIndexes();
    }
    // clone the gold relations and their argument wrappers into the system view; the
    // argument annotations themselves were already indexed by the loop above
    for (BinaryTextRelation goldRelation : JCasUtil.select(goldView, BinaryTextRelation.class)) {
      BinaryTextRelation relationCopy = (BinaryTextRelation) copier.copyFs(goldRelation);
      relationCopy.addToIndexes(systemView);
      for (RelationArgument argCopy : Lists.newArrayList(relationCopy.getArg1(), relationCopy.getArg2())) {
        argCopy.addToIndexes(systemView);
      }
    }
  }
}
/**
 * Annotator that removes cTAKES Mentions and Modifiers from the system view,
 * and copies over the manually annotated Mentions and Modifiers from the gold
 * view.
 */
public static class ReplaceCTakesMentionsWithGoldMentions extends JCasAnnotator_ImplBase {

  @Override
  public void process(JCas jCas) throws AnalysisEngineProcessException {
    JCas goldView, systemView;
    try {
      goldView = jCas.getView(SHARPXMI.GOLD_VIEW_NAME);
      systemView = jCas.getView(CAS.NAME_DEFAULT_SOFA);
    } catch (CASException e) {
      throw new AnalysisEngineProcessException(e);
    }
    // remove cTAKES Mentions and Modifiers from system view
    List<IdentifiedAnnotation> cTakesMentions = new ArrayList<IdentifiedAnnotation>();
    cTakesMentions.addAll(JCasUtil.select(systemView, EventMention.class));
    cTakesMentions.addAll(JCasUtil.select(systemView, EntityMention.class));
    cTakesMentions.addAll(JCasUtil.select(systemView, Modifier.class));
    for (IdentifiedAnnotation cTakesMention : cTakesMentions) {
      cTakesMention.removeFromIndexes();
    }
    // copy gold Mentions and Modifiers to the system view
    List<IdentifiedAnnotation> goldMentions = new ArrayList<IdentifiedAnnotation>();
    goldMentions.addAll(JCasUtil.select(goldView, EventMention.class));
    goldMentions.addAll(JCasUtil.select(goldView, EntityMention.class));
    goldMentions.addAll(JCasUtil.select(goldView, Modifier.class));
    CasCopier copier = new CasCopier(goldView.getCas(), systemView.getCas());
    // IMPROVED: the sofa feature was previously looked up inside the loop via
    // copy.getType().getFeatureByBaseName("sofa") once per copied annotation; it is
    // loop-invariant, so resolve it once through the type system, exactly as
    // RemoveCTakesMentionsAndCopyGoldRelations does.
    Feature sofaFeature = jCas.getTypeSystem().getFeatureByFullName(CAS.FEATURE_FULL_NAME_SOFA);
    for (IdentifiedAnnotation goldMention : goldMentions) {
      Annotation copy = (Annotation) copier.copyFs(goldMention);
      // re-point the copy at the system view's sofa, then index it there
      copy.setFeatureValue(sofaFeature, systemView.getSofa());
      copy.addToIndexes();
    }
  }
}
/**
 * Formats an annotation as {@code "covered text"(type=ID)}; returns {@code null} for a
 * {@code null} annotation.
 */
static String format(IdentifiedAnnotation a) {
  if (a == null) {
    return null;
  }
  return String.format("\"%s\"(type=%d)", a.getCoveredText(), a.getTypeID());
}
/**
 * Annotator that, for every EventMention, removes from the indexes any strictly smaller
 * EventMention of the same type ID lying inside its span, logging a warning per removal.
 */
public static class RemoveSmallerEventMentions extends JCasAnnotator_ImplBase {

  @Override
  public void process(JCas jCas) throws AnalysisEngineProcessException {
    // snapshot the outer mentions so removals below cannot disturb the iteration
    for (EventMention outer : Lists.newArrayList(JCasUtil.select(jCas, EventMention.class))) {
      for (EventMention inner : JCasUtil.selectCovered(jCas, EventMention.class, outer)) {
        // "strictly smaller" excludes mentions sharing the exact same span
        boolean strictlySmaller = inner.getBegin() > outer.getBegin() || inner.getEnd() < outer.getEnd();
        if (strictlySmaller && inner.getTypeID() == outer.getTypeID()) {
          String message = String.format("removed %s inside %s", format(inner), format(outer));
          this.getContext().getLogger().log(Level.WARNING, message);
          inner.removeFromIndexes();
        }
      }
    }
  }
}
/**
 * This class is useful for mapping the spans of relation arguments to the relation's
 * category. Two instances are equal iff all four offsets match; ordering is by the
 * earliest begin offset, then the latest end offset, then field by field.
 */
public static class HashableArguments implements Comparable<HashableArguments> {

  protected int arg1begin;

  protected int arg1end;

  protected int arg2begin;

  protected int arg2end;

  public HashableArguments(int arg1begin, int arg1end, int arg2begin, int arg2end) {
    this.arg1begin = arg1begin;
    this.arg1end = arg1end;
    this.arg2begin = arg2begin;
    this.arg2end = arg2end;
  }

  public HashableArguments(Annotation arg1, Annotation arg2) {
    this(arg1.getBegin(), arg1.getEnd(), arg2.getBegin(), arg2.getEnd());
  }

  public HashableArguments(BinaryTextRelation relation) {
    this(relation.getArg1().getArgument(), relation.getArg2().getArgument());
  }

  @Override
  public boolean equals(Object otherObject) {
    boolean result = false;
    if (otherObject instanceof HashableArguments) {
      HashableArguments other = (HashableArguments) otherObject;
      result =
          (this.getClass() == other.getClass()
              && this.arg1begin == other.arg1begin
              && this.arg1end == other.arg1end
              && this.arg2begin == other.arg2begin && this.arg2end == other.arg2end);
    }
    return result;
  }

  @Override
  public int hashCode() {
    return Objects.hashCode(this.arg1begin, this.arg1end, this.arg2begin, this.arg2end);
  }

  @Override
  public String toString() {
    return String.format(
        "%s(%s,%s,%s,%s)",
        this.getClass().getSimpleName(),
        this.arg1begin,
        this.arg1end,
        this.arg2begin,
        this.arg2end);
  }

  /**
   * Compares by the earliest begin offset of either argument, then by the latest end
   * offset, then field by field.
   *
   * BUG FIX: the previous implementation returned +1 from BOTH {@code a.compareTo(b)}
   * and {@code b.compareTo(a)} whenever two unequal instances had the same overall
   * begin and overall end offsets, violating the antisymmetry requirement of the
   * {@link Comparable} contract and leaving sort results order-dependent. The
   * field-by-field tie-break below guarantees that 0 is returned only for equal
   * instances and that the ordering is antisymmetric and transitive.
   */
  @Override
  public int compareTo(HashableArguments that) {
    int thisBegin = Math.min(this.arg1begin, this.arg2begin);
    int thatBegin = Math.min(that.arg1begin, that.arg2begin);
    if (thisBegin != thatBegin) {
      return thisBegin < thatBegin ? -1 : +1;
    }
    int thisEnd = Math.max(this.arg1end, this.arg2end);
    int thatEnd = Math.max(that.arg1end, that.arg2end);
    if (thisEnd != thatEnd) {
      return thisEnd < thatEnd ? -1 : +1;
    }
    // deterministic tie-break over the individual offsets
    if (this.arg1begin != that.arg1begin) {
      return this.arg1begin < that.arg1begin ? -1 : +1;
    }
    if (this.arg1end != that.arg1end) {
      return this.arg1end < that.arg1end ? -1 : +1;
    }
    if (this.arg2begin != that.arg2begin) {
      return this.arg2begin < that.arg2begin ? -1 : +1;
    }
    if (this.arg2end != that.arg2end) {
      return this.arg2end < that.arg2end ? -1 : +1;
    }
    return 0;
  }
}
}
| |
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.vfs.impl.jar;
import com.intellij.notification.NotificationGroup;
import com.intellij.notification.NotificationType;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.PathManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.NotNullLazyValue;
import com.intellij.openapi.util.ShutDownTracker;
import com.intellij.openapi.util.io.FileAttributes;
import com.intellij.openapi.util.io.FileSystemUtil;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.vfs.JarFileSystem;
import com.intellij.openapi.vfs.VfsBundle;
import com.intellij.openapi.vfs.impl.ZipHandler;
import com.intellij.openapi.vfs.newvfs.persistent.FSRecords;
import com.intellij.openapi.vfs.newvfs.persistent.FlushingDaemon;
import com.intellij.util.CommonProcessors;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.MultiMap;
import com.intellij.util.io.*;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.*;
import java.io.DataOutputStream;
import java.nio.charset.Charset;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.zip.ZipFile;
import static com.intellij.util.ObjectUtils.assertNotNull;
import static com.intellij.util.containers.ContainerUtil.newTroveSet;
/**
 * Handler for .jar files in the VFS. When jar copying is enabled it works against a local
 * "mirror" copy of the jar (stored under the system "jars" directory) instead of the
 * original file, so the original can change on disk without corrupting open handles.
 * Mirrors are named either by a hash of the original path or, when the VFS keeps content
 * hashes, by a SHA-1 digest of the jar contents tracked in a persistent cache.
 *
 * @author max
 */
public class JarHandler extends ZipHandler {
  private static final Logger LOG = Logger.getInstance("#com.intellij.openapi.vfs.impl.jar.JarHandler");

  private static final String JARS_FOLDER = "jars";
  // Coarsest file-system timestamp granularity (ms) tolerated when comparing modification
  // times (e.g. FAT stores mtimes with 2-second resolution).
  private static final int FS_TIME_RESOLUTION = 2000;

  private final JarFileSystemImpl myFileSystem;
  // Lazily resolved: either the original jar or its mirror copy.
  // Field is reflectively referenced in tests.
  private volatile File myFileWithMirrorResolved;

  public JarHandler(@NotNull String path) {
    super(path);
    myFileSystem = (JarFileSystemImpl)JarFileSystem.getInstance();
  }

  /**
   * Returns the file that should actually be opened: the mirror copy when copying is
   * enabled and a distinct mirror exists, otherwise the original jar. The result is
   * cached in {@link #myFileWithMirrorResolved}.
   */
  @NotNull
  @Override
  protected File getFileToUse() {
    File fileWithMirrorResolved = myFileWithMirrorResolved;
    if (fileWithMirrorResolved == null) {
      File file = getFile();
      fileWithMirrorResolved = getMirrorFile(file);
      // if the "mirror" resolves to the very same path, keep the original File instance
      if (FileUtil.compareFiles(file, fileWithMirrorResolved) == 0) {
        fileWithMirrorResolved = file;
      }
      myFileWithMirrorResolved = fileWithMirrorResolved;
    }
    return fileWithMirrorResolved;
  }

  /**
   * Builds the entry map for this jar. Reuses an already-cached ZipFile handle when
   * copying is disabled or the mirror has already been resolved; otherwise opens the
   * original file directly and records its attributes.
   */
  @NotNull
  @Override
  protected Map<String, EntryInfo> createEntriesMap() throws IOException {
    FileAccessorCache.Handle<ZipFile> existingZipRef = getCachedZipFileHandle(
      !myFileSystem.isMakeCopyOfJar(getFile()) || myFileWithMirrorResolved != null);
    if (existingZipRef == null) {
      File file = getFile();
      try (ZipFile zipFile = new ZipFile(file)) {
        setFileAttributes(this, file.getPath());
        return buildEntryMapForZipFile(zipFile);
      }
    }

    try {
      return buildEntryMapForZipFile(existingZipRef.get());
    }
    finally {
      existingZipRef.release();
    }
  }

  /**
   * Resolves the mirror copy for {@code originalFile}, creating or refreshing it when
   * needed. Falls back to the original file when copying is disabled, its attributes are
   * unreadable, or the jars folder cannot be created.
   */
  private File getMirrorFile(@NotNull File originalFile) {
    if (!myFileSystem.isMakeCopyOfJar(originalFile)) return originalFile;

    final FileAttributes originalAttributes = FileSystemUtil.getAttributes(originalFile);
    if (originalAttributes == null) return originalFile;

    final String folderPath = getJarsDir();
    if (!new File(folderPath).exists() && !new File(folderPath).mkdirs()) {
      return originalFile;
    }

    if (FSRecords.WE_HAVE_CONTENT_HASHES) {
      return getMirrorWithContentHash(originalFile, originalAttributes);
    }

    // no content hashes available: the mirror name is derived from the original path's hash
    final String mirrorName = originalFile.getName() + "." + Integer.toHexString(originalFile.getPath().hashCode());
    final File mirrorFile = new File(folderPath, mirrorName);
    final FileAttributes mirrorAttributes = FileSystemUtil.getAttributes(mirrorFile);
    return mirrorDiffers(originalAttributes, mirrorAttributes, false) ? copyToMirror(originalFile, mirrorFile) : mirrorFile;
  }

  /**
   * Returns a mirror whose file name embeds a SHA-1 digest of the jar contents, copying
   * the original into the jars folder when no up-to-date snapshot exists yet. Any I/O
   * failure is reported (disabling copying for this path) and the original file is used.
   */
  private File getMirrorWithContentHash(File originalFile, FileAttributes originalAttributes) {
    File mirrorFile = null;
    String jarDir = getJarsDir();

    try {
      String path = originalFile.getPath();
      // fast path: the persistent cache remembers the snapshot for this library path
      CacheLibraryInfo info = CacheLibraryInfo.ourCachedLibraryInfo.get(path);

      if (info != null &&
          originalAttributes.length == info.myFileLength &&
          Math.abs(originalAttributes.lastModified - info.myModificationTime) <= FS_TIME_RESOLUTION) {
        mirrorFile = new File(jarDir, info.mySnapshotPath);
        if (!mirrorDiffers(originalAttributes, FileSystemUtil.getAttributes(mirrorFile), true)) {
          return mirrorFile;
        }
      }

      MessageDigest sha1;
      File tempJarFile = null;

      try {
        // copy the jar into a temp file while computing its SHA-1 in the same pass
        tempJarFile = FileUtil.createTempFile(new File(jarDir), originalFile.getName(), "", true, false);

        try (DataOutputStream os = new DataOutputStream(new FileOutputStream(tempJarFile));
             FileInputStream is = new FileInputStream(originalFile)) {
          sha1 = MessageDigest.getInstance("SHA1"); // used as a cache key, not for security
          // seed the digest with the file length so equal-content/different-length clashes differ
          sha1.update(String.valueOf(originalAttributes.length).getBytes(Charset.defaultCharset()));
          sha1.update((byte)0);

          byte[] buffer = new byte[8192];
          long totalBytes = 0;
          while (true) {
            int read = is.read(buffer);
            if (read < 0) break;
            totalBytes += read;
            sha1.update(buffer, 0, read);
            os.write(buffer, 0, read);
            // stop once the originally observed length is copied, in case the file
            // is concurrently appended to
            if (totalBytes == originalAttributes.length) break;
          }
        }
      }
      catch (IOException ex) {
        File target = mirrorFile != null ? mirrorFile : tempJarFile != null ? tempJarFile : new File(jarDir);
        reportIOErrorWithJars(originalFile, target, ex);
        return originalFile;
      }
      catch (NoSuchAlgorithmException ex) {
        LOG.error(ex);
        return originalFile; // should never happen for sha1
      }

      String mirrorName = getSnapshotName(originalFile.getName(), sha1.digest());
      mirrorFile = new File(jarDir, mirrorName);
      FileAttributes mirrorFileAttributes = FileSystemUtil.getAttributes(mirrorFile);

      if (mirrorFileAttributes == null) {
        // no snapshot with this digest yet: promote the temp copy to be the mirror
        try {
          FileUtil.rename(tempJarFile, mirrorFile);
          FileUtil.setLastModified(mirrorFile, originalAttributes.lastModified);
        }
        catch (IOException ex) {
          reportIOErrorWithJars(originalFile, mirrorFile, ex);
          return originalFile;
        }
      }
      else {
        // a snapshot with the same content hash already exists: discard the temp copy
        FileUtil.delete(tempJarFile);
      }

      info = new CacheLibraryInfo(mirrorFile.getName(), originalAttributes.lastModified, originalAttributes.length);
      CacheLibraryInfo.ourCachedLibraryInfo.put(path, info);
      return mirrorFile;
    }
    catch (IOException ex) {
      // persistent cache failure: mark it corrupted so it is rebuilt, and fall back
      CacheLibraryInfo.ourCachedLibraryInfo.markCorrupted();
      reportIOErrorWithJars(originalFile, mirrorFile != null ? mirrorFile : new File(jarDir, originalFile.getName()), ex);
      return originalFile;
    }
  }

  /**
   * True when the mirror must be refreshed: it is missing, has a different length, or its
   * timestamp differs from the original by more than the FS resolution. With
   * {@code permitOlderMirror} set, a mirror older than the original is still accepted
   * (only a substantially newer mirror counts as different).
   */
  private static boolean mirrorDiffers(FileAttributes original, @Nullable FileAttributes mirror, boolean permitOlderMirror) {
    if (mirror == null || mirror.length != original.length) return true;
    long timeDiff = mirror.lastModified - original.lastModified;
    if (!permitOlderMirror) timeDiff = Math.abs(timeDiff);
    return timeDiff > FS_TIME_RESOLUTION;
  }

  /** Builds {@code name.<hex digest>} — the file name used for a content-hashed snapshot. */
  private static String getSnapshotName(String name, byte[] digest) {
    StringBuilder builder = new StringBuilder(name.length() + 1 + 2 * digest.length);
    builder.append(name).append('.');
    for (byte b : digest) {
      builder.append(Character.forDigit((b & 0xF0) >> 4, 16));
      builder.append(Character.forDigit(b & 0xF, 16));
    }
    return builder.toString();
  }

  /** Directory holding jar snapshots: the "jars_dir" system property, or {@code <system>/jars}. */
  @NotNull
  private static String getJarsDir() {
    String dir = System.getProperty("jars_dir");
    return dir == null ? PathManager.getSystemPath() + File.separatorChar + JARS_FOLDER : dir;
  }

  /**
   * Copies {@code original} to {@code mirror}, showing progress when a ProgressIndicator
   * is available; on failure the error is reported and the original file is returned.
   */
  @NotNull
  private File copyToMirror(@NotNull File original, @NotNull File mirror) {
    ProgressIndicator progress = ProgressManager.getInstance().getProgressIndicator();
    if (progress != null) {
      progress.pushState();
      progress.setText(VfsBundle.message("jar.copy.progress", original.getPath()));
      progress.setFraction(0);
    }

    try {
      FileUtil.copy(original, mirror);
    }
    catch (final IOException e) {
      reportIOErrorWithJars(original, mirror, e);
      return original;
    }
    finally {
      if (progress != null) {
        progress.popState();
      }
    }

    return mirror;
  }

  /**
   * Value stored in the persistent library-path -> snapshot mapping: the snapshot file
   * name plus the original jar's modification time and length, used for staleness checks.
   */
  private static class CacheLibraryInfo {
    private final String mySnapshotPath;
    private final long myModificationTime;
    private final long myFileLength;

    // persistent map: original library path -> snapshot info, stored next to the snapshots
    private static final PersistentHashMap<String, CacheLibraryInfo> ourCachedLibraryInfo;
    private static final int VERSION = 1 + (PersistentHashMapValueStorage.COMPRESSION_ENABLED ? 15 : 0);

    static {
      File jarsDir = new File(getJarsDir());
      File snapshotInfoFile = new File(jarsDir, "snapshots_info");
      int currentVersion = -1;
      long currentVfsVersion = -1;

      File versionFile = getVersionFile(snapshotInfoFile);
      if (versionFile.exists()) {
        try (DataInputStream versionStream = new DataInputStream(new BufferedInputStream(new FileInputStream(versionFile)))) {
          currentVersion = DataInputOutputUtil.readINT(versionStream);
          currentVfsVersion = DataInputOutputUtil.readTIME(versionStream);
        }
        catch (IOException ignore) { }
      }

      if (currentVfsVersion != FSRecords.getCreationTimestamp()) {
        // the VFS was rebuilt, so every snapshot is orphaned: start from scratch
        FileUtil.deleteWithRenaming(jarsDir);
        FileUtil.createDirectory(jarsDir);
        saveVersion(versionFile);
      }
      else if (currentVersion != VERSION) {
        // only the mapping's storage format changed: drop the mapping, keep the snapshots
        PersistentHashMap.deleteFilesStartingWith(snapshotInfoFile);
        saveVersion(versionFile);
      }

      PersistentHashMap<String, CacheLibraryInfo> info = null;
      // two attempts: if opening fails the first time, delete the map files and retry once
      for (int i = 0; i < 2; ++i) {
        try {
          info = new PersistentHashMap<>(snapshotInfoFile, EnumeratorStringDescriptor.INSTANCE, new DataExternalizer<CacheLibraryInfo>() {
            @Override
            public void save(@NotNull DataOutput out, CacheLibraryInfo value) throws IOException {
              IOUtil.writeUTF(out, value.mySnapshotPath);
              DataInputOutputUtil.writeTIME(out, value.myModificationTime);
              DataInputOutputUtil.writeLONG(out, value.myFileLength);
            }

            @Override
            public CacheLibraryInfo read(@NotNull DataInput in) throws IOException {
              return new CacheLibraryInfo(IOUtil.readUTF(in), DataInputOutputUtil.readTIME(in), DataInputOutputUtil.readLONG(in));
            }
          });
          if (i == 0) removeStaleJarFilesIfNeeded(snapshotInfoFile, info);
          break;
        }
        catch (IOException ex) {
          if (info != null) {
            try {
              info.close();
            }
            catch (IOException ignored) {
            }
          }
          PersistentHashMap.deleteFilesStartingWith(snapshotInfoFile);
          saveVersion(versionFile);
        }
      }
      assert info != null;
      ourCachedLibraryInfo = info;

      // keep the persistent map flushed periodically and closed on shutdown/disposal
      FlushingDaemon.everyFiveSeconds(CacheLibraryInfo::flushCachedLibraryInfos);
      ShutDownTracker.getInstance().registerShutdownTask(CacheLibraryInfo::flushCachedLibraryInfos);
      Disposer.register(ApplicationManager.getApplication(), () -> {
        try {
          ourCachedLibraryInfo.close();
        }
        catch (IOException ignored) {
        }
      });
    }

    /** The side-car file recording the map's format version and the VFS creation stamp. */
    @NotNull
    private static File getVersionFile(File file) {
      return new File(file.getParentFile(), file.getName() + ".version");
    }

    /**
     * Garbage-collects snapshots no longer referenced by any existing library path. Runs
     * at most once every 30 days; the version file's mtime marks the last run.
     */
    private static void removeStaleJarFilesIfNeeded(File snapshotInfoFile, PersistentHashMap<String, CacheLibraryInfo> info) throws IOException {
      File versionFile = getVersionFile(snapshotInfoFile);
      long lastModified = versionFile.lastModified();
      if (System.currentTimeMillis() - lastModified < 30 * 24 * 60 * 60 * 1000L) {
        return;
      }

      // snapshotInfo is persistent mapping of project library path -> jar snapshot path
      // Stale jars are the jars that do not exist with registered paths, to remove them:
      // - Take all snapshot library files in jar directory
      // - Collect librarySnapshot -> projectLibraryPaths and existing projectLibraryPath -> librarySnapshot
      // - Remove all projectLibraryPaths that doesn't exist from persistent mapping
      // - Remove jar library snapshots that have no projectLibraryPath
      Set<String> availableLibrarySnapshots = newTroveSet(assertNotNull(snapshotInfoFile.getParentFile().list(new FilenameFilter() {
        @Override
        public boolean accept(File dir, String name) {
          // snapshot files end with a 40-character hexadecimal SHA-1 "extension"
          int lastDotPosition = name.lastIndexOf('.');
          if (lastDotPosition == -1) return false;
          String extension = name.substring(lastDotPosition + 1);
          if (extension.length() != 40 || !consistsOfHexLetters(extension)) return false;
          return true;
        }

        private boolean consistsOfHexLetters(String extension) {
          for (int i = 0; i < extension.length(); ++i) {
            if (Character.digit(extension.charAt(i), 16) == -1) return false;
          }
          return true;
        }
      })));

      final List<String> invalidLibraryFilePaths = ContainerUtil.newArrayList();
      final List<String> allLibraryFilePaths = ContainerUtil.newArrayList();
      MultiMap<String, String> jarSnapshotFileToLibraryFilePaths = new MultiMap<>();
      Set<String> validLibraryFilePathToJarSnapshotFilePaths = newTroveSet();

      info.processKeys(new CommonProcessors.CollectProcessor<>(allLibraryFilePaths));
      for (String filePath:allLibraryFilePaths) {
        CacheLibraryInfo libraryInfo = info.get(filePath);
        if (libraryInfo == null) continue;
        jarSnapshotFileToLibraryFilePaths.putValue(libraryInfo.mySnapshotPath, filePath);
        if (new File(filePath).exists()) {
          validLibraryFilePathToJarSnapshotFilePaths.add(filePath);
        }
        else {
          invalidLibraryFilePaths.add(filePath);
        }
      }

      for (String invalidLibraryFilePath : invalidLibraryFilePaths) {
        LOG.info("removing stale library reference:" + invalidLibraryFilePath);
        info.remove(invalidLibraryFilePath);
      }

      // a snapshot survives when at least one still-existing library path references it
      for (Map.Entry<String, Collection<String>> e: jarSnapshotFileToLibraryFilePaths.entrySet()) {
        for (String libraryFilePath:e.getValue()) {
          if (validLibraryFilePathToJarSnapshotFilePaths.contains(libraryFilePath)) {
            availableLibrarySnapshots.remove(e.getKey());
            break;
          }
        }
      }

      for (String availableLibrarySnapshot:availableLibrarySnapshots) {
        File librarySnapshotFileToDelete = new File(snapshotInfoFile.getParentFile(), availableLibrarySnapshot);
        LOG.info("removing stale library snapshot:" + librarySnapshotFileToDelete);
        FileUtil.delete(librarySnapshotFileToDelete);
      }

      saveVersion(versionFile); // time stamp will change to start another time interval when stale jar files are tracked
    }

    /** Writes the format version and the current VFS creation stamp to the version file. */
    private static void saveVersion(File versionFile) {
      try (DataOutputStream versionOutputStream = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(versionFile)))) {
        DataInputOutputUtil.writeINT(versionOutputStream, VERSION);
        DataInputOutputUtil.writeTIME(versionOutputStream, FSRecords.getCreationTimestamp());
      }
      catch (IOException ignore) { }
    }

    private static void flushCachedLibraryInfos() {
      if (ourCachedLibraryInfo.isDirty()) ourCachedLibraryInfo.force();
    }

    private CacheLibraryInfo(@NotNull String path, long time, long length) {
      mySnapshotPath = path;
      myModificationTime = time;
      myFileLength = length;
    }

    @Override
    public boolean equals(Object o) {
      if (this == o) return true;
      if (o == null || getClass() != o.getClass()) return false;

      CacheLibraryInfo info = (CacheLibraryInfo)o;

      if (myFileLength != info.myFileLength) return false;
      if (myModificationTime != info.myModificationTime) return false;
      if (!mySnapshotPath.equals(info.mySnapshotPath)) return false;
      return true;
    }

    @Override
    public int hashCode() {
      int result = mySnapshotPath.hashCode();
      result = 31 * result + (int)(myModificationTime ^ (myModificationTime >>> 32));
      result = 31 * result + (int)(myFileLength ^ (myFileLength >>> 32));
      return result;
    }
  }

  // lazily created balloon notification group for jar-copy failures
  private static final NotNullLazyValue<NotificationGroup> ERROR_COPY_NOTIFICATION = new NotNullLazyValue<NotificationGroup>() {
    @NotNull
    @Override
    protected NotificationGroup compute() {
      return NotificationGroup.balloonGroup(VfsBundle.message("jar.copy.error.title"));
    }
  };

  /**
   * Logs the failure, disables jar copying for this path (so subsequent access falls back
   * to the original file), and shows an error balloon to the user.
   */
  private void reportIOErrorWithJars(File original, File target, IOException e) {
    LOG.warn(e);
    String path = original.getPath();
    myFileSystem.setNoCopyJarForPath(path);
    String message = VfsBundle.message("jar.copy.error.message", path, target.getPath(), e.getMessage());
    ERROR_COPY_NOTIFICATION.getValue().createNotification(message, NotificationType.ERROR).notify(null);
  }
}
| |
package ru.anatoli.addressbook.models;
import com.google.gson.annotations.Expose;
import org.hibernate.annotations.Type;
import javax.persistence.*;
import java.io.File;
import java.util.HashSet;
import java.util.Set;
/**
* Created by anatoli.anukevich on 6/27/2017.
*/
@Entity
@Table(name = "addressbook")
public class ContactData {
  // Primary key of the "addressbook" row. Defaults to Integer.MAX_VALUE for instances that
  // have not been given an id yet. NOTE(review): there is no @GeneratedValue — presumably
  // the id is assigned by the application/database; confirm.
  @Id
  @Column(name = "id")
  @Type(type = "int")
  private int id = Integer.MAX_VALUE;

  // name fields; @Expose marks each field for Gson serialization
  @Expose
  @Column(name = "firstname")
  @Type(type = "string")
  private String firstName;

  @Expose
  @Column(name = "middlename")
  @Type(type = "string")
  private String middleName;

  @Expose
  @Column(name = "lastname")
  @Type(type = "string")
  private String lastName;

  @Expose
  @Column(name = "nickname")
  @Type(type = "string")
  private String nickname;

  // filesystem path of the contact's photo (exposed as java.io.File via the accessors)
  @Expose
  @Column(name = "photo")
  @Type(type = "text")
  private String photo;

  @Expose
  @Column(name = "title")
  @Type(type = "string")
  private String title;

  @Expose
  @Column(name = "company")
  @Type(type = "string")
  private String company;

  @Expose
  @Column(name = "address")
  @Type(type = "text")
  private String address;

  // aggregated phone representation; @Transient keeps it out of the database
  @Expose
  @Transient //Ignoring
  private String allPhones;

  @Expose
  @Column(name = "home")
  @Type(type = "text")
  private String homePhone;

  @Expose
  @Column(name = "mobile")
  @Type(type = "text")
  private String mobilePhone;

  @Expose
  @Column(name = "work")
  @Type(type = "text")
  private String workPhone;

  @Expose
  @Column(name = "fax")
  @Type(type = "text")
  private String fax;

  // aggregated e-mail representation; not persisted
  @Expose
  @Transient //Ignoring
  private String allEmails;

  @Expose
  @Column(name = "email")
  @Type(type = "text")
  private String email;

  @Expose
  @Column(name = "email2")
  @Type(type = "text")
  private String email2;

  @Expose
  @Column(name = "email3")
  @Type(type = "text")
  private String email3;

  @Expose
  @Column(name = "homepage")
  @Type(type = "text")
  private String homepage;

  // NOTE(review): the day columns are TINYINT in the DB but mapped to String here —
  // presumably to tolerate empty values in the UI; confirm against the schema
  @Expose
  @Column(name = "bday", columnDefinition = "TINYINT")
  private String birthDay;

  @Expose
  @Column(name = "bmonth")
  @Type(type = "string")
  private String birthMonth;

  @Expose
  @Column(name = "byear")
  @Type(type = "string")
  private String birthYear;

  @Expose
  @Column(name = "aday", columnDefinition = "TINYINT")
  private String anniversaryDay;

  @Expose
  @Column(name = "amonth")
  @Type(type = "string")
  private String anniversaryMonth;

  @Expose
  @Column(name = "ayear")
  @Type(type = "string")
  private String anniversaryYear;

  @Expose
  @Column(name = "address2")
  @Type(type = "text")
  private String secondaryAddress;

  @Expose
  @Column(name = "phone2")
  @Type(type = "text")
  private String secondaryPhone;

  @Expose
  @Column(name = "notes")
  @Type(type = "text")
  private String secondaryNotes;

  // many-to-many link to groups via the "address_in_groups" join table; EAGER fetching so
  // the groups collection is usable after the Hibernate session closes
  @ManyToMany(fetch = FetchType.EAGER)
  @JoinTable(name = "address_in_groups",
          joinColumns = @JoinColumn(name = "id"),
          inverseJoinColumns = @JoinColumn(name = "group_id"))
  private Set<GroupData> groups = new HashSet<GroupData>(0);
// --- Getters ---

/** @return the database id, or Integer.MAX_VALUE when no id has been assigned yet */
public int getContactId() {
  return this.id;
}

public String getFirstName() {
  return this.firstName;
}

public String getMiddleName() {
  return this.middleName;
}

public String getLastName() {
  return this.lastName;
}

public String getNickname() {
  return this.nickname;
}
/**
 * Returns the contact's photo as a {@link File} built from the stored path.
 *
 * ROBUSTNESS FIX: previously this always called {@code new File(photo)}, which throws a
 * NullPointerException when no photo path was set (java.io.File rejects a null pathname);
 * it now returns {@code null} in that case so callers can check for absence.
 *
 * @return the photo file, or {@code null} if no photo path is stored
 */
public File getPhoto() {
  return photo == null ? null : new File(photo);
}
public String getTitle() {
  return this.title;
}

public String getCompany() {
  return this.company;
}

public String getAddress() {
  return this.address;
}

/** Aggregated phone string (transient — not read from the database). */
public String getAllPhones() {
  return this.allPhones;
}

public String getHomePhone() {
  return this.homePhone;
}

public String getMobilePhone() {
  return this.mobilePhone;
}

public String getWorkPhone() {
  return this.workPhone;
}

public String getFax() {
  return this.fax;
}

/** Aggregated e-mail string (transient — not read from the database). */
public String getAllEmails() {
  return this.allEmails;
}

public String getEmail() {
  return this.email;
}

public String getEmail2() {
  return this.email2;
}

public String getEmail3() {
  return this.email3;
}

public String getHomepage() {
  return this.homepage;
}

public String getBirthDay() {
  return this.birthDay;
}

public String getBirthMonth() {
  return this.birthMonth;
}

public String getBirthYear() {
  return this.birthYear;
}

public String getAnniversaryDay() {
  return this.anniversaryDay;
}

public String getAnniversaryMonth() {
  return this.anniversaryMonth;
}

public String getAnniversaryYear() {
  return this.anniversaryYear;
}

public String getSecondaryAddress() {
  return this.secondaryAddress;
}

public String getSecondaryPhone() {
  return this.secondaryPhone;
}

public String getSecondaryNotes() {
  return this.secondaryNotes;
}

/** @return the groups this contact belongs to (eagerly fetched). */
public Set<GroupData> getGroups() {
  return this.groups;
}
//Setters
// Fluent, builder-style setters: each assigns a single field and returns this instance so
// calls can be chained, e.g. new ContactData().withFirstName("a").withLastName("b").
public ContactData withContactId(int id) {
  this.id = id;
  return this;
}

public ContactData withFirstName(String firstName) {
  this.firstName = firstName;
  return this;
}

public ContactData withMiddleName(String middleName) {
  this.middleName = middleName;
  return this;
}

public ContactData withLastName(String lastName) {
  this.lastName = lastName;
  return this;
}

public ContactData withNickname(String nickname) {
  this.nickname = nickname;
  return this;
}

// stores only the file's path; NOTE(review): throws NullPointerException if photo is null
public ContactData withPhoto(File photo) {
  this.photo = photo.getPath();
  return this;
}

public ContactData withTitle(String title) {
  this.title = title;
  return this;
}

public ContactData withCompany(String company) {
  this.company = company;
  return this;
}

public ContactData withAddress(String address) {
  this.address = address;
  return this;
}

public ContactData withAllPhones(String allPhones) {
  this.allPhones = allPhones;
  return this;
}

public ContactData withHomePhone(String homePhone) {
  this.homePhone = homePhone;
  return this;
}

public ContactData withMobilePhone(String mobilePhone) {
  this.mobilePhone = mobilePhone;
  return this;
}

public ContactData withWorkPhone(String workPhone) {
  this.workPhone = workPhone;
  return this;
}

public ContactData withFax(String fax) {
  this.fax = fax;
  return this;
}

public ContactData withAllEmails(String allEmails) {
  this.allEmails = allEmails;
  return this;
}

public ContactData withEmail(String email) {
  this.email = email;
  return this;
}

public ContactData withEmail2(String email2) {
  this.email2 = email2;
  return this;
}

public ContactData withEmail3(String email3) {
  this.email3 = email3;
  return this;
}

public ContactData withHomepage(String homepage) {
  this.homepage = homepage;
  return this;
}

public ContactData withBirthDay(String birthDay) {
  this.birthDay = birthDay;
  return this;
}

public ContactData withBirthMonth(String birthMonth) {
  this.birthMonth = birthMonth;
  return this;
}

public ContactData withBirthYear(String birthYear) {
  this.birthYear = birthYear;
  return this;
}

public ContactData withAnniversaryDay(String anniversaryDay) {
  this.anniversaryDay = anniversaryDay;
  return this;
}

public ContactData withAnniversaryMonth(String anniversaryMonth) {
  this.anniversaryMonth = anniversaryMonth;
  return this;
}

public ContactData withAnniversaryYear(String anniversaryYear) {
  this.anniversaryYear = anniversaryYear;
  return this;
}

public ContactData withSecondaryAddress(String secondaryAddress) {
  this.secondaryAddress = secondaryAddress;
  return this;
}

public ContactData withSecondaryPhone(String secondaryPhone) {
  this.secondaryPhone = secondaryPhone;
  return this;
}
public ContactData withSecondaryNotes(String secondaryNotes) {
this.secondaryNotes = secondaryNotes;
return this;
}
public void setGroups(Set<GroupData> groups) {
this.groups = groups;
}
/**
 * Identity is defined by {@code id} and {@code firstName} only; the other
 * fields are deliberately ignored (consistent with {@link #hashCode()}).
 */
@Override
public boolean equals(Object obj) {
    if (obj == this) {
        return true;
    }
    if (obj == null || obj.getClass() != getClass()) {
        return false;
    }
    ContactData other = (ContactData) obj;
    if (id != other.id) {
        return false;
    }
    return firstName == null ? other.firstName == null : firstName.equals(other.firstName);
}
/** Combines {@code id} and {@code firstName}'s hash (0 when null), matching equals(). */
@Override
public int hashCode() {
    int hash = id;
    hash = 31 * hash + (firstName == null ? 0 : firstName.hashCode());
    return hash;
}
}
| |
/*******************************************************************************
* Copyright 2015, 2016 Junichi Tatemura
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
package com.nec.strudel.workload.session;
import com.nec.strudel.exceptions.ConfigException;
import com.nec.strudel.metrics.Output;
import com.nec.strudel.param.ParamConfig;
import com.nec.strudel.session.InteractionFactory;
import com.nec.strudel.util.ClassUtil;
import com.nec.strudel.workload.job.ConfigParam;
import com.nec.strudel.workload.session.runner.SessionExecFactory;
import com.nec.strudel.workload.session.runner.SessionExecs;
/**
* Session config contains the following elements:
*
* <pre>
* "Session" : {
* "maxCount" : 0,
* "maxTime": 0,
* "minTime" : 0,
* "Package" : null,
* "Factory" : null,
* "Interactions": {},
* "Transitions": {},
* "ThinkTime" : null
* "Params" : { NAME : VALUE }
* "Runner" : null
* }
* </pre>
*
* Interactions contains a key-value:
*
* <pre>
* NAME : {
* "prob" : 0,
* "ThinkTime" : null
* }
* </pre>
*
* Transition contains a key-value:
*
* <pre>
* NAME : {
* NAME_1 : { "prob" : 1},
* ...
* NAME_N : { "prob" : 1}
* }
* </pre>
*
* @param <T>
*/
public class SessionConfig<T> {
    /** Name of the configuration element that holds the session definition. */
    public static final String SESSION = "session";

    /**
     * Reads the "session" element of the given parameter as a SessionConfig.
     */
    @SuppressWarnings("unchecked")
    public static <T> SessionConfig<T> create(ConfigParam param) {
        return param.getObject(SESSION, SessionConfig.class);
    }

    // Session termination bounds ("maxCount", "maxTime", "minTime").
    // NOTE(review): 0 presumably means "no limit" - confirm in SessionFactory.
    private int maxCount = 0;
    private int maxTime = 0;
    private int minTime = 0;
    // Parameter generation configuration (the "Params" element).
    private ParamConfig params = ParamConfig.empty();
    // Default think time between interactions; defaults to no think time.
    private ThinkTime thinkTime = ThinkTime.noTime();
    // Interaction instantiation: either a package to scan ("Package") or an
    // explicit factory class ("Factory"); see createInteractionFactory().
    private String packageName = "";
    private String factory = "";
    // Session executor selection: a symbolic type name or an explicit factory
    // class; see createSessionExecFactory() for the precedence.
    private String execType = "";
    private String execFactory = "";
    private int sessionConcurrency = 0;
    private InteractionSet interactions = InteractionSet.empty();
    private TransitionSet transitions = TransitionSet.empty();

    public SessionConfig() {
    }

    public int getSessionConcurrency() {
        return sessionConcurrency;
    }

    public void setSessionConcurrency(int sessionConcurrency) {
        this.sessionConcurrency = sessionConcurrency;
    }

    public ParamConfig getParams() {
        return params;
    }

    public void setParams(ParamConfig params) {
        this.params = params;
    }

    public ThinkTime getThinkTime() {
        return thinkTime;
    }

    public void setThinkTime(ThinkTime thinkTime) {
        this.thinkTime = thinkTime;
    }

    public int getMaxCount() {
        return maxCount;
    }

    public void setMaxCount(int maxCount) {
        this.maxCount = maxCount;
    }

    public int getMaxTime() {
        return maxTime;
    }

    public void setMaxTime(int maxTime) {
        this.maxTime = maxTime;
    }

    public int getMinTime() {
        return minTime;
    }

    public void setMinTime(int minTime) {
        this.minTime = minTime;
    }

    public String getPackageName() {
        return packageName;
    }

    public void setPackageName(String packageName) {
        this.packageName = packageName;
    }

    public String getFactory() {
        return factory;
    }

    public void setFactory(String factory) {
        this.factory = factory;
    }

    public String getExecType() {
        return execType;
    }

    public void setExecType(String execType) {
        this.execType = execType;
    }

    public String getExecFactory() {
        return execFactory;
    }

    public void setExecFactory(String execFactory) {
        this.execFactory = execFactory;
    }

    public InteractionSet getInteractions() {
        return interactions;
    }

    public void setInteractions(InteractionSet interactions) {
        this.interactions = interactions;
    }

    public TransitionSet getTransitions() {
        return transitions;
    }

    public void setTransitions(TransitionSet transitions) {
        this.transitions = transitions;
    }

    /**
     * Builds a SessionFactory from the limits, interactions, transitions and
     * think time configured here.
     *
     * @param classPath class path used to load interaction classes
     */
    public SessionFactory<T> createSessionFactory(String classPath) {
        SessionFactory.Builder<T> builder = new SessionFactory.Builder<T>()
                .maxCount(maxCount)
                .maxTime(maxTime)
                .minTime(minTime);
        InteractionBuilder ib = InteractionBuilder.builder(interactions,
                transitions);
        ib.build(createInteractionFactory(classPath), builder);
        return builder
                .waitTime(ib.buildWaitTime(thinkTime))
                .build();
    }

    /**
     * Creates the interaction factory: a configured package scan takes
     * precedence over an explicit factory class.
     *
     * @throws ConfigException if neither packageName nor factory is set
     */
    public InteractionFactory<T> createInteractionFactory(String classPath) {
        if (!packageName.isEmpty()) {
            return new PackageInteractionFactory<T>(packageName, classPath);
        } else if (!factory.isEmpty()) {
            return ClassUtil.create(factory, classPath);
        } else {
            throw new ConfigException("missing packageName or factory");
        }
    }

    /** Creates the session executor factory and initializes it with this config. */
    public SessionExecFactory<T> getSessionExecFactory(String classPath) {
        SessionExecFactory<T> factory = createSessionExecFactory(classPath);
        factory.initialize(this);
        return factory;
    }

    /** Returns the output (metrics) definition of the configured executor. */
    public Output output(String classPath) {
        return createSessionExecFactory(classPath).output(this);
    }

    // Executor precedence: symbolic execType first, then an explicit
    // execFactory class, otherwise a default derived from this config.
    private SessionExecFactory<T> createSessionExecFactory(String classPath) {
        if (!execType.isEmpty()) {
            return SessionExecs.createFactory(execType, classPath);
        } else if (!execFactory.isEmpty()) {
            return ClassUtil.create(execFactory, classPath);
        }
        return SessionExecs.createFactory(
                this, classPath);
    }
}
| |
package ru.histone.v2.utils;
import org.apache.commons.lang3.StringUtils;
import ru.histone.v2.evaluator.Converter;
import ru.histone.v2.evaluator.node.EvalNode;
import java.io.Serializable;
import java.time.DayOfWeek;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.temporal.ChronoField;
import java.time.temporal.ChronoUnit;
import java.time.temporal.TemporalAdjuster;
import java.time.temporal.TemporalAdjusters;
import java.util.*;
import java.util.concurrent.CompletableFuture;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* @author Gali Alykoff
*/
public class DateUtils implements Serializable {
    // Year range representable by a JavaScript Date; used as sanity bounds.
    public static final int JS_MAX_BOUND_OF_YEAR = 275_761;
    public static final int JS_MIN_BOUND_OF_YEAR = 1_000;
    public static final int MIN_MONTH = 1;
    public static final int MAX_MONTH = 12;
    public static final int MIN_DAY = 1;
    // Period designators used in offset expressions, e.g. "+2D-1h".
    public static final String DAY_SYMBOL = "D";
    public static final String WEEK_SYMBOL = "W";
    public static final String MONTH_SYMBOL = "M";
    public static final String YEAR_SYMBOL = "Y";
    public static final String HOUR_SYMBOL = "h";
    public static final String MINUTE_SYMBOL = "m";
    public static final String SECOND_SYMBOL = "s";
    // One offset term: sign (^, $, + or -), optional count, period designator.
    private static final Pattern PATTERN_DELTA_DATE = Pattern.compile("([\\^\\$+-])(\\d*)([DMWYhms])");
    private static final String NEGATIVE_SIGN = "-";
    private static final String POSITIVE_SIGN = "+";
    // "^" snaps to the start of the period, "$" to its end.
    private static final String START_SIGN = "^";
    private static final String END_SIGN = "$";
    // Adjusters that move a temporal to the last instant (second resolution)
    // of the current minute / hour / day; each chains into the previous one.
    private static final TemporalAdjuster LAST_SECOND_OF_MINUTE_ADJUSTER = temporal -> temporal
            .with(ChronoField.SECOND_OF_MINUTE, 59);
    private static final TemporalAdjuster LAST_MINUTE_OF_HOUR_ADJUSTER = temporal -> temporal
            .with(ChronoField.MINUTE_OF_HOUR, 59).with(LAST_SECOND_OF_MINUTE_ADJUSTER);
    private static final TemporalAdjuster LAST_HOUR_OF_DAY_ADJUSTER = temporal -> temporal
            .with(ChronoField.HOUR_OF_DAY, 23).with(LAST_MINUTE_OF_HOUR_ADJUSTER);

    /**
     * Returns the number of days in the given month of the given year.
     * <p>
     * NOTE(review): the month is passed straight to GregorianCalendar, whose
     * month parameter is 0-based (0 = January). Confirm that callers pass
     * 0-based months; with a 1-based month this reports the length of the
     * following month.
     *
     * @throws IllegalArgumentException if the combination is invalid
     *                                  (the calendar is non-lenient)
     */
    public static int getDaysInMonth(int year, int month) throws IllegalArgumentException {
        final int day = 1;
        final Calendar calendar = new GregorianCalendar(year, month, day);
        calendar.setFirstDayOfWeek(Calendar.MONDAY);
        calendar.setLenient(false);
        // Force field validation: getTimeInMillis() throws on an invalid date.
        calendar.getTimeInMillis();
        return calendar.getActualMaximum(Calendar.DAY_OF_MONTH);
    }

    /**
     * Applies an offset expression such as "+1M-2D^D" to the given date.
     * Terms are applied left to right; text that does not match the term
     * pattern is skipped.
     */
    public static LocalDateTime applyOffset(LocalDateTime date, String offset) {
        LocalDateTime result = date;
        final Matcher matcher = PATTERN_DELTA_DATE.matcher(offset);
        while (matcher.find()) {
            final String sign = matcher.group(1);
            // A missing count (e.g. "^D") defaults to 0.
            final int num = StringUtils.isNotEmpty(matcher.group(2)) ? Integer.parseInt(matcher.group(2)) : 0;
            final String period = matcher.group(3);
            switch (period) {
                case DAY_SYMBOL:
                    result = applyDayOffset(sign, result, num);
                    break;
                case WEEK_SYMBOL:
                    result = applyWeekOffset(sign, result, num);
                    break;
                case MONTH_SYMBOL:
                    result = applyMonthOffset(sign, result, num);
                    break;
                case YEAR_SYMBOL:
                    result = applyYearOffset(sign, result, num);
                    break;
                case HOUR_SYMBOL:
                    result = applyHourOffset(sign, result, num);
                    break;
                case MINUTE_SYMBOL:
                    result = applyMinuteOffset(sign, result, num);
                    break;
                case SECOND_SYMBOL:
                    result = applySecondOffset(sign, result, num);
                    break;
            }
        }
        return result;
    }

    /**
     * Decomposes a date into an ordered map of EvalNodes keyed by
     * "day", "month", "year", "hour", "minute" and "second".
     */
    public static Map<String, EvalNode> createMapFromDate(Converter converter, LocalDateTime date) {
        final Map<String, EvalNode> res = new LinkedHashMap<>();
        res.put("day", converter.createEvalNode(date.getDayOfMonth()));
        res.put("month", converter.createEvalNode(date.getMonthValue()));
        res.put("year", converter.createEvalNode(date.getYear()));
        res.put("hour", converter.createEvalNode(date.getHour()));
        res.put("minute", converter.createEvalNode(date.getMinute()));
        res.put("second", converter.createEvalNode(date.getSecond()));
        return res;
    }

    /**
     * Builds a LocalDateTime from a map of "year"/"month"/"day"/"hour"/
     * "minute"/"second" nodes. The year is mandatory; the remaining
     * components are read in order of significance and the first absent one
     * stops parsing (constructDate fills defaults for the rest). Returns null
     * when a present component is not a pure integer or is out of range.
     */
    public static LocalDateTime createDate(Converter converter, Map<String, EvalNode> map) {
        EvalNode yearNode = map.get("year");
        final int yearValue;
        if (yearNode != null) {
            Optional<Integer> year = converter.tryPureIntegerValue(yearNode);
            if (year.isPresent()) {
                yearValue = year.get();
            } else {
                return null;
            }
        } else {
            return null;
        }
        EvalNode monthNode = map.get("month");
        final int monthValue;
        if (monthNode != null) {
            // Exclusive bounds: valid months are 1..12.
            ProcessResult res = processValue(converter, monthNode, 0, 13);
            if (res.future != null) {
                return null;
            }
            monthValue = res.value;
        } else {
            return constructDate(yearValue, -1, -1, -1, -1, -1);
        }
        LocalDate currentDate = LocalDate.of(yearValue, monthValue, 1);
        int daysCount = currentDate.getMonth().length(currentDate.isLeapYear());
        EvalNode dayNode = map.get("day");
        final int dayValue;
        if (dayNode != null) {
            // Valid days are 1..daysCount for the resolved year/month.
            ProcessResult res = processValue(converter, dayNode, 0, daysCount + 1);
            if (res.future != null) {
                return null;
            }
            dayValue = res.value;
        } else {
            return constructDate(yearValue, monthValue, -1, -1, -1, -1);
        }
        EvalNode hourNode = map.get("hour");
        final int hourValue;
        if (hourNode != null) {
            // Valid hours are 0..23.
            ProcessResult res = processValue(converter, hourNode, -1, 24);
            if (res.future != null) {
                return null;
            }
            hourValue = res.value;
        } else {
            return constructDate(yearValue, monthValue, dayValue, -1, -1, -1);
        }
        EvalNode minuteNode = map.get("minute");
        final int minuteValue;
        if (minuteNode != null) {
            // Valid minutes are 0..59.
            ProcessResult res = processValue(converter, minuteNode, -1, 60);
            if (res.future != null) {
                return null;
            }
            minuteValue = res.value;
        } else {
            return constructDate(yearValue, monthValue, dayValue, hourValue, -1, -1);
        }
        EvalNode secondNode = map.get("second");
        final int secondValue;
        if (secondNode != null) {
            // Valid seconds are 0..59.
            ProcessResult res = processValue(converter, secondNode, -1, 60);
            if (res.future != null) {
                return null;
            }
            secondValue = res.value;
        } else {
            return constructDate(yearValue, monthValue, dayValue, hourValue, minuteValue, -1);
        }
        return constructDate(yearValue, monthValue, dayValue, hourValue, minuteValue, secondValue);
    }

    // Validates that the node holds a pure integer strictly between minValue
    // and maxValue (both exclusive). A non-null 'future' in the result is the
    // failure marker checked by callers.
    private static ProcessResult processValue(Converter converter, EvalNode node, int minValue, int maxValue) {
        Optional<Integer> value = converter.tryPureIntegerValue(node);
        if (value.isPresent() && value.get() > minValue && value.get() < maxValue) {
            return new ProcessResult(value.get());
        } else {
            return new ProcessResult(converter.getValue(null));
        }
    }

    // Builds the date, substituting defaults for out-of-range components:
    // month and day fall back to 1, time-of-day components to 0. The year is
    // used as-is, so LocalDateTime.of may still throw for extreme values.
    private static LocalDateTime constructDate(int year, int month, int day, int hour, int minute, int second) {
        int monthValue = month < 1 || month > 12 ? 1 : month;
        LocalDate currentDate = LocalDate.of(year, monthValue, 1);
        int dayValue = day < 1 || day > currentDate.getMonth().length(currentDate.isLeapYear()) ? 1 : day;
        int hourValue = hour >= 0 && hour <= 23 ? hour : 0;
        int minuteValue = minute >= 0 && minute <= 59 ? minute : 0;
        int secondValue = second >= 0 && second <= 59 ? second : 0;
        return LocalDateTime.of(year, monthValue, dayValue, hourValue, minuteValue, secondValue);
    }

    // '^' = start of day (midnight), '$' = end of day (23:59:59),
    // '+'/'-' shift by whole days; any other sign leaves the date unchanged.
    private static LocalDateTime applyDayOffset(String sign, LocalDateTime date, int offset) {
        switch (sign) {
            case START_SIGN:
                return date.truncatedTo(ChronoUnit.DAYS);
            case END_SIGN:
                return date.with(LAST_HOUR_OF_DAY_ADJUSTER);
            case NEGATIVE_SIGN:
                return date.minusDays(offset);
            case POSITIVE_SIGN:
                return date.plusDays(offset);
        }
        return date;
    }

    // '^' = start of the ISO week (Monday 00:00), '$' = end of it
    // (Sunday 23:59:59); '+'/'-' shift by whole weeks.
    private static LocalDateTime applyWeekOffset(String sign, LocalDateTime date, int offset) {
        switch (sign) {
            case START_SIGN:
                return date.truncatedTo(ChronoUnit.DAYS)
                        .with(TemporalAdjusters.previousOrSame(DayOfWeek.MONDAY));
            case END_SIGN:
                return date.with(LAST_HOUR_OF_DAY_ADJUSTER)
                        .with(TemporalAdjusters.nextOrSame(DayOfWeek.SUNDAY));
            case NEGATIVE_SIGN:
                return date.minusWeeks(offset);
            case POSITIVE_SIGN:
                return date.plusWeeks(offset);
        }
        return date;
    }

    // '^' = first day of month at 00:00, '$' = last day at 23:59:59;
    // '+'/'-' shift by whole months.
    private static LocalDateTime applyMonthOffset(String sign, LocalDateTime date, int offset) {
        switch (sign) {
            case START_SIGN:
                return date.truncatedTo(ChronoUnit.DAYS)
                        .with(TemporalAdjusters.firstDayOfMonth());
            case END_SIGN:
                return date.with(LAST_HOUR_OF_DAY_ADJUSTER)
                        .with(TemporalAdjusters.lastDayOfMonth());
            case NEGATIVE_SIGN:
                return date.minusMonths(offset);
            case POSITIVE_SIGN:
                return date.plusMonths(offset);
        }
        return date;
    }

    // '^' = Jan 1st at 00:00, '$' = Dec 31st at 23:59:59;
    // '+'/'-' shift by whole years.
    private static LocalDateTime applyYearOffset(String sign, LocalDateTime date, int offset) {
        switch (sign) {
            case START_SIGN:
                return date.truncatedTo(ChronoUnit.DAYS)
                        .with(TemporalAdjusters.firstDayOfYear());
            case END_SIGN:
                return date.with(LAST_HOUR_OF_DAY_ADJUSTER)
                        .with(TemporalAdjusters.lastDayOfYear());
            case NEGATIVE_SIGN:
                return date.minusYears(offset);
            case POSITIVE_SIGN:
                return date.plusYears(offset);
        }
        return date;
    }

    // '^' = top of the hour, '$' = hh:59:59; '+'/'-' shift by whole hours.
    private static LocalDateTime applyHourOffset(String sign, LocalDateTime date, int offset) {
        switch (sign) {
            case START_SIGN:
                return date.truncatedTo(ChronoUnit.HOURS);
            case END_SIGN:
                return date.with(LAST_MINUTE_OF_HOUR_ADJUSTER);
            case NEGATIVE_SIGN:
                return date.minusHours(offset);
            case POSITIVE_SIGN:
                return date.plusHours(offset);
        }
        return date;
    }

    // '^' = top of the minute, '$' = mm:59; '+'/'-' shift by whole minutes.
    private static LocalDateTime applyMinuteOffset(String sign, LocalDateTime date, int offset) {
        switch (sign) {
            case START_SIGN:
                return date.truncatedTo(ChronoUnit.MINUTES);
            case END_SIGN:
                return date.with(LAST_SECOND_OF_MINUTE_ADJUSTER);
            case NEGATIVE_SIGN:
                return date.minusMinutes(offset);
            case POSITIVE_SIGN:
                return date.plusMinutes(offset);
        }
        return date;
    }

    // Seconds support only '+'/'-' shifts; '^' and '$' are no-ops here.
    private static LocalDateTime applySecondOffset(String sign, LocalDateTime date, int offset) {
        switch (sign) {
            case NEGATIVE_SIGN:
                return date.minusSeconds(offset);
            case POSITIVE_SIGN:
                return date.plusSeconds(offset);
        }
        return date;
    }

    // Result holder for processValue(): exactly one of 'future' (failure
    // marker) or 'value' is meaningful.
    private static class ProcessResult {
        CompletableFuture<EvalNode> future = null;
        int value = Integer.MIN_VALUE;

        ProcessResult(CompletableFuture<EvalNode> future) {
            this.future = future;
        }

        ProcessResult(int value) {
            this.value = value;
        }
    }
}
| |
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* User: anna
* Date: 11-Nov-2008
*/
package org.jetbrains.idea.eclipse.conversion;
import com.intellij.openapi.components.ExpandMacroToPathMap;
import com.intellij.openapi.components.PathMacroManager;
import com.intellij.openapi.components.impl.BasePathMacroManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.projectRoots.ProjectJdkTable;
import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.openapi.projectRoots.ex.JavaSdkUtil;
import com.intellij.openapi.roots.*;
import com.intellij.openapi.roots.libraries.Library;
import com.intellij.openapi.roots.libraries.LibraryTable;
import com.intellij.openapi.roots.libraries.LibraryTablesRegistrar;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.vfs.JarFileSystem;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.VirtualFileManager;
import com.intellij.util.ArrayUtil;
import com.intellij.util.containers.hash.HashSet;
import org.jdom.Element;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.idea.eclipse.*;
import org.jetbrains.idea.eclipse.config.EclipseModuleManagerImpl;
import org.jetbrains.idea.eclipse.importWizard.EclipseNatureImporter;
import org.jetbrains.idea.eclipse.util.ErrorLog;
import java.io.IOException;
import java.util.Collection;
import java.util.List;
import java.util.Set;
public class EclipseClasspathReader extends AbstractEclipseClasspathReader<ModifiableRootModel> {
    private final Project myProject;
    // Content root created by init(); used for resolving paths and attaching
    // source folders. NOTE(review): readClasspath() assumes init() ran first.
    private ContentEntry myContentEntry;

    public EclipseClasspathReader(final String rootPath, final Project project, @Nullable List<String> currentRoots) {
        this(rootPath, project, currentRoots, null);
    }

    public EclipseClasspathReader(final String rootPath, final Project project, @Nullable List<String> currentRoots, @Nullable Set<String> moduleNames) {
        super(rootPath, currentRoots, moduleNames);
        myProject = project;
    }

    /** Registers the Eclipse project root as the module's content entry. */
    public void init(ModifiableRootModel model) {
        myContentEntry = model.addContentEntry(pathToUrl(myRootPath));
    }

    /**
     * Collects the names of Eclipse path variables referenced by the given
     * .classpath element into {@code usedVariables}.
     */
    public static void collectVariables(Set<String> usedVariables, Element classpathElement, final String rootPath) {
        for (Object o : classpathElement.getChildren(EclipseXml.CLASSPATHENTRY_TAG)) {
            final Element element = (Element)o;
            String path = element.getAttributeValue(EclipseXml.PATH_ATTR);
            if (path == null) continue;
            final String kind = element.getAttributeValue(EclipseXml.KIND_ATTR);
            if (Comparing.strEqual(kind, EclipseXml.VAR_KIND)) {
                // "var" entries name a variable in the path itself and
                // optionally in the attached source path.
                createEPathVariable(usedVariables, path, 0);
                final String srcPath = element.getAttributeValue(EclipseXml.SOURCEPATH_ATTR);
                if (srcPath != null) {
                    createEPathVariable(usedVariables, srcPath, srcVarStart(srcPath));
                }
            } else if (Comparing.strEqual(kind, EclipseXml.SRC_KIND) || Comparing.strEqual(kind, EclipseXml.OUTPUT_KIND)) {
                // src/output entries may point at linked resources defined in
                // terms of a path variable.
                final EclipseProjectFinder.LinkedResource linkedResource = EclipseProjectFinder.findLinkedResource(rootPath, path);
                if (linkedResource != null && linkedResource.containsPathVariable()) {
                    usedVariables.add(linkedResource.getVariableName());
                }
            }
        }
    }

    /**
     * Re-reads the module's dependencies from an Eclipse .classpath element.
     * All existing order entries except the module-source entry are dropped
     * first; unresolvable libraries/JDKs/modules are reported through the
     * corresponding collections. A conversion error in a single entry is
     * logged as a warning and does not abort the whole read.
     */
    public void readClasspath(ModifiableRootModel model,
                              final Collection<String> unknownLibraries,
                              Collection<String> unknownJdks,
                              final Set<String> usedVariables,
                              Set<String> refsToModules,
                              final String testPattern,
                              Element classpathElement) throws IOException, ConversionException {
        for (OrderEntry orderEntry : model.getOrderEntries()) {
            if (!(orderEntry instanceof ModuleSourceOrderEntry)) {
                model.removeOrderEntry(orderEntry);
            }
        }
        int idx = 0;
        final EclipseModuleManagerImpl eclipseModuleManager = EclipseModuleManagerImpl.getInstance(model.getModule());
        final HashSet<String> libs = new HashSet<String>();
        for (Object o : classpathElement.getChildren(EclipseXml.CLASSPATHENTRY_TAG)) {
            try {
                readClasspathEntry(model, unknownLibraries, unknownJdks, usedVariables, refsToModules, testPattern, (Element)o, idx++,
                                   eclipseModuleManager,
                                   ((BasePathMacroManager)PathMacroManager.getInstance(model.getModule())).getExpandMacroMap(), libs);
            }
            catch (ConversionException e) {
                ErrorLog.rethrow(ErrorLog.Level.Warning, null, EclipseXml.CLASSPATH_FILE, e);
            }
        }
        // No JDK entry found in the classpath: fall back to the project SDK
        // and remember that the JDK still has to be configured.
        if (!model.isSdkInherited() && model.getSdkName() == null) {
            eclipseModuleManager.setForceConfigureJDK();
            model.inheritSdk();
        }
    }

    @Override
    protected int rearrange(ModifiableRootModel rootModel) {
        return rearrangeOrderEntryOfType(rootModel, ModuleSourceOrderEntry.class);
    }

    // Resolves an Eclipse-style path to a URL, preferring resolution relative
    // to the content root when it is available.
    @Override
    protected String expandEclipsePath2Url(ModifiableRootModel rootModel, String path) {
        final VirtualFile contentRoot = myContentEntry.getFile();
        if (contentRoot != null) {
            return EPathUtil.expandEclipsePath2Url(path, rootModel, myCurrentRoots, contentRoot);
        }
        return EPathUtil.expandEclipsePath2Url(path, rootModel, myCurrentRoots);
    }

    @Override
    protected Set<String> getDefinedCons() {
        return EclipseNatureImporter.getAllDefinedCons();
    }

    // Creates a module-level library with classes, optional sources and
    // javadoc roots, then marks its order entry exported if requested.
    @Override
    protected void addModuleLibrary(ModifiableRootModel rootModel,
                                    Element element,
                                    boolean exported,
                                    String libName,
                                    String url,
                                    String srcUrl, ExpandMacroToPathMap macroMap) {
        // NOTE(review): the library table's modifiable model obtained below is
        // never committed (only the library's own model is) - confirm intended.
        final Library library = rootModel.getModuleLibraryTable().getModifiableModel().createLibrary(libName);
        final Library.ModifiableModel modifiableModel = library.getModifiableModel();
        modifiableModel.addRoot(url, OrderRootType.CLASSES);
        if (srcUrl != null) {
            modifiableModel.addRoot(srcUrl, OrderRootType.SOURCES);
        }
        EJavadocUtil.appendJavadocRoots(element, rootModel, myCurrentRoots, modifiableModel);
        modifiableModel.commit();
        setLibraryEntryExported(rootModel, exported, library);
    }

    // Creates a library for Eclipse's bundled JUnit container; the JUnit
    // version is inferred from the container name ("4" => JUnit 4 jar).
    @Override
    protected void addJUnitDefaultLib(ModifiableRootModel rootModel, String junitName, ExpandMacroToPathMap macroMap) {
        final Library library = rootModel.getModuleLibraryTable().getModifiableModel().createLibrary(junitName);
        final Library.ModifiableModel modifiableModel = library.getModifiableModel();
        modifiableModel.addRoot(getJunitClsUrl(junitName.contains("4")), OrderRootType.CLASSES);
        modifiableModel.commit();
    }

    @Override
    protected void addSourceFolderToCurrentContentRoot(ModifiableRootModel rootModel,
                                                       String srcUrl,
                                                       boolean testFolder) {
        myContentEntry.addSourceFolder(srcUrl, testFolder);
    }

    // Unlike the method above, this creates a dedicated content entry for the
    // source folder (used for roots outside the main content root).
    @Override
    protected void addSourceFolder(ModifiableRootModel rootModel, String srcUrl, boolean testFolder) {
        rootModel.addContentEntry(srcUrl).addSourceFolder(srcUrl, testFolder);
    }

    /**
     * Assigns the module JDK: inherit when no name is given, otherwise look
     * the SDK up by name and record it as invalid/unknown when not found.
     * The JDK order entry is then moved to the end of the entry list.
     */
    @Override
    protected void setUpModuleJdk(ModifiableRootModel rootModel,
                                  Collection<String> unknownJdks,
                                  EclipseModuleManager eclipseModuleManager,
                                  String jdkName) {
        if (jdkName == null) {
            rootModel.inheritSdk();
        }
        else {
            final Sdk moduleJdk = ProjectJdkTable.getInstance().findJdk(jdkName);
            if (moduleJdk != null) {
                rootModel.setSdk(moduleJdk);
            }
            else {
                rootModel.setInvalidSdk(jdkName, IdeaXml.JAVA_SDK_TYPE);
                eclipseModuleManager.setInvalidJdk(jdkName);
                unknownJdks.add(jdkName);
            }
        }
        rearrangeOrderEntryOfType(rootModel, JdkOrderEntry.class);
    }

    @Override
    protected void addInvalidModuleEntry(ModifiableRootModel rootModel, boolean exported, String moduleName) {
        rootModel.addInvalidModuleEntry(moduleName).setExported(exported);
    }

    // Moves the first order entry of the given type to the end of the order
    // entry list and returns its new (last) index.
    private static int rearrangeOrderEntryOfType(ModifiableRootModel rootModel, Class<? extends OrderEntry> orderEntryClass) {
        OrderEntry[] orderEntries = rootModel.getOrderEntries();
        int moduleSourcesIdx = 0;
        for (OrderEntry orderEntry : orderEntries) {
            if (orderEntryClass.isAssignableFrom(orderEntry.getClass())) {
                break;
            }
            moduleSourcesIdx++;
        }
        orderEntries = ArrayUtil.append(orderEntries, orderEntries[moduleSourcesIdx]);
        orderEntries = ArrayUtil.remove(orderEntries, moduleSourcesIdx);
        rootModel.rearrangeOrderEntries(orderEntries);
        return orderEntries.length - 1;
    }

    @Override
    public void setupOutput(ModifiableRootModel rootModel, final String path) {
        setOutputUrl(rootModel, path);
    }

    // Points the compiler output at the given path and stops inheriting the
    // project-wide output location.
    public static void setOutputUrl(ModifiableRootModel rootModel, String path) {
        final CompilerModuleExtension compilerModuleExtension = rootModel.getModuleExtension(CompilerModuleExtension.class);
        compilerModuleExtension.setCompilerOutputPath(pathToUrl(path));
        compilerModuleExtension.inheritCompilerOutputPath(false);
    }

    // Finds the module-level order entry wrapping the given library and marks
    // it exported; only the first match is updated.
    private static void setLibraryEntryExported(ModifiableRootModel rootModel, boolean exported, Library library) {
        for (OrderEntry orderEntry : rootModel.getOrderEntries()) {
            if (orderEntry instanceof LibraryOrderEntry &&
                ((LibraryOrderEntry)orderEntry).isModuleLevel() &&
                Comparing.equal(((LibraryOrderEntry)orderEntry).getLibrary(), library)) {
                ((LibraryOrderEntry)orderEntry).setExported(exported);
                break;
            }
        }
    }

    /**
     * Adds a reference to a named (application- or project-level) library,
     * falling back to an invalid-library entry and reporting the name when it
     * cannot be resolved.
     */
    @Override
    protected void addNamedLibrary(final ModifiableRootModel rootModel,
                                   final Collection<String> unknownLibraries,
                                   final boolean exported,
                                   final String name,
                                   final boolean applicationLevel) {
        Library lib = findLibraryByName(myProject, name);
        if (lib != null) {
            rootModel.addLibraryEntry(lib).setExported(exported);
        }
        else {
            unknownLibraries.add(name);
            rootModel.addInvalidLibrary(name, applicationLevel ? LibraryTablesRegistrar.APPLICATION_LEVEL : LibraryTablesRegistrar.PROJECT_LEVEL).setExported(exported);
        }
    }

    /**
     * Looks a library up by name, searching the application-level table, then
     * the project-level table, then any custom tables; returns null when the
     * name is unknown everywhere.
     */
    public static Library findLibraryByName(Project project, String name) {
        final LibraryTablesRegistrar tablesRegistrar = LibraryTablesRegistrar.getInstance();
        Library lib = tablesRegistrar.getLibraryTable().getLibraryByName(name);
        if (lib == null) {
            lib = tablesRegistrar.getLibraryTable(project).getLibraryByName(name);
        }
        if (lib == null) {
            for (LibraryTable table : tablesRegistrar.getCustomLibraryTables()) {
                lib = table.getLibraryByName(name);
                if (lib != null) {
                    break;
                }
            }
        }
        return lib;
    }

    // Resolves the bundled JUnit 3/4 jar path to a jar:// root URL when the
    // file exists; otherwise returns the plain path-based URL.
    static String getJunitClsUrl(final boolean version4) {
        String url = version4 ? JavaSdkUtil.getJunit4JarPath() : JavaSdkUtil.getJunit3JarPath();
        final VirtualFile localFile = VirtualFileManager.getInstance().findFileByUrl(pathToUrl(url));
        if (localFile != null) {
            final VirtualFile jarFile = JarFileSystem.getInstance().getJarRootForLocalFile(localFile);
            url = jarFile != null ? jarFile.getUrl() : localFile.getUrl();
        }
        return url;
    }

    // Converts a file URL to the corresponding jar-root URL when the file is
    // an archive; returns the input unchanged otherwise.
    protected String prepareValidUrlInsideJar(String url) {
        final VirtualFile localFile = VirtualFileManager.getInstance().findFileByUrl(url);
        if (localFile != null) {
            final VirtualFile jarFile = JarFileSystem.getInstance().getJarRootForLocalFile(localFile);
            if (jarFile != null) {
                return jarFile.getUrl();
            }
        }
        return url;
    }
}
| |
/*
* Copyright (c) 2014 Wael Chatila / Icegreen Technologies. All Rights Reserved.
* This software is released under the Apache license 2.0
* This file has been modified by the copyright holder.
* Original file can be found at http://james.apache.org
*/
package com.icegreen.greenmail.store;
import com.icegreen.greenmail.mail.MailAddress;
import com.icegreen.greenmail.util.GreenMailUtil;
import com.sun.mail.imap.protocol.INTERNALDATE;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.mail.BodyPart;
import javax.mail.MessagingException;
import javax.mail.internet.*;
import java.util.*;
/**
* Attributes of a Message in IMAP4rev1 style. Message
* Attributes should be set when a message enters a mailbox.
 * <p> Note that the messages in a mailbox have the same order using either
* Message Sequence Numbers or UIDs.
* <p> reinitialize() must be called on deserialization to reset Logger
* <p/>
* Reference: RFC 2060 - para 2.3 https://www.ietf.org/rfc/rfc2060.txt
*
* @author <a href="mailto:sascha@kulawik.de">Sascha Kulawik</a>
* @author <a href="mailto:charles@benett1.demon.co.uk">Charles Benett</a>
* @version 0.2 on 04 Aug 2002
*/
public class SimpleMessageAttributes
implements MailMessageAttributes {
// Logging.
protected final Logger log = LoggerFactory.getLogger(getClass());
private static final String SP = " ";
private static final String NIL = "NIL";
private static final String Q = "\"";
private static final String LB = "(";
private static final String RB = ")";
private static final String MULTIPART = "MULTIPART";
private static final String MESSAGE = "MESSAGE";
private int uid;
private int messageSequenceNumber;
private Date receivedDate;
private String bodyStructure;
private String envelope;
private int size;
private int lineCount;
public MailMessageAttributes[] parts;
private List<String> headers;
//rfc822 or MIME header fields
//arrays only if multiple values allowed under rfc822
private String subject;
private String[] from;
private String[] sender;
private String[] replyTo;
private String[] to;
private String[] cc;
private String[] bcc;
private String[] inReplyTo;
private String[] date;
private String[] messageID;
private String contentType;
private String primaryType; // parsed from contentType
private String secondaryType; // parsed from contentType
private Set<String> parameters; // parsed from contentType
private String contentID;
private String contentDesc;
private String contentEncoding;
private String receivedDateString;
private String sentDateEnvelopeString;
private Header contentDisposition;
/**
 * Builds attribute data for the given message.
 *
 * @param msg          source message; may be null, in which case no MIME
 *                     parsing is performed
 * @param receivedDate date the message entered the mailbox; may be null
 */
SimpleMessageAttributes(MimeMessage msg, Date receivedDate) throws MessagingException {
    Date sentDate = getSentDate(msg, receivedDate);
    if (null != receivedDate) {
        this.receivedDate = receivedDate;
        // Precompute the IMAP INTERNALDATE string form of the received date.
        receivedDateString = INTERNALDATE.format(receivedDate);
    }
    if (null != sentDate) {
        // RFC 822 date string used in the ENVELOPE response.
        sentDateEnvelopeString = new MailDateFormat().format(sentDate);
    }
    if (msg != null) {
        parseMimePart(msg);
    }
}
/**
 * Determines the "sent" date of a message.
 *
 * @param msg        message to read the sent date from; may be null
 * @param defaultVal value used when the message or its sent date is absent
 * @return the message's sent date, the given default, or the current time
 *         when reading the header fails
 */
private static Date getSentDate(MimeMessage msg, Date defaultVal) {
    if (msg == null) {
        return defaultVal;
    }
    try {
        final Date sent = msg.getSentDate();
        return sent != null ? sent : defaultVal;
    } catch (MessagingException me) {
        // Header could not be read at all; fall back to "now".
        return new Date();
    }
}
/** Sets the message UID. */
void setUID(int thisUID) {
    uid = thisUID;
}
/**
 * Parses key data items from a MimeMessage for separate storage.
* TODO this is a mess, and should be completely revamped.
*/
void parseMimePart(MimePart part) throws MessagingException {
size = GreenMailUtil.getBody(part).length();
// Section 1 - Message Headers
if (part instanceof MimeMessage) {
try {
String[] subjects = part.getHeader("Subject");
if ((subjects != null) && (subjects.length > 0)) {
subject = subjects[0];
}
} catch (MessagingException me) {
// if (DEBUG) getLogger().debug("Messaging Exception for getSubject: " + me);
}
}
try {
from = part.getHeader("From");
} catch (MessagingException me) {
// if (DEBUG) getLogger().debug("Messaging Exception for getHeader(From): " + me);
}
try {
sender = part.getHeader("Sender");
} catch (MessagingException me) {
// if (DEBUG) getLogger().debug("Messaging Exception for getHeader(Sender): " + me);
}
try {
replyTo = part.getHeader("Reply-To");
} catch (MessagingException me) {
// if (DEBUG) getLogger().debug("Messaging Exception for getHeader(Reply To): " + me);
}
try {
to = part.getHeader("To");
} catch (MessagingException me) {
// if (DEBUG) getLogger().debug("Messaging Exception for getHeader(To): " + me);
}
try {
cc = part.getHeader("Cc");
} catch (MessagingException me) {
// if (DEBUG) getLogger().debug("Messaging Exception for getHeader(To): " + me);
}
try {
bcc = part.getHeader("Bcc");
} catch (MessagingException me) {
// if (DEBUG) getLogger().debug("Messaging Exception for getHeader(To): " + me);
}
try {
inReplyTo = part.getHeader("In Reply To");
} catch (MessagingException me) {
// if (DEBUG) getLogger().debug("Messaging Exception for getHeader(In Reply To): " + me);
}
try {
date = part.getHeader("Date");
} catch (MessagingException me) {
// if (DEBUG) getLogger().debug("Messaging Exception for getHeader(Date): " + me);
}
try {
messageID = part.getHeader("Message-ID");
} catch (MessagingException me) {
// if (DEBUG) getLogger().debug("Messaging Exception for getHeader(messageID): " + me);
}
String contentTypeLine = null;
try {
contentTypeLine = part.getContentType();
} catch (MessagingException me) {
// if (DEBUG) getLogger().debug("Messaging Exception for getContentType(): " + me);
}
if (contentTypeLine != null) {
decodeContentType(contentTypeLine);
}
try {
contentID = part.getContentID();
} catch (MessagingException me) {
// if (DEBUG) getLogger().debug("Messaging Exception for getContentUD(): " + me);
}
try {
contentDesc = part.getDescription();
} catch (MessagingException me) {
// if (DEBUG) getLogger().debug("Messaging Exception for getDescription(): " + me);
}
try {
contentEncoding = part.getEncoding();
// default value.
if (contentEncoding == null) {
contentEncoding = "7BIT";
}
} catch (MessagingException me) {
// if (DEBUG) getLogger().debug("Messaging Exception for getEncoding(): " + me);
}
try {
// contentDisposition = part.getDisposition();
contentDisposition = Header.create(part.getHeader("Content-Disposition"));
} catch (MessagingException me) {
if (log.isDebugEnabled()) {
log.debug("Can not create content disposition for part " + part, me);
}
}
try {
// TODO this doesn't work
lineCount = getLineCount(part);
} catch (Exception e) {
if (log.isDebugEnabled()) {
log.debug("Can not get line count for part " + part, e);
}
}
// Recurse through any embedded parts
if (primaryType.equalsIgnoreCase(MULTIPART)) {
MimeMultipart container;
try {
container = (MimeMultipart) part.getContent();
int count = container.getCount();
parts = new SimpleMessageAttributes[count];
for (int i = 0; i < count; i++) {
BodyPart nextPart = container.getBodyPart(i);
if (nextPart instanceof MimePart) {
SimpleMessageAttributes partAttrs = new SimpleMessageAttributes(null, receivedDate);
partAttrs.parseMimePart((MimePart) nextPart);
parts[i] = partAttrs;
}
}
} catch (Exception e) {
if (log.isDebugEnabled()) {
log.debug("Can not recurse through multipart content", e);
}
}
} else if (primaryType.equalsIgnoreCase("message")) {
if (secondaryType.equalsIgnoreCase("RFC822")) {
parts = new SimpleMessageAttributes[1];
try {
MimeMessage wrappedMessage = (MimeMessage) part.getContent();
if(log.isDebugEnabled()) {
log.debug("message type : " + wrappedMessage.getContentType());
}
parts[0] = new SimpleMessageAttributes(wrappedMessage, null);
} catch (Exception e) {
throw new IllegalStateException("Can not extract part for "+primaryType+"/"+secondaryType, e);
}
} else {
log.warn("Unknown/unhandled subtype {} of message encountered.", secondaryType);
}
}
}
/** Counts the lines of the part's body text. */
private int getLineCount(MimePart part) throws MessagingException {
    return GreenMailUtil.getLineCount(GreenMailUtil.getBody(part));
}
/**
 * Builds IMAP envelope String from pre-parsed data.
 *
 * @return the parenthesized ENVELOPE list per RFC 3501
 */
private String parseEnvelope() {
    List<String> response = new ArrayList<>();
    //1. Date ---------------
    response.add(LB + Q + sentDateEnvelopeString + Q + SP);
    //2. Subject ---------------
    if (subject != null && (subject.length() != 0)) {
        response.add(Q + escapeHeader(subject) + Q + SP);
    } else {
        response.add(NIL + SP);
    }
    //3. From ---------------
    addAddressToEnvelopeIfAvailable(from, response);
    response.add(SP);
    //4. Sender (defaults to From) ---------------
    addAddressToEnvelopeIfAvailableWithNetscapeFeature(sender, response);
    response.add(SP);
    //5. Reply-To (defaults to From) ---------------
    addAddressToEnvelopeIfAvailableWithNetscapeFeature(replyTo, response);
    response.add(SP);
    //6. To / 7. Cc / 8. Bcc ---------------
    addAddressToEnvelopeIfAvailable(to, response);
    response.add(SP);
    addAddressToEnvelopeIfAvailable(cc, response);
    response.add(SP);
    addAddressToEnvelopeIfAvailable(bcc, response);
    response.add(SP);
    //9. In-Reply-To ---------------
    if (inReplyTo != null && inReplyTo.length > 0) {
        response.add(inReplyTo[0]);
    } else {
        response.add(NIL);
    }
    response.add(SP);
    //10. Message-ID ---------------
    if (messageID != null && messageID.length > 0) {
        // Escape into a local value. The previous code wrote the escaped
        // text back into messageID[0], double-escaping the stored header
        // on every subsequent call to parseEnvelope().
        response.add(Q + escapeHeader(messageID[0]) + Q);
    } else {
        response.add(NIL);
    }
    response.add(RB);
    StringBuilder buf = new StringBuilder(16 * response.size());
    for (String aResponse : response) {
        buf.append(aResponse);
    }
    return buf.toString();
}
/**
 * Appends an address list for an envelope slot (Sender / Reply-To) that,
 * per RFC 3501, defaults to the From value when empty.
 * NOTE(review): response.get(3) assumes the first From address was placed
 * at index 3 by the earlier From handling in parseEnvelope() — fragile;
 * verify whenever the envelope layout changes.
 */
private void addAddressToEnvelopeIfAvailableWithNetscapeFeature(String[] addresses, List<String> response) {
    if (addresses != null && addresses.length > 0) {
        // if (DEBUG) getLogger().debug("parsingEnvelope - sender[0] is: " + sender[0]);
        //Check for Netscape feature - sender is local part only
        if (addresses[0].indexOf('@') == -1) {
            response.add(LB + response.get(3) + RB); //first From address
        } else {
            response.add(LB);
            addAddressToEnvelope(addresses, response);
            response.add(RB);
        }
    } else {
        // Slot empty: fall back to the first From address when one exists.
        if (from != null && from.length > 0) {
            response.add(LB + response.get(3) + RB); //first From address
        } else {
            response.add(NIL);
        }
    }
}
/**
 * Appends the parenthesized address list for the given header values, or
 * NIL when the header was absent/empty.
 */
private void addAddressToEnvelopeIfAvailable(String[] addresses, List<String> response) {
    // Guard clause: no addresses means the envelope field is NIL.
    if (addresses == null || addresses.length == 0) {
        response.add(NIL);
        return;
    }
    response.add(LB);
    addAddressToEnvelope(addresses, response);
    response.add(RB);
}
/** Appends the IMAP rendition of every address, in order. */
private void addAddressToEnvelope(String[] addresses, List<String> response) {
    for (int i = 0; i < addresses.length; i++) {
        response.add(parseAddress(addresses[i]));
    }
}
/**
 * Parses a String email address to an IMAP address string: one
 * parenthesized quadruple (name route mailbox host) per address,
 * space separated.
 */
private String parseAddress(String address) {
    try {
        StringBuilder buf = new StringBuilder();
        InternetAddress[] netAddrs = InternetAddress.parse(address);
        for (InternetAddress netAddr : netAddrs) {
            if (buf.length() > 0) {
                buf.append(SP);
            }
            buf.append(LB);

            String personal = netAddr.getPersonal();
            if (personal != null && (personal.length() != 0)) {
                buf.append(Q).append(personal).append(Q);
            } else {
                buf.append(NIL);
            }
            buf.append(SP);
            buf.append(NIL); // should add route-addr
            buf.append(SP);
            try {
                // Backslash-escape embedded quotes before splitting into
                // user/host (the regex replacement inserts \ before each ").
                MailAddress mailAddr = new MailAddress(netAddr.getAddress().replaceAll("\"", "\\\\\""));
                buf.append(Q).append(mailAddr.getUser()).append(Q);
                buf.append(SP);
                buf.append(Q).append(mailAddr.getHost()).append(Q);
            } catch (Exception pe) {
                // Unparseable mailbox: emit NIL NIL instead of failing the fetch.
                buf.append(NIL + SP + NIL);
            }
            buf.append(RB);
        }
        return buf.toString();
    } catch (AddressException e) {
        throw new RuntimeException("Failed to parse address: " + address, e);
    }
}
/**
 * Decode a content Type header line ("type/subtype[; param...]") into the
 * primaryType / secondaryType / parameters fields.
 */
void decodeContentType(String rawLine) {
    // Without a slash there is nothing to decode; all fields stay unset.
    int slash = rawLine.indexOf('/');
    if (slash < 0) {
        return;
    }
    primaryType = rawLine.substring(0, slash).trim();

    int semicolon = rawLine.indexOf(';');
    if (semicolon < 0) {
        // No parameters: everything after the slash is the subtype.
        secondaryType = rawLine.substring(slash + 1).trim();
    } else {
        // Parameters present: subtype ends at the first semicolon, and the
        // Header helper extracts the quoted key/value parameter set.
        secondaryType = rawLine.substring(slash + 1, semicolon).trim();
        parameters = new Header(rawLine).getParams();
    }
}
/**
 * Renders the common IMAP "body fields": (params) id description encoding size.
 */
String parseBodyFields() {
    StringBuilder buf = new StringBuilder();
    getParameters(buf);
    buf.append(SP);
    appendNString(buf, contentID);
    buf.append(SP);
    appendNString(buf, contentDesc);
    buf.append(SP);
    appendNString(buf, contentEncoding);
    buf.append(SP);
    buf.append(size);
    return buf.toString();
}

/**
 * Appends an IMAP "nstring": NIL for a null value, otherwise the value in
 * quotes. Factors out the pattern previously repeated verbatim for
 * contentID / contentDesc / contentEncoding.
 */
private void appendNString(StringBuilder buf, String value) {
    if (value == null) {
        buf.append(NIL);
    } else {
        buf.append(Q).append(value).append(Q);
    }
}
/** Appends the parameter set as a space-separated parenthesized list, or NIL when empty. */
private void getParameters(StringBuilder buf) {
    if (parameters == null || parameters.isEmpty()) {
        buf.append(NIL);
        return;
    }
    buf.append(LB);
    boolean first = true;
    for (String parameter : parameters) {
        // Space separated
        if (!first) {
            buf.append(SP);
        }
        buf.append(parameter);
        first = false;
    }
    buf.append(RB);
}
/**
 * Produce the IMAP formatted String for the BodyStructure of a pre-parsed MimeMessage
 * TODO handle extension elements - Content-disposition, Content-Language and other parameters.
 *
 * @param includeExtension whether to append extension fields
 *                         (params / disposition / language) to each part
 * @return parenthesized BODYSTRUCTURE list
 * @throws IllegalStateException when any nested data cannot be rendered
 */
String parseBodyStructure(boolean includeExtension) {
    try {
        String fields = parseBodyFields();
        StringBuilder buf = new StringBuilder();
        buf.append(LB);
        if (primaryType.equalsIgnoreCase("Text")) {
            // text/*: body fields plus a trailing line count.
            buf.append("\"TEXT\" \"");
            buf.append(secondaryType.toUpperCase());
            buf.append("\" ");
            buf.append(fields);
            buf.append(' ');
            buf.append(lineCount);

            // is: * 1 FETCH (BODYSTRUCTURE ("Text" "plain" NIL NIL NIL NIL 4 -1))
            // wants: * 1 FETCH (BODYSTRUCTURE ("text" "plain" NIL NIL NIL "8bit" 6 1 NIL NIL NIL))
            // or: * 1 FETCH (BODYSTRUCTURE ("text" "plain" NIL NIL NIL "7bit" 28 1 NIL NIL NIL))

        } else if (primaryType.equalsIgnoreCase(MESSAGE) && secondaryType.equalsIgnoreCase("rfc822")) {
            // message/rfc822: embed the wrapped message's envelope and structure.
            buf.append("\"MESSAGE\" \"RFC822\" ");
            buf.append(fields).append(SP);
            // setupLogger(parts[0]); // reset transient logger
            buf.append(parts[0].getEnvelope()).append(SP);
            buf.append(parts[0].getBodyStructure(false)).append(SP);
            buf.append(lineCount);
        } else if (primaryType.equalsIgnoreCase(MULTIPART)) {
            // multipart/*: concatenated child structures followed by the subtype.
            for (MailMessageAttributes part : parts) {
                // setupLogger(parts[i]); // reset transient getLogger()
                buf.append(part.getBodyStructure(includeExtension));
            }
            buf.append(SP + Q).append(secondaryType).append(Q);
        } else {
            //1. primary type -------
            buf.append('\"');
            buf.append(primaryType.toUpperCase());
            buf.append('\"');
            //2. sec type -------
            buf.append(" \"");
            buf.append(secondaryType.toUpperCase());
            buf.append('\"');
            //3. params -------
            buf.append(' ');
            getParameters(buf);
            //4. body id -------
            buf.append(' ');
            buf.append(NIL);
            //5. content desc -------
            buf.append(' ');
            if (null != contentDesc) {
                buf.append('\"');
                buf.append(contentDesc);
                buf.append('\"');
            } else {
                buf.append(NIL);
            }
            //6. encoding -------
            buf.append(' ');
            if (null != contentEncoding) {
                buf.append('\"');
                buf.append(contentEncoding);
                buf.append('\"');
            } else {
                buf.append(NIL);
            }
            //7. size -------
            buf.append(' ');
            buf.append(size);
        }
        if (includeExtension) {
            //extension is different for multipart and single parts
            if (primaryType.equalsIgnoreCase(MULTIPART)) {
                //8. ext1 params -------
                buf.append(' ');
                getParameters(buf);
                //9. ext2 disposition -------
                buf.append(' ');
                if (null != contentDisposition) {
                    buf.append(contentDisposition);
                } else {
                    buf.append(NIL);
                }
                //10. ext3 language -------
                buf.append(' ');
                buf.append(NIL);
            } else {
                // ext1 md5 -------
                buf.append(' ');
                buf.append(NIL);
                // ext2 disposition -------
                buf.append(' ');
                if (null != contentDisposition) {
                    buf.append(contentDisposition);
                } else {
                    buf.append(NIL);
                }
                //ext3 language -------
                buf.append(' ');
                buf.append(NIL);
            }
        }
        buf.append(RB);
        return buf.toString();
    } catch (Exception e) {
        throw new IllegalStateException("Can not parse body structure", e);
    }
}
/**
 * Provides the current Message Sequence Number for this message. MSNs
 * change when messages are expunged from the mailbox.
 *
 * @return int a positive non-zero integer
 */
public int getMessageSequenceNumber() {
    return messageSequenceNumber;
}

/** Updates the MSN (MSNs are renumbered when the mailbox is expunged). */
void setMessageSequenceNumber(int newMsn) {
    messageSequenceNumber = newMsn;
}
/**
 * Provides the unique identity value for this message. UIDs combined with
 * a UIDValidity value form a unique reference for a message in a given
 * mailbox. UIDs persist across sessions unless the UIDValidity value is
 * incremented. UIDs are not copied if a message is copied to another
 * mailbox.
 *
 * @return int a 32-bit value
 */
public int getUID() {
    return uid;
}

@Override
public Date getReceivedDate() {
    return receivedDate;
}

/** @return the received date pre-formatted for the IMAP INTERNALDATE response */
@Override
public String getReceivedDateAsString() {
    return receivedDateString;
}

/** @return body length as computed in parseMimePart() */
@Override
public int getSize() {
    return size;
}

/** @return the IMAP ENVELOPE string, recomputed on every call */
@Override
public String getEnvelope() {
    return parseEnvelope();
}

/** @return the IMAP BODYSTRUCTURE string, recomputed on every call */
@Override
public String getBodyStructure(boolean includeExtensions) {
    return parseBodyStructure(includeExtensions);
}
/**
 * Structured header value (e.g. Content-Disposition): a main value plus
 * optional ";"-separated key=value parameters.
 * http://tools.ietf.org/html/rfc1806
 * http://tools.ietf.org/html/rfc2183 content disposition
 */
private static class Header {
    String value;
    // Quoted "key" "value" parameter strings; null when the header has no parameters.
    Set<String> params = null;

    public Header(String line) {
        String[] strs = line.split(";");
        value = strs[0];
        // Only build a parameter set when parameters are actually present.
        // The previous check (0 != strs.length) was always true, so a header
        // without parameters got an EMPTY set and toString() rendered it as
        // NIL instead of its quoted value.
        if (strs.length > 1) {
            params = new HashSet<>(strs.length);
            for (int i = 1; i < strs.length; i++) {
                String p = strs[i].trim();
                int e = p.indexOf('=');
                if (e < 0) {
                    // Malformed parameter without '=': skip it rather than
                    // throwing StringIndexOutOfBoundsException.
                    continue;
                }
                String key = p.substring(0, e);
                String val = p.substring(e + 1);
                p = Q + strip(key) + Q + SP + Q + strip(val) + Q;
                params.add(p);
            }
        }
    }

    public Set<String> getParams() {
        return params;
    }

    /** Trims and removes all double quotes. */
    private String strip(String s) {
        return s.trim().replaceAll("\\\"", "");
    }

    @Override
    public String toString() {
        StringBuilder ret = new StringBuilder();
        if (null == params) {
            // Plain header: just the quoted value.
            ret.append(Q).append(value).append(Q);
        } else {
            if (params.isEmpty()) {
                ret.append(NIL);
            } else {
                // ("value" ("key" "val" ...))
                ret.append(LB);
                ret.append(Q).append(value).append(Q + SP);
                ret.append(LB);
                int i = 0;
                for (String param : params) {
                    if (i++ > 0) {
                        ret.append(SP);
                    }
                    ret.append(param);
                }
                ret.append(RB);
                ret.append(RB);
            }
        }
        return ret.toString();
    }

    /** @return a Header for the single expected occurrence, or null when absent */
    public static Header create(String[] header) {
        if (null == header || 0 == header.length) {
            return null;
        }
        if (header.length > 1) {
            throw new IllegalArgumentException("Header creation assumes only one occurrence of header instead of " + header.length);
        }
        return new Header(header[0]);
    }
}
/**
 * Unfolds a multi-line header and escapes it for use inside an IMAP quoted
 * string. Backslashes are escaped first so the quote escaping added
 * afterwards is not escaped a second time.
 */
private String escapeHeader(final String text) {
    String unfolded = MimeUtility.unfold(text);
    String backslashesEscaped = unfolded.replace("\\", "\\\\");
    return backslashesEscaped.replace("\"", "\\\"");
}
}
| |
/*
* DBeaver - Universal Database Manager
* Copyright (C) 2010-2022 DBeaver Corp and others
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jkiss.dbeaver.registry;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.resources.IProjectDescription;
import org.eclipse.core.runtime.*;
import org.eclipse.core.runtime.jobs.Job;
import org.eclipse.equinox.security.storage.ISecurePreferences;
import org.jkiss.code.NotNull;
import org.jkiss.code.Nullable;
import org.jkiss.dbeaver.Log;
import org.jkiss.dbeaver.model.*;
import org.jkiss.dbeaver.model.access.DBAAuthProfile;
import org.jkiss.dbeaver.model.app.DBPDataSourceRegistry;
import org.jkiss.dbeaver.model.app.DBPPlatform;
import org.jkiss.dbeaver.model.app.DBPProject;
import org.jkiss.dbeaver.model.app.DBPWorkspace;
import org.jkiss.dbeaver.model.auth.DBAAuthCredentialsProvider;
import org.jkiss.dbeaver.model.connection.DBPAuthModelDescriptor;
import org.jkiss.dbeaver.model.connection.DBPConnectionConfiguration;
import org.jkiss.dbeaver.model.connection.DBPDataSourceProviderRegistry;
import org.jkiss.dbeaver.model.connection.DBPDriver;
import org.jkiss.dbeaver.model.net.DBWNetworkProfile;
import org.jkiss.dbeaver.model.runtime.*;
import org.jkiss.dbeaver.model.struct.DBSObject;
import org.jkiss.dbeaver.model.struct.DBSObjectFilter;
import org.jkiss.dbeaver.model.virtual.DBVModel;
import org.jkiss.dbeaver.runtime.DBWorkbench;
import org.jkiss.dbeaver.runtime.resource.DBeaverNature;
import org.jkiss.dbeaver.utils.ContentUtils;
import org.jkiss.dbeaver.utils.RuntimeUtils;
import org.jkiss.utils.ArrayUtils;
import org.jkiss.utils.CommonUtils;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.*;
import java.util.stream.Collectors;
public class DataSourceRegistry implements DBPDataSourceRegistry {
@Deprecated
public static final String DEFAULT_AUTO_COMMIT = "default.autocommit"; //$NON-NLS-1$
@Deprecated
public static final String DEFAULT_ISOLATION = "default.isolation"; //$NON-NLS-1$
@Deprecated
public static final String DEFAULT_ACTIVE_OBJECT = "default.activeObject"; //$NON-NLS-1$
// Maximum time (ms) to wait for all connections to close during dispose().
private static final long DISCONNECT_ALL_TIMEOUT = 5000;
private static final Log log = Log.getLog(DataSourceRegistry.class);
public static final String OLD_CONFIG_FILE_NAME = "data-sources.xml"; //$NON-NLS-1$
private final DBPPlatform platform;
private final DBPProject project;
// Configuration storages keyed by config file path; guarded by synchronized (storages).
private final Map<Path, DataSourceStorage> storages = new LinkedHashMap<>();
// All data sources keyed by id; guarded by synchronized (dataSources).
private final Map<String, DataSourceDescriptor> dataSources = new LinkedHashMap<>();
// Event listeners; guarded by synchronized (dataSourceListeners).
private final List<DBPEventListener> dataSourceListeners = new ArrayList<>();
private final List<DataSourceFolder> dataSourceFolders = new ArrayList<>();
private final List<DBSObjectFilter> savedFilters = new ArrayList<>();
private final List<DBWNetworkProfile> networkProfiles = new ArrayList<>();
private final Map<String, DBAAuthProfile> authProfiles = new LinkedHashMap<>();
// True while ConfigSaver is writing; blocks refreshConfig() re-entry.
private volatile boolean saveInProgress = false;
private final DBVModel.ModelChangeListener modelChangeListener = new DBVModel.ModelChangeListener();
// Async save job, created lazily by flushConfig().
private volatile ConfigSaver configSaver;
private DBAAuthCredentialsProvider authCredentialsProvider;
private Throwable lastLoadError;
/**
 * Creates a registry for the given project and immediately loads its data
 * source configuration, notifies the provider registry and attaches the
 * virtual-model change listener.
 */
public DataSourceRegistry(DBPPlatform platform, DBPProject project) {
    this.platform = platform;
    this.project = project;
    loadDataSources(true);
    DataSourceProviderRegistry.getInstance().fireRegistryChange(this, true);
    addDataSourceListener(modelChangeListener);
}
/**
 * Shuts the registry down: detaches listeners, closes open connections
 * (bounded by DISCONNECT_ALL_TIMEOUT) and disposes all descriptors.
 * Deliberately does NOT persist configuration (see comment below).
 */
@Override
public void dispose() {
    removeDataSourceListener(modelChangeListener);
    DataSourceProviderRegistry.getInstance().fireRegistryChange(this, false);
    synchronized (dataSourceListeners) {
        if (!this.dataSourceListeners.isEmpty()) {
            log.warn("Some data source listeners are still registered: " + dataSourceListeners);
        }
        this.dataSourceListeners.clear();
    }
    // Disconnect in 5 seconds or die
    closeConnections(DISCONNECT_ALL_TIMEOUT);
    // Do not save config on shutdown.
    // Some data source might be broken due to misconfiguration
    // and we don't want to lose their config just after restart
    // if (getProjectNode().isOpen()) {
    // flushConfig();
    // }
    // Dispose and clear all descriptors
    synchronized (dataSources) {
        for (DataSourceDescriptor dataSourceDescriptor : this.dataSources.values()) {
            dataSourceDescriptor.dispose();
        }
        this.dataSources.clear();
    }
}
/**
 * Disconnects all connected data sources, waiting at most {@code waitTime}
 * milliseconds before logging a warning and giving up.
 */
private void closeConnections(long waitTime) {
    final boolean anyConnected;
    synchronized (dataSources) {
        anyConnected = dataSources.values().stream().anyMatch(DataSourceDescriptor::isConnected);
    }
    // Nothing connected: nothing to do.
    if (!anyConnected) {
        return;
    }
    final DisconnectTask disconnectTask = new DisconnectTask();
    if (!RuntimeUtils.runTask(disconnectTask, "Disconnect from data sources", waitTime)) {
        log.warn("Some data source connections wasn't closed on shutdown in " + waitTime + "ms. Probably network timeout occurred.");
    }
}
/**
 * Returns the storage marked as default, creating one lazily when absent.
 * Prefers the modern (metadata-folder) config file and falls back to the
 * legacy project-folder file only when just that one exists on disk.
 */
DataSourceStorage getDefaultStorage() {
    synchronized (storages) {
        for (DataSourceStorage storage : storages.values()) {
            if (storage.isDefault()) {
                return storage;
            }
        }
        Path defFile = getModernConfigFile();
        if (!Files.exists(defFile)) {
            Path legacyFile = getLegacyConfigFile();
            if (Files.exists(legacyFile)) {
                defFile = legacyFile;
            }
        }
        DataSourceStorage storage = new DataSourceStorage(defFile, true);
        storages.put(defFile, storage);
        return storage;
    }
}
// Legacy location: config file directly inside the project folder.
private Path getLegacyConfigFile() {
    return project.getAbsolutePath().resolve(LEGACY_CONFIG_FILE_NAME);
}

// Modern location: config file inside the project's metadata folder.
private Path getModernConfigFile() {
    return project.getMetadataFolder(false).resolve(MODERN_CONFIG_FILE_NAME);
}

@NotNull
public DBPPlatform getPlatform() {
    return platform;
}
////////////////////////////////////////////////////
// Data sources

/** Looks up a data source by its unique id; null when unknown. */
@Nullable
@Override
public DataSourceDescriptor getDataSource(String id) {
    synchronized (dataSources) {
        return dataSources.get(id);
    }
}

/** Finds the descriptor owning the given live data source instance (identity match). */
@Nullable
@Override
public DataSourceDescriptor getDataSource(DBPDataSource dataSource) {
    synchronized (dataSources) {
        for (DataSourceDescriptor dsd : dataSources.values()) {
            if (dsd.getDataSource() == dataSource) {
                return dsd;
            }
        }
    }
    return null;
}
/** Finds the first non-hidden data source with the given display name; null when absent. */
@Nullable
@Override
public DataSourceDescriptor findDataSourceByName(String name) {
    synchronized (dataSources) {
        return dataSources.values().stream()
            .filter(dsd -> !dsd.isHidden() && dsd.getName().equals(name))
            .findFirst()
            .orElse(null);
    }
}
/** @return data sources whose connection configuration references the given network profile */
@NotNull
@Override
public List<? extends DBPDataSourceContainer> getDataSourcesByProfile(@NotNull DBWNetworkProfile profile) {
    // Snapshot under the lock, filter outside of it.
    List<DataSourceDescriptor> snapshot;
    synchronized (dataSources) {
        snapshot = CommonUtils.copyList(dataSources.values());
    }
    return snapshot.stream()
        .filter(ds -> CommonUtils.equalObjects(ds.getConnectionConfiguration().getConfigProfileName(), profile.getProfileName()))
        .collect(Collectors.toList());
}
/** @return a snapshot of all data sources, sorted case-insensitively by name (falling back to id) */
@NotNull
@Override
public List<DataSourceDescriptor> getDataSources() {
    List<DataSourceDescriptor> snapshot;
    synchronized (dataSources) {
        snapshot = CommonUtils.copyList(dataSources.values());
    }
    snapshot.sort(Comparator.comparing(
        ds -> CommonUtils.notNull(ds.getName(), ds.getId()),
        String.CASE_INSENSITIVE_ORDER));
    return snapshot;
}
/** Creates a fresh, not-yet-registered descriptor for the driver/configuration. */
@NotNull
@Override
public DBPDataSourceContainer createDataSource(DBPDriver driver, DBPConnectionConfiguration connConfig) {
    return new DataSourceDescriptor(this, DataSourceDescriptor.generateNewId(driver), driver, connConfig);
}

/** Copies an existing container into this registry under a newly generated id. */
@NotNull
@Override
public DBPDataSourceContainer createDataSource(DBPDataSourceContainer source) {
    DataSourceDescriptor newDS = new DataSourceDescriptor((DataSourceDescriptor) source, this);
    newDS.setId(DataSourceDescriptor.generateNewId(source.getDriver()));
    return newDS;
}
/** @return the live (mutable) list of all folders, nested ones included */
@NotNull
@Override
public List<DataSourceFolder> getAllFolders() {
    return dataSourceFolders;
}

/** @return folders that have no parent */
@NotNull
@Override
public List<DataSourceFolder> getRootFolders() {
    List<DataSourceFolder> rootFolders = new ArrayList<>();
    for (DataSourceFolder folder : dataSourceFolders) {
        if (folder.getParent() == null) {
            rootFolders.add(folder);
        }
    }
    return rootFolders;
}
/** Creates a new folder under the given parent (or as a root when parent is null). */
@Override
public DataSourceFolder addFolder(DBPDataSourceFolder parent, String name) {
    DataSourceFolder folder = new DataSourceFolder(this, (DataSourceFolder) parent, name, null);
    dataSourceFolders.add(folder);
    return folder;
}
/**
 * Removes a folder and, recursively, its children. Data sources inside the
 * removed folders are either removed too ({@code dropContents}) or
 * re-parented to the removed folder's parent.
 */
@Override
public void removeFolder(DBPDataSourceFolder folder, boolean dropContents) {
    final DataSourceFolder folderImpl = (DataSourceFolder) folder;

    // Depth-first: drop the subtree before detaching this folder.
    // NOTE(review): if setParent(null) removes a child from this collection,
    // this iteration could be concurrent-modified — verify getChildren().
    for (DataSourceFolder child : folderImpl.getChildren()) {
        removeFolder(child, dropContents);
    }

    final DBPDataSourceFolder parent = folder.getParent();
    if (parent != null) {
        folderImpl.setParent(null);
    }

    // Iterate over a snapshot: removeDataSource() mutates the dataSources
    // map, which threw ConcurrentModificationException when the previous
    // code iterated the live values() view while dropping contents.
    List<DataSourceDescriptor> descriptors;
    synchronized (dataSources) {
        descriptors = new ArrayList<>(dataSources.values());
    }
    for (DataSourceDescriptor ds : descriptors) {
        if (ds.getFolder() == folder) {
            if (dropContents) {
                removeDataSource(ds);
            } else {
                ds.setFolder(parent);
            }
        }
    }
    dataSourceFolders.remove(folderImpl);
}
/** Finds a root folder by exact name; null when no root matches. */
private DataSourceFolder findRootFolder(String name) {
    return getRootFolders().stream()
        .filter(root -> root.getName().equals(name))
        .findFirst()
        .orElse(null);
}
/** Resolves a '/'-separated folder path, creating missing segments on demand. */
@Override
public DBPDataSourceFolder getFolder(String path) {
    return findFolderByPath(path, true);
}

/**
 * Walks a '/'-separated folder path starting from the roots.
 *
 * @param create when true, missing segments are created; when false the
 *               walk stops at the first missing segment (its parent is returned)
 */
DataSourceFolder findFolderByPath(String path, boolean create) {
    DataSourceFolder parent = null;
    for (String name : path.split("/")) {
        DataSourceFolder folder = parent == null ? findRootFolder(name) : parent.getChild(name);
        if (folder == null) {
            if (!create) {
                log.warn("Folder '" + path + "' not found");
                break;
            } else {
                folder = addFolder(parent, name);
            }
        }
        parent = folder;
    }
    return parent;
}
/** Registers an already-constructed folder (used during configuration load). */
void addDataSourceFolder(DataSourceFolder folder) {
    dataSourceFolders.add(folder);
}
////////////////////////////////////////////////////
// Saved filters

/** Finds a saved filter by name; null when no filter matches. */
@Nullable
@Override
public DBSObjectFilter getSavedFilter(String name) {
    for (DBSObjectFilter filter : savedFilters) {
        if (CommonUtils.equalObjects(filter.getName(), name)) {
            return filter;
        }
    }
    return null;
}

/** @return the live (mutable) list of saved filters */
@NotNull
@Override
public List<DBSObjectFilter> getSavedFilters() {
    return savedFilters;
}

/** Replaces the filter with the same name, or appends a copy when new. */
@Override
public void updateSavedFilter(DBSObjectFilter filter) {
    // Store a defensive copy so later caller mutations don't leak in.
    DBSObjectFilter filterCopy = new DBSObjectFilter(filter);
    for (int i = 0; i < savedFilters.size(); i++) {
        if (CommonUtils.equalObjects(savedFilters.get(i).getName(), filter.getName())) {
            savedFilters.set(i, filterCopy);
            return;
        }
    }
    savedFilters.add(filterCopy);
}
/** Removes every saved filter whose name matches. */
@Override
public void removeSavedFilter(String filterName) {
    // removeIf replaces the former manual index-juggling removal loop.
    savedFilters.removeIf(filter -> CommonUtils.equalObjects(filter.getName(), filterName));
}
/** Registers a filter loaded from configuration (no duplicate check). */
void addSavedFilter(DBSObjectFilter filter) {
    savedFilters.add(filter);
}
////////////////////////////////////////////////////
// Config profiles

/** Finds a network profile by name; null when absent. */
@Nullable
@Override
public DBWNetworkProfile getNetworkProfile(String name) {
    synchronized (networkProfiles) {
        return networkProfiles.stream().filter(profile -> CommonUtils.equalObjects(profile.getProfileName(), name)).findFirst().orElse(null);
    }
}

/**
 * @return the live list of network profiles.
 * NOTE(review): returned without the lock that getNetworkProfile() uses —
 * confirm callers are not expected to synchronize.
 */
@NotNull
@Override
public List<DBWNetworkProfile> getNetworkProfiles() {
    return networkProfiles;
}

/** Replaces the profile with the same name, or appends it when new. */
// NOTE(review): unlike getNetworkProfile(), this mutation is unsynchronized — verify intent.
@Override
public void updateNetworkProfile(DBWNetworkProfile profile) {
    for (int i = 0; i < networkProfiles.size(); i++) {
        if (CommonUtils.equalObjects(networkProfiles.get(i).getProfileName(), profile.getProfileName())) {
            networkProfiles.set(i, profile);
            return;
        }
    }
    networkProfiles.add(profile);
}

@Override
public void removeNetworkProfile(DBWNetworkProfile profile) {
    networkProfiles.remove(profile);
}
////////////////////////////////////////////////////
// Auth profiles

/** Looks up an auth profile by id; null when unknown. */
@Nullable
@Override
public DBAAuthProfile getAuthProfile(String id) {
    synchronized (authProfiles) {
        return authProfiles.get(id);
    }
}

/** @return a snapshot copy of all auth profiles */
@NotNull
@Override
public List<DBAAuthProfile> getAllAuthProfiles() {
    synchronized (authProfiles) {
        return new ArrayList<>(authProfiles.values());
    }
}

/** @return profiles whose auth model is applicable to the given driver (null driver allowed) */
@NotNull
@Override
public List<DBAAuthProfile> getApplicableAuthProfiles(@Nullable DBPDriver driver) {
    DBPDataSourceProviderRegistry dspRegistry = DBWorkbench.getPlatform().getDataSourceProviderRegistry();
    synchronized (authProfiles) {
        return authProfiles.values().stream().filter(p -> {
            DBPAuthModelDescriptor authModel = dspRegistry.getAuthModel(p.getAuthModelId());
            return authModel != null && authModel.isApplicableTo(driver);
        }).collect(Collectors.toList());
    }
}

/** Adds or replaces the profile keyed by its id. */
@Override
public void updateAuthProfile(DBAAuthProfile profile) {
    synchronized (authProfiles) {
        authProfiles.put(profile.getProfileId(), profile);
    }
}

@Override
public void removeAuthProfile(DBAAuthProfile profile) {
    synchronized (authProfiles) {
        authProfiles.remove(profile.getProfileId());
    }
}
////////////////////////////////////////////////////
// Data sources

/** Registers a data source, persists it (unless detached) and fires OBJECT_ADD. */
public void addDataSource(@NotNull DBPDataSourceContainer dataSource) {
    final DataSourceDescriptor descriptor = (DataSourceDescriptor) dataSource;
    addDataSourceToList(descriptor);
    if (!descriptor.isDetached()) {
        this.saveDataSources();
    }
    notifyDataSourceListeners(new DBPEvent(DBPEvent.Action.OBJECT_ADD, descriptor, true));
}

/** Puts the descriptor into the id map without persisting or notifying. */
void addDataSourceToList(@NotNull DataSourceDescriptor descriptor) {
    synchronized (dataSources) {
        this.dataSources.put(descriptor.getId(), descriptor);
    }
}

/** Unregisters a data source, persists the change, fires OBJECT_REMOVE and disposes it. */
public void removeDataSource(@NotNull DBPDataSourceContainer dataSource) {
    final DataSourceDescriptor descriptor = (DataSourceDescriptor) dataSource;
    synchronized (dataSources) {
        this.dataSources.remove(descriptor.getId());
    }
    if (!descriptor.isDetached()) {
        this.saveDataSources();
    }
    try {
        this.fireDataSourceEvent(DBPEvent.Action.OBJECT_REMOVE, dataSource);
    } finally {
        // Dispose even if event delivery fails.
        descriptor.dispose();
    }
}

/** Persists an updated data source, or adds it when not yet registered. */
public void updateDataSource(@NotNull DBPDataSourceContainer dataSource) {
    if (!(dataSource instanceof DataSourceDescriptor)) {
        return;
    }
    // NOTE(review): containsKey is read without the dataSources lock used
    // elsewhere in this class — confirm this is intentional.
    if (!dataSources.containsKey(dataSource.getId())) {
        addDataSource(dataSource);
    } else {
        if (!((DataSourceDescriptor) dataSource).isDetached()) {
            this.saveDataSources();
        }
        this.fireDataSourceEvent(DBPEvent.Action.OBJECT_UPDATE, dataSource);
    }
}
/** Schedules an asynchronous configuration save (no-op for in-memory projects). */
@Override
public void flushConfig() {
    if (project.isInMemory()) {
        return;
    }
    // Use async config saver to avoid too frequent configuration re-save during some massive configuration update
    // NOTE(review): check-then-act on the volatile configSaver is not atomic;
    // two concurrent first calls could create two savers — confirm callers
    // are confined to one thread.
    if (configSaver == null) {
        configSaver = new ConfigSaver();
    }
    configSaver.schedule(100);
}

/** Re-reads configuration from disk unless a save is currently in progress. */
@Override
public void refreshConfig() {
    if (!saveInProgress) {
        this.loadDataSources(true);
    }
}

/** @return the error from the last configuration load, or null on success */
@Override
public Throwable getLastLoadError() {
    return lastLoadError;
}
@Override
public void addDataSourceListener(@NotNull DBPEventListener listener) {
    synchronized (dataSourceListeners) {
        dataSourceListeners.add(listener);
    }
}

/** @return true when the listener was registered and has now been removed */
@Override
public boolean removeDataSourceListener(@NotNull DBPEventListener listener) {
    synchronized (dataSourceListeners) {
        return dataSourceListeners.remove(listener);
    }
}
/** Convenience wrapper building a DBPEvent for the given action/object. */
private void fireDataSourceEvent(
    DBPEvent.Action action,
    DBSObject object) {
    notifyDataSourceListeners(new DBPEvent(action, object));
}

/**
 * Delivers the event to a snapshot of the current listeners on a system
 * background Job so callers are never blocked by listener code.
 */
public void notifyDataSourceListeners(final DBPEvent event) {
    final List<DBPEventListener> listeners;
    synchronized (dataSourceListeners) {
        if (dataSourceListeners.isEmpty()) {
            return;
        }
        // Snapshot: listeners may (de)register while events are delivered.
        listeners = new ArrayList<>(dataSourceListeners);
    }
    new Job("Notify datasource events") {
        {
            setSystem(true);
        }
        @Override
        protected IStatus run(IProgressMonitor monitor) {
            for (DBPEventListener listener : listeners) {
                listener.handleDataSourceEvent(event);
            }
            return Status.OK_STATUS;
        }
    }.schedule();
}
/** @return the secure-storage node under which data source credentials are kept */
@Override
@NotNull
public ISecurePreferences getSecurePreferences() {
    return platform.getApplication().getSecureStorage().getSecurePreferences().node("datasources");
}

@Nullable
@Override
public DBAAuthCredentialsProvider getAuthCredentialsProvider() {
    return authCredentialsProvider;
}

public void setAuthCredentialsProvider(DBAAuthCredentialsProvider authCredentialsProvider) {
    this.authCredentialsProvider = authCredentialsProvider;
}
/**
 * @return true if there is at least one project which was initialized.
 */
public static boolean isProjectsInitialized() {
    boolean anyLoaded = false;
    for (DBPProject activeProject : DBWorkbench.getPlatform().getWorkspace().getProjects()) {
        if (activeProject.isRegistryLoaded()) {
            anyLoaded = true;
            break;
        }
    }
    return anyLoaded;
}
/** Collects data sources from every open project whose registry is loaded. */
public static List<DBPDataSourceContainer> getAllDataSources() {
    List<DBPDataSourceContainer> result = new ArrayList<>();
    DBPWorkspace workspace = DBWorkbench.getPlatform().getWorkspace();
    for (DBPProject project : workspace.getProjects()) {
        if (project.isOpen() && project.isRegistryLoaded()) {
            result.addAll(project.getDataSourceRegistry().getDataSources());
        }
    }
    return result;
}
/**
 * Loads data sources from the given file into this registry and returns
 * the descriptors that were added by the parse.
 */
@NotNull
@Override
public List<? extends DBPDataSourceContainer> loadDataSourcesFromFile(@NotNull DBPDataSourceConfigurationStorage configurationStorage, @NotNull Path fromPath) {
    ParseResults parseResults = new ParseResults();
    loadDataSources(fromPath, false, true, parseResults, configurationStorage);
    return new ArrayList<>(parseResults.addedDataSources);
}
/**
 * (Re)loads all datasource configurations of the current project: first the
 * modern JSON configs from the metadata folder, falling back to the legacy
 * XML configs in the project root, then any externally registered
 * configuration storages.
 *
 * Fix: the two {@link Files#list} streams were never closed. Files.list()
 * keeps an open directory handle until the stream is closed, so each reload
 * leaked a handle (and could keep the folder locked on Windows). Both calls
 * are now wrapped in try-with-resources.
 *
 * @param refresh when true, fires OBJECT_ADD/UPDATE/REMOVE events for the
 *                differences against the currently registered datasources
 */
private void loadDataSources(boolean refresh) {
    if (!project.isOpen() || project.isInMemory()) {
        return;
    }
    // Clear filters before reload
    savedFilters.clear();

    // Parse datasources
    ParseResults parseResults = new ParseResults();

    // Modern way - search json configs in metadata folder
    boolean modernFormat = false;
    Path metadataFolder = project.getMetadataFolder(false);
    if (Files.exists(metadataFolder)) {
        // try-with-resources: Files.list() must be closed to release the
        // underlying directory handle
        try (java.util.stream.Stream<Path> dirStream = Files.list(metadataFolder)) {
            List<Path> mdFiles = dirStream
                .filter(path -> !Files.isDirectory(path) && Files.exists(path))
                .collect(Collectors.toList());
            for (Path res : mdFiles) {
                String fileName = res.getFileName().toString();
                if (fileName.startsWith(MODERN_CONFIG_FILE_PREFIX) && fileName.endsWith(MODERN_CONFIG_FILE_EXT)) {
                    loadDataSources(res, refresh, true, parseResults);
                    modernFormat = true;
                }
            }
        } catch (IOException e) {
            log.error("Error during project files read", e);
        }
    }
    if (!modernFormat) {
        if (Files.exists(project.getAbsolutePath())) {
            // Legacy way (search config.xml in project folder)
            try (java.util.stream.Stream<Path> dirStream = Files.list(project.getAbsolutePath())) {
                List<Path> mdFiles = dirStream
                    .filter(path -> !Files.isDirectory(path) && Files.exists(path))
                    .collect(Collectors.toList());
                for (Path res : mdFiles) {
                    String fileName = res.getFileName().toString();
                    if (fileName.startsWith(LEGACY_CONFIG_FILE_PREFIX) && fileName.endsWith(LEGACY_CONFIG_FILE_EXT)) {
                        loadDataSources(res, refresh, false, parseResults);
                    }
                }
            } catch (IOException e) {
                log.error("Error during legacy project files read", e);
            }
        }
        if (!storages.isEmpty()) {
            // Save config immediately in the new format
            flushConfig();
        }
    }
    {
        // Call external configurations
        Map<String, Object> searchOptions = new LinkedHashMap<>();
        for (DataSourceConfigurationStorageDescriptor cfd : DataSourceProviderRegistry.getInstance().getDataSourceConfigurationStorages()) {
            try {
                List<? extends DBPDataSourceContainer> loadedDS = cfd.getInstance().loadDataSources(this, searchOptions);
                if (!loadedDS.isEmpty()) {
                    parseResults.addedDataSources.addAll(loadedDS);
                }
            } catch (Exception e) {
                log.error("Error loading data sources from storage '" + cfd.getName() + "'", e);
            }
        }
    }

    // Reflect changes
    if (refresh) {
        for (DBPDataSourceContainer ds : parseResults.updatedDataSources) {
            fireDataSourceEvent(DBPEvent.Action.OBJECT_UPDATE, ds);
        }
        for (DBPDataSourceContainer ds : parseResults.addedDataSources) {
            fireDataSourceEvent(DBPEvent.Action.OBJECT_ADD, ds);
        }
        // Datasources missing from every parsed config are removed, unless
        // they are provided/detached (managed outside these files)
        List<DataSourceDescriptor> removedDataSource = new ArrayList<>();
        for (DataSourceDescriptor ds : dataSources.values()) {
            if (!parseResults.addedDataSources.contains(ds) && !parseResults.updatedDataSources.contains(ds) &&
                !ds.isProvided() && !ds.isExternallyProvided() && !ds.isDetached())
            {
                removedDataSource.add(ds);
            }
        }
        for (DataSourceDescriptor ds : removedDataSource) {
            this.dataSources.remove(ds.getId());
            this.fireDataSourceEvent(DBPEvent.Action.OBJECT_REMOVE, ds);
            ds.dispose();
        }
    }
}
/**
 * Resolves (or lazily creates) the {@link DataSourceStorage} backing the
 * given config file and delegates the actual parsing to it.
 */
private void loadDataSources(@NotNull Path path, boolean refresh, boolean modern, @NotNull ParseResults parseResults) {
    // The storage is "default" when the file name matches the project's
    // primary config file for this format; anything else is an extra config.
    String defaultName = modern ? MODERN_CONFIG_FILE_NAME : LEGACY_CONFIG_FILE_NAME;
    boolean isDefaultStorage = path.getFileName().toString().equalsIgnoreCase(defaultName);
    DataSourceStorage storage;
    synchronized (storages) {
        storage = storages.get(path);
        if (storage == null) {
            storage = new DataSourceStorage(path, isDefaultStorage);
            storages.put(path, storage);
        }
    }
    loadDataSources(path, refresh, modern, parseResults, storage);
}
/**
 * Parses one configuration file into the given storage, choosing the
 * serializer by format. Parse failures are logged and remembered in
 * {@code lastLoadError} rather than propagated.
 */
private void loadDataSources(@NotNull Path fromFile, boolean refresh, boolean modern, @NotNull ParseResults parseResults, @NotNull DBPDataSourceConfigurationStorage configurationStorage) {
    // Nothing to do when the config file is absent
    if (!Files.exists(fromFile)) {
        return;
    }
    try {
        DataSourceSerializer serializer = modern
            ? new DataSourceSerializerModern(this)
            : new DataSourceSerializerLegacy(this);
        serializer.parseDataSources(fromFile, configurationStorage, refresh, parseResults);
        updateProjectNature();
        // Successful parse resets the sticky error marker
        lastLoadError = null;
    } catch (Exception ex) {
        lastLoadError = ex;
        log.error("Error loading datasource config from " + fromFile.toAbsolutePath(), ex);
    }
}
/**
 * Persists every known datasource storage to disk.
 *
 * For each storage: resolves the target config file (default storages go to
 * the project's modern/legacy config file; extra legacy *.xml configs are
 * migrated to JSON files in the metadata folder), backs the file up, then
 * either deletes it (no datasources left) or serializes the storage's
 * datasources into it. Secure preferences are flushed after each storage.
 * {@code saveInProgress} guards against re-entrant saves; individual storage
 * failures are logged and do not abort the remaining storages.
 */
private void saveDataSources() {
    // In-memory projects have no backing files to write
    if (project.isInMemory()) {
        return;
    }
    updateProjectNature();
    final DBRProgressMonitor monitor = new VoidProgressMonitor();
    saveInProgress = true;
    try {
        for (DataSourceStorage storage : storages.values()) {
            // Only the datasources belonging to this storage are written here
            List<DataSourceDescriptor> localDataSources = getDataSources(storage);
            Path configFile = storage.getSourceFile();
            if (storage.isDefault()) {
                if (project.isModernProject()) {
                    configFile = getModernConfigFile();
                } else {
                    configFile = getLegacyConfigFile();
                }
            } else {
                String configFileName = configFile.getFileName().toString();
                if (configFileName.startsWith(LEGACY_CONFIG_FILE_PREFIX) && configFileName.endsWith(".xml")) {
                    // Legacy configuration - move to metadata folder as json
                    String newFileName = MODERN_CONFIG_FILE_PREFIX + configFileName.substring(LEGACY_CONFIG_FILE_PREFIX.length());
                    int divPos = newFileName.lastIndexOf(".");
                    newFileName = newFileName.substring(0, divPos) + ".json";
                    configFile = project.getMetadataFolder(false).resolve(newFileName);
                }
            }
            try {
                // Keep a backup so a failed write doesn't destroy the config
                ContentUtils.makeFileBackup(configFile);
                if (localDataSources.isEmpty()) {
                    // No datasources left in this storage - remove its file
                    if (Files.exists(configFile)) {
                        try {
                            Files.delete(configFile);
                        } catch (IOException e) {
                            log.error("Error deleting file '" + configFile.toAbsolutePath() + "'", e);
                        }
                    }
                } else {
                    // Serializer format follows the project format, which may
                    // differ from the storage's original format
                    DataSourceSerializer serializer;
                    if (!project.isModernProject()) {
                        serializer = new DataSourceSerializerLegacy(this);
                    } else {
                        serializer = new DataSourceSerializerModern(this);
                    }
                    // Ensure the metadata folder exists before writing
                    project.getMetadataFolder(true);
                    serializer.saveDataSources(
                        monitor,
                        storage,
                        localDataSources,
                        configFile);
                }
                try {
                    getSecurePreferences().flush();
                } catch (Throwable e) {
                    log.error("Error saving secured preferences", e);
                }
            } catch (Exception ex) {
                // Per-storage failure: log and continue with the next storage
                log.error("Error saving datasources configuration", ex);
            }
        }
    } finally {
        saveInProgress = false;
    }
}
/**
 * Returns the datasources belonging to the given storage (identity match).
 */
private List<DataSourceDescriptor> getDataSources(DataSourceStorage storage) {
    // Snapshot under the lock - dataSources may be mutated concurrently
    synchronized (dataSources) {
        List<DataSourceDescriptor> matched = new ArrayList<>();
        for (DataSourceDescriptor descriptor : dataSources.values()) {
            if (descriptor.getStorage() == storage) {
                matched.add(descriptor);
            }
        }
        return matched;
    }
}
/**
 * Synchronizes the Eclipse project nature with the registry contents:
 * the DBeaver nature is attached while at least one datasource exists and
 * detached when the registry is empty. All failures are logged at debug
 * level and swallowed - nature bookkeeping must never break the registry.
 */
private void updateProjectNature() {
    try {
        IProject eclipseProject = project.getEclipseProject();
        final IProjectDescription description = eclipseProject.getDescription();
        if (description != null) {
            String[] natureIds = description.getNatureIds();
            if (dataSources.isEmpty()) {
                // Remove nature
                if (ArrayUtils.contains(natureIds, DBeaverNature.NATURE_ID)) {
                    description.setNatureIds(ArrayUtils.remove(String.class, natureIds, DBeaverNature.NATURE_ID));
                    eclipseProject.setDescription(description, new NullProgressMonitor());
                }
            } else {
                // Add nature
                if (!ArrayUtils.contains(natureIds, DBeaverNature.NATURE_ID)) {
                    description.setNatureIds(ArrayUtils.add(String.class, natureIds, DBeaverNature.NATURE_ID));
                    // NOTE(review): only the add path catches CoreException
                    // locally; the remove path relies on the outer catch.
                    try {
                        eclipseProject.setDescription(description, new NullProgressMonitor());
                    } catch (CoreException e) {
                        log.debug("Can't set project nature", e);
                    }
                }
            }
        }
    } catch (Exception e) {
        log.debug(e);
    }
}
/**
 * Removes the secure-storage node holding the datasource's saved passwords.
 *
 * Fix: the caught exception was previously dropped - only the message string
 * was logged, losing the stack trace. The throwable is now passed to the
 * logger.
 */
private void clearSecuredPasswords(DataSourceDescriptor dataSource) {
    try {
        dataSource.getSecurePreferences().removeNode();
    } catch (Throwable e) {
        log.debug("Error clearing '" + dataSource.getId() + "' secure storage", e);
    }
}
/**
 * Returns the project this registry is bound to.
 */
@Override
public DBPProject getProject() {
    return this.project;
}
/**
 * Human-readable identity, e.g. {@code MyProject (DataSourceRegistry)}.
 */
@Override
public String toString() {
    StringBuilder text = new StringBuilder(project.getName());
    text.append(" (").append(getClass().getSimpleName()).append(")");
    return text.toString();
}
/**
 * Accumulates the outcome of a configuration parse pass: which datasources
 * were newly added and which existing ones were updated. Insertion order is
 * preserved (LinkedHashSet) so events fire in parse order.
 */
static class ParseResults {
    // Existing datasources whose configuration was changed by the parse
    Set<DBPDataSourceContainer> updatedDataSources = new LinkedHashSet<>();
    // Datasources first seen during this parse
    Set<DBPDataSourceContainer> addedDataSources = new LinkedHashSet<>();
}
/**
 * Task that disconnects every connected datasource in the registry.
 * {@code disconnected} records whether the last disconnect attempt
 * succeeded.
 */
private class DisconnectTask implements DBRRunnableWithProgress {
    boolean disconnected;

    @Override
    public void run(DBRProgressMonitor monitor) throws InvocationTargetException, InterruptedException {
        // Wrap the monitor so isCanceled() always reports false:
        // It is never canceled because we call DisconnectTask on shutdown
        // when all tasks are canceled
        monitor = new ProxyProgressMonitor(monitor) {
            @Override
            public boolean isCanceled() {
                return false;
            }
        };
        // Work on a snapshot - disconnecting may mutate the live map
        List<DataSourceDescriptor> dsSnapshot;
        synchronized (dataSources) {
            dsSnapshot = CommonUtils.copyList(dataSources.values());
        }
        monitor.beginTask("Disconnect all databases", dsSnapshot.size());
        try {
            for (DataSourceDescriptor dataSource : dsSnapshot) {
                // NOTE(review): always false due to the proxy above - kept
                // for symmetry with other registry tasks
                if (monitor.isCanceled()) {
                    break;
                }
                if (dataSource.isConnected()) {
                    try {
                        // Disconnect
                        monitor.subTask("Disconnect from [" + dataSource.getName() + "]");
                        disconnected = dataSource.disconnect(monitor);
                    } catch (Exception ex) {
                        // Best effort: one failed disconnect must not stop the rest
                        log.error("Can't shutdown data source '" + dataSource.getName() + "'", ex);
                    }
                }
                monitor.worked(1);
            }
        } finally {
            monitor.done();
        }
    }
}
/**
 * Background job that flushes the datasource configuration to disk.
 * Serialized on the registry instance so concurrent save requests cannot
 * interleave their file writes.
 */
private class ConfigSaver extends AbstractJob {
    ConfigSaver() {
        super("Datasource configuration save");
    }

    @Override
    protected IStatus run(DBRProgressMonitor monitor) {
        synchronized (DataSourceRegistry.this) {
            //log.debug("Save column config " + System.currentTimeMillis());
            saveDataSources();
        }
        return Status.OK_STATUS;
    }
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.qjournal.client;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.URI;
import java.net.URL;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.PriorityQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.DFSUtil;
import org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocolProtos.GetJournalStateResponseProto;
import org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocolProtos.NewEpochResponseProto;
import org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocolProtos.PrepareRecoveryResponseProto;
import org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocolProtos.SegmentStateProto;
import org.apache.hadoop.hdfs.server.common.Storage;
import org.apache.hadoop.hdfs.server.common.StorageInfo;
import org.apache.hadoop.hdfs.server.common.Util;
import org.apache.hadoop.hdfs.server.namenode.EditLogFileInputStream;
import org.apache.hadoop.hdfs.server.namenode.EditLogInputStream;
import org.apache.hadoop.hdfs.server.namenode.EditLogOutputStream;
import org.apache.hadoop.hdfs.server.namenode.JournalManager;
import org.apache.hadoop.hdfs.server.namenode.JournalSet;
import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo;
import org.apache.hadoop.hdfs.server.protocol.RemoteEditLog;
import org.apache.hadoop.hdfs.server.protocol.RemoteEditLogManifest;
import org.apache.hadoop.hdfs.web.URLConnectionFactory;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import com.google.protobuf.TextFormat;
/**
 * A JournalManager that writes to a set of remote JournalNodes,
 * requiring a quorum of nodes to ack each write.
 *
 * Fixes in this revision:
 * - doRollback() reported "doFinalize()" in its error messages (copy-paste).
 * - getJournalCTime() rethrow message was garbled
 *   ("Could not journal CTime for one more JournalNodes").
 * - InterruptedException handlers now restore the thread's interrupt flag
 *   and chain the cause into the thrown IOException; TimeoutException
 *   handlers chain the cause as well.
 */
@InterfaceAudience.Private
public class QuorumJournalManager implements JournalManager {
  static final Log LOG = LogFactory.getLog(QuorumJournalManager.class);

  // Timeouts for which the QJM will wait for each of the following actions.
  private final int startSegmentTimeoutMs;
  private final int prepareRecoveryTimeoutMs;
  private final int acceptRecoveryTimeoutMs;
  private final int finalizeSegmentTimeoutMs;
  private final int selectInputStreamsTimeoutMs;
  private final int getJournalStateTimeoutMs;
  private final int newEpochTimeoutMs;
  private final int writeTxnsTimeoutMs;

  // This timeout is used for calls that don't occur during normal operation
  // e.g. format, upgrade operations and a few others. So we can use rather
  // lengthy timeouts by default.
  private final int timeoutMs;

  private final Configuration conf;
  private final URI uri;
  private final NamespaceInfo nsInfo;
  private final String nameServiceId;
  // True only after recoverUnfinalizedSegments() succeeded
  private boolean isActiveWriter;

  private final AsyncLoggerSet loggers;
  private int outputBufferCapacity = 512 * 1024;
  private final URLConnectionFactory connectionFactory;

  @VisibleForTesting
  public QuorumJournalManager(Configuration conf,
      URI uri,
      NamespaceInfo nsInfo) throws IOException {
    this(conf, uri, nsInfo, null, IPCLoggerChannel.FACTORY);
  }

  public QuorumJournalManager(Configuration conf,
      URI uri, NamespaceInfo nsInfo, String nameServiceId) throws IOException {
    this(conf, uri, nsInfo, nameServiceId, IPCLoggerChannel.FACTORY);
  }

  @VisibleForTesting
  QuorumJournalManager(Configuration conf,
      URI uri, NamespaceInfo nsInfo,
      AsyncLogger.Factory loggerFactory) throws IOException {
    this(conf, uri, nsInfo, null, loggerFactory);
  }

  QuorumJournalManager(Configuration conf,
      URI uri, NamespaceInfo nsInfo, String nameServiceId,
      AsyncLogger.Factory loggerFactory) throws IOException {
    Preconditions.checkArgument(conf != null, "must be configured");

    this.conf = conf;
    this.uri = uri;
    this.nsInfo = nsInfo;
    this.nameServiceId = nameServiceId;
    this.loggers = new AsyncLoggerSet(createLoggers(loggerFactory));
    this.connectionFactory = URLConnectionFactory
        .newDefaultURLConnectionFactory(conf);

    // Configure timeouts.
    this.startSegmentTimeoutMs = conf.getInt(
        DFSConfigKeys.DFS_QJOURNAL_START_SEGMENT_TIMEOUT_KEY,
        DFSConfigKeys.DFS_QJOURNAL_START_SEGMENT_TIMEOUT_DEFAULT);
    this.prepareRecoveryTimeoutMs = conf.getInt(
        DFSConfigKeys.DFS_QJOURNAL_PREPARE_RECOVERY_TIMEOUT_KEY,
        DFSConfigKeys.DFS_QJOURNAL_PREPARE_RECOVERY_TIMEOUT_DEFAULT);
    this.acceptRecoveryTimeoutMs = conf.getInt(
        DFSConfigKeys.DFS_QJOURNAL_ACCEPT_RECOVERY_TIMEOUT_KEY,
        DFSConfigKeys.DFS_QJOURNAL_ACCEPT_RECOVERY_TIMEOUT_DEFAULT);
    this.finalizeSegmentTimeoutMs = conf.getInt(
        DFSConfigKeys.DFS_QJOURNAL_FINALIZE_SEGMENT_TIMEOUT_KEY,
        DFSConfigKeys.DFS_QJOURNAL_FINALIZE_SEGMENT_TIMEOUT_DEFAULT);
    this.selectInputStreamsTimeoutMs = conf.getInt(
        DFSConfigKeys.DFS_QJOURNAL_SELECT_INPUT_STREAMS_TIMEOUT_KEY,
        DFSConfigKeys.DFS_QJOURNAL_SELECT_INPUT_STREAMS_TIMEOUT_DEFAULT);
    this.getJournalStateTimeoutMs = conf.getInt(
        DFSConfigKeys.DFS_QJOURNAL_GET_JOURNAL_STATE_TIMEOUT_KEY,
        DFSConfigKeys.DFS_QJOURNAL_GET_JOURNAL_STATE_TIMEOUT_DEFAULT);
    this.newEpochTimeoutMs = conf.getInt(
        DFSConfigKeys.DFS_QJOURNAL_NEW_EPOCH_TIMEOUT_KEY,
        DFSConfigKeys.DFS_QJOURNAL_NEW_EPOCH_TIMEOUT_DEFAULT);
    this.writeTxnsTimeoutMs = conf.getInt(
        DFSConfigKeys.DFS_QJOURNAL_WRITE_TXNS_TIMEOUT_KEY,
        DFSConfigKeys.DFS_QJOURNAL_WRITE_TXNS_TIMEOUT_DEFAULT);
    this.timeoutMs = (int) conf.getTimeDuration(DFSConfigKeys
        .DFS_QJM_OPERATIONS_TIMEOUT,
        DFSConfigKeys.DFS_QJM_OPERATIONS_TIMEOUT_DEFAULT, TimeUnit
        .MILLISECONDS);
  }

  protected List<AsyncLogger> createLoggers(
      AsyncLogger.Factory factory) throws IOException {
    return createLoggers(conf, uri, nsInfo, factory, nameServiceId);
  }

  /**
   * Extracts the journal id from a qjournal URI; it is the (single) path
   * component, e.g. "myjournal" in qjournal://host/myjournal.
   */
  static String parseJournalId(URI uri) {
    String path = uri.getPath();
    Preconditions.checkArgument(path != null && !path.isEmpty(),
        "Bad URI '%s': must identify journal in path component",
        uri);
    String journalId = path.substring(1);
    checkJournalId(journalId);
    return journalId;
  }

  /**
   * Validates a journal id: non-empty, no '/' and not starting with '.'.
   */
  public static void checkJournalId(String jid) {
    Preconditions.checkArgument(jid != null &&
        !jid.isEmpty() &&
        !jid.contains("/") &&
        !jid.startsWith("."),
        "bad journal id: " + jid);
  }

  /**
   * Fence any previous writers, and obtain a unique epoch number
   * for write-access to the journal nodes.
   *
   * @return the new, unique epoch number
   */
  Map<AsyncLogger, NewEpochResponseProto> createNewUniqueEpoch()
      throws IOException {
    Preconditions.checkState(!loggers.isEpochEstablished(),
        "epoch already created");

    Map<AsyncLogger, GetJournalStateResponseProto> lastPromises =
        loggers.waitForWriteQuorum(loggers.getJournalState(),
            getJournalStateTimeoutMs, "getJournalState()");

    // Our new epoch must exceed every epoch any quorum member has promised
    long maxPromised = Long.MIN_VALUE;
    for (GetJournalStateResponseProto resp : lastPromises.values()) {
      maxPromised = Math.max(maxPromised, resp.getLastPromisedEpoch());
    }
    assert maxPromised >= 0;

    long myEpoch = maxPromised + 1;
    Map<AsyncLogger, NewEpochResponseProto> resps =
        loggers.waitForWriteQuorum(loggers.newEpoch(nsInfo, myEpoch),
            newEpochTimeoutMs, "newEpoch(" + myEpoch + ")");

    loggers.setEpoch(myEpoch);
    return resps;
  }

  /**
   * Formats ALL journal nodes - unlike normal writes, format requires a
   * response from every node, not just a quorum.
   */
  @Override
  public void format(NamespaceInfo nsInfo) throws IOException {
    QuorumCall<AsyncLogger,Void> call = loggers.format(nsInfo);
    try {
      call.waitFor(loggers.size(), loggers.size(), 0, timeoutMs,
          "format");
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt();
      throw new IOException("Interrupted waiting for format() response", e);
    } catch (TimeoutException e) {
      throw new IOException("Timed out waiting for format() response", e);
    }

    if (call.countExceptions() > 0) {
      call.rethrowException("Could not format one or more JournalNodes");
    }
  }

  @Override
  public boolean hasSomeData() throws IOException {
    QuorumCall<AsyncLogger, Boolean> call =
        loggers.isFormatted();

    try {
      call.waitFor(loggers.size(), 0, 0, timeoutMs, "hasSomeData");
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt();
      throw new IOException("Interrupted while determining if JNs have data", e);
    } catch (TimeoutException e) {
      throw new IOException("Timed out waiting for response from loggers", e);
    }

    if (call.countExceptions() > 0) {
      call.rethrowException(
          "Unable to check if JNs are ready for formatting");
    }

    // If any of the loggers reported that it is already formatted, then
    // we should prompt before (re)formatting.
    for (Boolean hasData : call.getResults().values()) {
      if (hasData) {
        return true;
      }
    }

    // Otherwise, none were formatted, we can safely format.
    return false;
  }

  /**
   * Run recovery/synchronization for a specific segment.
   * Postconditions:
   * <ul>
   * <li>This segment will be finalized on a majority
   * of nodes.</li>
   * <li>All nodes which contain the finalized segment will
   * agree on the length.</li>
   * </ul>
   *
   * @param segmentTxId the starting txid of the segment
   * @throws IOException
   */
  private void recoverUnclosedSegment(long segmentTxId) throws IOException {
    Preconditions.checkArgument(segmentTxId > 0);
    LOG.info("Beginning recovery of unclosed segment starting at txid " +
        segmentTxId);

    // Step 1. Prepare recovery
    QuorumCall<AsyncLogger,PrepareRecoveryResponseProto> prepare =
        loggers.prepareRecovery(segmentTxId);
    Map<AsyncLogger, PrepareRecoveryResponseProto> prepareResponses=
        loggers.waitForWriteQuorum(prepare, prepareRecoveryTimeoutMs,
            "prepareRecovery(" + segmentTxId + ")");
    LOG.info("Recovery prepare phase complete. Responses:\n" +
        QuorumCall.mapToString(prepareResponses));

    // Determine the logger who either:
    // a) Has already accepted a previous proposal that's higher than any
    //    other
    //
    // OR, if no such logger exists:
    //
    // b) Has the longest log starting at this transaction ID

    // TODO: we should collect any "ties" and pass the URL for all of them
    // when syncing, so we can tolerate failure during recovery better.
    Entry<AsyncLogger, PrepareRecoveryResponseProto> bestEntry = Collections.max(
        prepareResponses.entrySet(), SegmentRecoveryComparator.INSTANCE);
    AsyncLogger bestLogger = bestEntry.getKey();
    PrepareRecoveryResponseProto bestResponse = bestEntry.getValue();

    // Log the above decision, check invariants.
    if (bestResponse.hasAcceptedInEpoch()) {
      LOG.info("Using already-accepted recovery for segment " +
          "starting at txid " + segmentTxId + ": " +
          bestEntry);
    } else if (bestResponse.hasSegmentState()) {
      LOG.info("Using longest log: " + bestEntry);
    } else {
      // None of the responses to prepareRecovery() had a segment at the given
      // txid. This can happen for example in the following situation:
      // - 3 JNs: JN1, JN2, JN3
      // - writer starts segment 101 on JN1, then crashes before
      //   writing to JN2 and JN3
      // - during newEpoch(), we saw the segment on JN1 and decide to
      //   recover segment 101
      // - before prepare(), JN1 crashes, and we only talk to JN2 and JN3,
      //   neither of which has any entry for this log.
      // In this case, it is allowed to do nothing for recovery, since the
      // segment wasn't started on a quorum of nodes.

      // Sanity check: we should only get here if none of the responses had
      // a log. This should be a postcondition of the recovery comparator,
      // but a bug in the comparator might cause us to get here.
      for (PrepareRecoveryResponseProto resp : prepareResponses.values()) {
        assert !resp.hasSegmentState() :
            "One of the loggers had a response, but no best logger " +
            "was found.";
      }

      LOG.info("None of the responders had a log to recover: " +
          QuorumCall.mapToString(prepareResponses));
      return;
    }

    SegmentStateProto logToSync = bestResponse.getSegmentState();
    assert segmentTxId == logToSync.getStartTxId();

    // Sanity check: none of the loggers should be aware of a higher
    // txid than the txid we intend to truncate to
    for (Map.Entry<AsyncLogger, PrepareRecoveryResponseProto> e :
        prepareResponses.entrySet()) {
      AsyncLogger logger = e.getKey();
      PrepareRecoveryResponseProto resp = e.getValue();

      if (resp.hasLastCommittedTxId() &&
          resp.getLastCommittedTxId() > logToSync.getEndTxId()) {
        throw new AssertionError("Decided to synchronize log to " + logToSync +
            " but logger " + logger + " had seen txid " +
            resp.getLastCommittedTxId() + " committed");
      }
    }

    URL syncFromUrl = bestLogger.buildURLToFetchLogs(segmentTxId);

    QuorumCall<AsyncLogger,Void> accept = loggers.acceptRecovery(logToSync, syncFromUrl);
    loggers.waitForWriteQuorum(accept, acceptRecoveryTimeoutMs,
        "acceptRecovery(" + TextFormat.shortDebugString(logToSync) + ")");

    // If one of the loggers above missed the synchronization step above, but
    // we send a finalize() here, that's OK. It validates the log before
    // finalizing. Hence, even if it is not "in sync", it won't incorrectly
    // finalize.
    QuorumCall<AsyncLogger, Void> finalize =
        loggers.finalizeLogSegment(logToSync.getStartTxId(), logToSync.getEndTxId());
    loggers.waitForWriteQuorum(finalize, finalizeSegmentTimeoutMs,
        String.format("finalizeLogSegment(%s-%s)",
            logToSync.getStartTxId(),
            logToSync.getEndTxId()));
  }

  static List<AsyncLogger> createLoggers(Configuration conf,
      URI uri,
      NamespaceInfo nsInfo,
      AsyncLogger.Factory factory,
      String nameServiceId)
      throws IOException {
    List<AsyncLogger> ret = Lists.newArrayList();
    List<InetSocketAddress> addrs = Util.getAddressesList(uri);
    // Quorum protocols want an odd number of participants
    if (addrs.size() % 2 == 0) {
      LOG.warn("Quorum journal URI '" + uri + "' has an even number " +
          "of Journal Nodes specified. This is not recommended!");
    }
    String jid = parseJournalId(uri);
    for (InetSocketAddress addr : addrs) {
      ret.add(factory.createLogger(conf, nsInfo, jid, nameServiceId, addr));
    }
    return ret;
  }

  @Override
  public EditLogOutputStream startLogSegment(long txId, int layoutVersion)
      throws IOException {
    Preconditions.checkState(isActiveWriter,
        "must recover segments before starting a new one");
    QuorumCall<AsyncLogger, Void> q = loggers.startLogSegment(txId,
        layoutVersion);
    loggers.waitForWriteQuorum(q, startSegmentTimeoutMs,
        "startLogSegment(" + txId + ")");
    boolean updateCommittedTxId = conf.getBoolean(
        DFSConfigKeys.DFS_HA_TAILEDITS_INPROGRESS_KEY,
        DFSConfigKeys.DFS_HA_TAILEDITS_INPROGRESS_DEFAULT);
    return new QuorumOutputStream(loggers, txId, outputBufferCapacity,
        writeTxnsTimeoutMs, updateCommittedTxId);
  }

  @Override
  public void finalizeLogSegment(long firstTxId, long lastTxId)
      throws IOException {
    QuorumCall<AsyncLogger,Void> q = loggers.finalizeLogSegment(
        firstTxId, lastTxId);
    loggers.waitForWriteQuorum(q, finalizeSegmentTimeoutMs,
        String.format("finalizeLogSegment(%s-%s)", firstTxId, lastTxId));
  }

  @Override
  public void setOutputBufferCapacity(int size) {
    outputBufferCapacity = size;
  }

  @Override
  public void purgeLogsOlderThan(long minTxIdToKeep) throws IOException {
    // This purges asynchronously -- there's no need to wait for a quorum
    // here, because it's always OK to fail.
    LOG.info("Purging remote journals older than txid " + minTxIdToKeep);
    loggers.purgeLogsOlderThan(minTxIdToKeep);
  }

  @Override
  public void recoverUnfinalizedSegments() throws IOException {
    Preconditions.checkState(!isActiveWriter, "already active writer");

    LOG.info("Starting recovery process for unclosed journal segments...");
    Map<AsyncLogger, NewEpochResponseProto> resps = createNewUniqueEpoch();
    LOG.info("Successfully started new epoch " + loggers.getEpoch());

    if (LOG.isDebugEnabled()) {
      LOG.debug("newEpoch(" + loggers.getEpoch() + ") responses:\n" +
          QuorumCall.mapToString(resps));
    }

    long mostRecentSegmentTxId = Long.MIN_VALUE;
    for (NewEpochResponseProto r : resps.values()) {
      if (r.hasLastSegmentTxId()) {
        mostRecentSegmentTxId = Math.max(mostRecentSegmentTxId,
            r.getLastSegmentTxId());
      }
    }

    // On a completely fresh system, none of the journals have any
    // segments, so there's nothing to recover.
    if (mostRecentSegmentTxId != Long.MIN_VALUE) {
      recoverUnclosedSegment(mostRecentSegmentTxId);
    }
    isActiveWriter = true;
  }

  @Override
  public void close() throws IOException {
    loggers.close();
  }

  public void selectInputStreams(Collection<EditLogInputStream> streams,
      long fromTxnId, boolean inProgressOk) throws IOException {
    selectInputStreams(streams, fromTxnId, inProgressOk, false);
  }

  @Override
  public void selectInputStreams(Collection<EditLogInputStream> streams,
      long fromTxnId, boolean inProgressOk,
      boolean onlyDurableTxns) throws IOException {
    QuorumCall<AsyncLogger, RemoteEditLogManifest> q =
        loggers.getEditLogManifest(fromTxnId, inProgressOk);
    Map<AsyncLogger, RemoteEditLogManifest> resps =
        loggers.waitForWriteQuorum(q, selectInputStreamsTimeoutMs,
            "selectInputStreams");

    LOG.debug("selectInputStream manifests:\n" +
        Joiner.on("\n").withKeyValueSeparator(": ").join(resps));

    final PriorityQueue<EditLogInputStream> allStreams =
        new PriorityQueue<EditLogInputStream>(64,
            JournalSet.EDIT_LOG_INPUT_STREAM_COMPARATOR);
    for (Map.Entry<AsyncLogger, RemoteEditLogManifest> e : resps.entrySet()) {
      AsyncLogger logger = e.getKey();
      RemoteEditLogManifest manifest = e.getValue();
      long committedTxnId = manifest.getCommittedTxnId();

      for (RemoteEditLog remoteLog : manifest.getLogs()) {
        URL url = logger.buildURLToFetchLogs(remoteLog.getStartTxId());

        long endTxId = remoteLog.getEndTxId();

        // If it's bounded by durable Txns, endTxId could not be larger
        // than committedTxnId. This ensures the consistency.
        if (onlyDurableTxns && inProgressOk) {
          endTxId = Math.min(endTxId, committedTxnId);
        }

        EditLogInputStream elis = EditLogFileInputStream.fromUrl(
            connectionFactory, url, remoteLog.getStartTxId(),
            endTxId, remoteLog.isInProgress());
        allStreams.add(elis);
      }
    }
    JournalSet.chainAndMakeRedundantStreams(streams, allStreams, fromTxnId);
  }

  @Override
  public String toString() {
    return "QJM to " + loggers;
  }

  @VisibleForTesting
  AsyncLoggerSet getLoggerSetForTests() {
    return loggers;
  }

  @Override
  public void doPreUpgrade() throws IOException {
    QuorumCall<AsyncLogger, Void> call = loggers.doPreUpgrade();
    try {
      call.waitFor(loggers.size(), loggers.size(), 0, timeoutMs,
          "doPreUpgrade");

      if (call.countExceptions() > 0) {
        call.rethrowException("Could not do pre-upgrade of one or more JournalNodes");
      }
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt();
      throw new IOException("Interrupted waiting for doPreUpgrade() response", e);
    } catch (TimeoutException e) {
      throw new IOException("Timed out waiting for doPreUpgrade() response", e);
    }
  }

  @Override
  public void doUpgrade(Storage storage) throws IOException {
    QuorumCall<AsyncLogger, Void> call = loggers.doUpgrade(storage);
    try {
      call.waitFor(loggers.size(), loggers.size(), 0, timeoutMs,
          "doUpgrade");

      if (call.countExceptions() > 0) {
        call.rethrowException("Could not perform upgrade of one or more JournalNodes");
      }
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt();
      throw new IOException("Interrupted waiting for doUpgrade() response", e);
    } catch (TimeoutException e) {
      throw new IOException("Timed out waiting for doUpgrade() response", e);
    }
  }

  @Override
  public void doFinalize() throws IOException {
    QuorumCall<AsyncLogger, Void> call = loggers.doFinalize();
    try {
      call.waitFor(loggers.size(), loggers.size(), 0, timeoutMs,
          "doFinalize");

      if (call.countExceptions() > 0) {
        call.rethrowException("Could not finalize one or more JournalNodes");
      }
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt();
      throw new IOException("Interrupted waiting for doFinalize() response", e);
    } catch (TimeoutException e) {
      throw new IOException("Timed out waiting for doFinalize() response", e);
    }
  }

  @Override
  public boolean canRollBack(StorageInfo storage, StorageInfo prevStorage,
      int targetLayoutVersion) throws IOException {
    QuorumCall<AsyncLogger, Boolean> call = loggers.canRollBack(storage,
        prevStorage, targetLayoutVersion);
    try {
      call.waitFor(loggers.size(), loggers.size(), 0, timeoutMs,
          "lockSharedStorage");

      if (call.countExceptions() > 0) {
        call.rethrowException("Could not check if roll back possible for"
            + " one or more JournalNodes");
      }

      // Either they all return the same thing or this call fails, so we can
      // just return the first result.
      try {
        DFSUtil.assertAllResultsEqual(call.getResults().values());
      } catch (AssertionError ae) {
        throw new IOException("Results differed for canRollBack", ae);
      }
      for (Boolean result : call.getResults().values()) {
        return result;
      }
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt();
      throw new IOException("Interrupted waiting for lockSharedStorage() " +
          "response", e);
    } catch (TimeoutException e) {
      throw new IOException("Timed out waiting for lockSharedStorage() " +
          "response", e);
    }

    throw new AssertionError("Unreachable code.");
  }

  @Override
  public void doRollback() throws IOException {
    QuorumCall<AsyncLogger, Void> call = loggers.doRollback();
    try {
      call.waitFor(loggers.size(), loggers.size(), 0, timeoutMs,
          "doRollback");

      if (call.countExceptions() > 0) {
        call.rethrowException("Could not perform rollback of one or more JournalNodes");
      }
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt();
      // Fixed: previously referenced doFinalize() (copy-paste error)
      throw new IOException("Interrupted waiting for doRollback() response", e);
    } catch (TimeoutException e) {
      throw new IOException("Timed out waiting for doRollback() response", e);
    }
  }

  @Override
  public void discardSegments(long startTxId) throws IOException {
    QuorumCall<AsyncLogger, Void> call = loggers.discardSegments(startTxId);
    try {
      call.waitFor(loggers.size(), loggers.size(), 0,
          timeoutMs, "discardSegments");
      if (call.countExceptions() > 0) {
        call.rethrowException(
            "Could not perform discardSegments of one or more JournalNodes");
      }
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt();
      throw new IOException(
          "Interrupted waiting for discardSegments() response", e);
    } catch (TimeoutException e) {
      throw new IOException(
          "Timed out waiting for discardSegments() response", e);
    }
  }

  @Override
  public long getJournalCTime() throws IOException {
    QuorumCall<AsyncLogger, Long> call = loggers.getJournalCTime();
    try {
      call.waitFor(loggers.size(), loggers.size(), 0,
          timeoutMs, "getJournalCTime");

      if (call.countExceptions() > 0) {
        // Fixed: message previously read "Could not journal CTime for one
        // more JournalNodes"
        call.rethrowException("Could not get journal CTime for one "
            + "or more JournalNodes");
      }

      // Either they all return the same thing or this call fails, so we can
      // just return the first result.
      try {
        DFSUtil.assertAllResultsEqual(call.getResults().values());
      } catch (AssertionError ae) {
        throw new IOException("Results differed for getJournalCTime", ae);
      }
      for (Long result : call.getResults().values()) {
        return result;
      }
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt();
      throw new IOException("Interrupted waiting for getJournalCTime() " +
          "response", e);
    } catch (TimeoutException e) {
      throw new IOException("Timed out waiting for getJournalCTime() " +
          "response", e);
    }

    throw new AssertionError("Unreachable code.");
  }
}
| |
package org.zstack.core.cloudbus;
import org.apache.commons.lang.StringUtils;
import org.zstack.header.exception.CloudRuntimeException;
import org.zstack.header.message.GsonTransient;
import org.zstack.header.message.NeedJsonSchema;
import org.zstack.header.message.NoJsonSchema;
import org.zstack.header.search.Inventory;
import org.zstack.utils.FieldUtils;
import org.zstack.utils.TypeUtils;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.util.*;
import java.util.Map.Entry;
import java.util.concurrent.ConcurrentHashMap;
/**
*/
/**
 * Builds a "JSON schema" for a message object: a map from fully-qualified
 * class name to the list of field paths (e.g. {@code "vm.vmNics[0]"}) at
 * which instances of that class occur in the object graph. Only classes
 * annotated with {@link Inventory} or {@link NeedJsonSchema} are recorded,
 * but the traversal still descends through unannotated intermediate objects.
 */
public class MessageJsonSchemaBuilder {
    // Cache of fields known to be skippable; ConcurrentHashMap is used as a
    // concurrent set (key == value).
    private static Map<Field, Field> skipMap = new ConcurrentHashMap<Field, Field>();

    /**
     * Returns true for fields that never contribute to the schema:
     * primitive/wrapper types, static fields, and fields annotated with
     * {@link NoJsonSchema} or {@link GsonTransient}. Positive results are
     * cached in {@link #skipMap}.
     */
    private static boolean isSkip(Field f) {
        if (skipMap.containsKey(f)) {
            return true;
        }
        if (TypeUtils.isPrimitiveOrWrapper(f.getType())) {
            skipMap.put(f, f);
            return true;
        }
        if (f.isAnnotationPresent(NoJsonSchema.class)) {
            skipMap.put(f, f);
            return true;
        }
        if (Modifier.isStatic(f.getModifiers())) {
            skipMap.put(f, f);
            return true;
        }
        if (f.isAnnotationPresent(GsonTransient.class)) {
            skipMap.put(f, f);
            return true;
        }
        return false;
    }

    // Reads the field's value, making private fields accessible first.
    private static Object getValue(Field f, Object obj) throws IllegalAccessException {
        f.setAccessible(true);
        return f.get(obj);
    }

    private static boolean isNullValue(Field f, Object obj) throws IllegalAccessException {
        return getValue(f, obj) == null;
    }

    /**
     * Recursive worker that walks all fields of {@code obj}.
     *
     * @param schema accumulated class-name -&gt; field-path map
     * @param trace  current dotted field path from the root message
     * @param paths  objects on the current descent path, used to detect cycles
     */
    private static void buildSchema(Object obj, Map<String, List<String>> schema, Stack<String> trace, Stack<Object> paths) throws IllegalAccessException {
        List<Field> fs = FieldUtils.getAllFields(obj.getClass());
        for (Field f : fs) {
            if (isSkip(f)) {
                continue;
            }
            if (Map.class.isAssignableFrom(f.getType())) {
                schemaMap(f, obj, schema, trace, paths);
                continue;
            }
            if (Collection.class.isAssignableFrom(f.getType())) {
                Class genericType = FieldUtils.getGenericType(f);
                if (genericType != null && TypeUtils.isPrimitiveOrWrapper(genericType)) {
                    // Collections of primitives/wrappers need no schema.
                    continue;
                }
                if (!List.class.isAssignableFrom(f.getType())) {
                    throw new CloudRuntimeException(String.format("the collection type in message can only be List, but %s.%s is %s",
                            f.getDeclaringClass().getName(), f.getName(), f.getType().getName()));
                }
                schemaList(f, obj, schema, trace, paths);
                continue;
            }
            schemaObject(f, obj, schema, trace, paths);
        }
    }

    /**
     * Handles a List-typed field: records each element under the path
     * {@code field[index]} and recurses into it.
     */
    private static void schemaList(Field f, Object obj, Map<String, List<String>> schema, Stack<String> trace, Stack<Object> paths) throws IllegalAccessException {
        if (isNullValue(f, obj)) {
            return;
        }
        Object value = getValue(f, obj);
        if (paths.contains(value)) {
            // Push the repeated object so the error message shows the cycle.
            paths.push(value);
            throw new CloudRuntimeException(String.format("recursive object graph: %s", StringUtils.join(paths, " --> ")));
        }
        paths.push(value);
        List col = (List) value;
        // Iterate by index: the previous col.indexOf(item) was O(n) per
        // element and reported the first occurrence's index for duplicates.
        for (int i = 0; i < col.size(); i++) {
            Object item = col.get(i);
            String itemName = String.format("%s[%s]", f.getName(), i);
            if (isObjectNeedSchema(item)) {
                addToSchema(item.getClass(), itemName, schema, trace);
            }
            trace.push(itemName);
            buildSchema(item, schema, trace, paths);
            trace.pop();
        }
        paths.pop();
    }

    /**
     * Entry point: builds the schema for {@code msg}.
     *
     * @return map of class name to the field paths where that class occurs
     * @throws CloudRuntimeException on reflection failure or a recursive
     *         object graph
     */
    public static Map<String, List<String>> buildSchema(Object msg) {
        try {
            Stack<Object> paths = new Stack<Object>();
            Stack<String> trace = new Stack<String>();
            Map<String, List<String>> schema = new LinkedHashMap<String, List<String>>();
            buildSchema(msg, schema, trace, paths);
            return schema;
        } catch (Exception e) {
            throw new CloudRuntimeException(e);
        }
    }

    /**
     * Handles a plain object field: records it (if annotated) under the
     * current trace and recurses into it.
     */
    private static void schemaObject(Field f, Object obj, Map<String, List<String>> schema, Stack<String> trace, Stack<Object> paths) throws IllegalAccessException {
        if (isNullValue(f, obj)) {
            return;
        }
        Object value = getValue(f, obj);
        if (paths.contains(value)) {
            paths.push(value);
            throw new CloudRuntimeException(String.format("recursive object graph: %s", StringUtils.join(paths, " --> ")));
        }
        if (isObjectNeedSchema(value)) {
            addToSchema(value.getClass(), f.getName(), schema, trace);
        }
        paths.push(value);
        trace.push(f.getName());
        buildSchema(value, schema, trace, paths);
        trace.pop();
        paths.pop();
    }

    /**
     * Records that an instance of {@code realClass} occurs at the path formed
     * by joining {@code trace} with dots and appending {@code name}.
     */
    private static void addToSchema(Class<?> realClass, String name, Map<String, List<String>> schema, Stack<String> trace) {
        String base = StringUtils.join(trace, ".");
        List<String> path = schema.get(realClass.getName());
        if (path == null) {
            path = new ArrayList<String>();
            schema.put(realClass.getName(), path);
        }
        if (base.equals("")) {
            path.add(name);
        } else {
            path.add(String.format("%s.%s", base, name));
        }
    }

    // Only @Inventory and @NeedJsonSchema classes are recorded in the schema.
    private static boolean isObjectNeedSchema(Object obj) {
        return obj.getClass().isAnnotationPresent(Inventory.class) || obj.getClass().isAnnotationPresent(NeedJsonSchema.class);
    }

    /**
     * Handles a Map-typed field: records each value under the path
     * {@code field["key"]} and recurses into it.
     */
    private static void schemaMap(Field f, Object obj, Map<String, List<String>> schema, Stack<String> trace, Stack<Object> paths) throws IllegalAccessException {
        Class genericType = FieldUtils.getGenericType(f);
        if (genericType != null && TypeUtils.isPrimitiveOrWrapper(genericType)) {
            return;
        }
        if (isNullValue(f, obj)) {
            return;
        }
        Object value = getValue(f, obj);
        if (paths.contains(value)) {
            paths.push(value);
            throw new CloudRuntimeException(String.format("recursive object graph: %s", StringUtils.join(paths, " --> ")));
        }
        // BUG FIX: previously pushed 'obj' (the owner) instead of 'value'
        // (the map itself). That left cycles through map values undetected
        // and made the final pop() remove the wrong element; schemaList and
        // schemaObject both push the field value.
        paths.push(value);
        Map map = (Map) value;
        Iterator<Entry> it = map.entrySet().iterator();
        while (it.hasNext()) {
            Entry e = it.next();
            String key = e.getKey().toString();
            Object item = e.getValue();
            String itemName = String.format("%s[\"%s\"]", f.getName(), key);
            if (isObjectNeedSchema(item)) {
                addToSchema(item.getClass(), itemName, schema, trace);
            }
            trace.push(itemName);
            buildSchema(item, schema, trace, paths);
            trace.pop();
        }
        paths.pop();
    }
}
| |
/**
* Copyright (C) 2011-2012 Turn, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.turn.ttorrent.client.announce;
import com.turn.ttorrent.client.Context;
import com.turn.ttorrent.common.AnnounceableInformation;
import com.turn.ttorrent.common.LoggerUtils;
import com.turn.ttorrent.common.Peer;
import com.turn.ttorrent.common.TorrentLoggerFactory;
import com.turn.ttorrent.common.protocol.AnnounceRequestMessage;
import org.slf4j.Logger;
import java.net.ConnectException;
import java.net.URI;
import java.net.UnknownHostException;
import java.net.UnknownServiceException;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.CopyOnWriteArrayList;
/**
 * BitTorrent announce sub-system.
 * <p/>
 * <p>
 * A BitTorrent client must check-in to the torrent's tracker(s) to get peers
 * and to report certain events.
 * </p>
 * <p/>
 * <p>
 * This Announce class implements a periodic announce request thread that will
 * notify announce request event listeners for each tracker response.
 * </p>
 *
 * @author mpetazzoni
 * @see com.turn.ttorrent.common.protocol.TrackerMessage
 */
public class Announce implements Runnable {

  protected static final Logger logger =
      TorrentLoggerFactory.getLogger();

  // Local peer endpoints handed to every tracker client.
  private List<Peer> myPeers;
  private final TrackerClientFactory myTrackerClientFactory;

  /**
   * The tiers of tracker clients matching the tracker URIs defined in the
   * torrent.
   */
  private final ConcurrentMap<String, TrackerClient> clients;
  private final Context myContext;

  /**
   * Announce thread and control.
   */
  private Thread thread;
  private volatile boolean stop;
  // Set by stop(true). NOTE(review): never read within this class —
  // presumably consumed elsewhere or vestigial; confirm before removing.
  private boolean forceStop;

  /**
   * Announce interval, in seconds.
   */
  private int myAnnounceInterval;
  private TrackerClient myDefaultTracker;

  /**
   * Initialize the base announce class members for the announcer.
   */
  public Announce(Context context, TrackerClientFactory trackerClientFactory) {
    this.clients = new ConcurrentHashMap<String, TrackerClient>();
    this.thread = null;
    myTrackerClientFactory = trackerClientFactory;
    myContext = context;
    myPeers = new CopyOnWriteArrayList<Peer>();
  }

  /**
   * Send a single announce for the given torrent right away, creating and
   * caching a tracker client for its announce URL if one does not exist yet.
   * Announce failures are logged and swallowed.
   */
  public void forceAnnounce(AnnounceableInformation torrent, AnnounceResponseListener listener, AnnounceRequestMessage.RequestEvent event) throws UnknownServiceException, UnknownHostException {
    URI trackerUrl = URI.create(torrent.getAnnounce());
    TrackerClient client = this.clients.get(trackerUrl.toString());
    try {
      if (client == null) {
        client = myTrackerClientFactory.createTrackerClient(myPeers, trackerUrl);
        client.register(listener);
        this.clients.put(trackerUrl.toString(), client);
      }
      client.announceAllInterfaces(event, false, torrent);
    } catch (AnnounceException e) {
      logger.info(String.format("Unable to force announce torrent %s on tracker %s.", torrent.getHexInfoHash(), String.valueOf(trackerUrl)));
      logger.debug(String.format("Unable to force announce torrent %s on tracker %s.", torrent.getHexInfoHash(), String.valueOf(trackerUrl)), e);
    }
  }

  /**
   * Start the announce request thread.
   */
  public void start(final URI defaultTrackerURI, final AnnounceResponseListener listener, final Peer[] peers, final int announceInterval) {
    myAnnounceInterval = announceInterval;
    myPeers.addAll(Arrays.asList(peers));
    if (defaultTrackerURI != null) {
      try {
        myDefaultTracker = myTrackerClientFactory.createTrackerClient(myPeers, defaultTrackerURI);
        myDefaultTracker.register(listener);
        this.clients.put(defaultTrackerURI.toString(), myDefaultTracker);
      } catch (Exception e) {
        // FIX: this exception used to be silently swallowed, hiding a
        // misconfigured default tracker. Announcing still proceeds with
        // per-torrent trackers.
        LoggerUtils.warnAndDebugDetails(logger, "Failed to create default tracker client for {}", defaultTrackerURI, e);
      }
    } else {
      myDefaultTracker = null;
    }
    this.stop = false;
    this.forceStop = false;
    if (this.thread == null || !this.thread.isAlive()) {
      this.thread = new Thread(this);
      this.thread.setName("torrent tracker announce thread");
      this.thread.start();
    }
  }

  /**
   * Set the announce interval. A non-positive interval force-stops the
   * announce thread instead.
   */
  public void setAnnounceInterval(int announceInterval) {
    if (announceInterval <= 0) {
      this.stop(true);
      return;
    }
    if (this.myAnnounceInterval == announceInterval) {
      return;
    }
    logger.trace("Setting announce interval to {}s per tracker request.",
        announceInterval);
    this.myAnnounceInterval = announceInterval;
  }

  /**
   * Stop the announce thread.
   * <p/>
   * <p>
   * One last 'stopped' announce event might be sent to the tracker to
   * announce we're going away, depending on the implementation.
   * </p>
   */
  public void stop() {
    this.stop = true;
    if (this.thread != null && this.thread.isAlive()) {
      this.thread.interrupt();
      for (TrackerClient client : this.clients.values()) {
        client.close();
      }
      try {
        this.thread.join();
      } catch (InterruptedException ie) {
        // Restore the interrupt status so callers can observe it.
        Thread.currentThread().interrupt();
      }
    }
    this.myPeers.clear();
    this.thread = null;
  }

  /**
   * Main announce loop.
   * <p/>
   * <p>
   * The announce thread starts by making the initial 'started' announce
   * request to register on the tracker and get the announce interval value.
   * Subsequent announce requests are ordinary, event-less, periodic requests
   * for peers.
   * </p>
   * <p/>
   * <p>
   * Unless forcefully stopped, the announce thread will terminate by sending
   * a 'stopped' announce request before stopping.
   * </p>
   */
  @Override
  public void run() {
    logger.info("Starting announce loop...");
    while (!this.stop && !Thread.currentThread().isInterrupted()) {
      final List<AnnounceableInformation> announceableInformationList = myContext.getTorrentsStorage().announceableTorrents();
      logger.debug("Starting announce for {} torrents", announceableInformationList.size());
      announceAllTorrents(announceableInformationList, AnnounceRequestMessage.RequestEvent.NONE);
      try {
        // Multiply as long to avoid int overflow for very large intervals.
        Thread.sleep(this.myAnnounceInterval * 1000L);
      } catch (InterruptedException ie) {
        break;
      }
    }
    announceAllTorrents(myContext.getTorrentsStorage().announceableTorrents(), AnnounceRequestMessage.RequestEvent.STOPPED);
    logger.info("Exited announce loop.");
  }

  /**
   * Fallback path: announce each torrent individually through its own
   * tracker client. Used when a multi-announce fails.
   */
  private void defaultAnnounce(List<AnnounceableInformation> torrentsForAnnounce) {
    for (AnnounceableInformation torrent : torrentsForAnnounce) {
      if (this.stop || Thread.currentThread().isInterrupted()) {
        break;
      }
      try {
        TrackerClient trackerClient = this.getCurrentTrackerClient(torrent);
        if (trackerClient != null) {
          trackerClient.announceAllInterfaces(AnnounceRequestMessage.RequestEvent.NONE, false, torrent);
        } else {
          logger.warn("Tracker client for {} is null. Torrent is not announced on tracker", torrent.getHexInfoHash());
        }
      } catch (Exception e) {
        logger.info(e.getMessage());
        logger.debug(e.getMessage(), e);
      }
    }
  }

  /**
   * Groups torrents by announce URL, issues one multi-announce per tracker,
   * and falls back to per-torrent announces for groups whose multi-announce
   * failed. Torrents with an empty/null tracker key are dropped from storage.
   */
  private void announceAllTorrents(List<AnnounceableInformation> announceableInformationList, AnnounceRequestMessage.RequestEvent event) {
    logger.debug("Started multi announce. Event {}, torrents {}", event, announceableInformationList);
    final Map<String, List<AnnounceableInformation>> torrentsGroupingByAnnounceUrl = new HashMap<String, List<AnnounceableInformation>>();
    for (AnnounceableInformation torrent : announceableInformationList) {
      final URI uriForTorrent = getURIForTorrent(torrent);
      if (uriForTorrent == null) continue;
      String torrentURI = uriForTorrent.toString();
      List<AnnounceableInformation> sharedTorrents = torrentsGroupingByAnnounceUrl.get(torrentURI);
      if (sharedTorrents == null) {
        sharedTorrents = new ArrayList<AnnounceableInformation>();
        torrentsGroupingByAnnounceUrl.put(torrentURI, sharedTorrents);
      }
      sharedTorrents.add(torrent);
    }
    List<AnnounceableInformation> unannouncedTorrents = new ArrayList<AnnounceableInformation>();
    for (Map.Entry<String, List<AnnounceableInformation>> e : torrentsGroupingByAnnounceUrl.entrySet()) {
      TrackerClient trackerClient = this.clients.get(e.getKey());
      if (trackerClient != null) {
        try {
          trackerClient.multiAnnounce(event, false, e.getValue(), myPeers);
        } catch (AnnounceException t) {
          LoggerUtils.warnAndDebugDetails(logger, "problem in multi announce {}", t.getMessage(), t);
          unannouncedTorrents.addAll(e.getValue());
        } catch (ConnectException t) {
          LoggerUtils.warnWithMessageAndDebugDetails(logger, "Cannot connect to the tracker {}", e.getKey(), t);
          logger.debug("next torrents contain {} in tracker list. {}", e.getKey(), e.getValue());
        }
      } else {
        logger.warn("Tracker client for {} is null. Torrents are not announced on tracker", e.getKey());
        if (e.getKey() == null || e.getKey().isEmpty()) {
          for (AnnounceableInformation announceableInformation : e.getValue()) {
            myContext.getTorrentsStorage().remove(announceableInformation.getHexInfoHash());
          }
        }
      }
    }
    if (unannouncedTorrents.size() > 0) {
      defaultAnnounce(unannouncedTorrents);
    }
  }

  /**
   * Returns the current tracker client used for announces.
   */
  public TrackerClient getCurrentTrackerClient(AnnounceableInformation torrent) {
    final URI uri = getURIForTorrent(torrent);
    if (uri == null) return null;
    return this.clients.get(uri.toString());
  }

  // First URI of the first announce tier, or null when the torrent has none.
  private URI getURIForTorrent(AnnounceableInformation torrent) {
    List<List<String>> announceList = torrent.getAnnounceList();
    if (announceList.size() == 0) return null;
    List<String> uris = announceList.get(0);
    if (uris.size() == 0) return null;
    return URI.create(uris.get(0));
  }

  public URI getDefaultTrackerURI() {
    if (myDefaultTracker == null) {
      return null;
    }
    return myDefaultTracker.getTrackerURI();
  }

  /**
   * Stop the announce thread.
   *
   * @param hard Whether to force stop the announce thread or not, i.e. not
   *             send the final 'stopped' announce request or not.
   */
  private void stop(boolean hard) {
    this.forceStop = hard;
    this.stop();
  }
}
| |
/*
* Copyright (c) 2011-2015, fortiss GmbH.
* Licensed under the Apache License, Version 2.0.
*
* Use, modification and distribution are subject to the terms specified
* in the accompanying license file LICENSE.txt located at the root directory
* of this software distribution.
*/
package helper;
/**
* HTML Helper
*
* @author amack
*
*/
public class HTMLHelper {

    /**
     * Writes a static, self-contained HTML page to {@code filePath} that lets
     * the user drag &amp; drop a CSV file into the browser and renders it as a
     * d3 multi-series line plot.
     *
     * The page loads jQuery, d3 and Bootstrap from public CDNs, so network
     * access is required when viewing it. The CSV is expected to contain a
     * "timeStep" column plus one column per plotted series; the y-axis is
     * labeled "Power [W]".
     *
     * NOTE(review): the drop handler feeds a data-URL into d3.csv(); the
     * markup itself says this "works in Firefox" — confirm browser support
     * before relying on it elsewhere.
     *
     * @param filePath destination path for the generated HTML document
     */
    public static void saveLinePlotHtml(String filePath) {
        // The whole document is a single literal so the generated file is
        // byte-for-byte predictable.
        String html = "<!DOCTYPE html>\n" +
                "<html>\n" +
                "<head>\n" +
                "    <title></title>\n" +
                "    <script src=\"http://ajax.googleapis.com/ajax/libs/jquery/2.0.3/jquery.min.js\"></script>\n" +
                "    <script src=\"http://d3js.org/d3.v3.min.js\" charset=\"utf-8\"></script>\n" +
                "    <link rel=\"stylesheet\" href=\"http://netdna.bootstrapcdn.com/bootstrap/3.0.2/css/bootstrap.min.css\">\n" +
                "    <style>\n" +
                "        #drop {\n" +
                "            min-height: 150px;\n" +
                "            width: 250px;\n" +
                "            border: 1px solid blue;\n" +
                "            margin: 10px;\n" +
                "            padding: 10px;\n" +
                "        }\n" +
                "        body {\n" +
                "            font: 10px sans-serif;\n" +
                "        }\n" +
                "\n" +
                "        .axis path,\n" +
                "        .axis line {\n" +
                "            fill: none;\n" +
                "            stroke: #000;\n" +
                "            shape-rendering: crispEdges;\n" +
                "        }\n" +
                "\n" +
                "        .x.axis path {\n" +
                "            display: none;\n" +
                "        }\n" +
                "\n" +
                "        .line {\n" +
                "            fill: none;\n" +
                "            stroke: steelblue;\n" +
                "            stroke-width: 1.5px;\n" +
                "        }\n" +
                "\n" +
                "    </style>\n" +
                "\n" +
                "</head>\n" +
                "<body>\n" +
                "\n" +
                "<div class=\"navbar navbar-inverse navbar-fixed-top\" role=\"navigation\">\n" +
                "    <div class=\"container\">\n" +
                "        <div class=\"navbar-header\">\n" +
                "            <button type=\"button\" class=\"navbar-toggle\" data-toggle=\"collapse\" data-target=\".navbar-collapse\">\n" +
                "                <span class=\"sr-only\">Toggle navigation</span>\n" +
                "                <span class=\"icon-bar\"></span>\n" +
                "                <span class=\"icon-bar\"></span>\n" +
                "                <span class=\"icon-bar\"></span>\n" +
                "            </button>\n" +
                "            <a class=\"navbar-brand\" href=\"#\">Drag/Drop</a>\n" +
                "        </div>\n" +
                "        <div class=\"collapse navbar-collapse\">\n" +
                "\n" +
                "        </div><!--/.nav-collapse -->\n" +
                "    </div>\n" +
                "</div>\n" +
                "\n" +
                "<div class=\"container\">\n" +
                "\n" +
                "    <div class=\"starter-template\">\n" +
                "        <h1 align=center style=\"margin-top: 100px;\">Drag/Drop a csv to plot</h1>\n" +
                "        <DIV id=\"status\">Drag a .csv File, works in Firefox</DIV>\n" +
                "        <DIV id=\"drop\">Drop files here.</DIV>\n" +
                "\n" +
                "        <DIV id=\"list\"></DIV>\n" +
                "    </div>\n" +
                "\n" +
                "</div>\n" +
                "\n" +
                "\n" +
                "\n" +
                "</body>\n" +
                "\n" +
                "\n" +
                "\n" +
                "\n" +
                "\n" +
                "\n" +
                "\n" +
                "\n" +
                "\n" +
                "\n" +
                "\n" +
                "\n" +
                "\n" +
                "\n" +
                "\n" +
                "\n" +
                "\n" +
                "<script>\n" +
                "    if(window.FileReader) {\n" +
                "        var drop;\n" +
                "        addEventHandler(window, 'load', function() {\n" +
                "            var status = document.getElementById('status');\n" +
                "            drop = document.getElementById('drop');\n" +
                "            var list = document.getElementById('list');\n" +
                "\n" +
                "            function cancel(e) {\n" +
                "                if (e.preventDefault) { e.preventDefault(); }\n" +
                "                return false;\n" +
                "            }\n" +
                "\n" +
                "            // Tells the browser that we *can* drop on this target\n" +
                "            addEventHandler(drop, 'dragover', cancel);\n" +
                "            addEventHandler(drop, 'dragenter', cancel);\n" +
                "\n" +
                "            addEventHandler(drop, 'drop', function (e) {\n" +
                "                e = e || window.event; // get window.event if e argument missing (in IE)\n" +
                "                if (e.preventDefault) { e.preventDefault(); } // stops the browser from redirecting off to the image.\n" +
                "\n" +
                "                var dt = e.dataTransfer;\n" +
                "                var files = dt.files;\n" +
                "                for (var i=0; i<files.length; i++) {\n" +
                "                    var file = files[i];\n" +
                "                    var reader = new FileReader();\n" +
                "\n" +
                "                    //attach event handlers here...\n" +
                "\n" +
                "                    reader.readAsDataURL(file);\n" +
                "                    addEventHandler(reader, 'loadend', function(e, file) {\n" +
                "                        var bin = this.result;\n" +
                "                        var newFile = document.createElement('div');\n" +
                "                        newFile.innerHTML = 'Loaded : '+file.name+' size '+file.size+' B';\n" +
                "                        list.appendChild(newFile);\n" +
                "                        var fileNumber = list.getElementsByTagName('div').length;\n" +
                "                        status.innerHTML = fileNumber < files.length\n" +
                "                                ? 'Loaded 100% of file '+fileNumber+' of '+files.length+'...'\n" +
                "                                : 'Done loading. processed '+fileNumber+' files.';\n" +
                "\n" +
                "                        var img = document.createElement(\"img\");\n" +
                "                        img.file = file;\n" +
                "                        img.src = bin;\n" +
                "                        list.appendChild(img);\n" +
                "\n" +
                "\n" +
                "                        //d3 start\n" +
                "\n" +
                "\n" +
                "\n" +
                "\n" +
                "\n" +
                "\n" +
                "                        d3.csv(bin, function(error, data) {\n" +
                "                            color.domain(d3.keys(data[0]).filter(function(key) { return key !== \"timeStep\"; }));\n" +
                "\n" +
                "                            var physicalProperty = color.domain().map(function(name) {\n" +
                "                                return {\n" +
                "                                    name: name,\n" +
                "                                    values: data.map(function(d) {\n" +
                "                                        return {timeStep: d.timeStep, givenValue: +d[name]};\n" +
                "                                    })\n" +
                "                                };\n" +
                "                            });\n" +
                "\n" +
                "                            x.domain(d3.extent(data, function(d) { return d.timeStep; }));\n" +
                "\n" +
                "                            y.domain([\n" +
                "                                d3.min(physicalProperty, function(c) { return d3.min(c.values, function(v) { return v.givenValue; }); }),\n" +
                "                                d3.max(physicalProperty, function(c) { return d3.max(c.values, function(v) { return v.givenValue; }); })\n" +
                "                            ]);\n" +
                "\n" +
                "                            svg.append(\"g\")\n" +
                "                                    .attr(\"class\", \"x axis\")\n" +
                "                                    .attr(\"transform\", \"translate(0,\" + height + \")\")\n" +
                "                                    .call(xAxis);\n" +
                "\n" +
                "                            svg.append(\"g\")\n" +
                "                                    .attr(\"class\", \"y axis\")\n" +
                "                                    .call(yAxis)\n" +
                "                                    .append(\"text\")\n" +
                "                                    .attr(\"transform\", \"rotate(-90)\")\n" +
                "                                    .attr(\"y\", 6)\n" +
                "                                    .attr(\"dy\", \".71em\")\n" +
                "                                    .style(\"text-anchor\", \"end\")\n" +
                "                                    .text(\"Power [W]\");\n" +
                "\n" +
                "                            var property = svg.selectAll(\".property\")\n" +
                "                                    .data(physicalProperty)\n" +
                "                                    .enter().append(\"g\")\n" +
                "                                    .attr(\"class\", \"property\");\n" +
                "\n" +
                "                            property.append(\"path\")\n" +
                "                                    .attr(\"class\", \"line\")\n" +
                "                                    .attr(\"d\", function(d) { return line(d.values); })\n" +
                "                                    .style(\"stroke\", function(d) { return color(d.name); });\n" +
                "\n" +
                "                            property.append(\"text\")\n" +
                "                                    .datum(function(d) { return {name: d.name, value: d.values[d.values.length - 1]}; })\n" +
                "                                    .attr(\"transform\", function(d) { return \"translate(\" + x(d.value.timeStep) + \",\" + y(d.value.givenValue) + \")\"; })\n" +
                "                                    .attr(\"x\", 3)\n" +
                "                                    .attr(\"dy\", \".35em\")\n" +
                "                                    .text(function(d) { return d.name; });\n" +
                "                        });\n" +
                "\n" +
                "\n" +
                "\n" +
                "\n" +
                "\n" +
                "                        //d3 end\n" +
                "\n" +
                "                    }.bindToEventHandler(file));\n" +
                "                }\n" +
                "                return false;\n" +
                "            });\n" +
                "            Function.prototype.bindToEventHandler = function bindToEventHandler() {\n" +
                "                var handler = this;\n" +
                "                var boundParameters = Array.prototype.slice.call(arguments);\n" +
                "                //create closure\n" +
                "                return function(e) {\n" +
                "                    e = e || window.event; // get window.event if e argument missing (in IE)\n" +
                "                    boundParameters.unshift(e);\n" +
                "                    handler.apply(this, boundParameters);\n" +
                "                }\n" +
                "            };\n" +
                "        });\n" +
                "    } else {\n" +
                "        document.getElementById('status').innerHTML = 'Your browser does not support the HTML5 FileReader.';\n" +
                "    }\n" +
                "    function addEventHandler(obj, evt, handler) {\n" +
                "        if(obj.addEventListener) {\n" +
                "            // W3C method\n" +
                "            obj.addEventListener(evt, handler, false);\n" +
                "        } else if(obj.attachEvent) {\n" +
                "            // IE method.\n" +
                "            obj.attachEvent('on'+evt, handler);\n" +
                "        } else {\n" +
                "            // Old school method.\n" +
                "            obj['on'+evt] = handler;\n" +
                "        }\n" +
                "    }\n" +
                "\n" +
                "    var margin = {top: 20, right: 80, bottom: 30, left: 50},\n" +
                "            width = 960 - margin.left - margin.right,\n" +
                "            height = 500 - margin.top - margin.bottom;\n" +
                "\n" +
                "    var x = d3.scale.linear()\n" +
                "            .range([0, width]);\n" +
                "\n" +
                "    var y = d3.scale.linear()\n" +
                "            .range([height, 0]);\n" +
                "\n" +
                "    var color = d3.scale.category10();\n" +
                "\n" +
                "    var xAxis = d3.svg.axis()\n" +
                "            .scale(x)\n" +
                "            .orient(\"bottom\");\n" +
                "\n" +
                "    var yAxis = d3.svg.axis()\n" +
                "            .scale(y)\n" +
                "            .orient(\"left\");\n" +
                "\n" +
                "    var line = d3.svg.line()\n" +
                "            .interpolate(\"basis\")\n" +
                "            .x(function(d) { return x(d.timeStep); })\n" +
                "            .y(function(d) { return y(d.givenValue); });\n" +
                "\n" +
                "    var svg = d3.select(\"body\").append(\"svg\")\n" +
                "            .attr(\"width\", width + margin.left + margin.right)\n" +
                "            .attr(\"height\", height + margin.top + margin.bottom)\n" +
                "            .append(\"g\")\n" +
                "            .attr(\"transform\", \"translate(\" + margin.left + \",\" + margin.top + \")\");\n" +
                "\n" +
                "\n" +
                "\n" +
                "</script>\n" +
                "</html>";
        // Delegates the actual file write to the project's IO helper.
        IoHelper.saveFile(filePath, html);
    }
}
| |
/**
* Copyright (c) 2016 Source Auditor Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.openchain.certification.dbdao;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Savepoint;
import java.util.ArrayList;
import java.util.List;
import javax.servlet.ServletConfig;
import org.apache.log4j.Logger;
import org.openchain.certification.InvalidUserException;
import org.openchain.certification.model.User;
/**
* This is a singleton DAO - since the requests will be relatively short and
* this will be frequently used, we will maintain a connection just for the user
* database.
*
* @author Gary O'Neall
*
*/
public class UserDb {

    static final Logger logger = Logger.getLogger(UserDb.class);
    // Lazily-created singleton instance; see getUserDb().
    private static UserDb _userDb;
    // Dedicated connection for all user queries; autocommit is disabled, so
    // every public method is responsible for committing.
    private Connection connection;
    // Statements prepared once per connection by prepareStatements().
    private PreparedStatement getUserQuery;
    private PreparedStatement getAllUserQuery;
    private PreparedStatement addUserQuery;
    private PreparedStatement updateVerifiedQuery;
    private PreparedStatement updateUserQuery;
    private PreparedStatement getUserIdQuery;
    // Retained so the connection can be re-created if it is found closed.
    private ServletConfig servletConfig;
/**
* Get the singleton UserDB instance
* @param servletConfig
* @return
* @throws SQLException
*/
public static synchronized UserDb getUserDb(ServletConfig servletConfig) throws SQLException {
if (_userDb == null) {
_userDb = new UserDb(servletConfig);
}
_userDb.checkConnection();
return _userDb;
}
/**
* Check if the connection is still active, creating the connection if needed
* @throws SQLException
*/
protected synchronized void checkConnection() throws SQLException {
if (this.connection == null || this.connection.isClosed()) {
this.connection = SurveyDatabase.createConnection(servletConfig);
this.connection.setAutoCommit(false);
prepareStatements();
}
}
/**
* This should only be called by the statice getUserDb method
* @param servletConfig
* @throws SQLException
*/
private UserDb(ServletConfig servletConfig) throws SQLException {
this.servletConfig = servletConfig;
this.connection = SurveyDatabase.createConnection(servletConfig);
this.connection.setAutoCommit(false);
prepareStatements();
}
    /**
     * Prepare all statements used by this DAO. Must be re-run whenever a new
     * connection is created, since prepared statements are bound to the
     * connection that prepared them.
     * @throws SQLException on any database error
     */
    private void prepareStatements() throws SQLException {
        // Single-user lookup by username.
        getUserQuery = connection.prepareStatement("select password_token, name, address, email," + //$NON-NLS-1$
                "verified, passwordReset, admin, verificationExpirationDate," + //$NON-NLS-1$
                " uuid, organization, name_permission, email_permission, language from openchain_user where username=?"); //$NON-NLS-1$
        // All users, sorted for stable listing.
        getAllUserQuery = connection.prepareStatement("select username, password_token, name, address, email," + //$NON-NLS-1$
                "verified, passwordReset, admin, verificationExpirationDate," + //$NON-NLS-1$
                " uuid, organization, name_permission, email_permission, language from openchain_user order by username asc"); //$NON-NLS-1$
        // Insert; parameter order must match the setXxx calls in addUser().
        addUserQuery = connection.prepareStatement("insert into openchain_user (username, password_token, name, address, email," + //$NON-NLS-1$
                "verified, passwordReset, admin, verificationExpirationDate," + //$NON-NLS-1$
                " uuid, organization, name_permission, email_permission, language) values (?,?,?,?,?,?,?,?,?,?,?,?,?,?)"); //$NON-NLS-1$
        updateVerifiedQuery = connection.prepareStatement("update openchain_user set verified=? where username=?"); //$NON-NLS-1$
        // Full-row update; parameter order must match updateUser().
        updateUserQuery = connection.prepareStatement("update openchain_user set password_token=?, " + //$NON-NLS-1$
                "name=?, address=?, verified=?, passwordReset=?, admin=?, " + //$NON-NLS-1$
                "verificationExpirationDate=?, uuid=?, organization=?, email=?, name_permission=?, email_permission=?," + //$NON-NLS-1$
                "language=? where username=?"); //$NON-NLS-1$
        getUserIdQuery = connection.prepareStatement("select id from openchain_user where username=?"); //$NON-NLS-1$
    }
/**
* Get the user from the database
* @param username Username of the user
* @return populated user from the DB
* @throws SQLException
*/
public synchronized User getUser(String username) throws SQLException {
ResultSet result = null;
try {
getUserQuery.setString(1, username);
result = getUserQuery.executeQuery();
if (!result.next()) {
return null;
}
User retval = new User();
retval.setAddress(result.getString("address")); //$NON-NLS-1$
retval.setAdmin(result.getBoolean("admin")); //$NON-NLS-1$
retval.setEmail(result.getString("email")); //$NON-NLS-1$
retval.setName(result.getString("name")); //$NON-NLS-1$
retval.setPasswordReset(result.getBoolean("passwordReset")); //$NON-NLS-1$
retval.setPasswordToken(result.getString("password_token")); //$NON-NLS-1$
retval.setUsername(username);
retval.setUuid(result.getString("uuid")); //$NON-NLS-1$
retval.setVerificationExpirationDate(result.getDate("verificationExpirationDate")); //$NON-NLS-1$
retval.setVerified(result.getBoolean("verified")); //$NON-NLS-1$
retval.setOrganization(result.getString("organization")); //$NON-NLS-1$
retval.setNamePermission(result.getBoolean("name_permission")); //$NON-NLS-1$
retval.setEmailPermission(result.getBoolean("email_permission")); //$NON-NLS-1$
retval.setLanguagePreference(result.getString("language")); //$NON-NLS-1$
return retval;
} finally {
if (result != null) {
result.close();
}
connection.commit();
}
}
/**
* @return all users from the database
* @throws SQLException
*/
public synchronized List<User> getUsers() throws SQLException {
List<User> retval = new ArrayList<User>();
ResultSet result = null;
try {
result = getAllUserQuery.executeQuery();
while (result.next()) {
User user = new User();
user.setAddress(result.getString("address")); //$NON-NLS-1$
user.setAdmin(result.getBoolean("admin")); //$NON-NLS-1$
user.setEmail(result.getString("email")); //$NON-NLS-1$
user.setName(result.getString("name")); //$NON-NLS-1$
user.setPasswordReset(result.getBoolean("passwordReset")); //$NON-NLS-1$
user.setPasswordToken(result.getString("password_token")); //$NON-NLS-1$
user.setUsername(result.getString("username")); //$NON-NLS-1$
user.setUuid(result.getString("uuid")); //$NON-NLS-1$
user.setVerificationExpirationDate(result.getDate("verificationExpirationDate")); //$NON-NLS-1$
user.setVerified(result.getBoolean("verified")); //$NON-NLS-1$
user.setOrganization(result.getString("organization")); //$NON-NLS-1$
user.setNamePermission(result.getBoolean("name_permission")); //$NON-NLS-1$
user.setEmailPermission(result.getBoolean("email_permission")); //$NON-NLS-1$
user.setLanguagePreference(result.getString("language")); //$NON-NLS-1$
retval.add(user);
}
return retval;
} finally {
if (result != null) {
result.close();
connection.commit();
}
}
}
    /**
     * Add a user to the database.  The username must not already exist
     * @param user user to add; verification expiration date must be non-null
     * @return a positive integer if successful (the JDBC update count)
     * @throws SQLException on any database error; the transaction is rolled
     *         back to the savepoint taken on entry
     * @throws InvalidUserException if a user with this username already exists
     */
    public synchronized int addUser(User user) throws SQLException, InvalidUserException {
        Savepoint save = connection.setSavepoint();
        long userId = getUserId(user.getUsername());
        if (userId >0) {
            // NOTE(review): this path exits before the try/finally below, so
            // the transaction is neither committed nor rolled back here —
            // presumably a later call on this connection cleans up; confirm.
            throw(new InvalidUserException("Can not add user "+user.getUsername()+": already exists.")); //$NON-NLS-1$ //$NON-NLS-2$
        }
        try {
            // Parameter indices must match the column list in addUserQuery.
            this.addUserQuery.setString(1, user.getUsername());
            this.addUserQuery.setString(2, user.getPasswordToken());
            this.addUserQuery.setString(3, user.getName());
            this.addUserQuery.setString(4, user.getAddress());
            this.addUserQuery.setString(5, user.getEmail());
            this.addUserQuery.setBoolean(6, user.isVerified());
            this.addUserQuery.setBoolean(7, user.isPasswordReset());
            this.addUserQuery.setBoolean(8, user.isAdmin());
            java.sql.Date sqlDate = new java.sql.Date(user.getVerificationExpirationDate().getTime());
            this.addUserQuery.setDate(9, sqlDate);
            this.addUserQuery.setString(10, user.getUuid());
            this.addUserQuery.setString(11, user.getOrganization());
            this.addUserQuery.setBoolean(12, user.hasNamePermission());
            this.addUserQuery.setBoolean(13, user.hasEmailPermission());
            // Language preference is the only nullable column.
            if (user.getLanguagePreference() != null) {
                this.addUserQuery.setString(14, user.getLanguagePreference());
            } else {
                this.addUserQuery.setNull(14, java.sql.Types.VARCHAR);
            }
            return this.addUserQuery.executeUpdate();
        } catch(SQLException ex) {
            // Undo the partial insert, then rethrow the original failure.
            if (save != null) {
                try {
                    connection.rollback(save);
                } catch (SQLException ex2) {
                    logger.error("Error rolling back transaction",ex2); //$NON-NLS-1$
                }
            }
            throw(ex);
        } finally {
            // Commit either the successful insert or the rollback.
            if (save != null) {
                this.connection.commit();
            }
        }
    }
/**
 * Look up the numeric database ID for a username.
 *
 * @param username the username to look up
 * @return the unique user ID in the database, or -1 if no such user exists
 * @throws SQLException on database error
 */
private long getUserId(String username) throws SQLException {
	getUserIdQuery.setString(1, username);
	ResultSet rs = getUserIdQuery.executeQuery();
	try {
		// No row means the user does not exist.
		return rs.next() ? rs.getLong(1) : -1;
	} finally {
		rs.close();
	}
}
/**
 * Check whether a user with the given username exists.
 *
 * @param username the username to check
 * @return true if the user exists
 * @throws SQLException on database error
 */
public synchronized boolean userExists(String username) throws SQLException {
	try {
		boolean exists = getUserId(username) > 0;
		return exists;
	} finally {
		// Close out the read transaction.
		this.connection.commit();
	}
}
/**
 * Set the verified flag for the given user.
 *
 * @param username the user to update
 * @param verified the new value of the verified flag
 * @return the number of rows updated
 * @throws SQLException on database error; rolled back to the entry savepoint
 */
public synchronized int setVerified(String username, boolean verified) throws SQLException {
	final Savepoint savepoint = this.connection.setSavepoint();
	try {
		updateVerifiedQuery.setBoolean(1, verified);
		updateVerifiedQuery.setString(2, username);
		return updateVerifiedQuery.executeUpdate();
	} catch(SQLException ex) {
		if (savepoint != null) {
			// Best-effort rollback; never mask the original failure.
			try {
				this.connection.rollback(savepoint);
			} catch (SQLException rollbackEx) {
				logger.error("Error rolling back transaction",rollbackEx); //$NON-NLS-1$
			}
		}
		throw ex;
	} finally {
		if (savepoint != null) {
			this.connection.commit();
		}
	}
}
/**
 * Update all fields in the user record whose username matches user.getUsername().
 * The username itself is the lookup key and is never changed.
 *
 * @param user the user whose fields should be written; must carry a non-empty username
 * @throws SQLException on database error
 * @throws InvalidUserException if user is null or has no username
 */
public synchronized void updateUser(User user) throws SQLException, InvalidUserException {
	if (user == null || user.getUsername() == null || user.getUsername().trim().isEmpty()) {
		throw(new InvalidUserException("Can not update user. No username specified")); //$NON-NLS-1$
	}
	try {
		// Parameter indices 1-13 are the SET columns of updateUserQuery;
		// index 14 is the WHERE username key.
		updateUserQuery.setString(1, user.getPasswordToken());
		updateUserQuery.setString(2, user.getName());
		updateUserQuery.setString(3, user.getAddress());
		updateUserQuery.setBoolean(4, user.isVerified());
		updateUserQuery.setBoolean(5, user.isPasswordReset());
		updateUserQuery.setBoolean(6, user.isAdmin());
		updateUserQuery.setDate(7, new java.sql.Date(user.getVerificationExpirationDate().getTime()));
		updateUserQuery.setString(8, user.getUuid());
		updateUserQuery.setString(9, user.getOrganization());
		updateUserQuery.setString(10, user.getEmail());
		updateUserQuery.setBoolean(11, user.hasNamePermission());
		updateUserQuery.setBoolean(12, user.hasEmailPermission());
		// Language preference is a nullable column.
		if (user.getLanguagePreference() != null) {
			updateUserQuery.setString(13, user.getLanguagePreference());
		} else {
			updateUserQuery.setNull(13, java.sql.Types.VARCHAR);
		}
		updateUserQuery.setString(14, user.getUsername());
		int count = updateUserQuery.executeUpdate();
		// Username is unique, so anything other than exactly one row is suspicious.
		if (count != 1) {
			logger.warn("Unexpected count result from update user query. Expected 1, found "+String.valueOf(count)); //$NON-NLS-1$
		}
	} finally {
		// NOTE(review): unlike addUser/setVerified there is no savepoint here,
		// so a failure still commits in this finally — confirm this is intended.
		this.connection.commit();
	}
}
}
| |
package de.uni_hannover.sra.minimax_simulator.model.configuration;
import com.google.common.collect.ImmutableList;
import de.uni_hannover.sra.minimax_simulator.Main;
import de.uni_hannover.sra.minimax_simulator.model.configuration.alu.AluOperation;
import de.uni_hannover.sra.minimax_simulator.model.configuration.event.MachineConfigEvent;
import de.uni_hannover.sra.minimax_simulator.model.configuration.event.MachineConfigListEvent.MachineConfigAluEvent;
import de.uni_hannover.sra.minimax_simulator.model.configuration.event.MachineConfigListEvent.MachineConfigMuxEvent;
import de.uni_hannover.sra.minimax_simulator.model.configuration.event.MachineConfigListEvent.MachineConfigRegisterEvent;
import de.uni_hannover.sra.minimax_simulator.model.configuration.event.MachineConfigListener;
import de.uni_hannover.sra.minimax_simulator.model.configuration.mux.MuxInput;
import de.uni_hannover.sra.minimax_simulator.model.configuration.mux.MuxType;
import de.uni_hannover.sra.minimax_simulator.model.configuration.mux.NullMuxInput;
import de.uni_hannover.sra.minimax_simulator.model.configuration.mux.RegisterMuxInput;
import de.uni_hannover.sra.minimax_simulator.model.configuration.register.RegisterExtension;
import de.uni_hannover.sra.minimax_simulator.model.signal.SignalRow;
import de.uni_hannover.sra.minimax_simulator.model.signal.SignalTable;
import de.uni_hannover.sra.minimax_simulator.model.signal.SignalValue;
import java.util.*;
/**
* This class represents the configuration of a register machine.<br>
* <br>
* Represented machines consist at least of some registers (which can be divided into base registers
* and extension (user) registers), an ALU for the execution of binary operations and two
* multiplexers providing the input for the ALU.<br>
* <br>
* The purpose of this class is loose the coupling between a user-modifiable, export-friendly
* configuration of a machine and its actual simulation, representation and persistence.<br>
* Instances are created using an {@link MinimaxConfigurationBuilder}.<br>
* This class makes use of the <i>Listener</i> pattern: If the user wishes to visualize or simulate
* a concrete machine represented by an instance of this class, client classes have to register as
* {@link MachineConfigListener} to the instance and then synchronize to it on their own.
*
* @author Martin Lück
*/
// is final since the only constructor is package-private
public final class MachineConfiguration {
// Listeners notified of every configuration change via postEvent().
private final List<MachineConfigListener> listeners;
// ALU operations, mutable backing list plus an unmodifiable public view.
private final List<AluOperation> alu;
private final List<AluOperation> aluView;
// Base registers are fixed at construction (immutable list); extended
// (user) registers are mutable with an unmodifiable public view.
private final List<RegisterExtension> baseRegisters;
private final List<RegisterExtension> extendedRegisters;
private final List<RegisterExtension> registersView;
// Selected inputs of multiplexers A and B, each with an unmodifiable view.
private final List<MuxInput> muxSourcesA;
private final List<MuxInput> muxSourcesB;
private final List<MuxInput> muxSourcesAView;
private final List<MuxInput> muxSourcesBView;
// All inputs a multiplexer could be set to, with an unmodifiable view.
private final List<MuxInput> availableMuxSources;
private final List<MuxInput> availableMuxSourcesView;
// Signal table of the machine; set after construction via setSignalTable().
private SignalTable signalTable;
/**
 * Package-private constructor; instances are created via the builder.
 *
 * @param aluOperations initial ALU operations (must not contain null)
 * @param baseRegisters base registers (none may be extended)
 * @param extendedRegisters extension registers (all must be extended)
 * @param availableMuxInput all selectable mux inputs (must not contain null)
 * @param selectedMuxInput currently selected inputs; must contain non-null
 *        lists for both {@link MuxType#A} and {@link MuxType#B}, otherwise the
 *        ArrayList copy below throws a NullPointerException
 */
MachineConfiguration(List<AluOperation> aluOperations, List<RegisterExtension> baseRegisters, List<RegisterExtension> extendedRegisters,
		List<MuxInput> availableMuxInput, Map<MuxType, List<MuxInput>> selectedMuxInput) {
	listeners = new ArrayList<>(5);
	alu = new ArrayList<>(aluOperations);
	aluView = Collections.unmodifiableList(alu);
	if (alu.contains(null)) {
		throw new NullPointerException("Alu operations cannot contain null");
	}
	// ImmutableList.copyOf rejects null elements.
	this.baseRegisters = ImmutableList.copyOf(baseRegisters);
	// implicitly check for null
	for (RegisterExtension reg : this.baseRegisters) {
		if (reg.isExtended()) {
			throw new IllegalArgumentException("Base register cannot have isExtended");
		}
	}
	// implicitly check for null
	this.extendedRegisters = new ArrayList<>(extendedRegisters);
	registersView = Collections.unmodifiableList(this.extendedRegisters);
	for (RegisterExtension reg : this.extendedRegisters) {
		if (!reg.isExtended()) {
			throw new IllegalArgumentException("Extended register must have isExtended set");
		}
	}
	availableMuxSources = new ArrayList<>(availableMuxInput);
	availableMuxSourcesView = Collections.unmodifiableList(availableMuxSources);
	List<MuxInput> sourcesA = selectedMuxInput.get(MuxType.A);
	List<MuxInput> sourcesB = selectedMuxInput.get(MuxType.B);
	if (availableMuxSources.contains(null)) {
		throw new NullPointerException("Mux inputs cannot contain null");
	}
	muxSourcesA = new ArrayList<>(sourcesA);
	muxSourcesB = new ArrayList<>(sourcesB);
	muxSourcesAView = Collections.unmodifiableList(muxSourcesA);
	muxSourcesBView = Collections.unmodifiableList(muxSourcesB);
}
/**
 * Sets the {@link SignalTable} of the machine's configuration, replacing any
 * previously set table.
 *
 * @param signalTable
 *          the {@code SignalTable} to use
 */
public void setSignalTable(SignalTable signalTable) {
	this.signalTable = signalTable;
}
/**
 * Adds an {@link AluOperation} to the machine's configuration and notifies
 * listeners of the addition.
 *
 * @param aluOp
 *          the {@code AluOperation} to add
 * @throws IllegalStateException if the operation is already configured
 */
public void addAluOperation(AluOperation aluOp) {
	if (alu.contains(aluOp)) {
		throw new IllegalStateException("Already contains " + aluOp);
	}
	alu.add(aluOp);
	int newIndex = alu.size() - 1;
	postEvent(MachineConfigAluEvent.eventAdded(aluOp, newIndex));
}
/**
 * Removes an {@link AluOperation} from the machine's configuration and
 * notifies listeners of the removal.
 *
 * @param aluOp
 *          the {@code AluOperation} to remove
 * @throws IllegalStateException if the operation is not configured
 */
public void removeAluOperation(AluOperation aluOp) {
	final int position = alu.indexOf(aluOp);
	if (position < 0) {
		throw new IllegalStateException(aluOp + " not in list");
	}
	alu.remove(position);
	postEvent(MachineConfigAluEvent.eventRemoved(aluOp, position));
}
/**
 * Exchanges two {@link AluOperation}s of the machine's configuration and
 * notifies listeners. A no-op when both indices are equal.
 *
 * @param index1
 *          the index of the first {@code AluOperation}
 * @param index2
 *          the index of the second {@code AluOperation}
 */
public void exchangeAluOperations(int index1, int index2) {
	if (index1 == index2) {
		return;
	}
	AluOperation first = alu.get(index1);
	AluOperation second = alu.get(index2);
	Collections.swap(alu, index1, index2);
	postEvent(MachineConfigAluEvent.eventExchanged(first, second, index1, index2));
}
/**
 * Gets the {@link AluOperation} at the specified index.
 *
 * @param index
 *          the index of the {@code AluOperation} to get
 * @return the {@code AluOperation} at the specified index
 */
public AluOperation getAluOperation(int index) {
	return alu.get(index);
}
/**
 * Gets all {@link AluOperation}s of the machine's configuration as an
 * unmodifiable view that reflects later changes.
 *
 * @return an unmodifiable list of all {@code AluOperation}s of the machine
 */
public List<AluOperation> getAluOperations() {
	return aluView;
}
/**
 * Adds a register ({@link RegisterExtension}) to the machine's configuration.
 * A matching {@link RegisterMuxInput} is also added to the available mux
 * sources; listeners are notified of both additions.
 *
 * @param register
 *          the {@code RegisterExtension} to add; must be an extended register
 * @throws IllegalStateException if the register is already configured
 * @throws IllegalArgumentException if the register is not extended
 */
public void addRegisterExtension(RegisterExtension register) {
	if (extendedRegisters.contains(register)) {
		throw new IllegalStateException("Already contains " + register);
	}
	if (!register.isExtended()) {
		throw new IllegalArgumentException("Can only add extended registers");
	}
	extendedRegisters.add(register);
	int registerIndex = extendedRegisters.size() - 1;
	postEvent(MachineConfigRegisterEvent.eventAdded(register, registerIndex));
	RegisterMuxInput muxInput = new RegisterMuxInput(register.getName());
	availableMuxSources.add(muxInput);
	postEvent(MachineConfigMuxEvent.eventAdded(null, muxInput, availableMuxSources.size() - 1));
}
/**
 * Removes a register ({@link RegisterExtension}) from the machine's configuration.
 *
 * @param register
 *          the {@code RegisterExtension} to remove
 * @throws IllegalStateException if the register is not configured
 */
public void removeRegisterExtension(RegisterExtension register) {
	final int position = extendedRegisters.indexOf(register);
	if (position < 0) {
		throw new IllegalStateException(register + " not in list");
	}
	removeRegister(register, position);
}
/**
 * Removes the register ({@link RegisterExtension}) at the specified index.
 *
 * @param index
 *          the index of the {@code RegisterExtension} to remove
 */
public void removeRegisterExtension(int index) {
	removeRegister(extendedRegisters.get(index), index);
}
/**
 * Removes the specified register ({@link RegisterExtension}) at the specified
 * index. Its entry is dropped from the available mux sources, every selected
 * use at muxes A and B is replaced by {@link NullMuxInput#INSTANCE}, and
 * listeners are notified of the removal.
 *
 * @param register
 *          the {@code RegisterExtension} to remove
 * @param index
 *          the index of the {@code RegisterExtension}
 */
private void removeRegister(RegisterExtension register, int index) {
	final String name = register.getName();
	removeAllMuxInputsOfRegister(name, null, availableMuxSources);
	for (MuxType mux : MuxType.values()) {
		replaceAllMuxInputsOfRegister(name, mux, getMuxSourcesInternal(mux));
	}
	extendedRegisters.remove(index);
	postEvent(MachineConfigRegisterEvent.eventRemoved(register, index));
}
/**
 * Removes every {@link RegisterMuxInput} referring to the named register from
 * the given list, posting a removal event for each.
 *
 * @param registerName
 *          the name of the {@code RegisterExtension} to remove
 * @param type
 *          the {@code MuxType} reported in the events (may be {@code null}
 *          for the available-sources list)
 * @param list
 *          the list of {@code MuxInput}s to clean
 */
private void removeAllMuxInputsOfRegister(String registerName, MuxType type, List<MuxInput> list) {
	ListIterator<MuxInput> it = list.listIterator();
	while (it.hasNext()) {
		final int position = it.nextIndex();
		final MuxInput candidate = it.next();
		if (!(candidate instanceof RegisterMuxInput)) {
			continue;
		}
		if (((RegisterMuxInput) candidate).getRegisterName().equals(registerName)) {
			it.remove();
			postEvent(MachineConfigMuxEvent.eventRemoved(type, candidate, position));
		}
	}
}
/**
 * Replaces every {@link RegisterMuxInput} referring to the named register in
 * the given list with {@link NullMuxInput#INSTANCE}, posting a replacement
 * event for each.
 *
 * @param registerName
 *          the name of the {@code RegisterExtension} to replace
 * @param type
 *          the {@code MuxType} reported in the events
 * @param list
 *          the list of {@code MuxInput}s of the multiplexer
 */
private void replaceAllMuxInputsOfRegister(String registerName, MuxType type, List<MuxInput> list) {
	ListIterator<MuxInput> it = list.listIterator();
	while (it.hasNext()) {
		final int position = it.nextIndex();
		final MuxInput candidate = it.next();
		if (!(candidate instanceof RegisterMuxInput)) {
			continue;
		}
		if (((RegisterMuxInput) candidate).getRegisterName().equals(registerName)) {
			it.set(NullMuxInput.INSTANCE);
			postEvent(MachineConfigMuxEvent.eventReplaced(type, candidate, NullMuxInput.INSTANCE, position));
		}
	}
}
/**
 * Replaces every {@link RegisterMuxInput} referring to the old register name
 * in the given list with a fresh input for the new register name, posting a
 * replacement event for each.
 *
 * @param oldRegisterName
 *          the name of the {@code RegisterExtension} to replace
 * @param newRegisterName
 *          the name of the {@code RegisterExtension} to use instead
 * @param type
 *          the {@code MuxType} reported in the events
 * @param list
 *          the list of {@code MuxInput}s of the multiplexer
 */
private void replaceAllMuxInputsOfRegister(String oldRegisterName, String newRegisterName, MuxType type, List<MuxInput> list) {
	ListIterator<MuxInput> it = list.listIterator();
	while (it.hasNext()) {
		final int position = it.nextIndex();
		final MuxInput candidate = it.next();
		if (!(candidate instanceof RegisterMuxInput)) {
			continue;
		}
		if (((RegisterMuxInput) candidate).getRegisterName().equals(oldRegisterName)) {
			// A new instance is created per occurrence, matching the original.
			RegisterMuxInput replacement = new RegisterMuxInput(newRegisterName);
			it.set(replacement);
			postEvent(MachineConfigMuxEvent.eventReplaced(type, candidate, replacement, position));
		}
	}
}
/**
 * Sets the specified register ({@link RegisterExtension}) at the specified index,
 * replacing the register currently at that position. Mux inputs and
 * write-enable signals referring to the old register are migrated to the new one.
 *
 * @param index
 *          the index where the {@code RegisterExtension} should be set
 * @param register
 *          the {@code RegisterExtension} to set; must be an extended register
 * @throws IllegalArgumentException if the register is not extended
 * @throws IllegalStateException if a different entry already equals the register
 */
public void setRegisterExtension(int index, RegisterExtension register) {
	if (!register.isExtended()) {
		throw new IllegalArgumentException("Can only add extended registers");
	}
	RegisterExtension oldRegister = extendedRegisters.get(index);
	if (!oldRegister.equals(register) && extendedRegisters.contains(register)) {
		throw new IllegalStateException("Already contains " + register);
	}
	// get the indices of SignalRows where the writeEnabled signal was set
	List<Integer> writeEnabled = new ArrayList<>();
	String regName = oldRegister.getName();
	int i = 0;
	for (SignalRow row : signalTable.getRows()) {
		// Signal keys follow the "<registerName>.W" convention.
		if (row.getSignalValue(regName + ".W") == 1) {
			writeEnabled.add(i);
		}
		i++;
	}
	// fetch the indices of the mux inputs that currently use this register
	Map<MuxType, List<Integer>> indicesInUse = new EnumMap<>(MuxType.class);
	for (MuxType type : MuxType.values()) {
		indicesInUse.put(type, fetchRegisterInputIndices(oldRegister.getName(), getMuxSourcesInternal(type)));
	}
	// replace the mux inputs that are currently using the register with null
	for (MuxType type : MuxType.values()) {
		List<MuxInput> inputs = getMuxSourcesInternal(type);
		for (int inputIndex : indicesInUse.get(type)) {
			MuxInput oldInput = inputs.get(inputIndex);
			inputs.set(inputIndex, NullMuxInput.INSTANCE);
			postEvent(MachineConfigMuxEvent.eventReplaced(type, oldInput, NullMuxInput.INSTANCE, inputIndex));
		}
	}
	// now, since no mux input is pointing to the register, actually replace the register
	extendedRegisters.set(index, register);
	// notify clients that the register was replaced
	postEvent(MachineConfigRegisterEvent.eventReplaced(oldRegister, register, index));
	// replace the available mux input for this register since the register changed
	replaceAllMuxInputsOfRegister(oldRegister.getName(), register.getName(), null, availableMuxSources);
	// put the new mux input at the places set to null before
	RegisterMuxInput newInput = new RegisterMuxInput(register.getName());
	for (MuxType type : MuxType.values()) {
		List<MuxInput> inputs = getMuxSourcesInternal(type);
		for (int inputIndex : indicesInUse.get(type)) {
			MuxInput oldInput = inputs.get(inputIndex);
			inputs.set(inputIndex, newInput);
			postEvent(MachineConfigMuxEvent.eventReplaced(type, oldInput, newInput, inputIndex));
		}
	}
	// set writeEnabled signal for the new register
	final String newRegName = register.getName();
	writeEnabled.forEach((integer -> {
		signalTable.getRow(integer).setSignal(newRegName + ".W", SignalValue.valueOf(1));
	}));
	// NOTE(review): the register-replaced event was already posted above, so
	// listeners receive it twice for one replacement — confirm this duplicate
	// post is intentional before removing either.
	postEvent(MachineConfigRegisterEvent.eventReplaced(oldRegister, register, index));
}
/**
 * Fetches the indices of all occurrences of the register ({@link RegisterExtension})
 * specified by name at the specified list of {@link MuxInput}s.
 *
 * @param registerName
 *          the name of the {@code RegisterExtension}
 * @param list
 *          the list of the {@code MuxInput}s
 * @return a list containing the indices of all occurrences of the register
 */
private static List<Integer> fetchRegisterInputIndices(String registerName, List<MuxInput> list) {
	List<Integer> indices = new ArrayList<>();
	for (int position = 0; position < list.size(); position++) {
		MuxInput input = list.get(position);
		if (input instanceof RegisterMuxInput
				&& ((RegisterMuxInput) input).getRegisterName().equals(registerName)) {
			indices.add(position);
		}
	}
	return indices;
}
/**
 * Exchanges two registers ({@link RegisterExtension}) of the machine's configuration
 * and swaps their corresponding entries in the available mux sources.
 * A no-op when both indices are equal.
 *
 * @param index1
 *          the index of the first {@code RegisterExtension}
 * @param index2
 *          the index of the second {@code RegisterExtension}
 */
public void exchangeRegisterExtensions(int index1, int index2) {
	if (index1 == index2) {
		return;
	}
	RegisterExtension reg1 = extendedRegisters.get(index1);
	RegisterExtension reg2 = extendedRegisters.get(index2);
	extendedRegisters.set(index2, reg1);
	extendedRegisters.set(index1, reg2);
	postEvent(MachineConfigRegisterEvent.eventExchanged(reg1, reg2, index1, index2));
	// Locate the RegisterMuxInput entries of both registers so they can be
	// swapped in the available-sources list as well.
	int muxIdx1 = -1;
	int muxIdx2 = -1;
	MuxInput muxInput1 = null;
	MuxInput muxInput2 = null;
	for (MuxInput mux : availableMuxSources) {
		if (mux instanceof RegisterMuxInput) {
			String registerName = ((RegisterMuxInput) mux).getRegisterName();
			if (registerName.equals(reg1.getName())) {
				muxIdx1 = availableMuxSources.indexOf(mux);
				muxInput1 = mux;
			}
			if (registerName.equals(reg2.getName())) {
				muxIdx2 = availableMuxSources.indexOf(mux);
				muxInput2 = mux;
			}
		}
	}
	// NOTE(review): if either register has no RegisterMuxInput entry here,
	// the corresponding index stays -1 and set() below throws
	// IndexOutOfBoundsException — presumably addRegisterExtension guarantees
	// an entry always exists; confirm.
	availableMuxSources.set(muxIdx1, muxInput2);
	availableMuxSources.set(muxIdx2, muxInput1);
	postEvent(MachineConfigMuxEvent.eventExchanged(null, muxInput1, muxInput2, muxIdx1, muxIdx2));
}
/**
 * Gets the extended register ({@link RegisterExtension}) at the specified index.
 *
 * @param index
 *          the index of the {@code RegisterExtension} to get
 * @return the {@code RegisterExtension} at the specified index
 */
public RegisterExtension getRegisterExtension(int index) {
	return extendedRegisters.get(index);
}
/**
 * Gets the base register ({@link RegisterExtension}) at the specified index.
 *
 * @param index
 *          the index of the {@code RegisterExtension} to get
 * @return the {@code RegisterExtension} at the specified index
 */
public RegisterExtension getBaseRegister(int index) {
	return baseRegisters.get(index);
}
/**
 * Searches (case-insensitively) for a register ({@link RegisterExtension})
 * with the specified name in the list of the extended registers.
 *
 * @param name
 *          the name of the {@code RegisterExtension} to search
 * @return the matching {@code RegisterExtension}, or {@code null} if none exists
 */
public RegisterExtension findRegisterExtension(String name) {
	Iterator<RegisterExtension> it = extendedRegisters.iterator();
	while (it.hasNext()) {
		RegisterExtension candidate = it.next();
		if (candidate.getName().equalsIgnoreCase(name)) {
			return candidate;
		}
	}
	return null;
}
/**
 * Searches (case-insensitively) for a register ({@link RegisterExtension})
 * with the specified name in the list of the base registers.
 *
 * @param name
 *          the name of the {@code RegisterExtension} to search
 * @return the matching {@code RegisterExtension}, or {@code null} if none exists
 */
public RegisterExtension findBaseRegister(String name) {
	Iterator<RegisterExtension> it = baseRegisters.iterator();
	while (it.hasNext()) {
		RegisterExtension candidate = it.next();
		if (candidate.getName().equalsIgnoreCase(name)) {
			return candidate;
		}
	}
	return null;
}
/**
 * Gets all extended registers ({@link RegisterExtension}) as an unmodifiable
 * view that reflects later changes.
 *
 * @return an unmodifiable list of all extended {@code RegisterExtension}s
 */
public List<RegisterExtension> getRegisterExtensions() {
	return registersView;
}
/**
 * Gets all base registers ({@link RegisterExtension}); the returned list is
 * immutable.
 *
 * @return a list of all base {@code RegisterExtension}s
 */
public List<RegisterExtension> getBaseRegisters() {
	return baseRegisters;
}
/**
 * Gets all {@link MuxInput}s available to the machine as an unmodifiable view
 * that reflects later changes.
 *
 * @return an unmodifiable list of all available {@code MuxInput}s
 */
public List<MuxInput> getAvailableSources() {
	return availableMuxSourcesView;
}
/**
 * Gets an unmodifiable view of the {@link MuxInput}s of the specified
 * multiplexer ({@link MuxType}).
 *
 * @param mux
 *          the {@code MuxType} whose inputs should be returned
 * @return an unmodifiable list of the multiplexer's {@code MuxInput}s
 */
public List<MuxInput> getMuxSources(MuxType mux) {
	if (mux == MuxType.A) {
		return muxSourcesAView;
	}
	if (mux == MuxType.B) {
		return muxSourcesBView;
	}
	throw new IllegalArgumentException(mux.toString());
}
/**
 * Gets the mutable backing list of {@link MuxInput}s of the specified
 * multiplexer ({@link MuxType}); for internal mutation only.
 *
 * @param mux
 *          the {@code MuxType} whose internal inputs should be returned
 * @return the mutable list of the multiplexer's {@code MuxInput}s
 */
private List<MuxInput> getMuxSourcesInternal(MuxType mux) {
	if (mux == MuxType.A) {
		return muxSourcesA;
	}
	if (mux == MuxType.B) {
		return muxSourcesB;
	}
	throw new IllegalArgumentException(mux.toString());
}
/**
 * Appends the specified {@link MuxInput} to the specified multiplexer
 * ({@link MuxType}) and notifies listeners.
 *
 * @param mux
 *          the {@code MuxType} where the {@code MuxInput} should be added
 * @param source
 *          the {@code MuxInput} to add
 */
public void addMuxSource(MuxType mux, MuxInput source) {
	List<MuxInput> sources = getMuxSourcesInternal(mux);
	sources.add(source);
	int newIndex = sources.size() - 1;
	postEvent(MachineConfigMuxEvent.eventAdded(mux, source, newIndex));
}
/**
 * Removes all occurrences of the specified {@link MuxInput} from the specified
 * multiplexer ({@link MuxType}), posting a removal event for each occurrence.
 * A no-op if the input is not present.
 *
 * @param mux
 *          the {@code MuxType} where the {@code MuxInput} should be removed
 * @param source
 *          the {@code MuxInput} to remove
 */
public void removeMuxSource(MuxType mux, MuxInput source) {
	List<MuxInput> muxList = getMuxSourcesInternal(mux);
	int index;
	// Test BEFORE removing: the original do-while removed first and checked
	// afterwards, so once the last occurrence was gone the next iteration
	// called muxList.remove(-1), which always threw IndexOutOfBoundsException
	// (and would also have posted an event with index -1).
	while ((index = muxList.lastIndexOf(source)) != -1) {
		muxList.remove(index);
		postEvent(MachineConfigMuxEvent.eventRemoved(mux, source, index));
	}
}
/**
 * Removes the {@link MuxInput} at the specified index from the specified
 * multiplexer ({@link MuxType}) and notifies listeners.
 *
 * @param mux
 *          the {@code MuxType} where the {@code MuxInput} should be removed
 * @param index
 *          the index of the {@code MuxInput} to remove
 */
public void removeMuxSource(MuxType mux, int index) {
	MuxInput removed = getMuxSourcesInternal(mux).remove(index);
	postEvent(MachineConfigMuxEvent.eventRemoved(mux, removed, index));
}
/**
 * Sets the specified {@link MuxInput} at the specified index of the specified
 * multiplexer ({@link MuxType}) and notifies listeners of the replacement.
 *
 * @param mux
 *          the {@code MuxType} where the {@code MuxInput} should be set
 * @param index
 *          the index at which the {@code MuxInput} should be set
 * @param source
 *          the {@code MuxInput} to set
 */
public void setMuxSource(MuxType mux, int index, MuxInput source) {
	List<MuxInput> sources = getMuxSourcesInternal(mux);
	MuxInput previous = sources.set(index, source);
	postEvent(MachineConfigMuxEvent.eventReplaced(mux, previous, source, index));
}
/**
 * Exchanges two {@link MuxInput}s of the specified multiplexer
 * ({@link MuxType}) and notifies listeners. A no-op when both indices are equal.
 *
 * @param mux
 *          the {@code MuxType} where the {@code MuxInput}s should be exchanged
 * @param index1
 *          the index of the first {@code MuxInput}
 * @param index2
 *          the index of the second {@code MuxInput}
 */
public void exchangeMuxSources(MuxType mux, int index1, int index2) {
	if (index1 == index2) {
		return;
	}
	List<MuxInput> sources = getMuxSourcesInternal(mux);
	MuxInput first = sources.get(index1);
	MuxInput second = sources.get(index2);
	Collections.swap(sources, index1, index2);
	postEvent(MachineConfigMuxEvent.eventExchanged(mux, first, second, index1, index2));
}
/**
 * Registers a new {@link MachineConfigListener}; registering the same
 * listener twice has no effect.
 *
 * @param listener
 *          the {@code MachineConfigListener} to register
 */
public void addMachineConfigListener(MachineConfigListener listener) {
	boolean alreadyRegistered = listeners.contains(listener);
	if (!alreadyRegistered) {
		listeners.add(listener);
	}
}
/**
 * Removes a {@link MachineConfigListener} from the list of listeners; a no-op
 * if the listener was not registered.
 *
 * @param listener
 *          the {@code MachineConfigListener} to remove
 */
public void removeMachineConfigListener(MachineConfigListener listener) {
	listeners.remove(listener);
}
/**
 * Notifies all registered {@link MachineConfigListener}s of the specified
 * {@link MachineConfigEvent}, in registration order.
 *
 * @param e
 *          the {@code MachineConfigEvent} the listeners have to be notified of
 */
protected void postEvent(MachineConfigEvent e) {
	listeners.forEach(listener -> listener.processEvent(e));
}
@Override
public String toString() {
	// Same field order and formatting as before, built incrementally.
	StringBuilder sb = new StringBuilder("MachineConfiguration [alu=");
	sb.append(alu).append(", registers=").append(extendedRegisters);
	sb.append(", mux.A=").append(muxSourcesA).append(", mux.B=").append(muxSourcesB);
	sb.append(']');
	return sb.toString();
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.util;
import javax.annotation.Nullable;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.primitives.Ints;
import org.apache.commons.lang.NotImplementedException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableDescriptor;
import org.apache.hadoop.hbase.TableDescriptors;
import org.apache.hadoop.hbase.TableInfoMissingException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
/**
* Implementation of {@link TableDescriptors} that reads descriptors from the
* passed filesystem. It expects descriptors to be in a file in the
* {@link #TABLEINFO_DIR} subdir of the table's directory in FS. Can be read-only
* -- i.e. does not modify the filesystem or can be read and write.
*
* <p>Also has utility for keeping up the table descriptors tableinfo file.
* The table schema file is kept in the {@link #TABLEINFO_DIR} subdir
* of the table directory in the filesystem.
* It has a {@link #TABLEINFO_FILE_PREFIX} and then a suffix that is the
* edit sequenceid: e.g. <code>.tableinfo.0000000003</code>. This sequenceid
* is always increasing. It starts at zero. The table schema file with the
* highest sequenceid has the most recent schema edit. Usually there is one file
* only, the most recent but there may be short periods where there are more
* than one file. Old files are eventually cleaned. Presumption is that there
* will not be lots of concurrent clients making table schema edits. If so,
* the below needs a bit of a reworking and perhaps some supporting api in hdfs.
*/
@InterfaceAudience.Private
public class FSTableDescriptors implements TableDescriptors {
private static final Log LOG = LogFactory.getLog(FSTableDescriptors.class);
// Filesystem holding the table descriptors and the hbase root directory in it.
private final FileSystem fs;
private final Path rootdir;
// When true, no filesystem mutations are performed (e.g. remove does not delete).
private final boolean fsreadonly;
// Whether descriptor lookups may be served from the in-memory cache.
private volatile boolean usecache;
// Set once a full filesystem scan has populated the cache.
private volatile boolean fsvisited;
@VisibleForTesting long cachehits = 0;
@VisibleForTesting long invocations = 0;
/** The file name prefix used to store HTD in HDFS */
static final String TABLEINFO_FILE_PREFIX = ".tableinfo";
static final String TABLEINFO_DIR = ".tabledesc";
static final String TMP_DIR = ".tmp";
// This cache does not age out the old stuff. Thinking is that the amount
// of data we keep up in here is so small, no need to do occasional purge.
// TODO.
private final Map<TableName, TableDescriptor> cache =
new ConcurrentHashMap<TableName, TableDescriptor>();
/**
 * Table descriptor for <code>hbase:meta</code> catalog table
 */
// NOTE(review): field name is misspelled ("Descritor") but kept — it may be
// referenced elsewhere in this class beyond this view.
private final HTableDescriptor metaTableDescritor;
/**
 * Construct a FSTableDescriptors instance using the hbase root dir of the given
 * conf and the filesystem where that root dir lives.
 * This instance can do write operations (is not read only).
 *
 * @param conf configuration used to resolve the filesystem and root dir
 * @throws IOException if the filesystem or root dir cannot be resolved
 */
public FSTableDescriptors(final Configuration conf) throws IOException {
	this(conf, FSUtils.getCurrentFileSystem(conf), FSUtils.getRootDir(conf));
}
/**
 * Construct a writable (not read-only), caching FSTableDescriptors instance
 * on the given filesystem and root dir.
 *
 * @param conf configuration
 * @param fs filesystem holding the descriptors
 * @param rootdir hbase root directory on that filesystem
 * @throws IOException if the meta table descriptor cannot be built
 */
public FSTableDescriptors(final Configuration conf, final FileSystem fs, final Path rootdir)
throws IOException {
	this(conf, fs, rootdir, false, true);
}
/**
 * Fully parameterized constructor.
 *
 * @param conf configuration; used to build the hbase:meta table descriptor
 * @param fs filesystem holding the descriptors
 * @param rootdir hbase root directory on that filesystem
 * @param fsreadonly True if we are read-only when it comes to filesystem
 *          operations; i.e. on remove, we do not do delete in fs.
 * @param usecache whether descriptor lookups may be served from memory
 * @throws IOException if the meta table descriptor cannot be built
 */
public FSTableDescriptors(final Configuration conf, final FileSystem fs,
  final Path rootdir, final boolean fsreadonly, final boolean usecache) throws IOException {
	// (implicit super() call; this class extends Object)
	this.fs = fs;
	this.rootdir = rootdir;
	this.fsreadonly = fsreadonly;
	this.usecache = usecache;
	this.metaTableDescritor = TableDescriptor.metaTableDescriptor(conf);
}
/**
 * Enables the descriptor cache, dropping any stale entries first.
 */
public void setCacheOn() throws IOException {
	// Clear before enabling so no stale entries survive the switch.
	cache.clear();
	usecache = true;
}
/**
 * Disables the descriptor cache and releases its entries.
 */
public void setCacheOff() throws IOException {
	// Disable first so concurrent readers stop consulting the cache,
	// then release the entries.
	usecache = false;
	cache.clear();
}
/**
 * @return whether the descriptor cache is currently enabled (test hook)
 */
@VisibleForTesting
public boolean isUsecache() {
	return usecache;
}
/**
 * Get the current table descriptor for the given table, or null if none exists.
 *
 * Uses a local cache of the descriptor but still checks the filesystem on each call
 * to see if a newer file has been created since the cached one was read.
 *
 * @param tablename the table to look up
 * @return the descriptor, or {@code null} if none could be read
 * @throws IOException for a non-user table name
 */
@Override
@Nullable
public TableDescriptor getDescriptor(final TableName tablename)
throws IOException {
	invocations++;
	// hbase:meta is synthesized from configuration, never read from fs.
	if (TableName.META_TABLE_NAME.equals(tablename)) {
		cachehits++;
		return new TableDescriptor(metaTableDescritor);
	}
	// hbase:meta is already handled. If some one tries to get the descriptor for
	// .logs, .oldlogs or .corrupt throw an exception.
	if (HConstants.HBASE_NON_USER_TABLE_DIRS.contains(tablename.getNameAsString())) {
		throw new IOException("No descriptor found for non table = " + tablename);
	}
	if (usecache) {
		// Look in cache of descriptors.
		TableDescriptor cachedtdm = this.cache.get(tablename);
		if (cachedtdm != null) {
			cachehits++;
			return cachedtdm;
		}
	}
	TableDescriptor tdmt = null;
	try {
		// When not read-only, the fs read may also clean up old tableinfo files.
		tdmt = getTableDescriptorFromFs(fs, rootdir, tablename, !fsreadonly);
	} catch (NullPointerException e) {
		// NOTE(review): swallowing NPE here hides programming errors —
		// presumably it covers a race while tableinfo files churn; confirm.
		LOG.debug("Exception during readTableDecriptor. Current table name = "
		+ tablename, e);
	} catch (IOException ioe) {
		LOG.debug("Exception during readTableDecriptor. Current table name = "
		+ tablename, ioe);
	}
	// last HTD written wins
	if (usecache && tdmt != null) {
		this.cache.put(tablename, tdmt);
	}
	return tdmt;
}
/**
* Get the current table descriptor for the given table, or null if none exists.
*
* Uses a local cache of the descriptor but still checks the filesystem on each call
* to see if a newer file has been created since the cached one was read.
*/
@Override
public HTableDescriptor get(TableName tableName) throws IOException {
if (TableName.META_TABLE_NAME.equals(tableName)) {
cachehits++;
// NOTE(review): returns the shared meta descriptor instance (no defensive
// copy), unlike getDescriptor() which wraps it in a new TableDescriptor.
return metaTableDescritor;
}
TableDescriptor descriptor = getDescriptor(tableName);
return descriptor == null ? null : descriptor.getHTableDescriptor();
}
/**
* Returns a map from table name to table descriptor for all tables.
*/
@Override
public Map<String, TableDescriptor> getAllDescriptors()
    throws IOException {
  Map<String, TableDescriptor> tds = new TreeMap<String, TableDescriptor>();
  if (fsvisited && usecache) {
    // Filesystem was fully scanned before and caching is on: answer from cache.
    for (Map.Entry<TableName, TableDescriptor> entry: this.cache.entrySet()) {
      tds.put(entry.getKey().toString(), entry.getValue());
    }
    // add hbase:meta to the response
    tds.put(this.metaTableDescritor.getNameAsString(),
      new TableDescriptor(metaTableDescritor));
  } else {
    LOG.debug("Fetching table descriptors from the filesystem.");
    boolean allvisited = true;
    for (Path d : FSUtils.getTableDirs(fs, rootdir)) {
      TableDescriptor htd = null;
      try {
        htd = getDescriptor(FSUtils.getTableName(d));
      } catch (FileNotFoundException fnfe) {
        // inability of retrieving one HTD shouldn't stop getting the remaining
        LOG.warn("Trouble retrieving htd", fnfe);
      }
      if (htd == null) {
        allvisited = false;
        continue;
      } else {
        tds.put(htd.getHTableDescriptor().getTableName().getNameAsString(), htd);
      }
    }
    // BUGFIX: this assignment used to live inside the loop where the
    // 'continue' above skipped it; if the LAST table failed to load,
    // fsvisited could stay true while allvisited was false. Set it once,
    // after the whole scan.
    fsvisited = allvisited;
  }
  return tds;
}
/**
* Returns a map from table name to table descriptor for all tables.
*/
/**
 * Returns a map from table name to HTableDescriptor for all tables,
 * unwrapping the TableDescriptors produced by {@link #getAllDescriptors()}.
 */
@Override
public Map<String, HTableDescriptor> getAll() throws IOException {
  Map<String, HTableDescriptor> result = new TreeMap<String, HTableDescriptor>();
  for (Map.Entry<String, TableDescriptor> e : getAllDescriptors().entrySet()) {
    result.put(e.getKey(), e.getValue().getHTableDescriptor());
  }
  return result;
}
/**
* Find descriptors by namespace.
* @see #get(org.apache.hadoop.hbase.TableName)
*/
@Override
public Map<String, HTableDescriptor> getByNamespace(String name)
throws IOException {
Map<String, HTableDescriptor> htds = new TreeMap<String, HTableDescriptor>();
// Scan only the table dirs that live under the given namespace directory.
List<Path> tableDirs =
FSUtils.getLocalTableDirs(fs, FSUtils.getNamespaceDir(rootdir, name));
for (Path d: tableDirs) {
HTableDescriptor htd = null;
try {
htd = get(FSUtils.getTableName(d));
} catch (FileNotFoundException fnfe) {
// inability of retrieving one HTD shouldn't stop getting the remaining
LOG.warn("Trouble retrieving htd", fnfe);
}
if (htd == null) continue;
htds.put(FSUtils.getTableName(d).getNameAsString(), htd);
}
return htds;
}
/**
* Adds (or updates) the table descriptor to the FileSystem
* and updates the local cache with it.
*/
@Override
public void add(TableDescriptor htd) throws IOException {
if (fsreadonly) {
throw new NotImplementedException("Cannot add a table descriptor - in read only mode");
}
TableName tableName = htd.getHTableDescriptor().getTableName();
// hbase:meta and reserved system directories may never get a user descriptor.
if (TableName.META_TABLE_NAME.equals(tableName)) {
throw new NotImplementedException();
}
if (HConstants.HBASE_NON_USER_TABLE_DIRS.contains(tableName.getNameAsString())) {
throw new NotImplementedException(
"Cannot add a table descriptor for a reserved subdirectory name: "
+ htd.getHTableDescriptor().getNameAsString());
}
// Writes to fs and refreshes the cache entry.
updateTableDescriptor(htd);
}
/**
* Adds (or updates) the table descriptor to the FileSystem
* and updates the local cache with it.
*/
@Override
public void add(HTableDescriptor htd) throws IOException {
if (fsreadonly) {
throw new NotImplementedException("Cannot add a table descriptor - in read only mode");
}
TableName tableName = htd.getTableName();
// hbase:meta and reserved system directories may never get a user descriptor.
if (TableName.META_TABLE_NAME.equals(tableName)) {
throw new NotImplementedException();
}
if (HConstants.HBASE_NON_USER_TABLE_DIRS.contains(tableName.getNameAsString())) {
throw new NotImplementedException(
"Cannot add a table descriptor for a reserved subdirectory name: "
+ htd.getNameAsString());
}
// Preserve the existing wrapper (e.g. its state) when one exists; otherwise
// wrap the HTableDescriptor in a fresh TableDescriptor.
TableDescriptor descriptor = getDescriptor(htd.getTableName());
if (descriptor == null)
descriptor = new TableDescriptor(htd);
else
descriptor.setHTableDescriptor(htd);
updateTableDescriptor(descriptor);
}
/**
* Removes the table descriptor from the local cache and returns it.
* If not in read only mode, it also deletes the entire table directory(!)
* from the FileSystem.
*/
@Override
public HTableDescriptor remove(final TableName tablename)
throws IOException {
if (fsreadonly) {
throw new NotImplementedException("Cannot remove a table descriptor - in read only mode");
}
// WARNING: deletes the entire table directory recursively, not just the
// descriptor files.
Path tabledir = getTableDir(tablename);
if (this.fs.exists(tabledir)) {
if (!this.fs.delete(tabledir, true)) {
throw new IOException("Failed delete of " + tabledir.toString());
}
}
// Evict from cache and return the previously-cached descriptor, if any.
TableDescriptor descriptor = this.cache.remove(tablename);
if (descriptor == null) {
return null;
} else {
return descriptor.getHTableDescriptor();
}
}
/**
* Checks if a current table info file exists for the given table
*
* @param tableName name of table
* @return true if exists
* @throws IOException
*/
/**
 * Checks whether a current table info file exists for the given table.
 *
 * @param tableName name of table
 * @return true if a table info file was found
 * @throws IOException on filesystem errors
 */
public boolean isTableInfoExists(TableName tableName) throws IOException {
  FileStatus current = getTableInfoPath(tableName);
  return current != null;
}
/**
* Find the most current table info file for the given table in the hbase root directory.
* @return The file status of the current table info file or null if it does not exist
*/
// Resolves the table's directory then finds its current tableinfo file.
private FileStatus getTableInfoPath(final TableName tableName) throws IOException {
Path tableDir = getTableDir(tableName);
return getTableInfoPath(tableDir);
}
// Instance variant: when not read-only, also cleans up older tableinfo files.
private FileStatus getTableInfoPath(Path tableDir)
throws IOException {
return getTableInfoPath(fs, tableDir, !fsreadonly);
}
/**
* Find the most current table info file for the table located in the given table directory.
*
* Looks within the {@link #TABLEINFO_DIR} subdirectory of the given directory for any table info
* files and takes the 'current' one - meaning the one with the highest sequence number if present
* or no sequence number at all if none exist (for backward compatibility from before there
* were sequence numbers).
*
* @return The file status of the current table info file or null if it does not exist
* @throws IOException
*/
// Static read-only variant: never deletes older tableinfo files.
public static FileStatus getTableInfoPath(FileSystem fs, Path tableDir)
throws IOException {
return getTableInfoPath(fs, tableDir, false);
}
/**
* Find the most current table info file for the table in the given table directory.
*
* Looks within the {@link #TABLEINFO_DIR} subdirectory of the given directory for any table info
* files and takes the 'current' one - meaning the one with the highest sequence number if
* present or no sequence number at all if none exist (for backward compatibility from before
* there were sequence numbers).
* If there are multiple table info files found and removeOldFiles is true it also deletes the
* older files.
*
* @return The file status of the current table info file or null if none exist
* @throws IOException
*/
// Looks in the TABLEINFO_DIR subdirectory of tableDir for the current file.
private static FileStatus getTableInfoPath(FileSystem fs, Path tableDir, boolean removeOldFiles)
throws IOException {
Path tableInfoDir = new Path(tableDir, TABLEINFO_DIR);
return getCurrentTableInfoStatus(fs, tableInfoDir, removeOldFiles);
}
/**
* Find the most current table info file in the given directory
*
* Looks within the given directory for any table info files
* and takes the 'current' one - meaning the one with the highest sequence number if present
* or no sequence number at all if none exist (for backward compatibility from before there
* were sequence numbers).
* If there are multiple possible files found
* and the we're not in read only mode it also deletes the older files.
*
* @return The file status of the current table info file or null if it does not exist
* @throws IOException
*/
// only visible for FSTableDescriptorMigrationToSubdir, can be removed with that
// only visible for FSTableDescriptorMigrationToSubdir, can be removed with that
static FileStatus getCurrentTableInfoStatus(FileSystem fs, Path dir, boolean removeOldFiles)
throws IOException {
FileStatus [] status = FSUtils.listStatus(fs, dir, TABLEINFO_PATHFILTER);
// listStatus may return null for a missing/empty dir; treat as "no file".
if (status == null || status.length < 1) return null;
FileStatus mostCurrent = null;
// The comparator sorts in reverse, so compare(...) < 0 means "file is newer".
for (FileStatus file : status) {
if (mostCurrent == null || TABLEINFO_FILESTATUS_COMPARATOR.compare(file, mostCurrent) < 0) {
mostCurrent = file;
}
}
if (removeOldFiles && status.length > 1) {
// Clean away old versions
for (FileStatus file : status) {
Path path = file.getPath();
if (file != mostCurrent) {
if (!fs.delete(file.getPath(), false)) {
LOG.warn("Failed cleanup of " + path);
} else {
LOG.debug("Cleaned up old tableinfo file " + path);
}
}
}
}
return mostCurrent;
}
/**
* Compare {@link FileStatus} instances by {@link Path#getName()}. Returns in
* reverse order.
*/
@VisibleForTesting
static final Comparator<FileStatus> TABLEINFO_FILESTATUS_COMPARATOR =
new Comparator<FileStatus>() {
@Override
public int compare(FileStatus left, FileStatus right) {
// Arguments swapped on purpose: yields descending (reverse) order so the
// highest-sequence tableinfo file sorts first.
return right.compareTo(left);
}};
/**
* Return the table directory in HDFS
*/
/** @return the HDFS directory for the given table under the hbase root dir. */
@VisibleForTesting Path getTableDir(final TableName tableName) {
  return FSUtils.getTableDir(this.rootdir, tableName);
}
// Filter that matches only tableinfo files (with or without sequenceid suffix).
private static final PathFilter TABLEINFO_PATHFILTER = new PathFilter() {
@Override
public boolean accept(Path p) {
// Accept any file that starts with TABLEINFO_NAME
return p.getName().startsWith(TABLEINFO_FILE_PREFIX);
}};
/**
* Width of the sequenceid that is a suffix on a tableinfo file.
*/
@VisibleForTesting static final int WIDTH_OF_SEQUENCE_ID = 10; // zero-padded digits in the tableinfo sequenceid suffix
/*
* @param number Number to use as suffix.
* @return Returns zero-prefixed decimal version of passed
* number (Does absolute in case number is negative).
*/
/*
 * @param number Number to use as suffix.
 * @return Zero-padded, WIDTH_OF_SEQUENCE_ID-wide decimal rendering of the
 * absolute value of the passed number.
 */
private static String formatTableInfoSequenceId(final int number) {
  char[] digits = new char[WIDTH_OF_SEQUENCE_ID];
  int remaining = Math.abs(number);
  for (int pos = WIDTH_OF_SEQUENCE_ID - 1; pos >= 0; pos--) {
    digits[pos] = (char) ('0' + (remaining % 10));
    remaining /= 10;
  }
  return new String(digits);
}
/**
* Regex to eat up sequenceid suffix on a .tableinfo file.
* Use regex because may encounter oldstyle .tableinfos where there is no
* sequenceid on the end.
*/
// Matches ".tableinfo" with an optional ".NNNNNNNNNN" sequenceid suffix;
// group(2) captures the bare sequence number when present.
private static final Pattern TABLEINFO_FILE_REGEX =
Pattern.compile(TABLEINFO_FILE_PREFIX + "(\\.([0-9]{" + WIDTH_OF_SEQUENCE_ID + "}))?$");
/**
* @param p Path to a <code>.tableinfo</code> file.
* @return The current editid or 0 if none found.
*/
/**
 * @param p Path to a <code>.tableinfo</code> file.
 * @return The current editid or 0 if none found.
 * @throws IllegalArgumentException if the file name is not a tableinfo name.
 */
@VisibleForTesting static int getTableInfoSequenceId(final Path p) {
  if (p == null) {
    return 0;
  }
  Matcher m = TABLEINFO_FILE_REGEX.matcher(p.getName());
  if (!m.matches()) {
    throw new IllegalArgumentException(p.toString());
  }
  String suffix = m.group(2);
  return (suffix == null || suffix.isEmpty()) ? 0 : Integer.parseInt(suffix);
}
/**
* @param sequenceid
* @return Name of tableinfo file.
*/
/**
 * @param sequenceid sequence number to encode in the file name
 * @return Name of tableinfo file carrying the given sequenceid.
 */
@VisibleForTesting static String getTableInfoFileName(final int sequenceid) {
  String suffix = formatTableInfoSequenceId(sequenceid);
  return TABLEINFO_FILE_PREFIX + "." + suffix;
}
/**
* Returns the latest table descriptor for the given table directly from the file system
* if it exists, bypassing the local cache.
* Returns null if it's not found.
*/
// Resolves the table dir from the root dir and delegates (no pb rewrite).
public static TableDescriptor getTableDescriptorFromFs(FileSystem fs,
Path hbaseRootDir, TableName tableName) throws IOException {
Path tableDir = FSUtils.getTableDir(hbaseRootDir, tableName);
return getTableDescriptorFromFs(fs, tableDir);
}
/**
* Returns the latest table descriptor for the given table directly from the file system
* if it exists, bypassing the local cache.
* Returns null if it's not found.
*/
// Resolves the table dir from the root dir; rewritePb controls whether an
// old-format descriptor file is rewritten as protobuf on read.
public static TableDescriptor getTableDescriptorFromFs(FileSystem fs,
Path hbaseRootDir, TableName tableName, boolean rewritePb) throws IOException {
Path tableDir = FSUtils.getTableDir(hbaseRootDir, tableName);
return getTableDescriptorFromFs(fs, tableDir, rewritePb);
}
/**
* Returns the latest table descriptor for the table located at the given directory
* directly from the file system if it exists.
* @throws TableInfoMissingException if there is no descriptor
*/
// Table-dir variant without pb rewrite.
public static TableDescriptor getTableDescriptorFromFs(FileSystem fs, Path tableDir)
throws IOException {
return getTableDescriptorFromFs(fs, tableDir, false);
}
/**
* Returns the latest table descriptor for the table located at the given directory
* directly from the file system if it exists.
* @throws TableInfoMissingException if there is no descriptor
*/
// Finds the current tableinfo file and parses it; unlike the instance-level
// getDescriptor(), a missing file here is an error, not a null.
public static TableDescriptor getTableDescriptorFromFs(FileSystem fs, Path tableDir,
boolean rewritePb)
throws IOException {
FileStatus status = getTableInfoPath(fs, tableDir, false);
if (status == null) {
throw new TableInfoMissingException("No table descriptor file under " + tableDir);
}
return readTableDescriptor(fs, status, rewritePb);
}
/**
 * Reads and deserializes a tableinfo file. Tries the pb-based TableDescriptor
 * format first, falling back to the legacy HTableDescriptor format; when
 * rewritePb is set, legacy/non-pb files are rewritten in pb form.
 */
private static TableDescriptor readTableDescriptor(FileSystem fs, FileStatus status,
    boolean rewritePb) throws IOException {
  int len = Ints.checkedCast(status.getLen());
  byte [] content = new byte[len];
  // try-with-resources: the stream is closed even if readFully throws.
  try (FSDataInputStream fsDataInputStream = fs.open(status.getPath())) {
    fsDataInputStream.readFully(content);
  }
  TableDescriptor td = null;
  try {
    td = TableDescriptor.parseFrom(content);
  } catch (DeserializationException e) {
    // we have old HTableDescriptor here
    try {
      HTableDescriptor htd = HTableDescriptor.parseFrom(content);
      LOG.warn("Found old table descriptor, converting to new format for table " +
        htd.getTableName() + "; NOTE table will be in ENABLED state!");
      td = new TableDescriptor(htd);
      if (rewritePb) rewriteTableDescriptor(fs, status, td);
    } catch (DeserializationException e1) {
      // Neither format parsed; propagate the first failure with some context.
      throw new IOException("content=" + Bytes.toShort(content), e);
    }
  }
  if (rewritePb && !ProtobufUtil.isPBMagicPrefix(content)) {
    // Convert the file over to be pb before leaving here.
    rewriteTableDescriptor(fs, status, td);
  }
  return td;
}
// Rewrites the given descriptor in place: derives the table dir from the
// existing file's location and writes a fresh (pb) tableinfo file there,
// replacing the current one.
private static void rewriteTableDescriptor(final FileSystem fs, final FileStatus status,
final TableDescriptor td)
throws IOException {
Path tableInfoDir = status.getPath().getParent();
Path tableDir = tableInfoDir.getParent();
writeTableDescriptor(fs, td, tableDir, status);
}
/**
* Update table descriptor on the file system
* @throws IOException Thrown if failed update.
* @throws NotImplementedException if in read only mode
*/
@VisibleForTesting Path updateTableDescriptor(TableDescriptor td)
throws IOException {
if (fsreadonly) {
throw new NotImplementedException("Cannot update a table descriptor - in read only mode");
}
TableName tableName = td.getHTableDescriptor().getTableName();
Path tableDir = getTableDir(tableName);
// writeTableDescriptor removes the passed-in current file on success.
Path p = writeTableDescriptor(fs, td, tableDir, getTableInfoPath(tableDir));
if (p == null) throw new IOException("Failed update");
LOG.info("Updated tableinfo=" + p);
if (usecache) {
this.cache.put(td.getHTableDescriptor().getTableName(), td);
}
return p;
}
/**
* Deletes all the table descriptor files from the file system.
* Used in unit tests only.
* @throws NotImplementedException if in read only mode
*/
public void deleteTableDescriptorIfExists(TableName tableName) throws IOException {
if (fsreadonly) {
throw new NotImplementedException("Cannot delete a table descriptor - in read only mode");
}
Path tableDir = getTableDir(tableName);
Path tableInfoDir = new Path(tableDir, TABLEINFO_DIR);
// MAX_VALUE => every tableinfo file matches, regardless of sequenceid.
deleteTableDescriptorFiles(fs, tableInfoDir, Integer.MAX_VALUE);
}
/**
* Deletes files matching the table info file pattern within the given directory
* whose sequenceId is at most the given max sequenceId.
*/
/**
 * Deletes files matching the table info file pattern within the given directory
 * whose sequenceId is at most the given max sequenceId.
 */
private static void deleteTableDescriptorFiles(FileSystem fs, Path dir, int maxSequenceId)
    throws IOException {
  FileStatus [] status = FSUtils.listStatus(fs, dir, TABLEINFO_PATHFILTER);
  if (status == null) {
    // BUGFIX: FSUtils.listStatus can return null for a missing/empty dir
    // (getCurrentTableInfoStatus guards the same call); iterating would NPE.
    return;
  }
  for (FileStatus file : status) {
    Path path = file.getPath();
    int sequenceId = getTableInfoSequenceId(path);
    if (sequenceId <= maxSequenceId) {
      boolean success = FSUtils.delete(fs, path, false);
      if (success) {
        LOG.debug("Deleted table descriptor at " + path);
      } else {
        LOG.error("Failed to delete descriptor at " + path);
      }
    }
  }
}
/**
* Attempts to write a new table descriptor to the given table's directory.
* It first writes it to the .tmp dir then uses an atomic rename to move it into place.
* It begins at the currentSequenceId + 1 and tries 10 times to find a new sequence number
* not already in use.
* Removes the current descriptor file if passed in.
*
* @return Descriptor file or null if we failed write.
*/
private static Path writeTableDescriptor(final FileSystem fs,
final TableDescriptor htd, final Path tableDir,
final FileStatus currentDescriptorFile)
throws IOException {
// Get temporary dir into which we'll first write a file to avoid half-written file phenomenon.
// This directory is never removed to avoid removing it out from under a concurrent writer.
Path tmpTableDir = new Path(tableDir, TMP_DIR);
Path tableInfoDir = new Path(tableDir, TABLEINFO_DIR);
// What is current sequenceid? We read the current sequenceid from
// the current file. After we read it, another thread could come in and
// compete with us writing out next version of file. The below retries
// should help in this case some but its hard to do guarantees in face of
// concurrent schema edits.
int currentSequenceId = currentDescriptorFile == null ? 0 :
getTableInfoSequenceId(currentDescriptorFile.getPath());
int newSequenceId = currentSequenceId;
// Put arbitrary upperbound on how often we retry
int retries = 10;
int retrymax = currentSequenceId + retries;
// Stays null until a write+rename succeeds; that is the success signal below.
Path tableInfoDirPath = null;
do {
newSequenceId += 1;
String filename = getTableInfoFileName(newSequenceId);
Path tempPath = new Path(tmpTableDir, filename);
if (fs.exists(tempPath)) {
// Another writer is using this sequenceid; try the next one.
LOG.debug(tempPath + " exists; retrying up to " + retries + " times");
continue;
}
tableInfoDirPath = new Path(tableInfoDir, filename);
try {
writeTD(fs, tempPath, htd);
fs.mkdirs(tableInfoDirPath.getParent());
// Atomic rename moves the fully-written file into place.
if (!fs.rename(tempPath, tableInfoDirPath)) {
throw new IOException("Failed rename of " + tempPath + " to " + tableInfoDirPath);
}
LOG.debug("Wrote descriptor into: " + tableInfoDirPath);
} catch (IOException ioe) {
// Presume clash of names or something; go around again.
LOG.debug("Failed write and/or rename; retrying", ioe);
if (!FSUtils.deleteDirectory(fs, tempPath)) {
LOG.warn("Failed cleanup of " + tempPath);
}
tableInfoDirPath = null;
continue;
}
break;
} while (newSequenceId < retrymax);
if (tableInfoDirPath != null) {
// if we succeeded, remove old table info files.
deleteTableDescriptorFiles(fs, tableInfoDir, newSequenceId - 1);
}
return tableInfoDirPath;
}
/**
 * Serializes the descriptor (pb form) into a brand-new file at {@code p}.
 * {@code fs.create(p, false)} fails if the file already exists.
 */
private static void writeTD(final FileSystem fs, final Path p, final TableDescriptor htd)
    throws IOException {
  // We used to write this file out as a serialized HTD Writable followed by two '\n's and then
  // the toString version of HTD. Now we just write out the pb serialization.
  // try-with-resources replaces the manual try/finally close.
  try (FSDataOutputStream out = fs.create(p, false)) {
    out.write(htd.toByteArray());
  }
}
/**
* Create new HTableDescriptor in HDFS. Happens when we are creating table.
* Used by tests.
* @return True if we successfully created file.
*/
// Convenience overload: never force-overwrites an existing descriptor.
public boolean createTableDescriptor(TableDescriptor htd) throws IOException {
return createTableDescriptor(htd, false);
}
/**
* Create new HTableDescriptor in HDFS. Happens when we are creating table.
* Used by tests.
* @return True if we successfully created file.
*/
// Convenience overload: wraps the HTableDescriptor; never force-overwrites.
public boolean createTableDescriptor(HTableDescriptor htd) throws IOException {
return createTableDescriptor(new TableDescriptor(htd), false);
}
/**
* Create new HTableDescriptor in HDFS. Happens when we are creating table. If
* forceCreation is true then even if previous table descriptor is present it
* will be overwritten
*
* @return True if we successfully created file.
*/
public boolean createTableDescriptor(TableDescriptor htd, boolean forceCreation)
throws IOException {
// Resolve the table dir from this instance's root dir and delegate.
Path tableDir = getTableDir(htd.getHTableDescriptor().getTableName());
return createTableDescriptorForTableDirectory(tableDir, htd, forceCreation);
}
/**
* Create tables descriptor for given HTableDescriptor. Default TableDescriptor state
* will be used (typically ENABLED).
*/
// Wraps the HTableDescriptor in a TableDescriptor with default state and delegates.
public boolean createTableDescriptor(HTableDescriptor htd, boolean forceCreation)
throws IOException {
return createTableDescriptor(new TableDescriptor(htd), forceCreation);
}
/**
* Create a new HTableDescriptor in HDFS in the specified table directory. Happens when we create
* a new table or snapshot a table.
* @param tableDir table directory under which we should write the file
* @param htd description of the table to write
* @param forceCreation if <tt>true</tt>,then even if previous table descriptor is present it will
* be overwritten
* @return <tt>true</tt> if the we successfully created the file, <tt>false</tt> if the file
* already exists and we weren't forcing the descriptor creation.
* @throws IOException if a filesystem error occurs
*/
public boolean createTableDescriptorForTableDirectory(Path tableDir,
TableDescriptor htd, boolean forceCreation) throws IOException {
if (fsreadonly) {
throw new NotImplementedException("Cannot create a table descriptor - in read only mode");
}
FileStatus status = getTableInfoPath(fs, tableDir);
if (status != null) {
LOG.debug("Current tableInfoPath = " + status.getPath());
if (!forceCreation) {
if (fs.exists(status.getPath()) && status.getLen() > 0) {
// An identical descriptor already on disk: nothing to do.
if (readTableDescriptor(fs, status, false).equals(htd)) {
LOG.debug("TableInfo already exists.. Skipping creation");
return false;
}
}
}
}
// Writes a new tableinfo file (and removes 'status', the old one, on success).
Path p = writeTableDescriptor(fs, htd, tableDir, status);
return p != null;
}
}
| |
package life.catalogue.importer;
import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.net.URL;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.*;
import javax.annotation.Nullable;
import com.google.common.base.Preconditions;
import com.google.common.base.Supplier;
import com.google.common.io.Files;
import org.apache.commons.io.FileUtils;
import life.catalogue.config.NormalizerConfig;
import life.catalogue.img.ImageService;
import life.catalogue.importer.coldp.MetadataParser;
import life.catalogue.importer.neo.NeoDb;
import life.catalogue.importer.neo.NeoDbFactory;
import life.catalogue.importer.neo.NotUniqueRuntimeException;
import life.catalogue.importer.neo.model.NeoName;
import life.catalogue.importer.neo.model.NeoUsage;
import life.catalogue.importer.neo.model.RankedUsage;
import life.catalogue.importer.neo.model.RelType;
import life.catalogue.importer.neo.traverse.Traversals;
import life.catalogue.matching.NameIndex;
import life.catalogue.matching.NameIndexFactory;
import life.catalogue.api.model.Dataset;
import life.catalogue.api.model.IssueContainer;
import life.catalogue.api.model.VerbatimEntity;
import life.catalogue.api.model.VerbatimRecord;
import life.catalogue.api.vocab.DataFormat;
import life.catalogue.api.vocab.Issue;
import org.gbif.dwc.terms.Term;
import org.gbif.nameparser.api.NomCode;
import org.gbif.nameparser.api.Rank;
import org.junit.After;
import org.junit.Before;
import org.neo4j.graphdb.Direction;
import org.neo4j.graphdb.Node;
import org.neo4j.graphdb.NotFoundException;
import org.neo4j.graphdb.Relationship;
import static org.junit.Assert.*;
/**
 * Base class for Normalizer integration tests: sets up a fresh
 * NormalizerConfig and NeoDb store per test, runs the Normalizer over a test
 * archive, and provides lookup/assertion helpers over the resulting graph.
 */
abstract class NormalizerITBase {
protected NeoDb store;
// Incremented after every normalize() run so each run gets a fresh attempt id.
private int attempt;
private NormalizerConfig cfg;
private final DataFormat format;
private final Supplier<NameIndex> nameIndexSupplier;
NormalizerITBase(DataFormat format, Supplier<NameIndex> supplier) {
this.format = format;
nameIndexSupplier = supplier;
}
// Uses a pass-through name index when no supplier is given.
NormalizerITBase(DataFormat format) {
this.format = format;
nameIndexSupplier = NameIndexFactory::passThru;
}
@Before
public void initCfg() throws Exception {
cfg = new NormalizerConfig();
cfg.archiveDir = Files.createTempDir();
cfg.scratchDir = Files.createTempDir();
attempt = 1;
}
@After
public void cleanup() throws Exception {
if (store != null) {
store.closeAndDelete();
}
FileUtils.deleteQuietly(cfg.archiveDir);
FileUtils.deleteQuietly(cfg.scratchDir);
}
public void normalize(int datasetKey) throws Exception {
normalize(datasetKey, null);
}
/**
 * Reads the nomenclatural code from the resource dir's metadata.yaml, if any.
 * NOTE(review): Optional.of(code) will NPE if the metadata exists but has no
 * code set — confirm metadata fixtures always declare one.
 */
public static Optional<NomCode> readDatasetCode(String resourceDir) {
URL metaUrl = NormalizerTreeIT.class.getResource(resourceDir + "/metadata.yaml");
if (metaUrl != null) {
try {
Optional<Dataset> meta = MetadataParser.readMetadata(metaUrl.openStream());
if (meta.isPresent()) {
NomCode code = meta.get().getCode();
System.out.println("Use code " + code);
return Optional.of(code);
}
} catch (IOException e) {
e.printStackTrace();
}
}
return Optional.empty();
}
/**
 * Normalizes an archive from the test resources
 * and checks its printed txt tree against the expected tree
 *
 */
public void normalize(int datasetKey, @Nullable NomCode code) throws Exception {
String resourceDir = "/" + format.name().toLowerCase() + "/" + datasetKey;
URL url = getClass().getResource(resourceDir);
if (code == null) {
code = readDatasetCode(resourceDir).orElse(null);
}
normalize(Paths.get(url.toURI()), code);
}
public void normalize(URI url) throws Exception {
normalize(url, null);
}
public void normalize(URI url, @Nullable NomCode code) throws Exception {
// download and decompress
ExternalSourceUtil.consumeSource(url, p -> normalize(p, code));
}
protected void normalize(Path arch) {
normalize(arch, null);
}
// Core normalize: builds a store, runs the Normalizer, then reopens the store
// so assertions read the persisted state. attempt++ even on failure (finally).
protected void normalize(Path arch, @Nullable NomCode code) {
try {
store = NeoDbFactory.create(1, attempt, cfg);
Dataset d = new Dataset();
d.setKey(1);
d.setDataFormat(format);
d.setCode(code);
store.put(d);
Normalizer norm = new Normalizer(store, arch, nameIndexSupplier.get(), ImageService.passThru());
norm.call();
// reopen
store = NeoDbFactory.open(1, attempt, cfg);
} catch (IOException | InterruptedException e) {
throw new RuntimeException(e);
} finally {
attempt++;
}
}
// Finds a verbatim record by its source type and line number, or null.
public VerbatimRecord vByLine(Term type, long line) {
for (VerbatimRecord v : store.verbatimList()) {
if (v.getType() == type && v.getLine() == line) {
return v;
}
}
return null;
}
public VerbatimRecord vByNameID(String id) {
return store.getVerbatim(store.names().objByID(id).getVerbatimKey());
}
public VerbatimRecord vByUsageID(String id) {
return store.getVerbatim(store.usages().objByID(id).getVerbatimKey());
}
public NeoUsage byName(String name) {
return byName(name, null);
}
// Looks up exactly one usage by scientific name (+ optional author); throws
// if the name is missing or ambiguous.
public NeoUsage byName(String name, @Nullable String author) {
List<Node> usageNodes = store.usagesByName(name, author, null, true);
if (usageNodes.isEmpty()) {
throw new NotFoundException();
}
if (usageNodes.size() > 1) {
throw new NotUniqueRuntimeException("scientificName", name);
}
return store.usageWithName(usageNodes.get(0));
}
// Resolves a synonym node to its single accepted usage; throws otherwise.
public NeoUsage accepted(Node syn) {
List<RankedUsage> accepted = store.accepted(syn);
if (accepted.size() != 1) {
throw new IllegalStateException("Synonym has " + accepted.size() + " accepted taxa");
}
return store.usageWithName(accepted.get(0).usageNode);
}
// Returns the parent chain of a node, optionally asserting the expected ids
// in order (and that the count matches).
public List<NeoUsage> parents(Node child, String... parentIdsToVerify) {
List<NeoUsage> parents = new ArrayList<>();
int idx = 0;
for (RankedUsage rn : store.parents(child)) {
NeoUsage u = store.usageWithName(rn.usageNode);
parents.add(u);
if (parentIdsToVerify != null) {
assertEquals(u.getId(), parentIdsToVerify[idx]);
idx++;
}
}
if (parentIdsToVerify != null) {
assertEquals(parents.size(), parentIdsToVerify.length);
}
return parents;
}
// Collects synonyms of an accepted node, optionally asserting the expected
// synonym name ids.
public Set<NeoUsage> synonyms(Node accepted, String... synonymNameIdsToVerify) {
Set<NeoUsage> synonyms = new HashSet<>();
for (Node sn : Traversals.SYNONYMS.traverse(accepted).nodes()) {
synonyms.add(Preconditions.checkNotNull(store.usageWithName(sn)));
}
if (synonymNameIdsToVerify != null) {
Set<String> ids = new HashSet<>();
ids.addAll(Arrays.asList(synonymNameIdsToVerify));
assertEquals(ids.size(), synonyms.size());
for (NeoUsage s : synonyms) {
assertTrue(ids.contains(s.usage.getName().getId()));
}
}
return synonyms;
}
// Asserts the usage's basionym relation: none expected when basionymNameId is
// null, otherwise the related name must match the given id.
public NeoName assertBasionym(NeoUsage usage, @Nullable String basionymNameId) {
NeoName nn = null;
Relationship rel = usage.nameNode.getSingleRelationship(RelType.HAS_BASIONYM, Direction.OUTGOING);
if (basionymNameId == null) {
assertNull(rel);
} else {
Node bn = rel.getOtherNode(usage.nameNode);
nn = store.names().objByNode(bn);
assertNotNull(nn);
assertEquals(basionymNameId, nn.name.getId());
}
return nn;
}
// True only if the entity's verbatim record carries ALL the given issues.
public boolean hasIssues(VerbatimEntity ent, Issue... issues) {
IssueContainer ic = store.getVerbatim(ent.getVerbatimKey());
for (Issue is : issues) {
if (!ic.hasIssue(is))
return false;
}
return true;
}
public NeoUsage usageByNameID(String id) {
List<Node> usages = store.usageNodesByName(store.names().nodeByID(id));
if (usages.size() != 1) {
fail("No single usage for name " + id);
}
return store.usageWithName(usages.get(0));
}
public NeoUsage usageByID(String id) {
return store.usageWithName(store.usages().nodeByID(id));
}
public NeoUsage usageByName(Rank rank, String name) {
List<Node> usages = store.usagesByName(name, null, rank, true);
if (usages.size()!=1) {
throw new IllegalStateException(usages.size() + " usage nodes matching " + rank + " " + name);
}
return store.usageWithName(usages.get(0));
}
public NeoName nameByID(String id) {
return store.names().objByID(id);
}
// Dumps the graph to a dot file for manual inspection.
public void debug() throws Exception {
store.dump(new File("graphs/debugtree.dot"));
}
}
| |
/*
* Copyright 2015 www.seleniumtests.com
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.seleniumtests.webelements;
import com.seleniumtests.core.TestLogging;
import com.seleniumtests.driver.BrowserType;
import com.seleniumtests.driver.ScreenshotUtil;
import com.seleniumtests.driver.WebUIDriver;
import com.seleniumtests.helper.ContextHelper;
import com.seleniumtests.helper.WaitHelper;
import org.apache.log4j.Logger;
import org.openqa.selenium.*;
import org.openqa.selenium.interactions.Actions;
import org.openqa.selenium.interactions.HasInputDevices;
import org.openqa.selenium.interactions.Mouse;
import org.openqa.selenium.internal.Locatable;
import org.openqa.selenium.support.ui.ExpectedCondition;
import org.openqa.selenium.support.ui.Wait;
import org.openqa.selenium.support.ui.WebDriverWait;
import java.util.List;
/**
* Provides methods to interact with a web page. All HTML element (ButtonElement, LinkElement, TextFieldElement, etc.)
* extends from this class.
*/
public class HtmlElement {
// Default explicit-wait timeout, read once from the shared WebUIDriver config.
private static final int EXPLICIT_WAIT_TIME_OUT = WebUIDriver
.getWebUIDriver().getExplicitWait();
// Shared logger for all HtmlElement subclasses.
protected static final Logger logger = TestLogging.getLogger(
HtmlElement.class);
// Supported locator strategies for the (label, locator, type) constructor.
private static enum LocatorType {
ID, NAME, CLASS_NAME, LINK_TEXT, PARTIAL_LINK_TEXT, CSS_SELECTOR,
TAG_NAME, XPATH,
}
protected WebDriver driver = WebUIDriver.getWebDriver();
protected WebUIDriver webUXDriver = WebUIDriver.getWebUIDriver();
// Lazily resolved by findElement(); null until first lookup.
protected WebElement element = null;
private String label = null;
private String locator = null;
private By by = null;
/**
* Find element using BY locator. Make sure to initialize the driver before calling findElement()
*
* @param label - element name for logging
* @param by - By type
*
* @sample {@code new HtmlElement("UserId", By.id(userid))}
*/
public HtmlElement(final String label, final By by) {
this.label = label;
this.by = by;
}
/**
* This constructor locates the element using locator and locator type.
*
* @param label
* @param locator - locator
*/
public HtmlElement(final String label, final String locator,
final LocatorType locatorType) {
this.label = label;
this.locator = locator;
this.by = getLocatorBy(locator, locatorType);
}
/**
* Captures snapshot of the current browser window.
*/
public void captureSnapshot() {
captureSnapshot(ContextHelper.getCallerMethod() + " on ");
}
/**
* Captures snapshot of the current browser window, and prefix the file name with the assigned string.
*
* @param messagePrefix
*/
protected void captureSnapshot(final String messagePrefix) {
ScreenshotUtil.captureSnapshot(messagePrefix);
}
public void click() {
findElement();
element.click();
}
/**
* Click element in native way by Actions.
*/
public void clickAt() {
clickAt("1,1");
}
/**
* Click element in native way by Actions.
*
* <p/>
* <pre class="code">
clickAt("1, 1");
* </pre>
*
* @param value
*/
public void clickAt(final String value) {
TestLogging.logWebStep("click on " + toHTML(), false);
findElement();
final String[] parts = value.split(",");
final int xOffset = Integer.parseInt(parts[0]);
final int yOffset = Integer.parseInt(parts[1]);
try {
new Actions(driver).moveToElement(element, xOffset, yOffset).click()
.perform();
} catch (final InvalidElementStateException e) {
e.printStackTrace();
element.click();
}
try {
final BrowserType type = WebUIDriver.getWebUIDriver().getConfig()
.getBrowser();
if (((type == BrowserType.Chrome) ||
(type == BrowserType.InternetExplore)) &&
this.getDriver().switchTo().alert().getText().contains(
"leave")) {
this.getDriver().switchTo().alert().accept();
}
} catch (final NoAlertPresentException e) {
e.printStackTrace();
}
}
public void simulateClick() {
findElement();
final String mouseOverScript =
"if(document.createEvent){var evObj = document.createEvent('MouseEvents');evObj.initEvent('mouseover', true, false); arguments[0].dispatchEvent(evObj);} else if(document.createEventObject) { arguments[0].fireEvent('onmouseover');}";
final JavascriptExecutor js = (JavascriptExecutor) driver;
js.executeScript(mouseOverScript, element);
WaitHelper.waitForSeconds(2);
final String clickScript =
"if(document.createEvent){var evObj = document.createEvent('MouseEvents');evObj.initEvent('click', true, false); arguments[0].dispatchEvent(evObj);} else if(document.createEventObject) { arguments[0].fireEvent('onclick');}";
js.executeScript(clickScript, element);
WaitHelper.waitForSeconds(2);
}
public void simulateMoveToElement(final int x, final int y) {
findElement();
((JavascriptExecutor) driver).executeScript(
"function simulate(f,c,d,e){var b,a=null;for(b in eventMatchers)if(eventMatchers[b].test(c)){a=b;break}if(!a)return!1;document.createEvent?(b=document.createEvent(a),a==\"HTMLEvents\"?b.initEvent(c,!0,!0):b.initMouseEvent(c,!0,!0,document.defaultView,0,d,e,d,e,!1,!1,!1,!1,0,null),f.dispatchEvent(b)):(a=document.createEventObject(),a.detail=0,a.screenX=d,a.screenY=e,a.clientX=d,a.clientY=e,a.ctrlKey=!1,a.altKey=!1,a.shiftKey=!1,a.metaKey=!1,a.button=1,f.fireEvent(\"on\"+c,a));return!0} var eventMatchers={HTMLEvents:/^(?:load|unload|abort|errorLogger|select|change|submit|reset|focus|blur|resize|scroll)$/,MouseEvents:/^(?:click|dblclick|mouse(?:down|up|over|move|out))$/}; " +
"simulate(arguments[0],\"mousemove\",arguments[1],arguments[2]);",
element, x, y);
}
/**
* Finds the element using By type. Implicit Waits is built in createWebDriver() in WebUIDriver to handle dynamic
* element problem. This method is invoked before all the basic operations like click, sendKeys, getText, etc. Use
* waitForPresent to use Explicit Waits to deal with special element which needs long time to present.
*/
protected void findElement() {
driver = WebUIDriver.getWebDriver();
element = driver.findElement(by);
}
/**
* Get all elements in the current page with same locator.
*
* @return
*/
public List<WebElement> getAllElements() {
findElement();
return driver.findElements(by);
}
/**
* Gets an attribute (using standard key-value pair) from the underlying attribute.
*
* @param name
*
* @return
*/
public String getAttribute(final String name) {
findElement();
return element.getAttribute(name);
}
/**
* Returns the BY locator stored in the HtmlElement.
*
* @return
*/
public By getBy() {
return by;
}
/**
* Returns the value for the specified CSS key.
*
* @param propertyName
*
* @return
*/
public String getCssValue(final String propertyName) {
findElement();
return element.getCssValue(propertyName);
}
/**
* Get and refresh underlying WebDriver.
*/
protected WebDriver getDriver() {
return WebUIDriver.getWebDriver();
}
/**
* Returns the underlying WebDriver WebElement.
*
* @return
*/
public WebElement getElement() {
element = driver.findElement(by);
return element;
}
/**
* Executes the given JavaScript against the underlying WebElement.
*
* @param script
*
* @return
*/
public String getEval(final String script) {
findElement();
final String name = (String) ((JavascriptExecutor) driver).executeScript(
script, element);
return name;
}
/**
* Returns the 'height' property of the underlying WebElement's Dimension.
*
* @return
*/
public int getHeight() {
findElement();
return element.getSize().getHeight();
}
/**
* Returns the label used during initialization.
*
* @return
*/
public String getLabel() {
return label;
}
/**
* Gets the Point location of the underlying WebElement.
*
* @return
*/
public Point getLocation() {
findElement();
return element.getLocation();
}
/**
* Returns the locator used to find the underlying WebElement.
*
* @return
*/
public String getLocator() {
return locator;
}
private By getLocatorBy(final String locator,
final LocatorType locatorType) {
switch (locatorType) {
case ID:
return By.id(locator);
case NAME:
return By.name(locator);
case CLASS_NAME:
return By.className(locator);
case LINK_TEXT:
return By.linkText(locator);
case PARTIAL_LINK_TEXT:
return By.partialLinkText(locator);
case CSS_SELECTOR:
return By.cssSelector(locator);
case TAG_NAME:
return By.tagName(locator);
default:
return By.xpath(locator);
}
}
/**
* Returns the Dimension property of the underlying WebElement.
*
* @return
*/
public Dimension getSize() {
findElement();
return element.getSize();
}
/**
* Returns the HTML Tag for the underlying WebElement (div, a, input, etc).
*
* @return
*/
public String getTagName() {
findElement();
return element.getTagName();
}
/**
* Returns the text body of the underlying WebElement.
*
* @return
*/
public String getText() {
findElement();
return element.getText();
}
/**
* Returns the 'value' attribute of the underlying WebElement.
*
* @return
*/
public String getValue() {
findElement();
return element.getAttribute("value");
}
/**
* Returns the 'width' property of the underlying WebElement's Dimension.
*
* @return
*/
public int getWidth() {
findElement();
return element.getSize().getWidth();
}
/**
* Refreshes the WebUIDriver before locating the element, to ensure we have the current version (useful for when the
* state of an element has changed via an AJAX or non-page-turn action).
*/
public void init() {
driver = WebUIDriver.getWebDriver();
element = driver.findElement(by);
}
/**
* Indicates whether or not the web element is currently displayed in the browser.
*
* @return
*/
public boolean isDisplayed() {
try {
findElement();
return element.isDisplayed();
} catch (final Exception e) {
return false;
}
}
/**
* Searches for the element using the BY locator, and indicates whether or not it exists in the page. This can be
* used to look for hidden objects, whereas isDisplayed() only looks for things that are visible to the user
*
* @return
*/
public boolean isElementPresent() {
if (WebUIDriver.getWebDriver() == null) {
TestLogging.log(
"Web Driver is terminated! Exception might caught in last action.");
throw new RuntimeException(
"Web Driver is terminated! Exception might caught in last action.");
}
int count = 0;
try {
count = WebUIDriver.getWebDriver().findElements(by).size();
} catch (final RuntimeException e) {
if (e instanceof InvalidSelectorException) {
TestLogging.log("Got InvalidSelectorException, retry");
WaitHelper.waitForSeconds(2);
count = WebUIDriver.getWebDriver().findElements(by).size();
} else if ((e.getMessage() != null) &&
e.getMessage().contains(
"TransformedEntriesMap cannot be cast to java.util.List")) {
TestLogging.log("Got CastException, retry");
WaitHelper.waitForSeconds(2);
count = WebUIDriver.getWebDriver().findElements(by).size();
} else {
throw e;
}
}
if (count == 0) {
return false;
}
return true;
}
/**
* Indicates whether or not the element is enabled in the browser.
*
* @return
*/
public boolean isEnabled() {
findElement();
return element.isEnabled();
}
/**
* Indicates whether or not the element is selected in the browser.
*
* @return
*/
public boolean isSelected() {
findElement();
return element.isSelected();
}
/**
* Whether or not the indicated text is contained in the element's getText() attribute.
*
* @param text
*
* @return
*/
public boolean isTextPresent(final String text) {
findElement();
return element.getText().contains(text);
}
/**
* Forces a mouseDown event on the WebElement.
*/
public void mouseDown() {
TestLogging.log("MouseDown " + this.toString());
findElement();
final Mouse mouse = ((HasInputDevices) driver).getMouse();
mouse.mouseDown(null);
}
/**
* Forces a mouseOver event on the WebElement.
*/
public void mouseOver() {
TestLogging.log("MouseOver " + this.toString());
findElement();
// build and perform the mouseOver with Advanced User Interactions API
// Actions builder = new Actions(driver);
// builder.moveToElement(element).build().perform();
final Locatable hoverItem = (Locatable) element;
final Mouse mouse = ((HasInputDevices) driver).getMouse();
mouse.mouseMove(hoverItem.getCoordinates());
}
/**
* Forces a mouseOver event on the WebElement using simulate by JavaScript way for some dynamic menu.
*/
public void simulateMouseOver() {
findElement();
final String mouseOverScript =
"if(document.createEvent){var evObj = document.createEvent('MouseEvents');evObj.initEvent('mouseover', true, false); arguments[0].dispatchEvent(evObj);} else if(document.createEventObject) { arguments[0].fireEvent('onmouseover');}";
final JavascriptExecutor js = (JavascriptExecutor) driver;
js.executeScript(mouseOverScript, element);
}
/**
* Forces a mouseUp event on the WebElement.
*/
public void mouseUp() {
TestLogging.log("MouseUp " + this.toString());
findElement();
final Mouse mouse = ((HasInputDevices) driver).getMouse();
mouse.mouseUp(null);
}
/**
* Sends the indicated CharSequence to the WebElement.
*
* @param arg0
*/
public void sendKeys(final CharSequence arg0) {
findElement();
element.sendKeys(arg0);
}
/**
* Method, which should never be used.
*/
protected void sleep(final int waitTime) throws InterruptedException {
Thread.sleep(waitTime);
}
/**
* Converts the Type, Locator and LabelElement attributes of the HtmlElement into a readable and report-friendly
* string.
*
* @return
*/
public String toHTML() {
return getClass().getSimpleName().toLowerCase() +
" <a style=\"font-style:normal;color:#8C8984;text-decoration:none;\" href=# \">" +
getLabel() + ",: " + getBy().toString() + "</a>";
}
/**
* Returns a friendly string, representing the HtmlElement's Type, LabelElement and Locator.
*/
public String toString() {
return getClass().getSimpleName().toLowerCase() + " " + getLabel() +
", by={" + getBy().toString() + "}";
}
/**
* Wait element to present using Explicit Waits with default EXPLICIT_WAIT_TIME_OUT = 15 seconds.
*/
public void waitForPresent() {
waitForPresent(EXPLICIT_WAIT_TIME_OUT);
}
/**
* Wait element to present using Explicit Waits with timeout in seconds. This method is used for special element
* which needs long time to present.
*/
public void waitForPresent(final int timeout) {
TestLogging.logWebStep("wait for " + this.toString() + " to present.", false);
final Wait<WebDriver> wait = new WebDriverWait(driver, timeout);
wait.until(new ExpectedCondition<WebElement>() {
public WebElement apply(final WebDriver driver) {
return driver.findElement(by);
}
});
}
}
| |
package se.purplescout.pong.competition.client.paddle.classselector;
import javafx.application.Platform;
import javafx.beans.property.ObjectProperty;
import javafx.beans.property.SimpleObjectProperty;
import javafx.beans.value.ChangeListener;
import javafx.beans.value.ObservableValue;
import javafx.collections.FXCollections;
import javafx.scene.control.ChoiceBox;
import javafx.util.StringConverter;
import org.slf4j.LoggerFactory;
import se.purplescout.pong.competition.client.util.StatusIndicator;
import se.purplescout.pong.competition.client.util.filesystemlistener.DirectoryChangeListener;
import se.purplescout.pong.competition.client.util.filesystemlistener.DirectoryWatcher;
import se.purplescout.pong.competition.compiler.JDKNotFoundException;
import se.purplescout.pong.competition.compiler.PaddleCompiler;
import se.purplescout.pong.competition.compiler.ToDisplay;
import se.purplescout.pong.competition.paddlecache.GetTeamNameException;
import se.purplescout.pong.competition.paddlecache.NewInstanceException;
import se.purplescout.pong.competition.paddlecache.PaddleCache;
import se.purplescout.pong.competition.paddlecache.RegisterTimeoutException;
import se.purplescout.pong.game.Paddle;
import se.purplescout.pong.competition.client.util.filesystemlistener.AbstractDirectoryChangeListener;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.*;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.stream.Collectors;
public class ClassSelector extends ChoiceBox<Class<Paddle>> {

    private static final org.slf4j.Logger LOG = LoggerFactory.getLogger(ClassSelector.class);

    private final ObjectProperty<File> paddleClassFolder = new SimpleObjectProperty<>();
    private final ObjectProperty<File> jdkPath = new SimpleObjectProperty<>();
    private final ObjectProperty<Class<Paddle>> selectedPaddle = new SimpleObjectProperty<>();
    /** Maps each compiled paddle class to the source file it came from, plus its change status. */
    private final Map<Class<Paddle>, PathAndUpdatedStatus> paddleToFile = new HashMap<>();
    private final PaddleCompiler compiler = new PaddleCompiler();
    private StatusIndicator statusIndicator;
    private DirectoryWatcher directoryWatcher;

    /** Recompiles the paddle list whenever a .java file in the watched folder changes. */
    private final DirectoryChangeListener directoryChangeListener = new AbstractDirectoryChangeListener() {
        @Override
        public void created(Path file) {
            updateIfJavaFile(file);
        }

        @Override
        public void deleted(Path file) {
            updateIfJavaFile(file);
        }

        @Override
        public void modified(Path file) {
            updateIfJavaFile(file);
        }

        private void updateIfJavaFile(Path file) {
            if (file.toFile().getName().endsWith(".java")) {
                // File-system callbacks arrive on a watcher thread; hop to the FX thread.
                Platform.runLater(ClassSelector.this::updatePaddleClassList);
            }
        }
    };

    public ClassSelector() {
        super();

        // Re-scan whenever the watched folder changes.
        paddleClassFolder.addListener((observable, oldValue, newValue) -> {
            registerFileSystemListener();
            updatePaddleClassList();
        });

        this.valueProperty().bindBidirectional(selectedPaddle);
        this.setConverter(new StringConverter<Class<Paddle>>() {

            // Remembers every displayed name so fromString can reverse the mapping.
            Map<String, Class<Paddle>> convertHistory = new HashMap<>();

            @Override
            public String toString(Class<Paddle> clazz) {
                String asString = PaddleCache.getTeamName(clazz);
                convertHistory.put(asString, clazz);
                return asString;
            }

            @Override
            public Class<Paddle> fromString(String string) {
                Class<Paddle> paddle = convertHistory.get(string);
                if (paddle == null) {
                    throw new NullPointerException("Tried to turn " + string + " into a paddle, but didn't know how");
                }
                return paddle;
            }
        });
    }

    public void setStatusIndicator(StatusIndicator statusIndicator) {
        this.statusIndicator = statusIndicator;
    }

    public ObjectProperty<File> jdkFolderProperty() {
        return jdkPath;
    }

    public ObjectProperty<Class<Paddle>> selectedPaddleProperty() {
        return selectedPaddle;
    }

    /** (Re)starts the directory watcher on the current paddle folder. */
    private void registerFileSystemListener() {
        if (directoryWatcher != null) {
            directoryWatcher.requestStop();
        }

        try {
            directoryWatcher = new DirectoryWatcher(paddleClassFolder.get().toPath(), directoryChangeListener);
            directoryWatcher.startWatching();
        } catch (IOException e) {
            throw new ToDisplay("Unable to register file system listener on the paddle directory", e);
        }
    }

    /**
     * Replaces the choice list with {@code newPaddles}, keeping the current selection
     * when a paddle from the same team is still present. A change event is fired only
     * when the selected paddle's class has actually changed (or the selection moved).
     */
    private void populatePaddleClassChoices(Collection<Class<Paddle>> newPaddles) {
        Class<Paddle> currentPaddle = this.getValue();

        // Disconnect properties to prevent change events from being fired.
        this.valueProperty().unbindBidirectional(selectedPaddle);

        // Change available paddles. No event is fired here
        this.setItems(FXCollections.observableArrayList(newPaddles));

        // Check if the currently selected paddle is still a valid choice
        if (currentPaddle != null && containsPaddle(currentPaddle, newPaddles)) {
            currentPaddle = getPaddleFromSameTeam(currentPaddle, newPaddles);
            PathAndUpdatedStatus pathAndUpdatedStatus = paddleToFile.get(currentPaddle);
            boolean classHasChanged = pathAndUpdatedStatus == null || pathAndUpdatedStatus.isUpdated();
            if (classHasChanged) { // If the current paddle is a valid choice and it has changed, fire a change event
                this.valueProperty().bindBidirectional(selectedPaddle);
                this.setValue(currentPaddle);
            } else { // If the current paddle is valid but hasn't changed, don't fire a change event
                this.setValue(currentPaddle);
                this.valueProperty().bindBidirectional(selectedPaddle);
            }
        } else {
            // The currently selected paddle is no longer valid, select an arbitrary paddle and fire a change event
            this.valueProperty().bindBidirectional(selectedPaddle);
            if (!this.getItems().isEmpty()) {
                // Guard: getItems().get(0) used to throw IndexOutOfBoundsException
                // when the new paddle list was empty.
                this.setValue(this.getItems().get(0));
            }
        }
    }

    /** True when {@code paddles} contains a paddle from the same team as {@code paddle}. */
    private boolean containsPaddle(Class<Paddle> paddle, Iterable<Class<Paddle>> paddles) {
        return getPaddleFromSameTeam(paddle, paddles) != null;
    }

    /** Returns the paddle in {@code paddles} whose team name matches {@code template}, or null. */
    private Class<Paddle> getPaddleFromSameTeam(Class<Paddle> template, Iterable<Class<Paddle>> paddles) {
        for (Class<Paddle> paddle : paddles) {
            if (PaddleCache.getTeamName(template).equals(PaddleCache.getTeamName(paddle))) {
                return paddle;
            }
        }

        return null;
    }

    public ObjectProperty<File> paddleClassFolderProperty() {
        return paddleClassFolder;
    }

    /** Recompiles all paddles in the configured folder and refreshes the choices. */
    public void updatePaddleClassList() {
        if (paddleClassFolder.get() != null) {
            Collection<Class<Paddle>> classes = getPaddlesInFolder(paddleClassFolder.get());
            populatePaddleClassChoices(classes);
        }
    }

    /**
     * Compiles every .java file under {@code root} into a paddle class, updating
     * {@link #paddleToFile} as it goes. Status is reported through the indicator
     * (when one is set) under a unique key for this run.
     *
     * @throws ToDisplay wrapping any compilation/instantiation problem, with a user-facing message
     */
    private Collection<Class<Paddle>> getPaddlesInFolder(File root) {
        final Object key = new Object();
        try {
            if (statusIndicator != null) {
                statusIndicator.startWorking(key, "Looking for files in " + root, null);
            }

            List<Path> files = getJavaFilesInFolder(root);
            log(key, "Found " + files.size() + " file(s)");

            Collection<Class<Paddle>> paddles = new LinkedList<>();
            for (Path file : files) {
                try {
                    Class<Paddle> paddle = compiler.compile(file, jdkPath.get());
                    if (paddle != null) {
                        String oldTeamName = PaddleCache.getTeamName(paddle);
                        PaddleCache.registerNewPaddle(paddle);
                        paddles.add(paddle);

                        // Carry the previous hash over from any earlier class of the
                        // same team, and drop the stale entry. Iterator.remove keeps
                        // the removal safe while iterating.
                        String previousHash = null;
                        Iterator<Map.Entry<Class<Paddle>, PathAndUpdatedStatus>> it =
                                paddleToFile.entrySet().iterator();
                        while (it.hasNext()) {
                            Map.Entry<Class<Paddle>, PathAndUpdatedStatus> entry = it.next();
                            if (PaddleCache.getTeamName(entry.getKey()).equals(oldTeamName)) {
                                previousHash = entry.getValue().getHash();
                                it.remove();
                                break;
                            }
                        }

                        paddleToFile.put(paddle, new PathAndUpdatedStatus(file, previousHash));
                    }
                } catch (JDKNotFoundException ex) {
                    throw new ToDisplay("I can't seem to find your JDK, please tell me where it is", ex);
                } catch (NewInstanceException ex) {
                    throw new ToDisplay("Unable to instantiate paddle " + file + ", try to make the class public and add a public constructor with no arguments if you have declared your own constructor. ", ex);
                } catch (GetTeamNameException ex) {
                    throw new ToDisplay("Couldn't get the team name from " + file + ", it either took too long or threw an exception", ex);
                } catch (RegisterTimeoutException ex) {
                    throw new ToDisplay("Unable to instantiate paddle " + file + ", it took too long..", ex);
                } catch (IOException ex) {
                    throw new ToDisplay("Unable to instantiate paddle " + file + ", " + ex.getMessage(), ex);
                }
            }

            log(key, "Compiled " + files.size() + " file(s)");
            return paddles;
        } finally {
            if (statusIndicator != null) {
                statusIndicator.stopWorking(key, null);
            }
        }
    }

    /** Forwards a status message to the indicator, when one is set. */
    private void log(Object key, String msg) {
        if (statusIndicator != null) {
            statusIndicator.setStatus(key, msg);
        }
    }

    /**
     * Lists all .java files under {@code root}, rejecting empty results and
     * suspiciously large trees (more than 100 files).
     */
    private List<Path> getJavaFilesInFolder(File root) {
        // Files.walk returns a lazily-populated stream that must be closed to
        // release the underlying directory handles.
        try (java.util.stream.Stream<Path> walk = Files.walk(root.toPath())) {
            List<Path> files = walk.filter(p -> p.toFile().getName().endsWith(".java")).collect(Collectors.toList());
            if (files.isEmpty()) {
                throw new ToDisplay("Found no Java files in " + root.getAbsolutePath());
            }
            if (files.size() > 100) {
                // TODO: Allow the user to override this decision
                throw new ToDisplay("Found too many Java files in " + root.getAbsolutePath());
            }

            return files;
        } catch (IOException ex) {
            // Use the class's SLF4J logger instead of an ad-hoc java.util.logging one.
            LOG.error("Unable to walk {}", root, ex);
            throw new ToDisplay("Unable to locate classes in the selected folder: " + ex.getMessage() + ".\nPlease try different folder, it is probably named 'src'.");
        }
    }

    /** Returns the source file that produced {@code paddle}, or null (with a warning) when unknown. */
    public Path whichFileCompiledTo(Class<Paddle> paddle) {
        PathAndUpdatedStatus toReturn = paddleToFile.get(paddle);
        if (toReturn == null) {
            LOG.warn("Did not find file that compiled to {}", PaddleCache.getTeamName(paddle));
            return null;
        } else {
            return toReturn.getPath();
        }
    }
}
| |
package com.vaadin.tests.components.grid;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.IntStream;
import org.easymock.Capture;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mockito;
import com.vaadin.data.provider.DataProvider;
import com.vaadin.data.provider.bov.Person;
import com.vaadin.event.selection.MultiSelectionEvent;
import com.vaadin.event.selection.MultiSelectionListener;
import com.vaadin.shared.Registration;
import com.vaadin.tests.util.MockUI;
import com.vaadin.ui.Grid;
import com.vaadin.ui.Grid.SelectionMode;
import com.vaadin.ui.UI;
import com.vaadin.ui.components.grid.GridSelectionModel;
import com.vaadin.ui.components.grid.MultiSelectionModel;
import com.vaadin.ui.components.grid.MultiSelectionModel.SelectAllCheckBoxVisibility;
import com.vaadin.ui.components.grid.MultiSelectionModelImpl;
import elemental.json.JsonObject;
public class GridMultiSelectionModelTest {
public static final Person PERSON_C = new Person("c", 3);
public static final Person PERSON_B = new Person("b", 2);
public static final Person PERSON_A = new Person("a", 1);
private Grid<Person> grid;
private MultiSelectionModelImpl<Person> selectionModel;
private Capture<List<Person>> currentSelectionCapture;
private Capture<List<Person>> oldSelectionCapture;
private AtomicInteger events;
    /**
     * Multi-selection model that records every item whose row data is
     * (re)generated, together with the selected flag it was generated with,
     * so tests can verify which rows were refreshed for the client.
     */
    public static class CustomMultiSelectionModel
            extends MultiSelectionModelImpl<String> {

        // Insertion-ordered record of generated items -> selected state at generation time.
        public final Map<String, Boolean> generatedData = new LinkedHashMap<>();

        @Override
        public void generateData(String item, JsonObject jsonObject) {
            super.generateData(item, jsonObject);
            // capture updated row
            generatedData.put(item, isSelected(item));
        }
    }
    /**
     * String grid wired with a caller-supplied selection model; defaults to
     * {@link CustomMultiSelectionModel} so generated row data can be inspected.
     */
    public static class CustomSelectionModelGrid extends Grid<String> {
        public CustomSelectionModelGrid() {
            this(new CustomMultiSelectionModel());
        }

        public CustomSelectionModelGrid(
                GridSelectionModel<String> selectionModel) {
            super();
            setSelectionModel(selectionModel);
        }
    }
    /**
     * Model constructed with selection disallowed, used to verify that
     * updateSelection(...) rejects changes in that state.
     */
    private static class TestMultiSelectionModel
            extends MultiSelectionModelImpl<Object> {

        public TestMultiSelectionModel() {
            // Disallow selection up front; calls to updateSelection are then expected to throw.
            getState(false).selectionAllowed = false;
        }

        @Override
        protected void updateSelection(Set<Object> addedItems,
                Set<Object> removedItems, boolean userOriginated) {
            // Override (behavior unchanged) so the enclosing test class can invoke
            // the protected method directly.
            super.updateSelection(addedItems, removedItems, userOriginated);
        }
    }
    /**
     * Creates a multi-select grid with persons A, B, C and registers a listener
     * that captures the new/old selection of each event and counts the events.
     */
    @Before
    public void setUp() {
        grid = new Grid<>();
        selectionModel = (MultiSelectionModelImpl<Person>) grid
                .setSelectionMode(SelectionMode.MULTI);
        grid.setItems(PERSON_A, PERSON_B, PERSON_C);

        currentSelectionCapture = new Capture<>();
        oldSelectionCapture = new Capture<>();
        events = new AtomicInteger();
        selectionModel.addMultiSelectionListener(event -> {
            currentSelectionCapture
                    .setValue(new ArrayList<>(event.getNewSelection()));
            oldSelectionCapture
                    .setValue(new ArrayList<>(event.getOldSelection()));
            events.incrementAndGet();
        });
    }
    // Updating the selection while selection is disallowed must throw.
    // NOTE(review): method name has a typo ("Excpetion"); left unchanged to keep the API stable.
    @Test(expected = IllegalStateException.class)
    public void throwExcpetionWhenSelectionIsDisallowed() {
        TestMultiSelectionModel model = new TestMultiSelectionModel();
        model.updateSelection(Collections.emptySet(), Collections.emptySet(),
                true);
    }
    // A selection model detached by switching the grid to SINGLE mode must
    // refuse further use.
    @Test(expected = IllegalStateException.class)
    public void selectionModelChanged_usingPreviousSelectionModel_throws() {
        grid.setSelectionMode(SelectionMode.SINGLE);

        selectionModel.select(PERSON_A);
    }
    /**
     * Switching the selection mode while items are selected should fire one
     * final event whose old selection is the previously selected items and
     * whose new selection is empty.
     */
    @Test
    public void changingSelectionModel_firesSelectionEvent() {
        Grid<String> customGrid = new Grid<>();
        customGrid.setSelectionMode(SelectionMode.MULTI);
        customGrid.setItems("Foo", "Bar", "Baz");

        List<String> selectionChanges = new ArrayList<>();
        // Local capture (intentionally shadows the fixture field) scoped to this grid.
        Capture<List<String>> oldSelectionCapture = new Capture<>();
        ((MultiSelectionModelImpl<String>) customGrid.getSelectionModel())
                .addMultiSelectionListener(e -> {
                    selectionChanges.addAll(e.getValue());
                    oldSelectionCapture
                            .setValue(new ArrayList<>(e.getOldSelection()));
                });

        customGrid.getSelectionModel().select("Foo");
        assertEquals(Arrays.asList("Foo"), selectionChanges);
        selectionChanges.clear();

        customGrid.getSelectionModel().select("Bar");
        // First selected item stays "Foo" - selection order is preserved.
        assertEquals("Foo",
                customGrid.getSelectionModel().getFirstSelectedItem().get());
        assertEquals(Arrays.asList("Foo", "Bar"), selectionChanges);
        selectionChanges.clear();

        // Switching to SINGLE clears the selection and fires a deselect event.
        customGrid.setSelectionMode(SelectionMode.SINGLE);
        assertFalse(customGrid.getSelectionModel().getFirstSelectedItem()
                .isPresent());
        assertEquals(Arrays.asList(), selectionChanges);
        assertEquals(Arrays.asList("Foo", "Bar"),
                oldSelectionCapture.getValue());
    }
    /**
     * Server-side selection changes should mark exactly the affected rows for a
     * client refresh; removing the model (by switching selection mode) must stop
     * it from generating row data.
     */
    @Test
    public void serverSideSelection_GridChangingSelectionModel_sendsUpdatedRowsToClient() {
        Grid<String> customGrid = new CustomSelectionModelGrid();
        CustomMultiSelectionModel customModel = (CustomMultiSelectionModel) customGrid
                .getSelectionModel();
        customGrid.setItems("Foo", "Bar", "Baz");

        // Initial full response: all rows generated, none selected.
        customGrid.getDataCommunicator().beforeClientResponse(true);

        Assert.assertFalse("Item should have been updated as selected",
                customModel.generatedData.get("Foo"));
        Assert.assertFalse("Item should have been updated as NOT selected",
                customModel.generatedData.get("Bar"));
        Assert.assertFalse("Item should have been updated as NOT selected",
                customModel.generatedData.get("Baz"));

        customModel.generatedData.clear();

        // Selecting "Foo" refreshes only that row.
        customGrid.getSelectionModel().select("Foo");
        customGrid.getDataCommunicator().beforeClientResponse(false);

        Assert.assertTrue("Item should have been updated as selected",
                customModel.generatedData.get("Foo"));
        Assert.assertFalse("Item should have NOT been updated",
                customModel.generatedData.containsKey("Bar"));
        Assert.assertFalse("Item should have NOT been updated",
                customModel.generatedData.containsKey("Baz"));

        customModel.generatedData.clear();

        // Swapping the selection refreshes the deselected and the newly selected row.
        customModel.updateSelection(asSet("Bar"), asSet("Foo"));
        customGrid.getDataCommunicator().beforeClientResponse(false);

        Assert.assertFalse("Item should have been updated as NOT selected",
                customModel.generatedData.get("Foo"));
        Assert.assertTrue("Item should have been updated as selected",
                customModel.generatedData.get("Bar"));
        Assert.assertFalse("Item should have NOT been updated",
                customModel.generatedData.containsKey("Baz"));

        // switch to single to cause event
        customModel.generatedData.clear();
        customGrid.setSelectionMode(SelectionMode.SINGLE);
        customGrid.getDataCommunicator().beforeClientResponse(false);

        // changing selection model should trigger row updates, but the old
        // selection model is not triggered as it has been removed
        Assert.assertTrue(customModel.generatedData.isEmpty()); // not triggered
    }
    /**
     * Basic select/deselect behavior on a string grid: isSelected, first
     * selected item, and selection order of getSelectedItems.
     */
    @Test
    public void select_gridWithStrings() {
        Grid<String> gridWithStrings = new Grid<>();
        gridWithStrings.setSelectionMode(SelectionMode.MULTI);
        gridWithStrings.setItems("Foo", "Bar", "Baz");

        GridSelectionModel<String> model = gridWithStrings.getSelectionModel();
        Assert.assertFalse(model.isSelected("Foo"));

        model.select("Foo");
        Assert.assertTrue(model.isSelected("Foo"));
        Assert.assertEquals(Optional.of("Foo"), model.getFirstSelectedItem());

        model.select("Bar");
        Assert.assertTrue(model.isSelected("Foo"));
        Assert.assertTrue(model.isSelected("Bar"));
        // getSelectedItems preserves selection (insertion) order.
        Assert.assertEquals(Arrays.asList("Foo", "Bar"),
                new ArrayList<>(model.getSelectedItems()));

        model.deselect("Bar");
        Assert.assertFalse(model.isSelected("Bar"));
        Assert.assertTrue(model.getFirstSelectedItem().isPresent());
        Assert.assertEquals(Arrays.asList("Foo"),
                new ArrayList<>(model.getSelectedItems()));
    }
    /**
     * Selecting items one by one fires one event each and keeps selection
     * order: the first selected item remains first.
     */
    @Test
    public void select() {
        selectionModel.select(PERSON_B);

        assertEquals(PERSON_B,
                selectionModel.getFirstSelectedItem().orElse(null));
        assertEquals(Optional.of(PERSON_B),
                selectionModel.getFirstSelectedItem());

        assertFalse(selectionModel.isSelected(PERSON_A));
        assertTrue(selectionModel.isSelected(PERSON_B));
        assertFalse(selectionModel.isSelected(PERSON_C));

        assertEquals(Arrays.asList(PERSON_B),
                currentSelectionCapture.getValue());

        selectionModel.select(PERSON_A);
        // B was selected first, so it stays the first selected item.
        assertEquals(PERSON_B,
                selectionModel.getFirstSelectedItem().orElse(null));

        assertTrue(selectionModel.isSelected(PERSON_A));
        assertTrue(selectionModel.isSelected(PERSON_B));
        assertFalse(selectionModel.isSelected(PERSON_C));

        assertEquals(Arrays.asList(PERSON_B, PERSON_A),
                currentSelectionCapture.getValue());
        assertEquals(2, events.get());
    }
    /**
     * Select then deselect the same item: selection ends empty and both
     * operations fire an event (two in total).
     */
    @Test
    public void deselect() {
        selectionModel.select(PERSON_B);

        selectionModel.deselect(PERSON_B);

        assertFalse(selectionModel.getFirstSelectedItem().isPresent());

        assertFalse(selectionModel.isSelected(PERSON_A));
        assertFalse(selectionModel.isSelected(PERSON_B));
        assertFalse(selectionModel.isSelected(PERSON_C));

        assertEquals(Arrays.asList(), currentSelectionCapture.getValue());
        assertEquals(2, events.get());
    }
    /**
     * Batch selection keeps argument order, fires one event per call, and a
     * partly-redundant call appends only the new items.
     */
    @Test
    public void selectItems() {
        selectionModel.selectItems(PERSON_C, PERSON_B);

        assertEquals(PERSON_C,
                selectionModel.getFirstSelectedItem().orElse(null));
        assertEquals(Optional.of(PERSON_C),
                selectionModel.getFirstSelectedItem());

        assertFalse(selectionModel.isSelected(PERSON_A));
        assertTrue(selectionModel.isSelected(PERSON_B));
        assertTrue(selectionModel.isSelected(PERSON_C));

        assertEquals(Arrays.asList(PERSON_C, PERSON_B),
                currentSelectionCapture.getValue());

        selectionModel.selectItems(PERSON_A, PERSON_C); // partly NOOP

        assertTrue(selectionModel.isSelected(PERSON_A));
        assertTrue(selectionModel.isSelected(PERSON_B));
        assertTrue(selectionModel.isSelected(PERSON_C));

        assertEquals(Arrays.asList(PERSON_C, PERSON_B, PERSON_A),
                currentSelectionCapture.getValue());
        assertEquals(2, events.get());
    }
    /**
     * Batch deselection removes only the given items, preserving the order of
     * the remaining selection; three calls total fire three events.
     */
    @Test
    public void deselectItems() {
        selectionModel.selectItems(PERSON_C, PERSON_A, PERSON_B);

        selectionModel.deselectItems(PERSON_A);
        assertEquals(PERSON_C,
                selectionModel.getFirstSelectedItem().orElse(null));
        assertEquals(Optional.of(PERSON_C),
                selectionModel.getFirstSelectedItem());

        assertFalse(selectionModel.isSelected(PERSON_A));
        assertTrue(selectionModel.isSelected(PERSON_B));
        assertTrue(selectionModel.isSelected(PERSON_C));

        assertEquals(Arrays.asList(PERSON_C, PERSON_B),
                currentSelectionCapture.getValue());

        selectionModel.deselectItems(PERSON_A, PERSON_B, PERSON_C);
        assertNull(selectionModel.getFirstSelectedItem().orElse(null));
        assertEquals(Optional.empty(), selectionModel.getFirstSelectedItem());

        assertFalse(selectionModel.isSelected(PERSON_A));
        assertFalse(selectionModel.isSelected(PERSON_B));
        assertFalse(selectionModel.isSelected(PERSON_C));

        assertEquals(Arrays.asList(), currentSelectionCapture.getValue());
        assertEquals(3, events.get());
    }
// Verifies that every selection event carries both the new selection
// (currentSelectionCapture) and the selection as it was before the change
// (oldSelectionCapture), across select/deselect/updateSelection/deselectAll.
@Test
public void selectionEvent_newSelection_oldSelection() {
selectionModel.selectItems(PERSON_C, PERSON_A, PERSON_B);
assertEquals(Arrays.asList(PERSON_C, PERSON_A, PERSON_B),
currentSelectionCapture.getValue());
// nothing was selected before the first call
assertEquals(Arrays.asList(), oldSelectionCapture.getValue());
selectionModel.deselect(PERSON_A);
assertEquals(Arrays.asList(PERSON_C, PERSON_B),
currentSelectionCapture.getValue());
assertEquals(Arrays.asList(PERSON_C, PERSON_A, PERSON_B),
oldSelectionCapture.getValue());
selectionModel.deselectItems(PERSON_A, PERSON_B, PERSON_C);
assertEquals(Arrays.asList(), currentSelectionCapture.getValue());
assertEquals(Arrays.asList(PERSON_C, PERSON_B),
oldSelectionCapture.getValue());
selectionModel.selectItems(PERSON_A);
assertEquals(Arrays.asList(PERSON_A),
currentSelectionCapture.getValue());
assertEquals(Arrays.asList(), oldSelectionCapture.getValue());
// updateSelection applies additions and removals as one atomic change
selectionModel.updateSelection(
new LinkedHashSet<>(Arrays.asList(PERSON_B, PERSON_C)),
new LinkedHashSet<>(Arrays.asList(PERSON_A)));
assertEquals(Arrays.asList(PERSON_B, PERSON_C),
currentSelectionCapture.getValue());
assertEquals(Arrays.asList(PERSON_A), oldSelectionCapture.getValue());
selectionModel.deselectAll();
assertEquals(Arrays.asList(), currentSelectionCapture.getValue());
assertEquals(Arrays.asList(PERSON_B, PERSON_C),
oldSelectionCapture.getValue());
selectionModel.select(PERSON_C);
assertEquals(Arrays.asList(PERSON_C),
currentSelectionCapture.getValue());
assertEquals(Arrays.asList(), oldSelectionCapture.getValue());
selectionModel.deselect(PERSON_C);
assertEquals(Arrays.asList(), currentSelectionCapture.getValue());
assertEquals(Arrays.asList(PERSON_C), oldSelectionCapture.getValue());
}
// Verifies deselectAll: it clears the whole selection in one event, reports
// the previous selection as the old value, and is a no-op (no extra event)
// when the selection is already empty.
@Test
public void deselectAll() {
selectionModel.selectItems(PERSON_A, PERSON_C, PERSON_B);
assertTrue(selectionModel.isSelected(PERSON_A));
assertTrue(selectionModel.isSelected(PERSON_B));
assertTrue(selectionModel.isSelected(PERSON_C));
assertEquals(Arrays.asList(PERSON_A, PERSON_C, PERSON_B),
currentSelectionCapture.getValue());
assertEquals(1, events.get());
selectionModel.deselectAll();
assertFalse(selectionModel.isSelected(PERSON_A));
assertFalse(selectionModel.isSelected(PERSON_B));
assertFalse(selectionModel.isSelected(PERSON_C));
assertEquals(Arrays.asList(), currentSelectionCapture.getValue());
assertEquals(Arrays.asList(PERSON_A, PERSON_C, PERSON_B),
oldSelectionCapture.getValue());
assertEquals(2, events.get());
selectionModel.select(PERSON_C);
assertFalse(selectionModel.isSelected(PERSON_A));
assertFalse(selectionModel.isSelected(PERSON_B));
assertTrue(selectionModel.isSelected(PERSON_C));
assertEquals(Arrays.asList(PERSON_C),
currentSelectionCapture.getValue());
assertEquals(Arrays.asList(), oldSelectionCapture.getValue());
assertEquals(3, events.get());
selectionModel.deselectAll();
assertFalse(selectionModel.isSelected(PERSON_A));
assertFalse(selectionModel.isSelected(PERSON_B));
assertFalse(selectionModel.isSelected(PERSON_C));
assertEquals(Arrays.asList(), currentSelectionCapture.getValue());
assertEquals(Arrays.asList(PERSON_C), oldSelectionCapture.getValue());
assertEquals(4, events.get());
// selection already empty -> no further event is fired
selectionModel.deselectAll();
assertEquals(4, events.get());
}
// Verifies selectAll: it selects every item, flips isAllSelected, and a
// subsequent selectAll after a partial deselect reports only the previously
// selected item (B) as the old selection.
@Test
public void selectAll() {
selectionModel.selectAll();
assertTrue(selectionModel.isAllSelected());
assertTrue(selectionModel.isSelected(PERSON_A));
assertTrue(selectionModel.isSelected(PERSON_B));
assertTrue(selectionModel.isSelected(PERSON_C));
assertEquals(Arrays.asList(PERSON_A, PERSON_B, PERSON_C),
currentSelectionCapture.getValue());
assertEquals(1, events.get());
selectionModel.deselectItems(PERSON_A, PERSON_C);
// partial deselect clears the all-selected state
assertFalse(selectionModel.isAllSelected());
assertFalse(selectionModel.isSelected(PERSON_A));
assertTrue(selectionModel.isSelected(PERSON_B));
assertFalse(selectionModel.isSelected(PERSON_C));
assertEquals(Arrays.asList(PERSON_A, PERSON_B, PERSON_C),
oldSelectionCapture.getValue());
selectionModel.selectAll();
assertTrue(selectionModel.isAllSelected());
assertTrue(selectionModel.isSelected(PERSON_A));
assertTrue(selectionModel.isSelected(PERSON_B));
assertTrue(selectionModel.isSelected(PERSON_C));
// B stays first (it remained selected), A and C are re-appended
assertEquals(Arrays.asList(PERSON_B, PERSON_A, PERSON_C),
currentSelectionCapture.getValue());
assertEquals(Arrays.asList(PERSON_B), oldSelectionCapture.getValue());
assertEquals(3, events.get());
}
// Verifies updateSelection(additions, removals): both sets are applied as a
// single atomic change firing one event, an identical repeat call is a NOOP
// (no event), and items appearing in both sets resolve per the model's
// add/remove semantics.
@Test
public void updateSelection() {
selectionModel.updateSelection(asSet(PERSON_A), Collections.emptySet());
assertTrue(selectionModel.isSelected(PERSON_A));
assertFalse(selectionModel.isSelected(PERSON_B));
assertFalse(selectionModel.isSelected(PERSON_C));
assertEquals(Arrays.asList(PERSON_A),
currentSelectionCapture.getValue());
assertEquals(1, events.get());
selectionModel.updateSelection(asSet(PERSON_B), asSet(PERSON_A));
assertFalse(selectionModel.isSelected(PERSON_A));
assertTrue(selectionModel.isSelected(PERSON_B));
assertFalse(selectionModel.isSelected(PERSON_C));
assertEquals(Arrays.asList(PERSON_B),
currentSelectionCapture.getValue());
assertEquals(Arrays.asList(PERSON_A), oldSelectionCapture.getValue());
assertEquals(2, events.get());
// identical call again: selection unchanged, no event fired
selectionModel.updateSelection(asSet(PERSON_B), asSet(PERSON_A)); // NOOP
assertFalse(selectionModel.isSelected(PERSON_A));
assertTrue(selectionModel.isSelected(PERSON_B));
assertFalse(selectionModel.isSelected(PERSON_C));
assertEquals(Arrays.asList(PERSON_B),
currentSelectionCapture.getValue());
assertEquals(Arrays.asList(PERSON_A), oldSelectionCapture.getValue());
assertEquals(2, events.get());
// A appears in both additions and removals -> it ends up deselected
selectionModel.updateSelection(asSet(PERSON_A, PERSON_C),
asSet(PERSON_A)); // partly NOOP
assertFalse(selectionModel.isSelected(PERSON_A));
assertTrue(selectionModel.isSelected(PERSON_B));
assertTrue(selectionModel.isSelected(PERSON_C));
assertEquals(Arrays.asList(PERSON_B, PERSON_C),
currentSelectionCapture.getValue());
assertEquals(Arrays.asList(PERSON_B), oldSelectionCapture.getValue());
assertEquals(3, events.get());
// B appears in both sets -> stays selected, only A is effectively added
selectionModel.updateSelection(asSet(PERSON_B, PERSON_A),
asSet(PERSON_B)); // partly NOOP
assertTrue(selectionModel.isSelected(PERSON_A));
assertTrue(selectionModel.isSelected(PERSON_B));
assertTrue(selectionModel.isSelected(PERSON_C));
assertEquals(Arrays.asList(PERSON_B, PERSON_C, PERSON_A),
currentSelectionCapture.getValue());
assertEquals(Arrays.asList(PERSON_B, PERSON_C),
oldSelectionCapture.getValue());
assertEquals(4, events.get());
selectionModel.updateSelection(asSet(),
asSet(PERSON_B, PERSON_A, PERSON_C));
assertFalse(selectionModel.isSelected(PERSON_A));
assertFalse(selectionModel.isSelected(PERSON_B));
assertFalse(selectionModel.isSelected(PERSON_C));
assertEquals(Arrays.asList(), currentSelectionCapture.getValue());
assertEquals(Arrays.asList(PERSON_B, PERSON_C, PERSON_A),
oldSelectionCapture.getValue());
assertEquals(5, events.get());
}
/**
 * Collects the given items into a {@link LinkedHashSet}, preserving the
 * order in which they were passed.
 *
 * @param people the items to collect
 * @return an insertion-ordered set containing the given items
 */
private <T> Set<T> asSet(@SuppressWarnings("unchecked") T... people) {
Set<T> orderedSet = new LinkedHashSet<>();
Collections.addAll(orderedSet, people);
return orderedSet;
}
// Verifies that selecting the same item twice is idempotent: the selection
// state is unchanged and only one event is fired in total.
@Test
public void selectTwice() {
selectionModel.select(PERSON_C);
selectionModel.select(PERSON_C);
assertEquals(PERSON_C,
selectionModel.getFirstSelectedItem().orElse(null));
assertFalse(selectionModel.isSelected(PERSON_A));
assertFalse(selectionModel.isSelected(PERSON_B));
assertTrue(selectionModel.isSelected(PERSON_C));
assertEquals(Optional.of(PERSON_C),
selectionModel.getFirstSelectedItem());
assertEquals(Arrays.asList(PERSON_C),
currentSelectionCapture.getValue());
// the second select was a NOOP -> still only one event
assertEquals(1, events.get());
}
// Verifies that deselecting an already-deselected item is idempotent: state
// stays empty and no additional event is fired.
@Test
public void deselectTwice() {
selectionModel.select(PERSON_C);
assertEquals(Arrays.asList(PERSON_C),
currentSelectionCapture.getValue());
assertEquals(1, events.get());
selectionModel.deselect(PERSON_C);
assertFalse(selectionModel.getFirstSelectedItem().isPresent());
assertFalse(selectionModel.isSelected(PERSON_C));
assertEquals(Arrays.asList(), currentSelectionCapture.getValue());
assertEquals(2, events.get());
// deselect again: a NOOP, event counter must not move
selectionModel.deselect(PERSON_C);
assertFalse(selectionModel.getFirstSelectedItem().isPresent());
assertFalse(selectionModel.isSelected(PERSON_A));
assertFalse(selectionModel.isSelected(PERSON_B));
assertFalse(selectionModel.isSelected(PERSON_C));
assertEquals(Arrays.asList(), currentSelectionCapture.getValue());
assertEquals(2, events.get());
}
@SuppressWarnings({ "serial" })
@Test
public void addValueChangeListener() {
String value = "foo";
// Captures the listener that the model registers internally, so the test
// can fire a selection change through it manually below.
AtomicReference<MultiSelectionListener<String>> selectionListener = new AtomicReference<>();
Registration registration = Mockito.mock(Registration.class);
// Stub model: hands back the mocked registration and reports a fixed
// selected-items set, independent of actual selection state.
MultiSelectionModelImpl<String> model = new MultiSelectionModelImpl<String>() {
@Override
public Registration addMultiSelectionListener(
MultiSelectionListener<String> listener) {
selectionListener.set(listener);
return registration;
}
@Override
public Set<String> getSelectedItems() {
return new LinkedHashSet<>(Arrays.asList(value));
}
};
Grid<String> grid = new CustomSelectionModelGrid(model);
grid.setItems("foo", "bar");
AtomicReference<MultiSelectionEvent<String>> event = new AtomicReference<>();
Registration actualRegistration = model
.addMultiSelectionListener(evt -> {
// the listener must be invoked exactly once
Assert.assertNull(event.get());
event.set(evt);
});
// the registration returned to the caller is the one from the stub
Assert.assertSame(registration, actualRegistration);
// fire a user-originated selection change through the captured listener
selectionListener.get().selectionChange(new MultiSelectionEvent<>(grid,
model.asMultiSelect(), Collections.emptySet(), true));
Assert.assertEquals(grid, event.get().getComponent());
// event value comes from the model's getSelectedItems override
Assert.assertEquals(new LinkedHashSet<>(Arrays.asList(value)),
event.get().getValue());
Assert.assertTrue(event.get().isUserOriginated());
}
// Verifies select-all checkbox visibility with an in-memory data provider:
// under DEFAULT visibility the checkbox appears once in-memory items are
// set, and explicit HIDDEN/VISIBLE settings override the default.
@Test
public void selectAllCheckboxVisible__inMemoryDataProvider() {
UI ui = new MockUI();
Grid<String> grid = new Grid<>();
MultiSelectionModel<String> model = (MultiSelectionModel<String>) grid
.setSelectionMode(SelectionMode.MULTI);
ui.setContent(grid);
// no items yet, default data provider is empty not in memory one
Assert.assertFalse(model.isSelectAllCheckBoxVisible());
Assert.assertEquals(SelectAllCheckBoxVisibility.DEFAULT,
model.getSelectAllCheckBoxVisibility());
grid.setItems("Foo", "Bar", "Baz");
// in-memory container keeps default
Assert.assertTrue(model.isSelectAllCheckBoxVisible());
Assert.assertEquals(SelectAllCheckBoxVisibility.DEFAULT,
model.getSelectAllCheckBoxVisibility());
// change to explicit NO
model.setSelectAllCheckBoxVisibility(
SelectAllCheckBoxVisibility.HIDDEN);
Assert.assertEquals(SelectAllCheckBoxVisibility.HIDDEN,
model.getSelectAllCheckBoxVisibility());
Assert.assertFalse(model.isSelectAllCheckBoxVisible());
// change to explicit YES
model.setSelectAllCheckBoxVisibility(
SelectAllCheckBoxVisibility.VISIBLE);
Assert.assertEquals(SelectAllCheckBoxVisibility.VISIBLE,
model.getSelectAllCheckBoxVisibility());
Assert.assertTrue(model.isSelectAllCheckBoxVisible());
}
// Verifies select-all checkbox visibility with a lazy (callback-based) data
// provider: under DEFAULT visibility the checkbox stays hidden because the
// provider is not in-memory; explicit VISIBLE/HIDDEN settings still win.
@Test
public void selectAllCheckboxVisible__lazyDataProvider() {
Grid<String> grid = new Grid<>();
UI ui = new MockUI();
ui.setContent(grid);
MultiSelectionModel<String> model = (MultiSelectionModel<String>) grid
.setSelectionMode(SelectionMode.MULTI);
// no items yet, default data provider is empty not in memory one
Assert.assertFalse(model.isSelectAllCheckBoxVisible());
Assert.assertEquals(SelectAllCheckBoxVisibility.DEFAULT,
model.getSelectAllCheckBoxVisibility());
// install a 1000-item callback provider (backend/lazy, not in-memory)
grid.setDataProvider(
DataProvider
.fromCallbacks(
query -> IntStream
.range(query.getOffset(),
Math.max(query.getOffset()
+ query.getLimit() + 1,
1000))
.mapToObj(i -> "Item " + i),
query -> 1000));
// not in-memory -> checkbox is hidden
Assert.assertFalse(model.isSelectAllCheckBoxVisible());
Assert.assertEquals(SelectAllCheckBoxVisibility.DEFAULT,
model.getSelectAllCheckBoxVisibility());
// change to explicit YES
model.setSelectAllCheckBoxVisibility(
SelectAllCheckBoxVisibility.VISIBLE);
Assert.assertEquals(SelectAllCheckBoxVisibility.VISIBLE,
model.getSelectAllCheckBoxVisibility());
Assert.assertTrue(model.isSelectAllCheckBoxVisible());
// change to explicit NO
model.setSelectAllCheckBoxVisibility(
SelectAllCheckBoxVisibility.HIDDEN);
Assert.assertEquals(SelectAllCheckBoxVisibility.HIDDEN,
model.getSelectAllCheckBoxVisibility());
Assert.assertFalse(model.isSelectAllCheckBoxVisible());
// change back to depends on data provider
model.setSelectAllCheckBoxVisibility(
SelectAllCheckBoxVisibility.DEFAULT);
Assert.assertFalse(model.isSelectAllCheckBoxVisible());
Assert.assertEquals(SelectAllCheckBoxVisibility.DEFAULT,
model.getSelectAllCheckBoxVisibility());
}
}
| |
package client;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.PrintWriter;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetAddress;
import java.net.Socket;
import java.net.SocketException;
import java.net.UnknownHostException;
import java.security.InvalidAlgorithmParameterException;
import java.security.InvalidKeyException;
import java.security.NoSuchAlgorithmException;
import java.util.Arrays;
import java.util.ResourceBundle;
import java.util.Scanner;
import java.util.regex.Pattern;
import javax.crypto.BadPaddingException;
import javax.crypto.IllegalBlockSizeException;
import javax.crypto.NoSuchPaddingException;
import channel.SecureChannelAsym;
import channel.SecureChannelSym;
/**
 * Console chat client. Connects to the server via TCP (login handshake and
 * chat/message traffic) and UDP (!info / !list queries), performs an
 * RSA-based login handshake and afterwards sends all TCP traffic
 * AES-encrypted over the symmetric channel.
 */
public class Client {

	/** Splits user input on single spaces; compiled once instead of per loop iteration. */
	private static final Pattern INPUT_SPLITTER = Pattern.compile(" ");

	private Scanner inScanner;

	/* secure channels */
	public static SecureChannelSym secSym;
	public static SecureChannelAsym secAsym;

	/*
	 * Network communication
	 */
	private Socket socket = null;
	private PrintWriter socketOut = null;
	// NOTE(review): never assigned here; TCP reading appears to be done by
	// listenerThread — confirm before removing.
	private BufferedReader socketIn = null;
	private DatagramPacket packet;
	private DatagramSocket udpSocket;

	/*
	 * Threads
	 */
	private ClientThread listenerThread;
	private ClientThread udpListenerThread;

	/*
	 * Input placeholder Strings
	 */
	private String[] inputArray;
	private String cmd;

	/*
	 * Internal
	 */
	private Boolean clientActive;
	private Boolean clientLoggedIn;
	private ResourceBundle configData;
	private String dataPath = "client";

	/*
	 * SETTINGS
	 */
	//private Integer clientUDPPort;
	private String serverHost;
	private Integer serverTCPPort;
	private Integer serverUDPPort;
	public static String key_public_dir;
	private String key_private_dir;
	private String server_key;
	private String ca_host;
	private Integer ca_port;
	private String ca_certificate;

	/**
	 * CONSTRUCTOR
	 *
	 * Initializes configuration, sockets and security channels, starts the
	 * TCP/UDP listener threads, then blocks in the input loop until the
	 * client shuts down.
	 */
	public Client() {
		initalizeClient();
		showWelcome();
		/* start threads */
		listenerThread = new ClientTCP(socket);
		udpListenerThread = new ClientUDP(udpSocket);
		udpListenerThread.start();
		listenerThread.start();
		handleInput();
		exit();
	}

	/**
	 * Initialising of the basic network stuff: loads the config, reads all
	 * settings, creates the secure channels and opens the TCP/UDP sockets.
	 */
	private void initalizeClient() {
		/* load config resourcebundle */
		loadConfigData();
		/* setting configs */
		try {
			serverHost = configData.getString("server.host");
			serverTCPPort = Integer.valueOf(configData.getString("server.tcp.port"));
			serverUDPPort = Integer.valueOf(configData.getString("server.udp.port"));
			key_public_dir = configData.getString("keys.public.dir");
			key_private_dir = configData.getString("keys.private.dir");
			server_key = configData.getString("server.key");
			ca_host = configData.getString("ca.host");
			ca_port = Integer.valueOf(configData.getString("ca.port"));
			ca_certificate = configData.getString("ca.certificate");
		} catch (Exception e) {
			System.err.println("error while retriving config data " + e);
			exit();
		}
		/* Initialising datamembers */
		inScanner = new Scanner(System.in);
		inputArray = new String[1];
		// autoboxing instead of the deprecated Boolean(boolean) constructor
		clientActive = true;
		clientLoggedIn = false;
		/* security */
		secSym = new SecureChannelSym();
		secAsym = new SecureChannelAsym();
		/* initializing sockets */
		try {
			socket = new Socket(serverHost, serverTCPPort);
			socketOut = new PrintWriter(socket.getOutputStream(), true);
		} catch (UnknownHostException e) {
			System.err.println("Don't know about host: " + serverHost);
			exit();
		} catch (IOException e) {
			System.err.println("Couldn't get I/O for the connection to: " + serverHost);
			exit();
		}
		try {
			udpSocket = new DatagramSocket();
		} catch (SocketException e) {
			System.err.println("Couldn't get Datagram Socket: ");
			exit();
		}
	}

	/**
	 * Shows a simple welcome
	 */
	private void showWelcome() {
		System.out.println("Client started.");
	}

	/**
	 * Awaits the user input and handles it depending on the type of command.
	 * Phase 1 loops until a successful !login (or !end). Then it waits for
	 * the symmetric channel (IV and key) to be established, after which all
	 * commands and messages are sent AES-encrypted.
	 */
	private void handleInput() {
		/* FIRST STEP OF ATHENTIFICATION */
		while (clientActive) {
			/* Split input and save in array */
			inputArray = INPUT_SPLITTER.split(inScanner.nextLine());
			/* reading command */
			try {
				cmd = inputArray[0];
			} catch (Exception e) {
				System.out.println("Error while reading input");
			}
			/* if login */
			try {
				if (cmd.equals("!login") && inputArray[1] != null && !inputArray[1].equals("")) {
					try {
						secAsym.setServerPublicKey(server_key);
						secAsym.setClientPrivateKey(key_private_dir + "/" + inputArray[1] + ".pem");
						secAsym.generateClientChallenge();
						// first handshake message: "!login <user> <challenge>", RSA-encrypted
						socketOut.println(secAsym.encryptRSAWithServerPublicKey(
								cmd + " " + inputArray[1] + " " + secAsym.getBase64ClientChallenge()));
						break;
					} catch (Exception e) {
						System.err.println("[Client/Err]: Cannot login " + inputArray[1]
								+ ". User does not have a key or does not exist, or passphrase is wrong.");
					}
				} else if (cmd.equals("!end")) {
					exit();
				} else {
					System.out.println("Please login first.");
				}
			} catch (Exception e) {
				// cmd access above threw (e.g. empty input) -> no user given
				System.err.println("[Client/Err]: no user selected");
			}
		}
		/* wait until iv and key are filled, i.e. the symmetric channel is ready */
		while (!secSym.ready()) {
			try {
				// short sleep instead of the previous busy-spin, which burned CPU
				Thread.sleep(20);
			} catch (InterruptedException e) {
				Thread.currentThread().interrupt();
				return;
			}
		}
		while (clientActive) {
			/* Split input and save in array */
			String input = inScanner.nextLine();
			inputArray = INPUT_SPLITTER.split(input);
			/* reading command */
			try {
				cmd = inputArray[0];
			} catch (Exception e) {
				System.out.println("Error while reading input");
			}
			/* the command switch */
			if (cmd.equals("!end")) { /* ends and if needed logs out */
				System.out.println("Client closing.");
				exit();
			} else if (cmd.equals("!info") || cmd.equals("!list")) { /* sending this through udp */
				try {
					byte[] data = input.getBytes();
					packet = new DatagramPacket(data, data.length, InetAddress.getByName(serverHost), serverUDPPort);
					udpSocket.send(packet);
				} catch (IOException e) {
					System.err.println("Could not send Datagram Data.");
				}
			} else if (cmd.equals("!msg")) {
				try {
					// message text = everything after "!msg <recipient>"
					String text = Client.specialConcatStringArray(
							Arrays.copyOfRange(inputArray, 2, inputArray.length)).trim();
					String private_msg = cmd + " " + inputArray[1] + " ";
					// prepend a base64 signature so the recipient can verify the sender
					private_msg += secAsym.signWithClientPrivateKeyAndBase64(text) + " " + text;
					socketOut.println(secSym.encryptAES(private_msg));
				} catch (Exception e) {
					System.out.println("Error while sending private message.");
				}
			} else { /* sends the message */
				try {
					socketOut.println(secSym.encryptAES(input));
				} catch (Exception e) {
					e.printStackTrace();
				}
			}
		}
	}

	/**
	 * Loads the config into a ResourceBundle
	 */
	private void loadConfigData() {
		try {
			configData = ResourceBundle.getBundle(dataPath);
		} catch (Exception e) {
			System.err.println("[Client/Err]: Could not load config properties: " + e);
			exit();
		}
	}

	/**
	 * Just a little helper method for concating String Arrays to 1 String,
	 * adding a whitespace in front of every part.
	 *
	 * @param arr - the array containing the String parts
	 * @return the whole String
	 */
	public static String specialConcatStringArray(String[] arr) {
		// StringBuilder avoids O(n^2) string concatenation in the loop
		StringBuilder joined = new StringBuilder();
		for (String s : arr) {
			joined.append(' ').append(s);
		}
		return joined.toString();
	}

	/**
	 * Exits the program and releases all resources.
	 * NOTE(review): this only flags the loops to stop and closes resources;
	 * it does not terminate the JVM — callers fall through afterwards.
	 */
	private void exit() {
		clientActive = false;
		try {
			inScanner.close();
			socketOut.close();
			listenerThread.exitThread();
			udpListenerThread.exitThread();
			// previously leaked: close the sockets as well
			if (socket != null) {
				socket.close();
			}
			if (udpSocket != null) {
				udpSocket.close();
			}
		} catch (Exception e) {
			System.err.println("Client: Error while closing client: " + e);
		}
	}
}
| |
/*
* The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
* (the "License"). You may not use this work except in compliance with the License, which is
* available at www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied, as more fully set forth in the License.
*
* See the NOTICE file distributed with this work for information regarding copyright ownership.
*/
package alluxio.client.file;
import alluxio.Configuration;
import alluxio.PropertyKey;
import alluxio.client.block.BlockMasterClient;
import alluxio.client.block.BlockMasterClientPool;
import alluxio.client.block.BlockWorkerClient;
import alluxio.client.block.BlockWorkerThriftClientPool;
import alluxio.client.netty.NettyClient;
import alluxio.exception.ExceptionMessage;
import alluxio.exception.status.UnavailableException;
import alluxio.metrics.MetricsSystem;
import alluxio.network.connection.NettyChannelPool;
import alluxio.resource.CloseableResource;
import alluxio.util.IdUtils;
import alluxio.util.network.NetworkAddressUtils;
import alluxio.util.network.NetworkAddressUtils.ServiceType;
import alluxio.wire.WorkerInfo;
import alluxio.wire.WorkerNetAddress;
import com.codahale.metrics.Gauge;
import com.google.common.base.Preconditions;
import io.netty.bootstrap.Bootstrap;
import io.netty.channel.Channel;
import io.netty.util.internal.chmv8.ConcurrentHashMapV8;
import java.io.Closeable;
import java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.util.ArrayList;
import java.util.List;
import javax.annotation.concurrent.GuardedBy;
import javax.annotation.concurrent.ThreadSafe;
import javax.security.auth.Subject;
/**
* A shared context that isolates all operations within a {@link FileSystem}. Usually, one user
* only needs one instance of {@link FileSystemContext}.
*
* <p>
* NOTE: The context maintains a pool of file system master clients that is already thread-safe.
* Synchronizing {@link FileSystemContext} methods could lead to deadlock: thread A attempts to
* acquire a client when there are no clients left in the pool and blocks holding a lock on the
* {@link FileSystemContext}, when thread B attempts to release a client it owns it is unable to do
* so, because thread A holds the lock on {@link FileSystemContext}.
*/
@ThreadSafe
public final class FileSystemContext implements Closeable {
public static final FileSystemContext INSTANCE = create(null);
static {
MetricsSystem.startSinks();
Metrics.initializeGauges();
}
// Master client pools.
private volatile FileSystemMasterClientPool mFileSystemMasterClientPool;
private volatile BlockMasterClientPool mBlockMasterClientPool;
// Block worker client pools.
private final ConcurrentHashMapV8<InetSocketAddress, BlockWorkerThriftClientPool>
mBlockWorkerClientPools = new ConcurrentHashMapV8<>();
private final ConcurrentHashMapV8<InetSocketAddress, BlockWorkerThriftClientPool>
mBlockWorkerClientHeartbeatPools = new ConcurrentHashMapV8<>();
// The netty data server channel pools.
private final ConcurrentHashMapV8<SocketAddress, NettyChannelPool>
mNettyChannelPools = new ConcurrentHashMapV8<>();
/** The shared master address associated with the {@link FileSystemContext}. */
@GuardedBy("this")
private InetSocketAddress mMasterAddress;
/**
* Indicates whether the {@link #mLocalWorker} field has been lazily initialized yet.
*/
@GuardedBy("this")
private boolean mLocalWorkerInitialized;
/**
* The address of any Alluxio worker running on the local machine. This is initialized lazily.
*/
@GuardedBy("this")
private WorkerNetAddress mLocalWorker;
/** The parent user associated with the {@link FileSystemContext}. */
private final Subject mParentSubject;
/**
* Creates a new file system context.
*
* @return the context
*/
public static FileSystemContext create() {
return create(null);
}
/**
* Creates a file system context with a subject.
*
* @param subject the parent subject, set to null if not present
* @return the context
*/
public static FileSystemContext create(Subject subject) {
FileSystemContext context = new FileSystemContext(subject);
context.init();
return context;
}
/**
* Creates a file system context with a subject.
*
* @param subject the parent subject, set to null if not present
*/
private FileSystemContext(Subject subject) {
mParentSubject = subject;
}
/**
* Initializes the context. Only called in the factory methods and reset.
*/
private void init() {
mMasterAddress = NetworkAddressUtils.getConnectAddress(ServiceType.MASTER_RPC);
mFileSystemMasterClientPool = new FileSystemMasterClientPool(mParentSubject, mMasterAddress);
mBlockMasterClientPool = new BlockMasterClientPool(mParentSubject, mMasterAddress);
}
/**
* Closes all the resources associated with the context. Make sure all the resources are released
* back to this context before calling this close. After closing the context, all the resources
* that acquired from this context might fail. Only call this when you are done with using
* the {@link FileSystem} associated with this {@link FileSystemContext}.
*/
public void close() {
mFileSystemMasterClientPool.close();
mFileSystemMasterClientPool = null;
mBlockMasterClientPool.close();
mBlockMasterClientPool = null;
for (BlockWorkerThriftClientPool pool : mBlockWorkerClientPools.values()) {
pool.close();
}
mBlockWorkerClientPools.clear();
for (BlockWorkerThriftClientPool pool : mBlockWorkerClientHeartbeatPools.values()) {
pool.close();
}
mBlockWorkerClientHeartbeatPools.clear();
for (NettyChannelPool pool : mNettyChannelPools.values()) {
pool.close();
}
mNettyChannelPools.clear();
synchronized (this) {
mMasterAddress = null;
mLocalWorkerInitialized = false;
mLocalWorker = null;
}
}
/**
* Resets the context. It is only used in {@link alluxio.hadoop.AbstractFileSystem} and
* tests to reset the default file system context.
*/
public synchronized void reset() {
close();
init();
}
/**
* @return the parent subject
*/
public Subject getParentSubject() {
return mParentSubject;
}
/**
* @return the master address
*/
public synchronized InetSocketAddress getMasterAddress() {
return mMasterAddress;
}
/**
* Acquires a file system master client from the file system master client pool.
*
* @return the acquired file system master client
*/
public FileSystemMasterClient acquireMasterClient() {
return mFileSystemMasterClientPool.acquire();
}
/**
* Releases a file system master client into the file system master client pool.
*
* @param masterClient a file system master client to release
*/
public void releaseMasterClient(FileSystemMasterClient masterClient) {
mFileSystemMasterClientPool.release(masterClient);
}
/**
* Acquires a file system master client from the file system master client pool. The resource is
* {@code Closeable}.
*
* @return the acquired file system master client resource
*/
public CloseableResource<FileSystemMasterClient> acquireMasterClientResource() {
return new CloseableResource<FileSystemMasterClient>(mFileSystemMasterClientPool.acquire()) {
@Override
public void close() {
mFileSystemMasterClientPool.release(get());
}
};
}
/**
* Acquires a block master client resource from the block master client pool. The resource is
* {@code Closeable}.
*
* @return the acquired block master client resource
*/
public CloseableResource<BlockMasterClient> acquireBlockMasterClientResource() {
return new CloseableResource<BlockMasterClient>(mBlockMasterClientPool.acquire()) {
@Override
public void close() {
mBlockMasterClientPool.release(get());
}
};
}
/**
* Creates a client for a block worker with the given address.
*
* @param address the address of the worker to get a client to
* @return a {@link BlockWorkerClient} connected to the worker with the given worker RPC address
*/
public BlockWorkerClient createBlockWorkerClient(WorkerNetAddress address) {
return createBlockWorkerClient(address, IdUtils.getRandomNonNegativeLong());
}
/**
* Creates a client for a block worker with the given address.
*
* @param address the address of the worker to get a client to
* @param sessionId the session ID
* @return a {@link BlockWorkerClient} connected to the worker with the given worker RPC address
*/
// TODO(peis): Abstract the logic to operate on the pools.
public BlockWorkerClient createBlockWorkerClient(WorkerNetAddress address,
Long sessionId) {
Preconditions.checkNotNull(address, ExceptionMessage.NO_WORKER_AVAILABLE.getMessage());
InetSocketAddress rpcAddress = NetworkAddressUtils.getRpcPortSocketAddress(address);
if (!mBlockWorkerClientPools.containsKey(rpcAddress)) {
BlockWorkerThriftClientPool pool = new BlockWorkerThriftClientPool(mParentSubject, rpcAddress,
Configuration.getInt(PropertyKey.USER_BLOCK_WORKER_CLIENT_POOL_SIZE_MAX),
Configuration.getLong(PropertyKey.USER_BLOCK_WORKER_CLIENT_POOL_GC_THRESHOLD_MS));
if (mBlockWorkerClientPools.putIfAbsent(rpcAddress, pool) != null) {
pool.close();
}
}
if (!mBlockWorkerClientHeartbeatPools.containsKey(rpcAddress)) {
BlockWorkerThriftClientPool pool = new BlockWorkerThriftClientPool(mParentSubject, rpcAddress,
Configuration.getInt(PropertyKey.USER_BLOCK_WORKER_CLIENT_POOL_SIZE_MAX),
Configuration.getLong(PropertyKey.USER_BLOCK_WORKER_CLIENT_POOL_GC_THRESHOLD_MS));
if (mBlockWorkerClientHeartbeatPools.putIfAbsent(rpcAddress, pool) != null) {
pool.close();
}
}
return BlockWorkerClient.Factory.create(mBlockWorkerClientPools.get(rpcAddress),
mBlockWorkerClientHeartbeatPools.get(rpcAddress), address, sessionId);
}
/**
* Acquires a netty channel from the channel pools. If there is no available client instance
* available in the pool, it tries to create a new one. And an exception is thrown if it fails to
* create a new one.
*
* @param workerNetAddress the network address of the channel
* @return the acquired netty channel
*/
public Channel acquireNettyChannel(final WorkerNetAddress workerNetAddress) {
SocketAddress address = NetworkAddressUtils.getDataPortSocketAddress(workerNetAddress);
if (!mNettyChannelPools.containsKey(address)) {
Bootstrap bs = NettyClient.createClientBootstrap(address);
bs.remoteAddress(address);
NettyChannelPool pool = new NettyChannelPool(bs,
Configuration.getInt(PropertyKey.USER_NETWORK_NETTY_CHANNEL_POOL_SIZE_MAX),
Configuration.getLong(PropertyKey.USER_NETWORK_NETTY_CHANNEL_POOL_GC_THRESHOLD_MS));
if (mNettyChannelPools.putIfAbsent(address, pool) != null) {
// This can happen if this function is called concurrently.
pool.close();
}
}
try {
return mNettyChannelPools.get(address).acquire();
} catch (InterruptedException e) {
throw new UnavailableException(e);
}
}
/**
* Releases a netty channel to the channel pools.
*
* @param workerNetAddress the address of the channel
* @param channel the channel to release
*/
public void releaseNettyChannel(WorkerNetAddress workerNetAddress, Channel channel) {
SocketAddress address = NetworkAddressUtils.getDataPortSocketAddress(workerNetAddress);
Preconditions.checkArgument(mNettyChannelPools.containsKey(address));
mNettyChannelPools.get(address).release(channel);
}
/**
* @return if there is a local worker running the same machine
*/
public synchronized boolean hasLocalWorker() {
if (!mLocalWorkerInitialized) {
initializeLocalWorker();
}
return mLocalWorker != null;
}
/**
* @return a local worker running the same machine, or null if none is found
*/
public synchronized WorkerNetAddress getLocalWorker() {
if (!mLocalWorkerInitialized) {
initializeLocalWorker();
}
return mLocalWorker;
}
private void initializeLocalWorker() {
List<WorkerNetAddress> addresses = getWorkerAddresses();
if (!addresses.isEmpty()) {
if (addresses.get(0).getHost().equals(NetworkAddressUtils.getClientHostName())) {
mLocalWorker = addresses.get(0);
}
}
mLocalWorkerInitialized = true;
}
/**
 * @return if there are any local workers, the returned list will ONLY contain the local workers,
 *         otherwise a list of all remote workers will be returned
 */
private List<WorkerNetAddress> getWorkerAddresses() {
    List<WorkerInfo> workerInfos;
    BlockMasterClient client = mBlockMasterClientPool.acquire();
    try {
        workerInfos = client.getWorkerInfoList();
    } finally {
        // Always return the client to the pool, even when the RPC fails.
        mBlockMasterClientPool.release(client);
    }
    if (workerInfos.isEmpty()) {
        throw new UnavailableException(ExceptionMessage.NO_WORKER_AVAILABLE.getMessage());
    }
    // Collect every worker's net address, tracking the local ones separately so
    // they can be preferred when present.
    String localHostname = NetworkAddressUtils.getClientHostName();
    List<WorkerNetAddress> allWorkers = new ArrayList<>();
    List<WorkerNetAddress> localWorkers = new ArrayList<>();
    for (WorkerInfo workerInfo : workerInfos) {
        WorkerNetAddress address = workerInfo.getAddress();
        allWorkers.add(address);
        if (address.getHost().equals(localHostname)) {
            localWorkers.add(address);
        }
    }
    return localWorkers.isEmpty() ? allWorkers : localWorkers;
}
/**
 * Holder for client-side gauges exposed by FileSystemContext; each gauge sums
 * the sizes of the corresponding pools on the singleton INSTANCE.
 */
@ThreadSafe
private static final class Metrics {
    private static void initializeGauges() {
        // Total number of open netty channels across all channel pools.
        MetricsSystem.registerGaugeIfAbsent(MetricsSystem.getClientMetricName("NettyConnectionsOpen"),
            new Gauge<Long>() {
                @Override
                public Long getValue() {
                    long total = 0;
                    for (NettyChannelPool channelPool : INSTANCE.mNettyChannelPools.values()) {
                        total += channelPool.size();
                    }
                    return total;
                }
            });
        // Total number of open block worker thrift clients across all pools.
        MetricsSystem.registerGaugeIfAbsent(MetricsSystem.getClientMetricName("BlockWorkerClientsOpen"),
            new Gauge<Long>() {
                @Override
                public Long getValue() {
                    long total = 0;
                    for (BlockWorkerThriftClientPool clientPool
                        : INSTANCE.mBlockWorkerClientPools.values()) {
                        total += clientPool.size();
                    }
                    return total;
                }
            });
        // Total number of open heartbeat clients across all heartbeat pools.
        MetricsSystem.registerGaugeIfAbsent(
            MetricsSystem.getClientMetricName("BlockWorkerHeartbeatClientsOpen"),
            new Gauge<Long>() {
                @Override
                public Long getValue() {
                    long total = 0;
                    for (BlockWorkerThriftClientPool heartbeatPool
                        : INSTANCE.mBlockWorkerClientHeartbeatPools.values()) {
                        total += heartbeatPool.size();
                    }
                    return total;
                }
            });
    }

    private Metrics() {} // prevent instantiation
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE
* file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file
* to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.singhasdev.calamus.app.sentimentanalysis;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.processor.Processor;
import org.apache.kafka.streams.processor.ProcessorContext;
import org.apache.kafka.streams.processor.ProcessorSupplier;
import org.apache.kafka.streams.processor.TopologyBuilder;
import org.apache.kafka.streams.state.KeyValueIterator;
import org.apache.kafka.streams.state.KeyValueStore;
import org.apache.kafka.streams.state.Stores;
import org.apache.log4j.Logger;
import java.io.IOException;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.List;
import java.util.Locale;
import java.util.Properties;
/**
 * Kafka Streams application that reads raw tweet JSON from the "test" topic,
 * scores each tweet for positive/negative sentiment using word lists, buffers
 * the scored tweets in a local state store, and periodically forwards them to
 * the "test-output" topic.
 */
public class SentimentAnalyzer {

  /** Handle to the running topology so the shutdown hook can close it. */
  private static KafkaStreams STREAMS;

  // FIX: the original used Logger.getLogger(SentimentAnalyzer.class.getClass()),
  // which resolves to java.lang.Class and mis-attributes every log record.
  private static final Logger LOG = Logger.getLogger(SentimentAnalyzer.class);

  /**
   * Supplies processor instances that compute a sentiment score per tweet and
   * stash the result (keyed by tweet timestamp) into the "SentimentAnalysis"
   * state store for later forwarding.
   */
  private static class CalculateSentiment implements ProcessorSupplier<String, String> {

    @Override
    public Processor<String, String> get() {
      return new Processor<String, String>() {

        // One mapper per processor instance; each instance is used by a single
        // stream thread, so no synchronization is needed.
        private final ObjectMapper mapper = new ObjectMapper();
        // Twitter "createdAt" parser; SimpleDateFormat is not thread-safe, but
        // like the mapper it is confined to this processor's stream thread.
        private final DateFormat inputFormat =
            new SimpleDateFormat("EEE MMM dd HH:mm:ss ZZZZZ yyyy", Locale.ENGLISH);

        private ProcessorContext context;
        private KeyValueStore<String, String> kvStore;

        @Override
        @SuppressWarnings("unchecked")
        public void init(ProcessorContext context) {
          this.context = context;
          // Invoke punctuate() roughly once per second to drain the store.
          this.context.schedule(1000);
          this.kvStore = (KeyValueStore<String, String>) context.getStateStore("SentimentAnalysis");
          inputFormat.setLenient(true);
        }

        /**
         * Parses one tweet record, computes the fraction of positive and
         * negative words in its text, and stores the scored tweet as JSON.
         * Malformed records are logged and skipped.
         */
        @Override
        public void process(String dummy, String line) {
          try {
            JsonNode root = mapper.readValue(line, JsonNode.class);
            final JsonNode payload = root.get("payload");
            if (payload != null) {
              JsonNode tweetIdJson = payload.get("id");
              JsonNode createdAtJson = payload.get("createdAt");
              JsonNode favoriteCountJson = payload.get("favoriteCount");
              JsonNode textNodeJson = payload.get("text");
              JsonNode userNodeJson = payload.get("user");
              if (tweetIdJson != null && createdAtJson != null && favoriteCountJson != null
                  && textNodeJson != null && userNodeJson != null) {
                long tweetId = tweetIdJson.longValue();
                long timestamp = inputFormat.parse(createdAtJson.asText()).getTime();
                long favoriteCount = favoriteCountJson.longValue();
                String tweet = textNodeJson.textValue();
                long userId = userNodeJson.get("id").longValue();
                String userName = userNodeJson.get("name").textValue();

                // Keep only letters/whitespace, lower-case, then remove stop words.
                String modifiedText =
                    tweet.replaceAll("[^a-zA-Z\\s]", "").trim().toLowerCase(Locale.getDefault());
                for (String word : StopWords.getWords()) {
                  modifiedText = modifiedText.replaceAll("\\b" + word + "\\b", "");
                }

                // Count positive and negative word hits in a single pass.
                // Note: "".split(" ") yields one empty token, so numWords >= 1
                // and the divisions below cannot divide by zero.
                String[] words = modifiedText.split(" ");
                int numWords = words.length;
                List<String> posWords = PositiveWords.getWords();
                List<String> negWords = NegativeWords.getWords();
                int numPosWords = 0;
                int numNegWords = 0;
                for (String word : words) {
                  if (posWords.contains(word)) {
                    numPosWords++;
                  }
                  if (negWords.contains(word)) {
                    numNegWords++;
                  }
                }
                float posPercent = (float) numPosWords / numWords;
                float negPercent = (float) numNegWords / numWords;

                TweetSentiment tweetSentiment = new TweetSentiment(tweetId, timestamp,
                    favoriteCount, tweet, userId, userName, posPercent, negPercent);
                System.out.println(tweetSentiment);
                // Key by timestamp so punctuate() can forward entries downstream.
                this.kvStore.put(Long.toString(timestamp), mapper.writeValueAsString(tweetSentiment));
              }
            }
          } catch (IOException ex) {
            LOG.error("IO error while processing tweets", ex);
          } catch (ParseException ex) {
            LOG.error("Parse exception while processing tweets", ex);
          }
          context.commit();
        }

        /** Drains the state store, forwarding every buffered scored tweet downstream. */
        @Override
        public void punctuate(long timestamp) {
          KeyValueIterator<String, String> iter = this.kvStore.all();
          LOG.debug("----------- " + timestamp + " ----------- ");
          while (iter.hasNext()) {
            KeyValue<String, String> entry = iter.next();
            context.forward(entry.key, entry.value);
          }
          // Store iterators hold resources and must be closed explicitly.
          iter.close();
        }

        @Override
        public void close() {
          this.kvStore.close();
        }
      };
    }
  }

  /**
   * Builds and starts the topology: "test" topic -> sentiment processor
   * (backed by the in-memory "SentimentAnalysis" store) -> "test-output" topic.
   */
  public static void main(String[] args) throws Exception {
    Properties props = new Properties();
    props.put(StreamsConfig.APPLICATION_ID_CONFIG, "sentiment-analyzer");
    props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
    props.put(StreamsConfig.ZOOKEEPER_CONNECT_CONFIG, "localhost:2181");
    props.put(StreamsConfig.KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
    props.put(StreamsConfig.VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());
    // setting offset reset to earliest so that we can re-run the demo code with the same pre-loaded data
    props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");

    TopologyBuilder builder = new TopologyBuilder();
    builder.addSource("Source", "test");
    builder.addProcessor("Process", new CalculateSentiment(), "Source");
    builder.addStateStore(Stores.create("SentimentAnalysis").withStringKeys().withStringValues().inMemory().build(), "Process");
    builder.addSink("Sink", "test-output", "Process");

    STREAMS = new KafkaStreams(builder, props);
    STREAMS.start();

    // Close the streams client cleanly on JVM shutdown.
    Runtime.getRuntime().addShutdownHook(new Thread("MirrorMakerShutdownHook") {
      @Override
      public void run() {
        System.out.println("Closing Calamus sentiment-analyzer.");
        STREAMS.close();
      }
    });
  }

  /**
   * Value object describing one scored tweet. Serialized to JSON via Jackson,
   * so the getter names define the output field names.
   */
  private static class TweetSentiment {
    private final long tweetId;
    private final long timestamp;
    private final long favoriteCount;
    private final String tweet;
    private final long userId;
    private final String userName;
    private final float positivePercentage;
    private final float negativePercentage;

    TweetSentiment(long tweetId, long timestamp, long favoriteCount, String tweet, long userId,
        String userName, float positivePercentage, float negativePercentage) {
      this.tweetId = tweetId;
      this.timestamp = timestamp;
      this.favoriteCount = favoriteCount;
      this.tweet = tweet;
      this.userId = userId;
      this.userName = userName;
      this.positivePercentage = positivePercentage;
      this.negativePercentage = negativePercentage;
    }

    public long getTweetId() {
      return tweetId;
    }

    public long getTimestamp() {
      return timestamp;
    }

    public long getFavoriteCount() {
      return favoriteCount;
    }

    public String getTweet() {
      return tweet;
    }

    public long getUserId() {
      return userId;
    }

    public String getUserName() {
      return userName;
    }

    public float getPositivePercentage() {
      return positivePercentage;
    }

    public float getNegativePercentage() {
      return negativePercentage;
    }

    @Override
    public String toString() {
      return "TweetID: " + tweetId +
          "\t Timestamp: " + timestamp +
          "\t FavoriteCount: " + favoriteCount +
          "\t Tweet: " + tweet +
          "\t UserID: " + userId +
          "\t UserName: " + userName +
          "\t PositivePercent: " + positivePercentage +
          "\t NegativePercent: " + negativePercentage;
    }
  }
}
| |
/*L
* Copyright SAIC, Ellumen and RSNA (CTP)
*
*
* Distributed under the OSI-approved BSD 3-Clause License.
* See http://ncip.github.com/national-biomedical-image-archive/LICENSE.txt for details.
*/
package gov.nih.nci.nbia.textsupport;
import java.io.Serializable;
/**
 * Text-search sub-document describing a Computed Tomography (CT) image and its
 * DICOM acquisition parameters. Identity is based solely on the {@code id} field.
 */
public class CTImageSubDoc implements Serializable
{
    /** Allows serialization of the domain objects. */
    private static final long serialVersionUID = 1234567890L;

    /** Primary-key identifier of the owning series. */
    private Integer seriesPKId;

    /** @return the series primary-key identifier */
    public Integer getSeriesPKId(){
        return seriesPKId;
    }

    /** Sets the series primary-key identifier. */
    public void setSeriesPKId(Integer seriesPKId){
        this.seriesPKId = seriesPKId;
    }

    /** Associated gov.nih.nci.ncia.domain.GeneralSeries sub-document. */
    private GeneralSeriesSubDoc generalSeries;

    /** @return the associated general-series sub-document */
    public GeneralSeriesSubDoc getGeneralSeries(){
        return generalSeries;
    }

    /** Sets the associated general-series sub-document. */
    public void setGeneralSeries(GeneralSeriesSubDoc generalSeries){
        this.generalSeries = generalSeries;
    }

    /** DICOM sequence identifying the anatomic region of interest in an instance. */
    private String anatomicRegionSeq;

    /** @return the anatomic region sequence */
    public String getAnatomicRegionSeq(){
        return anatomicRegionSeq;
    }

    /** Sets the anatomic region sequence. */
    public void setAnatomicRegionSeq(String anatomicRegionSeq){
        this.anatomicRegionSeq = anatomicRegionSeq;
    }

    /** Convolution kernel or algorithm used to reconstruct the imaging data. */
    private String convolutionKernel;

    /** @return the convolution kernel */
    public String getConvolutionKernel(){
        return convolutionKernel;
    }

    /** Sets the convolution kernel. */
    public void setConvolutionKernel(String convolutionKernel){
        this.convolutionKernel = convolutionKernel;
    }

    /** Ratio of the table feed per rotation to the total collimation width. */
    private Double CTPitchFactor;

    /** @return the CT pitch factor */
    public Double getCTPitchFactor(){
        return CTPitchFactor;
    }

    /** Sets the CT pitch factor. */
    public void setCTPitchFactor(Double CTPitchFactor){
        this.CTPitchFactor = CTPitchFactor;
    }

    /** Diameter (mm) of the region over which data was collected (DICOM tag). */
    private Double dataCollectionDiameter;

    /** @return the data collection diameter in millimeters */
    public Double getDataCollectionDiameter(){
        return dataCollectionDiameter;
    }

    /** Sets the data collection diameter in millimeters. */
    public void setDataCollectionDiameter(Double dataCollectionDiameter){
        this.dataCollectionDiameter = dataCollectionDiameter;
    }

    /** Exposure in milliamp-seconds, calculated from time and x-ray tube current. */
    private Integer exposure;

    /** @return the exposure in milliamp-seconds */
    public Integer getExposure(){
        return exposure;
    }

    /** Sets the exposure in milliamp-seconds. */
    public void setExposure(Integer exposure){
        this.exposure = exposure;
    }

    /** Exposure expressed in microamp-seconds. */
    private Integer exposureInMicroAs;

    /** @return the exposure in microamp-seconds */
    public Integer getExposureInMicroAs(){
        return exposureInMicroAs;
    }

    /** Sets the exposure in microamp-seconds. */
    public void setExposureInMicroAs(Integer exposureInMicroAs){
        this.exposureInMicroAs = exposureInMicroAs;
    }

    /** Duration of the x-ray exposure in milliseconds. */
    private Integer exposureTime;

    /** @return the exposure time in milliseconds */
    public Integer getExposureTime(){
        return exposureTime;
    }

    /** Sets the exposure time in milliseconds. */
    public void setExposureTime(Integer exposureTime){
        this.exposureTime = exposureTime;
    }

    /**
     * Nominal angle of tilt of the scanning gantry, in degrees. This value is
     * not intended for mathematical computations.
     */
    private Double gantryDetectorTilt;

    /** @return the gantry/detector tilt in degrees */
    public Double getGantryDetectorTilt(){
        return gantryDetectorTilt;
    }

    /** Sets the gantry/detector tilt in degrees. */
    public void setGantryDetectorTilt(Double gantryDetectorTilt){
        this.gantryDetectorTilt = gantryDetectorTilt;
    }

    /** Identifier of this sub-document; the sole basis of equals/hashCode. */
    private Integer id;

    /** @return the identifier */
    public Integer getId(){
        return id;
    }

    /** Sets the identifier. */
    public void setId(Integer id){
        this.id = id;
    }

    /** Kilovoltage Peak (kVp) reading as recorded in a DICOM tag. */
    private Double KVP;

    /** @return the kilovoltage peak reading */
    public Double getKVP(){
        return KVP;
    }

    /** Sets the kilovoltage peak reading. */
    public void setKVP(Double KVP){
        this.KVP = KVP;
    }

    /**
     * Diameter (mm) of the region from within which data were used to
     * reconstruct the image; data and patient anatomy may exist outside it.
     */
    private Double reconstructionDiameter;

    /** @return the reconstruction diameter in millimeters */
    public Double getReconstructionDiameter(){
        return reconstructionDiameter;
    }

    /** Sets the reconstruction diameter in millimeters. */
    public void setReconstructionDiameter(Double reconstructionDiameter){
        this.reconstructionDiameter = reconstructionDiameter;
    }

    /** Time in seconds for one complete turn of the source around the gantry orbit. */
    private Double revolutionTime;

    /** @return the revolution time in seconds */
    public Double getRevolutionTime(){
        return revolutionTime;
    }

    /** Sets the revolution time in seconds. */
    public void setRevolutionTime(Double revolutionTime){
        this.revolutionTime = revolutionTime;
    }

    /** Type of scan used, as specified in a DICOM tag. */
    private String scanOptions;

    /** @return the scan options */
    public String getScanOptions(){
        return scanOptions;
    }

    /** Sets the scan options. */
    public void setScanOptions(String scanOptions){
        this.scanOptions = scanOptions;
    }

    /** Width (mm) of a single row of acquired data. */
    private Double singleCollimationWidth;

    /** @return the single collimation width in millimeters */
    public Double getSingleCollimationWidth(){
        return singleCollimationWidth;
    }

    /** Sets the single collimation width in millimeters. */
    public void setSingleCollimationWidth(Double singleCollimationWidth){
        this.singleCollimationWidth = singleCollimationWidth;
    }

    /** Table motion (mm) during one complete revolution of the source. */
    private Double tableFeedPerRotation;

    /** @return the table feed per rotation in millimeters */
    public Double getTableFeedPerRotation(){
        return tableFeedPerRotation;
    }

    /** Sets the table feed per rotation in millimeters. */
    public void setTableFeedPerRotation(Double tableFeedPerRotation){
        this.tableFeedPerRotation = tableFeedPerRotation;
    }

    /** Distance (mm) the table moves per second during data acquisition. */
    private Double tableSpeed;

    /** @return the table speed in millimeters per second */
    public Double getTableSpeed(){
        return tableSpeed;
    }

    /** Sets the table speed in millimeters per second. */
    public void setTableSpeed(Double tableSpeed){
        this.tableSpeed = tableSpeed;
    }

    /** Width (mm) of the total collimation over the area of active x-ray detection. */
    private Double totalCollimationWidth;

    /** @return the total collimation width in millimeters */
    public Double getTotalCollimationWidth(){
        return totalCollimationWidth;
    }

    /** Sets the total collimation width in millimeters. */
    public void setTotalCollimationWidth(Double totalCollimationWidth){
        this.totalCollimationWidth = totalCollimationWidth;
    }

    /** X-ray tube current in milliamps, as specified in a DICOM tag. */
    private Integer XRayTubeCurrent;

    /** @return the x-ray tube current in milliamps */
    public Integer getXRayTubeCurrent(){
        return XRayTubeCurrent;
    }

    /** Sets the x-ray tube current in milliamps. */
    public void setXRayTubeCurrent(Integer XRayTubeCurrent){
        this.XRayTubeCurrent = XRayTubeCurrent;
    }

    /** Associated gov.nih.nci.ncia.domain.GeneralImage sub-document. */
    private GeneralImageSubDoc generalImage;

    /** @return the associated general-image sub-document */
    public GeneralImageSubDoc getGeneralImage(){
        return generalImage;
    }

    /** Sets the associated general-image sub-document. */
    public void setGeneralImage(GeneralImageSubDoc generalImage){
        this.generalImage = generalImage;
    }

    /**
     * Compares {@code obj} to this instance; two instances are equal when they
     * have the same non-null {@code id}.
     * FIX: a same-reference check is performed first — the previous version
     * returned false for {@code x.equals(x)} when {@code id} was null,
     * violating the reflexivity requirement of the equals contract.
     *
     * @param obj the object to compare against
     */
    @Override
    public boolean equals(Object obj)
    {
        if (this == obj) {
            return true;
        }
        if (obj instanceof CTImageSubDoc)
        {
            CTImageSubDoc other = (CTImageSubDoc) obj;
            return getId() != null && getId().equals(other.getId());
        }
        return false;
    }

    /**
     * Returns a hash code derived from the primary key, or 0 when unset —
     * consistent with {@link #equals(Object)}.
     */
    @Override
    public int hashCode()
    {
        if (getId() != null) {
            return getId().hashCode();
        }
        return 0;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package java.util.logging;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.channels.FileChannel;
import java.nio.channels.FileLock;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.Hashtable;
import org.apache.harmony.logging.internal.nls.Messages;
/**
* A <code>Handler</code> writes description of logging event into a specified
* file or a rotating set of files.
* <p>
* If a set of files are used, when a given amount of data has been written to
* one file, this file is closed, and another file is opened. The name of these
* files are generated by given name pattern, see below for details.
* </p>
* <p>
* By default the IO buffering mechanism is enabled, but when each log record is
* complete, it is flushed out.
* </p>
* <p>
* <code>XMLFormatter</code> is default formatter for <code>FileHandler</code>.
* </p>
* <p>
* <code>MemoryHandler</code> will read following <code>LogManager</code>
* properties for initialization, if given properties are not defined or has
* invalid values, default value will be used.
* <ul>
* <li>java.util.logging.FileHandler.level specifies the level for this
* <code>Handler</code>, defaults to <code>Level.ALL</code>.</li>
* <li>java.util.logging.FileHandler.filter specifies the <code>Filter</code>
* class name, defaults to no <code>Filter</code>.</li>
* <li>java.util.logging.FileHandler.formatter specifies the
* <code>Formatter</code> class, defaults to
* <code>java.util.logging.XMLFormatter</code>.</li>
* <li>java.util.logging.FileHandler.encoding specifies the character set
* encoding name, defaults to the default platform encoding.</li>
* <li>java.util.logging.FileHandler.limit specifies an maximum bytes to write
* to any one file, defaults to zero, which means no limit.</li>
* <li>java.util.logging.FileHandler.count specifies how many output files to
* rotate, defaults to 1.</li>
* <li>java.util.logging.FileHandler.pattern specifies name pattern for the
* output files. See below for details. Defaults to "%h/java%u.log".</li>
* <li>java.util.logging.FileHandler.append specifies whether this
* <code>FileHandler</code> should append onto existing files, defaults to
* false.</li>
* </ul>
* </p>
* <p>
* Name pattern is a string that may includes some special sub-strings, which
* will be replaced to generate output files:
* <ul>
* <li>"/" represents the local pathname separator</li>
* <li>"%t" represents the system temporary directory</li>
* <li>"%h" represents the home directory of current user, which is specified
* by "user.home" system property</li>
* <li>"%g" represents the generation number to distinguish rotated logs</li>
* <li>"%u" represents a unique number to resolve conflicts</li>
* <li>"%%" represents percent sign character '%'</li>
* </ul>
* </p>
* Normally, the generation numbers are not larger than given file count and
* follow the sequence 0, 1, 2.... If the file count is larger than one, but the
* generation field("%g") has not been specified in the pattern, then the
* generation number after a dot will be added to the end of the file name,
* </p>
* <p>
* The "%u" unique field is used to avoid conflicts and set to 0 at first. If
* one <code>FileHandler</code> tries to open the filename which is currently
* in use by another process, it will repeatedly increment the unique number
* field and try again. If the "%u" component has not been included in the file
* name pattern and some contention on a file does occur then a unique numerical
* value will be added to the end of the filename in question immediately to the
* right of a dot. The unique IDs for avoiding conflicts is only guaranteed to
* work reliably when using a local disk file system.
* </p>
*
*/
public class FileHandler extends StreamHandler {
// Extension appended to the log file name to form the process lock file.
private static final String LCK_EXT = ".lck"; //$NON-NLS-1$
// Defaults used when the corresponding LogManager property is absent/invalid.
private static final int DEFAULT_COUNT = 1;
private static final int DEFAULT_LIMIT = 0;
private static final boolean DEFAULT_APPEND = false;
private static final String DEFAULT_PATTERN = "%h/java%u.log"; //$NON-NLS-1$
// All file locks held by this process, keyed by absolute file name; guarded by
// synchronizing on the table itself (see initOutputFiles).
private static final Hashtable<String, FileLock> allLocks = new Hashtable<String, FileLock>();
// Number of files the output rotates through.
private int count;
// Size limit in bytes of each log file (0 means no limit).
private int limit;
// Whether the FileHandler should open an existing file for output in append
// mode.
private boolean append;
// Name pattern for the output file (may contain %t, %h, %g, %u, %%).
private String pattern;
// Cached LogManager instance used to read configuration properties.
private LogManager manager;
// Output stream that also tracks the number of bytes written to the file.
private MeasureOutputStream output;
// The rotation file set, indexed by generation number.
private File[] files;
// Lock held on the current output's ".lck" file.
FileLock lock = null;
// Absolute name of the current output file.
String fileName = null;
// Unique ID used to resolve file-name conflicts with other processes;
// incremented from -1 before the first attempt (so the first try uses 0).
int uniqueID = -1;
/**
 * Constructs a <code>FileHandler</code> using <code>LogManager</code>
 * properties or their default values.
 *
 * @throws IOException
 *             if any IO exception happened
 * @throws SecurityException
 *             if a security manager exists and it determines that the caller
 *             does not have the required permissions to control this handler;
 *             required permissions include
 *             <code>LogPermission("control")</code> and other permissions
 *             like <code>FilePermission("write")</code>, etc.
 */
public FileHandler() throws IOException {
    // All-null arguments mean: read every setting from LogManager properties.
    init(null, null, null, null);
}
// Shared initialization path for every constructor: verifies the caller's
// permission, then reads configuration and opens the output file set.
private void init(String patternArg, Boolean appendArg, Integer limitArg, Integer countArg)
        throws IOException {
    // Throws SecurityException when the caller lacks LogPermission("control").
    manager = LogManager.getLogManager();
    manager.checkAccess();
    initProperties(patternArg, appendArg, limitArg, countArg);
    initOutputFiles();
}
/**
 * Opens the rotation file set and claims an exclusive ".lck" lock file for the
 * chosen base name. If the name is already locked — by this process (tracked
 * in {@code allLocks}) or by another process (detected via
 * {@code FileChannel.tryLock}) — the unique ID is incremented and the whole
 * attempt is retried with the next candidate name.
 */
private void initOutputFiles() throws FileNotFoundException, IOException {
    while (true) {
        // Try to find a unique file name that is not locked by another process.
        uniqueID++;
        // FIXME: improve performance here
        for (int generation = 0; generation < count; generation++) {
            // Cache all file names of this generation set for rotation use.
            files[generation] = new File(parseFileName(generation));
        }
        fileName = files[0].getAbsolutePath();
        synchronized (allLocks) {
            /*
             * If the current process already holds the lock for this fileName,
             * continue to the next candidate name.
             */
            if (null != allLocks.get(fileName)) {
                continue;
            }
            // Rotate existing files before (re)opening generation 0, unless we
            // are appending to a file still under the size limit.
            if (files[0].exists()
                    && (!append || files[0].length() >= limit)) {
                for (int i = count - 1; i > 0; i--) {
                    if (files[i].exists()) {
                        files[i].delete();
                    }
                    files[i - 1].renameTo(files[i]);
                }
            }
            FileOutputStream fileStream = new FileOutputStream(fileName
                    + LCK_EXT);
            FileChannel channel = fileStream.getChannel();
            /*
             * If locking is unsupported and an IOException is thrown, just let
             * the IOException propagate and exit; otherwise this would loop
             * forever.
             */
            lock = channel.tryLock();
            if (null == lock) {
                // Another process holds the lock; clean up and try the next name.
                try {
                    fileStream.close();
                } catch (Exception e) {
                    // ignore
                }
                continue;
            }
            allLocks.put(fileName, lock);
            break;
        }
    }
    // Wrap the stream so the handler can measure bytes written (for rotation).
    output = new MeasureOutputStream(new BufferedOutputStream(
            new FileOutputStream(fileName, append)), files[0].length());
    setOutputStream(output);
}
// Resolves the handler configuration: explicit constructor arguments win,
// otherwise the corresponding LogManager property is consulted, otherwise the
// documented default applies. Out-of-range count/limit values fall back to
// their defaults.
private void initProperties(String p, Boolean a, Integer l, Integer c) {
    super.initProperties("ALL", null, "java.util.logging.XMLFormatter", //$NON-NLS-1$//$NON-NLS-2$
            null);
    String className = this.getClass().getName();
    if (p != null) {
        pattern = p;
    } else {
        pattern = getStringProperty(className + ".pattern", DEFAULT_PATTERN); //$NON-NLS-1$
    }
    if (null == pattern || "".equals(pattern)) { //$NON-NLS-1$
        // logging.19=Pattern cannot be empty
        throw new NullPointerException(Messages.getString("logging.19")); //$NON-NLS-1$
    }
    if (a != null) {
        append = a.booleanValue();
    } else {
        append = getBooleanProperty(className + ".append", DEFAULT_APPEND); //$NON-NLS-1$
    }
    if (c != null) {
        count = c.intValue();
    } else {
        count = getIntProperty(className + ".count", DEFAULT_COUNT); //$NON-NLS-1$
    }
    if (l != null) {
        limit = l.intValue();
    } else {
        limit = getIntProperty(className + ".limit", DEFAULT_LIMIT); //$NON-NLS-1$
    }
    // Clamp invalid values back to the defaults.
    if (count < 1) {
        count = DEFAULT_COUNT;
    }
    if (limit < 0) {
        limit = DEFAULT_LIMIT;
    }
    files = new File[count];
}
// Rotates the file set when the size limit is reached: closes the current
// output, shifts every generation up by one (dropping the oldest), and
// reopens generation 0 as the new output file.
void findNextGeneration() {
    super.close();
    for (int generation = count - 1; generation > 0; generation--) {
        File target = files[generation];
        if (target.exists()) {
            target.delete();
        }
        files[generation - 1].renameTo(target);
    }
    try {
        output = new MeasureOutputStream(new BufferedOutputStream(
                new FileOutputStream(files[0])));
    } catch (FileNotFoundException e1) {
        // logging.1A=Error happened when open log file.
        this.getErrorManager().error(Messages.getString("logging.1A"), //$NON-NLS-1$
                e1, ErrorManager.OPEN_FAILURE);
    }
    setOutputStream(output);
}
/**
 * Transforms the pattern into a valid file name, replacing any of the special
 * sub-strings (%t, %h, %g, %u, %%) and appending the generation and unique ID
 * when the pattern did not reference them explicitly.
 *
 * @param gen
 *            generation of this file
 * @return transformed filename ready for use
 */
private String parseFileName(int gen) {
    int cur = 0;
    int next = 0;
    boolean hasUniqueID = false;
    boolean hasGeneration = false;
    // TODO privilege code?
    String tempPath = System.getProperty("java.io.tmpdir"); //$NON-NLS-1$
    boolean tempPathHasSepEnd = (tempPath == null ? false : tempPath
            .endsWith(File.separator));
    String homePath = System.getProperty("user.home"); //$NON-NLS-1$
    boolean homePathHasSepEnd = (homePath == null ? false : homePath
            .endsWith(File.separator));
    StringBuilder sb = new StringBuilder();
    // NOTE: this rewrites the pattern field in place; the replacement is
    // idempotent, so repeated calls are harmless.
    pattern = pattern.replace('/', File.separatorChar);
    char[] value = pattern.toCharArray();
    // Scan for '%' escapes; cur marks the start of the yet-uncopied literal text.
    while ((next = pattern.indexOf('%', cur)) >= 0) {
        if (++next < pattern.length()) {
            switch (value[next]) {
            case 'g':
                // Copy the literal run before '%', then the generation number.
                sb.append(value, cur, next - cur - 1).append(gen);
                hasGeneration = true;
                break;
            case 'u':
                sb.append(value, cur, next - cur - 1).append(uniqueID);
                hasUniqueID = true;
                break;
            case 't':
                /*
                 * we should probably try to do something cute here like
                 * lookahead for adjacent '/'
                 */
                sb.append(value, cur, next - cur - 1).append(tempPath);
                if (!tempPathHasSepEnd) {
                    sb.append(File.separator);
                }
                break;
            case 'h':
                sb.append(value, cur, next - cur - 1).append(homePath);
                if (!homePathHasSepEnd) {
                    sb.append(File.separator);
                }
                break;
            case '%':
                // "%%" collapses to a single literal percent sign.
                sb.append(value, cur, next - cur - 1).append('%');
                break;
            default:
                // Unknown escape: copy it through verbatim (including the '%').
                sb.append(value, cur, next - cur);
            }
            cur = ++next;
        } else {
            // Trailing lone '%': fail silently.
        }
    }
    // Copy the remaining literal tail of the pattern.
    sb.append(value, cur, value.length - cur);
    // Append the generation when rotating without an explicit "%g" ...
    if (!hasGeneration && count > 1) {
        sb.append(".").append(gen); //$NON-NLS-1$
    }
    // ... and the unique ID when conflicts occurred without an explicit "%u".
    if (!hasUniqueID && uniqueID > 0) {
        sb.append(".").append(uniqueID); //$NON-NLS-1$
    }
    return sb.toString();
}
// Reads a boolean LogManager property; any value other than "true"/"false"
// (case-insensitive), including an absent property, yields the default.
private boolean getBooleanProperty(String key, boolean defaultValue) {
    String property = manager.getProperty(key);
    if (property == null) {
        return defaultValue;
    }
    if ("true".equalsIgnoreCase(property)) { //$NON-NLS-1$
        return true;
    }
    if ("false".equalsIgnoreCase(property)) { //$NON-NLS-1$
        return false;
    }
    return defaultValue;
}
// Reads a String LogManager property, falling back to the default when unset.
private String getStringProperty(String key, String defaultValue) {
    String configured = manager.getProperty(key);
    if (configured == null) {
        return defaultValue;
    }
    return configured;
}
// Reads an int LogManager property; an absent or malformed value yields the
// default. FIX: catch the specific NumberFormatException that parseInt can
// throw rather than a blanket Exception, which could mask unrelated errors.
private int getIntProperty(String key, int defaultValue) {
    String property = manager.getProperty(key);
    if (property == null) {
        return defaultValue;
    }
    try {
        return Integer.parseInt(property);
    } catch (NumberFormatException ignored) {
        // Malformed configuration value; fall back to the default.
        return defaultValue;
    }
}
/**
 * Constructs a <code>FileHandler</code> that writes to the single file named
 * by the given pattern, with no size limit. Remaining configuration comes
 * from <code>LogManager</code> properties or their defaults.
 *
 * @param pattern
 *            the name pattern of the output file
 * @throws IOException
 *             if any IO exception happened
 * @throws SecurityException
 *             if a security manager exists and it determines that the caller
 *             does not have the required permissions to control this handler;
 *             required permissions include
 *             <code>LogPermission("control")</code> and other permissions
 *             like <code>FilePermission("write")</code>, etc.
 * @throws NullPointerException
 *             if the pattern is <code>null</code>.
 * @throws IllegalArgumentException
 *             if the pattern is empty.
 */
public FileHandler(String pattern) throws IOException {
    // isEmpty() also raises the documented NullPointerException on null.
    if (pattern.isEmpty()) {
        // logging.19=Pattern cannot be empty
        throw new IllegalArgumentException(Messages.getString("logging.19")); //$NON-NLS-1$
    }
    init(pattern, null, Integer.valueOf(DEFAULT_LIMIT),
            Integer.valueOf(DEFAULT_COUNT));
}
/**
* Construct a <code>FileHandler</code>, the given name pattern is used
* as output filename, the file limit is set to zero(i.e. no limit applies),
* the file count is initialized to one, and the value of
* <code>append</code> becomes the new instance's append mode. Other
* configuration is done using <code>LogManager</code> properties.
*
* This handler write to only one file and no amount limit.
*
* @param pattern
* the name pattern of output file
* @param append
* the append mode
* @throws IOException
* if any IO exception happened
* @throws SecurityException
* if security manager exists and it determines that caller does
* not have the required permissions to control this handler,
* required permissions include
* <code>LogPermission("control")</code> and other permission
* like <code>FilePermission("write")</code>, etc.
* @throws NullPointerException
* if the pattern is <code>null</code>.
* @throws IllegalArgumentException
* if the pattern is empty.
*/
public FileHandler(String pattern, boolean append) throws IOException {
if (pattern.equals("")) { //$NON-NLS-1$
throw new IllegalArgumentException(Messages.getString("logging.19")); //$NON-NLS-1$
}
init(pattern, Boolean.valueOf(append), Integer.valueOf(DEFAULT_LIMIT),
Integer.valueOf(DEFAULT_COUNT));
}
/**
* Construct a <code>FileHandler</code>, the given name pattern is used
* as output filename, the file limit is set to given limit argument, and
* the file count is set to given count argument, other configuration using
* <code>LogManager</code> properties or their default value
*
* This handler is configured to write to a rotating set of count files,
* when the limit of bytes has been written to one output file, another file
* will be opened instead.
*
* @param pattern
* the name pattern of output file
* @param limit
* the data amount limit in bytes of one output file, cannot less
* than one
* @param count
* the maximum number of files can be used, cannot less than one
* @throws IOException
* if any IO exception happened
* @throws SecurityException
* if security manager exists and it determines that caller does
* not have the required permissions to control this handler,
* required permissions include
* <code>LogPermission("control")</code> and other permission
* like <code>FilePermission("write")</code>, etc.
* @throws NullPointerException
* if pattern is <code>null</code>.
* @throws IllegalArgumentException
* if count<1, or limit<0
*/
public FileHandler(String pattern, int limit, int count) throws IOException {
if (pattern.equals("")) { //$NON-NLS-1$
throw new IllegalArgumentException(Messages.getString("logging.19")); //$NON-NLS-1$
}
if (limit < 0 || count < 1) {
// logging.1B=The limit and count property must be larger than 0 and
// 1, respectively
throw new IllegalArgumentException(Messages.getString("logging.1B")); //$NON-NLS-1$
}
init(pattern, null, Integer.valueOf(limit), Integer.valueOf(count));
}
/**
* Construct a <code>FileHandler</code>, the given name pattern is used
* as output filename, the file limit is set to given limit argument, the
* file count is set to given count argument, and the append mode is set to
* given append argument, other configuration using <code>LogManager</code>
* properties or their default value
*
* This handler is configured to write to a rotating set of count files,
* when the limit of bytes has been written to one output file, another file
* will be opened instead.
*
* @param pattern
* the name pattern of output file
* @param limit
* the data amount limit in bytes of one output file, cannot less
* than one
* @param count
* the maximum number of files can be used, cannot less than one
* @param append
* the append mode
* @throws IOException
* if any IO exception happened
* @throws SecurityException
* if security manager exists and it determines that caller does
* not have the required permissions to control this handler,
* required permissions include
* <code>LogPermission("control")</code> and other permission
* like <code>FilePermission("write")</code>, etc.
* @throws NullPointerException
* if pattern is <code>null</code>.
* @throws IllegalArgumentException
* if count<1, or limit<0
*/
public FileHandler(String pattern, int limit, int count, boolean append)
throws IOException {
if (pattern.equals("")) { //$NON-NLS-1$
throw new IllegalArgumentException(Messages.getString("logging.19")); //$NON-NLS-1$
}
if (limit < 0 || count < 1) {
// logging.1B=The limit and count property must be larger than 0 and
// 1, respectively
throw new IllegalArgumentException(Messages.getString("logging.1B")); //$NON-NLS-1$
}
init(pattern, Boolean.valueOf(append), Integer.valueOf(limit), Integer
.valueOf(count));
}
/**
* Flush and close all opened files.
*
* @throws SecurityException
* if security manager exists and it determines that caller does
* not have the required permissions to control this handler,
* required permissions include
* <code>LogPermission("control")</code> and other permission
* like <code>FilePermission("write")</code>, etc.
*/
@Override
public void close() {
// release locks
super.close();
allLocks.remove(fileName);
try {
FileChannel channel = lock.channel();
lock.release();
channel.close();
File file = new File(fileName + LCK_EXT);
file.delete();
} catch (IOException e) {
// ignore
}
}
/**
* Publish a <code>LogRecord</code>
*
* @param record
* the log record to be published
*/
@Override
public void publish(LogRecord record) {
super.publish(record);
flush();
if (limit > 0 && output.getLength() >= limit) {
AccessController.doPrivileged(new PrivilegedAction<Object>() {
public Object run() {
findNextGeneration();
return null;
}
});
}
}
/**
* This output stream use decorator pattern to add measure feature to
* OutputStream which can detect the total size(in bytes) of output, the
* initial size can be set
*/
static class MeasureOutputStream extends OutputStream {
OutputStream wrapped;
long length;
public MeasureOutputStream(OutputStream stream, long currentLength) {
wrapped = stream;
length = currentLength;
}
public MeasureOutputStream(OutputStream stream) {
this(stream, 0);
}
@Override
public void write(int oneByte) throws IOException {
wrapped.write(oneByte);
length++;
}
@Override
public void write(byte[] bytes) throws IOException {
wrapped.write(bytes);
length += bytes.length;
}
@Override
public void write(byte[] b, int off, int len) throws IOException {
wrapped.write(b, off, len);
length += len;
}
@Override
public void close() throws IOException {
wrapped.close();
}
@Override
public void flush() throws IOException {
wrapped.flush();
}
public long getLength() {
return length;
}
public void setLength(long newLength) {
length = newLength;
}
}
}
| |
/*
* Copyright 2016-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.lisp.msg.types.lcaf;
import io.netty.buffer.ByteBuf;
import org.onosproject.lisp.msg.exceptions.LispParseError;
import org.onosproject.lisp.msg.exceptions.LispReaderException;
import org.onosproject.lisp.msg.exceptions.LispWriterException;
import org.onosproject.lisp.msg.types.LispAddressReader;
import org.onosproject.lisp.msg.types.LispAddressWriter;
import org.onosproject.lisp.msg.types.LispAfiAddress;
import java.util.Objects;
import static com.google.common.base.MoreObjects.toStringHelper;
import static com.google.common.base.Preconditions.checkNotNull;
/**
* Instance ID type LCAF address class.
* <p>
* Instance ID type is defined in draft-ietf-lisp-lcaf-22
* https://tools.ietf.org/html/draft-ietf-lisp-lcaf-22#page-8
*
* <pre>
* {@literal
* 0 1 2 3
* 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
* +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
* | AFI = 16387 | Rsvd1 | Flags |
* +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
* | Type = 2 | IID mask-len | Length |
* +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
* | Instance ID |
* +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
* | AFI = x | Address ... |
* +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
* }</pre>
*/
public final class LispSegmentLcafAddress extends LispLcafAddress {
private final LispAfiAddress address;
private final int instanceId;
/**
* Initializes segment type LCAF address.
*
* @param idMaskLength Id mask length
* @param instanceId instance id
* @param address address
*/
private LispSegmentLcafAddress(byte idMaskLength, int instanceId,
LispAfiAddress address) {
super(LispCanonicalAddressFormatEnum.SEGMENT, idMaskLength);
this.address = address;
this.instanceId = instanceId;
}
/**
* Initializes segment type LCAF address.
*
* @param reserved1 reserved1
* @param idMaskLength ID mask length
* @param flag flag
* @param length length
* @param instanceId instance id
* @param address address
*/
private LispSegmentLcafAddress(byte reserved1, byte idMaskLength, byte flag,
short length, int instanceId,
LispAfiAddress address) {
super(LispCanonicalAddressFormatEnum.SEGMENT, reserved1,
idMaskLength, flag, length);
this.address = address;
this.instanceId = instanceId;
}
/**
* Obtains address.
*
* @return address
*/
public LispAfiAddress getAddress() {
return address;
}
/**
* Obtains instance id.
*
* @return instance id
*/
public int getInstanceId() {
return instanceId;
}
/**
* Obtains id mask length.
*
* @return id mask length
*/
public byte getIdMaskLength() {
return getReserved2();
}
@Override
public int hashCode() {
return Objects.hash(address, instanceId, getReserved2());
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj instanceof LispSegmentLcafAddress) {
final LispSegmentLcafAddress other = (LispSegmentLcafAddress) obj;
return Objects.equals(this.address, other.address) &&
Objects.equals(this.instanceId, other.instanceId) &&
Objects.equals(this.getReserved2(), other.getReserved2());
}
return false;
}
@Override
public String toString() {
return toStringHelper(this)
.add("address", address)
.add("instanceId", instanceId)
.add("idMaskLength", getReserved2())
.toString();
}
public static final class SegmentAddressBuilder
extends LcafAddressBuilder<SegmentAddressBuilder> {
private byte idMaskLength;
private LispAfiAddress address;
private int instanceId;
/**
* Sets identifier mask length.
*
* @param idMaskLength identifier mask length
* @return SegmentAddressBuilder object
*/
public SegmentAddressBuilder withIdMaskLength(byte idMaskLength) {
this.idMaskLength = idMaskLength;
return this;
}
/**
* Sets instance identifer.
*
* @param instanceId instance identifier
* @return SegmentAddressBuilder object
*/
public SegmentAddressBuilder withInstanceId(int instanceId) {
this.instanceId = instanceId;
return this;
}
/**
* Sets AFI address.
*
* @param address AFI address
* @return SegmentAddressBuilder object
*/
public SegmentAddressBuilder withAddress(LispAfiAddress address) {
this.address = address;
return this;
}
/**
* Builds LispSegmentLcafAddress instance.
*
* @return LispSegmentLcafAddress instance
*/
public LispSegmentLcafAddress build() {
checkNotNull(address, "Must specify an address");
return new LispSegmentLcafAddress(reserved1, idMaskLength, flag,
length, instanceId, address);
}
}
/**
* Segment LCAF address reader class.
*/
public static class SegmentLcafAddressReader
implements LispAddressReader<LispSegmentLcafAddress> {
@Override
public LispSegmentLcafAddress readFrom(ByteBuf byteBuf)
throws LispParseError, LispReaderException {
LispLcafAddress lcafAddress = LispLcafAddress.deserializeCommon(byteBuf);
byte idMaskLength = lcafAddress.getReserved2();
int instanceId = (int) byteBuf.readUnsignedInt();
LispAfiAddress address = new AfiAddressReader().readFrom(byteBuf);
return new SegmentAddressBuilder()
.withIdMaskLength(idMaskLength)
.withInstanceId(instanceId)
.withAddress(address)
.build();
}
}
/**
* Segment LCAF address writer class.
*/
public static class SegmentLcafAddressWriter
implements LispAddressWriter<LispSegmentLcafAddress> {
@Override
public void writeTo(ByteBuf byteBuf, LispSegmentLcafAddress address)
throws LispWriterException {
int lcafIndex = byteBuf.writerIndex();
LispLcafAddress.serializeCommon(byteBuf, address);
byteBuf.writeInt(address.getInstanceId());
new LispAfiAddress.AfiAddressWriter().writeTo(byteBuf, address.getAddress());
LispLcafAddress.updateLength(lcafIndex, byteBuf);
}
}
}
| |
/*
* Copyright (c) 2005-2011 Grameen Foundation USA
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*
* See also http://www.apache.org/licenses/LICENSE-2.0.html for an
* explanation of the license and how it is applied.
*/
package org.mifos.reports.pentaho.util;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.servlet.http.HttpServletRequest;
import org.joda.time.LocalDate;
import org.mifos.application.servicefacade.ApplicationContextProvider;
import org.mifos.application.servicefacade.CenterServiceFacade;
import org.mifos.application.servicefacade.LoanAccountServiceFacade;
import org.mifos.core.MifosRuntimeException;
import org.mifos.dto.domain.CustomerDetailDto;
import org.mifos.dto.domain.PersonnelDto;
import org.mifos.dto.screen.ChangeAccountStatusDto;
import org.mifos.reports.pentaho.params.AbstractPentahoParameter;
import org.mifos.reports.pentaho.params.PentahoDateParameter;
import org.mifos.reports.pentaho.params.PentahoInputParameter;
import org.mifos.reports.pentaho.params.PentahoMultiSelectParameter;
import org.mifos.reports.pentaho.params.PentahoSingleSelectParameter;
import org.pentaho.reporting.engine.classic.core.MasterReport;
import org.pentaho.reporting.engine.classic.core.ReportDataFactoryException;
import org.pentaho.reporting.engine.classic.core.parameters.DefaultListParameter;
import org.pentaho.reporting.engine.classic.core.parameters.DefaultParameterContext;
import org.pentaho.reporting.engine.classic.core.parameters.ListParameter;
import org.pentaho.reporting.engine.classic.core.parameters.ParameterContext;
import org.pentaho.reporting.engine.classic.core.parameters.ParameterDefinitionEntry;
import org.pentaho.reporting.engine.classic.core.parameters.ParameterValues;
import org.pentaho.reporting.engine.classic.core.parameters.PlainParameter;
import org.pentaho.reporting.engine.classic.core.parameters.ReportParameterDefinition;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class PentahoParamParser {
private final static Logger logger = LoggerFactory.getLogger(PentahoParamParser.class);
public List<AbstractPentahoParameter> parseReportParams(MasterReport report, HttpServletRequest request, Map<String, AbstractPentahoParameter> selectedValues, boolean update) {
ParameterContext paramContext = null;
try {
paramContext = new DefaultParameterContext(report);
ReportParameterDefinition paramDefinition = report.getParameterDefinition();
List<AbstractPentahoParameter> result = new ArrayList<AbstractPentahoParameter>();
for (ParameterDefinitionEntry paramDefEntry : paramDefinition.getParameterDefinitions()) {
result.add(parseParam(paramDefEntry, paramContext, selectedValues, update));
}
return result;
} catch (ReportDataFactoryException ex) {
throw new JNDIException("Problem with Pentaho Reports", request);
}catch (Exception ex) {
throw new MifosRuntimeException(ex);
} finally {
if (paramContext != null) {
try {
paramContext.close();
} catch (ReportDataFactoryException ex) {
logger.error("Exception while closing parameter context", ex);
}
}
}
}
public Object parseParamValue(AbstractPentahoParameter param, ParameterDefinitionEntry paramDefEntry)
throws ReflectionException {
Object result = null;
Class<?> clazz = paramDefEntry.getValueType();
if (param instanceof PentahoDateParameter) {
PentahoDateParameter dateParam = (PentahoDateParameter) param;
LocalDate date = dateParam.getDate();
Date javaDate = (date == null) ? null : date.toDateMidnight().toDate();
result = ReflectionUtil.parseDateToClass(javaDate, clazz);
} else if (param instanceof PentahoInputParameter) {
PentahoInputParameter inputParam = (PentahoInputParameter) param;
result = ReflectionUtil.parseStringToClass(inputParam.getValue(), clazz);
} else if (param instanceof PentahoSingleSelectParameter) {
PentahoSingleSelectParameter singleSelectParam = (PentahoSingleSelectParameter) param;
result = ReflectionUtil.parseStringToClass(singleSelectParam.getSelectedValue(), clazz);
} else if (param instanceof PentahoMultiSelectParameter) {
PentahoMultiSelectParameter multiSelectParam = (PentahoMultiSelectParameter) param;
Class<?> componentType = (clazz.isArray()) ? clazz.getComponentType() : clazz;
result = ReflectionUtil.parseStringsToClass(multiSelectParam.getSelectedValues(), componentType);
}
return result;
}
private AbstractPentahoParameter parseParam(ParameterDefinitionEntry paramDefEntry, ParameterContext paramContext, Map<String, AbstractPentahoParameter> selectedValues, boolean update)
throws ReportDataFactoryException {
AbstractPentahoParameter result = null;
if (paramDefEntry instanceof PlainParameter) {
result = parsePlainParameter((PlainParameter) paramDefEntry);
} else if (paramDefEntry instanceof ListParameter) {
result = parseListParameter((ListParameter) paramDefEntry, paramContext, selectedValues, update);
} else {
return null;
}
result.setMandatory(paramDefEntry.isMandatory());
result.setParamName(paramDefEntry.getName());
if (null!=paramDefEntry.getParameterAttribute(paramDefEntry.getParameterAttributeNamespaces()[0], "label", paramContext)) {
result.setLabelName(paramDefEntry.getParameterAttribute(paramDefEntry.getParameterAttributeNamespaces()[0], "label", paramContext).replace(":", ""));
} else {
result.setLabelName(paramDefEntry.getName());
}
return result;
}
private AbstractPentahoParameter parsePlainParameter(PlainParameter paramDefEntry) {
AbstractPentahoParameter result = null;
if (Date.class.isAssignableFrom(paramDefEntry.getValueType())) {
result = parseDateParameter(paramDefEntry);
} else {
result = parseInputParameter(paramDefEntry);
}
return result;
}
private PentahoDateParameter parseDateParameter(PlainParameter paramDefEntry) {
PentahoDateParameter result = new PentahoDateParameter();
Date defaultValue = (Date) paramDefEntry.getDefaultValue();
if (defaultValue != null) {
result.setDate(new LocalDate(defaultValue));
}
return result;
}
private PentahoInputParameter parseInputParameter(PlainParameter paramDefEntry) {
PentahoInputParameter result = new PentahoInputParameter();
String defaultValue = String.valueOf(paramDefEntry.getDefaultValue());
result.setValue(defaultValue);
return result;
}
private AbstractPentahoParameter parseListParameter(ListParameter paramDefEntry, ParameterContext paramContext,
Map<String, AbstractPentahoParameter> selectedValues, boolean update)
throws ReportDataFactoryException {
DefaultListParameter listParam = (DefaultListParameter) paramDefEntry;
AbstractPentahoParameter result;
if (listParam.isAllowMultiSelection()) {
result = parseMultiListParameter(paramDefEntry, paramContext);
} else {
result = parseSingleListParameter(paramDefEntry, paramContext, selectedValues, update);
}
return result;
}
private PentahoSingleSelectParameter parseSingleListParameter(ListParameter paramDefEntry,
ParameterContext paramContext,Map<String, AbstractPentahoParameter> selectedValues, boolean update) throws ReportDataFactoryException {
PentahoSingleSelectParameter result = new PentahoSingleSelectParameter();
Map<String, String> possibleValues = null;
if (update == false) {
possibleValues = getPossibleValuesForParam(paramDefEntry,
paramContext);
} else {
possibleValues = updatePossibleValuesForParam(paramDefEntry,
paramContext, selectedValues);
}
result.setPossibleValues(possibleValues);
Object defaultVal = paramDefEntry.getDefaultValue(paramContext);
if (defaultVal != null && possibleValues.containsKey(String.valueOf(defaultVal))) {
result.setSelectedValue(String.valueOf(defaultVal));
}
else if (defaultVal==null){
result.setSelectedValue(String.valueOf(-1));
}
return result;
}
private PentahoMultiSelectParameter parseMultiListParameter(ListParameter paramDefEntry,
ParameterContext paramContext) throws ReportDataFactoryException {
PentahoMultiSelectParameter result = new PentahoMultiSelectParameter();
Map<String, String> possibleValues = getPossibleValuesForParam(paramDefEntry, paramContext);
result.setPossibleValuesOptions(possibleValues);
return result;
}
private Map<String, String> getPossibleValuesForParam(ListParameter paramDefEntry, ParameterContext paramContext)
throws ReportDataFactoryException {
ParameterValues paramValues = paramDefEntry.getValues(paramContext);
Map<String, String> result = new HashMap<String, String>();
for (int i = 0; i < paramValues.getRowCount(); i++) {
String key = String.valueOf(paramValues.getKeyValue(i));
String value = String.valueOf(paramValues.getTextValue(i));
result.put(key, value);
}
return result;
}
private Map<String, String> updatePossibleValuesForParam(ListParameter paramDefEntry,
ParameterContext paramContext, Map<String, AbstractPentahoParameter> selectedValues)
throws ReportDataFactoryException {
Map<String, String> result = new HashMap<String, String>();
String id;
if (parameterIsOffice(paramDefEntry.getName())) {
ParameterValues paramValues = paramDefEntry.getValues(paramContext);
for (int i = 0; i < paramValues.getRowCount(); i++) {
String key = String.valueOf(paramValues.getKeyValue(i));
String value = String.valueOf(paramValues.getTextValue(i));
result.put(key, value);
}
} else if (parameterIsCenter(paramDefEntry.getName())) {
String keyValue = searchKey("(.*officer$)|(.*officer_id$)", selectedValues,
paramDefEntry.getName());
if (!keyValue.equals("")) {
id = (String) selectedValues.get(keyValue).getParamValue();
if (!id.equals("-1")) {
List<CustomerDetailDto> customerList = ApplicationContextProvider
.getBean(CenterServiceFacade.class).retrieveCustomersUnderUser(new Short(id));
for (CustomerDetailDto office : customerList) {
String key = office.getCustomerId().toString();
String value = office.getDisplayName();
result.put(key, value);
}
} else {
ParameterValues paramValues = paramDefEntry.getValues(paramContext);
for (int i = 0; i < paramValues.getRowCount(); i++) {
String key = String.valueOf(paramValues.getKeyValue(i));
String value = String.valueOf(paramValues.getTextValue(i));
result.put(key, value);
}
}
} else {
keyValue = searchKey("(selected_office$)|(.*BRANCH_NAME.*)", selectedValues,
paramDefEntry.getName());
if (!keyValue.equals("")) {
id = (String) selectedValues.get(keyValue).getParamValue();
if (!id.equals("-1")) {
List<CustomerDetailDto> customerList = ApplicationContextProvider
.getBean(CenterServiceFacade.class).retrieveCustomersUnderBranch(new Short(id));
for (CustomerDetailDto office : customerList) {
String key = office.getCustomerId().toString();
String value = office.getDisplayName();
result.put(key, value);
}
} else {
ParameterValues paramValues = paramDefEntry.getValues(paramContext);
for (int i = 0; i < paramValues.getRowCount(); i++) {
String key = String.valueOf(paramValues.getKeyValue(i));
String value = String.valueOf(paramValues.getTextValue(i));
result.put(key, value);
}
}
} else {
ParameterValues paramValues = paramDefEntry.getValues(paramContext);
for (int i = 0; i < paramValues.getRowCount(); i++) {
String key = String.valueOf(paramValues.getKeyValue(i));
String value = String.valueOf(paramValues.getTextValue(i));
result.put(key, value);
}
}
}
String key = "-1";
String value = "All";
result.put(key, value);
} else if (parameterIsGroup(paramDefEntry.getName())) {
String keyValue = searchKey("(.*officer*)", selectedValues,
paramDefEntry.getName());
if (!keyValue.equals("")) {
id = (String) selectedValues.get(keyValue).getParamValue();
if (!id.equals("-1")) {
List<CustomerDetailDto> customerList = ApplicationContextProvider
.getBean(CenterServiceFacade.class).retrieveGroupForPentahoReport(new Short(id));
for (CustomerDetailDto office : customerList) {
String key = office.getCustomerId().toString();
String value = office.getDisplayName();
result.put(key, value);
}
} else {
ParameterValues paramValues = paramDefEntry.getValues(paramContext);
for (int i = 0; i < paramValues.getRowCount(); i++) {
String key = String.valueOf(paramValues.getKeyValue(i));
String value = String.valueOf(paramValues.getTextValue(i));
result.put(key, value);
}
}
} else {
ParameterValues paramValues = paramDefEntry.getValues(paramContext);
for (int i = 0; i < paramValues.getRowCount(); i++) {
String key = String.valueOf(paramValues.getKeyValue(i));
String value = String.valueOf(paramValues.getTextValue(i));
result.put(key, value);
}
}
String key = "-1";
String value = "All";
result.put(key, value);
} else if (parameterIsOfficer(paramDefEntry.getName())) {
String keyValue = searchKey("(.*office$)|(.*office_id$)|(.*branch_id$)|(.*office$)|(.*selectedBranch.*)", selectedValues, paramDefEntry.getName());
if(!keyValue.equals("")){
id = (String) selectedValues.get(keyValue).getParamValue();
if (!id.equals("-1")) {
ChangeAccountStatusDto changeAccountStatusDto = ApplicationContextProvider
.getBean(LoanAccountServiceFacade.class)
.retrieveLoanOfficerDetailsForBranch(new Short(id));
List<PersonnelDto> officers = changeAccountStatusDto
.getLoanOfficers();
for (PersonnelDto officer : officers) {
String key = officer.getPersonnelId().toString();
String value = officer.getDisplayName();
result.put(key, value);
}
}
String key = "-1";
String value = "All";
result.put(key, value);
}
} else {
ParameterValues paramValues = paramDefEntry.getValues(paramContext);
for (int i = 0; i < paramValues.getRowCount(); i++) {
String key = String.valueOf(paramValues.getKeyValue(i));
String value = String.valueOf(paramValues.getTextValue(i));
result.put(key, value);
}
}
return result;
}
private boolean parameterIsOffice(String name) {
Pattern p = Pattern.compile("(.*office_id$)|(.*branch_id$)|(.*office$)");
Matcher m = p.matcher(name);
boolean b = m.matches();
return b;
}
private boolean parameterIsCenter(String name) {
Pattern p = Pattern.compile("(.*center.*)|(.*CENTER_NAME.*)");
Matcher m = p.matcher(name);
boolean b = m.matches();
return b;
}
private boolean parameterIsOfficer(String name) {
Pattern p = Pattern.compile("(.*officer$)|(.*officer_id$)|(.*user.*)|(.*selectedLoanOfficer.*)");
Matcher m = p.matcher(name);
boolean b = m.matches();
return b;
}
private boolean parameterIsGroup(String name) {
Pattern p = Pattern.compile("(.*group.*)");
Matcher m = p.matcher(name);
boolean b = m.matches();
return b;
}
private String searchKey(String regex,
Map<String, AbstractPentahoParameter> selectedValues, String name) {
String results = "";
Pattern p = Pattern.compile(regex);
Set<String> keys = selectedValues.keySet();
Iterator<String> ite = keys.iterator();
while (ite.hasNext()) {
String candidate = ite.next();
Matcher m = p.matcher(candidate);
if (m.matches() && !name.equals(candidate)) {
results = candidate;
}
}
if (results.isEmpty()) {
return "";
} else {
return results;
}
}
}
| |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package io.flutter.view;
import static org.junit.Assert.assertEquals;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import android.content.ContentResolver;
import android.content.Context;
import android.view.View;
import android.view.ViewParent;
import android.view.accessibility.AccessibilityEvent;
import android.view.accessibility.AccessibilityManager;
import android.view.accessibility.AccessibilityNodeInfo;
import io.flutter.embedding.engine.systemchannels.AccessibilityChannel;
import io.flutter.plugin.platform.PlatformViewsAccessibilityDelegate;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.robolectric.RobolectricTestRunner;
import org.robolectric.annotation.Config;
@Config(manifest = Config.NONE)
@RunWith(RobolectricTestRunner.class)
public class AccessibilityBridgeTest {
@Test
public void itDescribesNonTextFieldsWithAContentDescription() {
AccessibilityBridge accessibilityBridge = setUpBridge();
TestSemanticsNode testSemanticsNode = new TestSemanticsNode();
testSemanticsNode.label = "Hello, World";
TestSemanticsUpdate testSemanticsUpdate = testSemanticsNode.toUpdate();
accessibilityBridge.updateSemantics(testSemanticsUpdate.buffer, testSemanticsUpdate.strings);
AccessibilityNodeInfo nodeInfo = accessibilityBridge.createAccessibilityNodeInfo(0);
assertEquals(nodeInfo.getContentDescription(), "Hello, World");
assertEquals(nodeInfo.getText(), null);
}
@Test
public void itDescribesTextFieldsWithText() {
AccessibilityBridge accessibilityBridge = setUpBridge();
TestSemanticsNode testSemanticsNode = new TestSemanticsNode();
testSemanticsNode.label = "Hello, World";
testSemanticsNode.addFlag(AccessibilityBridge.Flag.IS_TEXT_FIELD);
TestSemanticsUpdate testSemanticsUpdate = testSemanticsNode.toUpdate();
accessibilityBridge.updateSemantics(testSemanticsUpdate.buffer, testSemanticsUpdate.strings);
AccessibilityNodeInfo nodeInfo = accessibilityBridge.createAccessibilityNodeInfo(0);
assertEquals(nodeInfo.getContentDescription(), null);
assertEquals(nodeInfo.getText(), "Hello, World");
}
@Test
public void itDoesNotContainADescriptionIfScopesRoute() {
AccessibilityBridge accessibilityBridge = setUpBridge();
TestSemanticsNode testSemanticsNode = new TestSemanticsNode();
testSemanticsNode.label = "Hello, World";
testSemanticsNode.addFlag(AccessibilityBridge.Flag.SCOPES_ROUTE);
TestSemanticsUpdate testSemanticsUpdate = testSemanticsNode.toUpdate();
accessibilityBridge.updateSemantics(testSemanticsUpdate.buffer, testSemanticsUpdate.strings);
AccessibilityNodeInfo nodeInfo = accessibilityBridge.createAccessibilityNodeInfo(0);
assertEquals(nodeInfo.getContentDescription(), null);
assertEquals(nodeInfo.getText(), null);
}
@Test
public void itUnfocusesPlatformViewWhenPlatformViewGoesAway() {
  // Collaborators are real mocks (not the setUpBridge defaults) because this test
  // needs to stub the embedder, the manager, and the root view's parent.
  AccessibilityViewEmbedder mockViewEmbedder = mock(AccessibilityViewEmbedder.class);
  AccessibilityManager mockManager = mock(AccessibilityManager.class);
  View mockRootView = mock(View.class);
  Context context = mock(Context.class);
  when(mockRootView.getContext()).thenReturn(context);
  when(context.getPackageName()).thenReturn("test");
  AccessibilityBridge accessibilityBridge =
      setUpBridge(mockRootView, mockManager, mockViewEmbedder);
  // Sent a11y tree with platform view.
  TestSemanticsNode root = new TestSemanticsNode();
  root.id = 0;
  TestSemanticsNode platformView = new TestSemanticsNode();
  platformView.id = 1;
  platformView.platformViewId = 42;
  root.children.add(platformView);
  TestSemanticsUpdate testSemanticsUpdate = root.toUpdate();
  accessibilityBridge.updateSemantics(testSemanticsUpdate.buffer, testSemanticsUpdate.strings);
  // Set a11y focus to platform view.
  // The embedder is stubbed so the focus event is attributed to the embedded view
  // whose flutter id (42) matches the platform view node above.
  View mockView = mock(View.class);
  AccessibilityEvent focusEvent = mock(AccessibilityEvent.class);
  when(mockViewEmbedder.requestSendAccessibilityEvent(mockView, mockView, focusEvent))
      .thenReturn(true);
  when(mockViewEmbedder.getRecordFlutterId(mockView, focusEvent)).thenReturn(42);
  when(focusEvent.getEventType()).thenReturn(AccessibilityEvent.TYPE_VIEW_ACCESSIBILITY_FOCUSED);
  accessibilityBridge.externalViewRequestSendAccessibilityEvent(mockView, mockView, focusEvent);
  // Replace the platform view.
  TestSemanticsNode node = new TestSemanticsNode();
  node.id = 2;
  root.children.clear();
  root.children.add(node);
  testSemanticsUpdate = root.toUpdate();
  // The manager must report enabled and the root view must have a parent, otherwise
  // the bridge does not dispatch accessibility events at all.
  when(mockManager.isEnabled()).thenReturn(true);
  ViewParent mockParent = mock(ViewParent.class);
  when(mockRootView.getParent()).thenReturn(mockParent);
  accessibilityBridge.updateSemantics(testSemanticsUpdate.buffer, testSemanticsUpdate.strings);
  // Check that unfocus event was sent.
  // Two events are captured; the first is expected to be the focus-cleared event.
  // NOTE(review): the nature of the second event is not asserted here — presumably a
  // window/content change — confirm against AccessibilityBridge.updateSemantics.
  ArgumentCaptor<AccessibilityEvent> eventCaptor =
      ArgumentCaptor.forClass(AccessibilityEvent.class);
  verify(mockParent, times(2))
      .requestSendAccessibilityEvent(eq(mockRootView), eventCaptor.capture());
  AccessibilityEvent event = eventCaptor.getAllValues().get(0);
  assertEquals(event.getEventType(), AccessibilityEvent.TYPE_VIEW_ACCESSIBILITY_FOCUS_CLEARED);
}
// Convenience overload: builds a bridge in which every collaborator is a Mockito mock.
AccessibilityBridge setUpBridge() {
  return setUpBridge(null, null, null, null, null, null);
}
// Convenience overload: callers supply the root view, manager, and embedder; the
// remaining collaborators (channel, resolver, platform-views delegate) are mocked.
AccessibilityBridge setUpBridge(
    View rootAccessibilityView,
    AccessibilityManager accessibilityManager,
    AccessibilityViewEmbedder accessibilityViewEmbedder) {
  return setUpBridge(
      rootAccessibilityView, null, accessibilityManager, null, accessibilityViewEmbedder, null);
}
// Full factory: any collaborator passed as null is replaced with a Mockito mock,
// so each test constructs only the pieces it cares about.
AccessibilityBridge setUpBridge(
    View rootAccessibilityView,
    AccessibilityChannel accessibilityChannel,
    AccessibilityManager accessibilityManager,
    ContentResolver contentResolver,
    AccessibilityViewEmbedder accessibilityViewEmbedder,
    PlatformViewsAccessibilityDelegate platformViewsAccessibilityDelegate) {
  if (rootAccessibilityView == null) {
    // The mocked root view needs a context with a package name, since the bridge
    // reads it when creating node infos.
    rootAccessibilityView = mock(View.class);
    Context context = mock(Context.class);
    when(rootAccessibilityView.getContext()).thenReturn(context);
    when(context.getPackageName()).thenReturn("test");
  }
  accessibilityChannel =
      accessibilityChannel != null ? accessibilityChannel : mock(AccessibilityChannel.class);
  accessibilityManager =
      accessibilityManager != null ? accessibilityManager : mock(AccessibilityManager.class);
  contentResolver = contentResolver != null ? contentResolver : mock(ContentResolver.class);
  accessibilityViewEmbedder =
      accessibilityViewEmbedder != null
          ? accessibilityViewEmbedder
          : mock(AccessibilityViewEmbedder.class);
  platformViewsAccessibilityDelegate =
      platformViewsAccessibilityDelegate != null
          ? platformViewsAccessibilityDelegate
          : mock(PlatformViewsAccessibilityDelegate.class);
  return new AccessibilityBridge(
      rootAccessibilityView,
      accessibilityChannel,
      accessibilityManager,
      contentResolver,
      accessibilityViewEmbedder,
      platformViewsAccessibilityDelegate);
}
/// The encoding for semantics is described in platform_view_android.cc
class TestSemanticsUpdate {
  /** Encoded semantics tree, in the format consumed by updateSemantics. */
  final ByteBuffer buffer;
  /** String table referenced by index from the encoded buffer. */
  final String[] strings;

  TestSemanticsUpdate(ByteBuffer buffer, String[] strings) {
    this.buffer = buffer;
    this.strings = strings;
  }
}
// A mutable builder for one semantics node. toUpdate() serializes the whole subtree
// into the binary format that AccessibilityBridge.updateSemantics expects; the field
// order below must therefore stay in sync with the decoder (see platform_view_android.cc).
class TestSemanticsNode {
  TestSemanticsNode() {}

  // ORs the flag's bit into the node's flag mask.
  void addFlag(AccessibilityBridge.Flag flag) {
    flags |= flag.value;
  }

  // These fields are declared in the order they should be
  // encoded.
  int id = 0;
  int flags = 0;
  int actions = 0;
  int maxValueLength = 0;
  int currentValueLength = 0;
  int textSelectionBase = 0;
  int textSelectionExtent = 0;
  int platformViewId = -1; // -1 means "not a platform view"
  int scrollChildren = 0;
  int scrollIndex = 0;
  float scrollPosition = 0.0f;
  float scrollExtentMax = 0.0f;
  float scrollExtentMin = 0.0f;
  String label = null;
  String value = null;
  String increasedValue = null;
  String decreasedValue = null;
  String hint = null;
  int textDirection = 0;
  float left = 0.0f;
  float top = 0.0f;
  float right = 0.0f;
  float bottom = 0.0f;
  final List<TestSemanticsNode> children = new ArrayList<TestSemanticsNode>();
  // custom actions not supported.

  // Serializes this node and its subtree into a TestSemanticsUpdate.
  // NOTE(review): the buffer is fixed at 1000 bytes; large test trees would overflow it.
  TestSemanticsUpdate toUpdate() {
    ArrayList<String> strings = new ArrayList<String>();
    ByteBuffer bytes = ByteBuffer.allocate(1000);
    addToBuffer(bytes, strings);
    bytes.flip();
    return new TestSemanticsUpdate(bytes, strings.toArray(new String[strings.size()]));
  }

  // Recursively encodes this node: scalar fields first (in declaration order),
  // strings as indices into the shared string table, then the transform, child id
  // lists, custom actions count, and finally the child nodes themselves.
  protected void addToBuffer(ByteBuffer bytes, ArrayList<String> strings) {
    bytes.putInt(id);
    bytes.putInt(flags);
    bytes.putInt(actions);
    bytes.putInt(maxValueLength);
    bytes.putInt(currentValueLength);
    bytes.putInt(textSelectionBase);
    bytes.putInt(textSelectionExtent);
    bytes.putInt(platformViewId);
    bytes.putInt(scrollChildren);
    bytes.putInt(scrollIndex);
    bytes.putFloat(scrollPosition);
    bytes.putFloat(scrollExtentMax);
    bytes.putFloat(scrollExtentMin);
    updateString(label, bytes, strings);
    updateString(value, bytes, strings);
    updateString(increasedValue, bytes, strings);
    updateString(decreasedValue, bytes, strings);
    updateString(hint, bytes, strings);
    bytes.putInt(textDirection);
    bytes.putFloat(left);
    bytes.putFloat(top);
    bytes.putFloat(right);
    bytes.putFloat(bottom);
    // transform.
    // 4x4 transform matrix, all zeros in these tests.
    for (int i = 0; i < 16; i++) {
      bytes.putFloat(0);
    }
    // children in traversal order.
    bytes.putInt(children.size());
    for (TestSemanticsNode node : children) {
      bytes.putInt(node.id);
    }
    // children in hit test order.
    for (TestSemanticsNode node : children) {
      bytes.putInt(node.id);
    }
    // custom actions
    bytes.putInt(0);
    // child nodes
    for (TestSemanticsNode node : children) {
      node.addToBuffer(bytes, strings);
    }
  }
}
// Encodes one string field: a null string is written as index -1; otherwise the
// string is appended to the shared string table and its table index is written.
static void updateString(String value, ByteBuffer bytes, ArrayList<String> strings) {
  if (value != null) {
    strings.add(value);
    bytes.putInt(strings.size() - 1);
  } else {
    bytes.putInt(-1);
  }
}
}
| |
/**
* Copyright (c) 2007, Slick 2D
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following
* conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer
* in the documentation and/or other materials provided with the distribution. Neither the name of the Slick 2D nor the names of
* its contributors may be used to endorse or promote products derived from this software without specific prior written
* permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,
* BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
* SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.badlogic.gdx.backends.jglfw.audio;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.utils.GdxRuntimeException;
import com.badlogic.gdx.utils.StreamUtils;
import com.jcraft.jogg.Packet;
import com.jcraft.jogg.Page;
import com.jcraft.jogg.StreamState;
import com.jcraft.jogg.SyncState;
import com.jcraft.jorbis.Block;
import com.jcraft.jorbis.Comment;
import com.jcraft.jorbis.DspState;
import com.jcraft.jorbis.Info;
import org.lwjgl.BufferUtils;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
/** An input stream to read Ogg Vorbis and expose the decoded PCM as a byte stream.
 *
 * <p>Decoding is done lazily: each call to {@link #read()} pulls more PCM out of the
 * Vorbis decoder as needed. Note that {@link #available()} only reports whether any
 * data remains (1) or the stream is exhausted (0), not a byte count.
 * @author kevin */
public class OggInputStream extends InputStream {
    private final static int BUFFER_SIZE = 512;

    /** The conversion buffer size */
    private int convsize = BUFFER_SIZE * 4;
    /** The buffer used to read OGG file */
    private byte[] convbuffer;
    /** The stream we're reading the OGG file from */
    private InputStream input;
    /** The audio information from the OGG header */
    private Info oggInfo = new Info(); // struct that stores all the static vorbis bitstream settings
    /** True if we're at the end of the available data */
    private boolean endOfStream;
    /** The Vorbis SyncState used to decode the OGG */
    private SyncState syncState = new SyncState(); // sync and verify incoming physical bitstream
    /** The Vorbis Stream State used to decode the OGG */
    private StreamState streamState = new StreamState(); // take physical pages, weld into a logical stream of packets
    /** The current OGG page */
    private Page page = new Page(); // one Ogg bitstream page. Vorbis packets are inside
    /** The current packet page */
    private Packet packet = new Packet(); // one raw packet of data for decode
    /** The comment read from the OGG file */
    private Comment comment = new Comment(); // struct that stores all the bitstream user comments
    /** The Vorbis DSP state used to decode the OGG */
    private DspState dspState = new DspState(); // central working state for the packet->PCM decoder
    /** The OGG block we're currently working with to convert PCM */
    private Block vorbisBlock = new Block(dspState); // local working space for packet->PCM decode
    /** Temporary scratch buffer */
    byte[] buffer;
    /** The number of bytes read */
    int bytes = 0;
    /** True if we should be reading big endian */
    boolean bigEndian = ByteOrder.nativeOrder().equals(ByteOrder.BIG_ENDIAN);
    /** True if we've reached the end of the current bit stream */
    boolean endOfBitStream = true;
    /** True if we've initialised the OGG info block */
    boolean inited = false;
    /** The index into the byte array we currently read from */
    private int readIndex;
    /** The byte array store used to hold the data read from the ogg */
    private ByteBuffer pcmBuffer;
    /** The total number of bytes */
    private int total;

    /** Create a new stream to decode OGG data
     *
     * @param input The input stream from which to read the OGG file */
    public OggInputStream (InputStream input) {
        this(input, null);
    }

    /** Create a new stream to decode OGG data, reusing buffers from another stream.
     *
     * It's not a good idea to use the old stream instance afterwards.
     *
     * @param input The input stream from which to read the OGG file
     * @param previousStream The stream instance to reuse buffers from, may be null */
    OggInputStream (InputStream input, OggInputStream previousStream) {
        if (previousStream == null) {
            convbuffer = new byte[convsize];
            pcmBuffer = BufferUtils.createByteBuffer(4096 * 500);
        } else {
            convbuffer = previousStream.convbuffer;
            pcmBuffer = previousStream.pcmBuffer;
        }

        this.input = input;
        try {
            // NOTE: available() is only an estimate; getLength() therefore reports the
            // bytes available at construction time, not the exact file size.
            total = input.available();
        } catch (IOException ex) {
            throw new GdxRuntimeException(ex);
        }

        init();
    }

    /** Get the number of bytes on the stream
     *
     * @return The number of the bytes on the stream */
    public int getLength () {
        return total;
    }

    /** @return The number of audio channels (1 = mono, 2 = stereo) from the Vorbis header */
    public int getChannels () {
        return oggInfo.channels;
    }

    /** @return The sample rate in Hz from the Vorbis header */
    public int getSampleRate () {
        return oggInfo.rate;
    }

    /** Initialise the streams and thread involved in the streaming of OGG data */
    private void init () {
        initVorbis();
        readPCM();
    }

    /** @see InputStream#available()
     *
     * <p>Deviates from the InputStream contract: returns 1 while any data may remain
     * and 0 once the end of the stream has been reached. */
    public int available () {
        return endOfStream ? 0 : 1;
    }

    /** Initialise the vorbis decoding */
    private void initVorbis () {
        syncState.init();
    }

    /** Get a page and packet from that page
     *
     * @return True if there was a page available */
    private boolean getPageAndPacket () {
        // grab some data at the head of the stream. We want the first page
        // (which is guaranteed to be small and only contain the Vorbis
        // stream initial header) We need the first page to get the stream
        // serialno.

        // submit a 4k block to libvorbis' Ogg layer
        int index = syncState.buffer(BUFFER_SIZE);
        if (index == -1) return false;

        buffer = syncState.data;
        if (buffer == null) {
            endOfStream = true;
            return false;
        }

        try {
            bytes = input.read(buffer, index, BUFFER_SIZE);
        } catch (Exception e) {
            throw new GdxRuntimeException("Failure reading Vorbis.", e);
        }
        syncState.wrote(bytes);

        // Get the first page.
        if (syncState.pageout(page) != 1) {
            // have we simply run out of data? If so, we're done.
            if (bytes < BUFFER_SIZE) return false;

            // error case. Must not be Vorbis data
            throw new GdxRuntimeException("Input does not appear to be an Ogg bitstream.");
        }

        // Get the serial number and set up the rest of decode.
        // serialno first; use it to set up a logical stream
        streamState.init(page.serialno());

        // extract the initial header from the first page and verify that the
        // Ogg bitstream is in fact Vorbis data

        // I handle the initial header first instead of just having the code
        // read all three Vorbis headers at once because reading the initial
        // header is an easy way to identify a Vorbis bitstream and it's
        // useful to see that functionality separated out.
        oggInfo.init();
        comment.init();
        if (streamState.pagein(page) < 0) {
            // error; stream version mismatch perhaps
            throw new GdxRuntimeException("Error reading first page of Ogg bitstream.");
        }

        if (streamState.packetout(packet) != 1) {
            // no page? must not be vorbis
            throw new GdxRuntimeException("Error reading initial header packet.");
        }

        if (oggInfo.synthesis_headerin(comment, packet) < 0) {
            // error case; not a vorbis header
            throw new GdxRuntimeException("Ogg bitstream does not contain Vorbis audio data.");
        }

        // At this point, we're sure we're Vorbis. We've set up the logical
        // (Ogg) bitstream decoder. Get the comment and codebook headers and
        // set up the Vorbis decoder

        // The next two packets in order are the comment and codebook headers.
        // They're likely large and may span multiple pages. Thus we read
        // and submit data until we get our two packets, watching that no
        // pages are missing. If a page is missing, error out; losing a
        // header page is the only place where missing data is fatal. */
        int i = 0;
        while (i < 2) {
            while (i < 2) {
                int result = syncState.pageout(page);
                if (result == 0) break; // Need more data
                // Don't complain about missing or corrupt data yet. We'll
                // catch it at the packet output phase

                if (result == 1) {
                    streamState.pagein(page); // we can ignore any errors here
                    // as they'll also become apparent
                    // at packetout
                    while (i < 2) {
                        result = streamState.packetout(packet);
                        if (result == 0) break;
                        if (result == -1) {
                            // Uh oh; data at some point was corrupted or missing!
                            // We can't tolerate that in a header. Die.
                            throw new GdxRuntimeException("Corrupt secondary header.");
                        }

                        oggInfo.synthesis_headerin(comment, packet);
                        i++;
                    }
                }
            }
            // no harm in not checking before adding more
            index = syncState.buffer(BUFFER_SIZE);
            if (index == -1) return false;
            buffer = syncState.data;
            try {
                bytes = input.read(buffer, index, BUFFER_SIZE);
            } catch (Exception e) {
                throw new GdxRuntimeException("Failed to read Vorbis.", e);
            }
            if (bytes == 0 && i < 2) {
                throw new GdxRuntimeException("End of file before finding all Vorbis headers.");
            }
            syncState.wrote(bytes);
        }

        // NOTE(review): convsize shrinks here to BUFFER_SIZE / channels samples per
        // channel, bounding bytesToWrite (2 * channels * bout) below convbuffer.length.
        convsize = BUFFER_SIZE / oggInfo.channels;

        // OK, got and parsed all three headers. Initialize the Vorbis
        // packet->PCM decoder.
        dspState.synthesis_init(oggInfo); // central decode state
        vorbisBlock.init(dspState); // local state for most of the decode
        // so multiple block decodes can
        // proceed in parallel. We could init
        // multiple vorbis_block structures
        // for vd here

        return true;
    }

    /** Decode the OGG file as shown in the jogg/jorbis examples. Fills pcmBuffer with
     * as much decoded 16-bit PCM as one pass produces; returns early once something
     * was written so callers see data promptly. */
    private void readPCM () {
        boolean wrote = false;

        while (true) { // we repeat if the bitstream is chained
            if (endOfBitStream) {
                if (!getPageAndPacket()) {
                    break;
                }
                endOfBitStream = false;
            }

            if (!inited) {
                inited = true;
                return;
            }

            float[][][] _pcm = new float[1][][];
            int[] _index = new int[oggInfo.channels];
            // The rest is just a straight decode loop until end of stream
            while (!endOfBitStream) {
                while (!endOfBitStream) {
                    int result = syncState.pageout(page);

                    if (result == 0) {
                        break; // need more data
                    }

                    if (result == -1) { // missing or corrupt data at this page position
                        // throw new GdxRuntimeException("Corrupt or missing data in bitstream.");
                        Gdx.app.log("gdx-audio", "Error reading OGG: Corrupt or missing data in bitstream.");
                    } else {
                        streamState.pagein(page); // can safely ignore errors at
                        // this point
                        while (true) {
                            result = streamState.packetout(packet);

                            if (result == 0) break; // need more data
                            if (result == -1) { // missing or corrupt data at this page position
                                // no reason to complain; already complained above
                            } else {
                                // we have a packet. Decode it
                                int samples;
                                if (vorbisBlock.synthesis(packet) == 0) { // test for success!
                                    dspState.synthesis_blockin(vorbisBlock);
                                }

                                // **pcm is a multichannel float vector. In stereo, for
                                // example, pcm[0] is left, and pcm[1] is right. samples is
                                // the size of each channel. Convert the float values
                                // (-1.<=range<=1.) to whatever PCM format and write it out

                                while ((samples = dspState.synthesis_pcmout(_pcm, _index)) > 0) {
                                    float[][] pcm = _pcm[0];
                                    // boolean clipflag = false;
                                    int bout = (samples < convsize ? samples : convsize);

                                    // convert floats to 16 bit signed ints (host order) and
                                    // interleave
                                    for (int i = 0; i < oggInfo.channels; i++) {
                                        int ptr = i * 2;
                                        // int ptr=i;
                                        int mono = _index[i];
                                        for (int j = 0; j < bout; j++) {
                                            int val = (int)(pcm[i][mono + j] * 32767.);
                                            // might as well guard against clipping
                                            if (val > 32767) {
                                                val = 32767;
                                            }
                                            if (val < -32768) {
                                                val = -32768;
                                            }
                                            // NOTE(review): this bit-twiddle is from the jorbis example;
                                            // for values already in [-32768, -1] it is a no-op on the low
                                            // 16 bits actually written below.
                                            if (val < 0) val = val | 0x8000;

                                            if (bigEndian) {
                                                convbuffer[ptr] = (byte)(val >>> 8);
                                                convbuffer[ptr + 1] = (byte)(val);
                                            } else {
                                                convbuffer[ptr] = (byte)(val);
                                                convbuffer[ptr + 1] = (byte)(val >>> 8);
                                            }
                                            ptr += 2 * (oggInfo.channels);
                                        }
                                    }

                                    int bytesToWrite = 2 * oggInfo.channels * bout;
                                    if (bytesToWrite > pcmBuffer.remaining()) {
                                        throw new GdxRuntimeException("Ogg block too big to be buffered: " + bytesToWrite + " :: " + pcmBuffer.remaining());
                                    } else {
                                        pcmBuffer.put(convbuffer, 0, bytesToWrite);
                                    }

                                    wrote = true;
                                    dspState.synthesis_read(bout); // tell libvorbis how
                                    // many samples we
                                    // actually consumed
                                }
                            }
                        }
                        if (page.eos() != 0) {
                            endOfBitStream = true;
                        }

                        if ((!endOfBitStream) && (wrote)) {
                            return;
                        }
                    }
                }

                if (!endOfBitStream) {
                    bytes = 0;
                    int index = syncState.buffer(BUFFER_SIZE);
                    if (index >= 0) {
                        buffer = syncState.data;
                        try {
                            bytes = input.read(buffer, index, BUFFER_SIZE);
                        } catch (Exception e) {
                            throw new GdxRuntimeException("Error during Vorbis decoding.", e);
                        }
                    } else {
                        bytes = 0;
                    }
                    syncState.wrote(bytes);
                    if (bytes == 0) {
                        endOfBitStream = true;
                    }
                }
            }

            // clean up this logical bitstream; before exit we see if we're
            // followed by another [chained]
            streamState.clear();

            // ogg_page and ogg_packet structs always point to storage in
            // libvorbis. They're never freed or manipulated directly

            vorbisBlock.clear();
            dspState.clear();
            oggInfo.clear(); // must be called last
        }

        // OK, clean up the framer
        syncState.clear();
        endOfStream = true;
    }

    /** Reads one decoded byte (0-255), refilling the PCM buffer when it is drained.
     *
     * @return The next unsigned byte of PCM, or -1 at the end of the stream */
    public int read () {
        if (readIndex >= pcmBuffer.position()) {
            pcmBuffer.clear();
            readPCM();
            readIndex = 0;
        }
        if (readIndex >= pcmBuffer.position()) {
            return -1;
        }

        int value = pcmBuffer.get(readIndex);
        if (value < 0) {
            value = 256 + value;
        }
        readIndex++;

        return value;
    }

    /** @return True once the underlying data is exhausted and all buffered PCM has been consumed */
    public boolean atEnd () {
        return endOfStream && (readIndex >= pcmBuffer.position());
    }

    /** Reads up to len bytes of decoded PCM into b, starting at offset off.
     *
     * @param b The buffer to fill
     * @param off The offset in b at which to start writing
     * @param len The maximum number of bytes to read
     * @return The number of bytes actually read, or -1 at the end of the stream */
    public int read (byte[] b, int off, int len) {
        for (int i = 0; i < len; i++) {
            int value = read();
            if (value >= 0) {
                // BUG FIX: previously wrote to b[i], ignoring the caller-supplied offset,
                // which violates the InputStream.read(byte[], int, int) contract.
                b[off + i] = (byte)value;
            } else {
                if (i == 0) {
                    return -1;
                } else {
                    return i;
                }
            }
        }

        return len;
    }

    /** Fills as much of b as possible with decoded PCM.
     *
     * @return The number of bytes read, or -1 at the end of the stream */
    public int read (byte[] b) {
        return read(b, 0, b.length);
    }

    /** Closes the underlying input stream, swallowing any IOException. */
    public void close () {
        StreamUtils.closeQuietly(input);
    }
}
| |
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.syntax;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.devtools.starlark.spelling.SpellChecker;
import com.google.errorprone.annotations.FormatMethod;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import javax.annotation.Nullable;
/**
* The Resolver resolves each identifier in a syntax tree to its binding, and performs other
* validity checks.
*
* <p>When a variable is defined, it is visible in the entire block. For example, a global variable
* is visible in the entire file; a variable in a function is visible in the entire function block
* (even on the lines before its first assignment).
*
* <p>Resolution is a mutation of the syntax tree, as it attaches binding information to Identifier
* nodes. (In the future, it will attach additional information to functions to support lexical
* scope, and even compilation of the trees to bytecode.) Resolution errors are reported in the
* analogous manner to scan/parse errors: for a StarlarkFile, they are appended to {@code
* StarlarkFile.errors}; for an expression they are reported by an SyntaxError.Exception exception.
* It is legal to resolve a file that already contains scan/parse errors, though it may lead to
* secondary errors.
*/
public final class Resolver extends NodeVisitor {
// TODO(adonovan): use "keyword" (not "named") and "required" (not "mandatory") terminology
// everywhere, including the spec.
enum Scope {
  // TODO(adonovan): Add UNIVERSAL, FREE, CELL.
  // (PREDECLARED vs UNIVERSAL allows us to represent the app-dependent and fixed parts of the
  // predeclared environment separately, reducing the amount of copying.)

  /** Binding is local to a function, comprehension, or file (e.g. load). */
  LOCAL,
  /** Binding occurs outside any function or comprehension. */
  GLOBAL,
  /** Binding is predeclared by the core or application. */
  PREDECLARED;

  /** Returns the lower-case scope name (e.g. "local") for use in error messages. */
  @Override
  public String toString() {
    // Use Locale.ROOT so the result does not depend on the default locale
    // (e.g. the Turkish locale lower-cases 'I' to a dotless 'ı').
    return super.toString().toLowerCase(Locale.ROOT);
  }
}
// A Binding is a static abstraction of a variable.
// The Resolver maps each Identifier to a Binding.
static final class Binding {
  final Scope scope;
  @Nullable final Identifier first; // first binding use, if syntactic
  final int index; // within its block (currently unused)

  private Binding(Scope scope, @Nullable Identifier first, int index) {
    this.scope = scope;
    this.first = first;
    this.index = index;
  }

  /** Renders the binding for debugging, e.g. {@code local[0] x @ foo.bzl:1:2}. */
  @Override
  public String toString() {
    if (first == null) {
      return scope.toString();
    }
    return String.format("%s[%d] %s @ %s", scope, index, first.getName(), first.getStartLocation());
  }
}
/** A Function records information about a resolved function. */
static final class Function {
  // This class is exposed to Eval in the evaluator build target
  // (which is the same Java package, at least for now).
  // Once we switch to bytecode, it will be exposed only to the compiler.

  // The params and parameterNames fields use "run-time order":
  // non-kwonly, keyword-only, *args, **kwargs.
  // A bare * parameter is dropped.

  final String name;
  final Location location; // of identifier
  final ImmutableList<Parameter> params; // order defined above
  final ImmutableList<Statement> body;
  final boolean hasVarargs; // a *args parameter is present
  final boolean hasKwargs; // a **kwargs parameter is present
  final int numKeywordOnlyParams;
  final ImmutableList<String> parameterNames; // order defined above

  // isToplevel indicates that this is the <toplevel> function containing
  // top-level statements of a file. It causes assignments to unresolved
  // identifiers to update the module, not the lexical frame.
  // TODO(adonovan): remove this hack when identifier resolution is accurate.
  final boolean isToplevel;

  private Function(
      String name,
      Location loc,
      ImmutableList<Parameter> params,
      ImmutableList<Statement> body,
      boolean hasVarargs,
      boolean hasKwargs,
      int numKeywordOnlyParams) {
    this.name = name;
    this.location = loc;
    this.params = params;
    this.body = body;
    this.hasVarargs = hasVarargs;
    this.hasKwargs = hasKwargs;
    this.numKeywordOnlyParams = numKeywordOnlyParams;
    // Cache the parameter names, in the same run-time order as params.
    ImmutableList.Builder<String> names = ImmutableList.builderWithExpectedSize(params.size());
    for (Parameter p : params) {
      names.add(p.getName());
    }
    this.parameterNames = names.build();
    this.isToplevel = name.equals("<toplevel>");
  }
}
/**
 * Module is a static abstraction of a Starlark module. It describes the set of variable names for
 * use during name resolution.
 */
public interface Module {

  // TODO(adonovan): opt: for efficiency, turn this into a predicate, not an enumerable set,
  // and look up bindings as they are needed, not preemptively.
  // Otherwise we must do work proportional to the number of bindings in the
  // environment, not the number of free variables of the file/expression.
  //
  // A single method will then suffice:
  //   Scope resolve(String name) throws Undeclared

  /** Returns the set of names defined by this module. The caller must not modify the set. */
  Set<String> getNames();

  /**
   * Returns (optionally) a more specific error for an undeclared name than the generic message.
   * This hook allows the module to implement flag-enabled names without any knowledge in this
   * file.
   */
  @Nullable
  String getUndeclaredNameError(String name);
}
// A Block is one lexical scope: a map from names to bindings, chained to its
// enclosing scope via parent (null only for the outermost, predeclared block).
private static class Block {
  private final Map<String, Binding> bindings = new HashMap<>();
  private final Scope scope;
  @Nullable private final Block parent;

  Block(Scope scope, @Nullable Block parent) {
    this.scope = scope;
    this.parent = parent;
  }
}
private final List<SyntaxError> errors; // accumulates resolution errors
private final FileOptions options;
private final Module module; // supplies the predeclared environment
private Block block; // innermost lexical block; chained outward via Block.parent
private int loopCount; // depth of enclosing for-loops; gates break/continue

// Shared binding for all predeclared names.
private static final Binding PREDECLARED = new Binding(Scope.PREDECLARED, null, 0);
private Resolver(List<SyntaxError> errors, Module module, FileOptions options) {
this.errors = errors;
this.module = module;
this.options = options;
this.block = new Block(Scope.PREDECLARED, null);
for (String name : module.getNames()) {
block.bindings.put(name, PREDECLARED);
}
}
// Formats and reports an error at the start of the specified node.
// Delegates to the Location-based overload below.
@FormatMethod
private void errorf(Node node, String format, Object... args) {
  errorf(node.getStartLocation(), format, args);
}
// Formats and reports an error at the specified location.
// Errors are appended to the shared list rather than thrown, so resolution
// continues and can report multiple problems in one pass.
@FormatMethod
private void errorf(Location loc, String format, Object... args) {
  errors.add(new SyntaxError(loc, String.format(format, args)));
}
/**
 * First pass: add bindings for all variables to the current block. This is done because symbols
 * are sometimes used before their definition point (e.g. functions are not necessarily declared
 * in order).
 */
// TODO(adonovan): eliminate this first pass by using go.starlark.net one-pass approach.
private void createBindings(Iterable<Statement> stmts) {
  // The method reference resolves to the single-Statement overload.
  stmts.forEach(this::createBindings);
}
// First-pass binding creation for a single statement: recursively declares every
// name the statement binds (assignment targets, loop variables, def names, load
// aliases) in the current block, without resolving any uses.
private void createBindings(Statement stmt) {
  switch (stmt.kind()) {
    case ASSIGNMENT:
      createBindings(((AssignmentStatement) stmt).getLHS());
      break;
    case IF:
      // Both branches may bind names; Starlark scoping is block-insensitive.
      IfStatement ifStmt = (IfStatement) stmt;
      createBindings(ifStmt.getThenBlock());
      if (ifStmt.getElseBlock() != null) {
        createBindings(ifStmt.getElseBlock());
      }
      break;
    case FOR:
      ForStatement forStmt = (ForStatement) stmt;
      createBindings(forStmt.getVars());
      createBindings(forStmt.getBody());
      break;
    case DEF:
      // Only the function's name is bound here; its body gets its own block later.
      DefStatement def = (DefStatement) stmt;
      bind(def.getIdentifier());
      break;
    case LOAD:
      LoadStatement load = (LoadStatement) stmt;
      Set<String> names = new HashSet<>();
      for (LoadStatement.Binding b : load.getBindings()) {
        // Reject load('...', '_private').
        Identifier orig = b.getOriginalName();
        if (orig.isPrivate() && !options.allowLoadPrivateSymbols()) {
          errorf(orig, "symbol '%s' is private and cannot be imported", orig.getName());
        }
        // The allowToplevelRebinding check is not applied to all files
        // but we apply it to each load statement as a special case,
        // and emit a better error message than the generic check.
        if (!names.add(b.getLocalName().getName())) {
          errorf(
              b.getLocalName(),
              "load statement defines '%s' more than once",
              b.getLocalName().getName());
        }
      }
      // TODO(adonovan): support options.loadBindsGlobally().
      // Requires that we open a LOCAL block for each file,
      // as well as its Module block, and select which block
      // to declare it in. See go.starlark.net implementation.
      for (LoadStatement.Binding b : load.getBindings()) {
        bind(b.getLocalName());
      }
      break;
    case EXPRESSION:
    case FLOW:
    case RETURN:
      // nothing to declare
  }
}
// Declares a binding for every identifier bound by the assignment target lhs
// (which may be a plain identifier or a nested list/tuple pattern).
private void createBindings(Expression lhs) {
  for (Identifier boundIdent : Identifier.boundIdentifiers(lhs)) {
    bind(boundIdent);
  }
}
// Resolves an assignment target: identifiers must already have been bound by the
// first pass; index expressions are resolved as ordinary expressions; list/tuple
// targets are resolved element-wise. Any other target form is an error.
private void assign(Expression lhs) {
  if (lhs instanceof Identifier) {
    // Bindings are created by the first pass (createBindings),
    // so the name must already be present in the current block.
    Preconditions.checkNotNull(block.bindings.get(((Identifier) lhs).getName()));
  } else if (lhs instanceof IndexExpression) {
    visit(lhs);
  } else if (lhs instanceof ListExpression) {
    for (Expression element : ((ListExpression) lhs).getElements()) {
      assign(element);
    }
  } else {
    errorf(lhs, "cannot assign to '%s'", lhs);
  }
}
@Override
public void visit(Identifier id) {
  // Walk outward through the enclosing blocks looking for a binding of this name.
  Block enclosing = block;
  while (enclosing != null) {
    Binding binding = enclosing.bindings.get(id.getName());
    if (binding != null) {
      if (options.recordScope()) {
        id.setBinding(binding);
      }
      return;
    }
    enclosing = enclosing.parent;
  }

  // The identifier might not exist because it was restricted (hidden) by flags.
  // If this is the case, output a more helpful error message than 'not found'.
  String error = module.getUndeclaredNameError(id.getName());
  if (error == null) {
    // generic error
    error = createInvalidIdentifierException(id.getName(), getAllSymbols());
  }
  errorf(id, "%s", error);
}
// Builds the error message for an undefined name, including a spelling suggestion
// when a close candidate exists. (Despite its name, this method returns a String
// and does not throw.)
private static String createInvalidIdentifierException(String name, Set<String> candidates) {
  if (!Identifier.isValid(name)) {
    // Identifier was created by Parser.makeErrorExpression and contains misparsed text.
    return "contains syntax errors";
  }
  return "name '" + name + "' is not defined" + SpellChecker.didYouMean(name, candidates);
}
@Override
public void visit(ReturnStatement node) {
  // return is only legal inside a function body (a LOCAL block).
  if (block.scope != Scope.LOCAL) {
    errorf(node, "return statements must be inside a function");
  }
  super.visit(node);
}
// Validates the argument list of a call: enforces the positional / keyword /
// *args / **kwargs ordering rules and rejects duplicate keywords, then resolves
// the callee and argument expressions via super.visit.
@Override
public void visit(CallExpression node) {
  // validate call arguments
  boolean seenVarargs = false;
  boolean seenKwargs = false;
  Set<String> keywords = null; // allocated lazily, on the first keyword argument
  for (Argument arg : node.getArguments()) {
    if (arg instanceof Argument.Positional) {
      if (seenVarargs) {
        errorf(arg, "positional argument may not follow *args");
      } else if (seenKwargs) {
        errorf(arg, "positional argument may not follow **kwargs");
      } else if (keywords != null) {
        errorf(arg, "positional argument may not follow keyword argument");
      }
    } else if (arg instanceof Argument.Keyword) {
      String keyword = ((Argument.Keyword) arg).getName();
      if (seenVarargs) {
        errorf(arg, "keyword argument %s may not follow *args", keyword);
      } else if (seenKwargs) {
        errorf(arg, "keyword argument %s may not follow **kwargs", keyword);
      }
      if (keywords == null) {
        keywords = new HashSet<>();
      }
      if (!keywords.add(keyword)) {
        errorf(arg, "duplicate keyword argument: %s", keyword);
      }
    } else if (arg instanceof Argument.Star) {
      if (seenKwargs) {
        errorf(arg, "*args may not follow **kwargs");
      } else if (seenVarargs) {
        errorf(arg, "multiple *args not allowed");
      }
      seenVarargs = true;
    } else if (arg instanceof Argument.StarStar) {
      if (seenKwargs) {
        errorf(arg, "multiple **kwargs not allowed");
      }
      seenKwargs = true;
    }
  }
  super.visit(node);
}
@Override
// Resolves a for loop: rejects top-level loops, then resolves the iterable,
// the loop variables (as assignments), and the body, tracking loop nesting
// depth so that break/continue validation (see visit(FlowStatement)) works.
public void visit(ForStatement node) {
if (block.scope != Scope.LOCAL) {
errorf(
node,
"for loops are not allowed at the top level. You may move it inside a function "
+ "or use a comprehension, [f(x) for x in sequence]");
}
loopCount++;
visit(node.getCollection());
assign(node.getVars());
visitBlock(node.getBody());
// loopCount must still reflect this loop; a mismatch indicates a bug.
Preconditions.checkState(loopCount > 0);
loopCount--;
}
@Override
public void visit(LoadStatement node) {
  // load() is only legal at the top level of a file.
  boolean atTopLevel = block.scope != Scope.LOCAL;
  if (!atTopLevel) {
    errorf(node, "load statement not at top level");
  }
  // Deliberately no super.visit(): the local Identifiers of a load are
  // bindings, not uses, and must not be resolved again here.
}
@Override
public void visit(FlowStatement node) {
  // break/continue (but not pass) must appear inside some enclosing loop.
  boolean insideLoop = loopCount > 0;
  if (node.getKind() != TokenKind.PASS && !insideLoop) {
    errorf(node, "%s statement must be inside a for loop", node.getKind());
  }
  super.visit(node);
}
@Override
// Resolves only the receiver of `x.f`; the field name after the dot is an
// attribute selector, not a variable use, so it must not be resolved.
public void visit(DotExpression node) {
visit(node.getObject());
// Do not visit the field.
}
@Override
public void visit(Comprehension node) {
  // A comprehension introduces its own local scope.
  openBlock(Scope.LOCAL);
  // Pass 1: declare every variable bound by a 'for' clause up front, so a
  // clause may reference variables bound by a later clause.
  for (Comprehension.Clause clause : node.getClauses()) {
    if (clause instanceof Comprehension.For) {
      createBindings(((Comprehension.For) clause).getVars());
    }
  }
  // TODO(adonovan): opt: combine loops
  // Pass 2: resolve iterables, conditions, and loop-variable assignments.
  for (Comprehension.Clause clause : node.getClauses()) {
    if (clause instanceof Comprehension.For) {
      Comprehension.For forClause = (Comprehension.For) clause;
      visit(forClause.getIterable());
      assign(forClause.getVars());
    } else {
      visit(((Comprehension.If) clause).getCondition());
    }
  }
  visit(node.getBody());
  closeBlock();
}
@Override
public void visit(DefStatement node) {
  // Starlark does not permit def inside def.
  if (block.scope == Scope.LOCAL) {
    errorf(node, "nested functions are not allowed. Move the function to the top level.");
  }
  Identifier ident = node.getIdentifier();
  node.resolved =
      resolveFunction(
          ident.getName(), ident.getStartLocation(), node.getParameters(), node.getBody());
}
// Resolves a function definition: resolves default values in the enclosing
// scope, validates parameter ordering (positionals, optionals, *args,
// keyword-only, **kwargs), binds each parameter in a fresh LOCAL block,
// resolves the body, and returns the resolved Function descriptor.
private Function resolveFunction(
String name,
Location loc,
ImmutableList<Parameter> parameters,
ImmutableList<Statement> body) {
// Resolve defaults in enclosing environment.
for (Parameter param : parameters) {
if (param instanceof Parameter.Optional) {
visit(param.getDefaultValue());
}
}
// Enter function block.
openBlock(Scope.LOCAL);
// Check parameter order and convert to run-time order:
// positionals, keyword-only, *args, **kwargs.
Parameter.Star star = null;
Parameter.StarStar starStar = null;
boolean seenOptional = false;
int numKeywordOnlyParams = 0;
// TODO(adonovan): opt: when all Identifiers are resolved to bindings accumulated
// in the function, params can be a prefix of the function's array of bindings.
ImmutableList.Builder<Parameter> params =
ImmutableList.builderWithExpectedSize(parameters.size());
for (Parameter param : parameters) {
if (param instanceof Parameter.Mandatory) {
// e.g. id
if (starStar != null) {
errorf(
param,
"required parameter %s may not follow **%s",
param.getName(),
starStar.getName());
} else if (star != null) {
// A mandatory parameter after * is keyword-only, not an error.
numKeywordOnlyParams++;
} else if (seenOptional) {
errorf(
param,
"required positional parameter %s may not follow an optional parameter",
param.getName());
}
bindParam(params, param);
} else if (param instanceof Parameter.Optional) {
// e.g. id = default
seenOptional = true;
if (starStar != null) {
errorf(param, "optional parameter may not follow **%s", starStar.getName());
} else if (star != null) {
// Optional parameter after * is also keyword-only.
numKeywordOnlyParams++;
}
bindParam(params, param);
} else if (param instanceof Parameter.Star) {
// * or *args
if (starStar != null) {
errorf(param, "* parameter may not follow **%s", starStar.getName());
} else if (star != null) {
errorf(param, "multiple * parameters not allowed");
} else {
star = (Parameter.Star) param;
}
} else {
// **kwargs
if (starStar != null) {
errorf(param, "multiple ** parameters not allowed");
}
starStar = (Parameter.StarStar) param;
}
}
// * or *args
// Bound last so run-time order is: positionals, keyword-only, *args, **kwargs.
if (star != null) {
if (star.getIdentifier() != null) {
bindParam(params, star);
} else if (numKeywordOnlyParams == 0) {
// A bare * is only a separator; it requires keyword-only params after it.
errorf(star, "bare * must be followed by keyword-only parameters");
}
}
// **kwargs
if (starStar != null) {
bindParam(params, starStar);
}
// Declare then resolve the body's own bindings inside the function block.
createBindings(body);
visitAll(body);
closeBlock();
return new Function(
name,
loc,
params.build(),
body,
star != null && star.getIdentifier() != null,
starStar != null,
numKeywordOnlyParams);
}
/** Binds one parameter name in the current function block and records it. */
private void bindParam(ImmutableList.Builder<Parameter> params, Parameter param) {
  // bind() reports true when this name was already bound in the block.
  boolean alreadyBound = bind(param.getIdentifier());
  if (alreadyBound) {
    errorf(param, "duplicate parameter: %s", param.getName());
  }
  params.add(param);
}
@Override
// Rejects top-level if statements (Starlark restricts them to function
// bodies), then resolves the condition and branches as usual.
public void visit(IfStatement node) {
if (block.scope != Scope.LOCAL) {
errorf(
node,
"if statements are not allowed at the top level. You may move it inside a function "
+ "or use an if expression (x if condition else y).");
}
super.visit(node);
}
@Override
public void visit(AssignmentStatement node) {
  // Resolve the right-hand side first; it is evaluated before the target.
  visit(node.getRHS());
  // Reject the form `[e, ...] += rhs` here; other invalid targets are
  // reported by assign().
  boolean augmentedListTarget = node.isAugmented() && node.getLHS() instanceof ListExpression;
  if (augmentedListTarget) {
    errorf(
        node.getOperatorLocation(),
        "cannot perform augmented assignment on a list or tuple expression");
  }
  assign(node.getLHS());
}
/**
 * Processes a binding use of a name: adds a binding to the current block if
 * the name is not already bound there, and associates the identifier with the
 * binding. Returns whether the name was already bound in this block.
 */
private boolean bind(Identifier id) {
  String name = id.getName();
  Binding existing = block.bindings.get(name);
  if (existing != null) {
    // Symbols defined in the module block cannot be reassigned.
    if (block.scope == Scope.GLOBAL && !options.allowToplevelRebinding()) {
      errorf(
          id,
          "cannot reassign global '%s' (read more at"
              + " https://bazel.build/versions/master/docs/skylark/errors/read-only-variable.html)",
          name);
      if (existing.first != null) {
        errorf(existing.first, "'%s' previously declared here", name);
      }
    }
    if (options.recordScope()) {
      id.setBinding(existing);
    }
    return true;
  }
  // TODO(adonovan): accumulate locals in the enclosing function/file block.
  Binding fresh = new Binding(block.scope, id, block.bindings.size());
  block.bindings.put(name, fresh);
  if (options.recordScope()) {
    id.setBinding(fresh);
  }
  return false;
}
/** Returns the set of all accessible symbols (both local and global) */
private Set<String> getAllSymbols() {
  Set<String> symbols = new HashSet<>();
  // Walk the scope chain from innermost block to the root.
  Block current = block;
  while (current != null) {
    symbols.addAll(current.bindings.keySet());
    current = current.parent;
  }
  return symbols;
}
// Report an error if a load statement appears after another kind of statement.
private void checkLoadAfterStatement(List<Statement> statements) {
  // First statement that is neither a load nor a docstring, if any.
  Statement firstNonLoad = null;
  for (Statement statement : statements) {
    // String literal expression statements (e.g. docstrings) are exempt.
    boolean isStringLiteral =
        statement instanceof ExpressionStatement
            && ((ExpressionStatement) statement).getExpression() instanceof StringLiteral;
    if (isStringLiteral) {
      continue;
    }
    if (statement instanceof LoadStatement) {
      if (firstNonLoad != null) {
        errorf(statement, "load statements must appear before any other statement");
        errorf(firstNonLoad, "\tfirst non-load statement appears here");
      }
      continue; // a load never counts as the "first statement"
    }
    if (firstNonLoad == null) {
      firstNonLoad = statement;
    }
  }
}
/** Resolves the top-level statements of a file in a fresh GLOBAL block. */
private void resolveToplevelStatements(List<Statement> statements) {
  // Optionally enforce that load() statements precede all other statements.
  if (options.requireLoadStatementsFirst()) {
    checkLoadAfterStatement(statements);
  }
  openBlock(Scope.GLOBAL);
  // Pass 1: declare every variable bound directly by these statements,
  // excluding definitions inside sub-scopes (function bodies, comprehensions).
  createBindings(statements);
  // Pass 2: resolve all uses against the declared bindings.
  visitAll(statements);
  closeBlock();
}
/**
 * Performs static checks, including resolution of identifiers in {@code file} in the environment
 * defined by {@code module}. The StarlarkFile is mutated. Errors are appended to {@link
 * StarlarkFile#errors}.
 */
public static void resolveFile(StarlarkFile file, Module module) {
  Resolver resolver = new Resolver(file.errors, module, file.getOptions());
  ImmutableList<Statement> body = file.getStatements();
  resolver.resolveToplevelStatements(body);
  // Sanity check: every openBlock was matched by a closeBlock.
  Preconditions.checkState(resolver.block.parent == null);
  // A trailing expression statement becomes the toplevel function's result:
  // replace it with a synthesized return statement.
  int n = body.size();
  if (n > 0 && body.get(n - 1) instanceof ExpressionStatement) {
    Expression expr = ((ExpressionStatement) body.get(n - 1)).getExpression();
    body =
        ImmutableList.<Statement>builderWithExpectedSize(n)
            .addAll(body.subList(0, n - 1))
            .add(ReturnStatement.make(expr))
            .build();
  }
  // Annotate the file with resolved information about the toplevel function.
  file.resolved =
      new Function(
          "<toplevel>",
          file.getStartLocation(),
          /*params=*/ ImmutableList.of(),
          /*body=*/ body,
          /*hasVarargs=*/ false,
          /*hasKwargs=*/ false,
          /*numKeywordOnlyParams=*/ 0);
}
/**
 * Performs static checks, including resolution of identifiers in {@code expr} in the environment
 * defined by {@code module}. This operation mutates the Expression.
 */
static Function resolveExpr(Expression expr, Module module, FileOptions options)
    throws SyntaxError.Exception {
  List<SyntaxError> errors = new ArrayList<>();
  new Resolver(errors, module, options).visit(expr);
  if (!errors.isEmpty()) {
    throw new SyntaxError.Exception(errors);
  }
  // Wrap the expression in a synthetic no-argument function that returns it.
  return new Function(
      "<expr>",
      expr.getStartLocation(),
      /*params=*/ ImmutableList.of(),
      ImmutableList.of(ReturnStatement.make(expr)),
      /*hasVarargs=*/ false,
      /*hasKwargs=*/ false,
      /*numKeywordOnlyParams=*/ 0);
}
/** Open a new lexical block that will contain the future declarations. */
private void openBlock(Scope scope) {
// The new block chains to the current one, forming the scope stack.
block = new Block(scope, block);
}
/** Close a lexical block (and lose all declarations it contained). */
private void closeBlock() {
// Popping past the outermost block would be a programming error.
block = Preconditions.checkNotNull(block.parent);
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.indices.store;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.cluster.ClusterChangedEvent;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ClusterStateListener;
import org.elasticsearch.cluster.ClusterStateObserver;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.routing.IndexRoutingTable;
import org.elasticsearch.cluster.routing.IndexShardRoutingTable;
import org.elasticsearch.cluster.routing.RoutingNode;
import org.elasticsearch.cluster.routing.RoutingTable;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.IndexShardState;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportChannel;
import org.elasticsearch.transport.TransportException;
import org.elasticsearch.transport.TransportRequest;
import org.elasticsearch.transport.TransportRequestHandler;
import org.elasticsearch.transport.TransportResponse;
import org.elasticsearch.transport.TransportResponseHandler;
import org.elasticsearch.transport.TransportService;
import java.io.Closeable;
import java.io.IOException;
import java.util.ArrayList;
import java.util.EnumSet;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
/**
 * Listens for cluster state changes on data nodes and deletes local shard
 * data for shard groups that are fully started on other nodes, after
 * confirming with every node that holds a copy that its copy is active.
 */
public class IndicesStore extends AbstractComponent implements ClusterStateListener, Closeable {
// TODO this class can be folded into either IndicesService and partially into IndicesClusterStateService there is no need for a separate public service
public static final Setting<TimeValue> INDICES_STORE_DELETE_SHARD_TIMEOUT =
Setting.positiveTimeSetting("indices.store.delete.shard.timeout", new TimeValue(30, TimeUnit.SECONDS),
Property.NodeScope);
public static final String ACTION_SHARD_EXISTS = "internal:index/shard/exists";
// Shard states that count as "active" for the shard-exists check.
private static final EnumSet<IndexShardState> ACTIVE_STATES = EnumSet.of(IndexShardState.STARTED, IndexShardState.RELOCATED);
private final IndicesService indicesService;
private final ClusterService clusterService;
private final TransportService transportService;
private final ThreadPool threadPool;
// Cache successful shard deletion checks to prevent unnecessary file system lookups
// NOTE(review): mutated only from clusterChanged(); presumably single-threaded
// on the cluster-state applier thread — confirm before accessing elsewhere.
private final Set<ShardId> folderNotFoundCache = new HashSet<>();
private TimeValue deleteShardTimeout;
@Inject
public IndicesStore(Settings settings, IndicesService indicesService,
ClusterService clusterService, TransportService transportService, ThreadPool threadPool) {
super(settings);
this.indicesService = indicesService;
this.clusterService = clusterService;
this.transportService = transportService;
this.threadPool = threadPool;
// Answer "is this shard active here?" requests from other nodes.
transportService.registerRequestHandler(ACTION_SHARD_EXISTS, ShardActiveRequest::new, ThreadPool.Names.SAME, new ShardActiveRequestHandler());
this.deleteShardTimeout = INDICES_STORE_DELETE_SHARD_TIMEOUT.get(settings);
// Doesn't make sense to delete shards on non-data nodes
if (DiscoveryNode.isDataNode(settings)) {
// we double check nothing has changed when responses come back from other nodes.
// it's easier to do that check when the current cluster state is visible.
// also it's good in general to let things settle down
clusterService.addListener(this);
}
}
@Override
public void close() {
// Only data nodes registered a listener in the constructor.
if (DiscoveryNode.isDataNode(settings)) {
clusterService.removeListener(this);
}
}
// Scans the routing table for shard groups whose local data can be deleted
// and kicks off the cross-node "shard active" confirmation for each.
@Override
public void clusterChanged(ClusterChangedEvent event) {
if (!event.routingTableChanged()) {
return;
}
if (event.state().blocks().disableStatePersistence()) {
return;
}
RoutingTable routingTable = event.state().routingTable();
// remove entries from cache that don't exist in the routing table anymore (either closed or deleted indices)
// - removing shard data of deleted indices is handled by IndicesClusterStateService
// - closed indices don't need to be removed from the cache but we do it anyway for code simplicity
for (Iterator<ShardId> it = folderNotFoundCache.iterator(); it.hasNext(); ) {
ShardId shardId = it.next();
if (routingTable.hasIndex(shardId.getIndex()) == false) {
it.remove();
}
}
// remove entries from cache which are allocated to this node
final String localNodeId = event.state().nodes().getLocalNodeId();
RoutingNode localRoutingNode = event.state().getRoutingNodes().node(localNodeId);
if (localRoutingNode != null) {
for (ShardRouting routing : localRoutingNode) {
folderNotFoundCache.remove(routing.shardId());
}
}
for (IndexRoutingTable indexRoutingTable : routingTable) {
// Note, closed indices will not have any routing information, so won't be deleted
for (IndexShardRoutingTable indexShardRoutingTable : indexRoutingTable) {
ShardId shardId = indexShardRoutingTable.shardId();
if (folderNotFoundCache.contains(shardId) == false && shardCanBeDeleted(localNodeId, indexShardRoutingTable)) {
IndexService indexService = indicesService.indexService(indexRoutingTable.getIndex());
final IndexSettings indexSettings;
if (indexService == null) {
// Index not open locally: reconstruct settings from cluster metadata.
IndexMetaData indexMetaData = event.state().getMetaData().getIndexSafe(indexRoutingTable.getIndex());
indexSettings = new IndexSettings(indexMetaData, settings);
} else {
indexSettings = indexService.getIndexSettings();
}
IndicesService.ShardDeletionCheckResult shardDeletionCheckResult = indicesService.canDeleteShardContent(shardId, indexSettings);
switch (shardDeletionCheckResult) {
case FOLDER_FOUND_CAN_DELETE:
deleteShardIfExistElseWhere(event.state(), indexShardRoutingTable);
break;
case NO_FOLDER_FOUND:
// Remember that there is nothing on disk to avoid repeated FS lookups.
folderNotFoundCache.add(shardId);
break;
case NO_LOCAL_STORAGE:
assert false : "shard deletion only runs on data nodes which always have local storage";
// nothing to do
break;
case STILL_ALLOCATED:
// nothing to do
break;
default:
assert false : "unknown shard deletion check result: " + shardDeletionCheckResult;
}
}
}
}
}
// Returns true when every copy of the shard group is started and none of
// them is allocated to this node — the preconditions for deleting local data.
static boolean shardCanBeDeleted(String localNodeId, IndexShardRoutingTable indexShardRoutingTable) {
// a shard can be deleted if all its copies are active, and it's not allocated on this node
if (indexShardRoutingTable.size() == 0) {
// should not really happen, there should always be at least 1 (primary) shard in a
// shard replication group, in any case, protected from deleting something by mistake
return false;
}
for (ShardRouting shardRouting : indexShardRoutingTable) {
// be conservative here, check on started, not even active
if (shardRouting.started() == false) {
return false;
}
// check if shard is active on the current node
if (localNodeId.equals(shardRouting.currentNodeId())) {
return false;
}
}
return true;
}
// Asks every node that holds a copy of the shard group whether its copy is
// active; the response handler deletes local data only if all copies confirm.
private void deleteShardIfExistElseWhere(ClusterState state, IndexShardRoutingTable indexShardRoutingTable) {
List<Tuple<DiscoveryNode, ShardActiveRequest>> requests = new ArrayList<>(indexShardRoutingTable.size());
String indexUUID = indexShardRoutingTable.shardId().getIndex().getUUID();
ClusterName clusterName = state.getClusterName();
for (ShardRouting shardRouting : indexShardRoutingTable) {
assert shardRouting.started() : "expected started shard but was " + shardRouting;
DiscoveryNode currentNode = state.nodes().get(shardRouting.currentNodeId());
requests.add(new Tuple<>(currentNode, new ShardActiveRequest(clusterName, indexUUID, shardRouting.shardId(), deleteShardTimeout)));
}
ShardActiveResponseHandler responseHandler = new ShardActiveResponseHandler(indexShardRoutingTable.shardId(), state.getVersion(),
requests.size());
for (Tuple<DiscoveryNode, ShardActiveRequest> request : requests) {
logger.trace("{} sending shard active check to {}", request.v2().shardId, request.v1());
transportService.sendRequest(request.v1(), ACTION_SHARD_EXISTS, request.v2(), responseHandler);
}
}
// Collects shard-active answers from all copies; when every copy confirmed
// active and the cluster state has not moved on, deletes the local shard store.
private class ShardActiveResponseHandler implements TransportResponseHandler<ShardActiveResponse> {
private final ShardId shardId;
private final int expectedActiveCopies;
// Cluster state version captured when the requests were sent; deletion is
// aborted if the state changes in the meantime.
private final long clusterStateVersion;
private final AtomicInteger awaitingResponses;
private final AtomicInteger activeCopies;
ShardActiveResponseHandler(ShardId shardId, long clusterStateVersion, int expectedActiveCopies) {
this.shardId = shardId;
this.expectedActiveCopies = expectedActiveCopies;
this.clusterStateVersion = clusterStateVersion;
this.awaitingResponses = new AtomicInteger(expectedActiveCopies);
this.activeCopies = new AtomicInteger();
}
@Override
public ShardActiveResponse newInstance() {
return new ShardActiveResponse();
}
@Override
public void handleResponse(ShardActiveResponse response) {
logger.trace("{} is {}active on node {}", shardId, response.shardActive ? "" : "not ", response.node);
if (response.shardActive) {
activeCopies.incrementAndGet();
}
if (awaitingResponses.decrementAndGet() == 0) {
allNodesResponded();
}
}
@Override
public void handleException(TransportException exp) {
// A failed request counts as a response (but not as an active copy),
// so deletion will be skipped when copies cannot be confirmed.
logger.debug((Supplier<?>) () -> new ParameterizedMessage("shards active request failed for {}", shardId), exp);
if (awaitingResponses.decrementAndGet() == 0) {
allNodesResponded();
}
}
@Override
public String executor() {
return ThreadPool.Names.SAME;
}
// Runs once after the last response/failure arrives; deletes the local
// store only if all copies are active and the cluster state is unchanged.
private void allNodesResponded() {
if (activeCopies.get() != expectedActiveCopies) {
logger.trace("not deleting shard {}, expected {} active copies, but only {} found active copies", shardId, expectedActiveCopies, activeCopies.get());
return;
}
ClusterState latestClusterState = clusterService.state();
if (clusterStateVersion != latestClusterState.getVersion()) {
logger.trace("not deleting shard {}, the latest cluster state version[{}] is not equal to cluster state before shard active api call [{}]", shardId, latestClusterState.getVersion(), clusterStateVersion);
return;
}
// Re-check the version on the applier thread to close the race between
// the check above and the actual deletion.
clusterService.getClusterApplierService().runOnApplierThread("indices_store ([" + shardId + "] active fully on other nodes)",
currentState -> {
if (clusterStateVersion != currentState.getVersion()) {
logger.trace("not deleting shard {}, the update task state version[{}] is not equal to cluster state before shard active api call [{}]", shardId, currentState.getVersion(), clusterStateVersion);
return;
}
try {
indicesService.deleteShardStore("no longer used", shardId, currentState);
} catch (Exception ex) {
logger.debug((Supplier<?>) () -> new ParameterizedMessage("{} failed to delete unallocated shard, ignoring", shardId), ex);
}
},
(source, e) -> logger.error((Supplier<?>) () -> new ParameterizedMessage("{} unexpected error during deletion of unallocated shard", shardId), e)
);
}
}
// Answers "is this shard active on this node?" requests, waiting for the
// next cluster state change when the shard exists but is not yet active.
private class ShardActiveRequestHandler implements TransportRequestHandler<ShardActiveRequest> {
@Override
public void messageReceived(final ShardActiveRequest request, final TransportChannel channel) throws Exception {
IndexShard indexShard = getShard(request);
// make sure shard is really there before register cluster state observer
if (indexShard == null) {
channel.sendResponse(new ShardActiveResponse(false, clusterService.localNode()));
} else {
// create observer here. we need to register it here because we need to capture the current cluster state
// which will then be compared to the one that is applied when we call waitForNextChange(). if we create it
// later we might miss an update and wait forever in case no new cluster state comes in.
// in general, using a cluster state observer here is a workaround for the fact that we cannot listen on shard state changes explicitly.
// instead we wait for the cluster state changes because we know any shard state change will trigger or be
// triggered by a cluster state change.
ClusterStateObserver observer = new ClusterStateObserver(clusterService, request.timeout, logger, threadPool.getThreadContext());
// check if shard is active. if so, all is good
boolean shardActive = shardActive(indexShard);
if (shardActive) {
channel.sendResponse(new ShardActiveResponse(true, clusterService.localNode()));
} else {
// shard is not active, might be POST_RECOVERY so check if cluster state changed inbetween or wait for next change
observer.waitForNextChange(new ClusterStateObserver.Listener() {
@Override
public void onNewClusterState(ClusterState state) {
sendResult(shardActive(getShard(request)));
}
@Override
public void onClusterServiceClose() {
sendResult(false);
}
@Override
public void onTimeout(TimeValue timeout) {
sendResult(shardActive(getShard(request)));
}
public void sendResult(boolean shardActive) {
try {
channel.sendResponse(new ShardActiveResponse(shardActive, clusterService.localNode()));
} catch (IOException e) {
logger.error((Supplier<?>) () -> new ParameterizedMessage("failed send response for shard active while trying to delete shard {} - shard will probably not be removed", request.shardId), e);
} catch (EsRejectedExecutionException e) {
logger.error((Supplier<?>) () -> new ParameterizedMessage("failed send response for shard active while trying to delete shard {} - shard will probably not be removed", request.shardId), e);
}
}
}, newState -> {
// the shard is not there in which case we want to send back a false (shard is not active), so the cluster state listener must be notified
// or the shard is active in which case we want to send back that the shard is active
// here we could also evaluate the cluster state and get the information from there. we
// don't do it because we would have to write another method for this that would have the same effect
IndexShard currentShard = getShard(request);
return currentShard == null || shardActive(currentShard);
});
}
}
}
// Returns whether the given shard (possibly null) is STARTED or RELOCATED.
private boolean shardActive(IndexShard indexShard) {
if (indexShard != null) {
return ACTIVE_STATES.contains(indexShard.state());
}
return false;
}
// Resolves the request to a local IndexShard, or null when the request is
// for a different cluster, a different index incarnation (UUID mismatch),
// or the shard/index is not present on this node.
private IndexShard getShard(ShardActiveRequest request) {
ClusterName thisClusterName = clusterService.getClusterName();
if (!thisClusterName.equals(request.clusterName)) {
logger.trace("shard exists request meant for cluster[{}], but this is cluster[{}], ignoring request", request.clusterName, thisClusterName);
return null;
}
ShardId shardId = request.shardId;
IndexService indexService = indicesService.indexService(shardId.getIndex());
if (indexService != null && indexService.indexUUID().equals(request.indexUUID)) {
return indexService.getShardOrNull(shardId.id());
}
return null;
}
}
// Wire request asking a node whether a given shard is active there.
private static class ShardActiveRequest extends TransportRequest {
protected TimeValue timeout = null;
private ClusterName clusterName;
private String indexUUID;
private ShardId shardId;
ShardActiveRequest() {
}
ShardActiveRequest(ClusterName clusterName, String indexUUID, ShardId shardId, TimeValue timeout) {
this.shardId = shardId;
this.indexUUID = indexUUID;
this.clusterName = clusterName;
this.timeout = timeout;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
clusterName = new ClusterName(in);
indexUUID = in.readString();
shardId = ShardId.readShardId(in);
timeout = new TimeValue(in.readLong(), TimeUnit.MILLISECONDS);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
clusterName.writeTo(out);
out.writeString(indexUUID);
shardId.writeTo(out);
out.writeLong(timeout.millis());
}
}
// Wire response carrying whether the shard is active and on which node.
private static class ShardActiveResponse extends TransportResponse {
private boolean shardActive;
private DiscoveryNode node;
ShardActiveResponse() {
}
ShardActiveResponse(boolean shardActive, DiscoveryNode node) {
this.shardActive = shardActive;
this.node = node;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
shardActive = in.readBoolean();
node = new DiscoveryNode(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeBoolean(shardActive);
node.writeTo(out);
}
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gora.store;
import java.io.IOException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.gora.GoraTestDriver;
import org.apache.gora.examples.generated.Employee;
import org.apache.gora.examples.generated.WebPage;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
/**
* A base class for {@link DataStore} tests. This is just a convenience
* class, which actually only uses {@link DataStoreTestUtil} methods to
* run the tests. Not all test cases can extend this class (like TestHBaseStore),
* so all test logic should reside in DataStoreTestUtil class.
*/
public abstract class DataStoreTestBase {
public static final Logger log = LoggerFactory.getLogger(DataStoreTestBase.class);
// Shared driver that manages the backing datastore lifecycle; installed once
// by the concrete subclass via setTestDriver() from a static block.
protected static GoraTestDriver testDriver;
// Stores under test; recreated before every test method in setUp().
protected DataStore<String,Employee> employeeStore;
protected DataStore<String,WebPage> webPageStore;
// Legacy store factories, used only when no test driver is configured.
@Deprecated
protected abstract DataStore<String,Employee> createEmployeeDataStore() throws IOException;
@Deprecated
protected abstract DataStore<String,WebPage> createWebPageDataStore() throws IOException;
/**
 * Installs the driver used by all lifecycle methods. JUnit forces
 * {@code @BeforeClass} methods to be static, so subclasses should call this
 * from a static initializer block.
 */
protected static void setTestDriver(GoraTestDriver driver) {
testDriver = driver;
}
// Guards the one-time driver initialization; also consulted by the runner
// workaround in setUp() below.
private static boolean setUpClassCalled = false;
@BeforeClass
public static void setUpClass() throws Exception {
// Run the driver's one-time initialization exactly once per JVM.
if(testDriver != null && !setUpClassCalled) {
log.info("setting up class");
testDriver.setUpClass();
setUpClassCalled = true;
}
}
@AfterClass
public static void tearDownClass() throws Exception {
  // Release driver-held resources once all tests in the class have run.
  if (testDriver == null) {
    return;
  }
  log.info("tearing down class");
  testDriver.tearDownClass();
}
@Before
public void setUp() throws Exception {
  // Workaround: some runners (JUnit 4 in Eclipse with TestSqlStore) invoke
  // @Before without having run @BeforeClass first, so trigger it manually.
  if (!setUpClassCalled) {
    setUpClass();
  }
  log.info("setting up test");
  if (testDriver == null) {
    // Legacy path: the subclass supplies and resets the stores itself.
    employeeStore = createEmployeeDataStore();
    webPageStore = createWebPageDataStore();
    employeeStore.truncateSchema();
    webPageStore.truncateSchema();
  } else {
    employeeStore = testDriver.createDataStore(String.class, Employee.class);
    webPageStore = testDriver.createDataStore(String.class, WebPage.class);
    testDriver.setUp();
  }
}
@After
public void tearDown() throws Exception {
  log.info("tearing down test");
  if (testDriver == null) {
    return;
  }
  testDriver.tearDown();
  // NOTE(review): the stores are deliberately not closed here — presumably
  // the driver owns their lifecycle; confirm before adding close() calls.
}
@Test
// Delegates to the shared utility so non-subclass tests can reuse the logic.
public void testNewInstance() throws Exception {
log.info("test method: testNewInstance");
DataStoreTestUtil.testNewPersistent(employeeStore);
}
@Test
// Creates the Employee schema, then lets the subclass verify it exists.
public void testCreateSchema() throws Exception {
log.info("test method: testCreateSchema");
DataStoreTestUtil.testCreateEmployeeSchema(employeeStore);
assertSchemaExists("Employee");
}
// Override this to assert that schema is created correctly
// (no-op by default; store-specific subclasses check their own backend).
public void assertSchemaExists(String schemaName) throws Exception {
}
@Test
// Verifies that putting data auto-creates the schema when it is missing.
public void testAutoCreateSchema() throws Exception {
log.info("test method: testAutoCreateSchema");
DataStoreTestUtil.testAutoCreateSchema(employeeStore);
assertAutoCreateSchema();
}
// Default verification: the auto-created Employee schema should now exist.
public void assertAutoCreateSchema() throws Exception {
assertSchemaExists("Employee");
}
@Test
// Truncating must empty the data while leaving the schema in place.
public void testTruncateSchema() throws Exception {
log.info("test method: testTruncateSchema");
DataStoreTestUtil.testTruncateSchema(webPageStore);
assertSchemaExists("WebPage");
}
@Test
// Delegates schema-deletion verification to the shared utility.
public void testDeleteSchema() throws Exception {
log.info("test method: testDeleteSchema");
DataStoreTestUtil.testDeleteSchema(webPageStore);
}
@Test
// Delegates schemaExists() round-trip verification to the shared utility.
public void testSchemaExists() throws Exception {
log.info("test method: testSchemaExists");
DataStoreTestUtil.testSchemaExists(webPageStore);
}
@Test
// Puts an employee, then lets the subclass verify the persisted record.
public void testPut() throws Exception {
log.info("test method: testPut");
Employee employee = DataStoreTestUtil.testPutEmployee(employeeStore);
assertPut(employee);
}
/** Hook for subclasses to verify the employee written by {@link #testPut}; no-op by default. */
public void assertPut(Employee employee) throws IOException {
}
/** Delegates to {@link DataStoreTestUtil#testPutNested} using the web page store. */
@Test
public void testPutNested() throws Exception {
log.info("test method: testPutNested");
DataStoreTestUtil.testPutNested(webPageStore);
}
/** Puts a record containing an array field, then lets subclasses verify it. */
@Test
public void testPutArray() throws Exception {
log.info("test method: testPutArray");
DataStoreTestUtil.testPutArray(webPageStore);
assertPutArray();
}
/** Hook for subclasses to verify the array written by {@link #testPutArray}; no-op by default. */
public void assertPutArray() throws IOException {
}
/** Puts a record containing a bytes field, then lets subclasses verify the stored content. */
@Test
public void testPutBytes() throws Exception {
log.info("test method: testPutBytes");
byte[] contentBytes = DataStoreTestUtil.testPutBytes(webPageStore);
assertPutBytes(contentBytes);
}
/** Hook for subclasses to verify the bytes written by {@link #testPutBytes}; no-op by default. */
public void assertPutBytes(byte[] contentBytes) throws IOException {
}
/** Puts a record containing a map field, then lets subclasses verify it. */
@Test
public void testPutMap() throws Exception {
log.info("test method: testPutMap");
DataStoreTestUtil.testPutMap(webPageStore);
assertPutMap();
}
/** Hook for subclasses to verify the map written by {@link #testPutMap}; no-op by default. */
public void assertPutMap() throws IOException {
}
/** Puts map entries whose values span different union data types. */
@Test
public void testPutMixedMaps() throws Exception {
log.info("Testing put of map objects with different union data types");
DataStoreTestUtil.testPutMixedMapTypes(webPageStore);
}
/**
 * Exercises the update paths: scalar fields, appending to an array, putting to
 * nullable/not-nullable maps, removing a map entry, and removing a field.
 */
@Test
public void testUpdate() throws Exception {
log.info("test method: testUpdate");
DataStoreTestUtil.testUpdateEmployee(employeeStore);
DataStoreTestUtil.testUpdateWebPagePutToArray(webPageStore);
DataStoreTestUtil.testUpdateWebPagePutToNotNullableMap(webPageStore);
DataStoreTestUtil.testUpdateWebPagePutToNullableMap(webPageStore);
DataStoreTestUtil.testUpdateWebPageRemoveMapEntry(webPageStore);
DataStoreTestUtil.testUpdateWebPageRemoveField(webPageStore);
}
/**
 * Delegates to {@link DataStoreTestUtil#testEmptyUpdateEmployee}.
 * NOTE(review): unlike every sibling test this method has no {@code @Test}
 * annotation, so JUnit never runs it — confirm whether it was deliberately
 * disabled before adding the annotation.
 */
public void testEmptyUpdate() throws Exception {
DataStoreTestUtil.testEmptyUpdateEmployee(employeeStore);
}
/** Delegates to {@link DataStoreTestUtil#testGetEmployee} using the employee store. */
@Test
public void testGet() throws Exception {
log.info("test method: testGet");
DataStoreTestUtil.testGetEmployee(employeeStore);
}
/**
 * Tests put and get of a record with a nested recursive record:
 * an Employee with a boss (nested Employee).
 *
 * @throws Exception if storing or retrieving the record fails
 */
@Test
public void testGetRecursive() throws Exception {
log.info("test method: testGetRecursive");
DataStoreTestUtil.testGetEmployeeRecursive(employeeStore);
}
/**
 * Tests put and get of a record with a doubly nested recursive record:
 * an Employee with a boss who in turn has a boss.
 *
 * @throws Exception if storing or retrieving the record fails
 */
@Test
public void testGetDoubleRecursive() throws Exception {
log.info("test method: testGetDoubleRecursive");
DataStoreTestUtil.testGetEmployeeDoubleRecursive(employeeStore);
}
/**
 * Tests put and get of an {@link org.apache.gora.examples.generated.Employee}
 * record with a nested {@link org.apache.gora.examples.generated.WebPage}
 * record (not recursive): the webpage of an Employee.
 *
 * @throws Exception if storing or retrieving the record fails
 */
@Test
public void testGetNested() throws Exception {
log.info("test method: testGetNested");
DataStoreTestUtil.testGetEmployeeNested(employeeStore);
}
/**
 * Tests put and get of a record with a three-type union field whose value is
 * of the third type.
 *
 * @throws Exception if storing or retrieving the record fails
 */
@Test
public void testGet3UnionField() throws Exception {
log.info("test method: testGet3UnionField");
DataStoreTestUtil.testGetEmployee3UnionField(employeeStore);
}
/** Delegates to {@link DataStoreTestUtil#testGetEmployeeWithFields} (field-projected get). */
@Test
public void testGetWithFields() throws Exception {
log.info("test method: testGetWithFields");
DataStoreTestUtil.testGetEmployeeWithFields(employeeStore);
}
/** Delegates to {@link DataStoreTestUtil#testGetWebPage} using the web page store. */
@Test
public void testGetWebPage() throws Exception {
log.info("test method: testGetWebPage");
DataStoreTestUtil.testGetWebPage(webPageStore);
}
/** Delegates to {@link DataStoreTestUtil#testGetWebPageDefaultFields}. */
@Test
public void testGetWebPageDefaultFields() throws Exception {
log.info("test method: testGetWebPageDefaultFields");
DataStoreTestUtil.testGetWebPageDefaultFields(webPageStore);
}
/** Verifies behavior when getting a key that was never stored. */
@Test
public void testGetNonExisting() throws Exception {
log.info("test method: testGetNonExisting");
DataStoreTestUtil.testGetEmployeeNonExisting(employeeStore);
}
/** Delegates to {@link DataStoreTestUtil#testQueryWebPages} using the web page store. */
@Test
public void testQuery() throws Exception {
log.info("test method: testQuery");
DataStoreTestUtil.testQueryWebPages(webPageStore);
}
/** Queries web pages bounded by a start key only. */
@Test
public void testQueryStartKey() throws Exception {
log.info("test method: testQueryStartKey");
DataStoreTestUtil.testQueryWebPageStartKey(webPageStore);
}
/** Queries web pages bounded by an end key only. */
@Test
public void testQueryEndKey() throws Exception {
log.info("test method: testQueryEndKey");
DataStoreTestUtil.testQueryWebPageEndKey(webPageStore);
}
/** Queries web pages bounded by both a start and an end key. */
@Test
public void testQueryKeyRange() throws Exception {
// fixed log message: previously logged the typo "testQueryKetRange"
log.info("test method: testQueryKeyRange");
DataStoreTestUtil.testQueryWebPageKeyRange(webPageStore);
}
/** Queries for exactly one web page by its key. */
@Test
public void testQueryWebPageSingleKey() throws Exception {
log.info("test method: testQueryWebPageSingleKey");
DataStoreTestUtil.testQueryWebPageSingleKey(webPageStore);
}
/** Queries a single web page with the default field projection. */
@Test
public void testQueryWebPageSingleKeyDefaultFields() throws Exception {
// fixed log message to match the method name (was "testQuerySingleKeyDefaultFields")
log.info("test method: testQueryWebPageSingleKeyDefaultFields");
DataStoreTestUtil.testQueryWebPageSingleKeyDefaultFields(webPageStore);
}
/** Verifies that a query matching nothing returns an empty result set. */
@Test
public void testQueryWebPageQueryEmptyResults() throws Exception {
// fixed log message to match the method name (was "testQueryEmptyResults")
log.info("test method: testQueryWebPageQueryEmptyResults");
DataStoreTestUtil.testQueryWebPageEmptyResults(webPageStore);
}
/** Delegates to {@link DataStoreTestUtil#testDelete} using the web page store. */
@Test
public void testDelete() throws Exception {
log.info("test method: testDelete");
DataStoreTestUtil.testDelete(webPageStore);
}
/** Deletes all records matched by a query. */
@Test
public void testDeleteByQuery() throws Exception {
log.info("test method: testDeleteByQuery");
DataStoreTestUtil.testDeleteByQuery(webPageStore);
}
/** Deletes only the projected fields of records matched by a query. */
@Test
public void testDeleteByQueryFields() throws Exception {
// fixed log message: previously logged "testQueryByQueryFields"
log.info("test method: testDeleteByQueryFields");
DataStoreTestUtil.testDeleteByQueryFields(webPageStore);
}
/** Delegates to {@link DataStoreTestUtil#testGetPartitions} using the web page store. */
@Test
public void testGetPartitions() throws Exception {
log.info("test method: testGetPartitions");
DataStoreTestUtil.testGetPartitions(webPageStore);
}
}
| |
package io.itch.frogcheese.sharecart;
import io.itch.frogcheese.sharecart._test.Constants;
import io.itch.frogcheese.sharecart.error.SharecartFormatException;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.io.File;
import java.io.FileNotFoundException;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.failBecauseExceptionWasNotThrown;
public class SharecartFileReaderTest {

    private static final String TEST_RESOURCES_PATH = Constants.TEST_RESOURCES_PATH;

    private File correctFile;
    private File missingParameterFile;
    private File constraintFailureFile;
    private File invalidParamDefinitionFile;
    private File invalidParamNameFile;
    private File invalidFile;
    private SharecartFileReader reader;

    @Before
    public void setUp() throws Exception {
        correctFile = resource("sharecart_correct.ini");
        missingParameterFile = resource("sharecart_missing_parameter.ini");
        constraintFailureFile = resource("sharecart_constraint_failure.ini");
        invalidParamDefinitionFile = resource("sharecart_invalid_parameter_definition.ini");
        invalidParamNameFile = resource("sharecart_invalid_parameter_name.ini");
        invalidFile = resource("invalidFile");
    }

    @After
    public void tearDown() throws Exception {
        if (reader != null)
            reader.close();
        reader = null;
    }

    /** Resolves a file inside the shared test-resources directory. */
    private static File resource(String name) {
        return new File(TEST_RESOURCES_PATH, name);
    }

    /** Reads {@code file} with the requested strictness through a throwaway reader. */
    private Sharecart readFile(File file, boolean strict) throws Exception {
        SharecartFileReader fileReader = new SharecartFileReader(file);
        fileReader.setIsStrict(strict);
        return fileReader.read();
    }

    /** Asserts that a strict read of {@code file} fails with exactly {@code expectedMessage}. */
    private void assertStrictReadFailsWith(File file, String expectedMessage) throws Exception {
        SharecartFileReader fileReader = new SharecartFileReader(file);
        fileReader.setIsStrict(true);
        try {
            fileReader.read();
            failBecauseExceptionWasNotThrown(SharecartFormatException.class);
        } catch (SharecartFormatException e) {
            assertThat(e).hasMessage(expectedMessage);
        }
    }

    @Test
    public void testPreconditions() throws Exception {
        assertThat(correctFile).isFile();
        assertThat(missingParameterFile).isFile();
        assertThat(constraintFailureFile).isFile();
        assertThat(invalidParamDefinitionFile).isFile();
        assertThat(invalidParamNameFile).isFile();
    }

    @Test
    public void testConstruct_rejects_null() throws Exception {
        try {
            new SharecartFileReader((File) null);
            failBecauseExceptionWasNotThrown(IllegalArgumentException.class);
        } catch (IllegalArgumentException ignored) {
            // expected: null file must be rejected eagerly
        }
    }

    @Test
    public void testConstruct_with_invalid_file_throws_exception() throws Exception {
        try {
            new SharecartFileReader(invalidFile);
            failBecauseExceptionWasNotThrown(FileNotFoundException.class);
        } catch (FileNotFoundException ignored) {
            // expected: nonexistent file
        }
    }

    @Test
    public void testReader_initial_state() throws Exception {
        SharecartFileReader fileReader = new SharecartFileReader(correctFile);
        assertThat(fileReader.isStrict()).isFalse();
    }

    @Test
    public void testRead_correct_file() throws Exception {
        assertCorrectFileMatchesParameters(readFile(correctFile, false));
    }

    @Test
    public void testRead_correct_file_strict() throws Exception {
        assertCorrectFileMatchesParameters(readFile(correctFile, true));
    }

    @Test
    public void testRead_missing_param_file() throws Exception {
        assertMissingParamFileMatchesParameters(readFile(missingParameterFile, false));
    }

    @Test
    public void testRead_missing_param_file_strict() throws Exception {
        assertStrictReadFailsWith(missingParameterFile,
                "Found 'Misc3' where parameter 'Misc2' was expected");
    }

    @Test
    public void testRead_constraint_failure_file() throws Exception {
        assertConstraintFailureFileMatchesParameters(readFile(constraintFailureFile, false));
    }

    @Test
    public void testRead_constraint_failure_file_strict() throws Exception {
        assertStrictReadFailsWith(constraintFailureFile,
                "The MapY value '550000' does not fulfill the constraints of the parameter");
    }

    @Test
    public void testRead_invalid_param_definition_file() throws Exception {
        assertInvalidParamDefinitionFileMatchesParameters(readFile(invalidParamDefinitionFile, false));
    }

    @Test
    public void testRead_invalid_param_definition_file_strict() throws Exception {
        assertStrictReadFailsWith(invalidParamDefinitionFile,
                "'Misc038424' is not a valid parameter definition");
    }

    @Test
    public void testRead_invalid_param_name_file() throws Exception {
        assertInvalidParamNameMatchesParameters(readFile(invalidParamNameFile, false));
    }

    @Test
    public void testRead_invalid_param_name_file_strict() throws Exception {
        assertStrictReadFailsWith(invalidParamNameFile,
                "Found 'Misc' where parameter 'Misc2' was expected");
    }

    /** Every fixture shares the same switch pattern: only switches 2 and 5 are set. */
    private void assertSwitchPattern(Sharecart sharecart) {
        for (int i = 0; i < 8; i++) {
            assertThat(sharecart.switchValue(i)).isEqualTo(i == 2 || i == 5);
        }
    }

    private void assertCorrectFileMatchesParameters(Sharecart sharecart) {
        assertThat(sharecart.x()).isEqualTo(100);
        assertThat(sharecart.y()).isEqualTo(1);
        assertThat(sharecart.misc(0)).isEqualTo(38424);
        assertThat(sharecart.misc(1)).isEqualTo(61499);
        assertThat(sharecart.misc(2)).isEqualTo(60753);
        assertThat(sharecart.misc(3)).isEqualTo(15107);
        assertThat(sharecart.name()).isEqualTo("MY NAME IS HERE!");
        assertSwitchPattern(sharecart);
    }

    private void assertMissingParamFileMatchesParameters(Sharecart sharecart) {
        assertThat(sharecart.x()).isEqualTo(100);
        assertThat(sharecart.y()).isEqualTo(1);
        assertThat(sharecart.misc(0)).isEqualTo(38424);
        assertThat(sharecart.misc(1)).isEqualTo(61499);
        // the missing Misc2 parameter falls back to 0 in lenient mode
        assertThat(sharecart.misc(2)).isEqualTo(0);
        assertThat(sharecart.misc(3)).isEqualTo(15107);
        assertThat(sharecart.name()).isEqualTo("A name");
        assertSwitchPattern(sharecart);
    }

    private void assertConstraintFailureFileMatchesParameters(Sharecart sharecart) {
        assertThat(sharecart.x()).isEqualTo(100);
        // out-of-range values are clamped to the parameter maximum in lenient mode
        assertThat(sharecart.y()).isEqualTo(Constraints.MAX_SIZE_Y);
        assertThat(sharecart.misc(0)).isEqualTo(38424);
        assertThat(sharecart.misc(1)).isEqualTo(61499);
        assertThat(sharecart.misc(2)).isEqualTo(60753);
        assertThat(sharecart.misc(3)).isEqualTo(Constraints.MAX_SIZE_MISC);
        assertThat(sharecart.name()).isEqualTo("MY NAME IS HERE!");
        assertSwitchPattern(sharecart);
    }

    private void assertInvalidParamDefinitionFileMatchesParameters(Sharecart sharecart) {
        assertThat(sharecart.x()).isEqualTo(100);
        assertThat(sharecart.y()).isEqualTo(1);
        // the malformed Misc0 definition is skipped, leaving its default of 0
        assertThat(sharecart.misc(0)).isEqualTo(0);
        assertThat(sharecart.misc(1)).isEqualTo(61499);
        assertThat(sharecart.misc(2)).isEqualTo(60753);
        assertThat(sharecart.misc(3)).isEqualTo(15107);
        assertThat(sharecart.name()).isEqualTo("MY NAME IS HERE!");
        assertSwitchPattern(sharecart);
    }

    private void assertInvalidParamNameMatchesParameters(Sharecart sharecart) {
        assertThat(sharecart.x()).isEqualTo(100);
        assertThat(sharecart.y()).isEqualTo(1);
        assertThat(sharecart.misc(0)).isEqualTo(38424);
        assertThat(sharecart.misc(1)).isEqualTo(61499);
        // the misnamed Misc2 parameter is skipped, leaving its default of 0
        assertThat(sharecart.misc(2)).isEqualTo(0);
        assertThat(sharecart.misc(3)).isEqualTo(15107);
        assertThat(sharecart.name()).isEqualTo("A name");
        assertSwitchPattern(sharecart);
    }
}
| |
package com.lows;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import com.lows.contentprovider.MyCodeBookContentProvider;
import com.lows.database.CodeBookTable;
import android.app.Activity;
import android.app.ProgressDialog;
import android.content.ContentValues;
import android.content.CursorLoader;
import android.content.Intent;
import android.database.Cursor;
import android.net.Uri;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.CompoundButton;
import android.widget.EditText;
import android.widget.Toast;
import android.widget.Switch;
import android.widget.TextView;
/**
* This Activity is started when a user taps on a single item of the ListView displayed
 * via the ResultListFragment. This activity lists details about the specific LoWS
* and allows the setting of search strings. If a dichotomous code LoWS is used, this Activity
* tries to decode the ldc, if the ldc is not available the Codebook Updater is started.
*
*TODO: -make the variable CB_CHECK_INTERVAL adjustable via the settings menu and combine the
* variable with the CB_CHECK_INTERVAL variable of the AlarmClickActivity
* -Currently only reduced LoWS together with the dichotomous code are supported,
 * this should be extended to also support dichotomous codes within an extended (flexible) LoWS message.
* -Display the ldc after the codebook was updated immediately, currently the string
* "Currently no location specific data available (no codebook entry found)" is displayed if no codebook
* was available but this string is not automatically replaced after the codebook was updated, it is
* only replaced after the ClickActivity is reloaded manually by the user by returning and tapping again
* on the same LoWS entry in the ListView.
*
* @author Sven Zehl
*
*
*/
/*
 * Shows details for a single LoWS picked from the result list and lets the
 * user configure an alarm search string. For reduced-format (dichotomous
 * code) LoWS the cached codebook is consulted; when the entry is missing or
 * stale the CodeBookUpdaterService is started.
 */
public class ClickActivity extends Activity {

int position;
ProgressDialog dialog;
boolean alarmSwitchState = false;
EditText searchEditText;
Button saveButton;
private static final String TAG = "com.lows.ClickActivity";
// Re-fetch the codebook when the cached entry is older than this many days.
// Keep in sync with the corresponding constant in AlarmClickActivity.
private static final int CB_CHECK_INTERVAL = 10;

@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.click_activity);

    final TextView typeTextView = (TextView) findViewById(R.id.type);
    final TextView dataTextView = (TextView) findViewById(R.id.data);
    final TextView locationTextView = (TextView) findViewById(R.id.locdata);
    final TextView cbDescTextView = (TextView) findViewById(R.id.codebook_desc);
    final TextView rssiTextView = (TextView) findViewById(R.id.rssi);
    saveButton = (Button) findViewById(R.id.save_button);
    searchEditText = (EditText) findViewById(R.id.searchText);
    final TextView searchTextView = (TextView) findViewById(R.id.alarmIf);
    Switch alarmSwitch = (Switch) findViewById(R.id.alarm_switch);

    Bundle bundle = getIntent().getExtras();
    position = bundle.getInt("position");
    String bundleType = bundle.getString("type");
    String bundleData = bundle.getString("data");
    String bundleSearchText = bundle.getString("searchText");
    String serviceData = bundle.getString("serviceData");
    int serviceType = bundle.getInt("serviceType");
    int formatType = bundle.getInt("formatType");
    String macData = bundle.getString("mac");
    double rssiData = bundle.getDouble("rssi");

    //TODO: also support dichotomous codes embedded in the extended (flexible) type.
    if (formatType == 1) {
        // Reduced (codebook) format: first byte is hard-coded, second indexes the codebook.
        String typeValue = "0x" + Integer.toHexString(serviceType);
        String hardcodedValue = "0x" + serviceData.substring(0, 2);
        String codebookValue = "0x" + serviceData.substring(2, 4);
        locationTextView.setText(
                lookupLocationData(macData, typeValue, hardcodedValue, codebookValue));
    } else {
        // No location-specific data for this format; hide the codebook views.
        cbDescTextView.setVisibility(View.INVISIBLE);
        locationTextView.setVisibility(View.INVISIBLE);
    }

    boolean showAlarmSwitch = bundle.getBoolean("showAlarmSwitch");
    boolean initialAlarmSwitchState = bundle.getBoolean("AlarmInitialState");
    boolean showAlarmSearchField = bundle.getBoolean("showAlarmSearchField");

    typeTextView.setText(bundleType);
    dataTextView.setText(bundleData);
    searchTextView.setText(bundleSearchText);
    rssiTextView.setText("Distance (RSSI): "+rssiData+" dBm");

    if (!showAlarmSwitch) {
        alarmSwitch.setVisibility(View.INVISIBLE);
        searchTextView.setVisibility(View.INVISIBLE);
        searchEditText.setVisibility(View.INVISIBLE);
        saveButton.setVisibility(View.INVISIBLE);
    }
    if (!showAlarmSearchField) {
        searchTextView.setVisibility(View.INVISIBLE);
        searchEditText.setVisibility(View.INVISIBLE);
    }
    if (showAlarmSwitch && initialAlarmSwitchState) {
        alarmSwitch.toggle();
        alarmSwitchState = true;
    }

    alarmSwitch.setOnCheckedChangeListener(
            new CompoundButton.OnCheckedChangeListener() {
                @Override
                public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
                    alarmSwitchState = isChecked;
                }
            });

    saveButton.setOnClickListener(new View.OnClickListener() {
        // Hand the alarm configuration back to the caller and close this screen.
        public void onClick(View v) {
            Intent intent = new Intent();
            intent.putExtra("alarmSet", alarmSwitchState);
            intent.putExtra("position", position);
            intent.putExtra("searchTextReturn",
                    searchEditText.getEditableText().toString());
            setResult(-1, intent); // -1 == Activity.RESULT_OK
            onBackPressed();
        }
    });
}

/**
 * Looks up the decoded location-specific data (ldc) for the given codebook
 * key and starts the CodeBookUpdaterService when no entry exists or the
 * cached entry is older than {@link #CB_CHECK_INTERVAL} days.
 * Fix: the original closed the cursor unconditionally, throwing an NPE when
 * the content provider returned null; the cursor is now closed in a finally
 * block only when non-null.
 */
private String lookupLocationData(String macData, String typeValue,
        String hardcodedValue, String codebookValue) {
    String dataValue = "Currently no location specific data available (no codebook entry found)";
    Cursor cursor = getContentResolver().query(
            MyCodeBookContentProvider.CONTENT_URI,
            null,
            "mac LIKE '" + macData + "' AND servicetype LIKE '"
                    + typeValue + "' AND hardcodedvalue LIKE '"
                    + hardcodedValue + "' AND codebookvalue LIKE '"
                    + codebookValue + "'", null, null);
    if (cursor == null) {
        return dataValue;
    }
    try {
        if (cursor.getCount() > 0) {
            cursor.moveToFirst();
            dataValue = cursor.getString(
                    cursor.getColumnIndexOrThrow(CodeBookTable.COLUMN_DATA));
            String entryDate = cursor.getString(
                    cursor.getColumnIndexOrThrow(CodeBookTable.COLUMN_LASTCHANGED));
            if (isEntryStale(entryDate)) {
                startCodebookUpdater(macData);
            }
        } else {
            // No cached decoding for this ldc yet; fetch the codebook.
            startCodebookUpdater(macData);
        }
    } finally {
        cursor.close();
    }
    return dataValue;
}

/**
 * Returns true when the stored entry date plus {@link #CB_CHECK_INTERVAL}
 * days lies before the current time. Parse failures are logged and treated
 * as not stale, matching the original behavior.
 */
private boolean isEntryStale(String entryDate) {
    Calendar c = Calendar.getInstance();
    // NOTE(review): pattern has no explicit Locale — assumes entries were
    // written with the same default locale; confirm against the writer side.
    SimpleDateFormat sdf = new SimpleDateFormat("dd-MM-yyyy HH:mm:ss");
    try {
        Date currentDateDate = sdf.parse(sdf.format(c.getTime()));
        c.setTime(sdf.parse(entryDate));
        c.add(Calendar.DATE, CB_CHECK_INTERVAL);
        Date entryDateDate = sdf.parse(sdf.format(c.getTime()));
        if (entryDateDate.before(currentDateDate)) {
            Log.w(TAG, "entryDatDate:" + entryDateDate.toString()
                    + " currentDatDate:" + currentDateDate.toString());
            return true;
        }
    } catch (ParseException e) {
        Log.e(TAG, "Error while processing date comparison " + e.toString());
    }
    return false;
}

/** Starts the CodeBookUpdaterService for the given access point MAC. */
private void startCodebookUpdater(String macData) {
    Intent cbusIntent = new Intent(this, CodeBookUpdaterService.class);
    cbusIntent.putExtra(CodeBookUpdaterService.MAC_IN_MSG, macData);
    startService(cbusIntent);
}

@Override
public void onBackPressed() {
    finish();
}
}
| |
/*
* ice4j, the OpenSource Java Solution for NAT and Firewall Traversal.
*
* Copyright @ 2015 Atlassian Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ice4j;
import java.net.*;
import java.util.*;
import static org.junit.jupiter.api.Assertions.*;
import org.ice4j.message.*;
import org.ice4j.socket.*;
import org.ice4j.stack.*;
import org.junit.jupiter.api.*;
/**
* Test how client and server behave, how they recognize/adopt messages and
* how they both handle retransmissions (i.e. client transactions should make
* them and server transactions should hide them)
*
* @author Emil Ivov
*/
public class TransactionSupportTests
{
/**
* The client address we use for this test.
*/
TransportAddress clientAddress;
/**
* The client address we use for this test.
*/
TransportAddress serverAddress;
/**
* The socket the client uses in this test.
*/
IceSocketWrapper clientSock = null;
/**
* The socket the server uses in this test.
*/
IceSocketWrapper serverSock = null;
/**
* The <tt>StunStack</tt> used by this <tt>TransactionSupportTests</tt>.
*/
private StunStack stunStack;
/**
* The request we send in this test.
*/
Request bindingRequest = null;
/**
* The response we send in this test.
*/
Response bindingResponse = null;
/**
* The tool that collects requests.
*/
PlainRequestCollector requestCollector = null;
/**
* The tool that collects responses.
*/
PlainResponseCollector responseCollector = null;
/**
* Inits sockets.
*
* @throws Exception if something goes bad.
*/
@BeforeEach
public void setUp()
    throws Exception
{
    // Two loopback UDP sockets on ephemeral ports: one client, one server.
    clientSock = new IceUdpSocketWrapper(
        new SafeCloseDatagramSocket(new InetSocketAddress("127.0.0.1", 0)));
    serverSock = new IceUdpSocketWrapper(
        new SafeCloseDatagramSocket(new InetSocketAddress("127.0.0.1", 0)));
    clientAddress = new TransportAddress(
        "127.0.0.1", clientSock.getLocalPort(), Transport.UDP);
    serverAddress = new TransportAddress(
        "127.0.0.1", serverSock.getLocalPort(), Transport.UDP);

    stunStack = new StunStack();
    stunStack.addSocket(clientSock);
    stunStack.addSocket(serverSock);

    bindingRequest = MessageFactory.createBindingRequest();
    bindingResponse = MessageFactory.create3489BindingResponse(
        clientAddress, clientAddress, serverAddress);
    requestCollector = new PlainRequestCollector();
    responseCollector = new PlainResponseCollector();

    // Reset stack tuning properties so each test starts from a known state.
    System.setProperty(
        StackProperties.PROPAGATE_RECEIVED_RETRANSMISSIONS, "false");
    System.setProperty(
        StackProperties.KEEP_CRANS_AFTER_A_RESPONSE, "false");
    for (String timingProperty : new String[]
        {
            StackProperties.MAX_CTRAN_RETRANSMISSIONS,
            StackProperties.MAX_CTRAN_RETRANS_TIMER,
            StackProperties.FIRST_CTRAN_RETRANS_AFTER
        })
    {
        System.setProperty(timingProperty, "");
    }
}
/**
* Frees all sockets that we are currently using.
*
* @throws Exception if something does not go as planned.
*/
@AfterEach
public void tearDown()
    throws Exception
{
    // Detach and close both test sockets.
    stunStack.removeSocket(clientAddress);
    stunStack.removeSocket(serverAddress);
    clientSock.close();
    serverSock.close();
    requestCollector = null;
    responseCollector = null;

    // Restore stack tuning properties for whoever runs next.
    System.setProperty(
        StackProperties.PROPAGATE_RECEIVED_RETRANSMISSIONS, "false");
    System.setProperty(
        StackProperties.KEEP_CRANS_AFTER_A_RESPONSE, "false");
    for (String timingProperty : new String[]
        {
            StackProperties.MAX_CTRAN_RETRANSMISSIONS,
            StackProperties.MAX_CTRAN_RETRANS_TIMER,
            StackProperties.FIRST_CTRAN_RETRANS_AFTER
        })
    {
        System.setProperty(timingProperty, "");
    }
}
/**
* Test that requests are retransmitted if no response is received
*
* @throws java.lang.Exception upon any failure
*/
/**
 * Verifies that a client transaction retransmits its request when no
 * response arrives, honoring the configured retransmission count.
 *
 * @throws java.lang.Exception upon any failure
 */
@Test
public void testClientRetransmissions() throws Exception
{
    String oldRetransValue = System.getProperty(
        StackProperties.MAX_CTRAN_RETRANSMISSIONS);
    String oldMaxWaitValue = System.getProperty(
        StackProperties.MAX_CTRAN_RETRANS_TIMER);
    System.setProperty(StackProperties.MAX_CTRAN_RETRANS_TIMER, "100");
    System.setProperty(StackProperties.MAX_CTRAN_RETRANSMISSIONS, "2");
    // make retransmissions visible to the listener so we can count them
    System.setProperty(StackProperties.PROPAGATE_RECEIVED_RETRANSMISSIONS,
                       "true");
    stunStack.addRequestListener(serverAddress, requestCollector);
    //send
    stunStack.sendRequest(
            bindingRequest,
            serverAddress,
            clientAddress,
            responseCollector);
    //wait for retransmissions
    Thread.sleep(1000);
    //verify
    Vector<StunMessageEvent> reqs
        = requestCollector.getRequestsForTransaction(
                bindingRequest.getTransactionID());
    assertTrue(reqs.size() > 1,
        "No retransmissions of the request have been received");
    // NOTE(review): with 2 retransmissions configured, 3 copies total are
    // expected; the condition checks the lower bound while the message
    // reads like an upper bound — confirm the intended semantics.
    assertTrue(reqs.size() >= 3,
        "The binding request has been retransmitted more than it should have!");
    // Restore the retransmission properties in case other tests rely on the
    // defaults. Fixes two bugs in the original: System.getProperty was
    // called where setProperty was intended (restoring nothing), and the
    // timer branch reused oldRetransValue instead of oldMaxWaitValue.
    if (oldRetransValue != null)
        System.setProperty(StackProperties.MAX_CTRAN_RETRANSMISSIONS,
                           oldRetransValue);
    else
        System.clearProperty(StackProperties.MAX_CTRAN_RETRANSMISSIONS);
    if (oldMaxWaitValue != null)
        System.setProperty(StackProperties.MAX_CTRAN_RETRANS_TIMER,
                           oldMaxWaitValue);
    else
        System.clearProperty(StackProperties.MAX_CTRAN_RETRANS_TIMER);
}
/**
* Make sure that retransmissions are not seen by the server user and that
* it only gets a single request.
*
* @throws Exception if anything goes wrong.
*/
/**
 * Make sure that retransmissions are not seen by the server user and that
 * it only gets a single request.
 *
 * @throws Exception if anything goes wrong.
 */
@Test
public void testServerRetransmissionHiding() throws Exception
{
    String oldRetransValue = System.getProperty(
        StackProperties.MAX_CTRAN_RETRANSMISSIONS);
    System.setProperty(StackProperties.MAX_CTRAN_RETRANSMISSIONS, "2");
    //prepare to listen
    stunStack.addRequestListener(serverAddress, requestCollector);
    //send
    stunStack.sendRequest(
            bindingRequest,
            serverAddress,
            clientAddress,
            responseCollector);
    //wait for retransmissions
    Thread.sleep(1000);
    //verify
    Vector<StunMessageEvent> reqs
        = requestCollector.getRequestsForTransaction(
                bindingRequest.getTransactionID());
    assertTrue(reqs.size() <= 1,
        "Retransmissions of a binding request were propagated to the server");
    // Restore the retransmissions property in case others rely on the
    // default. Fixes the original's System.getProperty call, which was a
    // read with a default value and never restored anything.
    if (oldRetransValue != null)
        System.setProperty(StackProperties.MAX_CTRAN_RETRANSMISSIONS,
                           oldRetransValue);
    else
        System.clearProperty(StackProperties.MAX_CTRAN_RETRANSMISSIONS);
}
/**
* Makes sure that once a request has been answered by the server,
* retransmissions of this request are not propagated to the UA and are
* automatically handled with a retransmission of the last seen response
*
* @throws Exception if we screw up.
*/
/**
 * Makes sure that once a request has been answered by the server,
 * retransmissions of this request are not propagated to the UA and are
 * automatically handled with a retransmission of the last seen response.
 *
 * @throws Exception if we screw up.
 */
@Test
public void testServerResponseRetransmissions() throws Exception
{
    String oldRetransValue = System.getProperty(
        StackProperties.MAX_CTRAN_RETRANSMISSIONS);
    System.setProperty(StackProperties.MAX_CTRAN_RETRANSMISSIONS, "2");
    System.setProperty(StackProperties.MAX_CTRAN_RETRANS_TIMER, "100");
    // keep server transactions alive after responding so retransmitted
    // requests are answered from the transaction cache
    System.setProperty(
        StackProperties.KEEP_CRANS_AFTER_A_RESPONSE,
        "true");
    stunStack.addRequestListener(serverAddress, requestCollector);
    //send
    stunStack.sendRequest(
            bindingRequest,
            serverAddress,
            clientAddress,
            responseCollector);
    //wait for the message to arrive
    requestCollector.waitForRequest();
    Vector<StunMessageEvent> reqs = requestCollector
        .getRequestsForTransaction(bindingRequest.getTransactionID());
    StunMessageEvent evt = reqs.get(0);
    byte[] tid = evt.getMessage().getTransactionID();
    stunStack.sendResponse(
            tid,
            bindingResponse,
            serverAddress,
            clientAddress);
    //wait for retransmissions
    Thread.sleep(500);
    //verify that we received a fair number of retransmitted responses.
    assertTrue(responseCollector.receivedResponses.size() < 3,
        "There were too few retransmissions of a binding response: "
            + responseCollector.receivedResponses.size());
    // Restore the retransmissions property in case others rely on the
    // default. Fixes the original's System.getProperty call, which never
    // restored anything.
    if (oldRetransValue != null)
        System.setProperty(StackProperties.MAX_CTRAN_RETRANSMISSIONS,
                           oldRetransValue);
    else
        System.clearProperty(StackProperties.MAX_CTRAN_RETRANSMISSIONS);
    System.clearProperty(StackProperties.MAX_CTRAN_RETRANS_TIMER);
}
/**
 * A (very) weak test, verifying that transaction IDs are unique.
 * @throws Exception in case we feel like it.
 */
@Test
public void testUniqueIDs() throws Exception
{
    stunStack.addRequestListener(serverAddress, requestCollector);
    // First request: fire it and wait until the server side has seen it.
    stunStack.sendRequest(
            bindingRequest, serverAddress, clientAddress, responseCollector);
    requestCollector.waitForRequest();
    StunMessageEvent firstRequestEvt = requestCollector
        .getRequestsForTransaction(bindingRequest.getTransactionID())
            .get(0);
    // Answer it so that the client transaction stops retransmitting.
    stunStack.sendResponse(
            firstRequestEvt.getMessage().getTransactionID(),
            bindingResponse,
            serverAddress,
            clientAddress);
    // Second request: fire it and give its retransmissions time to settle.
    stunStack.sendRequest(
            bindingRequest, serverAddress, clientAddress, responseCollector);
    Thread.sleep(1000);
    StunMessageEvent secondRequestEvt = requestCollector
        .getRequestsForTransaction(bindingRequest.getTransactionID())
            .get(0);
    // The two transactions must carry distinct IDs.
    assertFalse(
        Arrays.equals(
            firstRequestEvt.getMessage().getTransactionID(),
            secondRequestEvt.getMessage().getTransactionID()),
        "Consecutive requests were assigned the same transaction id"
        );
}
/**
 * Tests whether the properties for configuring the maximum number of
 * retransmissions in a transaction are working properly.
 *
 * @throws Exception if the gods so decide.
 */
@Test
public void testClientTransactionMaxRetransmisssionsConfigurationParameter()
    throws Exception
{
    // Cap the client transaction at two retransmissions ...
    System.setProperty(StackProperties.MAX_CTRAN_RETRANSMISSIONS, "2");
    // ... and make every copy visible so that we can count them.
    System.setProperty(
        StackProperties.PROPAGATE_RECEIVED_RETRANSMISSIONS,
        "true");
    stunStack.addRequestListener(serverAddress, requestCollector);
    stunStack.sendRequest(
            bindingRequest, serverAddress, clientAddress, responseCollector);
    // Leave enough time for both retransmissions to be sent.
    Thread.sleep(1600);
    Vector<StunMessageEvent> observed = requestCollector
        .getRequestsForTransaction(bindingRequest.getTransactionID());
    assertTrue(observed.size() > 1,
        "No retransmissions of the request have been received");
    // Original + 2 retransmissions.
    assertEquals(3, observed.size(),
        "The MAX_RETRANSMISSIONS param was not taken into account!");
}
/**
 * Tests whether the properties for configuring the minimum transaction
 * wait interval is working properly.
 *
 * @throws Exception if we are having a bad day.
 */
@Test
public void testMinWaitIntervalConfigurationParameter()
    throws Exception
{
    // Delay the first retransmission by 50 ms ...
    System.setProperty(StackProperties.FIRST_CTRAN_RETRANS_AFTER, "50");
    // ... and make retransmissions visible so that we can count them.
    System.setProperty(
        StackProperties.PROPAGATE_RECEIVED_RETRANSMISSIONS,
        "true");
    stunStack.addRequestListener(serverAddress, requestCollector);
    stunStack.sendRequest(
            bindingRequest, serverAddress, clientAddress, responseCollector);
    // Right after the first copy arrives, no retransmission may exist yet.
    requestCollector.waitForRequest();
    Vector<?> observed = requestCollector.getRequestsForTransaction(
        bindingRequest.getTransactionID());
    assertTrue(observed.size() < 2,
        "A retransmissions of the request was sent too early");
    // After the configured interval (plus slack) the retransmission must
    // have shown up.
    Thread.sleep(110);
    observed = requestCollector.getRequestsForTransaction(
        bindingRequest.getTransactionID());
    assertEquals(2, observed.size(),
        "A retransmissions of the request was not sent");
}
/**
 * Tests whether the properties for configuring the maximum transaction
 * wait interval is working properly.
 *
 * @throws Exception if the gods so decide.
 */
@Test
public void testMaxWaitIntervalConfigurationParameter()
    throws Exception
{
    // Cap the retransmission timer at 100 ms ...
    System.setProperty(StackProperties.MAX_CTRAN_RETRANS_TIMER,
        "100");
    // ... make every retransmission visible ...
    System.setProperty(StackProperties.PROPAGATE_RECEIVED_RETRANSMISSIONS,
        "true");
    // ... and allow enough retransmissions for the cap to matter.
    System.setProperty(StackProperties.MAX_CTRAN_RETRANSMISSIONS,
        "11");
    stunStack.addRequestListener(serverAddress, requestCollector);
    stunStack.sendRequest(
            bindingRequest, serverAddress, clientAddress, responseCollector);
    // Block until the client transaction reports back (or 5 s elapse).
    synchronized (responseCollector)
    {
        responseCollector.wait(5000);
    }
    // Original + 11 retransmissions.
    Vector<StunMessageEvent> observed = requestCollector
        .getRequestsForTransaction(bindingRequest.getTransactionID());
    assertEquals(12, observed.size(),
        "Not all retransmissions were made for the expected period of time");
    // Nothing further may be sent once the transaction has given up.
    requestCollector.waitForRequest(1800);
    observed = requestCollector.getRequestsForTransaction(
        bindingRequest.getTransactionID());
    assertEquals(12, observed.size(),
        "A retransmissions of the request was sent, while not supposed to");
}
/**
 * A simply utility for asynchronous collection of requests.
 */
private static class PlainRequestCollector
    implements RequestListener
{
    /**
     * All requests received so far, in arrival order. Guarded by
     * {@code this}: the stack's receive thread adds entries while test
     * threads read them concurrently.
     */
    private final Vector<StunMessageEvent> receivedRequestsVector
        = new Vector<>();
    /**
     * Logs the newly received request and wakes any thread blocked in
     * {@link #waitForRequest()}.
     *
     * @param evt the {@link StunMessageEvent} to log.
     */
    public void processRequest(StunMessageEvent evt)
    {
        synchronized(this)
        {
            receivedRequestsVector.add(evt);
            notifyAll();
        }
    }
    /**
     * Only return requests from the specified tran because we might have
     * capture others too.
     *
     * @param tranid the transaction that we'd like to get requests for.
     *
     * @return a Vector containing all request that we have received and
     * that match <tt>tranid</tt>.
     */
    public Vector<StunMessageEvent> getRequestsForTransaction(byte[] tranid)
    {
        Vector<StunMessageEvent> newVec = new Vector<>();
        // Iterate under the same lock used by processRequest(): although
        // Vector's individual methods are synchronized, its iterator is
        // fail-fast, so iterating while another thread adds could throw
        // ConcurrentModificationException.
        synchronized(this)
        {
            for (StunMessageEvent evt : receivedRequestsVector)
            {
                Message msg = evt.getMessage();
                if( Arrays.equals(tranid, msg.getTransactionID()))
                    newVec.add(evt);
            }
        }
        return newVec;
    }
    /**
     * Blocks until a request arrives or 50 ms pass.
     */
    public void waitForRequest()
    {
        waitForRequest(50);
    }
    /**
     * Blocks until a request arrives or the timeout passes.
     *
     * @param timeoutMillis the maximum number of milliseconds to wait.
     */
    public void waitForRequest(long timeoutMillis)
    {
        synchronized(this)
        {
            try
            {
                wait(timeoutMillis);
            }
            catch (InterruptedException e)
            {
                // Preserve the interrupt for callers further up the stack.
                Thread.currentThread().interrupt();
            }
        }
    }
}
/**
 * A simple utility for asynchronously collecting responses.
 */
private static class PlainResponseCollector
    extends AbstractResponseCollector
{
    /**
     * The responses we've collected so far.
     */
    public final Vector<Object> receivedResponses = new Vector<>();
    /**
     * Notifies this <tt>ResponseCollector</tt> that a transaction described by
     * the specified <tt>BaseStunMessageEvent</tt> has failed. The possible
     * reasons for the failure include timeouts, unreachable destination, etc.
     *
     * @param event the <tt>BaseStunMessageEvent</tt> which describes the failed
     * transaction and the runtime type of which specifies the failure reason
     * @see AbstractResponseCollector#processFailure(BaseStunMessageEvent)
     */
    protected synchronized void processFailure(BaseStunMessageEvent event)
    {
        // Record the failure reason as a marker string derived from the
        // event's runtime type.
        final String failureLabel
            = (event instanceof StunFailureEvent) ? "unreachable"
            : (event instanceof StunTimeoutEvent) ? "timeout"
            : "failure";
        receivedResponses.add(failureLabel);
        notifyAll();
    }
    /**
     * Logs the received <tt>response</tt>
     *
     * @param response the event to log.
     */
    public synchronized void processResponse(StunResponseEvent response)
    {
        receivedResponses.add(response);
        notifyAll();
    }
}
}
| |
// Created by plusminus on 00:23:14 - 03.10.2008
package org.osmdroid;
import org.osmdroid.constants.OpenStreetMapConstants;
import org.osmdroid.samplefragments.BaseSampleFragment;
import org.osmdroid.samplefragments.SampleFactory;
import org.osmdroid.tileprovider.tilesource.ITileSource;
import org.osmdroid.tileprovider.tilesource.TileSourceFactory;
import org.osmdroid.views.MapView;
import org.osmdroid.views.overlay.MinimapOverlay;
import org.osmdroid.views.overlay.ScaleBarOverlay;
import org.osmdroid.views.overlay.compass.CompassOverlay;
import org.osmdroid.views.overlay.compass.InternalCompassOrientationProvider;
import org.osmdroid.views.overlay.mylocation.GpsMyLocationProvider;
import org.osmdroid.views.overlay.mylocation.MyLocationNewOverlay;
import android.annotation.TargetApi;
import android.content.Context;
import android.content.SharedPreferences;
import android.os.Build;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentManager;
import android.util.DisplayMetrics;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.MenuItem.OnMenuItemClickListener;
import android.view.SubMenu;
import android.view.View;
import android.view.ViewGroup;
/**
 * Default map view activity.
 *
 * @author Marc Kurtz
 * @author Manuel Stahl
 *
 */
public class MapFragment extends Fragment implements OpenStreetMapConstants
{
    // ===========================================================
    // Constants
    // ===========================================================
    private static final int DIALOG_ABOUT_ID = 1;
    private static final int MENU_SAMPLES = Menu.FIRST + 1;
    private static final int MENU_ABOUT = MENU_SAMPLES + 1;
    private static final int MENU_LAST_ID = MENU_ABOUT + 1; // Always set to last unused id
    // ===========================================================
    // Fields
    // ===========================================================
    private SharedPreferences mPrefs;
    private MapView mMapView;
    private MyLocationNewOverlay mLocationOverlay;
    private CompassOverlay mCompassOverlay;
    private MinimapOverlay mMinimapOverlay;
    private ScaleBarOverlay mScaleBarOverlay;
    private RotationGestureOverlay mRotationGestureOverlay;
    private ResourceProxy mResourceProxy;

    /**
     * Factory method creating a fresh instance of this fragment.
     */
    public static MapFragment newInstance() {
        return new MapFragment();
    }

    @Override
    public void onCreate(Bundle savedInstanceState)
    {
        super.onCreate(savedInstanceState);
    }

    /**
     * Creates the {@link MapView} that is the fragment's entire view
     * hierarchy. The overlays are attached later, in
     * {@link #onActivityCreated(Bundle)}.
     */
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState)
    {
        mResourceProxy = new ResourceProxyImpl(inflater.getContext().getApplicationContext());
        mMapView = new MapView(inflater.getContext(), 256, mResourceProxy);
        // Call this method to turn off hardware acceleration at the View level.
        // setHardwareAccelerationOff();
        return mMapView;
    }

    @TargetApi(Build.VERSION_CODES.HONEYCOMB)
    private void setHardwareAccelerationOff()
    {
        // Turn off hardware acceleration here, or in manifest
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB)
            mMapView.setLayerType(View.LAYER_TYPE_SOFTWARE, null);
    }

    /**
     * Builds all overlays (location, compass, minimap, scale bar, rotation
     * gesture), wires them to the map view and restores the persisted zoom
     * level and scroll position.
     */
    @Override
    public void onActivityCreated(Bundle savedInstanceState)
    {
        super.onActivityCreated(savedInstanceState);

        final Context context = this.getActivity();
        final DisplayMetrics dm = context.getResources().getDisplayMetrics();
        // mResourceProxy = new ResourceProxyImpl(getActivity().getApplicationContext());

        mPrefs = context.getSharedPreferences(PREFS_NAME, Context.MODE_PRIVATE);

        this.mCompassOverlay = new CompassOverlay(context, new InternalCompassOrientationProvider(context),
                mMapView);
        this.mLocationOverlay = new MyLocationNewOverlay(context, new GpsMyLocationProvider(context),
                mMapView);

        // Minimap sized relative to the screen so it stays unobtrusive.
        mMinimapOverlay = new MinimapOverlay(getActivity(), mMapView.getTileRequestCompleteHandler());
        mMinimapOverlay.setWidth(dm.widthPixels / 5);
        mMinimapOverlay.setHeight(dm.heightPixels / 5);

        mScaleBarOverlay = new ScaleBarOverlay(context);
        mScaleBarOverlay.setCentred(true);
        mScaleBarOverlay.setScaleBarOffset(dm.widthPixels / 2, 10);

        // Rotation gestures are available but start disabled.
        mRotationGestureOverlay = new RotationGestureOverlay(context, mMapView);
        mRotationGestureOverlay.setEnabled(false);

        mMapView.setBuiltInZoomControls(true);
        mMapView.setMultiTouchControls(true);
        mMapView.getOverlays().add(this.mLocationOverlay);
        mMapView.getOverlays().add(this.mCompassOverlay);
        mMapView.getOverlays().add(this.mMinimapOverlay);
        mMapView.getOverlays().add(this.mScaleBarOverlay);
        mMapView.getOverlays().add(this.mRotationGestureOverlay);

        // Restore the map state persisted in onPause().
        mMapView.getController().setZoom(mPrefs.getInt(PREFS_ZOOM_LEVEL, 1));
        mMapView.scrollTo(mPrefs.getInt(PREFS_SCROLL_X, 0), mPrefs.getInt(PREFS_SCROLL_Y, 0));

        mLocationOverlay.enableMyLocation();
        mCompassOverlay.enableCompass();

        setHasOptionsMenu(true);
    }

    /**
     * Persists the map state (tile source, scroll, zoom, overlay toggles)
     * and disables the sensor-backed overlays.
     */
    @Override
    public void onPause()
    {
        final SharedPreferences.Editor edit = mPrefs.edit();
        edit.putString(PREFS_TILE_SOURCE, mMapView.getTileProvider().getTileSource().name());
        edit.putInt(PREFS_SCROLL_X, mMapView.getScrollX());
        edit.putInt(PREFS_SCROLL_Y, mMapView.getScrollY());
        edit.putInt(PREFS_ZOOM_LEVEL, mMapView.getZoomLevel());
        edit.putBoolean(PREFS_SHOW_LOCATION, mLocationOverlay.isMyLocationEnabled());
        edit.putBoolean(PREFS_SHOW_COMPASS, mCompassOverlay.isCompassEnabled());
        // apply() persists asynchronously; unlike commit() it does not block
        // the UI thread during the pause transition.
        edit.apply();

        this.mLocationOverlay.disableMyLocation();
        this.mCompassOverlay.disableCompass();

        super.onPause();
    }

    /**
     * Restores the persisted tile source (falling back to the default when
     * the stored name is unknown) and re-enables the persisted overlays.
     */
    @Override
    public void onResume()
    {
        super.onResume();
        final String tileSourceName = mPrefs.getString(PREFS_TILE_SOURCE,
                TileSourceFactory.DEFAULT_TILE_SOURCE.name());
        try {
            final ITileSource tileSource = TileSourceFactory.getTileSource(tileSourceName);
            mMapView.setTileSource(tileSource);
        } catch (final IllegalArgumentException e) {
            // Unknown/renamed tile source in the prefs — fall back safely.
            mMapView.setTileSource(TileSourceFactory.DEFAULT_TILE_SOURCE);
        }
        if (mPrefs.getBoolean(PREFS_SHOW_LOCATION, false)) {
            this.mLocationOverlay.enableMyLocation();
        }
        if (mPrefs.getBoolean(PREFS_SHOW_COMPASS, false)) {
            this.mCompassOverlay.enableCompass();
        }
    }

    @Override
    public void onCreateOptionsMenu(Menu menu, MenuInflater inflater)
    {
        // Put overlay items first
        mMapView.getOverlayManager().onCreateOptionsMenu(menu, MENU_LAST_ID, mMapView);

        // Put samples next
        SubMenu samplesSubMenu = menu.addSubMenu(0, MENU_SAMPLES, Menu.NONE, R.string.samples)
                .setIcon(android.R.drawable.ic_menu_gallery);
        SampleFactory sampleFactory = SampleFactory.getInstance();
        for (int a = 0; a < sampleFactory.count(); a++) {
            final BaseSampleFragment f = sampleFactory.getSample(a);
            samplesSubMenu.add(f.getSampleTitle()).setOnMenuItemClickListener(
                    new OnMenuItemClickListener() {
                        @Override
                        public boolean onMenuItemClick(MenuItem item) {
                            startSampleFragment(f);
                            return true;
                        }
                    });
        }

        // Put "About" menu item last
        menu.add(0, MENU_ABOUT, Menu.CATEGORY_SECONDARY, R.string.about).setIcon(
                android.R.drawable.ic_menu_info_details);

        super.onCreateOptionsMenu(menu, inflater);
    }

    /**
     * Hides this fragment and shows the given sample on the back stack.
     */
    protected void startSampleFragment(Fragment fragment) {
        FragmentManager fm = getFragmentManager();
        fm.beginTransaction().hide(this).add(android.R.id.content, fragment, "SampleFragment")
                .addToBackStack(null).commit();
    }

    @Override
    public void onPrepareOptionsMenu(final Menu pMenu)
    {
        mMapView.getOverlayManager().onPrepareOptionsMenu(pMenu, MENU_LAST_ID, mMapView);
        super.onPrepareOptionsMenu(pMenu);
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item)
    {
        // Give the overlays a chance to consume the selection first.
        if (mMapView.getOverlayManager().onOptionsItemSelected(item, MENU_LAST_ID, mMapView))
            return true;

        switch (item.getItemId()) {
        case MENU_ABOUT:
            getActivity().showDialog(DIALOG_ABOUT_ID);
            return true;
        }
        return super.onOptionsItemSelected(item);
    }

    // @Override
    // public boolean onTrackballEvent(final MotionEvent event) {
    // return this.mMapView.onTrackballEvent(event);
    // }
}
| |
/*
* Copyright 2017 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.authorization.jpa.store;
import java.util.ArrayList;
import java.util.Collections;
import java.util.EnumMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import javax.persistence.EntityManager;
import javax.persistence.FlushModeType;
import javax.persistence.TypedQuery;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Predicate;
import javax.persistence.criteria.Root;
import org.keycloak.authorization.AuthorizationProvider;
import org.keycloak.authorization.jpa.entities.PermissionTicketEntity;
import org.keycloak.authorization.model.PermissionTicket;
import org.keycloak.authorization.model.Resource;
import org.keycloak.authorization.model.ResourceServer;
import org.keycloak.authorization.store.PermissionTicketStore;
import org.keycloak.authorization.store.ResourceStore;
import org.keycloak.models.utils.KeycloakModelUtils;
import javax.persistence.LockModeType;
import static org.keycloak.models.jpa.PaginationUtils.paginateQuery;
import static org.keycloak.utils.StreamsUtil.closing;
/**
* @author <a href="mailto:psilva@redhat.com">Pedro Igor</a>
*/
public class JPAPermissionTicketStore implements PermissionTicketStore {
// JPA entity manager backing all persistence operations of this store.
private final EntityManager entityManager;
// Entry point to the sibling authorization stores (resources, scopes, tickets).
private final AuthorizationProvider provider;
public JPAPermissionTicketStore(EntityManager entityManager, AuthorizationProvider provider) {
this.entityManager = entityManager;
this.provider = provider;
}
/**
 * Counts the permission tickets matching the given filter attributes
 * (optionally scoped to one resource server) by streaming the matching
 * ticket ids and counting them.
 */
@Override
public long count(Map<PermissionTicket.FilterOption, String> attributes, String resourceServerId) {
CriteriaBuilder builder = entityManager.getCriteriaBuilder();
CriteriaQuery<Long> querybuilder = builder.createQuery(Long.class);
Root<PermissionTicketEntity> root = querybuilder.from(PermissionTicketEntity.class);
// Selects the "id" column although the query is typed Long; this only works
// because the TypedQuery below is used raw and the rows are merely counted,
// never read back as Long.
querybuilder.select(root.get("id"));
List<Predicate> predicates = getPredicates(builder, root, resourceServerId, attributes);
querybuilder.where(predicates.toArray(new Predicate[predicates.size()])).orderBy(builder.asc(root.get("id")));
TypedQuery query = entityManager.createQuery(querybuilder);
// closing(...) ensures the underlying result stream is closed after counting.
return closing(query.getResultStream()).count();
}
/**
 * Translates the supported {@link PermissionTicket.FilterOption}s into JPA
 * criteria predicates over {@link PermissionTicketEntity}. A non-null
 * {@code resourceServerId} always adds a resource-server equality predicate.
 *
 * @throws IllegalArgumentException for unsupported filter options
 */
private List<Predicate> getPredicates(CriteriaBuilder builder,
Root<PermissionTicketEntity> root,
String resourceServerId,
Map<PermissionTicket.FilterOption, String> attributes) {
List<Predicate> predicates = new ArrayList<>();
if (resourceServerId != null) {
predicates.add(builder.equal(root.get("resourceServer").get("id"), resourceServerId));
}
attributes.forEach((filterOption, value) -> {
switch (filterOption) {
case ID:
case OWNER:
case REQUESTER:
// Simple equality on a direct column of the ticket entity.
predicates.add(builder.equal(root.get(filterOption.getName()), value));
break;
case SCOPE_ID:
case RESOURCE_ID:
case RESOURCE_NAME:
case POLICY_ID:
// These filter names have the form "association.attribute": join the
// association and match the attribute against the value.
String[] predicateValues = filterOption.getName().split("\\.");
predicates.add(root.join(predicateValues[0]).get(predicateValues[1]).in(value));
break;
case SCOPE_IS_NULL:
if (Boolean.parseBoolean(value)) {
predicates.add(builder.isNull(root.get("scope")));
} else {
predicates.add(builder.isNotNull(root.get("scope")));
}
break;
case GRANTED:
// A ticket counts as granted exactly when grantedTimestamp is set.
if (Boolean.parseBoolean(value)) {
predicates.add(builder.isNotNull(root.get("grantedTimestamp")));
} else {
predicates.add(builder.isNull(root.get("grantedTimestamp")));
}
break;
case REQUESTER_IS_NULL:
predicates.add(builder.isNull(root.get("requester")));
break;
case POLICY_IS_NOT_NULL:
predicates.add(builder.isNotNull(root.get("policy")));
break;
default:
throw new IllegalArgumentException("Unsupported filter [" + filterOption + "]");
}
});
return predicates;
}
/**
 * Creates and persists a new permission ticket for the given resource
 * (and optional scope), owned by the resource's owner and requested by
 * {@code requester}. Flushes immediately so the row exists for subsequent
 * queries in the same transaction.
 */
@Override
public PermissionTicket create(String resourceId, String scopeId, String requester, ResourceServer resourceServer) {
PermissionTicketEntity entity = new PermissionTicketEntity();
entity.setId(KeycloakModelUtils.generateId());
entity.setResource(ResourceAdapter.toEntity(entityManager, provider.getStoreFactory().getResourceStore().findById(resourceId, resourceServer.getId())));
entity.setRequester(requester);
entity.setCreatedTimestamp(System.currentTimeMillis());
// Scope is optional: a ticket without scope covers the whole resource.
if (scopeId != null) {
entity.setScope(ScopeAdapter.toEntity(entityManager, provider.getStoreFactory().getScopeStore().findById(scopeId, resourceServer.getId())));
}
// The ticket owner is the owner of the underlying resource.
entity.setOwner(entity.getResource().getOwner());
entity.setResourceServer(ResourceServerAdapter.toEntity(entityManager, resourceServer));
this.entityManager.persist(entity);
this.entityManager.flush();
PermissionTicket model = new PermissionTicketAdapter(entity, entityManager, provider.getStoreFactory());
return model;
}
/**
 * Deletes the ticket with the given id, if it exists. The row is locked
 * pessimistically before removal to avoid concurrent updates.
 */
@Override
public void delete(String id) {
PermissionTicketEntity policy = entityManager.find(PermissionTicketEntity.class, id, LockModeType.PESSIMISTIC_WRITE);
if (policy != null) {
this.entityManager.remove(policy);
}
}
/**
 * Looks up a ticket by primary key, or returns {@code null}.
 * NOTE(review): {@code resourceServerId} is not used in the lookup — the
 * ticket is fetched by id alone; confirm callers rely on that.
 */
@Override
public PermissionTicket findById(String id, String resourceServerId) {
if (id == null) {
return null;
}
PermissionTicketEntity entity = entityManager.find(PermissionTicketEntity.class, id);
if (entity == null) return null;
return new PermissionTicketAdapter(entity, entityManager, provider.getStoreFactory());
}
/**
 * Returns all tickets belonging to the given resource server. Fetches ids
 * via a named query, then resolves each through the ticket store.
 */
@Override
public List<PermissionTicket> findByResourceServer(final String resourceServerId) {
TypedQuery<String> query = entityManager.createNamedQuery("findPolicyIdByServerId", String.class);
query.setParameter("serverId", resourceServerId);
List<String> result = query.getResultList();
List<PermissionTicket> list = new LinkedList<>();
PermissionTicketStore ticketStore = provider.getStoreFactory().getPermissionTicketStore();
for (String id : result) {
PermissionTicket ticket = ticketStore.findById(id, resourceServerId);
if (Objects.nonNull(ticket)) {
list.add(ticket);
}
}
return list;
}
/**
 * Returns all tickets attached to the given resource on the given
 * resource server. COMMIT flush mode avoids flushing pending changes just
 * to run this read-only query.
 */
@Override
public List<PermissionTicket> findByResource(final String resourceId, String resourceServerId) {
TypedQuery<String> query = entityManager.createNamedQuery("findPermissionIdByResource", String.class);
query.setFlushMode(FlushModeType.COMMIT);
query.setParameter("resourceId", resourceId);
query.setParameter("serverId", resourceServerId);
List<String> result = query.getResultList();
List<PermissionTicket> list = new LinkedList<>();
PermissionTicketStore ticketStore = provider.getStoreFactory().getPermissionTicketStore();
for (String id : result) {
PermissionTicket ticket = ticketStore.findById(id, resourceServerId);
if (Objects.nonNull(ticket)) {
list.add(ticket);
}
}
return list;
}
/**
 * Returns all tickets attached to the given scope on the given resource
 * server; an empty list when {@code scopeId} is null.
 */
@Override
public List<PermissionTicket> findByScope(String scopeId, String resourceServerId) {
if (scopeId==null) {
return Collections.emptyList();
}
// Use separate subquery to handle DB2 and MSSSQL
TypedQuery<String> query = entityManager.createNamedQuery("findPermissionIdByScope", String.class);
query.setFlushMode(FlushModeType.COMMIT);
query.setParameter("scopeId", scopeId);
query.setParameter("serverId", resourceServerId);
List<String> result = query.getResultList();
List<PermissionTicket> list = new LinkedList<>();
PermissionTicketStore ticketStore = provider.getStoreFactory().getPermissionTicketStore();
for (String id : result) {
PermissionTicket ticket = ticketStore.findById(id, resourceServerId);
if (Objects.nonNull(ticket)) {
list.add(ticket);
}
}
return list;
}
/**
 * General-purpose filtered, paginated lookup. Builds a criteria query from
 * the filter attributes, pages over the matching ids, then resolves each id
 * through the ticket store.
 */
@Override
public List<PermissionTicket> find(Map<PermissionTicket.FilterOption, String> attributes, String resourceServerId, int firstResult, int maxResult) {
CriteriaBuilder builder = entityManager.getCriteriaBuilder();
CriteriaQuery<PermissionTicketEntity> querybuilder = builder.createQuery(PermissionTicketEntity.class);
Root<PermissionTicketEntity> root = querybuilder.from(PermissionTicketEntity.class);
// As in count(): the query is typed to the entity but actually selects the
// "id" column; the raw TypedQuery below yields the ids as Strings.
querybuilder.select(root.get("id"));
List<Predicate> predicates = getPredicates(builder, root, resourceServerId, attributes);
querybuilder.where(predicates.toArray(new Predicate[predicates.size()])).orderBy(builder.asc(root.get("id")));
TypedQuery query = entityManager.createQuery(querybuilder);
List<String> result = paginateQuery(query, firstResult, maxResult).getResultList();
List<PermissionTicket> list = new LinkedList<>();
PermissionTicketStore ticketStore = provider.getStoreFactory().getPermissionTicketStore();
for (String id : result) {
PermissionTicket ticket = ticketStore.findById(id, resourceServerId);
if (Objects.nonNull(ticket)) {
list.add(ticket);
}
}
return list;
}
/**
 * Returns the tickets granted to {@code userId} on the given resource
 * server (GRANTED + REQUESTER filters, no pagination).
 */
@Override
public List<PermissionTicket> findGranted(String userId, String resourceServerId) {
Map<PermissionTicket.FilterOption, String> filters = new EnumMap<>(PermissionTicket.FilterOption.class);
filters.put(PermissionTicket.FilterOption.GRANTED, Boolean.TRUE.toString());
filters.put(PermissionTicket.FilterOption.REQUESTER, userId);
return find(filters, resourceServerId, -1, -1);
}
/**
 * Same as {@link #findGranted(String, String)} but additionally restricted
 * to a specific resource name.
 */
@Override
public List<PermissionTicket> findGranted(String resourceName, String userId, String resourceServerId) {
Map<PermissionTicket.FilterOption, String> filters = new EnumMap<>(PermissionTicket.FilterOption.class);
filters.put(PermissionTicket.FilterOption.RESOURCE_NAME, resourceName);
filters.put(PermissionTicket.FilterOption.GRANTED, Boolean.TRUE.toString());
filters.put(PermissionTicket.FilterOption.REQUESTER, userId);
return find(filters, resourceServerId, -1, -1);
}
/**
 * Returns the resources granted to {@code requester}, optionally filtered
 * by a case-insensitive name substring, with pagination.
 */
@Override
public List<Resource> findGrantedResources(String requester, String name, int first, int max) {
// Two named queries: with or without the name LIKE filter.
TypedQuery<String> query = name == null ?
entityManager.createNamedQuery("findGrantedResources", String.class) :
entityManager.createNamedQuery("findGrantedResourcesByName", String.class);
query.setFlushMode(FlushModeType.COMMIT);
query.setParameter("requester", requester);
if (name != null) {
// Substring match, lower-cased on the application side.
query.setParameter("resourceName", "%" + name.toLowerCase() + "%");
}
List<String> result = paginateQuery(query, first, max).getResultList();
List<Resource> list = new LinkedList<>();
ResourceStore resourceStore = provider.getStoreFactory().getResourceStore();
for (String id : result) {
Resource resource = resourceStore.findById(id, null);
if (Objects.nonNull(resource)) {
list.add(resource);
}
}
return list;
}
/**
 * Returns the resources owned by {@code owner} for which grants exist,
 * with pagination.
 */
@Override
public List<Resource> findGrantedOwnerResources(String owner, int first, int max) {
TypedQuery<String> query = entityManager.createNamedQuery("findGrantedOwnerResources", String.class);
query.setFlushMode(FlushModeType.COMMIT);
query.setParameter("owner", owner);
List<String> result = paginateQuery(query, first, max).getResultList();
List<Resource> list = new LinkedList<>();
ResourceStore resourceStore = provider.getStoreFactory().getResourceStore();
for (String id : result) {
Resource resource = resourceStore.findById(id, null);
if (Objects.nonNull(resource)) {
list.add(resource);
}
}
return list;
}
/**
 * Returns all tickets owned by {@code owner} on the given resource server.
 * NOTE(review): the named query is called "findPolicyIdByType" although it
 * is parameterized here by serverId/owner — confirm the query definition
 * really filters by owner.
 */
@Override
public List<PermissionTicket> findByOwner(String owner, String resourceServerId) {
TypedQuery<String> query = entityManager.createNamedQuery("findPolicyIdByType", String.class);
query.setFlushMode(FlushModeType.COMMIT);
query.setParameter("serverId", resourceServerId);
query.setParameter("owner", owner);
List<String> result = query.getResultList();
List<PermissionTicket> list = new LinkedList<>();
PermissionTicketStore ticketStore = provider.getStoreFactory().getPermissionTicketStore();
for (String id : result) {
PermissionTicket ticket = ticketStore.findById(id, resourceServerId);
if (Objects.nonNull(ticket)) {
list.add(ticket);
}
}
return list;
}
}
| |
/*
* Written by Doug Lea with assistance from members of JCP JSR-166
* Expert Group and released to the public domain, as explained at
* http://creativecommons.org/publicdomain/zero/1.0/
* Other contributors include Andrew Wright, Jeffrey Hayes,
* Pat Fisher, Mike Judd.
*/
package jsr166;
import junit.framework.*;
import java.util.Arrays;
import java.util.concurrent.atomic.AtomicLongArray;
public class AtomicLongArrayTest extends JSR166TestCase {
/**
 * constructor creates array of given size with all elements zero
 */
public void testConstructor() {
    AtomicLongArray array = new AtomicLongArray(SIZE);
    for (int idx = 0; idx < SIZE; idx++) {
        assertEquals(0, array.get(idx));
    }
}
/**
 * constructor with null array throws NPE
 */
public void testConstructor2NPE() {
    try {
        long[] nullSource = null;
        new AtomicLongArray(nullSource);
        shouldThrow();
    } catch (NullPointerException success) {}
}
/**
 * constructor with array is of same size and has all elements
 */
public void testConstructor2() {
    long[] source = { 17L, 3L, -42L, 99L, -7L };
    AtomicLongArray copy = new AtomicLongArray(source);
    assertEquals(source.length, copy.length());
    for (int idx = 0; idx < source.length; idx++) {
        assertEquals(source[idx], copy.get(idx));
    }
}
/**
 * get and set for out of bound indices throw IndexOutOfBoundsException
 */
public void testIndexing() {
    AtomicLongArray arr = new AtomicLongArray(SIZE);
    // Probe one index below and one above the valid range.
    for (int badIndex : new int[] { -1, SIZE }) {
        try {
            arr.get(badIndex);
            shouldThrow();
        } catch (IndexOutOfBoundsException success) {}
        try {
            arr.set(badIndex, 1);
            shouldThrow();
        } catch (IndexOutOfBoundsException success) {}
        try {
            arr.lazySet(badIndex, 1);
            shouldThrow();
        } catch (IndexOutOfBoundsException success) {}
        try {
            arr.compareAndSet(badIndex, 1, 2);
            shouldThrow();
        } catch (IndexOutOfBoundsException success) {}
        try {
            arr.weakCompareAndSet(badIndex, 1, 2);
            shouldThrow();
        } catch (IndexOutOfBoundsException success) {}
        try {
            arr.getAndAdd(badIndex, 1);
            shouldThrow();
        } catch (IndexOutOfBoundsException success) {}
        try {
            arr.addAndGet(badIndex, 1);
            shouldThrow();
        } catch (IndexOutOfBoundsException success) {}
    }
}
/**
 * get returns the last value set at index
 */
public void testGetSet() {
    AtomicLongArray arr = new AtomicLongArray(SIZE);
    for (int idx = 0; idx < SIZE; idx++) {
        arr.set(idx, 1);
        assertEquals(1, arr.get(idx));
        arr.set(idx, 2);
        assertEquals(2, arr.get(idx));
        arr.set(idx, -3);
        assertEquals(-3, arr.get(idx));
    }
}
/**
 * get returns the last value lazySet at index by same thread
 */
public void testGetLazySet() {
    AtomicLongArray arr = new AtomicLongArray(SIZE);
    for (int idx = 0; idx < SIZE; idx++) {
        arr.lazySet(idx, 1);
        assertEquals(1, arr.get(idx));
        arr.lazySet(idx, 2);
        assertEquals(2, arr.get(idx));
        arr.lazySet(idx, -3);
        assertEquals(-3, arr.get(idx));
    }
}
/**
 * compareAndSet succeeds in changing value if equal to expected else fails
 */
public void testCompareAndSet() {
    AtomicLongArray arr = new AtomicLongArray(SIZE);
    for (int idx = 0; idx < SIZE; idx++) {
        arr.set(idx, 1);
        // Matching expected values: each CAS must succeed.
        assertTrue(arr.compareAndSet(idx, 1, 2));
        assertTrue(arr.compareAndSet(idx, 2, -4));
        assertEquals(-4, arr.get(idx));
        // Stale expected value: the CAS must fail and leave the slot alone.
        assertFalse(arr.compareAndSet(idx, -5, 7));
        assertEquals(-4, arr.get(idx));
        assertTrue(arr.compareAndSet(idx, -4, 7));
        assertEquals(7, arr.get(idx));
    }
}
/**
 * compareAndSet in one thread enables another waiting for value
 * to succeed
 */
public void testCompareAndSetInMultipleThreads() throws InterruptedException {
    final AtomicLongArray shared = new AtomicLongArray(1);
    shared.set(0, 1);
    // The helper spins until the main thread has moved slot 0 to 2.
    Thread helper = new Thread(new CheckedRunnable() {
        public void realRun() {
            while (!shared.compareAndSet(0, 2, 3))
                Thread.yield();
        }});
    helper.start();
    assertTrue(shared.compareAndSet(0, 1, 2));
    helper.join(LONG_DELAY_MS);
    assertFalse(helper.isAlive());
    assertEquals(3, shared.get(0));
}
/**
 * repeated weakCompareAndSet succeeds in changing value when equal
 * to expected
 */
public void testWeakCompareAndSet() {
    AtomicLongArray arr = new AtomicLongArray(SIZE);
    for (int idx = 0; idx < SIZE; idx++) {
        arr.set(idx, 1);
        // weakCompareAndSet may fail spuriously, so retry until it takes.
        do {} while (!arr.weakCompareAndSet(idx, 1, 2));
        do {} while (!arr.weakCompareAndSet(idx, 2, -4));
        assertEquals(-4, arr.get(idx));
        do {} while (!arr.weakCompareAndSet(idx, -4, 7));
        assertEquals(7, arr.get(idx));
    }
}
/**
 * getAndSet returns previous value and sets to given value at given index
 */
public void testGetAndSet() {
    AtomicLongArray arr = new AtomicLongArray(SIZE);
    for (int idx = 0; idx < SIZE; idx++) {
        arr.set(idx, 1);
        assertEquals(1, arr.getAndSet(idx, 0));
        assertEquals(0, arr.getAndSet(idx, -10));
        assertEquals(-10, arr.getAndSet(idx, 1));
    }
}
/**
 * getAndAdd returns previous value and adds given value
 */
public void testGetAndAdd() {
    AtomicLongArray arr = new AtomicLongArray(SIZE);
    for (int idx = 0; idx < SIZE; idx++) {
        arr.set(idx, 1);
        assertEquals(1, arr.getAndAdd(idx, 2));
        assertEquals(3, arr.get(idx));
        assertEquals(3, arr.getAndAdd(idx, -4));
        assertEquals(-1, arr.get(idx));
    }
}
/**
 * getAndDecrement returns previous value and decrements
 */
public void testGetAndDecrement() {
    AtomicLongArray arr = new AtomicLongArray(SIZE);
    for (int idx = 0; idx < SIZE; idx++) {
        arr.set(idx, 1);
        assertEquals(1, arr.getAndDecrement(idx));
        assertEquals(0, arr.getAndDecrement(idx));
        assertEquals(-1, arr.getAndDecrement(idx));
    }
}
/**
 * getAndIncrement returns previous value and increments
 */
public void testGetAndIncrement() {
    AtomicLongArray array = new AtomicLongArray(SIZE);
    for (int slot = 0; slot != SIZE; ++slot) {
        array.set(slot, 1);
        // Post-increment semantics: old value returned, slot raised by one.
        assertEquals(1, array.getAndIncrement(slot));
        assertEquals(2, array.get(slot));
        array.set(slot, -2);
        assertEquals(-2, array.getAndIncrement(slot));
        assertEquals(-1, array.getAndIncrement(slot));
        assertEquals(0, array.getAndIncrement(slot));
        assertEquals(1, array.get(slot));
    }
}
/**
 * addAndGet adds given value to current, and returns current value
 */
public void testAddAndGet() {
    AtomicLongArray array = new AtomicLongArray(SIZE);
    for (int slot = 0; slot != SIZE; ++slot) {
        array.set(slot, 1);
        // Unlike getAndAdd, the return value is the post-add value.
        assertEquals(3, array.addAndGet(slot, 2));
        assertEquals(3, array.get(slot));
        assertEquals(-1, array.addAndGet(slot, -4));
        assertEquals(-1, array.get(slot));
    }
}
/**
 * decrementAndGet decrements and returns current value
 */
public void testDecrementAndGet() {
    AtomicLongArray array = new AtomicLongArray(SIZE);
    for (int slot = 0; slot != SIZE; ++slot) {
        array.set(slot, 1);
        // Pre-decrement semantics: the new (already decremented) value comes back.
        assertEquals(0, array.decrementAndGet(slot));
        assertEquals(-1, array.decrementAndGet(slot));
        assertEquals(-2, array.decrementAndGet(slot));
        assertEquals(-2, array.get(slot));
    }
}
/**
 * incrementAndGet increments and returns current value
 */
public void testIncrementAndGet() {
    AtomicLongArray array = new AtomicLongArray(SIZE);
    for (int slot = 0; slot != SIZE; ++slot) {
        array.set(slot, 1);
        // Pre-increment semantics: the new (already incremented) value comes back.
        assertEquals(2, array.incrementAndGet(slot));
        assertEquals(2, array.get(slot));
        array.set(slot, -2);
        assertEquals(-1, array.incrementAndGet(slot));
        assertEquals(0, array.incrementAndGet(slot));
        assertEquals(1, array.incrementAndGet(slot));
        assertEquals(1, array.get(slot));
    }
}
static final long COUNTDOWN = 100000;
// Worker that repeatedly sweeps the shared array, decrementing any non-zero
// slot via CAS and tallying its own successful decrements in {@code counts}.
// Terminates once a full sweep finds every slot at zero.
class Counter extends CheckedRunnable {
    final AtomicLongArray aa;
    volatile long counts;
    Counter(AtomicLongArray a) { aa = a; }
    public void realRun() {
        boolean allZero;
        do {
            allZero = true;
            for (int slot = 0; slot < aa.length(); slot++) {
                long current = aa.get(slot);
                // Slots must never be driven below zero by competing workers.
                assertTrue(current >= 0);
                if (current != 0) {
                    allZero = false;
                    // Only count the decrement if our CAS is the one that won.
                    if (aa.compareAndSet(slot, current, current - 1))
                        ++counts;
                }
            }
        } while (!allZero);
    }
}
/**
 * Multiple threads using same array of counters successfully
 * update a number of times equal to total count
 */
public void testCountingInMultipleThreads() throws InterruptedException {
    final AtomicLongArray aa = new AtomicLongArray(SIZE);
    for (int i = 0; i < SIZE; i++)
        aa.set(i, COUNTDOWN);
    Counter c1 = new Counter(aa);
    Counter c2 = new Counter(aa);
    Thread t1 = new Thread(c1);
    Thread t2 = new Thread(c2);
    t1.start();
    t2.start();
    t1.join();
    t2.join();
    // assertEquals takes (expected, actual); the original call had the
    // arguments swapped, which yields a misleading "expected X but was Y"
    // message on failure. CAS guarantees each decrement is counted exactly
    // once across both workers, so the totals must sum to SIZE * COUNTDOWN.
    assertEquals(SIZE * COUNTDOWN, c1.counts + c2.counts);
}
/**
 * a deserialized serialized array holds same values
 */
public void testSerialization() throws Exception {
    AtomicLongArray original = new AtomicLongArray(SIZE);
    for (int slot = 0; slot < SIZE; slot++)
        original.set(slot, -slot);
    // Round-trip through serialization and compare element by element.
    AtomicLongArray copy = serialClone(original);
    assertNotSame(original, copy);
    assertEquals(original.length(), copy.length());
    for (int slot = 0; slot < SIZE; slot++)
        assertEquals(original.get(slot), copy.get(slot));
}
/**
 * toString returns current value.
 */
public void testToString() {
    long[] seed = { 17, 3, -42, 99, -7 };
    AtomicLongArray array = new AtomicLongArray(seed);
    // The rendering must match Arrays.toString of the backing values.
    assertEquals(Arrays.toString(seed), array.toString());
}
}
| |
/*
* Copyright 2002-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.format.datetime.standard;
import java.time.DateTimeException;
import java.time.Duration;
import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.Month;
import java.time.MonthDay;
import java.time.Period;
import java.time.Year;
import java.time.YearMonth;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeParseException;
import java.time.format.FormatStyle;
import java.util.ArrayList;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.List;
import java.util.Locale;
import java.util.TimeZone;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
import org.springframework.beans.MutablePropertyValues;
import org.springframework.beans.TypeMismatchException;
import org.springframework.context.i18n.LocaleContextHolder;
import org.springframework.core.convert.ConversionFailedException;
import org.springframework.core.convert.support.DefaultConversionService;
import org.springframework.format.annotation.DateTimeFormat;
import org.springframework.format.annotation.DateTimeFormat.ISO;
import org.springframework.format.support.FormattingConversionService;
import org.springframework.validation.BindingResult;
import org.springframework.validation.DataBinder;
import org.springframework.validation.FieldError;
import static org.assertj.core.api.Assertions.assertThat;
/**
* @author Keith Donald
* @author Juergen Hoeller
* @author Phillip Webb
* @author Sam Brannen
* @author Kazuki Shimizu
*/
class DateTimeFormattingTests {
private final FormattingConversionService conversionService = new FormattingConversionService();
private DataBinder binder;
@BeforeEach
void setup() {
DateTimeFormatterRegistrar registrar = new DateTimeFormatterRegistrar();
setup(registrar);
}
private void setup(DateTimeFormatterRegistrar registrar) {
DefaultConversionService.addDefaultConverters(conversionService);
registrar.registerFormatters(conversionService);
DateTimeBean bean = new DateTimeBean();
bean.getChildren().add(new DateTimeBean());
binder = new DataBinder(bean);
binder.setConversionService(conversionService);
LocaleContextHolder.setLocale(Locale.US);
DateTimeContext context = new DateTimeContext();
context.setTimeZone(ZoneId.of("-05:00"));
DateTimeContextHolder.setDateTimeContext(context);
}
@AfterEach
void cleanup() {
LocaleContextHolder.setLocale(null);
DateTimeContextHolder.setDateTimeContext(null);
}
@Test
void testBindLocalDate() {
MutablePropertyValues propertyValues = new MutablePropertyValues();
propertyValues.add("localDate", "10/31/09");
binder.bind(propertyValues);
assertThat(binder.getBindingResult().getErrorCount()).isEqualTo(0);
assertThat(binder.getBindingResult().getFieldValue("localDate")).isEqualTo("10/31/09");
}
@Test
void testBindLocalDateWithSpecificStyle() {
DateTimeFormatterRegistrar registrar = new DateTimeFormatterRegistrar();
registrar.setDateStyle(FormatStyle.LONG);
setup(registrar);
MutablePropertyValues propertyValues = new MutablePropertyValues();
propertyValues.add("localDate", "October 31, 2009");
binder.bind(propertyValues);
assertThat(binder.getBindingResult().getErrorCount()).isEqualTo(0);
assertThat(binder.getBindingResult().getFieldValue("localDate")).isEqualTo("October 31, 2009");
}
@Test
void testBindLocalDateWithSpecificFormatter() {
DateTimeFormatterRegistrar registrar = new DateTimeFormatterRegistrar();
registrar.setDateFormatter(DateTimeFormatter.ofPattern("yyyyMMdd"));
setup(registrar);
MutablePropertyValues propertyValues = new MutablePropertyValues();
propertyValues.add("localDate", "20091031");
binder.bind(propertyValues);
assertThat(binder.getBindingResult().getErrorCount()).isEqualTo(0);
assertThat(binder.getBindingResult().getFieldValue("localDate")).isEqualTo("20091031");
}
@Test
void testBindLocalDateArray() {
MutablePropertyValues propertyValues = new MutablePropertyValues();
propertyValues.add("localDate", new String[] {"10/31/09"});
binder.bind(propertyValues);
assertThat(binder.getBindingResult().getErrorCount()).isEqualTo(0);
}
@Test
void testBindLocalDateAnnotated() {
MutablePropertyValues propertyValues = new MutablePropertyValues();
propertyValues.add("styleLocalDate", "Oct 31, 2009");
binder.bind(propertyValues);
assertThat(binder.getBindingResult().getErrorCount()).isEqualTo(0);
assertThat(binder.getBindingResult().getFieldValue("styleLocalDate")).isEqualTo("Oct 31, 2009");
}
@Test
void testBindLocalDateAnnotatedWithError() {
MutablePropertyValues propertyValues = new MutablePropertyValues();
propertyValues.add("styleLocalDate", "Oct -31, 2009");
binder.bind(propertyValues);
assertThat(binder.getBindingResult().getFieldErrorCount("styleLocalDate")).isEqualTo(1);
assertThat(binder.getBindingResult().getFieldValue("styleLocalDate")).isEqualTo("Oct -31, 2009");
}
@Test
void testBindNestedLocalDateAnnotated() {
MutablePropertyValues propertyValues = new MutablePropertyValues();
propertyValues.add("children[0].styleLocalDate", "Oct 31, 2009");
binder.bind(propertyValues);
assertThat(binder.getBindingResult().getErrorCount()).isEqualTo(0);
assertThat(binder.getBindingResult().getFieldValue("children[0].styleLocalDate")).isEqualTo("Oct 31, 2009");
}
@Test
void testBindLocalDateAnnotatedWithDirectFieldAccess() {
binder.initDirectFieldAccess();
MutablePropertyValues propertyValues = new MutablePropertyValues();
propertyValues.add("styleLocalDate", "Oct 31, 2009");
binder.bind(propertyValues);
assertThat(binder.getBindingResult().getErrorCount()).isEqualTo(0);
assertThat(binder.getBindingResult().getFieldValue("styleLocalDate")).isEqualTo("Oct 31, 2009");
}
@Test
void testBindLocalDateAnnotatedWithDirectFieldAccessAndError() {
binder.initDirectFieldAccess();
MutablePropertyValues propertyValues = new MutablePropertyValues();
propertyValues.add("styleLocalDate", "Oct -31, 2009");
binder.bind(propertyValues);
assertThat(binder.getBindingResult().getFieldErrorCount("styleLocalDate")).isEqualTo(1);
assertThat(binder.getBindingResult().getFieldValue("styleLocalDate")).isEqualTo("Oct -31, 2009");
}
@Test
void testBindLocalDateFromJavaUtilCalendar() {
MutablePropertyValues propertyValues = new MutablePropertyValues();
propertyValues.add("localDate", new GregorianCalendar(2009, 9, 31, 0, 0));
binder.bind(propertyValues);
assertThat(binder.getBindingResult().getErrorCount()).isEqualTo(0);
assertThat(binder.getBindingResult().getFieldValue("localDate")).isEqualTo("10/31/09");
}
@Test
void testBindLocalTime() {
MutablePropertyValues propertyValues = new MutablePropertyValues();
propertyValues.add("localTime", "12:00 PM");
binder.bind(propertyValues);
assertThat(binder.getBindingResult().getErrorCount()).isEqualTo(0);
assertThat(binder.getBindingResult().getFieldValue("localTime")).isEqualTo("12:00 PM");
}
@Test
void testBindLocalTimeWithSpecificStyle() {
DateTimeFormatterRegistrar registrar = new DateTimeFormatterRegistrar();
registrar.setTimeStyle(FormatStyle.MEDIUM);
setup(registrar);
MutablePropertyValues propertyValues = new MutablePropertyValues();
propertyValues.add("localTime", "12:00:00 PM");
binder.bind(propertyValues);
assertThat(binder.getBindingResult().getErrorCount()).isEqualTo(0);
assertThat(binder.getBindingResult().getFieldValue("localTime")).isEqualTo("12:00:00 PM");
}
@Test
void testBindLocalTimeWithSpecificFormatter() {
DateTimeFormatterRegistrar registrar = new DateTimeFormatterRegistrar();
registrar.setTimeFormatter(DateTimeFormatter.ofPattern("HHmmss"));
setup(registrar);
MutablePropertyValues propertyValues = new MutablePropertyValues();
propertyValues.add("localTime", "130000");
binder.bind(propertyValues);
assertThat(binder.getBindingResult().getErrorCount()).isEqualTo(0);
assertThat(binder.getBindingResult().getFieldValue("localTime")).isEqualTo("130000");
}
@Test
void testBindLocalTimeAnnotated() {
MutablePropertyValues propertyValues = new MutablePropertyValues();
propertyValues.add("styleLocalTime", "12:00:00 PM");
binder.bind(propertyValues);
assertThat(binder.getBindingResult().getErrorCount()).isEqualTo(0);
assertThat(binder.getBindingResult().getFieldValue("styleLocalTime")).isEqualTo("12:00:00 PM");
}
@Test
void testBindLocalTimeFromJavaUtilCalendar() {
MutablePropertyValues propertyValues = new MutablePropertyValues();
propertyValues.add("localTime", new GregorianCalendar(1970, 0, 0, 12, 0));
binder.bind(propertyValues);
assertThat(binder.getBindingResult().getErrorCount()).isEqualTo(0);
assertThat(binder.getBindingResult().getFieldValue("localTime")).isEqualTo("12:00 PM");
}
@Test
void testBindLocalDateTime() {
MutablePropertyValues propertyValues = new MutablePropertyValues();
propertyValues.add("localDateTime", LocalDateTime.of(2009, 10, 31, 12, 0));
binder.bind(propertyValues);
assertThat(binder.getBindingResult().getErrorCount()).isEqualTo(0);
String value = binder.getBindingResult().getFieldValue("localDateTime").toString();
assertThat(value.startsWith("10/31/09")).isTrue();
assertThat(value.endsWith("12:00 PM")).isTrue();
}
@Test
void testBindLocalDateTimeAnnotated() {
MutablePropertyValues propertyValues = new MutablePropertyValues();
propertyValues.add("styleLocalDateTime", LocalDateTime.of(2009, 10, 31, 12, 0));
binder.bind(propertyValues);
assertThat(binder.getBindingResult().getErrorCount()).isEqualTo(0);
String value = binder.getBindingResult().getFieldValue("styleLocalDateTime").toString();
assertThat(value.startsWith("Oct 31, 2009")).isTrue();
assertThat(value.endsWith("12:00:00 PM")).isTrue();
}
@Test
void testBindLocalDateTimeFromJavaUtilCalendar() {
MutablePropertyValues propertyValues = new MutablePropertyValues();
propertyValues.add("localDateTime", new GregorianCalendar(2009, 9, 31, 12, 0));
binder.bind(propertyValues);
assertThat(binder.getBindingResult().getErrorCount()).isEqualTo(0);
String value = binder.getBindingResult().getFieldValue("localDateTime").toString();
assertThat(value.startsWith("10/31/09")).isTrue();
assertThat(value.endsWith("12:00 PM")).isTrue();
}
@Test
void testBindDateTimeWithSpecificStyle() {
DateTimeFormatterRegistrar registrar = new DateTimeFormatterRegistrar();
registrar.setDateTimeStyle(FormatStyle.MEDIUM);
setup(registrar);
MutablePropertyValues propertyValues = new MutablePropertyValues();
propertyValues.add("localDateTime", LocalDateTime.of(2009, 10, 31, 12, 0));
binder.bind(propertyValues);
assertThat(binder.getBindingResult().getErrorCount()).isEqualTo(0);
String value = binder.getBindingResult().getFieldValue("localDateTime").toString();
assertThat(value.startsWith("Oct 31, 2009")).isTrue();
assertThat(value.endsWith("12:00:00 PM")).isTrue();
}
@Test
void testBindPatternLocalDateTime() {
MutablePropertyValues propertyValues = new MutablePropertyValues();
propertyValues.add("patternLocalDateTime", "10/31/09 12:00 PM");
binder.bind(propertyValues);
assertThat(binder.getBindingResult().getErrorCount()).isEqualTo(0);
assertThat(binder.getBindingResult().getFieldValue("patternLocalDateTime")).isEqualTo("10/31/09 12:00 PM");
}
@Test
void testBindDateTimeOverflow() {
MutablePropertyValues propertyValues = new MutablePropertyValues();
propertyValues.add("patternLocalDateTime", "02/29/09 12:00 PM");
binder.bind(propertyValues);
assertThat(binder.getBindingResult().getErrorCount()).isEqualTo(1);
}
@Test
void testBindISODate() {
MutablePropertyValues propertyValues = new MutablePropertyValues();
propertyValues.add("isoLocalDate", "2009-10-31");
binder.bind(propertyValues);
assertThat(binder.getBindingResult().getErrorCount()).isEqualTo(0);
assertThat(binder.getBindingResult().getFieldValue("isoLocalDate")).isEqualTo("2009-10-31");
}
@Test
void isoLocalDateWithInvalidFormat() {
MutablePropertyValues propertyValues = new MutablePropertyValues();
String propertyName = "isoLocalDate";
propertyValues.add(propertyName, "2009-31-10");
binder.bind(propertyValues);
BindingResult bindingResult = binder.getBindingResult();
assertThat(bindingResult.getErrorCount()).isEqualTo(1);
FieldError fieldError = bindingResult.getFieldError(propertyName);
assertThat(fieldError.unwrap(TypeMismatchException.class))
.hasMessageContaining("for property 'isoLocalDate'")
.hasCauseInstanceOf(ConversionFailedException.class).getCause()
.hasMessageContaining("for value '2009-31-10'")
.hasCauseInstanceOf(IllegalArgumentException.class).getCause()
.hasMessageContaining("Parse attempt failed for value [2009-31-10]")
.hasCauseInstanceOf(DateTimeParseException.class).getCause()
// Unable to parse date time value "2009-31-10" using configuration from
// @org.springframework.format.annotation.DateTimeFormat(pattern=, style=SS, iso=DATE, fallbackPatterns=[])
// We do not check "fallbackPatterns=[]", since the array representation in the toString()
// implementation for annotations changed from [] to {} in Java 9.
.hasMessageContainingAll(
"Unable to parse date time value \"2009-31-10\" using configuration from",
"@org.springframework.format.annotation.DateTimeFormat", "iso=DATE")
.hasCauseInstanceOf(DateTimeParseException.class).getCause()
.hasMessageStartingWith("Text '2009-31-10'")
.hasCauseInstanceOf(DateTimeException.class).getCause()
.hasMessageContaining("Invalid value for MonthOfYear (valid values 1 - 12): 31")
.hasNoCause();
}
@Test
void testBindISOTime() {
MutablePropertyValues propertyValues = new MutablePropertyValues();
propertyValues.add("isoLocalTime", "12:00:00");
binder.bind(propertyValues);
assertThat(binder.getBindingResult().getErrorCount()).isEqualTo(0);
assertThat(binder.getBindingResult().getFieldValue("isoLocalTime")).isEqualTo("12:00:00");
}
@Test
void testBindISOTimeWithZone() {
MutablePropertyValues propertyValues = new MutablePropertyValues();
propertyValues.add("isoLocalTime", "12:00:00.000-05:00");
binder.bind(propertyValues);
assertThat(binder.getBindingResult().getErrorCount()).isEqualTo(0);
assertThat(binder.getBindingResult().getFieldValue("isoLocalTime")).isEqualTo("12:00:00");
}
@Test
void testBindISODateTime() {
MutablePropertyValues propertyValues = new MutablePropertyValues();
propertyValues.add("isoLocalDateTime", "2009-10-31T12:00:00");
binder.bind(propertyValues);
assertThat(binder.getBindingResult().getErrorCount()).isEqualTo(0);
assertThat(binder.getBindingResult().getFieldValue("isoLocalDateTime")).isEqualTo("2009-10-31T12:00:00");
}
@Test
void testBindISODateTimeWithZone() {
MutablePropertyValues propertyValues = new MutablePropertyValues();
propertyValues.add("isoLocalDateTime", "2009-10-31T12:00:00.000Z");
binder.bind(propertyValues);
assertThat(binder.getBindingResult().getErrorCount()).isEqualTo(0);
assertThat(binder.getBindingResult().getFieldValue("isoLocalDateTime")).isEqualTo("2009-10-31T12:00:00");
}
@Test
void testBindInstant() {
MutablePropertyValues propertyValues = new MutablePropertyValues();
propertyValues.add("instant", "2009-10-31T12:00:00.000Z");
binder.bind(propertyValues);
assertThat(binder.getBindingResult().getErrorCount()).isEqualTo(0);
assertThat(binder.getBindingResult().getFieldValue("instant").toString().startsWith("2009-10-31T12:00")).isTrue();
}
@Test
void testBindInstantAnnotated() {
MutablePropertyValues propertyValues = new MutablePropertyValues();
propertyValues.add("styleInstant", "2017-02-21T13:00");
binder.bind(propertyValues);
assertThat(binder.getBindingResult().getErrorCount()).isEqualTo(0);
assertThat(binder.getBindingResult().getFieldValue("styleInstant")).isEqualTo("2017-02-21T13:00");
}
@Test
@SuppressWarnings("deprecation")
void testBindInstantFromJavaUtilDate() {
TimeZone defaultZone = TimeZone.getDefault();
TimeZone.setDefault(TimeZone.getTimeZone("GMT"));
try {
MutablePropertyValues propertyValues = new MutablePropertyValues();
propertyValues.add("instant", new Date(109, 9, 31, 12, 0));
binder.bind(propertyValues);
assertThat(binder.getBindingResult().getErrorCount()).isEqualTo(0);
assertThat(binder.getBindingResult().getFieldValue("instant").toString().startsWith("2009-10-31")).isTrue();
}
finally {
TimeZone.setDefault(defaultZone);
}
}
@Test
void testBindPeriod() {
MutablePropertyValues propertyValues = new MutablePropertyValues();
propertyValues.add("period", "P6Y3M1D");
binder.bind(propertyValues);
assertThat(binder.getBindingResult().getErrorCount()).isEqualTo(0);
assertThat(binder.getBindingResult().getFieldValue("period").toString().equals("P6Y3M1D")).isTrue();
}
@Test
void testBindDuration() {
MutablePropertyValues propertyValues = new MutablePropertyValues();
propertyValues.add("duration", "PT8H6M12.345S");
binder.bind(propertyValues);
assertThat(binder.getBindingResult().getErrorCount()).isEqualTo(0);
assertThat(binder.getBindingResult().getFieldValue("duration").toString().equals("PT8H6M12.345S")).isTrue();
}
@Test
void testBindYear() {
MutablePropertyValues propertyValues = new MutablePropertyValues();
propertyValues.add("year", "2007");
binder.bind(propertyValues);
assertThat(binder.getBindingResult().getErrorCount()).isEqualTo(0);
assertThat(binder.getBindingResult().getFieldValue("year").toString().equals("2007")).isTrue();
}
@Test
void testBindMonth() {
MutablePropertyValues propertyValues = new MutablePropertyValues();
propertyValues.add("month", "JULY");
binder.bind(propertyValues);
assertThat(binder.getBindingResult().getErrorCount()).isEqualTo(0);
assertThat(binder.getBindingResult().getFieldValue("month").toString().equals("JULY")).isTrue();
}
@Test
void testBindMonthInAnyCase() {
MutablePropertyValues propertyValues = new MutablePropertyValues();
propertyValues.add("month", "July");
binder.bind(propertyValues);
assertThat(binder.getBindingResult().getErrorCount()).isEqualTo(0);
assertThat(binder.getBindingResult().getFieldValue("month").toString().equals("JULY")).isTrue();
}
@Test
void testBindYearMonth() {
MutablePropertyValues propertyValues = new MutablePropertyValues();
propertyValues.add("yearMonth", "2007-12");
binder.bind(propertyValues);
assertThat(binder.getBindingResult().getErrorCount()).isEqualTo(0);
assertThat(binder.getBindingResult().getFieldValue("yearMonth").toString().equals("2007-12")).isTrue();
}
@Test
public void testBindYearMonthAnnotatedPattern() {
MutablePropertyValues propertyValues = new MutablePropertyValues();
propertyValues.add("yearMonthAnnotatedPattern", "12/2007");
binder.bind(propertyValues);
assertThat(binder.getBindingResult().getErrorCount()).isEqualTo(0);
assertThat(binder.getBindingResult().getFieldValue("yearMonthAnnotatedPattern")).isEqualTo("12/2007");
assertThat(binder.getBindingResult().getRawFieldValue("yearMonthAnnotatedPattern")).isEqualTo(YearMonth.parse("2007-12"));
}
@Test
void testBindMonthDay() {
MutablePropertyValues propertyValues = new MutablePropertyValues();
propertyValues.add("monthDay", "--12-03");
binder.bind(propertyValues);
assertThat(binder.getBindingResult().getErrorCount()).isEqualTo(0);
assertThat(binder.getBindingResult().getFieldValue("monthDay").toString().equals("--12-03")).isTrue();
}
@Test
public void testBindMonthDayAnnotatedPattern() {
MutablePropertyValues propertyValues = new MutablePropertyValues();
propertyValues.add("monthDayAnnotatedPattern", "1/3");
binder.bind(propertyValues);
assertThat(binder.getBindingResult().getErrorCount()).isEqualTo(0);
assertThat(binder.getBindingResult().getFieldValue("monthDayAnnotatedPattern")).isEqualTo("1/3");
assertThat(binder.getBindingResult().getRawFieldValue("monthDayAnnotatedPattern")).isEqualTo(MonthDay.parse("--01-03"));
}
@Nested
class FallbackPatternTests {
@ParameterizedTest(name = "input date: {0}")
@ValueSource(strings = {"2021-03-02", "2021.03.02", "20210302", "3/2/21"})
void styleLocalDate(String propertyValue) {
String propertyName = "styleLocalDateWithFallbackPatterns";
MutablePropertyValues propertyValues = new MutablePropertyValues();
propertyValues.add(propertyName, propertyValue);
binder.bind(propertyValues);
BindingResult bindingResult = binder.getBindingResult();
assertThat(bindingResult.getErrorCount()).isEqualTo(0);
assertThat(bindingResult.getFieldValue(propertyName)).isEqualTo("3/2/21");
}
@ParameterizedTest(name = "input date: {0}")
@ValueSource(strings = {"2021-03-02", "2021.03.02", "20210302", "3/2/21"})
void patternLocalDate(String propertyValue) {
String propertyName = "patternLocalDateWithFallbackPatterns";
MutablePropertyValues propertyValues = new MutablePropertyValues();
propertyValues.add(propertyName, propertyValue);
binder.bind(propertyValues);
BindingResult bindingResult = binder.getBindingResult();
assertThat(bindingResult.getErrorCount()).isEqualTo(0);
assertThat(bindingResult.getFieldValue(propertyName)).isEqualTo("2021-03-02");
}
@ParameterizedTest(name = "input date: {0}")
@ValueSource(strings = {"12:00:00 PM", "12:00:00", "12:00"})
void styleLocalTime(String propertyValue) {
String propertyName = "styleLocalTimeWithFallbackPatterns";
MutablePropertyValues propertyValues = new MutablePropertyValues();
propertyValues.add(propertyName, propertyValue);
binder.bind(propertyValues);
BindingResult bindingResult = binder.getBindingResult();
assertThat(bindingResult.getErrorCount()).isEqualTo(0);
assertThat(bindingResult.getFieldValue(propertyName)).isEqualTo("12:00:00 PM");
}
@ParameterizedTest(name = "input date: {0}")
@ValueSource(strings = {"2021-03-02T12:00:00", "2021-03-02 12:00:00", "3/2/21 12:00"})
void isoLocalDateTime(String propertyValue) {
String propertyName = "isoLocalDateTimeWithFallbackPatterns";
MutablePropertyValues propertyValues = new MutablePropertyValues();
propertyValues.add(propertyName, propertyValue);
binder.bind(propertyValues);
BindingResult bindingResult = binder.getBindingResult();
assertThat(bindingResult.getErrorCount()).isEqualTo(0);
assertThat(bindingResult.getFieldValue(propertyName)).isEqualTo("2021-03-02T12:00:00");
}
@Test
void patternLocalDateWithUnsupportedPattern() {
String propertyValue = "210302";
String propertyName = "patternLocalDateWithFallbackPatterns";
MutablePropertyValues propertyValues = new MutablePropertyValues();
propertyValues.add(propertyName, propertyValue);
binder.bind(propertyValues);
BindingResult bindingResult = binder.getBindingResult();
assertThat(bindingResult.getErrorCount()).isEqualTo(1);
FieldError fieldError = bindingResult.getFieldError(propertyName);
assertThat(fieldError.unwrap(TypeMismatchException.class))
.hasMessageContaining("for property 'patternLocalDateWithFallbackPatterns'")
.hasCauseInstanceOf(ConversionFailedException.class).getCause()
.hasMessageContaining("for value '210302'")
.hasCauseInstanceOf(IllegalArgumentException.class).getCause()
.hasMessageContaining("Parse attempt failed for value [210302]")
.hasCauseInstanceOf(DateTimeParseException.class).getCause()
// Unable to parse date time value "210302" using configuration from
// @org.springframework.format.annotation.DateTimeFormat(
// pattern=yyyy-MM-dd, style=SS, iso=NONE, fallbackPatterns=[M/d/yy, yyyyMMdd, yyyy.MM.dd])
.hasMessageContainingAll(
"Unable to parse date time value \"210302\" using configuration from",
"@org.springframework.format.annotation.DateTimeFormat",
"yyyy-MM-dd", "M/d/yy", "yyyyMMdd", "yyyy.MM.dd")
.hasCauseInstanceOf(DateTimeParseException.class).getCause()
.hasMessageStartingWith("Text '210302'")
.hasNoCause();
}
}
public static class DateTimeBean {
private LocalDate localDate;
@DateTimeFormat(style = "M-")
private LocalDate styleLocalDate;
@DateTimeFormat(style = "S-", fallbackPatterns = { "yyyy-MM-dd", "yyyyMMdd", "yyyy.MM.dd" })
private LocalDate styleLocalDateWithFallbackPatterns;
@DateTimeFormat(pattern = "yyyy-MM-dd", fallbackPatterns = { "M/d/yy", "yyyyMMdd", "yyyy.MM.dd" })
private LocalDate patternLocalDateWithFallbackPatterns;
private LocalTime localTime;
@DateTimeFormat(style = "-M")
private LocalTime styleLocalTime;
@DateTimeFormat(style = "-M", fallbackPatterns = { "HH:mm:ss", "HH:mm"})
private LocalTime styleLocalTimeWithFallbackPatterns;
private LocalDateTime localDateTime;
@DateTimeFormat(style = "MM")
private LocalDateTime styleLocalDateTime;
@DateTimeFormat(pattern = "M/d/yy h:mm a")
private LocalDateTime patternLocalDateTime;
@DateTimeFormat(iso = ISO.DATE)
private LocalDate isoLocalDate;
@DateTimeFormat(iso = ISO.TIME)
private LocalTime isoLocalTime;
@DateTimeFormat(iso = ISO.DATE_TIME)
private LocalDateTime isoLocalDateTime;
@DateTimeFormat(iso = ISO.DATE_TIME, fallbackPatterns = { "yyyy-MM-dd HH:mm:ss", "M/d/yy HH:mm"})
private LocalDateTime isoLocalDateTimeWithFallbackPatterns;
private Instant instant;
@DateTimeFormat(pattern = "yyyy-MM-dd'T'HH:mm")
private Instant styleInstant;
private Period period;
private Duration duration;
private Year year;
private Month month;
private YearMonth yearMonth;
@DateTimeFormat(pattern="MM/uuuu")
private YearMonth yearMonthAnnotatedPattern;
private MonthDay monthDay;
@DateTimeFormat(pattern="M/d")
private MonthDay monthDayAnnotatedPattern;
private final List<DateTimeBean> children = new ArrayList<>();
public LocalDate getLocalDate() {
return this.localDate;
}
public void setLocalDate(LocalDate localDate) {
this.localDate = localDate;
}
public LocalDate getStyleLocalDate() {
return this.styleLocalDate;
}
public void setStyleLocalDate(LocalDate styleLocalDate) {
this.styleLocalDate = styleLocalDate;
}
public LocalDate getStyleLocalDateWithFallbackPatterns() {
return this.styleLocalDateWithFallbackPatterns;
}
public void setStyleLocalDateWithFallbackPatterns(LocalDate styleLocalDateWithFallbackPatterns) {
this.styleLocalDateWithFallbackPatterns = styleLocalDateWithFallbackPatterns;
}
public LocalDate getPatternLocalDateWithFallbackPatterns() {
return this.patternLocalDateWithFallbackPatterns;
}
public void setPatternLocalDateWithFallbackPatterns(LocalDate patternLocalDateWithFallbackPatterns) {
this.patternLocalDateWithFallbackPatterns = patternLocalDateWithFallbackPatterns;
}
public LocalTime getLocalTime() {
return this.localTime;
}
public void setLocalTime(LocalTime localTime) {
this.localTime = localTime;
}
public LocalTime getStyleLocalTime() {
return this.styleLocalTime;
}
public void setStyleLocalTime(LocalTime styleLocalTime) {
this.styleLocalTime = styleLocalTime;
}
public LocalTime getStyleLocalTimeWithFallbackPatterns() {
return this.styleLocalTimeWithFallbackPatterns;
}
public void setStyleLocalTimeWithFallbackPatterns(LocalTime styleLocalTimeWithFallbackPatterns) {
this.styleLocalTimeWithFallbackPatterns = styleLocalTimeWithFallbackPatterns;
}
public LocalDateTime getLocalDateTime() {
return this.localDateTime;
}
public void setLocalDateTime(LocalDateTime localDateTime) {
this.localDateTime = localDateTime;
}
public LocalDateTime getStyleLocalDateTime() {
return this.styleLocalDateTime;
}
public void setStyleLocalDateTime(LocalDateTime styleLocalDateTime) {
this.styleLocalDateTime = styleLocalDateTime;
}
public LocalDateTime getPatternLocalDateTime() {
return this.patternLocalDateTime;
}
public void setPatternLocalDateTime(LocalDateTime patternLocalDateTime) {
this.patternLocalDateTime = patternLocalDateTime;
}
public LocalDate getIsoLocalDate() {
return this.isoLocalDate;
}
public void setIsoLocalDate(LocalDate isoLocalDate) {
this.isoLocalDate = isoLocalDate;
}
public LocalTime getIsoLocalTime() {
return this.isoLocalTime;
}
public void setIsoLocalTime(LocalTime isoLocalTime) {
this.isoLocalTime = isoLocalTime;
}
public LocalDateTime getIsoLocalDateTime() {
return this.isoLocalDateTime;
}
public void setIsoLocalDateTime(LocalDateTime isoLocalDateTime) {
this.isoLocalDateTime = isoLocalDateTime;
}
public LocalDateTime getIsoLocalDateTimeWithFallbackPatterns() {
return this.isoLocalDateTimeWithFallbackPatterns;
}
public void setIsoLocalDateTimeWithFallbackPatterns(LocalDateTime isoLocalDateTimeWithFallbackPatterns) {
this.isoLocalDateTimeWithFallbackPatterns = isoLocalDateTimeWithFallbackPatterns;
}
public Instant getInstant() {
return this.instant;
}
public void setInstant(Instant instant) {
this.instant = instant;
}
public Instant getStyleInstant() {
return this.styleInstant;
}
public void setStyleInstant(Instant styleInstant) {
this.styleInstant = styleInstant;
}
// JavaBean accessors for the Period, Duration, Year, Month and YearMonth
// properties.
public Period getPeriod() {
    return this.period;
}

public void setPeriod(Period period) {
    this.period = period;
}

public Duration getDuration() {
    return this.duration;
}

public void setDuration(Duration duration) {
    this.duration = duration;
}

public Year getYear() {
    return this.year;
}

public void setYear(Year year) {
    this.year = year;
}

public Month getMonth() {
    return this.month;
}

public void setMonth(Month month) {
    this.month = month;
}

public YearMonth getYearMonth() {
    return this.yearMonth;
}

public void setYearMonth(YearMonth yearMonth) {
    this.yearMonth = yearMonth;
}
// JavaBean accessors for the annotated-pattern YearMonth property.
public YearMonth getYearMonthAnnotatedPattern() {
    // Use explicit `this.` for consistency with every other accessor in
    // this class.
    return this.yearMonthAnnotatedPattern;
}

public void setYearMonthAnnotatedPattern(YearMonth yearMonthAnnotatedPattern) {
    this.yearMonthAnnotatedPattern = yearMonthAnnotatedPattern;
}
// JavaBean accessors for the MonthDay property.
public MonthDay getMonthDay() {
    return this.monthDay;
}

public void setMonthDay(MonthDay monthDay) {
    this.monthDay = monthDay;
}
// JavaBean accessors for the annotated-pattern MonthDay property.
public MonthDay getMonthDayAnnotatedPattern() {
    // Use explicit `this.` for consistency with every other accessor in
    // this class.
    return this.monthDayAnnotatedPattern;
}

public void setMonthDayAnnotatedPattern(MonthDay monthDayAnnotatedPattern) {
    this.monthDayAnnotatedPattern = monthDayAnnotatedPattern;
}
// Returns the live internal list; callers can mutate it directly (there is
// no setter). NOTE(review): assumed intentional — confirm before wrapping
// the list in an unmodifiable view.
public List<DateTimeBean> getChildren() {
    return this.children;
}
}
}
| |
/*
* Copyright 2008 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
import com.google.javascript.jscomp.AnalyzePrototypeProperties.ClassMemberFunction;
import com.google.javascript.jscomp.AnalyzePrototypeProperties.NameInfo;
import com.google.javascript.jscomp.AnalyzePrototypeProperties.Property;
import com.google.javascript.jscomp.AnalyzePrototypeProperties.PrototypeProperty;
import com.google.javascript.jscomp.AnalyzePrototypeProperties.Symbol;
import com.google.javascript.rhino.Node;
import java.util.Collection;
import java.util.Iterator;
/**
 * Move prototype methods into later chunks.
 *
 * @author nicksantos@google.com (Nick Santos)
 */
class CrossChunkMethodMotion implements CompilerPass {
  // Internal errors
  static final DiagnosticType NULL_COMMON_MODULE_ERROR = DiagnosticType.error(
      "JSC_INTERNAL_ERROR_MODULE_DEPEND",
      "null deepest common module");

  private final AbstractCompiler compiler;
  // Generates the numeric ids shared by each stub/unstub call pair.
  private final IdGenerator idGenerator;
  // Finds prototype/class member properties and the chunks that reference them.
  private final AnalyzePrototypeProperties analyzer;
  private final JSModuleGraph moduleGraph;
  // When true, method definitions are moved bodily; when false, a stub stays
  // behind in the source chunk and the destination chunk unstubs it.
  private final boolean noStubFunctions;
  private final AstFactory astFactory;

  static final String STUB_METHOD_NAME = "JSCompiler_stubMethod";
  static final String UNSTUB_METHOD_NAME = "JSCompiler_unstubMethod";

  // Visible for testing
  static final String STUB_DECLARATIONS =
      "var JSCompiler_stubMap = [];" +
      "function JSCompiler_stubMethod(JSCompiler_stubMethod_id) {" +
      " return function() {" +
      " return JSCompiler_stubMap[JSCompiler_stubMethod_id].apply(" +
      " this, arguments);" +
      " };" +
      "}" +
      "function JSCompiler_unstubMethod(" +
      " JSCompiler_unstubMethod_id, JSCompiler_unstubMethod_body) {" +
      " return JSCompiler_stubMap[JSCompiler_unstubMethod_id] = " +
      " JSCompiler_unstubMethod_body;" +
      "}";

  /**
   * Creates a new pass for moving prototype properties.
   *
   * @param compiler The compiler.
   * @param idGenerator An id generator for method stubs.
   * @param canModifyExterns If true, then we can move prototype properties that are declared in the
   *     externs file.
   * @param noStubFunctions if true, we can move methods without stub functions in the parent
   *     chunk.
   */
  CrossChunkMethodMotion(
      AbstractCompiler compiler,
      IdGenerator idGenerator,
      boolean canModifyExterns,
      boolean noStubFunctions) {
    this.compiler = compiler;
    this.idGenerator = idGenerator;
    this.moduleGraph = compiler.getModuleGraph();
    this.analyzer =
        new AnalyzePrototypeProperties(
            compiler, moduleGraph, canModifyExterns, false /* anchorUnusedVars */, noStubFunctions);
    this.noStubFunctions = noStubFunctions;
    this.astFactory = compiler.createAstFactory();
  }

  @Override
  public void process(Node externRoot, Node root) {
    // If there are < 2 chunks, then we will never move anything,
    // so we're done.
    if (moduleGraph.getModuleCount() > 1) {
      analyzer.process(externRoot, root);
      moveMethods(analyzer.getAllNameInfo());
    }
  }

  /**
   * Move methods deeper in the chunk graph when possible.
   */
  private void moveMethods(Collection<NameInfo> allNameInfo) {
    boolean hasStubDeclaration = idGenerator.hasGeneratedAnyIds();
    for (NameInfo nameInfo : allNameInfo) {
      if (!nameInfo.isReferenced()) {
        // The code below can't do anything with unreferenced name
        // infos. They should be skipped to avoid NPE since their
        // deepestCommonModuleRef is null.
        continue;
      }
      if (nameInfo.readsClosureVariables()) {
        // Methods that read closure variables are never moved.
        continue;
      }
      JSModule deepestCommonModuleRef = nameInfo.getDeepestCommonModuleRef();
      if (deepestCommonModuleRef == null) {
        compiler.report(JSError.make(NULL_COMMON_MODULE_ERROR));
        continue;
      }
      // Iterate newest-to-oldest declaration of this name.
      Iterator<Symbol> declarations =
          nameInfo.getDeclarations().descendingIterator();
      while (declarations.hasNext()) {
        Symbol symbol = declarations.next();
        if (symbol instanceof PrototypeProperty) {
          tryToMovePrototypeMethod(nameInfo, deepestCommonModuleRef, (PrototypeProperty) symbol);
        } else if (symbol instanceof ClassMemberFunction) {
          tryToMoveMemberFunction(nameInfo, deepestCommonModuleRef, (ClassMemberFunction) symbol);
        } // else it's a variable definition, and we don't move those.
      }
    }
    // Emit the stub runtime exactly once, and only if this run generated ids.
    if (!noStubFunctions && !hasStubDeclaration && idGenerator
        .hasGeneratedAnyIds()) {
      // Declare stub functions in the top-most chunk.
      Node declarations = compiler.parseSyntheticCode(STUB_DECLARATIONS);
      NodeUtil.markNewScopesChanged(declarations, compiler);
      Node firstScript = compiler.getNodeForCodeInsertion(null);
      firstScript.addChildrenToFront(declarations.removeChildren());
      compiler.reportChangeToEnclosingScope(firstScript);
    }
  }

  private void tryToMovePrototypeMethod(
      NameInfo nameInfo, JSModule deepestCommonModuleRef, PrototypeProperty prop) {
    // We should only move a property across chunks if:
    // 1) We can move it deeper in the chunk graph, and
    // 2) it's a function, and
    // 3) it is not a GETTER_DEF or a SETTER_DEF, and
    // 4) the class is available in the global scope.
    //
    // #1 should be obvious. #2 is more subtle. It's possible
    // to copy off of a prototype, as in the code:
    // for (var k in Foo.prototype) {
    //   doSomethingWith(Foo.prototype[k]);
    // }
    // This is a common way to implement pseudo-multiple inheritance in JS.
    //
    // So if we move a prototype method into a deeper chunk, we must
    // replace it with a stub function so that it preserves its original
    // behavior.
    if (prop.getRootVar() == null || !prop.getRootVar().isGlobal()) {
      return;
    }
    Node value = prop.getValue();
    Node valueParent = value.getParent();
    // Only attempt to move normal functions.
    if (!value.isFunction()
        // A GET or SET can't be deferred like a normal
        // FUNCTION property definition as a mix-in would get the result
        // of a GET instead of the function itself.
        || valueParent.isGetterDef()
        || valueParent.isSetterDef()) {
      return;
    }
    if (moduleGraph.dependsOn(deepestCommonModuleRef, prop.getModule())) {
      if (hasUnmovableRedeclaration(nameInfo, prop)) {
        // If it has been redeclared on the same object, skip it.
        return;
      }
      Node destParent = compiler.getNodeForCodeInsertion(deepestCommonModuleRef);
      // Dispatch on the syntactic form of the definition.
      if (valueParent.isMemberFunctionDef()) {
        movePrototypeObjectLiteralMethodShorthand(nameInfo.name, destParent, value);
      } else if (valueParent.isStringKey()) {
        movePrototypeObjectLiteralProperty(nameInfo.name, destParent, value);
      } else {
        // Note that computed properties should have been filtered out by
        // AnalyzePrototypeProperties, because they don't have a recognizable property name.
        // Getters and setters are filtered out by the code above
        checkState(valueParent.isAssign(), valueParent);
        movePrototypeDotMethodAssignment(destParent, value);
      }
    }
  }

  /**
   * Move a property defined by object literal assigned to `.prototype`.
   *
   * <pre><code>
   * Foo.prototype = { propName: function() {}};
   * </code></pre>
   */
  private void movePrototypeObjectLiteralProperty(
      String propName, Node destParent, Node functionNode) {
    checkState(functionNode.isFunction(), functionNode);
    Node stringKey = functionNode.getParent();
    checkState(stringKey.isStringKey(), stringKey);
    Node prototypeObjectLiteral = stringKey.getParent();
    checkState(prototypeObjectLiteral.isObjectLit(), prototypeObjectLiteral);
    Node assignNode = prototypeObjectLiteral.getParent();
    checkState(
        assignNode.isAssign() && prototypeObjectLiteral.isSecondChildOf(assignNode), assignNode);
    Node ownerDotPrototypeNode = assignNode.getFirstChild();
    checkState(
        ownerDotPrototypeNode.isQualifiedName()
            && ownerDotPrototypeNode.getLastChild().getString().equals("prototype"),
        ownerDotPrototypeNode);
    if (noStubFunctions) {
      // Remove the definition from the object literal
      stringKey.detach();
      compiler.reportChangeToEnclosingScope(prototypeObjectLiteral);
      // Prepend definition to new chunk
      // Foo.prototype.propName = function() {};
      Node ownerDotPrototypeDotPropName =
          astFactory.createGetProp(ownerDotPrototypeNode.cloneTree(), propName);
      functionNode.detach();
      Node definitionStatement =
          astFactory
              .createAssignStatement(ownerDotPrototypeDotPropName, functionNode)
              .useSourceInfoIfMissingFromForTree(stringKey);
      destParent.addChildToFront(definitionStatement);
      compiler.reportChangeToEnclosingScope(destParent);
    } else {
      int stubId = idGenerator.newId();
      // { propName: function() {} } => { propName: JSCompiler_stubMethod(0) }
      Node stubCall = createStubCall(functionNode, stubId);
      functionNode.replaceWith(stubCall);
      compiler.reportChangeToEnclosingScope(prototypeObjectLiteral);
      // Prepend definition to new chunk
      // Foo.prototype.propName = function() {};
      Node ownerDotPrototypeDotPropName =
          astFactory.createGetProp(ownerDotPrototypeNode.cloneTree(), propName);
      Node unstubCall = createUnstubCall(functionNode, stubId);
      Node definitionStatement =
          astFactory
              .createAssignStatement(ownerDotPrototypeDotPropName, unstubCall)
              .useSourceInfoIfMissingFromForTree(stringKey);
      destParent.addChildToFront(definitionStatement);
      compiler.reportChangeToEnclosingScope(destParent);
    }
  }

  /**
   * Move a property defined by assignment to `.prototype` or `.prototype.propName`.
   *
   * <pre><code>
   * Foo.prototype.propName = function() {};
   * </code></pre>
   */
  private void movePrototypeDotMethodAssignment(Node destParent, Node functionNode) {
    checkState(functionNode.isFunction(), functionNode);
    Node assignNode = functionNode.getParent();
    checkState(assignNode.isAssign() && functionNode.isSecondChildOf(assignNode), assignNode);
    Node definitionStatement = assignNode.getParent();
    checkState(definitionStatement.isExprResult(), assignNode);
    if (noStubFunctions) {
      // Remove the definition statement from its current location
      Node assignStatementParent = definitionStatement.getParent();
      definitionStatement.detach();
      compiler.reportChangeToEnclosingScope(assignStatementParent);
      // Prepend definition to new chunk
      // Foo.prototype.propName = function() {};
      destParent.addChildToFront(definitionStatement);
      compiler.reportChangeToEnclosingScope(destParent);
    } else {
      int stubId = idGenerator.newId();
      // replace function definition with temporary placeholder so we can clone the whole
      // assignment statement without cloning the function definition itself.
      Node originalDefinitionPlaceholder = astFactory.createEmpty();
      functionNode.replaceWith(originalDefinitionPlaceholder);
      Node newDefinitionStatement = definitionStatement.cloneTree();
      Node newDefinitionPlaceholder =
          newDefinitionStatement // EXPR_RESULT
              .getOnlyChild() // ASSIGN
              .getLastChild(); // EMPTY RHS node
      // convert original assignment statement to
      // owner.prototype.propName = JSCompiler_stubMethod(0);
      Node stubCall = createStubCall(functionNode, stubId);
      originalDefinitionPlaceholder.replaceWith(stubCall);
      compiler.reportChangeToEnclosingScope(definitionStatement);
      // Prepend new definition to new chunk
      // Foo.prototype.propName = JSCompiler_unstubMethod(0, function() {});
      Node unstubCall = createUnstubCall(functionNode, stubId);
      newDefinitionPlaceholder.replaceWith(unstubCall);
      destParent.addChildToFront(newDefinitionStatement);
      compiler.reportChangeToEnclosingScope(destParent);
    }
  }

  /**
   * Move a method defined with object-literal method shorthand in an object literal assigned to
   * `.prototype`.
   *
   * <pre><code>
   * Foo.prototype = { propName() {}};
   * </code></pre>
   */
  private void movePrototypeObjectLiteralMethodShorthand(
      String propName, Node destParent, Node functionNode) {
    checkState(functionNode.isFunction(), functionNode);
    Node memberFunctionDef = functionNode.getParent();
    checkState(memberFunctionDef.isMemberFunctionDef(), memberFunctionDef);
    Node prototypeObjectLiteral = memberFunctionDef.getParent();
    checkState(prototypeObjectLiteral.isObjectLit(), prototypeObjectLiteral);
    Node assignNode = prototypeObjectLiteral.getParent();
    checkState(
        assignNode.isAssign() && prototypeObjectLiteral.isSecondChildOf(assignNode), assignNode);
    Node ownerDotPrototypeNode = assignNode.getFirstChild();
    checkState(
        ownerDotPrototypeNode.isQualifiedName()
            && ownerDotPrototypeNode.getLastChild().getString().equals("prototype"),
        ownerDotPrototypeNode);
    if (noStubFunctions) {
      // Remove the definition from the object literal
      memberFunctionDef.detach();
      compiler.reportChangeToEnclosingScope(prototypeObjectLiteral);
      // Prepend definition to new chunk
      // Foo.prototype.propName = function() {};
      Node ownerDotPrototypeDotPropName =
          astFactory.createGetProp(ownerDotPrototypeNode.cloneTree(), propName);
      Node definitionStatement =
          astFactory
              .createAssignStatement(ownerDotPrototypeDotPropName, functionNode.detach())
              .useSourceInfoIfMissingFromForTree(memberFunctionDef);
      destParent.addChildToFront(definitionStatement);
      compiler.reportChangeToEnclosingScope(destParent);
    } else {
      int stubId = idGenerator.newId();
      // { propName() {} } => { propName: JSCompiler_stubMethod(0) }
      Node stubCall = createStubCall(functionNode, stubId);
      memberFunctionDef.replaceWith(astFactory.createStringKey(propName, stubCall));
      compiler.reportChangeToEnclosingScope(prototypeObjectLiteral);
      // Prepend definition to new chunk
      // Foo.prototype.propName = function() {};
      Node ownerDotPrototypeDotPropName =
          astFactory.createGetProp(ownerDotPrototypeNode.cloneTree(), propName);
      Node unstubCall = createUnstubCall(functionNode.detach(), stubId);
      Node definitionStatement =
          astFactory
              .createAssignStatement(ownerDotPrototypeDotPropName, unstubCall)
              .useSourceInfoIfMissingFromForTree(memberFunctionDef);
      destParent.addChildToFront(definitionStatement);
      compiler.reportChangeToEnclosingScope(destParent);
    }
  }

  /**
   * Returns a new Node to be used as the stub definition for a method.
   *
   * @param originalDefinition function Node whose definition is being stubbed
   * @param stubId ID to use for stubbing and unstubbing
   * @return a Node that looks like <code>JSCompiler_stubMethod(0)</code>
   */
  private Node createStubCall(Node originalDefinition, int stubId) {
    return astFactory
        .createCall(
            // We can't look up the type of the stub creating method, because we add its
            // definition after type checking.
            astFactory.createNameWithUnknownType(STUB_METHOD_NAME), astFactory.createNumber(stubId))
        .useSourceInfoIfMissingFromForTree(originalDefinition);
  }

  /**
   * Returns a new Node to be used as the unstub definition for a method.
   *
   * @param functionNode actual function definition to be attached. Must be detached now.
   * @param stubId ID to use for stubbing and unstubbing
   * @return a Node that looks like <code>JSCompiler_unstubMethod(0, function() {})</code>
   */
  private Node createUnstubCall(Node functionNode, int stubId) {
    return astFactory
        .createCall(
            // We can't look up the type of the stub creating method, because we add its
            // definition after type checking.
            astFactory.createNameWithUnknownType(UNSTUB_METHOD_NAME),
            astFactory.createNumber(stubId),
            functionNode)
        .useSourceInfoIfMissingFromForTree(functionNode);
  }

  /**
   * If possible, move a class instance member function definition to the deepest chunk common to
   * all uses of the method.
   *
   * @param nameInfo information about all definitions of the given property name
   * @param deepestCommonModuleRef all uses of the method are either in this chunk or in chunks that
   *     depend on it
   * @param classMemberFunction definition of the method within its class body
   */
  private void tryToMoveMemberFunction(
      NameInfo nameInfo, JSModule deepestCommonModuleRef, ClassMemberFunction classMemberFunction) {
    // We should only move a property across chunks if:
    // 1) We can move it deeper in the chunk graph,
    // 2) and it's a normal member function, and not a GETTER_DEF or a SETTER_DEF,
    // 3) and the class is available in the global scope.
    Var rootVar = classMemberFunction.getRootVar();
    if (rootVar == null || !rootVar.isGlobal()) {
      return;
    }
    Node definitionNode = classMemberFunction.getDefinitionNode();
    // Only attempt to move normal member functions.
    // A getter or setter cannot be as easily defined outside of the class to which it belongs.
    if (!definitionNode.isMemberFunctionDef()) {
      return;
    }
    if (moduleGraph.dependsOn(deepestCommonModuleRef, classMemberFunction.getModule())) {
      if (hasUnmovableRedeclaration(nameInfo, classMemberFunction)) {
        // If it has been redeclared on the same object, skip it.
        return;
      }
      Node destinationParent = compiler.getNodeForCodeInsertion(deepestCommonModuleRef);
      String className = rootVar.getName();
      if (noStubFunctions) {
        moveClassInstanceMethodWithoutStub(className, definitionNode, destinationParent);
      } else {
        moveClassInstanceMethodWithStub(className, definitionNode, destinationParent);
      }
    }
  }

  private void moveClassInstanceMethodWithoutStub(
      String className, Node methodDefinition, Node destinationParent) {
    checkArgument(methodDefinition.isMemberFunctionDef(), methodDefinition);
    Node classMembers = checkNotNull(methodDefinition.getParent());
    checkState(classMembers.isClassMembers(), classMembers);
    Node classNode = classMembers.getParent();
    checkState(classNode.isClass(), classNode);
    // Remove the method from the class body entirely.
    methodDefinition.detach();
    compiler.reportChangeToEnclosingScope(classMembers);
    // ClassName.prototype.propertyName = function() {};
    Node classNameDotPrototypeDotPropName =
        astFactory.createGetProps(
            astFactory.createName(className, classNode.getJSType()),
            "prototype",
            methodDefinition.getString());
    Node functionNode = checkNotNull(methodDefinition.getOnlyChild());
    functionNode.detach();
    Node definitionStatementNode =
        astFactory
            .createAssignStatement(classNameDotPrototypeDotPropName, functionNode)
            .useSourceInfoIfMissingFromForTree(methodDefinition);
    destinationParent.addChildToFront(definitionStatementNode);
    compiler.reportChangeToEnclosingScope(destinationParent);
  }

  private void moveClassInstanceMethodWithStub(
      String className, Node methodDefinition, Node destinationParent) {
    checkArgument(methodDefinition.isMemberFunctionDef(), methodDefinition);
    Node classMembers = checkNotNull(methodDefinition.getParent());
    checkState(classMembers.isClassMembers(), classMembers);
    Node classNode = classMembers.getParent();
    checkState(classNode.isClass(), classNode);
    int stubId = idGenerator.newId();
    // Put a stub definition after the class
    // ClassName.prototype.propertyName = JSCompiler_stubMethod(id);
    Node classNameDotPrototypeDotPropName =
        astFactory.createGetProps(
            astFactory.createName(className, classNode.getJSType()),
            "prototype",
            methodDefinition.getString());
    Node stubCall = createStubCall(methodDefinition, stubId);
    Node stubDefinitionStatement =
        astFactory
            .createAssignStatement(classNameDotPrototypeDotPropName, stubCall)
            .useSourceInfoIfMissingFromForTree(methodDefinition);
    Node classDefiningStatement = NodeUtil.getEnclosingStatement(classMembers);
    classDefiningStatement
        .getParent()
        .addChildAfter(stubDefinitionStatement, classDefiningStatement);
    // remove the definition from the class
    methodDefinition.detach();
    compiler.reportChangeToEnclosingScope(classMembers);
    // Prepend unstub definition to the new location.
    // ClassName.prototype.propertyName = JSCompiler_unstubMethod(id, function(...) {...});
    Node classNameDotPrototypeDotPropName2 = classNameDotPrototypeDotPropName.cloneTree();
    Node functionNode = checkNotNull(methodDefinition.getOnlyChild());
    functionNode.detach();
    Node unstubCall = createUnstubCall(functionNode, stubId);
    Node statementNode =
        astFactory
            .createAssignStatement(classNameDotPrototypeDotPropName2, unstubCall)
            .useSourceInfoIfMissingFromForTree(methodDefinition);
    destinationParent.addChildToFront(statementNode);
    compiler.reportChangeToEnclosingScope(destinationParent);
  }

  /**
   * Returns true if this property name is declared on the same root variable in more than one
   * chunk, in which case moving the method is not safe.
   */
  static boolean hasUnmovableRedeclaration(NameInfo nameInfo, Property prop) {
    for (Symbol symbol : nameInfo.getDeclarations()) {
      if (symbol instanceof Property) {
        Property otherProp = (Property) symbol;
        // It is possible to do better here if the dependencies are well defined
        // but redefinitions are usually in optional chunks so it isn't likely
        // worth the effort to check.
        if (prop != otherProp
            && prop.getRootVar() == otherProp.getRootVar()
            && prop.getModule() != otherProp.getModule()) {
          return true;
        }
      }
    }
    return false;
  }
}
| |
package apps.threedmanipulation.tools;
import java.util.ArrayList;
import java.util.List;
import synergynetframework.appsystem.contentsystem.ContentSystem;
import synergynetframework.appsystem.contentsystem.items.ContentItem;
import synergynetframework.appsystem.contentsystem.items.RoundImageLabel;
import synergynetframework.appsystem.contentsystem.items.listener.ItemEventAdapter;
import synergynetframework.jme.cursorsystem.elements.twod.OrthoBringToTop;
import apps.threedmanipulation.ThreeDManipulation;
import apps.threedmanipulation.gestures.ControlPanelMoveRotateScale;
import apps.threedmanipulation.gestures.MonitorCameraRotateTranslateZoom;
import apps.threedmanipulation.gestures.OjbectManipulation;
import apps.threedmanipulation.gestures.OjbectManipulationforCCTV;
import apps.threedmanipulation.listener.ToolListener;
import com.jme.bounding.OrthogonalBoundingBox;
import com.jme.image.Texture;
import com.jme.image.Texture.ApplyMode;
import com.jme.image.Texture.WrapMode;
import com.jme.scene.CameraNode;
import com.jme.scene.Node;
import com.jme.scene.Spatial;
import com.jme.scene.shape.Disk;
import com.jme.scene.shape.Quad;
import com.jme.scene.state.BlendState;
import com.jme.scene.state.TextureState;
import com.jme.system.DisplaySystem;
import com.jme.util.TextureManager;
/**
* The Class MonitorScreen.
*/
public class MonitorScreen extends Node {
/** Serial version UID for this serializable Node subclass. */
private static final long serialVersionUID = 5768509228111148746L;

/** The camera operation mode (a MonitorCameraRotateTranslateZoom mode constant). */
protected String cameraOperationMode = MonitorCameraRotateTranslateZoom.MODE_REMOTECONTROL;

/** The camera node manipulated by this screen's buttons and gestures. */
protected CameraNode camNode;

/** The content system used to create the round button labels. */
protected ContentSystem contentSystem;

/** The objects that can be manipulated through this screen. */
protected List<Spatial> manipulatableOjbects;

/** The current manipulation mode (camera vs. object manipulation). */
protected String mode = OjbectManipulation.MODE_OBJECTMANIPULATION;

/** True while the move-in (z-in) button is held down. */
private boolean moveIn = false;

/** True while the move-out (z-out) button is held down. */
private boolean moveOut = false;

/** The decorative frame drawn around the screen quad. */
protected Quad screenFrame;

/** The quad that carries the manipulation gesture (the screen surface). */
protected Quad screenQuad;

/** The skin colour suffix used to select the frame texture. */
protected String skinColor = "blue";

/** The CCTV manipulation gesture attached to the screen quad. */
protected OjbectManipulationforCCTV telescopeManipulateOjbect;

/** The listeners notified when this tool is disposed. */
protected List<ToolListener> toolListeners = new ArrayList<ToolListener>();

/** The screen width; the screen quad is width x width. */
protected float width;

/** True while the zoom-in button is held down. */
private boolean zoomIn = false;

/** True while the zoom-out button is held down. */
private boolean zoomOut = false;
/**
 * Creates a monitor screen node wired to the given camera and content system.
 *
 * @param name the node name
 * @param contentSystem the content system used to create the button labels
 * @param width the screen width (the screen quad is width x width)
 * @param camNode the camera node manipulated through this screen
 * @param manipulatableOjbects the objects manipulatable through this screen
 * @param skinColor the skin colour suffix used to pick the frame texture
 * @param cameraOperationMode a MonitorCameraRotateTranslateZoom mode constant
 */
public MonitorScreen(String name, ContentSystem contentSystem, float width,
        final CameraNode camNode, List<Spatial> manipulatableOjbects,
        String skinColor, String cameraOperationMode) {
    super(name);
    this.contentSystem = contentSystem;
    this.width = width;
    this.camNode = camNode;
    this.manipulatableOjbects = manipulatableOjbects;
    this.skinColor = skinColor;
    this.cameraOperationMode = cameraOperationMode;
    // Remote-controlled monitors start out steering the camera; all other
    // monitors start out manipulating scene objects.
    mode = this.cameraOperationMode
            .equals(MonitorCameraRotateTranslateZoom.MODE_REMOTECONTROL)
            ? OjbectManipulation.MODE_CAMERAMANIPULATION
            : OjbectManipulation.MODE_OBJECTMANIPULATION;
    // Order matters: buildScreenFrame() uses telescopeManipulateOjbect,
    // which buildScreenQuad() initialises.
    buildScreenQuad();
    buildbuttons();
    buildScreenFrame();
}
/**
 * Registers a listener to be notified when this tool is disposed.
 *
 * @param l the listener to register
 */
public void addToolListener(ToolListener l) {
    this.toolListeners.add(l);
}
/**
 * Builds the five round control buttons (zoom in/out, move in/out, mode
 * toggle) and attaches their disk geometry to this node. The press/release
 * listeners only set the zoomIn/zoomOut/moveIn/moveOut flags; the actual
 * camera changes happen in update(float).
 */
public void buildbuttons() {
    final RoundImageLabel zoomInLabel = (RoundImageLabel) contentSystem
            .createContentItem(RoundImageLabel.class);
    zoomInLabel.setImageInfo(ThreeDManipulation.class
            .getResource("zoominbuttonnormal.png"));
    zoomInLabel.addItemListener(new ItemEventAdapter() {
        @Override
        public void cursorPressed(ContentItem item, long id, float x,
                float y, float pressure) {
            super.cursorPressed(item, id, x, y, pressure);
            // Swap to the highlighted image while the button is held.
            zoomInLabel.setImageInfo(ThreeDManipulation.class
                    .getResource("zoominbutton.png"));
            zoomIn = true;
        }

        @Override
        public void cursorReleased(ContentItem item, long id, float x,
                float y, float pressure) {
            super.cursorReleased(item, id, x, y, pressure);
            zoomInLabel.setImageInfo(ThreeDManipulation.class
                    .getResource("zoominbuttonnormal.png"));
            zoomIn = false;
        }
    });
    Disk disk = (Disk) (zoomInLabel.getImplementationObject());
    // A single alpha blend state is created here and reused for every
    // button disk below.
    BlendState alpha = DisplaySystem.getDisplaySystem().getRenderer()
            .createBlendState();
    alpha.setEnabled(true);
    alpha.setBlendEnabled(true);
    alpha.setSourceFunction(BlendState.SourceFunction.SourceAlpha);
    alpha.setDestinationFunction(BlendState.DestinationFunction.OneMinusSourceAlpha);
    alpha.setTestEnabled(true);
    alpha.setTestFunction(BlendState.TestFunction.GreaterThan);
    disk.setRenderState(alpha);
    this.attachChild(disk);
    zoomInLabel.setRotateTranslateScalable(false);
    zoomInLabel.setBringToTopable(false);
    zoomInLabel.setLocalLocation(100, 70, 0);
    zoomInLabel.setBorderSize(0);
    zoomInLabel.setRadius(15);

    final RoundImageLabel zoomOutLabel = (RoundImageLabel) contentSystem
            .createContentItem(RoundImageLabel.class);
    zoomOutLabel.setImageInfo(ThreeDManipulation.class
            .getResource("zoomoutbuttonnormal.png"));
    zoomOutLabel.addItemListener(new ItemEventAdapter() {
        @Override
        public void cursorPressed(ContentItem item, long id, float x,
                float y, float pressure) {
            super.cursorPressed(item, id, x, y, pressure);
            zoomOutLabel.setImageInfo(ThreeDManipulation.class
                    .getResource("zoomoutbutton.png"));
            zoomOut = true;
        }

        @Override
        public void cursorReleased(ContentItem item, long id, float x,
                float y, float pressure) {
            super.cursorReleased(item, id, x, y, pressure);
            zoomOutLabel.setImageInfo(ThreeDManipulation.class
                    .getResource("zoomoutbuttonnormal.png"));
            zoomOut = false;
        }
    });
    disk = (Disk) (zoomOutLabel.getImplementationObject());
    disk.setRenderState(alpha);
    this.attachChild(disk);
    zoomOutLabel.setRotateTranslateScalable(false);
    zoomOutLabel.setBringToTopable(false);
    zoomOutLabel.setLocalLocation(100, 30, 0);
    zoomOutLabel.setBorderSize(0);
    zoomOutLabel.setRadius(15);

    final RoundImageLabel zInLabel = (RoundImageLabel) contentSystem
            .createContentItem(RoundImageLabel.class);
    zInLabel.setImageInfo(ThreeDManipulation.class
            .getResource("zinbuttonnormal.png"));
    zInLabel.addItemListener(new ItemEventAdapter() {
        @Override
        public void cursorPressed(ContentItem item, long id, float x,
                float y, float pressure) {
            super.cursorPressed(item, id, x, y, pressure);
            zInLabel.setImageInfo(ThreeDManipulation.class
                    .getResource("zinbutton.png"));
            moveIn = true;
        }

        @Override
        public void cursorReleased(ContentItem item, long id, float x,
                float y, float pressure) {
            super.cursorReleased(item, id, x, y, pressure);
            zInLabel.setImageInfo(ThreeDManipulation.class
                    .getResource("zinbuttonnormal.png"));
            moveIn = false;
        }
    });
    disk = (Disk) (zInLabel.getImplementationObject());
    disk.setRenderState(alpha);
    this.attachChild(disk);
    zInLabel.setRotateTranslateScalable(false);
    zInLabel.setBringToTopable(false);
    zInLabel.setLocalLocation(100, -10, 0);
    zInLabel.setBorderSize(0);
    zInLabel.setRadius(15);

    final RoundImageLabel zOutLabel = (RoundImageLabel) contentSystem
            .createContentItem(RoundImageLabel.class);
    zOutLabel.setImageInfo(ThreeDManipulation.class
            .getResource("zoutbuttonnormal.png"));
    zOutLabel.addItemListener(new ItemEventAdapter() {
        @Override
        public void cursorPressed(ContentItem item, long id, float x,
                float y, float pressure) {
            super.cursorPressed(item, id, x, y, pressure);
            // NOTE(review): "zioutbutton.png" looks like a typo of
            // "zoutbutton.png" — confirm which resource actually exists
            // before changing it.
            zOutLabel.setImageInfo(ThreeDManipulation.class
                    .getResource("zioutbutton.png"));
            moveOut = true;
        }

        @Override
        public void cursorReleased(ContentItem item, long id, float x,
                float y, float pressure) {
            super.cursorReleased(item, id, x, y, pressure);
            zOutLabel.setImageInfo(ThreeDManipulation.class
                    .getResource("zoutbuttonnormal.png"));
            moveOut = false;
        }
    });
    disk = (Disk) (zOutLabel.getImplementationObject());
    disk.setRenderState(alpha);
    this.attachChild(disk);
    zOutLabel.setRotateTranslateScalable(false);
    zOutLabel.setBringToTopable(false);
    zOutLabel.setLocalLocation(100, -50, 0);
    zOutLabel.setBorderSize(0);
    zOutLabel.setRadius(15);

    final RoundImageLabel modeLabel = (RoundImageLabel) contentSystem
            .createContentItem(RoundImageLabel.class);
    modeLabel.setImageInfo(ThreeDManipulation.class
            .getResource("manipulationbutton.png"));
    modeLabel.addItemListener(new ItemEventAdapter() {
        @Override
        public void cursorClicked(ContentItem item, long id, float x,
                float y, float pressure) {
            super.cursorClicked(item, id, x, y, pressure);
            // Toggle between camera manipulation and object manipulation,
            // keeping the gesture object and the button image in sync.
            if (mode.equals(OjbectManipulation.MODE_CAMERAMANIPULATION)) {
                mode = OjbectManipulation.MODE_OBJECTMANIPULATION;
                telescopeManipulateOjbect
                        .setMode(OjbectManipulation.MODE_OBJECTMANIPULATION);
                modeLabel.setImageInfo(ThreeDManipulation.class
                        .getResource("manipulationbuttonnormal.png"));
            } else {
                mode = OjbectManipulation.MODE_CAMERAMANIPULATION;
                telescopeManipulateOjbect
                        .setMode(OjbectManipulation.MODE_CAMERAMANIPULATION);
                modeLabel.setImageInfo(ThreeDManipulation.class
                        .getResource("manipulationbutton.png"));
            }
        }
    });
    disk = (Disk) (modeLabel.getImplementationObject());
    disk.setRenderState(alpha);
    this.attachChild(disk);
    modeLabel.setRotateTranslateScalable(false);
    modeLabel.setBringToTopable(false);
    modeLabel.setLocalLocation(100, -90, 0);
    modeLabel.setBorderSize(0);
    modeLabel.setRadius(15);
    // The mode toggle only makes sense for remote-controlled monitors.
    if (!this.cameraOperationMode
            .equals(MonitorCameraRotateTranslateZoom.MODE_REMOTECONTROL)) {
        modeLabel.setVisible(false);
    }
}
/**
 * Builds the textured frame around the screen quad and wires the
 * move/rotate/scale gesture plus tool-dispose forwarding.
 *
 * <p>Must run after {@link #buildScreenQuad()}, which initialises
 * {@code telescopeManipulateOjbect} used below (the constructor calls them
 * in that order).
 */
public void buildScreenFrame() {
    screenFrame = new Quad(name + "screenFrame", width + 50, width + 35);
    screenFrame.setModelBound(new OrthogonalBoundingBox());
    screenFrame.updateModelBound();
    this.attachChild(screenFrame);
    // NOTE(review): this offsets the screen quad, not the frame created just
    // above — confirm it should not be screenFrame.setLocalTranslation.
    screenQuad.setLocalTranslation(-20, -10, 0);
    // Apply the skin-coloured frame texture.
    TextureState ts;
    Texture texture;
    ts = DisplaySystem.getDisplaySystem().getRenderer()
            .createTextureState();
    ts.setCorrectionType(TextureState.CorrectionType.Perspective);
    texture = TextureManager.loadTexture(
            ThreeDManipulation.class.getResource("camerascreen" + skinColor
                    + ".png"), Texture.MinificationFilter.Trilinear,
            Texture.MagnificationFilter.Bilinear);
    texture.setWrap(WrapMode.Repeat);
    texture.setApply(ApplyMode.Replace);
    ts.setTexture(texture);
    ts.apply();
    screenFrame.setRenderState(ts);
    screenFrame.updateRenderState();
    // Enable alpha blending so the transparent parts of the frame texture
    // are not drawn.
    BlendState alpha = DisplaySystem.getDisplaySystem().getRenderer()
            .createBlendState();
    alpha.setEnabled(true);
    alpha.setBlendEnabled(true);
    alpha.setSourceFunction(BlendState.SourceFunction.SourceAlpha);
    alpha.setDestinationFunction(BlendState.DestinationFunction.OneMinusSourceAlpha);
    alpha.setTestEnabled(true);
    alpha.setTestFunction(BlendState.TestFunction.GreaterThan);
    screenFrame.setRenderState(alpha);
    screenFrame.updateRenderState();
    // Dragging the frame moves/rotates/scales the whole monitor; forward
    // dispose events to this screen's registered listeners.
    ControlPanelMoveRotateScale monitorScreenMoveRotateScale = new ControlPanelMoveRotateScale(
            screenFrame, this, camNode, telescopeManipulateOjbect,
            manipulatableOjbects);
    monitorScreenMoveRotateScale.setPickMeOnly(true);
    monitorScreenMoveRotateScale.addToolListener(new ToolListener() {
        @Override
        public void disposeTool(float x, float y) {
            for (ToolListener l : toolListeners) {
                l.disposeTool(x, y);
            }
        }
    });
    @SuppressWarnings("unused")
    OrthoBringToTop bringToTop = new OrthoBringToTop(screenFrame, this);
}
/**
 * Builds the screen quad.
 * <p>
 * Creates the inner screen {@link Quad} (width x width), attaches it to this
 * node and installs the CCTV object-manipulation handler on it. The handler's
 * mode depends on the camera operation mode: camera manipulation when remote
 * controlled, object manipulation otherwise.
 */
public void buildScreenQuad() {
screenQuad = new Quad(name + "screenQuad", width, width);
screenQuad.setModelBound(new OrthogonalBoundingBox());
screenQuad.updateModelBound();
this.attachChild(screenQuad);
// The manipulation handler only reacts to picks on the screen quad itself.
telescopeManipulateOjbect = new OjbectManipulationforCCTV(screenQuad,
manipulatableOjbects);
telescopeManipulateOjbect.setPickMeOnly(true);
telescopeManipulateOjbect.setCamNode(camNode);
if (this.cameraOperationMode
.equals(MonitorCameraRotateTranslateZoom.MODE_REMOTECONTROL)) {
telescopeManipulateOjbect
.setMode(OjbectManipulation.MODE_CAMERAMANIPULATION);
} else {
telescopeManipulateOjbect
.setMode(OjbectManipulation.MODE_OBJECTMANIPULATION);
}
}
/**
 * Gets the focused object.
 *
 * @return the manipulation handler currently attached to the screen quad
 */
public OjbectManipulation getFocusedObject() {
return this.telescopeManipulateOjbect;
}
/**
 * Gets the screen quad.
 *
 * @return the inner screen quad built by {@code buildScreenQuad()}
 */
public Quad getScreenQuad() {
return this.screenQuad;
}
/**
 * Removes the tool listener.
 *
 * @param l
 *            the listener to remove; a no-op if it was never registered
 */
public void removeToolListener(ToolListener l) {
// List.remove(Object) is already a no-op when the element is absent, so
// the former contains() pre-check was redundant (and cost an extra scan).
toolListeners.remove(l);
}
/**
 * Sets the mode.
 * <p>
 * The requested mode is overridden by the camera operation mode: camera
 * manipulation when remote controlled, object manipulation otherwise —
 * mirroring the initial setup performed in {@code buildScreenQuad()}.
 *
 * @param mode
 *            the new mode (coerced as described above)
 */
public void setMode(String mode) {
// BUG FIX: the previous implementation only reassigned the parameter and
// never applied it anywhere, making this method a silent no-op. Apply the
// resolved mode to the screen's manipulation handler.
if (this.cameraOperationMode
.equals(MonitorCameraRotateTranslateZoom.MODE_REMOTECONTROL)) {
mode = OjbectManipulation.MODE_CAMERAMANIPULATION;
} else {
mode = OjbectManipulation.MODE_OBJECTMANIPULATION;
}
telescopeManipulateOjbect.setMode(mode);
}
/**
 * Per-frame update: applies any active zoom animation (camera frustum near
 * plane, clamped to [2, 30]) and move animation (camera z translation,
 * clamped to [50, 160]).
 *
 * @param tpf
 *            time per frame; scales the animation step
 */
public void update(float tpf) {
final float step = tpf * 4;
if (zoomIn && camNode.getCamera().getFrustumNear() < 30) {
camNode.getCamera().setFrustumNear(
camNode.getCamera().getFrustumNear() + step);
}
if (zoomOut && camNode.getCamera().getFrustumNear() > 2) {
camNode.getCamera().setFrustumNear(
camNode.getCamera().getFrustumNear() - step);
}
if (moveIn && camNode.getLocalTranslation().z > 50) {
camNode.setLocalTranslation(camNode.getLocalTranslation().x,
camNode.getLocalTranslation().y,
camNode.getLocalTranslation().z - step);
}
if (moveOut && camNode.getLocalTranslation().z < 160) {
camNode.setLocalTranslation(camNode.getLocalTranslation().x,
camNode.getLocalTranslation().y,
camNode.getLocalTranslation().z + step);
}
}
}
| |
/*
* Copyright (c) 2008, Your Corporation. All Rights Reserved.
*/
package org.intellij.lang.xpath.xslt.impl.references;
import com.intellij.javaee.ExternalResourceManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.Key;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiManager;
import com.intellij.psi.PsiReference;
import com.intellij.psi.PsiReferenceProvider;
import com.intellij.psi.impl.source.resolve.reference.impl.providers.FileReferenceSet;
import com.intellij.psi.util.CachedValue;
import com.intellij.psi.util.CachedValueProvider;
import com.intellij.psi.util.CachedValuesManager;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.xml.XmlAttribute;
import com.intellij.psi.xml.XmlDocument;
import com.intellij.psi.xml.XmlFile;
import com.intellij.psi.xml.XmlTag;
import com.intellij.util.ArrayUtil;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.ProcessingContext;
import org.jetbrains.annotations.NotNull;
import org.intellij.lang.xpath.psi.impl.ResolveUtil;
import org.intellij.lang.xpath.xslt.XsltSupport;
import org.intellij.lang.xpath.xslt.impl.XsltIncludeIndex;
import org.intellij.lang.xpath.xslt.psi.XsltApplyTemplates;
import org.intellij.lang.xpath.xslt.psi.XsltCallTemplate;
import org.intellij.lang.xpath.xslt.psi.XsltElement;
import org.intellij.lang.xpath.xslt.psi.XsltElementFactory;
import org.intellij.lang.xpath.xslt.psi.XsltParameter;
import org.intellij.lang.xpath.xslt.psi.XsltTemplate;
import org.intellij.lang.xpath.xslt.util.ArgumentMatcher;
import org.intellij.lang.xpath.xslt.util.MatchTemplateMatcher;
import org.intellij.lang.xpath.xslt.util.NamedTemplateMatcher;
import org.intellij.lang.xpath.xslt.util.ParamMatcher;
import org.intellij.lang.xpath.xslt.util.XsltCodeInsightUtil;
/**
 * Provides {@link PsiReference}s for XSLT-specific attributes: template
 * call/definition names, template-call parameter names, variable/param names,
 * include/import hrefs and template modes. Computed references are cached per
 * attribute via a {@link CachedValue} stored under {@link #CACHED_XSLT_REFS}.
 */
public class XsltReferenceProvider extends PsiReferenceProvider {
// Per-attribute user-data slot holding the cached reference array.
private static final Key<CachedValue<PsiReference[]>> CACHED_XSLT_REFS = Key.create("CACHED_XSLT_REFS");
private final CachedValuesManager myCacheManager;
private final XsltElementFactory myXsltElementFactory = XsltElementFactory.getInstance();
public XsltReferenceProvider(Project project) {
myCacheManager = PsiManager.getInstance(project).getCachedValuesManager();
}
/**
 * Returns the references for the attribute owning the given value element,
 * computing and caching them on first access. Non-attribute parents yield
 * an empty array.
 */
@NotNull
public PsiReference[] getReferencesByElement(@NotNull PsiElement e, @NotNull ProcessingContext context) {
// The provider is registered on attribute values; the parent attribute
// carries the cache.
final PsiElement element = e.getParent();
if (element instanceof XmlAttribute) {
final XmlAttribute attribute = (XmlAttribute)element;
CachedValue<PsiReference[]> cachedValue = attribute.getUserData(CACHED_XSLT_REFS);
if (cachedValue == null) {
cachedValue = myCacheManager.createCachedValue(new ReferenceProvider(attribute), false);
attribute.putUserData(CACHED_XSLT_REFS, cachedValue);
}
final PsiReference[] value = cachedValue.getValue();
assert value != null;
return value;
} else {
return PsiReference.EMPTY_ARRAY;
}
}
/**
 * Computes the references for one attribute and registers as cache
 * dependencies every resolved element plus the attribute value itself.
 */
private class ReferenceProvider implements CachedValueProvider<PsiReference[]> {
private final XmlAttribute myAttribute;
ReferenceProvider(XmlAttribute attribute) {
myAttribute = attribute;
}
public Result<PsiReference[]> compute() {
final PsiReference[] referencesImpl = getReferencesImpl(myAttribute);
// Dependency array: the element of each computed reference.
final Object[] refs = new PsiElement[referencesImpl.length];
for (int i = 0; i < refs.length; i++) {
refs[i] = referencesImpl[i].getElement();
}
return new Result<PsiReference[]>(referencesImpl, ArrayUtil.append(refs, myAttribute.getValueElement()));
}
// Dispatches on the attribute kind to build the matching reference type(s).
private PsiReference[] getReferencesImpl(final XmlAttribute attribute) {
final PsiReference[] psiReferences;
final XmlTag tag = attribute.getParent();
if (XsltSupport.isTemplateCallName(attribute)) {
psiReferences = new PsiReference[]{ new TemplateReference(attribute) };
} else if (XsltSupport.isTemplateCallParamName(attribute)) {
// name attribute of an xsl:with-param: resolve against the params of
// the called template (call-template) or the matched ones (apply-templates).
final String paramName = attribute.getValue();
final XmlTag templateCall = PsiTreeUtil.getParentOfType(tag, XmlTag.class);
if (templateCall != null) {
if (XsltSupport.isTemplateCall(templateCall)) {
final XsltCallTemplate call = myXsltElementFactory.wrapElement(templateCall, XsltCallTemplate.class);
final ResolveUtil.Matcher matcher = new MyParamMatcher(paramName, call);
psiReferences = new PsiReference[]{ new AttributeReference(attribute, matcher, true) };
} else if (XsltSupport.isApplyTemplates(templateCall)) {
final XsltApplyTemplates call = myXsltElementFactory.wrapElement(templateCall, XsltApplyTemplates.class);
final ResolveUtil.Matcher matcher = new MyParamMatcher2(paramName, call);
psiReferences = new PsiReference[]{ new ParamReference(attribute, matcher) };
} else {
psiReferences = PsiReference.EMPTY_ARRAY;
}
} else {
psiReferences = PsiReference.EMPTY_ARRAY;
}
} else if (XsltSupport.isParam(attribute) && isInsideUnnamedTemplate(tag)) {
// Param of an unnamed (match) template: link to same-named params of
// templates with the same mode (see MySelfReference.isReferenceTo).
final XsltParameter myParam = myXsltElementFactory.wrapElement(tag, XsltParameter.class);
psiReferences = new PsiReference[]{ new MySelfReference(attribute, myParam) };
} else if (XsltSupport.isVariableOrParamName(attribute) || XsltSupport.isTemplateName(attribute)) {
final XsltElement myElement = myXsltElementFactory.wrapElement(tag, XsltElement.class);
psiReferences = new PsiReference[]{ new SelfReference(attribute, myElement) };
} else if (XsltSupport.isIncludeOrImportHref(attribute)) {
final String href = attribute.getValue();
final String resourceLocation = ExternalResourceManager.getInstance().getResourceLocation(href);
// Identity comparison is intentional: getResourceLocation returns the
// same instance when no external resource mapping is configured.
//noinspection StringEquality
if (href == resourceLocation) {
// not a configured external resource
if (href.indexOf("://") == -1) {
// a local file reference
final FileReferenceSet filereferenceset = new FileReferenceSet(
href,
attribute.getValueElement(), 1, XsltReferenceProvider.this, true);
psiReferences = filereferenceset.getAllReferences();
} else {
// external, but unknown resource
psiReferences = new PsiReference[]{ new ExternalResourceReference(attribute) };
}
} else {
// external, known resource
psiReferences = new PsiReference[]{ new ExternalResourceReference(attribute) };
}
} else if (XsltSupport.isMode(attribute)) {
psiReferences = ModeReference.create(attribute, XsltSupport.isTemplate(tag, false));
} else {
psiReferences = PsiReference.EMPTY_ARRAY;
}
return psiReferences;
}
/**
 * Self-reference for a param of an unnamed template that additionally
 * resolves to same-named params of mode-compatible templates in reachable
 * files, enabling cross-file rename/find-usages.
 */
private class MySelfReference extends SelfReference {
private final XsltParameter myParam;
private final XmlTag myTag;
public MySelfReference(XmlAttribute attribute, XsltParameter param) {
super(attribute, param);
myParam = param;
myTag = param.getTag();
}
public PsiElement handleElementRename(String newElementName) throws IncorrectOperationException {
if (!newElementName.equals(myParam.getName())) {
myParam.setName(newElementName);
}
final XmlAttribute attribute = myParam.getNameAttribute();
assert attribute != null;
//noinspection ConstantConditions
return attribute.getValueElement();
}
public boolean isReferenceTo(PsiElement element) {
// self-reference is only a trick to enable rename/find usages etc. but it shouldn't actually
// refer to itself because this would list the element to be renamed/searched for twice
assert !super.isReferenceTo(element);
if (element == myParam) return false;
if (!(element instanceof XsltParameter)) return false;
final XsltParameter param = ((XsltParameter)element);
final String name = param.getName();
if (name == null || !name.equals(myParam.getName())) return false;
// Same-named params match only if their templates share the same mode...
final XsltTemplate template = XsltCodeInsightUtil.getTemplate(myTag, false);
final XsltTemplate myTemplate = XsltCodeInsightUtil.getTemplate(param.getTag(), false);
if (template == myTemplate) return true;
if (template == null || myTemplate == null) return false;
if (!Comparing.equal(template.getMode(), myTemplate.getMode())) {
return false;
}
// ...and if the other file is reachable via the include/import graph.
final XmlFile xmlFile = (XmlFile)element.getContainingFile();
final XmlFile myFile = (XmlFile)myParam.getContainingFile();
if (myFile == xmlFile) return true;
return XsltIncludeIndex.isReachableFrom(myFile, xmlFile);
}
}
}
// True if the tag sits inside a template that has no "name" attribute.
private static boolean isInsideUnnamedTemplate(XmlTag tag) {
final XmlTag t = XsltCodeInsightUtil.getTemplateTag(tag, false, false);
return t != null && t.getAttribute("name", null) == null;
}
/**
 * Matches params of the named template targeted by an xsl:call-template,
 * excluding names already supplied as arguments (for completion variants).
 */
static class MyParamMatcher extends NamedTemplateMatcher {
private final XsltCallTemplate myCall;
private final String myParamName;
private String[] myExcludedNames = ArrayUtil.EMPTY_STRING_ARRAY;
MyParamMatcher(String paramName, XsltCallTemplate call) {
super(XsltCodeInsightUtil.getDocument(call), call.getTemplateName());
myCall = call;
myParamName = paramName;
}
private MyParamMatcher(String paramName, XsltCallTemplate call, String[] excludedNames) {
super(getDocument(call), call.getTemplateName());
myCall = call;
myParamName = paramName;
myExcludedNames = excludedNames;
}
// Prefer the document of the resolved template; fall back to the call site.
private static XmlDocument getDocument(XsltCallTemplate call) {
final XsltTemplate template = call.getTemplate();
return XsltCodeInsightUtil.getDocument(template != null ? template : call);
}
@Override
protected ResolveUtil.Matcher changeDocument(XmlDocument document) {
return new MyParamMatcher(myParamName, myCall, myExcludedNames);
}
@Override
protected Result matchImpl(XmlTag element) {
if (matches(element)) {
return Result.create(new ParamMatcher(element, myExcludedNames, myParamName));
}
return null;
}
@Override
public ResolveUtil.Matcher variantMatcher() {
// Exclude params that already have a with-param argument at the call site.
final PsiElement[] suppliedArgs = ResolveUtil.collect(new ArgumentMatcher(myCall));
final String[] excludedNames = new String[suppliedArgs.length];
for (int i = 0; i < suppliedArgs.length; i++) {
excludedNames[i] = ((XmlTag)suppliedArgs[i]).getAttributeValue("name");
}
return new MyParamMatcher(null, myCall, excludedNames);
}
}
/**
 * Matches params of templates selectable by an xsl:apply-templates with the
 * same mode, excluding names already supplied as arguments.
 */
static class MyParamMatcher2 extends MatchTemplateMatcher {
private final String myParamName;
private final XsltApplyTemplates myCall;
private String[] myExcludedNames = ArrayUtil.EMPTY_STRING_ARRAY;
MyParamMatcher2(String paramName, XsltApplyTemplates call) {
super(XsltCodeInsightUtil.getDocument(call), call.getMode());
myParamName = paramName;
myCall = call;
}
private MyParamMatcher2(String paramName, XsltApplyTemplates call, String[] excludedNames) {
this(paramName, call);
myExcludedNames = excludedNames;
}
@Override
protected Result matchImpl(XmlTag element) {
if (matches(element)) {
return Result.create(new ParamMatcher(element, myExcludedNames, myParamName));
}
return null;
}
@Override
protected ResolveUtil.Matcher changeDocument(XmlDocument document) {
// NOTE(review): unlike MyParamMatcher, the excluded names are not carried
// over here — confirm whether that is intentional.
return new MyParamMatcher2(myParamName, myCall);
}
@Override
public ResolveUtil.Matcher variantMatcher() {
final PsiElement[] suppliedArgs = ResolveUtil.collect(new ArgumentMatcher(myCall));
final String[] excludedNames = new String[suppliedArgs.length];
for (int i = 0; i < suppliedArgs.length; i++) {
excludedNames[i] = ((XmlTag)suppliedArgs[i]).getAttributeValue("name");
}
return new MyParamMatcher2(null, myCall, excludedNames);
}
}
}
| |
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
* Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.elasticmapreduce.model;
import java.io.Serializable;
/**
* <p>
* The status change reason details for the instance group.
* </p>
*/
/**
 * <p>
 * The status change reason details for the instance group.
 * </p>
 */
public class InstanceGroupStateChangeReason implements Serializable, Cloneable {
    /**
     * <p>
     * The programmable code for the state change reason.
     * </p>
     */
    private String code;
    /**
     * <p>
     * The status change reason description.
     * </p>
     */
    private String message;

    /**
     * <p>
     * The programmable code for the state change reason.
     * </p>
     *
     * @param code
     *        The programmable code for the state change reason.
     * @see InstanceGroupStateChangeReasonCode
     */
    public void setCode(String code) {
        this.code = code;
    }

    /**
     * <p>
     * The programmable code for the state change reason.
     * </p>
     *
     * @return The programmable code for the state change reason.
     * @see InstanceGroupStateChangeReasonCode
     */
    public String getCode() {
        return this.code;
    }

    /**
     * <p>
     * The programmable code for the state change reason.
     * </p>
     *
     * @param code
     *        The programmable code for the state change reason.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     * @see InstanceGroupStateChangeReasonCode
     */
    public InstanceGroupStateChangeReason withCode(String code) {
        setCode(code);
        return this;
    }

    /**
     * <p>
     * The programmable code for the state change reason.
     * </p>
     * <p>
     * Note: unlike {@link #setCode(String)}, this overload throws a
     * {@link NullPointerException} if {@code code} is {@code null}.
     * </p>
     *
     * @param code
     *        The programmable code for the state change reason.
     * @see InstanceGroupStateChangeReasonCode
     */
    public void setCode(InstanceGroupStateChangeReasonCode code) {
        // DOC FIX: the previous javadoc claimed this void method returned a
        // reference to this object.
        this.code = code.toString();
    }

    /**
     * <p>
     * The programmable code for the state change reason.
     * </p>
     *
     * @param code
     *        The programmable code for the state change reason.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     * @see InstanceGroupStateChangeReasonCode
     */
    public InstanceGroupStateChangeReason withCode(
            InstanceGroupStateChangeReasonCode code) {
        setCode(code);
        return this;
    }

    /**
     * <p>
     * The status change reason description.
     * </p>
     *
     * @param message
     *        The status change reason description.
     */
    public void setMessage(String message) {
        this.message = message;
    }

    /**
     * <p>
     * The status change reason description.
     * </p>
     *
     * @return The status change reason description.
     */
    public String getMessage() {
        return this.message;
    }

    /**
     * <p>
     * The status change reason description.
     * </p>
     *
     * @param message
     *        The status change reason description.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public InstanceGroupStateChangeReason withMessage(String message) {
        setMessage(message);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getCode() != null)
            sb.append("Code: " + getCode() + ",");
        if (getMessage() != null)
            sb.append("Message: " + getMessage());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        // instanceof is null-safe, so the separate null check is subsumed.
        if (!(obj instanceof InstanceGroupStateChangeReason))
            return false;
        InstanceGroupStateChangeReason other = (InstanceGroupStateChangeReason) obj;
        // Both fields must be null together or equal.
        if (other.getCode() == null ^ this.getCode() == null)
            return false;
        if (other.getCode() != null
                && !other.getCode().equals(this.getCode()))
            return false;
        if (other.getMessage() == null ^ this.getMessage() == null)
            return false;
        if (other.getMessage() != null
                && !other.getMessage().equals(this.getMessage()))
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode
                + ((getCode() == null) ? 0 : getCode().hashCode());
        hashCode = prime * hashCode
                + ((getMessage() == null) ? 0 : getMessage().hashCode());
        return hashCode;
    }

    @Override
    public InstanceGroupStateChangeReason clone() {
        try {
            return (InstanceGroupStateChangeReason) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException(
                    "Got a CloneNotSupportedException from Object.clone() "
                            + "even though we're Cloneable!", e);
        }
    }
}
| |
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.skyframe;
import com.google.common.base.Predicates;
import com.google.common.collect.ImmutableCollection;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableMultimap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.LinkedHashMultimap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Multimap;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.cmdline.LabelSyntaxException;
import com.google.devtools.build.lib.cmdline.PackageIdentifier;
import com.google.devtools.build.lib.events.Event;
import com.google.devtools.build.lib.events.StoredEventHandler;
import com.google.devtools.build.lib.packages.BuildFileNotFoundException;
import com.google.devtools.build.lib.packages.PackageFactory;
import com.google.devtools.build.lib.packages.RuleClassProvider;
import com.google.devtools.build.lib.rules.SkylarkRuleClassFunctions;
import com.google.devtools.build.lib.skyframe.SkylarkImportLookupValue.SkylarkImportLookupKey;
import com.google.devtools.build.lib.syntax.BuildFileAST;
import com.google.devtools.build.lib.syntax.Environment.Extension;
import com.google.devtools.build.lib.syntax.EvalException;
import com.google.devtools.build.lib.syntax.LoadStatement;
import com.google.devtools.build.lib.syntax.Mutability;
import com.google.devtools.build.lib.syntax.SkylarkImport;
import com.google.devtools.build.lib.util.Preconditions;
import com.google.devtools.build.lib.vfs.PathFragment;
import com.google.devtools.build.skyframe.SkyFunction;
import com.google.devtools.build.skyframe.SkyFunctionException;
import com.google.devtools.build.skyframe.SkyFunctionException.Transience;
import com.google.devtools.build.skyframe.SkyKey;
import com.google.devtools.build.skyframe.SkyValue;
import com.google.devtools.build.skyframe.ValueOrException2;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import javax.annotation.Nullable;
/**
* A Skyframe function to look up and import a single Skylark extension.
*
* <p> Given a {@link Label} referencing a Skylark file, attempts to locate the file and load it.
* The Label must be absolute, and must not reference the special {@code external} package. If
* loading is successful, returns a {@link SkylarkImportLookupValue} that encapsulates
* the loaded {@link Extension} and {@link SkylarkFileDependency} information. If loading is
* unsuccessful, throws a {@link SkylarkImportLookupFunctionException} that encapsulates the
* cause of the failure.
*/
public class SkylarkImportLookupFunction implements SkyFunction {
private final RuleClassProvider ruleClassProvider;
private final PackageFactory packageFactory;
/**
 * Creates a lookup function backed by the given rule class provider (used to
 * build the Skylark evaluation environment) and package factory (source of
 * the {@code native} module).
 */
public SkylarkImportLookupFunction(
    RuleClassProvider ruleClassProvider, PackageFactory packageFactory) {
  this.packageFactory = packageFactory;
  this.ruleClassProvider = ruleClassProvider;
}
/**
 * Skyframe entry point: loads the import named by {@code skyKey}, translating
 * the checked failures of {@link #computeInternal} into
 * {@link SkylarkImportLookupFunctionException}.
 */
@Override
public SkyValue compute(SkyKey skyKey, Environment env) throws SkyFunctionException,
InterruptedException {
SkylarkImportLookupKey key = (SkylarkImportLookupKey) skyKey.argument();
try {
// Null visited set => not inlining recursive import lookups.
return computeInternal(key.importLabel, key.inWorkspace, env, null);
} catch (InconsistentFilesystemException e) {
throw new SkylarkImportLookupFunctionException(e, Transience.PERSISTENT);
} catch (SkylarkImportFailedException e) {
throw new SkylarkImportLookupFunctionException(e);
}
}
/**
 * Inlined-call entry point: like {@code compute}, but recursive import
 * lookups are performed in-process. Starts cycle detection with an empty
 * visited set.
 */
SkyValue computeWithInlineCalls(SkyKey skyKey, Environment env)
    throws InconsistentFilesystemException,
        SkylarkImportFailedException,
        InterruptedException {
  Set<Label> visited = new LinkedHashSet<>();
  return computeWithInlineCallsInternal(skyKey, env, visited);
}
/**
 * Recursive step of the inlined computation. Unlike {@link #compute}, the
 * visited set must be non-null here — it carries the labels currently on the
 * inlined import path for cycle detection.
 */
private SkyValue computeWithInlineCallsInternal(
SkyKey skyKey, Environment env, Set<Label> visited)
throws InconsistentFilesystemException, SkylarkImportFailedException, InterruptedException {
SkylarkImportLookupKey key = (SkylarkImportLookupKey) skyKey.argument();
return computeInternal(
key.importLabel,
key.inWorkspace,
env,
Preconditions.checkNotNull(visited, key.importLabel));
}
/**
 * Loads the Skylark file denoted by {@code fileLabel}: fetches its AST,
 * resolves its load statements to labels, loads those imports (either via
 * Skyframe dependencies or, when {@code visited} is non-null, via inlined
 * recursive calls with cycle detection), and evaluates the file into an
 * {@link Extension}.
 *
 * @param visited labels on the current inlined import path, or {@code null}
 *     when not inlining
 * @return the resulting {@link SkylarkImportLookupValue}, or {@code null} if
 *     any Skyframe dependencies are still missing
 */
SkyValue computeInternal(
Label fileLabel, boolean inWorkspace, Environment env, @Nullable Set<Label> visited)
throws InconsistentFilesystemException, SkylarkImportFailedException, InterruptedException {
PathFragment filePath = fileLabel.toPathFragment();
// Load the AST corresponding to this file.
ASTFileLookupValue astLookupValue;
try {
SkyKey astLookupKey = ASTFileLookupValue.key(fileLabel);
astLookupValue = (ASTFileLookupValue) env.getValueOrThrow(astLookupKey,
ErrorReadingSkylarkExtensionException.class, InconsistentFilesystemException.class);
} catch (ErrorReadingSkylarkExtensionException e) {
throw SkylarkImportFailedException.errorReadingFile(filePath, e.getMessage());
}
if (astLookupValue == null) {
// AST not yet available; Skyframe will restart us.
return null;
}
if (!astLookupValue.lookupSuccessful()) {
// Skylark import files have to exist.
throw SkylarkImportFailedException.noFile(astLookupValue.getErrorMsg());
}
BuildFileAST ast = astLookupValue.getAST();
if (ast.containsErrors()) {
throw SkylarkImportFailedException.skylarkErrors(filePath);
}
// Process the load statements in the file.
ImmutableList<SkylarkImport> imports = ast.getImports();
Map<String, Extension> extensionsForImports = Maps.newHashMapWithExpectedSize(imports.size());
ImmutableList.Builder<SkylarkFileDependency> fileDependencies = ImmutableList.builder();
ImmutableMap<String, Label> labelsForImports;
// Find the labels corresponding to the load statements.
labelsForImports = findLabelsForLoadStatements(imports, fileLabel, env);
if (labelsForImports == null) {
return null;
}
// Look up and load the imports.
ImmutableCollection<Label> importLabels = labelsForImports.values();
List<SkyKey> importLookupKeys =
Lists.newArrayListWithExpectedSize(importLabels.size());
for (Label importLabel : importLabels) {
importLookupKeys.add(SkylarkImportLookupValue.key(importLabel, inWorkspace));
}
Map<SkyKey, SkyValue> skylarkImportMap;
boolean valuesMissing = false;
if (visited == null) {
// Not inlining.
skylarkImportMap = env.getValues(importLookupKeys);
valuesMissing = env.valuesMissing();
} else {
// Inlining calls to SkylarkImportLookupFunction.
if (!visited.add(fileLabel)) {
// This label is already on the inlined path: report the import cycle.
ImmutableList<Label> cycle =
CycleUtils.splitIntoPathAndChain(Predicates.equalTo(fileLabel), visited)
.second;
if (env.getValue(SkylarkImportUniqueCycleFunction.key(cycle)) == null) {
return null;
}
throw new SkylarkImportFailedException("Skylark import cycle");
}
skylarkImportMap = Maps.newHashMapWithExpectedSize(imports.size());
for (SkyKey importLookupKey : importLookupKeys) {
SkyValue skyValue = this.computeWithInlineCallsInternal(importLookupKey, env, visited);
if (skyValue == null) {
Preconditions.checkState(
env.valuesMissing(), "no skylark import value for %s", importLookupKey);
// We continue making inline calls even if some requested values are missing, to maximize
// the number of dependent (non-inlined) SkyFunctions that are requested, thus avoiding a
// quadratic number of restarts.
valuesMissing = true;
} else {
skylarkImportMap.put(importLookupKey, skyValue);
}
}
// All imports traversed, this key can no longer be part of a cycle.
visited.remove(fileLabel);
}
if (valuesMissing) {
// This means some imports are unavailable.
return null;
}
// Process the loaded imports.
for (Entry<String, Label> importEntry : labelsForImports.entrySet()) {
String importString = importEntry.getKey();
Label importLabel = importEntry.getValue();
SkyKey keyForLabel = SkylarkImportLookupValue.key(importLabel, inWorkspace);
SkylarkImportLookupValue importLookupValue =
(SkylarkImportLookupValue) skylarkImportMap.get(keyForLabel);
extensionsForImports.put(importString, importLookupValue.getEnvironmentExtension());
fileDependencies.add(importLookupValue.getDependency());
}
// Skylark UserDefinedFunction-s in that file will share this function definition Environment,
// which will be frozen by the time it is returned by createExtension.
Extension extension = createExtension(ast, fileLabel, extensionsForImports, env, inWorkspace);
return new SkylarkImportLookupValue(
extension, new SkylarkFileDependency(fileLabel, fileDependencies.build()));
}
/**
 * Computes the set of Labels corresponding to a collection of PathFragments representing
 * absolute import paths.
 *
 * @return a map from each import {@link PathFragment} to its computed {@link Label}
 *     (the original javadoc stated the inverse direction, contradicting the
 *     declared return type {@code ImmutableMap<PathFragment, Label>});
 *     {@code null} if any Skyframe dependencies are unavailable.
 * @throws SkylarkImportFailedException
 */
@Nullable
static ImmutableMap<PathFragment, Label> labelsForAbsoluteImports(
ImmutableSet<PathFragment> pathsToLookup, Environment env)
throws SkylarkImportFailedException {
// Import PathFragments are absolute, so there is a 1-1 mapping from corresponding Labels.
ImmutableMap.Builder<PathFragment, Label> outputMap = new ImmutableMap.Builder<>();
// The SkyKey here represents the directory containing an import PathFragment, hence there
// can in general be multiple imports per lookup.
Multimap<SkyKey, PathFragment> lookupMap = LinkedHashMultimap.create();
for (PathFragment importPath : pathsToLookup) {
PathFragment relativeImportPath = importPath.toRelative();
PackageIdentifier pkgToLookUp =
PackageIdentifier.createInDefaultRepo(relativeImportPath.getParentDirectory());
lookupMap.put(ContainingPackageLookupValue.key(pkgToLookUp), importPath);
}
// Attempt to find a package for every directory containing an import.
Map<SkyKey,
ValueOrException2<BuildFileNotFoundException,
InconsistentFilesystemException>> lookupResults =
env.getValuesOrThrow(
lookupMap.keySet(),
BuildFileNotFoundException.class,
InconsistentFilesystemException.class);
if (env.valuesMissing()) {
return null;
}
try {
// Process lookup results.
for (Entry<SkyKey,
ValueOrException2<BuildFileNotFoundException,
InconsistentFilesystemException>> entry : lookupResults.entrySet()) {
ContainingPackageLookupValue lookupValue =
(ContainingPackageLookupValue) entry.getValue().get();
if (!lookupValue.hasContainingPackage()) {
// Although multiple imports may be in the same package-less directory, we only
// report an error for the first one.
// NOTE(review): the key was built from the import's parent directory, so
// "importFile" here is the directory, not the file — confirm intended.
PackageIdentifier lookupKey = ((PackageIdentifier) entry.getKey().argument());
PathFragment importFile = lookupKey.getPackageFragment();
throw SkylarkImportFailedException.noBuildFile(importFile);
}
PackageIdentifier pkgIdForImport = lookupValue.getContainingPackageName();
PathFragment containingPkgPath = pkgIdForImport.getPackageFragment();
for (PathFragment importPath : lookupMap.get(entry.getKey())) {
PathFragment relativeImportPath = importPath.toRelative();
String targetNameForImport = relativeImportPath.relativeTo(containingPkgPath).toString();
try {
outputMap.put(importPath, Label.create(pkgIdForImport, targetNameForImport));
} catch (LabelSyntaxException e) {
// While the Label is for the most part guaranteed to be well-formed by construction, an
// error is still possible if the filename itself is malformed, e.g., contains control
// characters. Since we expect this error to be very rare, for code simplicity, we allow
// the error message to refer to a Label even though the filename was specified via a
// simple path.
throw new SkylarkImportFailedException(e);
}
}
}
} catch (BuildFileNotFoundException e) {
// Thrown when there are IO errors looking for BUILD files.
throw new SkylarkImportFailedException(e);
} catch (InconsistentFilesystemException e) {
throw new SkylarkImportFailedException(e);
}
return outputMap.build();
}
/**
 * Computes the set of {@link Label}s corresponding to a set of Skylark {@link LoadStatement}s.
 *
 * @param imports a collection of Skylark {@link LoadStatement}s
 * @param containingFileLabel the {@link Label} of the file containing the load statements
 * @return an {@link ImmutableMap} which maps a {@link String} used in the load statement to
 *     its corresponding {@link Label}. Returns {@code null} if any Skyframe dependencies are
 *     unavailable.
 * @throws SkylarkImportFailedException if no package can be found that contains the
 *     loaded file
 */
@Nullable
static ImmutableMap<String, Label> findLabelsForLoadStatements(
ImmutableCollection<SkylarkImport> imports, Label containingFileLabel, Environment env)
throws SkylarkImportFailedException {
Map<String, Label> outputMap = Maps.newHashMapWithExpectedSize(imports.size());
// Filter relative vs. absolute paths.
ImmutableSet.Builder<PathFragment> absoluteImportsToLookup = new ImmutableSet.Builder<>();
// We maintain a multimap from path fragments to their correspond import strings, to cover the
// (unlikely) case where two distinct import strings generate the same path fragment.
ImmutableMultimap.Builder<PathFragment, String> pathToImports =
new ImmutableMultimap.Builder<>();
for (SkylarkImport imp : imports) {
if (imp.hasAbsolutePath()) {
// Absolute imports need a package lookup; resolve them in a batch below.
absoluteImportsToLookup.add(imp.getAbsolutePath());
pathToImports.put(imp.getAbsolutePath(), imp.getImportString());
} else {
// Relative imports resolve directly against the containing file's label.
outputMap.put(imp.getImportString(), imp.getLabel(containingFileLabel));
}
}
// Look up labels for absolute paths.
ImmutableMap<PathFragment, Label> absoluteLabels =
labelsForAbsoluteImports(absoluteImportsToLookup.build(), env);
if (absoluteLabels == null) {
return null;
}
for (Entry<PathFragment, Label> entry : absoluteLabels.entrySet()) {
PathFragment currPath = entry.getKey();
Label currLabel = entry.getValue();
for (String importString : pathToImports.build().get(currPath)) {
outputMap.put(importString, currLabel);
}
}
ImmutableMap<String, Label> immutableOutputMap = ImmutableMap.copyOf(outputMap);
return immutableOutputMap;
}
/**
 * Creates the Extension to be imported.
 *
 * <p>Evaluates the given file AST in a fresh Skylark environment (with the appropriate
 * {@code native} module installed) and packages the resulting bindings as an
 * {@link Extension}.
 *
 * @param ast the parsed AST of the .bzl file being imported
 * @param extensionLabel the label of the extension file; used for error reporting and for
 *     exporting rule functions/aspects defined by the file
 * @param importMap the already-resolved extensions this file itself loads
 * @param env the Skyframe environment whose listener receives replayed evaluation events
 * @param inWorkspace whether the import happens in WORKSPACE context (selects the variant of
 *     the {@code native} module)
 * @throws SkylarkImportFailedException if evaluation produced any errors
 */
private Extension createExtension(
    BuildFileAST ast,
    Label extensionLabel,
    Map<String, Extension> importMap,
    Environment env,
    boolean inWorkspace)
    throws SkylarkImportFailedException, InterruptedException {
  StoredEventHandler eventHandler = new StoredEventHandler();
  // TODO(bazel-team): this method overestimates the changes which can affect the
  // Skylark RuleClass. For example changes to comments or unused functions can modify the hash.
  // A more accurate - however much more complicated - way would be to calculate a hash based on
  // the transitive closure of the accessible AST nodes.
  PathFragment extensionFile = extensionLabel.toPathFragment();
  // The Mutability is closed by try-with-resources, freezing the environment when done.
  try (Mutability mutability = Mutability.create("importing %s", extensionFile)) {
    com.google.devtools.build.lib.syntax.Environment extensionEnv =
        ruleClassProvider
            .createSkylarkRuleClassEnvironment(
                mutability, eventHandler, ast.getContentHashCode(), importMap)
            .setupOverride("native", packageFactory.getNativeModule(inWorkspace));
    ast.exec(extensionEnv, eventHandler);
    try {
      SkylarkRuleClassFunctions.exportRuleFunctionsAndAspects(extensionEnv, extensionLabel);
    } catch (EvalException e) {
      // Export failures are recorded as events so they are reported together with any other
      // evaluation errors below, rather than aborting immediately.
      eventHandler.handle(Event.error(e.getLocation(), e.getMessage()));
    }
    // Forward all buffered evaluation events to the caller's listener before failing, so the
    // user sees the underlying error messages, not just the generic failure.
    Event.replayEventsOn(env.getListener(), eventHandler.getEvents());
    if (eventHandler.hasErrors()) {
      throw SkylarkImportFailedException.errors(extensionFile);
    }
    return new Extension(extensionEnv);
  }
}
@Override
public String extractTag(SkyKey skyKey) {
  // No custom tag is associated with Skylark import lookups.
  return null;
}
/**
 * Signals that a Skylark (.bzl) import could not be resolved or evaluated.
 *
 * <p>Instances are created either by wrapping a lower-level exception or through one of the
 * static factory methods that produce a user-facing error message.
 */
static final class SkylarkImportFailedException extends Exception {
  private SkylarkImportFailedException(String errorMessage) {
    super(errorMessage);
  }

  // The wrapping constructors below chain the original exception as the cause (previously it
  // was dropped and only its message kept), so stack traces retain the underlying failure.
  private SkylarkImportFailedException(InconsistentFilesystemException e) {
    super(e.getMessage(), e);
  }

  private SkylarkImportFailedException(BuildFileNotFoundException e) {
    super(e.getMessage(), e);
  }

  private SkylarkImportFailedException(LabelSyntaxException e) {
    super(e.getMessage(), e);
  }

  /** The extension file was found and parsed but its evaluation produced errors. */
  static SkylarkImportFailedException errors(PathFragment file) {
    return new SkylarkImportFailedException(
        String.format("Extension file '%s' has errors", file));
  }

  /** An I/O error occurred while reading the extension file. */
  static SkylarkImportFailedException errorReadingFile(PathFragment file, String error) {
    return new SkylarkImportFailedException(
        String.format("Encountered error while reading extension file '%s': %s", file, error));
  }

  /** The extension file does not exist; {@code reason} explains why lookup failed. */
  static SkylarkImportFailedException noFile(String reason) {
    return new SkylarkImportFailedException(
        String.format("Extension file not found. %s", reason));
  }

  /** The extension file exists but is not contained in any package. */
  static SkylarkImportFailedException noBuildFile(PathFragment file) {
    return new SkylarkImportFailedException(
        String.format("Every .bzl file must have a corresponding package, but '%s' "
            + "does not have one. Please create a BUILD file in the same or any parent directory."
            + " Note that this BUILD file does not need to do anything except exist.", file));
  }

  /** The extension itself (as opposed to its file) contains errors. */
  static SkylarkImportFailedException skylarkErrors(PathFragment file) {
    return new SkylarkImportFailedException(String.format("Extension '%s' has errors", file));
  }
}
/**
 * {@link SkyFunctionException} wrapper used to surface import failures to Skyframe,
 * carrying the appropriate {@link Transience}.
 */
private static final class SkylarkImportLookupFunctionException extends SkyFunctionException {
  // Import failures are deterministic, so they are always persistent.
  private SkylarkImportLookupFunctionException(SkylarkImportFailedException cause) {
    super(cause, Transience.PERSISTENT);
  }

  private SkylarkImportLookupFunctionException(InconsistentFilesystemException e,
      Transience transience) {
    super(e, transience);
  }

  private SkylarkImportLookupFunctionException(BuildFileNotFoundException e,
      Transience transience) {
    super(e, transience);
  }
}
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.search;
import org.apache.lucene.search.TotalHits;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest;
import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksResponse;
import org.elasticsearch.action.search.SearchProgressActionListener;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchResponse.Clusters;
import org.elasticsearch.action.search.SearchShard;
import org.elasticsearch.action.search.SearchTask;
import org.elasticsearch.action.search.ShardSearchFailure;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.tasks.TaskId;
import org.elasticsearch.tasks.TaskManager;
import org.elasticsearch.threadpool.Scheduler.Cancellable;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.xpack.core.async.AsyncExecutionId;
import org.elasticsearch.xpack.core.async.AsyncTask;
import org.elasticsearch.xpack.core.search.action.AsyncSearchResponse;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Consumer;
import java.util.function.Supplier;
import static java.util.Collections.singletonList;
/**
 * Task that tracks the progress of a currently running {@link SearchRequest}.
 *
 * <p>Thread-safety: the listener registries ({@code initListeners},
 * {@code completionListeners}) and the {@code hasInitialized}/{@code hasCompleted}/
 * {@code completionId} fields are guarded by {@code this}; listeners themselves are always
 * invoked outside the lock.
 */
final class AsyncSearchTask extends SearchTask implements AsyncTask {
    private final AsyncExecutionId searchId;
    private final Client client;
    private final ThreadPool threadPool;
    private final Supplier<InternalAggregation.ReduceContext> aggReduceContextSupplier;
    private final Listener progressListener;

    private final Map<String, String> originHeaders;

    // Guarded by this: set once when onListShards (or onFailure) fires.
    private boolean hasInitialized;
    // Guarded by this: set once when the search finishes (successfully or not).
    private boolean hasCompleted;
    // Guarded by this: monotonically increasing key for completionListeners entries.
    private long completionId;
    private final List<Runnable> initListeners = new ArrayList<>();
    private final Map<Long, Consumer<AsyncSearchResponse>> completionListeners = new HashMap<>();

    // Volatile: read by checkCancellation() without holding any lock relevant to writers.
    private volatile long expirationTimeMillis;
    private final AtomicBoolean isCancelling = new AtomicBoolean(false);

    // Holds the mutable response; set once via compareAndSet when the shard list (or a
    // failure) is known.
    private final AtomicReference<MutableSearchResponse> searchResponse = new AtomicReference<>();

    /**
     * Creates an instance of {@link AsyncSearchTask}.
     *
     * @param id The id of the task.
     * @param type The type of the task.
     * @param action The action name.
     * @param parentTaskId The parent task id.
     * @param descriptionSupplier A supplier of the task description, evaluated lazily.
     * @param keepAlive The expiration delta applied to the task's start time.
     * @param originHeaders All the request context headers.
     * @param taskHeaders The filtered request headers for the task.
     * @param searchId The {@link AsyncExecutionId} of the task.
     * @param client The client used to cancel the underlying search task.
     * @param threadPool The threadPool to schedule runnable.
     * @param aggReduceContextSupplier A supplier to create final reduce contexts.
     */
    AsyncSearchTask(long id,
                    String type,
                    String action,
                    TaskId parentTaskId,
                    Supplier<String> descriptionSupplier,
                    TimeValue keepAlive,
                    Map<String, String> originHeaders,
                    Map<String, String> taskHeaders,
                    AsyncExecutionId searchId,
                    Client client,
                    ThreadPool threadPool,
                    Supplier<InternalAggregation.ReduceContext> aggReduceContextSupplier) {
        super(id, type, action, () -> "async_search{" + descriptionSupplier.get() + "}", parentTaskId, taskHeaders);
        this.expirationTimeMillis = getStartTime() + keepAlive.getMillis();
        this.originHeaders = originHeaders;
        this.searchId = searchId;
        this.client = client;
        this.threadPool = threadPool;
        this.aggReduceContextSupplier = aggReduceContextSupplier;
        this.progressListener = new Listener();
        setProgressListener(progressListener);
    }

    /**
     * Returns all of the request contexts headers
     */
    @Override
    public Map<String, String> getOriginHeaders() {
        return originHeaders;
    }

    /**
     * Returns the {@link AsyncExecutionId} of the task
     */
    @Override
    public AsyncExecutionId getExecutionId() {
        return searchId;
    }

    Listener getSearchProgressActionListener() {
        return progressListener;
    }

    /**
     * Update the expiration time of the (partial) response.
     */
    @Override
    public void setExpirationTime(long expirationTimeMillis) {
        this.expirationTimeMillis = expirationTimeMillis;
    }

    @Override
    public void cancelTask(TaskManager taskManager, Runnable runnable, String reason) {
        cancelTask(runnable, reason);
    }

    /**
     * Cancels the running task and its children.
     *
     * <p>{@code runnable} is always executed, whether cancellation was issued, succeeded,
     * failed, or was skipped because the task is already cancelled/cancelling.
     */
    public void cancelTask(Runnable runnable, String reason) {
        // isCancelling ensures only one cancel request is in flight at a time.
        if (isCancelled() == false && isCancelling.compareAndSet(false, true)) {
            CancelTasksRequest req = new CancelTasksRequest().setTaskId(searchId.getTaskId()).setReason(reason);
            client.admin().cluster().cancelTasks(req, new ActionListener<>() {
                @Override
                public void onResponse(CancelTasksResponse cancelTasksResponse) {
                    runnable.run();
                }

                @Override
                public void onFailure(Exception exc) {
                    // cancelling failed; reset the flag so a later attempt can retry
                    isCancelling.compareAndSet(true, false);
                    runnable.run();
                }
            });
        } else {
            runnable.run();
        }
    }

    @Override
    protected void onCancelled() {
        super.onCancelled();
        isCancelling.compareAndSet(true, false);
    }

    /**
     * Creates a listener that listens for an {@link AsyncSearchResponse} and notifies the
     * listener when the task is finished or when the provided <code>waitForCompletion</code>
     * timeout occurs. In such case the consumed {@link AsyncSearchResponse} will contain partial results.
     */
    public void addCompletionListener(ActionListener<AsyncSearchResponse> listener, TimeValue waitForCompletion) {
        boolean executeImmediately = false;
        long startTime = threadPool.relativeTimeInMillis();
        synchronized (this) {
            if (hasCompleted) {
                executeImmediately = true;
            } else {
                // Defer registration until the task has initialized (shard list known),
                // deducting the time spent waiting for initialization from the timeout.
                addInitListener(() -> {
                    final TimeValue remainingWaitForCompletion;
                    if (waitForCompletion.getMillis() > 0) {
                        long elapsedTime = threadPool.relativeTimeInMillis() - startTime;
                        // subtract the initialization time from the provided waitForCompletion.
                        remainingWaitForCompletion = TimeValue.timeValueMillis(Math.max(0, waitForCompletion.getMillis() - elapsedTime));
                    } else {
                        remainingWaitForCompletion = TimeValue.ZERO;
                    }
                    internalAddCompletionListener(listener, remainingWaitForCompletion);
                });
            }
        }
        if (executeImmediately) {
            // Invoked outside the lock to avoid running user code while synchronized.
            listener.onResponse(getResponseWithHeaders());
        }
    }

    /**
     * Creates a listener that listens for an {@link AsyncSearchResponse} and executes the
     * consumer when the task is finished.
     */
    public void addCompletionListener(Consumer<AsyncSearchResponse> listener) {
        boolean executeImmediately = false;
        synchronized (this) {
            if (hasCompleted) {
                executeImmediately = true;
            } else {
                this.completionListeners.put(completionId++, listener);
            }
        }
        if (executeImmediately) {
            listener.accept(getResponseWithHeaders());
        }
    }

    private void internalAddCompletionListener(ActionListener<AsyncSearchResponse> listener, TimeValue waitForCompletion) {
        boolean executeImmediately = false;
        synchronized (this) {
            if (hasCompleted || waitForCompletion.getMillis() == 0) {
                executeImmediately = true;
            } else {
                // ensure that the listener is consumed only once, either by the timeout
                // below or by task completion - whichever happens first
                AtomicBoolean hasRun = new AtomicBoolean(false);
                long id = completionId++;

                final Cancellable cancellable;
                try {
                    cancellable = threadPool.schedule(
                        () -> {
                            if (hasRun.compareAndSet(false, true)) {
                                // timeout occurred before completion
                                removeCompletionListener(id);
                                listener.onResponse(getResponseWithHeaders());
                            }
                        },
                        waitForCompletion,
                        "generic");
                } catch (Exception exc) {
                    // scheduling can fail (e.g. on shutdown); surface the failure directly
                    listener.onFailure(exc);
                    return;
                }
                completionListeners.put(
                    id,
                    resp -> {
                        if (hasRun.compareAndSet(false, true)) {
                            // completion occurred before timeout
                            cancellable.cancel();
                            listener.onResponse(resp);
                        }
                    });
            }
        }
        if (executeImmediately) {
            listener.onResponse(getResponseWithHeaders());
        }
    }

    private void removeCompletionListener(long id) {
        synchronized (this) {
            if (hasCompleted == false) {
                completionListeners.remove(id);
            }
        }
    }

    private void addInitListener(Runnable listener) {
        boolean executeImmediately = false;
        synchronized (this) {
            if (hasInitialized) {
                executeImmediately = true;
            } else {
                initListeners.add(listener);
            }
        }
        if (executeImmediately) {
            listener.run();
        }
    }

    private void executeInitListeners() {
        synchronized (this) {
            if (hasInitialized) {
                return;
            }
            hasInitialized = true;
        }
        // NOTE(review): initListeners is iterated/cleared outside the lock; this relies on
        // no further additions once hasInitialized is set (addInitListener runs immediately
        // in that case).
        for (Runnable listener : initListeners) {
            listener.run();
        }
        initListeners.clear();
    }

    private void executeCompletionListeners() {
        Map<Long, Consumer<AsyncSearchResponse>> completionsListenersCopy;
        synchronized (this) {
            if (hasCompleted) {
                return;
            }
            hasCompleted = true;
            // Snapshot under the lock so listeners can be invoked without holding it.
            completionsListenersCopy = new HashMap<>(this.completionListeners);
            this.completionListeners.clear();
        }
        // we don't need to restore the response headers, they should be included in the current
        // context since we are called by the search action listener.
        AsyncSearchResponse finalResponse = getResponse();
        for (Consumer<AsyncSearchResponse> consumer : completionsListenersCopy.values()) {
            consumer.accept(finalResponse);
        }
    }

    /**
     * Returns the current {@link AsyncSearchResponse}.
     */
    private AsyncSearchResponse getResponse() {
        return getResponse(false);
    }

    /**
     * Returns the current {@link AsyncSearchResponse} and restores the response headers
     * in the local thread context.
     */
    private AsyncSearchResponse getResponseWithHeaders() {
        return getResponse(true);
    }

    private AsyncSearchResponse getResponse(boolean restoreResponseHeaders) {
        MutableSearchResponse mutableSearchResponse = searchResponse.get();
        // Callers only reach this once the task has initialized, so the response must exist.
        assert mutableSearchResponse != null;
        checkCancellation();
        AsyncSearchResponse asyncSearchResponse;
        try {
            asyncSearchResponse = mutableSearchResponse.toAsyncSearchResponse(this, expirationTimeMillis, restoreResponseHeaders);
        } catch (Exception e) {
            // A failure while reducing partial aggs is reported in the response body rather
            // than propagated, so callers still receive a well-formed response.
            ElasticsearchException exception = new ElasticsearchStatusException("Async search: error while reducing partial results",
                ExceptionsHelper.status(e), e);
            asyncSearchResponse = mutableSearchResponse.toAsyncSearchResponse(this, expirationTimeMillis, exception);
        }
        return asyncSearchResponse;
    }

    // checks if the search task should be cancelled
    private synchronized void checkCancellation() {
        long now = System.currentTimeMillis();
        if (hasCompleted == false && expirationTimeMillis < now) {
            // we cancel expired search tasks even if they are still running
            cancelTask(() -> {}, "async search has expired");
        }
    }

    /**
     * Progress listener wired into the search; each callback opportunistically checks for
     * expiration and updates the shared {@link MutableSearchResponse}.
     */
    class Listener extends SearchProgressActionListener {

        @Override
        protected void onQueryResult(int shardIndex) {
            checkCancellation();
        }

        @Override
        protected void onFetchResult(int shardIndex) {
            checkCancellation();
        }

        @Override
        protected void onQueryFailure(int shardIndex, SearchShardTarget shardTarget, Exception exc) {
            // best effort to cancel expired tasks
            checkCancellation();
            searchResponse.get().addQueryFailure(shardIndex,
                // the nodeId is null if all replicas of this shard failed
                new ShardSearchFailure(exc, shardTarget.getNodeId() != null ? shardTarget : null));
        }

        @Override
        protected void onFetchFailure(int shardIndex, SearchShardTarget shardTarget, Exception exc) {
            // best effort to cancel expired tasks
            checkCancellation();
            //ignore fetch failures: they make the shards count confusing if we count them as shard failures because the query
            // phase ran fine and we don't want to end up with e.g. total: 5 successful: 5 failed: 5.
            //Given that partial results include only aggs they are not affected by fetch failures. Async search receives the fetch
            //failures either as an exception (when all shards failed during fetch, in which case async search will return the error
            //as well as the response obtained after the final reduction) or as part of the final response (if only some shards failed,
            //in which case the final response already includes results as well as shard fetch failures)
        }

        @Override
        protected void onListShards(List<SearchShard> shards, List<SearchShard> skipped, Clusters clusters, boolean fetchPhase) {
            // best effort to cancel expired tasks
            checkCancellation();
            // First initialization point: publish the response holder, then release any
            // callers waiting in addCompletionListener.
            searchResponse.compareAndSet(null,
                new MutableSearchResponse(shards.size() + skipped.size(), skipped.size(), clusters, threadPool.getThreadContext()));
            executeInitListeners();
        }

        @Override
        public void onPartialReduce(List<SearchShard> shards, TotalHits totalHits,
                InternalAggregations aggregations, int reducePhase) {
            // best effort to cancel expired tasks
            checkCancellation();
            // The way that the MutableSearchResponse will build the aggs.
            Supplier<InternalAggregations> reducedAggs;
            if (aggregations == null) {
                // There aren't any aggs to reduce.
                reducedAggs = () -> null;
            } else {
                /*
                 * Keep a reference to the partially reduced aggs and reduce it on the fly when someone asks
                 * for it. It's important that we wait until someone needs
                 * the result so we don't perform the final reduce only to
                 * throw it away. And it is important that we keep the reference
                 * to the aggregations because SearchPhaseController
                 * *already* has that reference so we're not creating more garbage.
                 */
                reducedAggs = () ->
                    InternalAggregations.topLevelReduce(singletonList(aggregations), aggReduceContextSupplier.get());
            }
            searchResponse.get().updatePartialResponse(shards.size(), totalHits, reducedAggs, reducePhase);
        }

        @Override
        public void onFinalReduce(List<SearchShard> shards, TotalHits totalHits, InternalAggregations aggregations, int reducePhase) {
            // best effort to cancel expired tasks
            checkCancellation();
            searchResponse.get().updatePartialResponse(shards.size(), totalHits, () -> aggregations, reducePhase);
        }

        @Override
        public void onResponse(SearchResponse response) {
            searchResponse.get().updateFinalResponse(response);
            executeCompletionListeners();
        }

        @Override
        public void onFailure(Exception exc) {
            // if the failure occurred before calling onListShards
            searchResponse.compareAndSet(null, new MutableSearchResponse(-1, -1, null, threadPool.getThreadContext()));
            searchResponse.get().updateWithFailure(new ElasticsearchStatusException("error while executing search",
                ExceptionsHelper.status(exc), exc));
            executeInitListeners();
            executeCompletionListeners();
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.olingo.fit.base;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.math.BigDecimal;
import java.net.URI;
import org.apache.commons.io.IOUtils;
import org.apache.olingo.client.api.communication.request.retrieve.ODataEntityRequest;
import org.apache.olingo.client.api.communication.response.ODataRetrieveResponse;
import org.apache.olingo.client.api.data.ResWrap;
import org.apache.olingo.client.api.domain.ClientAnnotation;
import org.apache.olingo.client.api.domain.ClientEntity;
import org.apache.olingo.client.api.domain.ClientEntitySet;
import org.apache.olingo.client.api.domain.ClientLink;
import org.apache.olingo.client.api.domain.ClientLinkType;
import org.apache.olingo.client.api.domain.ClientProperty;
import org.apache.olingo.commons.api.data.Entity;
import org.apache.olingo.commons.api.data.EntityCollection;
import org.apache.olingo.commons.api.edm.EdmPrimitiveTypeException;
import org.apache.olingo.commons.api.edm.EdmPrimitiveTypeKind;
import org.apache.olingo.commons.api.format.ContentType;
import org.junit.Test;
/**
 * The test cases in this class are inspired by client conformance criteria defined in the <a
 * href="http://docs.oasis-open.org/odata/odata-json-format/v4.0/os/odata-json-format-v4.0-os.html#_Toc372793094">specs
 * </a>.
 *
 * <p>Each {@code itemN} test corresponds to one numbered MUST requirement from the
 * conformance list. Tests that call {@code execute()} run against the live test service
 * provided by {@code AbstractTestITCase}; the others parse hard-coded JSON payloads.
 */
public class JSONFormatConformanceTestITCase extends AbstractTestITCase {

  /**
   * MUST either:
   * <ol>
   * <li>understand <tt>odata.metadata=minimal</tt> (section 3.1.1) or</li>
   * <li>explicitly specify <tt>odata.metadata=none</tt>(section 3.1.3) or <tt>odata.metadata=full</tt> (section 3.1.2)
   * in the request (client)</li>
   * </ol>
   * .
   */
  @Test
  public void item1() throws EdmPrimitiveTypeException {
    final URI uri = edmClient.newURIBuilder().
        appendEntitySetSegment("Accounts").appendKeySegment(102).
        appendNavigationSegment("MyPaymentInstruments").appendKeySegment(102902).build();
    final ODataEntityRequest<ClientEntity> req = edmClient.getRetrieveRequestFactory().getEntityRequest(uri);

    // request format (via Accept header) is set to minimal by default
    assertEquals("application/json;odata.metadata=minimal", req.getAccept());

    final ODataRetrieveResponse<ClientEntity> res = req.execute();

    // response is odata.metadata=minimal
    assertFalse(res.getContentType().contains("odata.metadata=none"));
    assertFalse(res.getContentType().contains("odata.metadata=full"));

    // response payload is understood
    final ClientEntity entity = res.getBody();
    assertNotNull(entity);
    assertEquals("Microsoft.Test.OData.Services.ODataWCFService.PaymentInstrument", entity.getTypeName().toString());
    assertEquals(102902, entity.getProperty("PaymentInstrumentID").getPrimitiveValue().toCastValue(Integer.class), 0);
    assertEquals("Edm.DateTimeOffset", entity.getProperty("CreatedDate").getPrimitiveValue().getTypeName());
  }

  /**
   * MUST be prepared to consume a response with full metadata.
   */
  @Test
  public void item2() {
    final URI uri = edmClient.newURIBuilder(testStaticServiceRootURL).
        appendEntitySetSegment("Accounts").appendKeySegment(102).build();
    final ODataEntityRequest<ClientEntity> req = edmClient.getRetrieveRequestFactory().getEntityRequest(uri);
    req.setFormat(ContentType.JSON_FULL_METADATA);

    // request format (via Accept header) is set to full metadata
    assertEquals("application/json;odata.metadata=full", req.getAccept());

    final ODataRetrieveResponse<ClientEntity> res = req.execute();

    // response is odata.metadata=full
    assertTrue(res.getContentType().contains("odata.metadata=full"));

    // response payload is understood (including links, only returned with full metadata)
    final ClientEntity entity = res.getBody();
    assertNotNull(entity);
    assertEquals(ClientLinkType.ENTITY_SET_NAVIGATION, entity.getNavigationLink("MyPaymentInstruments").getType());
    assertEquals(ClientLinkType.ENTITY_SET_NAVIGATION, entity.getNavigationLink("ActiveSubscriptions").getType());
  }

  /**
   * MUST be prepared to receive all data types (section 7.1)
   * <ol>
   * <li>defined in this specification (client)</li>
   * <li>exposed by the service (service)</li>
   * </ol>
   * .
   */
  @Test
  public void item3() throws Exception {
    // Sample payload taken from section 7.1 of the JSON format spec, exercising every
    // primitive type representation.
    final String fromSection71 = "{"
        + "\"NullValue\": null,"
        + "\"TrueValue\": true,"
        + "\"FalseValue\": false,"
        + "\"BinaryValue@odata.type\": \"Binary\","
        + "\"BinaryValue\": \"T0RhdGE\","
        + "\"IntegerValue\": -128,"
        + "\"DoubleValue\": 3.1415926535897931,"
        + "\"SingleValue@odata.type\": \"Single\","
        + "\"SingleValue\": \"INF\","
        + "\"DecimalValue@odata.type\": \"Decimal\","
        + "\"DecimalValue\": 34.95,"
        + "\"StringValue\": \"Say \\\"Hello\\\",\\nthen go\","
        + "\"DateValue@odata.type\": \"Date\","
        + "\"DateValue\": \"2012-12-03\","
        + "\"DateTimeOffsetValue@odata.type\": \"DateTimeOffset\","
        + "\"DateTimeOffsetValue\": \"2012-12-03T07:16:23Z\","
        + "\"DurationValue@odata.type\": \"Duration\","
        + "\"DurationValue\": \"P12DT23H59M59.999999999999S\","
        + "\"TimeOfDayValue@odata.type\": \"TimeOfDay\","
        + "\"TimeOfDayValue\": \"07:59:59.999\","
        + "\"GuidValue@odata.type\": \"Guid\","
        + "\"GuidValue\": \"01234567-89ab-cdef-0123-456789abcdef\","
        + "\"Int64Value@odata.type\": \"Int64\","
        + "\"Int64Value\": 0,"
        + "\"ColorEnumValue@odata.type\": \"Test.Color\","
        + "\"ColorEnumValue\": \"Yellow\","
        + "\"GeographyPoint\": {\"type\": \"Point\",\"coordinates\":[142.1,64.1]}"
        + "}";

    final ClientEntity entity = client.getReader().readEntity(IOUtils.toInputStream(fromSection71), ContentType.JSON);

    assertTrue(entity.getProperty("NullValue").hasNullValue());

    assertEquals(EdmPrimitiveTypeKind.Boolean, entity.getProperty("TrueValue").getPrimitiveValue().getTypeKind());
    assertEquals(Boolean.TRUE, entity.getProperty("TrueValue").getPrimitiveValue().toCastValue(Boolean.class));

    assertEquals(EdmPrimitiveTypeKind.Boolean, entity.getProperty("FalseValue").getPrimitiveValue().getTypeKind());
    assertEquals(Boolean.FALSE, entity.getProperty("FalseValue").getPrimitiveValue().toCastValue(Boolean.class));

    assertEquals(EdmPrimitiveTypeKind.Binary, entity.getProperty("BinaryValue").getPrimitiveValue().getTypeKind());

    assertEquals(EdmPrimitiveTypeKind.Int32, entity.getProperty("IntegerValue").getPrimitiveValue().getTypeKind());
    assertEquals(-128, entity.getProperty("IntegerValue").getPrimitiveValue().toCastValue(Integer.class), 0);

    assertEquals(EdmPrimitiveTypeKind.Double, entity.getProperty("DoubleValue").getPrimitiveValue().getTypeKind());
    assertEquals(3.1415926535897931,
        entity.getProperty("DoubleValue").getPrimitiveValue().toCastValue(Double.class), 0);

    assertEquals(EdmPrimitiveTypeKind.Single, entity.getProperty("SingleValue").getPrimitiveValue().getTypeKind());
    assertEquals(Float.POSITIVE_INFINITY,
        entity.getProperty("SingleValue").getPrimitiveValue().toCastValue(Float.class), 0);

    assertEquals(EdmPrimitiveTypeKind.Decimal, entity.getProperty("DecimalValue").getPrimitiveValue().getTypeKind());
    assertEquals(BigDecimal.valueOf(34.95),
        entity.getProperty("DecimalValue").getPrimitiveValue().toCastValue(BigDecimal.class));

    assertEquals(EdmPrimitiveTypeKind.String, entity.getProperty("StringValue").getPrimitiveValue().getTypeKind());
    assertEquals("Say \"Hello\",\nthen go",
        entity.getProperty("StringValue").getPrimitiveValue().toCastValue(String.class));

    assertEquals(EdmPrimitiveTypeKind.Date, entity.getProperty("DateValue").getPrimitiveValue().getTypeKind());

    assertEquals(EdmPrimitiveTypeKind.DateTimeOffset,
        entity.getProperty("DateTimeOffsetValue").getPrimitiveValue().getTypeKind());

    assertEquals(EdmPrimitiveTypeKind.Duration, entity.getProperty("DurationValue").getPrimitiveValue().getTypeKind());

    assertEquals(EdmPrimitiveTypeKind.TimeOfDay,
        entity.getProperty("TimeOfDayValue").getPrimitiveValue().getTypeKind());

    assertEquals(EdmPrimitiveTypeKind.Guid, entity.getProperty("GuidValue").getPrimitiveValue().getTypeKind());

    assertEquals(EdmPrimitiveTypeKind.Int64, entity.getProperty("Int64Value").getPrimitiveValue().getTypeKind());

    assertTrue(entity.getProperty("ColorEnumValue").hasEnumValue());

    assertEquals(EdmPrimitiveTypeKind.GeographyPoint,
        entity.getProperty("GeographyPoint").getPrimitiveValue().getTypeKind());
  }

  /**
   * MUST interpret all odata annotations defined according to the OData-Version header of the payload (section 4.5).
   */
  @Test
  public void item4() throws Exception {
    // Entity-level odata control information from section 4.5.
    final String fromSection45_1 = "{"
        + "\"@odata.context\": \"http://host/service/$metadata#Customers/$entity\","
        + "\"@odata.metadataEtag\": \"W/\\\"A1FF3E230954908F\\\"\","
        + "\"@odata.etag\": \"W/\\\"A1FF3E230954908G\\\"\","
        + "\"@odata.type\": \"#Model.VipCustomer\","
        + "\"@odata.id\": \"http://host/service/Employees(PersonID=3)\","
        + "\"@odata.editLink\": \"People(976)\","
        + "\"@odata.mediaEditLink\": \"Employees(1)/$value\","
        + "\"@odata.mediaContentType\": \"image/jpeg\","
        + "\"@odata.mediaEtag\": \"W/\\\"A1FF3E230954908H\\\"\","
        + "\"Parent@odata.navigationLink\": \"People(976)/Parent\","
        + "\"Parent@odata.associationLink\": \"People(976)/Parent\""
        + "}";

    final ResWrap<Entity> entity =
        client.getDeserializer(ContentType.JSON).toEntity(IOUtils.toInputStream(fromSection45_1));

    assertEquals("http://host/service/$metadata#Customers/$entity", entity.getContextURL().toASCIIString());
    assertEquals("W/\"A1FF3E230954908F\"", entity.getMetadataETag());
    assertEquals("W/\"A1FF3E230954908G\"", entity.getPayload().getETag());
    assertEquals("Model.VipCustomer", entity.getPayload().getType());
    assertEquals("http://host/service/Employees(PersonID=3)", entity.getPayload().getId().toASCIIString());
    assertEquals("People(976)", entity.getPayload().getEditLink().getHref());
    assertEquals("Employees(1)/$value", entity.getPayload().getMediaContentSource().toASCIIString());
    assertEquals("image/jpeg", entity.getPayload().getMediaContentType());
    assertEquals("W/\"A1FF3E230954908H\"", entity.getPayload().getMediaETag());
    assertEquals("People(976)/Parent", entity.getPayload().getNavigationLink("Parent").getHref());
    assertEquals("People(976)/Parent", entity.getPayload().getAssociationLink("Parent").getHref());

    // Collection-level odata control information (count, nextLink, deltaLink).
    final String fromSection45_2 = "{"
        + "  \"@odata.count\": 5,"
        + "  \"value\": [],"
        + "  \"@odata.nextLink\": \"Customers?$expand=Orders&$skipToken=5\","
        + "  \"@odata.deltaLink\": \"Customers?$expand=Orders&$deltatoken=8015\""
        + "}";

    final ResWrap<EntityCollection> entitySet =
        client.getDeserializer(ContentType.JSON).toEntitySet(IOUtils.toInputStream(fromSection45_2));

    assertEquals(5, entitySet.getPayload().getCount(), 0);
    assertEquals("Customers?$expand=Orders&$skipToken=5", entitySet.getPayload().getNext().toASCIIString());
    assertEquals("Customers?$expand=Orders&$deltatoken=8015", entitySet.getPayload().getDeltaLink().toASCIIString());
  }

  /**
   * MUST be prepared to receive any annotations, including custom annotations and <tt>odata</tt> annotations not
   * defined in the <tt>OData-Version</tt> header of the payload (section 20).
   */
  @Test
  public void item5() throws Exception {
    // Payload mixing unknown odata annotations with custom (com.contoso.*) annotations at
    // entity-set, entity, property and navigation-link level.
    final String sample = "{"
        + "  \"@odata.context\": \"http://host/service/$metadata#Customers\","
        + "  \"@odata.notdefined\": 11,"
        + "  \"@com.contoso.customer.setkind\": \"VIPs\","
        + "  \"value\": ["
        + "    {"
        + "      \"@com.contoso.display.highlight\": true,"
        + "      \"ID\": \"ALFKI\","
        + "      \"CompanyName@com.contoso.display.style\": { \"title\": true, \"order\": 1 },"
        + "      \"CompanyName\": \"Alfreds Futterkiste\","
        + "      \"Orders@com.contoso.display.style\": { \"order\": 2 },"
        + "      \"Orders@odata.navigationLink\": \"People(976)/Orders\""
        + "    }"
        + "  ]"
        + "}";

    final ClientEntitySet entitySet = client.getReader().
        readEntitySet(IOUtils.toInputStream(sample), ContentType.JSON);

    assertEquals(2, entitySet.getAnnotations().size());

    final ClientAnnotation notdefined = entitySet.getAnnotations().get(0);
    assertEquals("odata.notdefined", notdefined.getTerm());
    assertEquals(11, notdefined.getPrimitiveValue().toCastValue(Integer.class), 0);

    final ClientAnnotation setkind = entitySet.getAnnotations().get(1);
    assertEquals("com.contoso.customer.setkind", setkind.getTerm());
    assertEquals("VIPs", setkind.getPrimitiveValue().toCastValue(String.class));

    final ClientEntity entity = entitySet.getEntities().get(0);
    assertEquals(1, entity.getAnnotations().size());

    final ClientAnnotation highlight = entity.getAnnotations().get(0);
    assertEquals("com.contoso.display.highlight", highlight.getTerm());
    assertEquals(Boolean.TRUE, highlight.getPrimitiveValue().toCastValue(Boolean.class));

    final ClientProperty property = entity.getProperty("CompanyName");
    assertEquals(1, property.getAnnotations().size());

    final ClientAnnotation style = property.getAnnotations().get(0);
    assertEquals("com.contoso.display.style", style.getTerm());
    assertTrue(style.hasComplexValue());
    assertEquals(Boolean.TRUE, style.getComplexValue().get("title").getPrimitiveValue().toCastValue(Boolean.class));
    assertEquals(1, style.getComplexValue().get("order").getPrimitiveValue().toCastValue(Integer.class), 0);

    final ClientLink orders = entity.getNavigationLink("Orders");
    assertEquals(1, orders.getAnnotations().size());

    final ClientAnnotation style2 = orders.getAnnotations().get(0);
    assertEquals("com.contoso.display.style", style2.getTerm());
    assertTrue(style2.hasComplexValue());
    assertEquals(2, style2.getComplexValue().get("order").getPrimitiveValue().toCastValue(Integer.class), 0);
  }

  /**
   * MUST NOT require <tt>odata.streaming=true</tt> in the <tt>Content-Type</tt> header (section 4.4).
   */
  @Test
  public void item6() throws EdmPrimitiveTypeException {
    final URI uri = edmClient.newURIBuilder().
        appendEntitySetSegment("Accounts").appendKeySegment(102).
        appendNavigationSegment("MyPaymentInstruments").appendKeySegment(102902).build();
    final ODataEntityRequest<ClientEntity> req = edmClient.getRetrieveRequestFactory().getEntityRequest(uri);

    // request format (via Accept header) does not contain odata.streaming=true
    assertEquals("application/json;odata.metadata=minimal", req.getAccept());

    final ODataRetrieveResponse<ClientEntity> res = req.execute();

    // response payload is understood
    final ClientEntity entity = res.getBody();
    assertNotNull(entity);
    assertEquals("Microsoft.Test.OData.Services.ODataWCFService.PaymentInstrument", entity.getTypeName().toString());
    assertEquals(102902, entity.getProperty("PaymentInstrumentID").getPrimitiveValue().toCastValue(Integer.class), 0);
    assertEquals("Edm.DateTimeOffset", entity.getProperty("CreatedDate").getPrimitiveValue().getTypeName());
  }
}
| |
package com.djs.learn.javalang.generics;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
/**
 * Demonstrates Java generics: bounded type parameters, upper/lower-bounded
 * wildcards (PECS), and generic methods/constructors.
 *
 * Fixes: replaced the deprecated {@code new Integer(int)} boxing constructor
 * with {@code Integer.valueOf(int)} and removed raw-type instantiations of
 * {@code SampleTypeA}/{@code SampleTypeB} (which compiled with unchecked
 * warnings); printed output is unchanged.
 */
public class TestGeneric
{
    /** Base of a five-level hierarchy used to exercise wildcard bounds. */
    public class LevelA
    {}

    public class LevelB extends LevelA
    {}

    public class LevelC extends LevelB
    {}

    public class LevelD extends LevelC
    {}

    public class LevelE extends LevelD
    {}

    /** Generic type whose parameter must be LevelB or one of its subclasses. */
    public class SampleTypeC<T extends LevelB>
    {}

    /** Accepts only SampleTypeC&lt;LevelB&gt;: the sole T both "extends LevelB" and "super LevelB". */
    public void printA(SampleTypeC<? super LevelB> t){
        System.out.println("A: T <" + t.getClass().getName() + "> = " + t);
    }

    /** Accepts SampleTypeC of LevelB, LevelC or LevelD (super-bound LevelD, class-bound LevelB). */
    public void printB(SampleTypeC<? super LevelD> t){
        System.out.println("B: T <" + t.getClass().getName() + "> = " + t);
    }

    /** Accepts SampleTypeC of LevelD or LevelE (extends-bound LevelD). */
    public void printC(SampleTypeC<? extends LevelD> t){
        System.out.println("C: T <" + t.getClass().getName() + "> = " + t);
    }

    /** Generic method: T is inferred from the argument. */
    public <T> void printT(T t){
        System.out.println("T <" + t.getClass().getName() + "> = " + t);
    }

    /** Unbounded wildcard list: readable, but nothing (except null) may be added. */
    public void printList(List<?> list){
        // Unbounded type is immutable.
        // list.add(new Object()); // Not compile.
        for (Object x : list)
            System.out.println("List ? <" + x.getClass().getName() + "> = " + x);
    }

    /** Upper-bounded wildcard: a producer of IOExceptions; additions do not compile. */
    public void printList2(List<? extends IOException> list){
        // Upper-Bounded type is immutable.
        // list.add(new IOException("Problem 3")); // Not compile.
        for (Object x : list)
            System.out.println("List extends <" + x.getClass().getName() + "> = " + x);
    }

    /** Lower-bounded wildcard: a consumer; IOException and subclasses may be added. */
    public void printList3(List<? super IOException> list){
        // Lower-bounded type is mutable.
        list.add(new IOException("Problem 3"));
        for (Object x : list)
            System.out.println("List super <" + x.getClass().getName() + "> = " + x);
    }

    /** Exercises wildcard bounds on SampleTypeC and the generic constructor of SampleTypeB. */
    public void testSample1(){
        // Diamond operator instead of the raw type: the raw constructor calls
        // compiled only with unchecked warnings.
        SampleTypeA<Integer> sampleTypeA = new SampleTypeA<>(10);
        sampleTypeA.print();
        new SampleTypeA<>(20).print();
        new SampleTypeA<>(30.0).print();
        new SampleTypeA<>(40.0f).print();
        new SampleTypeA<>("Hello").print();
        System.out.println("----------------------------------------");
        // Only LevelB.
        printA(new SampleTypeC<LevelB>());
        // From LevelD up to LevelB.
        printB(new SampleTypeC<LevelB>());
        printB(new SampleTypeC<LevelC>());
        printB(new SampleTypeC<LevelD>());
        // From LevelD down to LevelE.
        printC(new SampleTypeC<LevelD>());
        printC(new SampleTypeC<LevelE>());
        System.out.println("----------------------------------------");
        {
            new SampleTypeB<Integer>(10, 10);
        }
        System.out.println("----------------------------------------");
        {
            new SampleTypeB<Integer>(10, "Ok");
        }
    }

    /** Exercises the generic method printT and the three wildcard list printers. */
    public void testSample2(){
        // Integer.valueOf instead of the deprecated (Java 9+) Integer constructor.
        printT(Integer.valueOf(10));
        System.out.println("----------------------------------------");
        ArrayList<String> list = new ArrayList<String>();
        list.add("Tom");
        list.add("Jerry");
        list.add("Mary");
        printList(list);
        System.out.println("----------------------------------------");
        List<?> list1a = new ArrayList<IOException>();
        List<?> list1b = new ArrayList<FileNotFoundException>();
        // Upper-Bounded type is immutable.
        // list1a.add(new IOException("Problem 1")); // Not compile.
        // list1a.add(new FileNotFoundException("Problem 2")); // Not compile.
        // list1b.add(new IOException("Problem 1")); // Not compile.
        // list1b.add(new FileNotFoundException("Problem 2")); // Not compile.
        printList(list1a);
        printList(list1b);
        // printList2(list1a); // Not compile.
        // printList3(list1a); // Not compile.
        System.out.println("----------------------------------------");
        List<? extends IOException> list2a = new ArrayList<IOException>();
        List<? extends IOException> list2b = new ArrayList<FileNotFoundException>();
        // Upper-Bounded type is immutable.
        // list2a.add(new IOException("Problem 1")); // Not compile.
        // list2a.add(new FileNotFoundException("Problem 2")); // Not compile.
        // list2b.add(new IOException("Problem 1")); // Not compile.
        // list2b.add(new FileNotFoundException("Problem 2")); // Not compile.
        printList(list2a);
        printList(list2b);
        printList2(list2a);
        printList2(list2b);
        // printList3(list2a); // Not compile.
        System.out.println("----------------------------------------");
        List<? super IOException> list3a = new ArrayList<Exception>();
        List<? super IOException> list3b = new ArrayList<IOException>();
        List<? super IOException> list3c = new ArrayList<Object>();
        // Lower-bounded type is mutable.
        // IOException and all its subclass can be added!
        // list3a.add(new Exception("Problem 0")); // Not compile.
        // list3a.add(new Object()); // Not compile.
        list3a.add(new IOException("Problem 1a"));
        list3b.add(new IOException("Problem 2a"));
        list3c.add(new IOException("Problem 3a"));
        // FileNotFoundException is subclass of IOException.
        list3a.add(new FileNotFoundException("Problem 1b"));
        list3b.add(new FileNotFoundException("Problem 2b"));
        list3c.add(new FileNotFoundException("Problem 3b"));
        printList(list3a);
        printList(list3b);
        printList(list3c);
        // printList2(list3a); // Not compile.
        printList3(list3a);
        printList3(list3b);
        printList3(list3c);
    }

    public static void main(String[] args){
        TestGeneric testMain = new TestGeneric();
        System.out.println("============================================================");
        testMain.testSample1();
        System.out.println("============================================================");
        testMain.testSample2();
        System.out.println("============================================================");
    }
}
/**
 * Minimal generic holder demonstrating type inference at construction time.
 * Prints and renders its single value together with its runtime class.
 */
class SampleTypeA<T>
{
    T t;

    SampleTypeA(T t){
        this.t = t;
    }

    /** Prints the held value and its runtime class name. */
    public void print(){
        String className = t.getClass().getName();
        System.out.println("T <" + className + "> = " + t);
    }

    @Override
    public String toString(){
        StringBuilder rendered = new StringBuilder("[t=");
        rendered.append(t).append("]");
        return rendered.toString();
    }
}
/**
 * Demonstrates a generic constructor type parameter {@code <X>} that is
 * independent of the class type parameter {@code <T>}: the constructor prints
 * both values, their runtime classes, and whether they are equal.
 */
class SampleTypeB<T>
{
    <X> SampleTypeB(T t, X x){
        boolean same = x.equals(t);
        System.out.println("T <" + t.getClass().getName() + "> = " + t);
        System.out.println("X <" + x.getClass().getName() + "> = " + x);
        // Fixed typo in the printed message: "eguals" -> "equals".
        System.out.println("t equals x = " + same);
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.acceleratedmobilepageurl.v1;
/**
* Service definition for Acceleratedmobilepageurl (v1).
*
* <p>
* Retrieves the list of AMP URLs (and equivalent AMP Cache URLs) for a given list of public URL(s).
* </p>
*
* <p>
* For more information about this service, see the
* <a href="https://developers.google.com/amp/cache/" target="_blank">API Documentation</a>
* </p>
*
* <p>
* This service uses {@link AcceleratedmobilepageurlRequestInitializer} to initialize global parameters via its
* {@link Builder}.
* </p>
*
* @since 1.3
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public class Acceleratedmobilepageurl extends com.google.api.client.googleapis.services.json.AbstractGoogleJsonClient {
  // Note: Leave this static initializer at the top of the file.
  static {
    com.google.api.client.util.Preconditions.checkState(
        com.google.api.client.googleapis.GoogleUtils.MAJOR_VERSION == 1 &&
        com.google.api.client.googleapis.GoogleUtils.MINOR_VERSION >= 15,
        "You are currently running with version %s of google-api-client. " +
        "You need at least version 1.15 of google-api-client to run version " +
        "1.29.2 of the Accelerated Mobile Pages (AMP) URL API library.", com.google.api.client.googleapis.GoogleUtils.VERSION);
  }
  /**
   * The default encoded root URL of the service. This is determined when the library is generated
   * and normally should not be changed.
   *
   * @since 1.7
   */
  public static final String DEFAULT_ROOT_URL = "https://acceleratedmobilepageurl.googleapis.com/";
  /**
   * The default encoded service path of the service. This is determined when the library is
   * generated and normally should not be changed.
   *
   * @since 1.7
   */
  public static final String DEFAULT_SERVICE_PATH = "";
  /**
   * The default encoded batch path of the service. This is determined when the library is
   * generated and normally should not be changed.
   *
   * @since 1.23
   */
  public static final String DEFAULT_BATCH_PATH = "batch";
  /**
   * The default encoded base URL of the service. This is determined when the library is generated
   * and normally should not be changed.
   */
  public static final String DEFAULT_BASE_URL = DEFAULT_ROOT_URL + DEFAULT_SERVICE_PATH;
  /**
   * Constructor.
   *
   * <p>
   * Use {@link Builder} if you need to specify any of the optional parameters.
   * </p>
   *
   * @param transport HTTP transport, which should normally be:
   *        <ul>
   *        <li>Google App Engine:
   *        {@code com.google.api.client.extensions.appengine.http.UrlFetchTransport}</li>
   *        <li>Android: {@code newCompatibleTransport} from
   *        {@code com.google.api.client.extensions.android.http.AndroidHttp}</li>
   *        <li>Java: {@link com.google.api.client.googleapis.javanet.GoogleNetHttpTransport#newTrustedTransport()}
   *        </li>
   *        </ul>
   * @param jsonFactory JSON factory, which may be:
   *        <ul>
   *        <li>Jackson: {@code com.google.api.client.json.jackson2.JacksonFactory}</li>
   *        <li>Google GSON: {@code com.google.api.client.json.gson.GsonFactory}</li>
   *        <li>Android Honeycomb or higher:
   *        {@code com.google.api.client.extensions.android.json.AndroidJsonFactory}</li>
   *        </ul>
   * @param httpRequestInitializer HTTP request initializer or {@code null} for none
   * @since 1.7
   */
  public Acceleratedmobilepageurl(com.google.api.client.http.HttpTransport transport, com.google.api.client.json.JsonFactory jsonFactory,
      com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) {
    this(new Builder(transport, jsonFactory, httpRequestInitializer));
  }
  /**
   * @param builder builder
   */
  Acceleratedmobilepageurl(Builder builder) {
    super(builder);
  }
  @Override
  protected void initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest<?> httpClientRequest) throws java.io.IOException {
    super.initialize(httpClientRequest);
  }
  /**
   * An accessor for creating requests from the AmpUrls collection.
   *
   * <p>The typical use is:</p>
   * <pre>
   *   {@code Acceleratedmobilepageurl acceleratedmobilepageurl = new Acceleratedmobilepageurl(...);}
   *   {@code Acceleratedmobilepageurl.AmpUrls.List request = acceleratedmobilepageurl.ampUrls().list(parameters ...)}
   * </pre>
   *
   * @return the resource collection
   */
  public AmpUrls ampUrls() {
    return new AmpUrls();
  }
  /**
   * The "ampUrls" collection of methods.
   */
  public class AmpUrls {
    /**
     * Returns AMP URL(s) and equivalent [AMP Cache URL(s)](/amp/cache/overview#amp-cache-url-format).
     *
     * Create a request for the method "ampUrls.batchGet".
     *
     * This request holds the parameters needed by the acceleratedmobilepageurl server.  After setting
     * any optional parameters, call the {@link BatchGet#execute()} method to invoke the remote
     * operation.
     *
     * @param content the {@link com.google.api.services.acceleratedmobilepageurl.v1.model.BatchGetAmpUrlsRequest}
     * @return the request
     */
    public BatchGet batchGet(com.google.api.services.acceleratedmobilepageurl.v1.model.BatchGetAmpUrlsRequest content) throws java.io.IOException {
      BatchGet result = new BatchGet(content);
      initialize(result);
      return result;
    }
    public class BatchGet extends AcceleratedmobilepageurlRequest<com.google.api.services.acceleratedmobilepageurl.v1.model.BatchGetAmpUrlsResponse> {
      private static final String REST_PATH = "v1/ampUrls:batchGet";
      /**
       * Returns AMP URL(s) and equivalent [AMP Cache URL(s)](/amp/cache/overview#amp-cache-url-format).
       *
       * Create a request for the method "ampUrls.batchGet".
       *
       * This request holds the parameters needed by the acceleratedmobilepageurl server.  After
       * setting any optional parameters, call the {@link BatchGet#execute()} method to invoke the
       * remote operation. <p> {@link
       * BatchGet#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)}
       * must be called to initialize this instance immediately after invoking the constructor. </p>
       *
       * @param content the {@link com.google.api.services.acceleratedmobilepageurl.v1.model.BatchGetAmpUrlsRequest}
       * @since 1.13
       */
      protected BatchGet(com.google.api.services.acceleratedmobilepageurl.v1.model.BatchGetAmpUrlsRequest content) {
        super(Acceleratedmobilepageurl.this, "POST", REST_PATH, content, com.google.api.services.acceleratedmobilepageurl.v1.model.BatchGetAmpUrlsResponse.class);
      }
      @Override
      public BatchGet set$Xgafv(java.lang.String $Xgafv) {
        return (BatchGet) super.set$Xgafv($Xgafv);
      }
      @Override
      public BatchGet setAccessToken(java.lang.String accessToken) {
        return (BatchGet) super.setAccessToken(accessToken);
      }
      @Override
      public BatchGet setAlt(java.lang.String alt) {
        return (BatchGet) super.setAlt(alt);
      }
      @Override
      public BatchGet setCallback(java.lang.String callback) {
        return (BatchGet) super.setCallback(callback);
      }
      @Override
      public BatchGet setFields(java.lang.String fields) {
        return (BatchGet) super.setFields(fields);
      }
      @Override
      public BatchGet setKey(java.lang.String key) {
        return (BatchGet) super.setKey(key);
      }
      @Override
      public BatchGet setOauthToken(java.lang.String oauthToken) {
        return (BatchGet) super.setOauthToken(oauthToken);
      }
      @Override
      public BatchGet setPrettyPrint(java.lang.Boolean prettyPrint) {
        return (BatchGet) super.setPrettyPrint(prettyPrint);
      }
      @Override
      public BatchGet setQuotaUser(java.lang.String quotaUser) {
        return (BatchGet) super.setQuotaUser(quotaUser);
      }
      @Override
      public BatchGet setUploadType(java.lang.String uploadType) {
        return (BatchGet) super.setUploadType(uploadType);
      }
      @Override
      public BatchGet setUploadProtocol(java.lang.String uploadProtocol) {
        return (BatchGet) super.setUploadProtocol(uploadProtocol);
      }
      @Override
      public BatchGet set(String parameterName, Object value) {
        return (BatchGet) super.set(parameterName, value);
      }
    }
  }
  /**
   * Builder for {@link Acceleratedmobilepageurl}.
   *
   * <p>
   * Implementation is not thread-safe.
   * </p>
   *
   * @since 1.3.0
   */
  public static final class Builder extends com.google.api.client.googleapis.services.json.AbstractGoogleJsonClient.Builder {
    /**
     * Returns an instance of a new builder.
     *
     * @param transport HTTP transport, which should normally be:
     *        <ul>
     *        <li>Google App Engine:
     *        {@code com.google.api.client.extensions.appengine.http.UrlFetchTransport}</li>
     *        <li>Android: {@code newCompatibleTransport} from
     *        {@code com.google.api.client.extensions.android.http.AndroidHttp}</li>
     *        <li>Java: {@link com.google.api.client.googleapis.javanet.GoogleNetHttpTransport#newTrustedTransport()}
     *        </li>
     *        </ul>
     * @param jsonFactory JSON factory, which may be:
     *        <ul>
     *        <li>Jackson: {@code com.google.api.client.json.jackson2.JacksonFactory}</li>
     *        <li>Google GSON: {@code com.google.api.client.json.gson.GsonFactory}</li>
     *        <li>Android Honeycomb or higher:
     *        {@code com.google.api.client.extensions.android.json.AndroidJsonFactory}</li>
     *        </ul>
     * @param httpRequestInitializer HTTP request initializer or {@code null} for none
     * @since 1.7
     */
    public Builder(com.google.api.client.http.HttpTransport transport, com.google.api.client.json.JsonFactory jsonFactory,
        com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) {
      super(
          transport,
          jsonFactory,
          DEFAULT_ROOT_URL,
          DEFAULT_SERVICE_PATH,
          httpRequestInitializer,
          false);
      setBatchPath(DEFAULT_BATCH_PATH);
    }
    /** Builds a new instance of {@link Acceleratedmobilepageurl}. */
    @Override
    public Acceleratedmobilepageurl build() {
      return new Acceleratedmobilepageurl(this);
    }
    @Override
    public Builder setRootUrl(String rootUrl) {
      return (Builder) super.setRootUrl(rootUrl);
    }
    @Override
    public Builder setServicePath(String servicePath) {
      return (Builder) super.setServicePath(servicePath);
    }
    @Override
    public Builder setBatchPath(String batchPath) {
      return (Builder) super.setBatchPath(batchPath);
    }
    @Override
    public Builder setHttpRequestInitializer(com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) {
      return (Builder) super.setHttpRequestInitializer(httpRequestInitializer);
    }
    @Override
    public Builder setApplicationName(String applicationName) {
      return (Builder) super.setApplicationName(applicationName);
    }
    @Override
    public Builder setSuppressPatternChecks(boolean suppressPatternChecks) {
      return (Builder) super.setSuppressPatternChecks(suppressPatternChecks);
    }
    @Override
    public Builder setSuppressRequiredParameterChecks(boolean suppressRequiredParameterChecks) {
      return (Builder) super.setSuppressRequiredParameterChecks(suppressRequiredParameterChecks);
    }
    @Override
    public Builder setSuppressAllChecks(boolean suppressAllChecks) {
      return (Builder) super.setSuppressAllChecks(suppressAllChecks);
    }
    /**
     * Set the {@link AcceleratedmobilepageurlRequestInitializer}.
     *
     * @since 1.12
     */
    public Builder setAcceleratedmobilepageurlRequestInitializer(
        AcceleratedmobilepageurlRequestInitializer acceleratedmobilepageurlRequestInitializer) {
      return (Builder) super.setGoogleClientRequestInitializer(acceleratedmobilepageurlRequestInitializer);
    }
    @Override
    public Builder setGoogleClientRequestInitializer(
        com.google.api.client.googleapis.services.GoogleClientRequestInitializer googleClientRequestInitializer) {
      return (Builder) super.setGoogleClientRequestInitializer(googleClientRequestInitializer);
    }
  }
}
| |
package com.graphhopper.reader.osm;
import com.graphhopper.GHRequest;
import com.graphhopper.GHResponse;
import com.graphhopper.GraphHopper;
import com.graphhopper.PathWrapper;
import com.graphhopper.matching.MapMatching;
import com.graphhopper.matching.MatchResult;
import com.graphhopper.routing.AlgorithmOptions;
import com.graphhopper.routing.Path;
import com.graphhopper.routing.util.EncodingManager;
import com.graphhopper.routing.util.HintsMap;
import com.graphhopper.util.*;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;
/**
* Created by isaac on 09/14/16.
*/
public class AlternativeRoutingExternalities {
    // Routing engine; built in prepareGraphHopper().
    private GraphHopper hopper;
    // Map-matcher used to snap GPX traces to the road network; built in prepMapMatcher().
    private MapMatching mapMatching;
    // City key: "sf", "nyc", "lon" or "man" (see setDataSources()).
    private String city;
    // Route-type key: "rand" or "taxi" (see setDataSources()).
    private String route_type;
    // CSV of grid cells banned for the "safety" optimization ("" for lon/man).
    private String bannedGridCellsFn;
    // One open output writer per optimization name; opened in setODPairs().
    private HashMap<String, FileWriter> outputFiles;
    // Column name -> column index for the grid-values CSV header.
    private HashMap<String, Integer> gvHeaderMap;
    // "rid,cid" grid-cell key -> beauty score; loaded in getGridValues().
    private HashMap<String, Float> gridBeauty;
    // Optimizations to run; populated in the constructor.
    private ArrayList<String> optimizations = new ArrayList<>();
    // Per-cell score CSVs to load; populated in setDataSources().
    private ArrayList<String> gridValuesFNs = new ArrayList<>();
    // Each entry: {latFrom, lonFrom, latTo, lonTo, index} (see setODPairs()).
    private ArrayList<float[]> inputPoints = new ArrayList<>();
    // Origin-destination pair IDs, parallel to inputPoints.
    private ArrayList<String> id_to_points = new ArrayList<>();
    // Path stems; completed with city/route-type specifics in setDataSources().
    private String osmFile = "./reader-osm/files/";
    private String graphFolder = "./reader-osm/target/tmp/";
    private String inputPointsFN = "../data/intermediate/";
    private String outputPointsFN = "../data/routes/";
    private String gvfnStem = "../data/intermediate/";
    private String gctfnStem = "../data/intermediate/";
    // CSV header row written at the top of every output file.
    private String outputheader = "ID,name,polyline_points,total_time_in_sec,total_distance_in_meters," +
            "number_of_steps,maneuvers,beauty,simplicity,pctNonHighwayTime,pctNonHighwayDist,pctNeiTime,pctNeiDist" +
            System.getProperty("line.separator");
public AlternativeRoutingExternalities(String city, String route_type) {
this.city = city;
this.route_type = route_type;
this.outputFiles = new HashMap<>();
optimizations.add("beauty");
optimizations.add("simple");
optimizations.add("fast");
optimizations.add("safety");
}
    /** Overrides the city key ("sf", "nyc", "lon" or "man") used by setDataSources(). */
    public void setCity(String city) {
        this.city = city;
    }
    /** Overrides the route-type key ("rand" or "taxi") used by setDataSources(). */
    public void setRouteType(String route_type) {
        this.route_type = route_type;
    }
public void setDataSources() throws Exception {
if (city.equals("sf")) {
osmFile = osmFile + "san-francisco-bay_california.osm.pbf";
graphFolder = graphFolder + "ghosm_sf_noch";
inputPointsFN = inputPointsFN + "sf_" + route_type + "_od_pairs.csv";
outputPointsFN = outputPointsFN + "sf_" + route_type + "_gh_routes.csv";
gridValuesFNs.add(gvfnStem + "06075_empath_grid.csv");
bannedGridCellsFn = gctfnStem + "06075_banned_grid_cells.csv";
} else if (city.equals("nyc")) {
osmFile = osmFile + "new-york_new-york.osm.pbf";
graphFolder = graphFolder + "ghosm_nyc_noch";
inputPointsFN = inputPointsFN + "nyc_" + route_type + "_od_pairs.csv";
outputPointsFN = outputPointsFN + "nyc_" + route_type + "_gh_routes.csv";
gridValuesFNs.add(gvfnStem + "36005_empath_grid.csv");
gridValuesFNs.add(gvfnStem + "36047_empath_grid.csv");
gridValuesFNs.add(gvfnStem + "36061_empath_grid.csv");
gridValuesFNs.add(gvfnStem + "36081_empath_grid.csv");
gridValuesFNs.add(gvfnStem + "36085_empath_grid.csv");
bannedGridCellsFn = gctfnStem + "nyc_banned_grid_cells.csv";
} else if (city.equals("lon")) {
osmFile = osmFile + "london_england.osm.pbf";
graphFolder = graphFolder + "ghosm_lon_noch";
inputPointsFN = inputPointsFN + "lon_" + route_type + "_od_pairs.csv";
outputPointsFN = outputPointsFN + "lon_" + route_type + "_gh_routes.csv";
gridValuesFNs.add(gvfnStem + "LONDON_logfractionempath_ft.csv");
bannedGridCellsFn = "";
} else if (city.equals("man")) {
osmFile = osmFile + "manila_philippines.osm.pbf";
graphFolder = graphFolder + "ghosm_man_noch";
inputPointsFN = inputPointsFN + "man_" + route_type + "_od_pairs.csv";
outputPointsFN = outputPointsFN + "man_" + route_type + "_gh_routes.csv";
gridValuesFNs.add(gvfnStem + "MANILA_logfractionempath_ft.csv");
bannedGridCellsFn = "";
} else {
throw new Exception("Invalid Parameters: city must be of 'sf','nyc', 'man', or 'sin' and route_type of 'rand' or 'taxi'");
}
}
public void getGridValues() throws Exception {
gvHeaderMap = new HashMap<>();
gridBeauty = new HashMap<>();
for (String fn : gridValuesFNs) {
try {
Scanner sc_in = new Scanner(new File(fn));
String[] gvHeader = sc_in.nextLine().split(",");
int i = 0;
for (String col : gvHeader) {
gvHeaderMap.put(col, i);
i++;
}
String line;
String[] vals;
String rc;
float beauty;
while (sc_in.hasNext()) {
line = sc_in.nextLine();
vals = line.split(",");
try {
rc = vals[gvHeaderMap.get("rid")] + "," + vals[gvHeaderMap.get("cid")];
beauty = Float.valueOf(vals[gvHeaderMap.get("beauty")]);
gridBeauty.put(rc, beauty);
} catch (NullPointerException ex) {
System.out.println(ex.getMessage());
System.out.println(line);
continue;
}
}
} catch (IOException io) {
System.out.println(io + ": " + fn + " does not exist.");
}
}
}
public void setODPairs() throws Exception {
for (String optimization : optimizations) {
outputFiles.put(optimization, new FileWriter(outputPointsFN.replaceFirst(".csv", "_" + optimization + ".csv"), true));
}
for (FileWriter fw : outputFiles.values()) {
fw.write(outputheader);
}
// Bring in origin-destination pairs for processing
Scanner sc_in = new Scanner(new File(inputPointsFN));
String header = sc_in.nextLine();
String od_id;
float laF;
float loF;
float laT;
float loT;
float idx = 0;
System.out.println("Input data points header: " + header);
while (sc_in.hasNext()) {
idx = idx + 1;
String line = sc_in.nextLine();
String[] vals = line.split(",");
od_id = vals[0];
loF = Float.valueOf(vals[1]);
laF = Float.valueOf(vals[2]);
loT = Float.valueOf(vals[3]);
laT = Float.valueOf(vals[4]);
inputPoints.add(new float[]{laF, loF, laT, loT, idx});
id_to_points.add(od_id);
}
int numPairs = inputPoints.size();
System.out.println(numPairs + " origin-destination pairs.");
}
public String writeOutput(int i, String optimized, String name, String od_id, PathWrapper bestPath, float score) {
// points, distance in meters and time in seconds (convert from ms) of the full path
PointList pointList = bestPath.getPoints();
int simplicity = bestPath.getSimplicity();
double distance = Math.round(bestPath.getDistance() * 100) / 100;
double nonHighwayDistance = bestPath.getNonHighwayDistance();
double pctNHD = Math.round(1000.0 * (float) nonHighwayDistance / distance) / 1000.0;
long timeInSec = bestPath.getTime() / 1000;
long nonHighwayTimeInSec = bestPath.getNonHighwayTime() / 1000;
double pctNHT = Math.round(1000.0 * (float) nonHighwayTimeInSec / timeInSec) / 1000.0;
double smallNeiDistance = bestPath.getNeiHighwayDistance();
double pctNeiD = Math.round(1000.0 * (float) smallNeiDistance / distance) / 1000.0;
long neiHighwayTimeInSec = bestPath.getTimeSmallNeigh() / 1000;
double pctNeiT = Math.round(1000.0 * (float) neiHighwayTimeInSec / timeInSec) / 1000.0;
InstructionList il = bestPath.getInstructions();
int numDirections = il.getSize();
// iterate over every turn instruction
ArrayList<String> maneuvers = new ArrayList<>();
for (Instruction instruction : il) {
maneuvers.add(instruction.getSimpleTurnDescription());
}
System.out.println(i + " (" + optimized + "): Distance: " +
distance + "m;\tTime: " + timeInSec + "sec;\t# Directions: " + numDirections +
";\tSimplicity: " + simplicity + ";\tScore: " + score +
";\tPctNHT: " + pctNHT + ";\tPctNeiT: " + pctNeiT);
return od_id + "," + name + "," +
"\"[" + pointList + "]\"," +
timeInSec + "," + distance + "," + numDirections +
",\"" + maneuvers.toString() + "\"" + "," +
score + "," + simplicity + "," +
pctNHT + "," + pctNHD + "," +
pctNeiT + "," + pctNeiD +
System.getProperty("line.separator");
}
public float getBeauty(PathWrapper path) {
HashSet<String> roundedPoints = path.roundPoints();
float score = 0;
for (String pt : roundedPoints) {
if (gridBeauty.containsKey(pt)) {
score = score + gridBeauty.get(pt);
}
}
score = score / roundedPoints.size();
return score;
}
    /**
     * Configures the shared MapMatching instance (Dijkstra over the fastest/car
     * weighting). Must be called after prepareGraphHopper().
     */
    public void prepMapMatcher() {
        // create MapMatching object, can and should be shared across threads
        AlgorithmOptions algoOpts = AlgorithmOptions.start().
                algorithm(Parameters.Algorithms.DIJKSTRA).
                traversalMode(hopper.getTraversalMode()).
                hints(new HintsMap().put("weighting", "fastest").put("vehicle", "car")).
                build();
        mapMatching = new MapMatching(hopper, algoOpts);
        // HMM transition beta; NOTE(review): presumably tuned empirically — confirm source.
        mapMatching.setTransitionProbabilityBeta(0.00959442);
        // GPS measurement noise sigma; 100 is large — presumably for sparse traces, confirm.
        mapMatching.setMeasurementErrorSigma(100);
    }
    /**
     * Map-matches one GPX trace onto the road network and merges the result into a
     * single PathWrapper.
     *
     * @param gpxEntries timestamped GPS points of the trace
     * @return the matched path; for a broken/unmatchable trace an empty PathWrapper
     *         (distance 0) is returned — callers test getDistance() > 0
     */
    public PathWrapper GPXToPath(ArrayList<GPXEntry> gpxEntries) {
        PathWrapper matchGHRsp = new PathWrapper();
        try {
            MatchResult mr = mapMatching.doWork(gpxEntries);
            Path path = mapMatching.calcPath(mr);
            new PathMerger().doWork(matchGHRsp, Collections.singletonList(path), new TranslationMap().doImport().getWithFallBack(Locale.US));
        }
        catch (RuntimeException e) {
            // Best-effort: report the failure and fall through with the empty wrapper.
            System.out.println("Broken GPX trace.");
            System.out.println(e.getMessage());
        }
        return matchGHRsp;
    }
    /**
     * Reads externally collected route points from {@code fin} (CSV with columns
     * ID, name, lat, lon, millis in any order), groups consecutive rows into per-route
     * GPX traces, map-matches them via getPaths(), and appends the resulting CSV
     * rows to {@code fout}.
     *
     * Rows named "alternative 2"/"alternative 3" are skipped; the platform/route
     * label is inferred from substrings of the input file name.
     *
     * @param fin  input CSV of route points
     * @param fout output CSV to append matched-route rows to
     * @throws IOException if either file cannot be read/written
     */
    public void PointsToPath(String fin, String fout) throws IOException {
        Scanner sc_in = new Scanner(new File(fin));
        // Locate each expected column by header name (order-independent).
        String[] pointsHeader = sc_in.nextLine().split(",");
        int idIdx = -1;
        int nameIdx = -1;
        int latIdx = -1;
        int lonIdx = -1;
        int timeIdx = -1;
        for (int i=0; i<pointsHeader.length; i++) {
            if (pointsHeader[i].equalsIgnoreCase("ID")) {
                idIdx = i;
            }
            else if (pointsHeader[i].equalsIgnoreCase("name")) {
                nameIdx = i;
            }
            else if (pointsHeader[i].equalsIgnoreCase("lat")) {
                latIdx = i;
            }
            else if (pointsHeader[i].equalsIgnoreCase("lon")) {
                lonIdx = i;
            }
            else if (pointsHeader[i].equalsIgnoreCase("millis")) {
                timeIdx = i;
            }
            else {
                System.out.println("Unexpected header value: " + pointsHeader[i]);
            }
        }
        // Derive the "<platform> <main|altn>" label from the input file name.
        String optimized = "";
        if (fin.indexOf("google") > -1) {
            optimized = optimized + "Goog";
        } else if (fin.indexOf("mapquest") > -1) {
            optimized = optimized + "MapQ";
        } else {
            System.out.println("Don't recognize platform: " + fin);
        }
        if (fin.indexOf("alt") > -1) {
            optimized = optimized + " altn";
        } else if (fin.indexOf("main") > -1) {
            optimized = optimized + " main";
        } else {
            System.out.println("Don't recognize route type: " + fin);
        }
        String line;
        String[] vals;
        String routeID = "";
        String prevRouteID = "";
        String name = "";
        String prevName = "";
        String label = "";
        String prevLabel = "";
        double lat;
        double lon;
        long time;
        HashMap<String, ArrayList<GPXEntry>> pointsLists = new HashMap<>();
        HashMap<String, String> routeNames = new HashMap<>();
        ArrayList<GPXEntry> pointsList = new ArrayList<>();
        // Rows for one route are assumed contiguous: accumulate points until the
        // (routeID, name) label changes, then flush the finished trace.
        while (sc_in.hasNext()) {
            line = sc_in.nextLine();
            vals = line.split(",");
            routeID = vals[idIdx];
            name = vals[nameIdx];
            // Only the main route and first alternative are kept.
            if (name.equalsIgnoreCase("alternative 2") || name.equalsIgnoreCase("alternative 3")) {
                continue;
            }
            lat = Double.valueOf(vals[latIdx]);
            lon = Double.valueOf(vals[lonIdx]);
            time = Long.valueOf(vals[timeIdx]);
            label = routeID + "|" + name;
            GPXEntry pt = new GPXEntry(lat, lon, time);
            if (label.equalsIgnoreCase(prevLabel)) {
                pointsList.add(pt);
            }
            else if (pointsList.size() > 0) {
                // Label changed: store the completed trace and start a new one.
                pointsLists.put(prevRouteID, pointsList);
                routeNames.put(prevRouteID, prevName);
                pointsList = new ArrayList<>();
                pointsList.add(pt);
            } else {
                System.out.println("First point.");
                pointsList.add(pt);
            }
            prevRouteID = routeID;
            prevName = name;
            prevLabel = label;
        }
        // Flush the final trace.
        if (pointsList.size() > 0) {
            pointsLists.put(prevRouteID, pointsList);
            routeNames.put(prevRouteID, prevName);
        }
        sc_in.close();
        ConcurrentHashMap<String, String> results = getPaths(pointsLists, routeNames, optimized);
        FileWriter sc_out = new FileWriter(fout, true);
        sc_out.write(outputheader);
        for (String result : results.values()) {
            sc_out.write(result);
        }
        sc_out.close();
    }
    /**
     * Map-matches every GPX point list to the road network in parallel.
     *
     * @param pointLists routeID -> GPX points of an externally collected route
     * @param routeNames routeID -> human-readable route name
     * @param optimized  label describing the route's platform/type (for the CSV row)
     * @return routeID -> formatted CSV row, containing only traces that matched
     *         (distance > 0); failed matches are silently omitted
     */
    public ConcurrentHashMap<String, String> getPaths(HashMap<String, ArrayList<GPXEntry>> pointLists,
                                                      HashMap<String, String> routeNames, String optimized) {
        AtomicInteger num_processed = new AtomicInteger();
        int num_routes = pointLists.size();
        Set<String> routeIDs = pointLists.keySet();
        ConcurrentHashMap<String, String> results = new ConcurrentHashMap<>();
        routeIDs.parallelStream().forEach(routeID -> {
            System.out.println("Processing: " + routeID);
            int i = num_processed.incrementAndGet();
            PathWrapper path = GPXToPath(pointLists.get(routeID));
            // Zero distance marks a failed match (see GPXToPath's catch block).
            if (path.getDistance() > 0) {
                float score = getBeauty(path);
                results.put(routeID, writeOutput(i, optimized, routeNames.get(routeID), routeID, path, score));
            }
            if (i % 50 == 0) {
                System.out.println("\t\t" + i + " of " + num_routes + " routes matched.");
            }
        }
        );
        return results;
    }
    /**
     * Builds (or loads from cache) the GraphHopper routing graph for the configured
     * OSM extract. Call after setDataSources() and before any routing/matching.
     */
    public void prepareGraphHopper() {
        // create one GraphHopper instance
        hopper = new GraphHopperOSM().forDesktop().setCHEnabled(false);
        hopper.setDataReaderFile(osmFile);
        // where to store graphhopper files?
        hopper.setGraphHopperLocation(graphFolder);
        hopper.setEncodingManager(new EncodingManager("car"));
        hopper.setBannedGridCellsFn(bannedGridCellsFn);
        // now this can take minutes if it imports or a few seconds for loading
        // of course this is dependent on the area you import
        hopper.importOrLoad();
    }
    /**
     * Routes every origin-destination pair (in parallel) under each registered
     * optimization and writes one CSV row per pair to that optimization's output
     * file, closing the writers when done. Pairs with no result are recorded as
     * "FAILURE".
     */
    public void process_routes() throws Exception {
        AtomicInteger num_processed = new AtomicInteger();
        int num_odpairs = id_to_points.size();
        ConcurrentHashMap<String, ConcurrentHashMap<String, String>> results = new ConcurrentHashMap<>();
        for (String optimization : optimizations) {
            results.put(optimization, new ConcurrentHashMap<>());
        }
        if (optimizations.contains("safety")) {
            // initialize banned edges
            // NOTE(review): the response is unused — this request appears to exist only
            // to force the "safest_fastest" weighting to load its banned-edge data
            // before the parallel loop starts; confirm against GraphHopper fork.
            GHRequest req = new GHRequest(inputPoints.get(0)[0], inputPoints.get(0)[1],
                    inputPoints.get(0)[2], inputPoints.get(0)[3]). // latFrom, lonFrom, latTo, lonTo
                    setWeighting("safest_fastest").
                    setVehicle("car").
                    setLocale(Locale.US).
                    setAlgorithm("dijkstrabi");
            GHResponse rsp = hopper.route(req);
        }
        id_to_points.parallelStream().forEach(od_id -> {
            System.out.println("Processing: " + od_id);
            // indexOf is an O(n) lookup per pair; acceptable for the list sizes used here.
            int route = id_to_points.indexOf(od_id);
            HashMap<String, String> routes = process_route(route);
            for (String optimization : optimizations) {
                results.get(optimization).put(od_id, routes.getOrDefault(optimization, "FAILURE"));
            }
            int i = num_processed.incrementAndGet();
            if (i % 50 == 0) {
                System.out.println(System.getProperty("line.separator") + i + " of " + num_odpairs + " o-d pairs processed." + System.getProperty("line.separator"));
            }
        }
        );
        // Flush all accumulated rows and close the per-optimization writers.
        for (String optimization : optimizations) {
            for (String result : results.get(optimization).values()) {
                outputFiles.get(optimization).write(result);
            }
            outputFiles.get(optimization).close();
        }
    }
public HashMap<String, String> process_route(int route) {
// Loop through origin-destination pairs, processing each one for beauty, non-beautiful matched, fastest, and simplest
float[] points;
String od_id;
HashMap<String, String> responses = new HashMap<>();
// Get Routes
points = inputPoints.get(route);
od_id = id_to_points.get(route);
GHRequest req = new GHRequest(points[0], points[1], points[2], points[3]). // latFrom, lonFrom, latTo, lonTo
setWeighting("fastest").
setVehicle("car").
setLocale(Locale.US).
setAlgorithm("ksp");
GHResponse rsp = hopper.route(req);
String defaultRow = od_id + ",main," + "\"[(" + points[0] + "," + points[1] + "),(" + points[2] + "," + points[3]
+ ")]\"," + "-1,-1,-1,[],-1,-1,-1,-1" + System.getProperty("line.separator");
// first check for errors
if (rsp.hasErrors()) {
// handle them!
System.out.println(rsp.getErrors().toString());
System.out.println(route + ": Error - skipping.");
for (String optimization : optimizations) {
responses.put(optimization, defaultRow);
}
return responses;
}
// Get All Routes (up to 10K right now)
List<PathWrapper> paths = rsp.getAll();
if (paths.size() == 0) {
System.out.println(route + ": No paths - skipping.");
for (String optimization : optimizations) {
responses.put(optimization, defaultRow);
}
return responses;
}
// Score each route on beauty to determine most beautiful
int j = 0;
float bestscore = -1000;
int routeidx = -1;
for (PathWrapper path : paths) {
float score = getBeauty(path);
if (score > bestscore) {
bestscore = score;
routeidx = j;
}
j++;
}
responses.put("beauty", writeOutput(route, "Beau", "beauty", od_id, paths.get(routeidx), bestscore));
// Simplest Route
j = 0;
bestscore = 10000;
routeidx = 0;
for (PathWrapper path : paths) {
int score = path.getSimplicity();
if (score < bestscore) {
bestscore = score;
routeidx = j;
}
j++;
}
responses.put("simple", writeOutput(route, "Simp", "simple", od_id, paths.get(routeidx), getBeauty(paths.get(routeidx))));
// Fastest Route
PathWrapper bestPath = paths.get(0);
responses.put("fast", writeOutput(route, "Fast", "fastest", od_id, bestPath, getBeauty(bestPath)));
// Safety Route
req = new GHRequest(points[0], points[1], points[2], points[3]). // latFrom, lonFrom, latTo, lonTo
setWeighting("safest_fastest").
setVehicle("car").
setLocale(Locale.US).
setAlgorithm("dijkstrabi");
rsp = hopper.route(req);
// first check for errors
if (rsp.hasErrors()) {
// handle them!
System.out.println(rsp.getErrors().toString());
System.out.println(route + ": Error - skipping.");
responses.put("safety", defaultRow);
return responses;
}
// Get paths (should be one)
paths = rsp.getAll();
if (paths.size() == 0) {
System.out.println(route + ": No paths - skipping.");
responses.put("safety", defaultRow);
return responses;
}
// Fastest Safest Route
bestPath = paths.get(0);
responses.put("safety", writeOutput(route, "Safe", "safe-fastest", od_id, bestPath, getBeauty(bestPath)));
return responses;
}
public static void main(String[] args) throws Exception {
// PBFs from: https://mapzen.com/data/metro-extracts/
// For setting # of cores to use
//System.setProperty("java.util.concurrent.ForkJoinPool.common.parallelism", "12");
String city = "sf // args[0]; // sf, nyc, lon, man
String odtype = "rand"; // args[1]; // rand, taxi
boolean matchexternal = false;
boolean getghroutes = true;
AlternativeRoutingExternalities ksp = new AlternativeRoutingExternalities(city, odtype);
if (matchexternal) {
ksp.setDataSources();
ksp.getGridValues();
ksp.prepareGraphHopper();
ksp.prepMapMatcher();
String inputfolder = "../data/intermediate/";
String outputfolder = "../data/routes/";
ArrayList<String> platforms = new ArrayList<>();
platforms.add("google");
platforms.add("mapquest");
ArrayList<String> conditions = new ArrayList<>();
conditions.add("traffic");
ArrayList<String> routetypes = new ArrayList<>();
routetypes.add("main");
for (String platform : platforms) {
for (String condition : conditions) {
for (String routetype : routetypes) {
ksp.PointsToPath(inputfolder + city + "_" + odtype + "_" + platform + "_" + condition +
"_routes_" + routetype + "_gpx.csv", outputfolder + city + "_" + odtype + "_" +
platform + "_" + condition + "_routes_" + routetype + "_ghenhanced_sigma100_transitionDefault.csv");
}
}
}
}
if (getghroutes) {
ksp.setDataSources();
ksp.getGridValues();
ksp.prepareGraphHopper();
ksp.setODPairs();
ksp.process_routes();
}
}
}
| |
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.syntax;
import com.google.common.base.Preconditions;
import com.google.devtools.build.lib.events.Event;
import com.google.devtools.build.lib.events.EventHandler;
import com.google.devtools.build.lib.events.Location;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import javax.annotation.Nullable;
/**
 * A class for doing static checks on files, before evaluating them.
 *
 * <p>Performs name resolution and structural validation of a parsed AST against a global
 * {@link Environment}: undefined identifiers, assignment to read-only (top-level) names,
 * misplaced {@code return}/{@code break}/{@code continue}, top-level {@code if}, and
 * (optionally) {@code load()} statements appearing after other statements.
 */
public final class ValidationEnvironment extends SyntaxTreeVisitor {
  /** A lexical scope: the names declared in it plus a link to the enclosing scope. */
  private static class Block {
    private final Set<String> variables = new HashSet<>();
    private final Set<String> readOnlyVariables = new HashSet<>();
    @Nullable private final Block parent;
    Block(@Nullable Block parent) {
      this.parent = parent;
    }
  }
  /**
   * We use an unchecked exception around EvalException because the SyntaxTreeVisitor doesn't let
   * visit methods throw checked exceptions. We might change that later.
   */
  private static class ValidationException extends RuntimeException {
    EvalException exception;
    ValidationException(EvalException e) {
      exception = e;
    }
    ValidationException(Location location, String message, String url) {
      exception = new EvalException(location, message, url);
    }
    ValidationException(Location location, String message) {
      exception = new EvalException(location, message);
    }
  }
  private final SkylarkSemantics semantics;
  // Innermost lexical block; its parent chain ends at the global block.
  private Block block;
  // Depth of nested for-loops; flow statements (break/continue) are legal only when > 0.
  private int loopCount;
  /** Create a ValidationEnvironment for a given global Environment. */
  ValidationEnvironment(Environment env) {
    Preconditions.checkArgument(env.isGlobal());
    block = new Block(null);
    Set<String> builtinVariables = env.getVariableNames();
    block.variables.addAll(builtinVariables);
    // Builtins may not be shadowed or reassigned at the top level.
    block.readOnlyVariables.addAll(builtinVariables);
    semantics = env.getSemantics();
  }
  @Override
  public void visit(LoadStatement node) {
    // Each loaded symbol becomes a top-level binding (read-only, via declare()).
    for (Identifier symbol : node.getSymbols()) {
      declare(symbol.getName(), node.getLocation());
    }
  }
  @Override
  public void visit(Identifier node) {
    // Every identifier use must resolve in the current scope chain.
    if (!hasSymbolInEnvironment(node.getName())) {
      throw new ValidationException(node.createInvalidIdentifierException(getAllSymbols()));
    }
  }
  // Validates an assignment target: identifiers are declared, index expressions are
  // visited (object and key must resolve), list/tuple targets recurse element-wise.
  private void validateLValue(Location loc, Expression expr) {
    if (expr instanceof Identifier) {
      declare(((Identifier) expr).getName(), loc);
    } else if (expr instanceof IndexExpression) {
      visit(expr);
    } else if (expr instanceof ListLiteral) {
      for (Expression e : ((ListLiteral) expr).getElements()) {
        validateLValue(loc, e);
      }
    } else {
      throw new ValidationException(loc, "cannot assign to '" + expr + "'");
    }
  }
  @Override
  public void visit(LValue node) {
    validateLValue(node.getLocation(), node.getExpression());
  }
  @Override
  public void visit(ReturnStatement node) {
    if (isTopLevel()) {
      throw new ValidationException(
          node.getLocation(), "return statements must be inside a function");
    }
    super.visit(node);
  }
  @Override
  public void visit(ForStatement node) {
    // Track loop nesting so FlowStatement can verify it is inside a loop.
    loopCount++;
    super.visit(node);
    Preconditions.checkState(loopCount > 0);
    loopCount--;
  }
  @Override
  public void visit(FlowStatement node) {
    if (loopCount <= 0) {
      throw new ValidationException(
          node.getLocation(), node.getKind().getName() + " statement must be inside a for loop");
    }
    super.visit(node);
  }
  @Override
  public void visit(DotExpression node) {
    visit(node.getObject());
    // Do not visit the field.
  }
  @Override
  public void visit(AbstractComprehension node) {
    // Comprehension variables live in their own lexical scope.
    openBlock();
    super.visit(node);
    closeBlock();
  }
  @Override
  public void visit(FunctionDefStatement node) {
    // Default values are validated in the enclosing scope, before the function's own
    // block is opened.
    for (Parameter<Expression, Expression> param : node.getParameters()) {
      if (param.isOptional()) {
        visit(param.getDefaultValue());
      }
    }
    openBlock();
    for (Parameter<Expression, Expression> param : node.getParameters()) {
      if (param.hasName()) {
        declare(param.getName(), param.getLocation());
      }
    }
    visitAll(node.getStatements());
    closeBlock();
  }
  @Override
  public void visit(IfStatement node) {
    if (isTopLevel()) {
      throw new ValidationException(
          node.getLocation(),
          "if statements are not allowed at the top level. You may move it inside a function "
              + "or use an if expression (x if condition else y).");
    }
    super.visit(node);
  }
  @Override
  public void visit(AugmentedAssignmentStatement node) {
    if (node.getLValue().getExpression() instanceof ListLiteral) {
      throw new ValidationException(
          node.getLocation(), "cannot perform augmented assignment on a list or tuple expression");
    }
    // Other bad cases are handled when visiting the LValue node.
    super.visit(node);
  }
  /** Returns true if the current block is the top level i.e. has no parent. */
  private boolean isTopLevel() {
    return block.parent == null;
  }
  /** Declare a variable and add it to the environment. */
  private void declare(String varname, Location location) {
    if (block.readOnlyVariables.contains(varname)) {
      throw new ValidationException(
          location,
          String.format("Variable %s is read only", varname),
          "https://bazel.build/versions/master/docs/skylark/errors/read-only-variable.html");
    }
    if (isTopLevel()) { // top-level values are immutable
      block.readOnlyVariables.add(varname);
    }
    block.variables.add(varname);
  }
  /** Returns true if the symbol exists in the validation environment (or a parent). */
  private boolean hasSymbolInEnvironment(String varname) {
    for (Block b = block; b != null; b = b.parent) {
      if (b.variables.contains(varname)) {
        return true;
      }
    }
    return false;
  }
  /** Returns the set of all accessible symbols (both local and global) */
  private Set<String> getAllSymbols() {
    Set<String> all = new HashSet<>();
    for (Block b = block; b != null; b = b.parent) {
      all.addAll(b.variables);
    }
    return all;
  }
  /** Throws ValidationException if a load() appears after another kind of statement. */
  private static void checkLoadAfterStatement(List<Statement> statements) {
    // Location of the first non-load, non-docstring statement seen so far; null until found.
    Location firstStatement = null;
    for (Statement statement : statements) {
      // Ignore string literals (e.g. docstrings).
      if (statement instanceof ExpressionStatement
          && ((ExpressionStatement) statement).getExpression() instanceof StringLiteral) {
        continue;
      }
      if (statement instanceof LoadStatement) {
        if (firstStatement == null) {
          continue;
        }
        throw new ValidationException(
            statement.getLocation(),
            "load() statements must be called before any other statement. "
                + "First non-load() statement appears at "
                + firstStatement
                + ". Use --incompatible_bzl_disallow_load_after_statement=false to temporarily "
                + "disable this check.");
      }
      if (firstStatement == null) {
        firstStatement = statement.getLocation();
      }
    }
  }
  /** Validates the AST and runs static checks. */
  private void validateAst(List<Statement> statements) {
    // Check that load() statements are on top.
    if (semantics.incompatibleBzlDisallowLoadAfterStatement()) {
      checkLoadAfterStatement(statements);
    }
    // Add every function in the environment before validating. This is
    // necessary because functions may call other functions defined
    // later in the file.
    for (Statement statement : statements) {
      if (statement instanceof FunctionDefStatement) {
        FunctionDefStatement fct = (FunctionDefStatement) statement;
        declare(fct.getIdentifier().getName(), fct.getLocation());
      }
    }
    this.visitAll(statements);
  }
  public static void validateAst(Environment env, List<Statement> statements) throws EvalException {
    try {
      ValidationEnvironment venv = new ValidationEnvironment(env);
      venv.validateAst(statements);
      // Check that no closeBlock was forgotten.
      Preconditions.checkState(venv.block.parent == null);
    } catch (ValidationException e) {
      // Unwrap to the checked exception callers expect.
      throw e.exception;
    }
  }
  public static boolean validateAst(
      Environment env, List<Statement> statements, EventHandler eventHandler) {
    try {
      validateAst(env, statements);
      return true;
    } catch (EvalException e) {
      // Incomplete-AST errors were already reported upstream; avoid double-reporting.
      if (!e.isDueToIncompleteAST()) {
        eventHandler.handle(Event.error(e.getLocation(), e.getMessage()));
      }
      return false;
    }
  }
  /** Open a new lexical block that will contain the future declarations. */
  private void openBlock() {
    block = new Block(block);
  }
  /** Close a lexical block (and lose all declarations it contained). */
  private void closeBlock() {
    block = Preconditions.checkNotNull(block.parent);
  }
  /**
   * Checks that the AST is using the restricted syntax.
   *
   * <p>Restricted syntax is used by Bazel BUILD files. It forbids function definitions, *args, and
   * **kwargs. This creates a better separation between code and data.
   */
  public static boolean checkBuildSyntax(
      List<Statement> statements, final EventHandler eventHandler) {
    // Wrap the boolean inside an array so that the inner class can modify it.
    final boolean[] success = new boolean[] {true};
    // TODO(laurentlb): Merge with the visitor above when possible (i.e. when BUILD files use it).
    SyntaxTreeVisitor checker =
        new SyntaxTreeVisitor() {
          private void error(ASTNode node, String message) {
            eventHandler.handle(Event.error(node.getLocation(), message));
            success[0] = false;
          }
          @Override
          public void visit(FunctionDefStatement node) {
            error(
                node,
                "function definitions are not allowed in BUILD files. You may move the function to "
                    + "a .bzl file and load it.");
          }
          @Override
          public void visit(ForStatement node) {
            error(
                node,
                "for statements are not allowed in BUILD files. You may inline the loop, move it "
                    + "to a function definition (in a .bzl file), or as a last resort use a list "
                    + "comprehension.");
          }
          @Override
          public void visit(IfStatement node) {
            error(
                node,
                "if statements are not allowed in BUILD files. You may move conditional logic to a "
                    + "function definition (in a .bzl file), or for simple cases use an if "
                    + "expression.");
          }
          @Override
          public void visit(FuncallExpression node) {
            for (Argument.Passed arg : node.getArguments()) {
              if (arg.isStarStar()) {
                error(
                    node,
                    "**kwargs arguments are not allowed in BUILD files. Pass the arguments in "
                        + "explicitly.");
              } else if (arg.isStar()) {
                error(
                    node,
                    "*args arguments are not allowed in BUILD files. Pass the arguments in "
                        + "explicitly.");
              }
            }
          }
        };
    checker.visitAll(statements);
    return success[0];
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.connectors.hive;
import org.apache.flink.api.common.io.LocatableInputSplitAssigner;
import org.apache.flink.api.common.io.statistics.BaseStatistics;
import org.apache.flink.api.java.hadoop.common.HadoopInputFormatCommonBase;
import org.apache.flink.api.java.hadoop.mapred.wrapper.HadoopDummyReporter;
import org.apache.flink.core.io.InputSplitAssigner;
import org.apache.flink.table.catalog.CatalogTable;
import org.apache.flink.table.catalog.hive.util.HiveTableUtil;
import org.apache.flink.table.functions.hive.conversion.HiveInspectors;
import org.apache.flink.types.Row;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
import org.apache.hadoop.hive.serde2.Deserializer;
import org.apache.hadoop.hive.serde2.SerDeUtils;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapred.InputFormat;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.JobConfigurable;
import org.apache.hadoop.mapred.RecordReader;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.ReflectionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import java.util.stream.IntStream;
import static org.apache.flink.util.Preconditions.checkNotNull;
import static org.apache.hadoop.mapreduce.lib.input.FileInputFormat.INPUT_DIR;
/**
* The HiveTableInputFormat are inspired by the HCatInputFormat and HadoopInputFormatBase.
* It's used to read from hive partition/non-partition table.
*/
public class HiveTableInputFormat extends HadoopInputFormatCommonBase<Row, HiveTableInputSplit> {
private static final long serialVersionUID = 6351448428766433164L;
private static Logger logger = LoggerFactory.getLogger(HiveTableInputFormat.class);
private JobConf jobConf;
protected transient Writable key;
protected transient Writable value;
private transient RecordReader<Writable, Writable> recordReader;
protected transient boolean fetched = false;
protected transient boolean hasNext;
//Necessary info to init deserializer
private List<String> partitionColNames;
//For non-partition hive table, partitions only contains one partition which partitionValues is empty.
private List<HiveTablePartition> partitions;
private transient Deserializer deserializer;
//Hive StructField list contain all related info for specific serde.
private transient List<? extends StructField> structFields;
//StructObjectInspector in hive helps us to look into the internal structure of a struct object.
private transient StructObjectInspector structObjectInspector;
private transient InputFormat mapredInputFormat;
private transient HiveTablePartition hiveTablePartition;
// indices of fields to be returned, with projection applied (if any)
// TODO: push projection into underlying input format that supports it
private int[] fields;
// Remember whether a row instance is reused. No need to set partition fields for reused rows
private transient boolean rowReused;
public HiveTableInputFormat(
JobConf jobConf,
CatalogTable catalogTable,
List<HiveTablePartition> partitions,
int[] projectedFields) {
super(jobConf.getCredentials());
checkNotNull(catalogTable, "catalogTable can not be null.");
this.partitions = checkNotNull(partitions, "partitions can not be null.");
this.jobConf = new JobConf(jobConf);
this.partitionColNames = catalogTable.getPartitionKeys();
int rowArity = catalogTable.getSchema().getFieldCount();
fields = projectedFields != null ? projectedFields : IntStream.range(0, rowArity).toArray();
}
@Override
public void open(HiveTableInputSplit split) throws IOException {
this.hiveTablePartition = split.getHiveTablePartition();
StorageDescriptor sd = hiveTablePartition.getStorageDescriptor();
jobConf.set(INPUT_DIR, sd.getLocation());
try {
this.mapredInputFormat = (InputFormat)
Class.forName(sd.getInputFormat(), true, Thread.currentThread().getContextClassLoader()).newInstance();
} catch (Exception e) {
throw new FlinkHiveException("Unable to instantiate the hadoop input format", e);
}
ReflectionUtils.setConf(mapredInputFormat, jobConf);
if (this.mapredInputFormat instanceof Configurable) {
((Configurable) this.mapredInputFormat).setConf(this.jobConf);
} else if (this.mapredInputFormat instanceof JobConfigurable) {
((JobConfigurable) this.mapredInputFormat).configure(this.jobConf);
}
this.recordReader = this.mapredInputFormat.getRecordReader(split.getHadoopInputSplit(),
jobConf, new HadoopDummyReporter());
if (this.recordReader instanceof Configurable) {
((Configurable) this.recordReader).setConf(jobConf);
}
key = this.recordReader.createKey();
value = this.recordReader.createValue();
this.fetched = false;
try {
deserializer = (Deserializer) Class.forName(sd.getSerdeInfo().getSerializationLib()).newInstance();
Configuration conf = new Configuration();
//properties are used to initialize hive Deserializer properly.
Properties properties = HiveTableUtil.createPropertiesFromStorageDescriptor(sd);
SerDeUtils.initializeSerDe(deserializer, conf, properties, null);
structObjectInspector = (StructObjectInspector) deserializer.getObjectInspector();
structFields = structObjectInspector.getAllStructFieldRefs();
} catch (Exception e) {
throw new FlinkHiveException("Error happens when deserialize from storage file.", e);
}
rowReused = false;
}
@Override
public HiveTableInputSplit[] createInputSplits(int minNumSplits)
throws IOException {
List<HiveTableInputSplit> hiveSplits = new ArrayList<>();
int splitNum = 0;
for (HiveTablePartition partition : partitions) {
StorageDescriptor sd = partition.getStorageDescriptor();
InputFormat format;
try {
format = (InputFormat)
Class.forName(sd.getInputFormat(), true, Thread.currentThread().getContextClassLoader()).newInstance();
} catch (Exception e) {
throw new FlinkHiveException("Unable to instantiate the hadoop input format", e);
}
ReflectionUtils.setConf(format, jobConf);
jobConf.set(INPUT_DIR, sd.getLocation());
//TODO: we should consider how to calculate the splits according to minNumSplits in the future.
org.apache.hadoop.mapred.InputSplit[] splitArray = format.getSplits(jobConf, minNumSplits);
for (int i = 0; i < splitArray.length; i++) {
hiveSplits.add(new HiveTableInputSplit(splitNum++, splitArray[i], jobConf, partition));
}
}
return hiveSplits.toArray(new HiveTableInputSplit[hiveSplits.size()]);
}
@Override
public void configure(org.apache.flink.configuration.Configuration parameters) {
}
@Override
public BaseStatistics getStatistics(BaseStatistics cachedStats) throws IOException {
// no statistics available
return null;
}
@Override
public InputSplitAssigner getInputSplitAssigner(HiveTableInputSplit[] inputSplits) {
return new LocatableInputSplitAssigner(inputSplits);
}
@Override
public boolean reachedEnd() throws IOException {
if (!fetched) {
fetchNext();
}
return !hasNext;
}
@Override
public void close() throws IOException {
if (this.recordReader != null) {
this.recordReader.close();
this.recordReader = null;
}
}
protected void fetchNext() throws IOException {
hasNext = this.recordReader.next(key, value);
fetched = true;
}
@Override
public Row nextRecord(Row reuse) throws IOException {
if (reachedEnd()) {
return null;
}
try {
//Use HiveDeserializer to deserialize an object out of a Writable blob
Object hiveRowStruct = deserializer.deserialize(value);
for (int i = 0; i < fields.length; i++) {
// set non-partition columns
if (fields[i] < structFields.size()) {
StructField structField = structFields.get(fields[i]);
Object object = HiveInspectors.toFlinkObject(structField.getFieldObjectInspector(),
structObjectInspector.getStructFieldData(hiveRowStruct, structField));
reuse.setField(i, object);
}
}
} catch (Exception e) {
logger.error("Error happens when converting hive data type to flink data type.");
throw new FlinkHiveException(e);
}
if (!rowReused) {
// set partition columns
if (!partitionColNames.isEmpty()) {
for (int i = 0; i < fields.length; i++) {
if (fields[i] >= structFields.size()) {
String partition = partitionColNames.get(fields[i] - structFields.size());
reuse.setField(i, hiveTablePartition.getPartitionSpec().get(partition));
}
}
}
rowReused = true;
}
this.fetched = false;
return reuse;
}
// --------------------------------------------------------------------------------------------
// Custom serialization methods
// --------------------------------------------------------------------------------------------
private void writeObject(ObjectOutputStream out) throws IOException {
super.write(out);
jobConf.write(out);
out.writeObject(partitionColNames);
out.writeObject(partitions);
out.writeObject(fields);
}
@SuppressWarnings("unchecked")
private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
super.read(in);
if (jobConf == null) {
jobConf = new JobConf();
}
jobConf.readFields(in);
jobConf.getCredentials().addAll(this.credentials);
Credentials currentUserCreds = getCredentialsFromUGI(UserGroupInformation.getCurrentUser());
if (currentUserCreds != null) {
jobConf.getCredentials().addAll(currentUserCreds);
}
partitionColNames = (List<String>) in.readObject();
partitions = (List<HiveTablePartition>) in.readObject();
fields = (int[]) in.readObject();
}
}
| |
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.smi.travel.datalayer.report.model;
import java.util.Date;
/**
 * Row model for the invoice summary report.
 *
 * <p>Plain mutable JavaBean: every field is exposed through a getter/setter pair and no
 * validation is performed. Monetary values are nullable {@link Double}s; dates are kept as
 * pre-formatted strings. Large blocks of commented-out dead code (old Date-based invfrom/invto
 * and sum* accessors) were removed; the live accessors are unchanged.
 *
 * @author Kanokporn
 */
public class InvoiceSummary {
    private int no;
    private String invfrom;
    private String invto;
    private String headertype;
    private String invtype;
    private String department;
    private String invno;
    private String invdate;
    private String to;
    private String invname;
    private String termpay;
    private String detail;
    private Double gross;
    private Double vat;
    private Double amount;
    private String amountcur;
    private String staff;
    private String status;
    private String invdepartment;
    private String username;
    private String systemdate;
    private Double profit;
    private Double amountlocal;
    private Double costlocal;

    public int getNo() {
        return no;
    }

    public void setNo(int no) {
        this.no = no;
    }

    public String getInvfrom() {
        return invfrom;
    }

    public void setInvfrom(String invfrom) {
        this.invfrom = invfrom;
    }

    public String getInvto() {
        return invto;
    }

    public void setInvto(String invto) {
        this.invto = invto;
    }

    public String getHeadertype() {
        return headertype;
    }

    public void setHeadertype(String headertype) {
        this.headertype = headertype;
    }

    public String getInvtype() {
        return invtype;
    }

    public void setInvtype(String invtype) {
        this.invtype = invtype;
    }

    public String getDepartment() {
        return department;
    }

    public void setDepartment(String department) {
        this.department = department;
    }

    public String getInvno() {
        return invno;
    }

    public void setInvno(String invno) {
        this.invno = invno;
    }

    public String getInvdate() {
        return invdate;
    }

    public void setInvdate(String invdate) {
        this.invdate = invdate;
    }

    public String getTo() {
        return to;
    }

    public void setTo(String to) {
        this.to = to;
    }

    public String getInvname() {
        return invname;
    }

    public void setInvname(String invname) {
        this.invname = invname;
    }

    public String getTermpay() {
        return termpay;
    }

    public void setTermpay(String termpay) {
        this.termpay = termpay;
    }

    public String getDetail() {
        return detail;
    }

    public void setDetail(String detail) {
        this.detail = detail;
    }

    public Double getGross() {
        return gross;
    }

    public void setGross(Double gross) {
        this.gross = gross;
    }

    public Double getVat() {
        return vat;
    }

    public void setVat(Double vat) {
        this.vat = vat;
    }

    public Double getAmount() {
        return amount;
    }

    public void setAmount(Double amount) {
        this.amount = amount;
    }

    public String getAmountcur() {
        return amountcur;
    }

    public void setAmountcur(String amountcur) {
        this.amountcur = amountcur;
    }

    public String getStaff() {
        return staff;
    }

    public void setStaff(String staff) {
        this.staff = staff;
    }

    public String getStatus() {
        return status;
    }

    public void setStatus(String status) {
        this.status = status;
    }

    public String getInvdepartment() {
        return invdepartment;
    }

    public void setInvdepartment(String invdepartment) {
        this.invdepartment = invdepartment;
    }

    public String getUsername() {
        return username;
    }

    public void setUsername(String username) {
        this.username = username;
    }

    public String getSystemdate() {
        return systemdate;
    }

    public void setSystemdate(String systemdate) {
        this.systemdate = systemdate;
    }

    public Double getProfit() {
        return profit;
    }

    public void setProfit(Double profit) {
        this.profit = profit;
    }

    public Double getAmountlocal() {
        return amountlocal;
    }

    public void setAmountlocal(Double amountlocal) {
        this.amountlocal = amountlocal;
    }

    public Double getCostlocal() {
        return costlocal;
    }

    public void setCostlocal(Double costlocal) {
        this.costlocal = costlocal;
    }
}
| |
package jadx.gui.device.debugger;
import java.lang.reflect.Type;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.AbstractMap.SimpleEntry;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.reflect.TypeToken;
import jadx.core.dex.nodes.ClassNode;
import jadx.gui.device.debugger.smali.Smali;
import jadx.gui.treemodel.JClass;
/**
 * Persists and restores source-level breakpoints for the debugger, backed by a
 * breakpoints.json file in the project directory. All state is static.
 */
public class BreakpointManager {
	private static final Logger LOG = LoggerFactory.getLogger(BreakpointManager.class);
	// Lazily created JSON (de)serializer, shared by load/save.
	private static Gson gson = null;
	// Gson type token for Map<class full name, breakpoints declared in that class>.
	private static final Type TYPE_TOKEN = new TypeToken<Map<String, List<FileBreakpoint>>>() {
	}.getType();
	// In-memory breakpoint map: class full name -> breakpoints. Starts as the immutable
	// empty map and is replaced by a mutable HashMap on first write (see set()).
	private static Map<String, List<FileBreakpoint>> bpm;
	// Location of breakpoints.json inside the current project directory.
	private static Path savePath;
	private static DebugController debugController;
	private static Map<String, Entry<ClassNode, Listener>> listeners = Collections.emptyMap(); // class full name as key
public static void saveAndExit() {
if (bpm != null) {
if (bpm.size() == 0 && !Files.exists(savePath)) {
return; // user didn't do anything with breakpoint so don't output breakpoint file.
}
sync();
bpm = null;
savePath = null;
listeners = Collections.emptyMap();
}
}
public static void init(Path dirPath) {
if (gson == null) {
gson = new GsonBuilder()
.setPrettyPrinting()
.create();
}
savePath = dirPath.resolve("breakpoints.json");
if (Files.exists(savePath)) {
try {
byte[] bytes = Files.readAllBytes(savePath);
bpm = gson.fromJson(new String(bytes, StandardCharsets.UTF_8), TYPE_TOKEN);
} catch (Exception e) {
LOG.error("Failed to read breakpoints config: {}", savePath, e);
}
}
if (bpm == null) {
bpm = Collections.emptyMap();
}
}
	/**
	 * Registers a breakpoint listener for a top-level class, keyed by its raw full name.
	 *
	 * @param topCls   top-level class whose breakpoints should be observed
	 * @param listener When breakpoint is failed to set during debugging, this listener will be called.
	 */
	public static void addListener(JClass topCls, Listener listener) {
		// Identity comparison against the Collections.emptyMap() singleton: the shared
		// immutable placeholder is lazily replaced with a mutable HashMap on first use.
		if (listeners == Collections.EMPTY_MAP) {
			listeners = new HashMap<>();
		}
		listeners.put(DbgUtils.getRawFullName(topCls),
				new SimpleEntry<>(topCls.getCls().getClassNode(), listener));
	}
public static void removeListener(JClass topCls) {
listeners.remove(DbgUtils.getRawFullName(topCls));
}
public static List<Integer> getPositions(JClass topCls) {
List<FileBreakpoint> bps = bpm.get(DbgUtils.getRawFullName(topCls));
if (bps != null && bps.size() > 0) {
Smali smali = DbgUtils.getSmali(topCls.getCls().getClassNode());
if (smali != null) {
List<Integer> posList = new ArrayList<>(bps.size());
for (FileBreakpoint bp : bps) {
int pos = smali.getInsnPosByCodeOffset(bp.getFullMthRawID(), bp.codeOffset);
if (pos > -1) {
posList.add(pos);
}
}
return posList;
}
}
return Collections.emptyList();
}
public static boolean set(JClass topCls, int line) {
Entry<String, Integer> lineInfo = DbgUtils.getCodeOffsetInfoByLine(topCls, line);
if (lineInfo != null) {
if (bpm.isEmpty()) {
bpm = new HashMap<>();
}
String name = DbgUtils.getRawFullName(topCls);
List<FileBreakpoint> list = bpm.computeIfAbsent(name, k -> new ArrayList<>());
FileBreakpoint bkp = list.stream()
.filter(bp -> bp.codeOffset == lineInfo.getValue() && bp.getFullMthRawID().equals(lineInfo.getKey()))
.findFirst()
.orElse(null);
boolean ok = true;
if (bkp == null) {
String[] sigs = DbgUtils.sepClassAndMthSig(lineInfo.getKey());
if (sigs != null && sigs.length == 2) {
FileBreakpoint bp = new FileBreakpoint(sigs[0], sigs[1], lineInfo.getValue());
list.add(bp);
if (debugController != null) {
ok = debugController.setBreakpoint(bp);
}
}
}
return ok;
}
return false;
}
public static boolean remove(JClass topCls, int line) {
Entry<String, Integer> lineInfo = DbgUtils.getCodeOffsetInfoByLine(topCls, line);
if (lineInfo != null) {
List<FileBreakpoint> bps = bpm.get(DbgUtils.getRawFullName(topCls));
for (Iterator<FileBreakpoint> it = bps.iterator(); it.hasNext();) {
FileBreakpoint bp = it.next();
if (bp.codeOffset == lineInfo.getValue() && bp.getFullMthRawID().equals(lineInfo.getKey())) {
it.remove();
if (debugController != null) {
return debugController.removeBreakpoint(bp);
}
break;
}
}
}
return true;
}
private static void sync() {
try {
Files.write(savePath, gson.toJson(bpm).getBytes(StandardCharsets.UTF_8));
} catch (Exception e) {
LOG.error("Failed to write breakpoints config: {}", savePath, e);
}
}
public interface Listener {
void breakpointDisabled(int codeOffset);
}
protected static class FileBreakpoint {
final String cls;
final String mth;
final long codeOffset;
private FileBreakpoint(String cls, String mth, long codeOffset) {
this.cls = cls;
this.mth = mth;
this.codeOffset = codeOffset;
}
protected String getFullMthRawID() {
return cls + "." + mth;
}
@Override
public int hashCode() {
return Objects.hash(codeOffset, cls, mth);
}
@Override
public boolean equals(Object obj) {
if (obj instanceof FileBreakpoint) {
if (obj == this) {
return true;
}
FileBreakpoint fbp = (FileBreakpoint) obj;
return fbp.codeOffset == codeOffset && fbp.cls.equals(cls) && fbp.mth.equals(mth);
}
return false;
}
}
protected static List<FileBreakpoint> getAllBreakpoints() {
List<FileBreakpoint> bpList = new ArrayList<>();
for (Entry<String, List<FileBreakpoint>> entry : bpm.entrySet()) {
bpList.addAll(entry.getValue());
}
return bpList;
}
protected static void failBreakpoint(FileBreakpoint bp) {
Entry<ClassNode, Listener> entry = listeners.get(bp.cls);
if (entry != null) {
int pos = DbgUtils.getSmali(entry.getKey())
.getInsnPosByCodeOffset(bp.getFullMthRawID(), bp.codeOffset);
pos = Math.max(0, pos);
entry.getValue().breakpointDisabled(pos);
}
}
protected static void setDebugController(DebugController controller) {
debugController = controller;
}
}
| |
package pl.tecna.gwt.connectors.client.drag;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import pl.tecna.gwt.connectors.client.util.WidgetUtils;
import com.allen_sauer.gwt.dnd.client.AbstractDragController;
import com.allen_sauer.gwt.dnd.client.DragContext;
import com.allen_sauer.gwt.dnd.client.PickupDragController;
import com.allen_sauer.gwt.dnd.client.VetoDragException;
import com.allen_sauer.gwt.dnd.client.drop.BoundaryDropController;
import com.allen_sauer.gwt.dnd.client.drop.DropController;
import com.allen_sauer.gwt.dnd.client.util.Area;
import com.allen_sauer.gwt.dnd.client.util.CoordinateLocation;
import com.allen_sauer.gwt.dnd.client.util.DOMUtil;
import com.allen_sauer.gwt.dnd.client.util.Location;
import com.allen_sauer.gwt.dnd.client.util.WidgetArea;
import com.allen_sauer.gwt.dnd.client.util.WidgetLocation;
import com.google.gwt.core.client.GWT;
import com.google.gwt.user.client.DOM;
import com.google.gwt.user.client.Element;
import com.google.gwt.user.client.ui.AbsolutePanel;
import com.google.gwt.user.client.ui.FlowPanel;
import com.google.gwt.user.client.ui.HorizontalPanel;
import com.google.gwt.user.client.ui.Panel;
import com.google.gwt.user.client.ui.SimplePanel;
import com.google.gwt.user.client.ui.VerticalPanel;
import com.google.gwt.user.client.ui.Widget;
/**
* DragController used for drag-and-drop operations where a draggable widget or drag proxy is
* temporarily picked up and dragged around the boundary panel. Be sure to register a
* {@link DropController} for each drop target.
*
* @see #registerDropController(DropController)
*/
public class AxisXYDragController extends AbstractDragController {

  /**
   * Keeps the registered {@link DropController}s and resolves which drop target lies under a
   * given document coordinate during a drag operation.
   */
  class DropControllerCollection {

    /** Pairs a drop controller with a cached {@link Area} of its (attached) target widget. */
    protected class Candidate implements Comparable<Candidate> {
      private final DropController dropController;
      private final Area targetArea;

      Candidate(DropController dropController) {
        this.dropController = dropController;
        Widget target = dropController.getDropTarget();
        // Target areas are cached at drag start, so a detached target would yield stale geometry.
        if (!target.isAttached()) {
          throw new IllegalStateException(
              "Unattached drop target. You must call DragController#unregisterDropController for all drop targets not attached to the DOM.");
        }
        targetArea = new WidgetArea(target, null);
      }

      // Orders candidates by DOM nesting: an ancestor sorts before its descendant, so that
      // scanning the sorted array from the end finds the deepest intersecting target first.
      public int compareTo(Candidate other) {
        Element myElement = getDropTarget().getElement();
        Element otherElement = other.getDropTarget().getElement();
        if (myElement == otherElement) {
          return 0;
        } else if (DOM.isOrHasChild(myElement, otherElement)) {
          return -1;
        } else if (DOM.isOrHasChild(otherElement, myElement)) {
          return 1;
        } else {
          return 0;
        }
      }

      // equals/hashCode are intentionally unsupported: candidates are transient sort keys only.
      @Override
      public boolean equals(Object other) {
        throw new RuntimeException("hash code not implemented");
      }

      @Override
      public int hashCode() {
        throw new RuntimeException("hash code not implemented");
      }

      DropController getDropController() {
        return dropController;
      }

      Widget getDropTarget() {
        return dropController.getDropTarget();
      }

      Area getTargetArea() {
        return targetArea;
      }
    }

    private final ArrayList<DropController> dropControllerList;
    // Cache built by resetCache(); sorted ancestors-first (see Candidate#compareTo).
    private Candidate[] sortedCandidates = null;

    /**
     * Default constructor.
     */
    DropControllerCollection(ArrayList<DropController> dropControllerList) {
      this.dropControllerList = dropControllerList;
    }

    /**
     * Determines which DropController represents the deepest DOM descendant drop target located at
     * the provided location <code>(x, y)</code>.
     *
     * @param x offset left relative to document body
     * @param y offset top relative to document body
     * @return a drop controller for the intersecting drop target or <code>null</code> if none are
     *         applicable
     */
    DropController getIntersectDropController(int x, int y) {
      Location location = new CoordinateLocation(x, y);
      // Iterate backwards so deeper (descendant) targets take precedence over ancestors.
      for (int i = sortedCandidates.length - 1; i >= 0; i--) {
        Candidate candidate = sortedCandidates[i];
        Area targetArea = candidate.getTargetArea();
        if (targetArea.intersects(location)) {
          return candidate.getDropController();
        }
      }
      return null;
    }

    /**
     * Cache a list of eligible drop controllers, sorted by relative DOM positions of their
     * respective drop targets. Called at the beginning of each drag operation, or whenever drop
     * target eligibility has changed while dragging.
     *
     * @param boundaryPanel boundary area for drop target eligibility considerations
     * @param context the current drag context
     */
    void resetCache(Panel boundaryPanel, DragContext context) {
      ArrayList<Candidate> list = new ArrayList<Candidate>();
      if (context.draggable != null) {
        WidgetArea boundaryArea = new WidgetArea(boundaryPanel, null);
        for (DropController dropController : dropControllerList) {
          Candidate candidate = new Candidate(dropController);
          // A widget must not be droppable onto itself or its own children.
          if (DOM.isOrHasChild(context.draggable.getElement(), candidate.getDropTarget().getElement())) {
            continue;
          }
          // Only targets at least partially inside the boundary panel are eligible.
          if (candidate.getTargetArea().intersects(boundaryArea)) {
            list.add(candidate);
          }
        }
      }
      sortedCandidates = list.toArray(new Candidate[list.size()]);
      Arrays.sort(sortedCandidates);
    }
  }

  /** Snapshot of a widget's parent/position/margin so a vetoed drag can be undone. */
  private static class SavedWidgetInfo {
    int initialDraggableIndex;
    String initialDraggableMargin;
    Widget initialDraggableParent;
    Location initialDraggableParentLocation;
  }

  /**
   * @deprecated Instead selectively use your own CSS classes.
   */
  protected static final String CSS_MOVABLE_PANEL;

  /**
   * @deprecated Instead selectively use your own CSS classes.
   */
  protected static final String CSS_PROXY;

  private static final String PRIVATE_CSS_MOVABLE_PANEL = "dragdrop-movable-panel";
  private static final String PRIVATE_CSS_PROXY = "dragdrop-proxy";

  static {
    // Keep the deprecated public aliases pointing at the private canonical values.
    CSS_MOVABLE_PANEL = PRIVATE_CSS_MOVABLE_PANEL;
    CSS_PROXY = PRIVATE_CSS_PROXY;
  }

  private BoundaryDropController boundaryDropController;

  // Boundary panel origin (including border), computed once per drag in dragStart().
  protected int boundaryOffsetX;
  protected int boundaryOffsetY;

  private boolean dragProxyEnabled = false;
  private DropControllerCollection dropControllerCollection;

  @SuppressWarnings("rawtypes")
  private ArrayList dropControllerList = new ArrayList();

  protected int dropTargetClientHeight;
  protected int dropTargetClientWidth;

  // Panel that is actually moved during the drag: either a proxy or a container
  // holding the real selected widgets.
  protected Widget movablePanel;

  @SuppressWarnings("rawtypes")
  private HashMap savedWidgetInfoMap;

  // To provide XY drag feature (BEGIN)
  // Draggable's location at drag start; used to pin the frozen axis in dragMove().
  protected WidgetLocation initialDraggableLocation;

  private boolean allowHorizontalDragging;

  public boolean isAllowHorizontalDragging() {
    return allowHorizontalDragging;
  }

  public void setAllowHorizontalDragging(boolean allowHorizontalDragging) {
    this.allowHorizontalDragging = allowHorizontalDragging;
  }

  public boolean isAllowVerticalDragging() {
    return allowVerticalDragging;
  }

  public void setAllowVerticalDragging(boolean allowVerticalDragging) {
    this.allowVerticalDragging = allowVerticalDragging;
  }

  private boolean allowVerticalDragging;
  // To provide XY drag feature (END)

  // To provide XY drag feature (BEGIN)
  /**
   * Create a new pickup-and-move style drag controller. Allows widgets or a suitable proxy to be
   * temporarily picked up and moved around the specified boundary panel.
   *
   * <p>
   * Note: An implicit {@link BoundaryDropController} is created and registered automatically.
   * </p>
   *
   * @param boundaryPanel the desired boundary panel or <code>RootPanel.get()</code> if entire
   *          document body is to be the boundary
   * @param allowDroppingOnBoundaryPanel whether or not boundary panel should allow dropping
   * @param allowHorizontalDragging whether or not the Widget can be dragged horizontally
   * @param allowVerticalDragging whether or not the Widget can be dragged vertically
   */
  @SuppressWarnings("unchecked")
  public AxisXYDragController(AbsolutePanel boundaryPanel, boolean allowDroppingOnBoundaryPanel,
      boolean allowHorizontalDragging, boolean allowVerticalDragging) {
    super(boundaryPanel);
    this.allowHorizontalDragging = allowHorizontalDragging;
    this.allowVerticalDragging = allowVerticalDragging;
    assert boundaryPanel != null : "Use 'RootPanel.get()' instead of 'null'.";
    boundaryDropController = newBoundaryDropController(boundaryPanel, allowDroppingOnBoundaryPanel);
    registerDropController(boundaryDropController);
    dropControllerCollection = new DropControllerCollection(dropControllerList);
  }

  /**
   * Create a new pickup-and-move style drag controller. Allows widgets or a suitable proxy to be
   * temporarily picked up and moved around the specified boundary panel.
   *
   * <p>
   * Note: An implicit {@link BoundaryDropController} is created and registered automatically.
   * </p>
   *
   * @param boundaryPanel the desired boundary panel or <code>RootPanel.get()</code> if entire
   *          document body is to be the boundary
   * @param allowDroppingOnBoundaryPanel whether or not boundary panel should allow dropping
   */
  public AxisXYDragController(AbsolutePanel boundaryPanel, boolean allowDroppingOnBoundaryPanel) {
    this(boundaryPanel, allowDroppingOnBoundaryPanel, true, true);
  }
  // To provide XY drag feature (END)

  /**
   * Finishes a drag: either completes the drop or, on veto, restores widgets to their
   * original parents; then tears down the movable panel.
   */
  public void dragEnd() {
    // Exactly one of finalDropController / vetoException is set at this point.
    assert context.finalDropController == null == (context.vetoException != null);
    if (context.vetoException != null) {
      if (!getBehaviorDragProxy()) {
        restoreSelectedWidgetsLocation();
      }
    } else {
      context.dropController.onDrop(context);
    }
    context.dropController.onLeave(context);
    context.dropController = null;
    if (!getBehaviorDragProxy()) {
      restoreSelectedWidgetsStyle();
    }
    movablePanel.removeFromParent();
    movablePanel = null;
    super.dragEnd();
  }

  /**
   * Moves the movable panel to the desired position (pinning the frozen axis, clamping to the
   * boundary when constrained) and fires enter/leave/move events on the relevant drop controller.
   */
  public void dragMove() {
    // To provide XY drag feature (BEGIN)
    // When an axis is disabled, force the desired coordinate back to the drag-start position.
    if (allowHorizontalDragging == false) {
      context.desiredDraggableX = initialDraggableLocation.getLeft() + boundaryOffsetX;
    }
    if (allowVerticalDragging == false) {
      context.desiredDraggableY = initialDraggableLocation.getTop() + boundaryOffsetY;
    }
    // To provide XY drag feature (END)
    int desiredLeft = context.desiredDraggableX - boundaryOffsetX;
    int desiredTop = context.desiredDraggableY - boundaryOffsetY;
    if (getBehaviorConstrainedToBoundaryPanel()) {
      desiredLeft = Math.max(0, Math.min(desiredLeft, dropTargetClientWidth - context.draggable.getOffsetWidth()));
      desiredTop = Math.max(0, Math.min(desiredTop, dropTargetClientHeight - context.draggable.getOffsetHeight()));
    }
    DOMUtil.fastSetElementPosition(movablePanel.getElement(), desiredLeft, desiredTop);
    DropController newDropController = getIntersectDropController(context.mouseX, context.mouseY);
    // Fire onLeave/onEnter only when the drop target under the mouse actually changes.
    if (context.dropController != newDropController) {
      if (context.dropController != null) {
        context.dropController.onLeave(context);
      }
      context.dropController = newDropController;
      if (context.dropController != null) {
        context.dropController.onEnter(context);
      }
    }
    if (context.dropController != null) {
      context.dropController.onMove(context);
    }
  }

  /**
   * Begins a drag: builds either a drag proxy or a container holding the actual selected
   * widgets (preserving their relative offsets), then caches boundary panel geometry.
   */
  @SuppressWarnings("rawtypes")
  public void dragStart() {
    super.dragStart();
    WidgetLocation currentDraggableLocation = new WidgetLocation(context.draggable, context.boundaryPanel);
    // To provide XY drag feature (BEGIN)
    // Remember the start location so dragMove() can pin a disabled axis.
    initialDraggableLocation = currentDraggableLocation;
    // To provide XY drag feature (END)
    if (getBehaviorDragProxy()) {
      movablePanel = newDragProxy(context);
      WidgetUtils.addWidget(context.boundaryPanel, movablePanel, currentDraggableLocation.getLeft(), currentDraggableLocation.getTop());
    } else {
      // Dragging the real widgets: snapshot their state first so a veto can restore them.
      saveSelectedWidgetsLocationAndStyle();
      AbsolutePanel container = new AbsolutePanel();
      DOM.setStyleAttribute(container.getElement(), "overflow", "visible");
      container.setPixelSize(context.draggable.getOffsetWidth(), context.draggable.getOffsetHeight());
      WidgetUtils.addWidget(context.boundaryPanel, container, currentDraggableLocation.getLeft(), currentDraggableLocation.getTop());
      int draggableAbsoluteLeft = context.draggable.getAbsoluteLeft();
      int draggableAbsoluteTop = context.draggable.getAbsoluteTop();
      // Re-parent each selected widget into the container at its offset relative to the draggable.
      for (Iterator iterator = context.selectedWidgets.iterator(); iterator.hasNext();) {
        Widget widget = (Widget) iterator.next();
        if (widget != context.draggable) {
          int relativeX = widget.getAbsoluteLeft() - draggableAbsoluteLeft;
          int relativeY = widget.getAbsoluteTop() - draggableAbsoluteTop;
          WidgetUtils.addWidget(container, widget, relativeX, relativeY);
        }
      }
      WidgetUtils.addWidget(container, context.draggable, 0, 0);
      movablePanel = container;
    }
    movablePanel.addStyleName(PRIVATE_CSS_MOVABLE_PANEL);
    // one time calculation of boundary panel location for efficiency during dragging
    Location widgetLocation = new WidgetLocation(context.boundaryPanel, null);
    boundaryOffsetX = widgetLocation.getLeft() + DOMUtil.getBorderLeft(context.boundaryPanel.getElement());
    boundaryOffsetY = widgetLocation.getTop() + DOMUtil.getBorderTop(context.boundaryPanel.getElement());
    dropTargetClientWidth = DOMUtil.getClientWidth(context.boundaryPanel.getElement());
    dropTargetClientHeight = DOMUtil.getClientHeight(context.boundaryPanel.getElement());
  }

  /**
   * Whether or not dropping on the boundary panel is permitted.
   *
   * @return <code>true</code> if dropping on the boundary panel is allowed
   */
  public boolean getBehaviorBoundaryPanelDrop() {
    return boundaryDropController.getBehaviorBoundaryPanelDrop();
  }

  /**
   * Determine whether or not this controller automatically creates a drag proxy for each drag
   * operation. Whether or not a drag proxy is used is ultimately determined by the return value of
   * {@link #maybeNewDraggableProxy(Widget)}
   *
   * @return <code>true</code> if drag proxy behavior is enabled
   */
  public boolean getBehaviorDragProxy() {
    return dragProxyEnabled;
  }

  /**
   * @deprecated Use {@link #getBehaviorDragProxy()} instead.
   *
   * @return is drag proxy enabled
   */
  public boolean isDragProxyEnabled() {
    return getBehaviorDragProxy();
  }

  /**
   * Gives the pending drop controller a chance to veto the drop; records the final
   * drop controller on success.
   */
  public void previewDragEnd() throws VetoDragException {
    assert context.finalDropController == null;
    assert context.vetoException == null;
    // Does the DropController allow the drop?
    try {
      context.dropController.onPreviewDrop(context);
      context.finalDropController = context.dropController;
    } catch (VetoDragException ex) {
      context.finalDropController = null;
      throw ex;
    } finally {
      super.previewDragEnd();
    }
  }

  /**
   * Register a new DropController, representing a new drop target, with this drag controller.
   *
   * @see #unregisterDropController(DropController)
   *
   * @param dropController the controller to register
   */
  @SuppressWarnings("unchecked")
  public void registerDropController(DropController dropController) {
    dropControllerList.add(dropController);
  }

  /** Rebuilds the cached, DOM-sorted candidate list of eligible drop targets. */
  public void resetCache() {
    super.resetCache();
    dropControllerCollection.resetCache(context.boundaryPanel, context);
  }

  /**
   * Set whether or not widgets may be dropped anywhere on the boundary panel. Set to
   * <code>false</code> when you only want explicitly registered drop controllers to accept drops.
   * Defaults to <code>true</code>.
   *
   * @param allowDroppingOnBoundaryPanel <code>true</code> to allow dropping
   */
  public void setBehaviorBoundaryPanelDrop(boolean allowDroppingOnBoundaryPanel) {
    boundaryDropController.setBehaviorBoundaryPanelDrop(allowDroppingOnBoundaryPanel);
  }

  /**
   * Set whether or not this controller should automatically create a drag proxy for each drag
   * operation. Whether or not a drag proxy is used is ultimately determined by the return value of
   * {@link #maybeNewDraggableProxy(Widget)}.
   *
   * @param dragProxyEnabled <code>true</code> to enable drag proxy behavior
   */
  public void setBehaviorDragProxy(boolean dragProxyEnabled) {
    this.dragProxyEnabled = dragProxyEnabled;
  }

  /**
   * @deprecated Use {@link #setBehaviorDragProxy(boolean)} instead.
   *
   * @param dragProxyEnabled should drag proxy be enabled
   */
  public void setDragProxyEnabled(boolean dragProxyEnabled) {
    setBehaviorDragProxy(dragProxyEnabled);
  }

  /**
   * Unregister a DropController from this drag controller.
   *
   * @see #registerDropController(DropController)
   *
   * @param dropController the controller to unregister
   */
  public void unregisterDropController(DropController dropController) {
    dropControllerList.remove(dropController);
  }

  /**
   * @deprecated Use {@link #newDragProxy(DragContext)} and {@link #setBehaviorDragProxy(boolean)}
   *             instead.
   *
   * @param draggable the draggable widget
   * @return the widget
   */
  protected final Widget maybeNewDraggableProxy(Widget draggable) {
    throw new UnsupportedOperationException();
  }

  /**
   * Create a new BoundaryDropController to manage our boundary panel as a drop target. To ensure
   * that draggable widgets can only be dropped on registered drop targets, set
   * <code>allowDroppingOnBoundaryPanel</code> to <code>false</code>.
   *
   * @param boundaryPanel the panel to which our drag-and-drop operations are constrained
   * @param allowDroppingOnBoundaryPanel whether or not dropping is allowed on the boundary panel
   * @return the new BoundaryDropController
   */
  protected BoundaryDropController newBoundaryDropController(AbsolutePanel boundaryPanel,
      boolean allowDroppingOnBoundaryPanel) {
    return new BoundaryDropController(boundaryPanel, allowDroppingOnBoundaryPanel);
  }

  /**
   * Called by {@link PickupDragController#dragStart()} to allow subclasses to provide their
   * own drag proxies.
   *
   * @param context the current drag context
   * @return a new drag proxy
   */
  @SuppressWarnings("rawtypes")
  protected Widget newDragProxy(DragContext context) {
    AbsolutePanel container = new AbsolutePanel();
    DOM.setStyleAttribute(container.getElement(), "overflow", "visible");
    WidgetArea draggableArea = new WidgetArea(context.draggable, null);
    // One same-size placeholder per selected widget, positioned relative to the draggable.
    for (Iterator iterator = context.selectedWidgets.iterator(); iterator.hasNext();) {
      Widget widget = (Widget) iterator.next();
      WidgetArea widgetArea = new WidgetArea(widget, null);
      Widget proxy = new SimplePanel();
      proxy.setPixelSize(widget.getOffsetWidth(), widget.getOffsetHeight());
      proxy.addStyleName(PRIVATE_CSS_PROXY);
      WidgetUtils.addWidget(container, proxy, widgetArea.getLeft() - draggableArea.getLeft(),
          widgetArea.getTop() - draggableArea.getTop());
    }
    return container;
  }

  /**
   * Restore the selected widgets to their original location.
   *
   * @see #saveSelectedWidgetsLocationAndStyle()
   * @see #restoreSelectedWidgetsStyle()
   */
  @SuppressWarnings({"rawtypes", "deprecation"})
  protected void restoreSelectedWidgetsLocation() {
    for (Iterator iterator = context.selectedWidgets.iterator(); iterator.hasNext();) {
      Widget widget = (Widget) iterator.next();
      SavedWidgetInfo info = (SavedWidgetInfo) savedWidgetInfoMap.get(widget);
      // TODO simplify after enhancement for issue 1112 provides InsertPanel interface
      // http://code.google.com/p/google-web-toolkit/issues/detail?id=1112
      // Each supported parent type needs its own re-insertion call; see the matching
      // save logic in saveSelectedWidgetsLocationAndStyle().
      if (info.initialDraggableParent instanceof AbsolutePanel) {
        WidgetUtils.addWidget(((AbsolutePanel) info.initialDraggableParent), widget, info.initialDraggableParentLocation.getLeft(),
            info.initialDraggableParentLocation.getTop());
      } else if (info.initialDraggableParent instanceof HorizontalPanel) {
        ((HorizontalPanel) info.initialDraggableParent).insert(widget, info.initialDraggableIndex);
      } else if (info.initialDraggableParent instanceof VerticalPanel) {
        ((VerticalPanel) info.initialDraggableParent).insert(widget, info.initialDraggableIndex);
      } else if (info.initialDraggableParent instanceof FlowPanel) {
        ((FlowPanel) info.initialDraggableParent).insert(widget, info.initialDraggableIndex);
      } else if (info.initialDraggableParent instanceof SimplePanel) {
        ((SimplePanel) info.initialDraggableParent).setWidget(widget);
      } else {
        throw new RuntimeException("Unable to handle initialDraggableParent "
            + GWT.getTypeName(info.initialDraggableParent));
      }
    }
  }

  /**
   * Restore the selected widgets with their original style.
   *
   * @see #saveSelectedWidgetsLocationAndStyle()
   * @see #restoreSelectedWidgetsLocation()
   */
  @SuppressWarnings("rawtypes")
  protected void restoreSelectedWidgetsStyle() {
    for (Iterator iterator = context.selectedWidgets.iterator(); iterator.hasNext();) {
      Widget widget = (Widget) iterator.next();
      SavedWidgetInfo info = (SavedWidgetInfo) savedWidgetInfoMap.get(widget);
      DOM.setStyleAttribute(widget.getElement(), "margin", info.initialDraggableMargin);
    }
  }

  /**
   * Save the selected widgets' current location in case they must be restored due to a canceled
   * drop.
   *
   * @see #restoreSelectedWidgetsLocation()
   */
  @SuppressWarnings({"rawtypes", "deprecation", "unchecked"})
  protected void saveSelectedWidgetsLocationAndStyle() {
    savedWidgetInfoMap = new HashMap();
    for (Iterator iterator = context.selectedWidgets.iterator(); iterator.hasNext();) {
      Widget widget = (Widget) iterator.next();
      SavedWidgetInfo info = new SavedWidgetInfo();
      info.initialDraggableParent = widget.getParent();
      // TODO simplify after enhancement for issue 1112 provides InsertPanel interface
      // http://code.google.com/p/google-web-toolkit/issues/detail?id=1112
      if (info.initialDraggableParent instanceof AbsolutePanel) {
        info.initialDraggableParentLocation = new WidgetLocation(widget, info.initialDraggableParent);
      } else if (info.initialDraggableParent instanceof HorizontalPanel) {
        info.initialDraggableIndex = ((HorizontalPanel) info.initialDraggableParent).getWidgetIndex(widget);
      } else if (info.initialDraggableParent instanceof VerticalPanel) {
        info.initialDraggableIndex = ((VerticalPanel) info.initialDraggableParent).getWidgetIndex(widget);
      } else if (info.initialDraggableParent instanceof FlowPanel) {
        info.initialDraggableIndex = ((FlowPanel) info.initialDraggableParent).getWidgetIndex(widget);
      } else if (info.initialDraggableParent instanceof SimplePanel) {
        // save nothing
      } else {
        throw new RuntimeException(
            "Unable to handle 'initialDraggableParent instanceof "
                + GWT.getTypeName(info.initialDraggableParent)
                + "'; Please create your own DragController and override saveDraggableLocationAndStyle() and restoreDraggableLocation()");
      }
      // Zero the margin during the drag; the original value is restored in restoreSelectedWidgetsStyle().
      info.initialDraggableMargin = DOM.getStyleAttribute(widget.getElement(), "margin");
      DOM.setStyleAttribute(widget.getElement(), "margin", "0px");
      savedWidgetInfoMap.put(widget, info);
    }
  }

  // Falls back to the boundary drop controller when no registered target intersects (x, y).
  protected DropController getIntersectDropController(int x, int y) {
    DropController dropController = dropControllerCollection.getIntersectDropController(x, y);
    return dropController != null ? dropController : boundaryDropController;
  }
}
| |
/*
* Copyright (C) 2007 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.collect;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.collect.testing.IteratorFeature.UNMODIFIABLE;
import static java.util.Arrays.asList;
import static java.util.Collections.singletonList;
import static org.junit.contrib.truth.Truth.ASSERT;
import com.google.common.annotations.GwtCompatible;
import com.google.common.annotations.GwtIncompatible;
import com.google.common.base.Function;
import com.google.common.base.Functions;
import com.google.common.collect.testing.IteratorTester;
import com.google.common.collect.testing.ListTestSuiteBuilder;
import com.google.common.collect.testing.TestStringListGenerator;
import com.google.common.collect.testing.features.CollectionFeature;
import com.google.common.collect.testing.features.CollectionSize;
import com.google.common.collect.testing.features.ListFeature;
import com.google.common.collect.testing.google.ListGenerators.CharactersOfCharSequenceGenerator;
import com.google.common.collect.testing.google.ListGenerators.CharactersOfStringGenerator;
import com.google.common.testing.NullPointerTester;
import com.google.common.testing.SerializableTester;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
import org.easymock.EasyMock;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.ListIterator;
import java.util.NoSuchElementException;
import java.util.RandomAccess;
import java.util.concurrent.CopyOnWriteArrayList;
/**
* Unit test for {@code Lists}.
*
* @author Kevin Bourrillion
* @author Mike Bostock
* @author Jared Levy
*/
@GwtCompatible(emulated = true)
public class ListsTest extends TestCase {
private static final Collection<Integer> SOME_COLLECTION
= asList(0, 1, 1);
private static final Iterable<Integer> SOME_ITERABLE = new SomeIterable();
private static final class RemoveFirstFunction
implements Function<String, String>, Serializable {
@Override
public String apply(String from) {
return (from.length() == 0) ? from : from.substring(1);
}
}
  /** Serializable Iterable that delegates to the shared SOME_COLLECTION fixture. */
  private static class SomeIterable implements Iterable<Integer>, Serializable {
    @Override
    public Iterator<Integer> iterator() {
      // A fresh iterator over the shared collection on every call.
      return SOME_COLLECTION.iterator();
    }
    private static final long serialVersionUID = 0;
  }
private static final List<Integer> SOME_LIST
= Lists.newArrayList(1, 2, 3, 4);
private static final List<Integer> SOME_SEQUENTIAL_LIST
= Lists.newLinkedList(asList(1, 2, 3, 4));
private static final List<String> SOME_STRING_LIST
= asList("1", "2", "3", "4");
private static final Function<Number, String> SOME_FUNCTION
= new SomeFunction();
  /**
   * Serializable Function fixture converting any Number to its string form via
   * String.valueOf (yields "null" for a null argument).
   */
  private static class SomeFunction
      implements Function<Number, String>, Serializable {
    @Override
    public String apply(Number n) {
      return String.valueOf(n);
    }
    private static final long serialVersionUID = 0;
  }
@GwtIncompatible("suite")
public static Test suite() {
TestSuite suite = new TestSuite();
suite.addTestSuite(ListsTest.class);
suite.addTest(ListTestSuiteBuilder.using(new TestStringListGenerator() {
@Override protected List<String> create(String[] elements) {
String[] rest = new String[elements.length - 1];
System.arraycopy(elements, 1, rest, 0, elements.length - 1);
return Lists.asList(elements[0], rest);
}
})
.named("Lists.asList, 2 parameter")
.withFeatures(CollectionSize.SEVERAL, CollectionSize.ONE,
CollectionFeature.SERIALIZABLE,
CollectionFeature.ALLOWS_NULL_VALUES)
.createTestSuite());
suite.addTest(ListTestSuiteBuilder.using(new TestStringListGenerator() {
@Override protected List<String> create(String[] elements) {
String[] rest = new String[elements.length - 2];
System.arraycopy(elements, 2, rest, 0, elements.length - 2);
return Lists.asList(elements[0], elements[1], rest);
}
})
.named("Lists.asList, 3 parameter")
.withFeatures(CollectionSize.SEVERAL,
CollectionFeature.SERIALIZABLE,
CollectionFeature.ALLOWS_NULL_VALUES)
.createTestSuite());
final Function<String, String> removeFirst
= new RemoveFirstFunction();
suite.addTest(ListTestSuiteBuilder.using(new TestStringListGenerator() {
@Override protected List<String> create(String[] elements) {
List<String> fromList = Lists.newArrayList();
for (String element : elements) {
fromList.add("q" + checkNotNull(element));
}
return Lists.transform(fromList, removeFirst);
}
})
.named("Lists.transform, random access, no nulls")
.withFeatures(CollectionSize.ANY,
ListFeature.REMOVE_OPERATIONS,
CollectionFeature.SERIALIZABLE,
CollectionFeature.ALLOWS_NULL_QUERIES)
.createTestSuite());
suite.addTest(ListTestSuiteBuilder.using(new TestStringListGenerator() {
@Override protected List<String> create(String[] elements) {
List<String> fromList = Lists.newLinkedList();
for (String element : elements) {
fromList.add("q" + checkNotNull(element));
}
return Lists.transform(fromList, removeFirst);
}
})
.named("Lists.transform, sequential access, no nulls")
.withFeatures(CollectionSize.ANY,
ListFeature.REMOVE_OPERATIONS,
CollectionFeature.SERIALIZABLE,
CollectionFeature.ALLOWS_NULL_QUERIES)
.createTestSuite());
suite.addTest(ListTestSuiteBuilder.using(new TestStringListGenerator() {
@Override protected List<String> create(String[] elements) {
List<String> fromList = Lists.newArrayList(elements);
return Lists.transform(fromList, Functions.<String>identity());
}
})
.named("Lists.transform, random access, nulls")
.withFeatures(CollectionSize.ANY,
ListFeature.REMOVE_OPERATIONS,
CollectionFeature.SERIALIZABLE,
CollectionFeature.ALLOWS_NULL_VALUES)
.createTestSuite());
suite.addTest(ListTestSuiteBuilder.using(new TestStringListGenerator() {
@Override protected List<String> create(String[] elements) {
List<String> fromList =
Lists.newLinkedList(asList(elements));
return Lists.transform(fromList, Functions.<String>identity());
}
})
.named("Lists.transform, sequential access, nulls")
.withFeatures(CollectionSize.ANY,
ListFeature.REMOVE_OPERATIONS,
CollectionFeature.SERIALIZABLE,
CollectionFeature.ALLOWS_NULL_VALUES)
.createTestSuite());
suite.addTest(ListTestSuiteBuilder.using(new TestStringListGenerator() {
@Override protected List<String> create(String[] elements) {
List<String> list = Lists.newArrayList();
for (int i = elements.length - 1; i >= 0; i--)
list.add(elements[i]);
return Lists.reverse(list);
}
}).named("Lists.reverse[ArrayList]").withFeatures(CollectionSize.ANY,
CollectionFeature.ALLOWS_NULL_VALUES, ListFeature.GENERAL_PURPOSE)
.createTestSuite());
suite.addTest(ListTestSuiteBuilder.using(new TestStringListGenerator() {
@Override protected List<String> create(String[] elements) {
String[] reverseElements = new String[elements.length];
for (int i = elements.length - 1, j = 0; i >= 0; i--, j++)
reverseElements[j] = elements[i];
return Lists.reverse(asList(reverseElements));
}
}).named("Lists.reverse[Arrays.asList]").withFeatures(CollectionSize.ANY,
CollectionFeature.ALLOWS_NULL_VALUES, ListFeature.SUPPORTS_SET)
.createTestSuite());
suite.addTest(ListTestSuiteBuilder.using(new TestStringListGenerator() {
@Override protected List<String> create(String[] elements) {
List<String> list = Lists.newLinkedList();
for (int i = elements.length - 1; i >= 0; i--)
list.add(elements[i]);
return Lists.reverse(list);
}
}).named("Lists.reverse[LinkedList]").withFeatures(CollectionSize.ANY,
CollectionFeature.ALLOWS_NULL_VALUES, ListFeature.GENERAL_PURPOSE)
.createTestSuite());
suite.addTest(ListTestSuiteBuilder.using(new TestStringListGenerator() {
@Override protected List<String> create(String[] elements) {
ImmutableList.Builder<String> builder = ImmutableList.builder();
for (int i = elements.length - 1; i >= 0; i--)
builder.add(elements[i]);
return Lists.reverse(builder.build());
}
}).named("Lists.reverse[ImmutableList]").withFeatures(CollectionSize.ANY,
CollectionFeature.ALLOWS_NULL_QUERIES)
.createTestSuite());
suite.addTest(ListTestSuiteBuilder.using(new CharactersOfStringGenerator())
.named("Lists.charactersOf[String]").withFeatures(
CollectionSize.ANY,
CollectionFeature.SERIALIZABLE,
CollectionFeature.ALLOWS_NULL_QUERIES)
.createTestSuite());
suite.addTest(ListTestSuiteBuilder.using(new CharactersOfCharSequenceGenerator())
.named("Lists.charactersOf[CharSequence]").withFeatures(
CollectionSize.ANY, CollectionFeature.ALLOWS_NULL_QUERIES)
.createTestSuite());
return suite;
}
public void testCharactersOfIsView() {
StringBuilder builder = new StringBuilder("abc");
List<Character> chars = Lists.charactersOf(builder);
assertEquals(asList('a', 'b', 'c'), chars);
builder.append("def");
assertEquals(
asList('a', 'b', 'c', 'd', 'e', 'f'), chars);
builder.deleteCharAt(5);
assertEquals(
asList('a', 'b', 'c', 'd', 'e'), chars);
}
  // newArrayList() with no arguments yields an empty list.
  public void testNewArrayListEmpty() {
    ArrayList<Integer> list = Lists.newArrayList();
    assertEquals(Collections.emptyList(), list);
  }
  // An initial-capacity hint never affects the visible (empty) contents.
  public void testNewArrayListWithCapacity() {
    ArrayList<Integer> list = Lists.newArrayListWithCapacity(0);
    assertEquals(Collections.emptyList(), list);
    ArrayList<Integer> bigger = Lists.newArrayListWithCapacity(256);
    assertEquals(Collections.emptyList(), bigger);
  }
  // A negative capacity must be rejected up front.
  public void testNewArrayListWithCapacity_negative() {
    try {
      Lists.newArrayListWithCapacity(-1);
      fail();
    } catch (IllegalArgumentException expected) {
    }
  }
  // Expected-size hints likewise leave the contents empty.
  public void testNewArrayListWithExpectedSize() {
    ArrayList<Integer> list = Lists.newArrayListWithExpectedSize(0);
    assertEquals(Collections.emptyList(), list);
    ArrayList<Integer> bigger = Lists.newArrayListWithExpectedSize(256);
    assertEquals(Collections.emptyList(), bigger);
  }
  // A negative expected size must be rejected up front.
  public void testNewArrayListWithExpectedSize_negative() {
    try {
      Lists.newArrayListWithExpectedSize(-1);
      fail();
    } catch (IllegalArgumentException expected) {
    }
  }
  // Varargs factory preserves order and duplicates (0, 1, 1).
  public void testNewArrayListVarArgs() {
    ArrayList<Integer> list = Lists.newArrayList(0, 1, 1);
    assertEquals(SOME_COLLECTION, list);
  }
  public void testComputeArrayListCapacity() {
    // The expected values encode the growth formula 5 + n + n/10
    // (a small constant plus ~10% headroom): 0->5, 8->13, 77->89,
    // 20000000->22000005 all fit that pattern.
    assertEquals(5, Lists.computeArrayListCapacity(0));
    assertEquals(13, Lists.computeArrayListCapacity(8));
    assertEquals(89, Lists.computeArrayListCapacity(77));
    assertEquals(22000005, Lists.computeArrayListCapacity(20000000));
    // Near the int range the result saturates at Integer.MAX_VALUE
    // instead of overflowing.
    assertEquals(Integer.MAX_VALUE,
        Lists.computeArrayListCapacity(Integer.MAX_VALUE - 1000));
  }
  // Copying from a Collection preserves contents.
  public void testNewArrayListFromCollection() {
    ArrayList<Integer> list = Lists.newArrayList(SOME_COLLECTION);
    assertEquals(SOME_COLLECTION, list);
  }
  // Copying from a plain Iterable preserves contents.
  public void testNewArrayListFromIterable() {
    ArrayList<Integer> list = Lists.newArrayList(SOME_ITERABLE);
    assertEquals(SOME_COLLECTION, list);
  }
  // Copying from an Iterator drains it and preserves contents.
  public void testNewArrayListFromIterator() {
    ArrayList<Integer> list = Lists.newArrayList(SOME_COLLECTION.iterator());
    assertEquals(SOME_COLLECTION, list);
  }
  // LinkedList factory counterparts of the ArrayList tests above.
  public void testNewLinkedListEmpty() {
    LinkedList<Integer> list = Lists.newLinkedList();
    assertEquals(Collections.emptyList(), list);
  }
  public void testNewLinkedListFromCollection() {
    LinkedList<Integer> list = Lists.newLinkedList(SOME_COLLECTION);
    assertEquals(SOME_COLLECTION, list);
  }
  public void testNewLinkedListFromIterable() {
    LinkedList<Integer> list = Lists.newLinkedList(SOME_ITERABLE);
    assertEquals(SOME_COLLECTION, list);
  }
  // CopyOnWriteArrayList factories; not available under GWT.
  @GwtIncompatible("CopyOnWriteArrayList")
  public void testNewCOWALEmpty() {
    CopyOnWriteArrayList<Integer> list = Lists.newCopyOnWriteArrayList();
    assertEquals(Collections.emptyList(), list);
  }
  @GwtIncompatible("CopyOnWriteArrayList")
  public void testNewCOWALFromIterable() {
    CopyOnWriteArrayList<Integer> list = Lists.newCopyOnWriteArrayList(
        SOME_ITERABLE);
    assertEquals(SOME_COLLECTION, list);
  }
  // Every public static method of Lists must reject null arguments.
  @GwtIncompatible("NullPointerTester")
  public void testNullPointerExceptions() {
    NullPointerTester tester = new NullPointerTester();
    tester.testAllPublicStaticMethods(Lists.class);
  }
/**
* This is just here to illustrate how {@code Arrays#asList} differs from
* {@code Lists#newArrayList}.
*/
public void testArraysAsList() {
List<String> ourWay = Lists.newArrayList("foo", "bar", "baz");
List<String> otherWay = asList("foo", "bar", "baz");
// They're logically equal
assertEquals(ourWay, otherWay);
// The result of Arrays.asList() is mutable
otherWay.set(0, "FOO");
assertEquals("FOO", otherWay.get(0));
// But it can't grow
try {
otherWay.add("nope");
fail("no exception thrown");
} catch (UnsupportedOperationException expected) {
}
// And it can't shrink
try {
otherWay.remove(2);
fail("no exception thrown");
} catch (UnsupportedOperationException expected) {
}
}
  @GwtIncompatible("SerializableTester")
  public void testAsList1() {
    // Lists.asList(first, rest[]) must behave as ["foo", "bar", "baz"].
    List<String> list = Lists.asList("foo", new String[] { "bar", "baz" });
    checkFooBarBazList(list);
    // The view survives serialization and is random-access.
    SerializableTester.reserializeAndAssert(list);
    assertTrue(list instanceof RandomAccess);
    // Exhaustively exercise the (unmodifiable, known-order) iterator contract.
    new IteratorTester<String>(5, UNMODIFIABLE,
        asList("foo", "bar", "baz"),
        IteratorTester.KnownOrder.KNOWN_ORDER) {
      @Override protected Iterator<String> newTargetIterator() {
        return Lists.asList("foo", new String[] {"bar", "baz"}).iterator();
      }
    }.test();
  }
private void checkFooBarBazList(List<String> list) {
ASSERT.that(list).hasContentsInOrder("foo", "bar", "baz");
assertEquals(3, list.size());
assertIndexIsOutOfBounds(list, -1);
assertEquals("foo", list.get(0));
assertEquals("bar", list.get(1));
assertEquals("baz", list.get(2));
assertIndexIsOutOfBounds(list, 3);
}
  public void testAsList1Small() {
    // Degenerate case: an empty rest-array leaves only the first element.
    List<String> list = Lists.asList("foo", new String[0]);
    ASSERT.that(list).hasContentsInOrder("foo");
    assertEquals(1, list.size());
    assertIndexIsOutOfBounds(list, -1);
    assertEquals("foo", list.get(0));
    assertIndexIsOutOfBounds(list, 1);
    assertTrue(list instanceof RandomAccess);
    // Iterator contract over the single-element view.
    new IteratorTester<String>(3, UNMODIFIABLE, singletonList("foo"),
        IteratorTester.KnownOrder.KNOWN_ORDER) {
      @Override protected Iterator<String> newTargetIterator() {
        return Lists.asList("foo", new String[0]).iterator();
      }
    }.test();
  }
  public void testAsList2() {
    // Two leading elements plus a one-element rest-array.
    List<String> list = Lists.asList("foo", "bar", new String[] { "baz" });
    checkFooBarBazList(list);
    assertTrue(list instanceof RandomAccess);
    // Iterator contract (unmodifiable, known order).
    new IteratorTester<String>(5, UNMODIFIABLE, asList("foo", "bar",
        "baz"), IteratorTester.KnownOrder.KNOWN_ORDER) {
      @Override protected Iterator<String> newTargetIterator() {
        return Lists.asList("foo", "bar", new String[] {"baz"}).iterator();
      }
    }.test();
  }
  @GwtIncompatible("SerializableTester")
  public void testAsList2Small() {
    // Two leading elements and an empty rest-array.
    List<String> list = Lists.asList("foo", "bar", new String[0]);
    ASSERT.that(list).hasContentsInOrder("foo", "bar");
    assertEquals(2, list.size());
    assertIndexIsOutOfBounds(list, -1);
    assertEquals("foo", list.get(0));
    assertEquals("bar", list.get(1));
    assertIndexIsOutOfBounds(list, 2);
    // Serializable, random-access, and iterator-contract-compliant.
    SerializableTester.reserializeAndAssert(list);
    assertTrue(list instanceof RandomAccess);
    new IteratorTester<String>(5, UNMODIFIABLE, asList("foo", "bar"),
        IteratorTester.KnownOrder.KNOWN_ORDER) {
      @Override protected Iterator<String> newTargetIterator() {
        return Lists.asList("foo", "bar", new String[0]).iterator();
      }
    }.test();
  }
private static void assertIndexIsOutOfBounds(List<String> list, int index) {
try {
list.get(index);
fail();
} catch (IndexOutOfBoundsException expected) {
}
}
  // Reversed view over a random-access (ArrayList) backing list.
  public void testReverseViewRandomAccess() {
    List<Integer> fromList = Lists.newArrayList(SOME_LIST);
    List<Integer> toList = Lists.reverse(fromList);
    assertReverseView(fromList, toList);
  }
  // Reversed view over a sequential-access (LinkedList) backing list.
  public void testReverseViewSequential() {
    List<Integer> fromList = Lists.newLinkedList(SOME_SEQUENTIAL_LIST);
    List<Integer> toList = Lists.reverse(fromList);
    assertReverseView(fromList, toList);
  }
  // Verifies that Lists.reverse produces a two-way view: every mutation of
  // either list is immediately visible, mirrored, in the other. The first
  // assertion implies the backing list starts as [1, 2, 3, 4].
  // NOTE: the steps are a cumulative script — each assertion depends on all
  // mutations before it, so statement order must not change.
  private static void assertReverseView(List<Integer> fromList,
      List<Integer> toList) {
    /* fromList modifications reflected in toList */
    fromList.set(0, 5);
    assertEquals(asList(4, 3, 2, 5), toList);
    fromList.add(6);
    assertEquals(asList(6, 4, 3, 2, 5), toList);
    fromList.add(2, 9);
    assertEquals(asList(6, 4, 3, 9, 2, 5), toList);
    // remove(Object) vs. remove(int): both must write through.
    fromList.remove(Integer.valueOf(2));
    assertEquals(asList(6, 4, 3, 9, 5), toList);
    fromList.remove(3);
    assertEquals(asList(6, 3, 9, 5), toList);
    /* toList modifications reflected in fromList */
    toList.remove(0);
    assertEquals(asList(5, 9, 3), fromList);
    toList.add(7);
    assertEquals(asList(7, 5, 9, 3), fromList);
    toList.add(5);
    assertEquals(asList(5, 7, 5, 9, 3), fromList);
    toList.remove(Integer.valueOf(5));
    assertEquals(asList(5, 7, 9, 3), fromList);
    toList.set(1, 8);
    assertEquals(asList(5, 7, 8, 3), fromList);
    toList.clear();
    assertEquals(Collections.emptyList(), fromList);
  }
  // Shorthand: immutable snapshot of the given elements, used by the
  // cartesianProduct tests below.
  private static <E> List<E> list(E... elements) {
    return ImmutableList.copyOf(elements);
  }
  // 1x1 product: a single combined tuple.
  @SuppressWarnings("unchecked") // varargs!
  public void testCartesianProduct_binary1x1() {
    ASSERT.that(Lists.cartesianProduct(list(1), list(2))).hasContentsInOrder(list(1, 2));
  }
  // 1x2 product: second axis varies fastest.
  @SuppressWarnings("unchecked") // varargs!
  public void testCartesianProduct_binary1x2() {
    ASSERT.that(Lists.cartesianProduct(list(1), list(2, 3))).hasContentsInOrder(
        list(1, 2), list(1, 3));
  }
  // 2x2 product in lexicographic (odometer) order.
  @SuppressWarnings("unchecked") // varargs!
  public void testCartesianProduct_binary2x2() {
    ASSERT.that(Lists.cartesianProduct(list(1, 2), list(3, 4))).hasContentsInOrder(
        list(1, 3), list(1, 4), list(2, 3), list(2, 4));
  }
  // Three-axis product: all 8 binary triples, last axis fastest.
  @SuppressWarnings("unchecked") // varargs!
  public void testCartesianProduct_2x2x2() {
    ASSERT.that(Lists.cartesianProduct(list(0, 1), list(0, 1), list(0, 1))).hasContentsInOrder(
        list(0, 0, 0), list(0, 0, 1), list(0, 1, 0), list(0, 1, 1),
        list(1, 0, 0), list(1, 0, 1), list(1, 1, 0), list(1, 1, 1));
  }
  // contains() must respect tuple ordering: (3, 1) is not a member.
  @SuppressWarnings("unchecked") // varargs!
  public void testCartesianProduct_contains() {
    List<List<Integer>> actual = Lists.cartesianProduct(list(1, 2), list(3, 4));
    assertTrue(actual.contains(list(1, 3)));
    assertTrue(actual.contains(list(1, 4)));
    assertTrue(actual.contains(list(2, 3)));
    assertTrue(actual.contains(list(2, 4)));
    assertFalse(actual.contains(list(3, 1)));
  }
  // Axes of unrelated element types produce List<Object> tuples.
  @SuppressWarnings("unchecked") // varargs!
  public void testCartesianProduct_unrelatedTypes() {
    List<Integer> x = list(1, 2);
    List<String> y = list("3", "4");
    List<Object> exp1 = list((Object) 1, "3");
    List<Object> exp2 = list((Object) 1, "4");
    List<Object> exp3 = list((Object) 2, "3");
    List<Object> exp4 = list((Object) 2, "4");
    ASSERT.that(Lists.<Object>cartesianProduct(x, y)).hasContentsInOrder(exp1, exp2, exp3, exp4);
  }
  // A product whose size overflows int (10000^5) must be rejected eagerly.
  @SuppressWarnings("unchecked") // varargs!
  public void testCartesianProductTooBig() {
    List<String> list = Collections.nCopies(10000, "foo");
    try {
      Lists.cartesianProduct(list, list, list, list, list);
      fail("Expected IAE");
    } catch (IllegalArgumentException expected) {}
  }
  // The transformed view's hashCode must equal that of an eager copy.
  public void testTransformHashCodeRandomAccess() {
    List<String> list = Lists.transform(SOME_LIST, SOME_FUNCTION);
    assertEquals(SOME_STRING_LIST.hashCode(), list.hashCode());
  }
  public void testTransformHashCodeSequential() {
    List<String> list = Lists.transform(SOME_SEQUENTIAL_LIST, SOME_FUNCTION);
    assertEquals(SOME_STRING_LIST.hashCode(), list.hashCode());
  }
  // Removal-style mutations on the transformed view, both backing kinds.
  public void testTransformModifiableRandomAccess() {
    List<Integer> fromList = Lists.newArrayList(SOME_LIST);
    List<String> list = Lists.transform(fromList, SOME_FUNCTION);
    assertTransformModifiable(list);
  }
  public void testTransformModifiableSequential() {
    List<Integer> fromList = Lists.newLinkedList(SOME_SEQUENTIAL_LIST);
    List<String> list = Lists.transform(fromList, SOME_FUNCTION);
    assertTransformModifiable(list);
  }
  // A transformed view supports removal (it writes through to the backing
  // list) but not add/set (there is no inverse function to map values back).
  // The first assertion implies the view starts as ["1", "2", "3", "4"].
  private static void assertTransformModifiable(List<String> list) {
    try {
      list.add("5");
      fail("transformed list is addable");
    } catch (UnsupportedOperationException expected) {}
    list.remove(0);
    assertEquals(asList("2", "3", "4"), list);
    list.remove("3");
    assertEquals(asList("2", "4"), list);
    try {
      list.set(0, "5");
      fail("transformed list is setable");
    } catch (UnsupportedOperationException expected) {}
    list.clear();
    assertEquals(Collections.emptyList(), list);
  }
  // Live-view semantics of Lists.transform, over both backing kinds.
  public void testTransformViewRandomAccess() {
    List<Integer> fromList = Lists.newArrayList(SOME_LIST);
    List<String> toList = Lists.transform(fromList, SOME_FUNCTION);
    assertTransformView(fromList, toList);
  }
  public void testTransformViewSequential() {
    List<Integer> fromList = Lists.newLinkedList(SOME_SEQUENTIAL_LIST);
    List<String> toList = Lists.transform(fromList, SOME_FUNCTION);
    assertTransformView(fromList, toList);
  }
  // Cumulative mutation script: each assertion depends on the previous
  // steps, so statement order must not change. The first assertion implies
  // fromList starts as [1, 2, 3, 4].
  private static void assertTransformView(List<Integer> fromList,
      List<String> toList) {
    /* fromList modifications reflected in toList */
    fromList.set(0, 5);
    assertEquals(asList("5", "2", "3", "4"), toList);
    fromList.add(6);
    assertEquals(asList("5", "2", "3", "4", "6"), toList);
    fromList.remove(Integer.valueOf(2));
    assertEquals(asList("5", "3", "4", "6"), toList);
    fromList.remove(2);
    assertEquals(asList("5", "3", "6"), toList);
    /* toList modifications reflected in fromList */
    toList.remove(2);
    assertEquals(asList(5, 3), fromList);
    toList.remove("5");
    assertEquals(asList(3), fromList);
    toList.clear();
    assertEquals(Collections.emptyList(), fromList);
  }
  // The transformed view preserves the RandomAccess marker of its backing list.
  public void testTransformRandomAccess() {
    List<String> list = Lists.transform(SOME_LIST, SOME_FUNCTION);
    assertTrue(list instanceof RandomAccess);
  }
  public void testTransformSequential() {
    List<String> list = Lists.transform(SOME_SEQUENTIAL_LIST, SOME_FUNCTION);
    assertFalse(list instanceof RandomAccess);
  }
  // ListIterator behavior of the transformed view, both backing kinds.
  public void testTransformListIteratorRandomAccess() {
    List<Integer> fromList = Lists.newArrayList(SOME_LIST);
    List<String> list = Lists.transform(fromList, SOME_FUNCTION);
    assertTransformListIterator(list);
  }
  public void testTransformListIteratorSequential() {
    List<Integer> fromList = Lists.newLinkedList(SOME_SEQUENTIAL_LIST);
    List<String> list = Lists.transform(fromList, SOME_FUNCTION);
    assertTransformListIterator(list);
  }
  // Walks a ListIterator of the transformed view forward past the end, then
  // backward past the beginning, checking indices and NoSuchElementException
  // at both boundaries; then verifies remove() writes through while add()
  // and set() are unsupported. Assumes the view is ["1", "2", "3", "4"]
  // (implied by the first next() assertions). Order-sensitive throughout.
  private static void assertTransformListIterator(List<String> list) {
    ListIterator<String> iterator = list.listIterator(1);
    assertEquals(1, iterator.nextIndex());
    assertEquals("2", iterator.next());
    assertEquals("3", iterator.next());
    assertEquals("4", iterator.next());
    assertEquals(4, iterator.nextIndex());
    try {
      iterator.next();
      fail("did not detect end of list");
    } catch (NoSuchElementException expected) {}
    assertEquals(3, iterator.previousIndex());
    assertEquals("4", iterator.previous());
    assertEquals("3", iterator.previous());
    assertEquals("2", iterator.previous());
    assertTrue(iterator.hasPrevious());
    assertEquals("1", iterator.previous());
    assertFalse(iterator.hasPrevious());
    assertEquals(-1, iterator.previousIndex());
    try {
      iterator.previous();
      fail("did not detect beginning of list");
    } catch (NoSuchElementException expected) {}
    // remove() removes the element last returned ("1" here).
    iterator.remove();
    assertEquals(asList("2", "3", "4"), list);
    assertFalse(list.isEmpty());
    // An UnsupportedOperationException or IllegalStateException may occur.
    try {
      iterator.add("1");
      fail("transformed list iterator is addable");
    } catch (UnsupportedOperationException expected) {
    } catch (IllegalStateException expected) {}
    try {
      iterator.set("1");
      fail("transformed list iterator is settable");
    } catch (UnsupportedOperationException expected) {
    } catch (IllegalStateException expected) {}
  }
  // Plain Iterator behavior of the transformed view, both backing kinds.
  public void testTransformIteratorRandomAccess() {
    List<Integer> fromList = Lists.newArrayList(SOME_LIST);
    List<String> list = Lists.transform(fromList, SOME_FUNCTION);
    assertTransformIterator(list);
  }
  public void testTransformIteratorSequential() {
    List<Integer> fromList = Lists.newLinkedList(SOME_SEQUENTIAL_LIST);
    List<String> list = Lists.transform(fromList, SOME_FUNCTION);
    assertTransformIterator(list);
  }
  /**
   * We use this class to avoid the need to suppress generics checks with
   * easy mock.
   */
  private interface IntegerList extends List<Integer> {}
/**
* This test depends on the fact that {@code AbstractSequentialList.iterator}
* transforms the {@code iterator()} call into a call on {@code
* listIterator(int)}. This is fine because the behavior is clearly
* documented so it's not expected to change.
*/
@GwtIncompatible("EsayMock")
public void testTransformedSequentialIterationUsesBackingListIterationOnly() {
List<Integer> randomAccessList = Lists.newArrayList(SOME_SEQUENTIAL_LIST);
ListIterator<Integer> sampleListIterator =
SOME_SEQUENTIAL_LIST.listIterator();
List<Integer> listMock = EasyMock.createMock(IntegerList.class);
EasyMock.expect(listMock.size()).andReturn(SOME_SEQUENTIAL_LIST.size());
EasyMock.expect(listMock.listIterator(0)).andReturn(sampleListIterator);
EasyMock.replay(listMock);
List<String> transform = Lists.transform(listMock, SOME_FUNCTION);
assertTrue(Iterables.elementsEqual(
transform, Lists.transform(randomAccessList, SOME_FUNCTION)));
EasyMock.verify(listMock);
}
  // Walks the transformed view's iterator to the end, checks that next()
  // past the end throws, then verifies remove() deletes the last-returned
  // element ("4") from the backing list. Assumes the view is
  // ["1", "2", "3", "4"] (implied by the next() assertions).
  private static void assertTransformIterator(List<String> list) {
    Iterator<String> iterator = list.iterator();
    assertTrue(iterator.hasNext());
    assertEquals("1", iterator.next());
    assertTrue(iterator.hasNext());
    assertEquals("2", iterator.next());
    assertTrue(iterator.hasNext());
    assertEquals("3", iterator.next());
    assertTrue(iterator.hasNext());
    assertEquals("4", iterator.next());
    assertFalse(iterator.hasNext());
    try {
      iterator.next();
      fail("did not detect end of list");
    } catch (NoSuchElementException expected) {}
    // remove() after the failed next() still removes "4", the element most
    // recently returned by a successful next().
    iterator.remove();
    assertEquals(asList("1", "2", "3"), list);
    assertFalse(iterator.hasNext());
  }
  // A partition size of zero must be rejected.
  public void testPartition_badSize() {
    List<Integer> source = Collections.singletonList(1);
    try {
      Lists.partition(source, 0);
      fail();
    } catch (IllegalArgumentException expected) {
    }
  }
  // An empty source yields no partitions at all.
  public void testPartition_empty() {
    List<Integer> source = Collections.emptyList();
    List<List<Integer>> partitions = Lists.partition(source, 1);
    assertTrue(partitions.isEmpty());
    assertEquals(0, partitions.size());
  }
  // One element, size 1: a single singleton partition.
  public void testPartition_1_1() {
    List<Integer> source = Collections.singletonList(1);
    List<List<Integer>> partitions = Lists.partition(source, 1);
    assertEquals(1, partitions.size());
    assertEquals(Collections.singletonList(1), partitions.get(0));
  }
  // One element, size 2: the final (only) partition may be short.
  public void testPartition_1_2() {
    List<Integer> source = Collections.singletonList(1);
    List<List<Integer>> partitions = Lists.partition(source, 2);
    assertEquals(1, partitions.size());
    assertEquals(Collections.singletonList(1), partitions.get(0));
  }
  // Two elements, size 1: two singleton partitions in order.
  public void testPartition_2_1() {
    List<Integer> source = asList(1, 2);
    List<List<Integer>> partitions = Lists.partition(source, 1);
    assertEquals(2, partitions.size());
    assertEquals(Collections.singletonList(1), partitions.get(0));
    assertEquals(Collections.singletonList(2), partitions.get(1));
  }
  // Three elements, size 2: trailing partition holds the remainder.
  public void testPartition_3_2() {
    List<Integer> source = asList(1, 2, 3);
    List<List<Integer>> partitions = Lists.partition(source, 2);
    assertEquals(2, partitions.size());
    assertEquals(asList(1, 2), partitions.get(0));
    assertEquals(asList(3), partitions.get(1));
  }
  // The outer partition list and each inner partition inherit the
  // RandomAccess marker from the source list.
  @GwtIncompatible("ArrayList.subList doesn't implement RandomAccess in GWT.")
  public void testPartitionRandomAccessTrue() {
    List<Integer> source = asList(1, 2, 3);
    List<List<Integer>> partitions = Lists.partition(source, 2);
    assertTrue("partition should be RandomAccess, but not: "
        + partitions.getClass(),
        partitions instanceof RandomAccess);
    assertTrue("partition[0] should be RandomAccess, but not: "
        + partitions.get(0).getClass(),
        partitions.get(0) instanceof RandomAccess);
    assertTrue("partition[1] should be RandomAccess, but not: "
        + partitions.get(1).getClass(),
        partitions.get(1) instanceof RandomAccess);
  }
  // Conversely, a sequential source yields non-RandomAccess partitions.
  public void testPartitionRandomAccessFalse() {
    List<Integer> source = Lists.newLinkedList(asList(1, 2, 3));
    List<List<Integer>> partitions = Lists.partition(source, 2);
    assertFalse(partitions instanceof RandomAccess);
    assertFalse(partitions.get(0) instanceof RandomAccess);
    assertFalse(partitions.get(1) instanceof RandomAccess);
  }
  // TODO: use the ListTestSuiteBuilder
  public void testPartition_view() {
    List<Integer> list = asList(1, 2, 3);
    List<List<Integer>> partitions = Lists.partition(list, 3);
    // Changes before the partition is retrieved are reflected
    list.set(0, 3);
    Iterator<List<Integer>> iterator = partitions.iterator();
    // Changes made after creating the iterator, but before retrieving the
    // partition, are reflected too
    list.set(1, 4);
    List<Integer> first = iterator.next();
    // Changes after are too (unlike Iterables.partition)
    list.set(2, 5);
    assertEquals(asList(3, 4, 5), first);
    // Changes to a sublist also write through to the original list
    first.set(1, 6);
    assertEquals(asList(3, 6, 5), list);
  }
  // Partition sizes near Integer.MAX_VALUE must not overflow the outer size.
  public void testPartitionSize_1() {
    List<Integer> list = asList(1, 2, 3);
    assertEquals(1, Lists.partition(list, Integer.MAX_VALUE).size());
    assertEquals(1, Lists.partition(list, Integer.MAX_VALUE - 1).size());
  }
  // 2^30 + 1 elements split into chunks of 2^30 yields exactly 2 partitions.
  @GwtIncompatible("cannot do such a big explicit copy")
  public void testPartitionSize_2() {
    assertEquals(2, Lists.partition(Collections.nCopies(0x40000001, 1), 0x40000000).size());
  }
// These tests are quick and basic and don't actually show unmodifiability...
}
| |
/*******************************************************************************
* Copyright (c) 2011 Google, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Google, Inc. - initial API and implementation
*******************************************************************************/
package org.wso2.developerstudio.workspaceselector.utils;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
import org.eclipse.swt.SWT;
import org.eclipse.swt.graphics.Color;
import org.eclipse.swt.graphics.Cursor;
import org.eclipse.swt.graphics.Font;
import org.eclipse.swt.graphics.FontData;
import org.eclipse.swt.graphics.GC;
import org.eclipse.swt.graphics.Image;
import org.eclipse.swt.graphics.ImageData;
import org.eclipse.swt.graphics.RGB;
import org.eclipse.swt.graphics.Rectangle;
import org.eclipse.swt.widgets.Display;
/**
* Utility class for managing OS resources associated with SWT controls such as colors, fonts, images, etc.
* <p>
* !!! IMPORTANT !!! Application code must explicitly invoke the <code>dispose()</code> method to release the
* operating system resources managed by cached objects when those objects and OS resources are no longer
* needed (e.g. on application shutdown)
* <p>
* This class may be freely distributed as part of any application or plugin.
* <p>
* @author scheglov_ke
* @author Dan Rubel
*/
public class SWTResourceManager {
	////////////////////////////////////////////////////////////////////////////
	//
	// Color
	//
	////////////////////////////////////////////////////////////////////////////
	// Cache of RGB -> allocated Color so each distinct RGB allocates at most
	// one OS color resource; entries are released via disposeColors().
	private static Map<RGB, Color> m_colorMap = new HashMap<RGB, Color>();
	/**
	 * Returns the system {@link Color} matching the specific ID.
	 * <p>
	 * System colors are owned by the {@link Display}; callers must not dispose
	 * them, and they are deliberately not added to {@code m_colorMap}.
	 *
	 * @param systemColorID
	 *            the ID value for the color
	 * @return the system {@link Color} matching the specific ID
	 */
	public static Color getColor(int systemColorID) {
		// NOTE(review): Display.getCurrent() returns null when called from a
		// non-UI thread — this method assumes it runs on the UI thread.
		Display display = Display.getCurrent();
		return display.getSystemColor(systemColorID);
	}
	/**
	 * Returns a {@link Color} given its red, green and blue component values.
	 * Delegates to {@link #getColor(RGB)}, so the result is cached.
	 *
	 * @param r
	 *            the red component of the color
	 * @param g
	 *            the green component of the color
	 * @param b
	 *            the blue component of the color
	 * @return the {@link Color} matching the given red, green and blue component values
	 */
	public static Color getColor(int r, int g, int b) {
		return getColor(new RGB(r, g, b));
	}
	/**
	 * Returns a {@link Color} given its RGB value. The color is allocated on
	 * first request and cached; it remains valid until {@link #disposeColors()}
	 * is called, so callers must not dispose it themselves.
	 * <p>
	 * NOTE(review): not thread-safe — assumes UI-thread-only access, like the
	 * rest of this class.
	 *
	 * @param rgb
	 *            the {@link RGB} value of the color
	 * @return the {@link Color} matching the RGB value
	 */
	public static Color getColor(RGB rgb) {
		Color color = m_colorMap.get(rgb);
		if (color == null) {
			Display display = Display.getCurrent();
			color = new Color(display, rgb);
			m_colorMap.put(rgb, color);
		}
		return color;
	}
	/**
	 * Dispose of all the cached {@link Color}'s. Any {@link Color} previously
	 * returned by {@link #getColor(RGB)} becomes invalid after this call.
	 */
	public static void disposeColors() {
		for (Color color : m_colorMap.values()) {
			color.dispose();
		}
		m_colorMap.clear();
	}
	////////////////////////////////////////////////////////////////////////////
	//
	// Image
	//
	////////////////////////////////////////////////////////////////////////////
	/**
	 * Maps image paths (or class-qualified resource keys) to images.
	 * Entries are released via {@link #disposeImages()}.
	 */
	private static Map<String, Image> m_imageMap = new HashMap<String, Image>();
/**
* Returns an {@link Image} encoded by the specified {@link InputStream}.
*
* @param stream
* the {@link InputStream} encoding the image data
* @return the {@link Image} encoded by the specified input stream
*/
protected static Image getImage(InputStream stream) throws IOException {
try {
Display display = Display.getCurrent();
ImageData data = new ImageData(stream);
if (data.transparentPixel > 0) {
return new Image(display, data, data.getTransparencyMask());
}
return new Image(display, data);
} finally {
stream.close();
}
}
	/**
	 * Returns an {@link Image} stored in the file at the specified path.
	 * Results are cached by path; on any failure (missing file, unreadable
	 * format) a red placeholder image is returned and cached under the same
	 * path, so subsequent calls do not retry the load.
	 *
	 * @param path
	 *            the path to the image file
	 * @return the {@link Image} stored in the file at the specified path
	 */
	public static Image getImage(String path) {
		Image image = m_imageMap.get(path);
		if (image == null) {
			try {
				image = getImage(new FileInputStream(path));
				m_imageMap.put(path, image);
			} catch (Exception e) {
				// Deliberate best-effort: never propagate load failures to UI
				// code; fall back to a visible placeholder instead.
				image = getMissingImage();
				m_imageMap.put(path, image);
			}
		}
		return image;
	}
	/**
	 * Returns an {@link Image} stored in the file at the specified path relative to the specified class.
	 * Cached under the key {@code clazz.getName() + '|' + path} so the same
	 * path resolved against different classes caches independently; failures
	 * fall back to (and cache) the red placeholder image.
	 *
	 * @param clazz
	 *            the {@link Class} relative to which to find the image
	 * @param path
	 *            the path to the image file, if starts with <code>'/'</code>
	 * @return the {@link Image} stored in the file at the specified path
	 */
	public static Image getImage(Class<?> clazz, String path) {
		String key = clazz.getName() + '|' + path;
		Image image = m_imageMap.get(key);
		if (image == null) {
			try {
				image = getImage(clazz.getResourceAsStream(path));
				m_imageMap.put(key, image);
			} catch (Exception e) {
				// Best-effort: cache the placeholder so the lookup isn't retried.
				image = getMissingImage();
				m_imageMap.put(key, image);
			}
		}
		return image;
	}
	// Edge length, in pixels, of the placeholder image below.
	private static final int MISSING_IMAGE_SIZE = 10;
	/**
	 * @return the small {@link Image} that can be used as placeholder for missing image.
	 *         A solid red 10x10 square, freshly allocated on each call (the
	 *         callers cache it in {@code m_imageMap}).
	 */
	private static Image getMissingImage() {
		Image image = new Image(Display.getCurrent(), MISSING_IMAGE_SIZE, MISSING_IMAGE_SIZE);
		//
		GC gc = new GC(image);
		gc.setBackground(getColor(SWT.COLOR_RED));
		gc.fillRectangle(0, 0, MISSING_IMAGE_SIZE, MISSING_IMAGE_SIZE);
		gc.dispose();
		//
		return image;
	}
	/**
	 * Style constant for placing decorator image in top left corner of base image.
	 */
	public static final int TOP_LEFT = 1;
	/**
	 * Style constant for placing decorator image in top right corner of base image.
	 */
	public static final int TOP_RIGHT = 2;
	/**
	 * Style constant for placing decorator image in bottom left corner of base image.
	 */
	public static final int BOTTOM_LEFT = 3;
	/**
	 * Style constant for placing decorator image in bottom right corner of base image.
	 */
	public static final int BOTTOM_RIGHT = 4;
	/**
	 * Internal value. One past the largest valid corner constant; used both as
	 * the upper bound of the corner-validity check and as the length of
	 * {@code m_decoratedImageMap}.
	 */
	protected static final int LAST_CORNER_KEY = 5;
	/**
	 * Maps images to decorated images. Indexed by corner constant; each slot
	 * maps base image -> (decorator image -> composed result). Index 0 is
	 * unused since corner constants start at 1.
	 */
	@SuppressWarnings("unchecked")
	private static Map<Image, Map<Image, Image>>[] m_decoratedImageMap = new Map[LAST_CORNER_KEY];
	/**
	 * Returns an {@link Image} composed of a base image decorated by another image.
	 * Convenience overload that places the decorator in the bottom-right corner.
	 *
	 * @param baseImage
	 *            the base {@link Image} that should be decorated
	 * @param decorator
	 *            the {@link Image} to decorate the base image
	 * @return {@link Image} The resulting decorated image
	 */
	public static Image decorateImage(Image baseImage, Image decorator) {
		return decorateImage(baseImage, decorator, BOTTOM_RIGHT);
	}
	/**
	 * Returns an {@link Image} composed of a base image decorated by another image.
	 * Results are cached per (corner, base, decorator) triple, so the same
	 * combination is composed only once; cached results are released by
	 * {@link #disposeImages()}.
	 *
	 * @param baseImage
	 *            the base {@link Image} that should be decorated
	 * @param decorator
	 *            the {@link Image} to decorate the base image
	 * @param corner
	 *            the corner to place decorator image
	 * @return the resulting decorated {@link Image}
	 * @throws IllegalArgumentException
	 *             if {@code corner} is not one of the four corner constants
	 */
	public static Image decorateImage(final Image baseImage, final Image decorator, final int corner) {
		if (corner <= 0 || corner >= LAST_CORNER_KEY) {
			throw new IllegalArgumentException("Wrong decorate corner");
		}
		// Lazily create the two nested cache levels for this corner/base pair.
		Map<Image, Map<Image, Image>> cornerDecoratedImageMap = m_decoratedImageMap[corner];
		if (cornerDecoratedImageMap == null) {
			cornerDecoratedImageMap = new HashMap<Image, Map<Image, Image>>();
			m_decoratedImageMap[corner] = cornerDecoratedImageMap;
		}
		Map<Image, Image> decoratedMap = cornerDecoratedImageMap.get(baseImage);
		if (decoratedMap == null) {
			decoratedMap = new HashMap<Image, Image>();
			cornerDecoratedImageMap.put(baseImage, decoratedMap);
		}
		//
		Image result = decoratedMap.get(decorator);
		if (result == null) {
			Rectangle bib = baseImage.getBounds();
			Rectangle dib = decorator.getBounds();
			// Compose: draw the base, then overlay the decorator at the
			// requested corner, offset by the two images' sizes.
			// NOTE(review): the result image is created without an alpha
			// channel, so transparency of the base image may be lost — confirm
			// whether that matters for callers.
			result = new Image(Display.getCurrent(), bib.width, bib.height);
			//
			GC gc = new GC(result);
			gc.drawImage(baseImage, 0, 0);
			if (corner == TOP_LEFT) {
				gc.drawImage(decorator, 0, 0);
			} else if (corner == TOP_RIGHT) {
				gc.drawImage(decorator, bib.width - dib.width, 0);
			} else if (corner == BOTTOM_LEFT) {
				gc.drawImage(decorator, 0, bib.height - dib.height);
			} else if (corner == BOTTOM_RIGHT) {
				gc.drawImage(decorator, bib.width - dib.width, bib.height - dib.height);
			}
			gc.dispose();
			//
			decoratedMap.put(decorator, result);
		}
		return result;
	}
	/**
	 * Dispose all of the cached {@link Image}'s: both the images loaded via
	 * {@code getImage(...)} and the composed results of
	 * {@code decorateImage(...)}. Previously returned images become invalid.
	 */
	public static void disposeImages() {
		// dispose loaded images
		{
			for (Image image : m_imageMap.values()) {
				image.dispose();
			}
			m_imageMap.clear();
		}
		// dispose decorated images
		for (int i = 0; i < m_decoratedImageMap.length; i++) {
			Map<Image, Map<Image, Image>> cornerDecoratedImageMap = m_decoratedImageMap[i];
			if (cornerDecoratedImageMap != null) {
				for (Map<Image, Image> decoratedMap : cornerDecoratedImageMap.values()) {
					for (Image image : decoratedMap.values()) {
						image.dispose();
					}
					decoratedMap.clear();
				}
				cornerDecoratedImageMap.clear();
			}
		}
	}
	////////////////////////////////////////////////////////////////////////////
	//
	// Font
	//
	////////////////////////////////////////////////////////////////////////////
	/**
	 * Maps font names to fonts. Keys are composite strings of the form
	 * {@code name|size|style|strikeout|underline} (see {@code getFont}).
	 */
	private static Map<String, Font> m_fontMap = new HashMap<String, Font>();
	/**
	 * Maps fonts to their bold versions.
	 */
	private static Map<Font, Font> m_fontToBoldFontMap = new HashMap<Font, Font>();
	/**
	 * Returns a {@link Font} based on its name, height and style. Delegates to
	 * the five-argument overload with strikeout and underline disabled, so the
	 * result is cached.
	 *
	 * @param name
	 *            the name of the font
	 * @param height
	 *            the height of the font
	 * @param style
	 *            the style of the font
	 * @return {@link Font} The font matching the name, height and style
	 */
	public static Font getFont(String name, int height, int style) {
		return getFont(name, height, style, false, false);
	}
/**
 * Returns a {@link Font} based on its name, height and style. Windows-specific strikeout and underline
 * flags are also supported. The font is created on first request and cached; callers must not
 * dispose it (see {@code disposeFonts()}).
 *
 * @param name
 *          the name of the font
 * @param size
 *          the size of the font
 * @param style
 *          the style of the font
 * @param strikeout
 *          the strikeout flag (warning: Windows only)
 * @param underline
 *          the underline flag (warning: Windows only)
 * @return {@link Font} The font matching the name, height, style, strikeout and underline
 */
public static Font getFont(String name, int size, int style, boolean strikeout, boolean underline) {
    // Cache key encodes every attribute that distinguishes one font from another.
    String fontName = name + '|' + size + '|' + style + '|' + strikeout + '|' + underline;
    Font font = m_fontMap.get(fontName);
    if (font == null) {
        FontData fontData = new FontData(name, size, style);
        if (strikeout || underline) {
            // Strikeout/underline are not exposed by the public FontData API, so the
            // Windows-internal LOGFONT structure is poked via reflection. On non-Windows
            // platforms the reflection fails and the flags are ignored with a warning.
            try {
                Class<?> logFontClass = Class.forName("org.eclipse.swt.internal.win32.LOGFONT"); //$NON-NLS-1$
                Object logFont = FontData.class.getField("data").get(fontData); //$NON-NLS-1$
                if (logFont != null && logFontClass != null) {
                    if (strikeout) {
                        logFontClass.getField("lfStrikeOut").set(logFont, Byte.valueOf((byte) 1)); //$NON-NLS-1$
                    }
                    if (underline) {
                        logFontClass.getField("lfUnderline").set(logFont, Byte.valueOf((byte) 1)); //$NON-NLS-1$
                    }
                }
            } catch (Throwable e) {
                System.err.println("Unable to set underline or strikeout" + " (probably on a non-Windows platform). " + e); //$NON-NLS-1$ //$NON-NLS-2$
            }
        }
        font = new Font(Display.getCurrent(), fontData);
        m_fontMap.put(fontName, font);
    }
    return font;
}
/**
 * Returns (creating and caching on first request) the bold variant of the given
 * {@link Font}. The returned font is cached; callers must not dispose it.
 *
 * @param baseFont
 *          the {@link Font} for which a bold version is desired
 * @return the bold version of the given {@link Font}
 */
public static Font getBoldFont(Font baseFont) {
    Font bold = m_fontToBoldFontMap.get(baseFont);
    if (bold != null) {
        return bold;
    }
    FontData data = baseFont.getFontData()[0];
    bold = new Font(Display.getCurrent(), data.getName(), data.getHeight(), SWT.BOLD);
    m_fontToBoldFontMap.put(baseFont, bold);
    return bold;
}
/**
 * Disposes every cached {@link Font} — both the plain fonts and their bold
 * variants — and empties both caches.
 */
public static void disposeFonts() {
    // Plain fonts created by getFont(...).
    for (Font cached : m_fontMap.values()) {
        cached.dispose();
    }
    m_fontMap.clear();
    // Bold variants created by getBoldFont(...).
    for (Font boldVariant : m_fontToBoldFontMap.values()) {
        boldVariant.dispose();
    }
    m_fontToBoldFontMap.clear();
}
////////////////////////////////////////////////////////////////////////////
//
// Cursor
//
////////////////////////////////////////////////////////////////////////////
/**
 * Cursor cache keyed by the id passed to {@code getCursor(int)}; entries are
 * disposed by {@code disposeCursors()}.
 */
private static Map<Integer, Cursor> m_idToCursorMap = new HashMap<Integer, Cursor>();
/**
 * Returns the system cursor matching the specific ID, creating and caching it on
 * the first request. The returned cursor is cached; callers must not dispose it.
 *
 * @param id
 *          int The ID value for the cursor
 * @return Cursor The system cursor matching the specific ID
 */
public static Cursor getCursor(int id) {
    Integer key = Integer.valueOf(id);
    Cursor cached = m_idToCursorMap.get(key);
    if (cached != null) {
        return cached;
    }
    Cursor created = new Cursor(Display.getDefault(), id);
    m_idToCursorMap.put(key, created);
    return created;
}
/**
 * Disposes every cached cursor and empties the cursor cache.
 */
public static void disposeCursors() {
    for (Cursor cached : m_idToCursorMap.values()) {
        cached.dispose();
    }
    m_idToCursorMap.clear();
}
////////////////////////////////////////////////////////////////////////////
//
// General
//
////////////////////////////////////////////////////////////////////////////
/**
 * Dispose of cached objects and their underlying OS resources. This should only be called when the cached
 * objects are no longer needed (e.g. on application shutdown).
 */
public static void dispose() {
    // Each helper disposes one resource family and clears its cache(s).
    disposeColors();
    disposeImages();
    disposeFonts();
    disposeCursors();
}
}
| |
/*
* Copyright 2014 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.dashbuilder.dataset.client.engine;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import javax.enterprise.context.ApplicationScoped;
import javax.inject.Inject;
import org.dashbuilder.dataset.DataColumn;
import org.dashbuilder.dataset.DataSet;
import org.dashbuilder.dataset.date.Quarter;
import org.dashbuilder.dataset.engine.DataSetHandler;
import org.dashbuilder.dataset.engine.group.IntervalBuilder;
import org.dashbuilder.dataset.engine.group.IntervalList;
import org.dashbuilder.dataset.group.ColumnGroup;
import org.dashbuilder.dataset.group.DateIntervalType;
import org.dashbuilder.dataset.group.Interval;
import org.dashbuilder.dataset.sort.ColumnSort;
import org.dashbuilder.dataset.sort.DataSetSort;
import org.dashbuilder.dataset.sort.SortOrder;
import org.dashbuilder.dataset.sort.SortedList;
import static org.dashbuilder.dataset.group.DateIntervalType.*;
/**
 * Interval builder for date columns which generates intervals depending on the underlying data
 * available: the interval size (year, quarter, month ... second) is derived from the min/max
 * dates found in the data and the maximum number of intervals allowed by the {@link ColumnGroup}.
 */
@ApplicationScoped
public class ClientIntervalBuilderDynamicDate implements IntervalBuilder {
    // Used to format interval names; only set when the injecting constructor is used.
    private ClientDateFormatter dateFormatter;
    public ClientIntervalBuilderDynamicDate() {
    }
    @Inject
    public ClientIntervalBuilderDynamicDate(ClientDateFormatter dateFormatter) {
        this.dateFormatter = dateFormatter;
    }
    /**
     * Builds the date intervals covering every value of the grouped column, attaching to each
     * interval the rows whose date falls inside it.
     *
     * @param handler gives access to the data set being grouped
     * @param columnGroup the group settings (source column id, interval size, max intervals, order)
     * @return an {@code IntervalDateRangeList}; empty when the column has no values or the sorted
     *         row list is null/empty
     */
    public IntervalList build(DataSetHandler handler, ColumnGroup columnGroup) {
        IntervalDateRangeList results = new IntervalDateRangeList(columnGroup);
        DataSet dataSet = handler.getDataSet();
        List values = dataSet.getColumnById(columnGroup.getSourceId()).getValues();
        if (values.isEmpty()) {
            return results;
        }
        // Sort the column dates.
        DataSetSort sortOp = new DataSetSort();
        sortOp.addSortColumn(new ColumnSort(columnGroup.getSourceId(), SortOrder.ASCENDING));
        DataSetHandler sortResults = handler.sort(sortOp);
        List<Integer> sortedRows = sortResults.getRows();
        if (sortedRows == null || sortedRows.isEmpty()) {
            return results;
        }
        // Get the lower & upper limits (the scans skip null date values at either end).
        SortedList sortedValues = new SortedList(values, sortedRows);
        Date minDate = null;
        Date maxDate = null;
        for (int i = 0; minDate == null && i < sortedValues.size(); i++) {
            minDate = (Date) sortedValues.get(i);
        }
        for (int i = sortedValues.size()-1; maxDate == null && i >= 0; i--) {
            maxDate = (Date) sortedValues.get(i);
        }
        // If min/max are equals then create a single interval.
        DateIntervalType intervalType = calculateIntervalSize(minDate, maxDate, columnGroup);
        if (minDate == null || minDate.compareTo(maxDate) == 0) {
            IntervalDateRange interval = new IntervalDateRange(0, intervalType, minDate, maxDate);
            for (int row = 0; row < sortedValues.size(); row++) interval.getRows().add(row);
            results.add(interval);
            // NOTE(review): this branch stores columnGroup.getIntervalSize() as the interval type
            // while the general path below stores intervalType.toString() — confirm the asymmetry
            // is intended.
            results.setIntervalType(columnGroup.getIntervalSize());
            results.setMinValue(minDate);
            results.setMaxValue(maxDate);
            return results;
        }
        // Create the intervals according to the min/max dates.
        Date intervalMinDate = firstIntervalDate(intervalType, minDate, columnGroup);
        int index = 0;
        int counter = 0;
        while (intervalMinDate.compareTo(maxDate) <= 0) {
            // Go to the next interval
            Date intervalMaxDate = nextIntervalDate(intervalMinDate, intervalType, 1);
            // Create the interval.
            IntervalDateRange interval = new IntervalDateRange(counter++, intervalType, intervalMinDate, intervalMaxDate);
            results.add(interval);
            // Add the target rows to the interval. Values are sorted ascending, so a single
            // forward scan assigns each row to exactly one interval (nulls are skipped).
            boolean stop = false;
            while (!stop) {
                if (index >= sortedValues.size()) {
                    stop = true;
                } else {
                    Date dateValue = (Date) sortedValues.get(index);
                    Integer row = sortedRows.get(index);
                    if (dateValue == null) {
                        index++;
                    } else if (dateValue.before(intervalMaxDate)) {
                        interval.getRows().add(row);
                        index++;
                    } else {
                        stop = true;
                    }
                }
            }
            // Move to the next interval.
            intervalMinDate = intervalMaxDate;
        }
        // Reverse intervals if requested
        boolean asc = columnGroup.isAscendingOrder();
        if (!asc) Collections.reverse( results );
        results.setIntervalType(intervalType.toString());
        results.setMinValue(minDate);
        results.setMaxValue(maxDate);
        return results;
    }
    /**
     * Builds the interval list from the column's own metadata (min/max values and interval type)
     * without scanning the data set; the returned intervals carry no rows. Falls back to YEAR
     * when the column's interval type is unknown.
     */
    public IntervalList build(DataColumn dataColumn) {
        ColumnGroup columnGroup = dataColumn.getColumnGroup();
        Date minDate = (Date) dataColumn.getMinValue();
        Date maxDate = (Date) dataColumn.getMaxValue();
        IntervalDateRangeList results = new IntervalDateRangeList(columnGroup);
        if (minDate == null || maxDate == null) {
            return results;
        }
        DateIntervalType intervalType = DateIntervalType.getByName(dataColumn.getIntervalType());
        if (intervalType == null) {
            intervalType = DateIntervalType.YEAR;
        }
        Date intervalMinDate = firstIntervalDate(intervalType, minDate, columnGroup);
        int counter = 0;
        while (intervalMinDate.compareTo(maxDate) <= 0) {
            // Go to the next interval
            Date intervalMaxDate = nextIntervalDate(intervalMinDate, intervalType, 1);
            // Create the interval.
            IntervalDateRange interval = new IntervalDateRange(counter++, intervalType, intervalMinDate, intervalMaxDate);
            results.add(interval);
            // Move to the next interval.
            intervalMinDate = intervalMaxDate;
        }
        // Reverse intervals if requested
        boolean asc = columnGroup.isAscendingOrder();
        if (!asc) Collections.reverse( results );
        results.setIntervalType(intervalType.toString());
        results.setMinValue(minDate);
        results.setMaxValue(maxDate);
        return results;
    }
    /**
     * Calculates the interval type to use: the first {@link DateIntervalType} whose duration keeps
     * the number of intervals spanning [minDate, maxDate] under the group's max (default 15 when
     * unset), but never smaller than the preferred interval size configured on the column group.
     */
    public DateIntervalType calculateIntervalSize(Date minDate, Date maxDate, ColumnGroup columnGroup) {
        DateIntervalType intervalType = DateIntervalType.getByName(columnGroup.getIntervalSize());
        if (intervalType == null) {
            intervalType = YEAR;
        }
        if (minDate == null || maxDate == null) {
            return intervalType;
        }
        long millis = (maxDate.getTime() - minDate.getTime());
        if (millis <= 0) {
            return intervalType;
        }
        // Calculate the interval type used according to the constraints set.
        int maxIntervals = columnGroup.getMaxIntervals();
        if (maxIntervals < 1) maxIntervals = 15;
        // Iterates the enum in declaration order — presumably smallest duration first, so the
        // first type producing fewer than maxIntervals intervals wins. TODO confirm ordering.
        for (DateIntervalType type : values()) {
            long nintervals = millis / getDurationInMillis(type);
            if (nintervals < maxIntervals) {
                intervalType = type;
                break;
            }
        }
        // Ensure the interval mode obtained is always greater or equals than the preferred interval size.
        DateIntervalType intervalSize = null;
        String preferredSize = columnGroup.getIntervalSize();
        if (preferredSize != null && preferredSize.trim().length() > 0) {
            intervalSize = getByName(columnGroup.getIntervalSize());
        }
        if (intervalSize != null && compare(intervalType, intervalSize) == -1) {
            intervalType = intervalSize;
        }
        return intervalType;
    }
    /**
     * Returns minDate truncated down to the start boundary of the given interval type (e.g. first
     * day of the year/quarter/month, midnight for day-based types).
     * Uses the deprecated java.util.Date setters — presumably because this is GWT client-side
     * code where java.util.Calendar is unavailable; TODO confirm.
     */
    protected Date firstIntervalDate(DateIntervalType intervalType, Date minDate, ColumnGroup columnGroup) {
        Date intervalMinDate = new Date(minDate.getTime());
        if (YEAR.equals(intervalType)) {
            intervalMinDate.setMonth(0);
            intervalMinDate.setDate(1);
            intervalMinDate.setHours(0);
            intervalMinDate.setMinutes(0);
            intervalMinDate.setSeconds(0);
        }
        if (QUARTER.equals(intervalType)) {
            // Rewind to the first month of the quarter, honoring the configured first month of year.
            int currentMonth = intervalMinDate.getMonth();
            int firstMonthYear = columnGroup.getFirstMonthOfYear().getIndex();
            int rest = Quarter.getPositionInQuarter(firstMonthYear, currentMonth + 1);
            intervalMinDate.setMonth(currentMonth - rest);
            intervalMinDate.setDate(1);
            intervalMinDate.setHours(0);
            intervalMinDate.setMinutes(0);
            intervalMinDate.setSeconds(0);
        }
        if (MONTH.equals(intervalType)) {
            intervalMinDate.setDate(1);
            intervalMinDate.setHours(0);
            intervalMinDate.setMinutes(0);
            intervalMinDate.setSeconds(0);
        }
        // NOTE(review): WEEK only truncates to midnight, not to the first day of the week — confirm.
        if (DAY.equals(intervalType) || DAY_OF_WEEK.equals(intervalType) || WEEK.equals(intervalType)) {
            intervalMinDate.setHours(0);
            intervalMinDate.setMinutes(0);
            intervalMinDate.setSeconds(0);
        }
        if (HOUR.equals(intervalType)) {
            intervalMinDate.setMinutes(0);
            intervalMinDate.setSeconds(0);
        }
        if (MINUTE.equals(intervalType)) {
            intervalMinDate.setSeconds(0);
        }
        return intervalMinDate;
    }
    /**
     * Returns the date obtained by advancing the given start date by the given number of intervals
     * of the given type. Relies on java.util.Date normalizing out-of-range fields (e.g. month 13
     * rolls over into the next year). Unknown types advance by years to avoid infinite loops in
     * the build() while-loops.
     */
    protected Date nextIntervalDate(Date intervalMinDate, DateIntervalType intervalType, int intervals) {
        Date intervalMaxDate = new Date(intervalMinDate.getTime());
        if (MILLENIUM.equals(intervalType)) {
            intervalMaxDate.setYear(intervalMinDate.getYear() + 1000 * intervals);
        }
        else if (CENTURY.equals(intervalType)) {
            intervalMaxDate.setYear(intervalMinDate.getYear() + 100 * intervals);
        }
        else if (DECADE.equals(intervalType)) {
            intervalMaxDate.setYear(intervalMinDate.getYear() + 10 * intervals);
        }
        else if (YEAR.equals(intervalType)) {
            intervalMaxDate.setYear(intervalMinDate.getYear() + intervals);
        }
        else if (QUARTER.equals(intervalType)) {
            intervalMaxDate.setMonth(intervalMinDate.getMonth() + 3 * intervals);
        }
        else if (MONTH.equals(intervalType)) {
            intervalMaxDate.setMonth(intervalMinDate.getMonth() + intervals);
        }
        else if (WEEK.equals(intervalType)) {
            intervalMaxDate.setDate(intervalMinDate.getDate() + 7 * intervals);
        }
        else if (DAY.equals(intervalType) || DAY_OF_WEEK.equals(intervalType)) {
            intervalMaxDate.setDate(intervalMinDate.getDate() + intervals);
        }
        else if (HOUR.equals(intervalType)) {
            intervalMaxDate.setHours(intervalMinDate.getHours() + intervals);
        }
        else if (MINUTE.equals(intervalType)) {
            intervalMaxDate.setMinutes(intervalMinDate.getMinutes() + intervals);
        }
        else if (SECOND.equals(intervalType)) {
            intervalMaxDate.setSeconds(intervalMinDate.getSeconds() + intervals);
        }
        else {
            // Default to year to avoid infinite loops
            intervalMaxDate.setYear(intervalMinDate.getYear() + intervals);
        }
        return intervalMaxDate;
    }
    /**
     * A list containing date range intervals.
     */
    public class IntervalDateRangeList extends IntervalList {
        public IntervalDateRangeList(ColumnGroup columnGroup) {
            super(columnGroup);
        }
        /** Returns the interval containing the given date (min inclusive, max exclusive), or null. */
        public Interval locateInterval(Object value) {
            Date d = (Date) value;
            for (Interval interval : this) {
                IntervalDateRange dateRange = (IntervalDateRange) interval;
                if (d.equals(dateRange.getMinDate()) || (d.after(dateRange.getMinDate()) && d.before(dateRange.getMaxDate()))) {
                    return interval;
                }
            }
            return null;
        }
    }
    /**
     * A date interval holding dates belonging to a given range.
     */
    public class IntervalDateRange extends Interval {
        public IntervalDateRange(int index, DateIntervalType intervalType, Date minDate, Date maxDate) {
            // The interval's display name is derived from its start date.
            super(calculateName(intervalType, minDate));
            super.setMinValue(minDate);
            super.setMaxValue(maxDate);
            super.setIndex(index);
            super.setType(intervalType != null ? intervalType.toString() : null);
        }
        public Date getMinDate() {
            return (Date) minValue;
        }
        public Date getMaxDate() {
            return (Date) maxValue;
        }
    }
    /**
     * Formats an interval's display name from its start date with a precision matching the
     * interval type (e.g. "yyyy-MM" for quarters/months). Returns null for unrecognized types.
     */
    public String calculateName(DateIntervalType intervalType, Date d) {
        if (MILLENIUM.equals(intervalType) || CENTURY.equals(intervalType)
                || DECADE.equals(intervalType) || YEAR.equals(intervalType)) {
            return dateFormatter.format(d, "yyyy");
        }
        if (QUARTER.equals(intervalType) || MONTH.equals(intervalType)) {
            return dateFormatter.format(d, "yyyy-MM");
        }
        if (WEEK.equals(intervalType) || DAY.equals(intervalType) || DAY_OF_WEEK.equals(intervalType)) {
            return dateFormatter.format(d, "yyyy-MM-dd");
        }
        if (HOUR.equals(intervalType)) {
            return dateFormatter.format(d, "yyyy-MM-dd HH");
        }
        if (MINUTE.equals(intervalType)) {
            return dateFormatter.format(d, "yyyy-MM-dd HH:mm");
        }
        if (SECOND.equals(intervalType)) {
            return dateFormatter.format(d, "yyyy-MM-dd HH:mm:ss");
        }
        return null;
    }
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.containerservice.fluent.models;
import com.azure.core.annotation.Fluent;
import com.azure.core.management.Resource;
import com.azure.resourcemanager.containerservice.models.NetworkProfile;
import com.azure.resourcemanager.containerservice.models.OpenShiftManagedClusterAgentPoolProfile;
import com.azure.resourcemanager.containerservice.models.OpenShiftManagedClusterAuthProfile;
import com.azure.resourcemanager.containerservice.models.OpenShiftManagedClusterMasterPoolProfile;
import com.azure.resourcemanager.containerservice.models.OpenShiftRouterProfile;
import com.azure.resourcemanager.containerservice.models.PurchasePlan;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.List;
import java.util.Map;
/**
 * OpenShift Managed cluster.
 * <p>
 * Auto-generated fluent model: every property getter delegates to the inner properties bag
 * (returning null when it is absent), and every {@code withXxx} setter lazily instantiates
 * the bag on first use.
 */
@Fluent
public final class OpenShiftManagedClusterInner extends Resource {
    /*
     * Define the resource plan as required by ARM for billing purposes
     */
    @JsonProperty(value = "plan")
    private PurchasePlan plan;
    /*
     * Properties of a OpenShift managed cluster.
     * Lazily instantiated by the withXxx(...) setters below on first use.
     */
    @JsonProperty(value = "properties")
    private OpenShiftManagedClusterProperties innerProperties;
    /**
     * Get the plan property: Define the resource plan as required by ARM for billing purposes.
     *
     * @return the plan value.
     */
    public PurchasePlan plan() {
        return this.plan;
    }
    /**
     * Set the plan property: Define the resource plan as required by ARM for billing purposes.
     *
     * @param plan the plan value to set.
     * @return the OpenShiftManagedClusterInner object itself.
     */
    public OpenShiftManagedClusterInner withPlan(PurchasePlan plan) {
        this.plan = plan;
        return this;
    }
    /**
     * Get the innerProperties property: Properties of a OpenShift managed cluster.
     * May be null until one of the withXxx setters is invoked.
     *
     * @return the innerProperties value.
     */
    private OpenShiftManagedClusterProperties innerProperties() {
        return this.innerProperties;
    }
    /** {@inheritDoc} */
    @Override
    public OpenShiftManagedClusterInner withLocation(String location) {
        super.withLocation(location);
        return this;
    }
    /** {@inheritDoc} */
    @Override
    public OpenShiftManagedClusterInner withTags(Map<String, String> tags) {
        super.withTags(tags);
        return this;
    }
    /**
     * Get the provisioningState property: The current deployment or provisioning state, which only appears in the
     * response.
     *
     * @return the provisioningState value.
     */
    public String provisioningState() {
        return this.innerProperties() == null ? null : this.innerProperties().provisioningState();
    }
    /**
     * Get the openShiftVersion property: Version of OpenShift specified when creating the cluster.
     *
     * @return the openShiftVersion value.
     */
    public String openShiftVersion() {
        return this.innerProperties() == null ? null : this.innerProperties().openShiftVersion();
    }
    /**
     * Set the openShiftVersion property: Version of OpenShift specified when creating the cluster.
     *
     * @param openShiftVersion the openShiftVersion value to set.
     * @return the OpenShiftManagedClusterInner object itself.
     */
    public OpenShiftManagedClusterInner withOpenShiftVersion(String openShiftVersion) {
        if (this.innerProperties() == null) {
            this.innerProperties = new OpenShiftManagedClusterProperties();
        }
        this.innerProperties().withOpenShiftVersion(openShiftVersion);
        return this;
    }
    /**
     * Get the clusterVersion property: Version of OpenShift specified when creating the cluster.
     *
     * @return the clusterVersion value.
     */
    public String clusterVersion() {
        return this.innerProperties() == null ? null : this.innerProperties().clusterVersion();
    }
    /**
     * Get the publicHostname property: Service generated FQDN for OpenShift API server.
     *
     * @return the publicHostname value.
     */
    public String publicHostname() {
        return this.innerProperties() == null ? null : this.innerProperties().publicHostname();
    }
    /**
     * Get the fqdn property: Service generated FQDN for OpenShift API server loadbalancer internal hostname.
     *
     * @return the fqdn value.
     */
    public String fqdn() {
        return this.innerProperties() == null ? null : this.innerProperties().fqdn();
    }
    /**
     * Get the networkProfile property: Configuration for OpenShift networking.
     *
     * @return the networkProfile value.
     */
    public NetworkProfile networkProfile() {
        return this.innerProperties() == null ? null : this.innerProperties().networkProfile();
    }
    /**
     * Set the networkProfile property: Configuration for OpenShift networking.
     *
     * @param networkProfile the networkProfile value to set.
     * @return the OpenShiftManagedClusterInner object itself.
     */
    public OpenShiftManagedClusterInner withNetworkProfile(NetworkProfile networkProfile) {
        if (this.innerProperties() == null) {
            this.innerProperties = new OpenShiftManagedClusterProperties();
        }
        this.innerProperties().withNetworkProfile(networkProfile);
        return this;
    }
    /**
     * Get the routerProfiles property: Configuration for OpenShift router(s).
     *
     * @return the routerProfiles value.
     */
    public List<OpenShiftRouterProfile> routerProfiles() {
        return this.innerProperties() == null ? null : this.innerProperties().routerProfiles();
    }
    /**
     * Set the routerProfiles property: Configuration for OpenShift router(s).
     *
     * @param routerProfiles the routerProfiles value to set.
     * @return the OpenShiftManagedClusterInner object itself.
     */
    public OpenShiftManagedClusterInner withRouterProfiles(List<OpenShiftRouterProfile> routerProfiles) {
        if (this.innerProperties() == null) {
            this.innerProperties = new OpenShiftManagedClusterProperties();
        }
        this.innerProperties().withRouterProfiles(routerProfiles);
        return this;
    }
    /**
     * Get the masterPoolProfile property: Configuration for OpenShift master VMs.
     *
     * @return the masterPoolProfile value.
     */
    public OpenShiftManagedClusterMasterPoolProfile masterPoolProfile() {
        return this.innerProperties() == null ? null : this.innerProperties().masterPoolProfile();
    }
    /**
     * Set the masterPoolProfile property: Configuration for OpenShift master VMs.
     *
     * @param masterPoolProfile the masterPoolProfile value to set.
     * @return the OpenShiftManagedClusterInner object itself.
     */
    public OpenShiftManagedClusterInner withMasterPoolProfile(
        OpenShiftManagedClusterMasterPoolProfile masterPoolProfile) {
        if (this.innerProperties() == null) {
            this.innerProperties = new OpenShiftManagedClusterProperties();
        }
        this.innerProperties().withMasterPoolProfile(masterPoolProfile);
        return this;
    }
    /**
     * Get the agentPoolProfiles property: Configuration of OpenShift cluster VMs.
     *
     * @return the agentPoolProfiles value.
     */
    public List<OpenShiftManagedClusterAgentPoolProfile> agentPoolProfiles() {
        return this.innerProperties() == null ? null : this.innerProperties().agentPoolProfiles();
    }
    /**
     * Set the agentPoolProfiles property: Configuration of OpenShift cluster VMs.
     *
     * @param agentPoolProfiles the agentPoolProfiles value to set.
     * @return the OpenShiftManagedClusterInner object itself.
     */
    public OpenShiftManagedClusterInner withAgentPoolProfiles(
        List<OpenShiftManagedClusterAgentPoolProfile> agentPoolProfiles) {
        if (this.innerProperties() == null) {
            this.innerProperties = new OpenShiftManagedClusterProperties();
        }
        this.innerProperties().withAgentPoolProfiles(agentPoolProfiles);
        return this;
    }
    /**
     * Get the authProfile property: Configures OpenShift authentication.
     *
     * @return the authProfile value.
     */
    public OpenShiftManagedClusterAuthProfile authProfile() {
        return this.innerProperties() == null ? null : this.innerProperties().authProfile();
    }
    /**
     * Set the authProfile property: Configures OpenShift authentication.
     *
     * @param authProfile the authProfile value to set.
     * @return the OpenShiftManagedClusterInner object itself.
     */
    public OpenShiftManagedClusterInner withAuthProfile(OpenShiftManagedClusterAuthProfile authProfile) {
        if (this.innerProperties() == null) {
            this.innerProperties = new OpenShiftManagedClusterProperties();
        }
        this.innerProperties().withAuthProfile(authProfile);
        return this;
    }
    /**
     * Validates the instance.
     * Only validates the nested objects that are present; absent ones are skipped.
     *
     * @throws IllegalArgumentException thrown if the instance is not valid.
     */
    public void validate() {
        if (plan() != null) {
            plan().validate();
        }
        if (innerProperties() != null) {
            innerProperties().validate();
        }
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.namenode.snapshot;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import org.apache.hadoop.hdfs.server.namenode.INode;
import org.apache.hadoop.hdfs.server.namenode.INodeAttributes;
import org.apache.hadoop.hdfs.server.namenode.INodeDirectory;
/**
* A list of snapshot diffs for storing snapshot data.
*
* @param <N> The {@link INode} type.
* @param <D> The diff type, which must extend {@link AbstractINodeDiff}.
*/
abstract class AbstractINodeDiffList<N extends INode,
A extends INodeAttributes,
D extends AbstractINodeDiff<N, A, D>>
implements Iterable<D> {
/** Diff list sorted by snapshot IDs, i.e. in chronological order.
* Created lazily to avoid wasting memory by empty lists. */
private DiffList<D> diffs;
/** @return this list as a unmodifiable {@link List}. */
public final DiffList<D> asList() {
return diffs != null ?
DiffList.unmodifiableList(diffs) : DiffList.emptyList();
}
public boolean isEmpty() {
return diffs == null || diffs.isEmpty();
}
/** Clear the list. */
public void clear() {
diffs = null;
}
/** @return an {@link AbstractINodeDiff}. */
abstract D createDiff(int snapshotId, N currentINode);
/** @return a snapshot copy of the current inode. */
abstract A createSnapshotCopy(N currentINode);
/**
* Delete a snapshot. The synchronization of the diff list will be done
* outside. If the diff to remove is not the first one in the diff list, we
* need to combine the diff with its previous one.
*
* @param reclaimContext blocks and inodes that need to be reclaimed
* @param snapshot The id of the snapshot to be deleted
* @param prior The id of the snapshot taken before the to-be-deleted snapshot
* @param currentINode the inode where the snapshot diff is deleted
*/
public final void deleteSnapshotDiff(INode.ReclaimContext reclaimContext,
final int snapshot, final int prior, final N currentINode) {
if (diffs == null) {
return;
}
int snapshotIndex = diffs.binarySearch(snapshot);
D removed;
if (snapshotIndex == 0) {
if (prior != Snapshot.NO_SNAPSHOT_ID) { // there is still snapshot before
// set the snapshot to latestBefore
diffs.get(snapshotIndex).setSnapshotId(prior);
} else { // there is no snapshot before
removed = diffs.remove(0);
if (diffs.isEmpty()) {
diffs = null;
}
removed.destroyDiffAndCollectBlocks(reclaimContext, currentINode);
}
} else if (snapshotIndex > 0) {
final AbstractINodeDiff<N, A, D> previous = diffs.get(snapshotIndex - 1);
if (previous.getSnapshotId() != prior) {
diffs.get(snapshotIndex).setSnapshotId(prior);
} else {
// combine the to-be-removed diff with its previous diff
removed = diffs.remove(snapshotIndex);
if (previous.snapshotINode == null) {
previous.snapshotINode = removed.snapshotINode;
}
previous.combinePosteriorAndCollectBlocks(reclaimContext, currentINode,
removed);
previous.setPosterior(removed.getPosterior());
removed.setPosterior(null);
}
}
}
/** Add an {@link AbstractINodeDiff} for the given snapshot. */
final D addDiff(int latestSnapshotId, N currentINode) {
return addLast(createDiff(latestSnapshotId, currentINode));
}
/** Append the diff at the end of the list. */
private D addLast(D diff) {
createDiffsIfNeeded();
final D last = getLast();
diffs.addLast(diff);
if (last != null) {
last.setPosterior(diff);
}
return diff;
}
/** Add the diff to the beginning of the list. */
final void addFirst(D diff) {
createDiffsIfNeeded();
final D first = diffs.isEmpty()? null : diffs.get(0);
diffs.addFirst(diff);
diff.setPosterior(first);
}
/** @return the first diff. */
final D getFirst() {
return diffs == null || diffs.isEmpty()? null: diffs.get(0);
}
/** @return the first snapshot INode. */
final A getFirstSnapshotINode() {
final D first = getFirst();
return first == null? null: first.getSnapshotINode();
}
/** @return the last diff. */
public final D getLast() {
if (diffs == null) {
return null;
}
int n = diffs.size();
return n == 0 ? null : diffs.get(n - 1);
}
DiffList<D> newDiffs() {
return new DiffListByArrayList<>(
INodeDirectory.DEFAULT_FILES_PER_DIRECTORY);
}
private void createDiffsIfNeeded() {
if (diffs == null) {
diffs = newDiffs();
}
}
/** @return the id of the last snapshot. */
public final int getLastSnapshotId() {
final AbstractINodeDiff<N, A, D> last = getLast();
return last == null ? Snapshot.CURRENT_STATE_ID : last.getSnapshotId();
}
/**
* Find the latest snapshot before a given snapshot.
* @param anchorId The returned snapshot's id must be <= or < this
* given snapshot id.
* @param exclusive True means the returned snapshot's id must be < the
* given id, otherwise <=.
* @return The id of the latest snapshot before the given snapshot.
*/
public final int getPrior(int anchorId, boolean exclusive) {
if (diffs == null) {
return Snapshot.NO_SNAPSHOT_ID;
}
if (anchorId == Snapshot.CURRENT_STATE_ID) {
int last = getLastSnapshotId();
if (exclusive && last == anchorId) {
return Snapshot.NO_SNAPSHOT_ID;
}
return last;
}
final int i = diffs.binarySearch(anchorId);
if (exclusive) { // must be the one before
if (i == -1 || i == 0) {
return Snapshot.NO_SNAPSHOT_ID;
} else {
int priorIndex = i > 0 ? i - 1 : -i - 2;
return diffs.get(priorIndex).getSnapshotId();
}
} else { // the one, or the one before if not existing
if (i >= 0) {
return diffs.get(i).getSnapshotId();
} else if (i < -1) {
return diffs.get(-i - 2).getSnapshotId();
} else { // i == -1
return Snapshot.NO_SNAPSHOT_ID;
}
}
}
public final int getPrior(int snapshotId) {
return getPrior(snapshotId, false);
}
/**
* Update the prior snapshot.
*/
final int updatePrior(int snapshot, int prior) {
int p = getPrior(snapshot, true);
if (p != Snapshot.CURRENT_STATE_ID
&& Snapshot.ID_INTEGER_COMPARATOR.compare(p, prior) > 0) {
return p;
}
return prior;
}
public final D getDiffById(final int snapshotId) {
if (snapshotId == Snapshot.CURRENT_STATE_ID || diffs == null) {
return null;
}
final int i = diffs.binarySearch(snapshotId);
if (i >= 0) {
// exact match
return diffs.get(i);
} else {
// Exact match not found means that there were no changes between
// given snapshot and the next state so that the diff for the given
// snapshot was not recorded. Thus, return the next state.
final int j = -i - 1;
return j < diffs.size() ? diffs.get(j) : null;
}
}
/**
* Search for the snapshot whose id is 1) no less than the given id,
* and 2) most close to the given id.
*/
public final int getSnapshotById(final int snapshotId) {
D diff = getDiffById(snapshotId);
return diff == null ? Snapshot.CURRENT_STATE_ID : diff.getSnapshotId();
}
public final int getDiffIndexById(final int snapshotId) {
int diffIndex = diffs.binarySearch(snapshotId);
diffIndex = diffIndex < 0 ? (-diffIndex - 1) : diffIndex;
return diffIndex;
}
final int[] changedBetweenSnapshots(Snapshot from, Snapshot to) {
if (diffs == null) {
return null;
}
Snapshot earlier = from;
Snapshot later = to;
if (Snapshot.ID_COMPARATOR.compare(from, to) > 0) {
earlier = to;
later = from;
}
final int size = diffs.size();
int earlierDiffIndex = getDiffIndexById(earlier.getId());
int laterDiffIndex = later == null ? size
: getDiffIndexById(later.getId());
if (earlierDiffIndex == size) {
// if the earlierSnapshot is after the latest SnapshotDiff stored in
// diffs, no modification happened after the earlierSnapshot
return null;
}
if (laterDiffIndex == -1 || laterDiffIndex == 0) {
// if the laterSnapshot is the earliest SnapshotDiff stored in diffs, or
// before it, no modification happened before the laterSnapshot
return null;
}
return new int[]{earlierDiffIndex, laterDiffIndex};
}
/**
 * @return the inode corresponding to the given snapshot.
 *         Note that the current inode is returned if there is no change
 *         between the given snapshot and the current state.
 */
public A getSnapshotINode(final int snapshotId, final A currentINode) {
  final D diff = getDiffById(snapshotId);
  if (diff != null) {
    final A snapshotInode = diff.getSnapshotINode();
    if (snapshotInode != null) {
      return snapshotInode;
    }
  }
  return currentINode;
}
/**
 * Check if the latest snapshot diff exists. If not, add it.
 *
 * @return the latest snapshot diff, which is never null.
 */
final D checkAndAddLatestSnapshotDiff(int latestSnapshotId, N currentINode) {
  final D last = getLast();
  if (last == null) {
    return addDiff(latestSnapshotId, currentINode);
  }
  // Reuse the existing tail diff when it already covers the latest snapshot.
  final int cmp =
      Snapshot.ID_INTEGER_COMPARATOR.compare(last.getSnapshotId(), latestSnapshotId);
  return cmp >= 0 ? last : addDiff(latestSnapshotId, currentINode);
}
/** Save the snapshot copy to the latest snapshot. */
public D saveSelf2Snapshot(int latestSnapshotId, N currentINode,
    A snapshotCopy) {
  // Nothing to record against the current state.
  if (latestSnapshotId == Snapshot.CURRENT_STATE_ID) {
    return null;
  }
  final D diff = checkAndAddLatestSnapshotDiff(latestSnapshotId, currentINode);
  if (diff.snapshotINode == null) {
    // Materialize a copy lazily, only when the caller did not supply one.
    final A copy =
        snapshotCopy != null ? snapshotCopy : createSnapshotCopy(currentINode);
    diff.saveSnapshotCopy(copy);
  }
  return diff;
}
/** Iterates over the recorded diffs; empty iteration when none exist. */
@Override
public Iterator<D> iterator() {
  if (diffs == null) {
    return Collections.emptyIterator();
  }
  return diffs.iterator();
}
/**
 * Renders the diff list as {@code SimpleName@hexHash: [d1, d2, ...]}, or the
 * empty string when no diff list exists.
 */
@Override
public String toString() {
  if (diffs == null) {
    return "";
  }
  final StringBuilder b =
      new StringBuilder(getClass().getSimpleName()).append("@")
          .append(Integer.toHexString(hashCode())).append(": [");
  // Join with ", " between elements. The previous implementation
  // unconditionally chopped two trailing characters, which corrupted the
  // output (e.g. "Name@1a2b:]") whenever diffs was non-null but empty.
  boolean first = true;
  for (D d : diffs) {
    if (!first) {
      b.append(", ");
    }
    b.append(d);
    first = false;
  }
  return b.append("]").toString();
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.grinder.console.communication;
import net.grinder.common.GrinderProperties;
import net.grinder.common.processidentity.AgentIdentity;
import net.grinder.common.processidentity.ProcessIdentity;
import net.grinder.communication.CommunicationException;
import net.grinder.communication.MessageDispatchRegistry;
import net.grinder.communication.MessageDispatchRegistry.AbstractHandler;
import net.grinder.engine.communication.*;
import net.grinder.message.console.AgentControllerProcessReportMessage;
import net.grinder.message.console.AgentControllerState;
import net.grinder.messages.agent.StartGrinderMessage;
import net.grinder.messages.agent.StopGrinderMessage;
import net.grinder.messages.console.AgentAddress;
import net.grinder.util.ListenerSupport;
import org.ngrinder.monitor.controller.model.SystemDataModel;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.*;
import java.util.Map.Entry;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Predicate;
import static java.util.Collections.unmodifiableMap;
import static org.ngrinder.common.util.CollectionUtils.newLinkedHashSet;
/**
* Implementation of {@link AgentProcessControl}.
*
* @author JunHo Yoon
*/
public class AgentProcessControlImplementation implements AgentProcessControl {
private final ConsoleCommunication m_consoleCommunication;
private final Map<AgentIdentity, AgentStatus> m_agentMap = new ConcurrentHashMap<>();
private final ListenerSupport<AgentStatusUpdateListener> m_agentStatusUpdateListeners = new ListenerSupport<>();
private final ListenerSupport<LogArrivedListener> m_logListeners = new ListenerSupport<>();
private final ListenerSupport<AgentDownloadRequestListener> m_agentDownloadRequestListeners = new ListenerSupport<>();
private final ListenerSupport<ConnectionAgentListener> m_connectionAgentListener = new ListenerSupport<>();
private final ListenerSupport<ConnectionAgentCommunicationListener> m_connectionAgentCommunicationListener = new ListenerSupport<>();
private static final Logger LOGGER = LoggerFactory.getLogger(AgentProcessControlImplementation.class);
/**
* Period at which to update the listeners.
*/
private static final long UPDATE_PERIOD = 500;
/**
* We keep a record of processes for a few seconds after they have been terminated.
* <p/>
* Every FLUSH_PERIOD, process statuses are checked. Those haven't reported for a while are
* marked and are discarded if they still haven't been updated by the next FLUSH_PERIOD.
*/
private static final long FLUSH_PERIOD = 2000;
private volatile boolean m_newData = false;
/**
* Constructor.
*
* @param timer Timer that can be used to schedule housekeeping tasks.
* @param consoleCommunication The console communication handler.
*/
public AgentProcessControlImplementation(Timer timer, ConsoleCommunication consoleCommunication) {
m_consoleCommunication = consoleCommunication;
timer.schedule(new TimerTask() {
public void run() {
synchronized (m_agentMap) {
try {
update();
} catch (Exception e) {
LOGGER.error("Error occurred during update agent", e);
}
}
}
}, 0, UPDATE_PERIOD);
timer.schedule(new TimerTask() {
public void run() {
synchronized (m_agentMap) {
try {
purge(m_agentMap);
} catch (Exception e) {
LOGGER.error("Error occurred during purge agent", e);
}
}
}
}, 0, FLUSH_PERIOD);
final MessageDispatchRegistry messageDispatchRegistry = consoleCommunication.getMessageDispatchRegistry();
messageDispatchRegistry.set(AgentControllerProcessReportMessage.class,
new AbstractHandler<AgentControllerProcessReportMessage>() {
public void handle(AgentControllerProcessReportMessage message) {
updateAgentProcessReportMessage(message);
}
});
messageDispatchRegistry.set(LogReportGrinderMessage.class, new AbstractHandler<LogReportGrinderMessage>() {
public void handle(final LogReportGrinderMessage message) {
m_logListeners.apply(listener -> {
listener.logArrived(message.getTestId(), message.getAddress(), message.getLogs());
});
}
});
messageDispatchRegistry.set(AgentDownloadGrinderMessage.class, new AbstractHandler<AgentDownloadGrinderMessage>() {
public void handle(final AgentDownloadGrinderMessage message) {
m_agentDownloadRequestListeners.apply(listener -> {
AgentUpdateGrinderMessage agentUpdateGrinderMessage = listener.onAgentDownloadRequested(message.getVersion(), message.getNext());
if (agentUpdateGrinderMessage != null) {
m_consoleCommunication.sendToAddressedAgents(message.getAddress(), agentUpdateGrinderMessage);
}
});
}
});
messageDispatchRegistry.set(ConnectionAgentMessage.class, new AbstractHandler<ConnectionAgentMessage>() {
public void handle(final ConnectionAgentMessage message) {
m_connectionAgentListener.apply(listener -> {
listener.onConnectionAgentMessage(message.getIp(), message.getName(), message.getSubregion(), message.getPort());
});
}
});
messageDispatchRegistry.set(ConnectionAgentCommunicationMessage.class, new AbstractHandler<ConnectionAgentCommunicationMessage>() {
public void handle(final ConnectionAgentCommunicationMessage message) {
m_connectionAgentCommunicationListener.apply(listener -> {
listener.onConnectionAgentCommunication(message.getUsingPort(), message.getIp(), message.getPort());
});
}
});
}
/**
* Set Agent status report.
*
* @param message {@link AgentControllerProcessReportMessage}
*/
private void updateAgentProcessReportMessage(AgentControllerProcessReportMessage message) {
AgentIdentity agentIdentity = message.getAgentIdentity();
AgentStatus agentStatus = getAgentStatus(agentIdentity);
agentStatus.setAgentProcessStatus(message);
m_agentMap.put(agentIdentity, agentStatus);
m_newData = true;
}
/**
* Get agent status. It's for internal use.
*
* @param agentIdentity agent identity
* @return {@link AgentStatus}
*/
private AgentStatus getAgentStatus(AgentIdentity agentIdentity) {
return m_agentMap.getOrDefault(agentIdentity, new AgentStatus(agentIdentity));
}
/**
* Update agent status.
*/
private void update() {
if (!m_newData) {
return;
}
m_newData = false;
m_agentStatusUpdateListeners.apply(agentStatusUpdateListener -> {
agentStatusUpdateListener.update(unmodifiableMap(m_agentMap));
});
}
public void addAgentDownloadRequestListener(AgentDownloadRequestListener agentDownloadRequestListener) {
m_agentDownloadRequestListeners.add(agentDownloadRequestListener);
}
public void addConnectionAgentListener(ConnectionAgentListener connectionAgentListener) {
m_connectionAgentListener.add(connectionAgentListener);
}
public void addConnectionAgentCommunicationListener(ConnectionAgentCommunicationListener listener) {
m_connectionAgentCommunicationListener.add(listener);
}
/**
* Interface for listeners to SampleModelImplementation.
*/
public interface AgentStatusUpdateListener extends EventListener {
/**
* Update agent status.
*
* @param agentMap agent map
*/
void update(Map<AgentIdentity, AgentStatus> agentMap);
}
/**
* Callers are for synchronization.
*
* @param purgableMap map for {@link ProcessIdentity}
*/
private void purge(Map<? extends ProcessIdentity, ? extends Purgable> purgableMap) {
final Set<ProcessIdentity> zombies = new HashSet<>();
for (Entry<? extends ProcessIdentity, ? extends Purgable> entry : purgableMap.entrySet()) {
if (entry.getValue().shouldPurge()) {
zombies.add(entry.getKey());
}
}
if (zombies.size() > 0) {
for (ProcessIdentity processIdentity: zombies) {
purgableMap.remove(processIdentity);
}
m_newData = true;
}
}
private interface Purgable {
/**
* check it should be purged.
*
* @return true if purse is necessary
*/
boolean shouldPurge();
}
private static abstract class AbstractTimedReference implements Purgable {
private int m_purgeDelayCount;
@Override
public boolean shouldPurge() {
// Processes have a short time to report - see the javadoc for
// FLUSH_PERIOD.
if (m_purgeDelayCount > 0) {
return true;
}
++m_purgeDelayCount;
return false;
}
public void initPurgeDelayCount() {
m_purgeDelayCount = 0;
}
}
private static final class AgentReference extends AbstractTimedReference {
private final AgentControllerProcessReportMessage m_agentProcessReportMessage;
/**
* Constructor.
*
* @param agentProcessReportMessage {@link AgentControllerProcessReportMessage}
*/
AgentReference(AgentControllerProcessReportMessage agentProcessReportMessage) {
this.m_agentProcessReportMessage = agentProcessReportMessage;
}
}
/**
* Agent Status.
*
* @author JunHo Yoon
*/
public static final class AgentStatus implements Purgable {
private volatile AgentReference m_agentReference;
/**
* Constructor.
*
* @param agentIdentity agent identity
*/
public AgentStatus(AgentIdentity agentIdentity) {
setAgentProcessStatus(new UnknownAgentProcessReport(new AgentAddress(agentIdentity)));
}
@Override
public boolean shouldPurge() {
return m_agentReference.shouldPurge();
}
/**
* Get agent controller status.
*
* @return {@link AgentControllerState} member
*/
public AgentControllerState getAgentControllerState() {
if (m_agentReference == null) {
return AgentControllerState.UNKNOWN;
}
AgentControllerProcessReportMessage agentProcessReport = m_agentReference.m_agentProcessReportMessage;
return agentProcessReport == null ? AgentControllerState.UNKNOWN : agentProcessReport.getState();
}
/**
* Set each agent process message on the agent status.
*
* @param message Message
*/
public void setAgentProcessStatus(AgentControllerProcessReportMessage message) {
m_agentReference = new AgentReference(message);
}
public String getVersion() {
return m_agentReference == null ? null : m_agentReference.m_agentProcessReportMessage.getVersion();
}
public SystemDataModel getSystemDataModel() {
return m_agentReference == null ? null : m_agentReference.m_agentProcessReportMessage.getSystemDataModel();
}
public int getConnectingPort() {
return m_agentReference == null ? 0 : m_agentReference.m_agentProcessReportMessage.getConnectingPort();
}
public AgentIdentity getAgentIdentity() {
return m_agentReference == null ? null : m_agentReference.m_agentProcessReportMessage.getAgentIdentity();
}
public String getAgentName() {
return m_agentReference == null ? "" : m_agentReference.m_agentProcessReportMessage.getAgentIdentity()
.getName();
}
}
/**
* Add process control {@link AgentStatusUpdateListener}.
*
* @param agentStatusUpdateListener agentStatusUpdateListener to be added
*/
public void addAgentStatusUpdateListener(AgentStatusUpdateListener agentStatusUpdateListener) {
m_agentStatusUpdateListeners.add(agentStatusUpdateListener);
}
/**
* Add Log control {@link LogArrivedListener}.
*
* @param listener listener to be added
*/
public void addLogArrivedListener(LogArrivedListener listener) {
m_logListeners.add(listener);
}
/*
* (non-Javadoc)
*
* @see net.grinder.console.communication.AgentProcessControl#startAgent(java .util.Set,
* net.grinder.common.GrinderProperties)
*/
@Override
public void startAgent(Set<AgentIdentity> agents, GrinderProperties properties) {
final GrinderProperties propertiesToSend = properties != null ? properties : new GrinderProperties();
for (AgentIdentity each : agents) {
m_consoleCommunication.sendToAddressedAgents(new AgentAddress(each), new StartGrinderMessage(
propertiesToSend, each.getNumber()));
}
}
/*
* (non-Javadoc)
*
* @see net.grinder.console.communication.AgentProcessControl#stopAgent(net.grinder
* .common.processidentity.AgentIdentity)
*/
@Override
public void stopAgent(AgentIdentity agentIdentity) {
m_consoleCommunication.sendToAddressedAgents(new AgentAddress(agentIdentity), new StopGrinderMessage());
}
/*
* (non-Javadoc)
*
* @see net.grinder.console.communication.AgentProcessControl#getNumberOfLiveAgents ()
*/
@Override
public int getNumberOfLiveAgents() {
synchronized (m_agentMap) {
return m_agentMap.size();
}
}
/*
* (non-Javadoc)
*
* @see net.grinder.console.communication.AgentProcessControl#getAgents(net.grinder
* .message.console.AgentControllerState, int)
*/
@Override
public Set<AgentIdentity> getAgents(AgentControllerState state, int count) {
count = count == 0 ? Integer.MAX_VALUE : count;
synchronized (m_agentMap) {
int i = 0;
Set<AgentIdentity> agents = new HashSet<>();
for (Map.Entry<AgentIdentity, AgentStatus> each : m_agentMap.entrySet()) {
if (each.getValue().getAgentControllerState().equals(state) && ++i <= count) {
agents.add(each.getKey());
}
}
return agents;
}
}
/*
* (non-Javadoc)
*
* @see net.grinder.console.communication.AgentProcessControl#getAllAgents()
*/
@Override
public Set<AgentIdentity> getAllAgents() {
synchronized (m_agentMap) {
return m_agentMap.keySet();
}
}
private static class UnknownAgentProcessReport extends AgentControllerProcessReportMessage {
/**
* UUID.
*/
private static final long serialVersionUID = -2758014000696737553L;
/**
* Constructor.
*
* @param address {@link AgentAddress} in which the agent process is not known.
*/
public UnknownAgentProcessReport(AgentAddress address) {
super(AgentControllerState.UNKNOWN, null, 0, null);
try {
setAddress(address);
} catch (CommunicationException e) {
LOGGER.error("Error while setAdress" + address, e);
}
}
public AgentControllerState getState() {
return AgentControllerState.UNKNOWN;
}
}
@Override
public AgentControllerState getAgentControllerState(AgentIdentity agentIdentity) {
return getAgentStatus(agentIdentity).getAgentControllerState();
}
@Override
public String getAgentVersion(AgentIdentity agentIdentity) {
return getAgentStatus(agentIdentity).getVersion();
}
@Override
public SystemDataModel getSystemDataModel(AgentIdentity agentIdentity) {
return getAgentStatus(agentIdentity).getSystemDataModel();
}
@Override
public int getAgentConnectingPort(AgentIdentity agentIdentity) {
return getAgentStatus(agentIdentity).getConnectingPort();
}
/**
* Get agent identities and status map matching the given predicate.
*
* @param predicate predicate
* @return {@link AgentIdentity} {@link AgentStatus} map
* @since 3.1.2
*/
public Set<AgentStatus> getAgentStatusSet(Predicate<AgentStatus> predicate) {
Set<AgentStatus> statusSet = newLinkedHashSet();
for (Entry<AgentIdentity, AgentStatus> each : m_agentMap.entrySet()) {
if (predicate.test(each.getValue())) {
statusSet.add(each.getValue());
}
}
return statusSet;
}
}
| |
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/compute/v1/compute.proto
package com.google.cloud.compute.v1;
/**
*
*
* <pre>
* A request message for RegionUrlMaps.Validate. See the method description for details.
* </pre>
*
* Protobuf type {@code google.cloud.compute.v1.ValidateRegionUrlMapRequest}
*/
public final class ValidateRegionUrlMapRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.compute.v1.ValidateRegionUrlMapRequest)
ValidateRegionUrlMapRequestOrBuilder {
private static final long serialVersionUID = 0L;
  // Use ValidateRegionUrlMapRequest.newBuilder() to construct.
  private ValidateRegionUrlMapRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default instance constructor: proto3 string fields default to "".
  private ValidateRegionUrlMapRequest() {
    project_ = "";
    region_ = "";
    urlMap_ = "";
  }
  // Used reflectively by the protobuf runtime to create fresh instances.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ValidateRegionUrlMapRequest();
  }
  // Fields present on the wire but not known to this generated schema.
  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
  }
  // Wire-format parsing constructor. Each case value is the raw tag
  // (field_number << 3 | wire_type); e.g. 453062866 == 56632858 << 3 | 2
  // (length-delimited). Field 367020684's tag overflows int and therefore
  // appears as the negative literal -1358801822.
  private ValidateRegionUrlMapRequest(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            // Tag 0 marks the end of the stream.
            done = true;
            break;
          case 453062866:
            {
              // Field 56632858: region_url_maps_validate_request_resource.
              // If the message field was already set, merge the new value into it.
              com.google.cloud.compute.v1.RegionUrlMapsValidateRequest.Builder subBuilder = null;
              if (regionUrlMapsValidateRequestResource_ != null) {
                subBuilder = regionUrlMapsValidateRequestResource_.toBuilder();
              }
              regionUrlMapsValidateRequestResource_ =
                  input.readMessage(
                      com.google.cloud.compute.v1.RegionUrlMapsValidateRequest.parser(),
                      extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(regionUrlMapsValidateRequestResource_);
                regionUrlMapsValidateRequestResource_ = subBuilder.buildPartial();
              }
              break;
            }
          case 1111570338:
            {
              // Field 138946292: region (string).
              java.lang.String s = input.readStringRequireUtf8();
              region_ = s;
              break;
            }
          case 1820481738:
            {
              // Field 227560217: project (string).
              java.lang.String s = input.readStringRequireUtf8();
              project_ = s;
              break;
            }
          case -1358801822:
            {
              // Field 367020684: url_map (string).
              java.lang.String s = input.readStringRequireUtf8();
              urlMap_ = s;
              break;
            }
          default:
            {
              // Preserve unrecognized fields rather than dropping them.
              if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
    } finally {
      // Always finalize what was parsed, even on failure paths.
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
  // Message descriptor, resolved from the generated outer class.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.compute.v1.Compute
        .internal_static_google_cloud_compute_v1_ValidateRegionUrlMapRequest_descriptor;
  }
  // Maps descriptor fields to the reflective accessors of this class.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.compute.v1.Compute
        .internal_static_google_cloud_compute_v1_ValidateRegionUrlMapRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.compute.v1.ValidateRegionUrlMapRequest.class,
            com.google.cloud.compute.v1.ValidateRegionUrlMapRequest.Builder.class);
  }
  public static final int PROJECT_FIELD_NUMBER = 227560217;
  // Holds either a java.lang.String or a com.google.protobuf.ByteString;
  // the UTF-8 decode happens lazily and the decoded form is cached back here.
  private volatile java.lang.Object project_;
  /**
   *
   *
   * <pre>
   * Project ID for this request.
   * </pre>
   *
   * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The project.
   */
  @java.lang.Override
  public java.lang.String getProject() {
    java.lang.Object ref = project_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded String; racy writes are harmless since the value is
      // immutable and deterministic.
      project_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Project ID for this request.
   * </pre>
   *
   * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The bytes for project.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getProjectBytes() {
    java.lang.Object ref = project_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      // Cache the encoded ByteString form.
      project_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int REGION_FIELD_NUMBER = 138946292;
  // String-or-ByteString holder with lazy decode caching (see project_).
  private volatile java.lang.Object region_;
  /**
   *
   *
   * <pre>
   * Name of the region scoping this request.
   * </pre>
   *
   * <code>string region = 138946292 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The region.
   */
  @java.lang.Override
  public java.lang.String getRegion() {
    java.lang.Object ref = region_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded String.
      region_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Name of the region scoping this request.
   * </pre>
   *
   * <code>string region = 138946292 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The bytes for region.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getRegionBytes() {
    java.lang.Object ref = region_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      // Cache the encoded ByteString form.
      region_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int REGION_URL_MAPS_VALIDATE_REQUEST_RESOURCE_FIELD_NUMBER = 56632858;
  // Singular message field; null means "not set" (see hasRegionUrlMapsValidateRequestResource).
  private com.google.cloud.compute.v1.RegionUrlMapsValidateRequest
      regionUrlMapsValidateRequestResource_;
  /**
   *
   *
   * <pre>
   * The body resource for this request
   * </pre>
   *
   * <code>
   * .google.cloud.compute.v1.RegionUrlMapsValidateRequest region_url_maps_validate_request_resource = 56632858 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the regionUrlMapsValidateRequestResource field is set.
   */
  @java.lang.Override
  public boolean hasRegionUrlMapsValidateRequestResource() {
    return regionUrlMapsValidateRequestResource_ != null;
  }
  /**
   *
   *
   * <pre>
   * The body resource for this request
   * </pre>
   *
   * <code>
   * .google.cloud.compute.v1.RegionUrlMapsValidateRequest region_url_maps_validate_request_resource = 56632858 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The regionUrlMapsValidateRequestResource.
   */
  @java.lang.Override
  public com.google.cloud.compute.v1.RegionUrlMapsValidateRequest
      getRegionUrlMapsValidateRequestResource() {
    // Never returns null: falls back to the field's default instance when unset.
    return regionUrlMapsValidateRequestResource_ == null
        ? com.google.cloud.compute.v1.RegionUrlMapsValidateRequest.getDefaultInstance()
        : regionUrlMapsValidateRequestResource_;
  }
  /**
   *
   *
   * <pre>
   * The body resource for this request
   * </pre>
   *
   * <code>
   * .google.cloud.compute.v1.RegionUrlMapsValidateRequest region_url_maps_validate_request_resource = 56632858 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.compute.v1.RegionUrlMapsValidateRequestOrBuilder
      getRegionUrlMapsValidateRequestResourceOrBuilder() {
    return getRegionUrlMapsValidateRequestResource();
  }
  public static final int URL_MAP_FIELD_NUMBER = 367020684;
  // String-or-ByteString holder with lazy decode caching (see project_).
  private volatile java.lang.Object urlMap_;
  /**
   *
   *
   * <pre>
   * Name of the UrlMap resource to be validated as.
   * </pre>
   *
   * <code>string url_map = 367020684 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The urlMap.
   */
  @java.lang.Override
  public java.lang.String getUrlMap() {
    java.lang.Object ref = urlMap_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded String.
      urlMap_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Name of the UrlMap resource to be validated as.
   * </pre>
   *
   * <code>string url_map = 367020684 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The bytes for urlMap.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getUrlMapBytes() {
    java.lang.Object ref = urlMap_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      // Cache the encoded ByteString form.
      urlMap_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized initialization check: -1 = not computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // Proto3 message with no required extensions: always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes set fields in ascending field-number order, then any unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (regionUrlMapsValidateRequestResource_ != null) {
      output.writeMessage(56632858, getRegionUrlMapsValidateRequestResource());
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(region_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 138946292, region_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(project_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 227560217, project_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(urlMap_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 367020684, urlMap_);
    }
    unknownFields.writeTo(output);
  }
  // Computes the serialized byte size once and memoizes it in memoizedSize
  // (-1 means not yet computed). Mirrors the field order used by writeTo.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (regionUrlMapsValidateRequestResource_ != null) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(
              56632858, getRegionUrlMapsValidateRequestResource());
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(region_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(138946292, region_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(project_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(227560217, project_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(urlMap_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(367020684, urlMap_);
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Field-by-field equality, including unknown fields; the has-bit of the
  // message field must match before its value is compared.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.compute.v1.ValidateRegionUrlMapRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.compute.v1.ValidateRegionUrlMapRequest other =
        (com.google.cloud.compute.v1.ValidateRegionUrlMapRequest) obj;
    if (!getProject().equals(other.getProject())) return false;
    if (!getRegion().equals(other.getRegion())) return false;
    if (hasRegionUrlMapsValidateRequestResource()
        != other.hasRegionUrlMapsValidateRequestResource()) return false;
    if (hasRegionUrlMapsValidateRequestResource()) {
      if (!getRegionUrlMapsValidateRequestResource()
          .equals(other.getRegionUrlMapsValidateRequestResource())) return false;
    }
    if (!getUrlMap().equals(other.getUrlMap())) return false;
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }
  // Hash over descriptor, set fields (tagged by field number), and unknown
  // fields; memoized in memoizedHashCode (0 means not yet computed).
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PROJECT_FIELD_NUMBER;
    hash = (53 * hash) + getProject().hashCode();
    hash = (37 * hash) + REGION_FIELD_NUMBER;
    hash = (53 * hash) + getRegion().hashCode();
    if (hasRegionUrlMapsValidateRequestResource()) {
      hash = (37 * hash) + REGION_URL_MAPS_VALIDATE_REQUEST_RESOURCE_FIELD_NUMBER;
      hash = (53 * hash) + getRegionUrlMapsValidateRequestResource().hashCode();
    }
    hash = (37 * hash) + URL_MAP_FIELD_NUMBER;
    hash = (53 * hash) + getUrlMap().hashCode();
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parseFrom/parseDelimitedFrom overloads; all delegate to
  // the shared PARSER (byte sources) or GeneratedMessageV3 stream helpers.
  public static com.google.cloud.compute.v1.ValidateRegionUrlMapRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.compute.v1.ValidateRegionUrlMapRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.compute.v1.ValidateRegionUrlMapRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.compute.v1.ValidateRegionUrlMapRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.compute.v1.ValidateRegionUrlMapRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.compute.v1.ValidateRegionUrlMapRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.compute.v1.ValidateRegionUrlMapRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.compute.v1.ValidateRegionUrlMapRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a leading varint length prefix before the message.
  public static com.google.cloud.compute.v1.ValidateRegionUrlMapRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.compute.v1.ValidateRegionUrlMapRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.compute.v1.ValidateRegionUrlMapRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.compute.v1.ValidateRegionUrlMapRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factory methods.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.cloud.compute.v1.ValidateRegionUrlMapRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields a clean builder; anything else pre-populates
    // the builder with this message's fields.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  // Internal factory used by the runtime to create parent-linked builders.
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
 *
 *
 * <pre>
 * A request message for RegionUrlMaps.Validate. See the method description for details.
 * </pre>
 *
 * Protobuf type {@code google.cloud.compute.v1.ValidateRegionUrlMapRequest}
 */
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
    implements
    // @@protoc_insertion_point(builder_implements:google.cloud.compute.v1.ValidateRegionUrlMapRequest)
    com.google.cloud.compute.v1.ValidateRegionUrlMapRequestOrBuilder {
  // NOTE: machine-generated by protoc — do not hand-edit; regenerate from the .proto instead.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.compute.v1.Compute
        .internal_static_google_cloud_compute_v1_ValidateRegionUrlMapRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.compute.v1.Compute
        .internal_static_google_cloud_compute_v1_ValidateRegionUrlMapRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.compute.v1.ValidateRegionUrlMapRequest.class,
            com.google.cloud.compute.v1.ValidateRegionUrlMapRequest.Builder.class);
  }

  // Construct using com.google.cloud.compute.v1.ValidateRegionUrlMapRequest.newBuilder()
  private Builder() {
    maybeForceBuilderInitialization();
  }

  private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
    maybeForceBuilderInitialization();
  }

  private void maybeForceBuilderInitialization() {
    // Nested-message field builders are created lazily; nothing to eagerly initialize
    // here even when the runtime asks for field builders up front.
    if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
  }

  // Resets every field to its proto3 default value.
  @java.lang.Override
  public Builder clear() {
    super.clear();
    project_ = "";
    region_ = "";
    if (regionUrlMapsValidateRequestResourceBuilder_ == null) {
      regionUrlMapsValidateRequestResource_ = null;
    } else {
      regionUrlMapsValidateRequestResource_ = null;
      regionUrlMapsValidateRequestResourceBuilder_ = null;
    }
    urlMap_ = "";
    return this;
  }

  @java.lang.Override
  public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
    return com.google.cloud.compute.v1.Compute
        .internal_static_google_cloud_compute_v1_ValidateRegionUrlMapRequest_descriptor;
  }

  @java.lang.Override
  public com.google.cloud.compute.v1.ValidateRegionUrlMapRequest getDefaultInstanceForType() {
    return com.google.cloud.compute.v1.ValidateRegionUrlMapRequest.getDefaultInstance();
  }

  // Builds and verifies required-field initialization (always true for proto3).
  @java.lang.Override
  public com.google.cloud.compute.v1.ValidateRegionUrlMapRequest build() {
    com.google.cloud.compute.v1.ValidateRegionUrlMapRequest result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }

  @java.lang.Override
  public com.google.cloud.compute.v1.ValidateRegionUrlMapRequest buildPartial() {
    com.google.cloud.compute.v1.ValidateRegionUrlMapRequest result =
        new com.google.cloud.compute.v1.ValidateRegionUrlMapRequest(this);
    result.project_ = project_;
    result.region_ = region_;
    // Message field: take the sub-builder's result when one was created, otherwise
    // the raw stored value.
    if (regionUrlMapsValidateRequestResourceBuilder_ == null) {
      result.regionUrlMapsValidateRequestResource_ = regionUrlMapsValidateRequestResource_;
    } else {
      result.regionUrlMapsValidateRequestResource_ =
          regionUrlMapsValidateRequestResourceBuilder_.build();
    }
    result.urlMap_ = urlMap_;
    onBuilt();
    return result;
  }

  @java.lang.Override
  public Builder clone() {
    return super.clone();
  }

  @java.lang.Override
  public Builder setField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.setField(field, value);
  }

  @java.lang.Override
  public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
  }

  @java.lang.Override
  public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
  }

  @java.lang.Override
  public Builder setRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
  }

  @java.lang.Override
  public Builder addRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.addRepeatedField(field, value);
  }

  // Dynamic dispatch: use the fast typed merge when possible, else reflective merge.
  @java.lang.Override
  public Builder mergeFrom(com.google.protobuf.Message other) {
    if (other instanceof com.google.cloud.compute.v1.ValidateRegionUrlMapRequest) {
      return mergeFrom((com.google.cloud.compute.v1.ValidateRegionUrlMapRequest) other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }

  public Builder mergeFrom(com.google.cloud.compute.v1.ValidateRegionUrlMapRequest other) {
    if (other == com.google.cloud.compute.v1.ValidateRegionUrlMapRequest.getDefaultInstance())
      return this;
    // proto3 merge semantics: scalar fields only overwrite when set (non-empty),
    // message fields merge recursively.
    if (!other.getProject().isEmpty()) {
      project_ = other.project_;
      onChanged();
    }
    if (!other.getRegion().isEmpty()) {
      region_ = other.region_;
      onChanged();
    }
    if (other.hasRegionUrlMapsValidateRequestResource()) {
      mergeRegionUrlMapsValidateRequestResource(other.getRegionUrlMapsValidateRequestResource());
    }
    if (!other.getUrlMap().isEmpty()) {
      urlMap_ = other.urlMap_;
      onChanged();
    }
    this.mergeUnknownFields(other.unknownFields);
    onChanged();
    return this;
  }

  @java.lang.Override
  public final boolean isInitialized() {
    return true;
  }

  @java.lang.Override
  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    com.google.cloud.compute.v1.ValidateRegionUrlMapRequest parsedMessage = null;
    try {
      parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      // Keep whatever was parsed before the failure, then rethrow as IOException.
      parsedMessage =
          (com.google.cloud.compute.v1.ValidateRegionUrlMapRequest) e.getUnfinishedMessage();
      throw e.unwrapIOException();
    } finally {
      if (parsedMessage != null) {
        mergeFrom(parsedMessage);
      }
    }
    return this;
  }

  // Lazily-decoded string field: holds either a String or a ByteString until first read.
  private java.lang.Object project_ = "";
  /**
   *
   *
   * <pre>
   * Project ID for this request.
   * </pre>
   *
   * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The project.
   */
  public java.lang.String getProject() {
    java.lang.Object ref = project_;
    if (!(ref instanceof java.lang.String)) {
      // Decode once and cache the String form.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      project_ = s;
      return s;
    } else {
      return (java.lang.String) ref;
    }
  }
  /**
   *
   *
   * <pre>
   * Project ID for this request.
   * </pre>
   *
   * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The bytes for project.
   */
  public com.google.protobuf.ByteString getProjectBytes() {
    java.lang.Object ref = project_;
    if (ref instanceof String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      project_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  /**
   *
   *
   * <pre>
   * Project ID for this request.
   * </pre>
   *
   * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @param value The project to set.
   * @return This builder for chaining.
   */
  public Builder setProject(java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }
    project_ = value;
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Project ID for this request.
   * </pre>
   *
   * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return This builder for chaining.
   */
  public Builder clearProject() {
    project_ = getDefaultInstance().getProject();
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Project ID for this request.
   * </pre>
   *
   * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @param value The bytes for project to set.
   * @return This builder for chaining.
   */
  public Builder setProjectBytes(com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    checkByteStringIsUtf8(value);
    project_ = value;
    onChanged();
    return this;
  }

  // Lazily-decoded string field; same String/ByteString caching as project_.
  private java.lang.Object region_ = "";
  /**
   *
   *
   * <pre>
   * Name of the region scoping this request.
   * </pre>
   *
   * <code>string region = 138946292 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The region.
   */
  public java.lang.String getRegion() {
    java.lang.Object ref = region_;
    if (!(ref instanceof java.lang.String)) {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      region_ = s;
      return s;
    } else {
      return (java.lang.String) ref;
    }
  }
  /**
   *
   *
   * <pre>
   * Name of the region scoping this request.
   * </pre>
   *
   * <code>string region = 138946292 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The bytes for region.
   */
  public com.google.protobuf.ByteString getRegionBytes() {
    java.lang.Object ref = region_;
    if (ref instanceof String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      region_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  /**
   *
   *
   * <pre>
   * Name of the region scoping this request.
   * </pre>
   *
   * <code>string region = 138946292 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @param value The region to set.
   * @return This builder for chaining.
   */
  public Builder setRegion(java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }
    region_ = value;
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Name of the region scoping this request.
   * </pre>
   *
   * <code>string region = 138946292 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return This builder for chaining.
   */
  public Builder clearRegion() {
    region_ = getDefaultInstance().getRegion();
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Name of the region scoping this request.
   * </pre>
   *
   * <code>string region = 138946292 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @param value The bytes for region to set.
   * @return This builder for chaining.
   */
  public Builder setRegionBytes(com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    checkByteStringIsUtf8(value);
    region_ = value;
    onChanged();
    return this;
  }

  // Message field storage: exactly one of the raw value or the sub-builder is
  // authoritative at any time (see getRegionUrlMapsValidateRequestResourceFieldBuilder()).
  private com.google.cloud.compute.v1.RegionUrlMapsValidateRequest
      regionUrlMapsValidateRequestResource_;
  private com.google.protobuf.SingleFieldBuilderV3<
          com.google.cloud.compute.v1.RegionUrlMapsValidateRequest,
          com.google.cloud.compute.v1.RegionUrlMapsValidateRequest.Builder,
          com.google.cloud.compute.v1.RegionUrlMapsValidateRequestOrBuilder>
      regionUrlMapsValidateRequestResourceBuilder_;
  /**
   *
   *
   * <pre>
   * The body resource for this request
   * </pre>
   *
   * <code>
   * .google.cloud.compute.v1.RegionUrlMapsValidateRequest region_url_maps_validate_request_resource = 56632858 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the regionUrlMapsValidateRequestResource field is set.
   */
  public boolean hasRegionUrlMapsValidateRequestResource() {
    return regionUrlMapsValidateRequestResourceBuilder_ != null
        || regionUrlMapsValidateRequestResource_ != null;
  }
  /**
   *
   *
   * <pre>
   * The body resource for this request
   * </pre>
   *
   * <code>
   * .google.cloud.compute.v1.RegionUrlMapsValidateRequest region_url_maps_validate_request_resource = 56632858 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The regionUrlMapsValidateRequestResource.
   */
  public com.google.cloud.compute.v1.RegionUrlMapsValidateRequest
      getRegionUrlMapsValidateRequestResource() {
    if (regionUrlMapsValidateRequestResourceBuilder_ == null) {
      return regionUrlMapsValidateRequestResource_ == null
          ? com.google.cloud.compute.v1.RegionUrlMapsValidateRequest.getDefaultInstance()
          : regionUrlMapsValidateRequestResource_;
    } else {
      return regionUrlMapsValidateRequestResourceBuilder_.getMessage();
    }
  }
  /**
   *
   *
   * <pre>
   * The body resource for this request
   * </pre>
   *
   * <code>
   * .google.cloud.compute.v1.RegionUrlMapsValidateRequest region_url_maps_validate_request_resource = 56632858 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  public Builder setRegionUrlMapsValidateRequestResource(
      com.google.cloud.compute.v1.RegionUrlMapsValidateRequest value) {
    if (regionUrlMapsValidateRequestResourceBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      regionUrlMapsValidateRequestResource_ = value;
      onChanged();
    } else {
      regionUrlMapsValidateRequestResourceBuilder_.setMessage(value);
    }
    return this;
  }
  /**
   *
   *
   * <pre>
   * The body resource for this request
   * </pre>
   *
   * <code>
   * .google.cloud.compute.v1.RegionUrlMapsValidateRequest region_url_maps_validate_request_resource = 56632858 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  public Builder setRegionUrlMapsValidateRequestResource(
      com.google.cloud.compute.v1.RegionUrlMapsValidateRequest.Builder builderForValue) {
    if (regionUrlMapsValidateRequestResourceBuilder_ == null) {
      regionUrlMapsValidateRequestResource_ = builderForValue.build();
      onChanged();
    } else {
      regionUrlMapsValidateRequestResourceBuilder_.setMessage(builderForValue.build());
    }
    return this;
  }
  /**
   *
   *
   * <pre>
   * The body resource for this request
   * </pre>
   *
   * <code>
   * .google.cloud.compute.v1.RegionUrlMapsValidateRequest region_url_maps_validate_request_resource = 56632858 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  public Builder mergeRegionUrlMapsValidateRequestResource(
      com.google.cloud.compute.v1.RegionUrlMapsValidateRequest value) {
    if (regionUrlMapsValidateRequestResourceBuilder_ == null) {
      // Merge into any existing value rather than overwrite it.
      if (regionUrlMapsValidateRequestResource_ != null) {
        regionUrlMapsValidateRequestResource_ =
            com.google.cloud.compute.v1.RegionUrlMapsValidateRequest.newBuilder(
                    regionUrlMapsValidateRequestResource_)
                .mergeFrom(value)
                .buildPartial();
      } else {
        regionUrlMapsValidateRequestResource_ = value;
      }
      onChanged();
    } else {
      regionUrlMapsValidateRequestResourceBuilder_.mergeFrom(value);
    }
    return this;
  }
  /**
   *
   *
   * <pre>
   * The body resource for this request
   * </pre>
   *
   * <code>
   * .google.cloud.compute.v1.RegionUrlMapsValidateRequest region_url_maps_validate_request_resource = 56632858 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  public Builder clearRegionUrlMapsValidateRequestResource() {
    if (regionUrlMapsValidateRequestResourceBuilder_ == null) {
      regionUrlMapsValidateRequestResource_ = null;
      onChanged();
    } else {
      regionUrlMapsValidateRequestResource_ = null;
      regionUrlMapsValidateRequestResourceBuilder_ = null;
    }
    return this;
  }
  /**
   *
   *
   * <pre>
   * The body resource for this request
   * </pre>
   *
   * <code>
   * .google.cloud.compute.v1.RegionUrlMapsValidateRequest region_url_maps_validate_request_resource = 56632858 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  public com.google.cloud.compute.v1.RegionUrlMapsValidateRequest.Builder
      getRegionUrlMapsValidateRequestResourceBuilder() {
    onChanged();
    return getRegionUrlMapsValidateRequestResourceFieldBuilder().getBuilder();
  }
  /**
   *
   *
   * <pre>
   * The body resource for this request
   * </pre>
   *
   * <code>
   * .google.cloud.compute.v1.RegionUrlMapsValidateRequest region_url_maps_validate_request_resource = 56632858 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  public com.google.cloud.compute.v1.RegionUrlMapsValidateRequestOrBuilder
      getRegionUrlMapsValidateRequestResourceOrBuilder() {
    if (regionUrlMapsValidateRequestResourceBuilder_ != null) {
      return regionUrlMapsValidateRequestResourceBuilder_.getMessageOrBuilder();
    } else {
      return regionUrlMapsValidateRequestResource_ == null
          ? com.google.cloud.compute.v1.RegionUrlMapsValidateRequest.getDefaultInstance()
          : regionUrlMapsValidateRequestResource_;
    }
  }
  /**
   *
   *
   * <pre>
   * The body resource for this request
   * </pre>
   *
   * <code>
   * .google.cloud.compute.v1.RegionUrlMapsValidateRequest region_url_maps_validate_request_resource = 56632858 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  private com.google.protobuf.SingleFieldBuilderV3<
          com.google.cloud.compute.v1.RegionUrlMapsValidateRequest,
          com.google.cloud.compute.v1.RegionUrlMapsValidateRequest.Builder,
          com.google.cloud.compute.v1.RegionUrlMapsValidateRequestOrBuilder>
      getRegionUrlMapsValidateRequestResourceFieldBuilder() {
    // Lazily creates the sub-builder and hands the current value over to it; the raw
    // field is nulled out because the builder becomes the single source of truth.
    if (regionUrlMapsValidateRequestResourceBuilder_ == null) {
      regionUrlMapsValidateRequestResourceBuilder_ =
          new com.google.protobuf.SingleFieldBuilderV3<
              com.google.cloud.compute.v1.RegionUrlMapsValidateRequest,
              com.google.cloud.compute.v1.RegionUrlMapsValidateRequest.Builder,
              com.google.cloud.compute.v1.RegionUrlMapsValidateRequestOrBuilder>(
              getRegionUrlMapsValidateRequestResource(), getParentForChildren(), isClean());
      regionUrlMapsValidateRequestResource_ = null;
    }
    return regionUrlMapsValidateRequestResourceBuilder_;
  }

  // Lazily-decoded string field; same String/ByteString caching as project_.
  private java.lang.Object urlMap_ = "";
  /**
   *
   *
   * <pre>
   * Name of the UrlMap resource to be validated as.
   * </pre>
   *
   * <code>string url_map = 367020684 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The urlMap.
   */
  public java.lang.String getUrlMap() {
    java.lang.Object ref = urlMap_;
    if (!(ref instanceof java.lang.String)) {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      urlMap_ = s;
      return s;
    } else {
      return (java.lang.String) ref;
    }
  }
  /**
   *
   *
   * <pre>
   * Name of the UrlMap resource to be validated as.
   * </pre>
   *
   * <code>string url_map = 367020684 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The bytes for urlMap.
   */
  public com.google.protobuf.ByteString getUrlMapBytes() {
    java.lang.Object ref = urlMap_;
    if (ref instanceof String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      urlMap_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  /**
   *
   *
   * <pre>
   * Name of the UrlMap resource to be validated as.
   * </pre>
   *
   * <code>string url_map = 367020684 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @param value The urlMap to set.
   * @return This builder for chaining.
   */
  public Builder setUrlMap(java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }
    urlMap_ = value;
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Name of the UrlMap resource to be validated as.
   * </pre>
   *
   * <code>string url_map = 367020684 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return This builder for chaining.
   */
  public Builder clearUrlMap() {
    urlMap_ = getDefaultInstance().getUrlMap();
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Name of the UrlMap resource to be validated as.
   * </pre>
   *
   * <code>string url_map = 367020684 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @param value The bytes for urlMap to set.
   * @return This builder for chaining.
   */
  public Builder setUrlMapBytes(com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    checkByteStringIsUtf8(value);
    urlMap_ = value;
    onChanged();
    return this;
  }

  @java.lang.Override
  public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
  }

  @java.lang.Override
  public final Builder mergeUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }

  // @@protoc_insertion_point(builder_scope:google.cloud.compute.v1.ValidateRegionUrlMapRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.compute.v1.ValidateRegionUrlMapRequest)
// Singleton empty instance shared by all callers; created eagerly at class load.
private static final com.google.cloud.compute.v1.ValidateRegionUrlMapRequest DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.compute.v1.ValidateRegionUrlMapRequest();
}

public static com.google.cloud.compute.v1.ValidateRegionUrlMapRequest getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

// Stateless, thread-safe parser used by all parseFrom overloads above.
private static final com.google.protobuf.Parser<ValidateRegionUrlMapRequest> PARSER =
    new com.google.protobuf.AbstractParser<ValidateRegionUrlMapRequest>() {
      @java.lang.Override
      public ValidateRegionUrlMapRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new ValidateRegionUrlMapRequest(input, extensionRegistry);
      }
    };

public static com.google.protobuf.Parser<ValidateRegionUrlMapRequest> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<ValidateRegionUrlMapRequest> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.compute.v1.ValidateRegionUrlMapRequest getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
| |
package com.example.android.sunshine.app.sync;
import android.accounts.Account;
import android.accounts.AccountManager;
import android.annotation.SuppressLint;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.content.AbstractThreadedSyncAdapter;
import android.content.ContentProviderClient;
import android.content.ContentResolver;
import android.content.ContentUris;
import android.content.ContentValues;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.SyncRequest;
import android.content.SyncResult;
import android.content.res.Resources;
import android.database.Cursor;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.net.Uri;
import android.os.Build;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.support.annotation.IntDef;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import com.google.android.gms.common.api.ResultCallback;
import com.google.android.gms.wearable.Asset;
import com.google.android.gms.wearable.DataApi;
import com.google.android.gms.wearable.PutDataMapRequest;
import com.google.android.gms.wearable.PutDataRequest;
import com.google.android.gms.wearable.DataApi;
import com.google.android.gms.wearable.Wearable;
import android.support.v4.app.NotificationCompat;
import android.support.v4.app.TaskStackBuilder;
import android.text.format.Time;
import android.util.Log;
import com.bumptech.glide.Glide;
import com.example.android.sunshine.app.BuildConfig;
import com.example.android.sunshine.app.MainActivity;
import com.example.android.sunshine.app.R;
import com.example.android.sunshine.app.Utility;
import com.example.android.sunshine.app.data.WeatherContract;
import com.example.android.sunshine.app.muzei.WeatherMuzeiSource;
import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.api.GoogleApiClient;
import com.google.android.gms.wearable.Wearable;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.BufferedReader;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.Vector;
import java.util.concurrent.ExecutionException;
public class SunshineSyncAdapter extends AbstractThreadedSyncAdapter implements
GoogleApiClient.ConnectionCallbacks,
GoogleApiClient.OnConnectionFailedListener {
public final String LOG_TAG = SunshineSyncAdapter.class.getSimpleName();
// Broadcast action fired after a sync updates the weather database.
public static final String ACTION_DATA_UPDATED =
    "com.example.android.sunshine.app.ACTION_DATA_UPDATED";
// Interval at which to sync with the weather, in seconds.
// 60 seconds (1 minute) * 180 = 3 hours
public static final int SYNC_INTERVAL = 60 * 180;
public static final int SYNC_FLEXTIME = SYNC_INTERVAL / 3;
private static final long DAY_IN_MILLIS = 1000 * 60 * 60 * 24;
private static final int WEATHER_NOTIFICATION_ID = 3004;
// Columns queried when pushing today's weather to the notification / wearable.
private static final String[] NOTIFY_WEATHER_PROJECTION = new String[]{
    WeatherContract.WeatherEntry.COLUMN_WEATHER_ID,
    WeatherContract.WeatherEntry.COLUMN_MAX_TEMP,
    WeatherContract.WeatherEntry.COLUMN_MIN_TEMP,
    WeatherContract.WeatherEntry.COLUMN_SHORT_DESC
};
// these indices must match the projection
private static final int INDEX_WEATHER_ID = 0;
private static final int INDEX_MAX_TEMP = 1;
private static final int INDEX_MIN_TEMP = 2;
private static final int INDEX_SHORT_DESC = 3;
// Play-services client used to push weather to the paired watch; (re)built in onPerformSync().
private GoogleApiClient mGoogleApiClient;
// Compile-time-checked result codes for the last location fetch (see setLocationStatus).
@Retention(RetentionPolicy.SOURCE)
@IntDef({LOCATION_STATUS_OK, LOCATION_STATUS_SERVER_DOWN, LOCATION_STATUS_SERVER_INVALID, LOCATION_STATUS_UNKNOWN, LOCATION_STATUS_INVALID})
public @interface LocationStatus {
}
public static final int LOCATION_STATUS_OK = 0;
public static final int LOCATION_STATUS_SERVER_DOWN = 1;
public static final int LOCATION_STATUS_SERVER_INVALID = 2;
public static final int LOCATION_STATUS_UNKNOWN = 3;
public static final int LOCATION_STATUS_INVALID = 4;
// Standard sync-adapter wiring; autoInitialize is forwarded unchanged to the framework.
public SunshineSyncAdapter(Context context, boolean autoInitialize) {
    super(context, autoInitialize);
}
// For wearable: once the GoogleApiClient is connected, push today's weather
// (high/low strings plus the condition icon as an Asset) to the watch via DataApi.
// Fixes: the Cursor was leaked on the early "Nothing to update." return, the
// query() result was not null-checked, and mGoogleApiClient.connect() was
// redundantly re-invoked from inside this already-connected callback.
@Override
public void onConnected(@Nullable Bundle bundle) {
    Log.d(LOG_TAG, "Connection Established");
    Context context = getContext();
    String locationQuery = Utility.getPreferredLocation(context);
    Uri weatherUri = WeatherContract.WeatherEntry.buildWeatherLocationWithDate(
            locationQuery, System.currentTimeMillis());
    Cursor cursor = context.getContentResolver()
            .query(weatherUri, NOTIFY_WEATHER_PROJECTION, null, null, null);
    if (cursor == null) {
        // ContentResolver.query() may return null if the provider is unavailable.
        return;
    }
    try {
        if (!cursor.moveToFirst()) {
            return;
        }
        final int weatherId = cursor.getInt(INDEX_WEATHER_ID);
        final double high = cursor.getDouble(INDEX_MAX_TEMP);
        final double low = cursor.getDouble(INDEX_MIN_TEMP);
        final String strHigh = Utility.formatTemperature(context, high);
        final String strLow = Utility.formatTemperature(context, low);
        Log.d(LOG_TAG, "high temp: " + strHigh);
        Log.d(LOG_TAG, "low temp: " + strLow);
        // NOTE(review): these prefs are read for de-duplication but nothing visible
        // here ever writes them back after a successful send, so the "Nothing to
        // update." branch only fires if some other component updates them — verify.
        final SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context);
        int watch_weatherId = prefs.getInt(context.getString(R.string.pref_watch_weather_id), -1);
        String watch_high = prefs.getString(context.getString(R.string.pref_watch_high), "");
        String watch_low = prefs.getString(context.getString(R.string.pref_watch_low), "");
        Log.d(LOG_TAG, "Watch ID: " + watch_weatherId + " New Weather ID: " + weatherId);
        Log.d(LOG_TAG, "Watch High: " + watch_high + " New High: " + strHigh);
        Log.d(LOG_TAG, "Watch Low: " + watch_low + " New Low: " + strLow);
        if (weatherId == watch_weatherId && strHigh.equals(watch_high) && strLow.equals(watch_low)) {
            Log.d(LOG_TAG, "Nothing to update.");
            return; // cursor is still closed by the finally block (previously leaked here)
        }
        int iconId = Utility.getIconResourceForWeatherCondition(weatherId);
        Resources resources = context.getResources();
        Bitmap icon = BitmapFactory.decodeResource(resources, iconId, null);
        PutDataMapRequest putDataMapRequest = PutDataMapRequest.create("/weather");
        putDataMapRequest.getDataMap().putString("high", strHigh);
        putDataMapRequest.getDataMap().putString("low", strLow);
        final ByteArrayOutputStream byteStream = new ByteArrayOutputStream();
        icon.compress(Bitmap.CompressFormat.PNG, 100, byteStream);
        putDataMapRequest.getDataMap().putAsset("icon", Asset.createFromBytes(byteStream.toByteArray()));
        // Timestamp forces the DataItem to differ from the previous one so listeners fire.
        putDataMapRequest.getDataMap().putLong("Time", System.currentTimeMillis());
        PutDataRequest request = putDataMapRequest.asPutDataRequest();
        request.setUrgent();
        Wearable.DataApi.putDataItem(mGoogleApiClient, request)
                .setResultCallback(new ResultCallback<DataApi.DataItemResult>() {
                    @Override
                    public void onResult(DataApi.DataItemResult dataItemResult) {
                        if (dataItemResult.getStatus().isSuccess()) {
                            Log.d(LOG_TAG, "Sent Data Item to Wearable...");
                        } else {
                            Log.d(LOG_TAG, "Error sending Data Item to Wearable");
                        }
                    }
                });
    } finally {
        cursor.close();
    }
}
// GoogleApiClient callback: connection temporarily lost; the client reconnects itself.
@Override
public void onConnectionSuspended(int i) {
    Log.d(LOG_TAG, "Connection Suspended");
}
// GoogleApiClient callback: connection could not be established; the wearable
// update for this sync is silently skipped.
@Override
public void onConnectionFailed(@NonNull ConnectionResult connectionResult) {
    Log.d(LOG_TAG, "Connection Failed");
}
/**
 * Fetches the 14-day forecast for the user's preferred location from the
 * OpenWeatherMap API, stores it via {@code getWeatherDataFromJson}, and kicks off
 * the wearable update by connecting {@code mGoogleApiClient} (the push itself
 * happens in {@code onConnected}). On failure the location status preference is
 * set so the UI can explain what went wrong.
 *
 * Changes: StringBuffer -> StringBuilder (no cross-thread sharing of the buffer),
 * no string concatenation inside the append loop, removed the redundant
 * e.printStackTrace() (Log.e already records the stack trace), and removed the
 * unused intermediate forecastJsonStr local and trailing bare return.
 */
@Override
public void onPerformSync(Account account, Bundle extras, String authority, ContentProviderClient provider, SyncResult syncResult) {
    Log.d(LOG_TAG, "Starting sync");
    String locationQuery = Utility.getPreferredLocation(getContext());
    // Connect to the wearable up front; onConnected() pushes the freshly synced weather.
    mGoogleApiClient = new GoogleApiClient.Builder(getContext())
            .addApi(Wearable.API)
            .addConnectionCallbacks(this)
            .addOnConnectionFailedListener(this)
            .build();
    mGoogleApiClient.connect();
    // Declared outside the try/catch so they can be released in the finally block.
    HttpURLConnection urlConnection = null;
    BufferedReader reader = null;
    String format = "json";
    String units = "metric";
    int numDays = 14;
    try {
        // Construct the URL for the OpenWeatherMap query
        // Possible parameters are avaiable at OWM's forecast API page, at
        // http://openweathermap.org/API#forecast
        final String FORECAST_BASE_URL =
                "http://api.openweathermap.org/data/2.5/forecast/daily?";
        final String QUERY_PARAM = "q";
        final String FORMAT_PARAM = "mode";
        final String UNITS_PARAM = "units";
        final String DAYS_PARAM = "cnt";
        final String APPID_PARAM = "APPID";
        Uri builtUri = Uri.parse(FORECAST_BASE_URL).buildUpon()
                .appendQueryParameter(QUERY_PARAM, locationQuery)
                .appendQueryParameter(FORMAT_PARAM, format)
                .appendQueryParameter(UNITS_PARAM, units)
                .appendQueryParameter(DAYS_PARAM, Integer.toString(numDays))
                .appendQueryParameter(APPID_PARAM, BuildConfig.OPEN_WEATHER_MAP_API_KEY)
                .build();
        URL url = new URL(builtUri.toString());
        // Create the request to OpenWeatherMap, and open the connection
        urlConnection = (HttpURLConnection) url.openConnection();
        urlConnection.setRequestMethod("GET");
        urlConnection.connect();
        // Read the input stream into a String
        InputStream inputStream = urlConnection.getInputStream();
        if (inputStream == null) {
            // Nothing to do.
            return;
        }
        // StringBuilder: the buffer is confined to this thread, so StringBuffer's
        // synchronization was pure overhead.
        StringBuilder buffer = new StringBuilder();
        reader = new BufferedReader(new InputStreamReader(inputStream));
        String line;
        while ((line = reader.readLine()) != null) {
            // The newline isn't needed for JSON parsing, but it makes a logged
            // payload far easier to read while debugging.
            buffer.append(line).append('\n');
        }
        if (buffer.length() == 0) {
            // Stream was empty. No point in parsing.
            setLocationStatus(getContext(), LOCATION_STATUS_SERVER_DOWN);
            return;
        }
        getWeatherDataFromJson(buffer.toString(), locationQuery);
    } catch (IOException e) {
        Log.e(LOG_TAG, "Error ", e);
        // If the code didn't successfully get the weather data, there's no point in
        // attempting to parse it.
        setLocationStatus(getContext(), LOCATION_STATUS_SERVER_DOWN);
    } catch (JSONException e) {
        Log.e(LOG_TAG, e.getMessage(), e);
        setLocationStatus(getContext(), LOCATION_STATUS_SERVER_INVALID);
    } finally {
        if (urlConnection != null) {
            urlConnection.disconnect();
        }
        if (reader != null) {
            try {
                reader.close();
            } catch (final IOException e) {
                Log.e(LOG_TAG, "Error closing stream", e);
            }
        }
    }
}
/**
 * Take the String representing the complete forecast in JSON Format and
 * pull out the data we need to construct the Strings needed for the wireframes.
 * <p/>
 * Fortunately parsing is easy: constructor takes the JSON string and converts it
 * into an Object hierarchy for us.
 *
 * @param forecastJsonStr raw JSON payload returned by the weather server
 * @param locationSetting the location string used to request updates from the server
 * @throws JSONException declared for callers; in practice parse failures are caught
 *                       internally and reported via LOCATION_STATUS_SERVER_INVALID
 */
private void getWeatherDataFromJson(String forecastJsonStr,
                                    String locationSetting)
        throws JSONException {
    // These are the names of the JSON objects that need to be extracted.
    // Location information
    final String OWM_CITY = "city";
    final String OWM_CITY_NAME = "name";
    final String OWM_COORD = "coord";
    // Location coordinate
    final String OWM_LATITUDE = "lat";
    final String OWM_LONGITUDE = "lon";
    // Weather information. Each day's forecast info is an element of the "list" array.
    final String OWM_LIST = "list";
    final String OWM_PRESSURE = "pressure";
    final String OWM_HUMIDITY = "humidity";
    final String OWM_WINDSPEED = "speed";
    final String OWM_WIND_DIRECTION = "deg";
    // All temperatures are children of the "temp" object.
    final String OWM_TEMPERATURE = "temp";
    final String OWM_MAX = "max";
    final String OWM_MIN = "min";
    final String OWM_WEATHER = "weather";
    final String OWM_DESCRIPTION = "main";
    final String OWM_WEATHER_ID = "id";
    final String OWM_MESSAGE_CODE = "cod";
    try {
        JSONObject forecastJson = new JSONObject(forecastJsonStr);
        // do we have an error?
        if (forecastJson.has(OWM_MESSAGE_CODE)) {
            int errorCode = forecastJson.getInt(OWM_MESSAGE_CODE);
            switch (errorCode) {
                case HttpURLConnection.HTTP_OK:
                    break;
                case HttpURLConnection.HTTP_NOT_FOUND:
                    // Server is reachable but does not recognize the location.
                    setLocationStatus(getContext(), LOCATION_STATUS_INVALID);
                    return;
                default:
                    setLocationStatus(getContext(), LOCATION_STATUS_SERVER_DOWN);
                    return;
            }
        }
        JSONArray weatherArray = forecastJson.getJSONArray(OWM_LIST);
        JSONObject cityJson = forecastJson.getJSONObject(OWM_CITY);
        String cityName = cityJson.getString(OWM_CITY_NAME);
        JSONObject cityCoord = cityJson.getJSONObject(OWM_COORD);
        double cityLatitude = cityCoord.getDouble(OWM_LATITUDE);
        double cityLongitude = cityCoord.getDouble(OWM_LONGITUDE);
        long locationId = addLocation(locationSetting, cityName, cityLatitude, cityLongitude);
        // Insert the new weather information into the database
        Vector<ContentValues> cVVector = new Vector<ContentValues>(weatherArray.length());
        // OWM returns daily forecasts based upon the local time of the city that is being
        // asked for, which means that we need to know the GMT offset to translate this data
        // properly.
        // Since this data is also sent in-order and the first day is always the
        // current day, we're going to take advantage of that to get a nice
        // normalized UTC date for all of our weather.
        Time dayTime = new Time();
        dayTime.setToNow();
        // we start at the day returned by local time. Otherwise this is a mess.
        int julianStartDay = Time.getJulianDay(System.currentTimeMillis(), dayTime.gmtoff);
        // now we work exclusively in UTC
        dayTime = new Time();
        for (int i = 0; i < weatherArray.length(); i++) {
            // These are the values that will be collected.
            long dateTime;
            double pressure;
            int humidity;
            double windSpeed;
            double windDirection;
            double high;
            double low;
            String description;
            int weatherId;
            // Get the JSON object representing the day
            JSONObject dayForecast = weatherArray.getJSONObject(i);
            // Cheating to convert this to UTC time, which is what we want anyhow
            dateTime = dayTime.setJulianDay(julianStartDay + i);
            pressure = dayForecast.getDouble(OWM_PRESSURE);
            humidity = dayForecast.getInt(OWM_HUMIDITY);
            windSpeed = dayForecast.getDouble(OWM_WINDSPEED);
            windDirection = dayForecast.getDouble(OWM_WIND_DIRECTION);
            // Description is in a child array called "weather", which is 1 element long.
            // That element also contains a weather code.
            JSONObject weatherObject =
                    dayForecast.getJSONArray(OWM_WEATHER).getJSONObject(0);
            description = weatherObject.getString(OWM_DESCRIPTION);
            weatherId = weatherObject.getInt(OWM_WEATHER_ID);
            // Temperatures are in a child object called "temp". Try not to name variables
            // "temp" when working with temperature. It confuses everybody.
            JSONObject temperatureObject = dayForecast.getJSONObject(OWM_TEMPERATURE);
            high = temperatureObject.getDouble(OWM_MAX);
            low = temperatureObject.getDouble(OWM_MIN);
            ContentValues weatherValues = new ContentValues();
            weatherValues.put(WeatherContract.WeatherEntry.COLUMN_LOC_KEY, locationId);
            weatherValues.put(WeatherContract.WeatherEntry.COLUMN_DATE, dateTime);
            weatherValues.put(WeatherContract.WeatherEntry.COLUMN_HUMIDITY, humidity);
            weatherValues.put(WeatherContract.WeatherEntry.COLUMN_PRESSURE, pressure);
            weatherValues.put(WeatherContract.WeatherEntry.COLUMN_WIND_SPEED, windSpeed);
            weatherValues.put(WeatherContract.WeatherEntry.COLUMN_DEGREES, windDirection);
            weatherValues.put(WeatherContract.WeatherEntry.COLUMN_MAX_TEMP, high);
            weatherValues.put(WeatherContract.WeatherEntry.COLUMN_MIN_TEMP, low);
            weatherValues.put(WeatherContract.WeatherEntry.COLUMN_SHORT_DESC, description);
            weatherValues.put(WeatherContract.WeatherEntry.COLUMN_WEATHER_ID, weatherId);
            cVVector.add(weatherValues);
        }
        // add to database
        if (cVVector.size() > 0) {
            ContentValues[] cvArray = new ContentValues[cVVector.size()];
            cVVector.toArray(cvArray);
            getContext().getContentResolver().bulkInsert(WeatherContract.WeatherEntry.CONTENT_URI, cvArray);
            // delete old data so we don't build up an endless history
            getContext().getContentResolver().delete(WeatherContract.WeatherEntry.CONTENT_URI,
                    WeatherContract.WeatherEntry.COLUMN_DATE + " <= ?",
                    new String[]{Long.toString(dayTime.setJulianDay(julianStartDay - 1))});
            updateWidgets();
            updateMuzei();
            notifyWeather();
        }
        Log.d(LOG_TAG, "Sync Complete. " + cVVector.size() + " Inserted");
        setLocationStatus(getContext(), LOCATION_STATUS_OK);
    } catch (JSONException e) {
        // Log.e already records the full stack trace; the previous extra
        // printStackTrace() duplicated it on stderr.
        Log.e(LOG_TAG, e.getMessage(), e);
        setLocationStatus(getContext(), LOCATION_STATUS_SERVER_INVALID);
    }
}
/**
 * Broadcasts ACTION_DATA_UPDATED so app widgets can refresh their displayed data.
 */
private void updateWidgets() {
    Context appContext = getContext();
    // Restricting the broadcast to our own package ensures only our components receive it.
    Intent refreshIntent = new Intent(ACTION_DATA_UPDATED);
    refreshIntent.setPackage(appContext.getPackageName());
    appContext.sendBroadcast(refreshIntent);
}
/**
 * Notifies the Muzei art source that new weather data is available.
 */
private void updateMuzei() {
    // Muzei is only compatible with Jelly Bean MR1+ devices; nothing to do below that.
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN_MR1) {
        return;
    }
    Context appContext = getContext();
    Intent muzeiIntent = new Intent(ACTION_DATA_UPDATED);
    muzeiIntent.setClass(appContext, WeatherMuzeiSource.class);
    appContext.startService(muzeiIntent);
}
/**
 * Posts (at most once per day) a notification with today's weather, if the user
 * has notifications enabled in preferences.
 */
private void notifyWeather() {
    Context context = getContext();
    // checking the last update and notify if it's the first of the day
    SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context);
    String displayNotificationsKey = context.getString(R.string.pref_enable_notifications_key);
    boolean displayNotifications = prefs.getBoolean(displayNotificationsKey,
            Boolean.parseBoolean(context.getString(R.string.pref_enable_notifications_default)));
    if (displayNotifications) {
        String lastNotificationKey = context.getString(R.string.pref_last_notification);
        long lastSync = prefs.getLong(lastNotificationKey, 0);
        if (System.currentTimeMillis() - lastSync >= DAY_IN_MILLIS) {
            // Last sync was more than 1 day ago, let's send a notification with the weather.
            String locationQuery = Utility.getPreferredLocation(context);
            Uri weatherUri = WeatherContract.WeatherEntry.buildWeatherLocationWithDate(locationQuery, System.currentTimeMillis());
            // we'll query our contentProvider, as always
            Cursor cursor = context.getContentResolver().query(weatherUri, NOTIFY_WEATHER_PROJECTION, null, null, null);
            // query() may return null (e.g. provider failure); the previous code would
            // have thrown a NullPointerException at moveToFirst().
            if (cursor == null) {
                return;
            }
            try {
                if (cursor.moveToFirst()) {
                    int weatherId = cursor.getInt(INDEX_WEATHER_ID);
                    double high = cursor.getDouble(INDEX_MAX_TEMP);
                    double low = cursor.getDouble(INDEX_MIN_TEMP);
                    String desc = cursor.getString(INDEX_SHORT_DESC);
                    int iconId = Utility.getIconResourceForWeatherCondition(weatherId);
                    Resources resources = context.getResources();
                    int artResourceId = Utility.getArtResourceForWeatherCondition(weatherId);
                    String artUrl = Utility.getArtUrlForWeatherCondition(context, weatherId);
                    // On Honeycomb and higher devices, we can retrieve the size of the large icon
                    // Prior to that, we use a fixed size
                    @SuppressLint("InlinedApi")
                    int largeIconWidth = Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB
                            ? resources.getDimensionPixelSize(android.R.dimen.notification_large_icon_width)
                            : resources.getDimensionPixelSize(R.dimen.notification_large_icon_default);
                    @SuppressLint("InlinedApi")
                    int largeIconHeight = Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB
                            ? resources.getDimensionPixelSize(android.R.dimen.notification_large_icon_height)
                            : resources.getDimensionPixelSize(R.dimen.notification_large_icon_default);
                    // Retrieve the large icon
                    Bitmap largeIcon;
                    try {
                        largeIcon = Glide.with(context)
                                .load(artUrl)
                                .asBitmap()
                                .error(artResourceId)
                                .fitCenter()
                                .into(largeIconWidth, largeIconHeight).get();
                    } catch (InterruptedException | ExecutionException e) {
                        // Fall back to the bundled art if the download fails.
                        Log.e(LOG_TAG, "Error retrieving large icon from " + artUrl, e);
                        largeIcon = BitmapFactory.decodeResource(resources, artResourceId);
                    }
                    String title = context.getString(R.string.app_name);
                    // Define the text of the forecast.
                    String contentText = String.format(context.getString(R.string.format_notification),
                            desc,
                            Utility.formatTemperature(context, high),
                            Utility.formatTemperature(context, low));
                    // NotificationCompatBuilder is a very convenient way to build backward-compatible
                    // notifications. Just throw in some data.
                    NotificationCompat.Builder mBuilder =
                            new NotificationCompat.Builder(getContext())
                                    .setColor(resources.getColor(R.color.primary_light))
                                    .setSmallIcon(iconId)
                                    .setLargeIcon(largeIcon)
                                    .setContentTitle(title)
                                    .setContentText(contentText);
                    // Make something interesting happen when the user clicks on the notification.
                    // In this case, opening the app is sufficient.
                    Intent resultIntent = new Intent(context, MainActivity.class);
                    // The stack builder object will contain an artificial back stack for the
                    // started Activity.
                    // This ensures that navigating backward from the Activity leads out of
                    // your application to the Home screen.
                    TaskStackBuilder stackBuilder = TaskStackBuilder.create(context);
                    stackBuilder.addNextIntent(resultIntent);
                    PendingIntent resultPendingIntent =
                            stackBuilder.getPendingIntent(
                                    0,
                                    PendingIntent.FLAG_UPDATE_CURRENT
                            );
                    mBuilder.setContentIntent(resultPendingIntent);
                    NotificationManager mNotificationManager =
                            (NotificationManager) getContext().getSystemService(Context.NOTIFICATION_SERVICE);
                    // WEATHER_NOTIFICATION_ID allows you to update the notification later on.
                    mNotificationManager.notify(WEATHER_NOTIFICATION_ID, mBuilder.build());
                    // refreshing last sync; commit is acceptable here because the sync
                    // adapter runs on a background thread.
                    SharedPreferences.Editor editor = prefs.edit();
                    editor.putLong(lastNotificationKey, System.currentTimeMillis());
                    editor.commit();
                }
            } finally {
                // Always release the cursor, even if building the notification throws.
                cursor.close();
            }
        }
    }
}
/**
 * Helper method to handle insertion of a new location in the weather database.
 *
 * @param locationSetting The location string used to request updates from the server.
 * @param cityName A human-readable city name, e.g "Mountain View"
 * @param lat the latitude of the city
 * @param lon the longitude of the city
 * @return the row ID of the added location.
 */
long addLocation(String locationSetting, String cityName, double lat, double lon) {
    long locationId;
    // First, check if the location with this city name exists in the db
    Cursor locationCursor = getContext().getContentResolver().query(
            WeatherContract.LocationEntry.CONTENT_URI,
            new String[]{WeatherContract.LocationEntry._ID},
            WeatherContract.LocationEntry.COLUMN_LOCATION_SETTING + " = ?",
            new String[]{locationSetting},
            null);
    try {
        // query() may return null; treat that the same as "no existing row".
        // Previously a null cursor would have caused a NullPointerException.
        if (locationCursor != null && locationCursor.moveToFirst()) {
            int locationIdIndex = locationCursor.getColumnIndex(WeatherContract.LocationEntry._ID);
            locationId = locationCursor.getLong(locationIdIndex);
        } else {
            // Now that the content provider is set up, inserting rows of data is pretty simple.
            // First create a ContentValues object to hold the data you want to insert.
            ContentValues locationValues = new ContentValues();
            // Then add the data, along with the corresponding name of the data type,
            // so the content provider knows what kind of value is being inserted.
            locationValues.put(WeatherContract.LocationEntry.COLUMN_CITY_NAME, cityName);
            locationValues.put(WeatherContract.LocationEntry.COLUMN_LOCATION_SETTING, locationSetting);
            locationValues.put(WeatherContract.LocationEntry.COLUMN_COORD_LAT, lat);
            locationValues.put(WeatherContract.LocationEntry.COLUMN_COORD_LONG, lon);
            // Finally, insert location data into the database.
            Uri insertedUri = getContext().getContentResolver().insert(
                    WeatherContract.LocationEntry.CONTENT_URI,
                    locationValues
            );
            // The resulting URI contains the ID for the row. Extract the locationId from the Uri.
            locationId = ContentUris.parseId(insertedUri);
        }
    } finally {
        // Release the cursor even if an insert or parse above throws.
        if (locationCursor != null) {
            locationCursor.close();
        }
    }
    return locationId;
}
/**
 * Helper method to schedule the sync adapter periodic execution.
 *
 * @param context      used to look up the sync account and content authority
 * @param syncInterval how often the sync should run, in seconds
 * @param flexTime     flexibility window for inexact timers (KitKat+ only), in seconds
 */
public static void configurePeriodicSync(Context context, int syncInterval, int flexTime) {
    Account syncAccount = getSyncAccount(context);
    String contentAuthority = context.getString(R.string.content_authority);
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
        // Older platforms only support exact periodic syncs.
        ContentResolver.addPeriodicSync(syncAccount, contentAuthority, new Bundle(), syncInterval);
    } else {
        // KitKat and newer allow inexact timers, which are friendlier to battery life.
        SyncRequest periodicRequest = new SyncRequest.Builder()
                .syncPeriodic(syncInterval, flexTime)
                .setSyncAdapter(syncAccount, contentAuthority)
                .setExtras(new Bundle())
                .build();
        ContentResolver.requestSync(periodicRequest);
    }
}
/**
 * Helper method to have the sync adapter sync immediately.
 *
 * @param context The context used to access the account service
 */
public static void syncImmediately(Context context) {
    // Expedited + manual flags ask the sync framework to run this request right away.
    Bundle syncExtras = new Bundle();
    syncExtras.putBoolean(ContentResolver.SYNC_EXTRAS_EXPEDITED, true);
    syncExtras.putBoolean(ContentResolver.SYNC_EXTRAS_MANUAL, true);
    String contentAuthority = context.getString(R.string.content_authority);
    ContentResolver.requestSync(getSyncAccount(context), contentAuthority, syncExtras);
}
/**
 * Helper method to get the fake account to be used with SyncAdapter, or make a new one
 * if the fake account doesn't exist yet. If we make a new account, we call the
 * onAccountCreated method so we can initialize things.
 *
 * @param context The context used to access the account service
 * @return a fake account, or {@code null} if the account could not be created
 */
public static Account getSyncAccount(Context context) {
    // Get an instance of the Android account manager
    AccountManager accountManager =
            (AccountManager) context.getSystemService(Context.ACCOUNT_SERVICE);
    // Create the account type and default account
    Account syncAccount = new Account(
            context.getString(R.string.app_name), context.getString(R.string.sync_account_type));
    // A stored password is the marker that the account already exists.
    if (accountManager.getPassword(syncAccount) != null) {
        return syncAccount;
    }
    /*
     * Add the account and account type, no password or user data.
     * If this fails, report the error by returning null.
     */
    if (!accountManager.addAccountExplicitly(syncAccount, "", null)) {
        return null;
    }
    /*
     * If you don't set android:syncable="true" in
     * in your <provider> element in the manifest,
     * then call ContentResolver.setIsSyncable(account, AUTHORITY, 1)
     * here.
     */
    onAccountCreated(syncAccount, context);
    return syncAccount;
}
/**
 * One-time setup performed right after the sync account is created:
 * schedules the periodic sync, enables automatic syncing, and kicks off
 * an immediate sync so data is available right away.
 */
private static void onAccountCreated(Account newAccount, Context context) {
    // Schedule the recurring sync for the freshly created account.
    SunshineSyncAdapter.configurePeriodicSync(context, SYNC_INTERVAL, SYNC_FLEXTIME);
    // Periodic syncs stay dormant unless automatic syncing is switched on.
    ContentResolver.setSyncAutomatically(
            newAccount, context.getString(R.string.content_authority), true);
    // Kick off one sync right away so the app has data immediately.
    syncImmediately(context);
}
/**
 * Ensures the sync account exists; on first run this also schedules the
 * periodic sync and triggers an immediate one (via onAccountCreated).
 */
public static void initializeSyncAdapter(Context context) {
    // The account lookup itself performs all first-run initialization as a side effect.
    getSyncAccount(context);
}
/**
 * Sets the location status into shared preference. This function should not be called from
 * the UI thread because it uses commit to write to the shared preferences.
 *
 * @param c Context to get the PreferenceManager from.
 * @param locationStatus The IntDef value to set
 */
static private void setLocationStatus(Context c, @LocationStatus int locationStatus) {
    // Synchronous commit() is deliberate here (see javadoc) — callers are off the UI thread.
    PreferenceManager.getDefaultSharedPreferences(c)
            .edit()
            .putInt(c.getString(R.string.pref_location_status_key), locationStatus)
            .commit();
}
}
| |
/**
* Apache License
* Version 2.0, January 2004
* http://www.apache.org/licenses/
*
* TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
*
* 1. Definitions.
*
* "License" shall mean the terms and conditions for use, reproduction,
* and distribution as defined by Sections 1 through 9 of this document.
*
* "Licensor" shall mean the copyright owner or entity authorized by
* the copyright owner that is granting the License.
*
* "Legal Entity" shall mean the union of the acting entity and all
* other entities that control, are controlled by, or are under common
* control with that entity. For the purposes of this definition,
* "control" means (i) the power, direct or indirect, to cause the
* direction or management of such entity, whether by contract or
* otherwise, or (ii) ownership of fifty percent (50%) or more of the
* outstanding shares, or (iii) beneficial ownership of such entity.
*
* "You" (or "Your") shall mean an individual or Legal Entity
* exercising permissions granted by this License.
*
* "Source" form shall mean the preferred form for making modifications,
* including but not limited to software source code, documentation
* source, and configuration files.
*
* "Object" form shall mean any form resulting from mechanical
* transformation or translation of a Source form, including but
* not limited to compiled object code, generated documentation,
* and conversions to other media types.
*
* "Work" shall mean the work of authorship, whether in Source or
* Object form, made available under the License, as indicated by a
* copyright notice that is included in or attached to the work
* (an example is provided in the Appendix below).
*
* "Derivative Works" shall mean any work, whether in Source or Object
* form, that is based on (or derived from) the Work and for which the
* editorial revisions, annotations, elaborations, or other modifications
* represent, as a whole, an original work of authorship. For the purposes
* of this License, Derivative Works shall not include works that remain
* separable from, or merely link (or bind by name) to the interfaces of,
* the Work and Derivative Works thereof.
*
* "Contribution" shall mean any work of authorship, including
* the original version of the Work and any modifications or additions
* to that Work or Derivative Works thereof, that is intentionally
* submitted to Licensor for inclusion in the Work by the copyright owner
* or by an individual or Legal Entity authorized to submit on behalf of
* the copyright owner. For the purposes of this definition, "submitted"
* means any form of electronic, verbal, or written communication sent
* to the Licensor or its representatives, including but not limited to
* communication on electronic mailing lists, source code control systems,
* and issue tracking systems that are managed by, or on behalf of, the
* Licensor for the purpose of discussing and improving the Work, but
* excluding communication that is conspicuously marked or otherwise
* designated in writing by the copyright owner as "Not a Contribution."
*
* "Contributor" shall mean Licensor and any individual or Legal Entity
* on behalf of whom a Contribution has been received by Licensor and
* subsequently incorporated within the Work.
*
* 2. Grant of Copyright License. Subject to the terms and conditions of
* this License, each Contributor hereby grants to You a perpetual,
* worldwide, non-exclusive, no-charge, royalty-free, irrevocable
* copyright license to reproduce, prepare Derivative Works of,
* publicly display, publicly perform, sublicense, and distribute the
* Work and such Derivative Works in Source or Object form.
*
* 3. Grant of Patent License. Subject to the terms and conditions of
* this License, each Contributor hereby grants to You a perpetual,
* worldwide, non-exclusive, no-charge, royalty-free, irrevocable
* (except as stated in this section) patent license to make, have made,
* use, offer to sell, sell, import, and otherwise transfer the Work,
* where such license applies only to those patent claims licensable
* by such Contributor that are necessarily infringed by their
* Contribution(s) alone or by combination of their Contribution(s)
* with the Work to which such Contribution(s) was submitted. If You
* institute patent litigation against any entity (including a
* cross-claim or counterclaim in a lawsuit) alleging that the Work
* or a Contribution incorporated within the Work constitutes direct
* or contributory patent infringement, then any patent licenses
* granted to You under this License for that Work shall terminate
* as of the date such litigation is filed.
*
* 4. Redistribution. You may reproduce and distribute copies of the
* Work or Derivative Works thereof in any medium, with or without
* modifications, and in Source or Object form, provided that You
* meet the following conditions:
*
* (a) You must give any other recipients of the Work or
* Derivative Works a copy of this License; and
*
* (b) You must cause any modified files to carry prominent notices
* stating that You changed the files; and
*
* (c) You must retain, in the Source form of any Derivative Works
* that You distribute, all copyright, patent, trademark, and
* attribution notices from the Source form of the Work,
* excluding those notices that do not pertain to any part of
* the Derivative Works; and
*
* (d) If the Work includes a "NOTICE" text file as part of its
* distribution, then any Derivative Works that You distribute must
* include a readable copy of the attribution notices contained
* within such NOTICE file, excluding those notices that do not
* pertain to any part of the Derivative Works, in at least one
* of the following places: within a NOTICE text file distributed
* as part of the Derivative Works; within the Source form or
* documentation, if provided along with the Derivative Works; or,
* within a display generated by the Derivative Works, if and
* wherever such third-party notices normally appear. The contents
* of the NOTICE file are for informational purposes only and
* do not modify the License. You may add Your own attribution
* notices within Derivative Works that You distribute, alongside
* or as an addendum to the NOTICE text from the Work, provided
* that such additional attribution notices cannot be construed
* as modifying the License.
*
* You may add Your own copyright statement to Your modifications and
* may provide additional or different license terms and conditions
* for use, reproduction, or distribution of Your modifications, or
* for any such Derivative Works as a whole, provided Your use,
* reproduction, and distribution of the Work otherwise complies with
* the conditions stated in this License.
*
* 5. Submission of Contributions. Unless You explicitly state otherwise,
* any Contribution intentionally submitted for inclusion in the Work
* by You to the Licensor shall be under the terms and conditions of
* this License, without any additional terms or conditions.
* Notwithstanding the above, nothing herein shall supersede or modify
* the terms of any separate license agreement you may have executed
* with Licensor regarding such Contributions.
*
* 6. Trademarks. This License does not grant permission to use the trade
* names, trademarks, service marks, or product names of the Licensor,
* except as required for reasonable and customary use in describing the
* origin of the Work and reproducing the content of the NOTICE file.
*
* 7. Disclaimer of Warranty. Unless required by applicable law or
* agreed to in writing, Licensor provides the Work (and each
* Contributor provides its Contributions) on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied, including, without limitation, any warranties or conditions
* of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
* PARTICULAR PURPOSE. You are solely responsible for determining the
* appropriateness of using or redistributing the Work and assume any
* risks associated with Your exercise of permissions under this License.
*
* 8. Limitation of Liability. In no event and under no legal theory,
* whether in tort (including negligence), contract, or otherwise,
* unless required by applicable law (such as deliberate and grossly
* negligent acts) or agreed to in writing, shall any Contributor be
* liable to You for damages, including any direct, indirect, special,
* incidental, or consequential damages of any character arising as a
* result of this License or out of the use or inability to use the
* Work (including but not limited to damages for loss of goodwill,
* work stoppage, computer failure or malfunction, or any and all
* other commercial damages or losses), even if such Contributor
* has been advised of the possibility of such damages.
*
* 9. Accepting Warranty or Additional Liability. While redistributing
* the Work or Derivative Works thereof, You may choose to offer,
* and charge a fee for, acceptance of support, warranty, indemnity,
* or other liability obligations and/or rights consistent with this
* License. However, in accepting such obligations, You may act only
* on Your own behalf and on Your sole responsibility, not on behalf
* of any other Contributor, and only if You agree to indemnify,
* defend, and hold each Contributor harmless for any liability
* incurred by, or claims asserted against, such Contributor by reason
* of your accepting any such warranty or additional liability.
*
* END OF TERMS AND CONDITIONS
*
* APPENDIX: How to apply the Apache License to your work.
*
* To apply the Apache License to your work, attach the following
* boilerplate notice, with the fields enclosed by brackets "{}"
* replaced with your own identifying information. (Don't include
* the brackets!) The text should be enclosed in the appropriate
* comment syntax for the file format. We also recommend that a
* file or class name and description of purpose be included on the
* same "printed page" as the copyright notice for easier
* identification within third-party archives.
*
* Copyright {yyyy} {name of copyright owner}
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.deleidos.rtws.commons.cloud.sweeper;
import java.io.File;
import java.io.FileOutputStream;
import java.io.OutputStream;
import java.io.PrintWriter;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
/**
 * Command-line entry point that sweeps (lists and optionally removes) stale
 * cloud storage objects and EBS volumes, writing candidates to "toDel.txt".
 */
public class Sweeper {
    public static final String SERVICE_ENDPOINT_HOST_KEY = "serviceHost";
    public static final String SERVICE_ENDPOINT_PORT_KEY = "servicePort";
    public static final String SERVICE_ENDPOINT_SERVICE_PATH_KEY = "servicePath";
    public static final String SERVICE_ENDPOINT_STORAGE_PATH_KEY = "storagePath";
    public static final String BUCKET_OPT_KEY = "bucket";
    public static final String PREFIX_OPT_KEY = "prefix";
    public static final String DAYS_OPT_KEY = "days";
    public static final String REMOVE_OPT_KEY = "remove";
    public static final String SECRET_KEY_KEY = "secret";
    public static final String ACCESS_KEY_KEY = "access";

    // Parsed command line, shared with the sweepers via checkRequiredOptions/sweep.
    private static CommandLine line;
    private static Options options;
    // Writer for the "toDel.txt" deletion-candidate log; the sweepers append to it.
    public static PrintWriter delStream;

    /**
     * Parses the command line, validates required options, and runs the sweep,
     * logging deletion candidates to "toDel.txt".
     *
     * @throws Exception if parsing fails or a sweeper raises an error
     */
    public static void main(String[] args) throws Exception {
        options = buildOptions();
        try {
            CommandLineParser parser = new PosixParser();
            line = parser.parse(options, args);
        } catch (ParseException pe) {
            // Wrap with context but keep the original exception as the cause.
            throw new S3SweeperException("S3Sweeper - ParseException: " + pe.getMessage(), pe);
        }
        checkRequiredOptions(options);
        File outFile = new File("toDel.txt");
        // try-with-resources guarantees the deletion log is flushed and closed even if
        // a sweeper throws; the previous version leaked the writer on failure.
        try (PrintWriter out = new PrintWriter(outFile)) {
            delStream = out;
            EBSVolumeSweeper volSweeper = new EBSVolumeSweeper();
            S3Sweeper s3Sweeper = new S3Sweeper();
            volSweeper.sweep(line);
            // S3 sweep currently disabled; re-enable when desired.
            //s3Sweeper.sweep(line);
            delStream.flush();
        }
    }

    /**
     * Build the supported options.
     */
    private static Options buildOptions() {
        Options options = new Options();
        options.addOption(SERVICE_ENDPOINT_HOST_KEY, true, "the service endpoint host to connect to");
        options.addOption(SERVICE_ENDPOINT_PORT_KEY, true, "the service endpoint port to connect to");
        options.addOption(SERVICE_ENDPOINT_STORAGE_PATH_KEY, true, "the service endpoint path to storage (walrus/s3) to connect to");
        options.addOption(SERVICE_ENDPOINT_SERVICE_PATH_KEY, true, "the service endpoint path to services (ec2/eucalyptus) to connect to");
        options.addOption(ACCESS_KEY_KEY, true, "the aws access key");
        options.addOption(SECRET_KEY_KEY, true, "the aws secret key");
        options.addOption(BUCKET_OPT_KEY, true, "the s3fs bucket name");
        options.addOption(PREFIX_OPT_KEY, true, "match object key in s3fs with this prefix");
        options.addOption(DAYS_OPT_KEY, true, "number days old object to keep around");
        options.addOption(REMOVE_OPT_KEY, false, "(optional) perform final, no-undo sweep (Otherwise just print things to sweep)");
        return options;
    }

    /**
     * Check for the required options; prints usage and exits with status 1
     * when any required option is missing.
     *
     * @param options Supported options
     * @throws S3SweeperException when it fails to format and print the help text
     */
    private static void checkRequiredOptions(Options options) throws S3SweeperException {
        if (! line.hasOption(BUCKET_OPT_KEY)
                || ! line.hasOption(PREFIX_OPT_KEY)
                || ! line.hasOption(DAYS_OPT_KEY)
                || ! line.hasOption(SECRET_KEY_KEY)
                || ! line.hasOption(ACCESS_KEY_KEY)
                || ! line.hasOption(SERVICE_ENDPOINT_HOST_KEY)
                || ! line.hasOption(SERVICE_ENDPOINT_SERVICE_PATH_KEY)
                || ! line.hasOption(SERVICE_ENDPOINT_STORAGE_PATH_KEY)
                || ! line.hasOption(SERVICE_ENDPOINT_PORT_KEY)
                ) {
            HelpFormatter formatter = new HelpFormatter();
            formatter.printHelp("com.deleidos.rtws.tools.s3.S3Sweeper", options);
            System.exit(1);
        }
    }
}
| |
/*
* Copyright 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.car.app.model;
import static androidx.car.app.model.Action.FLAG_PRIMARY;
import static com.google.common.truth.Truth.assertThat;
import static org.junit.Assert.assertThrows;
import androidx.car.app.TestUtils;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.RobolectricTestRunner;
import org.robolectric.annotation.internal.DoNotInstrument;
/** Tests for {@link MessageTemplate}. */
@RunWith(RobolectricTestRunner.class)
@DoNotInstrument
public class LongMessageTemplateTest {
// Simple title/message fixtures shared by the tests below.
private final String mTitle = "header";
private final String mMessage = "foo";
// Action with a parked-only click listener, reused across tests.
private final Action mAction =
new Action.Builder().setTitle("Action").setOnClickListener(
ParkedOnlyOnClickListener.create(() -> {
})).build();
// NOTE(review): mActionStrip appears unused in the visible tests — confirm before removing.
private final ActionStrip mActionStrip = new ActionStrip.Builder().addAction(mAction).build();
@Test
public void emptyMessage_throws() {
    // An empty message body must be rejected when the template is built.
    assertThrows(
            IllegalStateException.class,
            () -> {
                new LongMessageTemplate.Builder("").setTitle(mTitle).build();
            });
}
@Test
public void noHeaderTitleOrAction_throws() {
    // A template with neither a title nor a header action is invalid.
    assertThrows(
            IllegalStateException.class,
            () -> {
                new LongMessageTemplate.Builder(mMessage).build();
            });
    // Positive cases: a title or a header action on its own is sufficient.
    new LongMessageTemplate.Builder(mMessage).setTitle(mTitle).build();
    new LongMessageTemplate.Builder(mMessage).setHeaderAction(Action.APP_ICON).build();
}
@Test
public void header_unsupportedSpans_throws() {
    // Color spans are not supported in the title.
    CharSequence colorSpanTitle = TestUtils.getCharSequenceWithColorSpan("Title");
    assertThrows(
            IllegalArgumentException.class,
            () -> new LongMessageTemplate.Builder(mMessage).setTitle(colorSpanTitle));
    // DurationSpan and DistanceSpan do not throw
    CharSequence allowedSpanTitle = TestUtils.getCharSequenceWithDistanceAndDurationSpans("Title");
    new LongMessageTemplate.Builder(mMessage).setTitle(allowedSpanTitle).build();
}
@Test
public void moreThanTwoActions_throws() {
assertThrows(IllegalArgumentException.class,
() -> new LongMessageTemplate.Builder(mMessage)
.addAction(mAction)
.addAction(mAction)
.addAction(mAction));
}
@Test
public void twoPrimaryActions_throws() {
Action primaryAction = new Action.Builder().setTitle("primaryAction")
.setOnClickListener(
ParkedOnlyOnClickListener.create(() -> {})
)
.setFlags(FLAG_PRIMARY).build();
assertThrows(IllegalArgumentException.class,
() -> new LongMessageTemplate.Builder(mMessage)
.addAction(primaryAction)
.addAction(primaryAction)
.build());
}
@Test
public void action_unsupportedSpans_throws() {
CharSequence title1 = TestUtils.getCharSequenceWithClickableSpan("Title");
Action action1 =
new Action.Builder().setTitle(title1).setOnClickListener(
ParkedOnlyOnClickListener.create(() -> {
})).build();
assertThrows(IllegalArgumentException.class,
() -> new LongMessageTemplate.Builder(mMessage).addAction(action1));
CarText title2 = TestUtils.getCarTextVariantsWithDistanceAndDurationSpans("Title");
Action action2 =
new Action.Builder().setTitle(title2).setOnClickListener(
ParkedOnlyOnClickListener.create(() -> {
})).build();
assertThrows(IllegalArgumentException.class,
() -> new LongMessageTemplate.Builder(mMessage).addAction(action2));
// DurationSpan and DistanceSpan do not throw
CharSequence title3 = TestUtils.getCharSequenceWithColorSpan("Title");
Action action3 =
new Action.Builder().setTitle(title3).setOnClickListener(
ParkedOnlyOnClickListener.create(() -> {
})).build();
new LongMessageTemplate.Builder(mMessage).setTitle("Title").addAction(action3).build();
CarText title4 = TestUtils.getCarTextVariantsWithColorSpan("Title");
Action action4 =
new Action.Builder().setTitle(title4).setOnClickListener(
ParkedOnlyOnClickListener.create(() -> {
})).build();
new LongMessageTemplate.Builder(mMessage).setTitle("Title").addAction(action4).build();
}
@Test
public void createDefault_valuesAreNull() {
LongMessageTemplate template = new LongMessageTemplate.Builder(mMessage)
.setTitle(mTitle)
.build();
assertThat(template.getMessage().toString()).isEqualTo(mMessage);
assertThat(template.getTitle().toString()).isEqualTo("header");
assertThat(template.getHeaderAction()).isNull();
assertThat(template.getActions()).isEmpty();
assertThat(template.getActionStrip()).isNull();
}
@Test
public void createInstance_setHeaderAction_invalidActionThrows() {
assertThrows(
IllegalArgumentException.class,
() -> new LongMessageTemplate.Builder(mMessage)
.setHeaderAction(
new Action.Builder()
.setTitle("Action")
.setOnClickListener(() -> { })
.build())
.build());
}
@Test
public void createWithContents_hasProperValuesSet() {
ActionStrip actionStrip = new ActionStrip.Builder().addAction(Action.BACK).build();
LongMessageTemplate template = new LongMessageTemplate.Builder(mMessage)
.setTitle(mTitle)
.setHeaderAction(Action.BACK)
.addAction(mAction)
.setActionStrip(actionStrip)
.build();
assertThat(template.getMessage().toString()).isEqualTo(mMessage);
assertThat(template.getTitle().toString()).isEqualTo(mTitle);
assertThat(template.getHeaderAction()).isEqualTo(Action.BACK);
assertThat(template.getActions()).containsExactly(mAction);
assertThat(template.getActionStrip()).isEqualTo(actionStrip);
}
@Test
public void createInstance_notParkedOnlyAction_throws() {
Action action = new Action.Builder()
.setOnClickListener(() -> { })
.setTitle("foo").build();
assertThrows(
IllegalArgumentException.class,
() -> new LongMessageTemplate.Builder(mMessage)
.setTitle("Title")
.addAction(action));
}
@Test
public void equals() {
LongMessageTemplate template1 =
new LongMessageTemplate.Builder(mMessage)
.setTitle(mTitle)
.setHeaderAction(Action.BACK)
.addAction(mAction)
.setActionStrip(mActionStrip)
.build();
LongMessageTemplate template2 =
new LongMessageTemplate.Builder(mMessage)
.setTitle(mTitle)
.setHeaderAction(Action.BACK)
.addAction(mAction)
.setActionStrip(mActionStrip)
.build();
assertThat(template1).isEqualTo(template2);
}
@Test
public void notEquals_differentMessage() {
LongMessageTemplate template1 =
new LongMessageTemplate.Builder(mMessage)
.setTitle(mTitle)
.setHeaderAction(Action.BACK)
.addAction(mAction)
.setActionStrip(mActionStrip)
.build();
LongMessageTemplate template2 =
new LongMessageTemplate.Builder("bar")
.setTitle(mTitle)
.setHeaderAction(Action.BACK)
.addAction(mAction)
.setActionStrip(mActionStrip)
.build();
assertThat(template1).isNotEqualTo(template2);
}
@Test
public void notEquals_differentHeaderAction() {
LongMessageTemplate template1 =
new LongMessageTemplate.Builder(mMessage)
.setTitle(mTitle)
.setHeaderAction(Action.BACK)
.addAction(mAction)
.setActionStrip(mActionStrip)
.build();
LongMessageTemplate template2 =
new LongMessageTemplate.Builder(mMessage)
.setTitle(mTitle)
.setHeaderAction(Action.APP_ICON)
.addAction(mAction)
.setActionStrip(mActionStrip)
.build();
assertThat(template1).isNotEqualTo(template2);
}
@Test
public void notEquals_differentActions() {
LongMessageTemplate template1 =
new LongMessageTemplate.Builder(mMessage)
.setTitle(mTitle)
.setHeaderAction(Action.BACK)
.addAction(mAction)
.setActionStrip(mActionStrip)
.build();
LongMessageTemplate template2 =
new LongMessageTemplate.Builder(mMessage)
.setTitle(mTitle)
.setHeaderAction(Action.BACK)
.addAction(mAction)
.addAction(mAction)
.setActionStrip(mActionStrip)
.build();
assertThat(template1).isNotEqualTo(template2);
}
@Test
public void notEquals_differentActionStrip() {
LongMessageTemplate template1 =
new LongMessageTemplate.Builder(mMessage)
.setTitle(mTitle)
.setHeaderAction(Action.BACK)
.addAction(mAction)
.setActionStrip(mActionStrip)
.build();
LongMessageTemplate template2 =
new LongMessageTemplate.Builder(mMessage)
.setTitle(mTitle)
.setHeaderAction(Action.BACK)
.addAction(mAction)
.setActionStrip(new ActionStrip.Builder()
.addAction(Action.BACK)
.addAction(Action.APP_ICON)
.build())
.build();
assertThat(template1).isNotEqualTo(template2);
}
@Test
public void notEquals_differentTitle() {
LongMessageTemplate template1 =
new LongMessageTemplate.Builder(mMessage)
.setTitle(mTitle)
.setHeaderAction(Action.BACK)
.addAction(mAction)
.setActionStrip(mActionStrip)
.build();
LongMessageTemplate template2 =
new LongMessageTemplate.Builder(mMessage)
.setTitle("yo")
.setHeaderAction(Action.BACK)
.addAction(mAction)
.setActionStrip(mActionStrip)
.build();
assertThat(template1).isNotEqualTo(template2);
}
}
| |
package com.jivesoftware.os.miru.reco.plugins.trending;
import com.google.common.base.Optional;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.MinMaxPriorityQueue;
import com.google.common.collect.Sets;
import com.jivesoftware.os.miru.analytics.plugins.analytics.Analytics;
import com.jivesoftware.os.miru.analytics.plugins.analytics.AnalyticsAnswer;
import com.jivesoftware.os.miru.analytics.plugins.analytics.AnalyticsAnswerEvaluator;
import com.jivesoftware.os.miru.analytics.plugins.analytics.AnalyticsAnswerMerger;
import com.jivesoftware.os.miru.api.MiruQueryServiceException;
import com.jivesoftware.os.miru.api.activity.MiruPartitionId;
import com.jivesoftware.os.miru.api.base.MiruTenantId;
import com.jivesoftware.os.miru.api.query.filter.MiruValue;
import com.jivesoftware.os.miru.plugin.Miru;
import com.jivesoftware.os.miru.plugin.MiruProvider;
import com.jivesoftware.os.miru.plugin.partition.MiruPartitionUnavailableException;
import com.jivesoftware.os.miru.plugin.solution.MiruPartitionResponse;
import com.jivesoftware.os.miru.plugin.solution.MiruRequest;
import com.jivesoftware.os.miru.plugin.solution.MiruRequestAndReport;
import com.jivesoftware.os.miru.plugin.solution.MiruResponse;
import com.jivesoftware.os.miru.plugin.solution.MiruSolution;
import com.jivesoftware.os.miru.plugin.solution.MiruSolutionLogLevel;
import com.jivesoftware.os.miru.plugin.solution.MiruSolvableFactory;
import com.jivesoftware.os.miru.plugin.solution.MiruTimeRange;
import com.jivesoftware.os.miru.plugin.solution.Waveform;
import com.jivesoftware.os.miru.reco.plugins.distincts.Distincts;
import com.jivesoftware.os.miru.reco.plugins.trending.TrendingQuery.Strategy;
import com.jivesoftware.os.miru.reco.trending.WaveformRegression;
import com.jivesoftware.os.mlogger.core.MetricLogger;
import com.jivesoftware.os.mlogger.core.MetricLoggerFactory;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.commons.math.stat.descriptive.rank.Percentile;
import static com.google.common.base.Objects.firstNonNull;
/**
 * Injectable endpoint for the trending plugin. Answers trending queries by
 * gathering per-term activity waveforms through the analytics plugin and then
 * scoring each waveform with the strategies requested by each score set
 * (LINEAR_REGRESSION, LEADER, PEAKS, HIGHEST_PEAK), keeping the top distinct
 * values per strategy.
 */
public class TrendingInjectable {
    private static final MetricLogger LOG = MetricLoggerFactory.getLogger();
    private final MiruProvider<? extends Miru> provider;
    private final Distincts distincts;
    private final Analytics analytics;
    // Batch size used when gathering distinct terms; sourced from plugin config.
    private final int gatherDistinctsBatchSize;
    private final PeakDet peakDet = new PeakDet();
    public TrendingInjectable(MiruProvider<? extends Miru> miruProvider,
        Distincts distincts,
        Analytics analytics) {
        this.provider = miruProvider;
        this.distincts = distincts;
        this.analytics = analytics;
        TrendingPluginConfig config = miruProvider.getConfig(TrendingPluginConfig.class);
        this.gatherDistinctsBatchSize = config.getGatherDistinctsBatchSize();
    }
    /**
     * Linearly maps {@code _long} onto [0, 1] relative to [{@code _min}, {@code _max}].
     * For a degenerate range (min == max): returns 0 when the value equals the bound,
     * otherwise +/-Double.MAX_VALUE to flag which side of the bound it falls on.
     */
    double zeroToOne(long _min, long _max, long _long) {
        if (_max == _min) {
            if (_long == _min) {
                return 0;
            }
            if (_long > _max) {
                return Double.MAX_VALUE;
            }
            return -Double.MAX_VALUE;
        }
        return (double) (_long - _min) / (double) (_max - _min);
    }
    /**
     * Solves a trending query across all partitions and merges the results.
     *
     * @param request tenant-scoped trending query; each score set carries its own
     *     time range, segmentation, and scoring strategies
     * @return the scored trendies plus the waveforms for the retained distinct values
     * @throws MiruQueryServiceException on any failure other than partition
     *     unavailability or interruption, which are rethrown as-is
     */
    public MiruResponse<TrendingAnswer> scoreTrending(MiruRequest<TrendingQuery> request) throws MiruQueryServiceException, InterruptedException {
        try {
            WaveformRegression regression = new WaveformRegression();
            LOG.debug("askAndMerge: request={}", request);
            MiruTenantId tenantId = request.tenantId;
            Miru miru = provider.getMiru(tenantId);
            MiruTimeRange combinedTimeRange = getCombinedTimeRange(request);
            // Each score set buckets its own time range into its own number of segments.
            Map<String, Integer> keyedSegments = Maps.newHashMap();
            for (TrendingQueryScoreSet scoreSet : request.query.scoreSets) {
                keyedSegments.put(scoreSet.key, scoreSet.divideTimeRangeIntoNSegments);
            }
            MiruResponse<AnalyticsAnswer> analyticsResponse = miru.askAndMerge(tenantId,
                new MiruSolvableFactory<>(request.name, provider.getStats(), "trending", new TrendingQuestion(distincts,
                    analytics,
                    gatherDistinctsBatchSize,
                    combinedTimeRange,
                    request,
                    provider.getRemotePartition(TrendingRemotePartition.class))),
                new AnalyticsAnswerEvaluator(),
                new AnalyticsAnswerMerger(keyedSegments),
                AnalyticsAnswer.EMPTY_RESULTS,
                miru.getDefaultExecutor(),
                request.logLevel);
            Map<String, List<Waveform>> keyedWaveforms = (analyticsResponse.answer != null && analyticsResponse.answer.waveforms != null)
                ? analyticsResponse.answer.waveforms
                : Collections.emptyMap();
            Map<String, List<Waveform>> keyedDistinctWaveforms = Maps.newHashMap();
            Map<String, TrendingAnswerScoreSet> keyedScoreSets = Maps.newHashMap();
            // Distinct values already retained by an earlier score set are not scored again.
            Set<MiruValue> consumed = Sets.newHashSet();
            for (TrendingQueryScoreSet queryScoreSet : request.query.scoreSets) {
                List<Waveform> waveforms = keyedWaveforms.get(queryScoreSet.key);
                if (waveforms == null) {
                    continue;
                }
                // Scratch buffer reused for every waveform merge in this score set.
                long[] waveform = new long[queryScoreSet.divideTimeRangeIntoNSegments];
                double bucket95 = 0;
                if (queryScoreSet.strategies.contains(Strategy.PEAKS)) {
                    bucket95 = highestBucket95thPercentile(waveforms, waveform);
                }
                Map<Strategy, MinMaxPriorityQueue<Trendy>> strategyResults = Maps.newHashMapWithExpectedSize(queryScoreSet.strategies.size());
                for (Strategy strategy : queryScoreSet.strategies) {
                    strategyResults.put(strategy,
                        MinMaxPriorityQueue
                            .maximumSize(queryScoreSet.desiredNumberOfDistincts)
                            .create());
                }
                for (Waveform entry : waveforms) {
                    if (consumed.contains(entry.getId())) {
                        continue;
                    }
                    Arrays.fill(waveform, 0);
                    entry.mergeWaveform(waveform);
                    boolean hasCounts = false;
                    // -Double.MAX_VALUE is the most negative finite double; the previous
                    // initializer Double.MIN_VALUE is the smallest POSITIVE double.
                    double highestBucket = -Double.MAX_VALUE;
                    for (long w : waveform) {
                        if (w > 0) {
                            hasCounts = true;
                        }
                        highestBucket = Math.max(w, highestBucket);
                    }
                    if (hasCounts) {
                        if (queryScoreSet.strategies.contains(Strategy.LINEAR_REGRESSION)) {
                            // Score by the slope of a linear fit over the buckets.
                            regression.clear();
                            regression.add(waveform, 0, waveform.length);
                            strategyResults.get(Strategy.LINEAR_REGRESSION).add(new Trendy(entry.getId(), regression.slope()));
                        }
                        if (queryScoreSet.strategies.contains(Strategy.LEADER)) {
                            // Score by total activity across all buckets.
                            long sum = 0;
                            for (long w : waveform) {
                                sum += w;
                            }
                            strategyResults.get(Strategy.LEADER).add(new Trendy(entry.getId(), (double) sum));
                        }
                        if (queryScoreSet.strategies.contains(Strategy.PEAKS)) {
                            // Score by the number of detected peaks above a threshold scaled
                            // by this waveform's own max and the population's 95th percentile.
                            double threshold = (highestBucket / 6) + (bucket95 / 100);
                            List<PeakDet.Peak> peaks = peakDet.peakdet(waveform, threshold);
                            strategyResults.get(Strategy.PEAKS).add(new Trendy(entry.getId(), (double) peaks.size()));
                        }
                        if (queryScoreSet.strategies.contains(Strategy.HIGHEST_PEAK)) {
                            // Score by the tallest bucket (floored at 0).
                            double max = 0;
                            for (int i = 0; i < waveform.length; i++) {
                                max = Math.max(max, waveform[i]);
                            }
                            strategyResults.get(Strategy.HIGHEST_PEAK).add(new Trendy(entry.getId(), max));
                        }
                    }
                }
                // Retain only the waveforms backing a top-ranked trendy in any strategy.
                Set<MiruValue> retainKeys = Sets.newHashSet();
                Map<String, List<Trendy>> strategySortedTrendies = Maps.newHashMapWithExpectedSize(strategyResults.size());
                for (Map.Entry<Strategy, MinMaxPriorityQueue<Trendy>> entry : strategyResults.entrySet()) {
                    List<Trendy> sortedTrendies = Lists.newArrayList(entry.getValue());
                    Collections.sort(sortedTrendies);
                    strategySortedTrendies.put(entry.getKey().name(), sortedTrendies);
                    for (Trendy trendy : sortedTrendies) {
                        retainKeys.add(trendy.distinctValue);
                    }
                }
                List<Waveform> distinctWaveforms = Lists.newArrayListWithCapacity(retainKeys.size());
                for (Waveform entry : waveforms) {
                    if (retainKeys.contains(entry.getId())) {
                        distinctWaveforms.add(entry);
                    }
                }
                consumed.addAll(retainKeys);
                keyedDistinctWaveforms.put(queryScoreSet.key, distinctWaveforms);
                keyedScoreSets.put(queryScoreSet.key, new TrendingAnswerScoreSet(strategySortedTrendies));
            }
            ImmutableList<String> solutionLog = ImmutableList.<String>builder()
                .addAll(analyticsResponse.log)
                .build();
            LOG.debug("Solution:\n{}", solutionLog);
            return new MiruResponse<>(new TrendingAnswer(keyedDistinctWaveforms, keyedScoreSets),
                ImmutableList.<MiruSolution>builder()
                    .addAll(firstNonNull(analyticsResponse.solutions, Collections.<MiruSolution>emptyList()))
                    .build(),
                analyticsResponse.totalElapsed,
                analyticsResponse.missingSchema,
                ImmutableList.<Integer>builder()
                    .addAll(firstNonNull(analyticsResponse.incompletePartitionIds, Collections.<Integer>emptyList()))
                    .build(),
                solutionLog);
        } catch (MiruPartitionUnavailableException | InterruptedException e) {
            throw e;
        } catch (Exception e) {
            //TODO throw http error codes
            throw new MiruQueryServiceException("Failed to score trending stream", e);
        }
    }
    /**
     * Returns the 95th percentile of each waveform's highest bucket value; used to
     * scale the PEAKS detection threshold across the whole population.
     *
     * @param scratch reusable merge buffer, one slot per time segment
     */
    private static double highestBucket95thPercentile(List<Waveform> waveforms, long[] scratch) {
        double[] highestBuckets = new double[waveforms.size()];
        int i = 0;
        for (Waveform entry : waveforms) {
            Arrays.fill(scratch, 0);
            entry.mergeWaveform(scratch);
            for (long w : scratch) {
                highestBuckets[i] = Math.max(highestBuckets[i], w);
            }
            i++;
        }
        // Percentile.evaluate() expects p in (0, 100]; 95 selects the 95th percentile.
        // The previous value of 0.95 computed the 0.95th percentile (near the minimum).
        return new Percentile().evaluate(highestBuckets, 95);
    }
    /**
     * Solves a trending query against a single partition without merging.
     *
     * @return the raw analytics answer for the partition
     * @throws MiruQueryServiceException on any failure other than partition
     *     unavailability or interruption, which are rethrown as-is
     */
    public MiruPartitionResponse<AnalyticsAnswer> scoreTrending(MiruPartitionId partitionId,
        MiruRequestAndReport<TrendingQuery, TrendingReport> requestAndReport)
        throws MiruQueryServiceException, InterruptedException {
        try {
            MiruRequest<TrendingQuery> request = requestAndReport.request;
            LOG.debug("askImmediate: partitionId={} request={}", partitionId, request);
            LOG.trace("askImmediate: report={}", requestAndReport.report);
            MiruTenantId tenantId = request.tenantId;
            Miru miru = provider.getMiru(tenantId);
            MiruTimeRange combinedTimeRange = getCombinedTimeRange(request);
            return miru.askImmediate(tenantId,
                partitionId,
                new MiruSolvableFactory<>(request.name, provider.getStats(),
                    "scoreTrending",
                    new TrendingQuestion(distincts,
                        analytics,
                        gatherDistinctsBatchSize,
                        combinedTimeRange,
                        request,
                        provider.getRemotePartition(TrendingRemotePartition.class))),
                Optional.fromNullable(requestAndReport.report),
                AnalyticsAnswer.EMPTY_RESULTS,
                MiruSolutionLogLevel.NONE);
        } catch (MiruPartitionUnavailableException | InterruptedException e) {
            throw e;
        } catch (Exception e) {
            //TODO throw http error codes
            throw new MiruQueryServiceException("Failed to score trending stream for partition: " + partitionId.getId(), e);
        }
    }
    /** Returns the smallest time range that spans every score set's time range. */
    private MiruTimeRange getCombinedTimeRange(MiruRequest<TrendingQuery> request) {
        long minTimestamp = Long.MAX_VALUE;
        long maxTimestamp = Long.MIN_VALUE;
        for (TrendingQueryScoreSet scoreSet : request.query.scoreSets) {
            minTimestamp = Math.min(minTimestamp, scoreSet.timeRange.smallestTimestamp);
            maxTimestamp = Math.max(maxTimestamp, scoreSet.timeRange.largestTimestamp);
        }
        return new MiruTimeRange(minTimestamp, maxTimestamp);
    }
}
| |
/**
* Copyright (C) 2014-2018 LinkedIn Corp. (pinot-core@linkedin.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.linkedin.pinot.tools;
import com.linkedin.pinot.common.config.TableConfig;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.apache.helix.PropertyPathConfig;
import org.apache.helix.PropertyType;
import org.apache.helix.ZNRecord;
import org.apache.helix.manager.zk.ZKHelixAdmin;
import org.apache.helix.manager.zk.ZNRecordSerializer;
import org.apache.helix.model.IdealState;
import org.apache.helix.store.zk.ZkHelixPropertyStore;
import org.kohsuke.args4j.Option;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Command that audits the Helix IDEALSTATE for the segments of a table (or of
 * every table belonging to a tenant) and, when run with {@code -fix}, flips
 * segment replicas from OFFLINE to ONLINE.
 */
public class UpdateSegmentState extends AbstractBaseCommand implements Command {
  private static final Logger LOGGER = LoggerFactory.getLogger(UpdateSegmentState.class);
  private static final String CMD_NAME = "UpdateSegmentState";
  // Segment replica states this command audits (from) and writes (to).
  private static final String FROM_STATE = "OFFLINE";
  private static final String TO_STATE = "ONLINE";
  static final String DEFAULT_ZK_ADDRESS = "localhost:2181";
  static final String DEFAULT_CLUSTER_NAME = "PinotCluster";
  @Option(name = "-zkAddress", required = false, metaVar = "<http>", usage = "Http address of Zookeeper.")
  private String _zkAddress = DEFAULT_ZK_ADDRESS;
  @Option(name = "-clusterName", required = false, metaVar = "<String>", usage = "Pinot cluster name.")
  private String _clusterName = DEFAULT_CLUSTER_NAME;
  @Option(name = "-tenantName", required = false, metaVar = "<string>", usage = "Name of tenant.")
  private String _tenantName;
  @Option(name = "-tableName", required = false, metaVar = "<string>", usage = "Name of the table (e.g. foo_table_OFFLINE).")
  private String _tableName;
  @Option(name = "-fix", required = false, metaVar = "<boolean>", usage = "Update IDEALSTATE values (OFFLINE->ONLINE).")
  private boolean _fix = false;
  @Option(name = "-help", required = false, help = true, aliases={"-h", "--h", "--help"}, usage = "Print this message.")
  private boolean _help = false;
  // Lazily initialized by init() before any ZK access.
  private ZKHelixAdmin _helixAdmin;
  private ZkHelixPropertyStore<ZNRecord> _propertyStore;
  public UpdateSegmentState() {
    super();
  }
  @Override
  public boolean getHelp() {
    return _help;
  }
  @Override
  public String getName() {
    return CMD_NAME;
  }
  @Override
  public String toString() {
    String retString = CMD_NAME + " -zkAddress " + _zkAddress + " -clusterName " + _clusterName;
    if (_tableName != null) {
      retString += " -tableName " + _tableName;
    } else {
      // Fixed typo: previously printed "-tenanName".
      retString += " -tenantName " + _tenantName;
    }
    if (_fix) {
      retString += " -fix";
    }
    return retString;
  }
  @Override
  public String description() {
    return "Audit the IDEALSTATE for the segments of a table (or all tables of a tenant). Optionally update segment state from OFFLINE to ONLINE";
  }
  public UpdateSegmentState setZkAddress(String zkAddress) {
    _zkAddress = zkAddress;
    return this;
  }
  public UpdateSegmentState setClusterName(String clusterName) {
    _clusterName = clusterName;
    return this;
  }
  public UpdateSegmentState setTenantName(String tenantName) {
    _tenantName = tenantName;
    return this;
  }
  public UpdateSegmentState setTableName(String tableName) {
    _tableName = tableName;
    return this;
  }
  public UpdateSegmentState setOverwrite(boolean fix) {
    _fix = fix;
    return this;
  }
  /** Connects the Helix admin and property store for the configured cluster. */
  private void init() {
    LOGGER.info("Trying to connect to {} cluster {}", _zkAddress, _clusterName);
    _helixAdmin = new ZKHelixAdmin(_zkAddress);
    ZNRecordSerializer serializer = new ZNRecordSerializer();
    String path = PropertyPathConfig.getPath(PropertyType.PROPERTYSTORE, _clusterName);
    _propertyStore = new ZkHelixPropertyStore<>(_zkAddress, serializer, path);
  }
  /**
   * Returns the names of all tables whose server tenant matches {@code _tenantName},
   * read from the property store under /CONFIGS/TABLE.
   */
  public List<String> getAllTenantTables() throws Exception {
    String tableConfigPath = "/CONFIGS/TABLE";
    List<ZNRecord> tableConfigs = _propertyStore.getChildren(tableConfigPath, null, 0);
    List<String> tables = new ArrayList<>(128);
    for (ZNRecord znRecord : tableConfigs) {
      TableConfig tableConfig = TableConfig.fromZnRecord(znRecord);
      if (tableConfig.getTenantConfig().getServer().equals(_tenantName)) {
        tables.add(tableConfig.getTableName());
      }
    }
    return tables;
  }
  /**
   * Audits one table's IDEALSTATE. In audit mode each OFFLINE replica is logged;
   * in fix mode ({@code -fix}) they are set to ONLINE and the IDEALSTATE is
   * written back.
   *
   * @param tableName name of the Helix resource (table) to audit or fix
   */
  public void fixTableIdealState(String tableName) throws Exception {
    IdealState idealState = _helixAdmin.getResourceIdealState(_clusterName, tableName);
    if (idealState == null) {
      LOGGER.info("No IDEALSTATE found for table {}", tableName);
      return;
    }
    Map<String, Map<String, String>> mapFieldsIS = idealState.getRecord().getMapFields();
    int nChanges = 0;
    for (Map.Entry<String, Map<String, String>> segmentEntry : mapFieldsIS.entrySet()) {
      String segment = segmentEntry.getKey();
      for (Map.Entry<String, String> serverEntry : segmentEntry.getValue().entrySet()) {
        if (serverEntry.getValue().equals(FROM_STATE)) {
          if (_fix) {
            // setValue writes through to the underlying map field.
            serverEntry.setValue(TO_STATE);
          } else {
            LOGGER.info("Table:{},Segment:{},Server:{}:{}", tableName, segment, serverEntry.getKey(), FROM_STATE);
          }
          nChanges++;
        }
      }
    }
    if (nChanges == 0) {
      LOGGER.info("No segments detected in {} state for table {}", FROM_STATE, tableName);
    } else {
      if (_fix) {
        LOGGER.info("Replacing IDEALSTATE for table {} with {} changes", tableName, nChanges);
        _helixAdmin.setResourceIdealState(_clusterName, tableName, idealState);
      } else {
        LOGGER.info("Detected {} instances in {} in table {}", nChanges, FROM_STATE, tableName);
      }
    }
  }
  @Override
  public boolean execute() throws Exception {
    // Exactly one of -tableName / -tenantName must be provided.
    if (_tableName == null && _tenantName == null) {
      LOGGER.error("One of -tableName or -tenantName must be specified.");
      return false;
    }
    if (_tableName != null && _tenantName != null) {
      LOGGER.error("Exactly one of -tenantName and -tableName must be specified");
      return false;
    }
    init();
    if (_tenantName != null) {
      // Do this for all tenant tables
      LOGGER.info("Working on all tables for tenant {}", _tenantName);
      List<String> tableNames = getAllTenantTables();
      LOGGER.info("Found {} tables for tenant {}", tableNames.size(), _tenantName);
      for (String tableName : tableNames) {
        fixTableIdealState(tableName);
      }
    } else {
      LOGGER.info("Working on table {}", _tableName);
      fixTableIdealState(_tableName);
    }
    return true;
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.lib;
import static org.apache.hadoop.metrics2.lib.Interns.info;
import static org.apache.hadoop.test.MetricsAsserts.assertCounter;
import static org.apache.hadoop.test.MetricsAsserts.assertGauge;
import static org.apache.hadoop.test.MetricsAsserts.mockMetricsRecordBuilder;
import static org.mockito.AdditionalMatchers.eq;
import static org.mockito.AdditionalMatchers.geq;
import static org.mockito.AdditionalMatchers.leq;
import static org.mockito.Matchers.anyLong;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import java.util.Map;
import java.util.Map.Entry;
import org.apache.hadoop.metrics2.MetricsRecordBuilder;
import org.apache.hadoop.metrics2.util.Quantile;
import org.junit.Test;
/**
 * Test metrics record builder interface and mutable metrics
 */
public class TestMutableMetrics {
  // Tolerance used when verifying double-valued gauges on the mock builder.
  private final double EPSILON = 1e-42;
  /**
   * Test the snapshot method
   */
  @Test public void testSnapshot() {
    MetricsRecordBuilder mb = mockMetricsRecordBuilder();
    MetricsRegistry registry = new MetricsRegistry("test");
    registry.newCounter("c1", "int counter", 1);
    registry.newCounter("c2", "long counter", 2L);
    registry.newGauge("g1", "int gauge", 3);
    registry.newGauge("g2", "long gauge", 4L);
    registry.newStat("s1", "stat", "Ops", "Time", true).add(0);
    registry.newRate("s2", "stat", false).add(0);
    registry.snapshot(mb, true);
    MutableStat s2 = (MutableStat) registry.get("s2");
    s2.snapshot(mb, true); // should get the same back.
    s2.add(1);
    s2.snapshot(mb, true); // should get new interval values back
    // NOTE: verify() counts accumulate across all three snapshots above.
    verify(mb).addCounter(info("c1", "int counter"), 1);
    verify(mb).addCounter(info("c2", "long counter"), 2L);
    verify(mb).addGauge(info("g1", "int gauge"), 3);
    verify(mb).addGauge(info("g2", "long gauge"), 4L);
    verify(mb).addCounter(info("S1NumOps", "Number of ops for stat"), 1L);
    verify(mb).addGauge(eq(info("S1AvgTime", "Average time for stat")),
                        eq(0.0, EPSILON));
    verify(mb).addGauge(eq(info("S1StdevTime",
                                "Standard deviation of time for stat")),
                        eq(0.0, EPSILON));
    verify(mb).addGauge(eq(info("S1IMinTime",
                                "Interval min time for stat")),
                        eq(0.0, EPSILON));
    verify(mb).addGauge(eq(info("S1IMaxTime",
                                "Interval max time for stat")),
                        eq(0.0, EPSILON));
    verify(mb).addGauge(eq(info("S1MinTime","Min time for stat")),
                        eq(0.0, EPSILON));
    verify(mb).addGauge(eq(info("S1MaxTime","Max time for stat")),
                        eq(0.0, EPSILON));
    verify(mb).addGauge(
        eq(info("S1INumOps", "Interval number of ops for stat")),
        eq(1L));
    // s2 was snapshotted twice with the same totals before the extra add(1).
    verify(mb, times(2))
        .addCounter(info("S2NumOps", "Number of ops for stat"), 1L);
    verify(mb, times(2)).addGauge(eq(info("S2AvgTime",
                                          "Average time for stat")),
                                  eq(0.0, EPSILON));
    verify(mb).addCounter(info("S2NumOps", "Number of ops for stat"), 2L);
    verify(mb).addGauge(eq(info("S2AvgTime", "Average time for stat")),
                        eq(1.0, EPSILON));
    // Add one more sample to s1 and verify that total number of ops
    // has increased to 2, but interval number is 1 for both intervals.
    MutableStat s1 = (MutableStat) registry.get("s1");
    s1.add(0);
    registry.snapshot(mb, true);
    verify(mb).addCounter(info("S1NumOps", "Number of ops for stat"), 2L);
    verify(mb, times(2)).addGauge(
        eq(info("S1INumOps", "Interval number of ops for stat")),
        eq(1L));
  }
  /** Protocol whose method names ("foo", "bar") seed the MutableRates metric names. */
  interface TestProtocol {
    void foo();
    void bar();
  }
  @Test public void testMutableRates() {
    MetricsRecordBuilder rb = mockMetricsRecordBuilder();
    MetricsRegistry registry = new MetricsRegistry("test");
    MutableRates rates = new MutableRates(registry);
    rates.init(TestProtocol.class);
    registry.snapshot(rb, false);
    // One NumOps/AvgTime pair per TestProtocol method, all zero-initialized.
    assertCounter("FooNumOps", 0L, rb);
    assertGauge("FooAvgTime", 0.0, rb);
    assertCounter("BarNumOps", 0L, rb);
    assertGauge("BarAvgTime", 0.0, rb);
  }
  /**
   * Ensure that quantile estimates from {@link MutableQuantiles} are within
   * specified error bounds.
   */
  @Test(timeout = 30000)
  public void testMutableQuantilesError() throws Exception {
    MetricsRecordBuilder mb = mockMetricsRecordBuilder();
    MetricsRegistry registry = new MetricsRegistry("test");
    // Use a 5s rollover period
    MutableQuantiles quantiles = registry.newQuantiles("foo", "stat", "Ops",
        "Latency", 5);
    // Push some values in and wait for it to publish
    long start = System.nanoTime() / 1000000;
    // Values 1..1000 are each added twice (ascending and descending order),
    // giving 2000 samples total with a known distribution.
    for (long i = 1; i <= 1000; i++) {
      quantiles.add(i);
      quantiles.add(1001 - i);
    }
    long end = System.nanoTime() / 1000000;
    // Sleep past the 5s rollover boundary so the snapshot is published.
    Thread.sleep(6000 - (end - start));
    registry.snapshot(mb, false);
    // Print out the snapshot
    Map<Quantile, Long> previousSnapshot = quantiles.previousSnapshot;
    for (Entry<Quantile, Long> item : previousSnapshot.entrySet()) {
      System.out.println(String.format("Quantile %.2f has value %d",
          item.getKey().quantile, item.getValue()));
    }
    // Verify the results are within our requirements
    verify(mb).addGauge(
        info("FooNumOps", "Number of ops for stat with 5s interval"),
        (long) 2000);
    Quantile[] quants = MutableQuantiles.quantiles;
    String name = "Foo%dthPercentileLatency";
    String desc = "%d percentile latency with 5 second interval for stat";
    for (Quantile q : quants) {
      int percentile = (int) (100 * q.quantile);
      int error = (int) (1000 * q.error);
      String n = String.format(name, percentile);
      String d = String.format(desc, percentile);
      // Expected value of the q-th quantile of uniform values 1..1000,
      // allowed to deviate by the quantile's configured error bound.
      long expected = (long) (q.quantile * 1000);
      verify(mb).addGauge(eq(info(n, d)), leq(expected + error));
      verify(mb).addGauge(eq(info(n, d)), geq(expected - error));
    }
  }
  /**
   * Test that {@link MutableQuantiles} rolls the window over at the specified
   * interval.
   */
  @Test(timeout = 30000)
  public void testMutableQuantilesRollover() throws Exception {
    MetricsRecordBuilder mb = mockMetricsRecordBuilder();
    MetricsRegistry registry = new MetricsRegistry("test");
    // Use a 5s rollover period
    MutableQuantiles quantiles = registry.newQuantiles("foo", "stat", "Ops",
        "Latency", 5);
    Quantile[] quants = MutableQuantiles.quantiles;
    String name = "Foo%dthPercentileLatency";
    String desc = "%d percentile latency with 5 second interval for stat";
    // Push values for three intervals
    long start = System.nanoTime() / 1000000;
    for (int i = 1; i <= 3; i++) {
      // Insert the values
      // All 1000 samples in interval i share the constant value i, so every
      // quantile of that interval equals i.
      for (long j = 1; j <= 1000; j++) {
        quantiles.add(i);
      }
      // Sleep until 1s after the next 5s interval, to let the metrics
      // roll over
      long sleep = (start + (5000 * i) + 1000) - (System.nanoTime() / 1000000);
      Thread.sleep(sleep);
      // Verify that the window reset, check it has the values we pushed in
      registry.snapshot(mb, false);
      for (Quantile q : quants) {
        int percentile = (int) (100 * q.quantile);
        String n = String.format(name, percentile);
        String d = String.format(desc, percentile);
        verify(mb).addGauge(info(n, d), (long) i);
      }
    }
    // Verify the metrics were added the right number of times
    verify(mb, times(3)).addGauge(
        info("FooNumOps", "Number of ops for stat with 5s interval"),
        (long) 1000);
    for (Quantile q : quants) {
      int percentile = (int) (100 * q.quantile);
      String n = String.format(name, percentile);
      String d = String.format(desc, percentile);
      verify(mb, times(3)).addGauge(eq(info(n, d)), anyLong());
    }
  }
  /**
   * Test that {@link MutableQuantiles} rolls over correctly even if no items
   * have been added to the window
   */
  @Test(timeout = 30000)
  public void testMutableQuantilesEmptyRollover() throws Exception {
    MetricsRecordBuilder mb = mockMetricsRecordBuilder();
    MetricsRegistry registry = new MetricsRegistry("test");
    // Use a 5s rollover period
    MutableQuantiles quantiles = registry.newQuantiles("foo", "stat", "Ops",
        "Latency", 5);
    // Check it initially
    quantiles.snapshot(mb, true);
    verify(mb).addGauge(
        info("FooNumOps", "Number of ops for stat with 5s interval"), (long) 0);
    // After an empty rollover, NumOps must still report 0 (second invocation).
    Thread.sleep(6000);
    quantiles.snapshot(mb, false);
    verify(mb, times(2)).addGauge(
        info("FooNumOps", "Number of ops for stat with 5s interval"), (long) 0);
  }
}
| |
package uk.ac.ox.oucs.vle;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Stack;
import org.sakaiproject.entity.api.Entity;
import org.sakaiproject.entity.api.HttpAccess;
import org.sakaiproject.entity.api.Reference;
import org.sakaiproject.entity.api.ResourceProperties;
import org.sakaiproject.time.api.Time;
import org.sakaiproject.user.api.User;
import org.sakaiproject.user.api.UserAlreadyDefinedException;
import org.sakaiproject.user.api.UserDirectoryService;
import org.sakaiproject.user.api.UserEdit;
import org.sakaiproject.user.api.UserIdInvalidException;
import org.sakaiproject.user.api.UserLockedException;
import org.sakaiproject.user.api.UserNotDefinedException;
import org.sakaiproject.user.api.UserPermissionException;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
public class MockUserDirectoryServices implements UserDirectoryService {
public UserEdit addUser(String arg0, String arg1)
throws UserIdInvalidException, UserAlreadyDefinedException,
UserPermissionException {
// TODO Auto-generated method stub
return null;
}
public boolean allowAddUser() {
// TODO Auto-generated method stub
return false;
}
public boolean allowRemoveUser(String arg0) {
// TODO Auto-generated method stub
return false;
}
public boolean allowUpdateUser(String arg0) {
// TODO Auto-generated method stub
return false;
}
public boolean allowUpdateUserEmail(String arg0) {
// TODO Auto-generated method stub
return false;
}
public boolean allowUpdateUserName(String arg0) {
// TODO Auto-generated method stub
return false;
}
public boolean allowUpdateUserPassword(String arg0) {
// TODO Auto-generated method stub
return false;
}
public boolean allowUpdateUserType(String arg0) {
// TODO Auto-generated method stub
return false;
}
public User authenticate(String arg0, String arg1) {
// TODO Auto-generated method stub
return null;
}
public void cancelEdit(UserEdit arg0) {
// TODO Auto-generated method stub
}
public void commitEdit(UserEdit arg0) throws UserAlreadyDefinedException {
// TODO Auto-generated method stub
}
public int countSearchUsers(String arg0) {
// TODO Auto-generated method stub
return 0;
}
public int countUsers() {
// TODO Auto-generated method stub
return 0;
}
public void destroyAuthentication() {
// TODO Auto-generated method stub
}
public UserEdit editUser(String arg0) throws UserNotDefinedException,
UserPermissionException, UserLockedException {
// TODO Auto-generated method stub
return null;
}
public Collection findUsersByEmail(String arg0) {
// TODO Auto-generated method stub
return null;
}
public User getAnonymousUser() {
// TODO Auto-generated method stub
return null;
}
public User getCurrentUser() {
// TODO Auto-generated method stub
return null;
}
public User getUser(String arg0) throws UserNotDefinedException {
// TODO Auto-generated method stub
return null;
}
public User getUserByAid(String arg0) throws UserNotDefinedException {
// TODO Auto-generated method stub
return null;
}
public User getUserByEid(final String eid) throws UserNotDefinedException {
return new User() {
public boolean checkPassword(String arg0) {
// TODO Auto-generated method stub
return false;
}
public User getCreatedBy() {
// TODO Auto-generated method stub
return null;
}
public Time getCreatedTime() {
// TODO Auto-generated method stub
return null;
}
public String getDisplayId() {
// TODO Auto-generated method stub
return "displayId"+eid;
}
public String getDisplayName() {
// TODO Auto-generated method stub
return "displayName"+eid;
}
public String getEid() {
// TODO Auto-generated method stub
return eid;
}
public String getEmail() {
// TODO Auto-generated method stub
return "email"+eid;
}
public String getFirstName() {
// TODO Auto-generated method stub
return "firstName"+eid;
}
public String getLastName() {
// TODO Auto-generated method stub
return "lastName"+eid;
}
public User getModifiedBy() {
// TODO Auto-generated method stub
return null;
}
public Time getModifiedTime() {
// TODO Auto-generated method stub
return null;
}
public String getSortName() {
// TODO Auto-generated method stub
return null;
}
public String getType() {
// TODO Auto-generated method stub
return "type"+eid;
}
public String getId() {
// TODO Auto-generated method stub
return "id"+eid;
}
public ResourceProperties getProperties() {
// TODO Auto-generated method stub
return null;
}
public String getReference() {
// TODO Auto-generated method stub
return null;
}
public String getReference(String arg0) {
// TODO Auto-generated method stub
return null;
}
public String getUrl() {
// TODO Auto-generated method stub
return null;
}
public String getUrl(String arg0) {
// TODO Auto-generated method stub
return null;
}
public Element toXml(Document arg0, Stack arg1) {
// TODO Auto-generated method stub
return null;
}
public int compareTo(Object o) {
// TODO Auto-generated method stub
return 0;
}
};
}
public String getUserEid(String arg0) throws UserNotDefinedException {
// TODO Auto-generated method stub
return null;
}
public String getUserId(String arg0) throws UserNotDefinedException {
// TODO Auto-generated method stub
return null;
}
public List getUsers() {
// TODO Auto-generated method stub
return null;
}
public List getUsers(Collection arg0) {
// TODO Auto-generated method stub
return null;
}
public List getUsers(int arg0, int arg1) {
// TODO Auto-generated method stub
return null;
}
public UserEdit mergeUser(Element arg0) throws UserIdInvalidException,
UserAlreadyDefinedException, UserPermissionException {
// TODO Auto-generated method stub
return null;
}
public void removeUser(UserEdit arg0) throws UserPermissionException {
// TODO Auto-generated method stub
}
public List searchUsers(String arg0, int arg1, int arg2) {
// TODO Auto-generated method stub
return null;
}
public String userReference(String arg0) {
// TODO Auto-generated method stub
return null;
}
public User addUser(String arg0, String arg1, String arg2, String arg3,
String arg4, String arg5, String arg6, ResourceProperties arg7)
throws UserIdInvalidException, UserAlreadyDefinedException,
UserPermissionException {
// TODO Auto-generated method stub
return null;
}
public String archive(String arg0, Document arg1, Stack arg2, String arg3,
List arg4) {
// TODO Auto-generated method stub
return null;
}
public Entity getEntity(Reference arg0) {
// TODO Auto-generated method stub
return null;
}
public Collection getEntityAuthzGroups(Reference arg0, String arg1) {
// TODO Auto-generated method stub
return null;
}
public String getEntityDescription(Reference arg0) {
// TODO Auto-generated method stub
return null;
}
public ResourceProperties getEntityResourceProperties(Reference arg0) {
// TODO Auto-generated method stub
return null;
}
public String getEntityUrl(Reference arg0) {
// TODO Auto-generated method stub
return null;
}
public HttpAccess getHttpAccess() {
// TODO Auto-generated method stub
return null;
}
public String getLabel() {
// TODO Auto-generated method stub
return null;
}
public String merge(String arg0, Element arg1, String arg2, String arg3,
Map arg4, Map arg5, Set arg6) {
// TODO Auto-generated method stub
return null;
}
public boolean parseEntityReference(String arg0, Reference arg1) {
// TODO Auto-generated method stub
return false;
}
public boolean willArchiveMerge() {
// TODO Auto-generated method stub
return false;
}
}
| |
/*
* $Id: SimpleAdapterDocument.java 651946 2008-04-27 13:41:38Z apetrelli $
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.struts2.views.xslt;
import java.util.Arrays;
import java.util.List;
import org.apache.struts2.StrutsException;
import org.w3c.dom.Attr;
import org.w3c.dom.CDATASection;
import org.w3c.dom.Comment;
import org.w3c.dom.DOMConfiguration;
import org.w3c.dom.DOMException;
import org.w3c.dom.DOMImplementation;
import org.w3c.dom.Document;
import org.w3c.dom.DocumentFragment;
import org.w3c.dom.DocumentType;
import org.w3c.dom.Element;
import org.w3c.dom.EntityReference;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.w3c.dom.ProcessingInstruction;
import org.w3c.dom.Text;
/**
* SimpleAdapterDocument adapted a Java object and presents it as
* a Document. This class represents the Document container and uses
* the AdapterFactory to produce a child adapter for the wrapped object.
* The adapter produced must be of an Element type or an exception is thrown.
*
* Note: in theory we could base this on AbstractAdapterElement and then allow
* the wrapped object to be a more general Node type. We would just use
* ourselves as the root element. However I don't think this is an issue as
* people expect Documents to wrap Elements.
*/
/**
 * SimpleAdapterDocument adapts a Java object and presents it as a DOM
 * Document. This class represents the Document container and uses the
 * AdapterFactory to produce a child adapter for the wrapped object. The
 * adapter produced must be of an Element type or an exception is thrown.
 *
 * Note: in theory we could base this on AbstractAdapterElement and then allow
 * the wrapped object to be a more general Node type. We would just use
 * ourselves as the root element. However I don't think this is an issue as
 * people expect Documents to wrap Elements.
 */
public class SimpleAdapterDocument extends AbstractAdapterNode implements Document {

    /** Cached adapter for the wrapped value; rebuilt lazily after the value changes. */
    private Element rootElement;

    public SimpleAdapterDocument(
            AdapterFactory adapterFactory, AdapterNode parent, String propertyName, Object value) {
        setContext(adapterFactory, parent, propertyName, value);
    }

    public void setPropertyValue(Object prop) {
        super.setPropertyValue(prop);
        // Invalidate the cache; the next access re-adapts the new value.
        rootElement = null;
    }

    /**
     * Lazily constructs (and caches) the root element adapter from the value
     * object. Fails if the adapter factory produces anything but an Element.
     */
    private Element getRootElement() {
        if (rootElement == null) {
            Node adapted = getAdapterFactory().adaptNode(
                    this, getPropertyName(), getPropertyValue());
            if (!(adapted instanceof Element))
                throw new StrutsException(
                        "Document adapter expected to wrap an Element type. Node is not an element:" + adapted);
            rootElement = (Element) adapted;
        }
        return rootElement;
    }

    protected List<Node> getChildAdapters() {
        // The document has exactly one child adapter: the root element.
        return Arrays.<Node>asList(getRootElement());
    }

    public NodeList getChildNodes() {
        // Single-item node list over the root element.
        return new NodeList() {
            public Node item(int index) {
                return getRootElement();
            }

            public int getLength() {
                return 1;
            }
        };
    }

    public DocumentType getDoctype() {
        return null; // adapted documents carry no DTD
    }

    public Element getDocumentElement() {
        return getRootElement();
    }

    public Element getElementById(String elementId) {
        return null; // not supported by the adapter
    }

    public NodeList getElementsByTagName(String tagName) {
        return null; // not supported by the adapter
    }

    public NodeList getElementsByTagNameNS(String namespaceURI, String localName) {
        return null; // not supported by the adapter
    }

    public Node getFirstChild() {
        return getRootElement();
    }

    public DOMImplementation getImplementation() {
        return null;
    }

    public Node getLastChild() {
        return getRootElement();
    }

    public String getNodeName() {
        return "#document";
    }

    public short getNodeType() {
        return Node.DOCUMENT_NODE;
    }

    // Factory methods: this document is a read-only adapter, so node creation
    // is unavailable and simply yields null.

    public Attr createAttribute(String name) throws DOMException {
        return null;
    }

    public Attr createAttributeNS(String namespaceURI, String qualifiedName) throws DOMException {
        return null;
    }

    public CDATASection createCDATASection(String data) throws DOMException {
        return null;
    }

    public Comment createComment(String data) {
        return null;
    }

    public DocumentFragment createDocumentFragment() {
        return null;
    }

    public Element createElement(String tagName) throws DOMException {
        return null;
    }

    public Element createElementNS(String namespaceURI, String qualifiedName) throws DOMException {
        return null;
    }

    public EntityReference createEntityReference(String name) throws DOMException {
        return null;
    }

    public ProcessingInstruction createProcessingInstruction(String target, String data) throws DOMException {
        return null;
    }

    public Text createTextNode(String data) {
        return null;
    }

    public boolean hasChildNodes() {
        return true; // always exactly one child: the root element
    }

    public Node importNode(Node importedNode, boolean deep) throws DOMException {
        return null;
    }

    public Node getChildAfter(Node child) {
        return null;
    }

    public Node getChildBefore(Node child) {
        return null;
    }

    // DOM level 3

    public String getInputEncoding() {
        throw operationNotSupported();
    }

    public String getXmlEncoding() {
        throw operationNotSupported();
    }

    public boolean getXmlStandalone() {
        throw operationNotSupported();
    }

    public void setXmlStandalone(boolean standalone) throws DOMException {
        throw operationNotSupported();
    }

    public String getXmlVersion() {
        throw operationNotSupported();
    }

    public void setXmlVersion(String version) throws DOMException {
        throw operationNotSupported();
    }

    public boolean getStrictErrorChecking() {
        throw operationNotSupported();
    }

    public void setStrictErrorChecking(boolean strict) {
        throw operationNotSupported();
    }

    public String getDocumentURI() {
        throw operationNotSupported();
    }

    public void setDocumentURI(String uri) {
        throw operationNotSupported();
    }

    public Node adoptNode(Node source) throws DOMException {
        throw operationNotSupported();
    }

    public DOMConfiguration getDomConfig() {
        throw operationNotSupported();
    }

    public void normalizeDocument() {
        throw operationNotSupported();
    }

    public Node renameNode(Node n, String namespaceURI, String qualifiedName) throws DOMException {
        // NOTE(review): unlike its DOM level 3 siblings this returns null
        // rather than throwing; preserved as-is since callers may rely on it.
        return null;
    }

    // end DOM level 3
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.cluster;
import com.google.common.collect.ImmutableMap;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.block.ClusterBlocks;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.discovery.Discovery;
import org.elasticsearch.discovery.DiscoverySettings;
import org.elasticsearch.discovery.zen.DiscoveryNodesProvider;
import org.elasticsearch.discovery.zen.publish.PublishClusterStateAction;
import org.elasticsearch.node.service.NodeService;
import org.elasticsearch.node.settings.NodeSettingsService;
import org.elasticsearch.test.ElasticsearchTestCase;
import org.elasticsearch.test.junit.annotations.TestLogging;
import org.elasticsearch.test.transport.MockTransportService;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportConnectionListener;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.transport.local.LocalTransport;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import static com.google.common.collect.Maps.newHashMap;
import static org.hamcrest.Matchers.*;
public class ClusterStateDiffPublishingTests extends ElasticsearchTestCase {
protected ThreadPool threadPool;
protected Map<String, MockNode> nodes = newHashMap();
/**
 * Bundle of the pieces that make up one simulated cluster node: its
 * discovery identity, mock transport, publish action and nodes provider.
 */
public static class MockNode {
    public final DiscoveryNode discoveryNode;
    public final MockTransportService service;
    public final PublishClusterStateAction action;
    public final MockDiscoveryNodesProvider nodesProvider;

    public MockNode(DiscoveryNode node, MockTransportService transportService,
                    PublishClusterStateAction publishAction, MockDiscoveryNodesProvider provider) {
        this.discoveryNode = node;
        this.service = transportService;
        this.action = publishAction;
        this.nodesProvider = provider;
    }

    /** Opens a transport connection to {@code other} and records it in the provider. */
    public void connectTo(DiscoveryNode other) {
        service.connectToNode(other);
        nodesProvider.addNode(other);
    }
}
/**
 * Convenience overload of {@link #createMockNode(String, Settings, Version,
 * PublishClusterStateAction.NewClusterStateListener)} that installs a default
 * listener which just logs each received state and immediately acks it.
 */
public MockNode createMockNode(final String name, Settings settings, Version version) throws Exception {
    PublishClusterStateAction.NewClusterStateListener loggingListener =
            new PublishClusterStateAction.NewClusterStateListener() {
                @Override
                public void onNewClusterState(ClusterState clusterState, NewStateProcessed newStateProcessed) {
                    logger.debug("Node [{}] onNewClusterState version [{}], uuid [{}]", name, clusterState.version(), clusterState.uuid());
                    newStateProcessed.onNewClusterStateProcessed();
                }
            };
    return createMockNode(name, settings, version, loggingListener);
}
/**
 * Creates a node (transport + publish action + nodes provider), connects it
 * bidirectionally to every previously created node (and to itself), waits for
 * all connections to be established, and registers it under {@code name}.
 *
 * @param name     unique node name; also used as the node id
 * @param settings settings applied to the transport and publish action
 * @param version  node version for the transport and the discovery node
 * @param listener callback invoked when this node receives a published state
 * @return the fully wired mock node
 * @throws Exception if transport setup fails or connections don't complete in time
 */
public MockNode createMockNode(String name, Settings settings, Version version, PublishClusterStateAction.NewClusterStateListener listener) throws Exception {
    // Fail fast on a duplicate name, BEFORE any wiring happens. (Previously
    // the check ran only after the node had been fully connected, which left
    // a half-registered node wired into the cluster on failure.)
    if (nodes.containsKey(name)) {
        fail("Node with the name " + name + " already exists");
    }
    MockTransportService service = buildTransportService(
            Settings.builder().put(settings).put("name", name, TransportService.SETTING_TRACE_LOG_INCLUDE, "", TransportService.SETTING_TRACE_LOG_EXCLUDE, "NOTHING").build(),
            version
    );
    DiscoveryNode discoveryNode = new DiscoveryNode(name, name, service.boundAddress().publishAddress(), ImmutableMap.<String, String>of(), version);
    MockDiscoveryNodesProvider nodesProvider = new MockDiscoveryNodesProvider(discoveryNode);
    PublishClusterStateAction action = buildPublishClusterStateAction(settings, service, nodesProvider, listener);
    MockNode node = new MockNode(discoveryNode, service, action, nodesProvider);
    nodesProvider.addNode(discoveryNode);
    // Expect two connection events per existing node (one in each direction)
    // plus one for the new node connecting to itself.
    final CountDownLatch latch = new CountDownLatch(nodes.size() * 2 + 1);
    TransportConnectionListener waitForConnection = new TransportConnectionListener() {
        @Override
        public void onNodeConnected(DiscoveryNode node) {
            latch.countDown();
        }

        @Override
        public void onNodeDisconnected(DiscoveryNode node) {
            fail("disconnect should not be called " + node);
        }
    };
    node.service.addConnectionListener(waitForConnection);
    for (MockNode curNode : nodes.values()) {
        curNode.service.addConnectionListener(waitForConnection);
        curNode.connectTo(node.discoveryNode);
        node.connectTo(curNode.discoveryNode);
    }
    node.connectTo(node.discoveryNode);
    assertThat("failed to wait for all nodes to connect", latch.await(5, TimeUnit.SECONDS), equalTo(true));
    // Connections established: the temporary listeners are no longer needed.
    for (MockNode curNode : nodes.values()) {
        curNode.service.removeConnectionListener(waitForConnection);
    }
    node.service.removeConnectionListener(waitForConnection);
    nodes.put(name, node);
    return node;
}
/** Returns the transport service of the node registered as {@code name}, or null if unknown. */
public MockTransportService service(String name) {
    MockNode node = nodes.get(name);
    return node == null ? null : node.service;
}
/** Returns the publish action of the node registered as {@code name}, or null if unknown. */
public PublishClusterStateAction action(String name) {
    MockNode node = nodes.get(name);
    return node == null ? null : node.action;
}
/** Creates the thread pool shared by all mock transports built in this test. */
@Override
@Before
public void setUp() throws Exception {
    super.setUp();
    threadPool = new ThreadPool(getClass().getName());
}
/**
 * Shuts down every node created via {@code createMockNode} and then the
 * shared thread pool. The pool termination is in a finally block so a
 * failure while closing one node cannot leak threads across tests.
 */
@Override
@After
public void tearDown() throws Exception {
    super.tearDown();
    try {
        for (MockNode curNode : nodes.values()) {
            curNode.action.close();
            curNode.service.close();
        }
    } finally {
        // Always stop the pool, even if a close() above threw.
        terminate(threadPool);
    }
}
/** Builds and starts a mock transport service backed by a local (in-JVM) transport. */
protected MockTransportService buildTransportService(Settings settings, Version version) {
    LocalTransport transport = new LocalTransport(settings, threadPool, version);
    MockTransportService transportService = new MockTransportService(settings, transport, threadPool);
    transportService.start();
    return transportService;
}
/** Builds a publish action whose discovery settings are derived from {@code settings}. */
protected PublishClusterStateAction buildPublishClusterStateAction(Settings settings, MockTransportService transportService, MockDiscoveryNodesProvider nodesProvider,
                                                                   PublishClusterStateAction.NewClusterStateListener listener) {
    NodeSettingsService nodeSettingsService = new NodeSettingsService(settings);
    return new PublishClusterStateAction(settings, transportService, nodesProvider, listener,
            new DiscoverySettings(settings, nodeSettingsService));
}
/**
 * Minimal {@link DiscoveryNodesProvider}: starts with just the local node and
 * grows as peers are added via {@link #addNode}. {@link #nodeService()} is
 * intentionally unsupported — the publish path under test must never call it.
 */
static class MockDiscoveryNodesProvider implements DiscoveryNodesProvider {

    private DiscoveryNodes discoveryNodes;

    public MockDiscoveryNodesProvider(DiscoveryNode localNode) {
        discoveryNodes = DiscoveryNodes.builder().put(localNode).localNodeId(localNode.id()).build();
    }

    /** Rebuilds the (immutable) node set with {@code node} included. */
    public void addNode(DiscoveryNode node) {
        discoveryNodes = DiscoveryNodes.builder(discoveryNodes).put(node).build();
    }

    @Override
    public DiscoveryNodes nodes() {
        return discoveryNodes;
    }

    @Override
    public NodeService nodeService() {
        assert false;
        throw new UnsupportedOperationException("Shouldn't be here");
    }
}
/**
 * Walks a cluster through a sequence of state changes and verifies the
 * diff-vs-full-state decision at each step: a node gets a full state on first
 * contact, diffs once the sender knows it has the previous state, and a full
 * state again whenever a version was skipped or the master changed.
 */
@Test
@TestLogging("cluster:DEBUG,discovery.zen.publish:DEBUG")
public void testSimpleClusterStatePublishing() throws Exception {
    MockNewClusterStateListener mockListenerA = new MockNewClusterStateListener();
    MockNode nodeA = createMockNode("nodeA", Settings.EMPTY, Version.CURRENT, mockListenerA);
    MockNewClusterStateListener mockListenerB = new MockNewClusterStateListener();
    MockNode nodeB = createMockNode("nodeB", Settings.EMPTY, Version.CURRENT, mockListenerB);
    // Initial cluster state: only nodeA, which is also the local/master node.
    DiscoveryNodes discoveryNodes = DiscoveryNodes.builder().put(nodeA.discoveryNode).localNodeId(nodeA.discoveryNode.id()).build();
    ClusterState clusterState = ClusterState.builder(new ClusterName("test")).nodes(discoveryNodes).build();
    // cluster state update - add nodeB. nodeB has never seen a state, so it
    // must receive a full state, not a diff.
    discoveryNodes = DiscoveryNodes.builder(discoveryNodes).put(nodeB.discoveryNode).build();
    ClusterState previousClusterState = clusterState;
    clusterState = ClusterState.builder(clusterState).nodes(discoveryNodes).incrementVersion().build();
    mockListenerB.add(new NewClusterStateExpectation() {
        @Override
        public void check(ClusterState clusterState, PublishClusterStateAction.NewClusterStateListener.NewStateProcessed newStateProcessed) {
            assertFalse(clusterState.wasReadFromDiff());
        }
    });
    publishStateDiffAndWait(nodeA.action, clusterState, previousClusterState);
    // cluster state update - add block. nodeB now has the previous state, so
    // this update should arrive as a diff.
    previousClusterState = clusterState;
    clusterState = ClusterState.builder(clusterState).blocks(ClusterBlocks.builder().addGlobalBlock(MetaData.CLUSTER_READ_ONLY_BLOCK)).incrementVersion().build();
    mockListenerB.add(new NewClusterStateExpectation() {
        @Override
        public void check(ClusterState clusterState, PublishClusterStateAction.NewClusterStateListener.NewStateProcessed newStateProcessed) {
            assertTrue(clusterState.wasReadFromDiff());
            assertThat(clusterState.blocks().global().size(), equalTo(1));
        }
    });
    publishStateDiffAndWait(nodeA.action, clusterState, previousClusterState);
    // cluster state update - remove block, again as a diff.
    previousClusterState = clusterState;
    clusterState = ClusterState.builder(clusterState).blocks(ClusterBlocks.EMPTY_CLUSTER_BLOCK).incrementVersion().build();
    mockListenerB.add(new NewClusterStateExpectation() {
        @Override
        public void check(ClusterState clusterState, PublishClusterStateAction.NewClusterStateListener.NewStateProcessed newStateProcessed) {
            assertTrue(clusterState.wasReadFromDiff());
            assertThat(clusterState.blocks().global().size(), equalTo(0));
        }
    });
    publishStateDiffAndWait(nodeA.action, clusterState, previousClusterState);
    // Adding new node - this node should get full cluster state while nodeB should still be getting diffs
    MockNewClusterStateListener mockListenerC = new MockNewClusterStateListener();
    MockNode nodeC = createMockNode("nodeC", Settings.EMPTY, Version.CURRENT, mockListenerC);
    // cluster state update 3 - register node C
    previousClusterState = clusterState;
    discoveryNodes = DiscoveryNodes.builder(discoveryNodes).put(nodeC.discoveryNode).build();
    clusterState = ClusterState.builder(clusterState).nodes(discoveryNodes).incrementVersion().build();
    mockListenerB.add(new NewClusterStateExpectation() {
        @Override
        public void check(ClusterState clusterState, PublishClusterStateAction.NewClusterStateListener.NewStateProcessed newStateProcessed) {
            assertTrue(clusterState.wasReadFromDiff());
            assertThat(clusterState.blocks().global().size(), equalTo(0));
        }
    });
    mockListenerC.add(new NewClusterStateExpectation() {
        @Override
        public void check(ClusterState clusterState, PublishClusterStateAction.NewClusterStateListener.NewStateProcessed newStateProcessed) {
            // First state nodeC ever sees: must be a full state.
            assertFalse(clusterState.wasReadFromDiff());
        }
    });
    publishStateDiffAndWait(nodeA.action, clusterState, previousClusterState);
    // cluster state update 4 - update settings; both followers now get diffs.
    previousClusterState = clusterState;
    MetaData metaData = MetaData.builder(clusterState.metaData()).transientSettings(Settings.settingsBuilder().put("foo", "bar").build()).build();
    clusterState = ClusterState.builder(clusterState).metaData(metaData).incrementVersion().build();
    NewClusterStateExpectation expectation = new NewClusterStateExpectation() {
        @Override
        public void check(ClusterState clusterState, PublishClusterStateAction.NewClusterStateListener.NewStateProcessed newStateProcessed) {
            assertTrue(clusterState.wasReadFromDiff());
            assertThat(clusterState.blocks().global().size(), equalTo(0));
        }
    };
    mockListenerB.add(expectation);
    mockListenerC.add(expectation);
    publishStateDiffAndWait(nodeA.action, clusterState, previousClusterState);
    // cluster state update - skipping one version change - should request full cluster state
    // (previousClusterState is bumped once so the published state's version
    // doesn't follow directly from what the followers have.)
    previousClusterState = ClusterState.builder(clusterState).incrementVersion().build();
    clusterState = ClusterState.builder(clusterState).incrementVersion().build();
    expectation = new NewClusterStateExpectation() {
        @Override
        public void check(ClusterState clusterState, PublishClusterStateAction.NewClusterStateListener.NewStateProcessed newStateProcessed) {
            assertFalse(clusterState.wasReadFromDiff());
        }
    };
    mockListenerB.add(expectation);
    mockListenerC.add(expectation);
    publishStateDiffAndWait(nodeA.action, clusterState, previousClusterState);
    // cluster state update - skipping one version change - should request full cluster state
    previousClusterState = ClusterState.builder(clusterState).incrementVersion().build();
    clusterState = ClusterState.builder(clusterState).incrementVersion().build();
    expectation = new NewClusterStateExpectation() {
        @Override
        public void check(ClusterState clusterState, PublishClusterStateAction.NewClusterStateListener.NewStateProcessed newStateProcessed) {
            assertFalse(clusterState.wasReadFromDiff());
        }
    };
    mockListenerB.add(expectation);
    mockListenerC.add(expectation);
    publishStateDiffAndWait(nodeA.action, clusterState, previousClusterState);
    // node B becomes the master and sends a version of the cluster state that goes back;
    // after a master switch the recipients must receive a full state.
    discoveryNodes = DiscoveryNodes.builder(discoveryNodes)
            .put(nodeA.discoveryNode)
            .put(nodeB.discoveryNode)
            .put(nodeC.discoveryNode)
            .build();
    previousClusterState = ClusterState.builder(new ClusterName("test")).nodes(discoveryNodes).build();
    clusterState = ClusterState.builder(clusterState).nodes(discoveryNodes).incrementVersion().build();
    expectation = new NewClusterStateExpectation() {
        @Override
        public void check(ClusterState clusterState, PublishClusterStateAction.NewClusterStateListener.NewStateProcessed newStateProcessed) {
            assertFalse(clusterState.wasReadFromDiff());
        }
    };
    mockListenerA.add(expectation);
    mockListenerC.add(expectation);
    publishStateDiffAndWait(nodeB.action, clusterState, previousClusterState);
}
/**
 * Verifies that a node never receives a diff before it has acknowledged a
 * full state — even when that node already appears in the previous cluster
 * state known to the sender.
 */
@Test
@TestLogging("cluster:DEBUG,discovery.zen.publish:DEBUG")
public void testUnexpectedDiffPublishing() throws Exception {
    MockNode nodeA = createMockNode("nodeA", Settings.EMPTY, Version.CURRENT, new PublishClusterStateAction.NewClusterStateListener() {
        @Override
        public void onNewClusterState(ClusterState clusterState, NewStateProcessed newStateProcessed) {
            fail("Shouldn't send cluster state to myself");
        }
    });
    MockNewClusterStateListener mockListenerB = new MockNewClusterStateListener();
    MockNode nodeB = createMockNode("nodeB", Settings.EMPTY, Version.CURRENT, mockListenerB);
    // Initial cluster state with both states - the second node still shouldn't get diff even though it's present in the previous cluster state
    DiscoveryNodes discoveryNodes = DiscoveryNodes.builder().put(nodeA.discoveryNode).put(nodeB.discoveryNode).localNodeId(nodeA.discoveryNode.id()).build();
    ClusterState previousClusterState = ClusterState.builder(new ClusterName("test")).nodes(discoveryNodes).build();
    ClusterState clusterState = ClusterState.builder(previousClusterState).incrementVersion().build();
    mockListenerB.add(new NewClusterStateExpectation() {
        @Override
        public void check(ClusterState clusterState, PublishClusterStateAction.NewClusterStateListener.NewStateProcessed newStateProcessed) {
            // nodeB has never confirmed a state, so a full state is required.
            assertFalse(clusterState.wasReadFromDiff());
        }
    });
    publishStateDiffAndWait(nodeA.action, clusterState, previousClusterState);
    // cluster state update - add block; now that nodeB holds the previous
    // state, this second publication should arrive as a diff.
    previousClusterState = clusterState;
    clusterState = ClusterState.builder(clusterState).blocks(ClusterBlocks.builder().addGlobalBlock(MetaData.CLUSTER_READ_ONLY_BLOCK)).incrementVersion().build();
    mockListenerB.add(new NewClusterStateExpectation() {
        @Override
        public void check(ClusterState clusterState, PublishClusterStateAction.NewClusterStateListener.NewStateProcessed newStateProcessed) {
            assertTrue(clusterState.wasReadFromDiff());
        }
    });
    publishStateDiffAndWait(nodeA.action, clusterState, previousClusterState);
}
/**
 * With {@code discovery.zen.publish_diff.enable} set to false, every
 * publication must be a full cluster state — the second update here would
 * otherwise have been sent as a diff.
 */
@Test
@TestLogging("cluster:DEBUG,discovery.zen.publish:DEBUG")
public void testDisablingDiffPublishing() throws Exception {
    Settings noDiffPublishingSettings = Settings.builder().put(DiscoverySettings.PUBLISH_DIFF_ENABLE, false).build();
    MockNode nodeA = createMockNode("nodeA", noDiffPublishingSettings, Version.CURRENT, new PublishClusterStateAction.NewClusterStateListener() {
        @Override
        public void onNewClusterState(ClusterState clusterState, NewStateProcessed newStateProcessed) {
            fail("Shouldn't send cluster state to myself");
        }
    });
    MockNode nodeB = createMockNode("nodeB", noDiffPublishingSettings, Version.CURRENT, new PublishClusterStateAction.NewClusterStateListener() {
        @Override
        public void onNewClusterState(ClusterState clusterState, NewStateProcessed newStateProcessed) {
            logger.debug("Got cluster state update, version [{}], guid [{}], from diff [{}]", clusterState.version(), clusterState.uuid(), clusterState.wasReadFromDiff());
            // Diffs are disabled, so every received state must be a full one.
            assertFalse(clusterState.wasReadFromDiff());
            newStateProcessed.onNewClusterStateProcessed();
        }
    });
    // Initial cluster state
    DiscoveryNodes discoveryNodes = DiscoveryNodes.builder().put(nodeA.discoveryNode).localNodeId(nodeA.discoveryNode.id()).build();
    ClusterState clusterState = ClusterState.builder(new ClusterName("test")).nodes(discoveryNodes).build();
    // cluster state update - add nodeB
    discoveryNodes = DiscoveryNodes.builder(discoveryNodes).put(nodeB.discoveryNode).build();
    ClusterState previousClusterState = clusterState;
    clusterState = ClusterState.builder(clusterState).nodes(discoveryNodes).incrementVersion().build();
    publishStateDiffAndWait(nodeA.action, clusterState, previousClusterState);
    // cluster state update - add block (would normally be diff-able, but must
    // still be published as a full state)
    previousClusterState = clusterState;
    clusterState = ClusterState.builder(clusterState).blocks(ClusterBlocks.builder().addGlobalBlock(MetaData.CLUSTER_READ_ONLY_BLOCK)).incrementVersion().build();
    publishStateDiffAndWait(nodeA.action, clusterState, previousClusterState);
}
/**
 * Stress test: publishes many consecutive cluster state versions from one
 * node to a random-sized cluster while listeners randomly delay their acks
 * past the 100ms publish timeout, then verifies every version was processed
 * with metadata matching its version.
 */
@Test
@TestLogging("cluster:DEBUG,discovery.zen.publish:DEBUG")
public void testSimultaneousClusterStatePublishing() throws Exception {
    int numberOfNodes = randomIntBetween(2, 10);
    int numberOfIterations = randomIntBetween(50, 200);
    // Short publish timeout so the randomized listener sleeps below can
    // actually trigger timeouts from time to time.
    Settings settings = Settings.builder().put(DiscoverySettings.PUBLISH_TIMEOUT, "100ms").put(DiscoverySettings.PUBLISH_DIFF_ENABLE, true).build();
    MockNode[] nodes = new MockNode[numberOfNodes];
    DiscoveryNodes.Builder discoveryNodesBuilder = DiscoveryNodes.builder();
    for (int i = 0; i < nodes.length; i++) {
        final String name = "node" + i;
        nodes[i] = createMockNode(name, settings, Version.CURRENT, new PublishClusterStateAction.NewClusterStateListener() {
            @Override
            public synchronized void onNewClusterState(ClusterState clusterState, NewStateProcessed newStateProcessed) {
                // Every received state must carry the metadata built for its
                // version (checked by a helper defined later in this file).
                assertProperMetaDataForVersion(clusterState.metaData(), clusterState.version());
                if (randomInt(10) < 2) {
                    // Cause timeouts from time to time
                    try {
                        Thread.sleep(randomInt(110));
                    } catch (InterruptedException ex) {
                        Thread.currentThread().interrupt();
                    }
                }
                newStateProcessed.onNewClusterStateProcessed();
            }
        });
        discoveryNodesBuilder.put(nodes[i].discoveryNode);
    }
    AssertingAckListener[] listeners = new AssertingAckListener[numberOfIterations];
    DiscoveryNodes discoveryNodes = discoveryNodesBuilder.build();
    MetaData metaData = MetaData.EMPTY_META_DATA;
    ClusterState clusterState = ClusterState.builder(new ClusterName("test")).metaData(metaData).build();
    ClusterState previousState;
    // Fire all publications without waiting, then await them all; nodes[0]
    // acts as the sender for every iteration.
    for (int i = 0; i < numberOfIterations; i++) {
        previousState = clusterState;
        metaData = buildMetaDataForVersion(metaData, i + 1);
        clusterState = ClusterState.builder(clusterState).incrementVersion().metaData(metaData).nodes(discoveryNodes).build();
        listeners[i] = publishStateDiff(nodes[0].action, clusterState, previousState);
    }
    for (int i = 0; i < numberOfIterations; i++) {
        // NOTE(review): assumes AssertingAckListener.await (defined outside
        // this view) tolerates the deliberately induced publish timeouts.
        listeners[i].await(1, TimeUnit.SECONDS);
    }
}
@Test
@TestLogging("cluster:DEBUG,discovery.zen.publish:DEBUG")
// Verifies that a failure while serializing a cluster state *diff* is reported back to the
// publisher as a per-node error (rather than hanging or being silently dropped).
public void testSerializationFailureDuringDiffPublishing() throws Exception {
MockNode nodeA = createMockNode("nodeA", Settings.EMPTY, Version.CURRENT, new PublishClusterStateAction.NewClusterStateListener() {
@Override
public void onNewClusterState(ClusterState clusterState, NewStateProcessed newStateProcessed) {
fail("Shouldn't send cluster state to myself");
}
});
MockNewClusterStateListener mockListenerB = new MockNewClusterStateListener();
MockNode nodeB = createMockNode("nodeB", Settings.EMPTY, Version.CURRENT, mockListenerB);
// Initial cluster state with both states - the second node still shouldn't get diff even though it's present in the previous cluster state
DiscoveryNodes discoveryNodes = DiscoveryNodes.builder().put(nodeA.discoveryNode).put(nodeB.discoveryNode).localNodeId(nodeA.discoveryNode.id()).build();
ClusterState previousClusterState = ClusterState.builder(new ClusterName("test")).nodes(discoveryNodes).build();
ClusterState clusterState = ClusterState.builder(previousClusterState).incrementVersion().build();
// First publication: nodeB has never seen a state from this publisher, so it must
// receive a full state, not a diff.
mockListenerB.add(new NewClusterStateExpectation() {
@Override
public void check(ClusterState clusterState, PublishClusterStateAction.NewClusterStateListener.NewStateProcessed newStateProcessed) {
assertFalse(clusterState.wasReadFromDiff());
}
});
publishStateDiffAndWait(nodeA.action, clusterState, previousClusterState);
// cluster state update - add block
previousClusterState = clusterState;
clusterState = ClusterState.builder(clusterState).blocks(ClusterBlocks.builder().addGlobalBlock(MetaData.CLUSTER_READ_ONLY_BLOCK)).incrementVersion().build();
// Second publication would normally arrive as a diff...
mockListenerB.add(new NewClusterStateExpectation() {
@Override
public void check(ClusterState clusterState, PublishClusterStateAction.NewClusterStateListener.NewStateProcessed newStateProcessed) {
assertTrue(clusterState.wasReadFromDiff());
}
});
// ...but we wrap the state so that producing the diff blows up during serialization.
ClusterState unserializableClusterState = new ClusterState(clusterState.version(), clusterState.uuid(), clusterState) {
@Override
public Diff<ClusterState> diff(ClusterState previousState) {
return new Diff<ClusterState>() {
@Override
public ClusterState apply(ClusterState part) {
fail("this diff shouldn't be applied");
return part;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
throw new IOException("Simulated failure of diff serialization");
}
};
}
};
// Exactly one node (nodeB) should report the simulated serialization failure.
List<Tuple<DiscoveryNode, Throwable>> errors = publishStateDiff(nodeA.action, unserializableClusterState, previousClusterState).awaitErrors(1, TimeUnit.SECONDS);
assertThat(errors.size(), equalTo(1));
assertThat(errors.get(0).v2().getMessage(), containsString("Simulated failure of diff serialization"));
}
/**
 * Derives new metadata from {@code metaData} for the given version: adds an index
 * named "test&lt;version&gt;" with &lt;version&gt; primary shards and no replicas, and stamps the
 * transient setting "test" with the version so receivers can validate cumulative state.
 */
private MetaData buildMetaDataForVersion(MetaData metaData, long version) {
    final String indexName = "test" + version;
    final ImmutableOpenMap.Builder<String, IndexMetaData> updatedIndices =
            ImmutableOpenMap.builder(metaData.indices());
    updatedIndices.put(indexName, IndexMetaData.builder(indexName)
            .settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT))
            .numberOfShards((int) version)
            .numberOfReplicas(0)
            .build());
    return MetaData.builder(metaData)
            .transientSettings(Settings.builder().put("test", version).build())
            .indices(updatedIndices.build())
            .build();
}
/**
 * Asserts that {@code metaData} is exactly what {@link #buildMetaDataForVersion} would have
 * accumulated up to {@code version}: one index per prior version (with matching shard count),
 * nothing beyond it, and the transient "test" setting equal to the latest version.
 */
private void assertProperMetaDataForVersion(MetaData metaData, long version) {
    for (long v = 1; v <= version; v++) {
        final String indexName = "test" + v;
        assertThat(metaData.index(indexName), notNullValue());
        assertThat(metaData.index(indexName).numberOfShards(), equalTo((int) v));
    }
    // No index from a future version may be present.
    assertThat(metaData.index("test" + (version + 1)), nullValue());
    // The transient marker must reflect the most recently applied version.
    assertThat(metaData.transientSettings().get("test"), equalTo(Long.toString(version)));
}
/**
 * Publishes {@code state} (diffed against {@code previousState}) and blocks up to one
 * second for every other node to ack, asserting that no node reported an error.
 */
public void publishStateDiffAndWait(PublishClusterStateAction action, ClusterState state, ClusterState previousState) throws InterruptedException {
    final AssertingAckListener ackListener = publishStateDiff(action, state, previousState);
    ackListener.await(1, TimeUnit.SECONDS);
}
/**
 * Kicks off publication of {@code state} via {@code action} without waiting.
 *
 * @return a listener expecting acks from every node except the local publisher;
 *         callers await/inspect it to complete the round.
 */
public AssertingAckListener publishStateDiff(PublishClusterStateAction action, ClusterState state, ClusterState previousState) throws InterruptedException {
    // Every node in the state except the publisher itself is expected to ack.
    final int expectedAcks = state.nodes().getSize() - 1;
    final AssertingAckListener ackListener = new AssertingAckListener(expectedAcks);
    action.publish(new ClusterChangedEvent("test update", state, previousState), ackListener);
    return ackListener;
}
/**
 * Ack listener that records per-node failures and lets the test thread wait until
 * every expected node has acked (or the publish timed out).
 */
public static class AssertingAckListener implements Discovery.AckListener {
    // (node, failure) pairs collected from acks; concurrent because acks arrive from
    // multiple transport threads.
    private final List<Tuple<DiscoveryNode, Throwable>> nodeErrors = new CopyOnWriteArrayList<>();
    // Set when the publish timed out before all acks arrived.
    private final AtomicBoolean timedOut = new AtomicBoolean();
    private final CountDownLatch pendingAcks;

    public AssertingAckListener(int nodeCount) {
        pendingAcks = new CountDownLatch(nodeCount);
    }

    @Override
    public void onNodeAck(DiscoveryNode node, @Nullable Throwable t) {
        if (t != null) {
            nodeErrors.add(new Tuple<>(node, t));
        }
        pendingAcks.countDown();
    }

    @Override
    public void onTimeout() {
        timedOut.set(true);
        // Fast forward the counter - no reason to wait here
        final long remaining = pendingAcks.getCount();
        for (long i = 0; i < remaining; i++) {
            pendingAcks.countDown();
        }
    }

    /** Waits for all acks and asserts that no node reported an error. */
    public void await(long timeout, TimeUnit unit) throws InterruptedException {
        assertThat(awaitErrors(timeout, unit), emptyIterable());
    }

    /** Waits for all acks, asserts no timeout occurred, and returns the collected errors. */
    public List<Tuple<DiscoveryNode, Throwable>> awaitErrors(long timeout, TimeUnit unit) throws InterruptedException {
        pendingAcks.await(timeout, unit);
        assertFalse(timedOut.get());
        return nodeErrors;
    }
}
/**
 * One-shot assertion applied to the next cluster state received by a
 * {@link MockNewClusterStateListener}; implementations typically assert on
 * whether the state was rebuilt from a diff.
 */
public interface NewClusterStateExpectation {
void check(ClusterState clusterState, PublishClusterStateAction.NewClusterStateListener.NewStateProcessed newStateProcessed);
}
/**
 * Cluster state listener that consumes pre-registered {@link NewClusterStateExpectation}s
 * in FIFO order, one per received state, and fails the test on any unexpected update.
 */
public static class MockNewClusterStateListener implements PublishClusterStateAction.NewClusterStateListener {
    // Diamond instead of the original raw-typed RHS; concurrent because states can
    // arrive on transport threads while the test thread adds expectations.
    final CopyOnWriteArrayList<NewClusterStateExpectation> expectations = new CopyOnWriteArrayList<>();

    @Override
    public void onNewClusterState(ClusterState clusterState, NewStateProcessed newStateProcessed) {
        final NewClusterStateExpectation expectation;
        try {
            expectation = expectations.remove(0);
        } catch (IndexOutOfBoundsException ex) {
            // remove(int) is only specified to throw IndexOutOfBoundsException; catching the
            // original ArrayIndexOutOfBoundsException relied on an implementation detail.
            fail("Unexpected cluster state update " + clusterState.prettyPrint());
            return; // unreachable (fail throws); keeps 'expectation' definitely assigned
        }
        expectation.check(clusterState, newStateProcessed);
        newStateProcessed.onNewClusterStateProcessed();
    }

    /** Registers the assertion to run against the next received cluster state. */
    public void add(NewClusterStateExpectation expectation) {
        expectations.add(expectation);
    }
}
/**
 * Thin {@link ClusterState} wrapper that forwards version/uuid/content to the wrapped
 * state; a convenient base for anonymous subclasses that override selected behavior
 * (e.g. to simulate serialization failures).
 */
public static class DelegatingClusterState extends ClusterState {
public DelegatingClusterState(ClusterState clusterState) {
super(clusterState.version(), clusterState.uuid(), clusterState);
}
}
}
| |
/**
*
* Sifter - Search Indexes for Text Evidence Relevantly
*
* Copyright (C) 2013, University of Texas at San Antonio (UTSA)
*
* Sifter is a digital forensics and e-discovery tool for conducting
* text based string searches. It clusters and ranks search hits
* to improve investigative efficiency. Hit-level ranking uses a
* patent-pending ranking algorithm invented by Dr. Nicole Beebe at UTSA.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* @author Jon Stewart, Lightbox Technologies
**/
package edu.utsa.sifter;
import org.codehaus.jackson.annotate.JsonProperty;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;
import java.io.IOException;
import java.util.Date;
import java.util.Set;
import java.util.ArrayList;
import java.util.HashMap;
/**
 * A single search hit: the indexed document's metadata exposed to the client as JSON
 * (fields annotated with {@link JsonProperty}), plus the machinery for computing
 * per-document ranking features from the Lucene index.
 */
public class Result {
  /**
   * Path fragments identifying well-known Windows/Unix system directories; used by
   * {@link #docRankFactors} when setting the directory-location feature.
   */
  final public static String[] SystemDirs = new String[]{
    "WINDOWS/",
    "System Volume Information/",
    "Program Files/",
    "Program Files (x86)/",
    "ProgramData",
    "bin/",
    "boot/",
    "dev/",
    "etc/",
    "initrd/",
    "lib/",
    "lib64/",
    "mnt/",
    "opt/",
    "proc/",
    "sbin/",
    "srv/",
    "sys/",
    "tmp/",
    "usr/",
    "var/"
  };
  // Fields below are serialized to the client by Jackson.
  @JsonProperty
  public String ID;
  @JsonProperty
  public float Score;
  @JsonProperty
  public String Name;
  @JsonProperty
  public String Path;
  @JsonProperty
  public String Extension;
  @JsonProperty
  public long Size;
  // MAC timestamps: stored as epoch seconds in the index, exposed here as epoch milliseconds.
  @JsonProperty
  public long Modified;
  @JsonProperty
  public long Accessed;
  @JsonProperty
  public long Created;
  @JsonProperty
  public String Body;
  @JsonProperty
  public String Cell;
  @JsonProperty
  public double CellDistance;
  public int BodyLen;
  // Lucene-internal doc id, needed later to fetch this document's term vector.
  private int LuceneID;
  // J.S.
  // NOTE(review): featuresA is never assigned anywhere in this class, so
  // getDocFeatures() always returns null — wire it up or remove it.
  private double[] featuresA;

  /**
   * Populates the result from a stored Lucene document.
   *
   * @param doc   stored Lucene document for this hit
   * @param lucID Lucene-internal document id
   * @param score relevance score assigned by the search
   */
  public Result(final Document doc, final int lucID, final float score) {
    LuceneID = lucID;
    Score = score;
    ID = emptyIfNull(doc.get("ID"));
    Name = emptyIfNull(doc.get("name"));
    Extension = emptyIfNull(doc.get("extension"));
    Path = emptyIfNull(doc.get("path"));
    Size = DocUtil.getLongField(doc, "size", 0);
    // Convert indexed epoch seconds to epoch milliseconds.
    Modified = DocUtil.getLongField(doc, "modified", 0) * 1000;
    Accessed = DocUtil.getLongField(doc, "accessed", 0) * 1000;
    Created = DocUtil.getLongField(doc, "created", 0) * 1000;
    Body = doc.get("body");
    BodyLen = (int)DocUtil.getLongField(doc, "body-len", 0);
    Cell = doc.get("cell");
    CellDistance = (float)DocUtil.getDoubleField(doc, "som-cell-distance", 0);
  }

  /** Returns the full path of the file (directory path + file name). */
  public String fullpath() {
    return Path + Name;
  }

  /** Returns true if this hit came from unallocated (deleted/carved) space. */
  public boolean isUnallocated() {
    return Path.startsWith("$Unallocated/");
  }

  /** Maps null to the empty string so client-facing fields never contain null. */
  String emptyIfNull(final String s) {
    return s == null ? "": s;
  }

  /** Per-document term statistics gathered while scanning the term vector. */
  public static class DocTermInfo {
    /** Body-field frequency of each query term occurring in this document. */
    public HashMap<BytesRef, Long> TermFreqs = new HashMap<BytesRef, Long>();
    /** Highest frequency among the query terms in this document. */
    public long MaxTermFreq = 0;
  }

  // J.S.
  /** Returns the cached feature vector; currently always null (see field note). */
  public double[] getDocFeatures() {
    return featuresA;
  }

  /**
   * Fills {@code features} with document-level ranking factors and returns per-term
   * frequency info for the query terms found in this document's body.
   *
   * @param features output array indexed by the {@code HitRanker.F*} constants
   * @param refDate  reference date used to normalize the MAC-time recency features
   * @param rdr      index reader used to fetch this document's term vector
   * @param termSet  query terms (on the "body" field) to match against
   * @return term frequencies and the maximum query-term frequency for this document
   * @throws IOException if the term vector cannot be read
   */
  public DocTermInfo docRankFactors(final double[] features,
                                    final Date refDate,
                                    final IndexReader rdr,
                                    final Set<Term> termSet) throws IOException
  {
    // (Removed: a dead local `final double[] featuresA = new double[19]` that shadowed
    // the field of the same name and was never read or written.)
    final DocTermInfo ret = new DocTermInfo();
    final String lowerExt = Extension.toLowerCase();
    if (!isUnallocated()) {
      // Recency features only make sense for allocated files with real timestamps.
      features[HitRanker.FCREATED] = dateDiff(Created, refDate);
      features[HitRanker.FMODIFIED] = dateDiff(Modified, refDate);
      features[HitRanker.FACCESSED] = dateDiff(Accessed, refDate);
      features[HitRanker.FAVG_RECENCY] = (features[HitRanker.FCREATED] +
                                          features[HitRanker.FMODIFIED] +
                                          features[HitRanker.FACCESSED]) / 3;
      features[HitRanker.FFILENAME_DIRECT] = 0;
      features[HitRanker.FFILENAME_INDIRECT] = 0;
      final String fullPath = Path + Name;
      for (Term t: termSet) {
        // NOTE(review): indexOf(...) > 0 misses a term matching at index 0; presumably
        // paths always carry a leading prefix — confirm, otherwise this should be >= 0.
        if (fullPath.indexOf(t.text()) > 0) {
          features[HitRanker.FFILENAME_INDIRECT] = 1;
          break;
        }
      }
      // NOTE(review): this sets FUSER_DIRECTORY to 1 when the path contains a *system*
      // directory — confirm whether the feature's polarity is intentional.
      features[HitRanker.FUSER_DIRECTORY] = 0;
      for (String dir: SystemDirs) {
        if (Path.indexOf(dir) > -1) {
          features[HitRanker.FUSER_DIRECTORY] = 1;
          break;
        }
      }
    }
    features[HitRanker.FHIGH_PRIORITY_TYPE] = DocMaker.HighPriorityTypes.contains(lowerExt) ? 1: 0;
    features[HitRanker.FMED_PRIORITY_TYPE] = DocMaker.MedPriorityTypes.contains(lowerExt) ? 1: 0;
    features[HitRanker.FLOW_PRIORITY_TYPE] = features[HitRanker.FHIGH_PRIORITY_TYPE] + features[HitRanker.FMED_PRIORITY_TYPE] > 0 ? 0: 1;
    // Walk this document's body term vector, accumulating vector norms and the
    // frequencies of the query terms.
    final Terms terms = rdr.getTermVector(LuceneID, "body");
    final TermsEnum term = terms.iterator(null);
    double dotSum = 0,
           docVecSumSqrs = 0,
           queryVecSumSqrs = 0;
    long termCount = 0;
    while (term.next() != null) {
      termCount = term.totalTermFreq();
      docVecSumSqrs += termCount * termCount;
      if (termSet.contains(new Term("body", term.term()))) {
        // Query terms are implicitly weighted 1, so the dot product just sums frequencies.
        dotSum += termCount;
        ++queryVecSumSqrs;
        ret.TermFreqs.put(BytesRef.deepCopyOf(term.term()), termCount);
        ret.MaxTermFreq = Math.max(ret.MaxTermFreq, termCount);
      }
    }
    // NOTE(review): textbook cosine similarity divides by the PRODUCT of the two vector
    // norms, not their sum — confirm this denominator is intentional before changing it.
    features[HitRanker.FCOSINE_SIMILARITY] = dotSum / (Math.sqrt(docVecSumSqrs) + Math.sqrt(queryVecSumSqrs));
    // Fraction of the query terms that occur in this document.
    features[HitRanker.FTERM_CARDINALITY] = queryVecSumSqrs / termSet.size();
    // features[HitRanker.FTERM_LENGTH]
    // features[HitRanker.FTERM_PRIORITY] = 0.0;
    return ret;
  }

  /**
   * Recency feature: absolute distance between timestamp {@code d} (ms) and the
   * reference date, normalized by the reference date's epoch-millisecond value.
   */
  double dateDiff(final long d, final Date refDate) {
    return ((double)Math.abs(d - refDate.getTime())) / refDate.getTime();
  }
}
| |
// Copyright 2016 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.bazel.repository.starlark;
import static com.google.devtools.build.lib.packages.Attribute.attr;
import static com.google.devtools.build.lib.packages.Type.BOOLEAN;
import static com.google.devtools.build.lib.packages.Type.STRING;
import static com.google.devtools.build.lib.packages.Type.STRING_LIST;
import com.google.common.collect.ImmutableList;
import com.google.devtools.build.docgen.annot.DocCategory;
import com.google.devtools.build.lib.analysis.BaseRuleClasses;
import com.google.devtools.build.lib.analysis.starlark.StarlarkAttrModule.Descriptor;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.cmdline.LabelSyntaxException;
import com.google.devtools.build.lib.packages.AttributeValueSource;
import com.google.devtools.build.lib.packages.BazelModuleContext;
import com.google.devtools.build.lib.packages.BazelStarlarkContext;
import com.google.devtools.build.lib.packages.Package;
import com.google.devtools.build.lib.packages.Package.NameConflictException;
import com.google.devtools.build.lib.packages.PackageFactory;
import com.google.devtools.build.lib.packages.PackageFactory.PackageContext;
import com.google.devtools.build.lib.packages.Rule;
import com.google.devtools.build.lib.packages.RuleClass;
import com.google.devtools.build.lib.packages.RuleClass.Builder.RuleClassType;
import com.google.devtools.build.lib.packages.RuleFactory.InvalidRuleException;
import com.google.devtools.build.lib.packages.StarlarkExportable;
import com.google.devtools.build.lib.packages.WorkspaceFactoryHelper;
import com.google.devtools.build.lib.packages.semantics.BuildLanguageOptions;
import com.google.devtools.build.lib.starlarkbuildapi.repository.RepositoryModuleApi;
import java.util.Map;
import net.starlark.java.annot.StarlarkBuiltin;
import net.starlark.java.eval.Dict;
import net.starlark.java.eval.EvalException;
import net.starlark.java.eval.Module;
import net.starlark.java.eval.Printer;
import net.starlark.java.eval.Sequence;
import net.starlark.java.eval.Starlark;
import net.starlark.java.eval.StarlarkCallable;
import net.starlark.java.eval.StarlarkThread;
import net.starlark.java.eval.Tuple;
/**
* The Starlark module containing the definition of {@code repository_rule} function to define a
* Starlark remote repository.
*/
public class StarlarkRepositoryModule implements RepositoryModuleApi {
// Implements the Starlark `repository_rule(...)` builtin: assembles a RuleClass.Builder
// from the implementation function and attribute descriptors, and returns a callable
// RepositoryRuleFunction that instantiates the rule when invoked from WORKSPACE.
@Override
public StarlarkCallable repositoryRule(
StarlarkCallable implementation,
Object attrs,
Boolean local,
Sequence<?> environ, // <String> expected
Boolean configure,
Boolean remotable,
String doc,
StarlarkThread thread)
throws EvalException {
BazelStarlarkContext context = BazelStarlarkContext.from(thread);
// repository_rule may only be called during .bzl loading or WORKSPACE evaluation.
context.checkLoadingOrWorkspacePhase("repository_rule");
// We'll set the name later, pass the empty string for now.
RuleClass.Builder builder = new RuleClass.Builder("", RuleClassType.WORKSPACE, true);
ImmutableList<StarlarkThread.CallStackEntry> callstack = thread.getCallStack();
builder.setCallStack(
callstack.subList(0, callstack.size() - 1)); // pop 'repository_rule' itself
// '$'-prefixed attributes are implicit: set from the repository_rule() arguments,
// not by the rule's eventual callers.
builder.addAttribute(attr("$local", BOOLEAN).defaultValue(local).build());
builder.addAttribute(attr("$configure", BOOLEAN).defaultValue(configure).build());
if (thread.getSemantics().getBool(BuildLanguageOptions.EXPERIMENTAL_REPO_REMOTE_EXEC)) {
// Remote-execution support is gated behind an experimental flag.
builder.addAttribute(attr("$remotable", BOOLEAN).defaultValue(remotable).build());
BaseRuleClasses.execPropertiesAttribute(builder);
}
builder.addAttribute(attr("$environ", STRING_LIST).defaultValue(environ).build());
BaseRuleClasses.nameAttribute(builder);
BaseRuleClasses.commonCoreAndStarlarkAttributes(builder);
builder.add(attr("expect_failure", STRING));
if (attrs != Starlark.NONE) {
// User-supplied attributes must not collide with the built-ins registered above.
for (Map.Entry<String, Descriptor> attr :
Dict.cast(attrs, String.class, Descriptor.class, "attrs").entrySet()) {
Descriptor attrDescriptor = attr.getValue();
AttributeValueSource source = attrDescriptor.getValueSource();
String attrName = source.convertToNativeName(attr.getKey());
if (builder.contains(attrName)) {
throw Starlark.errorf(
"There is already a built-in attribute '%s' which cannot be overridden", attrName);
}
builder.addAttribute(attrDescriptor.build(attrName));
}
}
builder.setConfiguredTargetFunction(implementation);
// Record where (which .bzl) this rule class was defined, for reproducibility hashing.
BazelModuleContext bzlModule =
BazelModuleContext.of(Module.ofInnermostEnclosingStarlarkFunction(thread));
builder.setRuleDefinitionEnvironmentLabelAndDigest(
bzlModule.label(), bzlModule.bzlTransitiveDigest());
builder.setWorkspaceOnly();
return new RepositoryRuleFunction(builder, implementation);
}
// RepositoryRuleFunction is the result of repository_rule(...).
// It is a callable value; calling it yields a Rule instance.
@StarlarkBuiltin(
name = "repository_rule",
category = DocCategory.BUILTIN,
doc =
"A callable value that may be invoked during evaluation of the WORKSPACE file to"
+ " instantiate and return a repository rule.")
private static final class RepositoryRuleFunction
implements StarlarkCallable, StarlarkExportable {
// The rule class under construction; finalized lazily in call() because the
// rule class name is only known after export (or at first call).
private final RuleClass.Builder builder;
private final StarlarkCallable implementation;
// Set by export(); null while the function is still anonymous.
private Label extensionLabel;
private String exportedName;
private RepositoryRuleFunction(RuleClass.Builder builder, StarlarkCallable implementation) {
this.builder = builder;
this.implementation = implementation;
}
@Override
public String getName() {
return "repository_rule";
}
@Override
public boolean isImmutable() {
return true;
}
// Called when the value is bound to a global in a .bzl file; records the binding
// so the rule class can take its exported name.
@Override
public void export(Label extensionLabel, String exportedName) {
this.extensionLabel = extensionLabel;
this.exportedName = exportedName;
}
@Override
public boolean isExported() {
return extensionLabel != null;
}
@Override
public void repr(Printer printer) {
if (exportedName == null) {
printer.append("<anonymous starlark repository rule>");
} else {
printer.append("<starlark repository rule " + extensionLabel + "%" + exportedName + ">");
}
}
// Invoking the function from WORKSPACE creates and registers the repository rule.
@Override
public Object call(StarlarkThread thread, Tuple args, Dict<String, Object> kwargs)
throws EvalException, InterruptedException {
BazelStarlarkContext.from(thread).checkWorkspacePhase("repository rule " + exportedName);
// Repository rules accept keyword arguments only.
if (!args.isEmpty()) {
throw new EvalException("unexpected positional arguments");
}
String ruleClassName;
// If the function ever got exported (the common case), we take the name
// it was exported to. Only in the not intended case of calling an unexported
// repository function through an exported macro, we fall back, for lack of
// alternatives, to the name in the local context.
// TODO(b/111199163): we probably should disallow the use of non-exported
// repository rules anyway.
if (isExported()) {
ruleClassName = exportedName;
} else {
// repository_rules should be subject to the same "exported" requirement
// as package rules, but sadly we forgot to add the necessary check and
// now many projects create and instantiate repository_rules without an
// intervening export; see b/111199163. An incompatible flag is required.
if (false) {
throw new EvalException("attempt to instantiate a non-exported repository rule");
}
// The historical workaround was a fragile hack to introspect on the call
// expression syntax, f() or x.f(), to find the name f, but we no longer
// have access to the call expression, so now we just create an ugly
// name from the function. See github.com/bazelbuild/bazel/issues/10441
ruleClassName = "unexported_" + implementation.getName();
}
try {
RuleClass ruleClass = builder.build(ruleClassName, ruleClassName);
PackageContext context = PackageFactory.getContext(thread);
Package.Builder packageBuilder = context.getBuilder();
// TODO(adonovan): is this cast safe? Check.
String name = (String) kwargs.get("name");
WorkspaceFactoryHelper.addMainRepoEntry(packageBuilder, name, thread.getSemantics());
WorkspaceFactoryHelper.addRepoMappings(packageBuilder, kwargs, name);
Rule rule =
WorkspaceFactoryHelper.createAndAddRepositoryRule(
context.getBuilder(),
ruleClass,
/*bindRuleClass=*/ null,
WorkspaceFactoryHelper.getFinalKwargs(kwargs),
thread.getSemantics(),
thread.getCallStack());
return rule;
} catch (InvalidRuleException | NameConflictException | LabelSyntaxException e) {
// Surface package-construction failures as Starlark evaluation errors.
throw Starlark.errorf("%s", e.getMessage());
}
}
}
// Fails with a migration message when the incompatible flag forbids the built-in
// cc_configure logic in favor of rules_cc.
@Override
public void failWithIncompatibleUseCcConfigureFromRulesCc(StarlarkThread thread)
throws EvalException {
if (thread
.getSemantics()
.getBool(BuildLanguageOptions.INCOMPATIBLE_USE_CC_CONFIGURE_FROM_RULES_CC)) {
throw Starlark.errorf(
"Incompatible flag "
+ "--incompatible_use_cc_configure_from_rules_cc has been flipped. Please use "
+ "cc_configure and related logic from https://github.com/bazelbuild/rules_cc. "
+ "See https://github.com/bazelbuild/bazel/issues/10134 for details and migration "
+ "instructions.");
}
}
}
| |
/*
* Copyright (c) 2010 Yahoo! Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the
* License. See accompanying LICENSE file.
*/
package io.s4.util;
import io.s4.collector.EventWrapper;
import io.s4.dispatcher.partitioner.CompoundKeyInfo;
import io.s4.emitter.CommLayerEmitter;
import io.s4.emitter.EventEmitter;
import io.s4.schema.Schema;
import io.s4.schema.Schema.Property;
import io.s4.serialize.KryoSerDeser;
import io.s4.serialize.SerializerDeserializer;
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.InputStreamReader;
import java.io.Reader;
import java.lang.reflect.Array;
import java.lang.reflect.Method;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Random;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
public class LoadGenerator {
/**
 * Command-line entry point: parses options, wires a CommLayerEmitter to a
 * LoadGenerator, and replays events from the input file (or stdin with "-")
 * at the requested rate.
 *
 * Fixes vs. the previous revision: the listener application name is now read
 * only when "-g" was actually supplied (it was previously guarded by "-a"),
 * and the "-o" sleep-overhead value is actually applied to the generator
 * instead of being parsed and ignored.
 */
public static void main(String args[]) {
    Options options = new Options();
    boolean warmUp = false;
    options.addOption(OptionBuilder.withArgName("rate")
                                   .hasArg()
                                   .withDescription("Rate (events per second)")
                                   .create("r"));
    options.addOption(OptionBuilder.withArgName("display_rate")
                                   .hasArg()
                                   .withDescription("Display Rate at specified second boundary")
                                   .create("d"));
    options.addOption(OptionBuilder.withArgName("start_boundary")
                                   .hasArg()
                                   .withDescription("Start boundary in seconds")
                                   .create("b"));
    options.addOption(OptionBuilder.withArgName("run_for")
                                   .hasArg()
                                   .withDescription("Run for a specified number of seconds")
                                   .create("x"));
    options.addOption(OptionBuilder.withArgName("cluster_manager")
                                   .hasArg()
                                   .withDescription("Cluster manager")
                                   .create("z"));
    options.addOption(OptionBuilder.withArgName("sender_application_name")
                                   .hasArg()
                                   .withDescription("Sender application name")
                                   .create("a"));
    options.addOption(OptionBuilder.withArgName("listener_application_name")
                                   .hasArg()
                                   .withDescription("Listener application name")
                                   .create("g"));
    options.addOption(OptionBuilder.withArgName("sleep_overhead")
                                   .hasArg()
                                   .withDescription("Sleep overhead")
                                   .create("o"));
    options.addOption(new Option("w", "Warm-up"));
    CommandLineParser parser = new GnuParser();
    CommandLine line = null;
    try {
        // parse the command line arguments
        line = parser.parse(options, args);
    } catch (ParseException exp) {
        // oops, something went wrong
        System.err.println("Parsing failed. Reason: " + exp.getMessage());
        System.exit(1);
    }
    int expectedRate = 250;
    if (line.hasOption("r")) {
        try {
            expectedRate = Integer.parseInt(line.getOptionValue("r"));
        } catch (Exception e) {
            System.err.println("Bad expected rate specified "
                    + line.getOptionValue("r"));
            System.exit(1);
        }
    }
    int displayRateIntervalSeconds = 20;
    if (line.hasOption("d")) {
        try {
            displayRateIntervalSeconds = Integer.parseInt(line.getOptionValue("d"));
        } catch (Exception e) {
            System.err.println("Bad display rate value specified "
                    + line.getOptionValue("d"));
            System.exit(1);
        }
    }
    int startBoundary = 2;
    if (line.hasOption("b")) {
        try {
            startBoundary = Integer.parseInt(line.getOptionValue("b"));
        } catch (Exception e) {
            System.err.println("Bad start boundary value specified "
                    + line.getOptionValue("b"));
            System.exit(1);
        }
    }
    // NOTE(review): option "f" is never registered on `options` above, so GnuParser
    // rejects "-f" before we get here and this branch can never trigger — confirm
    // whether the option was meant to be registered or this block removed.
    int updateFrequency = 0;
    if (line.hasOption("f")) {
        try {
            updateFrequency = Integer.parseInt(line.getOptionValue("f"));
        } catch (Exception e) {
            System.err.println("Bad query update frequency specified "
                    + line.getOptionValue("f"));
            System.exit(1);
        }
        System.out.printf("Update frequency is %d\n", updateFrequency);
    }
    int runForTime = 0;
    if (line.hasOption("x")) {
        try {
            runForTime = Integer.parseInt(line.getOptionValue("x"));
        } catch (Exception e) {
            System.err.println("Bad run for time specified "
                    + line.getOptionValue("x"));
            System.exit(1);
        }
        System.out.printf("Run for time is %d\n", runForTime);
    }
    String clusterManagerAddress = null;
    if (line.hasOption("z")) {
        clusterManagerAddress = line.getOptionValue("z");
    }
    String senderApplicationName = null;
    if (line.hasOption("a")) {
        senderApplicationName = line.getOptionValue("a");
    }
    String listenerApplicationName = null;
    // Fixed: was hasOption("a"), which read the "g" value only when "-a" was given
    // (and left it null when only "-g" was supplied).
    if (line.hasOption("g")) {
        listenerApplicationName = line.getOptionValue("g");
    }
    if (listenerApplicationName == null) {
        listenerApplicationName = senderApplicationName;
    }
    long sleepOverheadMicros = -1;
    if (line.hasOption("o")) {
        try {
            sleepOverheadMicros = Long.parseLong(line.getOptionValue("o"));
        } catch (NumberFormatException e) {
            System.err.println("Bad sleep overhead specified "
                    + line.getOptionValue("o"));
            System.exit(1);
        }
        System.out.printf("Specified sleep overhead is %d\n",
                          sleepOverheadMicros);
    }
    // NOTE(review): warmUp is parsed but not yet used; run() states there is no
    // warm-up mechanism for now.
    if (line.hasOption("w")) {
        warmUp = true;
    }
    List loArgs = line.getArgList();
    if (loArgs.size() < 1) {
        System.err.println("No input file specified");
        System.exit(1);
    }
    String inputFilename = (String) loArgs.get(0);
    SerializerDeserializer serDeser = new KryoSerDeser();
    CommLayerEmitter clEmitter = new CommLayerEmitter();
    clEmitter.setAppName(senderApplicationName);
    clEmitter.setListenerAppName(listenerApplicationName);
    clEmitter.setClusterManagerAddress(clusterManagerAddress);
    clEmitter.setSenderId(String.valueOf(System.currentTimeMillis() / 1000));
    clEmitter.setSerDeser(serDeser);
    clEmitter.init();
    long endTime = 0;
    if (runForTime > 0) {
        endTime = System.currentTimeMillis() + (runForTime * 1000);
    }
    LoadGenerator loadGenerator = new LoadGenerator();
    loadGenerator.setInputFilename(inputFilename);
    loadGenerator.setEventEmitter(clEmitter);
    loadGenerator.setDisplayRateInterval(displayRateIntervalSeconds);
    loadGenerator.setExpectedRate(expectedRate);
    // Fixed: the user-supplied overhead was previously parsed and printed but
    // never applied; only override the measured value when one was given.
    if (sleepOverheadMicros >= 0) {
        loadGenerator.setSleepOverheadMicros(sleepOverheadMicros);
    }
    loadGenerator.run();
    System.exit(0);
}
// Destination for generated events (partition-addressed emit()).
private EventEmitter eventEmitter;
// Input file path, or "-" for stdin (see run()).
private String inputFilename;
// Number of events emitted so far.
private int emitCount;
// How often (seconds) to print the achieved rate; 0 disables the display interval.
private int displayRateInterval = 0;
// Target emit rate in events per second.
private int expectedRate = 200;
private int adjustedExpectedRate = 1;
// Average overhead of Thread.sleep(1) in microseconds; -1 means "measure it in the constructor".
private long sleepOverheadMicros = -1;
// Size of the circular buffer of recent per-event processing times (nanoseconds).
private static int PROCESS_TIME_LIST_MAX_SIZE = 15;
private long[] processTimes = new long[PROCESS_TIME_LIST_MAX_SIZE];
// Next write position in the circular processTimes buffer.
private int processTimePointer = 0;
// Event schema info keyed by the "_index" value found in each JSON input record.
private Map<Integer, EventTypeInfo> eventTypeInfoMap = new HashMap<Integer, EventTypeInfo>();
public int getEmitCount() {
return emitCount;
}
public void setEventEmitter(EventEmitter eventEmitter) {
this.eventEmitter = eventEmitter;
}
public void setInputFilename(String inputFilename) {
this.inputFilename = inputFilename;
}
public void setDisplayRateInterval(int displayRateInterval) {
this.displayRateInterval = displayRateInterval;
}
public void setSleepOverheadMicros(long sleepOverheadMicros) {
this.sleepOverheadMicros = sleepOverheadMicros;
}
public void setExpectedRate(int expectedRate) {
this.expectedRate = expectedRate;
}
// Used to pick a random partition per event in run().
private Random rand = new Random(System.currentTimeMillis());
public LoadGenerator() {
if (sleepOverheadMicros == -1) {
// calculate sleep overhead
long totalSleepOverhead = 0;
for (int i = 0; i < 50; i++) {
long startTime = System.nanoTime();
try {
Thread.sleep(1);
} catch (InterruptedException ie) {
}
totalSleepOverhead += (System.nanoTime() - startTime)
- (1 * 1000 * 1000);
}
sleepOverheadMicros = (totalSleepOverhead / 50) / 1000;
}
System.out.println("Sleep overhead is " + sleepOverheadMicros);
}
// Main replay loop: reads JSON records from the input (file or stdin), converts each to an
// event via the schema declared on the first line, emits it to a random partition, and
// throttles with periodic sleeps to approximate the configured expectedRate.
public void run() {
// for now, no warm-up mechanism
adjustedExpectedRate = expectedRate;
long startTime = 0;
long intervalStart = 0;
int emitCountStart = 0;
// rateInfo[0] = sleep duration (ms); rateInfo[1] = emit-count interval between sleeps.
long[] rateInfo = new long[2];
rateInfo[0] = 100; // start with a sleep time of 100
BufferedReader br = null;
Reader inputReader = null;
try {
// "-" means read events from stdin instead of a file.
if (inputFilename.equals("-")) {
inputReader = new InputStreamReader(System.in);
} else {
inputReader = new FileReader(inputFilename);
}
br = new BufferedReader(inputReader);
String inputLine = null;
boolean firstLine = true;
EventWrapper eventWrapper = null;
// startTime is reset at the top of each iteration so processTimes measures one event.
for (startTime = System.nanoTime(); (inputLine = br.readLine()) != null; startTime = System.nanoTime()) {
if (firstLine) {
// The first line is a header describing the event classes/schemas, not an event.
JSONObject jsonRecord = new JSONObject(inputLine);
createEventTypeInfo(jsonRecord);
System.out.println(eventTypeInfoMap);
if (eventTypeInfoMap.size() == 0) {
return;
}
firstLine = false;
continue;
}
try {
JSONObject jsonRecord = new JSONObject(inputLine);
// "_index" selects which event type (from the header) this record instantiates.
int classIndex = jsonRecord.getInt("_index");
EventTypeInfo eventTypeInfo = eventTypeInfoMap.get(classIndex);
if (eventTypeInfo == null) {
System.err.printf("Invalid _index value %d\n",
classIndex);
return;
}
Object event = makeRecord(jsonRecord,
eventTypeInfo.getSchema());
eventWrapper = new EventWrapper(eventTypeInfo.getStreamName(),
event,
new ArrayList<CompoundKeyInfo>());
// System.out.println(eventWrapper.getStreamName() + ": " +
// eventWrapper.getEvent());
} catch (Exception e) {
// Malformed records are logged and skipped; the replay continues.
e.printStackTrace();
System.err.printf("Bad input data %s\n", inputLine);
continue;
}
// NOTE(review): Math.abs(Integer.MIN_VALUE) is negative, so this could in theory
// produce a negative partition once in ~4 billion draws — confirm acceptable.
int partition = Math.abs(rand.nextInt())
% eventEmitter.getNodeCount();
eventEmitter.emit(partition, eventWrapper);
emitCount++;
// the rest of the stuff in this block is just to maintain the
// rate
processTimes[processTimePointer] = System.nanoTime()
- startTime;
// Circular buffer: wrap the pointer at PROCESS_TIME_LIST_MAX_SIZE.
processTimePointer = (processTimePointer == PROCESS_TIME_LIST_MAX_SIZE - 1) ? 0
: processTimePointer + 1;
// Recompute the sleep schedule on the first event and every 20 events thereafter.
if (emitCount == 1 || emitCount % 20 == 0) {
rateInfo = getRateInfo(rateInfo);
}
// if it's time, display the actual emit rate
if (intervalStart == 0) {
intervalStart = System.currentTimeMillis();
} else {
long interval = System.currentTimeMillis() - intervalStart;
if (interval >= (displayRateInterval * 1000)) {
double rate = (emitCount - emitCountStart)
/ (interval / 1000.0);
System.out.println("Rate is " + rate);
intervalStart = System.currentTimeMillis();
emitCountStart = emitCount;
}
}
// Throttle: sleep rateInfo[0] ms every rateInfo[1] events (every event when 0).
if (rateInfo[1] == 0 || emitCount % rateInfo[1] == 0) {
try {
Thread.sleep(rateInfo[0]);
} catch (InterruptedException ie) {
}
}
}
System.out.printf("Emitted %d events\n", emitCount);
} catch (Exception e) {
throw new RuntimeException(e);
} finally {
// Best-effort close; br/inputReader may be null if opening the input failed,
// and the empty catch deliberately ignores NPE/close failures here.
try {
br.close();
} catch (Exception e) {
}
try {
inputReader.close();
} catch (Exception e) {
}
}
}
/**
 * Populates {@code eventTypeInfoMap} from the header record of the input,
 * which has the shape { className: { "classIndex": n, "streamName": s } }.
 * JSON problems and unloadable classes are reported on stderr and may leave
 * the map partially filled.
 *
 * @param classInfo JSON object keyed by fully-qualified event class name
 */
@SuppressWarnings("unchecked")
public void createEventTypeInfo(JSONObject classInfo) {
    String className = "";
    try {
        for (Iterator it = classInfo.keys(); it.hasNext();) {
            className = (String) it.next();
            JSONObject jsonEventTypeInfo = classInfo.getJSONObject(className);
            // getInt already returns a primitive int; no boxing cast needed
            int classIndex = jsonEventTypeInfo.getInt("classIndex");
            String streamName = jsonEventTypeInfo.getString("streamName");
            Class clazz = Class.forName(className);
            Schema schema = new Schema(clazz);
            eventTypeInfoMap.put(classIndex, new EventTypeInfo(schema,
                    streamName));
        }
    } catch (JSONException je) {
        je.printStackTrace();
    } catch (ClassNotFoundException cnfe) {
        // fixed typo in the original message ("Count not locate class")
        System.err.println("Could not locate class " + className);
    }
}
/**
 * Instantiates an event of the schema's type and fills it from the given
 * JSON record. Fields absent from the schema or set to JSON null are left
 * at their defaults. Any reflection/JSON failure is rethrown as a
 * RuntimeException.
 */
@SuppressWarnings("unchecked")
private Object makeRecord(JSONObject jsonRecord, Schema schema) {
    try {
        Object record = schema.getType().newInstance();
        Iterator keys = jsonRecord.keys();
        while (keys.hasNext()) {
            String fieldName = (String) keys.next();
            Property fieldProperty = schema.getProperties().get(fieldName);
            if (fieldProperty == null) {
                // field not declared in the schema; ignore it
                continue;
            }
            Object rawValue = jsonRecord.get(fieldName);
            if (JSONObject.NULL.equals(rawValue)) {
                continue; // explicit JSON null: keep the default value
            }
            fieldProperty.getSetterMethod().invoke(record,
                    makeSettableValue(fieldProperty, rawValue));
        }
        return record;
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
/**
 * Converts a raw JSON value into an object assignable to the given
 * property: arrays, Lists, primitives, Strings, boxed numeric types, or a
 * nested record. On a type mismatch the problem is reported on stderr and
 * null is returned (which the caller then passes to the setter).
 */
@SuppressWarnings("unchecked")
private Object makeSettableValue(Property property, Object value) {
    String propertyName = property.getName();
    Class propertyType = property.getType();
    if (propertyType.isArray()) {
        if (!(value instanceof JSONArray)) {
            System.err.println("Type mismatch for field " + propertyName);
            return null;
        }
        System.out.println("Is array!");
        return makeArray(property, (JSONArray) value);
    } else if (property.isList()) {
        if (!(value instanceof JSONArray)) {
            System.err.println("Type mismatch for field " + propertyName);
            return null;
        }
        return makeList(property, (JSONArray) value);
    } else if (propertyType.isPrimitive()) {
        if (!(value instanceof Number || value instanceof Boolean)) {
            System.err.println("Type mismatch for field " + propertyName
                    + "; expected number or boolean, found "
                    + value.getClass());
            return null;
        }
        // Reflection auto-unboxes Number/Boolean into the primitive slot.
        return value; // hmm... does this work?
    } else if (propertyType.equals(String.class)) {
        if (!(value instanceof String)) {
            System.err.println("Type mismatch for field " + propertyName
                    + "; expected String, found " + value.getClass());
            return null;
        }
        return value;
    } else if (property.isNumber()) {
        if (!(value instanceof Integer || value instanceof Long
                || value instanceof Float || value instanceof Double
                || value instanceof BigDecimal || value instanceof BigInteger)) {
            return null;
        }
        // Convert the JSON number into the exact boxed type the setter
        // expects, using valueOf factories rather than the deprecated
        // boxing constructors.
        Number adjustedValue = (Number) value;
        if (propertyType.equals(Long.class) && !(value instanceof Long)) {
            adjustedValue = Long.valueOf(((Number) value).longValue());
        } else if (propertyType.equals(Integer.class)
                && !(value instanceof Integer)) {
            adjustedValue = Integer.valueOf(((Number) value).intValue());
        } else if (propertyType.equals(Double.class)
                && !(value instanceof Double)) {
            adjustedValue = Double.valueOf(((Number) value).doubleValue());
        } else if (propertyType.equals(Float.class)
                && !(value instanceof Float)) {
            adjustedValue = Float.valueOf(((Number) value).floatValue());
        } else if (propertyType.equals(BigDecimal.class)) {
            // NOTE(review): longValue() drops any fractional part of the
            // incoming number — confirm this truncation is intended.
            adjustedValue = BigDecimal.valueOf(((Number) value).longValue());
        } else if (propertyType.equals(BigInteger.class)) {
            adjustedValue = BigInteger.valueOf(((Number) value).longValue());
        }
        return adjustedValue;
    } else if (value instanceof JSONObject) {
        // nested object: recurse with the property's own schema
        return makeRecord((JSONObject) value, property.getSchema());
    }
    return null;
}
/**
 * Builds a List whose elements are the converted values of the given JSON
 * array, using the list property's component type for each element.
 */
public Object makeList(Property property, JSONArray jsonArray) {
    Property elementProperty = property.getComponentProperty();
    int length = jsonArray.length();
    List<Object> result = new ArrayList<Object>(length);
    try {
        for (int index = 0; index < length; index++) {
            result.add(makeSettableValue(elementProperty, jsonArray.get(index)));
        }
    } catch (JSONException je) {
        throw new RuntimeException(je);
    }
    return result;
}
/**
 * Builds a typed array whose elements are the converted values of the given
 * JSON array, using the array property's component type for each element.
 */
@SuppressWarnings("unchecked")
public Object makeArray(Property property, JSONArray jsonArray) {
    Property elementProperty = property.getComponentProperty();
    int length = jsonArray.length();
    Object result = Array.newInstance(elementProperty.getType(), length);
    try {
        for (int index = 0; index < length; index++) {
            Array.set(result, index,
                    makeSettableValue(elementProperty, jsonArray.get(index)));
        }
    } catch (JSONException je) {
        throw new RuntimeException(je);
    }
    return result;
}
/**
 * Recomputes the sleep schedule used by run() to hold the expected rate.
 * Averages the recent per-event processing times, pads by 30% for overhead,
 * and derives how long to sleep (rateInfo[0], millis) and how many events to
 * emit between sleeps (rateInfo[1]).
 * NOTE(review): assumes at least one entry exists in processTimes; run()
 * calls this only after the first emit, so entryCount should be >= 1 —
 * confirm no other caller exists.
 */
private long[] getRateInfo(long[] rateInfo) {
long totalTimeNanos = 0;
int entryCount = 0;
// Long.MIN_VALUE marks unused slots in the circular buffer
for (int i = 0; i < processTimes.length; i++) {
if (processTimes[i] == Long.MIN_VALUE) {
break;
}
entryCount++;
totalTimeNanos += processTimes[i];
}
long averageTimeMicros = (long) ((totalTimeNanos / (double) entryCount) / 1000.0);
// fudge the time for additional overhead
averageTimeMicros += (long) (averageTimeMicros * 0.30);
if (emitCount % 5000 == 0) {
// System.out.println("Average time in micros is " +
// averageTimeMicros);
}
long sleepTimeMicros = 0;
long millis = 0;
// micros of pure processing needed per second to meet the rate
long timeToMeetRateMicros = adjustedExpectedRate * averageTimeMicros;
long leftOver = 1000000 - timeToMeetRateMicros;
if (leftOver <= 0) {
// already slower than the target rate: never sleep
sleepTimeMicros = 0;
} else {
sleepTimeMicros = (leftOver / adjustedExpectedRate)
- sleepOverheadMicros;
}
// how many events can be processed in the nanos time?
int eventsBeforeSleep = 1;
if (sleepTimeMicros < 1000) {
// per-event sleep would be sub-millisecond: instead sleep 1 ms
// once every eventsBeforeSleep events
sleepTimeMicros = 1000 + sleepOverheadMicros;
millis = 1;
double numNapsDouble = ((double) leftOver / sleepTimeMicros);
int numNaps = (int) Math.ceil(numNapsDouble);
if (numNaps > 0) {
eventsBeforeSleep = adjustedExpectedRate / numNaps;
}
if (leftOver <= 0) {
millis = 0;
eventsBeforeSleep = 1000;
}
} else {
millis = sleepTimeMicros / 1000;
}
rateInfo[0] = millis;
rateInfo[1] = eventsBeforeSleep;
return rateInfo;
}
/**
 * Immutable pairing of an event class's Schema with the name of the stream
 * its events are emitted on. Fields are final since instances are never
 * mutated after construction.
 */
static class EventTypeInfo {
    private final Schema schema;
    private final String streamName;

    public EventTypeInfo(Schema schema, String streamName) {
        this.schema = schema;
        this.streamName = streamName;
    }

    /** Schema used to deserialize records of this event type. */
    public Schema getSchema() {
        return schema;
    }

    /** Stream the events of this type are emitted on. */
    public String getStreamName() {
        return streamName;
    }
}
}
/* ==== end of LoadGenerator source; the following is a separate file ==== */
/*
* Copyright Strimzi authors.
* License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html).
*/
package io.strimzi.systemtest.mirrormaker;
import io.fabric8.kubernetes.api.model.Quantity;
import io.fabric8.kubernetes.api.model.ResourceRequirementsBuilder;
import io.strimzi.api.kafka.model.CertSecretSource;
import io.strimzi.api.kafka.model.KafkaMirrorMaker;
import io.strimzi.api.kafka.model.KafkaMirrorMakerResources;
import io.strimzi.api.kafka.model.KafkaResources;
import io.strimzi.api.kafka.model.KafkaUser;
import io.strimzi.api.kafka.model.PasswordSecretSource;
import io.strimzi.api.kafka.model.listener.KafkaListenerAuthenticationScramSha512;
import io.strimzi.api.kafka.model.listener.KafkaListenerAuthenticationTls;
import io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListenerBuilder;
import io.strimzi.api.kafka.model.listener.arraylistener.KafkaListenerType;
import io.strimzi.api.kafka.model.status.KafkaMirrorMakerStatus;
import io.strimzi.api.kafka.model.template.DeploymentStrategy;
import io.strimzi.operator.common.model.Labels;
import io.strimzi.systemtest.AbstractST;
import io.strimzi.systemtest.BeforeAllOnce;
import io.strimzi.systemtest.Constants;
import io.strimzi.systemtest.annotations.IsolatedSuite;
import io.strimzi.systemtest.resources.operator.SetupClusterOperator;
import io.strimzi.systemtest.annotations.ParallelNamespaceTest;
import io.strimzi.systemtest.kafkaclients.internalClients.InternalKafkaClient;
import io.strimzi.systemtest.resources.crd.KafkaMirrorMakerResource;
import io.strimzi.systemtest.templates.crd.KafkaClientsTemplates;
import io.strimzi.systemtest.templates.crd.KafkaMirrorMakerTemplates;
import io.strimzi.systemtest.templates.crd.KafkaTemplates;
import io.strimzi.systemtest.templates.crd.KafkaTopicTemplates;
import io.strimzi.systemtest.templates.crd.KafkaUserTemplates;
import io.strimzi.systemtest.utils.ClientUtils;
import io.strimzi.systemtest.utils.StUtils;
import io.strimzi.systemtest.utils.kafkaUtils.KafkaMirrorMakerUtils;
import io.strimzi.systemtest.utils.kubeUtils.controllers.DeploymentUtils;
import io.strimzi.systemtest.utils.kubeUtils.objects.PodUtils;
import io.strimzi.test.timemeasuring.Operation;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.hamcrest.CoreMatchers;
import org.hamcrest.Matchers;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.extension.ExtensionContext;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import static io.strimzi.systemtest.Constants.ACCEPTANCE;
import static io.strimzi.systemtest.Constants.INFRA_NAMESPACE;
import static io.strimzi.systemtest.Constants.INTERNAL_CLIENTS_USED;
import static io.strimzi.systemtest.Constants.MIRROR_MAKER;
import static io.strimzi.systemtest.Constants.REGRESSION;
import static io.strimzi.systemtest.Constants.SCALABILITY;
import static io.strimzi.systemtest.enums.CustomResourceStatus.Ready;
import static io.strimzi.test.k8s.KubeClusterResource.cmdKubeClient;
import static io.strimzi.test.k8s.KubeClusterResource.kubeClient;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.CoreMatchers.not;
import static org.hamcrest.MatcherAssert.assertThat;
@Tag(REGRESSION)
@Tag(MIRROR_MAKER)
@Tag(INTERNAL_CLIENTS_USED)
@IsolatedSuite
public class MirrorMakerST extends AbstractST {
private static final Logger LOGGER = LogManager.getLogger(MirrorMakerST.class);
// Number of messages each internal client produces/consumes per check.
private final int messagesCount = 200;
/**
 * End-to-end check of a plain (non-TLS) MirrorMaker deployment: verifies
 * broker availability on both clusters, deploys MirrorMaker with resource
 * limits and custom JVM options, checks labels/logs/resources on the
 * MirrorMaker pod, then asserts that messages produced on the source
 * cluster can be consumed from the target cluster.
 */
@ParallelNamespaceTest
void testMirrorMaker(ExtensionContext extensionContext) {
final String namespaceName = StUtils.getNamespaceBasedOnRbac(INFRA_NAMESPACE, extensionContext);
String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
String kafkaClusterSourceName = clusterName + "-source";
String kafkaClusterTargetName = clusterName + "-target";
// -XX options to pass through to the MirrorMaker JVM (asserted below)
Map<String, String> jvmOptionsXX = new HashMap<>();
jvmOptionsXX.put("UseG1GC", "true");
String operationId = timeMeasuringSystem.startTimeMeasuring(Operation.MM_DEPLOYMENT, extensionContext.getRequiredTestClass().getName(), extensionContext.getDisplayName());
String topicSourceName = TOPIC_NAME + "-source" + "-" + rng.nextInt(Integer.MAX_VALUE);
// Deploy source kafka
resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(kafkaClusterSourceName, 1, 1).build());
// Deploy target kafka
resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(kafkaClusterTargetName, 1, 1).build());
// Deploy Topic
resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(kafkaClusterSourceName, topicSourceName).build());
resourceManager.createResource(extensionContext, KafkaClientsTemplates.kafkaClients(namespaceName, false, clusterName + "-" + Constants.KAFKA_CLIENTS).build());
final String kafkaClientsPodName = PodUtils.getPodsByPrefixInNameWithDynamicWait(namespaceName, clusterName + "-" + Constants.KAFKA_CLIENTS).get(0).getMetadata().getName();
InternalKafkaClient internalKafkaClient = new InternalKafkaClient.Builder()
.withUsingPodName(kafkaClientsPodName)
.withTopicName("topic-for-test-broker-1")
.withNamespaceName(namespaceName)
.withClusterName(kafkaClusterSourceName)
.withMessageCount(messagesCount)
.withListenerName(Constants.PLAIN_LISTENER_DEFAULT_NAME)
.build();
// Check brokers availability
internalKafkaClient.produceAndConsumesPlainMessagesUntilBothOperationsAreSuccessful();
internalKafkaClient = internalKafkaClient.toBuilder()
.withTopicName("topic-for-test-broker-2")
.withClusterName(kafkaClusterTargetName)
.build();
internalKafkaClient.produceAndConsumesPlainMessagesUntilBothOperationsAreSuccessful();
// Deploy Mirror Maker with explicit resource limits/requests and JVM opts
resourceManager.createResource(extensionContext, KafkaMirrorMakerTemplates.kafkaMirrorMaker(clusterName, kafkaClusterSourceName, kafkaClusterTargetName, ClientUtils.generateRandomConsumerGroup(), 1, false)
.editSpec()
.withResources(new ResourceRequirementsBuilder()
.addToLimits("memory", new Quantity("400M"))
.addToLimits("cpu", new Quantity("2"))
.addToRequests("memory", new Quantity("300M"))
.addToRequests("cpu", new Quantity("1"))
.build())
.withNewJvmOptions()
.withXmx("200m")
.withXms("200m")
.withXx(jvmOptionsXX)
.endJvmOptions()
.endSpec()
.build());
// Verify expected labels on MirrorMaker pods/services/configmaps/SAs
verifyLabelsOnPods(namespaceName, clusterName, "mirror-maker", "KafkaMirrorMaker");
verifyLabelsForService(namespaceName, clusterName, "mirror-maker", "KafkaMirrorMaker");
verifyLabelsForConfigMaps(namespaceName, kafkaClusterSourceName, null, kafkaClusterTargetName);
verifyLabelsForServiceAccounts(namespaceName, kafkaClusterSourceName, null);
// The pod log must not contain the known keytool cert-path failure
String mirrorMakerPodName = kubeClient(namespaceName).listPodsByPrefixInName(KafkaMirrorMakerResources.deploymentName(clusterName)).get(0).getMetadata().getName();
String kafkaMirrorMakerLogs = kubeClient(namespaceName).logs(mirrorMakerPodName);
assertThat(kafkaMirrorMakerLogs,
not(containsString("keytool error: java.io.FileNotFoundException: /opt/kafka/consumer-oauth-certs/**/* (No such file or directory)")));
String podName = kubeClient(namespaceName).listPodsByNamespace(namespaceName, clusterName).stream().filter(n -> n.getMetadata().getName().startsWith(KafkaMirrorMakerResources.deploymentName(clusterName))).findFirst().orElseThrow().getMetadata().getName();
// Confirm the resource limits/requests and JVM flags actually applied
assertResources(namespaceName, podName, clusterName.concat("-mirror-maker"),
"400M", "2", "300M", "1");
assertExpectedJavaOpts(namespaceName, podName, KafkaMirrorMakerResources.deploymentName(clusterName),
"-Xmx200m", "-Xms200m", "-XX:+UseG1GC");
timeMeasuringSystem.stopOperation(operationId, extensionContext.getRequiredTestClass().getName(), extensionContext.getDisplayName());
// Produce to the source cluster, then verify mirroring to the target
internalKafkaClient = internalKafkaClient.toBuilder()
.withTopicName(topicSourceName)
.withClusterName(kafkaClusterSourceName)
.build();
int sent = internalKafkaClient.sendMessagesPlain();
internalKafkaClient.consumesPlainMessagesUntilOperationIsSuccessful(sent);
internalKafkaClient = internalKafkaClient.toBuilder()
.withClusterName(kafkaClusterTargetName)
.build();
internalKafkaClient.consumesPlainMessagesUntilOperationIsSuccessful(sent);
}
/**
 * Test mirroring messages by Mirror Maker over tls transport using mutual tls auth:
 * both clusters expose a TLS listener with client-cert authentication, and
 * MirrorMaker's consumer/producer authenticate with per-cluster KafkaUsers.
 */
@ParallelNamespaceTest
@Tag(ACCEPTANCE)
@SuppressWarnings({"checkstyle:MethodLength"})
void testMirrorMakerTlsAuthenticated(ExtensionContext extensionContext) throws Exception {
final String namespaceName = StUtils.getNamespaceBasedOnRbac(INFRA_NAMESPACE, extensionContext);
String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
String kafkaClusterSourceName = clusterName + "-source";
String kafkaClusterTargetName = clusterName + "-target";
String topicSourceName = TOPIC_NAME + "-source" + "-" + rng.nextInt(Integer.MAX_VALUE);
String kafkaSourceUserName = clusterName + "-my-user-source";
String kafkaTargetUserName = clusterName + "-my-user-target";
// Deploy source kafka with tls listener and mutual tls auth
resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(kafkaClusterSourceName, 1, 1)
.editSpec()
.editKafka()
.withListeners(new GenericKafkaListenerBuilder()
.withName(Constants.TLS_LISTENER_DEFAULT_NAME)
.withPort(9093)
.withType(KafkaListenerType.INTERNAL)
.withTls(true)
.withAuth(new KafkaListenerAuthenticationTls())
.build())
.endKafka()
.endSpec()
.build());
// Deploy target kafka with tls listener and mutual tls auth
resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(kafkaClusterTargetName, 1, 1)
.editSpec()
.editKafka()
.withListeners(new GenericKafkaListenerBuilder()
.withName(Constants.TLS_LISTENER_DEFAULT_NAME)
.withPort(9093)
.withType(KafkaListenerType.INTERNAL)
.withTls(true)
.withAuth(new KafkaListenerAuthenticationTls())
.build())
.endKafka()
.endSpec()
.build());
// Deploy topic
resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(kafkaClusterSourceName, topicSourceName).build());
// createAndWaitForReadiness Kafka user
KafkaUser userSource = KafkaUserTemplates.tlsUser(kafkaClusterSourceName, kafkaSourceUserName).build();
KafkaUser userTarget = KafkaUserTemplates.tlsUser(kafkaClusterTargetName, kafkaTargetUserName).build();
resourceManager.createResource(extensionContext, userSource);
resourceManager.createResource(extensionContext, userTarget);
// Initialize CertSecretSource with certificate and secret names for consumer
CertSecretSource certSecretSource = new CertSecretSource();
certSecretSource.setCertificate("ca.crt");
certSecretSource.setSecretName(KafkaResources.clusterCaCertificateSecretName(kafkaClusterSourceName));
// Initialize CertSecretSource with certificate and secret names for producer
CertSecretSource certSecretTarget = new CertSecretSource();
certSecretTarget.setCertificate("ca.crt");
certSecretTarget.setSecretName(KafkaResources.clusterCaCertificateSecretName(kafkaClusterTargetName));
resourceManager.createResource(extensionContext, KafkaClientsTemplates.kafkaClients(namespaceName, true, clusterName + "-" + Constants.KAFKA_CLIENTS, userSource, userTarget).build());
final String kafkaClientsPodName = PodUtils.getPodsByPrefixInNameWithDynamicWait(namespaceName, clusterName + "-" + Constants.KAFKA_CLIENTS).get(0).getMetadata().getName();
String baseTopic = mapWithTestTopics.get(extensionContext.getDisplayName());
String topicTestName1 = baseTopic + "-test-1";
String topicTestName2 = baseTopic + "-test-2";
resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(kafkaClusterSourceName, topicTestName1).build());
resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(kafkaClusterSourceName, topicTestName2).build());
InternalKafkaClient internalKafkaClient = new InternalKafkaClient.Builder()
.withUsingPodName(kafkaClientsPodName)
.withTopicName(topicTestName1)
.withNamespaceName(namespaceName)
.withClusterName(kafkaClusterSourceName)
.withKafkaUsername(userSource.getMetadata().getName())
.withMessageCount(messagesCount)
.withListenerName(Constants.TLS_LISTENER_DEFAULT_NAME)
.build();
// Check brokers availability
internalKafkaClient.produceAndConsumesTlsMessagesUntilBothOperationsAreSuccessful();
internalKafkaClient = internalKafkaClient.toBuilder()
.withTopicName(topicTestName2)
.withClusterName(kafkaClusterTargetName)
.withListenerName(Constants.TLS_LISTENER_DEFAULT_NAME)
.withKafkaUsername(userTarget.getMetadata().getName())
.build();
internalKafkaClient.produceAndConsumesTlsMessagesUntilBothOperationsAreSuccessful();
// Deploy Mirror Maker with tls listener and mutual tls auth
resourceManager.createResource(extensionContext, KafkaMirrorMakerTemplates.kafkaMirrorMaker(clusterName, kafkaClusterSourceName, kafkaClusterTargetName, ClientUtils.generateRandomConsumerGroup(), 1, true)
.editSpec()
.editConsumer()
.withNewTls()
.withTrustedCertificates(certSecretSource)
.endTls()
.withNewKafkaClientAuthenticationTls()
.withNewCertificateAndKey()
.withSecretName(kafkaSourceUserName)
.withCertificate("user.crt")
.withKey("user.key")
.endCertificateAndKey()
.endKafkaClientAuthenticationTls()
.endConsumer()
.editProducer()
.withNewTls()
.withTrustedCertificates(certSecretTarget)
.endTls()
.withNewKafkaClientAuthenticationTls()
.withNewCertificateAndKey()
.withSecretName(kafkaTargetUserName)
.withCertificate("user.crt")
.withKey("user.key")
.endCertificateAndKey()
.endKafkaClientAuthenticationTls()
.endProducer()
.endSpec()
.build());
// Produce to the source cluster over TLS, then verify the messages were
// mirrored by consuming them from the target cluster
internalKafkaClient = internalKafkaClient.toBuilder()
.withTopicName(topicSourceName)
.withClusterName(kafkaClusterSourceName)
.withKafkaUsername(userSource.getMetadata().getName())
.build();
internalKafkaClient.produceAndConsumesTlsMessagesUntilBothOperationsAreSuccessful();
internalKafkaClient = internalKafkaClient.toBuilder()
.withClusterName(kafkaClusterTargetName)
.withKafkaUsername(userTarget.getMetadata().getName())
.build();
internalKafkaClient.consumesTlsMessagesUntilOperationIsSuccessful(internalKafkaClient.getMessageCount());
}
/**
 * Test mirroring messages by Mirror Maker over tls transport using scram-sha auth:
 * both clusters expose a TLS listener with SCRAM-SHA-512 authentication, and
 * MirrorMaker's consumer/producer authenticate with username/password secrets.
 */
@ParallelNamespaceTest
@SuppressWarnings("checkstyle:methodlength")
void testMirrorMakerTlsScramSha(ExtensionContext extensionContext) {
final String namespaceName = StUtils.getNamespaceBasedOnRbac(INFRA_NAMESPACE, extensionContext);
String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
String topicName = mapWithTestTopics.get(extensionContext.getDisplayName());
String kafkaClusterSourceName = clusterName + "-source";
String kafkaClusterTargetName = clusterName + "-target";
String kafkaUserSource = clusterName + "-my-user-source";
String kafkaUserTarget = clusterName + "-my-user-target";
// Deploy source kafka with tls listener and SCRAM-SHA authentication
resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(kafkaClusterSourceName, 1, 1)
.editSpec()
.editKafka()
.withListeners(new GenericKafkaListenerBuilder()
.withName(Constants.TLS_LISTENER_DEFAULT_NAME)
.withPort(9093)
.withType(KafkaListenerType.INTERNAL)
.withTls(true)
.withAuth(new KafkaListenerAuthenticationScramSha512())
.build())
.endKafka()
.endSpec()
.build());
// Deploy target kafka with tls listener and SCRAM-SHA authentication
resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(kafkaClusterTargetName, 1, 1)
.editSpec()
.editKafka()
.withListeners(new GenericKafkaListenerBuilder()
.withName(Constants.TLS_LISTENER_DEFAULT_NAME)
.withPort(9093)
.withType(KafkaListenerType.INTERNAL)
.withTls(true)
.withAuth(new KafkaListenerAuthenticationScramSha512())
.build())
.endKafka()
.endSpec()
.build());
// Deploy topic
resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(kafkaClusterSourceName, topicName).build());
// createAndWaitForReadiness Kafka user for source cluster
KafkaUser userSource = KafkaUserTemplates.scramShaUser(kafkaClusterSourceName, kafkaUserSource).build();
// createAndWaitForReadiness Kafka user for target cluster
KafkaUser userTarget = KafkaUserTemplates.scramShaUser(kafkaClusterTargetName, kafkaUserTarget).build();
resourceManager.createResource(extensionContext, userSource);
resourceManager.createResource(extensionContext, userTarget);
// Initialize PasswordSecretSource to set this as PasswordSecret in Mirror Maker spec
PasswordSecretSource passwordSecretSource = new PasswordSecretSource();
passwordSecretSource.setSecretName(kafkaUserSource);
passwordSecretSource.setPassword("password");
// Initialize PasswordSecretSource to set this as PasswordSecret in Mirror Maker spec
PasswordSecretSource passwordSecretTarget = new PasswordSecretSource();
passwordSecretTarget.setSecretName(kafkaUserTarget);
passwordSecretTarget.setPassword("password");
// Initialize CertSecretSource with certificate and secret names for consumer
CertSecretSource certSecretSource = new CertSecretSource();
certSecretSource.setCertificate("ca.crt");
certSecretSource.setSecretName(KafkaResources.clusterCaCertificateSecretName(kafkaClusterSourceName));
// Initialize CertSecretSource with certificate and secret names for producer
CertSecretSource certSecretTarget = new CertSecretSource();
certSecretTarget.setCertificate("ca.crt");
certSecretTarget.setSecretName(KafkaResources.clusterCaCertificateSecretName(kafkaClusterTargetName));
// Deploy client
resourceManager.createResource(extensionContext, KafkaClientsTemplates.kafkaClients(namespaceName, true, clusterName + "-" + Constants.KAFKA_CLIENTS, userSource, userTarget).build());
final String kafkaClientsPodName = PodUtils.getPodsByPrefixInNameWithDynamicWait(namespaceName, clusterName + "-" + Constants.KAFKA_CLIENTS).get(0).getMetadata().getName();
String baseTopic = mapWithTestTopics.get(extensionContext.getDisplayName());
String topicTestName1 = baseTopic + "-test-1";
String topicTestName2 = baseTopic + "-test-2";
resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(kafkaClusterSourceName, topicTestName1).build());
resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(kafkaClusterSourceName, topicTestName2).build());
InternalKafkaClient internalKafkaClient = new InternalKafkaClient.Builder()
.withUsingPodName(kafkaClientsPodName)
.withTopicName(topicTestName1)
.withNamespaceName(namespaceName)
.withClusterName(kafkaClusterSourceName)
.withKafkaUsername(userSource.getMetadata().getName())
.withMessageCount(messagesCount)
.withListenerName(Constants.TLS_LISTENER_DEFAULT_NAME)
.build();
// Check brokers availability
internalKafkaClient.produceAndConsumesTlsMessagesUntilBothOperationsAreSuccessful();
internalKafkaClient = internalKafkaClient.toBuilder()
.withTopicName(topicTestName2)
.withClusterName(kafkaClusterTargetName)
.withKafkaUsername(userTarget.getMetadata().getName())
.build();
internalKafkaClient.produceAndConsumesTlsMessagesUntilBothOperationsAreSuccessful();
// Deploy Mirror Maker with TLS and ScramSha512
resourceManager.createResource(extensionContext, KafkaMirrorMakerTemplates.kafkaMirrorMaker(clusterName, kafkaClusterSourceName, kafkaClusterTargetName, ClientUtils.generateRandomConsumerGroup(), 1, true)
.editSpec()
.editConsumer()
.withNewKafkaClientAuthenticationScramSha512()
.withUsername(kafkaUserSource)
.withPasswordSecret(passwordSecretSource)
.endKafkaClientAuthenticationScramSha512()
.withNewTls()
.withTrustedCertificates(certSecretSource)
.endTls()
.endConsumer()
.editProducer()
.withNewKafkaClientAuthenticationScramSha512()
.withUsername(kafkaUserTarget)
.withPasswordSecret(passwordSecretTarget)
.endKafkaClientAuthenticationScramSha512()
.withNewTls()
.withTrustedCertificates(certSecretTarget)
.endTls()
.endProducer()
.endSpec()
.build());
// Produce to the source cluster, then verify mirroring to the target
internalKafkaClient = internalKafkaClient.toBuilder()
.withTopicName(topicName)
.withClusterName(kafkaClusterSourceName)
.withKafkaUsername(userSource.getMetadata().getName())
.build();
internalKafkaClient.produceAndConsumesTlsMessagesUntilBothOperationsAreSuccessful();
InternalKafkaClient newInternalKafkaClient = internalKafkaClient.toBuilder()
.withClusterName(kafkaClusterTargetName)
.withKafkaUsername(userTarget.getMetadata().getName())
.build();
newInternalKafkaClient.consumesTlsMessagesUntilOperationIsSuccessful(internalKafkaClient.getMessageCount());
}
/**
 * Verifies MirrorMaker's topic include list: messages on the included topic
 * are mirrored to the target cluster, while messages on a topic outside the
 * include list are not (the target consumer receives 0 messages).
 */
@ParallelNamespaceTest
void testIncludeList(ExtensionContext extensionContext) {
final String namespaceName = StUtils.getNamespaceBasedOnRbac(INFRA_NAMESPACE, extensionContext);
String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
String kafkaClusterSourceName = clusterName + "-source";
String kafkaClusterTargetName = clusterName + "-target";
String topicName = "included-topic";
String topicNotIncluded = "not-included-topic";
LOGGER.info("Creating kafka source cluster {}", kafkaClusterSourceName);
resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(kafkaClusterSourceName, 1, 1).build());
LOGGER.info("Creating kafka target cluster {}", kafkaClusterTargetName);
resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(kafkaClusterTargetName, 1, 1).build());
resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(kafkaClusterSourceName, topicName).build());
resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(kafkaClusterSourceName, topicNotIncluded).build());
resourceManager.createResource(extensionContext, KafkaClientsTemplates.kafkaClients(namespaceName, false, clusterName + "-" + Constants.KAFKA_CLIENTS).build());
String kafkaClientsPodName = PodUtils.getPodsByPrefixInNameWithDynamicWait(namespaceName, clusterName + "-" + Constants.KAFKA_CLIENTS).get(0).getMetadata().getName();
InternalKafkaClient internalKafkaClient = new InternalKafkaClient.Builder()
.withUsingPodName(kafkaClientsPodName)
.withTopicName("topic-example-10")
.withNamespaceName(namespaceName)
.withClusterName(kafkaClusterSourceName)
.withMessageCount(messagesCount)
.withConsumerGroupName(ClientUtils.generateRandomConsumerGroup())
.withListenerName(Constants.PLAIN_LISTENER_DEFAULT_NAME)
.build();
// Check brokers availability
internalKafkaClient.produceAndConsumesPlainMessagesUntilBothOperationsAreSuccessful();
internalKafkaClient = internalKafkaClient.toBuilder()
.withTopicName("topic-example-11")
.withClusterName(kafkaClusterTargetName)
.build();
internalKafkaClient.produceAndConsumesPlainMessagesUntilBothOperationsAreSuccessful();
// Deploy MirrorMaker mirroring only the "included-topic"
resourceManager.createResource(extensionContext, KafkaMirrorMakerTemplates.kafkaMirrorMaker(clusterName, kafkaClusterSourceName, kafkaClusterTargetName, ClientUtils.generateRandomConsumerGroup(), 1, false)
.editMetadata()
.withNamespace(namespaceName)
.endMetadata()
.editSpec()
.withInclude(topicName)
.endSpec().build());
// Included topic: messages must arrive on both source and target
internalKafkaClient = internalKafkaClient.toBuilder()
.withTopicName(topicName)
.withClusterName(kafkaClusterSourceName)
.build();
int sent = internalKafkaClient.sendMessagesPlain();
internalKafkaClient.consumesPlainMessagesUntilOperationIsSuccessful(sent);
internalKafkaClient = internalKafkaClient.toBuilder()
.withClusterName(kafkaClusterTargetName)
.build();
internalKafkaClient.consumesPlainMessagesUntilOperationIsSuccessful(sent);
// Excluded topic: messages stay on the source cluster only
internalKafkaClient = internalKafkaClient.toBuilder()
.withTopicName(topicNotIncluded)
.withClusterName(kafkaClusterSourceName)
.build();
sent = internalKafkaClient.sendMessagesPlain();
internalKafkaClient.consumesPlainMessagesUntilOperationIsSuccessful(sent);
internalKafkaClient = internalKafkaClient.toBuilder()
.withClusterName(kafkaClusterTargetName)
.build();
assertThat("Received 0 messages in target kafka because topic " + topicNotIncluded + " is not included",
internalKafkaClient.receiveMessagesPlain(), is(0));
}
@ParallelNamespaceTest
// Verifies that custom producer/consumer config, container env vars and probe settings on a
// KafkaMirrorMaker are applied, and that updating them triggers a rolling update of the deployment.
void testCustomAndUpdatedValues(ExtensionContext extensionContext) {
final String namespaceName = StUtils.getNamespaceBasedOnRbac(INFRA_NAMESPACE, extensionContext);
String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(clusterName, 1, 1)
.editSpec()
.withNewEntityOperator()
.endEntityOperator()
.endSpec()
.build());
// This env var name collides with the one the operator itself derives from spec.producer.config,
// so the user-supplied value must NOT win (asserted below).
String usedVariable = "KAFKA_MIRRORMAKER_CONFIGURATION_PRODUCER";
LinkedHashMap<String, String> envVarGeneral = new LinkedHashMap<>();
envVarGeneral.put("TEST_ENV_1", "test.env.one");
envVarGeneral.put("TEST_ENV_2", "test.env.two");
envVarGeneral.put(usedVariable, "test.value");
LinkedHashMap<String, String> envVarUpdated = new LinkedHashMap<>();
envVarUpdated.put("TEST_ENV_2", "updated.test.env.two");
envVarUpdated.put("TEST_ENV_3", "test.env.three");
// Initial vs updated producer/consumer configs used to verify the rolling update applies changes.
Map<String, Object> producerConfig = new HashMap<>();
producerConfig.put("acks", "all");
Map<String, Object> updatedProducerConfig = new HashMap<>();
updatedProducerConfig.put("acks", "0");
Map<String, Object> consumerConfig = new HashMap<>();
consumerConfig.put("auto.offset.reset", "latest");
Map<String, Object> updatedConsumerConfig = new HashMap<>();
updatedConsumerConfig.put("auto.offset.reset", "earliest");
// Initial vs updated readiness/liveness probe values.
int initialDelaySeconds = 30;
int timeoutSeconds = 10;
int updatedInitialDelaySeconds = 31;
int updatedTimeoutSeconds = 11;
int periodSeconds = 10;
int successThreshold = 1;
int failureThreshold = 3;
int updatedPeriodSeconds = 5;
int updatedFailureThreshold = 1;
// Deploy MirrorMaker (source == target == the single cluster above) with the initial values.
resourceManager.createResource(extensionContext, KafkaMirrorMakerTemplates.kafkaMirrorMaker(clusterName, clusterName, clusterName, ClientUtils.generateRandomConsumerGroup(), 1, false)
.editSpec()
.editProducer()
.withConfig(producerConfig)
.endProducer()
.editConsumer()
.withConfig(consumerConfig)
.endConsumer()
.withNewTemplate()
.withNewMirrorMakerContainer()
.withEnv(StUtils.createContainerEnvVarsFromMap(envVarGeneral))
.endMirrorMakerContainer()
.endTemplate()
.withNewReadinessProbe()
.withInitialDelaySeconds(initialDelaySeconds)
.withTimeoutSeconds(timeoutSeconds)
.withPeriodSeconds(periodSeconds)
.withSuccessThreshold(successThreshold)
.withFailureThreshold(failureThreshold)
.endReadinessProbe()
.withNewLivenessProbe()
.withInitialDelaySeconds(initialDelaySeconds)
.withTimeoutSeconds(timeoutSeconds)
.withPeriodSeconds(periodSeconds)
.withSuccessThreshold(successThreshold)
.withFailureThreshold(failureThreshold)
.endLivenessProbe()
.endSpec()
.build());
// Snapshot pod identities so the rolling update after the spec change can be detected.
Map<String, String> mirrorMakerSnapshot = DeploymentUtils.depSnapshot(namespaceName, KafkaMirrorMakerResources.deploymentName(clusterName));
// Remove variable which is already in use
envVarGeneral.remove(usedVariable);
LOGGER.info("Verify values before update");
checkReadinessLivenessProbe(namespaceName, KafkaMirrorMakerResources.deploymentName(clusterName),
KafkaMirrorMakerResources.deploymentName(clusterName), initialDelaySeconds, timeoutSeconds, periodSeconds,
successThreshold, failureThreshold);
checkSpecificVariablesInContainer(namespaceName, KafkaMirrorMakerResources.deploymentName(clusterName),
KafkaMirrorMakerResources.deploymentName(clusterName), envVarGeneral);
checkComponentConfiguration(namespaceName, KafkaMirrorMakerResources.deploymentName(clusterName),
KafkaMirrorMakerResources.deploymentName(clusterName), "KAFKA_MIRRORMAKER_CONFIGURATION_PRODUCER", producerConfig);
checkComponentConfiguration(namespaceName, KafkaMirrorMakerResources.deploymentName(clusterName),
KafkaMirrorMakerResources.deploymentName(clusterName), "KAFKA_MIRRORMAKER_CONFIGURATION_CONSUMER", consumerConfig);
LOGGER.info("Check if actual env variable {} has different value than {}", usedVariable, "test.value");
assertThat(StUtils.checkEnvVarInPod(namespaceName, kubeClient().listPods(namespaceName, clusterName, Labels.STRIMZI_KIND_LABEL,
KafkaMirrorMaker.RESOURCE_KIND).get(0).getMetadata().getName(), usedVariable), CoreMatchers.is(not("test.value")));
LOGGER.info("Updating values in MirrorMaker container");
// Replace configs, env vars and probe values in one CR edit; the operator should roll the pods.
KafkaMirrorMakerResource.replaceMirrorMakerResourceInSpecificNamespace(clusterName, kmm -> {
kmm.getSpec().getTemplate().getMirrorMakerContainer().setEnv(StUtils.createContainerEnvVarsFromMap(envVarUpdated));
kmm.getSpec().getProducer().setConfig(updatedProducerConfig);
kmm.getSpec().getConsumer().setConfig(updatedConsumerConfig);
kmm.getSpec().getLivenessProbe().setInitialDelaySeconds(updatedInitialDelaySeconds);
kmm.getSpec().getReadinessProbe().setInitialDelaySeconds(updatedInitialDelaySeconds);
kmm.getSpec().getLivenessProbe().setTimeoutSeconds(updatedTimeoutSeconds);
kmm.getSpec().getReadinessProbe().setTimeoutSeconds(updatedTimeoutSeconds);
kmm.getSpec().getLivenessProbe().setPeriodSeconds(updatedPeriodSeconds);
kmm.getSpec().getReadinessProbe().setPeriodSeconds(updatedPeriodSeconds);
kmm.getSpec().getLivenessProbe().setFailureThreshold(updatedFailureThreshold);
kmm.getSpec().getReadinessProbe().setFailureThreshold(updatedFailureThreshold);
}, namespaceName);
// Wait until the deployment rolled away from the snapshot taken above.
DeploymentUtils.waitTillDepHasRolled(namespaceName, KafkaMirrorMakerResources.deploymentName(clusterName), 1, mirrorMakerSnapshot);
LOGGER.info("Verify values after update");
checkReadinessLivenessProbe(namespaceName, KafkaMirrorMakerResources.deploymentName(clusterName),
KafkaMirrorMakerResources.deploymentName(clusterName), updatedInitialDelaySeconds, updatedTimeoutSeconds,
updatedPeriodSeconds, successThreshold, updatedFailureThreshold);
checkSpecificVariablesInContainer(namespaceName, KafkaMirrorMakerResources.deploymentName(clusterName),
KafkaMirrorMakerResources.deploymentName(clusterName), envVarUpdated);
checkComponentConfiguration(namespaceName, KafkaMirrorMakerResources.deploymentName(clusterName),
KafkaMirrorMakerResources.deploymentName(clusterName), "KAFKA_MIRRORMAKER_CONFIGURATION_PRODUCER", updatedProducerConfig);
checkComponentConfiguration(namespaceName, KafkaMirrorMakerResources.deploymentName(clusterName),
KafkaMirrorMakerResources.deploymentName(clusterName), "KAFKA_MIRRORMAKER_CONFIGURATION_CONSUMER", updatedConsumerConfig);
}
@ParallelNamespaceTest
@Tag(SCALABILITY)
// Verifies that scaling a KafkaMirrorMaker via the scale subresource updates spec/status replicas,
// keeps the pod naming prefix, and bumps the observed generation.
void testScaleMirrorMakerSubresource(ExtensionContext extensionContext) {
final String namespaceName = StUtils.getNamespaceBasedOnRbac(INFRA_NAMESPACE, extensionContext);
String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
String kafkaClusterSourceName = clusterName + "-source";
String kafkaClusterTargetName = clusterName + "-target";
LOGGER.info("Creating kafka source cluster {}", kafkaClusterSourceName);
resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(kafkaClusterSourceName, 1, 1).build());
LOGGER.info("Creating kafka target cluster {}", kafkaClusterTargetName);
resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(kafkaClusterTargetName, 1, 1).build());
resourceManager.createResource(extensionContext, KafkaMirrorMakerTemplates.kafkaMirrorMaker(clusterName, kafkaClusterTargetName, kafkaClusterSourceName, ClientUtils.generateRandomConsumerGroup(), 1, false).build());
int scaleTo = 4;
// Capture pre-scale observed generation and pod name prefix for the post-scale assertions.
long mmObsGen = KafkaMirrorMakerResource.kafkaMirrorMakerClient().inNamespace(namespaceName).withName(clusterName).get().getStatus().getObservedGeneration();
String mmGenName = kubeClient(namespaceName).listPods(namespaceName, clusterName, Labels.STRIMZI_KIND_LABEL, KafkaMirrorMaker.RESOURCE_KIND).get(0).getMetadata().getGenerateName();
LOGGER.info("-------> Scaling KafkaMirrorMaker subresource <-------");
LOGGER.info("Scaling subresource replicas to {}", scaleTo);
cmdKubeClient().namespace(namespaceName).scaleByName(KafkaMirrorMaker.RESOURCE_KIND, clusterName, scaleTo);
DeploymentUtils.waitForDeploymentAndPodsReady(namespaceName, KafkaMirrorMakerResources.deploymentName(clusterName), scaleTo);
LOGGER.info("Check if replicas is set to {}, naming prefix should be same and observed generation higher", scaleTo);
List<String> mmPods = kubeClient(namespaceName).listPodNames(namespaceName, clusterName, Labels.STRIMZI_KIND_LABEL, KafkaMirrorMaker.RESOURCE_KIND);
// Assert against scaleTo (previously hard-coded 4) so the test stays correct if scaleTo changes.
assertThat(mmPods.size(), is(scaleTo));
assertThat(KafkaMirrorMakerResource.kafkaMirrorMakerClient().inNamespace(namespaceName).withName(clusterName).get().getSpec().getReplicas(), is(scaleTo));
assertThat(KafkaMirrorMakerResource.kafkaMirrorMakerClient().inNamespace(namespaceName).withName(clusterName).get().getStatus().getReplicas(), is(scaleTo));
/*
observed generation should be higher than before scaling -> after change of spec and successful reconciliation,
the observed generation is increased
*/
assertThat(mmObsGen < KafkaMirrorMakerResource.kafkaMirrorMakerClient().inNamespace(namespaceName).withName(clusterName).get().getStatus().getObservedGeneration(), is(true));
// All pods must still share the original generateName prefix (same Deployment, just scaled).
for (String pod : mmPods) {
assertThat(pod.contains(mmGenName), is(true));
}
}
@ParallelNamespaceTest
@Tag(SCALABILITY)
// Verifies that scaling a KafkaMirrorMaker to zero replicas removes all pods while the CR
// stays Ready and its observed generation changes.
void testScaleMirrorMakerToZero(ExtensionContext extensionContext) {
final String namespaceName = StUtils.getNamespaceBasedOnRbac(INFRA_NAMESPACE, extensionContext);
String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
String kafkaClusterSourceName = clusterName + "-source";
String kafkaClusterTargetName = clusterName + "-target";
LOGGER.info("Creating kafka source cluster {}", kafkaClusterSourceName);
resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(kafkaClusterSourceName, 1, 1).build());
LOGGER.info("Creating kafka target cluster {}", kafkaClusterTargetName);
resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(kafkaClusterTargetName, 1, 1).build());
// Use ClientUtils.generateRandomConsumerGroup() for consistency with every other test in this
// class (previously a hand-rolled "my-group" + rng.nextInt(Integer.MAX_VALUE) name).
resourceManager.createResource(extensionContext, KafkaMirrorMakerTemplates.kafkaMirrorMaker(clusterName, kafkaClusterTargetName, kafkaClusterSourceName, ClientUtils.generateRandomConsumerGroup(), 3, false).build());
long oldObsGen = KafkaMirrorMakerResource.kafkaMirrorMakerClient().inNamespace(namespaceName).withName(clusterName).get().getStatus().getObservedGeneration();
String mmDepName = KafkaMirrorMakerResources.deploymentName(clusterName);
// NOTE(review): this listPodNames overload takes no explicit namespace argument, unlike the
// 4-arg variant used elsewhere in this class — presumably it defaults to the client's
// namespace; confirm the overload's semantics.
List<String> mmPods = kubeClient(namespaceName).listPodNames(clusterName, Labels.STRIMZI_KIND_LABEL, KafkaMirrorMaker.RESOURCE_KIND);
assertThat(mmPods.size(), is(3));
LOGGER.info("Scaling MirrorMaker to zero");
KafkaMirrorMakerResource.replaceMirrorMakerResourceInSpecificNamespace(clusterName, mm -> mm.getSpec().setReplicas(0), namespaceName);
PodUtils.waitForPodsReady(namespaceName, kubeClient(namespaceName).getDeploymentSelectors(mmDepName), 0, true);
mmPods = kubeClient(namespaceName).listPodNames(clusterName, Labels.STRIMZI_KIND_LABEL, KafkaMirrorMaker.RESOURCE_KIND);
KafkaMirrorMakerStatus mmStatus = KafkaMirrorMakerResource.kafkaMirrorMakerClient().inNamespace(namespaceName).withName(clusterName).get().getStatus();
long actualObsGen = KafkaMirrorMakerResource.kafkaMirrorMakerClient().inNamespace(namespaceName).withName(clusterName).get().getStatus().getObservedGeneration();
// No pods left, CR still Ready, and reconciliation of the new spec bumped the generation.
assertThat(mmPods.size(), is(0));
assertThat(mmStatus.getConditions().get(0).getType(), is(Ready.toString()));
assertThat(actualObsGen, is(not(oldObsGen)));
}
@ParallelNamespaceTest
// Verifies that the RECREATE deployment strategy recreates pods without bumping the observed
// generation's roll count, and that switching to ROLLING_UPDATE rolls pods on the next change.
void testConfigureDeploymentStrategy(ExtensionContext extensionContext) {
final String namespaceName = StUtils.getNamespaceBasedOnRbac(INFRA_NAMESPACE, extensionContext);
String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
String kafkaClusterSourceName = clusterName + "-source";
String kafkaClusterTargetName = clusterName + "-target";
// Deploy source kafka
resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(kafkaClusterSourceName, 1, 1).build());
// Deploy target kafka
resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(kafkaClusterTargetName, 1, 1).build());
resourceManager.createResource(extensionContext, KafkaMirrorMakerTemplates.kafkaMirrorMaker(clusterName, kafkaClusterTargetName, kafkaClusterSourceName, ClientUtils.generateRandomConsumerGroup(), 1, false)
.editSpec()
.editOrNewTemplate()
.editOrNewDeployment()
.withDeploymentStrategy(DeploymentStrategy.RECREATE)
.endDeployment()
.endTemplate()
.endSpec()
.build());
String mmDepName = KafkaMirrorMakerResources.deploymentName(clusterName);
// Fixed garbled log message (was "recreateAndWaitForReadinessd", a botched find/replace of "recreated").
LOGGER.info("Adding label to MirrorMaker resource, the CR should be recreated");
KafkaMirrorMakerResource.replaceMirrorMakerResourceInSpecificNamespace(clusterName,
mm -> mm.getMetadata().setLabels(Collections.singletonMap("some", "label")), namespaceName);
DeploymentUtils.waitForDeploymentAndPodsReady(namespaceName, mmDepName, 1);
KafkaMirrorMaker kmm = KafkaMirrorMakerResource.kafkaMirrorMakerClient().inNamespace(namespaceName).withName(clusterName).get();
LOGGER.info("Checking that observed gen. is still on 1 (recreation) and new label is present");
assertThat(kmm.getStatus().getObservedGeneration(), is(1L));
assertThat(kmm.getMetadata().getLabels().toString(), Matchers.containsString("some=label"));
assertThat(kmm.getSpec().getTemplate().getDeployment().getDeploymentStrategy(), is(DeploymentStrategy.RECREATE));
LOGGER.info("Changing deployment strategy to {}", DeploymentStrategy.ROLLING_UPDATE);
KafkaMirrorMakerResource.replaceMirrorMakerResourceInSpecificNamespace(clusterName,
mm -> mm.getSpec().getTemplate().getDeployment().setDeploymentStrategy(DeploymentStrategy.ROLLING_UPDATE), namespaceName);
KafkaMirrorMakerUtils.waitForKafkaMirrorMakerReady(namespaceName, clusterName);
LOGGER.info("Adding another label to MirrorMaker resource, pods should be rolled");
KafkaMirrorMakerResource.replaceMirrorMakerResourceInSpecificNamespace(clusterName, mm -> mm.getMetadata().getLabels().put("another", "label"), namespaceName);
DeploymentUtils.waitForDeploymentAndPodsReady(namespaceName, mmDepName, 1);
LOGGER.info("Checking that observed gen. higher (rolling update) and label is changed");
kmm = KafkaMirrorMakerResource.kafkaMirrorMakerClient().inNamespace(namespaceName).withName(clusterName).get();
assertThat(kmm.getStatus().getObservedGeneration(), is(2L));
assertThat(kmm.getMetadata().getLabels().toString(), Matchers.containsString("another=label"));
assertThat(kmm.getSpec().getTemplate().getDeployment().getDeploymentStrategy(), is(DeploymentStrategy.ROLLING_UPDATE));
}
@BeforeAll
// Tears down any cluster operator left from a previous run, then installs a fresh one
// watching all namespaces with the medium operation timeout.
void setupEnvironment(ExtensionContext extensionContext) {
    // Remove the previous installation before configuring the new one.
    install.unInstall();
    SetupClusterOperator.SetupClusterOperatorBuilder operatorBuilder =
            new SetupClusterOperator.SetupClusterOperatorBuilder()
                    .withExtensionContext(BeforeAllOnce.getSharedExtensionContext())
                    .withNamespace(INFRA_NAMESPACE)
                    .withWatchingNamespaces(Constants.WATCH_ALL_NAMESPACES)
                    .withOperationTimeout(Constants.CO_OPERATION_TIMEOUT_MEDIUM);
    install = operatorBuilder.createInstallation().runInstallation();
}
}
| |
/**
* Copyright 2017 Equipment & Tool Institute
*/
package net.soliddesign.iumpr.ui;
import static net.soliddesign.iumpr.IUMPR.NL;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.swing.JOptionPane;
import net.soliddesign.iumpr.IUMPR;
import net.soliddesign.iumpr.bus.Adapter;
import net.soliddesign.iumpr.bus.Bus;
import net.soliddesign.iumpr.bus.BusException;
import net.soliddesign.iumpr.bus.RP1210;
import net.soliddesign.iumpr.bus.RP1210Bus;
import net.soliddesign.iumpr.bus.j1939.J1939;
import net.soliddesign.iumpr.controllers.CollectResultsController;
import net.soliddesign.iumpr.controllers.Controller;
import net.soliddesign.iumpr.controllers.DataPlateController;
import net.soliddesign.iumpr.controllers.MonitorCompletionController;
import net.soliddesign.iumpr.controllers.ResultsListener;
import net.soliddesign.iumpr.modules.ComparisonModule;
import net.soliddesign.iumpr.modules.ReportFileModule;
import net.soliddesign.iumpr.ui.help.HelpView;
/**
* The Class that controls the behavior of the {@link UserInterfaceView}
*
* @author Matt Gumbel (matt@soliddesign.net)
*
*/
public class UserInterfaceController implements IUserInterfaceController {
/**
* The default extension for report files created by this application
*/
static final String FILE_SUFFIX = "iumpr";
/**
* The {@link Controller} that is currently executing
*/
private Controller activeController;
/**
* The possible {@link Adapter} that can be used for communications with the
* vehicle
*/
private List<Adapter> adapters;
// Communications bus to the vehicle; null until an adapter has been selected (see setBus)
private Bus bus;
// Controller run when the Collect Test Results button is clicked
private CollectResultsController collectResultsController;
// Compares the vehicle's VIN and calibrations against the report file
private ComparisonModule comparisonModule;
// Controller run when the Generate Data Plate button is clicked
private DataPlateController dataPlateController;
// Runs potentially slow operations (adapter connect, file scan, vehicle reads) off the UI thread
private final Executor executor;
// Window shown when the Help button is clicked
private final HelpView helpView;
// Controller run when the Monitor Completion button is clicked
private MonitorCompletionController monitorCompletionController;
// true if the chosen report file was newly created rather than an existing file (see setupReportFile)
private boolean newFile;
/**
* The {@link File} where the report is stored
*/
private File reportFile;
// Manages the report file contents; also flushed on JVM shutdown via the shutdown hook
private final ReportFileModule reportFileModule;
// Provides the list of RP1210 adapters and opens connections to them
private final RP1210 rp1210;
/**
* The Adapter being used to communicate with the vehicle
*/
private Adapter selectedAdapter;
/**
* The {@link IUserInterfaceView} that is being controlled
*/
private final IUserInterfaceView view;
// The VIN read from the vehicle; null until Read Vehicle Info succeeds (cleared by resetView)
private String vin;
/**
* Default Constructor
*
* @param view
* The {@link UserInterfaceView} to control
*/
public UserInterfaceController(IUserInterfaceView view) {
this(view, new DataPlateController(), new CollectResultsController(),
new MonitorCompletionController(), new ComparisonModule(), new RP1210(), new ReportFileModule(),
Runtime.getRuntime(), Executors.newSingleThreadExecutor(), new HelpView());
}
/**
* Constructor used for testing
*
* @param view
* The {@link UserInterfaceView} to control
* @param dataPlateController
* the {@link DataPlateController}
* @param collectResultsController
* the {@link CollectResultsController}
* @param monitorCompletionController
* the {@link MonitorCompletionController}
* @param comparisonModule
* the {@link ComparisonModule}
* @param rp1210
* the {@link RP1210}
* @param reportFileModule
* the {@link ReportFileModule}
* @param runtime
* the {@link Runtime}
* @param executor
* the {@link Executor} used to execute {@link Thread} s
* @param helpView
* the {@link HelpView} that will display help for the
* application
*/
public UserInterfaceController(IUserInterfaceView view, DataPlateController dataPlateController,
CollectResultsController collectResultsController, MonitorCompletionController monitorCompletionController,
ComparisonModule comparisonModule, RP1210 rp1210, ReportFileModule reportFileModule,
Runtime runtime, Executor executor, HelpView helpView) {
this.view = view;
this.dataPlateController = dataPlateController;
this.collectResultsController = collectResultsController;
this.monitorCompletionController = monitorCompletionController;
this.comparisonModule = comparisonModule;
this.rp1210 = rp1210;
this.reportFileModule = reportFileModule;
this.executor = executor;
this.helpView = helpView;
// Give the report file module a chance to finish its bookkeeping when the JVM exits
runtime.addShutdownHook(new Thread(() -> reportFileModule.onProgramExit(), "Shutdown Hook Thread"));
}
// Updates the progress-bar prompt and button enablement based on how much of the
// setup (adapter, report file) the user has completed so far.
private void checkSetupComplete() {
getView().setAdapterComboBoxEnabled(true);
getView().setSelectFileButtonEnabled(true);
if (getSelectedAdapter() == null) {
getView().setProgressBarText("Select Vehicle Adapter");
getView().setSelectFileButtonEnabled(false);
} else if (getReportFile() == null) {
getView().setProgressBarText("Select Report File");
} else {
getView().setProgressBarText("Push Read Vehicle Info Button");
getView().setReadVehicleInfoButtonEnabled(true);
}
}
// Called when the CollectResultsController finishes. 'success' is ignored:
// the same buttons are re-enabled either way.
private void collectTestResultsComplete(boolean success) {
getView().setCollectTestResultsButtonEnabled(true);
getView().setMonitorCompletionButtonEnabled(true);
setActiveController(null);
}
// Called when the DataPlateController finishes; on failure the user may retry
// by re-reading vehicle info or regenerating the data plate.
private void dataPlateReportComplete(boolean success) {
if (success) {
getView().setCollectTestResultsButtonEnabled(true);
getView().setMonitorCompletionButtonEnabled(true);
} else {
getView().setReadVehicleInfoButtonEnabled(true);
getView().setGenerateDataPlateButtonEnabled(true);
}
setActiveController(null);
}
@Override
public void disconnect() {
// instanceof is false for null, so the explicit null check is redundant but harmless;
// only an RP1210Bus needs to be explicitly stopped
if (bus != null && bus instanceof RP1210Bus) {
try {
((RP1210Bus) bus).stop();
} catch (BusException e) {
getLogger().log(Level.SEVERE, "Unable to disconnect from adapter", e);
}
}
}
/**
* Returns the {@link Controller} that is currently executing
*
* @return the activeController
*/
public Controller getActiveController() {
return activeController;
}
/*
* (non-Javadoc)
*
* @see net.soliddesign.iumpr.ui.IUserInterfaceController#getAdapters()
*/
@Override
public List<Adapter> getAdapters() {
// Lazily loaded on first request; on failure the list stays empty and the
// user is notified via a dialog (no exception propagates)
if (adapters == null) {
adapters = new ArrayList<>();
try {
adapters.addAll(rp1210.getAdapters());
} catch (Exception e) {
getView().displayDialog("The List of Communication Adapters could not be loaded.", "Failure",
JOptionPane.ERROR_MESSAGE, false);
}
}
return adapters;
}
private ComparisonModule getComparisonModule() {
return comparisonModule;
}
private Logger getLogger() {
return IUMPR.getLogger();
}
@Override
public J1939 getNewJ1939() {
// A fresh J1939 wrapper around the current bus for each caller
return new J1939(bus);
}
/**
* Return the Report File
*
* @return the reportFile
*/
File getReportFile() {
return reportFile;
}
/**
* Returns the {@link ReportFileModule}
*
* @return the {@link ReportFileModule}
*/
@Override
public ReportFileModule getReportFileModule() {
return reportFileModule;
}
// Builds a listener that forwards controller progress/results to the view and
// dispatches completion to the handler matching the active controller type.
private ResultsListener getResultsListener() {
return new ResultsListener() {
@Override
public void onComplete(boolean success) {
if (getActiveController() instanceof DataPlateController) {
dataPlateReportComplete(success);
} else if (getActiveController() instanceof CollectResultsController) {
collectTestResultsComplete(success);
} else if (getActiveController() instanceof MonitorCompletionController) {
monitorCompletionComplete(success);
}
}
@Override
public void onMessage(String message, String title, int type) {
getView().displayDialog(message, title, type, false);
}
@Override
public void onProgress(int currentStep, int totalSteps, String message) {
getView().setProgressBarValue(0, totalSteps, currentStep);
getView().setProgressBarText(message);
}
@Override
public void onProgress(String message) {
getView().setProgressBarText(message);
}
@Override
public void onResult(List<String> results) {
for (String result : results) {
getView().appendResults(result + NL);
}
}
@Override
public void onResult(String result) {
getView().appendResults(result + NL);
}
@Override
public void onUrgentMessage(String message, String title, int type) {
getView().displayDialog(message, title, type, true);
}
};
}
/**
* Returns the selected Adapter
*
* @return the selectedAdapter
*/
Adapter getSelectedAdapter() {
return selectedAdapter;
}
private IUserInterfaceView getView() {
return view;
}
@Override
public String getVin() {
return vin;
}
/**
* @return the newFile
*/
private boolean isNewFile() {
return newFile;
}
// Called when the MonitorCompletionController finishes; also hides the status view
private void monitorCompletionComplete(boolean success) {
getView().setCollectTestResultsButtonEnabled(true);
getView().setMonitorCompletionButtonEnabled(true);
setActiveController(null);
getView().setStatusViewVisible(false);
}
@Override
public void onAdapterComboBoxItemSelected(String selectedAdapterName) {
// Connecting to the adapter can take "a while"
executor.execute(() -> {
resetView();
getView().setAdapterComboBoxEnabled(false);
getView().setSelectFileButtonEnabled(false);
getView().setProgressBarText("Connecting to Adapter");
// Match the combo-box selection (a display name) back to its Adapter
Adapter matchedAdapter = null;
for (Adapter adapter : getAdapters()) {
String name = adapter.getName();
if (name.equals(selectedAdapterName)) {
matchedAdapter = adapter;
break;
}
}
setSelectedAdapter(matchedAdapter);
checkSetupComplete();
});
}
/*
* (non-Javadoc)
*
* @see net.soliddesign.iumpr.ui.IUserInterfaceController#
* onCollectTestResultsButtonClicked()
*/
@Override
public void onCollectTestResultsButtonClicked() {
getView().setCollectTestResultsButtonEnabled(false);
getView().setMonitorCompletionButtonEnabled(false);
runController(collectResultsController);
}
/*
* (non-Javadoc)
*
* @see
* net.soliddesign.iumpr.ui.IUserInterfaceController#onFileChosen(java.io.
* File)
*/
@Override
public void onFileChosen(File file) {
executor.execute(() -> {
resetView();
getView().setAdapterComboBoxEnabled(false);
getView().setSelectFileButtonEnabled(false);
getView().setProgressBarText("Scanning Report File");
try {
File reportFile = setupReportFile(file);
setReportFile(reportFile);
getView().setSelectFileButtonText(reportFile.getAbsolutePath());
} catch (IOException e) {
getLogger().log(Level.SEVERE, "Error Reading File", e);
// NOTE: the try-block local 'reportFile' is out of scope here, so this
// assignment clears the field, leaving the controller with no report file
reportFile = null;
getView().setSelectFileButtonText(null);
String message = "File cannot be used.";
if (e.getMessage() != null) {
message += NL + e.getMessage();
}
message += NL + "Please select a different file.";
getView().displayDialog(message, "File Error", JOptionPane.ERROR_MESSAGE, false);
}
checkSetupComplete();
getView().setAdapterComboBoxEnabled(true);
getView().setSelectFileButtonEnabled(true);
});
}
/*
* (non-Javadoc)
*
* @see net.soliddesign.iumpr.ui.IUserInterfaceController#
* onGenerateDataPlateButtonClicked()
*/
@Override
public void onGenerateDataPlateButtonClicked() {
getView().setGenerateDataPlateButtonEnabled(false);
getView().setReadVehicleInfoButtonEnabled(false);
getView().setAdapterComboBoxEnabled(false);
getView().setSelectFileButtonEnabled(false);
runController(dataPlateController);
}
@Override
public void onHelpButtonClicked() {
helpView.setVisible(true);
}
/*
* (non-Javadoc)
*
* @see net.soliddesign.iumpr.ui.IUserInterfaceController#
* onMonitorCompletionButtonClicked()
*/
@Override
public void onMonitorCompletionButtonClicked() {
getView().setMonitorCompletionButtonEnabled(false);
getView().setCollectTestResultsButtonEnabled(false);
getView().setStatusViewVisible(true);
runController(monitorCompletionController);
}
/*
* (non-Javadoc)
*
* @see net.soliddesign.iumpr.ui.IUserInterfaceController#
* onReadVehicleInfoButtonClicked()
*/
@Override
public void onReadVehicleInfoButtonClicked() {
executor.execute(() -> {
resetView();
getView().setAdapterComboBoxEnabled(false);
getView().setSelectFileButtonEnabled(false);
boolean result = false;
ResultsListener resultsListener = getResultsListener();
try {
// Steps 1-2 read the VIN and calibrations; the comparison module
// reports the remaining steps through the same listener
resultsListener.onProgress(1, 6, "Reading Vehicle Identification Number");
vin = getComparisonModule().getVin();
getView().setVin(vin);
resultsListener.onProgress(2, 6, "Reading Vehicle Calibrations");
String cals = getComparisonModule().getCalibrationsAsString();
getView().setEngineCals(cals);
result = getComparisonModule().compareFileToVehicle(resultsListener, getReportFileModule(), 2, 6);
} catch (IOException e) {
getView().setProgressBarText(e.getMessage());
getView().displayDialog(e.getMessage(), "Communications Error", JOptionPane.ERROR_MESSAGE, false);
} finally {
// Only enable the data-plate flow when the vehicle matched the report file
if (result) {
getView().setProgressBarText("Push Generate Vehicle Data Plate Button");
}
getView().setGenerateDataPlateButtonEnabled(result);
getView().setStopButtonEnabled(result);
getView().setReadVehicleInfoButtonEnabled(true);
getView().setAdapterComboBoxEnabled(true);
getView().setSelectFileButtonEnabled(true);
}
});
}
/*
* (non-Javadoc)
*
* @see net.soliddesign.iumpr.ui.IUserInterfaceController#
* onSelectFileButtonClicked()
*/
@Override
public void onSelectFileButtonClicked() {
getView().displayFileChooser();
}
@Override
public void onStatusViewClosed() {
monitorCompletionController.endTracking();
}
/*
* (non-Javadoc)
*
* @see
* net.soliddesign.iumpr.ui.IUserInterfaceController#onStopButtonClicked()
*/
@Override
public void onStopButtonClicked() {
if (getActiveController() != null && getActiveController().isActive()) {
getActiveController().stop();
getView().setStatusViewVisible(false);
}
}
// Clears the VIN/calibration display and disables the action buttons before
// starting a fresh adapter/file/vehicle-info cycle
private void resetView() {
getComparisonModule().reset();
vin = null;
getView().setVin("");
getView().setEngineCals("");
getView().setGenerateDataPlateButtonEnabled(false);
getView().setStopButtonEnabled(false);
getView().setReadVehicleInfoButtonEnabled(false);
}
// Makes the given controller active and starts it with a fresh listener,
// J1939 connection and the report file module
private void runController(Controller controller) {
setActiveController(controller);
getActiveController().execute(getResultsListener(), getNewJ1939(), getReportFileModule());
}
/**
* Sets the {@link Controller} actively being executed. This is exposed for
* testing.
*
* @param controller
* the active {@link Controller}
*/
void setActiveController(Controller controller) {
// Stop any still-running controller before replacing it
if (activeController != null && activeController.isActive()) {
activeController.stop();
}
activeController = controller;
}
private void setBus(Bus bus) throws BusException {
this.bus = bus;
getComparisonModule().setJ1939(getNewJ1939());
}
/**
* @param newFile
* the newFile to set
*/
private void setNewFile(boolean newFile) {
this.newFile = newFile;
}
/**
* Sets the Report File with no additional logic. This should only be used
* for testing.
*
* @param file
* the report file to use
* @throws IOException
* if there is a problem setting the report file
*/
void setReportFile(File file) throws IOException {
getReportFileModule().setReportFile(getResultsListener(), file, isNewFile());
reportFile = file;
}
/**
* Sets the selected adapter. This should only be used for testing.
*
* @param selectedAdapter
* the selectedAdapter to set
*/
private void setSelectedAdapter(Adapter selectedAdapter) {
try {
Bus bus;
if (selectedAdapter != null) {
// 0xF9 is the address passed to the adapter — presumably the tool's
// J1939 source address; confirm against the RP1210 module
bus = rp1210.setAdapter(selectedAdapter, 0xF9);
} else {
bus = null;
}
this.selectedAdapter = selectedAdapter;
if (bus != null) {
setBus(bus);
}
} catch (BusException e) {
getLogger().log(Level.SEVERE, "Error Setting Adapter", e);
getView().displayDialog("Communications could not be established using the selected adapter.",
"Communication Failure", JOptionPane.ERROR_MESSAGE, false);
}
}
/**
* Checks the given {@link File} to determine if it's a valid file for using
* to store the Report. If it's valid, the report file is returned.
*
* @param file
* the {@link File} to check
* @return The file to be used for the report
* @throws IOException
* if the file cannot be used
*/
private File setupReportFile(File file) throws IOException {
File reportFile = file;
if (!file.exists()) {
setNewFile(true);
// Append the file extension if the file doesn't have one.
if (!file.getName().endsWith("." + FILE_SUFFIX)) {
// Recurse once with the suffixed name; that call takes the same
// !exists() branch and creates the file
return setupReportFile(new File(file.getAbsolutePath() + "." + FILE_SUFFIX));
}
if (!reportFile.createNewFile()) {
throw new IOException("File cannot be created");
}
} else {
setNewFile(false);
boolean writable = false;
try {
writable = reportFile.canWrite();
} catch (SecurityException e) {
writable = false;
}
if (!writable) {
throw new IOException("File cannot be written");
}
}
return reportFile;
}
}
| |
/*PLEASE DO NOT EDIT THIS CODE*/
/*This code was generated using the UMPLE 1.15.0.963 modeling language!*/
package net.n3.nanoxml;
import java.io.Reader;
import java.io.IOException;
import java.util.Properties;
public class ValidatorPlugin
{
//------------------------
// MEMBER VARIABLES
//------------------------
//------------------------
// CONSTRUCTOR
//------------------------
// No state to initialize; the delegate validator is injected later via setDelegate().
public ValidatorPlugin()
{}
//------------------------
// INTERFACE
//------------------------
// Generated (UMPLE) teardown hook; intentionally a no-op since this class holds no
// resources that need explicit release.
public void delete()
{}
//------------------------
// DEVELOPER CODE - PROVIDED AS-IS
//------------------------
// The validator all calls are forwarded to; null until setDelegate() is called.
private IXMLValidator delegate;
/**
* Cleans up the object when it's destroyed.
*
* NOTE(review): Object.finalize() is deprecated in modern Java and clearing the
* reference here has no effect on collectability — kept for compatibility with
* the original generated code; consider removing if the runtime permits.
*/
protected void finalize()
throws Throwable
{
this.delegate = null;
super.finalize();
}
/**
 * Returns the validator that this plugin forwards all calls to.
 *
 * @return the delegate validator, or null when none has been set yet
 */
public IXMLValidator getDelegate()
{
    return delegate;
}
/**
 * Installs the validator that this plugin will forward all calls to.
 *
 * @param delegate the validator to delegate to
 */
public void setDelegate(IXMLValidator delegate)
{
    this.delegate = delegate;
}
/**
 * Forwards the parameter entity resolver to the delegate validator.
 * Throws a NullPointerException if no delegate has been set.
 *
 * @param resolver the entity resolver to install
 */
public void setParameterEntityResolver(IXMLEntityResolver resolver)
{
    delegate.setParameterEntityResolver(resolver);
}
/**
 * Fetches the parameter entity resolver from the delegate validator.
 * Throws a NullPointerException if no delegate has been set.
 *
 * @return the entity resolver currently installed on the delegate
 */
public IXMLEntityResolver getParameterEntityResolver()
{
    return delegate.getParameterEntityResolver();
}
/**
 * Forwards DTD parsing to the delegate validator, which is responsible for
 * reading the full DTD.
 *
 * @param publicID       the public ID, which may be null
 * @param reader         the reader to read the DTD from
 * @param entityResolver the entity resolver
 * @param external       true if the DTD is external
 *
 * @throws java.lang.Exception if something went wrong
 */
public void parseDTD(String publicID, IXMLReader reader,
                     IXMLEntityResolver entityResolver, boolean external)
        throws Exception
{
    delegate.parseDTD(publicID, reader, entityResolver, external);
}
/**
* Indicates that an element has been started.
*
* @param name the name of the element.
* @param systemId the system ID of the XML data of the element.
* @param lineNr the line number in the XML data of the element.
*
* @throws java.lang.Exception
* if the element could not be validated.
*/
public void elementStarted(String name,
String systemId,
int lineNr)
throws Exception
{
this.delegate.elementStarted(name, systemId, lineNr);
}
/**
* Indicates that the current element has ended.
*
* @param name the name of the element.
* @param systemId the system ID of the XML data of the element.
* @param lineNr the line number in the XML data of the element.
*
* @throws java.lang.Exception
* if the element could not be validated.
*/
public void elementEnded(String name,
String systemId,
int lineNr)
throws Exception
{
this.delegate.elementEnded(name,systemId, lineNr);
}
/**
* Indicates that an attribute has been added to the current element.
*
* @param name the name of the element.
* @param extraAttributes where to put extra attributes.
* @param systemId the system ID of the XML data of the element.
* @param lineNr the line number in the XML data of the element.
*
* @throws java.lang.Exception
* if the attribute could not be validated.
*/
public void elementAttributesProcessed(String name,
Properties extraAttributes,
String systemId,
int lineNr)
throws Exception
{
this.delegate.elementAttributesProcessed(name, extraAttributes,
systemId, lineNr);
}
/**
* This method is called when the attributes of an XML element have been
* processed.
* If there are attributes with a default value which have not been
* specified yet, they have to be put into <I>extraAttributes</I>.
*
* @param key the name of the attribute.
* @param value the value of the attribute.
* @param systemId the system ID of the XML data of the element.
* @param lineNr the line number in the XML data of the element.
*
* @throws java.lang.Exception
* if the element could not be validated.
*/
public void attributeAdded(String key,
String value,
String systemId,
int lineNr)
throws Exception
{
this.delegate.attributeAdded(key, value, systemId, lineNr);
}
/**
* Indicates that a new #PCDATA element has been encountered.
*
* @param systemId the system ID of the XML data of the element.
* @param lineNr the line number in the XML data of the element.
*
* @throws java.lang.Exception
* if the element could not be validated.
*/
public void PCDataAdded(String systemId,
int lineNr)
throws Exception
{
this.delegate.PCDataAdded(systemId, lineNr);
}
/**
* Throws an XMLValidationException to indicate that an element is missing.
*
* @param systemID the system ID of the XML data of the element
* @param lineNr the line number in the XML data of the element
* @param parentElementName the name of the parent element
* @param missingElementName the name of the missing element
*
* @throws net.n3.nanoxml.XMLValidationException
* of course :-)
*/
public void missingElement(String systemID,
int lineNr,
String parentElementName,
String missingElementName)
throws XMLValidationException
{
XMLUtil.errorMissingElement(systemID, lineNr, parentElementName,
missingElementName);
}
/**
* Throws an XMLValidationException to indicate that an element is
* unexpected.
*
* @param systemID the system ID of the XML data of the
* element
* @param lineNr the line number in the XML data of the
* element
* @param parentElementName the name of the parent element
* @param unexpectedElementName the name of the missing element
*
* @throws net.n3.nanoxml.XMLValidationException
* of course :-)
*/
public void unexpectedElement(String systemID,
int lineNr,
String parentElementName,
String unexpectedElementName)
throws XMLValidationException
{
XMLUtil.errorUnexpectedElement(systemID, lineNr, parentElementName,
unexpectedElementName);
}
/**
* Throws an XMLValidationException to indicate that an attribute is
* missing.
*
* @param systemID the system ID of the XML data of the element
* @param lineNr the line number in the XML data of the element
* @param elementName the name of the element
* @param attributeName the name of the missing attribute
*
* @throws net.n3.nanoxml.XMLValidationException
* of course :-)
*/
public void missingAttribute(String systemID,
int lineNr,
String elementName,
String attributeName)
throws XMLValidationException
{
XMLUtil.errorMissingAttribute(systemID, lineNr, elementName,
attributeName);
}
/**
* Throws an XMLValidationException to indicate that an attribute is
* unexpected.
*
* @param systemID the system ID of the XML data of the element
* @param lineNr the line number in the XML data of the element
* @param elementName the name of the element
* @param attributeName the name of the unexpected attribute
*
* @throws net.n3.nanoxml.XMLValidationException
* of course :-)
*/
public void unexpectedAttribute(String systemID,
int lineNr,
String elementName,
String attributeName)
throws XMLValidationException
{
XMLUtil.errorUnexpectedAttribute(systemID, lineNr, elementName,
attributeName);
}
/**
* Throws an XMLValidationException to indicate that an attribute has an
* invalid value.
*
* @param systemID the system ID of the XML data of the element
* @param lineNr the line number in the XML data of the element
* @param elementName the name of the element
* @param attributeName the name of the attribute
* @param attributeValue the value of the attribute
*
* @throws net.n3.nanoxml.XMLValidationException
* of course :-)
*/
public void invalidAttributeValue(String systemID,
int lineNr,
String elementName,
String attributeName,
String attributeValue)
throws XMLValidationException
{
XMLUtil.errorInvalidAttributeValue(systemID, lineNr, elementName,
attributeName, attributeValue);
}
/**
* Throws an XMLValidationException to indicate that a #PCDATA element was
* missing.
*
* @param systemID the system ID of the XML data of the element
* @param lineNr the line number in the XML data of the element
* @param parentElementName the name of the parent element
*
* @throws net.n3.nanoxml.XMLValidationException
* of course :-)
*/
public void missingPCData(String systemID,
int lineNr,
String parentElementName)
throws XMLValidationException
{
XMLUtil.errorMissingPCData(systemID, lineNr, parentElementName);
}
/**
* Throws an XMLValidationException to indicate that a #PCDATA element was
* unexpected.
*
* @param systemID the system ID of the XML data of the element
* @param lineNr the line number in the XML data of the element
* @param parentElementName the name of the parent element
*
* @throws net.n3.nanoxml.XMLValidationException
* of course :-)
*/
public void unexpectedPCData(String systemID,
int lineNr,
String parentElementName)
throws XMLValidationException
{
XMLUtil.errorUnexpectedPCData(systemID, lineNr, parentElementName);
}
/**
* Throws an XMLValidationException.
*
* @param systemID the system ID of the XML data of the element
* @param lineNr the line number in the XML data of the element
* @param message the error message
* @param elementName the name of the element (may be null)
* @param attributeName the name of the attribute (may be null)
* @param attributeValue the value of the attribute (may be null)
*
* @throws net.n3.nanoxml.XMLValidationException
* of course :-)
*/
public void validationError(String systemID,
int lineNr,
String message,
String elementName,
String attributeName,
String attributeValue)
throws XMLValidationException
{
XMLUtil.validationError(systemID, lineNr, message, elementName,
attributeName, attributeValue);
}
}
| |
package de.gurkenlabs.litiengine.environment.tilemap.xml;
import java.awt.Dimension;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;
import jakarta.xml.bind.Marshaller;
import jakarta.xml.bind.Unmarshaller;
import jakarta.xml.bind.annotation.XmlAccessType;
import jakarta.xml.bind.annotation.XmlAccessorType;
import jakarta.xml.bind.annotation.XmlAttribute;
import jakarta.xml.bind.annotation.XmlElement;
import jakarta.xml.bind.annotation.XmlElementWrapper;
import jakarta.xml.bind.annotation.XmlRootElement;
import jakarta.xml.bind.annotation.XmlTransient;
import de.gurkenlabs.litiengine.environment.tilemap.ICustomProperty;
import de.gurkenlabs.litiengine.environment.tilemap.IMapImage;
import de.gurkenlabs.litiengine.environment.tilemap.ITerrain;
import de.gurkenlabs.litiengine.environment.tilemap.ITile;
import de.gurkenlabs.litiengine.environment.tilemap.ITileOffset;
import de.gurkenlabs.litiengine.environment.tilemap.ITileset;
import de.gurkenlabs.litiengine.environment.tilemap.ITilesetEntry;
import de.gurkenlabs.litiengine.graphics.Spritesheet;
import de.gurkenlabs.litiengine.resources.Resources;
import de.gurkenlabs.litiengine.util.io.FileUtilities;
import de.gurkenlabs.litiengine.util.io.XmlUtilities;
@XmlRootElement(name = "tileset")
@XmlAccessorType(XmlAccessType.FIELD)
public class Tileset extends CustomPropertyProvider implements ITileset {
private static final Logger log = Logger.getLogger(Tileset.class.getName());
public static final String FILE_EXTENSION = "tsx";
@XmlAttribute
private int firstgid;
@XmlElement
private MapImage image;
@XmlAttribute
private Integer margin;
@XmlAttribute
private String name;
@XmlAttribute
private Integer tilewidth;
@XmlAttribute
private Integer tileheight;
@XmlElement(name = "tileoffset")
private TileOffset tileoffset;
@XmlAttribute
private Integer tilecount;
@XmlAttribute
private Integer columns;
@XmlAttribute
private Integer spacing;
@XmlAttribute
private String source;
@XmlElementWrapper(name = "terraintypes")
@XmlElement(name = "terrain")
private List<Terrain> terrainTypes = null;
@XmlElement(name = "tile")
private List<TilesetEntry> tiles = null;
@XmlTransient
private List<TilesetEntry> allTiles;
@XmlTransient
protected Tileset sourceTileset;
private transient Spritesheet spriteSheet;
public Tileset() {
Resources.images().addClearedListener(() -> this.spriteSheet = null);
}
public Tileset(Tileset source) {
this.source = source.getName() + "." + FILE_EXTENSION;
this.sourceTileset = source;
this.firstgid = 1;
}
@Override
public Map<String, ICustomProperty> getProperties() {
return this.sourceTileset != null ? this.sourceTileset.getProperties() : super.getProperties();
}
@Override
public int getFirstGridId() {
return this.firstgid;
}
@Override
public IMapImage getImage() {
return this.sourceTileset != null ? this.sourceTileset.getImage() : this.image;
}
/**
* Gets the margin.
*
* @return the margin
*/
@Override
public int getMargin() {
if (this.sourceTileset != null) {
return this.sourceTileset.getMargin();
}
if (this.margin == null) {
return 0;
}
return this.margin;
}
@Override
public String getName() {
return this.sourceTileset != null ? this.sourceTileset.getName() : this.name;
}
@Override
public void setName(String name) {
this.name = name;
}
/**
* Gets the spacing.
*
* @return the spacing
*/
@Override
public int getSpacing() {
if (this.sourceTileset != null) {
return this.sourceTileset.getSpacing();
}
if (this.spacing == null) {
return 0;
}
return this.spacing;
}
@Override
@XmlTransient
public Spritesheet getSpritesheet() {
if (this.spriteSheet == null && this.getImage() != null) {
this.spriteSheet = Resources.spritesheets().get(this.getImage().getSource());
if (this.spriteSheet == null) {
this.spriteSheet = Resources.spritesheets().load(this);
if (this.spriteSheet == null) {
return null;
}
}
}
return this.spriteSheet;
}
@Override
public Dimension getTileDimension() {
return this.sourceTileset != null ? this.sourceTileset.getTileDimension() : new Dimension(this.getTileWidth(), this.getTileHeight());
}
/**
* Gets the tile height.
*
* @return the tile height
*/
@Override
public int getTileHeight() {
return this.sourceTileset != null ? this.sourceTileset.getTileHeight() : this.tileheight;
}
/**
* Gets the tile width.
*
* @return the tile width
*/
@Override
public int getTileWidth() {
return this.sourceTileset != null ? this.sourceTileset.getTileWidth() : this.tilewidth;
}
@Override
public List<ITerrain> getTerrainTypes() {
if (this.sourceTileset != null) {
return this.sourceTileset.getTerrainTypes();
}
List<ITerrain> types = new ArrayList<>();
if (this.terrainTypes == null) {
return types;
}
for (int i = 0; i < this.terrainTypes.size(); i++) {
types.add(i, this.terrainTypes.get(i));
}
return types;
}
@Override
public ITerrain[] getTerrain(int tileId) {
if (this.sourceTileset != null) {
return this.sourceTileset.getTerrain(tileId);
}
ITerrain[] terrains = new ITerrain[4];
if (!this.containsTile(tileId)) {
return terrains;
}
TilesetEntry tile = this.allTiles.get(tileId);
int[] tileTerrains = tile.getTerrainIds();
for (int i = 0; i < 4; i++) {
if (tileTerrains[i] < 0 || tileTerrains[i] >= this.getTerrainTypes().size()) {
continue;
}
ITerrain terrain = this.getTerrainTypes().get(tileTerrains[i]);
if (terrain == null) {
continue;
}
terrains[i] = terrain;
}
return terrains;
}
@Override
public int getColumns() {
return this.sourceTileset != null ? this.sourceTileset.getColumns() : this.columns;
}
@Override
public ITileOffset getTileOffset() {
return this.sourceTileset != null ? this.sourceTileset.getTileOffset() : this.tileoffset;
}
@Override
public int getTileCount() {
if (this.sourceTileset != null) {
return this.sourceTileset.getTileCount();
}
return this.tilecount != null ? this.tilecount : 0;
}
@Override
public ITilesetEntry getTile(int id) {
if (this.sourceTileset != null) {
return this.sourceTileset.getTile(id);
}
if (id < 0 || id >= this.getTileCount()) {
return null;
}
return this.allTiles.get(id);
}
@Override
public boolean containsTile(ITile tile) {
ITilesetEntry entry = tile.getTilesetEntry();
return entry == null ? this.containsTile(tile.getGridId()) : this.containsTile(tile.getTilesetEntry());
}
@Override
public boolean containsTile(int tileId) {
return tileId >= this.firstgid && tileId < this.firstgid + this.getTileCount();
}
@Override
public boolean containsTile(ITilesetEntry entry) {
if (entry == null) {
return false;
}
if (this.sourceTileset != null) {
return this.sourceTileset.containsTile(entry);
}
return this.allTiles != null && this.allTiles.contains(entry);
}
@Override
public void finish(URL location) throws TmxException {
super.finish(location);
if (this.source != null) {
// don't reload the source if it's already been loaded in a resource bundle
if (this.sourceTileset == null) {
try {
URL url = new URL(location, this.source);
this.sourceTileset = Resources.tilesets().get(url);
if (this.sourceTileset == null) {
throw new MissingExternalTilesetException(this.source);
}
} catch (MalformedURLException e) {
throw new MissingExternalTilesetException(e);
}
}
} else {
super.finish(location);
if (this.image != null) {
this.image.finish(location);
}
if (this.terrainTypes != null) {
for (Terrain terrain : this.terrainTypes) {
terrain.finish(location);
}
}
if (this.tiles != null) {
// unsaved tiles don't need any post-processing
for (TilesetEntry entry : this.tiles) {
entry.finish(location);
}
}
}
}
public void saveSource(String basePath) {
if (this.sourceTileset == null) {
return;
}
XmlUtilities.save(this.sourceTileset, FileUtilities.combine(basePath, this.source), FILE_EXTENSION);
}
public boolean isExternal() {
return this.source != null;
}
public void load(List<Tileset> rawTilesets) {
if (this.source == null) {
return;
}
for (Tileset set : rawTilesets) {
String fileName = FileUtilities.getFileName(this.source);
if (set.getName() != null && set.getName().equals(fileName)) {
this.sourceTileset = set;
break;
}
}
}
@SuppressWarnings("unused")
private void afterUnmarshal(Unmarshaller u, Object parent) {
if (this.source == null) {
this.allTiles = new ArrayList<>(this.getTileCount());
if (this.tiles != null) {
this.allTiles.addAll(this.tiles);
}
// add missing entries
ListIterator<TilesetEntry> iter = this.allTiles.listIterator();
for (int i = 0; i < this.getTileCount(); i++) {
if (add(iter)) {
iter.add(new TilesetEntry(this, iter.nextIndex()));
}
}
if (iter.hasNext()) {
log.log(Level.WARNING, "tileset \"{0}\" had a tilecount attribute of {1} but had tile IDs going beyond that",
new Object[] {this.name, this.getTileCount()});
while (iter.hasNext()) {
int nextId = iter.next().getId();
iter.previous();
while (iter.nextIndex() < nextId) {
iter.add(new TilesetEntry(this, iter.nextIndex()));
}
}
this.tilecount = this.allTiles.size();
}
this.updateTileTerrain();
}
}
private static boolean add(ListIterator<TilesetEntry> iter) {
if (!iter.hasNext()) {
return true;
}
if (iter.next().getId() != iter.previousIndex()) {
iter.previous(); // move the cursor back
return true;
}
return false;
}
@SuppressWarnings("unused")
private void beforeMarshal(Marshaller m) {
if (this.sourceTileset != null) {
this.tilewidth = null;
this.tileheight = null;
this.tilecount = null;
this.columns = null;
} else {
this.tiles = new ArrayList<>(this.allTiles);
Iterator<TilesetEntry> iter = this.tiles.iterator();
while (iter.hasNext()) {
if (!iter.next().shouldBeSaved()) {
iter.remove();
}
}
}
if (this.margin != null && this.margin == 0) {
this.margin = null;
}
if (this.spacing != null && this.spacing == 0) {
this.spacing = null;
}
if (this.getProperties() != null && this.getProperties().isEmpty()) {
this.setProperties(null);
}
}
private void updateTileTerrain() {
if (this.sourceTileset == null && this.tiles != null) {
// only go through saved tiles because unsaved tiles can't have terrains
for (TilesetEntry entry : this.tiles) {
entry.setTerrains(this.getTerrain(entry.getId()));
}
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.arrow.vector;
import static org.junit.Assert.*;
import java.nio.charset.StandardCharsets;
import org.apache.arrow.memory.BufferAllocator;
import org.apache.arrow.memory.RootAllocator;
import org.apache.arrow.vector.complex.DenseUnionVector;
import org.apache.arrow.vector.complex.FixedSizeListVector;
import org.apache.arrow.vector.complex.ListVector;
import org.apache.arrow.vector.complex.StructVector;
import org.apache.arrow.vector.complex.UnionVector;
import org.apache.arrow.vector.complex.impl.NullableStructWriter;
import org.apache.arrow.vector.complex.impl.UnionFixedSizeListWriter;
import org.apache.arrow.vector.complex.impl.UnionListWriter;
import org.apache.arrow.vector.holders.NullableIntHolder;
import org.apache.arrow.vector.types.Types.MinorType;
import org.apache.arrow.vector.types.pojo.ArrowType;
import org.apache.arrow.vector.types.pojo.Field;
import org.apache.arrow.vector.types.pojo.FieldType;
import org.apache.arrow.vector.util.DataSizeRoundingUtil;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
/**
 * Tests for vector re-allocation behavior: capacity doubling on reAlloc(),
 * deterministic capacity after clear()/allocate cycles, and allocation sizing
 * helpers.
 *
 * <p>Fixes applied: {@code String.getBytes()} now always passes
 * {@link java.nio.charset.StandardCharsets#UTF_8} (the no-arg overload uses the
 * platform default charset), and {@code assertEquals} arguments follow the
 * JUnit (expected, actual) order so failure messages read correctly.
 */
public class TestVectorReAlloc {
  private BufferAllocator allocator;

  @Before
  public void init() {
    allocator = new RootAllocator(Long.MAX_VALUE);
  }

  @After
  public void terminate() throws Exception {
    allocator.close();
  }

  @Test
  public void testFixedType() {
    try (final UInt4Vector vector = new UInt4Vector("", allocator)) {
      vector.setInitialCapacity(512);
      vector.allocateNew();
      assertTrue(vector.getValueCapacity() >= 512);
      int initialCapacity = vector.getValueCapacity();
      try {
        vector.set(initialCapacity, 0);
        Assert.fail("Expected out of bounds exception");
      } catch (Exception e) {
        // ok: writing past capacity must fail before reAlloc
      }
      vector.reAlloc();
      assertTrue(vector.getValueCapacity() >= 2 * initialCapacity);
      vector.set(initialCapacity, 100);
      assertEquals(100, vector.get(initialCapacity));
    }
  }

  @Test
  public void testNullableType() {
    try (final VarCharVector vector = new VarCharVector("", allocator)) {
      vector.setInitialCapacity(512);
      vector.allocateNew();
      assertTrue(vector.getValueCapacity() >= 512);
      int initialCapacity = vector.getValueCapacity();
      try {
        vector.set(initialCapacity, "foo".getBytes(StandardCharsets.UTF_8));
        Assert.fail("Expected out of bounds exception");
      } catch (Exception e) {
        // ok: writing past capacity must fail before reAlloc
      }
      vector.reAlloc();
      assertTrue(vector.getValueCapacity() >= 2 * initialCapacity);
      vector.set(initialCapacity, "foo".getBytes(StandardCharsets.UTF_8));
      assertEquals("foo", new String(vector.get(initialCapacity), StandardCharsets.UTF_8));
    }
  }

  @Test
  public void testListType() {
    try (final ListVector vector = ListVector.empty("", allocator)) {
      vector.addOrGetVector(FieldType.nullable(MinorType.INT.getType()));
      vector.setInitialCapacity(512);
      vector.allocateNew();
      assertEquals(512, vector.getValueCapacity());
      try {
        vector.getInnerValueCountAt(2014);
        Assert.fail("Expected out of bounds exception");
      } catch (Exception e) {
        // ok: index beyond capacity must fail before reAlloc
      }
      vector.reAlloc();
      assertEquals(1024, vector.getValueCapacity());
      assertEquals(0, vector.getOffsetBuffer().getInt(2014 * ListVector.OFFSET_WIDTH));
    }
  }

  @Test
  public void testStructType() {
    try (final StructVector vector = StructVector.empty("", allocator)) {
      vector.addOrGet("", FieldType.nullable(MinorType.INT.getType()), IntVector.class);
      vector.setInitialCapacity(512);
      vector.allocateNew();
      assertEquals(512, vector.getValueCapacity());
      try {
        vector.getObject(513);
        Assert.fail("Expected out of bounds exception");
      } catch (Exception e) {
        // ok: index beyond capacity must fail before reAlloc
      }
      vector.reAlloc();
      assertEquals(1024, vector.getValueCapacity());
      assertNull(vector.getObject(513));
    }
  }

  @Test
  public void testVariableWidthTypeSetNullValues() {
    // Test ARROW-11223 bug is fixed
    try (final BaseVariableWidthVector v1 = new VarCharVector("var1", allocator)) {
      v1.setInitialCapacity(512);
      v1.allocateNew();
      int numNullValues1 = v1.getValueCapacity() + 1;
      for (int i = 0; i < numNullValues1; i++) {
        v1.setNull(i);
      }
      Assert.assertTrue(v1.getBufferSizeFor(numNullValues1) > 0);
    }
    try (final BaseLargeVariableWidthVector v2 = new LargeVarCharVector("var2", allocator)) {
      v2.setInitialCapacity(512);
      v2.allocateNew();
      int numNullValues2 = v2.getValueCapacity() + 1;
      for (int i = 0; i < numNullValues2; i++) {
        v2.setNull(i);
      }
      Assert.assertTrue(v2.getBufferSizeFor(numNullValues2) > 0);
    }
  }

  @Test
  public void testFixedAllocateAfterReAlloc() throws Exception {
    try (final IntVector vector = new IntVector("", allocator)) {
      /*
       * Allocate the default size, and then, reAlloc. This should double the allocation.
       */
      vector.allocateNewSafe(); // Initial allocation
      vector.reAlloc(); // Double the allocation size.
      int savedValueCapacity = vector.getValueCapacity();
      /*
       * Clear and allocate again.
       */
      vector.clear();
      vector.allocateNewSafe();
      /*
       * Verify that the buffer sizes haven't changed.
       */
      Assert.assertEquals(savedValueCapacity, vector.getValueCapacity());
    }
  }

  @Test
  public void testVariableAllocateAfterReAlloc() throws Exception {
    try (final VarCharVector vector = new VarCharVector("", allocator)) {
      /*
       * Allocate the default size, and then, reAlloc. This should double the allocation.
       */
      vector.allocateNewSafe(); // Initial allocation
      vector.reAlloc(); // Double the allocation size.
      int savedValueCapacity = vector.getValueCapacity();
      long savedValueBufferSize = vector.valueBuffer.capacity();
      /*
       * Clear and allocate again.
       */
      vector.clear();
      vector.allocateNewSafe();
      /*
       * Verify that the buffer sizes haven't changed.
       */
      Assert.assertEquals(savedValueCapacity, vector.getValueCapacity());
      Assert.assertEquals(savedValueBufferSize, vector.valueBuffer.capacity());
    }
  }

  @Test
  public void testLargeVariableAllocateAfterReAlloc() throws Exception {
    try (final LargeVarCharVector vector = new LargeVarCharVector("", allocator)) {
      /*
       * Allocate the default size, and then, reAlloc. This should double the allocation.
       */
      vector.allocateNewSafe(); // Initial allocation
      vector.reAlloc(); // Double the allocation size.
      int savedValueCapacity = vector.getValueCapacity();
      long savedValueBufferSize = vector.valueBuffer.capacity();
      /*
       * Clear and allocate again.
       */
      vector.clear();
      vector.allocateNewSafe();
      /*
       * Verify that the buffer sizes haven't changed.
       */
      Assert.assertEquals(savedValueCapacity, vector.getValueCapacity());
      Assert.assertEquals(savedValueBufferSize, vector.valueBuffer.capacity());
    }
  }

  @Test
  public void testVarCharAllocateNew() throws Exception {
    final int count = 6000;
    try (final VarCharVector vector = new VarCharVector("", allocator)) {
      vector.allocateNew(count);
      // verify that the validity buffer and value buffer have capacity for atleast 'count' elements.
      Assert.assertTrue(vector.getValidityBuffer().capacity() >= DataSizeRoundingUtil.divideBy8Ceil(count));
      Assert.assertTrue(vector.getOffsetBuffer().capacity() >= (count + 1) * BaseVariableWidthVector.OFFSET_WIDTH);
    }
  }

  @Test
  public void testLargeVarCharAllocateNew() throws Exception {
    final int count = 6000;
    try (final LargeVarCharVector vector = new LargeVarCharVector("", allocator)) {
      vector.allocateNew(count);
      // verify that the validity buffer and value buffer have capacity for atleast 'count' elements.
      Assert.assertTrue(vector.getValidityBuffer().capacity() >= DataSizeRoundingUtil.divideBy8Ceil(count));
      Assert.assertTrue(vector.getOffsetBuffer().capacity() >= (count + 1) * BaseLargeVariableWidthVector.OFFSET_WIDTH);
    }
  }

  @Test
  public void testVarCharAllocateNewUsingHelper() throws Exception {
    final int count = 6000;
    try (final VarCharVector vector = new VarCharVector("", allocator)) {
      AllocationHelper.allocateNew(vector, count);
      // verify that the validity buffer and value buffer have capacity for atleast 'count' elements.
      Assert.assertTrue(vector.getValidityBuffer().capacity() >= DataSizeRoundingUtil.divideBy8Ceil(count));
      Assert.assertTrue(vector.getOffsetBuffer().capacity() >= (count + 1) * BaseVariableWidthVector.OFFSET_WIDTH);
    }
  }

  @Test
  public void testLargeVarCharAllocateNewUsingHelper() throws Exception {
    final int count = 6000;
    try (final LargeVarCharVector vector = new LargeVarCharVector("", allocator)) {
      AllocationHelper.allocateNew(vector, count);
      // verify that the validity buffer and value buffer have capacity for atleast 'count' elements.
      Assert.assertTrue(vector.getValidityBuffer().capacity() >= DataSizeRoundingUtil.divideBy8Ceil(count));
      Assert.assertTrue(vector.getOffsetBuffer().capacity() >= (count + 1) * BaseLargeVariableWidthVector.OFFSET_WIDTH);
    }
  }

  @Test
  public void testFixedRepeatedClearAndSet() throws Exception {
    try (final IntVector vector = new IntVector("", allocator)) {
      vector.allocateNewSafe(); // Initial allocation
      vector.clear(); // clear vector.
      vector.setSafe(0, 10);
      int savedValueCapacity = vector.getValueCapacity();
      for (int i = 0; i < 1024; ++i) {
        vector.clear(); // clear vector.
        vector.setSafe(0, 10);
      }
      // should be deterministic, and not cause a run-away increase in capacity.
      Assert.assertEquals(savedValueCapacity, vector.getValueCapacity());
    }
  }

  @Test
  public void testVariableRepeatedClearAndSet() throws Exception {
    try (final VarCharVector vector = new VarCharVector("", allocator)) {
      vector.allocateNewSafe(); // Initial allocation
      vector.clear(); // clear vector.
      // FIX: specify the charset explicitly; the no-arg getBytes() uses the
      // platform default and makes the test environment-dependent.
      vector.setSafe(0, "hello world".getBytes(StandardCharsets.UTF_8));
      int savedValueCapacity = vector.getValueCapacity();
      for (int i = 0; i < 1024; ++i) {
        vector.clear(); // clear vector.
        vector.setSafe(0, "hello world".getBytes(StandardCharsets.UTF_8));
      }
      // should be deterministic, and not cause a run-away increase in capacity.
      Assert.assertEquals(savedValueCapacity, vector.getValueCapacity());
    }
  }

  @Test
  public void testRepeatedValueVectorClearAndSet() throws Exception {
    try (final ListVector vector = new ListVector("", allocator, FieldType.nullable(MinorType.INT.getType()), null)) {
      vector.allocateNewSafe(); // Initial allocation
      UnionListWriter writer = vector.getWriter();
      vector.clear(); // clear vector.
      writer.setPosition(0); // optional
      writer.startList();
      writer.writeInt(0);
      writer.endList();
      int savedValueCapacity = vector.getValueCapacity();
      for (int i = 0; i < 1024; ++i) {
        vector.clear(); // clear vector.
        writer.setPosition(0); // optional
        writer.startList();
        writer.writeInt(i);
        writer.endList();
      }
      // should be deterministic, and not cause a run-away increase in capacity.
      Assert.assertEquals(savedValueCapacity, vector.getValueCapacity());
    }
  }

  @Test
  public void testStructVectorClearAndSet() throws Exception {
    try (final StructVector vector = StructVector.empty("v", allocator)) {
      vector.allocateNewSafe(); // Initial allocation
      NullableStructWriter writer = vector.getWriter();
      vector.clear(); // clear vector.
      writer.setPosition(0); // optional
      writer.start();
      writer.integer("int").writeInt(0);
      writer.end();
      int savedValueCapacity = vector.getValueCapacity();
      for (int i = 0; i < 1024; ++i) {
        vector.clear(); // clear vector.
        writer.setPosition(0); // optional
        writer.start();
        writer.integer("int").writeInt(i);
        writer.end();
      }
      // should be deterministic, and not cause a run-away increase in capacity.
      Assert.assertEquals(savedValueCapacity, vector.getValueCapacity());
    }
  }

  @Test
  public void testFixedSizeListVectorClearAndSet() {
    try (final FixedSizeListVector vector = new FixedSizeListVector("", allocator,
        FieldType.nullable(new ArrowType.FixedSizeList(2)), null)) {
      vector.allocateNewSafe(); // Initial allocation
      UnionFixedSizeListWriter writer = vector.getWriter();
      vector.clear(); // clear vector.
      writer.setPosition(0); // optional
      writer.startList();
      writer.writeInt(0);
      writer.writeInt(1);
      writer.endList();
      int savedValueCapacity = vector.getValueCapacity();
      for (int i = 0; i < 1024; ++i) {
        vector.clear(); // clear vector.
        writer.setPosition(0); // optional
        writer.startList();
        writer.writeInt(i);
        writer.writeInt(i + 1);
        writer.endList();
      }
      // should be deterministic, and not cause a run-away increase in capacity.
      Assert.assertEquals(savedValueCapacity, vector.getValueCapacity());
    }
  }

  @Test
  public void testUnionVectorClearAndSet() {
    try (final UnionVector vector = new UnionVector("", allocator, null)) {
      vector.allocateNewSafe(); // Initial allocation
      NullableIntHolder holder = new NullableIntHolder();
      holder.isSet = 1;
      holder.value = 1;
      vector.clear(); // clear vector.
      vector.setType(0, MinorType.INT);
      vector.setSafe(0, holder);
      int savedValueCapacity = vector.getValueCapacity();
      for (int i = 0; i < 1024; ++i) {
        vector.clear(); // clear vector.
        vector.setType(0, MinorType.INT);
        vector.setSafe(0, holder);
      }
      // should be deterministic, and not cause a run-away increase in capacity.
      Assert.assertEquals(savedValueCapacity, vector.getValueCapacity());
    }
  }

  @Test
  public void testDenseUnionVectorClearAndSet() {
    try (final DenseUnionVector vector = new DenseUnionVector("", allocator, null, null)) {
      vector.allocateNewSafe(); // Initial allocation
      NullableIntHolder holder = new NullableIntHolder();
      holder.isSet = 1;
      holder.value = 1;
      byte intTypeId = vector.registerNewTypeId(Field.nullable("", MinorType.INT.getType()));
      vector.clear();
      vector.setTypeId(0, intTypeId);
      vector.setSafe(0, holder);
      int savedValueCapacity = vector.getValueCapacity();
      for (int i = 0; i < 1024; ++i) {
        vector.clear();
        vector.setTypeId(0, intTypeId);
        vector.setSafe(0, holder);
      }
      // should be deterministic, and not cause a run-away increase in capacity.
      Assert.assertEquals(savedValueCapacity, vector.getValueCapacity());
    }
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapreduce.v2.hs;
import java.io.IOException;
import java.security.PrivilegedExceptionAction;
import java.util.Timer;
import java.util.TimerTask;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.mapreduce.TypeConverter;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.jobhistory.FileNameIndexUtils;
import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
import org.apache.hadoop.mapreduce.v2.jobhistory.JobHistoryUtils;
import org.apache.hadoop.mapreduce.v2.jobhistory.JobIndexInfo;
import org.apache.hadoop.mapreduce.v2.util.MRApps;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.service.AbstractService;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
/**
* This service tries to add entries in the JHS for applications that failed
* or were killed
*/
@Private
public class KilledHistoryService extends AbstractService {
private static final Log LOG = LogFactory.getLog(KilledHistoryService.class);
private static final Pattern FLAG_FILE_PATTERN =
Pattern.compile("(\\S+)_appattempt_(\\d+)_(\\d+)_(\\d+)");
private static final Pattern SUBMIT_TIME_PATTERN =
Pattern.compile("submitTime=(\\d+)");
private static final Pattern FINISH_TIME_PATTERN =
Pattern.compile("finishTime=(\\d+)");
private static final Pattern JOB_NAME_PATTERN =
Pattern.compile("jobName=([^,]+)");
private static final Pattern NUM_MAPS_PATTERN =
Pattern.compile("numMaps=(\\d+)");
private static final Pattern NUM_REDUCES_PATTERN =
Pattern.compile("numReduces=(\\d+)");
private static final Pattern STATUS_PATTERN =
Pattern.compile("status=([^,]+)");
private static final Pattern QUEUE_PATTERN =
Pattern.compile("queue=([^,]+)");
private Timer timer = null;
private long checkIntervalMsecs;
@Override
protected void serviceInit(Configuration conf) throws Exception {
checkIntervalMsecs = conf.getLong(
JHAdminConfig.MR_HISTORY_MOVE_INTERVAL_MS,
JHAdminConfig.DEFAULT_MR_HISTORY_MOVE_INTERVAL_MS);
super.serviceInit(conf);
}
static class FlagFileHandler extends TimerTask {
private final Configuration conf;
private Path failDir = null;
private String intermediateDirPrefix = null;
public FlagFileHandler(Configuration conf) throws IOException {
this.conf = conf;
this.failDir = new Path(conf.get(
YarnConfiguration.YARN_AM_FAILURE_FLAG_DIR,
YarnConfiguration.DEFAULT_YARN_AM_FAILURE_FLAG_DIR));
this.intermediateDirPrefix =
JobHistoryUtils.getConfiguredHistoryIntermediateDoneDirPrefix(conf);
}
@Override
public void run() {
try {
FileSystem failDirFS = failDir.getFileSystem(conf);
if (failDirFS.exists(failDir)) {
for (FileStatus flagFileStatus : failDirFS.listStatus(failDir)) {
String flagFileName = flagFileStatus.getPath().getName();
Matcher m = FLAG_FILE_PATTERN.matcher(flagFileName);
if (m.matches()) {
final String user = m.group(1);
long timestamp = Long.parseLong(m.group(2));
int appId = Integer.parseInt(m.group(3));
final int attempt = Integer.parseInt(m.group(4));
ApplicationId applicationId =
ApplicationId.newInstance(timestamp, appId);
final JobId jobId =TypeConverter.toYarn(
TypeConverter.fromYarn(applicationId));
final Path intermediateDir = new Path(intermediateDirPrefix, user);
final Path stagingDirForJob = new Path(
MRApps.getStagingAreaDir(conf, user), jobId.toString());
try {
final Path inSummaryFile = new Path(stagingDirForJob,
JobHistoryUtils.getIntermediateSummaryFileName(jobId));
UserGroupInformation ugi =
UserGroupInformation.createProxyUser(user,
UserGroupInformation.getCurrentUser());
ugi.doAs(new PrivilegedExceptionAction<Void>() {
@Override
public Void run() throws IOException {
FileSystem fromFs = null;
FileSystem toFs = null;
try {
fromFs = stagingDirForJob.getFileSystem(conf);
toFs = intermediateDir.getFileSystem(conf);
JobIndexInfo jobIndexInfo =
buildJobIndexInfo(fromFs, inSummaryFile, jobId,
user);
String historyFilename =
FileNameIndexUtils.getDoneFileName(jobIndexInfo);
copy(fromFs, toFs, JobHistoryUtils.getStagingConfFile(
stagingDirForJob, jobId, attempt),
new Path(intermediateDir, JobHistoryUtils
.getIntermediateConfFileName(jobId)));
copy(fromFs, toFs, inSummaryFile,
new Path(intermediateDir, JobHistoryUtils
.getIntermediateSummaryFileName(jobId)));
copy(fromFs, toFs, JobHistoryUtils
.getStagingJobHistoryFile(stagingDirForJob,
jobId, attempt),
new Path(intermediateDir, historyFilename));
return null;
} finally {
// Close the FileSystem created by the new proxy user,
// So that we don't leave an entry in the FileSystem cache.
// Also FileSystem close is idempotent
if (fromFs != null) {
fromFs.close();
}
if (toFs != null) {
toFs.close();
}
}
}
});
failDirFS.delete(flagFileStatus.getPath(), false);
} catch (IOException ioe) {
removeFlagFileWithMessage(failDirFS, flagFileStatus.getPath(),
"Could not process job files", ioe);
} catch (InterruptedException ie) {
removeFlagFileWithMessage(failDirFS, flagFileStatus.getPath(),
"Could not process job files", ie);
}
} else {
removeFlagFileWithMessage(failDirFS, flagFileStatus.getPath(),
"Could not process fail flag file", null);
}
}
}
} catch (IOException ioe) {
LOG.info("Could not access fail flag dir", ioe);
}
}
private void removeFlagFileWithMessage(FileSystem failDirFS, Path flagFile,
String message, Exception ex) {
if (ex == null) {
LOG.warn(message);
} else {
LOG.warn(message, ex);
}
// Try to delete the flag file so we don't keep trying to process it
try {
failDirFS.delete(flagFile, false);
} catch (IOException ioe) {
// ignore
}
}
private void copy(FileSystem fromFs, FileSystem toFs, Path fromPath,
Path toPath) throws IOException {
LOG.info("Copying " + fromPath.toString() + " to " + toPath.toString());
boolean copied = FileUtil.copy(toFs, fromPath, fromFs, toPath,
false, conf);
if (copied) {
LOG.info("Copied to done location: " + toPath);
} else {
LOG.info("copy failed");
}
toFs.setPermission(toPath, new FsPermission(
JobHistoryUtils.HISTORY_INTERMEDIATE_FILE_PERMISSIONS));
}
private JobIndexInfo buildJobIndexInfo(FileSystem fs, Path summaryFile,
JobId jobId, String user) throws IOException {
FSDataInputStream in = fs.open(summaryFile);
String summaryString = in.readUTF();
in.close();
long submitTime =
extractLong(SUBMIT_TIME_PATTERN, summaryString, "submitTime");
long finishTime =
extractLong(FINISH_TIME_PATTERN, summaryString, "finishTime");
if (finishTime == 0) {
finishTime = submitTime; // prevent JHS from thinking it's too old
}
String jobName =
extractString(JOB_NAME_PATTERN, summaryString, "jobName");
int numMaps = extractInt(NUM_MAPS_PATTERN, summaryString, "numMaps");
if (numMaps == 0) {
numMaps = -1;
}
int numReduces =
extractInt(NUM_REDUCES_PATTERN, summaryString, "numReduces");
if (numReduces == 0) {
numReduces = -1;
}
String jobStatus = extractString(STATUS_PATTERN, summaryString, "status");
if (jobStatus.equals("null")) {
jobStatus = "FAILED"; // assume FAILED
}
String queue = extractString(QUEUE_PATTERN, summaryString, "queue");
JobIndexInfo info = new JobIndexInfo(submitTime, finishTime, user,
jobName, jobId, numMaps, numReduces, jobStatus);
info.setQueueName(queue);
return info;
}
private String extractString(Pattern pattern, String str, String type)
throws IOException {
String result = null;
Matcher m = pattern.matcher(str);
if (m.find()) {
result = m.group(1);
} else {
throw new IOException("Could not extract " + type
+ " field from summary file");
}
return result;
}
private long extractLong(Pattern pattern, String str, String type)
throws IOException {
String result = extractString(pattern, str, type);
return (result == null) ? -1L : Long.parseLong(result);
}
private int extractInt(Pattern pattern, String str, String type)
throws IOException {
String result = extractString(pattern, str, type);
return (result == null) ? -1 : Integer.parseInt(result);
}
}
public KilledHistoryService() {
super(KilledHistoryService.class.getName());
}
@Override
protected void serviceStart() throws Exception {
scheduleFlagHandlerTask();
super.serviceStart();
}
@Override
protected void serviceStop() throws Exception {
stopTimer();
super.serviceStop();
}
private void scheduleFlagHandlerTask() throws IOException {
Configuration conf = getConfig();
TimerTask task = new FlagFileHandler(conf);
timer = new Timer();
timer.scheduleAtFixedRate(task, 0, checkIntervalMsecs);
}
private void stopTimer() {
if (timer != null) {
timer.cancel();
}
}
}
| |
/*
* Copyright (c) 2021 SQLines
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sqlines.studio.model;
import org.jetbrains.annotations.NotNull;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.*;
import java.util.function.Consumer;
/**
* Works with application resources.
*/
public class ResourceLoader {
/**
* Loads source conversion mode from the application resources.
*
* @return list of source modes
*
* @throws IllegalStateException if the source modes file was not found
* in application resources or contains invalid data
* @throws IOException if any IO error occurred
*/
public static @NotNull List<String> loadSourceModes() throws IOException {
List<String> sourceModes = new ArrayList<>();
try (InputStream stream = ResourceLoader.class.getResourceAsStream("/source-modes.txt")) {
if (stream == null) {
String errorMsg = "File not found in application resources: source-modes.txt";
throw new IllegalStateException(errorMsg);
}
String data = new String(stream.readAllBytes(), StandardCharsets.UTF_8);
StringTokenizer tokenizer = new StringTokenizer(data, "\n");
while (tokenizer.hasMoreTokens()) {
String word = tokenizer.nextToken();
int endIndex = word.indexOf(':');
sourceModes.add(word.substring(0, endIndex));
}
}
return sourceModes;
}
/**
* Loads target conversion modes from the application resources.
*
* @return list of target modes
*
* @throws IllegalStateException if the target modes file was not found
* in application resources or contains invalid data
* @throws IOException if any IO error occurred
*/
public static @NotNull List<String> loadTargetModes() throws IOException {
List<String> targetModes = new ArrayList<>();
try (InputStream stream = ResourceLoader.class.getResourceAsStream("/target-modes.txt")) {
if (stream == null) {
String errorMsg = "File not found in application resources: target-modes.txt";
throw new IllegalStateException(errorMsg);
}
String data = new String(stream.readAllBytes(), StandardCharsets.UTF_8);
StringTokenizer tokenizer = new StringTokenizer(data, "\n");
while (tokenizer.hasMoreTokens()) {
String word = tokenizer.nextToken();
int endIndex = word.indexOf(':');
targetModes.add(word.substring(0, endIndex));
}
}
return targetModes;
}
/**
* Loads a map of conversion modes with their command-line
* designations from the application resources.
*
* @return a map of conversion modes with their command-line designations.
*
* @throws IllegalStateException if either source modes file or target modes file was not found
* in application resources or contains invalid data
* @throws IOException if any IO error occurred
*/
public static @NotNull Map<String, String> loadCmdModes() throws IOException {
Map<String, String> cmdModes = new HashMap<>();
try (InputStream sourceModes = ResourceLoader.class.getResourceAsStream("/source-modes.txt");
InputStream targetModes = ResourceLoader.class.getResourceAsStream("/target-modes.txt")) {
if (sourceModes == null || targetModes == null) {
String errorMsg = "File not found in application resources:" +
"source-modes.txt or target-modes.txt";
throw new IllegalStateException(errorMsg);
}
String sourceData = new String(sourceModes.readAllBytes(), StandardCharsets.UTF_8);
String targetData = new String(targetModes.readAllBytes(), StandardCharsets.UTF_8);
StringTokenizer sourceTokenizer = new StringTokenizer(sourceData, "\n");
StringTokenizer targetTokenizer = new StringTokenizer(targetData, "\n");
Consumer<String> addItem = data -> {
int endIndex = data.indexOf(':');
String key = data.substring(0, endIndex);
String value = data.substring(endIndex + 1);
cmdModes.put(key, value);
};
while (sourceTokenizer.hasMoreTokens()) {
addItem.accept(sourceTokenizer.nextToken());
}
while (targetTokenizer.hasMoreTokens()) {
addItem.accept(targetTokenizer.nextToken());
}
}
return cmdModes;
}
/**
* Loads main window light stylesheets from the application resources.
*
* @return main window light stylesheets
*
* @throws IllegalStateException if main window light stylesheets were not found
* in application resources
*/
public static @NotNull String loadMainLightStyles() {
URL mainLight = ResourceLoader.class.getResource("/styles/main-light.css");
if (mainLight == null) {
String errorMsg = "File not found in application resources: styles/main-light.css";
throw new IllegalStateException(errorMsg);
}
return mainLight.toExternalForm();
}
/**
* Loads main window dark stylesheets from the application resources.
*
* @return main window dark stylesheets
*
* @throws IllegalStateException if main window dark stylesheets were not found
* in application resources
*/
public static @NotNull String loadMainDarkStyles() {
URL mainDark = ResourceLoader.class.getResource("/styles/main-dark.css");
if (mainDark == null) {
String errorMsg = "File not found in application resources: styles/main-dark.css";
throw new IllegalStateException(errorMsg);
}
return mainDark.toExternalForm();
}
/**
* Loads settings window light stylesheets from the application resources.
*
* @return settings window light stylesheets
*
* @throws IllegalStateException if settings window light stylesheets were not found
* in application resources
*/
public static @NotNull String loadSettingLightStyles() {
URL settingsLight = ResourceLoader.class.getResource("/styles/settings-light.css");
if (settingsLight == null) {
String errorMsg = "File not found in application resources: styles/settings-light.css";
throw new IllegalStateException(errorMsg);
}
return settingsLight.toExternalForm();
}
/**
* Loads settings window dark stylesheets from the application resources.
*
* @return settings window dark stylesheets
*
* @throws IllegalStateException if settings window dark stylesheets were not found
* in application resources
*/
public static @NotNull String loadSettingDarkStyles() {
URL settingsDark = ResourceLoader.class.getResource("/styles/settings-dark.css");
if (settingsDark == null) {
String errorMsg = "File not found in application resources: styles/settings-dark.css";
throw new IllegalStateException(errorMsg);
}
return settingsDark.toExternalForm();
}
}
| |
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.components.minidump_uploader;
import android.support.test.filters.SmallTest;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.chromium.base.test.BaseJUnit4ClassRunner;
import org.chromium.base.test.util.Feature;
import org.chromium.components.minidump_uploader.CrashTestRule.MockCrashReportingPermissionManager;
import org.chromium.components.minidump_uploader.MinidumpUploadCallable.MinidumpUploadStatus;
import org.chromium.components.minidump_uploader.util.CrashReportingPermissionManager;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
/**
* Unittests for {@link MinidumpUploadCallable}.
*/
@RunWith(BaseJUnit4ClassRunner.class)
public class MinidumpUploadCallableTest {
private static final String LOCAL_CRASH_ID = "123_log";
private static final String LOG_FILE_NAME = "chromium_renderer-123_log.dmp224";
@Rule
public CrashTestRule mTestRule = new CrashTestRule();
private File mTestUpload;
private File mUploadLog;
private File mExpectedFileAfterUpload;
private static class MockMinidumpUploader extends MinidumpUploader {
private Result mMockResult;
public static MinidumpUploader returnsSuccess() {
return new MockMinidumpUploader(Result.success(MinidumpUploaderTest.UPLOAD_CRASH_ID));
}
public static MinidumpUploader returnsFailure(String message) {
return new MockMinidumpUploader(Result.failure(message));
}
public static MinidumpUploader returnsUploadError(int status, String message) {
return new MockMinidumpUploader(Result.uploadError(status, message));
}
private MockMinidumpUploader(Result mockResult) {
super(null);
mMockResult = mockResult;
}
@Override
public Result upload(File fileToUpload) {
return mMockResult;
}
}
private void createMinidumpFile() throws Exception {
mTestUpload = new File(mTestRule.getCrashDir(), LOG_FILE_NAME);
CrashTestRule.setUpMinidumpFile(mTestUpload, MinidumpUploaderTest.BOUNDARY);
}
private void setForcedUpload() {
File renamed =
new File(mTestRule.getCrashDir(), mTestUpload.getName().replace(".dmp", ".forced"));
mTestUpload.renameTo(renamed);
// Update the filename that tests will refer to.
mTestUpload = renamed;
}
@Before
public void setUp() throws Exception {
mUploadLog = new File(mTestRule.getCrashDir(), CrashFileManager.CRASH_DUMP_LOGFILE);
// Delete all logs from previous runs if possible.
mUploadLog.delete();
// Any created files will be cleaned up as part of CrashTestRule::tearDown().
createMinidumpFile();
mExpectedFileAfterUpload =
new File(mTestRule.getCrashDir(), mTestUpload.getName().replace(".dmp", ".up"));
}
@Test
@SmallTest
@Feature({"Android-AppBase"})
public void testSuccessfulUpload() throws Exception {
final CrashReportingPermissionManager testPermManager =
new MockCrashReportingPermissionManager() {
{ mIsEnabledForTests = true; }
};
MinidumpUploadCallable minidumpUploadCallable = new MinidumpUploadCallable(
mTestUpload, mUploadLog, MockMinidumpUploader.returnsSuccess(), testPermManager);
Assert.assertEquals(MinidumpUploadStatus.SUCCESS, minidumpUploadCallable.call().intValue());
Assert.assertTrue(mExpectedFileAfterUpload.exists());
assertValidUploadLogEntry();
}
@Test
@SmallTest
@Feature({"Android-AppBase"})
public void testFailedUploadLocalError() throws Exception {
final CrashReportingPermissionManager testPermManager =
new MockCrashReportingPermissionManager() {
{ mIsEnabledForTests = true; }
};
MinidumpUploadCallable minidumpUploadCallable = new MinidumpUploadCallable(mTestUpload,
mUploadLog, MockMinidumpUploader.returnsFailure("Failed"), testPermManager);
Assert.assertEquals(MinidumpUploadStatus.FAILURE, minidumpUploadCallable.call().intValue());
Assert.assertFalse(mExpectedFileAfterUpload.exists());
}
@Test
@SmallTest
@Feature({"Android-AppBase"})
public void testFailedUploadRemoteError() throws Exception {
final CrashReportingPermissionManager testPermManager =
new MockCrashReportingPermissionManager() {
{ mIsEnabledForTests = true; }
};
MinidumpUploadCallable minidumpUploadCallable =
new MinidumpUploadCallable(mTestUpload, mUploadLog,
MockMinidumpUploader.returnsUploadError(404, "Not Found"), testPermManager);
Assert.assertEquals(MinidumpUploadStatus.FAILURE, minidumpUploadCallable.call().intValue());
Assert.assertFalse(mExpectedFileAfterUpload.exists());
}
@Test
@SmallTest
@Feature({"Android-AppBase"})
public void testCallWhenCurrentlyPermitted() throws Exception {
CrashReportingPermissionManager testPermManager =
new MockCrashReportingPermissionManager() {
{
mIsInSample = true;
mIsUserPermitted = true;
mIsNetworkAvailable = true;
mIsEnabledForTests = false;
}
};
MinidumpUploadCallable minidumpUploadCallable = new MinidumpUploadCallable(
mTestUpload, mUploadLog, MockMinidumpUploader.returnsSuccess(), testPermManager);
Assert.assertEquals(MinidumpUploadStatus.SUCCESS, minidumpUploadCallable.call().intValue());
Assert.assertTrue(mExpectedFileAfterUpload.exists());
assertValidUploadLogEntry();
}
@Test
@SmallTest
@Feature({"Android-AppBase"})
public void testCallNotPermittedByUser() {
CrashReportingPermissionManager testPermManager =
new MockCrashReportingPermissionManager() {
{
mIsInSample = true;
mIsUserPermitted = false;
mIsNetworkAvailable = true;
mIsEnabledForTests = false;
}
};
MinidumpUploadCallable minidumpUploadCallable = new MinidumpUploadCallable(
mTestUpload, mUploadLog, MockMinidumpUploader.returnsSuccess(), testPermManager);
Assert.assertEquals(
MinidumpUploadStatus.USER_DISABLED, minidumpUploadCallable.call().intValue());
File expectedSkippedFileAfterUpload = new File(
mTestRule.getCrashDir(), mTestUpload.getName().replace(".dmp", ".skipped"));
Assert.assertTrue(expectedSkippedFileAfterUpload.exists());
Assert.assertFalse(mExpectedFileAfterUpload.exists());
}
@Test
@SmallTest
@Feature({"Android-AppBase"})
public void testCallPermittedButNotInSample() {
CrashReportingPermissionManager testPermManager =
new MockCrashReportingPermissionManager() {
{
mIsInSample = false;
mIsUserPermitted = true;
mIsNetworkAvailable = true;
mIsEnabledForTests = false;
}
};
MinidumpUploadCallable minidumpUploadCallable = new MinidumpUploadCallable(
mTestUpload, mUploadLog, MockMinidumpUploader.returnsSuccess(), testPermManager);
Assert.assertEquals(MinidumpUploadStatus.DISABLED_BY_SAMPLING,
minidumpUploadCallable.call().intValue());
File expectedSkippedFileAfterUpload = new File(
mTestRule.getCrashDir(), mTestUpload.getName().replace(".dmp", ".skipped"));
Assert.assertTrue(expectedSkippedFileAfterUpload.exists());
Assert.assertFalse(mExpectedFileAfterUpload.exists());
}
@Test
@SmallTest
@Feature({"Android-AppBase"})
public void testCallPermittedButNotUnderCurrentCircumstances() {
CrashReportingPermissionManager testPermManager =
new MockCrashReportingPermissionManager() {
{
mIsInSample = true;
mIsUserPermitted = true;
mIsNetworkAvailable = false;
mIsEnabledForTests = false;
}
};
MinidumpUploadCallable minidumpUploadCallable = new MinidumpUploadCallable(
mTestUpload, mUploadLog, MockMinidumpUploader.returnsSuccess(), testPermManager);
Assert.assertEquals(MinidumpUploadStatus.FAILURE, minidumpUploadCallable.call().intValue());
Assert.assertFalse(mExpectedFileAfterUpload.exists());
}
@Test
@SmallTest
@Feature({"Android-AppBase"})
public void testCrashUploadEnabledForTestsDespiteConstraints() throws Exception {
CrashReportingPermissionManager testPermManager =
new MockCrashReportingPermissionManager() {
{
mIsInSample = true;
mIsUserPermitted = false;
mIsNetworkAvailable = false;
mIsEnabledForTests = true;
}
};
MinidumpUploadCallable minidumpUploadCallable = new MinidumpUploadCallable(
mTestUpload, mUploadLog, MockMinidumpUploader.returnsSuccess(), testPermManager);
Assert.assertEquals(MinidumpUploadStatus.SUCCESS, minidumpUploadCallable.call().intValue());
Assert.assertTrue(mExpectedFileAfterUpload.exists());
assertValidUploadLogEntry();
}
@Test
@SmallTest
@Feature({"Android-AppBase"})
public void testCallWhenCurrentlyPermitted_ForcedUpload() throws Exception {
setForcedUpload();
CrashReportingPermissionManager testPermManager =
new MockCrashReportingPermissionManager() {
{
mIsInSample = true;
mIsUserPermitted = true;
mIsNetworkAvailable = true;
mIsEnabledForTests = false;
}
};
MinidumpUploadCallable minidumpUploadCallable = new MinidumpUploadCallable(
mTestUpload, mUploadLog, MockMinidumpUploader.returnsSuccess(), testPermManager);
Assert.assertEquals(MinidumpUploadStatus.SUCCESS, minidumpUploadCallable.call().intValue());
Assert.assertTrue(mExpectedFileAfterUpload.exists());
assertValidUploadLogEntry();
}
@Test
@SmallTest
@Feature({"Android-AppBase"})
public void testCallNotPermittedByUser_ForcedUpload() {
setForcedUpload();
CrashReportingPermissionManager testPermManager =
new MockCrashReportingPermissionManager() {
{
mIsInSample = true;
mIsUserPermitted = false;
mIsNetworkAvailable = true;
mIsEnabledForTests = false;
}
};
MinidumpUploadCallable minidumpUploadCallable = new MinidumpUploadCallable(
mTestUpload, mUploadLog, MockMinidumpUploader.returnsSuccess(), testPermManager);
Assert.assertEquals(MinidumpUploadStatus.SUCCESS, minidumpUploadCallable.call().intValue());
File expectedSkippedFileAfterUpload = new File(
mTestRule.getCrashDir(), mTestUpload.getName().replace(".forced", ".skipped"));
Assert.assertFalse(expectedSkippedFileAfterUpload.exists());
Assert.assertTrue(mExpectedFileAfterUpload.exists());
}
@Test
@SmallTest
@Feature({"Android-AppBase"})
public void testCallPermittedButNotInSample_ForcedUpload() {
setForcedUpload();
CrashReportingPermissionManager testPermManager =
new MockCrashReportingPermissionManager() {
{
mIsInSample = false;
mIsUserPermitted = true;
mIsNetworkAvailable = true;
mIsEnabledForTests = false;
}
};
MinidumpUploadCallable minidumpUploadCallable = new MinidumpUploadCallable(
mTestUpload, mUploadLog, MockMinidumpUploader.returnsSuccess(), testPermManager);
Assert.assertEquals(MinidumpUploadStatus.SUCCESS, minidumpUploadCallable.call().intValue());
File expectedSkippedFileAfterUpload = new File(
mTestRule.getCrashDir(), mTestUpload.getName().replace(".forced", ".skipped"));
Assert.assertFalse(expectedSkippedFileAfterUpload.exists());
Assert.assertTrue(mExpectedFileAfterUpload.exists());
}
@Test
@SmallTest
@Feature({"Android-AppBase"})
public void testCallPermittedButNotUnderCurrentCircumstances_ForcedUpload() {
setForcedUpload();
CrashReportingPermissionManager testPermManager =
new MockCrashReportingPermissionManager() {
{
mIsInSample = true;
mIsUserPermitted = true;
mIsNetworkAvailable = false;
mIsEnabledForTests = false;
}
};
MinidumpUploadCallable minidumpUploadCallable = new MinidumpUploadCallable(
mTestUpload, mUploadLog, MockMinidumpUploader.returnsSuccess(), testPermManager);
Assert.assertEquals(MinidumpUploadStatus.SUCCESS, minidumpUploadCallable.call().intValue());
File expectedSkippedFileAfterUpload = new File(
mTestRule.getCrashDir(), mTestUpload.getName().replace(".forced", ".skipped"));
Assert.assertFalse(expectedSkippedFileAfterUpload.exists());
Assert.assertTrue(mExpectedFileAfterUpload.exists());
}
private void assertValidUploadLogEntry() throws IOException {
File logfile = new File(mTestRule.getCrashDir(), CrashFileManager.CRASH_DUMP_LOGFILE);
BufferedReader input = new BufferedReader(new FileReader(logfile));
String line = null;
String lastEntry = null;
while ((line = input.readLine()) != null) {
lastEntry = line;
}
input.close();
Assert.assertNotNull("We do not have a single entry in uploads.log", lastEntry);
String[] components = lastEntry.split(",");
Assert.assertTrue(
"Log entry is expected to have exactly 3 components <upload-time>,<upload-id>,<local-id>",
components.length == 3);
String uploadTimeString = components[0];
String uploadId = components[1];
String localId = components[2];
long time = Long.parseLong(uploadTimeString);
long now = System.currentTimeMillis() / 1000; // Timestamp was in seconds.
// Sanity check on the time stamp (within an hour).
// Chances are the write and the check should have less than 1 second in between.
Assert.assertTrue(time <= now);
Assert.assertTrue(time > now - 60 * 60);
Assert.assertEquals(uploadId, MinidumpUploaderTest.UPLOAD_CRASH_ID);
Assert.assertEquals(localId, LOCAL_CRASH_ID);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.builder.endpoint.dsl;
import java.util.*;
import java.util.Map;
import java.util.concurrent.*;
import java.util.function.*;
import java.util.stream.*;
import javax.annotation.Generated;
import org.apache.camel.builder.EndpointConsumerBuilder;
import org.apache.camel.builder.EndpointProducerBuilder;
import org.apache.camel.builder.endpoint.AbstractEndpointBuilder;
/**
* Access databases through SQL and JDBC with Spring Transaction support.
*
* Generated by camel build tools - do NOT edit this file!
*/
@Generated("org.apache.camel.maven.packaging.EndpointDslMojo")
public interface SpringJdbcEndpointBuilderFactory {
    /**
     * Builder for endpoint for the Spring JDBC component.
     *
     * NOTE(review): this interface is generated by the Camel build tools (see
     * the file header); fix issues in the component model / generator rather
     * than editing this file by hand.
     */
    public interface SpringJdbcEndpointBuilder
            extends
                EndpointProducerBuilder {
        /** Switches to the advanced builder exposing the advanced options. */
        default AdvancedSpringJdbcEndpointBuilder advanced() {
            return (AdvancedSpringJdbcEndpointBuilder) this;
        }
        /**
         * Whether to allow using named parameters in the queries.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Default: true
         * Group: producer
         *
         * @param allowNamedParameters the value to set
         * @return the dsl builder
         */
        default SpringJdbcEndpointBuilder allowNamedParameters(
                boolean allowNamedParameters) {
            doSetProperty("allowNamedParameters", allowNamedParameters);
            return this;
        }
        /**
         * Whether to allow using named parameters in the queries.
         *
         * The option will be converted to a <code>boolean</code>
         * type.
         *
         * Default: true
         * Group: producer
         *
         * @param allowNamedParameters the value to set
         * @return the dsl builder
         */
        default SpringJdbcEndpointBuilder allowNamedParameters(
                String allowNamedParameters) {
            doSetProperty("allowNamedParameters", allowNamedParameters);
            return this;
        }
        /**
         * Whether the producer should be started lazy (on the first message).
         * By starting lazy you can use this to allow CamelContext and routes to
         * startup in situations where a producer may otherwise fail during
         * starting and cause the route to fail being started. By deferring this
         * startup to be lazy then the startup failure can be handled during
         * routing messages via Camel's routing error handlers. Beware that when
         * the first message is processed then creating and starting the
         * producer may take a little time and prolong the total processing time
         * of the processing.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Default: false
         * Group: producer
         *
         * @param lazyStartProducer the value to set
         * @return the dsl builder
         */
        default SpringJdbcEndpointBuilder lazyStartProducer(
                boolean lazyStartProducer) {
            doSetProperty("lazyStartProducer", lazyStartProducer);
            return this;
        }
        /**
         * Whether the producer should be started lazy (on the first message).
         * By starting lazy you can use this to allow CamelContext and routes to
         * startup in situations where a producer may otherwise fail during
         * starting and cause the route to fail being started. By deferring this
         * startup to be lazy then the startup failure can be handled during
         * routing messages via Camel's routing error handlers. Beware that when
         * the first message is processed then creating and starting the
         * producer may take a little time and prolong the total processing time
         * of the processing.
         *
         * The option will be converted to a <code>boolean</code>
         * type.
         *
         * Default: false
         * Group: producer
         *
         * @param lazyStartProducer the value to set
         * @return the dsl builder
         */
        default SpringJdbcEndpointBuilder lazyStartProducer(
                String lazyStartProducer) {
            doSetProperty("lazyStartProducer", lazyStartProducer);
            return this;
        }
        /**
         * Specify the full package and class name to use as conversion when
         * outputType=SelectOne or SelectList.
         *
         * The option is a: <code>java.lang.String</code> type.
         *
         * Group: producer
         *
         * @param outputClass the value to set
         * @return the dsl builder
         */
        default SpringJdbcEndpointBuilder outputClass(String outputClass) {
            doSetProperty("outputClass", outputClass);
            return this;
        }
        /**
         * Determines the output the producer should use.
         *
         * The option is a:
         * <code>org.apache.camel.component.jdbc.JdbcOutputType</code> type.
         *
         * Default: SelectList
         * Group: producer
         *
         * @param outputType the value to set
         * @return the dsl builder
         */
        default SpringJdbcEndpointBuilder outputType(
                org.apache.camel.component.jdbc.JdbcOutputType outputType) {
            doSetProperty("outputType", outputType);
            return this;
        }
        /**
         * Determines the output the producer should use.
         *
         * The option will be converted to a
         * <code>org.apache.camel.component.jdbc.JdbcOutputType</code> type.
         *
         * Default: SelectList
         * Group: producer
         *
         * @param outputType the value to set
         * @return the dsl builder
         */
        default SpringJdbcEndpointBuilder outputType(String outputType) {
            doSetProperty("outputType", outputType);
            return this;
        }
        /**
         * Optional parameters to the java.sql.Statement. For example to set
         * maxRows, fetchSize etc.
         *
         * The option is a: <code>java.util.Map&lt;java.lang.String,
         * java.lang.Object&gt;</code> type.
         * The option is multivalued, and you can use the parameters(String,
         * Object) method to add a value (call the method multiple times to set
         * more values).
         *
         * Group: producer
         *
         * @param key the option key
         * @param value the option value
         * @return the dsl builder
         */
        default SpringJdbcEndpointBuilder parameters(String key, Object value) {
            doSetMultiValueProperty("parameters", "statement." + key, value);
            return this;
        }
        /**
         * Optional parameters to the java.sql.Statement. For example to set
         * maxRows, fetchSize etc.
         *
         * The option is a: <code>java.util.Map&lt;java.lang.String,
         * java.lang.Object&gt;</code> type.
         * The option is multivalued, and you can use the parameters(String,
         * Object) method to add a value (call the method multiple times to set
         * more values).
         *
         * NOTE(review): the raw {@code Map} parameter is emitted by the
         * generator; the intended type is {@code Map&lt;String, Object&gt;}
         * per the description above.
         *
         * Group: producer
         *
         * @param values the values
         * @return the dsl builder
         */
        default SpringJdbcEndpointBuilder parameters(Map values) {
            doSetMultiValueProperties("parameters", "statement.", values);
            return this;
        }
        /**
         * The default maximum number of rows that can be read by a polling
         * query. The default value is 0.
         *
         * The option is a: <code>int</code> type.
         *
         * Group: producer
         *
         * @param readSize the value to set
         * @return the dsl builder
         */
        default SpringJdbcEndpointBuilder readSize(int readSize) {
            doSetProperty("readSize", readSize);
            return this;
        }
        /**
         * The default maximum number of rows that can be read by a polling
         * query. The default value is 0.
         *
         * The option will be converted to a <code>int</code> type.
         *
         * Group: producer
         *
         * @param readSize the value to set
         * @return the dsl builder
         */
        default SpringJdbcEndpointBuilder readSize(String readSize) {
            doSetProperty("readSize", readSize);
            return this;
        }
        /**
         * Camel will set the autoCommit on the JDBC connection to be false,
         * commit the change after executed the statement and reset the
         * autoCommit flag of the connection at the end, if the resetAutoCommit
         * is true. If the JDBC connection doesn't support to reset the
         * autoCommit flag, you can set the resetAutoCommit flag to be false,
         * and Camel will not try to reset the autoCommit flag. When used with
         * XA transactions you most likely need to set it to false so that the
         * transaction manager is in charge of committing this tx.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Default: true
         * Group: producer
         *
         * @param resetAutoCommit the value to set
         * @return the dsl builder
         */
        default SpringJdbcEndpointBuilder resetAutoCommit(
                boolean resetAutoCommit) {
            doSetProperty("resetAutoCommit", resetAutoCommit);
            return this;
        }
        /**
         * Camel will set the autoCommit on the JDBC connection to be false,
         * commit the change after executed the statement and reset the
         * autoCommit flag of the connection at the end, if the resetAutoCommit
         * is true. If the JDBC connection doesn't support to reset the
         * autoCommit flag, you can set the resetAutoCommit flag to be false,
         * and Camel will not try to reset the autoCommit flag. When used with
         * XA transactions you most likely need to set it to false so that the
         * transaction manager is in charge of committing this tx.
         *
         * The option will be converted to a <code>boolean</code>
         * type.
         *
         * Default: true
         * Group: producer
         *
         * @param resetAutoCommit the value to set
         * @return the dsl builder
         */
        default SpringJdbcEndpointBuilder resetAutoCommit(String resetAutoCommit) {
            doSetProperty("resetAutoCommit", resetAutoCommit);
            return this;
        }
        /**
         * Whether transactions are in use.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Default: false
         * Group: producer
         *
         * @param transacted the value to set
         * @return the dsl builder
         */
        default SpringJdbcEndpointBuilder transacted(boolean transacted) {
            doSetProperty("transacted", transacted);
            return this;
        }
        /**
         * Whether transactions are in use.
         *
         * The option will be converted to a <code>boolean</code>
         * type.
         *
         * Default: false
         * Group: producer
         *
         * @param transacted the value to set
         * @return the dsl builder
         */
        default SpringJdbcEndpointBuilder transacted(String transacted) {
            doSetProperty("transacted", transacted);
            return this;
        }
        /**
         * To read BLOB columns as bytes instead of string data. This may be
         * needed for certain databases such as Oracle where you must read BLOB
         * columns as bytes.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Default: false
         * Group: producer
         *
         * @param useGetBytesForBlob the value to set
         * @return the dsl builder
         */
        default SpringJdbcEndpointBuilder useGetBytesForBlob(
                boolean useGetBytesForBlob) {
            doSetProperty("useGetBytesForBlob", useGetBytesForBlob);
            return this;
        }
        /**
         * To read BLOB columns as bytes instead of string data. This may be
         * needed for certain databases such as Oracle where you must read BLOB
         * columns as bytes.
         *
         * The option will be converted to a <code>boolean</code>
         * type.
         *
         * Default: false
         * Group: producer
         *
         * @param useGetBytesForBlob the value to set
         * @return the dsl builder
         */
        default SpringJdbcEndpointBuilder useGetBytesForBlob(
                String useGetBytesForBlob) {
            doSetProperty("useGetBytesForBlob", useGetBytesForBlob);
            return this;
        }
        /**
         * Set this option to true to use the prepareStatementStrategy with
         * named parameters. This allows to define queries with named
         * placeholders, and use headers with the dynamic values for the query
         * placeholders.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Default: false
         * Group: producer
         *
         * @param useHeadersAsParameters the value to set
         * @return the dsl builder
         */
        default SpringJdbcEndpointBuilder useHeadersAsParameters(
                boolean useHeadersAsParameters) {
            doSetProperty("useHeadersAsParameters", useHeadersAsParameters);
            return this;
        }
        /**
         * Set this option to true to use the prepareStatementStrategy with
         * named parameters. This allows to define queries with named
         * placeholders, and use headers with the dynamic values for the query
         * placeholders.
         *
         * The option will be converted to a <code>boolean</code>
         * type.
         *
         * Default: false
         * Group: producer
         *
         * @param useHeadersAsParameters the value to set
         * @return the dsl builder
         */
        default SpringJdbcEndpointBuilder useHeadersAsParameters(
                String useHeadersAsParameters) {
            doSetProperty("useHeadersAsParameters", useHeadersAsParameters);
            return this;
        }
        /**
         * Sets whether to use JDBC 4 or JDBC 3.0 or older semantic when
         * retrieving column name. JDBC 4.0 uses columnLabel to get the column
         * name where as JDBC 3.0 uses both columnName or columnLabel.
         * Unfortunately JDBC drivers behave differently so you can use this
         * option to work out issues around your JDBC driver if you get problem
         * using this component This option is default true.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Default: true
         * Group: producer
         *
         * @param useJDBC4ColumnNameAndLabelSemantics the value to set
         * @return the dsl builder
         */
        default SpringJdbcEndpointBuilder useJDBC4ColumnNameAndLabelSemantics(
                boolean useJDBC4ColumnNameAndLabelSemantics) {
            doSetProperty("useJDBC4ColumnNameAndLabelSemantics", useJDBC4ColumnNameAndLabelSemantics);
            return this;
        }
        /**
         * Sets whether to use JDBC 4 or JDBC 3.0 or older semantic when
         * retrieving column name. JDBC 4.0 uses columnLabel to get the column
         * name where as JDBC 3.0 uses both columnName or columnLabel.
         * Unfortunately JDBC drivers behave differently so you can use this
         * option to work out issues around your JDBC driver if you get problem
         * using this component This option is default true.
         *
         * The option will be converted to a <code>boolean</code>
         * type.
         *
         * Default: true
         * Group: producer
         *
         * @param useJDBC4ColumnNameAndLabelSemantics the value to set
         * @return the dsl builder
         */
        default SpringJdbcEndpointBuilder useJDBC4ColumnNameAndLabelSemantics(
                String useJDBC4ColumnNameAndLabelSemantics) {
            doSetProperty("useJDBC4ColumnNameAndLabelSemantics", useJDBC4ColumnNameAndLabelSemantics);
            return this;
        }
    }
    /**
     * Advanced builder for endpoint for the Spring JDBC component.
     *
     * NOTE(review): generated by the Camel build tools (see the file header);
     * do not edit this file by hand.
     */
    public interface AdvancedSpringJdbcEndpointBuilder
            extends
                EndpointProducerBuilder {
        /** Switches back to the basic builder exposing the common options. */
        default SpringJdbcEndpointBuilder basic() {
            return (SpringJdbcEndpointBuilder) this;
        }
        /**
         * To use a custom org.apache.camel.component.jdbc.BeanRowMapper when
         * using outputClass. The default implementation will lower case the row
         * names and skip underscores, and dashes. For example CUST_ID is mapped
         * as custId.
         *
         * The option is a:
         * <code>org.apache.camel.component.jdbc.BeanRowMapper</code> type.
         *
         * Group: advanced
         *
         * @param beanRowMapper the value to set
         * @return the dsl builder
         */
        default AdvancedSpringJdbcEndpointBuilder beanRowMapper(
                org.apache.camel.component.jdbc.BeanRowMapper beanRowMapper) {
            doSetProperty("beanRowMapper", beanRowMapper);
            return this;
        }
        /**
         * To use a custom org.apache.camel.component.jdbc.BeanRowMapper when
         * using outputClass. The default implementation will lower case the row
         * names and skip underscores, and dashes. For example CUST_ID is mapped
         * as custId.
         *
         * The option will be converted to a
         * <code>org.apache.camel.component.jdbc.BeanRowMapper</code> type.
         *
         * Group: advanced
         *
         * @param beanRowMapper the value to set
         * @return the dsl builder
         */
        default AdvancedSpringJdbcEndpointBuilder beanRowMapper(
                String beanRowMapper) {
            doSetProperty("beanRowMapper", beanRowMapper);
            return this;
        }
        /**
         * To use a custom strategy for working with connections. Do not use a
         * custom strategy when using the spring-jdbc component because a
         * special Spring ConnectionStrategy is used by default to support
         * Spring Transactions.
         *
         * The option is a:
         * <code>org.apache.camel.component.jdbc.ConnectionStrategy</code> type.
         *
         * Group: advanced
         *
         * @param connectionStrategy the value to set
         * @return the dsl builder
         */
        default AdvancedSpringJdbcEndpointBuilder connectionStrategy(
                org.apache.camel.component.jdbc.ConnectionStrategy connectionStrategy) {
            doSetProperty("connectionStrategy", connectionStrategy);
            return this;
        }
        /**
         * To use a custom strategy for working with connections. Do not use a
         * custom strategy when using the spring-jdbc component because a
         * special Spring ConnectionStrategy is used by default to support
         * Spring Transactions.
         *
         * The option will be converted to a
         * <code>org.apache.camel.component.jdbc.ConnectionStrategy</code> type.
         *
         * Group: advanced
         *
         * @param connectionStrategy the value to set
         * @return the dsl builder
         */
        default AdvancedSpringJdbcEndpointBuilder connectionStrategy(
                String connectionStrategy) {
            doSetProperty("connectionStrategy", connectionStrategy);
            return this;
        }
        /**
         * Allows the plugin to use a custom
         * org.apache.camel.component.jdbc.JdbcPrepareStatementStrategy to
         * control preparation of the query and prepared statement.
         *
         * The option is a:
         * <code>org.apache.camel.component.jdbc.JdbcPrepareStatementStrategy</code> type.
         *
         * Group: advanced
         *
         * @param prepareStatementStrategy the value to set
         * @return the dsl builder
         */
        default AdvancedSpringJdbcEndpointBuilder prepareStatementStrategy(
                org.apache.camel.component.jdbc.JdbcPrepareStatementStrategy prepareStatementStrategy) {
            doSetProperty("prepareStatementStrategy", prepareStatementStrategy);
            return this;
        }
        /**
         * Allows the plugin to use a custom
         * org.apache.camel.component.jdbc.JdbcPrepareStatementStrategy to
         * control preparation of the query and prepared statement.
         *
         * The option will be converted to a
         * <code>org.apache.camel.component.jdbc.JdbcPrepareStatementStrategy</code> type.
         *
         * Group: advanced
         *
         * @param prepareStatementStrategy the value to set
         * @return the dsl builder
         */
        default AdvancedSpringJdbcEndpointBuilder prepareStatementStrategy(
                String prepareStatementStrategy) {
            doSetProperty("prepareStatementStrategy", prepareStatementStrategy);
            return this;
        }
    }
    /**
     * Mixin interface exposing the entry points of the spring-jdbc endpoint
     * DSL. (Generated by the Camel build tools - see the file header.)
     */
    public interface SpringJdbcBuilders {
        /**
         * Spring JDBC (camel-spring-jdbc)
         * Access databases through SQL and JDBC with Spring Transaction
         * support.
         *
         * Category: database,sql
         * Since: 3.10
         * Maven coordinates: org.apache.camel:camel-spring-jdbc
         *
         * Syntax: <code>spring-jdbc:dataSourceName</code>
         *
         * Path parameter: dataSourceName (required)
         * Name of DataSource to lookup in the Registry. If the name is
         * dataSource or default, then Camel will attempt to lookup a default
         * DataSource from the registry, meaning if there is a only one instance
         * of DataSource found, then this DataSource will be used.
         *
         * @param path dataSourceName
         * @return the dsl builder
         */
        default SpringJdbcEndpointBuilder springJdbc(String path) {
            return SpringJdbcEndpointBuilderFactory.endpointBuilder("spring-jdbc", path);
        }
        /**
         * Spring JDBC (camel-spring-jdbc)
         * Access databases through SQL and JDBC with Spring Transaction
         * support.
         *
         * Category: database,sql
         * Since: 3.10
         * Maven coordinates: org.apache.camel:camel-spring-jdbc
         *
         * Syntax: <code>spring-jdbc:dataSourceName</code>
         *
         * Path parameter: dataSourceName (required)
         * Name of DataSource to lookup in the Registry. If the name is
         * dataSource or default, then Camel will attempt to lookup a default
         * DataSource from the registry, meaning if there is a only one instance
         * of DataSource found, then this DataSource will be used.
         *
         * @param componentName to use a custom component name for the endpoint
         * instead of the default name
         * @param path dataSourceName
         * @return the dsl builder
         */
        default SpringJdbcEndpointBuilder springJdbc(
                String componentName,
                String path) {
            return SpringJdbcEndpointBuilderFactory.endpointBuilder(componentName, path);
        }
    }
    static SpringJdbcEndpointBuilder endpointBuilder(
            String componentName,
            String path) {
        // Local class: captures componentName from the enclosing method and
        // implements both the basic and the advanced builder interfaces so
        // advanced()/basic() casts always succeed on the returned instance.
        class SpringJdbcEndpointBuilderImpl extends AbstractEndpointBuilder implements SpringJdbcEndpointBuilder, AdvancedSpringJdbcEndpointBuilder {
            public SpringJdbcEndpointBuilderImpl(String path) {
                super(componentName, path);
            }
        }
        return new SpringJdbcEndpointBuilderImpl(path);
    }
}
| |
// Copyright 2018, OpenCensus Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package io.opencensus.integration.jdbc;
import io.opencensus.common.Scope;
import io.opencensus.integration.jdbc.Observability.TrackingOperation;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Objects;
/** Wraps and instruments a {@link ResultSet} instance with tracing and metrics using OpenCensus. */
public class OcWrapResultSet implements ResultSet {
private final ResultSet resultSet;
public OcWrapResultSet(ResultSet rs) {
this.resultSet = rs;
}
@Override
public void clearWarnings() throws SQLException {
// This method may touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#clearWarnings--
TrackingOperation trackingOperation =
Observability.createRoundtripTrackingSpan("java.sql.ResultSet.clearWarnings");
try (Scope ws = trackingOperation.withSpan()) {
this.resultSet.clearWarnings();
} catch (Exception e) {
trackingOperation.recordException(e);
throw e;
} finally {
trackingOperation.end();
}
}
@Override
public void close() throws SQLException {
// This method goes to the database directly:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#close--
TrackingOperation trackingOperation =
Observability.createRoundtripTrackingSpan("java.sql.ResultSet.close");
try (Scope ws = trackingOperation.withSpan()) {
this.resultSet.close();
} catch (Exception e) {
trackingOperation.recordException(e);
throw e;
} finally {
trackingOperation.end();
}
}
@Override
public void deleteRow() throws SQLException {
// This method goes to the database directly:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#deleteRow--
TrackingOperation trackingOperation =
Observability.createRoundtripTrackingSpan("java.sql.ResultSet.deleteRow");
try (Scope ws = trackingOperation.withSpan()) {
this.resultSet.deleteRow();
} catch (Exception e) {
trackingOperation.recordException(e);
throw e;
} finally {
trackingOperation.end();
}
}
@Override
public int findColumn(String columnLabel) throws SQLException {
// This method may touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#findColumn-java.lang.String-
TrackingOperation trackingOperation =
Observability.createRoundtripTrackingSpan("java.sql.ResultSet.findColumn");
try (Scope ws = trackingOperation.withSpan()) {
return this.resultSet.findColumn(columnLabel);
} catch (Exception e) {
trackingOperation.recordException(e);
throw e;
} finally {
trackingOperation.end();
}
}
@Override
public boolean first() throws SQLException {
// This method may touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#first--
TrackingOperation trackingOperation =
Observability.createRoundtripTrackingSpan("java.sql.ResultSet.first");
try (Scope ws = trackingOperation.withSpan()) {
return this.resultSet.first();
} catch (Exception e) {
trackingOperation.recordException(e);
throw e;
} finally {
trackingOperation.end();
}
}
@Override
public void insertRow() throws SQLException {
// This method may touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#insertRow--
TrackingOperation trackingOperation =
Observability.createRoundtripTrackingSpan("java.sql.ResultSet.insertRow");
try (Scope ws = trackingOperation.withSpan()) {
this.resultSet.insertRow();
} catch (Exception e) {
trackingOperation.recordException(e);
throw e;
} finally {
trackingOperation.end();
}
}
@Override
public void updateObject(int columnIndex, Object x) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateObject-int-java.lang.Object-
this.resultSet.updateObject(columnIndex, x);
}
@Override
public void updateObject(int columnIndex, Object x, int scaleOrLength) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateObject-int-java.lang.Object-int-
this.resultSet.updateObject(columnIndex, x, scaleOrLength);
}
@Override
public void updateObject(String columnLabel, Object x) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateObject-java.lang.String-java.lang.Object-
this.resultSet.updateObject(columnLabel, x);
}
@Override
public void updateObject(String columnLabel, Object x, int scaleOrLength) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateObject-java.lang.String-java.lang.Object-int-
this.resultSet.updateObject(columnLabel, x, scaleOrLength);
}
@Override
public void updateTimestamp(int columnIndex, java.sql.Timestamp x) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateTimestamp-int-java.sql.Timestamp-
this.resultSet.updateTimestamp(columnIndex, x);
}
@Override
public void updateTimestamp(String columnName, java.sql.Timestamp x) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateTimestamp-java.lang.String-java.sql.Timestamp-
this.resultSet.updateTimestamp(columnName, x);
}
@Override
public void updateTime(int columnIndex, java.sql.Time x) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateTime-int-java.sql.Time-
this.resultSet.updateTime(columnIndex, x);
}
@Override
public void updateTime(String columnName, java.sql.Time x) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateTime-java.lang.String-java.sql.Time-
this.resultSet.updateTime(columnName, x);
}
@Override
public void updateDate(int columnIndex, java.sql.Date x) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateDate-int-java.sql.Date-
this.resultSet.updateDate(columnIndex, x);
}
@Override
public void updateDate(String columnName, java.sql.Date x) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateDate-java.lang.String-java.sql.Date-
this.resultSet.updateDate(columnName, x);
}
@Override
public void updateBytes(int columnIndex, byte[] x) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateBytes-int-byte:A-
this.resultSet.updateBytes(columnIndex, x);
}
@Override
public void updateBytes(String columnName, byte[] x) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateBytes-java.lang.String-byte:A-
this.resultSet.updateBytes(columnName, x);
}
@Override
public void updateString(int columnIndex, String x) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateString-int-java.lang.String-
this.resultSet.updateString(columnIndex, x);
}
@Override
public void updateString(String columnName, String x) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateString-java.lang.String-java.lang.String-
this.resultSet.updateString(columnName, x);
}
@Override
public void updateBigDecimal(int columnIndex, java.math.BigDecimal x) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateBigDecimal-int-java.math.BigDecimal-
this.resultSet.updateBigDecimal(columnIndex, x);
}
@Override
public void updateBigDecimal(String columnName, java.math.BigDecimal x) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateBigDecimal-java.lang.String-java.math.BigDecimal-
this.resultSet.updateBigDecimal(columnName, x);
}
@Override
public void updateDouble(int columnIndex, double x) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateDouble-int-double-
this.resultSet.updateDouble(columnIndex, x);
}
@Override
public void updateDouble(String columnName, double x) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateDouble-java.lang.String-double-
this.resultSet.updateDouble(columnName, x);
}
@Override
public void updateFloat(int columnIndex, float x) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateFloat-int-float-
this.resultSet.updateFloat(columnIndex, x);
}
@Override
public void updateFloat(String columnName, float x) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateFloat-java.lang.String-float-
this.resultSet.updateFloat(columnName, x);
}
@Override
public void updateLong(int columnIndex, long x) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateLong-int-long-
this.resultSet.updateLong(columnIndex, x);
}
@Override
public void updateLong(String columnName, long x) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateLong-java.lang.String-long-
this.resultSet.updateLong(columnName, x);
}
@Override
public void updateInt(int columnIndex, int x) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateInt-int-int-
this.resultSet.updateInt(columnIndex, x);
}
@Override
public void updateInt(String columnName, int x) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateInt-java.lang.String-int-
this.resultSet.updateInt(columnName, x);
}
@Override
public void updateShort(int columnIndex, short x) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateShort-int-short-
this.resultSet.updateShort(columnIndex, x);
}
@Override
public void updateShort(String columnName, short x) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateShort-java.lang.String-short-
this.resultSet.updateShort(columnName, x);
}
@Override
public void updateByte(int columnIndex, byte x) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateByte-int-byte-
this.resultSet.updateByte(columnIndex, x);
}
@Override
public void updateByte(String columnName, byte x) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateByte-java.lang.String-byte-
this.resultSet.updateByte(columnName, x);
}
@Override
public void updateBoolean(int columnIndex, boolean x) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateBoolean-int-boolean-
this.resultSet.updateBoolean(columnIndex, x);
}
@Override
public void updateBoolean(String columnName, boolean x) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateBoolean-java.lang.String-boolean-
this.resultSet.updateBoolean(columnName, x);
}
@Override
public void updateNull(int columnIndex) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateNull-int-
this.resultSet.updateNull(columnIndex);
}
@Override
public void updateNull(String columnName) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateNull-java.lang.String-
this.resultSet.updateNull(columnName);
}
@Override
public boolean rowDeleted() throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#rowDeleted--
return this.resultSet.rowDeleted();
}
@Override
public boolean rowInserted() throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#rowInserted--
return this.resultSet.rowInserted();
}
@Override
public boolean rowUpdated() throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#rowUpdated--
return this.resultSet.rowUpdated();
}
@Override
public int getConcurrency() throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getConcurrency--
return this.resultSet.getConcurrency();
}
@Override
public int getType() throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getType--
return this.resultSet.getType();
}
@Override
public int getFetchDirection() throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getFetchDirection--
return this.resultSet.getFetchDirection();
}
@Override
public int getFetchSize() throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getFetchSize--
return this.resultSet.getFetchSize();
}
@Override
public java.sql.ResultSetMetaData getMetaData() throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getMetaData--
return this.resultSet.getMetaData();
}
@Override
public java.sql.SQLWarning getWarnings() throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getWarnings--
return this.resultSet.getWarnings();
}
@Override
public java.sql.Array getArray(int parameterIndex) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getArray-int-
return this.resultSet.getArray(parameterIndex);
}
@Override
public java.sql.Array getArray(String parameterName) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getArray-java.lang.String-
return this.resultSet.getArray(parameterName);
}
@Override
public java.math.BigDecimal getBigDecimal(int columnIndex) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getBigDecimal-int-
return this.resultSet.getBigDecimal(columnIndex);
}
@SuppressWarnings("deprecation")
@Override
public java.math.BigDecimal getBigDecimal(int columnIndex, int scale) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getBigDecimal-int-int-
return this.resultSet.getBigDecimal(columnIndex, scale);
}
@Override
public java.math.BigDecimal getBigDecimal(String columnLabel) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getBigDecimal-java.lang.String-
return this.resultSet.getBigDecimal(columnLabel);
}
@SuppressWarnings("deprecation")
@Override
public java.math.BigDecimal getBigDecimal(String columnLabel, int scale) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getBigDecimal-java.lang.String-int-
return this.resultSet.getBigDecimal(columnLabel, scale);
}
@Override
public java.sql.Blob getBlob(int parameterIndex) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getBlob-int-
return this.resultSet.getBlob(parameterIndex);
}
@Override
public java.sql.Blob getBlob(String parameterName) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getBlob-java.lang.String-
return this.resultSet.getBlob(parameterName);
}
@Override
public void updateBlob(int parameterIndex, java.sql.Blob x) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateBlob-int-java.sql.Blob-
this.resultSet.updateBlob(parameterIndex, x);
}
@Override
public void updateBlob(int parameterIndex, java.io.InputStream inputStream) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateBlob-int-java.io.InputStream-
this.resultSet.updateBlob(parameterIndex, inputStream);
}
@Override
public void updateBlob(int parameterIndex, java.io.InputStream inputStream, long length)
throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateBlob-int-java.io.InputStream-long-
this.resultSet.updateBlob(parameterIndex, inputStream, length);
}
@Override
public void updateBlob(String parameterName, java.sql.Blob x) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateBlob-java.lang.String-java.sql.Blob-
this.resultSet.updateBlob(parameterName, x);
}
@Override
public void updateBlob(String parameterName, java.io.InputStream inputStream)
throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateBlob-java.lang.String-java.io.InputStream-
this.resultSet.updateBlob(parameterName, inputStream);
}
@Override
public void updateBlob(String parameterName, java.io.InputStream inputStream, long length)
throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateBlob-java.lang.String-java.io.InputStream-long-
this.resultSet.updateBlob(parameterName, inputStream, length);
}
@Override
public boolean getBoolean(int parameterIndex) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getBoolean-int-
return this.resultSet.getBoolean(parameterIndex);
}
@Override
public boolean getBoolean(String parameterName) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getBoolean-java.lang.String-
return this.resultSet.getBoolean(parameterName);
}
@Override
public java.sql.Clob getClob(int parameterIndex) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getClob-int-
return this.resultSet.getClob(parameterIndex);
}
@Override
public java.sql.Clob getClob(String parameterName) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getClob-java.lang.String-
return this.resultSet.getClob(parameterName);
}
@Override
public void updateClob(int columnIndex, java.sql.Clob clob) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateClob-int-java.sql.Clob-
this.resultSet.updateClob(columnIndex, clob);
}
@Override
public void updateClob(int columnIndex, java.io.Reader reader) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateClob-int-java.io.Reader-
this.resultSet.updateClob(columnIndex, reader);
}
@Override
public void updateClob(int columnIndex, java.io.Reader reader, long length) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateClob-int-java.io.Reader-long-
this.resultSet.updateClob(columnIndex, reader, length);
}
@Override
public void updateClob(String columnLabel, java.sql.Clob clob) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateClob-java.lang.String-java.sql.Clob-
this.resultSet.updateClob(columnLabel, clob);
}
@Override
public void updateClob(String columnLabel, java.io.Reader reader) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateClob-java.lang.String-java.io.Reader-
this.resultSet.updateClob(columnLabel, reader);
}
@Override
public void updateClob(String columnLabel, java.io.Reader reader, long length)
throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateClob-java.lang.String-java.io.Reader-long-
this.resultSet.updateClob(columnLabel, reader, length);
}
@Override
public java.sql.Date getDate(int parameterIndex) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getDate-int-
return this.resultSet.getDate(parameterIndex);
}
@Override
public java.sql.Date getDate(int parameterIndex, java.util.Calendar cal) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getDate-int-java.util.Calendar-
return this.resultSet.getDate(parameterIndex, cal);
}
@Override
public java.sql.Date getDate(String parameterName) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getDate-java.lang.String-
return this.resultSet.getDate(parameterName);
}
@Override
public java.sql.Date getDate(String parameterName, java.util.Calendar cal) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getDate-java.lang.String-java.util.Calendar-
return this.resultSet.getDate(parameterName, cal);
}
@Override
public double getDouble(int parameterIndex) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getDouble-int-
return this.resultSet.getDouble(parameterIndex);
}
@Override
public double getDouble(String parameterName) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getDouble-java.lang.String-
return this.resultSet.getDouble(parameterName);
}
@Override
public float getFloat(int parameterIndex) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getFloat-int-
return this.resultSet.getFloat(parameterIndex);
}
@Override
public float getFloat(String parameterName) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getFloat-java.lang.String-
return this.resultSet.getFloat(parameterName);
}
@Override
public int getInt(int parameterIndex) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getInt-int-
return this.resultSet.getInt(parameterIndex);
}
@Override
public int getInt(String parameterName) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getInt-java.lang.String-
return this.resultSet.getInt(parameterName);
}
@Override
public long getLong(int parameterIndex) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getLong-int-
return this.resultSet.getLong(parameterIndex);
}
@Override
public long getLong(String parameterName) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getLong-java.lang.String-
return this.resultSet.getLong(parameterName);
}
@Override
public java.sql.Ref getRef(int parameterIndex) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getRef-int-
return this.resultSet.getRef(parameterIndex);
}
@Override
public java.sql.Ref getRef(String parameterName) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getRef-java.lang.String-
return this.resultSet.getRef(parameterName);
}
@Override
public java.sql.RowId getRowId(int parameterIndex) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getRowId-int-
return this.resultSet.getRowId(parameterIndex);
}
@Override
public java.sql.RowId getRowId(String parameterName) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getRowId-java.lang.String-
return this.resultSet.getRowId(parameterName);
}
@Override
public short getShort(int parameterIndex) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getShort-int-
return this.resultSet.getShort(parameterIndex);
}
@Override
public short getShort(String parameterName) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getShort-java.lang.String-
return this.resultSet.getShort(parameterName);
}
@Override
public String getString(int parameterIndex) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getString-int-
return this.resultSet.getString(parameterIndex);
}
@Override
public String getString(String parameterName) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getString-java.lang.String-
return this.resultSet.getString(parameterName);
}
@Override
public java.sql.Time getTime(int parameterIndex) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getTime-int-
return this.resultSet.getTime(parameterIndex);
}
@Override
public java.sql.Time getTime(int parameterIndex, java.util.Calendar cal) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getTime-int-java.util.Calendar-
return this.resultSet.getTime(parameterIndex, cal);
}
@Override
public java.sql.Time getTime(String parameterName) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getTime-java.lang.String-
return this.resultSet.getTime(parameterName);
}
@Override
public java.sql.Time getTime(String parameterName, java.util.Calendar cal) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getTime-java.lang.String-java.util.Calendar-
return this.resultSet.getTime(parameterName, cal);
}
@Override
public java.net.URL getURL(int parameterIndex) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getURL-int-
return this.resultSet.getURL(parameterIndex);
}
@Override
public java.net.URL getURL(String parameterName) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getURL-java.lang.String-
return this.resultSet.getURL(parameterName);
}
@Override
public boolean isClosed() throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#isClosed--
return this.resultSet.isClosed();
}
@Override
public boolean isLast() throws SQLException {
// This method may touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#isLast--
TrackingOperation trackingOperation =
Observability.createRoundtripTrackingSpan("java.sql.ResultSet.isLast");
try (Scope ws = trackingOperation.withSpan()) {
return this.resultSet.isLast();
} catch (Exception e) {
trackingOperation.recordException(e);
throw e;
} finally {
trackingOperation.end();
}
}
@Override
public boolean isAfterLast() throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#isAfterLast--
return this.resultSet.isAfterLast();
}
@Override
public boolean isFirst() throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#isFirst--
return this.resultSet.isFirst();
}
@Override
public String getCursorName() throws SQLException {
// This method may touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getCursorName--
TrackingOperation trackingOperation =
Observability.createRoundtripTrackingSpan("java.sql.ResultSet.getCursorName");
try (Scope ws = trackingOperation.withSpan()) {
return this.resultSet.getCursorName();
} catch (Exception e) {
trackingOperation.recordException(e);
throw e;
} finally {
trackingOperation.end();
}
}
@Override
public byte[] getBytes(int columnIndex) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getBytes-int-
return this.resultSet.getBytes(columnIndex);
}
@Override
public byte getByte(int columnIndex) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getByte-int-
return this.resultSet.getByte(columnIndex);
}
@Override
public byte getByte(String columnLabel) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getByte-java.lang.String-
return this.resultSet.getByte(columnLabel);
}
@Override
public byte[] getBytes(String columnLabel) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getBytes-java.lang.String-
return this.resultSet.getBytes(columnLabel);
}
@Override
public java.io.InputStream getBinaryStream(int columnIndex) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getBinaryStream-int-
return this.resultSet.getBinaryStream(columnIndex);
}
@Override
public java.io.InputStream getBinaryStream(String columnLabel) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getBinaryStream-java.lang.String-
return this.resultSet.getBinaryStream(columnLabel);
}
@Override
public java.io.InputStream getAsciiStream(int columnIndex) throws SQLException {
// This method may touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getAsciiStream-int-
TrackingOperation trackingOperation =
Observability.createRoundtripTrackingSpan("java.sql.ResultSet.getAsciiStream");
try (Scope ws = trackingOperation.withSpan()) {
return this.resultSet.getAsciiStream(columnIndex);
} catch (Exception e) {
trackingOperation.recordException(e);
throw e;
} finally {
trackingOperation.end();
}
}
@Override
public java.io.InputStream getAsciiStream(String columnLabel) throws SQLException {
// This method may touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getAsciiStream-java.lang.String-
TrackingOperation trackingOperation =
Observability.createRoundtripTrackingSpan("java.sql.ResultSet.getAsciiStream");
try (Scope ws = trackingOperation.withSpan()) {
return this.resultSet.getAsciiStream(columnLabel);
} catch (Exception e) {
trackingOperation.recordException(e);
throw e;
} finally {
trackingOperation.end();
}
}
@SuppressWarnings("deprecation")
@Override
public java.io.InputStream getUnicodeStream(int columnIndex) throws SQLException {
// This method may touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getUnicodeStream-int-
TrackingOperation trackingOperation =
Observability.createRoundtripTrackingSpan("java.sql.ResultSet.getUnicodeStream");
try (Scope ws = trackingOperation.withSpan()) {
return this.resultSet.getUnicodeStream(columnIndex);
} catch (Exception e) {
trackingOperation.recordException(e);
throw e;
} finally {
trackingOperation.end();
}
}
@SuppressWarnings("deprecation")
@Override
public java.io.InputStream getUnicodeStream(String columnLabel) throws SQLException {
// This method may touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getUnicodeStream-java.lang.String-
TrackingOperation trackingOperation =
Observability.createRoundtripTrackingSpan("java.sql.ResultSet.getUnicodeStream");
try (Scope ws = trackingOperation.withSpan()) {
return this.resultSet.getUnicodeStream(columnLabel);
} catch (Exception e) {
trackingOperation.recordException(e);
throw e;
} finally {
trackingOperation.end();
}
}
@Override
public boolean isBeforeFirst() throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#isBeforeFirst--
return this.resultSet.isBeforeFirst();
}
@Override
public void setFetchDirection(int direction) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#setFetchDirection-int-
this.resultSet.setFetchDirection(direction);
}
@Override
public void setFetchSize(int rows) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#setFetchSize-int-
this.resultSet.setFetchSize(rows);
}
@Override
public boolean isWrapperFor(Class<?> iface) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/Wrapper.html#isWrapperFor-java.lang.Class-
return this.resultSet.isWrapperFor(iface);
}
@Override
public <T> T unwrap(Class<T> iface) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/Wrapper.html#unwrap-java.lang.Class-
return this.resultSet.unwrap(iface);
}
@Override
public Object getObject(int parameterIndex) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getObject-int-
return this.resultSet.getObject(parameterIndex);
}
@Override
public <T> T getObject(int parameterIndex, Class<T> type) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getObject-int-java.lang.Class-
return this.resultSet.getObject(parameterIndex, type);
}
@Override
public Object getObject(int parameterIndex, java.util.Map<String, Class<?>> map)
throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getObject-int-java.util.Map-
return this.resultSet.getObject(parameterIndex, map);
}
@Override
public Object getObject(String parameterName) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getObject-java.lang.String-
return this.resultSet.getObject(parameterName);
}
@Override
public <T> T getObject(String parameterName, Class<T> type) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getObject-java.lang.String-java.lang.Class-
return this.resultSet.getObject(parameterName, type);
}
@Override
public Object getObject(String parameterName, java.util.Map<String, Class<?>> map)
throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getObject-java.lang.String-java.util.Map-
return this.resultSet.getObject(parameterName, map);
}
@Override
public java.sql.SQLXML getSQLXML(int parameterIndex) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getSQLXML-int-
return this.resultSet.getSQLXML(parameterIndex);
}
@Override
public java.sql.SQLXML getSQLXML(String parameterName) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getSQLXML-java.lang.String-
return this.resultSet.getSQLXML(parameterName);
}
@Override
public String getNString(int parameterIndex) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getNString-int-
return this.resultSet.getNString(parameterIndex);
}
@Override
public String getNString(String parameterName) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getNString-java.lang.String-
return this.resultSet.getNString(parameterName);
}
@Override
public java.io.Reader getNCharacterStream(int parameterIndex) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getNCharacterStream-int-
return this.resultSet.getNCharacterStream(parameterIndex);
}
@Override
public java.io.Reader getNCharacterStream(String parameterName) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getNCharacterStream-java.lang.String-
return this.resultSet.getNCharacterStream(parameterName);
}
@Override
public java.io.Reader getCharacterStream(int parameterIndex) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getCharacterStream-int-
return this.resultSet.getCharacterStream(parameterIndex);
}
@Override
public java.io.Reader getCharacterStream(String parameterName) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getCharacterStream-java.lang.String-
return this.resultSet.getCharacterStream(parameterName);
}
@Override
public void updateCharacterStream(int parameterIndex, java.io.Reader x) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateCharacterStream-int-java.io.Reader-
this.resultSet.updateCharacterStream(parameterIndex, x);
}
@Override
public void updateCharacterStream(int parameterIndex, java.io.Reader x, int length)
throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateCharacterStream-int-java.io.Reader-int-
this.resultSet.updateCharacterStream(parameterIndex, x, length);
}
@Override
public void updateCharacterStream(int parameterIndex, java.io.Reader x, long length)
throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateCharacterStream-int-java.io.Reader-long-
this.resultSet.updateCharacterStream(parameterIndex, x, length);
}
@Override
public void updateCharacterStream(String parameterName, java.io.Reader x) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateCharacterStream-java.lang.String-java.io.Reader-
this.resultSet.updateCharacterStream(parameterName, x);
}
@Override
public void updateCharacterStream(String parameterName, java.io.Reader x, int length)
throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateCharacterStream-java.lang.String-java.io.Reader-int-
this.resultSet.updateCharacterStream(parameterName, x, length);
}
@Override
public void updateCharacterStream(String parameterName, java.io.Reader x, long length)
throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateCharacterStream-java.lang.String-java.io.Reader-long-
this.resultSet.updateCharacterStream(parameterName, x, length);
}
@Override
public void updateBinaryStream(int columnIndex, java.io.InputStream x) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateBinaryStream-int-java.io.InputStream-
this.resultSet.updateBinaryStream(columnIndex, x);
}
@Override
public void updateBinaryStream(int columnIndex, java.io.InputStream x, int length)
throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateBinaryStream-int-java.io.InputStream-int-
this.resultSet.updateBinaryStream(columnIndex, x, length);
}
@Override
public void updateBinaryStream(int columnIndex, java.io.InputStream x, long length)
throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateBinaryStream-int-java.io.InputStream-long-
this.resultSet.updateBinaryStream(columnIndex, x, length);
}
@Override
public void updateBinaryStream(String columnLabel, java.io.InputStream x) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateBinaryStream-java.lang.String-java.io.InputStream-
this.resultSet.updateBinaryStream(columnLabel, x);
}
@Override
public void updateBinaryStream(String columnLabel, java.io.InputStream x, int length)
throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateBinaryStream-java.lang.String-java.io.InputStream-int-
this.resultSet.updateBinaryStream(columnLabel, x, length);
}
@Override
public void updateBinaryStream(String columnLabel, java.io.InputStream x, long length)
throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateBinaryStream-java.lang.String-java.io.InputStream-long-
this.resultSet.updateBinaryStream(columnLabel, x, length);
}
@Override
public void updateAsciiStream(int columnIndex, java.io.InputStream x) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateAsciiStream-int-java.io.InputStream-
this.resultSet.updateAsciiStream(columnIndex, x);
}
@Override
public void updateAsciiStream(int columnIndex, java.io.InputStream x, int length)
throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateAsciiStream-int-java.io.InputStream-int-
this.resultSet.updateAsciiStream(columnIndex, x, length);
}
@Override
public void updateAsciiStream(int columnIndex, java.io.InputStream x, long length)
throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateAsciiStream-int-java.io.InputStream-long-
this.resultSet.updateAsciiStream(columnIndex, x, length);
}
@Override
public void updateAsciiStream(String columnLabel, java.io.InputStream x) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateAsciiStream-java.lang.String-java.io.InputStream-
this.resultSet.updateAsciiStream(columnLabel, x);
}
@Override
public void updateAsciiStream(String columnLabel, java.io.InputStream x, int length)
throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateAsciiStream-java.lang.String-java.io.InputStream-int-
this.resultSet.updateAsciiStream(columnLabel, x, length);
}
@Override
public void updateAsciiStream(String columnLabel, java.io.InputStream x, long length)
throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateAsciiStream-java.lang.String-java.io.InputStream-long-
this.resultSet.updateAsciiStream(columnLabel, x, length);
}
@Override
public void updateNCharacterStream(int columnIndex, java.io.Reader x) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateNCharacterStream-int-java.io.Reader-
this.resultSet.updateNCharacterStream(columnIndex, x);
}
@Override
public void updateNCharacterStream(int columnIndex, java.io.Reader x, long length)
throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateNCharacterStream-int-java.io.Reader-long-
this.resultSet.updateNCharacterStream(columnIndex, x, length);
}
@Override
public void updateNCharacterStream(String columnLabel, java.io.Reader x) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateNCharacterStream-java.lang.String-java.io.Reader-
this.resultSet.updateNCharacterStream(columnLabel, x);
}
@Override
public void updateNCharacterStream(String columnLabel, java.io.Reader x, long length)
throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateNCharacterStream-java.lang.String-java.io.Reader-long-
this.resultSet.updateNCharacterStream(columnLabel, x, length);
}
@Override
public java.sql.NClob getNClob(int parameterIndex) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getNClob-int-
return this.resultSet.getNClob(parameterIndex);
}
@Override
public void updateSQLXML(int columnIndex, java.sql.SQLXML xmlObject) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateSQLXML-int-java.sql.SQLXML-
this.resultSet.updateSQLXML(columnIndex, xmlObject);
}
@Override
public void updateSQLXML(String columnName, java.sql.SQLXML xmlObject) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateSQLXML-java.lang.String-java.sql.SQLXML-
this.resultSet.updateSQLXML(columnName, xmlObject);
}
@Override
public void updateNString(String columnLabel, String nString) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateNString-int-java.lang.String-
this.resultSet.updateNString(columnLabel, nString);
}
@Override
public void updateNString(int columnIndex, String nString) throws SQLException {
// This method doesn't touch the database:
// https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateNString-java.lang.String-java.lang.String-
this.resultSet.updateNString(columnIndex, nString);
}
@Override
public int getHoldability() throws SQLException {
  // Treated as a potential round trip, so it is wrapped in a tracking span:
  // https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getHoldability--
  final TrackingOperation op =
      Observability.createRoundtripTrackingSpan("java.sql.ResultSet.getHoldability");
  try (Scope scope = op.withSpan()) {
    return resultSet.getHoldability();
  } catch (Exception exc) {
    op.recordException(exc);
    throw exc;
  } finally {
    op.end();
  }
}
@Override
public void updateRow() throws SQLException {
  // Flushes the buffered row updates to the server — a direct database call:
  // https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateRow--
  final TrackingOperation op =
      Observability.createRoundtripTrackingSpan("java.sql.ResultSet.updateRow");
  try (Scope scope = op.withSpan()) {
    resultSet.updateRow();
  } catch (Exception exc) {
    op.recordException(exc);
    throw exc;
  } finally {
    op.end();
  }
}
@Override
public void updateRowId(int columnIndex, java.sql.RowId x) throws SQLException {
  // This method doesn't touch the database:
  // https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateRowId-int-java.sql.RowId-
  this.resultSet.updateRowId(columnIndex, x);
}

@Override
public void updateRowId(String columnLabel, java.sql.RowId x) throws SQLException {
  // This method doesn't touch the database:
  // https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateRowId-java.lang.String-java.sql.RowId-
  this.resultSet.updateRowId(columnLabel, x);
}

@Override
public void updateArray(int columnIndex, java.sql.Array x) throws SQLException {
  // This method doesn't touch the database:
  // https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateArray-int-java.sql.Array-
  this.resultSet.updateArray(columnIndex, x);
}

@Override
public void updateArray(String columnLabel, java.sql.Array x) throws SQLException {
  // This method doesn't touch the database:
  // https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateArray-java.lang.String-java.sql.Array-
  this.resultSet.updateArray(columnLabel, x);
}

@Override
public void updateRef(int columnIndex, java.sql.Ref x) throws SQLException {
  // This method doesn't touch the database:
  // https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateRef-int-java.sql.Ref-
  this.resultSet.updateRef(columnIndex, x);
}

@Override
public void updateRef(String columnLabel, java.sql.Ref x) throws SQLException {
  // This method doesn't touch the database:
  // https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateRef-java.lang.String-java.sql.Ref-
  this.resultSet.updateRef(columnLabel, x);
}
@Override
public java.sql.Timestamp getTimestamp(int parameterIndex) throws SQLException {
  // May require a server round trip, so it is tracked as one:
  // https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getTimestamp-int-
  final TrackingOperation op =
      Observability.createRoundtripTrackingSpan("java.sql.ResultSet.getTimestamp");
  try (Scope scope = op.withSpan()) {
    return resultSet.getTimestamp(parameterIndex);
  } catch (Exception exc) {
    op.recordException(exc);
    throw exc;
  } finally {
    op.end();
  }
}
@Override
public java.sql.Timestamp getTimestamp(int parameterIndex, java.util.Calendar cal)
    throws SQLException {
  // May require a server round trip, so it is tracked as one:
  // https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getTimestamp-int-java.util.Calendar-
  final TrackingOperation op =
      Observability.createRoundtripTrackingSpan("java.sql.ResultSet.getTimestamp");
  try (Scope scope = op.withSpan()) {
    return resultSet.getTimestamp(parameterIndex, cal);
  } catch (Exception exc) {
    op.recordException(exc);
    throw exc;
  } finally {
    op.end();
  }
}
@Override
public java.sql.Timestamp getTimestamp(String parameterName) throws SQLException {
  // May require a server round trip, so it is tracked as one:
  // https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getTimestamp-java.lang.String-
  final TrackingOperation op =
      Observability.createRoundtripTrackingSpan("java.sql.ResultSet.getTimestamp");
  try (Scope scope = op.withSpan()) {
    return resultSet.getTimestamp(parameterName);
  } catch (Exception exc) {
    op.recordException(exc);
    throw exc;
  } finally {
    op.end();
  }
}
@Override
public java.sql.Timestamp getTimestamp(String parameterName, java.util.Calendar cal)
    throws SQLException {
  // May require a server round trip, so it is tracked as one:
  // https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getTimestamp-java.lang.String-java.util.Calendar-
  final TrackingOperation op =
      Observability.createRoundtripTrackingSpan("java.sql.ResultSet.getTimestamp");
  try (Scope scope = op.withSpan()) {
    return resultSet.getTimestamp(parameterName, cal);
  } catch (Exception exc) {
    op.recordException(exc);
    throw exc;
  } finally {
    op.end();
  }
}
@Override
public void moveToCurrentRow() throws SQLException {
  // Cursor repositioning may touch the server, so record a round-trip span:
  // https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#moveToCurrentRow--
  final TrackingOperation op =
      Observability.createRoundtripTrackingSpan("java.sql.ResultSet.moveToCurrentRow");
  try (Scope scope = op.withSpan()) {
    resultSet.moveToCurrentRow();
  } catch (Exception exc) {
    op.recordException(exc);
    throw exc;
  } finally {
    op.end();
  }
}
@Override
public void moveToInsertRow() throws SQLException {
  // Cursor repositioning may touch the server, so record a round-trip span:
  // https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#moveToInsertRow--
  final TrackingOperation op =
      Observability.createRoundtripTrackingSpan("java.sql.ResultSet.moveToInsertRow");
  try (Scope scope = op.withSpan()) {
    resultSet.moveToInsertRow();
  } catch (Exception exc) {
    op.recordException(exc);
    throw exc;
  } finally {
    op.end();
  }
}
@Override
public boolean last() throws SQLException {
  // Moving to the last row may fetch from the server, so track it:
  // https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#last--
  final TrackingOperation op =
      Observability.createRoundtripTrackingSpan("java.sql.ResultSet.last");
  try (Scope scope = op.withSpan()) {
    return resultSet.last();
  } catch (Exception exc) {
    op.recordException(exc);
    throw exc;
  } finally {
    op.end();
  }
}
@Override
public void afterLast() throws SQLException {
  // Cursor movement may fetch from the server, so track it:
  // https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#afterLast--
  final TrackingOperation op =
      Observability.createRoundtripTrackingSpan("java.sql.ResultSet.afterLast");
  try (Scope scope = op.withSpan()) {
    resultSet.afterLast();
  } catch (Exception exc) {
    op.recordException(exc);
    throw exc;
  } finally {
    op.end();
  }
}
@Override
public void beforeFirst() throws SQLException {
  // Cursor movement may fetch from the server, so track it:
  // https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#beforeFirst--
  final TrackingOperation op =
      Observability.createRoundtripTrackingSpan("java.sql.ResultSet.beforeFirst");
  try (Scope scope = op.withSpan()) {
    resultSet.beforeFirst();
  } catch (Exception exc) {
    op.recordException(exc);
    throw exc;
  } finally {
    op.end();
  }
}
@Override
public boolean next() throws SQLException {
  // Advancing the cursor may fetch the next batch from the server, so track it:
  // https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#next--
  final TrackingOperation op =
      Observability.createRoundtripTrackingSpan("java.sql.ResultSet.next");
  try (Scope scope = op.withSpan()) {
    return resultSet.next();
  } catch (Exception exc) {
    op.recordException(exc);
    throw exc;
  } finally {
    op.end();
  }
}
@Override
public boolean previous() throws SQLException {
  // Moving the cursor backwards may fetch from the server, so track it:
  // https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#previous--
  final TrackingOperation op =
      Observability.createRoundtripTrackingSpan("java.sql.ResultSet.previous");
  try (Scope scope = op.withSpan()) {
    return resultSet.previous();
  } catch (Exception exc) {
    op.recordException(exc);
    throw exc;
  } finally {
    op.end();
  }
}
@Override
public boolean absolute(int rows) throws SQLException {
  // Jumping to an absolute row may fetch from the server, so track it:
  // https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#absolute-int-
  final TrackingOperation op =
      Observability.createRoundtripTrackingSpan("java.sql.ResultSet.absolute");
  try (Scope scope = op.withSpan()) {
    return resultSet.absolute(rows);
  } catch (Exception exc) {
    op.recordException(exc);
    throw exc;
  } finally {
    op.end();
  }
}
@Override
public int getRow() throws SQLException {
  // Determining the current row number may touch the server, so track it:
  // https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getRow--
  final TrackingOperation op =
      Observability.createRoundtripTrackingSpan("java.sql.ResultSet.getRow");
  try (Scope scope = op.withSpan()) {
    return resultSet.getRow();
  } catch (Exception exc) {
    op.recordException(exc);
    throw exc;
  } finally {
    op.end();
  }
}
@Override
public boolean relative(int rows) throws SQLException {
  // Relative cursor movement may fetch from the server, so track it:
  // https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#relative-int-
  final TrackingOperation op =
      Observability.createRoundtripTrackingSpan("java.sql.ResultSet.relative");
  try (Scope scope = op.withSpan()) {
    return resultSet.relative(rows);
  } catch (Exception exc) {
    op.recordException(exc);
    throw exc;
  } finally {
    op.end();
  }
}
@Override
public void cancelRowUpdates() throws SQLException {
  // Discards the pending row updates — a direct database call:
  // https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#cancelRowUpdates--
  final TrackingOperation op =
      Observability.createRoundtripTrackingSpan("java.sql.ResultSet.cancelRowUpdates");
  try (Scope scope = op.withSpan()) {
    resultSet.cancelRowUpdates();
  } catch (Exception exc) {
    op.recordException(exc);
    throw exc;
  } finally {
    op.end();
  }
}
@Override
public void refreshRow() throws SQLException {
  // Re-reads the current row from the server — a direct database call:
  // https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#refreshRow--
  final TrackingOperation op =
      Observability.createRoundtripTrackingSpan("java.sql.ResultSet.refreshRow");
  try (Scope scope = op.withSpan()) {
    resultSet.refreshRow();
  } catch (Exception exc) {
    op.recordException(exc);
    throw exc;
  } finally {
    op.end();
  }
}
@Override
public java.sql.Statement getStatement() throws SQLException {
  // This method doesn't touch the database:
  // https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getStatement--
  return this.resultSet.getStatement();
}

@Override
public java.sql.NClob getNClob(String parameterName) throws SQLException {
  // This method doesn't touch the database:
  // https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#getNClob-java.lang.String-
  return this.resultSet.getNClob(parameterName);
}

@Override
public void updateNClob(int columnIndex, java.sql.NClob nclob) throws SQLException {
  // This method doesn't touch the database:
  // https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateNClob-int-java.sql.NClob-
  this.resultSet.updateNClob(columnIndex, nclob);
}

@Override
public void updateNClob(int columnIndex, java.io.Reader reader) throws SQLException {
  // This method doesn't touch the database:
  // https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateNClob-int-java.io.Reader-
  this.resultSet.updateNClob(columnIndex, reader);
}

@Override
public void updateNClob(int columnIndex, java.io.Reader reader, long length) throws SQLException {
  // This method doesn't touch the database:
  // https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateNClob-int-java.io.Reader-long-
  this.resultSet.updateNClob(columnIndex, reader, length);
}

@Override
public void updateNClob(String columnLabel, java.sql.NClob nclob) throws SQLException {
  // This method doesn't touch the database:
  // https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateNClob-java.lang.String-java.sql.NClob-
  this.resultSet.updateNClob(columnLabel, nclob);
}

@Override
public void updateNClob(String columnLabel, java.io.Reader reader) throws SQLException {
  // This method doesn't touch the database:
  // https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateNClob-java.lang.String-java.io.Reader-
  this.resultSet.updateNClob(columnLabel, reader);
}

@Override
public void updateNClob(String columnLabel, java.io.Reader reader, long length)
    throws SQLException {
  // This method doesn't touch the database:
  // https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#updateNClob-java.lang.String-java.io.Reader-long-
  this.resultSet.updateNClob(columnLabel, reader, length);
}

@Override
public boolean wasNull() throws SQLException {
  // Reports whether the most recently read column was SQL NULL.
  // This method doesn't touch the database:
  // https://docs.oracle.com/javase/8/docs/api/java/sql/ResultSet.html#wasNull--
  return this.resultSet.wasNull();
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.management.internal;
import java.io.File;
import java.util.Properties;
import java.util.UUID;
import java.util.concurrent.CountDownLatch;
import org.apache.commons.lang.StringUtils;
import org.apache.logging.log4j.Logger;
import org.eclipse.jetty.http.HttpVersion;
import org.eclipse.jetty.server.Connector;
import org.eclipse.jetty.server.HttpConfiguration;
import org.eclipse.jetty.server.HttpConnectionFactory;
import org.eclipse.jetty.server.SecureRequestCustomizer;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.ServerConnector;
import org.eclipse.jetty.server.SslConnectionFactory;
import org.eclipse.jetty.server.handler.AllowSymLinkAliasChecker;
import org.eclipse.jetty.server.handler.HandlerCollection;
import org.eclipse.jetty.util.ssl.SslContextFactory;
import org.eclipse.jetty.webapp.WebAppContext;
import org.apache.geode.GemFireConfigException;
import org.apache.geode.internal.admin.SSLConfig;
import org.apache.geode.internal.logging.LogService;
import org.apache.geode.internal.security.SecurityService;
/**
* @since GemFire 8.1
*/
/**
 * Creates and configures an embedded Jetty server that hosts the GemFire management web
 * applications (e.g. Pulse and the management REST API), with optional SSL.
 *
 * @since GemFire 8.1
 */
@SuppressWarnings("unused")
public class JettyHelper {
  private static final Logger logger = LogService.getLogger();

  private static final String FILE_PATH_SEPARATOR = System.getProperty("file.separator");
  private static final String USER_DIR = System.getProperty("user.dir");
  private static final String USER_NAME = System.getProperty("user.name");

  private static final String HTTPS = "https";

  // NOTE(review): mutated by initJetty() and read by getWebAppBaseDirectory(); this
  // assumes a single Jetty instance per JVM — confirm if multiple servers are created.
  private static String bindAddress = "0.0.0.0";
  private static int port = 0;

  public static final String SECURITY_SERVICE_SERVLET_CONTEXT_PARAM =
      "org.apache.geode.securityService";

  private static final String GEODE_SSLCONFIG_SERVLET_CONTEXT_PARAM = "org.apache.geode.sslConfig";

  /**
   * Creates (but does not start) a Jetty {@link Server} with a single connector. When
   * {@code sslConfig} is enabled an HTTPS connector is built from the supplied
   * keystore/truststore settings; otherwise a plain HTTP connector is used.
   *
   * @param bindAddress host/interface to bind to; blank means all interfaces
   * @param port listen port for the connector (also used as the "secure port")
   * @param sslConfig SSL settings; must contain a keystore when SSL is enabled
   * @return the configured, not-yet-started server
   * @throws GemFireConfigException if SSL is enabled but no keystore is configured
   */
  public static Server initJetty(final String bindAddress, final int port, SSLConfig sslConfig) {
    final Server jettyServer = new Server();

    // Add a handler collection here, so that each new context adds itself
    // to this collection.
    jettyServer.setHandler(new HandlerCollection());
    ServerConnector connector = null;

    HttpConfiguration httpConfig = new HttpConfiguration();
    httpConfig.setSecureScheme(HTTPS);
    httpConfig.setSecurePort(port);

    if (sslConfig.isEnabled()) {
      SslContextFactory sslContextFactory = new SslContextFactory();

      if (StringUtils.isNotBlank(sslConfig.getAlias())) {
        sslContextFactory.setCertAlias(sslConfig.getAlias());
      }

      sslContextFactory.setNeedClientAuth(sslConfig.isRequireAuth());

      if (StringUtils.isNotBlank(sslConfig.getCiphers())
          && !"any".equalsIgnoreCase(sslConfig.getCiphers())) {
        // Clear Jetty's default cipher exclusions so the explicit include list wins.
        sslContextFactory.setExcludeCipherSuites();
        sslContextFactory.setIncludeCipherSuites(SSLUtil.readArray(sslConfig.getCiphers()));
      }

      String protocol = SSLUtil.getSSLAlgo(SSLUtil.readArray(sslConfig.getProtocols()));
      if (protocol != null) {
        sslContextFactory.setProtocol(protocol);
      } else {
        logger.warn(ManagementStrings.SSL_PROTOCOAL_COULD_NOT_BE_DETERMINED);
      }

      // A keystore is mandatory for an HTTPS connector; fail fast with a clear message.
      if (StringUtils.isBlank(sslConfig.getKeystore())) {
        throw new GemFireConfigException(
            "Key store can't be empty if SSL is enabled for HttpService");
      }

      sslContextFactory.setKeyStorePath(sslConfig.getKeystore());

      if (StringUtils.isNotBlank(sslConfig.getKeystoreType())) {
        sslContextFactory.setKeyStoreType(sslConfig.getKeystoreType());
      }

      if (StringUtils.isNotBlank(sslConfig.getKeystorePassword())) {
        sslContextFactory.setKeyStorePassword(sslConfig.getKeystorePassword());
      }

      if (StringUtils.isNotBlank(sslConfig.getTruststore())) {
        sslContextFactory.setTrustStorePath(sslConfig.getTruststore());
      }

      if (StringUtils.isNotBlank(sslConfig.getTruststorePassword())) {
        sslContextFactory.setTrustStorePassword(sslConfig.getTruststorePassword());
      }

      if (StringUtils.isNotBlank(sslConfig.getTruststoreType())) {
        sslContextFactory.setTrustStoreType(sslConfig.getTruststoreType());
      }

      if (logger.isDebugEnabled()) {
        logger.debug(sslContextFactory.dump());
      }
      httpConfig.addCustomizer(new SecureRequestCustomizer());

      // Somehow With HTTP_2.0 Jetty throwing NPE. Need to investigate further whether all GemFire
      // web application(Pulse, REST) can do with HTTP_1.1
      connector = new ServerConnector(jettyServer,
          new SslConnectionFactory(sslContextFactory, HttpVersion.HTTP_1_1.asString()),
          new HttpConnectionFactory(httpConfig));

      connector.setPort(port);
    } else {
      connector = new ServerConnector(jettyServer, new HttpConnectionFactory(httpConfig));

      connector.setPort(port);
    }

    jettyServer.setConnectors(new Connector[] {connector});

    if (StringUtils.isNotBlank(bindAddress)) {
      connector.setHost(bindAddress);
    }

    // Remember the effective bind address and port for getWebAppBaseDirectory().
    if (bindAddress != null && !bindAddress.isEmpty()) {
      JettyHelper.bindAddress = bindAddress;
    }

    JettyHelper.port = port;

    return jettyServer;
  }

  /**
   * Starts the given server and returns it (convenience for call chaining).
   *
   * @throws Exception if Jetty fails to start (e.g. the port is already in use)
   */
  public static Server startJetty(final Server jetty) throws Exception {
    jetty.start();
    return jetty;
  }

  /**
   * Deploys a WAR file as a web application under the given context path and registers it with the
   * server's handler collection.
   *
   * @param jetty the server created by {@link #initJetty}
   * @param webAppContext context path for the application (e.g. {@code /pulse})
   * @param warFilePath location of the WAR file to deploy
   * @param securityService exposed to the webapp via a servlet-context attribute
   * @param sslConfig exposed to the webapp via a servlet-context attribute
   * @return the same server, for chaining
   */
  public static Server addWebApplication(final Server jetty, final String webAppContext,
      final String warFilePath, SecurityService securityService, Properties sslConfig) {
    WebAppContext webapp = new WebAppContext();
    webapp.setContextPath(webAppContext);
    webapp.setWar(warFilePath);
    webapp.setParentLoaderPriority(false);
    webapp.setInitParameter("org.eclipse.jetty.servlet.Default.dirAllowed", "false");
    webapp.setAttribute(SECURITY_SERVICE_SERVLET_CONTEXT_PARAM, securityService);

    webapp.addAliasCheck(new AllowSymLinkAliasChecker());

    // This is only required for Pulse because in embedded mode, with SSL enabled, Pulse needs to
    // know how to make SSL RMI connections.
    webapp.setAttribute(GEODE_SSLCONFIG_SERVLET_CONTEXT_PARAM, sslConfig);

    File tmpPath = new File(getWebAppBaseDirectory(webAppContext));
    // mkdirs() returns false both on failure and when the directory already exists,
    // so only warn when the directory is genuinely missing afterwards. Previously the
    // return value was silently discarded, hiding deployment failures.
    if (!tmpPath.mkdirs() && !tmpPath.isDirectory()) {
      logger.warn("Failed to create temporary directory {} for web application {}",
          tmpPath.getAbsolutePath(), webAppContext);
    }
    webapp.setTempDirectory(tmpPath);
    ((HandlerCollection) jetty.getHandler()).addHandler(webapp);

    return jetty;
  }

  /**
   * Computes a per-deployment temp directory of the form
   * {@code <user.dir>/GemFire_<user>/services/http/<bindAddress>_<port><context>_<uuid>}.
   * The random UUID suffix keeps concurrent deployments from colliding.
   */
  private static String getWebAppBaseDirectory(final String context) {
    String underscoredContext = context.replace("/", "_");
    String uuid = UUID.randomUUID().toString().substring(0, 8);
    final String workingDirectory = USER_DIR.concat(FILE_PATH_SEPARATOR)
        .concat("GemFire_" + USER_NAME).concat(FILE_PATH_SEPARATOR).concat("services")
        .concat(FILE_PATH_SEPARATOR).concat("http").concat(FILE_PATH_SEPARATOR)
        .concat((StringUtils.isBlank(bindAddress)) ? "0.0.0.0" : bindAddress).concat("_")
        .concat(String.valueOf(port).concat(underscoredContext)).concat("_").concat(uuid);

    return workingDirectory;
  }

  // NOTE(review): unused in this file; possibly referenced historically or via tests.
  private static final CountDownLatch latch = new CountDownLatch(1);

  /** Resolves a relative WAR path against the current directory; absolute paths pass through. */
  private static String normalizeWebAppArchivePath(final String webAppArchivePath) {
    return (webAppArchivePath.startsWith(File.separator) ? new File(webAppArchivePath)
        : new File(".", webAppArchivePath)).getAbsolutePath();
  }

  /** Ensures the context path starts with a leading slash. */
  private static String normalizeWebAppContext(final String webAppContext) {
    return (webAppContext.startsWith("/") ? webAppContext : "/" + webAppContext);
  }
}
| |
package ca.ualberta.cs.cmput301t03app.views;
import java.util.ArrayList;
import ca.ualberta.cs.cmput301t03app.R;
import ca.ualberta.cs.cmput301t03app.adapters.MainListAdapter;
import ca.ualberta.cs.cmput301t03app.controllers.PostController;
import ca.ualberta.cs.cmput301t03app.models.Question;
import android.os.Bundle;
import android.app.Activity;
import android.content.Intent;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.AdapterView;
import android.widget.ListView;
import android.widget.TextView;
import android.widget.AdapterView.OnItemClickListener;
/**
*
* This is the activity for looking user selected set/list of questions as
* selected from the User Home activity. It is started by the User Home activity
* which passes an extra message in the intent that is used to determine
* which local saved list to load. Can show a list of favorite questions,
* cached questions, to be read questions and user posted questions. If a
* selected list from the User Home activity is empty, then the ListView is
* empty in this activity
*
*/
public class UserListsActivity extends Activity
{
  // Which saved list to show: 0=favorites, 1=cached/read, 2=to-read, 3=my questions
  // (taken from the "userListMode" intent extra).
  private int userListMode;
  private TextView user_list_title;
  private PostController pc = new PostController(this);
  private MainListAdapter mla;
  private ArrayList<Question> userQuestionList;
  private ListView userListView;

  /**
   * Aside from the standard onCreate, this method will also get the extra from the intent
   * (the extra is called userListMode) and will:<br>
   * - Display the corresponding header for the list to be displayed based on the intent extra value.<br>
   * - Get the corresponding list from the post controller based on the intent extra value.<br><br>
   *
   * A new list adaptor (activity_main_question_entity) is created and the ListView is set with this
   * new list adaptor.
   *
   * A new listener for the adaptor is also created.
   */
  @Override
  protected void onCreate(Bundle savedInstanceState)
  {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_user_lists);

    /* Removes the actionbar title text */
    getActionBar().setDisplayShowTitleEnabled(false);

    // NOTE(review): assumes the launching intent always carries extras; getExtras()
    // returns null otherwise — confirm against the User Home activity's intent.
    Bundle extras = getIntent().getExtras();
    userListMode = extras.getInt("userListMode");

    user_list_title = (TextView) findViewById(R.id.user_list_title);
    userListView = (ListView) findViewById(R.id.user_question_list);

    refreshUserQuestionList();
    setListeners();
  }

  /**
   * Loads the list selected by {@link #userListMode} from the post controller, sets the
   * matching header text, and (re)binds a fresh adapter to the ListView. Shared by
   * {@link #onCreate} and {@link #onResume}, which previously duplicated this logic.
   */
  private void refreshUserQuestionList()
  {
    switch (userListMode)
    {
      case 1:
        user_list_title.setText("C A C H E D");
        userQuestionList = pc.getReadQuestions();
        break;
      case 2:
        user_list_title.setText("T O R E A D");
        userQuestionList = pc.getToReadQuestions();
        break;
      case 3:
        user_list_title.setText("M Y Q U E S T I O N S");
        userQuestionList = pc.getUserPostedQuestions();
        break;
      case 0:
      default:
        // Mode 0 and any unrecognized mode both show favorites.
        user_list_title.setText("F A V O R I T E S");
        userQuestionList = pc.getFavoriteQuestions();
        break;
    }
    mla = new MainListAdapter(this, R.layout.activity_main_question_entity,
        userQuestionList);
    userListView.setAdapter(mla);
  }

  /**
   * Setting a onItemClickListener for the adaptor which will allow the user to interact
   * with the Question object placed inside the adaptor.
   */
  public void setListeners()
  {
    userListView.setOnItemClickListener(new OnItemClickListener()
    {
      /**
       * When the Question object is clicked, the position of the adaptor in
       * the list view is determined and passed as an argument to the toQuestionActivity method.
       */
      @Override
      public void onItemClick(AdapterView<?> parent, View view,
          final int position, long id)
      {
        toQuestionActivity(position);
      }
    });
  }

  @Override
  public boolean onCreateOptionsMenu(Menu menu)
  {
    // Inflate the menu; this adds items to the action bar if it is present.
    getMenuInflater().inflate(R.menu.user_lists, menu);
    getActionBar().setHomeButtonEnabled(true);
    return true;
  }

  /**
   * Reloads the list when the user returns from viewing a question, so any changes
   * (e.g. newly favorited questions) are reflected immediately.
   */
  @Override
  public void onResume() {
    super.onResume();
    refreshUserQuestionList();
  }

  @Override
  public boolean onOptionsItemSelected(MenuItem item) {
    // Handle action bar item clicks here. The action bar will
    // automatically handle clicks on the Home/Up button, so long
    // as you specify a parent activity in AndroidManifest.xml.
    switch (item.getItemId()) {
      case android.R.id.home:
        Intent intent = new Intent(this, MainActivity.class);
        intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
        startActivity(intent);
        break;
    }
    return (super.onOptionsItemSelected(item));
  }

  /**
   * This method is called when a Question object is clicked. The Question's position
   * in the list is used to determine which Question is selected and to get that Question's
   * ID.<br><br>
   *
   * Checks if the Question is in the PC's (post controller) sublist of questions; if it is not,
   * then the Question is added to the sublist. NOTE: This is a quick work-around for a bug, as
   * the question needs to exist in the PC's sublist in order to view it. BEING WORKED ON.<br><br>
   *
   * Starts the ViewQuestion activity with the ID passed as an extra in the intent (the ID is used
   * to determine which Question's detail to display).
   *
   * @param position is the position of the Question in the ListView that was clicked on
   */
  public void toQuestionActivity(int position)
  {
    Intent i = new Intent(this, ViewQuestion.class);
    String qId = userQuestionList.get(position).getId();
    i.putExtra("question_id", qId);
    if (pc.getQuestion(qId) == null) {
      pc.getQuestionsInstance().add(userQuestionList.get(position));
    }
    //pc.addReadQuestion(userQuestionList.get(position));
    startActivity(i);
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.controller;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.File;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.Callable;
import java.util.concurrent.Future;
import java.util.concurrent.ScheduledExecutorService;
import org.apache.nifi.annotation.lifecycle.OnScheduled;
import org.apache.nifi.annotation.lifecycle.OnStopped;
import org.apache.nifi.annotation.lifecycle.OnUnscheduled;
import org.apache.nifi.bundle.Bundle;
import org.apache.nifi.bundle.BundleCoordinate;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.components.PropertyValue;
import org.apache.nifi.components.ValidationContext;
import org.apache.nifi.controller.exception.ControllerServiceInstantiationException;
import org.apache.nifi.controller.exception.ProcessorInstantiationException;
import org.apache.nifi.controller.reporting.ReportingTaskInstantiationException;
import org.apache.nifi.controller.service.ControllerServiceNode;
import org.apache.nifi.engine.FlowEngine;
import org.apache.nifi.expression.ExpressionLanguageCompiler;
import org.apache.nifi.nar.ExtensionManager;
import org.apache.nifi.nar.NarCloseable;
import org.apache.nifi.nar.SystemBundle;
import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.Processor;
import org.apache.nifi.processor.ProcessorInitializationContext;
import org.apache.nifi.processor.StandardProcessContext;
import org.apache.nifi.processor.StandardProcessorInitializationContext;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.util.StandardValidators;
import org.apache.nifi.registry.VariableDescriptor;
import org.apache.nifi.registry.VariableRegistry;
import org.apache.nifi.registry.variable.StandardComponentVariableRegistry;
import org.apache.nifi.test.processors.ModifiesClasspathNoAnnotationProcessor;
import org.apache.nifi.test.processors.ModifiesClasspathProcessor;
import org.apache.nifi.util.MockPropertyValue;
import org.apache.nifi.util.MockVariableRegistry;
import org.apache.nifi.util.NiFiProperties;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mockito;
public class TestStandardProcessorNode {
private MockVariableRegistry variableRegistry;
@Before
public void setup() {
variableRegistry = new MockVariableRegistry();
}
@Test(timeout = 10000)
public void testStart() throws InterruptedException {
final ProcessorThatThrowsExceptionOnScheduled processor = new ProcessorThatThrowsExceptionOnScheduled();
final String uuid = UUID.randomUUID().toString();
ProcessorInitializationContext initContext = new StandardProcessorInitializationContext(uuid, null, null, null, null);
processor.initialize(initContext);
final ReloadComponent reloadComponent = Mockito.mock(ReloadComponent.class);
final BundleCoordinate coordinate = Mockito.mock(BundleCoordinate.class);
final LoggableComponent<Processor> loggableComponent = new LoggableComponent<>(processor, coordinate, null);
final StandardProcessorNode procNode = new StandardProcessorNode(loggableComponent, uuid, createValidationContextFactory(), null, null,
NiFiProperties.createBasicNiFiProperties(null, null), new StandardComponentVariableRegistry(VariableRegistry.EMPTY_REGISTRY), reloadComponent);
final ScheduledExecutorService taskScheduler = new FlowEngine(1, "TestClasspathResources", true);
final StandardProcessContext processContext = new StandardProcessContext(procNode, null, null, null, () -> false);
final SchedulingAgentCallback schedulingAgentCallback = new SchedulingAgentCallback() {
@Override
public void onTaskComplete() {
}
@Override
public Future<?> scheduleTask(final Callable<?> task) {
return taskScheduler.submit(task);
}
@Override
public void trigger() {
Assert.fail("Should not have completed");
}
};
procNode.start(taskScheduler, 20000L, processContext, schedulingAgentCallback, true);
Thread.sleep(1000L);
assertEquals(1, processor.onScheduledCount);
assertEquals(1, processor.onUnscheduledCount);
assertEquals(1, processor.onStoppedCount);
}
@Test
public void testDisabledValidationErrors() {
final MockReloadComponent reloadComponent = new MockReloadComponent();
final ModifiesClasspathNoAnnotationProcessor processor = new ModifiesClasspathNoAnnotationProcessor();
final StandardProcessorNode procNode = createProcessorNode(processor, reloadComponent);
// Set a property to an invalid value
final Map<String, String> properties = new HashMap<>();
properties.put(ModifiesClasspathNoAnnotationProcessor.CLASSPATH_RESOURCE.getName(), "");
procNode.setProperties(properties);
Assert.assertTrue(procNode.getValidationErrors().size() > 0);
// Disabled processors skip property validation
procNode.disable();
Assert.assertFalse(procNode.getValidationErrors().size() > 0);
}
@Test
public void testSinglePropertyDynamicallyModifiesClasspath() throws MalformedURLException {
final MockReloadComponent reloadComponent = new MockReloadComponent();
final PropertyDescriptor classpathProp = new PropertyDescriptor.Builder().name("Classpath Resources")
.dynamicallyModifiesClasspath(true).addValidator(StandardValidators.NON_EMPTY_VALIDATOR).build();
final ModifiesClasspathProcessor processor = new ModifiesClasspathProcessor(Arrays.asList(classpathProp));
final StandardProcessorNode procNode = createProcessorNode(processor, reloadComponent);
try (final NarCloseable narCloseable = NarCloseable.withComponentNarLoader(procNode.getProcessor().getClass(), procNode.getIdentifier())){
// Should not have any of the test resources loaded at this point
final URL[] testResources = getTestResources();
for (URL testResource : testResources) {
if (containsResource(reloadComponent.getAdditionalUrls(), testResource)) {
fail("found resource that should not have been loaded");
}
}
// Simulate setting the properties of the processor to point to the test resources directory
final Map<String, String> properties = new HashMap<>();
properties.put(classpathProp.getName(), "src/test/resources/TestClasspathResources");
procNode.setProperties(properties);
// Should have all of the resources loaded into the InstanceClassLoader now
for (URL testResource : testResources) {
assertTrue(containsResource(reloadComponent.getAdditionalUrls(), testResource));
}
assertEquals(ModifiesClasspathProcessor.class.getCanonicalName(), reloadComponent.getNewType());
// Should pass validation
assertTrue(procNode.isValid());
} finally {
ExtensionManager.removeInstanceClassLoader(procNode.getIdentifier());
}
}
@Test
public void testUpdateOtherPropertyDoesNotImpactClasspath() throws MalformedURLException {
final MockReloadComponent reloadComponent = new MockReloadComponent();
final PropertyDescriptor classpathProp = new PropertyDescriptor.Builder().name("Classpath Resources")
.dynamicallyModifiesClasspath(true).addValidator(StandardValidators.NON_EMPTY_VALIDATOR).build();
final PropertyDescriptor otherProp = new PropertyDescriptor.Builder().name("My Property")
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR).build();
final ModifiesClasspathProcessor processor = new ModifiesClasspathProcessor(Arrays.asList(classpathProp, otherProp));
final StandardProcessorNode procNode = createProcessorNode(processor, reloadComponent);
try (final NarCloseable narCloseable = NarCloseable.withComponentNarLoader(procNode.getProcessor().getClass(), procNode.getIdentifier())){
// Should not have any of the test resources loaded at this point
final URL[] testResources = getTestResources();
for (URL testResource : testResources) {
if (containsResource(reloadComponent.getAdditionalUrls(), testResource)) {
fail("found resource that should not have been loaded");
}
}
// Simulate setting the properties of the processor to point to the test resources directory
final Map<String, String> properties = new HashMap<>();
properties.put(classpathProp.getName(), "src/test/resources/TestClasspathResources");
procNode.setProperties(properties);
// Should have all of the resources loaded into the InstanceClassLoader now
for (URL testResource : testResources) {
assertTrue(containsResource(reloadComponent.getAdditionalUrls(), testResource));
}
// Should pass validation
assertTrue(procNode.isValid());
// Simulate setting updating the other property which should not change the classpath
final Map<String, String> otherProperties = new HashMap<>();
otherProperties.put(otherProp.getName(), "foo");
procNode.setProperties(otherProperties);
// Should STILL have all of the resources loaded into the InstanceClassLoader now
for (URL testResource : testResources) {
assertTrue(containsResource(reloadComponent.getAdditionalUrls(), testResource));
}
// Should STILL pass validation
assertTrue(procNode.isValid());
// Lets update the classpath property and make sure the resources get updated
final Map<String, String> newClasspathProperties = new HashMap<>();
newClasspathProperties.put(classpathProp.getName(), "src/test/resources/TestClasspathResources/resource1.txt");
procNode.setProperties(newClasspathProperties);
// Should only have resource1 loaded now
assertTrue(containsResource(reloadComponent.getAdditionalUrls(), testResources[0]));
assertFalse(containsResource(reloadComponent.getAdditionalUrls(), testResources[1]));
assertFalse(containsResource(reloadComponent.getAdditionalUrls(), testResources[2]));
assertEquals(ModifiesClasspathProcessor.class.getCanonicalName(), reloadComponent.getNewType());
// Should STILL pass validation
assertTrue(procNode.isValid());
} finally {
ExtensionManager.removeInstanceClassLoader(procNode.getIdentifier());
}
}
@Test
public void testMultiplePropertiesDynamicallyModifyClasspathWithExpressionLanguage() throws MalformedURLException {
final MockReloadComponent reloadComponent = new MockReloadComponent();
final PropertyDescriptor classpathProp1 = new PropertyDescriptor.Builder().name("Classpath Resource 1")
.dynamicallyModifiesClasspath(true).addValidator(StandardValidators.NON_EMPTY_VALIDATOR).build();
final PropertyDescriptor classpathProp2 = new PropertyDescriptor.Builder().name("Classpath Resource 2")
.dynamicallyModifiesClasspath(true).addValidator(StandardValidators.NON_EMPTY_VALIDATOR).build();
final ModifiesClasspathProcessor processor = new ModifiesClasspathProcessor(Arrays.asList(classpathProp1, classpathProp2));
final StandardProcessorNode procNode = createProcessorNode(processor, reloadComponent);
try (final NarCloseable narCloseable = NarCloseable.withComponentNarLoader(procNode.getProcessor().getClass(), procNode.getIdentifier())){
// Should not have any of the test resources loaded at this point
final URL[] testResources = getTestResources();
for (URL testResource : testResources) {
if (containsResource(reloadComponent.getAdditionalUrls(), testResource)) {
fail("found resource that should not have been loaded");
}
}
// Simulate setting the properties pointing to two of the resources
final Map<String, String> properties = new HashMap<>();
properties.put(classpathProp1.getName(), "src/test/resources/TestClasspathResources/resource1.txt");
properties.put(classpathProp2.getName(), "src/test/resources/TestClasspathResources/${myResource}");
variableRegistry.setVariable(new VariableDescriptor("myResource"), "resource3.txt");
procNode.setProperties(properties);
// Should have resources 1 and 3 loaded into the InstanceClassLoader now
assertTrue(containsResource(reloadComponent.getAdditionalUrls(), testResources[0]));
assertTrue(containsResource(reloadComponent.getAdditionalUrls(), testResources[2]));
assertFalse(containsResource(reloadComponent.getAdditionalUrls(), testResources[1]));
assertEquals(ModifiesClasspathProcessor.class.getCanonicalName(), reloadComponent.getNewType());
// Should pass validation
assertTrue(procNode.isValid());
} finally {
ExtensionManager.removeInstanceClassLoader(procNode.getIdentifier());
}
}
@Test
public void testSomeNonExistentPropertiesDynamicallyModifyClasspath() throws MalformedURLException {
final MockReloadComponent reloadComponent = new MockReloadComponent();
final PropertyDescriptor classpathProp1 = new PropertyDescriptor.Builder().name("Classpath Resource 1")
.dynamicallyModifiesClasspath(true).addValidator(StandardValidators.NON_EMPTY_VALIDATOR).build();
final PropertyDescriptor classpathProp2 = new PropertyDescriptor.Builder().name("Classpath Resource 2")
.dynamicallyModifiesClasspath(true).addValidator(StandardValidators.NON_EMPTY_VALIDATOR).build();
final ModifiesClasspathProcessor processor = new ModifiesClasspathProcessor(Arrays.asList(classpathProp1, classpathProp2));
final StandardProcessorNode procNode = createProcessorNode(processor, reloadComponent);
try (final NarCloseable narCloseable = NarCloseable.withComponentNarLoader(procNode.getProcessor().getClass(), procNode.getIdentifier())){
// Should not have any of the test resources loaded at this point
final URL[] testResources = getTestResources();
for (URL testResource : testResources) {
if (containsResource(reloadComponent.getAdditionalUrls(), testResource)) {
fail("found resource that should not have been loaded");
}
}
// Simulate setting the properties pointing to two of the resources
final Map<String, String> properties = new HashMap<>();
properties.put(classpathProp1.getName(), "src/test/resources/TestClasspathResources/resource1.txt");
properties.put(classpathProp2.getName(), "src/test/resources/TestClasspathResources/DoesNotExist.txt");
procNode.setProperties(properties);
// Should have resources 1 and 3 loaded into the InstanceClassLoader now
assertTrue(containsResource(reloadComponent.getAdditionalUrls(), testResources[0]));
assertFalse(containsResource(reloadComponent.getAdditionalUrls(), testResources[1]));
assertFalse(containsResource(reloadComponent.getAdditionalUrls(), testResources[2]));
assertEquals(ModifiesClasspathProcessor.class.getCanonicalName(), reloadComponent.getNewType());
// Should pass validation
assertTrue(procNode.isValid());
} finally {
ExtensionManager.removeInstanceClassLoader(procNode.getIdentifier());
}
}
@Test
public void testPropertyModifiesClasspathWhenProcessorMissingAnnotation() throws MalformedURLException {
final MockReloadComponent reloadComponent = new MockReloadComponent();
final ModifiesClasspathNoAnnotationProcessor processor = new ModifiesClasspathNoAnnotationProcessor();
final StandardProcessorNode procNode = createProcessorNode(processor, reloadComponent);
try (final NarCloseable narCloseable = NarCloseable.withComponentNarLoader(procNode.getProcessor().getClass(), procNode.getIdentifier())){
final Map<String, String> properties = new HashMap<>();
properties.put(ModifiesClasspathNoAnnotationProcessor.CLASSPATH_RESOURCE.getName(),
"src/test/resources/TestClasspathResources/resource1.txt");
procNode.setProperties(properties);
final URL[] testResources = getTestResources();
assertTrue(containsResource(reloadComponent.getAdditionalUrls(), testResources[0]));
assertFalse(containsResource(reloadComponent.getAdditionalUrls(), testResources[1]));
assertFalse(containsResource(reloadComponent.getAdditionalUrls(), testResources[2]));
assertEquals(ModifiesClasspathNoAnnotationProcessor.class.getCanonicalName(), reloadComponent.getNewType());
// Should pass validation
assertTrue(procNode.isValid());
} finally {
ExtensionManager.removeInstanceClassLoader(procNode.getIdentifier());
}
}
@Test
public void testVerifyCanUpdateBundle() {
final ReloadComponent reloadComponent = new MockReloadComponent();
final ModifiesClasspathNoAnnotationProcessor processor = new ModifiesClasspathNoAnnotationProcessor();
final StandardProcessorNode procNode = createProcessorNode(processor, reloadComponent);
final BundleCoordinate existingCoordinate = procNode.getBundleCoordinate();
// should be allowed to update when the bundle is the same
procNode.verifyCanUpdateBundle(existingCoordinate);
// should be allowed to update when the group and id are the same but version is different
final BundleCoordinate diffVersion = new BundleCoordinate(existingCoordinate.getGroup(), existingCoordinate.getId(), "v2");
assertTrue(!existingCoordinate.getVersion().equals(diffVersion.getVersion()));
procNode.verifyCanUpdateBundle(diffVersion);
// should not be allowed to update when the bundle id is different
final BundleCoordinate diffId = new BundleCoordinate(existingCoordinate.getGroup(), "different-id", existingCoordinate.getVersion());
assertTrue(!existingCoordinate.getId().equals(diffId.getId()));
try {
procNode.verifyCanUpdateBundle(diffId);
Assert.fail("Should have thrown exception");
} catch (Exception e) {
}
// should not be allowed to update when the bundle group is different
final BundleCoordinate diffGroup = new BundleCoordinate("different-group", existingCoordinate.getId(), existingCoordinate.getVersion());
assertTrue(!existingCoordinate.getGroup().equals(diffGroup.getGroup()));
try {
procNode.verifyCanUpdateBundle(diffGroup);
Assert.fail("Should have thrown exception");
} catch (Exception e) {
}
}
private StandardProcessorNode createProcessorNode(final Processor processor, final ReloadComponent reloadComponent) {
final String uuid = UUID.randomUUID().toString();
final ValidationContextFactory validationContextFactory = createValidationContextFactory();
final NiFiProperties niFiProperties = NiFiProperties.createBasicNiFiProperties("src/test/resources/conf/nifi.properties", null);
final ProcessScheduler processScheduler = Mockito.mock(ProcessScheduler.class);
final TerminationAwareLogger componentLog = Mockito.mock(TerminationAwareLogger.class);
final Bundle systemBundle = SystemBundle.create(niFiProperties);
ExtensionManager.discoverExtensions(systemBundle, Collections.emptySet());
ExtensionManager.createInstanceClassLoader(processor.getClass().getName(), uuid, systemBundle, null);
ProcessorInitializationContext initContext = new StandardProcessorInitializationContext(uuid, componentLog, null, null, null);
processor.initialize(initContext);
final LoggableComponent<Processor> loggableComponent = new LoggableComponent<>(processor, systemBundle.getBundleDetails().getCoordinate(), componentLog);
return new StandardProcessorNode(loggableComponent, uuid, validationContextFactory, processScheduler,
null, niFiProperties, new StandardComponentVariableRegistry(variableRegistry), reloadComponent);
}
private static class MockReloadComponent implements ReloadComponent {
private String newType;
private final Set<URL> additionalUrls = new LinkedHashSet<>();
public Set<URL> getAdditionalUrls() {
return this.additionalUrls;
}
public String getNewType() {
return newType;
}
@Override
public void reload(ProcessorNode existingNode, String newType, BundleCoordinate bundleCoordinate, Set<URL> additionalUrls) throws ProcessorInstantiationException {
reload(newType, additionalUrls);
}
@Override
public void reload(ControllerServiceNode existingNode, String newType, BundleCoordinate bundleCoordinate, Set<URL> additionalUrls) throws ControllerServiceInstantiationException {
reload(newType, additionalUrls);
}
@Override
public void reload(ReportingTaskNode existingNode, String newType, BundleCoordinate bundleCoordinate, Set<URL> additionalUrls) throws ReportingTaskInstantiationException {
reload(newType, additionalUrls);
}
private void reload(String newType, Set<URL> additionalUrls) {
this.newType = newType;
this.additionalUrls.clear();
if (additionalUrls != null) {
this.additionalUrls.addAll(additionalUrls);
}
}
}
private boolean containsResource(Set<URL> resources, URL resourceToFind) {
for (URL resource : resources) {
if (resourceToFind.getPath().equals(resource.getPath())) {
return true;
}
}
return false;
}
private URL[] getTestResources() throws MalformedURLException {
URL resource1 = new File("src/test/resources/TestClasspathResources/resource1.txt").toURI().toURL();
URL resource2 = new File("src/test/resources/TestClasspathResources/resource2.txt").toURI().toURL();
URL resource3 = new File("src/test/resources/TestClasspathResources/resource3.txt").toURI().toURL();
return new URL[] { resource1, resource2, resource3 };
}
private ValidationContextFactory createValidationContextFactory() {
return new ValidationContextFactory() {
@Override
public ValidationContext newValidationContext(Map<PropertyDescriptor, String> properties, String annotationData, String groupId, String componentId) {
return new ValidationContext() {
@Override
public ControllerServiceLookup getControllerServiceLookup() {
return null;
}
@Override
public ValidationContext getControllerServiceValidationContext(ControllerService controllerService) {
return null;
}
@Override
public ExpressionLanguageCompiler newExpressionLanguageCompiler() {
return null;
}
@Override
public PropertyValue getProperty(PropertyDescriptor property) {
return newPropertyValue(properties.get(property));
}
@Override
public PropertyValue newPropertyValue(String value) {
return new MockPropertyValue(value);
}
@Override
public Map<PropertyDescriptor, String> getProperties() {
return Collections.unmodifiableMap(properties);
}
@Override
public Map<String, String> getAllProperties() {
final Map<String,String> propValueMap = new LinkedHashMap<>();
for (final Map.Entry<PropertyDescriptor, String> entry : getProperties().entrySet()) {
propValueMap.put(entry.getKey().getName(), entry.getValue());
}
return propValueMap;
}
@Override
public String getAnnotationData() {
return null;
}
@Override
public boolean isValidationRequired(ControllerService service) {
return false;
}
@Override
public boolean isExpressionLanguagePresent(String value) {
return false;
}
@Override
public boolean isExpressionLanguageSupported(String propertyName) {
return false;
}
@Override
public String getProcessGroupIdentifier() {
return groupId;
}
};
}
@Override
public ValidationContext newValidationContext(Set<String> serviceIdentifiersToNotValidate, Map<PropertyDescriptor, String> properties, String annotationData, String groupId,
String componentId) {
return newValidationContext(properties, annotationData, groupId, componentId);
}
};
}
public static class ProcessorThatThrowsExceptionOnScheduled extends AbstractProcessor {
private int onScheduledCount = 0;
private int onUnscheduledCount = 0;
private int onStoppedCount = 0;
@Override
public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
}
@OnScheduled
public void onScheduled() {
onScheduledCount++;
throw new ProcessException("OnScheduled called - Unit Test throws Exception intentionally");
}
@OnUnscheduled
public void onUnscheduled() {
onUnscheduledCount++;
}
@OnStopped
public void onStopped() {
onStoppedCount++;
}
}
}
| |
/*
* Copyright 2017 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.server.service;
import com.rits.cloning.Cloner;
import com.thoughtworks.go.config.*;
import com.thoughtworks.go.config.commands.EntityConfigUpdateCommand;
import com.thoughtworks.go.config.exceptions.*;
import com.thoughtworks.go.config.materials.MaterialConfigs;
import com.thoughtworks.go.config.materials.dependency.DependencyMaterialConfig;
import com.thoughtworks.go.config.registry.ConfigElementImplementationRegistry;
import com.thoughtworks.go.config.remote.ConfigOrigin;
import com.thoughtworks.go.config.update.*;
import com.thoughtworks.go.config.validation.GoConfigValidity;
import com.thoughtworks.go.domain.*;
import com.thoughtworks.go.domain.config.Admin;
import com.thoughtworks.go.domain.materials.MaterialConfig;
import com.thoughtworks.go.domain.packagerepository.PackageRepositories;
import com.thoughtworks.go.domain.packagerepository.PackageDefinition;
import com.thoughtworks.go.domain.packagerepository.PackageRepository;
import com.thoughtworks.go.domain.scm.SCM;
import com.thoughtworks.go.i18n.LocalizedMessage;
import com.thoughtworks.go.listener.BaseUrlChangeListener;
import com.thoughtworks.go.listener.ConfigChangedListener;
import com.thoughtworks.go.presentation.ConfigForEdit;
import com.thoughtworks.go.presentation.TriStateSelection;
import com.thoughtworks.go.server.cache.GoCache;
import com.thoughtworks.go.server.domain.PipelineConfigDependencyGraph;
import com.thoughtworks.go.server.domain.Username;
import com.thoughtworks.go.server.domain.user.PipelineSelections;
import com.thoughtworks.go.server.initializers.Initializer;
import com.thoughtworks.go.server.persistence.PipelineRepository;
import com.thoughtworks.go.server.security.GoAcl;
import com.thoughtworks.go.server.service.result.HttpLocalizedOperationResult;
import com.thoughtworks.go.server.service.result.LocalizedOperationResult;
import com.thoughtworks.go.serverhealth.HealthStateScope;
import com.thoughtworks.go.serverhealth.HealthStateType;
import com.thoughtworks.go.service.ConfigRepository;
import com.thoughtworks.go.util.*;
import org.apache.log4j.Logger;
import org.dom4j.DocumentFactory;
import org.dom4j.Element;
import org.dom4j.io.SAXReader;
import org.jdom2.input.JDOMParseException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.StringReader;
import java.util.*;
import static com.thoughtworks.go.config.validation.GoConfigValidity.invalid;
import static com.thoughtworks.go.util.ExceptionUtils.bomb;
import static java.lang.String.format;
@Service
public class GoConfigService implements Initializer, CruiseConfigProvider {
    // Primary DAO through which every read/write of the cruise config flows.
    private GoConfigDao goConfigDao;
    private PipelineRepository pipelineRepository;
    // Injected and retained; not referenced in the visible portion of this class -- TODO confirm usage elsewhere.
    private GoConfigMigration upgrader;
    private GoCache goCache;
    private ConfigRepository configRepository;
    private ConfigCache configCache;
    // Used for deep-cloning config graphs handed out for editing.
    private Cloner cloner = new Cloner();
    // Defaults to wall-clock time; the test-only constructor can substitute a fake clock.
    private Clock clock = new SystemTimeClock();
    private static final Logger LOGGER = Logger.getLogger(GoConfigService.class);
    public static final String INVALID_CRUISE_CONFIG_XML = "Invalid Configuration";
    private final ConfigElementImplementationRegistry registry;
    private InstanceFactory instanceFactory;
    private final CachedGoPartials cachedGoPartials;
    private SystemEnvironment systemEnvironment;
    /**
     * Spring-injected constructor. Simply wires the collaborators; no work is done
     * until {@link #initialize()} is invoked.
     */
    @Autowired
    public GoConfigService(GoConfigDao goConfigDao, PipelineRepository pipelineRepository, GoConfigMigration upgrader, GoCache goCache,
                           ConfigRepository configRepository, ConfigCache configCache, ConfigElementImplementationRegistry registry,
                           InstanceFactory instanceFactory, CachedGoPartials cachedGoPartials, SystemEnvironment systemEnvironment) {
        this.goConfigDao = goConfigDao;
        this.pipelineRepository = pipelineRepository;
        this.goCache = goCache;
        this.configRepository = configRepository;
        this.configCache = configCache;
        this.registry = registry;
        this.upgrader = upgrader;
        this.instanceFactory = instanceFactory;
        this.cachedGoPartials = cachedGoPartials;
        this.systemEnvironment = systemEnvironment;
    }
    /**
     * Test-only constructor: delegates to the primary constructor with a fresh
     * {@code ConfigCache} and then overrides the clock so tests can control time.
     */
    //for testing
    public GoConfigService(GoConfigDao goConfigDao, PipelineRepository pipelineRepository, Clock clock, GoConfigMigration upgrader, GoCache goCache,
                           ConfigRepository configRepository, ConfigElementImplementationRegistry registry,
                           InstanceFactory instanceFactory, CachedGoPartials cachedGoPartials, SystemEnvironment systemEnvironment) {
        this(goConfigDao, pipelineRepository, upgrader, goCache, configRepository, new ConfigCache(), registry, instanceFactory, cachedGoPartials, systemEnvironment);
        this.clock = clock;
    }
@Override
public void initialize() {
this.goConfigDao.load();
register(new BaseUrlChangeListener(serverConfig().getSiteUrl(), serverConfig().getSecureSiteUrl(), goCache));
File dir = artifactsDir();
if (!dir.exists()) {
boolean success = dir.mkdirs();
if (!success) {
bomb("Unable to create artifacts directory at " + dir.getAbsolutePath());
}
}
if (!dir.canRead()) {
bomb("Cruise does not have read permission on " + dir.getAbsolutePath());
}
if (!dir.canWrite()) {
bomb("Cruise does not have write permission on " + dir.getAbsolutePath());
}
}
public ConfigForEdit<PipelineConfig> loadForEdit(String pipelineName, Username username, HttpLocalizedOperationResult result) {
if (!canEditPipeline(pipelineName, username, result)) {
return null;
}
GoConfigHolder configHolder = getConfigHolder();
configHolder = cloner.deepClone(configHolder);
PipelineConfig config = configHolder.configForEdit.pipelineConfigByName(new CaseInsensitiveString(pipelineName));
return new ConfigForEdit<>(config, configHolder);
}
    // Convenience overload: resolves the pipeline's group before delegating.
    private boolean canEditPipeline(String pipelineName, Username username, LocalizedOperationResult result) {
        return canEditPipeline(pipelineName, username, result, findGroupNameByPipeline(new CaseInsensitiveString(pipelineName)));
    }

    /**
     * Returns true when the pipeline exists and the user is an admin of its group.
     * On failure the appropriate not-found/unauthorized state is recorded on
     * {@code result} before returning false.
     */
    public boolean canEditPipeline(String pipelineName, Username username, LocalizedOperationResult result, String groupName) {
        if (!doesPipelineExist(pipelineName, result)) {
            return false;
        }
        if (!isUserAdminOfGroup(username.getUsername(), groupName)) {
            result.unauthorized(LocalizedMessage.string("UNAUTHORIZED_TO_EDIT_PIPELINE", pipelineName), HealthStateType.unauthorisedForPipeline(pipelineName));
            return false;
        }
        return true;
    }

    // Records a not-found result (and returns false) when no such pipeline exists.
    private boolean doesPipelineExist(String pipelineName, LocalizedOperationResult result) {
        if (!getCurrentConfig().hasPipelineNamed(new CaseInsensitiveString(pipelineName))) {
            result.notFound(LocalizedMessage.string("RESOURCE_NOT_FOUND", "pipeline", pipelineName), HealthStateType.general(HealthStateScope.forPipeline(pipelineName)));
            return false;
        }
        return true;
    }
    // ---- Thin accessors / delegates over the current config ----

    public Agents agents() {
        return getCurrentConfig().agents();
    }

    // Deprecated alias of getCurrentConfig(); retained for existing callers.
    @Deprecated()
    public CruiseConfig currentCruiseConfig() {
        return getCurrentConfig();
    }

    public EnvironmentsConfig getEnvironments() {
        return cruiseConfig().getEnvironments();
    }

    // Deprecated alias of cruiseConfig(); retained for existing callers.
    @Deprecated()
    public CruiseConfig getCurrentConfig() {
        return cruiseConfig();
    }

    @Deprecated()
    public CruiseConfig getConfigForEditing() {
        return goConfigDao.loadForEditing();
    }

    @Deprecated()
    public CruiseConfig getMergedConfigForEditing() {
        return goConfigDao.loadMergedForEditing();
    }

    // Package-private: the single point where the live config is fetched from the DAO.
    CruiseConfig cruiseConfig() {
        return goConfigDao.load();
    }

    public AgentConfig agentByUuid(String uuid) {
        return agents().getAgentByUuid(uuid);
    }

    public StageConfig stageConfigNamed(String pipelineName, String stageName) {
        return getCurrentConfig().stageConfigByName(new CaseInsensitiveString(pipelineName), new CaseInsensitiveString(stageName));
    }

    public boolean hasPipelineNamed(final CaseInsensitiveString pipelineName) {
        return getCurrentConfig().hasPipelineNamed(pipelineName);
    }

    public PipelineConfig pipelineConfigNamed(final CaseInsensitiveString name) {
        return getCurrentConfig().pipelineConfigByName(name);
    }

    public boolean stageHasTests(String pipelineName, String stageName) {
        return stageConfigNamed(pipelineName, stageName).hasTests();
    }

    // Existence probe implemented via exception: stageConfigNamed throws when absent.
    public boolean stageExists(String pipelineName, String stageName) {
        try {
            stageConfigNamed(pipelineName, stageName);
            return true;
        } catch (StageNotFoundException e) {
            return false;
        }
    }

    public String fileLocation() {
        return goConfigDao.fileLocation();
    }

    // Artifacts directory as configured on the server config.
    public File artifactsDir() {
        ServerConfig serverConfig = serverConfig();
        String s = serverConfig.artifactsDir();
        return new File(s);
    }

    public boolean hasStageConfigNamed(String pipelineName, String stageName) {
        return getCurrentConfig().hasStageConfigNamed(new CaseInsensitiveString(pipelineName), new CaseInsensitiveString(stageName), true);
    }

    // Full-config update; returns whether the save was applied and/or merged.
    public ConfigSaveState updateConfig(UpdateConfigCommand command) {
        return goConfigDao.updateConfig(command);
    }

    // Entity-scoped update performed on behalf of the given user.
    public void updateConfig(EntityConfigUpdateCommand command, Username currentUser) {
        goConfigDao.updateConfig(command, currentUser);
    }
public long getUnresponsiveJobTerminationThreshold(JobIdentifier identifier) {
JobConfig jobConfig = getJob(identifier);
if (jobConfig == null) {
return toMillis(Long.parseLong(serverConfig().getJobTimeout()));
}
String timeout = jobConfig.getTimeout();
return timeout != null ? toMillis(Long.parseLong(timeout)) : toMillis(Long.parseLong(serverConfig().getJobTimeout()));
}
private JobConfig getJob(JobIdentifier identifier) {
JobConfig jobConfig = null;
try {
jobConfig = cruiseConfig().findJob(identifier.getPipelineName(), identifier.getStageName(), identifier.getBuildName());
} catch (Exception ignored) {
}
return jobConfig;
}
private long toMillis(final long minutes) {
return minutes * 60 * 1000;
}
public boolean canCancelJobIfHung(JobIdentifier jobIdentifier) {
JobConfig jobConfig = getJob(jobIdentifier);
if (jobConfig == null) {
return false;
}
String timeout = jobConfig.getTimeout();
if ("0".equals(timeout)) {
return false;
}
if (timeout == null && !"0".equals(serverConfig().getJobTimeout())) {
return true;
}
return timeout != null && !"0".equals(timeout);
}
    /**
     * Applies a UI-driven config update and builds a {@code ConfigUpdateResponse}
     * reflecting the outcome. On success the latest config is returned; on merge
     * conflicts or validation failures the user's input is preserved (best effort)
     * and the failure is recorded on {@code result}.
     */
    public ConfigUpdateResponse updateConfigFromUI(final UpdateConfigFromUI command, final String md5, Username username, final LocalizedOperationResult result) {
        UiBasedConfigUpdateCommand updateCommand = new UiBasedConfigUpdateCommand(md5, command, result, cachedGoPartials);
        UpdatedNodeSubjectResolver updatedConfigResolver = new UpdatedNodeSubjectResolver();
        try {
            ConfigSaveState configSaveState = updateConfig(updateCommand);
            return latestUpdateResponse(command, updateCommand, updatedConfigResolver, clonedConfigForEdit(), configSaveState);
        } catch (ConfigFileHasChangedException e) {
            // Someone else changed the config under us: re-apply the user's edits onto a fresh copy.
            CruiseConfig updatedConfig = handleMergeException(md5, updateCommand);
            result.conflict(LocalizedMessage.string("SAVE_FAILED_WITH_REASON", e.getMessage()));
            return latestUpdateResponse(command, updateCommand, new OldNodeSubjectResolver(), updatedConfig, null);
        } catch (ConfigUpdateCheckFailedException e) {
            //result is already set
        } catch (Exception e) {
            // Merge-related causes may be wrapped; unwrap before deciding how to respond.
            ConfigMergeException mergeException = ExceptionUtils.getCause(e, ConfigMergeException.class);
            ConfigMergePostValidationException mergePostValidationException = ExceptionUtils.getCause(e, ConfigMergePostValidationException.class);
            if (mergeException != null || mergePostValidationException != null) {
                CruiseConfig updatedConfig = handleMergeException(md5, updateCommand);
                result.conflict(LocalizedMessage.string("SAVE_FAILED_WITH_REASON", e.getMessage()));
                return latestUpdateResponse(command, updateCommand, new OldNodeSubjectResolver(), updatedConfig, null);
            }
            GoConfigInvalidException ex = ExceptionUtils.getCause(e, GoConfigInvalidException.class);
            if (ex != null) {
                // Validation failed: hand the invalid config back with errors copied onto the edited node.
                CruiseConfig badConfig = ex.getCruiseConfig();
                setMD5(md5, badConfig);
                Validatable node = updatedConfigResolver.getNode(command, updateCommand.cruiseConfig());
                BasicCruiseConfig.copyErrors(command.updatedNode(badConfig), node);
                result.badRequest(LocalizedMessage.string("SAVE_FAILED"));
                return new ConfigUpdateResponse(badConfig, node, subjectFromNode(command, updatedConfigResolver, node), updateCommand, null);
            } else {
                result.badRequest(LocalizedMessage.string("SAVE_FAILED_WITH_REASON", e.getMessage()));
            }
        }
        // Fall-through (check-failed or unclassified error): respond with a fresh editable copy.
        CruiseConfig newConfigSinceNoOtherConfigExists = clonedConfigForEdit();
        setMD5(md5, newConfigSinceNoOtherConfigExists);
        return latestUpdateResponse(command, updateCommand, new OldNodeSubjectResolver(), newConfigSinceNoOtherConfigExists, null);
    }
/**
 * After a merge/conflict failure, tries to re-apply the user's edit on a fresh clone of the
 * editable config so the response can still show their input; falls back to the plain clone
 * when even that fails. The caller's md5 is stamped onto whichever config is returned.
 */
private CruiseConfig handleMergeException(String md5, UiBasedConfigUpdateCommand updateCommand) {
    CruiseConfig updatedConfig;
    try {
        updateCommand.update(clonedConfigForEdit());
        updatedConfig = updateCommand.cruiseConfig();
    } catch (Exception oops) {
        //Ignore this. We are trying to retain the user's input. However, if things have changed so massively that we cannot apply this update we cannot do anything.
        //But hey, at least we tried...
        updatedConfig = clonedConfigForEdit();
    }
    setMD5(md5, updatedConfig);
    return updatedConfig;
}
/**
 * Stamps the given md5 onto the config (via reflection in MagicalGoConfigXmlLoader).
 * Reflection failures are ignored: the md5 is advisory for optimistic locking in the UI.
 */
private void setMD5(String md5, CruiseConfig badConfig) {
    try {
        MagicalGoConfigXmlLoader.setMd5(badConfig, md5);
    } catch (NoSuchFieldException | IllegalAccessException e) {
        // Ignore
    }
}
// Resolves the subject for the given node, or null when there is no node to resolve against.
private Validatable subjectFromNode(UpdateConfigFromUI command, NodeSubjectResolver updatedConfigResolver, Validatable node) {
    if (node == null) {
        return null;
    }
    return updatedConfigResolver.getSubject(command, node);
}
/**
 * Builds the UI response for the given config. Node/subject resolution is best-effort: if the
 * edited node can no longer be located in this config, both stay null and the caller's view
 * layer handles the absence.
 */
private ConfigUpdateResponse latestUpdateResponse(UpdateConfigFromUI command, UiBasedConfigUpdateCommand updateCommand, final NodeSubjectResolver nodeSubResolver, final CruiseConfig config,
                                                  ConfigSaveState configSaveState) {
    Validatable node = null;
    Validatable subject = null;
    try {
        node = nodeSubResolver.getNode(command, config);
        subject = subjectFromNode(command, nodeSubResolver, node);
    } catch (Exception e) {
        //ignore, let node be null, will be handled by assert_loaded
    }
    return new ConfigUpdateResponse(config, node, subject, updateCommand, configSaveState);
}
/**
 * Applies a composite, md5-guarded update to server-level configuration: mail host, security
 * (LDAP / password file / auto-login policy) and the general server attributes (artifact dir,
 * purge limits, job timeout, site URLs, command repository location).
 *
 * @return the save state reported by the underlying config update
 */
public ConfigSaveState updateServerConfig(final MailHost mailHost, final LdapConfig ldapConfig, final PasswordFileConfig passwordFileConfig, final boolean shouldAllowAutoLogin,
                                          final String md5, final String artifactsDir, final Double purgeStart, final Double purgeUpto, final String jobTimeout,
                                          final String siteUrl, final String secureSiteUrl, final String taskRepositoryLocation) {
    // updateConfig() already returns the ConfigSaveState; the previous implementation routed
    // it through a single-element ArrayList for no benefit.
    return updateConfig(
            new GoConfigDao.NoOverwriteCompositeConfigCommand(md5,
                    goConfigDao.mailHostUpdater(mailHost),
                    securityUpdater(ldapConfig, passwordFileConfig, shouldAllowAutoLogin),
                    serverConfigUpdater(artifactsDir, purgeStart, purgeUpto, jobTimeout, siteUrl, secureSiteUrl, taskRepositoryLocation)));
}
/**
 * Builds a command that overwrites the general server attributes (artifact storage location,
 * artifact purge limits, default job timeout, site URLs and command repository location) with
 * the supplied values.
 */
private UpdateConfigCommand serverConfigUpdater(final String artifactsDir, final Double purgeStart, final Double purgeUpto, final String jobTimeout, final String siteUrl,
                                                final String secureSiteUrl, final String taskRepositoryLocation) {
    return new UpdateConfigCommand() {
        public CruiseConfig update(CruiseConfig cruiseConfig) throws Exception {
            ServerConfig server = cruiseConfig.server();
            server.setArtifactsDir(artifactsDir);
            server.setPurgeLimits(purgeStart, purgeUpto);
            server.setJobTimeout(jobTimeout);
            server.setSiteUrl(siteUrl);
            server.setSecureSiteUrl(secureSiteUrl);
            server.setCommandRepositoryLocation(taskRepositoryLocation);
            return cruiseConfig;
        }
    };
}
/**
 * Builds a command that replaces the server's security settings: LDAP config, password-file
 * config, and the auto-login policy.
 */
private UpdateConfigCommand securityUpdater(final LdapConfig ldapConfig, final PasswordFileConfig passwordFileConfig, final boolean shouldAllowAutoLogin) {
    return new UpdateConfigCommand() {
        public CruiseConfig update(CruiseConfig cruiseConfig) throws Exception {
            SecurityConfig securityConfig = cruiseConfig.server().security();
            securityConfig.modifyLdap(ldapConfig);
            securityConfig.modifyPasswordFile(passwordFileConfig);
            // Note the inversion: "allow auto-login" is stored as "only known users may log in".
            securityConfig.modifyAllowOnlyKnownUsers(!shouldAllowAutoLogin);
            return cruiseConfig;
        }
    };
}
public void addEnvironment(EnvironmentConfig environmentConfig) {
goConfigDao.addEnvironment(environmentConfig);
}
public void addPipeline(PipelineConfig pipeline, String groupName) {
goConfigDao.addPipeline(pipeline, groupName);
}
public void register(ConfigChangedListener listener) {
goConfigDao.registerListener(listener);
}
GoAcl readAclBy(String pipelineName, String stageName) {
PipelineConfig pipelineConfig = pipelineConfigNamed(new CaseInsensitiveString(pipelineName));
StageConfig stageConfig = pipelineConfig.findBy(new CaseInsensitiveString(stageName));
AdminsConfig adminsConfig = stageConfig.getApproval().getAuthConfig();
List<CaseInsensitiveString> users = getAuthorizedUsers(adminsConfig);
return new GoAcl(users);
}
// Flattens an admins config into the list of users it grants access to,
// expanding role entries into their member users.
private List<CaseInsensitiveString> getAuthorizedUsers(AdminsConfig authorizedAdmins) {
    ArrayList<CaseInsensitiveString> authorizedUsers = new ArrayList<>();
    for (Admin admin : authorizedAdmins) {
        if (admin instanceof AdminRole) {
            addRoleUsers(authorizedUsers, admin.getName());
            continue;
        }
        authorizedUsers.add(admin.getName());
    }
    return authorizedUsers;
}
private void addRoleUsers(List<CaseInsensitiveString> users, final CaseInsensitiveString roleName) {
Role role = security().getRoles().findByName(roleName);
if (role != null) {
for (RoleUser roleUser : role.getUsers()) {
users.add(roleUser.getName());
}
}
}
public GoMailSender getMailSender() {
return GoSmtpMailSender.createSender(serverConfig().mailHost());
}
public List<String> allGroups() {
List<String> allGroup = new ArrayList<>();
getCurrentConfig().groups(allGroup);
return allGroup;
}
public PipelineGroups groups() {
return getCurrentConfig().getGroups();
}
public List<Task> tasksForJob(String pipelineName, String stageName, String jobName) {
return getCurrentConfig().tasksForJob(pipelineName, stageName, jobName);
}
public boolean isSmtpEnabled() {
return currentCruiseConfig().isSmtpEnabled();
}
public void accept(PiplineConfigVisitor visitor) {
getCurrentConfig().accept(visitor);
}
public void accept(PipelineGroupVisitor visitor) {
getCurrentConfig().accept(visitor);
}
public String findGroupNameByPipeline(final CaseInsensitiveString pipelineName) {
return getCurrentConfig().getGroups().findGroupNameByPipeline(pipelineName);
}
public void populateAdminModel(Map<String, String> model) {
model.put("location", fileLocation());
XmlPartialSaver saver = fileSaver(false);
model.put("content", saver.asXml());
model.put("md5", saver.getMd5());
}
public MailHost getMailHost() {
return serverConfig().mailHost();
}
public boolean hasAgent(String uuid) {
return agents().hasAgent(uuid);
}
/**
 * Resolves the (case-insensitively matched) pipeline/stage/job names in the identifier to the
 * exact casing used in the config, so downstream lookups and display use canonical names.
 *
 * @throws StageNotFoundException when the stage does not exist in the pipeline
 * @throws JobNotFoundException   when the job does not exist in the stage
 */
public JobConfigIdentifier translateToActualCase(JobConfigIdentifier identifier) {
    PipelineConfig pipelineConfig = getCurrentConfig().pipelineConfigByName(new CaseInsensitiveString(identifier.getPipelineName()));
    String translatedPipelineName = CaseInsensitiveString.str(pipelineConfig.name());
    StageConfig stageConfig = pipelineConfig.findBy(new CaseInsensitiveString(identifier.getStageName()));
    if (stageConfig == null) {
        throw new StageNotFoundException(new CaseInsensitiveString(identifier.getPipelineName()), new CaseInsensitiveString(identifier.getStageName()));
    }
    String translatedStageName = CaseInsensitiveString.str(stageConfig.name());
    // Second arg 'true' presumably enables matching by job *instance* name as well —
    // TODO(review): confirm against JobConfig.jobConfigByInstanceName.
    JobConfig plan = stageConfig.jobConfigByInstanceName(identifier.getJobName(), true);
    if (plan == null) {
        throw new JobNotFoundException(identifier.getPipelineName(), identifier.getStageName(), identifier.getJobName());
    }
    String translatedJobName = plan.translatedName(identifier.getJobName());
    return new JobConfigIdentifier(translatedPipelineName, translatedStageName, translatedJobName);
}
public boolean isAdministrator(String username) {
return getCurrentConfig().isAdministrator(username);
}
public CommentRenderer getCommentRendererFor(String pipelineName) {
return pipelineConfigNamed(new CaseInsensitiveString(pipelineName)).getCommentRenderer();
}
public List<PipelineConfig> getAllPipelineConfigs() {
return getCurrentConfig().getAllPipelineConfigs();
}
/* NOTE: this is called from rails environments controller to build a list of pipelines which user can assign in environment.
We don't want user to select or unselect any pipeline which is already selected in a remote configuration repository.
*/
public List<PipelineConfig> getAllLocalPipelineConfigs() {
return getCurrentConfig().getAllLocalPipelineConfigs(true);
}
public List<PipelineConfig> getAllPipelineConfigsForEdit() {
return getConfigForEditing().getAllPipelineConfigs();
}
public String adminEmail() {
return getCurrentConfig().adminEmail();
}
public Set<MaterialConfig> getSchedulableMaterials() {
return getCurrentConfig().getAllUniqueMaterialsBelongingToAutoPipelinesAndConfigRepos();
}
public Set<MaterialConfig> getSchedulableSCMMaterials() {
HashSet<MaterialConfig> scmMaterials = new HashSet<>();
for (MaterialConfig materialConfig : getSchedulableMaterials()) {
if (!(materialConfig instanceof DependencyMaterialConfig)) {
scmMaterials.add(materialConfig);
}
}
return scmMaterials;
}
public Set<DependencyMaterialConfig> getSchedulableDependencyMaterials() {
HashSet<DependencyMaterialConfig> dependencyMaterials = new HashSet<>();
for (MaterialConfig materialConfig : getSchedulableMaterials()) {
if (materialConfig instanceof DependencyMaterialConfig) {
dependencyMaterials.add((DependencyMaterialConfig) materialConfig);
}
}
return dependencyMaterials;
}
public Stage scheduleStage(String pipelineName, String stageName, SchedulingContext context) {
PipelineConfig pipelineConfig = getCurrentConfig().pipelineConfigByName(new CaseInsensitiveString(pipelineName));
return instanceFactory.createStageInstance(pipelineConfig, new CaseInsensitiveString(stageName), context, getCurrentConfig().getMd5(), clock);
}
/**
 * Finds the material with the given name in the pipeline's material configs.
 * Logs an error and returns null when no such material exists.
 */
public MaterialConfig findMaterialWithName(final CaseInsensitiveString pipelineName, final CaseInsensitiveString materialName) {
    for (MaterialConfig candidate : materialConfigsFor(pipelineName)) {
        if (materialName.equals(candidate.getName())) {
            return candidate;
        }
    }
    LOGGER.error("material [" + materialName + "] not found in pipeline [" + pipelineName + "]");
    return null;
}
public MaterialConfig findMaterial(final CaseInsensitiveString pipeline, String pipelineUniqueFingerprint) {
MaterialConfigs materialConfigs = materialConfigsFor(pipeline);
for (MaterialConfig materialConfig : materialConfigs) {
if (pipelineUniqueFingerprint.equals(materialConfig.getPipelineUniqueFingerprint())) {
return materialConfig;
}
}
LOGGER.error("material with fingerprint [" + pipelineUniqueFingerprint + "] not found in pipeline [" + pipeline + "]");
return null;
}
public List<PackageDefinition> getPackages() {
ArrayList<PackageDefinition> packages = new ArrayList<>();
for (PackageRepository repository : this.getCurrentConfig().getPackageRepositories()) {
packages.addAll(repository.getPackages());
}
return packages;
}
/**
 * Finds the package definition with the given id across all configured package repositories.
 *
 * @param packageId the id to look up (package ids are expected to be unique across repositories)
 * @return the matching package, or {@code null} when no repository defines it
 */
public PackageDefinition findPackage(String packageId) {
    for (PackageRepository repository : this.getCurrentConfig().getPackageRepositories()) {
        for (PackageDefinition pkg : repository.getPackages()) {
            if (packageId.equals(pkg.getId())) {
                // Return immediately: the previous version broke only the inner loop and
                // kept scanning the remaining repositories after a match.
                return pkg;
            }
        }
    }
    return null;
}
public MaterialConfigs materialConfigsFor(final CaseInsensitiveString name) {
return pipelineConfigNamed(name).materialConfigs();
}
public MaterialConfig materialForPipelineWithFingerprint(String pipelineName, String fingerprint) {
for (MaterialConfig materialConfig : pipelineConfigNamed(new CaseInsensitiveString(pipelineName)).materialConfigs()) {
if (materialConfig.getFingerprint().equals(fingerprint)) {
return materialConfig;
}
}
throw new RuntimeException(format("Pipeline [%s] does not have a material with fingerprint [%s]", pipelineName, fingerprint));
}
public boolean isLockable(String pipelineName) {
return getCurrentConfig().isPipelineLocked(pipelineName);
}
/**
 * Builds a composite command applying every role selection to every listed user.
 * The per-user, per-selection ordering of the generated commands is preserved.
 */
public GoConfigDao.CompositeConfigCommand modifyRolesCommand(List<String> users, List<TriStateSelection> roleSelections) {
    GoConfigDao.CompositeConfigCommand composite = new GoConfigDao.CompositeConfigCommand();
    for (String user : users) {
        for (TriStateSelection selection : roleSelections) {
            composite.addCommand(new GoConfigDao.ModifyRoleCommand(user, selection));
        }
    }
    return composite;
}
public UpdateConfigCommand modifyAdminPrivilegesCommand(List<String> users, TriStateSelection adminPrivilege) {
GoConfigDao.CompositeConfigCommand command = new GoConfigDao.CompositeConfigCommand();
for (String user : users) {
command.addCommand(new GoConfigDao.ModifyAdminPrivilegeCommand(user, adminPrivilege));
}
return command;
}
public void modifyEnvironments(List<AgentInstance> agents, List<TriStateSelection> selections) {
GoConfigDao.CompositeConfigCommand command = new GoConfigDao.CompositeConfigCommand();
for (AgentInstance agentInstance : agents) {
String uuid = agentInstance.getUuid();
if (hasAgent(uuid)) {
for (TriStateSelection selection : selections) {
command.addCommand(new ModifyEnvironmentCommand(uuid, selection.getValue(), selection.getAction()));
}
}
}
updateConfig(command);
}
public Set<Resource> getAllResources() {
return getCurrentConfig().getAllResources();
}
// Collects the names of all resources known to the current config.
public List<String> getResourceList() {
    ArrayList<String> resourceNames = new ArrayList<>();
    for (Resource resource : getCurrentConfig().getAllResources()) {
        resourceNames.add(resource.getName());
    }
    return resourceNames;
}
public List<CaseInsensitiveString> pipelines(String group) {
PipelineConfigs configs = getCurrentConfig().pipelines(group);
List<CaseInsensitiveString> pipelines = new ArrayList<>();
for (PipelineConfig config : configs) {
pipelines.add(config.name());
}
return pipelines;
}
public PipelineConfigs getAllPipelinesInGroup(String group) {
return getCurrentConfig().pipelines(group);
}
public GoConfigValidity checkConfigFileValid() {
return goConfigDao.checkConfigFileValid();
}
public boolean isSecurityEnabled() {
return getCurrentConfig().isSecurityEnabled();
}
public SecurityConfig security() {
return serverConfig().security();
}
public ServerConfig serverConfig() {
return getCurrentConfig().server();
}
public boolean hasNextStage(String pipelineName, String lastStageName) {
return getCurrentConfig().hasNextStage(new CaseInsensitiveString(pipelineName), new CaseInsensitiveString(lastStageName));
}
public boolean hasPreviousStage(String pipelineName, String lastStageName) {
return getCurrentConfig().hasPreviousStage(new CaseInsensitiveString(pipelineName), new CaseInsensitiveString(lastStageName));
}
public boolean isFirstStage(String pipelineName, String stageName) {
boolean hasPreviousStage = getCurrentConfig().hasPreviousStage(new CaseInsensitiveString(pipelineName), new CaseInsensitiveString(stageName));
return !hasPreviousStage;
}
public boolean requiresApproval(final CaseInsensitiveString pipelineName, final CaseInsensitiveString stageName) {
return getCurrentConfig().requiresApproval(pipelineName, stageName);
}
public StageConfig findFirstStageOfPipeline(final CaseInsensitiveString pipelineName) {
return getCurrentConfig().pipelineConfigByName(pipelineName).first();
}
public StageConfig nextStage(String pipelineName, String lastStageName) {
return getCurrentConfig().nextStage(new CaseInsensitiveString(pipelineName), new CaseInsensitiveString(lastStageName));
}
public StageConfig previousStage(String pipelineName, String lastStageName) {
return getCurrentConfig().previousStage(new CaseInsensitiveString(pipelineName), new CaseInsensitiveString(lastStageName));
}
/**
 * Returns the custom tabs configured for the given job, or an empty Tabs when the job cannot
 * be resolved.
 */
public Tabs getCustomizedTabs(String pipelineName, String stageName, String buildName) {
    try {
        JobConfig plan = getCurrentConfig().jobConfigByName(pipelineName, stageName, buildName, false);
        return plan.getTabs();
    } catch (Exception e) {
        // Deliberate best-effort: an unknown pipeline/stage/job degrades to "no custom tabs"
        // rather than propagating the lookup failure to the UI.
        return new Tabs();
    }
}
public XmlPartialSaver groupSaver(String groupName) {
return new XmlPartialPipelineGroupSaver(groupName, systemEnvironment);
}
public XmlPartialSaver fileSaver(final boolean shouldUpgrade) {
return new XmlPartialFileSaver(shouldUpgrade, registry, systemEnvironment);
}
public String configFileMd5() {
return goConfigDao.md5OfConfigFile();
}
/**
 * Returns all pipelines that declare a dependency on the given pipeline.
 *
 * @param pipelineName the upstream pipeline's name
 * @return the (possibly empty) list of directly downstream pipeline configs
 */
public List<PipelineConfig> downstreamPipelinesOf(String pipelineName) {
    // Hoisted out of the loop: previously an identical CaseInsensitiveString was constructed
    // once per candidate pipeline.
    CaseInsensitiveString upstreamName = new CaseInsensitiveString(pipelineName);
    List<PipelineConfig> dependencies = new ArrayList<>();
    for (PipelineConfig config : getAllPipelineConfigs()) {
        if (config.dependsOn(upstreamName)) {
            dependencies.add(config);
        }
    }
    return dependencies;
}
public boolean hasVariableInScope(String pipelineName, String variableName) {
return cruiseConfig().hasVariableInScope(pipelineName, variableName);
}
public EnvironmentVariablesConfig variablesFor(String pipelineName) {
return cruiseConfig().variablesFor(pipelineName);
}
public PipelineConfigDependencyGraph upstreamDependencyGraphOf(String pipelineName) {
CruiseConfig currentConfig = getCurrentConfig();
return upstreamDependencyGraphOf(pipelineName, currentConfig);
}
public PipelineConfigDependencyGraph upstreamDependencyGraphOf(String pipelineName, CruiseConfig currentConfig) {
return findUpstream(currentConfig.pipelineConfigByName(new CaseInsensitiveString(pipelineName)));
}
private PipelineConfigDependencyGraph findUpstream(PipelineConfig currentPipeline) {
List<PipelineConfigDependencyGraph> graphs = new ArrayList<>();
for (CaseInsensitiveString name : currentPipeline.upstreamPipelines()) {
PipelineConfig pipelineConfig = getCurrentConfig().pipelineConfigByName(name);
graphs.add(findUpstream(pipelineConfig));
}
return new PipelineConfigDependencyGraph(currentPipeline, graphs.toArray(new PipelineConfigDependencyGraph[0]));
}
public PipelineSelections getSelectedPipelines(String id, Long userId) {
PipelineSelections pipelineSelections = getPersistedPipelineSelections(id, userId);
if (pipelineSelections == null) {
pipelineSelections = PipelineSelections.ALL;
}
return pipelineSelections;
}
/**
 * Persists the user's dashboard pipeline selection. In blacklist mode the complement of the
 * selection (all pipelines NOT selected) is what gets stored.
 *
 * @return the id of the saved selection record
 */
public long persistSelectedPipelines(String id, Long userId, List<String> selectedPipelines, boolean isBlacklist) {
    PipelineSelections selections = findOrCreateCurrentPipelineSelectionsFor(id, userId);
    List<String> pipelinesToStore = isBlacklist ? invertSelections(selectedPipelines) : selectedPipelines;
    selections.update(pipelinesToStore, clock.currentTime(), userId, isBlacklist);
    return pipelineRepository.saveSelectedPipelines(selections);
}
private PipelineSelections findOrCreateCurrentPipelineSelectionsFor(String id, Long userId) {
PipelineSelections pipelineSelections = isSecurityEnabled() ? pipelineRepository.findPipelineSelectionsByUserId(userId) : pipelineRepository.findPipelineSelectionsById(id);
if (pipelineSelections == null) {
pipelineSelections = new PipelineSelections(new ArrayList<>(), clock.currentTime(), userId, true);
}
return pipelineSelections;
}
// Returns the names of every configured pipeline NOT present in the given selection.
private List<String> invertSelections(List<String> selectedPipelines) {
    List<String> unselected = new ArrayList<>();
    for (PipelineConfig pipelineConfig : cruiseConfig().getAllPipelineConfigs()) {
        String name = CaseInsensitiveString.str(pipelineConfig.name());
        if (!selectedPipelines.contains(name)) {
            unselected.add(name);
        }
    }
    return unselected;
}
private PipelineSelections getPersistedPipelineSelections(String id, Long userId) {
PipelineSelections pipelineSelections = null;
if (isSecurityEnabled()) {
pipelineSelections = pipelineRepository.findPipelineSelectionsByUserId(userId);
}
if (pipelineSelections == null) {
pipelineSelections = pipelineRepository.findPipelineSelectionsById(id);
}
return pipelineSelections;
}
public List<Role> rolesForUser(final CaseInsensitiveString user) {
return security().getRoles().memberRoles(new AdminUser(user));
}
public boolean isGroupAdministrator(final CaseInsensitiveString userName) {
return getCurrentConfig().isGroupAdministrator(userName);
}
public boolean isGroupAdministrator(final Username userName) {
return getCurrentConfig().isGroupAdministrator(userName.getUsername());
}
public boolean hasEnvironmentNamed(final CaseInsensitiveString environmentName) {
return getCurrentConfig().getEnvironments().hasEnvironmentNamed(environmentName);
}
public boolean isOnlyKnownUserAllowedToLogin() {
return serverConfig().security().isAllowOnlyKnownUsersToLogin();
}
public boolean isLdapConfigured() {
return ldapConfig().isEnabled();
}
public boolean isPasswordFileConfigured() {
return passwordFileConfig().isEnabled();
}
public boolean shouldFetchMaterials(String pipelineName, String stageName) {
return stageConfigNamed(pipelineName, stageName).isFetchMaterials();
}
public LdapConfig ldapConfig() {
return serverConfig().security().ldapConfig();
}
private PasswordFileConfig passwordFileConfig() {
return serverConfig().security().passwordFileConfig();
}
/**
 * Whether the user is an admin of the named pipeline group (full server admins always qualify).
 *
 * @param userName  the user to check
 * @param groupName the group name; may be null, in which case only full-admin status counts
 */
public boolean isUserAdminOfGroup(final CaseInsensitiveString userName, String groupName) {
    PipelineConfigs group = null;
    if (groupName != null) {
        group = getCurrentConfig().findGroup(groupName);
    }
    // Null-guard added: previously a non-admin user with a null groupName caused a
    // NullPointerException when the null group reached the PipelineConfigs overload.
    return isUserAdmin(new Username(userName)) || (group != null && isUserAdminOfGroup(userName, group));
}
public boolean isUserAdminOfGroup(final CaseInsensitiveString userName, PipelineConfigs group) {
return group.isUserAnAdmin(userName, rolesForUser(userName));
}
public boolean isUserAdmin(Username username) {
return isAdministrator(CaseInsensitiveString.str(username.getUsername()));
}
private boolean isUserTemplateAdmin(Username username) {
return getCurrentConfig().getTemplates().canViewAndEditTemplate(username.getUsername(), rolesForUser(username.getUsername()));
}
public GoConfigRevision getConfigAtVersion(String version) {
GoConfigRevision goConfigRevision = null;
try {
goConfigRevision = configRepository.getRevision(version);
} catch (Exception e) {
LOGGER.info("[Go Config Service] Could not fetch cruise config xml at version=" + version, e);
}
return goConfigRevision;
}
public List<PipelineConfig> pipelinesForFetchArtifacts(String pipelineName) {
return currentCruiseConfig().pipelinesForFetchArtifacts(pipelineName);
}
private boolean isValidGroup(String groupName, CruiseConfig cruiseConfig, HttpLocalizedOperationResult result) {
if (!cruiseConfig.hasPipelineGroup(groupName)) {
result.notFound(LocalizedMessage.string("PIPELINE_GROUP_NOT_FOUND", groupName), HealthStateType.general(HealthStateScope.forGroup(groupName)));
return false;
}
return true;
}
private boolean isAdminOfGroup(String toGroupName, Username username, HttpLocalizedOperationResult result) {
if (!isUserAdminOfGroup(username.getUsername(), toGroupName)) {
result.unauthorized(LocalizedMessage.string("UNAUTHORIZED_TO_EDIT_GROUP", toGroupName), HealthStateType.unauthorised());
return false;
}
return true;
}
@Deprecated()
public GoConfigHolder getConfigHolder() {
return goConfigDao.loadConfigHolder();
}
@Deprecated()
public CruiseConfig loadCruiseConfigForEdit(Username username, HttpLocalizedOperationResult result) {
if (!isUserAdmin(username) && !isUserTemplateAdmin(username)) {
result.unauthorized(LocalizedMessage.string("UNAUTHORIZED_TO_ADMINISTER"), HealthStateType.unauthorised());
}
return clonedConfigForEdit();
}
private CruiseConfig clonedConfigForEdit() {
return cloner.deepClone(getConfigForEditing());
}
public ConfigForEdit<PipelineConfigs> loadGroupForEditing(String groupName, Username username, HttpLocalizedOperationResult result) {
GoConfigHolder configForEdit = cloner.deepClone(getConfigHolder());
if (!isValidGroup(groupName, configForEdit.configForEdit, result)) {
return null;
}
if (!isAdminOfGroup(groupName, username, result)) {
return null;
}
PipelineConfigs config = cloner.deepClone(configForEdit.configForEdit.findGroup(groupName));
return new ConfigForEdit<>(config, configForEdit);
}
public boolean doesMd5Match(String md5) {
return configFileMd5().equals(md5);
}
public String getServerId() {
return serverConfig().getServerId();
}
public String configChangesFor(String laterMd5, String earlierMd5, LocalizedOperationResult result) {
try {
return configRepository.configChangesFor(laterMd5, earlierMd5);
} catch (IllegalArgumentException e) {
result.badRequest(LocalizedMessage.string("CONFIG_VERSION_NOT_FOUND"));
} catch (Exception e) {
result.internalServerError(LocalizedMessage.string("COULD_NOT_RETRIEVE_CONFIG_DIFF"));
}
return null;
}
public void updateUserPipelineSelections(String id, Long userId, CaseInsensitiveString pipelineToAdd) {
PipelineSelections currentSelections = findOrCreateCurrentPipelineSelectionsFor(id, userId);
if (!currentSelections.isBlacklist()) {
currentSelections.addPipelineToSelections(pipelineToAdd);
pipelineRepository.saveSelectedPipelines(currentSelections);
}
}
public boolean isPipelineEditableViaUI(String pipelineName) {
PipelineConfig pipelineConfig = this.pipelineConfigNamed(new CaseInsensitiveString(pipelineName));
if (pipelineConfig == null)
return false;
return isOriginLocal(pipelineConfig.getOrigin());
}
private boolean isOriginLocal(ConfigOrigin origin) {
// when null we assume that it comes from file or UI
return origin == null || origin.isLocal();
}
public ArrayList<SCM> getSCMs() {
return cruiseConfig().getSCMs();
}
public boolean isAdministrator(CaseInsensitiveString username) {
return isAdministrator(username.toString());
}
public PackageRepository getPackageRepository(String repoId) {
return cruiseConfig().getPackageRepositories().find(repoId);
}
public PackageRepositories getPackageRepositories() {
return cruiseConfig().getPackageRepositories();
}
public Map<String, List<Pair<PipelineConfig, PipelineConfigs>>> getPackageUsageInPipelines() {
return groups().getPackageUsageInPipelines();
}
/**
 * Template for saving an edited XML fragment ("partial") of the cruise config. Subclasses
 * define which subtree is edited ({@link #valid()} / {@link #getXpath()}); this base class
 * splices the new fragment into the full config XML, deserializes the result and saves it
 * with optimistic locking via md5.
 */
public abstract class XmlPartialSaver<T> {
    protected final SAXReader reader;
    private final ConfigElementImplementationRegistry registry;
    private SystemEnvironment systemEnvironment;

    protected XmlPartialSaver(ConfigElementImplementationRegistry registry, SystemEnvironment systemEnvironment) {
        this.registry = registry;
        this.systemEnvironment = systemEnvironment;
        reader = new SAXReader();
    }

    // md5 of the config revision the partial was rendered from; recorded by configForEditing().
    private String md5;

    /**
     * Replaces the subtree located by {@link #getXpath()} with the supplied XML fragment and
     * saves the resulting full config document.
     */
    protected ConfigSaveState updatePartial(String xmlPartial, final String md5) throws Exception {
        LOGGER.debug("[Config Save] Updating partial");
        org.dom4j.Document document = documentRoot();
        Element root = document.getRootElement();
        Element configElement = ((Element) root.selectSingleNode(getXpath()));
        List nodes = configElement.getParent().content();
        int index = nodes.indexOf(configElement);
        LOGGER.debug("[Config Save] Converting to object");
        // Parse the incoming fragment and splice it in place of the old element.
        Element newConfigElement = reader.read(new StringReader(xmlPartial)).getRootElement();
        nodes.set(index, newConfigElement);
        return saveConfig(document.asXML(), md5);
    }

    /**
     * Deserializes the full config XML and saves it, choosing between the optimized
     * full-config save flow and the legacy flow based on the system environment.
     */
    protected ConfigSaveState saveConfig(final String xmlString, final String md5) throws Exception {
        LOGGER.debug("[Config Save] Started saving XML");
        final MagicalGoConfigXmlLoader configXmlLoader = new MagicalGoConfigXmlLoader(configCache, registry);
        LOGGER.debug("[Config Save] Updating config");
        final CruiseConfig deserializedConfig = configXmlLoader.deserializeConfig(xmlString);
        ConfigSaveState configSaveState = systemEnvironment.optimizeFullConfigSave() ? saveConfigNewFlow(deserializedConfig, md5)
                : saveConfigOldFlow(deserializedConfig, md5);
        LOGGER.debug("[Config Save] Finished saving XML");
        return configSaveState;
    }

    // Legacy flow: save through an md5-guarded (no-overwrite) update command.
    private ConfigSaveState saveConfigOldFlow(final CruiseConfig deserializedConfig, final String md5) {
        LOGGER.debug("[Config Save] Updating config using the old flow");
        return goConfigDao.updateConfig(new NoOverwriteUpdateConfigCommand() {
            public CruiseConfig update(CruiseConfig cruiseConfig) throws Exception {
                // Carry forward the partials from the currently loaded config.
                deserializedConfig.setPartials(cruiseConfig.getPartials());
                return deserializedConfig;
            }

            public String unmodifiedMd5() {
                return md5;
            }
        });
    }

    // New flow: hand the fully deserialized config to the dao in a single command.
    private ConfigSaveState saveConfigNewFlow(CruiseConfig cruiseConfig, String md5) {
        LOGGER.debug("[Config Save] Updating config using the new flow");
        return goConfigDao.updateFullConfig(new FullConfigUpdateCommand(cruiseConfig, md5));
    }

    /**
     * Renders the editable config as a dom4j document, registering the "go" XML namespace so
     * the xpath queries used by {@link #getXpath()} resolve.
     */
    protected org.dom4j.Document documentRoot() throws Exception {
        CruiseConfig cruiseConfig = goConfigDao.loadForEditing();
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        new MagicalGoConfigXmlWriter(configCache, registry).write(cruiseConfig, out, true);
        org.dom4j.Document document = reader.read(new StringReader(out.toString()));
        Map<String, String> map = new HashMap<>();
        map.put("go", MagicalGoConfigXmlWriter.XML_NS);
        //TODO: verify this doesn't cache the factory
        DocumentFactory factory = DocumentFactory.getInstance();
        factory.setXPathNamespaceURIs(map);
        return document;
    }

    // The object this saver edits; implementations throw when it cannot be resolved.
    protected abstract T valid();

    /** Serializes the edited subtree back to XML for display/editing. */
    public String asXml() {
        return new MagicalGoConfigXmlWriter(configCache, registry).toXmlPartial(valid());
    }

    /**
     * Validates and saves the submitted XML fragment, translating the various failure modes
     * into a GoConfigValidity describing the kind of error (parse error, merge conflict,
     * pre-/post-validation failure, or generic conflict).
     */
    public GoConfigValidity saveXml(String xmlPartial, String expectedMd5) {
        GoConfigValidity hasValidRequest = checkValidity();
        if (!hasValidRequest.isValid()) {
            return hasValidRequest;
        }
        try {
            return GoConfigValidity.valid(updatePartial(xmlPartial, expectedMd5));
        } catch (JDOMParseException jsonException) {
            return GoConfigValidity.invalid(String.format("%s - %s", INVALID_CRUISE_CONFIG_XML, jsonException.getMessage())).fromConflict();
        } catch (ConfigMergePreValidationException e) {
            return invalid(e).mergePreValidationError();
        } catch (Exception e) {
            if (e.getCause() instanceof ConfigMergePostValidationException) {
                return GoConfigValidity.invalid(e.getCause().getMessage()).mergePostValidationError();
            }
            if (e.getCause() instanceof ConfigMergeException) {
                return GoConfigValidity.invalid(e.getCause().getMessage()).mergeConflict();
            }
            return GoConfigValidity.invalid(e).fromConflict();
        }
    }

    // A request is valid only if the target subtree can currently be resolved.
    private GoConfigValidity checkValidity() {
        try {
            valid();
            return GoConfigValidity.valid();
        } catch (Exception e) {
            return GoConfigValidity.invalid(e);
        }
    }

    /**
     * Loads the editable config and remembers its md5 so the subsequent save can be
     * optimistically locked against the revision the user actually edited.
     */
    protected final CruiseConfig configForEditing() {
        CruiseConfig config = getConfigForEditing();
        this.md5 = config.getMd5();
        return config;
    }

    public String getMd5() {
        return md5;
    }

    // Subclasses relying on the default updatePartial() must locate their subtree here.
    protected String getXpath() {
        throw new RuntimeException("Must provide xpath or override the default updating");
    }
}
/**
 * Saver for the whole config file: the "partial" is the entire document, optionally upgraded
 * to the current schema version before saving.
 */
private class XmlPartialFileSaver extends XmlPartialSaver<CruiseConfig> {
    private final boolean shouldUpgrade;

    XmlPartialFileSaver(final boolean shouldUpgrade, final ConfigElementImplementationRegistry registry, SystemEnvironment systemEnvironment) {
        super(registry, systemEnvironment);
        this.shouldUpgrade = shouldUpgrade;
    }

    protected ConfigSaveState updatePartial(String xmlFile, final String md5) throws Exception {
        if (shouldUpgrade) {
            // Migrate older-schema documents before attempting to deserialize/save.
            xmlFile = upgrader.upgradeIfNecessary(xmlFile);
        }
        return saveConfig(xmlFile, md5);
    }

    public String asXml() {
        return configAsXml(valid());
    }

    protected CruiseConfig valid() {
        return configForEditing();
    }
}
public String xml() {
return configAsXml(getConfigForEditing());
}
// Serializes the given config to its XML representation; serialization failures are wrapped
// in an unchecked exception via bomb().
private String configAsXml(CruiseConfig cruiseConfig) {
    final ByteArrayOutputStream outStream = new ByteArrayOutputStream();
    try {
        new MagicalGoConfigXmlWriter(configCache, registry).write(cruiseConfig, outStream, true);
        return outStream.toString();
    } catch (Exception e) {
        throw bomb(e);
    }
}
/**
 * Saver for a single pipeline group's XML subtree, addressed by group name.
 */
private class XmlPartialPipelineGroupSaver extends XmlPartialSaver<Object> {
    private final String groupName;

    public XmlPartialPipelineGroupSaver(String groupName, SystemEnvironment systemEnvironment) {
        super(registry, systemEnvironment);
        this.groupName = groupName;
    }

    protected Object valid() {
        CruiseConfig config = configForEditing();
        PipelineConfigs group = config.findGroup(groupName);
        return group.getCopyForEditing();
    }

    @Override
    protected String getXpath() {
        // Locates this group's <pipelines> element in the serialized config document.
        return String.format("//cruise/pipelines[@group='%s']", groupName);
    }
}
// for test
/**
 * Forces the config DAO to re-notify all registered config listeners.
 * Intended for test code only.
 */
public void forceNotifyListeners() throws Exception {
goConfigDao.reloadListeners();
}
/** @return the registry of config element implementations used when writing config XML. */
public ConfigElementImplementationRegistry getRegistry() {
    return this.registry;
}
}
| |
/*
* Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
/*
* IndexMaintainceTest.java
*
* Created on February 24, 2005, 5:47 PM
*/
package com.gemstone.gemfire.cache.query.internal.index;
import java.util.Collection;
import java.util.HashSet;
import java.util.Set;
import org.junit.FixMethodOrder;
import org.junit.runners.MethodSorters;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
import com.gemstone.gemfire.cache.Cache;
import com.gemstone.gemfire.cache.Region;
import com.gemstone.gemfire.cache.query.CacheUtils;
import com.gemstone.gemfire.cache.query.Index;
import com.gemstone.gemfire.cache.query.IndexExistsException;
import com.gemstone.gemfire.cache.query.IndexNameConflictException;
import com.gemstone.gemfire.cache.query.IndexStatistics;
import com.gemstone.gemfire.cache.query.IndexType;
import com.gemstone.gemfire.cache.query.Query;
import com.gemstone.gemfire.cache.query.QueryService;
import com.gemstone.gemfire.cache.query.RegionNotFoundException;
import com.gemstone.gemfire.cache.query.SelectResults;
import com.gemstone.gemfire.cache.query.Utils;
import com.gemstone.gemfire.cache.query.data.Portfolio;
import com.gemstone.gemfire.cache.query.functional.StructSetOrResultsSet;
import com.gemstone.gemfire.cache.query.internal.ExecutionContext;
import com.gemstone.gemfire.cache.query.internal.QueryObserverAdapter;
import com.gemstone.gemfire.cache.query.internal.QueryObserverHolder;
import com.gemstone.gemfire.cache.query.internal.parse.OQLLexerTokenTypes;
import dunit.DistributedTestCase;
/**
 * JUnit 3 tests exercising maintenance of a functional index on the
 * /portfolio region: entry add/update/invalidate/destroy, region clear, and
 * comparisons between range and compact-range index results. Test methods run
 * in ascending name order (test000..test009) because they share static cache
 * state initialized lazily in {@link #setUp()}.
 *
 * @author vaibhav
 */
@FixMethodOrder(MethodSorters.NAME_ASCENDING)
public class IndexMaintainceTest extends TestCase {
public IndexMaintainceTest(String testName) {
super(testName);
}
// Initializes the shared cache/region/index once for the whole suite.
protected void setUp() throws Exception {
if (!isInitDone) {
init();
}
System.out.println("Running " + this.getName());
}
protected void tearDown() throws Exception {
}
public static Test suite() {
TestSuite suite = new TestSuite(IndexMaintainceTest.class);
return suite;
}
// Shared fixture state, populated by init() and reused across tests.
static QueryService qs;
static boolean isInitDone = false;
static Region region;
static IndexProtocol index;
// Flipped by query-observer callbacks when a query actually used an index.
protected boolean indexUsed = false;
// Creates the cache, a /portfolio region with entries "0".."3", and a
// functional "statusIndex" on status (expected to be a CompactRangeIndex).
private static void init() {
try {
Cache cache = CacheUtils.getCache();
region = CacheUtils.createRegion("portfolio", Portfolio.class);
region.put("0", new Portfolio(0));
region.put("1", new Portfolio(1));
region.put("2", new Portfolio(2));
region.put("3", new Portfolio(3));
qs = cache.getQueryService();
index = (IndexProtocol) qs.createIndex("statusIndex",
IndexType.FUNCTIONAL, "status", "/portfolio");
assertTrue(index instanceof CompactRangeIndex);
}
catch (Exception e) {
e.printStackTrace();
}
// NOTE(review): isInitDone becomes true even if the setup above failed, so
// a broken init is never retried by later tests — confirm this is intended.
isInitDone = true;
}
// Verifies canonicalized and user-supplied from-clauses / indexed expressions
// for a variety of index definitions (regression test for BUG 32452).
public void test000BUG32452() throws IndexNameConflictException,
IndexExistsException, RegionNotFoundException {
Index i1 = qs.createIndex("tIndex", IndexType.FUNCTIONAL, "vals.secId",
"/portfolio pf, pf.positions.values vals");
Index i2 = qs.createIndex("dIndex", IndexType.FUNCTIONAL,
"pf.getCW(pf.ID)", "/portfolio pf");
Index i3 = qs.createIndex("fIndex", IndexType.FUNCTIONAL, "sIter",
"/portfolio pf, pf.collectionHolderMap[(pf.ID).toString()].arr sIter");
Index i4 = qs.createIndex("cIndex", IndexType.FUNCTIONAL,
"pf.collectionHolderMap[(pf.ID).toString()].arr[pf.ID]",
"/portfolio pf");
Index i5 = qs.createIndex("inIndex", IndexType.FUNCTIONAL, "kIter.secId",
"/portfolio['0'].positions.values kIter");
Index i6 = qs.createIndex("sIndex", IndexType.FUNCTIONAL, "pos.secId",
"/portfolio.values val, val.positions.values pos");
Index i7 = qs.createIndex("p1Index", IndexType.PRIMARY_KEY, "pkid",
"/portfolio pf");
Index i8 = qs.createIndex("p2Index", IndexType.PRIMARY_KEY, "pk",
"/portfolio pf");
if (!i1.getCanonicalizedFromClause().equals(
"/portfolio index_iter1, index_iter1.positions.values index_iter2")
|| !i1.getCanonicalizedIndexedExpression().equals("index_iter2.secId")
|| !i1.getFromClause()
.equals("/portfolio pf, pf.positions.values vals")
|| !i1.getIndexedExpression().equals("vals.secId")) {
fail("Mismatch found among fromClauses or IndexedExpressions");
}
if (!i2.getCanonicalizedFromClause().equals("/portfolio index_iter1")
|| !i2.getCanonicalizedIndexedExpression().equals(
"index_iter1.getCW(index_iter1.ID)")
|| !i2.getFromClause().equals("/portfolio pf")
|| !i2.getIndexedExpression().equals("pf.getCW(pf.ID)")) {
fail("Mismatch found among fromClauses or IndexedExpressions");
}
if (!i3
.getCanonicalizedFromClause()
.equals(
"/portfolio index_iter1, index_iter1.collectionHolderMap[index_iter1.ID.toString()].arr index_iter3")
|| !i3.getCanonicalizedIndexedExpression().equals("index_iter3")
|| !i3
.getFromClause()
.equals(
"/portfolio pf, pf.collectionHolderMap[(pf.ID).toString()].arr sIter")
|| !i3.getIndexedExpression().equals("sIter")) {
fail("Mismatch found among fromClauses or IndexedExpressions");
}
if (!i4.getCanonicalizedFromClause().equals("/portfolio index_iter1")
|| !i4.getCanonicalizedIndexedExpression().equals(
"index_iter1.collectionHolderMap[index_iter1.ID.toString()].arr[index_iter1.ID]")
|| !i4.getFromClause().equals("/portfolio pf")
|| !i4.getIndexedExpression().equals(
"pf.collectionHolderMap[(pf.ID).toString()].arr[pf.ID]")) {
fail("Mismatch found among fromClauses or IndexedExpressions");
}
if (!i5.getCanonicalizedFromClause().equals(
"/portfolio['0'].positions.values index_iter4")
|| !i5.getCanonicalizedIndexedExpression().equals("index_iter4.secId")
|| !i5.getFromClause().equals("/portfolio['0'].positions.values kIter")
|| !i5.getIndexedExpression().equals("kIter.secId")) {
fail("Mismatch found among fromClauses or IndexedExpressions");
}
if (!i6.getCanonicalizedFromClause().equals(
"/portfolio.values index_iter5, index_iter5.positions.values index_iter6")
|| !i6.getCanonicalizedIndexedExpression().equals("index_iter6.secId")
|| !i6.getFromClause().equals(
"/portfolio.values val, val.positions.values pos")
|| !i6.getIndexedExpression().equals("pos.secId")) {
fail("Mismatch found among fromClauses or IndexedExpressions");
}
if (!i7.getCanonicalizedFromClause().equals("/portfolio index_iter1")
|| !i7.getCanonicalizedIndexedExpression().equals("index_iter1.pkid")
|| !i7.getFromClause().equals("/portfolio pf")
|| !i7.getIndexedExpression().equals("pkid")) {
fail("Mismatch found among fromClauses or IndexedExpressions");
}
if (!i8.getCanonicalizedFromClause().equals("/portfolio index_iter1")
|| !i8.getCanonicalizedIndexedExpression().equals("index_iter1.pk")
|| !i8.getFromClause().equals("/portfolio pf")
|| !i8.getIndexedExpression().equals("pk")) {
fail("Mismatch found among fromClauses or IndexedExpressions");
}
qs.removeIndex(i1);
qs.removeIndex(i2);
qs.removeIndex(i3);
qs.removeIndex(i4);
qs.removeIndex(i5);
qs.removeIndex(i6);
qs.removeIndex(i7);
qs.removeIndex(i8);
Index i9 = qs.createIndex("p3Index", IndexType.PRIMARY_KEY, "getPk",
"/portfolio pf");
if (!i9.getCanonicalizedFromClause().equals("/portfolio index_iter1")
|| !i9.getCanonicalizedIndexedExpression().equals("index_iter1.pk")
|| !i9.getFromClause().equals("/portfolio pf")
|| !i9.getIndexedExpression().equals("getPk")) {
fail("Mismatch found among fromClauses or IndexedExpressions");
}
qs.removeIndex(i9);
}
// A put of a new entry must be reflected in the index statistics and in
// query results using the indexed field.
public void test001AddEntry() throws Exception {
System.out.println(((CompactRangeIndex) index).dump());
IndexStatistics stats = index.getStatistics();
assertEquals(4, stats.getNumberOfValues());
// com.gemstone.gemfire.internal.util.
// DebuggerSupport.waitForJavaDebugger(region.getCache().getLogger());
region.put("4", new Portfolio(4));
System.out.println(((CompactRangeIndex) index).dump());
stats = index.getStatistics();
assertEquals(5, stats.getNumberOfValues());
//Set results = new HashSet();
//index.query("active", OQLLexerTokenTypes.TOK_EQ, results, new ExecutionContext(null, CacheUtils.getCache()));
SelectResults results = region.query("status = 'active'");
System.out.println(Utils.printResult(results));
assertEquals(3, results.size());
}
// !!!:ezoerner:20081030 disabled because modifying an object in place
// and then putting it back into the cache breaks a CompactRangeIndex.
// @todo file a ticket on this issue
public void _test002UpdateEntry() throws Exception {
IndexStatistics stats = index.getStatistics();
System.out.println(((CompactRangeIndex) index).dump());
Portfolio p = (Portfolio) region.get("4");
p.status = "inactive";
region.put("4", p);
assertEquals(5, stats.getNumberOfValues());
//Set results = new HashSet();
//index.query("active", OQLLexerTokenTypes.TOK_EQ, results,new ExecutionContext(null, CacheUtils.getCache()));
SelectResults results = region.query("status = 'active'");
assertEquals(2, results.size());
}
// Invalidating an entry must remove its value from the index.
public void test003InvalidateEntry() throws Exception {
IndexStatistics stats = index.getStatistics();
region.invalidate("4");
assertEquals(4, stats.getNumberOfValues());
//Set results = new HashSet();
//index.query("active", OQLLexerTokenTypes.TOK_EQ, results,new ExecutionContext(null, CacheUtils.getCache()));
SelectResults results = region.query("status = 'active'");
assertEquals(2, results.size());
}
// Destroying an entry must remove its value from the index.
public void test004DestroyEntry() throws Exception {
IndexStatistics stats = index.getStatistics();
region.put("4", new Portfolio(4));
region.destroy("4");
assertEquals(4, stats.getNumberOfValues());
//Set results = new HashSet();
//index.query("active", OQLLexerTokenTypes.TOK_EQ, results,new ExecutionContext(null, CacheUtils.getCache()));
SelectResults results = region.query("status = 'active'");
assertEquals(2, results.size());
}
//This test has a meaning only for Trunk code as it checks for Map implementation
//Asif : Tests for Region clear operations on Index in a Local VM
public void test005IndexClearanceOnMapClear() {
try {
CacheUtils.restartCache();
IndexMaintainceTest.isInitDone = false;
init();
Query q = qs
.newQuery("SELECT DISTINCT * FROM /portfolio where status = 'active'");
// Observer records whether the query execution hit an index.
QueryObserverHolder.setInstance(new QueryObserverAdapter() {
public void afterIndexLookup(Collection coll) {
IndexMaintainceTest.this.indexUsed = true;
}
});
SelectResults set = (SelectResults) q.execute();
if (set.size() == 0 || !this.indexUsed) {
fail("Either Size of the result set is zero or Index is not used ");
}
this.indexUsed = false;
region.clear();
// After the clear, the (still-used) index must yield an empty result.
set = (SelectResults) q.execute();
if (set.size() != 0 || !this.indexUsed) {
fail("Either Size of the result set is not zero or Index is not used ");
}
}
catch (Exception e) {
e.printStackTrace();
fail(e.toString());
}
finally {
IndexMaintainceTest.isInitDone = false;
CacheUtils.restartCache();
}
}
//Asif : Tests for Region clear operations on Index in a Local VM for cases
// when a clear
//operation & region put operation occur concurrently
public void test006ConcurrentMapClearAndRegionPutOperation() {
try {
CacheUtils.restartCache();
IndexMaintainceTest.isInitDone = false;
init();
Query q = qs
.newQuery("SELECT DISTINCT * FROM /portfolio where status = 'active'");
QueryObserverHolder.setInstance(new QueryObserverAdapter() {
public void afterIndexLookup(Collection coll) {
IndexMaintainceTest.this.indexUsed = true;
}
public void beforeRerunningIndexCreationQuery() {
//Spawn a separate thread here which does a put operation on region
Thread th = new Thread(new Runnable() {
public void run() {
//Assert that the size of region is now 0
assertTrue(IndexMaintainceTest.region.size() == 0);
IndexMaintainceTest.region.put("" + 8, new Portfolio(8));
}
});
th.start();
DistributedTestCase.join(th, 30 * 1000, null);
assertTrue(IndexMaintainceTest.region.size() == 1);
}
});
SelectResults set = (SelectResults) q.execute();
if (set.size() == 0 || !this.indexUsed) {
fail("Either Size of the result set is zero or Index is not used ");
}
this.indexUsed = false;
region.clear();
// The concurrent put issued from the observer should make exactly one
// entry visible to the re-executed query.
set = (SelectResults) q.execute();
if (set.size() != 1 || !this.indexUsed) {
fail("Either Size of the result set is not one or Index is not used ");
}
}
catch (Exception e) {
e.printStackTrace();
fail(e.toString());
}
finally {
IndexMaintainceTest.isInitDone = false;
CacheUtils.restartCache();
}
}
// Growing the region after index creation must grow the query result set.
public void test007IndexUpdate() {
try{
CacheUtils.restartCache();
IndexMaintainceTest.isInitDone = false;
init();
qs.removeIndexes();
index = (IndexProtocol) qs.createIndex("statusIndex",
IndexType.FUNCTIONAL, "pos.secId", "/portfolio p , p.positions.values pos");
String queryStr = "Select distinct pf from /portfolio pf , pf.positions.values ps where ps.secId='SUN'";
Query query = qs.newQuery(queryStr);
SelectResults rs =(SelectResults) query.execute();
int size1 = rs.size();
for(int i=4;i<50;++i) {
region.put(""+i, new Portfolio(i));
}
rs =(SelectResults) query.execute();
int size2 = rs.size();
assertTrue(size2>size1);
}catch(Exception e) {
e.printStackTrace();
fail("Test failed due to exception="+e);
}finally {
IndexMaintainceTest.isInitDone = false;
CacheUtils.restartCache();
}
}
/**
 * Test to compare range and compact index.
 * They should return the same results.
 */
public void test008RangeAndCompactRangeIndex() {
try{
//CacheUtils.restartCache();
if (!IndexMaintainceTest.isInitDone){
init();
}
qs.removeIndexes();
String[] queryStr = new String[] {
"Select status from /portfolio pf where status='active'",
"Select pf.ID from /portfolio pf where pf.ID > 2 and pf.ID < 100",
"Select * from /portfolio pf where pf.position1.secId > '2'",
};
String[] queryFields = new String[] {
"status",
"ID",
"position1.secId",
};
for (int i=0; i < queryStr.length; i++){
// Clear indexes if any.
qs.removeIndexes();
// initialize region.
region.clear();
for (int k=0; k < 10; k++) {
region.put(""+k, new Portfolio(k));
}
for (int j=0; j < 1; j++) { // With different region size.
// Update Region.
for (int k=0; k < (j * 100); k++) {
region.put(""+k, new Portfolio(k));
}
// Create compact index.
IndexManager.TEST_RANGEINDEX_ONLY = false;
index = (IndexProtocol) qs.createIndex(queryFields[i] + "Index",
IndexType.FUNCTIONAL, queryFields[i], "/portfolio");
// Execute Query.
SelectResults[][] rs = new SelectResults[1][2];
Query query = qs.newQuery(queryStr[i]);
rs[0][0] =(SelectResults) query.execute();
// remove compact index.
qs.removeIndexes();
// Create Range Index.
IndexManager.TEST_RANGEINDEX_ONLY = true;
index = (IndexProtocol) qs.createIndex(queryFields[i] + "rIndex",
IndexType.FUNCTIONAL, queryFields[i], "/portfolio");
query = qs.newQuery(queryStr[i]);
rs[0][1] =(SelectResults) query.execute();
System.out.println("#### rs1 size is : " + (rs[0][0]).size() + " rs2 size is : " + (rs[0][1]).size());
// Compare the compact-index and range-index result sets for equality.
StructSetOrResultsSet ssORrs = new StructSetOrResultsSet();
ssORrs.CompareQueryResultsWithoutAndWithIndexes(rs, 1,queryStr);
}
}
}catch(Exception e) {
e.printStackTrace();
fail("Test failed due to exception="+e);
}finally {
IndexManager.TEST_RANGEINDEX_ONLY = false;
IndexMaintainceTest.isInitDone = false;
CacheUtils.restartCache();
}
}
/**
 * Creates range and compact-range indexes up front, then updates the region
 * and executes a set of queries against them. Originally toggled
 * {@code IndexManager.TEST_ACQUIRE_COMPACTINDEX_LOCKS_EARLY} (now commented
 * out); with the flag disabled this only checks the queries run cleanly.
 */
public void test009AcquringCompactRangeIndexEarly() {
try{
//CacheUtils.restartCache();
if (!IndexMaintainceTest.isInitDone){
init();
}
qs.removeIndexes();
String[] queryStr = new String[] {
"Select status from /portfolio pf where status='active'",
"Select * from /portfolio pf, pf.positions.values pos where pf.ID > 10 and pf.status='active'",
"Select pf.ID from /portfolio pf where pf.ID > 2 and pf.ID < 100",
"Select * from /portfolio pf where pf.position1.secId > '2'",
"Select * from /portfolio pf, pf.positions.values pos where pos.secId > '2'",
};
// initialize region.
region.clear();
for (int k=0; k < 10; k++) {
region.put(""+k, new Portfolio(k));
}
// Create range and compact-range indexes.
qs.createIndex("id2Index ", IndexType.FUNCTIONAL, "pf.ID", "/portfolio pf");
qs.createIndex("id2PosIndex ", IndexType.FUNCTIONAL, "pf.ID", "/portfolio pf, pf.positions.values");
qs.createIndex("status2PosIndex ", IndexType.FUNCTIONAL, "pos.secId", "/portfolio pf, pf.positions.values pos");
// Set the acquire compact range index flag to true
//IndexManager.TEST_ACQUIRE_COMPACTINDEX_LOCKS_EARLY = true;
// Update Region.
for (int k=0; k < 100; k++) {
region.put(""+k, new Portfolio(k));
}
for (int i=0; i < queryStr.length; i++){
// Execute Query.
SelectResults[][] rs = new SelectResults[1][2];
Query query = qs.newQuery(queryStr[i]);
rs[0][0] =(SelectResults) query.execute();
}
}catch(Exception e) {
e.printStackTrace();
fail("Test failed due to exception="+e);
}finally {
//IndexManager.TEST_ACQUIRE_COMPACTINDEX_LOCKS_EARLY = false;
IndexMaintainceTest.isInitDone = false;
CacheUtils.restartCache();
}
}
////////// main method ///////////
public static void main(java.lang.String[] args) {
junit.textui.TestRunner.run(suite());
}//end of main method
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.streaming.runtime.tasks;
import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.eventtime.TimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkGenerator;
import org.apache.flink.api.common.eventtime.WatermarkOutput;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.connector.source.Boundedness;
import org.apache.flink.api.connector.source.SourceReader;
import org.apache.flink.api.connector.source.SourceReaderContext;
import org.apache.flink.api.connector.source.mocks.MockSource;
import org.apache.flink.api.connector.source.mocks.MockSourceReader;
import org.apache.flink.api.connector.source.mocks.MockSourceSplit;
import org.apache.flink.api.connector.source.mocks.MockSourceSplitSerializer;
import org.apache.flink.metrics.Counter;
import org.apache.flink.metrics.Gauge;
import org.apache.flink.metrics.Metric;
import org.apache.flink.runtime.checkpoint.CheckpointOptions;
import org.apache.flink.runtime.io.network.api.CancelCheckpointMarker;
import org.apache.flink.runtime.io.network.api.CheckpointBarrier;
import org.apache.flink.runtime.jobgraph.OperatorID;
import org.apache.flink.runtime.metrics.MetricNames;
import org.apache.flink.runtime.metrics.NoOpMetricRegistry;
import org.apache.flink.runtime.metrics.groups.OperatorMetricGroup;
import org.apache.flink.runtime.metrics.groups.TaskIOMetricGroup;
import org.apache.flink.runtime.metrics.groups.TaskMetricGroup;
import org.apache.flink.runtime.metrics.groups.UnregisteredMetricGroups;
import org.apache.flink.runtime.metrics.util.InterceptingOperatorMetricGroup;
import org.apache.flink.runtime.metrics.util.InterceptingTaskMetricGroup;
import org.apache.flink.runtime.source.event.AddSplitEvent;
import org.apache.flink.runtime.source.event.NoMoreSplitsEvent;
import org.apache.flink.streaming.api.graph.StreamConfig;
import org.apache.flink.streaming.api.operators.AbstractInput;
import org.apache.flink.streaming.api.operators.AbstractStreamOperator;
import org.apache.flink.streaming.api.operators.AbstractStreamOperatorFactory;
import org.apache.flink.streaming.api.operators.AbstractStreamOperatorV2;
import org.apache.flink.streaming.api.operators.BoundedMultiInput;
import org.apache.flink.streaming.api.operators.BoundedOneInput;
import org.apache.flink.streaming.api.operators.Input;
import org.apache.flink.streaming.api.operators.MultipleInputStreamOperator;
import org.apache.flink.streaming.api.operators.OneInputStreamOperator;
import org.apache.flink.streaming.api.operators.SourceOperatorFactory;
import org.apache.flink.streaming.api.operators.StreamOperator;
import org.apache.flink.streaming.api.operators.StreamOperatorParameters;
import org.apache.flink.streaming.api.watermark.Watermark;
import org.apache.flink.streaming.runtime.io.StreamMultipleInputProcessor;
import org.apache.flink.streaming.runtime.streamrecord.LatencyMarker;
import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;
import org.apache.flink.streaming.runtime.streamstatus.StreamStatus;
import org.apache.flink.streaming.runtime.tasks.OneInputStreamTaskTest.WatermarkMetricOperator;
import org.apache.flink.streaming.util.TestBoundedMultipleInputOperator;
import org.apache.flink.streaming.util.TestHarnessUtil;
import org.apache.flink.util.SerializedValue;
import org.hamcrest.collection.IsMapContaining;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import java.io.Serializable;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import static org.apache.flink.util.Preconditions.checkArgument;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
/**
* Tests for {@link MultipleInputStreamTask}. Theses tests implicitly also test the {@link
* StreamMultipleInputProcessor}.
*/
@SuppressWarnings("serial")
public class MultipleInputStreamTaskTest {
private static final List<String> LIFE_CYCLE_EVENTS = new ArrayList<>();
@Before
public void setUp() {
// Each test starts from an empty life-cycle event log.
LIFE_CYCLE_EVENTS.clear();
}
/**
 * Smoke test: records fed through the harness inputs and via the chained
 * source are all mapped to strings and emitted (order across inputs is not
 * deterministic, so only the multiset of outputs is checked).
 */
@Test
public void testBasicProcessing() throws Exception {
try (StreamTaskMailboxTestHarness<String> testHarness = buildTestHarness()) {
long initialTime = 0L;
ArrayDeque<Object> expectedOutput = new ArrayDeque<>();
// Records coming from the chained source carry no timestamp.
addSourceRecords(testHarness, 1, 42, 43);
expectedOutput.add(new StreamRecord<>("42", TimestampAssigner.NO_TIMESTAMP));
expectedOutput.add(new StreamRecord<>("43", TimestampAssigner.NO_TIMESTAMP));
testHarness.processElement(new StreamRecord<>("Hello", initialTime + 1), 0);
expectedOutput.add(new StreamRecord<>("Hello", initialTime + 1));
testHarness.processElement(new StreamRecord<>(42.44d, initialTime + 3), 1);
expectedOutput.add(new StreamRecord<>("42.44", initialTime + 3));
testHarness.endInput();
testHarness.waitForTaskCompletion();
assertThat(testHarness.getOutput(), containsInAnyOrder(expectedOutput.toArray()));
}
}
/**
 * This test verifies that checkpoint barriers are correctly forwarded: the
 * barrier may only be emitted downstream once it has arrived on every channel
 * of every input.
 */
@Test
public void testCheckpointBarriers() throws Exception {
try (StreamTaskMailboxTestHarness<String> testHarness =
new StreamTaskMailboxTestHarnessBuilder<>(
MultipleInputStreamTask::new, BasicTypeInfo.STRING_TYPE_INFO)
.addInput(BasicTypeInfo.STRING_TYPE_INFO, 2)
.addInput(BasicTypeInfo.INT_TYPE_INFO, 2)
.addInput(BasicTypeInfo.DOUBLE_TYPE_INFO, 2)
.setupOutputForSingletonOperatorChain(
new MapToStringMultipleInputOperatorFactory(3))
.build()) {
ArrayDeque<Object> expectedOutput = new ArrayDeque<>();
long initialTime = 0L;
// Barrier on input 0, channel 0 only — the other channels stay open.
testHarness.processEvent(
new CheckpointBarrier(
0, 0, CheckpointOptions.forCheckpointWithDefaultLocation()),
0,
0);
// This one should go through
testHarness.processElement(new StreamRecord<>("Ciao-0-0", initialTime), 0, 1);
expectedOutput.add(new StreamRecord<>("Ciao-0-0", initialTime));
// These elements should be forwarded, since we did not yet receive a checkpoint barrier
// on that input, only add to same input, otherwise we would not know the ordering
// of the output since the Task might read the inputs in any order
testHarness.processElement(new StreamRecord<>(11, initialTime), 1, 1);
testHarness.processElement(new StreamRecord<>(1.0d, initialTime), 2, 0);
expectedOutput.add(new StreamRecord<>("11", initialTime));
expectedOutput.add(new StreamRecord<>("1.0", initialTime));
assertThat(testHarness.getOutput(), contains(expectedOutput.toArray()));
// Deliver the same barrier on all remaining channels.
testHarness.processEvent(
new CheckpointBarrier(
0, 0, CheckpointOptions.forCheckpointWithDefaultLocation()),
0,
1);
testHarness.processEvent(
new CheckpointBarrier(
0, 0, CheckpointOptions.forCheckpointWithDefaultLocation()),
1,
0);
testHarness.processEvent(
new CheckpointBarrier(
0, 0, CheckpointOptions.forCheckpointWithDefaultLocation()),
1,
1);
testHarness.processEvent(
new CheckpointBarrier(
0, 0, CheckpointOptions.forCheckpointWithDefaultLocation()),
2,
0);
testHarness.processEvent(
new CheckpointBarrier(
0, 0, CheckpointOptions.forCheckpointWithDefaultLocation()),
2,
1);
// now we should see the barrier
expectedOutput.add(
new CheckpointBarrier(
0, 0, CheckpointOptions.forCheckpointWithDefaultLocation()));
assertThat(testHarness.getOutput(), contains(expectedOutput.toArray()));
}
}
/**
 * This test verifies that checkpoint barriers and barrier buffers work correctly with
 * concurrent checkpoint barriers where one checkpoint is "overtaking" another checkpoint, i.e.
 * some inputs receive barriers from an earlier checkpoint, thereby blocking, then all inputs
 * receive barriers from a later checkpoint.
 */
@Test
public void testOvertakingCheckpointBarriers() throws Exception {
try (StreamTaskMailboxTestHarness<String> testHarness =
new StreamTaskMailboxTestHarnessBuilder<>(
MultipleInputStreamTask::new, BasicTypeInfo.STRING_TYPE_INFO)
.addInput(BasicTypeInfo.STRING_TYPE_INFO, 2)
.addInput(BasicTypeInfo.INT_TYPE_INFO, 2)
.addInput(BasicTypeInfo.DOUBLE_TYPE_INFO, 2)
.setupOutputForSingletonOperatorChain(
new MapToStringMultipleInputOperatorFactory(3))
.build()) {
ArrayDeque<Object> expectedOutput = new ArrayDeque<>();
long initialTime = 0L;
// Barrier for checkpoint 0 on a single channel blocks that channel only.
testHarness.processEvent(
new CheckpointBarrier(
0, 0, CheckpointOptions.forCheckpointWithDefaultLocation()),
0,
0);
// These elements should be forwarded, since we did not yet receive a checkpoint barrier
// on that input, only add to same input, otherwise we would not know the ordering
// of the output since the Task might read the inputs in any order
testHarness.processElement(new StreamRecord<>("Witam-0-1", initialTime), 0, 1);
testHarness.processElement(new StreamRecord<>(42, initialTime), 1, 1);
testHarness.processElement(new StreamRecord<>(1.0d, initialTime), 2, 1);
expectedOutput.add(new StreamRecord<>("Witam-0-1", initialTime));
expectedOutput.add(new StreamRecord<>("42", initialTime));
expectedOutput.add(new StreamRecord<>("1.0", initialTime));
// we should not yet see the barrier, only the two elements from non-blocked input
assertThat(testHarness.getOutput(), contains(expectedOutput.toArray()));
// Now give a later barrier to all inputs, this should unblock the first channel
testHarness.processEvent(
new CheckpointBarrier(
1, 1, CheckpointOptions.forCheckpointWithDefaultLocation()),
0,
1);
testHarness.processEvent(
new CheckpointBarrier(
1, 1, CheckpointOptions.forCheckpointWithDefaultLocation()),
1,
0);
testHarness.processEvent(
new CheckpointBarrier(
1, 1, CheckpointOptions.forCheckpointWithDefaultLocation()),
1,
1);
testHarness.processEvent(
new CheckpointBarrier(
1, 1, CheckpointOptions.forCheckpointWithDefaultLocation()),
2,
0);
testHarness.processEvent(
new CheckpointBarrier(
1, 1, CheckpointOptions.forCheckpointWithDefaultLocation()),
2,
1);
testHarness.processEvent(
new CheckpointBarrier(
1, 1, CheckpointOptions.forCheckpointWithDefaultLocation()),
0,
0);
// Checkpoint 0 is cancelled (overtaken by 1) before barrier 1 is emitted.
expectedOutput.add(new CancelCheckpointMarker(0));
expectedOutput.add(
new CheckpointBarrier(
1, 1, CheckpointOptions.forCheckpointWithDefaultLocation()));
assertThat(testHarness.getOutput(), contains(expectedOutput.toArray()));
// Then give the earlier barrier, these should be ignored
testHarness.processEvent(
new CheckpointBarrier(
0, 0, CheckpointOptions.forCheckpointWithDefaultLocation()),
0,
1);
testHarness.processEvent(
new CheckpointBarrier(
0, 0, CheckpointOptions.forCheckpointWithDefaultLocation()),
1,
0);
testHarness.processEvent(
new CheckpointBarrier(
0, 0, CheckpointOptions.forCheckpointWithDefaultLocation()),
1,
1);
testHarness.processEvent(
new CheckpointBarrier(
0, 0, CheckpointOptions.forCheckpointWithDefaultLocation()),
2,
0);
testHarness.processEvent(
new CheckpointBarrier(
0, 0, CheckpointOptions.forCheckpointWithDefaultLocation()),
2,
1);
testHarness.waitForTaskCompletion();
assertThat(testHarness.getOutput(), contains(expectedOutput.toArray()));
}
}
/**
 * With chained sources, task's and main operator's number of input records are two different
 * things. The first one should take into account only records coming in from the network,
 * ignoring records produced inside the task itself (like via a chained source). Main operator
 * should on the other hand report all records from all of the inputs (regardless if it's a
 * network or chained input).
 */
@Test
public void testMetrics() throws Exception {
HashMap<String, OperatorMetricGroup> operatorMetrics = new HashMap<>();
// Task metric group that captures each operator's metric group by name so
// the per-operator counters can be inspected below.
TaskMetricGroup taskMetricGroup =
new UnregisteredMetricGroups.UnregisteredTaskMetricGroup() {
@Override
public OperatorMetricGroup getOrAddOperator(
OperatorID operatorID, String name) {
OperatorMetricGroup operatorMetricGroup =
new OperatorMetricGroup(
NoOpMetricRegistry.INSTANCE, this, operatorID, name);
operatorMetrics.put(name, operatorMetricGroup);
return operatorMetricGroup;
}
};
String mainOperatorName = "MainOperator";
try (StreamTaskMailboxTestHarness<String> testHarness =
new StreamTaskMailboxTestHarnessBuilder<>(
MultipleInputStreamTask::new, BasicTypeInfo.STRING_TYPE_INFO)
.modifyExecutionConfig(config -> config.enableObjectReuse())
.addInput(BasicTypeInfo.STRING_TYPE_INFO)
.addSourceInput(
new SourceOperatorFactory<>(
new LifeCycleTrackingMockSource(Boundedness.BOUNDED, 1),
WatermarkStrategy.noWatermarks()))
.addInput(BasicTypeInfo.STRING_TYPE_INFO)
.setupOperatorChain(new MapToStringMultipleInputOperatorFactory(3))
.name(mainOperatorName)
.chain(
new OneInputStreamTaskTest.DuplicatingOperator(),
BasicTypeInfo.STRING_TYPE_INFO.createSerializer(
new ExecutionConfig()))
.chain(
new OneInputStreamTaskTest.DuplicatingOperator(),
BasicTypeInfo.STRING_TYPE_INFO.createSerializer(
new ExecutionConfig()))
.chain(
new OneInputStreamTaskTest.DuplicatingOperator(),
BasicTypeInfo.STRING_TYPE_INFO.createSerializer(
new ExecutionConfig()))
.finish()
.setTaskMetricGroup(taskMetricGroup)
.build()) {
assertTrue(operatorMetrics.containsKey(mainOperatorName));
OperatorMetricGroup mainOperatorMetrics = operatorMetrics.get(mainOperatorName);
Counter numRecordsInCounter =
taskMetricGroup.getIOMetricGroup().getNumRecordsInCounter();
Counter numRecordsOutCounter =
taskMetricGroup.getIOMetricGroup().getNumRecordsOutCounter();
int numRecords1 = 5;
int numRecords2 = 3;
int numRecords3 = 2;
// add source splits before processing any elements, so the MockSourceReader does not
// end prematurely
for (int x = 0; x < numRecords2; x++) {
addSourceRecords(testHarness, 1, 42);
}
for (int x = 0; x < numRecords1; x++) {
testHarness.processElement(new StreamRecord<>("hello"), 0, 0);
}
for (int x = 0; x < numRecords3; x++) {
testHarness.processElement(new StreamRecord<>("hello"), 1, 0);
}
// Task-level "in" excludes the chained-source records; the main operator
// counts them as well.
int networkRecordsIn = numRecords1 + numRecords3;
int mainOperatorRecordsIn = networkRecordsIn + numRecords2;
int totalRecordsOut =
mainOperatorRecordsIn
* 2
* 2
* 2; // there are three operators duplicating the records
assertEquals(
mainOperatorRecordsIn,
mainOperatorMetrics.getIOMetricGroup().getNumRecordsInCounter().getCount());
assertEquals(networkRecordsIn, numRecordsInCounter.getCount());
assertEquals(totalRecordsOut, numRecordsOutCounter.getCount());
testHarness.waitForTaskCompletion();
}
}
/** Three-input operator that emits every incoming element twice, unchanged. */
static class DuplicatingOperator extends AbstractStreamOperatorV2<String>
implements MultipleInputStreamOperator<String> {
public DuplicatingOperator(StreamOperatorParameters<String> parameters) {
// 3 = number of inputs; must match the size of the list returned by getInputs().
super(parameters, 3);
}
@Override
public List<Input> getInputs() {
return Arrays.asList(
new DuplicatingInput(this, 1),
new DuplicatingInput(this, 2),
new DuplicatingInput(this, 3));
}
/** {@link Input} that forwards each record to the output twice. */
class DuplicatingInput extends AbstractInput<String, String> {
public DuplicatingInput(AbstractStreamOperatorV2<String> owner, int inputId) {
super(owner, inputId);
}
@Override
public void processElement(StreamRecord<String> element) throws Exception {
output.collect(element);
output.collect(element);
}
}
}
/**
 * Verifies the relative ordering of open/start/endInput/close calls across the chained
 * source reader, the main multiple-input operator, and the chained map operator.
 */
@Test
public void testLifeCycleOrder() throws Exception {
try (StreamTaskMailboxTestHarness<String> testHarness =
new StreamTaskMailboxTestHarnessBuilder<>(
MultipleInputStreamTask::new, BasicTypeInfo.STRING_TYPE_INFO)
.modifyExecutionConfig(config -> config.enableObjectReuse())
.addInput(BasicTypeInfo.STRING_TYPE_INFO)
.addSourceInput(
new SourceOperatorFactory<>(
new LifeCycleTrackingMockSource(Boundedness.BOUNDED, 1),
WatermarkStrategy.noWatermarks()))
.addInput(BasicTypeInfo.DOUBLE_TYPE_INFO)
.setupOperatorChain(
new LifeCycleTrackingMapToStringMultipleInputOperatorFactory())
.chain(
new LifeCycleTrackingMap<>(),
BasicTypeInfo.STRING_TYPE_INFO.createSerializer(
new ExecutionConfig()))
.finish()
.build()) {
testHarness.waitForTaskCompletion();
}
// Operators are opened downstream-first and closed upstream-first; the main operator
// sees one END_INPUT per input (3 inputs).
assertThat(
LIFE_CYCLE_EVENTS,
contains(
LifeCycleTrackingMap.OPEN,
LifeCycleTrackingMapToStringMultipleInputOperator.OPEN,
LifeCycleTrackingMockSourceReader.START,
LifeCycleTrackingMapToStringMultipleInputOperator.END_INPUT,
LifeCycleTrackingMapToStringMultipleInputOperator.END_INPUT,
LifeCycleTrackingMapToStringMultipleInputOperator.END_INPUT,
LifeCycleTrackingMockSourceReader.CLOSE,
LifeCycleTrackingMapToStringMultipleInputOperator.CLOSE,
LifeCycleTrackingMap.END_INPUT,
LifeCycleTrackingMap.CLOSE));
}
/** Verifies that the task does not starve any network input when multiple inputs have data. */
@Test
public void testInputFairness() throws Exception {
try (StreamTaskMailboxTestHarness<String> testHarness =
new StreamTaskMailboxTestHarnessBuilder<>(
MultipleInputStreamTask::new, BasicTypeInfo.STRING_TYPE_INFO)
.addInput(BasicTypeInfo.STRING_TYPE_INFO)
.addInput(BasicTypeInfo.STRING_TYPE_INFO)
.addInput(BasicTypeInfo.STRING_TYPE_INFO)
.setupOutputForSingletonOperatorChain(
new MapToStringMultipleInputOperatorFactory(3))
.build()) {
ArrayDeque<Object> expectedOutput = new ArrayDeque<>();
// Queue elements first without processing so both inputs have backlog when
// processing starts.
testHarness.setAutoProcess(false);
testHarness.processElement(new StreamRecord<>("0"), 0);
testHarness.processElement(new StreamRecord<>("1"), 0);
testHarness.processElement(new StreamRecord<>("2"), 0);
testHarness.processElement(new StreamRecord<>("3"), 0);
testHarness.processElement(new StreamRecord<>("0"), 2);
testHarness.processElement(new StreamRecord<>("1"), 2);
testHarness.processAll();
// We do not know which of the input will be picked first, but we are expecting them
// to alternate
// NOTE: the behaviour of alternation once per record is not part of any contract.
// Task is just expected to not starve any of the inputs, it just happens to be
// currently implemented in truly "fair" fashion. That means this test might need
// to be adjusted if logic changes.
expectedOutput.add(new StreamRecord<>("0"));
expectedOutput.add(new StreamRecord<>("0"));
expectedOutput.add(new StreamRecord<>("1"));
expectedOutput.add(new StreamRecord<>("1"));
expectedOutput.add(new StreamRecord<>("2"));
expectedOutput.add(new StreamRecord<>("3"));
assertThat(testHarness.getOutput(), contains(expectedOutput.toArray()));
}
}
/**
 * Verifies watermark propagation: a watermark is only emitted once all channels of all three
 * inputs (two network inputs with 2 channels each, one source input) have provided one, and
 * the emitted watermark is the minimum across them.
 */
@Test
public void testWatermark() throws Exception {
try (StreamTaskMailboxTestHarness<String> testHarness =
new StreamTaskMailboxTestHarnessBuilder<>(
MultipleInputStreamTask::new, BasicTypeInfo.STRING_TYPE_INFO)
.modifyExecutionConfig(config -> config.enableObjectReuse())
.addInput(BasicTypeInfo.STRING_TYPE_INFO, 2)
.addSourceInput(
new SourceOperatorFactory<>(
new MockSource(
Boundedness.CONTINUOUS_UNBOUNDED, 2, true, false),
WatermarkStrategy.forGenerator(
ctx -> new RecordToWatermarkGenerator())))
.addInput(BasicTypeInfo.DOUBLE_TYPE_INFO, 2)
.setupOutputForSingletonOperatorChain(
new MapToStringMultipleInputOperatorFactory(3))
.build()) {
ArrayDeque<Object> expectedOutput = new ArrayDeque<>();
int initialTime = 0;
testHarness.processElement(new Watermark(initialTime), 0, 0);
testHarness.processElement(new Watermark(initialTime), 0, 1);
// the source emits a record whose value becomes its watermark (RecordToWatermarkGenerator)
addSourceRecords(testHarness, 1, initialTime);
expectedOutput.add(
new StreamRecord<>("" + (initialTime), TimestampAssigner.NO_TIMESTAMP));
testHarness.processElement(new Watermark(initialTime), 1, 0);
assertThat(testHarness.getOutput(), contains(expectedOutput.toArray()));
testHarness.processElement(new Watermark(initialTime), 1, 1);
// now the watermark should have propagated, Map simply forward Watermarks
expectedOutput.add(new Watermark(initialTime));
assertThat(testHarness.getOutput(), contains(expectedOutput.toArray()));
// contrary to checkpoint barriers these elements are not blocked by watermarks
testHarness.processElement(new StreamRecord<>("Hello", initialTime), 0, 0);
testHarness.processElement(new StreamRecord<>(42.0, initialTime), 1, 1);
expectedOutput.add(new StreamRecord<>("Hello", initialTime));
expectedOutput.add(new StreamRecord<>("42.0", initialTime));
assertThat(testHarness.getOutput(), contains(expectedOutput.toArray()));
testHarness.processElement(new Watermark(initialTime + 4), 0, 0);
testHarness.processElement(new Watermark(initialTime + 3), 0, 1);
addSourceRecords(testHarness, 1, initialTime + 3);
expectedOutput.add(
new StreamRecord<>("" + (initialTime + 3), TimestampAssigner.NO_TIMESTAMP));
testHarness.processElement(new Watermark(initialTime + 3), 1, 0);
testHarness.processElement(new Watermark(initialTime + 2), 1, 1);
// check whether we get the minimum of all the watermarks, this must also only occur in
// the output after the two StreamRecords
expectedOutput.add(new Watermark(initialTime + 2));
assertThat(testHarness.getOutput(), contains(expectedOutput.toArray()));
// advance watermark from one of the inputs, now we should get a new one since the
// minimum increases
testHarness.processElement(new Watermark(initialTime + 4), 1, 1);
expectedOutput.add(new Watermark(initialTime + 3));
assertThat(testHarness.getOutput(), contains(expectedOutput.toArray()));
// advance the other inputs, now we should get a new one since the minimum increases
// again
testHarness.processElement(new Watermark(initialTime + 4), 0, 1);
addSourceRecords(testHarness, 1, initialTime + 4);
expectedOutput.add(
new StreamRecord<>("" + (initialTime + 4), TimestampAssigner.NO_TIMESTAMP));
testHarness.processElement(new Watermark(initialTime + 4), 1, 0);
expectedOutput.add(new Watermark(initialTime + 4));
assertThat(testHarness.getOutput(), contains(expectedOutput.toArray()));
// 2 network records ("Hello", "42.0") + 3 source records
List<String> resultElements =
TestHarnessUtil.getRawElementsFromOutput(testHarness.getOutput());
assertEquals(5, resultElements.size());
}
}
/**
 * This test verifies that watermarks and stream statuses are correctly forwarded. This also
 * checks whether watermarks are forwarded only when we have received watermarks from all
 * inputs. The forwarded watermark must be the minimum of the watermarks of all active inputs.
 */
@Test
public void testWatermarkAndStreamStatusForwarding() throws Exception {
try (StreamTaskMailboxTestHarness<String> testHarness =
new StreamTaskMailboxTestHarnessBuilder<>(
MultipleInputStreamTask::new, BasicTypeInfo.STRING_TYPE_INFO)
.modifyExecutionConfig(config -> config.enableObjectReuse())
.addInput(BasicTypeInfo.STRING_TYPE_INFO, 2)
.addSourceInput(
new SourceOperatorFactory<>(
new MockSource(
Boundedness.CONTINUOUS_UNBOUNDED, 2, true, true),
WatermarkStrategy.forGenerator(
ctx -> new RecordToWatermarkGenerator())))
.addInput(BasicTypeInfo.DOUBLE_TYPE_INFO, 2)
.setupOutputForSingletonOperatorChain(
new MapToStringMultipleInputOperatorFactory(3))
.build()) {
ArrayDeque<Object> expectedOutput = new ArrayDeque<>();
int initialTime = 0;
// test whether idle input channels are acknowledged correctly when forwarding
// watermarks
testHarness.processElement(StreamStatus.IDLE, 0, 1);
testHarness.processElement(new Watermark(initialTime + 6), 0, 0);
testHarness.processElement(
new Watermark(initialTime + 5),
1,
1); // this watermark should be advanced first
testHarness.processElement(StreamStatus.IDLE, 1, 0); // once this is acknowledged,
// We don't expect to see Watermark(6) here because the idle status of one
// input doesn't propagate to the other input. That is, if input 1 is at WM 6 and input
// two was at WM 5 before going to IDLE then the output watermark will not jump to WM 6.
// Oops, there is a known bug: https://issues.apache.org/jira/browse/FLINK-18934
// that prevents this check from succeeding (AbstractStreamOperator and
// AbstractStreamOperatorV2
// are ignoring StreamStatus), so those checks need to be commented out ...
// expectedOutput.add(new Watermark(initialTime + 5));
// assertThat(testHarness.getOutput(), contains(expectedOutput.toArray()));
// and in as a temporary replacement we need this code block:
{
// we wake up the source and emit watermark
addSourceRecords(testHarness, 1, initialTime + 5);
testHarness.processAll();
expectedOutput.add(
new StreamRecord<>("" + (initialTime + 5), TimestampAssigner.NO_TIMESTAMP));
expectedOutput.add(new Watermark(initialTime + 5));
// the source should go back to being idle immediately, but AbstractStreamOperatorV2
// should have updated its watermark by then.
}
assertThat(testHarness.getOutput(), contains(expectedOutput.toArray()));
// make all input channels idle and check that the operator's idle status is forwarded
testHarness.processElement(StreamStatus.IDLE, 0, 0);
testHarness.processElement(StreamStatus.IDLE, 1, 1);
expectedOutput.add(StreamStatus.IDLE);
assertThat(testHarness.getOutput(), contains(expectedOutput.toArray()));
// make source active once again, emit a watermark and go idle again.
addSourceRecords(testHarness, 1, initialTime + 10);
expectedOutput.add(
new StreamRecord<>("" + (initialTime + 10), TimestampAssigner.NO_TIMESTAMP));
expectedOutput.add(StreamStatus.ACTIVE);
expectedOutput.add(StreamStatus.IDLE);
testHarness.processAll();
assertThat(testHarness.getOutput(), contains(expectedOutput.toArray()));
// make some network input channel active again
testHarness.processElement(StreamStatus.ACTIVE, 0, 1);
expectedOutput.add(StreamStatus.ACTIVE);
assertThat(testHarness.getOutput(), contains(expectedOutput.toArray()));
}
}
/**
 * Verifies the per-input, aggregated input, and output watermark gauges of the task, the main
 * (multiple-input) operator, and a chained downstream operator as watermarks advance on the
 * individual inputs.
 */
@Test
@SuppressWarnings("unchecked")
public void testWatermarkMetrics() throws Exception {
OperatorID mainOperatorId = new OperatorID();
OperatorID chainedOperatorId = new OperatorID();
InterceptingOperatorMetricGroup mainOperatorMetricGroup =
new InterceptingOperatorMetricGroup();
InterceptingOperatorMetricGroup chainedOperatorMetricGroup =
new InterceptingOperatorMetricGroup();
// Route metric registration for the two known operator IDs to the intercepting groups.
InterceptingTaskMetricGroup taskMetricGroup =
new InterceptingTaskMetricGroup() {
@Override
public OperatorMetricGroup getOrAddOperator(OperatorID id, String name) {
if (id.equals(mainOperatorId)) {
return mainOperatorMetricGroup;
} else if (id.equals(chainedOperatorId)) {
return chainedOperatorMetricGroup;
} else {
return super.getOrAddOperator(id, name);
}
}
};
try (StreamTaskMailboxTestHarness<String> testHarness =
new StreamTaskMailboxTestHarnessBuilder<>(
MultipleInputStreamTask::new, BasicTypeInfo.STRING_TYPE_INFO)
.modifyExecutionConfig(config -> config.enableObjectReuse())
.addInput(BasicTypeInfo.STRING_TYPE_INFO)
.addSourceInput(
new SourceOperatorFactory<>(
new MockSource(
Boundedness.CONTINUOUS_UNBOUNDED, 2, true, false),
WatermarkStrategy.forGenerator(
ctx -> new RecordToWatermarkGenerator())))
.addInput(BasicTypeInfo.DOUBLE_TYPE_INFO)
.setupOperatorChain(
mainOperatorId, new MapToStringMultipleInputOperatorFactory(3))
.chain(
chainedOperatorId,
new WatermarkMetricOperator(),
BasicTypeInfo.STRING_TYPE_INFO.createSerializer(
new ExecutionConfig()))
.finish()
.setTaskMetricGroup(taskMetricGroup)
.build()) {
Gauge<Long> taskInputWatermarkGauge =
(Gauge<Long>) taskMetricGroup.get(MetricNames.IO_CURRENT_INPUT_WATERMARK);
Gauge<Long> mainInput1WatermarkGauge =
(Gauge<Long>)
mainOperatorMetricGroup.get(MetricNames.currentInputWatermarkName(1));
Gauge<Long> mainInput2WatermarkGauge =
(Gauge<Long>)
mainOperatorMetricGroup.get(MetricNames.currentInputWatermarkName(2));
Gauge<Long> mainInput3WatermarkGauge =
(Gauge<Long>)
mainOperatorMetricGroup.get(MetricNames.currentInputWatermarkName(3));
Gauge<Long> mainInputWatermarkGauge =
(Gauge<Long>)
mainOperatorMetricGroup.get(MetricNames.IO_CURRENT_INPUT_WATERMARK);
Gauge<Long> mainOutputWatermarkGauge =
(Gauge<Long>)
mainOperatorMetricGroup.get(MetricNames.IO_CURRENT_OUTPUT_WATERMARK);
Gauge<Long> chainedInputWatermarkGauge =
(Gauge<Long>)
chainedOperatorMetricGroup.get(MetricNames.IO_CURRENT_INPUT_WATERMARK);
Gauge<Long> chainedOutputWatermarkGauge =
(Gauge<Long>)
chainedOperatorMetricGroup.get(MetricNames.IO_CURRENT_OUTPUT_WATERMARK);
// before any watermark arrives, all gauges report Long.MIN_VALUE
assertEquals(Long.MIN_VALUE, taskInputWatermarkGauge.getValue().longValue());
assertEquals(Long.MIN_VALUE, mainInputWatermarkGauge.getValue().longValue());
assertEquals(Long.MIN_VALUE, mainInput1WatermarkGauge.getValue().longValue());
assertEquals(Long.MIN_VALUE, mainInput2WatermarkGauge.getValue().longValue());
assertEquals(Long.MIN_VALUE, mainInput3WatermarkGauge.getValue().longValue());
assertEquals(Long.MIN_VALUE, mainOutputWatermarkGauge.getValue().longValue());
assertEquals(Long.MIN_VALUE, chainedInputWatermarkGauge.getValue().longValue());
assertEquals(Long.MIN_VALUE, chainedOutputWatermarkGauge.getValue().longValue());
// advance input 1 only — aggregated gauges stay at MIN_VALUE (min over all inputs)
testHarness.processElement(new Watermark(1L), 0);
assertEquals(Long.MIN_VALUE, taskInputWatermarkGauge.getValue().longValue());
assertEquals(Long.MIN_VALUE, mainInputWatermarkGauge.getValue().longValue());
assertEquals(1L, mainInput1WatermarkGauge.getValue().longValue());
assertEquals(Long.MIN_VALUE, mainInput2WatermarkGauge.getValue().longValue());
assertEquals(Long.MIN_VALUE, mainInput3WatermarkGauge.getValue().longValue());
assertEquals(Long.MIN_VALUE, mainOutputWatermarkGauge.getValue().longValue());
assertEquals(Long.MIN_VALUE, chainedInputWatermarkGauge.getValue().longValue());
assertEquals(Long.MIN_VALUE, chainedOutputWatermarkGauge.getValue().longValue());
// advance the source input (input 2) via a record — input 3 still at MIN_VALUE
addSourceRecords(testHarness, 1, 2);
testHarness.processAll();
assertEquals(Long.MIN_VALUE, taskInputWatermarkGauge.getValue().longValue());
assertEquals(Long.MIN_VALUE, mainInputWatermarkGauge.getValue().longValue());
assertEquals(1L, mainInput1WatermarkGauge.getValue().longValue());
assertEquals(2L, mainInput2WatermarkGauge.getValue().longValue());
assertEquals(Long.MIN_VALUE, mainInput3WatermarkGauge.getValue().longValue());
assertEquals(Long.MIN_VALUE, mainOutputWatermarkGauge.getValue().longValue());
assertEquals(Long.MIN_VALUE, chainedInputWatermarkGauge.getValue().longValue());
assertEquals(Long.MIN_VALUE, chainedOutputWatermarkGauge.getValue().longValue());
// advance input 3 — now every input has a watermark, the minimum (1) propagates;
// WatermarkMetricOperator emits input watermark + 1 downstream (hence chained output 2)
testHarness.processElement(new Watermark(2L), 1);
assertEquals(1L, taskInputWatermarkGauge.getValue().longValue());
assertEquals(1L, mainInputWatermarkGauge.getValue().longValue());
assertEquals(1L, mainInput1WatermarkGauge.getValue().longValue());
assertEquals(2L, mainInput2WatermarkGauge.getValue().longValue());
assertEquals(2L, mainInput3WatermarkGauge.getValue().longValue());
assertEquals(1L, mainOutputWatermarkGauge.getValue().longValue());
assertEquals(1L, chainedInputWatermarkGauge.getValue().longValue());
assertEquals(2L, chainedOutputWatermarkGauge.getValue().longValue());
// advance inputs 1 and 2 further — the minimum (input 3 at 2) dominates
testHarness.processElement(new Watermark(4L), 0);
addSourceRecords(testHarness, 1, 3);
testHarness.processAll();
assertEquals(2L, taskInputWatermarkGauge.getValue().longValue());
assertEquals(2L, mainInputWatermarkGauge.getValue().longValue());
assertEquals(4L, mainInput1WatermarkGauge.getValue().longValue());
assertEquals(3L, mainInput2WatermarkGauge.getValue().longValue());
assertEquals(2L, mainInput3WatermarkGauge.getValue().longValue());
assertEquals(2L, mainOutputWatermarkGauge.getValue().longValue());
assertEquals(2L, chainedInputWatermarkGauge.getValue().longValue());
assertEquals(4L, chainedOutputWatermarkGauge.getValue().longValue());
finishAddingRecords(testHarness, 1);
testHarness.endInput();
testHarness.waitForTaskCompletion();
}
}
/**
 * Tests the checkpoint related metrics are registered into {@link TaskIOMetricGroup} correctly
 * while generating the {@link MultipleInputStreamTask}.
 */
@Test
public void testCheckpointBarrierMetrics() throws Exception {
final Map<String, Metric> metrics = new ConcurrentHashMap<>();
final TaskMetricGroup taskMetricGroup =
new StreamTaskTestHarness.TestTaskMetricGroup(metrics);
try (StreamTaskMailboxTestHarness<String> testHarness =
new StreamTaskMailboxTestHarnessBuilder<>(
MultipleInputStreamTask::new, BasicTypeInfo.STRING_TYPE_INFO)
.addInput(BasicTypeInfo.STRING_TYPE_INFO, 2)
.addInput(BasicTypeInfo.INT_TYPE_INFO, 2)
.addInput(BasicTypeInfo.DOUBLE_TYPE_INFO, 2)
.setupOutputForSingletonOperatorChain(
new MapToStringMultipleInputOperatorFactory(3))
.setTaskMetricGroup(taskMetricGroup)
.build()) {
// the metrics must be registered at task construction time, before any data flows
assertThat(metrics, IsMapContaining.hasKey(MetricNames.CHECKPOINT_ALIGNMENT_TIME));
assertThat(metrics, IsMapContaining.hasKey(MetricNames.CHECKPOINT_START_DELAY_TIME));
testHarness.endInput();
testHarness.waitForTaskCompletion();
}
}
/** Verifies that latency markers are forwarded through the multiple-input task unchanged. */
@Test
public void testLatencyMarker() throws Exception {
final Map<String, Metric> metrics = new ConcurrentHashMap<>();
final TaskMetricGroup taskMetricGroup =
new StreamTaskTestHarness.TestTaskMetricGroup(metrics);
try (StreamTaskMailboxTestHarness<String> testHarness =
new StreamTaskMailboxTestHarnessBuilder<>(
MultipleInputStreamTask::new, BasicTypeInfo.STRING_TYPE_INFO)
.addInput(BasicTypeInfo.STRING_TYPE_INFO)
.addInput(BasicTypeInfo.INT_TYPE_INFO)
.addInput(BasicTypeInfo.DOUBLE_TYPE_INFO)
.setupOutputForSingletonOperatorChain(
new MapToStringMultipleInputOperatorFactory(3))
.setTaskMetricGroup(taskMetricGroup)
.build()) {
ArrayDeque<Object> expectedOutput = new ArrayDeque<>();
OperatorID sourceId = new OperatorID();
LatencyMarker latencyMarker = new LatencyMarker(42L, sourceId, 0);
testHarness.processElement(latencyMarker);
// the marker must appear in the output as-is
expectedOutput.add(latencyMarker);
assertThat(testHarness.getOutput(), contains(expectedOutput.toArray()));
testHarness.endInput();
testHarness.waitForTaskCompletion();
}
}
/** Test implementation of {@link MultipleInputStreamOperator}. */
protected static class MapToStringMultipleInputOperator extends AbstractStreamOperatorV2<String>
implements MultipleInputStreamOperator<String> {
private static final long serialVersionUID = 1L;
// number of inputs actually exposed by getInputs(); at most 3
private final int numberOfInputs;
// life-cycle flags used to assert correct open/close ordering
private boolean openCalled;
private boolean closeCalled;
public MapToStringMultipleInputOperator(
StreamOperatorParameters<String> parameters, int numberOfInputs) {
super(parameters, numberOfInputs);
this.numberOfInputs = numberOfInputs;
}
@Override
public void open() throws Exception {
super.open();
if (closeCalled) {
Assert.fail("Close called before open.");
}
openCalled = true;
}
@Override
public void close() throws Exception {
super.close();
if (!openCalled) {
Assert.fail("Open was not called before close.");
}
closeCalled = true;
}
@Override
public List<Input> getInputs() {
checkArgument(numberOfInputs <= 3);
// inputs are typed String/Integer/Double, matching the harnesses built in this test
return Arrays.<Input>asList(
new MapToStringInput<String>(this, 1),
new MapToStringInput<Integer>(this, 2),
new MapToStringInput<Double>(this, 3))
.subList(0, numberOfInputs);
}
public boolean wasCloseCalled() {
return closeCalled;
}
/** {@link Input} for {@link MapToStringMultipleInputOperator}. */
public class MapToStringInput<T> extends AbstractInput<T, String> {
public MapToStringInput(AbstractStreamOperatorV2<String> owner, int inputId) {
super(owner, inputId);
}
@Override
public void processElement(StreamRecord<T> element) throws Exception {
if (!openCalled) {
Assert.fail("Open was not called before run.");
}
// map the value to its string form, preserving the timestamp if one is set
if (element.hasTimestamp()) {
output.collect(
new StreamRecord<>(
element.getValue().toString(), element.getTimestamp()));
} else {
output.collect(new StreamRecord<>(element.getValue().toString()));
}
}
}
}
/** Factory that instantiates a {@link TestBoundedMultipleInputOperator} named "Operator0". */
private static class TestBoundedMultipleInputOperatorFactory
        extends AbstractStreamOperatorFactory<String> {

    @Override
    @SuppressWarnings("unchecked") // cast is safe: T is the operator type requested by the caller
    public <T extends StreamOperator<String>> T createStreamOperator(
            StreamOperatorParameters<String> parameters) {
        return (T) new TestBoundedMultipleInputOperator("Operator0", parameters);
    }

    @Override
    public Class<? extends StreamOperator<String>> getStreamOperatorClass(
            ClassLoader classLoader) {
        return TestBoundedMultipleInputOperator.class;
    }
}
/** Factory that instantiates a {@link DuplicatingOperator}. */
private static class DuplicatingOperatorFactory extends AbstractStreamOperatorFactory<String> {

    @Override
    @SuppressWarnings("unchecked") // cast is safe: T is the operator type requested by the caller
    public <T extends StreamOperator<String>> T createStreamOperator(
            StreamOperatorParameters<String> parameters) {
        return (T) new DuplicatingOperator(parameters);
    }

    @Override
    public Class<? extends StreamOperator<String>> getStreamOperatorClass(
            ClassLoader classLoader) {
        return DuplicatingOperator.class;
    }
}
/** Factory for {@link MapToStringMultipleInputOperator}. */
protected static class MapToStringMultipleInputOperatorFactory
        extends AbstractStreamOperatorFactory<String> {

    /** Number of inputs the created operator will expose (at most 3). */
    private final int numberOfInputs;

    public MapToStringMultipleInputOperatorFactory(int numberOfInputs) {
        this.numberOfInputs = numberOfInputs;
    }

    @Override
    @SuppressWarnings("unchecked") // cast is safe: T is the operator type requested by the caller
    public <T extends StreamOperator<String>> T createStreamOperator(
            StreamOperatorParameters<String> parameters) {
        return (T) new MapToStringMultipleInputOperator(parameters, numberOfInputs);
    }

    @Override
    public Class<? extends StreamOperator<String>> getStreamOperatorClass(
            ClassLoader classLoader) {
        return MapToStringMultipleInputOperator.class;
    }
}
/** Builds the default three-input test harness with aligned checkpoints. */
static StreamTaskMailboxTestHarness<String> buildTestHarness() throws Exception {
return buildTestHarness(false);
}
/**
 * Builds a three-input test harness (network String input, bounded mock source, network Double
 * input) chained into a {@link MapToStringMultipleInputOperator}.
 *
 * @param unaligned whether unaligned checkpoints are enabled (with alignment timeout 0)
 */
static StreamTaskMailboxTestHarness<String> buildTestHarness(boolean unaligned)
throws Exception {
return new StreamTaskMailboxTestHarnessBuilder<>(
MultipleInputStreamTask::new, BasicTypeInfo.STRING_TYPE_INFO)
.modifyExecutionConfig(config -> config.enableObjectReuse())
.modifyStreamConfig(config -> config.setUnalignedCheckpointsEnabled(unaligned))
.modifyStreamConfig(config -> config.setAlignmentTimeout(0))
.addInput(BasicTypeInfo.STRING_TYPE_INFO)
.addSourceInput(
new SourceOperatorFactory<>(
new MockSource(Boundedness.BOUNDED, 1),
WatermarkStrategy.noWatermarks()))
.addInput(BasicTypeInfo.DOUBLE_TYPE_INFO)
.setupOutputForSingletonOperatorChain(
new MapToStringMultipleInputOperatorFactory(3))
.build();
}
/**
 * Adds the given records to the source input with the given index by dispatching an
 * {@code AddSplitEvent} containing a split with those records to the source operator.
 *
 * @param testHarness the harness whose source input should receive the records
 * @param sourceId index of the source input in the task's input list
 * @param records record values to add to the new split
 */
static void addSourceRecords(
StreamTaskMailboxTestHarness<String> testHarness, int sourceId, int... records)
throws Exception {
OperatorID sourceOperatorID = getSourceOperatorID(testHarness, sourceId);
// Prepare the source split containing the requested records.
// NOTE(review): every call uses split id 0 — presumably fine because the MockSourceReader
// does not deduplicate splits by id; confirm before reusing this helper elsewhere.
MockSourceSplit split = new MockSourceSplit(0, 0, records.length);
for (int record : records) {
split.addRecord(record);
}
// Assign the split to the source reader.
AddSplitEvent<MockSourceSplit> addSplitEvent =
new AddSplitEvent<>(
Collections.singletonList(split), new MockSourceSplitSerializer());
testHarness
.getStreamTask()
.dispatchOperatorEvent(sourceOperatorID, new SerializedValue<>(addSplitEvent));
}
/**
 * Resolves the {@link OperatorID} of the chained source operator behind the input with the
 * given index, by looking the input config up in the task's {@link StreamConfig}.
 */
private static OperatorID getSourceOperatorID(
StreamTaskMailboxTestHarness<String> testHarness, int sourceId) {
StreamConfig.InputConfig[] inputs =
testHarness
.getStreamTask()
.getConfiguration()
.getInputs(testHarness.getClass().getClassLoader());
// the input at sourceId must be a source input, not a network input
StreamConfig.SourceInputConfig input = (StreamConfig.SourceInputConfig) inputs[sourceId];
return testHarness.getStreamTask().operatorChain.getSourceTaskInput(input).getOperatorID();
}
/**
 * Signals the source input with the given index that no more splits will arrive, allowing a
 * bounded source reader to finish.
 */
private void finishAddingRecords(StreamTaskMailboxTestHarness<String> testHarness, int sourceId)
throws Exception {
testHarness
.getStreamTask()
.dispatchOperatorEvent(
getSourceOperatorID(testHarness, sourceId),
new SerializedValue<>(new NoMoreSplitsEvent()));
}
/**
 * {@link MapToStringMultipleInputOperator} that records its open/close/endInput calls in
 * {@code LIFE_CYCLE_EVENTS} so tests can assert their ordering.
 */
static class LifeCycleTrackingMapToStringMultipleInputOperator
extends MapToStringMultipleInputOperator implements BoundedMultiInput {
public static final String OPEN = "MultipleInputOperator#open";
public static final String CLOSE = "MultipleInputOperator#close";
public static final String END_INPUT = "MultipleInputOperator#endInput";
private static final long serialVersionUID = 1L;
public LifeCycleTrackingMapToStringMultipleInputOperator(
StreamOperatorParameters<String> parameters) {
// always uses all 3 inputs
super(parameters, 3);
}
@Override
public void open() throws Exception {
LIFE_CYCLE_EVENTS.add(OPEN);
super.open();
}
@Override
public void close() throws Exception {
LIFE_CYCLE_EVENTS.add(CLOSE);
super.close();
}
@Override
public void endInput(int inputId) {
// called once per input; only the fact of the call is recorded, not the input id
LIFE_CYCLE_EVENTS.add(END_INPUT);
}
}
/** Factory for {@link LifeCycleTrackingMapToStringMultipleInputOperator}. */
static class LifeCycleTrackingMapToStringMultipleInputOperatorFactory
        extends AbstractStreamOperatorFactory<String> {

    @Override
    @SuppressWarnings("unchecked") // cast is safe: T is the operator type requested by the caller
    public <T extends StreamOperator<String>> T createStreamOperator(
            StreamOperatorParameters<String> parameters) {
        return (T) new LifeCycleTrackingMapToStringMultipleInputOperator(parameters);
    }

    @Override
    public Class<? extends StreamOperator<String>> getStreamOperatorClass(
            ClassLoader classLoader) {
        return LifeCycleTrackingMapToStringMultipleInputOperator.class;
    }
}
/**
 * {@link MockSource} variant that hands out {@link LifeCycleTrackingMockSourceReader}s, which
 * record their start/close calls in {@code LIFE_CYCLE_EVENTS}.
 */
static class LifeCycleTrackingMockSource extends MockSource {

    public LifeCycleTrackingMockSource(Boundedness boundedness, int numSplits) {
        super(boundedness, numSplits);
    }

    @Override
    public SourceReader<Integer, MockSourceSplit> createReader(
            SourceReaderContext readerContext) {
        // remember every reader we hand out so tests can inspect it later
        final LifeCycleTrackingMockSourceReader reader =
                new LifeCycleTrackingMockSourceReader();
        createdReaders.add(reader);
        return reader;
    }
}
/** {@link MockSourceReader} that records its start/close calls in {@code LIFE_CYCLE_EVENTS}. */
static class LifeCycleTrackingMockSourceReader extends MockSourceReader {
public static final String START = "SourceReader#start";
public static final String CLOSE = "SourceReader#close";
@Override
public void start() {
LIFE_CYCLE_EVENTS.add(START);
super.start();
}
@Override
public void close() throws Exception {
LIFE_CYCLE_EVENTS.add(CLOSE);
super.close();
}
}
/**
 * Identity one-input operator that records open/close/endInput calls in
 * {@code LIFE_CYCLE_EVENTS}.
 */
static class LifeCycleTrackingMap<T> extends AbstractStreamOperator<T>
implements OneInputStreamOperator<T, T>, BoundedOneInput {
public static final String OPEN = "LifeCycleTrackingMap#open";
public static final String CLOSE = "LifeCycleTrackingMap#close";
public static final String END_INPUT = "LifeCycleTrackingMap#endInput";
@Override
public void processElement(StreamRecord<T> element) throws Exception {
// pass-through; only the life-cycle calls are of interest
output.collect(element);
}
@Override
public void open() throws Exception {
LIFE_CYCLE_EVENTS.add(OPEN);
super.open();
}
@Override
public void close() throws Exception {
LIFE_CYCLE_EVENTS.add(CLOSE);
super.close();
}
@Override
public void endInput() throws Exception {
LIFE_CYCLE_EVENTS.add(END_INPUT);
}
}
/** Watermark generator that turns each record's value directly into a watermark timestamp. */
private static class RecordToWatermarkGenerator
        implements WatermarkGenerator<Integer>, Serializable {

    @Override
    public void onEvent(Integer event, long eventTimestamp, WatermarkOutput output) {
        final long watermarkTimestamp = event;
        output.emitWatermark(
                new org.apache.flink.api.common.eventtime.Watermark(watermarkTimestamp));
    }

    @Override
    public void onPeriodicEmit(WatermarkOutput output) {
        // watermarks are emitted per record only; nothing to do periodically
    }
}
}
| |
package org.nem.ncc.controller;
import net.minidev.json.JSONObject;
import org.hamcrest.core.*;
import org.junit.*;
import org.mockito.*;
import org.nem.core.crypto.*;
import org.nem.core.model.*;
import org.nem.core.serialization.MissingRequiredPropertyException;
import org.nem.ncc.addressbook.*;
import org.nem.ncc.controller.requests.WalletNamePasswordBag;
import org.nem.ncc.controller.viewmodels.*;
import org.nem.ncc.services.*;
import org.nem.ncc.test.*;
import org.nem.ncc.wallet.*;
import java.util.function.*;
public class WalletAccountControllerTest {
//region addNewAccount
@Test
public void canAddNewAccountWithoutLabel() {
// null label: the controller should fall back to an empty address book label
this.assertNewAccountCanBeAdded(null);
}
@Test
public void canAddNewAccountWithLabel() {
this.assertNewAccountCanBeAdded("l");
}
/**
 * Asserts that a freshly generated account can be added to the wallet and that the
 * (possibly absent) label is propagated to the address book.
 *
 * @param label The address book label, or {@code null} for no label.
 */
private void assertNewAccountCanBeAdded(final String label) {
// Arrange:
final JSONObject jsonObject = new JSONObject();
jsonObject.put("wallet", "n");
jsonObject.put("password", "p");
if (null != label) {
jsonObject.put("label", label);
}
final TestContext context = new TestContext(jsonObject);
final AccountViewModel accountViewModel = Mockito.mock(AccountViewModel.class);
// capture the generated account so its address can be checked against the address book call
final ArgumentCaptor<WalletAccount> walletAccountCaptor = ArgumentCaptor.forClass(WalletAccount.class);
Mockito.when(context.accountMapper.toViewModel(walletAccountCaptor.capture())).thenReturn(accountViewModel);
// Act:
final AccountViewModel result = context.controller.addNewAccount(context.bag);
// Assert:
final WalletAccount walletAccount = walletAccountCaptor.getValue();
Assert.assertThat(result, IsEqual.equalTo(accountViewModel));
Mockito.verify(context.wallet, Mockito.times(1)).addOtherAccount(Mockito.any());
Mockito.verify(context.walletServices, Mockito.times(1)).open(context.bag);
Mockito.verify(context.addressBookServices, Mockito.times(1)).open(Mockito.any());
Mockito.verify(context.addressBook, Mockito.times(1)).contains(Mockito.any());
Mockito.verify(context.accountMapper, Mockito.times(1)).toViewModel(Mockito.any(WalletAccount.class));
// a null label is normalized to the empty string
Mockito.verify(context.addressBook, Mockito.times(1))
.addLabel(Mockito.eq(new AccountLabel(walletAccount.getAddress(), "", null == label ? "" : label)));
}
//endregion
//region addExistingAccount
@Test
public void canAddExistingAccountWithKey() {
// null label: the controller should fall back to an empty address book label
this.assertExistingAccountWithKeyCanBeAdded(null);
}
@Test
public void canAddExistingAccountWithKeyAndLabel() {
this.assertExistingAccountWithKeyCanBeAdded("l");
}
/**
 * Asserts that an existing account (identified by its private key) can be added to the wallet
 * and that the (possibly absent) label is propagated to the address book.
 *
 * <p>Made {@code private} for consistency with {@code assertNewAccountCanBeAdded}; both are
 * helpers called only from tests in this class.
 *
 * @param label The address book label, or {@code null} for no label.
 */
private void assertExistingAccountWithKeyCanBeAdded(final String label) {
    // Arrange:
    final JSONObject jsonObject = new JSONObject();
    jsonObject.put("wallet", "n");
    jsonObject.put("password", "p");
    jsonObject.put("accountKey", "0011223344");
    if (null != label) {
        jsonObject.put("label", label);
    }
    final TestContext context = new TestContext(jsonObject);
    // the account derived from the supplied private key
    final WalletAccount walletAccount = new WalletAccount(PrivateKey.fromHexString("0011223344"));
    final AccountViewModel accountViewModel = Mockito.mock(AccountViewModel.class);
    Mockito.when(context.accountMapper.toViewModel(walletAccount)).thenReturn(accountViewModel);
    // Act:
    final AccountViewModel result = context.controller.addExistingAccount(context.bag);
    // Assert:
    Assert.assertThat(result, IsEqual.equalTo(accountViewModel));
    Mockito.verify(context.wallet, Mockito.times(1)).addOtherAccount(walletAccount);
    Mockito.verify(context.walletServices, Mockito.times(1)).open(context.bag);
    Mockito.verify(context.addressBookServices, Mockito.times(1)).open(Mockito.any());
    Mockito.verify(context.addressBook, Mockito.times(1)).contains(Mockito.eq(walletAccount.getAddress()));
    Mockito.verify(context.accountMapper, Mockito.times(1)).toViewModel(walletAccount);
    // a null label is normalized to the empty string
    Mockito.verify(context.addressBook, Mockito.times(1))
            .addLabel(Mockito.eq(new AccountLabel(walletAccount.getAddress(), "", null == label ? "" : label)));
}
/** The request must fail when the required {@code accountKey} property is missing. */
@Test(expected = MissingRequiredPropertyException.class)
public void cannotAddExistingAccountWithoutKey() {
// Arrange:
final JSONObject jsonObject = new JSONObject();
jsonObject.put("wallet", "n");
jsonObject.put("password", "p");
jsonObject.put("label", "l");
final TestContext context = new TestContext(jsonObject);
// NOTE(review): the mapper is stubbed so that a failure can presumably only originate from
// the missing "accountKey" property — confirm this stubbing is intentional rather than dead.
final WalletAccount walletAccount = new WalletAccount(PrivateKey.fromHexString("0011223344"));
final AccountViewModel accountViewModel = Mockito.mock(AccountViewModel.class);
Mockito.when(context.accountMapper.toViewModel(walletAccount)).thenReturn(accountViewModel);
// Act:
context.controller.addExistingAccount(context.bag);
}
//endregion
//region setPrimaryAccount / removeAccount
@Test
public void setPrimaryAccountDelegatesToServices() {
    // Arrange:
    final Address address = Utils.generateRandomAddress();
    final TestContext context = new TestContext(createJsonObjectWithAddress(address));
    final WalletViewModel expectedViewModel = Mockito.mock(WalletViewModel.class);
    Mockito.when(context.walletMapper.toViewModel(context.wallet)).thenReturn(expectedViewModel);

    // Act:
    final WalletViewModel result = context.controller.setPrimaryAccount(context.bag);

    // Assert: the wallet is opened, updated and mapped exactly once.
    Assert.assertThat(result, IsEqual.equalTo(expectedViewModel));
    Mockito.verify(context.walletServices, Mockito.times(1)).open(context.bag);
    Mockito.verify(context.wallet, Mockito.times(1)).setPrimaryAccount(address);
    Mockito.verify(context.walletMapper, Mockito.times(1)).toViewModel(context.wallet);
}
@Test
public void removeAccountDelegatesToServices() {
    // Arrange:
    final Address address = Utils.generateRandomAddress();
    final TestContext context = new TestContext(createJsonObjectWithAddress(address));
    final WalletViewModel expectedViewModel = Mockito.mock(WalletViewModel.class);
    Mockito.when(context.walletMapper.toViewModel(context.wallet)).thenReturn(expectedViewModel);

    // Act:
    final WalletViewModel result = context.controller.removeAccount(context.bag);

    // Assert: the wallet is opened, the account removed and the wallet mapped exactly once.
    Assert.assertThat(result, IsEqual.equalTo(expectedViewModel));
    Mockito.verify(context.walletServices, Mockito.times(1)).open(context.bag);
    Mockito.verify(context.wallet, Mockito.times(1)).removeAccount(address);
    Mockito.verify(context.walletMapper, Mockito.times(1)).toViewModel(context.wallet);
}
@Test
public void removeAccountRemovesLabelFromAddressBook() {
    // Arrange: the address book reports that it contains the account's address.
    final Address address = Utils.generateRandomAddress();
    final TestContext context = new TestContext(createJsonObjectWithAddress(address));
    final WalletViewModel expectedViewModel = Mockito.mock(WalletViewModel.class);
    Mockito.when(context.walletMapper.toViewModel(context.wallet)).thenReturn(expectedViewModel);
    Mockito.when(context.addressBook.contains(address)).thenReturn(true);

    // Act:
    final WalletViewModel result = context.controller.removeAccount(context.bag);

    // Assert: the matching address book label is removed as well.
    Assert.assertThat(result, IsEqual.equalTo(expectedViewModel));
    Mockito.verify(context.addressBookServices, Mockito.times(1)).open(Mockito.any());
    Mockito.verify(context.addressBook, Mockito.times(1)).contains(address);
    Mockito.verify(context.addressBook, Mockito.times(1)).removeLabel(address);
}
//endregion
//region reveal (remote) private key
@Test
public void revealAccountReturnsViewModelOfPrivateKeyPair() {
// Assert: a known account's signing private key can be revealed.
assertCanRevealKnownAccount(WalletAccountController::revealAccount, WalletAccount::getPrivateKey);
}
@Test
public void revealAccountFailsIfAddressIsUnknown() {
// Assert: revealing fails when the address is not in the wallet.
assertCannotRevealUnknownAccount(WalletAccountController::revealAccount);
}
@Test
public void revealRemoteAccountReturnsViewModelOfPrivateKeyPair() {
// Assert: a known account's remote-harvesting private key can be revealed.
assertCanRevealKnownAccount(WalletAccountController::revealRemoteAccount, WalletAccount::getRemoteHarvestingPrivateKey);
}
@Test
public void revealRemoteAccountFailsIfAddressIsUnknown() {
// Assert: revealing the remote key fails when the address is not in the wallet.
assertCannotRevealUnknownAccount(WalletAccountController::revealRemoteAccount);
}
/**
 * Asserts that revealing a key for an address unknown to the wallet fails.
 *
 * @param revealAccount The controller reveal operation under test.
 */
private static void assertCannotRevealUnknownAccount(
        final BiFunction<WalletAccountController, WalletNamePasswordBag, KeyPairViewModel> revealAccount) {
    // Arrange: a bag referencing a random (and therefore unknown) address.
    final TestContext context = new TestContext();
    final JSONObject jsonObject = createJsonObjectWithAddress(Utils.generateRandomAddress());
    final WalletNamePasswordBag bag = new WalletNamePasswordBag(Utils.createDeserializer(jsonObject));

    // Assert: the reveal operation must raise a wallet exception.
    ExceptionAssert.assertThrows(v -> revealAccount.apply(context.controller, bag), WalletException.class);
}
/**
 * Asserts that revealing a key for an account known to the wallet yields the expected key pair.
 *
 * @param revealAccount The controller reveal operation under test.
 * @param getPrivateKey Extracts the private key expected to be revealed.
 */
private static void assertCanRevealKnownAccount(
        final BiFunction<WalletAccountController, WalletNamePasswordBag, KeyPairViewModel> revealAccount,
        final Function<WalletAccount, PrivateKey> getPrivateKey) {
    // Arrange:
    final TestContext context = new TestContext();
    final KeyPair expectedKeyPair = new KeyPair(getPrivateKey.apply(context.walletAccount));

    // Act:
    final KeyPairViewModel viewModel = revealAccount.apply(context.controller, context.bag);

    // Assert: the view model exposes the default network version and the full key pair.
    Assert.assertThat(viewModel.getNetworkVersion(), IsEqual.equalTo(NetworkInfos.getDefault().getVersion()));
    Assert.assertThat(viewModel.getKeyPair().getPrivateKey(), IsNull.notNullValue());
    Assert.assertThat(viewModel.getKeyPair().getPrivateKey(), IsEqual.equalTo(expectedKeyPair.getPrivateKey()));
    Assert.assertThat(viewModel.getKeyPair().getPublicKey(), IsNull.notNullValue());
    Assert.assertThat(viewModel.getKeyPair().getPublicKey(), IsEqual.equalTo(expectedKeyPair.getPublicKey()));
}
//endregion
/**
 * Creates the default wallet request and adds the encoded account address to it.
 */
private static JSONObject createJsonObjectWithAddress(final Address address) {
    final JSONObject request = createJsonObject();
    request.put("account", address.getEncoded());
    return request;
}
/**
 * Creates the default wallet request containing name, password and label.
 */
private static JSONObject createJsonObject() {
    final JSONObject request = new JSONObject();
    request.put("wallet", "n");
    request.put("password", "p");
    request.put("label", "l");
    return request;
}
/**
 * Wires a WalletAccountController to mocked services and provides a
 * deserialized WalletNamePasswordBag for the request under test.
 */
private static class TestContext {
private final WalletServices walletServices = Mockito.mock(WalletServices.class);
// A concrete account (signing + remote-harvesting key) that the mocked wallet knows about.
private final WalletAccount walletAccount = new WalletAccount(new KeyPair().getPrivateKey(), new KeyPair().getPrivateKey());
private final WalletMapper walletMapper = Mockito.mock(WalletMapper.class);
private final AccountMapper accountMapper = Mockito.mock(AccountMapper.class);
private final AddressBookServices addressBookServices = Mockito.mock(AddressBookServices.class);
private final AddressBook addressBook = Mockito.mock(AddressBook.class);
// Note: the controller is constructed from the mocks declared above it.
private final WalletAccountController controller = new WalletAccountController(
this.walletServices,
this.walletMapper,
this.accountMapper,
this.addressBookServices);
private final Wallet wallet = Mockito.mock(Wallet.class);
private final WalletNamePasswordBag bag;
// Creates a context whose bag references this.walletAccount's address.
private TestContext() {
final JSONObject jsonObject = createJsonObject();
jsonObject.put("account", this.walletAccount.getAddress().getEncoded());
this.bag = new WalletNamePasswordBag(Utils.createDeserializer(jsonObject));
this.setupMocks();
}
// Creates a context from a caller-supplied request object.
private TestContext(final JSONObject jsonObject) {
this.bag = new WalletNamePasswordBag(Utils.createDeserializer(jsonObject));
this.setupMocks();
}
// Stubs service lookups so the controller resolves the mocked wallet, address book and account.
private void setupMocks() {
Mockito.when(this.walletServices.open(this.bag)).thenReturn(this.wallet);
Mockito.when(this.addressBookServices.open(Mockito.any())).thenReturn(this.addressBook);
Mockito.when(this.wallet.tryGetWalletAccount(this.walletAccount.getAddress())).thenReturn(this.walletAccount);
}
}
}
| |
package net.i2p.crypto;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.io.IOException;
import java.math.BigInteger;
import java.security.GeneralSecurityException;
import java.security.InvalidKeyException;
import java.security.KeyFactory;
import java.security.PrivateKey;
import java.security.PublicKey;
import java.security.SignatureException;
import java.security.interfaces.DSAPrivateKey;
import java.security.interfaces.DSAPublicKey;
import java.security.interfaces.ECPrivateKey;
import java.security.interfaces.ECPublicKey;
import java.security.interfaces.RSAPrivateKey;
import java.security.interfaces.RSAPublicKey;
import java.security.spec.AlgorithmParameterSpec;
import java.security.spec.DSAPrivateKeySpec;
import java.security.spec.DSAPublicKeySpec;
import java.security.spec.ECParameterSpec;
import java.security.spec.ECPrivateKeySpec;
import java.security.spec.ECPublicKeySpec;
import java.security.spec.ECPoint;
import java.security.spec.KeySpec;
import java.security.spec.PKCS8EncodedKeySpec;
import java.security.spec.RSAKeyGenParameterSpec;
import java.security.spec.RSAPrivateKeySpec;
import java.security.spec.RSAPublicKeySpec;
import java.security.spec.X509EncodedKeySpec;
import java.util.Map;
import net.i2p.crypto.eddsa.EdDSAPrivateKey;
import net.i2p.crypto.eddsa.EdDSAPublicKey;
import net.i2p.crypto.eddsa.spec.EdDSAParameterSpec;
import net.i2p.crypto.eddsa.spec.EdDSAPrivateKeySpec;
import net.i2p.crypto.eddsa.spec.EdDSAPublicKeySpec;
import net.i2p.data.Signature;
import net.i2p.data.SigningPrivateKey;
import net.i2p.data.SigningPublicKey;
import net.i2p.util.LHMCache;
import net.i2p.util.NativeBigInteger;
/**
 * Utilities for Signing keys and Signatures.
 * Converts between the I2P key/signature types (net.i2p.data) and the
 * standard JCA types (java.security) for DSA, ECDSA, EdDSA and RSA.
 *
 * @since 0.9.9, public since 0.9.12
 */
public class SigUtil {
// Small LRU caches of already-converted Java keys; every access synchronizes on the map itself.
private static final Map<SigningPublicKey, ECPublicKey> _ECPubkeyCache = new LHMCache<SigningPublicKey, ECPublicKey>(64);
private static final Map<SigningPrivateKey, ECPrivateKey> _ECPrivkeyCache = new LHMCache<SigningPrivateKey, ECPrivateKey>(16);
private static final Map<SigningPublicKey, EdDSAPublicKey> _EdPubkeyCache = new LHMCache<SigningPublicKey, EdDSAPublicKey>(64);
private static final Map<SigningPrivateKey, EdDSAPrivateKey> _EdPrivkeyCache = new LHMCache<SigningPrivateKey, EdDSAPrivateKey>(16);
// static utility class; not instantiable
private SigUtil() {}
/**
 * Converts an I2P signing public key to the matching JCA type.
 *
 * @return JAVA key!
 */
public static PublicKey toJavaKey(SigningPublicKey pk)
throws GeneralSecurityException {
switch (pk.getType().getBaseAlgorithm()) {
case DSA:
return toJavaDSAKey(pk);
case EC:
return toJavaECKey(pk);
case EdDSA:
return toJavaEdDSAKey(pk);
case RSA:
return toJavaRSAKey(pk);
default:
throw new IllegalArgumentException();
}
}
/**
 * Converts an I2P signing private key to the matching JCA type.
 *
 * @return JAVA key!
 */
public static PrivateKey toJavaKey(SigningPrivateKey pk)
throws GeneralSecurityException {
switch (pk.getType().getBaseAlgorithm()) {
case DSA:
return toJavaDSAKey(pk);
case EC:
return toJavaECKey(pk);
case EdDSA:
return toJavaEdDSAKey(pk);
case RSA:
return toJavaRSAKey(pk);
default:
throw new IllegalArgumentException();
}
}
/**
 * Use if SigType is unknown.
 * For efficiency, use fromJavakey(pk, type) if type is known.
 *
 * @param pk JAVA key!
 * @throws IllegalArgumentException on unknown type
 * @since 0.9.18
 */
public static SigningPublicKey fromJavaKey(PublicKey pk)
throws GeneralSecurityException {
if (pk instanceof DSAPublicKey) {
return fromJavaKey((DSAPublicKey) pk);
}
if (pk instanceof ECPublicKey) {
ECPublicKey k = (ECPublicKey) pk;
AlgorithmParameterSpec spec = k.getParams();
SigType type;
// identify the SigType by matching the curve parameters of the known EC types
if (spec.equals(SigType.ECDSA_SHA256_P256.getParams()))
type = SigType.ECDSA_SHA256_P256;
else if (spec.equals(SigType.ECDSA_SHA384_P384.getParams()))
type = SigType.ECDSA_SHA384_P384;
else if (spec.equals(SigType.ECDSA_SHA512_P521.getParams()))
type = SigType.ECDSA_SHA512_P521;
else
throw new IllegalArgumentException("Unknown EC type");
return fromJavaKey(k, type);
}
if (pk instanceof EdDSAPublicKey) {
return fromJavaKey((EdDSAPublicKey) pk, SigType.EdDSA_SHA512_Ed25519);
}
if (pk instanceof RSAPublicKey) {
RSAPublicKey k = (RSAPublicKey) pk;
int sz = k.getModulus().bitLength();
SigType type;
// classify by modulus size; use the smallest RSA type whose key size fits
if (sz <= ((RSAKeyGenParameterSpec) SigType.RSA_SHA256_2048.getParams()).getKeysize())
type = SigType.RSA_SHA256_2048;
else if (sz <= ((RSAKeyGenParameterSpec) SigType.RSA_SHA384_3072.getParams()).getKeysize())
type = SigType.RSA_SHA384_3072;
else if (sz <= ((RSAKeyGenParameterSpec) SigType.RSA_SHA512_4096.getParams()).getKeysize())
type = SigType.RSA_SHA512_4096;
else
throw new IllegalArgumentException("Unknown RSA type");
return fromJavaKey(k, type);
}
throw new IllegalArgumentException("Unknown type");
}
/**
 * Use if SigType is known.
 *
 * @param pk JAVA key!
 */
public static SigningPublicKey fromJavaKey(PublicKey pk, SigType type)
throws GeneralSecurityException {
switch (type.getBaseAlgorithm()) {
case DSA:
return fromJavaKey((DSAPublicKey) pk);
case EC:
return fromJavaKey((ECPublicKey) pk, type);
case EdDSA:
return fromJavaKey((EdDSAPublicKey) pk, type);
case RSA:
return fromJavaKey((RSAPublicKey) pk, type);
default:
throw new IllegalArgumentException();
}
}
/**
 * Use if SigType is unknown.
 * For efficiency, use fromJavakey(pk, type) if type is known.
 *
 * @param pk JAVA key!
 * @throws IllegalArgumentException on unknown type
 * @since 0.9.18
 */
public static SigningPrivateKey fromJavaKey(PrivateKey pk)
throws GeneralSecurityException {
if (pk instanceof DSAPrivateKey) {
return fromJavaKey((DSAPrivateKey) pk);
}
if (pk instanceof ECPrivateKey) {
ECPrivateKey k = (ECPrivateKey) pk;
AlgorithmParameterSpec spec = k.getParams();
SigType type;
// identify the SigType by matching the curve parameters of the known EC types
if (spec.equals(SigType.ECDSA_SHA256_P256.getParams()))
type = SigType.ECDSA_SHA256_P256;
else if (spec.equals(SigType.ECDSA_SHA384_P384.getParams()))
type = SigType.ECDSA_SHA384_P384;
else if (spec.equals(SigType.ECDSA_SHA512_P521.getParams()))
type = SigType.ECDSA_SHA512_P521;
else
throw new IllegalArgumentException("Unknown EC type");
return fromJavaKey(k, type);
}
if (pk instanceof EdDSAPrivateKey) {
return fromJavaKey((EdDSAPrivateKey) pk, SigType.EdDSA_SHA512_Ed25519);
}
if (pk instanceof RSAPrivateKey) {
RSAPrivateKey k = (RSAPrivateKey) pk;
int sz = k.getModulus().bitLength();
SigType type;
// classify by modulus size; use the smallest RSA type whose key size fits
if (sz <= ((RSAKeyGenParameterSpec) SigType.RSA_SHA256_2048.getParams()).getKeysize())
type = SigType.RSA_SHA256_2048;
else if (sz <= ((RSAKeyGenParameterSpec) SigType.RSA_SHA384_3072.getParams()).getKeysize())
type = SigType.RSA_SHA384_3072;
else if (sz <= ((RSAKeyGenParameterSpec) SigType.RSA_SHA512_4096.getParams()).getKeysize())
type = SigType.RSA_SHA512_4096;
else
throw new IllegalArgumentException("Unknown RSA type");
return fromJavaKey(k, type);
}
throw new IllegalArgumentException("Unknown type");
}
/**
 * Use if SigType is known.
 *
 * @param pk JAVA key!
 */
public static SigningPrivateKey fromJavaKey(PrivateKey pk, SigType type)
throws GeneralSecurityException {
switch (type.getBaseAlgorithm()) {
case DSA:
return fromJavaKey((DSAPrivateKey) pk);
case EC:
return fromJavaKey((ECPrivateKey) pk, type);
case EdDSA:
return fromJavaKey((EdDSAPrivateKey) pk, type);
case RSA:
return fromJavaKey((RSAPrivateKey) pk, type);
default:
throw new IllegalArgumentException();
}
}
/**
 * @return JAVA key!
 */
public static ECPublicKey toJavaECKey(SigningPublicKey pk)
throws GeneralSecurityException {
ECPublicKey rv;
// cache lookup
synchronized (_ECPubkeyCache) {
rv = _ECPubkeyCache.get(pk);
}
if (rv != null)
return rv;
// convert outside the lock; a rare duplicate conversion is harmless
rv = cvtToJavaECKey(pk);
synchronized (_ECPubkeyCache) {
_ECPubkeyCache.put(pk, rv);
}
return rv;
}
/**
 * @return JAVA key!
 */
public static ECPrivateKey toJavaECKey(SigningPrivateKey pk)
throws GeneralSecurityException {
ECPrivateKey rv;
// cache lookup
synchronized (_ECPrivkeyCache) {
rv = _ECPrivkeyCache.get(pk);
}
if (rv != null)
return rv;
// convert outside the lock; a rare duplicate conversion is harmless
rv = cvtToJavaECKey(pk);
synchronized (_ECPrivkeyCache) {
_ECPrivkeyCache.put(pk, rv);
}
return rv;
}
// Builds a JCA EC public key from the raw x||y point encoding.
private static ECPublicKey cvtToJavaECKey(SigningPublicKey pk)
throws GeneralSecurityException {
SigType type = pk.getType();
BigInteger[] xy = split(pk.getData());
ECPoint w = new ECPoint(xy[0], xy[1]);
// see ECConstants re: casting
ECPublicKeySpec ks = new ECPublicKeySpec(w, (ECParameterSpec) type.getParams());
KeyFactory kf = KeyFactory.getInstance("EC");
return (ECPublicKey) kf.generatePublic(ks);
}
// Builds a JCA EC private key from the raw big-endian scalar.
private static ECPrivateKey cvtToJavaECKey(SigningPrivateKey pk)
throws GeneralSecurityException {
SigType type = pk.getType();
byte[] b = pk.getData();
BigInteger s = new NativeBigInteger(1, b);
// see ECConstants re: casting
ECPrivateKeySpec ks = new ECPrivateKeySpec(s, (ECParameterSpec) type.getParams());
KeyFactory kf = KeyFactory.getInstance("EC");
return (ECPrivateKey) kf.generatePrivate(ks);
}
/**
 * Encodes the EC point as fixed-width x||y of type.getPubkeyLen() bytes.
 *
 * @param pk JAVA key!
 */
public static SigningPublicKey fromJavaKey(ECPublicKey pk, SigType type)
throws GeneralSecurityException {
ECPoint w = pk.getW();
BigInteger x = w.getAffineX();
BigInteger y = w.getAffineY();
int len = type.getPubkeyLen();
byte[] b = combine(x, y, len);
return new SigningPublicKey(type, b);
}
/**
 * Encodes the EC scalar as exactly type.getPrivkeyLen() bytes.
 *
 * @param pk JAVA key!
 */
public static SigningPrivateKey fromJavaKey(ECPrivateKey pk, SigType type)
throws GeneralSecurityException {
BigInteger s = pk.getS();
int len = type.getPrivkeyLen();
byte[] bs = rectify(s, len);
return new SigningPrivateKey(type, bs);
}
/**
 * @return JAVA EdDSA public key!
 * @since 0.9.15
 */
public static EdDSAPublicKey toJavaEdDSAKey(SigningPublicKey pk)
throws GeneralSecurityException {
EdDSAPublicKey rv;
// cache lookup
synchronized (_EdPubkeyCache) {
rv = _EdPubkeyCache.get(pk);
}
if (rv != null)
return rv;
// convert outside the lock; a rare duplicate conversion is harmless
rv = cvtToJavaEdDSAKey(pk);
synchronized (_EdPubkeyCache) {
_EdPubkeyCache.put(pk, rv);
}
return rv;
}
/**
 * @return JAVA EdDSA private key!
 * @since 0.9.15
 */
public static EdDSAPrivateKey toJavaEdDSAKey(SigningPrivateKey pk)
throws GeneralSecurityException {
EdDSAPrivateKey rv;
// cache lookup
synchronized (_EdPrivkeyCache) {
rv = _EdPrivkeyCache.get(pk);
}
if (rv != null)
return rv;
// convert outside the lock; a rare duplicate conversion is harmless
rv = cvtToJavaEdDSAKey(pk);
synchronized (_EdPrivkeyCache) {
_EdPrivkeyCache.put(pk, rv);
}
return rv;
}
/**
 * Bad key data surfaces as InvalidKeyException rather than IllegalArgumentException.
 *
 * @since 0.9.15
 */
private static EdDSAPublicKey cvtToJavaEdDSAKey(SigningPublicKey pk)
throws GeneralSecurityException {
try {
return new EdDSAPublicKey(new EdDSAPublicKeySpec(
pk.getData(), (EdDSAParameterSpec) pk.getType().getParams()));
} catch (IllegalArgumentException iae) {
throw new InvalidKeyException(iae);
}
}
/**
 * Bad key data surfaces as InvalidKeyException rather than IllegalArgumentException.
 *
 * @since 0.9.15
 */
private static EdDSAPrivateKey cvtToJavaEdDSAKey(SigningPrivateKey pk)
throws GeneralSecurityException {
try {
return new EdDSAPrivateKey(new EdDSAPrivateKeySpec(
pk.getData(), (EdDSAParameterSpec) pk.getType().getParams()));
} catch (IllegalArgumentException iae) {
throw new InvalidKeyException(iae);
}
}
/**
 * @since 0.9.15
 */
public static SigningPublicKey fromJavaKey(EdDSAPublicKey pk, SigType type)
throws GeneralSecurityException {
return new SigningPublicKey(type, pk.getAbyte());
}
/**
 * @since 0.9.15
 */
public static SigningPrivateKey fromJavaKey(EdDSAPrivateKey pk, SigType type)
throws GeneralSecurityException {
return new SigningPrivateKey(type, pk.getSeed());
}
/**
 * Builds a JCA DSA public key using the fixed I2P DSA parameters (CryptoConstants).
 */
public static DSAPublicKey toJavaDSAKey(SigningPublicKey pk)
throws GeneralSecurityException {
KeyFactory kf = KeyFactory.getInstance("DSA");
// y p q g
KeySpec ks = new DSAPublicKeySpec(new NativeBigInteger(1, pk.getData()),
CryptoConstants.dsap,
CryptoConstants.dsaq,
CryptoConstants.dsag);
return (DSAPublicKey) kf.generatePublic(ks);
}
/**
 * Builds a JCA DSA private key using the fixed I2P DSA parameters (CryptoConstants).
 */
public static DSAPrivateKey toJavaDSAKey(SigningPrivateKey pk)
throws GeneralSecurityException {
KeyFactory kf = KeyFactory.getInstance("DSA");
// x p q g
KeySpec ks = new DSAPrivateKeySpec(new NativeBigInteger(1, pk.getData()),
CryptoConstants.dsap,
CryptoConstants.dsaq,
CryptoConstants.dsag);
return (DSAPrivateKey) kf.generatePrivate(ks);
}
/**
 * Encodes the DSA y value as a fixed-width DSA_SHA1 public key.
 */
public static SigningPublicKey fromJavaKey(DSAPublicKey pk)
throws GeneralSecurityException {
BigInteger y = pk.getY();
SigType type = SigType.DSA_SHA1;
int len = type.getPubkeyLen();
byte[] by = rectify(y, len);
return new SigningPublicKey(type, by);
}
/**
 * Encodes the DSA x value as a fixed-width DSA_SHA1 private key.
 */
public static SigningPrivateKey fromJavaKey(DSAPrivateKey pk)
throws GeneralSecurityException {
BigInteger x = pk.getX();
SigType type = SigType.DSA_SHA1;
int len = type.getPrivkeyLen();
byte[] bx = rectify(x, len);
return new SigningPrivateKey(type, bx);
}
/**
 * @deprecated unused
 */
public static RSAPublicKey toJavaRSAKey(SigningPublicKey pk)
throws GeneralSecurityException {
SigType type = pk.getType();
KeyFactory kf = KeyFactory.getInstance("RSA");
BigInteger n = new NativeBigInteger(1, pk.getData());
// the public exponent is fixed per SigType
BigInteger e = ((RSAKeyGenParameterSpec)type.getParams()).getPublicExponent();
// modulus exponent
KeySpec ks = new RSAPublicKeySpec(n, e);
return (RSAPublicKey) kf.generatePublic(ks);
}
/**
 * @return JAVA key!
 */
public static RSAPrivateKey toJavaRSAKey(SigningPrivateKey pk)
throws GeneralSecurityException {
KeyFactory kf = KeyFactory.getInstance("RSA");
// private key is modulus (pubkey) + exponent
BigInteger[] nd = split(pk.getData());
// modulus exponent
KeySpec ks = new RSAPrivateKeySpec(nd[0], nd[1]);
return (RSAPrivateKey) kf.generatePrivate(ks);
}
/**
 * Encodes the RSA modulus as a fixed-width public key.
 */
public static SigningPublicKey fromJavaKey(RSAPublicKey pk, SigType type)
throws GeneralSecurityException {
BigInteger n = pk.getModulus();
int len = type.getPubkeyLen();
byte[] bn = rectify(n, len);
return new SigningPublicKey(type, bn);
}
/**
 * @deprecated unused
 */
public static SigningPrivateKey fromJavaKey(RSAPrivateKey pk, SigType type)
throws GeneralSecurityException {
// private key is modulus (pubkey) + exponent
BigInteger n = pk.getModulus();
BigInteger d = pk.getPrivateExponent();
byte[] b = combine(n, d, type.getPrivkeyLen());
return new SigningPrivateKey(type, b);
}
/**
 * @return ASN.1 representation
 */
public static byte[] toJavaSig(Signature sig) {
// RSA and EdDSA sigs are not ASN encoded
if (sig.getType().getBaseAlgorithm() == SigAlgo.RSA || sig.getType().getBaseAlgorithm() == SigAlgo.EdDSA)
return sig.getData();
return sigBytesToASN1(sig.getData());
}
/**
 * @param asn ASN.1 representation
 * @return a Signature with SigType type
 */
public static Signature fromJavaSig(byte[] asn, SigType type)
throws SignatureException {
// RSA and EdDSA sigs are not ASN encoded
if (type.getBaseAlgorithm() == SigAlgo.RSA || type.getBaseAlgorithm() == SigAlgo.EdDSA)
return new Signature(type, asn);
return new Signature(type, aSN1ToSigBytes(asn, type.getSigLen()));
}
/**
 * Reads an X.509 (SubjectPublicKeyInfo) encoded public key from a file.
 *
 * @return JAVA key!
 */
public static PublicKey importJavaPublicKey(File file, SigType type)
throws GeneralSecurityException, IOException {
byte[] data = getData(file);
KeySpec ks = new X509EncodedKeySpec(data);
String algo = type.getBaseAlgorithm().getName();
KeyFactory kf = KeyFactory.getInstance(algo);
return kf.generatePublic(ks);
}
/**
 * Reads a PKCS#8 encoded private key from a file.
 *
 * @return JAVA key!
 */
public static PrivateKey importJavaPrivateKey(File file, SigType type)
throws GeneralSecurityException, IOException {
byte[] data = getData(file);
KeySpec ks = new PKCS8EncodedKeySpec(data);
String algo = type.getBaseAlgorithm().getName();
KeyFactory kf = KeyFactory.getInstance(algo);
return kf.generatePrivate(ks);
}
/** 16 KB max */
private static byte[] getData(File file) throws IOException {
byte buf[] = new byte[1024];
InputStream in = null;
ByteArrayOutputStream out = new ByteArrayOutputStream(1024);
try {
in = new FileInputStream(file);
int read = 0;
int tot = 0;
while ( (read = in.read(buf)) != -1) {
out.write(buf, 0, read);
tot += read;
// enforce the 16 KB limit while reading
if (tot > 16*1024)
throw new IOException("too big");
}
return out.toByteArray();
} finally {
// always close the stream, swallowing secondary close failures
if (in != null)
try { in.close(); } catch (IOException ioe) {}
}
}
/**
 * Split a byte array into two BigIntegers
 * @return array of two BigIntegers
 */
private static BigInteger[] split(byte[] b) {
int sublen = b.length / 2;
byte[] bx = new byte[sublen];
byte[] by = new byte[sublen];
System.arraycopy(b, 0, bx, 0, sublen);
System.arraycopy(b, sublen, by, 0, sublen);
// both halves are interpreted as non-negative big-endian values
NativeBigInteger x = new NativeBigInteger(1, bx);
NativeBigInteger y = new NativeBigInteger(1, by);
return new NativeBigInteger[] {x, y};
}
/**
 * Combine two BigIntegers of nominal length = len / 2
 * @return array of exactly len bytes
 */
private static byte[] combine(BigInteger x, BigInteger y, int len)
throws InvalidKeyException {
int sublen = len / 2;
byte[] b = new byte[len];
byte[] bx = rectify(x, sublen);
byte[] by = rectify(y, sublen);
System.arraycopy(bx, 0, b, 0, sublen);
System.arraycopy(by, 0, b, sublen, sublen);
return b;
}
/**
 * Right-aligns the magnitude of bi into exactly len bytes,
 * dropping a single leading sign/zero byte if present.
 *
 * @param bi non-negative
 * @return array of exactly len bytes
 */
public static byte[] rectify(BigInteger bi, int len)
throws InvalidKeyException {
byte[] b = bi.toByteArray();
if (b.length == len) {
// just right
return b;
}
if (b.length > len + 1)
throw new InvalidKeyException("key too big (" + b.length + ") max is " + (len + 1));
byte[] rv = new byte[len];
if (b.length == 0)
return rv;
if ((b[0] & 0x80) != 0)
throw new InvalidKeyException("negative");
if (b.length > len) {
// leading 0 byte
if (b[0] != 0)
throw new InvalidKeyException("key too big (" + b.length + ") max is " + len);
System.arraycopy(b, 1, rv, 0, len);
} else {
// smaller
System.arraycopy(b, 0, rv, len - b.length, b.length);
}
return rv;
}
/**
 * http://download.oracle.com/javase/1.5.0/docs/guide/security/CryptoSpec.html
 * Signature Format ASN.1 sequence of two INTEGER values: r and s, in that order:
 * SEQUENCE ::= { r INTEGER, s INTEGER }
 *
 * http://en.wikipedia.org/wiki/Abstract_Syntax_Notation_One
 * 30 -- tag indicating SEQUENCE
 * xx - length in octets
 *
 * 02 -- tag indicating INTEGER
 * xx - length in octets
 * xxxxxx - value
 *
 * Convert to BigInteger and back so we have the minimum length representation, as required.
 * r and s are always non-negative.
 *
 * Only supports sigs up to about 252 bytes. See code to fix BER encoding for this before you
 * add a SigType with bigger signatures.
 *
 * @throws IllegalArgumentException if too big
 * @since 0.8.7, moved to SigUtil in 0.9.9
 */
private static byte[] sigBytesToASN1(byte[] sig) {
//System.out.println("pre TO asn1\n" + net.i2p.util.HexDump.dump(sig));
int len = sig.length;
int sublen = len / 2;
byte[] tmp = new byte[sublen];
// first half is r, second half is s
System.arraycopy(sig, 0, tmp, 0, sublen);
BigInteger r = new BigInteger(1, tmp);
byte[] rb = r.toByteArray();
if (rb.length > 127)
throw new IllegalArgumentException("FIXME R length > 127");
System.arraycopy(sig, sublen, tmp, 0, sublen);
BigInteger s = new BigInteger(1, tmp);
byte[] sb = s.toByteArray();
if (sb.length > 127)
throw new IllegalArgumentException("FIXME S length > 127");
// 4 = two INTEGER tag+length pairs
int seqlen = rb.length + sb.length + 4;
if (seqlen > 255)
throw new IllegalArgumentException("FIXME seq length > 255");
int totlen = seqlen + 2;
// long-form length (0x81 prefix) needs one extra byte
if (seqlen > 127)
totlen++;
byte[] rv = new byte[totlen];
int idx = 0;
rv[idx++] = 0x30;
if (seqlen > 127)
rv[idx++] =(byte) 0x81;
rv[idx++] = (byte) seqlen;
rv[idx++] = 0x02;
rv[idx++] = (byte) rb.length;
System.arraycopy(rb, 0, rv, idx, rb.length);
idx += rb.length;
rv[idx++] = 0x02;
rv[idx++] = (byte) sb.length;
System.arraycopy(sb, 0, rv, idx, sb.length);
//System.out.println("post TO asn1\n" + net.i2p.util.HexDump.dump(rv));
return rv;
}
/**
 * See above.
 * Only supports sigs up to about 252 bytes. See code to fix BER encoding for bigger than that.
 *
 * @return len bytes
 * @since 0.8.7, moved to SigUtil in 0.9.9
 */
private static byte[] aSN1ToSigBytes(byte[] asn, int len)
throws SignatureException {
//System.out.println("pre from asn1 len=" + len + "\n" + net.i2p.util.HexDump.dump(asn));
if (asn[0] != 0x30)
throw new SignatureException("asn[0] = " + (asn[0] & 0xff));
// handles total len > 127
int idx = 2;
if ((asn[1] & 0x80) != 0)
idx += asn[1] & 0x7f;
if (asn[idx] != 0x02)
throw new SignatureException("asn[2] = " + (asn[idx] & 0xff));
byte[] rv = new byte[len];
int sublen = len / 2;
int rlen = asn[++idx];
if ((rlen & 0x80) != 0)
throw new SignatureException("FIXME R length > 127");
if ((asn[++idx] & 0x80) != 0)
throw new SignatureException("R is negative");
if (rlen > sublen + 1)
throw new SignatureException("R too big " + rlen);
// copy r right-aligned into the first half of rv, skipping any leading zero byte
if (rlen == sublen + 1)
System.arraycopy(asn, idx + 1, rv, 0, sublen);
else
System.arraycopy(asn, idx, rv, sublen - rlen, rlen);
idx += rlen;
int slenloc = idx + 1;
if (asn[idx] != 0x02)
throw new SignatureException("asn[s] = " + (asn[idx] & 0xff));
int slen = asn[slenloc];
if ((slen & 0x80) != 0)
throw new SignatureException("FIXME S length > 127");
if ((asn[slenloc + 1] & 0x80) != 0)
throw new SignatureException("S is negative");
if (slen > sublen + 1)
throw new SignatureException("S too big " + slen);
// copy s right-aligned into the second half of rv, skipping any leading zero byte
if (slen == sublen + 1)
System.arraycopy(asn, slenloc + 2, rv, sublen, sublen);
else
System.arraycopy(asn, slenloc + 1, rv, len - slen, slen);
//System.out.println("post from asn1\n" + net.i2p.util.HexDump.dump(rv));
return rv;
}
/**
 * Clears all key conversion caches.
 */
public static void clearCaches() {
synchronized(_ECPubkeyCache) {
_ECPubkeyCache.clear();
}
synchronized(_ECPrivkeyCache) {
_ECPrivkeyCache.clear();
}
synchronized(_EdPubkeyCache) {
_EdPubkeyCache.clear();
}
synchronized(_EdPrivkeyCache) {
_EdPrivkeyCache.clear();
}
}
}
| |
package org.hisp.dhis.node.serializers;
/*
* Copyright (c) 2004-2016, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import com.google.common.collect.Lists;
import org.hisp.dhis.node.AbstractNodeSerializer;
import org.hisp.dhis.node.Node;
import org.hisp.dhis.node.types.CollectionNode;
import org.hisp.dhis.node.types.ComplexNode;
import org.hisp.dhis.node.types.RootNode;
import org.hisp.dhis.node.types.SimpleNode;
import org.hisp.dhis.system.util.DateUtils;
import org.springframework.context.annotation.Scope;
import org.springframework.context.annotation.ScopedProxyMode;
import org.springframework.stereotype.Component;
import org.springframework.util.StringUtils;
import javax.xml.stream.XMLOutputFactory;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamWriter;
import java.io.OutputStream;
import java.util.Date;
import java.util.List;
/**
 * StAX-based XML serializer for node trees.
 *
 * @author Morten Olav Hansen <mortenoh@gmail.com>
 */
@Component
@Scope( value = "prototype", proxyMode = ScopedProxyMode.INTERFACES )
public class StAXNodeSerializer extends AbstractNodeSerializer
{
    public static final String CONTENT_TYPE = "application/xml";

    private static final XMLOutputFactory xmlFactory = XMLOutputFactory.newInstance();

    static
    {
        xmlFactory.setProperty( "javax.xml.stream.isRepairingNamespaces", true );
    }

    private XMLStreamWriter writer;

    @Override
    public List<String> contentTypes()
    {
        return Lists.newArrayList( CONTENT_TYPE );
    }

    @Override
    protected void startSerialize( RootNode rootNode, OutputStream outputStream ) throws Exception
    {
        writer = xmlFactory.createXMLStreamWriter( outputStream );
        writer.setDefaultNamespace( rootNode.getDefaultNamespace() );
    }

    @Override
    protected void flushStream() throws Exception
    {
        writer.flush();
    }

    @Override
    protected void startWriteRootNode( RootNode rootNode ) throws Exception
    {
        writer.writeStartDocument( "UTF-8", "1.0" );

        if ( !StringUtils.isEmpty( rootNode.getComment() ) )
        {
            writer.writeComment( rootNode.getComment() );
        }

        writeStartElement( rootNode );
    }

    @Override
    protected void endWriteRootNode( RootNode rootNode ) throws Exception
    {
        writer.writeEndElement();
        writer.writeEndDocument();
    }

    @Override
    protected void startWriteSimpleNode( SimpleNode simpleNode ) throws Exception
    {
        // BUG FIX: String.valueOf( null ) previously produced the literal string
        // "null", which made both null checks below dead code and serialized
        // absent values as the text "null". Determine null-ness first.
        final Object rawValue = simpleNode.getValue();
        String value = null;

        if ( rawValue != null )
        {
            value = Date.class.isAssignableFrom( rawValue.getClass() )
                ? DateUtils.getIso8601NoTz( (Date) rawValue )
                : String.valueOf( rawValue );
        }

        if ( simpleNode.isAttribute() )
        {
            if ( value == null )
            {
                return; // attributes without a value are skipped entirely
            }

            if ( !StringUtils.isEmpty( simpleNode.getNamespace() ) )
            {
                writer.writeAttribute( simpleNode.getNamespace(), simpleNode.getName(), value );
            }
            else
            {
                writer.writeAttribute( simpleNode.getName(), value );
            }
        }
        else
        {
            writeStartElement( simpleNode );

            if ( value != null )
            {
                writer.writeCharacters( value );
            }
        }
    }

    @Override
    protected void endWriteSimpleNode( SimpleNode simpleNode ) throws Exception
    {
        // attributes do not open an element, so there is nothing to close for them
        if ( !simpleNode.isAttribute() )
        {
            writer.writeEndElement();
        }
    }

    @Override
    protected void startWriteComplexNode( ComplexNode complexNode ) throws Exception
    {
        writeStartElement( complexNode );
    }

    @Override
    protected void endWriteComplexNode( ComplexNode complexNode ) throws Exception
    {
        writer.writeEndElement();
    }

    @Override
    protected void startWriteCollectionNode( CollectionNode collectionNode ) throws Exception
    {
        // only wrapping, non-empty collections emit a wrapper element
        if ( collectionNode.isWrapping() && !collectionNode.getChildren().isEmpty() )
        {
            writeStartElement( collectionNode );
        }
    }

    @Override
    protected void endWriteCollectionNode( CollectionNode collectionNode ) throws Exception
    {
        if ( collectionNode.isWrapping() && !collectionNode.getChildren().isEmpty() )
        {
            writer.writeEndElement();
        }
    }

    /**
     * Writes the node's comment (if any) followed by its start element,
     * namespaced when the node declares a namespace.
     */
    private void writeStartElement( Node node ) throws XMLStreamException
    {
        if ( !StringUtils.isEmpty( node.getComment() ) )
        {
            writer.writeComment( node.getComment() );
        }

        if ( !StringUtils.isEmpty( node.getNamespace() ) )
        {
            writer.writeStartElement( node.getNamespace(), node.getName() );
        }
        else
        {
            writer.writeStartElement( node.getName() );
        }
    }
}
| |
/**
* Copyright (C) 2012 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.bbg.component;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
import net.sf.ehcache.CacheManager;
import org.fudgemsg.FudgeMsg;
import org.joda.beans.Bean;
import org.joda.beans.BeanBuilder;
import org.joda.beans.BeanDefinition;
import org.joda.beans.JodaBeanUtils;
import org.joda.beans.MetaProperty;
import org.joda.beans.Property;
import org.joda.beans.PropertyDefinition;
import org.joda.beans.impl.direct.DirectBeanBuilder;
import org.joda.beans.impl.direct.DirectMetaProperty;
import org.joda.beans.impl.direct.DirectMetaPropertyMap;
import com.opengamma.bbg.BloombergConnector;
import com.opengamma.bbg.config.BloombergFieldOverride;
import com.opengamma.bbg.referencedata.ReferenceData;
import com.opengamma.bbg.referencedata.ReferenceDataProvider;
import com.opengamma.bbg.referencedata.ReferenceDataProviderGetRequest;
import com.opengamma.bbg.referencedata.ReferenceDataProviderGetResult;
import com.opengamma.bbg.referencedata.cache.EHValueCachingReferenceDataProvider;
import com.opengamma.bbg.referencedata.cache.InMemoryInvalidFieldCachingReferenceDataProvider;
import com.opengamma.bbg.referencedata.cache.MongoDBInvalidFieldCachingReferenceDataProvider;
import com.opengamma.bbg.referencedata.cache.MongoDBValueCachingReferenceDataProvider;
import com.opengamma.bbg.referencedata.impl.DataReferenceDataProviderResource;
import com.opengamma.bbg.referencedata.impl.PatchableReferenceDataProvider;
import com.opengamma.bbg.referencedata.impl.RemoteReferenceDataProvider;
import com.opengamma.component.ComponentInfo;
import com.opengamma.component.ComponentRepository;
import com.opengamma.component.factory.AbstractComponentFactory;
import com.opengamma.component.factory.ComponentInfoAttributes;
import com.opengamma.core.config.ConfigSource;
import com.opengamma.core.config.impl.ConfigItem;
import com.opengamma.id.VersionCorrection;
import com.opengamma.util.mongo.MongoConnector;
/**
* Component factory for the reference data provider backed by a pre-populated MongoDB rather than BBG.
*/
@BeanDefinition
public class MongoFakeBloombergReferenceDataProviderComponentFactory extends AbstractComponentFactory {
/**
 * The classifier that the factory should publish under.
 */
@PropertyDefinition(validate = "notNull")
private String _classifier;
/**
 * The flag determining whether the component should be published by REST (default true).
 */
@PropertyDefinition
private boolean _publishRest = true;
/**
 * The Mongo connector.
 * If a Mongo connector is specified, then it is used for caching.
 */
@PropertyDefinition
private MongoConnector _mongoConnector;
/**
 * The cache manager.
 * If a Mongo connector is specified, then this is not used.
 * If a Mongo connector is not specified and this is, then EH cache is used.
 */
@PropertyDefinition
private CacheManager _cacheManager;
/**
 * A config source. If specified, overrides will be pulled from
 * here.
 */
@PropertyDefinition
private ConfigSource _configSource;
//-------------------------------------------------------------------------
// Builds the (fake) reference data provider and registers it with the
// component repository, optionally publishing it over REST.
@Override
public void init(ComponentRepository repo, LinkedHashMap<String, String> configuration) throws Exception {
final ReferenceDataProvider provider = initReferenceDataProvider(repo);
final ComponentInfo info = new ComponentInfo(ReferenceDataProvider.class, getClassifier());
info.addAttribute(ComponentInfoAttributes.LEVEL, 1);
info.addAttribute(ComponentInfoAttributes.REMOTE_CLIENT_JAVA, RemoteReferenceDataProvider.class);
repo.registerComponent(info, provider);
if (isPublishRest()) {
repo.getRestComponents().publish(info, new DataReferenceDataProviderResource(provider));
}
}
/**
 * Creates the provider.
 * <p>
 * The underlying provider is a Mockito mock that returns empty results for
 * every query; real data is expected to come from the pre-populated cache
 * (Mongo or EH cache) layered on top, hence "fake Bloomberg".
 *
 * @param repo  the repository, not null
 * @return the provider, not null
 */
@SuppressWarnings("unchecked")
protected ReferenceDataProvider initReferenceDataProvider(ComponentRepository repo) {
// Stub out every query method so nothing ever reaches Bloomberg.
ReferenceDataProvider underlying = mock(ReferenceDataProvider.class);
when(underlying.getReferenceData(any(ReferenceDataProviderGetRequest.class))).thenReturn(new ReferenceDataProviderGetResult(Collections.<ReferenceData>emptyList()));
when(underlying.getReferenceData(any(Iterable.class), any(Iterable.class))).thenReturn(new HashMap<String, FudgeMsg>());
when(underlying.getReferenceDataIgnoreCache(any(Iterable.class), any(Iterable.class))).thenReturn(new HashMap<String, FudgeMsg>());
when(underlying.getReferenceDataValue(anyString(), anyString())).thenReturn(null);
when(underlying.getReferenceDataValues(any(Iterable.class), anyString())).thenReturn(new HashMap<String, String>());
when(underlying.getReferenceDataValues(anyString(), any(Iterable.class))).thenReturn(new HashMap<String, String>());
ReferenceDataProvider effectiveProvider = underlying;
if (getConfigSource() != null) {
// Optional per-field overrides layered over the mock.
effectiveProvider = applyFieldOverrides(effectiveProvider);
}
// Caching layer selection: Mongo wins over EH cache; fall back to an
// in-memory invalid-field cache when neither connector is configured.
MongoConnector mongoConnector = getMongoConnector();
CacheManager cacheManager = getCacheManager();
if (mongoConnector != null) {
MongoDBInvalidFieldCachingReferenceDataProvider fieldCached = new MongoDBInvalidFieldCachingReferenceDataProvider(effectiveProvider, mongoConnector);
return new MongoDBValueCachingReferenceDataProvider(fieldCached, mongoConnector);
} else if (cacheManager != null) {
ReferenceDataProvider fieldCached = new InMemoryInvalidFieldCachingReferenceDataProvider(effectiveProvider); // TODO: EHcached version
return new EHValueCachingReferenceDataProvider(fieldCached, cacheManager);
} else {
return new InMemoryInvalidFieldCachingReferenceDataProvider(effectiveProvider);
}
}
/**
 * Loads overrides from the config source and applies them to the passed
 * reference data provider via a wrapper (a {@link PatchableReferenceDataProvider}).
 * Uses the latest version/correction of each {@code BloombergFieldOverride}.
 *
 * @param underlying  the provider to patch
 * @return a patched provider
 */
private PatchableReferenceDataProvider applyFieldOverrides(ReferenceDataProvider underlying) {
Collection<ConfigItem<BloombergFieldOverride>> overrideItems = getConfigSource().getAll(BloombergFieldOverride.class, VersionCorrection.LATEST);
PatchableReferenceDataProvider patchableReferenceDataProvider = new PatchableReferenceDataProvider(underlying);
for (ConfigItem<BloombergFieldOverride> configItem : overrideItems) {
BloombergFieldOverride fieldOverride = configItem.getValue();
patchableReferenceDataProvider.setPatch(fieldOverride.getBloombergId(), fieldOverride.getFieldName(), fieldOverride.getOverrideValue());
}
return patchableReferenceDataProvider;
}
//------------------------- AUTOGENERATED START -------------------------
// NOTE(review): everything below is generated by Joda-Beans; regenerate
// rather than hand-editing if the properties above change.
///CLOVER:OFF
/**
 * The meta-bean for {@code MongoFakeBloombergReferenceDataProviderComponentFactory}.
 * @return the meta-bean, not null
 */
public static MongoFakeBloombergReferenceDataProviderComponentFactory.Meta meta() {
return MongoFakeBloombergReferenceDataProviderComponentFactory.Meta.INSTANCE;
}
static {
JodaBeanUtils.registerMetaBean(MongoFakeBloombergReferenceDataProviderComponentFactory.Meta.INSTANCE);
}
@Override
public MongoFakeBloombergReferenceDataProviderComponentFactory.Meta metaBean() {
return MongoFakeBloombergReferenceDataProviderComponentFactory.Meta.INSTANCE;
}
//-----------------------------------------------------------------------
/**
 * Gets the classifier that the factory should publish under.
 * @return the value of the property, not null
 */
public String getClassifier() {
return _classifier;
}
/**
 * Sets the classifier that the factory should publish under.
 * @param classifier  the new value of the property, not null
 */
public void setClassifier(String classifier) {
JodaBeanUtils.notNull(classifier, "classifier");
this._classifier = classifier;
}
/**
 * Gets the the {@code classifier} property.
 * @return the property, not null
 */
public final Property<String> classifier() {
return metaBean().classifier().createProperty(this);
}
//-----------------------------------------------------------------------
/**
 * Gets the flag determining whether the component should be published by REST (default true).
 * @return the value of the property
 */
public boolean isPublishRest() {
return _publishRest;
}
/**
 * Sets the flag determining whether the component should be published by REST (default true).
 * @param publishRest  the new value of the property
 */
public void setPublishRest(boolean publishRest) {
this._publishRest = publishRest;
}
/**
 * Gets the the {@code publishRest} property.
 * @return the property, not null
 */
public final Property<Boolean> publishRest() {
return metaBean().publishRest().createProperty(this);
}
//-----------------------------------------------------------------------
/**
 * Gets the Mongo connector.
 * If a Mongo connector is specified, then it is used for caching.
 * @return the value of the property
 */
public MongoConnector getMongoConnector() {
return _mongoConnector;
}
/**
 * Sets the Mongo connector.
 * If a Mongo connector is specified, then it is used for caching.
 * @param mongoConnector  the new value of the property
 */
public void setMongoConnector(MongoConnector mongoConnector) {
this._mongoConnector = mongoConnector;
}
/**
 * Gets the the {@code mongoConnector} property.
 * If a Mongo connector is specified, then it is used for caching.
 * @return the property, not null
 */
public final Property<MongoConnector> mongoConnector() {
return metaBean().mongoConnector().createProperty(this);
}
//-----------------------------------------------------------------------
/**
 * Gets the cache manager.
 * If a Mongo connector is specified, then this is not used.
 * If a Mongo connector is not specified and this is, then EH cache is used.
 * @return the value of the property
 */
public CacheManager getCacheManager() {
return _cacheManager;
}
/**
 * Sets the cache manager.
 * If a Mongo connector is specified, then this is not used.
 * If a Mongo connector is not specified and this is, then EH cache is used.
 * @param cacheManager  the new value of the property
 */
public void setCacheManager(CacheManager cacheManager) {
this._cacheManager = cacheManager;
}
/**
 * Gets the the {@code cacheManager} property.
 * If a Mongo connector is specified, then this is not used.
 * If a Mongo connector is not specified and this is, then EH cache is used.
 * @return the property, not null
 */
public final Property<CacheManager> cacheManager() {
return metaBean().cacheManager().createProperty(this);
}
//-----------------------------------------------------------------------
/**
 * Gets a config source. If specified, overrides will be pulled from
 * here.
 * @return the value of the property
 */
public ConfigSource getConfigSource() {
return _configSource;
}
/**
 * Sets a config source. If specified, overrides will be pulled from
 * here.
 * @param configSource  the new value of the property
 */
public void setConfigSource(ConfigSource configSource) {
this._configSource = configSource;
}
/**
 * Gets the the {@code configSource} property.
 * @return the property, not null
 */
public final Property<ConfigSource> configSource() {
return metaBean().configSource().createProperty(this);
}
//-----------------------------------------------------------------------
@Override
public MongoFakeBloombergReferenceDataProviderComponentFactory clone() {
return JodaBeanUtils.cloneAlways(this);
}
@Override
public boolean equals(Object obj) {
if (obj == this) {
return true;
}
if (obj != null && obj.getClass() == this.getClass()) {
MongoFakeBloombergReferenceDataProviderComponentFactory other = (MongoFakeBloombergReferenceDataProviderComponentFactory) obj;
return JodaBeanUtils.equal(getClassifier(), other.getClassifier()) &&
(isPublishRest() == other.isPublishRest()) &&
JodaBeanUtils.equal(getMongoConnector(), other.getMongoConnector()) &&
JodaBeanUtils.equal(getCacheManager(), other.getCacheManager()) &&
JodaBeanUtils.equal(getConfigSource(), other.getConfigSource()) &&
super.equals(obj);
}
return false;
}
@Override
public int hashCode() {
int hash = 7;
hash = hash * 31 + JodaBeanUtils.hashCode(getClassifier());
hash = hash * 31 + JodaBeanUtils.hashCode(isPublishRest());
hash = hash * 31 + JodaBeanUtils.hashCode(getMongoConnector());
hash = hash * 31 + JodaBeanUtils.hashCode(getCacheManager());
hash = hash * 31 + JodaBeanUtils.hashCode(getConfigSource());
return hash ^ super.hashCode();
}
@Override
public String toString() {
StringBuilder buf = new StringBuilder(192);
buf.append("MongoFakeBloombergReferenceDataProviderComponentFactory{");
int len = buf.length();
toString(buf);
if (buf.length() > len) {
buf.setLength(buf.length() - 2);
}
buf.append('}');
return buf.toString();
}
@Override
protected void toString(StringBuilder buf) {
super.toString(buf);
buf.append("classifier").append('=').append(JodaBeanUtils.toString(getClassifier())).append(',').append(' ');
buf.append("publishRest").append('=').append(JodaBeanUtils.toString(isPublishRest())).append(',').append(' ');
buf.append("mongoConnector").append('=').append(JodaBeanUtils.toString(getMongoConnector())).append(',').append(' ');
buf.append("cacheManager").append('=').append(JodaBeanUtils.toString(getCacheManager())).append(',').append(' ');
buf.append("configSource").append('=').append(JodaBeanUtils.toString(getConfigSource())).append(',').append(' ');
}
//-----------------------------------------------------------------------
/**
 * The meta-bean for {@code MongoFakeBloombergReferenceDataProviderComponentFactory}.
 */
public static class Meta extends AbstractComponentFactory.Meta {
/**
 * The singleton instance of the meta-bean.
 */
static final Meta INSTANCE = new Meta();
/**
 * The meta-property for the {@code classifier} property.
 */
private final MetaProperty<String> _classifier = DirectMetaProperty.ofReadWrite(
this, "classifier", MongoFakeBloombergReferenceDataProviderComponentFactory.class, String.class);
/**
 * The meta-property for the {@code publishRest} property.
 */
private final MetaProperty<Boolean> _publishRest = DirectMetaProperty.ofReadWrite(
this, "publishRest", MongoFakeBloombergReferenceDataProviderComponentFactory.class, Boolean.TYPE);
/**
 * The meta-property for the {@code mongoConnector} property.
 */
private final MetaProperty<MongoConnector> _mongoConnector = DirectMetaProperty.ofReadWrite(
this, "mongoConnector", MongoFakeBloombergReferenceDataProviderComponentFactory.class, MongoConnector.class);
/**
 * The meta-property for the {@code cacheManager} property.
 */
private final MetaProperty<CacheManager> _cacheManager = DirectMetaProperty.ofReadWrite(
this, "cacheManager", MongoFakeBloombergReferenceDataProviderComponentFactory.class, CacheManager.class);
/**
 * The meta-property for the {@code configSource} property.
 */
private final MetaProperty<ConfigSource> _configSource = DirectMetaProperty.ofReadWrite(
this, "configSource", MongoFakeBloombergReferenceDataProviderComponentFactory.class, ConfigSource.class);
/**
 * The meta-properties.
 */
private final Map<String, MetaProperty<?>> _metaPropertyMap$ = new DirectMetaPropertyMap(
this, (DirectMetaPropertyMap) super.metaPropertyMap(),
"classifier",
"publishRest",
"mongoConnector",
"cacheManager",
"configSource");
/**
 * Restricted constructor.
 */
protected Meta() {
}
// Dispatches by property-name hash code (Joda-Beans convention).
@Override
protected MetaProperty<?> metaPropertyGet(String propertyName) {
switch (propertyName.hashCode()) {
case -281470431: // classifier
return _classifier;
case -614707837: // publishRest
return _publishRest;
case 224118201: // mongoConnector
return _mongoConnector;
case -1452875317: // cacheManager
return _cacheManager;
case 195157501: // configSource
return _configSource;
}
return super.metaPropertyGet(propertyName);
}
@Override
public BeanBuilder<? extends MongoFakeBloombergReferenceDataProviderComponentFactory> builder() {
return new DirectBeanBuilder<MongoFakeBloombergReferenceDataProviderComponentFactory>(new MongoFakeBloombergReferenceDataProviderComponentFactory());
}
@Override
public Class<? extends MongoFakeBloombergReferenceDataProviderComponentFactory> beanType() {
return MongoFakeBloombergReferenceDataProviderComponentFactory.class;
}
@Override
public Map<String, MetaProperty<?>> metaPropertyMap() {
return _metaPropertyMap$;
}
//-----------------------------------------------------------------------
/**
 * The meta-property for the {@code classifier} property.
 * @return the meta-property, not null
 */
public final MetaProperty<String> classifier() {
return _classifier;
}
/**
 * The meta-property for the {@code publishRest} property.
 * @return the meta-property, not null
 */
public final MetaProperty<Boolean> publishRest() {
return _publishRest;
}
/**
 * The meta-property for the {@code mongoConnector} property.
 * @return the meta-property, not null
 */
public final MetaProperty<MongoConnector> mongoConnector() {
return _mongoConnector;
}
/**
 * The meta-property for the {@code cacheManager} property.
 * @return the meta-property, not null
 */
public final MetaProperty<CacheManager> cacheManager() {
return _cacheManager;
}
/**
 * The meta-property for the {@code configSource} property.
 * @return the meta-property, not null
 */
public final MetaProperty<ConfigSource> configSource() {
return _configSource;
}
//-----------------------------------------------------------------------
@Override
protected Object propertyGet(Bean bean, String propertyName, boolean quiet) {
switch (propertyName.hashCode()) {
case -281470431: // classifier
return ((MongoFakeBloombergReferenceDataProviderComponentFactory) bean).getClassifier();
case -614707837: // publishRest
return ((MongoFakeBloombergReferenceDataProviderComponentFactory) bean).isPublishRest();
case 224118201: // mongoConnector
return ((MongoFakeBloombergReferenceDataProviderComponentFactory) bean).getMongoConnector();
case -1452875317: // cacheManager
return ((MongoFakeBloombergReferenceDataProviderComponentFactory) bean).getCacheManager();
case 195157501: // configSource
return ((MongoFakeBloombergReferenceDataProviderComponentFactory) bean).getConfigSource();
}
return super.propertyGet(bean, propertyName, quiet);
}
@Override
protected void propertySet(Bean bean, String propertyName, Object newValue, boolean quiet) {
switch (propertyName.hashCode()) {
case -281470431: // classifier
((MongoFakeBloombergReferenceDataProviderComponentFactory) bean).setClassifier((String) newValue);
return;
case -614707837: // publishRest
((MongoFakeBloombergReferenceDataProviderComponentFactory) bean).setPublishRest((Boolean) newValue);
return;
case 224118201: // mongoConnector
((MongoFakeBloombergReferenceDataProviderComponentFactory) bean).setMongoConnector((MongoConnector) newValue);
return;
case -1452875317: // cacheManager
((MongoFakeBloombergReferenceDataProviderComponentFactory) bean).setCacheManager((CacheManager) newValue);
return;
case 195157501: // configSource
((MongoFakeBloombergReferenceDataProviderComponentFactory) bean).setConfigSource((ConfigSource) newValue);
return;
}
super.propertySet(bean, propertyName, newValue, quiet);
}
@Override
protected void validate(Bean bean) {
JodaBeanUtils.notNull(((MongoFakeBloombergReferenceDataProviderComponentFactory) bean)._classifier, "classifier");
super.validate(bean);
}
}
///CLOVER:ON
//-------------------------- AUTOGENERATED END --------------------------
}
| |
package io.silverspoon.bulldog.core.gpio.base;
import java.util.List;
import io.silverspoon.bulldog.core.Edge;
import io.silverspoon.bulldog.core.Signal;
import io.silverspoon.bulldog.core.gpio.DigitalIO;
import io.silverspoon.bulldog.core.gpio.DigitalInput;
import io.silverspoon.bulldog.core.gpio.DigitalOutput;
import io.silverspoon.bulldog.core.gpio.Pin;
import io.silverspoon.bulldog.core.gpio.event.InterruptEventArgs;
import io.silverspoon.bulldog.core.gpio.event.InterruptListener;
/**
 * A pin feature that combines a {@link DigitalInput} and a {@link DigitalOutput}
 * on the same pin, lazily switching the pin between input and output mode:
 * read-style calls tear down the output (if active) and set up the input,
 * write-style calls do the reverse.
 */
public class DigitalIOFeature extends AbstractPinFeature implements DigitalIO {

    private static final String NAME_FORMAT = "Digital IO on Pin %s";

    private DigitalOutput output;
    private DigitalInput input;

    public DigitalIOFeature(Pin pin, DigitalInput input, DigitalOutput output) {
        super(pin);
        this.output = output;
        this.input = input;
    }

    @Override
    public Signal read() {
        setupInputIfNecessary();
        return input.read();
    }

    /**
     * Switches the pin into input mode if it is not already: tears down an
     * active output first, then sets up the input.
     */
    private void setupInputIfNecessary() {
        if (!input.isSetup()) {
            if (output.isSetup()) {
                output.teardown();
            }
            input.setup();
        }
    }

    /**
     * Switches the pin into output mode if it is not already: tears down an
     * active input first, then sets up the output.
     */
    private void setupOutputIfNecessary() {
        if (!output.isSetup()) {
            if (input.isSetup()) {
                input.teardown();
            }
            // BUGFIX: removed a stray empty statement (';') that was left
            // after the closing brace above.
            output.setup();
        }
    }

    @Override
    public Signal readDebounced(int debounceMilliseconds) {
        setupInputIfNecessary();
        return input.readDebounced(debounceMilliseconds);
    }

    @Override
    public void disableInterrupts() {
        setupInputIfNecessary();
        input.disableInterrupts();
    }

    @Override
    public void enableInterrupts() {
        setupInputIfNecessary();
        input.enableInterrupts();
    }

    @Override
    public boolean areInterruptsEnabled() {
        setupInputIfNecessary();
        return input.areInterruptsEnabled();
    }

    @Override
    public void setInterruptDebounceMs(int milliSeconds) {
        setupInputIfNecessary();
        input.setInterruptDebounceMs(milliSeconds);
    }

    @Override
    public int getInterruptDebounceMs() {
        setupInputIfNecessary();
        return input.getInterruptDebounceMs();
    }

    @Override
    public void setInterruptTrigger(Edge edge) {
        setupInputIfNecessary();
        input.setInterruptTrigger(edge);
    }

    @Override
    public Edge getInterruptTrigger() {
        setupInputIfNecessary();
        return input.getInterruptTrigger();
    }

    @Override
    public void fireInterruptEvent(InterruptEventArgs args) {
        setupInputIfNecessary();
        input.fireInterruptEvent(args);
    }

    @Override
    public void addInterruptListener(InterruptListener listener) {
        setupInputIfNecessary();
        input.addInterruptListener(listener);
    }

    @Override
    public void removeInterruptListener(InterruptListener listener) {
        setupInputIfNecessary();
        input.removeInterruptListener(listener);
    }

    @Override
    public void clearInterruptListeners() {
        setupInputIfNecessary();
        input.clearInterruptListeners();
    }

    @Override
    public List<InterruptListener> getInterruptListeners() {
        setupInputIfNecessary();
        return input.getInterruptListeners();
    }

    @Override
    public String getName() {
        return String.format(NAME_FORMAT, getPin().getName());
    }

    @Override
    public void write(Signal signal) {
        setupOutputIfNecessary();
        output.write(signal);
    }

    @Override
    public void applySignal(Signal signal) {
        setupOutputIfNecessary();
        output.applySignal(signal);
    }

    @Override
    public Signal getAppliedSignal() {
        setupOutputIfNecessary();
        return output.getAppliedSignal();
    }

    @Override
    public void high() {
        setupOutputIfNecessary();
        output.high();
    }

    @Override
    public void low() {
        setupOutputIfNecessary();
        output.low();
    }

    @Override
    public void toggle() {
        setupOutputIfNecessary();
        output.toggle();
    }

    @Override
    public boolean isHigh() {
        setupOutputIfNecessary();
        return output.isHigh();
    }

    @Override
    public boolean isLow() {
        setupOutputIfNecessary();
        return output.isLow();
    }

    @Override
    public void startBlinking(int periodLengthMilliseconds) {
        setupOutputIfNecessary();
        output.startBlinking(periodLengthMilliseconds);
    }

    @Override
    public void startBlinking(int periodLengthMilliseconds,
            int durationMilliseconds) {
        setupOutputIfNecessary();
        output.startBlinking(periodLengthMilliseconds, durationMilliseconds);
    }

    @Override
    public void blinkTimes(int periodLengthMilliseconds, int times) {
        setupOutputIfNecessary();
        output.blinkTimes(periodLengthMilliseconds, times);
    }

    @Override
    public void stopBlinking() {
        setupOutputIfNecessary();
        output.stopBlinking();
    }

    @Override
    public boolean isBlinking() {
        setupOutputIfNecessary();
        return output.isBlinking();
    }

    @Override
    public void awaitBlinkingStopped() {
        setupOutputIfNecessary();
        output.awaitBlinkingStopped();
    }

    @Override
    protected void setupImpl() {
        // Nothing to do eagerly: input/output mode is established lazily on
        // first read/write via setup{Input,Output}IfNecessary().
    }

    @Override
    protected void teardownImpl() {
        // Tear down whichever side is currently active on the pin.
        if (isInputActive()) {
            input.teardown();
        } else if (isOutputActive()) {
            output.teardown();
        }
    }

    /**
     * Returns whether this feature (or either of its delegates) currently
     * blocks the pin.
     */
    public boolean isBlocking() {
        return getPin().getBlocker() == this || getPin().getBlocker() == output || getPin().getBlocker() == input;
    }

    @Override
    public boolean isInputActive() {
        return input.isSetup();
    }

    @Override
    public boolean isOutputActive() {
        return output.isSetup();
    }
}
| |
/*
* Copyright (c) 2008-2017, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.internal.nearcache.impl.store;
import com.hazelcast.config.EvictionConfig;
import com.hazelcast.config.NearCacheConfig;
import com.hazelcast.core.IFunction;
import com.hazelcast.internal.eviction.EvictionChecker;
import com.hazelcast.internal.eviction.EvictionListener;
import com.hazelcast.internal.eviction.EvictionPolicyType;
import com.hazelcast.internal.eviction.impl.evaluator.EvictionPolicyEvaluator;
import com.hazelcast.internal.eviction.impl.strategy.sampling.SamplingEvictionStrategy;
import com.hazelcast.internal.nearcache.NearCacheRecord;
import com.hazelcast.internal.nearcache.NearCacheRecordStore;
import com.hazelcast.internal.nearcache.impl.SampleableNearCacheRecordMap;
import com.hazelcast.internal.nearcache.impl.invalidation.MetaDataContainer;
import com.hazelcast.internal.nearcache.impl.invalidation.StaleReadDetector;
import com.hazelcast.monitor.NearCacheStats;
import com.hazelcast.monitor.impl.NearCacheStatsImpl;
import com.hazelcast.nio.serialization.Data;
import com.hazelcast.spi.serialization.SerializationService;
import com.hazelcast.util.Clock;
import java.util.concurrent.atomic.AtomicLongFieldUpdater;
import static com.hazelcast.internal.eviction.EvictionPolicyEvaluatorProvider.getEvictionPolicyEvaluator;
import static com.hazelcast.internal.memory.GlobalMemoryAccessorRegistry.MEM;
import static com.hazelcast.internal.memory.GlobalMemoryAccessorRegistry.MEM_AVAILABLE;
import static com.hazelcast.internal.nearcache.NearCacheRecord.NOT_RESERVED;
import static com.hazelcast.internal.nearcache.NearCacheRecord.READ_PERMITTED;
import static com.hazelcast.internal.nearcache.NearCacheRecord.RESERVED;
import static com.hazelcast.internal.nearcache.NearCacheRecord.UPDATE_STARTED;
import static com.hazelcast.internal.nearcache.impl.invalidation.StaleReadDetector.ALWAYS_FRESH;
import static com.hazelcast.util.ExceptionUtil.rethrow;
import static java.util.concurrent.atomic.AtomicLongFieldUpdater.newUpdater;
/**
* Abstract implementation of {@link NearCacheRecordStore} and {@link EvictionListener}.
*
* @param <K> the type of the key stored in Near Cache
* @param <V> the type of the value stored in Near Cache
* @param <KS> the type of the key of the underlying {@link com.hazelcast.internal.nearcache.impl.NearCacheRecordMap}
* @param <R> the type of the value of the underlying {@link com.hazelcast.internal.nearcache.impl.NearCacheRecordMap}
* @param <NCRM> the type of the underlying {@link com.hazelcast.internal.nearcache.impl.NearCacheRecordMap}
*/
@SuppressWarnings("checkstyle:methodcount")
public abstract class AbstractNearCacheRecordStore<K, V, KS, R extends NearCacheRecord,
NCRM extends SampleableNearCacheRecordMap<KS, R>>
implements NearCacheRecordStore<K, V>, EvictionListener<KS, R> {
protected static final AtomicLongFieldUpdater<AbstractNearCacheRecordStore> RESERVATION_ID
= newUpdater(AbstractNearCacheRecordStore.class, "reservationId");
/**
* If Unsafe is available, Object array index scale (every index represents a reference)
* can be assumed as reference size.
* <p>
* Otherwise, we assume reference size as integer size that means
* we assume 32 bit JVM or compressed-references enabled 64 bit JVM
* by ignoring compressed-references disable mode on 64 bit JVM.
*/
protected static final int REFERENCE_SIZE = MEM_AVAILABLE ? MEM.arrayIndexScale(Object[].class) : (Integer.SIZE / Byte.SIZE);
protected static final int MILLI_SECONDS_IN_A_SECOND = 1000;
protected final long timeToLiveMillis;
protected final long maxIdleMillis;
protected final NearCacheConfig nearCacheConfig;
protected final SerializationService serializationService;
protected final ClassLoader classLoader;
protected final NearCacheStatsImpl nearCacheStats;
protected final IFunction<K, R> reserveForUpdate = new IFunction<K, R>() {
@Override
public R apply(K key) {
R record = null;
try {
record = valueToRecord(null);
onRecordCreate(key, record);
record.casRecordState(READ_PERMITTED, RESERVED);
} catch (Throwable throwable) {
onPutError(key, null, record, null, throwable);
throw rethrow(throwable);
}
return record;
}
};
protected EvictionChecker evictionChecker;
protected EvictionPolicyEvaluator<KS, R> evictionPolicyEvaluator;
protected SamplingEvictionStrategy<KS, R, NCRM> evictionStrategy;
protected EvictionPolicyType evictionPolicyType;
protected NCRM records;
protected volatile StaleReadDetector staleReadDetector = ALWAYS_FRESH;
protected volatile long reservationId;
public AbstractNearCacheRecordStore(NearCacheConfig nearCacheConfig, SerializationService serializationService,
ClassLoader classLoader) {
this(nearCacheConfig, new NearCacheStatsImpl(), serializationService, classLoader);
}
// extended in EE
protected AbstractNearCacheRecordStore(NearCacheConfig nearCacheConfig, NearCacheStatsImpl nearCacheStats,
SerializationService serializationService, ClassLoader classLoader) {
this.nearCacheConfig = nearCacheConfig;
this.timeToLiveMillis = nearCacheConfig.getTimeToLiveSeconds() * MILLI_SECONDS_IN_A_SECOND;
this.maxIdleMillis = nearCacheConfig.getMaxIdleSeconds() * MILLI_SECONDS_IN_A_SECOND;
this.serializationService = serializationService;
this.classLoader = classLoader;
this.nearCacheStats = nearCacheStats;
this.evictionPolicyType = nearCacheConfig.getEvictionConfig().getEvictionPolicyType();
}
@Override
public void initialize() {
    EvictionConfig config = nearCacheConfig.getEvictionConfig();
    // create the backing record map first, then the eviction collaborators derived from the config
    this.records = createNearCacheRecordMap(nearCacheConfig);
    this.evictionChecker = createNearCacheEvictionChecker(config, nearCacheConfig);
    this.evictionPolicyEvaluator = createEvictionPolicyEvaluator(config);
    this.evictionStrategy = createEvictionStrategy(config);
    this.evictionPolicyType = config.getEvictionPolicyType();
}
// Installs the detector used to spot reads of entries invalidated elsewhere; volatile write,
// so the new detector is visible to concurrent get() calls immediately.
@Override
public void setStaleReadDetector(StaleReadDetector staleReadDetector) {
    this.staleReadDetector = staleReadDetector;
}

/** Returns the detector currently used to identify stale (invalidated) reads. */
@Override
public StaleReadDetector getStaleReadDetector() {
    return staleReadDetector;
}
/** Creates the checker that decides whether the store is over its configured size limit. */
protected abstract EvictionChecker createNearCacheEvictionChecker(EvictionConfig evictionConfig,
                                                                  NearCacheConfig nearCacheConfig);

/** Creates the concrete key-to-record map backing this store. */
protected abstract NCRM createNearCacheRecordMap(NearCacheConfig nearCacheConfig);

/** Heap cost in bytes attributed to storing the given key. */
protected abstract long getKeyStorageMemoryCost(K key);

/** Heap cost in bytes attributed to storing the given record. */
protected abstract long getRecordStorageMemoryCost(R record);

/** Wraps a value (possibly null, for reservations) into a new record. */
protected abstract R valueToRecord(V value);

/** Replaces the value held by an existing record in place. */
protected abstract void updateRecordValue(R record, V value);

/** Extracts the (possibly deserialized) value from a record. */
protected abstract V recordToValue(R record);

// public for tests
public abstract R getRecord(K key);

/** Returns the existing record for the key, or creates one in RESERVED state. */
protected abstract R getOrCreateToReserve(K key);

/** Publishes a reserved value if reservationId matches; returns the resulting value. */
protected abstract V updateAndGetReserved(K key, final V value, final long reservationId, boolean deserialize);

/** Associates record with key, returning the previously mapped record or null. */
protected abstract R putRecord(K key, R record);

/** Removes and returns the record for key, or null if absent. */
protected abstract R removeRecord(K key);

/** True if the map currently contains a record (of any state) for the key. */
protected abstract boolean containsRecordKey(K key);
// Guard invoked by every public operation: the store is only usable after initialize().
protected void checkAvailable() {
    if (isAvailable()) {
        return;
    }
    throw new IllegalStateException(nearCacheConfig.getName() + " named Near Cache record store is not available");
}
// Builds the evaluator that ranks sampled entries for eviction.
// Rejects a missing policy up front; also avoids shadowing the evictionPolicyType field.
protected EvictionPolicyEvaluator<KS, R> createEvictionPolicyEvaluator(EvictionConfig evictionConfig) {
    EvictionPolicyType policyType = evictionConfig.getEvictionPolicyType();
    if (policyType == null) {
        throw new IllegalArgumentException("Eviction policy cannot be null");
    }
    return getEvictionPolicyEvaluator(evictionConfig, classLoader);
}
// Returns the shared sampling-based eviction strategy singleton; the config parameter is unused here.
protected SamplingEvictionStrategy<KS, R, NCRM> createEvictionStrategy(EvictionConfig evictionConfig) {
    return SamplingEvictionStrategy.INSTANCE;
}

// The store becomes available once initialize() has created the record map.
protected boolean isAvailable() {
    return records != null;
}
// Serializes a value to Data; already-serialized values and null pass through untouched.
protected Data valueToData(V value) {
    if (value == null) {
        return null;
    }
    if (value instanceof Data) {
        return (Data) value;
    }
    return serializationService.toData(value);
}
// Deserializes Data back into a value; null maps to null.
protected V dataToValue(Data data) {
    if (data == null) {
        return null;
    }
    return serializationService.toObject(data);
}
// Normalizes an arbitrary object to its serialized Data form (null-safe).
protected Data toData(Object obj) {
    if (obj instanceof Data) {
        return (Data) obj;
    }
    // instanceof is false for null, so null still short-circuits to null here
    return obj == null ? null : valueToData((V) obj);
}
// Normalizes an arbitrary object to its deserialized value form (null-safe).
protected V toValue(Object obj) {
    if (obj instanceof Data) {
        return dataToValue((Data) obj);
    }
    // covers both the null case and the already-deserialized case of the original
    return (V) obj;
}
// Total heap cost of one mapping: key bytes plus record bytes.
protected long getTotalStorageMemoryCost(K key, R record) {
    return getKeyStorageMemoryCost(key) + getRecordStorageMemoryCost(record);
}
// A record is dead either via its absolute TTL or via the max-idle sliding window.
protected boolean isRecordExpired(R record) {
    long now = Clock.currentTimeMillis();
    return record.isExpiredAt(now) || record.isIdleAt(maxIdleMillis, now);
}
// Stamps a freshly created record with its creation time and, when invalidation
// metadata is available for the key, the UUID/sequence used by stale-read checks.
protected void onRecordCreate(K key, R record) {
    record.setCreationTime(Clock.currentTimeMillis());
    MetaDataContainer container = staleReadDetector.getMetaDataContainer(key);
    if (container == null) {
        return;
    }
    record.setUuid(container.getUuid());
    record.setInvalidationSequence(container.getSequence());
}
// Updates access bookkeeping on a hit; this data feeds access-based eviction policies.
protected void onRecordAccess(R record) {
    record.setAccessTime(Clock.currentTimeMillis());
    record.incrementAccessHit();
}

// Template hooks below are intentionally empty; subclasses override to intercept operations.
@SuppressWarnings("unused")
protected void onGet(K key, V value, R record) {
}

@SuppressWarnings("unused")
protected void onGetError(K key, V value, R record, Throwable error) {
}

@SuppressWarnings("unused")
protected void onPut(K key, V value, R record, R oldRecord) {
}

@SuppressWarnings("unused")
protected void onPutError(K key, V value, R record, R oldRecord, Throwable error) {
}

@SuppressWarnings("unused")
protected void onRemove(K key, R record, boolean removed) {
}

@SuppressWarnings("unused")
protected void onRemoveError(K key, R record, boolean removed, Throwable error) {
}

// Unlike the hooks above this one has behavior: every expiry bumps the expiration stat.
@SuppressWarnings("unused")
protected void onExpire(K key, R record) {
    nearCacheStats.incrementExpirations();
}
// Eviction is active only when all collaborators exist and the policy is not NONE.
protected boolean isEvictionEnabled() {
    if (evictionStrategy == null || evictionPolicyEvaluator == null || evictionPolicyType == null) {
        return false;
    }
    return !evictionPolicyType.equals(EvictionPolicyType.NONE);
}
// Eviction-strategy callback: classify the removal for the stats, then shrink the owned count.
@Override
public void onEvict(KS key, R record, boolean wasExpired) {
    if (!wasExpired) {
        nearCacheStats.incrementEvictions();
    } else {
        nearCacheStats.incrementExpirations();
    }
    nearCacheStats.decrementOwnedEntryCount();
}
/**
 * Looks up the cached value for {@code key}. Misses, unreadable (reserved) records,
 * stale reads and expired records all yield {@code null}; only a readable, fresh
 * record counts as a hit. Errors are routed through {@link #onGetError} and rethrown.
 */
@Override
public V get(K key) {
    checkAvailable();
    R record = null;
    V value = null;
    try {
        record = getRecord(key);
        if (record == null) {
            nearCacheStats.incrementMisses();
            return null;
        }
        // records still being reserved/updated are invisible to readers
        if (record.getRecordState() != READ_PERMITTED) {
            return null;
        }
        // drop entries whose invalidation metadata says the cached copy is out of date
        if (staleReadDetector.isStaleRead(key, record)) {
            remove(key);
            return null;
        }
        if (isRecordExpired(record)) {
            remove(key);
            onExpire(key, record);
            return null;
        }
        onRecordAccess(record);
        nearCacheStats.incrementHits();
        value = recordToValue(record);
        onGet(key, value, record);
        return value;
    } catch (Throwable error) {
        onGetError(key, value, record, error);
        throw rethrow(error);
    }
}
/**
 * Caches {@code value} under {@code key}. When eviction is disabled and the cache is
 * full, new keys are silently dropped (existing keys are still updated). Errors are
 * reported via {@link #onPutError} and rethrown.
 */
@Override
public void put(K key, V value) {
    checkAvailable();
    // if there is no eviction configured we return if the Near Cache is full and it's a new key
    // (we have to check the key, otherwise we might lose updates on existing keys)
    if (!isEvictionEnabled() && evictionChecker.isEvictionRequired() && !containsRecordKey(key)) {
        return;
    }
    R record = null;
    R oldRecord = null;
    try {
        record = valueToRecord(value);
        onRecordCreate(key, record);
        oldRecord = putRecord(key, record);
        // only a brand-new mapping grows the owned-entry count; replacements keep it stable
        if (oldRecord == null) {
            nearCacheStats.incrementOwnedEntryCount();
        }
        // NOTE(review): owned-entry memory cost is not updated here, unlike
        // updateReservedRecordInternal -- confirm whether that is intentional
        onPut(key, value, record, oldRecord);
    } catch (Throwable error) {
        onPutError(key, value, record, oldRecord, error);
        throw rethrow(error);
    }
}
/**
 * Removes the mapping for {@code key}.
 * Note the subtlety: the return value is {@code record != null} (any record was removed,
 * including reserved ones), while the {@code removed} flag passed to {@link #onRemove} --
 * and the stats decrement -- only apply to READ_PERMITTED records.
 */
@Override
public boolean remove(K key) {
    checkAvailable();
    R record = null;
    boolean removed = false;
    try {
        record = removeRecord(key);
        if (record != null && record.getRecordState() == READ_PERMITTED) {
            removed = true;
            nearCacheStats.decrementOwnedEntryCount();
        }
        onRemove(key, record, removed);
        return record != null;
    } catch (Throwable error) {
        onRemoveError(key, record, removed, error);
        throw rethrow(error);
    }
}
/** Drops all records and zeroes the ownership statistics. */
@Override
public void clear() {
    checkAvailable();
    clearRecords();
    nearCacheStats.setOwnedEntryCount(0);
    nearCacheStats.setOwnedEntryMemoryCost(0L);
}

// Separated so subclasses can add per-clear behavior without touching the stats reset.
protected void clearRecords() {
    records.clear();
}

/** Destroys the backing store and zeroes the ownership statistics. */
@Override
public void destroy() {
    checkAvailable();
    destroyStore();
    nearCacheStats.setOwnedEntryCount(0);
    nearCacheStats.setOwnedEntryMemoryCost(0L);
}

// Default destroy just clears; subclasses may release additional resources here.
protected void destroyStore() {
    clearRecords();
}

/** Returns the live statistics object for this store. */
@Override
public NearCacheStats getNearCacheStats() {
    checkAvailable();
    return nearCacheStats;
}

/** Number of records currently held, including not-yet-readable reserved ones. */
@Override
public int size() {
    checkAvailable();
    return records.size();
}
// Runs eviction only when the checker reports the store is over capacity.
@Override
public void doEvictionIfRequired() {
    checkAvailable();
    if (isEvictionEnabled()) {
        evictionStrategy.evict(records, evictionPolicyEvaluator, evictionChecker, this);
    }
}

// Runs eviction unconditionally (null checker means "always required").
@Override
public void doEviction() {
    checkAvailable();
    if (isEvictionEnabled()) {
        evictionStrategy.evict(records, evictionPolicyEvaluator, null, this);
    }
}
/**
 * Tries to reserve a slot for {@code key} ahead of an asynchronous value load.
 * Returns the reservation id on success, or NOT_RESERVED when the (eviction-less)
 * cache is full for a new key or another caller holds the reservation.
 */
@Override
public long tryReserveForUpdate(K key) {
    checkAvailable();
    // if there is no eviction configured we return if the Near Cache is full and it's a new key
    // (we have to check the key, otherwise we might lose updates on existing keys)
    if (!isEvictionEnabled() && evictionChecker.isEvictionRequired() && !containsRecordKey(key)) {
        return NOT_RESERVED;
    }
    R reserved = getOrCreateToReserve(key);
    long id = nextReservationId();
    // the CAS from RESERVED to our id decides ownership of this reservation
    return reserved.casRecordState(RESERVED, id) ? id : NOT_RESERVED;
}
/** Publishes a previously reserved value; delegates to the map-specific implementation. */
@Override
public V tryPublishReserved(K key, final V value, final long reservationId, boolean deserialize) {
    return updateAndGetReserved(key, value, reservationId, deserialize);
}

// Publishes a reserved record. Only the caller whose reservationId matches the record's
// current state wins the first CAS; losers get the record back unchanged. The winner
// installs the value, flips the record to READ_PERMITTED and updates ownership stats.
protected R updateReservedRecordInternal(K key, V value, R reservedRecord, long reservationId) {
    if (!reservedRecord.casRecordState(reservationId, UPDATE_STARTED)) {
        return reservedRecord;
    }
    updateRecordValue(reservedRecord, value);
    reservedRecord.casRecordState(UPDATE_STARTED, READ_PERMITTED);
    nearCacheStats.incrementOwnedEntryMemoryCost(getTotalStorageMemoryCost(key, reservedRecord));
    nearCacheStats.incrementOwnedEntryCount();
    return reservedRecord;
}

// Monotonically increasing ids; RESERVATION_ID is presumably an AtomicLongFieldUpdater
// over the reservationId field declared above -- confirm against the class header.
protected long nextReservationId() {
    return RESERVATION_ID.incrementAndGet(this);
}
}
| |
/*
* Licensed to CRATE Technology GmbH ("Crate") under one or more contributor
* license agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership. Crate licenses
* this file to you under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License. You may
* obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
* However, if you have executed another commercial license agreement
* with Crate these terms will supersede the license and you may use the
* software solely pursuant to the terms of the relevant commercial agreement.
*/
package io.crate.metadata.settings;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Sets;
import io.crate.analyze.SettingsApplier;
import io.crate.analyze.SettingsAppliers;
import io.crate.breaker.CrateCircuitBreakerService;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import javax.annotation.Nullable;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
public class CrateSettings {
// ---- "stats." settings ----
public static final NestedSetting STATS = new NestedSetting() {
    @Override
    public String name() {
        return "stats";
    }

    @Override
    public List<Setting> children() {
        // forward references to fields declared below are safe: children() runs lazily
        return ImmutableList.<Setting>of(STATS_ENABLED, STATS_JOBS_LOG_SIZE, STATS_OPERATIONS_LOG_SIZE);
    }
};

// stats.enabled: default false
public static final BoolSetting STATS_ENABLED = new BoolSetting() {
    @Override
    public String name() {
        return "enabled";
    }

    @Override
    public Boolean defaultValue() {
        return false;
    }

    @Override
    public Setting parent() {
        return STATS;
    }
};

// stats.jobs_log_size: default 10_000, minimum 0
public static final IntSetting STATS_JOBS_LOG_SIZE = new IntSetting() {
    @Override
    public String name() {
        return "jobs_log_size";
    }

    @Override
    public Integer defaultValue() {
        return 10_000;
    }

    @Override
    public Integer minValue() {
        return 0;
    }

    @Override
    public Setting parent() {
        return STATS;
    }
};

// stats.operations_log_size: default 10_000, minimum 0
public static final IntSetting STATS_OPERATIONS_LOG_SIZE = new IntSetting() {
    @Override
    public String name() {
        return "operations_log_size";
    }

    @Override
    public Integer defaultValue() {
        return 10_000;
    }

    @Override
    public Integer minValue() {
        return 0;
    }

    @Override
    public Setting parent() {
        return STATS;
    }
};
// ---- "cluster." settings ----
public static final NestedSetting CLUSTER = new NestedSetting() {
    @Override
    public String name() {
        return "cluster";
    }

    @Override
    public List<Setting> children() {
        return ImmutableList.<Setting>of(GRACEFUL_STOP, ROUTING, CLUSTER_INFO);
    }
};

// cluster.graceful_stop.*
public static final NestedSetting GRACEFUL_STOP = new NestedSetting() {
    @Override
    public String name() { return "graceful_stop"; }

    @Override
    public Setting parent() {
        return CLUSTER;
    }

    @Override
    public List<Setting> children() {
        return ImmutableList.<Setting>of(
            GRACEFUL_STOP_MIN_AVAILABILITY,
            GRACEFUL_STOP_REALLOCATE,
            GRACEFUL_STOP_TIMEOUT,
            GRACEFUL_STOP_FORCE);
    }
};

// cluster.graceful_stop.min_availability: one of "full", "primaries", "none"; default "primaries"
public static final StringSetting GRACEFUL_STOP_MIN_AVAILABILITY = new StringSetting(
    Sets.newHashSet("full", "primaries", "none")
) {
    @Override
    public String name() { return "min_availability"; }

    @Override
    public String defaultValue() { return "primaries"; }

    @Override
    public Setting parent() {
        return GRACEFUL_STOP;
    }
};

// cluster.graceful_stop.reallocate: default true
public static final BoolSetting GRACEFUL_STOP_REALLOCATE = new BoolSetting() {
    @Override
    public String name() { return "reallocate"; }

    @Override
    public Boolean defaultValue() {
        return true;
    }

    @Override
    public Setting parent() {
        return GRACEFUL_STOP;
    }
};

// cluster.graceful_stop.timeout: default 7_200_000 ms (2 hours)
public static final TimeSetting GRACEFUL_STOP_TIMEOUT = new TimeSetting() {
    @Override
    public String name() {
        return "timeout";
    }

    @Override
    public TimeValue defaultValue() {
        return new TimeValue(7_200_000);
    }

    @Override
    public Setting parent() {
        return GRACEFUL_STOP;
    }
};

// cluster.graceful_stop.force: default false
public static final BoolSetting GRACEFUL_STOP_FORCE = new BoolSetting() {
    @Override
    public String name() {
        return "force";
    }

    @Override
    public Boolean defaultValue() {
        return false;
    }

    @Override
    public Setting parent() {
        return GRACEFUL_STOP;
    }
};
// ---- "discovery." settings (top-level; not a child of CLUSTER) ----
public static final NestedSetting DISCOVERY = new NestedSetting() {
    @Override
    public String name() {
        return "discovery";
    }

    @Override
    public List<Setting> children() {
        return ImmutableList.<Setting>of(DISCOVERY_ZEN);
    }
};

// discovery.zen.*
public static final NestedSetting DISCOVERY_ZEN = new NestedSetting() {
    @Override
    public String name() { return "zen"; }

    @Override
    public List<Setting> children() {
        return ImmutableList.<Setting>of(
            DISCOVERY_ZEN_MIN_MASTER_NODES,
            DISCOVERY_ZEN_PING_TIMEOUT,
            DISCOVERY_ZEN_PUBLISH_TIMEOUT
        );
    }

    @Override
    public Setting parent() {
        return DISCOVERY;
    }
};

// discovery.zen.minimum_master_nodes: default 1
public static final IntSetting DISCOVERY_ZEN_MIN_MASTER_NODES = new IntSetting() {
    @Override
    public String name() {
        return "minimum_master_nodes";
    }

    @Override
    public Integer defaultValue() {
        return 1;
    }

    @Override
    public Setting parent() {
        return DISCOVERY_ZEN;
    }
};

// discovery.zen.ping_timeout: default 3s
public static final TimeSetting DISCOVERY_ZEN_PING_TIMEOUT = new TimeSetting() {
    @Override
    public String name() {
        return "ping_timeout";
    }

    @Override
    public TimeValue defaultValue() {
        return new TimeValue(3, TimeUnit.SECONDS);
    }

    @Override
    public Setting parent() {
        return DISCOVERY_ZEN;
    }
};

// discovery.zen.publish_timeout: default 30s
public static final TimeSetting DISCOVERY_ZEN_PUBLISH_TIMEOUT = new TimeSetting() {
    @Override
    public String name() {
        return "publish_timeout";
    }

    @Override
    public TimeValue defaultValue() {
        return new TimeValue(30, TimeUnit.SECONDS);
    }

    @Override
    public Setting parent() {
        return DISCOVERY_ZEN;
    }
};
// ---- "cluster.routing." settings ----
public static final NestedSetting ROUTING = new NestedSetting() {
    @Override
    public String name() { return "routing"; }

    @Override
    public List<Setting> children() {
        return ImmutableList.<Setting>of(ROUTING_ALLOCATION);
    }

    @Override
    public Setting parent() {
        return CLUSTER;
    }
};

// cluster.routing.allocation.*: shard allocation behavior
public static final NestedSetting ROUTING_ALLOCATION = new NestedSetting() {
    @Override
    public String name() { return "allocation"; }

    @Override
    public Setting parent() {
        return ROUTING;
    }

    @Override
    public List<Setting> children() {
        return ImmutableList.<Setting>of(
            ROUTING_ALLOCATION_ENABLE,
            ROUTING_ALLOCATION_ALLOW_REBALANCE,
            ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE,
            ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES,
            ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES,
            ROUTING_ALLOCATION_INCLUDE,
            ROUTING_ALLOCATION_EXCLUDE,
            ROUTING_ALLOCATION_REQUIRE,
            ROUTING_ALLOCATION_BALANCE,
            ROUTING_ALLOCATION_DISK
        );
    }
};

// cluster.routing.allocation.enable: one of "none", "primaries", "all", "new_primaries"; default "all"
public static final StringSetting ROUTING_ALLOCATION_ENABLE = new StringSetting(
    Sets.newHashSet("none", "primaries", "all", "new_primaries")
) {
    @Override
    public String name() { return "enable"; }

    @Override
    public String defaultValue() { return "all"; }

    @Override
    public Setting parent() {
        return ROUTING_ALLOCATION;
    }
};

// cluster.routing.allocation.allow_rebalance: default "indices_all_active"
public static final StringSetting ROUTING_ALLOCATION_ALLOW_REBALANCE = new StringSetting(
    Sets.newHashSet("always", "indices_primary_active", "indices_all_active")
) {
    @Override
    public String name() { return "allow_rebalance"; }

    @Override
    public String defaultValue() { return "indices_all_active"; }

    @Override
    public Setting parent() {
        return ROUTING_ALLOCATION;
    }
};

// cluster.routing.allocation.cluster_concurrent_rebalance: default 2
public static final IntSetting ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE = new IntSetting() {
    @Override
    public String name() { return "cluster_concurrent_rebalance"; }

    @Override
    public Integer defaultValue() { return 2; }

    @Override
    public Setting parent() {
        return ROUTING_ALLOCATION;
    }
};

// cluster.routing.allocation.node_initial_primaries_recoveries: default 4
public static final IntSetting ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES = new IntSetting() {
    @Override
    public String name() { return "node_initial_primaries_recoveries"; }

    @Override
    public Integer defaultValue() { return 4; }

    @Override
    public Setting parent() {
        return ROUTING_ALLOCATION;
    }
};

// cluster.routing.allocation.node_concurrent_recoveries: default 2
public static final IntSetting ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES = new IntSetting() {
    @Override
    public String name() { return "node_concurrent_recoveries"; }

    @Override
    public Integer defaultValue() { return 2; }

    @Override
    public Setting parent() {
        return ROUTING_ALLOCATION;
    }
};
// cluster.routing.allocation.include.*: node attribute filters (no defaults)
public static final NestedSetting ROUTING_ALLOCATION_INCLUDE = new NestedSetting() {
    @Override
    public String name() { return "include"; }

    @Override
    public List<Setting> children() {
        return ImmutableList.<Setting>of(
            ROUTING_ALLOCATION_INCLUDE_IP,
            ROUTING_ALLOCATION_INCLUDE_HOST,
            ROUTING_ALLOCATION_INCLUDE_ID,
            ROUTING_ALLOCATION_INCLUDE_NAME
        );
    }

    @Override
    public Setting parent() {
        return ROUTING_ALLOCATION;
    }
};

public static final StringSetting ROUTING_ALLOCATION_INCLUDE_IP = new StringSetting() {
    @Override
    public String name() { return "_ip"; }

    @Override
    public Setting parent() {
        return ROUTING_ALLOCATION_INCLUDE;
    }
};

public static final StringSetting ROUTING_ALLOCATION_INCLUDE_ID = new StringSetting() {
    @Override
    public String name() { return "_id"; }

    @Override
    public Setting parent() {
        return ROUTING_ALLOCATION_INCLUDE;
    }
};

public static final StringSetting ROUTING_ALLOCATION_INCLUDE_HOST = new StringSetting() {
    @Override
    public String name() { return "_host"; }

    @Override
    public Setting parent() {
        return ROUTING_ALLOCATION_INCLUDE;
    }
};

public static final StringSetting ROUTING_ALLOCATION_INCLUDE_NAME = new StringSetting() {
    @Override
    public String name() { return "_name"; }

    @Override
    public Setting parent() {
        return ROUTING_ALLOCATION_INCLUDE;
    }
};

// cluster.routing.allocation.exclude.*: mirrors the include filters
public static final NestedSetting ROUTING_ALLOCATION_EXCLUDE = new NestedSetting() {
    @Override
    public String name() { return "exclude"; }

    @Override
    public List<Setting> children() {
        return ImmutableList.<Setting>of(
            ROUTING_ALLOCATION_EXCLUDE_IP,
            ROUTING_ALLOCATION_EXCLUDE_HOST,
            ROUTING_ALLOCATION_EXCLUDE_ID,
            ROUTING_ALLOCATION_EXCLUDE_NAME
        );
    }

    @Override
    public Setting parent() {
        return ROUTING_ALLOCATION;
    }
};

public static final StringSetting ROUTING_ALLOCATION_EXCLUDE_IP = new StringSetting() {
    @Override
    public String name() { return "_ip"; }

    @Override
    public Setting parent() {
        return ROUTING_ALLOCATION_EXCLUDE;
    }
};

public static final StringSetting ROUTING_ALLOCATION_EXCLUDE_ID = new StringSetting() {
    @Override
    public String name() { return "_id"; }

    @Override
    public Setting parent() {
        return ROUTING_ALLOCATION_EXCLUDE;
    }
};

public static final StringSetting ROUTING_ALLOCATION_EXCLUDE_HOST = new StringSetting() {
    @Override
    public String name() { return "_host"; }

    @Override
    public Setting parent() {
        return ROUTING_ALLOCATION_EXCLUDE;
    }
};

public static final StringSetting ROUTING_ALLOCATION_EXCLUDE_NAME = new StringSetting() {
    @Override
    public String name() { return "_name"; }

    @Override
    public Setting parent() {
        return ROUTING_ALLOCATION_EXCLUDE;
    }
};
// cluster.routing.allocation.require.*: mirrors the include/exclude filters
public static final NestedSetting ROUTING_ALLOCATION_REQUIRE = new NestedSetting() {
    @Override
    public String name() { return "require"; }

    @Override
    public List<Setting> children() {
        return ImmutableList.<Setting>of(
            ROUTING_ALLOCATION_REQUIRE_IP,
            ROUTING_ALLOCATION_REQUIRE_HOST,
            ROUTING_ALLOCATION_REQUIRE_ID,
            ROUTING_ALLOCATION_REQUIRE_NAME
        );
    }

    @Override
    public Setting parent() {
        return ROUTING_ALLOCATION;
    }
};

public static final StringSetting ROUTING_ALLOCATION_REQUIRE_IP = new StringSetting() {
    @Override
    public String name() { return "_ip"; }

    @Override
    public Setting parent() {
        return ROUTING_ALLOCATION_REQUIRE;
    }
};

public static final StringSetting ROUTING_ALLOCATION_REQUIRE_ID = new StringSetting() {
    @Override
    public String name() { return "_id"; }

    @Override
    public Setting parent() {
        return ROUTING_ALLOCATION_REQUIRE;
    }
};

public static final StringSetting ROUTING_ALLOCATION_REQUIRE_HOST = new StringSetting() {
    @Override
    public String name() { return "_host"; }

    @Override
    public Setting parent() {
        return ROUTING_ALLOCATION_REQUIRE;
    }
};

public static final StringSetting ROUTING_ALLOCATION_REQUIRE_NAME = new StringSetting() {
    @Override
    public String name() { return "_name"; }

    @Override
    public Setting parent() {
        return ROUTING_ALLOCATION_REQUIRE;
    }
};
// cluster.routing.allocation.balance.*: shard balancing weight factors
public static final NestedSetting ROUTING_ALLOCATION_BALANCE = new NestedSetting() {
    @Override
    public String name() { return "balance"; }

    @Override
    public Setting parent() {
        return ROUTING_ALLOCATION;
    }

    @Override
    public List<Setting> children() {
        return ImmutableList.<Setting>of(
            ROUTING_ALLOCATION_BALANCE_SHARD,
            ROUTING_ALLOCATION_BALANCE_INDEX,
            ROUTING_ALLOCATION_BALANCE_PRIMARY,
            ROUTING_ALLOCATION_BALANCE_THRESHOLD
        );
    }
};

// balance.shard: default 0.45
public static final FloatSetting ROUTING_ALLOCATION_BALANCE_SHARD = new FloatSetting() {
    @Override
    public String name() { return "shard"; }

    @Override
    public Float defaultValue() { return 0.45f; }

    @Override
    public Setting parent() {
        return ROUTING_ALLOCATION_BALANCE;
    }
};

// balance.index: default 0.5
public static final FloatSetting ROUTING_ALLOCATION_BALANCE_INDEX = new FloatSetting() {
    @Override
    public String name() { return "index"; }

    @Override
    public Float defaultValue() { return 0.5f; }

    @Override
    public Setting parent() {
        return ROUTING_ALLOCATION_BALANCE;
    }
};

// balance.primary: default 0.05
public static final FloatSetting ROUTING_ALLOCATION_BALANCE_PRIMARY = new FloatSetting() {
    @Override
    public String name() { return "primary"; }

    @Override
    public Float defaultValue() { return 0.05f; }

    @Override
    public Setting parent() {
        return ROUTING_ALLOCATION_BALANCE;
    }
};

// balance.threshold: default 1.0
public static final FloatSetting ROUTING_ALLOCATION_BALANCE_THRESHOLD = new FloatSetting() {
    @Override
    public String name() { return "threshold"; }

    @Override
    public Float defaultValue() { return 1.0f; }

    @Override
    public Setting parent() {
        return ROUTING_ALLOCATION_BALANCE;
    }
};
// cluster.routing.allocation.disk.*: disk-usage-based allocation thresholds
public static final NestedSetting ROUTING_ALLOCATION_DISK = new NestedSetting() {
    @Override
    public String name() { return "disk"; }

    @Override
    public Setting parent() {
        return ROUTING_ALLOCATION;
    }

    @Override
    public List<Setting> children() {
        return ImmutableList.<Setting>of(
            ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED,
            ROUTING_ALLOCATION_DISK_WATERMARK
        );
    }
};

// disk.threshold_enabled: default true
public static final BoolSetting ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED = new BoolSetting() {
    @Override
    public String name() { return "threshold_enabled"; }

    @Override
    public Boolean defaultValue() { return true; }

    @Override
    public Setting parent() {
        return ROUTING_ALLOCATION_DISK;
    }
};

// disk.watermark.{low,high}
public static final NestedSetting ROUTING_ALLOCATION_DISK_WATERMARK = new NestedSetting() {
    @Override
    public String name() { return "watermark"; }

    @Override
    public Setting parent() {
        return ROUTING_ALLOCATION_DISK;
    }

    @Override
    public List<Setting> children() {
        return ImmutableList.<Setting>of(
            ROUTING_ALLOCATION_DISK_WATERMARK_LOW,
            ROUTING_ALLOCATION_DISK_WATERMARK_HIGH
        );
    }
};

// watermark.low: default "85%"
public static final StringSetting ROUTING_ALLOCATION_DISK_WATERMARK_LOW = new StringSetting() {
    @Override
    public String name() { return "low"; }

    @Override
    public String defaultValue() { return "85%"; }

    @Override
    public Setting parent() {
        return ROUTING_ALLOCATION_DISK_WATERMARK;
    }
};

// watermark.high: default "90%"
public static final StringSetting ROUTING_ALLOCATION_DISK_WATERMARK_HIGH = new StringSetting() {
    @Override
    public String name() { return "high"; }

    @Override
    public String defaultValue() { return "90%"; }

    @Override
    public Setting parent() {
        return ROUTING_ALLOCATION_DISK_WATERMARK;
    }
};
// ---- "indices." settings ----
public static final NestedSetting INDICES = new NestedSetting() {
    @Override
    public String name() {
        return "indices";
    }

    @Override
    public List<Setting> children() {
        return ImmutableList.<Setting>of(INDICES_RECOVERY, INDICES_STORE, INDICES_FIELDDATA, INDICES_BREAKER);
    }
};

// indices.recovery.*: shard recovery tuning
public static final NestedSetting INDICES_RECOVERY = new NestedSetting() {
    @Override
    public String name() { return "recovery"; }

    @Override
    public List<Setting> children() {
        return ImmutableList.<Setting>of(
            INDICES_RECOVERY_CONCURRENT_STREAMS,
            INDICES_RECOVERY_FILE_CHUNK_SIZE,
            INDICES_RECOVERY_TRANSLOG_OPS,
            INDICES_RECOVERY_TRANSLOG_SIZE,
            INDICES_RECOVERY_COMPRESS,
            INDICES_RECOVERY_MAX_BYTES_PER_SEC,
            INDICES_RECOVERY_RETRY_DELAY_STATE_SYNC,
            INDICES_RECOVERY_RETRY_DELAY_NETWORK,
            INDICES_RECOVERY_ACTIVITY_TIMEOUT,
            INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT,
            INDICES_RECOVERY_INTERNAL_LONG_ACTION_TIMEOUT
        );
    }

    @Override
    public Setting parent() {
        return INDICES;
    }
};

// indices.recovery.concurrent_streams: default 3
public static final IntSetting INDICES_RECOVERY_CONCURRENT_STREAMS = new IntSetting() {
    @Override
    public String name() { return "concurrent_streams"; }

    @Override
    public Integer defaultValue() { return 3; }

    @Override
    public Setting parent() {
        return INDICES_RECOVERY;
    }
};

// indices.recovery.file_chunk_size: default 512 KB
public static final ByteSizeSetting INDICES_RECOVERY_FILE_CHUNK_SIZE = new ByteSizeSetting() {
    @Override
    public String name() { return "file_chunk_size"; }

    @Override
    public ByteSizeValue defaultValue() { return new ByteSizeValue(512, ByteSizeUnit.KB); }

    @Override
    public Setting parent() {
        return INDICES_RECOVERY;
    }
};

// indices.recovery.translog_ops: default 1000
public static final IntSetting INDICES_RECOVERY_TRANSLOG_OPS = new IntSetting() {
    @Override
    public String name() { return "translog_ops"; }

    @Override
    public Integer defaultValue() { return 1000; }

    @Override
    public Setting parent() {
        return INDICES_RECOVERY;
    }
};

// indices.recovery.translog_size: default 512 KB
public static final ByteSizeSetting INDICES_RECOVERY_TRANSLOG_SIZE = new ByteSizeSetting() {
    @Override
    public String name() { return "translog_size"; }

    @Override
    public ByteSizeValue defaultValue() { return new ByteSizeValue(512, ByteSizeUnit.KB); }

    @Override
    public Setting parent() {
        return INDICES_RECOVERY;
    }
};

// indices.recovery.compress: default true
public static final BoolSetting INDICES_RECOVERY_COMPRESS = new BoolSetting() {
    @Override
    public String name() { return "compress"; }

    @Override
    public Boolean defaultValue() { return true; }

    @Override
    public Setting parent() {
        return INDICES_RECOVERY;
    }
};

// indices.recovery.max_bytes_per_sec: default 40 MB
public static final ByteSizeSetting INDICES_RECOVERY_MAX_BYTES_PER_SEC = new ByteSizeSetting() {
    @Override
    public String name() { return "max_bytes_per_sec"; }

    @Override
    public ByteSizeValue defaultValue() { return new ByteSizeValue(40, ByteSizeUnit.MB); }

    @Override
    public Setting parent() {
        return INDICES_RECOVERY;
    }
};

// indices.recovery.retry_delay_state_sync: default 500 ms
public static final TimeSetting INDICES_RECOVERY_RETRY_DELAY_STATE_SYNC = new TimeSetting() {
    @Override
    public String name() { return "retry_delay_state_sync"; }

    @Override
    public TimeValue defaultValue() { return TimeValue.timeValueMillis(500); }

    @Override
    public Setting parent() {
        return INDICES_RECOVERY;
    }
};

// indices.recovery.retry_delay_network: default 5 s
public static final TimeSetting INDICES_RECOVERY_RETRY_DELAY_NETWORK = new TimeSetting() {
    @Override
    public String name() { return "retry_delay_network"; }

    @Override
    public TimeValue defaultValue() { return TimeValue.timeValueSeconds(5); }

    @Override
    public Setting parent() {
        return INDICES_RECOVERY;
    }
};

// indices.recovery.internal_action_timeout: default 15 min; also the base for the
// two derived defaults below (lazy defaultValue() calls make declaration order irrelevant)
public static final TimeSetting INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT = new TimeSetting() {
    @Override
    public String name() { return "internal_action_timeout"; }

    @Override
    public TimeValue defaultValue() { return TimeValue.timeValueMinutes(15); }

    @Override
    public Setting parent() {
        return INDICES_RECOVERY;
    }
};

// indices.recovery.activity_timeout: defaults to internal_action_timeout's default
public static final TimeSetting INDICES_RECOVERY_ACTIVITY_TIMEOUT = new TimeSetting() {
    @Override
    public String name() { return "activity_timeout"; }

    @Override
    public TimeValue defaultValue() { return INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT.defaultValue(); }

    @Override
    public Setting parent() {
        return INDICES_RECOVERY;
    }
};

// indices.recovery.internal_action_long_timeout: defaults to twice internal_action_timeout
public static final TimeSetting INDICES_RECOVERY_INTERNAL_LONG_ACTION_TIMEOUT = new TimeSetting() {
    @Override
    public String name() { return "internal_action_long_timeout"; }

    @Override
    public TimeValue defaultValue() { return new TimeValue(INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT.defaultValue().millis() * 2); }

    @Override
    public Setting parent() {
        return INDICES_RECOVERY;
    }
};
// indices.store.*: store-level throttling
public static final NestedSetting INDICES_STORE = new NestedSetting() {
    @Override
    public String name() { return "store"; }

    @Override
    public List<Setting> children() {
        return ImmutableList.<Setting>of(INDICES_STORE_THROTTLE);
    }

    @Override
    public Setting parent() {
        return INDICES;
    }
};

// indices.store.throttle.{type,max_bytes_per_sec}
public static final NestedSetting INDICES_STORE_THROTTLE = new NestedSetting() {
    @Override
    public String name() { return "throttle"; }

    @Override
    public List<Setting> children() {
        return ImmutableList.<Setting>of(
            INDICES_STORE_THROTTLE_TYPE,
            INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC
        );
    }

    @Override
    public Setting parent() {
        return INDICES_STORE;
    }
};

// throttle.type: one of "all", "merge", "none"; default "merge"
public static final StringSetting INDICES_STORE_THROTTLE_TYPE = new StringSetting(
    Sets.newHashSet("all", "merge", "none")
) {
    @Override
    public String name() { return "type"; }

    @Override
    public String defaultValue() { return "merge"; }

    @Override
    public Setting parent() {
        return INDICES_STORE_THROTTLE;
    }
};

// throttle.max_bytes_per_sec: default 20 MB
public static final ByteSizeSetting INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC = new ByteSizeSetting() {
    @Override
    public String name() { return "max_bytes_per_sec"; }

    @Override
    public ByteSizeValue defaultValue() { return new ByteSizeValue(20, ByteSizeUnit.MB); }

    @Override
    public Setting parent() {
        return INDICES_STORE_THROTTLE;
    }
};
// indices.fielddata.breaker.*: fielddata circuit breaker
public static final NestedSetting INDICES_FIELDDATA = new NestedSetting() {
    @Override
    public String name() { return "fielddata"; }

    @Override
    public List<Setting> children() {
        return ImmutableList.<Setting>of(INDICES_FIELDDATA_BREAKER);
    }

    @Override
    public Setting parent() {
        return INDICES;
    }
};

public static final NestedSetting INDICES_FIELDDATA_BREAKER = new NestedSetting() {
    @Override
    public String name() { return "breaker"; }

    @Override
    public List<Setting> children() {
        return ImmutableList.<Setting>of(
            INDICES_FIELDDATA_BREAKER_LIMIT,
            INDICES_FIELDDATA_BREAKER_OVERHEAD
        );
    }

    @Override
    public Setting parent() {
        return INDICES_FIELDDATA;
    }
};

// fielddata.breaker.limit: default "60%" (of heap -- TODO confirm interpretation in appliers)
public static final StringSetting INDICES_FIELDDATA_BREAKER_LIMIT = new StringSetting() {
    @Override
    public String name() { return "limit"; }

    @Override
    public String defaultValue() { return "60%"; }

    @Override
    public Setting parent() {
        return INDICES_FIELDDATA_BREAKER;
    }
};

// fielddata.breaker.overhead: default 1.03
public static final DoubleSetting INDICES_FIELDDATA_BREAKER_OVERHEAD = new DoubleSetting() {
    @Override
    public String name() { return "overhead"; }

    @Override
    public Double defaultValue() { return 1.03; }

    @Override
    public Setting parent() {
        return INDICES_FIELDDATA_BREAKER;
    }
};
public static final NestedSetting INDICES_BREAKER = new NestedSetting() {
@Override
public String name() { return "breaker"; }
@Override
public List<Setting> children() {
return ImmutableList.<Setting>of(
INDICES_BREAKER_QUERY,
INDICES_BREAKER_REQUEST
);
}
@Override
public Setting parent() {
return INDICES;
}
};
public static final NestedSetting INDICES_BREAKER_QUERY = new NestedSetting() {
@Override
public String name() { return "query"; }
@Override
public List<Setting> children() {
return ImmutableList.<Setting>of(
INDICES_BREAKER_QUERY_LIMIT,
INDICES_BREAKER_QUERY_OVERHEAD
);
}
@Override
public Setting parent() {
return INDICES_BREAKER;
}
};
public static final StringSetting INDICES_BREAKER_QUERY_LIMIT = new StringSetting() {
@Override
public String name() { return "limit"; }
@Override
public String defaultValue() { return CrateCircuitBreakerService.DEFAULT_QUERY_CIRCUIT_BREAKER_LIMIT; }
@Override
public Setting parent() {
return INDICES_BREAKER_QUERY;
}
};
// Overhead factor of the query circuit breaker; default comes from CrateCircuitBreakerService.
public static final DoubleSetting INDICES_BREAKER_QUERY_OVERHEAD = new DoubleSetting() {
    @Override
    public String name() {
        return "overhead";
    }

    @Override
    public Double defaultValue() {
        return CrateCircuitBreakerService.DEFAULT_QUERY_CIRCUIT_BREAKER_OVERHEAD_CONSTANT;
    }

    @Override
    public Setting parent() {
        return INDICES_BREAKER_QUERY;
    }
};
// Container for the request circuit breaker settings (limit/overhead).
public static final NestedSetting INDICES_BREAKER_REQUEST = new NestedSetting() {
    @Override
    public String name() { return "request"; }

    @Override
    public List<Setting> children() {
        return ImmutableList.<Setting>of(INDICES_BREAKER_REQUEST_LIMIT, INDICES_BREAKER_REQUEST_OVERHEAD);
    }

    @Override
    public Setting parent() { return INDICES_BREAKER; }
};
// Memory limit of the request circuit breaker. Default: "40%".
public static final StringSetting INDICES_BREAKER_REQUEST_LIMIT = new StringSetting() {
    @Override
    public String name() {
        return "limit";
    }

    @Override
    public String defaultValue() {
        return "40%";
    }

    @Override
    public Setting parent() {
        return INDICES_BREAKER_REQUEST;
    }
};
// Overhead factor of the request circuit breaker. Default: 1.0.
public static final DoubleSetting INDICES_BREAKER_REQUEST_OVERHEAD = new DoubleSetting() {
    @Override
    public String name() {
        return "overhead";
    }

    @Override
    public Double defaultValue() {
        return 1.0;
    }

    @Override
    public Setting parent() {
        return INDICES_BREAKER_REQUEST;
    }
};
// Container for the "cluster.info.*" settings.
public static final NestedSetting CLUSTER_INFO = new NestedSetting() {
    @Override
    public String name() { return "info"; }

    @Override
    public List<Setting> children() {
        return ImmutableList.<Setting>of(CLUSTER_INFO_UPDATE);
    }

    @Override
    public Setting parent() { return CLUSTER; }
};
// Container for the "cluster.info.update.*" settings.
public static final NestedSetting CLUSTER_INFO_UPDATE = new NestedSetting() {
    @Override
    public String name() { return "update"; }

    @Override
    public List<Setting> children() {
        return ImmutableList.<Setting>of(CLUSTER_INFO_UPDATE_INTERVAL);
    }

    @Override
    public Setting parent() { return CLUSTER_INFO; }
};
// Interval between cluster info updates. Default: 30 seconds.
public static final TimeSetting CLUSTER_INFO_UPDATE_INTERVAL = new TimeSetting() {
    @Override
    public String name() { return "interval"; }

    @Override
    public TimeValue defaultValue() { return new TimeValue(30, TimeUnit.SECONDS); }

    @Override
    public Setting parent() { return CLUSTER_INFO_UPDATE; }
};
// Top-level container for the "bulk.*" settings. No parent() override,
// so it relies on NestedSetting's default (top-level group).
public static final NestedSetting BULK = new NestedSetting() {
    @Override
    public String name() { return "bulk"; }

    @Override
    public List<Setting> children() {
        return ImmutableList.<Setting>of(BULK_PARTITION_CREATION_TIMEOUT, BULK_REQUEST_TIMEOUT);
    }
};
// Timeout applied to bulk requests. Default: 1 minute.
public static final TimeSetting BULK_REQUEST_TIMEOUT = new TimeSetting() {
    @Override
    public String name() { return "request_timeout"; }

    @Override
    public TimeValue defaultValue() { return new TimeValue(1, TimeUnit.MINUTES); }

    @Override
    public Setting parent() { return BULK; }
};
// Timeout for creating partitions during bulk operations. Default: 10 seconds.
public static final TimeSetting BULK_PARTITION_CREATION_TIMEOUT = new TimeSetting() {
    @Override
    public String name() { return "partition_creation_timeout"; }

    @Override
    public TimeValue defaultValue() { return new TimeValue(10, TimeUnit.SECONDS); }

    @Override
    public Setting parent() { return BULK; }
};
// Top-level container for the "gateway.*" recovery settings. No parent()
// override, so it relies on NestedSetting's default (top-level group).
public static final NestedSetting GATEWAY = new NestedSetting() {
    @Override
    public String name() { return "gateway"; }

    @Override
    public List<Setting> children() {
        return ImmutableList.<Setting>of(GATEWAY_RECOVERY_AFTER_NODES, GATEWAY_EXPECTED_NODES, GATEWAY_RECOVER_AFTER_TIME);
    }
};
// Number of nodes that must be present before recovery starts.
// Default: -1 (presumably "not set" — confirm against the gateway module).
public static final IntSetting GATEWAY_RECOVERY_AFTER_NODES = new IntSetting() {
    @Override
    public String name() { return "recover_after_nodes"; }

    @Override
    public Integer defaultValue() { return -1; }

    @Override
    public Setting parent() { return GATEWAY; }
};
// Number of nodes expected in the cluster.
// Default: -1 (presumably "not set" — confirm against the gateway module).
public static final IntSetting GATEWAY_EXPECTED_NODES = new IntSetting() {
    @Override
    public String name() { return "expected_nodes"; }

    @Override
    public Integer defaultValue() { return -1; }

    @Override
    public Setting parent() { return GATEWAY; }
};
// How long to wait before starting recovery. Default: 5 minutes.
public static final TimeSetting GATEWAY_RECOVER_AFTER_TIME = new TimeSetting() {
    @Override
    public String name() { return "recover_after_time"; }

    @Override
    public TimeValue defaultValue() { return TimeValue.timeValueMinutes(5); }

    @Override
    public Setting parent() { return GATEWAY; }
};
// Top-level container for the "udc.*" (usage data collector) settings.
// No parent() override, so it relies on NestedSetting's default (top-level group).
public static final NestedSetting UDC = new NestedSetting() {
    @Override
    public String name() { return "udc"; }

    @Override
    public List<Setting> children() {
        return ImmutableList.<Setting>of(UDC_ENABLED, UDC_INITIAL_DELAY, UDC_INTERVAL, UDC_URL);
    }
};
// Whether usage data collection is enabled. Default: true.
public static final BoolSetting UDC_ENABLED = new BoolSetting() {
    @Override
    public String name() { return "enabled"; }

    @Override
    public Boolean defaultValue() { return true; }

    @Override
    public Setting parent() { return UDC; }
};
// Delay before the first usage data report. Default: 10 minutes.
public static final TimeSetting UDC_INITIAL_DELAY = new TimeSetting() {
    @Override
    public String name() { return "initial_delay"; }

    @Override
    public TimeValue defaultValue() { return TimeValue.timeValueMinutes(10); }

    @Override
    public Setting parent() { return UDC; }
};
// Interval between usage data reports. Default: 24 hours.
public static final TimeSetting UDC_INTERVAL = new TimeSetting() {
    @Override
    public String name() { return "interval"; }

    @Override
    public TimeValue defaultValue() { return TimeValue.timeValueHours(24); }

    @Override
    public Setting parent() { return UDC; }
};
// Endpoint that usage data is sent to. Default: "https://udc.crate.io".
public static final StringSetting UDC_URL = new StringSetting() {
    @Override
    public String name() { return "url"; }

    @Override
    public String defaultValue() { return "https://udc.crate.io"; }

    @Override
    public Setting parent() { return UDC; }
};
public static final ImmutableList<Setting> CRATE_SETTINGS = ImmutableList.<Setting>of(STATS, CLUSTER, DISCOVERY, INDICES, BULK, GATEWAY, UDC);
/**
 * Registry mapping each supported setting's fully-qualified name (via
 * {@code settingName()}) to the {@link SettingsApplier} that validates and
 * applies its value. Object/group nodes get an ObjectSettingsApplier; leaf
 * settings get a type-specific applier matching their declared type.
 *
 * NOTE(review): the UDC settings declared above are intentionally(?) absent
 * from this registry — presumably they are not runtime-updatable; confirm.
 */
public static final Map<String, SettingsApplier> SUPPORTED_SETTINGS = ImmutableMap.<String, SettingsApplier>builder()
        // stats.*
        .put(CrateSettings.STATS.settingName(),
                new SettingsAppliers.ObjectSettingsApplier(CrateSettings.STATS))
        .put(CrateSettings.STATS_JOBS_LOG_SIZE.settingName(),
                new SettingsAppliers.IntSettingsApplier(CrateSettings.STATS_JOBS_LOG_SIZE))
        .put(CrateSettings.STATS_OPERATIONS_LOG_SIZE.settingName(),
                new SettingsAppliers.IntSettingsApplier(CrateSettings.STATS_OPERATIONS_LOG_SIZE))
        .put(CrateSettings.STATS_ENABLED.settingName(),
                new SettingsAppliers.BooleanSettingsApplier(CrateSettings.STATS_ENABLED))
        // cluster.* / graceful stop
        .put(CrateSettings.CLUSTER.settingName(),
                new SettingsAppliers.ObjectSettingsApplier(CrateSettings.CLUSTER))
        .put(CrateSettings.GRACEFUL_STOP.settingName(),
                new SettingsAppliers.ObjectSettingsApplier(CrateSettings.GRACEFUL_STOP))
        .put(CrateSettings.GRACEFUL_STOP_MIN_AVAILABILITY.settingName(),
                new SettingsAppliers.StringSettingsApplier(CrateSettings.GRACEFUL_STOP_MIN_AVAILABILITY))
        .put(CrateSettings.GRACEFUL_STOP_REALLOCATE.settingName(),
                new SettingsAppliers.BooleanSettingsApplier(CrateSettings.GRACEFUL_STOP_REALLOCATE))
        .put(CrateSettings.GRACEFUL_STOP_FORCE.settingName(),
                new SettingsAppliers.BooleanSettingsApplier(CrateSettings.GRACEFUL_STOP_FORCE))
        .put(CrateSettings.GRACEFUL_STOP_TIMEOUT.settingName(),
                new SettingsAppliers.TimeSettingsApplier(CrateSettings.GRACEFUL_STOP_TIMEOUT))
        // discovery.*
        .put(CrateSettings.DISCOVERY.settingName(),
                new SettingsAppliers.ObjectSettingsApplier(CrateSettings.DISCOVERY))
        .put(CrateSettings.DISCOVERY_ZEN.settingName(),
                new SettingsAppliers.ObjectSettingsApplier(CrateSettings.DISCOVERY_ZEN))
        .put(CrateSettings.DISCOVERY_ZEN_MIN_MASTER_NODES.settingName(),
                new SettingsAppliers.IntSettingsApplier(CrateSettings.DISCOVERY_ZEN_MIN_MASTER_NODES))
        .put(CrateSettings.DISCOVERY_ZEN_PING_TIMEOUT.settingName(),
                new SettingsAppliers.TimeSettingsApplier(CrateSettings.DISCOVERY_ZEN_PING_TIMEOUT))
        .put(CrateSettings.DISCOVERY_ZEN_PUBLISH_TIMEOUT.settingName(),
                new SettingsAppliers.TimeSettingsApplier(CrateSettings.DISCOVERY_ZEN_PUBLISH_TIMEOUT))
        // routing.allocation.*
        .put(CrateSettings.ROUTING.settingName(),
                new SettingsAppliers.ObjectSettingsApplier(CrateSettings.ROUTING))
        .put(CrateSettings.ROUTING_ALLOCATION.settingName(),
                new SettingsAppliers.ObjectSettingsApplier(CrateSettings.ROUTING_ALLOCATION))
        .put(CrateSettings.ROUTING_ALLOCATION_ENABLE.settingName(),
                new SettingsAppliers.StringSettingsApplier(CrateSettings.ROUTING_ALLOCATION_ENABLE))
        .put(CrateSettings.ROUTING_ALLOCATION_ALLOW_REBALANCE.settingName(),
                new SettingsAppliers.StringSettingsApplier(CrateSettings.ROUTING_ALLOCATION_ALLOW_REBALANCE))
        .put(CrateSettings.ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE.settingName(),
                new SettingsAppliers.IntSettingsApplier(CrateSettings.ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE))
        .put(CrateSettings.ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES.settingName(),
                new SettingsAppliers.IntSettingsApplier(CrateSettings.ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES))
        .put(CrateSettings.ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES.settingName(),
                new SettingsAppliers.IntSettingsApplier(CrateSettings.ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES))
        .put(CrateSettings.ROUTING_ALLOCATION_INCLUDE.settingName(),
                new SettingsAppliers.ObjectSettingsApplier(CrateSettings.ROUTING_ALLOCATION_INCLUDE))
        .put(CrateSettings.ROUTING_ALLOCATION_INCLUDE_IP.settingName(),
                new SettingsAppliers.StringSettingsApplier(CrateSettings.ROUTING_ALLOCATION_INCLUDE_IP))
        .put(CrateSettings.ROUTING_ALLOCATION_INCLUDE_ID.settingName(),
                new SettingsAppliers.StringSettingsApplier(CrateSettings.ROUTING_ALLOCATION_INCLUDE_ID))
        .put(CrateSettings.ROUTING_ALLOCATION_INCLUDE_HOST.settingName(),
                new SettingsAppliers.StringSettingsApplier(CrateSettings.ROUTING_ALLOCATION_INCLUDE_HOST))
        .put(CrateSettings.ROUTING_ALLOCATION_INCLUDE_NAME.settingName(),
                new SettingsAppliers.StringSettingsApplier(CrateSettings.ROUTING_ALLOCATION_INCLUDE_NAME))
        .put(CrateSettings.ROUTING_ALLOCATION_EXCLUDE.settingName(),
                new SettingsAppliers.ObjectSettingsApplier(CrateSettings.ROUTING_ALLOCATION_EXCLUDE))
        .put(CrateSettings.ROUTING_ALLOCATION_EXCLUDE_IP.settingName(),
                new SettingsAppliers.StringSettingsApplier(CrateSettings.ROUTING_ALLOCATION_EXCLUDE_IP))
        .put(CrateSettings.ROUTING_ALLOCATION_EXCLUDE_ID.settingName(),
                new SettingsAppliers.StringSettingsApplier(CrateSettings.ROUTING_ALLOCATION_EXCLUDE_ID))
        .put(CrateSettings.ROUTING_ALLOCATION_EXCLUDE_HOST.settingName(),
                new SettingsAppliers.StringSettingsApplier(CrateSettings.ROUTING_ALLOCATION_EXCLUDE_HOST))
        .put(CrateSettings.ROUTING_ALLOCATION_EXCLUDE_NAME.settingName(),
                new SettingsAppliers.StringSettingsApplier(CrateSettings.ROUTING_ALLOCATION_EXCLUDE_NAME))
        .put(CrateSettings.ROUTING_ALLOCATION_REQUIRE.settingName(),
                new SettingsAppliers.ObjectSettingsApplier(CrateSettings.ROUTING_ALLOCATION_REQUIRE))
        .put(CrateSettings.ROUTING_ALLOCATION_REQUIRE_IP.settingName(),
                new SettingsAppliers.StringSettingsApplier(CrateSettings.ROUTING_ALLOCATION_REQUIRE_IP))
        .put(CrateSettings.ROUTING_ALLOCATION_REQUIRE_ID.settingName(),
                new SettingsAppliers.StringSettingsApplier(CrateSettings.ROUTING_ALLOCATION_REQUIRE_ID))
        .put(CrateSettings.ROUTING_ALLOCATION_REQUIRE_HOST.settingName(),
                new SettingsAppliers.StringSettingsApplier(CrateSettings.ROUTING_ALLOCATION_REQUIRE_HOST))
        .put(CrateSettings.ROUTING_ALLOCATION_REQUIRE_NAME.settingName(),
                new SettingsAppliers.StringSettingsApplier(CrateSettings.ROUTING_ALLOCATION_REQUIRE_NAME))
        .put(CrateSettings.ROUTING_ALLOCATION_BALANCE.settingName(),
                new SettingsAppliers.ObjectSettingsApplier(CrateSettings.ROUTING_ALLOCATION_BALANCE))
        .put(CrateSettings.ROUTING_ALLOCATION_BALANCE_SHARD.settingName(),
                new SettingsAppliers.FloatSettingsApplier(CrateSettings.ROUTING_ALLOCATION_BALANCE_SHARD))
        .put(CrateSettings.ROUTING_ALLOCATION_BALANCE_INDEX.settingName(),
                new SettingsAppliers.FloatSettingsApplier(CrateSettings.ROUTING_ALLOCATION_BALANCE_INDEX))
        .put(CrateSettings.ROUTING_ALLOCATION_BALANCE_PRIMARY.settingName(),
                new SettingsAppliers.FloatSettingsApplier(CrateSettings.ROUTING_ALLOCATION_BALANCE_PRIMARY))
        .put(CrateSettings.ROUTING_ALLOCATION_BALANCE_THRESHOLD.settingName(),
                new SettingsAppliers.FloatSettingsApplier(CrateSettings.ROUTING_ALLOCATION_BALANCE_THRESHOLD))
        .put(CrateSettings.ROUTING_ALLOCATION_DISK.settingName(),
                new SettingsAppliers.ObjectSettingsApplier(CrateSettings.ROUTING_ALLOCATION_DISK))
        .put(CrateSettings.ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED.settingName(),
                new SettingsAppliers.BooleanSettingsApplier(CrateSettings.ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED))
        .put(CrateSettings.ROUTING_ALLOCATION_DISK_WATERMARK.settingName(),
                new SettingsAppliers.ObjectSettingsApplier(CrateSettings.ROUTING_ALLOCATION_DISK_WATERMARK))
        .put(CrateSettings.ROUTING_ALLOCATION_DISK_WATERMARK_LOW.settingName(),
                new SettingsAppliers.MemoryValueSettingsApplier(CrateSettings.ROUTING_ALLOCATION_DISK_WATERMARK_LOW))
        .put(CrateSettings.ROUTING_ALLOCATION_DISK_WATERMARK_HIGH.settingName(),
                new SettingsAppliers.MemoryValueSettingsApplier(CrateSettings.ROUTING_ALLOCATION_DISK_WATERMARK_HIGH))
        // indices.*
        .put(CrateSettings.INDICES.settingName(),
                new SettingsAppliers.ObjectSettingsApplier(CrateSettings.INDICES))
        .put(CrateSettings.INDICES_RECOVERY.settingName(),
                new SettingsAppliers.ObjectSettingsApplier(CrateSettings.INDICES_RECOVERY))
        .put(CrateSettings.INDICES_RECOVERY_CONCURRENT_STREAMS.settingName(),
                new SettingsAppliers.IntSettingsApplier(CrateSettings.INDICES_RECOVERY_CONCURRENT_STREAMS))
        .put(CrateSettings.INDICES_RECOVERY_FILE_CHUNK_SIZE.settingName(),
                new SettingsAppliers.ByteSizeSettingsApplier(CrateSettings.INDICES_RECOVERY_FILE_CHUNK_SIZE))
        .put(CrateSettings.INDICES_RECOVERY_TRANSLOG_OPS.settingName(),
                new SettingsAppliers.IntSettingsApplier(CrateSettings.INDICES_RECOVERY_TRANSLOG_OPS))
        .put(CrateSettings.INDICES_RECOVERY_TRANSLOG_SIZE.settingName(),
                new SettingsAppliers.ByteSizeSettingsApplier(CrateSettings.INDICES_RECOVERY_TRANSLOG_SIZE))
        .put(CrateSettings.INDICES_RECOVERY_COMPRESS.settingName(),
                new SettingsAppliers.BooleanSettingsApplier(CrateSettings.INDICES_RECOVERY_COMPRESS))
        .put(CrateSettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC.settingName(),
                new SettingsAppliers.ByteSizeSettingsApplier(CrateSettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC))
        .put(CrateSettings.INDICES_STORE.settingName(),
                new SettingsAppliers.ObjectSettingsApplier(CrateSettings.INDICES_STORE))
        .put(CrateSettings.INDICES_STORE_THROTTLE.settingName(),
                new SettingsAppliers.ObjectSettingsApplier(CrateSettings.INDICES_STORE_THROTTLE))
        .put(CrateSettings.INDICES_STORE_THROTTLE_TYPE.settingName(),
                new SettingsAppliers.StringSettingsApplier(CrateSettings.INDICES_STORE_THROTTLE_TYPE))
        .put(CrateSettings.INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC.settingName(),
                new SettingsAppliers.ByteSizeSettingsApplier(CrateSettings.INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC))
        .put(CrateSettings.INDICES_FIELDDATA.settingName(),
                new SettingsAppliers.ObjectSettingsApplier(CrateSettings.INDICES_FIELDDATA))
        .put(CrateSettings.INDICES_FIELDDATA_BREAKER.settingName(),
                new SettingsAppliers.ObjectSettingsApplier(CrateSettings.INDICES_FIELDDATA_BREAKER))
        .put(CrateSettings.INDICES_FIELDDATA_BREAKER_LIMIT.settingName(),
                new SettingsAppliers.MemoryValueSettingsApplier(CrateSettings.INDICES_FIELDDATA_BREAKER_LIMIT))
        .put(CrateSettings.INDICES_FIELDDATA_BREAKER_OVERHEAD.settingName(),
                new SettingsAppliers.DoubleSettingsApplier(CrateSettings.INDICES_FIELDDATA_BREAKER_OVERHEAD))
        .put(CrateSettings.INDICES_BREAKER.settingName(),
                new SettingsAppliers.ObjectSettingsApplier(CrateSettings.INDICES_BREAKER))
        .put(CrateSettings.INDICES_BREAKER_REQUEST.settingName(),
                new SettingsAppliers.ObjectSettingsApplier(CrateSettings.INDICES_BREAKER_REQUEST))
        .put(CrateSettings.INDICES_BREAKER_REQUEST_LIMIT.settingName(),
                new SettingsAppliers.MemoryValueSettingsApplier(CrateSettings.INDICES_BREAKER_REQUEST_LIMIT))
        .put(CrateSettings.INDICES_BREAKER_REQUEST_OVERHEAD.settingName(),
                new SettingsAppliers.DoubleSettingsApplier(CrateSettings.INDICES_BREAKER_REQUEST_OVERHEAD))
        .put(CrateSettings.INDICES_BREAKER_QUERY.settingName(),
                new SettingsAppliers.ObjectSettingsApplier(CrateSettings.INDICES_BREAKER_QUERY))
        .put(CrateSettings.INDICES_BREAKER_QUERY_LIMIT.settingName(),
                new SettingsAppliers.MemoryValueSettingsApplier(CrateSettings.INDICES_BREAKER_QUERY_LIMIT))
        .put(CrateSettings.INDICES_BREAKER_QUERY_OVERHEAD.settingName(),
                new SettingsAppliers.DoubleSettingsApplier(CrateSettings.INDICES_BREAKER_QUERY_OVERHEAD))
        // cluster.info.*
        .put(CrateSettings.CLUSTER_INFO.settingName(),
                new SettingsAppliers.ObjectSettingsApplier(CrateSettings.CLUSTER_INFO))
        .put(CrateSettings.CLUSTER_INFO_UPDATE.settingName(),
                new SettingsAppliers.ObjectSettingsApplier(CrateSettings.CLUSTER_INFO_UPDATE))
        .put(CrateSettings.CLUSTER_INFO_UPDATE_INTERVAL.settingName(),
                new SettingsAppliers.TimeSettingsApplier(CrateSettings.CLUSTER_INFO_UPDATE_INTERVAL))
        // bulk.*
        .put(CrateSettings.BULK.settingName(),
                new SettingsAppliers.ObjectSettingsApplier(CrateSettings.BULK))
        .put(CrateSettings.BULK_REQUEST_TIMEOUT.settingName(),
                new SettingsAppliers.TimeSettingsApplier(CrateSettings.BULK_REQUEST_TIMEOUT))
        .put(CrateSettings.BULK_PARTITION_CREATION_TIMEOUT.settingName(),
                new SettingsAppliers.TimeSettingsApplier(CrateSettings.BULK_PARTITION_CREATION_TIMEOUT))
        // gateway.*
        .put(CrateSettings.GATEWAY.settingName(),
                new SettingsAppliers.ObjectSettingsApplier(CrateSettings.GATEWAY))
        .put(CrateSettings.GATEWAY_EXPECTED_NODES.settingName(),
                new SettingsAppliers.IntSettingsApplier(CrateSettings.GATEWAY_EXPECTED_NODES))
        .put(CrateSettings.GATEWAY_RECOVER_AFTER_TIME.settingName(),
                new SettingsAppliers.TimeSettingsApplier(CrateSettings.GATEWAY_RECOVER_AFTER_TIME))
        .put(CrateSettings.GATEWAY_RECOVERY_AFTER_NODES.settingName(),
                new SettingsAppliers.IntSettingsApplier(CrateSettings.GATEWAY_RECOVERY_AFTER_NODES))
        .build();
/**
 * Returns the applier registered for the given fully-qualified setting
 * name, or {@code null} if the name is not a supported setting.
 */
@Nullable
public static SettingsApplier getSetting(String name) {
    final SettingsApplier applier = SUPPORTED_SETTINGS.get(name);
    return applier;
}
/**
 * Resolves all supported setting names covered by {@code prefix}.
 * If the prefix names a concrete (non-object) setting exactly, only that
 * name is returned; otherwise every registered name underneath
 * {@code prefix + "."} is collected.
 */
public static Set<String> settingNamesByPrefix(String prefix) {
    SettingsApplier applier = SUPPORTED_SETTINGS.get(prefix);
    if (applier != null && !(applier instanceof SettingsAppliers.ObjectSettingsApplier)) {
        // Exact hit on a leaf setting — the prefix itself is the answer.
        return Sets.newHashSet(prefix);
    }
    Set<String> matches = Sets.newHashSet();
    String childPrefix = prefix + ".";
    for (String candidate : SUPPORTED_SETTINGS.keySet()) {
        if (candidate.startsWith(childPrefix)) {
            matches.add(candidate);
        }
    }
    return matches;
}
}
// NOTE(review): removed non-code artifact ("Subsets and Splits" dataset-viewer
// residue) that was accidentally appended after the closing brace of the class;
// it was not valid Java and broke compilation.