gt stringclasses 1 value | context stringlengths 2.05k 161k |
|---|---|
/*
* Created on Nov 4, 2004
*
* This file is part of susimail project, see http://susi.i2p/
*
* Copyright (C) 2004-2005 <susi23@mail.i2p>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*
* $Revision: 1.1 $
*/
package i2p.susi.webmail.pop3;
import i2p.susi.debug.Debug;
import i2p.susi.webmail.Messages;
import i2p.susi.webmail.NewMailListener;
import i2p.susi.webmail.WebMail;
import i2p.susi.util.Config;
import i2p.susi.util.ReadBuffer;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.Socket;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicLong;
import net.i2p.data.DataHelper;
/**
* @author susi23
*/
public class POP3MailBox implements NewMailListener {
private final String host, user, pass;
private String lastLine, lastError;
private final int port;
private int mails;
private boolean connected;
private boolean gotCAPA;
private boolean supportsPipelining;
private boolean supportsTOP;
private boolean supportsUIDL;
/** ID to size */
private final HashMap<Integer, Integer> sizes;
/** UIDL to ID */
private final HashMap<String, Integer> uidlToID;
private Socket socket;
private final AtomicLong lastActive;
private final AtomicLong lastChecked;
private final Object synchronizer;
private final DelayedDeleter delayedDeleter;
// instantiated after first successful connection
private BackgroundChecker backgroundChecker;
// instantiated after every successful connection
private IdleCloser idleCloser;
private volatile NewMailListener newMailListener;
/**
 * Builds a mailbox for the given POP3 account.
 * Does not connect; callers invoke connectToServer() when ready.
 *
 * @param host POP3 server hostname
 * @param port POP3 server port
 * @param user account name
 * @param pass account password (never logged)
 */
public POP3MailBox(String host, int port, String user, String pass) {
    Debug.debug(Debug.DEBUG,
            "Mailbox(" + host + "," + port + "," + user + ",password)");
    this.host = host;
    this.port = port;
    this.user = user;
    this.pass = pass;
    this.uidlToID = new HashMap<String, Integer>();
    this.sizes = new HashMap<Integer, Integer>();
    this.synchronizer = new Object();
    // shown in the UI, so it must be translated
    this.lastLine = _("No response from server");
    this.lastActive = new AtomicLong(System.currentTimeMillis());
    this.lastChecked = new AtomicLong();
    this.delayedDeleter = new DelayedDeleter(this);
}
/**
 * Fetch the header for the given UIDL. Does not cache.
 *
 * @param uidl the message UIDL
 * @return buffer containing the header bytes, or null if unknown/unreachable
 */
public ReadBuffer getHeader(String uidl) {
    synchronized (synchronizer) {
        try {
            // the UIDL-to-ID mapping is only valid while connected
            checkConnection();
        } catch (IOException ioe) {
            Debug.debug(Debug.DEBUG, "Error fetching header: " + ioe);
            return null;
        }
        int id = getIDfromUIDL(uidl);
        return (id < 0) ? null : getHeader(id);
    }
}
/**
 * Retrieves a header from the POP3 server, trying TOP first and
 * falling back to a full RETR.
 * Caller must sync.
 *
 * @param id message id, 1-based
 * @return buffer containing the header bytes, or null on failure
 */
private ReadBuffer getHeader(int id) {
    Debug.debug(Debug.DEBUG, "getHeader(" + id + ")");
    if (id < 1 || id > mails) {
        lastError = "Message id out of range.";
        return null;
    }
    // 'TOP n 0' returns just the header
    ReadBuffer header = sendCmdN("TOP " + id + " 0");
    if (header == null) {
        // server may not support TOP; fetch the whole message instead
        header = sendCmdN("RETR " + id);
        if (header == null)
            Debug.debug(Debug.DEBUG, "RETR returned null");
    }
    return header;
}
/**
 * Fetch the body for the given UIDL. Does not cache.
 *
 * @param uidl the message UIDL
 * @return buffer containing the body bytes, or null if unknown/unreachable
 */
public ReadBuffer getBody(String uidl) {
    synchronized (synchronizer) {
        try {
            // the UIDL-to-ID mapping is only valid while connected
            checkConnection();
        } catch (IOException ioe) {
            Debug.debug(Debug.DEBUG, "Error fetching body: " + ioe);
            return null;
        }
        int id = getIDfromUIDL(uidl);
        return (id < 0) ? null : getBody(id);
    }
}
/**
 * Fetch headers and/or bodies for several messages in one pass.
 * Does not cache. Result ReadBuffers are handed back to each request
 * via FetchRequest.setBuffer(). No total time limit.
 *
 * @param requests the UIDLs to fetch; unknown UIDLs are skipped silently
 * @since 0.9.13
 */
public void getBodies(Collection<FetchRequest> requests) {
    // one SendRecv per request we can resolve to a message ID
    List<SendRecv> srs = new ArrayList<SendRecv>(requests.size());
    synchronized( synchronizer ) {
        try {
            // we must be connected to know the UIDL to ID mapping
            checkConnection();
        } catch (IOException ioe) {
            Debug.debug( Debug.DEBUG, "Error fetching: " + ioe);
            return;
        }
        for (FetchRequest fr : requests) {
            int id = getIDfromUIDL(fr.getUIDL());
            if (id < 0)
                continue;  // UIDL unknown to the server, skip
            SendRecv sr;
            // 'TOP n 0' fetches only the header, when the server supports it
            if (fr.getHeaderOnly() && supportsTOP)
                sr = new SendRecv("TOP " + id + " 0", Mode.RB);
            else
                sr = new SendRecv("RETR " + id, Mode.RB);
            // remember the originating request for the result loop below
            sr.savedObject = fr;
            srs.add(sr);
        }
        if (srs.isEmpty())
            return;
        try {
            // sent pipelined when the server advertises PIPELINING
            sendCmds(srs);
        } catch (IOException ioe) {
            Debug.debug( Debug.DEBUG, "Error fetching bodies: " + ioe);
            // todo maybe
        }
    }
    // deliver each successful response buffer back to its request
    for (SendRecv sr : srs) {
        if (sr.result) {
            FetchRequest fr = (FetchRequest) sr.savedObject;
            fr.setBuffer(sr.rb);
        }
    }
}
/**
 * Retrieves a message body from the POP3 server via RETR.
 * Caller must sync.
 *
 * @param id message id, 1-based
 * @return buffer containing the body bytes, or null on failure
 */
private ReadBuffer getBody(int id) {
    Debug.debug(Debug.DEBUG, "getBody(" + id + ")");
    if (id < 1 || id > mails) {
        lastError = "Message id out of range.";
        return null;
    }
    ReadBuffer body = null;
    try {
        body = sendCmdN("RETR " + id);
        if (body == null)
            Debug.debug(Debug.DEBUG, "RETR returned null");
    } catch (OutOfMemoryError oom) {
        // a huge message can exhaust the heap; record it and drop the connection
        Debug.debug(Debug.ERROR, "OOM fetching mail");
        lastError = oom.toString();
        close();
    }
    return body;
}
/**
* Call performDelete() after this or they will come back
* UNUSED
*
* @param uidl
* @return Success of delete operation: true if successful.
*/
/****
public boolean delete( String uidl )
{
Debug.debug(Debug.DEBUG, "delete(" + uidl + ")");
synchronized( synchronizer ) {
try {
// we must be connected to know the UIDL to ID mapping
checkConnection();
} catch (IOException ioe) {
Debug.debug( Debug.DEBUG, "Error deleting: " + ioe);
return false;
}
int id = getIDfromUIDL(uidl);
if (id < 0)
return false;
return delete(id);
}
}
****/
/**
 * Queue a batch of UIDLs for later deletion. Non-blocking.
 *
 * @param uidls the messages to delete eventually
 * @since 0.9.13
 */
public void queueForDeletion(Collection<String> uidls) {
    for (String u : uidls)
        queueForDeletion(u);
}
/**
 * Queue a single UIDL for later deletion by the DelayedDeleter task.
 * Non-blocking.
 *
 * @param uidl the message to delete eventually
 * @since 0.9.13
 */
public void queueForDeletion(String uidl) {
    Debug.debug(Debug.DEBUG, "Queueing for deletion: " + uidl);
    delayedDeleter.queueDelete(uidl);
}
/**
 * Delete all at once and close. Does not reconnect.
 * Do NOT call performDelete() after this.
 * Returns all UIDLs successfully deleted OR were not known by the server.
 *
 * @param uidls the messages to delete
 * @since 0.9.13
 */
Collection<String> delete(Collection<String> uidls) {
    List<String> rv = new ArrayList<String>(uidls.size());
    // + 1 for the trailing QUIT
    List<SendRecv> srs = new ArrayList<SendRecv>(uidls.size() + 1);
    synchronized( synchronizer ) {
        try {
            // we must be connected to know the UIDL to ID mapping
            checkConnection();
        } catch (IOException ioe) {
            Debug.debug( Debug.DEBUG, "Error deleting: " + ioe);
            return rv;
        }
        for (String uidl : uidls) {
            int id = getIDfromUIDL(uidl);
            if (id < 0) {
                // presumed already deleted
                rv.add(uidl);
                continue;
            }
            SendRecv sr = new SendRecv("DELE " + id, Mode.A1);
            // remember the UIDL so we can report it as deleted below
            sr.savedObject = uidl;
            srs.add(sr);
        }
        if (srs.isEmpty())
            return rv;
        // TODO don't quit now, just set timer to quit later
        // QUIT is what commits the DELEs on the server
        SendRecv quit = new SendRecv("QUIT", Mode.A1);
        srs.add(quit);
        try {
            sendCmds(srs);
            // do NOT call close() here, we included QUIT above
            try {
                socket.close();
            } catch (IOException e) {}
            clear();
            // result of QUIT
            boolean success = srs.get(srs.size() - 1).result;
            if (success) {
                // QUIT succeeded, so all the DELEs were committed
                for (int i = 0; i < srs.size() - 1; i++) {
                    SendRecv sr = srs.get(i);
                    // ignore sr.result, if it failed it's because
                    // it's already deleted
                    rv.add((String) sr.savedObject);
                }
            }
            // why reconnect?
            //connect();
        } catch (IOException ioe) {
            Debug.debug( Debug.DEBUG, "Error deleting: " + ioe);
            // todo maybe
        }
    }
    return rv;
}
/**
* delete message on pop3 server
* UNUSED
*
* @param id message id
* @return Success of delete operation: true if successful.
*/
/****
private boolean delete(int id)
{
Debug.debug(Debug.DEBUG, "delete(" + id + ")");
boolean result = false;
synchronized( synchronizer ) {
try {
result = sendCmd1a( "DELE " + id );
}
catch (IOException e) {
}
}
return result;
}
****/
/**
 * Get the cached size of a message (populated by the LIST at connect time).
 *
 * @param uidl the message UIDL
 * @return message size in bytes, or 0 if unknown
 */
public int getSize(String uidl) {
    synchronized (synchronizer) {
        int id = getIDfromUIDL(uidl);
        return (id < 0) ? 0 : getSize(id);
    }
}
/**
 * Get the cached size of a message (populated by the LIST at connect time).
 * Caller must sync.
 *
 * @param id message id
 * @return message size in bytes, or 0 if not in the cache
 */
private int getSize(int id) {
    Integer cached = sizes.get(Integer.valueOf(id));
    int result = (cached != null) ? cached.intValue() : 0;
    Debug.debug(Debug.DEBUG, "getSize(" + id + ") = " + result);
    return result;
}
/**
 * Is the connection still alive?
 * Clears the connected flag if the socket has gone away.
 *
 * @return true if we believe the connection is usable
 */
public boolean isConnected() {
    boolean dead = socket == null
            || !socket.isConnected()
            || socket.isInputShutdown()
            || socket.isOutputShutdown()
            || socket.isClosed();
    if (dead)
        connected = false;
    return connected;
}
/**
 * If not connected, connect now.
 * Should be called from all public methods before sending a command.
 * Caller must sync.
 *
 * @throws IOException if the connection attempt fails
 */
private void checkConnection() throws IOException {
    Debug.debug(Debug.DEBUG, "checkConnection()");
    if (!isConnected()) {
        connect();
        // connect() reports errors via lastError rather than throwing
        if (!isConnected())
            throw new IOException("Cannot connect");
    }
}
/**
 * Record now as the last time we talked to the server,
 * read by the IdleCloser via getLastActivity().
 *
 * @since 0.9.13
 */
private void updateActivity() {
    lastActive.set(System.currentTimeMillis());
}
/**
 * Timestamp of the last send or receive on the socket.
 *
 * @since 0.9.13
 */
long getLastActivity() {
    return lastActive.get();
}
/**
 * Timestamp. When we last successfully got the UIDL list.
 *
 * @since 0.9.13
 */
long getLastChecked() {
    return lastChecked.get();
}
/**
 * Parse the message count out of a STAT response and store it in mails.
 * Sets mails to 0 on any parse failure.
 *
 * @param response line starting with +OK, e.g. "+OK 3 2048", or null
 */
private void updateMailCount(String response) {
    if (response == null || response.length() < 4) {
        mails = 0;
        return;
    }
    response = response.trim();
    try {
        // count is the first token after "+OK "
        int i = response.indexOf(" ", 5);
        mails = Integer.parseInt(i != -1
                ? response.substring(4, i)
                : response.substring(4));
    } catch (NumberFormatException nfe) {
        mails = 0;
    } catch (IndexOutOfBoundsException ioobe) {
        // the length check above runs BEFORE trim(), so e.g. "+OK\r"
        // passes it but trims to "+OK" and substring(4) throws;
        // previously this escaped uncaught
        mails = 0;
    }
}
/**
 * Parse the lines of a UIDL response ("id uidl") into the UIDL-to-ID map,
 * and update the last-checked timestamp on success.
 * Caller must sync.
 *
 * @param lines the UIDL response lines, or null on failure
 */
private void updateUIDLs(List<String> lines) {
    uidlToID.clear();
    if (lines != null) {
        for (String line : lines) {
            int j = line.indexOf( " " );
            if( j != -1 ) {
                try {
                    int n = Integer.parseInt( line.substring( 0, j ) );
                    String uidl = line.substring(j + 1).trim();
                    uidlToID.put( uidl, Integer.valueOf( n ) );
                } catch (NumberFormatException nfe) {
                    // malformed line; skip it but keep the rest
                    Debug.debug(Debug.DEBUG, "UIDL error " + nfe);
                } catch (IndexOutOfBoundsException ioobe) {
                    Debug.debug(Debug.DEBUG, "UIDL error " + ioobe);
                }
            }
        }
        lastChecked.set(System.currentTimeMillis());
    } else {
        Debug.debug(Debug.DEBUG, "Error getting UIDL list from server.");
    }
}
/**
 * Parse the lines of a LIST response ("id size") into the size cache.
 * Clears any previously cached sizes first.
 * Caller must sync.
 *
 * @param lines the LIST response lines, or null on failure
 */
private void updateSizes(List<String> lines) {
    sizes.clear();
    if (lines == null) {
        Debug.debug(Debug.DEBUG, "Error getting LIST from server.");
        return;
    }
    for (String line : lines) {
        int sp = line.indexOf(" ");
        if (sp == -1)
            continue;
        try {
            int id = Integer.parseInt(line.substring(0, sp));
            int size = Integer.parseInt(line.substring(sp + 1).trim());
            sizes.put(Integer.valueOf(id), Integer.valueOf(size));
        } catch (NumberFormatException nfe) {
            // malformed line; skip it but keep the rest
            Debug.debug(Debug.DEBUG, "LIST error " + nfe);
        }
    }
}
/**
 * Close the connection — waiting for the server and committing any
 * queued deletions — then reconnect and reload the UIDL/size caches.
 */
public void refresh() {
    synchronized (synchronizer) {
        close(true);
        connect();
    }
}
/**
 * Reset all cached per-connection state: the UIDL map,
 * the size map and the mail count.
 * Caller must sync.
 */
private void clear()
{
    uidlToID.clear();
    sizes.clear();
    mails = 0;
}
/**
 * Connect to the POP3 server if not already connected.
 * Does nothing when a live connection exists.
 *
 * @return true if connected afterwards
 * @since 0.9.13
 */
public boolean connectToServer() {
    synchronized (synchronizer) {
        if (!isConnected())
            connect();
        return isConnected();
    }
}
/**
 * Connect to the POP3 server, log in with USER and PASS, then pipeline
 * STAT, UIDL and LIST to fill the mail count and the UIDL/size caches.
 * Errors are reported through lastError rather than thrown.
 * Caller must sync.
 */
private void connect() {
    Debug.debug(Debug.DEBUG, "connect()");
    if (Debug.getLevel() == Debug.DEBUG)
        (new Exception("I did it")).printStackTrace();  // trace who triggered the connect
    clear();
    if (socket != null && socket.isConnected())
        close();
    try {
        socket = new Socket(host, port);
    } catch (UnknownHostException e) {
        lastError = e.toString();
        return;
    } catch (IOException e) {
        Debug.debug( Debug.DEBUG, "Error connecting: " + e);
        lastError = e.toString();
        return;
    }
    if (socket != null) {
        try {
            // pipeline 2 commands
            lastError = "";
            socket.setSoTimeout(120*1000);
            // greeting + CAPA
            boolean ok = doHandshake();
            if (ok) {
                // TODO APOP (unsupported by postman)
                List<SendRecv> cmds = new ArrayList<SendRecv>(4);
                cmds.add(new SendRecv("USER " + user, Mode.A1));
                cmds.add(new SendRecv("PASS " + pass, Mode.A1));
                socket.setSoTimeout(60*1000);
                ok = sendCmds(cmds);
            }
            if (ok) {
                connected = true;
                List<SendRecv> cmds = new ArrayList<SendRecv>(4);
                SendRecv stat = new SendRecv("STAT", Mode.A1);
                cmds.add(stat);
                SendRecv uidl = new SendRecv("UIDL", Mode.LS);
                cmds.add(uidl);
                SendRecv list = new SendRecv("LIST", Mode.LS);
                cmds.add(list);
                // check individual responses
                socket.setSoTimeout(120*1000);
                ok = sendCmds(cmds);
                if (stat.result)
                    updateMailCount(stat.response);
                else
                    Debug.debug(Debug.DEBUG, "STAT failed");
                if (uidl.result)
                    updateUIDLs(uidl.ls);
                else
                    Debug.debug(Debug.DEBUG, "UIDL failed");
                if (list.result)
                    updateSizes(list.ls);
                else
                    Debug.debug(Debug.DEBUG, "LIST failed");
                socket.setSoTimeout(300*1000);
                // background checker is created only once, after the first success
                if (ok && backgroundChecker == null &&
                    Boolean.parseBoolean(Config.getProperty(WebMail.CONFIG_BACKGROUND_CHECK)))
                    backgroundChecker = new BackgroundChecker(this);
                // idle closer is recreated after every successful connection
                if (ok && idleCloser == null)
                    idleCloser = new IdleCloser(this);
            } else {
                if (lastError.equals(""))
                    lastError = _("Error connecting to server");
                close();
            }
        }
        catch (NumberFormatException e1) {
            lastError = _("Error opening mailbox") + ": " + e1;
        }
        catch (IOException e1) {
            lastError = _("Error opening mailbox") + ": " + e1.getLocalizedMessage();
        }
    }
}
/**
 * Check the initial server greeting, send CAPA (only once per mailbox),
 * and record the PIPELINING/UIDL/TOP capabilities.
 * Caller must sync.
 *
 * @return true if successful
 * @throws IOException on no response from the server
 * @since 0.9.13
 */
private boolean doHandshake() throws IOException {
    List<SendRecv> cmds = new ArrayList<SendRecv>(2);
    // null send: just consume the +OK greeting line
    cmds.add(new SendRecv(null, Mode.A1));
    SendRecv capa = null;
    if (gotCAPA) {
        Debug.debug(Debug.DEBUG, "Skipping CAPA");
    } else {
        capa = new SendRecv("CAPA", Mode.LS);
        cmds.add(capa);
    }
    boolean rv = sendCmds(cmds);
    if (rv && capa != null) {
        if (capa.ls != null) {
            // one capability per response line
            for (String cap : capa.ls) {
                String t = cap.trim();
                if (t.equals("PIPELINING"))
                    supportsPipelining = true;
                else if (t.equals("UIDL"))
                    supportsUIDL = true;
                else if (t.equals("TOP"))
                    supportsTOP = true;
            }
        }
        // never ask again for the life of this mailbox object
        gotCAPA = true;
        Debug.debug(Debug.DEBUG, "POP3 server caps: pipelining? " + supportsPipelining +
                    " UIDL? " + supportsUIDL +
                    " TOP? " + supportsTOP);
    }
    return rv;
}
/**
 * Send a command to the POP3 server and read the single-line response.
 * The response is stored in lastLine; does not read past the first line.
 * Caller must sync.
 *
 * @param cmd command to send (without CRLF)
 * @return true if the response was +OK
 * @throws IOException if there was no response at all
 */
private boolean sendCmd1a(String cmd) throws IOException {
    sendCmd1aNoWait(cmd);
    socket.getOutputStream().flush();
    String foo = DataHelper.readLine(socket.getInputStream());
    updateActivity();
    // mask the password once for every log path below;
    // previously the NO RESPONSE branch logged the raw PASS command
    if (cmd.startsWith("PASS"))
        cmd = "PASS provided";
    if (foo == null) {
        Debug.debug(Debug.DEBUG, "sendCmd1a: (" + cmd + ") NO RESPONSE");
        lastError = _("No response from server");
        throw new IOException(lastError);
    }
    lastLine = foo;
    boolean result = lastLine.startsWith("+OK");
    if (result) {
        Debug.debug(Debug.DEBUG, "sendCmd1a: (" + cmd + ") success: \"" + lastLine.trim() + '"');
    } else {
        Debug.debug(Debug.DEBUG, "sendCmd1a: (" + cmd + ") FAIL: \"" + lastLine.trim() + '"');
        lastError = lastLine;
    }
    return result;
}
/**
 * Send a batch of commands to the POP3 server — pipelined when the
 * server advertises PIPELINING — and read the response for each.
 * Per-command results and payloads are stored in the SendRecv objects.
 * Sets lastError to the FIRST error.
 * Caller must sync.
 *
 * @param cmds the commands to send; a null send field means read-only
 * @return true if ALL received lines were successful (+OK)
 * @throws IOException if any response line is missing entirely
 * @since 0.9.13
 */
private boolean sendCmds(List<SendRecv> cmds) throws IOException {
    boolean result = true;
    boolean pipe = supportsPipelining;
    if (pipe) {
        Debug.debug(Debug.DEBUG, "POP3 pipelining " + cmds.size() + " commands");
        // send everything up front; responses are read in order below
        for (SendRecv sr : cmds) {
            String cmd = sr.send;
            if (cmd != null)
                sendCmd1aNoWait(cmd);
        }
    } // else we will do it below
    socket.getOutputStream().flush();
    InputStream in = socket.getInputStream();
    int i = 0;
    for (SendRecv sr : cmds) {
        if (!pipe) {
            // no pipelining: send each command just before reading its reply
            String cmd = sr.send;
            if (cmd != null) {
                sendCmd1aNoWait(cmd);
                socket.getOutputStream().flush();
            }
        }
        String foo = DataHelper.readLine(in);
        updateActivity();
        if (foo == null) {
            lastError = _("No response from server");
            throw new IOException(lastError);
        }
        sr.response = foo.trim();
        i++;
        if (!foo.startsWith("+OK")) {
            Debug.debug(Debug.DEBUG, "Fail after " + i + " of " + cmds.size() + " responses: \"" + foo.trim() + '"');
            if (result)
                lastError = foo; // actually the first error, for better info to the user
            result = false;
            sr.result = false;
        } else {
            Debug.debug(Debug.DEBUG, "OK after " + i + " of " + cmds.size() + " responses: \"" + foo.trim() + '"');
            // read any multi-line payload according to the command's mode
            switch (sr.mode) {
            case A1:
                // single status line only
                sr.result = true;
                break;
            case RB:
                // multi-line payload collected into a ReadBuffer
                try {
                    sr.rb = getResultNa();
                    sr.result = true;
                } catch (IOException ioe) {
                    Debug.debug( Debug.DEBUG, "Error getting RB: " + ioe);
                    result = false;
                    sr.result = false;
                }
                break;
            case LS:
                // multi-line payload collected as a list of lines
                try {
                    sr.ls = getResultNl();
                    sr.result = true;
                } catch (IOException ioe) {
                    Debug.debug( Debug.DEBUG, "Error getting LS: " + ioe);
                    result = false;
                    sr.result = false;
                }
                break;
            }
        }
        lastLine = foo;
    }
    return result;
}
/**
 * Write a command to the POP3 server. Does NOT flush, read or wait.
 * Caller must sync.
 *
 * @param cmd command to send, non-null, without CRLF
 * @throws IOException on write failure
 * @since 0.9.13
 */
private void sendCmd1aNoWait(String cmd) throws IOException {
    // never log the password
    String logged = cmd.startsWith("PASS") ? "PASS provided" : cmd;
    Debug.debug(Debug.DEBUG, "sendCmd1a(" + logged + ")");
    socket.getOutputStream().write(DataHelper.getASCII(cmd + "\r\n"));
    updateActivity();
}
/**
 * Send a command expecting a multi-line response; on failure,
 * reconnect and retry exactly once.
 * Caller must sync.
 *
 * @param cmd command to send
 * @return the response buffer, or null on failure
 */
private ReadBuffer sendCmdN(String cmd) {
    synchronized (synchronizer) {
        try {
            return sendCmdNa(cmd);
        } catch (IOException e) {
            lastError = e.toString();
            Debug.debug(Debug.DEBUG, "sendCmdNa throws: " + e);
        }
        // first attempt failed; reconnect and try again
        connect();
        if (!connected) {
            Debug.debug(Debug.DEBUG, "not connected after reconnect");
            return null;
        }
        try {
            return sendCmdNa(cmd);
        } catch (IOException e2) {
            lastError = e2.toString();
            Debug.debug(Debug.DEBUG, "2nd sendCmdNa throws: " + e2);
            return null;
        }
    }
}
/**
 * Send a command and read its multi-line response into a buffer.
 * No total timeout (result could be large).
 * Caller must sync.
 *
 * @return the response buffer, or null if the command failed
 * @throws IOException on read/write failure
 */
private ReadBuffer sendCmdNa(String cmd) throws IOException {
    if (!sendCmd1a(cmd)) {
        Debug.debug(Debug.DEBUG, "sendCmd1a returned false");
        return null;
    }
    return getResultNa();
}
/**
 * Like sendCmdNa but returns the response as a list of strings, one per
 * line; lines keep a trailing \r but not \n. Total timeout 2 minutes.
 * Caller must sync.
 *
 * @return the lines, or null if the command failed
 * @throws IOException on timeout or no response
 * @since 0.9.13
 */
private List<String> sendCmdNl(String cmd) throws IOException {
    if (!sendCmd1a(cmd)) {
        Debug.debug(Debug.DEBUG, "sendCmd1a returned false");
        return null;
    }
    return getResultNl();
}
/**
 * Read a multi-line response (terminated by a lone ".") into a buffer,
 * un-stuffing leading dots per RFC 1939 sec. 3 and restoring the
 * newline that readLine() strips.
 * No total timeout (result could be large).
 * Caller must sync.
 *
 * @return buffer non-null
 * @throws IOException on read failure
 */
private ReadBuffer getResultNa() throws IOException
{
    InputStream input = socket.getInputStream();
    StringBuilder buf = new StringBuilder(512);
    ByteArrayOutputStream baos = new ByteArrayOutputStream(4096);
    while (DataHelper.readLine(input, buf)) {
        updateActivity();
        int len = buf.length();
        if (len == 0)
            break; // huh? no \r?
        // a lone "." (plus its \r) terminates the response
        if (len == 2 && buf.charAt(0) == '.' && buf.charAt(1) == '\r')
            break;
        String line;
        // RFC 1939 sec. 3 de-byte-stuffing
        if (buf.charAt(0) == '.')
            line = buf.substring(1);
        else
            line = buf.toString();
        baos.write(DataHelper.getASCII(line));
        // restore the '\n' stripped by readLine() — exactly once;
        // previously a second '\n' was appended whenever the line
        // lacked a trailing '\r', corrupting the message data
        baos.write((byte) '\n');
        buf.setLength(0);
    }
    return new ReadBuffer(baos.toByteArray(), 0, baos.size());
}
/**
 * Like getResultNa but collects the response as a list of strings,
 * one per line; lines keep a trailing \r but not \n.
 * Total timeout 2 minutes.
 * Caller must sync.
 *
 * @return the lines, non-null
 * @throws IOException on timeout
 * @since 0.9.13
 */
private List<String> getResultNl() throws IOException
{
    List<String> rv = new ArrayList<String>(16);
    final long timeOut = 120*1000;
    final long startTime = System.currentTimeMillis();
    InputStream input = socket.getInputStream();
    StringBuilder buf = new StringBuilder(512);
    while (DataHelper.readLine(input, buf)) {
        updateActivity();
        int len = buf.length();
        if (len == 0)
            break; // huh? no \r?
        // a lone "." (plus its \r) terminates the response
        if (len == 2 && buf.charAt(0) == '.' && buf.charAt(1) == '\r')
            break;
        if (System.currentTimeMillis() - startTime > timeOut)
            throw new IOException("Timeout while waiting on server response.");
        // RFC 1939 sec. 3 de-byte-stuffing
        rv.add(buf.charAt(0) == '.' ? buf.substring(1) : buf.toString());
        buf.setLength(0);
    }
    return rv;
}
/**
 * Warning - forces a connection.
 *
 * @return the number of e-mails on the server, or 0 if we cannot connect
 */
public int getNumMails() {
    synchronized( synchronizer ) {
        Debug.debug(Debug.DEBUG, "getNumMails()");
        try {
            checkConnection();
        } catch (IOException ioe) {}  // fall through; connected stays false
        return connected ? mails : 0;
    }
}
/**
 * The most recent error message, cleaned up for display.
 *
 * @return the message with any "-ERR " protocol prefix stripped
 */
public String lastError() {
    String e = lastError;
    // hide the protocol "-ERR " prefix from the user
    if (e.startsWith("-ERR ") && e.length() > 5)
        e = e.substring(5);
    // this common server message gets translated
    if (e.trim().equals("Login failed."))
        e = _("Login failed");
    return e;
}
/**
 * Relay from the checker to the webmail session object,
 * which relays to MailCache, which will fetch the mail from us
 * in a big circle
 *
 * @param nml the listener to notify when new mail arrives
 * @since 0.9.13
 */
public void setNewMailListener(NewMailListener nml) {
    newMailListener = nml;
}
/**
 * Relay from the checker to the webmail session object,
 * which relays to MailCache, which will fetch the mail from us
 * in a big circle
 *
 * @since 0.9.13
 */
public void foundNewMail() {
    // snapshot the volatile field so the null check and the call
    // see the same listener
    NewMailListener nml = newMailListener;
    if (nml != null)
        nml.foundNewMail();
}
/**
 * Close without waiting for a response, and cancel the
 * delayed-deleter and background-checker tasks.
 */
public void destroy() {
    delayedDeleter.cancel();
    synchronized (synchronizer) {
        if (backgroundChecker != null)
            backgroundChecker.cancel();
        close(false);
    }
}
/**
 * For helper threads (DelayedDeleter, BackgroundChecker, IdleCloser)
 * to lock on the same monitor as this mailbox.
 * @since 0.9.13
 */
Object getLock() {
    return synchronizer;
}
/**
 * Do we have UIDLs queued in the DelayedDeleter, still to be deleted?
 * @since 0.9.13
 */
boolean hasQueuedDeletions() {
    return !delayedDeleter.getQueued().isEmpty();
}
/**
 * Close without waiting for response.
 * Sends all queued deletions best-effort before closing.
 */
public void close() {
    close(false);
}
/**
 * Close and optionally wait for the server's response.
 * Sends queued deletions first; with shouldWait, confirmed deletions
 * are also removed from the delayed-delete queue.
 *
 * @param shouldWait true to wait for responses and verify deletions
 * @since 0.9.13
 */
void close(boolean shouldWait) {
    synchronized( synchronizer ) {
        Debug.debug(Debug.DEBUG, "close()");
        if (idleCloser != null)
            idleCloser.cancel();
        if (socket != null && socket.isConnected()) {
            try {
                // map queued UIDLs to message IDs while the mapping is valid
                Collection<String> toDelete = delayedDeleter.getQueued();
                Map<String, Integer> sendDelete = new HashMap<String, Integer>(toDelete.size());
                for (String uidl : toDelete) {
                    int id = getIDfromUIDL(uidl);
                    if (id >= 0) {
                        sendDelete.put(uidl, Integer.valueOf(id));
                    }
                }
                if (shouldWait) {
                    if (!sendDelete.isEmpty()) {
                        // Verify deleted, remove from the delete queue
                        // this does the quit and close
                        Collection<String> deleted = delete(sendDelete.keySet());
                        for (String uidl : deleted) {
                            delayedDeleter.removeQueued(uidl);
                        }
                    } else {
                        sendCmd1a("QUIT");
                    }
                    Debug.debug( Debug.DEBUG, "close() with wait complete");
                } else {
                    if (!sendDelete.isEmpty()) {
                        // spray and pray the deletions, don't remove from delete queue
                        for (Integer id : sendDelete.values()) {
                            sendCmd1aNoWait("DELE " + id);
                        }
                    }
                    sendCmd1aNoWait("QUIT");
                }
                socket.close();
            } catch (IOException e) {
                Debug.debug( Debug.DEBUG, "error closing: " + e);
            }
        }
        socket = null;
        connected = false;
        clear();
    }
}
/**
 * Look up the message number for the given UIDL.
 * Caller must sync.
 *
 * @param uidl the message UIDL
 * @return the message number, or -1 if unknown
 */
private int getIDfromUIDL(String uidl) {
    Integer id = uidlToID.get(uidl);
    return (id != null) ? id.intValue() : -1;
}
/**
* Unused
* @param id
* @return UIDL or null
*/
/****
public String getUIDLfromID( int id )
{
synchronized( synchronizer ) {
try {
return uidlList.get( id );
} catch (IndexOutOfBoundsException ioobe) {
return null;
}
}
}
****/
/**
 * Only if connected. Does not force a connect.
 *
 * @return a new list of the known UIDLs in no particular order,
 *         or null if not connected
 */
public Collection<String> getUIDLs() {
    if (!isConnected())
        return null;
    synchronized (synchronizer) {
        // defensive copy so callers never see our internal map
        return new ArrayList<String>(uidlToID.keySet());
    }
}
/**
*
* @param args
*/
/****
public static void main( String[] args )
{
Debug.setLevel( Debug.DEBUG );
POP3MailBox mailbox = new POP3MailBox( "localhost", 7660 , "test", "test");
ReadBuffer readBuffer = mailbox.sendCmdN( "LIST" );
System.out.println( "list='" + readBuffer + "'" );
}
****/
/**
* Close and reconnect. Takes a while.
* UNUSED
*/
/****
public void performDelete()
{
synchronized( synchronizer ) {
close(true);
// why reconnect?
//connect();
}
}
****/
/** Receive mode for SendRecv: how much of the server response to read. */
private enum Mode {
    /** no extra lines (sendCmd1a) */
    A1,
    /** return extra lines in ReadBuffer (sendCmdNa) */
    RB,
    /** return extra lines in List of Strings (sendCmdNl) */
    LS
}
/**
 * A command to send and a mode to receive and return the results
 * @since 0.9.13
 */
private static class SendRecv {
    /** the command, or null to only read a response line (e.g. the greeting) */
    public final String send;
    public final Mode mode;
    /** the trimmed first response line, set by sendCmds() */
    public String response;
    /** true if the response was +OK and any payload was read successfully */
    public boolean result;
    /** the payload, for Mode.RB */
    public ReadBuffer rb;
    /** the payload, for Mode.LS */
    public List<String> ls;
    // to remember things, e.g. the UIDL or FetchRequest this command is for
    public Object savedObject;
    /** @param s may be null */
    public SendRecv(String s, Mode m) {
        send = s;
        mode = m;
    }
}
/** A single fetch for getBodies(): which message, how much of it, and where the result goes. */
public interface FetchRequest {
    public String getUIDL();
    /** true to fetch only the header (TOP), false for the whole message (RETR) */
    public boolean getHeaderOnly();
    /** called with the fetched data on success */
    public void setBuffer(ReadBuffer buffer);
}
/** translate a user-visible string */
private static String _(String s) {
    return Messages.getString(s);
}
}
| |
/*
* Copyright (c) 2013, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.identity.oauth2.token.handlers.grant;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.identity.application.authentication.framework.exception.UserIdNotFoundException;
import org.wso2.carbon.identity.base.IdentityConstants;
import org.wso2.carbon.identity.base.IdentityException;
import org.wso2.carbon.identity.core.util.IdentityUtil;
import org.wso2.carbon.identity.oauth.OAuthUtil;
import org.wso2.carbon.identity.oauth.cache.OAuthCache;
import org.wso2.carbon.identity.oauth.cache.OAuthCacheKey;
import org.wso2.carbon.identity.oauth.common.OAuthConstants;
import org.wso2.carbon.identity.oauth.common.exception.InvalidOAuthClientException;
import org.wso2.carbon.identity.oauth.config.OAuthServerConfiguration;
import org.wso2.carbon.identity.oauth.dao.OAuthAppDO;
import org.wso2.carbon.identity.oauth2.IdentityOAuth2Exception;
import org.wso2.carbon.identity.oauth2.dao.AuthorizationCodeValidationResult;
import org.wso2.carbon.identity.oauth2.dao.OAuthTokenPersistenceFactory;
import org.wso2.carbon.identity.oauth2.dto.OAuth2AccessTokenReqDTO;
import org.wso2.carbon.identity.oauth2.dto.OAuth2AccessTokenRespDTO;
import org.wso2.carbon.identity.oauth2.model.AccessTokenDO;
import org.wso2.carbon.identity.oauth2.model.AuthzCodeDO;
import org.wso2.carbon.identity.oauth2.token.OAuthTokenReqMessageContext;
import org.wso2.carbon.identity.oauth2.util.OAuth2Util;
import static org.wso2.carbon.identity.oauth2.util.OAuth2Util.buildCacheKeyStringForTokenWithUserId;
import static org.wso2.carbon.identity.oauth2.util.OAuth2Util.getTimeToExpire;
import static org.wso2.carbon.identity.oauth2.util.OAuth2Util.validatePKCE;
import static org.wso2.carbon.identity.openidconnect.OIDCConstants.CODE_ID;
/**
* Implements the AuthorizationGrantHandler for the Grant Type : authorization_code.
*/
public class AuthorizationCodeGrantHandler extends AbstractAuthorizationGrantHandler {
// This is used to keep the pre processed authorization code in the OAuthTokenReqMessageContext.
private static final String AUTHZ_CODE = "AuthorizationCode";
private static final int ALLOWED_MINIMUM_VALIDITY_PERIOD = 1000;
private static final Log log = LogFactory.getLog(AuthorizationCodeGrantHandler.class);
@Override
public boolean validateGrant(OAuthTokenReqMessageContext tokReqMsgCtx) throws IdentityOAuth2Exception {
    super.validateGrant(tokReqMsgCtx);
    OAuth2AccessTokenReqDTO tokenReq = tokReqMsgCtx.getOauth2AccessTokenReqDTO();
    // load the persisted code and verify it matches this client and request
    AuthzCodeDO authzCodeBean = getPersistedAuthzCode(tokenReq);
    validateAuthzCodeFromRequest(authzCodeBean, tokenReq.getClientId(), tokenReq.getAuthorizationCode());
    try {
        // If redirect_uri was given in the authorization request,
        // token request should send matching redirect_uri value.
        validateCallbackUrlFromRequest(tokenReq.getCallbackURI(), authzCodeBean.getCallbackUrl());
        validatePKCECode(authzCodeBean, tokenReq.getPkceCodeVerifier());
        setPropertiesForTokenGeneration(tokReqMsgCtx, tokenReq, authzCodeBean);
    } finally {
        // After validating grant, authorization code is revoked. This is done to stop repetitive usage of
        // same authorization code in erroneous token requests.
        // Runs even when validation above throws, so a failed attempt also burns the code.
        tokReqMsgCtx.addProperty(CODE_ID, authzCodeBean.getAuthzCodeId());
        revokeAuthorizationCode(authzCodeBean);
    }
    if (log.isDebugEnabled()) {
        log.debug("Found Authorization Code for Client : " + tokenReq.getClientId() +
                ", authorized user : " + authzCodeBean.getAuthorizedUser() +
                ", scope : " + OAuth2Util.buildScopeString(authzCodeBean.getScope()));
    }
    return true;
}
/**
 * Issues the access token for a validated authorization-code grant, then
 * correlates the new token with the consumed code and evicts the code
 * from the cache.
 */
@Override
public OAuth2AccessTokenRespDTO issue(OAuthTokenReqMessageContext tokReqMsgCtx)
        throws IdentityOAuth2Exception {
    OAuth2AccessTokenRespDTO tokenResp = super.issue(tokReqMsgCtx);
    String authzCode = retrieveAuthzCode(tokReqMsgCtx);
    deactivateAuthzCode(tokReqMsgCtx, tokenResp.getTokenId(), authzCode);
    clearAuthzCodeCache(tokReqMsgCtx, authzCode);
    return tokenResp;
}
private void setPropertiesForTokenGeneration(OAuthTokenReqMessageContext tokReqMsgCtx,
OAuth2AccessTokenReqDTO tokenReq, AuthzCodeDO authzCodeBean) {
tokReqMsgCtx.setAuthorizedUser(authzCodeBean.getAuthorizedUser());
tokReqMsgCtx.setScope(authzCodeBean.getScope());
// keep the pre processed authz code as a OAuthTokenReqMessageContext property to avoid
// calculating it again when issuing the access token.
tokReqMsgCtx.addProperty(AUTHZ_CODE, tokenReq.getAuthorizationCode());
}
private boolean validateCallbackUrlFromRequest(String callbackUrlFromRequest,
String callbackUrlFromPersistedAuthzCode)
throws IdentityOAuth2Exception {
if (StringUtils.isEmpty(callbackUrlFromPersistedAuthzCode)) {
return true;
}
if (!callbackUrlFromPersistedAuthzCode.equals(callbackUrlFromRequest)) {
if (log.isDebugEnabled()) {
log.debug("Received callback url in the request : " + callbackUrlFromRequest +
" is not matching with persisted callback url " + callbackUrlFromPersistedAuthzCode);
}
throw new IdentityOAuth2Exception("Callback url mismatch");
}
return true;
}
private void clearAuthzCodeCache(OAuthTokenReqMessageContext tokReqMsgCtx, String authzCode) {
if (cacheEnabled) {
String clientId = tokReqMsgCtx.getOauth2AccessTokenReqDTO().getClientId();
OAuthCacheKey cacheKey = new OAuthCacheKey(OAuth2Util.buildCacheKeyStringForAuthzCode(
clientId, authzCode));
OAuthCache.getInstance().clearCacheEntry(cacheKey);
if (log.isDebugEnabled()) {
log.debug("Cache was cleared for authorization code info for client id : " + clientId);
}
}
}
private void deactivateAuthzCode(OAuthTokenReqMessageContext tokReqMsgCtx, String tokenId,
String authzCode) throws IdentityOAuth2Exception {
try {
// Here we deactivate the authorization and in the process update the tokenId against the authorization
// code so that we can correlate the current access token that is valid against the authorization code.
AuthzCodeDO authzCodeDO = new AuthzCodeDO();
authzCodeDO.setAuthorizationCode(authzCode);
authzCodeDO.setOauthTokenId(tokenId);
authzCodeDO.setAuthzCodeId(tokReqMsgCtx.getProperty(CODE_ID).toString());
OAuthTokenPersistenceFactory.getInstance().getAuthorizationCodeDAO()
.deactivateAuthorizationCode(authzCodeDO);
if (log.isDebugEnabled()
&& IdentityUtil.isTokenLoggable(IdentityConstants.IdentityTokens.AUTHORIZATION_CODE)) {
log.debug("Deactivated authorization code : " + authzCode);
}
} catch (IdentityException e) {
throw new IdentityOAuth2Exception("Error occurred while deactivating authorization code", e);
}
}
/**
* Returns whether an unexpired, pre-generated token is served for this request
* @param tokReqMsgCtx
* @return
*/
private boolean isExistingTokenUsed(OAuthTokenReqMessageContext tokReqMsgCtx) {
if (tokReqMsgCtx.getProperty(EXISTING_TOKEN_ISSUED) != null) {
if (log.isDebugEnabled()) {
log.debug("Token request message context has 'existingTokenUsed' value : " +
tokReqMsgCtx.getProperty(EXISTING_TOKEN_ISSUED).toString());
}
return (Boolean) tokReqMsgCtx.getProperty(EXISTING_TOKEN_ISSUED);
}
if (log.isDebugEnabled()) {
log.debug("'existingTokenUsed' property not set in token request message context");
}
return false;
}
/**
* Get the token from the OAuthTokenReqMessageContext which is stored while validating the authorization code.
* If it's not there (which is unlikely), recalculate it.
* @param tokReqMsgCtx
* @return
*/
private String retrieveAuthzCode(OAuthTokenReqMessageContext tokReqMsgCtx) {
String authzCode = (String) tokReqMsgCtx.getProperty(AUTHZ_CODE);
if (authzCode == null) {
if (log.isDebugEnabled()) {
log.debug("authorization code is not saved in the token request message context for client : " +
tokReqMsgCtx.getOauth2AccessTokenReqDTO().getClientId());
}
authzCode = tokReqMsgCtx.getOauth2AccessTokenReqDTO().getAuthorizationCode();
}
return authzCode;
}
@Override
public boolean authorizeAccessDelegation(OAuthTokenReqMessageContext tokReqMsgCtx)
throws IdentityOAuth2Exception {
// authorization is handled when the authorization code was issued.
return true;
}
@Override
protected void storeAccessToken(OAuth2AccessTokenReqDTO oAuth2AccessTokenReqDTO, String userStoreDomain,
AccessTokenDO newTokenBean, String newAccessToken, AccessTokenDO
existingTokenBean)
throws IdentityOAuth2Exception {
try {
newTokenBean.setAuthorizationCode(oAuth2AccessTokenReqDTO.getAuthorizationCode());
OAuthTokenPersistenceFactory.getInstance().getAccessTokenDAO()
.insertAccessToken(newAccessToken, oAuth2AccessTokenReqDTO.getClientId(),
newTokenBean, existingTokenBean, userStoreDomain);
} catch (IdentityException e) {
throw new IdentityOAuth2Exception(
"Error occurred while storing new access token", e);
}
}
@Override
public boolean issueRefreshToken() throws IdentityOAuth2Exception {
return OAuthServerConfiguration.getInstance()
.getValueForIsRefreshTokenAllowed(OAuthConstants.GrantTypes.AUTHORIZATION_CODE);
}
/**
* Provides authorization code request details saved in cache or DB
* @param tokenReqDTO
* @return
* @throws IdentityOAuth2Exception
*/
private AuthzCodeDO getPersistedAuthzCode(OAuth2AccessTokenReqDTO tokenReqDTO) throws IdentityOAuth2Exception {
AuthzCodeDO authzCodeDO;
// If cache is enabled, check in the cache first.
if (cacheEnabled) {
OAuthCacheKey cacheKey = new OAuthCacheKey(OAuth2Util.buildCacheKeyStringForAuthzCode(
tokenReqDTO.getClientId(), tokenReqDTO.getAuthorizationCode()));
authzCodeDO = (AuthzCodeDO) OAuthCache.getInstance().getValueFromCache(cacheKey);
if (authzCodeDO != null) {
return authzCodeDO;
} else {
if (log.isDebugEnabled()) {
log.debug("Authorization Code Info was not available in cache for client id : "
+ tokenReqDTO.getClientId());
}
}
}
if (log.isDebugEnabled()) {
log.debug("Retrieving authorization code information from db for client id : " + tokenReqDTO.getClientId());
}
AuthorizationCodeValidationResult validationResult = OAuthTokenPersistenceFactory.getInstance()
.getAuthorizationCodeDAO().validateAuthorizationCode(tokenReqDTO.getClientId(),
tokenReqDTO.getAuthorizationCode());
if (validationResult != null) {
if (!validationResult.isActiveCode()) {
String tokenAlias = OAuthTokenPersistenceFactory.getInstance().getAccessTokenDAO()
.getAccessTokenByTokenId(validationResult.getTokenId());
//revoking access token issued for authorization code as per RFC 6749 Section 4.1.2
revokeExistingAccessTokens(validationResult.getTokenId(), validationResult.getAuthzCodeDO());
clearTokenCache(tokenAlias, validationResult.getTokenId());
String scope = OAuth2Util.buildScopeString(validationResult.getAuthzCodeDO().getScope());
OAuthUtil.clearOAuthCache(tokenReqDTO.getClientId(), validationResult.getAuthzCodeDO().
getAuthorizedUser(), scope);
}
return validationResult.getAuthzCodeDO();
} else {
// This means an invalid authorization code was sent for validation. We return null since higher
// layers expect a null value for an invalid authorization code.
return null;
}
}
private void revokeExistingAccessTokens(String tokenId, AuthzCodeDO authzCodeDO) throws IdentityOAuth2Exception {
String userId = null;
try {
userId = authzCodeDO.getAuthorizedUser().getUserId();
} catch (UserIdNotFoundException e) {
throw new IdentityOAuth2Exception("User id not found for user: "
+ authzCodeDO.getAuthorizedUser().getLoggableUserId(), e);
}
OAuthTokenPersistenceFactory.getInstance().getAccessTokenDAO().revokeAccessToken(tokenId, userId);
if (log.isDebugEnabled()) {
if (IdentityUtil.isTokenLoggable(IdentityConstants.IdentityTokens.AUTHORIZATION_CODE)) {
log.debug("Validated authorization code(hashed): " + DigestUtils.sha256Hex(authzCodeDO
.getAuthorizationCode()) + " for client: " + authzCodeDO.getConsumerKey() + " is not active. " +
"So revoking the access tokens issued for the authorization code.");
} else {
log.debug("Validated authorization code for client: " + authzCodeDO.getConsumerKey() + " is not " +
"active. So revoking the access tokens issued for the authorization code.");
}
}
}
private String buildCacheKeyForToken(String clientId, AuthzCodeDO authzCodeDO) throws IdentityOAuth2Exception {
String scope = OAuth2Util.buildScopeString(authzCodeDO.getScope());
try {
return buildCacheKeyStringForTokenWithUserId(clientId, scope, authzCodeDO.getAuthorizedUser().getUserId(),
authzCodeDO.getAuthorizedUser().getFederatedIdPName(), authzCodeDO.getTokenBindingReference());
} catch (UserIdNotFoundException e) {
throw new IdentityOAuth2Exception("User id not available for user: "
+ authzCodeDO.getAuthorizedUser().getLoggableUserId(), e);
}
}
/**
* Checks whether the retrieved authorization data is invalid, inactive or expired.
* Returns true otherwise
*
* @param authzCodeBean
* @param clientId
* @return
* @throws IdentityOAuth2Exception
*/
private boolean validateAuthzCodeFromRequest(AuthzCodeDO authzCodeBean, String clientId, String authzCode)
throws IdentityOAuth2Exception {
if (authzCodeBean == null) {
// If no auth code details available, cannot proceed with Authorization code grant
if (log.isDebugEnabled()) {
log.debug("Invalid token request for client id: " + clientId +
"and couldn't find persisted data for authorization code: " + authzCode);
}
throw new IdentityOAuth2Exception("Invalid authorization code received from token request");
}
if (isInactiveAuthzCode(authzCodeBean)) {
clearTokenCache(authzCodeBean, clientId);
throw new IdentityOAuth2Exception("Inactive authorization code received from token request");
}
if (isAuthzCodeExpired(authzCodeBean) || isAuthzCodeRevoked(authzCodeBean)) {
throw new IdentityOAuth2Exception("Expired or Revoked authorization code received from token request");
}
return true;
}
private void clearTokenCache(AuthzCodeDO authzCodeBean, String clientId) throws IdentityOAuth2Exception {
if (cacheEnabled) {
String cacheKeyString = buildCacheKeyForToken(clientId, authzCodeBean);
OAuthCache.getInstance().clearCacheEntry(new OAuthCacheKey(cacheKeyString));
if (log.isDebugEnabled()) {
log.debug("Removed token from cache for user : " + authzCodeBean.getAuthorizedUser().toString() +
", for client : " + clientId);
}
}
}
private void clearTokenCache(String tokenAlias, String tokenId) {
if (cacheEnabled) {
if (tokenAlias == null) {
if (log.isDebugEnabled()) {
log.debug("Received token alias is null. Skipping clearing token cache with token alias for " +
"tokenId : " + tokenId);
}
return;
}
OAuthCache.getInstance().clearCacheEntry(new OAuthCacheKey(tokenAlias));
if (log.isDebugEnabled()) {
if (IdentityUtil.isTokenLoggable(IdentityConstants.IdentityTokens.ACCESS_TOKEN)) {
log.debug("Removed token from cache for token alias : " + tokenAlias);
} else {
log.debug("Removed token from cache for token alias associated with tokenId : "
+ tokenId);
}
}
}
}
private boolean isInactiveAuthzCode(AuthzCodeDO authzCodeBean) {
if (OAuthConstants.AuthorizationCodeState.INACTIVE.equals(authzCodeBean.getState())) {
if (log.isDebugEnabled()) {
log.debug("Invalid access token request with Client Id : " + authzCodeBean.getConsumerKey() +
", Inactive authorization code : " + authzCodeBean.getAuthorizationCode());
}
return true;
}
return false;
}
private boolean isAuthzCodeRevoked(AuthzCodeDO authzCodeBean) {
if (OAuthConstants.AuthorizationCodeState.REVOKED.equals(authzCodeBean.getState())) {
if (log.isDebugEnabled()) {
log.debug("Invalid access token request with Client Id : " + authzCodeBean.getConsumerKey() +
", Revoked authorization code : " + authzCodeBean.getAuthorizationCode());
}
return true;
}
return false;
}
private boolean isAuthzCodeExpired(AuthzCodeDO authzCodeBean)
throws IdentityOAuth2Exception {
if (OAuthConstants.AuthorizationCodeState.EXPIRED.equals(authzCodeBean.getState())) {
if (log.isDebugEnabled()) {
log.debug("Invalid access token request with Client Id : " + authzCodeBean.getConsumerKey() +
", Expired authorization code : " + authzCodeBean.getAuthorizationCode());
}
return true;
}
long issuedTime = authzCodeBean.getIssuedTime().getTime();
long validityPeriod = authzCodeBean.getValidityPeriod();
// If the code is not valid for more than 1 sec, it is considered to be expired
if (getTimeToExpire(issuedTime, validityPeriod) < ALLOWED_MINIMUM_VALIDITY_PERIOD) {
markAsExpired(authzCodeBean);
if (log.isDebugEnabled()) {
log.debug("Authorization Code Issued Time(ms): " + issuedTime +
", Validity Period: " + validityPeriod + ", Timestamp Skew: " +
OAuthServerConfiguration.getInstance().getTimeStampSkewInSeconds() * 1000 +
", Current Time: " + System.currentTimeMillis());
}
return true;
}
return false;
}
private void markAsExpired(AuthzCodeDO authzCodeBean) throws IdentityOAuth2Exception {
OAuthTokenPersistenceFactory.getInstance().getAuthorizationCodeDAO()
.updateAuthorizationCodeState(authzCodeBean.getAuthorizationCode(),
OAuthConstants.AuthorizationCodeState.EXPIRED);
if (log.isDebugEnabled()) {
log.debug("Changed state of authorization code : " + authzCodeBean.getAuthorizationCode() + " to expired");
}
if (cacheEnabled) {
// remove the authorization code from the cache
OAuthCache.getInstance().clearCacheEntry(new OAuthCacheKey(
OAuth2Util.buildCacheKeyStringForAuthzCode(authzCodeBean.getConsumerKey(),
authzCodeBean.getAuthorizationCode())));
if (log.isDebugEnabled()) {
log.debug("Expired Authorization code issued for client " + authzCodeBean.getConsumerKey() +
" was removed from the cache.");
}
}
}
/**
* Performs PKCE Validation for "Authorization Code" Grant Type
*
* @param authzCodeBean
* @param verificationCode
* @return true if PKCE is validated
* @throws IdentityOAuth2Exception
*/
private boolean validatePKCECode(AuthzCodeDO authzCodeBean, String verificationCode)
throws IdentityOAuth2Exception {
String pkceCodeChallenge = authzCodeBean.getPkceCodeChallenge();
String pkceCodeChallengeMethod = authzCodeBean.getPkceCodeChallengeMethod();
OAuthAppDO oAuthApp = getOAuthAppDO(authzCodeBean.getConsumerKey());
if (!validatePKCE(pkceCodeChallenge, verificationCode, pkceCodeChallengeMethod, oAuthApp)) {
//possible malicious oAuthRequest
log.warn("Failed PKCE Verification for oAuth 2.0 request");
if (log.isDebugEnabled()) {
log.debug("PKCE code verification failed for client : " + authzCodeBean.getConsumerKey());
}
throw new IdentityOAuth2Exception("PKCE validation failed");
}
return true;
}
private void revokeAuthorizationCode(AuthzCodeDO authzCodeBean) throws IdentityOAuth2Exception {
OAuthTokenPersistenceFactory.getInstance().getAuthorizationCodeDAO().updateAuthorizationCodeState(
authzCodeBean.getAuthorizationCode(), OAuthConstants.AuthorizationCodeState.REVOKED);
if (log.isDebugEnabled()) {
log.debug("Changed state of authorization code : " + authzCodeBean.getAuthorizationCode() + " to revoked");
}
if (cacheEnabled) {
// remove the authorization code from the cache
OAuthCache.getInstance().clearCacheEntry(new OAuthCacheKey(
OAuth2Util.buildCacheKeyStringForAuthzCode(authzCodeBean.getConsumerKey(),
authzCodeBean.getAuthorizationCode())));
if (log.isDebugEnabled()) {
log.debug("Revoked Authorization code issued for client " + authzCodeBean.getConsumerKey() +
" was removed from the cache.");
}
}
}
private OAuthAppDO getOAuthAppDO(String clientId) throws IdentityOAuth2Exception {
try {
return OAuth2Util.getAppInformationByClientId(clientId);
} catch (InvalidOAuthClientException e) {
throw new IdentityOAuth2Exception("Error while retrieving app information for client: " + clientId);
}
}
}
| |
package com.missionhub.model;
import android.text.Html;
import java.util.List;
import com.missionhub.R;
import com.missionhub.api.Api;
import com.missionhub.api.ApiOptions;
import com.missionhub.application.Application;
import com.missionhub.model.DaoSession;
import com.missionhub.model.generic.InteractionVisibility;
import com.missionhub.util.DateUtils;
import com.missionhub.util.ResourceUtils;
import org.apache.commons.lang3.StringUtils;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.format.DateTimeFormat;
import de.greenrobot.dao.DaoException;
// THIS CODE IS GENERATED BY greenDAO, EDIT ONLY INSIDE THE "KEEP"-SECTIONS
// KEEP INCLUDES - put your custom includes here
import java.util.ArrayList;
// KEEP INCLUDES END
/**
* Entity mapped to table INTERACTION.
*/
/**
 * Entity mapped to table INTERACTION.
 * <p>
 * greenDAO-generated entity; hand-written additions live only inside the KEEP sections.
 */
public class Interaction implements com.missionhub.model.TimestampedEntity {

    private Long id;
    private Long interaction_type_id;
    private Long receiver_id;
    private Long organization_id;
    private Long created_by_id;
    private Long updated_by_id;
    private String comment;
    private String privacy_setting;
    private String timestamp;
    private String created_at;
    private String updated_at;

    /** Used to resolve relations */
    private transient DaoSession daoSession;

    /** Used for active entity operations. */
    private transient InteractionDao myDao;

    private Person receiver;
    private Long receiver__resolvedKey;

    private Organization organization;
    private Long organization__resolvedKey;

    private Person creator;
    private Long creator__resolvedKey;

    private Person updater;
    private Long updater__resolvedKey;

    private InteractionType interactionType;
    private Long interactionType__resolvedKey;

    private List<InteractionInitiator> interactionInitiatorList;

    // KEEP FIELDS - put your custom fields here
    // volatile is required for safe publication of the lazily-built cache (double-checked
    // locking in getViewCache()); without it other threads may see a partially built object.
    private volatile InteractionViewCache mViewCache;
    // KEEP FIELDS END

    public Interaction() {
    }

    public Interaction(Long id) {
        this.id = id;
    }

    public Interaction(Long id, Long interaction_type_id, Long receiver_id, Long organization_id, Long created_by_id, Long updated_by_id, String comment, String privacy_setting, String timestamp, String created_at, String updated_at) {
        this.id = id;
        this.interaction_type_id = interaction_type_id;
        this.receiver_id = receiver_id;
        this.organization_id = organization_id;
        this.created_by_id = created_by_id;
        this.updated_by_id = updated_by_id;
        this.comment = comment;
        this.privacy_setting = privacy_setting;
        this.timestamp = timestamp;
        this.created_at = created_at;
        this.updated_at = updated_at;
    }

    /** called by internal mechanisms, do not call yourself. */
    public void __setDaoSession(DaoSession daoSession) {
        this.daoSession = daoSession;
        myDao = daoSession != null ? daoSession.getInteractionDao() : null;
    }

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public Long getInteraction_type_id() {
        return interaction_type_id;
    }

    public void setInteraction_type_id(Long interaction_type_id) {
        this.interaction_type_id = interaction_type_id;
    }

    public Long getReceiver_id() {
        return receiver_id;
    }

    public void setReceiver_id(Long receiver_id) {
        this.receiver_id = receiver_id;
    }

    public Long getOrganization_id() {
        return organization_id;
    }

    public void setOrganization_id(Long organization_id) {
        this.organization_id = organization_id;
    }

    public Long getCreated_by_id() {
        return created_by_id;
    }

    public void setCreated_by_id(Long created_by_id) {
        this.created_by_id = created_by_id;
    }

    public Long getUpdated_by_id() {
        return updated_by_id;
    }

    public void setUpdated_by_id(Long updated_by_id) {
        this.updated_by_id = updated_by_id;
    }

    public String getComment() {
        return comment;
    }

    public void setComment(String comment) {
        this.comment = comment;
    }

    public String getPrivacy_setting() {
        return privacy_setting;
    }

    public void setPrivacy_setting(String privacy_setting) {
        this.privacy_setting = privacy_setting;
    }

    public String getTimestamp() {
        return timestamp;
    }

    public void setTimestamp(String timestamp) {
        this.timestamp = timestamp;
    }

    public String getCreated_at() {
        return created_at;
    }

    public void setCreated_at(String created_at) {
        this.created_at = created_at;
    }

    public String getUpdated_at() {
        return updated_at;
    }

    public void setUpdated_at(String updated_at) {
        this.updated_at = updated_at;
    }

    /** To-one relationship, resolved on first access. */
    public Person getReceiver() {
        Long __key = this.receiver_id;
        if (receiver__resolvedKey == null || !receiver__resolvedKey.equals(__key)) {
            if (daoSession == null) {
                throw new DaoException("Entity is detached from DAO context");
            }
            PersonDao targetDao = daoSession.getPersonDao();
            Person receiverNew = targetDao.load(__key);
            synchronized (this) {
                receiver = receiverNew;
                receiver__resolvedKey = __key;
            }
        }
        return receiver;
    }

    public void setReceiver(Person receiver) {
        synchronized (this) {
            this.receiver = receiver;
            receiver_id = receiver == null ? null : receiver.getId();
            receiver__resolvedKey = receiver_id;
        }
    }

    /** To-one relationship, resolved on first access. */
    public Organization getOrganization() {
        Long __key = this.organization_id;
        if (organization__resolvedKey == null || !organization__resolvedKey.equals(__key)) {
            if (daoSession == null) {
                throw new DaoException("Entity is detached from DAO context");
            }
            OrganizationDao targetDao = daoSession.getOrganizationDao();
            Organization organizationNew = targetDao.load(__key);
            synchronized (this) {
                organization = organizationNew;
                organization__resolvedKey = __key;
            }
        }
        return organization;
    }

    public void setOrganization(Organization organization) {
        synchronized (this) {
            this.organization = organization;
            organization_id = organization == null ? null : organization.getId();
            organization__resolvedKey = organization_id;
        }
    }

    /** To-one relationship, resolved on first access. */
    public Person getCreator() {
        Long __key = this.created_by_id;
        if (creator__resolvedKey == null || !creator__resolvedKey.equals(__key)) {
            if (daoSession == null) {
                throw new DaoException("Entity is detached from DAO context");
            }
            PersonDao targetDao = daoSession.getPersonDao();
            Person creatorNew = targetDao.load(__key);
            synchronized (this) {
                creator = creatorNew;
                creator__resolvedKey = __key;
            }
        }
        return creator;
    }

    public void setCreator(Person creator) {
        synchronized (this) {
            this.creator = creator;
            created_by_id = creator == null ? null : creator.getId();
            creator__resolvedKey = created_by_id;
        }
    }

    /** To-one relationship, resolved on first access. */
    public Person getUpdater() {
        Long __key = this.updated_by_id;
        if (updater__resolvedKey == null || !updater__resolvedKey.equals(__key)) {
            if (daoSession == null) {
                throw new DaoException("Entity is detached from DAO context");
            }
            PersonDao targetDao = daoSession.getPersonDao();
            Person updaterNew = targetDao.load(__key);
            synchronized (this) {
                updater = updaterNew;
                updater__resolvedKey = __key;
            }
        }
        return updater;
    }

    public void setUpdater(Person updater) {
        synchronized (this) {
            this.updater = updater;
            updated_by_id = updater == null ? null : updater.getId();
            updater__resolvedKey = updated_by_id;
        }
    }

    /** To-one relationship, resolved on first access. */
    public InteractionType getInteractionType() {
        Long __key = this.interaction_type_id;
        if (interactionType__resolvedKey == null || !interactionType__resolvedKey.equals(__key)) {
            if (daoSession == null) {
                throw new DaoException("Entity is detached from DAO context");
            }
            InteractionTypeDao targetDao = daoSession.getInteractionTypeDao();
            InteractionType interactionTypeNew = targetDao.load(__key);
            synchronized (this) {
                interactionType = interactionTypeNew;
                interactionType__resolvedKey = __key;
            }
        }
        return interactionType;
    }

    public void setInteractionType(InteractionType interactionType) {
        synchronized (this) {
            this.interactionType = interactionType;
            interaction_type_id = interactionType == null ? null : interactionType.getId();
            interactionType__resolvedKey = interaction_type_id;
        }
    }

    /** To-many relationship, resolved on first access (and after reset). Changes to to-many relations are not persisted, make changes to the target entity. */
    public List<InteractionInitiator> getInteractionInitiatorList() {
        if (interactionInitiatorList == null) {
            if (daoSession == null) {
                throw new DaoException("Entity is detached from DAO context");
            }
            InteractionInitiatorDao targetDao = daoSession.getInteractionInitiatorDao();
            List<InteractionInitiator> interactionInitiatorListNew = targetDao._queryInteraction_InteractionInitiatorList(id);
            synchronized (this) {
                if(interactionInitiatorList == null) {
                    interactionInitiatorList = interactionInitiatorListNew;
                }
            }
        }
        return interactionInitiatorList;
    }

    /** Resets a to-many relationship, making the next get call to query for a fresh result. */
    public synchronized void resetInteractionInitiatorList() {
        interactionInitiatorList = null;
    }

    /** Convenient call for {@link AbstractDao#delete(Object)}. Entity must attached to an entity context. */
    public void delete() {
        if (myDao == null) {
            throw new DaoException("Entity is detached from DAO context");
        }
        myDao.delete(this);
    }

    /** Convenient call for {@link AbstractDao#update(Object)}. Entity must attached to an entity context. */
    public void update() {
        if (myDao == null) {
            throw new DaoException("Entity is detached from DAO context");
        }
        myDao.update(this);
    }

    /** Convenient call for {@link AbstractDao#refresh(Object)}. Entity must attached to an entity context. */
    public void refresh() {
        if (myDao == null) {
            throw new DaoException("Entity is detached from DAO context");
        }
        myDao.refresh(this);
    }

    // KEEP METHODS - put your custom methods here

    /** Reloads the entity from the database and drops the stale view cache. */
    public void refreshAll() {
        refresh();
        invalidateViewCache();
    }

    /** Deletes this interaction together with its initiator join rows. */
    public void deleteWithRelations() {
        if (daoSession == null) {
            throw new DaoException("Entity is detached from DAO context");
        }
        daoSession.getInteractionInitiatorDao().deleteByKeyInTx(daoSession.getInteractionInitiatorDao().queryBuilder().where(InteractionInitiatorDao.Properties.Interaction_id.eq(getId())).<Long>listKeys());
        delete();
    }

    /** Returns the person ids of all initiators of this interaction. */
    public synchronized Long[] getInitiatorIds() {
        List<Long> ids = new ArrayList<Long>();
        List<InteractionInitiator> initiators = getInteractionInitiatorList();
        for (InteractionInitiator initiator : initiators) {
            ids.add(initiator.getPerson_id());
        }
        return ids.toArray(new Long[ids.size()]);
    }

    public InteractionVisibility getVisibility() {
        return InteractionVisibility.parse(getPrivacy_setting());
    }

    /**
     * Lazily builds (and caches) the pre-formatted strings used by list views.
     * <p>
     * Fixed: the original double-checked locking had no null re-check inside the
     * synchronized block (two racing threads could each rebuild the cache) and the
     * field was not volatile (a reader could observe a partially populated cache).
     * The cache is now built fully into a local and published last, via a volatile field.
     */
    public InteractionViewCache getViewCache() {
        InteractionViewCache cache = mViewCache;
        if (cache == null) {
            synchronized (this) {
                cache = mViewCache;
                if (cache == null) {
                    cache = new InteractionViewCache();
                    // Prefer the explicit interaction timestamp; fall back to creation time.
                    // NOTE(review): assumes at least one of timestamp/created_at is set — confirm.
                    DateTime timestamp;
                    if (StringUtils.isNotEmpty(getTimestamp())) {
                        timestamp = DateUtils.parseISO8601(getTimestamp());
                    } else {
                        timestamp = DateUtils.parseISO8601(getCreated_at());
                    }
                    cache.timestamp = timestamp.toString(DateTimeFormat.forPattern("d MMM yyyy").withZone(DateTimeZone.getDefault()));
                    cache.visibility = getVisibility().toString();
                    List<String> initiatorNames = new ArrayList<String>();
                    List<InteractionInitiator> initiators = getInteractionInitiatorList();
                    for(InteractionInitiator initiator : initiators) {
                        Person p = initiator.getPerson();
                        if (p != null) {
                            initiatorNames.add(p.getName());
                        }
                    }
                    String receiverName = "";
                    if (getReceiver() != null) {
                        receiverName = getReceiver().getName();
                    }
                    InteractionType type = getInteractionType();
                    if (type != null) {
                        cache.iconResource = type.getIconResource();
                        cache.action = Html.fromHtml(type.getActionString(StringUtils.join(initiatorNames, ", "), receiverName));
                    }
                    if (StringUtils.isNotEmpty(getComment())) {
                        cache.comment = Html.fromHtml(String.format(ResourceUtils.getString(R.string.interaction_comment_message), StringUtils.join(initiatorNames, ", "), getComment().trim()));
                    }
                    DateTime updated;
                    if (StringUtils.isNotEmpty(getUpdated_at())) {
                        updated = DateUtils.parseISO8601(getUpdated_at());
                    } else {
                        updated = DateUtils.parseISO8601(getCreated_at());
                    }
                    String updatedTime = updated.toString(DateTimeFormat.forPattern("h:mm a").withZone(DateTimeZone.getDefault()));
                    String updatedDate = updated.toString(DateTimeFormat.forPattern("d MMM yyyy").withZone(DateTimeZone.getDefault()));
                    String updaterName;
                    if (getUpdater() != null) {
                        updaterName = getUpdater().getName();
                    } else if (getCreator() != null) {
                        updaterName = getCreator().getName();
                    } else {
                        updaterName = "Unknown";
                    }
                    cache.updated = String.format(ResourceUtils.getString(R.string.interaction_updated_at), updaterName, updatedDate, updatedTime);
                    // Publish only after the cache is fully built.
                    mViewCache = cache;
                }
            }
        }
        return cache;
    }

    public void invalidateViewCache() {
        mViewCache = null;
    }

    /** Value holder for the pre-rendered list-view strings. */
    public static class InteractionViewCache {
        public int iconResource;
        public CharSequence timestamp;
        public CharSequence visibility;
        public CharSequence action;
        public CharSequence comment;
        public CharSequence updated;
    }

    // KEEP METHODS END
}
| |
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* K2.java
* Copyright (C) 2001-2012 University of Waikato, Hamilton, New Zealand
*
*/
package weka.classifiers.bayes.net.search.local;
import java.util.Collections;
import java.util.Enumeration;
import java.util.Random;
import java.util.Vector;
import weka.classifiers.bayes.BayesNet;
import weka.core.Instances;
import weka.core.Option;
import weka.core.RevisionUtils;
import weka.core.TechnicalInformation;
import weka.core.TechnicalInformation.Field;
import weka.core.TechnicalInformation.Type;
import weka.core.TechnicalInformationHandler;
import weka.core.Utils;
/**
* <!-- globalinfo-start --> This Bayes Network learning algorithm uses a hill
* climbing algorithm restricted by an order on the variables.<br/>
* <br/>
* For more information see:<br/>
* <br/>
* G.F. Cooper, E. Herskovits (1990). A Bayesian method for constructing
* Bayesian belief networks from databases.<br/>
* <br/>
* G. Cooper, E. Herskovits (1992). A Bayesian method for the induction of
* probabilistic networks from data. Machine Learning. 9(4):309-347.<br/>
* <br/>
* Works with nominal variables and no missing values only.
* <p/>
* <!-- globalinfo-end -->
*
* <!-- technical-bibtex-start --> BibTeX:
*
* <pre>
* @proceedings{Cooper1990,
* author = {G.F. Cooper and E. Herskovits},
* booktitle = {Proceedings of the Conference on Uncertainty in AI},
* pages = {86-94},
* title = {A Bayesian method for constructing Bayesian belief networks from databases},
* year = {1990}
* }
*
* @article{Cooper1992,
* author = {G. Cooper and E. Herskovits},
* journal = {Machine Learning},
* number = {4},
* pages = {309-347},
* title = {A Bayesian method for the induction of probabilistic networks from data},
* volume = {9},
* year = {1992}
* }
* </pre>
* <p/>
* <!-- technical-bibtex-end -->
*
* <!-- options-start --> Valid options are:
* <p/>
*
* <pre>
* -N
* Initial structure is empty (instead of Naive Bayes)
* </pre>
*
* <pre>
* -P <nr of parents>
* Maximum number of parents
* </pre>
*
* <pre>
* -R
* Random order.
* (default false)
* </pre>
*
* <pre>
* -mbc
* Applies a Markov Blanket correction to the network structure,
* after a network structure is learned. This ensures that all
* nodes in the network are part of the Markov blanket of the
* classifier node.
* </pre>
*
* <pre>
* -S [BAYES|MDL|ENTROPY|AIC|CROSS_CLASSIC|CROSS_BAYES]
* Score type (BAYES, BDeu, MDL, ENTROPY and AIC)
* </pre>
*
* <!-- options-end -->
*
* @author Remco Bouckaert (rrb@xm.co.nz)
* @version $Revision: 10154 $
*/
public class K2 extends LocalScoreSearchAlgorithm implements
TechnicalInformationHandler {
/** for serialization */
static final long serialVersionUID = 6176545934752116631L;
/** Holds flag to indicate ordering should be random **/
boolean m_bRandomOrder = false;
/**
* Returns an instance of a TechnicalInformation object, containing detailed
* information about the technical background of this class, e.g., paper
* reference or book this class is based on.
*
* @return the technical information about this class
*/
@Override
public TechnicalInformation getTechnicalInformation() {
    // Primary reference: the 1990 conference paper introducing the method.
    TechnicalInformation paper = new TechnicalInformation(Type.PROCEEDINGS);
    paper.setValue(Field.AUTHOR, "G.F. Cooper and E. Herskovits");
    paper.setValue(Field.YEAR, "1990");
    paper.setValue(Field.TITLE,
        "A Bayesian method for constructing Bayesian belief networks from databases");
    paper.setValue(Field.BOOKTITLE,
        "Proceedings of the Conference on Uncertainty in AI");
    paper.setValue(Field.PAGES, "86-94");

    // Additional reference: the 1992 Machine Learning journal article.
    TechnicalInformation article = paper.add(Type.ARTICLE);
    article.setValue(Field.AUTHOR, "G. Cooper and E. Herskovits");
    article.setValue(Field.YEAR, "1992");
    article.setValue(Field.TITLE,
        "A Bayesian method for the induction of probabilistic networks from data");
    article.setValue(Field.JOURNAL, "Machine Learning");
    article.setValue(Field.VOLUME, "9");
    article.setValue(Field.NUMBER, "4");
    article.setValue(Field.PAGES, "309-347");

    return paper;
}
/**
* search determines the network structure/graph of the network with the K2
* algorithm, restricted by its initial structure (which can be an empty
* graph, or a Naive Bayes graph.
*
* @param bayesNet the network
* @param instances the data to work with
* @throws Exception if something goes wrong
*/
@Override
public void search(BayesNet bayesNet, Instances instances) throws Exception {
int nOrder[] = new int[instances.numAttributes()];
nOrder[0] = instances.classIndex();
int nAttribute = 0;
for (int iOrder = 1; iOrder < instances.numAttributes(); iOrder++) {
if (nAttribute == instances.classIndex()) {
nAttribute++;
}
nOrder[iOrder] = nAttribute++;
}
if (m_bRandomOrder) {
// generate random ordering (if required)
Random random = new Random();
int iClass;
if (getInitAsNaiveBayes()) {
iClass = 0;
} else {
iClass = -1;
}
for (int iOrder = 0; iOrder < instances.numAttributes(); iOrder++) {
int iOrder2 = Math.abs(random.nextInt()) % instances.numAttributes();
if (iOrder != iClass && iOrder2 != iClass) {
int nTmp = nOrder[iOrder];
nOrder[iOrder] = nOrder[iOrder2];
nOrder[iOrder2] = nTmp;
}
}
}
// determine base scores
double[] fBaseScores = new double[instances.numAttributes()];
for (int iOrder = 0; iOrder < instances.numAttributes(); iOrder++) {
int iAttribute = nOrder[iOrder];
fBaseScores[iAttribute] = calcNodeScore(iAttribute);
}
// K2 algorithm: greedy search restricted by ordering
for (int iOrder = 1; iOrder < instances.numAttributes(); iOrder++) {
int iAttribute = nOrder[iOrder];
double fBestScore = fBaseScores[iAttribute];
boolean bProgress = (bayesNet.getParentSet(iAttribute).getNrOfParents() < getMaxNrOfParents());
while (bProgress) {
int nBestAttribute = -1;
for (int iOrder2 = 0; iOrder2 < iOrder; iOrder2++) {
int iAttribute2 = nOrder[iOrder2];
double fScore = calcScoreWithExtraParent(iAttribute, iAttribute2);
if (fScore > fBestScore) {
fBestScore = fScore;
nBestAttribute = iAttribute2;
}
}
if (nBestAttribute != -1) {
bayesNet.getParentSet(iAttribute)
.addParent(nBestAttribute, instances);
fBaseScores[iAttribute] = fBestScore;
bProgress = (bayesNet.getParentSet(iAttribute).getNrOfParents() < getMaxNrOfParents());
} else {
bProgress = false;
}
}
}
} // buildStructure
/**
* Sets the max number of parents
*
* @param nMaxNrOfParents the max number of parents
*/
public void setMaxNrOfParents(int nMaxNrOfParents) {
m_nMaxNrOfParents = nMaxNrOfParents;
}
/**
* Gets the max number of parents.
*
* @return the max number of parents
*/
public int getMaxNrOfParents() {
return m_nMaxNrOfParents;
}
/**
* Sets whether to init as naive bayes
*
* @param bInitAsNaiveBayes whether to init as naive bayes
*/
public void setInitAsNaiveBayes(boolean bInitAsNaiveBayes) {
m_bInitAsNaiveBayes = bInitAsNaiveBayes;
}
/**
* Gets whether to init as naive bayes
*
* @return whether to init as naive bayes
*/
public boolean getInitAsNaiveBayes() {
return m_bInitAsNaiveBayes;
}
/**
* Set random order flag
*
* @param bRandomOrder the random order flag
*/
public void setRandomOrder(boolean bRandomOrder) {
m_bRandomOrder = bRandomOrder;
} // SetRandomOrder
/**
* Get random order flag
*
* @return the random order flag
*/
public boolean getRandomOrder() {
return m_bRandomOrder;
} // getRandomOrder
/**
* Returns an enumeration describing the available options.
*
* @return an enumeration of all the available options.
*/
@Override
public Enumeration<Option> listOptions() {
Vector<Option> newVector = new Vector<Option>(2);
newVector.addElement(new Option(
"\tInitial structure is empty (instead of Naive Bayes)", "N", 0, "-N"));
newVector.addElement(new Option("\tMaximum number of parents", "P", 1,
"-P <nr of parents>"));
newVector.addElement(new Option("\tRandom order.\n" + "\t(default false)",
"R", 0, "-R"));
newVector.addAll(Collections.list(super.listOptions()));
return newVector.elements();
}
/**
* Parses a given list of options.
* <p/>
*
* <!-- options-start --> Valid options are:
* <p/>
*
* <pre>
* -N
* Initial structure is empty (instead of Naive Bayes)
* </pre>
*
* <pre>
* -P <nr of parents>
* Maximum number of parents
* </pre>
*
* <pre>
* -R
* Random order.
* (default false)
* </pre>
*
* <pre>
* -mbc
* Applies a Markov Blanket correction to the network structure,
* after a network structure is learned. This ensures that all
* nodes in the network are part of the Markov blanket of the
* classifier node.
* </pre>
*
* <pre>
* -S [BAYES|MDL|ENTROPY|AIC|CROSS_CLASSIC|CROSS_BAYES]
* Score type (BAYES, BDeu, MDL, ENTROPY and AIC)
* </pre>
*
* <!-- options-end -->
*
* @param options the list of options as an array of strings
* @throws Exception if an option is not supported
*/
@Override
public void setOptions(String[] options) throws Exception {
setRandomOrder(Utils.getFlag('R', options));
m_bInitAsNaiveBayes = !(Utils.getFlag('N', options));
String sMaxNrOfParents = Utils.getOption('P', options);
if (sMaxNrOfParents.length() != 0) {
setMaxNrOfParents(Integer.parseInt(sMaxNrOfParents));
} else {
setMaxNrOfParents(100000);
}
super.setOptions(options);
}
/**
* Gets the current settings of the search algorithm.
*
* @return an array of strings suitable for passing to setOptions
*/
@Override
public String[] getOptions() {
Vector<String> options = new Vector<String>();
options.add("-P");
options.add("" + m_nMaxNrOfParents);
if (!m_bInitAsNaiveBayes) {
options.add("-N");
}
if (getRandomOrder()) {
options.add("-R");
}
Collections.addAll(options, super.getOptions());
// Fill up rest with empty strings, not nulls!
return options.toArray(new String[0]);
}
/**
* This will return a string describing the search algorithm.
*
* @return The string.
*/
@Override
public String globalInfo() {
return "This Bayes Network learning algorithm uses a hill climbing algorithm "
+ "restricted by an order on the variables.\n\n"
+ "For more information see:\n\n"
+ getTechnicalInformation().toString()
+ "\n\n" + "Works with nominal variables and no missing values only.";
}
/**
* @return a string to describe the RandomOrder option.
*/
public String randomOrderTipText() {
return "When set to true, the order of the nodes in the network is random."
+ " Default random order is false and the order"
+ " of the nodes in the dataset is used."
+ " In any case, when the network was initialized as Naive Bayes Network, the"
+ " class variable is first in the ordering though.";
} // randomOrderTipText
/**
* Returns the revision string.
*
* @return the revision
*/
@Override
public String getRevision() {
return RevisionUtils.extract("$Revision: 10154 $");
}
}
| |
package org.sagebionetworks.repo.manager;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import org.sagebionetworks.repo.manager.storagelocation.StorageLocationProcessor;
import org.sagebionetworks.repo.manager.trash.TrashManager;
import org.sagebionetworks.repo.model.ACCESS_TYPE;
import org.sagebionetworks.repo.model.DatastoreException;
import org.sagebionetworks.repo.model.Entity;
import org.sagebionetworks.repo.model.EntityType;
import org.sagebionetworks.repo.model.EntityTypeUtils;
import org.sagebionetworks.repo.model.Folder;
import org.sagebionetworks.repo.model.ObjectType;
import org.sagebionetworks.repo.model.Project;
import org.sagebionetworks.repo.model.ProjectSettingsDAO;
import org.sagebionetworks.repo.model.StorageLocationDAO;
import org.sagebionetworks.repo.model.UnauthorizedException;
import org.sagebionetworks.repo.model.UserInfo;
import org.sagebionetworks.repo.model.file.UploadDestinationLocation;
import org.sagebionetworks.repo.model.file.UploadType;
import org.sagebionetworks.repo.model.jdo.KeyFactory;
import org.sagebionetworks.repo.model.principal.PrincipalAliasDAO;
import org.sagebionetworks.repo.model.project.ExternalS3StorageLocationSetting;
import org.sagebionetworks.repo.model.project.ProjectSetting;
import org.sagebionetworks.repo.model.project.ProjectSettingsType;
import org.sagebionetworks.repo.model.project.StorageLocationSetting;
import org.sagebionetworks.repo.model.project.StsStorageLocationSetting;
import org.sagebionetworks.repo.model.project.UploadDestinationListSetting;
import org.sagebionetworks.repo.transactions.WriteTransaction;
import org.sagebionetworks.repo.web.NotFoundException;
import org.sagebionetworks.util.ValidateArgument;
import org.springframework.beans.factory.annotation.Autowired;
import com.google.common.collect.ImmutableMap;
public class ProjectSettingsManagerImpl implements ProjectSettingsManager {

	private static final String EXTERNAL_STORAGE_HELP = "http://docs.synapse.org/articles/custom_storage_location.html for more information on how to create a new external upload destination.";

	/** Maximum number of storage locations that may be attached to a single project setting. */
	public static final int MAX_LOCATIONS_PER_PROJECT = 10;

	@Autowired
	private ProjectSettingsDAO projectSettingsDao;

	@Autowired
	private StorageLocationDAO storageLocationDAO;

	@Autowired
	private PrincipalAliasDAO principalAliasDAO;

	@Autowired
	private AuthorizationManager authorizationManager;

	@Autowired
	private NodeManager nodeManager;

	@Autowired
	private TrashManager trashManager;

	// Maps each concrete ProjectSetting subclass to the settings type stored in the database.
	private static final Map<Class<? extends ProjectSetting>, ProjectSettingsType> TYPE_MAP = ImmutableMap.of(
			UploadDestinationListSetting.class, ProjectSettingsType.upload
	);

	private List<StorageLocationProcessor<? extends StorageLocationSetting>> storageLocationProcessors;

	@Autowired
	public void setStorageLocationProcessors(List<StorageLocationProcessor<? extends StorageLocationSetting>> storageLocationProcessors) {
		this.storageLocationProcessors = storageLocationProcessors;
	}

	/**
	 * Fetches a project setting by ID, enforcing READ access on the project it applies to.
	 */
	@Override
	public ProjectSetting getProjectSetting(UserInfo userInfo, String id) throws DatastoreException, NotFoundException {
		ProjectSetting projectSetting = projectSettingsDao.get(id);
		if (!authorizationManager.canAccess(userInfo, projectSetting.getProjectId(), ObjectType.ENTITY, ACCESS_TYPE.READ).isAuthorized()) {
			throw new UnauthorizedException("The current user does not have READ access on the project this setting applies to.");
		}
		return projectSetting;
	}

	/**
	 * Fetches the setting of the given type attached directly to the given project, if any.
	 */
	@Override
	public Optional<ProjectSetting> getProjectSettingByProjectAndType(UserInfo userInfo, String projectId, ProjectSettingsType type)
			throws DatastoreException, NotFoundException {
		if (!authorizationManager.canAccess(userInfo, projectId, ObjectType.ENTITY, ACCESS_TYPE.READ).isAuthorized()) {
			throw new UnauthorizedException("The current user does not have READ access on the project " + projectId + ".");
		}
		return projectSettingsDao.get(projectId, type);
	}

	/**
	 * Resolves the setting that applies to a node, walking up the hierarchy (via the DAO's
	 * inheritance lookup). Returns empty when no setting applies; throws when a setting exists
	 * but is not of the expected type.
	 */
	@Override
	public <T extends ProjectSetting> Optional<T> getProjectSettingForNode(UserInfo userInfo, String nodeId, ProjectSettingsType type,
			Class<T> expectedType) throws DatastoreException, UnauthorizedException, NotFoundException {
		ProjectSetting projectSetting = null;
		String projectSettingId = projectSettingsDao.getInheritedProjectSetting(nodeId, type);
		if (projectSettingId != null) {
			// Note that get throws NotFoundException if the project setting somehow doesn't exist.
			projectSetting = projectSettingsDao.get(projectSettingId);
		}
		if (projectSetting == null) {
			// Not having a setting is normal.
			return Optional.empty();
		}
		if (!expectedType.isInstance(projectSetting)) {
			throw new IllegalArgumentException("Settings type for '" + type + "' is not of type " + expectedType.getName());
		}
		return Optional.of(expectedType.cast(projectSetting));
	}

	@Override
	public List<UploadDestinationLocation> getUploadDestinationLocations(UserInfo userInfo, List<Long> storageLocationIds)
			throws DatastoreException, NotFoundException {
		return storageLocationDAO.getUploadDestinationLocations(storageLocationIds);
	}

	/**
	 * Creates a project setting on a project or folder. Requires CREATE access, rejects
	 * overrides inside STS-enabled folder paths, and rejects enabling STS on non-empty entities.
	 */
	@Override
	@WriteTransaction
	public ProjectSetting createProjectSetting(UserInfo userInfo, ProjectSetting projectSetting)
			throws DatastoreException, NotFoundException {
		String parentId = projectSetting.getProjectId();

		// make sure the target is a project or folder
		EntityType nodeType = nodeManager.getNodeType(userInfo, parentId);
		Class<? extends Entity> nodeClass = EntityTypeUtils.getClassForType(nodeType);
		if (nodeClass != Project.class && nodeClass != Folder.class) {
			throw new IllegalArgumentException("The id is not the id of a project or folder entity");
		}

		if (!authorizationManager.canAccess(userInfo, parentId, ObjectType.ENTITY, ACCESS_TYPE.CREATE).isAuthorized()) {
			throw new UnauthorizedException("Cannot create settings for this project");
		}

		// Can't create project settings if a parent has an StsStorageLocation.
		Optional<ProjectSetting> parentSetting = getProjectSettingForNode(userInfo, parentId, ProjectSettingsType.upload,
				ProjectSetting.class);
		if (parentSetting.isPresent() && isStsStorageLocationSetting(parentSetting.get())) {
			throw new IllegalArgumentException("Can't override project settings in an STS-enabled folder path");
		}

		// Auto-fill the setting type to avoid inconsistencies in the database
		projectSetting.setSettingsType(TYPE_MAP.get(projectSetting.getClass()));
		validateProjectSetting(projectSetting, userInfo);

		// Can't add an StsStorageLocation to a non-empty entity.
		if (!isEntityEmptyWithTrash(parentId) && isStsStorageLocationSetting(projectSetting)) {
			throw new IllegalArgumentException("Can't enable STS in a non-empty folder");
		}

		String id = projectSettingsDao.create(projectSetting);
		return projectSettingsDao.get(id);
	}

	/**
	 * Updates an existing project setting. Requires UPDATE access; STS may be neither enabled
	 * nor disabled on a non-empty entity.
	 */
	@Override
	@WriteTransaction
	public void updateProjectSetting(UserInfo userInfo, ProjectSetting projectSetting) throws DatastoreException, NotFoundException {
		ValidateArgument.required(projectSetting.getId(), "The id");
		ValidateArgument.required(projectSetting.getProjectId(), "The project id");

		if (!authorizationManager.canAccess(userInfo, projectSetting.getProjectId(), ObjectType.ENTITY, ACCESS_TYPE.UPDATE).isAuthorized()) {
			throw new UnauthorizedException("Cannot update settings on this project");
		}

		// Auto-fill the setting type to avoid inconsistencies in the database
		projectSetting.setSettingsType(TYPE_MAP.get(projectSetting.getClass()));
		validateProjectSetting(projectSetting, userInfo);

		// Can't add or modify an StsStorageLocation on a non-empty entity.
		if (!isEntityEmptyWithTrash(projectSetting.getProjectId())) {
			if (isStsStorageLocationSetting(projectSetting)) {
				throw new IllegalArgumentException("Can't enable STS in a non-empty folder");
			}
			ProjectSetting oldSetting = projectSettingsDao.get(projectSetting.getId());
			if (isStsStorageLocationSetting(oldSetting)) {
				throw new IllegalArgumentException("Can't disable STS in a non-empty folder");
			}
		}

		projectSettingsDao.update(projectSetting);
	}

	/**
	 * Deletes a project setting. Requires DELETE access; an STS setting cannot be removed from
	 * a non-empty entity.
	 */
	@Override
	@WriteTransaction
	public void deleteProjectSetting(UserInfo userInfo, String id) throws DatastoreException, NotFoundException {
		// Note: projectSettingsDao.get() ensures that projectSetting is not null, or throws a NotFoundException.
		ProjectSetting projectSetting = projectSettingsDao.get(id);
		if (!authorizationManager.canAccess(userInfo, projectSetting.getProjectId(), ObjectType.ENTITY, ACCESS_TYPE.DELETE)
				.isAuthorized()) {
			throw new UnauthorizedException("Cannot delete settings from this project");
		}

		// Can't delete an StsStorageLocation on a non-empty entity.
		if (!isEntityEmptyWithTrash(projectSetting.getProjectId()) &&
				isStsStorageLocationSetting(projectSetting)) {
			throw new IllegalArgumentException("Can't disable STS in a non-empty folder");
		}

		projectSettingsDao.delete(id);
	}

	// Helper method to check that the given entity has no children (either in the node hierarchy or in the trash can).
	boolean isEntityEmptyWithTrash(String entityId) {
		return !nodeManager.doesNodeHaveChildren(entityId) && !trashManager.doesEntityHaveTrashedChildren(entityId);
	}

	/**
	 * Creates a storage location owned by the calling user. Runs all registered storage
	 * location processors before persisting.
	 */
	@SuppressWarnings("unchecked")
	@Override
	public <T extends StorageLocationSetting> T createStorageLocationSetting(UserInfo userInfo, T storageLocationSetting)
			throws DatastoreException, NotFoundException {
		ValidateArgument.required(userInfo, "The user");
		ValidateArgument.required(storageLocationSetting, "The storage location");

		this.processStorageLocation(userInfo, storageLocationSetting);

		// Default a missing UploadType to NONE so the column is never null in the database.
		if (storageLocationSetting.getUploadType() == null) {
			storageLocationSetting.setUploadType(UploadType.NONE);
		}

		storageLocationSetting.setCreatedBy(userInfo.getId());
		storageLocationSetting.setCreatedOn(new Date());

		Long storageLocationId = storageLocationDAO.create(storageLocationSetting);
		// Safe as long as the DAO round-trips the same concrete subclass that was persisted.
		return (T) storageLocationDAO.get(storageLocationId);
	}

	@Override
	public List<StorageLocationSetting> getMyStorageLocationSettings(UserInfo userInfo) throws DatastoreException, NotFoundException {
		return storageLocationDAO.getByOwner(userInfo.getId());
	}

	/**
	 * Fetches a storage location, enforcing that only its creator may read it.
	 */
	@Override
	public StorageLocationSetting getMyStorageLocationSetting(UserInfo userInfo, Long storageLocationId)
			throws DatastoreException, NotFoundException {
		ValidateArgument.required(storageLocationId, "storageLocationId");
		StorageLocationSetting setting = storageLocationDAO.get(storageLocationId);
		if (!userInfo.getId().equals(setting.getCreatedBy())) {
			throw new UnauthorizedException("Only the creator can access storage location settings");
		}
		return setting;
	}

	@Override
	public StorageLocationSetting getStorageLocationSetting(Long storageLocationId) throws DatastoreException, NotFoundException {
		if (storageLocationId == null) {
			return null;
		}
		return storageLocationDAO.get(storageLocationId);
	}

	// package private for testing only
	void validateProjectSetting(ProjectSetting setting, UserInfo currentUser) {
		ValidateArgument.required(setting.getProjectId(), "projectId");
		ValidateArgument.required(setting.getSettingsType(), "settingsType");
		if (setting instanceof UploadDestinationListSetting) {
			validateUploadDestinationListSetting((UploadDestinationListSetting) setting, currentUser);
		} else {
			ValidateArgument.failRequirement("Cannot handle project setting of type " + setting.getClass().getName());
		}
	}

	// Runs every registered processor that supports the concrete storage location class.
	@SuppressWarnings({ "rawtypes", "unchecked" })
	void processStorageLocation(UserInfo userInfo, StorageLocationSetting storageLocation) {
		for (StorageLocationProcessor processor : storageLocationProcessors) {
			Class<? extends StorageLocationSetting> clazz = storageLocation.getClass();
			if (processor.supports(clazz)) {
				processor.beforeCreate(userInfo, storageLocation);
			}
		}
	}

	private void validateUploadDestinationListSetting(UploadDestinationListSetting setting, UserInfo currentUser) {
		ValidateArgument.required(setting.getLocations(), "settings.locations");
		ValidateArgument.requirement(setting.getLocations().size() >= 1, "settings.locations must at least have one entry");
		ValidateArgument.requirement(setting.getLocations().size() <= MAX_LOCATIONS_PER_PROJECT,
				"The maximum number of settings.locations is limited to " + MAX_LOCATIONS_PER_PROJECT);
		for (Long uploadId : setting.getLocations()) {
			try {
				StorageLocationSetting storageLocationSetting = storageLocationDAO.get(uploadId);
				if (storageLocationSetting instanceof ExternalS3StorageLocationSetting) {
					// only the owner or an admin can add this setting to a project
					if (!currentUser.isAdmin() && !currentUser.getId().equals(storageLocationSetting.getCreatedBy())) {
						String ownerUsername = principalAliasDAO.getUserName(storageLocationSetting.getCreatedBy());
						throw new UnauthorizedException("Only the owner of the external S3 upload destination (user " + ownerUsername
								+ ") can add this upload destination to a project. Either ask that user to perform this operation or follow the steps to create a new external s3 upload destination (see "
								+ EXTERNAL_STORAGE_HELP);
					}
				}

				// STS storage locations have additional restrictions.
				if (storageLocationSetting instanceof StsStorageLocationSetting
						&& Boolean.TRUE.equals(((StsStorageLocationSetting) storageLocationSetting).getStsEnabled())) {
					// Can only be applied to folders.
					EntityType nodeType = nodeManager.getNodeType(currentUser, setting.getProjectId());
					if (EntityType.folder != nodeType) {
						throw new IllegalArgumentException("Can only enable STS on a folder");
					}

					// Cannot be applied with other storage locations.
					if (setting.getLocations().size() != 1) {
						throw new IllegalArgumentException("An STS-enabled folder cannot add other upload " + "destinations");
					}
				}
			} catch (NotFoundException e) {
				ValidateArgument.failRequirement("uploadId " + uploadId + " is not a valid upload destination location");
			}
		}
	}

	@Override
	public boolean isStsStorageLocationSetting(StorageLocationSetting storageLocationSetting) {
		return storageLocationSetting instanceof StsStorageLocationSetting &&
				Boolean.TRUE.equals(((StsStorageLocationSetting) storageLocationSetting).getStsEnabled());
	}

	@Override
	public boolean isStsStorageLocationSetting(ProjectSetting projectSetting) {
		if (!(projectSetting instanceof UploadDestinationListSetting)) {
			// Impossible code path, but add this check here to future-proof this against ClassCastExceptions.
			return false;
		}
		// Short-cut: Only check the first Storage Location ID. Entities with an StsStorageLocation can't have other
		// storage locations.
		List<Long> storageLocationIdList = ((UploadDestinationListSetting) projectSetting).getLocations();
		if (storageLocationIdList == null || storageLocationIdList.isEmpty()) {
			// Defensive: validation guarantees at least one location on write, but don't throw
			// IndexOutOfBoundsException on a malformed persisted setting.
			return false;
		}
		long storageLocationId = storageLocationIdList.get(0);
		try {
			StorageLocationSetting storageLocationSetting = storageLocationDAO.get(storageLocationId);
			return isStsStorageLocationSetting(storageLocationSetting);
		} catch (NotFoundException e) {
			// If the storage location somehow doesn't exist, then it's not an StsStorageLocation.
			return false;
		}
	}

	/**
	 *
	 * @param entityId
	 * @return true iff entityId is a descendant of an STS Enabled folder and not an STS Folder itself
	 */
	@Override
	public boolean entityIsWithinSTSEnabledFolder(String entityId) {
		// Note that even though the method is called getProjectId(), it can actually refer to either a Project or a
		// Folder. userInfo is not used by getProjectSettingForNode, so null is safe here.
		Optional<UploadDestinationListSetting> projectSetting = getProjectSettingForNode(
				null, entityId, ProjectSettingsType.upload, UploadDestinationListSetting.class);
		if (projectSetting.isPresent() && !KeyFactory.equals(projectSetting.get().getProjectId(), entityId)) {
			if (isStsStorageLocationSetting(projectSetting.get())) {
				return true;
			}
		}
		return false;
	}
}
| |
/*
* Copyright 2016 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.datastore.snippets;
import com.google.cloud.Timestamp;
import com.google.cloud.datastore.Datastore;
import com.google.cloud.datastore.DatastoreOptions;
import com.google.cloud.datastore.Entity;
import com.google.cloud.datastore.Key;
import com.google.cloud.datastore.KeyFactory;
import com.google.cloud.datastore.Query;
import com.google.cloud.datastore.StringValue;
import com.google.cloud.datastore.StructuredQuery.OrderBy;
import com.google.cloud.datastore.Transaction;
import java.io.Console;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
/**
* A simple Task List application demonstrating how to connect to Cloud Datastore, create, modify,
* delete, and query entities.
*/
public class TaskList {

  // [START datastore_build_service]
  // Create an authorized Datastore service using Application Default Credentials.
  private final Datastore datastore = DatastoreOptions.getDefaultInstance().getService();

  // Create a Key factory to construct keys associated with this project.
  private final KeyFactory keyFactory = datastore.newKeyFactory().setKind("Task");
  // [END datastore_build_service]

  // [START datastore_add_entity]
  /**
   * Adds a task entity to the Datastore.
   *
   * @param description The task description
   * @return The {@link Key} of the entity
   * @throws DatastoreException if the ID allocation or put fails
   */
  Key addTask(String description) {
    Key key = datastore.allocateId(keyFactory.newKey());
    Entity task =
        Entity.newBuilder(key)
            .set(
                "description",
                // Descriptions are free text; exclude from indexes to avoid the 1500-byte
                // indexed-string limit.
                StringValue.newBuilder(description).setExcludeFromIndexes(true).build())
            .set("created", Timestamp.now())
            .set("done", false)
            .build();
    datastore.put(task);
    return key;
  }
  // [END datastore_add_entity]

  // [START datastore_update_entity]
  /**
   * Marks a task entity as done.
   *
   * @param id The ID of the task entity as given by {@link Key#id()}
   * @return true if the task was found, false if not
   * @throws DatastoreException if the transaction fails
   */
  boolean markDone(long id) {
    Transaction transaction = datastore.newTransaction();
    try {
      Entity task = transaction.get(keyFactory.newKey(id));
      if (task != null) {
        transaction.put(Entity.newBuilder(task).set("done", true).build());
      }
      transaction.commit();
      return task != null;
    } finally {
      // Roll back if commit was never reached (e.g. an exception was thrown above).
      if (transaction.isActive()) {
        transaction.rollback();
      }
    }
  }
  // [END datastore_update_entity]

  // [START datastore_retrieve_entities]
  /**
   * Returns a list of all task entities in ascending order of creation time.
   *
   * @throws DatastoreException if the query fails
   */
  Iterator<Entity> listTasks() {
    Query<Entity> query =
        Query.newEntityQueryBuilder().setKind("Task").setOrderBy(OrderBy.asc("created")).build();
    return datastore.run(query);
  }
  // [END datastore_retrieve_entities]

  // [START datastore_delete_entity]
  /**
   * Deletes a task entity.
   *
   * @param id The ID of the task entity as given by {@link Key#id()}
   * @throws DatastoreException if the delete fails
   */
  void deleteTask(long id) {
    datastore.delete(keyFactory.newKey(id));
  }
  // [END datastore_delete_entity]

  /**
   * Converts a list of task entities to a list of formatted task strings.
   *
   * @param tasks An iterator over task entities
   * @return A list of tasks strings, one per entity
   */
  static List<String> formatTasks(Iterator<Entity> tasks) {
    List<String> strings = new ArrayList<>();
    while (tasks.hasNext()) {
      Entity task = tasks.next();
      if (task.getBoolean("done")) {
        strings.add(
            String.format("%d : %s (done)", task.getKey().getId(), task.getString("description")));
      } else {
        strings.add(
            String.format(
                "%d : %s (created %s)",
                task.getKey().getId(),
                task.getString("description"),
                task.getTimestamp("created")));
      }
    }
    return strings;
  }

  /**
   * Handles a single command.
   *
   * @param commandLine A line of input provided by the user
   * @throws IllegalArgumentException if the command is unknown or its arguments are malformed
   *     (including a non-numeric task ID, since {@link NumberFormatException} is a subclass)
   */
  void handleCommandLine(String commandLine) {
    String[] args = commandLine.split("\\s+");

    if (args.length < 1) {
      throw new IllegalArgumentException("not enough args");
    }

    String command = args[0];
    switch (command) {
      case "new":
        // Everything after the first whitespace token is interpreted to be the description.
        args = commandLine.split("\\s+", 2);
        if (args.length != 2) {
          throw new IllegalArgumentException("missing description");
        }
        // Set created to now() and done to false.
        addTask(args[1]);
        System.out.println("task added");
        break;
      case "done":
        assertArgsLength(args, 2);
        long id = Long.parseLong(args[1]);
        if (markDone(id)) {
          System.out.println("task marked done");
        } else {
          System.out.printf("did not find a Task entity with ID %d%n", id);
        }
        break;
      case "list":
        assertArgsLength(args, 1);
        List<String> tasks = formatTasks(listTasks());
        System.out.printf("found %d tasks:%n", tasks.size());
        System.out.println("task ID : description");
        System.out.println("---------------------");
        for (String taskString : tasks) {
          System.out.println(taskString);
        }
        break;
      case "delete":
        assertArgsLength(args, 2);
        deleteTask(Long.parseLong(args[1]));
        System.out.println("task deleted (if it existed)");
        break;
      default:
        throw new IllegalArgumentException("unrecognized command: " + command);
    }
  }

  // Throws IllegalArgumentException unless args has exactly the expected length.
  private void assertArgsLength(String[] args, int expectedLength) {
    if (args.length != expectedLength) {
      throw new IllegalArgumentException(
          String.format("expected exactly %d arg(s), found %d", expectedLength, args.length));
    }
  }

  /**
   * Exercises the methods defined in this class.
   *
   * <p>Assumes that you are authenticated using the Google Cloud SDK (using {@code gcloud auth
   * application-default login}).
   */
  public static void main(String[] args) throws Exception {
    TaskList taskList = new TaskList();
    System.out.println("Cloud Datastore Task List");
    System.out.println();
    printUsage();
    // System.console() returns null when no interactive terminal is attached (input piped,
    // running inside an IDE, etc.); calling readLine on it would throw a NullPointerException.
    Console console = System.console();
    if (console == null) {
      System.err.println("This program requires an interactive console.");
      System.exit(1);
    }
    while (true) {
      String commandLine = console.readLine("> ");
      // readLine returns null on end-of-stream (e.g. Ctrl-D); treat it like a blank line.
      if (commandLine == null || commandLine.trim().isEmpty()) {
        break;
      }
      try {
        taskList.handleCommandLine(commandLine);
      } catch (IllegalArgumentException e) {
        System.out.println(e.getMessage());
        printUsage();
      }
    }
    System.out.println("exiting");
    System.exit(0);
  }

  // Prints the interactive command reference.
  private static void printUsage() {
    System.out.println("Usage:");
    System.out.println();
    System.out.println("  new <description>  Adds a task with a description <description>");
    System.out.println("  done <task-id>     Marks a task as done");
    System.out.println("  list               Lists all tasks by creation time");
    System.out.println("  delete <task-id>   Deletes a task");
    System.out.println();
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.api.java.sampling;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import org.apache.commons.math3.stat.inference.KolmogorovSmirnovTest;
import org.apache.flink.testutils.junit.RetryOnFailure;
import org.apache.flink.testutils.junit.RetryRule;
import org.apache.flink.util.Preconditions;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
/**
 * This test suite tries to verify that all the random samplers work as expected, mainly focusing on:
* <ul>
 * <li>Does the sampled result fit the input parameters? We check parameters like sample fraction, sample size,
 * sampling with/without replacement, and so on.</li>
 * <li>Is the sampled result randomly selected? We verify this by measuring how evenly it is distributed over the
 * source data. We run a Kolmogorov-Smirnov (KS) test between the random samplers and default reference samplers
 * which are distributed well-proportioned over the source data. If a random sampler selects elements randomly
 * from the source, it will be distributed well-proportioned over the source data as well, and the KS test will
 * fail to strongly reject the null hypothesis that the distributions of sampling gaps are the same.
 * </li>
* </ul>
*
* @see <a href="https://en.wikipedia.org/wiki/Kolmogorov%E2%80%93Smirnov_test">Kolmogorov Smirnov test</a>
*/
public class RandomSamplerTest {

    /** Number of elements in the shared source data set: 0.0, 1.0, ..., 9999.0. */
    private static final int SOURCE_SIZE = 10000;

    /** Number of partitions used by the distributed-sampler tests. */
    private static final int DEFAULT_PARTITION_NUMBER = 10;

    private static final KolmogorovSmirnovTest ksTest = new KolmogorovSmirnovTest();

    private static final List<Double> source = new ArrayList<Double>(SOURCE_SIZE);

    @Rule
    public final RetryRule retryRule = new RetryRule();

    @SuppressWarnings({"unchecked", "rawtypes"})
    private final List<Double>[] sourcePartitions = new List[DEFAULT_PARTITION_NUMBER];

    @BeforeClass
    public static void init() {
        // Initiate the source data set.
        for (int i = 0; i < SOURCE_SIZE; i++) {
            source.add((double) i);
        }
    }

    /** Splits the source data round-robin into {@link #DEFAULT_PARTITION_NUMBER} partitions. */
    private void initSourcePartition() {
        for (int i = 0; i < DEFAULT_PARTITION_NUMBER; i++) {
            sourcePartitions[i] = new ArrayList<Double>((int) Math.ceil((double) SOURCE_SIZE / DEFAULT_PARTITION_NUMBER));
        }
        for (int i = 0; i < SOURCE_SIZE; i++) {
            int index = i % DEFAULT_PARTITION_NUMBER;
            sourcePartitions[index].add((double) i);
        }
    }

    @Test(expected = java.lang.IllegalArgumentException.class)
    public void testBernoulliSamplerWithUnexpectedFraction1() {
        verifySamplerFraction(-1, false);
    }

    @Test(expected = java.lang.IllegalArgumentException.class)
    public void testBernoulliSamplerWithUnexpectedFraction2() {
        // Bernoulli sampling is without replacement, so a fraction > 1 is invalid.
        verifySamplerFraction(2, false);
    }

    @Test
    @RetryOnFailure(times = 3)
    public void testBernoulliSamplerFraction() {
        verifySamplerFraction(0.01, false);
        verifySamplerFraction(0.05, false);
        verifySamplerFraction(0.1, false);
        verifySamplerFraction(0.3, false);
        verifySamplerFraction(0.5, false);
        verifySamplerFraction(0.854, false);
        verifySamplerFraction(0.99, false);
    }

    @Test
    @RetryOnFailure(times = 3)
    public void testBernoulliSamplerDuplicateElements() {
        verifyRandomSamplerDuplicateElements(new BernoulliSampler<Double>(0.01));
        verifyRandomSamplerDuplicateElements(new BernoulliSampler<Double>(0.1));
        verifyRandomSamplerDuplicateElements(new BernoulliSampler<Double>(0.5));
    }

    @Test(expected = java.lang.IllegalArgumentException.class)
    public void testPoissonSamplerWithUnexpectedFraction1() {
        verifySamplerFraction(-1, true);
    }

    @Test
    @RetryOnFailure(times = 3)
    public void testPoissonSamplerFraction() {
        verifySamplerFraction(0.01, true);
        verifySamplerFraction(0.05, true);
        verifySamplerFraction(0.1, true);
        verifySamplerFraction(0.5, true);
        verifySamplerFraction(0.854, true);
        verifySamplerFraction(0.99, true);
        // Poisson sampling is with replacement, so fractions above 1 are legal.
        verifySamplerFraction(1.5, true);
    }

    @Test(expected = java.lang.IllegalArgumentException.class)
    public void testReservoirSamplerUnexpectedSize1() {
        verifySamplerFixedSampleSize(-1, true);
    }

    @Test(expected = java.lang.IllegalArgumentException.class)
    public void testReservoirSamplerUnexpectedSize2() {
        verifySamplerFixedSampleSize(-1, false);
    }

    @Test
    @RetryOnFailure(times = 3)
    public void testBernoulliSamplerDistribution() {
        verifyBernoulliSampler(0.01d);
        verifyBernoulliSampler(0.05d);
        verifyBernoulliSampler(0.1d);
        verifyBernoulliSampler(0.5d);
    }

    @Test
    @RetryOnFailure(times = 3)
    public void testPoissonSamplerDistribution() {
        verifyPoissonSampler(0.01d);
        verifyPoissonSampler(0.05d);
        verifyPoissonSampler(0.1d);
        verifyPoissonSampler(0.5d);
    }

    @Test
    @RetryOnFailure(times = 3)
    public void testReservoirSamplerSampledSize() {
        verifySamplerFixedSampleSize(1, true);
        verifySamplerFixedSampleSize(10, true);
        verifySamplerFixedSampleSize(100, true);
        verifySamplerFixedSampleSize(1234, true);
        verifySamplerFixedSampleSize(9999, true);
        // With replacement the requested size may exceed the source size.
        verifySamplerFixedSampleSize(20000, true);
        verifySamplerFixedSampleSize(1, false);
        verifySamplerFixedSampleSize(10, false);
        verifySamplerFixedSampleSize(100, false);
        verifySamplerFixedSampleSize(1234, false);
        verifySamplerFixedSampleSize(9999, false);
    }

    @Test
    @RetryOnFailure(times = 3)
    public void testReservoirSamplerSampledSize2() {
        // Without replacement the output is capped at the source size even when more is requested.
        RandomSampler<Double> sampler = new ReservoirSamplerWithoutReplacement<Double>(20000);
        Iterator<Double> sampled = sampler.sample(source.iterator());
        assertEquals("ReservoirSamplerWithoutReplacement sampled output size should not exceed the source size.",
            SOURCE_SIZE, getSize(sampled));
    }

    @Test
    @RetryOnFailure(times = 3)
    public void testReservoirSamplerDuplicateElements() {
        verifyRandomSamplerDuplicateElements(new ReservoirSamplerWithoutReplacement<Double>(100));
        verifyRandomSamplerDuplicateElements(new ReservoirSamplerWithoutReplacement<Double>(1000));
        verifyRandomSamplerDuplicateElements(new ReservoirSamplerWithoutReplacement<Double>(5000));
    }

    @Test
    @RetryOnFailure(times = 3)
    public void testReservoirSamplerWithoutReplacement() {
        verifyReservoirSamplerWithoutReplacement(100, false);
        verifyReservoirSamplerWithoutReplacement(500, false);
        verifyReservoirSamplerWithoutReplacement(1000, false);
        verifyReservoirSamplerWithoutReplacement(5000, false);
    }

    @Test
    @RetryOnFailure(times = 3)
    public void testReservoirSamplerWithReplacement() {
        verifyReservoirSamplerWithReplacement(100, false);
        verifyReservoirSamplerWithReplacement(500, false);
        verifyReservoirSamplerWithReplacement(1000, false);
        verifyReservoirSamplerWithReplacement(5000, false);
    }

    @Test
    @RetryOnFailure(times = 3)
    public void testReservoirSamplerWithMultiSourcePartitions1() {
        initSourcePartition();
        verifyReservoirSamplerWithoutReplacement(100, true);
        verifyReservoirSamplerWithoutReplacement(500, true);
        verifyReservoirSamplerWithoutReplacement(1000, true);
        verifyReservoirSamplerWithoutReplacement(5000, true);
    }

    @Test
    @RetryOnFailure(times = 3)
    public void testReservoirSamplerWithMultiSourcePartitions2() {
        initSourcePartition();
        verifyReservoirSamplerWithReplacement(100, true);
        verifyReservoirSamplerWithReplacement(500, true);
        verifyReservoirSamplerWithReplacement(1000, true);
        verifyReservoirSamplerWithReplacement(5000, true);
    }

    /*
     * Sample with fixed size, verify whether the sampled result size equals to input size.
     */
    private void verifySamplerFixedSampleSize(int numSample, boolean withReplacement) {
        RandomSampler<Double> sampler;
        if (withReplacement) {
            sampler = new ReservoirSamplerWithReplacement<Double>(numSample);
        } else {
            sampler = new ReservoirSamplerWithoutReplacement<Double>(numSample);
        }
        Iterator<Double> sampled = sampler.sample(source.iterator());
        assertEquals(numSample, getSize(sampled));
    }

    /*
     * Sample with fraction, and verify whether the sampled result close to input fraction.
     */
    private void verifySamplerFraction(double fraction, boolean withReplacement) {
        RandomSampler<Double> sampler;
        if (withReplacement) {
            sampler = new PoissonSampler<Double>(fraction);
        } else {
            sampler = new BernoulliSampler<Double>(fraction);
        }
        // Take 20 samples, and use the average result size for the comparison below to reduce variance.
        int totalSampledSize = 0;
        double sampleCount = 20;
        for (int i = 0; i < sampleCount; i++) {
            totalSampledSize += getSize(sampler.sample(source.iterator()));
        }
        double resultFraction = totalSampledSize / ((double) SOURCE_SIZE * sampleCount);
        // Accept up to 20% relative deviation from the requested fraction.
        assertTrue(String.format("expected fraction: %f, result fraction: %f", fraction, resultFraction),
            Math.abs((resultFraction - fraction) / fraction) < 0.2);
    }

    /*
     * Test sampler without replacement, and verify that there should not exist any duplicate element in sampled result.
     */
    private void verifyRandomSamplerDuplicateElements(final RandomSampler<Double> sampler) {
        List<Double> list = Lists.newLinkedList(new Iterable<Double>() {
            @Override
            public Iterator<Double> iterator() {
                return sampler.sample(source.iterator());
            }
        });
        Set<Double> set = Sets.newHashSet(list);
        // If the list and the de-duplicated set have the same size, no element was sampled twice.
        assertEquals("There should not have duplicate element for sampler without replacement.",
            list.size(), set.size());
    }

    /** Drains the iterator and returns the number of elements it produced. */
    private int getSize(Iterator<?> iterator) {
        int size = 0;
        while (iterator.hasNext()) {
            iterator.next();
            size++;
        }
        return size;
    }

    private void verifyBernoulliSampler(double fraction) {
        BernoulliSampler<Double> sampler = new BernoulliSampler<Double>(fraction);
        verifyRandomSamplerWithFraction(fraction, sampler, true);
        verifyRandomSamplerWithFraction(fraction, sampler, false);
    }

    private void verifyPoissonSampler(double fraction) {
        PoissonSampler<Double> sampler = new PoissonSampler<Double>(fraction);
        verifyRandomSamplerWithFraction(fraction, sampler, true);
        verifyRandomSamplerWithFraction(fraction, sampler, false);
    }

    private void verifyReservoirSamplerWithReplacement(int numSamplers, boolean sampleOnPartitions) {
        ReservoirSamplerWithReplacement<Double> sampler = new ReservoirSamplerWithReplacement<Double>(numSamplers);
        verifyRandomSamplerWithSampleSize(numSamplers, sampler, true, sampleOnPartitions);
        verifyRandomSamplerWithSampleSize(numSamplers, sampler, false, sampleOnPartitions);
    }

    private void verifyReservoirSamplerWithoutReplacement(int numSamplers, boolean sampleOnPartitions) {
        ReservoirSamplerWithoutReplacement<Double> sampler = new ReservoirSamplerWithoutReplacement<Double>(numSamplers);
        verifyRandomSamplerWithSampleSize(numSamplers, sampler, true, sampleOnPartitions);
        verifyRandomSamplerWithSampleSize(numSamplers, sampler, false, sampleOnPartitions);
    }

    /*
     * Verify whether random sampler sample with fraction from source data randomly. There are two default sample, one is
     * sampled from source data with certain interval, the other is sampled only from the first half region of source data,
     * If random sampler select elements randomly from source, it would distributed well-proportioned on source data as well,
     * so the K-S Test result would accept the first one, while reject the second one.
     */
    private void verifyRandomSamplerWithFraction(double fraction, RandomSampler<Double> sampler, boolean withDefaultSampler) {
        double[] baseSample;
        if (withDefaultSampler) {
            baseSample = getDefaultSampler(fraction);
        } else {
            baseSample = getWrongSampler(fraction);
        }
        verifyKSTest(sampler, baseSample, withDefaultSampler);
    }

    /*
     * Verify whether random sampler sample with fixed size from source data randomly. There are two default sample, one is
     * sampled from source data with certain interval, the other is sampled only from the first half region of source data,
     * If random sampler select elements randomly from source, it would distributed well-proportioned on source data as well,
     * so the K-S Test result would accept the first one, while reject the second one.
     */
    private void verifyRandomSamplerWithSampleSize(int sampleSize, RandomSampler<Double> sampler, boolean withDefaultSampler, boolean sampleWithPartitions) {
        double[] baseSample;
        if (withDefaultSampler) {
            baseSample = getDefaultSampler(sampleSize);
        } else {
            baseSample = getWrongSampler(sampleSize);
        }
        verifyKSTest(sampler, baseSample, withDefaultSampler, sampleWithPartitions);
    }

    private void verifyKSTest(RandomSampler<Double> sampler, double[] defaultSampler, boolean expectSuccess) {
        verifyKSTest(sampler, defaultSampler, expectSuccess, false);
    }

    private void verifyKSTest(RandomSampler<Double> sampler, double[] defaultSampler, boolean expectSuccess, boolean sampleOnPartitions) {
        double[] sampled = getSampledOutput(sampler, sampleOnPartitions);
        // kolmogorovSmirnovStatistic returns the KS D statistic (max distance between the two
        // empirical CDFs), NOT a p-value; it is compared against the critical value below.
        double ksStatistic = ksTest.kolmogorovSmirnovStatistic(sampled, defaultSampler);
        double criticalValue = getDValue(sampled.length, defaultSampler.length);
        if (expectSuccess) {
            assertTrue(String.format("KS test statistic (%f) should not exceed the critical value (%f)", ksStatistic, criticalValue),
                ksStatistic <= criticalValue);
        } else {
            assertTrue(String.format("KS test statistic (%f) should exceed the critical value (%f)", ksStatistic, criticalValue),
                ksStatistic > criticalValue);
        }
    }

    /** Runs the sampler (optionally via the partition/coordinator path) and returns the sorted output. */
    private double[] getSampledOutput(RandomSampler<Double> sampler, boolean sampleOnPartitions) {
        Iterator<Double> sampled;
        if (sampleOnPartitions) {
            DistributedRandomSampler<Double> reservoirRandomSampler = (DistributedRandomSampler<Double>) sampler;
            List<IntermediateSampleData<Double>> intermediateResult = Lists.newLinkedList();
            for (int i = 0; i < DEFAULT_PARTITION_NUMBER; i++) {
                Iterator<IntermediateSampleData<Double>> partialIntermediateResult = reservoirRandomSampler.sampleInPartition(sourcePartitions[i].iterator());
                while (partialIntermediateResult.hasNext()) {
                    intermediateResult.add(partialIntermediateResult.next());
                }
            }
            sampled = reservoirRandomSampler.sampleInCoordinator(intermediateResult.iterator());
        } else {
            sampled = sampler.sample(source.iterator());
        }
        List<Double> list = Lists.newArrayList();
        while (sampled.hasNext()) {
            list.add(sampled.next());
        }
        return transferFromListToArrayWithOrder(list);
    }

    /*
     * Some sample result may not order by the input sequence, we should make it in order to do K-S test.
     */
    private double[] transferFromListToArrayWithOrder(List<Double> list) {
        Collections.sort(list);
        double[] result = new double[list.size()];
        for (int i = 0; i < list.size(); i++) {
            result[i] = list.get(i);
        }
        return result;
    }

    /** Builds an evenly-spaced reference sample for the given fraction. */
    private double[] getDefaultSampler(double fraction) {
        Preconditions.checkArgument(fraction > 0, "Sample fraction should be positive.");
        int size = (int) (SOURCE_SIZE * fraction);
        double step = 1 / fraction;
        double[] defaultSampler = new double[size];
        for (int i = 0; i < size; i++) {
            defaultSampler[i] = Math.round(step * i);
        }
        return defaultSampler;
    }

    /** Builds an evenly-spaced reference sample of the given fixed size. */
    private double[] getDefaultSampler(int fixSize) {
        Preconditions.checkArgument(fixSize > 0, "Sample size should be positive.");
        double step = SOURCE_SIZE / (double) fixSize;
        double[] defaultSampler = new double[fixSize];
        for (int i = 0; i < fixSize; i++) {
            defaultSampler[i] = Math.round(step * i);
        }
        return defaultSampler;
    }

    /*
     * Build a failed sample distribution which only contains elements in the first half of source data.
     */
    private double[] getWrongSampler(double fraction) {
        Preconditions.checkArgument(fraction > 0, "Sample fraction should be positive.");
        int size = (int) (SOURCE_SIZE * fraction);
        int halfSourceSize = SOURCE_SIZE / 2;
        double[] wrongSampler = new double[size];
        for (int i = 0; i < size; i++) {
            wrongSampler[i] = (double) i % halfSourceSize;
        }
        return wrongSampler;
    }

    /*
     * Build a failed sample distribution which only contains elements in the first half of source data.
     */
    private double[] getWrongSampler(int fixSize) {
        Preconditions.checkArgument(fixSize > 0, "Sample size should be positive.");
        int halfSourceSize = SOURCE_SIZE / 2;
        double[] wrongSampler = new double[fixSize];
        for (int i = 0; i < fixSize; i++) {
            wrongSampler[i] = (double) i % halfSourceSize;
        }
        return wrongSampler;
    }

    /*
     * Calculate the critical D value of the K-S test for p-value 0.001; m and n are the sample sizes.
     */
    private double getDValue(int m, int n) {
        Preconditions.checkArgument(m > 0, "input sample size should be positive.");
        Preconditions.checkArgument(n > 0, "input sample size should be positive.");
        double first = (double) m;
        double second = (double) n;
        return 1.95 * Math.sqrt((first + second) / (first * second));
    }
}
| |
package fr.simon.marquis.preferencesmanager.ui;
/*
* Copyright (C) 2014 Chris Banes
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import android.animation.Animator;
import android.animation.AnimatorListenerAdapter;
import android.content.Context;
import android.content.res.TypedArray;
import android.text.Editable;
import android.text.TextUtils;
import android.text.TextWatcher;
import android.util.AttributeSet;
import android.util.TypedValue;
import android.view.Gravity;
import android.view.View;
import android.view.ViewGroup;
import android.widget.EditText;
import android.widget.FrameLayout;
import android.widget.TextView;
import fr.simon.marquis.preferencesmanager.R;
/**
 * Layout which wraps an {@link android.widget.EditText} to show a floating label when the hint is hidden
 * due to the user inputting text.
*
* @see <a href="https://dribbble.com/shots/1254439--GIF-Mobile-Form-Interaction">Matt D. Smith on Dribble</a>
* @see <a href="http://bradfrostweb.com/blog/post/float-label-pattern/">Brad Frost's blog post</a>
*/
public final class FloatLabelLayout extends FrameLayout {

    /** Duration (ms) of the show/hide label fade+slide animations. */
    private static final long ANIMATION_DURATION = 150;

    /** Default horizontal padding of the label, in dips, used when no attribute is supplied. */
    private static final float DEFAULT_PADDING_LEFT_RIGHT_DP = 12f;

    // The single EditText hosted by this layout; set via addView().
    private EditText mEditText;
    // The floating label shown above the EditText once it contains text.
    private TextView mLabel;

    public FloatLabelLayout(Context context) {
        this(context, null);
    }

    public FloatLabelLayout(Context context, AttributeSet attrs) {
        this(context, attrs, 0);
    }

    public FloatLabelLayout(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
        // Read the side padding and text appearance from the custom styleable attributes.
        final TypedArray a = context
                .obtainStyledAttributes(attrs, R.styleable.FloatLabelLayout);
        final int sidePadding = a.getDimensionPixelSize(
                R.styleable.FloatLabelLayout_floatLabelSidePadding,
                dipsToPix(DEFAULT_PADDING_LEFT_RIGHT_DP));
        // The label starts INVISIBLE (not GONE) so it still occupies layout space;
        // it is faded in by showLabel() once the EditText has text.
        mLabel = new TextView(context);
        mLabel.setPadding(sidePadding, 0, sidePadding, 0);
        mLabel.setVisibility(INVISIBLE);
        mLabel.setTextAppearance(context,
                a.getResourceId(R.styleable.FloatLabelLayout_floatLabelTextAppearance,
                        android.R.style.TextAppearance_Small)
        );
        addView(mLabel, LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT);
        a.recycle();
    }

    @Override
    public final void addView(View child, int index, ViewGroup.LayoutParams params) {
        if (child instanceof EditText) {
            // If we already have an EditText, throw an exception
            if (mEditText != null) {
                throw new IllegalArgumentException("We already have an EditText, can only have one");
            }
            // Update the layout params so that the EditText is at the bottom, with enough top
            // margin to show the label
            final LayoutParams lp = new LayoutParams(params);
            lp.gravity = Gravity.BOTTOM;
            lp.topMargin = (int) mLabel.getTextSize();
            params = lp;
            setEditText((EditText) child);
        }
        // Carry on adding the View...
        super.addView(child, index, params);
    }

    /**
     * Wires up the hosted EditText: watches its text to toggle the label and
     * mirrors its focus state onto the label. Called once from addView().
     */
    private void setEditText(EditText editText) {
        mEditText = editText;
        // Add a TextWatcher so that we know when the text input has changed
        mEditText.addTextChangedListener(new TextWatcher() {

            @Override
            public void afterTextChanged(Editable s) {
                if (TextUtils.isEmpty(s)) {
                    // The text is empty, so hide the label if it is visible
                    if (mLabel.getVisibility() == View.VISIBLE) {
                        hideLabel();
                    }
                } else {
                    // The text is not empty, so show the label if it is not visible
                    if (mLabel.getVisibility() != View.VISIBLE) {
                        showLabel();
                    }
                }
            }

            @Override
            public void beforeTextChanged(CharSequence s, int start, int count, int after) {
            }

            @Override
            public void onTextChanged(CharSequence s, int start, int before, int count) {
            }
        });
        // Add focus listener to the EditText so that we can notify the label that it is activated.
        // Allows the use of a ColorStateList for the text color on the label
        // NOTE(review): this replaces any OnFocusChangeListener already set on the EditText — confirm callers don't set one.
        mEditText.setOnFocusChangeListener(new OnFocusChangeListener() {
            @Override
            public void onFocusChange(View view, boolean focused) {
                mLabel.setActivated(focused);
            }
        });
        // The label text mirrors the EditText's hint.
        mLabel.setText(mEditText.getHint());
    }

    /**
     * @return the {@link android.widget.EditText} text input
     */
    public EditText getEditText() {
        return mEditText;
    }

    /**
     * @return the {@link android.widget.TextView} label
     */
    public TextView getLabel() {
        return mLabel;
    }

    /**
     * Show the label using an animation
     */
    private void showLabel() {
        // Fade in while sliding up from the label's own height to its resting position.
        mLabel.setVisibility(View.VISIBLE);
        mLabel.setAlpha(0f);
        mLabel.setTranslationY(mLabel.getHeight());
        // setListener(null) clears any listener left over from a previous hideLabel() animation.
        mLabel.animate()
                .alpha(1f)
                .translationY(0f)
                .setDuration(ANIMATION_DURATION)
                .setListener(null).start();
    }

    /**
     * Hide the label using an animation
     */
    private void hideLabel() {
        // Fade out while sliding down, then set GONE once the animation completes.
        mLabel.setAlpha(1f);
        mLabel.setTranslationY(0f);
        mLabel.animate()
                .alpha(0f)
                .translationY(mLabel.getHeight())
                .setDuration(ANIMATION_DURATION)
                .setListener(new AnimatorListenerAdapter() {
                    @Override
                    public void onAnimationEnd(Animator animation) {
                        mLabel.setVisibility(View.GONE);
                    }
                }).start();
    }

    /**
     * Helper method to convert dips to pixels.
     */
    private int dipsToPix(float dps) {
        return (int) TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, dps,
                getResources().getDisplayMetrics());
    }
}
| |
/*
* Copyright (C) 2016 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.trilead.ssh2;
import com.googlecode.android_scripting.Log;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
/**
* A <code>StreamGobbler</code> is an InputStream that uses an internal worker thread to constantly
* consume input from another InputStream. It uses a buffer to store the consumed data. The buffer
* size is automatically adjusted, if needed.
* <p>
* This class is sometimes very convenient - if you wrap a session's STDOUT and STDERR InputStreams
* with instances of this class, then you don't have to bother about the shared window of STDOUT and
* STDERR in the low level SSH-2 protocol, since all arriving data will be immediatelly consumed by
* the worker threads. Also, as a side effect, the streams will be buffered (e.g., single byte
* read() operations are faster).
* <p>
* Other SSH for Java libraries include this functionality by default in their STDOUT and STDERR
* InputStream implementations, however, please be aware that this approach has also a downside:
* <p>
* If you do not call the StreamGobbler's <code>read()</code> method often enough and the peer is
* constantly sending huge amounts of data, then you will sooner or later encounter a low memory
* situation due to the aggregated data (well, it also depends on the Java heap size). Joe Average
* will like this class anyway - a paranoid programmer would never use such an approach.
* <p>
* The term "StreamGobbler" was taken from an article called "When Runtime.exec() won't", see
* http://www.javaworld.com/javaworld/jw-12-2000/jw-1229-traps.html.
*
* @author Christian Plattner, plattner@trilead.com
* @version $Id: StreamGobbler.java,v 1.1 2007/10/15 12:49:56 cplattne Exp $
*/
public class StreamGobbler extends InputStream {

  /**
   * Worker thread that continuously drains the wrapped InputStream into {@code buffer},
   * compacting or flushing the buffer to the log file when it fills up.
   * All shared state (buffer, read_pos, write_pos, isEOF, exception) is guarded by
   * {@code synchronizer}; the blocking read itself happens outside the lock.
   */
  class GobblerThread extends Thread {
    @Override
    public void run() {
      while (true) {
        try {
          byte[] saveBuffer = null;
          // Blocking read outside the lock; only the gobbler thread ever moves data
          // inside the buffer, so write_pos is stable here.
          int avail = is.read(buffer, write_pos, buffer.length - write_pos);
          synchronized (synchronizer) {
            if (avail <= 0) {
              // End of stream: wake any blocked readers so they can observe EOF.
              isEOF = true;
              synchronizer.notifyAll();
              break;
            }
            write_pos += avail;
            int space_available = buffer.length - write_pos;
            if (space_available == 0) {
              if (read_pos > 0) {
                // Buffer full but the consumer has read some bytes: save the consumed
                // prefix for the log, then compact the unread tail to the front.
                saveBuffer = new byte[read_pos];
                System.arraycopy(buffer, 0, saveBuffer, 0, read_pos);
                System.arraycopy(buffer, read_pos, buffer, 0, buffer.length - read_pos);
                write_pos -= read_pos;
                read_pos = 0;
              } else {
                // Buffer full and nothing consumed yet: the whole buffer is handed to
                // the log and write_pos reset.
                // NOTE(review): this makes the unread bytes unavailable to read() —
                // they survive only in the log file. Confirm this overwrite-on-full
                // behavior is intended rather than growing the buffer.
                write_pos = 0;
                saveBuffer = buffer;
              }
            }
            synchronizer.notifyAll();
          }
          // File I/O is done outside the lock to avoid blocking readers.
          writeToFile(saveBuffer);
        } catch (IOException e) {
          synchronized (synchronizer) {
            // Remember the failure so readers re-throw it, then stop gobbling.
            exception = e;
            synchronizer.notifyAll();
            break;
          }
        }
      }
    }
  }

  // The wrapped stream being drained.
  private InputStream is;
  // The single worker thread; started in the constructor.
  private GobblerThread t;
  // Lock + condition object guarding all mutable state below.
  private Object synchronizer = new Object();
  private boolean isEOF = false;
  private boolean isClosed = false;
  // First IOException seen by the gobbler thread; re-thrown to readers.
  private IOException exception = null;
  // Ring-less byte buffer: valid data lives in [read_pos, write_pos).
  private byte[] buffer;
  private int read_pos = 0;
  private int write_pos = 0;
  // Optional log file that receives every byte consumed from the stream; null if it
  // could not be opened.
  private final FileOutputStream mLogStream;
  private final int mBufferSize;

  /**
   * Wraps {@code is} and immediately starts a daemon worker thread that drains it.
   *
   * @param is the stream to consume
   * @param log file that receives a copy of the gobbled data (truncated on open)
   * @param buffer_size fixed size of the in-memory buffer
   */
  public StreamGobbler(InputStream is, File log, int buffer_size) {
    this.is = is;
    mBufferSize = buffer_size;
    FileOutputStream out = null;
    try {
      out = new FileOutputStream(log, false);
    } catch (IOException e) {
      // Logging is best-effort: on failure, continue without a log file.
      Log.e(e);
    }
    mLogStream = out;
    buffer = new byte[mBufferSize];
    t = new GobblerThread();
    t.setDaemon(true);
    t.start();
  }

  /**
   * Appends {@code buffer} to the log file, if one is open. Failures are logged and
   * swallowed so stream consumption is never interrupted by log I/O.
   */
  public void writeToFile(byte[] buffer) {
    if (mLogStream != null && buffer != null) {
      try {
        mLogStream.write(buffer);
      } catch (IOException e) {
        Log.e(e);
      }
    }
  }

  @Override
  public int read() throws IOException {
    synchronized (synchronizer) {
      if (isClosed) {
        throw new IOException("This StreamGobbler is closed.");
      }
      // Block until data arrives, the gobbler fails, or EOF is reached.
      while (read_pos == write_pos) {
        if (exception != null) {
          throw exception;
        }
        if (isEOF) {
          return -1;
        }
        try {
          synchronizer.wait();
        } catch (InterruptedException e) {
          // NOTE(review): interrupt status is swallowed here; consider
          // Thread.currentThread().interrupt() or throwing InterruptedIOException.
        }
      }
      int b = buffer[read_pos++] & 0xff;
      return b;
    }
  }

  @Override
  public int available() throws IOException {
    synchronized (synchronizer) {
      if (isClosed) {
        throw new IOException("This StreamGobbler is closed.");
      }
      // Bytes currently buffered and not yet consumed.
      return write_pos - read_pos;
    }
  }

  @Override
  public int read(byte[] b) throws IOException {
    return read(b, 0, b.length);
  }

  @Override
  public void close() throws IOException {
    synchronized (synchronizer) {
      if (isClosed) {
        return;
      }
      isClosed = true;
      isEOF = true;
      // Wake blocked readers so they fail fast with the "closed" IOException.
      synchronizer.notifyAll();
      is.close();
    }
  }

  @Override
  public int read(byte[] b, int off, int len) throws IOException {
    // Standard InputStream.read(byte[],int,int) argument validation.
    if (b == null) {
      throw new NullPointerException();
    }
    if ((off < 0) || (len < 0) || ((off + len) > b.length) || ((off + len) < 0) || (off > b.length)) {
      throw new IndexOutOfBoundsException();
    }
    if (len == 0) {
      return 0;
    }
    synchronized (synchronizer) {
      if (isClosed) {
        throw new IOException("This StreamGobbler is closed.");
      }
      // Block until data arrives, the gobbler fails, or EOF is reached.
      while (read_pos == write_pos) {
        if (exception != null) {
          throw exception;
        }
        if (isEOF) {
          return -1;
        }
        try {
          synchronizer.wait();
        } catch (InterruptedException e) {
          // NOTE(review): interrupt status is swallowed here as well.
        }
      }
      // Hand out as much buffered data as fits into the caller's range.
      int avail = write_pos - read_pos;
      avail = (avail > len) ? len : avail;
      System.arraycopy(buffer, read_pos, b, off, avail);
      read_pos += avail;
      return avail;
    }
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.connect.runtime;
import org.apache.kafka.connect.runtime.isolation.Plugins;
import org.apache.kafka.connect.runtime.rest.entities.ConfigInfos;
import org.apache.kafka.connect.runtime.rest.entities.ConnectorInfo;
import org.apache.kafka.connect.runtime.rest.entities.ConnectorStateInfo;
import org.apache.kafka.connect.runtime.rest.entities.TaskInfo;
import org.apache.kafka.connect.util.Callback;
import org.apache.kafka.connect.util.ConnectorTaskId;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Objects;
/**
* <p>
* The herder interface tracks and manages workers and connectors. It is the main interface for external components
* to make changes to the state of the cluster. For example, in distributed mode, an implementation of this class
* knows how to accept a connector configuration, may need to route it to the current leader worker for the cluster so
* the config can be written to persistent storage, and then ensures the new connector is correctly instantiated on one
* of the workers.
* </p>
* <p>
* This class must implement all the actions that can be taken on the cluster (add/remove connectors, pause/resume tasks,
* get state of connectors and tasks, etc). The non-Java interfaces to the cluster (REST API and CLI) are very simple
* wrappers of the functionality provided by this interface.
* </p>
* <p>
* In standalone mode, this implementation of this class will be trivial because no coordination is needed. In that case,
* the implementation will mainly be delegating tasks directly to other components. For example, when creating a new
* connector in standalone mode, there is no need to persist the config and the connector and its tasks must run in the
* same process, so the standalone herder implementation can immediately instantiate and start the connector and its
* tasks.
* </p>
*/
public interface Herder {
void start();
void stop();
/**
* Get a list of connectors currently running in this cluster. This is a full list of connectors in the cluster gathered
* from the current configuration. However, note
*
* @returns A list of connector names
* @throws org.apache.kafka.connect.runtime.distributed.RequestTargetException if this node can not resolve the request
* (e.g., because it has not joined the cluster or does not have configs in sync with the group) and it is
* not the leader or the task owner (e.g., task restart must be handled by the worker which owns the task)
* @throws org.apache.kafka.connect.errors.ConnectException if this node is the leader, but still cannot resolve the
* request (e.g., it is not in sync with other worker's config state)
*/
void connectors(Callback<Collection<String>> callback);
/**
* Get the definition and status of a connector.
*/
void connectorInfo(String connName, Callback<ConnectorInfo> callback);
/**
* Get the configuration for a connector.
* @param connName name of the connector
* @param callback callback to invoke with the configuration
*/
void connectorConfig(String connName, Callback<Map<String, String>> callback);
/**
* Set the configuration for a connector. This supports creation and updating.
* @param connName name of the connector
* @param config the connectors configuration, or null if deleting the connector
* @param allowReplace if true, allow overwriting previous configs; if false, throw AlreadyExistsException if a connector
* with the same name already exists
* @param callback callback to invoke when the configuration has been written
*/
void putConnectorConfig(String connName, Map<String, String> config, boolean allowReplace, Callback<Created<ConnectorInfo>> callback);
/**
* Delete a connector and its configuration.
* @param connName name of the connector
* @param callback callback to invoke when the configuration has been written
*/
void deleteConnectorConfig(String connName, Callback<Created<ConnectorInfo>> callback);
/**
* Requests reconfiguration of the task. This should only be triggered by
* {@link HerderConnectorContext}.
*
* @param connName name of the connector that should be reconfigured
*/
void requestTaskReconfiguration(String connName);
/**
* Get the configurations for the current set of tasks of a connector.
* @param connName connector to update
* @param callback callback to invoke upon completion
*/
void taskConfigs(String connName, Callback<List<TaskInfo>> callback);
/**
* Set the configurations for the tasks of a connector. This should always include all tasks in the connector; if
* there are existing configurations and fewer are provided, this will reduce the number of tasks, and if more are
* provided it will increase the number of tasks.
* @param connName connector to update
* @param configs list of configurations
* @param callback callback to invoke upon completion
*/
void putTaskConfigs(String connName, List<Map<String, String>> configs, Callback<Void> callback);
/**
* Lookup the current status of a connector.
* @param connName name of the connector
*/
ConnectorStateInfo connectorStatus(String connName);
/**
* Lookup the status of the a task.
* @param id id of the task
*/
ConnectorStateInfo.TaskState taskStatus(ConnectorTaskId id);
/**
* Validate the provided connector config values against the configuration definition.
* @param connectorConfig the provided connector config values
*/
ConfigInfos validateConnectorConfig(Map<String, String> connectorConfig);
/**
* Restart the task with the given id.
* @param id id of the task
* @param cb callback to invoke upon completion
*/
void restartTask(ConnectorTaskId id, Callback<Void> cb);
/**
* Restart the connector.
* @param connName name of the connector
* @param cb callback to invoke upon completion
*/
void restartConnector(String connName, Callback<Void> cb);
/**
* Restart the connector.
* @param delayMs delay before restart
* @param connName name of the connector
* @param cb callback to invoke upon completion
* @returns The id of the request
*/
HerderRequest restartConnector(long delayMs, String connName, Callback<Void> cb);
    /**
     * Pause the connector. This call will asynchronously suspend processing by the connector and all
     * of its tasks.
     * @param connector name of the connector to pause
     */
    void pauseConnector(String connector);
    /**
     * Resume the connector. This call will asynchronously start the connector and its tasks (if
     * not started already).
     * @param connector name of the connector to resume
     */
    void resumeConnector(String connector);
    /**
     * Returns a handle to the plugin factory used by this herder and its worker.
     *
     * @return a reference to the plugin factory
     */
    Plugins plugins();
    /**
     * Get the cluster ID of the Kafka cluster backing this Connect cluster.
     * @return the cluster ID of the Kafka cluster backing this Connect cluster
     */
    String kafkaClusterId();
enum ConfigReloadAction {
NONE,
RESTART
}
class Created<T> {
private final boolean created;
private final T result;
public Created(boolean created, T result) {
this.created = created;
this.result = result;
}
public boolean created() {
return created;
}
public T result() {
return result;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Created<?> created1 = (Created<?>) o;
return Objects.equals(created, created1.created) &&
Objects.equals(result, created1.result);
}
@Override
public int hashCode() {
return Objects.hash(created, result);
}
}
}
| |
package apple.healthkit;
import apple.NSObject;
import apple.foundation.NSArray;
import apple.foundation.NSCoder;
import apple.foundation.NSData;
import apple.foundation.NSDate;
import apple.foundation.NSMethodSignature;
import apple.foundation.NSSet;
import org.moe.natj.c.ann.FunctionPtr;
import org.moe.natj.general.NatJ;
import org.moe.natj.general.Pointer;
import org.moe.natj.general.ann.Generated;
import org.moe.natj.general.ann.Library;
import org.moe.natj.general.ann.Mapped;
import org.moe.natj.general.ann.NInt;
import org.moe.natj.general.ann.NUInt;
import org.moe.natj.general.ann.Owned;
import org.moe.natj.general.ann.Runtime;
import org.moe.natj.general.ptr.VoidPtr;
import org.moe.natj.objc.Class;
import org.moe.natj.objc.ObjCRuntime;
import org.moe.natj.objc.SEL;
import org.moe.natj.objc.ann.ObjCClassBinding;
import org.moe.natj.objc.ann.ProtocolClassMethod;
import org.moe.natj.objc.ann.Selector;
import org.moe.natj.objc.map.ObjCObjectMapper;
/**
 * HKVerifiableClinicalRecord
 * <p>
 * An NSObject that represents a verifiable clinical record.
 * <p>
 * NOTE: machine-generated MOE (Multi-OS Engine) binding for the HealthKit
 * Objective-C class of the same name. The {@code @Selector} annotations map
 * each Java method to its native selector; do not edit by hand.
 */
@Generated
@Library("HealthKit")
@Runtime(ObjCRuntime.class)
@ObjCClassBinding
public class HKVerifiableClinicalRecord extends HKSample {
    // Registers this class's native bindings with the NatJ runtime on first load.
    static {
        NatJ.register();
    }
    // Wraps an existing native peer pointer; invoked by the runtime, not by application code.
    @Generated
    protected HKVerifiableClinicalRecord(Pointer peer) {
        super(peer);
    }
    /**
     * [@property] JWSRepresentation
     * <p>
     * The record's entirety as JSON Web Signature (JWS) data.
     */
    @Generated
    @Selector("JWSRepresentation")
    public native NSData JWSRepresentation();
    // --- Standard NSObject / Objective-C runtime plumbing (selector names in @Selector). ---
    @Generated
    @Selector("accessInstanceVariablesDirectly")
    public static native boolean accessInstanceVariablesDirectly();
    @Generated
    @Owned
    @Selector("alloc")
    public static native HKVerifiableClinicalRecord alloc();
    @Owned
    @Generated
    @Selector("allocWithZone:")
    public static native HKVerifiableClinicalRecord allocWithZone(VoidPtr zone);
    @Generated
    @Selector("automaticallyNotifiesObserversForKey:")
    public static native boolean automaticallyNotifiesObserversForKey(String key);
    @Generated
    @Selector("cancelPreviousPerformRequestsWithTarget:")
    public static native void cancelPreviousPerformRequestsWithTarget(@Mapped(ObjCObjectMapper.class) Object aTarget);
    @Generated
    @Selector("cancelPreviousPerformRequestsWithTarget:selector:object:")
    public static native void cancelPreviousPerformRequestsWithTargetSelectorObject(
            @Mapped(ObjCObjectMapper.class) Object aTarget, SEL aSelector,
            @Mapped(ObjCObjectMapper.class) Object anArgument);
    @Generated
    @Selector("classFallbacksForKeyedArchiver")
    public static native NSArray<String> classFallbacksForKeyedArchiver();
    @Generated
    @Selector("classForKeyedUnarchiver")
    public static native Class classForKeyedUnarchiver();
    // "_static" suffix avoids clashing with the inherited instance methods of the same name.
    @Generated
    @Selector("debugDescription")
    public static native String debugDescription_static();
    @Generated
    @Selector("description")
    public static native String description_static();
    /**
     * [@property] expirationDate
     * <p>
     * The date this record expires.
     */
    @Generated
    @Selector("expirationDate")
    public native NSDate expirationDate();
    @Generated
    @Selector("hash")
    @NUInt
    public static native long hash_static();
    @Generated
    @Selector("init")
    public native HKVerifiableClinicalRecord init();
    @Generated
    @Selector("initWithCoder:")
    public native HKVerifiableClinicalRecord initWithCoder(NSCoder coder);
    @Generated
    @Selector("instanceMethodForSelector:")
    @FunctionPtr(name = "call_instanceMethodForSelector_ret")
    public static native NSObject.Function_instanceMethodForSelector_ret instanceMethodForSelector(SEL aSelector);
    @Generated
    @Selector("instanceMethodSignatureForSelector:")
    public static native NSMethodSignature instanceMethodSignatureForSelector(SEL aSelector);
    @Generated
    @Selector("instancesRespondToSelector:")
    public static native boolean instancesRespondToSelector(SEL aSelector);
    @Generated
    @Selector("isSubclassOfClass:")
    public static native boolean isSubclassOfClass(Class aClass);
    /**
     * [@property] issuedDate
     * <p>
     * The date this record was issued.
     */
    @Generated
    @Selector("issuedDate")
    public native NSDate issuedDate();
    /**
     * [@property] issuerIdentifier
     * <p>
     * The identifier for the issuer of this record.
     */
    @Generated
    @Selector("issuerIdentifier")
    public native String issuerIdentifier();
    /**
     * [@property] itemNames
     * <p>
     * A list of display names for each item contained in this record.
     */
    @Generated
    @Selector("itemNames")
    public native NSArray<String> itemNames();
    @Generated
    @Selector("keyPathsForValuesAffectingValueForKey:")
    public static native NSSet<String> keyPathsForValuesAffectingValueForKey(String key);
    // Binds the Objective-C "new" class method; renamed because "new" is a Java keyword.
    @Generated
    @Owned
    @Selector("new")
    public static native HKVerifiableClinicalRecord new_objc();
    /**
     * [@property] recordTypes
     * <p>
     * The types present in this record.
     */
    @Generated
    @Selector("recordTypes")
    public native NSArray<String> recordTypes();
    /**
     * [@property] relevantDate
     * <p>
     * A date most relevant to this record, like when a vaccine was
     * administered or a test was performed.
     */
    @Generated
    @Selector("relevantDate")
    public native NSDate relevantDate();
    @Generated
    @Selector("resolveClassMethod:")
    public static native boolean resolveClassMethod(SEL sel);
    @Generated
    @Selector("resolveInstanceMethod:")
    public static native boolean resolveInstanceMethod(SEL sel);
    @Generated
    @Selector("setVersion:")
    public static native void setVersion_static(@NInt long aVersion);
    /**
     * [@property] subject
     * <p>
     * The subject of this record.
     */
    @Generated
    @Selector("subject")
    public native HKVerifiableClinicalRecordSubject subject();
    @Generated
    @Selector("superclass")
    public static native Class superclass_static();
    @Generated
    @Selector("supportsSecureCoding")
    public static native boolean supportsSecureCoding();
    // Instance-side protocol helper that forwards to the static supportsSecureCoding() binding.
    @Generated
    @ProtocolClassMethod("supportsSecureCoding")
    public boolean _supportsSecureCoding() {
        return supportsSecureCoding();
    }
    @Generated
    @Selector("version")
    @NInt
    public static native long version_static();
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.search.aggregations.bucket.histogram;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.SortedNumericDocValues;
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.util.CollectionUtil;
import org.elasticsearch.common.Rounding;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.util.ByteArray;
import org.elasticsearch.common.util.IntArray;
import org.elasticsearch.common.util.LongArray;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.BucketOrder;
import org.elasticsearch.search.aggregations.CardinalityUpperBound;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.LeafBucketCollector;
import org.elasticsearch.search.aggregations.LeafBucketCollectorBase;
import org.elasticsearch.search.aggregations.bucket.BestBucketsDeferringCollector;
import org.elasticsearch.search.aggregations.bucket.DeferableBucketAggregator;
import org.elasticsearch.search.aggregations.bucket.DeferringBucketCollector;
import org.elasticsearch.search.aggregations.bucket.histogram.AutoDateHistogramAggregationBuilder.RoundingInfo;
import org.elasticsearch.search.aggregations.bucket.terms.LongKeyedBucketOrds;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import java.io.IOException;
import java.util.Collections;
import java.util.Map;
import java.util.function.BiConsumer;
import java.util.function.Function;
import java.util.function.LongToIntFunction;
import java.util.function.LongUnaryOperator;
/**
 * An aggregator for date values that attempts to return a specific number of
 * buckets, reconfiguring how it rounds dates to buckets on the fly as new
 * data arrives.
 * <p>
 * This class is abstract because there is a simple implementation for when the
 * aggregator only collects from a single bucket and a more complex
 * implementation when it doesn't. This ain't great from a test coverage
 * standpoint but the simpler implementation is between 7% and 15% faster
 * when you can use it. This is an important aggregation and we need that
 * performance.
 */
abstract class AutoDateHistogramAggregator extends DeferableBucketAggregator {
    /**
     * Builds the implementation that matches how many owning buckets this
     * aggregator will collect into: {@link FromSingle} for exactly one,
     * {@link FromMany} otherwise.
     */
    static AutoDateHistogramAggregator build(
        String name,
        AggregatorFactories factories,
        int targetBuckets,
        RoundingInfo[] roundingInfos,
        ValuesSourceConfig valuesSourceConfig,
        AggregationContext context,
        Aggregator parent,
        CardinalityUpperBound cardinality,
        Map<String, Object> metadata
    ) throws IOException {
        return cardinality == CardinalityUpperBound.ONE
            ? new FromSingle(
                name,
                factories,
                targetBuckets,
                roundingInfos,
                valuesSourceConfig,
                context,
                parent,
                metadata
            )
            : new FromMany(
                name,
                factories,
                targetBuckets,
                roundingInfos,
                valuesSourceConfig,
                context,
                parent,
                metadata
            );
    }
    /** Source of the date values; {@code null} when the field is unmapped. */
    private final ValuesSource.Numeric valuesSource;
    /** Doc value format used to render bucket keys in the response. */
    private final DocValueFormat formatter;
    /** Prepares a {@link Rounding} for use against this source's values. */
    private final Function<Rounding, Rounding.Prepared> roundingPreparer;
    /**
     * A reference to the collector so we can
     * {@link BestBucketsDeferringCollector#rewriteBuckets}.
     */
    private BestBucketsDeferringCollector deferringCollector;
    /** Candidate roundings, starting from the most fine grained. */
    protected final RoundingInfo[] roundingInfos;
    /** The number of buckets the aggregation attempts to return. */
    protected final int targetBuckets;
    private AutoDateHistogramAggregator(
        String name,
        AggregatorFactories factories,
        int targetBuckets,
        RoundingInfo[] roundingInfos,
        ValuesSourceConfig valuesSourceConfig,
        AggregationContext context,
        Aggregator parent,
        Map<String, Object> metadata
    ) throws IOException {
        super(name, factories, context, parent, metadata);
        this.targetBuckets = targetBuckets;
        // TODO: Remove null usage here, by using a different aggregator for create
        this.valuesSource = valuesSourceConfig.hasValues() ? (ValuesSource.Numeric) valuesSourceConfig.getValuesSource() : null;
        this.formatter = valuesSourceConfig.format();
        this.roundingInfos = roundingInfos;
        this.roundingPreparer = valuesSourceConfig.roundingPreparer();
    }
    @Override
    public final ScoreMode scoreMode() {
        // Scores are only required when the values source itself needs them.
        if (valuesSource != null && valuesSource.needsScores()) {
            return ScoreMode.COMPLETE;
        }
        return super.scoreMode();
    }
    @Override
    protected final boolean shouldDefer(Aggregator aggregator) {
        // Defer all sub-aggregations so their buckets can be rewritten when the rounding changes.
        return true;
    }
    @Override
    public final DeferringBucketCollector buildDeferringCollector() {
        deferringCollector = new BestBucketsDeferringCollector(topLevelQuery(), searcher(), descendsFromGlobalAggregator(parent()));
        return deferringCollector;
    }
    /** Builds the per-leaf collector once the doc values for the leaf are resolved. */
    protected abstract LeafBucketCollector getLeafCollector(SortedNumericDocValues values, LeafBucketCollector sub) throws IOException;
    @Override
    public final LeafBucketCollector getLeafCollector(LeafReaderContext ctx, LeafBucketCollector sub) throws IOException {
        if (valuesSource == null) {
            // Unmapped field: nothing to collect.
            return LeafBucketCollector.NO_OP_COLLECTOR;
        }
        return getLeafCollector(valuesSource.longValues(ctx), sub);
    }
    /**
     * Builds the final results from {@code bucketOrds}. Buckets within each
     * owning ordinal are sorted by key ascending, as the histogram contract
     * requires.
     *
     * @param roundingIndexFor resolves the rounding index in use for each owning bucket ordinal
     */
    protected final InternalAggregation[] buildAggregations(
        LongKeyedBucketOrds bucketOrds,
        LongToIntFunction roundingIndexFor,
        long[] owningBucketOrds
    ) throws IOException {
        return buildAggregationsForVariableBuckets(
            owningBucketOrds,
            bucketOrds,
            (bucketValue, docCount, subAggregationResults) -> new InternalAutoDateHistogram.Bucket(
                bucketValue,
                docCount,
                formatter,
                subAggregationResults
            ),
            (owningBucketOrd, buckets) -> {
                // the contract of the histogram aggregation is that shards must return
                // buckets ordered by key in ascending order
                CollectionUtil.introSort(buckets, BucketOrder.key(true).comparator());
                // value source will be null for unmapped fields
                InternalAutoDateHistogram.BucketInfo emptyBucketInfo = new InternalAutoDateHistogram.BucketInfo(
                    roundingInfos,
                    roundingIndexFor.applyAsInt(owningBucketOrd),
                    buildEmptySubAggregations()
                );
                return new InternalAutoDateHistogram(name, buckets, targetBuckets, emptyBucketInfo, formatter, metadata(), 1);
            }
        );
    }
    @Override
    public final InternalAggregation buildEmptyAggregation() {
        InternalAutoDateHistogram.BucketInfo emptyBucketInfo = new InternalAutoDateHistogram.BucketInfo(
            roundingInfos,
            0,
            buildEmptySubAggregations()
        );
        return new InternalAutoDateHistogram(name, Collections.emptyList(), targetBuckets, emptyBucketInfo, formatter, metadata(), 1);
    }
    /** Prepares {@code roundingInfos[index].rounding} for this values source. */
    protected final Rounding.Prepared prepareRounding(int index) {
        return roundingPreparer.apply(roundingInfos[index].rounding);
    }
    /**
     * Rewrites bucket ordinals according to {@code mergeMap} (old ordinal to
     * new ordinal) and keeps the deferring collector, if any, in sync.
     */
    protected final void merge(long[] mergeMap, long newNumBuckets) {
        LongUnaryOperator howToRewrite = b -> mergeMap[(int) b];
        rewriteBuckets(newNumBuckets, howToRewrite);
        if (deferringCollector != null) {
            deferringCollector.rewriteBuckets(howToRewrite);
        }
    }
    /**
     * Initially it uses the most fine grained rounding configuration possible
     * but as more data arrives it rebuckets the data until it "fits" in the
     * aggregation rounding. Similar to {@link FromMany} this checks both the
     * bucket count and range of the aggregation, but unlike
     * {@linkplain FromMany} it keeps an accurate count of the buckets and it
     * doesn't delay rebucketing.
     * <p>
     * Rebucketing is roughly {@code O(number_of_hits_collected_so_far)} but we
     * rebucket roughly {@code O(log number_of_hits_collected_so_far)} because
     * the "shape" of the roundings is <strong>roughly</strong>
     * logarithmically increasing.
     */
    private static class FromSingle extends AutoDateHistogramAggregator {
        /** Index into {@link #roundingInfos} of the rounding currently in use. */
        private int roundingIdx;
        /** Prepared form of {@code roundingInfos[roundingIdx]}. */
        private Rounding.Prepared preparedRounding;
        /**
         * Map from value to bucket ordinals.
         * <p>
         * It is important that this is the exact subtype of
         * {@link LongKeyedBucketOrds} so that the JVM can make a monomorphic
         * call to {@link LongKeyedBucketOrds#add(long, long)} in the tight
         * inner loop of {@link LeafBucketCollector#collect(int, long)}. You'd
         * think that it wouldn't matter, but its seriously 7%-15% performance
         * difference for the aggregation. Yikes.
         */
        private LongKeyedBucketOrds.FromSingle bucketOrds;
        // Smallest and largest rounded keys seen so far; used to decide when
        // the current rounding no longer "fits" and must be promoted.
        private long min = Long.MAX_VALUE;
        private long max = Long.MIN_VALUE;
        FromSingle(
            String name,
            AggregatorFactories factories,
            int targetBuckets,
            RoundingInfo[] roundingInfos,
            ValuesSourceConfig valuesSourceConfig,
            AggregationContext context,
            Aggregator parent,
            Map<String, Object> metadata
        ) throws IOException {
            super(
                name,
                factories,
                targetBuckets,
                roundingInfos,
                valuesSourceConfig,
                context,
                parent,
                metadata
            );
            preparedRounding = prepareRounding(0);
            bucketOrds = new LongKeyedBucketOrds.FromSingle(bigArrays());
        }
        @Override
        protected LeafBucketCollector getLeafCollector(SortedNumericDocValues values, LeafBucketCollector sub) throws IOException {
            return new LeafBucketCollectorBase(sub, values) {
                @Override
                public void collect(int doc, long owningBucketOrd) throws IOException {
                    // With cardinality ONE every document belongs to owning bucket 0.
                    assert owningBucketOrd == 0;
                    if (false == values.advanceExact(doc)) {
                        return;
                    }
                    int valuesCount = values.docValueCount();
                    long previousRounded = Long.MIN_VALUE;
                    for (int i = 0; i < valuesCount; ++i) {
                        long value = values.nextValue();
                        long rounded = preparedRounding.round(value);
                        assert rounded >= previousRounded;
                        if (rounded == previousRounded) {
                            // Skip duplicate rounded values within the same document.
                            continue;
                        }
                        collectValue(doc, rounded);
                        previousRounded = rounded;
                    }
                }
                private void collectValue(int doc, long rounded) throws IOException {
                    long bucketOrd = bucketOrds.add(0, rounded);
                    if (bucketOrd < 0) { // already seen
                        bucketOrd = -1 - bucketOrd;
                        collectExistingBucket(sub, doc, bucketOrd);
                        return;
                    }
                    collectBucket(sub, doc, bucketOrd);
                    increaseRoundingIfNeeded(rounded);
                }
                private void increaseRoundingIfNeeded(long rounded) {
                    if (roundingIdx >= roundingInfos.length - 1) {
                        // Already at the coarsest rounding.
                        return;
                    }
                    min = Math.min(min, rounded);
                    max = Math.max(max, rounded);
                    if (bucketOrds.size() <= targetBuckets * roundingInfos[roundingIdx].getMaximumInnerInterval()
                        && max - min <= targetBuckets * roundingInfos[roundingIdx].getMaximumRoughEstimateDurationMillis()) {
                        return;
                    }
                    // Promote to coarser roundings, rebucketing each time, until both
                    // the bucket count and the key range fit the target.
                    do {
                        try (LongKeyedBucketOrds oldOrds = bucketOrds) {
                            preparedRounding = prepareRounding(++roundingIdx);
                            long[] mergeMap = new long[Math.toIntExact(oldOrds.size())];
                            bucketOrds = new LongKeyedBucketOrds.FromSingle(bigArrays());
                            LongKeyedBucketOrds.BucketOrdsEnum ordsEnum = oldOrds.ordsEnum(0);
                            while (ordsEnum.next()) {
                                long oldKey = ordsEnum.value();
                                long newKey = preparedRounding.round(oldKey);
                                long newBucketOrd = bucketOrds.add(0, newKey);
                                mergeMap[(int) ordsEnum.ord()] = newBucketOrd >= 0 ? newBucketOrd : -1 - newBucketOrd;
                            }
                            merge(mergeMap, bucketOrds.size());
                        }
                    } while (roundingIdx < roundingInfos.length - 1
                        && (bucketOrds.size() > targetBuckets * roundingInfos[roundingIdx].getMaximumInnerInterval()
                            || max - min > targetBuckets * roundingInfos[roundingIdx].getMaximumRoughEstimateDurationMillis()));
                }
            };
        }
        @Override
        public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException {
            // A single rounding applies to every owning bucket ordinal.
            return buildAggregations(bucketOrds, l -> roundingIdx, owningBucketOrds);
        }
        @Override
        public void collectDebugInfo(BiConsumer<String, Object> add) {
            super.collectDebugInfo(add);
            add.accept("surviving_buckets", bucketOrds.size());
        }
        @Override
        protected void doClose() {
            Releasables.close(bucketOrds);
        }
    }
    /**
     * Initially it uses the most fine grained rounding configuration possible but
     * as more data arrives it uses two heuristics to shift to coarser and coarser
     * rounding. The first heuristic is the number of buckets, specifically,
     * when there are more buckets than can "fit" in the current rounding it shifts
     * to the next rounding. Instead of redoing the rounding, it estimates the
     * number of buckets that will "survive" at the new rounding and uses
     * <strong>that</strong> as the initial value for the bucket count that it
     * increments in order to trigger another promotion to another coarser
     * rounding. This works fairly well at containing the number of buckets, but
     * the estimate of the number of buckets will be wrong if the buckets are
     * quite spread out compared to the rounding.
     * <p>
     * The second heuristic it uses to trigger promotion to a coarser rounding is
     * the distance between the min and max bucket. When that distance is greater
     * than what the current rounding supports it promotes. This heuristic
     * isn't good at limiting the number of buckets but is great when the buckets
     * are spread out compared to the rounding. So it should complement the first
     * heuristic.
     * <p>
     * When promoting a rounding we keep the old buckets around because it is
     * expensive to call {@link BestBucketsDeferringCollector#rewriteBuckets}.
     * In particular it is {@code O(number_of_hits_collected_so_far)}. So if we
     * called it frequently we'd end up in {@code O(n^2)} territory. Bad news for
     * aggregations! Instead, we keep a "budget" of buckets that we're ok
     * "wasting". When we promote the rounding and our estimate of the number of
     * "dead" buckets that have data but have yet to be merged into the buckets
     * that are valid for the current rounding exceeds the budget then we rebucket
     * the entire aggregation and double the budget.
     * <p>
     * Once we're done collecting and we know exactly which buckets we'll be
     * returning we <strong>finally</strong> perform a "real", "perfect bucketing",
     * rounding all of the keys for {@code owningBucketOrd} that we're going to
     * collect and picking the rounding based on a real, accurate count and the
     * min and max.
     */
    private static class FromMany extends AutoDateHistogramAggregator {
        /**
         * An array of prepared roundings in the same order as
         * {@link #roundingInfos}. The 0th entry is prepared initially,
         * and other entries are null until first needed.
         */
        private final Rounding.Prepared[] preparedRoundings;
        /**
         * Map from value to bucket ordinals.
         * <p>
         * It is important that this is the exact subtype of
         * {@link LongKeyedBucketOrds} so that the JVM can make a monomorphic
         * call to {@link LongKeyedBucketOrds#add(long, long)} in the tight
         * inner loop of {@link LeafBucketCollector#collect(int, long)}.
         */
        private LongKeyedBucketOrds.FromMany bucketOrds;
        /**
         * The index of the rounding that each {@code owningBucketOrd} is
         * currently using.
         * <p>
         * During collection we use overestimates of how many buckets are saved
         * by bumping to the next rounding index. So we end up bumping less
         * aggressively than a "perfect" algorithm. That is fine because we
         * correct the error when we merge the buckets together all the way
         * up in {@link InternalAutoDateHistogram#reduceBucket}. In particular,
         * on final reduce we bump the rounding until we appropriately
         * cover the date range across all of the results returned by all of
         * the {@link AutoDateHistogramAggregator}s.
         */
        private ByteArray roundingIndices;
        /**
         * The minimum key per {@code owningBucketOrd}.
         */
        private LongArray mins;
        /**
         * The max key per {@code owningBucketOrd}.
         */
        private LongArray maxes;
        /**
         * An underestimate of the number of buckets that are "live" in the
         * current rounding for each {@code owningBucketOrdinal}.
         */
        private IntArray liveBucketCountUnderestimate;
        /**
         * An over estimate of the number of wasted buckets. When this gets
         * too high we {@link #rebucket} which sets it to 0.
         */
        private long wastedBucketsOverestimate = 0;
        /**
         * The next {@link #wastedBucketsOverestimate} that will trigger a
         * {@link #rebucket() rebucketing}.
         */
        private long nextRebucketAt = 1000; // TODO this could almost certainly start higher when asMultiBucketAggregator is gone
        /**
         * The number of times the aggregator had to {@link #rebucket()} the
         * results. We keep this just to report to the profiler.
         */
        private int rebucketCount = 0;
        FromMany(
            String name,
            AggregatorFactories factories,
            int targetBuckets,
            RoundingInfo[] roundingInfos,
            ValuesSourceConfig valuesSourceConfig,
            AggregationContext context,
            Aggregator parent,
            Map<String, Object> metadata
        ) throws IOException {
            super(
                name,
                factories,
                targetBuckets,
                roundingInfos,
                valuesSourceConfig,
                context,
                parent,
                metadata
            );
            assert roundingInfos.length < 127 : "Rounding must fit in a signed byte";
            roundingIndices = bigArrays().newByteArray(1, true);
            mins = bigArrays().newLongArray(1, false);
            mins.set(0, Long.MAX_VALUE);
            maxes = bigArrays().newLongArray(1, false);
            maxes.set(0, Long.MIN_VALUE);
            preparedRoundings = new Rounding.Prepared[roundingInfos.length];
            // Prepare the first rounding because we know we'll need it.
            preparedRoundings[0] = prepareRounding(0);
            bucketOrds = new LongKeyedBucketOrds.FromMany(bigArrays());
            liveBucketCountUnderestimate = bigArrays().newIntArray(1, true);
        }
        @Override
        protected LeafBucketCollector getLeafCollector(SortedNumericDocValues values, LeafBucketCollector sub) throws IOException {
            return new LeafBucketCollectorBase(sub, values) {
                @Override
                public void collect(int doc, long owningBucketOrd) throws IOException {
                    if (false == values.advanceExact(doc)) {
                        return;
                    }
                    int valuesCount = values.docValueCount();
                    long previousRounded = Long.MIN_VALUE;
                    int roundingIdx = roundingIndexFor(owningBucketOrd);
                    for (int i = 0; i < valuesCount; ++i) {
                        long value = values.nextValue();
                        long rounded = preparedRoundings[roundingIdx].round(value);
                        assert rounded >= previousRounded;
                        if (rounded == previousRounded) {
                            // Skip duplicate rounded values within the same document.
                            continue;
                        }
                        roundingIdx = collectValue(owningBucketOrd, roundingIdx, doc, rounded);
                        previousRounded = rounded;
                    }
                }
                private int collectValue(long owningBucketOrd, int roundingIdx, int doc, long rounded) throws IOException {
                    long bucketOrd = bucketOrds.add(owningBucketOrd, rounded);
                    if (bucketOrd < 0) { // already seen
                        bucketOrd = -1 - bucketOrd;
                        collectExistingBucket(sub, doc, bucketOrd);
                        return roundingIdx;
                    }
                    collectBucket(sub, doc, bucketOrd);
                    liveBucketCountUnderestimate = bigArrays().grow(liveBucketCountUnderestimate, owningBucketOrd + 1);
                    int estimatedBucketCount = liveBucketCountUnderestimate.increment(owningBucketOrd, 1);
                    return increaseRoundingIfNeeded(owningBucketOrd, estimatedBucketCount, rounded, roundingIdx);
                }
                /**
                 * Increase the rounding of {@code owningBucketOrd} using
                 * estimated bucket counts, {@link #rebucket() rebucketing} all the
                 * buckets if the estimated number of wasted buckets is too high.
                 */
                private int increaseRoundingIfNeeded(long owningBucketOrd, int oldEstimatedBucketCount, long newKey, int oldRounding) {
                    if (oldRounding >= roundingInfos.length - 1) {
                        // Already at the coarsest rounding.
                        return oldRounding;
                    }
                    if (mins.size() < owningBucketOrd + 1) {
                        long oldSize = mins.size();
                        mins = bigArrays().grow(mins, owningBucketOrd + 1);
                        mins.fill(oldSize, mins.size(), Long.MAX_VALUE);
                    }
                    if (maxes.size() < owningBucketOrd + 1) {
                        long oldSize = maxes.size();
                        maxes = bigArrays().grow(maxes, owningBucketOrd + 1);
                        maxes.fill(oldSize, maxes.size(), Long.MIN_VALUE);
                    }
                    long min = Math.min(mins.get(owningBucketOrd), newKey);
                    mins.set(owningBucketOrd, min);
                    long max = Math.max(maxes.get(owningBucketOrd), newKey);
                    maxes.set(owningBucketOrd, max);
                    if (oldEstimatedBucketCount <= targetBuckets * roundingInfos[oldRounding].getMaximumInnerInterval()
                        && max - min <= targetBuckets * roundingInfos[oldRounding].getMaximumRoughEstimateDurationMillis()) {
                        return oldRounding;
                    }
                    // Estimate how many buckets survive each coarser rounding until
                    // both heuristics (count and key range) are satisfied.
                    long oldRoughDuration = roundingInfos[oldRounding].roughEstimateDurationMillis;
                    int newRounding = oldRounding;
                    int newEstimatedBucketCount;
                    do {
                        newRounding++;
                        double ratio = (double) oldRoughDuration / (double) roundingInfos[newRounding].getRoughEstimateDurationMillis();
                        newEstimatedBucketCount = (int) Math.ceil(oldEstimatedBucketCount * ratio);
                    } while (newRounding < roundingInfos.length - 1
                        && (newEstimatedBucketCount > targetBuckets * roundingInfos[newRounding].getMaximumInnerInterval()
                            || max - min > targetBuckets * roundingInfos[newRounding].getMaximumRoughEstimateDurationMillis()));
                    setRounding(owningBucketOrd, newRounding);
                    mins.set(owningBucketOrd, preparedRoundings[newRounding].round(mins.get(owningBucketOrd)));
                    maxes.set(owningBucketOrd, preparedRoundings[newRounding].round(maxes.get(owningBucketOrd)));
                    wastedBucketsOverestimate += oldEstimatedBucketCount - newEstimatedBucketCount;
                    if (wastedBucketsOverestimate > nextRebucketAt) {
                        rebucket();
                        // Bump the threshold for the next rebucketing
                        wastedBucketsOverestimate = 0;
                        nextRebucketAt *= 2;
                    } else {
                        liveBucketCountUnderestimate.set(owningBucketOrd, newEstimatedBucketCount);
                    }
                    return newRounding;
                }
            };
        }
        /**
         * Rebuilds {@link #bucketOrds} from scratch, rounding every existing key
         * with its owning bucket's current rounding and merging buckets that now
         * collide. Expensive; see the class javadoc for the budgeting scheme
         * that limits how often this runs.
         */
        private void rebucket() {
            rebucketCount++;
            try (LongKeyedBucketOrds oldOrds = bucketOrds) {
                long[] mergeMap = new long[Math.toIntExact(oldOrds.size())];
                bucketOrds = new LongKeyedBucketOrds.FromMany(bigArrays());
                for (long owningBucketOrd = 0; owningBucketOrd <= oldOrds.maxOwningBucketOrd(); owningBucketOrd++) {
                    LongKeyedBucketOrds.BucketOrdsEnum ordsEnum = oldOrds.ordsEnum(owningBucketOrd);
                    Rounding.Prepared preparedRounding = preparedRoundings[roundingIndexFor(owningBucketOrd)];
                    while (ordsEnum.next()) {
                        long oldKey = ordsEnum.value();
                        long newKey = preparedRounding.round(oldKey);
                        long newBucketOrd = bucketOrds.add(owningBucketOrd, newKey);
                        mergeMap[(int) ordsEnum.ord()] = newBucketOrd >= 0 ? newBucketOrd : -1 - newBucketOrd;
                    }
                    liveBucketCountUnderestimate = bigArrays().grow(liveBucketCountUnderestimate, owningBucketOrd + 1);
                    liveBucketCountUnderestimate.set(owningBucketOrd, Math.toIntExact(bucketOrds.bucketsInOrd(owningBucketOrd)));
                }
                merge(mergeMap, bucketOrds.size());
            }
        }
        @Override
        public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException {
            /*
             * Rebucket before building the aggregation to build as small as result
             * as possible.
             *
             * TODO it'd be faster if we could apply the merging on the fly as we
             * replay the hits and build the buckets. How much faster is not clear,
             * but it does have the advantage of only touching the buckets that we
             * want to collect.
             */
            rebucket();
            return buildAggregations(bucketOrds, this::roundingIndexFor, owningBucketOrds);
        }
        @Override
        public void collectDebugInfo(BiConsumer<String, Object> add) {
            super.collectDebugInfo(add);
            add.accept("surviving_buckets", bucketOrds.size());
            add.accept("wasted_buckets_overestimate", wastedBucketsOverestimate);
            add.accept("next_rebucket_at", nextRebucketAt);
            add.accept("rebucket_count", rebucketCount);
        }
        /** Records {@code newRounding} for {@code owningBucketOrd}, preparing the rounding on first use. */
        private void setRounding(long owningBucketOrd, int newRounding) {
            roundingIndices = bigArrays().grow(roundingIndices, owningBucketOrd + 1);
            roundingIndices.set(owningBucketOrd, (byte) newRounding);
            if (preparedRoundings[newRounding] == null) {
                preparedRoundings[newRounding] = prepareRounding(newRounding);
            }
        }
        /** The rounding index for {@code owningBucketOrd}; 0 for ordinals that were never promoted. */
        private int roundingIndexFor(long owningBucketOrd) {
            return owningBucketOrd < roundingIndices.size() ? roundingIndices.get(owningBucketOrd) : 0;
        }
        @Override
        public void doClose() {
            Releasables.close(bucketOrds, roundingIndices, mins, maxes, liveBucketCountUnderestimate);
        }
    }
}
| |
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.application.options;
import com.intellij.codeInsight.CodeInsightSettings;
import com.intellij.codeInsight.daemon.DaemonCodeAnalyzer;
import com.intellij.ide.DataManager;
import com.intellij.ide.PowerSaveMode;
import com.intellij.ide.ui.UISettings;
import com.intellij.openapi.actionSystem.ActionManager;
import com.intellij.openapi.actionSystem.CommonDataKeys;
import com.intellij.openapi.actionSystem.IdeActions;
import com.intellij.openapi.keymap.KeymapUtil;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.NotNullComputable;
import com.intellij.openapi.util.text.StringUtil;
import consulo.localize.LocalizeValue;
import consulo.platform.base.localize.ApplicationLocalize;
import consulo.ui.*;
import consulo.ui.annotation.RequiredUIAccess;
import consulo.ui.image.Image;
import consulo.ui.layout.DockLayout;
import consulo.ui.layout.LabeledLayout;
import consulo.ui.layout.Layout;
import consulo.ui.layout.VerticalLayout;
import consulo.ui.border.BorderPosition;
import consulo.ui.border.BorderStyle;
import consulo.ui.util.LabeledBuilder;
import consulo.util.lang.Comparing;
import consulo.util.lang.ThreeState;
import org.intellij.lang.annotations.MagicConstant;
import javax.annotation.Nonnull;
/**
 * Settings panel for the IDE "Code Completion" preferences page.
 * <p>
 * Builds a consulo-UI {@link VerticalLayout} with controls for completion case
 * sensitivity, basic/smart auto-completion, auto-popup behaviour, javadoc popup
 * delay, parameter-info popup delay and lexicographic lookup sorting, and maps
 * them onto {@link CodeInsightSettings} and {@link UISettings}.
 * <p>
 * Implements {@link NotNullComputable} so the configurable framework can obtain
 * the root layout via {@link #compute()}.
 */
public class CodeCompletionPanel implements NotNullComputable<Layout> {
    /** Root layout returned from {@link #compute()}. */
    private final VerticalLayout myLayout;
    /** Case sensitivity: YES = all letters, NO = none, UNSURE = first letter. */
    private final ComboBox<ThreeState> myCaseSensitiveCombo2;
    private final CheckBox myCbOnCodeCompletion2;
    private final CheckBox myCbOnSmartTypeCompletion2;
    private final CheckBox myCbSorting2;
    private final CheckBox myCbAutocompletion2;
    private final CheckBox myCbSelectByChars2;
    private final CheckBox myCbAutopopupJavaDoc2;
    private final IntBox myAutopopupJavaDocField2;
    private final CheckBox myCbParameterInfoPopup2;
    private final IntBox myParameterInfoDelayField2;

    /**
     * Builds the panel. The {@code actionManager} is used only to look up the
     * keyboard shortcuts of the basic/smart completion actions so they can be
     * appended to the checkbox labels.
     */
    @RequiredUIAccess
    public CodeCompletionPanel(ActionManager actionManager) {
        myLayout = VerticalLayout.create();
        ComboBox.Builder<ThreeState> builder = ComboBox.builder();
        builder.fillByEnumLocalized(ThreeState.class, o -> {
            switch (o) {
                case YES:
                    return ApplicationLocalize.comboboxAutocompleteCaseSensitiveAll();
                case NO:
                    return ApplicationLocalize.comboboxAutocompleteCaseSensitiveNone();
                case UNSURE:
                    return ApplicationLocalize.comboboxAutocompleteCaseSensitiveFirstLetter();
                default:
                    throw new UnsupportedOperationException();
            }
        });
        myCaseSensitiveCombo2 = builder.build();
        VerticalLayout completionOptions = VerticalLayout.create();
        completionOptions.add(LabeledBuilder.sided(ApplicationLocalize.comboboxCaseSensitiveCompletion(), myCaseSensitiveCombo2));
        completionOptions.add(Label.create(ApplicationLocalize.labelAutocompleteWhenOnlyOneChoice()));
        // Append the bound shortcut (if any) to the "basic completion" label.
        String basicShortcut = KeymapUtil.getFirstKeyboardShortcutText(actionManager.getAction(IdeActions.ACTION_CODE_COMPLETION));
        if (StringUtil.isNotEmpty(basicShortcut)) {
            LocalizeValue value = ApplicationLocalize.checkboxAutocompleteBasic().map((localizeManager, s) -> s + " (" + basicShortcut + ")");
            myCbOnCodeCompletion2 = CheckBox.create(value);
        }
        else {
            myCbOnCodeCompletion2 = CheckBox.create(ApplicationLocalize.checkboxAutocompleteBasic());
        }
        // Same for the "smart type completion" label.
        String smartShortcut = KeymapUtil.getFirstKeyboardShortcutText(actionManager.getAction(IdeActions.ACTION_SMART_TYPE_COMPLETION));
        if (StringUtil.isNotEmpty(smartShortcut)) {
            LocalizeValue value = ApplicationLocalize.checkboxAutocompleteSmartType().map((localizeManager, s) -> s + " (" + smartShortcut + ")");
            myCbOnSmartTypeCompletion2 = CheckBox.create(value);
        }
        else {
            myCbOnSmartTypeCompletion2 = CheckBox.create(ApplicationLocalize.checkboxAutocompleteSmartType());
        }
        VerticalLayout complGroup = VerticalLayout.create();
        complGroup.addBorder(BorderPosition.LEFT, BorderStyle.EMPTY, null, Image.DEFAULT_ICON_SIZE);
        complGroup.add(myCbOnCodeCompletion2);
        complGroup.add(myCbOnSmartTypeCompletion2);
        completionOptions.add(complGroup);
        myCbSorting2 = CheckBox.create(LocalizeValue.localizeTODO("Sort lookup items lexicographically"));
        // FIX: the sorting checkbox was created and wired into reset()/apply()/
        // isModified() but never added to any layout, so it was invisible to the user.
        completionOptions.add(myCbSorting2);
        if (PowerSaveMode.isEnabled()) {
            myCbAutocompletion2 = CheckBox.create(LocalizeValue.localizeTODO("Autopopup code completion (not available in Power Save mode)"));
        }
        else {
            myCbAutocompletion2 = CheckBox.create(LocalizeValue.localizeTODO("Autopopup code completion"));
        }
        completionOptions.add(myCbAutocompletion2);
        myCbSelectByChars2 = CheckBox.create(LocalizeValue.localizeTODO("Insert selected variant by typing dot, space, etc."));
        // Disabled until "autopopup completion" is checked (see listener below).
        myCbSelectByChars2.setEnabled(false);
        VerticalLayout indentChars = VerticalLayout.create().add(myCbSelectByChars2);
        indentChars.addBorder(BorderPosition.LEFT, BorderStyle.EMPTY, null, Image.DEFAULT_ICON_SIZE);
        completionOptions.add(indentChars);
        DockLayout autoPopuDocLine = DockLayout.create();
        myCbAutopopupJavaDoc2 = CheckBox.create(ApplicationLocalize.editboxAutopopupJavadocInMs());
        autoPopuDocLine.left(myCbAutopopupJavaDoc2);
        myAutopopupJavaDocField2 = IntBox.create();
        myAutopopupJavaDocField2.setEnabled(false);
        autoPopuDocLine.right(myAutopopupJavaDocField2);
        completionOptions.add(autoPopuDocLine);
        myLayout.add(LabeledLayout.create(ApplicationLocalize.titleCodeCompletion(), completionOptions));
        VerticalLayout parameterInfoGroup = VerticalLayout.create();
        myCbParameterInfoPopup2 = CheckBox.create(ApplicationLocalize.editboxAutopopupInMs());
        myParameterInfoDelayField2 = IntBox.create();
        myParameterInfoDelayField2.setEnabled(false);
        parameterInfoGroup.add(DockLayout.create().left(myCbParameterInfoPopup2).right(myParameterInfoDelayField2));
        myCbShowFullParameterSignatures2 = CheckBox.create(ApplicationLocalize.checkboxShowFullSignatures());
        parameterInfoGroup.add(myCbShowFullParameterSignatures2);
        myLayout.add(LabeledLayout.create(ApplicationLocalize.titleParameterInfo(), parameterInfoGroup));
        // Delay/selection fields are only meaningful when their checkbox is on.
        myCbAutocompletion2.addValueListener(event -> myCbSelectByChars2.setEnabled(myCbAutocompletion2.getValue()));
        myCbAutopopupJavaDoc2.addValueListener(event -> myAutopopupJavaDocField2.setEnabled(myCbAutopopupJavaDoc2.getValue()));
        myCbParameterInfoPopup2.addValueListener(event -> myParameterInfoDelayField2.setEnabled(myCbParameterInfoPopup2.getValue()));
    }

    /** Loads the current settings values into the UI controls. */
    @RequiredUIAccess
    public void reset() {
        CodeInsightSettings codeInsightSettings = CodeInsightSettings.getInstance();
        final ThreeState caseSensitiveValue;
        switch (codeInsightSettings.COMPLETION_CASE_SENSITIVE) {
            case CodeInsightSettings.ALL:
                caseSensitiveValue = ThreeState.YES;
                break;
            case CodeInsightSettings.NONE:
                caseSensitiveValue = ThreeState.NO;
                break;
            default:
                caseSensitiveValue = ThreeState.UNSURE;
                break;
        }
        myCaseSensitiveCombo2.setValue(caseSensitiveValue);
        myCbSelectByChars2.setValue(codeInsightSettings.SELECT_AUTOPOPUP_SUGGESTIONS_BY_CHARS);
        myCbOnCodeCompletion2.setValue(codeInsightSettings.AUTOCOMPLETE_ON_CODE_COMPLETION);
        myCbOnSmartTypeCompletion2.setValue(codeInsightSettings.AUTOCOMPLETE_ON_SMART_TYPE_COMPLETION);
        myCbAutocompletion2.setValue(codeInsightSettings.AUTO_POPUP_COMPLETION_LOOKUP);
        myCbAutopopupJavaDoc2.setValue(codeInsightSettings.AUTO_POPUP_JAVADOC_INFO);
        myAutopopupJavaDocField2.setEnabled(codeInsightSettings.AUTO_POPUP_JAVADOC_INFO);
        myAutopopupJavaDocField2.setValue(codeInsightSettings.JAVADOC_INFO_DELAY);
        myCbParameterInfoPopup2.setValue(codeInsightSettings.AUTO_POPUP_PARAMETER_INFO);
        myParameterInfoDelayField2.setEnabled(codeInsightSettings.AUTO_POPUP_PARAMETER_INFO);
        myParameterInfoDelayField2.setValue(codeInsightSettings.PARAMETER_INFO_DELAY);
        myCbShowFullParameterSignatures2.setValue(codeInsightSettings.SHOW_FULL_SIGNATURES_IN_PARAMETER_INFO);
        myCbSorting2.setValue(UISettings.getInstance().SORT_LOOKUP_ELEMENTS_LEXICOGRAPHICALLY);
    }

    /**
     * Writes the UI control values back into the settings and notifies the
     * daemon analyzer of the current project (if any) so highlighting reacts.
     */
    public void apply() {
        CodeInsightSettings codeInsightSettings = CodeInsightSettings.getInstance();
        codeInsightSettings.COMPLETION_CASE_SENSITIVE = getCaseSensitiveValue();
        codeInsightSettings.SELECT_AUTOPOPUP_SUGGESTIONS_BY_CHARS = myCbSelectByChars2.getValue();
        codeInsightSettings.AUTOCOMPLETE_ON_CODE_COMPLETION = myCbOnCodeCompletion2.getValue();
        codeInsightSettings.AUTOCOMPLETE_ON_SMART_TYPE_COMPLETION = myCbOnSmartTypeCompletion2.getValue();
        codeInsightSettings.SHOW_FULL_SIGNATURES_IN_PARAMETER_INFO = myCbShowFullParameterSignatures2.getValue();
        codeInsightSettings.AUTO_POPUP_PARAMETER_INFO = myCbParameterInfoPopup2.getValue();
        codeInsightSettings.AUTO_POPUP_COMPLETION_LOOKUP = myCbAutocompletion2.getValue();
        codeInsightSettings.AUTO_POPUP_JAVADOC_INFO = myCbAutopopupJavaDoc2.getValue();
        codeInsightSettings.PARAMETER_INFO_DELAY = myParameterInfoDelayField2.getValueOrError();
        codeInsightSettings.JAVADOC_INFO_DELAY = myAutopopupJavaDocField2.getValueOrError();
        UISettings.getInstance().SORT_LOOKUP_ELEMENTS_LEXICOGRAPHICALLY = myCbSorting2.getValue();
        Project project = DataManager.getInstance().getDataContext(myLayout).getData(CommonDataKeys.PROJECT);
        if (project != null) {
            DaemonCodeAnalyzer.getInstance(project).settingsChanged();
        }
    }

    /** @return true when any UI control differs from the stored settings. */
    public boolean isModified() {
        CodeInsightSettings codeInsightSettings = CodeInsightSettings.getInstance();
        boolean isModified = false;
        //noinspection ConstantConditions
        isModified |= getCaseSensitiveValue() != codeInsightSettings.COMPLETION_CASE_SENSITIVE;
        isModified |= isModified(myCbOnCodeCompletion2, codeInsightSettings.AUTOCOMPLETE_ON_CODE_COMPLETION);
        isModified |= isModified(myCbSelectByChars2, codeInsightSettings.SELECT_AUTOPOPUP_SUGGESTIONS_BY_CHARS);
        isModified |= isModified(myCbOnSmartTypeCompletion2, codeInsightSettings.AUTOCOMPLETE_ON_SMART_TYPE_COMPLETION);
        isModified |= isModified(myCbShowFullParameterSignatures2, codeInsightSettings.SHOW_FULL_SIGNATURES_IN_PARAMETER_INFO);
        isModified |= isModified(myCbParameterInfoPopup2, codeInsightSettings.AUTO_POPUP_PARAMETER_INFO);
        isModified |= isModified(myCbAutocompletion2, codeInsightSettings.AUTO_POPUP_COMPLETION_LOOKUP);
        isModified |= isModified(myCbAutopopupJavaDoc2, codeInsightSettings.AUTO_POPUP_JAVADOC_INFO);
        isModified |= isModified(myParameterInfoDelayField2, codeInsightSettings.PARAMETER_INFO_DELAY);
        isModified |= isModified(myAutopopupJavaDocField2, codeInsightSettings.JAVADOC_INFO_DELAY);
        isModified |= isModified(myCbSorting2, UISettings.getInstance().SORT_LOOKUP_ELEMENTS_LEXICOGRAPHICALLY);
        return isModified;
    }

    /** @return the root layout of this panel, never null. */
    @Nonnull
    @Override
    public Layout compute() {
        return myLayout;
    }

    /** Null-safe comparison of a component's current value with a settings value. */
    private static <V> boolean isModified(ValueComponent<V> valueComponent, V value) {
        return !Comparing.equal(valueComponent.getValue(), value);
    }

    /** Maps the combo selection back to the CodeInsightSettings int constant. */
    @MagicConstant(intValues = {CodeInsightSettings.ALL, CodeInsightSettings.NONE, CodeInsightSettings.FIRST_LETTER})
    private int getCaseSensitiveValue() {
        ThreeState value = myCaseSensitiveCombo2.getValue();
        if (value == ThreeState.YES) {
            return CodeInsightSettings.ALL;
        }
        else if (value == ThreeState.NO) {
            return CodeInsightSettings.NONE;
        }
        else {
            return CodeInsightSettings.FIRST_LETTER;
        }
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.facet.termsstats.strings;
import com.carrotsearch.hppc.ObjectObjectOpenHashMap;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.lucene.HashedBytesRef;
import org.elasticsearch.common.recycler.Recycler;
import org.elasticsearch.index.fielddata.BytesValues;
import org.elasticsearch.index.fielddata.DoubleValues;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.script.SearchScript;
import org.elasticsearch.search.facet.DoubleFacetAggregatorBase;
import org.elasticsearch.search.facet.FacetExecutor;
import org.elasticsearch.search.facet.InternalFacet;
import org.elasticsearch.search.facet.terms.strings.HashedAggregator;
import org.elasticsearch.search.facet.termsstats.TermsStatsFacet;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
/**
 * Facet executor for the terms_stats facet on string (bytes) keys.
 * <p>
 * Collects, per unique term of the key field, the doc count plus
 * min/max/total/count statistics of either a numeric value field or a value
 * script. Entries are accumulated in a recycled hash map which MUST be closed
 * exactly once before the facet is returned.
 */
public class TermsStatsStringFacetExecutor extends FacetExecutor {

    private final TermsStatsFacet.ComparatorType comparatorType;
    final IndexFieldData keyIndexFieldData;
    final IndexNumericFieldData valueIndexFieldData;
    /** Optional value script; when null, the numeric value field is used instead. */
    final SearchScript script;
    /** Requested number of entries (0 means "all terms"). */
    private final int size;
    /** Per-shard oversampling limit used before the reduce phase. */
    private final int shardSize;
    /** Recycled map from hashed term bytes to its running statistics entry. */
    final Recycler.V<ObjectObjectOpenHashMap<HashedBytesRef, InternalTermsStatsStringFacet.StringEntry>> entries;
    /** Number of docs with no value for the key field. */
    long missing;

    public TermsStatsStringFacetExecutor(IndexFieldData keyIndexFieldData, IndexNumericFieldData valueIndexFieldData, SearchScript valueScript,
                                         int size, int shardSize, TermsStatsFacet.ComparatorType comparatorType, SearchContext context) {
        this.keyIndexFieldData = keyIndexFieldData;
        this.valueIndexFieldData = valueIndexFieldData;
        this.script = valueScript;
        this.size = size;
        this.shardSize = shardSize;
        this.comparatorType = comparatorType;
        this.entries = context.cacheRecycler().hashMap(-1);
    }

    @Override
    public Collector collector() {
        return new Collector();
    }

    /**
     * Builds the shard-level facet from the accumulated entries and releases
     * the recycled map on every return path.
     */
    @Override
    public InternalFacet buildFacet(String facetName) {
        if (entries.v().isEmpty()) {
            entries.close();
            return new InternalTermsStatsStringFacet(facetName, comparatorType, size, ImmutableList.<InternalTermsStatsStringFacet.StringEntry>of(), missing);
        }
        if (size == 0) { // all terms
            // All terms: copy the entries out, they are sorted on the way back.
            List<InternalTermsStatsStringFacet.StringEntry> stringEntries = new ArrayList<>();
            final boolean[] states = entries.v().allocated;
            final Object[] values = entries.v().values;
            for (int i = 0; i < states.length; i++) {
                if (states[i]) {
                    stringEntries.add((InternalTermsStatsStringFacet.StringEntry) values[i]);
                }
            }
            // FIX: this path previously returned without closing 'entries',
            // leaking the recycled map (the other two paths both close it).
            // The StringEntry objects were already copied out above, so
            // releasing the map here is safe.
            entries.close();
            return new InternalTermsStatsStringFacet(facetName, comparatorType, 0 /* indicates all terms*/, stringEntries, missing);
        }
        // Top-N: sort the backing array in place (nulls are the unallocated
        // slots) and keep at most shardSize leading non-null entries.
        Object[] values = entries.v().values;
        Arrays.sort(values, (Comparator) comparatorType.comparator());
        List<InternalTermsStatsStringFacet.StringEntry> ordered = Lists.newArrayList();
        int limit = shardSize;
        for (int i = 0; i < limit; i++) {
            InternalTermsStatsStringFacet.StringEntry value = (InternalTermsStatsStringFacet.StringEntry) values[i];
            if (value == null) {
                break;
            }
            ordered.add(value);
        }
        entries.close();
        return new InternalTermsStatsStringFacet(facetName, comparatorType, size, ordered, missing);
    }

    /** Lucene collector that feeds matching docs into the aggregator. */
    class Collector extends FacetExecutor.Collector {

        private final Aggregator aggregator;
        private BytesValues keyValues;

        public Collector() {
            if (script != null) {
                this.aggregator = new ScriptAggregator(entries.v(), script);
            } else {
                this.aggregator = new Aggregator(entries.v());
            }
        }

        @Override
        public void setScorer(Scorer scorer) throws IOException {
            if (script != null) {
                script.setScorer(scorer);
            }
        }

        @Override
        public void setNextReader(AtomicReaderContext context) throws IOException {
            keyValues = keyIndexFieldData.load(context).getBytesValues(true);
            if (script != null) {
                script.setNextReader(context);
            } else {
                // Field-value mode: point the aggregator at this segment's values.
                aggregator.valueValues = valueIndexFieldData.load(context).getDoubleValues();
            }
        }

        @Override
        public void collect(int doc) throws IOException {
            aggregator.onDoc(doc, keyValues);
        }

        @Override
        public void postCollection() {
            TermsStatsStringFacetExecutor.this.missing = aggregator.missing;
            aggregator.release();
        }
    }

    /** Accumulates per-term statistics from a numeric value field. */
    public static class Aggregator extends HashedAggregator {

        final ObjectObjectOpenHashMap<HashedBytesRef, InternalTermsStatsStringFacet.StringEntry> entries;
        /** Reusable lookup key to avoid allocating on every value. */
        final HashedBytesRef spare = new HashedBytesRef();
        int missing = 0;
        DoubleValues valueValues;
        ValueAggregator valueAggregator = new ValueAggregator();

        public Aggregator(ObjectObjectOpenHashMap<HashedBytesRef, InternalTermsStatsStringFacet.StringEntry> entries) {
            this.entries = entries;
        }

        @Override
        public void onValue(int docId, BytesRef value, int hashCode, BytesValues values) {
            spare.reset(value, hashCode);
            InternalTermsStatsStringFacet.StringEntry stringEntry = entries.get(spare);
            if (stringEntry == null) {
                // First time this term is seen: deep-copy the bytes for the map key.
                HashedBytesRef theValue = new HashedBytesRef(values.copyShared(), hashCode);
                stringEntry = new InternalTermsStatsStringFacet.StringEntry(theValue, 0, 0, 0, Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY);
                entries.put(theValue, stringEntry);
            }
            stringEntry.count++;
            valueAggregator.stringEntry = stringEntry;
            valueAggregator.onDoc(docId, valueValues);
        }

        /** Folds each numeric value of a doc into the current term's entry. */
        public static class ValueAggregator extends DoubleFacetAggregatorBase {

            InternalTermsStatsStringFacet.StringEntry stringEntry;

            @Override
            public void onValue(int docId, double value) {
                if (value < stringEntry.min) {
                    stringEntry.min = value;
                }
                if (value > stringEntry.max) {
                    stringEntry.max = value;
                }
                stringEntry.total += value;
                stringEntry.totalCount++;
            }
        }
    }

    /** Accumulates per-term statistics from a value script instead of a field. */
    public static class ScriptAggregator extends Aggregator {

        private final SearchScript script;

        public ScriptAggregator(ObjectObjectOpenHashMap<HashedBytesRef, InternalTermsStatsStringFacet.StringEntry> entries, SearchScript script) {
            super(entries);
            this.script = script;
        }

        @Override
        public void onValue(int docId, BytesRef value, int hashCode, BytesValues values) {
            spare.reset(value, hashCode);
            InternalTermsStatsStringFacet.StringEntry stringEntry = entries.get(spare);
            if (stringEntry == null) {
                // New entry starts with count 1 (this doc); existing entries increment.
                HashedBytesRef theValue = new HashedBytesRef(values.copyShared(), hashCode);
                stringEntry = new InternalTermsStatsStringFacet.StringEntry(theValue, 1, 0, 0, Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY);
                entries.put(theValue, stringEntry);
            } else {
                stringEntry.count++;
            }
            script.setNextDocId(docId);
            double valueValue = script.runAsDouble();
            if (valueValue < stringEntry.min) {
                stringEntry.min = valueValue;
            }
            if (valueValue > stringEntry.max) {
                stringEntry.max = valueValue;
            }
            stringEntry.total += valueValue;
            stringEntry.totalCount++;
        }
    }
}
| |
package nl.esciencecenter.vbrowser.vrs.data.xml;
import java.io.IOException;
import java.io.StringReader;
import java.io.StringWriter;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.List;
import javax.xml.transform.OutputKeys;
import javax.xml.transform.Source;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.stream.StreamResult;
import javax.xml.transform.stream.StreamSource;
import nl.esciencecenter.ptk.data.StringHolder;
import nl.esciencecenter.vbrowser.vrs.VRSContext;
import nl.esciencecenter.vbrowser.vrs.VRSProperties;
import nl.esciencecenter.vbrowser.vrs.data.Attribute;
import nl.esciencecenter.vbrowser.vrs.data.AttributeSet;
import nl.esciencecenter.vbrowser.vrs.data.AttributeType;
import nl.esciencecenter.vbrowser.vrs.exceptions.VrsException;
import nl.esciencecenter.vbrowser.vrs.exceptions.VrsIOException;
import nl.esciencecenter.vbrowser.vrs.exceptions.XMLDataException;
import nl.esciencecenter.vbrowser.vrs.infors.InfoResourceNode;
import nl.esciencecenter.vbrowser.vrs.infors.VInfoResourcePath;
import nl.esciencecenter.vbrowser.vrs.vrl.VRL;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.AnnotationIntrospector;
import com.fasterxml.jackson.databind.introspect.AnnotationIntrospectorPair;
import com.fasterxml.jackson.dataformat.xml.JacksonXmlAnnotationIntrospector;
import com.fasterxml.jackson.dataformat.xml.XmlMapper;
/**
* XML Data Utility to parse and create VRS Data Types from and to XML.
* <p>
* XMLData uses proxy objects:<br>
* <ul>
* <li>Attribute <=> XMLAttribute
* <li>AttributeSet <=> XMLAttributeSet
* <li>List<AttributeSet> <=> XMLAttributeSetList
* </ul>
*/
/**
 * XML Data Utility to parse and create VRS Data Types from and to XML.
 * <p>
 * XMLData uses proxy objects:<br>
 * <ul>
 * <li>Attribute <=> XMLAttribute
 * <li>AttributeSet <=> XMLAttributeSet
 * <li>List<AttributeSet> <=> XMLAttributeSetList
 * </ul>
 */
public class XMLData {

    private static final Logger logger = LoggerFactory.getLogger(XMLData.class);

    // ========
    // Instance
    // ========

    /** Jackson XML mapper used for all (de)serialization. */
    protected XmlMapper xmlMapper;

    protected VRSContext vrsContext;

    public XMLData(VRSContext context) {
        VRSJacksonXmlAnnotarionIntrospector introspector = new VRSJacksonXmlAnnotarionIntrospector();
        xmlMapper = new XmlMapper();
        vrsContext = context;
        // xmlMapper.setAnnotationIntrospectors(new
        // VRSJacksonXmlAnnotarionIntrospector(),new
        // JacksonXmlAnnotationIntrospector());
        // NOTE(review): SerializationConfig is immutable — withInsertedAnnotationIntrospector()
        // returns a NEW config which is discarded here, so this call is likely a no-op.
        // Kept as-is because the mixin module below may already provide the intended
        // behaviour; confirm whether xmlMapper.setAnnotationIntrospector(...) was meant.
        xmlMapper.getSerializationConfig().withInsertedAnnotationIntrospector(introspector);
        // optional mixin module.
        xmlMapper.registerModule(new VRSXMLMixinModule());
    }

    /** Serializes an AttributeSet to an XML string. */
    public String toXML(AttributeSet attrSet) throws Exception {
        XMLAttributeSet xmlProps = toXMLAttributeSet(attrSet);
        String xml = xmlMapper.writeValueAsString(xmlProps);
        return xml;
    }

    /** Wraps an AttributeSet in its XML proxy after verifying it is serializable. */
    public XMLAttributeSet toXMLAttributeSet(AttributeSet attrSet) throws XMLDataException {
        try {
            attrSet = checkSerialization(attrSet);
            XMLAttributeSet xmlProps = new XMLAttributeSet(attrSet);
            return xmlProps;
        } catch (Exception e) {
            throw new XMLDataException(e.getMessage(), e);
        }
    }

    /** Wraps a named list of AttributeSets in its XML proxy. */
    public XMLAttributeSetList toXMLAttributeSetList(String name, List<AttributeSet> attrSet) throws XMLDataException {
        try {
            XMLAttributeSetList xmlProps = new XMLAttributeSetList(name, attrSet);
            return xmlProps;
        } catch (Exception e) {
            throw new XMLDataException(e.getMessage(), e);
        }
    }

    /**
     * Verifies every property value can be serialized by the XML mapper,
     * converting VRL values to java.net.URI in place.
     *
     * @throws XMLDataException if a VRL is malformed or a value is not serializable
     */
    public VRSProperties checkSerialization(VRSProperties props) throws XMLDataException {
        for (String key : props.keySet()) {
            Object value = props.get(key);
            // VRL still not 100% serializable, use URI encoding.
            if (value instanceof VRL) {
                try {
                    java.net.URI uri;
                    uri = ((VRL) value).toURI();
                    value = uri;
                    props.set(key, value);
                } catch (URISyntaxException e) {
                    throw new XMLDataException(e.getMessage(), e);
                }
            }
            // Attribute value should be serializable:
            if ((value != null) && (!xmlMapper.canSerialize(value.getClass()))) {
                logger.error("XMLMapper can't serialize attribute:'{}' with class:{}", key, value.getClass());
                throw new XMLDataException("XMLMapper can not serialize field:'" + key + "' with class="
                        + value.getClass());
            }
        }
        return props;
    }

    /**
     * Verifies every Attribute value can be serialized by the XML mapper,
     * converting VRL values to java.net.URI in place.
     *
     * @throws XMLDataException if a VRL is malformed or a value is not serializable
     */
    public AttributeSet checkSerialization(AttributeSet attrs) throws XMLDataException {
        for (String key : attrs.keySet()) {
            Attribute attr = attrs.get(key);
            AttributeType type = attr.getType();
            Object value = attr.getValue();
            // VRL still not 100% serializable, use URI encoding.
            if (value instanceof VRL) {
                try {
                    java.net.URI uri;
                    uri = ((VRL) value).toURI();
                    attr.setValue(uri);
                    value = uri;
                } catch (URISyntaxException e) {
                    throw new XMLDataException(e.getMessage(), e);
                }
            }
            // Attribute value should be serializable:
            if ((value != null) && (!xmlMapper.canSerialize(value.getClass()))) {
                logger.error("XMLMapper can't serialize attribute:<{}>:'{}' with class:{}", type, key, value.getClass());
                throw new XMLDataException("XMLMapper can not serialize field:'" + key + "' with class="
                        + value.getClass());
            }
        }
        return attrs;
    }

    /** Parses an XML string into an AttributeSet. */
    public AttributeSet parseAttributeSet(String xmlString) throws XMLDataException {
        try {
            XMLAttributeSet xmlAttrs;
            xmlAttrs = xmlMapper.readValue(xmlString, XMLAttributeSet.class);
            AttributeSet attrSet = xmlAttrs.toAttributeSet();
            return attrSet;
        } catch (Exception e) {
            throw new XMLDataException(e.getMessage(), e);
        }
    }

    /**
     * Creates the XML proxy of a resource node, optionally descending into its
     * sub-nodes.
     */
    public XMLResourceNode createXMLResourceNode(VInfoResourcePath infoNode, boolean recursive) throws VrsException {
        XMLResourceNode rootXmlNode = new XMLResourceNode(infoNode.getResourceType());
        AttributeSet attrSet = infoNode.getInfoAttributes();
        rootXmlNode.setXMLAttributes(new XMLAttributeSet(this.checkSerialization(attrSet)));
        if (recursive) {
            addSubNodesTo(rootXmlNode, infoNode, recursive);
        }
        return rootXmlNode;
    }

    /**
     * Recursively adds the sub-nodes of {@code infoNode} to {@code xmlNode}.
     * Nodes that are not InfoResourceNodes have no listable sub-nodes and are
     * returned unchanged.
     */
    protected XMLResourceNode addSubNodesTo(XMLResourceNode xmlNode, VInfoResourcePath infoNode, boolean recursive)
            throws VrsException, XMLDataException {
        // FIX: previously a non-InfoResourceNode left folderNode null and the
        // call to folderNode.listResourceNodes() below threw a NullPointerException.
        if (!(infoNode instanceof InfoResourceNode)) {
            return xmlNode;
        }
        InfoResourceNode folderNode = (InfoResourceNode) infoNode;
        List<? extends InfoResourceNode> subNodes = folderNode.listResourceNodes();
        List<XMLResourceNode> xmlSubNodes = null;
        if (subNodes != null) {
            xmlSubNodes = new ArrayList<XMLResourceNode>();
            for (InfoResourceNode subNode : subNodes) {
                XMLResourceNode subXmlNode = this.createXMLResourceNode(subNode, recursive);
                xmlSubNodes.add(subXmlNode);
            }
        }
        xmlNode.setSubNodes(xmlSubNodes);
        return xmlNode;
    }

    /** Serializes a resource node (recursively) to an XML string. */
    public String toXML(VInfoResourcePath infoNode) throws VrsException {
        XMLResourceNode xmlNode = this.createXMLResourceNode(infoNode, true);
        String xml;
        try {
            xml = xmlMapper.writeValueAsString(xmlNode);
            return xml;
        } catch (JsonProcessingException e) {
            throw new VrsIOException(e);
        }
    }

    /**
     * Parse XML String which should be serialized ResourceNodes and add these parsed nodes.
     */
    public void addXMLResourceNodesTo(VInfoResourcePath parentNode, String xmlString) throws XMLDataException {
        try {
            XMLResourceNode xmlRootNode = xmlMapper.readValue(xmlString, XMLResourceNode.class);
            addXMLResourceNodesTo(parentNode, xmlRootNode);
        } catch (IOException e) {
            throw new XMLDataException(e.getMessage(), e);
        }
    }

    /**
     * Add sub-nodes of XMLResourceNode to actual InfoResourceNode. Parent node 'xmlRootNode' is not
     * added.
     */
    protected void addXMLResourceNodesTo(VInfoResourcePath parentNode, XMLResourceNode xmlRootNode)
            throws XMLDataException {
        List<XMLResourceNode> subNodes = xmlRootNode.getSubNodes();
        if (subNodes == null) {
            return;
        }
        for (XMLResourceNode xmlNode : subNodes) {
            try {
                VInfoResourcePath subNode = parentNode.createSubNode(xmlNode.getResourceType(), xmlNode
                        .getXMLAttributes().toAttributeSet());
                addXMLResourceNodesTo(subNode, xmlNode);
            } catch (Exception e) {
                throw new XMLDataException(e.getMessage(), e);
            }
        }
    }

    /**
     * Pretty-prints an XML string with the given indent width.
     *
     * @throws RuntimeException (wrapping the cause) if the input is not valid XML
     */
    public static String prettyFormat(String input, int indent) {
        try {
            Source xmlInput = new StreamSource(new StringReader(input));
            StringWriter stringWriter = new StringWriter();
            StreamResult xmlOutput = new StreamResult(stringWriter);
            TransformerFactory transformerFactory = TransformerFactory.newInstance();
            transformerFactory.setAttribute("indent-number", indent);
            Transformer transformer = transformerFactory.newTransformer();
            transformer.setOutputProperty(OutputKeys.INDENT, "yes");
            transformer.transform(xmlInput, xmlOutput);
            return xmlOutput.getWriter().toString();
        } catch (Exception e) {
            logger.error("=== XML Parse Error ===\n{}", input);
            throw new RuntimeException(e);
        }
    }

    /** Serializes VRSProperties to an XML string. */
    public String toXML(VRSProperties props) throws Exception {
        XMLProperties xmlProps = toXMLProperties(props);
        String xml = xmlMapper.writeValueAsString(xmlProps);
        return xml;
    }

    /** Wraps VRSProperties in its XML proxy after verifying it is serializable. */
    public XMLProperties toXMLProperties(VRSProperties props) throws XMLDataException {
        try {
            props = checkSerialization(props);
            XMLProperties xmlProps = new XMLProperties(props);
            return xmlProps;
        } catch (Exception e) {
            throw new XMLDataException(e.getMessage(), e);
        }
    }

    /** Parses an XML string into VRSProperties. */
    public VRSProperties parseVRSProperties(String xmlString) throws XMLDataException {
        try {
            XMLProperties xmlProps;
            xmlProps = xmlMapper.readValue(xmlString, XMLProperties.class);
            VRSProperties props = xmlProps.toVRSProperties();
            return props;
        } catch (Exception e) {
            throw new XMLDataException(e.getMessage(), e);
        }
    }

    /** Serializes a named list of AttributeSets to an XML string. */
    public String toXML(String name, List<AttributeSet> list) throws XMLDataException {
        try {
            XMLAttributeSetList xmlProps = toXMLAttributeSetList(name, list);
            String xml = xmlMapper.writeValueAsString(xmlProps);
            return xml;
        } catch (Exception e) {
            throw new XMLDataException(e.getMessage(), e);
        }
    }

    /**
     * Parses an XML string into a list of AttributeSets; the list's group name
     * is returned through {@code setNameH}.
     */
    public List<AttributeSet> parseAttributeSetList(String xmlString, StringHolder setNameH) throws XMLDataException {
        try {
            XMLAttributeSetList xmlAttrSetList;
            xmlAttrSetList = xmlMapper.readValue(xmlString, XMLAttributeSetList.class);
            List<AttributeSet> attrSet = xmlAttrSetList.toAttributeSetList();
            setNameH.value = xmlAttrSetList.getSetGroupName();
            return attrSet;
        } catch (Exception e) {
            throw new XMLDataException(e.getMessage(), e);
        }
    }
}
| |
package com.udacity.gamedev.gigagal.entities;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.Input.Keys;
import com.badlogic.gdx.graphics.g2d.SpriteBatch;
import com.badlogic.gdx.graphics.g2d.TextureRegion;
import com.badlogic.gdx.math.Rectangle;
import com.badlogic.gdx.math.Vector2;
import com.badlogic.gdx.utils.Array;
import com.badlogic.gdx.utils.DelayedRemovalArray;
import com.badlogic.gdx.utils.TimeUtils;
import com.udacity.gamedev.gigagal.Level;
import com.udacity.gamedev.gigagal.util.Assets;
import com.udacity.gamedev.gigagal.util.Constants;
import com.udacity.gamedev.gigagal.util.Enums;
import com.udacity.gamedev.gigagal.util.Enums.Direction;
import com.udacity.gamedev.gigagal.util.Enums.JumpState;
import com.udacity.gamedev.gigagal.util.Enums.WalkState;
import com.udacity.gamedev.gigagal.util.Utils;
public class GigaGal {
public final static String TAG = GigaGal.class.getName();
public boolean jumpButtonPressed;
public boolean leftButtonPressed;
public boolean rightButtonPressed;
private Level level;
private Vector2 spawnLocation;
private Vector2 position;
private Vector2 lastFramePosition;
private Vector2 velocity;
private Direction facing;
private JumpState jumpState;
private WalkState walkState;
private long walkStartTime;
private long jumpStartTime;
private int ammo;
private int lives;
/**
 * Creates GigaGal tied to the given level, positioned at the spawn point.
 *
 * @param spawnLocation world-space spawn position (eye level)
 * @param level         owning level, used for enemies, powerups and bullets
 */
public GigaGal(Vector2 spawnLocation, Level level) {
    this.level = level;
    this.spawnLocation = spawnLocation;
    this.position = new Vector2();
    this.lastFramePosition = new Vector2();
    this.velocity = new Vector2();
    init();
}
/** @return rounds of ammunition currently available */
public int getAmmo() {
    return this.ammo;
}
/** @return lives remaining */
public int getLives() {
    return this.lives;
}
/**
 * Restores ammo and lives to their starting values, then respawns GigaGal
 * at the spawn point. Called from the constructor and on level restart.
 */
public void init() {
    lives = Constants.INITIAL_LIVES;
    ammo = Constants.INITIAL_AMMO;
    respawn();
}
/**
 * Places GigaGal back at the spawn point, facing right, falling, not walking,
 * with zero velocity. Does not touch ammo or lives.
 */
private void respawn() {
    facing = Direction.RIGHT;
    walkState = Enums.WalkState.NOT_WALKING;
    jumpState = Enums.JumpState.FALLING;
    velocity.setZero();
    position.set(spawnLocation);
    lastFramePosition.set(spawnLocation);
}
/** @return the live eye-level position vector (shared instance, not a copy) */
public Vector2 getPosition() {
    return this.position;
}
/**
 * Per-frame simulation step: applies gravity, handles kill plane, platform
 * landing, enemy collision/knockback, horizontal movement, jumping, powerup
 * pickup and shooting. Order of the phases matters (e.g. landing is resolved
 * before collisions and input).
 *
 * @param delta     seconds elapsed since the previous frame
 * @param platforms platforms to test for landing
 */
public void update(float delta, Array<Platform> platforms) {
    // Remember last frame's position so landedOnPlatform can detect crossing a top edge.
    lastFramePosition.set(position);
    velocity.y -= Constants.GRAVITY;
    position.mulAdd(velocity, delta);
    // Fell below the kill plane: lose a life and respawn.
    // NOTE(review): lives can go negative here; game-over handling is presumably elsewhere — confirm.
    if (position.y < Constants.KILL_PLANE) {
        lives--;
        respawn();
    }
    // Land on/fall off platforms (skipped while actively jumping upward).
    if (jumpState != Enums.JumpState.JUMPING) {
        if (jumpState != JumpState.RECOILING) {
            jumpState = Enums.JumpState.FALLING;
        }
        for (Platform platform : platforms) {
            if (landedOnPlatform(platform)) {
                jumpState = Enums.JumpState.GROUNDED;
                velocity.y = 0;
                velocity.x = 0;
                // Snap eye height to rest exactly on the platform top.
                position.y = platform.top + Constants.GIGAGAL_EYE_HEIGHT;
            }
        }
    }
    // Collide with enemies: axis-aligned bounds overlap triggers knockback
    // away from the enemy's side.
    Rectangle gigaGalBounds = new Rectangle(
            position.x - Constants.GIGAGAL_STANCE_WIDTH / 2,
            position.y - Constants.GIGAGAL_EYE_HEIGHT,
            Constants.GIGAGAL_STANCE_WIDTH,
            Constants.GIGAGAL_HEIGHT);
    for (Enemy enemy : level.getEnemies()) {
        Rectangle enemyBounds = new Rectangle(
                enemy.position.x - Constants.ENEMY_COLLISION_RADIUS,
                enemy.position.y - Constants.ENEMY_COLLISION_RADIUS,
                2 * Constants.ENEMY_COLLISION_RADIUS,
                2 * Constants.ENEMY_COLLISION_RADIUS
        );
        if (gigaGalBounds.overlaps(enemyBounds)) {
            if (position.x < enemy.position.x) {
                recoilFromEnemy(Direction.LEFT);
            } else {
                recoilFromEnemy(Direction.RIGHT);
            }
        }
    }
    // Move left/right — input is ignored while recoiling from a hit.
    if (jumpState != JumpState.RECOILING) {
        boolean left = Gdx.input.isKeyPressed(Keys.LEFT) || leftButtonPressed;
        boolean right = Gdx.input.isKeyPressed(Keys.RIGHT) || rightButtonPressed;
        if (left && !right) {
            moveLeft(delta);
        } else if (right && !left) {
            moveRight(delta);
        } else {
            walkState = Enums.WalkState.NOT_WALKING;
        }
    }
    // Jump: start on ground, sustain while held (up to max duration), cut short on release.
    if (Gdx.input.isKeyPressed(Keys.Z) || jumpButtonPressed) {
        switch (jumpState) {
            case GROUNDED:
                startJump();
                break;
            case JUMPING:
                continueJump();
        }
    } else {
        endJump();
    }
    // Check powerups: pick up any whose sprite bounds overlap GigaGal's.
    // DelayedRemovalArray begin/end makes removeIndex safe during iteration.
    DelayedRemovalArray<Powerup> powerups = level.getPowerups();
    powerups.begin();
    for (int i = 0; i < powerups.size; i++) {
        Powerup powerup = powerups.get(i);
        Rectangle powerupBounds = new Rectangle(
                powerup.position.x - Constants.POWERUP_CENTER.x,
                powerup.position.y - Constants.POWERUP_CENTER.y,
                Assets.instance.powerupAssets.powerup.getRegionWidth(),
                Assets.instance.powerupAssets.powerup.getRegionHeight()
        );
        if (gigaGalBounds.overlaps(powerupBounds)) {
            ammo += Constants.POWERUP_AMMO;
            // TODO: Add the POWERUP_SCORE to the level.score
            powerups.removeIndex(i);
        }
    }
    powerups.end();
    // Shoot on key press (edge-triggered, one bullet per press).
    if (Gdx.input.isKeyJustPressed(Keys.X)) {
        shoot();
    }
}
/**
 * Fires one bullet from the arm cannon in the current facing direction,
 * consuming one round of ammo. Does nothing when out of ammo.
 */
public void shoot() {
    if (ammo <= 0) {
        return;
    }
    ammo--;
    // Cannon offset is mirrored horizontally depending on facing.
    float xOffset = (facing == Direction.RIGHT)
            ? Constants.GIGAGAL_CANNON_OFFSET.x
            : -Constants.GIGAGAL_CANNON_OFFSET.x;
    Vector2 bulletPosition = new Vector2(
            position.x + xOffset,
            position.y + Constants.GIGAGAL_CANNON_OFFSET.y);
    level.spawnBullet(bulletPosition, facing);
}
/**
 * True when GigaGal landed on this platform during the current frame: her eye
 * line crossed the platform's top edge between frames AND at least one foot is
 * over the platform (or the platform is narrower than her stance and sits
 * entirely between her feet).
 */
boolean landedOnPlatform(Platform platform) {
    boolean crossedTop = lastFramePosition.y - Constants.GIGAGAL_EYE_HEIGHT >= platform.top
            && position.y - Constants.GIGAGAL_EYE_HEIGHT < platform.top;
    if (!crossedTop) {
        return false;
    }
    float leftFoot = position.x - Constants.GIGAGAL_STANCE_WIDTH / 2;
    float rightFoot = position.x + Constants.GIGAGAL_STANCE_WIDTH / 2;
    boolean leftFootOn = platform.left < leftFoot && platform.right > leftFoot;
    boolean rightFootOn = platform.left < rightFoot && platform.right > rightFoot;
    boolean straddling = platform.left > leftFoot && platform.right < rightFoot;
    return leftFootOn || rightFootOn || straddling;
}
/**
 * Walks left for this frame, restarting the walk-animation clock when a new
 * walk begins on the ground.
 *
 * @param delta frame time in seconds
 */
private void moveLeft(float delta) {
    boolean startingNewWalk =
            jumpState == Enums.JumpState.GROUNDED && walkState != Enums.WalkState.WALKING;
    if (startingNewWalk) {
        walkStartTime = TimeUtils.nanoTime();
    }
    facing = Direction.LEFT;
    walkState = Enums.WalkState.WALKING;
    position.x = position.x - Constants.GIGAGAL_MOVE_SPEED * delta;
}
/**
 * Walks right for this frame, restarting the walk-animation clock when a new
 * walk begins on the ground.
 *
 * @param delta frame time in seconds
 */
private void moveRight(float delta) {
    boolean startingNewWalk =
            jumpState == Enums.JumpState.GROUNDED && walkState != Enums.WalkState.WALKING;
    if (startingNewWalk) {
        walkStartTime = TimeUtils.nanoTime();
    }
    facing = Direction.RIGHT;
    walkState = Enums.WalkState.WALKING;
    position.x = position.x + Constants.GIGAGAL_MOVE_SPEED * delta;
}
/** Begins a jump: records the jump start time, marks the jump state, and applies the first frame of jump velocity. */
private void startJump() {
    jumpStartTime = TimeUtils.nanoTime();
    jumpState = Enums.JumpState.JUMPING;
    continueJump();
}
/**
 * Sustains an in-progress jump: keeps applying jump velocity while the jump
 * button is held within the maximum jump duration, then ends the jump.
 */
private void continueJump() {
    if (jumpState != Enums.JumpState.JUMPING) {
        return;
    }
    boolean withinJumpWindow =
            Utils.secondsSince(jumpStartTime) < Constants.MAX_JUMP_DURATION;
    if (withinJumpWindow) {
        velocity.y = Constants.JUMP_SPEED;
    } else {
        endJump();
    }
}
/** Ends an active jump by converting the JUMPING state into FALLING; has no effect in any other state. */
private void endJump() {
    boolean jumping = jumpState == Enums.JumpState.JUMPING;
    if (jumping) {
        jumpState = Enums.JumpState.FALLING;
    }
}
/**
 * Knocks GigaGal back from an enemy: upward plus horizontally in the given
 * direction, entering the RECOILING state.
 *
 * @param direction horizontal direction of the knockback
 */
private void recoilFromEnemy(Direction direction) {
    jumpState = JumpState.RECOILING;
    velocity.y = Constants.KNOCKBACK_VELOCITY.y;
    velocity.x = (direction == Direction.LEFT)
            ? -Constants.KNOCKBACK_VELOCITY.x
            : Constants.KNOCKBACK_VELOCITY.x;
}
/**
 * Draws GigaGal using the region matching her current facing and motion
 * state. Airborne frames take precedence over walking frames.
 *
 * @param batch sprite batch to draw into (assumed already begun)
 */
public void render(SpriteBatch batch) {
    boolean airborne = jumpState != Enums.JumpState.GROUNDED;
    boolean walking = walkState == Enums.WalkState.WALKING;
    TextureRegion region;
    if (facing == Direction.RIGHT) {
        if (airborne) {
            region = Assets.instance.gigaGalAssets.jumpingRight;
        } else if (walking) {
            region = Assets.instance.gigaGalAssets.walkingRightAnimation
                    .getKeyFrame(Utils.secondsSince(walkStartTime));
        } else {
            region = Assets.instance.gigaGalAssets.standingRight;
        }
    } else {
        if (airborne) {
            region = Assets.instance.gigaGalAssets.jumpingLeft;
        } else if (walking) {
            region = Assets.instance.gigaGalAssets.walkingLeftAnimation
                    .getKeyFrame(Utils.secondsSince(walkStartTime));
        } else {
            region = Assets.instance.gigaGalAssets.standingLeft;
        }
    }
    Utils.drawTextureRegion(batch, region, position, Constants.GIGAGAL_EYE_POSITION);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.binary;
import java.lang.reflect.Field;
import java.math.BigDecimal;
import java.sql.Time;
import java.sql.Timestamp;
import java.util.Collection;
import java.util.Date;
import java.util.Map;
import java.util.UUID;
import org.apache.ignite.binary.BinaryObjectException;
import org.apache.ignite.internal.util.GridUnsafe;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.internal.util.typedef.internal.S;
import org.apache.ignite.internal.util.typedef.internal.U;
/**
* Field accessor to speedup access.
*/
/**
 * Field accessor to speedup access.
 */
public abstract class BinaryFieldAccessor {
    /** Field ID. */
    protected final int id;

    /** Field name. */
    protected final String name;

    /** Mode. */
    protected final BinaryWriteMode mode;

    /**
     * Create accessor for the field.
     *
     * @param field Field.
     * @param id Field ID.
     * @return Accessor.
     */
    public static BinaryFieldAccessor create(Field field, int id) {
        BinaryWriteMode mode = BinaryUtils.mode(field.getType());

        switch (mode) {
            // Primitive fields get dedicated Unsafe-offset accessors (no boxing).
            case P_BYTE:
                return new BytePrimitiveAccessor(field, id);

            case P_BOOLEAN:
                return new BooleanPrimitiveAccessor(field, id);

            case P_SHORT:
                return new ShortPrimitiveAccessor(field, id);

            case P_CHAR:
                return new CharPrimitiveAccessor(field, id);

            case P_INT:
                return new IntPrimitiveAccessor(field, id);

            case P_LONG:
                return new LongPrimitiveAccessor(field, id);

            case P_FLOAT:
                return new FloatPrimitiveAccessor(field, id);

            case P_DOUBLE:
                return new DoublePrimitiveAccessor(field, id);

            // Modes fully determined by the declared field type: the accessor
            // can read/write with a fixed (non-dynamic) mode.
            case BYTE:
            case BOOLEAN:
            case SHORT:
            case CHAR:
            case INT:
            case LONG:
            case FLOAT:
            case DOUBLE:
            case DECIMAL:
            case STRING:
            case UUID:
            case DATE:
            case TIMESTAMP:
            case TIME:
            case BYTE_ARR:
            case SHORT_ARR:
            case INT_ARR:
            case LONG_ARR:
            case FLOAT_ARR:
            case DOUBLE_ARR:
            case CHAR_ARR:
            case BOOLEAN_ARR:
            case DECIMAL_ARR:
            case STRING_ARR:
            case UUID_ARR:
            case DATE_ARR:
            case TIMESTAMP_ARR:
            case TIME_ARR:
            case ENUM_ARR:
            case OBJECT_ARR:
            case BINARY_OBJ:
            case BINARY:
                return new DefaultFinalClassAccessor(field, id, mode, false);

            default:
                // For a non-final declared type the runtime class may differ
                // from the declared one, so the mode is resolved per value
                // ("dynamic" accessor).
                return new DefaultFinalClassAccessor(field, id, mode, !U.isFinal(field.getType()));
        }
    }

    /**
     * Protected constructor.
     *
     * @param field Field.
     * @param id Field ID.
     * @param mode Mode.
     */
    protected BinaryFieldAccessor(Field field, int id, BinaryWriteMode mode) {
        assert field != null;
        assert id != 0; // Field ID 0 is treated as invalid here.
        assert mode != null;

        this.name = field.getName();
        this.id = id;
        this.mode = mode;
    }

    /**
     * Get mode.
     *
     * @return Mode.
     */
    public BinaryWriteMode mode() {
        return mode;
    }

    /**
     * Write field.
     *
     * @param obj Object.
     * @param writer Writer.
     * @throws BinaryObjectException If failed.
     */
    public abstract void write(Object obj, BinaryWriterExImpl writer) throws BinaryObjectException;

    /**
     * Read field.
     *
     * @param obj Object.
     * @param reader Reader.
     * @throws BinaryObjectException If failed.
     */
    public void read(Object obj, BinaryReaderExImpl reader) throws BinaryObjectException {
        try {
            read0(obj, reader);
        }
        catch (Exception ex) {
            // Identify the field by name only when sensitive info is allowed in
            // messages; otherwise fall back to the numeric field ID.
            if (S.INCLUDE_SENSITIVE && !F.isEmpty(name))
                throw new BinaryObjectException("Failed to read field [name=" + name + ']', ex);
            else
                throw new BinaryObjectException("Failed to read field [id=" + id + ']', ex);
        }
    }

    /**
     * Read field (implementation). Exceptions are wrapped by {@link #read}.
     *
     * @param obj Object.
     * @param reader Reader.
     * @throws BinaryObjectException If failed.
     */
    protected abstract void read0(Object obj, BinaryReaderExImpl reader) throws BinaryObjectException;

    /**
     * Base primitive field accessor. Accesses the field through its Unsafe
     * offset rather than through reflection.
     */
    private static abstract class AbstractPrimitiveAccessor extends BinaryFieldAccessor {
        /** Offset. */
        protected final long offset;

        /**
         * Constructor.
         *
         * @param field Field.
         * @param id Field ID.
         * @param mode Mode.
         */
        protected AbstractPrimitiveAccessor(Field field, int id, BinaryWriteMode mode) {
            super(field, id, mode);

            offset = GridUnsafe.objectFieldOffset(field);
        }
    }

    /**
     * Byte field accessor.
     */
    private static class BytePrimitiveAccessor extends AbstractPrimitiveAccessor {
        /**
         * Constructor.
         *
         * @param field Field.
         * @param id Field ID.
         */
        public BytePrimitiveAccessor(Field field, int id) {
            super(field, id, BinaryWriteMode.P_BYTE);
        }

        /** {@inheritDoc} */
        @Override public void write(Object obj, BinaryWriterExImpl writer) throws BinaryObjectException {
            writer.writeFieldIdNoSchemaUpdate(id);

            byte val = GridUnsafe.getByteField(obj, offset);

            writer.writeByteFieldPrimitive(val);
        }

        /** {@inheritDoc} */
        @Override public void read0(Object obj, BinaryReaderExImpl reader) throws BinaryObjectException {
            byte val = reader.readByte(id);

            GridUnsafe.putByteField(obj, offset, val);
        }
    }

    /**
     * Boolean field accessor.
     */
    private static class BooleanPrimitiveAccessor extends AbstractPrimitiveAccessor {
        /**
         * Constructor.
         *
         * @param field Field.
         * @param id Field ID.
         */
        public BooleanPrimitiveAccessor(Field field, int id) {
            super(field, id, BinaryWriteMode.P_BOOLEAN);
        }

        /** {@inheritDoc} */
        @Override public void write(Object obj, BinaryWriterExImpl writer) throws BinaryObjectException {
            writer.writeFieldIdNoSchemaUpdate(id);

            boolean val = GridUnsafe.getBooleanField(obj, offset);

            writer.writeBooleanFieldPrimitive(val);
        }

        /** {@inheritDoc} */
        @Override public void read0(Object obj, BinaryReaderExImpl reader) throws BinaryObjectException {
            boolean val = reader.readBoolean(id);

            GridUnsafe.putBooleanField(obj, offset, val);
        }
    }

    /**
     * Short field accessor.
     */
    private static class ShortPrimitiveAccessor extends AbstractPrimitiveAccessor {
        /**
         * Constructor.
         *
         * @param field Field.
         * @param id Field ID.
         */
        public ShortPrimitiveAccessor(Field field, int id) {
            super(field, id, BinaryWriteMode.P_SHORT);
        }

        /** {@inheritDoc} */
        @Override public void write(Object obj, BinaryWriterExImpl writer) throws BinaryObjectException {
            writer.writeFieldIdNoSchemaUpdate(id);

            short val = GridUnsafe.getShortField(obj, offset);

            writer.writeShortFieldPrimitive(val);
        }

        /** {@inheritDoc} */
        @Override public void read0(Object obj, BinaryReaderExImpl reader) throws BinaryObjectException {
            short val = reader.readShort(id);

            GridUnsafe.putShortField(obj, offset, val);
        }
    }

    /**
     * Char field accessor.
     */
    private static class CharPrimitiveAccessor extends AbstractPrimitiveAccessor {
        /**
         * Constructor.
         *
         * @param field Field.
         * @param id Field ID.
         */
        public CharPrimitiveAccessor(Field field, int id) {
            super(field, id, BinaryWriteMode.P_CHAR);
        }

        /** {@inheritDoc} */
        @Override public void write(Object obj, BinaryWriterExImpl writer) throws BinaryObjectException {
            writer.writeFieldIdNoSchemaUpdate(id);

            char val = GridUnsafe.getCharField(obj, offset);

            writer.writeCharFieldPrimitive(val);
        }

        /** {@inheritDoc} */
        @Override public void read0(Object obj, BinaryReaderExImpl reader) throws BinaryObjectException {
            char val = reader.readChar(id);

            GridUnsafe.putCharField(obj, offset, val);
        }
    }

    /**
     * Int field accessor.
     */
    private static class IntPrimitiveAccessor extends AbstractPrimitiveAccessor {
        /**
         * Constructor.
         *
         * @param field Field.
         * @param id Field ID.
         */
        public IntPrimitiveAccessor(Field field, int id) {
            super(field, id, BinaryWriteMode.P_INT);
        }

        /** {@inheritDoc} */
        @Override public void write(Object obj, BinaryWriterExImpl writer) throws BinaryObjectException {
            writer.writeFieldIdNoSchemaUpdate(id);

            int val = GridUnsafe.getIntField(obj, offset);

            writer.writeIntFieldPrimitive(val);
        }

        /** {@inheritDoc} */
        @Override public void read0(Object obj, BinaryReaderExImpl reader) throws BinaryObjectException {
            int val = reader.readInt(id);

            GridUnsafe.putIntField(obj, offset, val);
        }
    }

    /**
     * Long field accessor.
     */
    private static class LongPrimitiveAccessor extends AbstractPrimitiveAccessor {
        /**
         * Constructor.
         *
         * @param field Field.
         * @param id Field ID.
         */
        public LongPrimitiveAccessor(Field field, int id) {
            super(field, id, BinaryWriteMode.P_LONG);
        }

        /** {@inheritDoc} */
        @Override public void write(Object obj, BinaryWriterExImpl writer) throws BinaryObjectException {
            writer.writeFieldIdNoSchemaUpdate(id);

            long val = GridUnsafe.getLongField(obj, offset);

            writer.writeLongFieldPrimitive(val);
        }

        /** {@inheritDoc} */
        @Override public void read0(Object obj, BinaryReaderExImpl reader) throws BinaryObjectException {
            long val = reader.readLong(id);

            GridUnsafe.putLongField(obj, offset, val);
        }
    }

    /**
     * Float field accessor.
     */
    private static class FloatPrimitiveAccessor extends AbstractPrimitiveAccessor {
        /**
         * Constructor.
         *
         * @param field Field.
         * @param id Field ID.
         */
        public FloatPrimitiveAccessor(Field field, int id) {
            super(field, id, BinaryWriteMode.P_FLOAT);
        }

        /** {@inheritDoc} */
        @Override public void write(Object obj, BinaryWriterExImpl writer) throws BinaryObjectException {
            writer.writeFieldIdNoSchemaUpdate(id);

            float val = GridUnsafe.getFloatField(obj, offset);

            writer.writeFloatFieldPrimitive(val);
        }

        /** {@inheritDoc} */
        @Override public void read0(Object obj, BinaryReaderExImpl reader) throws BinaryObjectException {
            float val = reader.readFloat(id);

            GridUnsafe.putFloatField(obj, offset, val);
        }
    }

    /**
     * Double field accessor.
     */
    private static class DoublePrimitiveAccessor extends AbstractPrimitiveAccessor {
        /**
         * Constructor.
         *
         * @param field Field.
         * @param id Field ID.
         */
        public DoublePrimitiveAccessor(Field field, int id) {
            super(field, id, BinaryWriteMode.P_DOUBLE);
        }

        /** {@inheritDoc} */
        @Override public void write(Object obj, BinaryWriterExImpl writer) throws BinaryObjectException {
            writer.writeFieldIdNoSchemaUpdate(id);

            double val = GridUnsafe.getDoubleField(obj, offset);

            writer.writeDoubleFieldPrimitive(val);
        }

        /** {@inheritDoc} */
        @Override public void read0(Object obj, BinaryReaderExImpl reader) throws BinaryObjectException {
            double val = reader.readDouble(id);

            GridUnsafe.putDoubleField(obj, offset, val);
        }
    }

    /**
     * Default accessor for non-primitive fields. Reads and writes through
     * reflection; may resolve the write mode per value when "dynamic".
     */
    private static class DefaultFinalClassAccessor extends BinaryFieldAccessor {
        /** Target field. */
        private final Field field;

        /** Dynamic accessor flag: resolve mode from each value's runtime class. */
        private final boolean dynamic;

        /**
         * Constructor.
         *
         * @param field Field.
         * @param id Field ID.
         * @param mode Mode.
         * @param dynamic Dynamic accessor flag.
         */
        DefaultFinalClassAccessor(Field field, int id, BinaryWriteMode mode, boolean dynamic) {
            super(field, id, mode);

            this.field = field;
            this.dynamic = dynamic;
        }

        /** {@inheritDoc} */
        @Override public void write(Object obj, BinaryWriterExImpl writer) throws BinaryObjectException {
            assert obj != null;
            assert writer != null;

            writer.writeFieldIdNoSchemaUpdate(id);

            Object val;

            try {
                val = field.get(obj);
            }
            catch (IllegalAccessException e) {
                throw new BinaryObjectException("Failed to get value for field: " + field, e);
            }

            // Dispatch on the resolved mode (per-value for dynamic accessors).
            switch (mode(val)) {
                case BYTE:
                    writer.writeByteField((Byte) val);

                    break;

                case SHORT:
                    writer.writeShortField((Short) val);

                    break;

                case INT:
                    writer.writeIntField((Integer) val);

                    break;

                case LONG:
                    writer.writeLongField((Long)val);

                    break;

                case FLOAT:
                    writer.writeFloatField((Float)val);

                    break;

                case DOUBLE:
                    writer.writeDoubleField((Double)val);

                    break;

                case CHAR:
                    writer.writeCharField((Character)val);

                    break;

                case BOOLEAN:
                    writer.writeBooleanField((Boolean)val);

                    break;

                case DECIMAL:
                    writer.writeDecimalField((BigDecimal)val);

                    break;

                case STRING:
                    writer.writeStringField((String)val);

                    break;

                case UUID:
                    writer.writeUuidField((UUID)val);

                    break;

                case DATE:
                    writer.writeDateField((Date)val);

                    break;

                case TIMESTAMP:
                    writer.writeTimestampField((Timestamp)val);

                    break;

                case TIME:
                    writer.writeTimeField((Time)val);

                    break;

                case BYTE_ARR:
                    writer.writeByteArrayField((byte[])val);

                    break;

                case SHORT_ARR:
                    writer.writeShortArrayField((short[])val);

                    break;

                case INT_ARR:
                    writer.writeIntArrayField((int[])val);

                    break;

                case LONG_ARR:
                    writer.writeLongArrayField((long[])val);

                    break;

                case FLOAT_ARR:
                    writer.writeFloatArrayField((float[])val);

                    break;

                case DOUBLE_ARR:
                    writer.writeDoubleArrayField((double[])val);

                    break;

                case CHAR_ARR:
                    writer.writeCharArrayField((char[])val);

                    break;

                case BOOLEAN_ARR:
                    writer.writeBooleanArrayField((boolean[])val);

                    break;

                case DECIMAL_ARR:
                    writer.writeDecimalArrayField((BigDecimal[])val);

                    break;

                case STRING_ARR:
                    writer.writeStringArrayField((String[])val);

                    break;

                case UUID_ARR:
                    writer.writeUuidArrayField((UUID[])val);

                    break;

                case DATE_ARR:
                    writer.writeDateArrayField((Date[])val);

                    break;

                case TIMESTAMP_ARR:
                    writer.writeTimestampArrayField((Timestamp[])val);

                    break;

                case TIME_ARR:
                    writer.writeTimeArrayField((Time[])val);

                    break;

                case OBJECT_ARR:
                    writer.writeObjectArrayField((Object[])val);

                    break;

                case COL:
                    writer.writeCollectionField((Collection<?>)val);

                    break;

                case MAP:
                    writer.writeMapField((Map<?, ?>)val);

                    break;

                case BINARY_OBJ:
                    writer.writeBinaryObjectField((BinaryObjectImpl)val);

                    break;

                case ENUM:
                    writer.writeEnumField((Enum<?>)val);

                    break;

                case ENUM_ARR:
                    writer.writeEnumArrayField((Object[])val);

                    break;

                case BINARY:
                case OBJECT:
                case PROXY:
                    writer.writeObjectField(val);

                    break;

                case CLASS:
                    writer.writeClassField((Class)val);

                    break;

                default:
                    assert false : "Invalid mode: " + mode;
            }
        }

        /** {@inheritDoc} */
        @Override public void read0(Object obj, BinaryReaderExImpl reader) throws BinaryObjectException {
            // Dynamic accessors read a generic field; fixed ones use a typed read.
            Object val = dynamic ? reader.readField(id) : readFixedType(reader);

            try {
                // Skip null assignment into primitive fields, keeping their defaults.
                if (val != null || !field.getType().isPrimitive())
                    field.set(obj, val);
            }
            catch (IllegalAccessException e) {
                throw new BinaryObjectException("Failed to set value for field: " + field, e);
            }
        }

        /**
         * Reads fixed type from the given reader with flags validation.
         *
         * @param reader Reader to read from.
         * @return Read value.
         * @throws BinaryObjectException If failed to read value from the stream.
         */
        protected Object readFixedType(BinaryReaderExImpl reader) throws BinaryObjectException {
            Object val = null;

            // One typed read per mode; nullable variants box primitives.
            switch (mode) {
                case BYTE:
                    val = reader.readByteNullable(id);

                    break;

                case SHORT:
                    val = reader.readShortNullable(id);

                    break;

                case INT:
                    val = reader.readIntNullable(id);

                    break;

                case LONG:
                    val = reader.readLongNullable(id);

                    break;

                case FLOAT:
                    val = reader.readFloatNullable(id);

                    break;

                case DOUBLE:
                    val = reader.readDoubleNullable(id);

                    break;

                case CHAR:
                    val = reader.readCharNullable(id);

                    break;

                case BOOLEAN:
                    val = reader.readBooleanNullable(id);

                    break;

                case DECIMAL:
                    val = reader.readDecimal(id);

                    break;

                case STRING:
                    val = reader.readString(id);

                    break;

                case UUID:
                    val = reader.readUuid(id);

                    break;

                case DATE:
                    val = reader.readDate(id);

                    break;

                case TIMESTAMP:
                    val = reader.readTimestamp(id);

                    break;

                case TIME:
                    val = reader.readTime(id);

                    break;

                case BYTE_ARR:
                    val = reader.readByteArray(id);

                    break;

                case SHORT_ARR:
                    val = reader.readShortArray(id);

                    break;

                case INT_ARR:
                    val = reader.readIntArray(id);

                    break;

                case LONG_ARR:
                    val = reader.readLongArray(id);

                    break;

                case FLOAT_ARR:
                    val = reader.readFloatArray(id);

                    break;

                case DOUBLE_ARR:
                    val = reader.readDoubleArray(id);

                    break;

                case CHAR_ARR:
                    val = reader.readCharArray(id);

                    break;

                case BOOLEAN_ARR:
                    val = reader.readBooleanArray(id);

                    break;

                case DECIMAL_ARR:
                    val = reader.readDecimalArray(id);

                    break;

                case STRING_ARR:
                    val = reader.readStringArray(id);

                    break;

                case UUID_ARR:
                    val = reader.readUuidArray(id);

                    break;

                case DATE_ARR:
                    val = reader.readDateArray(id);

                    break;

                case TIMESTAMP_ARR:
                    val = reader.readTimestampArray(id);

                    break;

                case TIME_ARR:
                    val = reader.readTimeArray(id);

                    break;

                case OBJECT_ARR:
                    val = reader.readObjectArray(id);

                    break;

                case COL:
                    val = reader.readCollection(id, null);

                    break;

                case MAP:
                    val = reader.readMap(id, null);

                    break;

                case BINARY_OBJ:
                    val = reader.readBinaryObject(id);

                    break;

                case ENUM:
                    val = reader.readEnum(id, field.getType());

                    break;

                case ENUM_ARR:
                    val = reader.readEnumArray(id, field.getType().getComponentType());

                    break;

                case BINARY:
                case OBJECT:
                    val = reader.readObject(id);

                    break;

                case CLASS:
                    val = reader.readClass(id);

                    break;

                default:
                    assert false : "Invalid mode: " + mode;
            }

            return val;
        }

        /**
         * @param val Val to get write mode for.
         * @return Write mode.
         */
        protected BinaryWriteMode mode(Object val) {
            // Dynamic: derive the mode from the value's runtime class (null is
            // written as a plain object). Fixed: use the declared mode.
            return dynamic ?
                val == null ? BinaryWriteMode.OBJECT : BinaryUtils.mode(val.getClass()) :
                mode;
        }
    }
}
| |
/*
* Copyright (C) 2009 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package dusan.stefanovic.connectionapp;
import java.util.Set;
import dusan.stefanovic.connectionapp.service.WATCHiTServiceInterface;
import android.app.Activity;
import android.bluetooth.BluetoothAdapter;
import android.bluetooth.BluetoothDevice;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.view.Window;
import android.view.View.OnClickListener;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.ListView;
import android.widget.TextView;
import android.widget.AdapterView.OnItemClickListener;
/**
* This Activity appears as a dialog. It lists any paired devices and
* devices detected in the area after discovery. When a device is chosen
* by the user, the MAC address of the device is sent back to the parent
* Activity in the result Intent.
*/
/**
 * This Activity appears as a dialog. It lists any paired devices and
 * devices detected in the area after discovery. When a device is chosen
 * by the user, the MAC address of the device is sent back to the parent
 * Activity in the result Intent.
 */
public class DeviceListActivity extends Activity {

    // Debugging
    private static final String TAG = "DeviceListActivity";
    private static final boolean D = true;

    /** Length of a canonical Bluetooth MAC address, e.g. "00:11:22:AA:BB:CC". */
    private static final int MAC_ADDRESS_LENGTH = 17;

    // Member fields
    private BluetoothAdapter bluetoothAdapter;
    private ArrayAdapter<String> pairedDevicesArrayAdapter;
    private ArrayAdapter<String> newDevicesArrayAdapter;

    /**
     * The BroadcastReceiver that lists each discovered device and restores
     * the title when discovery is finished.
     */
    private final BroadcastReceiver broadcastReceiver = new BroadcastReceiver() {
        @Override
        public void onReceive(Context context, Intent intent) {
            String action = intent.getAction();

            // When discovery finds a device
            if (BluetoothDevice.ACTION_FOUND.equals(action)) {
                // Get the BluetoothDevice object from the Intent
                BluetoothDevice device = intent.getParcelableExtra(BluetoothDevice.EXTRA_DEVICE);
                // If it's already paired, skip it, because it's been listed already
                if (device != null && device.getBondState() != BluetoothDevice.BOND_BONDED) {
                    newDevicesArrayAdapter.add(device.getName() + "\n" + device.getAddress());
                }
            // When discovery is finished, change the Activity title
            } else if (BluetoothAdapter.ACTION_DISCOVERY_FINISHED.equals(action)) {
                setProgressBarIndeterminateVisibility(false);
                setTitle(R.string.select_device);
                if (newDevicesArrayAdapter.getCount() == 0) {
                    // Placeholder row; onItemClickListener below ignores it.
                    String noDevices = getResources().getText(R.string.none_found).toString();
                    newDevicesArrayAdapter.add(noDevices);
                }
            }
        }
    };

    /** The on-click listener for all devices in the ListViews. */
    private OnItemClickListener onItemClickListener = new OnItemClickListener() {
        public void onItemClick(AdapterView<?> av, View v, int arg2, long arg3) {
            // Device rows are "name\naddress": the MAC address is the last 17
            // chars. The "none found"/"none paired" placeholder rows do NOT end
            // in a MAC address, so validate before treating the tail as one
            // (previously, tapping a placeholder returned garbage to the caller).
            String info = ((TextView) v).getText().toString();
            if (info.length() < MAC_ADDRESS_LENGTH) {
                return;
            }
            String address = info.substring(info.length() - MAC_ADDRESS_LENGTH);
            if (!BluetoothAdapter.checkBluetoothAddress(address)) {
                return;
            }

            // Cancel discovery because it's costly and we're about to connect
            bluetoothAdapter.cancelDiscovery();

            // Create the result Intent and include the MAC address
            Intent intent = new Intent();
            intent.putExtra(WATCHiTServiceInterface.DEVICE_ADDRESS, address);

            // Set result and finish this Activity
            setResult(Activity.RESULT_OK, intent);
            finish();
        }
    };

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        // Setup the window
        requestWindowFeature(Window.FEATURE_INDETERMINATE_PROGRESS);
        setContentView(R.layout.activity_device_list);

        // Set result CANCELED in case the user backs out
        setResult(Activity.RESULT_CANCELED);

        // Get the local Bluetooth adapter. Devices without Bluetooth return
        // null here; bail out before touching it (previously an NPE).
        bluetoothAdapter = BluetoothAdapter.getDefaultAdapter();
        if (bluetoothAdapter == null) {
            finish();
            return;
        }

        // Initialize the button to perform device discovery
        Button scanButton = (Button) findViewById(R.id.scan_button);
        scanButton.setOnClickListener(new OnClickListener() {
            public void onClick(View v) {
                doDiscovery();
                v.setVisibility(View.GONE);
            }
        });

        // Initialize array adapters. One for already paired devices and
        // one for newly discovered devices
        pairedDevicesArrayAdapter = new ArrayAdapter<String>(this, R.layout.device_name);
        newDevicesArrayAdapter = new ArrayAdapter<String>(this, R.layout.device_name);

        // Find and set up the ListView for paired devices
        ListView pairedListView = (ListView) findViewById(R.id.paired_devices_listView);
        pairedListView.setAdapter(pairedDevicesArrayAdapter);
        pairedListView.setOnItemClickListener(onItemClickListener);

        // Find and set up the ListView for newly discovered devices
        ListView newDevicesListView = (ListView) findViewById(R.id.new_devices_listView);
        newDevicesListView.setAdapter(newDevicesArrayAdapter);
        newDevicesListView.setOnItemClickListener(onItemClickListener);

        // Register for broadcasts when a device is discovered
        IntentFilter filter = new IntentFilter(BluetoothDevice.ACTION_FOUND);
        this.registerReceiver(broadcastReceiver, filter);

        // Register for broadcasts when discovery has finished
        filter = new IntentFilter(BluetoothAdapter.ACTION_DISCOVERY_FINISHED);
        this.registerReceiver(broadcastReceiver, filter);

        // Get a set of currently paired devices
        Set<BluetoothDevice> pairedDevices = bluetoothAdapter.getBondedDevices();

        // If there are paired devices, add each one to the ArrayAdapter
        if (pairedDevices.size() > 0) {
            findViewById(R.id.title_paired_devices_textView).setVisibility(View.VISIBLE);
            for (BluetoothDevice device : pairedDevices) {
                pairedDevicesArrayAdapter.add(device.getName() + "\n" + device.getAddress());
            }
        } else {
            String noDevices = getResources().getText(R.string.none_paired).toString();
            pairedDevicesArrayAdapter.add(noDevices);
        }
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();

        // Make sure we're not doing discovery anymore
        if (bluetoothAdapter != null) {
            bluetoothAdapter.cancelDiscovery();
        }

        // Unregister broadcast listeners. If onCreate() bailed out before
        // registering (no Bluetooth adapter), there is nothing to unregister.
        try {
            this.unregisterReceiver(broadcastReceiver);
        } catch (IllegalArgumentException ignored) {
            // Receiver was never registered; nothing to do.
        }
    }

    /**
     * Start device discovery with the BluetoothAdapter.
     */
    private void doDiscovery() {
        if (D) Log.d(TAG, "doDiscovery()");

        // Indicate scanning in the title
        setProgressBarIndeterminateVisibility(true);
        setTitle(R.string.scanning);

        // Turn on sub-title for new devices
        findViewById(R.id.title_new_devices_textView).setVisibility(View.VISIBLE);

        // If we're already discovering, stop it first
        if (bluetoothAdapter.isDiscovering()) {
            bluetoothAdapter.cancelDiscovery();
        }

        // Request discovery from BluetoothAdapter
        bluetoothAdapter.startDiscovery();
    }
}
| |
package com.planet_ink.coffee_mud.Locales;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.core.collections.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Libraries.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.*;
/*
Copyright 2001-2016 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/**
 * An outdoor water-surface room. Occupants are flagged as swimming, the room
 * acts as an effectively bottomless source of drinkable water, and it lazily
 * generates an id-less underwater grid room below itself (see giveASky).
 */
public class WaterSurface extends StdRoom implements Drink
{
	@Override
	public String ID()
	{
		return "WaterSurface";
	}

	// Liquid this surface yields when drunk from; fresh water by default.
	protected int liquidType = RawMaterial.RESOURCE_FRESHWATER;

	public WaterSurface()
	{
		super();
		name="the water";
		// Anyone in this room is swimming, and the climate is always wet.
		basePhyStats().setDisposition(basePhyStats().disposition()|PhyStats.IS_SWIMMING);
		basePhyStats.setWeight(2);
		recoverPhyStats();
		climask=Places.CLIMASK_WET;
	}

	@Override
	public int domainType()
	{
		return Room.DOMAIN_OUTDOORS_WATERSURFACE;
	}

	/** @return 0 — this room never decays. */
	@Override
	public long decayTime()
	{
		return 0;
	}

	/** Decay time is fixed at 0; setter is intentionally a no-op. */
	@Override
	public void setDecayTime(long time)
	{
	}

	/**
	 * Locale class ID used for the auto-generated room below this surface.
	 * Hook for subclasses (e.g. salt-water variants) — TODO confirm usage.
	 */
	protected String UnderWaterLocaleID()
	{
		return "UnderWaterGrid";
	}

	/** Domain type of the generated underwater room below this surface. */
	protected int UnderWaterDomainType()
	{
		return Room.DOMAIN_OUTDOORS_UNDERWATER;
	}

	/**
	 * @param thatSea room to test
	 * @return true if the room is one of the grid-style underwater locales
	 *         that this class auto-generates and knows how to tear down
	 */
	protected boolean IsUnderWaterFatClass(Room thatSea)
	{
		return (thatSea instanceof UnderWaterGrid)
			|| (thatSea instanceof UnderWaterThinGrid)
			|| (thatSea instanceof UnderWaterColumnGrid);
	}

	/**
	 * Returns the sky rooms over this surface plus the generated (id-less)
	 * underwater grid room below it, if one exists.
	 */
	@Override
	public List<Room> getSky()
	{
		List<Room> skys = new Vector<Room>(1);
		if(!skyedYet)
			return skys;
		skys.addAll(super.getSky());
		final Room room=rawDoors()[Directions.DOWN];
		if(room!=null)
		{
			if((room.roomID().length()==0)
			&&(IsUnderWaterFatClass(room)))
			{
				skys.add(room);
			}
		}
		return skys;
	}

	/**
	 * Lazily generates this room's sky and, additionally for this locale, an
	 * id-less underwater grid room below the surface, stitched horizontally
	 * to the underwater rooms generated by neighboring surface rooms.
	 *
	 * @param depth recursion depth; generation aborts past 1000
	 */
	@Override
	public void giveASky(int depth)
	{
		if(skyedYet)
			return;
		// Recursion guard: neighboring rooms call giveASky on each other below.
		if(depth>1000)
			return;
		super.giveASky(depth+1);
		skyedYet=true;
		// Temporary rooms (no room ID) inside an id-less grid parent get no sea.
		if((roomID().length()==0)
		&&(getGridParent()!=null)
		&&(getGridParent().roomID().length()==0))
			return;
		// Only build a sea if nothing is below yet, this room is not itself
		// underwater or air, and sky/sea generation (SKYSIZE) is enabled.
		if((rawDoors()[Directions.DOWN]==null)
		&&(domainType()!=UnderWaterDomainType())
		&&(domainType()!=Room.DOMAIN_OUTDOORS_AIR)
		&&(CMProps.getIntVar(CMProps.Int.SKYSIZE)!=0))
		{
			Exit dnE=null;
			final Exit upE=CMClass.getExit("StdOpenDoorway");
			// SKYSIZE>0: open doorway both ways; otherwise the way down is unseen.
			if(CMProps.getIntVar(CMProps.Int.SKYSIZE)>0)
				dnE=upE;
			else
				dnE=CMClass.getExit("UnseenWalkway");
			final GridLocale sea=(GridLocale)CMClass.getLocale(UnderWaterLocaleID());
			sea.setRoomID("");
			sea.setArea(getArea());
			// Link this surface and the new sea vertically.
			rawDoors()[Directions.DOWN]=sea;
			setRawExit(Directions.DOWN,dnE);
			sea.rawDoors()[Directions.UP]=this;
			sea.setRawExit(Directions.UP,upE);
			for(int dir : Directions.CODES())
			{
				Room thatRoom=rawDoors()[dir];
				Room thatSea=null;
				if((thatRoom!=null)&&(getRawExit(dir)!=null))
				{
					// Ensure the neighbor has generated its own sea first.
					thatRoom=CMLib.map().getRoom(thatRoom);
					thatRoom.giveASky(depth+1);
					thatSea=thatRoom.rawDoors()[Directions.DOWN];
				}
				if((thatSea!=null)
				&&(thatSea.roomID().length()==0)
				&&(IsUnderWaterFatClass(thatSea)))
				{
					// Stitch the two generated seas together horizontally.
					sea.rawDoors()[dir]=thatSea;
					sea.setRawExit(dir,getRawExit(dir));
					thatSea.rawDoors()[Directions.getOpDirectionCode(dir)]=sea;
					if(thatRoom!=null)
					{
						// Mirror the neighbor's return exit, but never a door.
						Exit xo=thatRoom.getRawExit(Directions.getOpDirectionCode(dir));
						if((xo==null)||(xo.hasADoor()))
							xo=upE;
						thatSea.setRawExit(Directions.getOpDirectionCode(dir),xo);
					}
					((GridLocale)thatSea).clearGrid(null);
				}
			}
			sea.clearGrid(null);
		}
	}

	/**
	 * Tears down the generated sky, destroying the id-less underwater room
	 * created by giveASky (if any) and clearing its vertical links.
	 */
	@Override
	public void clearSky()
	{
		if(!skyedYet)
			return;
		super.clearSky();
		final Room room=rawDoors()[Directions.DOWN];
		if(room!=null)
		{
			if((room.roomID().length()==0)
			&&(IsUnderWaterFatClass(room)))
			{
				((GridLocale)room).clearGrid(null);
				rawDoors()[Directions.DOWN]=null;
				setRawExit(Directions.DOWN,null);
				CMLib.map().emptyRoom(room,null,true);
				room.destroy();
				skyedYet=false;
			}
		}
	}

	/**
	 * Routes the message through the tracking library's water-surface check,
	 * which may cancel it, force-allow it, or defer to the superclass.
	 */
	@Override
	public boolean okMessage(final Environmental myHost, final CMMsg msg)
	{
		switch(CMLib.tracking().isOkWaterSurfaceAffect(this,msg))
		{
		case CANCEL:
			return false;
		case FORCEDOK:
			return true;
		default:
		case CONTINUE:
			return super.okMessage(myHost,msg);
		}
	}

	@Override
	public void executeMsg(final Environmental myHost, final CMMsg msg)
	{
		super.executeMsg(myHost,msg);
		// Apply sinking behavior for items/mobs in the water.
		UnderWater.sinkAffects(this,msg);
	}

	// --- Drink interface: an effectively bottomless water source. ---

	@Override
	public int thirstQuenched()
	{
		return 1000;
	}

	@Override
	public int liquidHeld()
	{
		return Integer.MAX_VALUE - 1000;
	}

	@Override
	public int liquidRemaining()
	{
		return Integer.MAX_VALUE - 1000;
	}

	@Override
	public int liquidType()
	{
		return liquidType;
	}

	@Override
	public void setLiquidType(int newLiquidType)
	{
		liquidType = newLiquidType;
	}

	/** Capacity is fixed; setter is intentionally a no-op. */
	@Override
	public void setThirstQuenched(int amount)
	{
	}

	/** Capacity is fixed; setter is intentionally a no-op. */
	@Override
	public void setLiquidHeld(int amount)
	{
	}

	/** Capacity is fixed; setter is intentionally a no-op. */
	@Override
	public void setLiquidRemaining(int amount)
	{
	}

	@Override
	public boolean disappearsAfterDrinking()
	{
		return false;
	}

	@Override
	public boolean containsDrink()
	{
		return true;
	}

	/** @return 0 — this source cannot be refilled from another. */
	@Override
	public int amountTakenToFillMe(Drink theSource)
	{
		return 0;
	}

	@Override
	public List<Integer> resourceChoices()
	{
		return UnderWater.roomResources;
	}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.graph.library.clustering.undirected;
import org.apache.flink.api.common.functions.GroupReduceFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.operators.Order;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.functions.FunctionAnnotation.ForwardedFields;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.api.java.tuple.Tuple4;
import org.apache.flink.graph.Edge;
import org.apache.flink.graph.Graph;
import org.apache.flink.graph.asm.degree.annotate.undirected.EdgeDegreePair;
import org.apache.flink.graph.utils.proxy.GraphAlgorithmWrappingBase;
import org.apache.flink.graph.utils.proxy.GraphAlgorithmWrappingDataSet;
import org.apache.flink.graph.utils.proxy.OptionalBoolean;
import org.apache.flink.types.BooleanValue;
import org.apache.flink.types.CopyableValue;
import org.apache.flink.types.LongValue;
import org.apache.flink.util.Collector;
import org.apache.flink.util.Preconditions;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import static org.apache.flink.api.common.ExecutionConfig.PARALLELISM_DEFAULT;
/**
*
*
* @param <K> graph ID type
* @param <VV> vertex value type
* @param <EV> edge value type
*/
/**
 * Generates a listing of distinct rectangles (4-cycles) in an undirected graph.
 *
 * <p>Each result is a {@code Tuple4} (K0, K1, K2, K3) where (K0, K1), (K1, K2),
 * (K2, K3), and (K3, K0) are edges of the input graph.
 *
 * @param <K> graph ID type
 * @param <VV> vertex value type
 * @param <EV> edge value type
 */
public class RectangleListing<K extends Comparable<K> & CopyableValue<K>, VV, EV>
extends GraphAlgorithmWrappingDataSet<K, VV, EV, Tuple4<K, K, K, K>> {
	// Optional configuration
	private OptionalBoolean sortRectangleVertices = new OptionalBoolean(false, false);

	private int littleParallelism = PARALLELISM_DEFAULT;

	/**
	 * Normalize the rectangle listing such that for each result (K0, K1, K2, K3)
	 * the vertex IDs are sorted K0 < K1 < K2 < K3.
	 *
	 * @param sortRectangleVertices whether to output each rectangle's vertices in sorted order
	 * @return this
	 */
	public RectangleListing<K, VV, EV> setSortRectangleVertices(boolean sortRectangleVertices) {
		this.sortRectangleVertices.set(sortRectangleVertices);
		return this;
	}

	/**
	 * Override the parallelism of operators processing small amounts of data.
	 *
	 * @param littleParallelism operator parallelism
	 * @return this
	 */
	public RectangleListing<K, VV, EV> setLittleParallelism(int littleParallelism) {
		Preconditions.checkArgument(littleParallelism > 0 || littleParallelism == PARALLELISM_DEFAULT,
			"The parallelism must be greater than zero.");

		this.littleParallelism = littleParallelism;

		return this;
	}

	@Override
	protected void mergeConfiguration(GraphAlgorithmWrappingBase other) {
		super.mergeConfiguration(other);

		// wildcard cast avoids a raw-type warning; the configuration fields do
		// not depend on the type parameters
		RectangleListing<?, ?, ?> rhs = (RectangleListing<?, ?, ?>) other;

		sortRectangleVertices.mergeWith(rhs.sortRectangleVertices);
	}

	/*
	 * Implementation notes:
	 *
	 * The requirement that "K extends CopyableValue<K>" can be removed when
	 * Flink has a self-join and GenerateTriplets is implemented as such.
	 */

	@Override
	public DataSet<Tuple4<K, K, K, K>> runInternal(Graph<K, VV, EV> input)
			throws Exception {
		// u, v, (edge value, deg(u), deg(v))
		DataSet<Edge<K, Tuple3<EV, LongValue, LongValue>>> pairDegree = input
			.run(new EdgeDegreePair<K, VV, EV>()
				.setParallelism(littleParallelism));

		// u, v, deg(u) < deg(v) or (deg(u) == deg(v) and u < v)
		DataSet<Tuple3<K, K, BooleanValue>> markedByDegree = pairDegree
			.map(new MarkByDegree<K, EV>())
			.setParallelism(littleParallelism)
			.name("Mark by degree");

		// u, v, w where (u, v) and (u, w) are edges in graph, v < w
		DataSet<Tuple3<K, K, K>> triplets = markedByDegree
			.groupBy(0)
			.sortGroup(2, Order.DESCENDING)
			.reduceGroup(new GenerateTriplets<K>())
			.setParallelism(littleParallelism)
			.name("Generate triplets");

		// u, v, w, x where (u, v), (v, w), (w, x), and (x, u) are edges in graph
		DataSet<Tuple4<K, K, K, K>> rectangles = triplets
			.groupBy(1, 2)
			.reduceGroup(new ListRectangles<K>())
			.setParallelism(littleParallelism)
			.name("Rectangle listing");

		// honor the configured normalization (this option was previously dead:
		// the sort step had been left commented out)
		if (sortRectangleVertices.get()) {
			rectangles = rectangles
				.map(new SortRectangleVertices<K>())
				.name("Sort rectangle vertices");
		}

		return rectangles;
	}

	/**
	 * Marks each edge with whether its source vertex has the lower effective
	 * degree, breaking degree ties by vertex ID.
	 *
	 * @param <T> ID type
	 * @param <ET> edge value type
	 */
	@ForwardedFields("0; 1")
	private static final class MarkByDegree<T extends Comparable<T>, ET>
	implements MapFunction<Edge<T, Tuple3<ET, LongValue, LongValue>>, Tuple3<T, T, BooleanValue>> {
		// reused output tuple to avoid per-record allocation
		private Tuple3<T, T, BooleanValue> edge = new Tuple3<>();

		@Override
		public Tuple3<T, T, BooleanValue> map(Edge<T, Tuple3<ET, LongValue, LongValue>> value)
				throws Exception {
			edge.f0 = value.f0;
			edge.f1 = value.f1;

			Tuple3<ET, LongValue, LongValue> degrees = value.f2;
			long sourceDegree = degrees.f1.getValue();
			long targetDegree = degrees.f2.getValue();

			if (sourceDegree < targetDegree ||
					(sourceDegree == targetDegree && value.f0.compareTo(value.f1) < 0)) {
				edge.f2 = BooleanValue.TRUE;
			} else {
				edge.f2 = BooleanValue.FALSE;
			}

			return edge;
		}
	}

	/**
	 * Emits a triplet (u, v, w) for each pair of edges (u, v), (u, w) sharing
	 * vertex u, with v < w, pairing each marked edge against every later edge
	 * in the sorted group.
	 *
	 * @param <T> ID type
	 */
	@ForwardedFields("0")
	private static final class GenerateTriplets<T extends Comparable<T> & CopyableValue<T>>
	implements GroupReduceFunction<Tuple3<T, T, BooleanValue>, Tuple3<T, T, T>> {
		// reused output tuple
		private Tuple3<T, T, T> output = new Tuple3<>();

		// pool of copied vertex IDs, reused across groups; visitedCount tracks
		// how many entries are live for the current group
		private List<T> visited = new ArrayList<>();

		@Override
		public void reduce(Iterable<Tuple3<T, T, BooleanValue>> values, Collector<Tuple3<T, T, T>> out)
				throws Exception {
			int visitedCount = 0;

			Iterator<Tuple3<T, T, BooleanValue>> iter = values.iterator();

			while (true) {
				Tuple3<T, T, BooleanValue> edge = iter.next();

				output.f0 = edge.f0;

				// pair the current neighbor with every previously visited one,
				// ordering the pair so that f1 < f2
				for (int i = 0; i < visitedCount; i++) {
					T prior = visited.get(i);

					if (prior.compareTo(edge.f1) < 0) {
						output.f1 = prior;
						output.f2 = edge.f1;
					} else {
						output.f1 = edge.f1;
						output.f2 = prior;
					}

					out.collect(output);
				}

				if (! iter.hasNext()) {
					break;
				}

				// only marked (lower-degree-endpoint) edges seed new pairings
				if (edge.f2.getValue()) {
					if (visitedCount == visited.size()) {
						visited.add(edge.f1.copy());
					} else {
						edge.f1.copyTo(visited.get(visitedCount));
					}

					visitedCount += 1;
				}
			}
		}
	}

	/**
	 * Joins triplets sharing the endpoint pair (v, w): two triplets (u0, v, w)
	 * and (u1, v, w) form the rectangle (u0, v, w, u1).
	 *
	 * @param <T> ID type
	 */
	private static class ListRectangles<T extends CopyableValue<T>>
	implements GroupReduceFunction<Tuple3<T, T, T>, Tuple4<T, T, T, T>> {
		// pool of partially filled rectangles, reused across groups
		private List<Tuple4<T, T, T, T>> visited = new ArrayList<>();

		@Override
		public void reduce(Iterable<Tuple3<T, T, T>> values, Collector<Tuple4<T, T, T, T>> out)
				throws Exception {
			int visitedCount = 0;

			Iterator<Tuple3<T, T, T>> iter = values.iterator();

			while (true) {
				Tuple3<T, T, T> triplet = iter.next();

				// close a rectangle with every previously seen triplet
				for (int i = 0; i < visitedCount; i++) {
					Tuple4<T, T, T, T> prior = visited.get(i);

					prior.f3 = triplet.f0;

					out.collect(prior);
				}

				if (! iter.hasNext()) {
					break;
				}

				if (visitedCount == visited.size()) {
					visited.add(new Tuple4<T, T, T, T>(triplet.f0.copy(), triplet.f1.copy(), triplet.f2.copy(), null));
				} else {
					Tuple4<T, T, T, T> prior = visited.get(visitedCount);

					triplet.f0.copyTo(prior.f0);
					triplet.f1.copyTo(prior.f1);
					triplet.f2.copyTo(prior.f2);
				}

				visitedCount += 1;
			}
		}
	}

	/**
	 * Reorders the vertices of each emitted rectangle into ascending order
	 * K0 < K1 < K2 < K3, as promised by {@link #setSortRectangleVertices}.
	 *
	 * @param <T> ID type
	 */
	private static final class SortRectangleVertices<T extends Comparable<T>>
	implements MapFunction<Tuple4<T, T, T, T>, Tuple4<T, T, T, T>> {
		@Override
		public Tuple4<T, T, T, T> map(Tuple4<T, T, T, T> value)
				throws Exception {
			// 5-comparator sorting network for four elements
			if (value.f0.compareTo(value.f1) > 0) { T t = value.f0; value.f0 = value.f1; value.f1 = t; }
			if (value.f2.compareTo(value.f3) > 0) { T t = value.f2; value.f2 = value.f3; value.f3 = t; }
			if (value.f0.compareTo(value.f2) > 0) { T t = value.f0; value.f0 = value.f2; value.f2 = t; }
			if (value.f1.compareTo(value.f3) > 0) { T t = value.f1; value.f1 = value.f3; value.f3 = t; }
			if (value.f1.compareTo(value.f2) > 0) { T t = value.f1; value.f1 = value.f2; value.f2 = t; }

			return value;
		}
	}
}
| |
/**
* Copyright (C) 2015 The Gravitee team (http://gravitee.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gravitee.rest.api.portal.rest.resource;
import static org.junit.Assert.*;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.*;
import io.gravitee.common.http.HttpStatusCode;
import io.gravitee.rest.api.model.ApplicationMetadataEntity;
import io.gravitee.rest.api.model.MetadataFormat;
import io.gravitee.rest.api.model.NewApplicationMetadataEntity;
import io.gravitee.rest.api.model.UpdateApplicationMetadataEntity;
import io.gravitee.rest.api.portal.rest.model.*;
import io.gravitee.rest.api.portal.rest.model.Error;
import io.gravitee.rest.api.service.exceptions.ApplicationMetadataNotFoundException;
import io.gravitee.rest.api.service.exceptions.ApplicationNotFoundException;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import javax.ws.rs.client.Entity;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.Response;
import org.junit.Before;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
import org.mockito.Mockito;
/**
* @author Florent CHAMFROY (florent.chamfroy at graviteesource.com)
* @author GraviteeSource Team
*/
/**
 * Tests the portal REST resource exposing application metadata
 * (list/get/create/update/delete plus 404 behavior for unknown
 * applications and metadata keys).
 *
 * @author Florent CHAMFROY (florent.chamfroy at graviteesource.com)
 * @author GraviteeSource Team
 */
public class ApplicationMetadataResourceTest extends AbstractResourceTest {

    private static final String APPLICATION = "my-application";
    private static final String UNKNOWN_APPLICATION = "unknown-application";
    private static final String METADATA_1 = "my-metadata-1";
    private static final String METADATA_1_NAME = "my-metadata-1-name";
    private static final String METADATA_1_FORMAT = "BOOLEAN";
    private static final String METADATA_1_VALUE = "my-metadata-1-value";
    private static final String METADATA_1_DEFAULT_VALUE = "my-metadata-1-defaut-value";
    private static final String METADATA_2 = "my-metadata-2";
    private static final String UNKNOWN_METADATA = "unknown-metadata";

    @Override
    protected String contextPath() {
        return "applications/";
    }

    @Before
    public void init() {
        resetAllMocks();

        ApplicationMetadataEntity applicationMetadataEntity1 = new ApplicationMetadataEntity();
        applicationMetadataEntity1.setKey(METADATA_1);
        ApplicationMetadataEntity applicationMetadataEntity2 = new ApplicationMetadataEntity();
        applicationMetadataEntity2.setKey(METADATA_2);

        // mapper delegates to the real conversion logic
        when(referenceMetadataMapper.convert(any())).thenCallRealMethod();
        when(referenceMetadataMapper.convert(any(), any())).thenCallRealMethod();
        when(referenceMetadataMapper.convert(any(), any(), any())).thenCallRealMethod();

        doReturn(Arrays.asList(applicationMetadataEntity1, applicationMetadataEntity2))
            .when(applicationMetadataService)
            .findAllByApplication(APPLICATION);
        doReturn(applicationMetadataEntity1).when(applicationMetadataService).findByIdAndApplication(METADATA_1, APPLICATION);
        doReturn(null).when(applicationMetadataService).findByIdAndApplication(METADATA_2, APPLICATION);

        // create/update succeed for the known application and fail like the
        // real service for unknown application/metadata ids
        when(applicationMetadataService.create(any()))
            .thenAnswer(
                invocation -> {
                    NewApplicationMetadataEntity newApplicationMetadataEntity = invocation.getArgument(0);
                    if (newApplicationMetadataEntity.getApplicationId().equals(UNKNOWN_APPLICATION)) {
                        throw new ApplicationNotFoundException(UNKNOWN_APPLICATION);
                    }
                    return applicationMetadataEntity1;
                }
            );
        when(applicationMetadataService.update(any()))
            .thenAnswer(
                invocation -> {
                    UpdateApplicationMetadataEntity updateApplicationMetadataEntity = invocation.getArgument(0);
                    if (updateApplicationMetadataEntity.getApplicationId().equals(UNKNOWN_APPLICATION)) {
                        throw new ApplicationNotFoundException(UNKNOWN_APPLICATION);
                    }
                    if (updateApplicationMetadataEntity.getKey().equals(UNKNOWN_METADATA)) {
                        throw new ApplicationMetadataNotFoundException(
                            updateApplicationMetadataEntity.getApplicationId(),
                            UNKNOWN_METADATA
                        );
                    }
                    return applicationMetadataEntity1;
                }
            );

        doThrow(ApplicationNotFoundException.class).when(applicationMetadataService).findAllByApplication(UNKNOWN_APPLICATION);
        doThrow(ApplicationNotFoundException.class).when(applicationMetadataService).findByIdAndApplication(any(), eq(UNKNOWN_APPLICATION));
        doThrow(ApplicationNotFoundException.class).when(applicationMetadataService).delete(any(), eq(UNKNOWN_APPLICATION));
        doThrow(ApplicationMetadataNotFoundException.class)
            .when(applicationMetadataService)
            .findByIdAndApplication(UNKNOWN_METADATA, APPLICATION);
        doThrow(ApplicationMetadataNotFoundException.class).when(applicationMetadataService).delete(eq(UNKNOWN_METADATA), any());
    }

    @Test
    public void shouldGetAllMetadata() {
        final Response response = target(APPLICATION).path("metadata").request().get();
        assertEquals(HttpStatusCode.OK_200, response.getStatus());

        ReferenceMetadataResponse metadataResponse = response.readEntity(ReferenceMetadataResponse.class);
        assertEquals(2, metadataResponse.getData().size());
        assertEquals(METADATA_1, metadataResponse.getData().get(0).getKey());
        assertEquals(METADATA_2, metadataResponse.getData().get(1).getKey());

        Links links = metadataResponse.getLinks();
        assertNotNull(links);
    }

    @Test
    public void shouldGetMetadataWithPaginatedLink() {
        final Response response = target(APPLICATION).path("metadata").queryParam("page", 2).queryParam("size", 1).request().get();
        assertEquals(HttpStatusCode.OK_200, response.getStatus());

        ReferenceMetadataResponse metadataResponse = response.readEntity(ReferenceMetadataResponse.class);
        assertEquals(1, metadataResponse.getData().size());
        assertEquals(METADATA_2, metadataResponse.getData().get(0).getKey());

        Links links = metadataResponse.getLinks();
        assertNotNull(links);
    }

    @Test
    public void shouldNotGetMetadata() {
        // page 10 is past the last page for two items of size 1
        final Response response = target(APPLICATION).path("metadata").queryParam("page", 10).queryParam("size", 1).request().get();
        assertEquals(HttpStatusCode.BAD_REQUEST_400, response.getStatus());

        ErrorResponse errorResponse = response.readEntity(ErrorResponse.class);
        List<Error> errors = errorResponse.getErrors();
        assertNotNull(errors);
        assertEquals(1, errors.size());
        Error error = errors.get(0);
        assertEquals("errors.pagination.invalid", error.getCode());
        assertEquals("400", error.getStatus());
        assertEquals("Pagination is not valid", error.getMessage());
    }

    @Test
    public void shouldGetNoMetadataAndNoLink() {
        doReturn(Collections.emptyList()).when(applicationMetadataService).findAllByApplication(any());

        //Test with default limit
        final Response response = target(APPLICATION).path("metadata").request().get();
        assertEquals(HttpStatusCode.OK_200, response.getStatus());

        ReferenceMetadataResponse metadataResponse = response.readEntity(ReferenceMetadataResponse.class);
        assertEquals(0, metadataResponse.getData().size());

        Links links = metadataResponse.getLinks();
        assertNull(links);

        //Test with small limit
        final Response anotherResponse = target(APPLICATION).path("metadata").queryParam("page", 2).queryParam("size", 1).request().get();
        assertEquals(HttpStatusCode.OK_200, anotherResponse.getStatus());

        metadataResponse = anotherResponse.readEntity(ReferenceMetadataResponse.class);
        assertEquals(0, metadataResponse.getData().size());

        links = metadataResponse.getLinks();
        assertNull(links);
    }

    @Test
    public void shouldGetMetadata() {
        final Response response = target(APPLICATION).path("metadata").path(METADATA_1).request().get();
        assertEquals(HttpStatusCode.OK_200, response.getStatus());

        ReferenceMetadata responseMetadata = response.readEntity(ReferenceMetadata.class);
        assertNotNull(responseMetadata);
        assertEquals(METADATA_1, responseMetadata.getKey());
    }

    @Test
    public void shouldDeleteMetadata() {
        final Response response = target(APPLICATION).path("metadata").path(METADATA_1).request().delete();
        assertEquals(HttpStatusCode.NO_CONTENT_204, response.getStatus());

        verify(applicationMetadataService).delete(METADATA_1, APPLICATION);
    }

    @Test
    public void shouldCreateMetadata() {
        ReferenceMetadataInput metadataInput = new ReferenceMetadataInput()
            .name(METADATA_1_NAME)
            .defaultValue(METADATA_1_DEFAULT_VALUE)
            .format(ReferenceMetadataFormatType.valueOf(METADATA_1_FORMAT))
            .value(METADATA_1_VALUE);
        final Response response = target(APPLICATION).path("metadata").request().post(Entity.json(metadataInput));
        assertEquals(HttpStatusCode.CREATED_201, response.getStatus());

        // Location header must point at the newly created metadata resource
        assertEquals(
            target(APPLICATION).path("metadata").path(METADATA_1).getUri().toString(),
            response.getHeaders().getFirst(HttpHeaders.LOCATION)
        );

        ArgumentCaptor<NewApplicationMetadataEntity> newMetadataEntityCaptor = ArgumentCaptor.forClass(NewApplicationMetadataEntity.class);

        verify(applicationMetadataService).create(newMetadataEntityCaptor.capture());
        final NewApplicationMetadataEntity newMetadataEntityCaptorValue = newMetadataEntityCaptor.getValue();
        assertEquals(APPLICATION, newMetadataEntityCaptorValue.getApplicationId());
        assertEquals(METADATA_1_NAME, newMetadataEntityCaptorValue.getName());
        assertEquals(METADATA_1_VALUE, newMetadataEntityCaptorValue.getValue());
        assertEquals(METADATA_1_DEFAULT_VALUE, newMetadataEntityCaptorValue.getDefaultValue());
        assertEquals(MetadataFormat.valueOf(METADATA_1_FORMAT), newMetadataEntityCaptorValue.getFormat());
    }

    @Test
    public void shouldUpdateMetadata() {
        ReferenceMetadataInput metadataInput = new ReferenceMetadataInput()
            .name(METADATA_1_NAME)
            .defaultValue(METADATA_1_DEFAULT_VALUE)
            .format(ReferenceMetadataFormatType.valueOf(METADATA_1_FORMAT))
            .value(METADATA_1_VALUE);
        final Response response = target(APPLICATION).path("metadata").path(METADATA_1).request().put(Entity.json(metadataInput));
        assertEquals(HttpStatusCode.OK_200, response.getStatus());

        ArgumentCaptor<UpdateApplicationMetadataEntity> updateMetadataEntityCaptor = ArgumentCaptor.forClass(
            UpdateApplicationMetadataEntity.class
        );

        verify(applicationMetadataService).update(updateMetadataEntityCaptor.capture());
        final UpdateApplicationMetadataEntity updateMetadataEntityCaptorValue = updateMetadataEntityCaptor.getValue();
        assertEquals(APPLICATION, updateMetadataEntityCaptorValue.getApplicationId());
        assertEquals(METADATA_1, updateMetadataEntityCaptorValue.getKey());
        assertEquals(METADATA_1_NAME, updateMetadataEntityCaptorValue.getName());
        assertEquals(METADATA_1_VALUE, updateMetadataEntityCaptorValue.getValue());
        assertEquals(METADATA_1_DEFAULT_VALUE, updateMetadataEntityCaptorValue.getDefaultValue());
        assertEquals(MetadataFormat.valueOf(METADATA_1_FORMAT), updateMetadataEntityCaptorValue.getFormat());
    }

    //404 GET /metadata
    @Test
    public void shouldHaveNotFoundWhileGettingMetadata() {
        final Response response = target(UNKNOWN_APPLICATION).path("metadata").request().get();
        assertEquals(HttpStatusCode.NOT_FOUND_404, response.getStatus());
    }

    //404 POST /metadata
    @Test
    public void shouldHaveNotFoundWhileCreatingNewMetadataUnknownApplication() {
        final Response response = target(UNKNOWN_APPLICATION).path("metadata").request().post(Entity.json(new ReferenceMetadataInput()));
        assertEquals(HttpStatusCode.NOT_FOUND_404, response.getStatus());
    }

    //404 PUT /metadata/{metadataId}
    @Test
    public void shouldHaveNotFoundWhileUpdatingNewMetadataUnknownApplication() {
        final Response response = target(UNKNOWN_APPLICATION)
            .path("metadata")
            .path(METADATA_1)
            .request()
            .put(Entity.json(new ReferenceMetadataInput()));
        assertEquals(HttpStatusCode.NOT_FOUND_404, response.getStatus());
    }

    @Test
    public void shouldHaveNotFoundWhileUpdatingNewMetadataUnknownMetadata() {
        final Response response = target(APPLICATION)
            .path("metadata")
            .path(UNKNOWN_METADATA)
            .request()
            .put(Entity.json(new ReferenceMetadataInput()));
        assertEquals(HttpStatusCode.NOT_FOUND_404, response.getStatus());
    }

    //404 DELETE /metadata/{metadataId}
    @Test
    public void shouldHaveNotFoundWhileDeletingMetadataUnknonwnApplication() {
        final Response response = target(UNKNOWN_APPLICATION).path("metadata").path(METADATA_1).request().delete();
        assertEquals(HttpStatusCode.NOT_FOUND_404, response.getStatus());
    }

    @Test
    public void shouldHaveNotFoundWhileDeletingMetadataUnknownMetadata() {
        final Response response = target(APPLICATION).path("metadata").path(UNKNOWN_METADATA).request().delete();
        assertEquals(HttpStatusCode.NOT_FOUND_404, response.getStatus());
    }

    //404 GET /metadata/{metadataId}
    @Test
    public void shouldHaveNotFoundWhileGettingMetadataUnknownApplication() {
        final Response response = target(UNKNOWN_APPLICATION).path("metadata").path(METADATA_1).request().get();
        assertEquals(HttpStatusCode.NOT_FOUND_404, response.getStatus());
    }

    @Test
    public void shouldHaveNotFoundWhileGettingMetadataUnknownMember() {
        final Response response = target(APPLICATION).path("metadata").path(UNKNOWN_METADATA).request().get();
        assertEquals(HttpStatusCode.NOT_FOUND_404, response.getStatus());
    }
}
| |
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.vcs.changes.shelf;
import com.intellij.CommonBundle;
import com.intellij.diff.DiffContentFactoryEx;
import com.intellij.diff.chains.DiffRequestProducerException;
import com.intellij.diff.impl.CacheDiffRequestProcessor;
import com.intellij.diff.requests.DiffRequest;
import com.intellij.diff.requests.SimpleDiffRequest;
import com.intellij.icons.AllIcons;
import com.intellij.ide.DataManager;
import com.intellij.ide.DeleteProvider;
import com.intellij.ide.actions.EditSourceAction;
import com.intellij.ide.dnd.*;
import com.intellij.ide.dnd.aware.DnDAwareTree;
import com.intellij.ide.util.treeView.TreeState;
import com.intellij.lifecycle.PeriodicalTasksCloser;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.components.ProjectComponent;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.diff.impl.patch.FilePatch;
import com.intellij.openapi.diff.impl.patch.PatchSyntaxException;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.fileTypes.FileTypeManager;
import com.intellij.openapi.fileTypes.StdFileTypes;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.project.DumbAwareRunnable;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.startup.StartupManager;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.Couple;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vcs.*;
import com.intellij.openapi.vcs.changes.*;
import com.intellij.openapi.vcs.changes.actions.ShowDiffPreviewAction;
import com.intellij.openapi.vcs.changes.issueLinks.IssueLinkRenderer;
import com.intellij.openapi.vcs.changes.issueLinks.TreeLinkMouseListener;
import com.intellij.openapi.vcs.changes.patch.RelativePathCalculator;
import com.intellij.openapi.vcs.changes.patch.tool.PatchDiffRequest;
import com.intellij.openapi.vcs.changes.ui.ChangeListDragBean;
import com.intellij.openapi.vcs.changes.ui.ChangesViewContentManager;
import com.intellij.openapi.vcs.changes.ui.ShelvedChangeListDragBean;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.wm.ToolWindow;
import com.intellij.openapi.wm.ToolWindowManager;
import com.intellij.pom.Navigatable;
import com.intellij.pom.NavigatableAdapter;
import com.intellij.ui.*;
import com.intellij.ui.content.Content;
import com.intellij.ui.treeStructure.Tree;
import com.intellij.util.IconUtil;
import com.intellij.util.IconUtil.IconSizeWrapper;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.messages.MessageBus;
import com.intellij.util.text.DateFormatUtil;
import com.intellij.util.ui.GraphicsUtil;
import com.intellij.util.ui.tree.TreeUtil;
import com.intellij.vcsUtil.VcsUtil;
import org.jetbrains.annotations.CalledInAwt;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import javax.swing.event.TreeSelectionEvent;
import javax.swing.event.TreeSelectionListener;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.DefaultTreeModel;
import javax.swing.tree.TreeModel;
import javax.swing.tree.TreePath;
import java.awt.*;
import java.awt.event.MouseEvent;
import java.io.File;
import java.io.IOException;
import java.util.*;
import java.util.List;
import static com.intellij.icons.AllIcons.Vcs.Patch_applied;
import static com.intellij.openapi.actionSystem.Anchor.AFTER;
import static com.intellij.openapi.vcs.changes.shelf.DiffShelvedChangesAction.createAppliedTextPatch;
import static com.intellij.util.FontUtil.spaceAndThinSpace;
import static com.intellij.util.ObjectUtils.assertNotNull;
import static com.intellij.util.containers.ContainerUtil.notNullize;
public class ShelvedChangesViewManager implements ProjectComponent {
private static final Logger LOG = Logger.getInstance(ShelvedChangesViewManager.class);
@NonNls static final String SHELF_CONTEXT_MENU = "Vcs.Shelf.ContextMenu";
// Tool-window content manager and shelf backend this view renders.
private final ChangesViewContentManager myContentManager;
private final ShelveChangesManager myShelveChangesManager;
private final Project myProject;
// Tree of shelved change lists and their files.
private final ShelfTree myTree;
// Tool-window tab; null while there are no shelved change lists to show.
private Content myContent = null;
private final DeleteProvider myDeleteProvider = new MyShelveDeleteProvider();
// Set when a shelf change event is queued, cleared by updateChangesContent().
private boolean myUpdatePending = false;
// One-shot callback run after the next successful tree rebuild.
private Runnable myPostUpdateRunnable = null;

// Data keys used by shelf actions to read the current tree selection.
public static final DataKey<ShelvedChangeList[]> SHELVED_CHANGELIST_KEY = DataKey.create("ShelveChangesManager.ShelvedChangeListData");
public static final DataKey<ShelvedChangeList[]> SHELVED_RECYCLED_CHANGELIST_KEY = DataKey.create("ShelveChangesManager.ShelvedRecycledChangeListData");
public static final DataKey<List<ShelvedChange>> SHELVED_CHANGE_KEY = DataKey.create("ShelveChangesManager.ShelvedChange");
public static final DataKey<List<ShelvedBinaryFile>> SHELVED_BINARY_FILE_KEY = DataKey.create("ShelveChangesManager.ShelvedBinaryFile");
private static final Object ROOT_NODE_VALUE = new Object();
private DefaultMutableTreeNode myRoot;
// Maps (beforePath, afterPath) couples to a move/rename display string.
private final Map<Couple<String>, String> myMoveRenameInfo;
// Splitter hosting the tree and the diff preview pane.
private PreviewDiffSplitterComponent mySplitterComponent;
// Project-level component lookup; PeriodicalTasksCloser guards against
// retrieving the component from an already-disposed project.
public static ShelvedChangesViewManager getInstance(Project project) {
return PeriodicalTasksCloser.getInstance().safeGetComponent(project, ShelvedChangesViewManager.class);
}
// Builds the shelf tree and wires up all of its interactions: shelf-change
// subscription, drag & drop, diff/edit-source shortcuts, context menu,
// double-click diff, speed search, and diff-preview refresh on selection.
public ShelvedChangesViewManager(Project project, ChangesViewContentManager contentManager, ShelveChangesManager shelveChangesManager,
final MessageBus bus) {
myProject = project;
myContentManager = contentManager;
myShelveChangesManager = shelveChangesManager;
// Refresh the tab whenever the shelf changes; the actual rebuild runs on
// the EDT outside modal dialogs.
bus.connect().subscribe(ShelveChangesManager.SHELF_TOPIC, new ChangeListener() {
@Override
public void stateChanged(ChangeEvent e) {
myUpdatePending = true;
ApplicationManager.getApplication().invokeLater(() -> updateChangesContent(), ModalityState.NON_MODAL);
}
});
myMoveRenameInfo = new HashMap<>();

myTree = new ShelfTree();
myTree.setRootVisible(false);
myTree.setShowsRootHandles(true);
myTree.setCellRenderer(new ShelfTreeCellRenderer(project, myMoveRenameInfo));
// Make issue links in the renderer clickable.
new TreeLinkMouseListener(new ShelfTreeCellRenderer(project, myMoveRenameInfo)).installOn(myTree);
// Shelf lists can be dragged out (e.g. to unshelve) but nothing drops here.
DnDSupport.createBuilder(myTree).disableAsTarget().setImageProvider(this::createDraggedImage).setBeanProvider(this::createDragStartBean)
.install();

// Reuse the shortcuts of the registered diff and edit-source actions.
final AnAction showDiffAction = ActionManager.getInstance().getAction("ShelvedChanges.Diff");
showDiffAction.registerCustomShortcutSet(showDiffAction.getShortcutSet(), myTree);
final EditSourceAction editSourceAction = new EditSourceAction();
editSourceAction.registerCustomShortcutSet(editSourceAction.getShortcutSet(), myTree);

PopupHandler.installPopupHandler(myTree, "ShelvedChangesPopupMenu", SHELF_CONTEXT_MENU);

// Double click opens the diff for the selected shelved change(s).
new DoubleClickListener() {
@Override
protected boolean onDoubleClick(MouseEvent e) {
DataContext dc = DataManager.getInstance().getDataContext(myTree);
if (getShelveChanges(dc).isEmpty() && getBinaryShelveChanges(dc).isEmpty()) return false;
DiffShelvedChangesAction.showShelvedChangesDiff(dc);
return true;
}
}.installOn(myTree);

// Speed search matches list descriptions and shelved file names.
new TreeSpeedSearch(myTree, o -> {
final Object lc = o.getLastPathComponent();
final Object lastComponent = lc == null ? null : ((DefaultMutableTreeNode) lc).getUserObject();
if (lastComponent instanceof ShelvedChangeList) {
return ((ShelvedChangeList) lastComponent).DESCRIPTION;
} else if (lastComponent instanceof ShelvedChange) {
final ShelvedChange shelvedChange = (ShelvedChange)lastComponent;
return shelvedChange.getBeforeFileName() == null ? shelvedChange.getAfterFileName() : shelvedChange.getBeforeFileName();
} else if (lastComponent instanceof ShelvedBinaryFile) {
final ShelvedBinaryFile sbf = (ShelvedBinaryFile) lastComponent;
final String value = sbf.BEFORE_PATH == null ? sbf.AFTER_PATH : sbf.BEFORE_PATH;
// Strip the directory part, handling both separator styles.
int idx = value.lastIndexOf("/");
idx = (idx == -1) ? value.lastIndexOf("\\") : idx;
return idx > 0 ? value.substring(idx + 1) : value;
}
return null;
}, true);

// Keep the diff preview in sync with the tree selection.
myTree.addTreeSelectionListener(new TreeSelectionListener() {
@Override
public void valueChanged(TreeSelectionEvent e) {
mySplitterComponent.updatePreview();
}
});
}
@Override
public void projectOpened() {
// Defer the initial shelf load until post-startup activities run.
StartupManager startupManager = StartupManager.getInstance(myProject);
if (startupManager == null) {
// defensive: presumably only possible during project disposal — TODO confirm
LOG.error("Couldn't start loading shelved changes");
return;
}
startupManager.registerPostStartupActivity((DumbAwareRunnable)() -> updateChangesContent());
}
@Override
@NonNls @NotNull
public String getComponentName() {
// Unique component name used for registration/diagnostics.
return "ShelvedChangesViewManager";
}
/**
 * Rebuilds the shelf tool-window content from the current (and recycled)
 * shelved change lists: removes the tab when there is nothing to show,
 * otherwise (re)creates it, rebuilds the tree model while preserving the
 * expansion/selection state, and runs any pending post-update callback.
 */
private void updateChangesContent() {
  myUpdatePending = false;
  final List<ShelvedChangeList> changeLists = new ArrayList<>(myShelveChangesManager.getShelvedChangeLists());
  changeLists.addAll(myShelveChangesManager.getRecycledShelvedChangeLists());
  if (changeLists.isEmpty()) {
    // Nothing shelved: drop the tab and fall back to the Local Changes view.
    if (myContent != null) {
      myContentManager.removeContent(myContent);
      myContentManager.selectContent(ChangesViewContentManager.LOCAL_CHANGES);
    }
    myContent = null;
  }
  else {
    if (myContent == null) {
      myTree.updateUI();
      JPanel rootPanel = createRootPanel();
      myContent = new MyShelfContent(rootPanel, VcsBundle.message("shelf.tab"), false);
      myContent.setCloseable(false);
      myContentManager.addContent(myContent);
    }
    // Preserve the user's expansion/selection across the model swap.
    TreeState state = TreeState.createOn(myTree);
    myTree.setModel(buildChangesModel());
    state.applyTo(myTree);
    if (myPostUpdateRunnable != null) {
      myPostUpdateRunnable.run();
    }
  }
  // One-shot callback: cleared whether or not it ran.
  myPostUpdateRunnable = null;
}
// Assembles the shelf tab UI: scrollable tree + diff-preview splitter in the
// center, the ShelvedChangesToolbar (plus a show/hide-preview toggle) on the
// left, with the tree acting as the panel's data provider.
@NotNull
private JPanel createRootPanel() {
JScrollPane pane = ScrollPaneFactory.createScrollPane(myTree);
pane.setBorder(null);

DefaultActionGroup actionGroup = new DefaultActionGroup();
actionGroup.addAll((ActionGroup)ActionManager.getInstance().getAction("ShelvedChangesToolbar"));
// Toggle that shows/hides the diff preview and persists the choice in
// the project's VCS configuration.
ShowDiffPreviewAction diffPreviewAction = new ShowDiffPreviewAction() {
@Override
public void setSelected(AnActionEvent e, boolean state) {
super.setSelected(e, state);
assertNotNull(mySplitterComponent).setDetailsOn(state);
VcsConfiguration.getInstance(myProject).SHELVE_DETAILS_PREVIEW_SHOWN = state;
}
};
actionGroup.add(diffPreviewAction, new Constraints(AFTER, "ShelvedChanges.ShowHideDeleted"));
MyShelvedPreviewProcessor changeProcessor = new MyShelvedPreviewProcessor(myProject);
mySplitterComponent =
new PreviewDiffSplitterComponent(pane, changeProcessor, "ShelvedChangesViewManager.DETAILS_SPLITTER_PROPORTION",
VcsConfiguration.getInstance(myProject).SHELVE_DETAILS_PREVIEW_SHOWN);
// Sync the toggle's initial state with the restored splitter state.
diffPreviewAction.setSelected(null, mySplitterComponent.isDetailsOn());
ActionToolbar toolbar = ActionManager.getInstance().createActionToolbar("ShelvedChanges", actionGroup, false);
JPanel rootPanel = new JPanel(new BorderLayout());
rootPanel.add(toolbar.getComponent(), BorderLayout.WEST);
rootPanel.add(mySplitterComponent, BorderLayout.CENTER);
DataManager.registerDataProvider(rootPanel, myTree);
return rootPanel;
}
/**
 * Rebuilds the two-level tree model: shelved changelists at the first level
 * (newest first), their files (text changes and binaries) at the second
 * (sorted by file name). Also refreshes the move/rename hint cache.
 */
private TreeModel buildChangesModel() {
myRoot = new DefaultMutableTreeNode(ROOT_NODE_VALUE); // not null for TreeState matching to work
DefaultTreeModel model = new DefaultTreeModel(myRoot);
final List<ShelvedChangeList> changeLists = new ArrayList<>(myShelveChangesManager.getShelvedChangeLists());
Collections.sort(changeLists, ChangelistComparator.getInstance());
if (myShelveChangesManager.isShowRecycled()) {
ArrayList<ShelvedChangeList> recycled = new ArrayList<>(myShelveChangesManager.getRecycledShelvedChangeLists());
changeLists.addAll(recycled);
// Re-sort so recycled lists are interleaved by date with the active ones.
Collections.sort(changeLists, ChangelistComparator.getInstance());
}
myMoveRenameInfo.clear();
for(ShelvedChangeList changeList: changeLists) {
DefaultMutableTreeNode node = new DefaultMutableTreeNode(changeList);
model.insertNodeInto(node, myRoot, myRoot.getChildCount());
// Mixed list of ShelvedChange and ShelvedBinaryFile entries for this changelist.
final List<Object> shelvedFilesNodes = new ArrayList<>();
List<ShelvedChange> changes = changeList.getChanges(myProject);
for(ShelvedChange change: changes) {
// Cache a "moved from ..." message for renamed files; used by the cell renderer.
putMovedMessage(change.getBeforePath(), change.getAfterPath());
shelvedFilesNodes.add(change);
}
List<ShelvedBinaryFile> binaryFiles = changeList.getBinaryFiles();
for(ShelvedBinaryFile file: binaryFiles) {
putMovedMessage(file.BEFORE_PATH, file.AFTER_PATH);
shelvedFilesNodes.add(file);
}
Collections.sort(shelvedFilesNodes, ShelvedFilePatchComparator.getInstance());
for (int i = 0; i < shelvedFilesNodes.size(); i++) {
final Object filesNode = shelvedFilesNodes.get(i);
final DefaultMutableTreeNode pathNode = new DefaultMutableTreeNode(filesNode);
model.insertNodeInto(pathNode, node, i);
}
}
return model;
}
/**
 * Orders shelved changelists in reverse chronological order (newest first).
 */
private static class ChangelistComparator implements Comparator<ShelvedChangeList> {
// Comparator is stateless, so one shared instance suffices.
private static final ChangelistComparator INSTANCE = new ChangelistComparator();
/** @return the shared comparator instance */
public static ChangelistComparator getInstance() {
return INSTANCE;
}
@Override
public int compare(ShelvedChangeList o1, ShelvedChangeList o2) {
// Reversed operand order yields descending date order.
return o2.DATE.compareTo(o1.DATE);
}
}
/**
 * Records a human-readable move/rename hint for the given before/after path
 * pair; nothing is stored when the pair does not represent a move.
 */
private void putMovedMessage(final String beforeName, final String afterName) {
final String text = RelativePathCalculator.getMovedString(beforeName, afterName);
if (text == null) {
return;
}
myMoveRenameInfo.put(Couple.of(beforeName, afterName), text);
}
/**
 * Brings the shelf tab to front and, when a changelist is given, selects its
 * node in the tree. If a model rebuild is pending, activation is deferred
 * until the update finishes (via myPostUpdateRunnable) so the node exists.
 */
public void activateView(final ShelvedChangeList list) {
Runnable runnable = () -> {
if (list != null) {
TreeUtil.selectNode(myTree, TreeUtil.findNodeWithObject(myRoot, list));
}
myContentManager.setSelectedContent(myContent);
ToolWindow window = ToolWindowManager.getInstance(myProject).getToolWindow(ChangesViewContentManager.TOOLWINDOW_ID);
if (!window.isVisible()) {
window.activate(null);
}
};
if (myUpdatePending) {
// Run after the pending tree update; see the update logic that consumes this field.
myPostUpdateRunnable = runnable;
}
else {
runnable.run();
}
}
/**
 * The shelf tree. Implements DataProvider so toolbar/context-menu actions can
 * query the current selection as changelists, changes, binaries, Change[] or
 * navigatables, depending on the requested data key.
 */
private class ShelfTree extends Tree implements DataProvider {
@Nullable
@Override
public Object getData(@NonNls String dataId) {
if (SHELVED_CHANGELIST_KEY.is(dataId)) {
// Active (non-recycled) changelists only.
final Set<ShelvedChangeList> changeLists = getSelectedLists(false);
if (changeLists.size() > 0) {
return changeLists.toArray(new ShelvedChangeList[changeLists.size()]);
}
}
else if (SHELVED_RECYCLED_CHANGELIST_KEY.is(dataId)) {
// Recycled (already unshelved) changelists only.
final Set<ShelvedChangeList> changeLists = getSelectedLists(true);
if (changeLists.size() > 0) {
return changeLists.toArray(new ShelvedChangeList[changeLists.size()]);
}
}
else if (SHELVED_CHANGE_KEY.is(dataId)) {
return TreeUtil.collectSelectedObjectsOfType(this, ShelvedChange.class);
}
else if (SHELVED_BINARY_FILE_KEY.is(dataId)) {
return TreeUtil.collectSelectedObjectsOfType(this, ShelvedBinaryFile.class);
}
else if (VcsDataKeys.HAVE_SELECTED_CHANGES.is(dataId)) {
return getSelectionCount() > 0;
}
else if (VcsDataKeys.CHANGES.is(dataId)) {
// Convert the selection to Change[]: either the individually selected
// files, or — when only changelist nodes are selected — all their files.
List<ShelvedChange> shelvedChanges = TreeUtil.collectSelectedObjectsOfType(this, ShelvedChange.class);
final List<ShelvedBinaryFile> shelvedBinaryFiles = TreeUtil.collectSelectedObjectsOfType(this, ShelvedBinaryFile.class);
if (!shelvedChanges.isEmpty() || !shelvedBinaryFiles.isEmpty()) {
final List<Change> changes = new ArrayList<>(shelvedChanges.size() + shelvedBinaryFiles.size());
for (ShelvedChange shelvedChange : shelvedChanges) {
changes.add(shelvedChange.getChange(myProject));
}
for (ShelvedBinaryFile binaryFile : shelvedBinaryFiles) {
changes.add(binaryFile.createChange(myProject));
}
return changes.toArray(new Change[changes.size()]);
}
else {
// No file nodes selected: expand selected changelists into their changes.
final List<ShelvedChangeList> changeLists = TreeUtil.collectSelectedObjectsOfType(this, ShelvedChangeList.class);
final List<Change> changes = new ArrayList<>();
for (ShelvedChangeList changeList : changeLists) {
shelvedChanges = changeList.getChanges(myProject);
for (ShelvedChange shelvedChange : shelvedChanges) {
changes.add(shelvedChange.getChange(myProject));
}
final List<ShelvedBinaryFile> binaryFiles = changeList.getBinaryFiles();
for (ShelvedBinaryFile file : binaryFiles) {
changes.add(file.createChange(myProject));
}
}
return changes.toArray(new Change[changes.size()]);
}
}
else if (PlatformDataKeys.DELETE_ELEMENT_PROVIDER.is(dataId)) {
return myDeleteProvider;
}
else if (CommonDataKeys.NAVIGATABLE_ARRAY.is(dataId)) {
// Navigate to the pre-shelve version of each selected text change,
// including changes of selected changelists; added files have no
// before-revision to navigate to and are skipped.
List<ShelvedChange> shelvedChanges = new ArrayList<>(TreeUtil.collectSelectedObjectsOfType(this, ShelvedChange.class));
final ArrayDeque<Navigatable> navigatables = new ArrayDeque<>();
final List<ShelvedChangeList> changeLists = TreeUtil.collectSelectedObjectsOfType(this, ShelvedChangeList.class);
for (ShelvedChangeList changeList : changeLists) {
shelvedChanges.addAll(changeList.getChanges(myProject));
}
for (final ShelvedChange shelvedChange : shelvedChanges) {
if (shelvedChange.getBeforePath() != null && !FileStatus.ADDED.equals(shelvedChange.getFileStatus())) {
final NavigatableAdapter navigatable = new NavigatableAdapter() {
@Override
public void navigate(boolean requestFocus) {
final VirtualFile vf = shelvedChange.getBeforeVFUnderProject(myProject);
if (vf != null) {
navigate(myProject, vf, true);
}
}
};
navigatables.add(navigatable);
}
}
return navigatables.toArray(new Navigatable[navigatables.size()]);
}
return null;
}
/**
 * Collects the changelists owning the selected nodes, filtered by recycled
 * state. Path component 1 is the changelist level (0 is the invisible root),
 * so both changelist nodes and their file children map to their changelist.
 */
private Set<ShelvedChangeList> getSelectedLists(final boolean recycled) {
final TreePath[] selections = getSelectionPaths();
final Set<ShelvedChangeList> changeLists = new HashSet<>();
if (selections != null) {
for(TreePath path: selections) {
if (path.getPathCount() >= 2) {
DefaultMutableTreeNode node = (DefaultMutableTreeNode) path.getPathComponent(1);
if (node.getUserObject() instanceof ShelvedChangeList) {
final ShelvedChangeList list = (ShelvedChangeList)node.getUserObject();
if (((! recycled) && (! list.isRecycled())) ||
(recycled && list.isRecycled())) {
changeLists.add(list);
}
}
}
}
}
return changeLists;
}
}
/**
 * Collects every shelved changelist carried by the given context — both the
 * active and the recycled ones. Returns an empty list when neither is present.
 */
@NotNull
public static List<ShelvedChangeList> getShelvedLists(@NotNull final DataContext dataContext) {
final ShelvedChangeList[] shelved = SHELVED_CHANGELIST_KEY.getData(dataContext);
final ShelvedChangeList[] recycled = SHELVED_RECYCLED_CHANGELIST_KEY.getData(dataContext);
if (shelved == null && recycled == null) {
return Collections.emptyList();
}
final List<ShelvedChangeList> result = ContainerUtil.newArrayList();
if (shelved != null) {
ContainerUtil.addAll(result, shelved);
}
if (recycled != null) {
ContainerUtil.addAll(result, recycled);
}
return result;
}
/** @return the shelved text changes carried by the context, or an empty list */
@NotNull
public static List<ShelvedChange> getShelveChanges(@NotNull final DataContext dataContext) {
return notNullize(dataContext.getData(SHELVED_CHANGE_KEY));
}
/** @return the shelved binary files carried by the context, or an empty list */
@NotNull
public static List<ShelvedBinaryFile> getBinaryShelveChanges(@NotNull final DataContext dataContext) {
return notNullize(dataContext.getData(SHELVED_BINARY_FILE_KEY));
}
/**
 * Orders shelved entries (ShelvedChange and ShelvedBinaryFile) by file name,
 * case-insensitively, mirroring the ordering used for local changes.
 * Entries whose path cannot be determined sort first.
 */
private final static class ShelvedFilePatchComparator implements Comparator<Object> {
private final static ShelvedFilePatchComparator ourInstance = new ShelvedFilePatchComparator();
public static ShelvedFilePatchComparator getInstance() {
return ourInstance;
}
@Override
public int compare(final Object o1, final Object o2) {
final String path1 = getPath(o1);
final String path2 = getPath(o2);
// case-insensitive; as in local changes
// Fixed: when both paths are null, report equality. The previous code
// returned -1 for both compare(a,b) and compare(b,a), violating the
// Comparator contract (sort may throw IllegalArgumentException).
if (path1 == null) return path2 == null ? 0 : -1;
if (path2 == null) return 1;
return path1.compareToIgnoreCase(path2);
}
/**
 * Extracts the file name (last path component) of a shelved entry, or null
 * when the entry type is unknown or no path is available.
 */
private static String getPath(final Object patch) {
String path = null;
if (patch instanceof ShelvedBinaryFile) {
final ShelvedBinaryFile binaryFile = (ShelvedBinaryFile) patch;
// A binary file may carry only one of the two paths (added/deleted files).
path = binaryFile.BEFORE_PATH;
path = (path == null) ? binaryFile.AFTER_PATH : path;
} else if (patch instanceof ShelvedChange) {
final ShelvedChange shelvedChange = (ShelvedChange)patch;
// NOTE(review): assumes getBeforePath() is non-null for text changes — confirm.
path = shelvedChange.getBeforePath().replace('/', File.separatorChar);
}
if (path == null) {
return null;
}
final int pos = path.lastIndexOf(File.separatorChar);
return (pos >= 0) ? path.substring(pos + 1) : path;
}
}
/**
 * Renders shelf tree nodes. Changelist rows get an icon (patch / faded
 * "applied" patch / desaturated delete icon), a linkified description, a file
 * count and a pretty date; file rows get a status-colored file name, an
 * optional move/rename hint and their parent directory.
 */
private static class ShelfTreeCellRenderer extends ColoredTreeCellRenderer {
private final IssueLinkRenderer myIssueLinkRenderer;
// Shared with the view: maps (beforePath, afterPath) to a "moved from ..." message.
private final Map<Couple<String>, String> myMoveRenameInfo;
private static final Icon PatchIcon = StdFileTypes.PATCH.getIcon();
// Recycled (already applied) lists are shown with a semi-transparent patch icon.
private static final Icon AppliedPatchIcon =
new IconSizeWrapper(Patch_applied, Patch_applied.getIconWidth(), Patch_applied.getIconHeight()) {
@Override
public void paintIcon(Component c, Graphics g, int x, int y) {
GraphicsUtil.paintWithAlpha(g, 0.6f);
super.paintIcon(c, g, x, y);
}
};
private static final Icon DisabledToDeleteIcon = IconUtil.desaturate(AllIcons.Actions.GC);
public ShelfTreeCellRenderer(Project project, final Map<Couple<String>, String> moveRenameInfo) {
myMoveRenameInfo = moveRenameInfo;
myIssueLinkRenderer = new IssueLinkRenderer(project, this);
}
@Override
public void customizeCellRenderer(@NotNull JTree tree, Object value, boolean selected, boolean expanded, boolean leaf, int row, boolean hasFocus) {
DefaultMutableTreeNode node = (DefaultMutableTreeNode) value;
Object nodeValue = node.getUserObject();
if (nodeValue instanceof ShelvedChangeList) {
ShelvedChangeList changeListData = (ShelvedChangeList) nodeValue;
if (changeListData.isRecycled()) {
myIssueLinkRenderer.appendTextWithLinks(changeListData.DESCRIPTION, SimpleTextAttributes.GRAYED_BOLD_ATTRIBUTES);
setIcon(changeListData.isMarkedToDelete() ? DisabledToDeleteIcon : AppliedPatchIcon);
}
else {
myIssueLinkRenderer.appendTextWithLinks(changeListData.DESCRIPTION);
setIcon(PatchIcon);
}
// Child count equals the number of files shelved in this list.
int count = node.getChildCount();
String numFilesText = spaceAndThinSpace() + count + " " + StringUtil.pluralize("file", count) + ",";
append(numFilesText, SimpleTextAttributes.GRAYED_ATTRIBUTES);
String date = DateFormatUtil.formatPrettyDateTime(changeListData.DATE);
append(" " + date, SimpleTextAttributes.GRAYED_ATTRIBUTES);
}
else if (nodeValue instanceof ShelvedChange) {
ShelvedChange change = (ShelvedChange) nodeValue;
final String movedMessage = myMoveRenameInfo.get(Couple.of(change.getBeforePath(), change.getAfterPath()));
renderFileName(change.getBeforePath(), change.getFileStatus(), movedMessage);
}
else if (nodeValue instanceof ShelvedBinaryFile) {
ShelvedBinaryFile binaryFile = (ShelvedBinaryFile) nodeValue;
// Binary files may lack a before-path (e.g. newly added files).
String path = binaryFile.BEFORE_PATH;
if (path == null) {
path = binaryFile.AFTER_PATH;
}
final String movedMessage = myMoveRenameInfo.get(Couple.of(binaryFile.BEFORE_PATH, binaryFile.AFTER_PATH));
renderFileName(path, binaryFile.getFileStatus(), movedMessage);
}
}
/**
 * Appends "fileName [movedMessage] directory" using the file-status color
 * for the name and sets the file-type icon. The path is converted to
 * platform separators first.
 */
private void renderFileName(String path, final FileStatus fileStatus, final String movedMessage) {
path = path.replace('/', File.separatorChar);
int pos = path.lastIndexOf(File.separatorChar);
String fileName;
String directory;
if (pos >= 0) {
// Fixed: dropped a replace(File.separatorChar, File.separatorChar) call
// here — it replaced a char with itself (no-op); separators were already
// normalized above.
directory = path.substring(0, pos);
fileName = path.substring(pos+1);
}
else {
directory = "<project root>";
fileName = path;
}
append(fileName, new SimpleTextAttributes(SimpleTextAttributes.STYLE_PLAIN, fileStatus.getColor()));
if (movedMessage != null) {
append(movedMessage, SimpleTextAttributes.REGULAR_ATTRIBUTES);
}
append(spaceAndThinSpace() + directory, SimpleTextAttributes.GRAYED_ATTRIBUTES);
setIcon(FileTypeManager.getInstance().getFileTypeByFileName(fileName).getIcon());
}
}
/**
 * Delete support for the shelf tree. Deletes fully selected changelists
 * outright; for partially selected lists, removes only the selected files and
 * rewrites the list with the remaining patches/binaries.
 */
private class MyShelveDeleteProvider implements DeleteProvider {
@Override
public void deleteElement(@NotNull DataContext dataContext) {
final Project project = CommonDataKeys.PROJECT.getData(dataContext);
if (project == null) return;
// Lists whose node itself is selected are deleted entirely.
List<ShelvedChangeList> shelvedListsToDelete = TreeUtil.collectSelectedObjectsOfType(myTree, ShelvedChangeList.class);
ArrayList<ShelvedChangeList> shelvedListsFromChanges = ContainerUtil.newArrayList(getShelvedLists(dataContext));
// filter changes
// Keep only lists that are affected via selected files, not selected as a whole.
shelvedListsFromChanges.removeAll(shelvedListsToDelete);
// Files already covered by a fully deleted list need no individual removal.
List<ShelvedChange> changesToDelete = getChangesNotInLists(shelvedListsToDelete, getShelveChanges(dataContext));
List<ShelvedBinaryFile> binariesToDelete = getBinariesNotInLists(shelvedListsToDelete, getBinaryShelveChanges(dataContext));
int changeListSize = shelvedListsToDelete.size();
int fileListSize = binariesToDelete.size() + changesToDelete.size();
if (fileListSize == 0 && changeListSize == 0) return;
String message = VcsBundle.message("shelve.changes.delete.items.confirm", constructDeleteFilesInfoMessage(fileListSize),
changeListSize != 0 && fileListSize != 0 ? " and " : "",
constructShelvedListInfoMessage(changeListSize, ContainerUtil.getFirstItem(shelvedListsToDelete)));
int rc = Messages
.showOkCancelDialog(myProject, message, VcsBundle.message("shelvedChanges.delete.title"), CommonBundle.message("button.delete"),
CommonBundle.getCancelButtonText(), Messages.getWarningIcon());
if (rc != Messages.OK) return;
for (ShelvedChangeList changeList : shelvedListsToDelete) {
// NOTE(review): uses myProject here but the context 'project' below — presumably the same project; confirm.
ShelveChangesManager.getInstance(myProject).deleteChangeList(changeList);
}
for (ShelvedChangeList list : shelvedListsFromChanges) {
removeChangesFromChangeList(project, list, changesToDelete, binariesToDelete);
}
}
/** @return the given binaries minus those belonging to a list that is deleted as a whole */
private List<ShelvedBinaryFile> getBinariesNotInLists(@NotNull List<ShelvedChangeList> listsToDelete,
@NotNull List<ShelvedBinaryFile> binaryFiles) {
List<ShelvedBinaryFile> result = new ArrayList<>(binaryFiles);
for (ShelvedChangeList list : listsToDelete) {
result.removeAll(list.getBinaryFiles());
}
return result;
}
/** @return the given changes minus those belonging to a list that is deleted as a whole */
@NotNull
private List<ShelvedChange> getChangesNotInLists(@NotNull List<ShelvedChangeList> listsToDelete,
@NotNull List<ShelvedChange> shelvedChanges) {
List<ShelvedChange> result = new ArrayList<>(shelvedChanges);
for (ShelvedChangeList list : listsToDelete) {
result.removeAll(list.getChanges(myProject));
}
return result;
}
/** Builds the HTML fragment describing the changelists part of the confirmation dialog. */
@NotNull
private String constructShelvedListInfoMessage(int size, @Nullable ShelvedChangeList first) {
if (size == 0) return "";
String message;
if (size == 1 && first != null) {
message = "<b> one shelved changelist</b> named [<b>" + first.DESCRIPTION + "</b>]";
}
else {
message = "<b>" + size + " shelved " + StringUtil.pluralize("changelist", size) + "</b>";
}
return message + " with all changes inside";
}
/** Builds the HTML fragment describing the individual-files part of the confirmation dialog. */
@NotNull
private String constructDeleteFilesInfoMessage(int size) {
if (size == 0) return "";
return "<b>" + (size == 1 ? "one" : size) + StringUtil.pluralize(" file", size) + "</b>";
}
/**
 * Rewrites a changelist without the given files: loads the patches of the
 * surviving changes and saves them (plus surviving binaries) back. Patch
 * load failures are collected and reported; the save still proceeds with
 * whatever loaded successfully.
 */
private void removeChangesFromChangeList(@NotNull Project project,
@NotNull ShelvedChangeList list,
@NotNull List<ShelvedChange> changes,
@NotNull List<ShelvedBinaryFile> binaryFiles) {
final ArrayList<ShelvedBinaryFile> oldBinaries = new ArrayList<>(list.getBinaryFiles());
final ArrayList<ShelvedChange> oldChanges = new ArrayList<>(list.getChanges(project));
oldBinaries.removeAll(binaryFiles);
oldChanges.removeAll(changes);
final CommitContext commitContext = new CommitContext();
final List<FilePatch> patches = new ArrayList<>();
final List<VcsException> exceptions = new ArrayList<>();
for (ShelvedChange change : oldChanges) {
try {
patches.add(change.loadFilePatch(myProject, commitContext));
}
catch (IOException | PatchSyntaxException e) {
exceptions.add(new VcsException(e));
}
}
myShelveChangesManager.saveRemainingPatches(list, patches, oldBinaries, commitContext);
if (! exceptions.isEmpty()) {
// Truncate long descriptions for the error dialog title.
String title = list.DESCRIPTION == null ? "" : list.DESCRIPTION;
title = title.substring(0, Math.min(10, title.length()));
AbstractVcsHelper.getInstance(myProject).showErrors(exceptions, "Deleting files from '" + title + "'");
}
}
@Override
public boolean canDeleteElement(@NotNull DataContext dataContext) {
// Deletion is offered whenever at least one shelved list is in the context.
return !getShelvedLists(dataContext).isEmpty();
}
}
/**
 * Tool-window tab content for the shelf. Accepts changes dragged from the
 * changes view and shelves them silently in the background.
 */
public class MyShelfContent extends DnDTargetContentAdapter {
private MyShelfContent(JPanel panel, String displayName, boolean isLockable) {
super(panel, displayName, isLockable);
}
/** Shelves the dragged changes, saving all open documents first. */
@Override
public void drop(DnDEvent event) {
Object dragged = event.getAttachedObject();
if (!(dragged instanceof ChangeListDragBean)) {
return;
}
FileDocumentManager.getInstance().saveAllDocuments();
List<Change> dropped = Arrays.asList(((ChangeListDragBean)dragged).getChanges());
myShelveChangesManager.shelveSilentlyUnderProgress(dropped);
}
/** Enables the drop indicator only for drags that carry at least one change. */
@Override
public boolean update(DnDEvent event) {
Object dragged = event.getAttachedObject();
if (dragged instanceof ChangeListDragBean) {
boolean hasChanges = ((ChangeListDragBean)dragged).getChanges().length > 0;
event.setDropPossible(hasChanges);
return false;
}
return true;
}
}
/**
 * Starts a drag of the current shelf selection (changes, binaries and whole
 * lists) for move actions; copy drags are not supported.
 */
@Nullable
private DnDDragStartBean createDragStartBean(@NotNull DnDActionInfo info) {
if (info.isMove()) {
DataContext dc = DataManager.getInstance().getDataContext(myTree);
return new DnDDragStartBean(new ShelvedChangeListDragBean(getShelveChanges(dc), getBinaryShelveChanges(dc), getShelvedLists(dc)));
}
return null;
}
/** Builds the drag image, anchored so it appears above and left of the cursor. */
@NotNull
private DnDImage createDraggedImage(@NotNull DnDActionInfo info) {
String imageText = "Unshelve changes";
Image image = DnDAwareTree.getDragImage(myTree, imageText, null).getFirst();
return new DnDImage(image, new Point(-image.getWidth(null), -image.getHeight(null)));
}
/**
 * Diff-preview backend for the shelf splitter. Tracks the currently previewed
 * shelved element and produces diff requests for it (patch diff for text
 * changes, empty-vs-content diff for binaries).
 */
private class MyShelvedPreviewProcessor extends CacheDiffRequestProcessor<ShelvedWrapper> implements DiffPreviewUpdateProcessor {
@NotNull private final DiffShelvedChangesAction.PatchesPreloader myPreloader;
@Nullable private ShelvedWrapper myCurrentShelvedElement;
public MyShelvedPreviewProcessor(@NotNull Project project) {
super(project);
myPreloader = new DiffShelvedChangesAction.PatchesPreloader(project);
Disposer.register(project, this);
}
@NotNull
@Override
protected String getRequestName(@NotNull ShelvedWrapper provider) {
return provider.getRequestName();
}
@Override
protected ShelvedWrapper getCurrentRequestProvider() {
return myCurrentShelvedElement;
}
/** Drops the current element and clears the preview pane. */
@CalledInAwt
@Override
public void clear() {
myCurrentShelvedElement = null;
updateRequest();
}
/**
 * Re-syncs the preview with the tree selection: keeps the current element
 * when it is still selected, otherwise previews the first selected change
 * (text changes take precedence over binaries).
 */
@CalledInAwt
public void refresh() {
DataContext dc = DataManager.getInstance().getDataContext(myTree);
List<ShelvedChange> selectedChanges = getShelveChanges(dc);
List<ShelvedBinaryFile> selectedBinaryChanges = getBinaryShelveChanges(dc);
if (selectedChanges.isEmpty() && selectedBinaryChanges.isEmpty()) {
clear();
return;
}
if (myCurrentShelvedElement != null) {
if (keepBinarySelection(selectedBinaryChanges, myCurrentShelvedElement.getBinaryFile()) ||
keepShelvedSelection(selectedChanges, myCurrentShelvedElement.getShelvedChange())) {
dropCachesIfNeededAndUpdate(myCurrentShelvedElement);
return;
}
}
//getFirstSelected
myCurrentShelvedElement = !selectedChanges.isEmpty()
? new ShelvedWrapper(selectedChanges.get(0))
: new ShelvedWrapper(selectedBinaryChanges.get(0));
dropCachesIfNeededAndUpdate(myCurrentShelvedElement);
}
/** Invalidates cached diff requests when the underlying patch file changed on disk. */
private void dropCachesIfNeededAndUpdate(@NotNull ShelvedWrapper currentShelvedElement) {
ShelvedChange shelvedChange = currentShelvedElement.getShelvedChange();
boolean dropCaches = shelvedChange != null && myPreloader.isPatchFileChanged(shelvedChange.getPatchPath());
if (dropCaches) {
dropCaches();
}
updateRequest(dropCaches);
}
/** @return true when the currently previewed text change is still part of the selection */
boolean keepShelvedSelection(@NotNull List<ShelvedChange> selectedChanges, @Nullable ShelvedChange currentShelvedChange) {
return currentShelvedChange != null && selectedChanges.contains(currentShelvedChange);
}
/** @return true when the currently previewed binary is still part of the selection */
boolean keepBinarySelection(@NotNull List<ShelvedBinaryFile> selectedBinaryChanges, @Nullable ShelvedBinaryFile currentBinary) {
return currentBinary != null && selectedBinaryChanges.contains(currentBinary);
}
@NotNull
@Override
protected DiffRequest loadRequest(@NotNull ShelvedWrapper provider, @NotNull ProgressIndicator indicator)
throws ProcessCanceledException, DiffRequestProducerException {
try {
ShelvedChange shelvedChange = provider.getShelvedChange();
if (shelvedChange != null) {
// Text change: show the applied patch.
return new PatchDiffRequest(createAppliedTextPatch(myPreloader.getPatch(shelvedChange, new CommitContext())));
}
// Binary: show empty content vs the shelved bytes.
DiffContentFactoryEx factory = DiffContentFactoryEx.getInstanceEx();
ShelvedBinaryFile binaryFile = assertNotNull(provider.getBinaryFile());
byte[] binaryContent = binaryFile.createBinaryContentRevision(myProject).getBinaryContent();
FileType fileType = VcsUtil.getFilePath(binaryFile.SHELVED_PATH).getFileType();
return new SimpleDiffRequest(getRequestName(provider), factory.createEmpty(),
factory.createBinary(myProject, binaryContent, fileType, getRequestName(provider)), null, null);
}
catch (VcsException | IOException e) {
throw new DiffRequestProducerException("Can't show diff for '" + getRequestName(provider) + "'", e);
}
}
}
}
| |
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper.core;
import com.google.common.base.Objects;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.FieldInfo.IndexOptions;
import org.apache.lucene.index.Term;
import org.apache.lucene.queries.TermsFilter;
import org.apache.lucene.search.*;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ElasticSearchIllegalArgumentException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.lucene.search.RegexpFilter;
import org.elasticsearch.common.lucene.search.TermFilter;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.codec.postingsformat.PostingFormats;
import org.elasticsearch.index.codec.postingsformat.PostingsFormatProvider;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.*;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.similarity.SimilarityProvider;
import java.io.IOException;
import java.util.List;
/**
*
*/
public abstract class AbstractFieldMapper<T> implements FieldMapper<T>, Mapper {
/** Shared defaults for all field mappers. */
public static class Defaults {
// Base Lucene field type: indexed, analyzed, not stored, no term vectors,
// norms enabled, full postings (docs, freqs and positions).
public static final FieldType FIELD_TYPE = new FieldType();
static {
FIELD_TYPE.setIndexed(true);
FIELD_TYPE.setTokenized(true);
FIELD_TYPE.setStored(false);
FIELD_TYPE.setStoreTermVectors(false);
FIELD_TYPE.setOmitNorms(false);
FIELD_TYPE.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS);
// Frozen so the shared instance cannot be mutated by accident.
FIELD_TYPE.freeze();
}
public static final float BOOST = 1.0f;
}
/**
 * Builder variant that widens the protected configuration methods of
 * {@link Builder} to public, for mappers that expose them to external callers.
 * Every method simply delegates to the corresponding protected super method.
 */
public abstract static class OpenBuilder<T extends Builder, Y extends AbstractFieldMapper> extends AbstractFieldMapper.Builder<T, Y> {
protected OpenBuilder(String name, FieldType fieldType) {
super(name, fieldType);
}
@Override
public T index(boolean index) {
return super.index(index);
}
@Override
public T store(boolean store) {
return super.store(store);
}
@Override
public T storeTermVectors(boolean termVectors) {
return super.storeTermVectors(termVectors);
}
@Override
public T storeTermVectorOffsets(boolean termVectorOffsets) {
return super.storeTermVectorOffsets(termVectorOffsets);
}
@Override
public T storeTermVectorPositions(boolean termVectorPositions) {
return super.storeTermVectorPositions(termVectorPositions);
}
@Override
public T storeTermVectorPayloads(boolean termVectorPayloads) {
return super.storeTermVectorPayloads(termVectorPayloads);
}
@Override
public T tokenized(boolean tokenized) {
return super.tokenized(tokenized);
}
@Override
public T boost(float boost) {
return super.boost(boost);
}
@Override
public T omitNorms(boolean omitNorms) {
return super.omitNorms(omitNorms);
}
@Override
public T indexOptions(IndexOptions indexOptions) {
return super.indexOptions(indexOptions);
}
@Override
public T indexName(String indexName) {
return super.indexName(indexName);
}
@Override
public T indexAnalyzer(NamedAnalyzer indexAnalyzer) {
return super.indexAnalyzer(indexAnalyzer);
}
@Override
public T searchAnalyzer(NamedAnalyzer searchAnalyzer) {
return super.searchAnalyzer(searchAnalyzer);
}
@Override
public T similarity(SimilarityProvider similarity) {
return super.similarity(similarity);
}
// Fixed: @Override was missing here, inconsistent with every other widened method.
@Override
public T fieldDataSettings(String settings) {
return super.fieldDataSettings(settings);
}
}
/**
 * Base builder for field mappers: accumulates Lucene FieldType flags,
 * analyzers, postings format, similarity and field-data settings before the
 * mapper is built. All setters return {@code builder} for chaining.
 */
public abstract static class Builder<T extends Builder, Y extends AbstractFieldMapper> extends Mapper.Builder<T, Y> {
// Mutable while building; the constructed mapper freezes it.
protected final FieldType fieldType;
protected float boost = Defaults.BOOST;
// Tracks whether omitNorms was explicitly set, as opposed to defaulted.
protected boolean omitNormsSet = false;
protected String indexName;
protected NamedAnalyzer indexAnalyzer;
protected NamedAnalyzer searchAnalyzer;
protected Boolean includeInAll;
// Tracks whether indexOptions was explicitly set, as opposed to defaulted.
protected boolean indexOptionsSet = false;
protected PostingsFormatProvider provider;
protected SimilarityProvider similarity;
@Nullable
protected Settings fieldDataSettings;
protected Builder(String name, FieldType fieldType) {
super(name);
this.fieldType = fieldType;
}
protected T index(boolean index) {
this.fieldType.setIndexed(index);
return builder;
}
protected T store(boolean store) {
this.fieldType.setStored(store);
return builder;
}
protected T storeTermVectors(boolean termVectors) {
this.fieldType.setStoreTermVectors(termVectors);
return builder;
}
protected T storeTermVectorOffsets(boolean termVectorOffsets) {
// NOTE(review): also toggles the main term-vectors flag; calling with false
// would clear term vectors entirely — presumably only ever called with true.
this.fieldType.setStoreTermVectors(termVectorOffsets);
this.fieldType.setStoreTermVectorOffsets(termVectorOffsets);
return builder;
}
protected T storeTermVectorPositions(boolean termVectorPositions) {
// Same coupling as storeTermVectorOffsets: positions imply term vectors.
this.fieldType.setStoreTermVectors(termVectorPositions);
this.fieldType.setStoreTermVectorPositions(termVectorPositions);
return builder;
}
protected T storeTermVectorPayloads(boolean termVectorPayloads) {
// Same coupling as storeTermVectorOffsets: payloads imply term vectors.
this.fieldType.setStoreTermVectors(termVectorPayloads);
this.fieldType.setStoreTermVectorPayloads(termVectorPayloads);
return builder;
}
protected T tokenized(boolean tokenized) {
this.fieldType.setTokenized(tokenized);
return builder;
}
protected T boost(float boost) {
this.boost = boost;
return builder;
}
protected T omitNorms(boolean omitNorms) {
this.fieldType.setOmitNorms(omitNorms);
this.omitNormsSet = true;
return builder;
}
protected T indexOptions(IndexOptions indexOptions) {
this.fieldType.setIndexOptions(indexOptions);
this.indexOptionsSet = true;
return builder;
}
protected T indexName(String indexName) {
this.indexName = indexName;
return builder;
}
protected T indexAnalyzer(NamedAnalyzer indexAnalyzer) {
this.indexAnalyzer = indexAnalyzer;
return builder;
}
protected T searchAnalyzer(NamedAnalyzer searchAnalyzer) {
this.searchAnalyzer = searchAnalyzer;
return builder;
}
protected T includeInAll(Boolean includeInAll) {
this.includeInAll = includeInAll;
return builder;
}
protected T postingsFormat(PostingsFormatProvider postingsFormat) {
this.provider = postingsFormat;
return builder;
}
protected T similarity(SimilarityProvider similarity) {
this.similarity = similarity;
return builder;
}
protected T fieldDataSettings(String settings) {
// Settings arrive as a ';'-delimited string, e.g. "format=...;loading=...".
this.fieldDataSettings = ImmutableSettings.builder().loadFromDelimitedString(settings, ';').build();
return builder;
}
/** Resolves the complete name set (logical name, index name, full path, source path). */
protected Names buildNames(BuilderContext context) {
return new Names(name, buildIndexName(context), indexName == null ? name : indexName, buildFullName(context), context.path().sourcePath());
}
/** Index name defaults to the logical name; both are resolved against the current path. */
protected String buildIndexName(BuilderContext context) {
String actualIndexName = indexName == null ? name : indexName;
return context.path().pathAsText(actualIndexName);
}
protected String buildFullName(BuilderContext context) {
return context.path().fullPathAsText(name);
}
}
// Resolved name set (logical name, index name, full path) for this field.
protected final Names names;
// Index-time boost applied in parse() unless the subclass manages boost itself.
protected float boost;
// Frozen Lucene field type describing how this field is indexed/stored.
protected final FieldType fieldType;
protected final NamedAnalyzer indexAnalyzer;
protected NamedAnalyzer searchAnalyzer;
protected PostingsFormatProvider postingsFormat;
protected final SimilarityProvider similarity;
// User-supplied field-data settings; null means defaults only (see constructor).
protected Settings customFieldDataSettings;
protected FieldDataType fieldDataType;
/**
 * Initializes the mapper, freezing the field type and filling in defaults:
 * a keyword analyzer for indexed-but-not-analyzed fields, the subclass's
 * default postings format, and field-data settings merged over the defaults.
 *
 * @param fieldDataSettings extra field-data settings, or null for pure defaults
 */
protected AbstractFieldMapper(Names names, float boost, FieldType fieldType, NamedAnalyzer indexAnalyzer,
NamedAnalyzer searchAnalyzer, PostingsFormatProvider postingsFormat, SimilarityProvider similarity,
@Nullable Settings fieldDataSettings) {
this.names = names;
this.boost = boost;
this.fieldType = fieldType;
this.fieldType.freeze();
// automatically set to keyword analyzer if its indexed and not analyzed
if (indexAnalyzer == null && !this.fieldType.tokenized() && this.fieldType.indexed()) {
this.indexAnalyzer = Lucene.KEYWORD_ANALYZER;
} else {
this.indexAnalyzer = indexAnalyzer;
}
// automatically set to keyword analyzer if its indexed and not analyzed
if (searchAnalyzer == null && !this.fieldType.tokenized() && this.fieldType.indexed()) {
this.searchAnalyzer = Lucene.KEYWORD_ANALYZER;
} else {
this.searchAnalyzer = searchAnalyzer;
}
if (postingsFormat == null) {
if (defaultPostingFormat() != null) {
postingsFormat = PostingFormats.getAsProvider(defaultPostingFormat());
}
}
this.postingsFormat = postingsFormat;
this.similarity = similarity;
this.customFieldDataSettings = fieldDataSettings;
if (fieldDataSettings == null) {
this.fieldDataType = defaultFieldDataType();
} else {
// create a new field data type, with the default settings as well as the "new ones"
// NOTE(review): a Settings *builder* (not built Settings) is passed here —
// presumably FieldDataType accepts a builder overload; confirm.
this.fieldDataType = new FieldDataType(defaultFieldDataType().getType(),
ImmutableSettings.builder().put(defaultFieldDataType().getSettings()).put(fieldDataSettings)
);
}
}
/** Subclass hook: name of the default postings format, or null for the global default. */
@Nullable
protected String defaultPostingFormat() {
return null;
}
@Override
public String name() {
return names.name();
}
@Override
public Names names() {
return this.names;
}
/** The field type subclasses start from when none is supplied explicitly. */
public abstract FieldType defaultFieldType();
/** The field-data type used when no custom field-data settings are supplied. */
public abstract FieldDataType defaultFieldDataType();
@Override
public final FieldDataType fieldDataType() {
return fieldDataType;
}
@Override
public FieldType fieldType() {
return fieldType;
}
@Override
public float boost() {
return this.boost;
}
@Override
public Analyzer indexAnalyzer() {
return this.indexAnalyzer;
}
@Override
public Analyzer searchAnalyzer() {
return this.searchAnalyzer;
}
/** Defaults to the regular search analyzer; subclasses may use a different one for quoted text. */
@Override
public Analyzer searchQuoteAnalyzer() {
return this.searchAnalyzer;
}
@Override
public SimilarityProvider similarity() {
return similarity;
}
/**
 * Parses this field from the document: asks the subclass to create the Lucene
 * field, applies the configured boost unless the subclass handles boost
 * itself, and adds the field to the document (listeners may veto). Any
 * failure is rethrown as a MapperParsingException carrying the full name.
 */
@Override
public void parse(ParseContext context) throws IOException {
try {
Field field = parseCreateField(context);
if (field == null) {
// Subclass decided there is nothing to index for this document.
return;
}
if (!customBoost()) {
field.setBoost(boost);
}
if (context.listener().beforeFieldAdded(this, field, context)) {
context.doc().add(field);
}
} catch (Exception e) {
throw new MapperParsingException("failed to parse [" + names.fullName() + "]", e);
}
}
/** Creates the Lucene field for the current document, or null to index nothing. */
protected abstract Field parseCreateField(ParseContext context) throws IOException;
/**
 * Derived classes can override it to specify that boost value is set by derived classes.
 */
protected boolean customBoost() {
return false;
}
/** Reports this mapper to the listener; leaf field mappers have no children to visit. */
@Override
public void traverse(FieldMapperListener fieldMapperListener) {
fieldMapperListener.fieldMapper(this);
}
/** Field mappers are not object mappers, so there is nothing to report. */
@Override
public void traverse(ObjectMapperListener objectMapperListener) {
// nothing to do here...
}
/** Converts a stored/source value to its search representation; identity by default. */
@Override
public Object valueForSearch(Object value) {
return value;
}
/** Converts a query value into the BytesRef form stored in the index. */
@Override
public BytesRef indexedValueForSearch(Object value) {
return BytesRefs.toBytesRef(value);
}
/** Optional specialized term query for query-string parsing; null means "no specialization". */
@Override
public Query queryStringTermQuery(Term term) {
return null;
}
/** Whether the query-string parser should bypass analysis and issue a raw term query. */
@Override
public boolean useTermQueryWithQueryString() {
return false;
}
/** Builds an exact-match term query for this field from a raw value. */
@Override
public Query termQuery(Object value, @Nullable QueryParseContext context) {
return new TermQuery(names().createIndexNameTerm(indexedValueForSearch(value)));
}
/** Builds an exact-match term filter for this field from a raw value. */
@Override
public Filter termFilter(Object value, @Nullable QueryParseContext context) {
return new TermFilter(names().createIndexNameTerm(indexedValueForSearch(value)));
}
/**
 * Builds a filter matching documents whose field value equals any of the
 * given values, converting each via {@link #indexedValueForSearch(Object)}.
 *
 * @param values  raw query values to match against
 * @param context query parse context (unused by this base implementation)
 */
@Override
public Filter termsFilter(List<Object> values, @Nullable QueryParseContext context) {
    BytesRef[] converted = new BytesRef[values.size()];
    int slot = 0;
    for (Object value : values) {
        converted[slot++] = indexedValueForSearch(value);
    }
    return new TermsFilter(names.indexName(), converted);
}
/**
 * Builds a term-range query on this field; null bounds mean open-ended.
 */
@Override
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
return new TermRangeQuery(names.indexName(),
lowerTerm == null ? null : indexedValueForSearch(lowerTerm),
upperTerm == null ? null : indexedValueForSearch(upperTerm),
includeLower, includeUpper);
}
/**
 * Builds a term-range filter on this field; null bounds mean open-ended.
 */
@Override
public Filter rangeFilter(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
return new TermRangeFilter(names.indexName(),
lowerTerm == null ? null : indexedValueForSearch(lowerTerm),
upperTerm == null ? null : indexedValueForSearch(upperTerm),
includeLower, includeUpper);
}
/**
 * Builds a fuzzy query, translating the legacy float similarity into an
 * edit distance relative to the term's code-point length.
 */
@Override
public Query fuzzyQuery(String value, String minSim, int prefixLength, int maxExpansions, boolean transpositions) {
int edits = FuzzyQuery.floatToEdits(Float.parseFloat(minSim), value.codePointCount(0, value.length()));
return new FuzzyQuery(names.createIndexNameTerm(indexedValueForSearch(value)), edits, prefixLength, maxExpansions, transpositions);
}
/**
 * Builds a prefix query for this field, applying the caller's rewrite
 * method when one is supplied.
 */
@Override
public Query prefixQuery(Object value, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryParseContext context) {
    PrefixQuery prefixQuery = new PrefixQuery(names().createIndexNameTerm(indexedValueForSearch(value)));
    if (method == null) {
        return prefixQuery;
    }
    prefixQuery.setRewriteMethod(method);
    return prefixQuery;
}
/** Builds a prefix filter for this field from a raw value. */
@Override
public Filter prefixFilter(Object value, @Nullable QueryParseContext context) {
return new PrefixFilter(names().createIndexNameTerm(indexedValueForSearch(value)));
}
/**
 * Builds a regexp query for this field, applying the caller's rewrite
 * method when one is supplied.
 */
@Override
public Query regexpQuery(Object value, int flags, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryParseContext context) {
RegexpQuery query = new RegexpQuery(names().createIndexNameTerm(indexedValueForSearch(value)), flags);
if (method != null) {
query.setRewriteMethod(method);
}
return query;
}
/** Builds a regexp filter for this field from a raw value and syntax flags. */
@Override
public Filter regexpFilter(Object value, int flags, @Nullable QueryParseContext parseContext) {
return new RegexpFilter(names().createIndexNameTerm(indexedValueForSearch(value)), flags);
}
/** Filter matching documents with the configured null_value; none by default. */
@Override
public Filter nullValueFilter() {
return null;
}
/**
 * Merges another mapper into this one. Incompatible, non-updatable settings
 * (index/store/term-vector flags, analyzers, similarity) are reported as
 * conflicts; updatable settings (boost, postings format, search analyzer,
 * field-data settings) are applied only when not simulating.
 */
@Override
public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
if (!this.getClass().equals(mergeWith.getClass())) {
String mergedType = mergeWith.getClass().getSimpleName();
if (mergeWith instanceof AbstractFieldMapper) {
mergedType = ((AbstractFieldMapper) mergeWith).contentType();
}
mergeContext.addConflict("mapper [" + names.fullName() + "] of different type, current_type [" + contentType() + "], merged_type [" + mergedType + "]");
// different types, return
return;
}
AbstractFieldMapper fieldMergeWith = (AbstractFieldMapper) mergeWith;
// NOTE(review): tokenized() is compared here AND again below, so a tokenize
// mismatch records two conflicts — looks redundant; confirm before changing.
if (this.fieldType().indexed() != fieldMergeWith.fieldType().indexed() || this.fieldType().tokenized() != fieldMergeWith.fieldType().tokenized()) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different index values");
}
if (this.fieldType().stored() != fieldMergeWith.fieldType().stored()) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different store values");
}
if (this.fieldType().tokenized() != fieldMergeWith.fieldType().tokenized()) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different tokenize values");
}
if (this.fieldType().storeTermVectors() != fieldMergeWith.fieldType().storeTermVectors()) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different store_term_vector values");
}
if (this.fieldType().storeTermVectorOffsets() != fieldMergeWith.fieldType().storeTermVectorOffsets()) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different store_term_vector_offsets values");
}
if (this.fieldType().storeTermVectorPositions() != fieldMergeWith.fieldType().storeTermVectorPositions()) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different store_term_vector_positions values");
}
if (this.fieldType().storeTermVectorPayloads() != fieldMergeWith.fieldType().storeTermVectorPayloads()) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different store_term_vector_payloads values");
}
// Index analyzer must match on both null-ness and name.
if (this.indexAnalyzer == null) {
if (fieldMergeWith.indexAnalyzer != null) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different index_analyzer");
}
} else if (fieldMergeWith.indexAnalyzer == null) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different index_analyzer");
} else if (!this.indexAnalyzer.name().equals(fieldMergeWith.indexAnalyzer.name())) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different index_analyzer");
}
// Similarity must match on both null-ness and value.
if (this.similarity == null) {
if (fieldMergeWith.similarity() != null) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different similarity");
}
} else if (fieldMergeWith.similarity() == null) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different similarity");
} else if (!this.similarity().equals(fieldMergeWith.similarity())) {
mergeContext.addConflict("mapper [" + names.fullName() + "] has different similarity");
}
if (!mergeContext.mergeFlags().simulate()) {
// apply changeable values
this.boost = fieldMergeWith.boost;
if (fieldMergeWith.postingsFormat != null) {
this.postingsFormat = fieldMergeWith.postingsFormat;
}
if (fieldMergeWith.searchAnalyzer != null) {
this.searchAnalyzer = fieldMergeWith.searchAnalyzer;
}
if (fieldMergeWith.customFieldDataSettings != null) {
if (!Objects.equal(fieldMergeWith.customFieldDataSettings, this.customFieldDataSettings)) {
this.customFieldDataSettings = fieldMergeWith.customFieldDataSettings;
// Rebuild the field-data type from defaults overlaid with the new custom settings.
this.fieldDataType = new FieldDataType(defaultFieldDataType().getType(),
ImmutableSettings.builder().put(defaultFieldDataType().getSettings()).put(this.customFieldDataSettings)
);
mergeContext.addFieldDataChange(this);
}
}
}
}
/** @return the postings format provider for this field, or null for the index default. */
@Override
public PostingsFormatProvider postingsFormatProvider() {
return postingsFormat;
}
/** Serializes this mapper as an object keyed by the field name; body delegated to doXContentBody. */
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(names.name());
doXContentBody(builder);
builder.endObject();
return builder;
}
/**
 * Writes this mapper's settings into the builder, emitting only values that
 * differ from the defaults so the serialized mapping stays minimal.
 */
protected void doXContentBody(XContentBuilder builder) throws IOException {
builder.field("type", contentType());
if (!names.name().equals(names.indexNameClean())) {
builder.field("index_name", names.indexNameClean());
}
if (boost != 1.0f) {
builder.field("boost", boost);
}
FieldType defaultFieldType = defaultFieldType();
if (fieldType.indexed() != defaultFieldType.indexed() ||
fieldType.tokenized() != defaultFieldType.tokenized()) {
builder.field("index", indexTokenizeOptionToString(fieldType.indexed(), fieldType.tokenized()));
}
if (fieldType.stored() != defaultFieldType.stored()) {
builder.field("store", fieldType.stored());
}
if (fieldType.storeTermVectors() != defaultFieldType.storeTermVectors()) {
builder.field("term_vector", termVectorOptionsToString(fieldType));
}
if (fieldType.omitNorms() != defaultFieldType.omitNorms()) {
builder.field("omit_norms", fieldType.omitNorms());
}
if (fieldType.indexOptions() != defaultFieldType.indexOptions()) {
builder.field("index_options", indexOptionToString(fieldType.indexOptions()));
}
// Internal ("_"-prefixed) and "default" analyzers are never serialized explicitly.
if (indexAnalyzer != null && searchAnalyzer != null && indexAnalyzer.name().equals(searchAnalyzer.name()) && !indexAnalyzer.name().startsWith("_") && !indexAnalyzer.name().equals("default")) {
// same analyzers, output it once
builder.field("analyzer", indexAnalyzer.name());
} else {
if (indexAnalyzer != null && !indexAnalyzer.name().startsWith("_") && !indexAnalyzer.name().equals("default")) {
builder.field("index_analyzer", indexAnalyzer.name());
}
if (searchAnalyzer != null && !searchAnalyzer.name().startsWith("_") && !searchAnalyzer.name().equals("default")) {
builder.field("search_analyzer", searchAnalyzer.name());
}
}
if (postingsFormat != null) {
if (!postingsFormat.name().equals(defaultPostingFormat())) {
builder.field("postings_format", postingsFormat.name());
}
}
if (similarity() != null) {
builder.field("similarity", similarity().name());
}
if (customFieldDataSettings != null) {
builder.field("fielddata", customFieldDataSettings.toDelimitedString(';'));
}
}
/**
 * Maps a Lucene {@link IndexOptions} value to its mapping-DSL string form.
 *
 * @throws ElasticSearchIllegalArgumentException for an unrecognized option
 */
protected static String indexOptionToString(IndexOptions indexOption) {
    if (indexOption == IndexOptions.DOCS_ONLY) {
        return TypeParsers.INDEX_OPTIONS_DOCS;
    }
    if (indexOption == IndexOptions.DOCS_AND_FREQS) {
        return TypeParsers.INDEX_OPTIONS_FREQS;
    }
    if (indexOption == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) {
        return TypeParsers.INDEX_OPTIONS_POSITIONS;
    }
    if (indexOption == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) {
        return TypeParsers.INDEX_OPTIONS_OFFSETS;
    }
    throw new ElasticSearchIllegalArgumentException("Unknown IndexOptions [" + indexOption + "]");
}
/**
 * Maps a field type's term-vector flags to the mapping-DSL string form
 * ("no", "yes", "with_offsets", or "with_positions[_offsets][_payloads]").
 * Payloads are only reported when positions are stored, matching the
 * original serialization behavior.
 */
protected static String termVectorOptionsToString(FieldType fieldType) {
    if (!fieldType.storeTermVectors()) {
        return "no";
    }
    boolean positions = fieldType.storeTermVectorPositions();
    boolean offsets = fieldType.storeTermVectorOffsets();
    if (!positions && !offsets) {
        return "yes";
    }
    if (!positions) {
        // offsets only
        return "with_offsets";
    }
    // positions are stored; append offsets/payloads as applicable
    StringBuilder result = new StringBuilder("with_positions");
    if (offsets) {
        result.append("_offsets");
    }
    if (fieldType.storeTermVectorPayloads()) {
        result.append("_payloads");
    }
    return result.toString();
}
/**
 * Maps the (indexed, tokenized) flag pair to the mapping-DSL "index" value:
 * "no" when not indexed, else "analyzed" or "not_analyzed".
 */
protected static String indexTokenizeOptionToString(boolean indexed, boolean tokenized) {
    return !indexed ? "no" : (tokenized ? "analyzed" : "not_analyzed");
}
/** Subclasses return the mapping type name serialized as "type" (e.g. "string"). */
protected abstract String contentType();
/** Releases resources held by this mapper; no-op in the base class. */
@Override
public void close() {
// nothing to do here, sub classes to override if needed
}
}
| |
/*
* Copyright 2001-2005 Internet2
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package gov.nih.nci.cagrid.opensaml;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
import javax.xml.namespace.QName;
import org.apache.log4j.Logger;
import org.apache.log4j.NDC;
import org.w3c.dom.*;
/**
* Implementation of SOAP binding packaging methods, useful as a base class
* for full binding implementations once a transport protocol is added by
* implementing the actual binding methods.
*
* @author Scott Cantor
* @created February 12, 2005
*/
public abstract class SOAPBinding implements SAMLSOAPBinding
{
    private static SAMLConfig config = SAMLConfig.instance();
    private Logger log = Logger.getLogger(SOAPBinding.class.getName());

    /** Registered SOAP hooks, each mapped to the global context object supplied when it was added. */
    private Map /* <SOAPHook,Object> */ soapHooks = Collections.synchronizedMap(new HashMap(4));

    /**
     * Registers a SOAP processing hook with no global context.
     *
     * @see gov.nih.nci.cagrid.opensaml.SAMLSOAPBinding#addHook(gov.nih.nci.cagrid.opensaml.SAMLSOAPBinding.SOAPHook)
     */
    public void addHook(SOAPHook h) {
        addHook(h, null);
    }

    /**
     * Registers a SOAP processing hook together with a global context object
     * that is handed back to the hook on every invocation.
     *
     * @see gov.nih.nci.cagrid.opensaml.SAMLSOAPBinding#addHook(gov.nih.nci.cagrid.opensaml.SAMLSOAPBinding.SOAPHook, java.lang.Object)
     */
    public void addHook(SOAPHook h, Object globalCtx) {
        soapHooks.put(h, globalCtx);
    }

    /**
     * Packages a SAML request into a SOAP 1.1 envelope and runs the outgoing
     * client-side hooks over the result.
     *
     * @param request the SAML request to package
     * @param callCtx opaque per-call context handed to each hook
     * @return the SOAP Envelope element containing the request
     * @throws SAMLException if a hook aborts processing
     * @see gov.nih.nci.cagrid.opensaml.SAMLSOAPBinding#sendRequest(gov.nih.nci.cagrid.opensaml.SAMLRequest, java.lang.Object)
     */
    public Element sendRequest(SAMLRequest request, Object callCtx) throws SAMLException {
        NDC.push("sendRequest");
        try {
            // Turn the request into a DOM, and use its document for the SOAP nodes.
            Document doc = request.toDOM().getOwnerDocument();

            // Build a SOAP envelope and body.
            Element e = doc.createElementNS(XML.SOAP11ENV_NS, "Envelope");
            e.setAttributeNS(XML.XMLNS_NS, "xmlns", XML.SOAP11ENV_NS);
            Element body = doc.createElementNS(XML.SOAP11ENV_NS, "Body");
            e.appendChild(body);

            // Attach SAML request, then make the envelope the document root.
            body.appendChild(request.toDOM());
            if (doc.getDocumentElement() == null)
                doc.appendChild(e);
            else
                doc.replaceChild(e, doc.getDocumentElement());

            // Run the outgoing client-side SOAP hooks.
            for (Iterator hooks = soapHooks.entrySet().iterator(); hooks.hasNext();) {
                Entry h = (Entry) hooks.next();
                if (!((SOAPHook) h.getKey()).outgoing(e, h.getValue(), callCtx)) {
                    // Detach the request again so the caller's DOM is left clean.
                    body.removeChild(request.toDOM());
                    log.warn("SOAP processing hook returned false, aborting outgoing request");
                    throw new BindingException(SAMLException.REQUESTER, "SOAPBinding.sendRequest() SOAP processing hook returned false, aborted outgoing request");
                }
            }
            return e;
        }
        finally {
            NDC.pop();
        }
    }

    /**
     * Unpacks a SAML response from a received SOAP 1.1 envelope, running the
     * incoming client-side hooks, honoring mustUnderstand headers, and
     * converting SOAP faults into exceptions.
     *
     * @param envelope the received SOAP Envelope element
     * @param callCtx  opaque per-call context handed to each hook
     * @return the decoded SAML response
     * @throws SAMLException on a bad envelope, mandatory header, or SOAP fault
     * @see gov.nih.nci.cagrid.opensaml.SAMLSOAPBinding#recvResponse(org.w3c.dom.Element, java.lang.Object)
     */
    public SAMLResponse recvResponse(Element envelope, Object callCtx) throws SAMLException {
        NDC.push("recvResponse");
        try {
            // The root must be a SOAP 1.1 envelope.
            if (!XML.isElementNamed(envelope, XML.SOAP11ENV_NS, "Envelope"))
                throw new BindingException("SOAPBinding.recvResponse() detected an incompatible or missing SOAP envelope");

            // Run the incoming client-side SOAP hooks.
            for (Iterator hooks = soapHooks.entrySet().iterator(); hooks.hasNext();) {
                Entry h = (Entry) hooks.next();
                if (!((SOAPHook) h.getKey()).incoming(envelope, h.getValue(), callCtx)) {
                    log.warn("SOAP processing hook returned false, aborting incoming response");
                    throw new BindingException(SAMLException.REQUESTER, "SOAPBinding.recvResponse() SOAP processing hook returned false, aborted incoming response");
                }
            }

            Element n = XML.getFirstChildElement(envelope);
            if (XML.isElementNamed(n, XML.SOAP11ENV_NS, "Header")) {
                // Did somebody get a look at the headers for us?
                if (soapHooks.isEmpty()) {
                    /* Walk the children. If we encounter any headers with mustUnderstand, we have to bail.
                     * The thinking here is, we're not a "real" SOAP processor, but we have to emulate one that
                     * understands no headers. For now, assume we're the recipient.
                     */
                    Element header = XML.getFirstChildElement(n);
                    while (header != null) {
                        // Guard against DOM implementations that return null for a missing attribute.
                        String mustUnderstand = header.getAttributeNS(XML.SOAP11ENV_NS, "mustUnderstand");
                        if (mustUnderstand != null && mustUnderstand.equals("1"))
                            throw new SOAPException(SOAPException.MUSTUNDERSTAND, "SOAPBinding.recvResponse() detected a mandatory SOAP header");
                        header = XML.getNextSiblingElement(header);
                    }
                }
                n = XML.getNextSiblingElement(n); // advance to body
            }

            if (n != null) {
                // Get the first (and only) child element of the Body.
                n = XML.getFirstChildElement(n);
                if (n != null) {
                    // Is it a fault?
                    if (XML.isElementNamed(n, XML.SOAP11ENV_NS, "Fault")) {
                        // Find the faultstring element and use it in the message.
                        NodeList nlist = n.getElementsByTagNameNS(null, "faultstring");
                        String msg;
                        if (nlist != null && nlist.getLength() > 0)
                            msg = nlist.item(0).getFirstChild().getNodeValue();
                        else
                            msg = "SAMLSOAPBinding.recvResponse() detected a SOAP fault";
                        // BUG FIX: the original re-queried "faultstring" here; the QName carried
                        // by the SOAPException must come from the faultcode element instead.
                        nlist = n.getElementsByTagNameNS(null, "faultcode");
                        if (nlist != null && nlist.getLength() > 0)
                            throw new SOAPException(XML.getQNameTextNode((Text) nlist.item(0).getFirstChild()), msg);
                        else
                            throw new SOAPException(SOAPException.SERVER, msg);
                    }
                    return new SAMLResponse(n);
                }
            }
            throw new SOAPException(SOAPException.SERVER, "SOAPBinding.recvResponse() unable to find a SAML response or fault in SOAP body");
        }
        finally {
            NDC.pop();
        }
    }

    /**
     * Unpacks a SAML request from a received SOAP 1.1 envelope, running the
     * incoming server-side hooks and honoring mustUnderstand headers.
     *
     * @param envelope the received SOAP Envelope element
     * @param callCtx  opaque per-call context handed to each hook
     * @return the decoded SAML request
     * @throws SAMLException on a bad envelope or a mandatory header
     * @see gov.nih.nci.cagrid.opensaml.SAMLSOAPBinding#recvRequest(org.w3c.dom.Element, java.lang.Object)
     */
    public SAMLRequest recvRequest(Element envelope, Object callCtx) throws SAMLException {
        NDC.push("recvRequest");
        try {
            // The root must be a SOAP 1.1 envelope.
            if (!XML.isElementNamed(envelope, XML.SOAP11ENV_NS, "Envelope"))
                throw new SOAPException(SOAPException.VERSION, "SOAPBinding.recvRequest() detected an incompatible or missing SOAP envelope");

            // Run the incoming server-side SOAP hooks.
            for (Iterator hooks = soapHooks.entrySet().iterator(); hooks.hasNext();) {
                Entry h = (Entry) hooks.next();
                if (!((SOAPHook) h.getKey()).incoming(envelope, h.getValue(), callCtx)) {
                    log.warn("SOAP processing hook returned false, aborting incoming request");
                    throw new BindingException(SAMLException.REQUESTER, "SOAPBinding.recvRequest() SOAP processing hook returned false, aborted incoming request");
                }
            }

            Element child = XML.getFirstChildElement(envelope);
            if (XML.isElementNamed(child, XML.SOAP11ENV_NS, "Header")) {
                // Did somebody get a look at the headers for us?
                if (soapHooks.isEmpty()) {
                    /* Walk the children. If we encounter any headers with mustUnderstand, we have to bail.
                     * The thinking here is, we're not a "real" SOAP processor, but we have to emulate one that
                     * understands no headers. For now, assume we're the recipient.
                     */
                    Element header = XML.getFirstChildElement(child);
                    while (header != null) {
                        // Same null-safe guard as recvResponse for consistency across bindings.
                        String mustUnderstand = header.getAttributeNS(XML.SOAP11ENV_NS, "mustUnderstand");
                        if (mustUnderstand != null && mustUnderstand.equals("1"))
                            throw new SOAPException(SOAPException.MUSTUNDERSTAND, "SOAPBinding.recvRequest() detected a mandatory SOAP header");
                        header = XML.getNextSiblingElement(header);
                    }
                }
                // Advance to the Body element.
                child = XML.getNextSiblingElement(child);
            }

            /* The element after the optional Header is the mandatory Body (the meat). The SAML
               SOAP binding specifies the samlp:Request be immediately inside the body. Until
               we locate a Request (which we know validated), we're still in SOAP land. A SOAP
               envelope without a samlp:Request inside it is treated as a SOAP Client fault.
             */
            if (child != null)
                child = XML.getFirstChildElement(child);
            return new SAMLRequest(child);
        }
        finally {
            NDC.pop();
        }
    }

    /**
     * Packages a SAML response (or a SOAP fault built from the given
     * exception) into a SOAP 1.1 envelope and runs the outgoing server-side
     * hooks over the result.
     *
     * @param response the SAML response to send (ignored when e is non-null)
     * @param e        exception to convert into a SOAP fault, or null
     * @param callCtx  opaque per-call context handed to each hook
     * @return the SOAP Envelope element to transmit
     * @throws SAMLException if a hook aborts processing
     * @see gov.nih.nci.cagrid.opensaml.SAMLSOAPBinding#sendResponse(gov.nih.nci.cagrid.opensaml.SAMLResponse, gov.nih.nci.cagrid.opensaml.SAMLException, java.lang.Object)
     */
    public Element sendResponse(SAMLResponse response, SAMLException e, Object callCtx) throws SAMLException {
        NDC.push("sendResponse");
        try {
            // Reuse the response's document, or start a fresh one for a fault.
            Document doc = (e == null) ? response.toDOM().getOwnerDocument() : XML.parserPool.newDocument();

            // Build the SOAP envelope and body for the response.
            Element env = doc.createElementNS(XML.SOAP11ENV_NS, "soap:Envelope");
            env.setAttributeNS(XML.XMLNS_NS, "xmlns:soap", XML.SOAP11ENV_NS);
            env.setAttributeNS(XML.XMLNS_NS, "xmlns:xsd", XML.XSD_NS);
            env.setAttributeNS(XML.XMLNS_NS, "xmlns:xsi", XML.XSI_NS);
            if (doc.getDocumentElement() == null)
                doc.appendChild(env);
            else
                doc.replaceChild(env, doc.getDocumentElement());

            Element body = doc.createElementNS(XML.SOAP11ENV_NS, "soap:Body");
            env.appendChild(body);

            // If we're handed an exception, turn it into a SOAP fault.
            if (e != null) {
                Element fault = doc.createElementNS(XML.SOAP11ENV_NS, "soap:Fault");
                body.appendChild(fault);

                Element elem = doc.createElementNS(null, "faultcode");
                if (e instanceof SOAPException) {
                    // Use the exception's first fault code if it carries one.
                    Iterator codes = e.getCodes();
                    if (codes.hasNext())
                        elem.appendChild(doc.createTextNode("soap:" + ((QName) codes.next()).getLocalPart()));
                    else
                        elem.appendChild(doc.createTextNode("soap:" + SOAPException.SERVER.getLocalPart()));
                }
                else
                    elem.appendChild(doc.createTextNode("soap:" + SOAPException.SERVER.getLocalPart()));
                fault.appendChild(elem);

                elem = doc.createElementNS(null, "faultstring");
                fault.appendChild(elem).appendChild(doc.createTextNode(e.getMessage()));
            }
            else {
                // Attach the SAML response.
                body.appendChild(response.toDOM());
            }

            // Run the outgoing server-side SOAP hooks.
            for (Iterator hooks = soapHooks.entrySet().iterator(); hooks.hasNext();) {
                Entry h = (Entry) hooks.next();
                if (!((SOAPHook) h.getKey()).outgoing(env, h.getValue(), callCtx)) {
                    body.removeChild(response.toDOM());
                    log.warn("SOAP processing hook returned false, aborting outgoing response");
                    throw new BindingException("SOAPBinding.sendResponse() SOAP processing hook returned false, aborted outgoing response");
                }
            }
            return env;
        }
        finally {
            NDC.pop();
        }
    }
}
| |
// Copyright (c) 2006, Regents of the University of California
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
// * Neither the name of the University of California, San Diego (UCSD) nor
// the names of its contributors may be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
// IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
// THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
//
// FILE
// HTTPFileSystem.java - edu.sdsc.grid.io.HTTPFileSystem
//
// CLASS HIERARCHY
// java.lang.Object
// |
// +-edu.sdsc.grid.io.GeneralFileSystem
// |
// +-.RemoteFileSystem
// |
// +.http.HTTPFileSystem
//
// PRINCIPAL AUTHOR
// Lucas Gilbert, SDSC/UCSD
//
//
package edu.sdsc.grid.io.http;
import edu.sdsc.grid.io.local.*;
import edu.sdsc.grid.io.*;
import java.io.*;
import java.net.URI;
import java.net.URLConnection;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* The HTTPFileSystem class is the class for connection implementations to HTTP
* servers.
*<P>
*
* @author Lucas Gilbert, San Diego Supercomputer Center
* @since Jargon2.0
*/
public class HTTPFileSystem extends RemoteFileSystem {
    /**
     * HTTP has only one root, "/".
     */
    public static final String HTTP_ROOT = "/";

    /**
     * Use this account object instead of the parent class's GeneralAccount
     * object. Just so you don't have to recast it all the time.
     */
    private HTTPAccount httpAccount;

    /** Connection opened at construction time; set to null by close(). */
    URLConnection conn;

    private static Logger log = LoggerFactory.getLogger(HTTPFileSystem.class);

    /**
     * Opens a socket connection to read from and write to. Opens the account
     * held in the HTTPAccount object. The account information stored in this
     * object cannot be changed once constructed.
     *
     * @param httpAccount
     *            The HTTP account information object.
     * @throws NullPointerException
     *             if httpAccount is null.
     * @throws IOException
     *             if an IOException occurs.
     */
    public HTTPFileSystem(HTTPAccount httpAccount) throws IOException {
        setAccount(httpAccount);
        conn = httpAccount.getURL().openConnection();
    }

    /**
     * Opens a socket connection to read from and write to. Opens the account
     * held in the URI. The account information stored in this object cannot be
     * changed once constructed.
     *
     * @param uri
     *            The URI identifying the HTTP resource. (Javadoc fixed: the
     *            original documented a nonexistent httpAccount parameter.)
     * @throws NullPointerException
     *             if uri is null.
     * @throws IOException
     *             if an IOException occurs.
     */
    public HTTPFileSystem(URI uri) throws IOException {
        setAccount(uri);
        conn = uri.toURL().openConnection();
    }

    // ----------------------------------------------------------------------
    // Setters and Getters
    // ----------------------------------------------------------------------
    /**
     * Loads the account information for this file system.
     *
     * @throws NullPointerException if account is null.
     */
    protected void setAccount(GeneralAccount account) throws IOException {
        if (account == null)
            throw new NullPointerException("Account information cannot be null");
        // Clone so later mutation of the caller's account cannot affect us.
        httpAccount = (HTTPAccount) account.clone();
        this.account = httpAccount;
    }

    /**
     * Loads the account information for this file system from a URI.
     *
     * @throws NullPointerException if uri is null.
     */
    protected void setAccount(URI uri) throws IOException {
        if (uri == null)
            throw new NullPointerException("Account information cannot be null");
        httpAccount = new HTTPAccount(uri);
        this.account = httpAccount;
    }

    /*
     * These three methods and anything that use them are quite ridiculous.
     * Create a new connection to the HTTP server everytime because
     * URLConnection doesn't behave like a regular socket.
     */
    URLConnection getNewConn() throws IOException {
        return httpAccount.getURL().openConnection();
    }

    InputStream getNewInputStream() throws IOException {
        return getNewConn().getInputStream();
    }

    OutputStream getNewOutputStream() throws IOException {
        return getNewConn().getOutputStream();
    }

    // ----------------------------------------------------------------------
    // Methods
    // ----------------------------------------------------------------------
    // General
    /**
     * Metadata queries are not supported over plain HTTP.
     *
     * @throws UnsupportedOperationException always.
     */
    public MetaDataRecordList[] query(MetaDataCondition[] conditions,
            MetaDataSelect[] selects) throws IOException {
        throw new UnsupportedOperationException();
    }

    /**
     * Metadata queries are not supported over plain HTTP.
     *
     * @throws UnsupportedOperationException always.
     */
    public MetaDataRecordList[] query(MetaDataCondition[] conditions,
            MetaDataSelect[] selects, int numberOfRecordsWanted)
            throws IOException {
        throw new UnsupportedOperationException();
    }

    /**
     * Returns the root directories of the HTTP file system.
     */
    public String[] getRootDirectories() {
        String[] root = { HTTP_ROOT };
        return root;
    }

    /**
     * Tests this filesystem object for equality with the given object. Returns
     * <code>true</code> if and only if the argument is not <code>null</code>
     * and both are filesystem objects connected to the same filesystem using
     * the same account information.
     *
     * @param obj
     *            The object to be compared with this abstract pathname
     *
     * @return <code>true</code> if and only if the objects are the same;
     *         <code>false</code> otherwise
     */
    public boolean equals(Object obj) {
        if (obj instanceof HTTPFileSystem) {
            if (toString().equals(obj.toString())) {
                return true;
            }
        }
        return false;
    }

    /**
     * BUG FIX: equals() was overridden without hashCode(), violating the
     * java.lang.Object contract (equal objects must report equal hash codes,
     * e.g. for HashMap/HashSet keys). Hash the same URL string equals()
     * compares so the two stay consistent.
     */
    public int hashCode() {
        return toString().hashCode();
    }

    /**
     * Returns a string representation of this file system object.
     * NOTE(review): throws NullPointerException once close() has nulled out
     * conn — confirm callers never touch a closed filesystem.
     */
    public String toString() {
        return conn.getURL().toString();
    }

    /**
     * Checks if the socket is connected.
     */
    public boolean isConnected() {
        if (conn != null) {
            // connect() is a no-op if already connected; an IOException means unreachable.
            try {
                conn.connect();
            } catch (IOException e) {
                return false;
            }
            return true;
        }
        return false;
    }

    /**
     * Closes the connection to the HTTP file system. The filesystem cannot be
     * reconnected after this method is called. If this object, or another
     * object which uses this filesystem, tries to send a command to the server
     * a ClosedChannelException will be thrown.
     */
    public void close() throws IOException {
        conn = null;
    }
}
| |
package jokrey.utilities.encoder.tag_based.implementation.length_indicator;
import jokrey.utilities.simple.data_structure.ExtendedIterator;
import jokrey.utilities.encoder.tag_based.TagBasedEncoder;
import jokrey.utilities.encoder.tag_based.additional.functionality.wrapper.delegation.thread_safe.SynchronizingTagBasedEncoder;
import jokrey.utilities.transparent_storage.TransparentStorage;
import java.util.ArrayList;
import java.util.Iterator;
/**
* Non-altering methods are reentrant and thread safe,
* as long as the non-altering methods in the used {@link LIe} are thread safe.
*
* Implementation of a TagBasedEncoder using SF as the StorageFormat
* This implementation simply encodes tuples of tag and content into a LIse instance.
* Add: O(1)
* Delete: O(n)
* Search: O(n)
*
* Fastest way to decode everything is to use the iterator.
* Then decoding is still in O(n), but that is the best case to decode n elements with any data structure.
*
* NOT THREAD SAFE - by design for performance reasons
* For a thread safe version wrap an implementation of this class into a {@link SynchronizingTagBasedEncoder} or build a custom version.
* A custom version should preferably use read-write locks, which is possible because the non-altering calls are reentrant.
*
* Due to performance optimizations heavily dependent on LIe (not it's subclasses though).
* This is perfectly fine, but has to be kept in mind
*
* @author jokrey
* @see TagBasedEncoder
*/
public abstract class LITagEncoder<SF> implements TagBasedEncoder<SF> {
//the underlying storage logic - untagged, sequence storage
protected final LIe<SF> lie;
/**
 * Initialises the internal storage model with the provided custom storage logic.
 *
 * @param used_storage_logic the length-indicator storage logic backing this encoder
 */
protected LITagEncoder(LIe<SF> used_storage_logic) {
lie = used_storage_logic;
// position = new AtomicReference<>(lie.reset());
}
/**
 * Initialises the internal storage model with the provided storage logic and
 * immediately loads a previously encoded SF into it.
 * Useful if one wants to decode a previously stored encoded SF.
 *
 * @param used_storage_logic the length-indicator storage logic backing this encoder
 * @param encoded previously encoded SF
 */
public LITagEncoder(LIe<SF> used_storage_logic, SF encoded) {
this(used_storage_logic);
readFromEncoded(encoded);
}
//helper
/**
 * Decodes a raw storage value back into its tag string.
 *
 * @param raw raw value holding an encoded tag, or null
 * @return the decoded tag, or null when {@code raw} is null
 */
protected String getTag(SF raw) {
    if (raw == null) {
        return null;
    }
    return getTypeTransformer().detransform_string(raw);
}
// private final AtomicReference<LIe.Position> position;
/**
 * Linearly scans the tag/content tuple sequence for the given tag.
 * The result (null when absent) carries: the entry's start index, its end
 * index, and the unsafe raw start index of the tag/entry pair (its end is
 * equal to entry_end_index).
 * Despite the commented-out member-variable position cache below, this
 * search uses only locals and therefore remains reentrant and thread safe
 * (as long as the non-altering LIe calls are).
 */
protected LITagSearchResult search(String tag) {
// TODO: search from old position, implemented but it has thread safety issues.
LIe.Position local_position = lie.reset();
// LIe.Position local_position = position.get().copy(); //copy so that the member variable isn't changed before completing the search
// would cause an issue if another thread was altering it, then we'd be jumping all over.
// LIe.Position start_search_position = local_position.copy(); // copy so that it isn't changed
long last_raw_position = lie.get(local_position);
do {
// Tuples are stored as (tag, entry); decode the tag, then skip its entry.
String dec_tag = getTag(lie.li_decode(local_position));
if(dec_tag != null) {
long entry_length = lie.li_skip(local_position); //skip also required to obtain a new valid pre-tag position.. ((%2==0))
if (tag.equals(dec_tag)) {
long entry_end_index = lie.get(local_position);
// position.set(local_position);
return new LITagSearchResult(entry_end_index - entry_length, entry_end_index, last_raw_position);
}
} else {
// End of the sequence reached without a match.
break;
// local_position = lie.reset();
}
last_raw_position = lie.get(local_position);
} while (true);
// } while (!start_search_position.equals(local_position));
// position.set(lie.reset());
return null;
}
/** Delegates to the default interface implementation (delete-then-add semantics). */
@Override public boolean addEntry(String tag, SF entry) {
return TagBasedEncoder.super.addEntry(tag, entry);
}
/**
 * Appends the (tag, entry) tuple without checking whether the tag already
 * exists — O(1) append at the end of the sequence.
 */
@Override public LITagEncoder<SF> addEntry_nocheck(String tag, SF entry) {
    SF encodedTag = getTypeTransformer().transform(tag);
    lie.li_encode(encodedTag).li_encode(entry);
    return this;
}
/**
 * Returns the entry stored under the given tag, or null when the tag is
 * not present — O(n) linear search.
 */
@Override public SF getEntry(String tag) {
    LITagSearchResult result = search(tag);
    if (result != null) {
        return lie.getStorageSystem().sub(result.entry_start_index, result.entry_end_index);
    }
    return null;
}
@Override public SF deleteEntry(String tag) {
LITagSearchResult sr = search(tag);
if(sr == null) return null;
SF val = lie.getStorageSystem().sub(sr.entry_start_index, sr.entry_end_index);
lie.getStorageSystem().delete(sr.raw_storage_start_index, sr.entry_end_index);
// position.set(lie.reset()); //todo:
// long raw_pos = lie.get(position);
// if(raw_pos > sr.raw_storage_start_index)
// lie.set(position, raw_pos - (sr.entry_end_index - sr.raw_storage_start_index));
return val;
}
@Override public boolean deleteEntry_noReturn(String tag) {
LITagSearchResult sr = search(tag);
if(sr == null) return false;
lie.getStorageSystem().delete(sr.raw_storage_start_index, sr.entry_end_index);
// position.set(lie.reset());//todo:
// long raw_pos = lie.get(position);
// if(raw_pos > sr.raw_storage_start_index)
// lie.set(position, raw_pos - (sr.entry_end_index - sr.raw_storage_start_index));
return true;
}
@Override public boolean exists(String tag) {
return search(tag)!= null;
}
@Override public long length(String tag) {
LITagSearchResult sr = search(tag);
if(sr == null) return -1;
return sr.entry_end_index - sr.entry_start_index;
}
@Override public String[] getTags() {
ArrayList<String> toReturn = new ArrayList<>();
LIe.Position pos = lie.reset();
String dec_tag;
while((dec_tag = getTag(lie.li_decode(pos))) != null && lie.li_skip(pos) != -1)
toReturn.add(dec_tag);
return toReturn.toArray(new String[0]);
}
@Override public LITagEncoder<SF> clear() {
lie.clear();
// position.set(lie.reset());
return this;
}
	/** Exposes the backing storage of the underlying length-indexed encoder. */
	@Override public TransparentStorage<SF> getRawStorageSystem() {
		return lie.getStorageSystem();
	}
@Override public LITagEncoder<SF> readFromEncoded(SF encoded_raw) {
lie.readFromEncoded(encoded_raw);
// position.set(lie.reset());
return this;
}
	/** Returns the full encoded form of all tag/entry pairs. */
	@Override public SF getEncoded() {
		return lie.getEncoded();
	}
	/**
	 * Iterates over the stored tag/entry pairs. Each call to {@code next()}
	 * consumes two consecutive items (tag, then entry) from the underlying iterator.
	 */
	@Override public Iterator<TaggedEntry<SF>> iterator() {
		ExtendedIterator<SF> lie_iterator = lie.iterator();
		return new Iterator<TaggedEntry<SF>>() {
			@Override public boolean hasNext() {
				// Items are written in tag/entry pairs, so a remaining item implies a full remaining pair.
				return lie_iterator.hasNext();
			}
			@Override public TaggedEntry<SF> next() {
				// Java evaluates arguments left-to-right, so the tag is consumed before its entry.
				return new TaggedEntry<>(getTag(lie_iterator.next()), lie_iterator.next());
			}
			@Override public void remove() {
				// Remove both halves of the last returned pair: the entry just yielded and its tag.
				lie_iterator.remove();
				lie_iterator.skip();
				lie_iterator.remove();
				// position.set(lie.reset());
			}
		};
	}
	/** Hash is delegated to the underlying encoding, consistent with {@link #equals}. */
	@Override public int hashCode() {
		return lie.hashCode();
	}
@Override public boolean equals(Object o) {
return o instanceof LITagEncoder && lie.equals(((LITagEncoder)o).lie);
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import com.google.j2objc.PrefixDummy;
import com.google.protobuf.ByteString;
import com.google.protobuf.Descriptors.Descriptor;
import com.google.protobuf.Descriptors.EnumDescriptor;
import com.google.protobuf.Descriptors.EnumValueDescriptor;
import com.google.protobuf.Descriptors.FieldDescriptor;
import com.google.protobuf.Descriptors.FieldDescriptor.Type;
import com.google.protobuf.ExtensionRegistry;
import com.google.protobuf.ExtensionRegistryLite;
import com.google.protobuf.GeneratedMessage;
import com.google.protobuf.InvalidProtocolBufferException;
import com.google.protobuf.Message;
import com.google.protobuf.MessageLite;
import com.google.protobuf.ProtocolMessageEnum;
import abc_def.gHiJkL.Foo2bar;
import abc_def.gHiJkL.Foo_bar;
import abc_def.gHiJkL.fooBar;
import protos.EmptyFile;
import protos.MsgWithDefaults;
import protos.MsgWithDefaultsOrBuilder;
import protos.MsgWithNestedExtensions;
import protos.MsgWithRequiredFields;
import protos.MsgWithSpecialFieldNames;
import protos.SingleFile;
import protos.Typical;
import protos.TypicalData;
import protos.TypicalDataMessage;
import protos.TypicalDataOrBuilder;
import protos.TypicalDataSet;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
/**
* Tests for various protocol buffer features to ensure that the generated
* objective-c wrappers and runtime are compatible where required.
* This test is run in both java and objective-c.
*/
public class CompatibilityTest extends ProtobufTest {
private static final File TESTDATA = getTestdataDir();
private static final File LARGEPROTO = new File(TESTDATA, "largeproto");
private static File getTestdataDir() {
File dir = new File("");
dir = dir.getAbsoluteFile();
while (dir != null) {
if ("protobuf".equals(dir.getName())) {
return new File(dir, "tests/testdata");
}
dir = dir.getParentFile();
}
throw new AssertionError("Could not find testdata dir");
}
private byte[] readFile(File file) throws IOException {
FileInputStream in = new FileInputStream(file);
try {
ByteArrayOutputStream out = new ByteArrayOutputStream();
byte[] buf = new byte[4096];
while (true) {
int r = in.read(buf);
if (r == -1) {
break;
}
out.write(buf, 0, r);
}
return out.toByteArray();
} finally {
in.close();
}
}
public void testSetAndGetInt() throws Exception {
TypicalData data = TypicalData.newBuilder().setMyInt(42).build();
assertEquals(42, data.getMyInt());
}
public void testSetAndGetByteString() throws Exception {
ByteString bstr = ByteString.copyFrom("foo".getBytes());
TypicalData data = TypicalData.newBuilder().setMyBytes(bstr).build();
assertEquals("foo", new String(data.getMyBytes().toByteArray()));
}
public void testSetAndGetRepeatedInt32() throws Exception {
List<Integer> list = new ArrayList<Integer>();
list.add(34);
list.add(56);
TypicalData data = TypicalData.newBuilder()
.addRepeatedInt32(12)
.addAllRepeatedInt32(list)
.setRepeatedInt32(2, 67)
.build();
assertEquals(3, data.getRepeatedInt32Count());
assertEquals(12, data.getRepeatedInt32(0));
byte[] bytes = data.toByteArray();
TypicalData other = TypicalData.parseFrom(bytes);
assertEquals(12, other.getRepeatedInt32(0));
// compareTo will fail in objc if the returned type is not JavaLangInteger.
assertEquals(0, other.getRepeatedInt32List().get(1).compareTo(34));
assertEquals(67, other.getRepeatedInt32(2));
}
public void testSetAndGetRepeatedInt64() throws Exception {
TypicalData data = TypicalData.newBuilder().addRepeatedInt64(123).build();
assertEquals(1, data.getRepeatedInt64Count());
assertEquals(123, data.getRepeatedInt64(0));
}
public void testSetAndGetRepeatedUint32() throws Exception {
TypicalData data = TypicalData.newBuilder().addRepeatedUint32(123).build();
assertEquals(1, data.getRepeatedUint32Count());
assertEquals(123, data.getRepeatedUint32(0));
}
public void testSetAndGetRepeatedUint64() throws Exception {
TypicalData data = TypicalData.newBuilder().addRepeatedUint64(123).build();
assertEquals(1, data.getRepeatedUint64Count());
assertEquals(123, data.getRepeatedUint64(0));
}
public void testSetAndGetRepeatedbBool() throws Exception {
TypicalData data = TypicalData.newBuilder().addRepeatedBool(true).build();
assertEquals(1, data.getRepeatedBoolCount());
assertTrue(data.getRepeatedBool(0));
}
public void testSetAndGetRepeatedDouble() throws Exception {
TypicalData data = TypicalData.newBuilder().addRepeatedDouble(0.5).build();
assertEquals(1, data.getRepeatedDoubleCount());
assertEquals(0.5, data.getRepeatedDouble(0), 0.0001);
}
public void testSetAndGetRepeatedFloat() throws Exception {
TypicalData data = TypicalData.newBuilder().addRepeatedFloat(0.5f).build();
assertEquals(1, data.getRepeatedFloatCount());
assertEquals(0.5f, data.getRepeatedFloat(0), 0.0001);
}
public void testSetAndGetRepeatedString() throws Exception {
TypicalData data = TypicalData.newBuilder().addRepeatedString("coin").build();
assertEquals(1, data.getRepeatedStringCount());
assertEquals("coin", data.getRepeatedString(0));
}
public void testSetAndGetRepeatedBytes() throws Exception {
List<ByteString> list = new ArrayList<ByteString>();
list.add(ByteString.copyFrom("def".getBytes()));
list.add(ByteString.copyFrom("ghi".getBytes()));
TypicalData data = TypicalData.newBuilder()
.addRepeatedBytes(ByteString.copyFrom("abc".getBytes()))
.addAllRepeatedBytes(list)
.setRepeatedBytes(2, ByteString.copyFrom("jkl".getBytes()))
.build();
assertEquals(3, data.getRepeatedBytesCount());
assertEquals("abc", new String(data.getRepeatedBytes(0).toByteArray()));
byte[] bytes = data.toByteArray();
TypicalData other = TypicalData.parseFrom(bytes);
assertEquals("abc", new String(other.getRepeatedBytes(0).toByteArray()));
assertEquals("def", new String(other.getRepeatedBytesList().get(1).toByteArray()));
assertEquals("jkl", new String(other.getRepeatedBytes(2).toByteArray()));
}
public void testSetAndGetRepeatedEnum() throws Exception {
TypicalData data =
TypicalData.newBuilder().addRepeatedEnum(TypicalData.EnumType.VALUE1).build();
assertEquals(1, data.getRepeatedEnumCount());
assertEquals(TypicalData.EnumType.VALUE1, data.getRepeatedEnum(0));
assertEquals(TypicalData.EnumType.VALUE1, data.getRepeatedEnumList().get(0));
}
public void testSetAndGetRepeatedTypicalData() throws Exception {
TypicalData data = TypicalData.newBuilder().setMyInt(42).build();
TypicalDataSet dataset = TypicalDataSet.newBuilder().addRepeatedTypicalData(data).build();
assertEquals(1, dataset.getRepeatedTypicalDataCount());
assertEquals(42, dataset.getRepeatedTypicalData(0).getMyInt());
}
public void testClear() throws Exception {
TypicalData.Builder dataBuilder = TypicalData.newBuilder().setMyInt(22).setMyString("foo");
assertEquals(22, dataBuilder.getMyInt());
assertEquals("foo", dataBuilder.getMyString());
dataBuilder.clear();
assertFalse(dataBuilder.hasMyInt());
assertFalse(dataBuilder.hasMyString());
}
public void testClearExtension() throws Exception {
TypicalData.Builder builder = TypicalData.newBuilder();
builder.setExtension(Typical.myPrimitiveExtension, 11);
assertTrue(builder.hasExtension(Typical.myPrimitiveExtension));
builder.clearExtension(Typical.myPrimitiveExtension);
assertFalse(builder.hasExtension(Typical.myPrimitiveExtension));
}
public void testClearRepeatedField() throws Exception {
TypicalData.Builder builder = TypicalData.newBuilder()
.addRepeatedInt32(1)
.addRepeatedInt32(2);
assertEquals(2, builder.getRepeatedInt32Count());
builder.clearRepeatedInt32();
assertEquals(0, builder.getRepeatedInt32Count());
}
public void testProtocolMessageEnum() throws Exception {
TypicalData data = TypicalData.newBuilder()
.setMyEnumType(TypicalData.EnumType.VALUE1)
.build();
ProtocolMessageEnum type = data.getMyEnumType();
assertEquals(1, type.getNumber());
}
public void testMergeFrom() throws Exception {
TypicalData input = TypicalData.newBuilder()
.setMyInt(42)
.setMyMessage(TypicalDataMessage.newBuilder().setMyMessageInt(43))
.build();
TypicalData output = TypicalData.newBuilder()
.mergeFrom(input.toByteString(), ExtensionRegistry.getEmptyRegistry())
.build();
assertEquals(42, output.getMyInt());
assertEquals(43, output.getMyMessage().getMyMessageInt());
}
public void testMergeFromOtherMessage() throws Exception {
TypicalData data = TypicalData.newBuilder().setMyInt(123).build();
Message dataAsMsg = data;
TypicalData.Builder builder1 = TypicalData.newBuilder().mergeFrom(dataAsMsg);
TypicalData.Builder builder2 = TypicalData.newBuilder().mergeFrom(data);
assertEquals(123, builder1.getMyInt());
assertEquals(123, builder2.getMyInt());
}
public void testMergeFromInputStream() throws Exception {
ExtensionRegistry registry = ExtensionRegistry.newInstance();
registry.add(Typical.myPrimitiveExtension);
byte[] rawData = asBytes(new int[]{
0x08, 0x06, 0x60, 0x01, 0x7A, 0x03, 0x62, 0x61, 0x72, 0xC8, 0x3E, 0x2D });
ByteArrayInputStream in = new ByteArrayInputStream(rawData);
TypicalData data = TypicalData.newBuilder().mergeFrom(in, registry).build();
assertEquals(6, data.getMyInt());
assertTrue(data.getMyBool());
assertEquals("bar", data.getMyString());
assertEquals(45, ((Integer) data.getExtension(Typical.myPrimitiveExtension)).intValue());
// test API without ExtensionRegistry
data = TypicalData.newBuilder().mergeFrom(new ByteArrayInputStream(rawData)).build();
assertEquals(6, data.getMyInt());
assertTrue(data.getMyBool());
assertEquals("bar", data.getMyString());
}
public void testMergeDelimitedFromInputStream() throws Exception {
ExtensionRegistry registry = ExtensionRegistry.newInstance();
registry.add(Typical.myPrimitiveExtension);
byte[] rawData = asBytes(new int[]{
0x0C, 0x08, 0x06, 0x60, 0x01, 0x7A, 0x03, 0x62, 0x61, 0x72, 0xC8, 0x3E, 0x2D,
0x0C, 0x08, 0x06, 0x60, 0x01, 0x7A, 0x03, 0x62, 0x61, 0x72, 0xC8, 0x3E, 0x2D });
ByteArrayInputStream in = new ByteArrayInputStream(rawData);
TypicalData.Builder dataBuilder = TypicalData.newBuilder();
assertTrue(dataBuilder.mergeDelimitedFrom(in, registry));
// Test that the second message reads correctly.
dataBuilder = TypicalData.newBuilder();
assertTrue(dataBuilder.mergeDelimitedFrom(in, registry));
TypicalData data = dataBuilder.build();
assertEquals(6, data.getMyInt());
assertTrue(data.getMyBool());
assertEquals("bar", data.getMyString());
assertEquals(45, ((Integer) data.getExtension(Typical.myPrimitiveExtension)).intValue());
// test API without ExtensionRegistry
dataBuilder = TypicalData.newBuilder();
assertTrue(dataBuilder.mergeDelimitedFrom(new ByteArrayInputStream(rawData)));
data = dataBuilder.build();
assertEquals(6, data.getMyInt());
assertTrue(data.getMyBool());
assertEquals("bar", data.getMyString());
}
public void testWriteToOutputStream() throws Exception {
TypicalData data = TypicalData.newBuilder()
.setMyInt(7)
.setMyBool(true)
.setMyString("foo")
.setExtension(Typical.myPrimitiveExtension, 45)
.build();
ByteArrayOutputStream out = new ByteArrayOutputStream();
data.writeTo(out);
byte[] bytes = out.toByteArray();
byte[] expected = new byte[]{
0x08, 0x07, 0x60, 0x01, 0x7A, 0x03, 0x66, 0x6F, 0x6F, (byte) 0xC8, 0x3E, 0x2D };
checkBytes(expected, bytes);
}
public void testWriteDelimitedToOutputStream() throws Exception {
TypicalData data = TypicalData.newBuilder()
.setMyInt(7).setMyBool(true).setMyString("foo").build();
ByteArrayOutputStream out = new ByteArrayOutputStream();
data.writeDelimitedTo(out);
byte[] bytes = out.toByteArray();
byte[] expected = new byte[]{ 0x09, 0x08, 0x07, 0x60, 0x01, 0x7A, 0x03, 0x66, 0x6F, 0x6F };
checkBytes(expected, bytes);
}
public void testMergeFromInvalidProtocolBufferException() throws Exception {
try {
ByteArrayInputStream in = new ByteArrayInputStream(new byte[]{ 0x00 });
TypicalData output = TypicalData.newBuilder()
.mergeFrom(in, ExtensionRegistry.getEmptyRegistry())
.build();
fail("Expected InvalidProtocolBufferException to be thrown.");
} catch (InvalidProtocolBufferException e) {
// Expected
}
}
public void testMergeDelimitedFromInvalidProtocolBufferException() throws Exception {
try {
ByteArrayInputStream in = new ByteArrayInputStream(new byte[]{ 0x03, 0x01, 0x02 });
TypicalData.Builder builder = TypicalData.newBuilder();
builder.mergeDelimitedFrom(in, ExtensionRegistry.getEmptyRegistry());
builder.build();
fail("Expected InvalidProtocolBufferException to be thrown.");
} catch (InvalidProtocolBufferException e) {
// Expected
}
}
public void testParseFromInvalidProtocolBufferException() throws Exception {
try {
TypicalData output = TypicalData.parseFrom(new byte[]{ 0x08 });
fail("Expected InvalidProtocolBufferException to be thrown.");
} catch (InvalidProtocolBufferException e) {
// Expected
}
}
public void testFindFieldByNumber() throws Exception {
Descriptor descriptor = TypicalData.Builder.getDescriptor();
Collection<FieldDescriptor> fields = descriptor.getFields();
for (FieldDescriptor field : fields) {
FieldDescriptor.Type type = field.getType();
int fieldId = field.getNumber();
switch (fieldId) {
case 1:
assertEquals(Type.INT32, type);
break;
case 2:
assertEquals(Type.BYTES, type);
break;
case 3:
assertEquals(Type.ENUM, type);
break;
}
FieldDescriptor result = descriptor.findFieldByNumber(fieldId);
assertEquals(field.getNumber(), result.getNumber());
assertEquals(field.getName(), result.getName());
}
}
public void testGetMessageType() throws Exception {
Descriptor descriptor = TypicalData.Builder.getDescriptor();
FieldDescriptor fieldDescriptor = descriptor.findFieldByNumber(11);
Descriptor messageDescriptor = fieldDescriptor.getMessageType();
assertNotNull(messageDescriptor);
FieldDescriptor messageFieldDescriptor = messageDescriptor.findFieldByNumber(1);
assertEquals(1, messageFieldDescriptor.getNumber());
}
public void testGetJavaType() throws Exception {
Descriptor descriptor = TypicalData.Builder.getDescriptor();
FieldDescriptor intField = descriptor.findFieldByNumber(1);
assertEquals(FieldDescriptor.JavaType.INT, intField.getJavaType());
FieldDescriptor bytesField = descriptor.findFieldByNumber(2);
assertEquals(FieldDescriptor.JavaType.BYTE_STRING, bytesField.getJavaType());
FieldDescriptor booleanField = descriptor.findFieldByNumber(5);
assertEquals(FieldDescriptor.JavaType.BOOLEAN, booleanField.getJavaType());
FieldDescriptor stringField = descriptor.findFieldByNumber(8);
assertEquals(FieldDescriptor.JavaType.STRING, stringField.getJavaType());
}
public void testNewBuilderForField() throws Exception {
Descriptor descriptor = TypicalData.Builder.getDescriptor();
FieldDescriptor fieldDescriptor = descriptor.findFieldByNumber(11);
TypicalData.Builder dataBuilder = TypicalData.newBuilder();
TypicalDataMessage.Builder messageBuilder = (TypicalDataMessage.Builder)
dataBuilder.newBuilderForField(fieldDescriptor);
TypicalDataMessage message = messageBuilder.setMyMessageInt(10).build();
assertEquals(10, message.getMyMessageInt());
fieldDescriptor = descriptor.findFieldByNumber(1);
try {
dataBuilder.newBuilderForField(fieldDescriptor);
fail("Expected UnsupportedOperationException");
} catch (UnsupportedOperationException e) {
// Expected.
}
}
public void testEnumDescriptor() throws Exception {
Descriptor descriptor = TypicalData.Builder.getDescriptor();
FieldDescriptor fieldDescriptor = descriptor.findFieldByNumber(3);
assertEquals(Type.ENUM, fieldDescriptor.getType());
EnumDescriptor enumDescriptor = fieldDescriptor.getEnumType();
assertNotNull(enumDescriptor);
EnumValueDescriptor enumValueDescriptor = enumDescriptor.findValueByNumber(1);
assertEquals(1, enumValueDescriptor.getNumber());
assertEquals("VALUE1", enumValueDescriptor.getName());
}
public void testExtensionRegistry() throws Exception {
ExtensionRegistry registry = ExtensionRegistry.newInstance();
Typical.registerAllExtensions(registry);
Descriptor descriptor = TypicalData.Builder.getDescriptor();
FieldDescriptor fieldDescriptor = descriptor.findFieldByNumber(1);
assertFalse(fieldDescriptor.isExtension());
ExtensionRegistry.ExtensionInfo extensionInfo =
registry.findExtensionByNumber(descriptor, 1000);
assertNotNull(extensionInfo);
FieldDescriptor extensionFieldDescriptor = extensionInfo.descriptor;
assertNotNull(extensionFieldDescriptor);
assertEquals(1000, extensionFieldDescriptor.getNumber());
assertTrue(extensionFieldDescriptor.isExtension());
Message message = extensionInfo.defaultInstance;
assertTrue(message instanceof TypicalDataMessage);
TypicalDataMessage data = ((TypicalDataMessage.Builder) message.toBuilder())
.setMyMessageInt(100)
.build();
assertEquals(100, data.getMyMessageInt());
// Primitive extension
extensionInfo = registry.findExtensionByNumber(descriptor, 1001);
assertNotNull(extensionInfo);
extensionFieldDescriptor = extensionInfo.descriptor;
assertNotNull(extensionFieldDescriptor);
assertEquals(1001, extensionFieldDescriptor.getNumber());
assertTrue(extensionFieldDescriptor.isExtension());
assertNull(extensionInfo.defaultInstance);
}
public void testEnumValues() throws Exception {
TypicalData.EnumType[] values = TypicalData.EnumType.values();
assertEquals(5, values.length);
assertEquals(TypicalData.EnumType.VALUE1, values[0]);
assertEquals(TypicalData.EnumType.VALUE2, values[1]);
assertEquals(TypicalData.EnumType.VALUE3, values[2]);
assertEquals(TypicalData.EnumType.VALUE4, values[3]);
assertEquals(TypicalData.EnumType.VALUE9, values[4]);
}
public void testEnumOrdinal() throws Exception {
assertEquals(0, TypicalData.EnumType.VALUE1.ordinal());
assertEquals(1, TypicalData.EnumType.VALUE2.ordinal());
assertEquals(2, TypicalData.EnumType.VALUE3.ordinal());
assertEquals(3, TypicalData.EnumType.VALUE4.ordinal());
assertEquals(4, TypicalData.EnumType.VALUE9.ordinal());
}
public void testEnumGetNumber() throws Exception {
assertEquals(1, TypicalData.EnumType.VALUE1.getNumber());
assertEquals(2, TypicalData.EnumType.VALUE2.getNumber());
assertEquals(3, TypicalData.EnumType.VALUE3.getNumber());
assertEquals(4, TypicalData.EnumType.VALUE4.getNumber());
assertEquals(9, TypicalData.EnumType.VALUE9.getNumber());
}
public void testEnumValueOf() throws Exception {
assertEquals(TypicalData.EnumType.VALUE1, TypicalData.EnumType.valueOf(1));
assertEquals(TypicalData.EnumType.VALUE2, TypicalData.EnumType.valueOf(2));
assertEquals(TypicalData.EnumType.VALUE3, TypicalData.EnumType.valueOf(3));
assertEquals(TypicalData.EnumType.VALUE4, TypicalData.EnumType.valueOf(4));
assertEquals(TypicalData.EnumType.VALUE9, TypicalData.EnumType.valueOf(9));
}
public void testEnumValueOfWithString() throws Exception {
assertEquals(TypicalData.EnumType.VALUE1, TypicalData.EnumType.valueOf("VALUE1"));
assertEquals(TypicalData.EnumType.VALUE2, TypicalData.EnumType.valueOf("VALUE2"));
assertEquals(TypicalData.EnumType.VALUE3, TypicalData.EnumType.valueOf("VALUE3"));
assertEquals(TypicalData.EnumType.VALUE4, TypicalData.EnumType.valueOf("VALUE4"));
assertEquals(TypicalData.EnumType.VALUE9, TypicalData.EnumType.valueOf("VALUE9"));
}
  // Maps each enum constant to its proto number via a switch STATEMENT.
  // Intentionally a switch: testEnumSwitchStatement exists to verify that the
  // objective-c translation of enum switches works, so do not restructure this.
  private int switchHelper(TypicalData.EnumType enumType) {
    switch (enumType) {
      case VALUE1: return 1;
      case VALUE2: return 2;
      case VALUE3: return 3;
      case VALUE4: return 4;
      case VALUE9: return 9;
    }
    // Unreachable for the current enum constants; sentinel for future additions.
    return -1;
  }
public void testEnumSwitchStatement() throws Exception {
assertEquals(1, switchHelper(TypicalData.EnumType.VALUE1));
assertEquals(2, switchHelper(TypicalData.EnumType.VALUE2));
assertEquals(3, switchHelper(TypicalData.EnumType.VALUE3));
assertEquals(4, switchHelper(TypicalData.EnumType.VALUE4));
assertEquals(9, switchHelper(TypicalData.EnumType.VALUE9));
}
public void testGetFieldsCompiles() throws Exception {
Collection<FieldDescriptor> fields = TypicalData.Builder.getDescriptor().getFields();
assertTrue(fields.size() > 0);
fields = TypicalData.newBuilder().build().getDescriptorForType().getFields();
assertTrue(fields.size() > 0);
}
public void testMessageOrBuilderInterface() throws Exception {
TypicalDataOrBuilder builder = TypicalData.newBuilder().setMyInt(42);
assertTrue(builder.hasMyInt());
assertEquals(42, builder.getMyInt());
TypicalDataOrBuilder data = TypicalData.newBuilder().setMyInt(42).build();
assertTrue(data.hasMyInt());
assertEquals(42, data.getMyInt());
}
public void testMessageOrBuilderInterfaceSingleFile() throws Exception {
SingleFile.Data1.InternalOrBuilder internalBuilder = SingleFile.Data1.Internal.newBuilder()
.setIntValue(24);
assertEquals(24, internalBuilder.getIntValue());
SingleFile.Data1OrBuilder builder = SingleFile.Data1.newBuilder().setIntValue(42);
assertTrue(builder.hasIntValue());
assertEquals(42, builder.getIntValue());
SingleFile.Data1OrBuilder data = SingleFile.Data1.newBuilder().setIntValue(42).build();
assertTrue(data.hasIntValue());
assertEquals(42, data.getIntValue());
}
public void testSetAndGetFieldWithFieldDescriptor() throws Exception {
FieldDescriptor[] fields = new FieldDescriptor[19];
Descriptor descriptor = TypicalData.Builder.getDescriptor();
for (int i = 1; i <= 18; i++) {
fields[i] = descriptor.findFieldByNumber(i);
}
TypicalData.Builder dataBuilder = TypicalData.newBuilder();
dataBuilder.setField(fields[1], new Integer(42));
dataBuilder.setField(fields[2], ByteString.copyFrom("foo".getBytes()));
dataBuilder.setField(fields[3], TypicalData.EnumType.VALUE9.getValueDescriptor());
dataBuilder.setField(fields[11], TypicalDataMessage.newBuilder().build());
dataBuilder.setField(fields[12], Boolean.TRUE);
dataBuilder.setField(fields[13], new Float(43.8));
dataBuilder.setField(fields[14], new Double(44.5));
dataBuilder.setField(fields[15], "bar");
dataBuilder.setField(fields[16], new Integer(24));
dataBuilder.setField(fields[17], new Long(4422));
dataBuilder.setField(fields[18], new Long(2244));
dataBuilder.addRepeatedField(fields[4], new Integer(72));
dataBuilder.addRepeatedField(fields[8], "aaa");
dataBuilder.addRepeatedField(fields[8], "bbb");
dataBuilder.setRepeatedField(fields[8], 1, "ccc");
ArrayList<Double> doubles = new ArrayList<Double>();
doubles.add(1.2);
doubles.add(3.4);
dataBuilder.setField(fields[7], doubles);
checkGetters(dataBuilder.build(), fields);
checkGetters(dataBuilder, fields);
}
private void checkGetters(TypicalDataOrBuilder data, FieldDescriptor[] fields) {
assertEquals(42, data.getMyInt());
assertEquals("foo", new String(data.getMyBytes().toByteArray()));
assertEquals(TypicalData.EnumType.VALUE9, data.getMyEnumType());
assertTrue(data.getMyBool());
assertEquals(new Float(43.8), data.getMyFloat());
assertEquals(new Double(44.5), data.getMyDouble());
assertEquals("bar", data.getMyString());
assertEquals(24, data.getMyUint());
assertEquals(4422, data.getMyLong());
assertEquals(2244, data.getMyUlong());
assertEquals(1, data.getRepeatedInt32Count());
assertEquals(72, data.getRepeatedInt32(0));
assertEquals("aaa", data.getRepeatedString(0));
assertEquals("ccc", data.getRepeatedString(1));
assertEquals(1.2, data.getRepeatedDouble(0), 0.0001);
Object result;
result = data.getField(fields[1]);
assertTrue(result instanceof Integer);
assertEquals(42, result);
result = data.getField(fields[2]);
assertTrue(result instanceof ByteString);
assertEquals("foo", new String(((ByteString) result).toByteArray()));
result = data.getField(fields[3]);
assertTrue(result instanceof EnumValueDescriptor);
assertEquals(9, ((EnumValueDescriptor) result).getNumber());
result = data.getField(fields[11]);
assertTrue(result instanceof TypicalDataMessage);
assertEquals(TypicalDataMessage.newBuilder().build(), result);
result = data.getField(fields[12]);
assertTrue(result instanceof Boolean);
assertEquals(Boolean.TRUE, result);
result = data.getField(fields[13]);
assertTrue(result instanceof Float);
assertEquals(43.8, ((Float) result).floatValue(), 0.0001);
result = data.getField(fields[14]);
assertTrue(result instanceof Double);
assertEquals(44.5, ((Double) result).doubleValue(), 0.0001);
result = data.getField(fields[15]);
assertTrue(result instanceof String);
assertEquals("bar", result);
result = data.getField(fields[16]);
assertTrue(result instanceof Integer);
assertEquals(24, result);
result = data.getField(fields[17]);
assertTrue(result instanceof Long);
assertEquals(4422L, result);
result = data.getField(fields[18]);
assertTrue(result instanceof Long);
assertEquals(2244L, result);
assertEquals(1, data.getRepeatedFieldCount(fields[4]));
result = data.getRepeatedField(fields[4], 0);
assertTrue(result instanceof Integer);
assertEquals(72, result);
assertEquals(2, data.getRepeatedFieldCount(fields[8]));
result = data.getRepeatedField(fields[8], 1);
assertEquals("ccc", result);
assertEquals(2, data.getRepeatedFieldCount(fields[7]));
result = data.getRepeatedField(fields[7], 1);
assertEquals(3.4, ((Double) result).doubleValue(), 0.0001);
}
public void testClearFieldWithDescriptor() throws Exception {
Descriptor descriptor = TypicalData.Builder.getDescriptor();
FieldDescriptor intField = descriptor.findFieldByNumber(1);
FieldDescriptor repeatedIntField = descriptor.findFieldByNumber(4);
TypicalData.Builder dataBuilder = TypicalData.newBuilder()
.setMyInt(42)
.addRepeatedInt32(43)
.addRepeatedInt32(44);
assertEquals(42, dataBuilder.getMyInt());
dataBuilder.clearField(intField);
assertFalse(dataBuilder.hasMyInt());
assertEquals(2, dataBuilder.getRepeatedInt32Count());
dataBuilder.clearField(repeatedIntField);
assertEquals(0, dataBuilder.getRepeatedInt32Count());
}
public void testGetUnsetField() throws Exception {
TypicalData data = TypicalData.newBuilder().build();
Descriptor descriptor = TypicalData.getDescriptor();
assertEquals(0, data.getField(descriptor.findFieldByNumber(1)));
Object result = data.getField(descriptor.findFieldByNumber(3));
assertTrue(result instanceof EnumValueDescriptor);
assertEquals(TypicalData.EnumType.VALUE1.getValueDescriptor().getNumber(),
((EnumValueDescriptor) result).getNumber());
assertTrue(data.getField(descriptor.findFieldByNumber(11)) instanceof TypicalDataMessage);
}
public void testFieldDescriptorMethodsThrowNullPointer() throws Exception {
TypicalData.Builder dataBuilder = TypicalData.newBuilder();
checkHasFieldThrowsNullPointer(dataBuilder);
checkHasFieldThrowsNullPointer(dataBuilder.build());
checkGetFieldThrowsNullPointer(dataBuilder);
checkGetFieldThrowsNullPointer(dataBuilder.build());
checkSetFieldThrowsNullPointer(dataBuilder);
}
public void checkHasFieldThrowsNullPointer(TypicalDataOrBuilder data) {
try {
data.hasField(null);
fail("Expected NullPointerException");
} catch (NullPointerException e) {
// Expected.
}
}
public void checkGetFieldThrowsNullPointer(TypicalDataOrBuilder data) {
try {
data.getField(null);
fail("Expected NullPointerException");
} catch (NullPointerException e) {
// Expected.
}
}
public void checkSetFieldThrowsNullPointer(TypicalData.Builder data) {
try {
data.setField(null, "foo");
fail("Expected NullPointerException");
} catch (NullPointerException e) {
// Expected.
}
}
  // Every setter/adder — singular, repeated, and extension — must reject null
  // with a NullPointerException; protobuf fields are never null.
  public void testAddingNullValues() throws Exception {
    TypicalData.Builder dataBuilder = TypicalData.newBuilder();
    // Singular setters (message via both overloads, string, bytes, enum).
    try {
      dataBuilder.setMyMessage((TypicalDataMessage) null);
      fail("Expected NullPointerException");
    } catch (NullPointerException e) {
      // Expected.
    }
    try {
      dataBuilder.setMyMessage((TypicalDataMessage.Builder) null);
      fail("Expected NullPointerException");
    } catch (NullPointerException e) {
      // Expected.
    }
    try {
      dataBuilder.setMyString(null);
      fail("Expected NullPointerException");
    } catch (NullPointerException e) {
      // Expected.
    }
    try {
      dataBuilder.setMyBytes(null);
      fail("Expected NullPointerException");
    } catch (NullPointerException e) {
      // Expected.
    }
    try {
      dataBuilder.setMyEnumType(null);
      fail("Expected NullPointerException");
    } catch (NullPointerException e) {
      // Expected.
    }
    // Repeated-field add/set (message via both overloads, string, bytes, enum).
    try {
      dataBuilder.addRepeatedMessage((TypicalDataMessage) null);
      fail("Expected NullPointerException");
    } catch (NullPointerException e) {
      // Expected.
    }
    try {
      dataBuilder.addRepeatedMessage((TypicalDataMessage.Builder) null);
      fail("Expected NullPointerException");
    } catch (NullPointerException e) {
      // Expected.
    }
    try {
      dataBuilder.setRepeatedMessage(0, (TypicalDataMessage) null);
      fail("Expected NullPointerException");
    } catch (NullPointerException e) {
      // Expected.
    }
    try {
      dataBuilder.addRepeatedString(null);
      fail("Expected NullPointerException");
    } catch (NullPointerException e) {
      // Expected.
    }
    try {
      dataBuilder.setRepeatedString(0, null);
      fail("Expected NullPointerException");
    } catch (NullPointerException e) {
      // Expected.
    }
    try {
      dataBuilder.addRepeatedBytes(null);
      fail("Expected NullPointerException");
    } catch (NullPointerException e) {
      // Expected.
    }
    try {
      dataBuilder.setRepeatedBytes(0, null);
      fail("Expected NullPointerException");
    } catch (NullPointerException e) {
      // Expected.
    }
    try {
      dataBuilder.addRepeatedEnum(null);
      fail("Expected NullPointerException");
    } catch (NullPointerException e) {
      // Expected.
    }
    try {
      dataBuilder.setRepeatedEnum(0, null);
      fail("Expected NullPointerException");
    } catch (NullPointerException e) {
      // Expected.
    }
    // Extension set/add.
    try {
      dataBuilder.setExtension(Typical.myExtension, null);
      fail("Expected NullPointerException");
    } catch (NullPointerException e) {
      // Expected.
    }
    try {
      dataBuilder.addExtension(Typical.myRepeatedExtension, null);
      fail("Expected NullPointerException");
    } catch (NullPointerException e) {
      // Expected.
    }
  }
// Serialized size of a message holding one small int: 1 tag byte + 1 varint byte.
public void testGetSerializedSize() throws Exception {
  GeneratedMessage data = TypicalData.newBuilder().setMyInt(1).build();
  assertEquals(2, data.getSerializedSize());
}
// getAllFields() must report regular fields and extensions alike, on both
// messages and builders.
public void testGetAllFields() throws Exception {
  GeneratedMessage data = TypicalData.newBuilder()
      .setMyInt(1)
      .addRepeatedInt32(2)
      .setExtension(Typical.myExtension, TypicalDataMessage.getDefaultInstance())
      .setExtension(Typical.myPrimitiveExtension, 3)
      .build();
  Map<FieldDescriptor, Object> allFields = data.getAllFields();
  // Two regular fields plus two extensions.
  assertEquals(4, allFields.size());
  assertNotNull(allFields.get(Typical.myExtension.getDescriptor()));
  assertEquals(4, data.toBuilder().getAllFields().size());
  Descriptor descriptor = TypicalData.Builder.getDescriptor();
  FieldDescriptor intField = descriptor.findFieldByNumber(1);
  assertEquals(1, allFields.get(intField));
  assertEquals(3, allFields.get(Typical.myPrimitiveExtension.getDescriptor()));
}
// Compilation-only test: generated classes for messages with unusual names
// (underscores, digits, leading lowercase) must be instantiable.
public void testFunnyNames() throws Exception {
  Foo_bar msg1 = Foo_bar.newBuilder().build();
  Foo2bar msg2 = Foo2bar.newBuilder().build();
  fooBar msg3 = fooBar.newBuilder().build();
}
// Compilation-only test: a message from a proto with a package prefix is usable.
public void testPackagePrefix() throws Exception {
  PrefixDummy dummy = PrefixDummy.newBuilder().build();
}
// toString() must render set fields; accepts either naming convention since
// Java and ObjC runtimes format field names differently.
public void testToStringOnMessage() throws Exception {
  // Using the Message type to ensure translation of toString compiles on the
  // interface type.
  Message data = TypicalData.newBuilder().setMyInt(31).build();
  String result = data.toString();
  assertTrue("Unexpected toString result: " + result,
      // Java and ObjC results are not identical.
      result.contains("my_int: 31") || result.contains("myInt: 31"));
}
// Round-trips every extension kind (primitive, message, repeated, enum, bytes,
// bool, nested) through the builder, the built message, the wire format, and a
// registry-aware parse. The byte array below is the expected wire encoding --
// do not edit it without recomputing by hand.
public void testSetAndGetExtensions() throws Exception {
  TypicalDataMessage extensionMessage =
      TypicalDataMessage.newBuilder().setMyMessageInt(321).build();
  List<Integer> repeatedInts = new ArrayList<Integer>();
  repeatedInts.add(1);
  repeatedInts.add(2);
  List<TypicalDataMessage> repeatedData = new ArrayList<TypicalDataMessage>();
  repeatedData.add(TypicalDataMessage.newBuilder().setMyMessageInt(432).build());
  repeatedData.add(TypicalDataMessage.newBuilder().setMyMessageInt(543).build());
  TypicalData.Builder dataBuilder = TypicalData.newBuilder()
      .setExtension(Typical.myPrimitiveExtension, 123)
      .setExtension(Typical.myExtension, extensionMessage)
      .setExtension(Typical.myRepeatedPrimitiveExtension, repeatedInts)
      .addExtension(Typical.myRepeatedPrimitiveExtension, 3)
      .setExtension(Typical.myRepeatedExtension, repeatedData)
      .setExtension(Typical.myEnumExtension, TypicalData.EnumType.VALUE1)
      .setExtension(Typical.myBytesExtension, ByteString.copyFrom("abc".getBytes()))
      .setExtension(Typical.myBoolExtension, Boolean.TRUE)
      .setExtension(MsgWithNestedExtensions.intExt, 456);
  // Getters must agree before and after build().
  checkGetExtensions(dataBuilder);
  checkGetExtensions(dataBuilder.build());
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  dataBuilder.build().writeTo(out);
  // Golden wire bytes for the message built above.
  byte[] msgBytes = asBytes(new int[] {
      0xC2, 0x3E, 0x03, 0x08, 0xC1, 0x02, 0xC8, 0x3E, 0x7B, 0xD0, 0x3E, 0x01, 0xD0, 0x3E, 0x02,
      0xD0, 0x3E, 0x03, 0xDA, 0x3E, 0x03, 0x08, 0xB0, 0x03, 0xDA, 0x3E, 0x03, 0x08, 0x9F, 0x04,
      0xE0, 0x3E, 0x01, 0xEA, 0x3E, 0x03, 0x61, 0x62, 0x63, 0xF0, 0x3E, 0x01, 0x80, 0x7D, 0xC8,
      0x03 });
  checkBytes(msgBytes, out.toByteArray());
  // Parsing with a registry that knows the extensions must reproduce them all.
  ExtensionRegistry registry = ExtensionRegistry.newInstance();
  Typical.registerAllExtensions(registry);
  ByteArrayInputStream in = new ByteArrayInputStream(msgBytes);
  TypicalData data = TypicalData.newBuilder().mergeFrom(in, registry).build();
  checkGetExtensions(data);
}
/**
 * Asserts every extension set in testSetAndGetExtensions is readable through
 * the TypicalDataOrBuilder interface (works for both builders and messages).
 */
private void checkGetExtensions(TypicalDataOrBuilder data) {
  assertEquals(123, ((Integer) data.getExtension(Typical.myPrimitiveExtension)).intValue());
  Object msg = data.getExtension(Typical.myExtension);
  assertTrue(msg instanceof TypicalDataMessage);
  assertEquals(321, ((TypicalDataMessage) msg).getMyMessageInt());
  Object result = data.getExtension(Typical.myRepeatedPrimitiveExtension);
  assertTrue(result instanceof List);
  assertTrue(((List) result).get(0) instanceof Integer);
  // 2 from the list plus 1 from addExtension.
  assertEquals(3, data.getExtensionCount(Typical.myRepeatedPrimitiveExtension));
  assertEquals(2,
      ((Integer) data.getExtension(Typical.myRepeatedPrimitiveExtension, 1)).intValue());
  assertEquals(3,
      ((Integer) data.getExtension(Typical.myRepeatedPrimitiveExtension, 2)).intValue());
  assertEquals(2, data.getExtensionCount(Typical.myRepeatedExtension));
  result = data.getExtension(Typical.myRepeatedExtension, 1);
  assertTrue(result instanceof TypicalDataMessage);
  assertEquals(543, ((TypicalDataMessage) result).getMyMessageInt());
  assertEquals(TypicalData.EnumType.VALUE1, data.getExtension(Typical.myEnumExtension));
  result = data.getExtension(Typical.myBytesExtension);
  assertTrue(result instanceof ByteString);
  assertEquals("abc", new String(((ByteString) result).toByteArray()));
  result = data.getExtension(Typical.myBoolExtension);
  assertEquals(Boolean.TRUE, result);
  assertEquals(456, ((Integer) data.getExtension(MsgWithNestedExtensions.intExt)).intValue());
}
// A field with no declared options must still return a non-null options object.
public void testEmptyFieldOptions() {
  Descriptor descriptor = TypicalData.Builder.getDescriptor();
  FieldDescriptor intField = descriptor.findFieldByNumber(1);
  assertNotNull(intField.getOptions());
}
// build() must return the concrete generated type, even via the Message interface.
public void testMessageGetClass() {
  Message msg = TypicalData.newBuilder().build();
  assertEquals(TypicalData.class, msg.getClass());
}
// toBuilder() must carry nested message fields over from the built message.
public void testToBuilder() throws Exception {
  TypicalData data = TypicalData.newBuilder()
      .setMyInt(42)
      .setMyMessage(TypicalDataMessage.newBuilder().setMyMessageInt(43))
      .build();
  TypicalData.Builder builder = data.toBuilder();
  TypicalDataMessage message = builder.getMyMessage();
  assertNotNull(message);
  assertEquals(43, message.getMyMessageInt());
}
/**
 * Serializes three large messages with writeDelimitedTo and verifies the
 * concatenated output matches the golden file {@code LARGEPROTO} byte-for-byte.
 */
public void testWriteDelimitedToLargeProto() throws Exception {
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  // The three ranges together cover [0, 3000); testMergeFromLargeProto reads
  // them back in the same order.
  buildRangeData(0, 1000).writeDelimitedTo(out);
  buildRangeData(1000, 2000).writeDelimitedTo(out);
  buildRangeData(2000, 3000).writeDelimitedTo(out);
  byte[] bytes = out.toByteArray();
  byte[] expected = readFile(LARGEPROTO);
  checkBytes(expected, bytes);
}

/**
 * Builds a TypicalData whose repeated int32/uint32/int64/uint64 fields each
 * hold the values {@code [from, to)}. Extracted from the original method,
 * which repeated this loop three times verbatim.
 */
private static TypicalData buildRangeData(int from, int to) {
  TypicalData.Builder builder = TypicalData.newBuilder();
  for (int i = from; i < to; i++) {
    builder.addRepeatedInt32(i);
    builder.addRepeatedUint32(i);
    builder.addRepeatedInt64(i);
    builder.addRepeatedUint64(i);
  }
  return builder.build();
}
// Round-trip of testWriteDelimitedToLargeProto: reads three delimited messages
// back from the golden file and spot-checks the first/last element of each.
public void testMergeFromLargeProto() throws Exception {
  TypicalData.Builder builder1 = TypicalData.newBuilder();
  TypicalData.Builder builder2 = TypicalData.newBuilder();
  TypicalData.Builder builder3 = TypicalData.newBuilder();
  FileInputStream in = new FileInputStream(LARGEPROTO);
  try {
    // mergeDelimitedFrom returns false at EOF; all three reads must succeed.
    assertTrue(builder1.mergeDelimitedFrom(in));
    assertTrue(builder2.mergeDelimitedFrom(in));
    assertTrue(builder3.mergeDelimitedFrom(in));
  } finally {
    in.close();
  }
  TypicalData data1 = builder1.build();
  TypicalData data2 = builder2.build();
  TypicalData data3 = builder3.build();
  assertEquals(0, data1.getRepeatedInt32(0));
  assertEquals(999, data1.getRepeatedInt32(999));
  assertEquals(1000, data2.getRepeatedInt32(0));
  assertEquals(1999, data2.getRepeatedInt32(999));
  assertEquals(2000, data3.getRepeatedInt32(0));
  assertEquals(2999, data3.getRepeatedInt32(999));
}
// Every path to a default instance (static accessor, builder, built message,
// extension) must carry the declared field defaults.
public void testDefaultInstance() throws Exception {
  checkDefaults(MsgWithDefaults.getDefaultInstance());
  checkDefaults(MsgWithDefaults.newBuilder());
  checkDefaults(MsgWithDefaults.newBuilder().getDefaultInstanceForType());
  checkDefaults(MsgWithDefaults.newBuilder().build().getDefaultInstanceForType());
  checkDefaults(MsgWithDefaults.newBuilder().build().newBuilderForType());
  checkDefaults((MsgWithDefaults) Typical.myExtensionWithDefaults.getMessageDefaultInstance());
}
/** Asserts the proto-declared default values of MsgWithDefaults. */
private void checkDefaults(MsgWithDefaultsOrBuilder data) {
  assertEquals(13, data.getMyInt32());
  assertTrue(data.getMyBool());
  assertEquals("foo", data.getMyString());
  assertEquals(TypicalData.EnumType.VALUE4, data.getMyEnum());
}
// Mainly a compilation test for the Lite classes.
public void testMessageLite() throws Exception {
  MessageLite.Builder builder = TypicalData.newBuilder();
  MessageLite message = builder.build();
  assertTrue(message instanceof MessageLite);
}
// Built messages are immutable snapshots: mutating the builder afterwards --
// whether the original builder or one obtained via toBuilder() -- must not
// change the message.
public void testMutatingBuilderDoesntMutateMessage() throws Exception {
  TypicalData.Builder builder = TypicalData.newBuilder();
  TypicalData data = builder.build();
  builder.setMyInt(23);
  assertEquals(0, data.getMyInt());
  builder = data.toBuilder();
  builder.setMyInt(45);
  assertEquals(0, data.getMyInt());
}
// build() on a message missing required fields must throw, while
// buildPartial() must succeed and expose the field's default.
public void testBuildUninitializedMessage() throws Exception {
  MsgWithRequiredFields.Builder builder = MsgWithRequiredFields.newBuilder();
  try {
    builder.build();
    fail("Expected UninitializedMessageException");
  } catch (RuntimeException e) {
    // Expected.
  }
  MsgWithRequiredFields uninitialized = builder.buildPartial(); // Shouldn't fail.
  assertEquals(0, uninitialized.getRequiredInt32());
}
// Field names that collide with reserved words or runtime identifiers
// (id, and_eq, zone, self) must still generate working accessors.
public void testSpecialFieldNames() throws Exception {
  MsgWithSpecialFieldNames data = MsgWithSpecialFieldNames.newBuilder()
      .setId(123)
      .setAndEq(4.56)
      .setZone("test")
      .addSelf(777)
      .addSelf(888)
      .build();
  assertEquals(123, data.getId());
  assertEquals(4.56, data.getAndEq(), 0.0001);
  assertEquals("test", data.getZone());
  assertEquals(2, data.getSelfCount());
  assertEquals(777, data.getSelf(0));
  assertEquals(888, data.getSelf(1));
}
public void testEmptyFile() throws Exception {
  ExtensionRegistry registry = ExtensionRegistry.newInstance();
  // Should be a noop, test that this compiles.
  EmptyFile.registerAllExtensions(registry);
}
// Passing a sub-builder to a setter must snapshot its state at set time:
// later mutations of the sub-builder do not leak into the parent builder.
public void testSettingMessageFieldsWithBuilders() throws Exception {
  TypicalDataMessage.Builder msgBuilder = TypicalDataMessage.newBuilder().setMyMessageInt(1);
  TypicalData.Builder builder = TypicalData.newBuilder()
      .setMyMessage(msgBuilder)
      .addRepeatedMessage(msgBuilder);
  msgBuilder.setMyMessageInt(2);
  assertEquals(1, builder.getMyMessage().getMyMessageInt());
  assertEquals(1, builder.getRepeatedMessage(0).getMyMessageInt());
}
// equals/hashCode contract: structurally equal messages are equal (symmetric)
// with matching hash codes; differing messages are not; builders never
// compare equal to each other.
public void testIsEqualAndHashCode() throws Exception {
  TypicalDataMessage subMsg1 = TypicalDataMessage.newBuilder().setMyMessageInt(11).build();
  TypicalData.Builder builder1 = TypicalData.newBuilder()
      .setMyMessage(subMsg1)
      .setMyInt(22);
  TypicalData data1 = builder1.build();
  TypicalDataMessage subMsg2 = TypicalDataMessage.newBuilder().setMyMessageInt(11).build();
  TypicalData.Builder builder2 = TypicalData.newBuilder()
      .setMyMessage(subMsg2)
      .setMyInt(22);
  TypicalData data2 = builder2.build();
  TypicalDataMessage subMsg3 = TypicalDataMessage.newBuilder().setMyMessageInt(33).build();
  TypicalData.Builder builder3 = TypicalData.newBuilder()
      .setMyMessage(subMsg3)
      .setMyInt(22);
  TypicalData data3 = builder3.build();
  // Builders are not equal.
  assertFalse(builder1.equals(builder2));
  assertFalse(builder2.equals(builder1));
  assertTrue(data1.equals(data2));
  assertTrue(data2.equals(data1));
  assertEquals(data1.hashCode(), data2.hashCode());
  assertFalse(data1.equals(data3));
  assertFalse(data3.equals(data1));
}
// newBuilder(prototype) must seed the builder with the prototype's fields.
public void testNewBuilderWithPrototype() throws Exception {
  TypicalData data = TypicalData.newBuilder().setMyInt(123).build();
  TypicalData.Builder builder = TypicalData.newBuilder(data);
  assertEquals(123, builder.getMyInt());
}
/**
 * Verifies the generated {@code *_VALUE} integer constants match the numbers
 * declared in the .proto file.
 *
 * <p>The original asserted with the arguments reversed
 * ({@code assertEquals(actual, expected)}), which yields misleading failure
 * messages; the expected literal now comes first per the JUnit convention.
 */
public void testEnumValueConstants() throws Exception {
  assertEquals(1, TypicalData.EnumType.VALUE1_VALUE);
  assertEquals(2, TypicalData.EnumType.VALUE2_VALUE);
  assertEquals(3, TypicalData.EnumType.VALUE3_VALUE);
  assertEquals(4, TypicalData.EnumType.VALUE4_VALUE);
  assertEquals(9, TypicalData.EnumType.VALUE9_VALUE);
}
// Exercises the full MessageLite/MessageLite.Builder surface (write, delimited
// write, merge with and without a registry) on an empty message; also checks
// that an empty message serializes to zero bytes.
public void testMessageLiteInterface() throws Exception {
  ExtensionRegistryLite registry = ExtensionRegistryLite.newInstance();
  TypicalData data = TypicalData.newBuilder().build();
  MessageLite messageLite = data;
  MessageLite.Builder builderLite = messageLite.newBuilderForType();
  messageLite.writeTo(new ByteArrayOutputStream());
  messageLite.writeDelimitedTo(new ByteArrayOutputStream());
  builderLite.mergeFrom(new ByteArrayInputStream(new byte[0]));
  builderLite.mergeFrom(new ByteArrayInputStream(new byte[0]), registry);
  builderLite.mergeDelimitedFrom(new ByteArrayInputStream(new byte[0]));
  builderLite.mergeDelimitedFrom(new ByteArrayInputStream(new byte[0]), registry);
  assertEquals(0, messageLite.getSerializedSize());
}
/**
 * The returned list must be able to see edits to the internal list. However
 * when the internal list is cleared the returned list maintains a view of the
 * elements as they where before the clear.
 */
public void testGetRepeatedList() throws Exception {
  TypicalData.Builder builder = TypicalData.newBuilder()
      .addRepeatedInt32(1)
      .addRepeatedInt32(2)
      .addRepeatedInt32(3);
  List<Integer> list = builder.getRepeatedInt32List();
  assertEquals(3, list.size());
  // Edits through the builder are visible in the previously returned list.
  builder.setRepeatedInt32(1, 4);
  assertEquals(4, list.get(1).intValue());
  // After clear() the old list keeps a snapshot of the pre-clear elements.
  builder.clearRepeatedInt32();
  assertEquals(3, list.size());
}
public void testGetByteArray() throws Exception {
  // Make sure it compiles with the MessageLite type.
  MessageLite data = TypicalData.newBuilder().setMyInt(42).build();
  byte[] bytes = data.toByteArray();
  // Field 1 varint tag (0x08) followed by the value 42 (0x2A).
  byte[] expected = new byte[]{ 0x08, 0x2A };
  checkBytes(expected, bytes);
}
// getUnmodifiable() returns a read-only *view*: extensions added to the
// backing registry afterwards must be visible through it.
public void testExtensionRegistryGetUnmodifiable() throws Exception {
  ExtensionRegistry registry = ExtensionRegistry.newInstance();
  ExtensionRegistry registry2 = registry.getUnmodifiable();
  registry.add(Typical.myPrimitiveExtension);
  // Extension added to registry should be visible in registry2.
  Descriptor descriptor = TypicalData.getDescriptor();
  ExtensionRegistry.ExtensionInfo extensionInfo =
      registry2.findExtensionByNumber(descriptor, 1001);
  assertNotNull(extensionInfo);
  // Compilation check for the Lite variant of the same API.
  ExtensionRegistryLite registryLite = ExtensionRegistryLite.newInstance();
  ExtensionRegistryLite registryLite2 = registryLite.getUnmodifiable();
}
// Each mergeFrom overload on MessageLite.Builder must reproduce the source
// message when fed its serialized bytes.
public void testMessageLiteToBuilderAndMergeFrom() throws Exception {
  TypicalData input = TypicalData.newBuilder().setMyInt(123).build();
  MessageLite msg = TypicalData.getDefaultInstance();
  // mergeFrom(byte[], ExtensionRegistryLite)
  MessageLite.Builder builder = msg.toBuilder();
  builder.mergeFrom(input.toByteString().toByteArray(), ExtensionRegistry.getEmptyRegistry());
  assertEquals(123, ((TypicalData) builder.build()).getMyInt());
  // mergeFrom(byte[])
  builder = msg.toBuilder();
  builder.mergeFrom(input.toByteString().toByteArray());
  assertEquals(123, ((TypicalData) builder.build()).getMyInt());
  // mergeFrom(ByteString, ExtensionRegistryLite)
  builder = msg.toBuilder();
  builder.mergeFrom(input.toByteString(), ExtensionRegistry.getEmptyRegistry());
  assertEquals(123, ((TypicalData) builder.build()).getMyInt());
  // mergeFrom(ByteString)
  builder = msg.toBuilder();
  builder.mergeFrom(input.toByteString());
  assertEquals(123, ((TypicalData) builder.build()).getMyInt());
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.kafka;
import java.io.Closeable;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.atomic.AtomicInteger;
import org.I0Itec.zkclient.ZkClient;
import com.google.common.collect.ImmutableMap;
import kafka.admin.AdminUtils;
import kafka.consumer.Consumer;
import kafka.consumer.ConsumerConfig;
import kafka.consumer.ConsumerIterator;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;
import kafka.server.KafkaConfig;
import kafka.server.KafkaServer;
import kafka.utils.MockTime;
import kafka.utils.Time;
import kafka.utils.ZKStringSerializer$;
import kafka.utils.ZkUtils;
import kafka.zk.EmbeddedZookeeper;
import lombok.extern.slf4j.Slf4j;
import org.apache.gobblin.test.TestUtils;
/**
 * A private class for starting a suite of servers for Kafka
 * Calls to start and shutdown are reference counted, so that the suite is started and shutdown in pairs.
 * A suite of servers (Zk, Kafka etc) will be started just once per process
 */
@Slf4j
class KafkaServerSuite {
  static KafkaServerSuite _instance;

  /**
   * Returns the process-wide singleton, creating it on first use.
   *
   * <p>Now {@code synchronized}: the original performed unsynchronized lazy
   * initialization, so two threads starting tests concurrently could each
   * observe {@code null} and create (and leak) a second server suite.
   */
  static synchronized KafkaServerSuite getInstance() {
    if (null == _instance) {
      _instance = new KafkaServerSuite();
    }
    return _instance;
  }

  private int _brokerId = 0;
  private EmbeddedZookeeper _zkServer;
  private ZkClient _zkClient;
  private KafkaServer _kafkaServer;
  private final int _kafkaServerPort;
  // "UNINITIALIZED_HOST_PORT" until start() brings up Zookeeper.
  private String _zkConnectString;
  // Reference count of start()/shutdown() pairs.
  private final AtomicInteger _numStarted;

  public ZkClient getZkClient() {
    return _zkClient;
  }

  public KafkaServer getKafkaServer() {
    return _kafkaServer;
  }

  public int getKafkaServerPort() {
    return _kafkaServerPort;
  }

  public String getZkConnectString() {
    return _zkConnectString;
  }

  private KafkaServerSuite() {
    _kafkaServerPort = TestUtils.findFreePort();
    _zkConnectString = "UNINITIALIZED_HOST_PORT";
    _numStarted = new AtomicInteger(0);
  }

  /**
   * Starts embedded Zookeeper plus a single Kafka broker on the first call;
   * subsequent calls only bump the reference count.
   */
  void start()
      throws RuntimeException {
    if (_numStarted.incrementAndGet() == 1) {
      log.warn("Starting up Kafka server suite. Zk at " + _zkConnectString + "; Kafka server at " + _kafkaServerPort);
      _zkServer = new EmbeddedZookeeper();
      _zkConnectString = "127.0.0.1:" + _zkServer.port();
      _zkClient = new ZkClient(_zkConnectString, 30000, 30000, ZKStringSerializer$.MODULE$);
      // Scala default arguments have to be spelled out explicitly from Java.
      Properties props = kafka.utils.TestUtils.createBrokerConfig(
          _brokerId,
          _zkConnectString,
          kafka.utils.TestUtils.createBrokerConfig$default$3(),
          kafka.utils.TestUtils.createBrokerConfig$default$4(),
          _kafkaServerPort,
          kafka.utils.TestUtils.createBrokerConfig$default$6(),
          kafka.utils.TestUtils.createBrokerConfig$default$7(),
          kafka.utils.TestUtils.createBrokerConfig$default$8(),
          kafka.utils.TestUtils.createBrokerConfig$default$9(),
          kafka.utils.TestUtils.createBrokerConfig$default$10(),
          kafka.utils.TestUtils.createBrokerConfig$default$11(),
          kafka.utils.TestUtils.createBrokerConfig$default$12(),
          kafka.utils.TestUtils.createBrokerConfig$default$13(),
          kafka.utils.TestUtils.createBrokerConfig$default$14()
      );
      KafkaConfig config = new KafkaConfig(props);
      Time mock = new MockTime();
      _kafkaServer = kafka.utils.TestUtils.createServer(config, mock);
    } else {
      log.info("Kafka server suite already started... continuing");
    }
  }

  /** Drops a reference; tears the servers down when the last reference is released. */
  void shutdown() {
    if (_numStarted.decrementAndGet() == 0) {
      log.info("Shutting down Kafka server suite");
      _kafkaServer.shutdown();
      _zkClient.close();
      _zkServer.shutdown();
    } else {
      log.info("Kafka server suite still in use ... not shutting down yet");
    }
  }
}
/**
 * Bundles a high-level Kafka consumer, its single stream and that stream's
 * iterator for one topic. One instance per topic; {@link #shutdown()} releases
 * the consumer connection.
 */
class KafkaConsumerSuite {
  private final ConsumerConnector _consumer;
  private final KafkaStream<byte[], byte[]> _stream;
  private final ConsumerIterator<byte[], byte[]> _iterator;
  private final String _topic;

  KafkaConsumerSuite(String zkConnectString, String topic)
  {
    _topic = topic;
    Properties consumeProps = new Properties();
    consumeProps.put("zookeeper.connect", zkConnectString);
    // Unique group id per instantiation so repeated test runs don't inherit offsets.
    consumeProps.put("group.id", _topic+"-"+System.nanoTime());
    consumeProps.put("zookeeper.session.timeout.ms", "10000");
    consumeProps.put("zookeeper.sync.time.ms", "10000");
    consumeProps.put("auto.commit.interval.ms", "10000");
    // NOTE(review): "_consumer.timeout.ms" is not a Kafka consumer property name;
    // it looks like a typo for "consumer.timeout.ms". As written the key is
    // ignored, so the iterator blocks indefinitely instead of timing out --
    // confirm intent before renaming.
    consumeProps.put("_consumer.timeout.ms", "10000");
    _consumer = Consumer.createJavaConsumerConnector(new ConsumerConfig(consumeProps));
    // Request exactly one stream for the topic and keep its iterator.
    Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap =
        _consumer.createMessageStreams(ImmutableMap.of(this._topic, 1));
    List<KafkaStream<byte[], byte[]>> streams = consumerMap.get(this._topic);
    _stream = streams.get(0);
    _iterator = _stream.iterator();
  }

  void shutdown()
  {
    _consumer.shutdown();
  }

  public ConsumerIterator<byte[],byte[]> getIterator() {
    return _iterator;
  }
}
/**
 * A Helper class for testing against Kafka
 * A suite of servers (Zk, Kafka etc) will be started just once per process
 * Consumer and iterator will be created per instantiation and is one instance per topic.
 */
public class KafkaTestBase implements Closeable {
  private final KafkaServerSuite _kafkaServerSuite;
  private final Map<String, KafkaConsumerSuite> _topicConsumerMap;

  public KafkaTestBase() throws InterruptedException, RuntimeException {
    this._kafkaServerSuite = KafkaServerSuite.getInstance();
    this._topicConsumerMap = new HashMap<>();
  }

  /** Acquires a reference on the shared server suite (starts it on first call). */
  public synchronized void startServers() {
    _kafkaServerSuite.start();
  }

  /** Releases a reference on the shared server suite (stops it on the last call). */
  public void stopServers() {
    _kafkaServerSuite.shutdown();
  }

  public void start() {
    startServers();
  }

  /** Shuts down all per-topic consumers and deletes their topics. */
  public void stopClients() throws IOException {
    for (Map.Entry<String, KafkaConsumerSuite> consumerSuiteEntry : _topicConsumerMap.entrySet()) {
      consumerSuiteEntry.getValue().shutdown();
      AdminUtils.deleteTopic(ZkUtils.apply(_kafkaServerSuite.getZkClient(), false),
          consumerSuiteEntry.getKey());
    }
    // Forget the consumers so a second stopClients() call is a no-op instead of
    // double-shutting-down consumers and re-deleting already-deleted topics.
    _topicConsumerMap.clear();
  }

  @Override
  public void close() throws IOException {
    // Always release the ref-counted server suite, even when a client shutdown
    // fails; the original skipped stopServers() on exception and leaked the
    // embedded Kafka/Zookeeper servers.
    try {
      stopClients();
    } finally {
      stopServers();
    }
  }

  /** Creates the topic (if unknown), waits for metadata, and attaches a consumer. */
  public void provisionTopic(String topic) {
    if (!_topicConsumerMap.containsKey(topic)) {
      AdminUtils.createTopic(ZkUtils.apply(_kafkaServerSuite.getZkClient(), false),
          topic, 1, 1, new Properties());
      List<KafkaServer> servers = new ArrayList<>();
      servers.add(_kafkaServerSuite.getKafkaServer());
      kafka.utils.TestUtils.waitUntilMetadataIsPropagated(scala.collection.JavaConversions.asScalaBuffer(servers), topic, 0, 5000);
      KafkaConsumerSuite consumerSuite = new KafkaConsumerSuite(_kafkaServerSuite.getZkConnectString(), topic);
      _topicConsumerMap.put(topic, consumerSuite);
    }
  }

  /**
   * Returns the iterator for a previously provisioned topic.
   *
   * @throws IllegalStateException if {@link #provisionTopic} was not called first
   */
  public ConsumerIterator<byte[], byte[]> getIteratorForTopic(String topic) {
    KafkaConsumerSuite consumerSuite = _topicConsumerMap.get(topic);
    if (consumerSuite == null) {
      // Also fixes the original message, which ran "topic" and the name together.
      throw new IllegalStateException("Could not find provisioned topic " + topic + ": call provisionTopic before");
    }
    return consumerSuite.getIterator();
  }

  public int getKafkaServerPort() {
    return _kafkaServerSuite.getKafkaServerPort();
  }
}
| |
/*
* Copyright (c) 2013, Effektif GmbH. All rights reserved.
*/
package com.heisenberg.impl.job;
import java.util.LinkedList;
import org.joda.time.LocalDateTime;
import org.joda.time.ReadablePeriod;
/**
 * A persistent unit of deferred work scheduled and executed by a {@link JobService}.
 * The class doubles as its own fluent builder: each {@code xxx(value)} method
 * sets a field and returns {@code this}.
 *
 * @author Tom Baeyens
 */
public class Job implements JobBuilder {

  // private static final Logger log = LoggerFactory.getLogger(Job.class);

  public JobService jobService;
  public JobType jobType;
  public String id;
  public String key;
  // When the job should next run.
  public LocalDateTime duedate;
  public Lock lock;
  public LinkedList<JobExecution> executions;
  /** retries left. null when no retries have been performed. 0 when this job has permanently failed. */
  public Long retries;
  public Long retryDelay;
  public LocalDateTime done;
  public Boolean dead;
  // References to the owning organization / process / workflow / activity / task.
  public String organizationId;
  public String processId;
  public String processDefinitionId;
  public String workflowInstanceId;
  public String activityInstanceId;
  public String taskId;

  public Job() {
  }

  public Job(JobService jobService, JobType jobType) {
    this.jobService = jobService;
    this.jobType = jobType;
  }

  /** Persists this job through the owning job service. */
  public void save() {
    jobService.saveJob(this);
  }

  /** setting the key means the job service will ensure there is
   * exactly 1 such job in the system.
   * (NOTE(review): the original comment said "id" but documents key(); "key" is
   * presumed intended -- confirm.) */
  public Job key(String key) {
    this.key = key;
    return this;
  }

  public Job activityInstanceId(String activityInstanceId) {
    this.activityInstanceId = activityInstanceId;
    return this;
  }

  public Job done(LocalDateTime done) {
    this.done = done;
    return this;
  }

  public Job duedate(LocalDateTime duedate) {
    this.duedate = duedate;
    return this;
  }

  public Job lock(Lock lock) {
    this.lock = lock;
    return this;
  }

  public Job organizationId(String organizationId) {
    this.organizationId = organizationId;
    return this;
  }

  public Job processId(String processId) {
    this.processId = processId;
    return this;
  }

  public Job processDefinitionId(String processDefinitionId) {
    this.processDefinitionId = processDefinitionId;
    return this;
  }

  // Note: stores into workflowInstanceId; "process instance" and "workflow
  // instance" are the same concept here.
  public Job processInstanceId(String processInstanceId) {
    this.workflowInstanceId = processInstanceId;
    return this;
  }

  public Job taskId(String taskId) {
    this.taskId = taskId;
    return this;
  }

  public Job jobType(JobType jobType) {
    this.jobType = jobType;
    return this;
  }

  public Boolean getDead() {
    return dead;
  }

  public void setDead(Boolean dead) {
    this.dead = dead;
  }

  /** Reschedules this job to run {@code period} from now. */
  public void rescheduleFromNow(ReadablePeriod period) {
    rescheduleFor(new LocalDateTime().plus(period));
  }

  public void rescheduleFor(LocalDateTime duedate) {
    this.duedate = duedate;
  }

  // Plain getters below.

  public JobService getJobService() {
    return jobService;
  }

  public JobType getJobType() {
    return jobType;
  }

  public String getId() {
    return id;
  }

  public String getKey() {
    return key;
  }

  public LocalDateTime getDuedate() {
    return duedate;
  }

  public Lock getLock() {
    return lock;
  }

  public LinkedList<JobExecution> getExecutions() {
    return executions;
  }

  public Long getRetries() {
    return retries;
  }

  public Long getRetryDelay() {
    return retryDelay;
  }

  public LocalDateTime getDone() {
    return done;
  }

  public String getOrganizationId() {
    return organizationId;
  }

  public String getProcessId() {
    return processId;
  }

  public String getProcessDefinitionId() {
    return processDefinitionId;
  }

  public String getWorkflowInstanceId() {
    return workflowInstanceId;
  }

  public String getActivityInstanceId() {
    return activityInstanceId;
  }

  public String getTaskId() {
    return taskId;
  }
}
| |
/*
* Copyright (C) 2015 HaiYang Li
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.landawn.abacus.http;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Reader;
import java.io.Writer;
import java.nio.charset.Charset;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import javax.net.ssl.SSLSocketFactory;
import com.landawn.abacus.exception.AbacusException;
import com.landawn.abacus.exception.UncheckedIOException;
import com.landawn.abacus.logging.Logger;
import com.landawn.abacus.logging.LoggerFactory;
import com.landawn.abacus.type.Type;
import com.landawn.abacus.util.BufferedReader;
import com.landawn.abacus.util.BufferedWriter;
import com.landawn.abacus.util.ByteArrayOutputStream;
import com.landawn.abacus.util.IOUtil;
import com.landawn.abacus.util.N;
import com.landawn.abacus.util.Objectory;
import com.landawn.abacus.util.URLEncodedUtil;
import okhttp3.ConnectionPool;
import okhttp3.MediaType;
import okhttp3.OkHttpClient;
import okhttp3.RequestBody;
/**
* Any header can be set into the parameter <code>settings</code>
*
* <br>HttpClient is thread safe.</br>
*
* @since 0.8
*
* @author Haiyang Li
*/
public final class OKHttpClient extends AbstractHttpClient {
static final Logger logger = LoggerFactory.getLogger(OKHttpClient.class);
// Cache of parsed MediaType instances keyed by the content-type string.
private static final Map<String, MediaType> mediaTypePool = new ConcurrentHashMap<>();
// Underlying OkHttp client; OkHttpClient itself is thread safe.
private final OkHttpClient client;
// In-flight request count; may be shared across OKHttpClient instances.
private final AtomicInteger _activeConnectionCounter;
/** Creates a client for {@code url} with the default connection limit. */
protected OKHttpClient(String url) {
  this(url, DEFAULT_MAX_CONNECTION);
}
/** Creates a client for {@code url} with default connect/read timeouts. */
protected OKHttpClient(String url, int maxConnection) {
  this(url, maxConnection, DEFAULT_CONNECTION_TIMEOUT, DEFAULT_READ_TIMEOUT);
}
/** Creates a client with explicit timeouts and no extra settings. */
protected OKHttpClient(String url, int maxConnection, long connTimeout, long readTimeout) {
  this(url, maxConnection, connTimeout, readTimeout, null);
}
/** Creates a client with its own (non-shared) active-connection counter. */
protected OKHttpClient(String url, int maxConnection, long connTimeout, long readTimeout, HttpSettings settings) throws UncheckedIOException {
  this(url, maxConnection, connTimeout, readTimeout, settings, new AtomicInteger(0));
}
/**
 * Primary constructor: builds the underlying {@code OkHttpClient} with the
 * requested timeouts, an idle connection pool, and an optional TLS socket
 * factory from {@code settings}.
 *
 * @param sharedActiveConnectionCounter in-flight request counter, possibly
 *        shared with other OKHttpClient instances to enforce a global limit
 */
protected OKHttpClient(String url, int maxConnection, long connTimeout, long readTimeout, HttpSettings settings,
    final AtomicInteger sharedActiveConnectionCounter) {
  super(url, maxConnection, connTimeout, readTimeout, settings);

  final SSLSocketFactory ssf = settings == null ? null : settings.getSSLSocketFactory();
  final OkHttpClient.Builder builder = new OkHttpClient.Builder();

  if (ssf != null) {
    // Bug fix: the original called builder.socketFactory(ssf). OkHttp's
    // socketFactory() is for plain sockets and rejects an SSLSocketFactory
    // with an IllegalArgumentException; TLS factories must go through
    // sslSocketFactory().
    builder.sslSocketFactory(ssf);
  }

  // Idle pool capped at 8 regardless of maxConnection; OkHttp still opens
  // connections on demand beyond the idle pool.
  this.client = builder.connectionPool(new ConnectionPool(Math.min(8, maxConnection), 5, TimeUnit.MINUTES))
      .connectTimeout(connTimeout, TimeUnit.MILLISECONDS)
      .readTimeout(readTimeout, TimeUnit.MILLISECONDS)
      .build();

  this._activeConnectionCounter = sharedActiveConnectionCounter;
}
/** Wraps an externally configured OkHttpClient; no settings. */
protected OKHttpClient(OkHttpClient client, String url, int maxConnection) {
  this(client, url, maxConnection, null);
}
/** Wraps an externally configured OkHttpClient with its own connection counter. */
protected OKHttpClient(OkHttpClient client, String url, int maxConnection, HttpSettings settings) throws UncheckedIOException {
  this(client, url, maxConnection, settings, new AtomicInteger(0));
}
/**
 * Wraps an externally configured OkHttpClient, adopting its timeouts and
 * sharing {@code sharedActiveConnectionCounter} across instances.
 */
protected OKHttpClient(OkHttpClient client, String url, int maxConnection, HttpSettings settings, final AtomicInteger sharedActiveConnectionCounter) {
  super(url, maxConnection, client.connectTimeoutMillis(), client.readTimeoutMillis(), settings);
  this.client = client;
  this._activeConnectionCounter = sharedActiveConnectionCounter;
}
/** Factory: client for {@code url} with all defaults. */
public static OKHttpClient create(String url) {
  return new OKHttpClient(url);
}
/** Factory: client with an explicit connection limit. */
public static OKHttpClient create(String url, int maxConnection) {
  return new OKHttpClient(url, maxConnection);
}
/** Factory: client with explicit timeouts and the default connection limit. */
public static OKHttpClient create(String url, long connTimeout, long readTimeout) {
  return new OKHttpClient(url, DEFAULT_MAX_CONNECTION, connTimeout, readTimeout);
}
/** Factory: client with explicit connection limit and timeouts. */
public static OKHttpClient create(String url, int maxConnection, long connTimeout, long readTimeout) {
  return new OKHttpClient(url, maxConnection, connTimeout, readTimeout);
}
/** Factory: client with explicit limits, timeouts and settings. */
public static OKHttpClient create(String url, int maxConnection, long connTimeout, long readTimeout, HttpSettings settings) throws UncheckedIOException {
  return new OKHttpClient(url, maxConnection, connTimeout, readTimeout, settings);
}
/** Factory: like the full constructor, with a shared in-flight request counter. */
public static OKHttpClient create(String url, int maxConnection, long connTimeout, long readTimeout, HttpSettings settings,
    final AtomicInteger sharedActiveConnectionCounter) {
  return new OKHttpClient(url, maxConnection, connTimeout, readTimeout, settings, sharedActiveConnectionCounter);
}
/** Factory: wraps an externally configured OkHttpClient. */
public static OKHttpClient create(OkHttpClient client, String url, int maxConnection) {
  return new OKHttpClient(client, url, maxConnection);
}
/** Factory: wraps an externally configured OkHttpClient with settings. */
public static OKHttpClient create(OkHttpClient client, String url, int maxConnection, HttpSettings settings) throws UncheckedIOException {
  return new OKHttpClient(client, url, maxConnection, settings);
}
/** Factory: wraps an OkHttpClient, sharing an in-flight request counter. */
public static OKHttpClient create(OkHttpClient client, String url, int maxConnection, HttpSettings settings,
    final AtomicInteger sharedActiveConnectionCounter) {
  return new OKHttpClient(client, url, maxConnection, settings, sharedActiveConnectionCounter);
}
/** Executes the request and deserializes the response body into {@code resultClass}. */
@Override
public <T> T execute(final Class<T> resultClass, final HttpMethod httpMethod, final Object request, final HttpSettings settings)
    throws UncheckedIOException {
  return execute(resultClass, null, null, httpMethod, request, settings);
}
/**
 * Executes the request and writes the (decoded) response body to {@code output}.
 *
 * Improvement: uses try-with-resources instead of a manual finally/IOUtil.close(),
 * so the stream is always closed and a failure while opening, writing or closing the
 * file is reported as an {@link UncheckedIOException} rather than silently swallowed.
 *
 * @param output destination file; created/overwritten
 * @throws UncheckedIOException if the file cannot be opened/closed or the HTTP call fails
 */
@Override
public void execute(final File output, final HttpMethod httpMethod, final Object request, final HttpSettings settings) throws UncheckedIOException {
    // FileNotFoundException is an IOException, so one catch covers the original case too;
    // the delegated execute(...) only throws unchecked exceptions, which pass through.
    try (OutputStream os = new FileOutputStream(output)) {
        execute(os, httpMethod, request, settings);
    } catch (IOException e) {
        throw new UncheckedIOException(e);
    }
}
/**
 * Executes the request and copies the (decoded) response body into {@code output}.
 * Nothing is returned; the stream is not closed by this method.
 */
@Override
public void execute(final OutputStream output, final HttpMethod httpMethod, final Object request, final HttpSettings settings) throws UncheckedIOException {
    execute(null, output, null, httpMethod, request, settings);
}
/**
 * Executes the request and copies the response body, decoded as characters, into {@code output}.
 * Nothing is returned; the writer is not closed by this method.
 */
@Override
public void execute(final Writer output, final HttpMethod httpMethod, final Object request, final HttpSettings settings) throws UncheckedIOException {
    execute(null, null, output, httpMethod, request, settings);
}
/**
 * Central pipeline backing every public {@code execute(...)} overload.
 *
 * Exactly one of {@code resultClass} / {@code outputStream} / {@code outputWriter} is
 * expected to describe the desired result form; callers pass {@code null} for the others.
 *
 * @param resultClass  target type for the response body, or null (String is returned then);
 *                     {@code okhttp3.Response.class} and {@code HttpResponse.class} are special-cased
 * @param outputStream if non-null, the decoded body is copied here and null is returned
 * @param outputWriter if non-null, the body is copied here as characters and null is returned
 * @param httpMethod   GET/DELETE encode {@code request} into the URL; POST/PUT send it as the body
 * @param request      payload: File, InputStream, Reader, a "serializable" simple value, or an
 *                     object serialized via the configured content format; may be null
 * @param settings     per-call settings; instance-level {@code _settings} is used when null
 * @return the converted response, or null for one-way and stream/writer calls
 * @throws UncheckedIOException on I/O failure, or on a non-2xx response unless the caller asked
 *         for the raw/full response object
 */
private <T> T execute(final Class<T> resultClass, final OutputStream outputStream, final Writer outputWriter, final HttpMethod httpMethod,
        final Object request, final HttpSettings settings) throws UncheckedIOException {
    // Admission control: the (possibly shared) counter caps concurrently active requests.
    if (_activeConnectionCounter.incrementAndGet() > _maxConnection) {
        _activeConnectionCounter.decrementAndGet();
        throw new AbacusException("Can not get connection, exceeded max connection number: " + _maxConnection);
    }
    final ContentFormat requestContentFormat = getContentFormat(settings);
    final String contentType = getContentType(settings);
    final String contentEncoding = getContentEncoding(settings);
    // Charset comes from the per-call headers when present, otherwise from the instance defaults.
    final Charset requestCharset = HTTP.getCharset(settings == null || settings.headers().isEmpty() ? _settings.headers() : settings.headers());
    okhttp3.Request httpRequest = null;
    okhttp3.Response httpResponse = null;
    // Cleared only when the raw okhttp3.Response is handed to the caller, who then owns closing it.
    boolean closeOkHttpResponse = true;
    try {
        // GET/DELETE: request object becomes query-string parameters; POST/PUT build a body below.
        final okhttp3.Request.Builder requestBuilder = new okhttp3.Request.Builder()
                .url((request != null && (httpMethod.equals(HttpMethod.GET) || httpMethod.equals(HttpMethod.DELETE))) ? URLEncodedUtil.encode(_url, request)
                        : _url);
        setHeaders(requestBuilder, settings == null ? _settings : settings);
        if (request != null && (httpMethod.equals(HttpMethod.POST) || httpMethod.equals(HttpMethod.PUT))) {
            MediaType mediaType = null;
            if (N.notNullOrEmpty(contentType)) {
                // Cache parsed MediaType instances; MediaType.parse() returns null for malformed input.
                mediaType = mediaTypePool.get(contentType);
                if (mediaType == null) {
                    mediaType = MediaType.parse(contentType);
                    if (mediaType != null) {
                        mediaTypePool.put(contentType, mediaType);
                    }
                }
            }
            RequestBody body = null;
            final Type<Object> type = N.typeOf(request.getClass());
            // The payload is fully buffered (and wrapped per requestContentFormat) before sending.
            final ByteArrayOutputStream bos = Objectory.createByteArrayOutputStream();
            try {
                final OutputStream os = HTTP.wrapOutputStream(bos, requestContentFormat);
                if (request instanceof File) {
                    try (InputStream fileInputStream = new FileInputStream((File) request)) {
                        IOUtil.write(os, fileInputStream);
                    }
                } else if (type.isInputStream()) {
                    IOUtil.write(os, (InputStream) request);
                } else if (type.isReader()) {
                    final BufferedWriter bw = Objectory.createBufferedWriter(new OutputStreamWriter(os, requestCharset));
                    try {
                        IOUtil.write(bw, (Reader) request);
                        bw.flush();
                    } finally {
                        Objectory.recycle(bw);
                    }
                } else {
                    if (type.isSerializable()) {
                        // Simple values: their string form, encoded with the request charset.
                        IOUtil.write(os, type.stringOf(request).getBytes(requestCharset));
                    } else {
                        // Complex values: serialized by the parser matching the content format.
                        HTTP.getParser(requestContentFormat).serialize(new OutputStreamWriter(os, requestCharset), request);
                    }
                }
                HTTP.flush(os);
                body = RequestBody.create(mediaType, bos.toByteArray());
            } finally {
                Objectory.recycle(bos);
            }
            requestBuilder.method(httpMethod.name(), body);
            if (N.notNullOrEmpty(contentType)) {
                requestBuilder.addHeader(HttpHeaders.Names.CONTENT_TYPE, contentType);
            }
            if (N.notNullOrEmpty(contentEncoding)) {
                requestBuilder.addHeader(HttpHeaders.Names.CONTENT_ENCODING, contentEncoding);
            }
        } else {
            requestBuilder.method(httpMethod.name(), null);
        }
        httpRequest = requestBuilder.build();
        // Synchronous call on the wrapped OkHttp client.
        httpResponse = client.newCall(httpRequest).execute();
        final Map<String, List<String>> respHeaders = httpResponse.headers().toMultimap();
        final Charset charset = HTTP.getCharset(respHeaders);
        final ContentFormat responseContentFormat = HTTP.getContentFormat(httpResponse.header(HttpHeaders.Names.CONTENT_TYPE),
                httpResponse.header(HttpHeaders.Names.CONTENT_ENCODING));
        // Wrap the body stream so the response content format (e.g. compression) is decoded transparently.
        final InputStream is = N.defaultIfNull(HTTP.wrapInputStream(httpResponse.body().byteStream(), responseContentFormat), N.emptyInputStream());
        // Fail fast on non-2xx, unless the caller explicitly asked for the raw/full response object.
        if (httpResponse.isSuccessful() == false
                && (resultClass == null || !(resultClass.equals(HttpResponse.class) || resultClass.equals(okhttp3.Response.class)))) {
            throw new UncheckedIOException(new IOException(httpResponse.code() + ": " + httpResponse.message() + ". " + IOUtil.readString(is, charset)));
        }
        if (isOneWayRequest(settings)) {
            // Fire-and-forget: the body is ignored.
            return null;
        } else if (resultClass != null && resultClass.equals(okhttp3.Response.class)) {
            // Transfer ownership of the response (and its open body) to the caller.
            closeOkHttpResponse = false;
            return (T) httpResponse;
        } else {
            if (outputStream != null) {
                IOUtil.write(outputStream, is, true);
                return null;
            } else if (outputWriter != null) {
                final BufferedReader br = Objectory.createBufferedReader(new InputStreamReader(is, charset));
                try {
                    IOUtil.write(outputWriter, br, true);
                } finally {
                    Objectory.recycle(br);
                }
                return null;
            } else {
                if (resultClass != null && resultClass.equals(HttpResponse.class)) {
                    // Full wrapper: timings, status, headers and the raw (decoded) body bytes.
                    return (T) new HttpResponse(httpResponse.sentRequestAtMillis(), httpResponse.receivedResponseAtMillis(), httpResponse.code(),
                            httpResponse.message(), respHeaders, IOUtil.readBytes(is), responseContentFormat);
                } else {
                    final Type<Object> type = resultClass == null ? null : N.typeOf(resultClass);
                    if (type == null) {
                        // No target type requested: return the body as a String.
                        return (T) IOUtil.readString(is, charset);
                    } else if (byte[].class.equals(resultClass)) {
                        return (T) IOUtil.readBytes(is);
                    } else if (type.isSerializable()) {
                        // Simple types: parsed from the body's string form.
                        return (T) type.valueOf(IOUtil.readString(is, charset));
                    } else {
                        // Complex types: deserialized by the parser matching the response format.
                        return HTTP.getParser(responseContentFormat).deserialize(resultClass, IOUtil.newBufferedReader(is, charset));
                    }
                }
            }
        }
    } catch (IOException e) {
        throw new UncheckedIOException(e);
    } finally {
        // Always release the connection slot; close the response unless ownership was transferred.
        _activeConnectionCounter.decrementAndGet();
        if (httpResponse != null && closeOkHttpResponse) {
            httpResponse.close();
        }
    }
}
/**
 * Copies the headers from {@code settings} onto the OkHttp request builder.
 * A multi-valued header (Collection value) replaces any existing value with its first
 * element and appends the remaining elements; a scalar value simply replaces.
 */
private void setHeaders(okhttp3.Request.Builder requestBuilder, HttpSettings settings) throws UncheckedIOException {
    final HttpHeaders headers = settings.headers();
    if (headers == null) {
        return;
    }
    for (final String headerName : headers.headerNameSet()) {
        final Object headerValue = headers.get(headerName);
        if (headerValue instanceof Collection) {
            boolean replaceFirst = true;
            for (final Object value : (Collection<Object>) headerValue) {
                if (replaceFirst) {
                    // header() wipes any previously set values for this name.
                    requestBuilder.header(headerName, N.stringOf(value));
                    replaceFirst = false;
                } else {
                    // Subsequent elements are appended as additional values.
                    requestBuilder.addHeader(headerName, N.stringOf(value));
                }
            }
        } else {
            requestBuilder.header(headerName, N.stringOf(headerValue));
        }
    }
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.bucket;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.QueryParsingException;
import org.elasticsearch.plugins.AbstractPlugin;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.ScriptService.ScriptType;
import org.elasticsearch.search.aggregations.Aggregation;
import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.filter.InternalFilter;
import org.elasticsearch.search.aggregations.bucket.script.NativeSignificanceScoreScriptNoParams;
import org.elasticsearch.search.aggregations.bucket.script.NativeSignificanceScoreScriptWithParams;
import org.elasticsearch.search.aggregations.bucket.significant.SignificantStringTerms;
import org.elasticsearch.search.aggregations.bucket.significant.SignificantTerms;
import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsAggregatorFactory;
import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsBuilder;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.ChiSquare;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.GND;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.MutualInformation;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.ScriptHeuristic;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicBuilder;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicParser;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicStreams;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificantTermsHeuristicModule;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.TransportSignificantTermsHeuristicModule;
import org.elasticsearch.search.aggregations.bucket.terms.StringTerms;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.aggregations.bucket.terms.TermsBuilder;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
import org.junit.Test;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS;
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
import static org.hamcrest.Matchers.closeTo;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.is;
/**
*
*/
@ElasticsearchIntegrationTest.ClusterScope(scope = ElasticsearchIntegrationTest.Scope.SUITE)
public class SignificantTermsSignificanceScoreTests extends ElasticsearchIntegrationTest {
// Index/type/field names shared by most tests in this class.
static final String INDEX_NAME = "testidx";
static final String DOC_TYPE = "doc";
// Field the significant-terms sub-aggregation runs on.
static final String TEXT_FIELD = "text";
// Class label used by the outer terms aggregation.
static final String CLASS_FIELD = "class";
/**
 * Registers the custom significance-heuristic plugin on every node and points
 * "path.conf" at the test data dir (presumably where the FILE-type scripts used
 * by the script-heuristic tests are resolved from — verify against ScriptService).
 */
@Override
public Settings nodeSettings(int nodeOrdinal) {
    return settingsBuilder()
            .put(super.nodeSettings(nodeOrdinal))
            .put("plugin.types", CustomSignificanceHeuristicPlugin.class.getName())
            .put("path.conf", this.getDataPath("config"))
            .build();
}
/**
 * Returns a random execution hint for the significant-terms aggregation,
 * or null (half of the time) to exercise the default execution mode.
 */
public String randomExecutionHint() {
    if (randomBoolean()) {
        return null;
    }
    return randomFrom(SignificantTermsAggregatorFactory.ExecutionMode.values()).toString();
}
/**
 * Verifies the plugin-registered "simple" heuristic end to end: each class bucket must
 * contain two significant terms, the matching term scoring 2.0 and the other 1.0.
 *
 * Improvement: the search request and the 16-line assertion loop were duplicated
 * verbatim; both are extracted into private helpers.
 */
@Test
public void testPlugin() throws Exception {
    String type = randomBoolean() ? "string" : "long";
    String settings = "{\"index.number_of_shards\": 1, \"index.number_of_replicas\": 0}";
    index01Docs(type, settings);
    SearchResponse response = searchWithSimpleHeuristic();
    assertSearchResponse(response);
    assertSimpleHeuristicScores(response);
    // we run the same test again but this time we do not call assertSearchResponse() before the assertions
    // the reason is that this would trigger toXContent and we would like to check that this has no potential side effects
    assertSimpleHeuristicScores(searchWithSimpleHeuristic());
}

/** Runs the class-terms aggregation with the SimpleHeuristic sub-aggregation. */
private SearchResponse searchWithSimpleHeuristic() {
    return client().prepareSearch(INDEX_NAME).setTypes(DOC_TYPE)
            .addAggregation(new TermsBuilder("class")
                    .field(CLASS_FIELD)
                    .subAggregation((new SignificantTermsBuilder("sig_terms"))
                            .field(TEXT_FIELD)
                            .significanceHeuristic(new SimpleHeuristic.SimpleHeuristicBuilder())
                            .minDocCount(1)
                    )
            )
            .execute()
            .actionGet();
}

/** Asserts the SimpleHeuristic bucket layout and scores for both class buckets. */
private void assertSimpleHeuristicScores(SearchResponse response) {
    StringTerms classes = (StringTerms) response.getAggregations().get("class");
    assertThat(classes.getBuckets().size(), equalTo(2));
    for (Terms.Bucket classBucket : classes.getBuckets()) {
        Map<String, Aggregation> aggs = classBucket.getAggregations().asMap();
        assertTrue(aggs.containsKey("sig_terms"));
        SignificantTerms agg = (SignificantTerms) aggs.get("sig_terms");
        assertThat(agg.getBuckets().size(), equalTo(2));
        Iterator<SignificantTerms.Bucket> bucketIterator = agg.iterator();
        // Highest-scoring bucket first: the term equal to the class label scores 2.0 ...
        SignificantTerms.Bucket sigBucket = bucketIterator.next();
        String term = sigBucket.getKeyAsString();
        String classTerm = classBucket.getKeyAsString();
        assertTrue(term.equals(classTerm));
        assertThat(sigBucket.getSignificanceScore(), closeTo(2.0, 1.e-8));
        // ... and the remaining term scores 1.0.
        sigBucket = bucketIterator.next();
        assertThat(sigBucket.getSignificanceScore(), closeTo(1.0, 1.e-8));
    }
}
/**
 * Test plugin wiring up the custom "simple" heuristic (parser + transport stream)
 * and the two native significance-score scripts referenced by name in the script tests.
 */
public static class CustomSignificanceHeuristicPlugin extends AbstractPlugin {
    @Override
    public String name() {
        return "test-plugin-significance-heuristic";
    }
    @Override
    public String description() {
        return "Significance heuristic plugin";
    }
    // REST side: lets search requests parse the "simple" heuristic.
    public void onModule(SignificantTermsHeuristicModule significanceModule) {
        significanceModule.registerParser(SimpleHeuristic.SimpleHeuristicParser.class);
    }
    // Transport side: lets nodes (de)serialize the heuristic over the wire.
    public void onModule(TransportSignificantTermsHeuristicModule significanceModule) {
        significanceModule.registerStream(SimpleHeuristic.STREAM);
    }
    // Registers the native (Java) scripts, with and without parameters.
    public void onModule(ScriptModule module) {
        module.registerScript(NativeSignificanceScoreScriptNoParams.NATIVE_SIGNIFICANCE_SCORE_SCRIPT_NO_PARAMS, NativeSignificanceScoreScriptNoParams.Factory.class);
        module.registerScript(NativeSignificanceScoreScriptWithParams.NATIVE_SIGNIFICANCE_SCORE_SCRIPT_WITH_PARAMS, NativeSignificanceScoreScriptWithParams.Factory.class);
    }
}
/**
 * Minimal custom significance heuristic used by testPlugin(): scores 2.0 when the term's
 * subset ratio exceeds its superset ratio, otherwise 1.0. It is stateless, so wire
 * (de)serialization consists of the stream name only.
 */
public static class SimpleHeuristic extends SignificanceHeuristic {
    protected static final String[] NAMES = {"simple"};
    // Transport stream: rebuilds the heuristic on the receiving node from its name.
    public static final SignificanceHeuristicStreams.Stream STREAM = new SignificanceHeuristicStreams.Stream() {
        @Override
        public SignificanceHeuristic readResult(StreamInput in) throws IOException {
            return readFrom(in);
        }
        @Override
        public String getName() {
            return NAMES[0];
        }
    };
    // Stateless: nothing needs to be read from the stream.
    public static SignificanceHeuristic readFrom(StreamInput in) throws IOException {
        return new SimpleHeuristic();
    }
    /**
     * @param subsetFreq The frequency of the term in the selected sample
     * @param subsetSize The size of the selected sample (typically number of docs)
     * @param supersetFreq The frequency of the term in the superset from which the sample was taken
     * @param supersetSize The size of the superset from which the sample was taken (typically number of docs)
     * @return a "significance" score
     */
    @Override
    public double getScore(long subsetFreq, long subsetSize, long supersetFreq, long supersetSize) {
        // NOTE(review): these are truncating long divisions — each ratio is 1 only when
        // freq == size, otherwise 0. The tests depend on the resulting 2.0/1.0 split;
        // presumably intentional for this toy heuristic, but confirm before reusing.
        return subsetFreq / subsetSize > supersetFreq / supersetSize ? 2.0 : 1.0;
    }
    @Override
    public void writeTo(StreamOutput out) throws IOException {
        // Only the name goes over the wire; readFrom() reconstructs the heuristic.
        out.writeString(STREAM.getName());
    }
    // Parses the (empty) {"simple": {}} heuristic object from a search request.
    public static class SimpleHeuristicParser implements SignificanceHeuristicParser {
        @Override
        public SignificanceHeuristic parse(XContentParser parser) throws IOException, QueryParsingException {
            parser.nextToken();
            return new SimpleHeuristic();
        }
        @Override
        public String[] getNames() {
            return NAMES;
        }
    }
    // Renders the heuristic as {"simple": {}} when building a search request.
    public static class SimpleHeuristicBuilder implements SignificanceHeuristicBuilder {
        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.startObject(STREAM.getName()).endObject();
            return builder;
        }
    }
}
/**
 * Checks both the bucket structure of the default-heuristic response and its exact
 * XContent (JSON) rendering, for string and long field types.
 */
@Test
public void testXContentResponse() throws Exception {
    String type = randomBoolean() ? "string" : "long";
    String settings = "{\"index.number_of_shards\": 1, \"index.number_of_replicas\": 0}";
    index01Docs(type, settings);
    SearchResponse response = client().prepareSearch(INDEX_NAME).setTypes(DOC_TYPE)
            .addAggregation(new TermsBuilder("class").field(CLASS_FIELD).subAggregation(new SignificantTermsBuilder("sig_terms").field(TEXT_FIELD)))
            .execute()
            .actionGet();
    assertSearchResponse(response);
    StringTerms classes = (StringTerms) response.getAggregations().get("class");
    assertThat(classes.getBuckets().size(), equalTo(2));
    for (Terms.Bucket classBucket : classes.getBuckets()) {
        Map<String, Aggregation> aggs = classBucket.getAggregations().asMap();
        assertTrue(aggs.containsKey("sig_terms"));
        SignificantTerms agg = (SignificantTerms) aggs.get("sig_terms");
        // With the default heuristic only the term matching the class label is significant.
        assertThat(agg.getBuckets().size(), equalTo(1));
        String term = agg.iterator().next().getKeyAsString();
        String classTerm = classBucket.getKeyAsString();
        assertTrue(term.equals(classTerm));
    }
    // Render the aggregation to JSON and compare against the exact expected string;
    // the long variant additionally carries "key_as_string" entries.
    XContentBuilder responseBuilder = XContentFactory.jsonBuilder();
    classes.toXContent(responseBuilder, null);
    String result = null;
    if (type.equals("long")) {
        result = "\"class\"{\"doc_count_error_upper_bound\":0,\"sum_other_doc_count\":0,\"buckets\":[{\"key\":\"0\",\"doc_count\":4,\"sig_terms\":{\"doc_count\":4,\"buckets\":[{\"key\":0,\"key_as_string\":\"0\",\"doc_count\":4,\"score\":0.39999999999999997,\"bg_count\":5}]}},{\"key\":\"1\",\"doc_count\":3,\"sig_terms\":{\"doc_count\":3,\"buckets\":[{\"key\":1,\"key_as_string\":\"1\",\"doc_count\":3,\"score\":0.75,\"bg_count\":4}]}}]}";
    } else {
        result = "\"class\"{\"doc_count_error_upper_bound\":0,\"sum_other_doc_count\":0,\"buckets\":[{\"key\":\"0\",\"doc_count\":4,\"sig_terms\":{\"doc_count\":4,\"buckets\":[{\"key\":\"0\",\"doc_count\":4,\"score\":0.39999999999999997,\"bg_count\":5}]}},{\"key\":\"1\",\"doc_count\":3,\"sig_terms\":{\"doc_count\":3,\"buckets\":[{\"key\":\"1\",\"doc_count\":3,\"score\":0.75,\"bg_count\":4}]}}]}";
    }
    assertThat(responseBuilder.string(), equalTo(result));
}
/**
 * Regression test for https://github.com/elasticsearch/elasticsearch/issues/7951:
 * repeated updates of the same doc create deletes/holes in the index, and scoring
 * algorithms used to throw when term docFreqs exceeded the reported index size.
 * The test passes if the final search completes without throwing.
 *
 * Improvement: the search response was assigned to an unused local; the assignment
 * is removed since only successful execution matters here.
 */
@Test
public void testDeletesIssue7951() throws Exception {
    String settings = "{\"index.number_of_shards\": 1, \"index.number_of_replicas\": 0}";
    String mappings = "{\"doc\": {\"properties\":{\"text\": {\"type\":\"string\",\"index\":\"not_analyzed\"}}}}";
    assertAcked(prepareCreate(INDEX_NAME).setSettings(settings).addMapping("doc", mappings));
    String[] cat1v1 = {"constant", "one"};
    String[] cat1v2 = {"constant", "uno"};
    String[] cat2v1 = {"constant", "two"};
    String[] cat2v2 = {"constant", "duo"};
    List<IndexRequestBuilder> indexRequestBuilderList = new ArrayList<>();
    indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "1")
            .setSource(TEXT_FIELD, cat1v1, CLASS_FIELD, "1"));
    indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "2")
            .setSource(TEXT_FIELD, cat1v2, CLASS_FIELD, "1"));
    indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "3")
            .setSource(TEXT_FIELD, cat2v1, CLASS_FIELD, "2"));
    indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "4")
            .setSource(TEXT_FIELD, cat2v2, CLASS_FIELD, "2"));
    indexRandom(true, false, indexRequestBuilderList);
    // Now create some holes in the index with selective deletes caused by updates.
    // This is the scenario that caused this issue https://github.com/elasticsearch/elasticsearch/issues/7951
    // Scoring algorithms throw exceptions if term docFreqs exceed the reported size of the index
    // from which they are taken so need to make sure this doesn't happen.
    String[] text = cat1v1;
    indexRequestBuilderList.clear();
    for (int i = 0; i < 50; i++) {
        // Alternate between the two variants so each reindex of doc "1" deletes the previous version.
        text = text == cat1v2 ? cat1v1 : cat1v2;
        indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "1").setSource(TEXT_FIELD, text, CLASS_FIELD, "1"));
    }
    indexRandom(true, false, indexRequestBuilderList);
    client().prepareSearch(INDEX_NAME).setTypes(DOC_TYPE)
            .addAggregation(new TermsBuilder("class")
                    .field(CLASS_FIELD)
                    .subAggregation(
                            new SignificantTermsBuilder("sig_terms")
                                    .field(TEXT_FIELD)
                                    .minDocCount(1)))
            .execute()
            .actionGet();
}
/**
 * Runs the background-vs-separate-set equivalence check (the two-argument overload below)
 * for MutualInformation, ChiSquare and GND. In each pair the last boolean flag differs —
 * presumably it toggles whether the background is treated as a superset; confirm against
 * the heuristic builders' signatures.
 */
@Test
public void testBackgroundVsSeparateSet() throws Exception {
    String type = randomBoolean() ? "string" : "long";
    String settings = "{\"index.number_of_shards\": 1, \"index.number_of_replicas\": 0}";
    index01Docs(type, settings);
    testBackgroundVsSeparateSet(new MutualInformation.MutualInformationBuilder(true, true), new MutualInformation.MutualInformationBuilder(true, false));
    testBackgroundVsSeparateSet(new ChiSquare.ChiSquareBuilder(true, true), new ChiSquare.ChiSquareBuilder(true, false));
    testBackgroundVsSeparateSet(new GND.GNDBuilder(true), new GND.GNDBuilder(false));
}
// compute significance score by
// 1. terms agg on class and significant terms
// 2. filter buckets and set the background to the other class and set is_background false
// both should yield exact same result
/**
 * Improvement: the deeply nested cast chains for pulling a significance score out of each
 * response were repeated four times; they are extracted into two small private helpers.
 *
 * @param significanceHeuristicExpectingSuperset      heuristic configured for a superset background
 * @param significanceHeuristicExpectingSeparateSets  same heuristic configured for a disjoint background
 */
public void testBackgroundVsSeparateSet(SignificanceHeuristicBuilder significanceHeuristicExpectingSuperset, SignificanceHeuristicBuilder significanceHeuristicExpectingSeparateSets) throws Exception {
    // Setup 1: terms agg on class, background = whole index (superset).
    SearchResponse response1 = client().prepareSearch(INDEX_NAME).setTypes(DOC_TYPE)
            .addAggregation(new TermsBuilder("class")
                    .field(CLASS_FIELD)
                    .subAggregation(
                            new SignificantTermsBuilder("sig_terms")
                                    .field(TEXT_FIELD)
                                    .minDocCount(1)
                                    .significanceHeuristic(
                                            significanceHeuristicExpectingSuperset)))
            .execute()
            .actionGet();
    assertSearchResponse(response1);
    // Setup 2: one filter agg per class, background explicitly set to the other class.
    SearchResponse response2 = client().prepareSearch(INDEX_NAME).setTypes(DOC_TYPE)
            .addAggregation((new FilterAggregationBuilder("0"))
                    .filter(QueryBuilders.termQuery(CLASS_FIELD, "0"))
                    .subAggregation(new SignificantTermsBuilder("sig_terms")
                            .field(TEXT_FIELD)
                            .minDocCount(1)
                            .backgroundFilter(QueryBuilders.termQuery(CLASS_FIELD, "1"))
                            .significanceHeuristic(significanceHeuristicExpectingSeparateSets)))
            .addAggregation((new FilterAggregationBuilder("1"))
                    .filter(QueryBuilders.termQuery(CLASS_FIELD, "1"))
                    .subAggregation(new SignificantTermsBuilder("sig_terms")
                            .field(TEXT_FIELD)
                            .minDocCount(1)
                            .backgroundFilter(QueryBuilders.termQuery(CLASS_FIELD, "0"))
                            .significanceHeuristic(significanceHeuristicExpectingSeparateSets)))
            .execute()
            .actionGet();
    SignificantTerms sigTerms0 = backgroundSigTerms(response1, "0");
    assertThat(sigTerms0.getBuckets().size(), equalTo(2));
    double score00Background = sigTerms0.getBucketByKey("0").getSignificanceScore();
    double score01Background = sigTerms0.getBucketByKey("1").getSignificanceScore();
    SignificantTerms sigTerms1 = backgroundSigTerms(response1, "1");
    double score10Background = sigTerms1.getBucketByKey("0").getSignificanceScore();
    double score11Background = sigTerms1.getBucketByKey("1").getSignificanceScore();
    // The two setups must produce exactly the same four scores.
    assertThat(score00Background, equalTo(separateSetScore(response2, "0", "0")));
    assertThat(score01Background, equalTo(separateSetScore(response2, "0", "1")));
    assertThat(score10Background, equalTo(separateSetScore(response2, "1", "0")));
    assertThat(score11Background, equalTo(separateSetScore(response2, "1", "1")));
}

/** Extracts the "sig_terms" aggregation under the given class bucket of the terms agg. */
private SignificantTerms backgroundSigTerms(SearchResponse response, String classKey) {
    return (SignificantTerms) ((StringTerms) response.getAggregations().get("class"))
            .getBucketByKey(classKey).getAggregations().asMap().get("sig_terms");
}

/** Extracts the score of {@code term} from the "sig_terms" agg under the named filter agg. */
private double separateSetScore(SearchResponse response, String filterName, String term) {
    InternalFilter filter = (InternalFilter) response.getAggregations().get(filterName);
    SignificantTerms sigTerms = (SignificantTerms) filter.getAggregations().getAsMap().get("sig_terms");
    return sigTerms.getBucketByKey(term).getSignificanceScore();
}
/**
 * Indexes the fixed seven-doc corpus used by several tests: docs 1-2 are text "1"/class "1",
 * docs 3-4 and 7 are text "0"/class "0", and docs 5-6 contain both terms with class "1"/"0".
 */
private void index01Docs(String type, String settings) throws ExecutionException, InterruptedException {
    String mappings = "{\"doc\": {\"properties\":{\"text\": {\"type\":\"" + type + "\"}}}}";
    assertAcked(prepareCreate(INDEX_NAME).setSettings(settings).addMapping("doc", mappings));
    String[] gb = {"0", "1"};
    // Each row: { doc id, text value (String or String[]), class value }.
    Object[][] docs = {
            {"1", "1", "1"},
            {"2", "1", "1"},
            {"3", "0", "0"},
            {"4", "0", "0"},
            {"5", gb, "1"},
            {"6", gb, "0"},
            {"7", "0", "0"},
    };
    List<IndexRequestBuilder> builders = new ArrayList<>();
    for (Object[] doc : docs) {
        builders.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, (String) doc[0])
                .setSource(TEXT_FIELD, doc[1], CLASS_FIELD, doc[2]));
    }
    indexRandom(true, false, builders);
}
/**
 * Indexes the symmetric two-class corpus and checks, for MutualInformation and ChiSquare,
 * that both classes produce identical scores (see the single-argument overload below).
 */
@Test
public void testScoresEqualForPositiveAndNegative() throws Exception {
    indexEqualTestData();
    testScoresEqualForPositiveAndNegative(new MutualInformation.MutualInformationBuilder(true, true));
    testScoresEqualForPositiveAndNegative(new ChiSquare.ChiSquareBuilder(true, true));
}
/**
 * Asserts that both class buckets yield the same significant-terms scores for the given
 * heuristic, by walking the two buckets' term lists in lock-step and comparing pairwise.
 */
public void testScoresEqualForPositiveAndNegative(SignificanceHeuristicBuilder heuristic) throws Exception {
    //check that results for both classes are the same with exclude negatives = false and classes are routing ids
    SearchResponse response = client().prepareSearch("test")
            .addAggregation(new TermsBuilder("class").field("class").subAggregation(new SignificantTermsBuilder("mySignificantTerms")
                    .field("text")
                    .executionHint(randomExecutionHint())
                    .significanceHeuristic(heuristic)
                    .minDocCount(1).shardSize(1000).size(1000)))
            .execute()
            .actionGet();
    assertSearchResponse(response);
    StringTerms classes = (StringTerms) response.getAggregations().get("class");
    assertThat(classes.getBuckets().size(), equalTo(2));
    Iterator<Terms.Bucket> classBuckets = classes.getBuckets().iterator();
    // Pairwise comparison relies on both term lists having the same order and length;
    // the symmetric test data (indexEqualTestData) is built to guarantee that.
    Collection<SignificantTerms.Bucket> classA = ((SignificantTerms) classBuckets.next().getAggregations().get("mySignificantTerms")).getBuckets();
    Iterator<SignificantTerms.Bucket> classBBucketIterator = ((SignificantTerms) classBuckets.next().getAggregations().get("mySignificantTerms")).getBuckets().iterator();
    assertThat(classA.size(), greaterThan(0));
    for (SignificantTerms.Bucket classABucket : classA) {
        SignificantTerms.Bucket classBBucket = classBBucketIterator.next();
        assertThat(classABucket.getKey(), equalTo(classBBucket.getKey()));
        assertThat(classABucket.getSignificanceScore(), closeTo(classBBucket.getSignificanceScore(), 1.e-5));
    }
}
/**
 * Indexes a corpus in which class A's term distribution mirrors class B's, so both classes
 * must produce identical significance scores. Each row is "<class>\t<text>".
 */
private void indexEqualTestData() throws ExecutionException, InterruptedException {
    assertAcked(prepareCreate("test").setSettings(SETTING_NUMBER_OF_SHARDS, 1, SETTING_NUMBER_OF_REPLICAS, 0).addMapping("doc",
            "text", "type=string", "class", "type=string"));
    createIndex("idx_unmapped");
    ensureGreen();
    String data[] = {
            "A\ta",
            "A\ta",
            "A\tb",
            "A\tb",
            "A\tb",
            "B\tc",
            "B\tc",
            "B\tc",
            "B\tc",
            "B\td",
            "B\td",
            "B\td",
            "B\td",
            "B\td",
            "A\tc d",
            "B\ta b"
    };
    List<IndexRequestBuilder> builders = new ArrayList<>();
    int docId = 0;
    for (String row : data) {
        // Class label precedes the first tab; everything after it is the text value.
        int tab = row.indexOf('\t');
        builders.add(client().prepareIndex("test", "doc", String.valueOf(docId++))
                .setSource("class", row.substring(0, tab), "text", row.substring(tab + 1)));
    }
    indexRandom(true, false, builders);
}
/**
 * Verifies script-based heuristics: every generated script computes
 * _subset_freq + _subset_size + _superset_freq + _superset_size, so each bucket's
 * reported score must equal that sum of its own frequency statistics.
 */
@Test
public void testScriptScore() throws ExecutionException, InterruptedException, IOException {
    indexRandomFrequencies01(randomBoolean() ? "string" : "long");
    // Random script flavor (inline/indexed/file/native), with or without params.
    ScriptHeuristic.ScriptHeuristicBuilder scriptHeuristicBuilder = getScriptSignificanceHeuristicBuilder();
    ensureYellow();
    SearchResponse response = client().prepareSearch(INDEX_NAME)
            .addAggregation(new TermsBuilder("class").field(CLASS_FIELD).subAggregation(new SignificantTermsBuilder("mySignificantTerms")
                    .field(TEXT_FIELD)
                    .executionHint(randomExecutionHint())
                    .significanceHeuristic(scriptHeuristicBuilder)
                    .minDocCount(1).shardSize(2).size(2)))
            .execute()
            .actionGet();
    assertSearchResponse(response);
    for (Terms.Bucket classBucket : ((Terms) response.getAggregations().get("class")).getBuckets()) {
        for (SignificantTerms.Bucket bucket : ((SignificantTerms) classBucket.getAggregations().get("mySignificantTerms")).getBuckets()) {
            assertThat(bucket.getSignificanceScore(), is((double) bucket.getSubsetDf() + bucket.getSubsetSize() + bucket.getSupersetDf() + bucket.getSupersetSize()));
        }
    }
}
/**
 * Regression test: an inline script heuristic on the default scripting engine must not
 * trigger a NumberFormatException; each bucket's score must match the script's formula
 * _subset_freq / (_superset_freq - _subset_freq + 1).
 */
@Test
public void testNoNumberFormatExceptionWithDefaultScriptingEngine() throws ExecutionException, InterruptedException, IOException {
    assertAcked(client().admin().indices().prepareCreate("test").setSettings(Settings.builder().put("index.number_of_shards", 1)));
    // Three term patterns, two docs each: "a", "b", and "a b".
    index("test", "doc", "1", "{\"field\":\"a\"}");
    index("test", "doc", "11", "{\"field\":\"a\"}");
    index("test", "doc", "2", "{\"field\":\"b\"}");
    index("test", "doc", "22", "{\"field\":\"b\"}");
    index("test", "doc", "3", "{\"field\":\"a b\"}");
    index("test", "doc", "33", "{\"field\":\"a b\"}");
    ScriptHeuristic.ScriptHeuristicBuilder scriptHeuristicBuilder = new ScriptHeuristic.ScriptHeuristicBuilder();
    scriptHeuristicBuilder.setScript(new Script("_subset_freq/(_superset_freq - _subset_freq + 1)"));
    ensureYellow();
    refresh();
    SearchResponse response = client()
            .prepareSearch("test")
            .addAggregation(
                    new TermsBuilder("letters").field("field").subAggregation(
                            new SignificantTermsBuilder("mySignificantTerms").field("field").executionHint(randomExecutionHint())
                                    .significanceHeuristic(scriptHeuristicBuilder).minDocCount(1).shardSize(2).size(2))).execute()
            .actionGet();
    assertSearchResponse(response);
    assertThat(((Terms) response.getAggregations().get("letters")).getBuckets().size(), equalTo(2));
    for (Terms.Bucket classBucket : ((Terms) response.getAggregations().get("letters")).getBuckets()) {
        assertThat(((SignificantStringTerms) classBucket.getAggregations().get("mySignificantTerms")).getBuckets().size(), equalTo(2));
        for (SignificantTerms.Bucket bucket : ((SignificantTerms) classBucket.getAggregations().get("mySignificantTerms")).getBuckets()) {
            // Recompute the script's formula from the bucket's own statistics.
            assertThat(bucket.getSignificanceScore(),
                    closeTo((double) bucket.getSubsetDf() / (bucket.getSupersetDf() - bucket.getSubsetDf() + 1), 1.e-6));
        }
    }
}
/**
 * Builds a {@link ScriptHeuristic.ScriptHeuristicBuilder} backed by one of four randomly
 * chosen script flavors (inline, indexed, file-based or native), optionally with parameters
 * and an explicit "groovy" language, so callers exercise every script-resolution code path.
 *
 * <p>Each flavor computes the same value:
 * {@code _subset_freq + _subset_size + _superset_freq + _superset_size}
 * (the parameterized variants multiply and divide by {@code param}, a no-op).
 *
 * @return a heuristic builder wrapping the randomly chosen script
 * @throws IOException if building the JSON source for the indexed script fails
 */
private ScriptHeuristic.ScriptHeuristicBuilder getScriptSignificanceHeuristicBuilder() throws IOException {
    Map<String, Object> params = null;
    Script script = null;
    String lang = null;
    if (randomBoolean()) {
        params = new HashMap<>();
        params.put("param", randomIntBetween(1, 100));
    }
    int randomScriptKind = randomIntBetween(0, 3);
    if (randomBoolean()) {
        lang = "groovy";
    }
    switch (randomScriptKind) {
    case 0: {
        // Inline script, with or without the no-op "param" factor.
        if (params == null) {
            script = new Script("return _subset_freq + _subset_size + _superset_freq + _superset_size");
        } else {
            script = new Script("return param*(_subset_freq + _subset_size + _superset_freq + _superset_size)/param",
                    ScriptType.INLINE, lang, params);
        }
        break;
    }
    case 1: {
        // Indexed script: store the source in the script index first, then reference it by id.
        String scriptString;
        if (params == null) {
            scriptString = "return _subset_freq + _subset_size + _superset_freq + _superset_size";
        } else {
            scriptString = "return param*(_subset_freq + _subset_size + _superset_freq + _superset_size)/param";
        }
        client().prepareIndex().setIndex(ScriptService.SCRIPT_INDEX).setType(ScriptService.DEFAULT_LANG).setId("my_script")
                .setSource(XContentFactory.jsonBuilder().startObject().field("script", scriptString).endObject()).get();
        refresh();
        script = new Script("my_script", ScriptType.INDEXED, lang, params);
        break;
    }
    case 2: {
        // File-based script shipped with the test configuration.
        if (params == null) {
            script = new Script("significance_script_no_params", ScriptType.FILE, lang, null);
        } else {
            script = new Script("significance_script_with_params", ScriptType.FILE, lang, params);
        }
        break;
    }
    case 3: {
        // Native (Java) script registered by the test plugin; the language is the
        // literal "native" regardless of the randomly chosen lang above.
        logger.info("NATIVE SCRIPT");
        if (params == null) {
            script = new Script("native_significance_score_script_no_params", ScriptType.INLINE, "native", null);
        } else {
            script = new Script("native_significance_score_script_with_params", ScriptType.INLINE, "native", params);
        }
        break;
    }
    }
    return new ScriptHeuristic.ScriptHeuristicBuilder().setScript(script);
}
/**
 * Creates {@code INDEX_NAME} with {@code TEXT_FIELD} mapped to the given type and indexes a
 * random number (0..20) of documents whose text is "0", "1", or both, each randomly assigned
 * to class "one" or "zero".
 *
 * @param type the mapping type to use for {@code TEXT_FIELD} (e.g. "string")
 */
private void indexRandomFrequencies01(String type) throws ExecutionException, InterruptedException {
    String mappings = "{\"" + DOC_TYPE + "\": {\"properties\":{\"" + TEXT_FIELD + "\": {\"type\":\"" + type + "\"}}}}";
    assertAcked(prepareCreate(INDEX_NAME).addMapping(DOC_TYPE, mappings));
    String[] gb = {"0", "1"};
    List<IndexRequestBuilder> indexRequestBuilderList = new ArrayList<>();
    // Draw the document count once. The original loop condition called randomInt(20) on
    // every iteration, which re-rolled the bound each pass and skewed the distribution
    // of document counts (and made the iteration count depend on repeated draws).
    int numDocs = randomInt(20);
    for (int i = 0; i < numDocs; i++) {
        int randNum = randomInt(2); // 0 or 1 selects a single term; 2 selects both terms
        String[] text = new String[1];
        if (randNum == 2) {
            text = gb;
        } else {
            text[0] = gb[randNum];
        }
        indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE)
                .setSource(TEXT_FIELD, text, CLASS_FIELD, randomBoolean() ? "one" : "zero"));
    }
    indexRandom(true, indexRequestBuilderList);
}
}
| |
/*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package git4idea.history;
import com.intellij.dvcs.DvcsUtil;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.actionSystem.impl.SimpleDataContext;
import com.intellij.openapi.components.ServiceManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.Task;
import com.intellij.openapi.project.DumbAwareAction;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.MessageType;
import com.intellij.openapi.ui.popup.JBPopupFactory;
import com.intellij.openapi.ui.popup.ListPopup;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vcs.FilePath;
import com.intellij.openapi.vcs.VcsDataKeys;
import com.intellij.openapi.vcs.VcsException;
import com.intellij.openapi.vcs.changes.Change;
import com.intellij.openapi.vcs.changes.ContentRevision;
import com.intellij.openapi.vcs.history.BaseDiffFromHistoryHandler;
import com.intellij.openapi.vcs.history.DiffFromHistoryHandler;
import com.intellij.openapi.vcs.history.VcsFileRevision;
import com.intellij.openapi.vcs.history.VcsHistorySession;
import com.intellij.openapi.vcs.ui.VcsBalloonProblemNotifier;
import com.intellij.ui.awt.RelativePoint;
import com.intellij.util.ArrayUtil;
import com.intellij.util.Consumer;
import com.intellij.util.containers.ContainerUtil;
import git4idea.GitFileRevision;
import git4idea.GitRevisionNumber;
import git4idea.GitUtil;
import git4idea.changes.GitChangeUtils;
import git4idea.commands.Git;
import git4idea.commands.GitCommandResult;
import git4idea.repo.GitRepository;
import git4idea.repo.GitRepositoryManager;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.awt.event.MouseEvent;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
/**
* {@link DiffFromHistoryHandler#showDiffForTwo(com.intellij.openapi.project.Project, com.intellij.openapi.vcs.FilePath, com.intellij.openapi.vcs.history.VcsFileRevision, com.intellij.openapi.vcs.history.VcsFileRevision) "Show Diff" for 2 revision} calls the common code.
* {@link DiffFromHistoryHandler#showDiffForOne(com.intellij.openapi.actionSystem.AnActionEvent, com.intellij.openapi.vcs.FilePath, com.intellij.openapi.vcs.history.VcsFileRevision, com.intellij.openapi.vcs.history.VcsFileRevision) "Show diff" for 1 revision}
 * behaves differently for merge commits: for them it shows a popup displaying the parents of the selected commit. Selecting a parent
 * from the popup shows the difference with this parent.
 * For an ordinary (non-merge) revision with a single parent, the behavior is the same as usual: just compare with the parent.
*
* @author Kirill Likhodedov
*/
public class GitDiffFromHistoryHandler extends BaseDiffFromHistoryHandler<GitFileRevision> {

  private static final Logger LOG = Logger.getInstance(GitDiffFromHistoryHandler.class);

  @NotNull private final Git myGit;
  @NotNull private final GitRepositoryManager myRepositoryManager;

  public GitDiffFromHistoryHandler(@NotNull Project project) {
    super(project);
    myGit = ServiceManager.getService(project, Git.class);
    myRepositoryManager = GitUtil.getRepositoryManager(project);
  }

  /**
   * Shows the diff for a single revision. Ordinary (single-parent) revisions are delegated to
   * the base implementation; merge commits get a popup listing the commit's parents, and the
   * diff is taken against the parent the user picks.
   */
  @Override
  public void showDiffForOne(@NotNull AnActionEvent e,
                             @NotNull Project project, @NotNull FilePath filePath,
                             @NotNull VcsFileRevision previousRevision,
                             @NotNull VcsFileRevision revision) {
    GitFileRevision rev = (GitFileRevision)revision;
    Collection<String> parents = rev.getParents();
    if (parents.size() < 2) {
      super.showDiffForOne(e, project, filePath, previousRevision, revision);
    }
    else { // merge
      showDiffForMergeCommit(e, filePath, rev, parents);
    }
  }

  /**
   * Collects the changes between two revisions of {@code path} via {@code git diff}.
   * A null {@code rev2} is forwarded as a null hash to {@link GitChangeUtils#getDiff}.
   */
  @NotNull
  @Override
  protected List<Change> getChangesBetweenRevisions(@NotNull FilePath path, @NotNull GitFileRevision rev1, @Nullable GitFileRevision rev2)
    throws VcsException {
    GitRepository repository = getRepository(path);
    String hash1 = rev1.getHash();
    String hash2 = rev2 != null ? rev2.getHash() : null;
    return ContainerUtil
      .newArrayList(GitChangeUtils.getDiff(repository.getProject(), repository.getRoot(), hash1, hash2, Collections.singletonList(path)));
  }

  /** Returns all changes made in the given revision (not limited to {@code path}'s repository-relative subtree). */
  @NotNull
  @Override
  protected List<Change> getAffectedChanges(@NotNull FilePath path, @NotNull GitFileRevision rev) throws VcsException {
    GitRepository repository = getRepository(path);
    return ContainerUtil.newArrayList(
      GitChangeUtils.getRevisionChanges(repository.getProject(), repository.getRoot(), rev.getHash(), false, true, true).getChanges());
  }

  /** A short hash is enough to identify a revision in UI texts. */
  @NotNull
  @Override
  protected String getPresentableName(@NotNull GitFileRevision revision) {
    return DvcsUtil.getShortHash(revision.getHash());
  }

  /** Returns the repository containing {@code path}; logs an assertion if none is found. */
  @NotNull
  private GitRepository getRepository(@NotNull FilePath path) {
    GitRepository repository = myRepositoryManager.getRepositoryForFile(path);
    LOG.assertTrue(repository != null, "Repository is null for " + path);
    return repository;
  }

  /**
   * Handles "show diff" for a merge commit: in the background, checks whether the file was
   * actually changed by the merge (warning the user if not) and resolves the parent revisions,
   * then shows the parent-selection popup.
   */
  private void showDiffForMergeCommit(@NotNull final AnActionEvent event, @NotNull final FilePath filePath,
                                      @NotNull final GitFileRevision rev, @NotNull final Collection<String> parents) {
    VcsHistorySession session = event.getData(VcsDataKeys.HISTORY_SESSION);
    List<VcsFileRevision> revisions = session != null ? session.getRevisionList() : null;
    checkIfFileWasTouchedAndFindParentsInBackground(filePath, rev, parents, revisions, new Consumer<MergeCommitPreCheckInfo>() {
      @Override
      public void consume(MergeCommitPreCheckInfo info) {
        if (!info.wasFileTouched()) {
          String message = String.format("There were no changes in %s in this merge commit, besides those which were made in both branches",
                                         filePath.getName());
          VcsBalloonProblemNotifier.showOverVersionControlView(GitDiffFromHistoryHandler.this.myProject, message, MessageType.INFO);
        }
        // The popup is shown in any case so the user can still inspect the parents.
        showPopup(event, rev, filePath, info.getParents());
      }
    });
  }

  /** Immutable result of the background pre-check performed for a merge commit. */
  private static class MergeCommitPreCheckInfo {
    private final boolean myWasFileTouched;
    private final Collection<GitFileRevision> myParents;

    private MergeCommitPreCheckInfo(boolean touched, Collection<GitFileRevision> parents) {
      myWasFileTouched = touched;
      myParents = parents;
    }

    /** True if the merge commit itself changed the file (beyond changes made in both branches). */
    public boolean wasFileTouched() {
      return myWasFileTouched;
    }

    /** Parent revisions of the merge commit, resolved to {@link GitFileRevision}s. */
    public Collection<GitFileRevision> getParents() {
      return myParents;
    }
  }

  /**
   * Runs {@code git show} and parent resolution in a background task and passes the result to
   * {@code resultHandler} on success. On a {@link VcsException} the error is reported and the
   * handler is not called.
   */
  private void checkIfFileWasTouchedAndFindParentsInBackground(@NotNull final FilePath filePath,
                                                               @NotNull final GitFileRevision rev,
                                                               @NotNull final Collection<String> parentHashes,
                                                               @Nullable final List<VcsFileRevision> revisions,
                                                               @NotNull final Consumer<MergeCommitPreCheckInfo> resultHandler) {
    new Task.Backgroundable(myProject, "Loading changes...", true) {
      private MergeCommitPreCheckInfo myInfo;

      @Override public void run(@NotNull ProgressIndicator indicator) {
        try {
          GitRepository repository = getRepository(filePath);
          boolean fileTouched = wasFileTouched(repository, rev);
          Collection<GitFileRevision> parents = findParentRevisions(repository, rev, parentHashes, revisions);
          myInfo = new MergeCommitPreCheckInfo(fileTouched, parents);
        }
        catch (VcsException e) {
          String logMessage = "Error happened while executing git show " + rev + ":" + filePath;
          showError(e, logMessage);
        }
      }

      @Override
      public void onSuccess() {
        if (myInfo != null) { // if info == null => an exception happened
          resultHandler.consume(myInfo);
        }
      }
    }.queue();
  }

  @NotNull
  private Collection<GitFileRevision> findParentRevisions(@NotNull GitRepository repository,
                                                          @NotNull GitFileRevision currentRevision,
                                                          @NotNull Collection<String> parentHashes,
                                                          @Nullable List<VcsFileRevision> revisions) throws VcsException {
    // currentRevision is a merge revision.
    // the file could be renamed in one of the branches, i.e. the name in one of the parent revisions may be different from the name
    // in currentRevision. It can be different even in both parents, but it would a rename-rename conflict, and we don't handle such anyway.
    Collection<GitFileRevision> parents = new ArrayList<>(parentHashes.size());
    for (String parentHash : parentHashes) {
      parents.add(createParentRevision(repository, currentRevision, parentHash, revisions));
    }
    return parents;
  }

  /**
   * Resolves a parent hash to a {@link GitFileRevision}, preferring an already-loaded revision
   * from the history session, and otherwise reconstructing the file path the parent used
   * (following renames for files; directories don't follow renames).
   */
  @NotNull
  private GitFileRevision createParentRevision(@NotNull GitRepository repository,
                                               @NotNull GitFileRevision currentRevision,
                                               @NotNull String parentHash,
                                               @Nullable List<VcsFileRevision> revisions) throws VcsException {
    if (revisions != null) {
      // The parent may already be among the revisions loaded for the history view.
      for (VcsFileRevision revision : revisions) {
        if (((GitFileRevision)revision).getHash().equals(parentHash)) {
          return (GitFileRevision)revision;
        }
      }
    }

    FilePath currentRevisionPath = currentRevision.getPath();
    if (currentRevisionPath.isDirectory()) {
      // for directories the history doesn't follow renames
      return makeRevisionFromHash(currentRevisionPath, parentHash);
    }

    // can't limit by the path: in that case rename information will be missed
    Collection<Change> changes = GitChangeUtils.getDiff(myProject, repository.getRoot(), parentHash, currentRevision.getHash(), null);
    for (Change change : changes) {
      ContentRevision afterRevision = change.getAfterRevision();
      ContentRevision beforeRevision = change.getBeforeRevision();
      if (afterRevision != null && afterRevision.getFile().equals(currentRevisionPath)) {
        // if the file was renamed, taking the path how it was in the parent; otherwise the path didn't change
        FilePath path = (beforeRevision != null ? beforeRevision.getFile() : afterRevision.getFile());
        return makeRevisionFromHash(path, parentHash);
      }
    }
    LOG.error(String.format("Could not find parent revision. Will use the path from parent revision. Current revision: %s, parent hash: %s",
                            currentRevision, parentHash));
    return makeRevisionFromHash(currentRevisionPath, parentHash);
  }

  /** Shows the popup with one action per parent of the merge commit. */
  private void showPopup(@NotNull AnActionEvent event, @NotNull GitFileRevision rev, @NotNull FilePath filePath,
                         @NotNull Collection<GitFileRevision> parents) {
    ActionGroup parentActions = createActionGroup(rev, filePath, parents);
    DataContext dataContext = SimpleDataContext.getProjectContext(myProject);
    ListPopup popup = JBPopupFactory.getInstance().createActionGroupPopup("Choose parent to compare", parentActions, dataContext,
                                                                          JBPopupFactory.ActionSelectionAid.NUMBERING, true);
    showPopupInBestPosition(popup, event, dataContext);
  }

  private static void showPopupInBestPosition(@NotNull ListPopup popup, @NotNull AnActionEvent event, @NotNull DataContext dataContext) {
    if (event.getInputEvent() instanceof MouseEvent) {
      if (!event.getPlace().equals(ActionPlaces.UPDATE_POPUP)) {
        popup.show(new RelativePoint((MouseEvent)event.getInputEvent()));
      }
      else { // quick fix for invoking from the context menu: coordinates are calculated incorrectly there.
        popup.showInBestPositionFor(dataContext);
      }
    }
    else {
      popup.showInBestPositionFor(dataContext);
    }
  }

  @NotNull
  private ActionGroup createActionGroup(@NotNull GitFileRevision rev, @NotNull FilePath filePath, @NotNull Collection<GitFileRevision> parents) {
    Collection<AnAction> actions = new ArrayList<>(2);
    for (GitFileRevision parent : parents) {
      actions.add(createParentAction(rev, filePath, parent));
    }
    return new DefaultActionGroup(ArrayUtil.toObjectArray(actions, AnAction.class));
  }

  @NotNull
  private AnAction createParentAction(@NotNull GitFileRevision rev, @NotNull FilePath filePath, @NotNull GitFileRevision parent) {
    return new ShowDiffWithParentAction(filePath, rev, parent);
  }

  /** Builds a lightweight revision object from a hash only (no metadata loaded). */
  @NotNull
  private GitFileRevision makeRevisionFromHash(@NotNull FilePath filePath, @NotNull String hash) {
    return new GitFileRevision(myProject, filePath, new GitRevisionNumber(hash));
  }

  /**
   * Runs {@code git show} for the revision and checks whether the file appears in its output,
   * i.e. whether the merge commit itself changed the file.
   *
   * @throws VcsException if the git command fails
   */
  private boolean wasFileTouched(@NotNull GitRepository repository, @NotNull GitFileRevision rev) throws VcsException {
    GitCommandResult result = myGit.show(repository, rev.getHash());
    if (result.success()) {
      return isFilePresentInOutput(repository, rev.getPath(), result.getOutput());
    }
    throw new VcsException(result.getErrorOutputAsJoinedString());
  }

  private static boolean isFilePresentInOutput(@NotNull GitRepository repository, @NotNull FilePath path, @NotNull List<String> output) {
    String relativePath = getRelativePath(repository, path);
    if (relativePath == null) {
      // FIX: getRelativePath is @Nullable (e.g. the path is not under the repository root);
      // previously this would NPE in line.contains(relativePath) below.
      return false;
    }
    for (String line : output) {
      // Only the diff header lines ("--- a/...", "+++ b/...") name the affected files.
      if (line.startsWith("---") || line.startsWith("+++")) {
        if (line.contains(relativePath)) {
          return true;
        }
      }
    }
    return false;
  }

  /** Repository-relative path with '/' separators, or null if {@code path} is not under the root. */
  @Nullable
  private static String getRelativePath(@NotNull GitRepository repository, @NotNull FilePath path) {
    return FileUtil.getRelativePath(repository.getRoot().getPath(), path.getPath(), '/');
  }

  /**
   * "shortHash - first line of the commit message", with the message truncated to keep the
   * popup entries compact; just the short hash if no message is available.
   */
  @NotNull
  private static String getRevisionDescription(@NotNull GitFileRevision parent) {
    String hash = DvcsUtil.getShortHash(parent.getHash());
    String message = parent.getCommitMessage();
    if (message != null) {
      int index = StringUtil.indexOfAny(message, "\n\r");
      if (index != -1) message = message.substring(0, index) + "...";
      if (message.length() > 40) message = message.substring(0, 35) + "...";
      return hash + " - " + message;
    }
    return hash;
  }

  /** Popup action comparing the merge revision with one specific parent. */
  private class ShowDiffWithParentAction extends DumbAwareAction {

    @NotNull private final FilePath myFilePath;
    @NotNull private final GitFileRevision myRevision;
    @NotNull private final GitFileRevision myParentRevision;

    public ShowDiffWithParentAction(@NotNull FilePath filePath, @NotNull GitFileRevision rev, @NotNull GitFileRevision parent) {
      super(getRevisionDescription(parent), parent.getCommitMessage(), null);
      myFilePath = filePath;
      myRevision = rev;
      myParentRevision = parent;
    }

    @Override
    public void actionPerformed(AnActionEvent e) {
      doShowDiff(myFilePath, myParentRevision, myRevision);
    }
  }
}
| |
/*===========================================================================
* Licensed Materials - Property of IBM
* "Restricted Materials of IBM"
*
* IBM SDK, Java(tm) Technology Edition, v8
* (C) Copyright IBM Corp. 2000, 2013. All Rights Reserved
*
* US Government Users Restricted Rights - Use, duplication or disclosure
* restricted by GSA ADP Schedule Contract with IBM Corp.
*===========================================================================
*/
/*
* Copyright (c) 2000, 2013, Oracle and/or its affiliates. All rights reserved.
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*/
package java.awt;
import java.util.List;
import java.util.ArrayList;
import sun.util.logging.PlatformLogger;
/**
* A FocusTraversalPolicy that determines traversal order based on the order
* of child Components in a Container. From a particular focus cycle root, the
* policy makes a pre-order traversal of the Component hierarchy, and traverses
* a Container's children according to the ordering of the array returned by
* <code>Container.getComponents()</code>. Portions of the hierarchy that are
* not visible and displayable will not be searched.
* <p>
* By default, ContainerOrderFocusTraversalPolicy implicitly transfers focus
* down-cycle. That is, during normal forward focus traversal, the Component
* traversed after a focus cycle root will be the focus-cycle-root's default
* Component to focus. This behavior can be disabled using the
* <code>setImplicitDownCycleTraversal</code> method.
* <p>
* By default, methods of this class will return a Component only if it is
* visible, displayable, enabled, and focusable. Subclasses can modify this
* behavior by overriding the <code>accept</code> method.
* <p>
* This policy takes into account <a
* href="doc-files/FocusSpec.html#FocusTraversalPolicyProviders">focus traversal
* policy providers</a>. When searching for first/last/next/previous Component,
* if a focus traversal policy provider is encountered, its focus traversal
* policy is used to perform the search operation.
*
* @author David Mendenhall
*
* @see Container#getComponents
* @since 1.4
*/
public class ContainerOrderFocusTraversalPolicy extends FocusTraversalPolicy
implements java.io.Serializable
{
// Logger for the "### ..." focus-traversal traces emitted at FINE level throughout this class.
private static final PlatformLogger log = PlatformLogger.getLogger("java.awt.ContainerOrderFocusTraversalPolicy");

// Direction markers passed to getComponentDownCycle: they select whether a policy
// provider contributes its default (forward) or last (backward) component.
final private int FORWARD_TRAVERSAL = 0;
final private int BACKWARD_TRAVERSAL = 1;

/*
 * JDK 1.4 serialVersionUID
 */
private static final long serialVersionUID = 486933713763926351L;

// When true (the default), traversing past a focus cycle root descends into that
// root's default component instead of stepping over it.
private boolean implicitDownCycleTraversal = true;

/**
 * Used by getComponentAfter and getComponentBefore for efficiency. In
 * order to maintain compliance with the specification of
 * FocusTraversalPolicy, if traversal wraps, we should invoke
 * getFirstComponent or getLastComponent. These methods may be overriden in
 * subclasses to behave in a non-generic way. However, in the generic case,
 * these methods will simply return the first or last Components of the
 * sorted list, respectively. Since getComponentAfter and
 * getComponentBefore have already built the list before determining
 * that they need to invoke getFirstComponent or getLastComponent, the
 * list should be reused if possible.
 */
transient private Container cachedRoot;
transient private List<Component> cachedCycle;
/*
* We suppose to use getFocusTraversalCycle & getComponentIndex methods in order
* to divide the policy into two parts:
* 1) Making the focus traversal cycle.
* 2) Traversing the cycle.
* The 1st point assumes producing a list of components representing the focus
* traversal cycle. The two methods mentioned above should implement this logic.
* The 2nd point assumes implementing the common concepts of operating on the
* cycle: traversing back and forth, retrieving the initial/default/first/last
* component. These concepts are described in the AWT Focus Spec and they are
* applied to the FocusTraversalPolicy in general.
* Thus, a descendant of this policy may wish to not reimplement the logic of
* the 2nd point but just override the implementation of the 1st one.
* A striking example of such a descendant is the javax.swing.SortingFocusTraversalPolicy.
*/
/*protected*/ private List<Component> getFocusTraversalCycle(Container aContainer) {
    // Build the traversal cycle by a pre-order walk rooted at the container.
    List<Component> traversalOrder = new ArrayList<Component>();
    enumerateCycle(aContainer, traversalOrder);
    return traversalOrder;
}
/*protected*/ private int getComponentIndex(List<Component> cycle, Component aComponent) {
    // Position of the component within the cycle, or -1 if it does not take part in it.
    final int position = cycle.indexOf(aComponent);
    return position;
}
/**
 * Appends the container and its eligible descendants to {@code cycle} in pre-order.
 * Hierarchies that are not both visible and displayable are skipped entirely.
 */
private void enumerateCycle(Container container, List<Component> cycle) {
    if (!(container.isVisible() && container.isDisplayable())) {
        return;
    }

    cycle.add(container);

    for (Component child : container.getComponents()) {
        if (child instanceof Container) {
            Container childContainer = (Container)child;
            // Descend only into ordinary containers; focus cycle roots and policy
            // providers manage their own interiors and are added as single entries.
            if (!childContainer.isFocusCycleRoot() && !childContainer.isFocusTraversalPolicyProvider()) {
                enumerateCycle(childContainer, cycle);
                continue;
            }
        }
        cycle.add(child);
    }
}
/**
 * Finds the focus traversal policy provider closest to {@code focusCycleRoot} on the path
 * from {@code aComponent} up to (but excluding) the root. Returns {@code null} when there
 * is no provider in between, or when the component is not below the given root at all.
 */
private Container getTopmostProvider(Container focusCycleRoot, Component aComponent) {
    Container topmost = null;
    Container ancestor = aComponent.getParent();

    // Walk upwards, remembering the highest provider encountered so far.
    while (ancestor != null && ancestor != focusCycleRoot) {
        if (ancestor.isFocusTraversalPolicyProvider()) {
            topmost = ancestor;
        }
        ancestor = ancestor.getParent();
    }

    // Reached the top without meeting focusCycleRoot: the component is outside its tree.
    if (ancestor == null) {
        return null;
    }
    return topmost;
}
/*
* Checks if a new focus cycle takes place and returns a Component to traverse focus to.
* @param comp a possible focus cycle root or policy provider
* @param traversalDirection the direction of the traversal
* @return a Component to traverse focus to if {@code comp} is a root or provider
* and implicit down-cycle is set, otherwise {@code null}
*/
private Component getComponentDownCycle(Component comp, int traversalDirection) {
    // Only containers can start a new cycle.
    if (!(comp instanceof Container)) {
        return null;
    }
    Container cont = (Container)comp;

    if (cont.isFocusCycleRoot()) {
        // Descend into a focus cycle root only when implicit down-cycle is enabled.
        if (!getImplicitDownCycleTraversal()) {
            return null;
        }
        Component retComp = cont.getFocusTraversalPolicy().getDefaultComponent(cont);
        if (retComp != null && log.isLoggable(PlatformLogger.Level.FINE)) {
            log.fine("### Transfered focus down-cycle to " + retComp +
                     " in the focus cycle root " + cont);
        }
        return retComp;
    }

    if (cont.isFocusTraversalPolicyProvider()) {
        // A provider always hands over its default (forward) or last (backward) component.
        Component retComp = (traversalDirection == FORWARD_TRAVERSAL ?
                             cont.getFocusTraversalPolicy().getDefaultComponent(cont) :
                             cont.getFocusTraversalPolicy().getLastComponent(cont));
        if (retComp != null && log.isLoggable(PlatformLogger.Level.FINE)) {
            log.fine("### Transfered focus to " + retComp + " in the FTP provider " + cont);
        }
        return retComp;
    }

    return null;
}
/**
* Returns the Component that should receive the focus after aComponent.
* aContainer must be a focus cycle root of aComponent or a focus traversal policy provider.
* <p>
* By default, ContainerOrderFocusTraversalPolicy implicitly transfers
* focus down-cycle. That is, during normal forward focus traversal, the
* Component traversed after a focus cycle root will be the focus-cycle-
* root's default Component to focus. This behavior can be disabled using
* the <code>setImplicitDownCycleTraversal</code> method.
* <p>
* If aContainer is <a href="doc-files/FocusSpec.html#FocusTraversalPolicyProviders">focus
* traversal policy provider</a>, the focus is always transferred down-cycle.
*
* @param aContainer a focus cycle root of aComponent or a focus traversal policy provider
* @param aComponent a (possibly indirect) child of aContainer, or
* aContainer itself
* @return the Component that should receive the focus after aComponent, or
* null if no suitable Component can be found
* @throws IllegalArgumentException if aContainer is not a focus cycle
* root of aComponent or focus traversal policy provider, or if either aContainer or
* aComponent is null
*/
public Component getComponentAfter(Container aContainer, Component aComponent) {
    if (log.isLoggable(PlatformLogger.Level.FINE)) {
        log.fine("### Searching in " + aContainer + " for component after " + aComponent);
    }

    if (aContainer == null || aComponent == null) {
        throw new IllegalArgumentException("aContainer and aComponent cannot be null");
    }
    if (!aContainer.isFocusTraversalPolicyProvider() && !aContainer.isFocusCycleRoot()) {
        throw new IllegalArgumentException("aContainer should be focus cycle root or focus traversal policy provider");
    } else if (aContainer.isFocusCycleRoot() && !aComponent.isFocusCycleRoot(aContainer)) {
        throw new IllegalArgumentException("aContainer is not a focus cycle root of aComponent");
    }

    synchronized(aContainer.getTreeLock()) {

        if (!(aContainer.isVisible() && aContainer.isDisplayable())) {
            return null;
        }

        // Before all the ckecks below we first see if it's an FTP provider or a focus cycle root.
        // If it's the case just go down cycle (if it's set to "implicit").
        Component comp = getComponentDownCycle(aComponent, FORWARD_TRAVERSAL);
        if (comp != null) {
            return comp;
        }

        // See if the component is inside of policy provider.
        Container provider = getTopmostProvider(aContainer, aComponent);
        if (provider != null) {
            if (log.isLoggable(PlatformLogger.Level.FINE)) {
                log.fine("### Asking FTP " + provider + " for component after " + aComponent);
            }

            // FTP knows how to find component after the given. We don't.
            FocusTraversalPolicy policy = provider.getFocusTraversalPolicy();
            Component afterComp = policy.getComponentAfter(provider, aComponent);

            // Null result means that we overstepped the limit of the FTP's cycle.
            // In that case we must quit the cycle, otherwise return the component found.
            if (afterComp != null) {
                if (log.isLoggable(PlatformLogger.Level.FINE)) {
                    log.fine("### FTP returned " + afterComp);
                }
                return afterComp;
            }
            // Fell off the end of the provider's cycle: continue the search in the
            // outer cycle, starting from the provider itself.
            aComponent = provider;
        }

        List<Component> cycle = getFocusTraversalCycle(aContainer);

        if (log.isLoggable(PlatformLogger.Level.FINE)) {
            log.fine("### Cycle is " + cycle + ", component is " + aComponent);
        }

        int index = getComponentIndex(cycle, aComponent);

        if (index < 0) {
            if (log.isLoggable(PlatformLogger.Level.FINE)) {
                log.fine("### Didn't find component " + aComponent + " in a cycle " + aContainer);
            }
            // The component is not in the cycle; fall back to the first component.
            return getFirstComponent(aContainer);
        }

        // Scan forward from the component for the first acceptable candidate,
        // descending into roots/providers where down-cycle traversal applies.
        for (index++; index < cycle.size(); index++) {
            comp = cycle.get(index);
            if (accept(comp)) {
                return comp;
            } else if ((comp = getComponentDownCycle(comp, FORWARD_TRAVERSAL)) != null) {
                return comp;
            }
        }

        if (aContainer.isFocusCycleRoot()) {
            // Traversal wrapped: publish the freshly built cycle through
            // cachedRoot/cachedCycle so getFirstComponent can reuse it.
            this.cachedRoot = aContainer;
            this.cachedCycle = cycle;

            comp = getFirstComponent(aContainer);
            this.cachedRoot = null;
            this.cachedCycle = null;

            return comp;
        }
    }
    return null;
}
/**
* Returns the Component that should receive the focus before aComponent.
* aContainer must be a focus cycle root of aComponent or a <a
* href="doc-files/FocusSpec.html#FocusTraversalPolicyProviders">focus traversal policy
* provider</a>.
*
* @param aContainer a focus cycle root of aComponent or focus traversal policy provider
* @param aComponent a (possibly indirect) child of aContainer, or
* aContainer itself
* @return the Component that should receive the focus before aComponent,
* or null if no suitable Component can be found
* @throws IllegalArgumentException if aContainer is not a focus cycle
* root of aComponent or focus traversal policy provider, or if either aContainer or
* aComponent is null
*/
public Component getComponentBefore(Container aContainer, Component aComponent) {
    if (aContainer == null || aComponent == null) {
        throw new IllegalArgumentException("aContainer and aComponent cannot be null");
    }
    if (!aContainer.isFocusTraversalPolicyProvider() && !aContainer.isFocusCycleRoot()) {
        throw new IllegalArgumentException("aContainer should be focus cycle root or focus traversal policy provider");
    } else if (aContainer.isFocusCycleRoot() && !aComponent.isFocusCycleRoot(aContainer)) {
        throw new IllegalArgumentException("aContainer is not a focus cycle root of aComponent");
    }

    synchronized(aContainer.getTreeLock()) {

        if (!(aContainer.isVisible() && aContainer.isDisplayable())) {
            return null;
        }

        // See if the component is inside of policy provider.
        Container provider = getTopmostProvider(aContainer, aComponent);
        if (provider != null) {
            if (log.isLoggable(PlatformLogger.Level.FINE)) {
                log.fine("### Asking FTP " + provider + " for component after " + aComponent);
            }

            // FTP knows how to find component after the given. We don't.
            FocusTraversalPolicy policy = provider.getFocusTraversalPolicy();
            Component beforeComp = policy.getComponentBefore(provider, aComponent);

            // Null result means that we overstepped the limit of the FTP's cycle.
            // In that case we must quit the cycle, otherwise return the component found.
            if (beforeComp != null) {
                if (log.isLoggable(PlatformLogger.Level.FINE)) {
                    log.fine("### FTP returned " + beforeComp);
                }
                return beforeComp;
            }
            // Fell off the start of the provider's cycle: continue searching in the
            // outer cycle from the provider itself.
            aComponent = provider;

            // If the provider is traversable it's returned.
            if (accept(aComponent)) {
                return aComponent;
            }
        }

        List<Component> cycle = getFocusTraversalCycle(aContainer);

        if (log.isLoggable(PlatformLogger.Level.FINE)) {
            log.fine("### Cycle is " + cycle + ", component is " + aComponent);
        }

        int index = getComponentIndex(cycle, aComponent);

        if (index < 0) {
            if (log.isLoggable(PlatformLogger.Level.FINE)) {
                log.fine("### Didn't find component " + aComponent + " in a cycle " + aContainer);
            }
            // The component is not in the cycle; fall back to the last component.
            return getLastComponent(aContainer);
        }

        Component comp = null;
        Component tryComp = null;

        // Scan backwards; a root/provider (other than aContainer itself) contributes
        // its own last/default component in preference to the container entry itself.
        for (index--; index>=0; index--) {
            comp = cycle.get(index);
            if (comp != aContainer && (tryComp = getComponentDownCycle(comp, BACKWARD_TRAVERSAL)) != null) {
                return tryComp;
            } else if (accept(comp)) {
                return comp;
            }
        }

        if (aContainer.isFocusCycleRoot()) {
            // Traversal wrapped past the beginning: hand the freshly built cycle
            // to getLastComponent through cachedRoot/cachedCycle.
            this.cachedRoot = aContainer;
            this.cachedCycle = cycle;

            comp = getLastComponent(aContainer);

            this.cachedRoot = null;
            this.cachedCycle = null;

            return comp;
        }
    }
    return null;
}
/**
* Returns the first Component in the traversal cycle. This method is used
* to determine the next Component to focus when traversal wraps in the
* forward direction.
*
* @param aContainer the focus cycle root or focus traversal policy provider whose first
* Component is to be returned
* @return the first Component in the traversal cycle of aContainer,
* or null if no suitable Component can be found
* @throws IllegalArgumentException if aContainer is null
*/
public Component getFirstComponent(Container aContainer) {
    List<Component> cycle;

    if (log.isLoggable(PlatformLogger.Level.FINE)) {
        log.fine("### Getting first component in " + aContainer);
    }
    if (aContainer == null) {
        throw new IllegalArgumentException("aContainer cannot be null");
    }

    synchronized(aContainer.getTreeLock()) {

        if (!(aContainer.isVisible() && aContainer.isDisplayable())) {
            return null;
        }

        // Reuse the cycle built by getComponentAfter when traversal wrapped,
        // instead of enumerating the hierarchy a second time.
        if (this.cachedRoot == aContainer) {
            cycle = this.cachedCycle;
        } else {
            cycle = getFocusTraversalCycle(aContainer);
        }

        if (cycle.size() == 0) {
            if (log.isLoggable(PlatformLogger.Level.FINE)) {
                log.fine("### Cycle is empty");
            }
            return null;
        }
        if (log.isLoggable(PlatformLogger.Level.FINE)) {
            log.fine("### Cycle is " + cycle);
        }

        // First acceptable component wins; nested roots/providers (other than the
        // container itself) may contribute their own down-cycle component.
        for (Component comp : cycle) {
            if (accept(comp)) {
                return comp;
            } else if (comp != aContainer &&
                       (comp = getComponentDownCycle(comp, FORWARD_TRAVERSAL)) != null)
            {
                return comp;
            }
        }
    }
    return null;
}
/**
 * Returns the last Component in the traversal cycle. This method is used
 * to determine the next Component to focus when traversal wraps in the
 * reverse direction.
 *
 * @param aContainer the focus cycle root or focus traversal policy provider whose last
 *        Component is to be returned
 * @return the last Component in the traversal cycle of aContainer,
 *         or null if no suitable Component can be found
 * @throws IllegalArgumentException if aContainer is null
 */
public Component getLastComponent(Container aContainer) {
    if (log.isLoggable(PlatformLogger.Level.FINE)) {
        log.fine("### Getting last component in " + aContainer);
    }
    if (aContainer == null) {
        throw new IllegalArgumentException("aContainer cannot be null");
    }
    synchronized(aContainer.getTreeLock()) {
        if (!(aContainer.isVisible() && aContainer.isDisplayable())) {
            return null;
        }
        // Reuse the cycle cached by the caller when it matches this container.
        List<Component> cycle = (this.cachedRoot == aContainer)
                                ? this.cachedCycle
                                : getFocusTraversalCycle(aContainer);
        if (cycle.isEmpty()) {
            if (log.isLoggable(PlatformLogger.Level.FINE)) {
                log.fine("### Cycle is empty");
            }
            return null;
        }
        if (log.isLoggable(PlatformLogger.Level.FINE)) {
            log.fine("### Cycle is " + cycle);
        }
        // Walk the cycle backwards looking for the last acceptable component.
        // NOTE(review): unlike getFirstComponent, this only descends into
        // focus traversal policy providers, not arbitrary down-cycle roots.
        for (int index = cycle.size() - 1; index >= 0; index--) {
            Component candidate = cycle.get(index);
            if (accept(candidate)) {
                return candidate;
            }
            if (candidate == aContainer || !(candidate instanceof Container)) {
                continue;
            }
            Container provider = (Container) candidate;
            if (!provider.isFocusTraversalPolicyProvider()) {
                continue;
            }
            Component last = provider.getFocusTraversalPolicy().getLastComponent(provider);
            if (last != null) {
                return last;
            }
        }
    }
    return null;
}
/**
 * Returns the default Component to focus. This Component will be the first
 * to receive focus when traversing down into a new focus traversal cycle
 * rooted at aContainer. The default implementation of this method
 * returns the same Component as <code>getFirstComponent</code>.
 *
 * @param aContainer the focus cycle root or focus traversal policy provider whose default
 *        Component is to be returned
 * @return the default Component in the traversal cycle of aContainer,
 *         or null if no suitable Component can be found
 * @see #getFirstComponent
 * @throws IllegalArgumentException if aContainer is null
 */
public Component getDefaultComponent(Container aContainer) {
    // Pure delegation: subclasses may override to pick a different default.
    return getFirstComponent(aContainer);
}
/**
 * Sets whether this ContainerOrderFocusTraversalPolicy transfers focus
 * down-cycle implicitly. If <code>true</code>, during normal forward focus
 * traversal, the Component traversed after a focus cycle root will be the
 * focus-cycle-root's default Component to focus. If <code>false</code>,
 * the next Component in the focus traversal cycle rooted at the specified
 * focus cycle root will be traversed instead. The default value for this
 * property is <code>true</code>.
 *
 * @param implicitDownCycleTraversal whether this
 *        ContainerOrderFocusTraversalPolicy transfers focus down-cycle
 *        implicitly
 * @see #getImplicitDownCycleTraversal
 * @see #getFirstComponent
 */
public void setImplicitDownCycleTraversal(boolean implicitDownCycleTraversal) {
    // Plain property setter; consulted during forward traversal decisions.
    this.implicitDownCycleTraversal = implicitDownCycleTraversal;
}
/**
 * Returns whether this ContainerOrderFocusTraversalPolicy transfers focus
 * down-cycle implicitly. If <code>true</code>, during normal forward focus
 * traversal, the Component traversed after a focus cycle root will be the
 * focus-cycle-root's default Component to focus. If <code>false</code>,
 * the next Component in the focus traversal cycle rooted at the specified
 * focus cycle root will be traversed instead.
 *
 * @return whether this ContainerOrderFocusTraversalPolicy transfers focus
 *         down-cycle implicitly
 * @see #setImplicitDownCycleTraversal
 * @see #getFirstComponent
 */
public boolean getImplicitDownCycleTraversal() {
    // Plain property getter for the flag set by setImplicitDownCycleTraversal.
    return implicitDownCycleTraversal;
}
/**
 * Determines whether a Component is an acceptable choice as the new
 * focus owner. By default, this method will accept a Component if and
 * only if it is visible, displayable, enabled, and focusable.
 *
 * @param aComponent the Component whose fitness as a focus owner is to
 *        be tested
 * @return <code>true</code> if aComponent is visible, displayable,
 *         enabled, and focusable; <code>false</code> otherwise
 */
protected boolean accept(Component aComponent) {
    if (!aComponent.canBeFocusOwner()) {
        return false;
    }
    // A Window has no ancestors to disable it, so it is accepted as-is.
    if (aComponent instanceof Window) {
        return true;
    }
    // Verify that the Component is recursively enabled. Disabling a
    // heavyweight Container disables its children, whereas disabling
    // a lightweight Container does not.
    for (Container ancestor = aComponent.getParent();
         ancestor != null;
         ancestor = ancestor.getParent())
    {
        if (!ancestor.isEnabled() && !ancestor.isLightweight()) {
            return false;
        }
        if (ancestor instanceof Window) {
            // Reached the top-level window: no further ancestors matter.
            break;
        }
    }
    return true;
}
}
| |
/*
* Copyright (C) 2021 pedroSG94.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.pedro.encoder.input.gl.render.filters;
import android.content.Context;
import android.content.res.Resources;
import android.graphics.Color;
import android.opengl.GLES20;
import android.opengl.Matrix;
import android.os.Build;
import androidx.annotation.RequiresApi;
import com.pedro.encoder.R;
import com.pedro.encoder.utils.gl.GlUtil;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * Created by pedro on 1/02/18.
 *
 * Duotone filter render: draws the previous texture through a fragment shader
 * that tints it between two configurable RGB colors (uniforms uColor and
 * uColor2). By default the tint colors are green and blue.
 */
@RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN_MR2)
public class DuotoneFilterRender extends BaseFilterRender {

  //rotation matrix
  private final float[] squareVertexDataFilter = {
      // X, Y, Z, U, V
      -1f, -1f, 0f, 0f, 0f, //bottom left
      1f, -1f, 0f, 1f, 0f, //bottom right
      -1f, 1f, 0f, 0f, 1f, //top left
      1f, 1f, 0f, 1f, 1f, //top right
  };

  // GL handles, resolved in initGlFilter once the program is linked.
  private int program = -1;
  private int aPositionHandle = -1;
  private int aTextureHandle = -1;
  private int uMVPMatrixHandle = -1;
  private int uSTMatrixHandle = -1;
  private int uSamplerHandle = -1;
  private int uColorHandle = -1;
  private int uColor2Handle = -1;

  private static final String HEX_PATTERN = "^#([A-Fa-f0-9]{6})$";
  // Compile the validation pattern once instead of on every setRGBColor call.
  private static final Pattern HEX_COLOR_PATTERN = Pattern.compile(HEX_PATTERN);

  //by default tint with green and blue
  private float red = 0f;
  private float green = 1f;
  private float blue = 0f;
  private float red2 = 0f;
  private float green2 = 0f;
  private float blue2 = 1f;

  public DuotoneFilterRender() {
    squareVertex = ByteBuffer.allocateDirect(squareVertexDataFilter.length * FLOAT_SIZE_BYTES)
        .order(ByteOrder.nativeOrder())
        .asFloatBuffer();
    squareVertex.put(squareVertexDataFilter).position(0);
    Matrix.setIdentityM(MVPMatrix, 0);
    Matrix.setIdentityM(STMatrix, 0);
  }

  @Override
  protected void initGlFilter(Context context) {
    String vertexShader = GlUtil.getStringFromRaw(context, R.raw.simple_vertex);
    String fragmentShader = GlUtil.getStringFromRaw(context, R.raw.duotone_fragment);
    program = GlUtil.createProgram(vertexShader, fragmentShader);
    aPositionHandle = GLES20.glGetAttribLocation(program, "aPosition");
    aTextureHandle = GLES20.glGetAttribLocation(program, "aTextureCoord");
    uMVPMatrixHandle = GLES20.glGetUniformLocation(program, "uMVPMatrix");
    uSTMatrixHandle = GLES20.glGetUniformLocation(program, "uSTMatrix");
    uSamplerHandle = GLES20.glGetUniformLocation(program, "uSampler");
    uColorHandle = GLES20.glGetUniformLocation(program, "uColor");
    uColor2Handle = GLES20.glGetUniformLocation(program, "uColor2");
  }

  @Override
  protected void drawFilter() {
    GLES20.glUseProgram(program);
    squareVertex.position(SQUARE_VERTEX_DATA_POS_OFFSET);
    GLES20.glVertexAttribPointer(aPositionHandle, 3, GLES20.GL_FLOAT, false,
        SQUARE_VERTEX_DATA_STRIDE_BYTES, squareVertex);
    GLES20.glEnableVertexAttribArray(aPositionHandle);
    squareVertex.position(SQUARE_VERTEX_DATA_UV_OFFSET);
    GLES20.glVertexAttribPointer(aTextureHandle, 2, GLES20.GL_FLOAT, false,
        SQUARE_VERTEX_DATA_STRIDE_BYTES, squareVertex);
    GLES20.glEnableVertexAttribArray(aTextureHandle);
    GLES20.glUniformMatrix4fv(uMVPMatrixHandle, 1, false, MVPMatrix, 0);
    GLES20.glUniformMatrix4fv(uSTMatrixHandle, 1, false, STMatrix, 0);
    // Upload the two duotone colors each frame so setters take effect
    // without reinitializing the filter.
    GLES20.glUniform3f(uColorHandle, red, green, blue);
    GLES20.glUniform3f(uColor2Handle, red2, green2, blue2);
    GLES20.glUniform1i(uSamplerHandle, 4);
    GLES20.glActiveTexture(GLES20.GL_TEXTURE4);
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, previousTexId);
  }

  @Override
  public void release() {
    GLES20.glDeleteProgram(program);
  }

  public float getRed() {
    return red;
  }

  public float getGreen() {
    return green;
  }

  public float getBlue() {
    return blue;
  }

  public float getRed2() {
    return red2;
  }

  public float getGreen2() {
    return green2;
  }

  public float getBlue2() {
    return blue2;
  }

  /**
   * Sets both duotone colors from hex strings.
   *
   * @param rgbHexColor first color represented with 7 characters (1 to start with #, 2 for red,
   * 2 for green and 2 for blue)
   * @param rgbHexColor2 second color in the same #RRGGBB format
   * @throws IllegalArgumentException if either string does not match {@link #HEX_PATTERN};
   * in that case neither color is modified
   */
  public void setRGBColor(String rgbHexColor, String rgbHexColor2) {
    // Parse (and validate) both colors before mutating any state so an
    // invalid second argument cannot leave the filter half-updated.
    float[] rgb = parseRgb(rgbHexColor);
    float[] rgb2 = parseRgb(rgbHexColor2);
    red = rgb[0];
    green = rgb[1];
    blue = rgb[2];
    red2 = rgb2[0];
    green2 = rgb2[1];
    blue2 = rgb2[2];
  }

  /**
   * Validates a #RRGGBB hex string and converts it to normalized {r, g, b}
   * floats in [0, 1].
   *
   * @throws IllegalArgumentException if the string does not match {@link #HEX_PATTERN}
   */
  private static float[] parseRgb(String rgbHexColor) {
    if (!HEX_COLOR_PATTERN.matcher(rgbHexColor).matches()) {
      throw new IllegalArgumentException(
          "Invalid hexColor pattern (Should be: " + HEX_PATTERN + ")");
    }
    return new float[] {
        Integer.parseInt(rgbHexColor.substring(1, 3), 16) / 255.0f,
        Integer.parseInt(rgbHexColor.substring(3, 5), 16) / 255.0f,
        Integer.parseInt(rgbHexColor.substring(5, 7), 16) / 255.0f
    };
  }

  /**
   * Values range 0 to 255
   */
  public void setRGBColor(int r, int g, int b, int r2, int g2, int b2) {
    red = (float) r / 255.0f;
    green = (float) g / 255.0f;
    blue = (float) b / 255.0f;
    red2 = (float) r2 / 255.0f;
    green2 = (float) g2 / 255.0f;
    blue2 = (float) b2 / 255.0f;
  }

  /**
   * Get string color from color file resource and strip alpha values (alpha values is always auto
   * completed)
   */
  public void setColor(Resources resources, int colorResource, int colorResource2) {
    // Resource color strings are #AARRGGBB; drop the two alpha digits.
    String color = resources.getString(colorResource);
    String color2 = resources.getString(colorResource2);
    setRGBColor("#" + color.substring(3), "#" + color2.substring(3));
  }

  /**
   * @param colorResource int from color class with Color.parse or Color.NAME_COLOR (Ex:
   * Color.BLUE)
   * @param colorResource2 int from color class with Color.parse or Color.NAME_COLOR (Ex:
   * Color.BLUE)
   */
  public void setColor(int colorResource, int colorResource2) {
    red = Color.red(colorResource) / 255f;
    green = Color.green(colorResource) / 255f;
    blue = Color.blue(colorResource) / 255f;
    red2 = Color.red(colorResource2) / 255f;
    green2 = Color.green(colorResource2) / 255f;
    blue2 = Color.blue(colorResource2) / 255f;
  }
}
| |
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.codeInsight.daemon.impl;
import com.intellij.codeInsight.hint.HintUtil;
import com.intellij.ide.actions.QualifiedNameProviderUtil;
import com.intellij.openapi.actionSystem.ActionManager;
import com.intellij.openapi.actionSystem.AnAction;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.ui.JBColor;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.Collection;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Function;
import java.util.function.Predicate;
import static com.intellij.openapi.keymap.KeymapUtil.getPreferredShortcutText;
import static com.intellij.openapi.util.io.FileUtil.toSystemIndependentName;
import static com.intellij.psi.util.PsiTreeUtil.getStubOrPsiParentOfType;
import static com.intellij.psi.util.PsiUtilCore.getVirtualFile;
import static com.intellij.ui.ColorUtil.toHex;
/**
 * Builds HTML tooltip text for gutter icons: a prefix, a list of linked PSI
 * elements (each with its containing-member chain and package name), and an
 * optional "Press &lt;shortcut&gt; to navigate" context-help footer.
 *
 * Utility class — not instantiable.
 */
public final class GutterTooltipHelper {
private static final JBColor SEPARATOR_COLOR = JBColor.namedColor("GutterTooltip.lineSeparatorColor", HintUtil.INFORMATION_BORDER_COLOR);
private static final JBColor CONTEXT_HELP_FOREGROUND
= JBColor.namedColor("GutterTooltip.infoForeground", new JBColor(0x787878, 0x878787));
private GutterTooltipHelper() {
}
/**
 * @param elements a collection of elements to create a formatted tooltip text
 * @param prefix a text to insert before all elements
 * @param skipFirstMember {@code true} to skip a method (or field) name in the link to element
 * @param actionId an action identifier to generate context help or {@code null} if not applicable
 * @return HTML tooltip text in which every element is separated by a divider
 */
@NotNull
public static <E extends PsiElement> String getTooltipText(@NotNull Collection<E> elements,
@NotNull String prefix,
boolean skipFirstMember,
@Nullable String actionId) {
String firstDivider = getElementDivider(true, true, elements.size());
String nextDivider = getElementDivider(false, true, elements.size());
AtomicReference<String> reference = new AtomicReference<>(firstDivider); // optimization: calculate next divider only once
return getTooltipText(prefix, elements, e -> reference.getAndSet(nextDivider), e -> skipFirstMember, actionId);
}
/**
 * Returns the HTML fragment inserted between elements: a plain space for a
 * single element, otherwise a paragraph break with an optional left margin
 * and (for every element after the first) a thin separator line on top.
 */
static String getElementDivider(boolean firstElement, boolean marginLeft, int elementsCount) {
if (elementsCount <= 1) return " ";
StringBuilder sb = new StringBuilder("</p><p style='margin-top:2pt");
if (marginLeft) sb.append(";margin-left:20pt");
if (!firstElement) sb.append(";border-top:thin solid #").append(toHex(SEPARATOR_COLOR));
return sb.append(";'>").toString();
}
/**
 * @param elements a collection of elements to create a formatted tooltip text
 * @param elementToPrefix a function that returns a text to insert before the current element
 * @param skipFirstMemberOfElement a function that returns {@code true} to skip a method (or field) name for the current element
 * @param actionId an action identifier to generate context help or {@code null} if not applicable
 * @return HTML tooltip text with a per-element prefix and no overall prefix
 */
@NotNull
public static <E extends PsiElement> String getTooltipText(@NotNull Collection<E> elements,
@NotNull Function<E, String> elementToPrefix,
@NotNull Predicate<E> skipFirstMemberOfElement,
@Nullable String actionId) {
return getTooltipText(null, elements, elementToPrefix, skipFirstMemberOfElement, actionId);
}
/**
 * Shared implementation: wraps the rendered elements and optional context
 * help into a complete {@code <html><body>...</body></html>} document.
 */
@NotNull
private static <E extends PsiElement> String getTooltipText(@Nullable String prefix,
@NotNull Collection<E> elements,
@NotNull Function<E, String> elementToPrefix,
@NotNull Predicate<E> skipFirstMemberOfElement,
@Nullable String actionId) {
StringBuilder sb = new StringBuilder("<html><body><p>");
if (prefix != null) sb.append(prefix);
for (E element : elements) {
String elementPrefix = elementToPrefix.apply(element);
if (elementPrefix != null) sb.append(elementPrefix);
appendElement(sb, element, skipFirstMemberOfElement.test(element));
}
appendContextHelp(sb, actionId);
sb.append("</p></body></html>");
return sb.toString();
}
/**
 * Renders one element as "name in Outer in File (package)". Depending on the
 * registry flag, either the whole chain is one link, or each name becomes a
 * separate link (with the first link pointing at the original member).
 */
private static void appendElement(@NotNull StringBuilder sb, @NotNull PsiElement element, boolean skip) {
boolean useSingleLink = Registry.is("gutter.tooltip.single.link");
String packageName = null;
boolean addedSingleLink = useSingleLink && appendLink(sb, element);
PsiElement original = element; // use original member as a first separate link
if (skip && (element instanceof PsiMethod || element instanceof PsiField)) {
element = getContainingElement(element);
}
// Walk up the containing-member chain, appending a name (and possibly a
// link) for each level until we reach a file or a top-level class.
while (element != null) {
String name = getPresentableName(element);
if (name != null) {
boolean addedLink = !useSingleLink && appendLink(sb, original != null ? original : element);
original = null; // do not use a link to the original element if it is already added
// Swing uses simple HTML processing and paints a link incorrectly if it contains different fonts.
// This is the reason why I use monospaced font not only for element name, but for a whole link.
// By the same reason I have to comment out support for deprecated elements.
//
// boolean deprecated = RefJavaUtil.isDeprecated(element);
// if (deprecated) sb.append("<strike>");
// sb.append("<code>");
sb.append(name);
// sb.append("</code>");
// if (deprecated) sb.append("</strike>");
if (addedLink) sb.append("</code></a>");
}
if (element instanceof PsiFile) break;
PsiElement parent = getContainingElement(element);
if (parent == null || parent instanceof PsiFile) {
// Top-level class: show its package name instead of the file name.
if (element instanceof PsiClass && !(element instanceof PsiAnonymousClass)) {
String qualifiedName = ((PsiClass)element).getQualifiedName();
if (qualifiedName != null) packageName = StringUtil.getPackageName(qualifiedName);
break;
}
}
if (name != null && parent != null) sb.append(" in ");
element = parent;
}
if (addedSingleLink) sb.append("</code></a>");
appendPackageName(sb, packageName);
}
/** Appends " (package.name)" in the muted context-help color, if any. */
private static void appendPackageName(@NotNull StringBuilder sb, @Nullable String name) {
if (StringUtil.isEmpty(name)) return; // no package name
sb.append(" <font color='#").append(toHex(CONTEXT_HELP_FOREGROUND));
sb.append("'><code>(").append(name).append(")</code></font>");
}
/**
 * Appends a "Press &lt;shortcut&gt; to navigate" footer for the given action,
 * or nothing if the action or its shortcut cannot be resolved.
 */
private static void appendContextHelp(@NotNull StringBuilder sb, @Nullable String actionId) {
if (actionId == null) return; // action id is not set
AnAction action = ActionManager.getInstance().getAction(actionId);
if (action == null) return; // no action registered for this id
String text = getPreferredShortcutText(action.getShortcutSet().getShortcuts());
if (StringUtil.isEmpty(text)) return; // action has no shortcuts
sb.append("</p><p style='margin-top:8px;'><font size='2' color='#");
sb.append(toHex(CONTEXT_HELP_FOREGROUND));
sb.append("'>Press ").append(text).append(" to navigate</font>");
}
/**
 * Opens an {@code <a href=...><code>} tag for the element, preferring a
 * qualified-name link and falling back to a file:offset navigation link.
 * The caller is responsible for closing the tags when this returns true.
 */
private static boolean appendLink(@NotNull StringBuilder sb, @NotNull PsiElement element) {
try {
String name = getQualifiedName(element);
if (!StringUtil.isEmpty(name)) {
sb.append("<a href=\"#element/").append(name).append("\"><code>");
return true;
}
VirtualFile file = getVirtualFile(element);
if (file == null) return false;
int offset = element.getTextOffset();
sb.append("<a href=\"#navigation/");
sb.append(toSystemIndependentName(file.getPath()));
sb.append(":").append(offset).append("\"><code>");
return true;
}
catch (Exception ignored) {
// Best effort: a failed link lookup must never break tooltip rendering.
return false;
}
}
/**
 * Returns the qualified name used for "#element/" links, or null for
 * elements inside anonymous classes (which have no stable qualified name).
 */
@Nullable
private static String getQualifiedName(@NotNull PsiElement element) {
PsiClass psiClass = element instanceof PsiClass ? (PsiClass)element : getStubOrPsiParentOfType(element, PsiClass.class);
if (psiClass instanceof PsiAnonymousClass) return null;
return QualifiedNameProviderUtil.getQualifiedName(element);
}
/**
 * Returns the next enclosing member (or the containing class for a member,
 * or the containing file as a last resort) for the "in ..." chain.
 */
@Nullable
private static PsiElement getContainingElement(@NotNull PsiElement element) {
PsiMember member = getStubOrPsiParentOfType(element, PsiMember.class);
if (member == null && element instanceof PsiMember) {
member = ((PsiMember)element).getContainingClass();
}
return member != null ? member : element.getContainingFile();
}
/**
 * Returns a human-readable name for the element: the enum constant name for
 * enum constant initializers, "Anonymous" for anonymous classes, the declared
 * name for named elements, or null if the element has no presentable name.
 */
@Nullable
private static String getPresentableName(@NotNull PsiElement element) {
if (element instanceof PsiEnumConstantInitializer) {
PsiEnumConstantInitializer initializer = (PsiEnumConstantInitializer)element;
return initializer.getEnumConstant().getName();
}
if (element instanceof PsiAnonymousClass) {
return "Anonymous";
}
if (element instanceof PsiNamedElement) {
PsiNamedElement named = (PsiNamedElement)element;
return named.getName();
}
return null;
}
}
| |
/*
* Copyright (C) 2012 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.eventbus.outside;
import static com.google.common.truth.Truth.assertThat;
import com.google.common.collect.Lists;
import com.google.common.eventbus.EventBus;
import com.google.common.eventbus.Subscribe;
import junit.framework.TestCase;
import java.util.List;
/**
 * Test that EventBus finds the correct subscribers.
 *
 * This test must be outside the c.g.c.eventbus package to test correctly.
 * @author Louis Wasserman
 */
public class AnnotatedSubscriberFinderTests {
// Single event instance posted once per test; subscribers record what they receive.
private static final Object EVENT = new Object();
/**
 * Shared harness: creates the subscriber under test, registers it on a fresh
 * EventBus and posts EVENT exactly once in setUp, so each test method only
 * has to inspect the lists the subscriber recorded into.
 */
abstract static class AbstractEventBusTest<H> extends TestCase {
abstract H createSubscriber();
private H subscriber;
H getSubscriber() {
return subscriber;
}
@Override
protected void setUp() throws Exception {
subscriber = createSubscriber();
EventBus bus = new EventBus();
bus.register(subscriber);
bus.post(EVENT);
}
@Override
protected void tearDown() throws Exception {
subscriber = null;
}
}
/*
 * We break the tests up based on whether they are annotated or abstract in the superclass.
 */
// Baseline: only methods annotated with @Subscribe receive events.
public static class BaseSubscriberFinderTest extends
AbstractEventBusTest<BaseSubscriberFinderTest.Subscriber> {
static class Subscriber {
final List<Object> nonSubscriberEvents = Lists.newArrayList();
final List<Object> subscriberEvents = Lists.newArrayList();
public void notASubscriber(Object o) {
nonSubscriberEvents.add(o);
}
@Subscribe
public void subscriber(Object o) {
subscriberEvents.add(o);
}
}
public void testNonSubscriber() {
assertThat(getSubscriber().nonSubscriberEvents).isEmpty();
}
public void testSubscriber() {
assertThat(getSubscriber().subscriberEvents).contains(EVENT);
}
@Override
Subscriber createSubscriber() {
return new Subscriber();
}
}
// Superclass methods are abstract AND annotated: the subclass override is a
// subscriber whether or not it repeats the annotation.
public static class AnnotatedAndAbstractInSuperclassTest extends
AbstractEventBusTest<AnnotatedAndAbstractInSuperclassTest.SubClass> {
abstract static class SuperClass {
@Subscribe
public abstract void overriddenAndAnnotatedInSubclass(Object o);
@Subscribe
public abstract void overriddenInSubclass(Object o);
}
static class SubClass extends SuperClass {
final List<Object> overriddenAndAnnotatedInSubclassEvents = Lists.newArrayList();
final List<Object> overriddenInSubclassEvents = Lists.newArrayList();
@Subscribe
@Override
public void overriddenAndAnnotatedInSubclass(Object o) {
overriddenAndAnnotatedInSubclassEvents.add(o);
}
@Override
public void overriddenInSubclass(Object o) {
overriddenInSubclassEvents.add(o);
}
}
public void testOverriddenAndAnnotatedInSubclass() {
assertThat(getSubscriber().overriddenAndAnnotatedInSubclassEvents).contains(EVENT);
}
public void testOverriddenNotAnnotatedInSubclass() {
assertThat(getSubscriber().overriddenInSubclassEvents).contains(EVENT);
}
@Override
SubClass createSubscriber() {
return new SubClass();
}
}
// Superclass methods are concrete AND annotated: overriding dispatches to the
// subclass implementation only (the "Bad" lists stay empty when the override
// does not call super).
public static class AnnotatedNotAbstractInSuperclassTest extends
AbstractEventBusTest<AnnotatedNotAbstractInSuperclassTest.SubClass> {
static class SuperClass {
final List<Object> notOverriddenInSubclassEvents = Lists.newArrayList();
final List<Object> overriddenNotAnnotatedInSubclassEvents = Lists.newArrayList();
final List<Object> overriddenAndAnnotatedInSubclassEvents = Lists.newArrayList();
final List<Object> differentlyOverriddenNotAnnotatedInSubclassBadEvents = Lists
.newArrayList();
final List<Object> differentlyOverriddenAnnotatedInSubclassBadEvents = Lists.newArrayList();
@Subscribe
public void notOverriddenInSubclass(Object o) {
notOverriddenInSubclassEvents.add(o);
}
@Subscribe
public void overriddenNotAnnotatedInSubclass(Object o) {
overriddenNotAnnotatedInSubclassEvents.add(o);
}
@Subscribe
public void overriddenAndAnnotatedInSubclass(Object o) {
overriddenAndAnnotatedInSubclassEvents.add(o);
}
@Subscribe
public void differentlyOverriddenNotAnnotatedInSubclass(Object o) {
// the subclass overrides this and does *not* call super.dONAIS(o)
differentlyOverriddenNotAnnotatedInSubclassBadEvents.add(o);
}
@Subscribe
public void differentlyOverriddenAnnotatedInSubclass(Object o) {
// the subclass overrides this and does *not* call super.dOAIS(o)
differentlyOverriddenAnnotatedInSubclassBadEvents.add(o);
}
}
static class SubClass extends SuperClass {
final List<Object> differentlyOverriddenNotAnnotatedInSubclassGoodEvents = Lists
.newArrayList();
final List<Object> differentlyOverriddenAnnotatedInSubclassGoodEvents = Lists.newArrayList();
@Override
public void overriddenNotAnnotatedInSubclass(Object o) {
super.overriddenNotAnnotatedInSubclass(o);
}
@Subscribe
@Override
public void overriddenAndAnnotatedInSubclass(Object o) {
super.overriddenAndAnnotatedInSubclass(o);
}
@Override
public void differentlyOverriddenNotAnnotatedInSubclass(Object o) {
differentlyOverriddenNotAnnotatedInSubclassGoodEvents.add(o);
}
@Subscribe
@Override
public void differentlyOverriddenAnnotatedInSubclass(Object o) {
differentlyOverriddenAnnotatedInSubclassGoodEvents.add(o);
}
}
public void testNotOverriddenInSubclass() {
assertThat(getSubscriber().notOverriddenInSubclassEvents).contains(EVENT);
}
public void testOverriddenNotAnnotatedInSubclass() {
assertThat(getSubscriber().overriddenNotAnnotatedInSubclassEvents).contains(EVENT);
}
public void testDifferentlyOverriddenNotAnnotatedInSubclass() {
assertThat(getSubscriber().differentlyOverriddenNotAnnotatedInSubclassGoodEvents)
.contains(EVENT);
assertThat(getSubscriber().differentlyOverriddenNotAnnotatedInSubclassBadEvents).isEmpty();
}
public void testOverriddenAndAnnotatedInSubclass() {
assertThat(getSubscriber().overriddenAndAnnotatedInSubclassEvents).contains(EVENT);
}
public void testDifferentlyOverriddenAndAnnotatedInSubclass() {
assertThat(getSubscriber().differentlyOverriddenAnnotatedInSubclassGoodEvents)
.contains(EVENT);
assertThat(getSubscriber().differentlyOverriddenAnnotatedInSubclassBadEvents).isEmpty();
}
@Override
SubClass createSubscriber() {
return new SubClass();
}
}
// Superclass methods are abstract and NOT annotated: only the subclass
// override that adds @Subscribe becomes a subscriber.
public static class AbstractNotAnnotatedInSuperclassTest extends
AbstractEventBusTest<AbstractNotAnnotatedInSuperclassTest.SubClass> {
abstract static class SuperClass {
public abstract void overriddenInSubclassNowhereAnnotated(Object o);
public abstract void overriddenAndAnnotatedInSubclass(Object o);
}
static class SubClass extends SuperClass {
final List<Object> overriddenInSubclassNowhereAnnotatedEvents = Lists.newArrayList();
final List<Object> overriddenAndAnnotatedInSubclassEvents = Lists.newArrayList();
@Override
public void overriddenInSubclassNowhereAnnotated(Object o) {
overriddenInSubclassNowhereAnnotatedEvents.add(o);
}
@Subscribe
@Override
public void overriddenAndAnnotatedInSubclass(Object o) {
overriddenAndAnnotatedInSubclassEvents.add(o);
}
}
public void testOverriddenAndAnnotatedInSubclass() {
assertThat(getSubscriber().overriddenAndAnnotatedInSubclassEvents).contains(EVENT);
}
public void testOverriddenInSubclassNowhereAnnotated() {
assertThat(getSubscriber().overriddenInSubclassNowhereAnnotatedEvents).isEmpty();
}
@Override
SubClass createSubscriber() {
return new SubClass();
}
}
// Superclass methods are concrete and NOT annotated: annotating only the
// subclass override makes that method (and no other) a subscriber.
public static class NeitherAbstractNorAnnotatedInSuperclassTest extends
AbstractEventBusTest<NeitherAbstractNorAnnotatedInSuperclassTest.SubClass> {
static class SuperClass {
final List<Object> neitherOverriddenNorAnnotatedEvents = Lists.newArrayList();
final List<Object> overriddenInSubclassNowhereAnnotatedEvents = Lists.newArrayList();
final List<Object> overriddenAndAnnotatedInSubclassEvents = Lists.newArrayList();
public void neitherOverriddenNorAnnotated(Object o) {
neitherOverriddenNorAnnotatedEvents.add(o);
}
public void overriddenInSubclassNowhereAnnotated(Object o) {
overriddenInSubclassNowhereAnnotatedEvents.add(o);
}
public void overriddenAndAnnotatedInSubclass(Object o) {
overriddenAndAnnotatedInSubclassEvents.add(o);
}
}
static class SubClass extends SuperClass {
@Override
public void overriddenInSubclassNowhereAnnotated(Object o) {
super.overriddenInSubclassNowhereAnnotated(o);
}
@Subscribe
@Override
public void overriddenAndAnnotatedInSubclass(Object o) {
super.overriddenAndAnnotatedInSubclass(o);
}
}
public void testNeitherOverriddenNorAnnotated() {
assertThat(getSubscriber().neitherOverriddenNorAnnotatedEvents).isEmpty();
}
public void testOverriddenInSubclassNowhereAnnotated() {
assertThat(getSubscriber().overriddenInSubclassNowhereAnnotatedEvents).isEmpty();
}
public void testOverriddenAndAnnotatedInSubclass() {
assertThat(getSubscriber().overriddenAndAnnotatedInSubclassEvents).contains(EVENT);
}
@Override
SubClass createSubscriber() {
return new SubClass();
}
}
// @Subscribe declared anywhere along a (possibly deep) interface hierarchy
// makes the implementing class method a subscriber.
public static class DeepInterfaceTest extends
AbstractEventBusTest<DeepInterfaceTest.SubscriberClass> {
interface Interface1 {
@Subscribe
void annotatedIn1(Object o);
@Subscribe
void annotatedIn1And2(Object o);
@Subscribe
void annotatedIn1And2AndClass(Object o);
void declaredIn1AnnotatedIn2(Object o);
void declaredIn1AnnotatedInClass(Object o);
void nowhereAnnotated(Object o);
}
interface Interface2 extends Interface1 {
@Override
@Subscribe
void declaredIn1AnnotatedIn2(Object o);
@Override
@Subscribe
void annotatedIn1And2(Object o);
@Override
@Subscribe
void annotatedIn1And2AndClass(Object o);
void declaredIn2AnnotatedInClass(Object o);
@Subscribe
void annotatedIn2(Object o);
}
static class SubscriberClass implements Interface2 {
final List<Object> annotatedIn1Events = Lists.newArrayList();
final List<Object> annotatedIn1And2Events = Lists.newArrayList();
final List<Object> annotatedIn1And2AndClassEvents = Lists.newArrayList();
final List<Object> declaredIn1AnnotatedIn2Events = Lists.newArrayList();
final List<Object> declaredIn1AnnotatedInClassEvents = Lists.newArrayList();
final List<Object> declaredIn2AnnotatedInClassEvents = Lists.newArrayList();
final List<Object> annotatedIn2Events = Lists.newArrayList();
final List<Object> nowhereAnnotatedEvents = Lists.newArrayList();
@Override
public void annotatedIn1(Object o) {
annotatedIn1Events.add(o);
}
@Subscribe
@Override
public void declaredIn1AnnotatedInClass(Object o) {
declaredIn1AnnotatedInClassEvents.add(o);
}
@Override
public void declaredIn1AnnotatedIn2(Object o) {
declaredIn1AnnotatedIn2Events.add(o);
}
@Override
public void annotatedIn1And2(Object o) {
annotatedIn1And2Events.add(o);
}
@Subscribe
@Override
public void annotatedIn1And2AndClass(Object o) {
annotatedIn1And2AndClassEvents.add(o);
}
@Subscribe
@Override
public void declaredIn2AnnotatedInClass(Object o) {
declaredIn2AnnotatedInClassEvents.add(o);
}
@Override
public void annotatedIn2(Object o) {
annotatedIn2Events.add(o);
}
@Override
public void nowhereAnnotated(Object o) {
nowhereAnnotatedEvents.add(o);
}
}
public void testAnnotatedIn1() {
assertThat(getSubscriber().annotatedIn1Events).contains(EVENT);
}
public void testAnnotatedIn2() {
assertThat(getSubscriber().annotatedIn2Events).contains(EVENT);
}
public void testAnnotatedIn1And2() {
assertThat(getSubscriber().annotatedIn1And2Events).contains(EVENT);
}
public void testAnnotatedIn1And2AndClass() {
assertThat(getSubscriber().annotatedIn1And2AndClassEvents).contains(EVENT);
}
public void testDeclaredIn1AnnotatedIn2() {
assertThat(getSubscriber().declaredIn1AnnotatedIn2Events).contains(EVENT);
}
public void testDeclaredIn1AnnotatedInClass() {
assertThat(getSubscriber().declaredIn1AnnotatedInClassEvents).contains(EVENT);
}
public void testDeclaredIn2AnnotatedInClass() {
assertThat(getSubscriber().declaredIn2AnnotatedInClassEvents).contains(EVENT);
}
public void testNowhereAnnotated() {
assertThat(getSubscriber().nowhereAnnotatedEvents).isEmpty();
}
@Override
SubscriberClass createSubscriber() {
return new SubscriberClass();
}
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.cluster.metadata;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsClusterStateUpdateRequest;
import org.elasticsearch.action.admin.indices.upgrade.post.UpgradeSettingsClusterStateUpdateRequest;
import org.elasticsearch.cluster.AckedClusterStateUpdateTask;
import org.elasticsearch.cluster.ClusterChangedEvent;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ClusterStateListener;
import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse;
import org.elasticsearch.cluster.block.ClusterBlock;
import org.elasticsearch.cluster.block.ClusterBlocks;
import org.elasticsearch.cluster.routing.RoutingTable;
import org.elasticsearch.cluster.routing.allocation.AllocationService;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.IndexScopedSettings;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.Index;
import org.elasticsearch.indices.IndicesService;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
/**
 * Service responsible for submitting update index settings requests.
 * <p>
 * Listens to cluster state changes on the elected master in order to apply the
 * {@code auto_expand_replicas} setting, and exposes entry points to update index
 * settings and to upgrade index compatibility versions via cluster state update tasks.
 */
public class MetaDataUpdateSettingsService extends AbstractComponent implements ClusterStateListener {

    private final ClusterService clusterService;
    private final AllocationService allocationService;
    private final IndexScopedSettings indexScopedSettings;
    private final IndicesService indicesService;

    /**
     * @param settings            node settings, passed to {@link AbstractComponent}
     * @param clusterService      used to observe cluster changes and submit state update tasks
     * @param allocationService   used to reroute shards after a settings change
     * @param indexScopedSettings registry/validator for index-scoped settings
     * @param indicesService      used to verify that updated index metadata is usable
     */
    @Inject
    public MetaDataUpdateSettingsService(Settings settings, ClusterService clusterService, AllocationService allocationService,
                                         IndexScopedSettings indexScopedSettings, IndicesService indicesService) {
        super(settings);
        this.clusterService = clusterService;
        // register as a cluster state listener so clusterChanged() below is invoked
        this.clusterService.add(this);
        this.allocationService = allocationService;
        this.indexScopedSettings = indexScopedSettings;
        this.indicesService = indicesService;
    }

    /**
     * Applies auto-expand replicas: for each index with the setting enabled, recomputes the
     * desired replica count from the current number of data nodes (clamped to the configured
     * min/max) and submits an update-settings request for every index whose count changed.
     * Only acts when the local node is the elected master.
     */
    @Override
    public void clusterChanged(ClusterChangedEvent event) {
        // update an index with number of replicas based on data nodes if possible
        if (!event.state().nodes().isLocalNodeElectedMaster()) {
            return;
        }
        // we will want to know this for translating "all" to a number
        final int dataNodeCount = event.state().nodes().getDataNodes().size();
        // desired replica count -> indices that should be updated to that count
        Map<Integer, List<Index>> nrReplicasChanged = new HashMap<>();
        // we need to do this each time in case it was changed by update settings
        for (final IndexMetaData indexMetaData : event.state().metaData()) {
            AutoExpandReplicas autoExpandReplicas = IndexMetaData.INDEX_AUTO_EXPAND_REPLICAS_SETTING.get(indexMetaData.getSettings());
            if (autoExpandReplicas.isEnabled()) {
                /*
                 * we have to expand the number of replicas for this index to at least min and at most max nodes here
                 * so we are bumping it up if we have to or reduce it depending on min/max and the number of datanodes.
                 * If we change the number of replicas we just let the shard allocator do its thing once we updated it
                 * since it goes through the index metadata to figure out if something needs to be done anyway. To do that
                 * we issue a cluster settings update command below and kick off a reroute.
                 */
                final int min = autoExpandReplicas.getMinReplicas();
                final int max = autoExpandReplicas.getMaxReplicas(dataNodeCount);
                // one copy per data node minus the primary, clamped into [min, max]
                int numberOfReplicas = dataNodeCount - 1;
                if (numberOfReplicas < min) {
                    numberOfReplicas = min;
                } else if (numberOfReplicas > max) {
                    numberOfReplicas = max;
                }
                // same value, nothing to do there
                if (numberOfReplicas == indexMetaData.getNumberOfReplicas()) {
                    continue;
                }
                if (numberOfReplicas >= min && numberOfReplicas <= max) {
                    if (!nrReplicasChanged.containsKey(numberOfReplicas)) {
                        nrReplicasChanged.put(numberOfReplicas, new ArrayList<>());
                    }
                    nrReplicasChanged.get(numberOfReplicas).add(indexMetaData.getIndex());
                }
            }
        }
        if (nrReplicasChanged.size() > 0) {
            // update settings and kick off a reroute (implicit) for them to take effect
            for (final Integer fNumberOfReplicas : nrReplicasChanged.keySet()) {
                Settings settings = Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, fNumberOfReplicas).build();
                final List<Index> indices = nrReplicasChanged.get(fNumberOfReplicas);
                UpdateSettingsClusterStateUpdateRequest updateRequest = new UpdateSettingsClusterStateUpdateRequest()
                        .indices(indices.toArray(new Index[indices.size()])).settings(settings)
                        .ackTimeout(TimeValue.timeValueMillis(0)) //no need to wait for ack here
                        .masterNodeTimeout(TimeValue.timeValueMinutes(10));
                updateSettings(updateRequest, new ActionListener<ClusterStateUpdateResponse>() {
                    @Override
                    public void onResponse(ClusterStateUpdateResponse response) {
                        for (Index index : indices) {
                            logger.info("{} auto expanded replicas to [{}]", index, fNumberOfReplicas);
                        }
                    }

                    @Override
                    public void onFailure(Exception t) {
                        for (Index index : indices) {
                            logger.warn("{} fail to auto expand replicas to [{}]", index, fNumberOfReplicas);
                        }
                    }
                });
            }
        }
    }

    /**
     * Submits a cluster state update task applying the requested index settings.
     * Dynamic settings may be applied to open indices; non-dynamic settings are only
     * accepted when every target index is closed. Changing the number of shards is
     * always rejected, and changing the number of replicas on closed indices is
     * rejected inside the task (it could leave the index unopenable).
     *
     * @param request  the update request carrying the settings and target indices
     * @param listener notified with the acknowledged response or the failure
     */
    public void updateSettings(final UpdateSettingsClusterStateUpdateRequest request, final ActionListener<ClusterStateUpdateResponse> listener) {
        final Settings normalizedSettings = Settings.builder().put(request.settings()).normalizePrefix(IndexMetaData.INDEX_SETTING_PREFIX).build();
        Settings.Builder settingsForClosedIndices = Settings.builder();
        Settings.Builder settingsForOpenIndices = Settings.builder();
        // non-dynamic settings; these cannot be applied while an index is open
        Settings.Builder skippedSettingsBuilder = Settings.builder();
        indexScopedSettings.validate(normalizedSettings);
        // never allow to change the number of shards
        for (Map.Entry<String, String> entry : normalizedSettings.getAsMap().entrySet()) {
            if (entry.getKey().equals(IndexMetaData.SETTING_NUMBER_OF_SHARDS)) {
                listener.onFailure(new IllegalArgumentException("can't change the number of shards for an index"));
                return;
            }
            Setting<?> setting = indexScopedSettings.get(entry.getKey());
            assert setting != null; // we already validated the normalized settings
            settingsForClosedIndices.put(entry.getKey(), entry.getValue());
            if (setting.isDynamic()) {
                settingsForOpenIndices.put(entry.getKey(), entry.getValue());
            } else {
                skippedSettingsBuilder.put(entry.getKey(), entry.getValue());
            }
        }
        final Settings skippedSettings = skippedSettingsBuilder.build();
        final Settings closedSettings = settingsForClosedIndices.build();
        final Settings openSettings = settingsForOpenIndices.build();
        final boolean preserveExisting = request.isPreserveExisting();
        clusterService.submitStateUpdateTask("update-settings",
                new AckedClusterStateUpdateTask<ClusterStateUpdateResponse>(Priority.URGENT, request, listener) {
            @Override
            protected ClusterStateUpdateResponse newResponse(boolean acknowledged) {
                return new ClusterStateUpdateResponse(acknowledged);
            }

            @Override
            public ClusterState execute(ClusterState currentState) {
                RoutingTable.Builder routingTableBuilder = RoutingTable.builder(currentState.routingTable());
                MetaData.Builder metaDataBuilder = MetaData.builder(currentState.metaData());
                // allow to change any settings on a closed index, and only allow dynamic settings to be changed
                // on an open index
                Set<Index> openIndices = new HashSet<>();
                Set<Index> closeIndices = new HashSet<>();
                final String[] actualIndices = new String[request.indices().length];
                for (int i = 0; i < request.indices().length; i++) {
                    Index index = request.indices()[i];
                    actualIndices[i] = index.getName();
                    final IndexMetaData metaData = currentState.metaData().getIndexSafe(index);
                    if (metaData.getState() == IndexMetaData.State.OPEN) {
                        openIndices.add(index);
                    } else {
                        closeIndices.add(index);
                    }
                }
                if (closeIndices.size() > 0 && closedSettings.get(IndexMetaData.SETTING_NUMBER_OF_REPLICAS) != null) {
                    throw new IllegalArgumentException(String.format(Locale.ROOT,
                            "Can't update [%s] on closed indices %s - can leave index in an unopenable state", IndexMetaData.SETTING_NUMBER_OF_REPLICAS,
                            closeIndices
                    ));
                }
                if (!skippedSettings.getAsMap().isEmpty() && !openIndices.isEmpty()) {
                    throw new IllegalArgumentException(String.format(Locale.ROOT,
                            "Can't update non dynamic settings [%s] for open indices %s",
                            skippedSettings.getAsMap().keySet(),
                            openIndices
                    ));
                }
                int updatedNumberOfReplicas = openSettings.getAsInt(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, -1);
                if (updatedNumberOfReplicas != -1 && preserveExisting == false) {
                    // we do *not* update the in sync allocation ids as they will be removed upon the first index
                    // operation which make these copies stale
                    // TODO: update the list once the data is deleted by the node?
                    routingTableBuilder.updateNumberOfReplicas(updatedNumberOfReplicas, actualIndices);
                    metaDataBuilder.updateNumberOfReplicas(updatedNumberOfReplicas, actualIndices);
                    logger.info("updating number_of_replicas to [{}] for indices {}", updatedNumberOfReplicas, actualIndices);
                }
                ClusterBlocks.Builder blocks = ClusterBlocks.builder().blocks(currentState.blocks());
                // toggle the per-index cluster blocks for any block setting present in the request
                maybeUpdateClusterBlock(actualIndices, blocks, IndexMetaData.INDEX_READ_ONLY_BLOCK, IndexMetaData.INDEX_READ_ONLY_SETTING, openSettings);
                maybeUpdateClusterBlock(actualIndices, blocks, IndexMetaData.INDEX_METADATA_BLOCK, IndexMetaData.INDEX_BLOCKS_METADATA_SETTING, openSettings);
                maybeUpdateClusterBlock(actualIndices, blocks, IndexMetaData.INDEX_WRITE_BLOCK, IndexMetaData.INDEX_BLOCKS_WRITE_SETTING, openSettings);
                maybeUpdateClusterBlock(actualIndices, blocks, IndexMetaData.INDEX_READ_BLOCK, IndexMetaData.INDEX_BLOCKS_READ_SETTING, openSettings);
                if (!openIndices.isEmpty()) {
                    for (Index index : openIndices) {
                        IndexMetaData indexMetaData = metaDataBuilder.getSafe(index);
                        Settings.Builder updates = Settings.builder();
                        Settings.Builder indexSettings = Settings.builder().put(indexMetaData.getSettings());
                        if (indexScopedSettings.updateDynamicSettings(openSettings, indexSettings, updates, index.getName())) {
                            if (preserveExisting) {
                                // re-apply the previous values on top so existing settings win
                                indexSettings.put(indexMetaData.getSettings());
                            }
                            metaDataBuilder.put(IndexMetaData.builder(indexMetaData).settings(indexSettings));
                        }
                    }
                }
                if (!closeIndices.isEmpty()) {
                    for (Index index : closeIndices) {
                        IndexMetaData indexMetaData = metaDataBuilder.getSafe(index);
                        Settings.Builder updates = Settings.builder();
                        Settings.Builder indexSettings = Settings.builder().put(indexMetaData.getSettings());
                        if (indexScopedSettings.updateSettings(closedSettings, indexSettings, updates, index.getName())) {
                            if (preserveExisting) {
                                // re-apply the previous values on top so existing settings win
                                indexSettings.put(indexMetaData.getSettings());
                            }
                            metaDataBuilder.put(IndexMetaData.builder(indexMetaData).settings(indexSettings));
                        }
                    }
                }
                ClusterState updatedState = ClusterState.builder(currentState).metaData(metaDataBuilder).routingTable(routingTableBuilder.build()).blocks(blocks).build();
                // now, reroute in case things change that require it (like number of replicas)
                updatedState = allocationService.reroute(updatedState, "settings update");
                try {
                    // verify the updated metadata is actually usable before returning the new state
                    for (Index index : openIndices) {
                        final IndexMetaData currentMetaData = currentState.getMetaData().getIndexSafe(index);
                        final IndexMetaData updatedMetaData = updatedState.metaData().getIndexSafe(index);
                        indicesService.verifyIndexMetadata(currentMetaData, updatedMetaData);
                    }
                    for (Index index : closeIndices) {
                        final IndexMetaData currentMetaData = currentState.getMetaData().getIndexSafe(index);
                        final IndexMetaData updatedMetaData = updatedState.metaData().getIndexSafe(index);
                        indicesService.verifyIndexMetadata(currentMetaData, updatedMetaData);
                    }
                } catch (IOException ex) {
                    throw ExceptionsHelper.convertToElastic(ex);
                }
                return updatedState;
            }
        });
    }

    /**
     * Updates the cluster block only iff the setting exists in the given settings:
     * adds the block to every index when the setting is {@code true}, removes it otherwise.
     */
    private static void maybeUpdateClusterBlock(String[] actualIndices, ClusterBlocks.Builder blocks, ClusterBlock block, Setting<Boolean> setting, Settings openSettings) {
        if (setting.exists(openSettings)) {
            final boolean updateReadBlock = setting.get(openSettings);
            for (String index : actualIndices) {
                if (updateReadBlock) {
                    blocks.addIndexBlock(index, block);
                } else {
                    blocks.removeIndexBlock(index, block);
                }
            }
        }
    }

    /**
     * Submits a cluster state update task that records upgraded/minimum-compatible version
     * settings for the indices named in the request. Indices created with the current
     * version are skipped — there is no reason to pollute their settings.
     *
     * @param request  maps index names to (upgrade version, minimum compatible version) tuples
     * @param listener notified with the acknowledged response or the failure
     */
    public void upgradeIndexSettings(final UpgradeSettingsClusterStateUpdateRequest request, final ActionListener<ClusterStateUpdateResponse> listener) {
        clusterService.submitStateUpdateTask("update-index-compatibility-versions", new AckedClusterStateUpdateTask<ClusterStateUpdateResponse>(Priority.URGENT, request, listener) {
            @Override
            protected ClusterStateUpdateResponse newResponse(boolean acknowledged) {
                return new ClusterStateUpdateResponse(acknowledged);
            }

            @Override
            public ClusterState execute(ClusterState currentState) {
                MetaData.Builder metaDataBuilder = MetaData.builder(currentState.metaData());
                for (Map.Entry<String, Tuple<Version, String>> entry : request.versions().entrySet()) {
                    String index = entry.getKey();
                    IndexMetaData indexMetaData = metaDataBuilder.get(index);
                    if (indexMetaData != null) {
                        if (Version.CURRENT.equals(indexMetaData.getCreationVersion()) == false) {
                            // No reason to pollute the settings, we didn't really upgrade anything
                            metaDataBuilder.put(IndexMetaData.builder(indexMetaData)
                                    .settings(Settings.builder().put(indexMetaData.getSettings())
                                            .put(IndexMetaData.SETTING_VERSION_MINIMUM_COMPATIBLE, entry.getValue().v2())
                                            .put(IndexMetaData.SETTING_VERSION_UPGRADED, entry.getValue().v1())
                                    )
                            );
                        }
                    }
                }
                return ClusterState.builder(currentState).metaData(metaDataBuilder).build();
            }
        });
    }
}
| |
package com.github.aleksandermielczarek.observablecacheexample;
import android.support.test.runner.AndroidJUnit4;
import com.github.aleksandermielczarek.observablecache2.ObservableCache;
import com.github.aleksandermielczarek.observablecache2.LruObservableCache;
import com.github.aleksandermielczarek.observablecache2.service.ObservableCacheService;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import io.reactivex.Completable;
import io.reactivex.Flowable;
import io.reactivex.Maybe;
import io.reactivex.Observable;
import io.reactivex.Single;
import io.reactivex.disposables.CompositeDisposable;
import io.reactivex.functions.Action;
import io.reactivex.functions.Consumer;
/**
* Created by Aleksander Mielczarek on 02.03.2017.
*/
/**
 * Instrumentation tests for {@code ObservableCacheService} against all five RxJava 2
 * reactive types (Flowable, Observable, Maybe, Single, Completable).
 * <p>
 * Each type is exercised in four scenarios: success/error, each completing either before
 * or after a simulated "rotation". {@link #rotate()} simulates a configuration change by
 * disposing the current subscriptions and clearing the recorded results; the test then
 * re-subscribes via the {@code cachedXxx()} lookup and expects the in-flight result to be
 * delivered from the cache. After delivery the cache is expected to be empty.
 */
@RunWith(AndroidJUnit4.class)
public class ObservableCacheService2Test {
    // NOTE(review): delays are real wall-clock sleeps, so each test takes ~TIMEOUT seconds.
    public static final TimeUnit TIME_UNIT = TimeUnit.SECONDS;
    // delay before each source emits
    public static final int DELAY = 3;
    // how long a test waits for the pipeline to finish
    public static final int TIMEOUT = 10;
    // how long to wait before simulating the rotation (must be < DELAY so the source is still in flight)
    public static final int ROTATE_TIMEOUT = 1;
    public static final String RESULT = "result";
    public static final String ERROR = "error";
    // cache under test plus the annotated service facade built on top of it
    private final ObservableCache observableCache = LruObservableCache.newInstance();
    private final ObservableCacheService observableCacheService = new ObservableCacheService(observableCache);
    private final Cached2Service cachedService = observableCacheService.createObservableCacheService(Cached2Service.class);
    // holders for what the subscriptions deliver; reset before each test and on rotate()
    private final AtomicReference<String> result = new AtomicReference<>();
    private final AtomicBoolean completableResult = new AtomicBoolean();
    private final AtomicReference<String> error = new AtomicReference<>();
    private final CompositeDisposable disposables = new CompositeDisposable();

    // ---- delayed sources that eventually emit RESULT (or complete) ----

    /** Flowable that emits RESULT after DELAY. */
    public Flowable<String> flowable() {
        return Flowable.fromCallable(() -> RESULT)
                .delay(DELAY, TIME_UNIT);
    }

    /** Observable that emits RESULT after DELAY. */
    public Observable<String> observable() {
        return Observable.fromCallable(() -> RESULT)
                .delay(DELAY, TIME_UNIT);
    }

    /** Maybe that emits RESULT after DELAY. */
    public Maybe<String> maybe() {
        return Maybe.fromCallable(() -> RESULT)
                .delay(DELAY, TIME_UNIT);
    }

    /** Single that emits RESULT after DELAY. */
    public Single<String> single() {
        return Single.fromCallable(() -> RESULT)
                .delay(DELAY, TIME_UNIT);
    }

    /** Completable that completes after DELAY. */
    public Completable completable() {
        return Completable.fromCallable(() -> null)
                .delay(DELAY, TIME_UNIT);
    }

    // ---- variants that fail with IllegalArgumentException(ERROR) after the delay ----

    public Flowable<String> flowableError() {
        return flowable().map(observable -> {
            throw new IllegalArgumentException(ERROR);
        });
    }

    public Observable<String> observableError() {
        return observable().map(observable -> {
            throw new IllegalArgumentException(ERROR);
        });
    }

    public Maybe<String> maybeError() {
        return maybe().map(single -> {
            throw new IllegalArgumentException(ERROR);
        });
    }

    public Single<String> singleError() {
        return single().map(single -> {
            throw new IllegalArgumentException(ERROR);
        });
    }

    public Completable completableError() {
        return completable().andThen(Completable.fromCallable(() -> {
            throw new IllegalArgumentException(ERROR);
        }));
    }

    /** Blocks long enough (TIMEOUT) for the delayed pipeline to finish. */
    public void waitForRx() throws InterruptedException {
        TIME_UNIT.sleep(TIMEOUT);
    }

    /**
     * Simulates an Android configuration change: waits ROTATE_TIMEOUT (while the source is
     * still in flight), disposes all subscriptions and resets the recorded results.
     */
    public void rotate() throws InterruptedException {
        TIME_UNIT.sleep(ROTATE_TIMEOUT);
        disposables.clear();
        result.set("");
        error.set("");
        completableResult.set(false);
    }

    // ---- shared assertions: cache must be drained and exactly one outcome recorded ----

    public void assertError() {
        Assert.assertTrue(observableCache.isEmpty());
        Assert.assertEquals(ERROR, error.get());
        Assert.assertEquals("", result.get());
    }

    public void assertErrorCompletable() {
        Assert.assertTrue(observableCache.isEmpty());
        Assert.assertEquals(ERROR, error.get());
        Assert.assertFalse(completableResult.get());
    }

    public void assertResult() {
        Assert.assertTrue(observableCache.isEmpty());
        Assert.assertEquals(RESULT, result.get());
        Assert.assertEquals("", error.get());
    }

    public void assertResultCompletable() {
        Assert.assertTrue(observableCache.isEmpty());
        Assert.assertTrue(completableResult.get());
        Assert.assertEquals("", error.get());
    }

    // ---- subscription callbacks that record outcomes into the holders above ----

    public Consumer<String> onNext() {
        return result::set;
    }

    public Action onNextCompletable() {
        return () -> completableResult.set(true);
    }

    public Consumer<Throwable> onError() {
        return throwable -> error.set(throwable.getMessage());
    }

    /** Resets cache, recorded outcomes and subscriptions before every test. */
    @Before
    public void clear() {
        observableCache.clear();
        result.set("");
        error.set("");
        completableResult.set(false);
        disposables.clear();
    }

    // ---- Flowable ----

    @Test
    public void flowableCompleteBeforeRotation() throws Exception {
        disposables.add(flowable()
                .compose(cachedService.flowable())
                .subscribe(onNext(), onError()));
        waitForRx();
        assertResult();
    }

    @Test
    public void flowableCompleteAfterRotation() throws Exception {
        disposables.add(flowable()
                .compose(cachedService.flowable())
                .subscribe(onNext(), onError()));
        rotate();
        // after the simulated rotation, the in-flight stream must be recoverable from the cache
        cachedService.cachedFlowable().ifPresent(flowableFromCache -> disposables.add(flowableFromCache.subscribe(onNext(), onError())));
        waitForRx();
        assertResult();
    }

    @Test
    public void flowableErrorCompleteBeforeRotation() throws Exception {
        disposables.add(flowableError()
                .compose(cachedService.flowable())
                .subscribe(onNext(), onError()));
        waitForRx();
        assertError();
    }

    @Test
    public void flowableErrorCompleteAfterRotation() throws Exception {
        disposables.add(flowableError()
                .compose(cachedService.flowable())
                .subscribe(onNext(), onError()));
        rotate();
        cachedService.cachedFlowable().ifPresent(flowableFromCache -> disposables.add(flowableFromCache.subscribe(onNext(), onError())));
        waitForRx();
        assertError();
    }

    // ---- Observable ----

    @Test
    public void observableCompleteBeforeRotation() throws Exception {
        disposables.add(observable()
                .compose(cachedService.observable())
                .subscribe(onNext(), onError()));
        waitForRx();
        assertResult();
    }

    @Test
    public void observableCompleteAfterRotation() throws Exception {
        disposables.add(observable()
                .compose(cachedService.observable())
                .subscribe(onNext(), onError()));
        rotate();
        cachedService.cachedObservable().ifPresent(observableFromCache -> disposables.add(observableFromCache.subscribe(onNext(), onError())));
        waitForRx();
        assertResult();
    }

    @Test
    public void observableErrorCompleteBeforeRotation() throws Exception {
        disposables.add(observableError()
                .compose(cachedService.observable())
                .subscribe(onNext(), onError()));
        waitForRx();
        assertError();
    }

    @Test
    public void observableErrorCompleteAfterRotation() throws Exception {
        disposables.add(observableError()
                .compose(cachedService.observable())
                .subscribe(onNext(), onError()));
        rotate();
        cachedService.cachedObservable().ifPresent(observableFromCache -> disposables.add(observableFromCache.subscribe(onNext(), onError())));
        waitForRx();
        assertError();
    }

    // ---- Maybe ----

    @Test
    public void maybeCompleteBeforeRotation() throws Exception {
        disposables.add(maybe()
                .compose(cachedService.maybe())
                .subscribe(onNext(), onError()));
        waitForRx();
        assertResult();
    }

    @Test
    public void maybeCompleteAfterRotation() throws Exception {
        disposables.add(maybe()
                .compose(cachedService.maybe())
                .subscribe(onNext(), onError()));
        rotate();
        cachedService.cachedMaybe().ifPresent(maybeFromCache -> disposables.add(maybeFromCache.subscribe(onNext(), onError())));
        waitForRx();
        assertResult();
    }

    @Test
    public void maybeErrorCompleteBeforeRotation() throws Exception {
        disposables.add(maybeError()
                .compose(cachedService.maybe())
                .subscribe(onNext(), onError()));
        waitForRx();
        assertError();
    }

    @Test
    public void maybeErrorCompleteAfterRotation() throws Exception {
        disposables.add(maybeError()
                .compose(cachedService.maybe())
                .subscribe(onNext(), onError()));
        rotate();
        cachedService.cachedMaybe().ifPresent(maybeFromCache -> disposables.add(maybeFromCache.subscribe(onNext(), onError())));
        waitForRx();
        assertError();
    }

    // ---- Single ----

    @Test
    public void singleCompleteBeforeRotation() throws Exception {
        disposables.add(single()
                .compose(cachedService.single())
                .subscribe(onNext(), onError()));
        waitForRx();
        assertResult();
    }

    @Test
    public void singleCompleteAfterRotation() throws Exception {
        disposables.add(single()
                .compose(cachedService.single())
                .subscribe(onNext(), onError()));
        rotate();
        cachedService.cachedSingle().ifPresent(singleFromCache -> disposables.add(singleFromCache.subscribe(onNext(), onError())));
        waitForRx();
        assertResult();
    }

    @Test
    public void singleErrorCompleteBeforeRotation() throws Exception {
        disposables.add(singleError()
                .compose(cachedService.single())
                .subscribe(onNext(), onError()));
        waitForRx();
        assertError();
    }

    @Test
    public void singleErrorCompleteAfterRotation() throws Exception {
        disposables.add(singleError()
                .compose(cachedService.single())
                .subscribe(onNext(), onError()));
        rotate();
        cachedService.cachedSingle().ifPresent(singleFromCache -> disposables.add(singleFromCache.subscribe(onNext(), onError())));
        waitForRx();
        assertError();
    }

    // ---- Completable ----

    @Test
    public void completableCompleteBeforeRotation() throws Exception {
        disposables.add(completable()
                .compose(cachedService.completable())
                .subscribe(onNextCompletable(), onError()));
        waitForRx();
        assertResultCompletable();
    }

    @Test
    public void completableCompleteAfterRotation() throws Exception {
        disposables.add(completable()
                .compose(cachedService.completable())
                .subscribe(onNextCompletable(), onError()));
        rotate();
        cachedService.cachedCompletable().ifPresent(completableFromCache -> disposables.add(completableFromCache.subscribe(onNextCompletable(), onError())));
        waitForRx();
        assertResultCompletable();
    }

    @Test
    public void completableErrorCompleteBeforeRotation() throws Exception {
        disposables.add(completableError()
                .compose(cachedService.completable())
                .subscribe(onNextCompletable(), onError()));
        waitForRx();
        assertErrorCompletable();
    }

    @Test
    public void completableErrorCompleteAfterRotation() throws Exception {
        disposables.add(completableError()
                .compose(cachedService.completable())
                .subscribe(onNextCompletable(), onError()));
        rotate();
        cachedService.cachedCompletable().ifPresent(completableFromCache -> disposables.add(completableFromCache.subscribe(onNextCompletable(), onError())));
        waitForRx();
        assertErrorCompletable();
    }
}
| |
/*******************************************************************************
* Copyright (c) 2000, 2007 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
*******************************************************************************/
package org.eclipse.php.internal.ui.preferences.includepath;
import java.util.*;
import org.eclipse.core.resources.*;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IPath;
import org.eclipse.dltk.core.DLTKCore;
import org.eclipse.dltk.core.IBuildpathEntry;
import org.eclipse.dltk.core.IScriptProject;
import org.eclipse.dltk.internal.ui.wizards.NewWizardMessages;
import org.eclipse.dltk.internal.ui.wizards.TypedViewerFilter;
import org.eclipse.dltk.internal.ui.wizards.buildpath.BPListElement;
import org.eclipse.dltk.internal.ui.wizards.buildpath.BuildPathBasePage;
import org.eclipse.dltk.ui.actions.AbstractOpenWizardAction;
import org.eclipse.jface.action.IAction;
import org.eclipse.jface.dialogs.MessageDialog;
import org.eclipse.jface.dialogs.TrayDialog;
import org.eclipse.jface.util.IPropertyChangeListener;
import org.eclipse.jface.util.PropertyChangeEvent;
import org.eclipse.jface.viewers.ILabelProvider;
import org.eclipse.jface.viewers.ITreeContentProvider;
import org.eclipse.jface.viewers.ViewerFilter;
import org.eclipse.jface.window.Window;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.ui.INewWizard;
import org.eclipse.ui.model.BaseWorkbenchContentProvider;
import org.eclipse.ui.model.WorkbenchLabelProvider;
/**
 * Dialog that lets the user pick multiple existing (or about-to-be-created) folders
 * of the current script project and turn them into source (buildpath) entries.
 * Tracks inserted/removed/modified buildpath elements for the caller to apply.
 */
public class CreateMultipleSourceFoldersDialog extends TrayDialog {

    /**
     * Content provider that, in addition to real workbench resources, surfaces the
     * "fake" folders registered in {@link #fNonExistingFolders} — folders for
     * buildpath entries whose underlying resource does not exist (yet).
     */
    private final class FakeFolderBaseWorkbenchContentProvider extends
            BaseWorkbenchContentProvider {
        /**
         * {@inheritDoc}
         */
        public Object getParent(Object element) {
            // fake folders have their parent recorded at registration time
            Object object = fNonExistingFolders.get(element);
            if (object != null)
                return object;
            return super.getParent(element);
        }

        /**
         * {@inheritDoc}
         */
        public Object[] getChildren(Object element) {
            List result = new ArrayList();
            // all keys with value element, i.e. every fake folder whose parent is this element
            Set keys = fNonExistingFolders.keySet();
            for (Iterator iter = keys.iterator(); iter.hasNext();) {
                Object key = iter.next();
                if (fNonExistingFolders.get(key).equals(element)) {
                    result.add(key);
                }
            }
            if (result.size() == 0)
                return super.getChildren(element);
            // merge the real children behind the fake ones
            Object[] children = super.getChildren(element);
            for (int i = 0; i < children.length; i++) {
                result.add(children[i]);
            }
            return result.toArray();
        }
    }

    private final IScriptProject fScriptProject;
    private final BPListElement[] fExistingElements;
    private final HashSet fRemovedElements;
    private final HashSet fModifiedElements;
    private final HashSet fInsertedElements;
    /** Maps each non-existing (fake) IFolder to its parent IContainer. */
    private final Hashtable fNonExistingFolders;

    /**
     * @param scriptProject    project whose buildpath is edited
     * @param existingElements current buildpath entries; entries without an existing
     *                         resource are registered as fake folders so they still
     *                         show up in the selection tree
     * @param shell            parent shell for the dialog
     */
    public CreateMultipleSourceFoldersDialog(
            final IScriptProject scriptProject,
            final BPListElement[] existingElements, Shell shell) {
        super(shell);
        fScriptProject = scriptProject;
        fExistingElements = existingElements;
        fRemovedElements = new HashSet();
        fModifiedElements = new HashSet();
        fInsertedElements = new HashSet();
        fNonExistingFolders = new Hashtable();
        for (int i = 0; i < existingElements.length; i++) {
            BPListElement cur = existingElements[i];
            if (cur.getResource() == null || !cur.getResource().exists()) {
                addFakeFolder(fScriptProject.getProject(), cur);
            }
        }
    }

    /**
     * Opens the folder-selection dialog and, on OK, records the chosen folders as new
     * source entries (plus any exclusion-pattern updates needed to resolve nesting
     * conflicts with the existing entries).
     *
     * @return {@link Window#OK} or {@link Window#CANCEL}
     */
    public int open() {
        Class[] acceptedClasses = new Class[] { IProject.class, IFolder.class };
        List existingContainers = getExistingContainers(fExistingElements);
        IProject[] allProjects = ResourcesPlugin.getWorkspace().getRoot()
                .getProjects();
        // only the current project is selectable; every other project is filtered out
        ArrayList rejectedElements = new ArrayList(allProjects.length);
        IProject currProject = fScriptProject.getProject();
        for (int i = 0; i < allProjects.length; i++) {
            if (!allProjects[i].equals(currProject)) {
                rejectedElements.add(allProjects[i]);
            }
        }
        ViewerFilter filter = new TypedViewerFilter(acceptedClasses,
                rejectedElements.toArray());
        ILabelProvider lp = new WorkbenchLabelProvider();
        ITreeContentProvider cp = new FakeFolderBaseWorkbenchContentProvider();
        String title = NewWizardMessages.SourceContainerWorkbookPage_ExistingSourceFolderDialog_new_title;
        String message = NewWizardMessages.SourceContainerWorkbookPage_ExistingSourceFolderDialog_edit_description;
        MultipleFolderSelectionDialog dialog = new MultipleFolderSelectionDialog(
                getShell(), lp, cp) {
            protected Control createDialogArea(Composite parent) {
                Control result = super.createDialogArea(parent);
                if (DLTKCore.DEBUG) {
                    System.err
                            .println("CreateMultipleSourceFoldersDialog: Add help support"); //$NON-NLS-1$
                }
                // PlatformUI.getWorkbench().getHelpSystem().setHelp(parent,
                // IDLTKHelpContextIds.BP_CHOOSE_EXISTING_FOLDER_TO_MAKE_SOURCE_FOLDER);
                return result;
            }

            // lets the user create a new source folder from within the selection dialog;
            // on success the new folder is registered as a fake folder and returned
            protected Object createFolder(final IContainer container) {
                final Object[] result = new Object[1];
                final BPListElement newElement = new BPListElement(
                        fScriptProject, IBuildpathEntry.BPE_SOURCE, false);
                final AddSourceFolderWizard wizard = newSourceFolderWizard(
                        newElement, fExistingElements, container);
                AbstractOpenWizardAction action = new AbstractOpenWizardAction() {
                    protected INewWizard createWizard() throws CoreException {
                        return wizard;
                    }
                };
                action.addPropertyChangeListener(new IPropertyChangeListener() {
                    public void propertyChange(PropertyChangeEvent event) {
                        if (event.getProperty().equals(IAction.RESULT)) {
                            if (event.getNewValue().equals(Boolean.TRUE)) {
                                result[0] = addFakeFolder(
                                        fScriptProject.getProject(), newElement);
                            } else {
                                wizard.cancel();
                            }
                        }
                    }
                });
                action.run();
                return result[0];
            }
        };
        dialog.setExisting(existingContainers.toArray());
        dialog.setTitle(title);
        dialog.setMessage(message);
        dialog.addFilter(filter);
        dialog.setInput(fScriptProject.getProject().getParent());
        dialog.setInitialFocus(fScriptProject.getProject());
        if (dialog.open() == Window.OK) {
            Object[] elements = dialog.getResult();
            for (int i = 0; i < elements.length; i++) {
                IResource res = (IResource) elements[i];
                fInsertedElements.add(new BPListElement(fScriptProject,
                        IBuildpathEntry.BPE_SOURCE, res.getFullPath(), res,
                        false));
            }
            // with a single existing entry there is nothing to de-conflict
            // (original code had an empty `if (length == 1)` branch here);
            // otherwise resolve nesting conflicts via exclusion patterns
            if (fExistingElements.length != 1) {
                ArrayList added = new ArrayList(fInsertedElements);
                HashSet updatedExclusionPatterns = new HashSet();
                addExclusionPatterns(added, updatedExclusionPatterns);
                fModifiedElements.addAll(updatedExclusionPatterns);
            }
            return Window.OK;
        } else {
            return Window.CANCEL;
        }
    }

    /** @return a copy of the buildpath elements inserted by this dialog */
    public List getInsertedElements() {
        return new ArrayList(fInsertedElements);
    }

    /** @return a copy of the buildpath elements removed by this dialog */
    public List getRemovedElements() {
        return new ArrayList(fRemovedElements);
    }

    /** @return a copy of the buildpath elements modified by this dialog */
    public List getModifiedElements() {
        return new ArrayList(fModifiedElements);
    }

    /**
     * Fixes nesting conflicts between the new entries and the existing ones by adding
     * exclusion patterns; informs the user if any entry was modified.
     */
    private void addExclusionPatterns(List newEntries, Set modifiedEntries) {
        BuildPathBasePage.fixNestingConflicts((BPListElement[]) newEntries
                .toArray(new BPListElement[newEntries.size()]),
                fExistingElements, modifiedEntries);
        if (!modifiedEntries.isEmpty()) {
            String title = NewWizardMessages.SourceContainerWorkbookPage_exclusion_added_title;
            String message = NewWizardMessages.SourceContainerWorkbookPage_exclusion_added_message;
            MessageDialog.openInformation(getShell(), title, message);
        }
    }

    /** Builds a non-flushing AddSourceFolderWizard for the given element. */
    private AddSourceFolderWizard newSourceFolderWizard(BPListElement element,
            BPListElement[] existing, IContainer parent) {
        AddSourceFolderWizard wizard = new AddSourceFolderWizard(existing,
                element, false, true, false, false, false, parent);
        wizard.setDoFlushChange(false);
        return wizard;
    }

    /**
     * Collects the containers backing the existing elements plus every registered
     * fake folder, for pre-selection in the dialog.
     */
    private List getExistingContainers(BPListElement[] existingElements) {
        List res = new ArrayList();
        for (int i = 0; i < existingElements.length; i++) {
            IResource resource = existingElements[i].getResource();
            if (resource instanceof IContainer) {
                res.add(resource);
            }
        }
        Set keys = fNonExistingFolders.keySet();
        for (Iterator iter = keys.iterator(); iter.hasNext();) {
            IFolder folder = (IFolder) iter.next();
            res.add(folder);
        }
        return res;
    }

    /**
     * Registers the (possibly non-existing) folder for the given buildpath element —
     * and each of its non-existing ancestor folders — in {@link #fNonExistingFolders},
     * so the content provider can display them.
     *
     * @return the folder handle for the element's path within the container
     */
    private IFolder addFakeFolder(final IContainer container,
            final BPListElement element) {
        IFolder result;
        IPath projectPath = fScriptProject.getPath();
        IPath path = element.getPath();
        if (projectPath.isPrefixOf(path)) {
            // make the path relative to the project
            path = path.removeFirstSegments(projectPath.segmentCount());
        }
        result = container.getFolder(path);
        IFolder folder = result;
        do {
            IContainer parent = folder.getParent();
            fNonExistingFolders.put(folder, parent);
            if (parent instanceof IFolder) {
                folder = (IFolder) parent;
            } else {
                folder = null;
            }
        } while (folder != null && !folder.exists());
        return result;
    }
}
| |
/*
* Sun Public License Notice
*
* The contents of this file are subject to the Sun Public License
* Version 1.0 (the "License"). You may not use this file except in
* compliance with the License. A copy of the License is available at
* http://www.sun.com/
*
* The Original Code is NetBeans. The Initial Developer of the Original
* Code is Sun Microsystems, Inc. Portions Copyright 1997-2000 Sun
* Microsystems, Inc. All Rights Reserved.
*/
package org.netbeans.lib.cvsclient;
import com.intellij.util.concurrency.Semaphore;
import org.jetbrains.annotations.NonNls;
import org.netbeans.lib.cvsclient.command.CommandAbortedException;
import org.netbeans.lib.cvsclient.command.CommandException;
import org.netbeans.lib.cvsclient.command.IGlobalOptions;
import org.netbeans.lib.cvsclient.command.IOCommandException;
import org.netbeans.lib.cvsclient.connection.AuthenticationException;
import org.netbeans.lib.cvsclient.event.IEventSender;
import org.netbeans.lib.cvsclient.file.FileDetails;
import org.netbeans.lib.cvsclient.file.FileObject;
import org.netbeans.lib.cvsclient.io.IStreamLogger;
import org.netbeans.lib.cvsclient.progress.sending.IRequestsProgressHandler;
import org.netbeans.lib.cvsclient.request.*;
import org.netbeans.lib.cvsclient.response.DefaultResponseHandler;
import org.netbeans.lib.cvsclient.response.IResponseHandler;
import org.netbeans.lib.cvsclient.response.ResponseParser;
import org.netbeans.lib.cvsclient.response.ValidRequestsResponseHandler;
import org.netbeans.lib.cvsclient.util.BugLog;
import java.io.IOException;
import java.io.Reader;
import java.util.Iterator;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
/**
* @author Thomas Singer
*/
/**
 * Opens a connection to the CVS server, sends the queued requests and
 * dispatches the server's responses to the configured handlers.
 *
 * Requests are currently always processed inline on the calling thread
 * ({@link DirectProcessRequestHelper}); the timeout-based variant
 * ({@link TimedOutProcessRequestHelper}) exists but is disabled in
 * {@link #processRequests(Requests, IConnectionStreams, IRequestsProgressHandler)}.
 *
 * @author Thomas Singer
 */
public final class RequestProcessor implements IRequestProcessor {
  // Fields =================================================================
  private final IGlobalOptions globalOptions;
  private final IClientEnvironment clientEnvironment;
  private final ResponseService responseServices;
  private final IStreamLogger streamLogger;
  private final ICvsCommandStopper commandStopper;
  @NonNls private static final String OS_NAME_PROPERTY = "os.name";
  @NonNls private static final String WINDOWS_PREFIX = "Windows";
  @NonNls private static final String CASE_REQUEST = "Case";
  @NonNls private static final String CVS_PASS_ENV_VARS_PROPERTY = "cvs.pass.env.vars";
  @NonNls private static final String NO = "no";
  /** Command timeout in milliseconds; -1 means "never time out". */
  private final long myTimeout;

  // Setup ==================================================================

  /** Creates a processor with no timeout (equivalent to {@code timeout == -1}). */
  public RequestProcessor(IClientEnvironment clientEnvironment,
                          IGlobalOptions globalOptions,
                          IEventSender eventSender,
                          IStreamLogger streamLogger,
                          ICvsCommandStopper commandStopper) {
    this(clientEnvironment, globalOptions, eventSender, streamLogger, commandStopper, -1);
  }

  /**
   * @param timeout command timeout in milliseconds, or -1 to disable
   */
  public RequestProcessor(IClientEnvironment clientEnvironment,
                          IGlobalOptions globalOptions,
                          IEventSender eventSender,
                          IStreamLogger streamLogger,
                          ICvsCommandStopper commandStopper, final long timeout) {
    myTimeout = timeout;
    BugLog.getInstance().assertNotNull(globalOptions);
    BugLog.getInstance().assertNotNull(clientEnvironment);
    BugLog.getInstance().assertNotNull(eventSender);
    BugLog.getInstance().assertNotNull(streamLogger);
    BugLog.getInstance().assertNotNull(commandStopper);
    this.globalOptions = globalOptions;
    this.clientEnvironment = clientEnvironment;
    this.responseServices = new ResponseService(eventSender);
    this.streamLogger = streamLogger;
    this.commandStopper = commandStopper;
  }

  // Implemented ============================================================

  /**
   * Opens a connection, processes the given requests and closes the
   * connection again — also on failure.
   *
   * @return whether the server reported success ("ok")
   */
  public boolean processRequests(Requests requests, IRequestsProgressHandler communicationProgressHandler) throws CommandException,
                                                                                                                  AuthenticationException {
    IConnectionStreams connectionStreams = openConnection();
    try {
      return processRequests(requests, connectionStreams, communicationProgressHandler);
    }
    finally {
      connectionStreams.close();
    }
  }

  // Utils ==================================================================

  /**
   * Opens the connection and performs the initial handshake: valid-requests
   * negotiation, Root/Set requests, optional gzip, valid-responses,
   * UseUnchanged, global options and (on Windows) the "Case" request.
   * The streams are closed again if any step throws.
   */
  private IConnectionStreams openConnection() throws CommandException, AuthenticationException {
    clientEnvironment.getConnection().open(streamLogger);
    ConnectionStreams connectionStreams =
      new ConnectionStreams(clientEnvironment.getConnection(), streamLogger, clientEnvironment.getCharset());
    boolean exception = true;
    try {
      updateValidRequests(connectionStreams);
      sendRequest(new RootRequest(clientEnvironment.getConnection().getRepository()), connectionStreams);
      sendSetRequests(globalOptions, connectionStreams);
      // Handle gzip-compression
      if (globalOptions.isUseGzip() && isValidRequest(GzipStreamRequest.REQUEST)) {
        sendRequest(new GzipStreamRequest(), connectionStreams);
        connectionStreams.setGzipped();
      }
      //TODO: set variables
      sendRequest(new ValidResponsesRequest(), connectionStreams);
      sendRequest(new UseUnchangedRequest(), connectionStreams);
      sendGlobalOptionRequests(globalOptions, connectionStreams);
      // "Case" is only sent on Windows. System.getProperty() can return null
      // in restricted environments, so guard before dereferencing.
      final String osName = System.getProperty(OS_NAME_PROPERTY);
      if (osName != null && osName.startsWith(WINDOWS_PREFIX) && isValidRequest(CASE_REQUEST)) {
        sendRequest(new CaseRequest(), connectionStreams);
      }
      exception = false;
      return connectionStreams;
    }
    catch (IOException ex) {
      BugLog.getInstance().showException(ex);
      throw new IOCommandException(ex);
    }
    finally {
      if (exception) {
        connectionStreams.close();
      }
    }
  }

  /** Sends one Set request per environment variable configured in the global options. */
  private void sendSetRequests(IGlobalOptions globalOptions, ConnectionStreams connectionStreams)
    throws CommandAbortedException, IOException {
    Map envVariables = globalOptions.getEnvVariables();
    if (envVariables == null) {
      return;
    }
    // Iterate over entries directly instead of keySet() + get(), which did a
    // redundant second map lookup per variable.
    for (Iterator iterator = envVariables.entrySet().iterator(); iterator.hasNext();) {
      Map.Entry entry = (Map.Entry)iterator.next();
      sendRequest(new SetRequest((String)entry.getKey(), (String)entry.getValue()), connectionStreams);
    }
  }

  private boolean processRequests(final Requests requests,
                                  final IConnectionStreams connectionStreams,
                                  final IRequestsProgressHandler communicationProgressHandler)
    throws CommandException, IOCommandException {
    BugLog.getInstance().assertNotNull(requests);
    /*final ProcessRequestsHelper helper = (myTimeout == -1) ?
      new DirectProcessRequestHelper() : new TimedOutProcessRequestHelper();*/
    // Timeout-based processing is intentionally disabled; always run inline.
    final ProcessRequestsHelper helper = new DirectProcessRequestHelper();
    return helper.processRequests(requests, connectionStreams, communicationProgressHandler);
  }

  /**
   * Template for the send/receive cycle. Subclasses decide whether the work
   * runs inline or on a watched worker thread. Exceptions thrown inside the
   * runnable are captured and rethrown on the calling thread afterwards.
   */
  private abstract class ProcessRequestsHelper {
    protected IOException myIOException;
    protected CommandException myCommandException;
    protected boolean myResult;

    protected abstract void before();
    protected abstract void callRunnable(final Runnable runnable);
    protected abstract void afterInRunnable();
    protected abstract void after() throws CommandException;

    public boolean processRequests(final Requests requests,
                                   final IConnectionStreams connectionStreams,
                                   final IRequestsProgressHandler communicationProgressHandler)
      throws CommandException, IOCommandException {
      final Runnable runnable = new Runnable() {
        public void run() {
          try {
            checkCanceled();
            sendRequests(requests, connectionStreams, communicationProgressHandler);
            checkCanceled();
            sendRequest(requests.getResponseExpectingRequest(), connectionStreams);
            connectionStreams.flushForReading();
            myResult = handleResponses(connectionStreams, new DefaultResponseHandler());
          }
          catch (IOException e) {
            myIOException = e;
          }
          catch (CommandException e) {
            myCommandException = e;
          }
          finally {
            afterInRunnable();
          }
        }
      };
      before();
      callRunnable(runnable);
      // Rethrow on the caller's thread whatever the runnable captured.
      if (myIOException != null) throw new IOCommandException(myIOException);
      if (myCommandException != null) throw myCommandException;
      after();
      return myResult;
    }
  }

  /**
   * Runs the request cycle on a worker thread and watches it, waking up at
   * least every 20 seconds to check the command stopper's liveness.
   * NOTE: currently unused (see processRequests above).
   */
  private class TimedOutProcessRequestHelper extends ProcessRequestsHelper {
    private final Semaphore mySemaphore;
    private Future<?> myFuture;

    private TimedOutProcessRequestHelper() {
      mySemaphore = new Semaphore();
    }

    @Override
    protected void before() {
      mySemaphore.down();
    }

    @Override
    protected void callRunnable(Runnable runnable) {
      final ExecutorService executor = Executors.newSingleThreadExecutor();
      try {
        myFuture = executor.submit(runnable);
        // Wake up at least every 20 seconds even for longer (or disabled) timeouts.
        final long tOut = (myTimeout < 20000) ? 20000 : myTimeout;
        while (true) {
          mySemaphore.waitFor(tOut);
          if (myFuture.isDone() || myFuture.isCancelled()) break;
          if (!commandStopper.isAlive()) break;
          commandStopper.resetAlive();
        }
      }
      finally {
        // Fix: the original leaked the single-thread executor. shutdown() lets
        // the worker thread terminate once the submitted task completes; it
        // does not interrupt a still-running task.
        executor.shutdown();
      }
    }

    @Override
    protected void afterInRunnable() {
      mySemaphore.up();
    }

    @Override
    protected void after() throws CommandException {
      if ((!myFuture.isDone() && (!myFuture.isCancelled()) && (!commandStopper.isAlive()))) {
        myFuture.cancel(true);
        throw new CommandException(new CommandAbortedException(), "Command execution timed out");
      }
    }
  }

  /** Runs the request cycle inline on the calling thread. */
  private class DirectProcessRequestHelper extends ProcessRequestsHelper {
    @Override
    protected void before() {
    }

    @Override
    protected void callRunnable(Runnable runnable) {
      runnable.run();
    }

    @Override
    protected void afterInRunnable() {
    }

    @Override
    protected void after() throws CommandException {
    }
  }

  /** Sends every request (plus any attached file content) and reports progress. */
  private void sendRequests(Requests requests, IConnectionStreams connectionStreams, IRequestsProgressHandler communicationProgressHandler)
    throws CommandAbortedException, IOException {
    for (Iterator it = requests.getRequests().iterator(); it.hasNext();) {
      final IRequest request = (IRequest)it.next();
      sendRequest(request, connectionStreams);
      final FileDetails fileDetails = request.getFileForTransmission();
      if (fileDetails != null) {
        sendFile(fileDetails, connectionStreams);
      }
      communicationProgressHandler.requestSent(request);
    }
  }

  /** Negotiates the set of requests the server understands; fails if none reported. */
  private void updateValidRequests(IConnectionStreams connectionStreams) throws CommandException, IOException {
    sendRequest(new ValidRequestsRequest(), connectionStreams);
    connectionStreams.flushForReading();
    handleResponses(connectionStreams, new ValidRequestsResponseHandler());
    if (responseServices.getValidRequests() == null) {
      throw new ValidRequestsExpectedException();
    }
  }

  /** Translates the global options into "-r"/"-n"/"-l"/"-q" option requests. */
  private void sendGlobalOptionRequests(IGlobalOptions globalOptions, IConnectionStreams connectionStreams)
    throws CommandAbortedException, IOException {
    if (!isValidRequest(GlobalOptionRequest.REQUEST)) {
      return;
    }
    if (globalOptions.isCheckedOutFilesReadOnly()) {
      sendRequest(new GlobalOptionRequest("-r"), connectionStreams);
    }
    if (globalOptions.isDoNoChanges()) {
      sendRequest(new GlobalOptionRequest("-n"), connectionStreams);
    }
    if (globalOptions.isNoHistoryLogging()) {
      sendRequest(new GlobalOptionRequest("-l"), connectionStreams);
    }
    if (globalOptions.isSomeQuiet()) {
      sendRequest(new GlobalOptionRequest("-q"), connectionStreams);
    }
  }

  // NOTE(review): this is a substring match on the server's valid-requests
  // line, so a request name that is a substring of another would match too —
  // kept as-is to preserve existing behavior.
  private boolean isValidRequest(String request) {
    return responseServices.getValidRequests().indexOf(request) >= 0;
  }

  private void sendRequest(IRequest request, IConnectionStreams connectionStreams) throws CommandAbortedException, IOException {
    checkCanceled();
    connectionStreams.getLoggedWriter().write(request.getRequestString());
  }

  /** @throws CommandAbortedException if the user aborted the command */
  private void checkCanceled() throws CommandAbortedException {
    if (commandStopper.isAborted()) {
      throw new CommandAbortedException();
    }
  }

  /**
   * Reads responses until the parser yields a terminal result (ok/error) or
   * the stream is exhausted.
   *
   * @return the terminal result, or false if the stream ended without one
   */
  private boolean handleResponses(IConnectionStreams connectionStreams, IResponseHandler responseHandler)
    throws CommandException, IOException {
    //final ErrorDefendingResponseHandler proxy = new ErrorDefendingResponseHandler(myTimeout, responseHandler);
    final ResponseParser responseParser = new ResponseParser(responseHandler, clientEnvironment.getCharset());
    final StringBuilder responseBuffer = new StringBuilder(32);
    while (true) {
      readResponse(connectionStreams.getLoggedReader(), responseBuffer);
      checkCanceled();
      if (responseBuffer.length() == 0) {
        // End of stream without a terminal response.
        return false;
      }
      final Boolean result =
        responseParser.processResponse(responseBuffer.toString(), connectionStreams, responseServices, clientEnvironment);
      if (result != null) {
        return result.booleanValue();
      }
      checkCanceled();
      /*if (proxy.interrupt()) {
        throw new CommandException(null, "Aborted: consequent errors stream");
      }*/
    }
  }

  /** Reads the next response token (up to a newline or space) into the buffer. */
  private static void readResponse(Reader reader, StringBuilder responseBuffer) throws IOException {
    responseBuffer.setLength(0);
    for (int chr = reader.read(); chr >= 0; chr = reader.read()) {
      if (chr == '\n' || chr == ' ') {
        break;
      }
      responseBuffer.append((char)chr);
    }
  }

  /** Transmits the file attached to a request, binary or text as appropriate. */
  private void sendFile(FileDetails fileDetails, IConnectionStreams connectionStreams) throws IOException {
    final FileObject fileObject = fileDetails.getFileObject();
    if (fileDetails.isBinary()) {
      clientEnvironment.getLocalFileReader().transmitBinaryFile(fileObject, connectionStreams, clientEnvironment.getCvsFileSystem());
    }
    else {
      clientEnvironment.getLocalFileReader().transmitTextFile(fileObject, connectionStreams, clientEnvironment.getCvsFileSystem());
    }
  }
}
| |
// =================================================================================================
// Copyright 2011 Twitter, Inc.
// -------------------------------------------------------------------------------------------------
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this work except in compliance with the License.
// You may obtain a copy of the License in the LICENSE file, or at:
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// =================================================================================================
package com.twitter.common.args;
import java.io.File;
import java.io.PrintStream;
import java.io.Serializable;
import java.lang.annotation.Annotation;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Pattern;
import com.google.common.base.Preconditions;
import com.google.common.base.Predicate;
import com.google.common.base.Predicates;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.io.ByteStreams;
import org.junit.Before;
import org.junit.Test;
import com.twitter.common.args.ArgScannerTest.StandardArgs.Optimizations;
import com.twitter.common.args.constraints.NotEmpty;
import com.twitter.common.args.constraints.NotNegative;
import com.twitter.common.args.constraints.NotNull;
import com.twitter.common.args.constraints.Positive;
import com.twitter.common.args.constraints.Range;
import com.twitter.common.args.parsers.NonParameterizedTypeParser;
import com.twitter.common.base.Command;
import com.twitter.common.base.Function;
import com.twitter.common.base.MorePreconditions;
import com.twitter.common.collections.Pair;
import com.twitter.common.quantity.Amount;
import com.twitter.common.quantity.Data;
import com.twitter.common.quantity.Time;
import static java.lang.annotation.ElementType.FIELD;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.nullValue;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
/**
* @author William Farner
*/
public class ArgScannerTest {
// Maps a class to a predicate that accepts only fields declared directly on
// that class (used to scope arg scanning/resetting to one nested class).
private static final Function<Class<?>, Predicate<Field>> TO_SCOPE_PREDICATE =
new Function<Class<?>, Predicate<Field>>() {
@Override public Predicate<Field> apply(final Class<?> cls) {
return new Predicate<Field>() {
@Override public boolean apply(Field field) {
return field.getDeclaringClass() == cls;
}
};
}
};
@Before
public void setUp() {
  // Start each test from a clean slate: clear any previously parsed values
  // from the Arg fields of every nested test class.
  Class<?>[] nestedClasses = getClass().getDeclaredClasses();
  for (int i = 0; i < nestedClasses.length; i++) {
    resetArgs(nestedClasses[i]);
  }
}
// Declares one @CmdLine arg for each directly supported value type (enum,
// string, primitives, regex, Amount, Range) plus a @Positional list arg.
public static class StandardArgs {
enum Optimizations { NONE, MINIMAL, ALL }
@CmdLine(name = "enum", help = "help")
static final Arg<Optimizations> ENUM_VAL = Arg.create(Optimizations.MINIMAL);
@CmdLine(name = "string", help = "help")
static final Arg<String> STRING_VAL = Arg.create("string");
@CmdLine(name = "char", help = "help")
static final Arg<Character> CHAR_VAL = Arg.create('c');
@CmdLine(name = "byte", help = "help")
static final Arg<Byte> BYTE_VAL = Arg.create((byte) 0);
@CmdLine(name = "short", help = "help")
static final Arg<Short> SHORT_VAL = Arg.create((short) 0);
@CmdLine(name = "int", help = "help")
static final Arg<Integer> INT_VAL = Arg.create(0);
@CmdLine(name = "long", help = "help")
static final Arg<Long> LONG_VAL = Arg.create(0L);
@CmdLine(name = "float", help = "help")
static final Arg<Float> FLOAT_VAL = Arg.create(0F);
@CmdLine(name = "double", help = "help")
static final Arg<Double> DOUBLE_VAL = Arg.create(0D);
@CmdLine(name = "bool", help = "help")
static final Arg<Boolean> BOOL = Arg.create(false);
@CmdLine(name = "regex", help = "help")
static final Arg<Pattern> REGEX = Arg.create(null);
@CmdLine(name = "time_amount", help = "help")
static final Arg<Amount<Long, Time>> TIME_AMOUNT = Arg.create(Amount.of(1L, Time.SECONDS));
@CmdLine(name = "data_amount", help = "help")
static final Arg<Amount<Long, Data>> DATA_AMOUNT = Arg.create(Amount.of(1L, Data.MB));
@CmdLine(name = "range", help = "help")
static final Arg<com.google.common.collect.Range<Integer>> RANGE =
Arg.create(com.google.common.collect.Range.closed(1, 5));
@Positional(help = "help")
static final Arg<List<Amount<Long, Time>>> POSITIONAL =
Arg.<List<Amount<Long, Time>>>create(ImmutableList.<Amount<Long, Time>>of());
}
/**
 * Parses a value for each standard arg type and asserts the parsed result,
 * including boolean negation ("no_bool") and positional args.
 */
@Test
public void testStandardArgs() {
test(StandardArgs.class,
new Command() {
@Override public void execute() {
assertThat(StandardArgs.ENUM_VAL.get(), is(Optimizations.ALL));
}
}, "enum", "ALL");
test(StandardArgs.class,
new Command() {
@Override public void execute() {
assertThat(StandardArgs.STRING_VAL.get(), is("newstring"));
}
},
"string", "newstring");
test(StandardArgs.class,
new Command() {
@Override public void execute() { assertThat(StandardArgs.CHAR_VAL.get(), is('x')); }
},
"char", "x");
test(StandardArgs.class,
new Command() {
@Override public void execute() {
assertThat(StandardArgs.BYTE_VAL.get(), is((byte) 10));
}
},
"byte", "10");
test(StandardArgs.class,
new Command() {
@Override public void execute() {
assertThat(StandardArgs.SHORT_VAL.get(), is((short) 10));
}
},
"short", "10");
test(StandardArgs.class,
new Command() {
@Override public void execute() { assertThat(StandardArgs.INT_VAL.get(), is(10)); }
},
"int", "10");
test(StandardArgs.class,
new Command() {
@Override public void execute() { assertThat(StandardArgs.LONG_VAL.get(), is(10L)); }
},
"long", "10");
test(StandardArgs.class,
new Command() {
@Override public void execute() { assertThat(StandardArgs.FLOAT_VAL.get(), is(10f)); }
},
"float", "10.0");
test(StandardArgs.class,
new Command() {
@Override public void execute() { assertThat(StandardArgs.DOUBLE_VAL.get(), is(10d)); }
},
"double", "10.0");
test(StandardArgs.class,
new Command() {
@Override public void execute() { assertThat(StandardArgs.BOOL.get(), is(true)); }
},
"bool", "true");
// A boolean flag with no value defaults to true.
test(StandardArgs.class,
new Command() {
@Override public void execute() { assertThat(StandardArgs.BOOL.get(), is(true)); }
},
"bool", "");
test(StandardArgs.class,
new Command() {
@Override public void execute() {
assertThat(StandardArgs.REGEX.get().matcher("jack").matches(), is(true));
}
},
"regex", ".*ack$");
// "no_<name>" negates a boolean flag; "no_<name>=false" double-negates.
test(StandardArgs.class,
new Command() {
@Override public void execute() { assertThat(StandardArgs.BOOL.get(), is(false)); }
},
"no_bool", "");
test(StandardArgs.class,
new Command() {
@Override public void execute() { assertThat(StandardArgs.BOOL.get(), is(true)); }
},
"no_bool", "false");
test(StandardArgs.class,
new Command() {
@Override public void execute() {
assertThat(StandardArgs.TIME_AMOUNT.get(), is(Amount.of(100L, Time.SECONDS)));
}
},
"time_amount", "100secs");
test(StandardArgs.class,
new Command() {
@Override public void execute() {
assertThat(StandardArgs.DATA_AMOUNT.get(), is(Amount.of(1L, Data.Gb)));
}
},
"data_amount", "1Gb");
test(StandardArgs.class,
new Command() {
@Override public void execute() {
assertThat(StandardArgs.RANGE.get(), is(com.google.common.collect.Range.closed(1, 5)));
}
},
"range", "1-5");
// Bare (non-dashed) arguments are collected into the @Positional list.
resetArgs(StandardArgs.class);
assertTrue(parse(StandardArgs.class, "1mins", "2secs"));
assertEquals(ImmutableList.builder()
.add(Amount.of(60L, Time.SECONDS))
.add(Amount.of(2L, Time.SECONDS)).build(), StandardArgs.POSITIONAL.get());
}
/**
 * Immutable value type wrapping a non-blank string; used to exercise custom
 * arg parsing. Equality and hashing delegate to the wrapped name.
 */
public static class Name {
  private final String name;

  public Name(String name) {
    // Reject null/blank input up front so every instance is valid.
    this.name = MorePreconditions.checkNotBlank(name);
  }

  public String getName() {
    return name;
  }

  @Override
  public int hashCode() {
    return name.hashCode();
  }

  @Override
  public boolean equals(Object obj) {
    if (!(obj instanceof Name)) {
      return false;
    }
    return name.equals(((Name) obj).name);
  }
}
// Registered globally via @ArgParser: discovered by scanning and used for
// every Arg<Name> without an explicit parser.
@ArgParser
public static class NameParser extends NonParameterizedTypeParser<Name> {
@Override public Name doParse(String raw) {
return new Name(raw);
}
}
// Value type with NO registered parser; args of this type must name a parser
// explicitly via @CmdLine(parser = ...).
public static class MeaningOfLife {
private final Long answer;
public MeaningOfLife(Long answer) {
this.answer = Preconditions.checkNotNull(answer);
}
@Override
public int hashCode() {
return this.answer.hashCode();
}
@Override
public boolean equals(Object obj) {
return (obj instanceof MeaningOfLife) && answer.equals(((MeaningOfLife) obj).answer);
}
}
// Parser that ignores its input and always yields 42 — makes it observable
// that the explicitly-named parser (not raw input) produced the value.
public static class Monty extends NonParameterizedTypeParser<MeaningOfLife> {
@Override public MeaningOfLife doParse(String raw) {
return new MeaningOfLife(42L);
}
}
// Args using custom value types: custom1 relies on the @ArgParser-discovered
// NameParser, custom2 names its parser (Monty) explicitly.
public static class CustomArgs {
@CmdLine(name = "custom1", help = "help")
static final Arg<Name> NAME_VAL = Arg.create(new Name("jim"));
@CmdLine(name = "custom2", help = "help", parser = Monty.class)
static final Arg<MeaningOfLife> MEANING_VAL = Arg.create(new MeaningOfLife(13L));
}
/**
 * Verifies custom parsers are applied: custom2 yields 42 regardless of the
 * raw input because Monty ignores it.
 */
@Test
public void testCustomArgs() {
test(CustomArgs.class,
new Command() {
@Override public void execute() {
assertThat(CustomArgs.NAME_VAL.get(), is(new Name("jane")));
}
}, "custom1", "jane");
test(CustomArgs.class,
new Command() {
@Override public void execute() {
assertThat(CustomArgs.MEANING_VAL.get(), is(new MeaningOfLife(42L)));
}
}, "custom2", "jim");
}
/** -h / -help must short-circuit parsing and report non-success. */
@Test
public void testHelp() {
assertFalse(parse(StandardArgs.class, "-h"));
assertFalse(parse(StandardArgs.class, "-help"));
}
/** Empty string values may be given bare, single-quoted or double-quoted. */
@Test
public void testAllowsEmptyString() {
parse(StandardArgs.class, "-string=");
assertThat(StandardArgs.STRING_VAL.get(), is(""));
resetArgs(StandardArgs.class);
parse(StandardArgs.class, "-string=''");
assertThat(StandardArgs.STRING_VAL.get(), is(""));
resetArgs(StandardArgs.class);
parse(StandardArgs.class, "-string=\"\"");
assertThat(StandardArgs.STRING_VAL.get(), is(""));
}
// Args with parameterized collection types (List, Set, Map, Pair) over
// various key/value element types.
public static class CollectionArgs {
@CmdLine(name = "stringList", help = "help")
static final Arg<List<String>> STRING_LIST = Arg.create(null);
@CmdLine(name = "intList", help = "help")
static final Arg<List<Integer>> INT_LIST = Arg.create(null);
@CmdLine(name = "stringSet", help = "help")
static final Arg<Set<String>> STRING_SET = Arg.create(null);
@CmdLine(name = "intSet", help = "help")
static final Arg<Set<Integer>> INT_SET = Arg.create(null);
@CmdLine(name = "stringStringMap", help = "help")
static final Arg<Map<String, String>> STRING_STRING_MAP = Arg.create(null);
@CmdLine(name = "intIntMap", help = "help")
static final Arg<Map<Integer, Integer>> INT_INT_MAP = Arg.create(null);
@CmdLine(name = "stringIntMap", help = "help")
static final Arg<Map<String, Integer>> STRING_INT_MAP = Arg.create(null);
@CmdLine(name = "intStringMap", help = "help")
static final Arg<Map<Integer, String>> INT_STRING_MAP = Arg.create(null);
@CmdLine(name = "stringStringPair", help = "help")
static final Arg<Pair<String, String>> STRING_STRING_PAIR = Arg.create(null);
@CmdLine(name = "intIntPair", help = "help")
static final Arg<Pair<Integer, Integer>> INT_INT_PAIR = Arg.create(null);
@CmdLine(name = "stringTimeAmountPair", help = "help")
static final Arg<Pair<String, Amount<Long, Time>>> STRING_TIME_AMOUNT_PAIR = Arg.create(null);
}
/**
 * Parses comma-separated lists/sets, k=v maps and pairs; the deliberately
 * irregular whitespace in the inputs checks that tokens are trimmed.
 */
@Test
public void testCollectionArgs() {
test(CollectionArgs.class,
new Command() {
@Override public void execute() {
assertThat(CollectionArgs.STRING_LIST.get(), is(Arrays.asList("a", "b", "c", "d")));
}
},
"stringList", "a,b,c,d");
test(CollectionArgs.class,
new Command() {
@Override public void execute() {
assertThat(CollectionArgs.INT_LIST.get(), is(Arrays.asList(1, 2, 3, 4)));
}
},
"intList", "1, 2, 3, 4");
test(CollectionArgs.class,
new Command() {
@Override public void execute() {
Set<String> expected = ImmutableSet.of("a", "b", "c", "d");
assertThat(CollectionArgs.STRING_SET.get(), is(expected));
}
},
"stringSet", "a,b,c,d");
test(CollectionArgs.class,
new Command() {
@Override public void execute() {
Set<Integer> expected = ImmutableSet.of(1, 2, 3, 4);
assertThat(CollectionArgs.INT_SET.get(), is(expected));
}
},
"intSet", "1, 2, 3, 4");
test(CollectionArgs.class,
new Command() {
@Override public void execute() {
Map<String, String> expected = ImmutableMap.of("a", "b", "c", "d", "e", "f", "g", "h");
assertThat(CollectionArgs.STRING_STRING_MAP.get(), is(expected));
}
},
"stringStringMap", "a=b, c=d, e=f, g=h");
test(CollectionArgs.class,
new Command() {
@Override public void execute() {
Map<Integer, Integer> expected = ImmutableMap.of(1, 2, 3, 4, 5, 6, 7, 8);
assertThat(CollectionArgs.INT_INT_MAP.get(), is(expected));
}
},
"intIntMap", "1 = 2,3=4, 5=6 ,7=8");
test(CollectionArgs.class,
new Command() {
@Override public void execute() {
Map<String, Integer> expected = ImmutableMap.of("a", 1, "b", 2, "c", 3, "d", 4);
assertThat(CollectionArgs.STRING_INT_MAP.get(), is(expected));
}
},
"stringIntMap", "a=1 , b=2, c=3 ,d=4");
test(CollectionArgs.class,
new Command() {
@Override public void execute() {
Map<Integer, String> expected = ImmutableMap.of(1, "1", 2, "2", 3, "3", 4, "4");
assertThat(CollectionArgs.INT_STRING_MAP.get(), is(expected));
}
},
"intStringMap", " 1=1 , 2=2, 3=3,4=4");
test(CollectionArgs.class,
new Command() {
@Override public void execute() {
assertThat(CollectionArgs.STRING_STRING_PAIR.get(), is(Pair.of("foo", "bar")));
}
},
"stringStringPair", "foo , bar");
test(CollectionArgs.class,
new Command() {
@Override public void execute() {
assertThat(CollectionArgs.INT_INT_PAIR.get(), is(Pair.of(10, 20)));
}
},
"intIntPair", "10 ,20");
test(CollectionArgs.class,
new Command() {
@Override public void execute() {
assertThat(CollectionArgs.STRING_TIME_AMOUNT_PAIR.get(),
is(Pair.of("fred", Amount.of(42L, Time.MINUTES))));
}
},
"stringTimeAmountPair", "fred ,42mins");
// The boolean 'true' flag presumably marks an expected parse failure
// (1MB is a Data unit, not a Time unit) — TODO confirm against test() helper.
test(CollectionArgs.class,
new Command() {
@Override public void execute() {
CollectionArgs.STRING_TIME_AMOUNT_PAIR.get();
}
},
true, "stringTimeAmountPair", "george,1MB");
}
// Two unrelated Serializable implementations for bounded-wildcard tests.
static class Serializable1 implements Serializable { }
static class Serializable2 implements Serializable { }
// Args whose types use bounded wildcards over Class<? extends Serializable>.
public static class WildcardArgs {
@CmdLine(name = "class", help = "help")
static final Arg<? extends Class<? extends Serializable>> CLAZZ =
Arg.create(Serializable1.class);
@CmdLine(name = "classList1", help = "help")
static final Arg<List<Class<? extends Serializable>>> CLASS_LIST_1 = Arg.create(null);
@CmdLine(name = "classList2", help = "help")
static final Arg<List<? extends Class<? extends Serializable>>> CLASS_LIST_2 = Arg.create(null);
}
/**
 * Classes satisfying the wildcard bound parse successfully; classes outside
 * the bound (e.g. Runnable, which is not Serializable) must be rejected —
 * the boolean 'true' overload presumably marks an expected failure
 * (TODO confirm against the test() helper).
 */
@Test
public void testWildcardArgs() {
test(WildcardArgs.class,
new Command() {
@Override public void execute() {
assertSame(Serializable2.class, WildcardArgs.CLAZZ.get());
}
},
"class", Serializable2.class.getName());
test(WildcardArgs.class,
new Command() {
@Override public void execute() {
WildcardArgs.CLAZZ.get();
}
},
true, "class", Runnable.class.getName());
test(WildcardArgs.class,
new Command() {
@Override public void execute() {
assertEquals(ImmutableList.of(Serializable1.class, Serializable2.class),
WildcardArgs.CLASS_LIST_1.get());
}
},
"classList1", Serializable1.class.getName() + "," + Serializable2.class.getName());
test(WildcardArgs.class,
new Command() {
@Override public void execute() {
assertEquals(ImmutableList.of(Serializable2.class), WildcardArgs.CLASS_LIST_2.get());
}
},
"classList2", Serializable2.class.getName());
test(WildcardArgs.class,
new Command() {
@Override public void execute() {
WildcardArgs.CLASS_LIST_2.get();
}
},
true, "classList2", Serializable1.class.getName() + "," + Runnable.class.getName());
}
// Custom constraint annotation: the annotated Name arg's value must equal
// the given string.
@Target(FIELD)
@Retention(RUNTIME)
public static @interface Equals {
String value();
}
// Verifier implementing the @Equals constraint for Name-typed args.
@VerifierFor(Equals.class)
public static class SameName implements Verifier<Name> {
@Override
public void verify(Name value, Annotation annotation) {
Preconditions.checkArgument(getValue(annotation).equals(value.getName()));
}
@Override
public String toString(Class<? extends Name> argType, Annotation annotation) {
return "name = " + getValue(annotation);
}
private String getValue(Annotation annotation) {
return ((Equals) annotation).value();
}
}
// Args carrying constraint annotations (@Equals, @NotEmpty, @Positive,
// @NotNegative, @Range) exercised by testEnforcesConstraints.
public static class VerifyArgs {
@Equals("jake") @CmdLine(name = "custom", help = "help")
static final Arg<Name> CUSTOM_VAL = Arg.create(new Name("jake"));
@NotEmpty @CmdLine(name = "string", help = "help")
static final Arg<String> STRING_VAL = Arg.create("string");
@NotEmpty @CmdLine(name = "optional_string", help = "help")
static final Arg<String> OPTIONAL_STRING_VAL = Arg.create(null);
@Positive @CmdLine(name = "int", help = "help")
static final Arg<Integer> INT_VAL = Arg.create(1);
@NotNegative @CmdLine(name = "long", help = "help")
static final Arg<Long> LONG_VAL = Arg.create(0L);
@Range(lower = 10, upper = 20) @CmdLine(name = "float", help = "help")
static final Arg<Float> FLOAT_VAL = Arg.create(10F);
@CmdLine(name = "double", help = "help")
static final Arg<Double> DOUBLE_VAL = Arg.create(0D);
@CmdLine(name = "bool", help = "help")
static final Arg<Boolean> BOOL = Arg.create(false);
@CmdLine(name = "arg_without_default", help = "help")
static final Arg<Boolean> ARG_WITHOUT_DEFAULT = Arg.create();
}
/**
 * Verifies that constraint annotations (@Equals, @NotEmpty, @Positive,
 * @NotNegative, @Range) are enforced during parsing, and that constraints on
 * an unset optional arg do not force a value to be supplied.
 */
@Test
public void testEnforcesConstraints() {
test(VerifyArgs.class,
new Command() {
@Override public void execute() {
assertThat(VerifyArgs.STRING_VAL.get(), is("newstring"));
assertThat(VerifyArgs.OPTIONAL_STRING_VAL.get(), nullValue(String.class));
}
},
"string", "newstring");
testFails(VerifyArgs.class, "custom", "jane");
testFails(VerifyArgs.class, "string", "");
testFails(VerifyArgs.class, "optional_string", "");
testFails(VerifyArgs.class, "int", "0");
testFails(VerifyArgs.class, "long", "-1");
test(VerifyArgs.class,
new Command() {
@Override public void execute() {
assertThat(VerifyArgs.FLOAT_VAL.get(), is(10.5f));
}
},
"float", "10.5");
testFails(VerifyArgs.class, "float", "9");
// Fix: the @Range(lower = 10, upper = 20) upper bound was never exercised.
testFails(VerifyArgs.class, "float", "21");
}
/**
 * joinKeysToValues() pairs a dangling "-key" token with the following bare
 * value token, but only when that value is quoted or the key has no '='.
 */
@Test
public void testJoinKeysToValues() {
assertThat(ArgScanner.joinKeysToValues(Arrays.asList("")), is(Arrays.asList("")));
assertThat(ArgScanner.joinKeysToValues(Arrays.asList("-a", "b", "-c", "-d")),
is(Arrays.asList("-a=b", "-c", "-d")));
assertThat(ArgScanner.joinKeysToValues(Arrays.asList("-a='b'", "-c", "-d", "'e'")),
is(Arrays.asList("-a='b'", "-c", "-d='e'")));
assertThat(ArgScanner.joinKeysToValues(Arrays.asList("-a=-b", "c", "-d", "\"e\"")),
is(Arrays.asList("-a=-b", "c", "-d=\"e\"")));
}
// "h" collides with the reserved short help flag and must be rejected.
public static class ShortHelpArg {
@CmdLine(name = "h", help = "help")
static final Arg<String> SHORT_HELP = Arg.create("string");
}
@Test(expected = IllegalArgumentException.class)
public void testShortHelpReserved() {
parse(ShortHelpArg.class);
}
// "help" collides with the reserved long help flag and must be rejected.
public static class LongHelpArg {
@CmdLine(name = "help", help = "help")
static final Arg<String> LONG_HELP = Arg.create("string");
}
@Test(expected = IllegalArgumentException.class)
public void testLongHelpReserved() {
parse(LongHelpArg.class);
}
// Two args registered under the same name within one class.
public static class DuplicateNames {
@CmdLine(name = "string", help = "help") static final Arg<String> STRING_1 = Arg.create();
@CmdLine(name = "string", help = "help") static final Arg<String> STRING_2 = Arg.create();
}
@Test(expected = IllegalArgumentException.class)
public void testRejectsDuplicates() {
// NOTE(review): "-string-str" looks like a typo for "-string=str", but the
// expected exception comes from the duplicate registration itself, so the
// argv content does not affect the outcome — confirm before changing.
parse(DuplicateNames.class, "-string-str");
}
// One optional arg and one @NotNull (required) arg.
public static class OneRequired {
@CmdLine(name = "string1", help = "help")
static final Arg<String> STRING_1 = Arg.create(null);
@NotNull @CmdLine(name = "string2", help = "help")
static final Arg<String> STRING_2 = Arg.create(null);
}
/** Supplying the required arg alone parses cleanly. */
@Test
public void testRequiredProvided() {
parse(OneRequired.class, "-string2=blah");
}
/** Omitting the @NotNull arg must fail. */
@Test(expected = IllegalArgumentException.class)
public void testMissingRequired() {
parse(OneRequired.class, "-string1=blah");
}
/** An arg name that was never declared must be rejected. */
@Test(expected = IllegalArgumentException.class)
public void testUnrecognizedArg() {
parse(OneRequired.class, "-string2=blah", "-string3=blah");
}
  // NameClashA and NameClashB declare args with identical short names so the
  // tests below can exercise cross-scope name-collision handling.
  public static class NameClashA {
    @CmdLine(name = "string", help = "help")
    static final Arg<String> STRING = Arg.create(null);
    @CmdLine(name = "boolean", help = "help")
    static final Arg<Boolean> BOOLEAN = Arg.create(true);
  }
  public static class NameClashB {
    @CmdLine(name = "string", help = "help")
    static final Arg<String> STRING_1 = Arg.create(null);
    @CmdLine(name = "boolean", help = "help")
    static final Arg<Boolean> BOOLEAN_1 = Arg.create(true);
  }
  // A colliding short name is ambiguous across scopes and must be rejected.
  @Test(expected = IllegalArgumentException.class)
  public void testDisallowsShortNameOnArgCollision() {
    parse(ImmutableList.of(NameClashA.class, NameClashB.class), "-string=blah");
  }
  // The negated ("-no_") short form of a colliding boolean is ambiguous too.
  @Test(expected = IllegalArgumentException.class)
  public void testDisallowsShortNegNameOnArgCollision() {
    parse(ImmutableList.of(NameClashA.class, NameClashB.class), "-no_boolean");
  }
  // Fully-qualified (canonical class name) forms disambiguate the collision.
  @Test
  public void testAllowsCanonicalNameOnArgCollision() {
    // TODO(William Farner): Fix.
    parse(ImmutableList.of(NameClashA.class, NameClashB.class),
        "-" + NameClashB.class.getCanonicalName() + ".string=blah");
  }
  @Test
  public void testAllowsCanonicalNegNameOnArgCollision() {
    parse(ImmutableList.of(NameClashA.class, NameClashB.class),
        "-" + NameClashB.class.getCanonicalName() + ".no_boolean");
  }
  /** Scaffolding with a parameterized Amount arg for unit-parsing tests. */
  public static class AmountContainer {
    @CmdLine(name = "time_amount", help = "help")
    static final Arg<Amount<Integer, Time>> TIME_AMOUNT = Arg.create(null);
  }
  // A unit of the wrong dimension (data size for a Time arg) must be rejected.
  @Test(expected = IllegalArgumentException.class)
  public void testBadUnitType() {
    parse(ImmutableList.of(AmountContainer.class), "-time_amount=1Mb");
  }
  // A unit string that parses to no known unit at all must be rejected.
  @Test(expected = IllegalArgumentException.class)
  public void testUnrecognizedUnitType() {
    parse(ImmutableList.of(AmountContainer.class), "-time_amount=1abcd");
  }
  // Main1..Main3 are @Positional scaffolding scopes. Main1 and Main2 each
  // declare a single positional list; Main3 declares two in one class.
  static class Main1 {
    @Positional(help = "halp")
    static final Arg<List<String>> NAMES = Arg.create(null);
  }
  static class Main2 {
    @Positional(help = "halp")
    static final Arg<List<List<String>>> ROSTERS = Arg.create(null);
  }
  static class Main3 {
    @Positional(help = "halp")
    static final Arg<List<Double>> PERCENTILES = Arg.create(null);
    @Positional(help = "halp")
    static final Arg<List<File>> FILES = Arg.create(null);
  }
  // Clears all three positional scopes between sub-cases of a single test.
  private void resetMainArgs() {
    resetArgs(Main1.class);
    resetArgs(Main2.class);
    resetArgs(Main3.class);
  }
  @Test
  public void testMultiplePositionalsFails() {
    // Individually these should work.
    resetMainArgs();
    assertTrue(parse(Main1.class, "jack,jill", "laurel,hardy"));
    // A List<String> positional keeps each raw argument token intact.
    assertEquals(ImmutableList.of("jack,jill", "laurel,hardy"),
        ImmutableList.copyOf(Main1.NAMES.get()));
    resetMainArgs();
    assertTrue(parse(Main2.class, "jack,jill", "laurel,hardy"));
    // A List<List<String>> positional additionally splits each token on commas.
    assertEquals(
        ImmutableList.of(
            ImmutableList.of("jack", "jill"),
            ImmutableList.of("laurel", "hardy")),
        ImmutableList.copyOf(Main2.ROSTERS.get()));
    // But if combined in the same class or across classes the @Positional is ambiguous and we
    // should fail fast.
    resetMainArgs();
    try {
      parse(ImmutableList.of(Main1.class, Main2.class), "jack,jill", "laurel,hardy");
      fail("Expected more than 1 in-scope @Positional Arg List to trigger a failure.");
    } catch (IllegalArgumentException e) {
      // expected
    }
    resetMainArgs();
    try {
      parse(Main3.class, "50", "90", "99", "99.9");
      fail("Expected more than 1 in-scope @Positional Arg List to trigger a failure.");
    } catch (IllegalArgumentException e) {
      // expected
    }
  }
// TODO(William Farner): Do we want to support nested parameterized args? If so, need to define a
// syntax for that and build it in.
// e.g. List<List<Integer>>, List<Pair<String, String>>
private static void testFails(Class<?> scope, String arg, String value) {
test(scope, null, true, arg, value);
}
private static void test(Class<?> scope, Command validate, String arg, String value) {
test(scope, validate, false, arg, value);
}
  /**
   * Exercises a single arg/value pair in every supported command-line spelling:
   * "-arg=value" (bare, single-quoted, and double-quoted value), and "-arg"
   * followed by the value as a separate token — each variant tried both with
   * the short name and with the canonical (class-qualified) name.
   */
  private static void test(Class<?> scope, Command validate, boolean expectFails, String arg,
      String value) {
    String canonicalName = scope.getCanonicalName() + "." + arg;
    if (value.isEmpty()) {
      // No value: only the bare-flag spellings apply.
      testValidate(scope, validate, expectFails, String.format("-%s", arg));
      testValidate(scope, validate, expectFails, String.format("-%s", canonicalName));
    } else {
      // "-arg=value" forms, unquoted and quoted.
      testValidate(scope, validate, expectFails, String.format("-%s=%s", arg, value));
      testValidate(scope, validate, expectFails, String.format("-%s=%s", canonicalName, value));
      testValidate(scope, validate, expectFails, String.format("-%s='%s'", arg, value));
      testValidate(scope, validate, expectFails, String.format("-%s='%s'", canonicalName, value));
      testValidate(scope, validate, expectFails, String.format("-%s=\"%s\"", arg, value));
      testValidate(scope, validate, expectFails, String.format("-%s=\"%s\"", canonicalName, value));
      // "-arg" and the value as two separate argv tokens.
      testValidate(scope, validate, expectFails, String.format("-%s", arg), value);
      testValidate(scope, validate, expectFails, String.format("-%s", canonicalName), value);
      testValidate(scope, validate, expectFails,
          String.format("-%s", arg), String.format("'%s'", value));
      testValidate(scope, validate, expectFails,
          String.format("-%s", canonicalName), String.format("'%s'", value));
      // Space-separated pair packed inside a single token.
      testValidate(scope, validate, expectFails, String.format("-%s \"%s\"", arg, value));
      testValidate(scope, validate, expectFails, String.format("-%s \"%s\"", canonicalName, value));
      testValidate(scope, validate, expectFails,
          String.format("-%s", arg), String.format("%s", value));
      testValidate(scope, validate, expectFails,
          String.format("-%s", canonicalName), String.format("%s", value));
    }
  }
private static void testValidate(Class<?> scope, Command validate, boolean expectFails,
String... args) {
resetArgs(scope);
IllegalArgumentException exception = null;
try {
assertTrue(parse(scope, args));
} catch (IllegalArgumentException e) {
exception = e;
}
if (!expectFails && exception != null) {
throw exception;
}
if (expectFails && exception == null) {
fail("Expected exception.");
}
if (validate != null) {
validate.execute();
}
resetArgs(scope);
}
private static void resetArgs(Class<?> scope) {
for (Field field : scope.getDeclaredFields()) {
if (Arg.class.isAssignableFrom(field.getType()) && Modifier.isStatic(field.getModifiers())) {
try {
((Arg) field.get(null)).reset();
} catch (IllegalAccessException e) {
fail(e.getMessage());
}
}
}
}
  /** Convenience overload that parses {@code args} against a single scope. */
  private static boolean parse(final Class<?> scope, String... args) {
    return parse(ImmutableList.of(scope), args);
  }
private static boolean parse(Iterable<? extends Class<?>> scopes, String... args) {
Predicate<Field> filter = Predicates.or(Iterables.transform(scopes, TO_SCOPE_PREDICATE));
PrintStream devNull = new PrintStream(ByteStreams.nullOutputStream());
return new ArgScanner(devNull).parse(filter, Arrays.asList(args));
}
}
| |
package server;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.net.Socket;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import java.util.stream.Collectors;
/**
 * A connected player: wraps the client socket and implements the game
 * commands (movement, tree cutting, aliases, item listing, and resting).
 */
public class Player extends Entity {
    /** Command vocabulary that may not be redefined via the alias command. */
    public static final List<String> registered = Arrays.asList("alias", "cut", "items", "list", "walk", "creep", "sneak", "quietly", "north", "south", "east", "west");
    public boolean hunter, escaped = false;
    // moves starts at a large negative sentinel until resetMoves() runs;
    // savedMoves carries rested moves over to the next turn.
    public int moves = -65536, savedMoves;
    /** Item name -> durability. TreeMap keeps listings alphabetical. */
    public final Map<String, Double> items = new TreeMap<>();
    /** User-defined alias -> replacement text. */
    private final Map<String, String> aliases = new HashMap<>();
    protected final Socket socket;
    protected final PrintWriter out;
    protected final BufferedReader in;

    /**
     * Creates a player bound to the given client socket.
     *
     * @param socket the connected client socket
     * @param number used to form the entity name ("PLAYER" + number)
     * @param x starting x coordinate
     * @param y starting y coordinate
     * @throws IOException if the socket streams cannot be opened
     */
    public Player(Socket socket, int number, int x, int y) throws IOException {
        super("PLAYER" + number, x, y, true, 0);
        this.socket = socket;
        this.out = new PrintWriter(socket.getOutputStream());
        this.in = new BufferedReader(new InputStreamReader(socket.getInputStream()));
    }

    /**
     * Outputs the given text to the client and logs it on the server console.
     * Multi-line text is sent one line at a time so each line is logged.
     *
     * @param text the text to output
     */
    public void print(String text) {
        if (text.contains("\n")) {
            for (String s : text.split("\n")) {
                print(s);
            }
        } else {
            out.println(text);
            out.flush();
            System.out.println(Server.date.format(new Date()) + " [SERVER -> " + this.getName() + "] " + text);
        }
    }

    /** Formats the arguments with {@link String#format} and prints the result. */
    public void printf(String format, Object... args) {
        print(String.format(format, args));
    }

    /**
     * Reads one line from the client and logs it on the server console.
     *
     * @return input from the client, as returned by {@link BufferedReader#readLine()}
     * @throws IOException if there is an IOException while reading
     */
    public String read() throws IOException {
        String ret = in.readLine();
        System.out.println(Server.date.format(new Date()) + " [" + this.getName() + "] " + ret);
        return ret;
    }

    /**
     * Closes the socket and all streams, first sending an EOT (0x04)
     * character so the client knows the session is over.
     *
     * @throws IOException if an IOException occurs while closing the socket or streams
     */
    public void close() throws IOException {
        out.println((char) 4); // EOT: tells the client to disconnect
        out.flush();
        out.close();
        in.close();
        socket.close();
    }

    /**
     * Parses and executes one command line from this player. Alias
     * substitution is applied word-by-word before the command is dispatched.
     *
     * @param command the raw command line
     */
    public void execute(String command) {
        String[] commands = command.split("\\s+");
        // Expand aliases one word at a time, then re-tokenize.
        String cmd = "";
        for (int c = 0; c < commands.length; c++) {
            String repl = aliases.get(commands[c]);
            cmd += (repl == null ? commands[c] : repl) + " ";
        }
        commands = cmd.trim().split("\\s+");
        if (commands[0].equals("help")) {
            if (commands.length == 1) {
                print("Available commands: alias, creep, cut, help, items, rest, walk.\nUse help <command> for more details.");
            } else {
                switch (commands[1]) {
                case "walk":
                    print("Usage: walk <direction>\nMove in the given direction.\nMove cost: 4");
                    break;
                case "creep":
                    print("Usage: creep <direction>\nMove quietly in the given direction.\nMove cost: 7");
                    break;
                case "alias":
                    print("Usage: alias <term> <replacement>\nDefine the given term as the replacement.\nUsage: alias list\nLists all currently defined aliases.");
                    break;
                case "cut":
                    print("Usage: cut <direction>\nAttempt to cut down the tree in the given direction. Large trees will be damaged.\nMove cost: 3, or 1 if there is not a tree in the given direction.");
                    break;
                case "items":
                    print("Usage: items\nLists all items that you have.");
                    break;
                case "rest":
                    // Typo fix: "mvoes" -> "moves" in the user-facing help text.
                    print("Usage: rest [moves]\nSave the specified number of moves (or 1, if no number is specified) until the next turn. Moves have a greater chance of being saved if you have fewer moves left.");
                    break;
                case "help":
                    print("Usage: help\nGives a list of commands.\nUsage: help <command>\nGives details on the given command.");
                    break;
                default:
                    print("That is not a valid command.");
                    break;
                }
            }
        } else if (commands[0].equals("alias")) {
            if (commands.length < 2) {
                print("Please enter a term to define.");
                return;
            }
            if (commands[1].equals("list")) {
                if (aliases.isEmpty()) {
                    print("You haven't defined any aliases.");
                } else {
                    aliases.forEach((alias, replacement) -> print(alias + " -> " + replacement));
                }
                return;
            }
            // Built-in vocabulary is protected from redefinition.
            if (registered.contains(commands[1])) {
                print("\"" + commands[1] + "\" cannot be redefined.");
                return;
            }
            if (commands.length < 3) {
                print("Please enter a term or terms to replace \"" + commands[1] + "\" with");
                return;
            }
            aliases.put(commands[1], Arrays.stream(commands).skip(2).collect(Collectors.joining(" ")));
            print(commands[1] + " has been redefined as " + aliases.get(commands[1]));
        } else if (commands[0].equals("creep") || commands[0].equals("walk")) {
            if (commands.length < 2) {
                print("Please enter a direction to move in.");
                return;
            }
            boolean quiet = commands[0].equals("creep");
            String dir = commands[1];
            // "walk quietly <dir>" is equivalent to "creep <dir>".
            if (commands[1].equals("quietly")) {
                if (commands.length < 3) {
                    print("Please enter a direction to move in.");
                    return;
                }
                dir = commands[2];
                quiet = true;
            }
            if (moves < (quiet ? 7 : 4)) {
                print("You don't have enough moves left to do that.");
                return;
            }
            int nx = this.getX(), ny = this.getY();
            switch (dir) {
            case "north":
                ny--;
                break;
            case "east":
                nx++;
                break;
            case "south":
                ny++;
                break;
            case "west":
                nx--;
                break;
            default:
                print("You can't walk that way.");
                return;
            }
            moves -= quiet ? 7 : 4;
            // Leaving the 30x30 map ends the game for a non-hunter carrying
            // the crystal; everyone else bounces off the edge.
            if (nx < 0 || nx >= 30 || ny < 0 || ny >= 30) {
                if (this.hunter || !this.items.containsKey("the crystal"))
                    print("You have reached the edge of the forest.");
                else
                    escaped = true;
                return;
            }
            boolean move = true;
            String stand = null;
            for (Entity e : Server.getEntitiesAt(nx, ny)) {
                if (!e.isPassable()) {
                    print("You walked into a" + (e.getName().matches("^[aeiou].+?") ? "n " : " ") + e.getName() + ".");
                    move = false;
                    break;
                }
                // Entities named "item <name>" are picked up on contact.
                if (e.getName().startsWith("item")) {
                    print("You found " + e.getName().substring(5));
                    items.put(e.getName().substring(5), e.getState());
                } else if (e instanceof Silhouette && ((Silhouette) e).summoner != this) {
                    print("You see a humanoid figure. It walks into you, and dissolves into shadows.");
                } else if (stand == null) {
                    stand = e.getName();
                }
            }
            if (move) {
                this.setX(nx);
                this.setY(ny);
                print("You walked " + dir + ".");
                if (stand != null) print("You are standing on a " + stand + ".");
                print(Server.world[this.getY()][this.getX()].getMessage());
            }
        } else if (commands[0].equals("cut")) {
            if (moves < 3) {
                print("You don't have enough moves left to do that.");
                return;
            }
            if (commands.length < 2) {
                print("Please enter a direction to cut in.");
                return;
            }
            List<Entity> ents;
            switch (commands[1]) {
            case "north":
                ents = Server.getEntitiesAt(this.getX(), this.getY() - 1);
                break;
            case "east":
                ents = Server.getEntitiesAt(this.getX() + 1, this.getY());
                break;
            case "south":
                ents = Server.getEntitiesAt(this.getX(), this.getY() + 1);
                break;
            case "west":
                ents = Server.getEntitiesAt(this.getX() - 1, this.getY());
                break;
            default:
                print("You can't cut in that direction.");
                return;
            }
            if (ents.stream().allMatch(e -> e.isPassable())) {
                // Nothing solid there: either a silhouette prank or a miss.
                if (ents.stream().anyMatch(e -> e instanceof Silhouette)) {
                    ents.stream().filter(e -> e instanceof Silhouette).forEach(Server.objects::remove);
                    // Typo fixes in user-facing text: "though" -> "thought",
                    // "thourhg" -> "through".
                    print("You conjure a magical axe, and swing with all your might. Too late, you realize that what you thought was a tree is actually a humanoid figure. As your axe passes through it, it dissolves into shadow.");
                    moves -= Server.rand.nextInt(2) + 1;
                } else {
                    if (Server.rand.nextInt(9) == 0) {
                        print("You conjure a magical axe, and swing with all your might. Too late, you realize that there's nothing there, and narrowly miss your leg.");
                        moves -= 2;
                    } else {
                        print("There's nothing to cut in the " + commands[1] + ".");
                        moves--;
                    }
                }
            } else {
                if (ents.stream().anyMatch(e -> e.getName().equals("tree"))) {
                    for (Entity e : ents) {
                        if (e.getName().equals("tree")) {
                            // Damage the tree; a negative resulting state means it falls.
                            if (e.addState(0, -Math.abs(Server.rand.nextGaussian() + 1) * 1.66 - 1) < 0) {
                                // Random fall direction; cx/cy map dir 0..3 to
                                // (-1,0), (0,-1), (1,0), (0,1). Small trees
                                // (state <= 4) fall with no footprint.
                                int dir = Server.rand.nextInt(4);
                                int cx = e.getState() > 4 ? (dir - 1) % 2 : 0;
                                int cy = e.getState() > 4 ? (dir - 2) % 2 : 0;
                                // NOTE(review): the size check uses getState()
                                // here but getState(1) below — confirm which
                                // index holds the tree size.
                                boolean hit = this.getX() == e.getX() + cx && this.getY() == e.getY() + cy;
                                if (hit) {
                                    // Dodging the falling tree costs a move,
                                    // dipping into saved moves if necessary.
                                    moves--;
                                    if (moves < 0) savedMoves--;
                                }
                                print("You conjure a magical axe, and swing with all your might. The tree you hit falls over" + (hit ? ", toward you, and you jump out of the way. The tree narrowly misses you." : "."));
                                Server.objects.remove(e);
                                if (e.getState(1) > 4) {
                                    Server.objects.add(new Entity("stump", e.getX(), e.getY(), true, 6));
                                    for (int i = 1; i < e.getState(1) * 0.75; i++) {
                                        // Bug fix: logs extend along the fall
                                        // direction from the stump, so the x
                                        // offset is added (was multiplied),
                                        // mirroring the y coordinate.
                                        Server.objects.add(new Entity("fallen log", e.getX() + cx * i, e.getY() + cy * i, true, 6, e.getState()));
                                    }
                                }
                            } else {
                                print("You conjure a magical axe, and swing with all your might. You made a large dent in the tree, but it does not fall down.");
                            }
                        } else if (e.getName().startsWith("item")) {
                            print("You found " + e.getName().substring(5));
                            items.put(e.getName().substring(5), e.getState());
                        }
                    }
                    moves -= 3;
                } else {
                    for (Entity e : ents) {
                        if (!e.isPassable()) {
                            // Typo fix: "though" -> "thought".
                            print("You conjure a magical axe, and swing with all your might. Too late, you realize that what you thought was a tree is actually a " + e.getName() + ". Your axe glances off, and you narrowly miss your leg.");
                            moves -= 3;
                            break;
                        }
                    }
                }
            }
        } else if (commands[0].equals("items")) {
            if (items.isEmpty()) {
                print("You don't have any items.");
            } else {
                items.forEach((item, durability) -> printf("%s (%.1f durability)", item, durability));
            }
        } else if (commands[0].equals("rest")) {
            int n = 1;
            try {
                // Missing or non-numeric count falls back to 1.
                n = Integer.parseInt(commands[1]);
            } catch (Exception e) {}
            if (n < 1) {
                print("You can't save less than one move.");
                return;
            } else if (n > moves) {
                print("You don't have that many moves to save.");
                return;
            }
            int sm = 0;
            // Each rested move has a chance of being saved; the chance grows
            // as the remaining move count shrinks.
            for (int i = 0; i < n; i++) {
                moves--;
                if (Server.rand.nextInt(moves / 2 + 1) == 0) {
                    savedMoves++;
                    sm++;
                }
            }
            print("You saved " + sm + " move" + (sm == 1 ? "." : "s."));
        } else {
            print("You cannot perform that action.");
        }
    }

    /**
     * Starts a new turn: grants a Gaussian-distributed move allowance (at
     * least 3) plus any moves saved by resting, then clears the saved count.
     */
    public void resetMoves() {
        this.moves = savedMoves + Math.max(3, 15 + (int) (Server.rand.nextGaussian() * 4));
        this.savedMoves = 0;
    }

    /** Immutable record of a sound event: who made it, where, and how loud. */
    public static class Noise {
        public final String name;
        public final double value;
        public final int x, y;

        public Noise(String name, int x, int y, double value) {
            this.name = name;
            this.x = x;
            this.y = y;
            this.value = value;
        }
    }
}
| |
/* Android Modem Status Client API
*
* Copyright (C) Intel 2012
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.intel.internal.telephony.stmd;
import java.io.IOException;
import java.io.InputStream;
import com.intel.internal.telephony.Constants;
import com.intel.internal.telephony.MmgrClientException;
import com.intel.internal.telephony.ModemNotification;
import com.intel.internal.telephony.ModemRequestArgs;
import com.intel.internal.telephony.ModemStatus;
import com.intel.internal.telephony.ModemStatusMonitor;
import com.intel.internal.telephony.ModemNotificationArgs;
import android.net.LocalSocket;
import android.net.LocalSocketAddress;
import android.os.Handler;
import android.os.Message;
import android.util.Log;
/**
 * STMD modem-status client: connects to the reserved "modem-status" local
 * socket, reads status/notification events on a background thread, and
 * forwards them to the supplied {@link Handler}. Request-style operations are
 * not supported by STMD and throw {@link UnsupportedOperationException}.
 */
public class MedfieldStmdClient implements ModemStatusMonitor, Runnable {
    // Event codes received from STMD, one per 4-byte word.
    private final static byte MODEM_DOWN = 0;
    private final static byte MODEM_UP = 1;
    private final static byte PLATFORM_SHUTDOWN = 2;
    private final static byte MODEM_COLD_RESET = 4;
    protected LocalSocket clientSocket = null;
    protected Handler handler = null;
    protected Thread thread = null;
    // volatile: written by stop() on the caller thread, read by run().
    protected volatile boolean stopRequested = false;
    protected int connectTimeoutMs = 4000;

    public MedfieldStmdClient(Handler handler) {
        this(handler, 4000);
    }

    /**
     * @param handler receives MSG_STATUS / MSG_NOTIFICATION / MSG_ERROR messages
     * @param connectTimeoutMs connection timeout in milliseconds
     *     (NOTE(review): stored but not applied to the connect call — confirm intent)
     */
    public MedfieldStmdClient(Handler handler, int connectTimeoutMs) {
        this.connectTimeoutMs = connectTimeoutMs;
        this.setModemStatusHandler(handler);
    }

    /** Address of STMD's reserved-namespace status socket. */
    protected LocalSocketAddress getSocketAddress() {
        return new LocalSocketAddress("modem-status",
                LocalSocketAddress.Namespace.RESERVED);
    }

    /** Starts the background reader thread. */
    public void start(String clientName) {
        this.stopRequested = false;
        this.clientSocket = new LocalSocket();
        this.thread = new Thread(this);
        this.thread.setName("STMD Client for " + clientName);
        this.thread.start();
    }

    /** Requests the reader thread to stop, closes the socket, and waits for exit. */
    public void stop() {
        this.stopRequested = true;
        // Closing the socket unblocks the reader's blocking read().
        this.cleanUp();
        if (this.thread != null) { // guard: stop() before start() is a no-op
            try {
                this.thread.join();
            } catch (InterruptedException ex) {
                Log.e(Constants.LOG_TAG, ex.toString());
                // Preserve the caller's interrupt status.
                Thread.currentThread().interrupt();
            }
        }
    }

    /**
     * Sets the handler that receives modem events.
     *
     * @throws IllegalArgumentException if {@code handler} is null
     */
    public void setModemStatusHandler(Handler handler) {
        if (handler == null) {
            throw new IllegalArgumentException("handler");
        }
        this.handler = handler;
    }

    /** Reader loop: connects, then forwards every received event until stopped. */
    public void run() {
        byte[] recvBuffer = new byte[1024]; // should be large enough to contain
                                            // response
        InputStream inputStream = null;
        int readCount = 0;
        try {
            this.clientSocket.connect(this.getSocketAddress());
            inputStream = this.clientSocket.getInputStream();
        } catch (Exception ex) {
            Log.e(Constants.LOG_TAG, ex.toString());
            this.handler.obtainMessage(ModemStatusMonitor.MSG_ERROR, ex)
                    .sendToTarget();
            this.cleanUp();
            return;
        }
        while (!this.stopRequested) {
            try {
                readCount = inputStream.read(recvBuffer);
                if (readCount < 0) {
                    // EOF: STMD closed the socket. Without this check the
                    // loop would spin forever on read() returning -1.
                    if (!this.stopRequested) {
                        Log.e(Constants.LOG_TAG, "STMD socket closed unexpectedly");
                        this.handler.obtainMessage(ModemStatusMonitor.MSG_ERROR,
                                new IOException("STMD socket closed")).sendToTarget();
                        this.cleanUp();
                    }
                    return;
                }
                this.handleResponse(recvBuffer, readCount);
            } catch (IOException ex) {
                Log.e(Constants.LOG_TAG, ex.toString());
                this.handler.obtainMessage(ModemStatusMonitor.MSG_ERROR, ex)
                        .sendToTarget();
                this.cleanUp();
                return;
            }
        }
    }

    /**
     * Decodes the received buffer — one event per 4-byte word, first byte
     * significant — and forwards each status/notification to the handler.
     */
    private void handleResponse(byte[] buffer, int length) {
        for (int i = 0; i < length; i += 4) {
            // Reset per word so an unrecognized byte does not re-send the
            // status/notification decoded from a previous word.
            ModemNotification notification = ModemNotification.NONE;
            ModemStatus status = ModemStatus.NONE;
            switch (buffer[i]) {
            case MedfieldStmdClient.MODEM_DOWN:
                Log.i(Constants.LOG_TAG, "Modem status = MODEM_DOWN");
                status = ModemStatus.DOWN;
                break;
            case MedfieldStmdClient.MODEM_UP:
                Log.i(Constants.LOG_TAG, "Modem status = MODEM_UP");
                status = ModemStatus.UP;
                break;
            case MedfieldStmdClient.MODEM_COLD_RESET:
                Log.i(Constants.LOG_TAG,
                        "Modem notification = MODEM_COLD_RESET");
                notification = ModemNotification.COLD_RESET;
                break;
            case MedfieldStmdClient.PLATFORM_SHUTDOWN:
                Log.i(Constants.LOG_TAG,
                        "Modem notification = PLATFORM_SHUTDOWN");
                notification = ModemNotification.PLATFORM_REBOOT;
                break;
            default:
                Log.i(Constants.LOG_TAG, "Unknown data :" + buffer[i]);
            }
            if (status != ModemStatus.NONE) {
                this.handler.obtainMessage(ModemStatusMonitor.MSG_STATUS,
                        status).sendToTarget();
            }
            if (notification != ModemNotification.NONE) {
                this.handler.obtainMessage(ModemStatusMonitor.MSG_NOTIFICATION,
                        notification).sendToTarget();
            }
        }
    }

    /** Closes and clears the client socket; safe to call repeatedly. */
    protected void cleanUp() {
        if (this.clientSocket != null) {
            try {
                this.clientSocket.shutdownInput();
                this.clientSocket.close();
            } catch (IOException ex) {
                Log.e(Constants.LOG_TAG, ex.toString());
            }
            this.clientSocket = null;
        }
    }

    public ModemNotificationArgs buildNotificationArgs() {
        return new ModemNotificationArgs();
    }

    public void replyToNotification(ModemNotificationArgs args) {
        throw new UnsupportedOperationException("Not supported by STMD");
    }

    @Override
    public void sendRequest(ModemRequestArgs args) {
        throw new UnsupportedOperationException("Not supported yet");
    }

    @Override
    public void useModem() {
        throw new UnsupportedOperationException("Not supported by STMD");
    }

    @Override
    public void releaseModem() {
        throw new UnsupportedOperationException("Not supported by STMD");
    }

    @Override
    public void subscribeTo(ModemStatus status, ModemNotification notifications) {
        throw new UnsupportedOperationException("Not supported by STMD");
    }

    @Override
    public boolean handleMessage(Message msg) {
        // Not supported
        return false;
    }

    @Override
    public void restartModem() throws MmgrClientException {
        throw new UnsupportedOperationException("Not supported yet");
    }

    @Override
    public void recoverModem(String[] causes) throws MmgrClientException {
        throw new UnsupportedOperationException("Not supported yet");
    }

    @Override
    public void shutdownModem() throws MmgrClientException {
        throw new UnsupportedOperationException("Not supported yet");
    }

    @Override
    public boolean waitForStatus(ModemStatus status, long timeout)
            throws MmgrClientException {
        throw new UnsupportedOperationException("Not supported yet");
    }
}
| |
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.skyframe;
import com.google.devtools.build.lib.cmdline.PackageIdentifier;
import com.google.devtools.build.lib.util.Preconditions;
import com.google.devtools.build.lib.vfs.Path;
import com.google.devtools.build.lib.vfs.PathFragment;
import com.google.devtools.build.skyframe.SkyKey;
import com.google.devtools.build.skyframe.SkyValue;
/**
* A value that represents a package lookup result.
*
 * <p>Package lookups will always produce a value. On success, {@link #getRoot} returns the
 * package path root under which the package resides and the package's BUILD file is guaranteed to
 * exist (unless this is looking up a WORKSPACE file, in which case the underlying file may or may
 * not exist). On failure, {@link #getErrorReason} and {@link #getErrorMsg} describe why the
 * package doesn't exist.
*
* <p>Implementation detail: we use inheritance here to optimize for memory usage.
*/
public abstract class PackageLookupValue implements SkyValue {
  // The failure cases below carry no per-instance state (except the invalid
  // name case), so shared singletons are used to save memory.
  public static final NoBuildFilePackageLookupValue NO_BUILD_FILE_VALUE =
      new NoBuildFilePackageLookupValue();
  public static final DeletedPackageLookupValue DELETED_PACKAGE_VALUE =
      new DeletedPackageLookupValue();
  /** Why an unsuccessful package lookup failed. */
  enum ErrorReason {
    // There is no BUILD file.
    NO_BUILD_FILE,
    // The package name is invalid.
    INVALID_PACKAGE_NAME,
    // The package is considered deleted because of --deleted_packages.
    DELETED_PACKAGE
  }
  protected PackageLookupValue() {
  }
  /** Creates a successful lookup result rooted at the given package path entry. */
  public static PackageLookupValue success(Path root) {
    return new SuccessfulPackageLookupValue(root);
  }
  /** Creates a lookup result for a WORKSPACE file under the given root. */
  public static PackageLookupValue workspace(Path root) {
    return new WorkspacePackageLookupValue(root);
  }
  /** Creates a failed lookup result for a syntactically invalid package name. */
  public static PackageLookupValue invalidPackageName(String errorMsg) {
    return new InvalidNamePackageLookupValue(errorMsg);
  }
  /** Whether this value represents the external (WORKSPACE) package. */
  public boolean isExternalPackage() {
    return false;
  }
  /**
   * For a successful package lookup, returns the root (package path entry) that the package
   * resides in.
   */
  public abstract Path getRoot();
  /**
   * Returns whether the package lookup was successful.
   */
  public abstract boolean packageExists();
  /**
   * For an unsuccessful package lookup, gets the reason why {@link #packageExists} returns
   * {@code false}.
   */
  abstract ErrorReason getErrorReason();
  /**
   * For an unsuccessful package lookup, gets a detailed error message for {@link #getErrorReason}
   * that is suitable for reporting to a user.
   */
  abstract String getErrorMsg();
  /** Creates a lookup key for a package directory in the default repository. */
  static SkyKey key(PathFragment directory) {
    Preconditions.checkArgument(!directory.isAbsolute(), directory);
    return key(PackageIdentifier.createInDefaultRepo(directory));
  }
  /** Creates a lookup key for the given package identifier. */
  public static SkyKey key(PackageIdentifier pkgIdentifier) {
    return new SkyKey(SkyFunctions.PACKAGE_LOOKUP, pkgIdentifier);
  }
  /** Successful lookup: the package exists under {@code root}. */
  private static class SuccessfulPackageLookupValue extends PackageLookupValue {
    private final Path root;
    private SuccessfulPackageLookupValue(Path root) {
      this.root = root;
    }
    @Override
    public boolean packageExists() {
      return true;
    }
    @Override
    public Path getRoot() {
      return root;
    }
    @Override
    ErrorReason getErrorReason() {
      // Not applicable to a successful lookup.
      throw new IllegalStateException();
    }
    @Override
    String getErrorMsg() {
      // Not applicable to a successful lookup.
      throw new IllegalStateException();
    }
    @Override
    public boolean equals(Object obj) {
      if (!(obj instanceof SuccessfulPackageLookupValue)) {
        return false;
      }
      SuccessfulPackageLookupValue other = (SuccessfulPackageLookupValue) obj;
      return root.equals(other.root);
    }
    @Override
    public int hashCode() {
      return root.hashCode();
    }
  }
  // TODO(kchodorow): fix these semantics. This class should not exist, WORKSPACE lookup should
  // just return success/failure like a "normal" package.
  private static class WorkspacePackageLookupValue extends SuccessfulPackageLookupValue {
    private WorkspacePackageLookupValue(Path root) {
      super(root);
    }
    // TODO(kchodorow): get rid of this, the semantics are wrong (successful package lookup should
    // mean the package exists).
    @Override
    public boolean packageExists() {
      return getRoot().exists();
    }
    @Override
    public boolean isExternalPackage() {
      return true;
    }
  }
  /** Base for all failure cases: no root, packageExists() is false. */
  private abstract static class UnsuccessfulPackageLookupValue extends PackageLookupValue {
    @Override
    public boolean packageExists() {
      return false;
    }
    @Override
    public Path getRoot() {
      // Not applicable to a failed lookup.
      throw new IllegalStateException();
    }
  }
  /** Marker value for no build file found. */
  public static class NoBuildFilePackageLookupValue extends UnsuccessfulPackageLookupValue {
    // Singleton; use NO_BUILD_FILE_VALUE.
    private NoBuildFilePackageLookupValue() {
    }
    @Override
    ErrorReason getErrorReason() {
      return ErrorReason.NO_BUILD_FILE;
    }
    @Override
    String getErrorMsg() {
      return "BUILD file not found on package path";
    }
  }
  /** Failed lookup caused by a syntactically invalid package name. */
  private static class InvalidNamePackageLookupValue extends UnsuccessfulPackageLookupValue {
    private final String errorMsg;
    private InvalidNamePackageLookupValue(String errorMsg) {
      this.errorMsg = errorMsg;
    }
    @Override
    ErrorReason getErrorReason() {
      return ErrorReason.INVALID_PACKAGE_NAME;
    }
    @Override
    String getErrorMsg() {
      return errorMsg;
    }
    @Override
    public boolean equals(Object obj) {
      if (!(obj instanceof InvalidNamePackageLookupValue)) {
        return false;
      }
      InvalidNamePackageLookupValue other = (InvalidNamePackageLookupValue) obj;
      return errorMsg.equals(other.errorMsg);
    }
    @Override
    public int hashCode() {
      return errorMsg.hashCode();
    }
  }
  /** Marker value for a deleted package. */
  public static class DeletedPackageLookupValue extends UnsuccessfulPackageLookupValue {
    // Singleton; use DELETED_PACKAGE_VALUE.
    private DeletedPackageLookupValue() {
    }
    @Override
    ErrorReason getErrorReason() {
      return ErrorReason.DELETED_PACKAGE;
    }
    @Override
    String getErrorMsg() {
      return "Package is considered deleted due to --deleted_packages";
    }
  }
}
| |
/*
* Copyright 2017, Google Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.pubsub.v1;
import static com.google.cloud.pubsub.v1.PagedResponseWrappers.ListTopicSubscriptionsPagedResponse;
import static com.google.cloud.pubsub.v1.PagedResponseWrappers.ListTopicsPagedResponse;
import com.google.api.core.BetaApi;
import com.google.api.gax.grpc.ChannelAndExecutor;
import com.google.api.gax.grpc.ClientContext;
import com.google.api.gax.grpc.UnaryCallable;
import com.google.auth.Credentials;
import com.google.iam.v1.GetIamPolicyRequest;
import com.google.iam.v1.Policy;
import com.google.iam.v1.SetIamPolicyRequest;
import com.google.iam.v1.TestIamPermissionsRequest;
import com.google.iam.v1.TestIamPermissionsResponse;
import com.google.protobuf.Empty;
import com.google.pubsub.v1.DeleteTopicRequest;
import com.google.pubsub.v1.GetTopicRequest;
import com.google.pubsub.v1.ListTopicSubscriptionsRequest;
import com.google.pubsub.v1.ListTopicSubscriptionsResponse;
import com.google.pubsub.v1.ListTopicsRequest;
import com.google.pubsub.v1.ListTopicsResponse;
import com.google.pubsub.v1.ProjectName;
import com.google.pubsub.v1.PublishRequest;
import com.google.pubsub.v1.PublishResponse;
import com.google.pubsub.v1.PubsubMessage;
import com.google.pubsub.v1.Topic;
import com.google.pubsub.v1.TopicName;
import io.grpc.ManagedChannel;
import java.io.Closeable;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ScheduledExecutorService;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND SERVICE
/**
* Service Description: The service that an application uses to manipulate topics, and to send
* messages to a topic.
*
* <p>To publish messages to a topic, see the Publisher class.
*
* <p>This class provides the ability to make remote calls to the backing service through method
* calls that map to API methods. Sample code to get started:
*
* <pre>
* <code>
* try (TopicAdminClient topicAdminClient = TopicAdminClient.create()) {
* TopicName name = TopicName.create("[PROJECT]", "[TOPIC]");
* Topic response = topicAdminClient.createTopic(name);
* }
* </code>
* </pre>
*
* <p>Note: close() needs to be called on the topicAdminClient object to clean up resources such as
* threads. In the example above, try-with-resources is used, which automatically calls close().
*
* <p>The surface of this class includes several types of Java methods for each of the API's
* methods:
*
* <ol>
* <li> A "flattened" method. With this type of method, the fields of the request type have been
* converted into function parameters. It may be the case that not all fields are available as
* parameters, and not every API method will have a flattened method entry point.
* <li> A "request object" method. This type of method only takes one parameter, a request object,
* which must be constructed before the call. Not every API method will have a request object
* method.
* <li> A "callable" method. This type of method takes no parameters and returns an immutable API
* callable object, which can be used to initiate calls to the service.
* </ol>
*
* <p>See the individual methods for example code.
*
* <p>Many parameters require resource names to be formatted in a particular way. To assist with
* these names, this class includes a format method for each type of name, and additionally a parse
* method to extract the individual identifiers contained within names that are returned.
*
* <p>This class can be customized by passing in a custom instance of TopicAdminSettings to
* create(). For example:
*
* <pre>
* <code>
* TopicAdminSettings topicAdminSettings =
* TopicAdminSettings.defaultBuilder()
* .setCredentialsProvider(FixedCredentialsProvider.create(myCredentials))
* .build();
* TopicAdminClient topicAdminClient =
* TopicAdminClient.create(topicAdminSettings);
* </code>
* </pre>
*/
@Generated("by GAPIC")
@BetaApi
public class TopicAdminClient implements AutoCloseable {
private final TopicAdminSettings settings;
private final ScheduledExecutorService executor;
private final ManagedChannel channel;
private final List<AutoCloseable> closeables = new ArrayList<>();
private final UnaryCallable<Topic, Topic> createTopicCallable;
private final UnaryCallable<PublishRequest, PublishResponse> publishCallable;
private final UnaryCallable<GetTopicRequest, Topic> getTopicCallable;
private final UnaryCallable<ListTopicsRequest, ListTopicsResponse> listTopicsCallable;
private final UnaryCallable<ListTopicsRequest, ListTopicsPagedResponse> listTopicsPagedCallable;
private final UnaryCallable<ListTopicSubscriptionsRequest, ListTopicSubscriptionsResponse>
listTopicSubscriptionsCallable;
private final UnaryCallable<ListTopicSubscriptionsRequest, ListTopicSubscriptionsPagedResponse>
listTopicSubscriptionsPagedCallable;
private final UnaryCallable<DeleteTopicRequest, Empty> deleteTopicCallable;
private final UnaryCallable<SetIamPolicyRequest, Policy> setIamPolicyCallable;
private final UnaryCallable<GetIamPolicyRequest, Policy> getIamPolicyCallable;
private final UnaryCallable<TestIamPermissionsRequest, TestIamPermissionsResponse>
testIamPermissionsCallable;
/** Constructs an instance of TopicAdminClient with default settings. */
public static final TopicAdminClient create() throws IOException {
return create(TopicAdminSettings.defaultBuilder().build());
}
/**
* Constructs an instance of TopicAdminClient, using the given settings. The channels are created
* based on the settings passed in, or defaults for any settings that are not set.
*/
  public static final TopicAdminClient create(TopicAdminSettings settings) throws IOException {
    // Each call builds an independent client; channel/executor wiring happens in the constructor.
    return new TopicAdminClient(settings);
  }
/**
* Constructs an instance of TopicAdminClient, using the given settings. This is protected so that
* it easy to make a subclass, but otherwise, the static factory methods should be preferred.
*/
  protected TopicAdminClient(TopicAdminSettings settings) throws IOException {
    this.settings = settings;
    // Channel and executor come from the settings as a pair so providers can share state.
    ChannelAndExecutor channelAndExecutor = settings.getChannelAndExecutor();
    this.executor = channelAndExecutor.getExecutor();
    this.channel = channelAndExecutor.getChannel();
    Credentials credentials = settings.getCredentialsProvider().getCredentials();
    // One ClientContext (executor + channel + credentials) is shared by every callable below.
    ClientContext clientContext =
        ClientContext.newBuilder()
            .setExecutor(this.executor)
            .setChannel(this.channel)
            .setCredentials(credentials)
            .build();
    // Wire each RPC method's callable from its per-method settings.
    this.createTopicCallable = UnaryCallable.create(settings.createTopicSettings(), clientContext);
    this.publishCallable = UnaryCallable.create(settings.publishSettings(), clientContext);
    this.getTopicCallable = UnaryCallable.create(settings.getTopicSettings(), clientContext);
    this.listTopicsCallable = UnaryCallable.create(settings.listTopicsSettings(), clientContext);
    // Paged variants reuse the same per-method settings as their plain counterparts.
    this.listTopicsPagedCallable =
        UnaryCallable.createPagedVariant(settings.listTopicsSettings(), clientContext);
    this.listTopicSubscriptionsCallable =
        UnaryCallable.create(settings.listTopicSubscriptionsSettings(), clientContext);
    this.listTopicSubscriptionsPagedCallable =
        UnaryCallable.createPagedVariant(settings.listTopicSubscriptionsSettings(), clientContext);
    this.deleteTopicCallable = UnaryCallable.create(settings.deleteTopicSettings(), clientContext);
    this.setIamPolicyCallable =
        UnaryCallable.create(settings.setIamPolicySettings(), clientContext);
    this.getIamPolicyCallable =
        UnaryCallable.create(settings.getIamPolicySettings(), clientContext);
    this.testIamPermissionsCallable =
        UnaryCallable.create(settings.testIamPermissionsSettings(), clientContext);
    // Only shut down channel/executor in close() if their providers say we own them.
    if (settings.getChannelProvider().shouldAutoClose()) {
      closeables.add(
          new Closeable() {
            @Override
            public void close() throws IOException {
              channel.shutdown();
            }
          });
    }
    if (settings.getExecutorProvider().shouldAutoClose()) {
      closeables.add(
          new Closeable() {
            @Override
            public void close() throws IOException {
              executor.shutdown();
            }
          });
    }
  }
  /** Returns the settings object this client was constructed with. */
  public final TopicAdminSettings getSettings() {
    return settings;
  }
// AUTO-GENERATED DOCUMENTATION AND METHOD
/**
* Creates the given topic with the given name.
*
* <p>Sample code:
*
* <pre><code>
* try (TopicAdminClient topicAdminClient = TopicAdminClient.create()) {
* TopicName name = TopicName.create("[PROJECT]", "[TOPIC]");
* Topic response = topicAdminClient.createTopic(name);
* }
* </code></pre>
*
* @param name The name of the topic. It must have the format
* `"projects/{project}/topics/{topic}"`. `{topic}` must start with a letter, and contain only
* letters (`[A-Za-z]`), numbers (`[0-9]`), dashes (`-`), underscores (`_`), periods (`.`),
* tildes (`~`), plus (`+`) or percent signs (`%`). It must be between 3 and 255 characters in
* length, and it must not start with `"goog"`.
* @throws com.google.api.gax.grpc.ApiException if the remote call fails
*/
public final Topic createTopic(TopicName name) {
Topic request = Topic.newBuilder().setNameWithTopicName(name).build();
return createTopic(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD
/**
* Creates the given topic with the given name.
*
* <p>Sample code:
*
* <pre><code>
* try (TopicAdminClient topicAdminClient = TopicAdminClient.create()) {
* TopicName name = TopicName.create("[PROJECT]", "[TOPIC]");
* Topic request = Topic.newBuilder()
* .setNameWithTopicName(name)
* .build();
* Topic response = topicAdminClient.createTopic(request);
* }
* </code></pre>
*
* @param request The request object containing all of the parameters for the API call.
* @throws com.google.api.gax.grpc.ApiException if the remote call fails
*/
private final Topic createTopic(Topic request) {
return createTopicCallable().call(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD
/**
* Creates the given topic with the given name.
*
* <p>Sample code:
*
* <pre><code>
* try (TopicAdminClient topicAdminClient = TopicAdminClient.create()) {
* TopicName name = TopicName.create("[PROJECT]", "[TOPIC]");
* Topic request = Topic.newBuilder()
* .setNameWithTopicName(name)
* .build();
* ApiFuture<Topic> future = topicAdminClient.createTopicCallable().futureCall(request);
* // Do something
* Topic response = future.get();
* }
* </code></pre>
*/
public final UnaryCallable<Topic, Topic> createTopicCallable() {
return createTopicCallable;
}
// AUTO-GENERATED DOCUMENTATION AND METHOD
/**
* Adds one or more messages to the topic. Returns `NOT_FOUND` if the topic does not exist. The
* message payload must not be empty; it must contain either a non-empty data field, or at least
* one attribute.
*
* <p>Sample code:
*
* <pre><code>
* try (TopicAdminClient topicAdminClient = TopicAdminClient.create()) {
* TopicName topic = TopicName.create("[PROJECT]", "[TOPIC]");
* ByteString data = ByteString.copyFromUtf8("");
* PubsubMessage messagesElement = PubsubMessage.newBuilder()
* .setData(data)
* .build();
* List<PubsubMessage> messages = Arrays.asList(messagesElement);
* PublishResponse response = topicAdminClient.publish(topic, messages);
* }
* </code></pre>
*
* @param topic The messages in the request will be published on this topic. Format is
* `projects/{project}/topics/{topic}`.
* @param messages The messages to publish.
* @throws com.google.api.gax.grpc.ApiException if the remote call fails
*/
/* package-private */ final PublishResponse publish(
TopicName topic, List<PubsubMessage> messages) {
PublishRequest request =
PublishRequest.newBuilder().setTopicWithTopicName(topic).addAllMessages(messages).build();
return publish(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD
/**
* Adds one or more messages to the topic. Returns `NOT_FOUND` if the topic does not exist. The
* message payload must not be empty; it must contain either a non-empty data field, or at least
* one attribute.
*
* <p>Sample code:
*
* <pre><code>
* try (TopicAdminClient topicAdminClient = TopicAdminClient.create()) {
* TopicName topic = TopicName.create("[PROJECT]", "[TOPIC]");
* ByteString data = ByteString.copyFromUtf8("");
* PubsubMessage messagesElement = PubsubMessage.newBuilder()
* .setData(data)
* .build();
* List<PubsubMessage> messages = Arrays.asList(messagesElement);
* PublishRequest request = PublishRequest.newBuilder()
* .setTopicWithTopicName(topic)
* .addAllMessages(messages)
* .build();
* PublishResponse response = topicAdminClient.publish(request);
* }
* </code></pre>
*
* @param request The request object containing all of the parameters for the API call.
* @throws com.google.api.gax.grpc.ApiException if the remote call fails
*/
  /* package-private */ final PublishResponse publish(PublishRequest request) {
    // Deliberately package-private: applications should publish via the Publisher
    // class (see the class-level javadoc).
    return publishCallable().call(request);
  }
// AUTO-GENERATED DOCUMENTATION AND METHOD
/**
* Adds one or more messages to the topic. Returns `NOT_FOUND` if the topic does not exist. The
* message payload must not be empty; it must contain either a non-empty data field, or at least
* one attribute.
*
* <p>Sample code:
*
* <pre><code>
* try (TopicAdminClient topicAdminClient = TopicAdminClient.create()) {
* TopicName topic = TopicName.create("[PROJECT]", "[TOPIC]");
* ByteString data = ByteString.copyFromUtf8("");
* PubsubMessage messagesElement = PubsubMessage.newBuilder()
* .setData(data)
* .build();
* List<PubsubMessage> messages = Arrays.asList(messagesElement);
* PublishRequest request = PublishRequest.newBuilder()
* .setTopicWithTopicName(topic)
* .addAllMessages(messages)
* .build();
* ApiFuture<PublishResponse> future = topicAdminClient.publishCallable().futureCall(request);
* // Do something
* PublishResponse response = future.get();
* }
* </code></pre>
*/
/* package-private */ final UnaryCallable<PublishRequest, PublishResponse> publishCallable() {
return publishCallable;
}
// AUTO-GENERATED DOCUMENTATION AND METHOD
/**
* Gets the configuration of a topic.
*
* <p>Sample code:
*
* <pre><code>
* try (TopicAdminClient topicAdminClient = TopicAdminClient.create()) {
* TopicName topic = TopicName.create("[PROJECT]", "[TOPIC]");
* Topic response = topicAdminClient.getTopic(topic);
* }
* </code></pre>
*
* @param topic The name of the topic to get. Format is `projects/{project}/topics/{topic}`.
* @throws com.google.api.gax.grpc.ApiException if the remote call fails
*/
public final Topic getTopic(TopicName topic) {
GetTopicRequest request = GetTopicRequest.newBuilder().setTopicWithTopicName(topic).build();
return getTopic(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD
/**
* Gets the configuration of a topic.
*
* <p>Sample code:
*
* <pre><code>
* try (TopicAdminClient topicAdminClient = TopicAdminClient.create()) {
* TopicName topic = TopicName.create("[PROJECT]", "[TOPIC]");
* GetTopicRequest request = GetTopicRequest.newBuilder()
* .setTopicWithTopicName(topic)
* .build();
* Topic response = topicAdminClient.getTopic(request);
* }
* </code></pre>
*
* @param request The request object containing all of the parameters for the API call.
* @throws com.google.api.gax.grpc.ApiException if the remote call fails
*/
private final Topic getTopic(GetTopicRequest request) {
return getTopicCallable().call(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD
/**
* Gets the configuration of a topic.
*
* <p>Sample code:
*
* <pre><code>
* try (TopicAdminClient topicAdminClient = TopicAdminClient.create()) {
* TopicName topic = TopicName.create("[PROJECT]", "[TOPIC]");
* GetTopicRequest request = GetTopicRequest.newBuilder()
* .setTopicWithTopicName(topic)
* .build();
* ApiFuture<Topic> future = topicAdminClient.getTopicCallable().futureCall(request);
* // Do something
* Topic response = future.get();
* }
* </code></pre>
*/
public final UnaryCallable<GetTopicRequest, Topic> getTopicCallable() {
return getTopicCallable;
}
// AUTO-GENERATED DOCUMENTATION AND METHOD
/**
* Lists matching topics.
*
* <p>Sample code:
*
* <pre><code>
* try (TopicAdminClient topicAdminClient = TopicAdminClient.create()) {
* ProjectName project = ProjectName.create("[PROJECT]");
* for (Topic element : topicAdminClient.listTopics(project).iterateAll()) {
* // doThingsWith(element);
* }
* }
* </code></pre>
*
* @param project The name of the cloud project that topics belong to. Format is
* `projects/{project}`.
* @throws com.google.api.gax.grpc.ApiException if the remote call fails
*/
public final ListTopicsPagedResponse listTopics(ProjectName project) {
ListTopicsRequest request =
ListTopicsRequest.newBuilder().setProjectWithProjectName(project).build();
return listTopics(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD
/**
* Lists matching topics.
*
* <p>Sample code:
*
* <pre><code>
* try (TopicAdminClient topicAdminClient = TopicAdminClient.create()) {
* ProjectName project = ProjectName.create("[PROJECT]");
* ListTopicsRequest request = ListTopicsRequest.newBuilder()
* .setProjectWithProjectName(project)
* .build();
* for (Topic element : topicAdminClient.listTopics(request).iterateAll()) {
* // doThingsWith(element);
* }
* }
* </code></pre>
*
* @param request The request object containing all of the parameters for the API call.
* @throws com.google.api.gax.grpc.ApiException if the remote call fails
*/
  public final ListTopicsPagedResponse listTopics(ListTopicsRequest request) {
    // Delegates to the paged callable; page iteration happens in the returned response.
    return listTopicsPagedCallable().call(request);
  }
// AUTO-GENERATED DOCUMENTATION AND METHOD
/**
* Lists matching topics.
*
* <p>Sample code:
*
* <pre><code>
* try (TopicAdminClient topicAdminClient = TopicAdminClient.create()) {
* ProjectName project = ProjectName.create("[PROJECT]");
* ListTopicsRequest request = ListTopicsRequest.newBuilder()
* .setProjectWithProjectName(project)
* .build();
* ApiFuture<ListTopicsPagedResponse> future = topicAdminClient.listTopicsPagedCallable().futureCall(request);
* // Do something
* for (Topic element : future.get().iterateAll()) {
* // doThingsWith(element);
* }
* }
* </code></pre>
*/
public final UnaryCallable<ListTopicsRequest, ListTopicsPagedResponse> listTopicsPagedCallable() {
return listTopicsPagedCallable;
}
// AUTO-GENERATED DOCUMENTATION AND METHOD
/**
* Lists matching topics.
*
* <p>Sample code:
*
* <pre><code>
* try (TopicAdminClient topicAdminClient = TopicAdminClient.create()) {
* ProjectName project = ProjectName.create("[PROJECT]");
* ListTopicsRequest request = ListTopicsRequest.newBuilder()
* .setProjectWithProjectName(project)
* .build();
* while (true) {
* ListTopicsResponse response = topicAdminClient.listTopicsCallable().call(request);
* for (Topic element : response.getTopicsList()) {
* // doThingsWith(element);
* }
* String nextPageToken = response.getNextPageToken();
* if (!Strings.isNullOrEmpty(nextPageToken)) {
* request = request.toBuilder().setPageToken(nextPageToken).build();
* } else {
* break;
* }
* }
* }
* </code></pre>
*/
public final UnaryCallable<ListTopicsRequest, ListTopicsResponse> listTopicsCallable() {
return listTopicsCallable;
}
// AUTO-GENERATED DOCUMENTATION AND METHOD
/**
* Lists the name of the subscriptions for this topic.
*
* <p>Sample code:
*
* <pre><code>
* try (TopicAdminClient topicAdminClient = TopicAdminClient.create()) {
* TopicName topic = TopicName.create("[PROJECT]", "[TOPIC]");
* for (SubscriptionName element : topicAdminClient.listTopicSubscriptions(topic).iterateAllAsSubscriptionName()) {
* // doThingsWith(element);
* }
* }
* </code></pre>
*
* @param topic The name of the topic that subscriptions are attached to. Format is
* `projects/{project}/topics/{topic}`.
* @throws com.google.api.gax.grpc.ApiException if the remote call fails
*/
public final ListTopicSubscriptionsPagedResponse listTopicSubscriptions(TopicName topic) {
ListTopicSubscriptionsRequest request =
ListTopicSubscriptionsRequest.newBuilder().setTopicWithTopicName(topic).build();
return listTopicSubscriptions(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD
/**
* Lists the name of the subscriptions for this topic.
*
* <p>Sample code:
*
* <pre><code>
* try (TopicAdminClient topicAdminClient = TopicAdminClient.create()) {
* TopicName topic = TopicName.create("[PROJECT]", "[TOPIC]");
* ListTopicSubscriptionsRequest request = ListTopicSubscriptionsRequest.newBuilder()
* .setTopicWithTopicName(topic)
* .build();
* for (SubscriptionName element : topicAdminClient.listTopicSubscriptions(request).iterateAllAsSubscriptionName()) {
* // doThingsWith(element);
* }
* }
* </code></pre>
*
* @param request The request object containing all of the parameters for the API call.
* @throws com.google.api.gax.grpc.ApiException if the remote call fails
*/
  public final ListTopicSubscriptionsPagedResponse listTopicSubscriptions(
      ListTopicSubscriptionsRequest request) {
    // Delegates to the paged callable; page iteration happens in the returned response.
    return listTopicSubscriptionsPagedCallable().call(request);
  }
// AUTO-GENERATED DOCUMENTATION AND METHOD
/**
* Lists the name of the subscriptions for this topic.
*
* <p>Sample code:
*
* <pre><code>
* try (TopicAdminClient topicAdminClient = TopicAdminClient.create()) {
* TopicName topic = TopicName.create("[PROJECT]", "[TOPIC]");
* ListTopicSubscriptionsRequest request = ListTopicSubscriptionsRequest.newBuilder()
* .setTopicWithTopicName(topic)
* .build();
* ApiFuture<ListTopicSubscriptionsPagedResponse> future = topicAdminClient.listTopicSubscriptionsPagedCallable().futureCall(request);
* // Do something
* for (SubscriptionName element : future.get().iterateAllAsSubscriptionName()) {
* // doThingsWith(element);
* }
* }
* </code></pre>
*/
public final UnaryCallable<ListTopicSubscriptionsRequest, ListTopicSubscriptionsPagedResponse>
listTopicSubscriptionsPagedCallable() {
return listTopicSubscriptionsPagedCallable;
}
// AUTO-GENERATED DOCUMENTATION AND METHOD
/**
* Lists the name of the subscriptions for this topic.
*
* <p>Sample code:
*
* <pre><code>
* try (TopicAdminClient topicAdminClient = TopicAdminClient.create()) {
* TopicName topic = TopicName.create("[PROJECT]", "[TOPIC]");
* ListTopicSubscriptionsRequest request = ListTopicSubscriptionsRequest.newBuilder()
* .setTopicWithTopicName(topic)
* .build();
* while (true) {
* ListTopicSubscriptionsResponse response = topicAdminClient.listTopicSubscriptionsCallable().call(request);
* for (SubscriptionName element : response.getSubscriptionsListAsSubscriptionNameList()) {
* // doThingsWith(element);
* }
* String nextPageToken = response.getNextPageToken();
* if (!Strings.isNullOrEmpty(nextPageToken)) {
* request = request.toBuilder().setPageToken(nextPageToken).build();
* } else {
* break;
* }
* }
* }
* </code></pre>
*/
public final UnaryCallable<ListTopicSubscriptionsRequest, ListTopicSubscriptionsResponse>
listTopicSubscriptionsCallable() {
return listTopicSubscriptionsCallable;
}
// AUTO-GENERATED DOCUMENTATION AND METHOD
/**
* Deletes the topic with the given name. Returns `NOT_FOUND` if the topic does not exist. After a
* topic is deleted, a new topic may be created with the same name; this is an entirely new topic
* with none of the old configuration or subscriptions. Existing subscriptions to this topic are
* not deleted, but their `topic` field is set to `_deleted-topic_`.
*
* <p>Sample code:
*
* <pre><code>
* try (TopicAdminClient topicAdminClient = TopicAdminClient.create()) {
* TopicName topic = TopicName.create("[PROJECT]", "[TOPIC]");
* topicAdminClient.deleteTopic(topic);
* }
* </code></pre>
*
* @param topic Name of the topic to delete. Format is `projects/{project}/topics/{topic}`.
* @throws com.google.api.gax.grpc.ApiException if the remote call fails
*/
public final void deleteTopic(TopicName topic) {
DeleteTopicRequest request =
DeleteTopicRequest.newBuilder().setTopicWithTopicName(topic).build();
deleteTopic(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD
/**
* Deletes the topic with the given name. Returns `NOT_FOUND` if the topic does not exist. After a
* topic is deleted, a new topic may be created with the same name; this is an entirely new topic
* with none of the old configuration or subscriptions. Existing subscriptions to this topic are
* not deleted, but their `topic` field is set to `_deleted-topic_`.
*
* <p>Sample code:
*
* <pre><code>
* try (TopicAdminClient topicAdminClient = TopicAdminClient.create()) {
* TopicName topic = TopicName.create("[PROJECT]", "[TOPIC]");
* DeleteTopicRequest request = DeleteTopicRequest.newBuilder()
* .setTopicWithTopicName(topic)
* .build();
* topicAdminClient.deleteTopic(request);
* }
* </code></pre>
*
* @param request The request object containing all of the parameters for the API call.
* @throws com.google.api.gax.grpc.ApiException if the remote call fails
*/
private final void deleteTopic(DeleteTopicRequest request) {
deleteTopicCallable().call(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD
/**
* Deletes the topic with the given name. Returns `NOT_FOUND` if the topic does not exist. After a
* topic is deleted, a new topic may be created with the same name; this is an entirely new topic
* with none of the old configuration or subscriptions. Existing subscriptions to this topic are
* not deleted, but their `topic` field is set to `_deleted-topic_`.
*
* <p>Sample code:
*
* <pre><code>
* try (TopicAdminClient topicAdminClient = TopicAdminClient.create()) {
* TopicName topic = TopicName.create("[PROJECT]", "[TOPIC]");
* DeleteTopicRequest request = DeleteTopicRequest.newBuilder()
* .setTopicWithTopicName(topic)
* .build();
* ApiFuture<Void> future = topicAdminClient.deleteTopicCallable().futureCall(request);
* // Do something
* future.get();
* }
* </code></pre>
*/
public final UnaryCallable<DeleteTopicRequest, Empty> deleteTopicCallable() {
return deleteTopicCallable;
}
// AUTO-GENERATED DOCUMENTATION AND METHOD
/**
* Sets the access control policy on the specified resource. Replaces any existing policy.
*
* <p>Sample code:
*
* <pre><code>
* try (TopicAdminClient topicAdminClient = TopicAdminClient.create()) {
* String formattedResource = TopicName.create("[PROJECT]", "[TOPIC]").toString();
* Policy policy = Policy.newBuilder().build();
* Policy response = topicAdminClient.setIamPolicy(formattedResource, policy);
* }
* </code></pre>
*
* @param resource REQUIRED: The resource for which the policy is being specified. `resource` is
* usually specified as a path. For example, a Project resource is specified as
* `projects/{project}`.
* @param policy REQUIRED: The complete policy to be applied to the `resource`. The size of the
* policy is limited to a few 10s of KB. An empty policy is a valid policy but certain Cloud
* Platform services (such as Projects) might reject them.
* @throws com.google.api.gax.grpc.ApiException if the remote call fails
*/
public final Policy setIamPolicy(String resource, Policy policy) {
SetIamPolicyRequest request =
SetIamPolicyRequest.newBuilder().setResource(resource).setPolicy(policy).build();
return setIamPolicy(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD
/**
* Sets the access control policy on the specified resource. Replaces any existing policy.
*
* <p>Sample code:
*
* <pre><code>
* try (TopicAdminClient topicAdminClient = TopicAdminClient.create()) {
* String formattedResource = TopicName.create("[PROJECT]", "[TOPIC]").toString();
* Policy policy = Policy.newBuilder().build();
* SetIamPolicyRequest request = SetIamPolicyRequest.newBuilder()
* .setResource(formattedResource)
* .setPolicy(policy)
* .build();
* Policy response = topicAdminClient.setIamPolicy(request);
* }
* </code></pre>
*
* @param request The request object containing all of the parameters for the API call.
* @throws com.google.api.gax.grpc.ApiException if the remote call fails
*/
  public final Policy setIamPolicy(SetIamPolicyRequest request) {
    // Request-object variant; delegates directly to the wired callable.
    return setIamPolicyCallable().call(request);
  }
// AUTO-GENERATED DOCUMENTATION AND METHOD
/**
* Sets the access control policy on the specified resource. Replaces any existing policy.
*
* <p>Sample code:
*
* <pre><code>
* try (TopicAdminClient topicAdminClient = TopicAdminClient.create()) {
* String formattedResource = TopicName.create("[PROJECT]", "[TOPIC]").toString();
* Policy policy = Policy.newBuilder().build();
* SetIamPolicyRequest request = SetIamPolicyRequest.newBuilder()
* .setResource(formattedResource)
* .setPolicy(policy)
* .build();
* ApiFuture<Policy> future = topicAdminClient.setIamPolicyCallable().futureCall(request);
* // Do something
* Policy response = future.get();
* }
* </code></pre>
*/
public final UnaryCallable<SetIamPolicyRequest, Policy> setIamPolicyCallable() {
return setIamPolicyCallable;
}
// AUTO-GENERATED DOCUMENTATION AND METHOD
/**
* Gets the access control policy for a resource. Returns an empty policy if the resource exists
* and does not have a policy set.
*
* <p>Sample code:
*
* <pre><code>
* try (TopicAdminClient topicAdminClient = TopicAdminClient.create()) {
* String formattedResource = TopicName.create("[PROJECT]", "[TOPIC]").toString();
* Policy response = topicAdminClient.getIamPolicy(formattedResource);
* }
* </code></pre>
*
* @param resource REQUIRED: The resource for which the policy is being requested. `resource` is
* usually specified as a path. For example, a Project resource is specified as
* `projects/{project}`.
* @throws com.google.api.gax.grpc.ApiException if the remote call fails
*/
public final Policy getIamPolicy(String resource) {
GetIamPolicyRequest request = GetIamPolicyRequest.newBuilder().setResource(resource).build();
return getIamPolicy(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD
/**
* Gets the access control policy for a resource. Returns an empty policy if the resource exists
* and does not have a policy set.
*
* <p>Sample code:
*
* <pre><code>
* try (TopicAdminClient topicAdminClient = TopicAdminClient.create()) {
* String formattedResource = TopicName.create("[PROJECT]", "[TOPIC]").toString();
* GetIamPolicyRequest request = GetIamPolicyRequest.newBuilder()
* .setResource(formattedResource)
* .build();
* Policy response = topicAdminClient.getIamPolicy(request);
* }
* </code></pre>
*
* @param request The request object containing all of the parameters for the API call.
* @throws com.google.api.gax.grpc.ApiException if the remote call fails
*/
private final Policy getIamPolicy(GetIamPolicyRequest request) {
return getIamPolicyCallable().call(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD
/**
* Gets the access control policy for a resource. Returns an empty policy if the resource exists
* and does not have a policy set.
*
* <p>Sample code:
*
* <pre><code>
* try (TopicAdminClient topicAdminClient = TopicAdminClient.create()) {
* String formattedResource = TopicName.create("[PROJECT]", "[TOPIC]").toString();
* GetIamPolicyRequest request = GetIamPolicyRequest.newBuilder()
* .setResource(formattedResource)
* .build();
* ApiFuture<Policy> future = topicAdminClient.getIamPolicyCallable().futureCall(request);
* // Do something
* Policy response = future.get();
* }
* </code></pre>
*/
public final UnaryCallable<GetIamPolicyRequest, Policy> getIamPolicyCallable() {
return getIamPolicyCallable;
}
// AUTO-GENERATED DOCUMENTATION AND METHOD
/**
* Returns permissions that a caller has on the specified resource. If the resource does not
* exist, this will return an empty set of permissions, not a NOT_FOUND error.
*
* <p>Sample code:
*
* <pre><code>
* try (TopicAdminClient topicAdminClient = TopicAdminClient.create()) {
* String formattedResource = TopicName.create("[PROJECT]", "[TOPIC]").toString();
* List<String> permissions = new ArrayList<>();
* TestIamPermissionsResponse response = topicAdminClient.testIamPermissions(formattedResource, permissions);
* }
* </code></pre>
*
* @param resource REQUIRED: The resource for which the policy detail is being requested.
* `resource` is usually specified as a path. For example, a Project resource is specified as
* `projects/{project}`.
* @param permissions The set of permissions to check for the `resource`. Permissions with
* wildcards (such as '*' or 'storage.*') are not allowed. For more information see
* [IAM Overview](https://cloud.google.com/iam/docs/overview#permissions).
* @throws com.google.api.gax.grpc.ApiException if the remote call fails
*/
public final TestIamPermissionsResponse testIamPermissions(
String resource, List<String> permissions) {
TestIamPermissionsRequest request =
TestIamPermissionsRequest.newBuilder()
.setResource(resource)
.addAllPermissions(permissions)
.build();
return testIamPermissions(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD
/**
* Returns permissions that a caller has on the specified resource. If the resource does not
* exist, this will return an empty set of permissions, not a NOT_FOUND error.
*
* <p>Sample code:
*
* <pre><code>
* try (TopicAdminClient topicAdminClient = TopicAdminClient.create()) {
* String formattedResource = TopicName.create("[PROJECT]", "[TOPIC]").toString();
* List<String> permissions = new ArrayList<>();
* TestIamPermissionsRequest request = TestIamPermissionsRequest.newBuilder()
* .setResource(formattedResource)
* .addAllPermissions(permissions)
* .build();
* TestIamPermissionsResponse response = topicAdminClient.testIamPermissions(request);
* }
* </code></pre>
*
* @param request The request object containing all of the parameters for the API call.
* @throws com.google.api.gax.grpc.ApiException if the remote call fails
*/
  public final TestIamPermissionsResponse testIamPermissions(TestIamPermissionsRequest request) {
    // Request-object variant; delegates directly to the wired callable.
    return testIamPermissionsCallable().call(request);
  }
// AUTO-GENERATED DOCUMENTATION AND METHOD
/**
 * Returns the unary callable for the TestIamPermissions RPC, for callers that want direct or
 * asynchronous access. If the resource does not exist this returns an empty set of permissions,
 * not a NOT_FOUND error.
 *
 * <p>Sample code:
 *
 * <pre><code>
 * try (TopicAdminClient topicAdminClient = TopicAdminClient.create()) {
 *   String formattedResource = TopicName.create("[PROJECT]", "[TOPIC]").toString();
 *   List&lt;String&gt; permissions = new ArrayList&lt;&gt;();
 *   TestIamPermissionsRequest request = TestIamPermissionsRequest.newBuilder()
 *     .setResource(formattedResource)
 *     .addAllPermissions(permissions)
 *     .build();
 *   ApiFuture&lt;TestIamPermissionsResponse&gt; future = topicAdminClient.testIamPermissionsCallable().futureCall(request);
 *   // Do something
 *   TestIamPermissionsResponse response = future.get();
 * }
 * </code></pre>
 */
public final UnaryCallable<TestIamPermissionsRequest, TestIamPermissionsResponse>
    testIamPermissionsCallable() {
  // Simple accessor for the pre-built callable held by this client instance.
  return this.testIamPermissionsCallable;
}
/**
 * Initiates an orderly shutdown in which preexisting calls continue but new calls are immediately
 * cancelled.
 *
 * <p>All registered closeables are closed even when one of them fails: the first exception
 * encountered is rethrown after the loop completes, and any subsequent failures are attached to
 * it via {@link Throwable#addSuppressed}. The original code stopped at the first failing
 * closeable, leaking every resource after it in the list.
 *
 * @throws Exception the first exception thrown while closing, with later ones suppressed
 */
@Override
public final void close() throws Exception {
  Exception firstFailure = null;
  for (AutoCloseable closeable : closeables) {
    try {
      closeable.close();
    } catch (Exception e) {
      if (firstFailure == null) {
        firstFailure = e;
      } else {
        // Keep closing; record secondary failures on the primary one.
        firstFailure.addSuppressed(e);
      }
    }
  }
  if (firstFailure != null) {
    throw firstFailure;
  }
}
}
| |
// This file was generated by Mendix Modeler.
//
// WARNING: Code you write here will be lost the next time you deploy the project.
package system.proxies;
/**
 * Generated Mendix proxy for the entity "System.User".
 *
 * <p>Wraps an {@code IMendixObject} plus an optional {@code IContext}; every getter/setter
 * delegates to {@code getValue}/{@code setValue} on the wrapped object using the member names
 * declared in {@link MemberNames}. Per the file header, this code is regenerated by the Mendix
 * Modeler on deploy, so hand edits are lost.
 */
public class User
{
// Wrapped runtime object; never null (checked in the protected constructor).
private final com.mendix.systemwideinterfaces.core.IMendixObject userMendixObject;
// Context captured at construction; may be null, in which case the no-context
// get/set overloads will pass null to the Core API (see getContext()).
private final com.mendix.systemwideinterfaces.core.IContext context;
/**
 * Internal name of this entity
 */
public static final java.lang.String entityName = "System.User";
/**
 * Enum describing members of this entity.
 * Each constant's metaName is the string key used with IMendixObject.getValue/setValue;
 * association members carry their module-qualified names (e.g. "System.UserRoles").
 */
public enum MemberNames
{
Name("Name"),
Password("Password"),
LastLogin("LastLogin"),
Blocked("Blocked"),
Active("Active"),
FailedLogins("FailedLogins"),
WebServiceUser("WebServiceUser"),
IsAnonymous("IsAnonymous"),
UserRoles("System.UserRoles"),
User_Language("System.User_Language"),
User_TimeZone("System.User_TimeZone");
private java.lang.String metaName;
MemberNames(java.lang.String s)
{
metaName = s;
}
@Override
public java.lang.String toString()
{
return metaName;
}
}
// Creates a brand-new System.User object in the given context.
public User(com.mendix.systemwideinterfaces.core.IContext context)
{
this(context, com.mendix.core.Core.instantiate(context, "System.User"));
}
// Wraps an existing object; rejects null and objects that are not (subclasses of) System.User.
protected User(com.mendix.systemwideinterfaces.core.IContext context, com.mendix.systemwideinterfaces.core.IMendixObject userMendixObject)
{
if (userMendixObject == null)
throw new java.lang.IllegalArgumentException("The given object cannot be null.");
if (!com.mendix.core.Core.isSubClassOf("System.User", userMendixObject.getType()))
throw new java.lang.IllegalArgumentException("The given object is not a System.User");
this.userMendixObject = userMendixObject;
this.context = context;
}
/**
 * @deprecated Use 'User.load(IContext, IMendixIdentifier)' instead.
 */
@Deprecated
public static system.proxies.User initialize(com.mendix.systemwideinterfaces.core.IContext context, com.mendix.systemwideinterfaces.core.IMendixIdentifier mendixIdentifier) throws com.mendix.core.CoreException
{
return system.proxies.User.load(context, mendixIdentifier);
}
/**
 * Initialize a proxy using context (recommended). This context will be used for security checking when the get- and set-methods without context parameters are called.
 * The get- and set-methods with context parameter should be used when for instance sudo access is necessary (IContext.getSudoContext() can be used to obtain sudo access).
 */
// NOTE: dispatches to the most specific generated proxy; currently only
// Administration.Account is checked, so other User subtypes fall back to this class.
public static system.proxies.User initialize(com.mendix.systemwideinterfaces.core.IContext context, com.mendix.systemwideinterfaces.core.IMendixObject mendixObject)
{
if (com.mendix.core.Core.isSubClassOf("Administration.Account", mendixObject.getType()))
return administration.proxies.Account.initialize(context, mendixObject);
return new system.proxies.User(context, mendixObject);
}
// Loads a single object by identifier via Core.retrieveId, then wraps it.
public static system.proxies.User load(com.mendix.systemwideinterfaces.core.IContext context, com.mendix.systemwideinterfaces.core.IMendixIdentifier mendixIdentifier) throws com.mendix.core.CoreException
{
com.mendix.systemwideinterfaces.core.IMendixObject mendixObject = com.mendix.core.Core.retrieveId(context, mendixIdentifier);
return system.proxies.User.initialize(context, mendixObject);
}
// Loads all objects matching "//System.User" + xpathConstraint; the caller-supplied
// constraint is appended verbatim to the XPath query.
public static java.util.List<? extends system.proxies.User> load(com.mendix.systemwideinterfaces.core.IContext context, java.lang.String xpathConstraint) throws com.mendix.core.CoreException
{
java.util.List<system.proxies.User> result = new java.util.ArrayList<system.proxies.User>();
for (com.mendix.systemwideinterfaces.core.IMendixObject obj : com.mendix.core.Core.retrieveXPathQuery(context, "//System.User" + xpathConstraint))
result.add(system.proxies.User.initialize(context, obj));
return result;
}
/**
 * Commit the changes made on this proxy object.
 */
public final void commit() throws com.mendix.core.CoreException
{
com.mendix.core.Core.commit(context, getMendixObject());
}
/**
 * Commit the changes made on this proxy object using the specified context.
 */
public final void commit(com.mendix.systemwideinterfaces.core.IContext context) throws com.mendix.core.CoreException
{
com.mendix.core.Core.commit(context, getMendixObject());
}
/**
 * Delete the object.
 */
public final void delete()
{
com.mendix.core.Core.delete(context, getMendixObject());
}
/**
 * Delete the object using the specified context.
 */
public final void delete(com.mendix.systemwideinterfaces.core.IContext context)
{
com.mendix.core.Core.delete(context, getMendixObject());
}
/**
 * @return value of Name
 */
public final String getName()
{
return getName(getContext());
}
/**
 * @param context
 * @return value of Name
 */
public final String getName(com.mendix.systemwideinterfaces.core.IContext context)
{
return (String) getMendixObject().getValue(context, MemberNames.Name.toString());
}
/**
 * Set value of Name
 * @param name
 */
public final void setName(String name)
{
setName(getContext(), name);
}
/**
 * Set value of Name
 * @param context
 * @param name
 */
public final void setName(com.mendix.systemwideinterfaces.core.IContext context, String name)
{
getMendixObject().setValue(context, MemberNames.Name.toString(), name);
}
/**
 * Set value of Password
 * @param password
 */
// NOTE(review): Password deliberately has setters only — no getPassword is generated,
// presumably because the attribute is write-only at the proxy level; confirm against
// the Mendix domain model before relying on this.
public final void setPassword(String password)
{
setPassword(getContext(), password);
}
/**
 * Set value of Password
 * @param context
 * @param password
 */
public final void setPassword(com.mendix.systemwideinterfaces.core.IContext context, String password)
{
getMendixObject().setValue(context, MemberNames.Password.toString(), password);
}
/**
 * @return value of LastLogin
 */
public final java.util.Date getLastLogin()
{
return getLastLogin(getContext());
}
/**
 * @param context
 * @return value of LastLogin
 */
public final java.util.Date getLastLogin(com.mendix.systemwideinterfaces.core.IContext context)
{
return (java.util.Date) getMendixObject().getValue(context, MemberNames.LastLogin.toString());
}
/**
 * Set value of LastLogin
 * @param lastlogin
 */
public final void setLastLogin(java.util.Date lastlogin)
{
setLastLogin(getContext(), lastlogin);
}
/**
 * Set value of LastLogin
 * @param context
 * @param lastlogin
 */
public final void setLastLogin(com.mendix.systemwideinterfaces.core.IContext context, java.util.Date lastlogin)
{
getMendixObject().setValue(context, MemberNames.LastLogin.toString(), lastlogin);
}
/**
 * @return value of Blocked
 */
public final Boolean getBlocked()
{
return getBlocked(getContext());
}
/**
 * @param context
 * @return value of Blocked
 */
public final Boolean getBlocked(com.mendix.systemwideinterfaces.core.IContext context)
{
return (Boolean) getMendixObject().getValue(context, MemberNames.Blocked.toString());
}
/**
 * Set value of Blocked
 * @param blocked
 */
public final void setBlocked(Boolean blocked)
{
setBlocked(getContext(), blocked);
}
/**
 * Set value of Blocked
 * @param context
 * @param blocked
 */
public final void setBlocked(com.mendix.systemwideinterfaces.core.IContext context, Boolean blocked)
{
getMendixObject().setValue(context, MemberNames.Blocked.toString(), blocked);
}
/**
 * @return value of Active
 */
public final Boolean getActive()
{
return getActive(getContext());
}
/**
 * @param context
 * @return value of Active
 */
public final Boolean getActive(com.mendix.systemwideinterfaces.core.IContext context)
{
return (Boolean) getMendixObject().getValue(context, MemberNames.Active.toString());
}
/**
 * Set value of Active
 * @param active
 */
public final void setActive(Boolean active)
{
setActive(getContext(), active);
}
/**
 * Set value of Active
 * @param context
 * @param active
 */
public final void setActive(com.mendix.systemwideinterfaces.core.IContext context, Boolean active)
{
getMendixObject().setValue(context, MemberNames.Active.toString(), active);
}
/**
 * @return value of FailedLogins
 */
public final Integer getFailedLogins()
{
return getFailedLogins(getContext());
}
/**
 * @param context
 * @return value of FailedLogins
 */
public final Integer getFailedLogins(com.mendix.systemwideinterfaces.core.IContext context)
{
return (Integer) getMendixObject().getValue(context, MemberNames.FailedLogins.toString());
}
/**
 * Set value of FailedLogins
 * @param failedlogins
 */
public final void setFailedLogins(Integer failedlogins)
{
setFailedLogins(getContext(), failedlogins);
}
/**
 * Set value of FailedLogins
 * @param context
 * @param failedlogins
 */
public final void setFailedLogins(com.mendix.systemwideinterfaces.core.IContext context, Integer failedlogins)
{
getMendixObject().setValue(context, MemberNames.FailedLogins.toString(), failedlogins);
}
/**
 * @return value of WebServiceUser
 */
public final Boolean getWebServiceUser()
{
return getWebServiceUser(getContext());
}
/**
 * @param context
 * @return value of WebServiceUser
 */
public final Boolean getWebServiceUser(com.mendix.systemwideinterfaces.core.IContext context)
{
return (Boolean) getMendixObject().getValue(context, MemberNames.WebServiceUser.toString());
}
/**
 * Set value of WebServiceUser
 * @param webserviceuser
 */
public final void setWebServiceUser(Boolean webserviceuser)
{
setWebServiceUser(getContext(), webserviceuser);
}
/**
 * Set value of WebServiceUser
 * @param context
 * @param webserviceuser
 */
public final void setWebServiceUser(com.mendix.systemwideinterfaces.core.IContext context, Boolean webserviceuser)
{
getMendixObject().setValue(context, MemberNames.WebServiceUser.toString(), webserviceuser);
}
/**
 * @return value of IsAnonymous
 */
public final Boolean getIsAnonymous()
{
return getIsAnonymous(getContext());
}
/**
 * @param context
 * @return value of IsAnonymous
 */
public final Boolean getIsAnonymous(com.mendix.systemwideinterfaces.core.IContext context)
{
return (Boolean) getMendixObject().getValue(context, MemberNames.IsAnonymous.toString());
}
/**
 * Set value of IsAnonymous
 * @param isanonymous
 */
public final void setIsAnonymous(Boolean isanonymous)
{
setIsAnonymous(getContext(), isanonymous);
}
/**
 * Set value of IsAnonymous
 * @param context
 * @param isanonymous
 */
public final void setIsAnonymous(com.mendix.systemwideinterfaces.core.IContext context, Boolean isanonymous)
{
getMendixObject().setValue(context, MemberNames.IsAnonymous.toString(), isanonymous);
}
/**
 * @return value of UserRoles
 */
public final java.util.List<system.proxies.UserRole> getUserRoles() throws com.mendix.core.CoreException
{
return getUserRoles(getContext());
}
/**
 * @param context
 * @return value of UserRoles (empty list when the association is unset, never null)
 */
@SuppressWarnings("unchecked")
public final java.util.List<system.proxies.UserRole> getUserRoles(com.mendix.systemwideinterfaces.core.IContext context) throws com.mendix.core.CoreException
{
java.util.List<system.proxies.UserRole> result = new java.util.ArrayList<system.proxies.UserRole>();
// The stored value is a list of identifiers; resolve them to objects in one batch call.
Object valueObject = getMendixObject().getValue(context, MemberNames.UserRoles.toString());
if (valueObject == null)
return result;
for (com.mendix.systemwideinterfaces.core.IMendixObject mendixObject : com.mendix.core.Core.retrieveIdList(context, (java.util.List<com.mendix.systemwideinterfaces.core.IMendixIdentifier>) valueObject))
result.add(system.proxies.UserRole.initialize(context, mendixObject));
return result;
}
/**
 * Set value of UserRoles
 * @param userroles
 */
public final void setUserRoles(java.util.List<system.proxies.UserRole> userroles)
{
setUserRoles(getContext(), userroles);
}
/**
 * Set value of UserRoles
 * @param context
 * @param userroles
 */
public final void setUserRoles(com.mendix.systemwideinterfaces.core.IContext context, java.util.List<system.proxies.UserRole> userroles)
{
// Associations are stored as identifier lists, so map each proxy back to its id.
java.util.List<com.mendix.systemwideinterfaces.core.IMendixIdentifier> identifiers = new java.util.ArrayList<com.mendix.systemwideinterfaces.core.IMendixIdentifier>();
for (system.proxies.UserRole proxyObject : userroles)
identifiers.add(proxyObject.getMendixObject().getId());
getMendixObject().setValue(context, MemberNames.UserRoles.toString(), identifiers);
}
/**
 * @return value of User_Language
 */
public final system.proxies.Language getUser_Language() throws com.mendix.core.CoreException
{
return getUser_Language(getContext());
}
/**
 * @param context
 * @return value of User_Language, or null when the association is unset
 */
public final system.proxies.Language getUser_Language(com.mendix.systemwideinterfaces.core.IContext context) throws com.mendix.core.CoreException
{
system.proxies.Language result = null;
com.mendix.systemwideinterfaces.core.IMendixIdentifier identifier = getMendixObject().getValue(context, MemberNames.User_Language.toString());
if (identifier != null)
result = system.proxies.Language.load(context, identifier);
return result;
}
/**
 * Set value of User_Language
 * @param user_language
 */
public final void setUser_Language(system.proxies.Language user_language)
{
setUser_Language(getContext(), user_language);
}
/**
 * Set value of User_Language; passing null clears the association.
 * @param context
 * @param user_language
 */
public final void setUser_Language(com.mendix.systemwideinterfaces.core.IContext context, system.proxies.Language user_language)
{
if (user_language == null)
getMendixObject().setValue(context, MemberNames.User_Language.toString(), null);
else
getMendixObject().setValue(context, MemberNames.User_Language.toString(), user_language.getMendixObject().getId());
}
/**
 * @return value of User_TimeZone
 */
public final system.proxies.TimeZone getUser_TimeZone() throws com.mendix.core.CoreException
{
return getUser_TimeZone(getContext());
}
/**
 * @param context
 * @return value of User_TimeZone, or null when the association is unset
 */
public final system.proxies.TimeZone getUser_TimeZone(com.mendix.systemwideinterfaces.core.IContext context) throws com.mendix.core.CoreException
{
system.proxies.TimeZone result = null;
com.mendix.systemwideinterfaces.core.IMendixIdentifier identifier = getMendixObject().getValue(context, MemberNames.User_TimeZone.toString());
if (identifier != null)
result = system.proxies.TimeZone.load(context, identifier);
return result;
}
/**
 * Set value of User_TimeZone
 * @param user_timezone
 */
public final void setUser_TimeZone(system.proxies.TimeZone user_timezone)
{
setUser_TimeZone(getContext(), user_timezone);
}
/**
 * Set value of User_TimeZone; passing null clears the association.
 * @param context
 * @param user_timezone
 */
public final void setUser_TimeZone(com.mendix.systemwideinterfaces.core.IContext context, system.proxies.TimeZone user_timezone)
{
if (user_timezone == null)
getMendixObject().setValue(context, MemberNames.User_TimeZone.toString(), null);
else
getMendixObject().setValue(context, MemberNames.User_TimeZone.toString(), user_timezone.getMendixObject().getId());
}
/**
 * @return the IMendixObject instance of this proxy for use in the Core interface.
 */
public final com.mendix.systemwideinterfaces.core.IMendixObject getMendixObject()
{
return userMendixObject;
}
/**
 * @return the IContext instance of this proxy, or null if no IContext instance was specified at initialization.
 */
public final com.mendix.systemwideinterfaces.core.IContext getContext()
{
return context;
}
// Equality is delegated to the wrapped IMendixObject; proxies of different
// generated classes are never equal even when wrapping the same object.
@Override
public boolean equals(Object obj)
{
if (obj == this)
return true;
if (obj != null && getClass().equals(obj.getClass()))
{
final system.proxies.User that = (system.proxies.User) obj;
return getMendixObject().equals(that.getMendixObject());
}
return false;
}
// Consistent with equals: hash of the wrapped IMendixObject.
@Override
public int hashCode()
{
return getMendixObject().hashCode();
}
/**
 * @return String name of this class
 */
public static java.lang.String getType()
{
return "System.User";
}
/**
 * @return String GUID from this object, format: ID_0000000000
 * @deprecated Use getMendixObject().getId().toLong() to get a unique identifier for this object.
 */
@Deprecated
public java.lang.String getGUID()
{
return "ID_" + getMendixObject().getId().toLong();
}
}
| |
package jp.ac.teu.tlab.vimana.base;
import jp.ac.teu.tlab.vimana.object.Config;
import com.github.mustachejava.DefaultMustacheFactory;
import com.github.mustachejava.Mustache;
import com.github.mustachejava.MustacheFactory;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.OutputStream;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import jp.ac.teu.tlab.vimana.annotation.GET;
import jp.ac.teu.tlab.vimana.annotation.POST;
import jp.ac.teu.tlab.vimana.annotation.Path;
import static jp.ac.teu.tlab.vimana.base.Controller.getRealPath;
import static jp.ac.teu.tlab.vimana.base.Controller.setHttpServlet;
import static jp.ac.teu.tlab.vimana.base.FileAccessUtil.getContentType;
import static jp.ac.teu.tlab.vimana.base.FileAccessUtil.setMinetypes;
import jp.ac.teu.tlab.vimana.object.Navigation;
import jp.ac.teu.tlab.vimana.object.page.ContentPage;
import static jp.ac.teu.tlab.vimana.object.file.PseudoStaticFile.PSEUDO_PATH;
import jp.ac.teu.tlab.vimana.object.file.StaticFile;
import static jp.ac.teu.tlab.vimana.object.file.StaticFile.getStaticFile;
import static jp.ac.teu.tlab.vimana.object.file.StaticFile.STATIC_PATH;
import net.sf.json.JSONArray;
/**
*
* @author Yuta YAMAGUCHI
*/
@WebServlet(name = "Controller", urlPatterns = {"/*"})
public class Engine extends HttpServlet {
private final String CONFIG_FILE = "/WEB-INF/config.json";
private Config config;
private Mustache templateMustache;
public static Mustache staticContentPageMustache;
public static Mustache dynamicContentPageMustache;
private GlobalSettings globalSettings;
private List<Controller> controllersList;
@Override
public void init() throws ServletException {
super.init();
setHttpServlet(this);
setMinetypes();
controllersList = new ArrayList<>();
try {
config = new Config(getRealPath(CONFIG_FILE));
MustacheFactory mustacheFactory = new DefaultMustacheFactory();
templateMustache = mustacheFactory.compile(Controller.getRealPath(config.getTemplateLayoutPath()));
staticContentPageMustache = mustacheFactory.compile(Controller.getRealPath(config.getStaticContentPageLayoutPath()));
dynamicContentPageMustache = mustacheFactory.compile(Controller.getRealPath(config.getDynamicContentPageLayoutPath()));
String globalSettingClassName = config.getGlobalClassName();
if (!globalSettingClassName.isEmpty()) {
Class globalSettingClass = Class.forName(globalSettingClassName);
globalSettings = (GlobalSettings) globalSettingClass.newInstance();
} else {
globalSettings = new GlobalSettings() {};
}
JSONArray controllerClassNames = config.getControllerClassNames();
for (int i = 0; i < controllerClassNames.size(); i++) {
Class controllerClass = Class.forName(controllerClassNames.getString(i));
controllersList.add((Controller) controllerClass.newInstance());
}
PluginLoader.loadPlugin(config.getPluginLoadPath());
globalSettings.onStart();
} catch (ClassNotFoundException | InstantiationException | IllegalAccessException | IOException ex) {
Logger.getLogger(Engine.class.getName()).log(Level.SEVERE, null, ex);
}
}
@Override
protected void doGet(HttpServletRequest request, HttpServletResponse response)
throws IOException {
request.setCharacterEncoding(config.getCharset());
response.setContentType("text/html; charset=" + config.getCharset());
String hpjaxHeader = request.getHeader("X-HPJAX");
String pathInfo = request.getPathInfo();
if (pathInfo == null || pathInfo.equals("")) {
pathInfo = "/";
}
try {
if (isPseudoStaticFile(pathInfo)) {
outPutFileData(response, getStaticFile(pathInfo));
return;
}
if (isStaticContents(pathInfo)) {
outPutFileData(response, getStaticFile(pathInfo, getRealPath(pathInfo)));
return;
}
for (Controller controller : controllersList) {
Method[] methods = controller.getClass().getDeclaredMethods();
for (Method method : methods) {
if(!hasGetAnnotation(method)) {
continue;
}
method.setAccessible(true);
Path path = method.getAnnotation(Path.class);
String[] values = path.value();
for (String value : values) {
if (isTargetPath(value, pathInfo)) {
if (method.getGenericReturnType() == Navigation.class) {
boolean isHpjax = false;
if (hpjaxHeader != null) {
isHpjax = (hpjaxHeader.equals("true"));
}
Navigation navigation = executeMethodGetNavigation(controller, method, request, response);
ContentPage page = navigation.getPage(pathInfo);
if (!isHpjax) {
templateMustache.execute(response.getWriter(), page).flush();
} else {
page.executeFlash(response.getWriter());
}
return;
} else {
executeMethod(controller, method, request, response);
return;
}
}
}
}
}
globalSettings.onNotFoundError(response);
} catch (FileNotFoundException ex) {
globalSettings.onNotFoundError(response);
} catch (InvocationTargetException | IllegalAccessException | IllegalArgumentException | IOException ex) {
Logger.getLogger(Controller.class.getName()).log(Level.SEVERE, null, ex);
globalSettings.onError(response, ex);
}
}
@Override
protected void doPost(HttpServletRequest request, HttpServletResponse response)
throws IOException {
request.setCharacterEncoding(config.getCharset());
response.setContentType("text/html; charset=" + config.getCharset());
String pathInfo = request.getPathInfo();
if (pathInfo == null) {
pathInfo = "/";
}
try {
for (Controller controller : controllersList) {
Method[] methods = controller.getClass().getDeclaredMethods();
for (Method method : methods) {
if(!hasPostAnnotation(method)) {
continue;
}
method.setAccessible(true);
Path path = method.getAnnotation(Path.class);
String[] values = path.value();
for (String value : values) {
if (isTargetPath(value, pathInfo)) {
executeMethod(controller, method, request, response);
return;
}
}
}
}
globalSettings.onBadRequestError(response);
} catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException ex) {
Logger.getLogger(Engine.class.getName()).log(Level.SEVERE, null, ex);
globalSettings.onError(response, ex);
}
}
private boolean isPseudoStaticFile(String path) {
return path.startsWith(PSEUDO_PATH);
}
private boolean isStaticContents(String path) {
return path.startsWith(STATIC_PATH);
}
private boolean hasGetAnnotation(Method method) {
return method.getAnnotation(GET.class) != null;
}
private boolean hasPostAnnotation(Method method) {
return method.getAnnotation(POST.class) != null;
}
private boolean isTargetPath(String annotationValue, String pathInfo) {
if (annotationValue.endsWith("/*")) {
annotationValue = annotationValue.substring(0, annotationValue.length()-2);
if(pathInfo.indexOf(annotationValue) == 0){
return true;
}
} else {
if(annotationValue.equals(pathInfo)) {
return true;
}
}
return false;
}
private void outPutFileData(HttpServletResponse response, StaticFile staticFile)
throws FileNotFoundException, IOException {
if (staticFile == null) {
throw new FileNotFoundException();
}
byte[] data = staticFile.getFileData();
OutputStream outputStream = null;
response.setContentType(getContentType(staticFile.getFile()) + "; charset=UTF-8");
response.setHeader("Content-Disposition", "filename=\"" + staticFile.getFile().getName() + "\"");
try {
outputStream = response.getOutputStream();
outputStream.write(data);
} finally {
if (outputStream != null) {
outputStream.close();
}
}
}
private Navigation executeMethodGetNavigation(Controller controller, Method method, HttpServletRequest request, HttpServletResponse response)
throws IllegalAccessException, IllegalArgumentException, InvocationTargetException {
Class<? extends Object>[] methodArgs = method.getParameterTypes();
List<Object> args = new ArrayList<>();
for (Class methodArg : methodArgs) {
if (methodArg == HttpServletRequest.class) {
args.add(request);
} else if (methodArg == HttpServletResponse.class) {
args.add(response);
}
}
return (Navigation) method.invoke(controller, args.toArray(new Object[args.size()]));
}
private void executeMethod(Controller controller, Method method, HttpServletRequest request, HttpServletResponse response)
throws IllegalAccessException, IllegalArgumentException, InvocationTargetException {
Class<? extends Object>[] methodArgs = method.getParameterTypes();
List<Object> args = new ArrayList<>();
for (Class methodArg : methodArgs) {
if (methodArg == HttpServletRequest.class) {
args.add(request);
} else if (methodArg == HttpServletResponse.class) {
args.add(response);
}
}
method.invoke(controller, args.toArray(new Object[args.size()]));
}
}
| |
package net.minecraft.world.gen;
import java.util.List;
import java.util.Random;
import net.minecraft.block.BlockFalling;
import net.minecraft.block.material.Material;
import net.minecraft.block.state.IBlockState;
import net.minecraft.entity.EnumCreatureType;
import net.minecraft.init.Blocks;
import net.minecraft.util.BlockPos;
import net.minecraft.util.IProgressUpdate;
import net.minecraft.util.MathHelper;
import net.minecraft.world.World;
import net.minecraft.world.biome.BiomeGenBase;
import net.minecraft.world.chunk.Chunk;
import net.minecraft.world.chunk.ChunkPrimer;
import net.minecraft.world.chunk.IChunkProvider;
public class ChunkProviderEnd implements IChunkProvider
{
private Random endRNG;
private NoiseGeneratorOctaves noiseGen1;
private NoiseGeneratorOctaves noiseGen2;
private NoiseGeneratorOctaves noiseGen3;
public NoiseGeneratorOctaves noiseGen4;
public NoiseGeneratorOctaves noiseGen5;
private World endWorld;
private double[] densities;
/** The biomes that are used to generate the chunk */
private BiomeGenBase[] biomesForGeneration;
double[] noiseData1;
double[] noiseData2;
double[] noiseData3;
double[] noiseData4;
double[] noiseData5;
public ChunkProviderEnd(World worldIn, long p_i2007_2_)
{
this.endWorld = worldIn;
this.endRNG = new Random(p_i2007_2_);
this.noiseGen1 = new NoiseGeneratorOctaves(this.endRNG, 16);
this.noiseGen2 = new NoiseGeneratorOctaves(this.endRNG, 16);
this.noiseGen3 = new NoiseGeneratorOctaves(this.endRNG, 8);
this.noiseGen4 = new NoiseGeneratorOctaves(this.endRNG, 10);
this.noiseGen5 = new NoiseGeneratorOctaves(this.endRNG, 16);
}
public void func_180520_a(int p_180520_1_, int p_180520_2_, ChunkPrimer p_180520_3_)
{
int i = 2;
int j = i + 1;
int k = 33;
int l = i + 1;
this.densities = this.initializeNoiseField(this.densities, p_180520_1_ * i, 0, p_180520_2_ * i, j, k, l);
for (int i1 = 0; i1 < i; ++i1)
{
for (int j1 = 0; j1 < i; ++j1)
{
for (int k1 = 0; k1 < 32; ++k1)
{
double d0 = 0.25D;
double d1 = this.densities[((i1 + 0) * l + j1 + 0) * k + k1 + 0];
double d2 = this.densities[((i1 + 0) * l + j1 + 1) * k + k1 + 0];
double d3 = this.densities[((i1 + 1) * l + j1 + 0) * k + k1 + 0];
double d4 = this.densities[((i1 + 1) * l + j1 + 1) * k + k1 + 0];
double d5 = (this.densities[((i1 + 0) * l + j1 + 0) * k + k1 + 1] - d1) * d0;
double d6 = (this.densities[((i1 + 0) * l + j1 + 1) * k + k1 + 1] - d2) * d0;
double d7 = (this.densities[((i1 + 1) * l + j1 + 0) * k + k1 + 1] - d3) * d0;
double d8 = (this.densities[((i1 + 1) * l + j1 + 1) * k + k1 + 1] - d4) * d0;
for (int l1 = 0; l1 < 4; ++l1)
{
double d9 = 0.125D;
double d10 = d1;
double d11 = d2;
double d12 = (d3 - d1) * d9;
double d13 = (d4 - d2) * d9;
for (int i2 = 0; i2 < 8; ++i2)
{
double d14 = 0.125D;
double d15 = d10;
double d16 = (d11 - d10) * d14;
for (int j2 = 0; j2 < 8; ++j2)
{
IBlockState iblockstate = null;
if (d15 > 0.0D)
{
iblockstate = Blocks.end_stone.getDefaultState();
}
int k2 = i2 + i1 * 8;
int l2 = l1 + k1 * 4;
int i3 = j2 + j1 * 8;
p_180520_3_.setBlockState(k2, l2, i3, iblockstate);
d15 += d16;
}
d10 += d12;
d11 += d13;
}
d1 += d5;
d2 += d6;
d3 += d7;
d4 += d8;
}
}
}
}
}
public void func_180519_a(ChunkPrimer p_180519_1_)
{
for (int i = 0; i < 16; ++i)
{
for (int j = 0; j < 16; ++j)
{
int k = 1;
int l = -1;
IBlockState iblockstate = Blocks.end_stone.getDefaultState();
IBlockState iblockstate1 = Blocks.end_stone.getDefaultState();
for (int i1 = 127; i1 >= 0; --i1)
{
IBlockState iblockstate2 = p_180519_1_.getBlockState(i, i1, j);
if (iblockstate2.getBlock().getMaterial() == Material.air)
{
l = -1;
}
else if (iblockstate2.getBlock() == Blocks.stone)
{
if (l == -1)
{
if (k <= 0)
{
iblockstate = Blocks.air.getDefaultState();
iblockstate1 = Blocks.end_stone.getDefaultState();
}
l = k;
if (i1 >= 0)
{
p_180519_1_.setBlockState(i, i1, j, iblockstate);
}
else
{
p_180519_1_.setBlockState(i, i1, j, iblockstate1);
}
}
else if (l > 0)
{
--l;
p_180519_1_.setBlockState(i, i1, j, iblockstate1);
}
}
}
}
}
}
/**
* Will return back a chunk, if it doesn't exist and its not a MP client it will generates all the blocks for the
* specified chunk from the map seed and chunk seed
*/
public Chunk provideChunk(int x, int z)
{
this.endRNG.setSeed((long)x * 341873128712L + (long)z * 132897987541L);
ChunkPrimer chunkprimer = new ChunkPrimer();
this.biomesForGeneration = this.endWorld.getWorldChunkManager().loadBlockGeneratorData(this.biomesForGeneration, x * 16, z * 16, 16, 16);
this.func_180520_a(x, z, chunkprimer);
this.func_180519_a(chunkprimer);
Chunk chunk = new Chunk(this.endWorld, chunkprimer, x, z);
byte[] abyte = chunk.getBiomeArray();
for (int i = 0; i < abyte.length; ++i)
{
abyte[i] = (byte)this.biomesForGeneration[i].biomeID;
}
chunk.generateSkylightMap();
return chunk;
}
/**
* generates a subset of the level's terrain data. Takes 7 arguments: the [empty] noise array, the position, and the
* size.
*/
private double[] initializeNoiseField(double[] p_73187_1_, int p_73187_2_, int p_73187_3_, int p_73187_4_, int p_73187_5_, int p_73187_6_, int p_73187_7_)
{
if (p_73187_1_ == null)
{
p_73187_1_ = new double[p_73187_5_ * p_73187_6_ * p_73187_7_];
}
double d0 = 684.412D;
double d1 = 684.412D;
this.noiseData4 = this.noiseGen4.generateNoiseOctaves(this.noiseData4, p_73187_2_, p_73187_4_, p_73187_5_, p_73187_7_, 1.121D, 1.121D, 0.5D);
this.noiseData5 = this.noiseGen5.generateNoiseOctaves(this.noiseData5, p_73187_2_, p_73187_4_, p_73187_5_, p_73187_7_, 200.0D, 200.0D, 0.5D);
d0 = d0 * 2.0D;
this.noiseData1 = this.noiseGen3.generateNoiseOctaves(this.noiseData1, p_73187_2_, p_73187_3_, p_73187_4_, p_73187_5_, p_73187_6_, p_73187_7_, d0 / 80.0D, d1 / 160.0D, d0 / 80.0D);
this.noiseData2 = this.noiseGen1.generateNoiseOctaves(this.noiseData2, p_73187_2_, p_73187_3_, p_73187_4_, p_73187_5_, p_73187_6_, p_73187_7_, d0, d1, d0);
this.noiseData3 = this.noiseGen2.generateNoiseOctaves(this.noiseData3, p_73187_2_, p_73187_3_, p_73187_4_, p_73187_5_, p_73187_6_, p_73187_7_, d0, d1, d0);
int i = 0;
for (int j = 0; j < p_73187_5_; ++j)
{
for (int k = 0; k < p_73187_7_; ++k)
{
float f = (float)(j + p_73187_2_) / 1.0F;
float f1 = (float)(k + p_73187_4_) / 1.0F;
float f2 = 100.0F - MathHelper.sqrt_float(f * f + f1 * f1) * 8.0F;
if (f2 > 80.0F)
{
f2 = 80.0F;
}
if (f2 < -100.0F)
{
f2 = -100.0F;
}
for (int l = 0; l < p_73187_6_; ++l)
{
double d2 = 0.0D;
double d3 = this.noiseData2[i] / 512.0D;
double d4 = this.noiseData3[i] / 512.0D;
double d5 = (this.noiseData1[i] / 10.0D + 1.0D) / 2.0D;
if (d5 < 0.0D)
{
d2 = d3;
}
else if (d5 > 1.0D)
{
d2 = d4;
}
else
{
d2 = d3 + (d4 - d3) * d5;
}
d2 = d2 - 8.0D;
d2 = d2 + (double)f2;
int i1 = 2;
if (l > p_73187_6_ / 2 - i1)
{
double d6 = (double)((float)(l - (p_73187_6_ / 2 - i1)) / 64.0F);
d6 = MathHelper.clamp_double(d6, 0.0D, 1.0D);
d2 = d2 * (1.0D - d6) + -3000.0D * d6;
}
i1 = 8;
if (l < i1)
{
double d7 = (double)((float)(i1 - l) / ((float)i1 - 1.0F));
d2 = d2 * (1.0D - d7) + -30.0D * d7;
}
p_73187_1_[i] = d2;
++i;
}
}
}
return p_73187_1_;
}
/**
* Checks to see if a chunk exists at x, z
*/
public boolean chunkExists(int x, int z)
{
return true;
}
/**
* Populates chunk with ores etc etc
*/
public void populate(IChunkProvider p_73153_1_, int p_73153_2_, int p_73153_3_)
{
BlockFalling.fallInstantly = true;
BlockPos blockpos = new BlockPos(p_73153_2_ * 16, 0, p_73153_3_ * 16);
this.endWorld.getBiomeGenForCoords(blockpos.add(16, 0, 16)).decorate(this.endWorld, this.endWorld.rand, blockpos);
BlockFalling.fallInstantly = false;
}
// Obfuscated-name callback; this provider performs no work here and
// always reports false.
public boolean func_177460_a(IChunkProvider p_177460_1_, Chunk p_177460_2_, int p_177460_3_, int p_177460_4_)
{
    return false;
}
/**
 * Two modes of operation: if passed true, save all Chunks in one go. If passed false, save up to two chunks.
 * Return true if all chunks have been saved.
 */
public boolean saveChunks(boolean p_73151_1_, IProgressUpdate progressCallback)
{
    // Nothing is persisted by this provider; report everything as saved.
    return true;
}
/**
 * Save extra data not associated with any Chunk. Not saved during autosave, only during world unload. Currently
 * unimplemented.
 */
public void saveExtraData()
{
    // Intentionally empty: there is no extra data to persist.
}
/**
 * Unloads chunks that are marked to be unloaded. This is not guaranteed to unload every such chunk.
 */
public boolean unloadQueuedChunks()
{
    // Nothing is ever queued for unloading here.
    return false;
}
/**
 * Returns if the IChunkProvider supports saving.
 */
public boolean canSave()
{
    return true;
}
/**
 * Converts the instance data to a readable string.
 */
public String makeString()
{
    // NOTE(review): name matches the vanilla overworld provider's label;
    // presumably kept for compatibility — confirm before changing.
    return "RandomLevelSource";
}
// Delegates spawn candidates entirely to the biome at the given position.
public List<BiomeGenBase.SpawnListEntry> getPossibleCreatures(EnumCreatureType creatureType, BlockPos pos)
{
    return this.endWorld.getBiomeGenForCoords(pos).getSpawnableList(creatureType);
}
// No structures of the requested kind are generated by this provider,
// so structure lookup always yields null.
public BlockPos getStrongholdGen(World worldIn, String structureName, BlockPos position)
{
    return null;
}
// This provider does not track loaded chunks; the count is always zero.
public int getLoadedChunkCount()
{
    return 0;
}
// Intentionally empty: no structures to recreate for this dimension.
public void recreateStructures(Chunk p_180514_1_, int p_180514_2_, int p_180514_3_)
{
}
// Convenience overload: translate a block position into chunk
// coordinates and delegate to the (x, z) variant.
public Chunk provideChunk(BlockPos blockPosIn)
{
    // A chunk is 16x16 blocks, so block >> 4 yields the chunk coordinate.
    int chunkX = blockPosIn.getX() >> 4;
    int chunkZ = blockPosIn.getZ() >> 4;
    return this.provideChunk(chunkX, chunkZ);
}
}
| |
package com.thaiopensource.relaxng.output.rng;
import com.thaiopensource.relaxng.edit.AbstractVisitor;
import com.thaiopensource.relaxng.edit.Annotated;
import com.thaiopensource.relaxng.edit.AnnotationChild;
import com.thaiopensource.relaxng.edit.AnyNameNameClass;
import com.thaiopensource.relaxng.edit.AttributeAnnotation;
import com.thaiopensource.relaxng.edit.AttributePattern;
import com.thaiopensource.relaxng.edit.ChoiceNameClass;
import com.thaiopensource.relaxng.edit.Component;
import com.thaiopensource.relaxng.edit.CompositePattern;
import com.thaiopensource.relaxng.edit.Container;
import com.thaiopensource.relaxng.edit.DataPattern;
import com.thaiopensource.relaxng.edit.DefineComponent;
import com.thaiopensource.relaxng.edit.DivComponent;
import com.thaiopensource.relaxng.edit.ElementAnnotation;
import com.thaiopensource.relaxng.edit.ExternalRefPattern;
import com.thaiopensource.relaxng.edit.GrammarPattern;
import com.thaiopensource.relaxng.edit.IncludeComponent;
import com.thaiopensource.relaxng.edit.NameClass;
import com.thaiopensource.relaxng.edit.NameClassedPattern;
import com.thaiopensource.relaxng.edit.NameNameClass;
import com.thaiopensource.relaxng.edit.NsNameNameClass;
import com.thaiopensource.relaxng.edit.Param;
import com.thaiopensource.relaxng.edit.Pattern;
import com.thaiopensource.relaxng.edit.UnaryPattern;
import com.thaiopensource.relaxng.edit.ValuePattern;
import com.thaiopensource.relaxng.edit.NamespaceContext;
import com.thaiopensource.util.VoidValue;
import com.thaiopensource.xml.util.WellKnownNamespaces;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Visitor that walks a RELAX NG schema (edit model) and gathers what the
 * serializer needs to know up front: the namespace prefix bindings that
 * must be declared and the datatype library in use.
 */
class Analyzer extends AbstractVisitor {
  /**
   * Common handling for any annotated node: record its namespace context
   * (only when it actually carries annotations) and recurse into the
   * annotation attributes and child/following annotation elements.
   */
  private VoidValue visitAnnotated(Annotated anno) {
    if (anno.getAttributeAnnotations().size() > 0
        || anno.getChildElementAnnotations().size() > 0
        || anno.getFollowingElementAnnotations().size() > 0)
      noteContext(anno.getContext());
    visitAnnotationAttributes(anno.getAttributeAnnotations());
    visitAnnotationChildren(anno.getChildElementAnnotations());
    visitAnnotationChildren(anno.getFollowingElementAnnotations());
    return VoidValue.VOID;
  }
  // Record the prefix binding of every namespace-qualified annotation attribute.
  private void visitAnnotationAttributes(List<AttributeAnnotation> list) {
    for (int i = 0, len = list.size(); i < len; i++) {
      AttributeAnnotation att = list.get(i);
      if (att.getNamespaceUri().length() != 0)
        noteNs(att.getPrefix(), att.getNamespaceUri());
    }
  }
  // Recursively record bindings used by annotation elements and their
  // attributes/children; non-element annotation children are skipped.
  private void visitAnnotationChildren(List<AnnotationChild> list) {
    for (int i = 0, len = list.size(); i < len; i++) {
      AnnotationChild ac = list.get(i);
      if (ac instanceof ElementAnnotation) {
        ElementAnnotation elem = (ElementAnnotation)ac;
        if (elem.getPrefix() != null)
          noteNs(elem.getPrefix(), elem.getNamespaceUri());
        visitAnnotationAttributes(elem.getAttributes());
        visitAnnotationChildren(elem.getChildren());
      }
    }
  }
  public VoidValue visitPattern(Pattern p) {
    return visitAnnotated(p);
  }
  public VoidValue visitDefine(DefineComponent c) {
    visitAnnotated(c);
    return c.getBody().accept(this);
  }
  public VoidValue visitDiv(DivComponent c) {
    visitAnnotated(c);
    return visitContainer(c);
  }
  public VoidValue visitInclude(IncludeComponent c) {
    visitAnnotated(c);
    // The include's ns attribute may carry the inherited-namespace sentinel.
    noteInheritNs(c.getNs());
    return visitContainer(c);
  }
  public VoidValue visitGrammar(GrammarPattern p) {
    visitAnnotated(p);
    return visitContainer(p);
  }
  // Visit every component of a container (grammar, div, include body).
  private VoidValue visitContainer(Container c) {
    List<Component> list = c.getComponents();
    for (int i = 0, len = list.size(); i < len; i++)
      (list.get(i)).accept(this);
    return VoidValue.VOID;
  }
  public VoidValue visitUnary(UnaryPattern p) {
    visitAnnotated(p);
    return p.getChild().accept(this);
  }
  public VoidValue visitComposite(CompositePattern p) {
    visitAnnotated(p);
    List<Pattern> list = p.getChildren();
    for (int i = 0, len = list.size(); i < len; i++)
      (list.get(i)).accept(this);
    return VoidValue.VOID;
  }
  public VoidValue visitNameClassed(NameClassedPattern p) {
    p.getNameClass().accept(this);
    return visitUnary(p);
  }
  public VoidValue visitAttribute(AttributePattern p) {
    // An attribute with a simple no-namespace name needs no prefix
    // bookkeeping for its name class, so skip straight to the child.
    NameClass nc = p.getNameClass();
    if (nc instanceof NameNameClass
        && ((NameNameClass)nc).getNamespaceUri().equals(""))
      return visitUnary(p);
    return visitNameClassed(p);
  }
  public VoidValue visitChoice(ChoiceNameClass nc) {
    visitAnnotated(nc);
    List<NameClass> list = nc.getChildren();
    for (int i = 0, len = list.size(); i < len; i++)
      (list.get(i)).accept(this);
    return VoidValue.VOID;
  }
  public VoidValue visitValue(ValuePattern p) {
    visitAnnotated(p);
    // "token" with no library is the default and needs no declaration.
    if (!p.getType().equals("token") || !p.getDatatypeLibrary().equals(""))
      noteDatatypeLibrary(p.getDatatypeLibrary());
    for (Map.Entry<String, String> entry : p.getPrefixMap().entrySet()) {
      noteNs(entry.getKey(), entry.getValue());
    }
    return VoidValue.VOID;
  }
  public VoidValue visitData(DataPattern p) {
    visitAnnotated(p);
    noteDatatypeLibrary(p.getDatatypeLibrary());
    Pattern except = p.getExcept();
    if (except != null)
      except.accept(this);
    for (Param param : p.getParams())
      visitAnnotated(param);
    return VoidValue.VOID;
  }
  public VoidValue visitExternalRef(ExternalRefPattern p) {
    visitAnnotated(p);
    noteInheritNs(p.getNs());
    return VoidValue.VOID;
  }
  public VoidValue visitName(NameNameClass nc) {
    visitAnnotated(nc);
    noteNs(nc.getPrefix(), nc.getNamespaceUri());
    return VoidValue.VOID;
  }
  public VoidValue visitAnyName(AnyNameNameClass nc) {
    visitAnnotated(nc);
    NameClass except = nc.getExcept();
    if (except != null)
      except.accept(this);
    return VoidValue.VOID;
  }
  public VoidValue visitNsName(NsNameNameClass nc) {
    visitAnnotated(nc);
    noteInheritNs(nc.getNs());
    NameClass except = nc.getExcept();
    if (except != null)
      except.accept(this);
    return VoidValue.VOID;
  }
  /** First non-empty datatype library seen, or null if none yet. */
  private String datatypeLibrary = null;
  /** Prefix -> namespace URI bindings to declare; first binding of a prefix wins. */
  private final Map<String, String> prefixMap = new HashMap<String, String>();
  /** Set once any namespace turned out to be the inherited-namespace sentinel. */
  private boolean haveInherit = false;
  /** Most recently recorded context; lets noteContext skip repeats. */
  private NamespaceContext lastContext = null;
  /** Candidate namespace for the default (empty) prefix. */
  private String noPrefixNs = null;
  // Keep the first non-empty datatype library encountered.
  private void noteDatatypeLibrary(String uri) {
    if (datatypeLibrary == null || datatypeLibrary.length() == 0)
      datatypeLibrary = uri;
  }
  private void noteInheritNs(String ns) {
    // Identity comparison is deliberate: INHERIT_NS is a sentinel object,
    // not an ordinary string value.
    if (ns == NameClass.INHERIT_NS)
      haveInherit = true;
    else
      noPrefixNs = ns;
  }
  private void noteNs(String prefix, String ns) {
    if (ns == NameClass.INHERIT_NS) { // sentinel identity check, as above
      haveInherit = true;
      return;
    }
    if (prefix == null)
      prefix = "";
    // Skip unusable bindings (null URI, or empty URI on a real prefix)
    // and prefixes already bound — the first binding of a prefix wins.
    if (ns == null || (ns.length() == 0 && prefix.length() != 0) || prefixMap.containsKey(prefix))
      return;
    prefixMap.put(prefix, ns);
  }
  // Record every binding of a namespace context, at most once per context
  // instance (contexts are often shared between adjacent nodes).
  private void noteContext(NamespaceContext context) {
    if (context == null || context == lastContext)
      return;
    lastContext = context;
    for (String prefix : context.getPrefixes())
      noteNs(prefix, context.getNamespace(prefix));
  }
  /**
   * Returns the prefix map to declare. The default (empty) prefix is
   * dropped when the inherited namespace is in play; otherwise it may be
   * bound to the no-prefix namespace. "xml" is always bound to its
   * well-known URI.
   */
  Map<String, String> getPrefixMap() {
    if (haveInherit)
      prefixMap.remove("");
    else if (noPrefixNs != null && !prefixMap.containsKey(""))
      prefixMap.put("", noPrefixNs);
    prefixMap.put("xml", WellKnownNamespaces.XML);
    return prefixMap;
  }
  /** @return the datatype library URI noted during analysis, or null if none. */
  String getDatatypeLibrary() {
    return datatypeLibrary;
  }
}
| |
/**
* Copyright 2016 StreamSets Inc.
*
* Licensed under the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.streamsets.pipeline.stage.processor.http;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Charsets;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Sets;
import com.google.common.io.Resources;
import com.streamsets.pipeline.api.Field;
import com.streamsets.pipeline.api.Processor;
import com.streamsets.pipeline.api.Record;
import com.streamsets.pipeline.config.DataFormat;
import com.streamsets.pipeline.lib.http.HttpMethod;
import com.streamsets.pipeline.sdk.ProcessorRunner;
import com.streamsets.pipeline.sdk.RecordCreator;
import com.streamsets.pipeline.sdk.StageRunner;
import org.glassfish.jersey.server.ResourceConfig;
import org.glassfish.jersey.test.JerseyTest;
import org.glassfish.jersey.test.TestProperties;
import org.junit.Test;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Application;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
/**
 * Integration tests for {@code HttpProcessor}: spins up in-process JAX-RS
 * endpoints via {@link JerseyTest} and verifies the processor's GET/POST
 * handling for TEXT, JSON and XML output formats.
 */
public class HttpProcessorIT extends JerseyTest {
  /**
   * Reads a classpath test resource as a UTF-8 string.
   *
   * @param path classpath-relative resource path
   * @return the resource contents
   * @throws RuntimeException if the resource cannot be read
   */
  private static String getBody(String path) {
    try {
      return Resources.toString(Resources.getResource(path), Charsets.UTF_8);
    } catch (IOException e) {
      // Fix: preserve the original cause so a missing/garbled resource is
      // diagnosable from the stack trace (was previously dropped).
      throw new RuntimeException("Failed to read test resource: " + path, e);
    }
  }

  /** Endpoint serving a canned JSON body for the GET tests. */
  @Path("/test/get")
  @Produces(MediaType.APPLICATION_JSON)
  public static class TestGet {
    @GET
    public Response get() {
      return Response.ok(getBody("http/get_response.json")).build();
    }
  }

  /** Simple JSON-bound request body used by the POST endpoint. */
  public static class TestInput {
    public TestInput() {}

    public TestInput(String hello) {
      this.hello = hello;
    }

    @JsonProperty("hello")
    public String hello;
  }

  /** Endpoint echoing the posted JSON "hello" value back to the caller. */
  @Path("/test/put")
  @Consumes(MediaType.APPLICATION_JSON)
  public static class TestPut {
    @POST
    @Consumes(MediaType.APPLICATION_JSON)
    public Response put(TestInput input) {
      return Response.ok(
          "{\"hello\":\"" + input.hello + "\"}"
      ).build();
    }
  }

  /** Endpoint serving a small XML document for the XML parsing test. */
  @Path("/test/xml/get")
  @Produces(MediaType.APPLICATION_XML)
  public static class TestXmlGet {
    @GET
    public Response get() {
      return Response.ok("<r><e>Hello</e><e>Bye</e></r>").build();
    }
  }

  @Override
  protected Application configure() {
    // Port 0 lets the test container bind any free port.
    forceSet(TestProperties.CONTAINER_PORT, "0");
    return new ResourceConfig(
        Sets.newHashSet(
            TestGet.class,
            TestPut.class,
            TestXmlGet.class
        )
    );
  }

  /** GET with TEXT format: the raw response body lands in /output as a string. */
  @Test
  public void testHttpGet() throws Exception {
    HttpProcessorConfig conf = new HttpProcessorConfig();
    conf.httpMethod = HttpMethod.GET;
    conf.outputField = "/output";
    conf.dataFormat = DataFormat.TEXT;
    conf.resourceUrl = getBaseUri() + "test/get";
    Record record = RecordCreator.create();
    record.set("/", Field.create(new HashMap<String, Field>()));
    List<Record> records = ImmutableList.of(record);
    Processor processor = new HttpProcessor(conf);
    ProcessorRunner runner = new ProcessorRunner.Builder(HttpDProcessor.class, processor)
        .addOutputLane("lane")
        .build();
    runner.runInit();
    try {
      StageRunner.Output output = runner.runProcess(records);
      List<Record> outputRecords = output.getRecords().get("lane");
      assertTrue(runner.getErrorRecords().isEmpty());
      assertEquals(1, outputRecords.size());
      assertTrue(outputRecords.get(0).has("/output"));
      assertEquals("{\"hello\":\"world!\"}", outputRecords.get(0).get("/output").getValueAsString());
    } finally {
      runner.runDestroy();
    }
  }

  /** GET with JSON format: the response is parsed into a map field. */
  @Test
  public void testHttpGetJson() throws Exception {
    HttpProcessorConfig conf = new HttpProcessorConfig();
    conf.httpMethod = HttpMethod.GET;
    conf.outputField = "/output";
    conf.dataFormat = DataFormat.JSON;
    conf.resourceUrl = getBaseUri() + "test/get";
    Record record = RecordCreator.create();
    record.set("/", Field.create(new HashMap<String, Field>()));
    List<Record> records = ImmutableList.of(record);
    Processor processor = new HttpProcessor(conf);
    ProcessorRunner runner = new ProcessorRunner.Builder(HttpDProcessor.class, processor)
        .addOutputLane("lane")
        .build();
    runner.runInit();
    try {
      StageRunner.Output output = runner.runProcess(records);
      List<Record> outputRecords = output.getRecords().get("lane");
      assertTrue(runner.getErrorRecords().isEmpty());
      assertEquals(1, outputRecords.size());
      assertTrue(outputRecords.get(0).has("/output"));
      Map<String, Field> outputMap = outputRecords.get(0).get("/output").getValueAsMap();
      assertTrue(!outputMap.isEmpty());
      assertTrue(outputMap.containsKey("hello"));
      assertEquals("world!", outputMap.get("hello").getValueAsString());
    } finally {
      runner.runDestroy();
    }
  }

  /** POST with JSON body: the echoed response is parsed into a map field. */
  @Test
  public void testHttpPutJson() throws Exception {
    HttpProcessorConfig conf = new HttpProcessorConfig();
    conf.httpMethod = HttpMethod.POST;
    conf.outputField = "/output";
    conf.dataFormat = DataFormat.JSON;
    conf.resourceUrl = getBaseUri() + "test/put";
    conf.headers.put("Content-Type", "application/json");
    conf.requestBody = "{\"hello\":\"world!\"}";
    Record record = RecordCreator.create();
    record.set("/", Field.create(new HashMap<String, Field>()));
    List<Record> records = ImmutableList.of(record);
    Processor processor = new HttpProcessor(conf);
    ProcessorRunner runner = new ProcessorRunner.Builder(HttpDProcessor.class, processor)
        .addOutputLane("lane")
        .build();
    runner.runInit();
    try {
      StageRunner.Output output = runner.runProcess(records);
      List<Record> outputRecords = output.getRecords().get("lane");
      assertTrue(runner.getErrorRecords().isEmpty());
      assertEquals(1, outputRecords.size());
      assertTrue(outputRecords.get(0).has("/output"));
      Map<String, Field> outputMap = outputRecords.get(0).get("/output").getValueAsMap();
      assertTrue(!outputMap.isEmpty());
      assertTrue(outputMap.containsKey("hello"));
      assertEquals("world!", outputMap.get("hello").getValueAsString());
    } finally {
      runner.runDestroy();
    }
  }

  /** GET with XML format: repeated elements become a list under their tag name. */
  @Test
  public void testHttpGetXml() throws Exception {
    HttpProcessorConfig conf = new HttpProcessorConfig();
    conf.httpMethod = HttpMethod.GET;
    conf.outputField = "/output";
    conf.dataFormat = DataFormat.XML;
    conf.resourceUrl = getBaseUri() + "test/xml/get";
    Record record = RecordCreator.create();
    record.set("/", Field.create(new HashMap<String, Field>()));
    List<Record> records = ImmutableList.of(record);
    Processor processor = new HttpProcessor(conf);
    ProcessorRunner runner = new ProcessorRunner.Builder(HttpDProcessor.class, processor)
        .addOutputLane("lane")
        .build();
    runner.runInit();
    try {
      StageRunner.Output output = runner.runProcess(records);
      List<Record> outputRecords = output.getRecords().get("lane");
      assertTrue(runner.getErrorRecords().isEmpty());
      assertEquals(1, outputRecords.size());
      assertTrue(outputRecords.get(0).has("/output"));
      Map<String, Field> outputField = outputRecords.get(0).get("/output").getValueAsMap();
      List<Field> xmlFields = outputField.get("e").getValueAsList();
      assertEquals("Hello", xmlFields.get(0).getValueAsMap().get("value").getValueAsString());
      assertEquals("Bye", xmlFields.get(1).getValueAsMap().get("value").getValueAsString());
    } finally {
      runner.runDestroy();
    }
  }
}
| |
/* Copyright (c) The m-m-m Team, Licensed under the Apache License, Version 2.0
* http://www.apache.org/licenses/LICENSE-2.0 */
package net.sf.mmm.util.datatype.api.color;
import java.util.Objects;
import net.sf.mmm.util.lang.api.AbstractDatatype;
import net.sf.mmm.util.lang.api.BasicHelper;
/**
* This is the {@link net.sf.mmm.util.lang.api.Datatype} for a {@link Color} based on {@link Factor factors}. <br>
* <b>Note:</b><br>
* Use {@link Color} for simple and efficient representation and transport of color information. However, if precision
* is required or for transformation between different {@link ColorModel color models} use this class instead. <br>
* <b>Credits:</b><br>
* The algorithms for transformation of the color models are mainly taken from
* <a href="http://en.wikipedia.org/wiki/HSL_and_HSV">HSL and HSV on wikipedia</a>. <br>
* <b>ATTENTION:</b><br>
* This implementation does not support color profiles or the Adobe RGB color space.
*
* @author Joerg Hohwiller (hohwille at users.sourceforge.net)
* @since 1.0.0
*/
public final class GenericColor extends AbstractDatatype {

  private static final long serialVersionUID = 3175467633850341788L;

  /** Opacity (factor 1.0 = fully opaque). */
  private Alpha alpha;

  /** Hue angle, shared by the HSB/HSV and HSL models. */
  private Hue hue;

  /** Saturation in the HSB/HSV model. */
  private Saturation saturationHsb;

  /** Saturation in the HSL model. */
  private Saturation saturationHsl;

  /** Brightness (a.k.a. value) of the HSB/HSV model. */
  private Brightness brightness;

  /** Lightness of the HSL model. */
  private Lightness lightness;

  /** Chroma: the spread (max - min) of the RGB factors. */
  private Chroma chroma;

  /** Red part of the RGB model. */
  private Red red;

  /** Blue part of the RGB model. */
  private Blue blue;

  /** Green part of the RGB model. */
  private Green green;

  /**
   * The constructor. Use {@code valueOf} methods to instantiate.
   */
  protected GenericColor() {
    super();
  }

  /**
   * Parses the {@link GenericColor} given as {@link String} representation.
   * Accepted forms: a well-known color name, a hex string, or function
   * notation like "rgb(r,g,b)" / "hsla(h,s,l,a)".
   *
   * @param color is the color as {@link String}.
   * @return the parsed {@link GenericColor}.
   */
  public static GenericColor valueOf(String color) {
    Objects.requireNonNull(color, "color");
    // 1) well-known color name?
    Color namedColor = Color.fromName(color);
    if (namedColor != null) {
      return valueOf(namedColor);
    }
    int length = color.length();
    Throwable cause = null;
    try {
      // 2) hex notation: "#RRGGBB" / "#AARRGGBB"
      Color hexColor = Color.parseHexString(color);
      if (hexColor != null) {
        return valueOf(hexColor);
      }
      // 3) function notation, e.g. "rgb(1,1,1)" (shortest valid form is
      // 10 chars); the length guard keeps the substring calls in range.
      if (length >= 7) {
        // first three characters name the color model (rgb/hsl/hsb/hsv)
        String model = BasicHelper.toUpperCase(color.substring(0, 3));
        ColorModel colorModel = ColorModel.valueOf(model);
        int index = 3;
        boolean hasAlpha = false;
        char c = Character.toLowerCase(color.charAt(index));
        if (c == 'a') {
          // a trailing 'a' in the model name means an alpha segment follows
          hasAlpha = true;
          index++;
          c = color.charAt(index);
        }
        if (c == '(') {
          index++;
          int endIndex = color.indexOf(',', index);
          if (endIndex > 0) {
            String firstSegment = color.substring(index, endIndex).trim();
            index = endIndex + 1;
            endIndex = color.indexOf(',', index);
            if (endIndex > 0) {
              String secondSegment = color.substring(index, endIndex).trim();
              index = endIndex + 1;
              if (hasAlpha) {
                endIndex = color.indexOf(',', index);
              } else {
                // without alpha the third segment runs to the closing ')'
                endIndex = length - 1;
              }
              if (endIndex > 0) {
                String thirdSegment = color.substring(index, endIndex).trim();
                Alpha alpha;
                if (hasAlpha) {
                  alpha = new Alpha(color.substring(endIndex + 1, length - 1));
                } else {
                  alpha = Alpha.OPAQUE;
                }
                switch (colorModel) {
                  case RGB:
                    return valueOf(new Red(firstSegment), new Green(secondSegment), new Blue(thirdSegment), alpha);
                  case HSL:
                    return valueOf(new Hue(firstSegment), new Saturation(secondSegment), new Lightness(thirdSegment), alpha);
                  case HSV:
                  case HSB:
                    return valueOf(new Hue(firstSegment), new Saturation(secondSegment), new Brightness(thirdSegment), alpha);
                  default:
                    throw new IllegalStateException("" + colorModel);
                }
              }
            }
          }
        }
      }
    } catch (RuntimeException e) {
      // remember the parse failure so the rethrown exception is diagnosable
      cause = e;
    }
    throw new IllegalArgumentException(color, cause);
  }

  /**
   * Converts the given {@link Color} to a {@link GenericColor}.
   *
   * @param color is the discrete RGBA {@link Color}.
   * @return the corresponding {@link GenericColor}.
   */
  public static GenericColor valueOf(Color color) {
    Objects.requireNonNull(color, "color");
    Red red = new Red(color.getRed());
    Green green = new Green(color.getGreen());
    Blue blue = new Blue(color.getBlue());
    Alpha alpha = new Alpha(color.getAlpha());
    return valueOf(red, green, blue, alpha);
  }

  /**
   * Creates a {@link GenericColor} from the given {@link Segment}s of {@link ColorModel#RGB}.
   * All other model values (HSB, HSL, chroma) are derived here.
   *
   * @param red is the {@link Red} part.
   * @param green is the {@link Green} part.
   * @param blue is the {@link Blue} part.
   * @param alpha is the {@link Alpha} value.
   * @return the {@link GenericColor}.
   */
  public static GenericColor valueOf(Red red, Green green, Blue blue, Alpha alpha) {
    Objects.requireNonNull(red, "red");
    Objects.requireNonNull(green, "green");
    Objects.requireNonNull(blue, "blue");
    Objects.requireNonNull(alpha, "alpha");
    GenericColor genericColor = new GenericColor();
    genericColor.red = red;
    genericColor.green = green;
    genericColor.blue = blue;
    genericColor.alpha = alpha;
    // calculate min/max of the RGB factors
    double r = red.getValueAsFactor();
    double g = green.getValueAsFactor();
    double b = blue.getValueAsFactor();
    double max = r;
    if (g > max) {
      max = g;
    }
    if (b > max) {
      max = b;
    }
    double min = r;
    if (g < min) {
      min = g;
    }
    if (b < min) {
      min = b;
    }
    // chroma is the spread of the RGB factors
    double chroma = max - min;
    genericColor.chroma = new Chroma(chroma);
    double hue = calculateHue(r, g, b, max, chroma);
    genericColor.hue = new Hue(hue);
    // HSB saturation: chroma relative to brightness (0 for black)
    double s;
    if (max == 0) {
      s = 0;
    } else {
      s = chroma / max;
    }
    genericColor.saturationHsb = new Saturation(s);
    // HSL lightness is the mid-point of min and max
    double lightness = (max + min) / 2;
    genericColor.lightness = new Lightness(lightness);
    double saturationHsl = calculateSaturationHsl(chroma, lightness);
    genericColor.saturationHsl = new Saturation(saturationHsl);
    // HSB brightness (value) is the maximum RGB factor
    genericColor.brightness = new Brightness(max);
    return genericColor;
  }

  /**
   * Calculate the {@link Saturation} for {@link ColorModel#HSL}.
   *
   * @param chroma is the {@link Chroma} value.
   * @param lightness is the {@link Lightness} value.
   * @return the {@link Saturation}.
   */
  private static double calculateSaturationHsl(double chroma, double lightness) {
    double d = 1 - Math.abs(2 * lightness - 1);
    if (d == 0) {
      // pure black or white: saturation is 0 by convention (avoids /0)
      return 0;
    }
    return chroma / d;
  }

  /**
   * Calculate the {@link Hue}.
   *
   * @param red is the {@link Red} value.
   * @param green is the {@link Green} value.
   * @param blue is the {@link Blue} value.
   * @param max is the maximum of RGB.
   * @param chroma is the {@link Chroma} value.
   * @return the hue value in degrees (NOTE: earlier Javadoc wrongly said
   *         this returns the saturation).
   */
  private static double calculateHue(double red, double green, double blue, double max, double chroma) {
    if (chroma == 0) {
      // achromatic (gray): hue is undefined, use 0 by convention
      return 0;
    } else {
      double hue;
      // pick the sextant based on which channel is the maximum
      if (red == max) {
        hue = (green - blue) / chroma;
      } else if (green == max) {
        hue = (blue - red) / chroma + 2;
      } else {
        hue = (red - green) / chroma + 4;
      }
      // scale sextant position (each sextant spans 60 degrees)
      hue = hue * 60.0;
      if (hue < 0) {
        // wrap negative angles back into the valid hue range
        hue = hue + Hue.MAX_VALUE;
      }
      return hue;
    }
  }

  /**
   * Creates a {@link GenericColor} from the given {@link Segment}s of {@link ColorModel#HSB}.
   *
   * @param hue is the {@link Hue} part.
   * @param saturation is the {@link Saturation} part.
   * @param brightness is the {@link Brightness} part.
   * @param alpha is the {@link Alpha} value.
   * @return the {@link GenericColor}.
   */
  public static GenericColor valueOf(Hue hue, Saturation saturation, Brightness brightness, Alpha alpha) {
    Objects.requireNonNull(hue, "hue");
    Objects.requireNonNull(saturation, "saturation");
    Objects.requireNonNull(brightness, "brightness");
    Objects.requireNonNull(alpha, "alpha");
    GenericColor genericColor = new GenericColor();
    genericColor.hue = hue;
    genericColor.saturationHsb = saturation;
    genericColor.brightness = brightness;
    genericColor.alpha = alpha;
    double b = brightness.getValueAsFactor();
    double chroma = b * saturation.getValueAsFactor();
    genericColor.chroma = new Chroma(chroma);
    // minimum RGB factor; max is b, so lightness is their mid-point
    double min = b - chroma;
    double lightness = (min + b) / 2;
    genericColor.lightness = new Lightness(lightness);
    double saturationHsl = calculateSaturationHsl(chroma, lightness);
    genericColor.saturationHsl = new Saturation(saturationHsl);
    calculateRgb(genericColor, hue, min, chroma);
    return genericColor;
  }

  /**
   * Creates a {@link GenericColor} from the given {@link Segment}s of {@link ColorModel#HSL}.
   *
   * @param hue is the {@link Hue} part.
   * @param saturation is the {@link Saturation} part.
   * @param lightness is the {@link Lightness} part.
   * @param alpha is the {@link Alpha} value.
   * @return the {@link GenericColor}.
   */
  public static GenericColor valueOf(Hue hue, Saturation saturation, Lightness lightness, Alpha alpha) {
    Objects.requireNonNull(hue, "hue");
    Objects.requireNonNull(saturation, "saturation");
    Objects.requireNonNull(lightness, "lightness");
    Objects.requireNonNull(alpha, "alpha");
    GenericColor genericColor = new GenericColor();
    genericColor.hue = hue;
    genericColor.saturationHsl = saturation;
    genericColor.lightness = lightness;
    genericColor.alpha = alpha;
    double l = lightness.getValueAsFactor();
    double chroma;
    // chroma peaks at l == 0.5 and falls off towards black and white
    if (l >= 0.5) {
      chroma = saturation.getValueAsFactor() * (2 - 2 * l);
    } else {
      chroma = saturation.getValueAsFactor() * 2 * l;
    }
    // m is the minimum RGB factor
    double m = l - (chroma / 2);
    double saturationHsb;
    // brightness (max RGB factor) is chroma above the minimum
    double b = chroma + m;
    genericColor.brightness = new Brightness(b);
    if (l == 0) {
      // black: avoid division by zero
      saturationHsb = 0;
    } else {
      saturationHsb = chroma / b;
    }
    genericColor.saturationHsb = new Saturation(saturationHsb);
    calculateRgb(genericColor, hue, m, chroma);
    return genericColor;
  }

  /**
   * Calculates and the RGB values and sets them in the given {@link GenericColor}.
   *
   * @param genericColor is the {@link GenericColor} to complete.
   * @param hue is the {@link Hue} value.
   * @param min is the minimum {@link Factor} of R/G/B.
   * @param chroma is the {@link Chroma} value.
   */
  private static void calculateRgb(GenericColor genericColor, Hue hue, double min, double chroma) {
    genericColor.chroma = new Chroma(chroma);
    // hue scaled to a sextant index in [0, 6)
    double hueX = hue.getValue().doubleValue() / 60;
    // second-largest component (before the min offset is added)
    double x = chroma * (1 - Math.abs((hueX % 2) - 1));
    double red, green, blue;
    if (hueX < 1) {
      // sextant 0: red carries the maximum, green the intermediate
      red = chroma + min;
      green = x + min;
      blue = min;
    } else if (hueX < 2) {
      // sextant 1: green maximum, red intermediate
      red = x + min;
      green = chroma + min;
      blue = min;
    } else if (hueX < 3) {
      // sextant 2: green maximum, blue intermediate
      red = min;
      green = chroma + min;
      blue = x + min;
    } else if (hueX < 4) {
      // sextant 3: blue maximum, green intermediate
      red = min;
      green = x + min;
      blue = chroma + min;
    } else if (hueX < 5) {
      // sextant 4: blue maximum, red intermediate
      red = x + min;
      green = min;
      blue = chroma + min;
    } else {
      // sextant 5: red maximum, blue intermediate
      red = chroma + min;
      green = min;
      blue = x + min;
    }
    genericColor.red = new Red(red);
    genericColor.green = new Green(green);
    genericColor.blue = new Blue(blue);
  }

  /**
   * @return the {@link Alpha alpha value as factor}.
   */
  public Alpha getAlpha() {
    return this.alpha;
  }

  /**
   * @return the {@link Hue}.
   */
  public Hue getHue() {
    return this.hue;
  }

  /**
   * @see ColorSegmentType#SATURATION_HSB
   * @return the {@link Saturation} in {@link ColorModel#HSB}/{@link ColorModel#HSV} color model (hexcone).
   */
  public Saturation getSaturationHsb() {
    return this.saturationHsb;
  }

  /**
   * @see ColorSegmentType#SATURATION_HSL
   * @return the {@link Saturation} in {@link ColorModel#HSL} {@link ColorModel color model} (bi-hexcone).
   */
  public Saturation getSaturationHsl() {
    return this.saturationHsl;
  }

  /**
   * @return the brightness
   */
  public Brightness getBrightness() {
    return this.brightness;
  }

  /**
   * @return the lightness
   */
  public Lightness getLightness() {
    return this.lightness;
  }

  /**
   * @return the chroma
   */
  public Chroma getChroma() {
    return this.chroma;
  }

  /**
   * @return the red
   */
  public Red getRed() {
    return this.red;
  }

  /**
   * @return the blue
   */
  public Blue getBlue() {
    return this.blue;
  }

  /**
   * @return the green
   */
  public Green getGreen() {
    return this.green;
  }

  /**
   * @param type is the {@link ColorSegmentType} identifying the requested {@link Segment}.
   * @return the {@link Segment} of the given {@code type}.
   */
  public AbstractDoubleSegment<?> getSegment(ColorSegmentType type) {
    Objects.requireNonNull(type, "type");
    switch (type) {
      case RED:
        return this.red;
      case GREEN:
        return this.green;
      case BLUE:
        return this.blue;
      case HUE:
        return this.hue;
      case SATURATION_HSB:
        return this.saturationHsb;
      case SATURATION_HSL:
        return this.saturationHsl;
      case BRIGHTNESS:
        return this.brightness;
      case LIGHTNESS:
        return this.lightness;
      case ALPHA:
        return this.alpha;
      default:
        throw new IllegalStateException("" + type);
    }
  }

  /**
   * @return the converted {@link Color} corresponding to this {@link GenericColor}.
   */
  public Color toColor() {
    return new Color(this.red.getValueAsByte(), this.green.getValueAsByte(), this.blue.getValueAsByte(), this.alpha.getValueAsByte());
  }

  /**
   * @param model the {@link ColorModel} indicating the {@link Segment}s to {@link AbstractDoubleSegment#invert()
   *        invert}. Typically {@link ColorModel#RGB} to build the complement of the color.
   * @return the complementary (or inverse) color.
   */
  public GenericColor invert(ColorModel model) {
    switch (model) {
      case RGB:
        return valueOf(this.red.invert(), this.green.invert(), this.blue.invert(), this.alpha);
      case HSL:
        return valueOf(this.hue.invert(), this.saturationHsl.invert(), this.lightness.invert(), this.alpha);
      case HSB:
      case HSV:
        return valueOf(this.hue.invert(), this.saturationHsb.invert(), this.brightness.invert(), this.alpha);
      default:
        throw new IllegalStateException("" + model);
    }
  }

  /**
   * Lightens this color by the given {@code factor}.
   *
   * @param factor is the factor to increase by. E.g. {@code 0.0} will cause no change, while {@code 1.0} will return
   *        {@link Color#WHITE white}.
   * @return a new color lighter by the given {@code factor}.
   */
  public GenericColor lighten(ColorFactor factor) {
    return valueOf(this.red.increase(factor), this.green.increase(factor), this.blue.increase(factor), this.alpha);
  }

  /**
   * Darkens this color by the given {@code factor}.
   *
   * @param factor is the factor to decrease by. E.g. {@code 0.0} will cause no change, while {@code 1.0} will return
   *        {@link Color#BLACK black}.
   * @return a new color darker by the given {@code factor}.
   */
  public GenericColor darken(ColorFactor factor) {
    return valueOf(this.red.decrease(factor), this.green.decrease(factor), this.blue.decrease(factor), this.alpha);
  }

  @Override
  public String toString() {
    return toString(ColorModel.RGB);
  }

  /**
   * @param colorModel is the {@link ColorModel}.
   * @return this color as {@link String} in notation of the given {@link ColorModel} (e.g. "rgba(255, 128, 64, 1.0)"
   *         for {@link ColorModel#RGB}).
   */
  public String toString(ColorModel colorModel) {
    // always emits the alpha variant, e.g. "rgba(...)" / "hsla(...)"
    StringBuilder buffer = new StringBuilder(BasicHelper.toLowerCase(colorModel.toString()));
    buffer.append("a(");
    buffer.append(getSegment(colorModel.getFirstSegmentType()));
    buffer.append(',');
    buffer.append(getSegment(colorModel.getSecondSegmentType()));
    buffer.append(',');
    buffer.append(getSegment(colorModel.getThirdSegmentType()));
    buffer.append(',');
    buffer.append(this.alpha);
    buffer.append(')');
    return buffer.toString();
  }

  @Override
  public int hashCode() {
    // NOTE(review): includes derived segments (chroma, brightness, ...)
    // and dereferences fields directly; instances built via valueOf have
    // all fields non-null — confirm no other construction path exists.
    final int prime = 31;
    int result = 1;
    result = prime * result + this.alpha.hashCode();
    result = prime * result + this.blue.hashCode();
    result = prime * result + this.brightness.hashCode();
    result = prime * result + this.chroma.hashCode();
    result = prime * result + this.green.hashCode();
    result = prime * result + this.hue.hashCode();
    result = prime * result + this.lightness.hashCode();
    result = prime * result + this.red.hashCode();
    result = prime * result + this.saturationHsb.hashCode();
    result = prime * result + this.saturationHsl.hashCode();
    return result;
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj) {
      return true;
    }
    if (obj == null) {
      return false;
    }
    if (getClass() != obj.getClass()) {
      return false;
    }
    GenericColor other = (GenericColor) obj;
    if (!Objects.equals(this.alpha, other.alpha)) {
      return false;
    }
    if (!Objects.equals(this.red, other.red)) {
      return false;
    }
    if (!Objects.equals(this.green, other.green)) {
      return false;
    }
    if (!Objects.equals(this.blue, other.blue)) {
      return false;
    }
    if (!Objects.equals(this.brightness, other.brightness)) {
      return false;
    }
    if (!Objects.equals(this.chroma, other.chroma)) {
      return false;
    }
    if (!Objects.equals(this.hue, other.hue)) {
      return false;
    }
    if (!Objects.equals(this.lightness, other.lightness)) {
      return false;
    }
    if (!Objects.equals(this.saturationHsb, other.saturationHsb)) {
      return false;
    }
    if (!Objects.equals(this.saturationHsl, other.saturationHsl)) {
      return false;
    }
    return true;
  }
}
| |
/* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pluto.container.om.portlet.impl;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import org.apache.pluto.container.om.portlet.Description;
import org.apache.pluto.container.om.portlet.DisplayName;
import org.apache.pluto.container.om.portlet.Listener;
/**
 * A single listener declaration. Holds the fully qualified listener class
 * name together with its localized descriptions and display names, plus an
 * ordinal used for ordering listener invocation.
 *
 * <p>Identity (equals/hashCode) is based on the listener class name only.
 *
 * @author Scott Nicklous
 */
public class ListenerImpl implements Listener {

    /** Localized descriptions of this listener. */
    private final List<Description> descs = new ArrayList<Description>();
    /** Localized display names of this listener. */
    private final List<DisplayName> dispNames = new ArrayList<DisplayName>();
    /** Fully qualified listener class name; never null. */
    private String listenerClass = "";
    /** Listener name; never null. */
    private String listenerName = "";
    /** Ordering hint for listener invocation. */
    private int ordinal = 0;

    /**
     * Copy constructor. Deep-copies the descriptions and display names so
     * the new instance is independent of the source listener.
     *
     * @param lis the listener to copy
     */
    public ListenerImpl(Listener lis) {
        for (Description desc : lis.getDescriptions()) {
            descs.add(new DescriptionImpl(desc));
        }
        for (DisplayName disp : lis.getDisplayNames()) {
            dispNames.add(new DisplayNameImpl(disp));
        }
        listenerClass = lis.getListenerClass();
        listenerName = lis.getListenerName();
        ordinal = lis.getOrdinal();
    }

    /**
     * Constructor.
     *
     * @param cls fully qualified listener class name
     */
    public ListenerImpl(String cls) {
        this.listenerClass = cls;
    }

    /**
     * Returns a copy of the description for the given locale, or
     * {@code null} if none matches. If several descriptions share the same
     * locale, the last one registered wins (preserved historical behavior).
     *
     * @see org.apache.pluto.container.om.portlet.Listener#getDescription(java.util.Locale)
     */
    @Override
    public Description getDescription(Locale locale) {
        Description ret = null;
        for (Description item : descs) {
            if (item.getLocale().equals(locale)) {
                ret = new DescriptionImpl(item);
            }
        }
        return ret;
    }

    /**
     * Returns a defensive copy of all descriptions.
     *
     * @see org.apache.pluto.container.om.portlet.Listener#getDescriptions()
     */
    @Override
    public List<Description> getDescriptions() {
        return new ArrayList<Description>(descs);
    }

    /**
     * Registers an additional description. The reference is stored as-is
     * (no defensive copy), matching the historical behavior.
     *
     * @see org.apache.pluto.container.om.portlet.Listener#addDescription(org.apache.pluto.container.om.portlet.Description)
     */
    @Override
    public void addDescription(Description desc) {
        descs.add(desc);
    }

    /**
     * Returns a copy of the display name for the given locale, or
     * {@code null} if none matches. If several display names share the same
     * locale, the last one registered wins (preserved historical behavior).
     *
     * @see org.apache.pluto.container.om.portlet.Listener#getDisplayName(java.util.Locale)
     */
    @Override
    public DisplayName getDisplayName(Locale locale) {
        DisplayName ret = null;
        for (DisplayName item : dispNames) {
            if (item.getLocale().equals(locale)) {
                ret = new DisplayNameImpl(item);
            }
        }
        return ret;
    }

    /**
     * Returns a defensive copy of all display names.
     *
     * @see org.apache.pluto.container.om.portlet.Listener#getDisplayNames()
     */
    @Override
    public List<DisplayName> getDisplayNames() {
        return new ArrayList<DisplayName>(dispNames);
    }

    /**
     * Registers an additional display name. The reference is stored as-is
     * (no defensive copy), matching the historical behavior.
     *
     * @see org.apache.pluto.container.om.portlet.Listener#addDisplayName(org.apache.pluto.container.om.portlet.DisplayName)
     */
    @Override
    public void addDisplayName(DisplayName disp) {
        dispNames.add(disp);
    }

    /**
     * @return the fully qualified listener class name
     * @see org.apache.pluto.container.om.portlet.Listener#getListenerClass()
     */
    @Override
    public String getListenerClass() {
        return listenerClass;
    }

    /**
     * @param listenerClass the fully qualified listener class name to set
     * @see org.apache.pluto.container.om.portlet.Listener#setListenerClass(java.lang.String)
     */
    @Override
    public void setListenerClass(String listenerClass) {
        this.listenerClass = listenerClass;
    }

    /**
     * @return the listenerName
     */
    @Override
    public String getListenerName() {
        return listenerName;
    }

    /**
     * @param listenerName the listenerName to set
     */
    @Override
    public void setListenerName(String listenerName) {
        this.listenerName = listenerName;
    }

    /**
     * @return the ordinal
     */
    @Override
    public int getOrdinal() {
        return ordinal;
    }

    /**
     * @param ordinal the ordinal to set
     */
    @Override
    public void setOrdinal(int ordinal) {
        this.ordinal = ordinal;
    }

    /**
     * Hash code based only on the listener class, consistent with
     * {@link #equals(Object)}.
     *
     * @see java.lang.Object#hashCode()
     */
    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + ((listenerClass == null) ? 0 : listenerClass.hashCode());
        return result;
    }

    /**
     * Equality based only on the listener class name.
     *
     * @see java.lang.Object#equals(java.lang.Object)
     */
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        ListenerImpl other = (ListenerImpl) obj;
        return (listenerClass == null) ? other.listenerClass == null
                                       : listenerClass.equals(other.listenerClass);
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.protocol;
import java.io.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hdfs.server.common.GenerationStamp;
import org.apache.hadoop.io.*;
/**************************************************
* A Block is a Hadoop FS primitive, identified by a
* long.
*
**************************************************/
// A Hadoop FS block descriptor. Serialized via Writable; the field order in
// writeHelper/readHelper and in writeId/readId defines the wire format and
// the two orders intentionally differ — do not reorder.
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class Block implements Writable, Comparable<Block> {
/** Prefix of every block file name on disk ("blk_<id>"). */
public static final String BLOCK_FILE_PREFIX = "blk_";
/** Extension of the per-block metadata file. */
public static final String METADATA_EXTENSION = ".meta";
static { // register a ctor
WritableFactories.setFactory
(Block.class,
new WritableFactory() {
@Override
public Writable newInstance() { return new Block(); }
});
}
// Matches "blk_<id>"; the id may be negative.
public static final Pattern blockFilePattern = Pattern
.compile(BLOCK_FILE_PREFIX + "(-??\\d++)$");
// Matches "blk_<id>_<genstamp>.meta"; group 1 is the id, group 2 the stamp.
public static final Pattern metaFilePattern = Pattern
.compile(BLOCK_FILE_PREFIX + "(-??\\d++)_(\\d++)\\" + METADATA_EXTENSION
+ "$");
/** @return true if the file's name looks like a block file. */
public static boolean isBlockFilename(File f) {
String name = f.getName();
return blockFilePattern.matcher(name).matches();
}
/**
 * Parses the block id from a block file name.
 * Returns 0 (not an error value) when the name does not match the pattern.
 */
public static long filename2id(String name) {
Matcher m = blockFilePattern.matcher(name);
return m.matches() ? Long.parseLong(m.group(1)) : 0;
}
/** @return true if the name looks like a block metadata file. */
public static boolean isMetaFilename(String name) {
return metaFilePattern.matcher(name).matches();
}
/**
 * Get generation stamp from the name of the metafile name.
 * Falls back to the pre-generation-stamp ("grandfather") value for names
 * that do not match the metafile pattern.
 */
public static long getGenerationStamp(String metaFile) {
Matcher m = metaFilePattern.matcher(metaFile);
return m.matches() ? Long.parseLong(m.group(2))
: GenerationStamp.GRANDFATHER_GENERATION_STAMP;
}
/**
 * Get the blockId from the name of the metafile name.
 * Returns 0 when the name does not match the metafile pattern.
 */
public static long getBlockId(String metaFile) {
Matcher m = metaFilePattern.matcher(metaFile);
return m.matches() ? Long.parseLong(m.group(1)) : 0;
}
// Unique identifier of this block.
protected long blockId;
protected long sid; // sid = rtc clock
protected long numBytes; // block length in bytes; -1 appears to mean "unknown"
protected long generationStamp; // version stamp of the block's data
public Block() {this(0, -1, 0, 0);}
// sid defaults to -1 for all constructors that do not take it explicitly.
public Block(final long blkid, final long len, final long generationStamp) {
this(blkid, -1, len, generationStamp);
}
public Block(final long blkid) {
this(blkid, -1, 0, GenerationStamp.GRANDFATHER_GENERATION_STAMP);
}
public Block(final long blkid, final long sid, final long len, final long generationStamp) {
this.blockId = blkid;
this.sid = sid;
this.numBytes = len;
this.generationStamp = generationStamp;
}
// Copy constructor: copies all four fields including sid.
public Block(Block blk) {
this(blk.blockId, blk.sid, blk.numBytes, blk.generationStamp);
}
/**
 * Find the blockid from the given filename
 */
public Block(File f, long len, long genstamp) {
this(filename2id(f.getName()), -1, len, genstamp);
}
// NOTE(review): set() does not touch sid — presumably intentional, since sid
// has its own setter; confirm before relying on it.
public void set(long blkid, long len, long genStamp) {
this.blockId = blkid;
this.numBytes = len;
this.generationStamp = genStamp;
}
/** @return the unique block id. */
public long getBlockId() {
return blockId;
}
public void setBlockId(long bid) {
blockId = bid;
}
/** @return the block file name including the "[sid]" suffix, e.g. "blk_123[-1]". */
public String getBlockName() {
return BLOCK_FILE_PREFIX + String.valueOf(blockId) + getSid();
}
/** @return the block length in bytes. */
public long getNumBytes() {
return numBytes;
}
public void setNumBytes(long len) {
this.numBytes = len;
}
public long getGenerationStamp() {
return generationStamp;
}
public void setGenerationStamp(long stamp) {
generationStamp = stamp;
}
/** @return "blk_<id>[<sid>]_<genstamp>". */
@Override
public String toString() {
return getBlockName() + "_" + getGenerationStamp();
}
// NOTE(review): unlike toString()/getBlockName(), this omits the "[sid]"
// part ("blk_<id>_<genstamp>") — confirm the two formats are intentionally
// different before unifying them.
public void appendStringTo(StringBuilder sb) {
sb.append(BLOCK_FILE_PREFIX)
.append(blockId)
.append("_")
.append(getGenerationStamp());
}
/** @return the sid formatted as "[<sid>]" for use in block names. */
public String getSid() {
return "[" + String.valueOf(sid) + "]";
}
/** @return the formatted default sid, matching a sid of -1. */
public static String getDefaultSid() {
return "[-1]";
}
public long getLongSid() {
return sid;
}
public void setSid(long id) {
sid = id;
}
/////////////////////////////////////
// Writable
/////////////////////////////////////
@Override // Writable
public void write(DataOutput out) throws IOException {
writeHelper(out);
}
@Override // Writable
public void readFields(DataInput in) throws IOException {
readHelper(in);
}
// Full serialization. Wire order: blockId, numBytes, generationStamp, sid.
// Must stay in sync with readHelper.
final void writeHelper(DataOutput out) throws IOException {
out.writeLong(blockId);
out.writeLong(numBytes);
out.writeLong(generationStamp);
out.writeLong(sid);
}
final void readHelper(DataInput in) throws IOException {
this.blockId = in.readLong();
this.numBytes = in.readLong();
this.generationStamp = in.readLong();
this.sid = in.readLong();
// Reject corrupt or truncated input early.
if (numBytes < 0) {
throw new IOException("Unexpected block size: " + numBytes);
}
}
// write only the identifier part of the block
// Wire order here is blockId, sid, generationStamp — DIFFERENT from
// writeHelper. Must stay in sync with readId.
public void writeId(DataOutput out) throws IOException {
out.writeLong(blockId);
out.writeLong(sid);
out.writeLong(generationStamp);
}
// Read only the identifier part of the block
public void readId(DataInput in) throws IOException {
this.blockId = in.readLong();
this.sid = in.readLong();
this.generationStamp = in.readLong();
}
// Orders by blockId, then sid. Generation stamp is deliberately excluded.
@Override // Comparable
public int compareTo(Block b) {
return blockId < b.blockId ? -1 :
blockId > b.blockId ? 1 :
sid > b.sid? 1:
sid < b.sid? -1:0;
}
// Equality follows compareTo: blockId and sid only. hashCode() uses only
// blockId, which still satisfies the equals/hashCode contract (equal
// objects share a blockId and therefore a hash).
@Override // Object
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (!(o instanceof Block)) {
return false;
}
return compareTo((Block)o) == 0;
}
/**
 * @return true if the two blocks have the same block ID and the same
 * generation stamp, or if both blocks are null.
 */
public static boolean matchingIdAndGenStamp(Block a, Block b) {
if (a == b) return true; // same block, or both null
if (a == null || b == null) return false; // only one null
return a.blockId == b.blockId &&
a.generationStamp == b.generationStamp;
}
@Override // Object
public int hashCode() {
//GenerationStamp is IRRELEVANT and should not be used here
return (int)(blockId^(blockId>>>32));
}
}
| |
/*
* Copyright 2013-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.ocaml;
import com.facebook.buck.cxx.CxxHeaders;
import com.facebook.buck.cxx.CxxPreprocessorInput;
import com.facebook.buck.cxx.CxxSource;
import com.facebook.buck.cxx.NativeLinkableInput;
import com.facebook.buck.cxx.Preprocessor;
import com.facebook.buck.io.ProjectFilesystem;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.BuildTargets;
import com.facebook.buck.model.UnflavoredBuildTarget;
import com.facebook.buck.rules.BuildRule;
import com.facebook.buck.rules.PathSourcePath;
import com.facebook.buck.rules.RuleKeyAppendable;
import com.facebook.buck.rules.RuleKeyObjectSink;
import com.facebook.buck.rules.SourcePath;
import com.facebook.buck.rules.SourcePathResolver;
import com.facebook.buck.rules.Tool;
import com.facebook.buck.rules.args.Arg;
import com.facebook.buck.util.MoreIterables;
import com.facebook.buck.util.immutables.BuckStyleImmutable;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.Iterables;
import org.immutables.value.Value;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import java.util.Optional;
/**
* OCaml build context
*
* OCaml has two build modes, "native" (ocamlopt) and "bytecode" (ocamlc), and that terminology is
* used throughout this file -- not to be confused with the "native" terminology used in
* com.facebook.buck.cxx.NativeLinkableInput.
*/
// Immutables generates the concrete OcamlBuildContext class and its Builder
// from the abstract accessors below; their names and signatures are part of
// that generated contract.
@Value.Immutable
@BuckStyleImmutable
abstract class AbstractOcamlBuildContext implements RuleKeyAppendable {
// Output subdirectory names, relative to the parent of the native output.
static final String OCAML_COMPILED_BYTECODE_DIR = "bc";
static final String OCAML_COMPILED_DIR = "opt";
private static final String OCAML_GENERATED_SOURCE_DIR = "gen";
// Fallback C-interop include dir when the buckconfig does not provide one.
static final Path DEFAULT_OCAML_INTEROP_INCLUDE_DIR = Paths.get("/usr/local/lib/ocaml");
public abstract UnflavoredBuildTarget getBuildTarget();
public abstract ProjectFilesystem getProjectFilesystem();
public abstract SourcePathResolver getSourcePathResolver();
// True when building an OCaml library (.cmxa/.cma) rather than an executable.
public abstract boolean isLibrary();
public abstract List<Arg> getFlags();
// All declared source inputs; filtered by extension in the get*Input helpers.
public abstract List<SourcePath> getInput();
public abstract List<String> getNativeIncludes();
public abstract List<String> getBytecodeIncludes();
/**
 * Inputs for the native (ocamlopt) build
 */
public abstract NativeLinkableInput getNativeLinkableInput();
/**
 * Inputs for the bytecode (ocamlc) build
 */
public abstract NativeLinkableInput getBytecodeLinkableInput();
/**
 * Inputs for the C compiler (both builds)
 */
public abstract NativeLinkableInput getCLinkableInput();
public abstract List<OcamlLibrary> getOcamlInput();
public abstract CxxPreprocessorInput getCxxPreprocessorInput();
public abstract ImmutableSortedSet<BuildRule> getNativeCompileDeps();
public abstract ImmutableSortedSet<BuildRule> getBytecodeCompileDeps();
public abstract ImmutableSortedSet<BuildRule> getBytecodeLinkDeps();
// Toolchain entries; Optional because the buckconfig may not define them.
public abstract Optional<Tool> getOcamlDepTool();
public abstract Optional<Tool> getOcamlCompiler();
public abstract Optional<Tool> getOcamlDebug();
public abstract Optional<Tool> getYaccCompiler();
public abstract Optional<Tool> getLexCompiler();
public abstract Optional<Tool> getOcamlBytecodeCompiler();
protected abstract List<String> getCFlags();
protected abstract Optional<String> getOcamlInteropIncludesDir();
protected abstract List<String> getLdFlags();
protected abstract Preprocessor getCPreprocessor();
// The .c sources among the declared inputs, de-duplicated, order preserved.
public ImmutableList<SourcePath> getCInput() {
return FluentIterable.from(getInput())
.filter(OcamlUtil.sourcePathExt(getSourcePathResolver(), OcamlCompilables.OCAML_C))
.toSet()
.asList();
}
// The ocamllex (.mll) sources among the declared inputs.
public ImmutableList<SourcePath> getLexInput() {
return FluentIterable.from(getInput())
.filter(OcamlUtil.sourcePathExt(getSourcePathResolver(), OcamlCompilables.OCAML_MLL))
.toSet()
.asList();
}
// The ocamlyacc (.mly) sources among the declared inputs.
public ImmutableList<SourcePath> getYaccInput() {
return FluentIterable.from(getInput())
.filter(OcamlUtil.sourcePathExt(getSourcePathResolver(), OcamlCompilables.OCAML_MLY))
.toSet()
.asList();
}
// All ML-compilable sources: declared .ml/.re/.mli/.rei files plus the
// sources that will be generated from the lex and yacc inputs.
public ImmutableList<SourcePath> getMLInput() {
return FluentIterable.from(getInput())
.filter(
OcamlUtil.sourcePathExt(
getSourcePathResolver(),
OcamlCompilables.OCAML_ML,
OcamlCompilables.OCAML_RE,
OcamlCompilables.OCAML_MLI,
OcamlCompilables.OCAML_REI))
.append(getLexOutput(getLexInput()))
.append(getYaccOutput(getYaccInput()))
.toSet()
.asList();
}
// Path of the native library archive: <gen>/lib<shortname>.cmxa
private static Path getArchiveNativeOutputPath(
UnflavoredBuildTarget target,
ProjectFilesystem filesystem) {
return BuildTargets.getGenPath(
filesystem,
BuildTarget.of(target),
"%s/lib" + target.getShortName() + OcamlCompilables.OCAML_CMXA);
}
// Path of the bytecode library archive: <gen>/lib<shortname>.cma
private static Path getArchiveBytecodeOutputPath(
UnflavoredBuildTarget target,
ProjectFilesystem filesystem) {
return BuildTargets.getGenPath(
filesystem,
BuildTarget.of(target),
"%s/lib" + target.getShortName() + OcamlCompilables.OCAML_CMA);
}
public Path getNativeOutput() {
return getNativeOutputPath(getBuildTarget(), getProjectFilesystem(), isLibrary());
}
// Output path for the native plugin (.cmxs) form of the library.
public Path getNativePluginOutput() {
UnflavoredBuildTarget target = getBuildTarget();
return BuildTargets.getGenPath(
getProjectFilesystem(),
BuildTarget.of(target),
"%s/lib" + target.getShortName() + OcamlCompilables.OCAML_CMXS
);
}
// Libraries go under gen/ as an archive; executables go under scratch/
// with a ".opt" suffix.
public static Path getNativeOutputPath(
UnflavoredBuildTarget target,
ProjectFilesystem filesystem,
boolean isLibrary) {
if (isLibrary) {
return getArchiveNativeOutputPath(target, filesystem);
} else {
return BuildTargets.getScratchPath(
filesystem,
BuildTarget.of(target),
"%s/" + target.getShortName() + ".opt");
}
}
public Path getBytecodeOutput() {
return getBytecodeOutputPath(getBuildTarget(), getProjectFilesystem(), isLibrary());
}
// Bytecode analogue of getNativeOutputPath; executables carry no suffix.
public static Path getBytecodeOutputPath(
UnflavoredBuildTarget target,
ProjectFilesystem filesystem,
boolean isLibrary) {
if (isLibrary) {
return getArchiveBytecodeOutputPath(target, filesystem);
} else {
return BuildTargets.getScratchPath(
filesystem,
BuildTarget.of(target),
"%s/" + target.getShortName());
}
}
// Where lex/yacc-generated .ml/.mli sources are written.
public Path getGeneratedSourceDir() {
return getNativeOutput().getParent().resolve(OCAML_GENERATED_SOURCE_DIR);
}
public Path getCompileNativeOutputDir() {
return getCompileNativeOutputDir(getBuildTarget(), getProjectFilesystem(), isLibrary());
}
public static Path getCompileNativeOutputDir(
UnflavoredBuildTarget buildTarget,
ProjectFilesystem filesystem,
boolean isLibrary) {
return getNativeOutputPath(buildTarget, filesystem, isLibrary).getParent().resolve(
OCAML_COMPILED_DIR);
}
public Path getCompileBytecodeOutputDir() {
return getNativeOutput().getParent().resolve(OCAML_COMPILED_BYTECODE_DIR);
}
// Maps a .c source to its .o object file inside the native compile dir.
public Path getCOutput(Path cSrc) {
String inputFileName = cSrc.getFileName().toString();
String outputFileName = inputFileName
.replaceFirst(
OcamlCompilables.OCAML_C_REGEX,
OcamlCompilables.OCAML_O);
return getCompileNativeOutputDir().resolve(outputFileName);
}
// Include dirs: every parent dir of an ML input, plus (unless excluded)
// the dependency includes for the requested build mode.
public ImmutableList<String> getIncludeDirectories(boolean isBytecode, boolean excludeDeps) {
ImmutableSet.Builder<String> includeDirs = ImmutableSet.builder();
for (SourcePath mlFile : getMLInput()) {
Path parent = getSourcePathResolver().getAbsolutePath(mlFile).getParent();
if (parent != null) {
includeDirs.add(parent.toString());
}
}
if (!excludeDeps) {
includeDirs.addAll(isBytecode ? this.getBytecodeIncludes() : this.getNativeIncludes());
}
return ImmutableList.copyOf(includeDirs.build());
}
// Interleaves the "-I" flag before each include directory.
public ImmutableList<String> getIncludeFlags(boolean isBytecode, boolean excludeDeps) {
return ImmutableList.copyOf(
MoreIterables.zipAndConcat(
Iterables.cycle(OcamlCompilables.OCAML_INCLUDE_FLAG),
getIncludeDirectories(isBytecode, excludeDeps)));
}
public ImmutableList<String> getBytecodeIncludeFlags() {
return ImmutableList.copyOf(
MoreIterables.zipAndConcat(
Iterables.cycle(OcamlCompilables.OCAML_INCLUDE_FLAG),
getBytecodeIncludeDirectories()));
}
// Bytecode include dirs exclude dependency includes but add the bytecode
// compile output dir itself.
public ImmutableList<String> getBytecodeIncludeDirectories() {
ImmutableList.Builder<String> includesBuilder = ImmutableList.builder();
includesBuilder.addAll(getIncludeDirectories(true, /* excludeDeps */ true));
includesBuilder.add(getCompileBytecodeOutputDir().toString());
return includesBuilder.build();
}
// Predicts the .ml file ocamllex will generate for each .mll input.
protected FluentIterable<SourcePath> getLexOutput(Iterable<SourcePath> lexInputs) {
return FluentIterable.from(lexInputs)
.transform(
lexInput -> {
Path fileName = getSourcePathResolver().getAbsolutePath(lexInput).getFileName();
Path out = getGeneratedSourceDir().resolve(
fileName.toString().replaceFirst(
OcamlCompilables.OCAML_MLL_REGEX,
OcamlCompilables.OCAML_ML));
return new PathSourcePath(getProjectFilesystem(), out);
});
}
// Predicts the .ml AND .mli files ocamlyacc will generate per .mly input.
protected FluentIterable<SourcePath> getYaccOutput(Iterable<SourcePath> yaccInputs) {
return FluentIterable.from(yaccInputs)
.transformAndConcat(
yaccInput -> {
String yaccFileName = getSourcePathResolver()
.getAbsolutePath(yaccInput)
.getFileName()
.toString();
ImmutableList.Builder<SourcePath> toReturn = ImmutableList.builder();
toReturn.add(new PathSourcePath(
getProjectFilesystem(),
getGeneratedSourceDir().resolve(
yaccFileName.replaceFirst(
OcamlCompilables.OCAML_MLY_REGEX,
OcamlCompilables.OCAML_ML))));
toReturn.add(new PathSourcePath(
getProjectFilesystem(),
getGeneratedSourceDir().resolve(
yaccFileName.replaceFirst(
OcamlCompilables.OCAML_MLY_REGEX,
OcamlCompilables.OCAML_MLI))));
return toReturn.build();
});
}
// NOTE(review): only a subset of the properties is folded into the rule key
// here (e.g. cFlags/ldFlags/includes are absent) — presumably covered by
// other rule-key contributions; confirm before relying on it for caching.
@Override
public void appendToRuleKey(RuleKeyObjectSink sink) {
sink
.setReflectively("flags", getFlags())
.setReflectively("input", getInput())
.setReflectively("lexCompiler", getLexCompiler())
.setReflectively("ocamlBytecodeCompiler", getOcamlBytecodeCompiler())
.setReflectively("ocamlCompiler", getOcamlCompiler())
.setReflectively("ocamlDebug", getOcamlDebug())
.setReflectively("ocamlDepTool", getOcamlDepTool())
.setReflectively("yaccCompiler", getYaccCompiler());
}
// C flags passed through to ocaml via "-ccopt": preprocessor include args
// plus any C-specific preprocessor flags from the C++ preprocessor input.
public ImmutableList<String> getCCompileFlags() {
ImmutableList.Builder<String> compileFlags = ImmutableList.builder();
CxxPreprocessorInput cxxPreprocessorInput = getCxxPreprocessorInput();
compileFlags.addAll(
MoreIterables.zipAndConcat(
Iterables.cycle("-ccopt"),
CxxHeaders.getArgs(
cxxPreprocessorInput.getIncludes(),
getSourcePathResolver(),
Optional.empty(),
getCPreprocessor())));
for (String cFlag : cxxPreprocessorInput.getPreprocessorFlags().get(CxxSource.Type.C)) {
compileFlags.add("-ccopt", cFlag);
}
return compileFlags.build();
}
// Interleaves the given prefix before every flag.
private static ImmutableList<String> addPrefix(String prefix, Iterable<String> flags) {
return ImmutableList.copyOf(
MoreIterables.zipAndConcat(
Iterables.cycle(prefix),
flags));
}
public ImmutableList<String> getCommonCFlags() {
ImmutableList.Builder<String> builder = ImmutableList.builder();
builder.addAll(addPrefix("-ccopt", getCFlags()));
builder.add("-ccopt",
"-isystem" +
getOcamlInteropIncludesDir().orElse(DEFAULT_OCAML_INTEROP_INCLUDE_DIR.toString()));
return builder.build();
}
public ImmutableList<String> getCommonCLinkerFlags() {
return addPrefix("-ccopt", getLdFlags());
}
// Pre-populates a builder with the toolchain and flags from .buckconfig.
public static OcamlBuildContext.Builder builder(OcamlBuckConfig config) {
return OcamlBuildContext.builder()
.setOcamlDepTool(config.getOcamlDepTool())
.setOcamlCompiler(config.getOcamlCompiler())
.setOcamlDebug(config.getOcamlDebug())
.setYaccCompiler(config.getYaccCompiler())
.setLexCompiler(config.getLexCompiler())
.setOcamlBytecodeCompiler(config.getOcamlBytecodeCompiler())
.setOcamlInteropIncludesDir(config.getOcamlInteropIncludesDir())
.setCFlags(config.getCFlags())
.setLdFlags(config.getLdFlags());
}
}
| |
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.codeInsight.intention.impl.singlereturn;
import com.intellij.codeInsight.BlockUtils;
import com.intellij.openapi.diagnostic.Attachment;
import com.intellij.openapi.diagnostic.RuntimeExceptionWithAttachments;
import com.intellij.openapi.project.Project;
import com.intellij.psi.*;
import com.intellij.psi.controlFlow.*;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiTypesUtil;
import com.intellij.util.ArrayUtil;
import com.intellij.util.containers.ContainerUtil;
import com.siyeh.ig.psiutils.*;
import one.util.streamex.StreamEx;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.*;
import static com.intellij.util.ObjectUtils.tryCast;
import static java.util.Objects.requireNonNull;
/**
* Performs replacement of single return statement as the part of {@link ConvertToSingleReturnAction}.
*/
class ReturnReplacementContext {
// Project used to obtain the element factory and control-flow services.
private final Project myProject;
// Factory for creating replacement statements from text.
private final PsiElementFactory myFactory;
// Body of the method being converted to single-return form.
private final PsiCodeBlock myBlock;
// Helper tracking the synthesized exit condition / return value
// (see usages below: registerReturnValue, generateExitCondition, ...).
private final ExitContext myExitContext;
// The return statement being replaced; reassigned as enclosing code is restructured.
private PsiReturnStatement myReturnStatement;
// Statement texts (e.g. "break;") to insert in place of the return.
private final List<String> myReplacements = new ArrayList<>(3);
/**
 * Creates a replacement context for one {@code return} statement inside
 * the given method body.
 */
private ReturnReplacementContext(Project project,
                                 PsiCodeBlock methodBody,
                                 ExitContext exitContext,
                                 PsiReturnStatement returnStatement) {
  myReturnStatement = returnStatement;
  myExitContext = exitContext;
  myBlock = methodBody;
  myProject = project;
  myFactory = JavaPsiFacade.getElementFactory(project);
}
/**
 * Replaces this return statement: records its value in the exit context,
 * then repeatedly transforms the enclosing statements until the method
 * body itself is reached, and finally swaps the return for the
 * accumulated replacement statements.
 */
private void process() {
  PsiExpression returnValue = myReturnStatement.getReturnValue();
  PsiStatement context = goUp();
  if (context == null) {
    // Malformed code: nothing to rewrite.
    return;
  }
  if (returnValue != null) {
    myExitContext.registerReturnValue(returnValue, myReplacements);
  }
  // Climb toward the method body, transforming one level per step.
  do {
    context = advance(context);
  }
  while (context != null);
  replace();
}
/**
 * Finds the statement from which the upward transformation should start.
 * Along the way, deletes statements that became unreachable after the
 * return, and short-circuits two cases: a return inside a loop/switch
 * (replaced by "break;") and an if-statement whose tail can be swallowed
 * into the opposite branch. Returns null for structurally broken code.
 */
@Nullable
private PsiStatement goUp() {
PsiElement parent = myReturnStatement.getParent();
while (parent instanceof PsiCodeBlock) {
PsiElement grandParent = parent.getParent();
// Inside a switch the "unreachable" statements may be other case
// branches, so skip the cleanup there.
if (!(grandParent instanceof PsiSwitchStatement)) {
PsiStatement[] statements = ((PsiCodeBlock)parent).getStatements();
boolean afterReturn = false;
for (PsiStatement statement : statements) {
if (statement == myReturnStatement) {
afterReturn = true;
}
else if (afterReturn) {
// Unreachable statements after return (compilation error): remove them
new CommentTracker().deleteAndRestoreComments(statement);
}
}
}
// A block statement is just a wrapper: keep climbing through it.
if (grandParent instanceof PsiBlockStatement) {
parent = grandParent.getParent();
continue;
}
if (grandParent instanceof PsiCatchSection) {
parent = grandParent.getParent();
break;
}
if (grandParent instanceof PsiStatement) {
parent = grandParent;
}
else if (parent == myBlock) {
// May happen for incorrect code
return null;
}
else {
throw new RuntimeExceptionWithAttachments("Unexpected structure: " + grandParent.getClass(),
new Attachment("body.txt", myBlock.getText()),
new Attachment("context.txt", grandParent.getText()));
}
break;
}
if (!(parent instanceof PsiStatement)) {
throw new RuntimeExceptionWithAttachments("Unexpected structure: " + parent.getClass(),
new Attachment("body.txt", myBlock.getText()),
new Attachment("context.txt", parent.getText()));
}
PsiStatement currentContext = (PsiStatement)parent;
// Return inside a loop/switch within the method body: a plain "break;"
// replaces the return, and transformation starts from the loop/switch.
PsiStatement loopOrSwitch = PsiTreeUtil.getNonStrictParentOfType(currentContext, PsiLoopStatement.class, PsiSwitchStatement.class);
if (loopOrSwitch != null && PsiTreeUtil.isAncestor(myBlock, loopOrSwitch, true)) {
myReplacements.add("break;");
return loopOrSwitch;
}
// For if-statements, try moving the code after the 'if' into the branch
// the return did NOT come from; if that branch then cannot complete
// normally, the transformation can start one if-level higher.
while (true) {
if (currentContext instanceof PsiIfStatement) {
PsiIfStatement ifStatement = (PsiIfStatement)currentContext;
boolean inThen = PsiTreeUtil.isAncestor(ifStatement.getThenBranch(), myReturnStatement, false);
PsiElement ifParent = currentContext.getParent();
if (ifParent instanceof PsiCodeBlock) {
PsiCodeBlock resultBlock = swallowTail(currentContext, ifStatement, inThen, (PsiCodeBlock)ifParent);
if (resultBlock != null &&
!ControlFlowUtils.codeBlockMayCompleteNormally(resultBlock) &&
ifParent.getParent() instanceof PsiBlockStatement &&
ifParent.getParent().getParent() instanceof PsiIfStatement) {
currentContext = (PsiStatement)ifParent.getParent().getParent();
continue;
}
}
}
return currentContext;
}
}
/**
 * Performs one step of the upward transformation: guards the statements
 * following {@code currentContext} with the exit condition (or deletes a
 * lone default return), then moves to the next enclosing statement.
 * Returns the next context to process, or null once the method body has
 * been reached.
 */
@Nullable
private PsiStatement advance(PsiStatement currentContext) {
PsiElement contextParent = currentContext.getParent();
if (contextParent instanceof PsiLoopStatement) {
// The context is a loop body without braces: wrap it in a block first.
// Mark/releaseMark keeps track of the return statement across the
// PSI rewrite, which invalidates the old reference.
Object mark = new Object();
PsiTreeUtil.mark(myReturnStatement, mark);
currentContext = BlockUtils.expandSingleStatementToBlockStatement(currentContext);
contextParent = currentContext.getParent();
myReturnStatement = (PsiReturnStatement)requireNonNull(PsiTreeUtil.releaseMark(currentContext, mark));
}
if (contextParent instanceof PsiCodeBlock) {
PsiElement[] tail = extractTail(currentContext, (PsiCodeBlock)contextParent);
// Inside a loop/switch within the method body: insert a conditional
// break after the context and continue from the loop/switch itself.
PsiStatement loopOrSwitch = PsiTreeUtil.getParentOfType(currentContext, PsiLoopStatement.class, PsiSwitchStatement.class);
if (loopOrSwitch != null && PsiTreeUtil.isAncestor(myBlock, loopOrSwitch, true)) {
myExitContext.register(myReplacements);
String exitStatement = "if(" + myExitContext.generateExitCondition() + ") break;";
contextParent.addAfter(myFactory.createStatementFromText(exitStatement, currentContext), currentContext);
currentContext = loopOrSwitch;
return currentContext;
}
List<PsiStatement> statements = StreamEx.of(tail).select(PsiStatement.class).toList();
if (!statements.isEmpty()) {
PsiStatement statement = statements.get(0);
if (statements.size() == 1 && myExitContext.isDefaultReturn(statement)) {
// The only trailing statement is the default return: just drop it.
new CommentTracker().deleteAndRestoreComments(statement);
}
else {
myExitContext.register(myReplacements);
if (!myExitContext.isFinishCondition(statement)) {
// Wrap the trailing statements into "if(<not exiting>) { ... }"
// so they are skipped once the exit condition is set.
String conditionalBlock = "if(" + myExitContext.getNonExitCondition() + ") {}";
PsiIfStatement ifStatement = (PsiIfStatement)myFactory.createStatementFromText(conditionalBlock, currentContext);
PsiCodeBlock ifBlock = requireNonNull(((PsiBlockStatement)requireNonNull(ifStatement.getThenBranch())).getCodeBlock());
PsiJavaToken lBrace = requireNonNull(ifBlock.getLBrace());
PsiElement tailStart = ArrayUtil.getFirstElement(tail);
PsiElement tailEnd = ArrayUtil.getLastElement(tail);
ifBlock.addRangeAfter(tailStart, tailEnd, lBrace);
contextParent.deleteChildRange(tailStart, tailEnd);
PsiElement insertedIf = contextParent.addAfter(ifStatement, currentContext);
// Moving code into the 'if' may leave declarations before it
// unassigned on some paths; give those variables defaults.
fixNonInitializedVars(insertedIf);
}
}
}
// Step outward: stop at the method body, otherwise continue with the
// enclosing statement (or the try statement for a catch section).
if (contextParent == myBlock) {
currentContext = null;
}
else if (contextParent.getParent() instanceof PsiStatement) {
currentContext = (PsiStatement)contextParent.getParent();
}
else if (contextParent.getParent() instanceof PsiCatchSection) {
currentContext = (PsiStatement)contextParent.getParent().getParent();
}
else {
throw new RuntimeExceptionWithAttachments("Unexpected structure: " + contextParent.getParent().getClass(),
new Attachment("body.txt", myBlock.getText()),
new Attachment("context.txt", contextParent.getText()));
}
}
else if (contextParent instanceof PsiIfStatement || contextParent instanceof PsiLabeledStatement) {
currentContext = (PsiStatement)contextParent;
}
else {
throw new RuntimeExceptionWithAttachments("Unexpected structure: " + contextParent.getClass(),
new Attachment("body.txt", myBlock.getText()),
new Attachment("context.txt", contextParent.getText()));
}
return currentContext;
}
/**
 * Assigns default initializers to local variables of the method body that
 * are referenced inside {@code element} but may still be unassigned at its
 * start (the code that assigned them may have been moved into a branch).
 */
private void fixNonInitializedVars(PsiElement element) {
  Set<PsiLocalVariable> uninitialized = new HashSet<>();
  // Collect locals of the converted method that are referenced here and
  // have no initializer of their own.
  PsiTreeUtil.processElements(element, child -> {
    if (child instanceof PsiReferenceExpression) {
      PsiLocalVariable variable = ExpressionUtils.resolveLocalVariable((PsiExpression)child);
      boolean candidate = variable != null
                          && variable.getInitializer() == null
                          && PsiTreeUtil.isAncestor(myBlock, variable, true);
      if (candidate) {
        uninitialized.add(variable);
      }
    }
    return true;
  });
  if (uninitialized.isEmpty()) return;
  ControlFlow controlFlow;
  try {
    controlFlow = ControlFlowFactory.getInstance(myProject)
      .getControlFlow(myBlock, new LocalsControlFlowPolicy(myBlock), false, false);
  }
  catch (AnalysisCanceledException ignored) {
    // Cannot analyze the body: leave the variables untouched.
    return;
  }
  int startOffset = controlFlow.getStartOffset(element);
  if (startOffset == -1) return;
  for (PsiLocalVariable variable : uninitialized) {
    boolean[] possiblyUnassigned = ControlFlowUtil.getVariablePossiblyUnassignedOffsets(variable, controlFlow);
    if (possiblyUnassigned[startOffset]) {
      // Give the variable the default value of its type (0/false/null).
      variable.setInitializer(myFactory.createExpressionFromText(PsiTypesUtil.getDefaultValueOfType(variable.getType()), null));
    }
  }
}
/**
 * Returns every child of {@code block} that follows {@code current},
 * excluding the closing brace (when present).
 */
@NotNull
private static PsiElement[] extractTail(PsiStatement current, PsiCodeBlock block) {
  PsiElement[] children = block.getChildren();
  int start = ArrayUtil.indexOf(children, current);
  assert start >= 0;
  PsiElement closingBrace = block.getRBrace();
  int end;
  if (closingBrace == null) {
    end = children.length;
  }
  else {
    end = ArrayUtil.lastIndexOf(children, closingBrace);
  }
  assert end >= start;
  return Arrays.copyOfRange(children, start + 1, end);
}
/**
 * Moves the statements following {@code currentContext} into the branch of
 * {@code ifStatement} opposite to where the return came from. Returns the
 * target code block, or null when the tail contains no statements.
 */
private PsiCodeBlock swallowTail(PsiStatement currentContext,
                                 PsiIfStatement ifStatement,
                                 boolean inThen, PsiCodeBlock ifParent) {
  PsiElement[] tail = extractTail(currentContext, ifParent);
  boolean hasStatements = Arrays.stream(tail).anyMatch(e -> e instanceof PsiStatement);
  if (!hasStatements) return null;
  PsiCodeBlock target = getBlockFromIf(ifStatement, inThen).getCodeBlock();
  PsiJavaToken closingBrace = requireNonNull(target.getRBrace());
  // Copy each surviving element just before the closing brace, then
  // remove the original.
  for (PsiElement element : tail) {
    if (!element.isValid()) continue;
    target.addBefore(element, closingBrace);
    element.delete();
  }
  return target;
}
/**
 * Returns the branch of {@code ifStatement} OPPOSITE to the one indicated by
 * {@code inThen}, as a block statement — creating an empty block when the branch is
 * missing, or wrapping a lone statement into a block when necessary.
 */
@NotNull
private PsiBlockStatement getBlockFromIf(PsiIfStatement ifStatement, boolean inThen) {
    if (inThen) {
        PsiStatement elseBranch = ifStatement.getElseBranch();
        if (elseBranch == null) {
            // No else branch yet: attach an empty block and re-read it from the tree
            // (the setter may re-parent/copy, so the original reference is not reused).
            ifStatement.setElseBranch(BlockUtils.createBlockStatement(myProject));
            return (PsiBlockStatement)ifStatement.getElseBranch();
        }
        if (!(elseBranch instanceof PsiBlockStatement)) {
            // Single statement: wrap it. The helper returns the statement itself, whose
            // grandparent is the new surrounding block statement — the cast relies on that.
            return (PsiBlockStatement)BlockUtils.expandSingleStatementToBlockStatement(elseBranch).getParent().getParent();
        }
        return (PsiBlockStatement)elseBranch;
    }
    else {
        // Mirror image of the branch above, operating on the then branch.
        PsiStatement thenBranch = ifStatement.getThenBranch();
        if (thenBranch == null) {
            ifStatement.setThenBranch(BlockUtils.createBlockStatement(myProject));
            return (PsiBlockStatement)ifStatement.getThenBranch();
        }
        if (!(thenBranch instanceof PsiBlockStatement)) {
            return (PsiBlockStatement)BlockUtils.expandSingleStatementToBlockStatement(thenBranch).getParent().getParent();
        }
        return (PsiBlockStatement)thenBranch;
    }
}
/**
 * Replaces the tracked return statement with the prepared replacement statements
 * (from {@code myReplacements}), then removes any enclosing blocks the deletion
 * left empty.
 */
private void replace() {
    // Ensure the return statement sits directly in a code block so new statements
    // can be inserted before it.
    if (!(myReturnStatement.getParent() instanceof PsiCodeBlock)) {
        myReturnStatement = BlockUtils.expandSingleStatementToBlockStatement(myReturnStatement);
    }
    PsiStatement[] newStatements = ContainerUtil.map2Array(
        myReplacements, PsiStatement.class, text -> myFactory.createStatementFromText(text, null));
    if (newStatements.length > 0) {
        BlockUtils.addBefore(myReturnStatement, newStatements);
    }
    // Capture the parent block BEFORE deleting the return statement.
    PsiCodeBlock block = tryCast(myReturnStatement.getParent(), PsiCodeBlock.class);
    new CommentTracker().deleteAndRestoreComments(myReturnStatement);
    cleanUpEmptyBlocks(block);
}
/**
 * Recursively simplifies an {@code if} statement whose branch became an empty block:
 * an empty else branch is deleted outright; an empty then branch is either replaced by
 * the else branch with the condition negated, or — when the condition has no side
 * effects — the whole {@code if} is removed and cleanup continues one level up.
 */
private static void cleanUpEmptyBlocks(PsiCodeBlock block) {
    if (block == null || !block.isEmpty()) return;
    PsiBlockStatement blockStatement = tryCast(block.getParent(), PsiBlockStatement.class);
    if (blockStatement == null) return;
    PsiIfStatement parent = tryCast(blockStatement.getParent(), PsiIfStatement.class);
    if (parent == null) return;
    PsiExpression condition = parent.getCondition();
    if (condition == null) return;
    if (blockStatement == parent.getElseBranch()) {
        // "if (c) {...} else {}" -> drop the empty else branch.
        new CommentTracker().deleteAndRestoreComments(blockStatement);
    }
    else if (blockStatement == parent.getThenBranch()) {
        if (parent.getElseBranch() != null) {
            // "if (c) {} else {...}" -> "if (!c) {...}": copy the else branch into the
            // then position, delete the original else, then negate the condition.
            new CommentTracker().replaceAndRestoreComments(blockStatement, parent.getElseBranch());
            parent.getElseBranch().delete();
            CommentTracker ct = new CommentTracker();
            String negatedCondition = BoolUtils.getNegatedExpressionText(condition, ct);
            ct.replaceAndRestoreComments(condition, negatedCondition);
        }
        else if (!SideEffectChecker.mayHaveSideEffects(condition)) {
            // "if (c) {}" with a side-effect-free condition -> remove the whole if and
            // continue cleaning the block that contained it.
            PsiCodeBlock parentBlock = tryCast(parent.getParent(), PsiCodeBlock.class);
            new CommentTracker().deleteAndRestoreComments(parent);
            cleanUpEmptyBlocks(parentBlock);
        }
    }
}
/**
 * Entry point: builds a replacement context for the given return statement and runs it.
 */
static void replaceSingleReturn(@NotNull Project project,
                                PsiCodeBlock block,
                                ExitContext exitContext,
                                PsiReturnStatement returnStatement) {
    ReturnReplacementContext context =
        new ReturnReplacementContext(project, block, exitContext, returnStatement);
    context.process();
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.util;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Locale;
import java.util.Optional;
import java.util.function.Function;
import static org.apache.camel.util.StringQuoteHelper.doubleQuote;
/**
* Helper methods for working with Strings.
*/
public final class StringHelper {
/**
 * Constructor of utility class should be private: all members are static and the
 * class is never meant to be instantiated.
 */
private StringHelper() {
}
/**
 * Ensures that <code>s</code> is friendly for a URL or file system by replacing
 * each of the characters <tt>: _ . / \</tt> with a dash.
 *
 * @param s String to be sanitized.
 * @return sanitized version of <code>s</code>.
 * @throws NullPointerException if <code>s</code> is <code>null</code>.
 */
public static String sanitize(String s) {
    char[] chars = s.toCharArray();
    for (int i = 0; i < chars.length; i++) {
        switch (chars[i]) {
            case ':':
            case '_':
            case '.':
            case '/':
            case '\\':
                chars[i] = '-';
                break;
            default:
                break;
        }
    }
    return new String(chars);
}
/**
 * Counts the number of times the given char occurs in the string.
 *
 * @param s the string (may be null/empty, in which case 0 is returned)
 * @param ch the char to count
 * @return number of occurrences of the char in the string
 */
public static int countChar(String s, char ch) {
    if (ObjectHelper.isEmpty(s)) {
        return 0;
    }
    int count = 0;
    for (char c : s.toCharArray()) {
        if (c == ch) {
            count++;
        }
    }
    return count;
}
/**
 * Limits the length of a string.
 * <p/>
 * Kept (with its original misspelled name) for binary compatibility; it simply
 * delegates to the correctly spelled replacement.
 *
 * @param s the string
 * @param maxLength the maximum length of the returned string
 * @return s if the length of s is less than maxLength or the first maxLength characters of s
 * @deprecated use {@link #limitLength(String, int)}
 */
@Deprecated
public static String limitLenght(String s, int maxLength) {
    return limitLength(s, maxLength);
}
/**
 * Limits the length of a string.
 *
 * @param s the string
 * @param maxLength the maximum length of the returned string
 * @return s if the length of s is less than maxLength or the first maxLength characters of s
 */
public static String limitLength(String s, int maxLength) {
    if (ObjectHelper.isEmpty(s)) {
        return s;
    }
    if (s.length() <= maxLength) {
        return s;
    }
    return s.substring(0, maxLength);
}
/**
 * Removes all quotes (single and double) from the string.
 *
 * @param s the string
 * @return the string without quotes (single and double)
 */
public static String removeQuotes(String s) {
    if (ObjectHelper.isEmpty(s)) {
        return s;
    }
    String withoutSingle = replaceAll(s, "'", "");
    return replaceAll(withoutSingle, "\"", "");
}
/**
 * Removes all leading and ending quotes (single and double) from the string.
 * The input is trimmed before the check; unquoted input is returned untrimmed and as-is.
 *
 * @param s the string
 * @return the string without leading and ending quotes (single and double)
 */
public static String removeLeadingAndEndingQuotes(String s) {
    if (ObjectHelper.isEmpty(s)) {
        return s;
    }
    String trimmed = s.trim();
    boolean singleQuoted = trimmed.startsWith("'") && trimmed.endsWith("'");
    boolean doubleQuoted = trimmed.startsWith("\"") && trimmed.endsWith("\"");
    if (singleQuoted || doubleQuoted) {
        return trimmed.substring(1, trimmed.length() - 1);
    }
    // no quotes, so return as-is
    return s;
}
/**
 * Whether the string starts and ends with either single or double quotes.
 *
 * @param s the string
 * @return <tt>true</tt> if the string starts and ends with either single or double quotes.
 */
public static boolean isQuoted(String s) {
    if (ObjectHelper.isEmpty(s)) {
        return false;
    }
    boolean single = s.startsWith("'") && s.endsWith("'");
    boolean dbl = s.startsWith("\"") && s.endsWith("\"");
    return single || dbl;
}
/**
* Encodes the text into safe XML by replacing < > and & with XML tokens
*
* @param text the text
* @return the encoded text
*/
public static String xmlEncode(String text) {
if (text == null) {
return "";
}
// must replace amp first, so we dont replace < to amp later
text = replaceAll(text, "&", "&");
text = replaceAll(text, "\"", """);
text = replaceAll(text, "<", "<");
text = replaceAll(text, ">", ">");
return text;
}
/**
 * Determines if the string has at least one letter in upper case.
 *
 * @param text the text
 * @return <tt>true</tt> if at least one letter is upper case, <tt>false</tt> otherwise
 */
public static boolean hasUpperCase(String text) {
    if (text == null) {
        return false;
    }
    return text.chars().anyMatch(Character::isUpperCase);
}
/**
 * Determines if the string is a fully qualified class name, judged by whether the
 * last dot-separated token starts with an upper-case letter.
 */
public static boolean isClassName(String text) {
    if (text == null) {
        return false;
    }
    String[] parts = text.split("\\.");
    if (parts.length == 0) {
        return false;
    }
    String simpleName = parts[parts.length - 1];
    return !simpleName.isEmpty() && Character.isUpperCase(simpleName.charAt(0));
}
/**
 * Does the expression have the language start token?
 *
 * @param expression the expression
 * @param language the name of the language, such as simple
 * @return <tt>true</tt> if the expression contains the start token, <tt>false</tt> otherwise
 */
public static boolean hasStartToken(String expression, String language) {
    if (expression == null) {
        return false;
    }
    // the simple language also supports the shorthand "${" start token
    boolean simple = "simple".equalsIgnoreCase(language);
    if (simple && expression.contains("${")) {
        return true;
    }
    return language != null && expression.contains("$" + language + "{");
}
/**
 * Replaces all the from tokens in the given input string.
 * <p/>
 * This implementation is not recursive, not does it check for tokens in the replacement string.
 *
 * @param input the input string
 * @param from the from string, must <b>not</b> be <tt>null</tt> or empty
 * @param to the replacement string, must <b>not</b> be empty
 * @return the replaced string, or the input string if no replacement was needed
 * @throws IllegalArgumentException if the input arguments is invalid
 */
public static String replaceAll(String input, String from, String to) {
    if (ObjectHelper.isEmpty(input)) {
        return input;
    }
    if (from == null) {
        throw new IllegalArgumentException("from cannot be null");
    }
    if (to == null) {
        // to can be empty, so only check for null
        throw new IllegalArgumentException("to cannot be null");
    }
    // fast path: nothing to replace
    int pos = input.indexOf(from);
    if (pos == -1) {
        return input;
    }
    StringBuilder sb = new StringBuilder(input.length());
    int start = 0;
    while (pos != -1) {
        // copy the unchanged run, then the replacement
        sb.append(input, start, pos).append(to);
        start = pos + from.length();
        pos = input.indexOf(from, start);
    }
    // copy whatever trails the last match
    sb.append(input, start, input.length());
    return sb.toString();
}
/**
 * Creates a json tuple with the given name/value pair.
 *
 * @param name the name
 * @param value the value
 * @param isMap whether the tuple should be map
 * @return the json
 */
public static String toJson(String name, String value, boolean isMap) {
    String pair = doubleQuote(name) + ": " + doubleQuote(value);
    return isMap ? "{ " + pair + " }" : pair;
}
/**
 * Asserts whether the string is <b>not</b> empty.
 *
 * @param value the string to test
 * @param name the key that resolved the value
 * @return the passed {@code value} as is
 * @throws IllegalArgumentException is thrown if assertion fails
 */
public static String notEmpty(String value, String name) {
    boolean empty = ObjectHelper.isEmpty(value);
    if (!empty) {
        return value;
    }
    throw new IllegalArgumentException(name + " must be specified and not empty");
}
/**
 * Asserts whether the string is <b>not</b> empty.
 *
 * @param value the string to test
 * @param on additional description to indicate where this problem occurred (appended as toString())
 * @param name the key that resolved the value
 * @return the passed {@code value} as is
 * @throws IllegalArgumentException is thrown if assertion fails
 */
public static String notEmpty(String value, String name, Object on) {
    if (on == null) {
        // without context only a null-check is performed
        ObjectHelper.notNull(value, name);
        return value;
    }
    if (ObjectHelper.isEmpty(value)) {
        throw new IllegalArgumentException(name + " must be specified and not empty on: " + on);
    }
    return value;
}
/**
 * Splits {@code value} into at most {@code count} parts on successive occurrences of
 * {@code needle}. When fewer occurrences exist, the remaining slots stay {@code null}.
 * Note: only a single character is skipped at each split point, regardless of the
 * needle's length.
 *
 * @param value the string to split
 * @param needle the separator to look for
 * @param count the number of slots in the returned array
 * @return an array of {@code count} slots; unused slots are {@code null}
 */
public static String[] splitOnCharacter(String value, String needle, int count) {
    String[] parts = new String[count];
    parts[0] = value;
    for (int i = 1; i < count; i++) {
        String remainder = parts[i - 1];
        int at = remainder.indexOf(needle);
        if (at < 0) {
            break;
        }
        parts[i - 1] = remainder.substring(0, at);
        parts[i] = remainder.substring(at + 1);
    }
    return parts;
}
/**
 * Removes any starting characters on the given text which match the given character.
 *
 * @param text the string
 * @param ch the initial characters to remove
 * @return either the original string or the new substring (possibly empty)
 * @throws NullPointerException if <code>text</code> is <code>null</code>
 */
public static String removeStartingCharacters(String text, char ch) {
    int idx = 0;
    // Bounds check guards against StringIndexOutOfBoundsException when the string is
    // empty or consists entirely of the character being stripped (the previous
    // implementation walked past the end of the string in those cases).
    while (idx < text.length() && text.charAt(idx) == ch) {
        idx++;
    }
    return idx > 0 ? text.substring(idx) : text;
}
/**
 * Capitalize the string (upper case first character).
 * Uses locale-independent (English) upper-casing, matching the original behavior.
 *
 * @param text the string; may be null
 * @return the string capitalized, or null when the input was null
 */
public static String capitalize(String text) {
    if (text == null) {
        return null;
    }
    if (text.isEmpty()) {
        return text;
    }
    String head = text.substring(0, 1).toUpperCase(Locale.ENGLISH);
    return text.length() == 1 ? head : head + text.substring(1);
}
/**
 * Returns the string after the first occurrence of the given token.
 *
 * @param text the text
 * @param after the token
 * @return the text after the token, or <tt>null</tt> if text does not contain the token
 */
public static String after(String text, String after) {
    int pos = text.indexOf(after);
    if (pos < 0) {
        return null;
    }
    return text.substring(pos + after.length());
}
/**
 * Returns an object after the given token.
 *
 * @param text the text
 * @param after the token
 * @param mapper a mapping function to convert the string after the token to type T
 * @return an Optional describing the result of applying a mapping function to the text after the token.
 */
public static <T> Optional<T> after(String text, String after, Function<String, T> mapper) {
    // Optional.map short-circuits on absence, matching the explicit null check.
    return Optional.ofNullable(after(text, after)).map(mapper);
}
/**
 * Returns the string before the first occurrence of the given token.
 *
 * @param text the text
 * @param before the token
 * @return the text before the token, or <tt>null</tt> if text does not
 *         contain the token
 */
public static String before(String text, String before) {
    int pos = text.indexOf(before);
    if (pos < 0) {
        return null;
    }
    return text.substring(0, pos);
}
/**
 * Returns an object before the given token.
 *
 * @param text the text
 * @param before the token
 * @param mapper a mapping function to convert the string before the token to type T
 * @return an Optional describing the result of applying a mapping function to the text before the token.
 */
public static <T> Optional<T> before(String text, String before, Function<String, T> mapper) {
    // Optional.map short-circuits on absence, matching the explicit null check.
    return Optional.ofNullable(before(text, before)).map(mapper);
}
/**
 * Returns the string between the given tokens.
 *
 * @param text the text
 * @param after the before token
 * @param before the after token
 * @return the text between the tokens, or <tt>null</tt> if text does not contain the tokens
 */
public static String between(String text, String after, String before) {
    String remainder = after(text, after);
    return remainder == null ? null : before(remainder, before);
}
/**
 * Returns an object between the given token.
 *
 * @param text the text
 * @param after the before token
 * @param before the after token
 * @param mapper a mapping function to convert the string between the token to type T
 * @return an Optional describing the result of applying a mapping function to the text between the token.
 */
public static <T> Optional<T> between(String text, String after, String before, Function<String, T> mapper) {
    // Optional.map short-circuits on absence, matching the explicit null check.
    return Optional.ofNullable(between(text, after, before)).map(mapper);
}
/**
 * Returns the string between the most outer pair of tokens.
 * <p/>
 * The number of token pairs must be even, eg there must be the same number of opening
 * and closing tokens, otherwise <tt>null</tt> is returned.
 * <p/>
 * This implementation skips matching when the text is either single or double quoted.
 * For example:
 * <tt>${body.matches("foo('bar')")</tt>
 * will not match the parenthesis from the quoted text.
 *
 * @param text the text
 * @param before the opening token
 * @param after the closing token
 * @return the text between the outer most tokens, or <tt>null</tt> if text does not contain the tokens
 */
public static String betweenOuterPair(String text, char before, char after) {
    if (text == null) {
        return null;
    }
    int first = -1;        // index of the first opening token outside quotes
    int last = -1;         // index of the last closing token outside quotes
    int opens = 0;
    int closes = 0;
    boolean inSingleQuote = false;
    boolean inDoubleQuote = false;
    for (int i = 0; i < text.length(); i++) {
        char ch = text.charAt(i);
        // track quoting state; a quote char inside the other quote kind is literal
        if (ch == '\'' && !inDoubleQuote) {
            inSingleQuote = !inSingleQuote;
        } else if (ch == '\"' && !inSingleQuote) {
            inDoubleQuote = !inDoubleQuote;
        }
        if (inSingleQuote || inDoubleQuote) {
            continue;
        }
        if (ch == before) {
            opens++;
            if (first == -1) {
                first = i;
            }
        } else if (ch == after) {
            closes++;
            last = i;
        }
    }
    // both tokens must occur, and in balanced numbers
    if (first == -1 || last == -1 || opens != closes) {
        return null;
    }
    return text.substring(first + 1, last);
}
/**
 * Returns an object between the most outer pair of tokens.
 *
 * @param text the text
 * @param before the opening token
 * @param after the closing token
 * @param mapper a mapping function to convert the string between the most outer pair of tokens to type T
 * @return an Optional describing the result of applying a mapping function to the text between the most outer pair of tokens.
 */
public static <T> Optional<T> betweenOuterPair(String text, char before, char after, Function<String, T> mapper) {
    // Optional.map short-circuits on absence, matching the explicit null check.
    return Optional.ofNullable(betweenOuterPair(text, before, after)).map(mapper);
}
/**
 * Returns true if the given name is a valid java identifier: non-empty, starting with
 * an identifier-start character and containing only identifier-part characters.
 */
public static boolean isJavaIdentifier(String name) {
    if (name == null || name.isEmpty()) {
        return false;
    }
    if (!Character.isJavaIdentifierStart(name.charAt(0))) {
        return false;
    }
    for (int i = 1; i < name.length(); i++) {
        if (!Character.isJavaIdentifierPart(name.charAt(i))) {
            return false;
        }
    }
    return true;
}
/**
 * Cleans the string to a pure Java identifier so we can use it for loading class names.
 * <p/>
 * Especially from Spring DSL people can have \n \t or other characters that otherwise
 * would result in ClassNotFoundException.
 *
 * @param name the class name
 * @return normalized classname that can be load by a class loader.
 */
public static String normalizeClassName(String name) {
    StringBuilder cleaned = new StringBuilder(name.length());
    for (int i = 0; i < name.length(); i++) {
        char ch = name.charAt(i);
        // keep identifier characters plus the separators used in class/array names
        boolean keep = ch == '.' || ch == '[' || ch == ']' || ch == '-'
                || Character.isJavaIdentifierPart(ch);
        if (keep) {
            cleaned.append(ch);
        }
    }
    return cleaned.toString();
}
/**
 * Compares old and new text content and report back which lines are changed.
 *
 * @param oldText the old text
 * @param newText the new text
 * @return a list of 0-based line numbers that are changed in the new text
 */
public static List<Integer> changedLines(String oldText, String newText) {
    if (oldText == null || oldText.equals(newText)) {
        return Collections.emptyList();
    }
    String[] oldLines = oldText.split("\n");
    String[] newLines = newText.split("\n");
    List<Integer> changed = new ArrayList<>(newLines.length);
    for (int i = 0; i < newLines.length; i++) {
        // a line is changed when it is brand new or differs from its old counterpart
        boolean added = i >= oldLines.length;
        if (added || !newLines[i].equals(oldLines[i])) {
            changed.add(i);
        }
    }
    return changed;
}
/**
 * Removes the leading and trailing whitespace and if the resulting
 * string is empty returns {@code null}.
 * <p>
 * Examples:
 * <blockquote><pre>
 * trimToNull("abc") -> "abc"
 * trimToNull(" abc") -> "abc"
 * trimToNull(" abc ") -> "abc"
 * trimToNull(" ") -> null
 * trimToNull("") -> null
 * </pre></blockquote>
 */
public static String trimToNull(final String given) {
    if (given == null) {
        return null;
    }
    String trimmed = given.trim();
    return trimmed.isEmpty() ? null : trimmed;
}
}
| |
/* Generated By:JavaCC: Do not edit this line. SimpleCharStream.java Version 5.0 */
/* JavaCCOptions:STATIC=false,SUPPORT_CLASS_VISIBILITY_PUBLIC=true */
package hk.ust.cse.lantern.data.io.arff;
/**
 * An implementation of interface CharStream, where the stream is assumed to
 * contain only ASCII characters (without unicode processing).
 *
 * Maintains a circular character buffer plus parallel line/column arrays so the
 * tokenizer can report token positions and back up over already-read characters.
 *
 * NOTE: this class is generated by JavaCC (see the header/checksum comments);
 * avoid hand-editing its logic — regenerate from the grammar instead.
 */
public class SimpleCharStream
{
  /** Whether parser is static. */
  public static final boolean staticFlag = false;
  // Current capacity of the circular buffer.
  int bufsize;
  // Index limit currently usable for filling (<= bufsize).
  int available;
  // Buffer index where the current token started; -1 while between tokens.
  int tokenBegin;
  /** Position in buffer. */
  public int bufpos = -1;
  // Line number of each buffered character, parallel to buffer.
  protected int bufline[];
  // Column number of each buffered character, parallel to buffer.
  protected int bufcolumn[];
  protected int column = 0;
  protected int line = 1;
  // CR/LF state used by UpdateLineColumn to recognize \r, \n and \r\n line ends.
  protected boolean prevCharIsCR = false;
  protected boolean prevCharIsLF = false;
  protected java.io.Reader inputStream;
  protected char[] buffer;
  // Number of valid characters read into buffer so far.
  protected int maxNextCharInd = 0;
  // Number of characters "backed up" and pending re-delivery.
  protected int inBuf = 0;
  protected int tabSize = 8;
  protected void setTabSize(int i) { tabSize = i; }
  // NOTE: generated signature — the parameter is unused.
  protected int getTabSize(int i) { return tabSize; }
  /**
   * Grows the buffer (and its line/column companions) by 2048 entries, optionally
   * un-wrapping a token that currently wraps around the end of the circular buffer.
   */
  protected void ExpandBuff(boolean wrapAround)
  {
    char[] newbuffer = new char[bufsize + 2048];
    int newbufline[] = new int[bufsize + 2048];
    int newbufcolumn[] = new int[bufsize + 2048];
    try
    {
      if (wrapAround)
      {
        // Token wraps: copy tail [tokenBegin..bufsize) then head [0..bufpos) so the
        // token becomes contiguous at the start of the new buffer.
        System.arraycopy(buffer, tokenBegin, newbuffer, 0, bufsize - tokenBegin);
        System.arraycopy(buffer, 0, newbuffer, bufsize - tokenBegin, bufpos);
        buffer = newbuffer;
        System.arraycopy(bufline, tokenBegin, newbufline, 0, bufsize - tokenBegin);
        System.arraycopy(bufline, 0, newbufline, bufsize - tokenBegin, bufpos);
        bufline = newbufline;
        System.arraycopy(bufcolumn, tokenBegin, newbufcolumn, 0, bufsize - tokenBegin);
        System.arraycopy(bufcolumn, 0, newbufcolumn, bufsize - tokenBegin, bufpos);
        bufcolumn = newbufcolumn;
        maxNextCharInd = (bufpos += (bufsize - tokenBegin));
      }
      else
      {
        System.arraycopy(buffer, tokenBegin, newbuffer, 0, bufsize - tokenBegin);
        buffer = newbuffer;
        System.arraycopy(bufline, tokenBegin, newbufline, 0, bufsize - tokenBegin);
        bufline = newbufline;
        System.arraycopy(bufcolumn, tokenBegin, newbufcolumn, 0, bufsize - tokenBegin);
        bufcolumn = newbufcolumn;
        maxNextCharInd = (bufpos -= tokenBegin);
      }
    }
    catch (Throwable t)
    {
      // NOTE(review): generated code drops the original cause here.
      throw new Error(t.getMessage());
    }
    bufsize += 2048;
    available = bufsize;
    tokenBegin = 0;
  }
  /**
   * Ensures at least one more character is available in the buffer, expanding or
   * wrapping the circular buffer as needed; throws IOException at end of stream.
   */
  protected void FillBuff() throws java.io.IOException
  {
    if (maxNextCharInd == available)
    {
      if (available == bufsize)
      {
        if (tokenBegin > 2048)
        {
          // Plenty of room before the token: wrap to the start of the buffer.
          bufpos = maxNextCharInd = 0;
          available = tokenBegin;
        }
        else if (tokenBegin < 0)
          bufpos = maxNextCharInd = 0;
        else
          ExpandBuff(false);
      }
      else if (available > tokenBegin)
        available = bufsize;
      else if ((tokenBegin - available) < 2048)
        ExpandBuff(true);
      else
        available = tokenBegin;
    }
    int i;
    try {
      if ((i = inputStream.read(buffer, maxNextCharInd, available - maxNextCharInd)) == -1)
      {
        // End of stream: close the reader and signal EOF to the tokenizer via IOException.
        inputStream.close();
        throw new java.io.IOException();
      }
      else
        maxNextCharInd += i;
      return;
    }
    catch(java.io.IOException e) {
      // Undo the speculative bufpos advance done by readChar before rethrowing.
      --bufpos;
      backup(0);
      if (tokenBegin == -1)
        tokenBegin = bufpos;
      throw e;
    }
  }
  /** Start. Reads one character and marks it as the beginning of a new token. */
  public char BeginToken() throws java.io.IOException
  {
    tokenBegin = -1;
    char c = readChar();
    tokenBegin = bufpos;
    return c;
  }
  /**
   * Updates the line/column bookkeeping for character {@code c}, handling
   * \r, \n and \r\n line terminators and tab expansion.
   */
  protected void UpdateLineColumn(char c)
  {
    column++;
    if (prevCharIsLF)
    {
      prevCharIsLF = false;
      line += (column = 1);
    }
    else if (prevCharIsCR)
    {
      prevCharIsCR = false;
      if (c == '\n')
      {
        // \r\n pair counts as a single line terminator.
        prevCharIsLF = true;
      }
      else
        line += (column = 1);
    }
    switch (c)
    {
      case '\r' :
        prevCharIsCR = true;
        break;
      case '\n' :
        prevCharIsLF = true;
        break;
      case '\t' :
        // Advance column to the next tab stop.
        column--;
        column += (tabSize - (column % tabSize));
        break;
      default :
        break;
    }
    bufline[bufpos] = line;
    bufcolumn[bufpos] = column;
  }
  /** Read a character, re-delivering backed-up characters first. */
  public char readChar() throws java.io.IOException
  {
    if (inBuf > 0)
    {
      // Serve a character that was previously backed up (position info is already set).
      --inBuf;
      if (++bufpos == bufsize)
        bufpos = 0;
      return buffer[bufpos];
    }
    if (++bufpos >= maxNextCharInd)
      FillBuff();
    char c = buffer[bufpos];
    UpdateLineColumn(c);
    return c;
  }
  /**
   * @deprecated
   * @see #getEndColumn
   */
  @Deprecated
  public int getColumn() {
    return bufcolumn[bufpos];
  }
  /**
   * @deprecated
   * @see #getEndLine
   */
  @Deprecated
  public int getLine() {
    return bufline[bufpos];
  }
  /** Get token end column number. */
  public int getEndColumn() {
    return bufcolumn[bufpos];
  }
  /** Get token end line number. */
  public int getEndLine() {
    return bufline[bufpos];
  }
  /** Get token beginning column number. */
  public int getBeginColumn() {
    return bufcolumn[tokenBegin];
  }
  /** Get token beginning line number. */
  public int getBeginLine() {
    return bufline[tokenBegin];
  }
  /** Backup a number of characters so they are delivered again by readChar. */
  public void backup(int amount) {
    inBuf += amount;
    if ((bufpos -= amount) < 0)
      bufpos += bufsize;
  }
  /** Constructor. */
  public SimpleCharStream(java.io.Reader dstream, int startline,
  int startcolumn, int buffersize)
  {
    inputStream = dstream;
    line = startline;
    // column is pre-decremented because UpdateLineColumn increments before use.
    column = startcolumn - 1;
    available = bufsize = buffersize;
    buffer = new char[buffersize];
    bufline = new int[buffersize];
    bufcolumn = new int[buffersize];
  }
  /** Constructor. */
  public SimpleCharStream(java.io.Reader dstream, int startline,
                          int startcolumn)
  {
    this(dstream, startline, startcolumn, 4096);
  }
  /** Constructor. */
  public SimpleCharStream(java.io.Reader dstream)
  {
    this(dstream, 1, 1, 4096);
  }
  /** Reinitialise. */
  public void ReInit(java.io.Reader dstream, int startline,
  int startcolumn, int buffersize)
  {
    inputStream = dstream;
    line = startline;
    column = startcolumn - 1;
    // Reuse the existing buffers when the requested size matches.
    if (buffer == null || buffersize != buffer.length)
    {
      available = bufsize = buffersize;
      buffer = new char[buffersize];
      bufline = new int[buffersize];
      bufcolumn = new int[buffersize];
    }
    prevCharIsLF = prevCharIsCR = false;
    tokenBegin = inBuf = maxNextCharInd = 0;
    bufpos = -1;
  }
  /** Reinitialise. */
  public void ReInit(java.io.Reader dstream, int startline,
                     int startcolumn)
  {
    ReInit(dstream, startline, startcolumn, 4096);
  }
  /** Reinitialise. */
  public void ReInit(java.io.Reader dstream)
  {
    ReInit(dstream, 1, 1, 4096);
  }
  /** Constructor. A null encoding uses the platform default charset. */
  public SimpleCharStream(java.io.InputStream dstream, String encoding, int startline,
  int startcolumn, int buffersize) throws java.io.UnsupportedEncodingException
  {
    this(encoding == null ? new java.io.InputStreamReader(dstream) : new java.io.InputStreamReader(dstream, encoding), startline, startcolumn, buffersize);
  }
  /** Constructor. */
  public SimpleCharStream(java.io.InputStream dstream, int startline,
  int startcolumn, int buffersize)
  {
    this(new java.io.InputStreamReader(dstream), startline, startcolumn, buffersize);
  }
  /** Constructor. */
  public SimpleCharStream(java.io.InputStream dstream, String encoding, int startline,
                          int startcolumn) throws java.io.UnsupportedEncodingException
  {
    this(dstream, encoding, startline, startcolumn, 4096);
  }
  /** Constructor. */
  public SimpleCharStream(java.io.InputStream dstream, int startline,
                          int startcolumn)
  {
    this(dstream, startline, startcolumn, 4096);
  }
  /** Constructor. */
  public SimpleCharStream(java.io.InputStream dstream, String encoding) throws java.io.UnsupportedEncodingException
  {
    this(dstream, encoding, 1, 1, 4096);
  }
  /** Constructor. */
  public SimpleCharStream(java.io.InputStream dstream)
  {
    this(dstream, 1, 1, 4096);
  }
  /** Reinitialise. A null encoding uses the platform default charset. */
  public void ReInit(java.io.InputStream dstream, String encoding, int startline,
  int startcolumn, int buffersize) throws java.io.UnsupportedEncodingException
  {
    ReInit(encoding == null ? new java.io.InputStreamReader(dstream) : new java.io.InputStreamReader(dstream, encoding), startline, startcolumn, buffersize);
  }
  /** Reinitialise. */
  public void ReInit(java.io.InputStream dstream, int startline,
  int startcolumn, int buffersize)
  {
    ReInit(new java.io.InputStreamReader(dstream), startline, startcolumn, buffersize);
  }
  /** Reinitialise. */
  public void ReInit(java.io.InputStream dstream, String encoding) throws java.io.UnsupportedEncodingException
  {
    ReInit(dstream, encoding, 1, 1, 4096);
  }
  /** Reinitialise. */
  public void ReInit(java.io.InputStream dstream)
  {
    ReInit(dstream, 1, 1, 4096);
  }
  /** Reinitialise. */
  public void ReInit(java.io.InputStream dstream, String encoding, int startline,
                     int startcolumn) throws java.io.UnsupportedEncodingException
  {
    ReInit(dstream, encoding, startline, startcolumn, 4096);
  }
  /** Reinitialise. */
  public void ReInit(java.io.InputStream dstream, int startline,
                     int startcolumn)
  {
    ReInit(dstream, startline, startcolumn, 4096);
  }
  /** Get token literal value (the characters from tokenBegin through bufpos). */
  public String GetImage()
  {
    if (bufpos >= tokenBegin)
      return new String(buffer, tokenBegin, bufpos - tokenBegin + 1);
    else
      // Token wraps around the end of the circular buffer: join the two pieces.
      return new String(buffer, tokenBegin, bufsize - tokenBegin) +
                            new String(buffer, 0, bufpos + 1);
  }
  /** Get the suffix: the last {@code len} characters ending at bufpos. */
  public char[] GetSuffix(int len)
  {
    char[] ret = new char[len];
    if ((bufpos + 1) >= len)
      System.arraycopy(buffer, bufpos - len + 1, ret, 0, len);
    else
    {
      // Suffix wraps around the end of the circular buffer.
      System.arraycopy(buffer, bufsize - (len - bufpos - 1), ret, 0,
                                                        len - bufpos - 1);
      System.arraycopy(buffer, 0, ret, len - bufpos - 1, bufpos + 1);
    }
    return ret;
  }
  /** Reset buffer when finished. */
  public void Done()
  {
    buffer = null;
    bufline = null;
    bufcolumn = null;
  }
  /**
   * Method to adjust line and column numbers for the start of a token.
   */
  public void adjustBeginLineColumn(int newLine, int newCol)
  {
    int start = tokenBegin;
    int len;
    if (bufpos >= tokenBegin)
    {
      len = bufpos - tokenBegin + inBuf + 1;
    }
    else
    {
      len = bufsize - tokenBegin + bufpos + 1 + inBuf;
    }
    int i = 0, j = 0, k = 0;
    int nextColDiff = 0, columnDiff = 0;
    // First rewrite the run of characters that share the token's original line,
    // shifting their columns by the same delta.
    while (i < len && bufline[j = start % bufsize] == bufline[k = ++start % bufsize])
    {
      bufline[j] = newLine;
      nextColDiff = columnDiff + bufcolumn[k] - bufcolumn[j];
      bufcolumn[j] = newCol + columnDiff;
      columnDiff = nextColDiff;
      i++;
    }
    // Remaining characters span later lines: renumber lines sequentially.
    if (i < len)
    {
      bufline[j] = newLine++;
      bufcolumn[j] = newCol + columnDiff;
      while (i++ < len)
      {
        if (bufline[j = start % bufsize] != bufline[++start % bufsize])
          bufline[j] = newLine++;
        else
          bufline[j] = newLine;
      }
    }
    line = bufline[j];
    column = bufcolumn[j];
  }
}
/* JavaCC - OriginalChecksum=30d6aff5b2dc7c8be46059de27c533be (do not edit this line) */
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.oozie.workflow.lite;
import org.apache.hadoop.io.Writable;
import org.apache.oozie.service.LiteWorkflowStoreService;
import org.apache.oozie.util.StringSerializationUtil;
import org.apache.oozie.util.ParamChecker;
import org.apache.oozie.workflow.WorkflowException;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
* This node definition is serialized object and should provide readFields() and write() for read and write of fields in
* this class.
*/
public class NodeDef implements Writable {
    // Serialization version tag; lazily resolved in getNodeDefVersion().
    private String nodeDefVersion = null;
    // Node name; validated non-empty by the parameterized constructors.
    private String name = null;
    private Class<? extends NodeHandler> handlerClass;
    // Node configuration; may be null (serialized as the literal "null" — see readVersionZero).
    private String conf = null;
    private List<String> transitions = new ArrayList<String>();
    // Credentials name; null when the node declares none.
    private String cred = null;
    // User-retry settings; the literal string "null" means "not set".
    // NOTE(review): presumably kept as a string so (de)serialization always has a
    // non-null UTF value to write — confirm against write()/readFields().
    private String userRetryMax = "null";
    private String userRetryInterval = "null";
    private String userRetryPolicy = "null";
    // No-arg constructor; presumably required by the Writable deserialization
    // machinery (fields are populated via readFields) — confirm with callers.
    NodeDef() {
    }
    /**
     * Creates a node definition.
     *
     * @param name node name; must be non-empty
     * @param conf node configuration; may be null
     * @param handlerClass handler to run for this node; must not be null
     * @param transitions outgoing transitions; stored as an unmodifiable list,
     *        each element validated non-empty
     */
    NodeDef(String name, String conf, Class<? extends NodeHandler> handlerClass, List<String> transitions) {
        this.name = ParamChecker.notEmpty(name, "name");
        this.conf = conf;
        this.handlerClass = ParamChecker.notNull(handlerClass, "handlerClass");
        this.transitions = Collections.unmodifiableList(ParamChecker.notEmptyElements(transitions, "transitions"));
    }
    /**
     * Creates a node definition with an optional credentials name.
     *
     * @param cred credentials name; when null the field keeps its default (null)
     */
    NodeDef(String name, String conf, Class<? extends NodeHandler> handlerClass, List<String> transitions, String cred) {
        this(name, conf, handlerClass, transitions);
        if (cred != null) {
            this.cred = cred;
        }
    }
    /**
     * Creates a node definition with credentials and user-retry settings.
     * Null retry arguments leave the corresponding field at its "null" (unset) default.
     */
    NodeDef(String name, String conf, Class<? extends NodeHandler> handlerClass, List<String> transitions, String cred,
            String userRetryMax, String userRetryInterval, String userRetryPolicy) {
        this(name, conf, handlerClass, transitions, cred);
        if (userRetryMax != null) {
            this.userRetryMax = userRetryMax;
        }
        if (userRetryInterval != null) {
            this.userRetryInterval = userRetryInterval;
        }
        if (userRetryPolicy != null) {
            this.userRetryPolicy = userRetryPolicy;
        }
    }
public boolean equals(NodeDef other) {
return !(other == null || getClass() != other.getClass() || !getName().equals(other.getName()));
}
@Override
public int hashCode() {
return name.hashCode();
}
public String getName() {
return name;
}
public String getCred() {
return cred;
}
public Class<? extends NodeHandler> getHandlerClass() {
return handlerClass;
}
public List<String> getTransitions() {
return transitions;
}
public String getConf() {
return conf;
}
public String getUserRetryMax() {
return userRetryMax;
}
public String getUserRetryInterval() {
return userRetryInterval;
}
public String getNodeDefVersion() {
if (nodeDefVersion == null) {
try {
nodeDefVersion = LiteWorkflowStoreService.getNodeDefDefaultVersion();
}
catch (WorkflowException e) {
nodeDefVersion = LiteWorkflowStoreService.NODE_DEF_VERSION_2;
}
}
return nodeDefVersion;
}
public String getUserRetryPolicy() {
return userRetryPolicy;
}
public void setUserRetryPolicy(String userRetryPolicy) {
this.userRetryPolicy = userRetryPolicy;
}
@SuppressWarnings("unchecked")
private void readVersionZero(DataInput dataInput, String firstField) throws IOException {
if (firstField.equals(LiteWorkflowStoreService.NODE_DEF_VERSION_0)) {
name = dataInput.readUTF();
} else {
name = firstField;
}
nodeDefVersion = LiteWorkflowStoreService.NODE_DEF_VERSION_0;
cred = dataInput.readUTF();
String handlerClassName = dataInput.readUTF();
if ((handlerClassName != null) && (handlerClassName.length() > 0)) {
try {
handlerClass = (Class<? extends NodeHandler>) Class.forName(handlerClassName);
}
catch (ClassNotFoundException ex) {
throw new IOException(ex);
}
}
conf = readString(dataInput);
if (conf.equals("null")) {
conf = null;
}
int numTrans = dataInput.readInt();
transitions = new ArrayList<String>(numTrans);
for (int i = 0; i < numTrans; i++) {
transitions.add(dataInput.readUTF());
}
}
@SuppressWarnings("unchecked")
private void readVersionOne(DataInput dataInput, String firstField) throws IOException {
readCommon(dataInput, firstField, LiteWorkflowStoreService.NODE_DEF_VERSION_1);
}
/*
* Reads according to version 2
*/
@SuppressWarnings("unchecked")
private void readVersionTwo(DataInput dataInput, String firstField) throws IOException {
readCommon(dataInput, firstField, LiteWorkflowStoreService.NODE_DEF_VERSION_2);
userRetryPolicy = dataInput.readUTF();
}
/*
* Reads common part
*/
@SuppressWarnings("unchecked")
private void readCommon(DataInput dataInput, String firstField, String nodeDefVer) throws IOException {
nodeDefVersion = nodeDefVer;
name = dataInput.readUTF();
cred = dataInput.readUTF();
if (cred.equals("null")) {
cred = null;
}
String handlerClassName = dataInput.readUTF();
if ((handlerClassName != null) && (handlerClassName.length() > 0)) {
try {
handlerClass = (Class<? extends NodeHandler>) Class.forName(handlerClassName);
}
catch (ClassNotFoundException ex) {
throw new IOException(ex);
}
}
conf = readString(dataInput);
if (conf.equals("null")) {
conf = null;
}
int numTrans = dataInput.readInt();
transitions = new ArrayList<String>(numTrans);
for (int i = 0; i < numTrans; i++) {
transitions.add(dataInput.readUTF());
}
userRetryMax = dataInput.readUTF();
userRetryInterval = dataInput.readUTF();
}
/* (non-Javadoc)
* @see org.apache.hadoop.io.Writable#readFields(java.io.DataInput)
*/
@Override
public void readFields(DataInput dataInput) throws IOException {
String firstField = dataInput.readUTF();
if (firstField.equals(LiteWorkflowStoreService.NODE_DEF_VERSION_1)) {
// since oozie version 3.1
readVersionOne(dataInput, firstField);
}
else if (firstField.equals(LiteWorkflowStoreService.NODE_DEF_VERSION_2)) {
readVersionTwo(dataInput, firstField);
}
else {
readVersionZero(dataInput, firstField);
}
}
private void writeVersionZero(DataOutput dataOutput) throws IOException {
dataOutput.writeUTF(nodeDefVersion);
dataOutput.writeUTF(name);
if (cred != null) {
dataOutput.writeUTF(cred);
}
else {
dataOutput.writeUTF("null");
}
dataOutput.writeUTF(handlerClass.getName());
if (conf != null) {
writeString(dataOutput, conf);
}
else {
dataOutput.writeUTF("null");
}
dataOutput.writeInt(transitions.size());
for (String transition : transitions) {
dataOutput.writeUTF(transition);
}
}
private void writeString(DataOutput dataOutput, String value) throws IOException {
StringSerializationUtil.writeString(dataOutput, value);
}
private String readString(DataInput dataInput) throws IOException {
return StringSerializationUtil.readString(dataInput);
}
/**
* Write as version one format, this version was since 3.1.
*
* @param dataOutput data output to serialize node def
* @throws IOException thrown if fail to write
*/
private void writeVersionOne(DataOutput dataOutput) throws IOException {
writeCommon(dataOutput);
}
/**
* Write as version two format, this version was since 4.4.4.1.
*
* @param dataOutput data output to serialize node def
* @throws IOException thrown if fail to write
*/
private void writeVersionTwo(DataOutput dataOutput) throws IOException {
writeCommon(dataOutput);
if (userRetryPolicy != null) {
dataOutput.writeUTF(userRetryPolicy);
}
else {
dataOutput.writeUTF("null");
}
}
/*
* Write the common part
*/
private void writeCommon(DataOutput dataOutput) throws IOException {
dataOutput.writeUTF(nodeDefVersion);
dataOutput.writeUTF(name);
if (cred != null) {
writeString(dataOutput, cred);
}
else {
dataOutput.writeUTF("null");
}
writeString(dataOutput, handlerClass.getName());
if (conf != null) {
writeString(dataOutput, conf);
}
else {
dataOutput.writeUTF("null");
}
dataOutput.writeInt(transitions.size());
for (String transition : transitions) {
dataOutput.writeUTF(transition);
}
if (userRetryMax != null) {
dataOutput.writeUTF(userRetryMax);
}
else {
dataOutput.writeUTF("null");
}
if (userRetryInterval != null) {
dataOutput.writeUTF(userRetryInterval);
}
else {
dataOutput.writeUTF("null");
}
}
/* (non-Javadoc)
* @see org.apache.hadoop.io.Writable#write(java.io.DataOutput)
*/
@Override
public void write(DataOutput dataOutput) throws IOException {
if (getNodeDefVersion().equals(LiteWorkflowStoreService.NODE_DEF_VERSION_1)) {
// since oozie version 3.1
writeVersionOne(dataOutput);
}
else if (getNodeDefVersion().equals(LiteWorkflowStoreService.NODE_DEF_VERSION_2)) {
writeVersionTwo(dataOutput);
}
else {
writeVersionZero(dataOutput);
}
}
}
| |
/*
* DBeaver - Universal Database Manager
* Copyright (C) 2010-2019 Serge Rider (serge@jkiss.org)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jkiss.dbeaver.ui.dialogs.connection;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.*;
import org.jkiss.dbeaver.core.CoreMessages;
import org.jkiss.dbeaver.model.connection.DBPConnectionEventType;
import org.jkiss.dbeaver.model.runtime.DBRShellCommand;
import org.jkiss.dbeaver.registry.DataSourceDescriptor;
import org.jkiss.dbeaver.ui.DBeaverIcons;
import org.jkiss.dbeaver.ui.UIIcon;
import org.jkiss.dbeaver.ui.UIUtils;
import org.jkiss.dbeaver.ui.controls.TextWithOpenFolder;
import org.jkiss.dbeaver.ui.controls.VariablesHintLabel;
import org.jkiss.utils.ArrayUtils;
import org.jkiss.utils.CommonUtils;
import java.util.HashMap;
import java.util.Map;
/**
* Events edit dialog page
*/
/**
 * Events edit dialog page: lets the user configure shell commands that run on
 * connection lifecycle events (connect/disconnect etc.).
 */
public class ConnectionPageShellCommands extends ConnectionWizardPage {

    public static final String PAGE_NAME = ConnectionPageShellCommands.class.getSimpleName();

    private Text commandText;
    private Button showProcessCheck;
    private Button waitFinishCheck;
    private Spinner waitFinishTimeoutMs;
    private Button terminateCheck;
    private Spinner pauseAfterExecute;
    private TextWithOpenFolder workingDirectory;
    private Table eventTypeTable;
    // Working copy of the per-event commands; flushed back in saveSettings().
    private final Map<DBPConnectionEventType, DBRShellCommand> eventsCache = new HashMap<>();

    protected ConnectionPageShellCommands(DataSourceDescriptor dataSource)
    {
        super(PAGE_NAME);
        setTitle(CoreMessages.dialog_connection_edit_wizard_shell_cmd);
        setDescription(CoreMessages.dialog_connection_events_title);
        setImageDescriptor(DBeaverIcons.getImageDescriptor(UIIcon.EVENT));
        // Copy commands so edits don't touch the data source until save.
        for (DBPConnectionEventType eventType : DBPConnectionEventType.values()) {
            DBRShellCommand command = dataSource.getConnectionConfiguration().getEvent(eventType);
            eventsCache.put(eventType, command == null ? null : new DBRShellCommand(command));
        }
    }

    @Override
    public void createControl(Composite parent)
    {
        Composite group = UIUtils.createPlaceholder(parent, 2, 5);
        group.setLayoutData(new GridData(GridData.FILL_BOTH));
        {
            // Left side: checkable list of event types.
            Composite eventGroup = UIUtils.createPlaceholder(group, 1);
            eventGroup.setLayoutData(new GridData(GridData.FILL_VERTICAL));
            UIUtils.createControlLabel(eventGroup, CoreMessages.dialog_connection_events_label_event);
            eventTypeTable = new Table(eventGroup, SWT.BORDER | SWT.CHECK | SWT.SINGLE | SWT.FULL_SELECTION);
            eventTypeTable.setLayoutData(new GridData(GridData.FILL_VERTICAL));
            eventTypeTable.addListener(SWT.Selection, event -> {
                // Toggling the checkbox also selects the row so the details pane follows.
                if (event.detail == SWT.CHECK) {
                    eventTypeTable.select(eventTypeTable.indexOf((TableItem) event.item));
                }
            });
            for (DBPConnectionEventType eventType : DBPConnectionEventType.values()) {
                DBRShellCommand command = eventsCache.get(eventType);
                TableItem item = new TableItem(eventTypeTable, SWT.NONE);
                item.setData(eventType);
                item.setText(eventType.getTitle());
                item.setImage(DBeaverIcons.getImage(UIIcon.EVENT));
                item.setChecked(command != null && command.isEnabled());
            }
            eventTypeTable.addSelectionListener(new SelectionAdapter() {
                @Override
                public void widgetSelected(SelectionEvent e)
                {
                    DBPConnectionEventType eventType = getSelectedEventType();
                    selectEventType(eventType);
                    DBRShellCommand command = eventType == null ? null : eventsCache.get(eventType);
                    boolean enabled = ((TableItem) e.item).getChecked();
                    // Only persist when the checked state actually changed.
                    if (enabled || (command != null && enabled != command.isEnabled())) {
                        updateEvent(false);
                    }
                }
            });
        }
        {
            // Right side: details of the command bound to the selected event.
            Composite detailsGroup = UIUtils.createPlaceholder(group, 1, 5);
            detailsGroup.setLayoutData(new GridData(GridData.FILL_BOTH));
            UIUtils.createControlLabel(detailsGroup, CoreMessages.dialog_connection_events_label_command);
            commandText = new Text(detailsGroup, SWT.BORDER | SWT.MULTI | SWT.WRAP | SWT.V_SCROLL);
            commandText.addModifyListener(e -> updateEvent(true));
            GridData gd = new GridData(GridData.FILL_HORIZONTAL);
            gd.heightHint = 60;
            gd.widthHint = 300;
            commandText.setLayoutData(gd);
            SelectionAdapter eventEditAdapter = new SelectionAdapter() {
                @Override
                public void widgetSelected(SelectionEvent e)
                {
                    updateEvent(false);
                }
            };
            showProcessCheck = UIUtils.createCheckbox(detailsGroup, CoreMessages.dialog_connection_events_checkbox_show_process, false);
            showProcessCheck.addSelectionListener(eventEditAdapter);
            waitFinishCheck = UIUtils.createCheckbox(detailsGroup, CoreMessages.dialog_connection_events_checkbox_wait_finish, false);
            waitFinishCheck.addSelectionListener(eventEditAdapter);
            waitFinishTimeoutMs = createWaitFinishTimeout(detailsGroup);
            waitFinishTimeoutMs.addSelectionListener(eventEditAdapter);
            terminateCheck = UIUtils.createCheckbox(detailsGroup, CoreMessages.dialog_connection_events_checkbox_terminate_at_disconnect, false);
            terminateCheck.addSelectionListener(eventEditAdapter);
            {
                Composite pauseComposite = UIUtils.createPlaceholder(detailsGroup, 2, 5);
                pauseComposite.setLayoutData(new GridData(GridData.FILL_HORIZONTAL));
                pauseAfterExecute = UIUtils.createLabelSpinner(pauseComposite, CoreMessages.dialog_connection_edit_wizard_shell_cmd_pause_label, CoreMessages.dialog_connection_edit_wizard_shell_cmd_pause_tooltip, 0, 0, Integer.MAX_VALUE);
                pauseAfterExecute.addSelectionListener(eventEditAdapter);
                UIUtils.createControlLabel(pauseComposite, CoreMessages.dialog_connection_edit_wizard_shell_cmd_directory_label);
                // FIX: pass the real localized dialog title instead of a null constant
                // (was a mangled field "CoreMessagesdialog_..." initialized to null).
                workingDirectory = new TextWithOpenFolder(pauseComposite, CoreMessages.dialog_connection_edit_wizard_shell_cmd_directory_title);
                workingDirectory.setLayoutData(new GridData(GridData.FILL_HORIZONTAL));
                workingDirectory.getTextControl().addModifyListener(e -> {
                    DBRShellCommand command = getActiveCommand();
                    if (command != null) {
                        command.setWorkingDirectory(workingDirectory.getText());
                    }
                });
            }
            new VariablesHintLabel(detailsGroup, DataSourceDescriptor.CONNECT_VARIABLES);
        }
        selectEventType(null);
        setControl(group);
    }

    /** Builds the indented "wait for finish timeout" spinner row. */
    private static Spinner createWaitFinishTimeout(Composite detailsGroup) {
        Composite waitFinishGroup = new Composite(detailsGroup, SWT.NONE);
        GridLayout waitFinishGroupLayout = new GridLayout(2, false);
        waitFinishGroupLayout.marginWidth = 0;
        waitFinishGroupLayout.marginHeight = 0;
        waitFinishGroupLayout.marginLeft = 25;
        waitFinishGroup.setLayout(waitFinishGroupLayout);
        GridData gridData = new GridData(GridData.FILL_HORIZONTAL);
        waitFinishGroup.setLayoutData(gridData);
        int defaultValue = DBRShellCommand.WAIT_PROCESS_TIMEOUT_FOREVER;
        int maxSelection = DBRShellCommand.WAIT_PROCESS_TIMEOUT_MAX_SELECTION;
        Spinner spinner = UIUtils.createSpinner(waitFinishGroup, "-1 to wait forever", 0, defaultValue, maxSelection);
        UIUtils.createLabel(waitFinishGroup, CoreMessages.dialog_connection_events_checkbox_wait_finish_timeout);
        return spinner;
    }

    /** @return the event type of the selected table row, or null if nothing is selected. */
    private DBPConnectionEventType getSelectedEventType()
    {
        TableItem[] selection = eventTypeTable.getSelection();
        return ArrayUtils.isEmpty(selection) ? null : (DBPConnectionEventType) selection[0].getData();
    }

    /** @return the table item bound to the given event type, or null if not found. */
    private TableItem getEventItem(DBPConnectionEventType eventType)
    {
        for (TableItem item : eventTypeTable.getItems()) {
            if (item.getData() == eventType) {
                return item;
            }
        }
        return null;
    }

    /**
     * Returns the cached command for the selected event, lazily creating an
     * empty one so edits always have a target.
     */
    private DBRShellCommand getActiveCommand() {
        DBPConnectionEventType eventType = getSelectedEventType();
        if (eventType != null) {
            DBRShellCommand command = eventsCache.get(eventType);
            if (command == null) {
                command = new DBRShellCommand(""); //$NON-NLS-1$
                eventsCache.put(eventType, command);
            }
            return command;
        }
        return null;
    }

    /**
     * Flushes UI state into the active command.
     *
     * @param commandChange true when only the command text changed; false to
     *                      sync all the option widgets as well
     */
    private void updateEvent(boolean commandChange)
    {
        DBPConnectionEventType eventType = getSelectedEventType();
        DBRShellCommand command = getActiveCommand();
        if (command != null) {
            boolean prevEnabled = command.isEnabled();
            if (commandChange) {
                command.setCommand(commandText.getText());
            } else {
                TableItem item = getEventItem(eventType);
                if (item != null) {
                    command.setEnabled(item.getChecked());
                }
                command.setShowProcessPanel(showProcessCheck.getSelection());
                command.setWaitProcessFinish(waitFinishCheck.getSelection());
                waitFinishTimeoutMs.setEnabled(waitFinishCheck.getSelection());
                command.setWaitProcessTimeoutMs(waitFinishTimeoutMs.getSelection());
                command.setTerminateAtDisconnect(terminateCheck.getSelection());
                command.setPauseAfterExecute(pauseAfterExecute.getSelection());
                command.setWorkingDirectory(workingDirectory.getText());
                if (prevEnabled != command.isEnabled()) {
                    // Enabled state changed: refresh the details pane enablement.
                    selectEventType(eventType);
                }
            }
        } else if (!commandChange) {
            selectEventType(null);
        }
    }

    /**
     * Loads the given event's command into the details pane (or clears the
     * pane when eventType is null / no command exists).
     */
    private void selectEventType(DBPConnectionEventType eventType)
    {
        DBRShellCommand command = eventType == null ? null : eventsCache.get(eventType);
        commandText.setEnabled(command != null && command.isEnabled());
        showProcessCheck.setEnabled(command != null && command.isEnabled());
        waitFinishCheck.setEnabled(command != null && command.isEnabled());
        waitFinishTimeoutMs.setEnabled(waitFinishCheck.isEnabled());
        terminateCheck.setEnabled(command != null && command.isEnabled());
        pauseAfterExecute.setEnabled(command != null && command.isEnabled());
        workingDirectory.setEnabled(command != null && command.isEnabled());
        workingDirectory.getTextControl().setEnabled(command != null && command.isEnabled());
        if (command != null) {
            commandText.setText(CommonUtils.toString(command.getCommand()));
            showProcessCheck.setSelection(command.isShowProcessPanel());
            waitFinishCheck.setSelection(command.isWaitProcessFinish());
            waitFinishTimeoutMs.setSelection(command.getWaitProcessTimeoutMs());
            terminateCheck.setSelection(command.isTerminateAtDisconnect());
            pauseAfterExecute.setSelection(command.getPauseAfterExecute());
            workingDirectory.setText(CommonUtils.notEmpty(command.getWorkingDirectory()));
        } else {
            commandText.setText(""); //$NON-NLS-1$
            showProcessCheck.setSelection(false);
            waitFinishCheck.setSelection(false);
            waitFinishTimeoutMs.setSelection(DBRShellCommand.WAIT_PROCESS_TIMEOUT_FOREVER);
            terminateCheck.setSelection(false);
            pauseAfterExecute.setSelection(0);
            workingDirectory.setText("");
        }
    }

    @Override
    public void saveSettings(DataSourceDescriptor dataSourceDescriptor) {
        // Push the edited working copies back into the data source configuration.
        for (Map.Entry<DBPConnectionEventType, DBRShellCommand> entry : eventsCache.entrySet()) {
            dataSourceDescriptor.getConnectionConfiguration().setEvent(entry.getKey(), entry.getValue());
        }
    }
}
| |
package com.missionhub.ui;
import android.content.Context;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import org.holoeverywhere.LayoutInflater;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
/**
* An "super" array adapter for generic objects
*/
/**
 * A "super" array adapter for generic objects, supporting hide/show of items,
 * multiple view types, and synchronized mutation.
 */
public abstract class ObjectArrayAdapter<T> extends BaseAdapter {

    /**
     * List of all the objects in the adapter (hidden and visible).
     */
    private final List<T> mObjects = new ArrayList<T>();

    /**
     * List of the active (visible) objects in the adapter.
     */
    private final List<T> mActiveObjects = new ArrayList<T>();

    /**
     * List of the hidden objects in the adapter.
     */
    private final List<T> mHiddenObjects = new ArrayList<T>();

    /**
     * Lock object to synchronize operations on.
     */
    private final Object mLock = new Object();

    /**
     * List of the type of views added to the adapter; index = view type id.
     */
    private final List<Class<? extends Object>> mTypes = new ArrayList<Class<? extends Object>>();

    /**
     * The maximum number of view types allowed.
     */
    private final int mMaxViewTypes;

    /**
     * The context for generating views and other context sensitive operations.
     */
    private Context mContext;

    /**
     * True when changes to the adapter should notify the list.
     */
    private boolean mNotify = true;

    /**
     * Constructor with a default limit of 10 view types.
     *
     * @param context The current context.
     */
    public ObjectArrayAdapter(final Context context) {
        this(context, 10);
    }

    /**
     * Constructor
     *
     * @param context The current context.
     * @param maxViewTypes The maximum number of view types allowed.
     */
    public ObjectArrayAdapter(final Context context, final int maxViewTypes) {
        mContext = context;
        mMaxViewTypes = maxViewTypes;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public int getCount() {
        return mActiveObjects.size();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public T getItem(final int position) {
        return mActiveObjects.get(position);
    }

    /**
     * @param object The object to find
     * @return True if the adapter contains (and is showing) the object
     */
    public boolean contains(final T object) {
        return mActiveObjects.contains(object);
    }

    /**
     * {@inheritDoc}
     *
     * Returns the object's own id when it implements {@link ItemIdProvider},
     * otherwise 0. Exceptions (e.g. out-of-range position) are deliberately
     * swallowed and reported as 0.
     */
    @Override
    public long getItemId(final int position) {
        try {
            final T object = mActiveObjects.get(position);
            if (object instanceof ItemIdProvider) {
                return ((ItemIdProvider) object).getItemId();
            }
        } catch (final Exception ignored) { /* best-effort: fall through to 0 */ }
        return 0;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public int getViewTypeCount() {
        return mMaxViewTypes;
    }

    /**
     * Registers a view type for the given object class.
     *
     * @param clss The view type class.
     * @throws IllegalStateException if registering a new type would exceed the limit
     */
    private void addType(final Class<? extends Object> clss) {
        if (!mTypes.contains(clss)) {
            // Check before mutating so the type list is never left over-full.
            if (mTypes.size() >= mMaxViewTypes) {
                throw new IllegalStateException("Max view types limit reached.");
            }
            mTypes.add(clss);
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public int getItemViewType(final int position) {
        final T object = getItem(position);
        if (object == null) return IGNORE_ITEM_VIEW_TYPE;
        return mTypes.indexOf(object.getClass());
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public View getDropDownView(final int position, final View convertView, final ViewGroup parent) {
        return getView(position, convertView, parent);
    }

    /**
     * Adds an object to the adapter.
     *
     * @param object The object to add.
     */
    public void add(final T object) {
        synchronized (mLock) {
            mObjects.add(object);
            mActiveObjects.add(object);
            addType(object.getClass());
        }
        maybeNotify();
    }

    /**
     * Adds a collection of objects to the adapter.
     *
     * @param objects The objects to add.
     */
    public void addAll(final Collection<T> objects) {
        synchronized (mLock) {
            mObjects.addAll(objects);
            mActiveObjects.addAll(objects);
            for (T object : objects) {
                addType(object.getClass());
            }
        }
        maybeNotify();
    }

    /**
     * Performs an unsynchronized insert; caller must hold mLock.
     *
     * @param object The object to insert
     * @param index The index
     */
    private void unsafeInsert(final T object, final int index) {
        mObjects.add(index, object);
        addType(object.getClass());
        refreshHidden();
    }

    /**
     * Inserts an object in to the array at a given index.
     *
     * @param object The object to insert
     * @param index The index
     */
    public void insert(final T object, final int index) {
        synchronized (mLock) {
            unsafeInsert(object, index);
        }
        maybeNotify();
    }

    /**
     * Inserts an object before another object. No-op if the anchor is absent.
     *
     * @param object The object to insert
     * @param beforeObject The object to insert the object before
     */
    public void insertBefore(final T object, final T beforeObject) {
        synchronized (mLock) {
            final int index = mObjects.indexOf(beforeObject);
            if (index == -1) return;
            unsafeInsert(object, index);
        }
        maybeNotify();
    }

    /**
     * Inserts an object after another object. No-op if the anchor is absent.
     *
     * @param object The object to insert
     * @param afterObject The object to insert the object after
     */
    public void insertAfter(final T object, final T afterObject) {
        synchronized (mLock) {
            final int index = mObjects.indexOf(afterObject);
            if (index == -1) return;
            // indexOf(...) <= size()-1, so index+1 is always a valid insert position
            unsafeInsert(object, index + 1);
        }
        maybeNotify();
    }

    /**
     * Removes an object from the adapter.
     *
     * @param object The object to remove
     */
    public void remove(final T object) {
        synchronized (mLock) {
            mObjects.remove(object);
            mActiveObjects.remove(object);
            mHiddenObjects.remove(object);
        }
        maybeNotify();
    }

    /**
     * Removes a collection of objects from the adapter.
     *
     * @param objects The objects to remove.
     */
    public void removeAll(final Collection<T> objects) {
        synchronized (mLock) {
            mObjects.removeAll(objects);
            mActiveObjects.removeAll(objects);
            mHiddenObjects.removeAll(objects);
        }
        maybeNotify();
    }

    /**
     * Replaces an old object with a new one, preserving position and
     * hidden/visible state.
     *
     * @param oldObject The old object
     * @param newObject The new object
     */
    public void replace(final T oldObject, final T newObject) {
        synchronized (mLock) {
            replaceObject(oldObject, newObject, mObjects);
            replaceObject(oldObject, newObject, mActiveObjects);
            replaceObject(oldObject, newObject, mHiddenObjects);
            addType(newObject.getClass());
        }
        maybeNotify();
    }

    /**
     * Replaces an old object with a new one in the given list; no-op if absent.
     *
     * @param oldObject The old object
     * @param newObject The new object
     * @param list The list to work on
     */
    private void replaceObject(final T oldObject, final T newObject, final List<T> list) {
        final int index = list.indexOf(oldObject);
        if (index == -1) return;
        list.set(index, newObject);
    }

    /**
     * Removes all objects from the adapter.
     */
    public void clear() {
        synchronized (mLock) {
            mObjects.clear();
            mActiveObjects.clear();
            mHiddenObjects.clear();
        }
        maybeNotify();
    }

    /**
     * Hides an object to prevent its display.
     *
     * @param object The object to hide
     */
    public void hide(final T object) {
        synchronized (mLock) {
            mHiddenObjects.add(object);
            mActiveObjects.remove(object);
        }
        maybeNotify();
    }

    /**
     * Shows an object to allow its display.
     *
     * @param object The object to show
     */
    public void show(final T object) {
        synchronized (mLock) {
            mHiddenObjects.remove(object);
            refreshHidden();
        }
        maybeNotify();
    }

    /**
     * Shows all currently hidden objects.
     */
    public void showAll() {
        synchronized (mLock) {
            mHiddenObjects.clear();
            refreshHidden();
        }
        maybeNotify();
    }

    /**
     * Rebuilds the active list from mObjects minus mHiddenObjects, keeping the
     * original ordering. Caller must hold mLock.
     */
    private void refreshHidden() {
        mActiveObjects.clear();
        for (final T object : mObjects) {
            if (!mHiddenObjects.contains(object)) {
                mActiveObjects.add(object);
            }
        }
    }

    /**
     * Possibly notifies the list that the data set has changed.
     */
    private void maybeNotify() {
        if (mNotify) {
            notifyDataSetChanged();
        }
    }

    /**
     * {@inheritDoc}
     *
     * Re-arms notifications after a manual notify, mirroring ArrayAdapter behavior.
     */
    @Override
    public void notifyDataSetChanged() {
        super.notifyDataSetChanged();
        setNotifyOnChange(true);
    }

    /**
     * Sets whether or not the list should be notified on adapter object changes.
     *
     * @param notify true to notify on every mutation
     */
    public void setNotifyOnChange(final boolean notify) {
        mNotify = notify;
    }

    /**
     * Sorts the list by a given comparator.
     *
     * @param comparator The comparator
     */
    public void sort(final Comparator<? super T> comparator) {
        Collections.sort(mObjects, comparator);
        Collections.sort(mActiveObjects, comparator);
        maybeNotify();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public boolean isEmpty() {
        return mActiveObjects.isEmpty();
    }

    /**
     * {@inheritDoc}
     *
     * Items are enabled unless they implement {@link SupportEnable} and report
     * themselves disabled.
     */
    @Override
    public boolean isEnabled(final int position) {
        final T object = getItem(position);
        // instanceof is null-safe, so no separate null check is needed
        return !(object instanceof SupportEnable) || ((SupportEnable) object).isEnabled();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public boolean areAllItemsEnabled() {
        for (int i = 0; i < getCount(); i++) {
            if (!isEnabled(i)) {
                return false;
            }
        }
        return super.areAllItemsEnabled();
    }

    /**
     * @return The context
     */
    public Context getContext() {
        return mContext;
    }

    /**
     * Sets the context for the adapter. Ideally this should be set before the adapter is attached to the adapter view.
     *
     * @param context the new context
     */
    public void setContext(final Context context) {
        synchronized (mLock) {
            mContext = context;
            notifyDataSetChanged();
        }
    }

    /**
     * @return A LayoutInflater for the current context.
     */
    public LayoutInflater getLayoutInflater() {
        return LayoutInflater.from(getContext());
    }

    /**
     * Interface definition for callbacks to be invoked to determine an objects row id.
     */
    public interface ItemIdProvider {
        /**
         * @return The object's row id.
         * @see BaseAdapter#getItemId(int)
         */
        long getItemId();
    }

    /**
     * Interface definition to provide a callback to determine if a list item is enabled.
     */
    public interface SupportEnable {
        /**
         * @return True if the list item is enabled.
         */
        public boolean isEnabled();
    }

    /**
     * Class to extend to disable a list item.
     */
    public abstract static class DisabledItem implements SupportEnable {
        /**
         * {@inheritDoc}
         */
        @Override
        public boolean isEnabled() {
            return false;
        }
    }

    /**
     * Returns the lock object for external entities to synchronize on
     *
     * @return the adapter's internal lock
     */
    public Object getLock() {
        return mLock;
    }

    /**
     * Returns all of the objects in the adapter
     *
     * @return a defensive copy of all objects (hidden and visible)
     */
    public List<T> getObjects() {
        synchronized (mLock) {
            return new ArrayList<T>(mObjects);
        }
    }

    /**
     * Returns the position of an item by it's id.
     *
     * @param id the item id to look up
     * @return the visible position, or -1 if not found
     */
    public int getPositionById(long id) {
        synchronized (mLock) {
            for (int i = 0; i < getCount(); i++) {
                if (getItemId(i) == id) {
                    return i;
                }
            }
        }
        return -1;
    }

    /**
     * Returns the positions of the items by their ids
     *
     * @param ids the item ids to look up
     * @return visible positions of matching items, in list order
     */
    public List<Integer> getPositionById(Collection<Long> ids) {
        synchronized (mLock) {
            ArrayList<Integer> positions = new ArrayList<Integer>();
            for (int i = 0; i < getCount(); i++) {
                if (ids.contains(getItemId(i))) {
                    positions.add(i);
                }
            }
            return positions;
        }
    }

    /**
     * Removes all items from the list with in the list of given ids
     *
     * @param ids the item ids to remove
     */
    public void removeAllById(Collection<Long> ids) {
        synchronized (getLock()) {
            List<Integer> positions = getPositionById(ids);
            List<T> remove = new ArrayList<T>();
            for (Integer position : positions) {
                remove.add(getItem(position));
            }
            removeAll(remove);
        }
    }
}
| |
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.engine.impl.persistence.entity;
import static org.camunda.bpm.engine.impl.util.EnsureUtil.ensureNotNull;
import java.io.Serializable;
import java.util.Date;
import java.util.HashMap;
import org.camunda.bpm.engine.impl.context.Context;
import org.camunda.bpm.engine.impl.db.DbEntity;
import org.camunda.bpm.engine.impl.db.HasDbRevision;
import org.camunda.bpm.engine.impl.event.EventHandler;
import org.camunda.bpm.engine.impl.interceptor.CommandContext;
import org.camunda.bpm.engine.impl.jobexecutor.EventSubscriptionJobDeclaration;
import org.camunda.bpm.engine.impl.pvm.process.ActivityImpl;
import org.camunda.bpm.engine.impl.pvm.process.ProcessDefinitionImpl;
import org.camunda.bpm.engine.impl.util.ClockUtil;
import org.camunda.bpm.engine.runtime.EventSubscription;
/**
* @author Daniel Meyer
*/
public abstract class EventSubscriptionEntity implements EventSubscription, DbEntity, HasDbRevision, Serializable {
private static final long serialVersionUID = 1L;
// persistent state ///////////////////////////
protected String id;
protected int revision = 1;
protected String eventType;
protected String eventName;
protected String executionId;
protected String processInstanceId;
protected String activityId;
protected String configuration;
protected Date created;
protected String tenantId;
// runtime state /////////////////////////////
protected ExecutionEntity execution;
protected ActivityImpl activity;
protected EventSubscriptionJobDeclaration jobDeclaration;
/////////////////////////////////////////////
// Default constructor: stamps the creation time. ClockUtil lets tests control "now".
public EventSubscriptionEntity() {
this.created = ClockUtil.getCurrentTime();
}
// Creates a subscription bound to the given execution: links both sides via
// setExecution(), captures the execution's current activity, and copies the
// process instance id and tenant id.
public EventSubscriptionEntity(ExecutionEntity executionEntity) {
this();
setExecution(executionEntity);
// note: reads the 'execution' field that setExecution() just populated
setActivity(execution.getActivity());
this.processInstanceId = executionEntity.getProcessInstanceId();
this.tenantId = executionEntity.getTenantId();
}
// processing /////////////////////////////
/**
 * Dispatches an incoming event, either immediately in this command context or
 * asynchronously through the job executor.
 *
 * @param payload optional event payload passed on to the event handler
 * @param processASync true to schedule an async job, false to handle inline
 */
public void eventReceived(Object payload, boolean processASync) {
    if (!processASync) {
        processEventSync(payload);
        return;
    }
    scheduleEventAsync(payload);
}
// Resolves the handler registered for this subscription's event type and invokes
// it immediately within the current command context.
protected void processEventSync(Object payload) {
EventHandler eventHandler = Context.getProcessEngineConfiguration().getEventHandler(eventType);
// fail fast if no handler is registered for this event type
ensureNotNull("Could not find eventhandler for event of type '" + eventType + "'", "eventHandler", eventHandler);
eventHandler.handleEvent(this, payload, Context.getCommandContext());
}
/**
 * Hands the event off to the job executor when a job declaration is available
 * for this subscription; otherwise falls back to synchronous processing.
 */
protected void scheduleEventAsync(Object payload) {
    EventSubscriptionJobDeclaration declaration = getJobDeclaration();
    if (declaration != null) {
        // enqueue a message job carrying this subscription
        MessageEntity jobMessage = declaration.createJobInstance(this);
        CommandContext commandContext = Context.getCommandContext();
        commandContext.getJobManager().send(jobMessage);
    }
    else {
        // fallback to sync if we couldn't find a job declaration
        processEventSync(payload);
    }
}
// persistence behavior /////////////////////
/**
 * Deletes this subscription from the database and detaches it from its
 * owning execution, if any.
 */
public void delete() {
    CommandContext commandContext = Context.getCommandContext();
    commandContext.getEventSubscriptionManager().deleteEventSubscription(this);
    removeFromExecution();
}
/**
 * Persists this subscription and attaches it to its owning execution, if any.
 */
public void insert() {
    CommandContext commandContext = Context.getCommandContext();
    commandContext.getEventSubscriptionManager().insert(this);
    addToExecution();
}
// referential integrity -> ExecutionEntity ////////////////////////////////////
/**
 * Registers this subscription on its owning execution (referential integrity),
 * doing nothing when no execution is attached.
 */
protected void addToExecution() {
    ExecutionEntity owningExecution = getExecution();
    if (owningExecution == null) {
        return;
    }
    owningExecution.addEventSubscription(this);
}
/**
 * Unregisters this subscription from its owning execution (referential
 * integrity), doing nothing when no execution is attached.
 */
protected void removeFromExecution() {
    ExecutionEntity owningExecution = getExecution();
    if (owningExecution == null) {
        return;
    }
    owningExecution.removeEventSubscription(this);
}
/**
 * Returns a snapshot map of the mutable persisted fields
 * (executionId, configuration, activityId) — presumably consumed by the
 * persistence layer for dirty checking; confirm against the session code.
 */
public Object getPersistentState() {
    HashMap<String, Object> state = new HashMap<String, Object>();
    state.put("executionId", executionId);
    state.put("configuration", configuration);
    state.put("activityId", activityId);
    return state;
}
// getters & setters ////////////////////////////
/**
 * Returns the execution this subscription belongs to, lazily loading it by
 * {@code executionId} on first access and caching it in the {@code execution}
 * field.
 *
 * @return the cached or freshly loaded execution, or null when no
 *         executionId is set (or the lookup returns null)
 */
public ExecutionEntity getExecution() {
    if(execution == null && executionId != null) {
        execution = Context.getCommandContext()
            .getExecutionManager()
            .findExecutionById(executionId);
    }
    return execution;
}
/**
 * Associates this subscription with an execution (also registering the
 * back-reference), or detaches it completely when {@code execution} is null.
 *
 * @param execution the owning execution, or null to detach
 */
public void setExecution(ExecutionEntity execution) {
    if (execution == null) {
        // detach: remove the back-reference first, then clear both fields
        removeFromExecution();
        this.executionId = null;
        this.execution = null;
    } else {
        this.execution = execution;
        this.executionId = execution.getId();
        addToExecution();
    }
}
/**
 * Returns the activity this subscription is attached to, lazily resolving it
 * from the process definition by {@code activityId} and caching the result.
 *
 * @return the resolved activity, or null when no activityId is set
 */
public ActivityImpl getActivity() {
    if(activity == null && activityId != null) {
        ProcessDefinitionImpl processDefinition = getProcessDefinition();
        activity = processDefinition.findActivity(activityId);
    }
    return activity;
}
/**
 * Resolves the process definition this subscription belongs to: through the
 * execution when one exists, otherwise by interpreting {@code configuration}
 * as a process definition id (the convention for start-event subscriptions).
 *
 * @return the deployed process definition
 */
public ProcessDefinitionEntity getProcessDefinition() {
    if (executionId != null) {
        ExecutionEntity execution = getExecution();
        return (ProcessDefinitionEntity) execution.getProcessDefinition();
    }
    else {
        // this assumes that start event subscriptions have the process definition id
        // as their configuration (which holds for message and signal start events)
        String processDefinitionId = getConfiguration();
        return Context.getProcessEngineConfiguration()
            .getDeploymentCache()
            .findDeployedProcessDefinitionById(processDefinitionId);
    }
}
/**
 * Sets the cached activity reference; when non-null, also records its id.
 * Note that a null argument leaves the previous {@code activityId} in place
 * (matching the original behavior).
 */
public void setActivity(ActivityImpl activity) {
    this.activity = activity;
    if (activity == null) {
        return;
    }
    this.activityId = activity.getId();
}
/**
 * Lazily looks up and caches the async job declaration matching this
 * subscription.
 *
 * @return the declaration, or null when none applies
 */
public EventSubscriptionJobDeclaration getJobDeclaration() {
    if (jobDeclaration != null) {
        return jobDeclaration;
    }
    jobDeclaration = EventSubscriptionJobDeclaration.findDeclarationForSubscription(this);
    return jobDeclaration;
}
// -------- plain accessors --------

// database id (also the sole basis of equals()/hashCode())
public String getId() {
    return id;
}
public void setId(String id) {
    this.id = id;
}

// optimistic-locking revision
public int getRevision() {
    return revision;
}
public void setRevision(int revision) {
    this.revision = revision;
}

// revision value to be written by the next update
public int getRevisionNext() {
    return revision +1;
}

// event type discriminator (e.g. the key used to look up the EventHandler)
public String getEventType() {
    return eventType;
}
public void setEventType(String eventType) {
    this.eventType = eventType;
}

public String getEventName() {
    return eventName;
}
public void setEventName(String eventName) {
    this.eventName = eventName;
}

// id of the owning execution; prefer setExecution() to keep the cached
// reference and back-reference consistent
public String getExecutionId() {
    return executionId;
}
public void setExecutionId(String executionId) {
    this.executionId = executionId;
}

public String getProcessInstanceId() {
    return processInstanceId;
}
public void setProcessInstanceId(String processInstanceId) {
    this.processInstanceId = processInstanceId;
}

// free-form configuration; for start-event subscriptions this holds the
// process definition id (see getProcessDefinition())
public String getConfiguration() {
    return configuration;
}
public void setConfiguration(String configuration) {
    this.configuration = configuration;
}

public String getActivityId() {
    return activityId;
}
public void setActivityId(String activityId) {
    this.activityId = activityId;
}

// creation timestamp, set from ClockUtil in the default constructor
public Date getCreated() {
    return created;
}
public void setCreated(Date created) {
    this.created = created;
}

public String getTenantId() {
    return tenantId;
}
public void setTenantId(String tenantId) {
    this.tenantId = tenantId;
}
@Override
public int hashCode() {
    // identity is based solely on the database id, matching equals();
    // 31 * 1 + idHash collapses to the expression below
    return 31 + ((id == null) ? 0 : id.hashCode());
}
@Override
public boolean equals(Object obj) {
    // two subscriptions are equal iff they are the same class and share the
    // same database id (null ids only match null ids)
    if (this == obj) {
        return true;
    }
    if (obj == null || getClass() != obj.getClass()) {
        return false;
    }
    EventSubscriptionEntity other = (EventSubscriptionEntity) obj;
    return (id == null) ? other.id == null : id.equals(other.id);
}
@Override
public String toString() {
    // diagnostic dump of all persisted fields
    StringBuilder sb = new StringBuilder(this.getClass().getSimpleName());
    sb.append("[id=").append(id);
    sb.append(", eventType=").append(eventType);
    sb.append(", eventName=").append(eventName);
    sb.append(", executionId=").append(executionId);
    sb.append(", processInstanceId=").append(processInstanceId);
    sb.append(", activityId=").append(activityId);
    sb.append(", tenantId=").append(tenantId);
    sb.append(", configuration=").append(configuration);
    sb.append(", revision=").append(revision);
    sb.append(", created=").append(created);
    sb.append("]");
    return sb.toString();
}
}
| |
/*
* Copyright (C) 2014 Nafundi
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/**
 * Activity to upload completed form instances to Google Sheets.
 *
 * @author Carl Hartung (chartung@nafundi.com)
 */
package org.mamasdelrio.android.activities;
import java.io.IOException;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Set;
import org.mamasdelrio.android.R;
import org.mamasdelrio.android.application.Collect;
import org.mamasdelrio.android.listeners.InstanceUploaderListener;
import org.mamasdelrio.android.preferences.PreferencesActivity;
import org.mamasdelrio.android.provider.InstanceProviderAPI.InstanceColumns;
import org.mamasdelrio.android.tasks.GoogleSheetsAbstractUploader;
import org.mamasdelrio.android.tasks.GoogleSheetsTask;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.Dialog;
import android.app.ProgressDialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.SharedPreferences;
import android.database.Cursor;
import android.net.Uri;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.util.Log;
import com.google.android.gms.auth.GoogleAuthException;
import com.google.android.gms.auth.GooglePlayServicesAvailabilityException;
import com.google.android.gms.auth.UserRecoverableAuthException;
import com.google.android.gms.common.GooglePlayServicesUtil;
public class GoogleSheetsUploaderActivity extends Activity implements InstanceUploaderListener {
    private final static String tag = "GoogleSheetsUploaderActivity";

    // managed-dialog ids used with showDialog()/dismissDialog()
    private final static int PROGRESS_DIALOG = 1;
    private final static int GOOGLE_USER_DIALOG = 3;

    // saved-instance-state keys used to restore the alert dialog after rotation
    private static final String ALERT_MSG = "alertmsg";
    private static final String ALERT_SHOWING = "alertshowing";

    private ProgressDialog mProgressDialog;
    private AlertDialog mAlertDialog;
    private String mAlertMsg;
    private boolean mAlertShowing;
    // instance database ids handed over by the launching intent
    private Long[] mInstancesToSend;
    // upload task; retained across configuration changes via onRetainNonConfigurationInstance()
    private GoogleSheetsInstanceUploaderTask mUlTask;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        Log.i(tag, "onCreate: " + ((savedInstanceState == null) ? "creating" : "re-initializing"));
        // if we start this activity, the following must be true:
        // 1) Google Sheets is selected in preferences
        // 2) A google user is selected
        // default initializers
        mAlertMsg = getString(R.string.please_wait);
        mAlertShowing = false;
        setTitle(getString(R.string.app_name) + " > " + getString(R.string.send_data));
        // get any simple saved state...
        // resets alert message and showing dialog if the screen is rotated
        if (savedInstanceState != null) {
            if (savedInstanceState.containsKey(ALERT_MSG)) {
                mAlertMsg = savedInstanceState.getString(ALERT_MSG);
            }
            if (savedInstanceState.containsKey(ALERT_SHOWING)) {
                mAlertShowing = savedInstanceState.getBoolean(ALERT_SHOWING, false);
            }
        }
        long[] selectedInstanceIDs;
        Intent intent = getIntent();
        selectedInstanceIDs = intent.getLongArrayExtra(FormEntryActivity.KEY_INSTANCES);
        // box the primitive ids so they can be passed to AsyncTask.execute(Long...)
        mInstancesToSend = new Long[(selectedInstanceIDs == null) ? 0 : selectedInstanceIDs.length];
        if (selectedInstanceIDs != null) {
            for (int i = 0; i < selectedInstanceIDs.length; ++i) {
                mInstancesToSend[i] = selectedInstanceIDs[i];
            }
        }
        // at this point, we don't expect this to be empty...
        if (mInstancesToSend.length == 0) {
            Log.e(tag, "onCreate: No instances to upload!");
            // drop through --
            // everything will process through OK
        } else {
            Log.i(tag, "onCreate: Beginning upload of " + mInstancesToSend.length + " instances!");
        }
        runTask();
    }

    /**
     * Resumes a retained upload task if one exists; otherwise validates that a
     * Google account is configured and starts a fresh upload, showing the
     * progress dialog.
     */
    private void runTask() {
        mUlTask = (GoogleSheetsInstanceUploaderTask)getLastNonConfigurationInstance();
        if (mUlTask == null) {
            mUlTask = new GoogleSheetsInstanceUploaderTask();
            // ensure we have a google account selected
            SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this);
            String googleUsername = prefs.getString(
                    PreferencesActivity.KEY_SELECTED_GOOGLE_ACCOUNT, null);
            if (googleUsername == null || googleUsername.equals("")) {
                // no account configured: tell the user and bail out
                showDialog(GOOGLE_USER_DIALOG);
                return;
            }
            // setup dialog and upload task
            showDialog(PROGRESS_DIALOG);
            mUlTask.setUserName(googleUsername);
            mUlTask.setUploaderListener(this);
            mUlTask.execute(mInstancesToSend);
        } else {
            // it's not null, so we have a task running
            // progress dialog is handled by the system
        }
    }

    /**
     * Handles returns from the Play Store (after a Play Services error) and
     * from the user-recoverable auth flow launched by the upload task.
     */
    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        if (requestCode == GoogleSheetsTask.PLAYSTORE_REQUEST_CODE && resultCode == RESULT_OK) {
            // the user got sent to the playstore
            // it returns to this activity, but we'd rather they manually retry
            // so we finish
            finish();
        } else if (requestCode == GoogleSheetsTask.USER_RECOVERABLE_REQUEST_CODE
                && resultCode == RESULT_OK) {
            // authorization granted, try again
            runTask();
        } else if (requestCode == GoogleSheetsTask.USER_RECOVERABLE_REQUEST_CODE
                && resultCode == RESULT_CANCELED) {
            // the user backed out
            finish();
        } else {
            Log.e(tag, "unknown request: " + requestCode + " :: result: " + resultCode);
        }
    }

    @Override
    protected void onStart() {
        super.onStart();
        Collect.getInstance().getActivityLogger().logOnStart(this);
    }

    @Override
    protected void onResume() {
        // re-attach this (possibly re-created) activity as the task's listener
        if (mUlTask != null) {
            mUlTask.setUploaderListener(this);
        }
        // restore the result dialog dismissed by onPause(), if it was showing
        if (mAlertShowing) {
            createAlertDialog(mAlertMsg);
        }
        super.onResume();
    }

    @Override
    protected void onSaveInstanceState(Bundle outState) {
        super.onSaveInstanceState(outState);
        outState.putString(ALERT_MSG, mAlertMsg);
        outState.putBoolean(ALERT_SHOWING, mAlertShowing);
    }

    // hands the running task to the re-created activity on configuration change
    @Override
    public Object onRetainNonConfigurationInstance() {
        return mUlTask;
    }

    @Override
    protected void onPause() {
        super.onPause();
        // dismiss (not cancel) the alert; mAlertShowing stays true so
        // onResume() can re-show it
        if (mAlertDialog != null && mAlertDialog.isShowing()) {
            mAlertDialog.dismiss();
        }
    }

    @Override
    protected void onStop() {
        Collect.getInstance().getActivityLogger().logOnStop(this);
        super.onStop();
    }

    @Override
    protected void onDestroy() {
        // detach the listener so the task does not call back into a dead activity
        if (mUlTask != null) {
            mUlTask.setUploaderListener(null);
        }
        super.onDestroy();
    }

    /**
     * InstanceUploaderListener callback: builds a per-instance result message
     * by resolving display names from the instance content provider, then
     * shows it in an alert dialog. A null result means an auth flow was
     * started and the upload will be retried.
     */
    @Override
    public void uploadingComplete(HashMap<String, String> result) {
        try {
            dismissDialog(PROGRESS_DIALOG);
        } catch (Exception e) {
            // tried to close a dialog not open. don't care.
        }
        if (result == null) {
            // probably got an auth request, so ignore
            return;
        }
        Log.i(tag, "uploadingComplete: Processing results (" + result.size() + ") from upload of "
                + mInstancesToSend.length + " instances!");
        StringBuilder selection = new StringBuilder();
        Set<String> keys = result.keySet();
        StringBuilder message = new StringBuilder();
        if (keys.size() == 0) {
            message.append(getString(R.string.no_forms_uploaded));
        } else {
            Iterator<String> it = keys.iterator();
            String[] selectionArgs = new String[keys.size()];
            int i = 0;
            // build "_ID=? or _ID=? or ..." covering every reported instance id
            while (it.hasNext()) {
                String id = it.next();
                selection.append(InstanceColumns._ID + "=?");
                selectionArgs[i++] = id;
                if (i != keys.size()) {
                    selection.append(" or ");
                }
            }
            Cursor results = null;
            try {
                results = getContentResolver().query(InstanceColumns.CONTENT_URI, null,
                        selection.toString(), selectionArgs, null);
                if (results.getCount() > 0) {
                    results.moveToPosition(-1);
                    while (results.moveToNext()) {
                        String name = results.getString(results
                                .getColumnIndex(InstanceColumns.DISPLAY_NAME));
                        String id = results.getString(results.getColumnIndex(InstanceColumns._ID));
                        // "<display name> - <per-instance status message>"
                        message.append(name).append(" - ").append(result.get(id)).append("\n\n");
                    }
                } else {
                    message.append(getString(R.string.no_forms_uploaded));
                }
            } finally {
                if (results != null) {
                    results.close();
                }
            }
        }
        createAlertDialog(message.toString().trim());
    }

    // InstanceUploaderListener callback: updates the progress dialog text
    @Override
    public void progressUpdate(int progress, int total) {
        mAlertMsg = getString(R.string.sending_items, progress, total);
        mProgressDialog.setMessage(mAlertMsg);
    }

    /**
     * Managed-dialog factory for the progress spinner and the
     * "no Google account configured" alert.
     */
    @Override
    protected Dialog onCreateDialog(int id) {
        switch (id) {
        case PROGRESS_DIALOG:
            Collect.getInstance().getActivityLogger()
                    .logAction(this, "onCreateDialog.PROGRESS_DIALOG", "show");
            mProgressDialog = new ProgressDialog(this);
            DialogInterface.OnClickListener loadingButtonListener = new DialogInterface.OnClickListener() {
                @Override
                public void onClick(DialogInterface dialog, int which) {
                    // NOTE(review): inside this anonymous listener, `this` is the
                    // listener instance (not the activity), so logAction receives
                    // the listener as its source — confirm that is intended
                    Collect.getInstance().getActivityLogger()
                            .logAction(this, "onCreateDialog.PROGRESS_DIALOG", "cancel");
                    dialog.dismiss();
                    mUlTask.cancel(true);
                    mUlTask.setUploaderListener(null);
                    finish();
                }
            };
            mProgressDialog.setTitle(getString(R.string.uploading_data));
            mProgressDialog.setMessage(mAlertMsg);
            mProgressDialog.setIndeterminate(true);
            mProgressDialog.setProgressStyle(ProgressDialog.STYLE_SPINNER);
            mProgressDialog.setCancelable(false);
            mProgressDialog.setButton(getString(R.string.cancel), loadingButtonListener);
            return mProgressDialog;
        case GOOGLE_USER_DIALOG:
            AlertDialog.Builder gudBuilder = new AlertDialog.Builder(this);
            gudBuilder.setTitle(getString(R.string.no_google_account));
            gudBuilder.setMessage(getString(R.string.google_set_account));
            gudBuilder.setPositiveButton(R.string.ok, new DialogInterface.OnClickListener() {
                @Override
                public void onClick(DialogInterface dialog, int which) {
                    finish();
                }
            });
            gudBuilder.setCancelable(false);
            return gudBuilder.create();
        }
        return null;
    }

    /**
     * Shows the final upload-results alert; OK finishes the activity.
     * State is tracked in mAlertShowing/mAlertMsg so rotation can re-show it.
     */
    private void createAlertDialog(String message) {
        Collect.getInstance().getActivityLogger().logAction(this, "createAlertDialog", "show");
        mAlertDialog = new AlertDialog.Builder(this).create();
        mAlertDialog.setTitle(getString(R.string.upload_results));
        mAlertDialog.setMessage(message);
        DialogInterface.OnClickListener quitListener = new DialogInterface.OnClickListener() {
            @Override
            public void onClick(DialogInterface dialog, int i) {
                switch (i) {
                // NOTE(review): DialogInterface.BUTTON1 is the deprecated alias of
                // BUTTON_POSITIVE
                case DialogInterface.BUTTON1: // ok
                    Collect.getInstance().getActivityLogger()
                            .logAction(this, "createAlertDialog", "OK");
                    // always exit this activity since it has no interface
                    mAlertShowing = false;
                    finish();
                    break;
                }
            }
        };
        mAlertDialog.setCancelable(false);
        mAlertDialog.setButton(getString(R.string.ok), quitListener);
        mAlertDialog.setIcon(android.R.drawable.ic_dialog_info);
        mAlertShowing = true;
        mAlertMsg = message;
        mAlertDialog.show();
    }

    /**
     * Background task that authenticates against Google and uploads the
     * selected instances, accumulating per-instance status strings in mResults
     * (keyed by instance id).
     */
    public class GoogleSheetsInstanceUploaderTask extends
            GoogleSheetsAbstractUploader<Long, Integer, HashMap<String, String>> {
        @Override
        protected HashMap<String, String> doInBackground(Long... values) {
            mResults = new HashMap<String, String>();
            // build "_ID=? or _ID=? or ..." with one placeholder per instance id
            String selection = InstanceColumns._ID + "=?";
            String[] selectionArgs = new String[(values == null) ? 0 : values.length];
            if (values != null) {
                for (int i = 0; i < values.length; i++) {
                    if (i != values.length - 1) {
                        selection += " or " + InstanceColumns._ID + "=?";
                    }
                    selectionArgs[i] = values[i].toString();
                }
            }
            String token;
            try {
                token = authenticate(GoogleSheetsUploaderActivity.this, mGoogleUserName);
            } catch (IOException e) {
                // network or server error, the call is expected to succeed if
                // you try again later. Don't attempt to call again immediately
                // - the request is likely to fail, you'll hit quotas or
                // back-off.
                e.printStackTrace();
                // key "0" — presumably a sentinel id for a task-wide failure; confirm
                mResults.put("0", oauth_fail + e.getMessage());
                return mResults;
            } catch (GooglePlayServicesAvailabilityException playEx) {
                Dialog alert = GooglePlayServicesUtil.getErrorDialog(
                        playEx.getConnectionStatusCode(), GoogleSheetsUploaderActivity.this,
                        PLAYSTORE_REQUEST_CODE);
                alert.show();
                return null;
            } catch (UserRecoverableAuthException e) {
                // launch the consent screen; onActivityResult() retries on success
                GoogleSheetsUploaderActivity.this.startActivityForResult(e.getIntent(),
                        USER_RECOVERABLE_REQUEST_CODE);
                e.printStackTrace();
                return null;
            } catch (GoogleAuthException e) {
                // Failure. The call is not expected to ever succeed so it
                // should not be retried.
                e.printStackTrace();
                mResults.put("0", oauth_fail + e.getMessage());
                return mResults;
            }
            if (token == null) {
                // if token is null,
                return null;
            }
            uploadInstances(selection, selectionArgs, token);
            return mResults;
        }
    }

    // InstanceUploaderListener callback used by the HTTP uploader flow
    @Override
    public void authRequest(Uri url, HashMap<String, String> doneSoFar) {
        // in interface, but not needed
    }
}
| |
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.services.resources.admin;
import org.jboss.resteasy.annotations.cache.NoCache;
import org.jboss.resteasy.spi.NotFoundException;
import org.keycloak.events.admin.OperationType;
import org.keycloak.models.ClientModel;
import org.keycloak.models.KeycloakSession;
import org.keycloak.models.RealmModel;
import org.keycloak.models.RoleModel;
import org.keycloak.models.UserModel;
import org.keycloak.representations.idm.RoleRepresentation;
import org.keycloak.services.ServicesLogger;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.UriInfo;
import java.util.List;
import java.util.Set;
/**
 * Sometimes it's easier to just interact with roles by their ID instead of container/role-name.
 *
 * @author <a href="mailto:bill@burkecentral.com">Bill Burke</a>
 * @version $Revision: 1 $
 */
public class RoleByIdResource extends RoleResource {
protected static final ServicesLogger logger = ServicesLogger.ROOT_LOGGER;
private final RealmModel realm;
private final RealmAuth auth;
private AdminEventBuilder adminEvent;
@Context
private KeycloakSession session;
@Context
private UriInfo uriInfo;
public RoleByIdResource(RealmModel realm, RealmAuth auth, AdminEventBuilder adminEvent) {
super(realm);
this.realm = realm;
this.auth = auth;
this.adminEvent = adminEvent;
}
/**
* Get a specific role's representation
*
* @param id id of role
* @return
*/
@Path("{role-id}")
@GET
@NoCache
@Produces(MediaType.APPLICATION_JSON)
public RoleRepresentation getRole(final @PathParam("role-id") String id) {
auth.requireAny();
RoleModel roleModel = getRoleModel(id);
return getRole(roleModel);
}
protected RoleModel getRoleModel(String id) {
RoleModel roleModel = realm.getRoleById(id);
if (roleModel == null) {
throw new NotFoundException("Could not find role with id");
}
RealmAuth.Resource r = null;
if (roleModel.getContainer() instanceof RealmModel) {
r = RealmAuth.Resource.REALM;
} else if (roleModel.getContainer() instanceof ClientModel) {
r = RealmAuth.Resource.CLIENT;
} else if (roleModel.getContainer() instanceof UserModel) {
r = RealmAuth.Resource.USER;
}
auth.init(r);
return roleModel;
}
/**
* Delete the role
*
* @param id id of role
*/
@Path("{role-id}")
@DELETE
@NoCache
public void deleteRole(final @PathParam("role-id") String id) {
auth.requireManage();
RoleModel role = getRoleModel(id);
deleteRole(role);
adminEvent.operation(OperationType.DELETE).resourcePath(uriInfo).success();
}
/**
* Update the role
*
* @param id id of role
* @param rep
*/
@Path("{role-id}")
@PUT
@Consumes(MediaType.APPLICATION_JSON)
public void updateRole(final @PathParam("role-id") String id, final RoleRepresentation rep) {
auth.requireManage();
RoleModel role = getRoleModel(id);
updateRole(rep, role);
adminEvent.operation(OperationType.UPDATE).resourcePath(uriInfo).representation(rep).success();
}
/**
* Make the role a composite role by associating some child roles
*
* @param id
* @param roles
*/
@Path("{role-id}/composites")
@POST
@Consumes(MediaType.APPLICATION_JSON)
public void addComposites(final @PathParam("role-id") String id, List<RoleRepresentation> roles) {
auth.requireManage();
RoleModel role = getRoleModel(id);
addComposites(adminEvent, uriInfo, roles, role);
}
/**
* Get role's children
*
* Returns a set of role's children provided the role is a composite.
*
* @param id
* @return
*/
@Path("{role-id}/composites")
@GET
@NoCache
@Produces(MediaType.APPLICATION_JSON)
public Set<RoleRepresentation> getRoleComposites(final @PathParam("role-id") String id) {
auth.requireAny();
if (logger.isDebugEnabled()) logger.debug("*** getRoleComposites: '" + id + "'");
RoleModel role = getRoleModel(id);
auth.requireView();
return getRoleComposites(role);
}
/**
* Get realm-level roles that are in the role's composite
*
* @param id
* @return
*/
@Path("{role-id}/composites/realm")
@GET
@NoCache
@Produces(MediaType.APPLICATION_JSON)
public Set<RoleRepresentation> getRealmRoleComposites(final @PathParam("role-id") String id) {
auth.requireAny();
RoleModel role = getRoleModel(id);
return getRealmRoleComposites(role);
}
/**
* Get client-level roles for the client that are in the role's composite
*
* @param id
* @param client
* @return
*/
@Path("{role-id}/composites/clients/{client}")
@GET
@NoCache
@Produces(MediaType.APPLICATION_JSON)
public Set<RoleRepresentation> getClientRoleComposites(final @PathParam("role-id") String id,
final @PathParam("client") String client) {
auth.requireAny();
RoleModel role = getRoleModel(id);
ClientModel clientModel = realm.getClientById(client);
if (clientModel == null) {
throw new NotFoundException("Could not find client");
}
return getClientRoleComposites(clientModel, role);
}
/**
* Remove a set of roles from the role's composite
*
* @param id Role id
* @param roles A set of roles to be removed
*/
@Path("{role-id}/composites")
@DELETE
@Consumes(MediaType.APPLICATION_JSON)
public void deleteComposites(final @PathParam("role-id") String id, List<RoleRepresentation> roles) {
auth.requireManage();
RoleModel role = getRoleModel(id);
deleteComposites(roles, role);
adminEvent.operation(OperationType.DELETE).resourcePath(uriInfo).representation(roles).success();
}
}
| |
/*
* Copyright 2012-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.gradle.tasks.bundling;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.io.StringReader;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import java.util.function.Consumer;
import java.util.jar.Attributes;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
import java.util.jar.JarOutputStream;
import java.util.jar.Manifest;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import java.util.zip.ZipEntry;
import org.gradle.testkit.runner.BuildResult;
import org.gradle.testkit.runner.TaskOutcome;
import org.junit.jupiter.api.TestTemplate;
import org.springframework.boot.gradle.testkit.GradleBuild;
import org.springframework.boot.loader.tools.FileUtils;
import org.springframework.boot.loader.tools.JarModeLibrary;
import org.springframework.util.FileSystemUtils;
import org.springframework.util.StringUtils;
import static org.assertj.core.api.Assertions.assertThat;
/**
* Integration tests for {@link BootJar} and {@link BootWar}.
*
* @author Andy Wilkinson
* @author Madhura Bhave
*/
abstract class AbstractBootArchiveIntegrationTests {
private final String taskName;
private final String libPath;
private final String classesPath;
private final String indexPath;
GradleBuild gradleBuild;
/**
 * @param taskName    name of the boot archive task under test (used as ":taskName" when building)
 * @param libPath     archive-entry prefix under which dependency jars are packaged
 * @param classesPath archive-entry prefix under which application classes/resources are packaged
 * @param indexPath   archive-entry prefix of the layer/classpath index — TODO confirm exact use
 */
protected AbstractBootArchiveIntegrationTests(String taskName, String libPath, String classesPath,
        String indexPath) {
    this.taskName = taskName;
    this.libPath = libPath;
    this.classesPath = classesPath;
    this.indexPath = indexPath;
}
// the archive task succeeds on a minimal project
@TestTemplate
void basicBuild() {
    assertThat(this.gradleBuild.build(this.taskName).task(":" + this.taskName).getOutcome())
            .isEqualTo(TaskOutcome.SUCCESS);
}

// same as basicBuild but the build script uses the deprecated mainClassName
// property — presumably selected per test method by the GradleBuild fixture; confirm
@Deprecated
@TestTemplate
void basicBuildUsingDeprecatedMainClassName() {
    assertThat(this.gradleBuild.build(this.taskName).task(":" + this.taskName).getOutcome())
            .isEqualTo(TaskOutcome.SUCCESS);
}
/**
 * Builds the archive twice (with a clean in between, and enough delay for
 * file timestamps to differ) and asserts both archives are byte-identical.
 */
@TestTemplate
void reproducibleArchive() throws IOException, InterruptedException {
    BuildResult firstBuild = this.gradleBuild.build(this.taskName);
    assertThat(firstBuild.task(":" + this.taskName).getOutcome()).isEqualTo(TaskOutcome.SUCCESS);
    File jar = new File(this.gradleBuild.getProjectDir(), "build/libs").listFiles()[0];
    String firstHash = FileUtils.sha1Hash(jar);
    // let the clock advance so non-reproducible archives would get new timestamps
    Thread.sleep(1500);
    BuildResult secondBuild = this.gradleBuild.build("clean", this.taskName);
    assertThat(secondBuild.task(":" + this.taskName).getOutcome()).isEqualTo(TaskOutcome.SUCCESS);
    String secondHash = FileUtils.sha1Hash(jar);
    assertThat(firstHash).isEqualTo(secondHash);
}
// a second build with no changes is UP_TO_DATE
@TestTemplate
void upToDateWhenBuiltTwice() {
    assertThat(this.gradleBuild.build(this.taskName).task(":" + this.taskName).getOutcome())
            .isEqualTo(TaskOutcome.SUCCESS);
    assertThat(this.gradleBuild.build(this.taskName).task(":" + this.taskName).getOutcome())
            .isEqualTo(TaskOutcome.UP_TO_DATE);
}

// including the launch script does not break up-to-date checking
@TestTemplate
void upToDateWhenBuiltTwiceWithLaunchScriptIncluded() {
    assertThat(this.gradleBuild.build("-PincludeLaunchScript=true", this.taskName).task(":" + this.taskName)
            .getOutcome()).isEqualTo(TaskOutcome.SUCCESS);
    assertThat(this.gradleBuild.build("-PincludeLaunchScript=true", this.taskName).task(":" + this.taskName)
            .getOutcome()).isEqualTo(TaskOutcome.UP_TO_DATE);
}

// toggling the launch script on invalidates the previous output (second run is SUCCESS, not UP_TO_DATE)
@TestTemplate
void notUpToDateWhenLaunchScriptWasNotIncludedAndThenIsIncluded() {
    assertThat(this.gradleBuild.scriptProperty("launchScript", "").build(this.taskName).task(":" + this.taskName)
            .getOutcome()).isEqualTo(TaskOutcome.SUCCESS);
    assertThat(this.gradleBuild.scriptProperty("launchScript", "launchScript()").build(this.taskName)
            .task(":" + this.taskName).getOutcome()).isEqualTo(TaskOutcome.SUCCESS);
}

// toggling the launch script off also invalidates the previous output
@TestTemplate
void notUpToDateWhenLaunchScriptWasIncludedAndThenIsNotIncluded() {
    assertThat(this.gradleBuild.scriptProperty("launchScript", "launchScript()").build(this.taskName)
            .task(":" + this.taskName).getOutcome()).isEqualTo(TaskOutcome.SUCCESS);
    assertThat(this.gradleBuild.scriptProperty("launchScript", "").build(this.taskName).task(":" + this.taskName)
            .getOutcome()).isEqualTo(TaskOutcome.SUCCESS);
}

// changing a launch-script property invalidates the previous output
@TestTemplate
void notUpToDateWhenLaunchScriptPropertyChanges() {
    assertThat(this.gradleBuild.scriptProperty("launchScriptProperty", "alpha").build(this.taskName)
            .task(":" + this.taskName).getOutcome()).isEqualTo(TaskOutcome.SUCCESS);
    assertThat(this.gradleBuild.scriptProperty("launchScriptProperty", "bravo").build(this.taskName)
            .task(":" + this.taskName).getOutcome()).isEqualTo(TaskOutcome.SUCCESS);
}
// the application plugin's main class ends up as the Start-Class manifest attribute
@TestTemplate
void applicationPluginMainClassNameIsUsed() throws IOException {
    assertThat(this.gradleBuild.build(this.taskName).task(":" + this.taskName).getOutcome())
            .isEqualTo(TaskOutcome.SUCCESS);
    try (JarFile jarFile = new JarFile(new File(this.gradleBuild.getProjectDir(), "build/libs").listFiles()[0])) {
        assertThat(jarFile.getManifest().getMainAttributes().getValue("Start-Class"))
                .isEqualTo("com.example.CustomMain");
    }
}

// the springBoot { mainClass } DSL also drives the Start-Class manifest attribute
@TestTemplate
void springBootExtensionMainClassNameIsUsed() throws IOException {
    assertThat(this.gradleBuild.build(this.taskName).task(":" + this.taskName).getOutcome())
            .isEqualTo(TaskOutcome.SUCCESS);
    try (JarFile jarFile = new JarFile(new File(this.gradleBuild.getProjectDir(), "build/libs").listFiles()[0])) {
        assertThat(jarFile.getManifest().getMainAttributes().getValue("Start-Class"))
                .isEqualTo("com.example.CustomMain");
    }
}

// duplicate entries in the task inputs do not fail the build
@TestTemplate
void duplicatesAreHandledGracefully() {
    assertThat(this.gradleBuild.build(this.taskName).task(":" + this.taskName).getOutcome())
            .isEqualTo(TaskOutcome.SUCCESS);
}
/**
 * By default only the non-developmentOnly dependency (commons-io) is packaged
 * under the lib path, while application resources land under the classes path.
 */
@TestTemplate
void developmentOnlyDependenciesAreNotIncludedInTheArchiveByDefault() throws IOException {
    File resourcesDir = new File(this.gradleBuild.getProjectDir(), "src/main/resources");
    resourcesDir.mkdirs();
    new File(resourcesDir, "resource").createNewFile();
    assertThat(this.gradleBuild.build(this.taskName).task(":" + this.taskName).getOutcome())
            .isEqualTo(TaskOutcome.SUCCESS);
    File archive = new File(this.gradleBuild.getProjectDir(), "build/libs").listFiles()[0];
    try (JarFile jarFile = new JarFile(archive)) {
        Stream<String> libEntries = jarFile.stream().filter((entry) -> !entry.isDirectory())
                .map(JarEntry::getName).filter((name) -> name.startsWith(this.libPath));
        assertThat(libEntries).containsExactly(this.libPath + "commons-io-2.6.jar");
        Stream<String> classesEntries = jarFile.stream().filter((entry) -> !entry.isDirectory())
                .map(JarEntry::getName).filter((name) -> name.startsWith(this.classesPath));
        assertThat(classesEntries).containsExactly(this.classesPath + "resource");
    }
}
// with the opt-in enabled, the developmentOnly dependency (commons-lang3) is packaged too
@TestTemplate
void developmentOnlyDependenciesCanBeIncludedInTheArchive() throws IOException {
    assertThat(this.gradleBuild.build(this.taskName).task(":" + this.taskName).getOutcome())
            .isEqualTo(TaskOutcome.SUCCESS);
    try (JarFile jarFile = new JarFile(new File(this.gradleBuild.getProjectDir(), "build/libs").listFiles()[0])) {
        Stream<String> libEntryNames = jarFile.stream().filter((entry) -> !entry.isDirectory())
                .map(JarEntry::getName).filter((name) -> name.startsWith(this.libPath));
        assertThat(libEntryNames).containsExactly(this.libPath + "commons-io-2.6.jar",
                this.libPath + "commons-lang3-3.9.jar");
    }
}

// a jar marked as a dependencies-starter is filtered out of the archive;
// createDependenciesStarterJar/createStandardJar are sibling helpers defined elsewhere in this class
@TestTemplate
void jarTypeFilteringIsApplied() throws IOException {
    File flatDirRepository = new File(this.gradleBuild.getProjectDir(), "repository");
    createDependenciesStarterJar(new File(flatDirRepository, "starter.jar"));
    createStandardJar(new File(flatDirRepository, "standard.jar"));
    assertThat(this.gradleBuild.build(this.taskName).task(":" + this.taskName).getOutcome())
            .isEqualTo(TaskOutcome.SUCCESS);
    try (JarFile jarFile = new JarFile(new File(this.gradleBuild.getProjectDir(), "build/libs").listFiles()[0])) {
        Stream<String> libEntryNames = jarFile.stream().filter((entry) -> !entry.isDirectory())
                .map(JarEntry::getName).filter((name) -> name.startsWith(this.libPath));
        assertThat(libEntryNames).containsExactly(this.libPath + "standard.jar");
    }
}
@TestTemplate
void startClassIsSetByResolvingTheMainClass() throws IOException {
copyMainClassApplication();
assertThat(this.gradleBuild.build(this.taskName).task(":" + this.taskName).getOutcome())
.isEqualTo(TaskOutcome.SUCCESS);
try (JarFile jarFile = new JarFile(new File(this.gradleBuild.getProjectDir(), "build/libs").listFiles()[0])) {
Attributes mainAttributes = jarFile.getManifest().getMainAttributes();
assertThat(mainAttributes.getValue("Start-Class"))
.isEqualTo("com.example." + this.taskName.toLowerCase(Locale.ENGLISH) + ".main.CustomMainClass");
}
assertThat(this.gradleBuild.build(this.taskName).task(":" + this.taskName).getOutcome())
.isEqualTo(TaskOutcome.UP_TO_DATE);
}
@TestTemplate
// An explicit, empty layered {} block is equivalent to the default layering,
// so the rebuild must be up to date.
// Fix: dropped the redundant "" + string concatenations around this.taskName,
// which every sibling test passes directly.
void upToDateWhenBuiltWithDefaultLayeredAndThenWithExplicitLayered() {
	assertThat(this.gradleBuild.scriptProperty("layered", "").build(this.taskName).task(":" + this.taskName)
			.getOutcome()).isEqualTo(TaskOutcome.SUCCESS);
	assertThat(this.gradleBuild.scriptProperty("layered", "layered {}").build(this.taskName)
			.task(":" + this.taskName).getOutcome()).isEqualTo(TaskOutcome.UP_TO_DATE);
}
@TestTemplate
// Toggling layering from disabled to enabled must invalidate the previous
// output: both builds run the task to completion rather than being up to date.
void notUpToDateWhenBuiltWithoutLayersAndThenWithLayers() {
	TaskOutcome disabledOutcome = this.gradleBuild.scriptProperty("layerEnablement", "enabled = false")
			.build(this.taskName).task(":" + this.taskName).getOutcome();
	assertThat(disabledOutcome).isEqualTo(TaskOutcome.SUCCESS);
	TaskOutcome enabledOutcome = this.gradleBuild.scriptProperty("layerEnablement", "enabled = true")
			.build(this.taskName).task(":" + this.taskName).getOutcome();
	assertThat(enabledOutcome).isEqualTo(TaskOutcome.SUCCESS);
}
@TestTemplate
// Removing the layer tools between builds must invalidate the previous output:
// the second build runs the task again instead of reporting up to date.
void notUpToDateWhenBuiltWithLayerToolsAndThenWithoutLayerTools() {
	TaskOutcome withToolsOutcome = this.gradleBuild.scriptProperty("layerTools", "")
			.build(this.taskName).task(":" + this.taskName).getOutcome();
	assertThat(withToolsOutcome).isEqualTo(TaskOutcome.SUCCESS);
	TaskOutcome withoutToolsOutcome = this.gradleBuild.scriptProperty("layerTools", "includeLayerTools = false")
			.build(this.taskName).task(":" + this.taskName).getOutcome();
	assertThat(withoutToolsOutcome).isEqualTo(TaskOutcome.SUCCESS);
}
@TestTemplate
// Layering must also work when the build configures a custom source set.
void layersWithCustomSourceSet() {
	TaskOutcome outcome = this.gradleBuild.build(this.taskName).task(":" + this.taskName).getOutcome();
	assertThat(outcome).isEqualTo(TaskOutcome.SUCCESS);
}
// Verifies the default (implicit) layering: a four-layer index covering
// dependencies, the loader, snapshot dependencies and the application, and
// that the bundled layertools jar mode can list and extract those layers.
@TestTemplate
void implicitLayers() throws IOException {
writeMainClass();
writeResource();
assertThat(this.gradleBuild.build(this.taskName).task(":" + this.taskName).getOutcome())
.isEqualTo(TaskOutcome.SUCCESS);
Map<String, List<String>> indexedLayers;
String layerToolsJar = this.libPath + JarModeLibrary.LAYER_TOOLS.getName();
// the single jar under build/libs is the archive produced by the task above
try (JarFile jarFile = new JarFile(new File(this.gradleBuild.getProjectDir(), "build/libs").listFiles()[0])) {
assertThat(jarFile.getEntry(layerToolsJar)).isNotNull();
assertThat(jarFile.getEntry(this.libPath + "commons-lang3-3.9.jar")).isNotNull();
assertThat(jarFile.getEntry(this.libPath + "spring-core-5.2.5.RELEASE.jar")).isNotNull();
assertThat(jarFile.getEntry(this.libPath + "spring-jcl-5.2.5.RELEASE.jar")).isNotNull();
assertThat(jarFile.getEntry(this.libPath + "library-1.0-SNAPSHOT.jar")).isNotNull();
assertThat(jarFile.getEntry(this.classesPath + "example/Main.class")).isNotNull();
assertThat(jarFile.getEntry(this.classesPath + "static/file.txt")).isNotNull();
indexedLayers = readLayerIndex(jarFile);
}
List<String> layerNames = Arrays.asList("dependencies", "spring-boot-loader", "snapshot-dependencies",
"application");
assertThat(indexedLayers.keySet()).containsExactlyElementsOf(layerNames);
Set<String> expectedDependencies = new TreeSet<>();
expectedDependencies.add(this.libPath + "commons-lang3-3.9.jar");
expectedDependencies.add(this.libPath + "spring-core-5.2.5.RELEASE.jar");
expectedDependencies.add(this.libPath + "spring-jcl-5.2.5.RELEASE.jar");
expectedDependencies.add(this.libPath + "jul-to-slf4j-1.7.28.jar");
expectedDependencies.add(this.libPath + "log4j-api-2.12.1.jar");
expectedDependencies.add(this.libPath + "log4j-to-slf4j-2.12.1.jar");
expectedDependencies.add(this.libPath + "logback-classic-1.2.3.jar");
expectedDependencies.add(this.libPath + "logback-core-1.2.3.jar");
expectedDependencies.add(this.libPath + "slf4j-api-1.7.28.jar");
expectedDependencies.add(this.libPath + "spring-boot-starter-logging-2.2.0.RELEASE.jar");
Set<String> expectedSnapshotDependencies = new TreeSet<>();
expectedSnapshotDependencies.add(this.libPath + "library-1.0-SNAPSHOT.jar");
// the layertools jar lands in the snapshot layer only when it is itself a snapshot build
(layerToolsJar.contains("SNAPSHOT") ? expectedSnapshotDependencies : expectedDependencies).add(layerToolsJar);
assertThat(indexedLayers.get("dependencies")).containsExactlyElementsOf(expectedDependencies);
assertThat(indexedLayers.get("spring-boot-loader")).containsExactly("org/");
assertThat(indexedLayers.get("snapshot-dependencies")).containsExactlyElementsOf(expectedSnapshotDependencies);
assertThat(indexedLayers.get("application"))
.containsExactly(getExpectedApplicationLayerContents(this.classesPath));
// the jar's "list" and "extract" layertools modes must agree with the index read above
BuildResult listLayers = this.gradleBuild.build("listLayers");
assertThat(listLayers.task(":listLayers").getOutcome()).isEqualTo(TaskOutcome.SUCCESS);
String listLayersOutput = listLayers.getOutput();
assertThat(new BufferedReader(new StringReader(listLayersOutput)).lines()).containsSequence(layerNames);
BuildResult extractLayers = this.gradleBuild.build("extractLayers");
assertThat(extractLayers.task(":extractLayers").getOutcome()).isEqualTo(TaskOutcome.SUCCESS);
assertExtractedLayers(layerNames, indexedLayers);
}
/**
 * Returns the entries expected in the application layer, in index order.
 * @param additionalFiles entries expected beyond the standard application content
 */
abstract String[] getExpectedApplicationLayerContents(String... additionalFiles);
// Same as implicitLayers, but in a multi-module build: the locally built
// subproject jars (alpha, bravo, charlie) must be placed in the application
// layer rather than the dependencies layer.
@TestTemplate
void multiModuleImplicitLayers() throws IOException {
writeSettingsGradle();
writeMainClass();
writeResource();
assertThat(this.gradleBuild.build(this.taskName).task(":" + this.taskName).getOutcome())
.isEqualTo(TaskOutcome.SUCCESS);
Map<String, List<String>> indexedLayers;
String layerToolsJar = this.libPath + JarModeLibrary.LAYER_TOOLS.getName();
// the single jar under build/libs is the archive produced by the task above
try (JarFile jarFile = new JarFile(new File(this.gradleBuild.getProjectDir(), "build/libs").listFiles()[0])) {
assertThat(jarFile.getEntry(layerToolsJar)).isNotNull();
assertThat(jarFile.getEntry(this.libPath + "alpha-1.2.3.jar")).isNotNull();
assertThat(jarFile.getEntry(this.libPath + "bravo-1.2.3.jar")).isNotNull();
assertThat(jarFile.getEntry(this.libPath + "charlie-1.2.3.jar")).isNotNull();
assertThat(jarFile.getEntry(this.libPath + "commons-lang3-3.9.jar")).isNotNull();
assertThat(jarFile.getEntry(this.libPath + "spring-core-5.2.5.RELEASE.jar")).isNotNull();
assertThat(jarFile.getEntry(this.libPath + "spring-jcl-5.2.5.RELEASE.jar")).isNotNull();
assertThat(jarFile.getEntry(this.libPath + "library-1.0-SNAPSHOT.jar")).isNotNull();
assertThat(jarFile.getEntry(this.classesPath + "example/Main.class")).isNotNull();
assertThat(jarFile.getEntry(this.classesPath + "static/file.txt")).isNotNull();
indexedLayers = readLayerIndex(jarFile);
}
List<String> layerNames = Arrays.asList("dependencies", "spring-boot-loader", "snapshot-dependencies",
"application");
assertThat(indexedLayers.keySet()).containsExactlyElementsOf(layerNames);
Set<String> expectedDependencies = new TreeSet<>();
expectedDependencies.add(this.libPath + "commons-lang3-3.9.jar");
expectedDependencies.add(this.libPath + "spring-core-5.2.5.RELEASE.jar");
expectedDependencies.add(this.libPath + "spring-jcl-5.2.5.RELEASE.jar");
Set<String> expectedSnapshotDependencies = new TreeSet<>();
expectedSnapshotDependencies.add(this.libPath + "library-1.0-SNAPSHOT.jar");
// the layertools jar lands in the snapshot layer only when it is itself a snapshot build
(layerToolsJar.contains("SNAPSHOT") ? expectedSnapshotDependencies : expectedDependencies).add(layerToolsJar);
assertThat(indexedLayers.get("dependencies")).containsExactlyElementsOf(expectedDependencies);
assertThat(indexedLayers.get("spring-boot-loader")).containsExactly("org/");
assertThat(indexedLayers.get("snapshot-dependencies")).containsExactlyElementsOf(expectedSnapshotDependencies);
// subproject jars count as application content, not external dependencies
assertThat(indexedLayers.get("application"))
.containsExactly(getExpectedApplicationLayerContents(this.classesPath, this.libPath + "alpha-1.2.3.jar",
this.libPath + "bravo-1.2.3.jar", this.libPath + "charlie-1.2.3.jar"));
BuildResult listLayers = this.gradleBuild.build("listLayers");
assertThat(listLayers.task(":listLayers").getOutcome()).isEqualTo(TaskOutcome.SUCCESS);
String listLayersOutput = listLayers.getOutput();
assertThat(new BufferedReader(new StringReader(listLayersOutput)).lines()).containsSequence(layerNames);
BuildResult extractLayers = this.gradleBuild.build("extractLayers");
assertThat(extractLayers.task(":extractLayers").getOutcome()).isEqualTo(TaskOutcome.SUCCESS);
assertExtractedLayers(layerNames, indexedLayers);
}
// Verifies a user-defined layer configuration: five custom layers whose index
// order, contents, and layertools list/extract behavior must all match.
@TestTemplate
void customLayers() throws IOException {
writeMainClass();
writeResource();
BuildResult build = this.gradleBuild.build(this.taskName);
assertThat(build.task(":" + this.taskName).getOutcome()).isEqualTo(TaskOutcome.SUCCESS);
Map<String, List<String>> indexedLayers;
String layerToolsJar = this.libPath + JarModeLibrary.LAYER_TOOLS.getName();
// the single jar under build/libs is the archive produced by the task above
try (JarFile jarFile = new JarFile(new File(this.gradleBuild.getProjectDir(), "build/libs").listFiles()[0])) {
assertThat(jarFile.getEntry(layerToolsJar)).isNotNull();
assertThat(jarFile.getEntry(this.libPath + "commons-lang3-3.9.jar")).isNotNull();
assertThat(jarFile.getEntry(this.libPath + "spring-core-5.2.5.RELEASE.jar")).isNotNull();
assertThat(jarFile.getEntry(this.libPath + "spring-jcl-5.2.5.RELEASE.jar")).isNotNull();
assertThat(jarFile.getEntry(this.libPath + "library-1.0-SNAPSHOT.jar")).isNotNull();
assertThat(jarFile.getEntry(this.classesPath + "example/Main.class")).isNotNull();
assertThat(jarFile.getEntry(this.classesPath + "static/file.txt")).isNotNull();
assertThat(jarFile.getEntry(this.indexPath + "layers.idx")).isNotNull();
indexedLayers = readLayerIndex(jarFile);
}
List<String> layerNames = Arrays.asList("dependencies", "commons-dependencies", "snapshot-dependencies",
"static", "app");
assertThat(indexedLayers.keySet()).containsExactlyElementsOf(layerNames);
Set<String> expectedDependencies = new TreeSet<>();
expectedDependencies.add(this.libPath + "spring-core-5.2.5.RELEASE.jar");
expectedDependencies.add(this.libPath + "spring-jcl-5.2.5.RELEASE.jar");
List<String> expectedSnapshotDependencies = new ArrayList<>();
expectedSnapshotDependencies.add(this.libPath + "library-1.0-SNAPSHOT.jar");
// the layertools jar lands in the snapshot layer only when it is itself a snapshot build
(layerToolsJar.contains("SNAPSHOT") ? expectedSnapshotDependencies : expectedDependencies).add(layerToolsJar);
assertThat(indexedLayers.get("dependencies")).containsExactlyElementsOf(expectedDependencies);
assertThat(indexedLayers.get("commons-dependencies")).containsExactly(this.libPath + "commons-lang3-3.9.jar");
assertThat(indexedLayers.get("snapshot-dependencies")).containsExactlyElementsOf(expectedSnapshotDependencies);
assertThat(indexedLayers.get("static")).containsExactly(this.classesPath + "static/");
// the app layer holds the application classes plus the loader's org/ tree
List<String> appLayer = new ArrayList<>(indexedLayers.get("app"));
String[] appLayerContents = getExpectedApplicationLayerContents(this.classesPath + "example/");
assertThat(appLayer).containsSubsequence(appLayerContents);
appLayer.removeAll(Arrays.asList(appLayerContents));
assertThat(appLayer).containsExactly("org/");
BuildResult listLayers = this.gradleBuild.build("listLayers");
assertThat(listLayers.task(":listLayers").getOutcome()).isEqualTo(TaskOutcome.SUCCESS);
String listLayersOutput = listLayers.getOutput();
assertThat(new BufferedReader(new StringReader(listLayersOutput)).lines()).containsSequence(layerNames);
BuildResult extractLayers = this.gradleBuild.build("extractLayers");
assertThat(extractLayers.task(":extractLayers").getOutcome()).isEqualTo(TaskOutcome.SUCCESS);
assertExtractedLayers(layerNames, indexedLayers);
}
// Same as customLayers, but in a multi-module build with an extra
// subproject-dependencies layer for the locally built subproject jars.
@TestTemplate
void multiModuleCustomLayers() throws IOException {
writeSettingsGradle();
writeMainClass();
writeResource();
BuildResult build = this.gradleBuild.build(this.taskName);
assertThat(build.task(":" + this.taskName).getOutcome()).isEqualTo(TaskOutcome.SUCCESS);
Map<String, List<String>> indexedLayers;
String layerToolsJar = this.libPath + JarModeLibrary.LAYER_TOOLS.getName();
// the single jar under build/libs is the archive produced by the task above
try (JarFile jarFile = new JarFile(new File(this.gradleBuild.getProjectDir(), "build/libs").listFiles()[0])) {
assertThat(jarFile.getEntry(layerToolsJar)).isNotNull();
assertThat(jarFile.getEntry(this.libPath + "alpha-1.2.3.jar")).isNotNull();
assertThat(jarFile.getEntry(this.libPath + "bravo-1.2.3.jar")).isNotNull();
assertThat(jarFile.getEntry(this.libPath + "charlie-1.2.3.jar")).isNotNull();
assertThat(jarFile.getEntry(this.libPath + "commons-lang3-3.9.jar")).isNotNull();
assertThat(jarFile.getEntry(this.libPath + "spring-core-5.2.5.RELEASE.jar")).isNotNull();
assertThat(jarFile.getEntry(this.libPath + "spring-jcl-5.2.5.RELEASE.jar")).isNotNull();
assertThat(jarFile.getEntry(this.libPath + "library-1.0-SNAPSHOT.jar")).isNotNull();
assertThat(jarFile.getEntry(this.classesPath + "example/Main.class")).isNotNull();
assertThat(jarFile.getEntry(this.classesPath + "static/file.txt")).isNotNull();
assertThat(jarFile.getEntry(this.indexPath + "layers.idx")).isNotNull();
indexedLayers = readLayerIndex(jarFile);
}
List<String> layerNames = Arrays.asList("dependencies", "commons-dependencies", "snapshot-dependencies",
"subproject-dependencies", "static", "app");
assertThat(indexedLayers.keySet()).containsExactlyElementsOf(layerNames);
Set<String> expectedSubprojectDependencies = new TreeSet<>();
expectedSubprojectDependencies.add(this.libPath + "alpha-1.2.3.jar");
expectedSubprojectDependencies.add(this.libPath + "bravo-1.2.3.jar");
expectedSubprojectDependencies.add(this.libPath + "charlie-1.2.3.jar");
Set<String> expectedDependencies = new TreeSet<>();
expectedDependencies.add(this.libPath + "spring-core-5.2.5.RELEASE.jar");
expectedDependencies.add(this.libPath + "spring-jcl-5.2.5.RELEASE.jar");
List<String> expectedSnapshotDependencies = new ArrayList<>();
expectedSnapshotDependencies.add(this.libPath + "library-1.0-SNAPSHOT.jar");
// the layertools jar lands in the snapshot layer only when it is itself a snapshot build
(layerToolsJar.contains("SNAPSHOT") ? expectedSnapshotDependencies : expectedDependencies).add(layerToolsJar);
assertThat(indexedLayers.get("subproject-dependencies"))
.containsExactlyElementsOf(expectedSubprojectDependencies);
assertThat(indexedLayers.get("dependencies")).containsExactlyElementsOf(expectedDependencies);
assertThat(indexedLayers.get("commons-dependencies")).containsExactly(this.libPath + "commons-lang3-3.9.jar");
assertThat(indexedLayers.get("snapshot-dependencies")).containsExactlyElementsOf(expectedSnapshotDependencies);
assertThat(indexedLayers.get("static")).containsExactly(this.classesPath + "static/");
// the app layer holds the application classes plus the loader's org/ tree
List<String> appLayer = new ArrayList<>(indexedLayers.get("app"));
String[] appLayerContents = getExpectedApplicationLayerContents(this.classesPath + "example/");
assertThat(appLayer).containsSubsequence(appLayerContents);
appLayer.removeAll(Arrays.asList(appLayerContents));
assertThat(appLayer).containsExactly("org/");
BuildResult listLayers = this.gradleBuild.build("listLayers");
assertThat(listLayers.task(":listLayers").getOutcome()).isEqualTo(TaskOutcome.SUCCESS);
String listLayersOutput = listLayers.getOutput();
assertThat(new BufferedReader(new StringReader(listLayersOutput)).lines()).containsSequence(layerNames);
BuildResult extractLayers = this.gradleBuild.build("extractLayers");
assertThat(extractLayers.task(":extractLayers").getOutcome()).isEqualTo(TaskOutcome.SUCCESS);
assertExtractedLayers(layerNames, indexedLayers);
}
// Copies the fixture application whose main class lives in the 'main' sub-package.
private void copyMainClassApplication() throws IOException {
copyApplication("main");
}
/**
 * Copies the test fixture application named {@code name} from the test sources
 * into the Gradle project's main source tree.
 * <p>
 * Fix: the destination path previously used the locale-sensitive
 * {@code toLowerCase()} while the source path used {@code Locale.ENGLISH};
 * under a default locale with surprising case mappings (e.g. Turkish dotless i)
 * the two paths could diverge. Both now use {@link Locale#ENGLISH}.
 */
protected void copyApplication(String name) throws IOException {
	String applicationPath = "com/example/" + this.taskName.toLowerCase(Locale.ENGLISH) + "/" + name;
	File output = new File(this.gradleBuild.getProjectDir(), "src/main/java/" + applicationPath);
	output.mkdirs();
	FileSystemUtils.copyRecursively(new File("src/test/java/" + applicationPath), output);
}
// Creates an empty jar with a plain manifest and no jar-type attribute.
private void createStandardJar(File location) throws IOException {
	createJar(location, (manifestAttributes) -> {
		// no additional manifest attributes
	});
}
// Creates a jar whose manifest marks it as a dependencies starter, so that
// jar-type filtering excludes it from the archive.
private void createDependenciesStarterJar(File location) throws IOException {
	createJar(location,
			(manifestAttributes) -> manifestAttributes.putValue("Spring-Boot-Jar-Type", "dependencies-starter"));
}
/**
 * Creates an empty jar at {@code location} whose main manifest attributes have
 * been customized by {@code attributesConfigurer}.
 * <p>
 * Fix: the stream is now managed by try-with-resources; the original called
 * {@code close()} directly and leaked the underlying {@link FileOutputStream}
 * if the {@link JarOutputStream} constructor threw after opening the file.
 */
private void createJar(File location, Consumer<Attributes> attributesConfigurer) throws IOException {
	location.getParentFile().mkdirs();
	Manifest manifest = new Manifest();
	Attributes attributes = manifest.getMainAttributes();
	// MANIFEST_VERSION must be present or the manifest is written out empty
	attributes.put(Attributes.Name.MANIFEST_VERSION, "1.0");
	attributesConfigurer.accept(attributes);
	try (JarOutputStream jar = new JarOutputStream(new FileOutputStream(location), manifest)) {
		// no entries: the jar contains only the manifest
	}
}
/**
 * Writes a settings.gradle declaring the three subprojects used by the
 * multi-module tests.
 * <p>
 * Fix: uses {@code Files.newBufferedWriter} (UTF-8) instead of
 * {@code FileWriter}, which wrote with the platform-default charset.
 */
private void writeSettingsGradle() {
	File settings = new File(this.gradleBuild.getProjectDir(), "settings.gradle");
	try (PrintWriter writer = new PrintWriter(Files.newBufferedWriter(settings.toPath()))) {
		writer.println("include 'alpha', 'bravo', 'charlie'");
	}
	catch (IOException ex) {
		throw new RuntimeException(ex);
	}
}
/**
 * Writes a minimal {@code example.Main} class used as the application-layer
 * content of the fixture build.
 * <p>
 * Fix: uses {@code Files.newBufferedWriter} (UTF-8) instead of
 * {@code FileWriter}, which wrote with the platform-default charset.
 */
private void writeMainClass() {
	File examplePackage = new File(this.gradleBuild.getProjectDir(), "src/main/java/example");
	examplePackage.mkdirs();
	File main = new File(examplePackage, "Main.java");
	try (PrintWriter writer = new PrintWriter(Files.newBufferedWriter(main.toPath()))) {
		writer.println("package example;");
		writer.println();
		writer.println("import java.io.IOException;");
		writer.println();
		writer.println("public class Main {");
		writer.println();
		writer.println("    public static void main(String[] args) {");
		writer.println("    }");
		writer.println();
		writer.println("}");
	}
	catch (IOException ex) {
		throw new RuntimeException(ex);
	}
}
// Creates an empty static resource so the archive gains a static/ directory.
private void writeResource() {
	try {
		Path resource = this.gradleBuild.getProjectDir().toPath()
				.resolve(Paths.get("src", "main", "resources", "static", "file.txt"));
		Files.createDirectories(resource.getParent());
		Files.createFile(resource);
	}
	catch (IOException ex) {
		throw new RuntimeException(ex);
	}
}
/**
 * Parses the archive's layers.idx (under this.indexPath) into an ordered map
 * of layer name to that layer's entries. The index is a YAML-like listing in
 * which every name is double-quoted:
 *   - "layerName":
 *     - "entry"
 */
private Map<String, List<String>> readLayerIndex(JarFile jarFile) throws IOException {
Map<String, List<String>> index = new LinkedHashMap<>();
ZipEntry indexEntry = jarFile.getEntry(this.indexPath + "layers.idx");
// NOTE(review): platform-default charset; the index content is ASCII so this is benign
try (BufferedReader reader = new BufferedReader(new InputStreamReader(jarFile.getInputStream(indexEntry)))) {
String line = reader.readLine();
String layer = null;
while (line != null) {
if (line.startsWith("- ")) {
// layer line: - "name":  -> strip the leading '- "' (3 chars) and trailing '":' (2 chars)
layer = line.substring(3, line.length() - 2);
}
else if (line.startsWith("  - ")) {
// entry line:   - "path"  -> strip the leading '  - "' (5 chars) and trailing '"' (1 char)
index.computeIfAbsent(layer, (key) -> new ArrayList<>()).add(line.substring(5, line.length() - 1));
}
line = reader.readLine();
}
return index;
}
}
/**
 * Reads back the files extracted beneath {@code root} for each named layer,
 * keyed by layer name, with paths relative to the layer directory and
 * normalized to forward slashes.
 * <p>
 * Fix: the {@link Files#walk} stream is now closed via try-with-resources; the
 * original left its directory handles open for the lifetime of the stream.
 */
private Map<String, List<String>> readExtractedLayers(File root, List<String> layerNames) throws IOException {
	Map<String, List<String>> extractedLayers = new LinkedHashMap<>();
	for (String layerName : layerNames) {
		File layer = new File(root, layerName);
		assertThat(layer).isDirectory();
		try (Stream<Path> files = Files.walk(layer.toPath())) {
			extractedLayers.put(layerName,
					files.filter((path) -> path.toFile().isFile()).map(layer.toPath()::relativize)
							.map(Path::toString).map(StringUtils::cleanPath).collect(Collectors.toList()));
		}
	}
	return extractedLayers;
}
/**
 * Asserts that the extracted layer directories contain exactly the layers in
 * the index and that every extracted file is covered by its layer's index
 * entries.
 * <p>
 * Fix: the offending file (not the layer name, as before) is now collected, so
 * a failure message identifies exactly which files were out of place. The
 * pass/fail behavior is unchanged: only the list's emptiness is asserted.
 */
private void assertExtractedLayers(List<String> layerNames, Map<String, List<String>> indexedLayers)
		throws IOException {
	Map<String, List<String>> extractedLayers = readExtractedLayers(this.gradleBuild.getProjectDir(), layerNames);
	assertThat(extractedLayers.keySet()).isEqualTo(indexedLayers.keySet());
	extractedLayers.forEach((name, contents) -> {
		List<String> index = indexedLayers.get(name);
		List<String> unexpected = new ArrayList<>();
		for (String file : contents) {
			if (!isInIndex(index, file)) {
				unexpected.add(file);
			}
		}
		assertThat(unexpected).isEmpty();
	});
}
// Returns true when the file is listed in the index directly, or lives under
// an indexed directory entry (index entries ending with '/').
private boolean isInIndex(List<String> index, String file) {
	for (String candidate : index) {
		if (file.equals(candidate)) {
			return true;
		}
		if (candidate.endsWith("/") && file.startsWith(candidate)) {
			return true;
		}
	}
	return false;
}
}
| |
/**
* PIQConnect: Connected-component analysis for Big Graph
*
* __________.___________ _________ __
* \______ \ \_____ \ \_ ___ \ ____ ____ ____ ____ _____/ |_
* | ___/ |/ / \ \/ \ \/ / _ \ / \ / \_/ __ \_/ ___\ __\
* | | | / \_/. \ \___( <_> ) | \ | \ ___/\ \___| |
* |____| |___\_____\ \_/\______ /\____/|___| /___| /\___ >\___ >__|
* \__> \/ \/ \/ \/ \/
*
* Copyright (c) 2014 PlaceIQ, Inc
*
* This software is licensed under Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* ----------------------------------------------------------------------------
* Author: Jerome Serrano <jerome.serrano@placeiq.com>
* Date: 2015-01-09
* ---------------------------------------------------------------------------*/
package com.placeiq.piqconnect;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.VLongWritable;
import org.apache.hadoop.io.compress.SnappyCodec;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
/**
 * Job driver for the connected-component computation. Repeats the two-stage
 * block iteration (IterationStage1 then IterationStage2) until no more than
 * maxConvergence nodes changed component, or the iteration cap is reached,
 * then runs FinalResultBuilder to emit the final per-node result.
 */
public class Runner extends Configured implements Tool {
// absolute cap on iterations regardless of the CLI argument
public static int MAX_ITERATIONS = 1024;
private static final Logger LOG = LogManager.getLogger(Runner.class);
// both are overwritten from the CLI arguments in run()
private int numberOfReducers = 1;
private int blockSize = 64;
public static void main(final String[] args) throws Exception {
final int result = ToolRunner.run(new Configuration(), new Runner(), args);
System.exit(result);
}
// args: <edges> <vector> <workDir> <numReducers> <blockSize> <maxConvergence> <maxIters>
// returns 0 on success, -1 when any job fails
public int run(final String[] args) throws Exception {
Path pathEdges = new Path(args[0]);
Path pathVector = new Path(args[1]);
Path workDir = new Path(args[2]);
Path pathOutputStage1 = new Path(workDir, "stage1");
Path pathOutputStage2 = new Path(workDir, "stage2");
Path pathOutputVector = new Path(workDir, "result");
numberOfReducers = Integer.parseInt(args[3]);
blockSize = Integer.parseInt(args[4]);
int maxConvergence = Integer.parseInt(args[5]);
int maxIters = Integer.parseInt(args[6]);
// negative threshold is normalized: converge only when 0 nodes changed
if (maxConvergence < 0) {
maxConvergence = 0;
}
// out-of-range iteration counts fall back to the hard cap
if (maxIters < 0 || maxIters > MAX_ITERATIONS) {
maxIters = MAX_ITERATIONS;
}
FileSystem fs = FileSystem.get(getConf());
int n = 0;
long changedNodes = Long.MAX_VALUE;
// iterate until few enough nodes change component, or the cap is hit
while (n < maxIters && changedNodes > maxConvergence) {
// stage outputs are recreated on every iteration
fs.delete(pathOutputStage1, true);
fs.delete(pathOutputStage2, true);
LOG.info("Start iteration " + n + " Stage1");
Job job1 = buildJob1(pathEdges, pathVector, pathOutputStage1);
if (!job1.waitForCompletion(true)) {
LOG.error("Failed to execute IterationStage1 for iteration #" + n);
return -1;
}
LOG.info("Start iteration " + n + " Stage2");
Job job2 = buildJob2(pathOutputStage1, pathOutputStage2);
if (!job2.waitForCompletion(true)) {
LOG.error("Failed to execute IterationStage2 for iteration #" + n);
return -1;
}
// counters produced by stage 2 drive the convergence test above
changedNodes = job2.getCounters().findCounter(PiqConnectCounter.NUMBER_INCOMPLETE_VECTOR).getValue();
long unchangedNodes = job2.getCounters().findCounter(PiqConnectCounter.NUMBER_FINAL_VECTOR).getValue();
LOG.info("End of iteration " + n + ", changedNodes=" + changedNodes + ", unchangedNodes=" + unchangedNodes);
LOG.info(pathOutputStage2);
// stage 2's output becomes the input vector of the next iteration
fs.delete(pathVector, true);
if (!fs.rename(pathOutputStage2, pathVector)) {
LOG.error("failed to rename " + pathOutputStage2 + " into " + pathVector);
return -1;
}
n++;
}
// final pass materializes the converged vector into the result directory
Job job3 = buildJob3(pathVector, pathOutputVector);
if (!job3.waitForCompletion(true)) {
LOG.error("Failed to execute FinalResultBuilder for iteration #" + n);
return -1;
}
LOG.info("Connected component computed in " + n + " iterations");
return 0;
}
// Stage 1 job: joins the edge blocks (input1) with the current vector (input2),
// using a secondary sort (partition/group by block index, custom value order).
private Job buildJob1(Path input1, Path input2, Path output) throws Exception {
Configuration conf = getConf();
conf.setInt(Constants.PROP_BLOCK_SIZE, blockSize);
conf.set("mapred.output.compression.type", "BLOCK");
Job job = new Job(conf, "data-piqid.piqconnect.IterationStage1");
job.setJarByClass(Runner.class);
job.setMapperClass(IterationStage1._Mapper.class);
job.setReducerClass(IterationStage1._Reducer.class);
job.setInputFormatClass(SequenceFileInputFormat.class);
job.setOutputFormatClass(SequenceFileOutputFormat.class);
job.setNumReduceTasks(numberOfReducers);
job.setMapOutputKeyClass(IterationStage1.JoinKey.class);
job.setMapOutputValueClass(BlockWritable.class);
job.setOutputKeyClass(VLongWritable.class);
job.setOutputValueClass(BlockWritable.class);
job.setGroupingComparatorClass(IterationStage1.IndexComparator.class);
job.setPartitionerClass(IterationStage1.IndexPartitioner.class);
job.setSortComparatorClass(IterationStage1.SortComparator.class);
FileInputFormat.setInputPaths(job, input1, input2);
SequenceFileOutputFormat.setOutputPath(job, output);
SequenceFileOutputFormat.setCompressOutput(job, true);
setCompression(job);
return job;
}
// Stage 2 job: identity mapper; the reducer combines stage-1 output per block
// and updates the NUMBER_INCOMPLETE_VECTOR / NUMBER_FINAL_VECTOR counters.
private Job buildJob2(Path input, Path output) throws Exception {
Configuration conf = getConf();
conf.setInt(Constants.PROP_BLOCK_SIZE, blockSize);
Job job = new Job(conf, "data-piqid.piqconnect.IterationStage2");
job.setJarByClass(Runner.class);
job.setMapperClass(Mapper.class);
job.setReducerClass(IterationStage2._Reducer.class);
job.setNumReduceTasks(numberOfReducers);
job.setInputFormatClass(SequenceFileInputFormat.class);
job.setOutputFormatClass(SequenceFileOutputFormat.class);
job.setMapOutputKeyClass(VLongWritable.class);
job.setMapOutputValueClass(BlockWritable.class);
job.setOutputKeyClass(BlockIndexWritable.class);
job.setOutputValueClass(BlockWritable.class);
job.setSortComparatorClass(VLongWritableComparator.class);
SequenceFileInputFormat.setInputPaths(job, input);
FileOutputFormat.setOutputPath(job, output);
FileOutputFormat.setCompressOutput(job, true);
setCompression(job);
return job;
}
// Final job: map-only pass that converts the converged block vector into the
// output key/value form.
private Job buildJob3(Path input, Path output) throws Exception {
Configuration conf = getConf();
conf.setInt(Constants.PROP_BLOCK_SIZE, blockSize);
Job job = new Job(conf, "data-piqid.piqconnect.FinalResultBuilder");
job.setJarByClass(Runner.class);
job.setMapperClass(FinalResultBuilder._Mapper.class);
job.setInputFormatClass(SequenceFileInputFormat.class);
job.setNumReduceTasks(0);
job.setOutputKeyClass(VLongWritable.class);
job.setOutputValueClass(VLongWritable.class);
FileInputFormat.setInputPaths(job, input);
FileOutputFormat.setOutputPath(job, output);
FileOutputFormat.setCompressOutput(job, true);
setCompression(job);
return job;
}
// Snappy-compresses the job output and the intermediate map output.
public static void setCompression(Job job) {
FileOutputFormat.setOutputCompressorClass(job, SnappyCodec.class);
job.getConfiguration().set("mapred.map.output.compression.codec", "org.apache.hadoop.io.compress.SnappyCodec");
}
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.chimesdkmessaging.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
 * <p>
 * Stores information about a message status.
 * </p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/chime-sdk-messaging-2021-05-15/ChannelMessageStatusStructure"
 *      target="_top">AWS API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ChannelMessageStatusStructure implements Serializable, Cloneable, StructuredPojo {

    /** The message status value. */
    private String value;

    /** More details about the message status. */
    private String detail;

    /**
     * Sets the message status value.
     *
     * @param value
     *        The message status value.
     * @see ChannelMessageStatus
     */
    public void setValue(String value) {
        this.value = value;
    }

    /**
     * Returns the message status value.
     *
     * @return The message status value.
     * @see ChannelMessageStatus
     */
    public String getValue() {
        return this.value;
    }

    /**
     * Sets the message status value.
     *
     * @param value
     *        The message status value.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see ChannelMessageStatus
     */
    public ChannelMessageStatusStructure withValue(String value) {
        setValue(value);
        return this;
    }

    /**
     * Sets the message status value from the {@link ChannelMessageStatus} enum.
     *
     * @param value
     *        The message status value.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see ChannelMessageStatus
     */
    public ChannelMessageStatusStructure withValue(ChannelMessageStatus value) {
        this.value = value.toString();
        return this;
    }

    /**
     * Sets the details about the message status.
     *
     * @param detail
     *        More details about the message status.
     */
    public void setDetail(String detail) {
        this.detail = detail;
    }

    /**
     * Returns the details about the message status.
     *
     * @return More details about the message status.
     */
    public String getDetail() {
        return this.detail;
    }

    /**
     * Sets the details about the message status.
     *
     * @param detail
     *        More details about the message status.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ChannelMessageStatusStructure withDetail(String detail) {
        setDetail(detail);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getValue() != null) {
            sb.append("Value: ").append(getValue()).append(",");
        }
        if (getDetail() != null) {
            sb.append("Detail: ").append(getDetail());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof is false for null, covering the original's explicit null check
        if (!(obj instanceof ChannelMessageStatusStructure)) {
            return false;
        }
        ChannelMessageStatusStructure other = (ChannelMessageStatusStructure) obj;
        return java.util.Objects.equals(other.getValue(), this.getValue())
                && java.util.Objects.equals(other.getDetail(), this.getDetail());
    }

    @Override
    public int hashCode() {
        // Objects.hash uses the same 31-based combination (initial value 1,
        // null treated as 0, fields in declaration order) as the generated code.
        return java.util.Objects.hash(getValue(), getDetail());
    }

    @Override
    public ChannelMessageStatusStructure clone() {
        try {
            return (ChannelMessageStatusStructure) super.clone();
        }
        catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.chimesdkmessaging.model.transform.ChannelMessageStatusStructureMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
| |
package org.assertj.android.api.widget;
import android.annotation.TargetApi;
import android.graphics.Typeface;
import android.text.TextUtils;
import android.widget.TextView;
import org.assertj.android.api.view.AbstractViewAssert;
import java.util.regex.Pattern;
import static android.os.Build.VERSION_CODES.HONEYCOMB;
import static android.os.Build.VERSION_CODES.JELLY_BEAN;
import static android.os.Build.VERSION_CODES.JELLY_BEAN_MR1;
import static org.assertj.core.api.Assertions.assertThat;
public abstract class AbstractTextViewAssert<S extends AbstractTextViewAssert<S, A>, A extends TextView>
extends AbstractViewAssert<S, A> {
/**
 * Creates the assertion.
 * @param actual the {@code TextView} under test
 * @param selfType the concrete assertion subtype, used for fluent chaining
 */
protected AbstractTextViewAssert(A actual, Class<S> selfType) {
super(actual, selfType);
}
// Asserts that the view's auto-link mask equals the expected mask.
public S hasAutoLinkMask(int mask) {
	isNotNull();
	final int observed = actual.getAutoLinkMask();
	assertThat(observed)
			.overridingErrorMessage("Expected auto-link mask <%s> but was <%s>.", mask, observed)
			.isEqualTo(mask);
	return myself;
}
public S hasCompoundDrawablePadding(int padding) {
isNotNull();
int actualPadding = actual.getCompoundDrawablePadding();
assertThat(actualPadding) //
.overridingErrorMessage("Expected compound drawable padding <%s> but was <%s>.", padding,
actualPadding) //
.isEqualTo(padding);
return myself;
}
public S hasCompoundPaddingBottom(int padding) {
isNotNull();
int actualPadding = actual.getCompoundPaddingBottom();
assertThat(actualPadding) //
.overridingErrorMessage("Expected compound drawable bottom padding <%s> but was <%s>.",
padding, actualPadding) //
.isEqualTo(padding);
return myself;
}
@TargetApi(JELLY_BEAN_MR1)
public S hasCompoundPaddingEnd(int padding) {
isNotNull();
int actualPadding = actual.getCompoundPaddingEnd();
assertThat(actualPadding) //
.overridingErrorMessage("Expected compound drawable end padding <%s> but was <%s>.",
padding, actualPadding) //
.isEqualTo(padding);
return myself;
}
public S hasCompoundPaddingLeft(int padding) {
isNotNull();
int actualPadding = actual.getCompoundPaddingLeft();
assertThat(actualPadding) //
.overridingErrorMessage("Expected compound drawable left padding <%s> but was <%s>.",
padding, actualPadding) //
.isEqualTo(padding);
return myself;
}
public S hasCompoundPaddingRight(int padding) {
isNotNull();
int actualPadding = actual.getCompoundPaddingRight();
assertThat(actualPadding) //
.overridingErrorMessage("Expected compound drawable right padding <%s> but was <%s>.",
padding, actualPadding) //
.isEqualTo(padding);
return myself;
}
@TargetApi(JELLY_BEAN_MR1)
public S hasCompoundPaddingStart(int padding) {
isNotNull();
int actualPadding = actual.getCompoundPaddingStart();
assertThat(actualPadding) //
.overridingErrorMessage("Expected compound drawable start padding <%s> but was <%s>.",
padding, actualPadding) //
.isEqualTo(padding);
return myself;
}
public S hasCompoundPaddingTop(int padding) {
isNotNull();
int actualPadding = actual.getCompoundPaddingTop();
assertThat(actualPadding) //
.overridingErrorMessage("Expected compound drawable top padding <%s> but was <%s>.",
padding, actualPadding) //
.isEqualTo(padding);
return myself;
}
public S hasCurrentHintTextColor(int color) {
isNotNull();
int actualColor = actual.getCurrentHintTextColor();
assertThat(actualColor) //
.overridingErrorMessage("Expected current hint text color <%s> but was <%s>.",
Integer.toHexString(color), Integer.toHexString(actualColor)) //
.isEqualTo(color);
return myself;
}
public S hasCurrentTextColor(int color) {
isNotNull();
int actualColor = actual.getCurrentTextColor();
assertThat(actualColor) //
.overridingErrorMessage("Expected current text color <%s> but was <%s>.",
Integer.toHexString(color), Integer.toHexString(actualColor)) //
.isEqualTo(color);
return myself;
}
public S hasEllipsize(TextUtils.TruncateAt truncation) {
isNotNull();
TextUtils.TruncateAt actualTruncation = actual.getEllipsize();
assertThat(actualTruncation) //
.overridingErrorMessage("Expected ellipsize <%s> but was <%s>.", truncation,
actualTruncation) //
.isEqualTo(truncation);
return myself;
}
public S hasError() {
isNotNull();
assertThat(actual.getError()) //
.overridingErrorMessage("Expected error but had none.") //
.isNotNull();
return myself;
}
public S hasNoError() {
isNotNull();
assertThat(actual.getError()) //
.overridingErrorMessage("Expected no error but had one.") //
.isNull();
return myself;
}
public S hasError(CharSequence error) {
isNotNull();
CharSequence actualError = actual.getError();
assertThat(actualError) //
.overridingErrorMessage("Expected error <%s> but was <%s>.", error, actualError) //
.isEqualTo(error);
return myself;
}
public S hasError(int resId) {
isNotNull();
return hasError(actual.getContext().getString(resId));
}
public S hasExtendedPaddingBottom(int padding) {
isNotNull();
int actualPadding = actual.getExtendedPaddingBottom();
assertThat(actualPadding) //
.overridingErrorMessage("Expected extended bottom padding <%s> but was <%s>.", padding,
actualPadding) //
.isEqualTo(padding);
return myself;
}
public S hasExtendedPaddingTop(int padding) {
isNotNull();
int actualPadding = actual.getExtendedPaddingTop();
assertThat(actualPadding) //
.overridingErrorMessage("Expected extended top padding <%s> but was <%s>.", padding,
actualPadding) //
.isEqualTo(padding);
return myself;
}
public S hasFreezesText(boolean freezes) {
isNotNull();
boolean actualFreezes = actual.getFreezesText();
assertThat(actualFreezes) //
.overridingErrorMessage("Expected freezes text <%s> but was <%s>.", freezes,
actualFreezes) //
.isEqualTo(freezes);
return myself;
}
public S hasGravity(int gravity) {
isNotNull();
int actualGravity = actual.getGravity();
// TODO tostring the flags for output
assertThat(actualGravity) //
.overridingErrorMessage("Expected gravity <%s> but was <%s>.", gravity, actualGravity) //
.isEqualTo(gravity);
return myself;
}
@TargetApi(JELLY_BEAN)
public S hasHighlightColor(int color) {
isNotNull();
int actualColor = actual.getHighlightColor();
assertThat(actualColor) //
.overridingErrorMessage("Expected highlight color <%s> but was <%s>.",
Integer.toHexString(color), Integer.toHexString(actualColor)) //
.isEqualTo(color);
return myself;
}
public S hasHint(CharSequence hint) {
isNotNull();
CharSequence actualHint = actual.getHint();
assertThat(actualHint) //
.overridingErrorMessage("Expected hint <%s> but was <%s>.", hint, actualHint) //
.isEqualTo(hint);
return myself;
}
public S hasHint(int resId) {
isNotNull();
return hasHint(actual.getContext().getString(resId));
}
public S hasImeActionId(int id) {
isNotNull();
int actualId = actual.getImeActionId();
assertThat(actualId) //
.overridingErrorMessage("Expected IME action ID <%s> but was <%s>.", id, actualId) //
.isEqualTo(id);
return myself;
}
public S hasImeActionLabel(CharSequence label) {
isNotNull();
CharSequence actualLabel = actual.getImeActionLabel();
assertThat(actualLabel) //
.overridingErrorMessage("Expected IME action label <%s> but was <%s>.", label,
actualLabel) //
.isEqualTo(label);
return myself;
}
public S hasImeActionLabel(int resId) {
isNotNull();
return hasImeActionLabel(actual.getContext().getString(resId));
}
public S hasImeOptions(int options) {
isNotNull();
int actualOptions = actual.getImeOptions();
assertThat(actualOptions) //
// TODO tostring flags values
.overridingErrorMessage("Expected IME options <%s> but was <%s>.", options,
actualOptions) //
.isEqualTo(options);
return myself;
}
@TargetApi(JELLY_BEAN)
public S isIncludingFontPadding() {
isNotNull();
assertThat(actual.getIncludeFontPadding()) //
.overridingErrorMessage("Expected to be including font padding but was not.") //
.isTrue();
return myself;
}
@TargetApi(JELLY_BEAN)
public S isNotIncludingFontPadding() {
isNotNull();
assertThat(actual.getIncludeFontPadding()) //
.overridingErrorMessage("Expected to not be including font padding but was.") //
.isFalse();
return myself;
}
public S hasInputType(int type) {
isNotNull();
int actualType = actual.getInputType();
assertThat(actualType) //
.overridingErrorMessage("Expected input type <%s> but was <%s>.", type, actualType) //
.isEqualTo(type);
return myself;
}
public S hasLineCount(int count) {
isNotNull();
int actualCount = actual.getLineCount();
assertThat(actualCount) //
.overridingErrorMessage("Expected line count <%s> but was <%s>.", count, actualCount) //
.isEqualTo(count);
return myself;
}
public S hasLineHeight(int height) {
isNotNull();
int actualHeight = actual.getLineHeight();
assertThat(actualHeight) //
.overridingErrorMessage("Expected line height <%s> but was <%s>.", height, actualHeight) //
.isEqualTo(height);
return myself;
}
@TargetApi(JELLY_BEAN)
public S hasLineSpacingExtra(float extra) {
isNotNull();
float actualExtra = actual.getLineSpacingExtra();
assertThat(actualExtra) //
.overridingErrorMessage("Expected line spacing extra <%s> but was <%s>.", extra,
actualExtra) //
.isEqualTo(extra);
return myself;
}
@TargetApi(JELLY_BEAN)
public S hasLineSpacingMultiplier(float multiplier) {
isNotNull();
float actualMultiplier = actual.getLineSpacingMultiplier();
assertThat(actualMultiplier) //
.overridingErrorMessage("Expected line spacing multiplier <%s> but was <%s>.", multiplier,
actualMultiplier) //
.isEqualTo(multiplier);
return myself;
}
@TargetApi(JELLY_BEAN)
public S hasMarqueeRepeatLimit(int limit) {
isNotNull();
int actualLimit = actual.getMarqueeRepeatLimit();
assertThat(actualLimit) //
.overridingErrorMessage("Expected marquee repeat limit <%s> but was <%s>.", limit,
actualLimit) //
.isEqualTo(limit);
return myself;
}
@TargetApi(JELLY_BEAN)
public S hasMaxEms(int ems) {
isNotNull();
int actualEms = actual.getMaxEms();
assertThat(actualEms) //
.overridingErrorMessage("Expected maximum EMs <%s> but was <%s>.", ems, actualEms) //
.isEqualTo(ems);
return myself;
}
@TargetApi(JELLY_BEAN)
public S hasMaxHeight(int height) {
isNotNull();
int actualHeight = actual.getMaxHeight();
assertThat(actualHeight) //
.overridingErrorMessage("Expected maximum height <%s> but was <%s>.", height,
actualHeight) //
.isEqualTo(height);
return myself;
}
@TargetApi(JELLY_BEAN)
public S hasMaxLines(int lines) {
isNotNull();
int actualLines = actual.getMaxLines();
assertThat(actualLines) //
.overridingErrorMessage("Expected maximum lines <%s> but was <%s>.", lines, actualLines) //
.isEqualTo(lines);
return myself;
}
@TargetApi(JELLY_BEAN)
public S hasMaxWidth(int ems) {
isNotNull();
int actualWidth = actual.getMaxWidth();
assertThat(actualWidth) //
.overridingErrorMessage("Expected maximum width <%s> but was <%s>.", ems, actualWidth) //
.isEqualTo(ems);
return myself;
}
@TargetApi(JELLY_BEAN)
public S hasMinEms(int ems) {
isNotNull();
int actualEms = actual.getMinEms();
assertThat(actualEms) //
.overridingErrorMessage("Expected minimum EMs <%s> but was <%s>.", ems, actualEms) //
.isEqualTo(ems);
return myself;
}
@TargetApi(JELLY_BEAN)
public S hasMinHeight(int height) {
isNotNull();
int actualHeight = actual.getMinHeight();
assertThat(actualHeight) //
.overridingErrorMessage("Expected minimum height <%s> but was <%s>.", height,
actualHeight) //
.isEqualTo(height);
return myself;
}
@TargetApi(JELLY_BEAN)
public S hasMinLines(int lines) {
isNotNull();
int actualLines = actual.getMinLines();
assertThat(actualLines) //
.overridingErrorMessage("Expected minimum lines <%s> but was <%s>.", lines, actualLines) //
.isEqualTo(lines);
return myself;
}
@TargetApi(JELLY_BEAN)
public S hasMinWidth(int ems) {
isNotNull();
int actualWidth = actual.getMinWidth();
assertThat(actualWidth) //
.overridingErrorMessage("Expected minimum width <%s> but was <%s>.", ems, actualWidth) //
.isEqualTo(ems);
return myself;
}
public S hasPaintFlags(int flags) {
isNotNull();
int actualFlags = actual.getPaintFlags();
assertThat(actualFlags) //
.overridingErrorMessage("Expected paint flags <%s> but was <%s>.", flags, actualFlags) //
.isEqualTo(flags);
return myself;
}
public S hasSelectionEnd(int position) {
isNotNull();
int actualPosition = actual.getSelectionEnd();
assertThat(actualPosition) //
.overridingErrorMessage("Expected selection end <%s> but was <%s>.", position,
actualPosition) //
.isEqualTo(position);
return myself;
}
public S hasSelectionStart(int position) {
isNotNull();
int actualPosition = actual.getSelectionStart();
assertThat(actualPosition) //
.overridingErrorMessage("Expected selection start <%s> but was <%s>.", position,
actualPosition) //
.isEqualTo(position);
return myself;
}
@TargetApi(JELLY_BEAN)
public S hasShadowColor(int color) {
isNotNull();
int actualColor = actual.getShadowColor();
assertThat(actualColor) //
.overridingErrorMessage("Expected shadow color <%s> but was <%s>.",
Integer.toHexString(color), Integer.toHexString(actualColor)) //
.isEqualTo(color);
return myself;
}
@TargetApi(JELLY_BEAN)
public S hasShadowDx(float dx) {
isNotNull();
float actualDx = actual.getShadowDx();
assertThat(actualDx) //
.overridingErrorMessage("Expected shadow DX <%s> but was <%s>.", dx, actualDx) //
.isEqualTo(dx);
return myself;
}
@TargetApi(JELLY_BEAN)
public S hasShadowDy(float dy) {
isNotNull();
float actualDy = actual.getShadowDy();
assertThat(actualDy) //
.overridingErrorMessage("Expected shadow DY <%s> but was <%s>.", dy, actualDy) //
.isEqualTo(dy);
return myself;
}
@TargetApi(JELLY_BEAN)
public S hasShadowRadius(float radius) {
isNotNull();
float actualRadius = actual.getShadowRadius();
assertThat(actualRadius) //
.overridingErrorMessage("Expected shadow radius <%s> but was <%s>.", radius,
actualRadius) //
.isEqualTo(radius);
return myself;
}
public S hasText(CharSequence text) {
isNotNull();
CharSequence actualText = actual.getText();
assertThat(actualText) //
.overridingErrorMessage("Expected text <%s> but was <%s>.", text, actualText) //
.isEqualTo(text);
return myself;
}
public S hasText(int resId) {
isNotNull();
return hasText(actual.getContext().getString(resId));
}
public S hasTextString(String text) {
isNotNull();
String actualText = actual.getText().toString();
assertThat(actualText) //
.overridingErrorMessage("Expected text string <%s> but was <%s>.", text, actualText) //
.isEqualTo(text);
return myself;
}
public S hasTextString(int resId) {
isNotNull();
return hasTextString(actual.getContext().getString(resId));
}
public S isEmpty() {
isNotNull();
return hasTextString("");
}
public S isNotEmpty() {
isNotNull();
CharSequence text = actual.getText();
assertThat(text) //
.overridingErrorMessage("Expected empty text but was <%s>.", text) //
.isNotEqualTo("");
return myself;
}
public S matches(Pattern pattern) {
isNotNull();
String text = actual.getText().toString();
assertThat(pattern.matcher(text).matches())
.overridingErrorMessage("Expected text <%s> to match <%s>, but did not.", text,
pattern.pattern())
.isTrue();
return myself;
}
public S doesNotMatch(Pattern pattern) {
isNotNull();
String text = actual.getText().toString();
assertThat(pattern.matcher(text).matches())
.overridingErrorMessage("Expected text <%s> to not match <%s>, but did.", text,
pattern.pattern())
.isFalse();
return myself;
}
public S containsText(String sequence) {
isNotNull();
assertThat(actual.getText().toString()).contains(sequence);
return myself;
}
public S containsText(int resId) {
isNotNull();
return containsText(actual.getContext().getString(resId));
}
public S doesNotContainText(String sequence) {
isNotNull();
assertThat(actual.getText().toString()).doesNotContain(sequence);
return myself;
}
public S doesNotContainText(int resId) {
isNotNull();
return doesNotContainText(actual.getContext().getString(resId));
}
public S startsWithText(String sequence) {
isNotNull();
String text = actual.getText().toString();
assertThat(text.startsWith(sequence)) //
.overridingErrorMessage("Expected text <%s> to start with <%s> but did not.", text,
sequence) //
.isTrue();
return myself;
}
public S startsWithText(int resId) {
isNotNull();
return startsWithText(actual.getContext().getString(resId));
}
public S doesNotStartWithText(String sequence) {
isNotNull();
String text = actual.getText().toString();
assertThat(text.startsWith(sequence)) //
.overridingErrorMessage("Expected text <%s> to not start with <%s> but did.", text,
sequence) //
.isFalse();
return myself;
}
public S doesNotStartWithText(int resId) {
isNotNull();
return doesNotStartWithText(actual.getContext().getString(resId));
}
public S endsWithText(String sequence) {
isNotNull();
String text = actual.getText().toString();
assertThat(text.endsWith(sequence)) //
.overridingErrorMessage("Expected text <%s> to end with <%s> but did not.", text,
sequence) //
.isTrue();
return myself;
}
public S endsWithText(int resId) {
isNotNull();
return endsWithText(actual.getContext().getString(resId));
}
public S doesNotEndWithText(String sequence) {
isNotNull();
String text = actual.getText().toString();
assertThat(text.endsWith(sequence)) //
.overridingErrorMessage("Expected text <%s> to not end with <%s> but did.", text,
sequence) //
.isFalse();
return myself;
}
public S doesNotEndWithText(int resId) {
isNotNull();
return doesNotEndWithText(actual.getContext().getString(resId));
}
// TODO API 17
//public S hasTextLocale(Locale locale) {
// isNotNull();
// Locale actualLocale = actual.getTextLocale();
// assertThat(actualLocale) //
// .overridingErrorMessage("Expected text locale <%s> but was <%s>.", locale, actualLocale) //
// .isEqualTo(locale);
// return myself;
//}
public S hasTextScaleX(float scale) {
isNotNull();
float actualScale = actual.getTextScaleX();
assertThat(actualScale) //
.overridingErrorMessage("Expected text X scale <%s> but was <%s>.", scale, actualScale) //
.isEqualTo(scale);
return myself;
}
public S hasTextSize(float size) {
isNotNull();
float actualSize = actual.getTextSize();
assertThat(actualSize) //
.overridingErrorMessage("Expected text size <%s> but was <%s>.", size, actualSize) //
.isEqualTo(size);
return myself;
}
public S hasTotalPaddingBottom(int padding) {
isNotNull();
int actualPadding = actual.getTotalPaddingBottom();
assertThat(actualPadding) //
.overridingErrorMessage("Expected total bottom padding <%s> but was <%s>.", padding,
actualPadding) //
.isEqualTo(padding);
return myself;
}
@TargetApi(JELLY_BEAN_MR1)
public S hasTotalPaddingEnd(int padding) {
isNotNull();
int actualPadding = actual.getTotalPaddingEnd();
assertThat(actualPadding) //
.overridingErrorMessage("Expected total end padding <%s> but was <%s>.", padding,
actualPadding) //
.isEqualTo(padding);
return myself;
}
public S hasTotalPaddingLeft(int padding) {
isNotNull();
int actualPadding = actual.getTotalPaddingLeft();
assertThat(actualPadding) //
.overridingErrorMessage("Expected total left padding <%s> but was <%s>.", padding,
actualPadding) //
.isEqualTo(padding);
return myself;
}
public S hasTotalPaddingRight(int padding) {
isNotNull();
int actualPadding = actual.getTotalPaddingRight();
assertThat(actualPadding) //
.overridingErrorMessage("Expected total right padding <%s> but was <%s>.", padding,
actualPadding) //
.isEqualTo(padding);
return myself;
}
@TargetApi(JELLY_BEAN_MR1)
public S hasTotalPaddingStart(int padding) {
isNotNull();
int actualPadding = actual.getTotalPaddingStart();
assertThat(actualPadding) //
.overridingErrorMessage("Expected total start padding <%s> but was <%s>.", padding,
actualPadding) //
.isEqualTo(padding);
return myself;
}
public S hasTotalPaddingTop(int padding) {
isNotNull();
int actualPadding = actual.getTotalPaddingTop();
assertThat(actualPadding) //
.overridingErrorMessage("Expected total top padding <%s> but was <%s>.", padding,
actualPadding) //
.isEqualTo(padding);
return myself;
}
public S hasTypeface(Typeface typeface) {
isNotNull();
Typeface actualTypeface = actual.getTypeface();
assertThat(actualTypeface) //
.overridingErrorMessage("Expected typeface <%s> but was <%s>.", typeface, actualTypeface) //
.isSameAs(typeface);
return myself;
}
@TargetApi(JELLY_BEAN)
public S isCursorVisible() {
isNotNull();
assertThat(actual.isCursorVisible()) //
.overridingErrorMessage("Expected cursor to be visible but was not visible.") //
.isTrue();
return myself;
}
@TargetApi(JELLY_BEAN)
public S isCursorNotVisible() {
isNotNull();
assertThat(actual.isCursorVisible()) //
.overridingErrorMessage("Expected cursor to not be visible but was visible.") //
.isFalse();
return myself;
}
@TargetApi(HONEYCOMB)
public S hasSelectableText() {
isNotNull();
assertThat(actual.isTextSelectable()) //
.overridingErrorMessage("Expected text to be selectable but was not.") //
.isTrue();
return myself;
}
@TargetApi(HONEYCOMB)
public S hasUnselectableText() {
isNotNull();
assertThat(actual.isTextSelectable()) //
.overridingErrorMessage("Expected text to not be selectable but was.") //
.isFalse();
return myself;
}
public S hasLength(int length) {
isNotNull();
int actualLength = actual.length();
assertThat(actualLength) //
.overridingErrorMessage("Expected length <%s> but was <%s>.", length, actualLength) //
.isEqualTo(length);
return myself;
}
public S isInputMethodTarget() {
isNotNull();
assertThat(actual.isInputMethodTarget()) //
.overridingErrorMessage("Expected to be the input method target but was not.") //
.isTrue();
return myself;
}
public S isNotInputMethodTarget() {
isNotNull();
assertThat(actual.isInputMethodTarget()) //
.overridingErrorMessage("Expected to not be the input method target but was.") //
.isFalse();
return myself;
}
}
| |
/*
* Copyright 2003 - 2019 The eFaps Team
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.efaps.admin.common;
import java.io.Serializable;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.UUID;
import java.util.stream.Collectors;
import org.efaps.admin.datamodel.Type;
import org.efaps.admin.user.Company;
import org.efaps.ci.CIAdminCommon;
import org.efaps.db.Context;
import org.efaps.db.InstanceQuery;
import org.efaps.db.MultiPrintQuery;
import org.efaps.db.QueryBuilder;
import org.efaps.db.SelectBuilder;
import org.efaps.db.wrapper.SQLPart;
import org.efaps.db.wrapper.SQLSelect;
import org.efaps.util.EFapsException;
import org.efaps.util.cache.CacheLogListener;
import org.efaps.util.cache.CacheObjectInterface;
import org.efaps.util.cache.CacheReloadException;
import org.efaps.util.cache.InfinispanCache;
import org.infinispan.Cache;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * An Association resolves, for a given {@link Company} and {@link Type},
 * the configured association instance. Resolved instances are cached in
 * Infinispan under four caches: by UUID, by id, by name and by
 * (company, type) key. Lookup walks the type hierarchy upwards and falls
 * back to the company default.
 */
public class Association
    implements CacheObjectInterface, Serializable
{
    /** */
    private static final long serialVersionUID = 1L;

    /**
     * Logging instance used in this class.
     */
    private static final Logger LOG = LoggerFactory.getLogger(Association.class);

    /**
     * Name of the Cache by UUID.
     */
    private static final String UUIDCACHE = Association.class.getName() + ".UUID";

    /**
     * Name of the Cache by ID.
     */
    private static final String IDCACHE = Association.class.getName() + ".ID";

    /**
     * Name of the Cache by Name.
     */
    private static final String NAMECACHE = Association.class.getName() + ".Name";

    /**
     * Name of the Cache by (company, type) key.
     */
    private static final String KEYCACHE = Association.class.getName() + ".Key";

    /**
     * Statement resolving the association id for one (company, type) tuple;
     * the two "?" parameters are company id and type id, in that order.
     */
    private static final String SQL = new SQLSelect()
                    .column(0, "ID")
                    .from("T_CMASSOC", 0)
                    .leftJoin("T_CMASSOCDEF", 1, "ASSOCID", 0, "ID")
                    .leftJoin("T_CMASSOCMAP", 2, "ASSOCID", 0, "ID")
                    .addPart(SQLPart.WHERE)
                    .addColumnPart(1, "COMPANYID").addPart(SQLPart.EQUAL).addValuePart("?")
                    .addPart(SQLPart.AND)
                    .addColumnPart(2, "TYPEID").addPart(SQLPart.EQUAL).addValuePart("?")
                    .toString();

    /**
     * The instance variable stores the id of this Association.
     */
    private final long id;

    /**
     * The instance variable stores the UUID of this Association.
     */
    private final UUID uuid;

    /**
     * The instance variable stores the Name of this Association.
     */
    private final String name;

    /** Ids of the companies this Association is defined for. */
    private final Set<Long> companyIds = new HashSet<>();

    /**
     * @param _id id of the Association
     * @param _name name of the Association
     * @param _uuid UUID of the Association as String
     */
    private Association(final long _id,
                        final String _name,
                        final String _uuid)
    {
        id = _id;
        name = _name;
        uuid = UUID.fromString(_uuid);
    }

    @Override
    public String getName()
    {
        return name;
    }

    @Override
    public UUID getUUID()
    {
        return uuid;
    }

    @Override
    public long getId()
    {
        return id;
    }

    /**
     * @param _companyId id of the Company to register
     */
    private void addCompanyId(final Long _companyId)
    {
        companyIds.add(_companyId);
    }

    /**
     * Resolves {@link #companyIds} to Company instances.
     * NOTE(review): if a Company cannot be loaded the returned set contains
     * {@code null} for it — callers should be prepared for that.
     *
     * @return Companies this Association is defined for
     */
    public Set<Company> getCompanies()
    {
        return companyIds.stream()
            .map(companyId -> {
                try {
                    return Company.get(companyId);
                } catch (final CacheReloadException e) {
                    LOG.error("Could not load Company for id {}", companyId, e);
                }
                return null;
            })
            .collect(Collectors.toSet());
    }

    /**
     * Method to initialize the {@link #CACHE cache} for the Associations.
     * Existing caches are cleared; freshly created ones get a log listener.
     */
    public static void initialize()
    {
        if (InfinispanCache.get().exists(Association.UUIDCACHE)) {
            InfinispanCache.get().getCache(Association.UUIDCACHE).clear();
        } else {
            InfinispanCache.get().<UUID, Association>getCache(Association.UUIDCACHE)
                .addListener(new CacheLogListener(Association.LOG));
        }
        if (InfinispanCache.get().exists(Association.IDCACHE)) {
            InfinispanCache.get().getCache(Association.IDCACHE).clear();
        } else {
            InfinispanCache.get().<Long, Association>getCache(Association.IDCACHE)
                .addListener(new CacheLogListener(Association.LOG));
        }
        if (InfinispanCache.get().exists(Association.NAMECACHE)) {
            InfinispanCache.get().getCache(Association.NAMECACHE).clear();
        } else {
            InfinispanCache.get().<String, Association>getCache(Association.NAMECACHE)
                .addListener(new CacheLogListener(Association.LOG));
        }
        if (InfinispanCache.get().exists(Association.KEYCACHE)) {
            InfinispanCache.get().getCache(Association.KEYCACHE).clear();
        } else {
            InfinispanCache.get().<AssociationKey, Long>getCache(Association.KEYCACHE)
                .addListener(new CacheLogListener(Association.LOG));
        }
    }

    /**
     * @param _id id of the wanted Association
     * @return the Association for the given id; may be {@code null} if it
     *         could not be loaded from the database
     * @throws EFapsException on error
     */
    public static Association get(final long _id)
        throws EFapsException
    {
        final Cache<Long, Association> cache = InfinispanCache.get().<Long, Association>getCache(Association.IDCACHE);
        if (!cache.containsKey(_id)) {
            Association.loadAssociation(_id);
        }
        return cache.get(_id);
    }

    /**
     * Loads the Association for the given id from the database and caches it.
     *
     * @param _id id of the Association to load
     * @throws EFapsException on error
     */
    @SuppressWarnings("unchecked")
    private static void loadAssociation(final long _id)
        throws EFapsException
    {
        final QueryBuilder queryBldr = new QueryBuilder(CIAdminCommon.AssociationAbstract);
        queryBldr.addWhereAttrEqValue(CIAdminCommon.AssociationAbstract.ID, _id);
        final MultiPrintQuery multi = queryBldr.getPrint();
        final SelectBuilder selCompanyIds = SelectBuilder.get()
                        .linkfrom(CIAdminCommon.AssociationDefinition.AssociationLink)
                        .attribute(CIAdminCommon.AssociationDefinition.CompanyLink);
        multi.addSelect(selCompanyIds);
        multi.addAttribute(CIAdminCommon.AssociationAbstract.Name, CIAdminCommon.AssociationAbstract.UUID);
        multi.executeWithoutAccessCheck();
        if (multi.next()) {
            final String name = multi.getAttribute(CIAdminCommon.AssociationAbstract.Name);
            final String uuid = multi.getAttribute(CIAdminCommon.AssociationAbstract.UUID);
            final Object companies = multi.getSelect(selCompanyIds);
            final Association association = new Association(_id, name, uuid);
            // the select returns either a single Long or a List<Long>
            if (multi.isList4Select(selCompanyIds.toString())) {
                ((List<Long>) companies).forEach(companyId -> association.addCompanyId(companyId));
            } else {
                association.addCompanyId((Long) companies);
            }
            cacheAssociation(association);
        } else {
            LOG.error("Could not find an Association for id {}", _id);
        }
    }

    /**
     * Registers the given Association in the UUID, name and id caches.
     *
     * @param _association Association to cache
     */
    private static void cacheAssociation(final Association _association)
    {
        final Cache<UUID, Association> cache4UUID = InfinispanCache.get().<UUID, Association>getIgnReCache(
                        Association.UUIDCACHE);
        cache4UUID.putIfAbsent(_association.getUUID(), _association);
        final Cache<String, Association> nameCache = InfinispanCache.get().<String, Association>getIgnReCache(
                        Association.NAMECACHE);
        nameCache.putIfAbsent(_association.getName(), _association);
        final Cache<Long, Association> idCache = InfinispanCache.get().<Long, Association>getIgnReCache(
                        Association.IDCACHE);
        idCache.putIfAbsent(_association.getId(), _association);
    }

    /**
     * Evaluates the Association for the given type and the company of the
     * current thread context.
     *
     * @param _type Type the Association is wanted for
     * @return Association for the given type
     * @throws EFapsException on error
     */
    public static Association evaluate(final Type _type)
        throws EFapsException
    {
        final Long companyId = Context.getThreadContext().getCompany().getId();
        return Association.evaluate(_type, companyId);
    }

    /**
     * Evaluates the Association for the given type and company.
     *
     * @param _type Type the Association is wanted for
     * @param _companyId id of the Company the Association is wanted for
     * @return Association for the given type and company
     * @throws EFapsException on error
     */
    public static Association evaluate(final Type _type,
                                       final long _companyId)
        throws EFapsException
    {
        final Long typeId = _type.getId();
        final AssociationKey key = AssociationKey.get(_companyId, typeId);
        final Cache<AssociationKey, Long> cache = InfinispanCache.get().<AssociationKey, Long>getCache(Association.KEYCACHE);
        if (!cache.containsKey(key)) {
            load(_companyId, _type);
        }
        final Long associationId = cache.get(key);
        return Association.get(associationId);
    }

    /**
     * Resolves the association id for the given company and type by walking
     * the type hierarchy upwards, falling back to the company default, and
     * registers the result for every type visited.
     *
     * @param _companyId id of the Company
     * @param _type Type to resolve
     * @throws EFapsException on error
     */
    private static void load(final long _companyId,
                             final Type _type)
        throws EFapsException
    {
        final Cache<AssociationKey, Long> cache = InfinispanCache.get().<AssociationKey, Long>getCache(Association.KEYCACHE);
        final Set<Long> typeIds = new HashSet<>();
        Long assocId = null;
        Type currentType = _type;
        while (assocId == null && currentType != null) {
            final AssociationKey verifyKey = AssociationKey.get(_companyId, currentType.getId());
            if (cache.containsKey(verifyKey)) {
                assocId = cache.get(verifyKey);
            } else {
                assocId = loadFromDB(_companyId, currentType.getId());
                typeIds.add(currentType.getId());
                currentType = currentType.getParentType();
            }
        }
        if (assocId == null) {
            // no association configured anywhere in the hierarchy: use default
            assocId = loadDefault(_companyId);
        }
        // both original branches ran this identical loop; merged into one
        for (final Long typeId : typeIds) {
            cache.put(AssociationKey.get(_companyId, typeId), assocId);
        }
    }

    /**
     * Loads the default association id for the given company.
     *
     * @param _companyId id of the Company
     * @return id of the default Association; {@code null} if none is defined
     * @throws EFapsException on error
     */
    private static Long loadDefault(final long _companyId)
        throws EFapsException
    {
        Long ret = null;
        final QueryBuilder attrQueryBldr = new QueryBuilder(CIAdminCommon.AssociationDefinition);
        attrQueryBldr.addWhereAttrEqValue(CIAdminCommon.AssociationDefinition.CompanyLink, _companyId);
        final QueryBuilder queryBldr = new QueryBuilder(CIAdminCommon.AssociationDefault);
        queryBldr.addWhereAttrInQuery(CIAdminCommon.AssociationDefault.ID,
                        attrQueryBldr.getAttributeQuery(CIAdminCommon.AssociationDefinition.AssociationLink));
        final InstanceQuery query = queryBldr.getQuery();
        query.executeWithoutAccessCheck();
        if (query.next()) {
            ret = query.getCurrentValue().getId();
        } else {
            LOG.error("Could not find a default Association for Company id {}", _companyId);
        }
        return ret;
    }

    /**
     * Reads the association id for one (company, type) tuple directly via
     * JDBC using {@link #SQL}.
     *
     * @param _companyId id of the Company
     * @param _typeid id of the Type
     * @return association id or {@code null} if no mapping exists
     * @throws EFapsException on error
     */
    private static Long loadFromDB(final long _companyId,
                                   final long _typeid)
        throws EFapsException
    {
        Long ret = null;
        Connection con = null;
        try {
            con = Context.getConnection();
            // try-with-resources guarantees statement and result set are closed
            // even if an exception is thrown mid-query
            try (PreparedStatement stmt = con.prepareStatement(SQL)) {
                stmt.setObject(1, _companyId);
                stmt.setObject(2, _typeid);
                try (ResultSet rs = stmt.executeQuery()) {
                    if (rs.next()) {
                        ret = rs.getLong(1);
                    }
                }
            }
            con.commit();
        } catch (final SQLException | EFapsException e) {
            // message corrected: this reads Associations (was copy-pasted "roles")
            throw new CacheReloadException("could not read Association", e);
        } finally {
            try {
                if (con != null && !con.isClosed()) {
                    con.close();
                }
            } catch (final SQLException e) {
                throw new CacheReloadException("could not close connection", e);
            }
        }
        return ret;
    }
}
| |
/*
* Copyright (c) 2018 stnetix.com. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed
* under the License is distributed on an "AS IS" BASIS, without warranties or
* conditions of any kind, EITHER EXPRESS OR IMPLIED. See the License for the
* specific language governing permissions and limitations under the License.
*/
/*
* ariADDna API
* #### This document contains the API description for ariADDna project. Using this API one can manage all available cloud services (DropBox, GDrive, Yandex.Disk etc.) from single point.
*
* OpenAPI spec version: 1.0
* Contact: ariaddna.support@stnetix.com
*
* NOTE: This class is auto generated by the swagger code generator program.
* https://github.com/swagger-api/swagger-codegen.git
* Do not edit the class manually.
*/
package com.stnetix.ariaddna.client.api;
import org.junit.Assert;
import org.junit.Ignore;
import org.junit.Test;
import com.stnetix.ariaddna.client.ApiException;
import com.stnetix.ariaddna.client.model.Cloud;
import com.stnetix.ariaddna.client.model.CloudSetPages;
import com.stnetix.ariaddna.client.model.Credential;
import com.stnetix.ariaddna.client.model.InitialAllocationModel;
import com.stnetix.ariaddna.client.model.Session;
import com.stnetix.ariaddna.client.model.StatisticSet;
import com.stnetix.ariaddna.client.model.User;
import com.stnetix.ariaddna.client.model.Vufs;
/**
* API tests for AriaddnaApi
*/
@Ignore
public class AriaddnaApiTest {
private final AriaddnaApi api = new AriaddnaApi();
/**
*
*
* Allows one to add an external cloud account to an existing ariADDna's user. The User MUST be already registered at the cloud service to be added.
*
* @throws ApiException
* if the Api call fails
*/
@Test
public void addExternalCloudAccountTest() throws ApiException {
Cloud cloud = new Cloud();
Cloud response = api.addExternalCloudAccount(cloud);
//Check response is not null
Assert.assertNotNull(response);
}
/**
*
*
* Allows one to create a new user.
*
* @throws ApiException
* if the Api call fails
*/
@Test
public void addUserTest() throws ApiException {
User user = new User();
User response = api.addUser(user);
//check response is not null
Assert.assertNotNull(response);
}
/**
*
*
* Creating new user session.
*
* @throws ApiException
* if the Api call fails
*/
@Test
public void authUserTest() throws ApiException {
Credential user = new Credential();
Session response = api.authUser(user);
//check response is not null
Assert.assertNotNull(response);
}
/**
*
*
* Changing user password.
*
* @throws ApiException
* if the Api call fails
*/
@Test
public void changeUserPasswordTest() throws ApiException {
String userUuid = "user uuid";
User user = new User();
api.changeUserPassword(userUuid, user);
// TODO: test validations
}
/** *
*
* Allows to delete user's external cloud account.
*
* @throws ApiException
* if the Api call fails
*/
@Test
public void deleteExternalCloudAccountTest() throws ApiException {
String cloudUuid = "cloud uuid";
String response = api.deleteExternalCloudAccount(cloudUuid);
Assert.assertNotNull(response);
}
/**
*
*
* Deleting user.
*
* @throws ApiException
* if the Api call fails
*/
@Test
public void deleteUserTest() throws ApiException {
String userUuid = "userUuid";
api.deleteUser(userUuid);
// TODO: test validations
}
/**
*
*
* This operation allows one to get back information about certain user providing his UUID as a path parameter.
*
* @throws ApiException
* if the Api call fails
*/
@Test
public void findUserByUuidTest() throws ApiException {
String userUuid = "userUuid";
User response = api.findUserByUuid(userUuid);
// TODO: test validations
}
/**
*
*
* Allows to get statistic object about clouds.
*
* @throws ApiException
* if the Api call fails
*/
@Test
public void getCloudStatisticSetTest() throws ApiException {
String userUuid = "userUuid";
StatisticSet response = api.getCloudStatisticSet(userUuid);
// TODO: test validations
}
/**
*
*
* Allows to get difference of previous snapshot and actual.
*
* @throws ApiException
* if the Api call fails
*/
@Test
public void getDiffVUFSTest() throws ApiException {
String userUuid = "userUuid";
Long dateTime = System.currentTimeMillis();
Vufs response = api.getDiffVUFS(userUuid, dateTime);
// TODO: test validations
}
/**
*
*
* Returns a list of clouds which a certain user has connected to his ariADDna account.
*
* @throws ApiException
* if the Api call fails
*/
@Test
public void getExternalCloudAccountsTest() throws ApiException {
String userUuid = "userUuid";
CloudSetPages response = api.getExternalCloudAccounts(userUuid);
// TODO: test validations
}
/**
*
*
* Allows to get health-check statistic about users Clouds.
*
* @throws ApiException
* if the Api call fails
*/
@Test
public void getHealthCheckStatTest() throws ApiException {
String userUuid = "userUuid";
StatisticSet response = api.getHealthCheckStat(userUuid);
// TODO: test validations
}
/**
*
*
* Allows to get snapshot vufs.
*
* @throws ApiException
* if the Api call fails
*/
@Test
public void getVUFSTest() throws ApiException {
String userUuid = "userUuid";
Vufs response = api.getVUFS(userUuid);
// TODO: test validations
}
/**
*
*
* Closing user session.
*
* @throws ApiException
* if the Api call fails
*/
@Test
public void logoutSessionTest() throws ApiException {
String uuid = "userUuid";
api.logoutSession(uuid);
// TODO: test validations
}
/**
*
*
* Allows to post file allocate strategy from client to server.
*
* @throws ApiException
* if the Api call fails
*/
@Test
public void postAllocateModelTest() throws ApiException {
String userUuid = "userUuid";
InitialAllocationModel initialAllocationModel = new InitialAllocationModel();
Vufs response = api.postAllocateModel(userUuid, initialAllocationModel);
// TODO: test validations
}
/**
*
*
* Allows to post statistic from client to server about clouds.
*
* @throws ApiException
* if the Api call fails
*/
@Test
public void postCloudStatSetTest() throws ApiException {
String userUuid = "userUuid";
StatisticSet cloudStatisticSet = new StatisticSet();
api.postCloudStatSet(userUuid, cloudStatisticSet);
// TODO: test validations
}
/**
*
*
* Allows to send changes in local file storage to server with empty Allocation model and as response get Vufs object with Allocation model.
*
* @throws ApiException
* if the Api call fails
*/
@Test
public void sendChangesInLFSTest() throws ApiException {
Vufs localChanges = new Vufs();
String userUuid = "userUuid";
Vufs response = api.sendChangesInLFS(localChanges, userUuid);
// TODO: test validations
}
}
| |
/*
* Copyright 2015 NEC Corporation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.o3project.odenos.core.component;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.CoreMatchers.nullValue;
import static org.junit.Assert.assertThat;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.mockito.Mockito;
import org.powermock.reflect.Whitebox;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
/**
* Test class for ConversionTable.
*/
/**
 * Test class for ConversionTable.
 *
 * <p>Each test works on a fresh spy instance (see {@link #setUp()}); the spy allows the
 * private addEntryObject/delEntryObject helpers to be exercised via Whitebox.</p>
 */
public class ConversionTableTest {
// Object under test; recreated before every test so state never leaks between tests.
private ConversionTable target;
/**
 * @throws java.lang.Exception throws Exception in targets
 */
@BeforeClass
public static void setUpBeforeClass() throws Exception {
}
/**
 * @throws java.lang.Exception throws Exception in targets
 */
@AfterClass
public static void tearDownAfterClass() throws Exception {
}
/**
 * @throws java.lang.Exception throws Exception in targets
 */
@Before
public void setUp() throws Exception {
target = Mockito.spy(new ConversionTable());
}
/**
 * @throws java.lang.Exception throws Exception in targets
 */
@After
public void tearDown() throws Exception {
target = null;
}
/**
 * Test method for
 * {@link org.o3project.odenos.core.component.ConversionTable#ConversionTable()}.
 */
@Test
public void testConversionTable() {
ConversionTable target = new ConversionTable();
assertThat(target, is(notNullValue()));
}
/**
 * Test method for
 * {@link org.o3project.odenos.core.component.ConversionTable#getConnectionType(java.lang.String)}
 * .
 */
@Test
public void testGetConnectionType() {
/*
 * setting
 */
Map<String, String> connectionTypeMap = new HashMap<String, String>();
connectionTypeMap.put("abc", "def");
Whitebox.setInternalState(target, "connectionTypeMap", connectionTypeMap);
/*
 * test
 */
String result = target.getConnectionType("abc");
/*
 * check
 */
assertThat(result, is("def"));
}
/**
 * Test method for
 * {@link org.o3project.odenos.core.component.ConversionTable#getConnectionType(java.lang.String)}
 * .
 */
@Test
public void testGetConnectionType_null() {
String result = target.getConnectionType(null);
assertThat(result, is(nullValue()));
}
/**
 * Test method for
 * {@link org.o3project.odenos.core.component.ConversionTable#getConnectionList(java.lang.String)}
 * .
 */
@Test
public void testGetConnectionList() {
/*
 * setting
 */
target.addEntryConnectionType("id1", "genuine");
target.addEntryConnectionType("id2", "fake");
target.addEntryConnectionType("id3", "fake");
target.addEntryConnectionType("id4", "genuine");
/*
 * test
 */
ArrayList<String> result = target.getConnectionList("genuine");
/*
 * check: only the two ids registered with type "genuine" are returned
 */
assertThat(result.size(), is(2));
assertThat(result.contains("id1"), is(true));
assertThat(result.contains("id2"), is(false));
assertThat(result.contains("id3"), is(false));
assertThat(result.contains("id4"), is(true));
}
/**
 * Test method for
 * {@link org.o3project.odenos.core.component.ConversionTable#isConnectionType(java.lang.String)}
 * .
 */
@Test
public void testIsConnectionType() {
/*
 * setting
 */
target.addEntryConnectionType("id1", "genuine");
target.addEntryConnectionType("id2", "fake");
target.addEntryConnectionType("id3", "fake");
target.addEntryConnectionType("id4", "genuine");
/*
 * test & check
 */
boolean result1 = target.isConnectionType("genuine");
assertThat(result1, is(true));
boolean result2 = target.isConnectionType("nothing");
assertThat(result2, is(false));
}
/**
 * Test method for
 * {@link org.o3project.odenos.core.component.ConversionTable#isConnectionType(java.lang.String)}
 * .
 */
@Test
public void testIsConnectionType_null() {
/*
 * test
 */
boolean result = target.isConnectionType(null);
/*
 * check
 */
assertThat(result, is(false));
}
/**
 * Test method for
 * {@link org.o3project.odenos.core.component.ConversionTable#isConnectionType(java.lang.String)}
 * .
 */
@Test
public void testIsConnectionType_nullValue() {
/*
 * setting: some ids are registered with a null type on purpose
 */
target.addEntryConnectionType("id1", "genuine");
target.addEntryConnectionType("id2", null);
target.addEntryConnectionType("id3", null);
target.addEntryConnectionType("id4", "genuine");
/*
 * test
 */
boolean result = target.isConnectionType(null);
/*
 * check
 */
assertThat(result, is(false));
}
/**
 * Test method for
 * {@link org.o3project.odenos.core.component.ConversionTable#addEntryConnectionType(java.lang.String, java.lang.String)}
 * .
 */
@Test
public void testAddEntryConnectionType() {
/*
 * test
 */
target.addEntryConnectionType("id", "type");
/*
 * check
 */
String validationResult = target.getConnectionType("id");
assertThat(validationResult, is("type"));
}
/**
 * Test method for
 * {@link org.o3project.odenos.core.component.ConversionTable#addEntryConnectionType(java.lang.String, java.lang.String)}
 * .
 */
@Test
public void testAddEntryConnectionType_nullKey() {
/*
 * test
 */
target.addEntryConnectionType(null, "type");
/*
 * check
 */
String validationResult = target.getConnectionType(null);
assertThat(validationResult, is("type"));
}
/**
 * Test method for
 * {@link org.o3project.odenos.core.component.ConversionTable#addEntryConnectionType(java.lang.String, java.lang.String)}
 * .
 */
@Test
public void testAddEntryConnectionType_nullValue() {
/*
 * test
 */
target.addEntryConnectionType("id", null);
/*
 * check
 */
String validationResult = target.getConnectionType("id");
assertThat(validationResult, is(nullValue()));
}
/**
 * Test method for
 * {@link org.o3project.odenos.core.component.ConversionTable#addEntryConnectionType(java.lang.String, java.lang.String)}
 * .
 */
@Test
public void testAddEntryConnectionType_allNull() {
/*
 * test
 */
target.addEntryConnectionType(null, null);
/*
 * check
 */
String validationResult = target.getConnectionType(null);
assertThat(validationResult, is(nullValue()));
}
/**
 * Test method for
 * {@link org.o3project.odenos.core.component.ConversionTable#delEntryConnectionType(java.lang.String)}
 * .
 */
@Test
public void testDelEntryConnectionType() {
/*
 * setting
 */
target.addEntryConnectionType("id", "value");
String beforeValue = target.getConnectionType("id");
assertThat(beforeValue, is("value"));
/*
 * test
 */
target.delEntryConnectionType("id");
/*
 * check
 */
String afterValue = target.getConnectionType("id");
assertThat(afterValue, is(nullValue()));
}
/**
 * Test method for
 * {@link org.o3project.odenos.core.component.ConversionTable#delEntryConnectionType(java.lang.String)}
 * .
 */
@Test
public void testDelEntryConnectionType_notRegisterId() {
/*
 * test: deleting an unregistered id must not throw
 */
target.delEntryConnectionType("id");
/*
 * check
 */
String afterValue = target.getConnectionType("id");
assertThat(afterValue, is(nullValue()));
}
/**
 * Test method for
 * {@link org.o3project.odenos.core.component.ConversionTable#getNetwork()}.
 */
@Test
public void testGetNetwork() {
/*
 * test
 */
HashMap<String, ArrayList<String>> result = target.getNetwork();
/*
 * check: empty on a fresh table
 */
assertThat(result.size(), is(0));
}
/**
 * Test method for
 * {@link org.o3project.odenos.core.component.ConversionTable#getNode()}.
 */
@Test
public void testGetNode() {
/*
 * test
 */
HashMap<String, ArrayList<String>> result = target.getNode();
/*
 * check: empty on a fresh table
 */
assertThat(result.size(), is(0));
}
/**
 * Test method for
 * {@link org.o3project.odenos.core.component.ConversionTable#getPort()}.
 */
@Test
public void testGetPort() {
/*
 * test
 */
HashMap<String, ArrayList<String>> result = target.getPort();
/*
 * check: empty on a fresh table
 */
assertThat(result.size(), is(0));
}
/**
 * Test method for
 * {@link org.o3project.odenos.core.component.ConversionTable#getLink()}.
 */
@Test
public void testGetLink() {
/*
 * test
 */
HashMap<String, ArrayList<String>> result = target.getLink();
/*
 * check: empty on a fresh table
 */
assertThat(result.size(), is(0));
}
/**
 * Test method for
 * {@link org.o3project.odenos.core.component.ConversionTable#getFlow()}.
 */
@Test
public void testGetFlow() {
/*
 * test
 */
HashMap<String, ArrayList<String>> result = target.getFlow();
/*
 * check: empty on a fresh table
 */
assertThat(result.size(), is(0));
}
/**
 * Test method for
 * {@link org.o3project.odenos.core.component.ConversionTable#getNetwork(java.lang.String)}
 * .
 */
@Test
public void testGetNetworkString() {
/*
 * test
 */
ArrayList<String> result = target.getNetwork("nwcId");
/*
 * check: unknown id yields an empty list, not null
 */
assertThat(result.size(), is(0));
}
/**
 * Test method for
 * {@link org.o3project.odenos.core.component.ConversionTable#getNode(java.lang.String, java.lang.String)}
 * .
 */
@Test
public void testGetNodeStringString() {
/*
 * test
 */
ArrayList<String> result = target.getNode("nwcId", "nodeId");
/*
 * check: unknown id yields an empty list, not null
 */
assertThat(result.size(), is(0));
}
/**
 * Test method for
 * {@link org.o3project.odenos.core.component.ConversionTable#getPort(java.lang.String, java.lang.String, java.lang.String)}
 * .
 */
@Test
public void testGetPortStringStringString() {
/*
 * test
 */
ArrayList<String> result = target.getPort("nwcId", "nodeId", "portId");
/*
 * check: unknown id yields an empty list, not null
 */
assertThat(result.size(), is(0));
}
/**
 * Test method for
 * {@link org.o3project.odenos.core.component.ConversionTable#getLink(java.lang.String, java.lang.String)}
 * .
 */
@Test
public void testGetLinkStringString() {
/*
 * test
 */
ArrayList<String> result = target.getLink("nwcId", "linkId");
/*
 * check: unknown id yields an empty list, not null
 */
assertThat(result.size(), is(0));
}
/**
 * Test method for
 * {@link org.o3project.odenos.core.component.ConversionTable#getFlow(java.lang.String, java.lang.String)}
 * .
 */
@Test
public void testGetFlowStringString() {
/*
 * test
 */
ArrayList<String> result = target.getFlow("nwcId", "flowId");
/*
 * check: unknown id yields an empty list, not null
 */
assertThat(result.size(), is(0));
}
/**
 * Test method for
 * {@link org.o3project.odenos.core.component.ConversionTable#addEntryNetwork(java.lang.String, java.lang.String)}
 * .
 */
@Test
public void testAddEntryNetwork() {
/*
 * test: register three networks in a ring so each one maps to the other two
 */
target.addEntryNetwork("nwcId1", "nwcId2");
target.addEntryNetwork("nwcId2", "nwcId3");
target.addEntryNetwork("nwcId3", "nwcId1");
/*
 * check
 */
ArrayList<String> resultNwc1 = target.getNetwork("nwcId1");
assertThat(resultNwc1.size(), is(2));
ArrayList<String> resultNwc2 = target.getNetwork("nwcId2");
assertThat(resultNwc2.size(), is(2));
ArrayList<String> resultNwc3 = target.getNetwork("nwcId3");
assertThat(resultNwc3.size(), is(2));
}
/**
 * Test method for
 * {@link org.o3project.odenos.core.component.ConversionTable#addEntryNode(java.lang.String, java.lang.String, java.lang.String, java.lang.String)}
 * .
 */
@Test
public void testAddEntryNode() {
/*
 * test: register three nodes in a ring so each one maps to the other two
 */
target.addEntryNode("nwcId1", "nodeId", "nwcId2", "nodeId");
target.addEntryNode("nwcId2", "nodeId", "nwcId3", "nodeId");
target.addEntryNode("nwcId3", "nodeId", "nwcId1", "nodeId");
/*
 * check
 */
ArrayList<String> resultNode1 = target.getNode("nwcId1", "nodeId");
assertThat(resultNode1.size(), is(2));
ArrayList<String> resultNode2 = target.getNode("nwcId2", "nodeId");
assertThat(resultNode2.size(), is(2));
ArrayList<String> resultNode3 = target.getNode("nwcId3", "nodeId");
assertThat(resultNode3.size(), is(2));
}
/**
 * Test method for
 * {@link org.o3project.odenos.core.component.ConversionTable#addEntryPort(java.lang.String, java.lang.String, java.lang.String, java.lang.String, java.lang.String, java.lang.String)}
 * .
 */
@Test
public void testAddEntryPort() {
/*
 * test: register three ports in a ring so each one maps to the other two
 */
target.addEntryPort("nwcId1", "linkId", "portId", "nwcId2", "linkId", "portId");
target.addEntryPort("nwcId2", "linkId", "portId", "nwcId3", "linkId", "portId");
target.addEntryPort("nwcId3", "linkId", "portId", "nwcId1", "linkId", "portId");
/*
 * check
 */
ArrayList<String> resultPort1 = target.getPort("nwcId1", "linkId", "portId");
assertThat(resultPort1.size(), is(2));
ArrayList<String> resultPort2 = target.getPort("nwcId2", "linkId", "portId");
assertThat(resultPort2.size(), is(2));
ArrayList<String> resultPort3 = target.getPort("nwcId3", "linkId", "portId");
assertThat(resultPort3.size(), is(2));
}
/**
 * Test method for
 * {@link org.o3project.odenos.core.component.ConversionTable#addEntryLink(java.lang.String, java.lang.String, java.lang.String, java.lang.String)}
 * .
 */
@Test
public void testAddEntryLink() {
/*
 * test: register three links in a ring so each one maps to the other two
 */
target.addEntryLink("nwcId1", "linkId", "nwcId2", "linkId");
target.addEntryLink("nwcId2", "linkId", "nwcId3", "linkId");
target.addEntryLink("nwcId3", "linkId", "nwcId1", "linkId");
/*
 * check
 */
ArrayList<String> resultLink1 = target.getLink("nwcId1", "linkId");
assertThat(resultLink1.size(), is(2));
ArrayList<String> resultLink2 = target.getLink("nwcId2", "linkId");
assertThat(resultLink2.size(), is(2));
ArrayList<String> resultLink3 = target.getLink("nwcId3", "linkId");
assertThat(resultLink3.size(), is(2));
}
/**
 * Test method for
 * {@link org.o3project.odenos.core.component.ConversionTable#addEntryFlow(java.lang.String, java.lang.String, java.lang.String, java.lang.String)}
 * .
 */
@Test
public void testAddEntryFlow() {
/*
 * test: register three flows in a ring so each one maps to the other two
 */
target.addEntryFlow("nwcId1", "flowId", "nwcId2", "flowId");
target.addEntryFlow("nwcId2", "flowId", "nwcId3", "flowId");
target.addEntryFlow("nwcId3", "flowId", "nwcId1", "flowId");
/*
 * check
 */
ArrayList<String> resultFlow1 = target.getFlow("nwcId1", "flowId");
assertThat(resultFlow1.size(), is(2));
ArrayList<String> resultFlow2 = target.getFlow("nwcId2", "flowId");
assertThat(resultFlow2.size(), is(2));
ArrayList<String> resultFlow3 = target.getFlow("nwcId3", "flowId");
assertThat(resultFlow3.size(), is(2));
}
/**
 * Test method for
 * {@link org.o3project.odenos.core.component.ConversionTable#delEntryNetwork(java.lang.String)}
 * .
 */
@Test
public void testDelEntryNetwork() {
/*
 * setting
 */
target.addEntryNetwork("nwcId1", "nwcId2");
target.addEntryNetwork("nwcId2", "nwcId3");
target.addEntryNetwork("nwcId3", "nwcId1");
/*
 * test
 */
target.delEntryNetwork("nwcId3");
/*
 * check: expected sizes written as "before - removed" / "before + added"
 * to make the delta caused by each operation explicit
 */
ArrayList<String> resultNwc1 = target.getNetwork("nwcId1");
assertThat(resultNwc1.size(), is(2 - 1));
ArrayList<String> resultNwc2 = target.getNetwork("nwcId2");
assertThat(resultNwc2.size(), is(2 - 1));
ArrayList<String> resultNwc3 = target.getNetwork("nwcId3");
assertThat(resultNwc3.size(), is(2 - 2));
target.addEntryNetwork("nwcId3", "nwcId1");
ArrayList<String> resultNwc21 = target.getNetwork("nwcId1");
assertThat(resultNwc21.size(), is(1 + 1));
ArrayList<String> resultNwc22 = target.getNetwork("nwcId2");
assertThat(resultNwc22.size(), is(1));
ArrayList<String> resultNwc23 = target.getNetwork("nwcId3");
assertThat(resultNwc23.size(), is(0 + 1));
}
/**
 * Test method for
 * {@link org.o3project.odenos.core.component.ConversionTable#delEntryNode(java.lang.String, java.lang.String)}
 * .
 */
@Test
public void testDelEntryNode() {
/*
 * setting
 */
target.addEntryNode("nwcId1", "nodeId", "nwcId2", "nodeId");
target.addEntryNode("nwcId2", "nodeId", "nwcId3", "nodeId");
target.addEntryNode("nwcId3", "nodeId", "nwcId1", "nodeId");
/*
 * test
 */
target.delEntryNode("nwcId3", "nodeId");
/*
 * check: expected sizes written as "before - removed" / "before + added"
 */
ArrayList<String> resultNode1 = target.getNode("nwcId1", "nodeId");
assertThat(resultNode1.size(), is(2 - 1));
ArrayList<String> resultNode2 = target.getNode("nwcId2", "nodeId");
assertThat(resultNode2.size(), is(2 - 1));
ArrayList<String> resultNode3 = target.getNode("nwcId3", "nodeId");
assertThat(resultNode3.size(), is(2 - 2));
target.addEntryNode("nwcId3", "nodeId", "nwcId1", "nodeId");
ArrayList<String> resultNode21 = target.getNode("nwcId1", "nodeId");
assertThat(resultNode21.size(), is(1 + 1));
ArrayList<String> resultNode22 = target.getNode("nwcId2", "nodeId");
assertThat(resultNode22.size(), is(1));
ArrayList<String> resultNode23 = target.getNode("nwcId3", "nodeId");
assertThat(resultNode23.size(), is(0 + 1));
}
/**
 * Test method for
 * {@link org.o3project.odenos.core.component.ConversionTable#delEntryPort(java.lang.String, java.lang.String, java.lang.String)}
 * .
 */
@Test
public void testDelEntryPort() {
/*
 * setting
 */
target.addEntryPort("nwcId1", "nodeId", "portId", "nwcId2", "nodeId", "portId");
target.addEntryPort("nwcId2", "nodeId", "portId", "nwcId3", "nodeId", "portId");
target.addEntryPort("nwcId3", "nodeId", "portId", "nwcId1", "nodeId", "portId");
/*
 * test
 */
target.delEntryPort("nwcId3", "nodeId", "portId");
/*
 * check: expected sizes written as "before - removed" / "before + added"
 */
ArrayList<String> resultPort1 = target.getPort("nwcId1", "nodeId", "portId");
assertThat(resultPort1.size(), is(2 - 1));
ArrayList<String> resultPort2 = target.getPort("nwcId2", "nodeId", "portId");
assertThat(resultPort2.size(), is(2 - 1));
ArrayList<String> resultPort3 = target.getPort("nwcId3", "nodeId", "portId");
assertThat(resultPort3.size(), is(2 - 2));
target.addEntryPort("nwcId3", "nodeId", "portId", "nwcId1", "nodeId", "portId");
ArrayList<String> resultPort21 = target.getPort("nwcId1", "nodeId", "portId");
assertThat(resultPort21.size(), is(1 + 1));
ArrayList<String> resultPort22 = target.getPort("nwcId2", "nodeId", "portId");
assertThat(resultPort22.size(), is(1));
ArrayList<String> resultPort23 = target.getPort("nwcId3", "nodeId", "portId");
assertThat(resultPort23.size(), is(0 + 1));
}
/**
 * Test method for
 * {@link org.o3project.odenos.core.component.ConversionTable#delEntryLink(java.lang.String, java.lang.String)}
 * .
 */
@Test
public void testDelEntryLink() {
/*
 * setting
 */
target.addEntryLink("nwcId1", "linkId", "nwcId2", "linkId");
target.addEntryLink("nwcId2", "linkId", "nwcId3", "linkId");
target.addEntryLink("nwcId3", "linkId", "nwcId1", "linkId");
/*
 * test
 */
target.delEntryLink("nwcId3", "linkId");
/*
 * check: expected sizes written as "before - removed" / "before + added"
 */
ArrayList<String> resultLink1 = target.getLink("nwcId1", "linkId");
assertThat(resultLink1.size(), is(2 - 1));
ArrayList<String> resultLink2 = target.getLink("nwcId2", "linkId");
assertThat(resultLink2.size(), is(2 - 1));
ArrayList<String> resultLink3 = target.getLink("nwcId3", "linkId");
assertThat(resultLink3.size(), is(2 - 2));
target.addEntryLink("nwcId3", "linkId", "nwcId1", "linkId");
ArrayList<String> resultLink21 = target.getLink("nwcId1", "linkId");
assertThat(resultLink21.size(), is(1 + 1));
ArrayList<String> resultLink22 = target.getLink("nwcId2", "linkId");
assertThat(resultLink22.size(), is(1));
ArrayList<String> resultLink23 = target.getLink("nwcId3", "linkId");
assertThat(resultLink23.size(), is(0 + 1));
}
/**
 * Test method for
 * {@link org.o3project.odenos.core.component.ConversionTable#delEntryFlow(java.lang.String, java.lang.String)}
 * .
 */
@Test
public void testDelEntryFlow() {
/*
 * setting
 */
target.addEntryFlow("nwcId1", "flowId", "nwcId2", "flowId");
target.addEntryFlow("nwcId2", "flowId", "nwcId3", "flowId");
target.addEntryFlow("nwcId3", "flowId", "nwcId1", "flowId");
/*
 * test
 */
target.delEntryFlow("nwcId3", "flowId");
/*
 * check: expected sizes written as "before - removed" / "before + added"
 */
ArrayList<String> resultFlow1 = target.getFlow("nwcId1", "flowId");
assertThat(resultFlow1.size(), is(2 - 1));
ArrayList<String> resultFlow2 = target.getFlow("nwcId2", "flowId");
assertThat(resultFlow2.size(), is(2 - 1));
ArrayList<String> resultFlow3 = target.getFlow("nwcId3", "flowId");
assertThat(resultFlow3.size(), is(2 - 2));
target.addEntryFlow("nwcId3", "flowId", "nwcId1", "flowId");
ArrayList<String> resultFlow21 = target.getFlow("nwcId1", "flowId");
assertThat(resultFlow21.size(), is(1 + 1));
ArrayList<String> resultFlow22 = target.getFlow("nwcId2", "flowId");
assertThat(resultFlow22.size(), is(1));
ArrayList<String> resultFlow23 = target.getFlow("nwcId3", "flowId");
assertThat(resultFlow23.size(), is(0 + 1));
}
/**
 * Test method for {@link org.o3project.odenos.core.component.ConversionTable#addEntryObject(HashMap, String, String)}.
 *
 * @throws Exception throws Exception in targets
 */
@Test
public void testAddEntryObject() throws Exception {
/*
 * setting
 */
HashMap<String, ArrayList<String>> hashObject = new HashMap<>();
ArrayList<String> initList = new ArrayList<String>(Arrays.asList("OriginalValue"));
hashObject.put("Key", initList);
/*
 * test: addEntryObject is private, so it is invoked reflectively via Whitebox
 */
Whitebox.invokeMethod(target, "addEntryObject", hashObject, "Key", "Value");
/*
 * check
 */
ArrayList<String> resultValues = hashObject.get("Key");
assertThat(resultValues.size(), is(2));
assertThat(resultValues.contains("Value"), is(true));
assertThat(resultValues.contains("OriginalValue"), is(true));
}
/**
 * Test method for {@link org.o3project.odenos.core.component.ConversionTable#delEntryObject(HashMap, String)}.
 *
 * @throws Exception throws Exception in targets
 */
@Test
public void testDelEntryObject() throws Exception {
/*
 * setting
 */
HashMap<String, ArrayList<String>> hashObject = new HashMap<>();
ArrayList<String> initList1 = new ArrayList<String>(Arrays.asList("OriginalValue1"));
hashObject.put("Key1", initList1);
ArrayList<String> initList2 = new ArrayList<String>(Arrays.asList("OriginalValue2"));
hashObject.put("Key2", initList2);
ArrayList<String> initList3 = new ArrayList<String>(Arrays.asList("OriginalValue3"));
hashObject.put("Key3", initList3);
/*
 * test: delEntryObject is private, so it is invoked reflectively via Whitebox
 */
Whitebox.invokeMethod(target, "delEntryObject", hashObject, "Key2");
/*
 * check
 */
assertThat(hashObject.containsKey("Key2"), is(false));
assertThat(hashObject.size(), is(2));
assertThat(hashObject.containsKey("Key1"), is(true));
assertThat(hashObject.containsKey("Key3"), is(true));
}
}
| |
package nl.knaw.dans.common.lang.util;
/**
* A Base64 Encoder/Decoder.
* <p>
* This class is used to encode and decode data in Base64 format as described in RFC 1521.
* <p>
* This is "Open Source" software and released under the <a href="http://www.gnu.org/licenses/lgpl.html">GNU/LGPL</a>
* license.<br>
* It is provided "as is" without warranty of any kind.<br>
* Copyright 2003: Christian d'Heureuse, Inventec Informatik AG, Switzerland.<br>
* Home page: <a href="http://www.source-code.biz">www.source-code.biz</a><br>
* <p>
* Version history:<br>
* 2003-07-22 Christian d'Heureuse (chdh): Module created.<br>
* 2005-08-11 chdh: Lincense changed from GPL to LGPL.<br>
* 2006-11-21 chdh:<br>
* Method encode(String) renamed to encodeString(String).<br>
* Method decode(String) renamed to decodeString(String).<br>
* New method encode(byte[],int) added.<br>
* New method decode(String) added.<br>
*/
public class Base64Coder
{
    // Mapping table from 6-bit nibbles to Base64 characters (RFC 1521 alphabet).
    protected static char[] map1 =
            "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".toCharArray();

    // Reverse mapping table from Base64 characters to 6-bit nibbles; -1 marks
    // characters that are not part of the alphabet.
    protected static byte[] map2 = new byte[128];
    static
    {
        for (int c = 0; c < map2.length; c++)
            map2[c] = -1;
        for (byte v = 0; v < 64; v++)
            map2[map1[v]] = v;
    }

    /**
     * Encodes a string into Base64 format. No blanks or line breaks are inserted.
     *
     * @param s
     *        a String to be encoded.
     * @return A String with the Base64 encoded data.
     */
    public static String encodeString(String s)
    {
        return String.valueOf(encode(s.getBytes()));
    }

    /**
     * Encodes a byte array into Base64 format. No blanks or line breaks are inserted.
     *
     * @param in
     *        an array containing the data bytes to be encoded.
     * @return A character array with the Base64 encoded data.
     */
    public static char[] encode(byte[] in)
    {
        return encode(in, in.length);
    }

    /**
     * Encodes a byte array into Base64 format. No blanks or line breaks are inserted.
     *
     * @param in
     *        an array containing the data bytes to be encoded.
     * @param iLen
     *        number of bytes to process in <code>in</code>.
     * @return A character array with the Base64 encoded data.
     */
    public static char[] encode(byte[] in, int iLen)
    {
        // Number of output characters that carry payload bits; everything beyond
        // this index in the (4-char aligned) output is '=' padding.
        int dataChars = (iLen * 4 + 2) / 3;
        char[] out = new char[((iLen + 2) / 3) * 4];
        int src = 0;
        int dst = 0;
        while (src < iLen)
        {
            // Consume up to three input bytes; missing bytes are treated as zero
            // and their output positions become padding below.
            int b0 = in[src++] & 0xff;
            int b1 = src < iLen ? in[src++] & 0xff : 0;
            int b2 = src < iLen ? in[src++] & 0xff : 0;
            out[dst++] = map1[b0 >>> 2];
            out[dst++] = map1[((b0 & 0x03) << 4) | (b1 >>> 4)];
            out[dst] = dst < dataChars ? map1[((b1 & 0x0f) << 2) | (b2 >>> 6)] : '=';
            dst++;
            out[dst] = dst < dataChars ? map1[b2 & 0x3f] : '=';
            dst++;
        }
        return out;
    }

    /**
     * Decodes a string from Base64 format.
     *
     * @param s
     *        a Base64 String to be decoded.
     * @return A String containing the decoded data.
     * @throws IllegalArgumentException
     *         if the input is not valid Base64 encoded data.
     */
    public static String decodeString(String s)
    {
        return new String(decode(s.toCharArray()));
    }

    /**
     * Decodes a byte array from Base64 format.
     *
     * @param s
     *        a Base64 String to be decoded.
     * @return An array containing the decoded data bytes.
     * @throws IllegalArgumentException
     *         if the input is not valid Base64 encoded data.
     */
    public static byte[] decode(String s)
    {
        return decode(s.toCharArray());
    }

    /**
     * Decodes a byte array from Base64 format. No blanks or line breaks are allowed
     * within the Base64 encoded data.
     *
     * @param in
     *        a character array containing the Base64 encoded data.
     * @return An array containing the decoded data bytes.
     * @throws IllegalArgumentException
     *         if the input is not valid Base64 encoded data.
     */
    public static byte[] decode(char[] in)
    {
        int len = in.length;
        if (len % 4 != 0)
            throw new IllegalArgumentException("Length of Base64 encoded input string is not a multiple of 4.");
        // Strip trailing '=' padding; only the remaining characters carry data.
        while (len > 0 && in[len - 1] == '=')
            len--;
        byte[] out = new byte[(len * 3) / 4];
        int src = 0;
        int dst = 0;
        while (src < len)
        {
            // Read a quartet; absent trailing characters decode as 'A' (zero bits).
            int c0 = in[src++];
            int c1 = in[src++];
            int c2 = src < len ? in[src++] : 'A';
            int c3 = src < len ? in[src++] : 'A';
            if (c0 > 127 || c1 > 127 || c2 > 127 || c3 > 127)
                throw new IllegalArgumentException("Illegal character in Base64 encoded data.");
            int v0 = map2[c0];
            int v1 = map2[c1];
            int v2 = map2[c2];
            int v3 = map2[c3];
            if (v0 < 0 || v1 < 0 || v2 < 0 || v3 < 0)
                throw new IllegalArgumentException("Illegal character in Base64 encoded data.");
            out[dst++] = (byte) ((v0 << 2) | (v1 >>> 4));
            if (dst < out.length)
                out[dst++] = (byte) (((v1 & 0xf) << 4) | (v2 >>> 2));
            if (dst < out.length)
                out[dst++] = (byte) (((v2 & 3) << 6) | v3);
        }
        return out;
    }

    // Dummy constructor: this is a static utility class and must not be instantiated.
    private Base64Coder()
    {
    }
}
| |
/*
* Copyright 2016-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.cxx;
import com.facebook.buck.core.cell.resolver.CellPathResolver;
import com.facebook.buck.core.description.BuildRuleParams;
import com.facebook.buck.core.description.attr.ImplicitDepsInferringDescription;
import com.facebook.buck.core.model.BuildTarget;
import com.facebook.buck.core.model.Flavor;
import com.facebook.buck.core.model.FlavorDomain;
import com.facebook.buck.core.model.Flavored;
import com.facebook.buck.core.model.targetgraph.BuildRuleCreationContextWithTargetGraph;
import com.facebook.buck.core.model.targetgraph.TargetGraph;
import com.facebook.buck.core.rulekey.AddToRuleKey;
import com.facebook.buck.core.rules.BuildRule;
import com.facebook.buck.core.rules.BuildRuleResolver;
import com.facebook.buck.core.rules.SourcePathRuleFinder;
import com.facebook.buck.core.sourcepath.BuildTargetSourcePath;
import com.facebook.buck.core.sourcepath.NonHashableSourcePathContainer;
import com.facebook.buck.core.sourcepath.SourcePath;
import com.facebook.buck.core.sourcepath.resolver.SourcePathResolver;
import com.facebook.buck.core.toolchain.tool.Tool;
import com.facebook.buck.core.util.immutables.BuckStyleImmutable;
import com.facebook.buck.cxx.toolchain.CxxBuckConfig;
import com.facebook.buck.cxx.toolchain.CxxPlatform;
import com.facebook.buck.cxx.toolchain.CxxPlatforms;
import com.facebook.buck.cxx.toolchain.CxxPlatformsProvider;
import com.facebook.buck.cxx.toolchain.PathShortener;
import com.facebook.buck.cxx.toolchain.Preprocessor;
import com.facebook.buck.cxx.toolchain.linker.Linker;
import com.facebook.buck.cxx.toolchain.linker.Linker.LinkableDepType;
import com.facebook.buck.cxx.toolchain.linker.Linkers;
import com.facebook.buck.cxx.toolchain.nativelink.NativeLinkable;
import com.facebook.buck.cxx.toolchain.nativelink.NativeLinkableInput;
import com.facebook.buck.cxx.toolchain.nativelink.NativeLinkables;
import com.facebook.buck.io.filesystem.ProjectFilesystem;
import com.facebook.buck.model.BuildTargets;
import com.facebook.buck.model.macros.MacroException;
import com.facebook.buck.parser.BuildTargetParser;
import com.facebook.buck.parser.BuildTargetPatternParser;
import com.facebook.buck.rules.SymlinkTree;
import com.facebook.buck.rules.args.Arg;
import com.facebook.buck.rules.args.ProxyArg;
import com.facebook.buck.rules.args.StringArg;
import com.facebook.buck.rules.args.ToolArg;
import com.facebook.buck.rules.macros.AbstractMacroExpander;
import com.facebook.buck.rules.macros.AbstractMacroExpanderWithoutPrecomputedWork;
import com.facebook.buck.rules.macros.CcFlagsMacro;
import com.facebook.buck.rules.macros.CcMacro;
import com.facebook.buck.rules.macros.CppFlagsMacro;
import com.facebook.buck.rules.macros.CxxFlagsMacro;
import com.facebook.buck.rules.macros.CxxGenruleFilterAndTargetsMacro;
import com.facebook.buck.rules.macros.CxxMacro;
import com.facebook.buck.rules.macros.CxxppFlagsMacro;
import com.facebook.buck.rules.macros.ExecutableMacroExpander;
import com.facebook.buck.rules.macros.LdMacro;
import com.facebook.buck.rules.macros.LdflagsSharedFilterMacro;
import com.facebook.buck.rules.macros.LdflagsSharedMacro;
import com.facebook.buck.rules.macros.LdflagsStaticFilterMacro;
import com.facebook.buck.rules.macros.LdflagsStaticMacro;
import com.facebook.buck.rules.macros.LdflagsStaticPicFilterMacro;
import com.facebook.buck.rules.macros.LdflagsStaticPicMacro;
import com.facebook.buck.rules.macros.Macro;
import com.facebook.buck.rules.macros.PlatformNameMacro;
import com.facebook.buck.rules.macros.SimpleMacroExpander;
import com.facebook.buck.rules.macros.StringExpander;
import com.facebook.buck.sandbox.SandboxExecutionStrategy;
import com.facebook.buck.shell.AbstractGenruleDescription;
import com.facebook.buck.shell.Genrule;
import com.facebook.buck.toolchain.ToolchainProvider;
import com.facebook.buck.util.Escaper;
import com.facebook.buck.util.RichStream;
import com.facebook.buck.util.types.Pair;
import com.facebook.buck.versions.VersionPropagator;
import com.google.common.base.Preconditions;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableCollection;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Sets;
import com.google.common.collect.Streams;
import java.lang.reflect.InvocationTargetException;
import java.nio.file.Path;
import java.util.Collection;
import java.util.Iterator;
import java.util.Map;
import java.util.Optional;
import java.util.function.Consumer;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import org.immutables.value.Value;
/**
 * Description for the {@code cxx_genrule} build rule: a genrule whose shell command may use
 * C/C++-specific macros (compiler and linker tools, preprocessor flags, and linker flags of
 * dependency targets) that are expanded against a concrete {@link CxxPlatform}.
 *
 * <p>An unflavored target is represented by a {@link CxxGenrule} placeholder; once a platform
 * flavor is applied, the normal {@link Genrule} machinery of the superclass takes over.
 */
public class CxxGenruleDescription extends AbstractGenruleDescription<CxxGenruleDescriptionArg>
implements Flavored,
VersionPropagator<CxxGenruleDescriptionArg>,
ImplicitDepsInferringDescription<CxxGenruleDescriptionArg> {
// Extra platform flavors declared in configuration, beyond the toolchain-provided ones.
private final ImmutableSet<Flavor> declaredPlatforms;
public CxxGenruleDescription(
CxxBuckConfig cxxBuckConfig,
ToolchainProvider toolchainProvider,
SandboxExecutionStrategy sandboxExecutionStrategy) {
super(toolchainProvider, sandboxExecutionStrategy, false);
this.declaredPlatforms = cxxBuckConfig.getDeclaredPlatforms();
}
/** @return whether {@code path} is the output of a rule that is a {@link CxxGenrule}. */
public static boolean wrapsCxxGenrule(SourcePathRuleFinder ruleFinder, SourcePath path) {
Optional<BuildRule> rule = ruleFinder.getRule(path);
return rule.map(CxxGenrule.class::isInstance).orElse(false);
}
/**
 * @return a new {@link BuildTargetSourcePath} for an existing {@link BuildTargetSourcePath} which
 * refers to a {@link CxxGenrule} with the given {@code platform} flavor applied.
 */
public static SourcePath fixupSourcePath(
BuildRuleResolver ruleResolver,
SourcePathRuleFinder ruleFinder,
CxxPlatform platform,
SourcePath path) {
Optional<BuildRule> rule = ruleFinder.getRule(path);
if (rule.isPresent() && rule.get() instanceof CxxGenrule) {
// Require the platform-flavored variant of the genrule and use its output instead.
Genrule platformRule =
(Genrule)
ruleResolver.requireRule(
rule.get().getBuildTarget().withAppendedFlavors(platform.getFlavor()));
path = platformRule.getSourcePathToOutput();
}
return path;
}
/** List variant of {@link #fixupSourcePath}; applies the fixup to every element in order. */
public static ImmutableList<SourcePath> fixupSourcePaths(
BuildRuleResolver ruleResolver,
SourcePathRuleFinder ruleFinder,
CxxPlatform cxxPlatform,
ImmutableList<SourcePath> paths) {
ImmutableList.Builder<SourcePath> fixed = ImmutableList.builder();
for (SourcePath path : paths) {
fixed.add(fixupSourcePath(ruleResolver, ruleFinder, cxxPlatform, path));
}
return fixed.build();
}
/** Sorted-set variant of {@link #fixupSourcePath}; the input's comparator is preserved. */
public static ImmutableSortedSet<SourcePath> fixupSourcePaths(
BuildRuleResolver ruleResolver,
SourcePathRuleFinder ruleFinder,
CxxPlatform cxxPlatform,
ImmutableSortedSet<SourcePath> paths) {
ImmutableSortedSet.Builder<SourcePath> fixed =
new ImmutableSortedSet.Builder<>(Preconditions.checkNotNull(paths.comparator()));
for (SourcePath path : paths) {
fixed.add(fixupSourcePath(ruleResolver, ruleFinder, cxxPlatform, path));
}
return fixed.build();
}
/** Map variant of {@link #fixupSourcePath}; keys are kept, values are fixed up. */
public static <T> ImmutableMap<T, SourcePath> fixupSourcePaths(
BuildRuleResolver ruleResolver,
SourcePathRuleFinder ruleFinder,
CxxPlatform cxxPlatform,
ImmutableMap<T, SourcePath> paths) {
ImmutableMap.Builder<T, SourcePath> fixed = ImmutableMap.builder();
for (Map.Entry<T, SourcePath> ent : paths.entrySet()) {
fixed.put(
ent.getKey(), fixupSourcePath(ruleResolver, ruleFinder, cxxPlatform, ent.getValue()));
}
return fixed.build();
}
/** Shell-escapes each argument and joins them with single spaces. */
private static String shquoteJoin(Iterable<String> args) {
return Streams.stream(args).map(Escaper.SHELL_ESCAPER).collect(Collectors.joining(" "));
}
@Override
public Class<CxxGenruleDescriptionArg> getConstructorArgType() {
return CxxGenruleDescriptionArg.class;
}
// A flavor is accepted if it names a toolchain C/C++ platform or a config-declared platform.
@Override
public boolean hasFlavors(ImmutableSet<Flavor> flavors) {
return getCxxPlatforms().containsAnyOf(flavors)
|| !Sets.intersection(declaredPlatforms, flavors).isEmpty();
}
/**
 * Builds the list of macro expanders available to the genrule command for the platform named by
 * {@code buildTarget}'s flavor. Returns empty when the target carries no platform flavor (the
 * unflavored placeholder rule expands no macros).
 */
@Override
protected Optional<ImmutableList<AbstractMacroExpander<? extends Macro, ?>>> getMacroHandler(
BuildTarget buildTarget,
ProjectFilesystem filesystem,
BuildRuleResolver resolver,
TargetGraph targetGraph,
CxxGenruleDescriptionArg args) {
Optional<CxxPlatform> maybeCxxPlatform = getCxxPlatforms().getValue(buildTarget);
if (!maybeCxxPlatform.isPresent()) {
return Optional.empty();
}
CxxPlatform cxxPlatform = maybeCxxPlatform.get();
ImmutableList.Builder<AbstractMacroExpander<? extends Macro, ?>> expanders =
ImmutableList.builder();
expanders.add(new ExecutableMacroExpander());
expanders.add(new CxxLocationMacroExpander(cxxPlatform));
// $(platform-name) -> the flavor string of the selected platform.
expanders.add(
new StringExpander<>(
PlatformNameMacro.class, StringArg.of(cxxPlatform.getFlavor().toString())));
// Tool macros: $(cc), $(cxx), and further below $(ld).
expanders.add(new ToolExpander<>(CcMacro.class, cxxPlatform.getCc().resolve(resolver)));
expanders.add(new ToolExpander<>(CxxMacro.class, cxxPlatform.getCxx().resolve(resolver)));
ImmutableList<String> asflags = cxxPlatform.getAsflags();
ImmutableList<String> cflags = cxxPlatform.getCflags();
ImmutableList<String> cxxflags = cxxPlatform.getCxxflags();
// Flag macros expand to a single shell-quoted string of platform flags.
expanders.add(
new StringExpander<>(
CcFlagsMacro.class, StringArg.of(shquoteJoin(Iterables.concat(cflags, asflags)))));
expanders.add(
new StringExpander<>(
CxxFlagsMacro.class, StringArg.of(shquoteJoin(Iterables.concat(cxxflags, asflags)))));
expanders.add(
new CxxPreprocessorFlagsExpander<>(CppFlagsMacro.class, cxxPlatform, CxxSource.Type.C));
expanders.add(
new CxxPreprocessorFlagsExpander<>(CxxppFlagsMacro.class, cxxPlatform, CxxSource.Type.CXX));
expanders.add(new ToolExpander<>(LdMacro.class, cxxPlatform.getLd().resolve(resolver)));
// One linker-flags expander per (macro class, link style, optional filter) combination.
for (Map.Entry<Class<? extends CxxGenruleFilterAndTargetsMacro>, Pair<LinkableDepType, Filter>>
ent :
ImmutableMap
.<Class<? extends CxxGenruleFilterAndTargetsMacro>, Pair<LinkableDepType, Filter>>
builder()
.put(LdflagsSharedMacro.class, new Pair<>(LinkableDepType.SHARED, Filter.NONE))
.put(
LdflagsSharedFilterMacro.class,
new Pair<>(LinkableDepType.SHARED, Filter.PARAM))
.put(LdflagsStaticMacro.class, new Pair<>(LinkableDepType.STATIC, Filter.NONE))
.put(
LdflagsStaticFilterMacro.class,
new Pair<>(LinkableDepType.STATIC, Filter.PARAM))
.put(
LdflagsStaticPicMacro.class,
new Pair<>(LinkableDepType.STATIC_PIC, Filter.NONE))
.put(
LdflagsStaticPicFilterMacro.class,
new Pair<>(LinkableDepType.STATIC_PIC, Filter.PARAM))
.build()
.entrySet()) {
expanders.add(
new CxxLinkerFlagsExpander<>(
ent.getKey(),
buildTarget,
filesystem,
cxxPlatform,
ent.getValue().getFirst(),
args.getOut(),
ent.getValue().getSecond()));
}
return Optional.of(expanders.build());
}
/**
 * Creates either the real platform-flavored {@link Genrule} (via the superclass) or, for an
 * unflavored target, a {@link CxxGenrule} placeholder that is resolved per-platform later.
 */
@Override
public BuildRule createBuildRule(
BuildRuleCreationContextWithTargetGraph context,
BuildTarget buildTarget,
BuildRuleParams params,
CxxGenruleDescriptionArg args) {
Optional<CxxPlatform> cxxPlatform = getCxxPlatforms().getValue(buildTarget);
if (cxxPlatform.isPresent()) {
return super.createBuildRule(
context, buildTarget.withAppendedFlavors(cxxPlatform.get().getFlavor()), params, args);
}
return new CxxGenrule(buildTarget, context.getProjectFilesystem(), params, args.getOut());
}
@Override
protected BuildRule createBuildRule(
BuildTarget buildTarget,
ProjectFilesystem projectFilesystem,
BuildRuleParams params,
BuildRuleResolver resolver,
CxxGenruleDescriptionArg args,
Optional<Arg> cmd,
Optional<Arg> bash,
Optional<Arg> cmdExe) {
return createBuildRule(
buildTarget, projectFilesystem, params, resolver, args, cmd, bash, cmdExe, args.getOut());
}
@Override
public void findDepsForTargetFromConstructorArgs(
BuildTarget buildTarget,
CellPathResolver cellRoots,
CxxGenruleDescriptionArg constructorArg,
ImmutableCollection.Builder<BuildTarget> extraDepsBuilder,
ImmutableCollection.Builder<BuildTarget> targetGraphOnlyDepsBuilder) {
// Add in all parse time deps from the C/C++ platforms.
for (CxxPlatform cxxPlatform : getCxxPlatforms().getValues()) {
targetGraphOnlyDepsBuilder.addAll(CxxPlatforms.getParseTimeDeps(cxxPlatform));
}
}
@Override
public boolean producesCacheableSubgraph() {
return true;
}
// Looks up the platform flavor domain from the toolchain provider on every call.
private FlavorDomain<CxxPlatform> getCxxPlatforms() {
return toolchainProvider
.getByName(CxxPlatformsProvider.DEFAULT_NAME, CxxPlatformsProvider.class)
.getCxxPlatforms();
}
/** Constructor-arg interface for {@code cxx_genrule}; {@code out} names the output file. */
@BuckStyleImmutable
@Value.Immutable
interface AbstractCxxGenruleDescriptionArg extends AbstractGenruleDescription.CommonArg {
String getOut();
}
/** A macro expander that expands to a specific {@link Tool}. */
private static class ToolExpander<M extends Macro> extends SimpleMacroExpander<M> {
private final Class<M> clazz;
private final Tool tool;
ToolExpander(Class<M> clazz, Tool tool) {
this.clazz = clazz;
this.tool = tool;
}
@Override
public Class<M> getInputClass() {
return clazz;
}
@Override
public Arg expandFrom(
BuildTarget target, CellPathResolver cellNames, BuildRuleResolver resolver) {
return ToolArg.of(tool);
}
}
/**
 * Base expander for macros of the form {@code $(macro [filter] target...)}: parses an optional
 * leading regex filter plus a list of build targets, then delegates to {@link #expand}.
 */
private abstract static class FilterAndTargetsExpander<M extends CxxGenruleFilterAndTargetsMacro>
extends AbstractMacroExpanderWithoutPrecomputedWork<M> {
private final Filter filter;
FilterAndTargetsExpander(Filter filter) {
this.filter = filter;
}
/** @return an instance of the subclass represented by T. */
@SuppressWarnings("unchecked")
public <T extends CxxGenruleFilterAndTargetsMacro> T create(
Class<T> clazz, Optional<Pattern> filter, ImmutableList<BuildTarget> targets) {
try {
// Reflectively call the immutables-generated static factory T.of(filter, targets).
return (T)
clazz
.getMethod("of", Optional.class, ImmutableList.class)
.invoke(null, filter, targets);
} catch (NoSuchMethodException | IllegalAccessException | InvocationTargetException e) {
throw new RuntimeException(e);
}
}
@Override
protected final M parse(
BuildTarget target, CellPathResolver cellNames, ImmutableList<String> input)
throws MacroException {
if (this.filter == Filter.PARAM && input.size() < 1) {
throw new MacroException("expected at least 1 argument");
}
Iterator<String> itr = input.iterator();
// The first argument is the filter regex only for PARAM-style macros.
Optional<Pattern> filter =
this.filter == Filter.PARAM ? Optional.of(Pattern.compile(itr.next())) : Optional.empty();
ImmutableList.Builder<BuildTarget> targets = ImmutableList.builder();
while (itr.hasNext()) {
targets.add(
BuildTargetParser.INSTANCE.parse(
itr.next(), BuildTargetPatternParser.forBaseName(target.getBaseName()), cellNames));
}
return create(getInputClass(), filter, targets.build());
}
/** Resolves targets to rules, failing with a {@link MacroException} on any unknown target. */
protected ImmutableList<BuildRule> resolve(
BuildRuleResolver resolver, ImmutableList<BuildTarget> input) throws MacroException {
ImmutableList.Builder<BuildRule> rules = ImmutableList.builder();
for (BuildTarget ruleTarget : input) {
Optional<BuildRule> rule = resolver.getRuleOptional(ruleTarget);
if (!rule.isPresent()) {
throw new MacroException(String.format("no rule %s", ruleTarget));
}
rules.add(rule.get());
}
return rules.build();
}
protected abstract Arg expand(
BuildRuleResolver resolver, ImmutableList<BuildRule> rules, Optional<Pattern> filter)
throws MacroException;
@Override
public Arg expandFrom(
BuildTarget target, CellPathResolver cellNames, BuildRuleResolver resolver, M input)
throws MacroException {
return expand(resolver, resolve(resolver, input.getTargets()), input.getFilter());
}
@Override
public void extractParseTimeDepsFrom(
BuildTarget target,
CellPathResolver cellNames,
M input,
ImmutableCollection.Builder<BuildTarget> buildDepsBuilder,
ImmutableCollection.Builder<BuildTarget> targetGraphOnlyDepsBuilder) {
buildDepsBuilder.addAll(input.getTargets());
}
}
/**
 * A build target expander that replaces lists of build target with their transitive preprocessor
 * input.
 */
private static class CxxPreprocessorFlagsExpander<M extends CxxGenruleFilterAndTargetsMacro>
extends FilterAndTargetsExpander<M> {
private final Class<M> clazz;
private final CxxPlatform cxxPlatform;
private final CxxSource.Type sourceType;
CxxPreprocessorFlagsExpander(
Class<M> clazz, CxxPlatform cxxPlatform, CxxSource.Type sourceType) {
super(Filter.NONE);
this.clazz = clazz;
this.cxxPlatform = cxxPlatform;
this.sourceType = sourceType;
}
@Override
public Class<M> getInputClass() {
return clazz;
}
/** Make sure all resolved targets are instances of {@link CxxPreprocessorDep}. */
@Override
protected ImmutableList<BuildRule> resolve(
BuildRuleResolver resolver, ImmutableList<BuildTarget> input) throws MacroException {
return FluentIterable.from(super.resolve(resolver, input))
.filter(CxxPreprocessorDep.class::isInstance)
.toList();
}
/** Get the transitive C/C++ preprocessor input rooted at the given rules. */
private Collection<CxxPreprocessorInput> getCxxPreprocessorInput(
BuildRuleResolver ruleResolver, ImmutableList<BuildRule> rules) {
return CxxPreprocessables.getTransitiveCxxPreprocessorInput(cxxPlatform, ruleResolver, rules);
}
/**
 * Return the {@link PreprocessorFlags} object formed by the transitive C/C++ preprocessor input
 * for the given rules.
 */
private PreprocessorFlags getPreprocessorFlags(
Iterable<CxxPreprocessorInput> transitivePreprocessorInput) {
PreprocessorFlags.Builder ppFlagsBuilder = PreprocessorFlags.builder();
ExplicitCxxToolFlags.Builder toolFlagsBuilder = CxxToolFlags.explicitBuilder();
toolFlagsBuilder.setPlatformFlags(
StringArg.from(CxxSourceTypes.getPlatformPreprocessFlags(cxxPlatform, sourceType)));
for (CxxPreprocessorInput input : transitivePreprocessorInput) {
ppFlagsBuilder.addAllIncludes(input.getIncludes());
ppFlagsBuilder.addAllFrameworkPaths(input.getFrameworks());
toolFlagsBuilder.addAllRuleFlags(input.getPreprocessorFlags().get(sourceType));
}
ppFlagsBuilder.setOtherFlags(toolFlagsBuilder.build());
return ppFlagsBuilder.build();
}
/**
 * Expand the preprocessor input for the given rules into a shell-escaped string containing all
 * flags and header trees.
 */
@Override
protected Arg expand(
BuildRuleResolver resolver, ImmutableList<BuildRule> rules, Optional<Pattern> filter) {
return new CxxPreprocessorFlagsArg(
getPreprocessorFlags(getCxxPreprocessorInput(resolver, rules)),
CxxSourceTypes.getPreprocessor(cxxPlatform, sourceType).resolve(resolver));
}
/** Arg that lazily stringifies the collected preprocessor flags, shell-escaped and joined. */
private class CxxPreprocessorFlagsArg implements Arg {
@AddToRuleKey private final PreprocessorFlags ppFlags;
@AddToRuleKey private final Preprocessor preprocessor;
CxxPreprocessorFlagsArg(PreprocessorFlags ppFlags, Preprocessor preprocessor) {
this.ppFlags = ppFlags;
this.preprocessor = preprocessor;
}
@Override
public void appendToCommandLine(Consumer<String> consumer, SourcePathResolver resolver) {
consumer.accept(
Arg.stringify(
ppFlags
.toToolFlags(
resolver,
PathShortener.identity(),
CxxDescriptionEnhancer.frameworkPathToSearchPath(cxxPlatform, resolver),
preprocessor,
/* pch */ Optional.empty())
.getAllFlags(),
resolver)
.stream()
.map(Escaper.SHELL_ESCAPER)
.collect(Collectors.joining(" ")));
}
}
}
/**
 * A build target expander that replaces lists of build target with their transitive preprocessor
 * input.
 */
private static class CxxLinkerFlagsExpander<M extends CxxGenruleFilterAndTargetsMacro>
extends FilterAndTargetsExpander<M> {
private final Class<M> clazz;
private final BuildTarget buildTarget;
private final ProjectFilesystem filesystem;
private final CxxPlatform cxxPlatform;
private final Linker.LinkableDepType depType;
// Output filename of the genrule; used to compute rpath-relative paths for shared links.
private final String out;
CxxLinkerFlagsExpander(
Class<M> clazz,
BuildTarget buildTarget,
ProjectFilesystem filesystem,
CxxPlatform cxxPlatform,
Linker.LinkableDepType depType,
String out,
Filter filter) {
super(filter);
this.clazz = clazz;
this.buildTarget = buildTarget;
this.filesystem = filesystem;
this.cxxPlatform = cxxPlatform;
this.depType = depType;
this.out = out;
}
@Override
public Class<M> getInputClass() {
return clazz;
}
/**
 * @return a {@link SymlinkTree} containing all the transitive shared libraries from the given
 * roots linked in by their library name.
 */
private SymlinkTree requireSymlinkTree(
BuildRuleResolver resolver, ImmutableList<BuildRule> rules) {
return CxxDescriptionEnhancer.requireSharedLibrarySymlinkTree(
buildTarget, filesystem, resolver, cxxPlatform, rules);
}
/**
 * @return the list of {@link Arg} required for dynamic linking so that linked binaries can find
 * their shared library dependencies at runtime.
 */
private ImmutableList<Arg> getSharedLinkArgs(
BuildRuleResolver resolver, ImmutableList<BuildRule> rules) {
// Embed a origin-relative library path into the binary so it can find the shared libraries.
// The shared libraries root is absolute. Also need an absolute path to the linkOutput
Path linkOutput = BuildTargets.getGenPath(filesystem, buildTarget, "%s").resolve(out);
Path absLinkOut = buildTarget.getCellPath().resolve(linkOutput);
SymlinkTree symlinkTree = requireSymlinkTree(resolver, rules);
return RichStream.from(
StringArg.from(
Linkers.iXlinker(
"-rpath",
String.format(
"%s/%s",
cxxPlatform.getLd().resolve(resolver).origin(),
absLinkOut.getParent().relativize(symlinkTree.getRoot()).toString()))))
.map(
arg ->
new ProxyArg(arg) {
// This is added so that the arg's rulekey properly reflects its deps.
@AddToRuleKey
private final NonHashableSourcePathContainer symlinkTreeRef =
new NonHashableSourcePathContainer(symlinkTree.getSourcePathToOutput());
})
.collect(ImmutableList.toImmutableList());
}
/**
 * Collects the transitive native linkable input of {@code rules} for this expander's link
 * style, optionally restricted to rules whose "type(target)" string matches {@code filter}.
 */
private NativeLinkableInput getNativeLinkableInput(
BuildRuleResolver ruleResolver, Iterable<BuildRule> rules, Optional<Pattern> filter) {
ImmutableList<NativeLinkable> nativeLinkables =
NativeLinkables.getNativeLinkables(
cxxPlatform,
ruleResolver,
FluentIterable.from(rules).filter(NativeLinkable.class),
depType,
!filter.isPresent()
? x -> true
: input -> {
Preconditions.checkArgument(input instanceof BuildRule);
BuildRule rule = (BuildRule) input;
return filter
.get()
.matcher(String.format("%s(%s)", rule.getType(), rule.getBuildTarget()))
.find();
});
ImmutableList.Builder<NativeLinkableInput> nativeLinkableInputs = ImmutableList.builder();
for (NativeLinkable nativeLinkable : nativeLinkables) {
nativeLinkableInputs.add(
NativeLinkables.getNativeLinkableInput(
cxxPlatform, depType, nativeLinkable, ruleResolver));
}
return NativeLinkableInput.concat(nativeLinkableInputs.build());
}
/** Make sure all resolved targets are instances of {@link NativeLinkable}. */
@Override
protected ImmutableList<BuildRule> resolve(
BuildRuleResolver resolver, ImmutableList<BuildTarget> input) throws MacroException {
return FluentIterable.from(super.resolve(resolver, input))
.filter(NativeLinkable.class::isInstance)
.toList();
}
/** Return the args formed by the transitive native linkable input for the given rules. */
private ImmutableList<Arg> getLinkerArgs(
BuildRuleResolver resolver, ImmutableList<BuildRule> rules, Optional<Pattern> filter) {
ImmutableList.Builder<Arg> args = ImmutableList.builder();
args.addAll(StringArg.from(cxxPlatform.getLdflags()));
if (depType == Linker.LinkableDepType.SHARED) {
args.addAll(getSharedLinkArgs(resolver, rules));
}
args.addAll(getNativeLinkableInput(resolver, rules, filter).getArgs());
return args.build();
}
/**
 * Expand the native linkable input for the given rules into a shell-escaped string containing
 * all linker flags.
 */
@Override
public Arg expand(
BuildRuleResolver resolver, ImmutableList<BuildRule> rules, Optional<Pattern> filter) {
return new ShQuoteJoinArg(getLinkerArgs(resolver, rules, filter));
}
}
/** Arg wrapper that shell-quotes and space-joins its component args when stringified. */
private static class ShQuoteJoinArg implements Arg {
@AddToRuleKey private final ImmutableList<Arg> args;
ShQuoteJoinArg(ImmutableList<Arg> args) {
this.args = args;
}
@Override
public void appendToCommandLine(Consumer<String> consumer, SourcePathResolver pathResolver) {
consumer.accept(shquoteJoin(Arg.stringify(args, pathResolver)));
}
}
/** Whether a macro takes a leading regex parameter used to filter dependency targets. */
private enum Filter {
NONE,
PARAM,
}
}
| |
/**
* (c) 2014 by Christian Schenk
**/
package de.schenk.jrtrace.ui.debug;
import java.io.File;
import java.io.StringWriter;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.UndeclaredThrowableException;
import java.util.ArrayList;
import java.util.List;
import org.eclipse.core.resources.IMarkerDelta;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.MultiStatus;
import org.eclipse.core.runtime.Status;
import org.eclipse.core.runtime.jobs.Job;
import org.eclipse.debug.core.DebugEvent;
import org.eclipse.debug.core.DebugException;
import org.eclipse.debug.core.ILaunch;
import org.eclipse.debug.core.model.DebugElement;
import org.eclipse.debug.core.model.IBreakpoint;
import org.eclipse.debug.core.model.IDebugTarget;
import org.eclipse.debug.core.model.IMemoryBlock;
import org.eclipse.debug.core.model.IProcess;
import org.eclipse.debug.core.model.IThread;
import org.eclipse.jface.dialogs.ErrorDialog;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Shell;
import de.schenk.jrtrace.service.IJRTraceVM;
import de.schenk.jrtrace.ui.JRTraceUIActivator;
import de.schenk.jrtrace.ui.markers.JRTraceMarkerManager;
import de.schenk.jrtrace.ui.util.JarByteUtil;
import de.schenk.jrtrace.ui.util.JarUtil;
/**
 * Eclipse debug target representing a JRTrace connection to a target JVM.
 *
 * <p>It owns the console connector, the marker manager, and a synthetic {@link IProcess}; it
 * supports terminate/disconnect but neither suspension, breakpoints, threads, nor memory
 * retrieval. Failed operations on the underlying {@link IJRTraceVM} trigger an error dialog and
 * a disconnect.
 */
public class JRTraceDebugTarget extends DebugElement implements IDebugTarget {
// NOTE(review): "bytemam" looks like a typo (byteman/jrtrace?), but this is a runtime model id
// that other components may match against, so it is intentionally left unchanged — confirm
// before renaming.
public static final String JRTRACE_DEBUG_MODEL = "bytemam.debug.model";
// Process id of the target JVM; used in error messages. Assigned elsewhere — TODO confirm.
private String pid;
// Connection to the JRTrace agent running in the target VM.
private IJRTraceVM machine;
private ILaunch launch;
private boolean isDisconnected = false;
private boolean isTerminated = false;
// Bridges target output to an Eclipse console.
private JRTraceConsoleConnector JRTraceConsole;
IProcess process;
// Translates JRTrace problems to workspace markers; null once disconnected.
private JRTraceMarkerManager markerManager;
// Project whose classes are uploaded to the target; may be null.
private IProject theProject;
/**
 * Creates the debug target and, if requested, packs {@code theProject} into a jar on the UI
 * thread and schedules its upload to the target VM.
 *
 * @param vm the connected JRTrace VM
 * @param launch the owning launch
 * @param theProject project providing the JRTrace classes, may be null
 * @param uploadHelperOnConnect whether to immediately build and install the project jar
 */
public JRTraceDebugTarget(IJRTraceVM vm, ILaunch launch,
final IProject theProject, boolean uploadHelperOnConnect) {
super(null);
this.launch = launch;
machine = vm;
process = new JRTraceProcess(this);
createConsole();
markerManager = new JRTraceMarkerManager(this);
this.theProject = theProject;
if (theProject != null) {
if (uploadHelperOnConnect) {
final File jarFile[] = new File[1];
// Jar creation may open UI (shell), so it must run synchronously on the display thread.
Display.getDefault().syncExec(new Runnable() {
@Override
public void run() {
jarFile[0] = JarUtil.createJar(theProject, Display
.getDefault().getActiveShell());
}
});
Job installEngineXJob = new InstallJRTraceJob(this, jarFile[0]);
installEngineXJob.schedule();
}
}
}
// Starts the console connector for this target.
private void createConsole() {
JRTraceConsole = new JRTraceConsoleConnector();
JRTraceConsole.start(this);
}
public IJRTraceVM getJRTraceMachine() {
return machine;
}
@Override
public IDebugTarget getDebugTarget() {
return this;
}
@Override
public ILaunch getLaunch() {
return launch;
}
@Override
public String getModelIdentifier() {
return JRTRACE_DEBUG_MODEL;
}
@Override
public boolean canTerminate() {
return true;
}
@Override
public boolean isTerminated() {
return isTerminated;
}
/**
 * Disconnects from the target, then closes the console and fires a TERMINATE event even if the
 * disconnect failed.
 */
@Override
public void terminate() throws DebugException {
try {
disconnect();
} finally {
JRTraceConsole.close();
isTerminated = true;
fireEvent(new DebugEvent(this, DebugEvent.TERMINATE));
}
}
// Suspend/resume are not supported by this target.
@Override
public boolean canResume() {
return false;
}
@Override
public boolean canSuspend() {
return false;
}
@Override
public boolean isSuspended() {
return false;
}
@Override
public void resume() throws DebugException {
}
@Override
public void suspend() throws DebugException {
}
// Breakpoints are not supported; notifications are ignored.
@Override
public void breakpointAdded(IBreakpoint breakpoint) {
}
@Override
public void breakpointRemoved(IBreakpoint breakpoint, IMarkerDelta delta) {
// TODO Auto-generated method stub
}
@Override
public void breakpointChanged(IBreakpoint breakpoint, IMarkerDelta delta) {
// TODO Auto-generated method stub
}
@Override
public boolean canDisconnect() {
return true;
}
/**
 * Schedules a detach job and blocks until it completes (retrying join on interrupts), then
 * shuts down the console and marker manager and fires a CHANGE event.
 */
@Override
public void disconnect() throws DebugException {
DetachJRTraceJob job = new DetachJRTraceJob(this);
job.schedule();
while (true)
try {
job.join();
break;
} catch (InterruptedException e) {
// Ignore interrupts: we must wait for the detach job to finish before tearing down.
}
JRTraceConsole.stop();
if (markerManager != null) {
markerManager.close();
markerManager = null;
}
isDisconnected = true;
fireEvent(new DebugEvent(this, DebugEvent.CHANGE));
}
@Override
public boolean isDisconnected() {
return isDisconnected;
}
// Memory block retrieval is not supported.
@Override
public boolean supportsStorageRetrieval() {
return false;
}
@Override
public IMemoryBlock getMemoryBlock(long startAddress, long length)
throws DebugException {
return null;
}
@Override
public IProcess getProcess() {
return process;
}
// This target exposes no threads to the debug framework.
@Override
public IThread[] getThreads() throws DebugException {
return new IThread[0];
}
@Override
public boolean hasThreads() throws DebugException {
return false;
}
@Override
public String getName() throws DebugException {
return machine.toString();
}
@Override
public boolean supportsBreakpoint(IBreakpoint breakpoint) {
return false;
}
/** Installs a jar on the target VM; on failure the connection is assumed broken. */
public void installJar(byte[] bytes) {
if (!machine.installJar(bytes)) {
disconnectAfterConnectionProblem();
}
}
/**
 *
 * @param theClassLoader
 * for classloaderpolicy TARGET: the name of the class to use for
 * the invocation.
 * @param className
 * name of the class to invoke a method on
 * @param methodName
 * static method name (void void)
 */
public void runJava(String theClassLoader, final String className,
final String methodName) {
if (!machine.invokeMethodAsync(theClassLoader, className, methodName)) {
// Report the failure on the UI thread; the invocation itself was asynchronous.
Display.getDefault().asyncExec(new Runnable() {
@Override
public void run() {
MultiStatus m = createStatusFromThrowableForDialog("Error during java call.");
Shell shell = Display.getDefault().getShells()[0];
ErrorDialog.openError(shell, "Execution Problem",
"It was not possible to run the method "
+ methodName + " of class " + className
+ " on the target.", m);
}
});
}
}
/** Shows a connection-lost dialog asynchronously, then disconnects from the target. */
private void disconnectAfterConnectionProblem() {
Display.getDefault().asyncExec(new Runnable() {
@Override
public void run() {
MultiStatus m = createStatusFromThrowableForDialog("Connection to target lost.");
ErrorDialog
.openError(
Display.getDefault().getActiveShell(),
"Connection Problem",
"The connection to the target machine "
+ pid
+ " is broken. Disconnecting from target.",
m);
}
});
try {
disconnect();
} catch (DebugException e) {
throw new RuntimeException(e);
}
}
/** Uploads the JRTrace classes contained in {@code jarFile}, clearing old markers first. */
public void installEngineX(File jarFile) {
markerManager.clearAllMarkers();
byte[][] classFileBytes = JarByteUtil
.convertJarToClassByteArray(jarFile);
if (!machine.installJRTraceClasses(classFileBytes)) {
disconnectAfterConnectionProblem();
}
}
public IProject getProject() {
return theProject;
}
/**
 * create a multistatus taking into account that some exceptions have additional stacktraces
 * in other places. (UndeclaredThrowableException).
 *
 * @param msg dialog message for the top-level status
 * @return a MultiStatus suitable for {@link ErrorDialog#openError}
 */
private MultiStatus createStatusFromThrowableForDialog(String msg) {
MultiStatus m;
m=new MultiStatus(
de.schenk.jrtrace.ui.JRTraceUIActivator.BUNDLE_ID,
IStatus.ERROR, msg, machine
.getLastError());
Throwable t= machine.getLastError();
if(t!=null) m.add(createMultiStatus("Last Exception Thrown:",t));
// Unwrap UndeclaredThrowableException to also surface the wrapped throwable's trace.
if (machine.getLastError() instanceof UndeclaredThrowableException) {
Throwable ex = ((UndeclaredThrowableException)t).getUndeclaredThrowable();
if(ex!=null)
m.add(createMultiStatus("Undeclared Throwable:", ex));
}
return m;
}
/**
 * Create a multistatus with the specified message and the substatus such that they show the throwable stacktrace
 * suitable for showing in an ErrorDialog
 * @param msg top-level status message
 * @param t throwable whose stack trace becomes one child status per frame
 * @return the composed MultiStatus
 */
private static MultiStatus createMultiStatus(String msg, Throwable t) {
List<Status> childStatuses = new ArrayList<>();
for (StackTraceElement tr: t.getStackTrace()) {
Status status = new Status(IStatus.ERROR,
JRTraceUIActivator.BUNDLE_ID, tr.toString());
childStatuses.add(status);
}
MultiStatus ms = new MultiStatus(JRTraceUIActivator.BUNDLE_ID,
IStatus.ERROR, childStatuses.toArray(new Status[0]),
msg, t);
return ms;
}
}
| |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.ui;
import com.intellij.CommonBundle;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.options.Configurable;
import com.intellij.openapi.options.ConfigurationException;
import com.intellij.openapi.options.MasterDetails;
import com.intellij.openapi.project.DumbAware;
import com.intellij.openapi.ui.popup.JBPopupFactory;
import com.intellij.openapi.ui.popup.ListPopup;
import com.intellij.openapi.ui.popup.ListPopupStep;
import com.intellij.openapi.util.ActionCallback;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.Condition;
import com.intellij.openapi.util.Conditions;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.ui.*;
import com.intellij.ui.navigation.History;
import com.intellij.ui.navigation.Place;
import com.intellij.ui.treeStructure.Tree;
import com.intellij.util.Function;
import com.intellij.util.PlatformIcons;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.HashSet;
import com.intellij.util.ui.JBUI;
import com.intellij.util.ui.UIUtil;
import com.intellij.util.ui.tree.TreeUtil;
import com.intellij.util.xmlb.XmlSerializerUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.tree.*;
import java.awt.*;
import java.util.*;
import java.util.List;
/**
* @author anna
* @since 29-May-2006
*/
public abstract class MasterDetailsComponent implements Configurable, DetailsComponent.Facade, MasterDetails {
  protected static final Logger LOG = Logger.getInstance("#com.intellij.openapi.ui.MasterDetailsComponent");
  protected static final Icon COPY_ICON = PlatformIcons.COPY_ICON;
  // Configurable currently shown in the detail pane; null when nothing is selected.
  protected NamedConfigurable myCurrentConfigurable;
  private final JBSplitter mySplitter;
  @NonNls public static final String TREE_OBJECT = "treeObject";
  @NonNls public static final String TREE_NAME = "treeName";
  // Navigation history; the default navigator only captures the replacement history.
  protected History myHistory = new History(new Place.Navigator() {
    public void setHistory(final History history) {
      myHistory = history;
    }
    @Nullable
    public ActionCallback navigateTo(@Nullable final Place place, final boolean requestFocus) {
      return null;
    }
    public void queryPlace(@NotNull final Place place) {
    }
  });
  // Master (left) component: the tree, possibly wrapped in a toolbar decorator.
  private JComponent myMaster;
  public void setHistory(final History history) {
    myHistory = history;
  }
  // Persisted UI state (last edited configurable, splitter proportions).
  protected final MasterDetailsState myState;
  // Refreshes banner/selection bookkeeping for the selected node, e.g. after a rename.
  protected Runnable TREE_UPDATER;
  {
    TREE_UPDATER = new Runnable() {
      public void run() {
        final TreePath selectionPath = myTree.getSelectionPath();
        if (selectionPath == null) return;
        MyNode node = (MyNode)selectionPath.getLastPathComponent();
        if (node == null) return;
        myState.setLastEditedConfigurable(getNodePathString(node)); //survive after rename;
        myDetails.setText(node.getConfigurable().getBannerSlogan());
        ((DefaultTreeModel)myTree.getModel()).reload(node);
        fireItemsChangedExternally();
      }
    };
  }
  protected MyNode myRoot = new MyRootNode();
  protected Tree myTree = new Tree();
  private final DetailsComponent myDetails = new DetailsComponent(!Registry.is("ide.new.project.settings"), !Registry.is("ide.new.project.settings"));
  protected JPanel myWholePanel;
  public JPanel myNorthPanel = new JPanel(new BorderLayout());
  private final List<ItemsChangeListener> myListeners = ContainerUtil.createLockFreeCopyOnWriteList();
  // Configurables whose reset() has already run; isModified()/apply() only consult these.
  private final Set<NamedConfigurable> myInitializedConfigurables = new HashSet<NamedConfigurable>();
  // Set when an item was removed in the UI; keeps isModified() true until apply().
  private boolean myHasDeletedItems;
  protected AutoScrollToSourceHandler myAutoScrollHandler;
  // When true, the composite panel is rebuilt on the next reInitWholePanelIfNeeded().
  protected boolean myToReInitWholePanel = true;
  /** Creates the component with a fresh, empty persisted state. */
  protected MasterDetailsComponent() {
    this(new MasterDetailsState());
  }
  /**
   * @param state persisted UI state this component restores from and saves into
   */
  protected MasterDetailsComponent(MasterDetailsState state) {
    myState = state;
    // The new project-structure look uses a 1px splitter; the classic UI a standard one.
    mySplitter = isNewProjectSettings() ? new OnePixelSplitter(false, .2f) : new JBSplitter(false, .2f);
    mySplitter.setSplitterProportionKey("ProjectStructure.SecondLevelElements");
    mySplitter.setHonorComponentsMinimumSize(true);
    installAutoScroll();
    reInitWholePanelIfNeeded();
  }
  /**
   * True only when the registry flag is on AND this instance belongs to the
   * project-structure dialog (detected reflectively via its base class).
   */
  private boolean isNewProjectSettings() {
    if (!Registry.is("ide.new.project.settings")) {
      return false;
    }
    try {
      // assume that only project structure dialog uses the following base class for details:
      String name = "com.intellij.openapi.roots.ui.configuration.projectRoot.BaseStructureConfigurable";
      return Class.forName(name).isAssignableFrom(getClass());
    }
    catch (ClassNotFoundException ignored) {
      return false;
    }
  }
  /** Rebuilds the whole master-detail panel when flagged; otherwise a no-op. */
  protected void reInitWholePanelIfNeeded() {
    if (!myToReInitWholePanel) return;
    myWholePanel = new JPanel(new BorderLayout()) {
      public void addNotify() {
        super.addNotify();
        MasterDetailsComponent.this.addNotify();
        TreeModel m = myTree.getModel();
        if (m instanceof DefaultTreeModel) {
          DefaultTreeModel model = (DefaultTreeModel)m;
          // Force a refresh of every visible row once the panel is realized.
          for (int eachRow = 0; eachRow < myTree.getRowCount(); eachRow++) {
            TreePath eachPath = myTree.getPathForRow(eachRow);
            Object component = eachPath.getLastPathComponent();
            if (component instanceof TreeNode) {
              model.nodeChanged((TreeNode)component);
            }
          }
        }
      }
    };
    mySplitter.setHonorComponentsMinimumSize(true);
    myWholePanel.add(mySplitter, BorderLayout.CENTER);
    JPanel left = new JPanel(new BorderLayout()) {
      public Dimension getMinimumSize() {
        // Keep the master pane at least 100px wide.
        final Dimension original = super.getMinimumSize();
        return new Dimension(Math.max(original.width, 100), original.height);
      }
    };
    if (isNewProjectSettings()) {
      // New settings UI: toolbar decorator sits on top of the tree, north panel hidden.
      ToolbarDecorator decorator = ToolbarDecorator.createDecorator(myTree);
      DefaultActionGroup group = createToolbarActionGroup();
      if (group != null) {
        decorator.setActionGroup(group);
      }
      //left.add(myNorthPanel, BorderLayout.NORTH);
      myMaster = decorator.setAsUsualTopToolbar().setPanelBorder(JBUI.Borders.empty()).createPanel();
      myNorthPanel.setVisible(false);
    } else {
      // Classic UI: separate toolbar panel above a scrollable tree.
      left.add(myNorthPanel, BorderLayout.NORTH);
      myMaster = ScrollPaneFactory.createScrollPane(myTree);
    }
    left.add(myMaster, BorderLayout.CENTER);
    mySplitter.setFirstComponent(left);
    final JPanel right = new JPanel(new BorderLayout());
    right.add(myDetails.getComponent(), BorderLayout.CENTER);
    if (!isNewProjectSettings()) {
      myWholePanel.setBorder(BorderFactory.createEmptyBorder(10, 10, 10, 10));
    }
    mySplitter.setSecondComponent(right);
    GuiUtils.replaceJSplitPaneWithIDEASplitter(myWholePanel);
    myToReInitWholePanel = false;
  }
  /** Installs auto-scroll on the tree so selection changes update the detail pane. */
  private void installAutoScroll() {
    myAutoScrollHandler = new AutoScrollToSourceHandler() {
      protected boolean isAutoScrollMode() {
        return isAutoScrollEnabled();
      }
      protected void setAutoScrollMode(boolean state) {
        //do nothing
      }
      protected void scrollToSource(Component tree) {
        updateSelectionFromTree();
      }
      protected boolean needToCheckFocus() {
        return false;
      }
    };
    myAutoScrollHandler.install(myTree);
  }
  /** Invoked when the panel becomes displayable; syncs the detail pane with the tree. */
  protected void addNotify() {
    updateSelectionFromTree();
  }
  /** Pushes the current tree selection into the detail pane (multi-selection aware). */
  private void updateSelectionFromTree() {
    TreePath[] treePaths = myTree.getSelectionPaths();
    if (treePaths != null) {
      List<NamedConfigurable> selectedConfigurables = new ArrayList<NamedConfigurable>();
      for (TreePath path : treePaths) {
        Object lastPathComponent = path.getLastPathComponent();
        if (lastPathComponent instanceof MyNode) {
          selectedConfigurables.add(((MyNode)lastPathComponent).getConfigurable());
        }
      }
      // Give subclasses a chance to render a dedicated multi-selection view.
      if (selectedConfigurables.size() > 1 && updateMultiSelection(selectedConfigurables)) {
        return;
      }
    }
    final TreePath path = myTree.getSelectionPath();
    if (path != null) {
      final Object lastPathComp = path.getLastPathComponent();
      if (!(lastPathComp instanceof MyNode)) return;
      final MyNode node = (MyNode)lastPathComp;
      setSelectedNode(node);
    } else {
      setSelectedNode(null);
    }
  }
  /** Hook: return true when the subclass fully handled the multi-selection itself. */
  protected boolean updateMultiSelection(final List<NamedConfigurable> selectedConfigurables) {
    return false;
  }
  public DetailsComponent getDetailsComponent() {
    return myDetails;
  }
  public Splitter getSplitter() {
    return mySplitter;
  }
  /** Auto-scroll is suppressed while the navigation history is replaying a step. */
  protected boolean isAutoScrollEnabled() {
    return myHistory == null || !myHistory.isNavigatingNow();
  }
protected DefaultActionGroup createToolbarActionGroup() {
final ArrayList<AnAction> actions = createActions(false);
if (actions != null) {
final DefaultActionGroup group = new DefaultActionGroup();
for (AnAction action : actions) {
if (action instanceof ActionGroupWithPreselection) {
group.add(new MyActionGroupWrapper((ActionGroupWithPreselection)action));
}
else {
group.add(action);
}
}
return group;
}
return null;
}
  /** Adds the classic toolbar above the tree; the new UI builds it via ToolbarDecorator instead. */
  private void initToolbar() {
    if (isNewProjectSettings()) return;
    DefaultActionGroup group = createToolbarActionGroup();
    if (group != null) {
      final JComponent component = ActionManager.getInstance().createActionToolbar(ActionPlaces.UNKNOWN, group, true).getComponent();
      myNorthPanel.add(component, BorderLayout.NORTH);
    }
  }
  public void addItemsChangeListener(ItemsChangeListener l) {
    myListeners.add(l);
  }
  protected Dimension getPanelPreferredSize() {
    return JBUI.size(800, 600);
  }
  @NotNull
  public JComponent createComponent() {
    // updateUI() picks up any LaF change that happened while the component was hidden.
    myTree.updateUI();
    reInitWholePanelIfNeeded();
    updateSelectionFromTree();
    final JPanel panel = new JPanel(new BorderLayout()) {
      public Dimension getPreferredSize() {
        return getPanelPreferredSize();
      }
    };
    panel.add(myWholePanel, BorderLayout.CENTER);
    return panel;
  }
  /** Modified when anything was deleted, or any initialized configurable reports changes. */
  public boolean isModified() {
    if (myHasDeletedItems) return true;
    final boolean[] modified = new boolean[1];
    TreeUtil.traverseDepth(myRoot, new TreeUtil.Traverse() {
      public boolean accept(Object node) {
        if (node instanceof MyNode) {
          final NamedConfigurable configurable = ((MyNode)node).getConfigurable();
          if (isInitialized(configurable) && configurable.isModified()) {
            modified[0] = true;
            return false; // first modification found — stop traversal
          }
        }
        return true;
      }
    });
    return modified[0];
  }
  protected boolean isInitialized(final NamedConfigurable configurable) {
    return myInitializedConfigurables.contains(configurable);
  }
  /**
   * Applies removals first, then every modified initialized configurable.
   * The first ConfigurationException aborts the traversal and is rethrown.
   */
  public void apply() throws ConfigurationException {
    processRemovedItems();
    final ConfigurationException[] ex = new ConfigurationException[1];
    TreeUtil.traverse(myRoot, new TreeUtil.Traverse() {
      public boolean accept(Object node) {
        if (node instanceof MyNode) {
          try {
            final NamedConfigurable configurable = ((MyNode)node).getConfigurable();
            if (isInitialized(configurable) && configurable.isModified()) {
              configurable.apply();
            }
          }
          catch (ConfigurationException e) {
            ex[0] = e; // remember and abort traversal
            return false;
          }
        }
        return true;
      }
    });
    if (ex[0] != null) {
      throw ex[0];
    }
    myHasDeletedItems = false;
  }
  /** Commits deletions performed in the UI to the underlying model. */
  protected abstract void processRemovedItems();
  /** @return true when the editable object exists in the persisted model. */
  protected abstract boolean wasObjectStored(Object editableObject);
  /** Reloads state and restores the last selected node, or selects the first one. */
  public void reset() {
    loadComponentState();
    myHasDeletedItems = false;
    ((DefaultTreeModel)myTree.getModel()).reload();
    //myTree.requestFocus();
    myState.getProportions().restoreSplitterProportions(myWholePanel);
    final Enumeration enumeration = myRoot.breadthFirstEnumeration();
    boolean selected = false;
    while (enumeration.hasMoreElements()) {
      final MyNode node = (MyNode)enumeration.nextElement();
      if (node instanceof MyRootNode) continue;
      final String path = getNodePathString(node);
      // Select the node whose identity path matches the remembered selection.
      if (!selected && Comparing.strEqual(path, myState.getLastEditedConfigurable())) {
        TreeUtil.selectInTree(node, false, myTree);
        selected = true;
      }
    }
    if (!selected) {
      TreeUtil.selectFirstNode(myTree);
    }
    updateSelectionFromTree();
  }
protected void loadComponentState() {
final String key = getComponentStateKey();
final MasterDetailsStateService stateService = getStateService();
if (key != null && stateService != null) {
final MasterDetailsState state = stateService.getComponentState(key, myState.getClass());
if (state != null) {
loadState(state);
}
}
}
private static String getNodePathString(final MyNode node) {
StringBuilder path = new StringBuilder();
MyNode current = node;
while (current != null) {
final Object userObject = current.getUserObject();
if (!(userObject instanceof NamedConfigurable)) break;
final String displayName = current.getDisplayName();
if (StringUtil.isEmptyOrSpaces(displayName)) break;
if (path.length() > 0) {
path.append('|');
}
path.append(displayName);
final TreeNode parent = current.getParent();
if (!(parent instanceof MyNode)) break;
current = (MyNode)parent;
}
return path.toString();
}
  /** Key under which this component's state is persisted; null disables persistence. */
  @Nullable
  @NonNls
  protected String getComponentStateKey() {
    return null;
  }
  /** Service used to persist state; null disables persistence. */
  @Nullable
  protected MasterDetailsStateService getStateService() {
    return null;
  }
  protected MasterDetailsState getState() {
    return myState;
  }
  protected void loadState(final MasterDetailsState object) {
    XmlSerializerUtil.copyBean(object, myState);
  }
  /** Saves UI state, disposes every configurable and clears the tree. */
  public void disposeUIResources() {
    myState.getProportions().saveSplitterProportions(myWholePanel);
    myAutoScrollHandler.cancelAllRequests();
    myDetails.disposeUIResources();
    myInitializedConfigurables.clear();
    clearChildren();
    final String key = getComponentStateKey();
    final MasterDetailsStateService stateService = getStateService();
    if (key != null && stateService != null) {
      stateService.setComponentState(key, getState());
    }
    myCurrentConfigurable = null;
  }
  /** Disposes every node's configurable and detaches user objects (except the root's). */
  protected void clearChildren() {
    TreeUtil.traverseDepth(myRoot, new TreeUtil.Traverse() {
      public boolean accept(Object node) {
        if (node instanceof MyNode) {
          final MyNode treeNode = ((MyNode)node);
          treeNode.getConfigurable().disposeUIResources();
          if (!(treeNode instanceof MyRootNode)) {
            treeNode.setUserObject(null);
          }
        }
        return true;
      }
    });
    myRoot.removeAllChildren();
  }
  /** @param fromPopup true when building context-menu actions rather than the toolbar's */
  @Nullable
  protected ArrayList<AnAction> createActions(final boolean fromPopup) {
    return null;
  }
  /** Configures the tree: model root, renderer, toolbar and context menu. */
  protected void initTree() {
    ((DefaultTreeModel)myTree.getModel()).setRoot(myRoot);
    myTree.setRootVisible(false);
    myTree.setShowsRootHandles(true);
    UIUtil.setLineStyleAngled(myTree);
    TreeUtil.installActions(myTree);
    myTree.setCellRenderer(new ColoredTreeCellRenderer() {
      public void customizeCellRenderer(JTree tree,
                                        Object value,
                                        boolean selected,
                                        boolean expanded,
                                        boolean leaf,
                                        int row,
                                        boolean hasFocus) {
        if (value instanceof MyNode) {
          final MyNode node = ((MyNode)value);
          setIcon(node.getIcon(expanded));
          // Bold rendering marks nodes flagged via MyNode.setDisplayInBold().
          final Font font = UIUtil.getTreeFont();
          if (node.isDisplayInBold()) {
            setFont(font.deriveFont(Font.BOLD));
          }
          else {
            setFont(font.deriveFont(Font.PLAIN));
          }
          append(node.getDisplayName(),
                 node.isDisplayInBold() ? SimpleTextAttributes.REGULAR_BOLD_ATTRIBUTES : SimpleTextAttributes.REGULAR_ATTRIBUTES);
        }
      }
    });
    initToolbar();
    ArrayList<AnAction> actions = createActions(true);
    if (actions != null) {
      final DefaultActionGroup group = new DefaultActionGroup();
      for (AnAction action : actions) {
        group.add(action);
      }
      // Extra actions go after a separator in the context menu.
      actions = getAdditionalActions();
      if (actions != null) {
        group.addSeparator();
        for (AnAction action : actions) {
          group.add(action);
        }
      }
      PopupHandler
        .installPopupHandler(myTree, group, ActionPlaces.UNKNOWN, ActionManager.getInstance()); //popup should follow the selection
    }
  }
  /** Additional context-menu actions appended after a separator; null for none. */
  @Nullable
  protected ArrayList<AnAction> getAdditionalActions() {
    return null;
  }
  public void fireItemsChangeListener(final Object editableObject) {
    for (ItemsChangeListener listener : myListeners) {
      listener.itemChanged(editableObject);
    }
  }
  private void fireItemsChangedExternally() {
    for (ItemsChangeListener listener : myListeners) {
      listener.itemsExternallyChanged();
    }
  }
  /** GUI-designer factory: a tree with a widened viewport and multi-line tooltips. */
  private void createUIComponents() {
    myTree = new Tree() {
      public Dimension getPreferredScrollableViewportSize() {
        Dimension size = super.getPreferredScrollableViewportSize();
        size = new Dimension(size.width + 20, size.height);
        return size;
      }
      @SuppressWarnings({"NonStaticInitializer"})
      public JToolTip createToolTip() {
        final JToolTip toolTip = new JToolTip() {
          {
            setUI(new MultiLineTooltipUI());
          }
        };
        toolTip.setComponent(this);
        return toolTip;
      }
    };
  }
  /** Inserts the node at its sorted position under parent (binary search). */
  protected void addNode(MyNode nodeToAdd, MyNode parent) {
    int i = TreeUtil.indexedBinarySearch(parent, nodeToAdd, getNodeComparator());
    // A negative result encodes the insertion point as -(index) - 1.
    int insertionPoint = i >= 0 ? i : -i - 1;
    ((DefaultTreeModel)myTree.getModel()).insertNodeInto(nodeToAdd, parent, insertionPoint);
  }
  protected void sortDescendants(MyNode root) {
    TreeUtil.sort(root, getNodeComparator());
    ((DefaultTreeModel)myTree.getModel()).reload(root);
  }
  /** Natural-order comparison of node display names. */
  protected Comparator<MyNode> getNodeComparator() {
    return new Comparator<MyNode>() {
      public int compare(final MyNode o1, final MyNode o2) {
        return StringUtil.naturalCompare(o1.getDisplayName(), o2.getDisplayName());
      }
    };
  }
  public ActionCallback selectNodeInTree(final DefaultMutableTreeNode nodeToSelect) {
    return selectNodeInTree(nodeToSelect, true, false);
  }
  public ActionCallback selectNodeInTree(final DefaultMutableTreeNode nodeToSelect, boolean requestFocus) {
    return selectNodeInTree(nodeToSelect, true, requestFocus);
  }
  /** Selects the given node (or the first node when null), optionally centering/focusing. */
  public ActionCallback selectNodeInTree(final DefaultMutableTreeNode nodeToSelect, boolean center, final boolean requestFocus) {
    if (requestFocus) {
      myTree.requestFocus();
    }
    if (nodeToSelect != null) {
      return TreeUtil.selectInTree(nodeToSelect, requestFocus, myTree, center);
    }
    else {
      return TreeUtil.selectFirstNode(myTree);
    }
  }
@Nullable
public Object getSelectedObject() {
final TreePath selectionPath = myTree.getSelectionPath();
if (selectionPath != null && selectionPath.getLastPathComponent() instanceof MyNode) {
MyNode node = (MyNode)selectionPath.getLastPathComponent();
final NamedConfigurable configurable = node.getConfigurable();
LOG.assertTrue(configurable != null, "already disposed");
return configurable.getEditableObject();
}
return null;
}
@Nullable
public NamedConfigurable getSelectedConfigurable() {
final TreePath selectionPath = myTree.getSelectionPath();
if (selectionPath != null) {
MyNode node = (MyNode)selectionPath.getLastPathComponent();
final NamedConfigurable configurable = node.getConfigurable();
LOG.assertTrue(configurable != null, "already disposed");
return configurable;
}
return null;
}
  /** Selects the node whose configurable has the given display name. */
  public void selectNodeInTree(String displayName) {
    final MyNode nodeByName = findNodeByName(myRoot, displayName);
    selectNodeInTree(nodeByName, true);
  }
  /** Selects the node whose configurable edits the given object. */
  public void selectNodeInTree(final Object object) {
    selectNodeInTree(findNodeByObject(myRoot, object), true);
  }
  @Nullable
  protected static MyNode findNodeByName(final TreeNode root, final String profileName) {
    if (profileName == null) return null; //do not suggest root node
    return findNodeByCondition(root, new Condition<NamedConfigurable>() {
      public boolean value(final NamedConfigurable configurable) {
        return Comparing.strEqual(profileName, configurable.getDisplayName());
      }
    });
  }
  @Nullable
  public static MyNode findNodeByObject(final TreeNode root, final Object editableObject) {
    if (editableObject == null) return null; //do not suggest root node
    return findNodeByCondition(root, new Condition<NamedConfigurable>() {
      public boolean value(final NamedConfigurable configurable) {
        return Comparing.equal(editableObject, configurable.getEditableObject());
      }
    });
  }
  /** Depth-first search for the first node whose configurable matches the condition. */
  protected static MyNode findNodeByCondition(final TreeNode root, final Condition<NamedConfigurable> condition) {
    final MyNode[] nodeToSelect = new MyNode[1];
    TreeUtil.traverseDepth(root, new TreeUtil.Traverse() {
      public boolean accept(Object node) {
        if (condition.value(((MyNode)node).getConfigurable())) {
          nodeToSelect[0] = (MyNode)node;
          return false; // found — stop traversal
        }
        return true;
      }
    });
    return nodeToSelect[0];
  }
  /** Remembers the selection in the state and shows the node's configurable. */
  protected void setSelectedNode(@Nullable MyNode node) {
    if (node != null) {
      myState.setLastEditedConfigurable(getNodePathString(node));
    }
    updateSelection(node != null ? node.getConfigurable() : null);
  }
  /** Swaps the detail pane to the given configurable (null shows the empty text). */
  protected void updateSelection(@Nullable NamedConfigurable configurable) {
    myDetails.setText(configurable != null ? configurable.getBannerSlogan() : null);
    myCurrentConfigurable = configurable;
    if (configurable != null) {
      final JComponent comp = configurable.createComponent();
      if (comp == null) {
        setEmpty();
        LOG.error("createComponent() returned null. configurable=" + configurable);
      } else {
        myDetails.setContent(comp);
        ensureInitialized(configurable);
        myHistory.pushPlaceForElement(TREE_OBJECT, configurable.getEditableObject());
      }
    } else {
      setEmpty();
    }
  }
  /** Resets the configurable on first display so it shows current model values. */
  public void ensureInitialized(NamedConfigurable configurable) {
    if (!isInitialized(configurable)) {
      configurable.reset();
      initializeConfigurable(configurable);
    }
  }
  private void setEmpty() {
    myDetails.setContent(null);
    myDetails.setEmptyContentText(getEmptySelectionString());
  }
  /** Help topic of the currently shown configurable, or null. */
  public String getHelpTopic() {
    if (myCurrentConfigurable != null) {
      return myCurrentConfigurable.getHelpTopic();
    }
    return null;
  }
  /** Text shown in the detail pane when nothing is selected; null for the default. */
  protected @Nullable String getEmptySelectionString() {
    return null;
  }
  protected void initializeConfigurable(final NamedConfigurable configurable) {
    myInitializedConfigurables.add(configurable);
  }
  /**
   * @deprecated use {@link #checkForEmptyAndDuplicatedNames(String, String, Class)} instead
   */
  protected void checkApply(Set<MyNode> rootNodes, String prefix, String title) throws ConfigurationException {
    for (MyNode rootNode : rootNodes) {
      checkForEmptyAndDuplicatedNames(rootNode, prefix, title, NamedConfigurable.class, false);
    }
  }
  /** Validates all names under the root, recursively. */
  protected final void checkForEmptyAndDuplicatedNames(String prefix, String title,
                                                       Class<? extends NamedConfigurable> configurableClass) throws ConfigurationException {
    checkForEmptyAndDuplicatedNames(myRoot, prefix, title, configurableClass, true);
  }
  /**
   * Rejects blank or duplicate display names among children of rootNode whose
   * configurable is of the given class; selects the offending node before throwing.
   *
   * @throws ConfigurationException on the first blank or duplicate name found
   */
  private void checkForEmptyAndDuplicatedNames(MyNode rootNode,
                                               String prefix,
                                               String title,
                                               Class<? extends NamedConfigurable> configurableClass,
                                               boolean recursively) throws ConfigurationException {
    final Set<String> names = new HashSet<String>();
    for (int i = 0; i < rootNode.getChildCount(); i++) {
      final MyNode node = (MyNode)rootNode.getChildAt(i);
      final NamedConfigurable scopeConfigurable = node.getConfigurable();
      if (configurableClass.isInstance(scopeConfigurable)) {
        final String name = scopeConfigurable.getDisplayName();
        if (name.trim().length() == 0) {
          selectNodeInTree(node);
          throw new ConfigurationException("Name should contain non-space characters");
        }
        if (names.contains(name)) {
          // Keep the current selection when it already shows the duplicated name.
          final NamedConfigurable selectedConfigurable = getSelectedConfigurable();
          if (selectedConfigurable == null || !Comparing.strEqual(selectedConfigurable.getDisplayName(), name)) {
            selectNodeInTree(node);
          }
          throw new ConfigurationException(CommonBundle.message("smth.already.exist.error.message", prefix, name), title);
        }
        names.add(name);
      }
      if (recursively) {
        checkForEmptyAndDuplicatedNames(node, prefix, title, configurableClass, true);
      }
    }
  }
  public Tree getTree() {
    return myTree;
  }
  /** Removes the given paths' nodes, then selects a sensible neighboring node. */
  protected void removePaths(final TreePath... paths) {
    MyNode parentNode = null;
    int idx = -1;
    for (TreePath path : paths) {
      final MyNode node = (MyNode)path.getLastPathComponent();
      final NamedConfigurable namedConfigurable = node.getConfigurable();
      final Object editableObject = namedConfigurable.getEditableObject();
      parentNode = (MyNode)node.getParent();
      idx = parentNode.getIndex(node);
      ((DefaultTreeModel)myTree.getModel()).removeNodeFromParent(node);
      myHasDeletedItems |= wasObjectStored(editableObject);
      fireItemsChangeListener(editableObject);
      onItemDeleted(editableObject);
      namedConfigurable.disposeUIResources();
    }
    if (paths.length > 0) {
      // Re-select in preference order: same index, previous sibling,
      // first child, visible root, or finally the first node of the tree.
      if (parentNode != null && idx != -1) {
        DefaultMutableTreeNode toSelect = null;
        if (idx < parentNode.getChildCount()) {
          toSelect = (DefaultMutableTreeNode) parentNode.getChildAt(idx);
        } else {
          if (idx > 0 && parentNode.getChildCount() > 0) {
            if (idx - 1 < parentNode.getChildCount()) {
              toSelect = (DefaultMutableTreeNode) parentNode.getChildAt(idx - 1);
            } else {
              toSelect = (DefaultMutableTreeNode) parentNode.getFirstChild();
            }
          } else {
            if (parentNode.isRoot() && myTree.isRootVisible()) {
              toSelect = parentNode;
            } else if (parentNode.getChildCount() > 0) {
              toSelect = (DefaultMutableTreeNode) parentNode.getFirstChild();
            }
          }
        }
        if (toSelect != null) {
          TreeUtil.selectInTree(toSelect, true, myTree);
        }
      }
      else {
        TreeUtil.selectFirstNode(myTree);
      }
    }
  }
  /** Hook invoked after an item has been removed from the tree. */
  protected void onItemDeleted(Object item) {
  }
  /** Delete action for the toolbar/context menu, gated by an availability condition. */
  protected class MyDeleteAction extends AnAction implements DumbAware {
    private final Condition<Object[]> myCondition;
    public MyDeleteAction() {
      this(Conditions.<Object[]>alwaysTrue());
    }
    /** @param availableCondition decides, given the selected nodes, whether delete is enabled */
    public MyDeleteAction(Condition<Object[]> availableCondition) {
      super(CommonBundle.message("button.delete"), CommonBundle.message("button.delete"), PlatformIcons.DELETE_ICON);
      registerCustomShortcutSet(CommonShortcuts.getDelete(), myTree);
      myCondition = availableCondition;
    }
    public void update(AnActionEvent e) {
      final Presentation presentation = e.getPresentation();
      presentation.setEnabled(false);
      final TreePath[] selectionPath = myTree.getSelectionPaths();
      if (selectionPath != null) {
        Object[] nodes = ContainerUtil.map2Array(selectionPath, new Function<TreePath, Object>() {
          @Override
          public Object fun(TreePath treePath) {
            return treePath.getLastPathComponent();
          }
        });
        if (!myCondition.value(nodes)) return;
        presentation.setEnabled(true);
      }
    }
    public void actionPerformed(AnActionEvent e) {
      removePaths(myTree.getSelectionPaths());
    }
  }
protected static Condition<Object[]> forAll(final Condition<Object> condition) {
return new Condition<Object[]>() {
@Override
public boolean value(Object[] objects) {
for (Object object : objects) {
if (!condition.value(object)) return false;
}
return true;
}
};
}
  /** Tree node wrapping a NamedConfigurable; optionally rendered in bold. */
  public static class MyNode extends DefaultMutableTreeNode {
    private boolean myDisplayInBold;
    public MyNode(@NotNull NamedConfigurable userObject) {
      super(userObject);
    }
    public MyNode(@NotNull NamedConfigurable userObject, boolean displayInBold) {
      super(userObject);
      myDisplayInBold = displayInBold;
    }
    @NotNull
    public String getDisplayName() {
      final NamedConfigurable configurable = ((NamedConfigurable)getUserObject());
      LOG.assertTrue(configurable != null, "Tree was already disposed");
      return configurable.getDisplayName();
    }
    public NamedConfigurable getConfigurable() {
      return (NamedConfigurable)getUserObject();
    }
    public boolean isDisplayInBold() {
      return myDisplayInBold;
    }
    public void setDisplayInBold(boolean displayInBold) {
      myDisplayInBold = displayInBold;
    }
    @Nullable
    public Icon getIcon(boolean expanded) {
      // thanks to invokeLater() in TreeUtil.showAndSelect(), we can get calls to getIcon() after the tree has been disposed
      final NamedConfigurable configurable = getConfigurable();
      if (configurable != null) {
        return configurable.getIcon(expanded);
      }
      return null;
    }
  }
  /** Invisible root node: a MyNode backed by a do-nothing configurable. */
  @SuppressWarnings({"ConstantConditions"})
  protected static class MyRootNode extends MyNode {
    public MyRootNode() {
      super(new NamedConfigurable(false, null) {
        public void setDisplayName(String name) {
        }
        public Object getEditableObject() {
          return null;
        }
        public String getBannerSlogan() {
          return null;
        }
        public String getDisplayName() {
          return "";
        }
        @Nullable
        @NonNls
        public String getHelpTopic() {
          return null;
        }
        public JComponent createOptionsPanel() {
          return null;
        }
        public boolean isModified() {
          return false;
        }
        public void apply() throws ConfigurationException {
        }
        public void reset() {
        }
        public void disposeUIResources() {
        }
      }, false);
    }
  }
  /** Notified when items are changed or deleted in the master tree. */
  protected interface ItemsChangeListener {
    void itemChanged(@Nullable Object deletedItem);
    void itemsExternallyChanged();
  }
  /** An action group plus the index of the item to preselect in its popup. */
  public interface ActionGroupWithPreselection {
    ActionGroup getActionGroup();
    int getDefaultIndex();
  }
  /** Presents an ActionGroup as a list popup when invoked from the toolbar. */
  protected class MyActionGroupWrapper extends AnAction implements DumbAware {
    private ActionGroup myActionGroup;
    private ActionGroupWithPreselection myPreselection;
    public MyActionGroupWrapper(final ActionGroupWithPreselection actionGroup) {
      this(actionGroup.getActionGroup());
      myPreselection = actionGroup;
    }
    public MyActionGroupWrapper(final ActionGroup actionGroup) {
      super(actionGroup.getTemplatePresentation().getText(), actionGroup.getTemplatePresentation().getDescription(),
            actionGroup.getTemplatePresentation().getIcon());
      myActionGroup = actionGroup;
      registerCustomShortcutSet(actionGroup.getShortcutSet(), myTree);
    }
    public void actionPerformed(AnActionEvent e) {
      final JBPopupFactory popupFactory = JBPopupFactory.getInstance();
      final ListPopupStep step = popupFactory.createActionsStep(myActionGroup, e.getDataContext(), false, false,
                                                                myActionGroup.getTemplatePresentation().getText(), myTree, true,
                                                                myPreselection != null ? myPreselection.getDefaultIndex() : 0, true);
      final ListPopup listPopup = popupFactory.createListPopup(step);
      listPopup.setHandleAutoSelectionBeforeShow(true);
      // Toolbar-button events know where to anchor their popup; otherwise use the north panel.
      if (e instanceof AnActionButton.AnActionEventWrapper) {
        ((AnActionButton.AnActionEventWrapper)e).showPopup(listPopup);
      } else {
        listPopup.showUnderneathOf(myNorthPanel);
      }
    }
  }
  // MasterDetails implementation: handing out an individual part forces a
  // rebuild of the composite panel next time it is requested.
  public JComponent getToolbar() {
    myToReInitWholePanel = true;
    return myNorthPanel;
  }
  public JComponent getMaster() {
    myToReInitWholePanel = true;
    return myMaster;
  }
  public DetailsComponent getDetails() {
    myToReInitWholePanel = true;
    return myDetails;
  }
  public void initUi() {
    createComponent();
  }
}
| |
package sf.codingcomp.blocks;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNotSame;
import static org.junit.Assert.assertTrue;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import sf.codingcomp.blocks.solution.PolyBlockImpl;
/** Tests for PolyBlock: iteration over connected components, connection counts and sizes. */
public class PolyBlockTest {
  private PolyBlock block1;
  private PolyBlock block2;
  private PolyBlock block3;
  private PolyBlock block4;
  private PolyBlock block5;
  // Fresh, unconnected blocks before every test.
  @Before
  public void setUp() throws Exception {
    block1 = new PolyBlockImpl();
    block2 = new PolyBlockImpl();
    block3 = new PolyBlockImpl();
    block4 = new PolyBlockImpl();
    block5 = new PolyBlockImpl();
  }
  @After
  public void tearDown() throws Exception {
    // NOTE(review): the setBlockN/getBlockN accessors are presumably defined
    // later in this file — confirm they are plain wrappers around the fields above.
    setBlock1(null);
    setBlock2(null);
    setBlock3(null);
    setBlock4(null);
    setBlock5(null);
  }
  @Test
  public void testIterator() {
    // The iterator must exist even for an isolated block.
    assertNotNull(getBlock1().iterator());
  }
  @Test
  public void testIteratorWithOneElement() {
    // An unconnected block iterates over exactly itself.
    Iterator<PolyBlock> it = getBlock1().iterator();
    assertTrue(it.hasNext());
    assertEquals(getBlock1(), it.next());
  }
  @Test
  public void testIteratorWithTwoElements() {
    getBlock1().connect(getBlock2());
    List<PolyBlock> fbs = new ArrayList<PolyBlock>();
    for(PolyBlock fb : getBlock1()) {
      fbs.add(fb);
    }
    assertEquals(2, fbs.size());
    assertTrue(fbs.contains(getBlock1()));
    assertTrue(fbs.contains(getBlock2()));
  }
  @Test
  public void testIteratorWithThreeElementsInSeries() {
    // Chain 1-2-3: iteration must reach indirect neighbors.
    getBlock1().connect(getBlock2());
    getBlock2().connect(getBlock3());
    List<PolyBlock> fbs = new ArrayList<PolyBlock>();
    for(PolyBlock fb : getBlock1()) {
      fbs.add(fb);
    }
    assertEquals(3, fbs.size());
    assertTrue(fbs.contains(getBlock1()));
    assertTrue(fbs.contains(getBlock2()));
    assertTrue(fbs.contains(getBlock3()));
  }
  @Test
  public void testIteratorWithThreeElementsInATriangle() {
    // Cycle 1-2-3-1: iteration must terminate and visit each block once.
    getBlock1().connect(getBlock2());
    getBlock2().connect(getBlock3());
    getBlock3().connect(getBlock1());
    List<PolyBlock> fbs = new ArrayList<PolyBlock>();
    for(PolyBlock fb : getBlock1()) {
      fbs.add(fb);
    }
    assertEquals(3, fbs.size());
    assertTrue(fbs.contains(getBlock1()));
    assertTrue(fbs.contains(getBlock2()));
    assertTrue(fbs.contains(getBlock3()));
  }
  @Test
  public void testIteratorWithFiveElementsInAPlus() {
    // Star topology: block1 in the center, blocks 2-5 as leaves.
    getBlock1().connect(getBlock2());
    getBlock1().connect(getBlock3());
    getBlock1().connect(getBlock4());
    getBlock1().connect(getBlock5());
    List<PolyBlock> fbs = new ArrayList<PolyBlock>();
    for(PolyBlock fb : getBlock1()) {
      fbs.add(fb);
    }
    assertEquals(5, fbs.size());
    assertTrue(fbs.contains(getBlock1()));
    assertTrue(fbs.contains(getBlock2()));
    assertTrue(fbs.contains(getBlock3()));
    assertTrue(fbs.contains(getBlock4()));
    assertTrue(fbs.contains(getBlock5()));
  }
  @Test
  public void testIteratorWithFiveElementsInBothAStarAndPentagon() {
    // Dense cyclic topology; connectAsStarAndPentagram() is defined later in
    // this file — presumably connects every pair of blocks. TODO confirm.
    connectAsStarAndPentagram();
    List<PolyBlock> fbs = new ArrayList<PolyBlock>();
    for(PolyBlock fb : getBlock1()) {
      fbs.add(fb);
    }
    assertEquals(5, fbs.size());
    assertTrue(fbs.contains(getBlock1()));
    assertTrue(fbs.contains(getBlock2()));
    assertTrue(fbs.contains(getBlock3()));
    assertTrue(fbs.contains(getBlock4()));
    assertTrue(fbs.contains(getBlock5()));
  }
  @Test
  public void testConnectWithNull() {
    // Connecting to null is a no-op: no connection gained, size stays 1.
    assertEquals(0, getBlock1().connections());
    assertEquals(1, getBlock1().size());
    getBlock1().connect(null);
    assertEquals(0, getBlock1().connections());
    assertEquals(1, getBlock1().size());
  }
@Test
public void testConnectWithAnotherPolyBlock() {
assertEquals(0, getBlock1().connections());
assertEquals(0, getBlock2().connections());
assertEquals(1, getBlock1().size());
assertEquals(1, getBlock2().size());
getBlock1().connect(getBlock2());
assertEquals(1, getBlock1().connections());
assertEquals(1, getBlock2().connections());
assertEquals(2, getBlock1().size());
assertEquals(2, getBlock2().size());
}
@Test
public void testConnectWithThreePolyBlocksInSeries() {
    PolyBlock a = getBlock1();
    PolyBlock b = getBlock2();
    PolyBlock c = getBlock3();
    assertEquals(0, a.connections());
    assertEquals(0, b.connections());
    assertEquals(0, c.connections());
    assertEquals(1, a.size());
    assertEquals(1, b.size());
    assertEquals(1, c.size());
    // Chain a - b - c: only the middle block ends up with two connections.
    a.connect(b);
    b.connect(c);
    assertEquals(1, a.connections());
    assertEquals(2, b.connections());
    assertEquals(1, c.connections());
    assertEquals(3, a.size());
    assertEquals(3, b.size());
    assertEquals(3, c.size());
}
@Test
public void testConnectWithThreePolyBlocksInATriangle() {
    PolyBlock a = getBlock1();
    PolyBlock b = getBlock2();
    PolyBlock c = getBlock3();
    assertEquals(0, a.connections());
    assertEquals(0, b.connections());
    assertEquals(0, c.connections());
    assertEquals(1, a.size());
    assertEquals(1, b.size());
    assertEquals(1, c.size());
    // Triangle: every block is directly linked to the other two.
    a.connect(b);
    b.connect(c);
    c.connect(a);
    assertEquals(2, a.connections());
    assertEquals(2, b.connections());
    assertEquals(2, c.connections());
    assertEquals(3, a.size());
    assertEquals(3, b.size());
    assertEquals(3, c.size());
}
@Test
public void testConnectWithFivePolyBlocksInAPlus() {
    verifyPreCondition();
    // Attach each outer block to the central block 1; connect is invoked on
    // the spokes here, exercising the symmetric direction of the call.
    PolyBlock hub = getBlock1();
    getBlock2().connect(hub);
    getBlock3().connect(hub);
    getBlock4().connect(hub);
    getBlock5().connect(hub);
    assertEquals(4, hub.connections());
    assertEquals(1, getBlock2().connections());
    assertEquals(1, getBlock3().connections());
    assertEquals(1, getBlock4().connections());
    assertEquals(1, getBlock5().connections());
    assertEquals(5, hub.size());
    assertEquals(5, getBlock2().size());
    assertEquals(5, getBlock3().size());
    assertEquals(5, getBlock4().size());
    assertEquals(5, getBlock5().size());
}
@Test
public void testConnectWithFivePolyBlocksInBothAStarAndPentagon() {
    verifyPreCondition();
    connectAsStarAndPentagram();
    // Fully connected: each of the five blocks touches the other four.
    PolyBlock[] all = { getBlock1(), getBlock2(), getBlock3(), getBlock4(), getBlock5() };
    for (PolyBlock block : all) {
        assertEquals(4, block.connections());
    }
    for (PolyBlock block : all) {
        assertEquals(5, block.size());
    }
}
@Test
public void testDisconnectWithNull() {
    PolyBlock left = getBlock1();
    PolyBlock right = getBlock2();
    // Disconnecting null from isolated blocks changes nothing.
    left.disconnect(null);
    right.disconnect(null);
    assertEquals(0, left.connections());
    assertEquals(0, right.connections());
    assertEquals(1, left.size());
    assertEquals(1, right.size());
    left.connect(right);
    assertEquals(1, left.connections());
    assertEquals(1, right.connections());
    assertEquals(2, left.size());
    assertEquals(2, right.size());
    // Disconnecting null must also leave an existing link untouched.
    left.disconnect(null);
    right.disconnect(null);
    assertEquals(1, left.connections());
    assertEquals(1, right.connections());
    assertEquals(2, left.size());
    assertEquals(2, right.size());
}
@Test
public void testDisconnect() {
    PolyBlock left = getBlock1();
    PolyBlock right = getBlock2();
    assertEquals(0, left.connections());
    assertEquals(0, right.connections());
    assertEquals(1, left.size());
    assertEquals(1, right.size());
    left.connect(right);
    assertEquals(1, left.connections());
    assertEquals(1, right.connections());
    assertEquals(2, left.size());
    assertEquals(2, right.size());
    // Severing the only link restores both blocks to isolation.
    left.disconnect(right);
    assertEquals(0, left.connections());
    assertEquals(0, right.connections());
    assertEquals(1, left.size());
    assertEquals(1, right.size());
}
@Test
public void testDisconnectWithThreePolyBlocksInSeries() {
    PolyBlock a = getBlock1();
    PolyBlock b = getBlock2();
    PolyBlock c = getBlock3();
    assertEquals(0, a.connections());
    assertEquals(0, b.connections());
    assertEquals(0, c.connections());
    assertEquals(1, a.size());
    assertEquals(1, b.size());
    assertEquals(1, c.size());
    // Build the chain a - b - c ...
    a.connect(b);
    b.connect(c);
    assertEquals(1, a.connections());
    assertEquals(2, b.connections());
    assertEquals(1, c.connections());
    assertEquals(3, a.size());
    assertEquals(3, b.size());
    assertEquals(3, c.size());
    // ... then tear both links down again.
    a.disconnect(b);
    b.disconnect(c);
    assertEquals(0, a.connections());
    assertEquals(0, b.connections());
    assertEquals(0, c.connections());
    assertEquals(1, a.size());
    assertEquals(1, b.size());
    assertEquals(1, c.size());
}
@Test
public void testDisconnectWithThreePolyBlocksInATriangle() {
    verifyPreCondition();
    // BUG FIX: the original never closed the triangle (the 3 -> 1 edge was
    // missing) and never disconnected anything it claimed to, so it merely
    // duplicated the series test. Build a real triangle and verify that
    // tearing down all three edges restores full isolation.
    getBlock1().connect(getBlock2());
    getBlock2().connect(getBlock3());
    getBlock3().connect(getBlock1());
    assertEquals(2, getBlock1().connections());
    assertEquals(2, getBlock2().connections());
    assertEquals(2, getBlock3().connections());
    assertEquals(3, getBlock1().size());
    assertEquals(3, getBlock2().size());
    assertEquals(3, getBlock3().size());
    getBlock1().disconnect(getBlock2());
    getBlock2().disconnect(getBlock3());
    getBlock3().disconnect(getBlock1());
    assertEquals(0, getBlock1().connections());
    assertEquals(0, getBlock2().connections());
    assertEquals(0, getBlock3().connections());
    assertEquals(1, getBlock1().size());
    assertEquals(1, getBlock2().size());
    assertEquals(1, getBlock3().size());
}
@Test
public void testDisconnectWithFiveFacetedBlocksInAPlus() {
    verifyPreCondition();
    PolyBlock hub = getBlock1();
    // Build the plus shape around the central block ...
    hub.connect(getBlock2());
    hub.connect(getBlock3());
    hub.connect(getBlock4());
    hub.connect(getBlock5());
    assertEquals(4, hub.connections());
    assertEquals(1, getBlock2().connections());
    assertEquals(1, getBlock3().connections());
    assertEquals(1, getBlock4().connections());
    assertEquals(1, getBlock5().connections());
    assertEquals(5, hub.size());
    assertEquals(5, getBlock2().size());
    assertEquals(5, getBlock3().size());
    assertEquals(5, getBlock4().size());
    assertEquals(5, getBlock5().size());
    // ... then detach every spoke and expect the initial state back.
    hub.disconnect(getBlock2());
    hub.disconnect(getBlock3());
    hub.disconnect(getBlock4());
    hub.disconnect(getBlock5());
    verifyPreCondition();
}
@Test
public void testDisconnectWithFiveElementsInBothAStarAndPentagram() {
    verifyPreCondition();
    connectAsStarAndPentagram();
    PolyBlock[] all = { getBlock1(), getBlock2(), getBlock3(), getBlock4(), getBlock5() };
    for (PolyBlock block : all) {
        assertEquals(4, block.connections());
    }
    for (PolyBlock block : all) {
        assertEquals(5, block.size());
    }
    // Remove every pairwise edge again; afterwards all blocks must be isolated.
    for (int i = 0; i < all.length; i++) {
        for (int j = i + 1; j < all.length; j++) {
            all[i].disconnect(all[j]);
        }
    }
    verifyPreCondition();
}
private void verifyPreCondition() {
    // All five fixture blocks must be untouched: no links, singleton groups.
    PolyBlock[] all = { getBlock1(), getBlock2(), getBlock3(), getBlock4(), getBlock5() };
    for (PolyBlock block : all) {
        assertEquals(0, block.connections());
    }
    for (PolyBlock block : all) {
        assertEquals(1, block.size());
    }
}
private void connectAsStarAndPentagram() {
    // Link every pair of the five blocks (the complete graph on five nodes,
    // i.e. pentagon edges plus pentagram diagonals).
    PolyBlock[] all = { getBlock1(), getBlock2(), getBlock3(), getBlock4(), getBlock5() };
    for (int i = 0; i < all.length; i++) {
        for (int j = i + 1; j < all.length; j++) {
            all[i].connect(all[j]);
        }
    }
}
@Test
public void testFindWithNoConnections() {
    // An isolated block does not contain any other block.
    assertFalse(getBlock1().contains(getBlock2()));
}
@Test
public void testFindWithOneConnection() {
    PolyBlock left = getBlock1();
    PolyBlock right = getBlock2();
    assertFalse(left.contains(right));
    assertFalse(right.contains(left));
    left.connect(right);
    // contains() is symmetric for a direct link.
    assertTrue(left.contains(right));
    assertTrue(right.contains(left));
}
@Test
public void testFindWithThreeConnectionsInSeries() {
    PolyBlock a = getBlock1();
    PolyBlock b = getBlock2();
    PolyBlock c = getBlock3();
    assertFalse(a.contains(b));
    assertFalse(b.contains(a));
    assertFalse(b.contains(c));
    assertFalse(c.contains(b));
    a.connect(b);
    b.connect(c);
    // Each directly linked pair reports containment in both directions.
    assertTrue(a.contains(b));
    assertTrue(b.contains(a));
    assertTrue(b.contains(c));
    assertTrue(c.contains(b));
}
@Test
public void testFindWithThreeConnectionsInATriangle() {
    // BUG FIX: the original connected block 3 back to block 2 (a duplicate
    // of the edge created one line earlier) instead of closing the triangle
    // with block 1, and it never asserted the 1 <-> 3 relationship.
    getBlock1().connect(getBlock2());
    getBlock2().connect(getBlock3());
    getBlock3().connect(getBlock1());
    assertTrue(getBlock1().contains(getBlock2()));
    assertTrue(getBlock2().contains(getBlock1()));
    assertTrue(getBlock2().contains(getBlock3()));
    assertTrue(getBlock3().contains(getBlock2()));
    assertTrue(getBlock3().contains(getBlock1()));
    assertTrue(getBlock1().contains(getBlock3()));
}
@Test
public void testSize() {
    // A freshly created block forms a group of exactly one.
    assertEquals(1, getBlock1().size());
}
@Test
public void testSizeWithOneConnection() {
    PolyBlock left = getBlock1();
    PolyBlock right = getBlock2();
    assertEquals(1, left.size());
    assertEquals(1, right.size());
    left.connect(right);
    // One link merges the two singletons into a group of two.
    assertEquals(2, left.size());
    assertEquals(2, right.size());
}
@Test
public void testSizeWithThreeConnectionsInSeries() {
    PolyBlock a = getBlock1();
    PolyBlock b = getBlock2();
    PolyBlock c = getBlock3();
    assertEquals(1, a.size());
    assertEquals(1, b.size());
    assertEquals(1, c.size());
    a.connect(b);
    b.connect(c);
    // Chaining transitively merges all three into a single group.
    assertEquals(3, a.size());
    assertEquals(3, b.size());
    assertEquals(3, c.size());
}
@Test
public void testSizeWithThreeConnectionsInATriangle() {
    PolyBlock a = getBlock1();
    PolyBlock b = getBlock2();
    PolyBlock c = getBlock3();
    assertEquals(1, a.size());
    assertEquals(1, b.size());
    assertEquals(1, c.size());
    a.connect(b);
    b.connect(c);
    c.connect(a);
    // The redundant closing edge must not inflate the group size.
    assertEquals(3, a.size());
    assertEquals(3, b.size());
    assertEquals(3, c.size());
}
@Test
public void testSizeWithFiveConnectionsInAPlus() {
    PolyBlock[] all = { getBlock1(), getBlock2(), getBlock3(), getBlock4(), getBlock5() };
    for (PolyBlock block : all) {
        assertEquals(1, block.size());
    }
    // Connect blocks 2..5 to the central block 1.
    for (int i = 1; i < all.length; i++) {
        all[0].connect(all[i]);
    }
    for (PolyBlock block : all) {
        assertEquals(5, block.size());
    }
}
@Test
public void testConnectingAnPolyConnectorToItself() {
    PolyBlock block = getBlock1();
    assertEquals(1, block.size());
    assertEquals(0, block.connections());
    assertFalse(block.contains(block));
    // A self-connect must be rejected: no link, no size change.
    block.connect(block);
    assertEquals(1, block.size());
    assertEquals(0, block.connections());
    assertFalse(block.contains(block));
}
@Test
public void testConnectingAPolyConnectorTwice() {
    PolyBlock left = getBlock1();
    PolyBlock right = getBlock2();
    assertEquals(1, left.size());
    assertEquals(0, left.connections());
    assertFalse(left.contains(right));
    left.connect(right);
    assertEquals(2, left.size());
    assertEquals(1, left.connections());
    assertTrue(left.contains(right));
    // Repeating the same connect must be idempotent.
    left.connect(right);
    assertEquals(2, left.size());
    assertEquals(1, left.connections());
    assertTrue(left.contains(right));
}
@Test
public void testDisconnectingFromAPolyConnectorThatIsNotConnected() {
    PolyBlock block = getBlock1();
    assertEquals(1, block.size());
    assertEquals(0, block.connections());
    // Disconnecting an unrelated block is a harmless no-op.
    block.disconnect(getBlock2());
    assertEquals(1, block.size());
    assertEquals(0, block.connections());
}
@Test
public void testContainsWhenThisDoesNotContainButOneOfMyConnectionsDoes() {
    getBlock1().connect(getBlock2());
    getBlock2().connect(getBlock3());
    // contains() reports only direct neighbours: block 3 is reachable from
    // block 1 through block 2, yet is not "contained" by block 1.
    assertFalse(getBlock1().contains(getBlock3()));
    assertTrue(getBlock1().contains(getBlock2()));
}
@Test
public void testCopyOnePolyConnector() {
    // A copy must be equal to, but not the same instance as, the original.
    PolyBlock duplicate = getBlock1().copy();
    assertEquals(getBlock1(), duplicate);
    assertNotSame(getBlock1(), duplicate);
}
/**
 * Copies an entire formation of PolyConnectors and checks the clone is a
 * deep copy: element-wise equal to the original, but sharing no instances.
 */
@Test
public void testCopyAFormationOfPolyConnectors() {
    verifyPreCondition();
    connectAsStarAndPentagram();
    PolyBlock clone = getBlock1().copy();
    assertEquals(getBlock1(), clone);
    Iterator<PolyBlock> originals = getBlock1().iterator();
    Iterator<PolyBlock> copies = clone.iterator();
    for (int i = 0; i < getBlock1().size(); i++) {
        PolyBlock original = originals.next();
        PolyBlock copied = copies.next();
        assertEquals(original, copied);
        assertNotSame(original, copied);
    }
}
/** @return the first fixture block used by the tests */
public PolyBlock getBlock1() {
    return block1;
}
/** @param aBlock the first fixture block used by the tests */
public void setBlock1(PolyBlock aBlock) {
    this.block1 = aBlock;
}
/** @return the second fixture block used by the tests */
public PolyBlock getBlock2() {
    return block2;
}
/** @param aBlock the second fixture block used by the tests */
public void setBlock2(PolyBlock aBlock) {
    this.block2 = aBlock;
}
/** @return the third fixture block used by the tests */
public PolyBlock getBlock3() {
    return block3;
}
/** @param aBlock the third fixture block used by the tests */
public void setBlock3(PolyBlock aBlock) {
    this.block3 = aBlock;
}
/** @return the fourth fixture block used by the tests */
public PolyBlock getBlock4() {
    return block4;
}
/** @param aBlock the fourth fixture block used by the tests */
public void setBlock4(PolyBlock aBlock) {
    this.block4 = aBlock;
}
/** @return the fifth fixture block used by the tests */
public PolyBlock getBlock5() {
    return block5;
}
/** @param aBlock the fifth fixture block used by the tests */
public void setBlock5(PolyBlock aBlock) {
    this.block5 = aBlock;
}
}
| |
package org.wso2.msf4j.formparam;
/*
* Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.wso2.msf4j.formparam.exception.FormUploadException;
import org.wso2.msf4j.formparam.util.Closeable;
import org.wso2.msf4j.formparam.util.StreamUtil;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.UnsupportedEncodingException;
import java.nio.charset.Charset;
import static java.lang.String.format;
/**
* <p> Low level API for processing file uploads. </p>
*
* <p> This class can be used to process data streams conforming to MIME
* 'multipart' format as defined in
* <a href="http://www.ietf.org/rfc/rfc1867.txt">RFC 1867</a>. Arbitrarily
* large amounts of data in the stream can be processed under constant
* memory usage.
*
* <p> The format of the stream is defined in the following way:<br>
* </p>
* <code>
* multipart-body := preamble 1*encapsulation close-delimiter epilogue<br>
* encapsulation := delimiter body CRLF<br>
* delimiter := "--" boundary CRLF<br>
* close-delimiter := "--" boundary "--"<br>
* preamble := <ignore><br>
* epilogue := <ignore><br>
* body := header-part CRLF body-part<br>
* header-part := 1*header CRLF<br>
* header := header-name ":" header-value<br>
* header-name := <printable ascii characters except ":"><br>
* header-value := <any ascii characters except CR & LF><br>
* body-data := <arbitrary data><br>
* </code>
*
* <p>Note that body-data can contain another multipart entity. There
* is limited support for single pass processing of such nested
* streams. The nested stream is <strong>required</strong> to have a
* boundary token of the same length as the parent stream (see {@link
* #setBoundary(byte[])}).
* </p>
*
* <p>Here is an example of usage of this class.<br>
* </p>
* <pre>
* try {
* MultipartStream multipartStream = new MultipartStream(input, boundary);
* boolean nextPart = multipartStream.skipPreamble();
* OutputStream output;
* while(nextPart) {
* String header = multipartStream.readHeaders();
* // process headers
* // create some output stream
* multipartStream.readBodyData(output);
* nextPart = multipartStream.readBoundary();
* }
* } catch(MultipartStream.MalformedStreamException e) {
* // the stream failed to follow required syntax
* } catch(IOException e) {
* // a read or write error occurred
* }
* </pre>
*
*/
public class MultipartStream {
// ----------------------------------------------------- Manifest constants
/**
* The Carriage Return ASCII character value.
*/
private static final byte CR = 0x0D;
/**
* The Line Feed ASCII character value.
*/
private static final byte LF = 0x0A;
/**
* The dash (-) ASCII character value.
*/
private static final byte DASH = 0x2D;
/**
* The maximum length of <code>header-part</code> that will be
* processed (10 kilobytes = 10240 bytes.).
*/
private static final int HEADER_PART_SIZE_MAX = 10240;
/**
* The default length of the buffer used for processing a request.
*/
private static final int DEFAULT_BUFSIZE = 4096;
/**
* A byte sequence that marks the end of <code>header-part</code>
* (<code>CRLFCRLF</code>).
*/
private static final byte[] HEADER_SEPARATOR = { CR, LF, CR, LF };
/**
* A byte sequence that follows a delimiter that will be
* followed by an encapsulation (<code>CRLF</code>).
*/
private static final byte[] FIELD_SEPARATOR = { CR, LF };
/**
* A byte sequence that follows a delimiter of the last
* encapsulation in the stream (<code>--</code>).
*/
private static final byte[] STREAM_TERMINATOR = { DASH, DASH };
/**
* A byte sequence that precedes a boundary (<code>CRLF--</code>).
*/
private static final byte[] BOUNDARY_PREFIX = { CR, LF, DASH, DASH };
// ----------------------------------------------------------- Data members
/**
* The input stream from which data is read.
*/
private final InputStream input;
/**
* The length of the boundary token plus the leading <code>CRLF--</code>.
*/
private int boundaryLength;
/**
* The amount of data, in bytes, that must be kept in the buffer in order
* to detect delimiters reliably.
*/
private final int keepRegion;
/**
* The byte sequence that partitions the stream.
*/
private final byte[] boundary;
/**
* The length of the buffer used for processing the request.
*/
private final int bufSize;
/**
* The buffer used for processing the request.
*/
private final byte[] buffer;
/**
* The index of first valid character in the buffer.
* <br>
* 0 <= head < bufSize
*/
private int head;
/**
* The index of last valid character in the buffer + 1.
* <br>
* 0 <= tail <= bufSize
*/
private int tail;
/**
* The content encoding to use when reading headers.
*/
private String headerEncoding;
private static final Logger log = LoggerFactory.getLogger(MultipartStream.class);
// ----------------------------------------------------------- Constructors
/**
* <p> Constructs a <code>MultipartStream</code> with a custom size buffer.
* </p>
* <p> Note that the buffer must be at least big enough to contain the
* boundary string, plus 4 characters for CR/LF and double dash, plus at
* least one byte of data. Too small a buffer size setting will degrade
* performance.
* </p>
* @param input The <code>InputStream</code> to serve as a data source.
* @param boundary The token used for dividing the stream into
* <code>encapsulations</code>.
* @param bufSize The size of the buffer to be used, in bytes.
*/
public MultipartStream(InputStream input, byte[] boundary, int bufSize) {
    if (boundary == null) {
        throw new IllegalArgumentException("boundary may not be null");
    }
    // We prepend CR/LF to the boundary to chop trailing CR/LF from
    // body-data tokens.
    this.boundaryLength = boundary.length + BOUNDARY_PREFIX.length;
    if (bufSize < this.boundaryLength + 1) {
        throw new IllegalArgumentException("The buffer size specified for the MultipartStream is too small");
    }
    this.input = input;
    // Ensure the buffer can always hold a full delimiter plus the kept
    // tail region of a possibly straddling delimiter.
    this.bufSize = Math.max(bufSize, boundaryLength * 2);
    this.buffer = new byte[this.bufSize];
    // Stored delimiter layout: CRLF + "--" + boundary token.
    this.boundary = new byte[this.boundaryLength];
    this.keepRegion = this.boundary.length;
    System.arraycopy(BOUNDARY_PREFIX, 0, this.boundary, 0, BOUNDARY_PREFIX.length);
    System.arraycopy(boundary, 0, this.boundary, BOUNDARY_PREFIX.length, boundary.length);
    // The buffer starts out empty: head == tail.
    head = 0;
    tail = 0;
}
/**
 * <p> Constructs a <code>MultipartStream</code> with a default size buffer.</p>
 *
 * @param input The <code>InputStream</code> to serve as a data source.
 * @param boundary The token used for dividing the stream into
 *                 <code>encapsulations</code>.
 * @see #MultipartStream(InputStream, byte[], int)
 */
MultipartStream(InputStream input, byte[] boundary) {
    // Delegate to the main constructor with the default 4 KiB buffer.
    this(input, boundary, DEFAULT_BUFSIZE);
}
// --------------------------------------------------------- Public methods
/**
 * Returns the character encoding used when reading the headers of an
 * individual part, or <code>null</code> when the platform default
 * encoding is in effect.
 *
 * @return The encoding used to read part headers.
 */
public String getHeaderEncoding() {
    return this.headerEncoding;
}
/**
 * Sets the character encoding used when reading the headers of
 * individual parts. Passing <code>null</code> selects the platform
 * default encoding.
 *
 * @param encoding The encoding used to read part headers.
 */
public void setHeaderEncoding(String encoding) {
    this.headerEncoding = encoding;
}
/**
 * Reads a byte from the <code>buffer</code>, and refills it as
 * necessary. Note the returned value is the raw (possibly negative)
 * signed byte, not a 0..255 int.
 *
 * @return The next byte from the input stream.
 * @throws IOException if there is no more data available.
 */
public byte readByte() throws IOException {
    // Buffer depleted ?
    if (head == tail) {
        head = 0;
        // Refill.
        tail = input.read(buffer, head, bufSize);
        if (tail == -1) {
            // No more data available.
            throw new IOException("No more data is available");
        }
    }
    return buffer[head++];
}
/**
 * Skips a <code>boundary</code> token, and checks whether more
 * <code>encapsulations</code> are contained in the stream.
 *
 * @return <code>true</code> if there are more encapsulations in
 *         this stream; <code>false</code> otherwise.
 * @throws MalformedStreamException if the stream ends unexpectedly, or
 *         the bytes after the boundary are neither CRLF nor "--".
 */
public boolean readBoundary() {
    byte[] marker = new byte[2];
    boolean nextChunk;
    // The delimiter is known to start at the buffer head here; step over
    // it and inspect the two bytes that follow.
    head += boundaryLength;
    try {
        marker[0] = readByte();
        if (marker[0] == LF) {
            // Work around IE5 Mac bug with input type=image.
            // Because the boundary delimiter, not including the trailing
            // CRLF, must not appear within any file (RFC 2046, section
            // 5.1.1), we know the missing CR is due to a buggy browser
            // rather than a file containing something similar to a
            // boundary.
            return true;
        }
        marker[1] = readByte();
        if (arrayequals(marker, STREAM_TERMINATOR, 2)) {
            // "--" after the boundary: this was the close-delimiter.
            nextChunk = false;
        } else if (arrayequals(marker, FIELD_SEPARATOR, 2)) {
            // CRLF: another encapsulation follows.
            nextChunk = true;
        } else {
            throw new MalformedStreamException("Unexpected characters follow a boundary");
        }
    } catch (IOException e) {
        throw new MalformedStreamException("Stream ended unexpectedly");
    }
    return nextChunk;
}
/**
 * <p>Changes the boundary token used for partitioning the stream,
 * enabling single pass processing of nested multipart streams.
 * </p>
 * <p>The boundary token of the nested stream is <code>required</code>
 * to be of the same length as the boundary token in the parent stream.
 * Restoring the parent stream boundary token after processing of a
 * nested stream is left to the application.
 * </p>
 * @param boundary The boundary to be used for parsing of the nested
 *                 stream.
 * @throws IllegalBoundaryException if the new token's length differs
 *         from the current one.
 */
public void setBoundary(byte[] boundary) {
    int tokenLength = boundaryLength - BOUNDARY_PREFIX.length;
    if (boundary.length != tokenLength) {
        throw new IllegalBoundaryException("The length of a boundary token can not be changed");
    }
    // Overwrite only the token part; the leading CRLF-- prefix stays put.
    System.arraycopy(boundary, 0, this.boundary, BOUNDARY_PREFIX.length, boundary.length);
}
/**
 * <p>Reads the <code>header-part</code> of the current
 * <code>encapsulation</code>.
 * </p>
 * <p>Headers are returned verbatim to the input stream, including the
 * trailing <code>CRLF</code> marker. Parsing is left to the
 * application.
 * </p>
 *
 * @return The <code>header-part</code> of the current encapsulation.
 * @throws MalformedStreamException if the stream ends before the
 *         CRLFCRLF terminator, or the header section exceeds 10 KiB.
 */
public String readHeaders() {
    // Number of consecutive bytes matched against the CRLFCRLF terminator.
    int i = 0;
    byte b;
    // to support multi-byte characters
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    int size = 0;
    while (i < HEADER_SEPARATOR.length) {
        try {
            b = readByte();
        } catch (IOException e) {
            throw new MalformedStreamException("Stream ended unexpectedly");
        }
        if (++size > HEADER_PART_SIZE_MAX) {
            throw new MalformedStreamException(
                    format("Header section has more than %s bytes (maybe it is not properly terminated)",
                           Integer.valueOf(HEADER_PART_SIZE_MAX)));
        }
        if (b == HEADER_SEPARATOR[i]) {
            i++;
        } else if (b == HEADER_SEPARATOR[0]) {
            // BUG FIX: on a mismatch the original always restarted the
            // match at 0, ignoring that the mismatching byte may itself be
            // the CR that opens the terminator (e.g. a stray CR right
            // before the real CRLFCRLF would desynchronize the match).
            i = 1;
        } else {
            i = 0;
        }
        baos.write(b);
    }
    // Decode with the configured header encoding, falling back to the
    // platform default when it is unsupported or not set.
    String headers = null;
    if (headerEncoding != null) {
        try {
            headers = baos.toString(headerEncoding);
        } catch (UnsupportedEncodingException e) {
            // Fall back to platform default if specified encoding is not
            // supported.
            if (!headerEncoding.equals(Charset.defaultCharset().displayName())) {
                try {
                    headers = baos.toString(Charset.defaultCharset().displayName());
                } catch (UnsupportedEncodingException e1) {
                    throw new FormUploadException("Provided encoding doesn't support", e);
                }
            }
        }
    } else {
        try {
            headers = baos.toString(Charset.defaultCharset().displayName());
        } catch (UnsupportedEncodingException e) {
            throw new FormUploadException("Provided encoding doesn't support", e);
        }
    }
    return headers;
}
/**
 * <p>Reads <code>body-data</code> from the current
 * <code>encapsulation</code> and writes its contents into the
 * output <code>Stream</code>.
 * </p>
 * <p>Arbitrary large amounts of data can be processed by this
 * method using a constant size buffer. (see {@link
 * #MultipartStream(InputStream, byte[], int) constructor}).
 * </p>
 * @param output The <code>Stream</code> to write data into. May
 *               be null, in which case this method is equivalent
 *               to {@link #discardBodyData()}.
 * @return the amount of data written.
 */
public int readBodyData(OutputStream output) {
    // The ItemInputStream stops at the next boundary, so only the current
    // part's bytes are copied.
    return (int) StreamUtil.copy(newInputStream(), output, false); // N.B. Streams.copy closes the input stream
}
/**
 * Creates a new {@link ItemInputStream}.
 *
 * @return A new instance of {@link ItemInputStream}, positioned over the
 *         current <code>body-data</code> part.
 */
ItemInputStream newInputStream() {
    return new ItemInputStream();
}
/**
 * <p> Reads <code>body-data</code> from the current
 * <code>encapsulation</code> and discards it.
 * </p>
 * <p>Use this method to skip encapsulations you don't need or don't
 * understand.</p>
 *
 * @return The amount of data discarded.
 */
public int discardBodyData() {
    // A null output stream makes readBodyData drain without writing.
    return readBodyData(null);
}
/**
 * Finds the beginning of the first <code>encapsulation</code>.
 *
 * @return <code>true</code> if an <code>encapsulation</code> was found in
 *         the stream.
 */
public boolean skipPreamble() {
    // First delimiter may be not preceded with a CRLF: temporarily shift
    // the stored delimiter left by two bytes (dropping its leading CRLF)
    // so a delimiter at the very start of the stream still matches.
    System.arraycopy(boundary, 2, boundary, 0, boundary.length - 2);
    boundaryLength = boundary.length - 2;
    try {
        // Discard all data up to the delimiter.
        discardBodyData();
        // Read boundary - if succeeded, the stream contains an
        // encapsulation.
        return readBoundary();
    } catch (MalformedStreamException e) {
        return false;
    } finally {
        // Restore delimiter.
        System.arraycopy(boundary, 0, boundary, 2, boundary.length - 2);
        boundaryLength = boundary.length;
        boundary[0] = CR;
        boundary[1] = LF;
    }
}
/**
 * Compares the first <code>count</code> bytes of the arrays
 * <code>a</code> and <code>b</code>.
 *
 * @param a The first array to compare.
 * @param b The second array to compare.
 * @param count How many bytes should be compared.
 * @return <code>true</code> if the first <code>count</code> bytes of
 *         <code>a</code> and <code>b</code> are equal.
 */
public static boolean arrayequals(byte[] a, byte[] b, int count) {
    int i = 0;
    while (i < count) {
        if (a[i] != b[i]) {
            return false;
        }
        i++;
    }
    return true;
}
/**
 * Searches for a byte of specified value in the <code>buffer</code>,
 * starting at the specified <code>position</code>.
 *
 * @param value The value to find.
 * @param pos The starting position for searching.
 * @return The position of the byte found, counting from the beginning of
 *         the <code>buffer</code>, or <code>-1</code> if not found.
 */
protected int findByte(byte value, int pos) {
    // Scan only the valid region [pos, tail) of the buffer.
    int index = pos;
    while (index < tail) {
        if (buffer[index] == value) {
            return index;
        }
        index++;
    }
    return -1;
}
/**
 * Searches for the <code>boundary</code> in the <code>buffer</code>
 * region delimited by <code>head</code> and <code>tail</code>.
 *
 * @return The position of the boundary found, counting from the
 *         beginning of the <code>buffer</code>, or <code>-1</code> if
 *         not found.
 */
protected int findSeparator() {
    int first;
    int match = 0;
    // Last offset at which a complete delimiter could still start.
    int maxpos = tail - boundaryLength;
    for (first = head; first <= maxpos && match != boundaryLength; first++) {
        // Jump to the next candidate: a byte equal to the delimiter's first byte.
        first = findByte(boundary[0], first);
        if (first == -1 || first > maxpos) {
            return -1;
        }
        // Compare the rest of the delimiter at this candidate position.
        for (match = 1; match < boundaryLength; match++) {
            if (buffer[first + match] != boundary[match]) {
                break;
            }
        }
    }
    if (match == boundaryLength) {
        // Compensate for the enclosing loop's final increment of 'first'.
        return first - 1;
    }
    return -1;
}
/**
 * Thrown to indicate that the input stream fails to follow the
 * required syntax. Unchecked, so callers that cannot recover need not
 * declare it.
 */
public static class MalformedStreamException extends RuntimeException {
    /**
     * The UID to use when serializing this instance.
     */
    private static final long serialVersionUID = 6466926458059796677L;
    /**
     * Constructs a <code>MalformedStreamException</code> with no
     * detail message.
     */
    public MalformedStreamException() {
        super();
    }
    /**
     * Constructs an <code>MalformedStreamException</code> with
     * the specified detail message.
     *
     * @param message The detail message.
     */
    public MalformedStreamException(String message) {
        super(message);
    }
}
/**
 * Thrown upon attempt of setting an invalid boundary token, i.e. one
 * whose length differs from the current token's (see
 * {@link #setBoundary(byte[])}).
 */
public static class IllegalBoundaryException extends RuntimeException {
    /**
     * The UID to use when serializing this instance.
     */
    private static final long serialVersionUID = -161533165102632918L;
    /**
     * Constructs an <code>IllegalBoundaryException</code> with
     * the specified detail message.
     *
     * @param message The detail message.
     */
    public IllegalBoundaryException(String message) {
        super(message);
    }
}
/**
* An {@link InputStream} for reading an items contents.
*/
public class ItemInputStream extends InputStream implements Closeable {
/**
* The number of bytes, which have been read so far.
*/
private long total;
/**
* The number of bytes, which must be hold, because
* they might be a part of the boundary.
*/
private int pad;
/**
* The current offset in the buffer.
*/
private int pos;
/**
* Whether the stream is already closed.
*/
private boolean closed;
/**
 * Creates a new instance, immediately locating the next boundary in the
 * buffered data so the stream is positioned over the current item.
 */
ItemInputStream() {
    findSeparator();
}
/**
 * Called for finding the separator. Records its buffer position in
 * {@code pos}, or, when absent, computes how many trailing bytes must be
 * held back in {@code pad}.
 */
private void findSeparator() {
    pos = MultipartStream.this.findSeparator();
    if (pos == -1) {
        // No boundary in the buffered data: hold back up to keepRegion
        // bytes, because a delimiter could straddle the buffer end.
        if (tail - head > keepRegion) {
            pad = keepRegion;
        } else {
            pad = tail - head;
        }
    }
}
/**
 * Returns the number of bytes, which have been read
 * by the stream.
 *
 * @return Number of bytes, which have been read so far.
 */
public long getBytesRead() {
    // 'total' is maintained by read(), read(byte[],int,int) and makeAvailable().
    return total;
}
/**
 * Returns the number of bytes, which are currently
 * available, without blocking.
 *
 * @return Number of bytes in the buffer.
 */
@Override
public int available() {
    // Without a located boundary everything except the held-back pad bytes
    // may be consumed; otherwise stop right before the boundary.
    return pos == -1 ? tail - head - pad : pos - head;
}
/**
* Offset when converting negative bytes to integers.
*/
private static final int BYTE_POSITIVE_OFFSET = 256;
/**
 * Returns the next byte in the stream.
 *
 * @return The next byte in the stream, as a non-negative
 *         integer, or -1 for EOF.
 * @throws IOException An I/O error occurred.
 */
@Override
public int read() throws IOException {
    if (closed) {
        throw new FormItem.ItemSkippedException();
    }
    if (available() == 0 && makeAvailable() == 0) {
        return -1;
    }
    ++total;
    // Mask the signed byte into an unsigned 0..255 value.
    return buffer[head++] & 0xFF;
}
/**
 * Reads bytes into the given buffer.
 *
 * @param b The destination buffer, where to write to.
 * @param off Offset of the first byte in the buffer.
 * @param len Maximum number of bytes to read.
 * @return Number of bytes, which have been actually read,
 *         or -1 for EOF.
 * @throws IOException An I/O error occurred.
 */
@Override
public int read(byte[] b, int off, int len) throws IOException {
    if (closed) {
        throw new FormItem.ItemSkippedException();
    }
    if (len == 0) {
        return 0;
    }
    int res = available();
    if (res == 0) {
        // Nothing buffered for this item; try to pull more from the stream.
        res = makeAvailable();
        if (res == 0) {
            // The item's boundary has been reached: EOF for this item.
            return -1;
        }
    }
    // Never hand out more than was requested or is safely available.
    res = Math.min(res, len);
    System.arraycopy(buffer, head, b, off, res);
    head += res;
    total += res;
    return res;
}
/**
* Closes the input stream.
*
* @throws IOException An I/O error occurred.
*/
@Override
public void close() throws IOException {
close(false);
}
/**
* Closes the input stream.
*
* @param pCloseUnderlying Whether to close the underlying stream
* (hard close)
* @throws IOException An I/O error occurred.
*/
public void close(boolean pCloseUnderlying) throws IOException {
if (closed) {
return;
}
if (pCloseUnderlying) {
closed = true;
input.close();
} else {
for (;;) {
int av = available();
if (av == 0) {
av = makeAvailable();
if (av == 0) {
break;
}
}
long skip = skip(av);
if (skip != av) {
if (log.isDebugEnabled()) {
log.debug(skip + " bytes been skipped.");
}
}
}
}
closed = true;
}
/**
* Skips the given number of bytes.
*
* @param bytes Number of bytes to skip.
* @return The number of bytes, which have actually been
* skipped.
*/
@Override
public long skip(long bytes) {
if (closed) {
throw new FormItem.ItemSkippedException();
}
int av = available();
if (av == 0) {
av = makeAvailable();
if (av == 0) {
return 0;
}
}
long res = Math.min(av, bytes);
head += res;
return res;
}
/**
* Attempts to read more data.
*
* @return Number of available bytes
*/
private int makeAvailable() {
if (pos != -1) {
return 0;
}
// Move the data to the beginning of the buffer.
total += tail - head - pad;
System.arraycopy(buffer, tail - pad, buffer, 0, pad);
// Refill buffer with new data.
head = 0;
tail = pad;
for (;;) {
int bytesRead = 0;
try {
bytesRead = input.read(buffer, tail, bufSize - tail);
} catch (IOException e) {
throw new RuntimeException("Error while reading multipart stream");
}
if (bytesRead == -1) {
// The last pad amount is left in the buffer.
// Boundary can't be in there so signal an error
// condition.
final String msg = "Stream ended unexpectedly";
throw new MalformedStreamException(msg);
}
tail += bytesRead;
findSeparator();
int av = available();
if (av > 0 || pos != -1) {
return av;
}
}
}
/**
* Returns, whether the stream is closed.
*
* @return True, if the stream is closed, otherwise false.
*/
public boolean isClosed() {
return closed;
}
}
}
| |
/* File BufMgr.java */
package bufmgr;
import java.io.*;
import java.util.*;
import diskmgr.*;
import global.*;
/** A frame description class. It describes each page in the buffer
* pool, the page number in the file, whether it is dirty or not,
* its pin count, and the pin count change when pinning or unpinning
* a page.
*/
/** Describes one frame of the buffer pool: which disk page (if any) it
 * currently holds, whether that page has been modified since it was read,
 * and how many pins are outstanding on it.
 */
class FrameDesc implements GlobalConst {

    /** The page within file, or INVALID_PAGE if the frame is empty. */
    public PageId pageNo;

    /** Dirty flag: true when the buffered copy differs from disk. */
    public boolean dirty;

    /** The pin count for the page in this frame. */
    public int pin_cnt;

    /** Builds an empty, clean, unpinned frame descriptor. */
    public FrameDesc() {
        pin_cnt = 0;
        dirty = false;
        pageNo = new PageId();
        pageNo.pid = INVALID_PAGE;
    }

    /** Reports how many pins are currently held on this frame.
     *
     * @return the pin count number.
     */
    public int pin_count() {
        return pin_cnt;
    }

    /** Registers one additional pin on this frame.
     *
     * @return the incremented pin count.
     */
    public int pin() {
        pin_cnt = pin_cnt + 1;
        return pin_cnt;
    }

    /** Releases one pin; the count is clamped so it never drops
     * below zero.
     *
     * @return the decremented pin count.
     */
    public int unpin() {
        if (pin_cnt > 0) {
            pin_cnt = pin_cnt - 1;
        } else {
            pin_cnt = 0;
        }
        return pin_cnt;
    }
}
// *****************************************************
/** A buffer hashtable entry description class. It describes
* each entry for the buffer hash table, the page number and
* frame number for that page, the pointer points to the next
* hash table entry.
*/
class BufHTEntry {
/** The next entry in this hashtable bucket; null terminates the chain. */
public BufHTEntry next;
/** This page number (the key of this entry). */
public PageId pageNo = new PageId();
/** The buffer-pool frame we are stored in. */
public int frameNo;
}
// *****************************************************
/** A buffer hashtable to keep track of pages in the buffer pool.
* It inserts, retrieves and removes pages from the hash table.
*/
class BufHashTbl implements GlobalConst {

    /** Hash Table size, small number for debugging. */
    private static final int HTSIZE = 20;

    /** Bucket array; each slot is the head of a chain of BufHTEntrys,
     * null means the bucket is empty.
     */
    private BufHTEntry ht[] = new BufHTEntry[HTSIZE];

    /** Maps a page number to its bucket index, between 0 and HTSIZE-1.
     *
     * @param pageNo the page number for the page in file.
     * @return the bucket number in the hash table.
     */
    private int hash(PageId pageNo) {
        return pageNo.pid % HTSIZE;
    }

    /** Creates a buffer hash table object with all buckets empty. */
    public BufHashTbl() {
        Arrays.fill(ht, null);
    }

    /** Records the association between page pageNo and frame frameNo
     * in the hash table.
     *
     * @param pageNo page number in the bucket.
     * @param frameNo frame number in the bucket.
     * @return true if successful.
     */
    public boolean insert(PageId pageNo, int frameNo) {
        int bucket = hash(pageNo);
        BufHTEntry entry = new BufHTEntry();
        entry.pageNo.pid = pageNo.pid;
        entry.frameNo = frameNo;
        // Push the new entry onto the front of the chain.
        entry.next = ht[bucket];
        ht[bucket] = entry;
        return true;
    }

    /** Find a page in the hashtable, return INVALID_PAGE
     * on failure, otherwise the frame number.
     * @param pageNo page number in the bucket.
     */
    public int lookup(PageId pageNo) {
        if (pageNo.pid == INVALID_PAGE) {
            return INVALID_PAGE;
        }
        BufHTEntry entry = ht[hash(pageNo)];
        while (entry != null) {
            if (entry.pageNo.pid == pageNo.pid) {
                return entry.frameNo;
            }
            entry = entry.next;
        }
        return INVALID_PAGE;
    }

    /** Remove the page from the hashtable.
     * @param pageNo page number of the bucket.
     */
    public boolean remove(PageId pageNo) {
        // Allow INVALID_PAGE to be removed all they want.
        if (pageNo.pid == INVALID_PAGE) {
            return true;
        }
        int bucket = hash(pageNo);
        BufHTEntry prev = null;
        BufHTEntry cur = ht[bucket];
        // Walk the chain looking for the matching page.
        while (cur != null && cur.pageNo.pid != pageNo.pid) {
            prev = cur;
            cur = cur.next;
        }
        if (cur == null) {
            System.err.println ("ERROR: Page " + pageNo.pid
                    + " was not found in hashtable.\n");
            return false;
        }
        if (prev == null) {
            ht[bucket] = cur.next;
        } else {
            prev.next = cur.next;
        }
        return true;
    }

    /** Show hashtable contents. */
    public void display() {
        System.out.println("HASH Table contents :FrameNo[PageNo]");
        for (int i = 0; i < HTSIZE; i++) {
            BufHTEntry cur = ht[i];
            if (cur == null) {
                System.out.println("NONE\t");
            } else {
                while (cur != null) {
                    System.out.println(cur.frameNo + "[" + cur.pageNo.pid + "]-");
                    cur = cur.next;
                }
                System.out.println("\t\t");
            }
        }
        System.out.println("");
    }
}
// *****************************************************
/** A clock algorithm for buffer pool replacement policy.
* It picks up the frame in the buffer pool to be replaced.
* This is the default replacement policy.
*/
class Clock extends Replacer {

    /** Creates a clock object. */
    public Clock(BufMgr javamgr) {
        super(javamgr);
    }

    /** Picks up the victim frame to be replaced according to
     * the clock algorithm. Pin the victim so that other
     * process can not pick it as a victim.
     *
     * @return -1 if no frame is available.
     *         head of the list otherwise.
     * @throws BufferPoolExceededException.
     */
    public int pick_victim()
        throws BufferPoolExceededException,
        PagePinnedException
    {
        int numBuffers = mgr.getNumBuffers();
        int scanned = 0;
        head = (head + 1) % numBuffers;
        for (;;) {
            if (state_bit[head].state == Available) {
                break;
            }
            // Referenced frames get a second chance: clear the bit and
            // move on; they become candidates on the next sweep.
            if (state_bit[head].state == Referenced) {
                state_bit[head].state = Available;
            }
            // Two full sweeps without a free frame means the pool is full.
            if (scanned == 2 * numBuffers) {
                throw new BufferPoolExceededException (null, "BUFMGR: BUFFER_EXCEEDED.");
            }
            ++scanned;
            head = (head + 1) % numBuffers;
        }
        // Make sure pin count is 0.
        /** need to convert assert to a similar function. */
        // assert( (mgr.frameTable())[head].pin_count() == 0 );
        if ((mgr.frameTable())[head].pin_count() != 0) {
            throw new PagePinnedException (null, "BUFMGR: PIN_COUNT IS NOT 0.");
        }
        // Pin this victim so that other processes can't pick it as victim.
        state_bit[head].state = Pinned;
        (mgr.frameTable())[head].pin();
        return head;
    }

    /** Returns the name of the clock algorithm as a string.
     *
     * @return "Clock", the name of the algorithm.
     */
    public final String name() {
        return "Clock";
    }

    /** Displays information from clock replacement algorithm. */
    public void info() {
        super.info();
        System.out.println ("Clock hand:\t" + head);
        System.out.println ("\n\n");
    }
} // end of Clock
// *****************************************************
/** The buffer manager class, it allocates new pages for the
* buffer pool, pins and unpins the frame, frees the frame
* page, and uses the replacement algorithm to replace the
* page.
*/
public class BufMgr implements GlobalConst{

    /** The hash table mapping resident page numbers to frame numbers;
     * only allocated once. */
    private BufHashTbl hashTable = new BufHashTbl();

    /** Total number of buffer frames in the buffer pool. */
    private int numBuffers;

    /** physical buffer pool. */
    private byte[][] bufPool; // default = byte[NUMBUF][MAX_SPACE];

    /** An array of Descriptors one per frame. */
    private FrameDesc[] frmeTable; // default = new FrameDesc[NUMBUF];

    /** The replacer object, which is only used in this class. */
    private Replacer replacer;

    /** Factor out the common code for the two versions of Flush:
     * write every matching dirty frame back to disk and mark its
     * frame empty.
     *
     * @param pageid the page number of the page which needs
     *        to be flushed; note its pid is overwritten while
     *        scanning when flushing all pages.
     * @param all_pages non-zero to flush every page, 0 to flush
     *        only the page identified by pageid.
     *
     * @exception HashOperationException if there is a hashtable error.
     * @exception PageUnpinnedException when unpinning an unpinned page
     * @exception PagePinnedException when trying to free a pinned page
     * @exception PageNotFoundException when the page could not be found
     * @exception InvalidPageNumberException when the page number is invalid
     * @exception FileIOException File I/O error
     * @exception IOException Other I/O errors
     */
    private void privFlushPages(PageId pageid, int all_pages)
        throws HashOperationException,
            PageUnpinnedException,
            PagePinnedException,
            PageNotFoundException,
            BufMgrException,
            IOException
    {
        // Counts matching frames whose pin count is non-zero
        // (was misleadingly named "unpinned").
        int pinnedCount = 0;
        for (int i = 0; i < numBuffers; i++) { // write all valid dirty pages to disk
            if ((all_pages != 0) || (frmeTable[i].pageNo.pid == pageid.pid)) {
                if (frmeTable[i].pin_count() != 0)
                    pinnedCount++;
                if (frmeTable[i].dirty != false) {
                    if (frmeTable[i].pageNo.pid == INVALID_PAGE)
                        throw new PageNotFoundException( null, "BUFMGR: INVALID_PAGE_NO");
                    pageid.pid = frmeTable[i].pageNo.pid;
                    Page apage = new Page(bufPool[i]);
                    write_page(pageid, apage);
                    try {
                        hashTable.remove(pageid);
                    }
                    catch (Exception e2) {
                        throw new HashOperationException(e2, "BUFMGR: HASH_TBL_ERROR.");
                    }
                    frmeTable[i].pageNo.pid = INVALID_PAGE; // frame is empty
                    frmeTable[i].dirty = false;
                }
                // Single-page flush: fail as soon as the page is seen pinned.
                if (all_pages == 0) {
                    if (pinnedCount != 0)
                        throw new PagePinnedException (null, "BUFMGR: PAGE_PINNED.");
                }
            }
        }
        if (all_pages != 0) {
            if (pinnedCount != 0)
                throw new PagePinnedException (null, "BUFMGR: PAGE_PINNED.");
        }
    }

    /**
     * Create a buffer manager object.
     *
     * @param numbufs number of buffers in the buffer pool.
     * @param replacerArg name of the buffer replacement policy
     *        ("Clock", "LRU" or "MRU"); null or an unknown name
     *        falls back to Clock.
     */
    public BufMgr( int numbufs, String replacerArg )
    {
        numBuffers = numbufs;
        bufPool = new byte[numBuffers][MAX_SPACE];
        // Fixed: frmeTable was previously allocated twice in a row.
        frmeTable = new FrameDesc[numBuffers];
        for (int i = 0; i < numBuffers; i++) // initialize frameTable
            frmeTable[i] = new FrameDesc();
        if (replacerArg == null) {
            replacer = new Clock(this);
        } else if (replacerArg.compareTo("Clock") == 0) {
            replacer = new Clock(this);
            System.out.println("Replacer: Clock\n");
        } else if (replacerArg.compareTo("LRU") == 0) {
            replacer = new LRU(this);
            System.out.println("Replacer: LRU\n");
        } else if (replacerArg.compareTo("MRU") == 0) {
            // NOTE(review): "MRU" also instantiates LRU; if a separate MRU
            // replacer exists it should be used here -- confirm before changing.
            replacer = new LRU(this);
            System.out.println("Replacer: MRU\n");
        } else {
            replacer = new Clock(this);
            System.out.println("Replacer:Unknown, Use Clock\n");
        }
        replacer.setBufferManager( this );
    }

    // Debug use only: dump the page-to-frame hash table.
    private void bmhashdisplay()
    {
        hashTable.display();
    }

    /** Check if this page is in buffer pool, otherwise
     * find a frame for this page, read in and pin it.
     * Also write out the old page if it's dirty before reading
     * if emptyPage==TRUE, then actually no read is done to bring
     * the page in.
     *
     * @param pin_pgid page number in the minibase.
     * @param page the pointer pointing to the page.
     * @param emptyPage true (empty page); false (non-empty page)
     *
     * @exception ReplacerException if there is a replacer error.
     * @exception HashOperationException if there is a hashtable error.
     * @exception PageUnpinnedException if there is a page that is already unpinned.
     * @exception InvalidFrameNumberException if there is an invalid frame number .
     * @exception PageNotReadException if a page cannot be read.
     * @exception BufferPoolExceededException if the buffer pool is full.
     * @exception PagePinnedException if a page is left pinned .
     * @exception BufMgrException other error occured in bufmgr layer
     * @exception IOException if there is other kinds of I/O error.
     */
    public void pinPage(PageId pin_pgid, Page page, boolean emptyPage)
        throws ReplacerException,
            HashOperationException,
            PageUnpinnedException,
            InvalidFrameNumberException,
            PageNotReadException,
            BufferPoolExceededException,
            PagePinnedException,
            BufMgrException,
            IOException
    {
        int frameNo = hashTable.lookup(pin_pgid);
        if (frameNo < 0) { // Not in the buffer pool
            frameNo = replacer.pick_victim(); // victim frame comes back pinned
            if (frameNo < 0) {
                page = null;
                throw new ReplacerException (null, "BUFMGR: REPLACER_ERROR.");
            }
            // Remember whether the victim's old page must be written back.
            PageId oldpageNo = new PageId(-1);
            boolean needwrite = false;
            if ((frmeTable[frameNo].pageNo.pid != INVALID_PAGE)
                    && (frmeTable[frameNo].dirty == true)) {
                needwrite = true;
                oldpageNo.pid = frmeTable[frameNo].pageNo.pid;
            }
            boolean removed = hashTable.remove(frmeTable[frameNo].pageNo);
            if (removed != true) {
                throw new HashOperationException (null, "BUFMGR: HASH_TABLE_ERROR.");
            }
            frmeTable[frameNo].pageNo.pid = INVALID_PAGE; // frame is empty
            frmeTable[frameNo].dirty = false; // not dirty
            boolean inserted = hashTable.insert(pin_pgid, frameNo);
            (frmeTable[frameNo].pageNo).pid = pin_pgid.pid;
            frmeTable[frameNo].dirty = false;
            if (inserted != true) {
                throw new HashOperationException (null, "BUFMGR: HASH_TABLE_ERROR.");
            }
            Page apage = new Page(bufPool[frameNo]);
            if (needwrite) {
                // Flush the dirty victim before reusing its frame.
                write_page(oldpageNo, apage);
            }
            // read in the page if not empty
            if (emptyPage == false) {
                try {
                    apage.setpage(bufPool[frameNo]);
                    read_page(pin_pgid, apage);
                }
                catch (Exception e) {
                    // Read failed: release the frame and unpin it again.
                    removed = hashTable.remove(frmeTable[frameNo].pageNo);
                    if (removed != true)
                        throw new HashOperationException (e, "BUFMGR: HASH_TABLE_ERROR.");
                    frmeTable[frameNo].pageNo.pid = INVALID_PAGE; // frame is empty
                    frmeTable[frameNo].dirty = false;
                    if (replacer.unpin(frameNo) != true)
                        throw new ReplacerException (e, "BUFMGR: REPLACER_ERROR.");
                    throw new PageNotReadException (e, "BUFMGR: DB_READ_PAGE_ERROR.");
                }
            }
            page.setpage(bufPool[frameNo]);
        } else { // the page is in the buffer pool ( frameNo >= 0 )
            page.setpage(bufPool[frameNo]);
            replacer.pin(frameNo);
        }
    }

    /**
     * To unpin a page specified by a pageId.
     * If pincount>0, decrement it and if it becomes zero,
     * put it in a group of replacement candidates.
     * if pincount=0 before this call, return error.
     *
     * @param PageId_in_a_DB page number in the minibase.
     * @param dirty the dirty bit of the frame
     *
     * @exception ReplacerException if there is a replacer error.
     * @exception PageUnpinnedException if there is a page that is already unpinned.
     * @exception InvalidFrameNumberException if there is an invalid frame number .
     * @exception HashEntryNotFoundException if there is no entry of page in the hash table.
     */
    public void unpinPage(PageId PageId_in_a_DB, boolean dirty)
        throws ReplacerException,
            PageUnpinnedException,
            HashEntryNotFoundException,
            InvalidFrameNumberException
    {
        int frameNo = hashTable.lookup(PageId_in_a_DB);
        if (frameNo < 0) {
            throw new HashEntryNotFoundException (null, "BUFMGR: HASH_NOT_FOUND.");
        }
        if (frmeTable[frameNo].pageNo.pid == INVALID_PAGE) {
            throw new InvalidFrameNumberException (null, "BUFMGR: BAD_FRAMENO.");
        }
        if ((replacer.unpin(frameNo)) != true) {
            throw new ReplacerException (null, "BUFMGR: REPLACER_ERROR.");
        }
        // Only set the dirty bit; an unpin never clears it.
        if (dirty == true)
            frmeTable[frameNo].dirty = dirty;
    }

    /** Call DB object to allocate a run of new pages and
     * find a frame in the buffer pool for the first page
     * and pin it. If buffer is full, ask DB to deallocate
     * all these pages and return error (null if error).
     *
     * @param firstpage the address of the first page.
     * @param howmany total number of allocated new pages.
     * @return the first page id of the new pages.
     *
     * @exception BufferPoolExceededException if the buffer pool is full.
     * @exception HashOperationException if there is a hashtable error.
     * @exception ReplacerException if there is a replacer error.
     * @exception HashEntryNotFoundException if there is no entry of page in the hash table.
     * @exception InvalidFrameNumberException if there is an invalid frame number.
     * @exception PageUnpinnedException if there is a page that is already unpinned.
     * @exception PagePinnedException if a page is left pinned.
     * @exception PageNotReadException if a page cannot be read.
     * @exception IOException if there is other kinds of I/O error.
     * @exception BufMgrException other error occured in bufmgr layer
     * @exception DiskMgrException other error occured in diskmgr layer
     */
    public PageId newPage(Page firstpage, int howmany)
        throws BufferPoolExceededException,
            HashOperationException,
            ReplacerException,
            HashEntryNotFoundException,
            InvalidFrameNumberException,
            PagePinnedException,
            PageUnpinnedException,
            PageNotReadException,
            BufMgrException,
            DiskMgrException,
            IOException
    {
        PageId firstPageId = new PageId();
        allocate_page(firstPageId, howmany);
        try {
            pinPage(firstPageId, firstpage, true);
        }
        // rollback because pin failed
        catch (Exception e) {
            // Fixed: the old rollback advanced the pid cumulatively
            // (pid += i each iteration), deallocating the wrong pages.
            // Deallocate exactly base .. base+howmany-1.
            int base = firstPageId.pid;
            for (int i = 0; i < howmany; i++) {
                firstPageId.pid = base + i;
                deallocate_page(firstPageId);
            }
            return null;
        }
        return firstPageId;
    }

    /** User should call this method if she needs to delete a page.
     * this routine will call DB to deallocate the page.
     *
     * @param globalPageId the page number in the data base.
     * @exception InvalidBufferException if buffer pool corrupted.
     * @exception ReplacerException if there is a replacer error.
     * @exception HashOperationException if there is a hash table error.
     * @exception InvalidFrameNumberException if there is an invalid frame number.
     * @exception PageNotReadException if a page cannot be read.
     * @exception BufferPoolExceededException if the buffer pool is already full.
     * @exception PagePinnedException if a page is left pinned.
     * @exception PageUnpinnedException if there is a page that is already unpinned.
     * @exception HashEntryNotFoundException if there is no entry
     *            of page in the hash table.
     * @exception IOException if there is other kinds of I/O error.
     * @exception BufMgrException other error occured in bufmgr layer
     * @exception DiskMgrException other error occured in diskmgr layer
     */
    public void freePage(PageId globalPageId)
        throws InvalidBufferException,
            ReplacerException,
            HashOperationException,
            InvalidFrameNumberException,
            PageNotReadException,
            BufferPoolExceededException,
            PagePinnedException,
            PageUnpinnedException,
            HashEntryNotFoundException,
            BufMgrException,
            DiskMgrException,
            IOException
    {
        int frameNo = hashTable.lookup(globalPageId);
        // If globalPageId is not in the pool (frameNo < 0),
        // just deallocate it on disk.
        if (frameNo < 0) {
            deallocate_page(globalPageId);
            return;
        }
        if (frameNo >= (int) numBuffers) {
            throw new InvalidBufferException(null, "BUFMGR, BAD_BUFFER");
        }
        try {
            replacer.free(frameNo);
        }
        catch (Exception e1) {
            throw new ReplacerException(e1, "BUFMGR, REPLACER_ERROR");
        }
        try {
            hashTable.remove(frmeTable[frameNo].pageNo);
        }
        catch (Exception e2) {
            throw new HashOperationException(e2, "BUFMGR, HASH_TABLE_ERROR");
        }
        frmeTable[frameNo].pageNo.pid = INVALID_PAGE; // frame is empty
        frmeTable[frameNo].dirty = false;
        deallocate_page(globalPageId);
    }

    /** Added to flush a particular page of the buffer pool to disk
     * @param pageid the page number in the database.
     *
     * @exception HashOperationException if there is a hashtable error.
     * @exception PageUnpinnedException if there is a page that is already unpinned.
     * @exception PagePinnedException if a page is left pinned.
     * @exception PageNotFoundException if a page is not found.
     * @exception BufMgrException other error occured in bufmgr layer
     * @exception IOException if there is other kinds of I/O error.
     */
    public void flushPage(PageId pageid)
        throws HashOperationException,
            PageUnpinnedException,
            PagePinnedException,
            PageNotFoundException,
            BufMgrException,
            IOException
    {
        privFlushPages(pageid, 0);
    }

    /** Flushes all pages of the buffer pool to disk
     * @exception HashOperationException if there is a hashtable error.
     * @exception PageUnpinnedException if there is a page that is already unpinned.
     * @exception PagePinnedException if a page is left pinned.
     * @exception PageNotFoundException if a page is not found.
     * @exception BufMgrException other error occured in bufmgr layer
     * @exception IOException if there is other kinds of I/O error.
     */
    public void flushAllPages()
        throws HashOperationException,
            PageUnpinnedException,
            PagePinnedException,
            PageNotFoundException,
            BufMgrException,
            IOException
    {
        PageId pageId = new PageId(INVALID_PAGE);
        privFlushPages(pageId, 1);
    }

    /** Gets the total number of buffers.
     *
     * @return total number of buffer frames.
     */
    public int getNumBuffers() { return numBuffers; }

    /** Gets the total number of unpinned buffer frames.
     *
     * @return total number of unpinned buffer frames.
     */
    public int getNumUnpinnedBuffers()
    {
        return replacer.getNumUnpinnedBuffers();
    }

    /** A few routines currently need direct access to the FrameTable. */
    public FrameDesc[] frameTable() { return frmeTable; }

    /** Writes one page to disk, wrapping lower-layer failures. */
    private void write_page (PageId pageno, Page page)
        throws BufMgrException {
        try {
            SystemDefs.JavabaseDB.write_page(pageno, page);
        }
        catch (Exception e) {
            throw new BufMgrException(e,"BufMgr.java: write_page() failed");
        }
    } // end of write_page

    /** Reads one page from disk, wrapping lower-layer failures. */
    private void read_page (PageId pageno, Page page)
        throws BufMgrException {
        try {
            SystemDefs.JavabaseDB.read_page(pageno, page);
        }
        catch (Exception e) {
            throw new BufMgrException(e,"BufMgr.java: read_page() failed");
        }
    } // end of read_page

    /** Allocates a run of num pages on disk, wrapping failures. */
    private void allocate_page (PageId pageno, int num)
        throws BufMgrException {
        try {
            SystemDefs.JavabaseDB.allocate_page(pageno, num);
        }
        catch (Exception e) {
            throw new BufMgrException(e,"BufMgr.java: allocate_page() failed");
        }
    } // end of allocate_page

    /** Deallocates a single page on disk, wrapping failures. */
    private void deallocate_page (PageId pageno)
        throws BufMgrException {
        try {
            SystemDefs.JavabaseDB.deallocate_page(pageno);
        }
        catch (Exception e) {
            throw new BufMgrException(e,"BufMgr.java: deallocate_page() failed");
        }
    } // end of deallocate_page
}
/** A class describes the victim data, its frame number and page
* number.
*/
class victim_data {
/** Frame number of the victim chosen for replacement. */
public int frame_num;
/** Page id held by that frame. */
public int page_id;
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.iceberg.data;
import java.io.IOException;
import java.util.List;
import java.util.Set;
import org.apache.iceberg.DataFile;
import org.apache.iceberg.DeleteFile;
import org.apache.iceberg.Files;
import org.apache.iceberg.PartitionSpec;
import org.apache.iceberg.Schema;
import org.apache.iceberg.Table;
import org.apache.iceberg.TestHelpers.Row;
import org.apache.iceberg.relocated.com.google.common.collect.Lists;
import org.apache.iceberg.relocated.com.google.common.collect.Sets;
import org.apache.iceberg.types.Types;
import org.apache.iceberg.util.ArrayUtil;
import org.apache.iceberg.util.Pair;
import org.apache.iceberg.util.StructLikeSet;
import org.apache.iceberg.util.StructProjection;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
public abstract class DeleteReadTests {
// Schema passed to create tables
public static final Schema SCHEMA = new Schema(
Types.NestedField.required(1, "id", Types.IntegerType.get()),
Types.NestedField.required(2, "data", Types.StringType.get())
);
// Partition spec used to create tables
public static final PartitionSpec SPEC = PartitionSpec.builderFor(SCHEMA)
.bucket("data", 16)
.build();
@Rule
public TemporaryFolder temp = new TemporaryFolder();
private String tableName = null;
private Table table = null;
private List<Record> records = null;
private DataFile dataFile = null;
@Before
public void writeTestDataFile() throws IOException {
this.tableName = "test";
this.table = createTable(tableName, SCHEMA, SPEC);
this.records = Lists.newArrayList();
// records all use IDs that are in bucket id_bucket=0
GenericRecord record = GenericRecord.create(table.schema());
records.add(record.copy("id", 29, "data", "a"));
records.add(record.copy("id", 43, "data", "b"));
records.add(record.copy("id", 61, "data", "c"));
records.add(record.copy("id", 89, "data", "d"));
records.add(record.copy("id", 100, "data", "e"));
records.add(record.copy("id", 121, "data", "f"));
records.add(record.copy("id", 122, "data", "g"));
this.dataFile = FileHelpers.writeDataFile(table, Files.localOutput(temp.newFile()), Row.of(0), records);
table.newAppend()
.appendFile(dataFile)
.commit();
}
@After
public void cleanup() throws IOException {
dropTable("test");
}
protected abstract Table createTable(String name, Schema schema, PartitionSpec spec) throws IOException;
protected abstract void dropTable(String name) throws IOException;
protected abstract StructLikeSet rowSet(String name, Table testTable, String... columns) throws IOException;
protected boolean expectPruned() {
return true;
}
@Test
public void testEqualityDeletes() throws IOException {
Schema deleteRowSchema = table.schema().select("data");
Record dataDelete = GenericRecord.create(deleteRowSchema);
List<Record> dataDeletes = Lists.newArrayList(
dataDelete.copy("data", "a"), // id = 29
dataDelete.copy("data", "d"), // id = 89
dataDelete.copy("data", "g") // id = 122
);
DeleteFile eqDeletes = FileHelpers.writeDeleteFile(
table, Files.localOutput(temp.newFile()), Row.of(0), dataDeletes, deleteRowSchema);
table.newRowDelta()
.addDeletes(eqDeletes)
.commit();
StructLikeSet expected = rowSetWithoutIds(29, 89, 122);
StructLikeSet actual = rowSet(tableName, table, "*");
Assert.assertEquals("Table should contain expected rows", expected, actual);
}
@Test
public void testEqualityDeletesWithRequiredEqColumn() throws IOException {
Schema deleteRowSchema = table.schema().select("data");
Record dataDelete = GenericRecord.create(deleteRowSchema);
List<Record> dataDeletes = Lists.newArrayList(
dataDelete.copy("data", "a"), // id = 29
dataDelete.copy("data", "d"), // id = 89
dataDelete.copy("data", "g") // id = 122
);
DeleteFile eqDeletes = FileHelpers.writeDeleteFile(
table, Files.localOutput(temp.newFile()), Row.of(0), dataDeletes, deleteRowSchema);
table.newRowDelta()
.addDeletes(eqDeletes)
.commit();
StructLikeSet expected = selectColumns(rowSetWithoutIds(29, 89, 122), "id");
StructLikeSet actual = rowSet(tableName, table, "id");
if (expectPruned()) {
Assert.assertEquals("Table should contain expected rows", expected, actual);
} else {
// data is added by the reader to apply the eq deletes, use StructProjection to remove it from comparison
Assert.assertEquals("Table should contain expected rows", expected, selectColumns(actual, "id"));
}
}
@Test
public void testEqualityDeletesSpanningMultipleDataFiles() throws IOException {
// Add another DataFile with common values
GenericRecord record = GenericRecord.create(table.schema());
records.add(record.copy("id", 144, "data", "a"));
this.dataFile = FileHelpers.writeDataFile(table, Files.localOutput(temp.newFile()), Row.of(0), records);
table.newAppend()
.appendFile(dataFile)
.commit();
Schema deleteRowSchema = table.schema().select("data");
Record dataDelete = GenericRecord.create(deleteRowSchema);
List<Record> dataDeletes = Lists.newArrayList(
dataDelete.copy("data", "a"), // id = 29, 144
dataDelete.copy("data", "d"), // id = 89
dataDelete.copy("data", "g") // id = 122
);
DeleteFile eqDeletes = FileHelpers.writeDeleteFile(
table, Files.localOutput(temp.newFile()), Row.of(0), dataDeletes, deleteRowSchema);
table.newRowDelta()
.addDeletes(eqDeletes)
.commit();
StructLikeSet expected = rowSetWithoutIds(29, 89, 122, 144);
StructLikeSet actual = rowSet(tableName, table, "*");
Assert.assertEquals("Table should contain expected rows", expected, actual);
}
@Test
public void testPositionDeletes() throws IOException {
List<Pair<CharSequence, Long>> deletes = Lists.newArrayList(
Pair.of(dataFile.path(), 0L), // id = 29
Pair.of(dataFile.path(), 3L), // id = 89
Pair.of(dataFile.path(), 6L) // id = 122
);
Pair<DeleteFile, Set<CharSequence>> posDeletes = FileHelpers.writeDeleteFile(
table, Files.localOutput(temp.newFile()), Row.of(0), deletes);
table.newRowDelta()
.addDeletes(posDeletes.first())
.validateDataFilesExist(posDeletes.second())
.commit();
StructLikeSet expected = rowSetWithoutIds(29, 89, 122);
StructLikeSet actual = rowSet(tableName, table, "*");
Assert.assertEquals("Table should contain expected rows", expected, actual);
}
@Test
public void testMixedPositionAndEqualityDeletes() throws IOException {
Schema dataSchema = table.schema().select("data");
Record dataDelete = GenericRecord.create(dataSchema);
List<Record> dataDeletes = Lists.newArrayList(
dataDelete.copy("data", "a"), // id = 29
dataDelete.copy("data", "d"), // id = 89
dataDelete.copy("data", "g") // id = 122
);
DeleteFile eqDeletes = FileHelpers.writeDeleteFile(
table, Files.localOutput(temp.newFile()), Row.of(0), dataDeletes, dataSchema);
List<Pair<CharSequence, Long>> deletes = Lists.newArrayList(
Pair.of(dataFile.path(), 3L), // id = 89
Pair.of(dataFile.path(), 5L) // id = 121
);
Pair<DeleteFile, Set<CharSequence>> posDeletes = FileHelpers.writeDeleteFile(
table, Files.localOutput(temp.newFile()), Row.of(0), deletes);
table.newRowDelta()
.addDeletes(eqDeletes)
.addDeletes(posDeletes.first())
.validateDataFilesExist(posDeletes.second())
.commit();
StructLikeSet expected = rowSetWithoutIds(29, 89, 121, 122);
StructLikeSet actual = rowSet(tableName, table, "*");
Assert.assertEquals("Table should contain expected rows", expected, actual);
}
@Test
public void testMultipleEqualityDeleteSchemas() throws IOException {
Schema dataSchema = table.schema().select("data");
Record dataDelete = GenericRecord.create(dataSchema);
List<Record> dataDeletes = Lists.newArrayList(
dataDelete.copy("data", "a"), // id = 29
dataDelete.copy("data", "d"), // id = 89
dataDelete.copy("data", "g") // id = 122
);
DeleteFile dataEqDeletes = FileHelpers.writeDeleteFile(
table, Files.localOutput(temp.newFile()), Row.of(0), dataDeletes, dataSchema);
Schema idSchema = table.schema().select("id");
Record idDelete = GenericRecord.create(idSchema);
List<Record> idDeletes = Lists.newArrayList(
idDelete.copy("id", 121), // id = 121
idDelete.copy("id", 29) // id = 29
);
DeleteFile idEqDeletes = FileHelpers.writeDeleteFile(
table, Files.localOutput(temp.newFile()), Row.of(0), idDeletes, idSchema);
table.newRowDelta()
.addDeletes(dataEqDeletes)
.addDeletes(idEqDeletes)
.commit();
StructLikeSet expected = rowSetWithoutIds(29, 89, 121, 122);
StructLikeSet actual = rowSet(tableName, table, "*");
Assert.assertEquals("Table should contain expected rows", expected, actual);
}
@Test
public void testEqualityDeleteByNull() throws IOException {
  // "data" is required in the test table; relax it so a null value can be written.
  table.updateSchema()
      .makeColumnOptional("data")
      .commit();

  // Append a data file containing a single record whose data column is null.
  Record record = GenericRecord.create(table.schema());
  DataFile fileWithNullData = FileHelpers.writeDataFile(
      table, Files.localOutput(temp.newFile()), Row.of(0),
      Lists.newArrayList(record.copy("id", 131, "data", null)));
  table.newAppend()
      .appendFile(fileWithNullData)
      .commit();

  // Equality-delete every row whose data column is null.
  Schema deleteSchema = table.schema().select("data");
  Record deleteKey = GenericRecord.create(deleteSchema);
  List<Record> deleteRows = Lists.newArrayList(
      deleteKey.copy("data", null)); // id = 131
  DeleteFile nullDeletes = FileHelpers.writeDeleteFile(
      table, Files.localOutput(temp.newFile()), Row.of(0), deleteRows, deleteSchema);
  table.newRowDelta()
      .addDeletes(nullDeletes)
      .commit();

  StructLikeSet expected = rowSetWithoutIds(131);
  StructLikeSet actual = rowSet(tableName, table, "*");
  Assert.assertEquals("Table should contain expected rows", expected, actual);
}
/**
 * Projects every row in the given set onto the requested columns.
 * A fresh projection wrapper is created per row, matching the schema of the selected columns.
 */
private StructLikeSet selectColumns(StructLikeSet rows, String... columns) {
  Schema projectedSchema = table.schema().select(columns);
  StructLikeSet projected = StructLikeSet.create(projectedSchema.asStruct());
  rows.forEach(row -> projected.add(
      StructProjection.create(table.schema(), projectedSchema).wrap(row)));
  return projected;
}
/**
 * Builds the expected table content: every seeded record whose id is NOT in {@code idsToRemove}.
 */
private StructLikeSet rowSetWithoutIds(int... idsToRemove) {
  Set<Integer> removedIds = Sets.newHashSet(ArrayUtil.toIntList(idsToRemove));
  StructLikeSet remaining = StructLikeSet.create(table.schema().asStruct());
  records.forEach(row -> {
    if (!removedIds.contains(row.getField("id"))) {
      remaining.add(row);
    }
  });
  return remaining;
}
}
| |
package org.apache.maven.plugin.eclipse.writers.wtp;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Writer;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.eclipse.Constants;
import org.apache.maven.plugin.eclipse.EclipseSourceDir;
import org.apache.maven.plugin.eclipse.Messages;
import org.apache.maven.plugin.ide.IdeUtils;
import org.apache.maven.plugin.ide.JeeUtils;
import org.codehaus.plexus.util.IOUtil;
import org.codehaus.plexus.util.xml.PrettyPrintXMLWriter;
import org.codehaus.plexus.util.xml.XMLWriter;
import org.codehaus.plexus.util.xml.Xpp3Dom;
/**
* Creates a .settings folder for Eclipse WTP 1.x release and writes out the configuration under it.
*
* @author <a href="mailto:rahul.thakur.xdev@gmail.com">Rahul Thakur</a>
* @author <a href="mailto:fgiust@apache.org">Fabrizio Giustina</a>
* @version $Id$
*/
public class EclipseWtpComponentWriter
    extends AbstractWtpResourceWriter
{

    /**
     * Context root attribute.
     */
    public static final String ATTR_CONTEXT_ROOT = "context-root"; //$NON-NLS-1$

    /**
     * The .settings folder for Web Tools Project 1.x release.
     */
    public static final String DIR_WTP_SETTINGS = ".settings"; //$NON-NLS-1$

    /**
     * File name where the WTP component settings will be stored for our Eclipse Project.
     *
     * @return <code>.component</code>
     */
    protected String getComponentFileName()
    {
        return ".component"; //$NON-NLS-1$
    }

    /**
     * Version number added to the <code>project-modules</code> element. WTP 1.x writes no
     * version attribute, so this implementation returns <code>null</code>; subclasses for
     * later WTP releases override this to supply an actual version.
     *
     * @return <code>null</code> (no version attribute for WTP 1.x)
     */
    protected String getProjectVersion()
    {
        return null;
    }

    /**
     * Writes the component settings file under the project's <code>.settings</code> folder.
     *
     * @throws MojoExecutionException if the component file cannot be opened for writing
     * @see org.apache.maven.plugin.eclipse.writers.EclipseWriter#write()
     */
    public void write()
        throws MojoExecutionException
    {
        // create a .settings directory (if not existing)
        File settingsDir = new File( config.getEclipseProjectDirectory(), DIR_WTP_SETTINGS );
        settingsDir.mkdirs();

        Writer w;
        try
        {
            w =
                new OutputStreamWriter( new FileOutputStream( new File( settingsDir, getComponentFileName() ) ),
                                        "UTF-8" );
        }
        catch ( IOException ex )
        {
            throw new MojoExecutionException( Messages.getString( "EclipsePlugin.erroropeningfile" ), ex );
        }

        // create a .component file and write out to it; guarantee the stream is closed even if
        // writing the XML fails (previously a failure here leaked the open file handle)
        try
        {
            XMLWriter writer = new PrettyPrintXMLWriter( w, "UTF-8", null );
            writeModuleTypeComponent( writer, config.getPackaging(), config.getBuildOutputDirectory(),
                                      config.getSourceDirs(), config.getLocalRepository() );
        }
        finally
        {
            IOUtil.close( w );
        }
    }

    /**
     * Writes out the module type settings for a Web Tools Project to a component file.
     *
     * @param writer XML writer positioned at the document root
     * @param packaging maven packaging of the project (war, ear, ...)
     * @param buildOutputDirectory build output directory, used for the java-output-path property
     * @param sourceDirs source directories registered as wb-resource entries
     * @param localRepository local maven repository, needed to resolve dependency paths
     * @throws MojoExecutionException if plugin configuration cannot be read
     */
    private void writeModuleTypeComponent( XMLWriter writer, String packaging, File buildOutputDirectory,
                                           EclipseSourceDir[] sourceDirs, ArtifactRepository localRepository )
        throws MojoExecutionException
    {
        writer.startElement( ELT_PROJECT_MODULES );
        writer.addAttribute( ATTR_MODULE_ID, "moduleCoreId" ); //$NON-NLS-1$
        if ( getProjectVersion() != null )
        {
            writer.addAttribute( ATTR_PROJECT_VERSION, getProjectVersion() );
        }
        writer.startElement( ELT_WB_MODULE );

        // we should use the eclipse project name as the deploy name.
        writer.addAttribute( ATTR_DEPLOY_NAME, this.config.getEclipseProjectName() );

        // deploy-path is "/" for utility and ejb projects, "/WEB-INF/classes" for webapps
        String target = "/"; //$NON-NLS-1$

        if ( Constants.PROJECT_PACKAGING_WAR.equalsIgnoreCase( packaging ) )
        {
            target = "/WEB-INF/classes"; //$NON-NLS-1$

            File warSourceDirectory =
                new File( IdeUtils.getPluginSetting( config.getProject(), JeeUtils.ARTIFACT_MAVEN_WAR_PLUGIN,
                                                     "warSourceDirectory", //$NON-NLS-1$
                                                     config.getProject().getBasedir() + "/src/main/webapp" ) );

            writeContextRoot( writer );

            // war source directory is deployed at the web root
            writer.startElement( ELT_WB_RESOURCE );
            writer.addAttribute( ATTR_DEPLOY_PATH, "/" ); //$NON-NLS-1$
            writer.addAttribute( ATTR_SOURCE_PATH,
                                 IdeUtils.toRelativeAndFixSeparator( config.getEclipseProjectDirectory(),
                                                                     warSourceDirectory, false ) );
            writer.endElement();

            // add web resources over the top of the war source directory
            Xpp3Dom[] webResources =
                IdeUtils.getPluginConfigurationDom( config.getProject(), JeeUtils.ARTIFACT_MAVEN_WAR_PLUGIN,
                                                    new String[] { "webResources", "resource" } );
            for ( Xpp3Dom webResource : webResources )
            {
                File webResourceDirectory = new File( webResource.getChild( "directory" ).getValue() );
                writer.startElement( ELT_WB_RESOURCE );
                writer.addAttribute( ATTR_DEPLOY_PATH, "/" ); //$NON-NLS-1$
                writer.addAttribute( ATTR_SOURCE_PATH,
                                     IdeUtils.toRelativeAndFixSeparator( config.getEclipseProjectDirectory(),
                                                                         webResourceDirectory, false ) );
                writer.endElement();
            }

            // @todo is this really needed?
            writer.startElement( ELT_PROPERTY );
            writer.addAttribute( ATTR_NAME, "java-output-path" ); //$NON-NLS-1$
            writer.addAttribute( ATTR_VALUE, "/" //$NON-NLS-1$
                + IdeUtils.toRelativeAndFixSeparator( config.getProject().getBasedir(), buildOutputDirectory, false ) );
            writer.endElement(); // property
        }
        else if ( Constants.PROJECT_PACKAGING_EAR.equalsIgnoreCase( packaging ) )
        {
            // ear source defaults to the generated eclipseEar dir when application.xml generation
            // is enabled, the standard src/main/application otherwise
            String defaultApplicationXML =
                config.getWtpapplicationxml() ? "/target/eclipseEar" : "/src/main/application";
            String earSourceDirectory =
                IdeUtils.getPluginSetting( config.getProject(), JeeUtils.ARTIFACT_MAVEN_EAR_PLUGIN,
                                           "earSourceDirectory", //$NON-NLS-1$
                                           config.getProject().getBasedir() + defaultApplicationXML );
            writer.startElement( ELT_WB_RESOURCE );
            writer.addAttribute( ATTR_DEPLOY_PATH, "/" ); //$NON-NLS-1$
            writer.addAttribute( ATTR_SOURCE_PATH,
                                 IdeUtils.toRelativeAndFixSeparator( config.getEclipseProjectDirectory(),
                                                                     new File( earSourceDirectory ), false ) );
            writer.endElement();
        }

        if ( Constants.PROJECT_PACKAGING_WAR.equalsIgnoreCase( packaging )
            || Constants.PROJECT_PACKAGING_EAR.equalsIgnoreCase( packaging ) )
        {
            // write out the dependencies.
            writeWarOrEarResources( writer, config.getProject(), localRepository );
        }

        for ( EclipseSourceDir dir : sourceDirs )
        {
            // test src/resources are not added to wtpmodules
            if ( !dir.isTest() )
            {
                // <wb-resource deploy-path="/" source-path="/src/java" />
                writer.startElement( ELT_WB_RESOURCE );
                writer.addAttribute( ATTR_DEPLOY_PATH, target );
                writer.addAttribute( ATTR_SOURCE_PATH, dir.getPath() );
                writer.endElement();
            }
        }

        writer.endElement(); // wb-module
        writer.endElement(); // project-modules
    }

    /**
     * Writes the context-root property for web projects.
     *
     * @param writer XML writer to append the property element to
     */
    protected void writeContextRoot( XMLWriter writer )
    {
        writer.startElement( ELT_PROPERTY );
        writer.addAttribute( ATTR_CONTEXT_ROOT, config.getContextName() );
        writer.endElement(); // property
    }
}
| |
/**
* Copyright (C) 2013 The DAISY Consortium
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.daisy.maven.xspec;
import static org.mockito.Matchers.*;
import static org.mockito.Mockito.*;

import java.io.File;
import java.io.PrintStream;
import java.net.URI;
import java.util.Collections;
import java.util.Map;

import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.plugin.logging.Log;
import org.daisy.maven.xspec.TestResults.Builder;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.io.ByteStreams;
import com.google.common.io.Files;
public class XSpecMojoTest {

	/** Original System.out, restored after each test. */
	private static final PrintStream SYSOUT = System.out;

	/**
	 * Root of the compiled test resources. Resolved through a URI rather than
	 * URL.getPath() so that URL-encoded characters in the checkout path (e.g.
	 * spaces as %20) are decoded correctly.
	 */
	private static final File resourcesDir =
			new File(URI.create(XSpecMojoTest.class.getResource("/").toString()));

	private XSpecMojo mojo;
	private TestResults.Builder resultsBuilder;

	@Mock
	private Log log;

	@Mock
	private XSpecRunner runner;

	@Before
	public void setup() {
		// Silence the Mojo's console output for the duration of each test.
		System.setOut(new PrintStream(ByteStreams.nullOutputStream()));
		MockitoAnnotations.initMocks(this);
		resultsBuilder = new Builder("results");
		// Initialize the Mojo with default settings
		mojo = new XSpecMojo();
		mojo.setXSpecRunner(runner);
		mojo.setLog(log);
		mojo.setTestSourceDirectory(new File(resourcesDir, "xspec-dummy"));
		mojo.setReportsDirectory(Files.createTempDir());
		// By default, make the XSpec mock return an empty result
		when(runner.run(anyMapOf(String.class, File.class), any(File.class)))
				.thenReturn(resultsBuilder.build());
	}

	@After
	public void tearDown() {
		System.setOut(SYSOUT);
	}

	@Test
	public void testSimple() throws MojoExecutionException,
			MojoFailureException {
		mojo.execute();
		verify(runner, times(1)).run(anyMapOf(String.class, File.class),
				any(File.class));
	}

	@Test
	public void testSkip() throws MojoExecutionException, MojoFailureException {
		// skip=true must bypass the runner entirely
		mojo.setSkip(true);
		mojo.execute();
		verify(runner, never()).run(anyMapOf(String.class, File.class),
				any(File.class));
	}

	@Test
	public void testSkipTests() throws MojoExecutionException,
			MojoFailureException {
		mojo.setSkipTests(true);
		mojo.execute();
		verify(runner, never()).run(anyMapOf(String.class, File.class),
				any(File.class));
	}

	@Test
	public void testInvalidReportsDir() throws MojoExecutionException,
			MojoFailureException {
		// a regular file is not a usable reports directory
		mojo.setReportsDirectory(new File("pom.xml"));
		mojo.execute();
		verify(runner, never()).run(anyMapOf(String.class, File.class),
				any(File.class));
	}

	@Test
	public void testInvalidSourceDir() throws MojoExecutionException,
			MojoFailureException {
		mojo.setTestSourceDirectory(new File("foobar"));
		mojo.execute();
		verify(runner, never()).run(anyMapOf(String.class, File.class),
				any(File.class));
	}

	@Test
	public void testDefaultIncludes() throws MojoExecutionException,
			MojoFailureException {
		// with no includes/excludes configured, all *.xspec files are picked up
		mojo.execute();
		Map<String, File> includes = ImmutableMap.of("test", new File(
				resourcesDir, "xspec-dummy/test.xspec"), "sub.other", new File(
				resourcesDir, "xspec-dummy/sub/other.xspec"), "sub.test", new File(
				resourcesDir, "xspec-dummy/sub/test.xspec"), "sub.sub.test",
				new File(resourcesDir, "xspec-dummy/sub/sub/test.xspec"));
		verify(runner, times(1)).run(eq(includes), any(File.class));
	}

	@Test
	public void testEmptyIncludes() throws MojoExecutionException,
			MojoFailureException {
		// an explicitly empty includes list behaves like the default includes
		mojo.setIncludes(Collections.<String> emptyList());
		mojo.execute();
		Map<String, File> includes = ImmutableMap.of("test", new File(
				resourcesDir, "xspec-dummy/test.xspec"), "sub.other", new File(
				resourcesDir, "xspec-dummy/sub/other.xspec"), "sub.test", new File(
				resourcesDir, "xspec-dummy/sub/test.xspec"), "sub.sub.test",
				new File(resourcesDir, "xspec-dummy/sub/sub/test.xspec"));
		verify(runner, times(1)).run(eq(includes), any(File.class));
	}

	@Test
	public void testIncludes() throws MojoExecutionException,
			MojoFailureException {
		mojo.setIncludes(ImmutableList.of("**/test.xspec"));
		mojo.execute();
		Map<String, File> includes = ImmutableMap.of("test", new File(
				resourcesDir, "xspec-dummy/test.xspec"), "sub.test", new File(
				resourcesDir, "xspec-dummy/sub/test.xspec"), "sub.sub.test",
				new File(resourcesDir, "xspec-dummy/sub/sub/test.xspec"));
		verify(runner, times(1)).run(eq(includes), any(File.class));
	}

	@Test
	public void testExcludes() throws MojoExecutionException,
			MojoFailureException {
		mojo.setExcludes(ImmutableList.of("**/other.xspec"));
		mojo.execute();
		Map<String, File> includes = ImmutableMap.of("test", new File(
				resourcesDir, "xspec-dummy/test.xspec"), "sub.test", new File(
				resourcesDir, "xspec-dummy/sub/test.xspec"), "sub.sub.test",
				new File(resourcesDir, "xspec-dummy/sub/sub/test.xspec"));
		verify(runner, times(1)).run(eq(includes), any(File.class));
	}

	@Test
	public void testSingle() throws MojoExecutionException,
			MojoFailureException {
		mojo.setTest("other");
		mojo.execute();
		Map<String, File> includes = ImmutableMap.of("sub.other", new File(
				resourcesDir, "xspec-dummy/sub/other.xspec"));
		verify(runner, times(1)).run(eq(includes), any(File.class));
	}

	@Test
	public void testSingleOverridesIncludes() throws MojoExecutionException,
			MojoFailureException {
		// an explicit single test wins over any configured includes
		mojo.setTest("other");
		mojo.setIncludes(ImmutableList.of("**/test.xspec"));
		mojo.execute();
		Map<String, File> includes = ImmutableMap.of("sub.other", new File(
				resourcesDir, "xspec-dummy/sub/other.xspec"));
		verify(runner, times(1)).run(eq(includes), any(File.class));
	}

	@Test(expected = MojoFailureException.class)
	public void testErrors() throws MojoExecutionException,
			MojoFailureException {
		when(runner.run(anyMapOf(String.class, File.class), any(File.class)))
				.thenReturn(resultsBuilder.addError().build());
		mojo.execute();
	}

	@Test(expected = MojoFailureException.class)
	public void testFailures() throws MojoExecutionException,
			MojoFailureException {
		when(runner.run(anyMapOf(String.class, File.class), any(File.class)))
				.thenReturn(resultsBuilder.addFailure().build());
		mojo.execute();
	}

	@Test(expected = MojoExecutionException.class)
	public void testException() throws MojoExecutionException,
			MojoFailureException {
		// a null result from the runner is an execution error, not a test failure
		when(runner.run(anyMapOf(String.class, File.class), any(File.class)))
				.thenReturn(null);
		mojo.execute();
	}
}
| |
/*
* Copyright 2014-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.android;
import com.android.common.SdkConstants;
import com.facebook.buck.android.NdkCxxPlatforms.TargetCpuType;
import com.facebook.buck.io.MorePaths;
import com.facebook.buck.io.ProjectFilesystem;
import com.facebook.buck.model.BuildTargets;
import com.facebook.buck.rules.AbstractBuildRule;
import com.facebook.buck.rules.AddToRuleKey;
import com.facebook.buck.rules.BuildContext;
import com.facebook.buck.rules.BuildRuleParams;
import com.facebook.buck.rules.BuildableContext;
import com.facebook.buck.rules.RuleKeyAppendable;
import com.facebook.buck.rules.RuleKeyBuilder;
import com.facebook.buck.rules.SourcePath;
import com.facebook.buck.rules.SourcePathResolver;
import com.facebook.buck.step.AbstractExecutionStep;
import com.facebook.buck.step.ExecutionContext;
import com.facebook.buck.step.Step;
import com.facebook.buck.step.fs.CopyStep;
import com.facebook.buck.step.fs.MakeCleanDirectoryStep;
import com.facebook.buck.step.fs.MkdirStep;
import com.facebook.buck.util.immutables.BuckStyleImmutable;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Joiner;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import org.immutables.value.Value;
import java.io.IOException;
import java.nio.file.FileVisitResult;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import javax.annotation.Nullable;
/**
* A {@link com.facebook.buck.rules.BuildRule} that gathers shared objects generated by
* {@code ndk_library} and {@code prebuilt_native_library} rules into a directory. It also hashes
* the shared objects collected and stores this metadata in a text file, to be used later by
* {@link ExopackageInstaller}.
*/
public class CopyNativeLibraries extends AbstractBuildRule {

  // NOTE(review): unlike the three fields below, this one is not annotated with
  // @AddToRuleKey — confirm the source paths are covered by the rule key elsewhere.
  private final ImmutableSet<SourcePath> nativeLibDirectories;

  @AddToRuleKey
  private final ImmutableSet<TargetCpuType> cpuFilters;

  @AddToRuleKey
  private final ImmutableSet<StrippedObjectDescription> stripLibRules;

  @AddToRuleKey
  private final ImmutableSet<StrippedObjectDescription> stripLibAssetRules;

  protected CopyNativeLibraries(
      BuildRuleParams buildRuleParams,
      SourcePathResolver resolver,
      ImmutableSet<SourcePath> nativeLibDirectories,
      ImmutableSet<StrippedObjectDescription> stripLibRules,
      ImmutableSet<StrippedObjectDescription> stripLibAssetRules,
      ImmutableSet<TargetCpuType> cpuFilters) {
    super(buildRuleParams, resolver);
    this.nativeLibDirectories = nativeLibDirectories;
    this.stripLibRules = stripLibRules;
    this.stripLibAssetRules = stripLibAssetRules;
    this.cpuFilters = cpuFilters;
    // A CopyNativeLibraries rule with nothing to copy is a construction error.
    Preconditions.checkArgument(
        !nativeLibDirectories.isEmpty() ||
        !stripLibRules.isEmpty() ||
        !stripLibAssetRules.isEmpty(),
        "There should be at least one native library to copy.");
  }

  /** Directory under the scratch path that receives the copied native libraries. */
  public Path getPathToNativeLibsDir() {
    return getBinPath().resolve("libs");
  }

  /** Directory under the scratch path that receives native libraries packaged as assets. */
  public Path getPathToNativeLibsAssetsDir() {
    return getBinPath().resolve("assetLibs");
  }

  /**
   * Returns the path that is the immediate parent of {@link #getPathToNativeLibsAssetsDir()} and
   * {@link #getPathToNativeLibsDir()}.
   */
  public Path getPathToAllLibsDir() {
    return getBinPath();
  }

  /** File recording "relative-path sha1" lines for every copied library. */
  public Path getPathToMetadataTxt() {
    return getBinPath().resolve("metadata.txt");
  }

  private Path getBinPath() {
    return BuildTargets.getScratchPath(getBuildTarget(), "__native_libs_%s__");
  }

  @VisibleForTesting
  ImmutableSet<SourcePath> getNativeLibDirectories() {
    return nativeLibDirectories;
  }

  @VisibleForTesting
  ImmutableSet<StrippedObjectDescription> getStrippedObjectDescriptions() {
    return ImmutableSet.<StrippedObjectDescription>builder()
        .addAll(stripLibRules)
        .addAll(stripLibAssetRules)
        .build();
  }

  /**
   * Appends, for each stripped object, a mkdir + copy of the object into
   * {@code destinationRootDir/<abi>/<name>}.
   */
  private void addStepsForCopyingStrippedNativeLibrariesOrAssets(
      ProjectFilesystem filesystem,
      ImmutableSet<StrippedObjectDescription> strippedNativeLibrariesOrAssets,
      Path destinationRootDir,
      ImmutableList.Builder<Step> steps) {
    for (StrippedObjectDescription strippedObject : strippedNativeLibrariesOrAssets) {
      Optional<String> abiDirectoryComponent =
          getAbiDirectoryComponent(strippedObject.getTargetCpuType());
      Preconditions.checkState(abiDirectoryComponent.isPresent());
      Path destination =
          destinationRootDir
              .resolve(abiDirectoryComponent.get())
              .resolve(strippedObject.getStrippedObjectName());
      // Use the passed-in filesystem consistently for both steps (previously the
      // MkdirStep used getProjectFilesystem() while the CopyStep used the parameter).
      steps.add(new MkdirStep(filesystem, destination.getParent()));
      steps.add(
          CopyStep.forFile(
              filesystem,
              getResolver().getAbsolutePath(strippedObject.getSourcePath()),
              destination));
    }
  }

  @Override
  public ImmutableList<Step> getBuildSteps(
      BuildContext context,
      BuildableContext buildableContext) {
    ImmutableList.Builder<Step> steps = ImmutableList.builder();

    // Start from clean output directories.
    steps.add(new MakeCleanDirectoryStep(getProjectFilesystem(), getBinPath()));
    final Path pathToNativeLibs = getPathToNativeLibsDir();
    steps.add(new MakeCleanDirectoryStep(getProjectFilesystem(), pathToNativeLibs));
    final Path pathToNativeLibsAssets = getPathToNativeLibsAssetsDir();
    steps.add(new MakeCleanDirectoryStep(getProjectFilesystem(), pathToNativeLibsAssets));

    // Copy in reverse order so that earlier directories overwrite later ones.
    for (SourcePath nativeLibDir : nativeLibDirectories.asList().reverse()) {
      copyNativeLibrary(
          getProjectFilesystem(),
          getResolver().getAbsolutePath(nativeLibDir),
          pathToNativeLibs,
          cpuFilters,
          steps);
    }

    addStepsForCopyingStrippedNativeLibrariesOrAssets(
        getProjectFilesystem(), stripLibRules, pathToNativeLibs, steps);
    addStepsForCopyingStrippedNativeLibrariesOrAssets(
        getProjectFilesystem(), stripLibAssetRules, pathToNativeLibsAssets, steps);

    // Hash every copied library and record "relative-path sha1" lines in metadata.txt,
    // consumed later by ExopackageInstaller.
    final Path pathToMetadataTxt = getPathToMetadataTxt();
    steps.add(
        new AbstractExecutionStep("hash_native_libs") {
          @Override
          public int execute(ExecutionContext context) {
            ProjectFilesystem filesystem = getProjectFilesystem();
            ImmutableList.Builder<String> metadataLines = ImmutableList.builder();
            try {
              for (Path nativeLib : filesystem.getFilesUnderPath(getPathToAllLibsDir())) {
                String filesha1 = filesystem.computeSha1(nativeLib);
                Path relativePath = getPathToAllLibsDir().relativize(nativeLib);
                metadataLines.add(String.format("%s %s", relativePath.toString(), filesha1));
              }
              filesystem.writeLinesToPath(metadataLines.build(), pathToMetadataTxt);
            } catch (IOException e) {
              context.logError(e, "There was an error hashing native libraries.");
              return 1;
            }
            return 0;
          }
        });

    buildableContext.recordArtifact(pathToNativeLibs);
    buildableContext.recordArtifact(pathToNativeLibsAssets);
    buildableContext.recordArtifact(pathToMetadataTxt);

    return steps.build();
  }

  @Nullable
  @Override
  public Path getPathToOutput() {
    // Outputs are recorded via recordArtifact; there is no single output path.
    return null;
  }

  /**
   * Adds steps that copy the (optionally ABI-filtered) contents of {@code sourceDir} into
   * {@code destinationDir}, then renames "*-disguised-exe" files to "lib*.so".
   */
  public static void copyNativeLibrary(
      final ProjectFilesystem filesystem,
      Path sourceDir,
      final Path destinationDir,
      ImmutableSet<TargetCpuType> cpuFilters,
      ImmutableList.Builder<Step> steps) {

    if (cpuFilters.isEmpty()) {
      // No filters: copy everything.
      steps.add(
          CopyStep.forDirectory(
              filesystem,
              sourceDir,
              destinationDir,
              CopyStep.DirectoryMode.CONTENTS_ONLY));
    } else {
      // Copy only the ABI subdirectories matching the requested CPU types, and only
      // when the source ABI directory actually exists.
      for (TargetCpuType cpuType : cpuFilters) {
        Optional<String> abiDirectoryComponent = getAbiDirectoryComponent(cpuType);
        Preconditions.checkState(abiDirectoryComponent.isPresent());

        final Path libSourceDir = sourceDir.resolve(abiDirectoryComponent.get());
        Path libDestinationDir = destinationDir.resolve(abiDirectoryComponent.get());

        final MkdirStep mkDirStep = new MkdirStep(filesystem, libDestinationDir);
        final CopyStep copyStep = CopyStep.forDirectory(
            filesystem,
            libSourceDir,
            libDestinationDir,
            CopyStep.DirectoryMode.CONTENTS_ONLY);
        steps.add(
            new Step() {
              @Override
              public int execute(ExecutionContext context) {
                // TODO(simons): Using a projectfilesystem here is almost definitely wrong.
                // This is because each library may come from different build rules, which may be in
                // different cells --- this check works by coincidence.
                if (!filesystem.exists(libSourceDir)) {
                  return 0;
                }
                if (mkDirStep.execute(context) == 0 && copyStep.execute(context) == 0) {
                  return 0;
                }
                return 1;
              }

              @Override
              public String getShortName() {
                return "copy_native_libraries";
              }

              @Override
              public String getDescription(ExecutionContext context) {
                ImmutableList.Builder<String> stringBuilder = ImmutableList.builder();
                stringBuilder.add(String.format("[ -d %s ]", libSourceDir.toString()));
                stringBuilder.add(mkDirStep.getDescription(context));
                stringBuilder.add(copyStep.getDescription(context));
                return Joiner.on(" && ").join(stringBuilder.build());
              }
            });
      }
    }

    // Rename native files named like "*-disguised-exe" to "lib*.so" so they will be unpacked
    // by the Android package installer. Then they can be executed like normal binaries
    // on the device.
    steps.add(
        new AbstractExecutionStep("rename_native_executables") {
          @Override
          public int execute(ExecutionContext context) {
            final ImmutableSet.Builder<Path> executablesBuilder = ImmutableSet.builder();
            try {
              filesystem.walkRelativeFileTree(destinationDir, new SimpleFileVisitor<Path>() {
                @Override
                public FileVisitResult visitFile(Path file, BasicFileAttributes attrs)
                    throws IOException {
                  if (file.toString().endsWith("-disguised-exe")) {
                    executablesBuilder.add(file);
                  }
                  return FileVisitResult.CONTINUE;
                }
              });

              for (Path exePath : executablesBuilder.build()) {
                Path fakeSoPath = Paths.get(
                    MorePaths.pathWithUnixSeparators(exePath)
                        .replaceAll("/([^/]+)-disguised-exe$", "/lib$1.so"));
                filesystem.move(exePath, fakeSoPath);
              }
            } catch (IOException e) {
              context.logError(e, "Renaming native executables failed.");
              return 1;
            }
            return 0;
          }
        });
  }

  /**
   * Native libraries compiled for different CPU architectures are placed in the
   * respective ABI subdirectories, such as 'armeabi', 'armeabi-v7a', 'x86' and 'mips'.
   * This looks at the cpu filter and returns the correct subdirectory. If cpu filter is
   * not present or not supported, returns Optional.absent();
   */
  private static Optional<String> getAbiDirectoryComponent(TargetCpuType cpuType) {
    String component = null;
    if (cpuType.equals(NdkCxxPlatforms.TargetCpuType.ARM)) {
      component = SdkConstants.ABI_ARMEABI;
    } else if (cpuType.equals(NdkCxxPlatforms.TargetCpuType.ARMV7)) {
      component = SdkConstants.ABI_ARMEABI_V7A;
    } else if (cpuType.equals(NdkCxxPlatforms.TargetCpuType.X86)) {
      component = SdkConstants.ABI_INTEL_ATOM;
    } else if (cpuType.equals(NdkCxxPlatforms.TargetCpuType.MIPS)) {
      component = SdkConstants.ABI_MIPS;
    }
    return Optional.fromNullable(component);
  }

  @Value.Immutable
  @BuckStyleImmutable
  abstract static class AbstractStrippedObjectDescription implements RuleKeyAppendable {
    public abstract SourcePath getSourcePath();
    public abstract String getStrippedObjectName();
    public abstract TargetCpuType getTargetCpuType();

    @Override
    public RuleKeyBuilder appendToRuleKey(RuleKeyBuilder builder) {
      return builder
          .setReflectively("sourcePath", getSourcePath())
          .setReflectively("strippedObjectName", getStrippedObjectName())
          .setReflectively("targetCpuType", getTargetCpuType());
    }
  }
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/datastore/v1beta3/datastore.proto
package com.google.datastore.v1beta3;
/**
* <pre>
* The request for [Datastore.Lookup][google.datastore.v1beta3.Datastore.Lookup].
* </pre>
*
* Protobuf type {@code google.datastore.v1beta3.LookupRequest}
*/
public final class LookupRequest extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.datastore.v1beta3.LookupRequest)
LookupRequestOrBuilder {
// Use LookupRequest.newBuilder() to construct; this constructor is only invoked
// by the generated Builder, and the superclass copies state from it.
private LookupRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}
// No-arg constructor used for the default instance and by the parsing constructor;
// initializes fields to their defaults (empty project id, empty key list).
private LookupRequest() {
  projectId_ = "";
  keys_ = java.util.Collections.emptyList();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
    getUnknownFields() {
  // Unknown fields are skipped during parsing (see the parsing constructor),
  // so this always reports an empty set.
  return com.google.protobuf.UnknownFieldSet.getDefaultInstance();
}
// Parsing constructor: decodes a serialized LookupRequest from the wire format.
// Tag values follow the protobuf encoding (field_number << 3 | wire_type):
// 10 -> read_options (field 1), 26 -> keys (field 3), 66 -> project_id (field 8).
private LookupRequest(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  this();
  int mutable_bitField0_ = 0;
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          // Tag 0 marks end of input.
          done = true;
          break;
        default: {
          // Unrecognized field: skip it (unknown fields are not retained).
          if (!input.skipField(tag)) {
            done = true;
          }
          break;
        }
        case 10: {
          // read_options: if a value was already parsed, merge the new one into it.
          com.google.datastore.v1beta3.ReadOptions.Builder subBuilder = null;
          if (readOptions_ != null) {
            subBuilder = readOptions_.toBuilder();
          }
          readOptions_ = input.readMessage(com.google.datastore.v1beta3.ReadOptions.parser(), extensionRegistry);
          if (subBuilder != null) {
            subBuilder.mergeFrom(readOptions_);
            readOptions_ = subBuilder.buildPartial();
          }
          break;
        }
        case 26: {
          // keys: lazily allocate the mutable list on the first element
          // (bit 0x00000004 records that the allocation happened).
          if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
            keys_ = new java.util.ArrayList<com.google.datastore.v1beta3.Key>();
            mutable_bitField0_ |= 0x00000004;
          }
          keys_.add(
              input.readMessage(com.google.datastore.v1beta3.Key.parser(), extensionRegistry));
          break;
        }
        case 66: {
          // project_id: rejected if not valid UTF-8.
          java.lang.String s = input.readStringRequireUtf8();
          projectId_ = s;
          break;
        }
      }
    }
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    throw new com.google.protobuf.InvalidProtocolBufferException(
        e).setUnfinishedMessage(this);
  } finally {
    // Freeze the keys list (if it was allocated) before the message escapes,
    // even when parsing fails part-way through.
    if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
      keys_ = java.util.Collections.unmodifiableList(keys_);
    }
    makeExtensionsImmutable();
  }
}
// Message descriptor, shared via the generated DatastoreProto holder class.
public static final com.google.protobuf.Descriptors.Descriptor
    getDescriptor() {
  return com.google.datastore.v1beta3.DatastoreProto.internal_static_google_datastore_v1beta3_LookupRequest_descriptor;
}
// Binds the shared field-accessor table to this message class and its Builder.
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.datastore.v1beta3.DatastoreProto.internal_static_google_datastore_v1beta3_LookupRequest_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.datastore.v1beta3.LookupRequest.class, com.google.datastore.v1beta3.LookupRequest.Builder.class);
}
private int bitField0_;
public static final int PROJECT_ID_FIELD_NUMBER = 8;
private volatile java.lang.Object projectId_;
/**
* <pre>
* The ID of the project against which to make the request.
* </pre>
*
* <code>optional string project_id = 8;</code>
*/
public java.lang.String getProjectId() {
  java.lang.Object ref = projectId_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // Field still holds the raw ByteString: decode once and cache the String
    // back into projectId_ for subsequent calls.
    com.google.protobuf.ByteString bs =
        (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    projectId_ = s;
    return s;
  }
}
/**
* <pre>
* The ID of the project against which to make the request.
* </pre>
*
* <code>optional string project_id = 8;</code>
*/
public com.google.protobuf.ByteString
    getProjectIdBytes() {
  java.lang.Object ref = projectId_;
  if (ref instanceof java.lang.String) {
    // Field holds the decoded String: encode once and cache the ByteString
    // back into projectId_ for subsequent calls.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8(
            (java.lang.String) ref);
    projectId_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
public static final int READ_OPTIONS_FIELD_NUMBER = 1;
private com.google.datastore.v1beta3.ReadOptions readOptions_;
/**
* <pre>
* The options for this lookup request.
* </pre>
*
* <code>optional .google.datastore.v1beta3.ReadOptions read_options = 1;</code>
*/
public boolean hasReadOptions() {
  // Presence of the message field is tracked via null.
  return readOptions_ != null;
}
/**
* <pre>
* The options for this lookup request.
* </pre>
*
* <code>optional .google.datastore.v1beta3.ReadOptions read_options = 1;</code>
*/
public com.google.datastore.v1beta3.ReadOptions getReadOptions() {
return readOptions_ == null ? com.google.datastore.v1beta3.ReadOptions.getDefaultInstance() : readOptions_;
}
/**
* <pre>
* The options for this lookup request.
* </pre>
*
* <code>optional .google.datastore.v1beta3.ReadOptions read_options = 1;</code>
*/
public com.google.datastore.v1beta3.ReadOptionsOrBuilder getReadOptionsOrBuilder() {
return getReadOptions();
}
public static final int KEYS_FIELD_NUMBER = 3;
private java.util.List<com.google.datastore.v1beta3.Key> keys_;
/**
* <pre>
* Keys of entities to look up.
* </pre>
*
* <code>repeated .google.datastore.v1beta3.Key keys = 3;</code>
*/
public java.util.List<com.google.datastore.v1beta3.Key> getKeysList() {
return keys_;
}
/**
* <pre>
* Keys of entities to look up.
* </pre>
*
* <code>repeated .google.datastore.v1beta3.Key keys = 3;</code>
*/
public java.util.List<? extends com.google.datastore.v1beta3.KeyOrBuilder>
getKeysOrBuilderList() {
return keys_;
}
/**
* <pre>
* Keys of entities to look up.
* </pre>
*
* <code>repeated .google.datastore.v1beta3.Key keys = 3;</code>
*/
public int getKeysCount() {
return keys_.size();
}
/**
* <pre>
* Keys of entities to look up.
* </pre>
*
* <code>repeated .google.datastore.v1beta3.Key keys = 3;</code>
*/
public com.google.datastore.v1beta3.Key getKeys(int index) {
return keys_.get(index);
}
/**
* <pre>
* Keys of entities to look up.
* </pre>
*
* <code>repeated .google.datastore.v1beta3.Key keys = 3;</code>
*/
public com.google.datastore.v1beta3.KeyOrBuilder getKeysOrBuilder(
int index) {
return keys_.get(index);
}
  // Memoized tri-state: -1 = not yet computed, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;
  /**
   * Always returns true after the first call: proto3 messages have no
   * required fields, so there is nothing to check.
   */
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  /**
   * Serializes this message to {@code output} in field-number order
   * (read_options=1, keys=3, project_id=8). Default values (null message,
   * empty string) are skipped, per proto3 semantics.
   */
  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    if (readOptions_ != null) {
      output.writeMessage(1, getReadOptions());
    }
    for (int i = 0; i < keys_.size(); i++) {
      output.writeMessage(3, keys_.get(i));
    }
    if (!getProjectIdBytes().isEmpty()) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 8, projectId_);
    }
  }
  /**
   * Returns the exact wire size of this message in bytes, computed with the
   * same skip-defaults rules as {@code writeTo} and memoized (the message is
   * immutable, so the size never changes).
   */
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (readOptions_ != null) {
      size += com.google.protobuf.CodedOutputStream
        .computeMessageSize(1, getReadOptions());
    }
    for (int i = 0; i < keys_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream
        .computeMessageSize(3, keys_.get(i));
    }
    if (!getProjectIdBytes().isEmpty()) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(8, projectId_);
    }
    memoizedSize = size;
    return size;
  }
private static final long serialVersionUID = 0L;
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.datastore.v1beta3.LookupRequest)) {
return super.equals(obj);
}
com.google.datastore.v1beta3.LookupRequest other = (com.google.datastore.v1beta3.LookupRequest) obj;
boolean result = true;
result = result && getProjectId()
.equals(other.getProjectId());
result = result && (hasReadOptions() == other.hasReadOptions());
if (hasReadOptions()) {
result = result && getReadOptions()
.equals(other.getReadOptions());
}
result = result && getKeysList()
.equals(other.getKeysList());
return result;
}
  /**
   * Hash built by mixing the descriptor, each set field, and the unknown
   * field set; memoized since the message is immutable.
   *
   * <p>NOTE(review): this includes {@code unknownFields} while the generated
   * {@code equals()} does not compare it — check the equals/hashCode contract.
   */
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptorForType().hashCode();
    hash = (37 * hash) + PROJECT_ID_FIELD_NUMBER;
    hash = (53 * hash) + getProjectId().hashCode();
    if (hasReadOptions()) {
      hash = (37 * hash) + READ_OPTIONS_FIELD_NUMBER;
      hash = (53 * hash) + getReadOptions().hashCode();
    }
    if (getKeysCount() > 0) {
      hash = (37 * hash) + KEYS_FIELD_NUMBER;
      hash = (53 * hash) + getKeysList().hashCode();
    }
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.datastore.v1beta3.LookupRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.datastore.v1beta3.LookupRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.datastore.v1beta3.LookupRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.datastore.v1beta3.LookupRequest parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.datastore.v1beta3.LookupRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.datastore.v1beta3.LookupRequest parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.datastore.v1beta3.LookupRequest parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.datastore.v1beta3.LookupRequest parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.datastore.v1beta3.LookupRequest parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.datastore.v1beta3.LookupRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.datastore.v1beta3.LookupRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* <pre>
* The request for [Datastore.Lookup][google.datastore.v1beta3.Datastore.Lookup].
* </pre>
*
* Protobuf type {@code google.datastore.v1beta3.LookupRequest}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.datastore.v1beta3.LookupRequest)
com.google.datastore.v1beta3.LookupRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.datastore.v1beta3.DatastoreProto.internal_static_google_datastore_v1beta3_LookupRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.datastore.v1beta3.DatastoreProto.internal_static_google_datastore_v1beta3_LookupRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.datastore.v1beta3.LookupRequest.class, com.google.datastore.v1beta3.LookupRequest.Builder.class);
}
// Construct using com.google.datastore.v1beta3.LookupRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getKeysFieldBuilder();
}
}
    /**
     * Resets all fields to their proto3 defaults: empty project id, no read
     * options (dropping any nested builder), and an empty keys list.
     */
    public Builder clear() {
      super.clear();
      projectId_ = "";
      if (readOptionsBuilder_ == null) {
        readOptions_ = null;
      } else {
        readOptions_ = null;
        readOptionsBuilder_ = null;
      }
      if (keysBuilder_ == null) {
        keys_ = java.util.Collections.emptyList();
        // Clear the "keys list is mutable/owned by this builder" bit.
        bitField0_ = (bitField0_ & ~0x00000004);
      } else {
        keysBuilder_.clear();
      }
      return this;
    }
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.datastore.v1beta3.DatastoreProto.internal_static_google_datastore_v1beta3_LookupRequest_descriptor;
}
public com.google.datastore.v1beta3.LookupRequest getDefaultInstanceForType() {
return com.google.datastore.v1beta3.LookupRequest.getDefaultInstance();
}
public com.google.datastore.v1beta3.LookupRequest build() {
com.google.datastore.v1beta3.LookupRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public com.google.datastore.v1beta3.LookupRequest buildPartial() {
com.google.datastore.v1beta3.LookupRequest result = new com.google.datastore.v1beta3.LookupRequest(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
result.projectId_ = projectId_;
if (readOptionsBuilder_ == null) {
result.readOptions_ = readOptions_;
} else {
result.readOptions_ = readOptionsBuilder_.build();
}
if (keysBuilder_ == null) {
if (((bitField0_ & 0x00000004) == 0x00000004)) {
keys_ = java.util.Collections.unmodifiableList(keys_);
bitField0_ = (bitField0_ & ~0x00000004);
}
result.keys_ = keys_;
} else {
result.keys_ = keysBuilder_.build();
}
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder clone() {
return (Builder) super.clone();
}
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
Object value) {
return (Builder) super.setField(field, value);
}
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return (Builder) super.clearField(field);
}
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return (Builder) super.clearOneof(oneof);
}
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, Object value) {
return (Builder) super.setRepeatedField(field, index, value);
}
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
Object value) {
return (Builder) super.addRepeatedField(field, value);
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.datastore.v1beta3.LookupRequest) {
return mergeFrom((com.google.datastore.v1beta3.LookupRequest)other);
} else {
super.mergeFrom(other);
return this;
}
}
    /**
     * Merges {@code other} into this builder: a non-empty project id
     * overwrites, read options are recursively merged, and keys are appended.
     */
    public Builder mergeFrom(com.google.datastore.v1beta3.LookupRequest other) {
      if (other == com.google.datastore.v1beta3.LookupRequest.getDefaultInstance()) return this;
      if (!other.getProjectId().isEmpty()) {
        projectId_ = other.projectId_;
        onChanged();
      }
      if (other.hasReadOptions()) {
        mergeReadOptions(other.getReadOptions());
      }
      if (keysBuilder_ == null) {
        if (!other.keys_.isEmpty()) {
          if (keys_.isEmpty()) {
            // Share other's immutable list; the ownership bit stays clear so
            // any later mutation copies it first (see ensureKeysIsMutable()).
            keys_ = other.keys_;
            bitField0_ = (bitField0_ & ~0x00000004);
          } else {
            ensureKeysIsMutable();
            keys_.addAll(other.keys_);
          }
          onChanged();
        }
      } else {
        if (!other.keys_.isEmpty()) {
          if (keysBuilder_.isEmpty()) {
            // Discard the empty nested builder and adopt other's list
            // directly; recreate the builder lazily only if field builders
            // are forced on.
            keysBuilder_.dispose();
            keysBuilder_ = null;
            keys_ = other.keys_;
            bitField0_ = (bitField0_ & ~0x00000004);
            keysBuilder_ =
              com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                 getKeysFieldBuilder() : null;
          } else {
            keysBuilder_.addAllMessages(other.keys_);
          }
        }
      }
      onChanged();
      return this;
    }
public final boolean isInitialized() {
return true;
}
    /**
     * Parses a message from {@code input} and merges it into this builder.
     * On a parse failure, any fields decoded before the error are still
     * merged (via the finally block) before the exception propagates as an
     * IOException.
     */
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.datastore.v1beta3.LookupRequest parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        // Keep whatever was successfully parsed before the failure.
        parsedMessage = (com.google.datastore.v1beta3.LookupRequest) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
private int bitField0_;
private java.lang.Object projectId_ = "";
/**
* <pre>
* The ID of the project against which to make the request.
* </pre>
*
* <code>optional string project_id = 8;</code>
*/
public java.lang.String getProjectId() {
java.lang.Object ref = projectId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
projectId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <pre>
* The ID of the project against which to make the request.
* </pre>
*
* <code>optional string project_id = 8;</code>
*/
public com.google.protobuf.ByteString
getProjectIdBytes() {
java.lang.Object ref = projectId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
projectId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <pre>
* The ID of the project against which to make the request.
* </pre>
*
* <code>optional string project_id = 8;</code>
*/
public Builder setProjectId(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
projectId_ = value;
onChanged();
return this;
}
/**
* <pre>
* The ID of the project against which to make the request.
* </pre>
*
* <code>optional string project_id = 8;</code>
*/
public Builder clearProjectId() {
projectId_ = getDefaultInstance().getProjectId();
onChanged();
return this;
}
/**
* <pre>
* The ID of the project against which to make the request.
* </pre>
*
* <code>optional string project_id = 8;</code>
*/
public Builder setProjectIdBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
projectId_ = value;
onChanged();
return this;
}
private com.google.datastore.v1beta3.ReadOptions readOptions_ = null;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.datastore.v1beta3.ReadOptions, com.google.datastore.v1beta3.ReadOptions.Builder, com.google.datastore.v1beta3.ReadOptionsOrBuilder> readOptionsBuilder_;
/**
* <pre>
* The options for this lookup request.
* </pre>
*
* <code>optional .google.datastore.v1beta3.ReadOptions read_options = 1;</code>
*/
public boolean hasReadOptions() {
return readOptionsBuilder_ != null || readOptions_ != null;
}
/**
* <pre>
* The options for this lookup request.
* </pre>
*
* <code>optional .google.datastore.v1beta3.ReadOptions read_options = 1;</code>
*/
public com.google.datastore.v1beta3.ReadOptions getReadOptions() {
if (readOptionsBuilder_ == null) {
return readOptions_ == null ? com.google.datastore.v1beta3.ReadOptions.getDefaultInstance() : readOptions_;
} else {
return readOptionsBuilder_.getMessage();
}
}
/**
* <pre>
* The options for this lookup request.
* </pre>
*
* <code>optional .google.datastore.v1beta3.ReadOptions read_options = 1;</code>
*/
public Builder setReadOptions(com.google.datastore.v1beta3.ReadOptions value) {
if (readOptionsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
readOptions_ = value;
onChanged();
} else {
readOptionsBuilder_.setMessage(value);
}
return this;
}
/**
* <pre>
* The options for this lookup request.
* </pre>
*
* <code>optional .google.datastore.v1beta3.ReadOptions read_options = 1;</code>
*/
public Builder setReadOptions(
com.google.datastore.v1beta3.ReadOptions.Builder builderForValue) {
if (readOptionsBuilder_ == null) {
readOptions_ = builderForValue.build();
onChanged();
} else {
readOptionsBuilder_.setMessage(builderForValue.build());
}
return this;
}
/**
* <pre>
* The options for this lookup request.
* </pre>
*
* <code>optional .google.datastore.v1beta3.ReadOptions read_options = 1;</code>
*/
public Builder mergeReadOptions(com.google.datastore.v1beta3.ReadOptions value) {
if (readOptionsBuilder_ == null) {
if (readOptions_ != null) {
readOptions_ =
com.google.datastore.v1beta3.ReadOptions.newBuilder(readOptions_).mergeFrom(value).buildPartial();
} else {
readOptions_ = value;
}
onChanged();
} else {
readOptionsBuilder_.mergeFrom(value);
}
return this;
}
/**
* <pre>
* The options for this lookup request.
* </pre>
*
* <code>optional .google.datastore.v1beta3.ReadOptions read_options = 1;</code>
*/
public Builder clearReadOptions() {
if (readOptionsBuilder_ == null) {
readOptions_ = null;
onChanged();
} else {
readOptions_ = null;
readOptionsBuilder_ = null;
}
return this;
}
/**
* <pre>
* The options for this lookup request.
* </pre>
*
* <code>optional .google.datastore.v1beta3.ReadOptions read_options = 1;</code>
*/
public com.google.datastore.v1beta3.ReadOptions.Builder getReadOptionsBuilder() {
onChanged();
return getReadOptionsFieldBuilder().getBuilder();
}
/**
* <pre>
* The options for this lookup request.
* </pre>
*
* <code>optional .google.datastore.v1beta3.ReadOptions read_options = 1;</code>
*/
public com.google.datastore.v1beta3.ReadOptionsOrBuilder getReadOptionsOrBuilder() {
if (readOptionsBuilder_ != null) {
return readOptionsBuilder_.getMessageOrBuilder();
} else {
return readOptions_ == null ?
com.google.datastore.v1beta3.ReadOptions.getDefaultInstance() : readOptions_;
}
}
/**
* <pre>
* The options for this lookup request.
* </pre>
*
* <code>optional .google.datastore.v1beta3.ReadOptions read_options = 1;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.datastore.v1beta3.ReadOptions, com.google.datastore.v1beta3.ReadOptions.Builder, com.google.datastore.v1beta3.ReadOptionsOrBuilder>
getReadOptionsFieldBuilder() {
if (readOptionsBuilder_ == null) {
readOptionsBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
com.google.datastore.v1beta3.ReadOptions, com.google.datastore.v1beta3.ReadOptions.Builder, com.google.datastore.v1beta3.ReadOptionsOrBuilder>(
getReadOptions(),
getParentForChildren(),
isClean());
readOptions_ = null;
}
return readOptionsBuilder_;
}
private java.util.List<com.google.datastore.v1beta3.Key> keys_ =
java.util.Collections.emptyList();
private void ensureKeysIsMutable() {
if (!((bitField0_ & 0x00000004) == 0x00000004)) {
keys_ = new java.util.ArrayList<com.google.datastore.v1beta3.Key>(keys_);
bitField0_ |= 0x00000004;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.datastore.v1beta3.Key, com.google.datastore.v1beta3.Key.Builder, com.google.datastore.v1beta3.KeyOrBuilder> keysBuilder_;
/**
* <pre>
* Keys of entities to look up.
* </pre>
*
* <code>repeated .google.datastore.v1beta3.Key keys = 3;</code>
*/
public java.util.List<com.google.datastore.v1beta3.Key> getKeysList() {
if (keysBuilder_ == null) {
return java.util.Collections.unmodifiableList(keys_);
} else {
return keysBuilder_.getMessageList();
}
}
/**
* <pre>
* Keys of entities to look up.
* </pre>
*
* <code>repeated .google.datastore.v1beta3.Key keys = 3;</code>
*/
public int getKeysCount() {
if (keysBuilder_ == null) {
return keys_.size();
} else {
return keysBuilder_.getCount();
}
}
/**
* <pre>
* Keys of entities to look up.
* </pre>
*
* <code>repeated .google.datastore.v1beta3.Key keys = 3;</code>
*/
public com.google.datastore.v1beta3.Key getKeys(int index) {
if (keysBuilder_ == null) {
return keys_.get(index);
} else {
return keysBuilder_.getMessage(index);
}
}
/**
* <pre>
* Keys of entities to look up.
* </pre>
*
* <code>repeated .google.datastore.v1beta3.Key keys = 3;</code>
*/
public Builder setKeys(
int index, com.google.datastore.v1beta3.Key value) {
if (keysBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureKeysIsMutable();
keys_.set(index, value);
onChanged();
} else {
keysBuilder_.setMessage(index, value);
}
return this;
}
/**
* <pre>
* Keys of entities to look up.
* </pre>
*
* <code>repeated .google.datastore.v1beta3.Key keys = 3;</code>
*/
public Builder setKeys(
int index, com.google.datastore.v1beta3.Key.Builder builderForValue) {
if (keysBuilder_ == null) {
ensureKeysIsMutable();
keys_.set(index, builderForValue.build());
onChanged();
} else {
keysBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* <pre>
* Keys of entities to look up.
* </pre>
*
* <code>repeated .google.datastore.v1beta3.Key keys = 3;</code>
*/
public Builder addKeys(com.google.datastore.v1beta3.Key value) {
if (keysBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureKeysIsMutable();
keys_.add(value);
onChanged();
} else {
keysBuilder_.addMessage(value);
}
return this;
}
/**
* <pre>
* Keys of entities to look up.
* </pre>
*
* <code>repeated .google.datastore.v1beta3.Key keys = 3;</code>
*/
public Builder addKeys(
int index, com.google.datastore.v1beta3.Key value) {
if (keysBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureKeysIsMutable();
keys_.add(index, value);
onChanged();
} else {
keysBuilder_.addMessage(index, value);
}
return this;
}
/**
* <pre>
* Keys of entities to look up.
* </pre>
*
* <code>repeated .google.datastore.v1beta3.Key keys = 3;</code>
*/
public Builder addKeys(
com.google.datastore.v1beta3.Key.Builder builderForValue) {
if (keysBuilder_ == null) {
ensureKeysIsMutable();
keys_.add(builderForValue.build());
onChanged();
} else {
keysBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* <pre>
* Keys of entities to look up.
* </pre>
*
* <code>repeated .google.datastore.v1beta3.Key keys = 3;</code>
*/
public Builder addKeys(
int index, com.google.datastore.v1beta3.Key.Builder builderForValue) {
if (keysBuilder_ == null) {
ensureKeysIsMutable();
keys_.add(index, builderForValue.build());
onChanged();
} else {
keysBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* <pre>
* Keys of entities to look up.
* </pre>
*
* <code>repeated .google.datastore.v1beta3.Key keys = 3;</code>
*/
public Builder addAllKeys(
java.lang.Iterable<? extends com.google.datastore.v1beta3.Key> values) {
if (keysBuilder_ == null) {
ensureKeysIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(
values, keys_);
onChanged();
} else {
keysBuilder_.addAllMessages(values);
}
return this;
}
/**
* <pre>
* Keys of entities to look up.
* </pre>
*
* <code>repeated .google.datastore.v1beta3.Key keys = 3;</code>
*/
public Builder clearKeys() {
if (keysBuilder_ == null) {
keys_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
} else {
keysBuilder_.clear();
}
return this;
}
/**
* <pre>
* Keys of entities to look up.
* </pre>
*
* <code>repeated .google.datastore.v1beta3.Key keys = 3;</code>
*/
public Builder removeKeys(int index) {
if (keysBuilder_ == null) {
ensureKeysIsMutable();
keys_.remove(index);
onChanged();
} else {
keysBuilder_.remove(index);
}
return this;
}
/**
* <pre>
* Keys of entities to look up.
* </pre>
*
* <code>repeated .google.datastore.v1beta3.Key keys = 3;</code>
*/
public com.google.datastore.v1beta3.Key.Builder getKeysBuilder(
int index) {
return getKeysFieldBuilder().getBuilder(index);
}
/**
* <pre>
* Keys of entities to look up.
* </pre>
*
* <code>repeated .google.datastore.v1beta3.Key keys = 3;</code>
*/
public com.google.datastore.v1beta3.KeyOrBuilder getKeysOrBuilder(
int index) {
if (keysBuilder_ == null) {
return keys_.get(index); } else {
return keysBuilder_.getMessageOrBuilder(index);
}
}
/**
* <pre>
* Keys of entities to look up.
* </pre>
*
* <code>repeated .google.datastore.v1beta3.Key keys = 3;</code>
*/
public java.util.List<? extends com.google.datastore.v1beta3.KeyOrBuilder>
getKeysOrBuilderList() {
if (keysBuilder_ != null) {
return keysBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(keys_);
}
}
/**
* <pre>
* Keys of entities to look up.
* </pre>
*
* <code>repeated .google.datastore.v1beta3.Key keys = 3;</code>
*/
public com.google.datastore.v1beta3.Key.Builder addKeysBuilder() {
return getKeysFieldBuilder().addBuilder(
com.google.datastore.v1beta3.Key.getDefaultInstance());
}
/**
* <pre>
* Keys of entities to look up.
* </pre>
*
* <code>repeated .google.datastore.v1beta3.Key keys = 3;</code>
*/
public com.google.datastore.v1beta3.Key.Builder addKeysBuilder(
int index) {
return getKeysFieldBuilder().addBuilder(
index, com.google.datastore.v1beta3.Key.getDefaultInstance());
}
/**
* <pre>
* Keys of entities to look up.
* </pre>
*
* <code>repeated .google.datastore.v1beta3.Key keys = 3;</code>
*/
public java.util.List<com.google.datastore.v1beta3.Key.Builder>
getKeysBuilderList() {
return getKeysFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.datastore.v1beta3.Key, com.google.datastore.v1beta3.Key.Builder, com.google.datastore.v1beta3.KeyOrBuilder>
getKeysFieldBuilder() {
if (keysBuilder_ == null) {
keysBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.datastore.v1beta3.Key, com.google.datastore.v1beta3.Key.Builder, com.google.datastore.v1beta3.KeyOrBuilder>(
keys_,
((bitField0_ & 0x00000004) == 0x00000004),
getParentForChildren(),
isClean());
keys_ = null;
}
return keysBuilder_;
}
    /**
     * No-op: this generated proto3 code discards unknown fields instead of
     * storing them on the builder.
     */
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return this;
    }
    /**
     * No-op: unknown fields are not retained (see {@link #setUnknownFields}).
     */
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return this;
    }
// @@protoc_insertion_point(builder_scope:google.datastore.v1beta3.LookupRequest)
}
// @@protoc_insertion_point(class_scope:google.datastore.v1beta3.LookupRequest)
private static final com.google.datastore.v1beta3.LookupRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.datastore.v1beta3.LookupRequest();
}
public static com.google.datastore.v1beta3.LookupRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
  // Shared parser; each partial parse delegates to the message's
  // stream-reading constructor.
  private static final com.google.protobuf.Parser<LookupRequest>
      PARSER = new com.google.protobuf.AbstractParser<LookupRequest>() {
    public LookupRequest parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
        return new LookupRequest(input, extensionRegistry);
    }
  };
  /** Returns the shared parser instance for {@code LookupRequest}. */
  public static com.google.protobuf.Parser<LookupRequest> parser() {
    return PARSER;
  }
@java.lang.Override
public com.google.protobuf.Parser<LookupRequest> getParserForType() {
return PARSER;
}
public com.google.datastore.v1beta3.LookupRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.cluster.allocation;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.cluster.routing.IndexRoutingTable;
import org.elasticsearch.cluster.routing.IndexShardRoutingTable;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.ShardRoutingState;
import org.elasticsearch.cluster.routing.allocation.decider.FilterAllocationDecider;
import org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
import org.elasticsearch.test.ESIntegTestCase.Scope;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import static org.hamcrest.Matchers.equalTo;
@ClusterScope(scope= Scope.TEST, numDataNodes =0)
public class FilteringAllocationIT extends ESIntegTestCase {
private final Logger logger = Loggers.getLogger(FilteringAllocationIT.class);
    /**
     * Decommissions a node via allocation filtering: creates a zero-replica
     * index on a two-node cluster, excludes the second node by name with the
     * transient setting {@code cluster.routing.allocation.exclude._name}, and
     * verifies that every shard relocates to the first node with no document
     * loss.
     */
    public void testDecommissionNodeNoReplicas() throws Exception {
        logger.info("--> starting 2 nodes");
        List<String> nodesIds = internalCluster().startNodes(2);
        final String node_0 = nodesIds.get(0);
        final String node_1 = nodesIds.get(1);
        assertThat(cluster().size(), equalTo(2));
        logger.info("--> creating an index with no replicas");
        client().admin().indices().prepareCreate("test")
            .setSettings(Settings.builder().put("index.number_of_replicas", 0))
            .execute().actionGet();
        ensureGreen();
        logger.info("--> index some data");
        for (int i = 0; i < 100; i++) {
            client().prepareIndex("test", "type", Integer.toString(i)).setSource("field", "value" + i).execute().actionGet();
        }
        // Refresh so the indexed docs are visible to the search below.
        client().admin().indices().prepareRefresh().execute().actionGet();
        assertThat(client().prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits(), equalTo(100L));
        logger.info("--> decommission the second node");
        client().admin().cluster().prepareUpdateSettings()
            .setTransientSettings(Settings.builder().put("cluster.routing.allocation.exclude._name", node_1))
            .execute().actionGet();
        // Block until the cluster finishes moving shards off the excluded node.
        waitForRelocation();
        logger.info("--> verify all are allocated on node1 now");
        ClusterState clusterState = client().admin().cluster().prepareState().execute().actionGet().getState();
        for (IndexRoutingTable indexRoutingTable : clusterState.routingTable()) {
            for (IndexShardRoutingTable indexShardRoutingTable : indexRoutingTable) {
                for (ShardRouting shardRouting : indexShardRoutingTable) {
                    // Every shard copy must now live on the non-excluded node.
                    assertThat(clusterState.nodes().get(shardRouting.currentNodeId()).getName(), equalTo(node_0));
                }
            }
        }
        client().admin().indices().prepareRefresh().execute().actionGet();
        // No documents were lost during relocation.
        assertThat(client().prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits(), equalTo(100L));
    }
/**
 * Verifies index-level allocation filtering: excluding the first node moves all shards
 * of the index to the second node, and clearing the filter lets shards spread back
 * across both nodes.
 *
 * @throws Exception on cluster startup/health-check failures
 */
public void testDisablingAllocationFiltering() throws Exception {
    logger.info("--> starting 2 nodes");
    List<String> nodesIds = internalCluster().startNodes(2);
    final String node_0 = nodesIds.get(0);
    final String node_1 = nodesIds.get(1);
    assertThat(cluster().size(), equalTo(2));

    logger.info("--> creating an index with no replicas");
    client().admin().indices().prepareCreate("test")
            .setSettings(Settings.builder().put("index.number_of_shards", 2).put("index.number_of_replicas", 0))
            .execute().actionGet();
    ensureGreen();

    logger.info("--> index some data");
    for (int i = 0; i < 100; i++) {
        client().prepareIndex("test", "type", Integer.toString(i)).setSource("field", "value" + i).execute().actionGet();
    }
    client().admin().indices().prepareRefresh().execute().actionGet();
    assertThat(client().prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery())
            .execute().actionGet().getHits().getTotalHits(), equalTo(100L));

    ClusterState clusterState = client().admin().cluster().prepareState().execute().actionGet().getState();
    IndexRoutingTable indexRoutingTable = clusterState.routingTable().index("test");
    // BUG FIX: the original compared node names against the hard-coded literal "node1",
    // which never matches the randomized names produced by internalCluster(), so the
    // count was always 0 and the recovery throttle below was never raised. Compare
    // against the actual name of the second node instead.
    int numShardsOnNode1 = 0;
    for (IndexShardRoutingTable indexShardRoutingTable : indexRoutingTable) {
        for (ShardRouting shardRouting : indexShardRoutingTable) {
            if (node_1.equals(clusterState.nodes().get(shardRouting.currentNodeId()).getName())) {
                numShardsOnNode1++;
            }
        }
    }

    if (numShardsOnNode1 > ThrottlingAllocationDecider.DEFAULT_CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES) {
        client().admin().cluster().prepareUpdateSettings()
                .setTransientSettings(Settings.builder()
                        .put("cluster.routing.allocation.node_concurrent_recoveries", numShardsOnNode1))
                .execute().actionGet();
        // make sure we can recover all the nodes at once otherwise we might run into a state where one of the shards
        // has not yet started relocating but we already fired up the request to wait for 0 relocating shards.
    }

    logger.info("--> remove index from the first node");
    client().admin().indices().prepareUpdateSettings("test")
            .setSettings(Settings.builder().put("index.routing.allocation.exclude._name", node_0))
            .execute().actionGet();
    client().admin().cluster().prepareReroute().get();
    ensureGreen();

    logger.info("--> verify all shards are allocated on node_1 now");
    clusterState = client().admin().cluster().prepareState().execute().actionGet().getState();
    indexRoutingTable = clusterState.routingTable().index("test");
    for (IndexShardRoutingTable indexShardRoutingTable : indexRoutingTable) {
        for (ShardRouting shardRouting : indexShardRoutingTable) {
            assertThat(clusterState.nodes().get(shardRouting.currentNodeId()).getName(), equalTo(node_1));
        }
    }

    logger.info("--> disable allocation filtering ");
    client().admin().indices().prepareUpdateSettings("test")
            .setSettings(Settings.builder().put("index.routing.allocation.exclude._name", ""))
            .execute().actionGet();
    client().admin().cluster().prepareReroute().get();
    ensureGreen();

    logger.info("--> verify that there are shards allocated on both nodes now");
    clusterState = client().admin().cluster().prepareState().execute().actionGet().getState();
    assertThat(clusterState.routingTable().index("test").numberOfNodesShardsAreAllocatedOn(), equalTo(2));
}
/**
 * Checks that a malformed IP value supplied to any cluster-level allocation IP filter
 * setting is rejected with a descriptive {@link IllegalArgumentException}.
 */
public void testInvalidIPFilterClusterSettings() {
    // Pick a random IP-based filter attribute and a random filter group setting.
    String ipKey = randomFrom("_ip", "_host_ip", "_publish_ip");
    Setting<String> filterSetting = randomFrom(FilterAllocationDecider.CLUSTER_ROUTING_REQUIRE_GROUP_SETTING,
        FilterAllocationDecider.CLUSTER_ROUTING_INCLUDE_GROUP_SETTING, FilterAllocationDecider.CLUSTER_ROUTING_EXCLUDE_GROUP_SETTING);
    String settingKey = filterSetting.getKey() + ipKey;
    String invalidIp = "192.168.1.1.";  // trailing dot makes this an invalid address

    IllegalArgumentException exception = expectThrows(IllegalArgumentException.class,
        () -> client().admin().cluster().prepareUpdateSettings()
            .setTransientSettings(Settings.builder().put(settingKey, invalidIp))
            .execute().actionGet());

    assertEquals("invalid IP address [" + invalidIp + "] for [" + settingKey + "]", exception.getMessage());
}
/**
 * Verifies that transient exclude-allocation settings keep being applied when a later
 * settings update also carries an unrelated persistent setting: shards must remain on
 * the include-node set both before and after the second update.
 *
 * @throws Exception on cluster startup/health-check failures
 */
public void testTransientSettingsStillApplied() throws Exception {
    List<String> nodes = internalCluster().startNodes(6);
    Set<String> excludeNodes = new HashSet<>(nodes.subList(0, 3));
    Set<String> includeNodes = new HashSet<>(nodes.subList(3, 6));
    logger.info("--> exclude: [{}], include: [{}]",
            Strings.collectionToCommaDelimitedString(excludeNodes),
            Strings.collectionToCommaDelimitedString(includeNodes));
    ensureStableCluster(6);
    client().admin().indices().prepareCreate("test").get();
    ensureGreen("test");

    Settings exclude = Settings.builder().put("cluster.routing.allocation.exclude._name",
            Strings.collectionToCommaDelimitedString(excludeNodes)).build();
    logger.info("--> updating settings");
    client().admin().cluster().prepareUpdateSettings().setTransientSettings(exclude).get();
    logger.info("--> waiting for relocation");
    waitForRelocation(ClusterHealthStatus.GREEN);
    ClusterState state = client().admin().cluster().prepareState().get().getState();
    assertStartedShardsOnNodes(state, includeNodes);

    Settings other = Settings.builder().put("cluster.info.update.interval", "45s").build();
    logger.info("--> updating settings with random persistent setting");
    client().admin().cluster().prepareUpdateSettings()
            .setPersistentSettings(other).setTransientSettings(exclude).get();
    logger.info("--> waiting for relocation");
    waitForRelocation(ClusterHealthStatus.GREEN);
    state = client().admin().cluster().prepareState().get().getState();
    // The transient settings still exist in the state
    assertThat(state.metaData().transientSettings(), equalTo(exclude));
    assertStartedShardsOnNodes(state, includeNodes);
}

/**
 * Asserts that every STARTED shard in {@code state} is allocated on one of the
 * expected nodes. Extracted to eliminate the verification loop that was duplicated
 * verbatim inside {@link #testTransientSettingsStillApplied()}.
 */
private void assertStartedShardsOnNodes(ClusterState state, Set<String> includeNodes) {
    for (ShardRouting shard : state.getRoutingTable().shardsWithState(ShardRoutingState.STARTED)) {
        String node = state.getRoutingNodes().node(shard.currentNodeId()).node().getName();
        logger.info("--> shard on {} - {}", node, shard);
        assertTrue("shard on " + node + " but should only be on the include node list: " +
                        Strings.collectionToCommaDelimitedString(includeNodes),
                includeNodes.contains(node));
    }
}
}
| |
/*
* RESTful API
* No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
*
* OpenAPI spec version: 1
*
*
* NOTE: This class is auto generated by the swagger code generator program.
* https://github.com/swagger-api/swagger-codegen.git
* Do not edit the class manually.
*/
package com.logsentinel.model;
import java.util.Objects;
import java.util.Arrays;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonValue;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
/**
* GDPRRecipient
*/
public class GDPRRecipient {
    /**
     * Gets or Sets category
     */
    public enum CategoryEnum {
        STAFF("STAFF"),
        PUBLIC_AUTHORITY("PUBLIC_AUTHORITY"),
        CONTRACTOR("CONTRACTOR"),
        OTHER("OTHER");

        // Wire value serialized to / parsed from JSON. Made final: it is assigned
        // exactly once in the constructor and must never change afterwards.
        private final String value;

        CategoryEnum(String value) {
            this.value = value;
        }

        /** @return the raw JSON wire value for this constant */
        @JsonValue
        public String getValue() {
            return value;
        }

        @Override
        public String toString() {
            return String.valueOf(value);
        }

        /**
         * Resolves the constant whose wire value equals {@code text}.
         *
         * @param text JSON text to match
         * @return the matching constant, or {@code null} if none matches
         *         (generated-code convention; callers must handle null)
         */
        @JsonCreator
        public static CategoryEnum fromValue(String text) {
            for (CategoryEnum b : CategoryEnum.values()) {
                if (String.valueOf(b.value).equals(text)) {
                    return b;
                }
            }
            return null;
        }
    }

    @JsonProperty("category")
    private CategoryEnum category = null;

    /**
     * Gets or Sets entityType
     */
    public enum EntityTypeEnum {
        CONTROLLER("CONTROLLER"),
        PROCESSOR("PROCESSOR"),
        NON_EU_ENTITY_REPRESENTATIVE("NON_EU_ENTITY_REPRESENTATIVE"),
        THIRD_PARTY("THIRD_PARTY");

        // Wire value serialized to / parsed from JSON; immutable once constructed.
        private final String value;

        EntityTypeEnum(String value) {
            this.value = value;
        }

        /** @return the raw JSON wire value for this constant */
        @JsonValue
        public String getValue() {
            return value;
        }

        @Override
        public String toString() {
            return String.valueOf(value);
        }

        /**
         * Resolves the constant whose wire value equals {@code text}.
         *
         * @param text JSON text to match
         * @return the matching constant, or {@code null} if none matches
         */
        @JsonCreator
        public static EntityTypeEnum fromValue(String text) {
            for (EntityTypeEnum b : EntityTypeEnum.values()) {
                if (String.valueOf(b.value).equals(text)) {
                    return b;
                }
            }
            return null;
        }
    }

    @JsonProperty("entityType")
    private EntityTypeEnum entityType = null;

    /**
     * Gets or Sets legalEntityType
     */
    public enum LegalEntityTypeEnum {
        LEGAL_PERSON("LEGAL_PERSON"),
        NATURAL_PERSON("NATURAL_PERSON"),
        PUBLIC_AUTHORITY("PUBLIC_AUTHORITY");

        // Wire value serialized to / parsed from JSON; immutable once constructed.
        private final String value;

        LegalEntityTypeEnum(String value) {
            this.value = value;
        }

        /** @return the raw JSON wire value for this constant */
        @JsonValue
        public String getValue() {
            return value;
        }

        @Override
        public String toString() {
            return String.valueOf(value);
        }

        /**
         * Resolves the constant whose wire value equals {@code text}.
         *
         * @param text JSON text to match
         * @return the matching constant, or {@code null} if none matches
         */
        @JsonCreator
        public static LegalEntityTypeEnum fromValue(String text) {
            for (LegalEntityTypeEnum b : LegalEntityTypeEnum.values()) {
                if (String.valueOf(b.value).equals(text)) {
                    return b;
                }
            }
            return null;
        }
    }

    @JsonProperty("legalEntityType")
    private LegalEntityTypeEnum legalEntityType = null;

    @JsonProperty("legalIdentifier")
    private String legalIdentifier = null;

    @JsonProperty("name")
    private String name = null;

    /** Fluent setter for {@code category}; returns {@code this} for chaining. */
    public GDPRRecipient category(CategoryEnum category) {
        this.category = category;
        return this;
    }

    /**
     * Get category
     * @return category
     **/
    @ApiModelProperty(value = "")
    public CategoryEnum getCategory() {
        return category;
    }

    public void setCategory(CategoryEnum category) {
        this.category = category;
    }

    /** Fluent setter for {@code entityType}; returns {@code this} for chaining. */
    public GDPRRecipient entityType(EntityTypeEnum entityType) {
        this.entityType = entityType;
        return this;
    }

    /**
     * Get entityType
     * @return entityType
     **/
    @ApiModelProperty(value = "")
    public EntityTypeEnum getEntityType() {
        return entityType;
    }

    public void setEntityType(EntityTypeEnum entityType) {
        this.entityType = entityType;
    }

    /** Fluent setter for {@code legalEntityType}; returns {@code this} for chaining. */
    public GDPRRecipient legalEntityType(LegalEntityTypeEnum legalEntityType) {
        this.legalEntityType = legalEntityType;
        return this;
    }

    /**
     * Get legalEntityType
     * @return legalEntityType
     **/
    @ApiModelProperty(value = "")
    public LegalEntityTypeEnum getLegalEntityType() {
        return legalEntityType;
    }

    public void setLegalEntityType(LegalEntityTypeEnum legalEntityType) {
        this.legalEntityType = legalEntityType;
    }

    /** Fluent setter for {@code legalIdentifier}; returns {@code this} for chaining. */
    public GDPRRecipient legalIdentifier(String legalIdentifier) {
        this.legalIdentifier = legalIdentifier;
        return this;
    }

    /**
     * Get legalIdentifier
     * @return legalIdentifier
     **/
    @ApiModelProperty(value = "")
    public String getLegalIdentifier() {
        return legalIdentifier;
    }

    public void setLegalIdentifier(String legalIdentifier) {
        this.legalIdentifier = legalIdentifier;
    }

    /** Fluent setter for {@code name}; returns {@code this} for chaining. */
    public GDPRRecipient name(String name) {
        this.name = name;
        return this;
    }

    /**
     * Get name
     * @return name
     **/
    @ApiModelProperty(value = "")
    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    @Override
    public boolean equals(java.lang.Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        GDPRRecipient gdPRRecipient = (GDPRRecipient) o;
        return Objects.equals(this.category, gdPRRecipient.category) &&
            Objects.equals(this.entityType, gdPRRecipient.entityType) &&
            Objects.equals(this.legalEntityType, gdPRRecipient.legalEntityType) &&
            Objects.equals(this.legalIdentifier, gdPRRecipient.legalIdentifier) &&
            Objects.equals(this.name, gdPRRecipient.name);
    }

    @Override
    public int hashCode() {
        return Objects.hash(category, entityType, legalEntityType, legalIdentifier, name);
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("class GDPRRecipient {\n");
        sb.append("    category: ").append(toIndentedString(category)).append("\n");
        sb.append("    entityType: ").append(toIndentedString(entityType)).append("\n");
        sb.append("    legalEntityType: ").append(toIndentedString(legalEntityType)).append("\n");
        sb.append("    legalIdentifier: ").append(toIndentedString(legalIdentifier)).append("\n");
        sb.append("    name: ").append(toIndentedString(name)).append("\n");
        sb.append("}");
        return sb.toString();
    }

    /**
     * Convert the given object to string with each line indented by 4 spaces
     * (except the first line).
     */
    private String toIndentedString(java.lang.Object o) {
        if (o == null) {
            return "null";
        }
        return o.toString().replace("\n", "\n    ");
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @author Vera Y. Petrashkova
* @version $Revision$
*/
package tests.security.cert;
import dalvik.annotation.TestTargets;
import dalvik.annotation.TestLevel;
import dalvik.annotation.TestTargetNew;
import dalvik.annotation.TestTargetClass;
import junit.framework.TestCase;
import tests.security.cert.myCertPathBuilder.MyProvider;
import java.security.InvalidAlgorithmParameterException;
import java.security.NoSuchAlgorithmException;
import java.security.NoSuchProviderException;
import java.security.Provider;
import java.security.Security;
import java.security.cert.CertPathBuilder;
import java.security.cert.CertPathBuilderException;
import java.security.cert.CertPathBuilderResult;
import org.apache.harmony.security.tests.support.SpiEngUtils;
/**
* Tests for CertPathBuilder class constructors and methods
*
*/
@TestTargetClass(CertPathBuilder.class)
public class CertPathBuilder2Test extends TestCase {
    // Algorithm name registered by the test provider installed in setUp().
    private static final String defaultAlg = "CertPB";
    // Fully-qualified name of the stub CertPathBuilderSpi the test provider exposes.
    private static final String CertPathBuilderProviderClass = "org.apache.harmony.security.tests.support.cert.MyCertPathBuilderSpi";

    // Algorithm names that must be rejected by getInstance(...).
    private static final String[] invalidValues = SpiEngUtils.invalidValues;

    // Case variants of defaultAlg: algorithm lookup must be case-insensitive.
    private static final String[] validValues;

    static {
        validValues = new String[4];
        validValues[0] = defaultAlg;
        validValues[1] = defaultAlg.toLowerCase();
        validValues[2] = "CeRtPb";
        validValues[3] = "cERTpb";
    }

    // Test provider; installed at highest priority in setUp(), removed in tearDown().
    Provider mProv;

    /**
     * Installs the stub CertPathBuilder provider at position 1 so that
     * getInstance(defaultAlg) resolves to MyCertPathBuilderSpi.
     */
    protected void setUp() throws Exception {
        super.setUp();
        mProv = (new SpiEngUtils()).new MyProvider("MyCertPathBuilderProvider",
                "Provider for testing", CertPathBuilder1Test.srvCertPathBuilder
                + "." + defaultAlg,
                CertPathBuilderProviderClass);
        Security.insertProviderAt(mProv, 1);
    }

    /*
     * @see TestCase#tearDown()
     */
    protected void tearDown() throws Exception {
        super.tearDown();
        // Undo the global Security mutation made in setUp().
        Security.removeProvider(mProv.getName());
    }

    /**
     * Shared checks run against every CertPathBuilder obtained in the tests below:
     * exercises the default-type security property and the stubbed build(null)
     * behavior of MyCertPathBuilderSpi.
     */
    private void checkResult(CertPathBuilder certBuild)
            throws InvalidAlgorithmParameterException,
            CertPathBuilderException {
        String dt = CertPathBuilder.getDefaultType();
        String propName = CertPathBuilder1Test.DEFAULT_TYPE_PROPERTY;
        String dtN;
        // Setting the default-type property to an invalid value must leave the
        // reported default either unchanged or equal to the value just set.
        for (int i = 0; i <invalidValues.length; i++) {
            Security.setProperty(propName, invalidValues[i]);
            dtN = CertPathBuilder.getDefaultType();
            if (!dtN.equals(invalidValues[i]) && !dtN.equals(dt)) {
                fail("Incorrect default type: ".concat(dtN));
            }
        }
        // Restore the original default type before continuing.
        Security.setProperty(propName, dt);
        assertEquals("Incorrect default type", CertPathBuilder.getDefaultType(),
                dt);
        try {
            certBuild.build(null);
            fail("CertPathBuilderException must be thrown");
        } catch (CertPathBuilderException e) {
            // expected
        }
        // NOTE(review): build(null) is expected to throw above yet return null here;
        // this relies on the stateful stub behavior of MyCertPathBuilderSpi — confirm
        // against the support class if this test is modified.
        CertPathBuilderResult cpbResult = certBuild.build(null);
        assertNull("Not null CertPathBuilderResult", cpbResult);
    }

    /**
     * Test for <code>getInstance(String algorithm)</code> method
     * Assertions:
     * throws
     * throws NullPointerException when algorithm is null
     * throws NoSuchAlgorithmException when algorithm is not correct
     * returns CertPathBuilder object
     */
    @TestTargetNew(
        level = TestLevel.COMPLETE,
        notes = "",
        method = "getInstance",
        args = {java.lang.String.class}
    )
    public void testGetInstance01() throws NoSuchAlgorithmException,
            InvalidAlgorithmParameterException, CertPathBuilderException {
        try {
            CertPathBuilder.getInstance(null);
            fail("NullPointerException or NoSuchAlgorithmException must be thrown when algorithm is null");
        } catch (NullPointerException e) {
            // expected: either exception type is acceptable per the spec
        } catch (NoSuchAlgorithmException e) {
            // expected
        }
        // Unknown algorithm names must be rejected.
        for (int i = 0; i < invalidValues.length; i++) {
            try {
                CertPathBuilder.getInstance(invalidValues[i]);
                fail("NoSuchAlgorithmException must be thrown (type: ".concat(
                        invalidValues[i]).concat(")"));
            } catch (NoSuchAlgorithmException e) {
                // expected
            }
        }
        // Every case variant of the registered algorithm must resolve to our provider.
        CertPathBuilder cerPB;
        for (int i = 0; i < validValues.length; i++) {
            cerPB = CertPathBuilder.getInstance(validValues[i]);
            assertEquals("Incorrect type", cerPB.getAlgorithm(), validValues[i]);
            assertEquals("Incorrect provider", cerPB.getProvider(), mProv);
            checkResult(cerPB);
        }
    }

    /**
     * Test for <code>getInstance(String algorithm, String provider)</code> method
     * Assertions:
     * throws NullPointerException when algorithm is null
     * throws NoSuchAlgorithmException when algorithm is not correct
     * throws IllegalArgumentException when provider is null or empty;
     * throws NoSuchProviderException when provider is available;
     * returns CertPathBuilder object
     */
    @TestTargetNew(
        level = TestLevel.COMPLETE,
        notes = "",
        method = "getInstance",
        args = {java.lang.String.class, java.lang.String.class}
    )
    public void testGetInstance02() throws NoSuchAlgorithmException,
            NoSuchProviderException, IllegalArgumentException,
            InvalidAlgorithmParameterException, CertPathBuilderException {
        try {
            CertPathBuilder.getInstance(null, mProv.getName());
            fail("NullPointerException or NoSuchAlgorithmException must be thrown when algorithm is null");
        } catch (NullPointerException e) {
            // expected: either exception type is acceptable per the spec
        } catch (NoSuchAlgorithmException e) {
            // expected
        }
        for (int i = 0; i < invalidValues.length; i++) {
            try {
                CertPathBuilder.getInstance(invalidValues[i], mProv
                        .getName());
                fail("NoSuchAlgorithmException must be thrown (type: ".concat(
                        invalidValues[i]).concat(")"));
            } catch (NoSuchAlgorithmException e) {
                // expected
            }
        }
        // Null and empty provider names must be rejected with IllegalArgumentException.
        String prov = null;
        for (int i = 0; i < validValues.length; i++) {
            try {
                CertPathBuilder.getInstance(validValues[i], prov);
                fail("IllegalArgumentException must be thrown when provider is null (type: "
                        .concat(validValues[i]).concat(")"));
            } catch (IllegalArgumentException e) {
                // expected
            }
            try {
                CertPathBuilder.getInstance(validValues[i], "");
                fail("IllegalArgumentException must be thrown when provider is empty (type: "
                        .concat(validValues[i]).concat(")"));
            } catch (IllegalArgumentException e) {
                // expected
            }
        }
        // Unknown provider names must raise NoSuchProviderException.
        // (j starts at 1: invalidValues[0] is presumably null/empty — TODO confirm
        // against SpiEngUtils.invalidValues.)
        for (int i = 0; i < validValues.length; i++) {
            for (int j = 1; j < invalidValues.length; j++) {
                try {
                    CertPathBuilder.getInstance(validValues[i],
                            invalidValues[j]);
                    fail("NoSuchProviderException must be thrown (type: "
                            .concat(validValues[i]).concat(" provider: ")
                            .concat(invalidValues[j]).concat(")"));
                } catch (NoSuchProviderException e) {
                    // expected
                }
            }
        }
        // Valid algorithm + our provider name must succeed.
        CertPathBuilder cerPB;
        for (int i = 0; i < validValues.length; i++) {
            cerPB = CertPathBuilder.getInstance(validValues[i], mProv
                    .getName());
            assertEquals("Incorrect type", cerPB.getAlgorithm(), validValues[i]);
            assertEquals("Incorrect provider", cerPB.getProvider().getName(),
                    mProv.getName());
            checkResult(cerPB);
        }
    }

    /**
     * Test for <code>getInstance(String algorithm, Provider provider)</code>
     * method
     * Assertions:
     * throws NullPointerException when algorithm is null
     * throws NoSuchAlgorithmException when algorithm is not correct
     * returns CertPathBuilder object
     */
    @TestTargetNew(
        level = TestLevel.COMPLETE,
        notes = "",
        method = "getInstance",
        args = {java.lang.String.class, java.security.Provider.class}
    )
    public void testGetInstance03() throws NoSuchAlgorithmException,
            IllegalArgumentException,
            InvalidAlgorithmParameterException, CertPathBuilderException {
        try {
            CertPathBuilder.getInstance(null, mProv);
            fail("NullPointerException or NoSuchAlgorithmException must be thrown when algorithm is null");
        } catch (NullPointerException e) {
            // expected: either exception type is acceptable per the spec
        } catch (NoSuchAlgorithmException e) {
            // expected
        }
        for (int i = 0; i < invalidValues.length; i++) {
            try {
                CertPathBuilder.getInstance(invalidValues[i], mProv);
                fail("NoSuchAlgorithmException must be thrown (type: ".concat(
                        invalidValues[i]).concat(")"));
            } catch (NoSuchAlgorithmException e) {
                // expected
            }
        }
        // A null Provider object must be rejected with IllegalArgumentException.
        Provider prov = null;
        for (int i = 0; i < validValues.length; i++) {
            try {
                CertPathBuilder.getInstance(validValues[i], prov);
                fail("IllegalArgumentException must be thrown when provider is null (type: "
                        .concat(validValues[i]).concat(")"));
            } catch (IllegalArgumentException e) {
                // expected
            }
        }
        // Valid algorithm + Provider instance must succeed.
        CertPathBuilder cerPB;
        for (int i = 0; i < validValues.length; i++) {
            cerPB = CertPathBuilder.getInstance(validValues[i], mProv);
            assertEquals("Incorrect type", cerPB.getAlgorithm(), validValues[i]);
            assertEquals("Incorrect provider", cerPB.getProvider(), mProv);
            checkResult(cerPB);
        }
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.spark.execution;
import com.facebook.airlift.json.JsonCodec;
import com.facebook.airlift.log.Logger;
import com.facebook.airlift.stats.TestingGcMonitor;
import com.facebook.presto.Session;
import com.facebook.presto.block.BlockEncodingManager;
import com.facebook.presto.event.SplitMonitor;
import com.facebook.presto.execution.ExecutionFailureInfo;
import com.facebook.presto.execution.ScheduledSplit;
import com.facebook.presto.execution.StageExecutionId;
import com.facebook.presto.execution.StageId;
import com.facebook.presto.execution.TaskId;
import com.facebook.presto.execution.TaskInfo;
import com.facebook.presto.execution.TaskManagerConfig;
import com.facebook.presto.execution.TaskSource;
import com.facebook.presto.execution.TaskState;
import com.facebook.presto.execution.TaskStateMachine;
import com.facebook.presto.execution.TaskStatus;
import com.facebook.presto.execution.buffer.OutputBufferInfo;
import com.facebook.presto.execution.buffer.OutputBufferMemoryManager;
import com.facebook.presto.execution.executor.TaskExecutor;
import com.facebook.presto.memory.MemoryPool;
import com.facebook.presto.memory.NodeMemoryConfig;
import com.facebook.presto.memory.QueryContext;
import com.facebook.presto.metadata.FunctionAndTypeManager;
import com.facebook.presto.metadata.SessionPropertyManager;
import com.facebook.presto.operator.FragmentResultCacheManager;
import com.facebook.presto.operator.OutputFactory;
import com.facebook.presto.operator.TaskContext;
import com.facebook.presto.operator.TaskStats;
import com.facebook.presto.spark.PrestoSparkAuthenticatorProvider;
import com.facebook.presto.spark.PrestoSparkTaskDescriptor;
import com.facebook.presto.spark.classloader_interface.IPrestoSparkTaskExecutor;
import com.facebook.presto.spark.classloader_interface.IPrestoSparkTaskExecutorFactory;
import com.facebook.presto.spark.classloader_interface.MutablePartitionId;
import com.facebook.presto.spark.classloader_interface.PrestoSparkMutableRow;
import com.facebook.presto.spark.classloader_interface.PrestoSparkSerializedPage;
import com.facebook.presto.spark.classloader_interface.PrestoSparkShuffleStats;
import com.facebook.presto.spark.classloader_interface.PrestoSparkTaskInputs;
import com.facebook.presto.spark.classloader_interface.PrestoSparkTaskOutput;
import com.facebook.presto.spark.classloader_interface.SerializedPrestoSparkTaskDescriptor;
import com.facebook.presto.spark.classloader_interface.SerializedPrestoSparkTaskSource;
import com.facebook.presto.spark.classloader_interface.SerializedTaskInfo;
import com.facebook.presto.spark.execution.PrestoSparkPageOutputOperator.PrestoSparkPageOutputFactory;
import com.facebook.presto.spark.execution.PrestoSparkRowBatch.RowTupleSupplier;
import com.facebook.presto.spark.execution.PrestoSparkRowOutputOperator.PreDeterminedPartitionFunction;
import com.facebook.presto.spark.execution.PrestoSparkRowOutputOperator.PrestoSparkRowOutputFactory;
import com.facebook.presto.spi.ConnectorSplit;
import com.facebook.presto.spi.memory.MemoryPoolId;
import com.facebook.presto.spi.plan.PlanNodeId;
import com.facebook.presto.spi.security.TokenAuthenticator;
import com.facebook.presto.spiller.NodeSpillConfig;
import com.facebook.presto.spiller.SpillSpaceTracker;
import com.facebook.presto.sql.planner.LocalExecutionPlanner;
import com.facebook.presto.sql.planner.LocalExecutionPlanner.LocalExecutionPlan;
import com.facebook.presto.sql.planner.OutputPartitioning;
import com.facebook.presto.sql.planner.PlanFragment;
import com.facebook.presto.sql.planner.plan.PlanFragmentId;
import com.facebook.presto.sql.planner.plan.RemoteSourceNode;
import com.facebook.presto.sql.planner.planPrinter.PlanPrinter;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import io.airlift.units.DataSize;
import org.apache.spark.broadcast.Broadcast;
import org.apache.spark.util.CollectionAccumulator;
import org.joda.time.DateTime;
import scala.Tuple2;
import scala.collection.AbstractIterator;
import scala.collection.Iterator;
import javax.inject.Inject;
import java.net.URI;
import java.util.ArrayList;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.Optional;
import java.util.OptionalInt;
import java.util.OptionalLong;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.Executor;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ScheduledExecutorService;
import static com.facebook.presto.SystemSessionProperties.getHashPartitionCount;
import static com.facebook.presto.execution.FragmentResultCacheContext.createFragmentResultCacheContext;
import static com.facebook.presto.execution.TaskState.FAILED;
import static com.facebook.presto.execution.TaskStatus.STARTING_VERSION;
import static com.facebook.presto.execution.buffer.BufferState.FINISHED;
import static com.facebook.presto.metadata.MetadataUpdates.DEFAULT_METADATA_UPDATES;
import static com.facebook.presto.spark.PrestoSparkSessionProperties.getShuffleOutputTargetAverageRowSize;
import static com.facebook.presto.spark.classloader_interface.PrestoSparkShuffleStats.Operation.WRITE;
import static com.facebook.presto.spark.util.PrestoSparkUtils.compress;
import static com.facebook.presto.spark.util.PrestoSparkUtils.decompress;
import static com.facebook.presto.spark.util.PrestoSparkUtils.toPrestoSparkSerializedPage;
import static com.facebook.presto.sql.planner.SystemPartitioningHandle.FIXED_ARBITRARY_DISTRIBUTION;
import static com.facebook.presto.util.Failures.toFailures;
import static com.fasterxml.jackson.databind.SerializationFeature.ORDER_MAP_ENTRIES_BY_KEYS;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.base.Throwables.propagateIfPossible;
import static com.google.common.collect.Iterables.getFirst;
import static java.util.Objects.requireNonNull;
import static java.util.UUID.randomUUID;
public class PrestoSparkTaskExecutorFactory
implements IPrestoSparkTaskExecutorFactory
{
    private static final Logger log = Logger.get(PrestoSparkTaskExecutorFactory.class);

    // Collaborators used to deserialize, plan, and execute a single Presto task
    // inside a Spark executor. All are injected and immutable after construction.
    private final SessionPropertyManager sessionPropertyManager;
    private final BlockEncodingManager blockEncodingManager;
    private final FunctionAndTypeManager functionAndTypeManager;

    // JSON codecs for the serialized task descriptor / sources / info payloads.
    private final JsonCodec<PrestoSparkTaskDescriptor> taskDescriptorJsonCodec;
    private final JsonCodec<TaskSource> taskSourceJsonCodec;
    private final JsonCodec<TaskInfo> taskInfoJsonCodec;

    // Executors: async notifications, driver yielding, and periodic memory updates.
    private final Executor notificationExecutor;
    private final ScheduledExecutorService yieldExecutor;
    private final ScheduledExecutorService memoryUpdateExecutor;

    private final LocalExecutionPlanner localExecutionPlanner;
    private final PrestoSparkExecutionExceptionFactory executionExceptionFactory;
    private final TaskExecutor taskExecutor;
    private final SplitMonitor splitMonitor;
    private final Set<PrestoSparkAuthenticatorProvider> authenticatorProviders;
    private final FragmentResultCacheManager fragmentResultCacheManager;
    // Copied and configured in the constructor for deterministic map-key ordering.
    private final ObjectMapper objectMapper;

    // Per-task memory / spill / buffer limits, unpacked from the config objects.
    private final DataSize maxUserMemory;
    private final DataSize maxTotalMemory;
    private final DataSize maxRevocableMemory;
    private final DataSize maxSpillMemory;
    private final DataSize sinkMaxBufferSize;

    // Fine-grained CPU-time and allocation accounting toggles.
    private final boolean perOperatorCpuTimerEnabled;
    private final boolean cpuTimerEnabled;
    private final boolean perOperatorAllocationTrackingEnabled;
    private final boolean allocationTrackingEnabled;
    /**
     * Guice-injected constructor: unpacks the memory / spill / task-manager config
     * objects and delegates to the full constructor below.
     */
    @Inject
    public PrestoSparkTaskExecutorFactory(
            SessionPropertyManager sessionPropertyManager,
            BlockEncodingManager blockEncodingManager,
            FunctionAndTypeManager functionAndTypeManager,
            JsonCodec<PrestoSparkTaskDescriptor> taskDescriptorJsonCodec,
            JsonCodec<TaskSource> taskSourceJsonCodec,
            JsonCodec<TaskInfo> taskInfoJsonCodec,
            Executor notificationExecutor,
            ScheduledExecutorService yieldExecutor,
            ScheduledExecutorService memoryUpdateExecutor,
            LocalExecutionPlanner localExecutionPlanner,
            PrestoSparkExecutionExceptionFactory executionExceptionFactory,
            TaskExecutor taskExecutor,
            SplitMonitor splitMonitor,
            Set<PrestoSparkAuthenticatorProvider> authenticatorProviders,
            FragmentResultCacheManager fragmentResultCacheManager,
            ObjectMapper objectMapper,
            TaskManagerConfig taskManagerConfig,
            NodeMemoryConfig nodeMemoryConfig,
            NodeSpillConfig nodeSpillConfig)
    {
        // this(...) must be the first statement in a constructor, so the config
        // objects are null-checked inline (repeatedly) rather than hoisted to locals.
        this(
                sessionPropertyManager,
                blockEncodingManager,
                functionAndTypeManager,
                taskDescriptorJsonCodec,
                taskSourceJsonCodec,
                taskInfoJsonCodec,
                notificationExecutor,
                yieldExecutor,
                memoryUpdateExecutor,
                localExecutionPlanner,
                executionExceptionFactory,
                taskExecutor,
                splitMonitor,
                authenticatorProviders,
                fragmentResultCacheManager,
                objectMapper,
                requireNonNull(nodeMemoryConfig, "nodeMemoryConfig is null").getMaxQueryMemoryPerNode(),
                requireNonNull(nodeMemoryConfig, "nodeMemoryConfig is null").getMaxQueryTotalMemoryPerNode(),
                requireNonNull(nodeSpillConfig, "nodeSpillConfig is null").getMaxRevocableMemoryPerNode(),
                requireNonNull(nodeSpillConfig, "nodeSpillConfig is null").getMaxSpillPerNode(),
                requireNonNull(taskManagerConfig, "taskManagerConfig is null").getSinkMaxBufferSize(),
                requireNonNull(taskManagerConfig, "taskManagerConfig is null").isPerOperatorCpuTimerEnabled(),
                requireNonNull(taskManagerConfig, "taskManagerConfig is null").isTaskCpuTimerEnabled(),
                requireNonNull(taskManagerConfig, "taskManagerConfig is null").isPerOperatorAllocationTrackingEnabled(),
                requireNonNull(taskManagerConfig, "taskManagerConfig is null").isTaskAllocationTrackingEnabled());
    }
public PrestoSparkTaskExecutorFactory(
SessionPropertyManager sessionPropertyManager,
BlockEncodingManager blockEncodingManager,
FunctionAndTypeManager functionAndTypeManager,
JsonCodec<PrestoSparkTaskDescriptor> taskDescriptorJsonCodec,
JsonCodec<TaskSource> taskSourceJsonCodec,
JsonCodec<TaskInfo> taskInfoJsonCodec,
Executor notificationExecutor,
ScheduledExecutorService yieldExecutor,
ScheduledExecutorService memoryUpdateExecutor,
LocalExecutionPlanner localExecutionPlanner,
PrestoSparkExecutionExceptionFactory executionExceptionFactory,
TaskExecutor taskExecutor,
SplitMonitor splitMonitor,
Set<PrestoSparkAuthenticatorProvider> authenticatorProviders,
FragmentResultCacheManager fragmentResultCacheManager,
ObjectMapper objectMapper,
DataSize maxUserMemory,
DataSize maxTotalMemory,
DataSize maxRevocableMemory,
DataSize maxSpillMemory,
DataSize sinkMaxBufferSize,
boolean perOperatorCpuTimerEnabled,
boolean cpuTimerEnabled,
boolean perOperatorAllocationTrackingEnabled,
boolean allocationTrackingEnabled)
{
this.sessionPropertyManager = requireNonNull(sessionPropertyManager, "sessionPropertyManager is null");
this.blockEncodingManager = requireNonNull(blockEncodingManager, "blockEncodingManager is null");
this.functionAndTypeManager = requireNonNull(functionAndTypeManager, "functionManager is null");
this.taskDescriptorJsonCodec = requireNonNull(taskDescriptorJsonCodec, "sparkTaskDescriptorJsonCodec is null");
this.taskSourceJsonCodec = requireNonNull(taskSourceJsonCodec, "taskSourceJsonCodec is null");
this.taskInfoJsonCodec = requireNonNull(taskInfoJsonCodec, "taskInfoJsonCodec is null");
this.notificationExecutor = requireNonNull(notificationExecutor, "notificationExecutor is null");
this.yieldExecutor = requireNonNull(yieldExecutor, "yieldExecutor is null");
this.memoryUpdateExecutor = requireNonNull(memoryUpdateExecutor, "memoryUpdateExecutor is null");
this.localExecutionPlanner = requireNonNull(localExecutionPlanner, "localExecutionPlanner is null");
this.executionExceptionFactory = requireNonNull(executionExceptionFactory, "executionExceptionFactory is null");
this.taskExecutor = requireNonNull(taskExecutor, "taskExecutor is null");
this.splitMonitor = requireNonNull(splitMonitor, "splitMonitor is null");
this.authenticatorProviders = ImmutableSet.copyOf(requireNonNull(authenticatorProviders, "authenticatorProviders is null"));
this.fragmentResultCacheManager = requireNonNull(fragmentResultCacheManager, "fragmentResultCacheManager is null");
// Ordering is needed to make sure serialized plans are consistent for the same map
this.objectMapper = objectMapper.copy().configure(ORDER_MAP_ENTRIES_BY_KEYS, true);
this.maxUserMemory = requireNonNull(maxUserMemory, "maxUserMemory is null");
this.maxTotalMemory = requireNonNull(maxTotalMemory, "maxTotalMemory is null");
this.maxRevocableMemory = requireNonNull(maxRevocableMemory, "maxRevocableMemory is null");
this.maxSpillMemory = requireNonNull(maxSpillMemory, "maxSpillMemory is null");
this.sinkMaxBufferSize = requireNonNull(sinkMaxBufferSize, "sinkMaxBufferSize is null");
this.perOperatorCpuTimerEnabled = perOperatorCpuTimerEnabled;
this.cpuTimerEnabled = cpuTimerEnabled;
this.perOperatorAllocationTrackingEnabled = perOperatorAllocationTrackingEnabled;
this.allocationTrackingEnabled = allocationTrackingEnabled;
}
@Override
public <T extends PrestoSparkTaskOutput> IPrestoSparkTaskExecutor<T> create(
        int partitionId,
        int attemptNumber,
        SerializedPrestoSparkTaskDescriptor serializedTaskDescriptor,
        Iterator<SerializedPrestoSparkTaskSource> serializedTaskSources,
        PrestoSparkTaskInputs inputs,
        CollectionAccumulator<SerializedTaskInfo> taskInfoCollector,
        CollectionAccumulator<PrestoSparkShuffleStats> shuffleStatsCollector,
        Class<T> outputType)
{
    // Delegate all of the real work to doCreate(); the only job of this wrapper
    // is to translate runtime failures into PrestoSparkExecutionException so the
    // Spark side can recognize and classify Presto failures.
    try {
        return doCreate(
                partitionId,
                attemptNumber,
                serializedTaskDescriptor,
                serializedTaskSources,
                inputs,
                taskInfoCollector,
                shuffleStatsCollector,
                outputType);
    }
    catch (RuntimeException failure) {
        throw executionExceptionFactory.toPrestoSparkExecutionException(failure);
    }
}
/**
 * Builds and starts a Presto task for one Spark partition and returns an
 * executor that lazily drains the task's output.
 *
 * <p>The steps are: deserialize the task descriptor and rebuild the session,
 * deserialize the task sources (splits), set up memory accounting contexts,
 * wire the remote-source inputs (shuffle, broadcast or in-memory), configure
 * the requested output buffer, plan the local execution, and start it.
 *
 * @param partitionId Spark partition this task processes; also used as the Presto task id
 * @param attemptNumber Spark attempt number (currently not encoded in the task id; see TODO below)
 * @param outputType either {@code PrestoSparkMutableRow} or {@code PrestoSparkSerializedPage}
 */
public <T extends PrestoSparkTaskOutput> IPrestoSparkTaskExecutor<T> doCreate(
        int partitionId,
        int attemptNumber,
        SerializedPrestoSparkTaskDescriptor serializedTaskDescriptor,
        Iterator<SerializedPrestoSparkTaskSource> serializedTaskSources,
        PrestoSparkTaskInputs inputs,
        CollectionAccumulator<SerializedTaskInfo> taskInfoCollector,
        CollectionAccumulator<PrestoSparkShuffleStats> shuffleStatsCollector,
        Class<T> outputType)
{
    PrestoSparkTaskDescriptor taskDescriptor = taskDescriptorJsonCodec.fromJson(serializedTaskDescriptor.getBytes());
    // Collect token authenticators contributed by plugins so the rebuilt session
    // carries the same credentials as on the driver.
    ImmutableMap.Builder<String, TokenAuthenticator> extraAuthenticators = ImmutableMap.builder();
    authenticatorProviders.forEach(provider -> extraAuthenticators.putAll(provider.getTokenAuthenticators()));
    Session session = taskDescriptor.getSession().toSession(
            sessionPropertyManager,
            taskDescriptor.getExtraCredentials(),
            extraAuthenticators.build());
    PlanFragment fragment = taskDescriptor.getFragment();
    StageId stageId = new StageId(session.getQueryId(), fragment.getId().getId());
    // TODO: include attemptId in taskId
    TaskId taskId = new TaskId(new StageExecutionId(stageId, 0), partitionId);
    List<TaskSource> taskSources = getTaskSources(serializedTaskSources);
    log.info("Task [%s] received %d splits.",
            taskId,
            taskSources.stream()
                    .mapToInt(taskSource -> taskSource.getSplits().size())
                    .sum());
    // Empty when any split does not advertise its size.
    OptionalLong totalSplitSize = computeAllSplitsSize(taskSources);
    if (totalSplitSize.isPresent()) {
        log.info("Total split size: %s bytes.", totalSplitSize.getAsLong());
    }
    // TODO: Remove this once we can display the plan on Spark UI.
    log.info(PlanPrinter.textPlanFragment(fragment, functionAndTypeManager, session, true));
    // Per-task memory accounting: each Spark executor task gets its own pool.
    MemoryPool memoryPool = new MemoryPool(new MemoryPoolId("spark-executor-memory-pool"), maxTotalMemory);
    SpillSpaceTracker spillSpaceTracker = new SpillSpaceTracker(maxSpillMemory);
    QueryContext queryContext = new QueryContext(
            session.getQueryId(),
            maxUserMemory,
            maxTotalMemory,
            maxUserMemory,
            maxRevocableMemory,
            memoryPool,
            new TestingGcMonitor(),
            notificationExecutor,
            yieldExecutor,
            maxSpillMemory,
            spillSpaceTracker);
    TaskStateMachine taskStateMachine = new TaskStateMachine(taskId, notificationExecutor);
    TaskContext taskContext = queryContext.addTaskContext(
            taskStateMachine,
            session,
            perOperatorCpuTimerEnabled,
            cpuTimerEnabled,
            perOperatorAllocationTrackingEnabled,
            allocationTrackingEnabled,
            false,
            createFragmentResultCacheContext(fragmentResultCacheManager, fragment.getRoot(), fragment.getPartitioningScheme(), session, objectMapper));
    // For every remote source in the fragment, locate its input: a shuffle
    // iterator, a broadcast variable, or an in-memory page list. Exactly one of
    // the three must be present per source fragment.
    ImmutableMap.Builder<PlanNodeId, List<PrestoSparkShuffleInput>> shuffleInputs = ImmutableMap.builder();
    ImmutableMap.Builder<PlanNodeId, List<java.util.Iterator<PrestoSparkSerializedPage>>> pageInputs = ImmutableMap.builder();
    for (RemoteSourceNode remoteSource : fragment.getRemoteSourceNodes()) {
        List<PrestoSparkShuffleInput> remoteSourceRowInputs = new ArrayList<>();
        List<java.util.Iterator<PrestoSparkSerializedPage>> remoteSourcePageInputs = new ArrayList<>();
        for (PlanFragmentId sourceFragmentId : remoteSource.getSourceFragmentIds()) {
            Iterator<Tuple2<MutablePartitionId, PrestoSparkMutableRow>> shuffleInput = inputs.getShuffleInputs().get(sourceFragmentId.toString());
            Broadcast<List<PrestoSparkSerializedPage>> broadcastInput = inputs.getBroadcastInputs().get(sourceFragmentId.toString());
            List<PrestoSparkSerializedPage> inMemoryInput = inputs.getInMemoryInputs().get(sourceFragmentId.toString());
            if (shuffleInput != null) {
                checkArgument(broadcastInput == null, "single remote source is not expected to accept different kind of inputs");
                checkArgument(inMemoryInput == null, "single remote source is not expected to accept different kind of inputs");
                remoteSourceRowInputs.add(new PrestoSparkShuffleInput(sourceFragmentId.getId(), shuffleInput));
                continue;
            }
            if (broadcastInput != null) {
                checkArgument(inMemoryInput == null, "single remote source is not expected to accept different kind of inputs");
                // TODO: Enable NullifyingIterator once migrated to one task per JVM model
                // NullifyingIterator removes element from the list upon return
                // This allows GC to gradually reclaim memory
                // remoteSourcePageInputs.add(getNullifyingIterator(broadcastInput.value()));
                remoteSourcePageInputs.add(broadcastInput.value().iterator());
                continue;
            }
            if (inMemoryInput != null) {
                remoteSourcePageInputs.add(inMemoryInput.iterator());
                continue;
            }
            throw new IllegalArgumentException("Input not found for sourceFragmentId: " + sourceFragmentId);
        }
        if (!remoteSourceRowInputs.isEmpty()) {
            shuffleInputs.put(remoteSource.getId(), remoteSourceRowInputs);
        }
        if (!remoteSourcePageInputs.isEmpty()) {
            pageInputs.put(remoteSource.getId(), remoteSourcePageInputs);
        }
    }
    OutputBufferMemoryManager memoryManager = new OutputBufferMemoryManager(
            sinkMaxBufferSize.toBytes(),
            () -> queryContext.getTaskContextByTaskId(taskId).localSystemMemoryContext(),
            notificationExecutor);
    // With FIXED_ARBITRARY_DISTRIBUTION the output partition is pre-determined
    // from the Spark partition id rather than computed from row values.
    Optional<OutputPartitioning> preDeterminedPartition = Optional.empty();
    if (fragment.getPartitioningScheme().getPartitioning().getHandle().equals(FIXED_ARBITRARY_DISTRIBUTION)) {
        int partitionCount = getHashPartitionCount(session);
        preDeterminedPartition = Optional.of(new OutputPartitioning(
                new PreDeterminedPartitionFunction(partitionId % partitionCount, partitionCount),
                ImmutableList.of(),
                ImmutableList.of(),
                false,
                OptionalInt.empty()));
    }
    Output<T> output = configureOutput(
            outputType,
            blockEncodingManager,
            memoryManager,
            getShuffleOutputTargetAverageRowSize(session),
            preDeterminedPartition);
    PrestoSparkOutputBuffer<?> outputBuffer = output.getOutputBuffer();
    LocalExecutionPlan localExecutionPlan = localExecutionPlanner.plan(
            taskContext,
            fragment.getRoot(),
            fragment.getPartitioningScheme(),
            fragment.getStageExecutionDescriptor(),
            fragment.getTableScanSchedulingOrder(),
            output.getOutputFactory(),
            new PrestoSparkRemoteSourceFactory(
                    blockEncodingManager,
                    shuffleInputs.build(),
                    pageInputs.build(),
                    partitionId,
                    shuffleStatsCollector),
            taskDescriptor.getTableWriteInfo(),
            true);
    // Unblock any consumer waiting on the output buffer once the task reaches a
    // terminal state (success or failure).
    taskStateMachine.addStateChangeListener(state -> {
        if (state.isDone()) {
            outputBuffer.setNoMoreRows();
        }
    });
    PrestoSparkTaskExecution taskExecution = new PrestoSparkTaskExecution(
            taskStateMachine,
            taskContext,
            localExecutionPlan,
            taskExecutor,
            splitMonitor,
            notificationExecutor,
            memoryUpdateExecutor);
    taskExecution.start(taskSources);
    return new PrestoSparkTaskExecutor<>(
            taskContext,
            taskStateMachine,
            output.getOutputSupplier(),
            taskInfoJsonCodec,
            taskInfoCollector,
            shuffleStatsCollector,
            executionExceptionFactory,
            output.getOutputBufferType(),
            outputBuffer);
}
/**
 * Sums the advertised sizes of all splits across the given task sources.
 *
 * @return the total size in bytes, or {@link OptionalLong#empty()} when any
 *         split does not report its size (a partial sum would be misleading)
 */
private static OptionalLong computeAllSplitsSize(List<TaskSource> taskSources)
{
    long sum = 0;
    for (TaskSource taskSource : taskSources) {
        for (ScheduledSplit scheduledSplit : taskSource.getSplits()) {
            // Fetch the OptionalLong once instead of calling getSplitSizeInBytes() twice
            OptionalLong splitSize = scheduledSplit.getSplit().getConnectorSplit().getSplitSizeInBytes();
            if (!splitSize.isPresent()) {
                return OptionalLong.empty();
            }
            sum += splitSize.getAsLong();
        }
    }
    return OptionalLong.of(sum);
}
/**
 * Decompresses and deserializes the task sources streamed in from the driver.
 */
private List<TaskSource> getTaskSources(Iterator<SerializedPrestoSparkTaskSource> serializedTaskSources)
{
    ImmutableList.Builder<TaskSource> taskSources = ImmutableList.builder();
    while (serializedTaskSources.hasNext()) {
        // Each element is compressed JSON; inflate before decoding.
        byte[] json = decompress(serializedTaskSources.next().getBytes());
        taskSources.add(taskSourceJsonCodec.fromJson(json));
    }
    return taskSources.build();
}
/**
 * Builds the output buffer, output factory and output supplier matching the
 * requested output type (row-based for shuffles, page-based for broadcasts).
 *
 * @throws IllegalArgumentException if {@code outputType} is not one of the two supported types
 */
@SuppressWarnings("unchecked")
private static <T extends PrestoSparkTaskOutput> Output<T> configureOutput(
        Class<T> outputType,
        BlockEncodingManager blockEncodingManager,
        OutputBufferMemoryManager memoryManager,
        DataSize targetAverageRowSize,
        Optional<OutputPartitioning> preDeterminedPartition)
{
    if (outputType.equals(PrestoSparkMutableRow.class)) {
        PrestoSparkOutputBuffer<PrestoSparkRowBatch> buffer = new PrestoSparkOutputBuffer<>(memoryManager);
        OutputFactory factory = new PrestoSparkRowOutputFactory(buffer, targetAverageRowSize, preDeterminedPartition);
        OutputSupplier<T> supplier = (OutputSupplier<T>) new RowOutputSupplier(buffer);
        return new Output<>(OutputBufferType.SPARK_ROW_OUTPUT_BUFFER, buffer, factory, supplier);
    }
    if (outputType.equals(PrestoSparkSerializedPage.class)) {
        PrestoSparkOutputBuffer<PrestoSparkBufferedSerializedPage> buffer = new PrestoSparkOutputBuffer<>(memoryManager);
        OutputFactory factory = new PrestoSparkPageOutputFactory(buffer, blockEncodingManager);
        OutputSupplier<T> supplier = (OutputSupplier<T>) new PageOutputSupplier(buffer);
        return new Output<>(OutputBufferType.SPARK_PAGE_OUTPUT_BUFFER, buffer, factory, supplier);
    }
    throw new IllegalArgumentException("Unexpected output type: " + outputType.getName());
}
/**
 * Iterator handed to Spark that drains a running Presto task's output buffer.
 *
 * <p>Each call to {@link #next()} returns one output tuple. When the buffer is
 * exhausted the iterator publishes shuffle statistics and serialized task info
 * to the Spark accumulators, then rethrows the task's failure cause if any.
 *
 * <p>Not thread safe: assumes the single-threaded consumption model of a Spark
 * task.
 */
private static class PrestoSparkTaskExecutor<T extends PrestoSparkTaskOutput>
        extends AbstractIterator<Tuple2<MutablePartitionId, T>>
        implements IPrestoSparkTaskExecutor<T>
{
    private final TaskContext taskContext;
    private final TaskStateMachine taskStateMachine;
    private final OutputSupplier<T> outputSupplier;
    private final JsonCodec<TaskInfo> taskInfoJsonCodec;
    private final CollectionAccumulator<SerializedTaskInfo> taskInfoCollector;
    private final CollectionAccumulator<PrestoSparkShuffleStats> shuffleStatsCollector;
    private final PrestoSparkExecutionExceptionFactory executionExceptionFactory;
    private final OutputBufferType outputBufferType;
    private final PrestoSparkOutputBuffer<?> outputBuffer;

    // Random id reported in TaskStatus in place of a coordinator-assigned instance id.
    private final UUID taskInstanceId = randomUUID();

    // Buffered element: next() value computed by hasNext() but not yet returned.
    private Tuple2<MutablePartitionId, T> next;
    // Wall-clock time of the first doComputeNext() call; boxed so null means "not started".
    private Long start;
    // Running totals for the shuffle write statistics reported on completion.
    private long processedRows;
    private long processedRowBatches;
    private long processedBytes;

    private PrestoSparkTaskExecutor(
            TaskContext taskContext,
            TaskStateMachine taskStateMachine,
            OutputSupplier<T> outputSupplier,
            JsonCodec<TaskInfo> taskInfoJsonCodec,
            CollectionAccumulator<SerializedTaskInfo> taskInfoCollector,
            CollectionAccumulator<PrestoSparkShuffleStats> shuffleStatsCollector,
            PrestoSparkExecutionExceptionFactory executionExceptionFactory,
            OutputBufferType outputBufferType,
            PrestoSparkOutputBuffer<?> outputBuffer)
    {
        this.taskContext = requireNonNull(taskContext, "taskContext is null");
        this.taskStateMachine = requireNonNull(taskStateMachine, "taskStateMachine is null");
        this.outputSupplier = requireNonNull(outputSupplier, "outputSupplier is null");
        this.taskInfoJsonCodec = requireNonNull(taskInfoJsonCodec, "taskInfoJsonCodec is null");
        this.taskInfoCollector = requireNonNull(taskInfoCollector, "taskInfoCollector is null");
        this.shuffleStatsCollector = requireNonNull(shuffleStatsCollector, "shuffleStatsCollector is null");
        this.executionExceptionFactory = requireNonNull(executionExceptionFactory, "executionExceptionFactory is null");
        this.outputBufferType = requireNonNull(outputBufferType, "outputBufferType is null");
        this.outputBuffer = requireNonNull(outputBuffer, "outputBuffer is null");
    }

    @Override
    public boolean hasNext()
    {
        // Pre-compute and buffer the next element; null signals end of output.
        if (next == null) {
            next = computeNext();
        }
        return next != null;
    }

    @Override
    public Tuple2<MutablePartitionId, T> next()
    {
        if (next == null) {
            next = computeNext();
        }
        if (next == null) {
            throw new NoSuchElementException();
        }
        // Hand out the buffered element and clear the buffer.
        Tuple2<MutablePartitionId, T> result = next;
        next = null;
        return result;
    }

    protected Tuple2<MutablePartitionId, T> computeNext()
    {
        try {
            return doComputeNext();
        }
        catch (RuntimeException e) {
            // Wrap so Spark can classify Presto failures (e.g. retryable vs fatal).
            throw executionExceptionFactory.toPrestoSparkExecutionException(e);
        }
        catch (InterruptedException e) {
            // Restore the interrupt flag and abort the task before propagating.
            Thread.currentThread().interrupt();
            taskStateMachine.abort();
            throw new RuntimeException(e);
        }
    }

    /**
     * Returns the next output tuple, or {@code null} once the task is done.
     * On completion publishes shuffle stats and task info to the accumulators
     * and rethrows the first failure cause if the task did not succeed.
     */
    private Tuple2<MutablePartitionId, T> doComputeNext()
            throws InterruptedException
    {
        if (start == null) {
            start = System.currentTimeMillis();
        }
        Tuple2<MutablePartitionId, T> output = outputSupplier.getNext();
        if (output != null) {
            processedRows += output._2.getPositionCount();
            processedRowBatches++;
            processedBytes += output._2.getSize();
            return output;
        }
        // task finished
        TaskState taskState = taskStateMachine.getState();
        checkState(taskState.isDone(), "task is expected to be done");
        long end = System.currentTimeMillis();
        // Processing time excludes the time spent blocked waiting on the buffer.
        PrestoSparkShuffleStats shuffleStats = new PrestoSparkShuffleStats(
                taskContext.getTaskId().getStageExecutionId().getStageId().getId(),
                taskContext.getTaskId().getId(),
                WRITE,
                processedRows,
                processedRowBatches,
                processedBytes,
                end - start - outputSupplier.getTimeSpentWaitingForOutputInMillis());
        shuffleStatsCollector.add(shuffleStats);
        TaskInfo taskInfo = createTaskInfo(taskContext, taskStateMachine, taskInstanceId, outputBufferType, outputBuffer);
        SerializedTaskInfo serializedTaskInfo = new SerializedTaskInfo(
                taskInfo.getTaskId().getStageExecutionId().getStageId().getId(),
                taskInfo.getTaskId().getId(),
                compress(taskInfoJsonCodec.toJsonBytes(taskInfo)));
        taskInfoCollector.add(serializedTaskInfo);
        LinkedBlockingQueue<Throwable> failures = taskStateMachine.getFailureCauses();
        if (failures.isEmpty()) {
            return null;
        }
        // Rethrow the first failure with its original type where possible.
        Throwable failure = getFirst(failures, null);
        propagateIfPossible(failure, Error.class);
        propagateIfPossible(failure, RuntimeException.class);
        propagateIfPossible(failure, InterruptedException.class);
        throw new RuntimeException(failure);
    }

    /**
     * Assembles a coordinator-style {@link TaskInfo} from the local task state,
     * with placeholder values (fake URI, random instance id) for fields that
     * only exist in the coordinator deployment model.
     */
    private static TaskInfo createTaskInfo(
            TaskContext taskContext,
            TaskStateMachine taskStateMachine,
            UUID taskInstanceId,
            OutputBufferType outputBufferType,
            PrestoSparkOutputBuffer<?> outputBuffer)
    {
        TaskId taskId = taskContext.getTaskId();
        TaskState taskState = taskContext.getState();
        TaskStats taskStats = taskContext.getTaskStats();
        List<ExecutionFailureInfo> failures = ImmutableList.of();
        if (taskState == FAILED) {
            failures = toFailures(taskStateMachine.getFailureCauses());
        }
        TaskStatus taskStatus = new TaskStatus(
                taskInstanceId.getLeastSignificantBits(),
                taskInstanceId.getMostSignificantBits(),
                STARTING_VERSION,
                taskState,
                URI.create("http://fake.invalid/task/" + taskId),
                taskContext.getCompletedDriverGroups(),
                failures,
                taskStats.getQueuedPartitionedDrivers(),
                taskStats.getRunningPartitionedDrivers(),
                0,
                false,
                taskStats.getPhysicalWrittenDataSizeInBytes(),
                taskStats.getUserMemoryReservationInBytes(),
                taskStats.getSystemMemoryReservationInBytes(),
                taskStats.getPeakNodeTotalMemoryInBytes(),
                taskStats.getFullGcCount(),
                taskStats.getFullGcTimeInMillis());
        OutputBufferInfo outputBufferInfo = new OutputBufferInfo(
                outputBufferType.name(),
                FINISHED,
                false,
                false,
                0,
                0,
                outputBuffer.getTotalRowsProcessed(),
                outputBuffer.getTotalPagesProcessed(),
                ImmutableList.of());
        return new TaskInfo(
                taskId,
                taskStatus,
                DateTime.now(),
                outputBufferInfo,
                ImmutableSet.of(),
                taskStats,
                false,
                DEFAULT_METADATA_UPDATES);
    }
}
/**
 * Immutable bundle of the output wiring produced by {@code configureOutput}:
 * the buffer type tag, the buffer itself, the factory that feeds it during
 * execution, and the supplier that drains it afterwards.
 */
private static class Output<T extends PrestoSparkTaskOutput>
{
    private final OutputBufferType outputBufferType;
    private final PrestoSparkOutputBuffer<?> outputBuffer;
    private final OutputFactory outputFactory;
    private final OutputSupplier<T> outputSupplier;

    private Output(
            OutputBufferType outputBufferType,
            PrestoSparkOutputBuffer<?> outputBuffer,
            OutputFactory outputFactory,
            OutputSupplier<T> outputSupplier)
    {
        this.outputBufferType = requireNonNull(outputBufferType, "outputBufferType is null");
        this.outputBuffer = requireNonNull(outputBuffer, "outputBuffer is null");
        this.outputFactory = requireNonNull(outputFactory, "outputFactory is null");
        this.outputSupplier = requireNonNull(outputSupplier, "outputSupplier is null");
    }

    /** Tag recorded in {@code TaskInfo}'s output buffer section. */
    public OutputBufferType getOutputBufferType()
    {
        return outputBufferType;
    }

    /** The buffer execution writes into. */
    public PrestoSparkOutputBuffer<?> getOutputBuffer()
    {
        return outputBuffer;
    }

    /** Factory passed to the local execution planner. */
    public OutputFactory getOutputFactory()
    {
        return outputFactory;
    }

    /** Supplier used by the task executor to drain the buffer. */
    public OutputSupplier<T> getOutputSupplier()
    {
        return outputSupplier;
    }
}
/**
 * Drains a task's {@code PrestoSparkOutputBuffer} as Spark tuples.
 */
private interface OutputSupplier<T extends PrestoSparkTaskOutput>
{
    /**
     * Returns the next output tuple, blocking if necessary, or {@code null}
     * once the buffer has no more output.
     */
    Tuple2<MutablePartitionId, T> getNext()
            throws InterruptedException;

    /** Total time spent blocked waiting on the buffer, in milliseconds. */
    long getTimeSpentWaitingForOutputInMillis();
}
/**
 * Drains row batches from the output buffer and flattens them into individual
 * row tuples. Tracks the time spent blocked on the buffer so that processing
 * time can be reported net of waiting.
 */
private static class RowOutputSupplier
        implements OutputSupplier<PrestoSparkMutableRow>
{
    private final PrestoSparkOutputBuffer<PrestoSparkRowBatch> outputBuffer;
    // Supplier over the batch currently being flattened; null when a new batch is needed.
    private RowTupleSupplier currentRowTupleSupplier;
    private long timeSpentWaitingForOutputInMillis;

    private RowOutputSupplier(PrestoSparkOutputBuffer<PrestoSparkRowBatch> outputBuffer)
    {
        this.outputBuffer = requireNonNull(outputBuffer, "outputBuffer is null");
    }

    @Override
    public Tuple2<MutablePartitionId, PrestoSparkMutableRow> getNext()
            throws InterruptedException
    {
        while (true) {
            if (currentRowTupleSupplier == null) {
                long waitStart = System.currentTimeMillis();
                PrestoSparkRowBatch rowBatch = outputBuffer.get();
                timeSpentWaitingForOutputInMillis += System.currentTimeMillis() - waitStart;
                if (rowBatch == null) {
                    // Buffer finished: no more output for this task.
                    return null;
                }
                currentRowTupleSupplier = rowBatch.createRowTupleSupplier();
            }
            Tuple2<MutablePartitionId, PrestoSparkMutableRow> tuple = currentRowTupleSupplier.getNext();
            if (tuple != null) {
                return tuple;
            }
            // Current batch exhausted; loop around to fetch the next one.
            currentRowTupleSupplier = null;
        }
    }

    @Override
    public long getTimeSpentWaitingForOutputInMillis()
    {
        return timeSpentWaitingForOutputInMillis;
    }
}
/**
 * Drains serialized pages from the output buffer one at a time. Pages are not
 * partitioned, so every tuple reuses a single shared partition id instance.
 */
private static class PageOutputSupplier
        implements OutputSupplier<PrestoSparkSerializedPage>
{
    private static final MutablePartitionId DEFAULT_PARTITION = new MutablePartitionId();

    private final PrestoSparkOutputBuffer<PrestoSparkBufferedSerializedPage> outputBuffer;
    private long timeSpentWaitingForOutputInMillis;

    private PageOutputSupplier(PrestoSparkOutputBuffer<PrestoSparkBufferedSerializedPage> outputBuffer)
    {
        this.outputBuffer = requireNonNull(outputBuffer, "outputBuffer is null");
    }

    @Override
    public Tuple2<MutablePartitionId, PrestoSparkSerializedPage> getNext()
            throws InterruptedException
    {
        long waitStart = System.currentTimeMillis();
        PrestoSparkBufferedSerializedPage page = outputBuffer.get();
        timeSpentWaitingForOutputInMillis += System.currentTimeMillis() - waitStart;
        if (page == null) {
            // Buffer finished: no more output for this task.
            return null;
        }
        return new Tuple2<>(DEFAULT_PARTITION, toPrestoSparkSerializedPage(page.getSerializedPage()));
    }

    @Override
    public long getTimeSpentWaitingForOutputInMillis()
    {
        return timeSpentWaitingForOutputInMillis;
    }
}
// Identifies which kind of output buffer a task produced; the name is recorded
// in the OutputBufferInfo section of the reported TaskInfo.
private enum OutputBufferType
{
    SPARK_ROW_OUTPUT_BUFFER,
    SPARK_PAGE_OUTPUT_BUFFER,
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.namespace;
import java.io.IOException;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.MetaTableAccessor;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.master.MasterServices;
import org.apache.hadoop.hbase.master.TableNamespaceManager;
import org.apache.hadoop.hbase.quotas.QuotaExceededException;
import org.apache.hadoop.hbase.util.Bytes;
/**
* NamespaceStateManager manages state (in terms of quota) of all the namespaces. It contains
* a cache which is updated based on the hooks in the NamespaceAuditor class.
*/
@InterfaceAudience.Private
class NamespaceStateManager {

  private static final Log LOG = LogFactory.getLog(NamespaceStateManager.class);
  /** Cache of per-namespace table/region state, keyed by namespace name. */
  private final ConcurrentMap<String, NamespaceTableAndRegionInfo> nsStateCache;
  private final MasterServices master;
  private volatile boolean initialized = false;

  public NamespaceStateManager(MasterServices masterServices) {
    nsStateCache = new ConcurrentHashMap<String, NamespaceTableAndRegionInfo>();
    master = masterServices;
  }

  /**
   * Starts the NamespaceStateManager. The boot strap of cache
   * is done in the post master start hook of the NamespaceAuditor
   * class.
   *
   * @throws IOException Signals that an I/O exception has occurred.
   */
  public void start() throws IOException {
    LOG.info("Namespace State Manager started.");
    initialize();
  }

  /**
   * Gets an instance of NamespaceTableAndRegionInfo associated with namespace.
   *
   * @param name the name of the namespace
   * @return an instance of NamespaceTableAndRegionInfo, or null if not cached
   */
  public NamespaceTableAndRegionInfo getState(String name) {
    return nsStateCache.get(name);
  }

  /**
   * Check if adding a region violates namespace quota, if not update namespace cache.
   *
   * @param name the table the region belongs to
   * @param regionName the name of the region being added
   * @param incr the change in region count (positive when adding)
   * @return true, if region can be added to table.
   * @throws IOException Signals that an I/O exception has occurred.
   */
  synchronized boolean checkAndUpdateNamespaceRegionCount(TableName name,
      byte[] regionName, int incr) throws IOException {
    String namespace = name.getNamespaceAsString();
    NamespaceDescriptor nspdesc = getNamespaceDescriptor(namespace);
    if (nspdesc != null) {
      // NOTE(review): assumes the namespace state is already cached here;
      // a missing entry would NPE on getRegionCount() — confirm bootstrap ordering.
      NamespaceTableAndRegionInfo currentStatus;
      currentStatus = getState(namespace);
      if (incr > 0 &&
          currentStatus.getRegionCount() >= TableNamespaceManager.getMaxRegions(nspdesc)) {
        LOG.warn("The region " + Bytes.toStringBinary(regionName)
            + " cannot be created. The region count will exceed quota on the namespace. "
            + "This may be transient, please retry later if there are any ongoing split"
            + " operations in the namespace.");
        return false;
      }
      NamespaceTableAndRegionInfo nsInfo = nsStateCache.get(namespace);
      if (nsInfo != null) {
        nsInfo.incRegionCountForTable(name, incr);
      } else {
        LOG.warn("Namespace state found null for namespace : " + namespace);
      }
    }
    return true;
  }

  /**
   * Fetches the namespace descriptor from the master, returning null (and
   * logging) on failure rather than propagating the IOException.
   */
  private NamespaceDescriptor getNamespaceDescriptor(String namespaceAsString) {
    try {
      return this.master.getNamespaceDescriptor(namespaceAsString);
    } catch (IOException e) {
      LOG.error("Error while fetching namespace descriptor for namespace : " + namespaceAsString, e);
      return null;
    }
  }

  /**
   * Checks that creating {@code table} with {@code numRegions} regions stays
   * within the namespace's table and region quotas, then records the table.
   *
   * @throws QuotaExceededException if the table or region quota would be exceeded
   * @throws IOException if the namespace descriptor cannot be found
   */
  synchronized void checkAndUpdateNamespaceTableCount(TableName table, int numRegions)
      throws IOException {
    String namespace = table.getNamespaceAsString();
    NamespaceDescriptor nspdesc = getNamespaceDescriptor(namespace);
    if (nspdesc != null) {
      NamespaceTableAndRegionInfo currentStatus;
      currentStatus = getState(nspdesc.getName());
      if ((currentStatus.getTables().size()) >= TableNamespaceManager.getMaxTables(nspdesc)) {
        // Note: added the missing leading space before "cannot".
        throw new QuotaExceededException("The table " + table.getNameAsString()
            + " cannot be created as it would exceed maximum number of tables allowed "
            + " in the namespace. The total number of tables permitted is "
            + TableNamespaceManager.getMaxTables(nspdesc));
      }
      if ((currentStatus.getRegionCount() + numRegions) > TableNamespaceManager
          .getMaxRegions(nspdesc)) {
        throw new QuotaExceededException("The table " + table.getNameAsString()
            + " is not allowed to have " + numRegions
            + " regions. The total number of regions permitted is only "
            + TableNamespaceManager.getMaxRegions(nspdesc)
            + ", while current region count is " + currentStatus.getRegionCount()
            + ". This may be transient, please retry later if there are any"
            + " ongoing split operations in the namespace.");
      }
    } else {
      throw new IOException("Namespace Descriptor found null for " + namespace
          + " This is unexpected.");
    }
    addTable(table, numRegions);
  }

  /**
   * Returns the state entry for the namespace, creating it if absent.
   * Uses putIfAbsent so concurrent callers cannot clobber each other's entry.
   */
  NamespaceTableAndRegionInfo addNamespace(String namespace) {
    NamespaceTableAndRegionInfo info = nsStateCache.get(namespace);
    if (info == null) {
      info = new NamespaceTableAndRegionInfo(namespace);
      NamespaceTableAndRegionInfo previous = nsStateCache.putIfAbsent(namespace, info);
      if (previous != null) {
        // Another thread won the race; keep its entry.
        info = previous;
      }
    }
    return info;
  }

  /**
   * Delete the namespace state.
   *
   * @param namespace the name of the namespace whose state should be removed
   */
  void deleteNamespace(String namespace) {
    this.nsStateCache.remove(namespace);
  }

  /**
   * Records a table (with its region count) under its namespace's state.
   *
   * @throws IOException if the namespace has no cached state entry
   */
  private void addTable(TableName tableName, int regionCount) throws IOException {
    NamespaceTableAndRegionInfo info =
        nsStateCache.get(tableName.getNamespaceAsString());
    if (info != null) {
      info.addTable(tableName, regionCount);
    } else {
      throw new IOException("Bad state : Namespace quota information not found for namespace : "
          + tableName.getNamespaceAsString());
    }
  }

  /** Removes a table from its namespace's cached state, if present. */
  synchronized void removeTable(TableName tableName) {
    NamespaceTableAndRegionInfo info =
        nsStateCache.get(tableName.getNamespaceAsString());
    if (info != null) {
      info.removeTable(tableName);
    }
  }

  /**
   * Initialize namespace state cache by scanning meta table.
   */
  private void initialize() throws IOException {
    List<NamespaceDescriptor> namespaces = this.master.listNamespaceDescriptors();
    for (NamespaceDescriptor namespace : namespaces) {
      addNamespace(namespace.getName());
      List<TableName> tables = this.master.listTableNamesByNamespace(namespace.getName());
      for (TableName table : tables) {
        // System tables are not subject to namespace quotas.
        if (table.isSystemTable()) {
          continue;
        }
        List<HRegionInfo> regions =
            MetaTableAccessor.getTableRegions(this.master.getConnection(), table, true);
        addTable(table, regions.size());
      }
    }
    LOG.info("Finished updating state of " + nsStateCache.size() + " namespaces. ");
    initialized = true;
  }

  boolean isInitialized() {
    return initialized;
  }

  /**
   * Decrements the region count of the region's table by one.
   *
   * @throws IOException if the namespace has no cached state entry
   */
  public synchronized void removeRegionFromTable(HRegionInfo hri) throws IOException {
    String namespace = hri.getTable().getNamespaceAsString();
    NamespaceTableAndRegionInfo nsInfo = nsStateCache.get(namespace);
    if (nsInfo != null) {
      nsInfo.decrementRegionCountForTable(hri.getTable(), 1);
    } else {
      throw new IOException("Namespace state found null for namespace : " + namespace);
    }
  }
}
| |
/*
* Copyright 2016 LINE Corporation
*
* LINE Corporation licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.linecorp.armeria.internal.thrift;
import static java.util.Objects.requireNonNull;
import java.lang.reflect.Method;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import org.apache.thrift.AsyncProcessFunction;
import org.apache.thrift.ProcessFunction;
import org.apache.thrift.TApplicationException;
import org.apache.thrift.TBase;
import org.apache.thrift.TException;
import org.apache.thrift.TFieldIdEnum;
import org.apache.thrift.meta_data.FieldMetaData;
import org.apache.thrift.protocol.TMessageType;
import com.google.common.collect.ImmutableMap;
/**
* Provides the metadata of a Thrift service function.
*/
public final class ThriftFunction {
// Distinguishes whether {@link #func} is a synchronous ProcessFunction
// or an AsyncProcessFunction.
private enum Type {
    SYNC,
    ASYNC
}

// Either a ProcessFunction (SYNC) or an AsyncProcessFunction (ASYNC); cast on access.
private final Object func;
private final Type type;
// The Thrift service interface this function belongs to.
private final Class<?> serviceType;
// The Thrift function name.
private final String name;
// Prototype of the function's result struct; null for one-way functions (see isOneWay()).
private final TBase<TBase<?, ?>, TFieldIdEnum> result;
// Argument fields, ordered by position, used to populate an args instance.
private final TFieldIdEnum[] argFields;
// Field of the result struct named "success"; null for void/one-way functions.
private final TFieldIdEnum successField;
// Maps each declared exception type to the result field that carries it.
private final Map<Class<Throwable>, TFieldIdEnum> exceptionFields;
// Exception classes declared on the service method.
private final Class<?>[] declaredExceptions;
/**
 * Creates a new instance that wraps a synchronous {@link ProcessFunction}.
 */
ThriftFunction(Class<?> serviceType, ProcessFunction<?, ?> func) throws Exception {
    this(serviceType, func.getMethodName(), func, Type.SYNC,
         getArgFields(func), getResult(func), getDeclaredExceptions(func));
}

/**
 * Creates a new instance that wraps an {@link AsyncProcessFunction}.
 */
ThriftFunction(Class<?> serviceType, AsyncProcessFunction<?, ?, ?> func) throws Exception {
    this(serviceType, func.getMethodName(), func, Type.ASYNC,
         getArgFields(func), getResult(func), getDeclaredExceptions(func));
}
/**
 * Common constructor: stores the metadata and scans the result struct's field
 * metadata to find the "success" field and the fields that carry declared
 * exceptions.
 *
 * @param result the result struct prototype, or {@code null} for a one-way function
 */
private ThriftFunction(
        Class<?> serviceType, String name, Object func, Type type,
        TFieldIdEnum[] argFields,
        TBase<TBase<?, ?>, TFieldIdEnum> result,
        Class<?>[] declaredExceptions) throws Exception {

    this.func = func;
    this.type = type;
    this.serviceType = serviceType;
    this.name = name;
    this.argFields = argFields;
    this.result = result;
    this.declaredExceptions = declaredExceptions;

    // Determine the success and exception fields of the function.
    final ImmutableMap.Builder<Class<Throwable>, TFieldIdEnum> exceptionFieldsBuilder =
            ImmutableMap.builder();

    TFieldIdEnum successField = null;
    if (result != null) { // if not oneway
        @SuppressWarnings("rawtypes")
        final Class<? extends TBase> resultType = result.getClass();

        @SuppressWarnings("unchecked")
        final Map<TFieldIdEnum, FieldMetaData> metaDataMap =
                (Map<TFieldIdEnum, FieldMetaData>) FieldMetaData.getStructMetaDataMap(resultType);

        for (Entry<TFieldIdEnum, FieldMetaData> e : metaDataMap.entrySet()) {
            final TFieldIdEnum key = e.getKey();
            final String fieldName = key.getFieldName();
            if ("success".equals(fieldName)) {
                successField = key;
                continue;
            }

            // Any other Throwable-typed field of the result struct carries a
            // declared exception; index it by its exception type.
            Class<?> fieldType = resultType.getField(fieldName).getType();
            if (Throwable.class.isAssignableFrom(fieldType)) {
                @SuppressWarnings("unchecked")
                Class<Throwable> exceptionFieldType = (Class<Throwable>) fieldType;
                exceptionFieldsBuilder.put(exceptionFieldType, key);
            }
        }
    }

    this.successField = successField;
    exceptionFields = exceptionFieldsBuilder.build();
}
/**
 * Returns {@code true} if this function is a one-way function
 * (i.e. it has no result struct).
 */
public boolean isOneWay() {
    return result == null;
}

/**
 * Returns {@code true} if this function wraps an {@link AsyncProcessFunction}.
 */
public boolean isAsync() {
    return type == Type.ASYNC;
}

/**
 * Returns the type of this function.
 *
 * @return {@link TMessageType#CALL} or {@link TMessageType#ONEWAY}
 */
public byte messageType() {
    if (isOneWay()) {
        return TMessageType.ONEWAY;
    }
    return TMessageType.CALL;
}
/**
 * Returns the {@link ProcessFunction}.
 *
 * @throws ClassCastException if this function is asynchronous
 */
@SuppressWarnings("unchecked")
public ProcessFunction<Object, TBase<TBase<?, ?>, TFieldIdEnum>> syncFunc() {
    return (ProcessFunction<Object, TBase<TBase<?, ?>, TFieldIdEnum>>) func;
}

/**
 * Returns the {@link AsyncProcessFunction}.
 *
 * @throws ClassCastException if this function is synchronous
 */
@SuppressWarnings("unchecked")
public AsyncProcessFunction<Object, TBase<TBase<?, ?>, TFieldIdEnum>, Object> asyncFunc() {
    return (AsyncProcessFunction<Object, TBase<TBase<?, ?>, TFieldIdEnum>, Object>) func;
}
/**
 * Returns the Thrift service interface this function belongs to.
 */
public Class<?> serviceType() {
    return serviceType;
}
/**
 * Returns the name of this function, as declared in the Thrift IDL.
 */
public String name() {
    return name;
}
/**
 * Returns the field of the result struct that holds the successful return value,
 * or {@code null} when this function returns {@code void} or is one-way
 * (no "success" field exists in the generated result metadata).
 */
public TFieldIdEnum successField() {
    return successField;
}
/**
 * Returns the result-struct fields that hold the declared exceptions.
 * May be empty when the function declares no exceptions.
 */
public Collection<TFieldIdEnum> exceptionFields() {
    return exceptionFields.values();
}
/**
 * Returns the exception types declared by this function.
 *
 * <p>A defensive copy is returned; the previous implementation exposed the
 * internal array directly, allowing callers to mutate shared state.
 */
public Class<?>[] declaredExceptions() {
    return declaredExceptions.clone();
}
/**
 * Returns a new empty arguments instance, obtained from the generated
 * {@code "<method>_args"} factory of the wrapped function.
 */
public TBase<TBase<?, ?>, TFieldIdEnum> newArgs() {
    if (!isAsync()) {
        return syncFunc().getEmptyArgsInstance();
    }
    return asyncFunc().getEmptyArgsInstance();
}
/**
 * Returns a new arguments instance populated from the given positional values.
 *
 * @param args the argument values, in the order the Thrift method declares them
 */
public TBase<TBase<?, ?>, TFieldIdEnum> newArgs(List<Object> args) {
    requireNonNull(args, "args");
    final TBase<TBase<?, ?>, TFieldIdEnum> populated = newArgs();
    int position = 0;
    for (Object value : args) {
        populated.setFieldValue(argFields[position++], value);
    }
    return populated;
}
/**
 * Returns a new empty result instance.
 *
 * <p>Must not be called on a one-way function: {@code result} is {@code null}
 * in that case (see {@link #isOneWay()}) and this method would throw
 * {@link NullPointerException}.
 */
public TBase<TBase<?, ?>, TFieldIdEnum> newResult() {
    return result.deepCopy();
}
/**
 * Sets the success field of the specified {@code result} to the specified
 * {@code value}. A no-op for functions without a success field
 * ({@code void} return or one-way).
 */
public void setSuccess(TBase<?, TFieldIdEnum> result, Object value) {
    if (successField == null) {
        return;
    }
    result.setFieldValue(successField, value);
}
/**
 * Converts the specified {@code result} struct into the value it carries.
 *
 * @throws TException the declared exception stored in {@code result}, if one is set
 * @throws TApplicationException with {@code MISSING_RESULT} when a non-void
 *         function produced neither a value nor an exception
 */
public Object getResult(TBase<TBase<?, ?>, TFieldIdEnum> result) throws TException {
    // A set exception field always wins over the success value.
    for (TFieldIdEnum exceptionField : exceptionFields()) {
        if (result.isSet(exceptionField)) {
            throw (TException) ThriftFieldAccess.get(result, exceptionField);
        }
    }
    final TFieldIdEnum success = successField();
    if (success == null) {
        // void method - there is nothing to return.
        return null;
    }
    if (!result.isSet(success)) {
        throw new TApplicationException(
                TApplicationException.MISSING_RESULT,
                result.getClass().getName() + '.' + success.getFieldName());
    }
    return ThriftFieldAccess.get(result, success);
}
// Resolves the generated "<method>_result" struct for a synchronous function,
// or null when the function is one-way.
private static TBase<TBase<?, ?>, TFieldIdEnum> getResult(ProcessFunction<?, ?> func) {
    return getResult0(Type.SYNC, func.getClass(), func.getMethodName());
}
// Resolves the generated "<method>_result" struct for an asynchronous function,
// or null when the function is one-way.
private static TBase<TBase<?, ?>, TFieldIdEnum> getResult(AsyncProcessFunction<?, ?, ?> asyncFunc) {
    return getResult0(Type.ASYNC, asyncFunc.getClass(), asyncFunc.getMethodName());
}
/**
 * Instantiates the generated {@code "<method>_result"} class for the given
 * function, or returns {@code null} when the class does not exist — which is
 * how one-way functions are detected.
 *
 * @throws IllegalStateException if the class exists but cannot be instantiated
 */
private static TBase<TBase<?, ?>, TFieldIdEnum> getResult0(
        Type type, Class<?> funcClass, String methodName) {
    final String resultTypeName = typeName(type, funcClass, methodName, methodName + "_result");
    try {
        @SuppressWarnings("unchecked")
        Class<TBase<TBase<?, ?>, TFieldIdEnum>> resultType =
                (Class<TBase<TBase<?, ?>, TFieldIdEnum>>) Class.forName(
                        resultTypeName, false, funcClass.getClassLoader());
        return resultType.newInstance();
    } catch (ClassNotFoundException ignored) {
        // Oneway function does not have a result type.
        return null;
    } catch (Exception e) {
        throw new IllegalStateException("cannot determine the result type of method: " + methodName, e);
    }
}
/**
 * Stores {@code cause} into the first declared-exception field of
 * {@code result} whose type can hold it.
 *
 * @return {@code true} if a field accepted the cause,
 *         {@code false} if the cause is not a declared exception
 */
public boolean setException(TBase<?, TFieldIdEnum> result, Throwable cause) {
    final Class<?> causeType = cause.getClass();
    for (Entry<Class<Throwable>, TFieldIdEnum> entry : exceptionFields.entrySet()) {
        if (!entry.getKey().isAssignableFrom(causeType)) {
            continue;
        }
        result.setFieldValue(entry.getValue(), cause);
        return true;
    }
    return false;
}
// Resolves the generated "<method>_args" struct for a synchronous function.
private static TBase<TBase<?, ?>, TFieldIdEnum> getArgs(ProcessFunction<?, ?> func) {
    return getArgs0(Type.SYNC, func.getClass(), func.getMethodName());
}
// Resolves the generated "<method>_args" struct for an asynchronous function.
private static TBase<TBase<?, ?>, TFieldIdEnum> getArgs(AsyncProcessFunction<?, ?, ?> asyncFunc) {
    return getArgs0(Type.ASYNC, asyncFunc.getClass(), asyncFunc.getMethodName());
}
/**
 * Instantiates the generated {@code "<method>_args"} class for the given
 * function. Unlike the result type, the args class must always exist, so any
 * failure (including {@link ClassNotFoundException}) is an error.
 *
 * @throws IllegalStateException if the args class cannot be loaded or instantiated
 */
private static TBase<TBase<?, ?>, TFieldIdEnum> getArgs0(
        Type type, Class<?> funcClass, String methodName) {
    final String argsTypeName = typeName(type, funcClass, methodName, methodName + "_args");
    try {
        @SuppressWarnings("unchecked")
        Class<TBase<TBase<?, ?>, TFieldIdEnum>> argsType =
                (Class<TBase<TBase<?, ?>, TFieldIdEnum>>) Class.forName(
                        argsTypeName, false, funcClass.getClassLoader());
        return argsType.newInstance();
    } catch (Exception e) {
        throw new IllegalStateException("cannot determine the args class of method: " + methodName, e);
    }
}
// Resolves the field-id enum constants of "<method>_args" for a synchronous function.
private static TFieldIdEnum[] getArgFields(ProcessFunction<?, ?> func) {
    return getArgFields0(Type.SYNC, func.getClass(), func.getMethodName());
}
// Resolves the field-id enum constants of "<method>_args" for an asynchronous function.
private static TFieldIdEnum[] getArgFields(AsyncProcessFunction<?, ?, ?> asyncFunc) {
    return getArgFields0(Type.ASYNC, asyncFunc.getClass(), asyncFunc.getMethodName());
}
/**
 * Loads the generated {@code "<method>_args$_Fields"} enum and returns its
 * constants, which identify the argument fields in declaration order.
 *
 * @throws IllegalStateException if the enum cannot be loaded or has no constants
 */
private static TFieldIdEnum[] getArgFields0(Type type, Class<?> funcClass, String methodName) {
    final String fieldIdEnumTypeName = typeName(type, funcClass, methodName, methodName + "_args$_Fields");
    try {
        Class<?> fieldIdEnumType = Class.forName(fieldIdEnumTypeName, false, funcClass.getClassLoader());
        // getEnumConstants() returns null for non-enum classes; treat that as an error.
        return (TFieldIdEnum[]) requireNonNull(fieldIdEnumType.getEnumConstants(),
                "field enum may not be empty");
    } catch (Exception e) {
        throw new IllegalStateException("cannot determine the arg fields of method: " + methodName, e);
    }
}
// Resolves the declared exception types of a synchronous function's IDL method.
private static Class<?>[] getDeclaredExceptions(ProcessFunction<?, ?> func) {
    return getDeclaredExceptions0(Type.SYNC, func.getClass(), func.getMethodName());
}
// Resolves the declared exception types of an asynchronous function's IDL method.
private static Class<?>[] getDeclaredExceptions(AsyncProcessFunction<?, ?, ?> asyncFunc) {
    return getDeclaredExceptions0(Type.ASYNC, asyncFunc.getClass(), asyncFunc.getMethodName());
}
/**
 * Looks up the method on the generated {@code Iface} interface and returns its
 * declared ({@code throws}) exception types.
 *
 * <p>NOTE(review): matches by name only, so for an overloaded method the first
 * declared overload wins — Thrift does not generate overloads, so this is safe
 * for generated interfaces.
 *
 * @throws IllegalStateException if the interface or method cannot be found
 */
private static Class<?>[] getDeclaredExceptions0(
        Type type, Class<?> funcClass, String methodName) {
    final String ifaceTypeName = typeName(type, funcClass, methodName, "Iface");
    try {
        Class<?> ifaceType = Class.forName(ifaceTypeName, false, funcClass.getClassLoader());
        for (Method m : ifaceType.getDeclaredMethods()) {
            if (!m.getName().equals(methodName)) {
                continue;
            }
            return m.getExceptionTypes();
        }
        throw new IllegalStateException("failed to find a method: " + methodName);
    } catch (Exception e) {
        throw new IllegalStateException(
                "cannot determine the declared exceptions of method: " + methodName, e);
    }
}
/**
 * Builds the fully-qualified name of a generated companion type (for example
 * {@code "com.foo.FooService$bar_args"}) from a processor class name such as
 * {@code "com.foo.FooService$Processor$bar"}.
 *
 * @throws IllegalStateException if the processor-class marker is not found
 */
private static String typeName(Type type, Class<?> funcClass, String methodName, String toAppend) {
    final String processorName = funcClass.getName();
    final String marker = (type == Type.SYNC ? "$Processor$" : "$AsyncProcessor$") + methodName;
    final int serviceEnd = processorName.lastIndexOf(marker);
    if (serviceEnd <= 0) {
        throw new IllegalStateException("cannot determine the service class of method: " + methodName);
    }
    return processorName.substring(0, serviceEnd) + '$' + toAppend;
}
}
| |
// Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.ui.modelutil;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.equalTo;
import static org.mockito.Mockito.verify;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.robolectric.annotation.Config;
import org.chromium.base.test.BaseRobolectricTestRunner;
import org.chromium.build.BuildConfig;
import org.chromium.ui.modelutil.PropertyModel.WritableBooleanPropertyKey;
import org.chromium.ui.modelutil.PropertyModel.WritableFloatPropertyKey;
import org.chromium.ui.modelutil.PropertyModel.WritableIntPropertyKey;
import org.chromium.ui.modelutil.PropertyModel.WritableObjectPropertyKey;
import org.chromium.ui.modelutil.PropertyObservable.PropertyObserver;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
/**
* Tests to ensure/validate the interactions with the PropertyModel.
*/
@RunWith(BaseRobolectricTestRunner.class)
@Config(manifest = Config.NONE)
public class PropertyModelTest {
    @Rule
    public ExpectedException thrown = ExpectedException.none();

    // Shared property keys used across all tests. Declared final (the originals
    // were mutable public statics): PropertyModel tracks keys by identity, so a
    // reassignment between tests would silently break every subsequent test.
    public static final WritableBooleanPropertyKey BOOLEAN_PROPERTY_A = new WritableBooleanPropertyKey();
    public static final WritableBooleanPropertyKey BOOLEAN_PROPERTY_B = new WritableBooleanPropertyKey();
    public static final WritableBooleanPropertyKey BOOLEAN_PROPERTY_C = new WritableBooleanPropertyKey();
    public static final WritableFloatPropertyKey FLOAT_PROPERTY_A = new WritableFloatPropertyKey();
    public static final WritableFloatPropertyKey FLOAT_PROPERTY_B = new WritableFloatPropertyKey();
    public static final WritableFloatPropertyKey FLOAT_PROPERTY_C = new WritableFloatPropertyKey();
    public static final WritableIntPropertyKey INT_PROPERTY_A = new WritableIntPropertyKey();
    public static final WritableIntPropertyKey INT_PROPERTY_B = new WritableIntPropertyKey();
    public static final WritableIntPropertyKey INT_PROPERTY_C = new WritableIntPropertyKey();
    public static final WritableObjectPropertyKey<Object> OBJECT_PROPERTY_A =
            new WritableObjectPropertyKey<>();
    public static final WritableObjectPropertyKey<String> OBJECT_PROPERTY_B =
            new WritableObjectPropertyKey<>();
    public static final WritableObjectPropertyKey<List<Integer>> OBJECT_PROPERTY_C =
            new WritableObjectPropertyKey<>();
    // Constructed with skipEquality=true, so compareValue() reports inequality
    // even when both models hold the same object (see the last test below).
    public static final WritableObjectPropertyKey<Object> OBJECT_PROPERTY_SKIP_EQUALITY =
            new WritableObjectPropertyKey<>(true);

    /** Only explicitly-set keys should be reported by getAllSetProperties(). */
    @Test
    public void getAllSetProperties() {
        PropertyModel model = new PropertyModel(
                BOOLEAN_PROPERTY_A, FLOAT_PROPERTY_A, INT_PROPERTY_A, OBJECT_PROPERTY_A);
        model.set(BOOLEAN_PROPERTY_A, true);
        model.set(INT_PROPERTY_A, 42);
        Collection<PropertyKey> setProperties = model.getAllSetProperties();
        assertThat(setProperties, containsInAnyOrder(BOOLEAN_PROPERTY_A, INT_PROPERTY_A));
        assertThat(setProperties.size(), equalTo(2));
    }

    /** Boolean sets notify observers and are readable back. */
    @Test
    public void booleanUpdates() {
        PropertyModel model = new PropertyModel(BOOLEAN_PROPERTY_A, BOOLEAN_PROPERTY_B);
        verifyBooleanUpdate(model, BOOLEAN_PROPERTY_A, false);
        verifyBooleanUpdate(model, BOOLEAN_PROPERTY_A, true);
        verifyBooleanUpdate(model, BOOLEAN_PROPERTY_B, true);
        verifyBooleanUpdate(model, BOOLEAN_PROPERTY_B, false);
    }

    // Sets {@code key} to {@code value} with a fresh observer attached and
    // asserts exactly one change notification plus readback of the value.
    private void verifyBooleanUpdate(
            PropertyModel model, WritableBooleanPropertyKey key, boolean value) {
        @SuppressWarnings("unchecked")
        PropertyObserver<PropertyKey> observer = Mockito.mock(PropertyObserver.class);
        model.addObserver(observer);
        // Reset to drop any notification fired by addObserver itself.
        Mockito.<PropertyObserver>reset(observer);
        model.set(key, value);
        verify(observer).onPropertyChanged(model, key);
        assertThat(model.get(key), equalTo(value));
        model.removeObserver(observer);
    }

    /** Float sets notify observers, including for NaN/infinite/extreme values. */
    @Test
    public void floatUpdates() {
        PropertyModel model =
                new PropertyModel(FLOAT_PROPERTY_A, FLOAT_PROPERTY_B, FLOAT_PROPERTY_C);
        verifyFloatUpdate(model, FLOAT_PROPERTY_A, 0f);
        verifyFloatUpdate(model, FLOAT_PROPERTY_B, 1f);
        verifyFloatUpdate(model, FLOAT_PROPERTY_C, -1f);
        verifyFloatUpdate(model, FLOAT_PROPERTY_A, Float.NaN);
        verifyFloatUpdate(model, FLOAT_PROPERTY_A, Float.NEGATIVE_INFINITY);
        verifyFloatUpdate(model, FLOAT_PROPERTY_A, Float.POSITIVE_INFINITY);
        verifyFloatUpdate(model, FLOAT_PROPERTY_A, Float.MIN_VALUE);
        verifyFloatUpdate(model, FLOAT_PROPERTY_A, Float.MAX_VALUE);
    }

    // Same contract as verifyBooleanUpdate, for float keys.
    private void verifyFloatUpdate(PropertyModel model, WritableFloatPropertyKey key, float value) {
        @SuppressWarnings("unchecked")
        PropertyObserver<PropertyKey> observer = Mockito.mock(PropertyObserver.class);
        model.addObserver(observer);
        Mockito.<PropertyObserver>reset(observer);
        model.set(key, value);
        verify(observer).onPropertyChanged(model, key);
        assertThat(model.get(key), equalTo(value));
        model.removeObserver(observer);
    }

    /** Int sets notify observers, including for the int extremes. */
    @Test
    public void intUpdates() {
        PropertyModel model = new PropertyModel(INT_PROPERTY_A, INT_PROPERTY_B, INT_PROPERTY_C);
        verifyIntUpdate(model, INT_PROPERTY_A, 0);
        verifyIntUpdate(model, INT_PROPERTY_B, -1);
        verifyIntUpdate(model, INT_PROPERTY_C, 1);
        verifyIntUpdate(model, INT_PROPERTY_A, Integer.MAX_VALUE);
        verifyIntUpdate(model, INT_PROPERTY_A, Integer.MIN_VALUE);
    }

    // Same contract as verifyBooleanUpdate, for int keys.
    private void verifyIntUpdate(PropertyModel model, WritableIntPropertyKey key, int value) {
        @SuppressWarnings("unchecked")
        PropertyObserver<PropertyKey> observer = Mockito.mock(PropertyObserver.class);
        model.addObserver(observer);
        Mockito.<PropertyObserver>reset(observer);
        model.set(key, value);
        verify(observer).onPropertyChanged(model, key);
        assertThat(model.get(key), equalTo(value));
        model.removeObserver(observer);
    }

    /** Object sets notify observers, including null and equal-but-distinct values. */
    @Test
    public void objectUpdates() {
        PropertyModel model =
                new PropertyModel(OBJECT_PROPERTY_A, OBJECT_PROPERTY_B, OBJECT_PROPERTY_C);
        verifyObjectUpdate(model, OBJECT_PROPERTY_A, new Object());
        verifyObjectUpdate(model, OBJECT_PROPERTY_A, null);
        verifyObjectUpdate(model, OBJECT_PROPERTY_B, "Test");
        verifyObjectUpdate(model, OBJECT_PROPERTY_B, "Test1");
        verifyObjectUpdate(model, OBJECT_PROPERTY_B, null);
        verifyObjectUpdate(model, OBJECT_PROPERTY_B, "Test");
        // Each update uses a copied list with one more element, so every set()
        // sees a value that is not equal to the previous one.
        List<Integer> list = new ArrayList<>();
        verifyObjectUpdate(model, OBJECT_PROPERTY_C, list);
        list = new ArrayList<>(list);
        list.add(1);
        verifyObjectUpdate(model, OBJECT_PROPERTY_C, list);
        list = new ArrayList<>(list);
        list.add(2);
        verifyObjectUpdate(model, OBJECT_PROPERTY_C, list);
    }

    // Same contract as verifyBooleanUpdate, for object keys.
    private <T> void verifyObjectUpdate(
            PropertyModel model, WritableObjectPropertyKey<T> key, T value) {
        @SuppressWarnings("unchecked")
        PropertyObserver<PropertyKey> observer = Mockito.mock(PropertyObserver.class);
        model.addObserver(observer);
        Mockito.<PropertyObserver>reset(observer);
        model.set(key, value);
        verify(observer).onPropertyChanged(model, key);
        assertThat(model.get(key), equalTo(value));
        model.removeObserver(observer);
    }

    /** Re-setting a property to its current value must not notify observers. */
    @Test
    public void duplicateSetChangeSuppression() {
        PropertyModel model = new PropertyModel(
                BOOLEAN_PROPERTY_A, FLOAT_PROPERTY_A, INT_PROPERTY_A, OBJECT_PROPERTY_A);
        model.set(BOOLEAN_PROPERTY_A, true);
        model.set(FLOAT_PROPERTY_A, 1f);
        model.set(INT_PROPERTY_A, -1);
        Object obj = new Object();
        model.set(OBJECT_PROPERTY_A, obj);
        @SuppressWarnings("unchecked")
        PropertyObserver<PropertyKey> observer = Mockito.mock(PropertyObserver.class);
        model.addObserver(observer);
        Mockito.<PropertyObserver>reset(observer);
        // Repeat the identical sets; none should reach the observer.
        model.set(BOOLEAN_PROPERTY_A, true);
        model.set(FLOAT_PROPERTY_A, 1f);
        model.set(INT_PROPERTY_A, -1);
        model.set(OBJECT_PROPERTY_A, obj);
        Mockito.verifyZeroInteractions(observer);
    }

    /** Setting a key the model was not built with must be rejected (asserts on). */
    @Test
    public void ensureValidKey() {
        // The validity check is an assert, so it only fires in builds with asserts enabled.
        if (!BuildConfig.ENABLE_ASSERTS) return;
        PropertyModel model = new PropertyModel(BOOLEAN_PROPERTY_A, BOOLEAN_PROPERTY_B);
        thrown.expect(IllegalArgumentException.class);
        model.set(BOOLEAN_PROPERTY_C, true);
    }

    /** Building a model with duplicate keys must be rejected. */
    @Test(expected = IllegalArgumentException.class)
    public void preventsDuplicateKeys() {
        new PropertyModel(BOOLEAN_PROPERTY_A, BOOLEAN_PROPERTY_A);
    }

    /** compareValue() on boolean keys: equal values match, differing or unset ones do not. */
    @Test
    public void testCompareValue_Boolean() {
        PropertyModel model1 =
                new PropertyModel(BOOLEAN_PROPERTY_A, BOOLEAN_PROPERTY_B, BOOLEAN_PROPERTY_C);
        model1.set(BOOLEAN_PROPERTY_A, true);
        model1.set(BOOLEAN_PROPERTY_B, true);
        model1.set(BOOLEAN_PROPERTY_C, false);
        PropertyModel model2 =
                new PropertyModel(BOOLEAN_PROPERTY_A, BOOLEAN_PROPERTY_B, BOOLEAN_PROPERTY_C);
        model2.set(BOOLEAN_PROPERTY_A, true);
        model2.set(BOOLEAN_PROPERTY_B, false);
        // BOOLEAN_PROPERTY_C is deliberately left unset on model2.
        Assert.assertTrue("BOOLEAN_PROPERTY_A should be equal",
                model1.compareValue(model2, BOOLEAN_PROPERTY_A));
        Assert.assertFalse("BOOLEAN_PROPERTY_B should not be equal",
                model1.compareValue(model2, BOOLEAN_PROPERTY_B));
        Assert.assertFalse("BOOLEAN_PROPERTY_C should not be equal",
                model1.compareValue(model2, BOOLEAN_PROPERTY_C));
    }

    /** compareValue() on int keys: equal values match, differing or unset ones do not. */
    @Test
    public void testCompareValue_Integer() {
        PropertyModel model1 = new PropertyModel(INT_PROPERTY_A, INT_PROPERTY_B, INT_PROPERTY_C);
        model1.set(INT_PROPERTY_A, 1);
        model1.set(INT_PROPERTY_B, 2);
        model1.set(INT_PROPERTY_C, 3);
        PropertyModel model2 = new PropertyModel(INT_PROPERTY_A, INT_PROPERTY_B, INT_PROPERTY_C);
        model2.set(INT_PROPERTY_A, 1);
        model2.set(INT_PROPERTY_B, 3);
        Assert.assertTrue(
                "INT_PROPERTY_A should be equal", model1.compareValue(model2, INT_PROPERTY_A));
        Assert.assertFalse(
                "INT_PROPERTY_B should not be equal", model1.compareValue(model2, INT_PROPERTY_B));
        Assert.assertFalse(
                "INT_PROPERTY_C should not be equal", model1.compareValue(model2, INT_PROPERTY_C));
    }

    /** compareValue() on float keys: equal values match, differing or unset ones do not. */
    @Test
    public void testCompareValue_Float() {
        PropertyModel model1 =
                new PropertyModel(FLOAT_PROPERTY_A, FLOAT_PROPERTY_B, FLOAT_PROPERTY_C);
        model1.set(FLOAT_PROPERTY_A, 1.2f);
        model1.set(FLOAT_PROPERTY_B, 2.2f);
        model1.set(FLOAT_PROPERTY_C, 3.2f);
        PropertyModel model2 =
                new PropertyModel(FLOAT_PROPERTY_A, FLOAT_PROPERTY_B, FLOAT_PROPERTY_C);
        model2.set(FLOAT_PROPERTY_A, 1.2f);
        model2.set(FLOAT_PROPERTY_B, 3.2f);
        Assert.assertTrue(
                "FLOAT_PROPERTY_A should be equal", model1.compareValue(model2, FLOAT_PROPERTY_A));
        Assert.assertFalse("FLOAT_PROPERTY_B should not be equal",
                model1.compareValue(model2, FLOAT_PROPERTY_B));
        Assert.assertFalse("FLOAT_PROPERTY_C should not be equal",
                model1.compareValue(model2, FLOAT_PROPERTY_C));
    }

    /** compareValue() on object keys uses equals(), so equal strings match. */
    @Test
    public void testCompareValue_Object() {
        Object sharedObject = new Object();
        PropertyModel model1 =
                new PropertyModel(OBJECT_PROPERTY_A, OBJECT_PROPERTY_B, OBJECT_PROPERTY_C);
        model1.set(OBJECT_PROPERTY_A, sharedObject);
        model1.set(OBJECT_PROPERTY_B, "Test");
        model1.set(OBJECT_PROPERTY_C, new ArrayList<>());
        PropertyModel model2 =
                new PropertyModel(OBJECT_PROPERTY_A, OBJECT_PROPERTY_B, OBJECT_PROPERTY_C);
        model2.set(OBJECT_PROPERTY_A, sharedObject);
        model2.set(OBJECT_PROPERTY_B, "Test");
        Assert.assertTrue("OBJECT_PROPERTY_A should be equal",
                model1.compareValue(model2, OBJECT_PROPERTY_A));
        Assert.assertTrue("OBJECT_PROPERTY_B should be equal",
                model1.compareValue(model2, OBJECT_PROPERTY_B));
        Assert.assertFalse("OBJECT_PROPERTY_C should not be equal",
                model1.compareValue(model2, OBJECT_PROPERTY_C));
        model2.set(OBJECT_PROPERTY_B, "Test2");
        Assert.assertFalse("OBJECT_PROPERTY_B should not be equal",
                model1.compareValue(model2, OBJECT_PROPERTY_B));
    }

    /** With skipEquality=true, compareValue() reports false even for the same instance. */
    @Test
    public void testCompareValue_Object_SkipEquality() {
        Object sharedObject = new Object();
        PropertyModel model1 = new PropertyModel(OBJECT_PROPERTY_SKIP_EQUALITY);
        model1.set(OBJECT_PROPERTY_SKIP_EQUALITY, sharedObject);
        PropertyModel model2 = new PropertyModel(OBJECT_PROPERTY_SKIP_EQUALITY);
        model2.set(OBJECT_PROPERTY_SKIP_EQUALITY, sharedObject);
        Assert.assertFalse("OBJECT_PROPERTY_A should not be equal",
                model1.compareValue(model2, OBJECT_PROPERTY_SKIP_EQUALITY));
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.streams.kstream.internals;
import org.apache.kafka.common.MetricName;
import org.apache.kafka.common.metrics.Metrics;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.utils.LogContext;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.KeyValueTimestamp;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.kstream.Aggregator;
import org.apache.kafka.streams.kstream.Initializer;
import org.apache.kafka.streams.kstream.Merger;
import org.apache.kafka.streams.kstream.SessionWindows;
import org.apache.kafka.streams.kstream.Windowed;
import org.apache.kafka.streams.processor.Processor;
import org.apache.kafka.streams.processor.To;
import org.apache.kafka.streams.processor.internals.MockStreamsMetrics;
import org.apache.kafka.streams.processor.internals.ProcessorRecordContext;
import org.apache.kafka.streams.processor.internals.metrics.ThreadMetrics;
import org.apache.kafka.streams.processor.internals.ToInternal;
import org.apache.kafka.streams.processor.internals.testutil.LogCaptureAppender;
import org.apache.kafka.streams.state.KeyValueIterator;
import org.apache.kafka.streams.state.SessionStore;
import org.apache.kafka.streams.state.StoreBuilder;
import org.apache.kafka.streams.state.Stores;
import org.apache.kafka.streams.state.internals.ThreadCache;
import org.apache.kafka.test.InternalMockProcessorContext;
import org.apache.kafka.test.NoOpRecordCollector;
import org.apache.kafka.test.StreamsTestUtils;
import org.apache.kafka.test.TestUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.io.File;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import static java.time.Duration.ofMillis;
import static org.apache.kafka.common.utils.Utils.mkEntry;
import static org.apache.kafka.common.utils.Utils.mkMap;
import static org.apache.kafka.test.StreamsTestUtils.getMetricByName;
import static org.hamcrest.CoreMatchers.hasItem;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.greaterThan;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
public class KStreamSessionWindowAggregateProcessorTest {
// Session inactivity gap used by every test: 5 minutes in milliseconds.
private static final long GAP_MS = 5 * 60 * 1000L;
private static final String STORE_NAME = "session-store";
private final ToInternal toInternal = new ToInternal();
// Aggregation: start at 0 and count records per session; merging adds counts.
private final Initializer<Long> initializer = () -> 0L;
private final Aggregator<String, String, Long> aggregator = (aggKey, value, aggregate) -> aggregate + 1;
private final Merger<String, Long> sessionMerger = (aggKey, aggOne, aggTwo) -> aggOne + aggTwo;
private final KStreamSessionWindowAggregate<String, String, Long> sessionAggregator =
        new KStreamSessionWindowAggregate<>(
                SessionWindows.with(ofMillis(GAP_MS)),
                STORE_NAME,
                initializer,
                aggregator,
                sessionMerger);
// Captures everything forwarded downstream; populated by the context override
// installed in initializeStore().
private final List<KeyValueTimestamp> results = new ArrayList<>();
private final Processor<String, String> processor = sessionAggregator.get();
private SessionStore<String, Long> sessionStore;
private InternalMockProcessorContext context;
private Metrics metrics;
@Before
public void initializeStore() {
    final File stateDir = TestUtils.tempDirectory();
    metrics = new Metrics();
    // Renamed from 'metrics': the original local shadowed the field of the same
    // name, forcing an awkward qualified reference to reach the field.
    final MockStreamsMetrics streamsMetrics = new MockStreamsMetrics(metrics);
    ThreadMetrics.skipRecordSensor(streamsMetrics);
    // Override forward() so everything sent downstream is captured in 'results'.
    context = new InternalMockProcessorContext(
            stateDir,
            Serdes.String(),
            Serdes.String(),
            streamsMetrics,
            new StreamsConfig(StreamsTestUtils.getStreamsConfig()),
            NoOpRecordCollector::new,
            new ThreadCache(new LogContext("testCache "), 100000, streamsMetrics)
    ) {
        @Override
        public <K, V> void forward(final K key, final V value, final To to) {
            toInternal.update(to);
            results.add(new KeyValueTimestamp<>(key, value, toInternal.timestamp()));
        }
    };
    initStore(true);
    processor.init(context);
}
// (Re)builds the session store used by the tests. Logging is disabled so no
// changelog records are produced; caching is optional because several tests
// specifically exercise the non-cached forwarding path.
private void initStore(final boolean enableCaching) {
    final StoreBuilder<SessionStore<String, Long>> storeBuilder =
            Stores.sessionStoreBuilder(
                    Stores.persistentSessionStore(STORE_NAME, ofMillis(GAP_MS * 3)),
                    Serdes.String(),
                    Serdes.Long())
                    .withLoggingDisabled();
    if (enableCaching) {
        storeBuilder.withCachingEnabled();
    }
    sessionStore = storeBuilder.build();
    sessionStore.init(context, sessionStore);
}
// Releases the (persistent) store after each test so temp state is cleaned up.
@After
public void closeStore() {
    sessionStore.close();
}
/** Two records for one key, 500ms apart (well inside GAP_MS), form a single session of count 2. */
@Test
public void shouldCreateSingleSessionWhenWithinGap() {
    context.setTime(0);
    processor.process("john", "first");
    context.setTime(500);
    processor.process("john", "second");
    final KeyValueIterator<Windowed<String>, Long> sessions =
            sessionStore.findSessions("john", 0, 2000);
    assertTrue(sessions.hasNext());
    assertEquals(Long.valueOf(2), sessions.next().value);
}
/**
 * A record that lands between two existing sessions (within the gap of both)
 * must merge them into one session whose count is the sum of all three records.
 */
@Test
public void shouldMergeSessions() {
    context.setTime(0);
    final String sessionId = "mel";
    processor.process(sessionId, "first");
    assertTrue(sessionStore.findSessions(sessionId, 0, 0).hasNext());
    // move time beyond gap
    context.setTime(GAP_MS + 1);
    processor.process(sessionId, "second");
    assertTrue(sessionStore.findSessions(sessionId, GAP_MS + 1, GAP_MS + 1).hasNext());
    // should still exist as not within gap
    assertTrue(sessionStore.findSessions(sessionId, 0, 0).hasNext());
    // move time back
    context.setTime(GAP_MS / 2);
    processor.process(sessionId, "third");
    // The out-of-order "third" record bridges the two sessions -> single
    // merged session with aggregate 3.
    final KeyValueIterator<Windowed<String>, Long> iterator =
            sessionStore.findSessions(sessionId, 0, GAP_MS + 1);
    final KeyValue<Windowed<String>, Long> kv = iterator.next();
    assertEquals(Long.valueOf(3), kv.value);
    assertFalse(iterator.hasNext());
}
/** Two records at the identical timestamp update one session rather than creating two. */
@Test
public void shouldUpdateSessionIfTheSameTime() {
    context.setTime(0);
    processor.process("mel", "first");
    processor.process("mel", "second");
    final KeyValueIterator<Windowed<String>, Long> sessions =
            sessionStore.findSessions("mel", 0, 0);
    assertEquals(Long.valueOf(2L), sessions.next().value);
    assertFalse(sessions.hasNext());
}
/**
 * Records for the same key that are more than GAP_MS apart must produce
 * separate sessions, each forwarded with its own count and window timestamp.
 */
@Test
public void shouldHaveMultipleSessionsForSameIdWhenTimestampApartBySessionGap() {
    final String sessionId = "mel";
    long time = 0;
    context.setTime(time);
    processor.process(sessionId, "first");
    context.setTime(time += GAP_MS + 1);
    processor.process(sessionId, "second");
    processor.process(sessionId, "second");
    context.setTime(time += GAP_MS + 1);
    processor.process(sessionId, "third");
    processor.process(sessionId, "third");
    processor.process(sessionId, "third");
    // The store is cached, so flush() is needed to push the results downstream.
    sessionStore.flush();
    assertEquals(
            Arrays.asList(
                    new KeyValueTimestamp<>(
                            new Windowed<>(sessionId, new SessionWindow(0, 0)),
                            new Change<>(1L, null),
                            0L),
                    new KeyValueTimestamp<>(
                            new Windowed<>(sessionId, new SessionWindow(GAP_MS + 1, GAP_MS + 1)),
                            new Change<>(2L, null),
                            GAP_MS + 1),
                    new KeyValueTimestamp<>(
                            new Windowed<>(sessionId, new SessionWindow(time, time)),
                            new Change<>(3L, null),
                            time)
            ),
            results
    );
}
/**
 * When two sessions merge, the pre-merge session must be removed from the
 * store, leaving only the merged session.
 */
@Test
public void shouldRemoveMergedSessionsFromStateStore() {
    context.setTime(0);
    processor.process("a", "1");
    // first ensure it is in the store
    final KeyValueIterator<Windowed<String>, Long> a1 =
            sessionStore.findSessions("a", 0, 0);
    assertEquals(KeyValue.pair(new Windowed<>("a", new SessionWindow(0, 0)), 1L), a1.next());
    context.setTime(100);
    processor.process("a", "2");
    // a1 from above should have been removed
    // should have merged session in store
    final KeyValueIterator<Windowed<String>, Long> a2 =
            sessionStore.findSessions("a", 0, 100);
    assertEquals(KeyValue.pair(new Windowed<>("a", new SessionWindow(0, 100)), 2L), a2.next());
    assertFalse(a2.hasNext());
}
/**
 * Interleaves records for four keys across several timestamps and verifies the
 * full set of forwarded session results: untouched single-record sessions,
 * a within-gap extension ("d"), and cross-gap new sessions with merging ("a").
 */
@Test
public void shouldHandleMultipleSessionsAndMerging() {
    context.setTime(0);
    processor.process("a", "1");
    processor.process("b", "1");
    processor.process("c", "1");
    processor.process("d", "1");
    context.setTime(GAP_MS / 2);
    processor.process("d", "2");
    context.setTime(GAP_MS + 1);
    processor.process("a", "2");
    processor.process("b", "2");
    context.setTime(GAP_MS + 1 + GAP_MS / 2);
    processor.process("a", "3");
    processor.process("c", "3");
    // Cached store: flush to emit the final state of each session downstream.
    sessionStore.flush();
    assertEquals(
            Arrays.asList(
                    new KeyValueTimestamp<>(
                            new Windowed<>("a", new SessionWindow(0, 0)),
                            new Change<>(1L, null),
                            0L),
                    new KeyValueTimestamp<>(
                            new Windowed<>("b", new SessionWindow(0, 0)),
                            new Change<>(1L, null),
                            0L),
                    new KeyValueTimestamp<>(
                            new Windowed<>("c", new SessionWindow(0, 0)),
                            new Change<>(1L, null),
                            0L),
                    new KeyValueTimestamp<>(
                            new Windowed<>("d", new SessionWindow(0, GAP_MS / 2)),
                            new Change<>(2L, null),
                            GAP_MS / 2),
                    new KeyValueTimestamp<>(
                            new Windowed<>("b", new SessionWindow(GAP_MS + 1, GAP_MS + 1)),
                            new Change<>(1L, null),
                            GAP_MS + 1),
                    new KeyValueTimestamp<>(
                            new Windowed<>("a", new SessionWindow(GAP_MS + 1, GAP_MS + 1 + GAP_MS / 2)),
                            new Change<>(2L, null),
                            GAP_MS + 1 + GAP_MS / 2),
                    new KeyValueTimestamp<>(new Windowed<>(
                            "c",
                            new SessionWindow(GAP_MS + 1 + GAP_MS / 2, GAP_MS + 1 + GAP_MS / 2)), new Change<>(1L, null),
                            GAP_MS + 1 + GAP_MS / 2)
            ),
            results
    );
}
/**
 * The aggregator's KTable view getter must return the aggregate stored for
 * each session window.
 */
@Test
public void shouldGetAggregatedValuesFromValueGetter() {
    final KTableValueGetter<Windowed<String>, Long> getter = sessionAggregator.view().get();
    getter.init(context);
    context.setTime(0);
    processor.process("a", "1");
    context.setTime(GAP_MS + 1);
    processor.process("a", "1");
    processor.process("a", "2");
    // First session has one record; the second (beyond the gap) has two.
    final long t0 = getter.get(new Windowed<>("a", new SessionWindow(0, 0))).value();
    final long t1 = getter.get(new Windowed<>("a", new SessionWindow(GAP_MS + 1, GAP_MS + 1))).value();
    assertEquals(1L, t0);
    assertEquals(2L, t1);
}
/**
 * Without caching, each new session is forwarded downstream immediately —
 * no flush() required.
 */
@Test
public void shouldImmediatelyForwardNewSessionWhenNonCachedStore() {
    // Rebuild the store without caching and re-init the processor against it.
    initStore(false);
    processor.init(context);
    context.setTime(0);
    processor.process("a", "1");
    processor.process("b", "1");
    processor.process("c", "1");
    assertEquals(
            Arrays.asList(
                    new KeyValueTimestamp<>(
                            new Windowed<>("a", new SessionWindow(0, 0)),
                            new Change<>(1L, null),
                            0L),
                    new KeyValueTimestamp<>(
                            new Windowed<>("b", new SessionWindow(0, 0)),
                            new Change<>(1L, null),
                            0L),
                    new KeyValueTimestamp<>(
                            new Windowed<>("c", new SessionWindow(0, 0)),
                            new Change<>(1L, null),
                            0L)
            ),
            results
    );
}
/**
 * Without caching, merging forwards a tombstone (Change with null new value)
 * for the removed pre-merge session, followed by the merged session.
 */
@Test
public void shouldImmediatelyForwardRemovedSessionsWhenMerging() {
    initStore(false);
    processor.init(context);
    context.setTime(0);
    processor.process("a", "1");
    context.setTime(5);
    processor.process("a", "1");
    assertEquals(
            Arrays.asList(
                    // Initial session [0,0] with count 1...
                    new KeyValueTimestamp<>(
                            new Windowed<>("a", new SessionWindow(0, 0)),
                            new Change<>(1L, null),
                            0L),
                    // ...then its removal (null new value) when the merge happens...
                    new KeyValueTimestamp<>(
                            new Windowed<>("a", new SessionWindow(0, 0)),
                            new Change<>(null, null),
                            0L),
                    // ...then the merged session [0,5] with count 2.
                    new KeyValueTimestamp<>(
                            new Windowed<>("a", new SessionWindow(0, 5)),
                            new Change<>(2L, null),
                            5L)
            ),
            results
    );
}
/**
 * A null-key record must be skipped, incrementing the skipped-records metric
 * and logging a warning that identifies the record's topic/partition/offset.
 */
@Test
public void shouldLogAndMeterWhenSkippingNullKey() {
    initStore(false);
    processor.init(context);
    context.setRecordContext(new ProcessorRecordContext(-1, -2, -3, "topic", null));
    final LogCaptureAppender appender = LogCaptureAppender.createAndRegister();
    processor.process(null, "1");
    LogCaptureAppender.unregister(appender);
    assertEquals(
            1.0,
            getMetricByName(context.metrics().metrics(), "skipped-records-total", "stream-metrics").metricValue());
    assertThat(
            appender.getMessages(),
            hasItem("Skipping record due to null key. value=[1] topic=[topic] partition=[-3] offset=[-2]"));
}
/**
 * With zero grace, a record whose window already expired (stream time has
 * advanced past it) must be dropped, incrementing the late-record-drop
 * metrics and logging a debug message; on-time records are unaffected.
 */
@Test
public void shouldLogAndMeterWhenSkippingLateRecordWithZeroGrace() {
    LogCaptureAppender.setClassLoggerToDebug(KStreamSessionWindowAggregate.class);
    final LogCaptureAppender appender = LogCaptureAppender.createAndRegister();
    // Dedicated processor with a 10ms window and grace of 0.
    final Processor<String, String> processor = new KStreamSessionWindowAggregate<>(
            SessionWindows.with(ofMillis(10L)).grace(ofMillis(0L)),
            STORE_NAME,
            initializer,
            aggregator,
            sessionMerger
    ).get();
    initStore(false);
    processor.init(context);
    // dummy record to establish stream time = 0
    context.setRecordContext(new ProcessorRecordContext(0, -2, -3, "topic", null));
    processor.process("dummy", "dummy");
    // record arrives on time, should not be skipped
    context.setRecordContext(new ProcessorRecordContext(0, -2, -3, "topic", null));
    processor.process("OnTime1", "1");
    // dummy record to advance stream time = 1
    context.setRecordContext(new ProcessorRecordContext(1, -2, -3, "topic", null));
    processor.process("dummy", "dummy");
    // record is late
    context.setRecordContext(new ProcessorRecordContext(0, -2, -3, "topic", null));
    processor.process("Late1", "1");
    LogCaptureAppender.unregister(appender);
    // Exactly one drop recorded, and the rate metric is non-zero.
    final MetricName dropMetric = new MetricName(
            "late-record-drop-total",
            "stream-processor-node-metrics",
            "The total number of occurrence of late-record-drop operations.",
            mkMap(
                    mkEntry("client-id", "test"),
                    mkEntry("task-id", "0_0"),
                    mkEntry("processor-node-id", "TESTING_NODE")
            )
    );
    assertThat(metrics.metrics().get(dropMetric).metricValue(), is(1.0));
    final MetricName dropRate = new MetricName(
            "late-record-drop-rate",
            "stream-processor-node-metrics",
            "The average number of occurrence of late-record-drop operations.",
            mkMap(
                    mkEntry("client-id", "test"),
                    mkEntry("task-id", "0_0"),
                    mkEntry("processor-node-id", "TESTING_NODE")
            )
    );
    assertThat(
            (Double) metrics.metrics().get(dropRate).metricValue(),
            greaterThan(0.0));
    assertThat(
            appender.getMessages(),
            hasItem("Skipping record for expired window. key=[Late1] topic=[topic] partition=[-3] offset=[-2] timestamp=[0] window=[0,0] expiration=[1] streamTime=[1]"));
}
@Test
public void shouldLogAndMeterWhenSkippingLateRecordWithNonzeroGrace() {
    LogCaptureAppender.setClassLoggerToDebug(KStreamSessionWindowAggregate.class);
    final LogCaptureAppender logAppender = LogCaptureAppender.createAndRegister();
    final Processor<String, String> processor = new KStreamSessionWindowAggregate<>(
        SessionWindows.with(ofMillis(10L)).grace(ofMillis(1L)),
        STORE_NAME,
        initializer,
        aggregator,
        sessionMerger
    ).get();
    initStore(false);
    processor.init(context);

    // Establish stream time = 0 with a dummy record.
    context.setRecordContext(new ProcessorRecordContext(0, -2, -3, "topic", null));
    processor.process("dummy", "dummy");
    // A record at the current stream time is on time and must not be skipped.
    context.setRecordContext(new ProcessorRecordContext(0, -2, -3, "topic", null));
    processor.process("OnTime1", "1");
    // Advance stream time to 1 with another dummy record.
    context.setRecordContext(new ProcessorRecordContext(1, -2, -3, "topic", null));
    processor.process("dummy", "dummy");
    // A timestamp-0 record is still within the 1ms grace period and must be kept.
    context.setRecordContext(new ProcessorRecordContext(0, -2, -3, "topic", null));
    processor.process("OnTime2", "1");
    // Advance stream time to 2 with another dummy record.
    context.setRecordContext(new ProcessorRecordContext(2, -2, -3, "topic", null));
    processor.process("dummy", "dummy");
    // Now a timestamp-0 record is beyond the grace period and must be dropped.
    context.setRecordContext(new ProcessorRecordContext(0, -2, -3, "topic", null));
    processor.process("Late1", "1");
    LogCaptureAppender.unregister(logAppender);

    // Both the total and the rate sensor of the drop metric must have recorded the skip.
    final MetricName dropTotal = new MetricName(
        "late-record-drop-total",
        "stream-processor-node-metrics",
        "The total number of occurrence of late-record-drop operations.",
        mkMap(
            mkEntry("client-id", "test"),
            mkEntry("task-id", "0_0"),
            mkEntry("processor-node-id", "TESTING_NODE")
        )
    );
    final MetricName dropRate = new MetricName(
        "late-record-drop-rate",
        "stream-processor-node-metrics",
        "The average number of occurrence of late-record-drop operations.",
        mkMap(
            mkEntry("client-id", "test"),
            mkEntry("task-id", "0_0"),
            mkEntry("processor-node-id", "TESTING_NODE")
        )
    );
    assertThat(metrics.metrics().get(dropTotal).metricValue(), is(1.0));
    assertThat(
        (Double) metrics.metrics().get(dropRate).metricValue(),
        greaterThan(0.0));
    // The skip must also have been logged with the full record coordinates.
    assertThat(
        logAppender.getMessages(),
        hasItem("Skipping record for expired window. key=[Late1] topic=[topic] partition=[-3] offset=[-2] timestamp=[0] window=[0,0] expiration=[1] streamTime=[2]"));
}
}
| |
/*
* Copyright (C) 2017 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.inject.internal;
import static java.util.stream.Collectors.joining;
import com.google.common.base.Equivalence;
import com.google.common.base.Objects;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Maps;
import com.google.inject.Key;
import com.google.inject.internal.util.Classes;
import com.google.inject.spi.ElementSource;
import com.google.inject.spi.ErrorDetail;
import com.google.inject.spi.Message;
import java.lang.reflect.Member;
import java.util.Arrays;
import java.util.Collection;
import java.util.Formatter;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/** Utility methods for {@link Message} objects */
public final class Messages {
  private Messages() {}

  /** Prepends the list of sources to the given {@link Message}. */
  static Message mergeSources(List<Object> sources, Message message) {
    List<Object> messageSources = message.getSources();
    // It is possible that the end of getSources() and the beginning of message.getSources() are
    // equivalent, in this case we should drop the repeated source when joining the lists. The
    // most likely scenario where this would happen is when a scoped binding throws an exception,
    // due to the fact that InternalFactoryToProviderAdapter applies the binding source when
    // merging errors.
    if (!sources.isEmpty()
        && !messageSources.isEmpty()
        && Objects.equal(messageSources.get(0), sources.get(sources.size() - 1))) {
      messageSources = messageSources.subList(1, messageSources.size());
    }
    return message.withSource(
        ImmutableList.builder().addAll(sources).addAll(messageSources).build());
  }

  /**
   * Calls {@link String#format} after converting the arguments using some standard guice formatting
   * for {@link Key}, {@link Class} and {@link Member} objects.
   */
  public static String format(String messageFormat, Object... arguments) {
    // Convert into a fresh array instead of in place so that a caller that passes
    // an explicit Object[] never observes its array being mutated.
    Object[] converted = new Object[arguments.length];
    for (int i = 0; i < arguments.length; i++) {
      converted[i] = convert(arguments[i]);
    }
    return String.format(messageFormat, converted);
  }

  /** Returns the formatted message for an exception with the specified messages. */
  public static String formatMessages(String heading, Collection<Message> errorMessages) {
    Formatter fmt = new Formatter().format(heading).format(":%n%n");
    int index = 1;
    // Only print per-error causes when there is more than one distinct cause;
    // a single shared cause is reported by the caller instead.
    boolean displayCauses = getOnlyCause(errorMessages) == null;

    List<ErrorDetail<?>> remainingErrors =
        errorMessages.stream().map(Message::getErrorDetail).collect(Collectors.toList());

    // Maps an already-printed cause to the error index it was printed under, so
    // later errors with an equivalent cause can reference it instead of repeating it.
    Map<Equivalence.Wrapper<Throwable>, Integer> causes = Maps.newHashMap();
    while (!remainingErrors.isEmpty()) {
      ErrorDetail<?> currentError = remainingErrors.get(0);
      // Split the remaining errors into 2 groups, one that contains mergeable errors with
      // currentError and the other that need to be formatted separately in the next iteration.
      Map<Boolean, List<ErrorDetail<?>>> partitionedByMergeable =
          remainingErrors.subList(1, remainingErrors.size()).stream()
              .collect(Collectors.partitioningBy(currentError::isMergeable));

      remainingErrors = partitionedByMergeable.get(false);

      currentError.format(index, partitionedByMergeable.get(true), fmt);

      Throwable cause = currentError.getCause();
      if (displayCauses && cause != null) {
        Equivalence.Wrapper<Throwable> causeEquivalence = ThrowableEquivalence.INSTANCE.wrap(cause);
        if (!causes.containsKey(causeEquivalence)) {
          causes.put(causeEquivalence, index);
          fmt.format("Caused by: %s", Throwables.getStackTraceAsString(cause));
        } else {
          int causeIdx = causes.get(causeEquivalence);
          fmt.format(
              "Caused by: %s (same stack trace as error #%s)",
              cause.getClass().getName(), causeIdx);
        }
      }
      fmt.format("%n");
      index++;
    }

    // index has advanced one past the last printed error.
    if (index == 2) {
      fmt.format("1 error");
    } else {
      fmt.format("%s errors", index - 1);
    }

    return PackageNameCompressor.compressPackagesInMessage(fmt.toString());
  }

  /**
   * Creates a new Message without a cause.
   *
   * @param errorId The enum id for the error
   * @param messageFormat Format string
   * @param arguments format string arguments
   */
  public static Message create(ErrorId errorId, String messageFormat, Object... arguments) {
    return create(errorId, null, messageFormat, arguments);
  }

  /**
   * Creates a new Message with the given cause.
   *
   * @param errorId The enum id for the error
   * @param cause The exception that caused the error
   * @param messageFormat Format string
   * @param arguments format string arguments
   */
  public static Message create(
      ErrorId errorId, Throwable cause, String messageFormat, Object... arguments) {
    return create(errorId, cause, ImmutableList.of(), messageFormat, arguments);
  }

  /**
   * Creates a new Message with the given cause and a binding source stack.
   *
   * @param errorId The enum id for the error
   * @param cause The exception that caused the error
   * @param sources The binding sources for the source stack
   * @param messageFormat Format string
   * @param arguments format string arguments
   */
  public static Message create(
      ErrorId errorId,
      Throwable cause,
      List<Object> sources,
      String messageFormat,
      Object... arguments) {
    String message = format(messageFormat, arguments);
    return new Message(errorId, sources, message, cause);
  }

  /** Formats an object in a user friendly way. */
  static Object convert(Object o) {
    // Unwrap ElementSource so the declaring source is formatted and the module
    // stack can be appended afterwards.
    ElementSource source = null;
    if (o instanceof ElementSource) {
      source = (ElementSource) o;
      o = source.getDeclaringSource();
    }
    return convert(o, source);
  }

  /** Formats an object, appending the module stack from {@code source} when available. */
  static Object convert(Object o, ElementSource source) {
    // Use the first converter that applies; fall through to the raw object otherwise.
    for (Converter<?> converter : converters) {
      if (converter.appliesTo(o)) {
        return appendModules(converter.convert(o), source);
      }
    }
    return appendModules(o, source);
  }

  /** Appends an "installed by" module stack to {@code source} when one is known. */
  private static Object appendModules(Object source, ElementSource elementSource) {
    String modules = SourceFormatter.getModuleStack(elementSource);
    if (modules.length() == 0) {
      return source;
    } else {
      return source + " (installed by: " + modules + ")";
    }
  }

  /** Converts instances of a particular type to a user-friendly string. */
  private abstract static class Converter<T> {

    final Class<T> type;

    Converter(Class<T> type) {
      this.type = type;
    }

    boolean appliesTo(Object o) {
      return o != null && type.isAssignableFrom(o.getClass());
    }

    String convert(Object o) {
      return toString(type.cast(o));
    }

    abstract String toString(T t);
  }

  @SuppressWarnings({"unchecked", "rawtypes"}) // rawtypes aren't avoidable
  private static final Collection<Converter<?>> converters =
      ImmutableList.of(
          new Converter<Class>(Class.class) {
            @Override
            public String toString(Class c) {
              return c.getName();
            }
          },
          new Converter<Member>(Member.class) {
            @Override
            public String toString(Member member) {
              return Classes.toString(member);
            }
          },
          new Converter<Key>(Key.class) {
            @Override
            public String toString(Key key) {
              if (key.getAnnotationType() != null) {
                return key.getTypeLiteral()
                    + " annotated with "
                    + (key.getAnnotation() != null ? key.getAnnotation() : key.getAnnotationType());
              } else {
                return key.getTypeLiteral().toString();
              }
            }
          });

  /**
   * Returns the cause throwable if there is exactly one cause in {@code messages}. If there are
   * zero or multiple messages with causes, null is returned.
   */
  public static Throwable getOnlyCause(Collection<Message> messages) {
    Throwable onlyCause = null;
    for (Message message : messages) {
      Throwable messageCause = message.getCause();
      if (messageCause == null) {
        continue;
      }

      if (onlyCause != null && !ThrowableEquivalence.INSTANCE.equivalent(onlyCause, messageCause)) {
        return null;
      }

      onlyCause = messageCause;
    }

    return onlyCause;
  }

  /** Structural equivalence for throwables: same class, message, stack trace and cause chain. */
  private static final class ThrowableEquivalence extends Equivalence<Throwable> {
    static final ThrowableEquivalence INSTANCE = new ThrowableEquivalence();

    @Override
    protected boolean doEquivalent(Throwable a, Throwable b) {
      return a.getClass().equals(b.getClass())
          && Objects.equal(a.getMessage(), b.getMessage())
          && Arrays.equals(a.getStackTrace(), b.getStackTrace())
          && equivalent(a.getCause(), b.getCause());
    }

    @Override
    protected int doHash(Throwable t) {
      return Objects.hashCode(t.getClass().hashCode(), t.getMessage(), hash(t.getCause()));
    }
  }

  /** ANSI escape codes used to colorize error messages on capable terminals. */
  private enum FormatOptions {
    RED("\u001B[31m"),
    BOLD("\u001B[1m"),
    FAINT("\u001B[2m"),
    ITALIC("\u001B[3m"),
    UNDERLINE("\u001B[4m"),
    RESET("\u001B[0m");

    private final String ansiCode;

    FormatOptions(String ansiCode) {
      this.ansiCode = ansiCode;
    }
  }

  /** Wraps {@code text} in the given ANSI codes, unless colorization is disabled. */
  private static String formatText(String text, FormatOptions... options) {
    if (!InternalFlags.enableColorizeErrorMessages()) {
      return text;
    }
    return String.format(
        "%s%s%s",
        Arrays.stream(options).map(option -> option.ansiCode).collect(joining()),
        text,
        FormatOptions.RESET.ansiCode);
  }

  public static String bold(String text) {
    return formatText(text, FormatOptions.BOLD);
  }

  public static String redBold(String text) {
    return formatText(text, FormatOptions.RED, FormatOptions.BOLD);
  }

  public static String underline(String text) {
    return formatText(text, FormatOptions.UNDERLINE);
  }

  public static String faint(String text) {
    return formatText(text, FormatOptions.FAINT);
  }
}
| |
// Copyright 2012 Cloudera Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.cloudera.impala.catalog;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hive.hbase.HBaseSerDe;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.log4j.Logger;
import com.cloudera.impala.common.Pair;
import com.cloudera.impala.thrift.TCatalogObjectType;
import com.cloudera.impala.thrift.TColumn;
import com.cloudera.impala.thrift.THBaseTable;
import com.cloudera.impala.thrift.TResultSet;
import com.cloudera.impala.thrift.TResultSetMetadata;
import com.cloudera.impala.thrift.TTable;
import com.cloudera.impala.thrift.TTableDescriptor;
import com.cloudera.impala.thrift.TTableType;
import com.cloudera.impala.util.StatsHelper;
import com.cloudera.impala.util.TResultRowBuilder;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
/**
* Impala representation of HBase table metadata,
* as loaded from Hive's metastore.
* This implies that we inherit the metastore's limitations related to HBase,
* for example the lack of support for composite HBase row keys.
* We sort the HBase columns (cols) by family/qualifier
* to simplify the retrieval logic in the backend, since
* HBase returns data ordered by family/qualifier.
* This implies that a "select *"-query on an HBase table
* will not have the columns ordered as they were declared in the DDL.
* They will be ordered by family/qualifier.
*
*/
public class HBaseTable extends Table {
// Maximum deviation from the average to stop querying more regions
// to estimate the row count
private static final double DELTA_FROM_AVERAGE = 0.15;
private static final Logger LOG = Logger.getLogger(HBaseTable.class);
// Copied from Hive's HBaseStorageHandler.java.
public static final String DEFAULT_PREFIX = "default.";
// Number of rows fetched during the row count estimation per region
public static final int ROW_COUNT_ESTIMATE_BATCH_SIZE = 10;
// Minimum number of regions that are checked to estimate the row count
private static final int MIN_NUM_REGIONS_TO_CHECK = 5;
// Column referring to HBase row key.
// Hive (including metastore) currently doesn't support composite HBase keys.
protected HBaseColumn rowKey_;
// Name of table in HBase.
// 'this.name' is the alias of the HBase table in Hive.
protected String hbaseTableName_;
// Input format class for HBase tables read by Hive.
private static final String HBASE_INPUT_FORMAT =
"org.apache.hadoop.hive.hbase.HiveHBaseTableInputFormat";
// Serialization class for HBase tables set in the corresponding Metastore table.
private static final String HBASE_SERIALIZATION_LIB =
"org.apache.hadoop.hive.hbase.HBaseSerDe";
// Storage handler class for HBase tables read by Hive.
private static final String HBASE_STORAGE_HANDLER =
"org.apache.hadoop.hive.hbase.HBaseStorageHandler";
// Column family of HBase row key
private static final String ROW_KEY_COLUMN_FAMILY = ":key";
// Keep the conf around
private final static Configuration hbaseConf_ = HBaseConfiguration.create();
private HTable hTable_ = null;
// Cached column families. Used primarily for speeding up row stats estimation
// (see CDH-19292).
private HColumnDescriptor[] columnFamilies_ = null;
// Constructs the catalog entry from the metastore table; column, key and stats
// metadata is populated later by load() / loadFromThrift().
protected HBaseTable(TableId id, org.apache.hadoop.hive.metastore.api.Table msTbl,
Db db, String name, String owner) {
super(id, msTbl, db, name, owner);
}
// Parse the column description string to the column families and column
// qualifies. This is a copy of HBaseSerDe.parseColumnMapping and
// parseColumnStorageTypes with parts we don't use removed. The hive functions
// are not public.
//   tableDefaultStorageIsBinary - true if table is default to binary encoding
//   columnsMappingSpec - input string format describing the table
//   fieldSchemas - input field schema from metastore table
//   columnFamilies/columnQualifiers/colIsBinaryEncoded - out parameters that will be
//   filled with the column family, column qualifier and encoding for each column.
private void parseColumnMapping(boolean tableDefaultStorageIsBinary,
    String columnsMappingSpec, List<FieldSchema> fieldSchemas,
    List<String> columnFamilies, List<String> columnQualifiers,
    List<Boolean> colIsBinaryEncoded) throws SerDeException {
  if (columnsMappingSpec == null) {
    throw new SerDeException(
        "Error: hbase.columns.mapping missing for this HBase table.");
  }

  if (columnsMappingSpec.equals("") ||
      columnsMappingSpec.equals(HBaseSerDe.HBASE_KEY_COL)) {
    throw new SerDeException("Error: hbase.columns.mapping specifies only "
        + "the HBase table row key. A valid Hive-HBase table must specify at "
        + "least one additional column.");
  }

  int rowKeyIndex = -1;
  String[] columnSpecs = columnsMappingSpec.split(",");
  // If there was an implicit key column mapping, the number of columns (fieldSchemas)
  // will be one more than the number of column mapping specs.
  int fsStartIdxOffset = fieldSchemas.size() - columnSpecs.length;
  if (fsStartIdxOffset != 0 && fsStartIdxOffset != 1) {
    // This should never happen - Hive blocks creating a mismatched table and both Hive
    // and Impala currently block all column-level DDL on HBase tables.
    throw new SerDeException(String.format("Number of entries in " +
        "'hbase.columns.mapping' does not match the number of columns in the " +
        "table: %d != %d (counting the key if implicit)",
        columnSpecs.length, fieldSchemas.size()));
  }

  for (int i = 0; i < columnSpecs.length; ++i) {
    String mappingSpec = columnSpecs[i];
    String[] mapInfo = mappingSpec.split("#");
    // Trim column info so that serdeproperties with new lines still parse correctly.
    String colInfo = mapInfo[0].trim();
    int idxFirst = colInfo.indexOf(":");
    int idxLast = colInfo.lastIndexOf(":");
    // Exactly one ':' must separate column family from qualifier.
    if (idxFirst < 0 || idxFirst != idxLast) {
      throw new SerDeException("Error: the HBase columns mapping contains a "
          + "badly formed column family, column qualifier specification.");
    }

    if (colInfo.equals(HBaseSerDe.HBASE_KEY_COL)) {
      Preconditions.checkState(fsStartIdxOffset == 0);
      rowKeyIndex = i;
      columnFamilies.add(colInfo);
      columnQualifiers.add(null);
    } else {
      String[] parts = colInfo.split(":");
      Preconditions.checkState(parts.length > 0 && parts.length <= 2);
      columnFamilies.add(parts[0]);
      if (parts.length == 2) {
        columnQualifiers.add(parts[1]);
      } else {
        columnQualifiers.add(null);
      }
    }

    // Set column binary encoding
    FieldSchema fieldSchema = fieldSchemas.get(i + fsStartIdxOffset);
    boolean supportsBinaryEncoding = supportsBinaryEncoding(fieldSchema);
    if (mapInfo.length == 1) {
      // There is no column level storage specification. Use the table storage spec.
      // (Autoboxing uses Boolean.valueOf; avoids the deprecated new Boolean(...).)
      colIsBinaryEncoded.add(tableDefaultStorageIsBinary && supportsBinaryEncoding);
    } else if (mapInfo.length == 2) {
      // There is a storage specification for the column
      String storageOption = mapInfo[1];

      if (!(storageOption.equals("-") || "string".startsWith(storageOption) || "binary"
          .startsWith(storageOption))) {
        throw new SerDeException("Error: A column storage specification is one of"
            + " the following: '-', a prefix of 'string', or a prefix of 'binary'. "
            + storageOption + " is not a valid storage option specification for "
            + fieldSchema.getName());
      }

      boolean isBinaryEncoded = false;
      if ("-".equals(storageOption)) {
        isBinaryEncoded = tableDefaultStorageIsBinary;
      } else if ("binary".startsWith(storageOption)) {
        isBinaryEncoded = true;
      }
      if (isBinaryEncoded && !supportsBinaryEncoding) {
        // Use string encoding and log a warning if the column spec is binary but the
        // column type does not support it.
        // TODO: Hive/HBase does not raise an exception, but should we?
        LOG.warn("Column storage specification for column " + fieldSchema.getName()
            + " is binary" + " but the column type " + fieldSchema.getType() +
            " does not support binary encoding. Fallback to string format.");
        isBinaryEncoded = false;
      }
      colIsBinaryEncoded.add(isBinaryEncoded);
    } else {
      // error in storage specification
      throw new SerDeException("Error: " + HBaseSerDe.HBASE_COLUMNS_MAPPING
          + " storage specification " + mappingSpec + " is not valid for column: "
          + fieldSchema.getName());
    }
  }

  // No explicit key mapping: Hive implies the key as the first column.
  if (rowKeyIndex == -1) {
    columnFamilies.add(0, HBaseSerDe.HBASE_KEY_COL);
    columnQualifiers.add(0, null);
    colIsBinaryEncoded.add(0,
        supportsBinaryEncoding(fieldSchemas.get(0)) && tableDefaultStorageIsBinary);
  }
}
// Returns true if the column described by 'fs' may use HBase binary storage.
// Only boolean, integer and floating point types qualify; a column whose type
// cannot be parsed is treated as not supporting binary encoding.
private boolean supportsBinaryEncoding(FieldSchema fs) {
  final Type parsedType;
  try {
    parsedType = parseColumnType(fs);
  } catch (TableLoadingException e) {
    // Unsupported/unparseable type: fall back to string encoding.
    return false;
  }
  // Only boolean, integer and floating point types can use binary storage.
  return parsedType.isBoolean() || parsedType.isIntegerType()
      || parsedType.isFloatingPointType();
}
/**
 * Loads the table metadata from the Hive metastore and opens the HBase table handle.
 *
 * For hbase tables, we can support tables with columns we don't understand at
 * all (e.g. map) as long as the user does not select those. This is in contrast
 * to hdfs tables since we typically need to understand all columns to make sense
 * of the file at all.
 */
@Override
public void load(Table oldValue, HiveMetaStoreClient client,
    org.apache.hadoop.hive.metastore.api.Table msTbl, boolean force) throws TableLoadingException {
  Preconditions.checkNotNull(getMetaStoreTable());
  try {
    hbaseTableName_ = getHBaseTableName(getMetaStoreTable());
    hTable_ = new HTable(hbaseConf_, hbaseTableName_);
    // Invalidate the cached column families; they are re-fetched lazily.
    columnFamilies_ = null;
    Map<String, String> serdeParams =
        getMetaStoreTable().getSd().getSerdeInfo().getParameters();
    String hbaseColumnsMapping = serdeParams.get(HBaseSerDe.HBASE_COLUMNS_MAPPING);
    if (hbaseColumnsMapping == null) {
      throw new MetaException("No hbase.columns.mapping defined in Serde.");
    }

    String hbaseTableDefaultStorageType = getMetaStoreTable().getParameters().get(
        HBaseSerDe.HBASE_TABLE_DEFAULT_STORAGE_TYPE);
    boolean tableDefaultStorageIsBinary = false;
    if (hbaseTableDefaultStorageType != null &&
        !hbaseTableDefaultStorageType.isEmpty()) {
      if (hbaseTableDefaultStorageType.equalsIgnoreCase("binary")) {
        tableDefaultStorageIsBinary = true;
      } else if (!hbaseTableDefaultStorageType.equalsIgnoreCase("string")) {
        throw new SerDeException("Error: " +
            HBaseSerDe.HBASE_TABLE_DEFAULT_STORAGE_TYPE +
            " parameter must be specified as" +
            " 'string' or 'binary'; '" + hbaseTableDefaultStorageType +
            "' is not a valid specification for this table/serde property.");
      }
    }

    // Parse HBase column-mapping string.
    List<FieldSchema> fieldSchemas = getMetaStoreTable().getSd().getCols();
    List<String> hbaseColumnFamilies = new ArrayList<String>();
    List<String> hbaseColumnQualifiers = new ArrayList<String>();
    List<Boolean> hbaseColumnBinaryEncodings = new ArrayList<Boolean>();
    parseColumnMapping(tableDefaultStorageIsBinary, hbaseColumnsMapping, fieldSchemas,
        hbaseColumnFamilies, hbaseColumnQualifiers, hbaseColumnBinaryEncodings);
    Preconditions.checkState(
        hbaseColumnFamilies.size() == hbaseColumnQualifiers.size());
    Preconditions.checkState(fieldSchemas.size() == hbaseColumnFamilies.size());

    // Populate tmp cols in the order they appear in the Hive metastore.
    // We will reorder the cols below.
    List<HBaseColumn> tmpCols = Lists.newArrayList();
    // Store the key column separately.
    // TODO: Change this to an ArrayList once we support composite row keys.
    HBaseColumn keyCol = null;
    for (int i = 0; i < fieldSchemas.size(); ++i) {
      FieldSchema s = fieldSchemas.get(i);
      Type t = Type.INVALID;
      try {
        t = parseColumnType(s);
      } catch (TableLoadingException e) {
        // Ignore hbase types we don't support yet. We can load the metadata
        // but won't be able to select from it.
      }
      HBaseColumn col = new HBaseColumn(s.getName(), hbaseColumnFamilies.get(i),
          hbaseColumnQualifiers.get(i), hbaseColumnBinaryEncodings.get(i),
          t, s.getComment(), -1);
      if (col.getColumnFamily().equals(ROW_KEY_COLUMN_FAMILY)) {
        // Store the row key column separately from the rest
        keyCol = col;
      } else {
        tmpCols.add(col);
      }
    }
    Preconditions.checkState(keyCol != null);

    // The backend assumes that the row key column is always first and
    // that the remaining HBase columns are ordered by columnFamily,columnQualifier,
    // so the final position depends on the other mapped HBase columns.
    // Sort columns and update positions.
    Collections.sort(tmpCols);
    clearColumns();

    keyCol.setPosition(0);
    addColumn(keyCol);
    // Update the positions of the remaining columns
    for (int i = 0; i < tmpCols.size(); ++i) {
      HBaseColumn col = tmpCols.get(i);
      col.setPosition(i + 1);
      addColumn(col);
    }

    // Set table stats.
    numRows_ = getRowCount(super.getMetaStoreTable().getParameters());

    // since we don't support composite hbase rowkeys yet, all hbase tables have a
    // single clustering col
    numClusteringCols_ = 1;
    loadAllColumnStats(client);
  } catch (Exception e) {
    throw new TableLoadingException("Failed to load metadata for HBase table: " +
        name_, e);
  }
}
@Override
protected void loadFromThrift(TTable table) throws TableLoadingException {
// Restore the serialized catalog state first, then re-establish the local
// HTable handle from the metastore table name (handles are not serialized).
super.loadFromThrift(table);
try {
hbaseTableName_ = getHBaseTableName(getMetaStoreTable());
hTable_ = new HTable(hbaseConf_, hbaseTableName_);
// Invalidate the cached column families; re-fetched lazily when needed.
columnFamilies_ = null;
} catch (Exception e) {
throw new TableLoadingException("Failed to load metadata for HBase table from " +
"thrift table: " + name_, e);
}
}
// This method is completely copied from Hive's HBaseStorageHandler.java.
// Resolves the underlying HBase table name for a metastore table.
private String getHBaseTableName(org.apache.hadoop.hive.metastore.api.Table tbl) {
  // Give preference to TBLPROPERTIES over SERDEPROPERTIES
  // (really we should only use TBLPROPERTIES, so this is just
  // for backwards compatibility with the original specs).
  String name = tbl.getParameters().get(HBaseSerDe.HBASE_TABLE_NAME);
  if (name != null) return name;

  name = tbl.getSd().getSerdeInfo().getParameters().get(
      HBaseSerDe.HBASE_TABLE_NAME);
  if (name != null) return name;

  // No explicit name anywhere: fall back to "<db>.<table>", dropping the
  // implicit "default." prefix.
  name = tbl.getDbName() + "." + tbl.getTableName();
  return name.startsWith(DEFAULT_PREFIX)
      ? name.substring(DEFAULT_PREFIX.length()) : name;
}
/**
* Estimates the number of rows for a single region and returns a pair with
* the estimated row count and the estimated size in bytes per row.
*/
private Pair<Long, Long> getEstimatedRowStatsForRegion(HRegionLocation location,
boolean isCompressed) throws IOException {
HRegionInfo info = location.getRegionInfo();
Scan s = new Scan(info.getStartKey());
// Get a small sample of rows
s.setBatch(ROW_COUNT_ESTIMATE_BATCH_SIZE);
// Try and get every version so the row's size can be used to estimate.
s.setMaxVersions(Short.MAX_VALUE);
// Don't cache the blocks as we don't think these are
// necessarily important blocks.
s.setCacheBlocks(false);
// NOTE(review): the original comment claimed deletes are counted too, but
// setRaw(false) excludes delete markers from the scan -- confirm whether
// setRaw(true) was intended here.
s.setRaw(false);
ResultScanner rs = hTable_.getScanner(s);
long currentRowSize = 0;
long currentRowCount = 0;
try {
// Get the ROW_COUNT_ESTIMATE_BATCH_SIZE fetched rows
// for a representative sample
for (int i = 0; i < ROW_COUNT_ESTIMATE_BATCH_SIZE; ++i) {
Result r = rs.next();
// Fewer rows than the batch size: stop early.
if (r == null)
break;
// Check for empty rows, see IMPALA-1451
if (r.isEmpty())
continue;
++currentRowCount;
// To estimate the number of rows we simply use the amount of bytes
// returned from the underlying buffer. Since HBase internally works
// with these structures as well this gives us ok estimates.
Cell[] cells = r.rawCells();
for (Cell c : cells) {
if (c instanceof KeyValue) {
currentRowSize += KeyValue.getKeyValueDataStructureSize(c.getRowLength(),
c.getFamilyLength(), c.getQualifierLength(), c.getValueLength(),
c.getTagsLength());
} else {
throw new IllegalStateException("Celltype " + c.getClass().getName() +
" not supported.");
}
}
}
} finally {
rs.close();
}
// If there are no rows then no need to estimate.
if (currentRowCount == 0)
return new Pair<Long, Long>(0L, 0L);
// Get the size on hdfs
long currentHdfsSize = getHdfsSize(info);
// estimate the number of rows.
double bytesPerRow = currentRowSize / (double) currentRowCount;
// Compression factor two is only a best effort guess
long estimatedRowCount =
(long) ((isCompressed ? 2 : 1) * (currentHdfsSize / bytesPerRow));
return new Pair<Long, Long>(estimatedRowCount, (long) bytesPerRow);
}
/**
 * Get an estimate of the number of rows and bytes per row in regions between
 * startRowKey and endRowKey.
 *
 * This number is calculated by incrementally checking as many region servers as
 * necessary until we observe a relatively constant row size per region on average.
 * Depending on the skew of data in the regions this can either mean that we need
 * to check only a minimal number of regions or that we will scan all regions.
 *
 * The accuracy of this number is determined by the number of rows that are written
 * and kept in the memstore and have not been flushed until now. A large number
 * of key-value pairs in the memstore will lead to bad estimates as this number
 * is not reflected in the file size on HDFS that is used to estimate this number.
 *
 * Currently, the algorithm does not consider the case that the key range used as a
 * parameter might be generally of different size than the rest of the region.
 *
 * The values computed here should be cached so that in high qps workloads
 * the nn is not overwhelmed. Could be done in load(); Synchronized to make
 * sure that only one thread at a time is using the htable.
 *
 * @param startRowKey
 *          First row key in the range
 * @param endRowKey
 *          Last row key in the range
 * @return The estimated number of rows in the regions between the row keys (first) and
 *         the estimated row size in bytes (second). (-1, -1) on error.
 */
public synchronized Pair<Long, Long> getEstimatedRowStats(byte[] startRowKey,
    byte[] endRowKey) {
  Preconditions.checkNotNull(startRowKey);
  Preconditions.checkNotNull(endRowKey);

  boolean isCompressed = false;
  long rowCount = 0;
  long rowSize = 0;
  try {
    // Check to see if things are compressed.
    // If they are we'll estimate a compression factor.
    if (columnFamilies_ == null) {
      columnFamilies_ = hTable_.getTableDescriptor().getColumnFamilies();
    }
    Preconditions.checkNotNull(columnFamilies_);
    for (HColumnDescriptor desc : columnFamilies_) {
      isCompressed |= desc.getCompression() != Compression.Algorithm.NONE;
    }

    // Fetch all regions for the key range
    List<HRegionLocation> locations =
        getRegionsInRange(hTable_, startRowKey, endRowKey);
    Collections.shuffle(locations);
    // The following variables track the number and size of 'rows' in
    // HBase and allow incremental calculation of the average and standard
    // deviation.
    StatsHelper<Long> statsCount = new StatsHelper<Long>();
    StatsHelper<Long> statsSize = new StatsHelper<Long>();

    // Collects stats samples from at least MIN_NUM_REGIONS_TO_CHECK
    // and at most all regions until the delta is small enough.
    while ((statsSize.count() < MIN_NUM_REGIONS_TO_CHECK ||
        statsSize.stddev() > statsSize.mean() * DELTA_FROM_AVERAGE) &&
        statsSize.count() < locations.size()) {
      Pair<Long, Long> tmp = getEstimatedRowStatsForRegion(
          locations.get((int) statsCount.count()), isCompressed);
      statsCount.addSample(tmp.first);
      statsSize.addSample(tmp.second);
    }

    // NOTE(review): if every sampled region was empty the mean is 0 and this
    // division degenerates -- confirm whether an explicit 0-row short-circuit
    // is needed here.
    rowCount = (long) (getHdfsSize(null) / statsSize.mean());
    rowSize = (long) statsSize.mean();
  } catch (IOException ioe) {
    // Print the stack trace, but we'll ignore it
    // as this is just an estimate.
    // TODO: Put this into the per query log.
    LOG.error("Error computing HBase row count estimate", ioe);
    // Use the uppercase long suffix; lowercase 'l' is easily misread as '1'.
    return new Pair<Long, Long>(-1L, -1L);
  }
  return new Pair<Long, Long>(rowCount, rowSize);
}
/**
 * Returns the HDFS size in bytes of the given region's directory. Passing
 * {@code null} returns the size of the entire table directory instead.
 *
 * @throws IOException if the filesystem cannot be reached.
 */
public long getHdfsSize(HRegionInfo info) throws IOException {
  final Path tableDir = HTableDescriptor.getTableDir(
      getRootDir(hbaseConf_), Bytes.toBytes(hbaseTableName_));
  final FileSystem fs = tableDir.getFileSystem(hbaseConf_);
  // A null HRegionInfo selects the whole table; otherwise narrow to the
  // region's encoded-name subdirectory.
  final Path target = (info == null)
      ? tableDir
      : tableDir.suffix("/" + info.getEncodedName());
  return fs.getContentSummary(target).getLength();
}
/**
 * Returns HBase's root directory, i.e. <code>hbase.rootdir</code> from the
 * given configuration, as a fully-qualified Path.
 * Method copied from HBase FSUtils.java to avoid depending on HBase server.
 */
public static Path getRootDir(final Configuration c) throws IOException {
  final Path rootPath = new Path(c.get(HConstants.HBASE_DIR));
  // Qualify against the filesystem the path itself resolves to.
  return rootPath.makeQualified(rootPath.getFileSystem(c));
}
/**
 * Hive returns the columns in order of their declaration for HBase tables,
 * which is exactly the order {@link #getColumns()} already yields.
 */
@Override
public ArrayList<Column> getColumnsInHiveOrder() {
  final ArrayList<Column> declarationOrder = getColumns();
  return declarationOrder;
}
/**
 * Builds the Thrift descriptor for this HBase table: the generic table
 * metadata plus the HBase-specific mapping and the column-name list.
 */
@Override
public TTableDescriptor toThriftDescriptor(Set<Long> referencedPartitions) {
  final TTableDescriptor desc = new TTableDescriptor(
      id_.asInt(), TTableType.HBASE_TABLE, getColumns().size(),
      numClusteringCols_, hbaseTableName_, db_.getName());
  desc.setHbaseTable(getTHBaseTable());
  desc.setColNames(getColumnNames());
  return desc;
}
/** Returns the name of the underlying physical HBase table. */
public String getHBaseTableName() {
return hbaseTableName_;
}
/** Returns the HTable client handle used to access this table. */
public HTable getHTable() {
return hTable_;
}
/** Returns the HBase configuration shared by all HBase table instances. */
public static Configuration getHBaseConf() {
return hbaseConf_;
}
/**
 * Returns the number of nodes that store data for this table.
 * Currently a hard-coded placeholder (see TODO below).
 */
@Override
public int getNumNodes() {
// TODO: implement
return 100;
}
/** HBase tables are represented as TABLE objects in the catalog. */
@Override
public TCatalogObjectType getCatalogObjectType() {
return TCatalogObjectType.TABLE;
}
/**
 * Converts this table to its Thrift representation, tagging it as an
 * HBase table and attaching the HBase-specific metadata.
 */
@Override
public TTable toThrift() {
  final TTable tTable = super.toThrift();
  tTable.setTable_type(TTableType.HBASE_TABLE);
  tTable.setHbase_table(getTHBaseTable());
  return tTable;
}
/**
 * Builds the Thrift HBase metadata: for every column, its family, its
 * qualifier (empty string when absent, e.g. the row key), and whether its
 * values are binary encoded. The three lists stay index-aligned.
 */
private THBaseTable getTHBaseTable() {
  final THBaseTable result = new THBaseTable();
  result.setTableName(hbaseTableName_);
  for (Column col : getColumns()) {
    final HBaseColumn hbaseCol = (HBaseColumn) col;
    result.addToFamilies(hbaseCol.getColumnFamily());
    final String qualifier = hbaseCol.getColumnQualifier();
    result.addToQualifiers(qualifier != null ? qualifier : "");
    result.addToBinary_encoded(hbaseCol.isBinaryEncoded());
  }
  return result;
}
/**
 * This is copied from org.apache.hadoop.hbase.client.HTable. The only difference is
 * that it does not use cache when calling getRegionLocation.
 * TODO: Remove this function and use HTable.getRegionsInRange when the non-cache
 * version has been ported to CDH (DISTRO-477).
 * Get the corresponding regions for an arbitrary range of keys.
 * <p>
 *
 * @param hbaseTbl
 *          Table whose region metadata is queried; access is serialized on
 *          this object.
 * @param startKey
 *          Starting row in range, inclusive
 * @param endKey
 *          Ending row in range, exclusive
 * @return A list of HRegionLocations corresponding to the regions that
 *         contain the specified range
 * @throws IOException
 *           if a remote or network exception occurs
 */
public static List<HRegionLocation> getRegionsInRange(HTable hbaseTbl,
    final byte[] startKey, final byte[] endKey) throws IOException {
  // An empty end key means "up to the end of the table".
  final boolean endKeyIsEndOfTable = Bytes.equals(endKey, HConstants.EMPTY_END_ROW);
  if ((Bytes.compareTo(startKey, endKey) > 0) && !endKeyIsEndOfTable) {
    throw new IllegalArgumentException("Invalid range: " +
        Bytes.toStringBinary(startKey) + " > " + Bytes.toStringBinary(endKey));
  }
  final List<HRegionLocation> regionList = new ArrayList<HRegionLocation>();
  byte[] currentKey = startKey;
  // Make sure only one thread is accessing the hbaseTbl.
  synchronized (hbaseTbl) {
    do {
      // always reload region location info (second arg 'true' bypasses cache).
      HRegionLocation regionLocation = hbaseTbl.getRegionLocation(currentKey, true);
      regionList.add(regionLocation);
      // Walk region by region: the next lookup key is this region's end key.
      currentKey = regionLocation.getRegionInfo().getEndKey();
    } while (!Bytes.equals(currentKey, HConstants.EMPTY_END_ROW) &&
        (endKeyIsEndOfTable || Bytes.compareTo(currentKey, endKey) < 0));
  }
  return regionList;
}
/**
 * Returns the storage handler class name that Hive uses to read HBase
 * tables. Also used by isHBaseTable() to recognize such tables.
 */
@Override
public String getStorageHandlerClassName() {
return HBASE_STORAGE_HANDLER;
}
/**
 * Returns statistics on this table as a tabular result set. Used for the
 * SHOW TABLE STATS statement. The schema of the returned TResultSet is set
 * inside this method.
 *
 * @throws RuntimeException wrapping any IOException from HBase/HDFS access.
 */
public TResultSet getTableStats() {
  TResultSet result = new TResultSet();
  TResultSetMetadata resultSchema = new TResultSetMetadata();
  result.setSchema(resultSchema);
  // Output schema: one row per region, plus a "Total" row when the table
  // spans more than one region.
  resultSchema.addToColumns(
      new TColumn("Region Location", Type.STRING.toThrift()));
  resultSchema.addToColumns(new TColumn("Start RowKey",
      Type.STRING.toThrift()));
  resultSchema.addToColumns(new TColumn("Est. #Rows", Type.BIGINT.toThrift()));
  resultSchema.addToColumns(new TColumn("Size", Type.STRING.toThrift()));
  // TODO: Consider fancier stats maintenance techniques for speeding up this process.
  // Currently, we list all regions and perform a mini-scan of each of them to
  // estimate the number of rows, the data size, etc., which is rather expensive.
  try {
    long totalNumRows = 0;
    long totalHdfsSize = 0;
    // Fixed argument order: start row first, end row second. The original
    // passed (EMPTY_END_ROW, EMPTY_START_ROW); both constants are the empty
    // byte array in HBase so behavior is unchanged, but the order was
    // misleading.
    List<HRegionLocation> regions = HBaseTable.getRegionsInRange(hTable_,
        HConstants.EMPTY_START_ROW, HConstants.EMPTY_END_ROW);
    for (HRegionLocation region : regions) {
      TResultRowBuilder rowBuilder = new TResultRowBuilder();
      HRegionInfo regionInfo = region.getRegionInfo();
      // Estimate rows without the compression correction (second arg false).
      Pair<Long, Long> estRowStats = getEstimatedRowStatsForRegion(region, false);
      long numRows = estRowStats.first.longValue();
      long hdfsSize = getHdfsSize(regionInfo);
      totalNumRows += numRows;
      totalHdfsSize += hdfsSize;
      // Add the region location, start rowkey, number of rows and raw Hdfs size.
      rowBuilder.add(String.valueOf(region.getHostname()))
          .add(Bytes.toString(regionInfo.getStartKey())).add(numRows)
          .addBytes(hdfsSize);
      result.addToRows(rowBuilder.get());
    }
    // Total num rows and raw Hdfs size.
    if (regions.size() > 1) {
      TResultRowBuilder rowBuilder = new TResultRowBuilder();
      rowBuilder.add("Total").add("").add(totalNumRows).addBytes(totalHdfsSize);
      result.addToRows(rowBuilder.get());
    }
  } catch (IOException e) {
    throw new RuntimeException(e);
  }
  return result;
}
/**
 * Returns true if the given Metastore Table represents an HBase table.
 * Versions of Hive/HBase are inconsistent about which HBase-related fields
 * are set (e.g., HIVE-6548 changed the input format to null), so for maximum
 * compatibility every known indicator of an HBase table is checked.
 */
public static boolean isHBaseTable(
    org.apache.hadoop.hive.metastore.api.Table msTbl) {
  // Indicator 1: the storage-handler table parameter.
  if (msTbl.getParameters() != null &&
      msTbl.getParameters().containsKey(HBASE_STORAGE_HANDLER)) {
    return true;
  }
  StorageDescriptor sd = msTbl.getSd();
  if (sd == null) return false;
  // Indicator 2: the HBase input format. Constant-first equals() also
  // covers the null case.
  if (HBASE_INPUT_FORMAT.equals(sd.getInputFormat())) return true;
  // Indicator 3: the HBase SerDe library.
  return sd.getSerdeInfo() != null &&
      HBASE_SERIALIZATION_LIB.equals(sd.getSerdeInfo().getSerializationLib());
}
}
| |
package org.spongycastle.crypto.test;
import java.security.SecureRandom;
import org.spongycastle.crypto.BlockCipher;
import org.spongycastle.crypto.BufferedBlockCipher;
import org.spongycastle.crypto.CipherParameters;
import org.spongycastle.crypto.InvalidCipherTextException;
import org.spongycastle.crypto.engines.AESEngine;
import org.spongycastle.crypto.modes.CBCBlockCipher;
import org.spongycastle.crypto.modes.CFBBlockCipher;
import org.spongycastle.crypto.modes.OFBBlockCipher;
import org.spongycastle.crypto.modes.SICBlockCipher;
import org.spongycastle.crypto.params.KeyParameter;
import org.spongycastle.crypto.params.ParametersWithIV;
import org.spongycastle.util.encoders.Hex;
import org.spongycastle.util.test.SimpleTest;
/**
* Test vectors from the NIST standard tests and Brian Gladman's vector set
* <a href="http://fp.gladman.plus.com/cryptography_technology/rijndael/">
* http://fp.gladman.plus.com/cryptography_technology/rijndael/</a>
*/
public class AESTest
extends CipherTest
{
// 47-byte plaintext (not block-aligned beyond 32 bytes) shared by the
// streaming-mode tests below.
private static final byte[] tData = Hex.decode("AAFE47EE82411A2BF3F6752AE8D7831138F041560631B114F3F6752AE8D7831138F041560631B1145A01020304050607");
// Expected ciphertexts for tData under each mode: *1 = keyed init with a
// zero IV, *2 = re-init with a null key (key retained) and an explicit IV.
private static final byte[] outCBC1 = Hex.decode("a444a9a4d46eb30cb7ed34d62873a89f8fdf2bf8a54e1aeadd06fd85c9cb46f021ee7cd4f418fa0bb72e9d07c70d5d20");
private static final byte[] outCBC2 = Hex.decode("585681354f0e01a86b32f94ebb6a675045d923cf201263c2aaecca2b4de82da0edd74ca5efd654c688f8a58e61955b11");
private static final byte[] outSIC1 = Hex.decode("82a1744e8ebbd053ca72362d5e570326e0b6fdaf824ab673fbf029042886b23c75129a015852913790f81f94447475a0");
private static final byte[] outSIC2 = Hex.decode("146cbb581d9e12c3333dd9c736fbb93043c92019f78580da48f81f80b3f551d58ea836fed480fc6912fefa9c5c89cc24");
private static final byte[] outCFB1 = Hex.decode("82a1744e8ebbd053ca72362d5e5703264b4182de3208c374b8ac4fa36af9c5e5f4f87d1e3b67963d06acf5eb13914c90");
private static final byte[] outCFB2 = Hex.decode("146cbb581d9e12c3333dd9c736fbb9303c8a3eb5185e2809e9d3c28e25cc2d2b6f5c11ee28d6530f72c412b1438a816a");
private static final byte[] outOFB1 = Hex.decode("82a1744e8ebbd053ca72362d5e5703261ebf1fdbec05e57b3465b583132f84b43bf95b2c89040ad1677b22d42db69a7a");
private static final byte[] outOFB2 = Hex.decode("146cbb581d9e12c3333dd9c736fbb9309ea4c2a7696c84959a2dada49f2f1c5905db1f0cec3a31acbc4701e74ab05e1f");
// NIST/Gladman known-answer and Monte Carlo vectors for AES-128/192/256.
// NOTE(review): tests 8-23 repeat tests 0-7 verbatim — presumably
// intentional repetition of the vector set; confirm before de-duplicating.
static SimpleTest[] tests =
{
new BlockCipherVectorTest(0, new AESEngine(),
new KeyParameter(Hex.decode("80000000000000000000000000000000")),
"00000000000000000000000000000000", "0EDD33D3C621E546455BD8BA1418BEC8"),
new BlockCipherVectorTest(1, new AESEngine(),
new KeyParameter(Hex.decode("00000000000000000000000000000080")),
"00000000000000000000000000000000", "172AEAB3D507678ECAF455C12587ADB7"),
new BlockCipherMonteCarloTest(2, 10000, new AESEngine(),
new KeyParameter(Hex.decode("00000000000000000000000000000000")),
"00000000000000000000000000000000", "C34C052CC0DA8D73451AFE5F03BE297F"),
new BlockCipherMonteCarloTest(3, 10000, new AESEngine(),
new KeyParameter(Hex.decode("5F060D3716B345C253F6749ABAC10917")),
"355F697E8B868B65B25A04E18D782AFA", "ACC863637868E3E068D2FD6E3508454A"),
new BlockCipherVectorTest(4, new AESEngine(),
new KeyParameter(Hex.decode("000000000000000000000000000000000000000000000000")),
"80000000000000000000000000000000", "6CD02513E8D4DC986B4AFE087A60BD0C"),
new BlockCipherMonteCarloTest(5, 10000, new AESEngine(),
new KeyParameter(Hex.decode("AAFE47EE82411A2BF3F6752AE8D7831138F041560631B114")),
"F3F6752AE8D7831138F041560631B114", "77BA00ED5412DFF27C8ED91F3C376172"),
new BlockCipherVectorTest(6, new AESEngine(),
new KeyParameter(Hex.decode("0000000000000000000000000000000000000000000000000000000000000000")),
"80000000000000000000000000000000", "DDC6BF790C15760D8D9AEB6F9A75FD4E"),
new BlockCipherMonteCarloTest(7, 10000, new AESEngine(),
new KeyParameter(Hex.decode("28E79E2AFC5F7745FCCABE2F6257C2EF4C4EDFB37324814ED4137C288711A386")),
"C737317FE0846F132B23C8C2A672CE22", "E58B82BFBA53C0040DC610C642121168"),
new BlockCipherVectorTest(8, new AESEngine(),
new KeyParameter(Hex.decode("80000000000000000000000000000000")),
"00000000000000000000000000000000", "0EDD33D3C621E546455BD8BA1418BEC8"),
new BlockCipherVectorTest(9, new AESEngine(),
new KeyParameter(Hex.decode("00000000000000000000000000000080")),
"00000000000000000000000000000000", "172AEAB3D507678ECAF455C12587ADB7"),
new BlockCipherMonteCarloTest(10, 10000, new AESEngine(),
new KeyParameter(Hex.decode("00000000000000000000000000000000")),
"00000000000000000000000000000000", "C34C052CC0DA8D73451AFE5F03BE297F"),
new BlockCipherMonteCarloTest(11, 10000, new AESEngine(),
new KeyParameter(Hex.decode("5F060D3716B345C253F6749ABAC10917")),
"355F697E8B868B65B25A04E18D782AFA", "ACC863637868E3E068D2FD6E3508454A"),
new BlockCipherVectorTest(12, new AESEngine(),
new KeyParameter(Hex.decode("000000000000000000000000000000000000000000000000")),
"80000000000000000000000000000000", "6CD02513E8D4DC986B4AFE087A60BD0C"),
new BlockCipherMonteCarloTest(13, 10000, new AESEngine(),
new KeyParameter(Hex.decode("AAFE47EE82411A2BF3F6752AE8D7831138F041560631B114")),
"F3F6752AE8D7831138F041560631B114", "77BA00ED5412DFF27C8ED91F3C376172"),
new BlockCipherVectorTest(14, new AESEngine(),
new KeyParameter(Hex.decode("0000000000000000000000000000000000000000000000000000000000000000")),
"80000000000000000000000000000000", "DDC6BF790C15760D8D9AEB6F9A75FD4E"),
new BlockCipherMonteCarloTest(15, 10000, new AESEngine(),
new KeyParameter(Hex.decode("28E79E2AFC5F7745FCCABE2F6257C2EF4C4EDFB37324814ED4137C288711A386")),
"C737317FE0846F132B23C8C2A672CE22", "E58B82BFBA53C0040DC610C642121168"),
new BlockCipherVectorTest(16, new AESEngine(),
new KeyParameter(Hex.decode("80000000000000000000000000000000")),
"00000000000000000000000000000000", "0EDD33D3C621E546455BD8BA1418BEC8"),
new BlockCipherVectorTest(17, new AESEngine(),
new KeyParameter(Hex.decode("00000000000000000000000000000080")),
"00000000000000000000000000000000", "172AEAB3D507678ECAF455C12587ADB7"),
new BlockCipherMonteCarloTest(18, 10000, new AESEngine(),
new KeyParameter(Hex.decode("00000000000000000000000000000000")),
"00000000000000000000000000000000", "C34C052CC0DA8D73451AFE5F03BE297F"),
new BlockCipherMonteCarloTest(19, 10000, new AESEngine(),
new KeyParameter(Hex.decode("5F060D3716B345C253F6749ABAC10917")),
"355F697E8B868B65B25A04E18D782AFA", "ACC863637868E3E068D2FD6E3508454A"),
new BlockCipherVectorTest(20, new AESEngine(),
new KeyParameter(Hex.decode("000000000000000000000000000000000000000000000000")),
"80000000000000000000000000000000", "6CD02513E8D4DC986B4AFE087A60BD0C"),
new BlockCipherMonteCarloTest(21, 10000, new AESEngine(),
new KeyParameter(Hex.decode("AAFE47EE82411A2BF3F6752AE8D7831138F041560631B114")),
"F3F6752AE8D7831138F041560631B114", "77BA00ED5412DFF27C8ED91F3C376172"),
new BlockCipherVectorTest(22, new AESEngine(),
new KeyParameter(Hex.decode("0000000000000000000000000000000000000000000000000000000000000000")),
"80000000000000000000000000000000", "DDC6BF790C15760D8D9AEB6F9A75FD4E"),
new BlockCipherMonteCarloTest(23, 10000, new AESEngine(),
new KeyParameter(Hex.decode("28E79E2AFC5F7745FCCABE2F6257C2EF4C4EDFB37324814ED4137C288711A386")),
"C737317FE0846F132B23C8C2A672CE22", "E58B82BFBA53C0040DC610C642121168")
};
// Engine instance used by performTest() for the init() failure checks.
private BlockCipher _engine = new AESEngine();
public AESTest()
{
super(tests, new AESEngine(), new KeyParameter(new byte[16]));
}
public String getName()
{
return "AES";
}
/**
 * Checks SIC/CTR mode: first a keyed init with a zero IV, then a re-init
 * with a null key (the previous key is retained) and an explicit IV.
 */
private void testNullSIC()
throws InvalidCipherTextException
{
BufferedBlockCipher b = new BufferedBlockCipher(new SICBlockCipher(new AESEngine()));
KeyParameter kp = new KeyParameter(Hex.decode("5F060D3716B345C253F6749ABAC10917"));
b.init(true, new ParametersWithIV(kp, new byte[16]));
byte[] out = new byte[b.getOutputSize(tData.length)];
int len = b.processBytes(tData, 0, tData.length, out, 0);
len += b.doFinal(out, len);
if (!areEqual(outSIC1, out))
{
fail("no match on first nullSIC check");
}
// Null key: the cipher must keep the previous key and only change the IV.
b.init(true, new ParametersWithIV(null, Hex.decode("000102030405060708090a0b0c0d0e0f")));
len = b.processBytes(tData, 0, tData.length, out, 0);
len += b.doFinal(out, len);
if (!areEqual(outSIC2, out))
{
fail("no match on second nullSIC check");
}
}
/** Same null-key re-init check as testNullSIC(), but for CBC mode. */
private void testNullCBC()
throws InvalidCipherTextException
{
BufferedBlockCipher b = new BufferedBlockCipher(new CBCBlockCipher(new AESEngine()));
KeyParameter kp = new KeyParameter(Hex.decode("5F060D3716B345C253F6749ABAC10917"));
b.init(true, new ParametersWithIV(kp, new byte[16]));
byte[] out = new byte[b.getOutputSize(tData.length)];
int len = b.processBytes(tData, 0, tData.length, out, 0);
len += b.doFinal(out, len);
if (!areEqual(outCBC1, out))
{
fail("no match on first nullCBC check");
}
b.init(true, new ParametersWithIV(null, Hex.decode("000102030405060708090a0b0c0d0e0f")));
len = b.processBytes(tData, 0, tData.length, out, 0);
len += b.doFinal(out, len);
if (!areEqual(outCBC2, out))
{
fail("no match on second nullCBC check");
}
}
/** Same null-key re-init check for OFB mode with a 128-bit feedback size. */
private void testNullOFB()
throws InvalidCipherTextException
{
BufferedBlockCipher b = new BufferedBlockCipher(new OFBBlockCipher(new AESEngine(), 128));
KeyParameter kp = new KeyParameter(Hex.decode("5F060D3716B345C253F6749ABAC10917"));
b.init(true, new ParametersWithIV(kp, new byte[16]));
byte[] out = new byte[b.getOutputSize(tData.length)];
int len = b.processBytes(tData, 0, tData.length, out, 0);
len += b.doFinal(out, len);
if (!areEqual(outOFB1, out))
{
fail("no match on first nullOFB check");
}
b.init(true, new ParametersWithIV(null, Hex.decode("000102030405060708090a0b0c0d0e0f")));
len = b.processBytes(tData, 0, tData.length, out, 0);
len += b.doFinal(out, len);
if (!areEqual(outOFB2, out))
{
fail("no match on second nullOFB check");
}
}
/** Same null-key re-init check for CFB mode with a 128-bit feedback size. */
private void testNullCFB()
throws InvalidCipherTextException
{
BufferedBlockCipher b = new BufferedBlockCipher(new CFBBlockCipher(new AESEngine(), 128));
KeyParameter kp = new KeyParameter(Hex.decode("5F060D3716B345C253F6749ABAC10917"));
b.init(true, new ParametersWithIV(kp, new byte[16]));
byte[] out = new byte[b.getOutputSize(tData.length)];
int len = b.processBytes(tData, 0, tData.length, out, 0);
len += b.doFinal(out, len);
if (!areEqual(outCFB1, out))
{
fail("no match on first nullCFB check");
}
b.init(true, new ParametersWithIV(null, Hex.decode("000102030405060708090a0b0c0d0e0f")));
len = b.processBytes(tData, 0, tData.length, out, 0);
len += b.doFinal(out, len);
if (!areEqual(outCFB2, out))
{
fail("no match on second nullCFB check");
}
}
/**
 * Compares b[bOff..b.length) against the region of a starting at aOff.
 * Used by skipTest() to compare a fragment against a slice of the full
 * reference ciphertext.
 */
private boolean areEqual(byte[] a, int aOff, byte[] b, int bOff)
{
for (int i = bOff; i != b.length; i++)
{
if (a[aOff + i - bOff] != b[i])
{
return false;
}
}
return true;
}
/**
 * Exercises SICBlockCipher's skip()/seekTo()/getPosition() keystream
 * positioning against a full reference encryption of the same data.
 */
private void skipTest()
{
CipherParameters params = new ParametersWithIV(new KeyParameter(Hex.decode("5F060D3716B345C253F6749ABAC10917")), Hex.decode("00000000000000000000000000000000"));
SICBlockCipher engine = new SICBlockCipher(new AESEngine());
engine.init(true, params);
SecureRandom rand = new SecureRandom();
byte[] plain = new byte[50000];
byte[] cipher = new byte[50000];
rand.nextBytes(plain);
// Reference: encrypt everything in one pass, then re-init and verify that
// skipped/seeked fragments match the corresponding reference slices.
engine.processBytes(plain, 0, plain.length, cipher, 0);
byte[] fragment = new byte[20];
engine.init(true, params);
engine.skip(10);
if (engine.getPosition() != 10)
{
fail("skip position incorrect - 10 got " + engine.getPosition());
}
engine.processBytes(plain, 10, fragment.length, fragment, 0);
if (!areEqual(cipher, 10, fragment, 0))
{
fail("skip forward 10 failed");
}
engine.skip(1000);
// Position: 10 (skip) + 20 (fragment) + 1000 (skip) = 1010 + fragment.length.
if (engine.getPosition() != 1010 + fragment.length)
{
fail("skip position incorrect - " + (1010 + fragment.length) + " got " + engine.getPosition());
}
engine.processBytes(plain, 1010 + fragment.length, fragment.length, fragment, 0);
if (!areEqual(cipher, 1010 + fragment.length, fragment, 0))
{
fail("skip forward 1000 failed");
}
// Negative skips move the keystream position backwards.
engine.skip(-10);
if (engine.getPosition() != 1010 + 2 * fragment.length - 10)
{
fail("skip position incorrect - " + (1010 + 2 * fragment.length - 10) + " got " + engine.getPosition());
}
engine.processBytes(plain, 1010 + 2 * fragment.length - 10, fragment.length, fragment, 0);
if (!areEqual(cipher, 1010 + 2 * fragment.length - 10, fragment, 0))
{
fail("skip back 10 failed");
}
engine.skip(-1000);
if (engine.getPosition() != 60)
{
fail("skip position incorrect - " + 60 + " got " + engine.getPosition());
}
engine.processBytes(plain, 60, fragment.length, fragment, 0);
if (!areEqual(cipher, 60, fragment, 0))
{
fail("skip back 1000 failed");
}
// seekTo() positions absolutely rather than relatively.
long pos = engine.seekTo(1010);
if (pos != 1010)
{
fail("position incorrect - " + 1010 + " got " + pos);
}
engine.processBytes(plain, 1010, fragment.length, fragment, 0);
if (!areEqual(cipher, 1010, fragment, 0))
{
fail("seek to 1010 failed");
}
engine.reset();
// Sweep every offset 0..4999: skip forward, verify, skip back over the
// fragment just produced, verify again, then reset for the next offset.
for (int i = 0; i != 5000; i++)
{
engine.skip(i);
if (engine.getPosition() != i)
{
fail("skip forward at wrong position");
}
engine.processBytes(plain, i, fragment.length, fragment, 0);
if (!areEqual(cipher, i, fragment, 0))
{
fail("skip forward i failed: " + i);
}
if (engine.getPosition() != i + fragment.length)
{
fail("cipher at wrong position: " + engine.getPosition() + " [" + i + "]");
}
engine.skip(-fragment.length);
if (engine.getPosition() != i)
{
fail("skip back at wrong position");
}
engine.processBytes(plain, i, fragment.length, fragment, 0);
if (!areEqual(cipher, i, fragment, 0))
{
fail("skip back i failed: " + i);
}
engine.reset();
}
}
public void performTest()
throws Exception
{
super.performTest();
byte[] keyBytes = new byte[16];
_engine.init(true, new KeyParameter(keyBytes));
//
// init tests: bad key length and missing key must be rejected.
//
try
{
byte[] dudKey = new byte[6];
_engine.init(true, new KeyParameter(dudKey));
fail("failed key length check");
}
catch (IllegalArgumentException e)
{
// expected
}
try
{
byte[] iv = new byte[16];
// A bare engine (no mode wrapper) cannot accept ParametersWithIV.
_engine.init(true, new ParametersWithIV(null, iv));
fail("failed parameter check");
}
catch (IllegalArgumentException e)
{
// expected
}
testNullCBC();
testNullSIC();
testNullOFB();
testNullCFB();
skipTest();
}
public static void main(
String[] args)
{
runTest(new AESTest());
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.streams.state.internals;
import org.apache.kafka.common.MetricName;
import org.apache.kafka.common.metrics.JmxReporter;
import org.apache.kafka.common.metrics.KafkaMetric;
import org.apache.kafka.common.metrics.Metrics;
import org.apache.kafka.common.metrics.Sensor;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.utils.Bytes;
import org.apache.kafka.common.utils.MockTime;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.kstream.Windowed;
import org.apache.kafka.streams.kstream.internals.SessionWindow;
import org.apache.kafka.streams.processor.TaskId;
import org.apache.kafka.streams.processor.internals.InternalProcessorContext;
import org.apache.kafka.streams.processor.internals.metrics.StreamsMetricsImpl;
import org.apache.kafka.streams.state.KeyValueIterator;
import org.apache.kafka.streams.state.SessionStore;
import org.apache.kafka.test.KeyValueIteratorStub;
import org.easymock.EasyMockRule;
import org.easymock.Mock;
import org.easymock.MockType;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameter;
import org.junit.runners.Parameterized.Parameters;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import static org.apache.kafka.common.utils.Utils.mkEntry;
import static org.apache.kafka.common.utils.Utils.mkMap;
import static org.apache.kafka.streams.processor.internals.metrics.StreamsMetricsImpl.ROLLUP_VALUE;
import static org.easymock.EasyMock.anyObject;
import static org.easymock.EasyMock.aryEq;
import static org.easymock.EasyMock.eq;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.expectLastCall;
import static org.easymock.EasyMock.mock;
import static org.easymock.EasyMock.replay;
import static org.easymock.EasyMock.verify;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.not;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertThrows;
import static org.junit.Assert.assertTrue;
@RunWith(Parameterized.class)
public class MeteredSessionStoreTest {
@Rule
public EasyMockRule rule = new EasyMockRule(this);
private static final String STORE_TYPE = "scope";
private static final String STORE_LEVEL_GROUP_FROM_0100_TO_24 = "stream-" + STORE_TYPE + "-state-metrics";
private static final String STORE_LEVEL_GROUP = "stream-state-metrics";
private static final String THREAD_ID_TAG_KEY_FROM_0100_TO_24 = "client-id";
private static final String THREAD_ID_TAG_KEY = "thread-id";
private final String threadId = Thread.currentThread().getName();
private final TaskId taskId = new TaskId(0, 0);
private final Metrics metrics = new Metrics();
private MeteredSessionStore<String, String> metered;
@Mock(type = MockType.NICE)
private SessionStore<Bytes, byte[]> inner;
@Mock(type = MockType.NICE)
private InternalProcessorContext context;
private final String key = "a";
private final byte[] keyBytes = key.getBytes();
private final Windowed<Bytes> windowedKeyBytes = new Windowed<>(Bytes.wrap(keyBytes), new SessionWindow(0, 0));
private String storeLevelGroup;
private String threadIdTagKey;
private Map<String, String> tags;
@Parameters(name = "{0}")
public static Collection<Object[]> data() {
return Arrays.asList(new Object[][] {
{StreamsConfig.METRICS_LATEST},
{StreamsConfig.METRICS_0100_TO_24}
});
}
@Parameter
public String builtInMetricsVersion;
@Before
public void before() {
metered = new MeteredSessionStore<>(
inner,
"scope",
Serdes.String(),
Serdes.String(),
new MockTime());
metrics.config().recordLevel(Sensor.RecordingLevel.DEBUG);
expect(context.metrics())
.andReturn(new StreamsMetricsImpl(metrics, "test-client", builtInMetricsVersion)).anyTimes();
expect(context.taskId()).andReturn(taskId).anyTimes();
expect(inner.name()).andReturn("metered").anyTimes();
storeLevelGroup =
StreamsConfig.METRICS_0100_TO_24.equals(builtInMetricsVersion) ? STORE_LEVEL_GROUP_FROM_0100_TO_24 : STORE_LEVEL_GROUP;
threadIdTagKey =
StreamsConfig.METRICS_0100_TO_24.equals(builtInMetricsVersion) ? THREAD_ID_TAG_KEY_FROM_0100_TO_24 : THREAD_ID_TAG_KEY;
tags = mkMap(
mkEntry(threadIdTagKey, threadId),
mkEntry("task-id", taskId.toString()),
mkEntry(STORE_TYPE + "-state-id", "metered")
);
}
private void init() {
replay(inner, context);
metered.init(context, metered);
}
@Test
public void testMetrics() {
init();
final JmxReporter reporter = new JmxReporter("kafka.streams");
metrics.addReporter(reporter);
assertTrue(reporter.containsMbean(String.format(
"kafka.streams:type=%s,%s=%s,task-id=%s,%s-state-id=%s",
storeLevelGroup,
threadIdTagKey,
threadId,
taskId.toString(),
STORE_TYPE,
"metered"
)));
if (StreamsConfig.METRICS_0100_TO_24.equals(builtInMetricsVersion)) {
assertTrue(reporter.containsMbean(String.format(
"kafka.streams:type=%s,%s=%s,task-id=%s,%s-state-id=%s",
storeLevelGroup,
threadIdTagKey,
threadId,
taskId.toString(),
STORE_TYPE,
ROLLUP_VALUE
)));
}
}
@Test
public void shouldWriteBytesToInnerStoreAndRecordPutMetric() {
inner.put(eq(windowedKeyBytes), aryEq(keyBytes));
expectLastCall();
init();
metered.put(new Windowed<>(key, new SessionWindow(0, 0)), key);
final KafkaMetric metric = metric("put-rate");
assertTrue(((Double) metric.metricValue()) > 0);
verify(inner);
}
@Test
public void shouldFindSessionsFromStoreAndRecordFetchMetric() {
expect(inner.findSessions(Bytes.wrap(keyBytes), 0, 0))
.andReturn(new KeyValueIteratorStub<>(
Collections.singleton(KeyValue.pair(windowedKeyBytes, keyBytes)).iterator()));
init();
final KeyValueIterator<Windowed<String>, String> iterator = metered.findSessions(key, 0, 0);
assertThat(iterator.next().value, equalTo(key));
assertFalse(iterator.hasNext());
iterator.close();
final KafkaMetric metric = metric("fetch-rate");
assertTrue((Double) metric.metricValue() > 0);
verify(inner);
}
@Test
public void shouldFindSessionRangeFromStoreAndRecordFetchMetric() {
expect(inner.findSessions(Bytes.wrap(keyBytes), Bytes.wrap(keyBytes), 0, 0))
.andReturn(new KeyValueIteratorStub<>(
Collections.singleton(KeyValue.pair(windowedKeyBytes, keyBytes)).iterator()));
init();
final KeyValueIterator<Windowed<String>, String> iterator = metered.findSessions(key, key, 0, 0);
assertThat(iterator.next().value, equalTo(key));
assertFalse(iterator.hasNext());
iterator.close();
final KafkaMetric metric = metric("fetch-rate");
assertTrue((Double) metric.metricValue() > 0);
verify(inner);
}
@Test
public void shouldRemoveFromStoreAndRecordRemoveMetric() {
inner.remove(windowedKeyBytes);
expectLastCall();
init();
metered.remove(new Windowed<>(key, new SessionWindow(0, 0)));
final KafkaMetric metric = metric("remove-rate");
assertTrue((Double) metric.metricValue() > 0);
verify(inner);
}
@Test
public void shouldFetchForKeyAndRecordFetchMetric() {
expect(inner.fetch(Bytes.wrap(keyBytes)))
.andReturn(new KeyValueIteratorStub<>(
Collections.singleton(KeyValue.pair(windowedKeyBytes, keyBytes)).iterator()));
init();
final KeyValueIterator<Windowed<String>, String> iterator = metered.fetch(key);
assertThat(iterator.next().value, equalTo(key));
assertFalse(iterator.hasNext());
iterator.close();
final KafkaMetric metric = metric("fetch-rate");
assertTrue((Double) metric.metricValue() > 0);
verify(inner);
}
@Test
public void shouldFetchRangeFromStoreAndRecordFetchMetric() {
expect(inner.fetch(Bytes.wrap(keyBytes), Bytes.wrap(keyBytes)))
.andReturn(new KeyValueIteratorStub<>(
Collections.singleton(KeyValue.pair(windowedKeyBytes, keyBytes)).iterator()));
init();
final KeyValueIterator<Windowed<String>, String> iterator = metered.fetch(key, key);
assertThat(iterator.next().value, equalTo(key));
assertFalse(iterator.hasNext());
iterator.close();
final KafkaMetric metric = metric("fetch-rate");
assertTrue((Double) metric.metricValue() > 0);
verify(inner);
}
@Test
public void shouldRecordRestoreTimeOnInit() {
init();
final KafkaMetric metric = metric("restore-rate");
assertTrue((Double) metric.metricValue() > 0);
}
@Test
public void shouldNotThrowNullPointerExceptionIfFetchSessionReturnsNull() {
expect(inner.fetchSession(Bytes.wrap("a".getBytes()), 0, Long.MAX_VALUE)).andReturn(null);
init();
assertNull(metered.fetchSession("a", 0, Long.MAX_VALUE));
}
@Test(expected = NullPointerException.class)
public void shouldThrowNullPointerOnPutIfKeyIsNull() {
metered.put(null, "a");
}
@Test(expected = NullPointerException.class)
public void shouldThrowNullPointerOnRemoveIfKeyIsNull() {
metered.remove(null);
}
@Test(expected = NullPointerException.class)
public void shouldThrowNullPointerOnFetchIfKeyIsNull() {
metered.fetch(null);
}
@Test(expected = NullPointerException.class)
public void shouldThrowNullPointerOnFetchRangeIfFromIsNull() {
metered.fetch(null, "to");
}
@Test(expected = NullPointerException.class)
public void shouldThrowNullPointerOnFetchRangeIfToIsNull() {
metered.fetch("from", null);
}
@Test(expected = NullPointerException.class)
public void shouldThrowNullPointerOnFindSessionsIfKeyIsNull() {
metered.findSessions(null, 0, 0);
}
// Ranged findSessions() must reject a null "from" key with an NPE.
@Test(expected = NullPointerException.class)
public void shouldThrowNullPointerOnFindSessionsRangeIfFromIsNull() {
metered.findSessions(null, "a", 0, 0);
}
// Ranged findSessions() must reject a null "to" key with an NPE.
@Test(expected = NullPointerException.class)
public void shouldThrowNullPointerOnFindSessionsRangeIfToIsNull() {
metered.findSessions("a", null, 0, 0);
}
// Composite type so a single EasyMock mock can act as both a SessionStore
// and a CachedStateStore, as needed by the setFlushListener tests.
private interface CachedSessionStore extends SessionStore<Bytes, byte[]>, CachedStateStore<byte[], byte[]> { }
// When the wrapped store is caching (implements CachedStateStore), the
// flush listener registration must be forwarded to it and its result
// (true) returned.
@SuppressWarnings("unchecked")
@Test
public void shouldSetFlushListenerOnWrappedCachingStore() {
final CachedSessionStore cachedSessionStore = mock(CachedSessionStore.class);
expect(cachedSessionStore.setFlushListener(anyObject(CacheFlushListener.class), eq(false))).andReturn(true);
replay(cachedSessionStore);
// Rebuild the metered store around the caching mock instead of "inner".
metered = new MeteredSessionStore<>(
cachedSessionStore,
STORE_TYPE,
Serdes.String(),
Serdes.String(),
new MockTime());
assertTrue(metered.setFlushListener(null, false));
verify(cachedSessionStore);
}
// The default wrapped store is not caching, so registering a flush
// listener must report false (nothing to attach to).
@Test
public void shouldNotSetFlushListenerOnWrappedNoneCachingStore() {
assertFalse(metered.setFlushListener(null, false));
}
// Closing the metered store must deregister all of its metrics.
@Test
public void shouldRemoveMetricsOnClose() {
inner.close();
expectLastCall();
init(); // replays "inner"
// There's always a "count" metric registered
assertThat(storeMetrics(), not(empty()));
metered.close();
assertThat(storeMetrics(), empty());
verify(inner);
}
// Metrics must be removed even when the wrapped store's close() throws;
// the exception itself still propagates to the caller.
@Test
public void shouldRemoveMetricsEvenIfWrappedStoreThrowsOnClose() {
inner.close();
expectLastCall().andThrow(new RuntimeException("Oops!"));
init(); // replays "inner"
assertThat(storeMetrics(), not(empty()));
assertThrows(RuntimeException.class, metered::close);
assertThat(storeMetrics(), empty());
verify(inner);
}
// Looks up a single store-level metric by name within this store's tag scope.
private KafkaMetric metric(final String name) {
    final MetricName metricName = new MetricName(name, storeLevelGroup, "", this.tags);
    return this.metrics.metric(metricName);
}
// Collects every metric name currently registered for this store, i.e. all
// names in the store-level group that carry this store's tags.
private List<MetricName> storeMetrics() {
    return metrics.metrics()
        .keySet()
        .stream()
        .filter(metricName -> metricName.group().equals(storeLevelGroup))
        .filter(metricName -> metricName.tags().equals(tags))
        .collect(Collectors.toList());
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.plugins.document;
import java.util.Collections;
import java.util.Map;
import com.google.common.cache.Cache;
import com.google.common.collect.Maps;
import org.apache.jackrabbit.oak.cache.CacheStats;
import org.apache.jackrabbit.oak.cache.CacheValue;
import org.apache.jackrabbit.oak.commons.json.JsopBuilder;
import org.apache.jackrabbit.oak.commons.json.JsopReader;
import org.apache.jackrabbit.oak.commons.json.JsopTokenizer;
import org.apache.jackrabbit.oak.plugins.document.util.RevisionsKey;
import org.apache.jackrabbit.oak.plugins.document.util.StringValue;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A diff cache, which is pro-actively filled after a commit.
*/
public class LocalDiffCache extends DiffCache {

    private static final Logger LOG = LoggerFactory.getLogger(LocalDiffCache.class);

    /**
     * Limit is arbitrary for now i.e. 16 MB. Same as in MongoDiffCache.
     * NOTE(review): non-final; consider making it final unless it is adjusted
     * externally (e.g. reflectively by tests) — confirm before changing.
     */
    private static int MAX_ENTRY_SIZE = 16 * 1024 * 1024;

    /** Maps (from, to) revision-vector pairs to the diff computed between them. */
    private final Cache<RevisionsKey, Diff> diffCache;

    /** Hit/miss statistics for {@link #diffCache}. */
    private final CacheStats diffCacheStats;

    LocalDiffCache(DocumentNodeStoreBuilder<?> builder) {
        this.diffCache = builder.buildLocalDiffCache();
        this.diffCacheStats = new CacheStats(diffCache,
                "Document-LocalDiff",
                builder.getWeigher(), builder.getLocalDiffCacheSize());
    }

    /**
     * Returns the cached changes for {@code path} between the given revision
     * vectors; an empty string if an entry is cached but records no changes
     * for that path; the loader's result when there is no cache entry and a
     * loader was supplied; {@code null} otherwise.
     */
    @Override
    public String getChanges(@NotNull RevisionVector from,
                             @NotNull RevisionVector to,
                             @NotNull String path,
                             @Nullable Loader loader) {
        RevisionsKey key = new RevisionsKey(from, to);
        Diff diff = diffCache.getIfPresent(key);
        if (diff != null) {
            String result = diff.get(path);
            // Entry exists but holds no changes for this particular path.
            return result != null ? result : "";
        }
        if (loader != null) {
            return loader.call();
        }
        return null;
    }

    /**
     * Creates a cache entry that accumulates per-path change strings and, on
     * {@link Entry#done()}, stores them under the (from, to) key. Once the
     * accumulated size exceeds {@link #MAX_ENTRY_SIZE}, further appends are
     * dropped and {@code done()} reports {@code false} without caching.
     */
    @NotNull
    @Override
    public Entry newEntry(final @NotNull RevisionVector from,
                          final @NotNull RevisionVector to,
                          boolean local /*ignored*/) {
        return new Entry() {
            private final Map<String, String> changesPerPath = Maps.newHashMap();
            private long size;

            @Override
            public void append(@NotNull String path, @NotNull String changes) {
                if (exceedsSize()) {
                    // Too large to cache; silently drop further changes.
                    return;
                }
                size += size(path) + size(changes);
                changesPerPath.put(path, changes);
            }

            @Override
            public boolean done() {
                if (exceedsSize()) {
                    return false;
                }
                diffCache.put(new RevisionsKey(from, to),
                        new Diff(changesPerPath, size));
                LOG.debug("Adding cache entry from {} to {}", from, to);
                return true;
            }

            private boolean exceedsSize() {
                return size > MAX_ENTRY_SIZE;
            }
        };
    }

    @NotNull
    @Override
    public Iterable<CacheStats> getStats() {
        return Collections.singleton(diffCacheStats);
    }

    //-----------------------------< internal >---------------------------------

    /**
     * A map of path to change string with a lazily computed memory estimate,
     * serializable to and from a JSOP string.
     */
    public static final class Diff implements CacheValue {

        private final Map<String, String> changes;
        // 0 means "not yet computed"; filled in lazily by getMemory().
        private long memory;

        public Diff(Map<String, String> changes, long memory) {
            this.changes = changes;
            this.memory = memory;
        }

        /** Parses a string previously produced by {@link #asString()}. */
        public static Diff fromString(String value) {
            Map<String, String> map = Maps.newHashMap();
            JsopReader reader = new JsopTokenizer(value);
            while (true) {
                if (reader.matches(JsopReader.END)) {
                    break;
                }
                String k = reader.readString();
                reader.read(':');
                String v = reader.readString();
                map.put(k, v);
                if (reader.matches(JsopReader.END)) {
                    break;
                }
                reader.read(',');
            }
            // Memory estimate is recomputed on demand (see getMemory()).
            return new Diff(map, 0);
        }

        /** Serializes the changes as "key":"value" pairs in JSOP syntax. */
        public String asString() {
            JsopBuilder builder = new JsopBuilder();
            for (Map.Entry<String, String> entry : changes.entrySet()) {
                builder.key(entry.getKey());
                builder.value(entry.getValue());
            }
            return builder.toString();
        }

        public Map<String, String> getChanges() {
            return Collections.unmodifiableMap(changes);
        }

        @Override
        public int getMemory() {
            if (memory == 0) {
                long m = 0;
                for (Map.Entry<String, String> e : changes.entrySet()) {
                    m += size(e.getKey()) + size(e.getValue());
                }
                memory = m;
            }
            // Clamp to int range required by the CacheValue interface.
            if (memory > Integer.MAX_VALUE) {
                LOG.debug("Estimated memory footprint larger than Integer.MAX_VALUE: {}.", memory);
                return Integer.MAX_VALUE;
            } else {
                return (int) memory;
            }
        }

        String get(String path) {
            return changes.get(path);
        }

        @Override
        public String toString() {
            return asString();
        }

        @Override
        public boolean equals(Object obj) {
            if (obj == this) {
                return true;
            }
            if (obj instanceof Diff) {
                Diff other = (Diff) obj;
                return changes.equals(other.changes);
            }
            return false;
        }

        @Override
        public int hashCode() {
            // Fix: equals() is based solely on 'changes', so hashCode must be
            // too — previously hashCode was not overridden, violating the
            // equals/hashCode contract for hash-based collections.
            return changes.hashCode();
        }
    }

    private static long size(String s) {
        return StringValue.getMemory(s);
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.hive.hcatalog.templeton;
import java.io.IOException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeoutException;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.DELETE;
import javax.ws.rs.FormParam;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.SecurityContext;
import javax.ws.rs.core.UriInfo;
import org.apache.commons.exec.ExecuteException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authentication.client.PseudoAuthenticator;
import org.apache.hive.hcatalog.templeton.LauncherDelegator.JobType;
import org.apache.hive.hcatalog.templeton.tool.TempletonUtils;
/**
* The Templeton Web API server.
*/
@Path("/v1")
public class Server {
// API version segment used in paths and status documents.
public static final String VERSION = "v1";
// Query parameter naming the user to run the request on behalf of.
public static final String DO_AS_PARAM = "doAs";
/**
 * The status message. Always "ok"
 */
public static final Map<String, String> STATUS_OK = createStatusMsg();
/**
 * The list of supported api versions.
 */
public static final Map<String, Object> SUPPORTED_VERSIONS = createVersions();
/**
 * The list of supported return formats. Always json.
 */
public static final Map<String, Object> SUPPORTED_FORMATS = createFormats();
// Builds the immutable status document returned by the /status call:
// {"status": "ok", "version": VERSION}.
private static Map<String, String> createStatusMsg() {
    final Map<String, String> msg = new HashMap<String, String>();
    msg.put("status", "ok");
    msg.put("version", VERSION);
    return Collections.unmodifiableMap(msg);
}
// Builds the immutable supported-versions document returned by /version.
private static Map<String, Object> createVersions() {
    final List<String> versions = new ArrayList<String>();
    versions.add(VERSION);
    final Map<String, Object> res = new HashMap<String, Object>();
    res.put("supportedVersions", versions);
    res.put("version", VERSION);
    return Collections.unmodifiableMap(res);
}
// Builds the immutable supported-response-formats document (JSON only).
private static Map<String, Object> createFormats() {
    final List<String> formats = new ArrayList<String>();
    formats.add(MediaType.APPLICATION_JSON);
    final Map<String, Object> res = new HashMap<String, Object>();
    res.put("responseTypes", formats);
    return Collections.unmodifiableMap(res);
}
// Shared executor used by DDL calls; protected so tests can substitute it.
protected static ExecService execService = ExecServiceImpl.getInstance();
private static AppConfig appConf = Main.getAppConfigInstance();
// The SecurityContext set by AuthFilter
private
@Context
SecurityContext theSecurityContext;
// The uri requested
private
@Context
UriInfo theUriInfo;
// Optional "doAs" query parameter: user to run the request on behalf of.
private @QueryParam(DO_AS_PARAM) String doAs;
private @Context HttpServletRequest request;
private static final Logger LOG = LoggerFactory.getLogger(Server.class);
/**
 * Check the status of this server. Always OK.
 *
 * @return an immutable map: {"status": "ok", "version": VERSION}
 */
@GET
@Path("status")
@Produces({MediaType.APPLICATION_JSON})
public Map<String, String> status() {
return STATUS_OK;
}
/**
 * Check the supported request formats of this server.
 *
 * @return an immutable map listing the supported response media types
 */
@GET
@Produces({MediaType.APPLICATION_JSON})
public Map<String, Object> requestFormats() {
return SUPPORTED_FORMATS;
}
/**
 * Check the version(s) supported by this server.
 *
 * @return an immutable map of the supported API versions
 */
@GET
@Path("version")
@Produces({MediaType.APPLICATION_JSON})
public Map<String, Object> version() {
return SUPPORTED_VERSIONS;
}
/**
 * Get version of hadoop software being run by this WebHCat server.
 */
@GET
@Path("version/hadoop")
@Produces(MediaType.APPLICATION_JSON)
public Response hadoopVersion() throws IOException {
    return new VersionDelegator(appConf).getVersion("hadoop");
}
/**
 * Get version of hive software being run by this WebHCat server.
 */
@GET
@Path("version/hive")
@Produces(MediaType.APPLICATION_JSON)
public Response hiveVersion() throws IOException {
    return new VersionDelegator(appConf).getVersion("hive");
}
/**
 * Get version of sqoop software being run by this WebHCat server.
 */
@GET
@Path("version/sqoop")
@Produces(MediaType.APPLICATION_JSON)
public Response sqoopVersion() throws IOException {
    return new VersionDelegator(appConf).getVersion("sqoop");
}
/**
 * Get version of pig software being run by this WebHCat server.
 */
@GET
@Path("version/pig")
@Produces(MediaType.APPLICATION_JSON)
public Response pigVersion() throws IOException {
    return new VersionDelegator(appConf).getVersion("pig");
}
/**
 * Execute an hcat ddl expression on the local box. It is run
 * as the authenticated user and rate limited.
 */
@POST
@Path("ddl")
@Produces({MediaType.APPLICATION_JSON})
public ExecBean ddl(@FormParam("exec") String exec,
                    @FormParam("group") String group,
                    @FormParam("permissions") String permissions)
    throws NotAuthorizedException, BusyException, BadParam,
           ExecuteException, IOException {
    verifyUser();
    verifyParam(exec, "exec");
    final HcatDelegator delegator = new HcatDelegator(appConf, execService);
    return delegator.run(getDoAsUser(), exec, false, group, permissions);
}
/**
 * List all the tables in an hcat database.
 */
@GET
@Path("ddl/database/{db}/table")
@Produces(MediaType.APPLICATION_JSON)
public Response listTables(@PathParam("db") String db,
                           @QueryParam("like") String tablePattern)
    throws HcatException, NotAuthorizedException, BusyException,
           BadParam, ExecuteException, IOException {
    verifyUser();
    verifyDdlParam(db, ":db");
    // An absent "like" pattern means every table.
    final String pattern = TempletonUtils.isset(tablePattern) ? tablePattern : "*";
    return new HcatDelegator(appConf, execService).listTables(getDoAsUser(), db, pattern);
}
/**
 * Create a new table.
 */
@PUT
@Path("ddl/database/{db}/table/{table}")
@Produces(MediaType.APPLICATION_JSON)
public Response createTable(@PathParam("db") String db,
                            @PathParam("table") String table,
                            TableDesc desc)
    throws SimpleWebException, NotAuthorizedException, BusyException,
           BadParam, ExecuteException, IOException {
    verifyUser();
    verifyDdlParam(db, ":db");
    verifyDdlParam(table, ":table");
    // The path segment, not the request body, is authoritative for the name.
    desc.table = table;
    return new HcatDelegator(appConf, execService).createTable(getDoAsUser(), db, desc);
}
/**
 * Create a new table like another table.
 */
@PUT
@Path("ddl/database/{db}/table/{existingTable}/like/{newTable}")
@Produces(MediaType.APPLICATION_JSON)
public Response createTableLike(@PathParam("db") String db,
                                @PathParam("existingTable") String existingTable,
                                @PathParam("newTable") String newTable,
                                TableLikeDesc desc)
    throws SimpleWebException, NotAuthorizedException, BusyException,
           BadParam, ExecuteException, IOException {
    verifyUser();
    verifyDdlParam(db, ":db");
    verifyDdlParam(existingTable, ":existingTable");
    verifyDdlParam(newTable, ":newTable");
    // Path segments override whatever the request body carried.
    desc.existingTable = existingTable;
    desc.newTable = newTable;
    return new HcatDelegator(appConf, execService).createTableLike(getDoAsUser(), db, desc);
}
/**
 * Describe an hcat table. This is normally a simple list of
 * columns (using "desc table"), but the extended format will show
 * more information (using "show table extended like").
 */
@GET
@Path("ddl/database/{db}/table/{table}")
@Produces(MediaType.APPLICATION_JSON)
public Response descTable(@PathParam("db") String db,
                          @PathParam("table") String table,
                          @QueryParam("format") String format)
    throws HcatException, NotAuthorizedException, BusyException,
           BadParam, ExecuteException, IOException {
    verifyUser();
    verifyDdlParam(db, ":db");
    verifyDdlParam(table, ":table");
    final HcatDelegator delegator = new HcatDelegator(appConf, execService);
    // "extended" selects "show table extended like"; anything else is a plain describe.
    return "extended".equals(format)
        ? delegator.descExtendedTable(getDoAsUser(), db, table)
        : delegator.descTable(getDoAsUser(), db, table, false);
}
/**
 * Drop an hcat table.
 */
@DELETE
@Path("ddl/database/{db}/table/{table}")
@Produces(MediaType.APPLICATION_JSON)
public Response dropTable(@PathParam("db") String db,
                          @PathParam("table") String table,
                          @QueryParam("ifExists") boolean ifExists,
                          @QueryParam("group") String group,
                          @QueryParam("permissions") String permissions)
    throws HcatException, NotAuthorizedException, BusyException,
           BadParam, ExecuteException, IOException {
    verifyUser();
    verifyDdlParam(db, ":db");
    verifyDdlParam(table, ":table");
    return new HcatDelegator(appConf, execService)
        .dropTable(getDoAsUser(), db, table, ifExists, group, permissions);
}
/**
 * Rename an hcat table.
 */
@POST
@Path("ddl/database/{db}/table/{table}")
@Produces(MediaType.APPLICATION_JSON)
public Response renameTable(@PathParam("db") String db,
                            @PathParam("table") String oldTable,
                            @FormParam("rename") String newTable,
                            @FormParam("group") String group,
                            @FormParam("permissions") String permissions)
    throws HcatException, NotAuthorizedException, BusyException,
           BadParam, ExecuteException, IOException {
    verifyUser();
    verifyDdlParam(db, ":db");
    verifyDdlParam(oldTable, ":table");
    verifyDdlParam(newTable, "rename");
    return new HcatDelegator(appConf, execService)
        .renameTable(getDoAsUser(), db, oldTable, newTable, group, permissions);
}
/**
 * Describe a single property on an hcat table.
 */
@GET
@Path("ddl/database/{db}/table/{table}/property/{property}")
@Produces(MediaType.APPLICATION_JSON)
public Response descOneTableProperty(@PathParam("db") String db,
                                     @PathParam("table") String table,
                                     @PathParam("property") String property)
    throws HcatException, NotAuthorizedException, BusyException,
           BadParam, ExecuteException, IOException {
    verifyUser();
    verifyDdlParam(db, ":db");
    verifyDdlParam(table, ":table");
    verifyPropertyParam(property, ":property");
    return new HcatDelegator(appConf, execService)
        .descTableProperty(getDoAsUser(), db, table, property);
}
/**
 * List all the properties on an hcat table.
 */
@GET
@Path("ddl/database/{db}/table/{table}/property")
@Produces(MediaType.APPLICATION_JSON)
public Response listTableProperties(@PathParam("db") String db,
                                    @PathParam("table") String table)
    throws HcatException, NotAuthorizedException, BusyException,
           BadParam, ExecuteException, IOException {
    verifyUser();
    verifyDdlParam(db, ":db");
    verifyDdlParam(table, ":table");
    return new HcatDelegator(appConf, execService).listTableProperties(getDoAsUser(), db, table);
}
/**
 * Add a single property on an hcat table.
 */
@PUT
@Path("ddl/database/{db}/table/{table}/property/{property}")
@Produces(MediaType.APPLICATION_JSON)
public Response addOneTableProperty(@PathParam("db") String db,
                                    @PathParam("table") String table,
                                    @PathParam("property") String property,
                                    TablePropertyDesc desc)
    throws HcatException, NotAuthorizedException, BusyException,
           BadParam, ExecuteException, IOException {
    verifyUser();
    verifyDdlParam(db, ":db");
    verifyDdlParam(table, ":table");
    verifyPropertyParam(property, ":property");
    // The path segment is authoritative for the property name.
    desc.name = property;
    return new HcatDelegator(appConf, execService).addOneTableProperty(getDoAsUser(), db, table, desc);
}
/**
 * List all the partitions in an hcat table.
 */
@GET
@Path("ddl/database/{db}/table/{table}/partition")
@Produces(MediaType.APPLICATION_JSON)
public Response listPartitions(@PathParam("db") String db,
                               @PathParam("table") String table)
    throws HcatException, NotAuthorizedException, BusyException,
           BadParam, ExecuteException, IOException {
    verifyUser();
    verifyDdlParam(db, ":db");
    verifyDdlParam(table, ":table");
    return new HcatDelegator(appConf, execService).listPartitions(getDoAsUser(), db, table);
}
/**
 * Describe a single partition in an hcat table.
 */
@GET
@Path("ddl/database/{db}/table/{table}/partition/{partition}")
@Produces(MediaType.APPLICATION_JSON)
public Response descPartition(@PathParam("db") String db,
                              @PathParam("table") String table,
                              @PathParam("partition") String partition)
    throws HcatException, NotAuthorizedException, BusyException,
           BadParam, ExecuteException, IOException {
    verifyUser();
    verifyDdlParam(db, ":db");
    verifyDdlParam(table, ":table");
    verifyParam(partition, ":partition");
    return new HcatDelegator(appConf, execService)
        .descOnePartition(getDoAsUser(), db, table, partition);
}
/**
 * Create a partition in an hcat table.
 */
@PUT
@Path("ddl/database/{db}/table/{table}/partition/{partition}")
@Produces(MediaType.APPLICATION_JSON)
public Response addOnePartition(@PathParam("db") String db,
                                @PathParam("table") String table,
                                @PathParam("partition") String partition,
                                PartitionDesc desc)
    throws HcatException, NotAuthorizedException, BusyException,
           BadParam, ExecuteException, IOException {
    verifyUser();
    verifyDdlParam(db, ":db");
    verifyDdlParam(table, ":table");
    verifyParam(partition, ":partition");
    // The path segment is authoritative for the partition spec.
    desc.partition = partition;
    return new HcatDelegator(appConf, execService).addOnePartition(getDoAsUser(), db, table, desc);
}
/**
 * Drop a partition in an hcat table.
 */
@DELETE
@Path("ddl/database/{db}/table/{table}/partition/{partition}")
@Produces(MediaType.APPLICATION_JSON)
public Response dropPartition(@PathParam("db") String db,
                              @PathParam("table") String table,
                              @PathParam("partition") String partition,
                              @QueryParam("ifExists") boolean ifExists,
                              @QueryParam("group") String group,
                              @QueryParam("permissions") String permissions)
    throws HcatException, NotAuthorizedException, BusyException,
           BadParam, ExecuteException, IOException {
    verifyUser();
    verifyDdlParam(db, ":db");
    verifyDdlParam(table, ":table");
    verifyParam(partition, ":partition");
    return new HcatDelegator(appConf, execService)
        .dropPartition(getDoAsUser(), db, table, partition, ifExists, group, permissions);
}
/**
 * List all databases, or those that match a pattern.
 */
@GET
@Path("ddl/database/")
@Produces(MediaType.APPLICATION_JSON)
public Response listDatabases(@QueryParam("like") String dbPattern)
    throws HcatException, NotAuthorizedException, BusyException,
           BadParam, ExecuteException, IOException {
    verifyUser();
    // An absent "like" pattern means every database.
    final String pattern = TempletonUtils.isset(dbPattern) ? dbPattern : "*";
    return new HcatDelegator(appConf, execService).listDatabases(getDoAsUser(), pattern);
}
/**
 * Describe a database.
 */
@GET
@Path("ddl/database/{db}")
@Produces(MediaType.APPLICATION_JSON)
public Response descDatabase(@PathParam("db") String db,
                             @QueryParam("format") String format)
    throws HcatException, NotAuthorizedException, BusyException,
           BadParam, ExecuteException, IOException {
    verifyUser();
    verifyDdlParam(db, ":db");
    // Only the literal format value "extended" requests the extended form.
    return new HcatDelegator(appConf, execService)
        .descDatabase(getDoAsUser(), db, "extended".equals(format));
}
/**
 * Create a database.
 */
@PUT
@Path("ddl/database/{db}")
@Produces(MediaType.APPLICATION_JSON)
public Response createDatabase(@PathParam("db") String db,
                               DatabaseDesc desc)
    throws HcatException, NotAuthorizedException, BusyException,
           BadParam, ExecuteException, IOException {
    verifyUser();
    verifyDdlParam(db, ":db");
    // The path segment is authoritative for the database name.
    desc.database = db;
    return new HcatDelegator(appConf, execService).createDatabase(getDoAsUser(), desc);
}
/**
 * Drop a database.
 */
@DELETE
@Path("ddl/database/{db}")
@Produces(MediaType.APPLICATION_JSON)
public Response dropDatabase(@PathParam("db") String db,
                             @QueryParam("ifExists") boolean ifExists,
                             @QueryParam("option") String option,
                             @QueryParam("group") String group,
                             @QueryParam("permissions") String permissions)
    throws HcatException, NotAuthorizedException, BusyException,
           BadParam, ExecuteException, IOException {
    verifyUser();
    verifyDdlParam(db, ":db");
    // "option" is optional; validate only when the client supplied one.
    if (TempletonUtils.isset(option)) {
        verifyDdlParam(option, "option");
    }
    return new HcatDelegator(appConf, execService)
        .dropDatabase(getDoAsUser(), db, ifExists, option, group, permissions);
}
/**
 * List the columns in an hcat table. Currently the same as
 * describe table.
 */
@GET
@Path("ddl/database/{db}/table/{table}/column")
@Produces(MediaType.APPLICATION_JSON)
public Response listColumns(@PathParam("db") String db,
                            @PathParam("table") String table)
    throws HcatException, NotAuthorizedException, BusyException,
           BadParam, ExecuteException, IOException {
    verifyUser();
    verifyDdlParam(db, ":db");
    verifyDdlParam(table, ":table");
    return new HcatDelegator(appConf, execService).listColumns(getDoAsUser(), db, table);
}
/**
 * Describe a single column in an hcat table.
 */
@GET
@Path("ddl/database/{db}/table/{table}/column/{column}")
@Produces(MediaType.APPLICATION_JSON)
public Response descColumn(@PathParam("db") String db,
                           @PathParam("table") String table,
                           @PathParam("column") String column)
    throws SimpleWebException, NotAuthorizedException, BusyException,
           BadParam, ExecuteException, IOException {
    verifyUser();
    verifyDdlParam(db, ":db");
    verifyDdlParam(table, ":table");
    verifyParam(column, ":column");
    return new HcatDelegator(appConf, execService)
        .descOneColumn(getDoAsUser(), db, table, column);
}
/**
 * Create a column in an hcat table.
 */
@PUT
@Path("ddl/database/{db}/table/{table}/column/{column}")
@Produces(MediaType.APPLICATION_JSON)
public Response addOneColumn(@PathParam("db") String db,
                             @PathParam("table") String table,
                             @PathParam("column") String column,
                             ColumnDesc desc)
    throws HcatException, NotAuthorizedException, BusyException,
           BadParam, ExecuteException, IOException {
    verifyUser();
    verifyDdlParam(db, ":db");
    verifyDdlParam(table, ":table");
    verifyParam(column, ":column");
    // A column type must accompany the new column definition.
    verifyParam(desc.type, "type");
    // The path segment is authoritative for the column name.
    desc.name = column;
    return new HcatDelegator(appConf, execService).addOneColumn(getDoAsUser(), db, table, desc);
}
/**
 * Run a MapReduce Streaming job.
 * @param callback URL which WebHCat will call when the hive job finishes
 */
@POST
@Path("mapreduce/streaming")
@Produces({MediaType.APPLICATION_JSON})
public EnqueueBean mapReduceStreaming(@FormParam("input") List<String> inputs,
@FormParam("inputreader") String inputreader,
@FormParam("output") String output,
@FormParam("mapper") String mapper,
@FormParam("reducer") String reducer,
@FormParam("combiner") String combiner,
@FormParam("file") List<String> fileList,
@FormParam("files") String files,
@FormParam("define") List<String> defines,
@FormParam("cmdenv") List<String> cmdenvs,
@FormParam("arg") List<String> args,
@FormParam("statusdir") String statusdir,
@FormParam("callback") String callback,
@FormParam("enablelog") boolean enablelog,
@FormParam("enablejobreconnect") Boolean enablejobreconnect)
throws NotAuthorizedException, BusyException, BadParam, QueueException,
ExecuteException, IOException, InterruptedException, TooManyRequestsException {
// "input", "mapper" and "reducer" are the only required form parameters.
verifyUser();
verifyParam(inputs, "input");
verifyParam(mapper, "mapper");
verifyParam(reducer, "reducer");
// Capture all raw request arguments in a single map, which is passed to
// the delegator alongside the individual parameters.
Map<String, Object> userArgs = new HashMap<String, Object>();
userArgs.put("user.name", getDoAsUser());
userArgs.put("input", inputs);
userArgs.put("inputreader", inputreader);
userArgs.put("output", output);
userArgs.put("mapper", mapper);
userArgs.put("reducer", reducer);
userArgs.put("combiner", combiner);
userArgs.put("file", fileList);
userArgs.put("files", files);
userArgs.put("define", defines);
userArgs.put("cmdenv", cmdenvs);
userArgs.put("arg", args);
userArgs.put("statusdir", statusdir);
userArgs.put("callback", callback);
userArgs.put("enablelog", Boolean.toString(enablelog));
userArgs.put("enablejobreconnect", enablejobreconnect);
// Log collection requires a statusdir to collect into.
checkEnableLogPrerequisite(enablelog, statusdir);
StreamingDelegator d = new StreamingDelegator(appConf);
return d.run(getDoAsUser(), userArgs, inputs, inputreader, output, mapper, reducer, combiner,
fileList, files, defines, cmdenvs, args,
statusdir, callback, getCompletedUrl(), enablelog, enablejobreconnect, JobType.STREAMING);
}
/**
 * Run a MapReduce Jar job.
 * Params correspond to the REST api params
 * @param usesHcatalog if {@code true}, means the Jar uses HCat and thus needs to access
 * metastore, which requires additional steps for WebHCat to perform in a secure cluster.
 * @param callback URL which WebHCat will call when the hive job finishes
 * @see org.apache.hive.hcatalog.templeton.tool.TempletonControllerJob
 */
@POST
@Path("mapreduce/jar")
@Produces({MediaType.APPLICATION_JSON})
public EnqueueBean mapReduceJar(@FormParam("jar") String jar,
@FormParam("class") String mainClass,
@FormParam("libjars") String libjars,
@FormParam("files") String files,
@FormParam("arg") List<String> args,
@FormParam("define") List<String> defines,
@FormParam("statusdir") String statusdir,
@FormParam("callback") String callback,
@FormParam("usehcatalog") boolean usesHcatalog,
@FormParam("enablelog") boolean enablelog,
@FormParam("enablejobreconnect") Boolean enablejobreconnect)
throws NotAuthorizedException, BusyException, BadParam, QueueException,
ExecuteException, IOException, InterruptedException, TooManyRequestsException {
// "jar" and "class" are the only required form parameters.
verifyUser();
verifyParam(jar, "jar");
verifyParam(mainClass, "class");
// Capture all raw request arguments in a single map for the delegator.
Map<String, Object> userArgs = new HashMap<String, Object>();
userArgs.put("user.name", getDoAsUser());
userArgs.put("jar", jar);
userArgs.put("class", mainClass);
userArgs.put("libjars", libjars);
userArgs.put("files", files);
userArgs.put("arg", args);
userArgs.put("define", defines);
userArgs.put("statusdir", statusdir);
userArgs.put("callback", callback);
userArgs.put("enablelog", Boolean.toString(enablelog));
userArgs.put("enablejobreconnect", enablejobreconnect);
// Log collection requires a statusdir to collect into.
checkEnableLogPrerequisite(enablelog, statusdir);
JarDelegator d = new JarDelegator(appConf);
return d.run(getDoAsUser(), userArgs,
jar, mainClass,
libjars, files, args, defines,
statusdir, callback, usesHcatalog, getCompletedUrl(), enablelog, enablejobreconnect, JobType.JAR);
}
/**
 * Run a Pig job.
 * Params correspond to the REST api params. If '-useHCatalog' is in the {@code pigArgs, usesHcatalog},
 * is interpreted as true.
 * @param usesHcatalog if {@code true}, means the Pig script uses HCat and thus needs to access
 * metastore, which requires additional steps for WebHCat to perform in a secure cluster.
 * This does nothing to ensure that Pig is installed on target node in the cluster.
 * @param callback URL which WebHCat will call when the hive job finishes
 * @see org.apache.hive.hcatalog.templeton.tool.TempletonControllerJob
 */
@POST
@Path("pig")
@Produces({MediaType.APPLICATION_JSON})
public EnqueueBean pig(@FormParam("execute") String execute,
@FormParam("file") String srcFile,
@FormParam("arg") List<String> pigArgs,
@FormParam("files") String otherFiles,
@FormParam("statusdir") String statusdir,
@FormParam("callback") String callback,
@FormParam("usehcatalog") boolean usesHcatalog,
@FormParam("enablelog") boolean enablelog,
@FormParam("enablejobreconnect") Boolean enablejobreconnect)
throws NotAuthorizedException, BusyException, BadParam, QueueException,
ExecuteException, IOException, InterruptedException, TooManyRequestsException {
verifyUser();
// Exactly one of "execute" (inline script) or "file" (script file) is needed.
if (execute == null && srcFile == null) {
throw new BadParam("Either execute or file parameter required");
}
//add all function arguments to a map
Map<String, Object> userArgs = new HashMap<String, Object>();
userArgs.put("user.name", getDoAsUser());
userArgs.put("execute", execute);
userArgs.put("file", srcFile);
userArgs.put("arg", pigArgs);
userArgs.put("files", otherFiles);
userArgs.put("statusdir", statusdir);
userArgs.put("callback", callback);
userArgs.put("enablelog", Boolean.toString(enablelog));
userArgs.put("enablejobreconnect", enablejobreconnect);
// Log collection requires a statusdir to collect into.
checkEnableLogPrerequisite(enablelog, statusdir);
PigDelegator d = new PigDelegator(appConf);
return d.run(getDoAsUser(), userArgs,
execute, srcFile,
pigArgs, otherFiles,
statusdir, callback, usesHcatalog, getCompletedUrl(), enablelog, enablejobreconnect);
}
/**
 * Run a Sqoop job.
 * @param optionsFile name of option file which contains Sqoop command to run
 * @param otherFiles additional files to be shipped to the launcher, such as option
 files which contain part of the Sqoop command
 * @param libdir dir containing JDBC jars that Sqoop will need to interact with the database
 * @param statusdir where the stderr/stdout of templeton controller job goes
 * @param callback URL which WebHCat will call when the sqoop job finishes
 * @param enablelog whether to collect mapreduce log into statusdir/logs
 * @param enablejobreconnect whether to reconnect to a running child job on templeton
 * controller job retry
 */
@POST
@Path("sqoop")
@Produces({MediaType.APPLICATION_JSON})
public EnqueueBean sqoop(@FormParam("command") String command,
@FormParam("optionsfile") String optionsFile,
@FormParam("libdir") String libdir,
@FormParam("files") String otherFiles,
@FormParam("statusdir") String statusdir,
@FormParam("callback") String callback,
@FormParam("enablelog") boolean enablelog,
@FormParam("enablejobreconnect") Boolean enablejobreconnect)
throws NotAuthorizedException, BusyException, BadParam, QueueException,
IOException, InterruptedException, TooManyRequestsException {
verifyUser();
// Exactly one of "command" or "optionsfile" must be given — not neither, not both.
if (command == null && optionsFile == null)
throw new BadParam("Must define Sqoop command or a optionsfile contains Sqoop command to run Sqoop job.");
if (command != null && optionsFile != null)
throw new BadParam("Cannot set command and optionsfile at the same time.");
// Log collection requires a statusdir to collect into.
checkEnableLogPrerequisite(enablelog, statusdir);
//add all function arguments to a map
Map<String, Object> userArgs = new HashMap<String, Object>();
userArgs.put("user.name", getDoAsUser());
userArgs.put("command", command);
userArgs.put("optionsfile", optionsFile);
userArgs.put("libdir", libdir);
userArgs.put("files", otherFiles);
userArgs.put("statusdir", statusdir);
userArgs.put("callback", callback);
userArgs.put("enablelog", Boolean.toString(enablelog));
userArgs.put("enablejobreconnect", enablejobreconnect);
SqoopDelegator d = new SqoopDelegator(appConf);
return d.run(getDoAsUser(), userArgs, command, optionsFile, otherFiles,
statusdir, callback, getCompletedUrl(), enablelog, enablejobreconnect, libdir);
}
/**
 * Run a Hive job.
 *
 * @param execute SQL statement to run, equivalent to "-e" from the hive command line
 * @param srcFile name of a hive script file to run, equivalent to "-f" from the
 *   hive command line
 * @param hiveArgs additional command line arguments passed to the hive command line.
 *   Please check https://cwiki.apache.org/Hive/languagemanual-cli.html
 *   for a detailed explanation of command line arguments
 * @param otherFiles additional files to be shipped to the launcher, such as the jars
 *   used in "add jar" statements in the hive script
 * @param defines shortcut for the command line argument "--define"
 * @param statusdir where the stderr/stdout of the templeton controller job goes
 * @param callback URL which WebHCat will call when the hive job finishes
 * @param enablelog whether to collect the mapreduce log into statusdir/logs
 * @param enablejobreconnect whether to reconnect to a running child job on templeton
 *   controller job retry
 */
@POST
@Path("hive")
@Produces({MediaType.APPLICATION_JSON})
public EnqueueBean hive(@FormParam("execute") String execute,
                        @FormParam("file") String srcFile,
                        @FormParam("arg") List<String> hiveArgs,
                        @FormParam("files") String otherFiles,
                        @FormParam("define") List<String> defines,
                        @FormParam("statusdir") String statusdir,
                        @FormParam("callback") String callback,
                        @FormParam("enablelog") boolean enablelog,
                        @FormParam("enablejobreconnect") Boolean enablejobreconnect)
  throws NotAuthorizedException, BusyException, BadParam, QueueException,
  ExecuteException, IOException, InterruptedException, TooManyRequestsException {
  verifyUser();
  // Either an inline statement ("execute") or a script file ("file") is required.
  if (execute == null && srcFile == null) {
    throw new BadParam("Either execute or file parameter required");
  }

  // Record every caller-supplied argument so the delegator can persist them.
  Map<String, Object> userArgs = new HashMap<String, Object>();
  userArgs.put("user.name", getDoAsUser());
  userArgs.put("execute", execute);
  userArgs.put("file", srcFile);
  userArgs.put("define", defines);
  userArgs.put("files", otherFiles);
  userArgs.put("statusdir", statusdir);
  userArgs.put("callback", callback);
  userArgs.put("enablelog", Boolean.toString(enablelog));
  userArgs.put("enablejobreconnect", enablejobreconnect);

  checkEnableLogPrerequisite(enablelog, statusdir);

  HiveDelegator delegator = new HiveDelegator(appConf);
  return delegator.run(getDoAsUser(), userArgs, execute, srcFile, defines, hiveArgs, otherFiles,
    statusdir, callback, getCompletedUrl(), enablelog, enablejobreconnect);
}
/**
 * Return the status of the given job id.
 */
@GET
@Path("jobs/{jobid}")
@Produces({MediaType.APPLICATION_JSON})
public QueueStatusBean showJobId(@PathParam("jobid") String jobid)
  throws NotAuthorizedException, BadParam, IOException, InterruptedException,
  BusyException, TimeoutException, ExecutionException, TooManyRequestsException {
  verifyUser();
  verifyParam(jobid, ":jobid");
  // Delegate the actual status lookup.
  StatusDelegator delegator = new StatusDelegator(appConf);
  return delegator.run(getDoAsUser(), jobid);
}
/**
 * Kill a job in the queue.
 */
@DELETE
@Path("jobs/{jobid}")
@Produces({MediaType.APPLICATION_JSON})
public QueueStatusBean deleteJobId(@PathParam("jobid") String jobid)
  throws NotAuthorizedException, BadParam, IOException, InterruptedException {
  verifyUser();
  verifyParam(jobid, ":jobid");
  // Delegate the kill request.
  DeleteDelegator delegator = new DeleteDelegator(appConf);
  return delegator.run(getDoAsUser(), jobid);
}
/**
 * Return all the known job ids for this user based on the optional filter conditions.
 * <p>
 * Example usages:
 * 1. curl -s 'http://localhost:50111/templeton/v1/jobs?user.name=hsubramaniyan'
 * Return all the Job IDs submitted by hsubramaniyan
 * 2. curl -s
 * 'http://localhost:50111/templeton/v1/jobs?user.name=hsubramaniyan%26showall=true'
 * Return all the Job IDs that are visible to hsubramaniyan
 * 3. curl -s
 * 'http://localhost:50111/templeton/v1/jobs?user.name=hsubramaniyan%26jobid=job_201312091733_0003'
 * Return all the Job IDs for hsubramaniyan after job_201312091733_0003.
 * 4. curl -s 'http://localhost:50111/templeton/v1/jobs?
 * user.name=hsubramaniyan%26jobid=job_201312091733_0003%26numrecords=5'
 * Return the first 5(atmost) Job IDs submitted by hsubramaniyan after job_201312091733_0003.
 * 5. curl -s
 * 'http://localhost:50111/templeton/v1/jobs?user.name=hsubramaniyan%26numrecords=5'
 * Return the first 5(atmost) Job IDs submitted by hsubramaniyan after sorting the Job ID list
 * lexicographically.
 * </p>
 * <p>
 * Supporting pagination using "jobid" and "numrecords" parameters:
 * Step 1: Get the start "jobid" = job_xxx_000, "numrecords" = n
 * Step 2: Issue a curl command by specifying the user-defined "numrecords" and "jobid"
 * Step 3: If list obtained from Step 2 has size equal to "numrecords", retrieve the list's
 * last record and get the Job Id of the last record as job_yyy_k, else quit.
 * Step 4: set "jobid"=job_yyy_k and go to step 2.
 * </p>
 * @param fields If "fields" set to "*", the request will return full details of the job.
 * If "fields" is missing, will only return the job ID. Currently the value can only
 * be "*", other values are not allowed and will throw exception.
 * @param showall If "showall" is set to "true", the request will return all jobs the user
 * has permission to view, not only the jobs belonging to the user.
 * @param jobid If "jobid" is present, the records whose Job Id is lexicographically greater
 * than "jobid" are only returned. For example, if "jobid" = "job_201312091733_0001",
 * the jobs whose Job ID is greater than "job_201312091733_0001" are returned. The number of
 * records returned depends on the value of "numrecords".
 * @param numrecords If the "jobid" and "numrecords" parameters are present, the top #numrecords
 * records appearing after "jobid" will be returned after sorting the Job Id list
 * lexicographically.
 * If "jobid" parameter is missing and "numrecords" is present, the top #numrecords will
 * be returned after lexicographically sorting the Job Id list. If "jobid" parameter is present
 * and "numrecords" is missing, all the records whose Job Id is greater than "jobid" are returned.
 * @return list of job items based on the filter conditions specified by the user.
 * @throws BadParam if "fields" is not "*" or "numrecords" is not a positive integer
 */
@GET
@Path("jobs")
@Produces({MediaType.APPLICATION_JSON})
public List<JobItemBean> showJobList(@QueryParam("fields") String fields,
                                     @QueryParam("showall") boolean showall,
                                     @QueryParam("jobid") String jobid,
                                     @QueryParam("numrecords") String numrecords)
  throws NotAuthorizedException, BadParam, IOException, InterruptedException,
  BusyException, TimeoutException, ExecutionException, TooManyRequestsException {
  verifyUser();

  // "fields" only supports "*" (full details); any other value is rejected.
  boolean showDetails = false;
  if (fields != null) {
    if (!fields.equals("*")) {
      throw new BadParam("fields value other than * is not supported");
    }
    showDetails = true;
  }

  // Parse the optional record limit; -1 means "no limit".
  // Fix: the original caught broad Exception here, which also swallowed the
  // BadParam thrown for non-positive values and replaced it with a misleading
  // "invalid format" message. Catch only NumberFormatException instead.
  int numRecords = -1;
  if (numrecords != null) {
    try {
      numRecords = Integer.parseInt(numrecords);
    } catch (NumberFormatException e) {
      throw new BadParam("Invalid numrecords format: numrecords should be an integer > 0");
    }
    if (numRecords <= 0) {
      throw new BadParam("numrecords should be an integer > 0");
    }
  }

  ListDelegator ld = new ListDelegator(appConf);
  return ld.run(getDoAsUser(), showall, jobid, numRecords, showDetails);
}
/**
 * Notify on a completed job. Called by JobTracker.
 */
@GET
@Path("internal/complete/{jobid}")
@Produces({MediaType.APPLICATION_JSON})
public CompleteBean completeJob(@PathParam("jobid") String jobid,
                                @QueryParam("status") String jobStatus)
  throws CallbackFailedException, IOException {
  // Internal callback endpoint: no verifyUser() here, the cluster calls it.
  LOG.debug("Received callback " + theUriInfo.getRequestUri());
  CompleteDelegator delegator = new CompleteDelegator(appConf);
  return delegator.run(jobid, jobStatus);
}
/**
 * Verify that we have a valid user. Throw an exception if invalid.
 */
public void verifyUser() throws NotAuthorizedException {
  String requestingUser = getRequestingUser();
  if (requestingUser == null) {
    StringBuilder msg = new StringBuilder("No user found.");
    if (!UserGroupInformation.isSecurityEnabled()) {
      // In simple-auth mode the user name arrives as a query parameter.
      msg.append(" Missing ").append(PseudoAuthenticator.USER_NAME).append(" parameter.");
    }
    throw new NotAuthorizedException(msg.toString());
  }
  if (doAs != null && !doAs.equals(requestingUser)) {
    /* if doAs user is different than logged in user, need to check that
       the logged in user is authorized to run as 'doAs' */
    ProxyUserSupport.validate(requestingUser, getRequestingHost(requestingUser, request), doAs);
  }
}
/**
 * All 'tasks' spawned by WebHCat should be run as this user. W/o doAs query parameter
 * this is just the user making the request (or
 * {@link org.apache.hadoop.security.authentication.client.PseudoAuthenticator#USER_NAME}
 * query param).
 * @return value of doAs query parameter or {@link #getRequestingUser()}
 */
private String getDoAsUser() {
  String requestingUser = getRequestingUser();
  if (doAs == null || doAs.equals(requestingUser)) {
    return requestingUser;
  }
  return doAs;
}
/**
 * Verify that the parameter exists. Throw an exception if invalid.
 */
public void verifyParam(String param, String name)
  throws BadParam {
  if (param != null) {
    return;
  }
  throw new BadParam("Missing " + name + " parameter");
}
/**
 * Verify that the list parameter exists and is non-empty. Throw an exception if invalid.
 */
public void verifyParam(List<String> param, String name)
  throws BadParam {
  boolean missing = (param == null) || param.isEmpty();
  if (missing) {
    throw new BadParam("Missing " + name + " parameter");
  }
}
/** Simple DDL identifier: a letter followed by zero or more word characters. */
public static final Pattern DDL_ID = Pattern.compile("[a-zA-Z]\\w*");
/**
 * Property name: alphanumeric start, then word characters, dots and dashes,
 * but must not end with '-', '.' or '_'.
 */
public static final Pattern PROPERTY_ID =
Pattern.compile("[a-zA-Z0-9][\\w\\.\\-]*(?<!\\-)(?<!\\.)(?<!\\_)$");
/**
 * Verify that the parameter exists and is a simple DDL identifier
 * name. Throw an exception if invalid.
 *
 * Bug: This needs to allow for quoted ddl identifiers.
 */
public void verifyDdlParam(String param, String name)
  throws BadParam {
  verifyParam(param, name);
  if (!DDL_ID.matcher(param).matches()) {
    throw new BadParam("Invalid DDL identifier " + name);
  }
}
/**
 * Verify that the parameter exists and is a valid property
 * name. Throw an exception if invalid.
 */
public void verifyPropertyParam(String param, String name)
  throws BadParam {
  verifyParam(param, name);
  if (!PROPERTY_ID.matcher(param).matches()) {
    throw new BadParam("Invalid property name " + name);
  }
}
/**
 * Get the user name from the security context, i.e. the user making the HTTP request.
 * With simple/pseudo security mode this should return the
 * value of the user.name query param; in kerberos mode it's the kinit'ed user.
 */
private String getRequestingUser() {
  if (theSecurityContext == null) {
    return null;
  }
  final String userName = (theSecurityContext.getUserPrincipal() == null)
      ? Main.UserNameHandler.getUserName(request)
      : theSecurityContext.getUserPrincipal().getName();
  if (userName == null) {
    return null;
  }
  // Map e.g. hue/foo.bar@something.com -> hue, since user group checks
  // and config files are in terms of the short name.
  return UserGroupInformation.createRemoteUser(userName).getShortUserName();
}
/**
 * The callback url on this server when a task is completed.
 */
public String getCompletedUrl() {
  // Both the URI info and its base URI must be available to build the URL.
  if (theUriInfo == null || theUriInfo.getBaseUri() == null) {
    return null;
  }
  return theUriInfo.getBaseUri() + VERSION
    + "/internal/complete/$jobId?status=$jobStatus";
}
/**
 * Returns the canonical host name from which the request is made; used for doAs validation.
 *
 * @param requestingUser user name, used only for log messages
 * @param request the HTTP request, may be null
 * @return canonical hostname, or "???" if it cannot be determined
 */
private static String getRequestingHost(String requestingUser, HttpServletRequest request) {
  final String unkHost = "???";
  if (request == null) {
    LOG.warn("request is null; cannot determine hostname");
    return unkHost;
  }
  try {
    String address = request.getRemoteAddr();//returns IP addr
    if (address == null) {
      LOG.warn(MessageFormat.format("Request remote address is NULL for user [{0}]", requestingUser));
      return unkHost;
    }
    //Inet4Address/Inet6Address
    String hostName = InetAddress.getByName(address).getCanonicalHostName();
    if (LOG.isDebugEnabled()) {
      LOG.debug(MessageFormat.format("Resolved remote hostname: [{0}]", hostName));
    }
    return hostName;
  } catch (UnknownHostException ex) {
    // Fix: the extra 'ex' argument passed to MessageFormat.format() was
    // silently ignored, so the stack trace was never logged. Pass the
    // throwable to the logger instead so the cause is preserved.
    LOG.warn(MessageFormat.format("Request remote address could not be resolved, {0}", ex.toString()), ex);
    return unkHost;
  }
}
/**
 * Validate the preconditions for enablelog=true; no-op when enablelog is false.
 */
private void checkEnableLogPrerequisite(boolean enablelog, String statusdir) throws BadParam {
  if (!enablelog) {
    return;
  }
  // Log collection writes under statusdir, so one must be provided.
  if (!TempletonUtils.isset(statusdir)) {
    throw new BadParam("enablelog is only applicable when statusdir is set");
  }
  // "0.23" identifies the Hadoop 2.x shim; log collection requires Hadoop 1.x.
  if ("0.23".equalsIgnoreCase(ShimLoader.getMajorVersion())) {
    throw new BadParam("enablelog=true is only supported with Hadoop 1.x");
  }
}
}
| |
package org.bouncycastle.pqc.math.linearalgebra;
import java.security.SecureRandom;
/**
* This class describes operations with elements from the finite field F =
* GF(2^m). ( GF(2^m)= GF(2)[A] where A is a root of irreducible polynomial with
* degree m, each field element B has a polynomial basis representation, i.e. it
* is represented by a different binary polynomial of degree less than m, B =
* poly(A) ) All operations are defined only for field with 1< m <32. For the
* representation of field elements the map f: F->Z, poly(A)->poly(2) is used,
* where integers have the binary representation. For example: A^7+A^3+A+1 ->
* (00...0010001011)=139 Also for elements type Integer is used.
*
* @see PolynomialRingGF2
*/
public class GF2mField
{
    /*
     * degree - degree m of the field; polynomial - the irreducible field
     * polynomial over GF(2), with its binary coefficient vector packed into
     * an int (bit i = coefficient of x^i).
     */
    private int degree = 0;
    private int polynomial;

    /**
     * Creates the finite field GF(2^m) using a precomputed irreducible
     * polynomial of the requested degree.
     *
     * @param degree the degree m of the field, 1 &lt;= m &lt; 32
     */
    public GF2mField(int degree)
    {
        if (degree < 1)
        {
            throw new IllegalArgumentException(
                " Error: the degree of field is non-positive ");
        }
        if (degree >= 32)
        {
            throw new IllegalArgumentException(
                " Error: the degree of field is too large ");
        }
        this.degree = degree;
        polynomial = PolynomialRingGF2.getIrreduciblePolynomial(degree);
    }

    /**
     * Creates the finite field GF(2^m) with a caller-supplied field polynomial.
     *
     * @param degree the degree of the field
     * @param poly the field polynomial; must be irreducible and of the given degree
     */
    public GF2mField(int degree, int poly)
    {
        if (PolynomialRingGF2.degree(poly) != degree)
        {
            throw new IllegalArgumentException(
                " Error: the degree is not correct");
        }
        if (!PolynomialRingGF2.isIrreducible(poly))
        {
            throw new IllegalArgumentException(
                " Error: given polynomial is reducible");
        }
        this.degree = degree;
        polynomial = poly;
    }

    /**
     * Decodes a field from its 4-byte little-endian encoding of the field
     * polynomial.
     *
     * @param enc a 4-byte encoding produced by {@link #getEncoded()}
     */
    public GF2mField(byte[] enc)
    {
        if (enc.length != 4)
        {
            throw new IllegalArgumentException(
                "byte array is not an encoded finite field");
        }
        polynomial = LittleEndianConversions.OS2IP(enc);
        if (!PolynomialRingGF2.isIrreducible(polynomial))
        {
            throw new IllegalArgumentException(
                "byte array is not an encoded finite field");
        }
        degree = PolynomialRingGF2.degree(polynomial);
    }

    /**
     * Copy constructor.
     *
     * @param field the field to copy
     */
    public GF2mField(GF2mField field)
    {
        degree = field.degree;
        polynomial = field.polynomial;
    }

    /**
     * @return the degree m of this field
     */
    public int getDegree()
    {
        return degree;
    }

    /**
     * @return the field polynomial (packed coefficient vector)
     */
    public int getPolynomial()
    {
        return polynomial;
    }

    /**
     * @return the field encoded as a 4-byte little-endian array
     */
    public byte[] getEncoded()
    {
        return LittleEndianConversions.I2OSP(polynomial);
    }

    /**
     * Return the sum of two elements. In GF(2^m) addition is bitwise XOR.
     *
     * @param a first summand
     * @param b second summand
     * @return a+b
     */
    public int add(int a, int b)
    {
        return a ^ b;
    }

    /**
     * Return the product of two elements, reduced modulo the field polynomial.
     *
     * @param a first factor
     * @param b second factor
     * @return a*b
     */
    public int mult(int a, int b)
    {
        return PolynomialRingGF2.modMultiply(a, b, polynomial);
    }

    /**
     * Compute the exponentiation a^k by square-and-multiply.
     *
     * @param a a field element
     * @param k the exponent (negative exponents use the inverse of a)
     * @return a^k
     */
    public int exp(int a, int k)
    {
        if (a == 0)
        {
            return 0;
        }
        if (a == 1)
        {
            return 1;
        }
        int base = a;
        int e = k;
        if (e < 0)
        {
            base = inverse(base);
            e = -e;
        }
        int result = 1;
        for (; e != 0; e >>>= 1)
        {
            if ((e & 1) == 1)
            {
                result = mult(result, base);
            }
            base = mult(base, base);
        }
        return result;
    }

    /**
     * Compute the multiplicative inverse of a via a^(2^m - 2).
     *
     * @param a a field element
     * @return a<sup>-1</sup>
     */
    public int inverse(int a)
    {
        int d = (1 << degree) - 2;
        return exp(a, d);
    }

    /**
     * Compute the square root of an element. Squaring m-1 times yields
     * a^(2^(m-1)), which is the square root in GF(2^m).
     *
     * @param a a field element
     * @return a<sup>1/2</sup>
     */
    public int sqRoot(int a)
    {
        int root = a;
        for (int i = 1; i < degree; i++)
        {
            root = mult(root, root);
        }
        return root;
    }

    /**
     * Create a random field element (possibly zero) using PRNG sr.
     *
     * @param sr SecureRandom
     * @return a random element
     */
    public int getRandomElement(SecureRandom sr)
    {
        return RandUtils.nextInt(sr, 1 << degree);
    }

    /**
     * Create a random non-zero field element using a fresh SecureRandom.
     *
     * @return a random non-zero element
     */
    public int getRandomNonZeroElement()
    {
        return getRandomNonZeroElement(new SecureRandom());
    }

    /**
     * Create a random non-zero field element using PRNG sr.
     * Gives up after ~2^20 draws of zero and returns 1 instead.
     *
     * @param sr SecureRandom
     * @return a random non-zero element
     */
    public int getRandomNonZeroElement(SecureRandom sr)
    {
        int controltime = 1 << 20;
        int result = RandUtils.nextInt(sr, 1 << degree);
        int count = 0;
        while ((result == 0) && (count < controltime))
        {
            result = RandUtils.nextInt(sr, 1 << degree);
            count++;
        }
        if (count == controltime)
        {
            result = 1;
        }
        return result;
    }

    /**
     * @return true if e is an encoded element of this field, false otherwise
     */
    public boolean isElementOfThisField(int e)
    {
        // e is an encoded element of this field iff 0 <= e < 2^m
        if (e < 0)
        {
            return false;
        }
        if (degree == 31)
        {
            // 1 << 31 overflows, but every non-negative int fits in 31 bits
            return true;
        }
        return e < (1 << degree);
    }

    /*
     * help method for visual control: binary coefficient string, MSB first
     */
    public String elementToStr(int a)
    {
        StringBuilder bits = new StringBuilder(degree);
        int v = a;
        for (int i = 0; i < degree; i++)
        {
            // prepend so the highest coefficient ends up leftmost
            bits.insert(0, ((v & 0x01) == 0) ? '0' : '1');
            v >>>= 1;
        }
        return bits.toString();
    }

    /**
     * Checks if the given object is equal to this field.
     * <p>
     * The method returns false whenever the given object is not GF2m.
     *
     * @param other object
     * @return true or false
     */
    public boolean equals(Object other)
    {
        if (!(other instanceof GF2mField))
        {
            // also covers other == null
            return false;
        }
        GF2mField that = (GF2mField)other;
        return (degree == that.degree) && (polynomial == that.polynomial);
    }

    public int hashCode()
    {
        // the degree is implied by the polynomial, so this is sufficient
        return polynomial;
    }

    /**
     * Returns a human readable form of this field.
     *
     * @return a human readable form of this field.
     */
    public String toString()
    {
        return "Finite Field GF(2^" + degree + ") = " + "GF(2)[X]/<"
            + polyToString(polynomial) + "> ";
    }

    // Renders the packed polynomial as e.g. "1+x^3+x^7".
    private static String polyToString(int p)
    {
        if (p == 0)
        {
            return "0";
        }
        StringBuilder sb = new StringBuilder();
        if ((p & 0x01) == 1)
        {
            sb.append('1');
        }
        int rest = p >>> 1;
        for (int i = 1; rest != 0; i++, rest >>>= 1)
        {
            if ((rest & 0x01) == 1)
            {
                sb.append("+x^").append(i);
            }
        }
        return sb.toString();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.analysis.wikipedia;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.analysis.tokenattributes.FlagsAttribute;
import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
import org.apache.lucene.analysis.tokenattributes.TypeAttribute;
import org.apache.lucene.util.AttributeSource;
import java.io.IOException;
import java.io.Reader;
import java.util.*;
/**
* Extension of StandardTokenizer that is aware of Wikipedia syntax. It is based off of the
* Wikipedia tutorial available at http://en.wikipedia.org/wiki/Wikipedia:Tutorial, but it may not be complete.
* <p/>
* <p/>
* @lucene.experimental
*/
public final class WikipediaTokenizer extends Tokenizer {
// String token-type names; also used as entries in TOKEN_TYPES below.
public static final String INTERNAL_LINK = "il";
public static final String EXTERNAL_LINK = "el";
//The URL part of the link, i.e. the first token
public static final String EXTERNAL_LINK_URL = "elu";
public static final String CITATION = "ci";
public static final String CATEGORY = "c";
public static final String BOLD = "b";
public static final String ITALICS = "i";
public static final String BOLD_ITALICS = "bi";
public static final String HEADING = "h";
public static final String SUB_HEADING = "sh";
// Numeric token-type ids; these index into the TOKEN_TYPES array, so the
// two lists must stay in the same order.
public static final int ALPHANUM_ID = 0;
public static final int APOSTROPHE_ID = 1;
public static final int ACRONYM_ID = 2;
public static final int COMPANY_ID = 3;
public static final int EMAIL_ID = 4;
public static final int HOST_ID = 5;
public static final int NUM_ID = 6;
public static final int CJ_ID = 7;
public static final int INTERNAL_LINK_ID = 8;
public static final int EXTERNAL_LINK_ID = 9;
public static final int CITATION_ID = 10;
public static final int CATEGORY_ID = 11;
public static final int BOLD_ID = 12;
public static final int ITALICS_ID = 13;
public static final int BOLD_ITALICS_ID = 14;
public static final int HEADING_ID = 15;
public static final int SUB_HEADING_ID = 16;
public static final int EXTERNAL_LINK_URL_ID = 17;
/** String token types that correspond to token type int constants */
public static final String [] TOKEN_TYPES = new String [] {
"<ALPHANUM>",
"<APOSTROPHE>",
"<ACRONYM>",
"<COMPANY>",
"<EMAIL>",
"<HOST>",
"<NUM>",
"<CJ>",
INTERNAL_LINK,
EXTERNAL_LINK,
CITATION,
CATEGORY,
BOLD,
ITALICS,
BOLD_ITALICS,
HEADING,
SUB_HEADING,
EXTERNAL_LINK_URL
};
/**
* Only output tokens
*/
public static final int TOKENS_ONLY = 0;
/**
* Only output untokenized tokens, which are tokens that would normally be split into several tokens
*/
public static final int UNTOKENIZED_ONLY = 1;
/**
* Output the both the untokenized token and the splits
*/
public static final int BOTH = 2;
/**
* This flag is used to indicate that the produced "Token" would, if {@link #TOKENS_ONLY} was used, produce multiple tokens.
*/
public static final int UNTOKENIZED_TOKEN_FLAG = 1;
/**
* A private instance of the JFlex-constructed scanner
*/
private final WikipediaTokenizerImpl scanner;
// Output mode: one of TOKENS_ONLY, UNTOKENIZED_ONLY, BOTH (set in init()).
private int tokenOutput = TOKENS_ONLY;
// Token types (from TOKEN_TYPES) that should be collapsed rather than split.
private Set<String> untokenizedTypes = Collections.emptySet();
// Buffered constituent-token states produced in BOTH mode; drained first by
// incrementToken() before asking the scanner for more input.
private Iterator<AttributeSource.State> tokens = null;
private final OffsetAttribute offsetAtt = addAttribute(OffsetAttribute.class);
private final TypeAttribute typeAtt = addAttribute(TypeAttribute.class);
private final PositionIncrementAttribute posIncrAtt = addAttribute(PositionIncrementAttribute.class);
private final CharTermAttribute termAtt = addAttribute(CharTermAttribute.class);
private final FlagsAttribute flagsAtt = addAttribute(FlagsAttribute.class);
// True until the first token has been emitted; used to avoid posInc=0 there.
private boolean first;
/**
* Creates a new instance of the {@link WikipediaTokenizer}. Attaches the
* <code>input</code> to a newly created JFlex scanner.
*
* @param input The Input Reader
*/
public WikipediaTokenizer(Reader input) {
this(input, TOKENS_ONLY, Collections.<String>emptySet());
}
/**
* Creates a new instance of the {@link org.apache.lucene.analysis.wikipedia.WikipediaTokenizer}. Attaches the
* <code>input</code> to a the newly created JFlex scanner.
*
* @param input The input
* @param tokenOutput One of {@link #TOKENS_ONLY}, {@link #UNTOKENIZED_ONLY}, {@link #BOTH}
*/
public WikipediaTokenizer(Reader input, int tokenOutput, Set<String> untokenizedTypes) {
super(input);
this.scanner = new WikipediaTokenizerImpl(null); // best effort NPE if you dont call reset
init(tokenOutput, untokenizedTypes);
}
/**
* Creates a new instance of the {@link org.apache.lucene.analysis.wikipedia.WikipediaTokenizer}. Attaches the
* <code>input</code> to a the newly created JFlex scanner. Uses the given {@link org.apache.lucene.util.AttributeSource.AttributeFactory}.
*
* @param input The input
* @param tokenOutput One of {@link #TOKENS_ONLY}, {@link #UNTOKENIZED_ONLY}, {@link #BOTH}
*/
public WikipediaTokenizer(AttributeFactory factory, Reader input, int tokenOutput, Set<String> untokenizedTypes) {
super(factory, input);
this.scanner = new WikipediaTokenizerImpl(null); // best effort NPE if you dont call reset
init(tokenOutput, untokenizedTypes);
}
// Validates and stores the output mode and the set of types to collapse.
private void init(int tokenOutput, Set<String> untokenizedTypes) {
// TODO: cutover to enum
if (tokenOutput != TOKENS_ONLY &&
tokenOutput != UNTOKENIZED_ONLY &&
tokenOutput != BOTH) {
throw new IllegalArgumentException("tokenOutput must be TOKENS_ONLY, UNTOKENIZED_ONLY or BOTH");
}
this.tokenOutput = tokenOutput;
this.untokenizedTypes = untokenizedTypes;
}
/*
* (non-Javadoc)
*
* @see org.apache.lucene.analysis.TokenStream#next()
*/
@Override
public final boolean incrementToken() throws IOException {
// In BOTH mode, first replay any constituent tokens buffered by
// collapseAndSaveTokens() before pulling new input from the scanner.
if (tokens != null && tokens.hasNext()){
AttributeSource.State state = tokens.next();
restoreState(state);
return true;
}
clearAttributes();
int tokenType = scanner.getNextToken();
if (tokenType == WikipediaTokenizerImpl.YYEOF) {
return false;
}
String type = WikipediaTokenizerImpl.TOKEN_TYPES[tokenType];
// Dispatch on output mode: plain token, collapsed-only, or collapsed+splits.
if (tokenOutput == TOKENS_ONLY || untokenizedTypes.contains(type) == false){
setupToken();
} else if (tokenOutput == UNTOKENIZED_ONLY && untokenizedTypes.contains(type) == true){
collapseTokens(tokenType);
}
else if (tokenOutput == BOTH){
//collapse into a single token, add it to tokens AND output the individual tokens
//output the untokenized Token first
collapseAndSaveTokens(tokenType, type);
}
int posinc = scanner.getPositionIncrement();
if (first && posinc == 0) {
posinc = 1; // don't emit posinc=0 for the first token!
}
posIncrAtt.setPositionIncrement(posinc);
typeAtt.setType(type);
first = false;
return true;
}
// BOTH mode: builds one collapsed token covering a run of same-type wiki
// tokens, while also capturing each constituent token's state into 'tokens'
// so incrementToken() can replay them after emitting the collapsed token.
private void collapseAndSaveTokens(int tokenType, String type) throws IOException {
//collapse
StringBuilder buffer = new StringBuilder(32);
int numAdded = scanner.setText(buffer);
//TODO: how to know how much whitespace to add
int theStart = scanner.yychar();
int lastPos = theStart + numAdded;
int tmpTokType;
int numSeen = 0;
List<AttributeSource.State> tmp = new ArrayList<AttributeSource.State>();
setupSavedToken(0, type);
tmp.add(captureState());
//while we can get a token and that token is the same type and we have not transitioned to a new wiki-item of the same type
while ((tmpTokType = scanner.getNextToken()) != WikipediaTokenizerImpl.YYEOF && tmpTokType == tokenType && scanner.getNumWikiTokensSeen() > numSeen){
int currPos = scanner.yychar();
//append whitespace
for (int i = 0; i < (currPos - lastPos); i++){
buffer.append(' ');
}
numAdded = scanner.setText(buffer);
setupSavedToken(scanner.getPositionIncrement(), type);
tmp.add(captureState());
numSeen++;
lastPos = currPos + numAdded;
}
//trim the buffer
// TODO: this is inefficient
String s = buffer.toString().trim();
termAtt.setEmpty().append(s);
offsetAtt.setOffset(correctOffset(theStart), correctOffset(theStart + s.length()));
flagsAtt.setFlags(UNTOKENIZED_TOKEN_FLAG);
//The way the loop is written, we will have proceeded to the next token. We need to pushback the scanner to lastPos
if (tmpTokType != WikipediaTokenizerImpl.YYEOF){
scanner.yypushback(scanner.yylength());
}
tokens = tmp.iterator();
}
// Fills the current attribute state for one constituent token before it is
// captured into the replay buffer.
private void setupSavedToken(int positionInc, String type){
setupToken();
posIncrAtt.setPositionIncrement(positionInc);
typeAtt.setType(type);
}
// UNTOKENIZED_ONLY mode: same run-collapsing as collapseAndSaveTokens(),
// but without capturing the constituent tokens for replay.
private void collapseTokens(int tokenType) throws IOException {
//collapse
StringBuilder buffer = new StringBuilder(32);
int numAdded = scanner.setText(buffer);
//TODO: how to know how much whitespace to add
int theStart = scanner.yychar();
int lastPos = theStart + numAdded;
int tmpTokType;
int numSeen = 0;
//while we can get a token and that token is the same type and we have not transitioned to a new wiki-item of the same type
while ((tmpTokType = scanner.getNextToken()) != WikipediaTokenizerImpl.YYEOF && tmpTokType == tokenType && scanner.getNumWikiTokensSeen() > numSeen){
int currPos = scanner.yychar();
//append whitespace
for (int i = 0; i < (currPos - lastPos); i++){
buffer.append(' ');
}
numAdded = scanner.setText(buffer);
numSeen++;
lastPos = currPos + numAdded;
}
//trim the buffer
// TODO: this is inefficient
String s = buffer.toString().trim();
termAtt.setEmpty().append(s);
offsetAtt.setOffset(correctOffset(theStart), correctOffset(theStart + s.length()));
flagsAtt.setFlags(UNTOKENIZED_TOKEN_FLAG);
//The way the loop is written, we will have proceeded to the next token. We need to pushback the scanner to lastPos
if (tmpTokType != WikipediaTokenizerImpl.YYEOF){
scanner.yypushback(scanner.yylength());
} else {
tokens = null;
}
}
// Copies the scanner's current match into the term and offset attributes.
private void setupToken() {
scanner.getText(termAtt);
final int start = scanner.yychar();
offsetAtt.setOffset(correctOffset(start), correctOffset(start + termAtt.length()));
}
/*
* (non-Javadoc)
*
* @see org.apache.lucene.analysis.TokenStream#reset()
*/
@Override
public void reset() throws IOException {
// Rebind the scanner to the (possibly new) input and drop any buffered
// replay tokens from a previous document.
scanner.yyreset(input);
tokens = null;
scanner.reset();
first = true;
}
@Override
public void end() {
// set final offset
// NOTE(review): this does not call super.end(); later Lucene versions
// require it to reset shared attributes -- confirm against this
// version's TokenStream.end() contract.
final int finalOffset = correctOffset(scanner.yychar() + scanner.yylength());
this.offsetAtt.setOffset(finalOffset, finalOffset);
}
}
| |
/*
* Copyright 2013 Goldman Sachs.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.gs.collections.impl.set.immutable.primitive;
import java.io.IOException;
import java.io.Serializable;
import java.util.NoSuchElementException;
import com.gs.collections.api.BooleanIterable;
import com.gs.collections.api.LazyBooleanIterable;
import com.gs.collections.api.bag.primitive.MutableBooleanBag;
import com.gs.collections.api.block.function.primitive.BooleanToObjectFunction;
import com.gs.collections.api.block.function.primitive.ObjectBooleanToObjectFunction;
import com.gs.collections.api.block.predicate.primitive.BooleanPredicate;
import com.gs.collections.api.block.procedure.primitive.BooleanProcedure;
import com.gs.collections.api.iterator.BooleanIterator;
import com.gs.collections.api.list.primitive.MutableBooleanList;
import com.gs.collections.api.set.ImmutableSet;
import com.gs.collections.api.set.primitive.BooleanSet;
import com.gs.collections.api.set.primitive.ImmutableBooleanSet;
import com.gs.collections.api.set.primitive.MutableBooleanSet;
import com.gs.collections.impl.bag.mutable.primitive.BooleanHashBag;
import com.gs.collections.impl.factory.Sets;
import com.gs.collections.impl.lazy.primitive.LazyBooleanIterableAdapter;
import com.gs.collections.impl.list.mutable.primitive.BooleanArrayList;
import com.gs.collections.impl.set.mutable.primitive.BooleanHashSet;
import net.jcip.annotations.Immutable;
/**
 * Immutable singleton {@link ImmutableBooleanSet} whose sole element is
 * {@code true}. Every query is answered by reasoning about that one element,
 * so no backing storage is needed. Serialization is delegated to
 * {@link ImmutableBooleanSetSerializationProxy} via {@code writeReplace}.
 */
@Immutable
final class ImmutableTrueSet implements ImmutableBooleanSet, Serializable
{
    static final ImmutableBooleanSet INSTANCE = new ImmutableTrueSet();

    private ImmutableTrueSet()
    {
        // Singleton: use INSTANCE.
    }

    public ImmutableBooleanSet newWith(boolean element)
    {
        if (element)
        {
            // Already contains true; nothing changes.
            return this;
        }
        return ImmutableTrueFalseSet.INSTANCE;
    }

    public ImmutableBooleanSet newWithout(boolean element)
    {
        if (element)
        {
            // Removing the only element yields the empty set.
            return ImmutableBooleanEmptySet.INSTANCE;
        }
        return this;
    }

    public ImmutableBooleanSet newWithAll(BooleanIterable elements)
    {
        ImmutableBooleanSet accumulated = this;
        for (BooleanIterator each = elements.booleanIterator(); each.hasNext(); )
        {
            accumulated = accumulated.newWith(each.next());
        }
        return accumulated;
    }

    public ImmutableBooleanSet newWithoutAll(BooleanIterable elements)
    {
        if (elements.contains(true))
        {
            return ImmutableBooleanEmptySet.INSTANCE;
        }
        return this;
    }

    public BooleanIterator booleanIterator()
    {
        return new TrueIterator();
    }

    public void forEach(BooleanProcedure procedure)
    {
        // Exactly one element to visit.
        procedure.value(true);
    }

    public <T> T injectInto(T injectedValue, ObjectBooleanToObjectFunction<? super T, ? extends T> function)
    {
        return function.valueOf(injectedValue, true);
    }

    public int count(BooleanPredicate predicate)
    {
        if (predicate.accept(true))
        {
            return 1;
        }
        return 0;
    }

    public boolean anySatisfy(BooleanPredicate predicate)
    {
        // With a single element, "any" reduces to testing it.
        return predicate.accept(true);
    }

    public boolean allSatisfy(BooleanPredicate predicate)
    {
        // With a single element, "all" reduces to testing it.
        return predicate.accept(true);
    }

    public boolean noneSatisfy(BooleanPredicate predicate)
    {
        return !predicate.accept(true);
    }

    public ImmutableBooleanSet select(BooleanPredicate predicate)
    {
        if (predicate.accept(true))
        {
            return this;
        }
        return ImmutableBooleanEmptySet.INSTANCE;
    }

    public ImmutableBooleanSet reject(BooleanPredicate predicate)
    {
        if (predicate.accept(true))
        {
            return ImmutableBooleanEmptySet.INSTANCE;
        }
        return this;
    }

    public boolean detectIfNone(BooleanPredicate predicate, boolean ifNone)
    {
        if (predicate.accept(true))
        {
            return true;
        }
        return ifNone;
    }

    public <V> ImmutableSet<V> collect(BooleanToObjectFunction<? extends V> function)
    {
        return Sets.immutable.with(function.valueOf(true));
    }

    public boolean[] toArray()
    {
        boolean[] result = new boolean[1];
        result[0] = true;
        return result;
    }

    public boolean contains(boolean value)
    {
        // Only true is a member.
        return value;
    }

    public boolean containsAll(boolean... source)
    {
        for (boolean candidate : source)
        {
            if (!candidate)
            {
                // false is not a member.
                return false;
            }
        }
        return true;
    }

    public boolean containsAll(BooleanIterable source)
    {
        BooleanIterator each = source.booleanIterator();
        while (each.hasNext())
        {
            if (!each.next())
            {
                return false;
            }
        }
        return true;
    }

    public BooleanSet freeze()
    {
        // Already immutable.
        return this;
    }

    public ImmutableBooleanSet toImmutable()
    {
        return this;
    }

    public int size()
    {
        return 1;
    }

    public boolean isEmpty()
    {
        return false;
    }

    public boolean notEmpty()
    {
        return true;
    }

    @Override
    public boolean equals(Object obj)
    {
        if (obj == this)
        {
            return true;
        }
        if (obj instanceof BooleanSet)
        {
            BooleanSet that = (BooleanSet) obj;
            // Equal iff the other set is exactly {true}.
            return that.contains(true) && !that.contains(false);
        }
        return false;
    }

    @Override
    public int hashCode()
    {
        // 1231 matches Boolean.TRUE.hashCode(), keeping the BooleanSet hash contract.
        return 1231;
    }

    @Override
    public String toString()
    {
        return "[true]";
    }

    public String makeString()
    {
        return "true";
    }

    public String makeString(String separator)
    {
        // A single element needs no separator.
        return "true";
    }

    public String makeString(String start, String separator, String end)
    {
        return start + "true" + end;
    }

    public void appendString(Appendable appendable)
    {
        try
        {
            appendable.append("true");
        }
        catch (IOException e)
        {
            throw new RuntimeException(e);
        }
    }

    public void appendString(Appendable appendable, String separator)
    {
        try
        {
            // Separator unused: only one element.
            appendable.append("true");
        }
        catch (IOException e)
        {
            throw new RuntimeException(e);
        }
    }

    public void appendString(Appendable appendable, String start, String separator, String end)
    {
        try
        {
            appendable.append(start);
            appendable.append("true");
            appendable.append(end);
        }
        catch (IOException e)
        {
            throw new RuntimeException(e);
        }
    }

    public MutableBooleanList toList()
    {
        return BooleanArrayList.newList(this);
    }

    public MutableBooleanSet toSet()
    {
        return BooleanHashSet.newSet(this);
    }

    public MutableBooleanBag toBag()
    {
        return BooleanHashBag.newBag(this);
    }

    public LazyBooleanIterable asLazy()
    {
        return new LazyBooleanIterableAdapter(this);
    }

    /** Iterator over the single element {@code true}. */
    private static final class TrueIterator implements BooleanIterator
    {
        // 0 until the single element has been consumed.
        private int position;

        public boolean next()
        {
            if (this.position > 0)
            {
                throw new NoSuchElementException();
            }
            this.position = 1;
            return true;
        }

        public boolean hasNext()
        {
            return this.position == 0;
        }
    }

    private Object writeReplace()
    {
        return new ImmutableBooleanSetSerializationProxy(this);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.ivy.core.deliver;
import java.util.Date;
import org.apache.ivy.core.settings.IvySettings;
/**
* A set of options used to do a deliver.
*/
/**
 * A set of options used to do a deliver.
 * <p>
 * All setters return {@code this} so options can be configured by method chaining.
 */
public class DeliverOptions {
    /** Status to deliver with, or null to keep the module's current status. */
    private String status;

    /** Publication date stamped on the delivered descriptor. */
    private Date pubdate;

    /** Strategy used to obtain the published revision of each direct dependency. */
    private PublishingDependencyRevisionResolver pdrResolver = new DefaultPublishingDRResolver();

    /** True to validate the delivered descriptor. */
    private boolean validate = true;

    /** True to replace dynamic revisions with the resolved static ones. */
    private boolean resolveDynamicRevisions = true;

    /** True to replace forced revisions in the delivered descriptor. */
    private boolean replaceForcedRevisions = false;

    /** Id of a previous resolve to reuse for delivering, or null. */
    private String resolveId;

    /** Configurations to deliver (may contain wildcards), or null for all. */
    private String[] confs;

    /** Branch to deliver with, or null to leave branch info unchanged. */
    private String pubBranch;

    /**
     * True to indicate that the revConstraint attribute should be generated if applicable, false to
     * never generate the revConstraint attribute.
     */
    private boolean generateRevConstraint = true;

    /** true to merge parent descriptor elements into delivered child descriptor */
    private boolean merge = true;

    /**
     * Returns an instance of DeliverOptions with options corresponding to default values taken from
     * the given settings.
     *
     * @param settings
     *            The settings to use to get default option values
     * @return a DeliverOptions instance ready to be used or customized
     */
    public static DeliverOptions newInstance(IvySettings settings) {
        return new DeliverOptions(null, new Date(), new DefaultPublishingDRResolver(),
                settings.doValidate(), true, null);
    }

    /**
     * Creates an instance of DeliverOptions which require to be configured using the appropriate
     * setters.
     */
    public DeliverOptions() {
    }

    /**
     * Creates an instance of DeliverOptions with all options explicitly set.
     *
     * @param status String status to deliver with, or null to keep the current status
     * @param pubDate Date publication date
     * @param pdrResolver PublishingDependencyRevisionResolver dependency revision strategy
     * @param validate boolean true to validate the delivered descriptor
     * @param resolveDynamicRevisions boolean true to resolve dynamic revisions
     * @param confs String[] configurations to deliver, or null for all
     */
    public DeliverOptions(String status, Date pubDate,
            PublishingDependencyRevisionResolver pdrResolver, boolean validate,
            boolean resolveDynamicRevisions, String[] confs) {
        this.status = status;
        this.pubdate = pubDate;
        this.pdrResolver = pdrResolver;
        this.validate = validate;
        this.resolveDynamicRevisions = resolveDynamicRevisions;
        this.confs = confs;
    }

    /**
     * Return the pdrResolver that will be used during deliver for each dependency to get its
     * published information. This can particularly useful when the deliver is made for a release,
     * and when we wish to deliver each dependency which is still in integration. The
     * PublishingDependencyRevisionResolver can then do the delivering work for the dependency and
     * return the new (delivered) dependency info (with the delivered revision). Note that
     * PublishingDependencyRevisionResolver is only called for each <b>direct</b> dependency.
     *
     * @return the pdrResolver that will be used during deliver
     */
    public PublishingDependencyRevisionResolver getPdrResolver() {
        return pdrResolver;
    }

    /**
     * Sets the pdrResolver that will be used during deliver for each dependency to get its
     * published information. This can particularly useful when the deliver is made for a release,
     * and when we wish to deliver each dependency which is still in integration. The
     * PublishingDependencyRevisionResolver can then do the delivering work for the dependency and
     * return the new (delivered) dependency info (with the delivered revision). Note that
     * PublishingDependencyRevisionResolver is only called for each <b>direct</b> dependency.
     *
     * @param pdrResolver PublishingDependencyRevisionResolver
     * @return the instance of DeliverOptions on which the method has been called, for easy method
     *         chaining
     */
    public DeliverOptions setPdrResolver(PublishingDependencyRevisionResolver pdrResolver) {
        this.pdrResolver = pdrResolver;
        return this;
    }

    /**
     * @return true if dynamic revisions are replaced by the resolved static revisions
     */
    public boolean isResolveDynamicRevisions() {
        return resolveDynamicRevisions;
    }

    /**
     * @param resolveDynamicRevisions
     *            true to replace dynamic revisions with the resolved static ones
     * @return the instance of DeliverOptions on which the method has been called, for easy method
     *         chaining
     */
    public DeliverOptions setResolveDynamicRevisions(boolean resolveDynamicRevisions) {
        this.resolveDynamicRevisions = resolveDynamicRevisions;
        return this;
    }

    /**
     * @return true if forced revisions are replaced in the delivered descriptor
     */
    public boolean isReplaceForcedRevisions() {
        return replaceForcedRevisions;
    }

    /**
     * @param replaceForcedRevisions
     *            true to replace forced revisions in the delivered descriptor
     * @return the instance of DeliverOptions on which the method has been called, for easy method
     *         chaining
     */
    public DeliverOptions setReplaceForcedRevisions(boolean replaceForcedRevisions) {
        this.replaceForcedRevisions = replaceForcedRevisions;
        return this;
    }

    /**
     * @return true if the delivered descriptor should be validated
     */
    public boolean isValidate() {
        return validate;
    }

    /**
     * @param validate
     *            true to validate the delivered descriptor
     * @return the instance of DeliverOptions on which the method has been called, for easy method
     *         chaining
     */
    public DeliverOptions setValidate(boolean validate) {
        this.validate = validate;
        return this;
    }

    /**
     * @return the publication date stamped on the delivered descriptor
     */
    public Date getPubdate() {
        return pubdate;
    }

    /**
     * @param pubdate
     *            the publication date to stamp on the delivered descriptor
     * @return the instance of DeliverOptions on which the method has been called, for easy method
     *         chaining
     */
    public DeliverOptions setPubdate(Date pubdate) {
        this.pubdate = pubdate;
        return this;
    }

    /**
     * Returns the status to which the module should be delivered, or null if the current status
     * should be kept.
     *
     * @return the status to which the module should be delivered
     */
    public String getStatus() {
        return status;
    }

    /**
     * Sets the status to which the module should be delivered, use null if the current status
     * should be kept.
     *
     * @param status String
     * @return the instance of DeliverOptions on which the method has been called, for easy method
     *         chaining
     */
    public DeliverOptions setStatus(String status) {
        this.status = status;
        return this;
    }

    /**
     * Returns the id of a previous resolve to use for delivering.
     *
     * @return the id of a previous resolve
     */
    public String getResolveId() {
        return resolveId;
    }

    /**
     * Sets the id of a previous resolve to use for delivering.
     *
     * @param resolveId
     *            the id of a previous resolve
     * @return the instance of DeliverOptions on which the method has been called, for easy method
     *         chaining
     */
    public DeliverOptions setResolveId(String resolveId) {
        this.resolveId = resolveId;
        return this;
    }

    /**
     * Return the configurations which must be delivered. Returns <tt>null</tt> if all
     * configurations has to be delivered. Attention: the returned array can contain wildcards!
     * <p>
     * NOTE(review): this returns the internal array without a defensive copy, so callers can
     * mutate this options object's state; kept as-is for backward compatibility — confirm no
     * caller relies on aliasing before changing.
     *
     * @return the configurations to deliver
     */
    public String[] getConfs() {
        return confs;
    }

    /**
     * Sets the configurations to deliver.
     *
     * @param confs
     *            the configurations to deliver
     * @return the instance of DeliverOptions on which the method has been called, for easy method
     *         chaining
     */
    public DeliverOptions setConfs(String[] confs) {
        this.confs = confs;
        return this;
    }

    /**
     * Returns the branch with which the Ivy file should be delivered, or <code>null</code> if
     * branch info shouldn't be changed.
     *
     * @return the branch with which the Ivy file should be delivered
     */
    public String getPubBranch() {
        return pubBranch;
    }

    /**
     * Sets the branch with which the Ivy file should be delivered.
     *
     * @param pubBranch
     *            the branch with which the Ivy file should be delivered
     * @return the instance of DeliverOptions on which the method has been called, for easy method
     *         chaining
     */
    public DeliverOptions setPubBranch(String pubBranch) {
        this.pubBranch = pubBranch;
        return this;
    }

    /**
     * @return true if the revConstraint attribute should be generated if applicable
     */
    public boolean isGenerateRevConstraint() {
        return generateRevConstraint;
    }

    /**
     * @param generateRevConstraint
     *            true to generate the revConstraint attribute if applicable
     * @return the instance of DeliverOptions on which the method has been called, for easy method
     *         chaining
     */
    public DeliverOptions setGenerateRevConstraint(boolean generateRevConstraint) {
        this.generateRevConstraint = generateRevConstraint;
        return this;
    }

    /**
     * @return true if parent descriptor elements are merged into the delivered child descriptor
     */
    public boolean isMerge() {
        return merge;
    }

    /**
     * @param merge
     *            true to merge parent descriptor elements into the delivered child descriptor
     * @return the instance of DeliverOptions on which the method has been called, for easy method
     *         chaining
     */
    public DeliverOptions setMerge(boolean merge) {
        this.merge = merge;
        return this;
    }

    // Debug string; intentionally unchanged format (not all options are included).
    @Override
    public String toString() {
        return "status=" + status + " pubdate=" + pubdate + " validate=" + validate
                + " resolveDynamicRevisions=" + resolveDynamicRevisions + " merge=" + merge
                + " resolveId=" + resolveId + " pubBranch=" + pubBranch;
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.