gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/* * Group name: All Caps Bats * Team Members: * Alan Mulhall 10335911 * Barbara DeKegel 11702369 * Stephen Read 11312696 * Thomas Higgins 11322981 */ package formulator; import java.awt.Color; import java.awt.Graphics; import java.awt.Graphics2D; import java.awt.Panel; import java.awt.RenderingHints; import java.awt.Shape; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.KeyEvent; import java.awt.geom.Ellipse2D; import java.awt.geom.Line2D; import java.io.FileNotFoundException; import java.io.PrintWriter; import java.io.UnsupportedEncodingException; import java.util.Vector; import javax.swing.AbstractAction; import javax.swing.JComponent; import javax.swing.JFrame; import javax.swing.JMenu; import javax.swing.JMenuBar; import javax.swing.JMenuItem; import javax.swing.JPanel; import javax.swing.KeyStroke; import org.eclipse.swt.SWT; import org.eclipse.swt.widgets.Display; import org.eclipse.swt.widgets.Shell; import org.eclipse.swt.widgets.FileDialog; public class Cartesian { } @SuppressWarnings("serial") class CartesianFrame extends JFrame { CartesianPanel panel; boolean labels; boolean dots; boolean lines; boolean axes; String name; Vector<GraphFunction> graphs = new Vector<GraphFunction>(); public CartesianFrame(Vector<GraphFunction> graphs_in, boolean label_tog, boolean dots_tog, boolean lines_tog, boolean axes_in, String name_in) { labels = label_tog; dots = dots_tog; lines = lines_tog; graphs = graphs_in; axes = axes_in; name = name_in; panel = new CartesianPanel(graphs, labels, dots, lines, axes); add(panel); setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE); // KEY BINDINGS // Actions taken by KeyBindings panel.getInputMap(JComponent.WHEN_IN_FOCUSED_WINDOW).put( KeyStroke.getKeyStroke(KeyEvent.VK_1, 0), "one"); panel.getInputMap(JComponent.WHEN_IN_FOCUSED_WINDOW).put( KeyStroke.getKeyStroke(KeyEvent.VK_2, 0), "two"); panel.getInputMap(JComponent.WHEN_IN_FOCUSED_WINDOW).put( KeyStroke.getKeyStroke(KeyEvent.VK_3, 
0), "three"); panel.getInputMap(JComponent.WHEN_IN_FOCUSED_WINDOW).put( KeyStroke.getKeyStroke(KeyEvent.VK_4, 0), "four"); //Labels panel.getActionMap().put("one", new AbstractAction() { @Override public void actionPerformed(ActionEvent e) { labels = !labels; panel = new CartesianPanel(graphs, labels, dots, lines, axes); add(panel); showUI(); } }); //Dots panel.getActionMap().put("two", new AbstractAction() { @Override public void actionPerformed(ActionEvent e) { dots = !dots; panel = new CartesianPanel(graphs, labels, dots, lines,axes); add(panel); showUI(); } }); //Lines panel.getActionMap().put("three", new AbstractAction() { @Override public void actionPerformed(ActionEvent e) { lines = !lines; panel = new CartesianPanel(graphs, labels, dots, lines, axes); add(panel); showUI(); } }); //Axes panel.getActionMap().put("four", new AbstractAction() { @Override public void actionPerformed(ActionEvent e) { axes = !axes; panel = new CartesianPanel(graphs, labels, dots, lines, axes); add(panel); showUI(); } }); //Draw top menu JMenu file = new JMenu("File"); file.setMnemonic('F'); JMenuItem newItem = new JMenuItem("Save Graph"); newItem.setMnemonic('S'); file.add(newItem); JMenuItem exitItem = new JMenuItem("Exit"); exitItem.setMnemonic('x'); file.add(exitItem); //adding action listener to menu items newItem.addActionListener( new ActionListener(){ public void actionPerformed(ActionEvent e) { //Open file explorer, retrieve name to save file as Display display2 = new Display(); final Shell shell2 = new Shell(display2); FileDialog dlg2 = new FileDialog(shell2, SWT.SAVE); String[] extensions2={"*.csv", "*.txt"}; dlg2.setFilterExtensions(extensions2); String fileName = dlg2.open(); display2.dispose(); if (fileName != null) { PrintWriter writer = null; try { writer = new PrintWriter(fileName, "UTF-8"); // Take points in current graph(s) and print out to file String output = ""; for(GraphFunction g: graphs) { for(Point p: g.points){ output = output + p.x + ", " + p.y + ", "; 
} } writer.print(output); writer.close(); } catch (FileNotFoundException e1) { System.out.println("File not found!"); } catch (UnsupportedEncodingException e1) { System.out.println("Encoding exception."); } } else dispose(); } } ); //Exit menu button exitItem.addActionListener( new ActionListener(){ public void actionPerformed(ActionEvent e) { setVisible(false); dispose(); } } ); JMenuBar bar = new JMenuBar(); setJMenuBar(bar); bar.add(file); } public void showUI() { setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE); setTitle(name); setSize(700, 700); setVisible(true); } } @SuppressWarnings("serial") class CartesianPanel extends JPanel { FormulaElement node; double range_min; double range_max; double range_total; boolean labels; boolean dots; boolean lines; double increment; Vector<GraphFunction> graphs; boolean axes; // x-axis coord constants public static final int X_AXIS_FIRST_X_COORD = 50; public static final int X_AXIS_SECOND_X_COORD = 600; public static int X_AXIS_Y_COORD = 600; // y-axis coord constants public static final int Y_AXIS_FIRST_Y_COORD = 50; public static final int Y_AXIS_SECOND_Y_COORD = 600; public static int Y_AXIS_X_COORD = 50; // size of axis indents public static final int FIRST_LENGHT = 10; public static final int SECOND_LENGHT = 5; // size of start coordinate lenght public static final int ORIGIN_COORDINATE_LENGHT = 6; // distance of coordinate strings from axis public static final int AXIS_STRING_DISTANCE = 20; public CartesianPanel(Vector<GraphFunction> graphs_in, boolean label_tog, boolean dot_tog, boolean lines_tog, boolean axes_tog) { labels = label_tog; dots = dot_tog; lines = lines_tog; graphs = graphs_in; axes = axes_tog; } public void paintComponent(Graphics g) { super.paintComponent(g); Graphics2D g2 = (Graphics2D) g; g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON); double x; double y; double max_y = 0, min_y = 0, max_x = 0, min_x = 0; String variable = ""; double xCoordNumbers = 0; double 
yCoordNumbers = 0; // Cycle through the GraphFunction objects, find all the points and then // work out scales and ranges. for (int e = 0; e < graphs.size(); e++) { GraphFunction graph = graphs.get(e); node = graph.root; range_min = graph.min; range_max = graph.max; increment = graph.increment; variable = graph.var_name; if (range_min < 0) { range_total = range_max - range_min; } else { range_total = range_max; } // Find points graph.points.clear(); for (x = range_min; x <= range_max; x += increment) { ((FormulaElement) node).setVariableValue(variable, x); y = node.evaluate(); // if(x>550 || y>550){ // g2.drawString("Range is too large to display. Please shorten range or edit formula.", 50, 300); // g2.dispose(); // } Point current = new Point(x, y); if (max_x < x) { max_x = x; } if (min_x > x) { min_x = x; } if (max_y < y) { max_y = y; } if (min_y > y) { min_y = y; } graph.points.add(current); //System.out.println("Adding point: " + x + ", " + y); } } // finding ranges double xRange = Math.ceil(max_x); double yRange = Math.ceil(max_y); if (min_x < 0) { xRange = Math.ceil(max_x) + Math.abs(Math.floor(min_x)); } if (min_y < 0) { yRange = Math.ceil(max_y) + Math.abs(Math.floor(min_y)); } xCoordNumbers = (int) xRange; yCoordNumbers = (int) yRange; double xLength = (X_AXIS_SECOND_X_COORD - X_AXIS_FIRST_X_COORD) / xCoordNumbers; double yLength = (Y_AXIS_SECOND_Y_COORD - Y_AXIS_FIRST_Y_COORD) / yCoordNumbers; //System.out.println("CoordNumbers: " + xCoordNumbers + ", " + yCoordNumbers); //System.out.println("Lengths: " + xLength + ", " + yLength); double x_meets_y = 50; double y_meets_x = 600; // Find where axes meet if (min_x < 0) { double r = X_AXIS_FIRST_X_COORD + (xLength * -(min_x)); Y_AXIS_X_COORD = (int) r; x_meets_y = r; } if (min_y < 0) { double r = 0; r = Y_AXIS_SECOND_Y_COORD - (yLength * -(Math.floor(min_y))); X_AXIS_Y_COORD = (int) r; y_meets_x = r; } if(axes == true){ // draw x-axis g2.drawLine(X_AXIS_FIRST_X_COORD, X_AXIS_Y_COORD, X_AXIS_SECOND_X_COORD, 
X_AXIS_Y_COORD); // draw y-axis g2.drawLine(Y_AXIS_X_COORD, Y_AXIS_FIRST_Y_COORD, Y_AXIS_X_COORD, Y_AXIS_SECOND_Y_COORD); // draw text "X" & text "Y" g2.drawString("X", X_AXIS_SECOND_X_COORD + 20, X_AXIS_Y_COORD - 15 + AXIS_STRING_DISTANCE); g2.drawString("Y", Y_AXIS_X_COORD + 15 - AXIS_STRING_DISTANCE, Y_AXIS_FIRST_Y_COORD - 20 + AXIS_STRING_DISTANCE / 2); // if range extends over 30, draw every 5th. // if over 70, draw every tenth. int j; int div_factor = 1; if (xRange >= 30) { div_factor = 5; } if (xRange >= 70) { div_factor = 10; } if (xRange >= 200) { div_factor = 50; } if (xRange >= 500) { div_factor = 100; } if (xRange >= 2000) { div_factor = 1000; } if (range_min < 0) { j = (int) range_min; } else { j = 0; } // draw x-axis numbers for (int i = 0; i <= xRange; i++) { if (i % div_factor == 0) { g2.drawString(Integer.toString(j), (int) (X_AXIS_FIRST_X_COORD + (i * xLength) - 3), X_AXIS_Y_COORD + AXIS_STRING_DISTANCE); Shape l = new Line2D.Double(X_AXIS_FIRST_X_COORD + (i * xLength), (int) y_meets_x - SECOND_LENGHT, X_AXIS_FIRST_X_COORD + (i * xLength), (int) y_meets_x + SECOND_LENGHT); g2.draw(l); } j++; } // draw y-axis numbers div_factor = 1; if (yRange >= 30) { div_factor = 5; } if (yRange >= 70) { div_factor = 10; } if (yRange >= 200) { div_factor = 50; } if (yRange >= 500) { div_factor = 100; } if (yRange >= 2000) { div_factor = 1000; } j = (int) Math.floor(min_y); for (int i = 0; i <= yRange; i++) { if (i % div_factor == 0) { g2.drawString(Integer.toString(j), Y_AXIS_X_COORD - AXIS_STRING_DISTANCE, (int) (Y_AXIS_SECOND_Y_COORD - (i * yLength))); Shape l = new Line2D.Double(Y_AXIS_X_COORD - SECOND_LENGHT, Y_AXIS_SECOND_Y_COORD - (i * yLength), Y_AXIS_X_COORD + SECOND_LENGHT, Y_AXIS_SECOND_Y_COORD - (i * yLength)); g2.draw(l); } j++; } } // Draw Origin point where axes meet if(axes == true){ g2.fill(new Ellipse2D.Double( x_meets_y - (ORIGIN_COORDINATE_LENGHT / 2), y_meets_x - (ORIGIN_COORDINATE_LENGHT / 2), ORIGIN_COORDINATE_LENGHT, 
ORIGIN_COORDINATE_LENGHT)); } double old_dotx = 0; double old_doty = 0; // For each graph, draw lines between it's points for (int e = 0; e < graphs.size(); e++) { Vector<Point> current_points = graphs.get(e).points; for (int i = 0; i < current_points.size(); i++) { // Draw points double px = current_points.get(i).getx(); double py = current_points.get(i).gety(); double dotx = (50 + (px * xLength)); double doty = (600 - (py * yLength)); if (x_meets_y != 0) { dotx = (x_meets_y + (px * xLength)); } if (y_meets_x != 0) { doty = (y_meets_x - (py * yLength)); } //Assign colours if (e % 3 == 0) { g2.setPaint(Color.BLACK); } if (e % 3 == 1) { g2.setPaint(Color.RED); } if (e % 3 == 2) { g2.setPaint(Color.BLUE); } if (e % 3 == 4) { g2.setPaint(Color.GREEN); } if (e % 3 == 5) { g2.setPaint(Color.CYAN); } //System.out.println("Drawing point: " + dotx + ", " + doty); // Draw points if (dots == true) { g2.fill(new Ellipse2D.Double( (dotx - (ORIGIN_COORDINATE_LENGHT / 2)), doty - (ORIGIN_COORDINATE_LENGHT / 2), ORIGIN_COORDINATE_LENGHT, ORIGIN_COORDINATE_LENGHT)); } // Draw labels if (labels == true) { double px2 = Math.round(px *100.0)/100.0; double py2 = Math.round(py *100.0)/100.0; int pxt = (int) px2; int pyt = (int) py2; String t = ""; if ((px2 % 1 == 0) && (py2 %1 == 0)){ t = "(" + pxt + ", " + pyt + ")"; } if ((px2 % 1 == 0) && (py2 %1 != 0)){ t = "(" + pxt + ", " + py2 + ")"; } if ((px2 % 1 != 0) && (py2 %1 == 0)){ t = "(" + px2 + ", " + pyt + ")"; } if ((px2 % 1 != 0) && (py2 %1 != 0)){ t = "(" + px2 + ", " + py2 + ")"; } g2.drawString(t, (int) dotx + 12, (int) doty); } // Draw lines if (i > 0 && lines == true) { Shape l = new Line2D.Double(old_dotx, old_doty, dotx, doty); g2.draw(l); } old_dotx = dotx; old_doty = doty; } } } }
/*

   Copyright 2002-2003  The Apache Software Foundation

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.

 */
package org.apache.batik.xml;

import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
import java.util.Locale;
import java.util.MissingResourceException;
import org.apache.batik.i18n.Localizable;
import org.apache.batik.i18n.LocalizableSupport;
import org.apache.batik.util.io.NormalizingReader;
import org.apache.batik.util.io.StreamNormalizingReader;
import org.apache.batik.util.io.StringNormalizingReader;

/**
 * This class represents a scanner for XML documents.  It is a hand-written
 * tokenizer: the caller drives it by repeatedly calling {@code next()} and
 * the scanner switches between the context-specific {@code nextIn*} methods
 * according to its current scanning context.
 *
 * @author <a href="mailto:stephane@hillion.org">Stephane Hillion</a>
 * @version $Id$
 */
public class XMLScanner implements Localizable {

    /** The document start context. */
    public final static int DOCUMENT_START_CONTEXT = 0;

    /** The top level context. */
    public final static int TOP_LEVEL_CONTEXT = 1;

    /** The processing instruction context. */
    public final static int PI_CONTEXT = 2;

    /** The XML declaration context. */
    public final static int XML_DECL_CONTEXT = 3;

    /** The doctype context. */
    public final static int DOCTYPE_CONTEXT = 4;

    /** The start tag context. */
    public final static int START_TAG_CONTEXT = 5;

    /** The content context. */
    public final static int CONTENT_CONTEXT = 6;

    /** The DTD declarations context. */
    public final static int DTD_DECLARATIONS_CONTEXT = 7;

    /** The CDATA section context. */
    public final static int CDATA_SECTION_CONTEXT = 8;

    /** The end tag context. */
    public final static int END_TAG_CONTEXT = 9;

    /** The attribute value context. */
    public final static int ATTRIBUTE_VALUE_CONTEXT = 10;

    /** The ATTLIST context. */
    public final static int ATTLIST_CONTEXT = 11;

    /** The element declaration context. */
    public final static int ELEMENT_DECLARATION_CONTEXT = 12;

    /** The entity context. */
    public final static int ENTITY_CONTEXT = 13;

    /** The notation context. */
    public final static int NOTATION_CONTEXT = 14;

    /** The notation type context. */
    public final static int NOTATION_TYPE_CONTEXT = 15;

    /** The enumeration context. */
    public final static int ENUMERATION_CONTEXT = 16;

    /** The entity value context. */
    public final static int ENTITY_VALUE_CONTEXT = 17;

    /** The default resource bundle base name (for localized error messages). */
    protected final static String BUNDLE_CLASSNAME =
        "org.apache.batik.xml.resources.Messages";

    /** The localizable support. */
    protected LocalizableSupport localizableSupport =
        new LocalizableSupport(BUNDLE_CLASSNAME,
                               XMLScanner.class.getClassLoader());

    /** The reader. */
    protected NormalizingReader reader;

    /** The current char (one-character lookahead; -1 means end of stream). */
    protected int current;

    /** The type of the current lexical unit. */
    protected int type;

    /** The recording buffer. */
    protected char[] buffer = new char[1024];

    /** The current position in the buffer. */
    protected int position;

    /** The start offset of the last lexical unit. */
    protected int start;

    /** The end offset of the last lexical unit. */
    protected int end;

    /** The current scanning context (one of the *_CONTEXT constants above). */
    protected int context;

    /** The depth in the xml tree (0 = outside the document element). */
    protected int depth;

    /** A PI end has been previously read. */
    protected boolean piEndRead;

    /** The scanner is in the internal DTD. */
    protected boolean inDTD;

    /** The last attribute delimiter encountered (either '"' or '\''). */
    protected char attrDelimiter;

    /** A CDATA section end is the next token. */
    protected boolean cdataEndRead;

    /**
     * Creates a new XML scanner.
     * @param r The reader to scan.
     */
    public XMLScanner(Reader r) throws XMLException {
        context = DOCUMENT_START_CONTEXT;
        try {
            reader = new StreamNormalizingReader(r);
            current = nextChar(); // prime the lookahead
        } catch (IOException e) {
            throw new XMLException(e);
        }
    }

    /**
     * Creates a new XML scanner.
     * @param is The input stream to scan.
     * @param enc The character encoding to use.
     */
    public XMLScanner(InputStream is, String enc) throws XMLException {
        context = DOCUMENT_START_CONTEXT;
        try {
            reader = new StreamNormalizingReader(is, enc);
            current = nextChar(); // prime the lookahead
        } catch (IOException e) {
            throw new XMLException(e);
        }
    }

    /**
     * Creates a new XML scanner.
     * @param s The string to parse.
     */
    public XMLScanner(String s) throws XMLException {
        context = DOCUMENT_START_CONTEXT;
        try {
            reader = new StringNormalizingReader(s);
            current = nextChar(); // prime the lookahead
        } catch (IOException e) {
            throw new XMLException(e);
        }
    }

    /**
     * Implements {@link org.apache.batik.i18n.Localizable#setLocale(Locale)}.
     */
    public void setLocale(Locale l) {
        localizableSupport.setLocale(l);
    }

    /**
     * Implements {@link org.apache.batik.i18n.Localizable#getLocale()}.
     */
    public Locale getLocale() {
        return localizableSupport.getLocale();
    }

    /**
     * Implements {@link
     * org.apache.batik.i18n.Localizable#formatMessage(String,Object[])}.
     */
    public String formatMessage(String key, Object[] args)
        throws MissingResourceException {
        return localizableSupport.formatMessage(key, args);
    }

    /**
     * Sets the current depth in the XML tree.
     */
    public void setDepth(int i) {
        depth = i;
    }

    /**
     * Returns the current depth in the XML tree.
     */
    public int getDepth() {
        return depth;
    }

    /**
     * Sets the current context.
     */
    public void setContext(int c) {
        context = c;
    }

    /**
     * Returns the current context.
     */
    public int getContext() {
        return context;
    }

    /**
     * The current lexical unit type like defined in LexicalUnits.
     */
    public int getType() {
        return type;
    }

    /**
     * Returns the current line.
     */
    public int getLine() {
        return reader.getLine();
    }

    /**
     * Returns the current column.
     */
    public int getColumn() {
        return reader.getColumn();
    }

    /**
     * Returns the buffer used to store the chars.
     */
    public char[] getBuffer() {
        return buffer;
    }

    /**
     * Returns the start offset of the last lexical unit.
     */
    public int getStart() {
        return start;
    }

    /**
     * Returns the end offset of the last lexical unit.
     */
    public int getEnd() {
        return end;
    }

    /**
     * Returns the last encountered string delimiter.
     */
    public char getStringDelimiter() {
        return attrDelimiter;
    }

    /**
     * Returns the start offset of the current lexical unit.
     * The offsets trim off the unit's leading delimiter characters
     * (e.g. 2 for the "&lt;?" of a PI start, 4 for the "&lt;!--" of a
     * comment); negative values adjust for trailing state.
     */
    public int getStartOffset() {
        switch (type) {
        case LexicalUnits.SECTION_END:
            return -3;
        case LexicalUnits.PI_END:
            return -2;
        case LexicalUnits.STRING:
        case LexicalUnits.ENTITY_REFERENCE:
        case LexicalUnits.PARAMETER_ENTITY_REFERENCE:
        case LexicalUnits.START_TAG:
        case LexicalUnits.FIRST_ATTRIBUTE_FRAGMENT:
            return 1;
        case LexicalUnits.PI_START:
        case LexicalUnits.END_TAG:
        case LexicalUnits.CHARACTER_REFERENCE:
            return 2;
        case LexicalUnits.COMMENT:
            return 4;
        default:
            return 0;
        }
    }

    /**
     * Returns the end offset of the current lexical unit.
     * Negative values trim off the unit's trailing delimiter characters
     * (e.g. -3 for the "--&gt;" of a comment).
     */
    public int getEndOffset() {
        switch (type) {
        case LexicalUnits.STRING:
        case LexicalUnits.ENTITY_REFERENCE:
        case LexicalUnits.CHARACTER_REFERENCE:
        case LexicalUnits.PARAMETER_ENTITY_REFERENCE:
        case LexicalUnits.LAST_ATTRIBUTE_FRAGMENT:
            return -1;
        case LexicalUnits.PI_DATA:
            return -2;
        case LexicalUnits.COMMENT:
            return -3;
        case LexicalUnits.CHARACTER_DATA:
            if (cdataEndRead) {
                return -3;
            }
            return 0;
        default:
            return 0;
        }
    }

    /**
     * Clears the buffer.  The most recently read character is retained at
     * index 0 so the current lookahead still has backing storage.
     */
    public void clearBuffer() {
        if (position <= 0) {
            position = 0;
        } else {
            buffer[0] = buffer[position - 1];
            position = 1;
        }
    }

    /**
     * Advances to the next lexical unit.
     * @return The type of the lexical unit like defined in LexicalUnits.
     */
    public int next() throws XMLException {
        return next(context);
    }

    /**
     * Advances to the next lexical unit.
     * @param ctx The context to use for scanning.
     * @return The type of the lexical unit like defined in LexicalUnits.
*/ public int next(int ctx) throws XMLException { start = position - 1; try { switch (ctx) { case DOCUMENT_START_CONTEXT: type = nextInDocumentStart(); break; case TOP_LEVEL_CONTEXT: type = nextInTopLevel(); break; case PI_CONTEXT: type = nextInPI(); break; case START_TAG_CONTEXT: type = nextInStartTag(); break; case ATTRIBUTE_VALUE_CONTEXT: type = nextInAttributeValue(); break; case CONTENT_CONTEXT: type = nextInContent(); break; case END_TAG_CONTEXT: type = nextInEndTag(); break; case CDATA_SECTION_CONTEXT: type = nextInCDATASection(); break; case XML_DECL_CONTEXT: type = nextInXMLDecl(); break; case DOCTYPE_CONTEXT: type = nextInDoctype(); break; case DTD_DECLARATIONS_CONTEXT: type = nextInDTDDeclarations(); break; case ELEMENT_DECLARATION_CONTEXT: type = nextInElementDeclaration(); break; case ATTLIST_CONTEXT: type = nextInAttList(); break; case NOTATION_CONTEXT: type = nextInNotation(); break; case ENTITY_CONTEXT: type = nextInEntity(); break; case ENTITY_VALUE_CONTEXT: return nextInEntityValue(); case NOTATION_TYPE_CONTEXT: return nextInNotationType(); case ENUMERATION_CONTEXT: return nextInEnumeration(); default: throw new IllegalArgumentException("unexpected ctx:" + ctx ); } } catch (IOException e) { throw new XMLException(e); } end = position - ((current == -1) ? 0 : 1); return type; } /** * Reads the first token in the stream. */ protected int nextInDocumentStart() throws IOException, XMLException { switch (current) { case 0x9: case 0xA: case 0xD: case 0x20: do { nextChar(); } while (current != -1 && XMLUtilities.isXMLSpace((char)current)); context = (depth == 0) ? 
TOP_LEVEL_CONTEXT : CONTENT_CONTEXT; return LexicalUnits.S; case '<': switch (nextChar()) { case '?': int c1 = nextChar(); if (c1 == -1 || !XMLUtilities.isXMLNameFirstCharacter((char)c1)) { throw createXMLException("invalid.pi.target"); } context = PI_CONTEXT; int c2 = nextChar(); if (c2 == -1 || !XMLUtilities.isXMLNameCharacter((char)c2)) { return LexicalUnits.PI_START; } int c3 = nextChar(); if (c3 == -1 || !XMLUtilities.isXMLNameCharacter((char)c3)) { return LexicalUnits.PI_START; } int c4 = nextChar(); if (c4 != -1 && XMLUtilities.isXMLNameCharacter((char)c4)) { do { nextChar(); } while (current != -1 && XMLUtilities.isXMLNameCharacter((char)current)); return LexicalUnits.PI_START; } if (c1 == 'x' && c2 == 'm' && c3 == 'l') { context = XML_DECL_CONTEXT; return LexicalUnits.XML_DECL_START; } if ((c1 == 'x' || c1 == 'X') && (c2 == 'm' || c2 == 'M') && (c3 == 'l' || c3 == 'L')) { throw createXMLException("xml.reserved"); } return LexicalUnits.PI_START; case '!': switch (nextChar()) { case '-': return readComment(); case 'D': context = DOCTYPE_CONTEXT; return readIdentifier("OCTYPE", LexicalUnits.DOCTYPE_START, -1); default: throw createXMLException("invalid.doctype"); } default: context = START_TAG_CONTEXT; depth++; return readName(LexicalUnits.START_TAG); } case -1: return LexicalUnits.EOF; default: if (depth == 0) { throw createXMLException("invalid.character"); } else { return nextInContent(); } } } /** * Advances to the next lexical unit in the top level context. * @return The type of the lexical unit like defined in LexicalUnits. 
*/ protected int nextInTopLevel() throws IOException, XMLException { switch (current) { case 0x9: case 0xA: case 0xD: case 0x20: do { nextChar(); } while (current != -1 && XMLUtilities.isXMLSpace((char)current)); return LexicalUnits.S; case '<': switch (nextChar()) { case '?': context = PI_CONTEXT; return readPIStart(); case '!': switch (nextChar()) { case '-': return readComment(); case 'D': context = DOCTYPE_CONTEXT; return readIdentifier("OCTYPE", LexicalUnits.DOCTYPE_START, -1); default: throw createXMLException("invalid.character"); } default: context = START_TAG_CONTEXT; depth++; return readName(LexicalUnits.START_TAG); } case -1: return LexicalUnits.EOF; default: throw createXMLException("invalid.character"); } } /** * Returns the next lexical unit in the context of a processing * instruction. */ protected int nextInPI() throws IOException, XMLException { if (piEndRead) { piEndRead = false; context = (depth == 0) ? TOP_LEVEL_CONTEXT : CONTENT_CONTEXT; return LexicalUnits.PI_END; } switch (current) { case 0x9: case 0xA: case 0xD: case 0x20: do { nextChar(); } while (current != -1 && XMLUtilities.isXMLSpace((char)current)); return LexicalUnits.S; case '?': if (nextChar() != '>') { throw createXMLException("pi.end.expected"); } nextChar(); if (inDTD) { context = DTD_DECLARATIONS_CONTEXT; } else if (depth == 0) { context = TOP_LEVEL_CONTEXT; } else { context = CONTENT_CONTEXT; } return LexicalUnits.PI_END; default: do { do { nextChar(); } while (current != -1 && current != '?'); nextChar(); } while (current != -1 && current != '>'); nextChar(); piEndRead = true; return LexicalUnits.PI_DATA; } } /** * Returns the next lexical unit in the context of a start tag. 
*/ protected int nextInStartTag() throws IOException, XMLException { switch (current) { case 0x9: case 0xA: case 0xD: case 0x20: do { nextChar(); } while (current != -1 && XMLUtilities.isXMLSpace((char)current)); return LexicalUnits.S; case '/': if (nextChar() != '>') { throw createXMLException("malformed.tag.end"); } nextChar(); context = (--depth == 0) ? TOP_LEVEL_CONTEXT : CONTENT_CONTEXT; return LexicalUnits.EMPTY_ELEMENT_END; case '>': nextChar(); context = CONTENT_CONTEXT; return LexicalUnits.END_CHAR; case '=': nextChar(); return LexicalUnits.EQ; case '"': attrDelimiter = '"'; nextChar(); for (;;) { switch (current) { case '"': nextChar(); return LexicalUnits.STRING; case '&': context = ATTRIBUTE_VALUE_CONTEXT; return LexicalUnits.FIRST_ATTRIBUTE_FRAGMENT; case '<': throw createXMLException("invalid.character"); case -1: throw createXMLException("unexpected.eof"); } nextChar(); } case '\'': attrDelimiter = '\''; nextChar(); for (;;) { switch (current) { case '\'': nextChar(); return LexicalUnits.STRING; case '&': context = ATTRIBUTE_VALUE_CONTEXT; return LexicalUnits.FIRST_ATTRIBUTE_FRAGMENT; case '<': throw createXMLException("invalid.character"); case -1: throw createXMLException("unexpected.eof"); } nextChar(); } default: return readName(LexicalUnits.NAME); } } /** * Returns the next lexical unit in the context of an attribute value. 
*/ protected int nextInAttributeValue() throws IOException, XMLException { if (current == -1) { return LexicalUnits.EOF; } if (current == '&') { return readReference(); } else { loop: for (;;) { switch (current) { case '&': case '<': case -1: break loop; case '"': case '\'': if (current == attrDelimiter) { break loop; } } nextChar(); } switch (current) { case -1: break; case '<': throw createXMLException("invalid.character"); case '&': return LexicalUnits.ATTRIBUTE_FRAGMENT; case '\'': case '"': nextChar(); if (inDTD) { context = ATTLIST_CONTEXT; } else { context = START_TAG_CONTEXT; } } return LexicalUnits.LAST_ATTRIBUTE_FRAGMENT; } } /** * Returns the next lexical unit in the context of an element content. */ protected int nextInContent() throws IOException, XMLException { switch (current) { case -1: return LexicalUnits.EOF; case '&': return readReference(); case '<': switch (nextChar()) { case '?': context = PI_CONTEXT; return readPIStart(); case '!': switch (nextChar()) { case '-': return readComment(); case '[': context = CDATA_SECTION_CONTEXT; return readIdentifier("CDATA[", LexicalUnits.CDATA_START, -1); default: throw createXMLException("invalid.character"); } case '/': nextChar(); context = END_TAG_CONTEXT; return readName(LexicalUnits.END_TAG); default: depth++; context = START_TAG_CONTEXT; return readName(LexicalUnits.START_TAG); } default: loop: for (;;) { switch (current) { default: nextChar(); break; case -1: case '&': case '<': break loop; } } return LexicalUnits.CHARACTER_DATA; } } /** * Returns the next lexical unit in the context of a end tag. 
*/ protected int nextInEndTag() throws IOException, XMLException { switch (current) { case 0x9: case 0xA: case 0xD: case 0x20: do { nextChar(); } while (current != -1 && XMLUtilities.isXMLSpace((char)current)); return LexicalUnits.S; case '>': if (--depth < 0) { throw createXMLException("unexpected.end.tag"); } else if (depth == 0) { context = TOP_LEVEL_CONTEXT; } else { context = CONTENT_CONTEXT; } nextChar(); return LexicalUnits.END_CHAR; default: throw createXMLException("invalid.character"); } } /** * Returns the next lexical unit in the context of a CDATA section. */ protected int nextInCDATASection() throws IOException, XMLException { if (cdataEndRead) { cdataEndRead = false; context = CONTENT_CONTEXT; return LexicalUnits.SECTION_END; } while (current != -1) { while (current != ']' && current != -1) { nextChar(); } if (current != -1) { nextChar(); if (current == ']') { nextChar(); if (current == '>') { break; } } } } if (current == -1) { throw createXMLException("unexpected.eof"); } nextChar(); cdataEndRead = true; return LexicalUnits.CHARACTER_DATA; } /** * Returns the next lexical unit in the context of an XML declaration. 
*/ protected int nextInXMLDecl() throws IOException, XMLException { switch (current) { case 0x9: case 0xA: case 0xD: case 0x20: do { nextChar(); } while (current != -1 && XMLUtilities.isXMLSpace((char)current)); return LexicalUnits.S; case 'v': return readIdentifier("ersion", LexicalUnits.VERSION_IDENTIFIER, -1); case 'e': return readIdentifier("ncoding", LexicalUnits.ENCODING_IDENTIFIER, -1); case 's': return readIdentifier("tandalone", LexicalUnits.STANDALONE_IDENTIFIER, -1); case '=': nextChar(); return LexicalUnits.EQ; case '?': nextChar(); if (current != '>') { throw createXMLException("pi.end.expected"); } nextChar(); context = TOP_LEVEL_CONTEXT; return LexicalUnits.PI_END; case '"': attrDelimiter = '"'; return readString(); case '\'': attrDelimiter = '\''; return readString(); default: throw createXMLException("invalid.character"); } } /** * Returns the next lexical unit in the context of a doctype. */ protected int nextInDoctype() throws IOException, XMLException { switch (current) { case 0x9: case 0xA: case 0xD: case 0x20: do { nextChar(); } while (current != -1 && XMLUtilities.isXMLSpace((char)current)); return LexicalUnits.S; case '>': nextChar(); context = TOP_LEVEL_CONTEXT; return LexicalUnits.END_CHAR; case 'S': return readIdentifier("YSTEM", LexicalUnits.SYSTEM_IDENTIFIER, LexicalUnits.NAME); case 'P': return readIdentifier("UBLIC", LexicalUnits.PUBLIC_IDENTIFIER, LexicalUnits.NAME); case '"': attrDelimiter = '"'; return readString(); case '\'': attrDelimiter = '\''; return readString(); case '[': nextChar(); context = DTD_DECLARATIONS_CONTEXT; inDTD = true; return LexicalUnits.LSQUARE_BRACKET; default: return readName(LexicalUnits.NAME); } } /** * Returns the next lexical unit in the context dtd declarations. 
*/ protected int nextInDTDDeclarations() throws IOException, XMLException { switch (current) { case 0x9: case 0xA: case 0xD: case 0x20: do { nextChar(); } while (current != -1 && XMLUtilities.isXMLSpace((char)current)); return LexicalUnits.S; case ']': nextChar(); context = DOCTYPE_CONTEXT; inDTD = false; return LexicalUnits.RSQUARE_BRACKET; case '%': return readPEReference(); case '<': switch (nextChar()) { case '?': context = PI_CONTEXT; return readPIStart(); case '!': switch (nextChar()) { case '-': return readComment(); case 'E': switch (nextChar()) { case 'L': context = ELEMENT_DECLARATION_CONTEXT; return readIdentifier ("EMENT", LexicalUnits.ELEMENT_DECLARATION_START, -1); case 'N': context = ENTITY_CONTEXT; return readIdentifier("TITY", LexicalUnits.ENTITY_START, -1); default: throw createXMLException("invalid.character"); } case 'A': context = ATTLIST_CONTEXT; return readIdentifier("TTLIST", LexicalUnits.ATTLIST_START, -1); case 'N': context = NOTATION_CONTEXT; return readIdentifier("OTATION", LexicalUnits.NOTATION_START, -1); default: throw createXMLException("invalid.character"); } default: throw createXMLException("invalid.character"); } default: throw createXMLException("invalid.character"); } } /** * Reads a simple string, like the ones used for version, encoding, * public/system identifiers... * The current character must be the string delimiter. * @return type. */ protected int readString() throws IOException, XMLException { do { nextChar(); } while (current != -1 && current != attrDelimiter); if (current == -1) { throw createXMLException("unexpected.eof"); } nextChar(); return LexicalUnits.STRING; } /** * Reads a comment. '&lt;!-' must have been read. 
*/
// Scans "-- ... -->" after "<!-" has been consumed; throws on a '--' that is
// not immediately followed by '>'.
protected int readComment() throws IOException, XMLException {
    if (nextChar() != '-') {
        throw createXMLException("malformed.comment");
    }
    int c = nextChar();
    // Look for the terminating "--": inner loop skips to the next '-',
    // outer loop checks whether a second '-' follows.
    while (c != -1) {
        while (c != -1 && c != '-') {
            c = nextChar();
        }
        c = nextChar();
        if (c == '-') {
            break;
        }
    }
    if (c == -1) {
        throw createXMLException("unexpected.eof");
    }
    // "--" must be followed by '>'.
    c = nextChar();
    if (c != '>') {
        throw createXMLException("malformed.comment");
    }
    nextChar();
    return LexicalUnits.COMMENT;
}

/**
 * Reads the given identifier.
 * @param s The portion of the identifier to read.
 * @param type The lexical unit type of the identifier.
 * @param ntype The lexical unit type to set if the identifier does not
 *        match, or -1 if an error must be signaled.
 */
protected int readIdentifier(String s, int type, int ntype) throws IOException, XMLException {
    int len = s.length();
    for (int i = 0; i < len; i++) {
        nextChar();
        if (current != s.charAt(i)) {
            if (ntype == -1) {
                throw createXMLException("invalid.character");
            } else {
                // Mismatch: consume the rest of the name and fall back to ntype.
                while (current != -1 && XMLUtilities.isXMLNameCharacter((char)current)) {
                    nextChar();
                }
                return ntype;
            }
        }
    }
    nextChar();
    return type;
}

/**
 * Reads a name. The current character must be the first character.
 * @param type The lexical unit type to set.
 * @return type.
 */
protected int readName(int type) throws IOException, XMLException {
    if (current == -1) {
        throw createXMLException("unexpected.eof");
    }
    if (!XMLUtilities.isXMLNameFirstCharacter((char)current)) {
        throw createXMLException("invalid.name");
    }
    do {
        nextChar();
    } while (current != -1 && XMLUtilities.isXMLNameCharacter((char)current));
    return type;
}

/**
 * Reads a processing instruction start.
 * @return type.
*/
// Reads up to the first four characters of a PI target and rejects the
// reserved target "xml" (in any case).  c1 holds the first target character.
protected int readPIStart() throws IOException, XMLException {
    int c1 = nextChar();
    if (c1 == -1) {
        throw createXMLException("unexpected.eof");
    }
    if (!XMLUtilities.isXMLNameFirstCharacter((char)current)) {
        throw createXMLException("malformed.pi.target");
    }
    int c2 = nextChar();
    if (c2 == -1 || !XMLUtilities.isXMLNameCharacter((char)c2)) {
        return LexicalUnits.PI_START;
    }
    int c3 = nextChar();
    if (c3 == -1 || !XMLUtilities.isXMLNameCharacter((char)c3)) {
        return LexicalUnits.PI_START;
    }
    int c4 = nextChar();
    if (c4 != -1 && XMLUtilities.isXMLNameCharacter((char)c4)) {
        // Target longer than three characters: cannot be "xml"; just consume it.
        do {
            nextChar();
        } while (current != -1 && XMLUtilities.isXMLNameCharacter((char)current));
        return LexicalUnits.PI_START;
    }
    if ((c1 == 'x' || c1 == 'X') &&
        (c2 == 'm' || c2 == 'M') &&
        (c3 == 'l' || c3 == 'L')) {
        throw createXMLException("xml.reserved");
    }
    return LexicalUnits.PI_START;
}

/**
 * Returns the next lexical unit in the context of a element declaration.
 */
protected int nextInElementDeclaration() throws IOException, XMLException {
    switch (current) {
    case 0x9: case 0xA: case 0xD: case 0x20:
        do {
            nextChar();
        } while (current != -1 && XMLUtilities.isXMLSpace((char)current));
        return LexicalUnits.S;
    case '>':
        nextChar();
        context = DTD_DECLARATIONS_CONTEXT;
        return LexicalUnits.END_CHAR;
    case '%':
        // Parameter entity reference: "%Name;".
        nextChar();
        int t = readName(LexicalUnits.PARAMETER_ENTITY_REFERENCE);
        if (current != ';') {
            throw createXMLException("malformed.parameter.entity");
        }
        nextChar();
        return t;
    case 'E':
        return readIdentifier("MPTY", LexicalUnits.EMPTY_IDENTIFIER, LexicalUnits.NAME);
    case 'A':
        return readIdentifier("NY", LexicalUnits.ANY_IDENTIFIER, LexicalUnits.NAME);
    case '?':
        nextChar();
        return LexicalUnits.QUESTION;
    case '+':
        nextChar();
        return LexicalUnits.PLUS;
    case '*':
        nextChar();
        return LexicalUnits.STAR;
    case '(':
        nextChar();
        return LexicalUnits.LEFT_BRACE;
    case ')':
        nextChar();
        return LexicalUnits.RIGHT_BRACE;
    case '|':
        nextChar();
        return LexicalUnits.PIPE;
    case ',':
        nextChar();
        return LexicalUnits.COMMA;
    case '#':
        return readIdentifier("PCDATA", LexicalUnits.PCDATA_IDENTIFIER, -1);
    default:
        return readName(LexicalUnits.NAME);
    }
}

/**
 * Returns the next lexical unit in the context of an attribute list.
 */
// Hand-unrolled keyword matcher for attribute types: distinguishes
// CDATA, ID/IDREF/IDREFS, NOTATION, NMTOKEN/NMTOKENS and
// ENTITY/ENTITIES character-by-character, falling back to NAME as soon
// as the input diverges from a keyword.
protected int nextInAttList() throws IOException, XMLException {
    switch (current) {
    case 0x9: case 0xA: case 0xD: case 0x20:
        do {
            nextChar();
        } while (current != -1 && XMLUtilities.isXMLSpace((char)current));
        return LexicalUnits.S;
    case '>':
        nextChar();
        context = DTD_DECLARATIONS_CONTEXT;
        // NOTE(review): unlike sibling cases this also stores into the `type`
        // field before returning — presumably the scanner's "last unit" slot;
        // confirm against the field's other writers.
        return type = LexicalUnits.END_CHAR;
    case '%':
        // NOTE(review): no nextChar() before readName() here, unlike
        // nextInElementDeclaration's '%' handling — verify intentional.
        int t = readName(LexicalUnits.PARAMETER_ENTITY_REFERENCE);
        if (current != ';') {
            throw createXMLException("malformed.parameter.entity");
        }
        nextChar();
        return t;
    case 'C':
        return readIdentifier("DATA", LexicalUnits.CDATA_IDENTIFIER, LexicalUnits.NAME);
    case 'I':
        // Matches "ID", "IDREF" or "IDREFS"; any divergence yields NAME.
        nextChar();
        if (current != 'D') {
            do {
                nextChar();
            } while (current != -1 && XMLUtilities.isXMLNameCharacter((char)current));
            return LexicalUnits.NAME;
        }
        nextChar();
        if (current == -1 || !XMLUtilities.isXMLNameCharacter((char)current)) {
            return LexicalUnits.ID_IDENTIFIER;
        }
        if (current != 'R') {
            do {
                nextChar();
            } while (current != -1 && XMLUtilities.isXMLNameCharacter((char)current));
            return LexicalUnits.NAME;
        }
        nextChar();
        if (current == -1 || !XMLUtilities.isXMLNameCharacter((char)current)) {
            return LexicalUnits.NAME;
        }
        if (current != 'E') {
            do {
                nextChar();
            } while (current != -1 && XMLUtilities.isXMLNameCharacter((char)current));
            return LexicalUnits.NAME;
        }
        nextChar();
        if (current == -1 || !XMLUtilities.isXMLNameCharacter((char)current)) {
            return LexicalUnits.NAME;
        }
        if (current != 'F') {
            do {
                nextChar();
            } while (current != -1 && XMLUtilities.isXMLNameCharacter((char)current));
            return LexicalUnits.NAME;
        }
        nextChar();
        if (current == -1 || !XMLUtilities.isXMLNameCharacter((char)current)) {
            return LexicalUnits.IDREF_IDENTIFIER;
        }
        if (current != 'S') {
            do {
                nextChar();
            } while (current != -1 && XMLUtilities.isXMLNameCharacter((char)current));
            return LexicalUnits.NAME;
        }
        nextChar();
        if (current == -1 || !XMLUtilities.isXMLNameCharacter((char)current)) {
            return LexicalUnits.IDREFS_IDENTIFIER;
        }
        do {
            nextChar();
        } while (current != -1 && XMLUtilities.isXMLNameCharacter((char)current));
        return type = LexicalUnits.NAME;
    case 'N':
        // "NOTATION" or "NMTOKEN(S)".
        switch (nextChar()) {
        default:
            do {
                nextChar();
            } while (current != -1 && XMLUtilities.isXMLNameCharacter((char)current));
            return LexicalUnits.NAME;
        case 'O':
            context = NOTATION_TYPE_CONTEXT;
            return readIdentifier("TATION", LexicalUnits.NOTATION_IDENTIFIER, LexicalUnits.NAME);
        case 'M':
            nextChar();
            if (current == -1 || !XMLUtilities.isXMLNameCharacter((char)current)) {
                return LexicalUnits.NAME;
            }
            if (current != 'T') {
                do {
                    nextChar();
                } while (current != -1 && XMLUtilities.isXMLNameCharacter((char)current));
                return LexicalUnits.NAME;
            }
            nextChar();
            if (current == -1 || !XMLUtilities.isXMLNameCharacter((char)current)) {
                return LexicalUnits.NAME;
            }
            if (current != 'O') {
                do {
                    nextChar();
                } while (current != -1 && XMLUtilities.isXMLNameCharacter((char)current));
                return LexicalUnits.NAME;
            }
            nextChar();
            if (current == -1 || !XMLUtilities.isXMLNameCharacter((char)current)) {
                return LexicalUnits.NAME;
            }
            if (current != 'K') {
                do {
                    nextChar();
                } while (current != -1 && XMLUtilities.isXMLNameCharacter((char)current));
                return LexicalUnits.NAME;
            }
            nextChar();
            if (current == -1 || !XMLUtilities.isXMLNameCharacter((char)current)) {
                return LexicalUnits.NAME;
            }
            if (current != 'E') {
                do {
                    nextChar();
                } while (current != -1 && XMLUtilities.isXMLNameCharacter((char)current));
                return LexicalUnits.NAME;
            }
            nextChar();
            if (current == -1 || !XMLUtilities.isXMLNameCharacter((char)current)) {
                return LexicalUnits.NAME;
            }
            if (current != 'N') {
                do {
                    nextChar();
                } while (current != -1 && XMLUtilities.isXMLNameCharacter((char)current));
                return LexicalUnits.NAME;
            }
            nextChar();
            if (current == -1 || !XMLUtilities.isXMLNameCharacter((char)current)) {
                return LexicalUnits.NMTOKEN_IDENTIFIER;
            }
            if (current != 'S') {
                do {
                    nextChar();
                } while (current != -1 && XMLUtilities.isXMLNameCharacter((char)current));
                return LexicalUnits.NAME;
            }
            nextChar();
            if (current == -1 || !XMLUtilities.isXMLNameCharacter((char)current)) {
                return LexicalUnits.NMTOKENS_IDENTIFIER;
            }
            do {
                nextChar();
            } while (current != -1 && XMLUtilities.isXMLNameCharacter((char)current));
            return LexicalUnits.NAME;
        }
    case 'E':
        // "ENTITY" or "ENTITIES".
        nextChar();
        if (current != 'N') {
            do {
                nextChar();
            } while (current != -1 && XMLUtilities.isXMLNameCharacter((char)current));
            return LexicalUnits.NAME;
        }
        nextChar();
        if (current == -1 || !XMLUtilities.isXMLNameCharacter((char)current)) {
            return LexicalUnits.NAME;
        }
        if (current != 'T') {
            do {
                nextChar();
            } while (current != -1 && XMLUtilities.isXMLNameCharacter((char)current));
            return LexicalUnits.NAME;
        }
        nextChar();
        if (current == -1 || !XMLUtilities.isXMLNameCharacter((char)current)) {
            return LexicalUnits.NAME;
        }
        if (current != 'I') {
            do {
                nextChar();
            } while (current != -1 && XMLUtilities.isXMLNameCharacter((char)current));
            return LexicalUnits.NAME;
        }
        nextChar();
        if (current == -1 || !XMLUtilities.isXMLNameCharacter((char)current)) {
            return LexicalUnits.NAME;
        }
        if (current != 'T') {
            do {
                nextChar();
            } while (current != -1 && XMLUtilities.isXMLNameCharacter((char)current));
            return type = LexicalUnits.NAME;
        }
        nextChar();
        if (current == -1 || !XMLUtilities.isXMLNameCharacter((char)current)) {
            return LexicalUnits.NAME;
        }
        switch (current) {
        case 'Y':
            nextChar();
            if (current == -1 || !XMLUtilities.isXMLNameCharacter((char)current)) {
                return LexicalUnits.ENTITY_IDENTIFIER;
            }
            do {
                nextChar();
            } while (current != -1 && XMLUtilities.isXMLNameCharacter((char)current));
            return LexicalUnits.NAME;
        case 'I':
            nextChar();
            if (current == -1 || !XMLUtilities.isXMLNameCharacter((char)current)) {
                return LexicalUnits.NAME;
            }
            if (current != 'E') {
                do {
                    nextChar();
                } while (current != -1 && XMLUtilities.isXMLNameCharacter((char)current));
                return LexicalUnits.NAME;
            }
            nextChar();
            if (current == -1 || !XMLUtilities.isXMLNameCharacter((char)current)) {
                return LexicalUnits.NAME;
            }
            if (current != 'S') {
                do {
                    nextChar();
                } while (current != -1 && XMLUtilities.isXMLNameCharacter((char)current));
                return LexicalUnits.NAME;
            }
            return LexicalUnits.ENTITIES_IDENTIFIER;
        default:
            if (current == -1 || !XMLUtilities.isXMLNameCharacter((char)current)) {
                return LexicalUnits.NAME;
            }
            do {
                nextChar();
            } while (current != -1 && XMLUtilities.isXMLNameCharacter((char)current));
            return LexicalUnits.NAME;
        }
    case '"':
        // Double-quoted default value; may be split at an entity reference.
        attrDelimiter = '"';
        nextChar();
        if (current == -1) {
            throw createXMLException("unexpected.eof");
        }
        if (current != '"' && current != '&') {
            do {
                nextChar();
            } while (current != -1 && current != '"' && current != '&');
        }
        switch (current) {
        case '&':
            context = ATTRIBUTE_VALUE_CONTEXT;
            return LexicalUnits.FIRST_ATTRIBUTE_FRAGMENT;
        case '"':
            nextChar();
            return LexicalUnits.STRING;
        default:
            throw createXMLException("invalid.character");
        }
    case '\'':
        // Single-quoted default value; same handling as above.
        attrDelimiter = '\'';
        nextChar();
        if (current == -1) {
            throw createXMLException("unexpected.eof");
        }
        if (current != '\'' && current != '&') {
            do {
                nextChar();
            } while (current != -1 && current != '\'' && current != '&');
        }
        switch (current) {
        case '&':
            context = ATTRIBUTE_VALUE_CONTEXT;
            return LexicalUnits.FIRST_ATTRIBUTE_FRAGMENT;
        case '\'':
            nextChar();
            return LexicalUnits.STRING;
        default:
            throw createXMLException("invalid.character");
        }
    case '#':
        // "#REQUIRED", "#IMPLIED" or "#FIXED".
        switch (nextChar()) {
        case 'R':
            return readIdentifier("EQUIRED", LexicalUnits.REQUIRED_IDENTIFIER, -1);
        case 'I':
            return readIdentifier("MPLIED", LexicalUnits.IMPLIED_IDENTIFIER, -1);
        case 'F':
            return readIdentifier("IXED", LexicalUnits.FIXED_IDENTIFIER, -1);
        default:
            throw createXMLException("invalid.character");
        }
    case '(':
        nextChar();
        context = ENUMERATION_CONTEXT;
        return LexicalUnits.LEFT_BRACE;
    default:
        return readName(LexicalUnits.NAME);
    }
}

/**
 * Returns the next lexical unit in the context of a notation.
*/
protected int nextInNotation() throws IOException, XMLException {
    switch (current) {
    case 0x9: case 0xA: case 0xD: case 0x20:
        do {
            nextChar();
        } while (current != -1 && XMLUtilities.isXMLSpace((char)current));
        return LexicalUnits.S;
    case '>':
        nextChar();
        context = DTD_DECLARATIONS_CONTEXT;
        return LexicalUnits.END_CHAR;
    case '%':
        // NOTE(review): as in nextInAttList, readName() is called without a
        // preceding nextChar() — verify intentional.
        int t = readName(LexicalUnits.PARAMETER_ENTITY_REFERENCE);
        if (current != ';') {
            throw createXMLException("malformed.parameter.entity");
        }
        nextChar();
        return t;
    case 'S':
        return readIdentifier("YSTEM", LexicalUnits.SYSTEM_IDENTIFIER, LexicalUnits.NAME);
    case 'P':
        return readIdentifier("UBLIC", LexicalUnits.PUBLIC_IDENTIFIER, LexicalUnits.NAME);
    case '"':
        attrDelimiter = '"';
        return readString();
    case '\'':
        attrDelimiter = '\'';
        return readString();
    default:
        return readName(LexicalUnits.NAME);
    }
}

/**
 * Returns the next lexical unit in the context of an entity.
 */
protected int nextInEntity() throws IOException, XMLException {
    switch (current) {
    case 0x9: case 0xA: case 0xD: case 0x20:
        do {
            nextChar();
        } while (current != -1 && XMLUtilities.isXMLSpace((char)current));
        return LexicalUnits.S;
    case '>':
        nextChar();
        context = DTD_DECLARATIONS_CONTEXT;
        return LexicalUnits.END_CHAR;
    case '%':
        nextChar();
        return LexicalUnits.PERCENT;
    case 'S':
        return readIdentifier("YSTEM", LexicalUnits.SYSTEM_IDENTIFIER, LexicalUnits.NAME);
    case 'P':
        return readIdentifier("UBLIC", LexicalUnits.PUBLIC_IDENTIFIER, LexicalUnits.NAME);
    case 'N':
        return readIdentifier("DATA", LexicalUnits.NDATA_IDENTIFIER, LexicalUnits.NAME);
    case '"':
        // Entity value in double quotes; '&' or '%' starts a nested reference,
        // which switches to the entity-value context and returns a fragment.
        attrDelimiter = '"';
        nextChar();
        if (current == -1) {
            throw createXMLException("unexpected.eof");
        }
        if (current != '"' && current != '&' && current != '%') {
            do {
                nextChar();
            } while (current != -1 && current != '"' && current != '&' && current != '%');
        }
        switch (current) {
        default:
            throw createXMLException("invalid.character");
        case '&':
        case '%':
            context = ENTITY_VALUE_CONTEXT;
            break;
        case '"':
            nextChar();
            return LexicalUnits.STRING;
        }
        return LexicalUnits.FIRST_ATTRIBUTE_FRAGMENT;
    case '\'':
        // Same as above for single quotes.
        attrDelimiter = '\'';
        nextChar();
        if (current == -1) {
            throw createXMLException("unexpected.eof");
        }
        if (current != '\'' && current != '&' && current != '%') {
            do {
                nextChar();
            } while (current != -1 && current != '\'' && current != '&' && current != '%');
        }
        switch (current) {
        default:
            throw createXMLException("invalid.character");
        case '&':
        case '%':
            context = ENTITY_VALUE_CONTEXT;
            break;
        case '\'':
            nextChar();
            return LexicalUnits.STRING;
        }
        return LexicalUnits.FIRST_ATTRIBUTE_FRAGMENT;
    default:
        return readName(LexicalUnits.NAME);
    }
}

/**
 * Returns the next lexical unit in the context of an entity value.
 */
protected int nextInEntityValue() throws IOException, XMLException {
    switch (current) {
    case '&':
        return readReference();
    case '%':
        // NOTE(review): t holds the character returned by nextChar(), not the
        // lexical unit returned by readName() — this looks suspicious compared
        // with the '%' handling in nextInElementDeclaration; confirm.
        int t = nextChar();
        readName(LexicalUnits.PARAMETER_ENTITY_REFERENCE);
        if (current != ';') {
            throw createXMLException("invalid.parameter.entity");
        }
        nextChar();
        return t;
    default:
        while (current != -1 && current != attrDelimiter && current != '&' && current != '%') {
            nextChar();
        }
        switch (current) {
        case -1:
            throw createXMLException("unexpected.eof");
        case '\'':
        case '"':
            nextChar();
            context = ENTITY_CONTEXT;
            return LexicalUnits.STRING;
        }
        return LexicalUnits.FIRST_ATTRIBUTE_FRAGMENT;
    }
}

/**
 * Returns the next lexical unit in the context of a notation type.
 */
protected int nextInNotationType() throws IOException, XMLException {
    switch (current) {
    case 0x9: case 0xA: case 0xD: case 0x20:
        do {
            nextChar();
        } while (current != -1 && XMLUtilities.isXMLSpace((char)current));
        return LexicalUnits.S;
    case '|':
        nextChar();
        return LexicalUnits.PIPE;
    case '(':
        nextChar();
        return LexicalUnits.LEFT_BRACE;
    case ')':
        // ')' ends the notation type; resume attribute-list scanning.
        nextChar();
        context = ATTLIST_CONTEXT;
        return LexicalUnits.RIGHT_BRACE;
    default:
        return readName(LexicalUnits.NAME);
    }
}

/**
 * Returns the next lexical unit in the context of an enumeration.
*/
protected int nextInEnumeration() throws IOException, XMLException {
    switch (current) {
    case 0x9: case 0xA: case 0xD: case 0x20:
        do {
            nextChar();
        } while (current != -1 && XMLUtilities.isXMLSpace((char)current));
        return LexicalUnits.S;
    case '|':
        nextChar();
        return LexicalUnits.PIPE;
    case ')':
        // ')' ends the enumeration; resume attribute-list scanning.
        nextChar();
        context = ATTLIST_CONTEXT;
        return LexicalUnits.RIGHT_BRACE;
    default:
        return readNmtoken();
    }
}

/**
 * Reads an entity or character reference. The current character
 * must be '&amp;'.
 * @return type.
 */
protected int readReference() throws IOException, XMLException {
    nextChar();
    if (current == '#') {
        // Character reference: "&#ddd;" or "&#xhhh;".
        nextChar();
        int i = 0;
        switch (current) {
        case 'x':
            // Hexadecimal digits after 'x'.
            do {
                i++;
                nextChar();
            } while ((current >= '0' && current <= '9') ||
                     (current >= 'a' && current <= 'f') ||
                     (current >= 'A' && current <= 'F'));
            break;
        default:
            do {
                i++;
                nextChar();
            } while (current >= '0' && current <= '9');
            break;
        case -1:
            throw createXMLException("unexpected.eof");
        }
        // i == 1 means no digits were consumed after '#'/'x'.
        if (i == 1 || current != ';') {
            throw createXMLException("character.reference");
        }
        nextChar();
        return LexicalUnits.CHARACTER_REFERENCE;
    } else {
        // General entity reference: "&Name;".
        int t = readName(LexicalUnits.ENTITY_REFERENCE);
        if (current != ';') {
            throw createXMLException("character.reference");
        }
        nextChar();
        return t;
    }
}

/**
 * Reads a parameter entity reference. The current character must be '%'.
 * @return type.
 */
protected int readPEReference() throws IOException, XMLException {
    nextChar();
    if (current == -1) {
        throw createXMLException("unexpected.eof");
    }
    if (!XMLUtilities.isXMLNameFirstCharacter((char)current)) {
        throw createXMLException("invalid.parameter.entity");
    }
    do {
        nextChar();
    } while (current != -1 && XMLUtilities.isXMLNameCharacter((char)current));
    if (current != ';') {
        throw createXMLException("invalid.parameter.entity");
    }
    nextChar();
    return LexicalUnits.PARAMETER_ENTITY_REFERENCE;
}

/**
 * Reads a Nmtoken. The current character must be the first character.
 * @return LexicalUnits.NMTOKEN.
*/
protected int readNmtoken() throws IOException, XMLException {
    if (current == -1) {
        throw createXMLException("unexpected.eof");
    }
    // Any run of XML name characters qualifies as a Nmtoken.
    while (XMLUtilities.isXMLNameCharacter((char)current)) {
        nextChar();
    }
    return LexicalUnits.NMTOKEN;
}

/**
 * Sets the value of the current char to the next character or -1 if the
 * end of stream has been reached.
 */
protected int nextChar() throws IOException {
    current = reader.read();
    if (current == -1) {
        return current;
    }
    // Every character read is also appended to `buffer` so the text of the
    // current lexical unit can be recovered; grow by 1.5x when full.
    if (position == buffer.length) {
        char[] t = new char[position * 3 / 2];
        for (int i = 0; i < position; i++) {
            t[i] = buffer[i];
        }
        buffer = t;
    }
    return buffer[position++] = (char)current;
}

/**
 * Returns an XMLException initialized with the given message key.
 */
protected XMLException createXMLException(String message) {
    String m;
    try {
        // Localize the message with the current line/column; fall back to the
        // raw key if no resource bundle entry exists.
        m = formatMessage(message,
                          new Object[] { new Integer(reader.getLine()),
                                         new Integer(reader.getColumn()) });
    } catch (MissingResourceException e) {
        m = message;
    }
    return new XMLException(m);
}
// End of the enclosing scanner class.
}
package com.progress.codeshare.esbservice.db;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.Statement;
import java.util.Iterator;
import java.util.Map;

import com.sonicsw.xq.*;
import com.sonicsw.xq.service.sj.MessageUtils;

/**
 * Sonic ESB service that executes a SQL query (taken from the {@code sqlFile}
 * parameter) against a JDBC data source and, in "Result"/"Dispatch API" mode,
 * dispatches one outbound message per result-set row, each carrying the row
 * rendered as a small XML document.
 */
public class DBService implements XQService {

    // This is the XQLog (the container's logging mechanism).
    private XQLog m_xqLog = null;

    // Log prefix that helps identify this service during logging.
    // NOTE(review): static, so it is shared by every DBService instance in
    // the container — the most recently initialized instance wins.
    private static String m_logPrefix = "";

    // These hold version information.
    private static int s_major = 1;
    private static int s_minor = 0;
    private static int s_buildNumber = 0;

    private static final String INPUT_RESULT = "Result";
    private static final String MODE_DISPATCH_API = "Dispatch API";
    private static final String NAMESPACE_URI = "http://www.progress.com/codeshare/esbservice/db";
    private static final String NAMESPACE_URI_DEFAULT_PROGRESS = "http://www.sonicsw.com/esb/service/dbservice";
    private static final String PARAM_NAME_INPUT = "input";
    private static final String PARAM_NAME_JDBC_DRIVER = "jdbcDriver";
    private static final String PARAM_NAME_MODE = "mode";
    private static final String PARAM_NAME_DEFAULT_PROGRESS = "defaultProgress";
    private static final String PARAM_NAME_PASSWORD = "password";
    private static final String PARAM_NAME_SQL_FILE = "sqlFile";
    private static final String PARAM_NAME_URL = "url";
    private static final String PARAM_NAME_USERNAME = "username";

    // JDBC connection settings captured from the init parameters.
    private String jdbcDriver;
    private String password;
    private String url;
    private String username;

    /**
     * Constructor for a DBService.
     */
    public DBService() {
    }

    /**
     * Initialize the XQService by processing its initialization parameters.
     *
     * <p>
     * This method implements a required XQService method.
     *
     * @param initialContext
     *            The Initial Service Context provides access to:<br>
     *            <ul>
     *            <li>The configuration parameters for this instance of the
     *            DBServiceType.</li>
     *            <li>The XQLog for this instance of the DBServiceType.</li>
     *            </ul>
     * @exception XQServiceException
     *                Used in the event of some error.
     */
    public void init(XQInitContext initialContext) throws XQServiceException {
        XQParameters params = initialContext.getParameters();
        m_xqLog = initialContext.getLog();
        setLogPrefix(params);

        m_xqLog.logInformation(m_logPrefix + " Initializing ...");
        writeStartupMessage(params);
        writeParameters(params);

        // Capture the JDBC connection settings for use at service time.
        this.jdbcDriver = params.getParameter(PARAM_NAME_JDBC_DRIVER,
                XQConstants.PARAM_STRING);
        this.password = params.getParameter(PARAM_NAME_PASSWORD,
                XQConstants.PARAM_STRING);
        this.url = params.getParameter(PARAM_NAME_URL,
                XQConstants.PARAM_STRING);
        this.username = params.getParameter(PARAM_NAME_USERNAME,
                XQConstants.PARAM_STRING);

        m_xqLog.logInformation(m_logPrefix + " Initialized ...");
    }

    /**
     * Core processing: for each incoming message, run the configured query and
     * dispatch one new message per result-set row.
     *
     * @param ctx the service context supplying parameters and messages
     * @throws XQServiceException wrapping any JDBC or messaging failure
     */
    private void dbServiceServiceContext(XQServiceContext ctx)
            throws XQServiceException {
        try {
            final XQParameters params = ctx.getParameters();

            final String input = params.getParameter(PARAM_NAME_INPUT,
                    XQConstants.PARAM_STRING);
            final String mode = params.getParameter(PARAM_NAME_MODE,
                    XQConstants.PARAM_STRING);
            final boolean defaultProgress = params.getBooleanParameter(
                    PARAM_NAME_DEFAULT_PROGRESS, XQConstants.PARAM_STRING);

            Connection conn = null;

            /* Ensure that the JDBC driver is loaded */
            Class.forName(jdbcDriver);

            Statement stmt = null;

            final String sqlFile = params.getParameter(PARAM_NAME_SQL_FILE,
                    XQConstants.PARAM_STRING);

            ResultSet rs = null;

            final XQMessageFactory msgFactory = ctx.getMessageFactory();

            // Constant-first equals() avoids an NPE when either parameter is
            // absent (getParameter may return null).
            if (INPUT_RESULT.equals(input) && MODE_DISPATCH_API.equals(mode)) {
                final XQEnvelopeFactory envFactory = ctx.getEnvelopeFactory();
                final XQDispatch dispatcher = ctx.getDispatcher();

                while (ctx.hasNextIncoming()) {
                    try {
                        /* Connect to the DB */
                        conn = DriverManager.getConnection(url, username,
                                password);
                        stmt = conn.createStatement();

                        /* Execute the query */
                        rs = stmt.executeQuery(sqlFile);

                        final XQEnvelope origEnv = ctx.getNextIncoming();
                        final XQMessage origMsg = origEnv.getMessage();
                        final ResultSetMetaData rsMetaData = rs.getMetaData();
                        // Hoisted: the column count does not change per row.
                        final int columnCount = rsMetaData.getColumnCount();

                        while (rs.next()) {
                            final XQEnvelope newEnv = envFactory
                                    .createDefaultEnvelope();
                            final XQMessage newMsg = msgFactory.createMessage();

                            /*
                             * Copy all headers from the original message to the
                             * new message
                             */
                            MessageUtils.copyAllHeaders(origMsg, newMsg);

                            /* Clear the Reply-To header to avoid failing back */
                            newMsg.setReplyTo(null);

                            final XQPart newPart = newMsg.createPart();
                            newPart.setContentId("Result-0");

                            final StringBuilder builder = new StringBuilder();
                            builder
                                    .append("<?xml version=\"1.0\" encoding=\"UTF-8\"?>");

                            if (!defaultProgress) {
                                builder.append("<db:result xmlns:db=\""
                                        + NAMESPACE_URI + "\">");
                                builder.append("<db:resultSet>");
                                builder.append("<db:row>");
                            } else {
                                builder
                                        .append("<db:result xmlns:db=\""
                                                + NAMESPACE_URI_DEFAULT_PROGRESS
                                                + "\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\""
                                                + " xsi:schemaLocation=\"http://www.sonicsw.com/esb/service/dbservice sonicfs:///System/Schemas/esb/service/DBService.xsd\">");
                                builder
                                        .append("<db:resultSet version=\"1.1\">");
                                builder.append("<db:row>");
                            }

                            for (int i = 1; i <= columnCount; i++) {
                                // Hoisted: avoid computing toUpperCase() twice
                                // per column.
                                final String tagName = rsMetaData
                                        .getColumnName(i).toUpperCase();

                                if (!defaultProgress) {
                                    builder.append("<db:" + tagName + ">"
                                            + rs.getString(i) + "</db:"
                                            + tagName + ">");
                                } else {
                                    builder.append("<" + tagName + ">"
                                            + rs.getString(i) + "</" + tagName
                                            + ">");
                                }
                            }

                            builder.append("</db:row>");
                            builder.append("</db:resultSet>");
                            builder.append("</db:result>");

                            newPart.setContent(builder.toString(),
                                    XQConstants.CONTENT_TYPE_XML);
                            newMsg.addPart(newPart);
                            newEnv.setMessage(newMsg);
                            dispatcher.dispatch(newEnv);
                        }
                    } finally {
                        // Close in reverse order of acquisition; nested
                        // finally blocks guarantee each close is attempted.
                        try {
                            if (rs != null)
                                rs.close();
                        } finally {
                            try {
                                if (stmt != null)
                                    stmt.close();
                            } finally {
                                if (conn != null)
                                    conn.close();
                            }
                        }
                    }
                }
            }
        } catch (final Exception e) {
            throw new XQServiceException(e);
        }
    }

    /**
     * Handle the arrival of XQMessages in the INBOX.
     *
     * <p>
     * This method implements a required XQService method.
     *
     * @param ctx
     *            The service context.
     * @exception XQServiceException
     *                Thrown in the event of a processing error.
     */
    public void service(XQServiceContext ctx) throws XQServiceException {
        if (ctx == null)
            throw new XQServiceException("Service Context cannot be null.");
        else {
            dbServiceServiceContext(ctx);
        }
    }

    /**
     * Clean up and get ready to destroy the service.
     *
     * <p>
     * This method implements a required XQService method.
     */
    public void destroy() {
        m_xqLog.logInformation(m_logPrefix + "Destroying...");
        m_xqLog.logInformation(m_logPrefix + "Destroyed...");
    }

    /**
     * Called by the container on container start.
     *
     * <p>
     * This method implements a required XQServiceEx method.
     */
    public void start() {
        m_xqLog.logInformation(m_logPrefix + "Starting...");
        m_xqLog.logInformation(m_logPrefix + "Started...");
    }

    /**
     * Called by the container on container stop.
     *
     * <p>
     * This method implements a required XQServiceEx method.
     */
    public void stop() {
        m_xqLog.logInformation(m_logPrefix + "Stopping...");
        m_xqLog.logInformation(m_logPrefix + "Stopped...");
    }

    /**
     * Derives the log prefix from the configured service name.
     */
    protected void setLogPrefix(XQParameters params) {
        String serviceName = params.getParameter(
                XQConstants.PARAM_SERVICE_NAME, XQConstants.PARAM_STRING);
        m_logPrefix = "[ " + serviceName + " ]";
    }

    /**
     * Provide access to the service implemented version.
     */
    protected String getVersion() {
        return s_major + "." + s_minor + ". build " + s_buildNumber;
    }

    /**
     * Writes a standard service startup message to the log.
     */
    protected void writeStartupMessage(XQParameters params) {
        // StringBuilder: single-threaded use, consistent with the rest of
        // this class (no need for StringBuffer's synchronization).
        final StringBuilder buffer = new StringBuilder();

        String serviceTypeName = params.getParameter(
                XQConstants.SERVICE_PARAM_SERVICE_TYPE,
                XQConstants.PARAM_STRING);

        buffer.append("\n\n");
        buffer.append("\t\t " + serviceTypeName + "\n ");

        buffer.append("\t\t Version ");
        buffer.append(" " + getVersion());
        buffer.append("\n");

        buffer.append("\t\t Copyright (c) 2008, Progress Sonic Software Corporation.");
        buffer.append("\n");

        buffer.append("\t\t All rights reserved. ");
        buffer.append("\n");

        m_xqLog.logInformation(buffer.toString());
    }

    /**
     * Writes parameters to log.
     */
    protected void writeParameters(XQParameters params) {
        final Map map = params.getAllInfo();
        final Iterator iter = map.values().iterator();

        while (iter.hasNext()) {
            final XQParameterInfo info = (XQParameterInfo) iter.next();

            // XML and string parameters were logged identically in two
            // duplicated branches; merged into one.
            if (info.getType() == XQConstants.PARAM_XML
                    || info.getType() == XQConstants.PARAM_STRING) {
                m_xqLog.logDebug(m_logPrefix + "Parameter Name = "
                        + info.getName());
            }

            if (info.getRef() != null) {
                m_xqLog.logDebug(m_logPrefix + "Parameter Reference = "
                        + info.getRef());

                // If this is too verbose
                // /then a simple change from logInformation to logDebug
                // will ensure file content is not displayed
                // unless the logging level is set to debug for the ESB
                // Container.
                m_xqLog.logDebug(m_logPrefix
                        + "----Parameter Value Start--------");
                m_xqLog.logDebug("\n" + info.getValue() + "\n");
                m_xqLog.logDebug(m_logPrefix
                        + "----Parameter Value End--------");
            } else {
                m_xqLog.logDebug(m_logPrefix + "Parameter Value = "
                        + info.getValue());
            }
        }
    }
}
/* * Copyright 2017-present Open Networking Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onosproject.drivers.server; import org.onosproject.drivers.server.behavior.CpuStatisticsDiscovery; import org.onosproject.drivers.server.behavior.MonitoringStatisticsDiscovery; import org.onosproject.drivers.server.devices.CpuDevice; import org.onosproject.drivers.server.devices.CpuVendor; import org.onosproject.drivers.server.devices.nic.NicDevice; import org.onosproject.drivers.server.devices.nic.NicRxFilter; import org.onosproject.drivers.server.devices.nic.NicRxFilter.RxFilter; import org.onosproject.drivers.server.devices.ServerDeviceDescription; import org.onosproject.drivers.server.devices.RestServerSBDevice; import org.onosproject.drivers.server.stats.CpuStatistics; import org.onosproject.drivers.server.stats.MonitoringStatistics; import org.onosproject.drivers.server.stats.TimingStatistics; import org.onosproject.drivers.server.impl.devices.DefaultCpuDevice; import org.onosproject.drivers.server.impl.devices.DefaultNicDevice; import org.onosproject.drivers.server.impl.devices.DefaultRestServerSBDevice; import org.onosproject.drivers.server.impl.devices.DefaultServerDeviceDescription; import org.onosproject.drivers.server.impl.stats.DefaultCpuStatistics; import org.onosproject.drivers.server.impl.stats.DefaultMonitoringStatistics; import org.onosproject.drivers.server.impl.stats.DefaultTimingStatistics; import org.onlab.packet.ChassisId; import 
org.onosproject.net.AnnotationKeys; import org.onosproject.net.Device; import org.onosproject.net.DeviceId; import org.onosproject.net.DefaultAnnotations; import org.onosproject.net.behaviour.DevicesDiscovery; import org.onosproject.net.device.DeviceDescription; import org.onosproject.net.device.DeviceDescriptionDiscovery; import org.onosproject.net.device.DefaultPortStatistics; import org.onosproject.net.device.DefaultPortDescription; import org.onosproject.net.device.PortDescription; import org.onosproject.net.device.PortStatistics; import org.onosproject.net.device.PortStatisticsDiscovery; import org.onosproject.net.Port; import org.onosproject.net.PortNumber; import org.onosproject.protocol.rest.RestSBDevice; import org.onosproject.protocol.rest.RestSBDevice.AuthenticationScheme; import org.slf4j.Logger; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.node.ObjectNode; import com.google.common.base.Strings; import com.google.common.collect.Lists; import com.google.common.collect.ImmutableList; import javax.ws.rs.ProcessingException; import java.io.InputStream; import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeSet; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import static org.slf4j.LoggerFactory.getLogger; /** * Discovers the device details of * REST-based commodity server devices. 
*/
public class ServerDevicesDiscovery
        extends BasicServerDriver
        implements DevicesDiscovery, DeviceDescriptionDiscovery,
                   PortStatisticsDiscovery, CpuStatisticsDiscovery,
                   MonitoringStatisticsDiscovery {

    private final Logger log = getLogger(getClass());

    /**
     * Resource endpoints of the server agent (REST server-side).
     */
    private static final String RESOURCE_DISCOVERY_URL = BASE_URL + SLASH + "resources";
    private static final String GLOBAL_STATS_URL = BASE_URL + SLASH + "stats";
    // Callers append SLASH plus a service-chain ID to this prefix.
    private static final String SERVICE_CHAINS_STATS_URL = BASE_URL + SLASH + "chains_stats"; // + /ID

    /**
     * Parameters to be exchanged with the server's agent.
     */
    private static final String PARAM_MANUFACTURER = "manufacturer";
    // NOTE(review): these two constants are named *_VENDOR but carry
    // "hwVersion"/"swVersion" JSON keys — confirm against the agent's schema.
    private static final String PARAM_HW_VENDOR = "hwVersion";
    private static final String PARAM_SW_VENDOR = "swVersion";
    private static final String PARAM_SERIAL = "serial";
    private static final String PARAM_TIMING_STATS = "timingStats";
    private static final String PARAM_TIMING_AUTOSCALE = "autoScaleTimingStats";

    private static final String NIC_PARAM_NAME = "name";
    private static final String NIC_PARAM_PORT_INDEX = "index";
    private static final String NIC_PARAM_PORT_TYPE = "portType";
    private static final String NIC_PARAM_PORT_TYPE_FIBER = "fiber";
    private static final String NIC_PARAM_PORT_TYPE_COPPER = "copper";
    private static final String NIC_PARAM_SPEED = "speed";
    private static final String NIC_PARAM_STATUS = "status";
    private static final String NIC_PARAM_HW_ADDR = "hwAddr";

    /**
     * NIC statistics.
     */
    private static final String NIC_STATS_TX_COUNT = "txCount";
    private static final String NIC_STATS_TX_BYTES = "txBytes";
    private static final String NIC_STATS_TX_DROPS = "txDropped";
    private static final String NIC_STATS_TX_ERRORS = "txErrors";
    private static final String NIC_STATS_RX_COUNT = "rxCount";
    private static final String NIC_STATS_RX_BYTES = "rxBytes";
    private static final String NIC_STATS_RX_DROPS = "rxDropped";
    private static final String NIC_STATS_RX_ERRORS = "rxErrors";

    /**
     * CPU statistics.
     */
    private static final String CPU_PARAM_ID = "id";
    private static final String CPU_PARAM_VENDOR = "vendor";
    private static final String CPU_PARAM_FREQUENCY = "frequency";
    private static final String CPU_PARAM_LOAD = "load";
    private static final String CPU_PARAM_QUEUE = "queue";
    private static final String CPU_PARAM_STATUS = "busy";
    private static final String CPU_PARAM_THROUGHPUT = "throughput";
    private static final String CPU_PARAM_LATENCY = "latency";
    private static final String MON_PARAM_UNIT = "unit";
    private static final String MON_PARAM_BUSY_CPUS = "busyCpus";
    private static final String MON_PARAM_FREE_CPUS = "freeCpus";
    private static final String MON_PARAM_MIN = "min";
    private static final String MON_PARAM_AVERAGE = "average";
    private static final String MON_PARAM_MAX = "max";

    /**
     * Timing statistics.
     */
    private static final String TIMING_PARAM_PARSE = "parseTime";
    private static final String TIMING_PARAM_LAUNCH = "launchTime";
    private static final String TIMING_PARAM_DEPLOY = "deployTime";
    private static final String TIMING_PARAM_AUTOSCALE = "autoScaleTime";

    /**
     * Auxiliary constants.
     */
    private static final short DISCOVERY_RETRIES = 3;
    private static final String CPU_VENDOR_NULL =
        "Unsupported CPU vendor" +
        " Choose one in: " + BasicServerDriver.enumTypesToString(
            CpuVendor.class);
    private static final String NIC_RX_FILTER_NULL =
        "Unsupported NIC Rx filter" +
        " Choose one in: " + BasicServerDriver.enumTypesToString(
            RxFilter.class);

    /**
     * Port types that usually appear in commodity servers.
     */
    public static final Map<String, Port.Type> PORT_TYPE_MAP =
        Collections.unmodifiableMap(
            // Double-brace initialization (anonymous HashMap subclass);
            // kept as-is to preserve the public constant's exact behavior.
            new HashMap<String, Port.Type>() {
                {
                    put(NIC_PARAM_PORT_TYPE_FIBER,  Port.Type.FIBER);
                    put(NIC_PARAM_PORT_TYPE_COPPER, Port.Type.COPPER);
                }
            }
        );

    /**
     * Constructs server device discovery.
     */
    public ServerDevicesDiscovery() {
        super();
        log.debug("Started");
    }

    @Override
    public Set<DeviceId> deviceIds() {
        // Set of devices to return
        Set<DeviceId> devices = new HashSet<DeviceId>();

        // This driver handles exactly one device: the one bound to its handler.
        DeviceId deviceId = getHandler().data().deviceId();
        checkNotNull(deviceId, DEVICE_ID_NULL);
        devices.add(deviceId);

        return devices;
    }

    @Override
    public DeviceDescription deviceDetails(DeviceId deviceId) {
        return getDeviceDetails(deviceId);
    }

    @Override
    public DeviceDescription discoverDeviceDetails() {
        // A null device ID means "the device of the current handler".
        return getDeviceDetails(null);
    }

    /**
     * Query a server to retrieve its features.
* * @param deviceId the device ID to be queried * @return a DeviceDescription with the device's features */ private DeviceDescription getDeviceDetails(DeviceId deviceId) { // Retrieve the device ID, if null given if (deviceId == null) { deviceId = getHandler().data().deviceId(); checkNotNull(deviceId, DEVICE_ID_NULL); } // Get the device RestSBDevice device = getController().getDevice(deviceId); checkNotNull(device, DEVICE_NULL); // Hit the path that provides the server's resources InputStream response = null; try { response = getController().get(deviceId, RESOURCE_DISCOVERY_URL, JSON); } catch (ProcessingException pEx) { log.error("Failed to discover the device details of: {}", deviceId); return null; } // Load the JSON into objects ObjectMapper mapper = new ObjectMapper(); Map<String, Object> jsonMap = null; JsonNode jsonNode = null; ObjectNode objNode = null; try { jsonMap = mapper.readValue(response, Map.class); jsonNode = mapper.convertValue(jsonMap, JsonNode.class); objNode = (ObjectNode) jsonNode; } catch (IOException ioEx) { log.error("Failed to discover the device details of: {}", deviceId); return null; } if (jsonMap == null) { log.error("Failed to discover the device details of: {}", deviceId); return null; } // Get all the attributes String id = get(jsonNode, BasicServerDriver.PARAM_ID); String vendor = get(jsonNode, PARAM_MANUFACTURER); String hw = get(jsonNode, PARAM_HW_VENDOR); String sw = get(jsonNode, PARAM_SW_VENDOR); String serial = get(jsonNode, PARAM_SERIAL); // CPUs are composite attributes Set<CpuDevice> cpuSet = new HashSet<CpuDevice>(); JsonNode cpuNode = objNode.path(BasicServerDriver.PARAM_CPUS); // Construct CPU objects for (JsonNode cn : cpuNode) { ObjectNode cpuObjNode = (ObjectNode) cn; // All the CPU attributes int cpuId = cpuObjNode.path(CPU_PARAM_ID).asInt(); String cpuVendorStr = get(cn, CPU_PARAM_VENDOR); long cpuFrequency = cpuObjNode.path(CPU_PARAM_FREQUENCY).asLong(); // Verify that this is a valid vendor CpuVendor cpuVendor = 
CpuVendor.getByName(cpuVendorStr); checkNotNull(cpuVendor, CPU_VENDOR_NULL); // Construct a CPU device CpuDevice cpu = new DefaultCpuDevice(cpuId, cpuVendor, cpuFrequency); // Add it to the set cpuSet.add(cpu); } // NICs are composite attributes too Set<NicDevice> nicSet = new HashSet<NicDevice>(); JsonNode nicNode = objNode.path(PARAM_NICS); DefaultAnnotations.Builder annotations = DefaultAnnotations.builder(); // Construct NIC objects for (JsonNode nn : nicNode) { ObjectNode nicObjNode = (ObjectNode) nn; // All the NIC attributes String nicName = get(nn, NIC_PARAM_NAME); long nicIndex = nicObjNode.path(NIC_PARAM_PORT_INDEX).asLong(); long speed = nicObjNode.path(NIC_PARAM_SPEED).asLong(); String portTypeStr = get(nn, NIC_PARAM_PORT_TYPE); Port.Type portType = PORT_TYPE_MAP.get(portTypeStr); if (portType == null) { throw new IllegalArgumentException( portTypeStr + " is not a valid port type for NIC " + nicName); } boolean status = nicObjNode.path(NIC_PARAM_STATUS).asInt() > 0; String hwAddr = get(nn, NIC_PARAM_HW_ADDR); JsonNode tagNode = nicObjNode.path(BasicServerDriver.NIC_PARAM_RX_FILTER); if (tagNode == null) { throw new IllegalArgumentException( "The Rx filters of NIC " + nicName + " are not reported"); } // Convert the JSON list into an array of strings List<String> rxFilters = null; try { rxFilters = mapper.readValue(tagNode.traverse(), new TypeReference<ArrayList<String>>() { }); } catch (IOException ioEx) { continue; } // Parse the array of strings and create an RxFilter object NicRxFilter rxFilterMechanism = new NicRxFilter(); for (String s : rxFilters) { // Verify that this is a valid Rx filter RxFilter rf = RxFilter.getByName(s); checkNotNull(rf, NIC_RX_FILTER_NULL); rxFilterMechanism.addRxFilter(rf); } // Store NIC name to number mapping as an annotation annotations.set(nicName, Long.toString(nicIndex)); // Construct a NIC device for this server NicDevice nic = new DefaultNicDevice( nicName, nicIndex, portType, speed, status, hwAddr, 
rxFilterMechanism); // Add it to the set nicSet.add(nic); } // Construct a complete server device object. // Lists of NICs and CPUs extend the information // already in RestSBDevice (parent class). RestServerSBDevice dev = new DefaultRestServerSBDevice( device.ip(), device.port(), device.username(), device.password(), device.protocol(), device.url(), device.isActive(), device.testUrl().orElse(""), vendor, hw, sw, AuthenticationScheme.BASIC, "", cpuSet, nicSet ); checkNotNull(dev, DEVICE_NULL); // Set alive raiseDeviceReconnect(dev); // Updates the controller with the complete device information getController().removeDevice(deviceId); getController().addDevice((RestSBDevice) dev); // Create a description for this server device ServerDeviceDescription desc = null; try { desc = new DefaultServerDeviceDescription( new URI(id), Device.Type.SERVER, vendor, hw, sw, serial, new ChassisId(), cpuSet, nicSet, annotations.build() ); } catch (URISyntaxException uEx) { log.error("Failed to create a server device description for: {}", deviceId); return null; } log.info("Device's {} details sent to the controller", deviceId); return desc; } @Override public List<PortDescription> discoverPortDetails() { // Retrieve the device ID DeviceId deviceId = getHandler().data().deviceId(); checkNotNull(deviceId, DEVICE_ID_NULL); // .. 
and object RestServerSBDevice device = null; try { device = (RestServerSBDevice) getController().getDevice(deviceId); } catch (ClassCastException ccEx) { log.error("Failed to discover ports for device {}", deviceId); return Collections.EMPTY_LIST; } if (device == null) { log.error("No device with ID {} is available for port discovery", deviceId); return Collections.EMPTY_LIST; } if ((device.nics() == null) || (device.nics().size() == 0)) { log.error("No ports available on {}", deviceId); return Collections.EMPTY_LIST; } // List of port descriptions to return List<PortDescription> portDescriptions = Lists.newArrayList(); // Sorted list of NIC ports Set<NicDevice> nics = new TreeSet(device.nics()); // Iterate through the NICs of this device to populate the list for (NicDevice nic : nics) { // Include the name of this device as an annotation DefaultAnnotations.Builder annotations = DefaultAnnotations.builder() .set(AnnotationKeys.PORT_NAME, nic.name()); // Create a port description and add it to the list portDescriptions.add( DefaultPortDescription.builder() .withPortNumber(PortNumber.portNumber(nic.portNumber(), nic.name())) .isEnabled(nic.status()) .type(nic.portType()) .portSpeed(nic.speed()) .annotations(annotations.build()) .build()); log.info("Port discovery on device {}: NIC {} is {} at {} Mbps", deviceId, nic.portNumber(), nic.status() ? "up" : "down", nic.speed()); } return ImmutableList.copyOf(portDescriptions); } @Override public Collection<PortStatistics> discoverPortStatistics() { // Retrieve the device ID DeviceId deviceId = getHandler().data().deviceId(); checkNotNull(deviceId, DEVICE_ID_NULL); // Get port statistics for this device return getPortStatistics(deviceId); } /** * Query a server to retrieve its port statistics. 
* * @param deviceId the device ID to be queried * @return list of (per port) PortStatistics */ private Collection<PortStatistics> getPortStatistics(DeviceId deviceId) { // Get global monitoring statistics MonitoringStatistics monStats = getGlobalMonitoringStatistics(deviceId); if (monStats == null) { return Collections.EMPTY_LIST; } // Filter out the NIC statistics Collection<PortStatistics> portStats = monStats.nicStatisticsAll(); if (portStats == null) { return Collections.EMPTY_LIST; } log.debug("Port statistics: {}", portStats.toString()); return portStats; } @Override public Collection<CpuStatistics> discoverCpuStatistics() { // Retrieve the device ID DeviceId deviceId = getHandler().data().deviceId(); checkNotNull(deviceId, DEVICE_ID_NULL); // Get CPU statistics for this device return getCpuStatistics(deviceId); } /** * Query a server to retrieve its CPU statistics. * * @param deviceId the device ID to be queried * @return list of (per core) CpuStatistics */ public Collection<CpuStatistics> getCpuStatistics(DeviceId deviceId) { // Get global monitoring statistics MonitoringStatistics monStats = getGlobalMonitoringStatistics(deviceId); if (monStats == null) { return Collections.EMPTY_LIST; } // Filter out the CPU statistics Collection<CpuStatistics> cpuStats = monStats.cpuStatisticsAll(); if (cpuStats == null) { return Collections.EMPTY_LIST; } log.debug("CPU statistics: {}", cpuStats.toString()); return cpuStats; } @Override public MonitoringStatistics discoverGlobalMonitoringStatistics() { // Retrieve the device ID DeviceId deviceId = getHandler().data().deviceId(); checkNotNull(deviceId, DEVICE_ID_NULL); // Get global monitoring statistics for this device return getGlobalMonitoringStatistics(deviceId); } /** * Query a server to retrieve its global monitoring statistics. 
* * @param deviceId the device ID to be queried * @return global monitoring statistics */ public MonitoringStatistics getGlobalMonitoringStatistics(DeviceId deviceId) { // Monitoring statistics to return MonitoringStatistics monStats = null; RestServerSBDevice device = null; try { device = (RestServerSBDevice) getController().getDevice(deviceId); } catch (ClassCastException ccEx) { log.error("Failed to retrieve global monitoring statistics from device {}", deviceId); return monStats; } if ((device == null) || (!device.isActive())) { return monStats; } // Hit the path that provides the server's global resources InputStream response = null; try { response = getController().get(deviceId, GLOBAL_STATS_URL, JSON); } catch (ProcessingException pEx) { log.error("Failed to retrieve global monitoring statistics from device {}", deviceId); raiseDeviceDisconnect(device); return monStats; } // Load the JSON into objects ObjectMapper mapper = new ObjectMapper(); Map<String, Object> jsonMap = null; ObjectNode objNode = null; try { jsonMap = mapper.readValue(response, Map.class); JsonNode jsonNode = mapper.convertValue(jsonMap, JsonNode.class); objNode = (ObjectNode) jsonNode; } catch (IOException ioEx) { log.error("Failed to retrieve global monitoring statistics from device {}", deviceId); raiseDeviceDisconnect(device); return monStats; } if (jsonMap == null) { log.error("Failed to retrieve global monitoring statistics from device {}", deviceId); raiseDeviceDisconnect(device); return monStats; } // Get high-level CPU statistics int busyCpus = objNode.path(MON_PARAM_BUSY_CPUS).asInt(); int freeCpus = objNode.path(MON_PARAM_FREE_CPUS).asInt(); // Get a list of CPU statistics per core Collection<CpuStatistics> cpuStats = parseCpuStatistics(deviceId, objNode); // Get a list of port statistics Collection<PortStatistics> nicStats = parseNicStatistics(deviceId, objNode); // Get zero timing statistics TimingStatistics timinsgStats = getZeroTimingStatistics(); // Ready to construct the 
grand object DefaultMonitoringStatistics.Builder statsBuilder = DefaultMonitoringStatistics.builder(); statsBuilder.setDeviceId(deviceId) .setTimingStatistics(timinsgStats) .setCpuStatistics(cpuStats) .setNicStatistics(nicStats); monStats = statsBuilder.build(); // When a device reports monitoring data, it means it is alive raiseDeviceReconnect(device); log.debug("Global monitoring statistics: {}", monStats.toString()); return monStats; } @Override public MonitoringStatistics discoverMonitoringStatistics(URI tcId) { // Retrieve the device ID DeviceId deviceId = getHandler().data().deviceId(); checkNotNull(deviceId, DEVICE_ID_NULL); // Get resource-specific monitoring statistics for this device return getMonitoringStatistics(deviceId, tcId); } /** * Query a server to retrieve monitoring statistics for a * specific resource (i.e., traffic class). * * @param deviceId the device ID to be queried * @param tcId the ID of the traffic class to be monitored * @return resource-specific monitoring statistics */ private MonitoringStatistics getMonitoringStatistics(DeviceId deviceId, URI tcId) { // Monitoring statistics to return MonitoringStatistics monStats = null; RestServerSBDevice device = null; try { device = (RestServerSBDevice) getController().getDevice(deviceId); } catch (ClassCastException ccEx) { log.error("Failed to retrieve monitoring statistics from device {}", deviceId); return monStats; } if (device == null) { return monStats; } // Create a resource-specific URL String scUrl = SERVICE_CHAINS_STATS_URL + SLASH + tcId.toString(); // Hit the path that provides the server's specific resources InputStream response = null; try { response = getController().get(deviceId, scUrl, JSON); } catch (ProcessingException pEx) { log.error("Failed to retrieve monitoring statistics from device {}", deviceId); raiseDeviceDisconnect(device); return monStats; } // Load the JSON into objects ObjectMapper mapper = new ObjectMapper(); Map<String, Object> jsonMap = null; JsonNode 
jsonNode = null; ObjectNode objNode = null; try { jsonMap = mapper.readValue(response, Map.class); jsonNode = mapper.convertValue(jsonMap, JsonNode.class); objNode = (ObjectNode) jsonNode; } catch (IOException ioEx) { log.error("Failed to retrieve monitoring statistics from device {}", deviceId); raiseDeviceDisconnect(device); return monStats; } if (jsonMap == null) { log.error("Failed to retrieve monitoring statistics from device {}", deviceId); raiseDeviceDisconnect(device); return monStats; } // Get the ID of the traffic class String id = get(jsonNode, PARAM_ID); // And verify that this is the traffic class we want to monitor if (!id.equals(tcId.toString())) { throw new IllegalStateException( "Failed to retrieve monitoring data for traffic class " + tcId + ". Traffic class ID does not agree." ); } // Get a list of CPU statistics per core Collection<CpuStatistics> cpuStats = parseCpuStatistics(deviceId, objNode); // Get a list of port statistics Collection<PortStatistics> nicStats = parseNicStatistics(deviceId, objNode); // Get timing statistics TimingStatistics timinsgStats = parseTimingStatistics(objNode); // Ready to construct the grand object DefaultMonitoringStatistics.Builder statsBuilder = DefaultMonitoringStatistics.builder(); statsBuilder.setDeviceId(deviceId) .setTimingStatistics(timinsgStats) .setCpuStatistics(cpuStats) .setNicStatistics(nicStats); monStats = statsBuilder.build(); // When a device reports monitoring data, it means it is alive raiseDeviceReconnect(device); log.debug("Monitoring statistics: {}", monStats.toString()); return monStats; } /** * Parse the input JSON object, looking for CPU-related * statistics. Upon success, construct and return a list * of CPU statistics objects. 
* * @param deviceId the device ID that sent the JSON object * @param objNode input JSON node with CPU statistics information * @return list of (per core) CpuStatistics */ private Collection<CpuStatistics> parseCpuStatistics(DeviceId deviceId, JsonNode objNode) { if ((deviceId == null) || (objNode == null)) { return Collections.EMPTY_LIST; } Collection<CpuStatistics> cpuStats = Lists.newArrayList(); JsonNode cpuNode = objNode.path(BasicServerDriver.PARAM_CPUS); for (JsonNode cn : cpuNode) { ObjectNode cpuObjNode = (ObjectNode) cn; // CPU statistics builder DefaultCpuStatistics.Builder cpuBuilder = DefaultCpuStatistics.builder(); // Throughput statistics are optional JsonNode throughputNode = cpuObjNode.get(CPU_PARAM_THROUGHPUT); if (throughputNode != null) { String throughputUnit = get(throughputNode, MON_PARAM_UNIT); if (!Strings.isNullOrEmpty(throughputUnit)) { cpuBuilder.setThroughputUnit(throughputUnit); } float averageThroughput = (float) 0; if (throughputNode.get(MON_PARAM_AVERAGE) != null) { averageThroughput = throughputNode.path(MON_PARAM_AVERAGE).floatValue(); } cpuBuilder.setAverageThroughput(averageThroughput); } // Latency statistics are optional JsonNode latencyNode = cpuObjNode.get(CPU_PARAM_LATENCY); if (latencyNode != null) { String latencyUnit = get(latencyNode, MON_PARAM_UNIT); if (!Strings.isNullOrEmpty(latencyUnit)) { cpuBuilder.setLatencyUnit(latencyUnit); } float minLatency = (float) 0; if (latencyNode.get(MON_PARAM_MIN) != null) { minLatency = latencyNode.path(MON_PARAM_MIN).floatValue(); } float averageLatency = (float) 0; if (latencyNode.get(MON_PARAM_AVERAGE) != null) { averageLatency = latencyNode.path(MON_PARAM_AVERAGE).floatValue(); } float maxLatency = (float) 0; if (latencyNode.get(MON_PARAM_MAX) != null) { maxLatency = latencyNode.path(MON_PARAM_MAX).floatValue(); } cpuBuilder.setMinLatency(minLatency) .setAverageLatency(averageLatency) .setMaxLatency(maxLatency); } // CPU ID with its load and status int cpuId = 
cpuObjNode.path(CPU_PARAM_ID).asInt(); float cpuLoad = cpuObjNode.path(CPU_PARAM_LOAD).floatValue(); int queueId = cpuObjNode.path(CPU_PARAM_QUEUE).asInt(); boolean isBusy = cpuObjNode.path(CPU_PARAM_STATUS).booleanValue(); // This is mandatory information cpuBuilder.setDeviceId(deviceId) .setId(cpuId) .setLoad(cpuLoad) .setQueue(queueId) .setIsBusy(isBusy); // We have all the statistics for this CPU core cpuStats.add(cpuBuilder.build()); } return cpuStats; } /** * Parse the input JSON object, looking for NIC-related * statistics. Upon success, construct and return a list * of NIC statistics objects. * * @param deviceId the device ID that sent the JSON object * @param objNode input JSON node with NIC statistics information * @return list of (per port) PortStatistics */ private Collection<PortStatistics> parseNicStatistics(DeviceId deviceId, JsonNode objNode) { if ((deviceId == null) || (objNode == null)) { return Collections.EMPTY_LIST; } RestServerSBDevice device = null; try { device = (RestServerSBDevice) getController().getDevice(deviceId); } catch (ClassCastException ccEx) { return Collections.EMPTY_LIST; } if (device == null) { return Collections.EMPTY_LIST; } Collection<PortStatistics> nicStats = Lists.newArrayList(); JsonNode nicNode = objNode.path(PARAM_NICS); for (JsonNode nn : nicNode) { ObjectNode nicObjNode = (ObjectNode) nn; // All the NIC attributes String nicName = get(nn, NIC_PARAM_NAME); checkArgument(!Strings.isNullOrEmpty(nicName), "NIC name is empty or NULL"); long portNumber = device.portNumberFromName(nicName); checkArgument(portNumber >= 0, "Unknown port ID " + portNumber + " for NIC " + nicName); long rxCount = nicObjNode.path(NIC_STATS_RX_COUNT).asLong(); long rxBytes = nicObjNode.path(NIC_STATS_RX_BYTES).asLong(); long rxDropped = nicObjNode.path(NIC_STATS_RX_DROPS).asLong(); long rxErrors = nicObjNode.path(NIC_STATS_RX_ERRORS).asLong(); long txCount = nicObjNode.path(NIC_STATS_TX_COUNT).asLong(); long txBytes = 
nicObjNode.path(NIC_STATS_TX_BYTES).asLong(); long txDropped = nicObjNode.path(NIC_STATS_TX_DROPS).asLong(); long txErrors = nicObjNode.path(NIC_STATS_TX_ERRORS).asLong(); // Incorporate these statistics into an object DefaultPortStatistics.Builder nicBuilder = DefaultPortStatistics.builder(); nicBuilder.setDeviceId(deviceId) .setPort((int) portNumber) .setPacketsReceived(rxCount) .setPacketsSent(txCount) .setBytesReceived(rxBytes) .setBytesSent(txBytes) .setPacketsRxDropped(rxDropped) .setPacketsRxErrors(rxErrors) .setPacketsTxDropped(txDropped) .setPacketsTxErrors(txErrors); // We have statistics for this NIC nicStats.add(nicBuilder.build()); } return nicStats; } /** * Parse the input JSON object, looking for timing-related statistics. * Upon success, return a timing statistics object with the advertized values. * Upon failure, return a timing statistics object with zero-initialized values. * * @param objNode input JSON node with timing statistics information * @return TimingStatistics object or null */ private TimingStatistics parseTimingStatistics(JsonNode objNode) { TimingStatistics timinsgStats = null; if (objNode == null) { return timinsgStats; } // If no timing statistics are present, then send zeros if (objNode.get(PARAM_TIMING_STATS) == null) { return getZeroTimingStatistics(); } DefaultTimingStatistics.Builder timingBuilder = DefaultTimingStatistics.builder(); // Get timing statistics JsonNode timingNode = objNode.path(PARAM_TIMING_STATS); ObjectNode timingObjNode = (ObjectNode) timingNode; // The unit of timing statistics String timingStatsUnit = get(timingNode, MON_PARAM_UNIT); if (!Strings.isNullOrEmpty(timingStatsUnit)) { timingBuilder.setUnit(timingStatsUnit); } // Time (ns) to parse the controller's deployment instruction long parsingTime = 0; if (timingObjNode.get(TIMING_PARAM_PARSE) != null) { parsingTime = timingObjNode.path(TIMING_PARAM_PARSE).asLong(); } // Time (ns) to do the deployment long launchingTime = 0; if 
(timingObjNode.get(TIMING_PARAM_LAUNCH) != null) { launchingTime = timingObjNode.path(TIMING_PARAM_LAUNCH).asLong(); } // Deployment time (ns) equals to time to parse + time to launch long deployTime = 0; if (timingObjNode.get(TIMING_PARAM_DEPLOY) != null) { deployTime = timingObjNode.path(TIMING_PARAM_DEPLOY).asLong(); } checkArgument(deployTime == parsingTime + launchingTime, "Inconsistent timing statistics"); timingBuilder.setParsingTime(parsingTime) .setLaunchingTime(launchingTime); // Get autoscale timing statistics JsonNode autoscaleTimingNode = objNode.path(PARAM_TIMING_AUTOSCALE); if (autoscaleTimingNode == null) { return timingBuilder.build(); } ObjectNode autoScaleTimingObjNode = (ObjectNode) autoscaleTimingNode; // Time (ns) to autoscale a server's load long autoScaleTime = 0; if (autoScaleTimingObjNode.get(TIMING_PARAM_AUTOSCALE) != null) { autoScaleTime = autoScaleTimingObjNode.path(TIMING_PARAM_AUTOSCALE).asLong(); } timingBuilder.setAutoScaleTime(autoScaleTime); return timingBuilder.build(); } /** * Return a timing statistics object with zero counters. * This is useful when constructing MonitoringStatistics * objects that do not require timers. * * @return TimingStatistics object */ private TimingStatistics getZeroTimingStatistics() { DefaultTimingStatistics.Builder zeroTimingBuilder = DefaultTimingStatistics.builder(); zeroTimingBuilder.setParsingTime(0) .setLaunchingTime(0) .setAutoScaleTime(0); return zeroTimingBuilder.build(); } }
/* * Copyright 2017 ThoughtWorks, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.thoughtworks.go.server.materials; import com.thoughtworks.go.config.CaseInsensitiveString; import com.thoughtworks.go.config.materials.Materials; import com.thoughtworks.go.config.materials.PackageMaterial; import com.thoughtworks.go.config.materials.ScmMaterial; import com.thoughtworks.go.config.materials.dependency.DependencyMaterial; import com.thoughtworks.go.config.materials.git.GitMaterial; import com.thoughtworks.go.config.materials.svn.SvnMaterial; import com.thoughtworks.go.domain.MaterialRevision; import com.thoughtworks.go.domain.MaterialRevisions; import com.thoughtworks.go.domain.Stage; import com.thoughtworks.go.domain.buildcause.BuildCause; import com.thoughtworks.go.domain.materials.Material; import com.thoughtworks.go.domain.materials.Modification; import com.thoughtworks.go.domain.materials.Modifications; import com.thoughtworks.go.domain.packagerepository.ConfigurationPropertyMother; import com.thoughtworks.go.helper.MaterialsMother; import com.thoughtworks.go.helper.StageMother; import com.thoughtworks.go.server.persistence.MaterialRepository; import org.junit.Before; import org.junit.Test; import org.mockito.Mockito; import java.util.Arrays; import java.util.Date; import static com.thoughtworks.go.domain.materials.Modification.modifications; import static java.lang.String.format; import static org.hamcrest.Matchers.is; import static 
org.junit.Assert.assertThat; import static org.junit.Assert.fail; import static org.mockito.Mockito.never; import static org.mockito.Mockito.when; public class MaterialCheckerTest { private MaterialRepository materialRepository; private ScmMaterial mockMaterial; private MaterialChecker materialChecker; @Before public void setUp() throws Exception { materialRepository = Mockito.mock(MaterialRepository.class); mockMaterial = Mockito.mock(ScmMaterial.class); materialChecker = new MaterialChecker(materialRepository); } @Test public void shouldUseFlyweightWorkingFolderForLatestModificationCheck() throws Exception { Modification modification = new Modification(); Mockito.when(materialRepository.findLatestModification(mockMaterial)).thenReturn(revisions(mockMaterial, modification)); materialChecker.findLatestRevisions(new MaterialRevisions(), new Materials(mockMaterial)); Mockito.verify(materialRepository).findLatestModification(mockMaterial); } private MaterialRevisions revisions(Material material, Modification modification) { return new MaterialRevisions(new MaterialRevision(material, modifications(modification))); } @Test public void shouldUseLatestPipelineInstanceForDependentPipelineGivenThePreviousRevision() throws Exception { DependencyMaterial dependencyMaterial = new DependencyMaterial(new CaseInsensitiveString("pipeline-name"), new CaseInsensitiveString("stage-name")); Stage passedStage = StageMother.passedStageInstance("stage-name", "job-name", "pipeline-name"); MaterialRevisions materialRevisions = new MaterialRevisions(); Modification previous = new Modification("Unknown", "Unknown", null, passedStage.completedDate(), "pipeline-name/1/stage-name/0"); MaterialRevision previousRevision = revisions(dependencyMaterial, previous).getMaterialRevision(0); when(materialRepository.findModificationsSince(dependencyMaterial, previousRevision)).thenReturn(Arrays.asList(new Modification(new Date(), "pipeline-name/2/stage-name/0", "MOCK_LABEL-12", null))); MaterialRevisions 
        // Tail of a test whose start lies above this chunk: with no newly introduced
        // material, findRevisionsSince should surface the dependency revision
        // "pipeline-name/2/stage-name/0".
        revisionsSince = materialChecker.findRevisionsSince(materialRevisions, new Materials(dependencyMaterial), new MaterialRevisions(previousRevision), new MaterialRevisions()/*will not be used, as no new material has been introduced*/);
        assertThat(revisionsSince.getMaterialRevision(0).getRevision().getRevision(), is("pipeline-name/2/stage-name/0"));
    }

    // Looking up latest revisions for a dependency material must hit the repository.
    @Test
    public void shouldUseLatestPipelineInstanceForDependentPipeline() throws Exception {
        DependencyMaterial dependencyMaterial = new DependencyMaterial(new CaseInsensitiveString("pipeline-name"), new CaseInsensitiveString("stage-name"));
        Stage passedStage = StageMother.passedStageInstance("stage-name", "job-name", "pipeline-name");
        Modification modification = new Modification("Unknown", "Unknown", null, passedStage.completedDate(), "pipeline-name/1[LABEL-1]/stage-name/0");
        Mockito.when(materialRepository.findLatestModification(dependencyMaterial)).thenReturn(revisions(dependencyMaterial,modification));
        materialChecker.findLatestRevisions(new MaterialRevisions(), new Materials(dependencyMaterial));
        Mockito.verify(materialRepository).findLatestModification(dependencyMaterial);
    }

    // A material already present in the known-revisions set must not be queried again;
    // only the not-yet-checked material (svn) goes to the repository.
    @Test
    public void shouldSkipLatestRevisionsForMaterialsThatWereAlreadyChecked() throws Exception {
        DependencyMaterial dependencyMaterial = new DependencyMaterial(new CaseInsensitiveString("pipeline-name"), new CaseInsensitiveString("stage-name"));
        SvnMaterial svnMaterial = new SvnMaterial("svnUrl", null, null, false);
        Stage passedStage = StageMother.passedStageInstance("stage-name", "job-name", "pipeline-name");
        Modification dependencyModification = new Modification("Unknown", "Unknown", null, passedStage.completedDate(), "pipeline-name/1[LABEL-1]/stage-name/0");
        Modification svnModification = new Modification("user", "commend", "em@il", new Date(), "1");
        Mockito.when(materialRepository.findLatestModification(svnMaterial)).thenReturn(revisions(dependencyMaterial, svnModification));
        materialChecker.findLatestRevisions(new MaterialRevisions(new MaterialRevision(dependencyMaterial, dependencyModification)), new Materials(dependencyMaterial, svnMaterial));
        Mockito.verify(materialRepository, never()).findLatestModification(dependencyMaterial);
        Mockito.verify(materialRepository).findLatestModification(svnMaterial);
    }

    // findSpecificRevision should return exactly the stored modification for the
    // requested dependency revision string.
    @Test
    public void shouldFindSpecificRevisionForDependentPipeline() throws Exception {
        DependencyMaterial dependencyMaterial = new DependencyMaterial(new CaseInsensitiveString("pipeline-name"), new CaseInsensitiveString("stage-name"));
        Stage passedStage = StageMother.passedStageInstance("stage-name", "job-name", "pipeline-name");
        Modification modification = new Modification("Unknown", "Unknown", null, passedStage.completedDate(), "pipeline-name/1/stage-name/0");
        Mockito.when(materialRepository.findModificationWithRevision(dependencyMaterial,"pipeline-name/1/stage-name/0")).thenReturn(modification);
        MaterialRevision actualRevision = materialChecker.findSpecificRevision(dependencyMaterial, "pipeline-name/1/stage-name/0");
        assertThat(actualRevision.getModifications().size(), is(1));
        assertThat(actualRevision.getModification(0).getModifiedTime(), is(passedStage.completedDate()));
        assertThat(actualRevision.getModification(0).getRevision(), is("pipeline-name/1/stage-name/0"));
    }

    // An unknown revision (repository returns null) must surface as an exception
    // naming both the revision and the material.
    @Test
    public void shouldThrowExceptionIfSpecifiedRevisionDoesNotExist() throws Exception {
        DependencyMaterial dependencyMaterial = new DependencyMaterial(new CaseInsensitiveString("pipeline-name"), new CaseInsensitiveString("stage-name"));
        Mockito.when(materialRepository.findModificationWithRevision(dependencyMaterial,"pipeline-name/500/stage-name/0")).thenReturn(null);
        try {
            materialChecker.findSpecificRevision(dependencyMaterial, "pipeline-name/500/stage-name/0");
            fail("Should not be able to find revision");
        } catch (Exception expected) {
            assertThat(expected.getMessage(), is(format("Unable to find revision [pipeline-name/500/stage-name/0] for material [%s]", dependencyMaterial)));
        }
    }

    // An empty revision string is rejected up front with a descriptive message.
    @Test
    public void shouldThrowExceptionIfRevisionIsNotSpecified() throws Exception {
        DependencyMaterial dependencyMaterial = new DependencyMaterial(new CaseInsensitiveString("pipeline-name"), new CaseInsensitiveString("stage-name"));
        try {
            materialChecker.findSpecificRevision(dependencyMaterial, "");
            fail("Should not be able to empty revision");
        } catch (Exception expected) {
            assertThat(expected.getMessage(), is(format("Revision was not specified for material [%s]", dependencyMaterial)));
        }
    }

    // Materials already covered by alreadyFoundRevisions keep their known revision;
    // only the remaining material (svn) is asked for modifications since last time.
    @Test
    public void shouldSkipFindingRevisionsSinceForMaterialsThatWereAlreadyChecked() throws Exception {
        DependencyMaterial dependencyMaterial = new DependencyMaterial(new CaseInsensitiveString("pipeline-name"), new CaseInsensitiveString("stage-name"));
        SvnMaterial svnMaterial = new SvnMaterial("svnUrl", null, null, false);
        Stage passedStage = StageMother.passedStageInstance("stage-name", "job-name", "pipeline-name");
        MaterialRevision previousDependantRevision = new MaterialRevision(dependencyMaterial, new Modification("Unknown", "Unknown", null, passedStage.completedDate(), "pipeline-name/1[LABEL-1]/stage-name/0"));
        Modification dependencyModification = new Modification("Unknown", "Unknown", null, passedStage.completedDate(), "pipeline-name/2[LABEL-2]/stage-name/0");
        MaterialRevision previousSvnRevision = new MaterialRevision(svnMaterial, mod(1L));
        Modification svnModification = new Modification("user", "commend", "em@il", new Date(), "2");
        Mockito.when(materialRepository.findModificationsSince(svnMaterial, previousSvnRevision)).thenReturn(modifications(svnModification));
        MaterialRevisions alreadyFoundRevisions = new MaterialRevisions(new MaterialRevision(dependencyMaterial, dependencyModification));
        MaterialRevisions latestRevisions = new MaterialRevisions(); //will not be used, as no new materials have appeared
        MaterialRevisions revisionsSince = materialChecker.findRevisionsSince(alreadyFoundRevisions, new Materials(dependencyMaterial, svnMaterial), new MaterialRevisions(previousDependantRevision, previousSvnRevision), latestRevisions);
        assertThat(revisionsSince, is(new MaterialRevisions(new MaterialRevision(dependencyMaterial, dependencyModification), new MaterialRevision(svnMaterial, svnModification))));
        Mockito.verify(materialRepository, never()).findLatestModification(dependencyMaterial);
        Mockito.verify(materialRepository).findModificationsSince(svnMaterial, previousSvnRevision);
    }

    // Regression for bug 7486: the revisions built for the result must carry the
    // *latest* material config (renamed package/repo), even though name is not part
    // of material equality.
    @Test
    public void shouldUseLatestMaterialDuringCreationOfNewRevisionsSince_bug7486() throws Exception {
        DependencyMaterial dependencyMaterial = new DependencyMaterial(new CaseInsensitiveString("pipeline-name"), new CaseInsensitiveString("stage-name"));
        PackageMaterial oldPkgMaterial = MaterialsMother.packageMaterial("repo-id", "repo-old-name", "pkg-id", "pkg-old-name", ConfigurationPropertyMother.create("key", false, "value"));
        Stage passedStage = StageMother.passedStageInstance("stage-name", "job-name", "pipeline-name");
        MaterialRevision previousDependantRevision = new MaterialRevision(dependencyMaterial, new Modification("Unknown", "Unknown", null, passedStage.completedDate(), "pipeline-name/1[LABEL-1]/stage-name/0"));
        Modification dependencyModification = new Modification("Unknown", "Unknown", null, passedStage.completedDate(), "pipeline-name/2[LABEL-2]/stage-name/0");
        Modification oldPkgMod = mod(1L);
        MaterialRevision previousPkgRevision = new MaterialRevision(oldPkgMaterial, oldPkgMod);
        PackageMaterial newPkgMaterial = MaterialsMother.packageMaterial("repo-id", "repo-new-name", "pkg-id", "pkg-new-name", ConfigurationPropertyMother.create("key", false, "value"));
        Modification newPkgMod = mod(2L);
        Mockito.when(materialRepository.findModificationsSince(oldPkgMaterial, previousPkgRevision)).thenReturn(modifications(newPkgMod));
        MaterialRevisions alreadyFoundRevisions = new MaterialRevisions(new MaterialRevision(dependencyMaterial, dependencyModification));
        MaterialRevisions latestRevisions = new MaterialRevisions(); //will not be used, as no new materials have appeared
        MaterialRevisions revisionsSince = materialChecker.findRevisionsSince(alreadyFoundRevisions, new Materials(dependencyMaterial, newPkgMaterial), new MaterialRevisions(previousDependantRevision, previousPkgRevision), latestRevisions);
        assertThat(revisionsSince, is(new MaterialRevisions(new MaterialRevision(dependencyMaterial, dependencyModification), new MaterialRevision(oldPkgMaterial, newPkgMod))));
        // since name is not part of equals
        assertThat(((PackageMaterial)revisionsSince.getMaterialRevision(1).getMaterial()).getPackageDefinition().getName(), is("pkg-new-name"));
        assertThat(((PackageMaterial)revisionsSince.getMaterialRevision(1).getMaterial()).getPackageDefinition().getRepository().getName(), is("repo-new-name"));
        Mockito.verify(materialRepository, never()).findLatestModification(dependencyMaterial);
        Mockito.verify(materialRepository).findModificationsSince(oldPkgMaterial, previousPkgRevision);
    }

    // A brand-new material with no previous revision must NOT be skipped: its
    // revision is taken from latestRevisions instead of findModificationsSince.
    @Test
    public void shouldNOTSkipFindingRevisionsSinceForMaterialsThatAreNewlyAdded() throws Exception {
        SvnMaterial svnMaterial = new SvnMaterial("svnUrl", null, null, false);
        SvnMaterial svnExternalMaterial = new SvnMaterial("svnExternalUrl", null, null, false);
        Modification svnExternalModification = new Modification("user", "external commit", "em@il", new Date(), "3");
        MaterialRevision previousSvnRevision = new MaterialRevision(svnMaterial, mod(1L));
        Modification svnModification = new Modification("user", "commend", "em@il", new Date(), "2");
        MaterialRevisions latestRevisions = new MaterialRevisions(new MaterialRevision(svnMaterial, svnModification), new MaterialRevision(svnExternalMaterial, svnExternalModification));
        Mockito.when(materialRepository.findModificationsSince(svnMaterial, previousSvnRevision)).thenReturn(modifications(svnModification));
        MaterialRevisions revisionsSince = materialChecker.findRevisionsSince(new MaterialRevisions(), new Materials(svnMaterial, svnExternalMaterial), new MaterialRevisions(previousSvnRevision), latestRevisions);
        assertThat(revisionsSince, is(new MaterialRevisions(new MaterialRevision(svnMaterial, svnModification), new MaterialRevision(svnExternalMaterial, svnExternalModification)))); 
        Mockito.verify(materialRepository).findModificationsSince(svnMaterial, previousSvnRevision);
    }

    // Revisions up to and including the last one this pipeline already built (id 9)
    // are filtered out; the remaining revision is flagged as changed.
    @Test
    public void updateChangedRevisionsShouldFilterRevisionsThatHaveBuiltBefore() {
        CaseInsensitiveString pipelineName = new CaseInsensitiveString("pipelineName");
        GitMaterial gitMaterial = new GitMaterial("git://foo");
        BuildCause buildCause = BuildCause.createWithModifications(new MaterialRevisions(new MaterialRevision(gitMaterial, mod(10L), mod(9L), mod(8L))), "user");
        when(materialRepository.latestModificationRunByPipeline(pipelineName, gitMaterial)).thenReturn(9L);
        materialChecker.updateChangedRevisions(pipelineName, buildCause);
        MaterialRevisions actualRevisions = buildCause.getMaterialRevisions();
        assertThat(actualRevisions.getModifications(gitMaterial), is(new Modifications(mod(10L))));
        assertThat(actualRevisions.findRevisionFor(gitMaterial).isChanged(), is(true));
    }

    // If everything has built before (latest built id == newest id 10), the full
    // modification list is retained and the revision is NOT marked changed.
    @Test
    public void updateChangedRevisionsShouldRetainLatestRevisionIfAllHaveBuiltBefore() {
        CaseInsensitiveString pipelineName = new CaseInsensitiveString("pipelineName");
        GitMaterial gitMaterial = new GitMaterial("git://foo");
        BuildCause buildCause = BuildCause.createWithModifications(new MaterialRevisions(new MaterialRevision(gitMaterial, mod(10L), mod(9L), mod(8L))), "user");
        when(materialRepository.latestModificationRunByPipeline(pipelineName, gitMaterial)).thenReturn(10L);
        materialChecker.updateChangedRevisions(pipelineName, buildCause);
        MaterialRevisions actualRevisions = buildCause.getMaterialRevisions();
        assertThat(actualRevisions.getModifications(gitMaterial), is(new Modifications(mod(10L), mod(9L), mod(8L))));
        assertThat(actualRevisions.findRevisionFor(gitMaterial).isChanged(), is(false));
    }

    // Helper: builds a Modification whose revision string and database id are both
    // derived from the given number, at a fixed timestamp for deterministic equals.
    private Modification mod(final Long revision) {
        Modification modification = new Modification("user", "comment", "em@il", new Date(12121), revision.toString());
        modification.setId(revision);
        return modification;
    }
}
/** * Appcelerator Titanium Mobile * Copyright (c) 2009-2012 by Appcelerator, Inc. All Rights Reserved. * Licensed under the terms of the Apache Public License * Please see the LICENSE included with this distribution for details. */ package org.appcelerator.titanium.util; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; import java.lang.ref.WeakReference; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.net.HttpURLConnection; import java.net.MalformedURLException; import java.net.URL; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.commons.codec.digest.DigestUtils; import org.appcelerator.kroll.KrollDict; import org.appcelerator.kroll.KrollProxy; import org.appcelerator.kroll.common.CurrentActivityListener; import org.appcelerator.kroll.common.Log; import org.appcelerator.kroll.common.TiMessenger; import org.appcelerator.titanium.TiApplication; import org.appcelerator.titanium.TiBaseActivity; import org.appcelerator.titanium.TiBlob; import org.appcelerator.titanium.TiC; import org.appcelerator.titanium.TiDimension; import org.appcelerator.titanium.io.TiBaseFile; import org.appcelerator.titanium.io.TiFileFactory; import org.appcelerator.titanium.proxy.TiViewProxy; import org.appcelerator.titanium.proxy.TiWindowProxy; import org.appcelerator.titanium.proxy.TiWindowProxy.PostOpenListener; import org.appcelerator.titanium.view.TiBackgroundDrawable; import org.appcelerator.titanium.view.TiDrawableReference; import org.appcelerator.titanium.view.TiUIView; import android.app.Activity; import android.app.AlertDialog; import android.content.Context; import android.content.DialogInterface; import android.content.DialogInterface.OnClickListener; import android.content.res.AssetManager; import android.content.res.Resources; import 
android.graphics.Bitmap; import android.graphics.Bitmap.CompressFormat; import android.graphics.Bitmap.Config; import android.graphics.BitmapFactory; import android.graphics.Canvas; import android.graphics.ColorFilter; import android.graphics.ColorMatrix; import android.graphics.ColorMatrixColorFilter; import android.graphics.Paint; import android.graphics.PixelFormat; import android.graphics.Rect; import android.graphics.Shader; import android.graphics.Typeface; import android.graphics.drawable.BitmapDrawable; import android.graphics.drawable.ColorDrawable; import android.graphics.drawable.Drawable; import android.graphics.drawable.LayerDrawable; import android.graphics.drawable.StateListDrawable; import android.net.Uri; import android.os.AsyncTask; import android.os.Build; import android.os.Handler; import android.os.Looper; import android.os.Process; import android.text.Spanned; import android.text.method.LinkMovementMethod; import android.text.util.Linkify; import android.util.DisplayMetrics; import android.util.TypedValue; import android.view.Gravity; import android.view.View; import android.view.View.MeasureSpec; import android.view.inputmethod.InputMethodManager; import android.widget.TextView; /** * A set of utility methods focused on UI and View operations. 
 */
public class TiUIHelper
{
    private static final String TAG = "TiUIHelper";
    // Location of bundled custom fonts inside the app's assets.
    private static final String customFontPath = "Resources/fonts";

    // Device orientation constants exposed to JS.
    public static final int PORTRAIT = 1;
    public static final int UPSIDE_PORTRAIT = 2;
    public static final int LANDSCAPE_LEFT = 3;
    public static final int LANDSCAPE_RIGHT = 4;
    public static final int FACE_UP = 5;
    public static final int FACE_DOWN = 6;
    public static final int UNKNOWN = 7;
    // Matches a numeric value with an optional unit suffix, e.g. "12.5dp".
    public static final Pattern SIZED_VALUE = Pattern.compile("([0-9]*\\.?[0-9]+)\\W*(px|dp|dip|sp|sip|mm|pt|in)?");
    public static final String MIME_TYPE_PNG = "image/png";

    // Cached reflection handle for Activity.overridePendingTransition (see bottom of class).
    private static Method overridePendingTransition;
    // Cache of image-URL -> generated drawable resource key.
    private static Map<String, String> resourceImageKeys = Collections.synchronizedMap(new HashMap<String, String>());
    // Cache of font-family -> loaded Typeface (may cache null for misses).
    private static Map<String, Typeface> mCustomTypeFaces = Collections.synchronizedMap(new HashMap<String, Typeface>());

    // Returns a dialog click listener that does nothing (dismisses only).
    public static OnClickListener createDoNothingListener() {
        return new OnClickListener() {
            public void onClick(DialogInterface dialog, int which) {
                // Do nothing
            }
        };
    }

    // Returns a dialog click listener that kills the current process outright.
    public static OnClickListener createKillListener() {
        return new OnClickListener() {
            public void onClick(DialogInterface dialog, int which) {
                Process.killProcess(Process.myPid());
            }
        };
    }

    // Returns a dialog click listener that finishes the given activity.
    public static OnClickListener createFinishListener(final Activity me) {
        return new OnClickListener(){
            public void onClick(DialogInterface dialog, int which) {
                me.finish();
            }
        };
    }

    // Shows a non-cancelable "Continue"/"Kill" dialog; missing listeners default to
    // do-nothing (Continue) and process-kill (Kill).
    public static void doKillOrContinueDialog(Context context, String title, String message,
        OnClickListener positiveListener, OnClickListener negativeListener) {
        if (positiveListener == null) {
            positiveListener = createDoNothingListener();
        }
        if (negativeListener == null) {
            negativeListener = createKillListener();
        }
        new AlertDialog.Builder(context).setTitle(title).setMessage(message)
            .setPositiveButton("Continue", positiveListener)
            .setNegativeButton("Kill", negativeListener)
            .setCancelable(false).create().show();
    }

    // Applies Linkify to the TextView using the given autoLink mask; falls back to
    // LinkMovementMethod when the text is already Spanned but Linkify added nothing.
    public static void linkifyIfEnabled(TextView tv, Object autoLink) {
        if (autoLink != null) {
            //Default to Ti.UI.AUTOLINK_NONE
            boolean success = Linkify.addLinks(tv, TiConvert.toInt(autoLink, 16));
            if (!success && tv.getText() instanceof Spanned) {
                tv.setMovementMethod(LinkMovementMethod.getInstance());
            }
        }
    }

    /**
     * Waits for the current activity to be ready, then invokes
     * {@link CurrentActivityListener#onCurrentActivityReady(Activity)}.
     * @param l the CurrentActivityListener.
     */
    public static void waitForCurrentActivity(final CurrentActivityListener l) {
        // Some window opens are async, so we need to make sure we don't
        // sandwich ourselves in between windows when transitioning
        // between activities TIMOB-3644
        TiWindowProxy waitingForOpen = TiWindowProxy.getWaitingForOpen();
        if (waitingForOpen != null) {
            // A window open is in flight: defer until it has finished opening.
            waitingForOpen.setPostOpenListener(new PostOpenListener() {
                // TODO @Override
                public void onPostOpen(TiWindowProxy window) {
                    TiApplication app = TiApplication.getInstance();
                    Activity activity = app.getCurrentActivity();
                    if (activity != null) {
                        l.onCurrentActivityReady(activity);
                    }
                }
            });
        } else {
            TiApplication app = TiApplication.getInstance();
            Activity activity = app.getCurrentActivity();
            if (activity != null) {
                l.onCurrentActivityReady(activity);
            }
        }
    }

    /**
     * Creates and shows a dialog with an OK button given title and message.
     * The dialog's creation context is the current activity.
     * @param title the title of dialog.
     * @param message the dialog's message.
     * @param listener the click listener for click events.
     */
    public static void doOkDialog(final String title, final String message, OnClickListener listener) {
        if (listener == null) {
            listener = new OnClickListener() {
                public void onClick(DialogInterface dialog, int which) {
                    Activity ownerActivity = ((AlertDialog)dialog).getOwnerActivity();
                    //if activity is not finishing, remove dialog to free memory
                    if (ownerActivity != null && !ownerActivity.isFinishing()) {
                        ((TiBaseActivity)ownerActivity).removeDialog((AlertDialog) dialog);
                    }
                }};
        }
        final OnClickListener fListener = listener;
        waitForCurrentActivity(new CurrentActivityListener() {
            // TODO @Override
            public void onCurrentActivityReady(Activity activity) {
                //add dialog to activity for cleaning up purposes
                if (!activity.isFinishing()) {
                    AlertDialog dialog = new AlertDialog.Builder(activity).setTitle(title).setMessage(message)
                        .setPositiveButton(android.R.string.ok, fListener)
                        .setCancelable(false).create();
                    if (activity instanceof TiBaseActivity) {
                        TiBaseActivity baseActivity = (TiBaseActivity) activity;
                        baseActivity.addDialog(baseActivity.new DialogWrapper(dialog, true, new WeakReference<TiBaseActivity>(baseActivity)));
                        dialog.setOwnerActivity(activity);
                    }
                    dialog.show();
                }
            }
        });
    }

    // Maps CSS-like weight/style strings ("bold"/"italic") to a Typeface style int.
    public static int toTypefaceStyle(String fontWeight, String fontStyle) {
        int style = Typeface.NORMAL;
        if (fontWeight != null) {
            if (fontWeight.equals("bold")) {
                if (fontStyle != null && fontStyle.equals("italic")) {
                    style = Typeface.BOLD_ITALIC;
                } else {
                    style = Typeface.BOLD;
                }
            } else if (fontStyle != null && fontStyle.equals("italic")) {
                style = Typeface.ITALIC;
            }
        } else if (fontStyle != null && fontStyle.equals("italic")) {
            style = Typeface.ITALIC;
        }
        return style;
    }

    // Parses the unit suffix of a size string into a TypedValue COMPLEX_UNIT_*
    // constant. Falls back to the app's configured default unit when absent.
    public static int getSizeUnits(String size) {
        int units = TypedValue.COMPLEX_UNIT_PX;
        String unitString = null;
        if (size != null) {
            Matcher m = SIZED_VALUE.matcher(size.trim());
            if (m.matches()) {
                if (m.groupCount() == 2) {
                    unitString = m.group(2);
                }
            }
        }
        if (unitString == null) {
            unitString = TiApplication.getInstance().getDefaultUnit();
        }
        if (TiDimension.UNIT_PX.equals(unitString) || TiDimension.UNIT_SYSTEM.equals(unitString)) {
            units = TypedValue.COMPLEX_UNIT_PX;
        } else if (TiDimension.UNIT_PT.equals(unitString)) {
            units = TypedValue.COMPLEX_UNIT_PT;
        } else if (TiDimension.UNIT_DP.equals(unitString) || TiDimension.UNIT_DIP.equals(unitString)) {
            units = TypedValue.COMPLEX_UNIT_DIP;
        } else if (TiDimension.UNIT_SP.equals(unitString) || TiDimension.UNIT_SIP.equals(unitString)) {
            units = TypedValue.COMPLEX_UNIT_SP;
        } else if (TiDimension.UNIT_MM.equals(unitString)) {
            units = TypedValue.COMPLEX_UNIT_MM;
        } else if (TiDimension.UNIT_CM.equals(unitString)) {
            // NOTE: cm has no TypedValue constant; TiDimension supplies its own.
            units = TiDimension.COMPLEX_UNIT_CM;
        } else if (TiDimension.UNIT_IN.equals(unitString)) {
            units = TypedValue.COMPLEX_UNIT_IN;
        } else {
            if (unitString != null) {
                Log.w(TAG, "Unknown unit: " + unitString, Log.DEBUG_MODE);
            }
        }
        return units;
    }

    // Parses the numeric part of a size string; defaults to 15.0 when unparsable.
    public static float getSize(String size) {
        float value = 15.0f;
        if (size != null) {
            Matcher m = SIZED_VALUE.matcher(size.trim());
            if (m.matches()) {
                value = Float.parseFloat(m.group(1));
            }
        }
        return value;
    }

    // Converts (unit, size) to raw pixels using the context's display metrics,
    // or the system metrics when no context is supplied.
    public static float getRawSize(int unit, float size, Context context) {
        Resources r;
        if (context != null) {
            r = context.getResources();
        } else {
            r = Resources.getSystem();
        }
        return TypedValue.applyDimension(unit, size, r.getDisplayMetrics());
    }

    // Convenience: dip value -> raw pixels.
    public static float getRawDIPSize(float size, Context context) {
        return getRawSize(TypedValue.COMPLEX_UNIT_DIP, size, context);
    }

    // Convenience: size string (value + unit) -> raw pixels.
    public static float getRawSize(String size, Context context) {
        return getRawSize(getSizeUnits(size), getSize(size), context);
    }

    // Applies font properties from a Ti-style dictionary (fontSize/fontWeight/
    // fontFamily/fontStyle) to the TextView; null dict resets to defaults.
    public static void styleText(TextView tv, HashMap<String, Object> d) {
        if (d == null) {
            TiUIHelper.styleText(tv, null, null, null);
            return;
        }
        String fontSize = null;
        String fontWeight = null;
        String fontFamily = null;
        String fontStyle = null;
        if (d.containsKey("fontSize")) {
            fontSize = TiConvert.toString(d, "fontSize");
        }
        if (d.containsKey("fontWeight")) {
            fontWeight =
TiConvert.toString(d, "fontWeight"); } if (d.containsKey("fontFamily")) { fontFamily = TiConvert.toString(d, "fontFamily"); } if (d.containsKey("fontStyle")) { fontStyle = TiConvert.toString(d, "fontStyle"); } TiUIHelper.styleText(tv, fontFamily, fontSize, fontWeight, fontStyle); } public static void styleText(TextView tv, String fontFamily, String fontSize, String fontWeight) { styleText(tv, fontFamily, fontSize, fontWeight, null); } public static void styleText(TextView tv, String fontFamily, String fontSize, String fontWeight, String fontStyle) { Typeface tf = tv.getTypeface(); tf = toTypeface(tv.getContext(), fontFamily); tv.setTypeface(tf, toTypefaceStyle(fontWeight, fontStyle)); tv.setTextSize(getSizeUnits(fontSize), getSize(fontSize)); } public static Typeface toTypeface(Context context, String fontFamily) { Typeface tf = Typeface.SANS_SERIF; // default if (fontFamily != null) { if ("monospace".equals(fontFamily)) { tf = Typeface.MONOSPACE; } else if ("serif".equals(fontFamily)) { tf = Typeface.SERIF; } else if ("sans-serif".equals(fontFamily)) { tf = Typeface.SANS_SERIF; } else { Typeface loadedTf = null; if (context != null) { loadedTf = loadTypeface(context, fontFamily); } if (loadedTf == null) { Log.w(TAG, "Unsupported font: '" + fontFamily + "' supported fonts are 'monospace', 'serif', 'sans-serif'.", Log.DEBUG_MODE); } else { tf = loadedTf; } } } return tf; } public static Typeface toTypeface(String fontFamily) { return toTypeface(null, fontFamily); } private static Typeface loadTypeface(Context context, String fontFamily) { if (context == null) { return null; } if (mCustomTypeFaces.containsKey(fontFamily)) { return mCustomTypeFaces.get(fontFamily); } AssetManager mgr = context.getAssets(); try { String[] fontFiles = mgr.list(customFontPath); for (String f : fontFiles) { if (f.toLowerCase() == fontFamily.toLowerCase() || f.toLowerCase().startsWith(fontFamily.toLowerCase() + ".")) { Typeface tf = Typeface.createFromAsset(mgr, customFontPath + "/" + f); 
synchronized(mCustomTypeFaces) { mCustomTypeFaces.put(fontFamily, tf); } return tf; } } } catch (IOException e) { Log.e(TAG, "Unable to load 'fonts' assets. Perhaps doesn't exist? " + e.getMessage()); } mCustomTypeFaces.put(fontFamily, null); return null; } public static String getDefaultFontSize(Context context) { String size = "15.0px"; TextView tv = new TextView(context); if (tv != null) { size = String.valueOf(tv.getTextSize()) + "px"; tv = null; } return size; } public static String getDefaultFontWeight(Context context) { String style = "normal"; TextView tv = new TextView(context); if (tv != null) { Typeface tf = tv.getTypeface(); if (tf != null && tf.isBold()) { style = "bold"; } } return style; } public static void setAlignment(TextView tv, String textAlign, String verticalAlign) { int gravity = Gravity.NO_GRAVITY; if (textAlign != null) { if ("left".equals(textAlign)) { gravity |= Gravity.LEFT; } else if ("center".equals(textAlign)) { gravity |= Gravity.CENTER_HORIZONTAL; } else if ("right".equals(textAlign)) { gravity |= Gravity.RIGHT; } else { Log.w(TAG, "Unsupported horizontal alignment: " + textAlign); } } else { // Nothing has been set - let's set if something was set previously // You can do this with shortcut syntax - but long term maint of code is easier if it's explicit Log.w(TAG, "No alignment set - old horizontal align was: " + (tv.getGravity() & Gravity.HORIZONTAL_GRAVITY_MASK), Log.DEBUG_MODE); if ((tv.getGravity() & Gravity.HORIZONTAL_GRAVITY_MASK) != Gravity.NO_GRAVITY) { // Something was set before - so let's use it gravity |= tv.getGravity() & Gravity.HORIZONTAL_GRAVITY_MASK; } } if (verticalAlign != null) { if ("top".equals(verticalAlign)) { gravity |= Gravity.TOP; } else if ("middle".equals(verticalAlign)) { gravity |= Gravity.CENTER_VERTICAL; } else if ("bottom".equals(verticalAlign)) { gravity |= Gravity.BOTTOM; } else { Log.w(TAG, "Unsupported vertical alignment: " + verticalAlign); } } else { // Nothing has been set - let's set if 
something was set previously // You can do this with shortcut syntax - but long term maint of code is easier if it's explicit Log.w(TAG, "No alignment set - old vertical align was: " + (tv.getGravity() & Gravity.VERTICAL_GRAVITY_MASK), Log.DEBUG_MODE); if ((tv.getGravity() & Gravity.VERTICAL_GRAVITY_MASK) != Gravity.NO_GRAVITY) { // Something was set before - so let's use it gravity |= tv.getGravity() & Gravity.VERTICAL_GRAVITY_MASK; } } tv.setGravity(gravity); } public static final int FONT_SIZE_POSITION = 0; public static final int FONT_FAMILY_POSITION = 1; public static final int FONT_WEIGHT_POSITION = 2; public static final int FONT_STYLE_POSITION = 3; public static String[] getFontProperties(KrollDict fontProps) { boolean bFontSet = false; String[] fontProperties = new String[4]; if (fontProps.containsKey(TiC.PROPERTY_FONT) && fontProps.get(TiC.PROPERTY_FONT) instanceof HashMap) { bFontSet = true; KrollDict font = fontProps.getKrollDict(TiC.PROPERTY_FONT); if (font.containsKey(TiC.PROPERTY_FONTSIZE)) { fontProperties[FONT_SIZE_POSITION] = TiConvert.toString(font, TiC.PROPERTY_FONTSIZE); } if (font.containsKey(TiC.PROPERTY_FONTFAMILY)) { fontProperties[FONT_FAMILY_POSITION] = TiConvert.toString(font, TiC.PROPERTY_FONTFAMILY); } if (font.containsKey(TiC.PROPERTY_FONTWEIGHT)) { fontProperties[FONT_WEIGHT_POSITION] = TiConvert.toString(font, TiC.PROPERTY_FONTWEIGHT); } if (font.containsKey(TiC.PROPERTY_FONTSTYLE)) { fontProperties[FONT_STYLE_POSITION] = TiConvert.toString(font, TiC.PROPERTY_FONTSTYLE); } } else { if (fontProps.containsKey(TiC.PROPERTY_FONT_FAMILY)) { bFontSet = true; fontProperties[FONT_FAMILY_POSITION] = TiConvert.toString(fontProps, TiC.PROPERTY_FONT_FAMILY); } if (fontProps.containsKey(TiC.PROPERTY_FONT_SIZE)) { bFontSet = true; fontProperties[FONT_SIZE_POSITION] = TiConvert.toString(fontProps, TiC.PROPERTY_FONT_SIZE); } if (fontProps.containsKey(TiC.PROPERTY_FONT_WEIGHT)) { bFontSet = true; fontProperties[FONT_WEIGHT_POSITION] = 
TiConvert.toString(fontProps, TiC.PROPERTY_FONT_WEIGHT); } if (fontProps.containsKey(TiC.PROPERTY_FONTFAMILY)) { bFontSet = true; fontProperties[FONT_FAMILY_POSITION] = TiConvert.toString(fontProps, TiC.PROPERTY_FONTFAMILY); } if (fontProps.containsKey(TiC.PROPERTY_FONTSIZE)) { bFontSet = true; fontProperties[FONT_SIZE_POSITION] = TiConvert.toString(fontProps, TiC.PROPERTY_FONTSIZE); } if (fontProps.containsKey(TiC.PROPERTY_FONTWEIGHT)) { bFontSet = true; fontProperties[FONT_WEIGHT_POSITION] = TiConvert.toString(fontProps, TiC.PROPERTY_FONTWEIGHT); } if (fontProps.containsKey(TiC.PROPERTY_FONTSTYLE)) { bFontSet = true; fontProperties[FONT_STYLE_POSITION] = TiConvert.toString(fontProps, TiC.PROPERTY_FONTSTYLE); } } if (!bFontSet) { return null; } return fontProperties; } public static void setTextViewDIPPadding(TextView textView, int horizontalPadding, int verticalPadding) { int rawHPadding = (int)getRawDIPSize(horizontalPadding, textView.getContext()); int rawVPadding = (int)getRawDIPSize(verticalPadding, textView.getContext()); textView.setPadding(rawHPadding, rawVPadding, rawHPadding, rawVPadding); } public static Drawable buildBackgroundDrawable(String color, String image, boolean tileImage, Drawable gradientDrawable) { // Create an array of the layers that will compose this background. // Note that the order in which the layers is important to get the // correct rendering behavior. 
ArrayList<Drawable> layers = new ArrayList<Drawable>(3); if (color != null) { Drawable colorDrawable = new ColorDrawable(TiColorHelper.parseColor(color)); layers.add(colorDrawable); } if (gradientDrawable != null) { layers.add(gradientDrawable); } Drawable imageDrawable = null; if (image != null) { TiFileHelper tfh = TiFileHelper.getInstance(); imageDrawable = tfh.loadDrawable(image, false, true, false); if (tileImage) { if (imageDrawable instanceof BitmapDrawable) { BitmapDrawable tiledBackground = (BitmapDrawable) imageDrawable; tiledBackground.setTileModeX(Shader.TileMode.REPEAT); tiledBackground.setTileModeY(Shader.TileMode.REPEAT); imageDrawable = tiledBackground; } } if (imageDrawable != null) { layers.add(imageDrawable); } } return new LayerDrawable(layers.toArray(new Drawable[layers.size()])); } private static final int[] BACKGROUND_DEFAULT_STATE_1 = { android.R.attr.state_window_focused, android.R.attr.state_enabled }; private static final int[] BACKGROUND_DEFAULT_STATE_2 = { android.R.attr.state_enabled }; private static final int[] BACKGROUND_SELECTED_STATE = { android.R.attr.state_window_focused, android.R.attr.state_enabled, android.R.attr.state_pressed }; private static final int[] BACKGROUND_FOCUSED_STATE = { android.R.attr.state_focused, android.R.attr.state_window_focused, android.R.attr.state_enabled }; private static final int[] BACKGROUND_DISABLED_STATE = { -android.R.attr.state_enabled }; public static StateListDrawable buildBackgroundDrawable( String image, boolean tileImage, String color, String selectedImage, String selectedColor, String disabledImage, String disabledColor, String focusedImage, String focusedColor, Drawable gradientDrawable) { StateListDrawable sld = new StateListDrawable(); Drawable bgSelectedDrawable = buildBackgroundDrawable(selectedColor, selectedImage, tileImage, gradientDrawable); if (bgSelectedDrawable != null) { sld.addState(BACKGROUND_SELECTED_STATE, bgSelectedDrawable); } Drawable bgFocusedDrawable = 
buildBackgroundDrawable(focusedColor, focusedImage, tileImage, gradientDrawable); if (bgFocusedDrawable != null) { sld.addState(BACKGROUND_FOCUSED_STATE, bgFocusedDrawable); } Drawable bgDisabledDrawable = buildBackgroundDrawable(disabledColor, disabledImage, tileImage, gradientDrawable); if (bgDisabledDrawable != null) { sld.addState(BACKGROUND_DISABLED_STATE, bgDisabledDrawable); } Drawable bgDrawable = buildBackgroundDrawable(color, image, tileImage, gradientDrawable); if (bgDrawable != null) { sld.addState(BACKGROUND_DEFAULT_STATE_1, bgDrawable); sld.addState(BACKGROUND_DEFAULT_STATE_2, bgDrawable); } return sld; } public static KrollDict createDictForImage(int width, int height, byte[] data) { KrollDict d = new KrollDict(); d.put(TiC.PROPERTY_X, 0); d.put(TiC.PROPERTY_Y, 0); d.put(TiC.PROPERTY_WIDTH, width); d.put(TiC.PROPERTY_HEIGHT, height); d.put(TiC.PROPERTY_MIMETYPE, MIME_TYPE_PNG); KrollDict cropRect = new KrollDict(); cropRect.put(TiC.PROPERTY_X, 0); cropRect.put(TiC.PROPERTY_X, 0); cropRect.put(TiC.PROPERTY_WIDTH, width); cropRect.put(TiC.PROPERTY_HEIGHT, height); d.put(TiC.PROPERTY_CROP_RECT, cropRect); d.put(TiC.PROPERTY_MEDIA, TiBlob.blobFromData(data, MIME_TYPE_PNG)); return d; } public static TiBlob getImageFromDict(KrollDict dict) { if (dict != null) { if (dict.containsKey(TiC.PROPERTY_MEDIA)) { Object media = dict.get(TiC.PROPERTY_MEDIA); if (media instanceof TiBlob) { return (TiBlob) media; } } } return null; } public static KrollDict viewToImage(KrollDict proxyDict, View view) { KrollDict image = new KrollDict(); if (view != null) { int width = view.getWidth(); int height = view.getHeight(); // maybe move this out to a separate method once other refactor regarding "getWidth", etc is done if (view.getWidth() == 0 && proxyDict != null && proxyDict.containsKey(TiC.PROPERTY_WIDTH)) { TiDimension widthDimension = new TiDimension(proxyDict.getString(TiC.PROPERTY_WIDTH), TiDimension.TYPE_WIDTH); width = widthDimension.getAsPixels(view); } if 
(view.getHeight() == 0 && proxyDict != null && proxyDict.containsKey(TiC.PROPERTY_HEIGHT)) {
                TiDimension heightDimension = new TiDimension(proxyDict.getString(TiC.PROPERTY_HEIGHT), TiDimension.TYPE_HEIGHT);
                height = heightDimension.getAsPixels(view);
            }
            int wmode = width == 0 ? MeasureSpec.UNSPECIFIED : MeasureSpec.EXACTLY;
            int hmode = height == 0 ? MeasureSpec.UNSPECIFIED : MeasureSpec.EXACTLY;
            view.measure(MeasureSpec.makeMeasureSpec(width, wmode), MeasureSpec.makeMeasureSpec(height, hmode));
            // Will force the view to layout itself, grab dimensions
            width = view.getMeasuredWidth();
            height = view.getMeasuredHeight();
            // set a default BS value if the dimension is still 0 and log a warning
            if (width == 0) {
                width = 100;
                Log.e(TAG, "Width property is 0 for view, display view before calling toImage()", Log.DEBUG_MODE);
            }
            if (height == 0) {
                height = 100;
                Log.e(TAG, "Height property is 0 for view, display view before calling toImage()", Log.DEBUG_MODE);
            }
            if (view.getParent() == null) {
                Log.i(TAG, "View does not have parent, calling layout", Log.DEBUG_MODE);
                view.layout(0, 0, width, height);
            }
            // opacity should support transparency by default
            Config bitmapConfig = Config.ARGB_8888;
            Drawable viewBackground = view.getBackground();
            if (viewBackground != null) {
                /*
                 * If the background is opaque then we should be able to safely use a space saving format that
                 * does not support the alpha channel. Basically, if a view has a background color set then the
                 * the pixel format will be opaque. If a background image supports an alpha channel, the pixel
                 * format will report transparency (even if the image doesn't actually look transparent). In
                 * short, most of the time the Config.ARGB_8888 format will be used when viewToImage is used
                 * but in the cases where the background is opaque, the lower memory approach will be used.
                 */
                if (viewBackground.getOpacity() == PixelFormat.OPAQUE) {
                    bitmapConfig = Config.RGB_565;
                }
            }
            Bitmap bitmap = Bitmap.createBitmap(width, height, bitmapConfig);
            Canvas canvas = new Canvas(bitmap);
            view.draw(canvas);
            ByteArrayOutputStream bos = new ByteArrayOutputStream();
            if (bitmap.compress(CompressFormat.PNG, 100, bos)) {
                image = createDictForImage(width, height, bos.toByteArray());
            }
            canvas = null;
            // Bitmap is fully encoded into bos at this point, safe to recycle.
            bitmap.recycle();
        }
        return image;
    }

    /**
     * Creates and returns a Bitmap from an InputStream.
     * @param stream an InputStream to read bitmap data.
     * @return a new bitmap instance.
     * @module.api
     */
    public static Bitmap createBitmap(InputStream stream) {
        Rect pad = new Rect();
        BitmapFactory.Options opts = new BitmapFactory.Options();
        opts.inPurgeable = true;
        opts.inInputShareable = true;
        Bitmap b = null;
        try {
            b = BitmapFactory.decodeResourceStream(null, null, stream, pad, opts);
        } catch (OutOfMemoryError e) {
            Log.e(TAG, "Unable to load bitmap. Not enough memory: " + e.getMessage());
        }
        return b;
    }

    /**
     * Creates and returns a density scaled Bitmap from an InputStream.
     * @param stream an InputStream to read bitmap data.
     * @return a new bitmap instance.
     */
    public static Bitmap createDensityScaledBitmap(InputStream stream) {
        Rect pad = new Rect();
        BitmapFactory.Options opts = new BitmapFactory.Options();
        opts.inPurgeable = true;
        opts.inInputShareable = true;
        DisplayMetrics dm = new DisplayMetrics();
        dm.setToDefaults();
        // Decode as medium density and let BitmapFactory scale to the device density.
        opts.inDensity = DisplayMetrics.DENSITY_MEDIUM;
        opts.inTargetDensity = dm.densityDpi;
        opts.inScaled = true;
        Bitmap b = null;
        try {
            b = BitmapFactory.decodeResourceStream(null, null, stream, pad, opts);
        } catch (OutOfMemoryError e) {
            Log.e(TAG, "Unable to load bitmap. Not enough memory: " + e.getMessage());
        }
        return b;
    }

    // Derives (and caches) the generated drawable resource key for an image URL
    // under Resources/images: sanitized lowercase base name plus a 10-char md5 of
    // the (nine-patch-normalized) relative path.
    private static String getResourceKeyForImage(String url) {
        if (resourceImageKeys.containsKey(url)) {
            return resourceImageKeys.get(url);
        }
        Pattern pattern = Pattern.compile("^.*/Resources/images/(.*$)");
        Matcher matcher = pattern.matcher(url);
        if (!matcher.matches()) {
            return null;
        }
        String chopped = matcher.group(1);
        if (chopped == null) {
            return null;
        }
        chopped = chopped.toLowerCase();
        String forHash = chopped;
        // Hash nine-patch names as their plain .png equivalent.
        if (forHash.endsWith(".9.png")) {
            forHash = forHash.replace(".9.png", ".png");
        }
        String withoutExtension = chopped;
        if (chopped.matches("^.*\\..*$")) {
            if (chopped.endsWith(".9.png")) {
                withoutExtension = chopped.substring(0, chopped.lastIndexOf(".9.png"));
            } else {
                withoutExtension = chopped.substring(0, chopped.lastIndexOf('.'));
            }
        }
        String cleanedWithoutExtension = withoutExtension.replaceAll("[^a-z0-9_]", "_");
        StringBuilder result = new StringBuilder(100);
        // Cap the readable prefix at 80 chars to stay within resource-name limits.
        result.append(cleanedWithoutExtension.substring(0, Math.min(cleanedWithoutExtension.length(), 80)));
        result.append("_");
        result.append(DigestUtils.md5Hex(forHash).substring(0, 10));
        String sResult = result.toString();
        resourceImageKeys.put(url, sResult);
        return sResult;
    }

    // Resolves an image URL to its generated drawable resource id; 0 when the URL
    // is not a bundled Resources/images asset or the resource does not exist.
    public static int getResourceId(String url) {
        if (!url.contains("Resources/images/")) {
            return 0;
        }
        String key = getResourceKeyForImage(url);
        if (key == null) {
            return 0;
        }
        try {
            return TiRHelper.getResource("drawable." + key, false);
        } catch (TiRHelper.ResourceNotFoundException e) {
            return 0;
        }
    }

    /**
     * Creates and returns a bitmap from its url.
     * @param url the bitmap url.
     * @return a new bitmap instance
     * @module.api
     */
    public static Bitmap getResourceBitmap(String url) {
        int id = getResourceId(url);
        if (id == 0) {
            return null;
        } else {
            return getResourceBitmap(id);
        }
    }

    /**
     * Creates and returns a bitmap for the specified resource ID.
     * @param res_id the bitmap id.
     * @return a new bitmap instance.
     * @module.api
     */
    public static Bitmap getResourceBitmap(int res_id) {
        BitmapFactory.Options opts = new BitmapFactory.Options();
        opts.inPurgeable = true;
        opts.inInputShareable = true;
        Bitmap bitmap = null;
        try {
            bitmap = BitmapFactory.decodeResource(TiApplication.getInstance().getResources(), res_id, opts);
        } catch (OutOfMemoryError e) {
            Log.e(TAG, "Unable to load bitmap. Not enough memory: " + e.getMessage());
        }
        return bitmap;
    }

    // Loads a drawable from a fastdev-served file URL; null on I/O failure.
    public static Drawable loadFastDevDrawable(String url) {
        try {
            TiBaseFile tbf = TiFileFactory.createTitaniumFile(new String[] { url }, false);
            InputStream stream = tbf.getInputStream();
            Drawable d = BitmapDrawable.createFromStream(stream, url);
            stream.close();
            return d;
        } catch (IOException e) {
            Log.w(TAG, e.getMessage(), e);
        }
        return null;
    }

    // Resolves an image URL to a packaged Drawable resource; null when not found.
    public static Drawable getResourceDrawable(String url) {
        int id = getResourceId(url);
        if (id == 0) {
            return null;
        }
        return getResourceDrawable(id);
    }

    // Fetches a Drawable by resource id from the application resources.
    public static Drawable getResourceDrawable(int res_id) {
        return TiApplication.getInstance().getResources().getDrawable(res_id);
    }

    // Resolves an arbitrary path object (String URL or drawable reference) to a
    // Drawable; logs and returns null on any failure.
    public static Drawable getResourceDrawable(Object path) {
        Drawable d = null;
        try {
            if (path instanceof String) {
                TiUrl imageUrl = new TiUrl((String) path);
                TiFileHelper tfh = new TiFileHelper(TiApplication.getInstance());
                d = tfh.loadDrawable(imageUrl.resolve(), false);
            } else {
                d = TiDrawableReference.fromObject(TiApplication.getInstance().getCurrentActivity(), path).getDrawable();
            }
        } catch (Exception e) {
            Log.w(TAG, "Could not load drawable "+e.getMessage(), Log.DEBUG_MODE);
            d = null;
        }
        return d;
    }

    // Invokes Activity.overridePendingTransition via reflection (the API is not
    // available on very old platform levels); lazily caches the Method handle.
    public static void overridePendingTransition(Activity activity) {
        if (Build.VERSION.SDK_INT <= Build.VERSION_CODES.DONUT) {
            return;
        }
        if (overridePendingTransition == null) {
            try {
                overridePendingTransition = Activity.class.getMethod("overridePendingTransition", Integer.TYPE, Integer.TYPE);
            } catch (NoSuchMethodException e) {
                Log.w(TAG, "Activity.overridePendingTransition() not found");
            }
        }
        if (overridePendingTransition != null) {
try { overridePendingTransition.invoke(activity, new Object[]{0,0}); } catch (InvocationTargetException e) { Log.e(TAG, "Called incorrectly: " + e.getMessage()); } catch (IllegalAccessException e) { Log.e(TAG, "Illegal access: " + e.getMessage()); } } } public static ColorFilter createColorFilterForOpacity(float opacity) { // 5x4 identity color matrix + fade the alpha to achieve opacity float[] matrix = { 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, opacity, 0 }; return new ColorMatrixColorFilter(new ColorMatrix(matrix)); } public static void setDrawableOpacity(Drawable drawable, float opacity) { if (drawable instanceof ColorDrawable || drawable instanceof TiBackgroundDrawable) { drawable.setAlpha(Math.round(opacity * 255)); } else if (drawable != null) { drawable.setColorFilter(createColorFilterForOpacity(opacity)); } } public static void setPaintOpacity(Paint paint, float opacity) { paint.setColorFilter(createColorFilterForOpacity(opacity)); } public static void requestSoftInputChange(KrollProxy proxy, View view) { int focusState = TiUIView.SOFT_KEYBOARD_DEFAULT_ON_FOCUS; if (proxy.hasProperty(TiC.PROPERTY_SOFT_KEYBOARD_ON_FOCUS)) { focusState = TiConvert.toInt(proxy.getProperty(TiC.PROPERTY_SOFT_KEYBOARD_ON_FOCUS)); } if (focusState > TiUIView.SOFT_KEYBOARD_DEFAULT_ON_FOCUS) { if (focusState == TiUIView.SOFT_KEYBOARD_SHOW_ON_FOCUS) { showSoftKeyboard(view, true); } else if (focusState == TiUIView.SOFT_KEYBOARD_HIDE_ON_FOCUS) { showSoftKeyboard(view, false); } else { Log.w(TAG, "Unknown onFocus state: " + focusState); } } } /** * Shows/hides the soft keyboard. * @param view the current focused view. * @param show whether to show soft keyboard. */ public static void showSoftKeyboard(View view, boolean show) { InputMethodManager imm = (InputMethodManager) view.getContext().getSystemService(Activity.INPUT_METHOD_SERVICE); if (imm != null) { boolean useForce = (Build.VERSION.SDK_INT <= Build.VERSION_CODES.DONUT || Build.VERSION.SDK_INT >= 8) ? 
true : false; String model = TiPlatformHelper.getInstance().getModel(); if (model != null && model.toLowerCase().startsWith("droid")) { useForce = true; } if (show) { imm.showSoftInput(view, useForce ? InputMethodManager.SHOW_FORCED : InputMethodManager.SHOW_IMPLICIT); } else { imm.hideSoftInputFromWindow(view.getWindowToken(), useForce ? 0 : InputMethodManager.HIDE_IMPLICIT_ONLY); } } } /** * Run the Runnable "delayed" by using an AsyncTask to first require a new * thread and only then, in onPostExecute, run the Runnable on the UI thread. * @param runnable Runnable to run on UI thread. */ public static void runUiDelayed(final Runnable runnable) { (new AsyncTask<Void, Void, Void>() { @Override protected Void doInBackground(Void... arg0) { return null; } /** * Always invoked on UI thread. */ @Override protected void onPostExecute(Void result) { Handler handler = new Handler(Looper.getMainLooper()); handler.post(runnable); } }).execute(); } /** * If there is a block on the UI message queue, run the Runnable "delayed". * @param runnable Runnable to run on UI thread. */ public static void runUiDelayedIfBlock(final Runnable runnable) { //if (TiApplication.getInstance().getMessageQueue().isBlocking()) { if (TiMessenger.getMainMessenger().isBlocking()) { runUiDelayed(runnable); } else { //Handler handler = new Handler(Looper.getMainLooper()); //handler.post(runnable); TiMessenger.getMainMessenger().getHandler().post(runnable); } } public static void firePostLayoutEvent(TiViewProxy proxy) { if (proxy != null && proxy.hasListeners(TiC.EVENT_POST_LAYOUT)) { proxy.fireEvent(TiC.EVENT_POST_LAYOUT, null, false); } } /** * To get the redirected Uri * @param Uri */ public static Uri getRedirectUri(Uri mUri) throws MalformedURLException, IOException { if (Build.VERSION.SDK_INT < TiC.API_LEVEL_HONEYCOMB && ("http".equals(mUri.getScheme()) || "https".equals(mUri.getScheme()))) { // Media player doesn't handle redirects, try to follow them // here. 
(Redirects work fine without this in ICS.) while (true) { // java.net.URL doesn't handle rtsp if (mUri.getScheme() != null && mUri.getScheme().equals("rtsp")) break; URL url = new URL(mUri.toString()); HttpURLConnection cn = (HttpURLConnection) url.openConnection(); cn.setInstanceFollowRedirects(false); String location = cn.getHeaderField("Location"); if (location != null) { String host = mUri.getHost(); int port = mUri.getPort(); String scheme = mUri.getScheme(); mUri = Uri.parse(location); if (mUri.getScheme() == null) { // Absolute URL on existing host/port/scheme if (scheme == null) { scheme = "http"; } String authority = port == -1 ? host : host + ":" + port; mUri = mUri.buildUpon().scheme(scheme).encodedAuthority(authority).build(); } } else { break; } } } return mUri; } }
/* Generic definitions */
/* Assertions (useful to generate conditional code) */
/* Current type and class (and size, if applicable) */
/* Value methods */
/* Interfaces (keys) */
/* Interfaces (values) */
/* Abstract implementations (keys) */
/* Abstract implementations (values) */
/* Static containers (keys) */
/* Static containers (values) */
/* Implementations */
/* Synchronized wrappers */
/* Unmodifiable wrappers */
/* Other wrappers */
/* Methods (keys) */
/* Methods (values) */
/* Methods (keys/values) */
/* Methods that have special names depending on keys (but the special names depend on values) */
/* Equality */
/* Object/Reference-only definitions (keys) */
/* Primitive-type-only definitions (keys) */
/* Object/Reference-only definitions (values) */
/* Primitive-type-only definitions (values) */
/*
 * Copyright (C) 2002-2013 Sebastiano Vigna
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package it.unimi.dsi.fastutil.longs;
import it.unimi.dsi.fastutil.objects.ObjectSortedSet;
import it.unimi.dsi.fastutil.objects.ObjectSortedSets;
import java.util.Comparator;
import java.util.Map;
import java.util.SortedMap;
import java.util.NoSuchElementException;
/** A class providing static methods and objects that do useful things with type-specific sorted maps.
 *
 * <p>NOTE: this is machine-generated fastutil code; edit the template, not this file.
 *
 * @see java.util.Collections
 */
public class Long2ByteSortedMaps {
	// Static-utility holder: never instantiated.
	private Long2ByteSortedMaps() {}
	/** Returns a comparator for entries based on a given comparator on keys.
	 *
	 * @param comparator a comparator on keys.
	 * @return the associated comparator on entries.
	 */
	public static Comparator<? super Map.Entry<Long, ?>> entryComparator( final LongComparator comparator ) {
		return new Comparator<Map.Entry<Long, ?>>() {
			// Compares entries by key only; values are ignored.
			public int compare( Map.Entry<Long, ?> x, Map.Entry<Long, ?> y ) {
				return comparator.compare( x.getKey(), y.getKey() );
			}
		};
	}
	/** An immutable class representing an empty type-specific sorted map.
	 *
	 * <P>This class may be useful to implement your own in case you subclass
	 * a type-specific sorted map.
	 */
	public static class EmptySortedMap extends Long2ByteMaps.EmptyMap implements Long2ByteSortedMap, java.io.Serializable, Cloneable {
		private static final long serialVersionUID = -7046029254386353129L;
		protected EmptySortedMap() {}
		// An empty map has no ordering.
		public LongComparator comparator() { return null; }
		@SuppressWarnings("unchecked")
		public ObjectSortedSet<Long2ByteMap.Entry> long2ByteEntrySet() { return ObjectSortedSets.EMPTY_SET; }
		@SuppressWarnings("unchecked")
		public ObjectSortedSet<Map.Entry<Long, Byte>> entrySet() { return ObjectSortedSets.EMPTY_SET; }
		@SuppressWarnings("unchecked")
		public LongSortedSet keySet() { return LongSortedSets.EMPTY_SET; }
		// Every sub-view of an empty map is the empty map itself.
		@SuppressWarnings("unchecked")
		public Long2ByteSortedMap subMap( final long from, final long to ) { return EMPTY_MAP; }
		@SuppressWarnings("unchecked")
		public Long2ByteSortedMap headMap( final long to ) { return EMPTY_MAP; }
		@SuppressWarnings("unchecked")
		public Long2ByteSortedMap tailMap( final long from ) { return EMPTY_MAP; }
		public long firstLongKey() { throw new NoSuchElementException(); }
		public long lastLongKey() { throw new NoSuchElementException(); }
		// Boxed bridges delegating to the primitive variants.
		public Long2ByteSortedMap headMap( Long oto ) { return headMap( ((oto).longValue()) ); }
		public Long2ByteSortedMap tailMap( Long ofrom ) { return tailMap( ((ofrom).longValue()) ); }
		public Long2ByteSortedMap subMap( Long ofrom, Long oto ) { return subMap( ((ofrom).longValue()), ((oto).longValue()) ); }
		public Long firstKey() { return (Long.valueOf(firstLongKey())); }
		public Long lastKey() { return (Long.valueOf(lastLongKey())); }
	}
	/** An empty type-specific sorted map (immutable). It is serializable and cloneable. */
	@SuppressWarnings("rawtypes")
	public static final EmptySortedMap EMPTY_MAP = new EmptySortedMap();
	/** An immutable class representing a type-specific singleton sorted map.
	 *
	 * <P>This class may be useful to implement your own in case you subclass
	 * a type-specific sorted map.
	 */
	public static class Singleton extends Long2ByteMaps.Singleton implements Long2ByteSortedMap, java.io.Serializable, Cloneable {
		private static final long serialVersionUID = -7046029254386353129L;
		// Optional key comparator; null means natural ordering.
		protected final LongComparator comparator;
		protected Singleton( final long key, final byte value, LongComparator comparator ) {
			super( key, value );
			this.comparator = comparator;
		}
		protected Singleton( final long key, final byte value ) { this( key, value, null ); }
		// Compares two keys using the configured comparator, or natural order when absent.
		@SuppressWarnings("unchecked")
		final int compare( final long k1, final long k2 ) {
			return comparator == null ? ( (k1) < (k2) ? -1 : ( (k1) == (k2) ? 0 : 1 ) ) : comparator.compare( k1, k2 );
		}
		public LongComparator comparator() { return comparator; }
		// Entry/key views are created lazily and cached in the inherited fields.
		@SuppressWarnings("unchecked")
		public ObjectSortedSet<Long2ByteMap.Entry> long2ByteEntrySet() { if ( entries == null ) entries = ObjectSortedSets.singleton( (Long2ByteMap.Entry)new SingletonEntry(), (Comparator<? super Long2ByteMap.Entry>)entryComparator( comparator ) ); return (ObjectSortedSet<Long2ByteMap.Entry>)entries; }
		@SuppressWarnings({ "rawtypes", "unchecked" })
		public ObjectSortedSet<Map.Entry<Long, Byte>> entrySet() { return (ObjectSortedSet)long2ByteEntrySet(); }
		public LongSortedSet keySet() { if ( keys == null ) keys = LongSortedSets.singleton( key, comparator ); return (LongSortedSet)keys; }
		// Sub-views either contain the single key or are empty.
		@SuppressWarnings("unchecked")
		public Long2ByteSortedMap subMap( final long from, final long to ) { if ( compare( from, key ) <= 0 && compare( key, to ) < 0 ) return this; return EMPTY_MAP; }
		@SuppressWarnings("unchecked")
		public Long2ByteSortedMap headMap( final long to ) { if ( compare( key, to ) < 0 ) return this; return EMPTY_MAP; }
		@SuppressWarnings("unchecked")
		public Long2ByteSortedMap tailMap( final long from ) { if ( compare( from, key ) <= 0 ) return this; return EMPTY_MAP; }
		public long firstLongKey() { return key; }
		public long lastLongKey() { return key; }
		// Boxed bridges delegating to the primitive variants.
		public Long2ByteSortedMap headMap( Long oto ) { return headMap( ((oto).longValue()) ); }
		public Long2ByteSortedMap tailMap( Long ofrom ) { return tailMap( ((ofrom).longValue()) ); }
		public Long2ByteSortedMap subMap( Long ofrom, Long oto ) { return subMap( ((ofrom).longValue()), ((oto).longValue()) ); }
		public Long firstKey() { return (Long.valueOf(firstLongKey())); }
		public Long lastKey() { return (Long.valueOf(lastLongKey())); }
	}
	/** Returns a type-specific immutable sorted map containing only the specified pair. The returned sorted map is serializable and cloneable.
	 *
	 * <P>Note that albeit the returned map is immutable, its default return value may be changed.
	 *
	 * @param key the only key of the returned sorted map.
	 * @param value the only value of the returned sorted map.
	 * @return a type-specific immutable sorted map containing just the pair <code>&lt;key,value></code>.
	 */
	public static Long2ByteSortedMap singleton( final Long key, Byte value ) {
		return new Singleton( ((key).longValue()), ((value).byteValue()) );
	}
	/** Returns a type-specific immutable sorted map containing only the specified pair. The returned sorted map is serializable and cloneable.
	 *
	 * <P>Note that albeit the returned map is immutable, its default return value may be changed.
	 *
	 * @param key the only key of the returned sorted map.
	 * @param value the only value of the returned sorted map.
	 * @param comparator the comparator to use in the returned sorted map.
	 * @return a type-specific immutable sorted map containing just the pair <code>&lt;key,value></code>.
	 */
	public static Long2ByteSortedMap singleton( final Long key, Byte value, LongComparator comparator ) {
		return new Singleton( ((key).longValue()), ((value).byteValue()), comparator );
	}
	/** Returns a type-specific immutable sorted map containing only the specified pair. The returned sorted map is serializable and cloneable.
	 *
	 * <P>Note that albeit the returned map is immutable, its default return value may be changed.
	 *
	 * @param key the only key of the returned sorted map.
	 * @param value the only value of the returned sorted map.
	 * @return a type-specific immutable sorted map containing just the pair <code>&lt;key,value></code>.
	 */
	public static Long2ByteSortedMap singleton( final long key, final byte value ) {
		return new Singleton( key, value );
	}
	/** Returns a type-specific immutable sorted map containing only the specified pair. The returned sorted map is serializable and cloneable.
	 *
	 * <P>Note that albeit the returned map is immutable, its default return value may be changed.
	 *
	 * @param key the only key of the returned sorted map.
	 * @param value the only value of the returned sorted map.
	 * @param comparator the comparator to use in the returned sorted map.
	 * @return a type-specific immutable sorted map containing just the pair <code>&lt;key,value></code>.
	 */
	public static Long2ByteSortedMap singleton( final long key, final byte value, LongComparator comparator ) {
		return new Singleton( key, value, comparator );
	}
	/** A synchronized wrapper class for sorted maps. */
	public static class SynchronizedSortedMap extends Long2ByteMaps.SynchronizedMap implements Long2ByteSortedMap, java.io.Serializable {
		private static final long serialVersionUID = -7046029254386353129L;
		// The wrapped map, retyped so sorted-map operations need no cast.
		protected final Long2ByteSortedMap sortedMap;
		protected SynchronizedSortedMap( final Long2ByteSortedMap m, final Object sync ) {
			super( m, sync );
			sortedMap = m;
		}
		protected SynchronizedSortedMap( final Long2ByteSortedMap m ) {
			super( m );
			sortedMap = m;
		}
		public LongComparator comparator() { synchronized( sync ) { return sortedMap.comparator(); } }
		// Views are lazily wrapped in synchronized views sharing the same lock.
		public ObjectSortedSet<Long2ByteMap.Entry> long2ByteEntrySet() { if ( entries == null ) entries = ObjectSortedSets.synchronize( sortedMap.long2ByteEntrySet(), sync ); return (ObjectSortedSet<Long2ByteMap.Entry>)entries; }
		@SuppressWarnings({ "rawtypes", "unchecked" })
		public ObjectSortedSet<Map.Entry<Long, Byte>> entrySet() { return (ObjectSortedSet)long2ByteEntrySet(); }
		public LongSortedSet keySet() { if ( keys == null ) keys = LongSortedSets.synchronize( sortedMap.keySet(), sync ); return (LongSortedSet)keys; }
		// Sub-views are wrapped with the same synchronization object.
		public Long2ByteSortedMap subMap( final long from, final long to ) { return new SynchronizedSortedMap( sortedMap.subMap( from, to ), sync ); }
		public Long2ByteSortedMap headMap( final long to ) { return new SynchronizedSortedMap( sortedMap.headMap( to ), sync ); }
		public Long2ByteSortedMap tailMap( final long from ) { return new SynchronizedSortedMap( sortedMap.tailMap( from ), sync ); }
		public long firstLongKey() { synchronized( sync ) { return sortedMap.firstLongKey(); } }
		public long lastLongKey() { synchronized( sync ) { return sortedMap.lastLongKey(); } }
		public Long firstKey() { synchronized( sync ) { return sortedMap.firstKey(); } }
		public Long lastKey() { synchronized( sync ) { return sortedMap.lastKey(); } }
		public Long2ByteSortedMap subMap( final Long from, final Long to ) { return new SynchronizedSortedMap( sortedMap.subMap( from, to ), sync ); }
		public Long2ByteSortedMap headMap( final Long to ) { return new SynchronizedSortedMap( sortedMap.headMap( to ), sync ); }
		public Long2ByteSortedMap tailMap( final Long from ) { return new SynchronizedSortedMap( sortedMap.tailMap( from ), sync ); }
	}
	/** Returns a synchronized type-specific sorted map backed by the given type-specific sorted map.
	 *
	 * @param m the sorted map to be wrapped in a synchronized sorted map.
	 * @return a synchronized view of the specified sorted map.
	 * @see java.util.Collections#synchronizedSortedMap(SortedMap)
	 */
	public static Long2ByteSortedMap synchronize( final Long2ByteSortedMap m ) { return new SynchronizedSortedMap( m ); }
	/** Returns a synchronized type-specific sorted map backed by the given type-specific sorted map, using an assigned object to synchronize.
	 *
	 * @param m the sorted map to be wrapped in a synchronized sorted map.
	 * @param sync an object that will be used to synchronize the access to the sorted map.
	 * @return a synchronized view of the specified sorted map.
	 * @see java.util.Collections#synchronizedSortedMap(SortedMap)
	 */
	public static Long2ByteSortedMap synchronize( final Long2ByteSortedMap m, final Object sync ) { return new SynchronizedSortedMap( m, sync ); }
	/** An unmodifiable wrapper class for sorted maps.
	 */
	public static class UnmodifiableSortedMap extends Long2ByteMaps.UnmodifiableMap implements Long2ByteSortedMap, java.io.Serializable {
		private static final long serialVersionUID = -7046029254386353129L;
		// The wrapped map, retyped so sorted-map operations need no cast.
		protected final Long2ByteSortedMap sortedMap;
		protected UnmodifiableSortedMap( final Long2ByteSortedMap m ) {
			super( m );
			sortedMap = m;
		}
		public LongComparator comparator() { return sortedMap.comparator(); }
		// Views are lazily wrapped in unmodifiable views.
		public ObjectSortedSet<Long2ByteMap.Entry> long2ByteEntrySet() { if ( entries == null ) entries = ObjectSortedSets.unmodifiable( sortedMap.long2ByteEntrySet() ); return (ObjectSortedSet<Long2ByteMap.Entry>)entries; }
		@SuppressWarnings({ "rawtypes", "unchecked" })
		public ObjectSortedSet<Map.Entry<Long, Byte>> entrySet() { return (ObjectSortedSet)long2ByteEntrySet(); }
		public LongSortedSet keySet() { if ( keys == null ) keys = LongSortedSets.unmodifiable( sortedMap.keySet() ); return (LongSortedSet)keys; }
		// Sub-views are wrapped unmodifiably as well.
		public Long2ByteSortedMap subMap( final long from, final long to ) { return new UnmodifiableSortedMap( sortedMap.subMap( from, to ) ); }
		public Long2ByteSortedMap headMap( final long to ) { return new UnmodifiableSortedMap( sortedMap.headMap( to ) ); }
		public Long2ByteSortedMap tailMap( final long from ) { return new UnmodifiableSortedMap( sortedMap.tailMap( from ) ); }
		public long firstLongKey() { return sortedMap.firstLongKey(); }
		public long lastLongKey() { return sortedMap.lastLongKey(); }
		public Long firstKey() { return sortedMap.firstKey(); }
		public Long lastKey() { return sortedMap.lastKey(); }
		public Long2ByteSortedMap subMap( final Long from, final Long to ) { return new UnmodifiableSortedMap( sortedMap.subMap( from, to ) ); }
		public Long2ByteSortedMap headMap( final Long to ) { return new UnmodifiableSortedMap( sortedMap.headMap( to ) ); }
		public Long2ByteSortedMap tailMap( final Long from ) { return new UnmodifiableSortedMap( sortedMap.tailMap( from ) ); }
	}
	/** Returns an unmodifiable type-specific sorted map backed by the given type-specific sorted map.
	 *
	 * @param m the sorted map to be wrapped in an unmodifiable sorted map.
	 * @return an unmodifiable view of the specified sorted map.
	 * @see java.util.Collections#unmodifiableSortedMap(SortedMap)
	 */
	public static Long2ByteSortedMap unmodifiable( final Long2ByteSortedMap m ) { return new UnmodifiableSortedMap( m ); }
}
package com.maxiee.heartbeat.ui;

import android.app.Activity;
import android.content.Intent;
import android.os.AsyncTask;
import android.os.Build;
import android.os.Bundle;
import android.support.v7.widget.Toolbar;
import android.util.Log;
import android.view.KeyEvent;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.EditText;
import android.widget.ImageButton;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;

import com.bumptech.glide.Glide;
import com.maxiee.heartbeat.R;
import com.maxiee.heartbeat.common.FileUtils;
import com.maxiee.heartbeat.common.TimeUtils;
import com.maxiee.heartbeat.database.utils.ThoughtUtils;
import com.maxiee.heartbeat.model.Thoughts;
import com.maxiee.heartbeat.ui.common.BaseActivity;
import com.wdullaer.materialdatetimepicker.date.DatePickerDialog;
import com.wdullaer.materialdatetimepicker.time.RadialPickerLayout;
import com.wdullaer.materialdatetimepicker.time.TimePickerDialog;

import java.util.Calendar;
import java.util.Timer;
import java.util.TimerTask;

import butterknife.Bind;
import butterknife.ButterKnife;

/**
 * Activity for creating a new "thought" or editing an existing one, with an
 * optional attached image and an editable timestamp.
 *
 * Created by maxiee on 15-9-15.
 */
public class AddEditThoughtActivity extends BaseActivity {
    private final static String TAG = AddEditThoughtActivity.class.getSimpleName();
    // Intent extra keys.
    public static final String MODE = "mode";
    public static final String EVENT_KEY = "event_key";
    public static final String THOUGHT_ID = "thought_id";
    public static final String THOUGHT = "thought";
    public static final String TIMESTAMP = "timestamp";
    // Values for the MODE extra.
    public static final int MODE_NEW = 0;
    public static final int MODE_EDIT = 1;
    public static final int INVALID_EVENT_KEY = -1;
    public static final long INVALID_THOUGHT_KEY = -1;
    // Request code for the image-picker activity result.
    private static final int ADD_IMAGE = 1127;

    @Bind(R.id.toolbar) Toolbar mToolbar;
    @Bind(R.id.edit_thought) EditText mEditThought;
    @Bind(R.id.image) ImageView mImage;
    @Bind(R.id.add_imgae) ImageButton mAddImageButton;
    @Bind(R.id.current_date) TextView mCurrentDate;
    @Bind(R.id.current_time) TextView mCurrentTime;

    private String mTextThought;
    private int mMode;
    private long mEventKey = INVALID_EVENT_KEY;
    private long mThoughtKey = INVALID_THOUGHT_KEY;
    // Current attached resource (type + path) and the values at edit start,
    // used to decide between insert/update/no-op when saving.
    private int mResType = Thoughts.Thought.HAS_NO_RES;
    private String mResPath = "";
    private int mResTypeOld = Thoughts.Thought.HAS_NO_RES;
    private String mResPathOld = "";
    // Double-back-to-exit guard; reset after 2s by a Timer.
    private boolean mExitEnsure = false;
    private long mTimestamp;
    private long mTimestampBackup;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_add_edit_thought);
        ButterKnife.bind(this);
        Intent intent = getIntent();
        mMode = intent.getIntExtra(MODE, MODE_NEW);
        if (mMode == MODE_NEW) {
            mEventKey = intent.getLongExtra(EVENT_KEY, INVALID_EVENT_KEY);
            mTimestamp = System.currentTimeMillis();
            mTimestampBackup = mTimestamp;
        }
        if (mMode == MODE_EDIT) {
            // Editing: preload the thought text, resource and timestamp from the intent.
            mThoughtKey = intent.getLongExtra(THOUGHT_ID, INVALID_THOUGHT_KEY);
            mTextThought = intent.getStringExtra(THOUGHT);
            mResType = intent.getIntExtra(
                    Thoughts.Thought.THOUGHT_RES,
                    Thoughts.Thought.HAS_NO_RES
            );
            mResPath = intent.getStringExtra(Thoughts.Thought.THOUGHT_PATH);
            mResTypeOld = mResType;
            mResPathOld = mResPath;
            mTimestamp = intent.getLongExtra(TIMESTAMP, System.currentTimeMillis());
            mTimestampBackup = mTimestamp;
        }
        setSupportActionBar(mToolbar);
        getSupportActionBar().setDisplayHomeAsUpEnabled(true);
        mAddImageButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                // Pre-KitKat devices lack the Storage Access Framework, so use GET_CONTENT there.
                if (Build.VERSION.SDK_INT < 19) {
                    Intent i = new Intent();
                    i.setType("image/*");
                    i.setAction(Intent.ACTION_GET_CONTENT);
                    startActivityForResult(
                            Intent.createChooser(i, getString(R.string.add_image)), ADD_IMAGE);
                } else {
                    Intent i = new Intent(Intent.ACTION_OPEN_DOCUMENT);
                    i.addCategory(Intent.CATEGORY_OPENABLE);
                    i.setType("image/*");
                    startActivityForResult(
                            Intent.createChooser(i, getString(R.string.add_image)), ADD_IMAGE);
                }
            }
        });
        if (mMode == MODE_NEW) setTitle(getString(R.string.add_thought));
        if (mMode == MODE_EDIT) setTitle(getString(R.string.dialog_edit_thought));
        if (mMode == MODE_EDIT) initEditView();
        initDate();
    }

    // Wires the date and time labels to material date/time picker dialogs
    // that mutate mTimestamp and refresh the labels.
    private void initDate() {
        updateDate();
        mCurrentDate.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                Calendar cal = Calendar.getInstance();
                cal.setTimeInMillis(mTimestamp);
                DatePickerDialog dpd = DatePickerDialog.newInstance(
                        new DatePickerDialog.OnDateSetListener() {
                            @Override
                            public void onDateSet(DatePickerDialog view, int year, int monthOfYear, int dayOfMonth) {
                                mTimestamp = TimeUtils.updateTimestampWithDate(year, monthOfYear, dayOfMonth, mTimestamp);
                                updateDate();
                            }
                        },
                        cal.get(Calendar.YEAR),
                        cal.get(Calendar.MONTH),
                        cal.get(Calendar.DAY_OF_MONTH)
                );
                dpd.show(getFragmentManager(), getString(R.string.date));
            }
        });
        mCurrentTime.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                Calendar cal = Calendar.getInstance();
                cal.setTimeInMillis(mTimestamp);
                TimePickerDialog tpd = TimePickerDialog.newInstance(
                        new TimePickerDialog.OnTimeSetListener() {
                            @Override
                            public void onTimeSet(RadialPickerLayout view, int hourOfDay, int minute, int second) {
                                mTimestamp = TimeUtils.updateTimestampWithTime(hourOfDay, minute, second, mTimestamp);
                                updateDate();
                            }
                        },
                        cal.get(Calendar.HOUR_OF_DAY),
                        cal.get(Calendar.MINUTE),
                        cal.get(Calendar.SECOND),
                        true);
                tpd.show(getFragmentManager(), getString(R.string.date));
            }
        });
    }

    // Refreshes the date/time labels from mTimestamp.
    private void updateDate() {
        mCurrentDate.setText(TimeUtils.parseDateDate(this, mTimestamp));
        mCurrentTime.setText(TimeUtils.parseDateTime(this, mTimestamp));
    }

    // Populates the form with the existing thought (edit mode only).
    private void initEditView() {
        mEditThought.setText(mTextThought);
        if (mResType == Thoughts.Thought.RES_IMAGE) {
            mImage.setVisibility(View.VISIBLE);
            Glide.with(this).load(mResPath).into(mImage);
        }
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        if (mMode == MODE_NEW) getMenuInflater().inflate(R.menu.dialog_new_thought, menu);
        if (mMode == MODE_EDIT) getMenuInflater().inflate(R.menu.dialog_edit_thought, menu);
        return true;
    }

    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        if (requestCode == ADD_IMAGE && resultCode == Activity.RESULT_OK) {
            // Show the picked image immediately and remember it as the pending resource.
            mImage.setVisibility(View.VISIBLE);
            Glide.with(this).load(data.getData()).into(mImage);
            mResType = Thoughts.Thought.RES_IMAGE;
            mResPath = FileUtils.uriToPath(this, data.getData());
        }
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        int id = item.getItemId();
        if (id == R.id.add) {
            mTextThought = mEditThought.getText().toString();
            if (!checkThoughtValid()) return true;
            new AddThoughtTask().execute();
        }
        if (id == R.id.done) {
            Log.d(TAG, "Thought edit mode:");
            mTextThought = mEditThought.getText().toString();
            if (!checkThoughtValid()) return true;
            // Decide how to persist the attached resource by comparing the
            // original (old) resource state against the current one.
            if (mResTypeOld == Thoughts.Thought.HAS_NO_RES
                    && mResType == Thoughts.Thought.HAS_NO_RES) {
                Log.d(TAG, "Has no res, update thought directly");
                new UpdateThoughtTask().execute(UpdateThoughtTask.RES_DO_NOTHING);
                return true;
            }
            if (mResTypeOld == Thoughts.Thought.HAS_NO_RES
                    && mResType != Thoughts.Thought.HAS_NO_RES) {
                Log.d(TAG, "No res to has res, insert res");
                new UpdateThoughtTask().execute(UpdateThoughtTask.RES_INSERT);
                return true;
            }
            if (mResTypeOld != Thoughts.Thought.HAS_NO_RES
                    && mResType != Thoughts.Thought.HAS_NO_RES) {
                if (mResType == mResTypeOld && mResPathOld.equals(mResPath)) {
                    Log.d(TAG, "Res not change!");
                    new UpdateThoughtTask().execute(UpdateThoughtTask.RES_DO_NOTHING);
                    return true;
                }
                Log.d(TAG, "Res changed, update it.");
                new UpdateThoughtTask().execute(UpdateThoughtTask.RES_UPDATE);
            }
            return true;
        }
        if (id == R.id.delete) {
            ThoughtUtils.deleteByThoughtId(this, mThoughtKey);
            finish();
        }
        if (id == android.R.id.home) {
            ensureExit();
            return true;
        }
        return super.onOptionsItemSelected(item);
    }

    @Override
    public boolean onKeyDown(int keyCode, KeyEvent event) {
        if (keyCode == KeyEvent.KEYCODE_BACK) {
            ensureExit();
            return true;
        }
        return super.onKeyDown(keyCode, event);
    }

    // Double-press-to-exit: the first press shows a toast and arms a 2s window;
    // a second press inside the window leaves the activity.
    private void ensureExit() {
        if (!mExitEnsure) {
            mExitEnsure = true;
            Toast.makeText(this, getString(R.string.exit_next_time), Toast.LENGTH_SHORT).show();
            new Timer().schedule(new TimerTask() {
                @Override
                public void run() {
                    mExitEnsure = false;
                }
            }, 2000);
        } else {
            this.onBackPressed();
        }
    }

    // Validates the current input; shows a toast and returns false when invalid.
    private boolean checkThoughtValid() {
        if (mTextThought.isEmpty()) {
            Toast.makeText(
                    this,
                    R.string.notempty,
                    Toast.LENGTH_LONG).show();
            return false;
        }
        if (mMode == MODE_NEW && mEventKey == INVALID_EVENT_KEY) {
            Toast.makeText(
                    this,
                    R.string.dataerror,
                    Toast.LENGTH_LONG).show();
            return false;
        }
        return true;
    }

    // Persists a brand new thought off the UI thread, then closes the activity.
    private class AddThoughtTask extends AsyncTask<Void, Void, Void> {
        @Override
        protected Void doInBackground(Void... params) {
            ThoughtUtils.addThought(AddEditThoughtActivity.this, mEventKey, mTextThought,
                    mTimestamp, mResType, mResPath);
            return null;
        }

        @Override
        protected void onPostExecute(Void aVoid) {
            finish();
        }
    }

    // Updates an existing thought; the single Integer param selects how the
    // attached resource row is handled (no-op / insert / update).
    private class UpdateThoughtTask extends AsyncTask<Integer, Void, Void> {
        public static final int RES_DO_NOTHING =0;
        public static final int RES_INSERT = 1;
        public static final int RES_UPDATE =2;

        @Override
        protected Void doInBackground(Integer... params) {
            int state = params[0];
            Log.d(TAG, "thoughtKey:" + String.valueOf(mThoughtKey));
            Log.d(TAG, "thought:" + mTextThought);
            ThoughtUtils.updateThought(AddEditThoughtActivity.this, mThoughtKey, mTextThought, mTimestamp);
            if (state == RES_DO_NOTHING) return null;
            if (state == RES_INSERT) {
                ThoughtUtils.addRes(AddEditThoughtActivity.this, mThoughtKey, mResType, mResPath);
            }
            if (state == RES_UPDATE) {
                ThoughtUtils.updateRes(AddEditThoughtActivity.this, mThoughtKey, mResType, mResPath);
            }
            return null;
        }

        @Override
        protected void onPostExecute(Void aVoid) {
            finish();
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.ml.tree.randomforest.data.impurity;

import org.apache.ignite.ml.dataset.feature.BucketMeta;
import org.apache.ignite.ml.dataset.feature.ObjectHistogram;
import org.apache.ignite.ml.dataset.impl.bootstrapping.BootstrappedVector;
import org.apache.ignite.ml.tree.randomforest.data.NodeSplit;
import org.apache.ignite.ml.tree.randomforest.data.impurity.basic.BootstrappedVectorsHistogram;
import org.apache.ignite.ml.tree.randomforest.data.impurity.basic.CountersHistogram;

import java.util.HashSet;
import java.util.Optional;
import java.util.Set;
import java.util.TreeMap;

/**
 * Class contains implementation of splitting point finding algorithm based on MSE metric (see
 * https://en.wikipedia.org/wiki/Mean_squared_error) and represents a set of histograms in according to this metric.
 *
 * <p>Internally it keeps three per-bucket histograms over the same feature: sample counters, sums of labels
 * and sums of squared labels. Those three statistics are sufficient to compute the MSE impurity of any
 * left/right partition induced by a bucket boundary, which {@link #findBestSplit()} exploits.</p>
 */
public class MSEHistogram extends ImpurityHistogram implements ImpurityComputer<BootstrappedVector, MSEHistogram> {
    /** Serial version uid. */
    private static final long serialVersionUID = 9175485616887867623L;

    /** Bucket meta. */
    private final BucketMeta bucketMeta;

    /** Sample id. */
    private final int sampleId;

    /** Counters (number of samples per bucket, weighted by bootstrap counters). */
    private ObjectHistogram<BootstrappedVector> counters;

    /** Sums of label values per bucket. */
    private ObjectHistogram<BootstrappedVector> sumOfLabels;

    /** Sums of squared label values per bucket. */
    private ObjectHistogram<BootstrappedVector> sumOfSquaredLabels;

    /**
     * Creates an instance of MSEHistogram.
     *
     * @param sampleId Sample id.
     * @param bucketMeta Bucket meta.
     */
    public MSEHistogram(int sampleId, BucketMeta bucketMeta) {
        super(bucketMeta.getFeatureMeta().getFeatureId());
        this.bucketMeta = bucketMeta;
        this.sampleId = sampleId;

        // All three sub-histograms share this.bucketIds, so adding an element registers its bucket id once.
        counters = new CountersHistogram(bucketIds, bucketMeta, featureId, sampleId);
        sumOfLabels = new SumOfLabelsHistogram(bucketIds, bucketMeta, featureId, sampleId, 1);
        sumOfSquaredLabels = new SumOfLabelsHistogram(bucketIds, bucketMeta, featureId, sampleId, 2);
    }

    /** {@inheritDoc} */
    @Override public void addElement(BootstrappedVector vector) {
        counters.addElement(vector);
        sumOfLabels.addElement(vector);
        sumOfSquaredLabels.addElement(vector);
    }

    /** {@inheritDoc} */
    @Override public MSEHistogram plus(MSEHistogram other) {
        MSEHistogram res = new MSEHistogram(sampleId, bucketMeta);
        res.counters = this.counters.plus(other.counters);
        res.sumOfLabels = this.sumOfLabels.plus(other.sumOfLabels);
        res.sumOfSquaredLabels = this.sumOfSquaredLabels.plus(other.sumOfSquaredLabels);
        res.bucketIds.addAll(this.bucketIds);
        // BUGFIX: was "res.bucketIds.addAll(bucketIds)" which re-added this histogram's buckets and
        // dropped the other's — buckets present only in 'other' were then skipped by findBestSplit().
        res.bucketIds.addAll(other.bucketIds);
        return res;
    }

    /** {@inheritDoc} */
    @Override public Set<Integer> buckets() {
        return bucketIds;
    }

    /** {@inheritDoc} */
    @Override public Optional<Double> getValue(Integer bucketId) {
        throw new IllegalStateException("MSE histogram doesn't support 'getValue' method");
    }

    /** {@inheritDoc} */
    @Override public Optional<NodeSplit> findBestSplit() {
        double bestImpurity = Double.POSITIVE_INFINITY;
        double bestSplitVal = Double.NEGATIVE_INFINITY;
        int bestBucketId = -1;

        // counter corresponds to number of samples
        // ys corresponds to sumOfLabels
        // y2s corresponds to sumOfSquaredLabels

        // Cumulative distribution functions: value at bucket b = sum over all buckets <= b.
        TreeMap<Integer, Double> cntrDistrib = counters.computeDistributionFunction();
        TreeMap<Integer, Double> ysDistrib = sumOfLabels.computeDistributionFunction();
        TreeMap<Integer, Double> y2sDistrib = sumOfSquaredLabels.computeDistributionFunction();

        // Totals over all buckets (last entry of each cumulative distribution).
        double cntrMax = cntrDistrib.lastEntry().getValue();
        double ysMax = ysDistrib.lastEntry().getValue();
        double y2sMax = y2sDistrib.lastEntry().getValue();

        double lastLeftCntrVal = 0.0;
        double lastLeftYVal = 0.0;
        double lastLeftY2Val = 0.0;

        for (Integer bucketId : bucketIds) {
            // Values for impurity computing to the left of bucket value.
            double leftCnt = cntrDistrib.getOrDefault(bucketId, lastLeftCntrVal);
            double leftY = ysDistrib.getOrDefault(bucketId, lastLeftYVal);
            double leftY2 = y2sDistrib.getOrDefault(bucketId, lastLeftY2Val);

            // Values for impurity computing to the right of bucket value.
            double rightCnt = cntrMax - leftCnt;
            double rightY = ysMax - leftY;
            double rightY2 = y2sMax - leftY2;

            double impurity = 0.0;
            if (leftCnt > 0)
                impurity += impurity(leftCnt, leftY, leftY2);
            if (rightCnt > 0)
                impurity += impurity(rightCnt, rightY, rightY2);

            if (impurity < bestImpurity) {
                bestImpurity = impurity;
                bestSplitVal = bucketMeta.bucketIdToValue(bucketId);
                bestBucketId = bucketId;
            }
        }

        return checkAndReturnSplitValue(bestBucketId, bestSplitVal, bestImpurity);
    }

    /**
     * Computes impurity function value: sum of (y_i - mean)^2 over the partition, expanded as
     * y2s - 2 * mean * ys + mean^2 * cnt where mean = ys / cnt.
     *
     * @param cnt Counter value.
     * @param ys Sum of Ys.
     * @param y2s Sum of Y^2 s.
     * @return Impurity value.
     */
    private double impurity(double cnt, double ys, double y2s) {
        return y2s - 2.0 * ys / cnt * ys + Math.pow(ys / cnt, 2) * cnt;
    }

    /**
     * @return Counters histogram.
     */
    ObjectHistogram<BootstrappedVector> getCounters() {
        return counters;
    }

    /**
     * @return Ys histogram.
     */
    ObjectHistogram<BootstrappedVector> getSumOfLabels() {
        return sumOfLabels;
    }

    /**
     * @return Y^2s histogram.
     */
    ObjectHistogram<BootstrappedVector> getSumOfSquaredLabels() {
        return sumOfSquaredLabels;
    }

    /** {@inheritDoc} */
    @Override public boolean isEqualTo(MSEHistogram other) {
        // Equal histograms must cover exactly the same bucket set ...
        HashSet<Integer> unionBuckets = new HashSet<>(buckets());
        unionBuckets.addAll(other.bucketIds);
        if (unionBuckets.size() != bucketIds.size())
            return false;

        // ... and agree on all three per-bucket statistics.
        if (!this.counters.isEqualTo(other.counters))
            return false;

        if (!this.sumOfLabels.isEqualTo(other.sumOfLabels))
            return false;

        return this.sumOfSquaredLabels.isEqualTo(other.sumOfSquaredLabels);
    }

    /**
     * Class for label summarizing in histograms: accumulates counter-weighted label^power per bucket.
     */
    private static class SumOfLabelsHistogram extends BootstrappedVectorsHistogram {
        /** Serial version uid. */
        private static final long serialVersionUID = -3846156279667677800L;

        /** Sample id. */
        private final int sampleId;

        /** Label power (1 for sums of labels, 2 for sums of squared labels). */
        private final double labelPower;

        /**
         * Create an instance of SumOfLabelsHistogram.
         *
         * @param bucketIds Bucket ids.
         * @param bucketMeta Bucket meta.
         * @param featureId Feature id.
         * @param sampleId Sample id.
         * @param labelPower Label power.
         */
        public SumOfLabelsHistogram(Set<Integer> bucketIds, BucketMeta bucketMeta, int featureId, int sampleId,
            double labelPower) {

            super(bucketIds, bucketMeta, featureId);
            this.sampleId = sampleId;
            this.labelPower = labelPower;
        }

        /** {@inheritDoc} */
        @Override public Integer mapToBucket(BootstrappedVector vec) {
            int bucketId = bucketMeta.getBucketId(vec.features().get(featureId));
            // Side effect: record the bucket id in the shared set used by findBestSplit().
            this.bucketIds.add(bucketId);
            return bucketId;
        }

        /** {@inheritDoc} */
        @Override public Double mapToCounter(BootstrappedVector vec) {
            return vec.counters()[sampleId] * Math.pow(vec.label(), labelPower);
        }

        /** {@inheritDoc} */
        @Override public ObjectHistogram<BootstrappedVector> newInstance() {
            return new SumOfLabelsHistogram(bucketIds, bucketMeta, featureId, sampleId, labelPower);
        }
    }
}
/** * Copyright (C) 2014 Xillio (support@xillio.com) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package nl.xillio.xill; import nl.xillio.events.EventHost; import nl.xillio.plugins.XillPlugin; import nl.xillio.xill.api.Debugger; import nl.xillio.xill.api.LanguageFactory; import nl.xillio.xill.api.components.*; import nl.xillio.xill.api.components.Robot; import nl.xillio.xill.api.construct.Construct; import nl.xillio.xill.api.construct.ConstructContext; import nl.xillio.xill.api.construct.ConstructProcessor; import nl.xillio.xill.api.errors.NotImplementedException; import nl.xillio.xill.api.errors.XillParsingException; import nl.xillio.xill.api.events.RobotStartedAction; import nl.xillio.xill.api.events.RobotStoppedAction; import nl.xillio.xill.components.expressions.CallbotExpression; import nl.xillio.xill.components.expressions.ConstructCall; import nl.xillio.xill.components.expressions.pipeline.FilterExpression; import nl.xillio.xill.components.expressions.FunctionCall; import nl.xillio.xill.components.expressions.FunctionParameterExpression; import nl.xillio.xill.components.expressions.pipeline.MapExpression; import nl.xillio.xill.components.expressions.pipeline.PeekExpression; import nl.xillio.xill.components.expressions.RunBulkExpression; import nl.xillio.xill.components.expressions.*; import nl.xillio.xill.components.expressions.pipeline.CollectTerminalExpression; import nl.xillio.xill.components.expressions.pipeline.ConsumeTerminalExpression; import 
nl.xillio.xill.components.expressions.pipeline.ForeachTerminalExpression; import nl.xillio.xill.components.expressions.pipeline.ReduceTerminalExpression; import nl.xillio.xill.components.instructions.BreakInstruction; import nl.xillio.xill.components.instructions.ContinueInstruction; import nl.xillio.xill.components.instructions.*; import nl.xillio.xill.components.instructions.ErrorInstruction; import nl.xillio.xill.components.instructions.ExpressionInstruction; import nl.xillio.xill.components.instructions.FunctionDeclaration; import nl.xillio.xill.components.instructions.IfInstruction; import nl.xillio.xill.components.instructions.Instruction; import nl.xillio.xill.components.instructions.InstructionSet; import nl.xillio.xill.components.instructions.ReturnInstruction; import nl.xillio.xill.components.instructions.VariableDeclaration; import nl.xillio.xill.components.instructions.WhileInstruction; import nl.xillio.xill.components.operators.*; import nl.xillio.xill.components.operators.And; import nl.xillio.xill.components.operators.Or; import nl.xillio.xill.debugging.DebugInfo; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.exception.ExceptionUtils; import org.eclipse.emf.ecore.EObject; import org.eclipse.emf.ecore.resource.Resource; import org.eclipse.xtext.nodemodel.INode; import org.eclipse.xtext.nodemodel.util.NodeModelUtils; import xill.lang.xill.*; import xill.lang.xill.Expression; import java.io.File; import java.lang.reflect.InvocationTargetException; import java.math.BigInteger; import java.util.*; import java.util.Map.Entry; import java.util.function.Function; /** * This class is responsible for processing the Robot token into a functional * program tree */ public class XillProgramFactory implements LanguageFactory<xill.lang.xill.Robot> { private final DynamicInvoker<EObject> expressionParseInvoker = new DynamicInvoker<>("parseToken", this); private final Map<xill.lang.xill.Target, VariableDeclaration> variables = new 
HashMap<>(); private final Map<xill.lang.xill.FunctionDeclaration, FunctionDeclaration> functions = new HashMap<>(); /** * Because functions don't have to be declared before the calls the * declaration might not exist while parsing the call. To fix this we will * not set the declaration on the call until we are finished parsing. */ private final Stack<Map.Entry<xill.lang.xill.FunctionDeclaration, FunctionParameterExpression>> functionParameterExpressions = new Stack<>(); private final Stack<Map.Entry<xill.lang.xill.FunctionCall, FunctionCall>> functionCalls = new Stack<>(); private final Map<xill.lang.xill.FunctionCall, List<Processable>> functionCallArguments = new HashMap<>(); private final Map<xill.lang.xill.UseStatement, XillPlugin> useStatements = new HashMap<>(); private final Map<Resource, RobotID> robotID = new HashMap<>(); private final List<XillPlugin> plugins; private final Debugger debugger; private final RobotID rootRobot; private final Map<EObject, Map.Entry<RobotID, Robot>> compiledRobots = new HashMap<>(); /** * Events for signalling that a robot has started and that a robot has stopped */ private final EventHost<RobotStartedAction> robotStartedEvent = new EventHost<>(); private final EventHost<RobotStoppedAction> robotStoppedEvent = new EventHost<>(); private final UUID compilerSerialId = UUID.randomUUID(); /** * Create a new {@link XillProgramFactory} * * @param plugins list of xill plug-ins. * @param debugger debugger object necessary for processing the robot. * @param robotID the robot. */ public XillProgramFactory(final List<XillPlugin> plugins, final Debugger debugger, final RobotID robotID) { this(plugins, debugger, robotID, false); } /** * Create a new {@link XillProgramFactory} * * @param plugins list of xill plug-ins. * @param debugger debugger object necessary for processing the robot. * @param robotID the robot. 
* @param verbose verbose logging for the compiler */ public XillProgramFactory(final List<XillPlugin> plugins, final Debugger debugger, final RobotID robotID, final boolean verbose) { this.debugger = debugger; rootRobot = robotID; expressionParseInvoker.setVERBOSE(verbose); this.plugins = plugins; } @Override public void parse(final xill.lang.xill.Robot robot, final RobotID robotID) throws XillParsingException { this.robotID.put(robot.eResource(), robotID); DebugInfo info = new DebugInfo(); info.setVariables(variables); info.setUsing(useStatements); for (UseStatement plugin : robot.getUses()) { String pluginName = plugin.getPlugin(); if (pluginName == null) { // In case of non-qualified name: use MySQL; pluginName = plugin.getName(); } // Really? Java... String searchName = pluginName; Optional<XillPlugin> ActualPlugin = plugins.stream() .filter(pckage -> pckage.getName().equals(searchName)).findAny(); if (!ActualPlugin.isPresent()) { CodePosition pos = pos(plugin); throw new XillParsingException("Could not find plugin " + pluginName, pos.getLineNumber(), pos.getRobotID()); } useStatements.put(plugin, ActualPlugin.get()); } nl.xillio.xill.components.Robot instructionRobot = new nl.xillio.xill.components.Robot(robotID, debugger, robotStartedEvent, robotStoppedEvent, compilerSerialId); compiledRobots.put(robot, new SimpleEntry<>(robotID, instructionRobot)); for (xill.lang.xill.Instruction instruction : robot.getInstructionSet().getInstructions()) { instructionRobot.add(parse(instruction)); } debugger.addDebugInfo(info); } @Override public void compile() throws XillParsingException { // Push all FunctionDeclarations after parsing while (!functionCalls.isEmpty()) { Entry<xill.lang.xill.FunctionCall, FunctionCall> pair = functionCalls.pop(); parseToken(pair.getKey(), pair.getValue()); } // Push all map expressions while (!functionParameterExpressions.isEmpty()) { Entry<xill.lang.xill.FunctionDeclaration, FunctionParameterExpression> pair = functionParameterExpressions 
.pop(); parseToken(pair.getKey(), pair.getValue()); } // Push all libraries for (EObject token : compiledRobots.keySet()) { xill.lang.xill.Robot robotToken = (xill.lang.xill.Robot) token; Map.Entry<RobotID, Robot> pair = compiledRobots.get(robotToken); Robot robot = pair.getValue(); RobotID id = pair.getKey(); // Get includes for (IncludeStatement include : robotToken.getIncludes()) { // Build robotID String path = StringUtils.join(include.getName(), File.separator) + ".xill"; RobotID expectedID = RobotID.getInstance(new File(id.getProjectPath(), path), id.getProjectPath()); CodePosition pos = pos(include); // Find the matching robot Optional<Entry<RobotID, Robot>> matchingRobot = compiledRobots.values().stream() .filter(entry -> entry.getKey() == expectedID).findAny(); if (!matchingRobot.isPresent()) { throw new XillParsingException("Could not resolve import", pos.getLineNumber(), pos.getRobotID()); } // Push the library ((nl.xillio.xill.components.Robot) robot) .addLibrary((nl.xillio.xill.components.Robot) matchingRobot.get().getValue()); } } } @Override public Robot getRobot(final xill.lang.xill.Robot token) { return compiledRobots.get(token).getValue(); } /** * This method will use a {@link DynamicInvoker} to route the current parse * assignment to the correct method. To do this it will search through all * declared methods called parse that have 1 argument and try to select the * best argument type. * * @param token The component that should be parsed. * @return The resulting expression from the selected parse method * @throws XillParsingException When something went wrong while parsing this component. 
*/ private Processable parse(final Expression token) throws XillParsingException { if (token == null) { throw new NullPointerException("Cannot parse null token."); } try { return expressionParseInvoker.invoke(token, Processable.class); } catch (InvocationTargetException | IllegalArgumentException e) { Throwable root = ExceptionUtils.getRootCause(e); if (root instanceof XillParsingException) { throw (XillParsingException) root; } CodePosition pos = pos(token); throw new XillParsingException("Something went wrong while parsing expression of type " + token.getClass().getSimpleName() + ": " + ExceptionUtils.getRootCauseMessage(e), pos.getLineNumber(), pos.getRobotID(), e); } } /** * @see XillProgramFactory#parse(Expression) */ private Instruction parse(final xill.lang.xill.Instruction token) throws XillParsingException { if (token == null) { throw new NullPointerException("Cannot parse null token."); } try { Instruction result = expressionParseInvoker.invoke(token, Instruction.class); result.setPosition(pos(token)); return result; } catch (InvocationTargetException | IllegalArgumentException e) { Throwable root = ExceptionUtils.getRootCause(e); if (root instanceof XillParsingException) { throw (XillParsingException) root; } CodePosition pos = pos(token); throw new XillParsingException("Something went wrong while parsing instruction of type " + token.getClass().getSimpleName() + ": " + ExceptionUtils.getRootCauseMessage(e), pos.getLineNumber(), pos.getRobotID(), e); } } /** * Parse the instruction set * * @param token * @return * @throws XillParsingException When parsing an instruction wasn't successful */ InstructionSet parseToken(final xill.lang.xill.InstructionSet token) throws XillParsingException { InstructionSet instructionSet = new InstructionSet(debugger); for (xill.lang.xill.Instruction instruction : token.getInstructions()) { instructionSet.add(parse(instruction)); } return instructionSet; } /** * Parse an If Instruction * * @param token * @return * @throws 
XillParsingException */ IfInstructionBlock parseToken(final xill.lang.xill.IfInstruction token) throws XillParsingException { List<IfInstruction> conditionals = new ArrayList<>(); ElseInstruction elseInstruction = null; // Parse if instructions Iterator<Expression> conditionItt = token.getConditions().iterator(); Iterator<InstructionBlock> instructionItt = token.getInstructionBlocks().iterator(); while (conditionItt.hasNext() && instructionItt.hasNext()) { Expression condition = conditionItt.next(); IfInstruction instruction = new IfInstruction(parse(condition), parseToken(instructionItt.next().getInstructionSet())); instruction.setPosition(pos(condition)); conditionals.add(instruction); } // Parse else if (token.getElseBlock() != null) { elseInstruction = new ElseInstruction(parseToken(token.getElseBlock().getInstructionSet())); elseInstruction.setPosition(pos(token.getElseBlock())); } IfInstructionBlock instruction = new IfInstructionBlock(conditionals, elseInstruction); return instruction; } /** * Parse a While Instruction * * @param token * @return * @throws XillParsingException */ WhileInstruction parseToken(final xill.lang.xill.WhileInstruction token) throws XillParsingException { return new WhileInstruction(parse(token.getCondition()), parseToken(token.getInstructionBlock().getInstructionSet())); } /** * Parse a ErrorInstruction * * @param token * @return * @throws XillParsingException */ ErrorInstruction parseToken(final xill.lang.xill.ErrorInstruction token) throws XillParsingException { Target cause = token.getCause(); VariableDeclaration causeVar = null; if (cause != null) { causeVar = VariableDeclaration.nullDeclaration(pos(token.getErrorBlock()), cause.getName()); variables.put(cause, causeVar); } return new ErrorInstruction( token.getDoBlock() == null ? null : parseToken(token.getDoBlock().getInstructionSet()), token.getSuccessBlock() == null ? null : parseToken(token.getSuccessBlock().getInstructionSet()), token.getErrorBlock() == null ? 
null : parseToken(token.getErrorBlock().getInstructionSet()), token.getFinallyBlock() == null ? null : parseToken(token.getFinallyBlock().getInstructionSet()), causeVar); } /** * Parse a Foreach Instruction * * @param token * @return * @throws XillParsingException */ ForeachInstruction parseToken(final xill.lang.xill.ForEachInstruction token) throws XillParsingException { VariableDeclaration valueDec = VariableDeclaration.nullDeclaration(pos(token), token.getValueVar().getName()); variables.put(token.getValueVar(), valueDec); if (token.getKeyVar() != null) { VariableDeclaration keyDec = VariableDeclaration.nullDeclaration(pos(token), token.getKeyVar().getName()); variables.put(token.getKeyVar(), keyDec); return new ForeachInstruction(parseToken(token.getInstructionBlock().getInstructionSet()), parse(token.getIterator()), valueDec, keyDec); } return new ForeachInstruction(parseToken(token.getInstructionBlock().getInstructionSet()), parse(token.getIterator()), valueDec); } /** * Parse a Break Instruction * * @param token * @return */ BreakInstruction parseToken(final xill.lang.xill.BreakInstruction token) { return new BreakInstruction(); } /** * Parse a InstructionFlow instruction * * @param token * @return * @throws XillParsingException */ ReturnInstruction parseToken(final xill.lang.xill.ReturnInstruction token) throws XillParsingException { if (token.getValue() == null) { return new ReturnInstruction(); } return new ReturnInstruction(parse(token.getValue())); } /** * Parse a Continue instruction * * @param token * @return */ ContinueInstruction parseToken(final xill.lang.xill.ContinueInstruction token) { return new ContinueInstruction(); } /** * Parse a Function Declaration * * @param token * @return * @throws XillParsingException */ FunctionDeclaration parseToken(final xill.lang.xill.FunctionDeclaration token) throws XillParsingException { // Push the arguments List<VariableDeclaration> parameters = new ArrayList<>(token.getParameters().size()); for (Target 
parameter : token.getParameters()) { // TODO Default values VariableDeclaration declaration = FunctionParameterDeclaration.nullDeclaration(pos(token), parameter.getName()); parameters.add(declaration); variables.put(parameter, declaration); } InstructionSet instructions = parseToken(token.getInstructionBlock().getInstructionSet()); FunctionDeclaration declaration = new FunctionDeclaration(instructions, parameters); functions.put(token, declaration); return declaration; } /** * Parse a MetaExpression Declaration * * @param token * @return * @throws XillParsingException */ VariableDeclaration parseToken(final xill.lang.xill.VariableDeclaration token) throws XillParsingException { Processable expression = token.getValue() == null ? ExpressionBuilderHelper.NULL : parse(token.getValue()); VariableDeclaration declaration = buildDeclaration(expression, token); variables.put(token.getName(), declaration); return declaration; } private VariableDeclaration buildDeclaration(Processable expression, xill.lang.xill.VariableDeclaration token) throws XillParsingException { if ("var".equalsIgnoreCase(token.getType())) { return new VariableDeclaration(expression, token.getName().getName()); } xill.lang.xill.Robot robotToken = findRobot(token); Robot robot = compiledRobots.get(robotToken).getValue(); return new VariableDeclaration(expression, token.getName().getName(), robot); } private xill.lang.xill.Robot findRobot(EObject object) throws XillParsingException { EObject current = object; while (current != null) { if (current instanceof xill.lang.xill.Robot) { return (xill.lang.xill.Robot) current; } current = current.eContainer(); } CodePosition pos = pos(object); throw new XillParsingException("Could not detect robot for " + object, pos.getLineNumber(), pos.getRobotID()); } /** * Parse an Expression at root level * * @param token * @return * @throws XillParsingException */ ExpressionInstruction parseToken(final xill.lang.xill.ExpressionInstruction token) throws XillParsingException 
{ ExpressionInstruction instruction = new ExpressionInstruction(parse(token.getExpression())); return instruction; } /** * Parse a general Expression * * @param token * @return * @throws XillParsingException */ Processable parseToken(final xill.lang.xill.Expression token) throws XillParsingException { Processable value = parse(token.getExpression()); // Parse prefixes if (token.getPrefix() != null) { switch (token.getPrefix()) { case "-": value = new Subtract(new ExpressionBuilder(0), value); break; case "!": value = new Negate(value); break; case "++": Target pTarget = getTarget(token.getExpression()); List<Processable> pPath = getPath(token.getExpression()); VariableDeclaration pDeclaration = variables.get(pTarget); value = new IntegerShortcut(pDeclaration, pPath, value, 1, false); break; case "--": Target mTarget = getTarget(token.getExpression()); List<Processable> mPath = getPath(token.getExpression()); VariableDeclaration mDeclaration = variables.get(mTarget); value = new IntegerShortcut(mDeclaration, mPath, value, -1, false); break; case "@": value = new StringConstant(value); break; default: throw new NotImplementedException("This prefix has not been implemented."); } } // Parse suffixes if (token.getSuffix() != null) { switch (token.getSuffix()) { case "++": Target pTarget = getTarget(token.getExpression()); List<Processable> pPath = getPath(token.getExpression()); VariableDeclaration pDeclaration = variables.get(pTarget); value = new IntegerShortcut(pDeclaration, pPath, value, 1, true); break; case "--": Target mTarget = getTarget(token.getExpression()); List<Processable> mPath = getPath(token.getExpression()); VariableDeclaration mDeclaration = variables.get(mTarget); value = new IntegerShortcut(mDeclaration, mPath, value, -1, true); break; default: throw new NotImplementedException("This suffix has not been implemented."); } } return value; } /** * Parse an Or operation * * @param token * @return * @throws XillParsingException */ Or parseToken(final 
xill.lang.xill.impl.OrImpl token) throws XillParsingException { Or orExpression = new Or(parse(token.getLeft()), parse(token.getRight())); return orExpression; } /** * Parse an And operation * * @param token * @return * @throws XillParsingException */ And parseToken(final xill.lang.xill.impl.AndImpl token) throws XillParsingException { And andExpression = new And(parse(token.getLeft()), parse(token.getRight())); return andExpression; } /** * Parse an Equals operation * * @param token * @return * @throws XillParsingException */ Processable parseToken(final xill.lang.xill.impl.EqualityImpl token) throws XillParsingException { switch (token.getOp()) { case "==": return new Equals(parse(token.getLeft()), parse(token.getRight())); case "!=": return new NotEquals(parse(token.getLeft()), parse(token.getRight())); default: CodePosition pos = pos(token); throw new XillParsingException("This token has not been implemented.", pos.getLineNumber(), pos.getRobotID()); } } /** * Parse an Add-priority operation * * @param token * @return * @throws XillParsingException */ Processable parseToken(final xill.lang.xill.impl.AdditionImpl token) throws XillParsingException { Processable expression; switch (token.getOp()) { case "+": expression = new Add(parse(token.getLeft()), parse(token.getRight())); break; case "-": expression = new Subtract(parse(token.getLeft()), parse(token.getRight())); break; case "::": expression = new Concat(parse(token.getLeft()), parse(token.getRight())); break; default: CodePosition pos = pos(token); throw new XillParsingException("This operator has not been implemented.", pos.getLineNumber(), pos.getRobotID()); } return expression; } /** * Parse an Compare-priority operation * * @param token * @return * @throws XillParsingException */ Processable parseToken(final xill.lang.xill.impl.ComparisonImpl token) throws XillParsingException { Processable expression; switch (token.getOp()) { case ">": expression = new GreaterThan(parse(token.getLeft()), 
parse(token.getRight())); break; case "<": expression = new SmallerThan(parse(token.getLeft()), parse(token.getRight())); break; case ">=": expression = new GreaterThanOrEquals(parse(token.getLeft()), parse(token.getRight())); break; case "<=": expression = new SmallerThanOrEquals(parse(token.getLeft()), parse(token.getRight())); break; default: CodePosition pos = pos(token); throw new XillParsingException("This operator has not been implemented.", pos.getLineNumber(), pos.getRobotID()); } return expression; } /** * Parse a Multiply-priority operation * * @param token * @return * @throws XillParsingException */ Processable parseToken(final xill.lang.xill.impl.MultiplicationImpl token) throws XillParsingException { Processable expression; switch (token.getOp()) { case "*": expression = new Multiply(parse(token.getLeft()), parse(token.getRight())); break; case "/": expression = new Divide(parse(token.getLeft()), parse(token.getRight())); break; case "%": expression = new Modulo(parse(token.getLeft()), parse(token.getRight())); break; case "^": expression = new Power(parse(token.getLeft()), parse(token.getRight())); break; default: CodePosition pos = pos(token); throw new XillParsingException("This operator has not been implemented.", pos.getLineNumber(), pos.getRobotID()); } return expression; } /** * Parse a Assignment-priority operation * * @param token * @return * @throws XillParsingException */ Assign parseToken(final xill.lang.xill.impl.AssignmentImpl token) throws XillParsingException { Assign expression; Target target = getTarget(token.getLeft()); VariableDeclaration declaration = variables.get(target); List<Processable> path = getPath(token.getLeft().getExpression()); switch (token.getOp()) { case "=": expression = new Assign(declaration, path, parse(token.getRight())); break; case "+=": expression = new Assign(declaration, path, new Add(parse(token.getLeft()), parse(token.getRight()))); break; case "-=": expression = new Assign(declaration, path, new 
Subtract(parse(token.getLeft()), parse(token.getRight())));
                break;
            case "*=":
                expression = new Assign(declaration, path, new Multiply(parse(token.getLeft()), parse(token.getRight())));
                break;
            case "::=":
                expression = new Assign(declaration, path, new Concat(parse(token.getLeft()), parse(token.getRight())));
                break;
            case "/=":
                expression = new Assign(declaration, path, new Divide(parse(token.getLeft()), parse(token.getRight())));
                break;
            default:
                CodePosition pos = pos(token);
                throw new XillParsingException("This operator has not been implemented.", pos.getLineNumber(), pos.getRobotID());
        }
        return expression;
    }

    /**
     * Get the target of an assignment by walking down the left-hand side
     * until a {@link Target} node is found.
     *
     * @param start the left-hand-side AST node
     * @return the resolved {@link Target}, or {@code null} if none is found
     */
    private static Target getTarget(final EObject start) {
        EObject currentObject = start;
        while (currentObject != null && !(currentObject instanceof Target)) {
            if (currentObject instanceof Variable) {
                currentObject = ((Variable) currentObject).getTarget();
            } else if (currentObject instanceof ListExtraction) {
                currentObject = ((ListExtraction) currentObject).getValue();
            } else if (currentObject instanceof Expression) {
                currentObject = ((Expression) currentObject).getExpression();
            } else {
                // Unknown node type: stop searching.
                currentObject = null;
            }
        }
        return (Target) currentObject;
    }

    /**
     * Construct a list that represents the path into a variable
     * (the chain of indices/keys of a list extraction).
     *
     * @param start the left-hand-side expression node
     * @return the path elements; empty when the node is not a list extraction
     * @throws XillParsingException if an index expression cannot be parsed
     */
    private List<Processable> getPath(final EObject start) throws XillParsingException {
        List<Processable> result = new ArrayList<>();
        if (!(start instanceof ListExtraction)) {
            return result;
        }
        ListExtraction extraction = (ListExtraction) start;
        while (extraction != null) {
            if (extraction.getIndex() != null) {
                // Bracket notation: list[index]
                result.add(parse(extraction.getIndex()));
            } else if (extraction.getChild() != null) {
                // Dot notation: object.child
                result.add(new ExpressionBuilder(extraction.getChild()));
            } else {
                // Neither: list[] addresses the current collection size (append position).
                Processable size = new CollectionSize(parse(extraction.getValue()));
                result.add(size);
            }
            if (extraction.getValue() instanceof ListExtraction) {
                extraction = (ListExtraction) extraction.getValue();
            } else {
                extraction = null;
            }
        }
        return result;
    }

    /**
     * Parse a list extraction (subscript or dot access).
     *
     * @param token the extraction node
     * @return a {@link FromList} expression reading the addressed element
     * @throws XillParsingException if a sub-expression cannot be parsed
     */
    Processable parseToken(final xill.lang.xill.impl.ListExtractionImpl token) throws XillParsingException {
        Processable expression = parse(token.getValue());
        if (token.getIndex() != null) {
            // We used brackets
            Processable index = parse(token.getIndex());
            return new FromList(expression, index);
        }
        if (token.getChild() != null) {
            // We used dot-notation
            return new FromList(expression, new ExpressionBuilder(token.getChild()));
        }
        // We used neither: listVariable[]. Interpret as
        // listVariable[listVariable + 0]
        Processable size = new CollectionSize(expression);
        return new FromList(expression, size);
    }

    /**
     * Parse a written (literal) list.
     *
     * @param token the list literal node
     * @return an {@link ExpressionBuilder} producing the list
     * @throws XillParsingException if an element cannot be parsed
     */
    Processable parseToken(final xill.lang.xill.impl.ListExpressionImpl token) throws XillParsingException {
        List<Processable> expressions = new ArrayList<>(token.getValues().size());
        for (Expression exp : token.getValues()) {
            expressions.add(parse(exp));
        }
        return new ExpressionBuilder(expressions);
    }

    /**
     * Parse a written (literal) object.
     *
     * @param token the object literal node
     * @return an {@link ExpressionBuilder} producing the object
     * @throws XillParsingException if a key or value cannot be parsed
     */
    Processable parseToken(final xill.lang.xill.impl.ObjectExpressionImpl token) throws XillParsingException {
        Iterator<Expression> keys = token.getNames().iterator();
        Iterator<Expression> values = token.getValues().iterator();
        // LinkedHashMap preserves the declaration order of the object's entries.
        LinkedHashMap<Processable, Processable> object = new LinkedHashMap<>(token.getNames().size());
        while (keys.hasNext() && values.hasNext()) {
            object.put(parse(keys.next()), parse(values.next()));
        }
        return new ExpressionBuilder(object);
    }

    /**
     * Parse a variable reference.
     *
     * @param token the variable node
     * @return a {@link VariableAccessExpression} for the declared variable
     * @throws XillParsingException if the variable has not been declared
     */
    Processable parseToken(final xill.lang.xill.Variable token) throws XillParsingException {
        VariableDeclaration declaration = variables.get(token.getTarget());
        if (declaration == null) {
            CodePosition pos = pos(token);
            throw new
XillParsingException("No such variable found: " + token.getTarget().getName(), pos.getLineNumber(), pos.getRobotID());
        }
        return new VariableAccessExpression(declaration);
    }

    /**
     * Parse a user-defined function call. The call is recorded so it can be linked
     * to its declaration after the whole robot has been parsed (calls may precede
     * their declarations in the source).
     *
     * @param token the function-call node
     * @return a placeholder {@link FunctionCall} expression, initialized in a later pass
     * @throws XillParsingException if an argument cannot be parsed
     */
    Processable parseToken(final xill.lang.xill.FunctionCall token) throws XillParsingException {
        FunctionCall callExpression = new FunctionCall();
        functionCalls.push(new SimpleEntry<>(token, callExpression));
        // Parse the arguments
        List<Processable> arguments = new ArrayList<>(token.getArgumentBlock().getParameters().size());
        for (Expression expression : token.getArgumentBlock().getParameters()) {
            arguments.add(parse(expression));
        }
        functionCallArguments.put(token, arguments);
        return callExpression;
    }

    /**
     * Parse a construct (plugin) call.
     *
     * @param token the construct-call node
     * @return the parsed {@link ConstructCall}
     * @throws XillParsingException if the package or construct cannot be resolved,
     *         or the arguments do not match the construct's signature
     */
    Processable parseToken(final xill.lang.xill.ConstructCall token) throws XillParsingException {
        XillPlugin pluginPackage = useStatements.get(token.getPackage());
        CodePosition pos = pos(token);
        if (pluginPackage == null) {
            // Prefer the explicit plugin name; fall back to the package alias.
            String pluginName = token.getPackage().getPlugin();
            if (pluginName == null) {
                pluginName = token.getPackage().getName();
            }
            throw new XillParsingException("Could not resolve package `" + pluginName + "`", pos.getLineNumber(), pos.getRobotID());
        }
        Construct construct = pluginPackage.getConstruct(token.getFunction());
        if (construct == null) {
            throw new XillParsingException("The construct " + token.getFunction() + " does not exist in package " + pluginPackage.getName(), pos.getLineNumber(), pos.getRobotID());
        }
        // Parse the arguments
        List<Processable> arguments = new ArrayList<>(token.getArgumentBlock().getParameters().size());
        for (Expression argument : token.getArgumentBlock().getParameters()) {
            arguments.add(parse(argument));
        }
        // Check argument count by mocking the input
        ConstructContext constructContext = new ConstructContext(robotID.get(token.eResource()), rootRobot, construct, debugger, compilerSerialId, robotStartedEvent, robotStoppedEvent);
        try (ConstructProcessor processor = construct.prepareProcess(constructContext)) {
            return buildCall(construct, processor, arguments, constructContext, pos);
        }
    }

    /**
     * Validate a construct call against its processor and build the call expression.
     *
     * @param construct the resolved construct
     * @param processor the construct's argument processor
     * @param arguments the parsed argument expressions
     * @param context   the construct context for the call
     * @param pos       the code position, used for error reporting
     * @return the validated {@link ConstructCall}
     * @throws XillParsingException if the argument types or count do not match
     */
    private ConstructCall buildCall(Construct construct, ConstructProcessor processor, List<Processable> arguments, ConstructContext context, CodePosition pos) throws XillParsingException {
        for (int i = 0; i < arguments.size(); i++) {
            // Probe each slot with NULL / empty-list / empty-object mocks to verify some type fits.
            if (!processor.setArgument(i, ExpressionBuilderHelper.NULL) && !processor.setArgument(i, ExpressionBuilderHelper.emptyList()) && !processor.setArgument(i, ExpressionBuilderHelper.emptyObject())) {
                throw new XillParsingException("Failed to find a matching type for " + processor.toString(construct.getName()), pos.getLineNumber(), pos.getRobotID());
            }
        }
        // Throw exception if count is incorrect (i.e. We're either missing an
        // argument or provided too many)
        if (processor.getMissingArgument().isPresent() || processor.getNumberOfArguments() < arguments.size()) {
            throw new XillParsingException("Argument count mismatch in " + processor.toString(construct.getName()), pos.getLineNumber(), pos.getRobotID());
        }
        // Check whether a construct is deprecated (has a Deprecated annotation) and log a warning if this is the case
        if (construct.isDeprecated()) {
            context.getRootLogger().warn("Call to deprecated construct with name \"{}\" at {}", construct.getName(), pos.toString());
        }
        return new ConstructCall(construct, arguments, context);
    }

    /**
     * To fix the call -> declaration order problem this method will be called
     * for all function calls after parsing the whole robot.
     *
     * @param token       the original function-call node
     * @param declaration the placeholder call expression created during the first pass
     * @throws XillParsingException if an argument cannot be parsed or the function does not exist
     */
    private void parseToken(final xill.lang.xill.FunctionCall token, final FunctionCall declaration) throws XillParsingException {
        // Parse the assignments
        List<Processable> arguments = new ArrayList<>();
        for (Expression expr : token.getArgumentBlock().getParameters()) {
            arguments.add(parse(expr));
        }
        FunctionDeclaration functionDeclaration = functions.get(token.getName());
        if (functionDeclaration == null) {
            CodePosition pos = pos(token);
            throw new XillParsingException("Could not find function " + token.getName().getName(), pos.getLineNumber(), pos.getRobotID());
        }
        // Push the function
        declaration.initialize(functionDeclaration, arguments);
    }

    /**
     * Link a function-parameter expression (map/filter/peek/...) to its resolved
     * function declaration in the deferred pass.
     *
     * @param key        the function-declaration AST node
     * @param expression the expression that will invoke the function
     */
    private void parseToken(final xill.lang.xill.FunctionDeclaration key, final FunctionParameterExpression expression) {
        FunctionDeclaration functionDeclaration = functions.get(key);
        expression.setFunction(functionDeclaration);
    }

    /**
     * Parse a {@link MapExpression}.
     *
     * @param token the token
     * @return the expression
     * @throws XillParsingException if a compile error occurs
     */
    Processable parseToken(final xill.lang.xill.MapExpression token) throws XillParsingException {
        return parseFunctionParameter(token, MapExpression::new);
    }

    /**
     * Parse a {@link FilterExpression}.
     *
     * @param token the token
     * @return the expression
     * @throws XillParsingException if a compile error occurs
     */
    Processable parseToken(final xill.lang.xill.FilterExpression token) throws XillParsingException {
        return parseFunctionParameter(token, FilterExpression::new);
    }

    /**
     * Parse a {@link PeekExpression}.
     *
     * @param token the token
     * @return the expression
     * @throws XillParsingException if a compile error occurs
     */
    Processable parseToken(final xill.lang.xill.PeekExpression token) throws XillParsingException {
        return parseFunctionParameter(token, PeekExpression::new);
    }

    /**
     * Parse a {@link ForeachExpression} into its terminal form.
     *
     * @param token the token
     * @return the expression
     * @throws XillParsingException if a compile error occurs
     */
    Processable parseToken(final xill.lang.xill.ForeachExpression token) throws XillParsingException {
        return parseFunctionParameter(token, ForeachTerminalExpression::new);
    }

    /**
     * Parse a {@link ReduceTerminalExpression}.
     *
     * @param token the token
     * @return the expression
     * @throws XillParsingException if a compile error occurs
     */
    Processable parseToken(final xill.lang.xill.ReduceExpression token) throws XillParsingException
{
        Processable accumulator = parse(token.getAccumulator());
        return parseFunctionParameter(token, iterable -> new ReduceTerminalExpression(accumulator, iterable));
    }

    /**
     * Common helper for function-parameter expressions (map, filter, peek, foreach, reduce):
     * parses the iterable argument and defers linking of the referenced function.
     *
     * @param token       the function-parameter expression node
     * @param constructor builds the concrete expression from the parsed iterable argument
     * @return the constructed expression
     * @throws XillParsingException if the argument cannot be parsed
     */
    private Processable parseFunctionParameter(xill.lang.xill.FunctionParameterExpression token, Function<Processable, FunctionParameterExpression> constructor) throws XillParsingException {
        Processable argument = parse(token.getArgument());
        FunctionParameterExpression result = constructor.apply(argument);
        // The referenced function may not be parsed yet; resolve it in a later pass.
        functionParameterExpressions.push(new SimpleEntry<>(token.getFunction(), result));
        return result;
    }

    /**
     * Parse a {@link ConsumeExpression}.
     *
     * @param expression the token
     * @return the expression
     * @throws XillParsingException if a compile error occurs
     */
    Processable parseToken(final xill.lang.xill.ConsumeExpression expression) throws XillParsingException {
        Processable iterableInput = parse(expression.getArgument());
        return new ConsumeTerminalExpression(iterableInput);
    }

    /**
     * Parse a {@link CollectExpression}.
     *
     * @param expression the token
     * @return the expression
     * @throws XillParsingException if a compile error occurs
     */
    Processable parseToken(final xill.lang.xill.CollectExpression expression) throws XillParsingException {
        Processable iterableInput = parse(expression.getArgument());
        return new CollectTerminalExpression(iterableInput);
    }

    /**
     * Parse a {@link CallbotExpression}.
     *
     * @param token the callbot node
     * @return the expression
     * @throws XillParsingException if the path or argument cannot be parsed
     */
    Processable parseToken(final xill.lang.xill.CallbotExpression token) throws XillParsingException {
        Processable path = parse(token.getPath());
        CallbotExpression expression = new CallbotExpression(path, rootRobot, plugins);
        if (token.getArgument() != null) {
            expression.setArgument(parse(token.getArgument()));
        }
        return expression;
    }

    /**
     * Parse a {@link RunBulkExpression}.
     *
     * @param token the runBulk node
     * @return the expression
     * @throws XillParsingException if the path, argument or options cannot be parsed
     */
    Processable parseToken(final xill.lang.xill.RunBulkExpression token) throws XillParsingException {
        Processable path = parse(token.getPath());
        RunBulkExpression expression = new RunBulkExpression(path, rootRobot, plugins);
        if (token.getArgument() != null) {
            expression.setArgument(parse(token.getArgument()));
        }
        if (token.getOptions() != null) {
            expression.setOptions(parse(token.getOptions()));
        }
        return expression;
    }

    /**
     * Parse a {@link BooleanLiteral}.
     *
     * @param token the literal node
     * @return the shared TRUE or FALSE expression constant
     */
    Processable parseToken(final xill.lang.xill.BooleanLiteral token) {
        if (Boolean.parseBoolean(token.getValue())) {
            return ExpressionBuilderHelper.TRUE;
        }
        return ExpressionBuilderHelper.FALSE;
    }

    /**
     * Parse a {@link NullLiteral}.
     *
     * @param token the literal node
     * @return the shared NULL expression constant
     */
    Processable parseToken(final xill.lang.xill.NullLiteral token) {
        return ExpressionBuilderHelper.NULL;
    }

    /**
     * Parse an {@link IntegerLiteral}, widening int -> long -> BigInteger as needed.
     *
     * @param token the literal node
     * @return the numeric expression
     */
    Processable parseToken(final xill.lang.xill.IntegerLiteral token) {
        try {
            return new ExpressionBuilder(Integer.parseInt(token.getValue()));
        } catch (NumberFormatException e) {
            try {
                return new ExpressionBuilder(Long.parseLong(token.getValue()));
            } catch (NumberFormatException e2) {
                // Fall back to arbitrary precision for values beyond the long range.
                return new ExpressionBuilder(new BigInteger(token.getValue()));
            }
        }
    }

    /**
     * Parse a {@link DecimalLiteral}.
     *
     * @param token the literal node
     * @return the floating-point expression
     */
    Processable parseToken(final xill.lang.xill.DecimalLiteral token) {
        return new ExpressionBuilder(Double.parseDouble(token.getValue()));
    }

    /**
     * Parse a {@link StringLiteral}.
     *
     * @param token the literal node
     * @return the string expression
     */
    Processable parseToken(final xill.lang.xill.StringLiteral token) {
        return new ExpressionBuilder(token.getValue());
    }

    // Resolve an AST node to its source position (robot + start line) for error reporting.
    private CodePosition pos(final EObject object) {
        INode node = NodeModelUtils.getNode(object);
        RobotID id = robotID.get(object.eResource());
        return new CodePosition(id, node.getStartLine());
    }

    /**
     * @return the debugger
     */
    public Debugger getDebugger() {
        return debugger;
    }

    /**
     * Immutable key/value pair used to queue deferred parsing work.
     * NOTE(review): setValue does not mutate the entry (fields are final), which
     * violates the {@link java.util.Map.Entry#setValue} contract — it appears to be
     * unused here; confirm before relying on it elsewhere.
     */
    private class SimpleEntry<K, V> implements Map.Entry<K, V> {
        private final K key;
        private final V value;

        /**
         * @param key   the entry key
         * @param value the entry value
         */
        public SimpleEntry(final K key, final V value) {
            this.key = key;
            this.value = value;
        }

        @Override
        public K getKey() {
            return key;
        }

        @Override
        public V getValue() {
            return value;
        }

        @Override
        public V setValue(final V value) {
            // No-op: returns the argument unchanged, leaving the entry unmodified.
            return value;
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * */ package org.apache.directory.server.kerberos.protocol; import static org.junit.Assert.assertEquals; import java.io.IOException; import java.net.InetAddress; import javax.security.auth.kerberos.KerberosPrincipal; import org.apache.directory.server.kerberos.KerberosConfig; import org.apache.directory.server.kerberos.kdc.KdcServer; import org.apache.directory.server.kerberos.protocol.AbstractAuthenticationServiceTest.KrbDummySession; import org.apache.directory.server.kerberos.shared.crypto.encryption.CipherTextHandler; import org.apache.directory.server.kerberos.shared.store.PrincipalStore; import org.apache.directory.shared.kerberos.KerberosTime; import org.apache.directory.shared.kerberos.codec.options.KdcOptions; import org.apache.directory.shared.kerberos.components.EncTicketPart; import org.apache.directory.shared.kerberos.components.EncryptionKey; import org.apache.directory.shared.kerberos.components.HostAddress; import org.apache.directory.shared.kerberos.components.HostAddresses; import org.apache.directory.shared.kerberos.components.KdcReq; import org.apache.directory.shared.kerberos.components.KdcReqBody; import org.apache.directory.shared.kerberos.exceptions.ErrorType; import 
org.apache.directory.shared.kerberos.flags.TicketFlag;
import org.apache.directory.shared.kerberos.messages.KrbError;
import org.apache.directory.shared.kerberos.messages.Ticket;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;


/**
 * Tests configuration of Ticket-Granting Service (TGS) policy.
 *
 * @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
 */
public class TicketGrantingPolicyTest extends AbstractTicketGrantingServiceTest
{
    // KDC configuration under test; policy flags are toggled per test case.
    private KerberosConfig config;
    private KdcServer kdcServer;
    private PrincipalStore store;
    private KerberosProtocolHandler handler;
    private KrbDummySession session;


    /**
     * Initializes the KDC server, principal store, protocol handler and dummy
     * session before each test.
     */
    @Before
    public void setUp() throws IOException
    {
        kdcServer = new KdcServer();
        config = kdcServer.getConfig();

        /*
         * Body checksum verification must be disabled because we are bypassing
         * the codecs, where the body bytes are set on the KdcReq message.
         */
        config.setBodyChecksumVerified( false );

        store = new MapPrincipalStoreImpl();
        handler = new KerberosProtocolHandler( kdcServer, store );
        session = new KrbDummySession();
        lockBox = new CipherTextHandler();
    }


    /**
     * Shutdown the Kerberos server after each test.
     */
    @After
    public void shutDown()
    {
        kdcServer.stop();
    }


    /**
     * Tests when forwardable tickets are disallowed that requests for
     * forwardable tickets fail with the correct error message.
     *
     * @throws Exception
     */
    @Test
    public void testForwardableTicket() throws Exception
    {
        // Deny FORWARDABLE tickets in policy.
        config.setForwardableAllowed( false );

        // Get the mutable ticket part.
        KerberosPrincipal clientPrincipal = new KerberosPrincipal( "hnelson@EXAMPLE.COM" );
        EncTicketPart encTicketPart = getTicketArchetype( clientPrincipal );

        // Make changes to test.
        encTicketPart.setFlag( TicketFlag.FORWARDABLE );

        // Seal the ticket for the server.
        KerberosPrincipal serverPrincipal = new KerberosPrincipal( "krbtgt/EXAMPLE.COM@EXAMPLE.COM" );
        String passPhrase = "randomKey";
        EncryptionKey serverKey = getEncryptionKey( serverPrincipal, passPhrase );
        Ticket tgt = getTicket( encTicketPart, serverPrincipal, serverKey );

        // Build a TGS request asking for a FORWARDABLE ticket.
        KdcReqBody kdcReqBody = new KdcReqBody();
        kdcReqBody.setSName( getPrincipalName( "ldap/ldap.example.com@EXAMPLE.COM" ) );
        kdcReqBody.setRealm( "EXAMPLE.COM" );
        kdcReqBody.setEType( config.getEncryptionTypes() );
        kdcReqBody.setNonce( random.nextInt() );

        KdcOptions kdcOptions = new KdcOptions();
        kdcOptions.set( KdcOptions.FORWARDABLE );
        kdcReqBody.setKdcOptions( kdcOptions );

        long now = System.currentTimeMillis();
        KerberosTime requestedEndTime = new KerberosTime( now + 1 * KerberosTime.DAY );
        kdcReqBody.setTill( requestedEndTime );

        KdcReq message = getKdcRequest( tgt, kdcReqBody );

        handler.messageReceived( session, message );

        // The KDC must reject the request with KDC_ERR_POLICY.
        Object msg = session.getMessage();
        assertEquals( "session.getMessage() instanceOf", KrbError.class, msg.getClass() );
        KrbError error = ( KrbError ) msg;
        assertEquals( "KDC policy rejects request", ErrorType.KDC_ERR_POLICY, error.getErrorCode() );
    }


    /**
     * Tests when forwardable tickets are disallowed that requests for
     * forwarded tickets fail with the correct error message.
     *
     * @throws Exception
     */
    @Test
    public void testForwardedTicket() throws Exception
    {
        // Deny FORWARDABLE tickets in policy.
        config.setForwardableAllowed( false );

        // Get the mutable ticket part.
        KerberosPrincipal clientPrincipal = new KerberosPrincipal( "hnelson@EXAMPLE.COM" );
        EncTicketPart encTicketPart = getTicketArchetype( clientPrincipal );

        // Make changes to test.
        encTicketPart.setFlag( TicketFlag.FORWARDABLE );

        // Seal the ticket for the server.
        KerberosPrincipal serverPrincipal = new KerberosPrincipal( "krbtgt/EXAMPLE.COM@EXAMPLE.COM" );
        String passPhrase = "randomKey";
        EncryptionKey serverKey = getEncryptionKey( serverPrincipal, passPhrase );
        Ticket tgt = getTicket( encTicketPart, serverPrincipal, serverKey );

        // Build a TGS request asking for a FORWARDED ticket.
        KdcReqBody kdcReqBody = new KdcReqBody();
        kdcReqBody.setSName( getPrincipalName( "ldap/ldap.example.com@EXAMPLE.COM" ) );
        kdcReqBody.setRealm( "EXAMPLE.COM" );
        kdcReqBody.setEType( config.getEncryptionTypes() );
        kdcReqBody.setNonce( random.nextInt() );

        KdcOptions kdcOptions = new KdcOptions();
        kdcOptions.set( KdcOptions.FORWARDED );
        kdcReqBody.setKdcOptions( kdcOptions );

        long now = System.currentTimeMillis();
        KerberosTime requestedEndTime = new KerberosTime( now + 1 * KerberosTime.DAY );
        kdcReqBody.setTill( requestedEndTime );

        KdcReq message = getKdcRequest( tgt, kdcReqBody );

        handler.messageReceived( session, message );

        Object msg = session.getMessage();
        assertEquals( "session.getMessage() instanceOf", KrbError.class, msg.getClass() );
        KrbError error = ( KrbError ) msg;
        assertEquals( "KDC policy rejects request", ErrorType.KDC_ERR_POLICY, error.getErrorCode() );
    }


    /**
     * Tests when empty addresses are disallowed and forwarded tickets are requested
     * that requests with no addresses fail with the correct error message.
     *
     * @throws Exception
     */
    @Test
    public void testForwardedNoAddressesTicket() throws Exception
    {
        // Deny empty addresses tickets in policy.
        config.setEmptyAddressesAllowed( false );

        // Get the mutable ticket part.
        KerberosPrincipal clientPrincipal = new KerberosPrincipal( "hnelson@EXAMPLE.COM" );
        EncTicketPart encTicketPart = getTicketArchetype( clientPrincipal );

        // Make changes to test.
        encTicketPart.setFlag( TicketFlag.FORWARDABLE );

        // All-zero address on the ticket; the request itself carries no addresses.
        HostAddress[] address =
            { new HostAddress( InetAddress.getByAddress( new byte[4] ) ) };
        HostAddresses addresses = new HostAddresses( address );
        encTicketPart.setClientAddresses( addresses );

        // Seal the ticket for the server.
KerberosPrincipal serverPrincipal = new KerberosPrincipal( "krbtgt/EXAMPLE.COM@EXAMPLE.COM" );
        String passPhrase = "randomKey";
        EncryptionKey serverKey = getEncryptionKey( serverPrincipal, passPhrase );
        Ticket tgt = getTicket( encTicketPart, serverPrincipal, serverKey );

        // Build a TGS request asking for a FORWARDED ticket without addresses.
        KdcReqBody kdcReqBody = new KdcReqBody();
        kdcReqBody.setSName( getPrincipalName( "ldap/ldap.example.com@EXAMPLE.COM" ) );
        kdcReqBody.setRealm( "EXAMPLE.COM" );
        kdcReqBody.setEType( config.getEncryptionTypes() );
        kdcReqBody.setNonce( random.nextInt() );

        KdcOptions kdcOptions = new KdcOptions();
        kdcOptions.set( KdcOptions.FORWARDED );
        kdcReqBody.setKdcOptions( kdcOptions );

        long now = System.currentTimeMillis();
        KerberosTime requestedEndTime = new KerberosTime( now + 1 * KerberosTime.DAY );
        kdcReqBody.setTill( requestedEndTime );

        KdcReq message = getKdcRequest( tgt, kdcReqBody );

        handler.messageReceived( session, message );

        Object msg = session.getMessage();
        assertEquals( "session.getMessage() instanceOf", KrbError.class, msg.getClass() );
        KrbError error = ( KrbError ) msg;
        assertEquals( "KDC policy rejects request", ErrorType.KDC_ERR_POLICY, error.getErrorCode() );
    }


    /**
     * Tests when proxiable tickets are disallowed that requests for
     * proxiable tickets fail with the correct error message.
     *
     * @throws Exception
     */
    @Test
    public void testProxiableTicket() throws Exception
    {
        // Deny PROXIABLE tickets in policy.
        config.setProxiableAllowed( false );

        // Get the mutable ticket part.
        KerberosPrincipal clientPrincipal = new KerberosPrincipal( "hnelson@EXAMPLE.COM" );
        EncTicketPart encTicketPart = getTicketArchetype( clientPrincipal );

        // Make changes to test.
        encTicketPart.setFlag( TicketFlag.PROXIABLE );

        // Seal the ticket for the server.
        KerberosPrincipal serverPrincipal = new KerberosPrincipal( "krbtgt/EXAMPLE.COM@EXAMPLE.COM" );
        String passPhrase = "randomKey";
        EncryptionKey serverKey = getEncryptionKey( serverPrincipal, passPhrase );
        Ticket tgt = getTicket( encTicketPart, serverPrincipal, serverKey );

        // Build a TGS request asking for a PROXIABLE ticket.
        KdcReqBody kdcReqBody = new KdcReqBody();
        kdcReqBody.setSName( getPrincipalName( "ldap/ldap.example.com@EXAMPLE.COM" ) );
        kdcReqBody.setRealm( "EXAMPLE.COM" );
        kdcReqBody.setEType( config.getEncryptionTypes() );
        kdcReqBody.setNonce( random.nextInt() );

        KdcOptions kdcOptions = new KdcOptions();
        kdcOptions.set( KdcOptions.PROXIABLE );
        kdcReqBody.setKdcOptions( kdcOptions );

        long now = System.currentTimeMillis();
        KerberosTime requestedEndTime = new KerberosTime( now + 1 * KerberosTime.DAY );
        kdcReqBody.setTill( requestedEndTime );

        KdcReq message = getKdcRequest( tgt, kdcReqBody );

        handler.messageReceived( session, message );

        Object msg = session.getMessage();
        assertEquals( "session.getMessage() instanceOf", KrbError.class, msg.getClass() );
        KrbError error = ( KrbError ) msg;
        assertEquals( "KDC policy rejects request", ErrorType.KDC_ERR_POLICY, error.getErrorCode() );
    }


    /**
     * Tests when proxiable tickets are disallowed that requests for
     * proxy tickets fail with the correct error message.
     *
     * @throws Exception
     */
    @Test
    public void testProxyTicket() throws Exception
    {
        // Deny PROXIABLE tickets in policy.
        config.setProxiableAllowed( false );

        // Get the mutable ticket part.
        KerberosPrincipal clientPrincipal = new KerberosPrincipal( "hnelson@EXAMPLE.COM" );
        EncTicketPart encTicketPart = getTicketArchetype( clientPrincipal );

        // Make changes to test.
        encTicketPart.setFlag( TicketFlag.PROXIABLE );

        // Seal the ticket for the server.
        KerberosPrincipal serverPrincipal = new KerberosPrincipal( "krbtgt/EXAMPLE.COM@EXAMPLE.COM" );
        String passPhrase = "randomKey";
        EncryptionKey serverKey = getEncryptionKey( serverPrincipal, passPhrase );
        Ticket tgt = getTicket( encTicketPart, serverPrincipal, serverKey );

        // Build a TGS request asking for a PROXY ticket, with an explicit address list.
        KdcReqBody kdcReqBody = new KdcReqBody();
        kdcReqBody.setSName( getPrincipalName( "ldap/ldap.example.com@EXAMPLE.COM" ) );
        kdcReqBody.setRealm( "EXAMPLE.COM" );
        kdcReqBody.setEType( config.getEncryptionTypes() );
        kdcReqBody.setNonce( random.nextInt() );

        KdcOptions kdcOptions = new KdcOptions();
        kdcOptions.set( KdcOptions.PROXY );
        kdcReqBody.setKdcOptions( kdcOptions );

        long now = System.currentTimeMillis();
        KerberosTime requestedEndTime = new KerberosTime( now + 1 * KerberosTime.DAY );
        kdcReqBody.setTill( requestedEndTime );

        HostAddress[] address =
            { new HostAddress( InetAddress.getLocalHost() ) };
        HostAddresses addresses = new HostAddresses( address );
        kdcReqBody.setAddresses( addresses );

        KdcReq message = getKdcRequest( tgt, kdcReqBody );

        handler.messageReceived( session, message );

        Object msg = session.getMessage();
        assertEquals( "session.getMessage() instanceOf", KrbError.class, msg.getClass() );
        KrbError error = ( KrbError ) msg;
        assertEquals( "KDC policy rejects request", ErrorType.KDC_ERR_POLICY, error.getErrorCode() );
    }


    /**
     * Tests when empty addresses are disallowed and proxy tickets are requested
     * that requests with no addresses fail with the correct error message.
     *
     * @throws Exception
     */
    @Test
    public void testProxyNoAddressesTicket() throws Exception
    {
        // Deny empty addresses tickets in policy.
        config.setEmptyAddressesAllowed( false );

        // Get the mutable ticket part.
        KerberosPrincipal clientPrincipal = new KerberosPrincipal( "hnelson@EXAMPLE.COM" );
        EncTicketPart encTicketPart = getTicketArchetype( clientPrincipal );

        // Make changes to test.
        encTicketPart.setFlag( TicketFlag.PROXIABLE );

        // All-zero address on the ticket; the request itself carries no addresses.
        HostAddress[] address =
            { new HostAddress( InetAddress.getByAddress( new byte[4] ) ) };
        HostAddresses addresses = new HostAddresses( address );
        encTicketPart.setClientAddresses( addresses );

        // Seal the ticket for the server.
        KerberosPrincipal serverPrincipal = new KerberosPrincipal( "krbtgt/EXAMPLE.COM@EXAMPLE.COM" );
        String passPhrase = "randomKey";
        EncryptionKey serverKey = getEncryptionKey( serverPrincipal, passPhrase );
        Ticket tgt = getTicket( encTicketPart, serverPrincipal, serverKey );

        // Build a TGS request asking for a PROXY ticket without addresses.
        KdcReqBody kdcReqBody = new KdcReqBody();
        kdcReqBody.setSName( getPrincipalName( "ldap/ldap.example.com@EXAMPLE.COM" ) );
        kdcReqBody.setRealm( "EXAMPLE.COM" );
        kdcReqBody.setEType( config.getEncryptionTypes() );
        kdcReqBody.setNonce( random.nextInt() );

        KdcOptions kdcOptions = new KdcOptions();
        kdcOptions.set( KdcOptions.PROXY );
        kdcReqBody.setKdcOptions( kdcOptions );

        long now = System.currentTimeMillis();
        KerberosTime requestedEndTime = new KerberosTime( now + 1 * KerberosTime.DAY );
        kdcReqBody.setTill( requestedEndTime );

        KdcReq message = getKdcRequest( tgt, kdcReqBody );

        handler.messageReceived( session, message );

        Object msg = session.getMessage();
        assertEquals( "session.getMessage() instanceOf", KrbError.class, msg.getClass() );
        KrbError error = ( KrbError ) msg;
        assertEquals( "KDC policy rejects request", ErrorType.KDC_ERR_POLICY, error.getErrorCode() );
    }


    /**
     * Tests when postdated tickets are disallowed that requests for
     * ALLOW-POSTDATE tickets fail with the correct error message.
     *
     * @throws Exception
     */
    @Test
    public void testAllowPostdate() throws Exception
    {
        // Deny ALLOW_POSTDATE tickets in policy.
        config.setPostdatedAllowed( false );

        // Get the mutable ticket part.
        KerberosPrincipal clientPrincipal = new KerberosPrincipal( "hnelson@EXAMPLE.COM" );
        EncTicketPart encTicketPart = getTicketArchetype( clientPrincipal );

        // Make changes to test.
encTicketPart.setFlag( TicketFlag.MAY_POSTDATE );

        // Seal the ticket for the server.
        KerberosPrincipal serverPrincipal = new KerberosPrincipal( "krbtgt/EXAMPLE.COM@EXAMPLE.COM" );
        String passPhrase = "randomKey";
        EncryptionKey serverKey = getEncryptionKey( serverPrincipal, passPhrase );
        Ticket tgt = getTicket( encTicketPart, serverPrincipal, serverKey );

        // Build a TGS request with the ALLOW_POSTDATE option set.
        KdcReqBody kdcReqBody = new KdcReqBody();
        kdcReqBody.setSName( getPrincipalName( "ldap/ldap.example.com@EXAMPLE.COM" ) );
        kdcReqBody.setRealm( "EXAMPLE.COM" );
        kdcReqBody.setEType( config.getEncryptionTypes() );
        kdcReqBody.setNonce( random.nextInt() );

        KdcOptions kdcOptions = new KdcOptions();
        kdcOptions.set( KdcOptions.ALLOW_POSTDATE );
        kdcReqBody.setKdcOptions( kdcOptions );

        long now = System.currentTimeMillis();
        KerberosTime requestedEndTime = new KerberosTime( now + 1 * KerberosTime.DAY );
        kdcReqBody.setTill( requestedEndTime );

        KdcReq message = getKdcRequest( tgt, kdcReqBody );

        handler.messageReceived( session, message );

        Object msg = session.getMessage();
        assertEquals( "session.getMessage() instanceOf", KrbError.class, msg.getClass() );
        KrbError error = ( KrbError ) msg;
        assertEquals( "KDC policy rejects request", ErrorType.KDC_ERR_POLICY, error.getErrorCode() );
    }


    /**
     * Tests when postdated tickets are disallowed that requests for
     * postdated tickets fail with the correct error message.
     *
     * @throws Exception
     */
    @Test
    public void testPostdated() throws Exception
    {
        // Deny POSTDATED tickets in policy.
        config.setPostdatedAllowed( false );

        // Get the mutable ticket part.
        KerberosPrincipal clientPrincipal = new KerberosPrincipal( "hnelson@EXAMPLE.COM" );
        EncTicketPart encTicketPart = getTicketArchetype( clientPrincipal );

        // Make changes to test.
        encTicketPart.setFlag( TicketFlag.MAY_POSTDATE );

        // Seal the ticket for the server.
        KerberosPrincipal serverPrincipal = new KerberosPrincipal( "krbtgt/EXAMPLE.COM@EXAMPLE.COM" );
        String passPhrase = "randomKey";
        EncryptionKey serverKey = getEncryptionKey( serverPrincipal, passPhrase );
        Ticket tgt = getTicket( encTicketPart, serverPrincipal, serverKey );

        // Build a TGS request with the POSTDATED option set.
        KdcReqBody kdcReqBody = new KdcReqBody();
        kdcReqBody.setSName( getPrincipalName( "ldap/ldap.example.com@EXAMPLE.COM" ) );
        kdcReqBody.setRealm( "EXAMPLE.COM" );
        kdcReqBody.setEType( config.getEncryptionTypes() );
        kdcReqBody.setNonce( random.nextInt() );

        KdcOptions kdcOptions = new KdcOptions();
        kdcOptions.set( KdcOptions.POSTDATED );
        kdcReqBody.setKdcOptions( kdcOptions );

        long now = System.currentTimeMillis();
        KerberosTime requestedEndTime = new KerberosTime( now + 1 * KerberosTime.DAY );
        kdcReqBody.setTill( requestedEndTime );

        KdcReq message = getKdcRequest( tgt, kdcReqBody );

        handler.messageReceived( session, message );

        Object msg = session.getMessage();
        assertEquals( "session.getMessage() instanceOf", KrbError.class, msg.getClass() );
        KrbError error = ( KrbError ) msg;
        assertEquals( "KDC policy rejects request", ErrorType.KDC_ERR_POLICY, error.getErrorCode() );
    }


    /**
     * Tests when postdated tickets are disallowed that requests for
     * validation of invalid tickets fail with the correct error message.
     *
     * @throws Exception
     */
    @Test
    public void testValidateInvalidTicket() throws Exception
    {
        // Deny VALIDATE requests via the postdated-tickets policy.
        config.setPostdatedAllowed( false );

        // Get the mutable ticket part.
        KerberosPrincipal clientPrincipal = new KerberosPrincipal( "hnelson@EXAMPLE.COM" );
        EncTicketPart encTicketPart = getTicketArchetype( clientPrincipal );

        // Make changes to test.
        encTicketPart.setFlag( TicketFlag.INVALID );

        // Seal the ticket for the server.
        KerberosPrincipal serverPrincipal = new KerberosPrincipal( "krbtgt/EXAMPLE.COM@EXAMPLE.COM" );
        String passPhrase = "randomKey";
        EncryptionKey serverKey = getEncryptionKey( serverPrincipal, passPhrase );
        Ticket tgt = getTicket( encTicketPart, serverPrincipal, serverKey );

        // Build a TGS request asking to VALIDATE the (invalid) ticket.
        KdcReqBody kdcReqBody = new KdcReqBody();
        kdcReqBody.setSName( getPrincipalName( "hnelson" ) );
        kdcReqBody.setRealm( "EXAMPLE.COM" );
        kdcReqBody.setEType( config.getEncryptionTypes() );
        kdcReqBody.setNonce( random.nextInt() );

        KdcOptions kdcOptions = new KdcOptions();
        kdcOptions.set( KdcOptions.VALIDATE );
        kdcReqBody.setKdcOptions( kdcOptions );

        long currentTime = System.currentTimeMillis();
        KerberosTime requestedEndTime = new KerberosTime( currentTime + KerberosTime.DAY );
        kdcReqBody.setTill( requestedEndTime );

        KdcReq message = getKdcRequest( tgt, kdcReqBody );

        handler.messageReceived( session, message );

        Object msg = session.getMessage();
        assertEquals( "session.getMessage() instanceOf", KrbError.class, msg.getClass() );
        KrbError error = ( KrbError ) msg;
        assertEquals( "KDC policy rejects request", ErrorType.KDC_ERR_POLICY, error.getErrorCode() );
    }


    /**
     * Tests when renewable tickets are disallowed that requests for
     * renewal of tickets fail with the correct error message.
     *
     * @throws Exception
     */
    @Test
    public void testRenewTicket() throws Exception
    {
        // Deny RENEWABLE tickets in policy.
        config.setRenewableAllowed( false );

        // Obtain a TGT directly; no mutable ticket part is needed for a RENEW request.
        KerberosPrincipal clientPrincipal = new KerberosPrincipal( "hnelson@EXAMPLE.COM" );
        KerberosPrincipal serverPrincipal = new KerberosPrincipal( "krbtgt/EXAMPLE.COM@EXAMPLE.COM" );
        String serverPassword = "randomKey";

        Ticket tgt = getTgt( clientPrincipal, serverPrincipal, serverPassword );

        // Build a TGS request with the RENEW option set.
        KdcReqBody kdcReqBody = new KdcReqBody();
        kdcReqBody.setSName( getPrincipalName( "hnelson" ) );
        kdcReqBody.setRealm( "EXAMPLE.COM" );
        kdcReqBody.setEType( config.getEncryptionTypes() );
        kdcReqBody.setNonce( random.nextInt() );

        KdcOptions kdcOptions = new KdcOptions();
        kdcOptions.set( KdcOptions.RENEW );
        kdcReqBody.setKdcOptions( kdcOptions );

        long currentTime = System.currentTimeMillis();
        KerberosTime requestedEndTime = new KerberosTime( currentTime + KerberosTime.DAY );
        kdcReqBody.setTill( requestedEndTime );

        KdcReq message = getKdcRequest( tgt, kdcReqBody );

        handler.messageReceived( session, message );

        Object msg = session.getMessage();
        assertEquals( "session.getMessage() instanceOf", KrbError.class, msg.getClass() );
        KrbError error = ( KrbError ) msg;
        assertEquals( "KDC policy rejects request", ErrorType.KDC_ERR_POLICY, error.getErrorCode() );
    }


    /**
     * Tests when renewable tickets are disallowed that requests for
     * RENEWABLE-OK tickets fail with the correct error message.
     *
     * @throws Exception
     */
    @Test
    public void testRenewableOk() throws Exception
    {
        // Deny RENEWABLE tickets in policy.
        config.setRenewableAllowed( false );

        // Get the mutable ticket part.
        KerberosPrincipal clientPrincipal = new KerberosPrincipal( "hnelson@EXAMPLE.COM" );
        EncTicketPart encTicketPart = getTicketArchetype( clientPrincipal );

        // Make changes to test.
        encTicketPart.setFlag( TicketFlag.RENEWABLE );

        // Seal the ticket for the server.
KerberosPrincipal serverPrincipal = new KerberosPrincipal( "krbtgt/EXAMPLE.COM@EXAMPLE.COM" ); String passPhrase = "randomKey"; EncryptionKey serverKey = getEncryptionKey( serverPrincipal, passPhrase ); Ticket tgt = getTicket( encTicketPart, serverPrincipal, serverKey ); KdcReqBody kdcReqBody = new KdcReqBody(); kdcReqBody.setSName( getPrincipalName( "ldap/ldap.example.com@EXAMPLE.COM" ) ); kdcReqBody.setRealm( "EXAMPLE.COM" ); kdcReqBody.setEType( config.getEncryptionTypes() ); kdcReqBody.setNonce( random.nextInt() ); KdcOptions kdcOptions = new KdcOptions(); kdcOptions.set( KdcOptions.RENEWABLE_OK ); kdcReqBody.setKdcOptions( kdcOptions ); long now = System.currentTimeMillis(); KerberosTime requestedEndTime = new KerberosTime( now + KerberosTime.WEEK ); kdcReqBody.setTill( requestedEndTime ); KdcReq message = getKdcRequest( tgt, kdcReqBody ); handler.messageReceived( session, message ); Object msg = session.getMessage(); assertEquals( "session.getMessage() instanceOf", KrbError.class, msg.getClass() ); KrbError error = ( KrbError ) msg; assertEquals( "KDC policy rejects request", ErrorType.KDC_ERR_POLICY, error.getErrorCode() ); } /** * Tests when renewable tickets are disallowed that requests for * renewable tickets fail with the correct error message. * * @throws Exception */ @Test public void testRenewableTicket() throws Exception { // Deny RENEWABLE tickets in policy. config.setRenewableAllowed( false ); // Get the mutable ticket part. KerberosPrincipal clientPrincipal = new KerberosPrincipal( "hnelson@EXAMPLE.COM" ); EncTicketPart encTicketPart = getTicketArchetype( clientPrincipal ); // Make changes to test. encTicketPart.setFlag( TicketFlag.RENEWABLE ); // Seal the ticket for the server. 
KerberosPrincipal serverPrincipal = new KerberosPrincipal( "krbtgt/EXAMPLE.COM@EXAMPLE.COM" ); String passPhrase = "randomKey"; EncryptionKey serverKey = getEncryptionKey( serverPrincipal, passPhrase ); Ticket tgt = getTicket( encTicketPart, serverPrincipal, serverKey ); KdcReqBody kdcReqBody = new KdcReqBody(); kdcReqBody.setSName( getPrincipalName( "ldap/ldap.example.com@EXAMPLE.COM" ) ); kdcReqBody.setRealm( "EXAMPLE.COM" ); kdcReqBody.setEType( config.getEncryptionTypes() ); kdcReqBody.setNonce( random.nextInt() ); KdcOptions kdcOptions = new KdcOptions(); kdcOptions.set( KdcOptions.RENEWABLE ); kdcReqBody.setKdcOptions( kdcOptions ); long now = System.currentTimeMillis(); KerberosTime requestedEndTime = new KerberosTime( now + 1 * KerberosTime.DAY ); kdcReqBody.setTill( requestedEndTime ); KerberosTime requestedRenewTillTime = new KerberosTime( now + KerberosTime.WEEK / 2 ); kdcReqBody.setRtime( requestedRenewTillTime ); KdcReq message = getKdcRequest( tgt, kdcReqBody ); handler.messageReceived( session, message ); Object msg = session.getMessage(); assertEquals( "session.getMessage() instanceOf", KrbError.class, msg.getClass() ); KrbError error = ( KrbError ) msg; assertEquals( "KDC policy rejects request", ErrorType.KDC_ERR_POLICY, error.getErrorCode() ); } }
/* * Copyright (c) 2016 Constant Contact, Inc. All Rights Reserved. * Boston, MA 02451, USA * Phone: (781) 472-8100 * Fax: (781) 472-8101 * This software is the confidential and proprietary information * of Constant Contact, Inc. created for Constant Contact, Inc. * You shall not disclose such Confidential Information and shall use * it only in accordance with the terms of the license agreement * you entered into with Constant Contact, Inc. */ package com.constantcontact.v2.bulkactivities; import com.fasterxml.jackson.annotation.JsonAutoDetect; import com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonProperty; import org.apache.commons.lang3.builder.EqualsBuilder; import org.apache.commons.lang3.builder.HashCodeBuilder; import java.io.Serializable; import java.util.Date; /** * REQUIRED. The minimum requirement is one array element containing one email address. The remainder of fields are optional. 
*/ @JsonInclude(JsonInclude.Include.NON_NULL) @JsonAutoDetect(fieldVisibility = Visibility.ANY, getterVisibility = Visibility.NONE, setterVisibility = Visibility.NONE) public class ImportData implements Serializable { @JsonProperty("addresses") protected Address[] _addresses; @JsonProperty("anniversary") protected Date _anniversary; @JsonProperty("birthday_day") protected String _birthdayDay; @JsonProperty("birthday_month") protected String _birthdayMonth; @JsonProperty("company_name") protected String _companyName; @JsonProperty("custom_fields") protected CustomField[] _customFields; @JsonProperty("email_addresses") protected String[] _emailAddresses; @JsonProperty("first_name") protected String _firstName; @JsonProperty("last_name") protected String _lastName; @JsonProperty("home_phone") protected String _homePhone; @JsonProperty("job_title") protected String _jobTitle; @JsonProperty("work_phone") protected String _workPhone; /** * Class Creator */ public ImportData() { } public Address[] getAddresses() { return _addresses; } public void setAddresses(Address[] addresses) { _addresses = addresses; } public Date getAnniversary() { return _anniversary; } public void setAnniversary(Date anniversary) { _anniversary = anniversary; } public String getBirthdayDay() { return _birthdayDay; } public void setBirthdayDay(String birthdayDay) { _birthdayDay = birthdayDay; } public String getBirthdayMonth() { return _birthdayMonth; } public void setBirthdayMonth(String birthdayMonth) { _birthdayMonth = birthdayMonth; } public String getCompanyName() { return _companyName; } public void setCompanyName(String companyName) { _companyName = companyName; } public CustomField[] getCustomFields() { return _customFields; } public void setCustomFields(CustomField[] customFields) { _customFields = customFields; } public String[] getEmailAddresses() { return _emailAddresses; } public void setEmailAddresses(String[] emailAddresses) { _emailAddresses = emailAddresses; } public String 
getFirstName() { return _firstName; } public void setFirstName(String firstName) { _firstName = firstName; } public String getLastName() { return _lastName; } public void setLastName(String lastName) { _lastName = lastName; } public String getHomePhone() { return _homePhone; } public void setHomePhone(String homePhone) { _homePhone = homePhone; } public String getJobTitle() { return _jobTitle; } public void setJobTitle(String jobTitle) { _jobTitle = jobTitle; } public String getWorkPhone() { return _workPhone; } public void setWorkPhone(String workPhone) { _workPhone = workPhone; } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (!(obj instanceof ImportData)) { return false; } else { ImportData rhs = (ImportData) obj; return new EqualsBuilder() .append(_addresses, rhs.getAddresses()) .append(_anniversary, rhs.getAnniversary()) .append(_birthdayDay, rhs.getBirthdayDay()) .append(_birthdayMonth, rhs.getBirthdayMonth()) .append(_companyName, rhs.getCompanyName()) .append(_customFields, rhs.getCustomFields()) .append(_emailAddresses, rhs.getEmailAddresses()) .append(_firstName, rhs.getFirstName()) .append(_lastName, rhs.getLastName()) .append(_homePhone, rhs.getHomePhone()) .append(_jobTitle, rhs.getJobTitle()) .append(_workPhone, rhs.getWorkPhone()) .isEquals(); } } @Override public int hashCode() { return new HashCodeBuilder() .append(_addresses) .append(_anniversary) .append(_birthdayDay) .append(_birthdayMonth) .append(_companyName) .append(_customFields) .append(_emailAddresses) .append(_firstName) .append(_lastName) .append(_homePhone) .append(_jobTitle) .append(_workPhone) .hashCode(); } }
/* * The MIT License * * Copyright (c) 2004-2010, Sun Microsystems, Inc., Kohsuke Kawaguchi, * Yahoo! Inc., Erik Ramfelt, Tom Huybrechts * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/ package hudson; import com.google.common.collect.ImmutableSet; import hudson.PluginManager.PluginInstanceStore; import hudson.model.AdministrativeMonitor; import hudson.model.Api; import hudson.model.ModelObject; import java.nio.file.Files; import jenkins.YesNoMaybe; import jenkins.model.Jenkins; import hudson.model.UpdateCenter; import hudson.model.UpdateSite; import hudson.util.VersionNumber; import org.jvnet.localizer.ResourceBundleHolder; import org.kohsuke.stapler.HttpResponse; import org.kohsuke.stapler.HttpResponses; import org.kohsuke.stapler.StaplerRequest; import org.kohsuke.stapler.StaplerResponse; import org.kohsuke.stapler.export.Exported; import org.kohsuke.stapler.export.ExportedBean; import org.kohsuke.stapler.interceptor.RequirePOST; import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.LogFactory; import javax.annotation.CheckForNull; import javax.annotation.Nonnull; import java.io.Closeable; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStream; import java.net.URL; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Enumeration; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.jar.JarFile; import java.util.jar.Manifest; import java.util.logging.Level; import java.util.logging.Logger; import static java.util.logging.Level.WARNING; import static org.apache.commons.io.FilenameUtils.getBaseName; /** * Represents a Jenkins plug-in and associated control information * for Jenkins to control {@link Plugin}. * * <p> * A plug-in is packaged into a jar file whose extension is <tt>".jpi"</tt> (or <tt>".hpi"</tt> for backward compatibility), * A plugin needs to have a special manifest entry to identify what it is. * * <p> * At the runtime, a plugin has two distinct state axis. 
 * <ol>
 * <li>Enabled/Disabled. If enabled, Jenkins is going to use it
 * next time Jenkins runs. Otherwise the next run will ignore it.
 * <li>Activated/Deactivated. If activated, that means Jenkins is using
 * the plugin in this session. Otherwise it's not.
 * </ol>
 * <p>
 * For example, an activated but disabled plugin is still running but the next
 * time it won't.
 *
 * @author Kohsuke Kawaguchi
 */
@ExportedBean
public class PluginWrapper implements Comparable<PluginWrapper>, ModelObject {
    /**
     * A plugin won't be loaded unless its declared dependencies are present and match the required minimal version.
     * This can be set to false (via the system property
     * {@code hudson.PluginWrapper.dependenciesVersionCheck.enabled}) to disable the version check (legacy behaviour).
     */
    private static final boolean ENABLE_PLUGIN_DEPENDENCIES_VERSION_CHECK = Boolean.parseBoolean(System.getProperty(PluginWrapper.class.getName()+"." + "dependenciesVersionCheck.enabled", "true"));

    /**
     * {@link PluginManager} to which this belongs to.
     */
    public final PluginManager parent;

    /**
     * Plugin manifest.
     * Contains description of the plugin.
     */
    private final Manifest manifest;

    /**
     * {@link ClassLoader} for loading classes from this plugin.
     * Null if disabled.
     */
    public final ClassLoader classLoader;

    /**
     * Base URL for loading static resources from this plugin.
     * Null if disabled. The static resources are mapped under
     * <tt>CONTEXTPATH/plugin/SHORTNAME/</tt>.
     */
    public final URL baseResourceURL;

    /**
     * Used to control enable/disable setting of the plugin.
     * If this file exists, plugin will be disabled.
     */
    private final File disableFile;

    /**
     * A .jpi file, an exploded plugin directory, or a .jpl file.
     */
    private final File archive;

    /**
     * Short name of the plugin. The artifact Id of the plugin.
     * This is also used in the URL within Jenkins, so it needs
     * to remain stable even when the *.jpi file name is changed
     * (like Maven does.)
     */
    private final String shortName;

    /**
     * True if this plugin is activated for this session.
     * The snapshot of <tt>disableFile.exists()</tt> as of the start up.
     */
    private final boolean active;

    // Set to true by PluginManager when this plugin participates in a dependency
    // cycle; an active-but-cyclic plugin is reported as inactive (see isActive()).
    private boolean hasCycleDependency = false;

    // Mandatory and optional dependencies as parsed from the manifest.
    private final List<Dependency> dependencies;
    private final List<Dependency> optionalDependencies;

    /**
     * Returns a read-only view of the dependency errors collected by
     * {@code resolvePluginDependencies()}.
     */
    public List<String> getDependencyErrors() {
        return Collections.unmodifiableList(dependencyErrors);
    }

    // Human-readable reasons why this plugin could not be loaded; transient so
    // it is not persisted with the wrapper.
    private final transient List<String> dependencyErrors = new ArrayList<>();

    /**
     * Is this plugin bundled in jenkins.war?
     */
    /*package*/ boolean isBundled;

    /**
     * List of plugins that depend on this plugin.
     */
    private Set<String> dependants = Collections.emptySet();

    /**
     * The core can depend on a plugin if it is bundled. Sometimes it's the only thing that
     * depends on the plugin e.g. UI support library bundle plugin.
     */
    private static Set<String> CORE_ONLY_DEPENDANT = ImmutableSet.copyOf(Arrays.asList("jenkins-core"));

    /**
     * Set the list of components that depend on this plugin.
     * @param dependants The list of components that depend on this plugin.
     */
    public void setDependants(@Nonnull Set<String> dependants) {
        this.dependants = dependants;
    }

    /**
     * Get the list of components that depend on this plugin.
     * @return The list of components that depend on this plugin.
     */
    public @Nonnull Set<String> getDependants() {
        // A bundled plugin with no recorded dependants is still implicitly
        // depended upon by the core itself.
        if (isBundled && dependants.isEmpty()) {
            return CORE_ONLY_DEPENDANT;
        } else {
            return dependants;
        }
    }

    /**
     * Does this plugin have anything that depends on it.
     * @return {@code true} if something (Jenkins core, or another plugin) depends on this
     * plugin, otherwise {@code false}.
     */
    public boolean hasDependants() {
        return (isBundled || !dependants.isEmpty());
    }

    /**
     * Does this plugin depend on any other plugins.
     * @return {@code true} if this plugin depends on other plugins, otherwise {@code false}.
     */
    public boolean hasDependencies() {
        return (dependencies != null && !dependencies.isEmpty());
    }

    /**
     * A single plugin dependency parsed from the manifest's
     * {@code shortName:version[;resolution:=optional]} specifier.
     */
    @ExportedBean
    public static final class Dependency {
        @Exported
        public final String shortName;
        @Exported
        public final String version;
        @Exported
        public final boolean optional;

        /**
         * Parses a dependency specifier of the form
         * {@code shortName:version} optionally followed by OSGi-style
         * {@code ;}-separated properties, of which only
         * {@code resolution:=optional} is recognized.
         *
         * @param s the raw specifier
         * @throws IllegalArgumentException if no {@code :} separator is present
         */
        public Dependency(String s) {
            int idx = s.indexOf(':');
            if(idx==-1)
                throw new IllegalArgumentException("Illegal dependency specifier "+s);
            this.shortName = s.substring(0,idx);
            String version = s.substring(idx+1);

            // Scan any trailing ;-separated OSGi properties (element 0 is the
            // version itself) for the optional-resolution marker.
            boolean isOptional = false;
            String[] osgiProperties = version.split("[;]");
            for (int i = 1; i < osgiProperties.length; i++) {
                String osgiProperty = osgiProperties[i].trim();
                if (osgiProperty.equalsIgnoreCase("resolution:=optional")) {
                    isOptional = true;
                }
            }
            this.optional = isOptional;
            if (isOptional) {
                // Strip the properties so only the bare version remains.
                this.version = osgiProperties[0];
            } else {
                this.version = version;
            }
        }

        @Override
        public String toString() {
            return shortName + " (" + version + ")" + (optional ? " optional" : "");
        }
    }

    /**
     * @param archive
     *      A .jpi archive file jar file, or a .jpl linked plugin.
     * @param manifest
     *      The manifest for the plugin
     * @param baseResourceURL
     *      A URL pointing to the resources for this plugin
     * @param classLoader
     *      a classloader that loads classes from this plugin and its dependencies
     * @param disableFile
     *      if this file exists on startup, the plugin will not be activated
     * @param dependencies a list of mandatory dependencies
     * @param optionalDependencies a list of optional dependencies
     */
    public PluginWrapper(PluginManager parent, File archive, Manifest manifest, URL baseResourceURL,
            ClassLoader classLoader, File disableFile,
            List<Dependency> dependencies, List<Dependency> optionalDependencies) {
        this.parent = parent;
        this.manifest = manifest;
        this.shortName = computeShortName(manifest, archive.getName());
        this.baseResourceURL = baseResourceURL;
        this.classLoader = classLoader;
        this.disableFile = disableFile;
        // 'active' is a snapshot taken at construction; later enable()/disable()
        // calls affect the next session only.
        this.active = !disableFile.exists();
        this.dependencies = dependencies;
        this.optionalDependencies = optionalDependencies;
        this.archive = archive;
    }

    /**
     * Display name for the UI: the long name with any leading "Jenkins " prefix removed.
     */
    public String getDisplayName() {
        return StringUtils.removeStart(getLongName(), "Jenkins ");
    }

    /**
     * Remote API entry point; requires ADMINISTER permission.
     */
    public Api getApi() {
        Jenkins.getInstance().checkPermission(Jenkins.ADMINISTER);
        return new Api(this);
    }

    /**
     * Returns the URL of the index page jelly script.
     */
    public URL getIndexPage() {
        // In the current impl dependencies are checked first, so the plugin itself
        // will add the last entry in the getResources result.
        URL idx = null;
        try {
            Enumeration<URL> en = classLoader.getResources("index.jelly");
            while (en.hasMoreElements())
                idx = en.nextElement();
        } catch (IOException ignore) { }
        // In case plugin has dependencies but is missing its own index.jelly,
        // check that result has this plugin's artifactId in it:
        return idx != null && idx.toString().contains(shortName) ? idx : null;
    }

    /**
     * Derives the plugin's short name from the manifest, falling back to the file name.
     */
    static String computeShortName(Manifest manifest, String fileName) {
        // use the name captured in the manifest, as often plugins
        // depend on the specific short name in its URLs.
        String n = manifest.getMainAttributes().getValue("Short-Name");
        if(n!=null) return n;

        // maven seems to put this automatically, so good fallback to check.
        n = manifest.getMainAttributes().getValue("Extension-Name");
        if(n!=null) return n;

        // otherwise infer from the file name, since older plugins don't have
        // this entry.
        return getBaseName(fileName);
    }

    /** Mandatory dependencies, as parsed from the manifest. */
    @Exported
    public List<Dependency> getDependencies() {
        return dependencies;
    }

    /** Optional dependencies, as parsed from the manifest. */
    public List<Dependency> getOptionalDependencies() {
        return optionalDependencies;
    }

    /**
     * Returns the short name suitable for URL.
     */
    @Exported
    public String getShortName() {
        return shortName;
    }

    /**
     * Gets the instance of {@link Plugin} contributed by this plugin.
     */
    public @CheckForNull Plugin getPlugin() {
        PluginInstanceStore pis = Jenkins.lookup(PluginInstanceStore.class);
        return pis != null ? pis.store.get(this) : null;
    }

    /**
     * Gets the URL that shows more information about this plugin.
     * @return
     *      null if this information is unavailable.
     * @since 1.283
     */
    @Exported
    public String getUrl() {
        // first look for the manifest entry. This is new in maven-hpi-plugin 1.30
        String url = manifest.getMainAttributes().getValue("Url");
        if(url!=null) return url;

        // fallback to update center metadata
        UpdateSite.Plugin ui = getInfo();
        if(ui!=null) return ui.wiki;

        return null;
    }

    @Override
    public String toString() {
        return "Plugin:" + getShortName();
    }

    /**
     * Returns a one-line descriptive name of this plugin.
     */
    @Exported
    public String getLongName() {
        String name = manifest.getMainAttributes().getValue("Long-Name");
        if(name!=null) return name;
        return shortName;
    }

    /**
     * Does this plugin supports dynamic loading?
     */
    @Exported
    public YesNoMaybe supportsDynamicLoad() {
        // Absence of the manifest attribute means "unknown", not "no".
        String v = manifest.getMainAttributes().getValue("Support-Dynamic-Loading");
        if (v==null) return YesNoMaybe.MAYBE;
        return Boolean.parseBoolean(v) ? YesNoMaybe.YES : YesNoMaybe.NO;
    }

    /**
     * Returns the version number of this plugin
     */
    @Exported
    public String getVersion() {
        return getVersionOf(manifest);
    }

    // Reads the version string from a manifest; "???" if neither attribute is present.
    private String getVersionOf(Manifest manifest) {
        String v = manifest.getMainAttributes().getValue("Plugin-Version");
        if(v!=null) return v;

        // plugins generated before maven-hpi-plugin 1.3 should still have this attribute
        v = manifest.getMainAttributes().getValue("Implementation-Version");
        if(v!=null) return v;

        return "???";
    }

    /**
     * Returns the required Jenkins core version of this plugin.
     * @return the required Jenkins core version of this plugin.
     * @since 2.16
     */
    @Exported
    public @CheckForNull String getRequiredCoreVersion() {
        String v = manifest.getMainAttributes().getValue("Jenkins-Version");
        if (v!= null) return v;

        v = manifest.getMainAttributes().getValue("Hudson-Version");
        if (v!= null) return v;
        return null;
    }

    /**
     * Returns the version number of this plugin
     */
    public VersionNumber getVersionNumber() {
        return new VersionNumber(getVersion());
    }

    /**
     * Returns true if the version of this plugin is older than the given version.
     */
    public boolean isOlderThan(VersionNumber v) {
        try {
            return getVersionNumber().compareTo(v) < 0;
        } catch (IllegalArgumentException e) {
            // if we can't figure out our current version, it probably means it's very old,
            // since the version information is missing only from the very old plugins
            return true;
        }
    }

    /**
     * Terminates the plugin.
     */
    public void stop() {
        Plugin plugin = getPlugin();
        if (plugin != null) {
            try {
                LOGGER.log(Level.FINE, "Stopping {0}", shortName);
                plugin.stop();
            } catch (Throwable t) {
                // Catch Throwable deliberately: a misbehaving plugin must not
                // abort shutdown of the others.
                LOGGER.log(WARNING, "Failed to shut down " + shortName, t);
            }
        } else {
            LOGGER.log(Level.FINE, "Could not find Plugin instance to stop for {0}", shortName);
        }
        // Work around a bug in commons-logging.
        // See http://www.szegedi.org/articles/memleak.html
        LogFactory.release(classLoader);
    }

    /**
     * Closes the plugin's classloader if it supports {@link Closeable}, releasing its resources.
     */
    public void releaseClassLoader() {
        if (classLoader instanceof Closeable)
            try {
                ((Closeable) classLoader).close();
            } catch (IOException e) {
                LOGGER.log(WARNING, "Failed to shut down classloader",e);
            }
    }

    /**
     * Enables this plugin next time Jenkins runs.
     */
    public void enable() throws IOException {
        if (!disableFile.exists()) {
            LOGGER.log(Level.FINEST, "Plugin {0} has been already enabled. Skipping the enable() operation", getShortName());
            return;
        }
        if(!disableFile.delete())
            throw new IOException("Failed to delete "+disableFile);
    }

    /**
     * Disables this plugin next time Jenkins runs.
     */
    public void disable() throws IOException {
        // creates an empty file
        OutputStream os = Files.newOutputStream(disableFile.toPath());
        os.close();
    }

    /**
     * Returns true if this plugin is enabled for this session.
     */
    @Exported
    public boolean isActive() {
        return active && !hasCycleDependency();
    }

    // True if this plugin was found to be part of a dependency cycle.
    public boolean hasCycleDependency(){
        return hasCycleDependency;
    }

    public void setHasCycleDependency(boolean hasCycle){
        hasCycleDependency = hasCycle;
    }

    @Exported
    public boolean isBundled() {
        return isBundled;
    }

    /**
     * If true, the plugin is going to be activated next time
     * Jenkins runs.
     */
    @Exported
    public boolean isEnabled() {
        return !disableFile.exists();
    }

    public Manifest getManifest() {
        return manifest;
    }

    /**
     * Registers the {@link Plugin} instance for this wrapper and links it back.
     */
    public void setPlugin(Plugin plugin) {
        Jenkins.lookup(PluginInstanceStore.class).store.put(this,plugin);
        plugin.wrapper = this;
    }

    public String getPluginClass() {
        return manifest.getMainAttributes().getValue("Plugin-Class");
    }

    /**
     * True if the plugin archive bundles a WEB-INF/licenses.xml resource
     * (probed by opening and immediately closing the stream).
     */
    public boolean hasLicensesXml() {
        try {
            new URL(baseResourceURL,"WEB-INF/licenses.xml").openStream().close();
            return true;
        } catch (IOException e) {
            return false;
        }
    }

    /**
     * Makes sure that all the dependencies exist, and then accept optional dependencies
     * as real dependencies.
     *
     * @throws IOException
     *             thrown if one or several mandatory dependencies doesn't exists.
     */
    /*package*/ void resolvePluginDependencies() throws IOException {
        if (ENABLE_PLUGIN_DEPENDENCIES_VERSION_CHECK) {
            String requiredCoreVersion = getRequiredCoreVersion();
            if (requiredCoreVersion == null) {
                LOGGER.warning(shortName + " doesn't declare required core version.");
            } else {
                VersionNumber actualVersion = Jenkins.getVersion();
                if (actualVersion.isOlderThan(new VersionNumber(requiredCoreVersion))) {
                    dependencyErrors.add(Messages.PluginWrapper_obsoleteCore(Jenkins.getVersion().toString(), requiredCoreVersion));
                }
            }
        }
        // make sure dependencies exist
        for (Dependency d : dependencies) {
            PluginWrapper dependency = parent.getPlugin(d.shortName);
            if (dependency == null) {
                PluginWrapper failedDependency = NOTICE.getPlugin(d.shortName);
                if (failedDependency != null) {
                    dependencyErrors.add(Messages.PluginWrapper_failed_to_load_dependency(failedDependency.getLongName(), failedDependency.getVersion()));
                    // NOTE(review): this 'break' stops checking the remaining
                    // mandatory dependencies after the first known-failed one,
                    // so later errors go unreported — confirm this is intended.
                    break;
                } else {
                    dependencyErrors.add(Messages.PluginWrapper_missing(d.shortName, d.version));
                }
            } else {
                if (dependency.isActive()) {
                    if (isDependencyObsolete(d, dependency)) {
                        dependencyErrors.add(Messages.PluginWrapper_obsolete(dependency.getLongName(), dependency.getVersion(), d.version));
                    }
                } else {
                    if (isDependencyObsolete(d, dependency)) {
                        dependencyErrors.add(Messages.PluginWrapper_disabledAndObsolete(dependency.getLongName(), dependency.getVersion(), d.version));
                    } else {
                        dependencyErrors.add(Messages.PluginWrapper_disabled(dependency.getLongName()));
                    }
                }
            }
        }
        // add the optional dependencies that exists
        // (promotes satisfied optional dependencies into the mandatory list)
        for (Dependency d : optionalDependencies) {
            PluginWrapper dependency = parent.getPlugin(d.shortName);
            if (dependency != null && dependency.isActive()) {
                if (isDependencyObsolete(d, dependency)) {
                    dependencyErrors.add(Messages.PluginWrapper_obsolete(dependency.getLongName(), dependency.getVersion(), d.version));
                } else {
                    dependencies.add(d);
                }
            }
        }
        if (!dependencyErrors.isEmpty()) {
            // Register with the administrative monitor, then fail the load with
            // one message that lists every collected error.
            NOTICE.addPlugin(this);
            StringBuilder messageBuilder = new StringBuilder();
            messageBuilder.append(Messages.PluginWrapper_failed_to_load_plugin(getLongName(), getVersion())).append(System.lineSeparator());
            for (Iterator<String> iterator = dependencyErrors.iterator(); iterator.hasNext(); ) {
                String dependencyError = iterator.next();
                messageBuilder.append(" - ").append(dependencyError);
                if (iterator.hasNext()) {
                    messageBuilder.append(System.lineSeparator());
                }
            }
            throw new IOException(messageBuilder.toString());
        }
    }

    // A dependency is obsolete when the installed plugin is older than the
    // version this plugin declares (only when version checking is enabled).
    private boolean isDependencyObsolete(Dependency d, PluginWrapper dependency) {
        return ENABLE_PLUGIN_DEPENDENCIES_VERSION_CHECK && dependency.getVersionNumber().isOlderThan(new VersionNumber(d.version));
    }

    /**
     * If the plugin has {@link #getUpdateInfo() an update},
     * returns the {@link hudson.model.UpdateSite.Plugin} object.
     *
     * @return
     *      This method may return null &mdash; for example,
     *      the user may have installed a plugin locally developed.
     */
    public UpdateSite.Plugin getUpdateInfo() {
        UpdateCenter uc = Jenkins.getInstance().getUpdateCenter();
        UpdateSite.Plugin p = uc.getPlugin(getShortName());
        if(p!=null && p.isNewerThan(getVersion())) return p;
        return null;
    }

    /**
     * returns the {@link hudson.model.UpdateSite.Plugin} object, or null.
     */
    public UpdateSite.Plugin getInfo() {
        UpdateCenter uc = Jenkins.getInstance().getUpdateCenter();
        return uc.getPlugin(getShortName());
    }

    /**
     * Returns true if this plugin has update in the update center.
     *
     * <p>
     * This method is conservative in the sense that if the version number is incomprehensible,
     * it always returns false.
     */
    @Exported
    public boolean hasUpdate() {
        return getUpdateInfo()!=null;
    }

    @Exported
    @Deprecated // See https://groups.google.com/d/msg/jenkinsci-dev/kRobm-cxFw8/6V66uhibAwAJ
    public boolean isPinned() {
        return false;
    }

    /**
     * Returns true if this plugin is deleted.
     *
     * The plugin continues to function in this session, but in the next session it'll disappear.
     */
    @Exported
    public boolean isDeleted() {
        return !archive.exists();
    }

    /**
     * Sort by short name.
     */
    public int compareTo(PluginWrapper pw) {
        return shortName.compareToIgnoreCase(pw.shortName);
    }

    /**
     * returns true if backup of previous version of plugin exists
     */
    @Exported
    public boolean isDowngradable() {
        return getBackupFile().exists();
    }

    /**
     * Where is the backup file?
     */
    public File getBackupFile() {
        return new File(Jenkins.getInstance().getRootDir(),"plugins/"+getShortName() + ".bak");
    }

    /**
     * returns the version of the backed up plugin,
     * or null if there's no back up.
     */
    @Exported
    public String getBackupVersion() {
        File backup = getBackupFile();
        if (backup.exists()) {
            try {
                // Read the version straight out of the backup's manifest.
                try (JarFile backupPlugin = new JarFile(backup)) {
                    return backupPlugin.getManifest().getMainAttributes().getValue("Plugin-Version");
                }
            } catch (IOException e) {
                LOGGER.log(WARNING, "Failed to get backup version from " + backup, e);
                return null;
            }
        } else {
            return null;
        }
    }

    /**
     * Checks if this plugin is pinned and that's forcing us to use an older version than the bundled one.
     */
    @Deprecated // See https://groups.google.com/d/msg/jenkinsci-dev/kRobm-cxFw8/6V66uhibAwAJ
    public boolean isPinningForcingOldVersion() {
        return false;
    }

    @Extension
    public final static PluginWrapperAdministrativeMonitor NOTICE = new PluginWrapperAdministrativeMonitor();

    /**
     * Administrative Monitor for failed plugins
     */
    public static final class PluginWrapperAdministrativeMonitor extends AdministrativeMonitor {
        // Failed plugins, keyed by short name.
        private final Map<String, PluginWrapper> plugins = new HashMap<>();

        void addPlugin(PluginWrapper plugin) {
            plugins.put(plugin.shortName, plugin);
        }

        public boolean isActivated() {
            return !plugins.isEmpty();
        }

        @Override
        public String getDisplayName() {
            return Messages.PluginWrapper_PluginWrapperAdministrativeMonitor_DisplayName();
        }

        public Collection<PluginWrapper> getPlugins() {
            return plugins.values();
        }

        public PluginWrapper getPlugin(String shortName) {
            return plugins.get(shortName);
        }

        /**
         * Depending on whether the user said "dismiss" or "correct", send him to the right place.
         */
        public void doAct(StaplerRequest req, StaplerResponse rsp) throws IOException {
            // "dismiss" falls through with no redirect.
            if(req.hasParameter("correct")) {
                rsp.sendRedirect(req.getContextPath()+"/pluginManager");
            }
        }

        public static PluginWrapperAdministrativeMonitor get() {
            return AdministrativeMonitor.all().get(PluginWrapperAdministrativeMonitor.class);
        }
    }

    //
    //
    // Action methods
    //
    //

    @RequirePOST
    public HttpResponse doMakeEnabled() throws IOException {
        Jenkins.getInstance().checkPermission(Jenkins.ADMINISTER);
        enable();
        return HttpResponses.ok();
    }

    @RequirePOST
    public HttpResponse doMakeDisabled() throws IOException {
        Jenkins.getInstance().checkPermission(Jenkins.ADMINISTER);
        disable();
        return HttpResponses.ok();
    }

    @RequirePOST
    @Deprecated
    public HttpResponse doPin() throws IOException {
        Jenkins.getInstance().checkPermission(Jenkins.ADMINISTER);
        // See https://groups.google.com/d/msg/jenkinsci-dev/kRobm-cxFw8/6V66uhibAwAJ
        LOGGER.log(WARNING, "Call to pin plugin has been ignored. Plugin name: " + shortName);
        return HttpResponses.ok();
    }

    @RequirePOST
    @Deprecated
    public HttpResponse doUnpin() throws IOException {
        Jenkins.getInstance().checkPermission(Jenkins.ADMINISTER);
        // See https://groups.google.com/d/msg/jenkinsci-dev/kRobm-cxFw8/6V66uhibAwAJ
        LOGGER.log(WARNING, "Call to unpin plugin has been ignored. Plugin name: " + shortName);
        return HttpResponses.ok();
    }

    @RequirePOST
    public HttpResponse doDoUninstall() throws IOException {
        Jenkins jenkins = Jenkins.getActiveInstance();

        jenkins.checkPermission(Jenkins.ADMINISTER);
        archive.delete();

        // Redo who depends on who.
        jenkins.getPluginManager().resolveDependantPlugins();

        return HttpResponses.redirectViaContextPath("/pluginManager/installed");    // send back to plugin manager
    }

    private static final Logger LOGGER = Logger.getLogger(PluginWrapper.class.getName());

    /**
     * Name of the plugin manifest file (to help find where we parse them.)
     */
    public static final String MANIFEST_FILENAME = "META-INF/MANIFEST.MF";
}
package mobi.cloudymail.mailclient;

import mobi.cloudymail.util.MyApp;
import mobi.cloudymail.util.Utils;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.SharedPreferences.Editor;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.util.Log;
import android.view.View;
import android.view.Window;
import android.view.WindowManager;
import android.view.animation.Animation;
import android.view.animation.Animation.AnimationListener;
import android.view.animation.AnimationUtils;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.RelativeLayout;

/**
 * Launch screen of the mail client.
 *
 * On the very first run with a Chinese locale it shows a swipeable
 * "welcome" pager ({@code R.layout.wellcome}); in every other case it shows
 * a plain splash screen ({@code R.layout.splash}) that fades the logo and
 * then opens {@link GlobalInBoxActivity}. The "firstRun" flag is persisted
 * in the {@code MyApp.SHARED_SETTING} shared preferences.
 */
public class WellComeActicity extends Activity implements OnViewChangeListener{

    //member for splash
    private ImageView startLogo;  // splash logo image
    int alpha=255;                // logo alpha, decremented by updateApp() until <= 0
    int b=0;                      // fade state machine: 0 = initial delay, 1 = ticking, 2 = done
    //end member of splash

    // welcome-pager widgets
    private MyScrollLayout mScrollLayout;  // horizontally swipeable pages
    private ImageView[] imgs;              // page-indicator dots
    private int count;                     // number of pages in the pager
    private int currentItem;               // index of the currently selected page
    private Button startBtn;               // "start" button shown on the pager
    private RelativeLayout mainRLayout;
    private LinearLayout pointLLayout;     // container holding the indicator dots
    private LinearLayout bglayout;         // interstitial background shown before the exit animation
    private LinearLayout leftLayout;       // left half animated off-screen
    private LinearLayout rightLayout;      // right half animated off-screen
    private LinearLayout animLayout;       // container of the two animated halves

    // Message "what" value that asks the UI thread to run startAnim().
    private final int SHOWANIMI=0;

    // Handler created on the UI thread; worker threads post SHOWANIMI here so
    // that the exit animation runs on the UI thread.
    Handler mHandler = new Handler() {
        public void handleMessage(Message msg) {
            if(msg.what==SHOWANIMI)
                startAnim();
            super.handleMessage(msg);
        }
    };

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        SharedPreferences preferences=this.getSharedPreferences(MyApp.SHARED_SETTING, Context.MODE_APPEND);
        Editor edit=preferences.edit();
        boolean firstRun=preferences.getBoolean("firstRun", true);
        // First launch in a Chinese locale gets the guided welcome pager;
        // everything else falls back to the plain splash screen.
        if(firstRun && Utils.isInChinese())
        {
            requestWindowFeature(Window.FEATURE_NO_TITLE);
            setContentView(R.layout.wellcome);
            initView();
        }
        else
            initAsSplash();
        if(firstRun)
            edit.putBoolean("firstRun", false).commit();
    }

    // Wires up the welcome pager, its indicator dots and the start button.
    private void initView() {
        mScrollLayout = (MyScrollLayout) findViewById(R.id.ScrollLayout);
        pointLLayout = (LinearLayout) findViewById(R.id.llayout);
        bglayout=(LinearLayout) findViewById(R.id.bglayout);
        mainRLayout = (RelativeLayout) findViewById(R.id.mainRLayout);
        startBtn = (Button) findViewById(R.id.startBtn);
        startBtn.setOnClickListener(onClick);
        animLayout = (LinearLayout) findViewById(R.id.animLayout);
        leftLayout = (LinearLayout) findViewById(R.id.leftLayout);
        rightLayout = (LinearLayout) findViewById(R.id.rightLayout);
        count = mScrollLayout.getChildCount();
        imgs = new ImageView[count];
        for(int i = 0; i< count;i++) {
            imgs[i] = (ImageView) pointLLayout.getChildAt(i);
            imgs[i].setEnabled(true);   // enabled = "not selected" drawable state
            imgs[i].setTag(i);
        }
        currentItem = 0;
        imgs[currentItem].setEnabled(false);  // mark the first page as selected
        mScrollLayout.SetOnViewChangeListener(this);
    }

    // Start button: hide the pager, show the interstitial background for
    // ~500 ms on a worker thread, then ask the UI thread (via mHandler) to
    // run the exit animation.
    private View.OnClickListener onClick = new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            switch (v.getId()) {
            case R.id.startBtn:
                mScrollLayout.setVisibility(View.GONE);
                pointLLayout.setVisibility(View.GONE);
                bglayout.setVisibility(View.VISIBLE);
                // imageBg.invalidate();
                new Thread(new Runnable() {
                    @Override
                    public void run() {
                        try {
                            Thread.sleep(500);
                            Message msg=new Message();
                            msg.what=SHOWANIMI;
                            mHandler.sendMessage(msg);
                        } catch (InterruptedException e) {
                            Log.d(Utils.LOGTAG, "",e);
                        }
                    }
                }).start();
                break;
            }
        }
    };

    // Plays the split "curtain" exit animation; when the left half finishes,
    // opens the inbox activity and closes this one.
    private void startAnim() {
        bglayout.setVisibility(View.GONE);
        animLayout.setVisibility(View.VISIBLE);
//        mainRLayout.setBackgroundResource(R.drawable.whatsnew_bg);
        Animation leftOutAnimation = AnimationUtils.loadAnimation(getApplicationContext(), R.anim.translate_left);
        Animation rightOutAnimation = AnimationUtils.loadAnimation(getApplicationContext(), R.anim.translate_right);
//        Animation leftOutAnimation = AnimationUtils.loadAnimation(getApplicationContext(), R.anim.fadedout_to_left_down);
//        Animation rightOutAnimation = AnimationUtils.loadAnimation(getApplicationContext(), R.anim.fadedout_to_right_down);
        leftLayout.setAnimation(leftOutAnimation);
        rightLayout.setAnimation(rightOutAnimation);
        leftOutAnimation.setAnimationListener(new AnimationListener() {
            @Override
            public void onAnimationStart(Animation animation) {
//                mainRLayout.setBackgroundColor(R.color.black);
            }
            @Override
            public void onAnimationRepeat(Animation animation) {
            }
            @Override
            public void onAnimationEnd(Animation animation) {
                leftLayout.setVisibility(View.GONE);
                rightLayout.setVisibility(View.GONE);
                Intent intent = new Intent(WellComeActicity.this,GlobalInBoxActivity.class);
                WellComeActicity.this.startActivity(intent);
                WellComeActicity.this.finish();
                overridePendingTransition(R.anim.zoom_out_enter, R.anim.zoom_out_exit);
            }
        });
    }

    // Callback from MyScrollLayout when the user swipes to another page.
    @Override
    public void OnViewChange(int position) {
        setcurrentPoint(position);
    }

    // Moves the "selected" state of the indicator dots to the given page.
    private void setcurrentPoint(int position) {
        if(position < 0 || position > count -1 || currentItem == position) {
            return;
        }
        imgs[currentItem].setEnabled(true);
        imgs[position].setEnabled(false);
        currentItem = position;
    }

    // Full-screen splash: shows the logo (English variant outside Chinese
    // locales) and drives a timed fade loop on a worker thread
    // (~200 ms initial delay, then a tick every 35 ms until b == 2).
    private void initAsSplash() {
        getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
                WindowManager.LayoutParams.FLAG_FULLSCREEN);
        requestWindowFeature(Window.FEATURE_NO_TITLE);
        setContentView(R.layout.splash);
        startLogo = (ImageView) findViewById(R.id.startLogo);
        if(!Utils.isInChinese()) {
            startLogo.setBackgroundResource(R.drawable.splash_en);
        }
        startLogo.setAlpha(alpha);
        // NOTE(review): updateApp() runs on this background thread and also
        // calls startActivity(); the commented-out mHandler line in
        // updateApp() suggests UI work was meant to go through the handler
        // -- confirm.
        new Thread(new Runnable() {
            @Override
            public void run() {
                while (b < 2) {
                    try {
                        if (b == 0) {
                            Thread.sleep(200);
                            b = 1;
                        } else {
                            Thread.sleep(35);
                        }
                        updateApp();
                    } catch (Exception e) {
                        Log.e(Utils.LOGTAG, "Error:", e);
                    }
                }
            }
        }).start();
    }

    // One fade tick: lowers the logo alpha; once it reaches zero, stops the
    // loop (b = 2) and moves on to the inbox.
    // NOTE(review): the decremented alpha value is never re-applied to
    // startLogo (setAlpha is only called once in initAsSplash), so the logo
    // does not visibly fade -- verify whether that is intentional.
    private void updateApp() {
        alpha -= 50;
        if (alpha <= 0) {
            b = 2;
            Intent in = new Intent(this, GlobalInBoxActivity.class);
            startActivity(in);
            WellComeActicity.this.finish();
        }
        // mHandler.sendMessage(mHandler.obtainMessage());
    }
}
/* * To change this template, choose Tools | Templates * and open the template in the editor. */ package erp.mod.log.form; import erp.gui.session.SSessionCustom; import erp.lib.SLibConstants; import erp.mod.SModConsts; import erp.mod.SModSysConsts; import erp.mod.log.db.SDbSpotCompanyBranchEntity; import sa.lib.SLibConsts; import sa.lib.SLibUtils; import sa.lib.db.SDbRegistry; import sa.lib.gui.SGuiClient; import sa.lib.gui.SGuiConsts; import sa.lib.gui.SGuiFieldKeyGroup; import sa.lib.gui.SGuiUtils; import sa.lib.gui.SGuiValidation; /** * * @author Juan Barajas */ public class SFormSpotCompanyBranchEntity extends sa.lib.gui.bean.SBeanForm { private SDbSpotCompanyBranchEntity moRegistry; private SGuiFieldKeyGroup moFieldKeyEntityGroup; /** * Creates new form SFormSpotCompanyBranchEntity */ public SFormSpotCompanyBranchEntity(SGuiClient client, String title) { setFormSettings(client, SGuiConsts.BEAN_FORM_EDIT, SModConsts.LOGU_SPOT_COB, SLibConstants.UNDEFINED, title); initComponents(); initComponentsCustom(); } /** * This method is called from within the constructor to initialize the form. * WARNING: Do NOT modify this code. The content of this method is always * regenerated by the Form Editor. 
*/ @SuppressWarnings("unchecked") // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents private void initComponents() { jPanel3 = new javax.swing.JPanel(); jPanel1 = new javax.swing.JPanel(); jPanel23 = new javax.swing.JPanel(); jPanel21 = new javax.swing.JPanel(); jlCompanyBranch = new javax.swing.JLabel(); moKeyCompanyBranch = new sa.lib.gui.bean.SBeanFieldKey(); jPanel31 = new javax.swing.JPanel(); jlCompanyBranchEntity = new javax.swing.JLabel(); moKeyCompanyBranchEntity = new sa.lib.gui.bean.SBeanFieldKey(); jPanel22 = new javax.swing.JPanel(); jlSpot = new javax.swing.JLabel(); moKeySpot = new sa.lib.gui.bean.SBeanFieldKey(); jPanel1.setBorder(javax.swing.BorderFactory.createTitledBorder("Datos del registro:")); jPanel1.setLayout(new java.awt.BorderLayout(0, 5)); jPanel23.setLayout(new java.awt.GridLayout(3, 1, 0, 5)); jPanel21.setLayout(new java.awt.FlowLayout(java.awt.FlowLayout.LEFT, 5, 0)); jlCompanyBranch.setForeground(new java.awt.Color(0, 0, 255)); jlCompanyBranch.setText("Sucursal empresa:*"); jlCompanyBranch.setPreferredSize(new java.awt.Dimension(100, 23)); jPanel21.add(jlCompanyBranch); moKeyCompanyBranch.setPreferredSize(new java.awt.Dimension(200, 23)); jPanel21.add(moKeyCompanyBranch); jPanel23.add(jPanel21); jPanel31.setLayout(new java.awt.FlowLayout(java.awt.FlowLayout.LEFT, 5, 0)); jlCompanyBranchEntity.setForeground(new java.awt.Color(0, 0, 255)); jlCompanyBranchEntity.setText("Entidad:*"); jlCompanyBranchEntity.setPreferredSize(new java.awt.Dimension(100, 23)); jPanel31.add(jlCompanyBranchEntity); moKeyCompanyBranchEntity.setPreferredSize(new java.awt.Dimension(200, 23)); jPanel31.add(moKeyCompanyBranchEntity); jPanel23.add(jPanel31); jPanel22.setLayout(new java.awt.FlowLayout(java.awt.FlowLayout.LEFT, 5, 0)); jlSpot.setText("Lugar:*"); jlSpot.setPreferredSize(new java.awt.Dimension(100, 23)); jPanel22.add(jlSpot); moKeySpot.setPreferredSize(new java.awt.Dimension(200, 23)); jPanel22.add(moKeySpot); 
jPanel23.add(jPanel22); jPanel1.add(jPanel23, java.awt.BorderLayout.NORTH); getContentPane().add(jPanel1, java.awt.BorderLayout.CENTER); }// </editor-fold>//GEN-END:initComponents // Variables declaration - do not modify//GEN-BEGIN:variables private javax.swing.JPanel jPanel1; private javax.swing.JPanel jPanel21; private javax.swing.JPanel jPanel22; private javax.swing.JPanel jPanel23; private javax.swing.JPanel jPanel3; private javax.swing.JPanel jPanel31; private javax.swing.JLabel jlCompanyBranch; private javax.swing.JLabel jlCompanyBranchEntity; private javax.swing.JLabel jlSpot; private sa.lib.gui.bean.SBeanFieldKey moKeyCompanyBranch; private sa.lib.gui.bean.SBeanFieldKey moKeyCompanyBranchEntity; private sa.lib.gui.bean.SBeanFieldKey moKeySpot; // End of variables declaration//GEN-END:variables private void initComponentsCustom() { SGuiUtils.setWindowBounds(this, 400, 250); moFieldKeyEntityGroup = new SGuiFieldKeyGroup(miClient); moKeyCompanyBranch.setKeySettings(miClient, SGuiUtils.getLabelName(jlCompanyBranch.getText()), true); moKeyCompanyBranchEntity.setKeySettings(miClient, SGuiUtils.getLabelName(jlCompanyBranchEntity.getText()), true); moKeySpot.setKeySettings(miClient, SGuiUtils.getLabelName(jlSpot.getText()), true); moFields.addField(moKeyCompanyBranch); moFields.addField(moKeyCompanyBranchEntity); moFields.addField(moKeySpot); moFields.setFormButton(jbSave); } @Override public void addAllListeners() { } @Override public void removeAllListeners() { } @Override public void reloadCatalogues() { moFieldKeyEntityGroup.initGroup(); moFieldKeyEntityGroup.addFieldKey(moKeyCompanyBranch, SModConsts.BPSU_BPB, ((SSessionCustom) miClient.getSession().getSessionCustom()).getCurrentCompanyKey()[0], null); moFieldKeyEntityGroup.addFieldKey(moKeyCompanyBranchEntity, SModConsts.CFGU_COB_ENT, SModSysConsts.CFGS_CT_ENT_WH, null); moFieldKeyEntityGroup.populateCatalogues(); miClient.getSession().populateCatalogue(moKeySpot, SModConsts.LOGU_SPOT, SLibConsts.UNDEFINED, 
null); } @Override public void setRegistry(SDbRegistry registry) throws Exception { int[] key = null; moRegistry = (SDbSpotCompanyBranchEntity) registry; mnFormResult = SLibConsts.UNDEFINED; mbFirstActivation = true; removeAllListeners(); reloadCatalogues(); key = moRegistry.getPrimaryKey(); if (moRegistry.isRegistryNew()) { moRegistry.initPrimaryKey(); jtfRegistryKey.setText(""); } else { jtfRegistryKey.setText(SLibUtils.textKey(moRegistry.getPrimaryKey())); } moKeyCompanyBranch.setValue(new int[] {key[0] }); moKeyCompanyBranchEntity.setValue(new int[] { key[0], key[1] }); moKeySpot.setValue(new int[] { moRegistry.getFkSpotId() }); setFormEditable(true); if (moRegistry.isRegistryNew()) { if (key[0] == SLibConsts.UNDEFINED) { moFieldKeyEntityGroup.resetGroup(); } } else { moKeyCompanyBranch.setEnabled(false); moKeyCompanyBranchEntity.setEnabled(false); } addAllListeners(); } @Override public SDbRegistry getRegistry() throws Exception { SDbSpotCompanyBranchEntity registry = moRegistry.clone(); if (registry.isRegistryNew()) {} moRegistry.setPkCompanyBranchId(moKeyCompanyBranch.getValue()[0]); moRegistry.setPkEntityId(moKeyCompanyBranchEntity.getValue()[1]); moRegistry.setFkSpotId(moKeySpot.getValue()[0]); return registry; } @Override public SGuiValidation validateForm() { SGuiValidation validation = moFields.validateFields(); return validation; } }
/*
 * Written by Doug Lea with assistance from members of JCP JSR-166
 * Expert Group and released to the public domain, as explained at
 * http://creativecommons.org/licenses/publicdomain
 * Other contributors include Andrew Wright, Jeffrey Hayes,
 * Pat Fisher, Mike Judd.
 */

import java.util.concurrent.atomic.*;
import junit.framework.*;
import java.util.*;

/**
 * JUnit 3 tests for {@link AtomicIntegerFieldUpdater}. The three fields below
 * deliberately differ in modifiers/type so the constructor tests can exercise
 * the updater's reflection-time checks; {@code x} is the only valid target.
 */
public class AtomicIntegerFieldUpdaterTest extends JSR166TestCase {
    volatile int x = 0;  // valid target: volatile int
    int w;               // invalid target: not volatile
    long z;              // invalid target: wrong type

    public static void main(String[] args){
        junit.textui.TestRunner.run(suite());
    }

    public static Test suite() {
        return new TestSuite(AtomicIntegerFieldUpdaterTest.class);
    }

    /**
     * Construction with non-existent field throws RuntimeException
     */
    public void testConstructor() {
        try{
            AtomicIntegerFieldUpdater<AtomicIntegerFieldUpdaterTest>
                a = AtomicIntegerFieldUpdater.newUpdater
                (AtomicIntegerFieldUpdaterTest.class, "y");
            shouldThrow();
        }
        catch (RuntimeException rt) {}
    }

    /**
     * construction with field not of given type throws RuntimeException
     */
    public void testConstructor2() {
        try{
            AtomicIntegerFieldUpdater<AtomicIntegerFieldUpdaterTest>
                a = AtomicIntegerFieldUpdater.newUpdater
                (AtomicIntegerFieldUpdaterTest.class, "z");
            shouldThrow();
        }
        catch (RuntimeException rt) {}
    }

    /**
     * construction with non-volatile field throws RuntimeException
     */
    public void testConstructor3() {
        try{
            AtomicIntegerFieldUpdater<AtomicIntegerFieldUpdaterTest>
                a = AtomicIntegerFieldUpdater.newUpdater
                (AtomicIntegerFieldUpdaterTest.class, "w");
            shouldThrow();
        }
        catch (RuntimeException rt) {}
    }

    // Helper hierarchy for the (disabled) protected-field access test below.
    static class Base {
        protected volatile int f = 0;
    }
    static class Sub1 extends Base {
        AtomicIntegerFieldUpdater<Base> fUpdater =
                AtomicIntegerFieldUpdater.newUpdater(Base.class, "f");
    }
    static class Sub2 extends Base {}

    /* Cannot test on iOS: protected field checking requires checking the
     * calling method on the stack, and iOS uses a native stack, not VM one.
    public void testProtectedFieldOnAnotherSubtype() {
        Sub1 sub1 = new Sub1();
        Sub2 sub2 = new Sub2();

        sub1.fUpdater.set(sub1, 1);
        try {
            sub1.fUpdater.set(sub2, 2);
            shouldThrow();
        }
        catch (RuntimeException rt) {}
    }
    */

    /**
     * get returns the last value set or assigned
     */
    public void testGetSet() {
        AtomicIntegerFieldUpdater<AtomicIntegerFieldUpdaterTest> a;
        // Updater creation may be disallowed in this environment; skip if so.
        try {
            a = AtomicIntegerFieldUpdater.newUpdater(AtomicIntegerFieldUpdaterTest.class, "x");
        } catch (RuntimeException ok) {
            return;
        }
        x = 1;
        assertEquals(1,a.get(this));
        a.set(this,2);
        assertEquals(2,a.get(this));
        a.set(this,-3);
        assertEquals(-3,a.get(this));
    }

    /**
     * compareAndSet succeeds in changing value if equal to expected else fails
     */
    public void testCompareAndSet() {
        AtomicIntegerFieldUpdater<AtomicIntegerFieldUpdaterTest> a;
        try {
            a = AtomicIntegerFieldUpdater.newUpdater(AtomicIntegerFieldUpdaterTest.class, "x");
        } catch (RuntimeException ok) {
            return;
        }
        x = 1;
        assertTrue(a.compareAndSet(this,1,2));
        assertTrue(a.compareAndSet(this,2,-4));
        assertEquals(-4,a.get(this));
        assertFalse(a.compareAndSet(this,-5,7));
        assertFalse((7 == a.get(this)));
        assertTrue(a.compareAndSet(this,-4,7));
        assertEquals(7,a.get(this));
    }

    /**
     * compareAndSet in one thread enables another waiting for value
     * to succeed
     */
    public void testCompareAndSetInMultipleThreads() {
        x = 1;
        final AtomicIntegerFieldUpdater<AtomicIntegerFieldUpdaterTest>a;
        try {
            a = AtomicIntegerFieldUpdater.newUpdater(AtomicIntegerFieldUpdaterTest.class, "x");
        } catch (RuntimeException ok) {
            return;
        }

        // Second thread spins until this thread's CAS 1->2 lands, then CASes 2->3.
        Thread t = new Thread(new Runnable() {
                public void run() {
                    while(!a.compareAndSet(AtomicIntegerFieldUpdaterTest.this, 2, 3)) Thread.yield();
                }});
        try {
            t.start();
            assertTrue(a.compareAndSet(this, 1, 2));
            t.join(LONG_DELAY_MS);
            assertFalse(t.isAlive());
            assertEquals(a.get(this), 3);
        }
        catch(Exception e) {
            unexpectedException();
        }
    }

    /**
     * repeated weakCompareAndSet succeeds in changing value when equal
     * to expected
     */
    public void testWeakCompareAndSet() {
        AtomicIntegerFieldUpdater<AtomicIntegerFieldUpdaterTest> a;
        try {
            a = AtomicIntegerFieldUpdater.newUpdater(AtomicIntegerFieldUpdaterTest.class, "x");
        } catch (RuntimeException ok) {
            return;
        }
        x = 1;
        // weakCompareAndSet may fail spuriously, so each update is retried.
        while(!a.weakCompareAndSet(this,1,2));
        while(!a.weakCompareAndSet(this,2,-4));
        assertEquals(-4,a.get(this));
        while(!a.weakCompareAndSet(this,-4,7));
        assertEquals(7,a.get(this));
    }

    /**
     * getAndSet returns previous value and sets to given value
     */
    public void testGetAndSet() {
        AtomicIntegerFieldUpdater<AtomicIntegerFieldUpdaterTest> a;
        try {
            a = AtomicIntegerFieldUpdater.newUpdater(AtomicIntegerFieldUpdaterTest.class, "x");
        } catch (RuntimeException ok) {
            return;
        }
        x = 1;
        assertEquals(1,a.getAndSet(this, 0));
        assertEquals(0,a.getAndSet(this,-10));
        assertEquals(-10,a.getAndSet(this,1));
    }

    /**
     * getAndAdd returns previous value and adds given value
     */
    public void testGetAndAdd() {
        AtomicIntegerFieldUpdater<AtomicIntegerFieldUpdaterTest> a;
        try {
            a = AtomicIntegerFieldUpdater.newUpdater(AtomicIntegerFieldUpdaterTest.class, "x");
        } catch (RuntimeException ok) {
            return;
        }
        x = 1;
        assertEquals(1,a.getAndAdd(this,2));
        assertEquals(3,a.get(this));
        assertEquals(3,a.getAndAdd(this,-4));
        assertEquals(-1,a.get(this));
    }

    /**
     * getAndDecrement returns previous value and decrements
     */
    public void testGetAndDecrement() {
        AtomicIntegerFieldUpdater<AtomicIntegerFieldUpdaterTest> a;
        try {
            a = AtomicIntegerFieldUpdater.newUpdater(AtomicIntegerFieldUpdaterTest.class, "x");
        } catch (RuntimeException ok) {
            return;
        }
        x = 1;
        assertEquals(1,a.getAndDecrement(this));
        assertEquals(0,a.getAndDecrement(this));
        assertEquals(-1,a.getAndDecrement(this));
    }

    /**
     * getAndIncrement returns previous value and increments
     */
    public void testGetAndIncrement() {
        AtomicIntegerFieldUpdater<AtomicIntegerFieldUpdaterTest> a;
        try {
            a = AtomicIntegerFieldUpdater.newUpdater(AtomicIntegerFieldUpdaterTest.class, "x");
        } catch (RuntimeException ok) {
            return;
        }
        x = 1;
        assertEquals(1,a.getAndIncrement(this));
        assertEquals(2,a.get(this));
        a.set(this,-2);
        assertEquals(-2,a.getAndIncrement(this));
        assertEquals(-1,a.getAndIncrement(this));
        assertEquals(0,a.getAndIncrement(this));
        assertEquals(1,a.get(this));
    }

    /**
     * addAndGet adds given value to current, and returns current value
     */
    public void testAddAndGet() {
        AtomicIntegerFieldUpdater<AtomicIntegerFieldUpdaterTest> a;
        try {
            a = AtomicIntegerFieldUpdater.newUpdater(AtomicIntegerFieldUpdaterTest.class, "x");
        } catch (RuntimeException ok) {
            return;
        }
        x = 1;
        assertEquals(3,a.addAndGet(this,2));
        assertEquals(3,a.get(this));
        assertEquals(-1,a.addAndGet(this,-4));
        assertEquals(-1,a.get(this));
    }

    /**
     * decrementAndGet decrements and returns current value
     */
    public void testDecrementAndGet() {
        AtomicIntegerFieldUpdater<AtomicIntegerFieldUpdaterTest> a;
        try {
            a = AtomicIntegerFieldUpdater.newUpdater(AtomicIntegerFieldUpdaterTest.class, "x");
        } catch (RuntimeException ok) {
            return;
        }
        x = 1;
        assertEquals(0,a.decrementAndGet(this));
        assertEquals(-1,a.decrementAndGet(this));
        assertEquals(-2,a.decrementAndGet(this));
        assertEquals(-2,a.get(this));
    }

    /**
     * incrementAndGet increments and returns current value
     */
    public void testIncrementAndGet() {
        AtomicIntegerFieldUpdater<AtomicIntegerFieldUpdaterTest> a;
        try {
            a = AtomicIntegerFieldUpdater.newUpdater(AtomicIntegerFieldUpdaterTest.class, "x");
        } catch (RuntimeException ok) {
            return;
        }
        x = 1;
        assertEquals(2,a.incrementAndGet(this));
        assertEquals(2,a.get(this));
        a.set(this,-2);
        assertEquals(-1,a.incrementAndGet(this));
        assertEquals(0,a.incrementAndGet(this));
        assertEquals(1,a.incrementAndGet(this));
        assertEquals(1,a.get(this));
    }
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.betaplay.sdk.http;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.Socket;
import java.net.UnknownHostException;
import java.security.KeyManagementException;
import java.security.KeyStore;
import java.security.KeyStoreException;
import java.security.NoSuchAlgorithmException;
import java.security.UnrecoverableKeyException;
import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;

import javax.net.ssl.SSLContext;
import javax.net.ssl.TrustManager;
import javax.net.ssl.X509TrustManager;

import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.conn.ClientConnectionManager;
import org.apache.http.conn.scheme.PlainSocketFactory;
import org.apache.http.conn.scheme.Scheme;
import org.apache.http.conn.scheme.SchemeRegistry;
import org.apache.http.conn.ssl.SSLSocketFactory;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.params.HttpConnectionParams;
import org.apache.http.params.HttpParams;

/**
 * simple httpclient use to POST acra logs to server in JSON format
 *
 * NOTE(review): this client installs a trust-all certificate manager and an
 * allow-all hostname verifier (see sslClient / CustomSSLSocketFactory below),
 * which disables TLS server authentication entirely and exposes the traffic
 * to man-in-the-middle attacks. This appears deliberate ("solving problems
 * with ssl") but should be replaced with certificate pinning or a proper
 * trust store before sending anything sensitive.
 *
 * @author Tomas T.
 *
 */
public class HttpClient {

	// Target URL the POST request is sent to.
	private String url;

	public HttpClient(String url) {
		this.url = url;
	}

	// ----------------------------------------------------------------------------
	// get set methods
	// ----------------------------------------------------------------------------

	// Reason phrase of the last HTTP response.
	private String mMessage;

	public String getErrorMessage() {
		return mMessage;
	}

	// ---

	// Body of the last HTTP response, as text.
	private String mResponse;

	public String getResponse() {
		return mResponse;
	}

	// ---

	// Status code of the last HTTP response.
	private int mResponseCode;

	public int getResponseCode() {
		return mResponseCode;
	}

	// ---

	// JSON payload to be sent as the request body (may be null).
	private String mJsonBody;

	/**
	 * provide here json string witch send in request body
	 *
	 * @param data
	 */
	public void setJsonBody(String data) {
		mJsonBody = data;
	}

	// ----------------------------------------------------------------------------
	// add params methods
	// ----------------------------------------------------------------------------

	// Attaches the JSON body (if any) to the request with a JSON content type.
	private HttpUriRequest addBodyParams(HttpUriRequest request)
			throws Exception {
		if (mJsonBody != null) {
			request.addHeader("Content-Type", "application/json");
			if (request instanceof HttpPost) {
				((HttpPost) request).setEntity(new StringEntity(mJsonBody,
						"UTF-8"));
			}
		}
		return request;
	}

	// ----------------------------------------------------------------------------
	// execute methods
	// ----------------------------------------------------------------------------

	/**
	 * Builds a POST request for {@code url} with the configured JSON body and
	 * executes it synchronously; results are exposed via the getters above.
	 */
	public void execute() throws Exception {
		HttpPost request = new HttpPost(url);
		request = (HttpPost) addBodyParams(request);
		executeRequest(request, url);
	}

	// Runs the request with a 40 s connect/read timeout and records status
	// code, reason phrase and response body.
	// NOTE(review): sslClient() returns null when SSL setup fails, which
	// would make client.getParams() below throw NPE -- confirm intended.
	// Also, the connection manager is shut down only on exceptions, not on
	// the success path.
	private void executeRequest(HttpUriRequest request, String url) {

		DefaultHttpClient client = sslClient(new DefaultHttpClient());
		HttpParams params = client.getParams();

		// timeout 40 sec
		HttpConnectionParams.setConnectionTimeout(params, 40 * 1000);
		HttpConnectionParams.setSoTimeout(params, 40 * 1000);

		HttpResponse httpResponse;

		try {
			httpResponse = client.execute(request);
			mResponseCode = httpResponse.getStatusLine().getStatusCode();
			mMessage = httpResponse.getStatusLine().getReasonPhrase();

			HttpEntity entity = httpResponse.getEntity();

			if (entity != null) {

				InputStream instream = entity.getContent();
				mResponse = convertStreamToString(instream);

				// Closing the input stream will trigger connection release
				instream.close();
			}

		} catch (ClientProtocolException e) {
			client.getConnectionManager().shutdown();
			e.printStackTrace();
		} catch (IOException e) {
			client.getConnectionManager().shutdown();
			e.printStackTrace();
		}
	}

	// Reads the whole stream line-by-line into a String (platform charset),
	// appending a trailing newline after each line.
	private static String convertStreamToString(InputStream is) {

		BufferedReader reader = new BufferedReader(new InputStreamReader(is));
		StringBuilder sb = new StringBuilder();

		String line = null;
		try {
			while ((line = reader.readLine()) != null) {
				sb.append(line + "\n");
			}
		} catch (IOException e) {
			e.printStackTrace();
		} finally {
			try {
				is.close();
			} catch (IOException e) {
				e.printStackTrace();
			}
		}
		return sb.toString();
	}

	/**
	 * solving problems with ssl
	 *
	 * NOTE(review): SECURITY -- the trust manager below accepts every
	 * certificate (empty check methods) and ALLOW_ALL_HOSTNAME_VERIFIER
	 * disables hostname checks; getAcceptedIssuers() returning null also
	 * violates the X509TrustManager contract (should be an empty array).
	 * Returning null from the catch block pushes the failure to the caller
	 * as an NPE.
	 *
	 * @param client
	 * @return
	 */
	private DefaultHttpClient sslClient(org.apache.http.client.HttpClient client) {
		try {
			X509TrustManager tm = new X509TrustManager() {
				public void checkClientTrusted(X509Certificate[] xcs,
						String string) throws CertificateException {}

				public void checkServerTrusted(X509Certificate[] xcs,
						String string) throws CertificateException {}

				public X509Certificate[] getAcceptedIssuers() {return null;}
			};
			SSLContext ctx = SSLContext.getInstance("TLS");
			ctx.init(null, new TrustManager[]{tm}, null);
			SSLSocketFactory ssf = new CustomSSLSocketFactory(ctx);
			ssf.setHostnameVerifier(SSLSocketFactory.ALLOW_ALL_HOSTNAME_VERIFIER);
			ClientConnectionManager ccm = client.getConnectionManager();
			SchemeRegistry sr = ccm.getSchemeRegistry();
			sr.register(new Scheme("http", PlainSocketFactory.getSocketFactory(), 80));
			sr.register(new Scheme("https", ssf, 443));
			return new DefaultHttpClient(ccm, client.getParams());
		} catch (Exception ex) {
			return null;
		}
	}

	// Socket factory that delegates socket creation to the (trust-all)
	// SSLContext built above instead of the default trust store.
	private class CustomSSLSocketFactory extends SSLSocketFactory {

		SSLContext sslContext = SSLContext.getInstance("TLS");

		public CustomSSLSocketFactory(KeyStore truststore)
				throws NoSuchAlgorithmException, KeyManagementException,
				KeyStoreException, UnrecoverableKeyException {
			super(truststore);

			TrustManager tm = new X509TrustManager() {
				public void checkClientTrusted(X509Certificate[] chain,
						String authType) throws CertificateException {}

				public void checkServerTrusted(X509Certificate[] chain,
						String authType) throws CertificateException {}

				public X509Certificate[] getAcceptedIssuers() {return null;}
			};

			sslContext.init(null, new TrustManager[] { tm }, null);
		}

		public CustomSSLSocketFactory(SSLContext context)
				throws KeyManagementException, NoSuchAlgorithmException,
				KeyStoreException, UnrecoverableKeyException {
			super(null);
			sslContext = context;
		}

		@Override
		public Socket createSocket(Socket socket, String host, int port,
				boolean autoClose) throws IOException, UnknownHostException {
			return sslContext.getSocketFactory().createSocket(socket, host,
					port, autoClose);
		}

		@Override
		public Socket createSocket() throws IOException {
			return sslContext.getSocketFactory().createSocket();
		}
	}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.internal.processors.rest.protocols.http.jetty;

import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.BeanProperty;
import com.fasterxml.jackson.databind.JavaType;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.JsonSerializer;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationConfig;
import com.fasterxml.jackson.databind.SerializationFeature;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.module.SimpleModule;
import com.fasterxml.jackson.databind.ser.DefaultSerializerProvider;
import com.fasterxml.jackson.databind.ser.SerializerFactory;
import java.io.IOException;
import java.sql.SQLException;
import java.text.DateFormat;
import java.util.Locale;
import org.apache.ignite.internal.processors.cache.query.GridCacheSqlIndexMetadata;
import org.apache.ignite.internal.processors.cache.query.GridCacheSqlMetadata;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.internal.visor.util.VisorExceptionWrapper;
import org.apache.ignite.lang.IgniteBiTuple;
import org.apache.ignite.lang.IgniteUuid;

/**
 * Custom object mapper for HTTP REST API.
 *
 * Configures a US-locale date format, maps {@code null} keys/values to an
 * empty field name / JSON {@code null}, tolerates beans without properties,
 * and registers dedicated serializers for throwables and Ignite-specific
 * types so REST responses stay stable and human-readable.
 */
public class GridJettyObjectMapper extends ObjectMapper {
    /**
     * Default constructor.
     */
    public GridJettyObjectMapper() {
        // null JsonFactory and null DeserializationContext keep Jackson's
        // defaults; only the serializer provider is customized.
        super(null, new CustomSerializerProvider(), null);

        setDateFormat(DateFormat.getDateTimeInstance(DateFormat.DEFAULT, DateFormat.DEFAULT, Locale.US));

        SimpleModule module = new SimpleModule();

        // The static serializer constants below are initialized during class
        // initialization, i.e. before any constructor runs.
        module.addSerializer(Throwable.class, THROWABLE_SERIALIZER);
        module.addSerializer(IgniteBiTuple.class, IGNITE_TUPLE_SERIALIZER);
        module.addSerializer(IgniteUuid.class, IGNITE_UUID_SERIALIZER);
        module.addSerializer(GridCacheSqlMetadata.class, IGNITE_SQL_METADATA_SERIALIZER);
        module.addSerializer(GridCacheSqlIndexMetadata.class, IGNITE_SQL_INDEX_METADATA_SERIALIZER);

        // Don't fail on value types that expose no bean properties.
        configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false);

        registerModule(module);
    }

    /** Custom {@code null} key serializer: writes an empty field name instead of failing. */
    private static final JsonSerializer<Object> NULL_KEY_SERIALIZER = new JsonSerializer<Object>() {
        /** {@inheritDoc} */
        @Override public void serialize(Object val, JsonGenerator gen, SerializerProvider ser) throws IOException {
            gen.writeFieldName("");
        }
    };

    /** Custom {@code null} value serializer: writes a JSON {@code null} literal. */
    private static final JsonSerializer<Object> NULL_VALUE_SERIALIZER = new JsonSerializer<Object>() {
        /** {@inheritDoc} */
        @Override public void serialize(Object val, JsonGenerator gen, SerializerProvider ser) throws IOException {
            gen.writeNull();
        }
    };

    /**
     * Custom serializers provider that provide special serializers for {@code null} values.
     */
    private static class CustomSerializerProvider extends DefaultSerializerProvider {
        /**
         * Default constructor.
         */
        CustomSerializerProvider() {
            super();
        }

        /**
         * Full constructor.
         *
         * @param src Blueprint object used as the baseline for this instance.
         * @param cfg Provider configuration.
         * @param f Serializers factory.
         */
        CustomSerializerProvider(SerializerProvider src, SerializationConfig cfg, SerializerFactory f) {
            super(src, cfg, f);
        }

        /** {@inheritDoc} */
        @Override public DefaultSerializerProvider createInstance(SerializationConfig cfg, SerializerFactory jsf) {
            // Jackson clones the blueprint provider per serialization run;
            // this override keeps the clone a CustomSerializerProvider.
            return new CustomSerializerProvider(this, cfg, jsf);
        }

        /** {@inheritDoc} */
        @Override public JsonSerializer<Object> findNullKeySerializer(JavaType serializationType,
            BeanProperty prop) throws JsonMappingException {
            return NULL_KEY_SERIALIZER;
        }

        /** {@inheritDoc} */
        @Override public JsonSerializer<Object> findNullValueSerializer(BeanProperty prop) throws JsonMappingException {
            return NULL_VALUE_SERIALIZER;
        }
    }

    /** Custom serializer for {@link Throwable}: class name, message, SQL details, cause and suppressed chain. */
    private static final JsonSerializer<Throwable> THROWABLE_SERIALIZER = new JsonSerializer<Throwable>() {
        /**
         * @param e Exception to write.
         * @param gen JSON generator.
         * @throws IOException If failed to write.
         */
        private void writeException(Throwable e, JsonGenerator gen) throws IOException {
            // VisorExceptionWrapper carries the original class name of a
            // remotely-raised exception; report that instead of the wrapper.
            if (e instanceof VisorExceptionWrapper) {
                VisorExceptionWrapper wrapper = (VisorExceptionWrapper)e;

                gen.writeStringField("className", wrapper.getClassName());
            }
            else
                gen.writeStringField("className", e.getClass().getName());

            if (e.getMessage() != null)
                gen.writeStringField("message", e.getMessage());

            if (e instanceof SQLException) {
                SQLException sqlE = (SQLException)e;

                gen.writeNumberField("errorCode", sqlE.getErrorCode());
                gen.writeStringField("SQLState", sqlE.getSQLState());
            }
        }

        /** {@inheritDoc} */
        @Override public void serialize(Throwable e, JsonGenerator gen, SerializerProvider ser) throws IOException {
            gen.writeStartObject();

            writeException(e, gen);

            // Causes and suppressed exceptions recurse through this same
            // serializer via writeObjectField/writeObject.
            if (e.getCause() != null)
                gen.writeObjectField("cause", e.getCause());

            if (!F.isEmpty(e.getSuppressed())) {
                gen.writeArrayFieldStart("suppressed");

                for (Throwable sup : e.getSuppressed())
                    gen.writeObject(sup);

                gen.writeEndArray();
            }

            gen.writeEndObject();
        }
    };

    /** Custom serializer for {@link IgniteUuid}: plain string form. */
    private static final JsonSerializer<IgniteUuid> IGNITE_UUID_SERIALIZER = new JsonSerializer<IgniteUuid>() {
        /** {@inheritDoc} */
        @Override public void serialize(IgniteUuid uid, JsonGenerator gen, SerializerProvider ser) throws IOException {
            gen.writeString(uid.toString());
        }
    };

    /** Custom serializer for {@link IgniteBiTuple}: {"key": ..., "value": ...} object. */
    private static final JsonSerializer<IgniteBiTuple> IGNITE_TUPLE_SERIALIZER = new JsonSerializer<IgniteBiTuple>() {
        /** {@inheritDoc} */
        @Override public void serialize(IgniteBiTuple t, JsonGenerator gen, SerializerProvider ser) throws IOException {
            gen.writeStartObject();

            gen.writeObjectField("key", t.getKey());
            gen.writeObjectField("value", t.getValue());

            gen.writeEndObject();
        }
    };

    /** Custom serializer for {@link GridCacheSqlMetadata}: flat object of cache SQL metadata. */
    private static final JsonSerializer<GridCacheSqlMetadata> IGNITE_SQL_METADATA_SERIALIZER = new JsonSerializer<GridCacheSqlMetadata>() {
        /** {@inheritDoc} */
        @Override public void serialize(GridCacheSqlMetadata m, JsonGenerator gen, SerializerProvider ser) throws IOException {
            gen.writeStartObject();

            gen.writeStringField("cacheName", m.cacheName());
            gen.writeObjectField("types", m.types());
            gen.writeObjectField("keyClasses", m.keyClasses());
            gen.writeObjectField("valClasses", m.valClasses());
            gen.writeObjectField("fields", m.fields());
            gen.writeObjectField("indexes", m.indexes());

            gen.writeEndObject();
        }
    };

    /** Custom serializer for {@link GridCacheSqlIndexMetadata}: flat object describing one SQL index. */
    private static final JsonSerializer<GridCacheSqlIndexMetadata> IGNITE_SQL_INDEX_METADATA_SERIALIZER = new JsonSerializer<GridCacheSqlIndexMetadata>() {
        /** {@inheritDoc} */
        @Override public void serialize(GridCacheSqlIndexMetadata idx, JsonGenerator gen, SerializerProvider ser) throws IOException {
            gen.writeStartObject();

            gen.writeStringField("name", idx.name());
            gen.writeObjectField("fields", idx.fields());
            gen.writeObjectField("descendings", idx.descendings());
            gen.writeBooleanField("unique", idx.unique());

            gen.writeEndObject();
        }
    };
}
// Copyright 2010-2013 (c) IeAT, Siemens AG, AVANTSSAR and SPaCIoS consortia.
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package org.avantssar.aslanpp.parser;

import org.avantssar.aslanpp.model.CommunicationTerm;
import org.avantssar.aslanpp.model.ErrorMessages;
import org.avantssar.aslanpp.model.FunctionSymbol;
import org.avantssar.aslanpp.model.IExpression;
import org.avantssar.aslanpp.model.IOwned;
import org.avantssar.aslanpp.model.IScope;
import org.avantssar.aslanpp.model.ITerm;
import org.avantssar.aslanpp.model.MacroSymbol;
import org.avantssar.aslanpp.model.Prelude;
import org.avantssar.commons.ChannelEntry;
import org.avantssar.commons.ErrorGatherer;
import org.avantssar.commons.LocationInfo;

/**
 * Raw (unresolved) representation of an OOP-style call expression
 * {@code caller.what} in ASLan++ source. Resolution happens lazily in
 * {@link #buildClean(boolean)}, which classifies the call either as a
 * communication term (when {@code what} names the prelude send/receive
 * primitives) or as an ordinary function/macro application with the caller
 * prepended as the first argument. The result is cached in
 * {@code cleanTerm}/{@code cleanCommunication} and guarded by
 * {@code cleanBuilt}, so resolution runs at most once per instance.
 */
public class RawOOPCallExpression extends AbstractRawExpression {

    // The receiver of the OOP call (the part before the dot).
    private final IRawExpression caller;
    // The invoked part (the part after the dot): a function, macro, or send/receive.
    private final IRawExpression what;

    // Exactly one of the two fields below is set by buildClean() on success.
    private ITerm cleanTerm;
    private CommunicationTerm cleanCommunication;
    // Ensures buildClean() does its work only once (errors included).
    private boolean cleanBuilt;

    /**
     * @param scope    scope in which the call appears.
     * @param location source location, used for error reporting.
     * @param err      error sink shared with the rest of the parser.
     * @param caller   receiver expression.
     * @param what     invoked expression (function/macro name or send/receive).
     */
    public RawOOPCallExpression(IScope scope, LocationInfo location, ErrorGatherer err, IRawExpression caller, IRawExpression what) {
        super(scope, location, err);
        this.caller = caller;
        this.what = what;
    }

    /** @return the receiver expression of the call. */
    public IRawExpression getCaller() {
        return caller;
    }

    /** @return the invoked expression of the call. */
    public IRawExpression getWhat() {
        return what;
    }

    /** {@inheritDoc} */
    @Override
    public void accept(IRawExpressionVisitor visitor) {
        visitor.visit(this);
    }

    /**
     * A formula may only be an ordinary (non-communication) term; a
     * transmission in formula position is reported as an error.
     */
    @Override
    public IExpression getFormula() {
        buildClean(true);
        if (cleanTerm != null) {
            return cleanTerm.expression();
        }
        else {
            getErrorGatherer().addException(getLocation(), ErrorMessages.ITEM_NOT_ALLOWED_IN_THIS_PLACE, "Function call", getRepresentation(), "formula");
            return null;
        }
    }

    /**
     * A guard accepts a plain term always, and a communication term only when
     * it is a receive and receives are allowed in this guard position.
     *
     * @param allowReceive whether a receive transmission is legal here.
     */
    @Override
    public IExpression getGuard(boolean allowReceive) {
        buildClean(true);
        if (cleanTerm != null) {
            return cleanTerm.expression();
        }
        else if (cleanCommunication != null) {
            if (allowReceive) {
                if (cleanCommunication.isReceive()) {
                    return cleanCommunication.expression();
                }
                else {
                    // A send can never appear in a guard.
                    getErrorGatherer().addException(getLocation(), ErrorMessages.ITEM_NOT_ALLOWED_IN_THIS_PLACE, "Transmission", getRepresentation(), "guard without receive");
                    return null;
                }
            }
            else {
                getErrorGatherer().addException(getLocation(), ErrorMessages.ITEM_NOT_ALLOWED_IN_THIS_PLACE, "Transmission", getRepresentation(), "guard without receive");
                return null;
            }
        }
        else {
            // should never get here
            getErrorGatherer().addException(getLocation(), ErrorMessages.ITEM_NOT_ALLOWED_IN_THIS_PLACE, "Function", getRepresentation(), "guard");
            return null;
        }
    }

    /**
     * Returns the resolved term; a communication term is returned only when
     * {@code allowTransmission} is set.
     *
     * @param allowTransmission whether a transmission is legal in this position.
     * @param strictVarCheck    forwarded to argument resolution.
     */
    @Override
    public ITerm getTerm(boolean allowTransmission, boolean strictVarCheck) {
        buildClean(strictVarCheck);
        if (cleanTerm != null) {
            return cleanTerm;
        }
        else if (cleanCommunication != null) {
            if (allowTransmission) {
                return cleanCommunication;
            }
            else {
                getErrorGatherer().addException(getLocation(), ErrorMessages.ITEM_NOT_ALLOWED_IN_THIS_PLACE, "Transmission", getRepresentation(), "term");
                return null;
            }
        }
        else {
            // should never get here
            getErrorGatherer().addException(getLocation(), ErrorMessages.ITEM_NOT_ALLOWED_IN_THIS_PLACE, "Function", getRepresentation(), "term");
            return null;
        }
    }

    /** Only valid when the call resolved to a send/receive transmission. */
    @Override
    public CommunicationTerm getTransmission() {
        buildClean(true);
        if (cleanCommunication != null) {
            return cleanCommunication;
        }
        else {
            getErrorGatherer().addException(getLocation(), ErrorMessages.ITEM_NOT_ALLOWED_IN_THIS_PLACE, "Function", getRepresentation(), "transmission");
            return null;
        }
    }

    /** An OOP call can never be used as an (in)equality; always an error. */
    @Override
    public IExpression getComparison() {
        getErrorGatherer().addException(getLocation(), ErrorMessages.ITEM_NOT_ALLOWED_IN_THIS_PLACE, "Function call", getRepresentation(), "(in)equality");
        return null;
    }

    /** OOP calls are always usable as channel goals. */
    @Override
    public boolean isChannelGoal() {
        return true;
    }

    /** Builds a channel-goal descriptor over a regular channel arrow. */
    @Override
    public RawChannelGoalInfo getChannelGoal() {
        return new RawChannelGoalInfo(caller, what, ChannelEntry.regular.arrow);
    }

    /**
     * One-shot resolution of the raw call. On success sets exactly one of
     * {@code cleanCommunication} (for prelude send/receive) or
     * {@code cleanTerm} (for a function/macro application); on failure
     * reports through the error gatherer. Always sets {@code cleanBuilt}.
     *
     * @param strictVarCheck forwarded to argument term resolution (note the
     *                       send/receive branch always uses {@code true}).
     */
    private void buildClean(boolean strictVarCheck) {
        if (!cleanBuilt) {
            if (getWhat() instanceof RawConstVarExpression) {
                String whName = ((RawConstVarExpression) getWhat()).getName();
                if (whName.equals(Prelude.SEND) || whName.equals(Prelude.RECEIVE)) {
                    // NOTE(review): this branch is only reachable when the outer
                    // `instanceof RawConstVarExpression` succeeded, so the cast
                    // below presumably relies on RawFunctionExpression extending
                    // RawConstVarExpression -- confirm against the type hierarchy.
                    if (getWhat() instanceof RawFunctionExpression) {
                        RawFunctionExpression rFnc = (RawFunctionExpression) getWhat();
                        if (rFnc.getParameters().size() == 2) {
                            ITerm sender = getCaller().getTerm(false, true);
                            ITerm receiver = rFnc.getParameters().get(0).getTerm(false, true);
                            // if receiver switch the roles
                            if (whName.equals(Prelude.RECEIVE)) {
                                ITerm aux = sender;
                                sender = receiver;
                                receiver = aux;
                            }
                            ITerm payload = rFnc.getParameters().get(1).getTerm(false, true);
                            cleanCommunication = getScope().communication(getLocation(), sender, receiver, payload, null, ChannelEntry.regular, whName.equals(Prelude.RECEIVE), true, true);
                        }
                        else {
                            // NOTE(review): the expected/actual arity arguments (3, 1)
                            // look inconsistent with the size()==2 check above -- verify
                            // against the WRONG_NUMBER_OF_PARAMETERS message format.
                            getErrorGatherer().addException(getLocation(), ErrorMessages.WRONG_NUMBER_OF_PARAMETERS, "Transmission", whName, 3, 1);
                        }
                    }
                    else {
                        getErrorGatherer().addException(getLocation(), ErrorMessages.WRONG_NUMBER_OF_PARAMETERS, "Transmission", whName, 3, 1);
                    }
                }
                else {
                    // Ordinary function/macro call: the caller becomes args[0],
                    // the explicit parameters (if any) follow.
                    String name = null;
                    ITerm[] args = null;
                    if (getWhat() instanceof RawFunctionExpression) {
                        RawFunctionExpression rFnc = (RawFunctionExpression) getWhat();
                        if (Character.isLowerCase(rFnc.getName().charAt(0))) {
                            name = rFnc.getName();
                            args = new ITerm[rFnc.getParameters().size() + 1];
                            args[0] = getCaller().getTerm(false, strictVarCheck);
                            for (int i = 0; i < rFnc.getParameters().size(); i++) {
                                args[i + 1] = rFnc.getParameters().get(i).getTerm(false, strictVarCheck);
                            }
                        }
                        else {
                            // ASLan++ function/macro names must start lowercase.
                            getErrorGatherer().addException(getLocation(), ErrorMessages.INVALID_NAME_FOR_ITEM, "function/macro", rFnc.getName());
                        }
                    }
                    else if (getWhat() instanceof RawConstVarExpression) {
                        RawConstVarExpression rCnst = (RawConstVarExpression) getWhat();
                        if (Character.isLowerCase(rCnst.getName().charAt(0))) {
                            name = rCnst.getName();
                            args = new ITerm[1];
                            args[0] = getCaller().getTerm(false, strictVarCheck);
                        }
                        else {
                            getErrorGatherer().addException(getLocation(), ErrorMessages.INVALID_NAME_FOR_ITEM, "function/macro", rCnst.getName());
                        }
                    }
                    // NOTE(review): if neither branch above set `name`, this looks
                    // up null -- presumably findFunctionOrMacro tolerates that;
                    // confirm in IScope.
                    IOwned sym = getScope().findFunctionOrMacro(name);
                    if (sym instanceof FunctionSymbol) {
                        cleanTerm = ((FunctionSymbol) sym).term(getLocation(), getScope(), args);
                    }
                    else if (sym instanceof MacroSymbol) {
                        cleanTerm = ((MacroSymbol) sym).term(getLocation(), getScope(), args);
                    }
                    else {
                        getErrorGatherer().addException(getLocation(), ErrorMessages.UNDEFINED_FUNCTION_OR_MACRO, name, getScope().getOriginalName());
                    }
                }
            }
            else {
                getErrorGatherer().addException(getLocation(), ErrorMessages.DIFFERENT_ITEM_EXPECTED, "function/macro", getWhat().getRepresentation());
            }
            cleanBuilt = true;
        }
    }
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.index.mapper.completion; import org.apache.lucene.index.IndexableField; import org.apache.lucene.search.Query; import org.apache.lucene.search.suggest.xdocument.*; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.CharsRefBuilder; import org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.automaton.RegExp; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.mapper.*; import org.elasticsearch.index.mapper.core.OldCompletionFieldMapper; import org.elasticsearch.index.mapper.core.CompletionFieldMapper; import org.elasticsearch.test.ElasticsearchSingleNodeTest; import org.junit.Test; import java.io.IOException; import java.util.Arrays; import java.util.Map; import static org.elasticsearch.Version.*; 
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.test.VersionUtils.randomVersionBetween;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.hamcrest.Matchers.*;

/**
 * Single-node tests for {@code CompletionFieldMapper}: mapping defaults,
 * analyzer configuration round-tripping, document parsing of the various
 * accepted input shapes (string, array, object with weight, mixed arrays),
 * pre-2.0 back-compatibility fallback to {@code OldCompletionFieldMapper},
 * reserved-character validation, and the query types produced by the field.
 */
public class CompletionFieldMapperTests extends ElasticsearchSingleNodeTest {

    /** Defaults: "simple" analyzer for index and search, both preserve flags on. */
    @Test
    public void testDefaultConfiguration() throws IOException {
        String mapping = jsonBuilder().startObject().startObject("type1")
                .startObject("properties").startObject("completion")
                .field("type", "completion")
                .endObject().endObject()
                .endObject().endObject().string();

        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);

        FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion");
        assertThat(fieldMapper, instanceOf(CompletionFieldMapper.class));
        MappedFieldType completionFieldType = fieldMapper.fieldType();

        NamedAnalyzer indexAnalyzer = completionFieldType.indexAnalyzer();
        assertThat(indexAnalyzer.name(), equalTo("simple"));
        assertThat(indexAnalyzer.analyzer(), instanceOf(CompletionAnalyzer.class));
        CompletionAnalyzer analyzer = (CompletionAnalyzer) indexAnalyzer.analyzer();
        assertThat(analyzer.preservePositionIncrements(), equalTo(true));
        assertThat(analyzer.preserveSep(), equalTo(true));

        NamedAnalyzer searchAnalyzer = completionFieldType.searchAnalyzer();
        assertThat(searchAnalyzer.name(), equalTo("simple"));
        assertThat(searchAnalyzer.analyzer(), instanceOf(CompletionAnalyzer.class));
        analyzer = (CompletionAnalyzer) searchAnalyzer.analyzer();
        assertThat(analyzer.preservePositionIncrements(), equalTo(true));
        assertThat(analyzer.preserveSep(), equalTo(true));
    }

    /** Explicit analyzer settings and preserve flags are honored per-analyzer. */
    @Test
    public void testCompletionAnalyzerSettings() throws Exception {
        String mapping = jsonBuilder().startObject().startObject("type1")
                .startObject("properties").startObject("completion")
                .field("type", "completion")
                .field("analyzer", "simple")
                .field("search_analyzer", "standard")
                .field("preserve_separators", false)
                .field("preserve_position_increments", true)
                .endObject().endObject()
                .endObject().endObject().string();

        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);

        FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion");
        assertThat(fieldMapper, instanceOf(CompletionFieldMapper.class));
        MappedFieldType completionFieldType = fieldMapper.fieldType();

        NamedAnalyzer indexAnalyzer = completionFieldType.indexAnalyzer();
        assertThat(indexAnalyzer.name(), equalTo("simple"));
        assertThat(indexAnalyzer.analyzer(), instanceOf(CompletionAnalyzer.class));
        CompletionAnalyzer analyzer = (CompletionAnalyzer) indexAnalyzer.analyzer();
        assertThat(analyzer.preservePositionIncrements(), equalTo(true));
        assertThat(analyzer.preserveSep(), equalTo(false));

        NamedAnalyzer searchAnalyzer = completionFieldType.searchAnalyzer();
        assertThat(searchAnalyzer.name(), equalTo("standard"));
        assertThat(searchAnalyzer.analyzer(), instanceOf(CompletionAnalyzer.class));
        analyzer = (CompletionAnalyzer) searchAnalyzer.analyzer();
        assertThat(analyzer.preservePositionIncrements(), equalTo(true));
        assertThat(analyzer.preserveSep(), equalTo(false));
    }

    /** Mapping settings survive a toXContent round trip unchanged. */
    @Test
    public void testTypeParsing() throws Exception {
        String mapping = jsonBuilder().startObject().startObject("type1")
                .startObject("properties").startObject("completion")
                .field("type", "completion")
                .field("analyzer", "simple")
                .field("search_analyzer", "standard")
                .field("preserve_separators", false)
                .field("preserve_position_increments", true)
                .field("max_input_length", 14)
                .endObject().endObject()
                .endObject().endObject().string();

        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);

        FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion");
        assertThat(fieldMapper, instanceOf(CompletionFieldMapper.class));
        CompletionFieldMapper completionFieldMapper = (CompletionFieldMapper) fieldMapper;
        // Serialize the mapper back to JSON and compare field-by-field.
        XContentBuilder builder = jsonBuilder().startObject();
        completionFieldMapper.toXContent(builder, ToXContent.EMPTY_PARAMS).endObject();
        builder.close();
        Map<String, Object> serializedMap = JsonXContent.jsonXContent.createParser(builder.bytes()).map();
        Map<String, Object> configMap = (Map<String, Object>) serializedMap.get("completion");
        assertThat(configMap.get("analyzer").toString(), is("simple"));
        assertThat(configMap.get("search_analyzer").toString(), is("standard"));
        assertThat(Boolean.valueOf(configMap.get("preserve_separators").toString()), is(false));
        assertThat(Boolean.valueOf(configMap.get("preserve_position_increments").toString()), is(true));
        assertThat(Integer.valueOf(configMap.get("max_input_length").toString()), is(14));
    }

    /** A bare string value indexes as exactly one suggest field. */
    @Test
    public void testParsingMinimal() throws Exception {
        String mapping = jsonBuilder().startObject().startObject("type1")
                .startObject("properties").startObject("completion")
                .field("type", "completion")
                .endObject().endObject()
                .endObject().endObject().string();

        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
        FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion");
        MappedFieldType completionFieldType = fieldMapper.fieldType();
        ParsedDocument parsedDocument = defaultMapper.parse("test", "type1", "1", XContentFactory.jsonBuilder()
                .startObject()
                .field("completion", "suggestion")
                .endObject()
                .bytes());
        IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.names().indexName());
        assertSuggestFields(fields, 1);
    }

    // NOTE(review): method name is misspelled ("Compatiblity"); kept as-is
    // since renaming a @Test method is out of scope for a doc-only pass.
    /** Pre-2.0 indices fall back to the old completion mapper; 2.0+ use the new one. */
    @Test
    public void testBackCompatiblity() throws Exception {
        // creating completion field for pre 2.0 indices, should create old completion fields
        String mapping = jsonBuilder().startObject().startObject("type1")
                .startObject("properties").startObject("completion")
                .field("type", "completion")
                .endObject().endObject()
                .endObject().endObject().string();
        for (Version version : Arrays.asList(V_1_7_0, V_1_1_0, randomVersionBetween(random(), V_1_1_0, V_1_7_0))) {
            DocumentMapper defaultMapper = createIndex("test", Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version.id).build())
                    .mapperService().documentMapperParser().parse(mapping);
            FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion");
            assertTrue(fieldMapper instanceof OldCompletionFieldMapper);
            MappedFieldType completionFieldType = fieldMapper.fieldType();
            ParsedDocument parsedDocument = defaultMapper.parse("test", "type1", "1", XContentFactory.jsonBuilder()
                    .startObject()
                    .field("completion", "suggestion")
                    .endObject()
                    .bytes());
            IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.names().indexName());
            assertThat(fields.length, equalTo(1));
            // Old mapper must NOT emit the new suggest-field type.
            assertFalse(fields[0] instanceof SuggestField);
            // Delete between iterations so the next createIndex("test") succeeds.
            assertAcked(client().admin().indices().prepareDelete("test").execute().get());
        }
        // for 2.0 indices and onwards, should create new completion fields
        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
        FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion");
        assertTrue(fieldMapper instanceof CompletionFieldMapper);
        MappedFieldType completionFieldType = fieldMapper.fieldType();
        ParsedDocument parsedDocument = defaultMapper.parse("test", "type1", "1", XContentFactory.jsonBuilder()
                .startObject()
                .field("completion", "suggestion")
                .endObject()
                .bytes());
        IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.names().indexName());
        assertThat(fields.length, equalTo(1));
        assertTrue(fields[0] instanceof SuggestField);
        assertAcked(client().admin().indices().prepareDelete("test").execute().get());
    }

    /** An array value indexes one suggest field per element. */
    @Test
    public void testParsingMultiValued() throws Exception {
        String mapping = jsonBuilder().startObject().startObject("type1")
                .startObject("properties").startObject("completion")
                .field("type", "completion")
                .endObject().endObject()
                .endObject().endObject().string();

        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
        FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion");
        MappedFieldType completionFieldType = fieldMapper.fieldType();
        ParsedDocument parsedDocument = defaultMapper.parse("test", "type1", "1", XContentFactory.jsonBuilder()
                .startObject()
                .array("completion", "suggestion1", "suggestion2")
                .endObject()
                .bytes());
        IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.names().indexName());
        assertSuggestFields(fields, 2);
    }

    /** Object form with input + weight indexes a single suggest field. */
    @Test
    public void testParsingWithWeight() throws Exception {
        String mapping = jsonBuilder().startObject().startObject("type1")
                .startObject("properties").startObject("completion")
                .field("type", "completion")
                .endObject().endObject()
                .endObject().endObject().string();

        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
        FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion");
        MappedFieldType completionFieldType = fieldMapper.fieldType();
        ParsedDocument parsedDocument = defaultMapper.parse("test", "type1", "1", XContentFactory.jsonBuilder()
                .startObject()
                .startObject("completion")
                .field("input", "suggestion")
                .field("weight", 2)
                .endObject()
                .endObject()
                .bytes());
        IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.names().indexName());
        assertSuggestFields(fields, 1);
    }

    /** Object form with an input array shares one weight across all inputs. */
    @Test
    public void testParsingMultiValueWithWeight() throws Exception {
        String mapping = jsonBuilder().startObject().startObject("type1")
                .startObject("properties").startObject("completion")
                .field("type", "completion")
                .endObject().endObject()
                .endObject().endObject().string();

        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
        FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion");
        MappedFieldType completionFieldType = fieldMapper.fieldType();
        ParsedDocument parsedDocument = defaultMapper.parse("test", "type1", "1", XContentFactory.jsonBuilder()
                .startObject()
                .startObject("completion")
                .array("input", "suggestion1", "suggestion2", "suggestion3")
                .field("weight", 2)
                .endObject()
                .endObject()
                .bytes());
        IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.names().indexName());
        assertSuggestFields(fields, 3);
    }

    /** Array of input/weight objects indexes one suggest field per object. */
    @Test
    public void testParsingFull() throws Exception {
        String mapping = jsonBuilder().startObject().startObject("type1")
                .startObject("properties").startObject("completion")
                .field("type", "completion")
                .endObject().endObject()
                .endObject().endObject().string();

        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
        FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion");
        MappedFieldType completionFieldType = fieldMapper.fieldType();
        ParsedDocument parsedDocument = defaultMapper.parse("test", "type1", "1", XContentFactory.jsonBuilder()
                .startObject()
                .startArray("completion")
                .startObject()
                .field("input", "suggestion1")
                .field("weight", 3)
                .endObject()
                .startObject()
                .field("input", "suggestion2")
                .field("weight", 4)
                .endObject()
                .startObject()
                .field("input", "suggestion3")
                .field("weight", 5)
                .endObject()
                .endArray()
                .endObject()
                .bytes());
        IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.names().indexName());
        assertSuggestFields(fields, 3);
    }

    /** Mixed single- and multi-input objects: 6 inputs total, 6 suggest fields. */
    @Test
    public void testParsingMixed() throws Exception {
        String mapping = jsonBuilder().startObject().startObject("type1")
                .startObject("properties").startObject("completion")
                .field("type", "completion")
                .endObject().endObject()
                .endObject().endObject().string();

        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
        FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion");
        MappedFieldType completionFieldType = fieldMapper.fieldType();
        ParsedDocument parsedDocument = defaultMapper.parse("test", "type1", "1", XContentFactory.jsonBuilder()
                .startObject()
                .startArray("completion")
                .startObject()
                .array("input", "suggestion1", "suggestion2")
                .field("weight", 3)
                .endObject()
                .startObject()
                .field("input", "suggestion3")
                .field("weight", 4)
                .endObject()
                .startObject()
                // NOTE(review): .field(...) with multiple values here vs .array(...)
                // above -- presumably the varargs overload emits an array too;
                // confirm against XContentBuilder.
                .field("input", "suggestion4", "suggestion5", "suggestion6")
                .field("weight", 5)
                .endObject()
                .endArray()
                .endObject()
                .bytes());
        IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.names().indexName());
        assertSuggestFields(fields, 6);
    }

    /** Supplying "contexts" to a field without context mappings must fail parsing. */
    @Test
    public void testNonContextEnabledParsingWithContexts() throws Exception {
        String mapping = jsonBuilder().startObject().startObject("type1")
                .startObject("properties").startObject("field1")
                .field("type", "completion")
                .endObject().endObject()
                .endObject().endObject().string();

        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
        try {
            defaultMapper.parse("test", "type1", "1", XContentFactory.jsonBuilder()
                    .startObject()
                    .startObject("field1")
                    .field("input", "suggestion1")
                    .startObject("contexts")
                    .field("ctx", "ctx2")
                    .endObject()
                    .field("weight", 3)
                    .endObject()
                    .endObject()
                    .bytes());
            fail("Supplying contexts to a non context-enabled field should error");
        } catch (MapperParsingException e) {
            // Error message should identify the offending field.
            assertThat(e.getRootCause().getMessage(), containsString("field1"));
        }
    }

    /** Reserved control characters (0x1F, 0x00, 0x1E) in input values are rejected. */
    @Test
    public void testFieldValueValidation() throws Exception {
        String mapping = jsonBuilder().startObject().startObject("type1")
                .startObject("properties").startObject("completion")
                .field("type", "completion")
                .endObject().endObject()
                .endObject().endObject().string();

        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
        CharsRefBuilder charsRefBuilder = new CharsRefBuilder();
        charsRefBuilder.append("sugg");
        // Same 4-char value, swapping in each reserved character at index 2.
        charsRefBuilder.setCharAt(2, '\u001F');
        try {
            defaultMapper.parse("test", "type1", "1", XContentFactory.jsonBuilder()
                    .startObject()
                    .field("completion", charsRefBuilder.get().toString())
                    .endObject()
                    .bytes());
            fail("No error indexing value with reserved character [0x1F]");
        } catch (MapperParsingException e) {
            Throwable cause = e.unwrapCause().getCause();
            assertThat(cause, instanceOf(IllegalArgumentException.class));
            assertThat(cause.getMessage(), containsString("[0x1f]"));
        }

        charsRefBuilder.setCharAt(2, '\u0000');
        try {
            defaultMapper.parse("test", "type1", "1", XContentFactory.jsonBuilder()
                    .startObject()
                    .field("completion", charsRefBuilder.get().toString())
                    .endObject()
                    .bytes());
            fail("No error indexing value with reserved character [0x0]");
        } catch (MapperParsingException e) {
            Throwable cause = e.unwrapCause().getCause();
            assertThat(cause, instanceOf(IllegalArgumentException.class));
            assertThat(cause.getMessage(), containsString("[0x0]"));
        }

        charsRefBuilder.setCharAt(2, '\u001E');
        try {
            defaultMapper.parse("test", "type1", "1", XContentFactory.jsonBuilder()
                    .startObject()
                    .field("completion", charsRefBuilder.get().toString())
                    .endObject()
                    .bytes());
            fail("No error indexing value with reserved character [0x1E]");
        } catch (MapperParsingException e) {
            Throwable cause = e.unwrapCause().getCause();
            assertThat(cause, instanceOf(IllegalArgumentException.class));
            assertThat(cause.getMessage(), containsString("[0x1e]"));
        }
    }

    /** Prefix queries over the field use PrefixCompletionQuery. */
    @Test
    public void testPrefixQueryType() throws Exception {
        String mapping = jsonBuilder().startObject().startObject("type1")
                .startObject("properties").startObject("completion")
                .field("type", "completion")
                .endObject().endObject()
                .endObject().endObject().string();

        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
        FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion");
        CompletionFieldMapper completionFieldMapper = (CompletionFieldMapper) fieldMapper;
        Query prefixQuery = completionFieldMapper.fieldType().prefixQuery(new BytesRef("co"));
        assertThat(prefixQuery, instanceOf(PrefixCompletionQuery.class));
    }

    /** Fuzzy queries (built with the library defaults) use FuzzyCompletionQuery. */
    @Test
    public void testFuzzyQueryType() throws Exception {
        String mapping = jsonBuilder().startObject().startObject("type1")
                .startObject("properties").startObject("completion")
                .field("type", "completion")
                .endObject().endObject()
                .endObject().endObject().string();

        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
        FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion");
        CompletionFieldMapper completionFieldMapper = (CompletionFieldMapper) fieldMapper;
        Query prefixQuery = completionFieldMapper.fieldType().fuzzyQuery("co",
                Fuzziness.fromEdits(FuzzyCompletionQuery.DEFAULT_MAX_EDITS), FuzzyCompletionQuery.DEFAULT_NON_FUZZY_PREFIX,
                FuzzyCompletionQuery.DEFAULT_MIN_FUZZY_LENGTH, Operations.DEFAULT_MAX_DETERMINIZED_STATES,
                FuzzyCompletionQuery.DEFAULT_TRANSPOSITIONS, FuzzyCompletionQuery.DEFAULT_UNICODE_AWARE);
        assertThat(prefixQuery, instanceOf(FuzzyCompletionQuery.class));
    }

    /** Regex queries over the field use RegexCompletionQuery. */
    @Test
    public void testRegexQueryType() throws Exception {
        String mapping = jsonBuilder().startObject().startObject("type1")
                .startObject("properties").startObject("completion")
                .field("type", "completion")
                .endObject().endObject()
                .endObject().endObject().string();

        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
        FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion");
        CompletionFieldMapper completionFieldMapper = (CompletionFieldMapper) fieldMapper;
        Query prefixQuery = completionFieldMapper.fieldType()
                .regexpQuery(new BytesRef("co"), RegExp.ALL, Operations.DEFAULT_MAX_DETERMINIZED_STATES);
        assertThat(prefixQuery, instanceOf(RegexCompletionQuery.class));
    }

    /**
     * Asserts that exactly {@code expected} of the given indexed fields are
     * {@link SuggestField} instances (other field types are ignored).
     */
    private static void assertSuggestFields(IndexableField[] fields, int expected) {
        int actualFieldCount = 0;
        for (IndexableField field : fields) {
            if (field instanceof SuggestField) {
                actualFieldCount++;
            }
        }
        assertThat(actualFieldCount, equalTo(expected));
    }
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.action; import org.elasticsearch.action.admin.cluster.health.ClusterHealthAction; import org.elasticsearch.action.admin.cluster.health.TransportClusterHealthAction; import org.elasticsearch.action.admin.cluster.node.hotthreads.NodesHotThreadsAction; import org.elasticsearch.action.admin.cluster.node.hotthreads.TransportNodesHotThreadsAction; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoAction; import org.elasticsearch.action.admin.cluster.node.info.TransportNodesInfoAction; import org.elasticsearch.action.admin.cluster.node.liveness.TransportLivenessAction; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsAction; import org.elasticsearch.action.admin.cluster.node.stats.TransportNodesStatsAction; import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryAction; import org.elasticsearch.action.admin.cluster.repositories.delete.TransportDeleteRepositoryAction; import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesAction; import org.elasticsearch.action.admin.cluster.repositories.get.TransportGetRepositoriesAction; import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryAction; import 
org.elasticsearch.action.admin.cluster.repositories.put.TransportPutRepositoryAction; import org.elasticsearch.action.admin.cluster.repositories.verify.TransportVerifyRepositoryAction; import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryAction; import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteAction; import org.elasticsearch.action.admin.cluster.reroute.TransportClusterRerouteAction; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsAction; import org.elasticsearch.action.admin.cluster.settings.TransportClusterUpdateSettingsAction; import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsAction; import org.elasticsearch.action.admin.cluster.shards.TransportClusterSearchShardsAction; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotAction; import org.elasticsearch.action.admin.cluster.snapshots.create.TransportCreateSnapshotAction; import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotAction; import org.elasticsearch.action.admin.cluster.snapshots.delete.TransportDeleteSnapshotAction; import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsAction; import org.elasticsearch.action.admin.cluster.snapshots.get.TransportGetSnapshotsAction; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotAction; import org.elasticsearch.action.admin.cluster.snapshots.restore.TransportRestoreSnapshotAction; import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusAction; import org.elasticsearch.action.admin.cluster.snapshots.status.TransportSnapshotsStatusAction; import org.elasticsearch.action.admin.cluster.state.ClusterStateAction; import org.elasticsearch.action.admin.cluster.state.TransportClusterStateAction; import org.elasticsearch.action.admin.cluster.stats.ClusterStatsAction; import org.elasticsearch.action.admin.cluster.stats.TransportClusterStatsAction; import 
org.elasticsearch.action.admin.cluster.tasks.PendingClusterTasksAction; import org.elasticsearch.action.admin.cluster.tasks.TransportPendingClusterTasksAction; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesAction; import org.elasticsearch.action.admin.indices.alias.TransportIndicesAliasesAction; import org.elasticsearch.action.admin.indices.alias.exists.AliasesExistAction; import org.elasticsearch.action.admin.indices.alias.exists.TransportAliasesExistAction; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesAction; import org.elasticsearch.action.admin.indices.alias.get.TransportGetAliasesAction; import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction; import org.elasticsearch.action.admin.indices.analyze.TransportAnalyzeAction; import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheAction; import org.elasticsearch.action.admin.indices.cache.clear.TransportClearIndicesCacheAction; import org.elasticsearch.action.admin.indices.close.CloseIndexAction; import org.elasticsearch.action.admin.indices.close.TransportCloseIndexAction; import org.elasticsearch.action.admin.indices.create.CreateIndexAction; import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; import org.elasticsearch.action.admin.indices.delete.DeleteIndexAction; import org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction; import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsAction; import org.elasticsearch.action.admin.indices.exists.indices.TransportIndicesExistsAction; import org.elasticsearch.action.admin.indices.exists.types.TransportTypesExistsAction; import org.elasticsearch.action.admin.indices.exists.types.TypesExistsAction; import org.elasticsearch.action.admin.indices.flush.FlushAction; import org.elasticsearch.action.admin.indices.flush.TransportFlushAction; import org.elasticsearch.action.admin.indices.get.GetIndexAction; import 
org.elasticsearch.action.admin.indices.get.TransportGetIndexAction; import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsAction; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsAction; import org.elasticsearch.action.admin.indices.mapping.get.TransportGetFieldMappingsAction; import org.elasticsearch.action.admin.indices.mapping.get.TransportGetFieldMappingsIndexAction; import org.elasticsearch.action.admin.indices.mapping.get.TransportGetMappingsAction; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingAction; import org.elasticsearch.action.admin.indices.mapping.put.TransportPutMappingAction; import org.elasticsearch.action.admin.indices.open.OpenIndexAction; import org.elasticsearch.action.admin.indices.open.TransportOpenIndexAction; import org.elasticsearch.action.admin.indices.optimize.OptimizeAction; import org.elasticsearch.action.admin.indices.optimize.TransportOptimizeAction; import org.elasticsearch.action.admin.indices.recovery.RecoveryAction; import org.elasticsearch.action.admin.indices.recovery.TransportRecoveryAction; import org.elasticsearch.action.admin.indices.refresh.RefreshAction; import org.elasticsearch.action.admin.indices.refresh.TransportRefreshAction; import org.elasticsearch.action.admin.indices.segments.IndicesSegmentsAction; import org.elasticsearch.action.admin.indices.segments.TransportIndicesSegmentsAction; import org.elasticsearch.action.admin.indices.settings.get.GetSettingsAction; import org.elasticsearch.action.admin.indices.settings.get.TransportGetSettingsAction; import org.elasticsearch.action.admin.indices.settings.put.TransportUpdateSettingsAction; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsAction; import org.elasticsearch.action.admin.indices.shards.IndicesShardStoresAction; import org.elasticsearch.action.admin.indices.shards.TransportIndicesShardStoresAction; import org.elasticsearch.action.admin.indices.stats.IndicesStatsAction; 
import org.elasticsearch.action.admin.indices.stats.TransportIndicesStatsAction; import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateAction; import org.elasticsearch.action.admin.indices.template.delete.TransportDeleteIndexTemplateAction; import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesAction; import org.elasticsearch.action.admin.indices.template.get.TransportGetIndexTemplatesAction; import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateAction; import org.elasticsearch.action.admin.indices.template.put.TransportPutIndexTemplateAction; import org.elasticsearch.action.admin.indices.upgrade.get.TransportUpgradeStatusAction; import org.elasticsearch.action.admin.indices.upgrade.get.UpgradeStatusAction; import org.elasticsearch.action.admin.indices.upgrade.post.TransportUpgradeAction; import org.elasticsearch.action.admin.indices.upgrade.post.TransportUpgradeSettingsAction; import org.elasticsearch.action.admin.indices.upgrade.post.UpgradeAction; import org.elasticsearch.action.admin.indices.upgrade.post.UpgradeSettingsAction; import org.elasticsearch.action.admin.indices.validate.query.TransportValidateQueryAction; import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryAction; import org.elasticsearch.action.admin.cluster.validate.template.RenderSearchTemplateAction; import org.elasticsearch.action.admin.cluster.validate.template.TransportRenderSearchTemplateAction; import org.elasticsearch.action.admin.indices.warmer.delete.DeleteWarmerAction; import org.elasticsearch.action.admin.indices.warmer.delete.TransportDeleteWarmerAction; import org.elasticsearch.action.admin.indices.warmer.get.GetWarmersAction; import org.elasticsearch.action.admin.indices.warmer.get.TransportGetWarmersAction; import org.elasticsearch.action.admin.indices.warmer.put.PutWarmerAction; import org.elasticsearch.action.admin.indices.warmer.put.TransportPutWarmerAction; import 
org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.bulk.TransportShardBulkAction; import org.elasticsearch.action.delete.DeleteAction; import org.elasticsearch.action.delete.TransportDeleteAction; import org.elasticsearch.action.exists.ExistsAction; import org.elasticsearch.action.exists.TransportExistsAction; import org.elasticsearch.action.explain.ExplainAction; import org.elasticsearch.action.explain.TransportExplainAction; import org.elasticsearch.action.fieldstats.FieldStatsAction; import org.elasticsearch.action.fieldstats.TransportFieldStatsTransportAction; import org.elasticsearch.action.get.GetAction; import org.elasticsearch.action.get.MultiGetAction; import org.elasticsearch.action.get.TransportGetAction; import org.elasticsearch.action.get.TransportMultiGetAction; import org.elasticsearch.action.get.TransportShardMultiGetAction; import org.elasticsearch.action.index.IndexAction; import org.elasticsearch.action.index.TransportIndexAction; import org.elasticsearch.action.indexedscripts.delete.DeleteIndexedScriptAction; import org.elasticsearch.action.indexedscripts.delete.TransportDeleteIndexedScriptAction; import org.elasticsearch.action.indexedscripts.get.GetIndexedScriptAction; import org.elasticsearch.action.indexedscripts.get.TransportGetIndexedScriptAction; import org.elasticsearch.action.indexedscripts.put.PutIndexedScriptAction; import org.elasticsearch.action.indexedscripts.put.TransportPutIndexedScriptAction; import org.elasticsearch.action.percolate.MultiPercolateAction; import org.elasticsearch.action.percolate.PercolateAction; import org.elasticsearch.action.percolate.TransportMultiPercolateAction; import org.elasticsearch.action.percolate.TransportPercolateAction; import org.elasticsearch.action.percolate.TransportShardMultiPercolateAction; import org.elasticsearch.action.search.ClearScrollAction; import org.elasticsearch.action.search.MultiSearchAction; import 
org.elasticsearch.action.search.SearchAction; import org.elasticsearch.action.search.SearchScrollAction; import org.elasticsearch.action.search.TransportClearScrollAction; import org.elasticsearch.action.search.TransportMultiSearchAction; import org.elasticsearch.action.search.TransportSearchAction; import org.elasticsearch.action.search.TransportSearchScrollAction; import org.elasticsearch.action.search.type.TransportSearchDfsQueryAndFetchAction; import org.elasticsearch.action.search.type.TransportSearchDfsQueryThenFetchAction; import org.elasticsearch.action.search.type.TransportSearchQueryAndFetchAction; import org.elasticsearch.action.search.type.TransportSearchQueryThenFetchAction; import org.elasticsearch.action.search.type.TransportSearchScrollQueryAndFetchAction; import org.elasticsearch.action.search.type.TransportSearchScrollQueryThenFetchAction; import org.elasticsearch.action.suggest.SuggestAction; import org.elasticsearch.action.suggest.TransportSuggestAction; import org.elasticsearch.action.support.ActionFilter; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.AutoCreateIndex; import org.elasticsearch.action.support.DestructiveOperations; import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.action.termvectors.MultiTermVectorsAction; import org.elasticsearch.action.termvectors.TermVectorsAction; import org.elasticsearch.action.termvectors.TransportMultiTermVectorsAction; import org.elasticsearch.action.termvectors.TransportShardMultiTermsVectorAction; import org.elasticsearch.action.termvectors.TransportTermVectorsAction; import org.elasticsearch.action.termvectors.dfs.TransportDfsOnlyAction; import org.elasticsearch.action.update.TransportUpdateAction; import org.elasticsearch.action.update.UpdateAction; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.inject.multibindings.MapBinder; import 
org.elasticsearch.common.inject.multibindings.Multibinder;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Guice module that wires each {@link GenericAction} to the
 * {@link TransportAction} class implementing it. {@link #configure()} publishes
 * an injectable name-to-action map and, unless constructed with
 * {@code proxy == true}, an action-to-transport-action map with all transport
 * implementations bound as eager singletons.
 */
public class ActionModule extends AbstractModule {

    // Registered actions, keyed by action name (filled by registerAction()).
    private final Map<String, ActionEntry> actions = new HashMap<>();
    // Filter classes applied around action execution; bound into a Multibinder set.
    private final List<Class<? extends ActionFilter>> actionFilters = new ArrayList<>();

    /**
     * One registration record: the action, its transport implementation, and
     * any supporting transport action classes that must also be bound.
     */
    static class ActionEntry<Request extends ActionRequest, Response extends ActionResponse> {
        public final GenericAction<Request, Response> action;
        public final Class<? extends TransportAction<Request, Response>> transportAction;
        public final Class[] supportTransportActions;

        ActionEntry(GenericAction<Request, Response> action, Class<? extends TransportAction<Request, Response>> transportAction, Class... supportTransportActions) {
            this.action = action;
            this.transportAction = transportAction;
            this.supportTransportActions = supportTransportActions;
        }
    }

    // When true, only the name -> GenericAction map is bound; transport
    // implementations are skipped (see the !proxy branch in configure()).
    private final boolean proxy;

    public ActionModule(boolean proxy) {
        this.proxy = proxy;
    }

    /**
     * Registers an action.
     *
     * @param action                  The action type.
     * @param transportAction         The transport action implementing the actual action.
     * @param supportTransportActions Any support actions that are needed by the transport action.
     * @param <Request>               The request type.
     * @param <Response>              The response type.
     */
    public <Request extends ActionRequest, Response extends ActionResponse> void registerAction(GenericAction<Request, Response> action, Class<? extends TransportAction<Request, Response>> transportAction, Class... supportTransportActions) {
        actions.put(action.name(), new ActionEntry<>(action, transportAction, supportTransportActions));
    }

    /** Adds an action filter class; returns this module to allow chaining. */
    public ActionModule registerFilter(Class<? extends ActionFilter> actionFilter) {
        actionFilters.add(actionFilter);
        return this;
    }

    @Override
    protected void configure() {
        // Publish all registered filters as an injectable set of ActionFilter.
        Multibinder<ActionFilter> actionFilterMultibinder = Multibinder.newSetBinder(binder(), ActionFilter.class);
        for (Class<? extends ActionFilter> actionFilter : actionFilters) {
            actionFilterMultibinder.addBinding().to(actionFilter);
        }
        bind(ActionFilters.class).asEagerSingleton();
        bind(AutoCreateIndex.class).asEagerSingleton();
        bind(DestructiveOperations.class).asEagerSingleton();

        // Cluster-level admin actions.
        registerAction(NodesInfoAction.INSTANCE, TransportNodesInfoAction.class);
        registerAction(NodesStatsAction.INSTANCE, TransportNodesStatsAction.class);
        registerAction(NodesHotThreadsAction.INSTANCE, TransportNodesHotThreadsAction.class);
        registerAction(ClusterStatsAction.INSTANCE, TransportClusterStatsAction.class);
        registerAction(ClusterStateAction.INSTANCE, TransportClusterStateAction.class);
        registerAction(ClusterHealthAction.INSTANCE, TransportClusterHealthAction.class);
        registerAction(ClusterUpdateSettingsAction.INSTANCE, TransportClusterUpdateSettingsAction.class);
        registerAction(ClusterRerouteAction.INSTANCE, TransportClusterRerouteAction.class);
        registerAction(ClusterSearchShardsAction.INSTANCE, TransportClusterSearchShardsAction.class);
        registerAction(PendingClusterTasksAction.INSTANCE, TransportPendingClusterTasksAction.class);

        // Repository and snapshot actions.
        registerAction(PutRepositoryAction.INSTANCE, TransportPutRepositoryAction.class);
        registerAction(GetRepositoriesAction.INSTANCE, TransportGetRepositoriesAction.class);
        registerAction(DeleteRepositoryAction.INSTANCE, TransportDeleteRepositoryAction.class);
        registerAction(VerifyRepositoryAction.INSTANCE, TransportVerifyRepositoryAction.class);
        registerAction(GetSnapshotsAction.INSTANCE, TransportGetSnapshotsAction.class);
        registerAction(DeleteSnapshotAction.INSTANCE, TransportDeleteSnapshotAction.class);
        registerAction(CreateSnapshotAction.INSTANCE, TransportCreateSnapshotAction.class);
        registerAction(RestoreSnapshotAction.INSTANCE, TransportRestoreSnapshotAction.class);
        registerAction(SnapshotsStatusAction.INSTANCE, TransportSnapshotsStatusAction.class);

        // Index-level admin actions.
        registerAction(IndicesStatsAction.INSTANCE, TransportIndicesStatsAction.class);
        registerAction(IndicesSegmentsAction.INSTANCE, TransportIndicesSegmentsAction.class);
        registerAction(IndicesShardStoresAction.INSTANCE, TransportIndicesShardStoresAction.class);
        registerAction(CreateIndexAction.INSTANCE, TransportCreateIndexAction.class);
        registerAction(DeleteIndexAction.INSTANCE, TransportDeleteIndexAction.class);
        registerAction(GetIndexAction.INSTANCE, TransportGetIndexAction.class);
        registerAction(OpenIndexAction.INSTANCE, TransportOpenIndexAction.class);
        registerAction(CloseIndexAction.INSTANCE, TransportCloseIndexAction.class);
        registerAction(IndicesExistsAction.INSTANCE, TransportIndicesExistsAction.class);
        registerAction(TypesExistsAction.INSTANCE, TransportTypesExistsAction.class);
        registerAction(GetMappingsAction.INSTANCE, TransportGetMappingsAction.class);
        registerAction(GetFieldMappingsAction.INSTANCE, TransportGetFieldMappingsAction.class, TransportGetFieldMappingsIndexAction.class);
        registerAction(PutMappingAction.INSTANCE, TransportPutMappingAction.class);
        registerAction(IndicesAliasesAction.INSTANCE, TransportIndicesAliasesAction.class);
        registerAction(UpdateSettingsAction.INSTANCE, TransportUpdateSettingsAction.class);
        registerAction(AnalyzeAction.INSTANCE, TransportAnalyzeAction.class);
        registerAction(PutIndexTemplateAction.INSTANCE, TransportPutIndexTemplateAction.class);
        registerAction(GetIndexTemplatesAction.INSTANCE, TransportGetIndexTemplatesAction.class);
        registerAction(DeleteIndexTemplateAction.INSTANCE, TransportDeleteIndexTemplateAction.class);
        registerAction(ValidateQueryAction.INSTANCE, TransportValidateQueryAction.class);
        registerAction(RefreshAction.INSTANCE, TransportRefreshAction.class);
        registerAction(FlushAction.INSTANCE, TransportFlushAction.class);
        registerAction(OptimizeAction.INSTANCE, TransportOptimizeAction.class);
        registerAction(UpgradeAction.INSTANCE, TransportUpgradeAction.class);
        registerAction(UpgradeStatusAction.INSTANCE, TransportUpgradeStatusAction.class);
        registerAction(UpgradeSettingsAction.INSTANCE, TransportUpgradeSettingsAction.class);
        registerAction(ClearIndicesCacheAction.INSTANCE, TransportClearIndicesCacheAction.class);
        registerAction(PutWarmerAction.INSTANCE, TransportPutWarmerAction.class);
        registerAction(DeleteWarmerAction.INSTANCE, TransportDeleteWarmerAction.class);
        registerAction(GetWarmersAction.INSTANCE, TransportGetWarmersAction.class);
        registerAction(GetAliasesAction.INSTANCE, TransportGetAliasesAction.class);
        registerAction(AliasesExistAction.INSTANCE, TransportAliasesExistAction.class);
        registerAction(GetSettingsAction.INSTANCE, TransportGetSettingsAction.class);

        // Document, search, and percolate actions.
        registerAction(IndexAction.INSTANCE, TransportIndexAction.class);
        registerAction(GetAction.INSTANCE, TransportGetAction.class);
        registerAction(TermVectorsAction.INSTANCE, TransportTermVectorsAction.class, TransportDfsOnlyAction.class);
        registerAction(MultiTermVectorsAction.INSTANCE, TransportMultiTermVectorsAction.class, TransportShardMultiTermsVectorAction.class);
        registerAction(DeleteAction.INSTANCE, TransportDeleteAction.class);
        registerAction(ExistsAction.INSTANCE, TransportExistsAction.class);
        registerAction(SuggestAction.INSTANCE, TransportSuggestAction.class);
        registerAction(UpdateAction.INSTANCE, TransportUpdateAction.class);
        registerAction(MultiGetAction.INSTANCE, TransportMultiGetAction.class, TransportShardMultiGetAction.class);
        registerAction(BulkAction.INSTANCE, TransportBulkAction.class, TransportShardBulkAction.class);
        registerAction(SearchAction.INSTANCE, TransportSearchAction.class,
                TransportSearchDfsQueryThenFetchAction.class,
                TransportSearchQueryThenFetchAction.class,
                TransportSearchDfsQueryAndFetchAction.class,
                TransportSearchQueryAndFetchAction.class
        );
        registerAction(SearchScrollAction.INSTANCE, TransportSearchScrollAction.class,
                TransportSearchScrollQueryThenFetchAction.class,
                TransportSearchScrollQueryAndFetchAction.class
        );
        registerAction(MultiSearchAction.INSTANCE, TransportMultiSearchAction.class);
        registerAction(PercolateAction.INSTANCE, TransportPercolateAction.class);
        registerAction(MultiPercolateAction.INSTANCE, TransportMultiPercolateAction.class, TransportShardMultiPercolateAction.class);
        registerAction(ExplainAction.INSTANCE, TransportExplainAction.class);
        registerAction(ClearScrollAction.INSTANCE, TransportClearScrollAction.class);
        registerAction(RecoveryAction.INSTANCE, TransportRecoveryAction.class);
        registerAction(RenderSearchTemplateAction.INSTANCE, TransportRenderSearchTemplateAction.class);

        //Indexed scripts
        registerAction(PutIndexedScriptAction.INSTANCE, TransportPutIndexedScriptAction.class);
        registerAction(GetIndexedScriptAction.INSTANCE, TransportGetIndexedScriptAction.class);
        registerAction(DeleteIndexedScriptAction.INSTANCE, TransportDeleteIndexedScriptAction.class);

        registerAction(FieldStatsAction.INSTANCE, TransportFieldStatsTransportAction.class);

        // register Name -> GenericAction Map that can be injected to instances.
        MapBinder<String, GenericAction> actionsBinder = MapBinder.newMapBinder(binder(), String.class, GenericAction.class);
        for (Map.Entry<String, ActionEntry> entry : actions.entrySet()) {
            actionsBinder.addBinding(entry.getKey()).toInstance(entry.getValue().action);
        }
        // register GenericAction -> transportAction Map that can be injected to instances.
        // also register any supporting classes
        if (!proxy) {
            bind(TransportLivenessAction.class).asEagerSingleton();
            MapBinder<GenericAction, TransportAction> transportActionsBinder = MapBinder.newMapBinder(binder(), GenericAction.class, TransportAction.class);
            for (Map.Entry<String, ActionEntry> entry : actions.entrySet()) {
                // bind the action as eager singleton, so the map binder one will reuse it
                bind(entry.getValue().transportAction).asEagerSingleton();
                transportActionsBinder.addBinding(entry.getValue().action).to(entry.getValue().transportAction).asEagerSingleton();
                for (Class supportAction : entry.getValue().supportTransportActions) {
                    bind(supportAction).asEagerSingleton();
                }
            }
        }
    }
}
/* * citygml4j - The Open Source Java API for CityGML * https://github.com/citygml4j * * Copyright 2013-2022 Claus Nagel <claus.nagel@gmail.com> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.citygml4j.builder.cityjson.marshal.gml; import org.citygml4j.builder.cityjson.marshal.CityJSONMarshaller; import org.citygml4j.builder.cityjson.marshal.util.SemanticsBuilder; import org.citygml4j.builder.cityjson.marshal.util.VerticesBuilder; import org.citygml4j.cityjson.CityJSON; import org.citygml4j.cityjson.appearance.AbstractMaterialObject; import org.citygml4j.cityjson.appearance.AbstractTextureObject; import org.citygml4j.cityjson.appearance.SolidCollectionMaterialObject; import org.citygml4j.cityjson.appearance.SolidCollectionTextureObject; import org.citygml4j.cityjson.appearance.SolidMaterialObject; import org.citygml4j.cityjson.appearance.SolidTextureObject; import org.citygml4j.cityjson.appearance.SurfaceCollectionMaterialObject; import org.citygml4j.cityjson.appearance.SurfaceCollectionTextureObject; import org.citygml4j.cityjson.feature.AbstractCityObjectType; import org.citygml4j.cityjson.geometry.AbstractGeometryObjectType; import org.citygml4j.cityjson.geometry.AbstractSemanticsObject; import org.citygml4j.cityjson.geometry.AbstractSolidCollectionType; import org.citygml4j.cityjson.geometry.AbstractSurfaceCollectionType; import org.citygml4j.cityjson.geometry.CompositeSolidType; import org.citygml4j.cityjson.geometry.CompositeSurfaceType; import 
org.citygml4j.cityjson.geometry.GeometryWithAppearance; import org.citygml4j.cityjson.geometry.GeometryWithSemantics; import org.citygml4j.cityjson.geometry.MultiLineStringType; import org.citygml4j.cityjson.geometry.MultiPointType; import org.citygml4j.cityjson.geometry.MultiSolidType; import org.citygml4j.cityjson.geometry.MultiSurfaceType; import org.citygml4j.cityjson.geometry.SolidCollectionSemanticsObject; import org.citygml4j.cityjson.geometry.SolidSemanticsObject; import org.citygml4j.cityjson.geometry.SolidType; import org.citygml4j.cityjson.geometry.SurfaceCollectionSemanticsObject; import org.citygml4j.model.gml.GMLClass; import org.citygml4j.model.gml.feature.AbstractFeature; import org.citygml4j.model.gml.feature.FeatureProperty; import org.citygml4j.model.gml.geometry.AbstractGeometry; import org.citygml4j.model.gml.geometry.GeometryProperty; import org.citygml4j.model.gml.geometry.aggregates.MultiCurve; import org.citygml4j.model.gml.geometry.aggregates.MultiPoint; import org.citygml4j.model.gml.geometry.aggregates.MultiSolid; import org.citygml4j.model.gml.geometry.aggregates.MultiSurface; import org.citygml4j.model.gml.geometry.complexes.CompositeCurve; import org.citygml4j.model.gml.geometry.complexes.CompositeSolid; import org.citygml4j.model.gml.geometry.complexes.CompositeSurface; import org.citygml4j.model.gml.geometry.complexes.GeometricComplex; import org.citygml4j.model.gml.geometry.complexes.GeometricComplexProperty; import org.citygml4j.model.gml.geometry.primitives.AbstractCurve; import org.citygml4j.model.gml.geometry.primitives.AbstractCurveSegment; import org.citygml4j.model.gml.geometry.primitives.AbstractRing; import org.citygml4j.model.gml.geometry.primitives.AbstractRingProperty; import org.citygml4j.model.gml.geometry.primitives.Curve; import org.citygml4j.model.gml.geometry.primitives.CurveSegmentArrayProperty; import org.citygml4j.model.gml.geometry.primitives.GeometricPrimitiveProperty; import 
org.citygml4j.model.gml.geometry.primitives.LineString;
import org.citygml4j.model.gml.geometry.primitives.LineStringSegment;
import org.citygml4j.model.gml.geometry.primitives.LinearRing;
import org.citygml4j.model.gml.geometry.primitives.OrientableCurve;
import org.citygml4j.model.gml.geometry.primitives.OrientableSurface;
import org.citygml4j.model.gml.geometry.primitives.Point;
import org.citygml4j.model.gml.geometry.primitives.PointArrayProperty;
import org.citygml4j.model.gml.geometry.primitives.PointProperty;
import org.citygml4j.model.gml.geometry.primitives.Polygon;
import org.citygml4j.model.gml.geometry.primitives.PolygonPatch;
import org.citygml4j.model.gml.geometry.primitives.Sign;
import org.citygml4j.model.gml.geometry.primitives.Solid;
import org.citygml4j.model.gml.geometry.primitives.Surface;
import org.citygml4j.model.gml.geometry.primitives.SurfaceProperty;
import org.citygml4j.model.gml.geometry.primitives.Tin;
import org.citygml4j.model.gml.geometry.primitives.TriangulatedSurface;
import org.citygml4j.util.child.ChildInfo;
import org.citygml4j.util.mapper.TypeMapper;
import org.citygml4j.util.walker.GeometryWalker;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.locks.ReentrantLock;
import java.util.function.Supplier;

/**
 * Marshals citygml4j GML geometry objects into their CityJSON counterparts.
 * Dispatch from a concrete GML geometry class to the matching marshal method
 * goes through a lazily built {@link TypeMapper}. Vertex coordinates are
 * funneled through the shared {@link VerticesBuilder}, whose addVertices()
 * returns the integer indices used in the CityJSON geometry arrays.
 */
public class GMLMarshaller {
    // Guards the lazy construction of typeMapper (double-checked locking below).
    private final ReentrantLock lock = new ReentrantLock();
    private final CityJSONMarshaller json;
    // Supplies the vertex builder used to turn coordinate lists into index lists.
    private final Supplier<VerticesBuilder> verticesBuilder;
    // Resolves the city object that owns a given geometry child.
    private final ChildInfo childInfo;
    // Lazily built dispatch table: GML geometry class -> marshal method.
    private TypeMapper<AbstractGeometryObjectType> typeMapper;

    public GMLMarshaller(CityJSONMarshaller json, Supplier<VerticesBuilder> verticesBuilder) {
        this.json = json;
        this.verticesBuilder = verticesBuilder;
        childInfo = new ChildInfo();
    }

    /**
     * Builds the dispatch table on first use. Uses double-checked locking with
     * an explicit ReentrantLock so concurrent callers construct it only once.
     */
    private TypeMapper<AbstractGeometryObjectType> getTypeMapper() {
        if (typeMapper == null) {
            lock.lock();
            try {
                if (typeMapper == null) {
                    typeMapper = TypeMapper.<AbstractGeometryObjectType>create()
                            .with(Point.class, this::marshalPoint)
                            .with(MultiPoint.class, this::marshalMultiPoint)
                            .with(Curve.class, this::marshalMultiLineString)
                            .with(CompositeCurve.class, this::marshalMultiLineString)
                            .with(LineString.class, this::marshalMultiLineString)
                            .with(MultiCurve.class, this::marshalMultiLineString)
                            .with(Surface.class, this::marshalSurface)
                            .with(TriangulatedSurface.class, this::marshalTriangulatedSurface)
                            .with(Tin.class, this::marshalTin)
                            .with(MultiSurface.class, this::marshalMultiSurface)
                            .with(CompositeSurface.class, this::marshalCompositeSurface)
                            .with(Solid.class, this::marshalSolid)
                            .with(CompositeSolid.class, this::marshalCompositeSolid)
                            .with(MultiSolid.class, this::marshalMultiSolid);
                }
            } finally {
                lock.unlock();
            }
        }
        return typeMapper;
    }

    /** Dispatches to the marshal method registered for the runtime type of src. */
    public AbstractGeometryObjectType marshal(AbstractGeometry src) {
        return getTypeMapper().apply(src);
    }

    /** Marshals the contained feature, or returns null when the property holds none. */
    public AbstractCityObjectType marshalFeatureProperty(FeatureProperty<? extends AbstractFeature> featureProperty, CityJSON cityJSON) {
        return featureProperty.isSetFeature() ? json.getCityGMLMarshaller().marshal(featureProperty.getFeature(), cityJSON) : null;
    }

    /** Appends the point's 3D coordinates to dest (skips points without coordinates). */
    public void marshalPoint(Point src, MultiPointType dest) {
        List<Double> vertex = src.toList3d();
        if (!vertex.isEmpty())
            dest.addPoints(verticesBuilder.get().addVertices(vertex));
    }

    public MultiPointType marshalPoint(Point src) {
        MultiPointType dest = new MultiPointType();
        marshalPoint(src, dest);
        return dest;
    }

    /** Handles both representations of gml:MultiPoint: pointMember and pointMembers. */
    public void marshalMultiPoint(MultiPoint src, MultiPointType dest) {
        if (src.isSetPointMember()) {
            for (PointProperty pointProperty : src.getPointMember())
                if (pointProperty.isSetPoint()) {
                    List<Double> vertex = pointProperty.getPoint().toList3d();
                    if (!vertex.isEmpty())
                        dest.addPoints(verticesBuilder.get().addVertices(vertex));
                }
        } else if (src.isSetPointMembers()) {
            PointArrayProperty pointArrayProperty = src.getPointMembers();
            for (Point point : pointArrayProperty.getPoint()) {
                List<Double> vertex = point.toList3d();
                if (!vertex.isEmpty())
                    dest.addPoints(verticesBuilder.get().addVertices(vertex));
            }
        }
    }

    public MultiPointType marshalMultiPoint(MultiPoint src) {
        MultiPointType dest = new MultiPointType();
        marshalMultiPoint(src, dest);
        return dest;
    }

    /** Flattens any curve geometry into line strings via the walker below. */
    public void marshalMultiLineString(AbstractCurve src, MultiLineStringType dest) {
        MultiLineStringBuilder builder = new MultiLineStringBuilder();
        builder.process(src, dest);
    }

    public MultiLineStringType marshalMultiLineString(Curve src) {
        MultiLineStringType dest = new MultiLineStringType();
        marshalMultiLineString(src, dest);
        return dest;
    }

    public MultiLineStringType marshalMultiLineString(CompositeCurve src) {
        MultiLineStringType dest = new MultiLineStringType();
        marshalMultiLineString(src, dest);
        return dest;
    }

    public MultiLineStringType marshalMultiLineString(LineString src) {
        MultiLineStringType dest = new MultiLineStringType();
        marshalMultiLineString(src, dest);
        return dest;
    }

    /**
     * Collects the curve members of composite curves and geometric complexes;
     * returns null when no line string was produced.
     */
    public MultiLineStringType marshalMultiLineString(List<GeometricComplexProperty> src) {
        MultiLineStringType dest = new MultiLineStringType();
        for (GeometricComplexProperty property : src) {
            if (property.isSetCompositeCurve())
                marshalMultiLineString(property.getCompositeCurve(), dest);
            else if (property.isSetGeometricComplex()) {
                GeometricComplex complex = property.getGeometricComplex();
                if (complex.isSetElement()) {
                    for (GeometricPrimitiveProperty element : complex.getElement()) {
                        // Only curve primitives contribute; other primitives are skipped.
                        if (element.getGeometricPrimitive() instanceof AbstractCurve) {
                            AbstractCurve curve = (AbstractCurve) element.getGeometricPrimitive();
                            marshalMultiLineString(curve, dest);
                        }
                    }
                }
            }
        }
        return !dest.getLineStrings().isEmpty() ? dest : null;
    }

    public void marshalMultiLineString(MultiCurve src, MultiLineStringType dest) {
        MultiLineStringBuilder builder = new MultiLineStringBuilder();
        builder.process(src, dest);
    }

    public MultiLineStringType marshalMultiLineString(MultiCurve src) {
        MultiLineStringType dest = new MultiLineStringType();
        marshalMultiLineString(src, dest);
        return dest;
    }

    /** Marshals a gml:Surface as a composite surface, attaching collected semantics. */
    public void marshalSurface(Surface src, CompositeSurfaceType dest) {
        SurfaceCollectionBuilder surfaceBuilder = new SurfaceCollectionBuilder();
        SemanticsBuilder semanticsBuilder = new SemanticsBuilder(childInfo.getParentCityObject(src), json.getCityGMLMarshaller());
        surfaceBuilder.process(src, dest, semanticsBuilder, true);
        if (dest.isSetSemantics())
            dest.getSemantics().setSurfaces(semanticsBuilder.getSurfaces());
    }

    public CompositeSurfaceType marshalSurface(Surface src) {
        CompositeSurfaceType dest = new CompositeSurfaceType();
        marshalSurface(src, dest);
        return dest;
    }

    // Triangulated surfaces and TINs reuse the generic surface marshalling.
    public CompositeSurfaceType marshalTriangulatedSurface(TriangulatedSurface src) {
        return marshalSurface(src);
    }

    public CompositeSurfaceType marshalTin(Tin src) {
        return marshalTriangulatedSurface(src);
    }

    /** Shared path for multi/composite surfaces: walk, collect, attach semantics. */
    public void marshalSurfaceCollection(AbstractGeometry src, AbstractSurfaceCollectionType dest) {
        SurfaceCollectionBuilder surfaceBuilder = new SurfaceCollectionBuilder();
        SemanticsBuilder semanticsBuilder = new SemanticsBuilder(childInfo.getParentCityObject(src), json.getCityGMLMarshaller());
        surfaceBuilder.process(src, dest, semanticsBuilder, true);
        if (dest.isSetSemantics())
            dest.getSemantics().setSurfaces(semanticsBuilder.getSurfaces());
    }

    public MultiSurfaceType marshalMultiSurface(MultiSurface src) {
        MultiSurfaceType dest = new MultiSurfaceType();
        marshalSurfaceCollection(src, dest);
        return dest;
    }

    public CompositeSurfaceType marshalCompositeSurface(CompositeSurface src) {
        CompositeSurfaceType dest = new CompositeSurfaceType();
        marshalSurfaceCollection(src, dest);
        return dest;
    }

    /**
     * Marshals a gml:Solid whose exterior (and optional interiors) are composite
     * surfaces: each shell is processed separately and its surfaces, semantics,
     * materials and textures are merged into dest, padding per-surface value
     * lists with nulls so indices stay aligned across shells.
     * NOTE(review): appendNulls() and postprocess() are defined elsewhere in
     * this class (outside this chunk).
     */
    public void marshalSolid(Solid src, SolidType dest, SemanticsBuilder semanticsBuilder, boolean collapseMaterialValues) {
        SurfaceCollectionBuilder surfaceBuilder = new SurfaceCollectionBuilder();
        int index = 0;
        if (src.isSetExterior() && src.getExterior().getSurface() instanceof CompositeSurface) {
            List<CompositeSurface> shells = new ArrayList<>();
            shells.add((CompositeSurface)src.getExterior().getSurface());
            if (src.isSetInterior()) {
                for (SurfaceProperty property : src.getInterior()) {
                    if (property.getSurface() instanceof CompositeSurface)
                        shells.add((CompositeSurface)property.getSurface());
                }
            }
            for (CompositeSurface shell : shells) {
                CompositeSurfaceType shellType = new CompositeSurfaceType();
                surfaceBuilder.process(shell, shellType, semanticsBuilder, false);
                if (!shellType.getSurfaces().isEmpty()) {
                    dest.addShell(shellType.getSurfaces());
                    if (shellType.isSetSemantics()) {
                        SolidSemanticsObject semantics = dest.getSemantics();
                        if (semantics == null) {
                            semantics = new SolidSemanticsObject();
                            dest.setSemantics(semantics);
                        }
                        // add null values for non-semantic surfaces
                        appendNulls(semantics, index);
                        semantics.addValues(shellType.getSemantics().getValues());
                    }
                    if (shellType.isSetMaterial()) {
                        for (SurfaceCollectionMaterialObject object : shellType.getMaterial()) {
                            SolidMaterialObject material = dest.getMaterial(object.getTheme());
                            if (material == null) {
                                material = new SolidMaterialObject(object.getTheme());
                                dest.addMaterial(material);
                            }
                            // add null values for non-colored surfaces
                            appendNulls(material, index);
                            material.addValue(object.getValues());
                        }
                    }
                    if (shellType.isSetTexture()) {
                        for (SurfaceCollectionTextureObject object : shellType.getTexture()) {
                            SolidTextureObject texture = dest.getTexture(object.getTheme());
                            if (texture == null) {
                                texture = new SolidTextureObject(object.getTheme());
                                dest.addTexture(texture);
                            }
                            // add null values for non-textured surfaces
                            appendNulls(texture, index);
                            texture.addValue(object.getValues());
                        }
                    }
                    index++;
                } else if (index == 0)
                    // An empty exterior shell aborts the whole solid.
                    break;
            }
            postprocess(dest, index, collapseMaterialValues);
        }
    }

    public SolidType marshalSolid(Solid src) {
        SolidType dest = new SolidType();
        SemanticsBuilder semanticsBuilder = new SemanticsBuilder(childInfo.getParentCityObject(src), json.getCityGMLMarshaller());
        marshalSolid(src, dest, semanticsBuilder, true);
        if (dest.isSetSemantics())
            dest.getSemantics().setSurfaces(semanticsBuilder.getSurfaces());
        return dest;
    }

    /**
     * Shared path for multi/composite solids.
     * NOTE(review): SolidCollectionBuilder is defined elsewhere in this class.
     */
    public void marshalSolidCollection(AbstractGeometry src, AbstractSolidCollectionType dest) {
        SolidCollectionBuilder builder = new SolidCollectionBuilder();
        SemanticsBuilder semanticsBuilder = new SemanticsBuilder(childInfo.getParentCityObject(src), json.getCityGMLMarshaller());
        builder.process(src, dest, semanticsBuilder);
        if (dest.isSetSemantics())
            dest.getSemantics().setSurfaces(semanticsBuilder.getSurfaces());
    }

    public CompositeSolidType marshalCompositeSolid(CompositeSolid src) {
        CompositeSolidType dest = new CompositeSolidType();
        marshalSolidCollection(src, dest);
        return dest;
    }

    public MultiSolidType marshalMultiSolid(MultiSolid src) {
        MultiSolidType dest = new MultiSolidType();
        marshalSolidCollection(src, dest);
        return dest;
    }

    /**
     * Marshals the property's geometry; falls back to a resolved XLink target
     * stashed as the GEOMETRY_XLINK local property. Returns null when neither
     * is present.
     */
    public AbstractGeometryObjectType marshalGeometryProperty(GeometryProperty<?> src) {
        AbstractGeometryObjectType dest = null;
        if (src.isSetGeometry())
            dest = marshal(src.getGeometry());
        else if (src.hasLocalProperty(CityJSONMarshaller.GEOMETRY_XLINK))
            dest = marshal((AbstractGeometry)src.getLocalProperty(CityJSONMarshaller.GEOMETRY_XLINK));
        return dest;
    }

    /**
     * Converts a polygon's linear rings into index lists: exterior first, then
     * interiors. Returns null when the exterior is absent or not a linear ring.
     */
    private List<List<Integer>> marshalPolygon(Polygon polygon, boolean reverse) {
        List<List<Integer>> vertices = null;
        if (polygon.isSetExterior()) {
            AbstractRing exterior = polygon.getExterior().getRing();
            if (exterior instanceof LinearRing) {
                List<Integer> indexes = marshalLinearRing((LinearRing)exterior, reverse);
                if (indexes != null) {
                    vertices = new ArrayList<>();
                    vertices.add(indexes);
                    if (polygon.isSetInterior()) {
                        for (AbstractRingProperty property : polygon.getInterior()) {
                            AbstractRing interior = property.getRing();
                            if (interior instanceof LinearRing) {
                                indexes = marshalLinearRing((LinearRing)interior, reverse);
                                if (indexes != null)
                                    vertices.add(indexes);
                            }
                        }
                    }
                }
            }
        }
        return vertices;
    }

    /**
     * Converts a linear ring to vertex indices. Requires at least 12 coordinate
     * values (4 points) and drops the final 3 values — presumably the ring's
     * closing point, which CityJSON does not repeat; TODO confirm. Returns null
     * for rings below that size.
     */
    private List<Integer> marshalLinearRing(LinearRing linearRing, boolean reverse) {
        List<Integer> vertices = null;
        List<Double> values = linearRing.toList3d(reverse);
        if (values.size() > 11)
            vertices = verticesBuilder.get().addVertices(values.subList(0, values.size() - 3));
        return vertices;
    }

    /**
     * GeometryWalker that flattens curve geometries into line strings of vertex
     * indices, honoring orientation flips from gml:OrientableCurve.
     */
    private final class MultiLineStringBuilder extends GeometryWalker {
        private MultiLineStringType dest;
        // Toggled while walking through MINUS-oriented orientable curves.
        private boolean reverse = false;

        @Override
        public void visit(LineString lineString) {
            List<Double> vertices = lineString.toList3d(reverse);
            if (!vertices.isEmpty())
                dest.addLineString(verticesBuilder.get().addVertices(vertices));
        }

        @Override
        public void visit(Curve curve) {
            if (curve.isSetSegments()) {
                CurveSegmentArrayProperty arrayProperty = curve.getSegments();
                if (arrayProperty.isSetCurveSegment()) {
                    List<Double> vertices = new ArrayList<>();
                    // Only gml:LineStringSegment segments contribute; others are skipped.
                    for (AbstractCurveSegment abstractCurveSegment : arrayProperty.getCurveSegment()) {
                        if (abstractCurveSegment.getGMLClass() == GMLClass.LINE_STRING_SEGMENT) {
                            List<Double> values = ((LineStringSegment)abstractCurveSegment).toList3d();
                            if (!values.isEmpty())
                                vertices.addAll(values);
                        }
                    }
                    if (!vertices.isEmpty()) {
                        if (!reverse)
                            dest.addLineString(verticesBuilder.get().addVertices(vertices));
                        else {
                            // NOTE(review): this emits each reversed point as its own
                            // one-point line string instead of one reversed line
                            // string — verify this matches upstream intent.
                            for (int i = vertices.size() - 3; i >= 0; i -= 3)
                                dest.addLineString(verticesBuilder.get().addVertices(vertices.subList(i, i + 3)));
                        }
                    }
                }
            }
        }

        @Override
        public void visit(OrientableCurve orientableCurve) {
            // A MINUS orientation flips the reversal flag for the wrapped curve only.
            if (orientableCurve.getOrientation() == Sign.MINUS) {
                reverse = !reverse;
                super.visit(orientableCurve);
                reverse = !reverse;
            } else
                super.visit(orientableCurve);
        }

        @Override
        public <T extends AbstractGeometry> void visit(GeometryProperty<T> property) {
            // Follow resolved XLink targets stashed as a local property.
            if (property.hasLocalProperty(CityJSONMarshaller.GEOMETRY_XLINK))
                ((AbstractGeometry)property.getLocalProperty(CityJSONMarshaller.GEOMETRY_XLINK)).accept(this);
            else
                super.visit(property);
        }

        public void process(AbstractCurve src, MultiLineStringType dest) {
            this.dest = dest;
            src.accept(this);
        }

        public void process(MultiCurve src, MultiLineStringType dest) {
            this.dest = dest;
            src.accept(this);
        }
    }

    /**
     * GeometryWalker that collects polygons — together with their semantic
     * surface, material, and texture assignments — into a surface collection.
     */
    private final class SurfaceCollectionBuilder extends GeometryWalker {
        private AbstractSurfaceCollectionType dest;
        private SemanticsBuilder semanticsBuilder;
        // Toggled while walking through MINUS-oriented orientable surfaces.
        private boolean reverse = false;
        // Position of the next surface; used to null-pad per-surface value lists.
        private int index = 0;

        @Override
        public void visit(Polygon polygon) {
            List<List<Integer>> surface = marshalPolygon(polygon, reverse);
            if (surface != null) {
                Integer semanticsIndex = semanticsBuilder.addSemanticSurface(childInfo.getParentCityObject(polygon));
                Map<String, Integer> materials = json.getCityGMLMarshaller().getAppearanceMarshaller().getMaterials(polygon, reverse);
                Map<String, List<List<Integer>>> textures = json.getCityGMLMarshaller().getAppearanceMarshaller().getTextures(polygon, reverse);
                addSurface(surface, semanticsIndex, materials, textures);
            }
        }

        @Override
        public void visit(PolygonPatch polygonPatch) {
            // Treat a surface patch as a plain polygon with the same rings.
            Polygon polygon = new Polygon();
            polygon.setExterior(polygonPatch.getExterior());
            polygon.setInterior(polygonPatch.getInterior());
            visit(polygon);
        }

        @Override
        public void visit(LinearRing linearRing) {
            // required for gml:Rectangle and gml:Triangle
            List<Integer> vertices = marshalLinearRing(linearRing, reverse);
            if (vertices != null) {
                Integer semanticsIndex = semanticsBuilder.addSemanticSurface(childInfo.getParentCityObject(linearRing));
                Map<String, Integer> materials = json.getCityGMLMarshaller().getAppearanceMarshaller().getMaterials(linearRing, reverse);
                Map<String, List<List<Integer>>> textures = json.getCityGMLMarshaller().getAppearanceMarshaller().getTextures(linearRing, reverse);
                addSurface(Collections.singletonList(vertices), semanticsIndex, materials, textures);
            }
        }

        @Override
        public void visit(OrientableSurface orientableSurface) {
            // A MINUS orientation flips the reversal flag for the wrapped surface only.
            if (orientableSurface.getOrientation() == Sign.MINUS) {
                reverse = !reverse;
                super.visit(orientableSurface);
                reverse = !reverse;
            } else
                super.visit(orientableSurface);
        }

        @Override
        public <T extends AbstractGeometry> void visit(GeometryProperty<T> property) {
            // Follow resolved XLink targets stashed as a local property.
            if (property.hasLocalProperty(CityJSONMarshaller.GEOMETRY_XLINK))
                ((AbstractGeometry)property.getLocalProperty(CityJSONMarshaller.GEOMETRY_XLINK)).accept(this);
            else
                super.visit(property);
        }

        /**
         * Appends one surface and records its semantics/material/texture values,
         * null-padding each value list up to the current surface index so the
         * per-surface arrays stay aligned.
         */
        private void addSurface(List<List<Integer>> surface, Integer semanticsIndex, Map<String, Integer> materials, Map<String, List<List<Integer>>> textures) {
            dest.addSurface(surface);
            if (semanticsIndex != null) {
                SurfaceCollectionSemanticsObject semantics = dest.getSemantics();
                if (semantics == null) {
                    semantics = new SurfaceCollectionSemanticsObject();
                    dest.setSemantics(semantics);
                }
                appendNulls(semantics, index);
                semantics.addValue(semanticsIndex);
            }
            if (materials != null) {
                for (Entry<String, Integer> entry : materials.entrySet()) {
                    SurfaceCollectionMaterialObject material = dest.getMaterial(entry.getKey());
                    if (material == null) {
                        material = new SurfaceCollectionMaterialObject(entry.getKey());
                        dest.addMaterial(material);
                    }
                    // add null values for non-colored surfaces
                    appendNulls(material, index);
                    material.addValue(entry.getValue());
                }
            }
            if (textures != null) {
                for (Entry<String, List<List<Integer>>> entry : textures.entrySet()) {
SurfaceCollectionTextureObject texture = dest.getTexture(entry.getKey()); if (texture == null) { texture = new SurfaceCollectionTextureObject(entry.getKey()); dest.addTexture(texture); } // add null values for non-textured surfaces appendNulls(texture, index); texture.addValue(entry.getValue()); } } index++; } public void process(AbstractGeometry src, AbstractSurfaceCollectionType dest, SemanticsBuilder semanticsBuilder, boolean collapseMaterialValues) { this.dest = dest; this.semanticsBuilder = semanticsBuilder; src.accept(this); postprocess(dest, index, collapseMaterialValues); } } private final class SolidCollectionBuilder extends GeometryWalker { private AbstractSolidCollectionType dest; private SemanticsBuilder semanticsBuilder; private int index = 0; @Override public void visit(Solid solid) { SolidType solidType = new SolidType(); marshalSolid(solid, solidType, semanticsBuilder, false); if (!solidType.getShells().isEmpty()) { dest.addSolid(solidType.getShells()); if (solidType.isSetSemantics()) { SolidCollectionSemanticsObject semantics = dest.getSemantics(); if (semantics == null) { semantics = new SolidCollectionSemanticsObject(); dest.setSemantics(semantics); } // add null values for non-semantic surfaces appendNulls(semantics, index); semantics.addValues(solidType.getSemantics().getValues()); } if (solidType.isSetMaterial()) { for (SolidMaterialObject object : solidType.getMaterial()) { SolidCollectionMaterialObject material = dest.getMaterial(object.getTheme()); if (material == null) { material = new SolidCollectionMaterialObject(object.getTheme()); dest.addMaterial(material); } // add null values for non-colored surfaces appendNulls(material, index); material.addValue(object.getValues()); } } if (solidType.isSetTexture()) { for (SolidTextureObject object : solidType.getTexture()) { SolidCollectionTextureObject texture = dest.getTexture(object.getTheme()); if (texture == null) { texture = new SolidCollectionTextureObject(object.getTheme()); 
dest.addTexture(texture); } // add null values for non-textured surfaces appendNulls(texture, index); texture.addValue(object.getValues()); } } index++; } } @Override public <T extends AbstractGeometry> void visit(GeometryProperty<T> property) { if (property.hasLocalProperty(CityJSONMarshaller.GEOMETRY_XLINK)) ((AbstractGeometry)property.getLocalProperty(CityJSONMarshaller.GEOMETRY_XLINK)).accept(this); else super.visit(property); } public void process(AbstractGeometry src, AbstractSolidCollectionType dest, SemanticsBuilder semanticsBuilder) { this.dest = dest; this.semanticsBuilder = semanticsBuilder; src.accept(this); postprocess(dest, index, true); } } private void appendNulls(AbstractSemanticsObject semantics, int index) { while (semantics.getNumValues() < index) semantics.addNullValue(); } private void appendNulls(AbstractMaterialObject material, int index) { while (material.getNumValues() < index) material.addNullValue(); } private void appendNulls(AbstractTextureObject texture, int index) { while (texture.getNumValues() < index) texture.addNullValue(); } private void postprocess(AbstractGeometryObjectType dest, int index, boolean collapseMaterialValues) { if (dest instanceof GeometryWithSemantics) { GeometryWithSemantics geometry = (GeometryWithSemantics)dest; if (geometry.isSetSemantics()) appendNulls(geometry.getSemantics(), index); } if (dest instanceof GeometryWithAppearance<?, ?>) { GeometryWithAppearance<?, ?> geometry = (GeometryWithAppearance<?, ?>)dest; if (geometry.isSetMaterial()) { for (AbstractMaterialObject material : geometry.getMaterial()) { if (!collapseMaterialValues || !material.collapseValues()) appendNulls(material, index); } } if (geometry.isSetTexture()) { for (AbstractTextureObject texture : geometry.getTexture()) appendNulls(texture, index); } } } }
/*
 *  Licensed to the Apache Software Foundation (ASF) under one or more
 *  contributor license agreements.  See the NOTICE file distributed with
 *  this work for additional information regarding copyright ownership.
 *  The ASF licenses this file to You under the Apache License, Version 2.0
 *  (the "License"); you may not use this file except in compliance with
 *  the License.  You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 *
 */
package org.apache.ivy.ant;

import java.io.File;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

import org.apache.ivy.Ivy;
import org.apache.ivy.core.cache.ResolutionCacheManager;
import org.apache.ivy.core.module.descriptor.Artifact;
import org.apache.ivy.core.module.descriptor.ModuleDescriptor;
import org.apache.ivy.core.module.id.ModuleId;
import org.apache.ivy.core.module.id.ModuleRevisionId;
import org.apache.ivy.core.report.ResolveReport;
import org.apache.ivy.core.resolve.ResolveOptions;
import org.apache.ivy.core.settings.IvySettings;
import org.apache.ivy.util.Message;
import org.apache.ivy.util.filter.Filter;
import org.apache.ivy.util.filter.FilterHelper;
import org.apache.tools.ant.BuildException;

import static org.apache.ivy.util.StringUtils.joinArray;
import static org.apache.ivy.util.StringUtils.splitToArray;

/**
 * Base class for tasks needing to be performed after a resolve.
 *
 * <p>Subclasses call {@link #prepareAndCheck()} before doing their work; it makes sure a
 * resolve has happened (either a standard one, triggered through {@link #ensureResolved},
 * or an "inline" one where organisation/module/revision are given directly on the task)
 * and that {@code organisation}, {@code module} and {@code conf} are all known, filling
 * them in from Ant/Ivy properties where they were not set explicitly.</p>
 */
public abstract class IvyPostResolveTask extends IvyTask {
    // Configurations to work on; "*" means all, resolved lazily in prepareAndCheck().
    private String conf;

    private boolean haltOnFailure = true;

    private boolean transitive = true;

    // When true, the module to resolve is described inline by organisation/module/revision
    // instead of by an ivy.xml file.
    private boolean inline = false;

    private String organisation;

    private String branch = null;

    private String module;

    // Default revision used in inline mode when none is given.
    private String revision = "latest.integration";

    private String resolveId;

    // Artifact type filter expression; turned into artifactFilter in prepareAndCheck().
    private String type;

    // Ivy file to resolve; defaults to the 'ivy.resolved.file' property when unset.
    private File file;

    private Filter<Artifact> artifactFilter = null;

    private boolean useOrigin = false;

    // Tri-state: null means "keep unless inline" (see isKeep()).
    private Boolean keep = null;

    private boolean refresh = false;

    private String resolveMode = null;

    private String log = ResolveOptions.LOG_DEFAULT;

    private boolean changing = false;

    // Delegate resolve task used whenever this task has to trigger a resolve itself.
    private IvyResolve resolve = new IvyResolve();

    public boolean isUseOrigin() {
        return useOrigin;
    }

    public void setUseOrigin(boolean useOrigin) {
        this.useOrigin = useOrigin;
    }

    public String getLog() {
        return log;
    }

    public void setLog(String log) {
        this.log = log;
    }

    // The three create* methods expose the nested elements of the delegate resolve task
    // so <dependency>, <exclude> and <conflict> can be nested under post-resolve tasks too.
    public IvyDependency createDependency() {
        return resolve.createDependency();
    }

    public IvyExclude createExclude() {
        return resolve.createExclude();
    }

    public IvyConflict createConflict() {
        return resolve.createConflict();
    }

    /**
     * Ensures a resolve has been performed and that organisation, module and conf are all
     * determined, throwing a {@link BuildException} when a mandatory piece is missing.
     * Also initializes the artifact type filter from the {@code type} attribute.
     */
    protected void prepareAndCheck() {
        Ivy ivy = getIvyInstance();
        IvySettings settings = ivy.getSettings();

        // Remember whether org+module were given on the task itself, before falling back
        // to the ivy.organisation / ivy.module properties.
        boolean orgAndModSetManually = organisation != null && module != null;

        organisation = getProperty(organisation, settings, "ivy.organisation");
        module = getProperty(module, settings, "ivy.module");

        if (file == null) {
            String fileName = getProperty(settings, "ivy.resolved.file", resolveId);
            if (fileName != null) {
                file = getProject().resolveFile(fileName);
            }
        }

        if (isInline()) {
            // Inline mode: the dependency is described by attributes, not an ivy file.
            if (conf == null) {
                conf = "*";
            }
            if (organisation == null) {
                throw new BuildException(
                        "no organisation provided for ivy cache task in inline mode: "
                                + "It can either be set explicitly via the attribute 'organisation' "
                                + "or via 'ivy.organisation' property");
            }
            if (module == null) {
                throw new BuildException(
                        "no module name provided for ivy cache task in inline mode: "
                                + "It can either be set explicitly via the attribute 'module' "
                                + "or via 'ivy.module' property");
            }
            String[] toResolve = getConfsToResolve(getOrganisation(), getModule() + "-caller",
                conf, true);
            // When we make an inline resolution, we can not resolve private confs.
            for (int i = 0; i < toResolve.length; i++) {
                if ("*".equals(toResolve[i])) {
                    toResolve[i] = "*(public)";
                }
            }
            if (toResolve.length > 0) {
                Message.verbose(String.format("using inline mode to resolve %s %s %s (%s)",
                        getOrganisation(), getModule(), getRevision(),
                        joinArray(toResolve, ", ")));
                IvyResolve resolve = setupResolve(isHaltonfailure(), isUseOrigin());
                resolve.setOrganisation(getOrganisation());
                resolve.setModule(getModule());
                resolve.setBranch(getBranch());
                resolve.setRevision(getRevision());
                resolve.setInline(true);
                resolve.setChanging(isChanging());
                resolve.setConf(conf);
                resolve.setResolveId(resolveId);
                resolve.setTransitive(isTransitive());
                resolve.execute();
            } else {
                Message.verbose(String.format("inline resolve already done for %s %s %s (%s)",
                        getOrganisation(), getModule(), getRevision(), conf));
            }
            if ("*".equals(conf)) {
                conf = joinArray(
                    getResolvedConfigurations(getOrganisation(), getModule() + "-caller", true),
                    ", ");
            }
        } else {
            Message.debug("using standard ensure resolved");

            // if the organization and module has been manually specified, we'll reuse the resolved
            // data from another build (there is no way to know which configurations were resolved
            // there (TODO: maybe we can check which reports exist and extract the configurations
            // from these report names?)
            if (!orgAndModSetManually) {
                ensureResolved(settings);
            }

            conf = getProperty(conf, settings, "ivy.resolved.configurations");
            if ("*".equals(conf)) {
                // "*" is only meaningful when a prior <resolve/> recorded the actual confs.
                conf = getProperty(settings, "ivy.resolved.configurations");
                if (conf == null) {
                    throw new BuildException("bad conf provided for ivy cache task: "
                            + "'*' can only be used with a prior call to <resolve/>");
                }
            }
        }
        // Re-read org/module: a resolve triggered above may have set these properties.
        organisation = getProperty(organisation, settings, "ivy.organisation");
        module = getProperty(module, settings, "ivy.module");
        if (organisation == null) {
            throw new BuildException("no organisation provided for ivy cache task: "
                    + "It can either be set explicitly via the attribute 'organisation' "
                    + "or via 'ivy.organisation' property or a prior call to <resolve/>");
        }
        if (module == null) {
            throw new BuildException("no module name provided for ivy cache task: "
                    + "It can either be set explicitly via the attribute 'module' "
                    + "or via 'ivy.module' property or a prior call to <resolve/>");
        }
        if (conf == null) {
            throw new BuildException("no conf provided for ivy cache task: "
                    + "It can either be set explicitly via the attribute 'conf' or "
                    + "via 'ivy.resolved.configurations' property or a prior call to <resolve/>");
        }

        artifactFilter = FilterHelper.getArtifactTypeFilter(type);
    }

    /**
     * Triggers a resolve for any requested configuration that has not been resolved yet
     * (or whose cached report has disappeared). No-op when everything is already resolved.
     */
    protected void ensureResolved(IvySettings settings) {
        String requestedConfigs = getProperty(getConf(), settings, "ivy.resolved.configurations");

        String[] confs = (getResolveId() == null)
                ? getConfsToResolve(getOrganisation(), getModule(), requestedConfigs, false)
                : getConfsToResolve(getResolveId(), requestedConfigs);

        if (confs.length > 0) {
            IvyResolve resolve = setupResolve(isHaltonfailure(), isUseOrigin());
            resolve.setFile(getFile());
            resolve.setTransitive(isTransitive());
            resolve.setConf(joinArray(confs, ", "));
            resolve.setResolveId(getResolveId());
            resolve.execute();
        }
    }

    /**
     * Returns the configurations of org/module that still need resolving, looking the
     * reference descriptor and already-resolved confs up by module identity.
     */
    protected String[] getConfsToResolve(String org, String module, String conf, boolean strict) {
        ModuleDescriptor reference = getResolvedDescriptor(org, module, strict);
        String[] rconfs = getResolvedConfigurations(org, module, strict);
        return getConfsToResolve(reference, conf, rconfs);
    }

    /**
     * Returns the configurations that still need resolving for a given resolveId,
     * consulting the project reference recorded by a prior resolve with that id.
     */
    protected String[] getConfsToResolve(String resolveId, String conf) {
        ModuleDescriptor reference = getResolvedDescriptor(resolveId, false);
        if (reference == null) {
            // assume the module has been resolved outside this build, resolve the required
            // configurations again
            // TODO: find a way to discover which confs were resolved by that previous resolve
            if (conf == null) {
                return new String[] {"*"};
            }
            return splitToArray(conf);
        }
        String[] rconfs = getProject().getReference("ivy.resolved.configurations.ref."
                + resolveId);
        return getConfsToResolve(reference, conf, rconfs);
    }

    /**
     * Core of the "what is left to resolve" computation: asked confs minus the confs whose
     * resolve report still exists in the resolution cache.
     */
    private String[] getConfsToResolve(ModuleDescriptor reference, String conf, String[] rconfs) {
        Message.debug("calculating configurations to resolve");

        if (reference == null) {
            Message.debug("module not yet resolved, all confs still need to be resolved");
            if (conf == null) {
                return new String[] {"*"};
            }
            return splitToArray(conf);
        }

        if (conf == null) {
            Message.debug("module already resolved, no configuration to resolve");
            return new String[0];
        }

        String[] confs;
        if ("*".equals(conf)) {
            confs = reference.getConfigurationsNames();
        } else {
            confs = splitToArray(conf);
        }

        Set<String> rconfsSet = new HashSet<>();

        // for each resolved configuration, check if the report still exists
        ResolutionCacheManager cache = getSettings().getResolutionCacheManager();
        for (String resolvedConf : rconfs) {
            String resolveId = getResolveId();
            if (resolveId == null) {
                resolveId = ResolveOptions.getDefaultResolveId(reference);
            }
            File report = cache.getConfigurationResolveReportInCache(resolveId, resolvedConf);

            // if the report does not exist any longer, we have to recreate it...
            if (report.exists()) {
                rconfsSet.add(resolvedConf);
            }
        }

        Set<String> confsSet = new HashSet<>(Arrays.asList(confs));
        Message.debug("resolved configurations: " + rconfsSet);
        Message.debug("asked configurations: " + confsSet);
        confsSet.removeAll(rconfsSet);
        Message.debug("to resolve configurations: " + confsSet);
        return confsSet.toArray(new String[confsSet.size()]);
    }

    /**
     * Configures the shared delegate {@link IvyResolve} with this task's settings and
     * returns it, ready for the caller to set module coordinates and execute.
     */
    protected IvyResolve setupResolve(boolean haltOnFailure, boolean useOrigin) {
        Message.verbose("no resolved descriptor found: launching default resolve");
        resolve.setTaskName(getTaskName());
        resolve.setProject(getProject());
        resolve.setHaltonfailure(haltOnFailure);
        resolve.setUseOrigin(useOrigin);
        resolve.setValidate(doValidate(getSettings()));
        resolve.setKeep(isKeep());
        resolve.setRefresh(isRefresh());
        resolve.setLog(getLog());
        resolve.setSettingsRef(getSettingsRef());
        resolve.setResolveMode(getResolveMode());
        return resolve;
    }

    // Module revision id of what was resolved; falls back to Ivy's working revision.
    protected ModuleRevisionId getResolvedMrid() {
        return new ModuleRevisionId(getResolvedModuleId(),
                (getRevision() == null) ? Ivy.getWorkingRevision() : getRevision());
    }

    // In inline mode the resolved module is the synthetic "<module>-caller" wrapper.
    protected ModuleId getResolvedModuleId() {
        return isInline() ? new ModuleId(getOrganisation(), getModule() + "-caller")
                : new ModuleId(getOrganisation(), getModule());
    }

    protected ResolveReport getResolvedReport() {
        return getResolvedReport(getOrganisation(),
            isInline() ? getModule() + "-caller" : getModule(), resolveId);
    }

    public String getType() {
        return type;
    }

    public void setType(String type) {
        this.type = type;
    }

    public String getConf() {
        return conf;
    }

    public void setConf(String conf) {
        this.conf = conf;
    }

    public String getModule() {
        return module;
    }

    public void setModule(String module) {
        this.module = module;
    }

    public String getOrganisation() {
        return organisation;
    }

    public void setOrganisation(String organisation) {
        this.organisation = organisation;
    }

    public String getBranch() {
        return branch;
    }

    public void setBranch(String branch) {
        this.branch = branch;
    }

    public boolean isHaltonfailure() {
        return haltOnFailure;
    }

    public void setHaltonfailure(boolean haltOnFailure) {
        this.haltOnFailure = haltOnFailure;
    }

    // The 'cache' attribute is no longer supported; fail loudly instead of ignoring it.
    public void setCache(File cache) {
        cacheAttributeNotSupported();
    }

    public String getRevision() {
        return revision;
    }

    public void setRevision(String rev) {
        revision = rev;
    }

    public Filter<Artifact> getArtifactFilter() {
        return artifactFilter;
    }

    public boolean isTransitive() {
        return transitive;
    }

    public void setTransitive(boolean transitive) {
        this.transitive = transitive;
    }

    public boolean isInline() {
        return inline;
    }

    public void setInline(boolean inline) {
        this.inline = inline;
    }

    public void setResolveId(String resolveId) {
        this.resolveId = resolveId;
    }

    public String getResolveId() {
        return resolveId;
    }

    public void setFile(File file) {
        this.file = file;
    }

    public File getFile() {
        return file;
    }

    public void setKeep(boolean keep) {
        this.keep = keep;
    }

    // Unset 'keep' defaults to true for standard resolves, false for inline ones.
    public boolean isKeep() {
        return this.keep == null ? !isInline() : this.keep;
    }

    public void setChanging(boolean changing) {
        this.changing = changing;
    }

    public boolean isChanging() {
        return this.changing;
    }

    public void setRefresh(boolean refresh) {
        this.refresh = refresh;
    }

    public boolean isRefresh() {
        return refresh;
    }

    public String getResolveMode() {
        return resolveMode;
    }

    public void setResolveMode(String resolveMode) {
        this.resolveMode = resolveMode;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.activemq.artemis.tests.unit.jms.client;

import org.apache.activemq.artemis.tests.util.ActiveMQTestBase;
import org.apache.activemq.artemis.utils.SelectorTranslator;
import org.junit.Assert;
import org.junit.Test;

/**
 * Tests for {@link SelectorTranslator}, which rewrites JMS selector strings into the
 * core ActiveMQ Artemis filter language: whole-word JMS header identifiers (e.g.
 * {@code JMSPriority}) are replaced with their AMQ equivalents, while occurrences
 * inside string literals or as substrings of longer identifiers are left untouched.
 */
public class SelectorTranslatorTest extends ActiveMQTestBase {

   // A null selector must pass through as null, not throw.
   @Test
   public void testParseNull() {
      Assert.assertNull(SelectorTranslator.convertToActiveMQFilterString(null));
   }

   // Selectors without any JMS header identifiers are returned unchanged.
   @Test
   public void testParseSimple() {
      final String selector = "color = 'red'";

      Assert.assertEquals(selector, SelectorTranslator.convertToActiveMQFilterString(selector));
   }

   @Test
   public void testParseMoreComplex() {
      final String selector = "color = 'red' OR cheese = 'stilton' OR (age = 3 AND shoesize = 12)";

      Assert.assertEquals(selector, SelectorTranslator.convertToActiveMQFilterString(selector));
   }

   // JMSDeliveryMode is special: both the identifier AND its literal values are mapped
   // (NON_PERSISTENT/PERSISTENT -> NON_DURABLE/DURABLE on AMQDurable).
   @Test
   public void testParseJMSDeliveryMode() {
      String selector = "JMSDeliveryMode='NON_PERSISTENT'";

      Assert.assertEquals("AMQDurable='NON_DURABLE'", SelectorTranslator.convertToActiveMQFilterString(selector));

      selector = "JMSDeliveryMode='PERSISTENT'";

      Assert.assertEquals("AMQDurable='DURABLE'", SelectorTranslator.convertToActiveMQFilterString(selector));

      selector = "color = 'red' AND 'NON_PERSISTENT' = JMSDeliveryMode";

      Assert.assertEquals("color = 'red' AND 'NON_DURABLE' = AMQDurable", SelectorTranslator.convertToActiveMQFilterString(selector));

      selector = "color = 'red' AND 'PERSISTENT' = JMSDeliveryMode";

      Assert.assertEquals("color = 'red' AND 'DURABLE' = AMQDurable", SelectorTranslator.convertToActiveMQFilterString(selector));

      checkNoSubstitute("JMSDeliveryMode");
   }

   @Test
   public void testParseJMSPriority() {
      String selector = "JMSPriority=5";

      Assert.assertEquals("AMQPriority=5", SelectorTranslator.convertToActiveMQFilterString(selector));

      // Leading whitespace must be preserved around the substituted identifier.
      selector = " JMSPriority = 7";

      Assert.assertEquals(" AMQPriority = 7", SelectorTranslator.convertToActiveMQFilterString(selector));

      selector = " JMSPriority = 7 OR 1 = JMSPriority AND (JMSPriority= 1 + 4)";

      Assert.assertEquals(" AMQPriority = 7 OR 1 = AMQPriority AND (AMQPriority= 1 + 4)", SelectorTranslator.convertToActiveMQFilterString(selector));

      checkNoSubstitute("JMSPriority");

      // The bare identifier is substituted, but the same text inside a quoted literal is not.
      selector = "animal = 'lion' JMSPriority = 321 OR animal_name = 'xyzJMSPriorityxyz'";

      Assert.assertEquals("animal = 'lion' AMQPriority = 321 OR animal_name = 'xyzJMSPriorityxyz'", SelectorTranslator.convertToActiveMQFilterString(selector));
   }

   // NOTE(review): several selector literals in this method are missing the closing
   // quote (e.g. "JMSMessageID='ID:AMQ-12435678"). The translation is purely textual,
   // so the expected values match anyway — presumably intentional, but worth confirming.
   @Test
   public void testParseJMSMessageID() {
      String selector = "JMSMessageID='ID:AMQ-12435678";

      Assert.assertEquals("AMQUserID='ID:AMQ-12435678", SelectorTranslator.convertToActiveMQFilterString(selector));

      selector = " JMSMessageID='ID:AMQ-12435678";

      Assert.assertEquals(" AMQUserID='ID:AMQ-12435678", SelectorTranslator.convertToActiveMQFilterString(selector));

      selector = " JMSMessageID = 'ID:AMQ-12435678";

      Assert.assertEquals(" AMQUserID = 'ID:AMQ-12435678", SelectorTranslator.convertToActiveMQFilterString(selector));

      selector = " myHeader = JMSMessageID";

      Assert.assertEquals(" myHeader = AMQUserID", SelectorTranslator.convertToActiveMQFilterString(selector));

      selector = " myHeader = JMSMessageID OR (JMSMessageID = 'ID-AMQ' + '12345')";

      Assert.assertEquals(" myHeader = AMQUserID OR (AMQUserID = 'ID-AMQ' + '12345')", SelectorTranslator.convertToActiveMQFilterString(selector));

      checkNoSubstitute("JMSMessageID");
   }

   @Test
   public void testParseJMSTimestamp() {
      String selector = "JMSTimestamp=12345678";

      Assert.assertEquals("AMQTimestamp=12345678", SelectorTranslator.convertToActiveMQFilterString(selector));

      selector = " JMSTimestamp=12345678";

      Assert.assertEquals(" AMQTimestamp=12345678", SelectorTranslator.convertToActiveMQFilterString(selector));

      selector = " JMSTimestamp=12345678 OR 78766 = JMSTimestamp AND (JMSTimestamp= 1 + 4878787)";

      Assert.assertEquals(" AMQTimestamp=12345678 OR 78766 = AMQTimestamp AND (AMQTimestamp= 1 + 4878787)", SelectorTranslator.convertToActiveMQFilterString(selector));

      checkNoSubstitute("JMSTimestamp");

      selector = "animal = 'lion' JMSTimestamp = 321 OR animal_name = 'xyzJMSTimestampxyz'";

      Assert.assertEquals("animal = 'lion' AMQTimestamp = 321 OR animal_name = 'xyzJMSTimestampxyz'", SelectorTranslator.convertToActiveMQFilterString(selector));
   }

   @Test
   public void testParseJMSExpiration() {
      String selector = "JMSExpiration=12345678";

      Assert.assertEquals("AMQExpiration=12345678", SelectorTranslator.convertToActiveMQFilterString(selector));

      selector = " JMSExpiration=12345678";

      Assert.assertEquals(" AMQExpiration=12345678", SelectorTranslator.convertToActiveMQFilterString(selector));

      selector = " JMSExpiration=12345678 OR 78766 = JMSExpiration AND (JMSExpiration= 1 + 4878787)";

      Assert.assertEquals(" AMQExpiration=12345678 OR 78766 = AMQExpiration AND (AMQExpiration= 1 + 4878787)", SelectorTranslator.convertToActiveMQFilterString(selector));

      checkNoSubstitute("JMSExpiration");

      selector = "animal = 'lion' JMSExpiration = 321 OR animal_name = 'xyzJMSExpirationxyz'";

      Assert.assertEquals("animal = 'lion' AMQExpiration = 321 OR animal_name = 'xyzJMSExpirationxyz'", SelectorTranslator.convertToActiveMQFilterString(selector));
   }

   // JMSCorrelationID is NOT translated — it must come through untouched.
   @Test
   public void testParseJMSCorrelationID() {
      String selector = "JMSCorrelationID='ID:AMQ-12435678";

      Assert.assertEquals(selector, SelectorTranslator.convertToActiveMQFilterString(selector));

      selector = " JMSCorrelationID='ID:AMQ-12435678";

      Assert.assertEquals(selector, SelectorTranslator.convertToActiveMQFilterString(selector));

      selector = " JMSCorrelationID = 'ID:AMQ-12435678";

      Assert.assertEquals(selector, SelectorTranslator.convertToActiveMQFilterString(selector));

      selector = " myHeader = JMSCorrelationID";

      Assert.assertEquals(selector, SelectorTranslator.convertToActiveMQFilterString(selector));

      selector = " myHeader = JMSCorrelationID OR (JMSCorrelationID = 'ID-AMQ' + '12345')";

      Assert.assertEquals(selector, SelectorTranslator.convertToActiveMQFilterString(selector));

      checkNoSubstitute("JMSCorrelationID");
   }

   // JMSType is likewise left as-is by the translator.
   @Test
   public void testParseJMSType() {
      String selector = "JMSType='aardvark'";

      Assert.assertEquals(selector, SelectorTranslator.convertToActiveMQFilterString(selector));

      selector = " JMSType='aardvark'";

      Assert.assertEquals(selector, SelectorTranslator.convertToActiveMQFilterString(selector));

      selector = " JMSType = 'aardvark'";

      Assert.assertEquals(selector, SelectorTranslator.convertToActiveMQFilterString(selector));

      selector = " myHeader = JMSType";

      Assert.assertEquals(selector, SelectorTranslator.convertToActiveMQFilterString(selector));

      selector = " myHeader = JMSType OR (JMSType = 'aardvark' + 'sandwich')";

      Assert.assertEquals(selector, SelectorTranslator.convertToActiveMQFilterString(selector));

      checkNoSubstitute("JMSType");
   }

   // Legacy HornetQ (HQ*) identifiers are mapped to AMQ* by the HQ conversion path,
   // except when they appear inside string literals.
   @Test
   public void testConvertHQFilterString() {
      String selector = "HQUserID = 'ID:AMQ-12435678'";

      Assert.assertEquals("AMQUserID = 'ID:AMQ-12435678'", SelectorTranslator.convertHQToActiveMQFilterString(selector));

      selector = "HQUserID = 'HQUserID'";

      Assert.assertEquals("AMQUserID = 'HQUserID'", SelectorTranslator.convertHQToActiveMQFilterString(selector));

      selector = "HQUserID = 'ID:AMQ-12435678'";

      Assert.assertEquals("AMQUserID = 'ID:AMQ-12435678'", SelectorTranslator.convertHQToActiveMQFilterString(selector));

      selector = "HQDurable='NON_DURABLE'";

      Assert.assertEquals("AMQDurable='NON_DURABLE'", SelectorTranslator.convertHQToActiveMQFilterString(selector));

      selector = "HQPriority=5";

      Assert.assertEquals("AMQPriority=5", SelectorTranslator.convertHQToActiveMQFilterString(selector));

      selector = "HQTimestamp=12345678";

      Assert.assertEquals("AMQTimestamp=12345678", SelectorTranslator.convertHQToActiveMQFilterString(selector));

      selector = "HQExpiration=12345678";

      Assert.assertEquals("AMQExpiration=12345678", SelectorTranslator.convertHQToActiveMQFilterString(selector));
   }

   // Private -------------------------------------------------------------------------------------

   /**
    * Asserts that {@code fieldName} is NOT substituted when it appears as a substring of a
    * longer identifier (prefix or suffix) or anywhere inside a quoted string literal —
    * only exact, whole-word identifier matches may be translated.
    */
   private void checkNoSubstitute(final String fieldName) {
      String selector = "Other" + fieldName + " = 767868";

      Assert.assertEquals(selector, SelectorTranslator.convertToActiveMQFilterString(selector));

      selector = "cheese = 'cheddar' AND Wrong" + fieldName + " = 54";

      Assert.assertEquals(selector, SelectorTranslator.convertToActiveMQFilterString(selector));

      selector = "fruit = 'pomegranate' AND " + fieldName + "NotThisOne = 'tuesday'";

      Assert.assertEquals(selector, SelectorTranslator.convertToActiveMQFilterString(selector));

      selector = "animal = 'lion' AND animal_name = '" + fieldName + "'";

      Assert.assertEquals(selector, SelectorTranslator.convertToActiveMQFilterString(selector));

      selector = "animal = 'lion' AND animal_name = ' " + fieldName + "'";

      Assert.assertEquals(selector, SelectorTranslator.convertToActiveMQFilterString(selector));

      selector = "animal = 'lion' AND animal_name = ' " + fieldName + " '";

      Assert.assertEquals(selector, SelectorTranslator.convertToActiveMQFilterString(selector));

      selector = "animal = 'lion' AND animal_name = 'xyz " + fieldName + "'";

      Assert.assertEquals(selector, SelectorTranslator.convertToActiveMQFilterString(selector));

      selector = "animal = 'lion' AND animal_name = 'xyz" + fieldName + "'";

      Assert.assertEquals(selector, SelectorTranslator.convertToActiveMQFilterString(selector));

      selector = "animal = 'lion' AND animal_name = '" + fieldName + "xyz'";

      Assert.assertEquals(selector, SelectorTranslator.convertToActiveMQFilterString(selector));

      selector = "animal = 'lion' AND animal_name = 'xyz" + fieldName + "xyz'";

      Assert.assertEquals(selector, SelectorTranslator.convertToActiveMQFilterString(selector));
   }
}
/**
 * Generated with Acceleo
 */
package org.wso2.developerstudio.eclipse.gmf.esb.components;

// Start of user code for imports
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.util.BasicDiagnostic;
import org.eclipse.emf.common.util.Diagnostic;
import org.eclipse.emf.common.util.WrappedException;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.EStructuralFeature;
import org.eclipse.emf.ecore.EcorePackage;
import org.eclipse.emf.ecore.resource.ResourceSet;
import org.eclipse.emf.ecore.util.Diagnostician;
import org.eclipse.emf.ecore.util.EcoreUtil;
import org.eclipse.emf.eef.runtime.api.notify.EStructuralFeatureNotificationFilter;
import org.eclipse.emf.eef.runtime.api.notify.IPropertiesEditionEvent;
import org.eclipse.emf.eef.runtime.api.notify.NotificationFilter;
import org.eclipse.emf.eef.runtime.context.PropertiesEditingContext;
import org.eclipse.emf.eef.runtime.impl.components.SinglePartPropertiesEditingComponent;
import org.eclipse.emf.eef.runtime.impl.utils.EEFConverterUtil;
import org.wso2.developerstudio.eclipse.gmf.esb.EsbPackage;
import org.wso2.developerstudio.eclipse.gmf.esb.TemplateParameter;
import org.wso2.developerstudio.eclipse.gmf.esb.parts.EsbViewsRepository;
import org.wso2.developerstudio.eclipse.gmf.esb.parts.TemplateParameterPropertiesEditionPart;

// End of user code

/**
 * EEF properties-editing component that binds a {@link TemplateParameter} model
 * element to its single "Base" properties part: it pushes model values into the
 * form ({@link #initPart}/{@link #updatePart}), writes edits back to the model
 * ({@link #updateSemanticModel}), and validates candidate values
 * ({@link #validateValue}).
 *
 * NOTE(review): this file is generated with Acceleo — only the marked
 * "user code" regions are safe for hand edits; the rest may be regenerated.
 */
public class TemplateParameterPropertiesEditionComponent extends SinglePartPropertiesEditingComponent {

	/** Key of the single editing part managed by this component. */
	public static String BASE_PART = "Base"; //$NON-NLS-1$

	/**
	 * Default constructor: registers the "Base" part and wires this component to
	 * the TemplateParameter entry in the ESB views repository.
	 */
	public TemplateParameterPropertiesEditionComponent(PropertiesEditingContext editingContext, EObject templateParameter, String editing_mode) {
		super(editingContext, templateParameter, editing_mode);
		parts = new String[] { BASE_PART };
		repositoryKey = EsbViewsRepository.class;
		partKey = EsbViewsRepository.TemplateParameter.class;
	}

	/**
	 * {@inheritDoc}
	 *
	 * Populates the editing part with the current model values (name,
	 * isMandatory, defaultValue). Guarded by setInitializing so that the value
	 * pushes below are not echoed back as edit events.
	 *
	 * @see org.eclipse.emf.eef.runtime.api.component.IPropertiesEditionComponent#initPart(java.lang.Object, int, org.eclipse.emf.ecore.EObject,
	 *      org.eclipse.emf.ecore.resource.ResourceSet)
	 */
	public void initPart(Object key, int kind, EObject elt, ResourceSet allResource) {
		setInitializing(true);
		if (editingPart != null && key == partKey) {
			editingPart.setContext(elt, allResource);
			final TemplateParameter templateParameter = (TemplateParameter)elt;
			final TemplateParameterPropertiesEditionPart basePart = (TemplateParameterPropertiesEditionPart)editingPart;
			// init values — each widget is only populated if it is accessible in this view
			if (isAccessible(EsbViewsRepository.TemplateParameter.Properties.name))
				basePart.setName(EEFConverterUtil.convertToString(EcorePackage.Literals.ESTRING, templateParameter.getName()));

			if (isAccessible(EsbViewsRepository.TemplateParameter.Properties.isMandatory)) {
				basePart.setIsMandatory(templateParameter.isIsMandatory());
			}
			if (isAccessible(EsbViewsRepository.TemplateParameter.Properties.defaultValue))
				basePart.setDefaultValue(EEFConverterUtil.convertToString(EcorePackage.Literals.ESTRING, templateParameter.getDefaultValue()));

			// init filters
			// init values for referenced views
			// init filters for referenced views
		}
		setInitializing(false);
	}

	/**
	 * {@inheritDoc}
	 *
	 * Maps an editor key from the views repository to the EMF structural
	 * feature it edits; falls back to the superclass for unknown keys.
	 *
	 * @see org.eclipse.emf.eef.runtime.impl.components.StandardPropertiesEditionComponent#associatedFeature(java.lang.Object)
	 */
	public EStructuralFeature associatedFeature(Object editorKey) {
		if (editorKey == EsbViewsRepository.TemplateParameter.Properties.name) {
			return EsbPackage.eINSTANCE.getTemplateParameter_Name();
		}
		if (editorKey == EsbViewsRepository.TemplateParameter.Properties.isMandatory) {
			return EsbPackage.eINSTANCE.getTemplateParameter_IsMandatory();
		}
		if (editorKey == EsbViewsRepository.TemplateParameter.Properties.defaultValue) {
			return EsbPackage.eINSTANCE.getTemplateParameter_DefaultValue();
		}
		return super.associatedFeature(editorKey);
	}

	/**
	 * {@inheritDoc}
	 *
	 * Writes a single edited value from the form back into the semantic model.
	 * String-typed features are round-tripped through EEFConverterUtil.
	 *
	 * @see org.eclipse.emf.eef.runtime.impl.components.StandardPropertiesEditionComponent#updateSemanticModel(org.eclipse.emf.eef.runtime.api.notify.IPropertiesEditionEvent)
	 */
	public void updateSemanticModel(final IPropertiesEditionEvent event) {
		TemplateParameter templateParameter = (TemplateParameter)semanticObject;
		if (EsbViewsRepository.TemplateParameter.Properties.name == event.getAffectedEditor()) {
			templateParameter.setName((java.lang.String)EEFConverterUtil.createFromString(EcorePackage.Literals.ESTRING, (String)event.getNewValue()));
		}
		if (EsbViewsRepository.TemplateParameter.Properties.isMandatory == event.getAffectedEditor()) {
			templateParameter.setIsMandatory((Boolean)event.getNewValue());
		}
		if (EsbViewsRepository.TemplateParameter.Properties.defaultValue == event.getAffectedEditor()) {
			templateParameter.setDefaultValue((java.lang.String)EEFConverterUtil.createFromString(EcorePackage.Literals.ESTRING, (String)event.getNewValue()));
		}
	}

	/**
	 * {@inheritDoc}
	 *
	 * Refreshes the visible part when the model changes behind it (e.g. undo,
	 * another editor). Null model values are rendered as the empty string.
	 *
	 * @see org.eclipse.emf.eef.runtime.impl.components.StandardPropertiesEditionComponent#updatePart(org.eclipse.emf.common.notify.Notification)
	 */
	public void updatePart(Notification msg) {
		super.updatePart(msg);
		if (editingPart.isVisible()) {
			TemplateParameterPropertiesEditionPart basePart = (TemplateParameterPropertiesEditionPart)editingPart;
			if (EsbPackage.eINSTANCE.getTemplateParameter_Name().equals(msg.getFeature()) && msg.getNotifier().equals(semanticObject) && basePart != null && isAccessible(EsbViewsRepository.TemplateParameter.Properties.name)) {
				if (msg.getNewValue() != null) {
					basePart.setName(EcoreUtil.convertToString(EcorePackage.Literals.ESTRING, msg.getNewValue()));
				} else {
					basePart.setName("");
				}
			}
			if (EsbPackage.eINSTANCE.getTemplateParameter_IsMandatory().equals(msg.getFeature()) && msg.getNotifier().equals(semanticObject) && basePart != null && isAccessible(EsbViewsRepository.TemplateParameter.Properties.isMandatory))
				basePart.setIsMandatory((Boolean)msg.getNewValue());

			if (EsbPackage.eINSTANCE.getTemplateParameter_DefaultValue().equals(msg.getFeature()) && msg.getNotifier().equals(semanticObject) && basePart != null && isAccessible(EsbViewsRepository.TemplateParameter.Properties.defaultValue)) {
				if (msg.getNewValue() != null) {
					basePart.setDefaultValue(EcoreUtil.convertToString(EcorePackage.Literals.ESTRING, msg.getNewValue()));
				} else {
					basePart.setDefaultValue("");
				}
			}
		}
	}

	/**
	 * {@inheritDoc}
	 *
	 * Restricts model notifications delivered to this component to the three
	 * features it edits.
	 *
	 * @see org.eclipse.emf.eef.runtime.impl.components.StandardPropertiesEditionComponent#getNotificationFilters()
	 */
	@Override
	protected NotificationFilter[] getNotificationFilters() {
		NotificationFilter filter = new EStructuralFeatureNotificationFilter(
			EsbPackage.eINSTANCE.getTemplateParameter_Name(),
			EsbPackage.eINSTANCE.getTemplateParameter_IsMandatory(),
			EsbPackage.eINSTANCE.getTemplateParameter_DefaultValue()		);
		return new NotificationFilter[] {filter,};
	}

	/**
	 * {@inheritDoc}
	 *
	 * Only the name property is mandatory in the form.
	 *
	 * @see org.eclipse.emf.eef.runtime.impl.components.StandardPropertiesEditionComponent#isRequired(java.lang.Object, int)
	 */
	public boolean isRequired(Object key, int kind) {
		return key == EsbViewsRepository.TemplateParameter.Properties.name;
	}

	/**
	 * {@inheritDoc}
	 *
	 * Validates a proposed value against the attribute's EMF data type before
	 * it is committed. Conversion/validation failures are folded into the
	 * returned Diagnostic rather than thrown.
	 *
	 * @see org.eclipse.emf.eef.runtime.api.component.IPropertiesEditionComponent#validateValue(org.eclipse.emf.eef.runtime.api.notify.IPropertiesEditionEvent)
	 */
	public Diagnostic validateValue(IPropertiesEditionEvent event) {
		Diagnostic ret = Diagnostic.OK_INSTANCE;
		if (event.getNewValue() != null) {
			try {
				if (EsbViewsRepository.TemplateParameter.Properties.name == event.getAffectedEditor()) {
					Object newValue = event.getNewValue();
					// String input from the widget must first be parsed into the attribute's type
					if (newValue instanceof String) {
						newValue = EEFConverterUtil.createFromString(EsbPackage.eINSTANCE.getTemplateParameter_Name().getEAttributeType(), (String)newValue);
					}
					ret = Diagnostician.INSTANCE.validate(EsbPackage.eINSTANCE.getTemplateParameter_Name().getEAttributeType(), newValue);
				}
				if (EsbViewsRepository.TemplateParameter.Properties.isMandatory == event.getAffectedEditor()) {
					Object newValue = event.getNewValue();
					if (newValue instanceof String) {
						newValue = EEFConverterUtil.createFromString(EsbPackage.eINSTANCE.getTemplateParameter_IsMandatory().getEAttributeType(), (String)newValue);
					}
					ret = Diagnostician.INSTANCE.validate(EsbPackage.eINSTANCE.getTemplateParameter_IsMandatory().getEAttributeType(), newValue);
				}
				if (EsbViewsRepository.TemplateParameter.Properties.defaultValue == event.getAffectedEditor()) {
					Object newValue = event.getNewValue();
					if (newValue instanceof String) {
						newValue = EEFConverterUtil.createFromString(EsbPackage.eINSTANCE.getTemplateParameter_DefaultValue().getEAttributeType(), (String)newValue);
					}
					ret = Diagnostician.INSTANCE.validate(EsbPackage.eINSTANCE.getTemplateParameter_DefaultValue().getEAttributeType(), newValue);
				}
			} catch (IllegalArgumentException iae) {
				ret = BasicDiagnostic.toDiagnostic(iae);
			} catch (WrappedException we) {
				ret = BasicDiagnostic.toDiagnostic(we);
			}
		}
		return ret;
	}

}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.action.support;

import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.test.ESTestCase;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static org.hamcrest.CoreMatchers.equalTo;

/**
 * Tests for {@link AutoCreateIndex}: parsing of the {@code action.auto_create_index}
 * setting and the decision logic of {@code shouldAutoCreate} (boolean values,
 * +/- index patterns, existing indices, dynamic-mapping interaction, and
 * dynamic setting updates).
 */
public class AutoCreateIndexTests extends ESTestCase {

    public void testParseFailed() {
        // ",,," yields empty expressions, which the setting parser rejects.
        Settings settings = Settings.builder().put("action.auto_create_index", ",,,").build();
        IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> newAutoCreateIndex(settings));
        assertEquals("Can't parse [,,,] for setting [action.auto_create_index] must be either [true, false, or a " +
                "comma separated list of index patterns]", ex.getMessage());
    }

    public void testParseFailedMissingIndex() {
        // A bare "+" or "-" has no index name behind the sign.
        String prefix = randomFrom("+", "-");
        Settings settings = Settings.builder().put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), prefix).build();
        IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> newAutoCreateIndex(settings));
        assertEquals("Can't parse [" + prefix + "] for setting [action.auto_create_index] must contain an index name after ["
                + prefix + "]", ex.getMessage());
    }

    public void testHandleSpaces() { // see #21449
        // Whitespace around comma-separated patterns must be stripped.
        Settings settings = Settings.builder().put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(),
                randomFrom(".marvel-, .security, .watches, .triggered_watches, .watcher-history-",
                        ".marvel-,.security,.watches,.triggered_watches,.watcher-history-"))
                .build();
        AutoCreateIndex autoCreateIndex = newAutoCreateIndex(settings);
        List<Tuple<String, Boolean>> expressions = autoCreateIndex.getAutoCreate().getExpressions();
        Map<String, Boolean> map = new HashMap<>();
        for (Tuple<String, Boolean> t : expressions) {
            map.put(t.v1(), t.v2());
        }
        assertTrue(map.get(".marvel-"));
        assertTrue(map.get(".security"));
        assertTrue(map.get(".watches"));
        assertTrue(map.get(".triggered_watches"));
        assertTrue(map.get(".watcher-history-"));
        assertEquals(5, map.size());
    }

    public void testAutoCreationDisabled() {
        Settings settings = Settings.builder().put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), false).build();
        AutoCreateIndex autoCreateIndex = newAutoCreateIndex(settings);
        IndexNotFoundException e = expectThrows(IndexNotFoundException.class, () ->
                autoCreateIndex.shouldAutoCreate(randomAlphaOfLengthBetween(1, 10), buildClusterState()));
        assertEquals("no such index and [action.auto_create_index] is [false]", e.getMessage());
    }

    public void testAutoCreationEnabled() {
        Settings settings = Settings.builder().put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), true).build();
        AutoCreateIndex autoCreateIndex = newAutoCreateIndex(settings);
        assertThat(autoCreateIndex.shouldAutoCreate(randomAlphaOfLengthBetween(1, 10), buildClusterState()), equalTo(true));
    }

    public void testDefaultAutoCreation() {
        // With no setting at all, auto-creation defaults to enabled.
        AutoCreateIndex autoCreateIndex = newAutoCreateIndex(Settings.EMPTY);
        assertThat(autoCreateIndex.shouldAutoCreate(randomAlphaOfLengthBetween(1, 10), buildClusterState()), equalTo(true));
    }

    public void testExistingIndex() {
        // An index that already exists is never auto-created, whatever the setting.
        Settings settings = Settings.builder().put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), randomFrom(true, false,
                randomAlphaOfLengthBetween(7, 10)).toString()).build();
        AutoCreateIndex autoCreateIndex = newAutoCreateIndex(settings);
        assertThat(autoCreateIndex.shouldAutoCreate(randomFrom("index1", "index2", "index3"),
                buildClusterState("index1", "index2", "index3")), equalTo(false));
    }

    public void testDynamicMappingDisabled() {
        // index.mapper.dynamic=false vetoes auto-creation even when the setting allows it.
        Settings settings = Settings.builder().put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), (randomFrom(true,
                randomAlphaOfLengthBetween(1, 10)).toString()))
                .put(MapperService.INDEX_MAPPER_DYNAMIC_SETTING.getKey(), false).build();
        AutoCreateIndex autoCreateIndex = newAutoCreateIndex(settings);
        IndexNotFoundException e = expectThrows(IndexNotFoundException.class, () ->
                autoCreateIndex.shouldAutoCreate(randomAlphaOfLengthBetween(1, 10), buildClusterState()));
        assertEquals("no such index and [index.mapper.dynamic] is [false]", e.getMessage());
    }

    public void testAutoCreationPatternEnabled() {
        Settings settings = Settings.builder().put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), randomFrom("+index*", "index*"))
                .build();
        AutoCreateIndex autoCreateIndex = newAutoCreateIndex(settings);
        ClusterState clusterState = ClusterState.builder(new ClusterName("test")).metaData(MetaData.builder()).build();
        assertThat(autoCreateIndex.shouldAutoCreate("index" + randomAlphaOfLengthBetween(1, 5), clusterState), equalTo(true));
        expectNotMatch(clusterState, autoCreateIndex, "does_not_match" + randomAlphaOfLengthBetween(1, 5));
    }

    public void testAutoCreationPatternDisabled() {
        Settings settings = Settings.builder().put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), "-index*").build();
        AutoCreateIndex autoCreateIndex = newAutoCreateIndex(settings);
        ClusterState clusterState = ClusterState.builder(new ClusterName("test")).metaData(MetaData.builder()).build();
        expectForbidden(clusterState, autoCreateIndex, "index" + randomAlphaOfLengthBetween(1, 5), "-index*");
        /* When patterns are specified, even if they are all negative, the default is can't create. So a pure negative pattern is the same
         * as false, really. */
        expectNotMatch(clusterState, autoCreateIndex, "does_not_match" + randomAlphaOfLengthBetween(1, 5));
    }

    public void testAutoCreationMultiplePatternsWithWildcards() {
        Settings settings = Settings.builder().put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(),
                randomFrom("+test*,-index*", "test*,-index*")).build();
        AutoCreateIndex autoCreateIndex = newAutoCreateIndex(settings);
        ClusterState clusterState = ClusterState.builder(new ClusterName("test")).metaData(MetaData.builder()).build();
        expectForbidden(clusterState, autoCreateIndex, "index" + randomAlphaOfLengthBetween(1, 5), "-index*");
        assertThat(autoCreateIndex.shouldAutoCreate("test" + randomAlphaOfLengthBetween(1, 5), clusterState), equalTo(true));
        expectNotMatch(clusterState, autoCreateIndex, "does_not_match" + randomAlphaOfLengthBetween(1, 5));
    }

    public void testAutoCreationMultiplePatternsNoWildcards() {
        Settings settings = Settings.builder().put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), "+test1,-index1").build();
        AutoCreateIndex autoCreateIndex = newAutoCreateIndex(settings);
        ClusterState clusterState = ClusterState.builder(new ClusterName("test")).metaData(MetaData.builder()).build();
        assertThat(autoCreateIndex.shouldAutoCreate("test1", clusterState), equalTo(true));
        expectNotMatch(clusterState, autoCreateIndex, "index" + randomAlphaOfLengthBetween(1, 5));
        expectNotMatch(clusterState, autoCreateIndex, "test" + randomAlphaOfLengthBetween(2, 5));
        expectNotMatch(clusterState, autoCreateIndex, "does_not_match" + randomAlphaOfLengthBetween(1, 5));
    }

    public void testAutoCreationMultipleIndexNames() {
        Settings settings = Settings.builder().put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), "test1,test2").build();
        AutoCreateIndex autoCreateIndex = newAutoCreateIndex(settings);
        ClusterState clusterState = ClusterState.builder(new ClusterName("test")).metaData(MetaData.builder()).build();
        assertThat(autoCreateIndex.shouldAutoCreate("test1", clusterState), equalTo(true));
        assertThat(autoCreateIndex.shouldAutoCreate("test2", clusterState), equalTo(true));
        expectNotMatch(clusterState, autoCreateIndex, "does_not_match" + randomAlphaOfLengthBetween(1, 5));
    }

    public void testAutoCreationConflictingPatternsFirstWins() {
        // The first matching pattern decides; later contradictory patterns are ignored.
        Settings settings = Settings.builder().put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(),
                "+test1,-test1,-test2,+test2").build();
        AutoCreateIndex autoCreateIndex = newAutoCreateIndex(settings);
        ClusterState clusterState = ClusterState.builder(new ClusterName("test")).metaData(MetaData.builder()).build();
        assertThat(autoCreateIndex.shouldAutoCreate("test1", clusterState), equalTo(true));
        expectForbidden(clusterState, autoCreateIndex, "test2", "-test2");
        expectNotMatch(clusterState, autoCreateIndex, "does_not_match" + randomAlphaOfLengthBetween(1, 5));
    }

    public void testUpdate() {
        // The setting is dynamic: applySettings must flip boolean values and accept patterns.
        boolean value = randomBoolean();
        Settings settings;
        if (value && randomBoolean()) {
            settings = Settings.EMPTY;
        } else {
            settings = Settings.builder().put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), value).build();
        }

        ClusterSettings clusterSettings = new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
        AutoCreateIndex autoCreateIndex = new AutoCreateIndex(settings, clusterSettings, new IndexNameExpressionResolver(settings));
        assertThat(autoCreateIndex.getAutoCreate().isAutoCreateIndex(), equalTo(value));

        Settings newSettings = Settings.builder().put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), !value).build();
        clusterSettings.applySettings(newSettings);
        assertThat(autoCreateIndex.getAutoCreate().isAutoCreateIndex(), equalTo(!value));

        newSettings = Settings.builder().put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), "logs-*").build();
        clusterSettings.applySettings(newSettings);
        assertThat(autoCreateIndex.getAutoCreate().isAutoCreateIndex(), equalTo(true));
        assertThat(autoCreateIndex.getAutoCreate().getExpressions().size(), equalTo(1));
        assertThat(autoCreateIndex.getAutoCreate().getExpressions().get(0).v1(), equalTo("logs-*"));
    }

    /** Builds a cluster state containing the given indices (one shard, one replica each). */
    private static ClusterState buildClusterState(String... indices) {
        MetaData.Builder metaData = MetaData.builder();
        for (String index : indices) {
            metaData.put(IndexMetaData.builder(index).settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1));
        }
        return ClusterState.builder(org.elasticsearch.cluster.ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY))
                .metaData(metaData).build();
    }

    /** Creates an AutoCreateIndex with fresh cluster settings and resolver from {@code settings}. */
    private AutoCreateIndex newAutoCreateIndex(Settings settings) {
        return new AutoCreateIndex(settings, new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS),
                new IndexNameExpressionResolver(settings));
    }

    /** Asserts that {@code index} matches no configured pattern and is therefore rejected. */
    private void expectNotMatch(ClusterState clusterState, AutoCreateIndex autoCreateIndex, String index) {
        IndexNotFoundException e = expectThrows(IndexNotFoundException.class, () ->
                autoCreateIndex.shouldAutoCreate(index, clusterState));
        assertEquals("no such index and [action.auto_create_index] ([" + autoCreateIndex.getAutoCreate() + "]) doesn't match",
                e.getMessage());
    }

    /** Asserts that {@code index} is explicitly forbidden by {@code forbiddingPattern}. */
    private void expectForbidden(ClusterState clusterState, AutoCreateIndex autoCreateIndex, String index, String forbiddingPattern) {
        IndexNotFoundException e = expectThrows(IndexNotFoundException.class, () ->
                autoCreateIndex.shouldAutoCreate(index, clusterState));
        assertEquals("no such index and [action.auto_create_index] contains [" + forbiddingPattern
                + "] which forbids automatic creation of the index", e.getMessage());
    }
}
/*
 * $Header: /home/cvs/jakarta-slide/webdavclient/clientlib/src/java/org/apache/webdav/lib/WebdavSession.java,v 1.2.2.2 2004/02/25 16:33:21 ib Exp $
 * $Revision: 1.2.2.2 $
 * $Date: 2004/02/25 16:33:21 $
 *
 * ====================================================================
 *
 * Copyright 1999-2002 The Apache Software Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package org.apache.webdav.lib;

import java.io.IOException;
import java.util.Arrays;

import org.apache.commons.httpclient.Credentials;
import org.apache.commons.httpclient.Header;
import org.apache.commons.httpclient.HostConfiguration;
import org.apache.commons.httpclient.HttpClient;
import org.apache.commons.httpclient.HttpConnection;
import org.apache.commons.httpclient.HttpConnectionManager;
import org.apache.commons.httpclient.HttpState;
import org.apache.commons.httpclient.HttpURL;
import org.apache.commons.httpclient.SimpleHttpConnectionManager;
import org.apache.commons.httpclient.UsernamePasswordCredentials;
import org.apache.commons.httpclient.params.HostParams;

/**
 * This WebdavSession class is for the session management of WebDAV clients.
 * This class saves and restores the requested client.
 *
 * Although this class is thread safe, it should only be accessed by one
 * concurrent thread, since the underlying protocol, HTTP, is not multiplexed.
 * If simultaneous operations are needed, it is recommended to create
 * additional threads, each having its own associated WebDAV client.
 *
 * Clients that use persistent connections SHOULD limit the number of
 * simultaneous connections that they maintain to a given server. A
 * single-user client SHOULD NOT maintain more than 2 connections with
 * any server or proxy. A proxy SHOULD use up to 2*N connections to
 * another server or proxy, where N is the number of simultaneously
 * active users. These guidelines are intended to improve HTTP response
 * times and avoid congestion.
 *
 * @author <a href="mailto:jericho@thinkfree.com">Park, Sung-Gu</a>
 */
public abstract class WebdavSession {

    // ------------------------------------------------------- Constructors

    /**
     * Default constructor.
     */
    public WebdavSession() {
        super();
    }

    // ---------------------------------------------------- Instance Variables

    /**
     * The Http client instance. Created lazily by getSessionInstance and
     * discarded by closeSession; null means "no session open".
     */
    protected HttpClient client;

    /**
     * Credentials to use for authentication against the target host.
     * When null, getSessionInstance derives them from the user info
     * embedded in the HttpURL, if any.
     */
    protected Credentials hostCredentials = null;

    /**
     * The hostname to use for the proxy, if any (null = no proxy).
     */
    protected String proxyHost = null;

    /**
     * Port number to use for proxy, if any (-1 = no proxy).
     */
    protected int proxyPort = -1;

    /**
     * Credentials to use for an authenticating proxy.
     */
    protected Credentials proxyCredentials = null;

    /**
     * Debug level.
     */
    protected int debug = 0;

    // ------------------------------------------------------------- Properties

    /**
     * Set debug level.
     */
    public void setDebug(int debug) {
        this.debug = debug;
    }

    // ------------------------------------------------------ Public methods

    /**
     * Get a <code>HttpClient</code> instance.
     * This method returns a new client instance the first time it is called.
     * The instance is then saved until it is closed or reset.
     *
     * @param httpURL The http URL to connect. Only the authority part is used.
     * @return An instance of <code>HttpClient</code>.
     * @exception IOException
     */
    public HttpClient getSessionInstance(HttpURL httpURL)
        throws IOException {
        return getSessionInstance(httpURL, false);
    }

    /**
     * Get a <code>HttpClient</code> instance.
     * This method returns a new client instance when reset is true;
     * otherwise it reuses the cached client if one exists.
     *
     * Configures host, optional proxy, and (possibly preemptive)
     * authentication on the fresh client. Connections are forced to
     * "Connection: close" via the default-headers host parameter.
     *
     * @param httpURL The http URL to connect. Only the authority part is used.
     * @param reset The reset flag to represent whether the saved information
     *              is used or not.
     * @return An instance of <code>HttpClient</code>.
     * @exception IOException
     */
    public HttpClient getSessionInstance(HttpURL httpURL, boolean reset)
        throws IOException {
        if (reset || client == null) {
            client = new HttpClient();
            // Set a state which allows lock tracking
            client.setState(new WebdavState());
            HostConfiguration hostConfig = client.getHostConfiguration();
            final HostParams params = new HostParams();
            // Disable keep-alive: every request carries "Connection: close"
            params.setParameter(HostParams.DEFAULT_HEADERS, Arrays.asList(new Header("Connection", "close")));
            hostConfig.setParams(params);
            hostConfig.setHost(httpURL);
            if (proxyHost != null && proxyPort > 0)
                hostConfig.setProxy(proxyHost, proxyPort);

            // Fall back to credentials embedded in the URL (user:password@host)
            if (hostCredentials == null) {
                String userName = httpURL.getUser();
                if (userName != null && userName.length() > 0) {
                    hostCredentials =
                        new UsernamePasswordCredentials(userName,
                                                        httpURL.getPassword());
                }
            }

            if (hostCredentials != null) {
                HttpState clientState = client.getState();
                // null realm = default credentials for any realm on this host
                clientState.setCredentials(null, httpURL.getHost(),
                                           hostCredentials);
                clientState.setAuthenticationPreemptive(true);
            }

            if (proxyCredentials != null) {
                client.getState().setProxyCredentials(null, proxyHost,
                        proxyCredentials);
            }
        }
        return client;
    }

    /**
     * Set credentials for authentication.
     *
     * @param credentials The credentials to use for authentication.
     */
    public void setCredentials(Credentials credentials) {
        hostCredentials = credentials;
    }

    /**
     * Set proxy info, to use proxying. Takes effect on the next client
     * created by getSessionInstance.
     */
    public void setProxy(String host, int port) {
        this.proxyHost = host;
        this.proxyPort = port;
    }

    /**
     * Set credentials for authenticating against a proxy.
     *
     * @param credentials The credentials to use for authentication.
     */
    public void setProxyCredentials(Credentials credentials) {
        proxyCredentials = credentials;
    }

    /**
     * Close a session and delete the connection information.
     * Safe to call when no session is open (no-op).
     *
     * @exception IOException Error in closing socket.
     */
    public void closeSession() throws IOException {
        if (client != null) {
            final HttpConnectionManager httpConnectionManager = client.getHttpConnectionManager();
            if (httpConnectionManager instanceof SimpleHttpConnectionManager) {
                // #closeIdleConnections may not close it inside SimpleHttpConnectionManager
                ((SimpleHttpConnectionManager) httpConnectionManager).shutdown();
            } else {
//                final HttpConnection connection = httpConnectionManager.getConnection(client.getHostConfiguration());
//                connection.releaseConnection();
                // Idle timeout of 0 closes every idle connection immediately
                httpConnectionManager.closeIdleConnections(0);
            }
            client = null;
        }
    }

    /**
     * Close a session and delete the connection information.
     *
     * @param client The HttpClient instance (ignored; the cached client
     *               is closed instead).
     * @exception IOException Error in closing socket.
     * @deprecated Replaced by closeSession()
     */
    public synchronized void closeSession(HttpClient client)
        throws IOException {
        closeSession();
    }

    /**
     * Progressing by the progress event.
     *
     * @param pe The progress event.
     */
    /*
    public void progressing(ProgressEvent pe) {
        if (debug > 3)
            System.err.println("[EVENT/WebdavSession] " +
                               "action:" + pe.getAction() +
                               ((pe.getResourceName() == null) ? "" :
                                ", resource:" + pe.getResourceName()) +
                               ", soMany:" + pe.getSoMany() +
                               ", remained:" + pe.getRemainedSize() +
                               ", total:" + pe.getTotalSize());
        getProgressUtil().fireProgress(pe);
    }
    */

    /**
     * Get the utility of this progress event and listener.
     *
     * @return ProgressUtil
     */
    /*
    public ProgressUtil getProgressUtil() {
        return sessionProgress;
    }
    */
}
package org.nebulae2us.stardust.sql.domain; import java.util.*; import org.nebulae2us.electron.*; import org.nebulae2us.electron.util.*; import org.nebulae2us.stardust.internal.util.*; import org.nebulae2us.stardust.my.domain.*; @Builder(destination=EntityMapping.class) public class EntityMappingBuilder<P> implements Wrappable<EntityMapping> { protected final EntityMapping $$$wrapped; protected final P $$$parentBuilder; public EntityMappingBuilder() { this.$$$wrapped = null; this.$$$parentBuilder = null; } public EntityMappingBuilder(P parentBuilder) { this.$$$wrapped = null; this.$$$parentBuilder = parentBuilder; } protected EntityMappingBuilder(EntityMapping wrapped) { if (wrapped == null) { throw new NullPointerException(); } this.$$$wrapped = wrapped; this.$$$parentBuilder = null; } public EntityMappingBuilder<P> storeTo(BuilderRepository repo, Object builderId) { repo.put(builderId, this); return this; } public EntityMapping getWrappedObject() { return this.$$$wrapped; } protected void verifyMutable() { if (this.$$$wrapped != null) { throw new IllegalStateException("Cannot mutate fields of immutable objects"); } } public P end() { return this.$$$parentBuilder; } public EntityMapping toEntityMapping() { return new Converter(new DestinationClassResolverByAnnotation(), true, Builders.IGNORED_TYPES).convert(this).to(EntityMapping.class); } private EntityBuilder<?> entity; public EntityBuilder<?> getEntity() { if (this.$$$wrapped != null && WrapHelper.valueNotSet(this.entity, EntityBuilder.class)) { Object o = WrapHelper.getValue(this.$$$wrapped, EntityMapping.class, "entity"); this.entity = new WrapConverter(Builders.DESTINATION_CLASS_RESOLVER, Builders.IGNORED_TYPES).convert(o).to(EntityBuilder.class); } return entity; } public void setEntity(EntityBuilder<?> entity) { verifyMutable(); this.entity = entity; } public EntityMappingBuilder<P> entity(EntityBuilder<?> entity) { verifyMutable(); this.entity = entity; return this; } public EntityMappingBuilder<P> 
entity$wrap(Entity entity) { verifyMutable(); this.entity = new WrapConverter(Builders.DESTINATION_CLASS_RESOLVER, Builders.IGNORED_TYPES).convert(entity).to(EntityBuilder.class); return this; } public EntityMappingBuilder<P> entity$restoreFrom(BuilderRepository repo, Object builderId) { verifyMutable(); Object restoredObject = repo.get(builderId); if (restoredObject == null) { if (repo.isSupportLazy()) { repo.addObjectStoredListener(builderId, new Procedure() { public void execute(Object... arguments) { EntityMappingBuilder.this.entity = (EntityBuilder<?>)arguments[0]; } }); } else { throw new IllegalStateException("Object does not exist with id " + builderId); } } else if (!(restoredObject instanceof EntityBuilder)) { throw new IllegalStateException("Type mismatch for id: " + builderId + ". " + EntityBuilder.class.getSimpleName() + " vs " + restoredObject.getClass().getSimpleName()); } else { this.entity = (EntityBuilder<?>)restoredObject; } return this; } public EntityBuilder<? extends EntityMappingBuilder<P>> entity$begin() { verifyMutable(); EntityBuilder<EntityMappingBuilder<P>> result = new EntityBuilder<EntityMappingBuilder<P>>(this); this.entity = result; return result; } private int discriminatorColumnIndex; public int getDiscriminatorColumnIndex() { if (this.$$$wrapped != null && WrapHelper.valueNotSet(this.discriminatorColumnIndex, int.class)) { Object o = WrapHelper.getValue(this.$$$wrapped, EntityMapping.class, "discriminatorColumnIndex"); this.discriminatorColumnIndex = new WrapConverter(Builders.DESTINATION_CLASS_RESOLVER, Builders.IGNORED_TYPES).convert(o).to(int.class); } return discriminatorColumnIndex; } public void setDiscriminatorColumnIndex(int discriminatorColumnIndex) { verifyMutable(); this.discriminatorColumnIndex = discriminatorColumnIndex; } public EntityMappingBuilder<P> discriminatorColumnIndex(int discriminatorColumnIndex) { verifyMutable(); this.discriminatorColumnIndex = discriminatorColumnIndex; return this; } private 
// ---------------------------------------------------------------------------
// identifierAttributeMappings: mapping builders for the entity's identifier
// attributes. This is auto-generated builder plumbing; the $-suffixed method
// names and $$$-prefixed members follow the code generator's conventions.
// ---------------------------------------------------------------------------

// NOTE(review): this declaration begins at the top of the visible chunk; any
// access modifier would sit on the (unseen) preceding line, so it is left as
// found. The sibling field attributeMappings below is declared private.
List<ScalarAttributeMappingBuilder<?>> identifierAttributeMappings;

/**
 * Returns the identifier attribute mapping builders. If this builder wraps an
 * existing {@code EntityMapping} and this field has not been set yet, the
 * value is lazily pulled out of the wrapped object and converted to builder
 * form first.
 */
public List<ScalarAttributeMappingBuilder<?>> getIdentifierAttributeMappings() {
    if (this.$$$wrapped != null && WrapHelper.valueNotSet(this.identifierAttributeMappings, List.class)) {
        Object o = WrapHelper.getValue(this.$$$wrapped, EntityMapping.class, "identifierAttributeMappings");
        this.identifierAttributeMappings = new WrapConverter(Builders.DESTINATION_CLASS_RESOLVER, Builders.IGNORED_TYPES).convert(o).to(List.class);
    }
    return identifierAttributeMappings;
}

/** Replaces the whole list; only allowed while the builder is still mutable. */
public void setIdentifierAttributeMappings(List<ScalarAttributeMappingBuilder<?>> identifierAttributeMappings) {
    verifyMutable();
    this.identifierAttributeMappings = identifierAttributeMappings;
}

/** Varargs convenience overload; delegates to the Collection overload. */
public EntityMappingBuilder<P> identifierAttributeMappings(ScalarAttributeMappingBuilder<?> ... identifierAttributeMappings) {
    verifyMutable();
    return identifierAttributeMappings(new ListBuilder<ScalarAttributeMappingBuilder<?>>().add(identifierAttributeMappings).toList());
}

/** Appends the given builders (a null collection is a no-op); returns this. */
public EntityMappingBuilder<P> identifierAttributeMappings(Collection<ScalarAttributeMappingBuilder<?>> identifierAttributeMappings) {
    verifyMutable();
    if (this.identifierAttributeMappings == null) {
        this.identifierAttributeMappings = new ArrayList<ScalarAttributeMappingBuilder<?>>();
    }
    if (identifierAttributeMappings != null) {
        for (ScalarAttributeMappingBuilder<?> e : identifierAttributeMappings) {
            CollectionUtils.addItem(this.identifierAttributeMappings, e);
        }
    }
    return this;
}

/**
 * Adds a fresh child builder to the list and returns it (not {@code this}),
 * so the caller can configure the new element and navigate back through the
 * child's parent reference.
 */
public ScalarAttributeMappingBuilder<? extends EntityMappingBuilder<P>> identifierAttributeMappings$addScalarAttributeMapping() {
    verifyMutable();
    if (this.identifierAttributeMappings == null) {
        this.identifierAttributeMappings = new ArrayList<ScalarAttributeMappingBuilder<?>>();
    }
    ScalarAttributeMappingBuilder<EntityMappingBuilder<P>> result = new ScalarAttributeMappingBuilder<EntityMappingBuilder<P>>(this);
    CollectionUtils.addItem(this.identifierAttributeMappings, result);
    return result;
}

/**
 * Fluent sub-scope for building list elements; obtained from
 * {@code identifierAttributeMappings$list()} and closed with {@code end()}.
 */
public class IdentifierAttributeMappings$$$builder<P1 extends EntityMappingBuilder<P>> {

    private final P1 $$$parentBuilder1;

    protected IdentifierAttributeMappings$$$builder(P1 parentBuilder) {
        this.$$$parentBuilder1 = parentBuilder;
    }

    /** Starts a new element builder and registers it with the outer list. */
    public ScalarAttributeMappingBuilder<IdentifierAttributeMappings$$$builder<P1>> scalarAttributeMapping$begin() {
        ScalarAttributeMappingBuilder<IdentifierAttributeMappings$$$builder<P1>> result = new ScalarAttributeMappingBuilder<IdentifierAttributeMappings$$$builder<P1>>(this);
        CollectionUtils.addItem(EntityMappingBuilder.this.identifierAttributeMappings, result);
        return result;
    }

    /** Returns to the parent builder that opened this scope. */
    public P1 end() {
        return this.$$$parentBuilder1;
    }
}

/** Opens a fluent list-building scope (ensures the backing list exists first). */
public IdentifierAttributeMappings$$$builder<? extends EntityMappingBuilder<P>> identifierAttributeMappings$list() {
    verifyMutable();
    if (this.identifierAttributeMappings == null) {
        this.identifierAttributeMappings = new ArrayList<ScalarAttributeMappingBuilder<?>>();
    }
    return new IdentifierAttributeMappings$$$builder<EntityMappingBuilder<P>>(this);
}

/** Varargs convenience overload of {@code identifierAttributeMappings$wrap}. */
public EntityMappingBuilder<P> identifierAttributeMappings$wrap(ScalarAttributeMapping ... identifierAttributeMappings) {
    return identifierAttributeMappings$wrap(new ListBuilder<ScalarAttributeMapping>().add(identifierAttributeMappings).toList());
}

/** Wraps plain model objects into builders and appends them to the list. */
public EntityMappingBuilder<P> identifierAttributeMappings$wrap(Collection<? extends ScalarAttributeMapping> identifierAttributeMappings) {
    verifyMutable();
    if (this.identifierAttributeMappings == null) {
        this.identifierAttributeMappings = new ArrayList<ScalarAttributeMappingBuilder<?>>();
    }
    if (identifierAttributeMappings != null) {
        for (ScalarAttributeMapping e : identifierAttributeMappings) {
            ScalarAttributeMappingBuilder<?> wrapped = new WrapConverter(Builders.DESTINATION_CLASS_RESOLVER, Builders.IGNORED_TYPES).convert(e).to(ScalarAttributeMappingBuilder.class);
            CollectionUtils.addItem(this.identifierAttributeMappings, wrapped);
        }
    }
    return this;
}

/** Varargs convenience overload of {@code identifierAttributeMappings$restoreFrom}. */
public EntityMappingBuilder<P> identifierAttributeMappings$restoreFrom(BuilderRepository repo, Object ... builderIds) {
    return identifierAttributeMappings$restoreFrom(repo, new ListBuilder<Object>().add(builderIds).toList());
}

/**
 * Appends builders previously stored in {@code repo} under the given ids.
 * An id with no stored object yet is tolerated only when the repository
 * supports lazy resolution (the element is appended later by a stored-object
 * listener); otherwise an {@code IllegalStateException} is thrown. A restored
 * object of the wrong type also raises {@code IllegalStateException}.
 */
public EntityMappingBuilder<P> identifierAttributeMappings$restoreFrom(BuilderRepository repo, Collection<Object> builderIds) {
    verifyMutable();
    if (this.identifierAttributeMappings == null) {
        this.identifierAttributeMappings = new ArrayList<ScalarAttributeMappingBuilder<?>>();
    }
    if (builderIds != null) {
        for (Object builderId : builderIds) {
            Object restoredObject = repo.get(builderId);
            if (restoredObject == null) {
                if (repo.isSupportLazy()) {
                    // Deferred append: runs when the object is finally stored.
                    repo.addObjectStoredListener(builderId, new Procedure() {
                        public void execute(Object... arguments) {
                            CollectionUtils.addItem(EntityMappingBuilder.this.identifierAttributeMappings, arguments[0]);
                        }
                    });
                } else {
                    throw new IllegalStateException("Object does not exist with id " + builderId);
                }
            } else if (!(restoredObject instanceof ScalarAttributeMappingBuilder)) {
                throw new IllegalStateException("Type mismatch for id: " + builderId + ". " + ScalarAttributeMappingBuilder.class.getSimpleName() + " vs " + restoredObject.getClass().getSimpleName());
            } else {
                CollectionUtils.addItem(this.identifierAttributeMappings, restoredObject);
            }
        }
    }
    return this;
}

// ---------------------------------------------------------------------------
// attributeMappings: mapping builders for all of the entity's attributes.
// Same generated plumbing as above, with one add$Xxx method per concrete
// AttributeMappingBuilder subtype.
// ---------------------------------------------------------------------------

private List<AttributeMappingBuilder<?>> attributeMappings;

/**
 * Returns the attribute mapping builders, lazily unwrapping them from the
 * wrapped {@code EntityMapping} (if any) when not yet set.
 */
public List<AttributeMappingBuilder<?>> getAttributeMappings() {
    if (this.$$$wrapped != null && WrapHelper.valueNotSet(this.attributeMappings, List.class)) {
        Object o = WrapHelper.getValue(this.$$$wrapped, EntityMapping.class, "attributeMappings");
        this.attributeMappings = new WrapConverter(Builders.DESTINATION_CLASS_RESOLVER, Builders.IGNORED_TYPES).convert(o).to(List.class);
    }
    return attributeMappings;
}

/** Replaces the whole list; only allowed while the builder is still mutable. */
public void setAttributeMappings(List<AttributeMappingBuilder<?>> attributeMappings) {
    verifyMutable();
    this.attributeMappings = attributeMappings;
}

/** Varargs convenience overload; delegates to the Collection overload. */
public EntityMappingBuilder<P> attributeMappings(AttributeMappingBuilder<?> ... attributeMappings) {
    verifyMutable();
    return attributeMappings(new ListBuilder<AttributeMappingBuilder<?>>().add(attributeMappings).toList());
}

/** Appends the given builders (a null collection is a no-op); returns this. */
public EntityMappingBuilder<P> attributeMappings(Collection<AttributeMappingBuilder<?>> attributeMappings) {
    verifyMutable();
    if (this.attributeMappings == null) {
        this.attributeMappings = new ArrayList<AttributeMappingBuilder<?>>();
    }
    if (attributeMappings != null) {
        for (AttributeMappingBuilder<?> e : attributeMappings) {
            CollectionUtils.addItem(this.attributeMappings, e);
        }
    }
    return this;
}

/** Adds and returns a fresh generic {@code AttributeMappingBuilder} child. */
public AttributeMappingBuilder<? extends EntityMappingBuilder<P>> attributeMappings$addAttributeMapping() {
    verifyMutable();
    if (this.attributeMappings == null) {
        this.attributeMappings = new ArrayList<AttributeMappingBuilder<?>>();
    }
    AttributeMappingBuilder<EntityMappingBuilder<P>> result = new AttributeMappingBuilder<EntityMappingBuilder<P>>(this);
    CollectionUtils.addItem(this.attributeMappings, result);
    return result;
}

/** Adds and returns a fresh {@code EntityAttributeMappingBuilder} child. */
public EntityAttributeMappingBuilder<? extends EntityMappingBuilder<P>> attributeMappings$addEntityAttributeMapping() {
    verifyMutable();
    if (this.attributeMappings == null) {
        this.attributeMappings = new ArrayList<AttributeMappingBuilder<?>>();
    }
    EntityAttributeMappingBuilder<EntityMappingBuilder<P>> result = new EntityAttributeMappingBuilder<EntityMappingBuilder<P>>(this);
    CollectionUtils.addItem(this.attributeMappings, result);
    return result;
}

/** Adds and returns a fresh {@code ScalarAttributeMappingBuilder} child. */
public ScalarAttributeMappingBuilder<? extends EntityMappingBuilder<P>> attributeMappings$addScalarAttributeMapping() {
    verifyMutable();
    if (this.attributeMappings == null) {
        this.attributeMappings = new ArrayList<AttributeMappingBuilder<?>>();
    }
    ScalarAttributeMappingBuilder<EntityMappingBuilder<P>> result = new ScalarAttributeMappingBuilder<EntityMappingBuilder<P>>(this);
    CollectionUtils.addItem(this.attributeMappings, result);
    return result;
}

/** Adds and returns a fresh {@code ValueObjectAttributeMappingBuilder} child. */
public ValueObjectAttributeMappingBuilder<? extends EntityMappingBuilder<P>> attributeMappings$addValueObjectAttributeMapping() {
    verifyMutable();
    if (this.attributeMappings == null) {
        this.attributeMappings = new ArrayList<AttributeMappingBuilder<?>>();
    }
    ValueObjectAttributeMappingBuilder<EntityMappingBuilder<P>> result = new ValueObjectAttributeMappingBuilder<EntityMappingBuilder<P>>(this);
    CollectionUtils.addItem(this.attributeMappings, result);
    return result;
}

/**
 * Fluent sub-scope for building attribute-mapping elements of any subtype;
 * obtained from {@code attributeMappings$list()} and closed with {@code end()}.
 */
public class AttributeMappings$$$builder<P1 extends EntityMappingBuilder<P>> {

    private final P1 $$$parentBuilder1;

    protected AttributeMappings$$$builder(P1 parentBuilder) {
        this.$$$parentBuilder1 = parentBuilder;
    }

    /** Starts a new generic attribute-mapping element builder. */
    public AttributeMappingBuilder<AttributeMappings$$$builder<P1>> attributeMapping$begin() {
        AttributeMappingBuilder<AttributeMappings$$$builder<P1>> result = new AttributeMappingBuilder<AttributeMappings$$$builder<P1>>(this);
        CollectionUtils.addItem(EntityMappingBuilder.this.attributeMappings, result);
        return result;
    }

    /** Starts a new entity attribute-mapping element builder. */
    public EntityAttributeMappingBuilder<AttributeMappings$$$builder<P1>> entityAttributeMapping$begin() {
        EntityAttributeMappingBuilder<AttributeMappings$$$builder<P1>> result = new EntityAttributeMappingBuilder<AttributeMappings$$$builder<P1>>(this);
        CollectionUtils.addItem(EntityMappingBuilder.this.attributeMappings, result);
        return result;
    }

    /** Starts a new scalar attribute-mapping element builder. */
    public ScalarAttributeMappingBuilder<AttributeMappings$$$builder<P1>> scalarAttributeMapping$begin() {
        ScalarAttributeMappingBuilder<AttributeMappings$$$builder<P1>> result = new ScalarAttributeMappingBuilder<AttributeMappings$$$builder<P1>>(this);
        CollectionUtils.addItem(EntityMappingBuilder.this.attributeMappings, result);
        return result;
    }

    /** Starts a new value-object attribute-mapping element builder. */
    public ValueObjectAttributeMappingBuilder<AttributeMappings$$$builder<P1>> valueObjectAttributeMapping$begin() {
        ValueObjectAttributeMappingBuilder<AttributeMappings$$$builder<P1>> result = new ValueObjectAttributeMappingBuilder<AttributeMappings$$$builder<P1>>(this);
        CollectionUtils.addItem(EntityMappingBuilder.this.attributeMappings, result);
        return result;
    }

    /** Returns to the parent builder that opened this scope. */
    public P1 end() {
        return this.$$$parentBuilder1;
    }
}

/** Opens a fluent list-building scope (ensures the backing list exists first). */
public AttributeMappings$$$builder<? extends EntityMappingBuilder<P>> attributeMappings$list() {
    verifyMutable();
    if (this.attributeMappings == null) {
        this.attributeMappings = new ArrayList<AttributeMappingBuilder<?>>();
    }
    return new AttributeMappings$$$builder<EntityMappingBuilder<P>>(this);
}

/** Varargs convenience overload of {@code attributeMappings$wrap}. */
public EntityMappingBuilder<P> attributeMappings$wrap(AttributeMapping ... attributeMappings) {
    return attributeMappings$wrap(new ListBuilder<AttributeMapping>().add(attributeMappings).toList());
}

/** Wraps plain model objects into builders and appends them to the list. */
public EntityMappingBuilder<P> attributeMappings$wrap(Collection<? extends AttributeMapping> attributeMappings) {
    verifyMutable();
    if (this.attributeMappings == null) {
        this.attributeMappings = new ArrayList<AttributeMappingBuilder<?>>();
    }
    if (attributeMappings != null) {
        for (AttributeMapping e : attributeMappings) {
            AttributeMappingBuilder<?> wrapped = new WrapConverter(Builders.DESTINATION_CLASS_RESOLVER, Builders.IGNORED_TYPES).convert(e).to(AttributeMappingBuilder.class);
            CollectionUtils.addItem(this.attributeMappings, wrapped);
        }
    }
    return this;
}

/** Varargs convenience overload of {@code attributeMappings$restoreFrom}. */
public EntityMappingBuilder<P> attributeMappings$restoreFrom(BuilderRepository repo, Object ... builderIds) {
    return attributeMappings$restoreFrom(repo, new ListBuilder<Object>().add(builderIds).toList());
}

/**
 * Appends builders previously stored in {@code repo} under the given ids;
 * same lazy/strict semantics as {@code identifierAttributeMappings$restoreFrom}
 * above, but for the general attribute-mapping list.
 */
public EntityMappingBuilder<P> attributeMappings$restoreFrom(BuilderRepository repo, Collection<Object> builderIds) {
    verifyMutable();
    if (this.attributeMappings == null) {
        this.attributeMappings = new ArrayList<AttributeMappingBuilder<?>>();
    }
    if (builderIds != null) {
        for (Object builderId : builderIds) {
            Object restoredObject = repo.get(builderId);
            if (restoredObject == null) {
                if (repo.isSupportLazy()) {
                    // Deferred append: runs when the object is finally stored.
                    repo.addObjectStoredListener(builderId, new Procedure() {
                        public void execute(Object... arguments) {
                            CollectionUtils.addItem(EntityMappingBuilder.this.attributeMappings, arguments[0]);
                        }
                    });
                } else {
                    throw new IllegalStateException("Object does not exist with id " + builderId);
                }
            } else if (!(restoredObject instanceof AttributeMappingBuilder)) {
                throw new IllegalStateException("Type mismatch for id: " + builderId + ". " + AttributeMappingBuilder.class.getSimpleName() + " vs " + restoredObject.getClass().getSimpleName());
            } else {
                CollectionUtils.addItem(this.attributeMappings, restoredObject);
            }
        }
    }
    return this;
}

/* CUSTOM CODE *********************************
 *
 * Put your own custom code below. These codes won't be discarded during generation.
 *
 */
}
package edu.scu.cs.robotics;

import android.app.Application;
import android.content.Context;
import android.content.res.Configuration;
import android.hardware.usb.UsbDevice;
import android.hardware.usb.UsbManager;
import android.os.Bundle;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import android.util.Log;
import android.widget.Toast;

import com.hoho.android.usbserial.driver.UsbSerialDriver;
import com.hoho.android.usbserial.driver.UsbSerialProber;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import edu.scu.cs.robotics.communication.JoysticksInput;
import edu.scu.cs.robotics.communication.USBDataReceiver;
import edu.scu.cs.robotics.communication.USBJoysticksInputSender;
import edu.scu.cs.robotics.util.Utility;

/**
 * Application singleton that owns the USB serial link to the robot: it
 * enumerates attached USB devices, opens and configures the serial driver,
 * and starts/stops the joystick-input sender and the data receiver services.
 *
 * Created by manishkarney on 4/21/14.
 */
public class H2OBotApplication extends Application {

    private static H2OBotApplication singleton;

    private final String LOG_TAG = H2OBotApplication.class.getSimpleName();

    // Every USB device found during setUpDrivers(), paired with its driver
    // (driver may be null when probing found no matching UsbSerialDriver).
    List<DeviceEntry> mAllUSBDevices = new ArrayList<DeviceEntry>();

    // Joystick related objects
    JoysticksInput mJoysticksInput;

    // USB communication related objects
    USBJoysticksInputSender mUSBJoysticksInputSenderService;
    USBDataReceiver mUSBDataReceiver;

    private static final int JOYSTICK_SENDER_SERVICE_DELAY = 0;
    private static final int JOYSTICK_SENDER_SERVICE_INTERVAL = 500;

    /**
     * Main-thread handler that background components reach through
     * {@code Utility.uiHandler}; currently only used to show toasts.
     */
    Handler mHandler = new Handler(Looper.getMainLooper()) {
        @Override
        public void handleMessage(Message inputMessage) {
            String purposeValue = inputMessage.getData().getString(Utility.UI_HANDLER_THREAD_PURPOSE, "DEF");
            if (purposeValue.equals(Utility.UI_HANDLER_THREAD_MAKE_TOAST)) {
                Bundle bundle = inputMessage.getData();
                String msg = bundle.getString("Message", "DEF");
                boolean isLong = bundle.getBoolean("isLong", false);
                // FIX: honour the isLong flag. Previously both branches of the
                // conditional passed Toast.LENGTH_SHORT, so long toasts were short.
                Toast.makeText(getApplicationContext(), msg,
                        isLong ? Toast.LENGTH_LONG : Toast.LENGTH_SHORT).show();
            }
        }
    };

    /** Returns the application singleton set in {@link #onCreate()}. */
    public H2OBotApplication getInstance() {
        return singleton;
    }

    @Override
    public void onCreate() {
        super.onCreate();
        singleton = this;
        // TODO remove this: components currently reach the UI thread via this
        // global handler reference.
        Utility.uiHandler = mHandler;
    }

    @Override
    public void onConfigurationChanged(Configuration newConfig) {
        super.onConfigurationChanged(newConfig);
    }

    @Override
    public void onLowMemory() {
        super.onLowMemory();
    }

    @Override
    public void onTerminate() {
        super.onTerminate();
    }

    /** Returns every probed device entry (populated by {@link #setUpDrivers()}). */
    public List<DeviceEntry> getAllDrivers() {
        return mAllUSBDevices;
    }

    /**
     * Probes every attached USB device for a serial driver and, when at least
     * one entry was found, sets up the TX/RX services for the first device.
     */
    public void setUpDrivers() {
        Utility.makeToastShort(this, "Setting Up Drivers");
        UsbManager usbManager = (UsbManager) getSystemService(Context.USB_SERVICE);
        for (final UsbDevice device : usbManager.getDeviceList().values()) {
            final List<UsbSerialDriver> drivers =
                    UsbSerialProber.probeSingleDevice(usbManager, device);
            Log.d(LOG_TAG, "Found usb device: " + device);
            if (drivers.isEmpty()) {
                Log.d(LOG_TAG, "  - No UsbSerialDriver available.");
                mAllUSBDevices.add(new DeviceEntry(device, null));
            } else {
                for (UsbSerialDriver driver : drivers) {
                    Log.d(LOG_TAG, "  + " + driver);
                    mAllUSBDevices.add(new DeviceEntry(device, driver));
                }
            }
        }
        if (!mAllUSBDevices.isEmpty()) {
            setUpUSBTxRx();
            Utility.makeToastShort(this, "Driver retrieved.");
        } else {
            Log.e(LOG_TAG, "Failed to retrieve driver of the connected device.");
            Utility.makeToastShort(this, "Failed to retrieve driver of the connected device.");
        }
    }

    /**
     * Configures the first probed device and starts the joystick sender and
     * data receiver services. Only called when mAllUSBDevices is non-empty.
     */
    public void setUpUSBTxRx() {
        // Only selecting the first device in the array.
        Log.d(LOG_TAG, "setUpUSBTxRx : allUSBDevices Size " + mAllUSBDevices.size()
                + " Driver:" + mAllUSBDevices.get(0).driver);
        Utility.makeToastShort(this, "Started Setting up USB TX RX");
        setUpDriverConfiguration(mAllUSBDevices.get(0));
        startGetUSBJoysticksInputSenderService();
        startUSBDataReceiverService();
    }

    /** Starts the periodic joystick-input sender on the first device. */
    public void startGetUSBJoysticksInputSenderService() {
        mUSBJoysticksInputSenderService =
                new USBJoysticksInputSender(mAllUSBDevices.get(0), mJoysticksInput);
        mUSBJoysticksInputSenderService.startSendingAtFixedRate(
                JOYSTICK_SENDER_SERVICE_DELAY, JOYSTICK_SENDER_SERVICE_INTERVAL);
    }

    /** Starts the receiver that consumes data arriving from the first device. */
    public void startUSBDataReceiverService() {
        mUSBDataReceiver = new USBDataReceiver(this, mAllUSBDevices.get(0));
    }

    /**
     * Opens the entry's serial driver and applies the fixed line settings
     * (115200 8N1). On failure the driver is closed and the entry's driver
     * reference is nulled out.
     */
    private void setUpDriverConfiguration(DeviceEntry entry) {
        try {
            entry.driver.open();
            entry.driver.setParameters(115200, 8,
                    UsbSerialDriver.STOPBITS_1, UsbSerialDriver.PARITY_NONE);
        } catch (IOException e) {
            Log.e(LOG_TAG, "Error setting parameters of driver.");
            e.printStackTrace();
            try {
                entry.driver.close();
            } catch (IOException e2) {
                // Ignore: already tearing down after a failed open/configure.
            }
            entry.driver = null;
        }
    }

    /** Public teardown entry point; stops services and closes the driver. */
    public void tearDownDrivers() {
        closeDriver();
    }

    /**
     * Cancels the sender/receiver tasks and closes the first device's driver.
     * FIX: previously this dereferenced the services and called
     * mAllUSBDevices.get(0) unconditionally, which threw NPE /
     * IndexOutOfBoundsException when teardown ran before (or after a failed)
     * setup; every step is now guarded.
     */
    private void closeDriver() {
        if (mUSBJoysticksInputSenderService != null) {
            mUSBJoysticksInputSenderService.cancelSenderTask();
        }
        if (mUSBDataReceiver != null) {
            mUSBDataReceiver.cancelReceiverTask();
        }
        if (mAllUSBDevices != null && !mAllUSBDevices.isEmpty() && mAllUSBDevices.get(0) != null) {
            try {
                UsbSerialDriver driver = mAllUSBDevices.get(0).driver;
                if (driver != null) {
                    driver.close();
                }
            } catch (IOException e) {
                Log.w(LOG_TAG, "Problem when trying to close driver.");
                e.printStackTrace();
            } finally {
                mAllUSBDevices.clear();
                if (mUSBJoysticksInputSenderService != null) {
                    mUSBJoysticksInputSenderService.cleanUp();
                }
                if (mUSBDataReceiver != null) {
                    mUSBDataReceiver.cleanUp();
                }
                mUSBDataReceiver = null;
                mUSBJoysticksInputSenderService = null;
            }
        }
    }

    /** Simple container for a UsbDevice and its driver. */
    public static class DeviceEntry {
        public UsbDevice device;
        public UsbSerialDriver driver;

        DeviceEntry(UsbDevice device, UsbSerialDriver driver) {
            this.device = device;
            this.driver = driver;
        }
    }

    public USBJoysticksInputSender getUSBJoysticksInputSenderService() {
        return mUSBJoysticksInputSenderService;
    }

    /** Lazily obtains the JoysticksInput singleton and caches it. */
    public JoysticksInput setUpAndGetJoysticksInput() {
        mJoysticksInput = JoysticksInput.getInstance();
        return mJoysticksInput;
    }
}
/*
 * Copyright (C) 2007 The Guava Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.common.collect;

import static com.google.common.truth.Truth.assertThat;

import com.google.common.annotations.GwtCompatible;
import com.google.common.annotations.GwtIncompatible;
import com.google.common.testing.NullPointerTester;

import junit.framework.TestCase;

import java.io.Serializable;
import java.util.Arrays;
import java.util.List;

/**
 * Unit test for {@code ObjectArrays}.
 *
 * @author Kevin Bourrillion
 */
@GwtCompatible(emulated = true)
public class ObjectArraysTest extends TestCase {

  @GwtIncompatible("NullPointerTester")
  public void testNullPointerExceptions() {
    NullPointerTester tester = new NullPointerTester();
    tester.testAllPublicStaticMethods(ObjectArrays.class);
  }

  @GwtIncompatible("ObjectArrays.newArray(Class, int)")
  public void testNewArray_fromClass_Empty() {
    String[] empty = ObjectArrays.newArray(String.class, 0);
    assertEquals(String[].class, empty.getClass());
    assertThat(empty).isEmpty();
  }

  @GwtIncompatible("ObjectArrays.newArray(Class, int)")
  public void testNewArray_fromClass_Nonempty() {
    String[] array = ObjectArrays.newArray(String.class, 2);
    assertEquals(String[].class, array.getClass());
    assertThat(array).hasLength(2);
    assertNull(array[0]);
  }

  @GwtIncompatible("ObjectArrays.newArray(Class, int)")
  public void testNewArray_fromClass_OfArray() {
    String[][] array = ObjectArrays.newArray(String[].class, 1);
    assertEquals(String[][].class, array.getClass());
    assertThat(array).hasLength(1);
    assertNull(array[0]);
  }

  public void testNewArray_fromArray_Empty() {
    String[] in = new String[0];
    String[] empty = ObjectArrays.newArray(in, 0);
    assertThat(empty).isEmpty();
  }

  public void testNewArray_fromArray_Nonempty() {
    String[] array = ObjectArrays.newArray(new String[0], 2);
    assertEquals(String[].class, array.getClass());
    assertThat(array).hasLength(2);
    assertNull(array[0]);
  }

  public void testNewArray_fromArray_OfArray() {
    String[][] array = ObjectArrays.newArray(new String[0][0], 1);
    assertEquals(String[][].class, array.getClass());
    assertThat(array).hasLength(1);
    assertNull(array[0]);
  }

  @GwtIncompatible("ObjectArrays.concat(Object[], Object[], Class)")
  public void testConcatEmptyEmpty() {
    String[] result
        = ObjectArrays.concat(new String[0], new String[0], String.class);
    assertEquals(String[].class, result.getClass());
    assertThat(result).isEmpty();
  }

  @GwtIncompatible("ObjectArrays.concat(Object[], Object[], Class)")
  public void testConcatEmptyNonempty() {
    String[] result = ObjectArrays.concat(
        new String[0], new String[] { "a", "b" }, String.class);
    assertEquals(String[].class, result.getClass());
    assertThat(result).asList().containsExactly("a", "b").inOrder();
  }

  @GwtIncompatible("ObjectArrays.concat(Object[], Object[], Class)")
  public void testConcatNonemptyEmpty() {
    String[] result = ObjectArrays.concat(
        new String[] { "a", "b" }, new String[0], String.class);
    assertEquals(String[].class, result.getClass());
    assertThat(result).asList().containsExactly("a", "b").inOrder();
  }

  @GwtIncompatible("ObjectArrays.concat(Object[], Object[], Class)")
  public void testConcatBasic() {
    String[] result = ObjectArrays.concat(
        new String[] { "a", "b" }, new String[] { "c", "d" }, String.class);
    assertEquals(String[].class, result.getClass());
    assertThat(result).asList().containsExactly("a", "b", "c", "d").inOrder();
  }

  @GwtIncompatible("ObjectArrays.concat(Object[], Object[], Class)")
  public void testConcatWithMoreGeneralType() {
    Serializable[] result
        = ObjectArrays.concat(new String[0], new String[0], Serializable.class);
    assertEquals(Serializable[].class, result.getClass());
  }

  public void testToArrayImpl1() {
    doTestToArrayImpl1(Lists.<Integer>newArrayList());
    doTestToArrayImpl1(Lists.newArrayList(1));
    doTestToArrayImpl1(Lists.newArrayList(1, null, 3));
  }

  // Checks that toArrayImpl matches List.toArray() for the given list.
  private void doTestToArrayImpl1(List<Integer> list) {
    Object[] reference = list.toArray();
    Object[] target = ObjectArrays.toArrayImpl(list);
    assertEquals(reference.getClass(), target.getClass());
    assertTrue(Arrays.equals(reference, target));
  }

  public void testToArrayImpl2() {
    doTestToArrayImpl2(Lists.<Integer>newArrayList(), new Integer[0], false);
    doTestToArrayImpl2(Lists.<Integer>newArrayList(), new Integer[1], true);

    doTestToArrayImpl2(Lists.newArrayList(1), new Integer[0], false);
    doTestToArrayImpl2(Lists.newArrayList(1), new Integer[1], true);
    doTestToArrayImpl2(Lists.newArrayList(1), new Integer[] { 2, 3 }, true);

    doTestToArrayImpl2(Lists.newArrayList(1, null, 3), new Integer[0], false);
    doTestToArrayImpl2(Lists.newArrayList(1, null, 3), new Integer[2], false);
    doTestToArrayImpl2(Lists.newArrayList(1, null, 3), new Integer[3], true);
  }

  /**
   * Checks that toArrayImpl(list, array) matches List.toArray(array),
   * including whether the supplied array was filled in place
   * ({@code expectModify}) or a fresh array was allocated.
   */
  private void doTestToArrayImpl2(List<Integer> list, Integer[] array1,
      boolean expectModify) {
    Integer[] starting = ObjectArrays.arraysCopyOf(array1, array1.length);
    Integer[] array2 = ObjectArrays.arraysCopyOf(array1, array1.length);
    Object[] reference = list.toArray(array1);

    Object[] target = ObjectArrays.toArrayImpl(list, array2);

    assertEquals(reference.getClass(), target.getClass());
    // FIX: this assertion was duplicated verbatim in the original; one copy
    // removed.
    assertTrue(Arrays.equals(reference, target));

    Object[] expectedArray1 = expectModify ? reference : starting;
    Object[] expectedArray2 = expectModify ? target : starting;
    assertTrue(Arrays.equals(expectedArray1, array1));
    assertTrue(Arrays.equals(expectedArray2, array2));
  }

  public void testPrependZeroElements() {
    String[] result = ObjectArrays.concat("foo", new String[] {});
    assertThat(result).asList().contains("foo");
  }

  public void testPrependOneElement() {
    String[] result = ObjectArrays.concat("foo", new String[] { "bar" });
    assertThat(result).asList().containsExactly("foo", "bar").inOrder();
  }

  public void testPrependTwoElements() {
    String[] result = ObjectArrays.concat("foo", new String[] { "bar", "baz" });
    assertThat(result).asList().containsExactly("foo", "bar", "baz").inOrder();
  }

  public void testAppendZeroElements() {
    String[] result = ObjectArrays.concat(new String[] {}, "foo");
    assertThat(result).asList().contains("foo");
  }

  public void testAppendOneElement() {
    String[] result = ObjectArrays.concat(new String[] { "foo" }, "bar");
    assertThat(result).asList().containsExactly("foo", "bar").inOrder();
  }

  public void testAppendTwoElements() {
    String[] result = ObjectArrays.concat(new String[] { "foo", "bar" }, "baz");
    assertThat(result).asList().containsExactly("foo", "bar", "baz").inOrder();
  }

  public void testEmptyArrayToEmpty() {
    doTestNewArrayEquals(new Object[0], 0);
  }

  public void testEmptyArrayToNonEmpty() {
    checkArrayEquals(new Long[5], ObjectArrays.newArray(new Long[0], 5));
  }

  public void testNonEmptyToShorter() {
    checkArrayEquals(new String[9], ObjectArrays.newArray(new String[10], 9));
  }

  public void testNonEmptyToSameLength() {
    doTestNewArrayEquals(new String[10], 10);
  }

  public void testNonEmptyToLonger() {
    checkArrayEquals(new String[10],
        ObjectArrays.newArray(new String[] { "a", "b", "c", "d", "e" }, 10));
  }

  // Asserts component type and contents both match, with a readable message.
  private static void checkArrayEquals(Object[] expected, Object[] actual) {
    assertTrue("expected(" + expected.getClass() + "): "
        + Arrays.toString(expected)
        + " actual(" + actual.getClass() + "): " + Arrays.toString(actual),
        arrayEquals(expected, actual));
  }

  private static boolean arrayEquals(Object[] array1, Object[] array2) {
    assertSame(array1.getClass(), array2.getClass());
    return Arrays.equals(array1, array2);
  }

  private static void doTestNewArrayEquals(Object[] expected, int length) {
    checkArrayEquals(expected, ObjectArrays.newArray(expected, length));
  }
}
/*
 * Copyright (c) 2010 ThruPoint Ltd
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
package org.jscep.message;

import java.io.IOException;
import java.security.PrivateKey;
import java.security.Provider;
import java.security.cert.CertificateEncodingException;
import java.security.cert.X509Certificate;
import java.util.Collection;
import java.util.Collections;

import org.bouncycastle.asn1.ASN1Object;
import org.bouncycastle.asn1.cms.AttributeTable;
import org.bouncycastle.cert.jcajce.JcaCertStore;
import org.bouncycastle.cms.CMSAbsentContent;
import org.bouncycastle.cms.CMSAttributeTableGenerator;
import org.bouncycastle.cms.CMSEnvelopedData;
import org.bouncycastle.cms.CMSException;
import org.bouncycastle.cms.CMSProcessable;
import org.bouncycastle.cms.CMSProcessableByteArray;
import org.bouncycastle.cms.CMSSignedData;
import org.bouncycastle.cms.CMSSignedDataGenerator;
import org.bouncycastle.cms.DefaultSignedAttributeTableGenerator;
import org.bouncycastle.cms.SignerInfoGenerator;
import org.bouncycastle.cms.jcajce.JcaSignerInfoGeneratorBuilder;
import org.bouncycastle.operator.ContentSigner;
import org.bouncycastle.operator.DigestCalculatorProvider;
import org.bouncycastle.operator.OperatorCreationException;
import org.bouncycastle.operator.jcajce.JcaContentSignerBuilder;
import org.bouncycastle.operator.jcajce.JcaDigestCalculatorProviderBuilder;
import org.bouncycastle.pkcs.PKCS10CertificationRequest;
import org.jscep.transaction.PkiStatus;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * This class is used to encode a <tt>pkiMessage</tt> into a PKCS #7 signedData
 * object.
 *
 * @see PkiMessageDecoder
 */
public final class PkiMessageEncoder {
    /** OID of the PKCS #7 "data" content type. */
    private static final String DATA = "1.2.840.113549.1.7.1";

    private static final Logger LOGGER = LoggerFactory
            .getLogger(PkiMessageEncoder.class);

    private final PrivateKey signerKey;
    private final X509Certificate signerId;
    private final PkcsPkiEnvelopeEncoder enveloper;
    private final String signatureAlgorithm;

    /**
     * Creates a new <tt>PkiMessageEncoder</tt> instance using the default
     * signature algorithm ({@code SHA1withRSA}).
     *
     * @param signerKey
     *            the key to use to sign the <tt>signedData</tt>.
     * @param signerId
     *            the certificate to use to identify the signer.
     * @param enveloper
     *            the enveloper used for encoding the <tt>messageData</tt>
     */
    public PkiMessageEncoder(PrivateKey signerKey, X509Certificate signerId,
            PkcsPkiEnvelopeEncoder enveloper) {
        this(signerKey, signerId, enveloper, "SHA1withRSA");
    }

    /**
     * Creates a new <tt>PkiMessageEncoder</tt> instance.
     *
     * @param signerKey
     *            the key to use to sign the <tt>signedData</tt>.
     * @param signerId
     *            the certificate to use to identify the signer.
     * @param enveloper
     *            the enveloper used for encoding the <tt>messageData</tt>
     * @param signatureAlgorithm
     *            the JCA name of the signature algorithm to sign with
     *            (e.g. {@code SHA1withRSA}).
     */
    public PkiMessageEncoder(PrivateKey signerKey, X509Certificate signerId,
            PkcsPkiEnvelopeEncoder enveloper, String signatureAlgorithm) {
        this.signerKey = signerKey;
        this.signerId = signerId;
        this.enveloper = enveloper;
        this.signatureAlgorithm = signatureAlgorithm;
    }

    /**
     * Encodes the provided <tt>PkiMessage</tt> into a PKCS #7
     * <tt>signedData</tt>.
     *
     * @param message
     *            the <tt>PkiMessage</tt> to encode.
     * @return the encoded <tt>signedData</tt>
     * @throws MessageEncodingException
     *             if there is a problem encoding the <tt>PkiMessage</tt>
     */
    public CMSSignedData encode(PkiMessage<?> message)
            throws MessageEncodingException {
        LOGGER.debug("Encoding pkiMessage");
        LOGGER.debug("Encoding message: {}", message);
        CMSProcessable content = getContent(message);

        LOGGER.debug(
                "Signing pkiMessage using key belonging to [issuer={}; serial={}]",
                signerId.getIssuerDN(), signerId.getSerialNumber());
        try {
            CMSSignedDataGenerator generator = new CMSSignedDataGenerator();
            generator.addSignerInfoGenerator(getSignerInfo(message));
            generator.addCertificates(getCertificates());
            LOGGER.debug("Signing {} content", content);
            // NOTE(review): uses the legacy provider-based generate(...)
            // overload; newer BC versions favour the typed-content overloads —
            // confirm against the BC version in use before changing.
            CMSSignedData pkiMessage = generator.generate(DATA, content, true,
                    (Provider) null, true);
            LOGGER.debug("Finished encoding pkiMessage");
            return pkiMessage;
        } catch (Exception e) {
            // FIX: previously a catch (CMSException e) block preceded this one
            // with an identical body; the two were merged since both simply
            // wrap the cause in a MessageEncodingException.
            throw new MessageEncodingException(e);
        }
    }

    /**
     * Returns the content to sign: the enveloped message data, or an absent
     * content for a non-SUCCESS CertRep (which carries no messageData).
     */
    private CMSProcessable getContent(PkiMessage<?> message)
            throws MessageEncodingException {
        CMSProcessable signable;

        boolean hasMessageData = true;
        if (message instanceof CertRep) {
            CertRep response = (CertRep) message;
            if (response.getPkiStatus() != PkiStatus.SUCCESS) {
                hasMessageData = false;
            }
        }
        if (hasMessageData) {
            try {
                CMSEnvelopedData ed = encodeMessage(message);
                signable = new CMSProcessableByteArray(ed.getEncoded());
            } catch (IOException e) {
                throw new MessageEncodingException(e);
            }
        } else {
            signable = new CMSAbsentContent();
        }
        return signable;
    }

    /**
     * Serializes the message data (raw bytes, a PKCS #10 request, a nested
     * signedData, or any other ASN.1 object) and envelops it.
     */
    private CMSEnvelopedData encodeMessage(PkiMessage<?> message)
            throws MessageEncodingException {
        Object messageData = message.getMessageData();
        byte[] bytes;
        if (messageData instanceof byte[]) {
            bytes = (byte[]) messageData;
        } else if (messageData instanceof PKCS10CertificationRequest) {
            try {
                bytes = ((PKCS10CertificationRequest) messageData).getEncoded();
            } catch (IOException e) {
                throw new MessageEncodingException(e);
            }
        } else if (messageData instanceof CMSSignedData) {
            try {
                bytes = ((CMSSignedData) messageData).getEncoded();
            } catch (IOException e) {
                throw new MessageEncodingException(e);
            }
        } else {
            try {
                bytes = ((ASN1Object) messageData).getEncoded();
            } catch (IOException e) {
                throw new MessageEncodingException(e);
            }
        }
        return enveloper.encode(bytes);
    }

    /** Builds a certificate store holding only the signer's certificate. */
    private JcaCertStore getCertificates() throws MessageEncodingException {
        Collection<X509Certificate> certColl = Collections.singleton(signerId);
        JcaCertStore certStore;
        try {
            certStore = new JcaCertStore(certColl);
        } catch (CertificateEncodingException e) {
            throw new MessageEncodingException(e);
        }
        return certStore;
    }

    /**
     * Builds the SignerInfo generator carrying the SCEP signed attributes for
     * the given message.
     */
    private SignerInfoGenerator getSignerInfo(PkiMessage<?> message)
            throws MessageEncodingException {
        JcaSignerInfoGeneratorBuilder signerInfoBuilder = new JcaSignerInfoGeneratorBuilder(
                getDigestCalculator());
        signerInfoBuilder
                .setSignedAttributeGenerator(getTableGenerator(message));
        SignerInfoGenerator signerInfo;
        try {
            signerInfo = signerInfoBuilder.build(getContentSigner(), signerId);
        } catch (Exception e) {
            throw new MessageEncodingException(e);
        }
        return signerInfo;
    }

    /** Derives the signed-attribute table generator from the message. */
    private CMSAttributeTableGenerator getTableGenerator(PkiMessage<?> message) {
        AttributeTableFactory attrFactory = new AttributeTableFactory();
        AttributeTable signedAttrs = attrFactory.fromPkiMessage(message);
        CMSAttributeTableGenerator atGen = new DefaultSignedAttributeTableGenerator(
                signedAttrs);

        return atGen;
    }

    private DigestCalculatorProvider getDigestCalculator()
            throws MessageEncodingException {
        try {
            return new JcaDigestCalculatorProviderBuilder().build();
        } catch (OperatorCreationException e) {
            throw new MessageEncodingException(e);
        }
    }

    private ContentSigner getContentSigner() throws OperatorCreationException {
        return new JcaContentSignerBuilder(signatureAlgorithm).build(signerKey);
    }
}
/*
Copyright 2021 The Kubernetes Authors.

Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
in compliance with the License. You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software distributed under the License
is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
or implied. See the License for the specific language governing permissions and limitations under
the License.
*/
package io.kubernetes.client.openapi.models;

import com.google.gson.annotations.SerializedName;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import java.util.Objects;

/**
 * Reference to the API object that carries parameters for an IngressClass. The referenced
 * resource may be cluster-scoped or namespace-scoped, as selected by {@code scope}.
 *
 * <p>Generated model — field names below are the wire (JSON) names used by the Kubernetes API.
 */
@ApiModel(
    description =
        "IngressClassParametersReference identifies an API object. This can be used to specify a cluster or namespace-scoped resource.")
@javax.annotation.Generated(
    value = "org.openapitools.codegen.languages.JavaClientCodegen",
    date = "2021-12-10T19:11:23.904Z[Etc/UTC]")
public class V1IngressClassParametersReference {
  public static final String SERIALIZED_NAME_API_GROUP = "apiGroup";
  public static final String SERIALIZED_NAME_KIND = "kind";
  public static final String SERIALIZED_NAME_NAME = "name";
  public static final String SERIALIZED_NAME_NAMESPACE = "namespace";
  public static final String SERIALIZED_NAME_SCOPE = "scope";

  // API group of the referenced resource; the core group when unset.
  @SerializedName(SERIALIZED_NAME_API_GROUP)
  private String apiGroup;

  // Kind of the referenced resource (required).
  @SerializedName(SERIALIZED_NAME_KIND)
  private String kind;

  // Name of the referenced resource (required).
  @SerializedName(SERIALIZED_NAME_NAME)
  private String name;

  // Namespace of the referenced resource; only meaningful for namespace-scoped references.
  @SerializedName(SERIALIZED_NAME_NAMESPACE)
  private String namespace;

  // "Cluster" (default) or "Namespace".
  @SerializedName(SERIALIZED_NAME_SCOPE)
  private String scope;

  /**
   * Fluent setter for {@code apiGroup}.
   *
   * @param apiGroup API group of the referenced resource
   * @return this object, for call chaining
   */
  public V1IngressClassParametersReference apiGroup(String apiGroup) {
    this.apiGroup = apiGroup;
    return this;
  }

  /**
   * Returns the API group of the referenced resource, or {@code null} for the core group.
   *
   * @return apiGroup
   */
  @javax.annotation.Nullable
  @ApiModelProperty(
      value =
          "APIGroup is the group for the resource being referenced. If APIGroup is not specified, the specified Kind must be in the core API group. For any other third-party types, APIGroup is required.")
  public String getApiGroup() {
    return apiGroup;
  }

  public void setApiGroup(String apiGroup) {
    this.apiGroup = apiGroup;
  }

  /**
   * Fluent setter for {@code kind}.
   *
   * @param kind type of the referenced resource
   * @return this object, for call chaining
   */
  public V1IngressClassParametersReference kind(String kind) {
    this.kind = kind;
    return this;
  }

  /**
   * Returns the type of the referenced resource.
   *
   * @return kind
   */
  @ApiModelProperty(required = true, value = "Kind is the type of resource being referenced.")
  public String getKind() {
    return kind;
  }

  public void setKind(String kind) {
    this.kind = kind;
  }

  /**
   * Fluent setter for {@code name}.
   *
   * @param name name of the referenced resource
   * @return this object, for call chaining
   */
  public V1IngressClassParametersReference name(String name) {
    this.name = name;
    return this;
  }

  /**
   * Returns the name of the referenced resource.
   *
   * @return name
   */
  @ApiModelProperty(required = true, value = "Name is the name of resource being referenced.")
  public String getName() {
    return name;
  }

  public void setName(String name) {
    this.name = name;
  }

  /**
   * Fluent setter for {@code namespace}.
   *
   * @param namespace namespace of the referenced resource
   * @return this object, for call chaining
   */
  public V1IngressClassParametersReference namespace(String namespace) {
    this.namespace = namespace;
    return this;
  }

  /**
   * Returns the namespace of the referenced resource. Required when {@code scope} is
   * {@code "Namespace"}; must be unset when {@code scope} is {@code "Cluster"}.
   *
   * @return namespace
   */
  @javax.annotation.Nullable
  @ApiModelProperty(
      value =
          "Namespace is the namespace of the resource being referenced. This field is required when scope is set to \"Namespace\" and must be unset when scope is set to \"Cluster\".")
  public String getNamespace() {
    return namespace;
  }

  public void setNamespace(String namespace) {
    this.namespace = namespace;
  }

  /**
   * Fluent setter for {@code scope}.
   *
   * @param scope {@code "Cluster"} (default) or {@code "Namespace"}
   * @return this object, for call chaining
   */
  public V1IngressClassParametersReference scope(String scope) {
    this.scope = scope;
    return this;
  }

  /**
   * Returns whether this reference targets a cluster-scoped or namespace-scoped resource.
   *
   * @return scope
   */
  @javax.annotation.Nullable
  @ApiModelProperty(
      value =
          "Scope represents if this refers to a cluster or namespace scoped resource. This may be set to \"Cluster\" (default) or \"Namespace\".")
  public String getScope() {
    return scope;
  }

  public void setScope(String scope) {
    this.scope = scope;
  }

  @Override
  public boolean equals(java.lang.Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    // Field-by-field comparison; Objects.equals handles nulls uniformly.
    V1IngressClassParametersReference that = (V1IngressClassParametersReference) o;
    return Objects.equals(apiGroup, that.apiGroup)
        && Objects.equals(kind, that.kind)
        && Objects.equals(name, that.name)
        && Objects.equals(namespace, that.namespace)
        && Objects.equals(scope, that.scope);
  }

  @Override
  public int hashCode() {
    return Objects.hash(apiGroup, kind, name, namespace, scope);
  }

  @Override
  public String toString() {
    // Same rendered form as the generated StringBuilder version, built by concatenation.
    return "class V1IngressClassParametersReference {\n"
        + " apiGroup: " + toIndentedString(apiGroup) + "\n"
        + " kind: " + toIndentedString(kind) + "\n"
        + " name: " + toIndentedString(name) + "\n"
        + " namespace: " + toIndentedString(namespace) + "\n"
        + " scope: " + toIndentedString(scope) + "\n"
        + "}";
  }

  /**
   * Renders {@code o} for {@link #toString()}, prefixing every embedded newline so nested
   * objects stay visually grouped under their field label; {@code null} renders as "null".
   */
  private String toIndentedString(java.lang.Object o) {
    return (o == null) ? "null" : o.toString().replace("\n", "\n ");
  }
}
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.hive.metastore; import com.facebook.presto.hive.ForHiveMetastore; import com.facebook.presto.hive.HiveClientConfig; import com.facebook.presto.hive.HiveCluster; import com.facebook.presto.hive.HiveViewNotSupportedException; import com.facebook.presto.hive.RetryDriver; import com.facebook.presto.hive.TableAlreadyExistsException; import com.facebook.presto.spi.PrestoException; import com.facebook.presto.spi.SchemaNotFoundException; import com.facebook.presto.spi.SchemaTableName; import com.facebook.presto.spi.TableNotFoundException; import com.google.common.base.Throwables; import com.google.common.cache.CacheBuilder; import com.google.common.cache.CacheLoader; import com.google.common.cache.LoadingCache; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.common.util.concurrent.ExecutionError; import com.google.common.util.concurrent.UncheckedExecutionException; import io.airlift.units.Duration; import org.apache.hadoop.hive.common.FileUtils; import org.apache.hadoop.hive.metastore.TableType; import org.apache.hadoop.hive.metastore.Warehouse; import org.apache.hadoop.hive.metastore.api.AlreadyExistsException; import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege; import 
org.apache.hadoop.hive.metastore.api.HiveObjectRef; import org.apache.hadoop.hive.metastore.api.HiveObjectType; import org.apache.hadoop.hive.metastore.api.InvalidObjectException; import org.apache.hadoop.hive.metastore.api.InvalidOperationException; import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.metastore.api.NoSuchObjectException; import org.apache.hadoop.hive.metastore.api.Partition; import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet; import org.apache.hadoop.hive.metastore.api.PrincipalType; import org.apache.hadoop.hive.metastore.api.PrivilegeBag; import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo; import org.apache.hadoop.hive.metastore.api.Role; import org.apache.hadoop.hive.metastore.api.Table; import org.apache.hadoop.hive.metastore.api.UnknownDBException; import org.apache.thrift.TException; import org.weakref.jmx.Flatten; import org.weakref.jmx.Managed; import javax.annotation.concurrent.ThreadSafe; import javax.inject.Inject; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Objects; import java.util.Optional; import java.util.Set; import java.util.concurrent.Callable; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; import java.util.function.Function; import static com.facebook.presto.hive.HiveErrorCode.HIVE_METASTORE_ERROR; import static com.facebook.presto.hive.HiveUtil.PRESTO_VIEW_FLAG; import static com.facebook.presto.hive.HiveUtil.isPrestoView; import static com.facebook.presto.hive.metastore.HivePrivilege.OWNERSHIP; import static com.facebook.presto.hive.metastore.HivePrivilege.parsePrivilege; import static com.google.common.base.MoreObjects.toStringHelper; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.cache.CacheLoader.asyncReloading; import static com.google.common.collect.Iterables.transform; import static 
java.lang.String.format; import static java.util.Objects.requireNonNull; import static java.util.concurrent.TimeUnit.MILLISECONDS; import static java.util.function.Function.identity; import static java.util.stream.Collectors.toMap; import static java.util.stream.Collectors.toSet; import static java.util.stream.StreamSupport.stream; import static org.apache.hadoop.hive.metastore.api.PrincipalType.ROLE; import static org.apache.hadoop.hive.metastore.api.PrincipalType.USER; import static org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.HIVE_FILTER_FIELD_PARAMS; /** * Hive Metastore Cache */ @ThreadSafe public class CachingHiveMetastore implements HiveMetastore { private final CachingHiveMetastoreStats stats = new CachingHiveMetastoreStats(); protected final HiveCluster clientProvider; private final LoadingCache<String, List<String>> databaseNamesCache; private final LoadingCache<String, Optional<Database>> databaseCache; private final LoadingCache<String, Optional<List<String>>> tableNamesCache; private final LoadingCache<String, Optional<List<String>>> viewNamesCache; private final LoadingCache<HiveTableName, Optional<List<String>>> partitionNamesCache; private final LoadingCache<HiveTableName, Optional<Table>> tableCache; private final LoadingCache<HivePartitionName, Optional<Partition>> partitionCache; private final LoadingCache<PartitionFilter, Optional<List<String>>> partitionFilterCache; private final LoadingCache<String, Set<String>> userRolesCache; private final LoadingCache<UserTableKey, Set<HivePrivilege>> userTablePrivileges; @Inject public CachingHiveMetastore(HiveCluster hiveCluster, @ForHiveMetastore ExecutorService executor, HiveClientConfig hiveClientConfig) { this(requireNonNull(hiveCluster, "hiveCluster is null"), requireNonNull(executor, "executor is null"), requireNonNull(hiveClientConfig, "hiveClientConfig is null").getMetastoreCacheTtl(), hiveClientConfig.getMetastoreRefreshInterval()); } public CachingHiveMetastore(HiveCluster 
hiveCluster, ExecutorService executor, Duration cacheTtl, Duration refreshInterval) { this.clientProvider = requireNonNull(hiveCluster, "hiveCluster is null"); long expiresAfterWriteMillis = requireNonNull(cacheTtl, "cacheTtl is null").toMillis(); long refreshMills = requireNonNull(refreshInterval, "refreshInterval is null").toMillis(); databaseNamesCache = CacheBuilder.newBuilder() .expireAfterWrite(expiresAfterWriteMillis, MILLISECONDS) .refreshAfterWrite(refreshMills, MILLISECONDS) .build(asyncReloading(new CacheLoader<String, List<String>>() { @Override public List<String> load(String key) throws Exception { return loadAllDatabases(); } }, executor)); databaseCache = CacheBuilder.newBuilder() .expireAfterWrite(expiresAfterWriteMillis, MILLISECONDS) .refreshAfterWrite(refreshMills, MILLISECONDS) .build(asyncReloading(new CacheLoader<String, Optional<Database>>() { @Override public Optional<Database> load(String databaseName) throws Exception { return loadDatabase(databaseName); } }, executor)); tableNamesCache = CacheBuilder.newBuilder() .expireAfterWrite(expiresAfterWriteMillis, MILLISECONDS) .refreshAfterWrite(refreshMills, MILLISECONDS) .build(asyncReloading(new CacheLoader<String, Optional<List<String>>>() { @Override public Optional<List<String>> load(String databaseName) throws Exception { return loadAllTables(databaseName); } }, executor)); tableCache = CacheBuilder.newBuilder() .expireAfterWrite(expiresAfterWriteMillis, MILLISECONDS) .refreshAfterWrite(refreshMills, MILLISECONDS) .build(asyncReloading(new CacheLoader<HiveTableName, Optional<Table>>() { @Override public Optional<Table> load(HiveTableName hiveTableName) throws Exception { return loadTable(hiveTableName); } }, executor)); viewNamesCache = CacheBuilder.newBuilder() .expireAfterWrite(expiresAfterWriteMillis, MILLISECONDS) .refreshAfterWrite(refreshMills, MILLISECONDS) .build(asyncReloading(new CacheLoader<String, Optional<List<String>>>() { @Override public Optional<List<String>> load(String 
databaseName) throws Exception { return loadAllViews(databaseName); } }, executor)); partitionNamesCache = CacheBuilder.newBuilder() .expireAfterWrite(expiresAfterWriteMillis, MILLISECONDS) .refreshAfterWrite(refreshMills, MILLISECONDS) .build(asyncReloading(new CacheLoader<HiveTableName, Optional<List<String>>>() { @Override public Optional<List<String>> load(HiveTableName hiveTableName) throws Exception { return loadPartitionNames(hiveTableName); } }, executor)); partitionFilterCache = CacheBuilder.newBuilder() .expireAfterWrite(expiresAfterWriteMillis, MILLISECONDS) .refreshAfterWrite(refreshMills, MILLISECONDS) .build(asyncReloading(new CacheLoader<PartitionFilter, Optional<List<String>>>() { @Override public Optional<List<String>> load(PartitionFilter partitionFilter) throws Exception { return loadPartitionNamesByParts(partitionFilter); } }, executor)); partitionCache = CacheBuilder.newBuilder() .expireAfterWrite(expiresAfterWriteMillis, MILLISECONDS) .refreshAfterWrite(refreshMills, MILLISECONDS) .build(asyncReloading(new CacheLoader<HivePartitionName, Optional<Partition>>() { @Override public Optional<Partition> load(HivePartitionName partitionName) throws Exception { return loadPartitionByName(partitionName); } @Override public Map<HivePartitionName, Optional<Partition>> loadAll(Iterable<? 
extends HivePartitionName> partitionNames) throws Exception { return loadPartitionsByNames(partitionNames); } }, executor)); userRolesCache = CacheBuilder.newBuilder() .expireAfterWrite(expiresAfterWriteMillis, MILLISECONDS) .refreshAfterWrite(refreshMills, MILLISECONDS) .build(asyncReloading(new CacheLoader<String, Set<String>>() { @Override public Set<String> load(String user) throws Exception { return loadRoles(user); } }, executor)); userTablePrivileges = CacheBuilder.newBuilder() .expireAfterWrite(expiresAfterWriteMillis, MILLISECONDS) .refreshAfterWrite(refreshMills, MILLISECONDS) .build(asyncReloading(new CacheLoader<UserTableKey, Set<HivePrivilege>>() { @Override public Set<HivePrivilege> load(UserTableKey key) throws Exception { return loadTablePrivileges(key.getUser(), key.getDatabase(), key.getTable()); } }, executor)); } @Managed @Flatten public CachingHiveMetastoreStats getStats() { return stats; } @Override @Managed public void flushCache() { databaseNamesCache.invalidateAll(); tableNamesCache.invalidateAll(); viewNamesCache.invalidateAll(); partitionNamesCache.invalidateAll(); databaseCache.invalidateAll(); tableCache.invalidateAll(); partitionCache.invalidateAll(); partitionFilterCache.invalidateAll(); userTablePrivileges.invalidateAll(); } private static <K, V> V get(LoadingCache<K, V> cache, K key) { try { return cache.get(key); } catch (ExecutionException | UncheckedExecutionException | ExecutionError e) { throw Throwables.propagate(e.getCause()); } } private static <K, V> Map<K, V> getAll(LoadingCache<K, V> cache, Iterable<K> keys) { try { return cache.getAll(keys); } catch (ExecutionException | UncheckedExecutionException | ExecutionError e) { throw Throwables.propagate(e.getCause()); } } @Override public List<String> getAllDatabases() { return get(databaseNamesCache, ""); } private List<String> loadAllDatabases() throws Exception { try { return retry() .stopOnIllegalExceptions() .run("getAllDatabases", stats.getGetAllDatabases().wrap(() -> { 
try (HiveMetastoreClient client = clientProvider.createMetastoreClient()) { return client.getAllDatabases(); } })); } catch (TException e) { throw new PrestoException(HIVE_METASTORE_ERROR, e); } } @Override public Optional<Database> getDatabase(String databaseName) { return get(databaseCache, databaseName); } private Optional<Database> loadDatabase(String databaseName) throws Exception { try { return retry() .stopOn(NoSuchObjectException.class) .stopOnIllegalExceptions() .run("getDatabase", stats.getGetDatabase().wrap(() -> { try (HiveMetastoreClient client = clientProvider.createMetastoreClient()) { return Optional.of(client.getDatabase(databaseName)); } })); } catch (NoSuchObjectException e) { return Optional.empty(); } catch (TException e) { throw new PrestoException(HIVE_METASTORE_ERROR, e); } } @Override public Optional<List<String>> getAllTables(String databaseName) { return get(tableNamesCache, databaseName); } private Optional<List<String>> loadAllTables(String databaseName) throws Exception { Callable<List<String>> getAllTables = stats.getGetAllTables().wrap(() -> { try (HiveMetastoreClient client = clientProvider.createMetastoreClient()) { return client.getAllTables(databaseName); } }); Callable<Void> getDatabase = stats.getGetDatabase().wrap(() -> { try (HiveMetastoreClient client = clientProvider.createMetastoreClient()) { client.getDatabase(databaseName); return null; } }); try { return retry() .stopOn(NoSuchObjectException.class) .stopOnIllegalExceptions() .run("getAllTables", () -> { List<String> tables = getAllTables.call(); if (tables.isEmpty()) { // Check to see if the database exists getDatabase.call(); } return Optional.of(tables); }); } catch (NoSuchObjectException e) { return Optional.empty(); } catch (TException e) { throw new PrestoException(HIVE_METASTORE_ERROR, e); } } @Override public Optional<Table> getTable(String databaseName, String tableName) { return get(tableCache, HiveTableName.table(databaseName, tableName)); } @Override public 
Optional<List<String>> getAllViews(String databaseName) { return get(viewNamesCache, databaseName); } private Optional<List<String>> loadAllViews(String databaseName) throws Exception { try { return retry() .stopOn(UnknownDBException.class) .stopOnIllegalExceptions() .run("getAllViews", stats.getAllViews().wrap(() -> { try (HiveMetastoreClient client = clientProvider.createMetastoreClient()) { String filter = HIVE_FILTER_FIELD_PARAMS + PRESTO_VIEW_FLAG + " = \"true\""; return Optional.of(client.getTableNamesByFilter(databaseName, filter)); } })); } catch (UnknownDBException e) { return Optional.empty(); } catch (TException e) { throw new PrestoException(HIVE_METASTORE_ERROR, e); } } @Override public void createTable(Table table) { try { retry() .stopOn(AlreadyExistsException.class, InvalidObjectException.class, MetaException.class, NoSuchObjectException.class) .stopOnIllegalExceptions() .run("createTable", stats.getCreateTable().wrap(() -> { try (HiveMetastoreClient client = clientProvider.createMetastoreClient()) { client.createTable(table); } return null; })); } catch (AlreadyExistsException e) { throw new TableAlreadyExistsException(new SchemaTableName(table.getDbName(), table.getTableName())); } catch (NoSuchObjectException e) { throw new SchemaNotFoundException(table.getDbName()); } catch (TException e) { throw new PrestoException(HIVE_METASTORE_ERROR, e); } catch (Exception e) { if (e instanceof InterruptedException) { Thread.currentThread().interrupt(); } throw Throwables.propagate(e); } finally { invalidateTable(table.getDbName(), table.getTableName()); } } @Override public void dropTable(String databaseName, String tableName) { try { retry() .stopOn(NoSuchObjectException.class) .stopOnIllegalExceptions() .run("dropTable", stats.getDropTable().wrap(() -> { try (HiveMetastoreClient client = clientProvider.createMetastoreClient()) { client.dropTable(databaseName, tableName, true); } return null; })); } catch (NoSuchObjectException e) { throw new 
TableNotFoundException(new SchemaTableName(databaseName, tableName)); } catch (TException e) { throw new PrestoException(HIVE_METASTORE_ERROR, e); } catch (Exception e) { if (e instanceof InterruptedException) { Thread.currentThread().interrupt(); } throw Throwables.propagate(e); } finally { invalidateTable(databaseName, tableName); } } protected void invalidateTable(String databaseName, String tableName) { tableCache.invalidate(new HiveTableName(databaseName, tableName)); tableNamesCache.invalidate(databaseName); viewNamesCache.invalidate(databaseName); invalidatePartitionCache(databaseName, tableName); } @Override public void alterTable(String databaseName, String tableName, Table table) { try { retry() .stopOn(InvalidOperationException.class, MetaException.class) .stopOnIllegalExceptions() .run("alterTable", stats.getAlterTable().wrap(() -> { try (HiveMetastoreClient client = clientProvider.createMetastoreClient()) { Optional<Table> source = loadTable(new HiveTableName(databaseName, tableName)); if (!source.isPresent()) { throw new TableNotFoundException(new SchemaTableName(databaseName, tableName)); } client.alterTable(databaseName, tableName, table); } return null; })); } catch (NoSuchObjectException e) { throw new TableNotFoundException(new SchemaTableName(databaseName, tableName)); } catch (InvalidOperationException | MetaException e) { throw Throwables.propagate(e); } catch (TException e) { throw new PrestoException(HIVE_METASTORE_ERROR, e); } catch (Exception e) { if (e instanceof InterruptedException) { Thread.currentThread().interrupt(); } throw Throwables.propagate(e); } finally { invalidateTable(databaseName, tableName); invalidateTable(table.getDbName(), table.getTableName()); } } private Optional<Table> loadTable(HiveTableName hiveTableName) throws Exception { try { return retry() .stopOn(NoSuchObjectException.class, HiveViewNotSupportedException.class) .stopOnIllegalExceptions() .run("getTable", stats.getGetTable().wrap(() -> { try 
(HiveMetastoreClient client = clientProvider.createMetastoreClient()) { Table table = client.getTable(hiveTableName.getDatabaseName(), hiveTableName.getTableName()); if (table.getTableType().equals(TableType.VIRTUAL_VIEW.name()) && (!isPrestoView(table))) { throw new HiveViewNotSupportedException(new SchemaTableName(hiveTableName.getDatabaseName(), hiveTableName.getTableName())); } return Optional.of(table); } })); } catch (NoSuchObjectException e) { return Optional.empty(); } catch (TException e) { throw new PrestoException(HIVE_METASTORE_ERROR, e); } } @Override public Optional<List<String>> getPartitionNames(String databaseName, String tableName) { return get(partitionNamesCache, HiveTableName.table(databaseName, tableName)); } private Optional<List<String>> loadPartitionNames(HiveTableName hiveTableName) throws Exception { try { return retry() .stopOn(NoSuchObjectException.class) .stopOnIllegalExceptions() .run("getPartitionNames", stats.getGetPartitionNames().wrap(() -> { try (HiveMetastoreClient client = clientProvider.createMetastoreClient()) { return Optional.of(client.getPartitionNames(hiveTableName.getDatabaseName(), hiveTableName.getTableName())); } })); } catch (NoSuchObjectException e) { return Optional.empty(); } catch (TException e) { throw new PrestoException(HIVE_METASTORE_ERROR, e); } } @Override public Optional<List<String>> getPartitionNamesByParts(String databaseName, String tableName, List<String> parts) { return get(partitionFilterCache, PartitionFilter.partitionFilter(databaseName, tableName, parts)); } private Optional<List<String>> loadPartitionNamesByParts(PartitionFilter partitionFilter) throws Exception { try { return retry() .stopOn(NoSuchObjectException.class) .stopOnIllegalExceptions() .run("getPartitionNamesByParts", stats.getGetPartitionNamesPs().wrap(() -> { try (HiveMetastoreClient client = clientProvider.createMetastoreClient()) { return Optional.of(client.getPartitionNamesFiltered( 
partitionFilter.getHiveTableName().getDatabaseName(), partitionFilter.getHiveTableName().getTableName(), partitionFilter.getParts())); } })); } catch (NoSuchObjectException e) { return Optional.empty(); } catch (TException e) { throw new PrestoException(HIVE_METASTORE_ERROR, e); } } @Override public void addPartitions(String databaseName, String tableName, List<Partition> partitions) { if (partitions.isEmpty()) { return; } try { retry() .stopOn(AlreadyExistsException.class, InvalidObjectException.class, MetaException.class, NoSuchObjectException.class, PrestoException.class) .stopOnIllegalExceptions() .run("addPartitions", stats.getAddPartitions().wrap(() -> { try (HiveMetastoreClient client = clientProvider.createMetastoreClient()) { int partitionsAdded = client.addPartitions(partitions); if (partitionsAdded != partitions.size()) { throw new PrestoException(HIVE_METASTORE_ERROR, format("Hive metastore only added %s of %s partitions", partitionsAdded, partitions.size())); } return null; } })); } catch (AlreadyExistsException e) { // todo partition already exists exception throw new TableNotFoundException(new SchemaTableName(databaseName, tableName)); } catch (NoSuchObjectException e) { throw new TableNotFoundException(new SchemaTableName(databaseName, tableName)); } catch (TException e) { throw new PrestoException(HIVE_METASTORE_ERROR, e); } catch (Exception e) { if (e instanceof InterruptedException) { Thread.currentThread().interrupt(); } throw Throwables.propagate(e); } finally { // todo do we need to invalidate all partitions? 
invalidatePartitionCache(databaseName, tableName); } } @Override public void dropPartition(String databaseName, String tableName, List<String> parts) { try { retry() .stopOn(NoSuchObjectException.class, MetaException.class) .stopOnIllegalExceptions() .run("dropPartition", stats.getDropPartition().wrap(() -> { try (HiveMetastoreClient client = clientProvider.createMetastoreClient()) { client.dropPartition(databaseName, tableName, parts, true); } return null; })); } catch (NoSuchObjectException e) { throw new TableNotFoundException(new SchemaTableName(databaseName, tableName)); } catch (TException e) { throw new PrestoException(HIVE_METASTORE_ERROR, e); } catch (Exception e) { if (e instanceof InterruptedException) { Thread.currentThread().interrupt(); } throw Throwables.propagate(e); } finally { invalidatePartitionCache(databaseName, tableName); } } @Override public void dropPartitionByName(String databaseName, String tableName, String partitionName) { try { retry() .stopOn(NoSuchObjectException.class, MetaException.class) .stopOnIllegalExceptions() .run("dropPartitionByName", stats.getDropPartitionByName().wrap(() -> { try (HiveMetastoreClient client = clientProvider.createMetastoreClient()) { // It is observed that: (examples below assumes a table with one partition column `ds`) // * When a partition doesn't exist (e.g. ds=2015-09-99), this thrift call is a no-op. It doesn't throw any exception. // * When a typo exists in partition column name (e.g. dxs=2015-09-01), this thrift call will delete ds=2015-09-01. 
client.dropPartitionByName(databaseName, tableName, partitionName, true); } return null; })); } catch (NoSuchObjectException e) { throw new TableNotFoundException(new SchemaTableName(databaseName, tableName)); } catch (TException e) { throw new PrestoException(HIVE_METASTORE_ERROR, e); } catch (Exception e) { if (e instanceof InterruptedException) { Thread.currentThread().interrupt(); } throw Throwables.propagate(e); } finally { invalidatePartitionCache(databaseName, tableName); } } private void invalidatePartitionCache(String databaseName, String tableName) { HiveTableName hiveTableName = HiveTableName.table(databaseName, tableName); partitionNamesCache.invalidate(hiveTableName); partitionCache.asMap().keySet().stream() .filter(partitionName -> partitionName.getHiveTableName().equals(hiveTableName)) .forEach(partitionCache::invalidate); partitionFilterCache.asMap().keySet().stream() .filter(partitionFilter -> partitionFilter.getHiveTableName().equals(hiveTableName)) .forEach(partitionFilterCache::invalidate); } @Override public Optional<Map<String, Partition>> getPartitionsByNames(String databaseName, String tableName, List<String> partitionNames) { Iterable<HivePartitionName> names = transform(partitionNames, name -> HivePartitionName.partition(databaseName, tableName, name)); ImmutableMap.Builder<String, Partition> partitionsByName = ImmutableMap.builder(); Map<HivePartitionName, Optional<Partition>> all = getAll(partitionCache, names); for (Entry<HivePartitionName, Optional<Partition>> entry : all.entrySet()) { if (!entry.getValue().isPresent()) { return Optional.empty(); } partitionsByName.put(entry.getKey().getPartitionName(), entry.getValue().get()); } return Optional.of(partitionsByName.build()); } @Override public Optional<Partition> getPartition(String databaseName, String tableName, String partitionName) { HivePartitionName name = HivePartitionName.partition(databaseName, tableName, partitionName); return get(partitionCache, name); } private 
// Loads one partition from the Hive metastore with retries.
// Optional.empty() signals "partition does not exist" (NoSuchObjectException);
// any other Thrift failure is surfaced as a PrestoException.
// NOTE(review): `retry()`, `stats` and `clientProvider` are members of the
// enclosing class declared above this excerpt — not visible here.
Optional<Partition> loadPartitionByName(HivePartitionName partitionName)
        throws Exception
{
    requireNonNull(partitionName, "partitionName is null");
    try {
        return retry()
                .stopOn(NoSuchObjectException.class)
                .stopOnIllegalExceptions()
                .run("getPartitionsByNames", stats.getGetPartitionByName().wrap(() -> {
                    try (HiveMetastoreClient client = clientProvider.createMetastoreClient()) {
                        return Optional.of(client.getPartitionByName(
                                partitionName.getHiveTableName().getDatabaseName(),
                                partitionName.getHiveTableName().getTableName(),
                                partitionName.getPartitionName()));
                    }
                }));
    }
    catch (NoSuchObjectException e) {
        // Missing partition is an expected outcome, not an error.
        return Optional.empty();
    }
    catch (TException e) {
        throw new PrestoException(HIVE_METASTORE_ERROR, e);
    }
}

// Batch-loads a set of partitions that must all belong to the same table
// (enforced per element below). Returns a map keyed by the requested
// partition names; on NoSuchObjectException the WHOLE batch is mapped to
// Optional.empty().
private Map<HivePartitionName, Optional<Partition>> loadPartitionsByNames(Iterable<? extends HivePartitionName> partitionNames)
        throws Exception
{
    requireNonNull(partitionNames, "partitionNames is null");
    checkArgument(!Iterables.isEmpty(partitionNames), "partitionNames is empty");

    // All entries share one table; the first entry fixes database/table
    // and the partition-column order used to rebuild partition ids below.
    HivePartitionName firstPartition = Iterables.get(partitionNames, 0);

    HiveTableName hiveTableName = firstPartition.getHiveTableName();
    String databaseName = hiveTableName.getDatabaseName();
    String tableName = hiveTableName.getTableName();

    List<String> partitionsToFetch = new ArrayList<>();
    for (HivePartitionName partitionName : partitionNames) {
        checkArgument(partitionName.getHiveTableName().equals(hiveTableName), "Expected table name %s but got %s", hiveTableName, partitionName.getHiveTableName());
        partitionsToFetch.add(partitionName.getPartitionName());
    }

    List<String> partitionColumnNames = ImmutableList.copyOf(Warehouse.makeSpecFromName(firstPartition.getPartitionName()).keySet());

    try {
        return retry()
                .stopOn(NoSuchObjectException.class)
                .stopOnIllegalExceptions()
                .run("getPartitionsByNames", stats.getGetPartitionsByNames().wrap(() -> {
                    try (HiveMetastoreClient client = clientProvider.createMetastoreClient()) {
                        ImmutableMap.Builder<HivePartitionName, Optional<Partition>> partitions = ImmutableMap.builder();
                        for (Partition partition : client.getPartitionsByNames(databaseName, tableName, partitionsToFetch)) {
                            // Rebuild the canonical partition id from the returned
                            // values so the map key matches what callers asked for.
                            String partitionId = FileUtils.makePartName(partitionColumnNames, partition.getValues(), null);
                            partitions.put(HivePartitionName.partition(databaseName, tableName, partitionId), Optional.of(partition));
                        }
                        return partitions.build();
                    }
                }));
    }
    catch (NoSuchObjectException e) {
        // assume none of the partitions in the batch are available
        return stream(partitionNames.spliterator(), false)
                .collect(toMap(identity(), (name) -> Optional.empty()));
    }
    catch (TException e) {
        throw new PrestoException(HIVE_METASTORE_ERROR, e);
    }
}

// Cached role lookup; `userRolesCache` is a cache declared above this
// excerpt (presumably a Guava LoadingCache backed by loadRoles — TODO confirm).
@Override
public Set<String> getRoles(String user)
{
    return get(userRolesCache, user);
}

// Cache loader for getRoles: asks the metastore for the roles granted to
// `user` as a USER principal. A null list from Thrift becomes an empty set.
private Set<String> loadRoles(String user)
{
    try (HiveMetastoreClient client = clientProvider.createMetastoreClient()) {
        List<Role> roles = client.listRoles(user, USER);
        if (roles == null) {
            return ImmutableSet.of();
        }
        return ImmutableSet.copyOf(roles.stream()
                .map(Role::getRoleName)
                .collect(toSet()));
    }
    catch (TException e) {
        throw new PrestoException(HIVE_METASTORE_ERROR, e);
    }
}

// Database-level privileges: OWNERSHIP (if the user owns the database)
// plus whatever the metastore reports for the DATABASE object.
@Override
public Set<HivePrivilege> getDatabasePrivileges(String user, String databaseName)
{
    ImmutableSet.Builder<HivePrivilege> privileges = ImmutableSet.builder();

    if (isDatabaseOwner(user, databaseName)) {
        privileges.add(OWNERSHIP);
    }
    privileges.addAll(getPrivileges(user, new HiveObjectRef(HiveObjectType.DATABASE, databaseName, null, null, null)));

    return privileges.build();
}

// Cached table-level privileges; the loader is loadTablePrivileges below.
@Override
public Set<HivePrivilege> getTablePrivileges(String user, String databaseName, String tableName)
{
    return get(userTablePrivileges, new UserTableKey(user, tableName, databaseName));
}

// Grants the given privileges on a table to `grantee`, treating the grantee
// as a ROLE when the name matches a known role and as a USER otherwise.
// Always invalidates the privilege cache entry, even on failure.
@Override
public void grantTablePrivileges(String databaseName, String tableName, String grantee, Set<PrivilegeGrantInfo> privilegeGrantInfoSet)
{
    try {
        retry()
                .stopOnIllegalExceptions()
                .run("grantTablePrivileges", stats.getGrantTablePrivileges().wrap(() -> {
                    try (HiveMetastoreClient metastoreClient = clientProvider.createMetastoreClient()) {
                        PrincipalType principalType;
                        // Role names take precedence over user names for the grantee.
                        if (metastoreClient.getRoleNames().contains(grantee)) {
                            principalType = ROLE;
                        }
                        else {
                            principalType = USER;
                        }

                        ImmutableList.Builder<HiveObjectPrivilege> privilegeBagBuilder = ImmutableList.builder();
                        for (PrivilegeGrantInfo privilegeGrantInfo : privilegeGrantInfoSet) {
                            privilegeBagBuilder.add(
                                    new HiveObjectPrivilege(new HiveObjectRef(HiveObjectType.TABLE, databaseName, tableName, null, null),
                                            grantee,
                                            principalType,
                                            privilegeGrantInfo));
                        }
                        // TODO: Check whether the user/role exists in the hive metastore.
                        // TODO: Check whether the user already has the given privilege.
                        metastoreClient.grantPrivileges(new PrivilegeBag(privilegeBagBuilder.build()));
                    }
                    return null;
                }));
    }
    catch (TException e) {
        throw new PrestoException(HIVE_METASTORE_ERROR, e);
    }
    catch (Exception e) {
        // Restore the interrupt flag before propagating.
        if (e instanceof InterruptedException) {
            Thread.currentThread().interrupt();
        }
        throw Throwables.propagate(e);
    }
    finally {
        // Drop the stale cache entry regardless of grant success.
        userTablePrivileges.invalidate(new UserTableKey(grantee, tableName, databaseName));
    }
}

// Returns true when `user` holds `hivePrivilege` on the table WITH the
// grant option. Only direct user privileges are inspected (not roles).
// NOTE(review): getUserPrivileges().get(user) could be null if the user
// has no entry — would NPE in the for-loop; behavior kept as-is.
@Override
public boolean hasPrivilegeWithGrantOptionOnTable(String user, String databaseName, String tableName, HivePrivilege hivePrivilege)
{
    try (HiveMetastoreClient metastoreClient = clientProvider.createMetastoreClient()) {
        PrincipalPrivilegeSet principalPrivilegeSet = metastoreClient.getPrivilegeSet(new HiveObjectRef(HiveObjectType.TABLE, databaseName, tableName, null, null), user, null);
        for (PrivilegeGrantInfo privilegeGrantInfo : principalPrivilegeSet.getUserPrivileges().get(user)) {
            if (privilegeGrantInfo.getPrivilege().equalsIgnoreCase(hivePrivilege.name()) && privilegeGrantInfo.isGrantOption()) {
                return true;
            }
        }
        return false;
    }
    catch (TException e) {
        throw new PrestoException(HIVE_METASTORE_ERROR, e);
    }
}

// Cache loader for getTablePrivileges: OWNERSHIP (if table owner) plus
// metastore-reported privileges on the TABLE object.
private Set<HivePrivilege> loadTablePrivileges(String user, String databaseName, String tableName)
{
    ImmutableSet.Builder<HivePrivilege> privileges = ImmutableSet.builder();

    if (isTableOwner(user, databaseName, tableName)) {
        privileges.add(OWNERSHIP);
    }
    privileges.addAll(getPrivileges(user, new HiveObjectRef(HiveObjectType.TABLE, databaseName, tableName, null, null)));

    return privileges.build();
}

// Fetches the privilege set for an arbitrary Hive object and flattens the
// user's direct grants and all role grants into HivePrivilege values.
private Set<HivePrivilege> getPrivileges(String user, HiveObjectRef objectReference)
{
    ImmutableSet.Builder<HivePrivilege> privileges = ImmutableSet.builder();
    try (HiveMetastoreClient client = clientProvider.createMetastoreClient()) {
        PrincipalPrivilegeSet privilegeSet = client.getPrivilegeSet(objectReference, user, null);

        if (privilegeSet != null) {
            Map<String, List<PrivilegeGrantInfo>> userPrivileges = privilegeSet.getUserPrivileges();
            if (userPrivileges != null) {
                privileges.addAll(toGrants(userPrivileges.get(user)));
            }
            for (List<PrivilegeGrantInfo> rolePrivileges : privilegeSet.getRolePrivileges().values()) {
                privileges.addAll(toGrants(rolePrivileges));
            }
            // We do not add the group permissions as Hive does not seem to process these
        }
    }
    catch (TException e) {
        throw new PrestoException(HIVE_METASTORE_ERROR, e);
    }

    return privileges.build();
}

// Converts Thrift grant infos into HivePrivilege values, adding the
// synthetic GRANT privilege when the grant option is set. Null-safe.
private static Set<HivePrivilege> toGrants(List<PrivilegeGrantInfo> userGrants)
{
    if (userGrants == null) {
        return ImmutableSet.of();
    }

    ImmutableSet.Builder<HivePrivilege> privileges = ImmutableSet.builder();
    for (PrivilegeGrantInfo userGrant : userGrants) {
        privileges.addAll(parsePrivilege(userGrant));
        if (userGrant.isGrantOption()) {
            privileges.add(HivePrivilege.GRANT);
        }
    }
    return privileges.build();
}

// Common retry policy for all metastore calls: map exceptions via the
// (overridable) mapper and never retry PrestoException.
private RetryDriver retry()
{
    return RetryDriver.retry()
            .exceptionMapper(getExceptionMapper())
            .stopOn(PrestoException.class);
}

// Hook for subclasses to translate transport exceptions; identity by default.
protected Function<Exception, Exception> getExceptionMapper()
{
    return identity();
}

/** Immutable (database, table) pair used as a cache key. */
private static class HiveTableName
{
    private final String databaseName;
    private final String tableName;

    private HiveTableName(String databaseName, String tableName)
    {
        this.databaseName = databaseName;
        this.tableName = tableName;
    }

    /** Static factory; preferred over the private constructor. */
    public static HiveTableName table(String databaseName, String tableName)
    {
        return new HiveTableName(databaseName, tableName);
    }

    public String getDatabaseName()
    {
        return databaseName;
    }

    public String getTableName()
    {
        return tableName;
    }

    @Override
    public String toString()
    {
        return toStringHelper(this)
                .add("databaseName", databaseName)
                .add("tableName", tableName)
                .toString();
    }

    @Override
    public boolean equals(Object o)
    {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }

        HiveTableName other = (HiveTableName) o;
        return Objects.equals(databaseName, other.databaseName) &&
                Objects.equals(tableName, other.tableName);
    }

    @Override
    public int hashCode()
    {
        return Objects.hash(databaseName, tableName);
    }
}

/** Immutable (table, partition-name) pair used as a cache key. */
private static class HivePartitionName
{
    private final HiveTableName hiveTableName;
    private final String partitionName;

    private HivePartitionName(HiveTableName hiveTableName, String partitionName)
    {
        this.hiveTableName = hiveTableName;
        this.partitionName = partitionName;
    }

    /** Static factory building the nested HiveTableName as well. */
    public static HivePartitionName partition(String databaseName, String tableName, String partitionName)
    {
        return new HivePartitionName(HiveTableName.table(databaseName, tableName), partitionName);
    }

    public HiveTableName getHiveTableName()
    {
        return hiveTableName;
    }

    public String getPartitionName()
    {
        return partitionName;
    }

    @Override
    public String toString()
    {
        return toStringHelper(this)
                .add("hiveTableName", hiveTableName)
                .add("partitionName", partitionName)
                .toString();
    }

    @Override
    public boolean equals(Object o)
    {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }

        HivePartitionName other = (HivePartitionName) o;
        return Objects.equals(hiveTableName, other.hiveTableName) &&
                Objects.equals(partitionName, other.partitionName);
    }

    @Override
    public int hashCode()
    {
        return Objects.hash(hiveTableName, partitionName);
    }
}

/** Immutable (table, partition-filter parts) pair used as a cache key. */
private static class PartitionFilter
{
    private final HiveTableName hiveTableName;
    private final List<String> parts;

    private PartitionFilter(HiveTableName hiveTableName, List<String> parts)
    {
        this.hiveTableName = hiveTableName;
        // Defensive copy: the key must stay immutable.
        this.parts = ImmutableList.copyOf(parts);
    }

    /** Static factory building the nested HiveTableName as well. */
    public static PartitionFilter partitionFilter(String databaseName, String tableName, List<String> parts)
    {
        return new PartitionFilter(HiveTableName.table(databaseName, tableName), parts);
    }

    public HiveTableName getHiveTableName()
    {
        return hiveTableName;
    }

    public List<String> getParts()
    {
        return parts;
    }

    @Override
    public String toString()
    {
        return toStringHelper(this)
                .add("hiveTableName", hiveTableName)
                .add("parts", parts)
                .toString();
    }

    @Override
    public boolean equals(Object o)
    {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }

        PartitionFilter other = (PartitionFilter) o;
        return Objects.equals(hiveTableName, other.hiveTableName) &&
                Objects.equals(parts, other.parts);
    }

    @Override
    public int hashCode()
    {
        return Objects.hash(hiveTableName, parts);
    }
}

/** Immutable (user, table, database) triple used as a privilege-cache key. */
private static class UserTableKey
{
    private final String user;
    private final String database;
    private final String table;

    public UserTableKey(String user, String table, String database)
    {
        this.user = requireNonNull(user, "principalName is null");
        this.table = requireNonNull(table, "table is null");
        this.database = requireNonNull(database, "database is null");
    }

    public String getUser()
    {
        return user;
    }

    public String getDatabase()
    {
        return database;
    }

    public String getTable()
    {
        return table;
    }

    @Override
    public boolean equals(Object o)
    {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        UserTableKey that = (UserTableKey) o;
        return Objects.equals(user, that.user) &&
                Objects.equals(table, that.table) &&
                Objects.equals(database, that.database);
    }

    @Override
    public int hashCode()
    {
        return Objects.hash(user, table, database);
    }

    @Override
    public String toString()
    {
        return toStringHelper(this)
                .add("principalName", user)
                .add("table", table)
                .add("database", database)
                .toString();
    }
}
}
package com.rj.pixelesque;
/*
 * Copyright (C) 2007 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import android.os.Bundle;
import android.app.Dialog;
import android.content.Context;
import android.graphics.*;
import android.view.MotionEvent;
import android.view.View;

/**
 * Modal dialog that shows a hue wheel (the classic AOSP color-picker sample).
 * The user drags around the ring to preview a color in the center swatch and
 * taps the center to confirm; the chosen ARGB color is delivered through
 * {@link OnColorChangedListener#colorChanged(int)} and the dialog dismisses.
 */
public class ColorPickerDialog extends Dialog {

    /** Callback invoked once, with the final color, when the user confirms. */
    public interface OnColorChangedListener {
        void colorChanged(int color);
    }

    // Listener and starting color supplied by the caller; handed to the view in onCreate().
    private OnColorChangedListener mListener;
    private int mInitialColor;

    /** Custom view drawing the hue ring and the center confirmation swatch. */
    private static class ColorPickerView extends View {
        private Paint mPaint;        // stroke paint for the hue ring (sweep gradient)
        private Paint mCenterPaint;  // fill paint for the center swatch (current selection)
        private final int[] mColors; // hue stops; first == last so the sweep wraps seamlessly
        private OnColorChangedListener mListener;

        ColorPickerView(Context c, OnColorChangedListener l, int color) {
            super(c);
            mListener = l;
            // Red → magenta → blue → cyan → green → yellow → red: full hue circle.
            mColors = new int[] {
                0xFFFF0000, 0xFFFF00FF, 0xFF0000FF, 0xFF00FFFF, 0xFF00FF00,
                0xFFFFFF00, 0xFFFF0000
            };
            Shader s = new SweepGradient(0, 0, mColors, null);

            mPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
            mPaint.setShader(s);
            mPaint.setStyle(Paint.Style.STROKE);
            mPaint.setStrokeWidth(32);

            mCenterPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
            mCenterPaint.setColor(color);
            mCenterPaint.setStrokeWidth(5);
        }

        private boolean mTrackingCenter;   // true while a touch that started in the center is in progress
        private boolean mHighlightCenter;  // true while the tracked touch is still inside the center

        @Override
        protected void onDraw(Canvas canvas) {
            // Ring radius: inset by half the stroke so the ring fits the view.
            float r = CENTER_X - mPaint.getStrokeWidth()*0.5f;

            // NOTE(review): translates by CENTER_X on both axes — relies on
            // CENTER_X == CENTER_Y (both 100 below); confirm if either changes.
            canvas.translate(CENTER_X, CENTER_X);

            canvas.drawOval(new RectF(-r, -r, r, r), mPaint);
            canvas.drawCircle(0, 0, CENTER_RADIUS, mCenterPaint);

            if (mTrackingCenter) {
                // Draw a halo ring around the swatch while the user is pressing it;
                // fully opaque while the finger stays inside, translucent otherwise.
                int c = mCenterPaint.getColor();
                mCenterPaint.setStyle(Paint.Style.STROKE);

                if (mHighlightCenter) {
                    mCenterPaint.setAlpha(0xFF);
                } else {
                    mCenterPaint.setAlpha(0x80);
                }
                canvas.drawCircle(0, 0,
                                  CENTER_RADIUS + mCenterPaint.getStrokeWidth(),
                                  mCenterPaint);

                // Restore the paint for the next onDraw pass.
                mCenterPaint.setStyle(Paint.Style.FILL);
                mCenterPaint.setColor(c);
            }
        }

        @Override
        protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
            // Fixed-size view; measure specs are deliberately ignored.
            setMeasuredDimension(CENTER_X*2, CENTER_Y*2);
        }

        private static final int CENTER_X = 100;
        private static final int CENTER_Y = 100;
        private static final int CENTER_RADIUS = 32;

        /** Rounds a float channel value to the nearest int (no clamping). */
        private int floatToByte(float x) {
            int n = java.lang.Math.round(x);
            return n;
        }

        /** Clamps an int channel value into the valid byte range [0, 255]. */
        private int pinToByte(int n) {
            if (n < 0) {
                n = 0;
            } else if (n > 255) {
                n = 255;
            }
            return n;
        }

        /** Linear interpolation between channel values s and d at fraction p. */
        private int ave(int s, int d, float p) {
            return s + java.lang.Math.round(p * (d - s));
        }

        /**
         * Maps unit in [0, 1] onto the color stop array, linearly
         * interpolating each ARGB channel between the two nearest stops.
         */
        private int interpColor(int colors[], float unit) {
            if (unit <= 0) {
                return colors[0];
            }
            if (unit >= 1) {
                return colors[colors.length - 1];
            }

            float p = unit * (colors.length - 1);
            int i = (int)p;
            p -= i;

            // now p is just the fractional part [0...1) and i is the index
            int c0 = colors[i];
            int c1 = colors[i+1];
            int a = ave(Color.alpha(c0), Color.alpha(c1), p);
            int r = ave(Color.red(c0), Color.red(c1), p);
            int g = ave(Color.green(c0), Color.green(c1), p);
            int b = ave(Color.blue(c0), Color.blue(c1), p);

            return Color.argb(a, r, g, b);
        }

        /**
         * Rotates a color's hue by `rad` radians via RGB→YUV rotation→RGB.
         * NOTE(review): not called anywhere in this class — kept from the
         * original AOSP sample; candidate for removal if truly unused.
         */
        private int rotateColor(int color, float rad) {
            float deg = rad * 180 / 3.1415927f;
            int r = Color.red(color);
            int g = Color.green(color);
            int b = Color.blue(color);

            ColorMatrix cm = new ColorMatrix();
            ColorMatrix tmp = new ColorMatrix();

            cm.setRGB2YUV();
            tmp.setRotate(0, deg);
            cm.postConcat(tmp);
            tmp.setYUV2RGB();
            cm.postConcat(tmp);

            final float[] a = cm.getArray();

            int ir = floatToByte(a[0] * r + a[1] * g + a[2] * b);
            int ig = floatToByte(a[5] * r + a[6] * g + a[7] * b);
            int ib = floatToByte(a[10] * r + a[11] * g + a[12] * b);

            return Color.argb(Color.alpha(color), pinToByte(ir),
                              pinToByte(ig), pinToByte(ib));
        }

        private static final float PI = 3.1415926f;

        @Override
        public boolean onTouchEvent(MotionEvent event) {
            // Touch coordinates relative to the wheel center.
            float x = event.getX() - CENTER_X;
            float y = event.getY() - CENTER_Y;
            boolean inCenter = java.lang.Math.sqrt(x*x + y*y) <= CENTER_RADIUS;

            switch (event.getAction()) {
                case MotionEvent.ACTION_DOWN:
                    mTrackingCenter = inCenter;
                    if (inCenter) {
                        mHighlightCenter = true;
                        invalidate();
                        break;
                    }
                    // Intentional fall-through: a DOWN outside the center is
                    // treated like a MOVE so the color updates immediately.
                case MotionEvent.ACTION_MOVE:
                    if (mTrackingCenter) {
                        // Only redraw when the halo highlight state flips.
                        if (mHighlightCenter != inCenter) {
                            mHighlightCenter = inCenter;
                            invalidate();
                        }
                    } else {
                        float angle = (float)java.lang.Math.atan2(y, x);
                        // need to turn angle [-PI ... PI] into unit [0....1]
                        float unit = angle/(2*PI);
                        if (unit < 0) {
                            unit += 1;
                        }
                        mCenterPaint.setColor(interpColor(mColors, unit));
                        invalidate();
                    }
                    break;
                case MotionEvent.ACTION_UP:
                    if (mTrackingCenter) {
                        // Confirm only if the finger lifted inside the center.
                        if (inCenter) {
                            mListener.colorChanged(mCenterPaint.getColor());
                        }
                        mTrackingCenter = false;    // so we draw w/o halo
                        invalidate();
                    }
                    break;
            }
            return true;
        }
    }

    /**
     * @param context      dialog context
     * @param listener     receives the confirmed color exactly once
     * @param initialColor starting color shown in the center swatch
     */
    public ColorPickerDialog(Context context,
                             OnColorChangedListener listener,
                             int initialColor) {
        super(context);

        mListener = listener;
        mInitialColor = initialColor;
    }

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Wrap the caller's listener so a confirmed pick also dismisses the dialog.
        OnColorChangedListener l = new OnColorChangedListener() {
            public void colorChanged(int color) {
                mListener.colorChanged(color);
                dismiss();
            }
        };

        setContentView(new ColorPickerView(getContext(), l, mInitialColor));
        setTitle("Pick a Color");
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.internal.processors.cache;

import org.apache.ignite.*;
import org.apache.ignite.cache.*;
import org.apache.ignite.configuration.*;
import org.apache.ignite.internal.processors.cache.distributed.dht.*;
import org.apache.ignite.internal.processors.cache.distributed.near.*;
import org.apache.ignite.internal.util.typedef.internal.*;
import org.apache.ignite.marshaller.*;
import org.apache.ignite.marshaller.optimized.*;
import org.apache.ignite.spi.discovery.tcp.*;
import org.apache.ignite.spi.discovery.tcp.ipfinder.*;
import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.*;
import org.apache.ignite.testframework.junits.common.*;

import java.io.*;
import java.lang.reflect.*;
import java.util.*;

import static org.apache.ignite.cache.CacheAtomicityMode.*;
import static org.apache.ignite.cache.CacheDistributionMode.*;
import static org.apache.ignite.cache.CacheMode.*;

/**
 * Verifies that {@code GridCacheMapEntry.memorySize()} equals the sum of the
 * marshalled key size, marshalled value size (or a 1-byte null marker) and the
 * per-entry overheads for each cache mode (LOCAL, REPLICATED, PARTITIONED with
 * and without near cache). Overhead constants are read reflectively from the
 * entry classes in the static initializer so the test tracks the real values.
 */
public class GridCacheEntryMemorySizeSelfTest extends GridCommonAbstractTest {
    /** IP finder. */
    private static final TcpDiscoveryIpFinder IP_FINDER = new TcpDiscoveryVmIpFinder(true);

    /** Null reference size (optimized marshaller writes one byte for null reference). */
    private static final int NULL_REF_SIZE = 1;

    /** Entry overhead. */
    private static final int ENTRY_OVERHEAD;

    /** Replicated entry overhead. */
    private static final int REPLICATED_ENTRY_OVERHEAD;

    /** DHT entry overhead. */
    private static final int DHT_ENTRY_OVERHEAD;

    /** Near entry overhead. */
    private static final int NEAR_ENTRY_OVERHEAD;

    /** Reader size. */
    private static final int READER_SIZE = 24;

    /** Key size in bytes. */
    private static final int KEY_SIZE;

    /** 1KB value size in bytes. */
    private static final int ONE_KB_VAL_SIZE;

    /** 2KB value size in bytes. */
    private static final int TWO_KB_VAL_SIZE;

    /**
     * Reads the overhead constants reflectively and pre-computes the
     * marshalled sizes of the key and the two test values, so each assertion
     * below can be expressed as a simple sum.
     */
    static {
        try {
            ENTRY_OVERHEAD = U.<Integer>staticField(GridCacheMapEntry.class, "SIZE_OVERHEAD");
            DHT_ENTRY_OVERHEAD = U.<Integer>staticField(GridDhtCacheEntry.class, "DHT_SIZE_OVERHEAD");
            NEAR_ENTRY_OVERHEAD = U.<Integer>staticField(GridNearCacheEntry.class, "NEAR_SIZE_OVERHEAD");

            // Replicated caches use DHT entries, hence the same overhead.
            REPLICATED_ENTRY_OVERHEAD = DHT_ENTRY_OVERHEAD;

            Marshaller marsh = new OptimizedMarshaller();

            // Minimal context: accept every class registration, never resolve by id
            // (resolution is not needed for marshalling-size measurements).
            marsh.setContext(new MarshallerContext() {
                @Override public boolean registerClass(int id, Class cls) {
                    return true;
                }

                @Override public Class getClass(int id, ClassLoader ldr) {
                    throw new UnsupportedOperationException();
                }
            });

            KEY_SIZE = marsh.marshal(1).length;
            ONE_KB_VAL_SIZE = marsh.marshal(new Value(new byte[1024])).length;
            TWO_KB_VAL_SIZE = marsh.marshal(new Value(new byte[2048])).length;
        }
        catch (IgniteCheckedException e) {
            throw new IgniteException(e);
        }
    }

    /** Cache mode. */
    private CacheMode mode;

    /** Near cache enabled flag. */
    private boolean nearEnabled;

    /** {@inheritDoc} */
    @Override protected IgniteConfiguration getConfiguration(String gridName) throws Exception {
        IgniteConfiguration cfg = super.getConfiguration(gridName);

        CacheConfiguration cacheCfg = defaultCacheConfiguration();

        cacheCfg.setCacheMode(mode);
        cacheCfg.setDistributionMode(nearEnabled ? NEAR_PARTITIONED : PARTITIONED_ONLY);
        cacheCfg.setWriteSynchronizationMode(CacheWriteSynchronizationMode.FULL_SYNC);
        cacheCfg.setAtomicityMode(TRANSACTIONAL);

        // No backups so the "near disabled" test has nothing to check on node 1.
        if (mode == PARTITIONED)
            cacheCfg.setBackups(0);

        cfg.setCacheConfiguration(cacheCfg);

        TcpDiscoverySpi disco = new TcpDiscoverySpi();

        disco.setIpFinder(IP_FINDER);

        cfg.setDiscoverySpi(disco);

        return cfg;
    }

    /** @throws Exception If failed. */
    public void testLocal() throws Exception {
        mode = LOCAL;

        try {
            IgniteCache<Integer, Value> cache = startGrid().jcache(null);

            cache.put(1, new Value(new byte[1024]));
            cache.put(2, new Value(new byte[2048]));

            GridCacheAdapter<Integer, Value> internalCache = internalCache(cache);

            // Key 0 was never put: its entry carries a null value (1-byte marker).
            assertEquals(KEY_SIZE + NULL_REF_SIZE + ENTRY_OVERHEAD + extrasSize(internalCache.entryEx(0)),
                internalCache.entryEx(0).memorySize());
            assertEquals(KEY_SIZE + ONE_KB_VAL_SIZE + ENTRY_OVERHEAD + extrasSize(internalCache.entryEx(1)),
                internalCache.entryEx(1).memorySize());
            assertEquals(KEY_SIZE + TWO_KB_VAL_SIZE + ENTRY_OVERHEAD + extrasSize(internalCache.entryEx(2)),
                internalCache.entryEx(2).memorySize());
        }
        finally {
            stopAllGrids();
        }
    }

    /** @throws Exception If failed. */
    public void testReplicated() throws Exception {
        mode = REPLICATED;

        try {
            IgniteCache<Integer, Value> cache = startGrid().jcache(null);

            cache.put(1, new Value(new byte[1024]));
            cache.put(2, new Value(new byte[2048]));

            GridCacheAdapter<Integer, Value> internalCache = dht(cache);

            // Same sums as testLocal plus the replicated (== DHT) entry overhead.
            assertEquals(KEY_SIZE + NULL_REF_SIZE + ENTRY_OVERHEAD + REPLICATED_ENTRY_OVERHEAD +
                extrasSize(internalCache.entryEx(0)), internalCache.entryEx(0).memorySize());
            assertEquals(KEY_SIZE + ONE_KB_VAL_SIZE + ENTRY_OVERHEAD + REPLICATED_ENTRY_OVERHEAD +
                extrasSize(internalCache.entryEx(1)), internalCache.entryEx(1).memorySize());
            assertEquals(KEY_SIZE + TWO_KB_VAL_SIZE + ENTRY_OVERHEAD + REPLICATED_ENTRY_OVERHEAD +
                extrasSize(internalCache.entryEx(2)), internalCache.entryEx(2).memorySize());
        }
        finally {
            stopAllGrids();
        }
    }

    /** @throws Exception If failed. */
    public void testPartitionedNearEnabled() throws Exception {
        mode = PARTITIONED;
        nearEnabled = true;

        try {
            startGridsMultiThreaded(2);

            int[] keys = new int[3];

            int key = 0;

            // Find three keys primary on grid 0; keys[0] stays unwritten (null value),
            // keys[1] gets a 1KB value, keys[2] a 2KB value.
            for (int i = 0; i < keys.length; i++) {
                while (true) {
                    key++;

                    if (grid(0).cluster().mapKeyToNode(null, key).equals(grid(0).localNode())) {
                        if (i > 0)
                            jcache(0).put(key, new Value(new byte[i * 1024]));

                        keys[i] = key;

                        break;
                    }
                }
            }

            // Create near entries.
            assertNotNull(jcache(1).get(keys[1]));
            assertNotNull(jcache(1).get(keys[2]));

            GridCacheAdapter<Object, Object> cache0 = dht(jcache(0));

            assertEquals(KEY_SIZE + NULL_REF_SIZE + ENTRY_OVERHEAD + DHT_ENTRY_OVERHEAD +
                extrasSize(cache0.entryEx(keys[0])), cache0.entryEx(keys[0]).memorySize());
            // Keys read from node 1 gained a reader record (READER_SIZE) on the primary.
            assertEquals(KEY_SIZE + ONE_KB_VAL_SIZE + ENTRY_OVERHEAD + DHT_ENTRY_OVERHEAD + READER_SIZE +
                extrasSize(cache0.entryEx(keys[1])), cache0.entryEx(keys[1]).memorySize());
            assertEquals(KEY_SIZE + TWO_KB_VAL_SIZE + ENTRY_OVERHEAD + DHT_ENTRY_OVERHEAD + READER_SIZE +
                extrasSize(cache0.entryEx(keys[2])), cache0.entryEx(keys[2]).memorySize());

            GridCacheAdapter<Object, Object> cache1 = near(jcache(1));

            // NOTE(review): near entry for keys[0] is also expected to hold a null
            // value (it was never read) — hence NULL_REF_SIZE in all three sums' key part.
            assertEquals(KEY_SIZE + NULL_REF_SIZE + ENTRY_OVERHEAD + NEAR_ENTRY_OVERHEAD +
                extrasSize(cache1.entryEx(keys[0])), cache1.entryEx(keys[0]).memorySize());
            assertEquals(KEY_SIZE + ONE_KB_VAL_SIZE + ENTRY_OVERHEAD + NEAR_ENTRY_OVERHEAD +
                extrasSize(cache1.entryEx(keys[1])), cache1.entryEx(keys[1]).memorySize());
            assertEquals(KEY_SIZE + TWO_KB_VAL_SIZE + ENTRY_OVERHEAD + NEAR_ENTRY_OVERHEAD +
                extrasSize(cache1.entryEx(keys[2])), cache1.entryEx(keys[2]).memorySize());
        }
        finally {
            stopAllGrids();
        }
    }

    /** @throws Exception If failed. */
    public void testPartitionedNearDisabled() throws Exception {
        mode = PARTITIONED;
        nearEnabled = false;

        try {
            startGridsMultiThreaded(2);

            int[] keys = new int[3];

            int key = 0;

            // Same key selection scheme as the near-enabled test above.
            for (int i = 0; i < keys.length; i++) {
                while (true) {
                    key++;

                    if (grid(0).cluster().mapKeyToNode(null, key).equals(grid(0).localNode())) {
                        if (i > 0)
                            jcache(0).put(key, new Value(new byte[i * 1024]));

                        keys[i] = key;

                        break;
                    }
                }
            }

            // Create near entries.
            assertNotNull(jcache(1).get(keys[1]));
            assertNotNull(jcache(1).get(keys[2]));

            GridCacheAdapter<Object, Object> cache = dht(jcache(0));

            // Near cache disabled: no READER_SIZE term on the primary entries.
            assertEquals(KEY_SIZE + NULL_REF_SIZE + ENTRY_OVERHEAD + DHT_ENTRY_OVERHEAD +
                extrasSize(cache.entryEx(keys[0])), cache.entryEx(keys[0]).memorySize());
            assertEquals(KEY_SIZE + ONE_KB_VAL_SIZE + ENTRY_OVERHEAD + DHT_ENTRY_OVERHEAD +
                extrasSize(cache.entryEx(keys[1])), cache.entryEx(keys[1]).memorySize());
            assertEquals(KEY_SIZE + TWO_KB_VAL_SIZE + ENTRY_OVERHEAD + DHT_ENTRY_OVERHEAD +
                extrasSize(cache.entryEx(keys[2])), cache.entryEx(keys[2]).memorySize());

            // Do not test other node since there are no backups.
        }
        finally {
            stopAllGrids();
        }
    }

    /**
     * Get entry extras size.
     *
     * @param entry Entry.
     * @return Extras size.
     * @throws Exception If failed.
     */
    private int extrasSize(GridCacheEntryEx entry) throws Exception {
        // extrasSize() is package-private/internal, so it is invoked reflectively.
        Method mthd = GridCacheMapEntry.class.getDeclaredMethod("extrasSize");

        mthd.setAccessible(true);

        return (Integer)mthd.invoke(entry);
    }

    /** Value. */
    @SuppressWarnings("UnusedDeclaration")
    private static class Value implements Serializable {
        /** Byte array. */
        private byte[] arr;

        /** @param arr Byte array. */
        private Value(byte[] arr) {
            this.arr = arr;
        }

        /** {@inheritDoc} */
        @Override public boolean equals(Object o) {
            if (this == o)
                return true;

            if (o == null || getClass() != o.getClass())
                return false;

            Value val = (Value)o;

            return Arrays.equals(arr, val.arr);
        }

        /** {@inheritDoc} */
        @Override public int hashCode() {
            return arr != null ? Arrays.hashCode(arr) : 0;
        }
    }
}
/**
 * Copyright 2004-2020 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.ibatis.common.util;

import static org.junit.jupiter.api.Assertions.assertEquals;

import java.util.Iterator;

import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

/**
 * Exercises {@code PaginatedArrayList} page navigation with three fixtures
 * (page size 5 each): a list smaller than one page (3 elements), a list whose
 * last page is partial (18 elements), and a list of exactly three full pages
 * (15 elements). Checks page flags, per-page iteration counts, and the
 * elements visible after nextPage/previousPage/gotoPage — including gotoPage
 * clamping out-of-range page numbers to the first/last page.
 */
public class PaginatedArrayListTest {

  private PaginatedArrayList smallPageList;
  private PaginatedArrayList oddPageList;
  private PaginatedArrayList evenPageList;

  /** Builds a list with the given page size holding Integers 0 .. count-1. */
  private static PaginatedArrayList buildList(int pageSize, int count) {
    PaginatedArrayList list = new PaginatedArrayList(pageSize);
    for (int value = 0; value < count; value++) {
      list.add(Integer.valueOf(value));
    }
    return list;
  }

  /** Returns how many elements the list's current-page iterator yields. */
  private static int currentPageSize(PaginatedArrayList list) {
    int seen = 0;
    Iterator it = list.iterator();
    while (it.hasNext()) {
      it.next();
      seen++;
    }
    return seen;
  }

  /** Asserts the flag combination expected of a list that fits on one page. */
  private static void assertSinglePageFlags(PaginatedArrayList list) {
    assertEquals(true, list.isFirstPage());
    assertEquals(true, list.isLastPage());
    assertEquals(false, list.isMiddlePage());
    assertEquals(false, list.isPreviousPageAvailable());
    assertEquals(false, list.isNextPageAvailable());
  }

  @BeforeEach
  public void setUp() {
    smallPageList = buildList(5, 3);
    oddPageList = buildList(5, 18);
    evenPageList = buildList(5, 15);
  }

  @Test
  public void testOddPaginatedIterator() {
    // Fresh list starts on the first page with nothing before it.
    assertEquals(true, oddPageList.isFirstPage());
    assertEquals(false, oddPageList.isPreviousPageAvailable());
    assertEquals(5, currentPageSize(oddPageList));

    oddPageList.nextPage();
    assertEquals(5, currentPageSize(oddPageList));

    oddPageList.nextPage();
    assertEquals(true, oddPageList.isMiddlePage());
    assertEquals(5, currentPageSize(oddPageList));

    // Fourth page holds the 3 leftover elements (15..17).
    oddPageList.nextPage();
    assertEquals(3, currentPageSize(oddPageList));
    assertEquals(true, oddPageList.isLastPage());
    assertEquals(false, oddPageList.isNextPageAvailable());

    // Advancing past the end is a no-op.
    oddPageList.nextPage();
    assertEquals(true, oddPageList.isLastPage());
    assertEquals(false, oddPageList.isNextPageAvailable());

    oddPageList.previousPage();
    assertEquals(Integer.valueOf(10), oddPageList.get(0));
    assertEquals(Integer.valueOf(12), oddPageList.get(2));

    // gotoPage clamps: far past the end lands on the first page...
    oddPageList.gotoPage(500);
    assertEquals(Integer.valueOf(0), oddPageList.get(0));
    assertEquals(Integer.valueOf(4), oddPageList.get(4));

    // ...and far before the start lands on the last (partial) page.
    oddPageList.gotoPage(-500);
    assertEquals(Integer.valueOf(15), oddPageList.get(0));
    assertEquals(Integer.valueOf(17), oddPageList.get(2));
  }

  @Test
  public void testEvenPaginatedIterator() {
    assertEquals(true, evenPageList.isFirstPage());
    assertEquals(false, evenPageList.isPreviousPageAvailable());
    assertEquals(5, currentPageSize(evenPageList));

    evenPageList.nextPage();
    assertEquals(true, evenPageList.isMiddlePage());
    assertEquals(5, currentPageSize(evenPageList));

    // 15 elements / page size 5: the third page is both full and last.
    evenPageList.nextPage();
    assertEquals(5, currentPageSize(evenPageList));
    assertEquals(true, evenPageList.isLastPage());
    assertEquals(false, evenPageList.isNextPageAvailable());

    // Advancing past the end keeps showing the last page (10..14).
    evenPageList.nextPage();
    assertEquals(Integer.valueOf(10), evenPageList.get(0));
    assertEquals(Integer.valueOf(14), evenPageList.get(4));

    evenPageList.previousPage();
    assertEquals(Integer.valueOf(5), evenPageList.get(0));
    assertEquals(Integer.valueOf(9), evenPageList.get(4));

    evenPageList.gotoPage(500);
    assertEquals(Integer.valueOf(0), evenPageList.get(0));
    assertEquals(Integer.valueOf(4), evenPageList.get(4));

    evenPageList.gotoPage(-500);
    assertEquals(Integer.valueOf(10), evenPageList.get(0));
    assertEquals(Integer.valueOf(14), evenPageList.get(4));
  }

  @Test
  public void testSmallPaginatedIterator() {
    assertSinglePageFlags(smallPageList);
    assertEquals(3, currentPageSize(smallPageList));

    // Navigation on a single-page list never changes what is visible.
    smallPageList.nextPage();
    assertEquals(3, currentPageSize(smallPageList));
    assertSinglePageFlags(smallPageList);

    smallPageList.nextPage();
    assertEquals(3, currentPageSize(smallPageList));

    smallPageList.nextPage();
    assertEquals(Integer.valueOf(0), smallPageList.get(0));
    assertEquals(Integer.valueOf(2), smallPageList.get(2));

    smallPageList.previousPage();
    assertEquals(Integer.valueOf(0), smallPageList.get(0));
    assertEquals(Integer.valueOf(2), smallPageList.get(2));

    smallPageList.gotoPage(500);
    assertEquals(Integer.valueOf(0), smallPageList.get(0));
    assertEquals(Integer.valueOf(2), smallPageList.get(2));

    smallPageList.gotoPage(-500);
    assertEquals(Integer.valueOf(0), smallPageList.get(0));
    assertEquals(Integer.valueOf(2), smallPageList.get(2));

    assertSinglePageFlags(smallPageList);
  }
}
/**
 * Copyright (C) 2015 Red Hat, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.fabric8.openshift.client.dsl.internal;

import io.fabric8.kubernetes.client.KubernetesClientException;
import io.fabric8.kubernetes.client.Watch;
import io.fabric8.kubernetes.client.Watcher;
import io.fabric8.kubernetes.client.dsl.Gettable;
import io.fabric8.kubernetes.client.dsl.Reaper;
import io.fabric8.kubernetes.client.dsl.Triggerable;
import io.fabric8.kubernetes.client.dsl.Typeable;
import io.fabric8.kubernetes.client.dsl.Watchable;
import io.fabric8.kubernetes.client.dsl.base.BaseOperation;
import io.fabric8.kubernetes.client.utils.URLUtils;
import io.fabric8.kubernetes.client.utils.Utils;
import io.fabric8.openshift.api.model.Build;
import io.fabric8.openshift.api.model.BuildConfig;
import io.fabric8.openshift.api.model.BuildConfigList;
import io.fabric8.openshift.api.model.BuildList;
import io.fabric8.openshift.api.model.BuildRequest;
import io.fabric8.openshift.api.model.DoneableBuildConfig;
import io.fabric8.openshift.api.model.WebHookTrigger;
import io.fabric8.openshift.client.OpenShiftConfig;
import io.fabric8.openshift.client.dsl.BuildConfigOperation;
import io.fabric8.openshift.client.dsl.BuildConfigResource;
import io.fabric8.openshift.client.dsl.InputStreamable;
import io.fabric8.openshift.client.dsl.TimeoutInputStreamable;
import io.fabric8.openshift.client.dsl.buildconfig.AsFileTimeoutInputStreamable;
import io.fabric8.openshift.client.dsl.buildconfig.AuthorEmailable;
import io.fabric8.openshift.client.dsl.buildconfig.AuthorMessageAsFileTimeoutInputStreamable;
import io.fabric8.openshift.client.dsl.buildconfig.CommitterAuthorMessageAsFileTimeoutInputStreamable;
import io.fabric8.openshift.client.dsl.buildconfig.CommitterEmailable;
import io.fabric8.openshift.client.dsl.buildconfig.MessageAsFileTimeoutInputStreamable;
import okhttp3.MediaType;
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.RequestBody;
import okio.BufferedSink;
import okio.Okio;
import okio.Source;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Map;
import java.util.TreeMap;
import java.util.concurrent.TimeUnit;

import static io.fabric8.openshift.client.OpenShiftAPIGroups.BUILD;

/**
 * Operations for OpenShift {@code BuildConfig} resources: webhook triggering,
 * build instantiation (including binary builds streamed from a file or an
 * {@link InputStream}), and cascading deletion of the builds a config owns.
 *
 * <p>Instances are immutable; every fluent method ({@code withName},
 * {@code withSecret}, {@code withTimeout}, ...) returns a new instance carrying
 * the extra state in its final fields.
 */
public class BuildConfigOperationsImpl extends OpenShiftOperation<BuildConfig, BuildConfigList, DoneableBuildConfig,
  BuildConfigResource<BuildConfig, DoneableBuildConfig, Void, Build>> implements BuildConfigOperation {

  /** Label OpenShift stamps on builds; value is the (possibly truncated) build config name. */
  public static final String BUILD_CONFIG_LABEL = "openshift.io/build-config.name";
  /** Annotation holding the full, untruncated build config name (labels are capped at 63 chars). */
  public static final String BUILD_CONFIG_ANNOTATION = "openshift.io/build-config.name";

  // Webhook trigger state.
  private final String secret;
  private final String triggerType;
  // Binary-build revision metadata, sent as query parameters (see getQueryParameters()).
  private final String authorName;
  private final String authorEmail;
  private final String committerName;
  private final String committerEmail;
  private final String commit;
  private final String message;
  private final String asFile;
  // Read/write timeout applied to the dedicated client used for binary-build uploads.
  private final long timeout;
  private final TimeUnit timeoutUnit;

  /**
   * Creates an operation scoped to {@code namespace} with all optional state unset
   * and the build timeout taken from {@code config}.
   */
  public BuildConfigOperationsImpl(OkHttpClient client, OpenShiftConfig config, String namespace) {
    this(client, config, null, namespace, null, true, null, null, false, -1,
      new TreeMap<String, String>(), new TreeMap<String, String>(),
      new TreeMap<String, String[]>(), new TreeMap<String, String[]>(), new TreeMap<String, String>(),
      null, null, null, null, null, null, null, null, null,
      config.getBuildTimeout(), TimeUnit.MILLISECONDS);
  }

  /**
   * Creates an operation with the standard BaseOperation state and all
   * build-config-specific state (secret, trigger type, revision info) unset.
   */
  public BuildConfigOperationsImpl(OkHttpClient client, OpenShiftConfig config, String apiVersion, String namespace,
                                   String name, Boolean cascading, BuildConfig item, String resourceVersion,
                                   Boolean reloadingFromServer, long gracePeriodSeconds,
                                   Map<String, String> labels, Map<String, String> labelsNot,
                                   Map<String, String[]> labelsIn, Map<String, String[]> labelsNotIn,
                                   Map<String, String> fields) {
    this(client, config, apiVersion, namespace, name, cascading, item, resourceVersion, reloadingFromServer,
      gracePeriodSeconds, labels, labelsNot, labelsIn, labelsNotIn, fields,
      null, null, null, null, null, null, null, null, null,
      config.getBuildTimeout(), TimeUnit.MILLISECONDS);
  }

  /**
   * Full constructor carrying every piece of fluent state. All other constructors
   * and all fluent methods funnel through here.
   */
  public BuildConfigOperationsImpl(OkHttpClient client, OpenShiftConfig config, String apiVersion, String namespace,
                                   String name, Boolean cascading, BuildConfig item, String resourceVersion,
                                   Boolean reloadingFromServer, long gracePeriodSeconds,
                                   Map<String, String> labels, Map<String, String> labelsNot,
                                   Map<String, String[]> labelsIn, Map<String, String[]> labelsNotIn,
                                   Map<String, String> fields,
                                   String secret, String triggerType, String authorName, String authorEmail,
                                   String committerName, String committerEmail, String commit, String message,
                                   String asFile, long timeout, TimeUnit timeoutUnit) {
    super(client, OpenShiftOperation.withApiGroup(client, BUILD, apiVersion, config), "buildconfigs", namespace, name,
      cascading, item, resourceVersion, reloadingFromServer, gracePeriodSeconds,
      labels, labelsNot, labelsIn, labelsNotIn, fields);
    this.triggerType = triggerType;
    this.secret = secret;
    this.authorName = authorName;
    this.authorEmail = authorEmail;
    this.committerName = committerName;
    this.committerEmail = committerEmail;
    this.commit = commit;
    this.message = message;
    this.asFile = asFile;
    // The reaper deletes builds owned by this config when the config is deleted (cascading delete).
    reaper = new BuildConfigReaper(this);
    this.timeout = timeout;
    this.timeoutUnit = timeoutUnit;
  }

  /**
   * Returns a copy of this operation scoped to the given build config name.
   *
   * @throws IllegalArgumentException if {@code name} is null or empty
   */
  @Override
  public BuildConfigResource<BuildConfig, DoneableBuildConfig, Void, Build> withName(String name) {
    if (name == null || name.length() == 0) {
      throw new IllegalArgumentException("Name must be provided.");
    }
    return new BuildConfigOperationsImpl(client, getConfig(), getAPIVersion(), getNamespace(), name, isCascading(),
      getItem(), getResourceVersion(), isReloadingFromServer(), getGracePeriodSeconds(),
      getLabels(), getLabelsNot(), getLabelsIn(), getLabelsNotIn(), getFields(),
      secret, triggerType, authorName, authorEmail, committerName, committerEmail, commit, message, asFile,
      timeout, timeoutUnit);
  }

  /** Returns a copy of this operation scoped to the given namespace. */
  @Override
  public OpenShiftOperation<BuildConfig, BuildConfigList, DoneableBuildConfig,
    BuildConfigResource<BuildConfig, DoneableBuildConfig, Void, Build>> inNamespace(String namespace) {
    return new BuildConfigOperationsImpl(client, getConfig(), getAPIVersion(), namespace, getName(), isCascading(),
      getItem(), getResourceVersion(), isReloadingFromServer(), getGracePeriodSeconds(),
      getLabels(), getLabelsNot(), getLabelsIn(), getLabelsNotIn(), getFields(),
      secret, triggerType, authorName, authorEmail, committerName, committerEmail, commit, message, asFile,
      timeout, timeoutUnit);
  }

  /** Returns a copy that re-reads the resource from the server instead of using the cached item. */
  @Override
  public Gettable<BuildConfig> fromServer() {
    return new BuildConfigOperationsImpl(client, getConfig(), getAPIVersion(), namespace, getName(), isCascading(),
      getItem(), getResourceVersion(), true, getGracePeriodSeconds(),
      getLabels(), getLabelsNot(), getLabelsIn(), getLabelsNotIn(), getFields(),
      secret, triggerType, authorName, authorEmail, committerName, committerEmail, commit, message, asFile,
      timeout, timeoutUnit);
  }

  /** Returns a copy carrying the webhook secret, for subsequent {@link #withType(String)} + trigger calls. */
  @Override
  public Typeable<Triggerable<WebHookTrigger, Void>> withSecret(String secret) {
    return new BuildConfigOperationsImpl(client, getConfig(), getAPIVersion(), getNamespace(), getName(), isCascading(),
      getItem(), getResourceVersion(), isReloadingFromServer(), getGracePeriodSeconds(),
      getLabels(), getLabelsNot(), getLabelsIn(), getLabelsNotIn(), getFields(),
      secret, triggerType, authorName, authorEmail, committerName, committerEmail, commit, message, asFile,
      timeout, timeoutUnit);
  }

  /** Returns a copy whose item is the BuildConfig unmarshalled from the given stream. */
  @Override
  public BuildConfigResource<BuildConfig, DoneableBuildConfig, Void, Build> load(InputStream is) {
    return new BuildConfigOperationsImpl(client, getConfig(), getAPIVersion(), getNamespace(), getName(), isCascading(),
      unmarshal(is, getType()), getResourceVersion(), isReloadingFromServer(), getGracePeriodSeconds(),
      getLabels(), getLabelsNot(), getLabelsIn(), getLabelsNotIn(), getFields(),
      secret, triggerType, authorName, authorEmail, committerName, committerEmail, commit, message, asFile,
      timeout, timeoutUnit);
  }

  /**
   * POSTs a {@code BuildRequest} to the config's {@code /instantiate} subresource and
   * returns the {@code Build} the server created.
   *
   * @throws KubernetesClientException wrapping any failure (serialization, I/O, HTTP error)
   */
  @Override
  public Build instantiate(BuildRequest request) {
    try {
      updateApiVersion(request);
      URL instantiationUrl = new URL(URLUtils.join(getResourceUrl().toString(), "instantiate"));
      RequestBody requestBody = RequestBody.create(JSON, BaseOperation.JSON_MAPPER.writer().writeValueAsString(request));
      Request.Builder requestBuilder = new Request.Builder().post(requestBody).url(instantiationUrl);
      return handleResponse(requestBuilder, Build.class);
    } catch (Exception e) {
      throw KubernetesClientException.launderThrowable(e);
    }
  }

  /** Entry point of the binary-build fluent chain (committer/author/message/asFile/timeout/fromFile...). */
  @Override
  public CommitterAuthorMessageAsFileTimeoutInputStreamable<Build> instantiateBinary() {
    return new BuildConfigOperationsImpl(client, getConfig(), getAPIVersion(), getNamespace(), getName(), isCascading(),
      getItem(), getResourceVersion(), isReloadingFromServer(), getGracePeriodSeconds(),
      getLabels(), getLabelsNot(), getLabelsIn(), getLabelsNotIn(), getFields(),
      secret, triggerType, authorName, authorEmail, committerName, committerEmail, commit, message, asFile,
      timeout, timeoutUnit);
  }

  /**
   * POSTs the trigger payload to {@code <resource>/webhooks/<secret>/<triggerType>}.
   * Always sends an {@code X-Github-Event: push} header, regardless of trigger type.
   *
   * @return always {@code null} (the DSL's Void result)
   * @throws KubernetesClientException wrapping any failure
   */
  @Override
  public Void trigger(WebHookTrigger trigger) {
    try {
      //TODO: This needs some attention.
      String triggerUrl = URLUtils.join(getResourceUrl().toString(), "webhooks", secret, triggerType);
      RequestBody requestBody = RequestBody.create(JSON, BaseOperation.JSON_MAPPER.writer().writeValueAsBytes(trigger));
      Request.Builder requestBuilder = new Request.Builder()
        .post(requestBody)
        .url(triggerUrl)
        .addHeader("X-Github-Event", "push");
      handleResponse(requestBuilder, null);
    } catch (Exception e) {
      throw KubernetesClientException.launderThrowable(e);
    }
    return null;
  }

  /** Returns a copy carrying the webhook trigger type (e.g. "github", "generic"). */
  @Override
  public Triggerable<WebHookTrigger, Void> withType(String type) {
    return new BuildConfigOperationsImpl(client, getConfig(), getAPIVersion(), getNamespace(), getName(), isCascading(),
      getItem(), getResourceVersion(), isReloadingFromServer(), getGracePeriodSeconds(),
      getLabels(), getLabelsNot(), getLabelsIn(), getLabelsNotIn(), getFields(),
      secret, type, authorName, authorEmail, committerName, committerEmail, commit, message, asFile,
      timeout, timeoutUnit);
  }

  /** Returns a copy pinned to the given resource version, for watch operations. */
  @Override
  public Watchable<Watch, Watcher<BuildConfig>> withResourceVersion(String resourceVersion) {
    return new BuildConfigOperationsImpl(client, getConfig(), getAPIVersion(), namespace, getName(), isCascading(),
      getItem(), resourceVersion, isReloadingFromServer(), getGracePeriodSeconds(),
      getLabels(), getLabelsNot(), getLabelsIn(), getLabelsNotIn(), getFields(),
      secret, triggerType, authorName, authorEmail, committerName, committerEmail, commit, message, asFile,
      timeout, timeoutUnit);
  }

  /*
   * Labels are limited to 63 chars so need to first truncate the build config name (if required), retrieve builds with matching label,
   * then check the build config name against the builds' build config annotation which have no such length restriction (but
   * aren't usable for searching). Would be better if referenced build config was available via fields but it currently isn't...
   */
  private void deleteBuilds() {
    if (getName() == null) {
      return;
    }
    // Label values are capped at 63 characters; the annotation below holds the full name.
    String buildConfigLabelValue = getName().substring(0, Math.min(getName().length(), 63));
    BuildList matchingBuilds = new BuildOperationsImpl(client, (OpenShiftConfig) config, namespace)
      .inNamespace(namespace).withLabel(BUILD_CONFIG_LABEL, buildConfigLabelValue).list();
    if (matchingBuilds.getItems() != null) {
      for (Build matchingBuild : matchingBuilds.getItems()) {
        // Confirm via the (untruncated) annotation that the build really belongs to this config.
        if (matchingBuild.getMetadata() != null
          && matchingBuild.getMetadata().getAnnotations() != null
          && getName().equals(matchingBuild.getMetadata().getAnnotations().get(BUILD_CONFIG_ANNOTATION))) {
          new BuildOperationsImpl(client, (OpenShiftConfig) config, namespace)
            .inNamespace(matchingBuild.getMetadata().getNamespace())
            .withName(matchingBuild.getMetadata().getName())
            .delete();
        }
      }
    }
  }

  /**
   * Streams a binary build from the given input stream with unknown content length
   * (forces chunked transfer encoding). Prefer {@link #fromFile(File)} when the
   * length is known, since some API servers reject chunked uploads.
   */
  @Override
  public Build fromInputStream(final InputStream inputStream) {
    return fromInputStream(inputStream, -1L);
  }

  /**
   * Streams a binary build from the given file.
   *
   * @throws IllegalArgumentException if the file does not exist
   * @throws KubernetesClientException wrapping any I/O or HTTP failure
   */
  @Override
  public Build fromFile(final File file) {
    if (!file.exists()) {
      throw new IllegalArgumentException("Can't instantiate binary build from the specified file. The file does not exist");
    }
    try (InputStream is = new FileInputStream(file)) {
      // Use a length to prevent chunked encoding with OkHttp, which in turn
      // doesn't work with 'Expect: 100-continue' negotiation with the OpenShift API server
      return fromInputStream(is, file.length());
    } catch (Throwable t) {
      throw KubernetesClientException.launderThrowable(t);
    }
  }

  /**
   * Uploads the stream to the config's {@code /instantiatebinary} subresource using a
   * dedicated client configured with this operation's read/write timeouts.
   *
   * @param contentLength exact byte count, or -1 for unknown (chunked encoding)
   */
  private Build fromInputStream(final InputStream inputStream, final long contentLength) {
    try {
      RequestBody requestBody = new RequestBody() {
        @Override
        public MediaType contentType() {
          return MediaType.parse("application/octet-stream");
        }

        @Override
        public long contentLength() throws IOException {
          return contentLength;
        }

        @Override
        public void writeTo(BufferedSink sink) throws IOException {
          // Note: the caller-supplied stream is intentionally NOT closed here;
          // ownership stays with the caller (fromFile closes its own stream).
          try {
            Source source = Okio.source(inputStream);
            sink.writeAll(source);
          } catch (IOException e) {
            throw KubernetesClientException.launderThrowable("Can't instantiate binary build, due to error reading/writing stream. " +
              "Can be caused if the output stream was closed by the server.", e);
          }
        }
      };

      // Binary uploads can be long-running; use a client with the configured build timeout.
      OkHttpClient newClient = client.newBuilder()
        .readTimeout(timeout, timeoutUnit)
        .writeTimeout(timeout, timeoutUnit)
        .build();
      Request.Builder requestBuilder = new Request.Builder().post(requestBody)
        .header("Expect", "100-continue")
        .url(getQueryParameters());
      return handleResponse(newClient, requestBuilder, Build.class);
    } catch (Exception e) {
      throw KubernetesClientException.launderThrowable(e);
    }
  }

  /** Builds the {@code instantiatebinary} URL with revision metadata as query parameters. */
  private String getQueryParameters() throws MalformedURLException {
    StringBuilder sb = new StringBuilder();
    sb.append(URLUtils.join(getResourceUrl().toString(), "instantiatebinary"));

    // NOTE(review): the 'commit' parameter is populated from 'message' while
    // 'revision.commit' below uses 'commit' — looks inconsistent; confirm against
    // the OpenShift instantiatebinary API before changing.
    if (Utils.isNullOrEmpty(message)) {
      sb.append("?commit=");
    } else {
      sb.append("?commit=").append(message);
    }
    if (!Utils.isNullOrEmpty(authorName)) {
      sb.append("&revision.authorName=").append(authorName);
    }
    if (!Utils.isNullOrEmpty(authorEmail)) {
      sb.append("&revision.authorEmail=").append(authorEmail);
    }
    if (!Utils.isNullOrEmpty(committerName)) {
      sb.append("&revision.committerName=").append(committerName);
    }
    if (!Utils.isNullOrEmpty(committerEmail)) {
      sb.append("&revision.committerEmail=").append(committerEmail);
    }
    if (!Utils.isNullOrEmpty(commit)) {
      sb.append("&revision.commit=").append(commit);
    }
    if (!Utils.isNullOrEmpty(asFile)) {
      sb.append("&asFile=").append(asFile);
    }
    return sb.toString();
  }

  /** Returns a copy that uploads the binary build as the named file inside the build context. */
  @Override
  public TimeoutInputStreamable<Build> asFile(String fileName) {
    return new BuildConfigOperationsImpl(client, getConfig(), getAPIVersion(), getNamespace(), getName(), isCascading(),
      getItem(), getResourceVersion(), isReloadingFromServer(), getGracePeriodSeconds(),
      getLabels(), getLabelsNot(), getLabelsIn(), getLabelsNotIn(), getFields(),
      secret, triggerType, authorName, authorEmail, committerName, committerEmail, commit, message, fileName,
      timeout, timeoutUnit);
  }

  /** Returns a copy carrying the revision author email. */
  @Override
  public MessageAsFileTimeoutInputStreamable<Build> withAuthorEmail(String email) {
    return new BuildConfigOperationsImpl(client, getConfig(), getAPIVersion(), getNamespace(), getName(), isCascading(),
      getItem(), getResourceVersion(), isReloadingFromServer(), getGracePeriodSeconds(),
      getLabels(), getLabelsNot(), getLabelsIn(), getLabelsNotIn(), getFields(),
      secret, triggerType, authorName, email, committerName, committerEmail, commit, message, asFile,
      timeout, timeoutUnit);
  }

  /** Returns a copy carrying the revision committer email. */
  @Override
  public AuthorMessageAsFileTimeoutInputStreamable<Build> withCommitterEmail(String committerEmail) {
    return new BuildConfigOperationsImpl(client, getConfig(), getAPIVersion(), getNamespace(), getName(), isCascading(),
      getItem(), getResourceVersion(), isReloadingFromServer(), getGracePeriodSeconds(),
      getLabels(), getLabelsNot(), getLabelsIn(), getLabelsNotIn(), getFields(),
      secret, triggerType, authorName, authorEmail, committerName, committerEmail, commit, message, asFile,
      timeout, timeoutUnit);
  }

  /** Returns a copy carrying the commit message. */
  @Override
  public AsFileTimeoutInputStreamable<Build> withMessage(String message) {
    return new BuildConfigOperationsImpl(client, getConfig(), getAPIVersion(), getNamespace(), getName(), isCascading(),
      getItem(), getResourceVersion(), isReloadingFromServer(), getGracePeriodSeconds(),
      getLabels(), getLabelsNot(), getLabelsIn(), getLabelsNotIn(), getFields(),
      secret, triggerType, authorName, authorEmail, committerName, committerEmail, commit, message, asFile,
      timeout, timeoutUnit);
  }

  /** Returns a copy carrying the revision author name. */
  @Override
  public AuthorEmailable<MessageAsFileTimeoutInputStreamable<Build>> withAuthorName(String authorName) {
    return new BuildConfigOperationsImpl(client, getConfig(), getAPIVersion(), getNamespace(), getName(), isCascading(),
      getItem(), getResourceVersion(), isReloadingFromServer(), getGracePeriodSeconds(),
      getLabels(), getLabelsNot(), getLabelsIn(), getLabelsNotIn(), getFields(),
      secret, triggerType, authorName, authorEmail, committerName, committerEmail, commit, message, asFile,
      timeout, timeoutUnit);
  }

  /** Returns a copy carrying the revision committer name. */
  @Override
  public CommitterEmailable<AuthorMessageAsFileTimeoutInputStreamable<Build>> withCommitterName(String committerName) {
    return new BuildConfigOperationsImpl(client, getConfig(), getAPIVersion(), getNamespace(), getName(), isCascading(),
      getItem(), getResourceVersion(), isReloadingFromServer(), getGracePeriodSeconds(),
      getLabels(), getLabelsNot(), getLabelsIn(), getLabelsNotIn(), getFields(),
      secret, triggerType, authorName, authorEmail, committerName, committerEmail, commit, message, asFile,
      timeout, timeoutUnit);
  }

  /** Returns a copy whose binary-upload client uses the given read/write timeout. */
  @Override
  public InputStreamable<Build> withTimeout(long timeout, TimeUnit unit) {
    return new BuildConfigOperationsImpl(client, getConfig(), getAPIVersion(), getNamespace(), getName(), isCascading(),
      getItem(), getResourceVersion(), isReloadingFromServer(), getGracePeriodSeconds(),
      getLabels(), getLabelsNot(), getLabelsIn(), getLabelsNotIn(), getFields(),
      secret, triggerType, authorName, authorEmail, committerName, committerEmail, commit, message, asFile,
      timeout, unit);
  }

  /** Convenience overload of {@link #withTimeout(long, TimeUnit)} taking milliseconds. */
  @Override
  public InputStreamable<Build> withTimeoutInMillis(long timeoutInMillis) {
    return new BuildConfigOperationsImpl(client, getConfig(), getAPIVersion(), getNamespace(), getName(), isCascading(),
      getItem(), getResourceVersion(), isReloadingFromServer(), getGracePeriodSeconds(),
      getLabels(), getLabelsNot(), getLabelsIn(), getLabelsNotIn(), getFields(),
      secret, triggerType, authorName, authorEmail, committerName, committerEmail, commit, message, asFile,
      timeoutInMillis, TimeUnit.MILLISECONDS);
  }

  /**
   * Reaper that deletes all builds owned by a build config before the config itself
   * is deleted (cascading delete).
   */
  private static class BuildConfigReaper implements Reaper {
    private BuildConfigOperationsImpl oper;

    public BuildConfigReaper(BuildConfigOperationsImpl oper) {
      this.oper = oper;
    }

    @Override
    public boolean reap() {
      oper.deleteBuilds();
      // Returning false: the framework should still perform the config deletion itself.
      return false;
    }
  }
}
/*
 * Copyright 2015 PRImA Research Lab, University of Salford, United Kingdom
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.primaresearch.web.gwt.client.ui.page.tool;

import org.primaresearch.maths.geometry.Point;
import org.primaresearch.maths.geometry.Polygon;
import org.primaresearch.maths.geometry.Rect;
import org.primaresearch.web.gwt.client.page.PageSyncManager;
import org.primaresearch.web.gwt.client.ui.page.SelectionManager;
import org.primaresearch.web.gwt.client.ui.page.SelectionManager.SelectionListener;
import org.primaresearch.web.gwt.client.ui.page.tool.controls.DraggableControl;
import org.primaresearch.web.gwt.shared.page.ContentObjectC;

import com.google.gwt.dom.client.Style.Cursor;

/**
 * Page view tool/control for resizing a selected page content object.
 *
 * One instance handles one resize handle (a side midpoint or a corner, chosen by
 * the TYPE_... constant). Dragging the handle scales the selected object's polygon
 * relative to a snapshot of its outline taken at drag start; the change is synced
 * to the server when the drag ends.
 *
 * @author Christian Clausner
 *
 */
public class ResizeRegionTool extends DraggableControl implements SelectionListener {

	//Resize types (which edge or corner of the bounding box this handle controls)
	public static final int TYPE_LEFT 			= 1;
	public static final int TYPE_RIGHT 			= 2;
	public static final int TYPE_TOP 			= 3;
	public static final int TYPE_BOTTOM 		= 4;
	public static final int TYPE_TOP_LEFT 		= 5;
	public static final int TYPE_TOP_RIGHT 		= 6;
	public static final int TYPE_BOTTOM_LEFT 	= 7;
	public static final int TYPE_BOTTOM_RIGHT 	= 8;

	// One of the TYPE_... constants; fixed at construction.
	private int resizeType;
	// Currently selected object, or null when zero/multiple objects are selected.
	private ContentObjectC selObj = null;
	// Snapshot of the object's outline taken at drag start; all scaling during the
	// drag is computed from this reference, not from the live (already scaled) polygon.
	private Polygon referencePolygon = null;
	private PageSyncManager pageSync;
	// If false, the page view is only re-rendered on drag end, not on every mouse move.
	private boolean alwaysRefreshPageView;

	/**
	 * Constructor
	 *
	 * @param resizeType See TYPE_... constants
	 * @param imageUrl Icon resource
	 * @param selectionManager Content object selection manager for adding a listener
	 * @param pageSync Page content synchronisation manager for sending changed object outlines to the server
	 * @param alwaysRefreshPageView If set to <code>false</code> the view is only refreshed when the user releases the mouse button, otherwise it is refreshed as well when moving the mouse.
	 */
	public ResizeRegionTool(int resizeType, String imageUrl, SelectionManager selectionManager,
							PageSyncManager pageSync, boolean alwaysRefreshPageView) {
		super(imageUrl);
		this.resizeType = resizeType;
		selectionManager.addListener(this);
		// Hidden until a single, writable object is selected (see selectionChanged()).
		this.asWidget().setVisible(false);
		setCursor();
		this.pageSync = pageSync;
		this.alwaysRefreshPageView = alwaysRefreshPageView;
	}

	/**
	 * Selects the mouse cursor according to the resize type.
	 */
	private void setCursor() {
		if (resizeType == TYPE_LEFT)
			this.asWidget().getElement().getStyle().setCursor(Cursor.W_RESIZE);
		else if (resizeType == TYPE_RIGHT)
			this.asWidget().getElement().getStyle().setCursor(Cursor.E_RESIZE);
		else if (resizeType == TYPE_TOP)
			this.asWidget().getElement().getStyle().setCursor(Cursor.N_RESIZE);
		else if (resizeType == TYPE_BOTTOM)
			this.asWidget().getElement().getStyle().setCursor(Cursor.S_RESIZE);
		else if (resizeType == TYPE_TOP_LEFT)
			this.asWidget().getElement().getStyle().setCursor(Cursor.NW_RESIZE);
		else if (resizeType == TYPE_TOP_RIGHT)
			this.asWidget().getElement().getStyle().setCursor(Cursor.NE_RESIZE);
		else if (resizeType == TYPE_BOTTOM_LEFT)
			this.asWidget().getElement().getStyle().setCursor(Cursor.SW_RESIZE);
		else if (resizeType == TYPE_BOTTOM_RIGHT)
			this.asWidget().getElement().getStyle().setCursor(Cursor.SE_RESIZE);
	}

	/**
	 * Shows the handle when exactly one writable object is selected; hides it otherwise.
	 */
	@Override
	public void selectionChanged(SelectionManager manager) {
		if (manager.isEmpty() || manager.getSelection().size() > 1) {
			//Multiple or no objects selected
			this.asWidget().setVisible(false);
			selObj = null;
		}
		else {
			//One object selected
			selObj = manager.getSelection().iterator().next();
			// Read-only objects get no resize handle.
			this.asWidget().setVisible(selObj != null && !selObj.isReadOnly());
			updatePosition();
		}
	}

	/**
	 * Positions the control according to the resize type (at a corner of the bounding box or in the middle of one side).
	 */
	private void updatePosition() {
		if (selObj == null)
			return;
		// Centre the widget on the target point: shift by half its rendered size.
		int halfWidth = this.asWidget().getElement().getClientWidth() / 2;
		int halfHeight = this.asWidget().getElement().getClientHeight() / 2;

		Rect objectRect = selObj.getCoords().getBoundingBox();
		Point targetPos = getTargetPositionOfWidget(objectRect);
		// Document coordinates -> screen coordinates via the current zoom factor.
		int x = (int)(targetPos.x * getPageView().getZoomFactor()) - halfWidth;
		int y = (int)(targetPos.y * getPageView().getZoomFactor()) - halfHeight;
		setPosistion(x, y);
	}

	/**
	 * Maps this handle's resize type to its anchor point on the given bounding box
	 * (corner for corner types, side midpoint for edge types).
	 */
	Point getTargetPositionOfWidget(Rect rect) {
		if (resizeType == TYPE_LEFT)
			return new Point(rect.left, (rect.top + rect.bottom) / 2);
		else if (resizeType == TYPE_RIGHT)
			return new Point(rect.right, (rect.top + rect.bottom) / 2);
		else if (resizeType == TYPE_TOP)
			return new Point((rect.left + rect.right) / 2, rect.top);
		else if (resizeType == TYPE_BOTTOM)
			return new Point((rect.left + rect.right) / 2, rect.bottom);
		else if (resizeType == TYPE_TOP_LEFT)
			return new Point(rect.left, rect.top);
		else if (resizeType == TYPE_TOP_RIGHT)
			return new Point(rect.right, rect.top);
		else if (resizeType == TYPE_BOTTOM_LEFT)
			return new Point(rect.left, rect.bottom);
		else //if (resizeType == TYPE_BOTTOM_RIGHT)
			return new Point(rect.right, rect.bottom);
	}

	@Override
	public void refresh() {
		updatePosition();
	}

	/**
	 * Scales the selected object's polygon while the handle is dragged.
	 * All point positions are recomputed from the drag-start snapshot
	 * (referencePolygon), so repeated onDrag calls do not compound errors.
	 */
	@Override
	protected void onDrag(Point start, Point current) {
		if (referencePolygon == null) {
			// First onDrag of this gesture: snapshot the outline as the scaling reference.
			referencePolygon = selObj.getCoords().clone();
		}

		// Mouse movement in document coordinates (screen delta divided by zoom).
		int diffX = (int)((double)(current.x - start.x) / getPageView().getZoomFactor());
		int diffY = (int)((double)(current.y - start.y) / getPageView().getZoomFactor());

		final double minScalingFactor = 0.1; //Minimum of 10%
		// Clamp so the object cannot be shrunk below 10% of its reference size.
		double scaleX = Math.max(getScalingFactorX(diffX), minScalingFactor);
		double scaleY = Math.max(getScalingFactorY(diffY), minScalingFactor);

		// Left/top handles also shift the object so the opposite edge stays anchored.
		int offsetX = getOffsetX(diffX);
		int offsetY = getOffsetY(diffY);

		// Only apply when at least one axis is above the clamp (an axis exactly at
		// the minimum is left unchanged by the per-axis checks inside the loop).
		if (scaleX > minScalingFactor || scaleY > minScalingFactor) {
			Polygon polygon = selObj.getCoords();
			int xBoundingBox = referencePolygon.getBoundingBox().left;
			int yBoundingBox = referencePolygon.getBoundingBox().top;
			for (int i=0; i<polygon.getSize(); i++) {
				// Scale each point relative to the reference bounding box's top-left corner.
				int xOld = referencePolygon.getPoint(i).x;
				int xRel = xOld - xBoundingBox;
				int yOld = referencePolygon.getPoint(i).y;
				int yRel = yOld - yBoundingBox;

				if (scaleX > minScalingFactor)
					polygon.getPoint(i).x = xBoundingBox + (int)((double)xRel * scaleX) + offsetX;
				if (scaleY > minScalingFactor)
					polygon.getPoint(i).y = yBoundingBox + (int)((double)yRel * scaleY) + offsetY;
			}
			// Cached bounding box is stale after moving points.
			polygon.setBoundingBoxOutdated();
		}
		pageView.refreshHoverWidgets(); //Refresh position of all widgets
		if (alwaysRefreshPageView)
			pageView.getRenderer().refresh();
	}

	/**
	 * Calculates the horizontal scaling factor for resizing the current page content object (in relation to the mouse movement).
	 */
	private double getScalingFactorX(int diffX) {
		// Top/bottom handles never scale horizontally.
		if (resizeType == TYPE_TOP || resizeType == TYPE_BOTTOM)
			return 1.0;
		// Left-side handles: dragging right (positive diffX) shrinks the object.
		if (resizeType == TYPE_LEFT || resizeType == TYPE_TOP_LEFT || resizeType == TYPE_BOTTOM_LEFT)
			return (double)(referencePolygon.getBoundingBox().getWidth()-diffX) / (double)referencePolygon.getBoundingBox().getWidth();
		// Right-side handles: dragging right grows the object.
		return (double)(referencePolygon.getBoundingBox().getWidth() + diffX) / (double)referencePolygon.getBoundingBox().getWidth();
	}

	/**
	 * Calculates the vertical scaling factor for resizing the current page content object (in relation to the mouse movement).
	 */
	private double getScalingFactorY(int diffY) {
		// Left/right handles never scale vertically.
		if (resizeType == TYPE_LEFT || resizeType == TYPE_RIGHT)
			return 1.0;
		// Top-side handles: dragging down (positive diffY) shrinks the object.
		if (resizeType == TYPE_TOP || resizeType == TYPE_TOP_LEFT || resizeType == TYPE_TOP_RIGHT)
			return (double)(referencePolygon.getBoundingBox().getHeight()-diffY) / (double)referencePolygon.getBoundingBox().getHeight();
		// Bottom-side handles: dragging down grows the object.
		return (double)(referencePolygon.getBoundingBox().getHeight() + diffY) / (double)referencePolygon.getBoundingBox().getHeight();
	}

	/**
	 * Calculates the horizontal offset for resizing the current page content object (in relation to the mouse movement).
	 */
	private int getOffsetX(int diffX) {
		// Only left-side handles move the anchor; scaling alone would keep the left edge fixed.
		if (resizeType == TYPE_LEFT || resizeType == TYPE_TOP_LEFT || resizeType == TYPE_BOTTOM_LEFT)
			return diffX;
		return 0;
	}

	/**
	 * Calculates the vertical offset for resizing the current page content object (in relation to the mouse movement).
	 */
	private int getOffsetY(int diffY) {
		// Only top-side handles move the anchor; scaling alone would keep the top edge fixed.
		if (resizeType == TYPE_TOP || resizeType == TYPE_TOP_LEFT || resizeType == TYPE_TOP_RIGHT)
			return diffY;
		return 0;
	}

	/**
	 * Ends the resize gesture: drops the reference snapshot, re-renders the view,
	 * and pushes the changed outline to the server.
	 */
	@Override
	protected void onDragEnd(Point start, Point current) {
		referencePolygon = null;
		this.getPageView().getRenderer().refresh();
		//Sync to server
		pageSync.syncObjectOutline(selObj);
	}
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.catalyst.expressions; import org.junit.After; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.apache.spark.SparkConf; import org.apache.spark.memory.TaskMemoryManager; import org.apache.spark.memory.TestMemoryManager; import org.apache.spark.sql.types.StructType; import org.apache.spark.sql.types.DataTypes; import org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter; import org.apache.spark.unsafe.types.UTF8String; import java.util.Random; public class RowBasedKeyValueBatchSuite { private final Random rand = new Random(42); private TestMemoryManager memoryManager; private TaskMemoryManager taskMemoryManager; private StructType keySchema = new StructType().add("k1", DataTypes.LongType) .add("k2", DataTypes.StringType); private StructType fixedKeySchema = new StructType().add("k1", DataTypes.LongType) .add("k2", DataTypes.LongType); private StructType valueSchema = new StructType().add("count", DataTypes.LongType) .add("sum", DataTypes.LongType); private int DEFAULT_CAPACITY = 1 << 16; private String getRandomString(int length) { Assert.assertTrue(length >= 0); final byte[] bytes = new byte[length]; rand.nextBytes(bytes); 
return new String(bytes); } private UnsafeRow makeKeyRow(long k1, String k2) { UnsafeRowWriter writer = new UnsafeRowWriter(2); writer.reset(); writer.write(0, k1); writer.write(1, UTF8String.fromString(k2)); return writer.getRow(); } private UnsafeRow makeKeyRow(long k1, long k2) { UnsafeRowWriter writer = new UnsafeRowWriter(2); writer.reset(); writer.write(0, k1); writer.write(1, k2); return writer.getRow(); } private UnsafeRow makeValueRow(long v1, long v2) { UnsafeRowWriter writer = new UnsafeRowWriter(2); writer.reset(); writer.write(0, v1); writer.write(1, v2); return writer.getRow(); } private UnsafeRow appendRow(RowBasedKeyValueBatch batch, UnsafeRow key, UnsafeRow value) { return batch.appendRow(key.getBaseObject(), key.getBaseOffset(), key.getSizeInBytes(), value.getBaseObject(), value.getBaseOffset(), value.getSizeInBytes()); } private void updateValueRow(UnsafeRow row, long v1, long v2) { row.setLong(0, v1); row.setLong(1, v2); } private boolean checkKey(UnsafeRow row, long k1, String k2) { return (row.getLong(0) == k1) && (row.getUTF8String(1).equals(UTF8String.fromString(k2))); } private boolean checkKey(UnsafeRow row, long k1, long k2) { return (row.getLong(0) == k1) && (row.getLong(1) == k2); } private boolean checkValue(UnsafeRow row, long v1, long v2) { return (row.getLong(0) == v1) && (row.getLong(1) == v2); } @Before public void setup() { memoryManager = new TestMemoryManager(new SparkConf() .set("spark.memory.offHeap.enabled", "false") .set("spark.shuffle.spill.compress", "false") .set("spark.shuffle.compress", "false")); taskMemoryManager = new TaskMemoryManager(memoryManager, 0); } @After public void tearDown() { if (taskMemoryManager != null) { Assert.assertEquals(0L, taskMemoryManager.cleanUpAllAllocatedMemory()); long leakedMemory = taskMemoryManager.getMemoryConsumptionForThisTask(); taskMemoryManager = null; Assert.assertEquals(0L, leakedMemory); } } @Test public void emptyBatch() throws Exception { try (RowBasedKeyValueBatch batch = 
RowBasedKeyValueBatch.allocate(keySchema, valueSchema, taskMemoryManager, DEFAULT_CAPACITY)) { Assert.assertEquals(0, batch.numRows()); try { batch.getKeyRow(-1); Assert.fail("Should not be able to get row -1"); } catch (AssertionError e) { // Expected exception; do nothing. } try { batch.getValueRow(-1); Assert.fail("Should not be able to get row -1"); } catch (AssertionError e) { // Expected exception; do nothing. } try { batch.getKeyRow(0); Assert.fail("Should not be able to get row 0 when batch is empty"); } catch (AssertionError e) { // Expected exception; do nothing. } try { batch.getValueRow(0); Assert.fail("Should not be able to get row 0 when batch is empty"); } catch (AssertionError e) { // Expected exception; do nothing. } Assert.assertFalse(batch.rowIterator().next()); } } @Test public void batchType() { try (RowBasedKeyValueBatch batch1 = RowBasedKeyValueBatch.allocate(keySchema, valueSchema, taskMemoryManager, DEFAULT_CAPACITY); RowBasedKeyValueBatch batch2 = RowBasedKeyValueBatch.allocate(fixedKeySchema, valueSchema, taskMemoryManager, DEFAULT_CAPACITY)) { Assert.assertEquals(batch1.getClass(), VariableLengthRowBasedKeyValueBatch.class); Assert.assertEquals(batch2.getClass(), FixedLengthRowBasedKeyValueBatch.class); } } @Test public void setAndRetrieve() { try (RowBasedKeyValueBatch batch = RowBasedKeyValueBatch.allocate(keySchema, valueSchema, taskMemoryManager, DEFAULT_CAPACITY)) { UnsafeRow ret1 = appendRow(batch, makeKeyRow(1, "A"), makeValueRow(1, 1)); Assert.assertTrue(checkValue(ret1, 1, 1)); UnsafeRow ret2 = appendRow(batch, makeKeyRow(2, "B"), makeValueRow(2, 2)); Assert.assertTrue(checkValue(ret2, 2, 2)); UnsafeRow ret3 = appendRow(batch, makeKeyRow(3, "C"), makeValueRow(3, 3)); Assert.assertTrue(checkValue(ret3, 3, 3)); Assert.assertEquals(3, batch.numRows()); UnsafeRow retrievedKey1 = batch.getKeyRow(0); Assert.assertTrue(checkKey(retrievedKey1, 1, "A")); UnsafeRow retrievedKey2 = batch.getKeyRow(1); 
Assert.assertTrue(checkKey(retrievedKey2, 2, "B")); UnsafeRow retrievedValue1 = batch.getValueRow(1); Assert.assertTrue(checkValue(retrievedValue1, 2, 2)); UnsafeRow retrievedValue2 = batch.getValueRow(2); Assert.assertTrue(checkValue(retrievedValue2, 3, 3)); try { batch.getKeyRow(3); Assert.fail("Should not be able to get row 3"); } catch (AssertionError e) { // Expected exception; do nothing. } try { batch.getValueRow(3); Assert.fail("Should not be able to get row 3"); } catch (AssertionError e) { // Expected exception; do nothing. } } } @Test public void setUpdateAndRetrieve() { try (RowBasedKeyValueBatch batch = RowBasedKeyValueBatch.allocate(keySchema, valueSchema, taskMemoryManager, DEFAULT_CAPACITY)) { appendRow(batch, makeKeyRow(1, "A"), makeValueRow(1, 1)); Assert.assertEquals(1, batch.numRows()); UnsafeRow retrievedValue = batch.getValueRow(0); updateValueRow(retrievedValue, 2, 2); UnsafeRow retrievedValue2 = batch.getValueRow(0); Assert.assertTrue(checkValue(retrievedValue2, 2, 2)); } } @Test public void iteratorTest() throws Exception { try (RowBasedKeyValueBatch batch = RowBasedKeyValueBatch.allocate(keySchema, valueSchema, taskMemoryManager, DEFAULT_CAPACITY)) { appendRow(batch, makeKeyRow(1, "A"), makeValueRow(1, 1)); appendRow(batch, makeKeyRow(2, "B"), makeValueRow(2, 2)); appendRow(batch, makeKeyRow(3, "C"), makeValueRow(3, 3)); Assert.assertEquals(3, batch.numRows()); org.apache.spark.unsafe.KVIterator<UnsafeRow, UnsafeRow> iterator = batch.rowIterator(); Assert.assertTrue(iterator.next()); UnsafeRow key1 = iterator.getKey(); UnsafeRow value1 = iterator.getValue(); Assert.assertTrue(checkKey(key1, 1, "A")); Assert.assertTrue(checkValue(value1, 1, 1)); Assert.assertTrue(iterator.next()); UnsafeRow key2 = iterator.getKey(); UnsafeRow value2 = iterator.getValue(); Assert.assertTrue(checkKey(key2, 2, "B")); Assert.assertTrue(checkValue(value2, 2, 2)); Assert.assertTrue(iterator.next()); UnsafeRow key3 = iterator.getKey(); UnsafeRow value3 = 
iterator.getValue(); Assert.assertTrue(checkKey(key3, 3, "C")); Assert.assertTrue(checkValue(value3, 3, 3)); Assert.assertFalse(iterator.next()); } } @Test public void fixedLengthTest() throws Exception { try (RowBasedKeyValueBatch batch = RowBasedKeyValueBatch.allocate(fixedKeySchema, valueSchema, taskMemoryManager, DEFAULT_CAPACITY)) { appendRow(batch, makeKeyRow(11, 11), makeValueRow(1, 1)); appendRow(batch, makeKeyRow(22, 22), makeValueRow(2, 2)); appendRow(batch, makeKeyRow(33, 33), makeValueRow(3, 3)); UnsafeRow retrievedKey1 = batch.getKeyRow(0); Assert.assertTrue(checkKey(retrievedKey1, 11, 11)); UnsafeRow retrievedKey2 = batch.getKeyRow(1); Assert.assertTrue(checkKey(retrievedKey2, 22, 22)); UnsafeRow retrievedValue1 = batch.getValueRow(1); Assert.assertTrue(checkValue(retrievedValue1, 2, 2)); UnsafeRow retrievedValue2 = batch.getValueRow(2); Assert.assertTrue(checkValue(retrievedValue2, 3, 3)); Assert.assertEquals(3, batch.numRows()); org.apache.spark.unsafe.KVIterator<UnsafeRow, UnsafeRow> iterator = batch.rowIterator(); Assert.assertTrue(iterator.next()); UnsafeRow key1 = iterator.getKey(); UnsafeRow value1 = iterator.getValue(); Assert.assertTrue(checkKey(key1, 11, 11)); Assert.assertTrue(checkValue(value1, 1, 1)); Assert.assertTrue(iterator.next()); UnsafeRow key2 = iterator.getKey(); UnsafeRow value2 = iterator.getValue(); Assert.assertTrue(checkKey(key2, 22, 22)); Assert.assertTrue(checkValue(value2, 2, 2)); Assert.assertTrue(iterator.next()); UnsafeRow key3 = iterator.getKey(); UnsafeRow value3 = iterator.getValue(); Assert.assertTrue(checkKey(key3, 33, 33)); Assert.assertTrue(checkValue(value3, 3, 3)); Assert.assertFalse(iterator.next()); } } @Test public void appendRowUntilExceedingCapacity() throws Exception { try (RowBasedKeyValueBatch batch = RowBasedKeyValueBatch.allocate(keySchema, valueSchema, taskMemoryManager, 10)) { UnsafeRow key = makeKeyRow(1, "A"); UnsafeRow value = makeValueRow(1, 1); for (int i = 0; i < 10; i++) { appendRow(batch, 
key, value); } UnsafeRow ret = appendRow(batch, key, value); Assert.assertEquals(batch.numRows(), 10); Assert.assertNull(ret); org.apache.spark.unsafe.KVIterator<UnsafeRow, UnsafeRow> iterator = batch.rowIterator(); for (int i = 0; i < 10; i++) { Assert.assertTrue(iterator.next()); UnsafeRow key1 = iterator.getKey(); UnsafeRow value1 = iterator.getValue(); Assert.assertTrue(checkKey(key1, 1, "A")); Assert.assertTrue(checkValue(value1, 1, 1)); } Assert.assertFalse(iterator.next()); } } @Test public void appendRowUntilExceedingPageSize() throws Exception { // Use default size or spark.buffer.pageSize if specified int pageSizeToUse = (int) memoryManager.pageSizeBytes(); try (RowBasedKeyValueBatch batch = RowBasedKeyValueBatch.allocate(keySchema, valueSchema, taskMemoryManager, pageSizeToUse)) { UnsafeRow key = makeKeyRow(1, "A"); UnsafeRow value = makeValueRow(1, 1); int recordLength = 8 + key.getSizeInBytes() + value.getSizeInBytes() + 8; int totalSize = 4; int numRows = 0; while (totalSize + recordLength < pageSizeToUse) { appendRow(batch, key, value); totalSize += recordLength; numRows++; } UnsafeRow ret = appendRow(batch, key, value); Assert.assertEquals(batch.numRows(), numRows); Assert.assertNull(ret); org.apache.spark.unsafe.KVIterator<UnsafeRow, UnsafeRow> iterator = batch.rowIterator(); for (int i = 0; i < numRows; i++) { Assert.assertTrue(iterator.next()); UnsafeRow key1 = iterator.getKey(); UnsafeRow value1 = iterator.getValue(); Assert.assertTrue(checkKey(key1, 1, "A")); Assert.assertTrue(checkValue(value1, 1, 1)); } Assert.assertFalse(iterator.next()); } } @Test public void failureToAllocateFirstPage() throws Exception { memoryManager.limit(1024); try (RowBasedKeyValueBatch batch = RowBasedKeyValueBatch.allocate(keySchema, valueSchema, taskMemoryManager, DEFAULT_CAPACITY)) { UnsafeRow key = makeKeyRow(1, "A"); UnsafeRow value = makeValueRow(11, 11); UnsafeRow ret = appendRow(batch, key, value); Assert.assertNull(ret); 
Assert.assertFalse(batch.rowIterator().next()); } } @Test public void randomizedTest() { try (RowBasedKeyValueBatch batch = RowBasedKeyValueBatch.allocate(keySchema, valueSchema, taskMemoryManager, DEFAULT_CAPACITY)) { int numEntry = 100; long[] expectedK1 = new long[numEntry]; String[] expectedK2 = new String[numEntry]; long[] expectedV1 = new long[numEntry]; long[] expectedV2 = new long[numEntry]; for (int i = 0; i < numEntry; i++) { long k1 = rand.nextLong(); String k2 = getRandomString(rand.nextInt(256)); long v1 = rand.nextLong(); long v2 = rand.nextLong(); appendRow(batch, makeKeyRow(k1, k2), makeValueRow(v1, v2)); expectedK1[i] = k1; expectedK2[i] = k2; expectedV1[i] = v1; expectedV2[i] = v2; } for (int j = 0; j < 10000; j++) { int rowId = rand.nextInt(numEntry); if (rand.nextBoolean()) { UnsafeRow key = batch.getKeyRow(rowId); Assert.assertTrue(checkKey(key, expectedK1[rowId], expectedK2[rowId])); } if (rand.nextBoolean()) { UnsafeRow value = batch.getValueRow(rowId); Assert.assertTrue(checkValue(value, expectedV1[rowId], expectedV2[rowId])); } } } } }
package org.yeastrc.xlink.dao; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.sql.Types; import org.slf4j.LoggerFactory; import org.slf4j.Logger; import org.yeastrc.xlink.db.DBConnectionFactory; import org.yeastrc.xlink.dto.ScanDTO; import org.yeastrc.xlink.utils.ValidateIsCentroidValidValue; public class ScanDAO { private static final Logger log = LoggerFactory.getLogger( ScanDAO.class); public static ScanDTO getScanFromId( int scanId ) throws Exception { ScanDTO scanDTO = null;; // Get our connection to the database. Connection connection = null; PreparedStatement pstmt = null; ResultSet rs = null; // Our SQL statement final String sqlStr = "SELECT scan.* " + " FROM scan " + " WHERE id = ? " ; try { connection = DBConnectionFactory.getConnection( DBConnectionFactory.PROXL ); pstmt = connection.prepareStatement( sqlStr ); pstmt.setInt( 1, scanId ); // Our results rs = pstmt.executeQuery(); if ( rs.next() ) { scanDTO = new ScanDTO(); scanDTO.setId( rs.getInt( "id" ) ); scanDTO.setScanFileId( rs.getInt( "scan_file_id" ) ); scanDTO.setStartScanNumber( rs.getInt( "start_scan_number" ) ); scanDTO.setEndScanNumber( rs.getInt( "end_scan_number" ) ); scanDTO.setLevel( rs.getInt( "level" ) ); scanDTO.setPreMZ( rs.getBigDecimal( "preMZ" ) ); scanDTO.setPrecursorScanNum( rs.getInt( "precursor_scan_number" ) ); scanDTO.setPrecursorScanId( rs.getInt( "precursor_scan_id" ) ); scanDTO.setRetentionTime( rs.getBigDecimal( "retention_time" ) ); scanDTO.setPeakCount( rs.getInt( "peak_count" ) ); scanDTO.setFragmentationType( rs.getString( "fragmentation_type" ) );; scanDTO.setIsCentroid( rs.getString( "is_centroid" ) ); // scanDTO.setMzIntListAsString( rs.getString( "spectrum_data" ) ); } } catch ( Exception e ) { log.error( "ERROR: database connection: '" + DBConnectionFactory.PROXL + "' sql: " + sqlStr, e ); throw e; } finally { // Always make sure result sets and 
statements are closed, // and the connection is returned to the pool if (rs != null) { try { rs.close(); } catch (SQLException e) { ; } rs = null; } if (pstmt != null) { try { pstmt.close(); } catch (SQLException e) { ; } pstmt = null; } if (connection != null) { try { connection.close(); } catch (SQLException e) { ; } connection = null; } } return scanDTO; } public static Integer getMaxScanId( ) throws Exception { Integer maxScanId = null;; // Get our connection to the database. Connection connection = null; PreparedStatement pstmt = null; ResultSet rs = null; // Our SQL statement final String sqlStr = "SELECT MAX( id ) AS max_id FROM scan" ; try { connection = DBConnectionFactory.getConnection( DBConnectionFactory.PROXL ); pstmt = connection.prepareStatement( sqlStr ); // Our results rs = pstmt.executeQuery(); if ( rs.next() ) { maxScanId = rs.getInt( "max_id" ); } } catch ( Exception e ) { log.error( "ERROR: database connection: '" + DBConnectionFactory.PROXL + "' sql: " + sqlStr, e ); throw e; } finally { // Always make sure result sets and statements are closed, // and the connection is returned to the pool if (rs != null) { try { rs.close(); } catch (SQLException e) { ; } rs = null; } if (pstmt != null) { try { pstmt.close(); } catch (SQLException e) { ; } pstmt = null; } if (connection != null) { try { connection.close(); } catch (SQLException e) { ; } connection = null; } } return maxScanId; } private static String insertSQL = "INSERT INTO scan " + "(scan_file_id, start_scan_number, end_scan_number, level, preMZ, " + " precursor_scan_number, precursor_scan_id, retention_time, peak_count, fragmentation_type, is_centroid )" + " VALUES ( ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ? 
)"; /** * @param item * @return * @throws Exception */ public static int save( ScanDTO item ) throws Exception { Connection dbConnection = null; try { dbConnection = DBConnectionFactory.getConnection( DBConnectionFactory.PROXL ); save( item, dbConnection ); } finally { // be sure database handles are closed if( dbConnection != null ) { try { dbConnection.close(); } catch( Throwable t ) { ; } dbConnection = null; } } return item.getId(); } /** * @param item * @return * @throws Exception */ public static int save( ScanDTO item, Connection dbConnection ) throws Exception { if ( ! ValidateIsCentroidValidValue.validateIsCentroidValidValue( item.getIsCentroid() ) ) { String msg = "ERROR: ScanDAO.save(item): isCentroid is not a valid value: is:" + item.getIsCentroid(); log.error( msg ); throw new IllegalArgumentException(msg); } PreparedStatement pstmtSave = null; PreparedStatement pstmtSaveSpectrumData = null; ResultSet rsGenKeys = null; try { pstmtSave = dbConnection.prepareStatement( insertSQL, Statement.RETURN_GENERATED_KEYS ); int counter = 0; counter++; pstmtSave.setInt( counter, item.getScanFileId() ); counter++; pstmtSave.setInt( counter, item.getStartScanNumber() ); counter++; pstmtSave.setInt( counter, item.getEndScanNumber() ); counter++; pstmtSave.setInt( counter, item.getLevel() ); counter++; pstmtSave.setBigDecimal( counter, item.getPreMZ() ); counter++; pstmtSave.setInt( counter, item.getPrecursorScanNum() ); counter++; if( item.getPrecursorScanId() > 0 ) pstmtSave.setInt( counter, item.getPrecursorScanId() ); else pstmtSave.setNull( counter, Types.INTEGER); // precursorScanId counter++; pstmtSave.setBigDecimal( counter, item.getRetentionTime() ); counter++; pstmtSave.setInt( counter, item.getPeakCount() ); counter++; pstmtSave.setString( counter, item.getFragmentationType() ); counter++; pstmtSave.setString( counter, item.getIsCentroid() ); int rowsUpdated = pstmtSave.executeUpdate(); if ( rowsUpdated == 0 ) { } rsGenKeys = pstmtSave.getGeneratedKeys(); if 
( rsGenKeys.next() ) { item.setId( rsGenKeys.getInt( 1 ) ); } } catch (Exception sqlEx) { String msg = "save:Exception '" + sqlEx.toString() + ".\nSQL = " + insertSQL; log.error( msg , sqlEx); throw sqlEx; } finally { if (rsGenKeys != null) { try { rsGenKeys.close(); } catch (Exception ex) { // ignore } } if (pstmtSave != null) { try { pstmtSave.close(); } catch (Exception ex) { // ignore } } if (pstmtSaveSpectrumData != null) { try { pstmtSaveSpectrumData.close(); } catch (Exception ex) { // ignore } } } return item.getId(); } }
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */
package org.elasticsearch.xpack.core.ml.inference.preprocessing;

import org.apache.lucene.util.RamUsageEstimator;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.util.set.Sets;
import org.elasticsearch.index.mapper.TextFieldMapper;
import org.elasticsearch.xcontent.ConstructingObjectParser;
import org.elasticsearch.xcontent.ParseField;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentParser;
import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.function.Function;
import java.util.function.IntFunction;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

import static org.apache.lucene.util.RamUsageEstimator.sizeOf;

/**
 * PreProcessor for n-gram encoding a string.
 *
 * Reads a single input field, takes a substring window defined by {@code start}
 * and {@code length}, and emits every n-gram (for each configured gram size) of
 * that window as a separate output field named via {@link #nGramFeature(int, int)}.
 */
public class NGram implements LenientlyParsedPreProcessor, StrictlyParsedPreProcessor {

    // Defaults applied when start/length are absent from the parsed config.
    private static final int DEFAULT_START = 0;
    private static final int DEFAULT_LENGTH = 50;
    // Hard limits enforced by the validating constructor below.
    private static final int MAX_LENGTH = 100;
    private static final int MIN_GRAM = 1;
    private static final int MAX_GRAM = 5;

    // Default output-field prefix, e.g. "ngram_0_50" for the default window.
    private static String defaultPrefix(Integer start, Integer length) {
        return "ngram_" + (start == null ? DEFAULT_START : start) + "_" + (length == null ? DEFAULT_LENGTH : length);
    }

    public static final long SHALLOW_SIZE = RamUsageEstimator.shallowSizeOfInstance(NGram.class);

    public static final ParseField NAME = new ParseField("n_gram_encoding");
    public static final ParseField FIELD = new ParseField("field");
    public static final ParseField FEATURE_PREFIX = new ParseField("feature_prefix");
    public static final ParseField NGRAMS = new ParseField("n_grams");
    public static final ParseField START = new ParseField("start");
    public static final ParseField LENGTH = new ParseField("length");
    public static final ParseField CUSTOM = new ParseField("custom");

    private static final ConstructingObjectParser<NGram, PreProcessorParseContext> STRICT_PARSER = createParser(false);
    private static final ConstructingObjectParser<NGram, PreProcessorParseContext> LENIENT_PARSER = createParser(true);

    /**
     * Builds the x-content parser. Only {@code field} and {@code n_grams} are
     * required; {@code custom} falls back to the parse context's default when
     * absent.
     */
    @SuppressWarnings("unchecked")
    private static ConstructingObjectParser<NGram, PreProcessorParseContext> createParser(boolean lenient) {
        ConstructingObjectParser<NGram, PreProcessorParseContext> parser = new ConstructingObjectParser<>(
            NAME.getPreferredName(),
            lenient,
            (a, c) -> new NGram(
                (String) a[0],
                (List<Integer>) a[1],
                (Integer) a[2],
                (Integer) a[3],
                a[4] == null ? c.isCustomByDefault() : (Boolean) a[4],
                (String) a[5]
            )
        );
        parser.declareString(ConstructingObjectParser.constructorArg(), FIELD);
        parser.declareIntArray(ConstructingObjectParser.constructorArg(), NGRAMS);
        parser.declareInt(ConstructingObjectParser.optionalConstructorArg(), START);
        parser.declareInt(ConstructingObjectParser.optionalConstructorArg(), LENGTH);
        parser.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), CUSTOM);
        parser.declareString(ConstructingObjectParser.optionalConstructorArg(), FEATURE_PREFIX);
        return parser;
    }

    /** Parse with the strict parser (unknown fields rejected). */
    public static NGram fromXContentStrict(XContentParser parser, PreProcessorParseContext context) {
        return STRICT_PARSER.apply(parser, context == null ? PreProcessorParseContext.DEFAULT : context);
    }

    /** Parse with the lenient parser (unknown fields tolerated). */
    public static NGram fromXContentLenient(XContentParser parser, PreProcessorParseContext context) {
        return LENIENT_PARSER.apply(parser, context == null ? PreProcessorParseContext.DEFAULT : context);
    }

    private final String field;
    private final String featurePrefix;
    private final int[] nGrams;
    private final int start;
    private final int length;
    private final boolean custom;

    /**
     * Parser-facing constructor: applies defaults for missing optional values
     * and de-duplicates the gram sizes (via a hash set, so the stored order of
     * {@code nGrams} is not guaranteed to match the input order) before
     * delegating to the validating constructor.
     */
    NGram(String field, List<Integer> nGrams, Integer start, Integer length, Boolean custom, String featurePrefix) {
        this(
            field,
            featurePrefix == null ? defaultPrefix(start, length) : featurePrefix,
            Sets.newHashSet(nGrams).stream().mapToInt(Integer::intValue).toArray(),
            start == null ? DEFAULT_START : start,
            length == null ? DEFAULT_LENGTH : length,
            custom != null && custom
        );
    }

    /**
     * Validating constructor.
     *
     * @param field         the input field to read
     * @param featurePrefix prefix for all output feature names
     * @param nGrams        gram sizes, each in [MIN_GRAM, MAX_GRAM]
     * @param start         window start; negative means relative to the end of the string
     * @param length        window length, in (0, MAX_LENGTH]
     * @param custom        whether this pre-processor was user-supplied
     * @throws org.elasticsearch.ElasticsearchStatusException (bad request) when any constraint is violated
     */
    public NGram(String field, String featurePrefix, int[] nGrams, int start, int length, boolean custom) {
        this.field = ExceptionsHelper.requireNonNull(field, FIELD);
        this.featurePrefix = ExceptionsHelper.requireNonNull(featurePrefix, FEATURE_PREFIX);
        this.nGrams = ExceptionsHelper.requireNonNull(nGrams, NGRAMS);
        if (nGrams.length == 0) {
            throw ExceptionsHelper.badRequestException("[{}] must not be empty", NGRAMS.getPreferredName());
        }
        if (Arrays.stream(this.nGrams).anyMatch(i -> i < MIN_GRAM || i > MAX_GRAM)) {
            throw ExceptionsHelper.badRequestException(
                "[{}] is invalid [{}]; minimum supported value is [{}]; maximum supported value is [{}]",
                NGRAMS.getPreferredName(),
                Arrays.stream(nGrams).mapToObj(String::valueOf).collect(Collectors.joining(", ")),
                MIN_GRAM,
                MAX_GRAM
            );
        }
        this.start = start;
        // NOTE(review): the condition rejects length + start > 0 (i.e. allows == 0)
        // while the message says "must be less than 0" — confirm which is intended.
        if (start < 0 && length + start > 0) {
            throw ExceptionsHelper.badRequestException("if [start] is negative, [length] + [start] must be less than 0");
        }
        this.length = length;
        if (length <= 0) {
            throw ExceptionsHelper.badRequestException("[{}] must be a positive integer", LENGTH.getPreferredName());
        }
        if (length > MAX_LENGTH) {
            throw ExceptionsHelper.badRequestException("[{}] must be not be greater than [{}]", LENGTH.getPreferredName(), MAX_LENGTH);
        }
        // Every gram size must fit inside the window.
        if (Arrays.stream(this.nGrams).anyMatch(i -> i > length)) {
            throw ExceptionsHelper.badRequestException(
                "[{}] and [{}] are invalid; all ngrams must be shorter than or equal to length [{}]",
                NGRAMS.getPreferredName(),
                LENGTH.getPreferredName(),
                length
            );
        }
        this.custom = custom;
    }

    /**
     * Wire deserialization; the read order/encodings here must stay in lockstep
     * with {@link #writeTo(StreamOutput)} (note: start uses a plain int because
     * it may be negative, length uses a vInt).
     */
    public NGram(StreamInput in) throws IOException {
        this.field = in.readString();
        this.featurePrefix = in.readString();
        this.nGrams = in.readVIntArray();
        this.start = in.readInt();
        this.length = in.readVInt();
        this.custom = in.readBoolean();
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeString(field);
        out.writeString(featurePrefix);
        out.writeVIntArray(nGrams);
        out.writeInt(start);
        out.writeVInt(length);
        out.writeBoolean(custom);
    }

    @Override
    public String toString() {
        return Strings.toString(this);
    }

    @Override
    public List<String> inputFields() {
        return Collections.singletonList(field);
    }

    @Override
    public List<String> outputFields() {
        return allPossibleNGramOutputFeatureNames();
    }

    /**
     * Adds one output entry per n-gram of the configured substring window of
     * the input field's string value. A missing input or a window that falls
     * entirely outside the string is silently skipped (no outputs written).
     */
    @Override
    public void process(Map<String, Object> fields) {
        Object value = fields.get(field);
        if (value == null) {
            return;
        }
        final String stringValue = value.toString();
        // String is too small for the starting point
        if (start > stringValue.length() || stringValue.length() + start < 0) {
            return;
        }
        // Negative start counts back from the end of the string.
        final int startPos = start < 0 ? (stringValue.length() + start) : start;
        // Window end, clamped to the actual string length.
        final int len = Math.min(startPos + length, stringValue.length());
        for (int nGram : nGrams) {
            for (int i = 0; i < len; i++) {
                // Stop once the gram would run past the window end.
                if (startPos + i + nGram > len) {
                    break;
                }
                fields.put(nGramFeature(nGram, i), stringValue.substring(startPos + i, startPos + i + nGram));
            }
        }
    }

    /** Maps every output feature name back to the single input field. */
    @Override
    public Map<String, String> reverseLookup() {
        return outputFields().stream().collect(Collectors.toMap(Function.identity(), ignored -> field));
    }

    @Override
    public String getOutputFieldType(String outputField) {
        return TextFieldMapper.CONTENT_TYPE;
    }

    @Override
    public long ramBytesUsed() {
        long size = SHALLOW_SIZE;
        size += sizeOf(field);
        size += sizeOf(featurePrefix);
        size += sizeOf(nGrams);
        return size;
    }

    @Override
    public String getWriteableName() {
        return NAME.getPreferredName();
    }

    @Override
    public String getName() {
        return NAME.getPreferredName();
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field(FIELD.getPreferredName(), field);
        builder.field(FEATURE_PREFIX.getPreferredName(), featurePrefix);
        builder.field(NGRAMS.getPreferredName(), nGrams);
        builder.field(START.getPreferredName(), start);
        builder.field(LENGTH.getPreferredName(), length);
        builder.field(CUSTOM.getPreferredName(), custom);
        builder.endObject();
        return builder;
    }

    public String getField() {
        return field;
    }

    public String getFeaturePrefix() {
        return featurePrefix;
    }

    public int[] getnGrams() {
        return nGrams;
    }

    public int getStart() {
        return start;
    }

    public int getLength() {
        return length;
    }

    @Override
    public boolean isCustom() {
        return custom;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        NGram nGram = (NGram) o;
        return start == nGram.start
            && length == nGram.length
            && custom == nGram.custom
            && Objects.equals(field, nGram.field)
            && Objects.equals(featurePrefix, nGram.featurePrefix)
            && Arrays.equals(nGrams, nGram.nGrams);
    }

    @Override
    public int hashCode() {
        int result = Objects.hash(field, featurePrefix, start, length, custom);
        result = 31 * result + Arrays.hashCode(nGrams);
        return result;
    }

    /**
     * Output feature name for gram size {@code nGram} at window offset
     * {@code pos}, e.g. "ngram_0_50.12" for gram 1 at position 2.
     */
    private String nGramFeature(int nGram, int pos) {
        return featurePrefix + "." + nGram + pos;
    }

    /**
     * All feature names this pre-processor can emit, assuming a full-length
     * window: (length - nGram + 1) names per gram size.
     */
    private List<String> allPossibleNGramOutputFeatureNames() {
        int totalNgrams = 0;
        for (int nGram : nGrams) {
            totalNgrams += (length - (nGram - 1));
        }
        if (totalNgrams <= 0) {
            return Collections.emptyList();
        }
        List<String> ngramOutputs = new ArrayList<>(totalNgrams);
        for (int nGram : nGrams) {
            IntFunction<String> func = i -> nGramFeature(nGram, i);
            IntStream.range(0, (length - (nGram - 1))).mapToObj(func).forEach(ngramOutputs::add);
        }
        return ngramOutputs;
    }
}
/* * 3D City Database - The Open Source CityGML Database * https://www.3dcitydb.org/ * * Copyright 2013 - 2021 * Chair of Geoinformatics * Technical University of Munich, Germany * https://www.lrg.tum.de/gis/ * * The 3D City Database is jointly developed with the following * cooperation partners: * * Virtual City Systems, Berlin <https://vc.systems/> * M.O.S.S. Computer Grafik Systeme GmbH, Taufkirchen <http://www.moss.de/> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.citydb.gui.operation.visExporter.preferences; import org.citydb.config.Config; import org.citydb.config.i18n.Language; import org.citydb.config.project.visExporter.AltitudeMode; import org.citydb.config.project.visExporter.PointAndCurve; import org.citydb.config.project.visExporter.PointDisplayMode; import org.citydb.gui.components.ColorPicker; import org.citydb.gui.components.TitledPanel; import org.citydb.gui.components.popup.PopupMenuDecorator; import org.citydb.gui.operation.common.DefaultPreferencesComponent; import org.citydb.gui.util.GuiUtil; import javax.swing.*; import java.awt.*; import java.text.DecimalFormat; import java.text.DecimalFormatSymbols; import java.util.Locale; import java.util.function.Supplier; public class PointAndCurveStylingPanel extends DefaultPreferencesComponent { private final Supplier<PointAndCurve> pointAndCurveSupplier; private TitledPanel pointPanel; private TitledPanel crossLinePanel; private TitledPanel iconPanel; private TitledPanel cubePanel; private 
TitledPanel curvePanel; private JRadioButton iconRButton; private JRadioButton crossLineRButton; private JRadioButton cubeRButton; private JLabel pointCrossLineDefaultStyle; private JLabel pointAltitudeModeLabel; private JComboBox<AltitudeMode> pointAltitudeModeComboBox; private JLabel pointCrossLineThicknessLabel; private JSpinner pointCrossLineThicknessSpinner; private JLabel pointCrossLineNormalColorLabel; private ColorPicker pointCrossLineNormalColorButton; private JCheckBox pointCrossLineHighlightingCheckbox; private JLabel pointCrossLineHighlightingThicknessLabel; private JSpinner pointCrossLineHighlightingThicknessSpinner; private JLabel pointCrossLineHighlightingColorLabel; private ColorPicker pointCrossLineHighlightingColorButton; private JLabel pointIconDefaultStyle; private JLabel pointIconColorLabel; private ColorPicker pointIconColorButton; private JLabel pointIconScaleLabel; private JSpinner pointIconScaleSpinner; private JCheckBox pointIconHighlightingCheckbox; private JLabel pointIconHighlightingColorLabel; private ColorPicker pointIconHighlightingColorButton; private JLabel pointIconHighlightingScaleLabel; private JSpinner pointIconHighlightingScaleSpinner; private JLabel pointCubeDefaultStyle; private JLabel pointCubeLengthOfSideLabel; private JSpinner pointCubeLengthOfSideSpinner; private JLabel pointCubeFillColorLabel; private ColorPicker pointCubeFillColorButton; private JCheckBox pointCubeHighlightingCheckbox; private JLabel pointCubeHighlightingColorLabel; private ColorPicker pointCubeHighlightingColorButton; private JLabel pointCubeHighlightingLineThicknessLabel; private JSpinner pointCubeHighlightingLineThicknessSpinner; private JLabel curveDefaultStyle; private JLabel curveAltitudeModeLabel; private JComboBox<AltitudeMode> curveAltitudeModeComboBox; private JLabel curveThicknessLabel; private JSpinner curveThicknessSpinner; private JLabel curveNormalColorLabel; private ColorPicker curveNormalColorButton; private JCheckBox 
curveHighlightingCheckbox; private JLabel curveHighlightingThicknessLabel; private JSpinner curveHighlightingThicknessSpinner; private JLabel curveHighlightingColorLabel; private ColorPicker curveHighlightingColorButton; public PointAndCurveStylingPanel(Supplier<PointAndCurve> pointAndCurveSupplier, Config config) { super(config); this.pointAndCurveSupplier = pointAndCurveSupplier; initGui(); } @Override public String getTitle() { return Language.I18N.getString("pref.tree.visExport.pointAndCurve.styling"); } @Override public boolean isModified() { PointAndCurve pacSettings = pointAndCurveSupplier.get(); switch (pacSettings.getPointDisplayMode()) { case CROSS_LINE: if (!crossLineRButton.isSelected()) return true; break; case ICON: if (!iconRButton.isSelected()) return true; break; case CUBE: if (!cubeRButton.isSelected()) return true; break; } if (!pacSettings.getPointAltitudeMode().equals(pointAltitudeModeComboBox.getSelectedItem())) return true; if (pacSettings.getPointThickness() != (Double) pointCrossLineThicknessSpinner.getValue()) return true; if (pacSettings.getPointNormalColor() != pointCrossLineNormalColorButton.getBackground().getRGB()) return true; if (pacSettings.isPointHighlightingEnabled() != pointCrossLineHighlightingCheckbox.isSelected()) return true; if (pacSettings.getPointHighlightedThickness() != (Double) pointCrossLineHighlightingThicknessSpinner.getValue()) return true; if (pacSettings.getPointHighlightedColor() != pointCrossLineHighlightingColorButton.getBackground().getRGB()) return true; if (pacSettings.getPointIconColor() != pointIconColorButton.getBackground().getRGB()) return true; if (pacSettings.getPointIconScale() != (Double) pointIconScaleSpinner.getValue()) return true; if (pacSettings.isPointIconHighlightingEnabled() != pointIconHighlightingCheckbox.isSelected()) return true; if (pacSettings.getPointIconHighlightedColor() != pointIconHighlightingColorButton.getBackground().getRGB()) return true; if 
(pacSettings.getPointIconHighlightedScale() != (Double) pointIconHighlightingScaleSpinner.getValue()) return true; if (pacSettings.getPointCubeLengthOfSide() != (Double) pointCubeLengthOfSideSpinner.getValue()) return true; if (pacSettings.getPointCubeFillColor() != pointCubeFillColorButton.getBackground().getRGB()) return true; if (pacSettings.isPointCubeHighlightingEnabled() != pointCubeHighlightingCheckbox.isSelected()) return true; if (pacSettings.getPointCubeHighlightedColor() != pointCubeHighlightingColorButton.getBackground().getRGB()) return true; if (pacSettings.getPointCubeHighlightedOutlineThickness() != (Double) pointCubeHighlightingLineThicknessSpinner.getValue()) return true; if (!pacSettings.getCurveAltitudeMode().equals(curveAltitudeModeComboBox.getSelectedItem())) return true; if (pacSettings.getCurveThickness() != (Double) curveThicknessSpinner.getValue()) return true; if (pacSettings.getCurveNormalColor() != curveNormalColorButton.getBackground().getRGB()) return true; if (pacSettings.isCurveHighlightingEnabled() != curveHighlightingCheckbox.isSelected()) return true; if (pacSettings.getCurveHighlightedThickness() != (Double) curveHighlightingThicknessSpinner.getValue()) return true; if (pacSettings.getCurveHighlightedColor() != curveHighlightingColorButton.getBackground().getRGB()) return true; return false; } private void initGui() { crossLineRButton = new JRadioButton(); iconRButton = new JRadioButton(); cubeRButton = new JRadioButton(); pointAltitudeModeLabel = new JLabel(); pointAltitudeModeComboBox = new JComboBox<>(); pointCrossLineDefaultStyle = new JLabel(); pointCrossLineThicknessLabel = new JLabel(); pointCrossLineNormalColorLabel = new JLabel(); pointCrossLineNormalColorButton = new ColorPicker(); pointCrossLineHighlightingCheckbox = new JCheckBox(); pointCrossLineHighlightingThicknessLabel = new JLabel(); pointCrossLineHighlightingColorLabel = new JLabel(); pointCrossLineHighlightingColorButton = new ColorPicker(); 
// --- tail of the panel constructor (its opening is above this fragment) ---
// Instantiate the remaining widgets: icon-mode controls, cube-mode controls,
// and curve controls. All labels/checkboxes get their text later in doTranslation().
pointIconDefaultStyle = new JLabel();
pointIconColorLabel = new JLabel();
pointIconColorButton = new ColorPicker();
pointIconScaleLabel = new JLabel();
pointIconHighlightingCheckbox = new JCheckBox();
pointIconHighlightingColorLabel = new JLabel();
pointIconHighlightingColorButton = new ColorPicker();
pointIconHighlightingScaleLabel = new JLabel();
pointCubeDefaultStyle = new JLabel();
pointCubeLengthOfSideLabel = new JLabel();
pointCubeFillColorLabel = new JLabel();
pointCubeFillColorButton = new ColorPicker();
pointCubeHighlightingCheckbox = new JCheckBox();
pointCubeHighlightingColorLabel = new JLabel();
pointCubeHighlightingColorButton = new ColorPicker();
pointCubeHighlightingLineThicknessLabel = new JLabel();
curveDefaultStyle = new JLabel();
curveAltitudeModeLabel = new JLabel();
curveAltitudeModeComboBox = new JComboBox<>();
curveThicknessLabel = new JLabel();
curveNormalColorLabel = new JLabel();
curveNormalColorButton = new ColorPicker();
curveHighlightingCheckbox = new JCheckBox();
curveHighlightingThicknessLabel = new JLabel();
curveHighlightingColorLabel = new JLabel();
curveHighlightingColorButton = new ColorPicker();

// The three point display modes (cross-line / icon / cube) are mutually exclusive.
ButtonGroup pointRadioGroup = new ButtonGroup();
pointRadioGroup.add(crossLineRButton);
pointRadioGroup.add(iconRButton);
pointRadioGroup.add(cubeRButton);

// Spinners: thickness/scale values in [0.1, 10], step 0.1.
// Defaults: 1 for normal style, 2 for highlighting. All use a "#.#" English-locale format.
SpinnerModel pointThicknessModel = new SpinnerNumberModel(1, 0.1, 10, 0.1);
pointCrossLineThicknessSpinner = new JSpinner(pointThicknessModel);
setSpinnerFormat(pointCrossLineThicknessSpinner, "#.#");
SpinnerModel pointHighlightingThicknessModel = new SpinnerNumberModel(2, 0.1, 10, 0.1);
pointCrossLineHighlightingThicknessSpinner = new JSpinner(pointHighlightingThicknessModel);
setSpinnerFormat(pointCrossLineHighlightingThicknessSpinner, "#.#");
SpinnerModel pointIconScaleModel = new SpinnerNumberModel(1, 0.1, 10, 0.1);
pointIconScaleSpinner = new JSpinner(pointIconScaleModel);
setSpinnerFormat(pointIconScaleSpinner, "#.#");
SpinnerModel pointIconHighlightingScaleModel = new SpinnerNumberModel(1, 0.1, 10, 0.1);
pointIconHighlightingScaleSpinner = new JSpinner(pointIconHighlightingScaleModel);
setSpinnerFormat(pointIconHighlightingScaleSpinner, "#.#");
SpinnerModel pointCubeLengthOfSideModel = new SpinnerNumberModel(1, 0.1, 10, 0.1);
pointCubeLengthOfSideSpinner = new JSpinner(pointCubeLengthOfSideModel);
setSpinnerFormat(pointCubeLengthOfSideSpinner, "#.#");
SpinnerModel pointCubeHighlightingThicknessModel = new SpinnerNumberModel(2, 0.1, 10, 0.1);
pointCubeHighlightingLineThicknessSpinner = new JSpinner(pointCubeHighlightingThicknessModel);
setSpinnerFormat(pointCubeHighlightingLineThicknessSpinner, "#.#");
SpinnerModel curveThicknessModel = new SpinnerNumberModel(1, 0.1, 10, 0.1);
curveThicknessSpinner = new JSpinner(curveThicknessModel);
setSpinnerFormat(curveThicknessSpinner, "#.#");
SpinnerModel curveHighlightingThicknessModel = new SpinnerNumberModel(2, 0.1, 10, 0.1);
curveHighlightingThicknessSpinner = new JSpinner(curveHighlightingThicknessModel);
setSpinnerFormat(curveHighlightingThicknessSpinner, "#.#");

setLayout(new GridBagLayout());
// Scope block: build the "point" section (altitude mode + one titled sub-panel per display mode).
{
    JPanel crossLineContent = new JPanel();
    crossLineContent.setLayout(new GridBagLayout());
    {
        // Row 0: default style, row 1: highlighting style (gated by checkbox).
        JPanel defaultStyle = createStylePanel(pointCrossLineNormalColorLabel, pointCrossLineNormalColorButton, pointCrossLineThicknessLabel, pointCrossLineThicknessSpinner);
        JPanel highlightStyle = createStylePanel(pointCrossLineHighlightingColorLabel, pointCrossLineHighlightingColorButton, pointCrossLineHighlightingThicknessLabel, pointCrossLineHighlightingThicknessSpinner);
        crossLineContent.add(pointCrossLineDefaultStyle, GuiUtil.setConstraints(0, 0, 0, 0, GridBagConstraints.HORIZONTAL, 0, 0, 0, 10));
        crossLineContent.add(defaultStyle, GuiUtil.setConstraints(1, 0, 1, 0, GridBagConstraints.WEST, GridBagConstraints.NONE, 0, 0, 0, 0));
        crossLineContent.add(pointCrossLineHighlightingCheckbox, GuiUtil.setConstraints(0, 1, 0, 0, GridBagConstraints.HORIZONTAL, 5, 0, 0, 10));
        crossLineContent.add(highlightStyle, GuiUtil.setConstraints(1, 1, 1, 0, GridBagConstraints.WEST, GridBagConstraints.NONE, 5, 0, 0, 0));
    }

    // The radio button doubles as the panel's toggle, so selecting the mode expands its panel.
    crossLinePanel = new TitledPanel()
            .withToggleButton(crossLineRButton)
            .showSeparator(false)
            .withMargin(new Insets(0, 0, 0, 0))
            .build(crossLineContent);

    JPanel iconContent = new JPanel();
    iconContent.setLayout(new GridBagLayout());
    {
        JPanel defaultStyle = createStylePanel(pointIconColorLabel, pointIconColorButton, pointIconScaleLabel, pointIconScaleSpinner);
        JPanel highlightStyle = createStylePanel(pointIconHighlightingColorLabel, pointIconHighlightingColorButton, pointIconHighlightingScaleLabel, pointIconHighlightingScaleSpinner);
        iconContent.add(pointIconDefaultStyle, GuiUtil.setConstraints(0, 0, 0, 0, GridBagConstraints.HORIZONTAL, 0, 0, 0, 10));
        iconContent.add(defaultStyle, GuiUtil.setConstraints(1, 0, 1, 0, GridBagConstraints.WEST, GridBagConstraints.NONE, 0, 0, 0, 0));
        iconContent.add(pointIconHighlightingCheckbox, GuiUtil.setConstraints(0, 1, 0, 0, GridBagConstraints.HORIZONTAL, 5, 0, 0, 10));
        iconContent.add(highlightStyle, GuiUtil.setConstraints(1, 1, 1, 0, GridBagConstraints.WEST, GridBagConstraints.NONE, 5, 0, 0, 0));
    }

    iconPanel = new TitledPanel()
            .withToggleButton(iconRButton)
            .showSeparator(false)
            .withMargin(new Insets(0, 0, 0, 0))
            .build(iconContent);

    JPanel cubeContent = new JPanel();
    cubeContent.setLayout(new GridBagLayout());
    {
        JPanel defaultStyle = createStylePanel(pointCubeFillColorLabel, pointCubeFillColorButton, pointCubeLengthOfSideLabel, pointCubeLengthOfSideSpinner);
        JPanel highlightStyle = createStylePanel(pointCubeHighlightingColorLabel, pointCubeHighlightingColorButton, pointCubeHighlightingLineThicknessLabel, pointCubeHighlightingLineThicknessSpinner);
        cubeContent.add(pointCubeDefaultStyle, GuiUtil.setConstraints(0, 0, 0, 0, GridBagConstraints.HORIZONTAL, 0, 0, 0, 10));
        cubeContent.add(defaultStyle, GuiUtil.setConstraints(1, 0, 1, 0, GridBagConstraints.WEST, GridBagConstraints.NONE, 0, 0, 0, 0));
        cubeContent.add(pointCubeHighlightingCheckbox, GuiUtil.setConstraints(0, 1, 0, 0, GridBagConstraints.HORIZONTAL, 5, 0, 0, 10));
        cubeContent.add(highlightStyle, GuiUtil.setConstraints(1, 1, 1, 0, GridBagConstraints.WEST, GridBagConstraints.NONE, 5, 0, 0, 0));
    }

    cubePanel = new TitledPanel()
            .withToggleButton(cubeRButton)
            .showSeparator(false)
            .withMargin(new Insets(0, 0, 0, 0))
            .build(cubeContent);

    JPanel content = new JPanel();
    content.setLayout(new GridBagLayout());
    content.add(pointAltitudeModeLabel, GuiUtil.setConstraints(0, 0, 0, 0, GridBagConstraints.HORIZONTAL, 0, 0, 0, 5));
    content.add(pointAltitudeModeComboBox, GuiUtil.setConstraints(1, 0, 1, 0, GridBagConstraints.HORIZONTAL, 0, 5, 0, 0));
    content.add(crossLinePanel, GuiUtil.setConstraints(0, 1, 2, 1, 1, 0, GridBagConstraints.BOTH, 5, 0, 0, 0));
    content.add(iconPanel, GuiUtil.setConstraints(0, 2, 2, 1, 1, 0, GridBagConstraints.BOTH, 5, 0, 0, 0));
    content.add(cubePanel, GuiUtil.setConstraints(0, 3, 2, 1, 1, 0, GridBagConstraints.BOTH, 5, 0, 0, 0));

    pointPanel = new TitledPanel().build(content);
}
// Scope block: build the "curve" section (altitude mode + default/highlighting style rows).
{
    JPanel content = new JPanel();
    content.setLayout(new GridBagLayout());
    {
        JPanel defaultStyle = createStylePanel(curveNormalColorLabel, curveNormalColorButton, curveThicknessLabel, curveThicknessSpinner);
        JPanel highlightStyle = createStylePanel(curveHighlightingColorLabel, curveHighlightingColorButton, curveHighlightingThicknessLabel, curveHighlightingThicknessSpinner);
        content.add(curveAltitudeModeLabel, GuiUtil.setConstraints(0, 0, 0, 0, GridBagConstraints.HORIZONTAL, 0, 0, 0, 10));
        content.add(curveAltitudeModeComboBox, GuiUtil.setConstraints(1, 0, 1, 0, GridBagConstraints.HORIZONTAL, 0, 0, 0, 0));
        content.add(curveDefaultStyle, GuiUtil.setConstraints(0, 1, 0, 0, GridBagConstraints.HORIZONTAL, 5, 0, 0, 10));
        content.add(defaultStyle, GuiUtil.setConstraints(1, 1, 1, 0, GridBagConstraints.WEST, GridBagConstraints.NONE, 5, 0, 0, 0));
        content.add(curveHighlightingCheckbox, GuiUtil.setConstraints(0, 2, 0, 0, GridBagConstraints.HORIZONTAL, 5, 0, 0, 10));
        content.add(highlightStyle, GuiUtil.setConstraints(1, 2, 1, 0, GridBagConstraints.WEST, GridBagConstraints.NONE, 5, 0, 0, 0));
    }

    curvePanel = new TitledPanel().build(content);
}

add(pointPanel, GuiUtil.setConstraints(0, 0, 1, 0, GridBagConstraints.BOTH, 0, 0, 0, 0));
add(curvePanel, GuiUtil.setConstraints(0, 1, 1, 0, GridBagConstraints.BOTH, 0, 0, 0, 0));

// Attach the standard context menu (cut/copy/paste) to every spinner's text field.
PopupMenuDecorator.getInstance().decorate(((JSpinner.DefaultEditor) pointCrossLineThicknessSpinner.getEditor()).getTextField(),
        ((JSpinner.DefaultEditor) pointCrossLineHighlightingThicknessSpinner.getEditor()).getTextField(),
        ((JSpinner.DefaultEditor) pointIconScaleSpinner.getEditor()).getTextField(),
        ((JSpinner.DefaultEditor) pointIconHighlightingScaleSpinner.getEditor()).getTextField(),
        ((JSpinner.DefaultEditor) pointCubeLengthOfSideSpinner.getEditor()).getTextField(),
        ((JSpinner.DefaultEditor) pointCubeHighlightingLineThicknessSpinner.getEditor()).getTextField(),
        ((JSpinner.DefaultEditor) curveThicknessSpinner.getEditor()).getTextField(),
        ((JSpinner.DefaultEditor) curveHighlightingThicknessSpinner.getEditor()).getTextField());

// Any change to the mode selection or a highlighting checkbox re-evaluates which controls are enabled.
iconRButton.addActionListener(e -> setEnabledPointComponents());
crossLineRButton.addActionListener(e -> setEnabledPointComponents());
cubeRButton.addActionListener(e -> setEnabledPointComponents());
pointCrossLineHighlightingCheckbox.addActionListener(e -> setEnabledPointComponents());
pointIconHighlightingCheckbox.addActionListener(e -> setEnabledPointComponents());
pointCubeHighlightingCheckbox.addActionListener(e -> setEnabledPointComponents());
curveHighlightingCheckbox.addActionListener(e -> setEnabledCurveComponents());
}

/**
 * Builds a one-row style panel: a color picker with its label and, optionally,
 * a stroke spinner (thickness or scale) with its label.
 *
 * @param colorLabel  label shown next to the color picker
 * @param color       the color picker component
 * @param strokeLabel label for the stroke control; may be null to omit the stroke pair
 * @param stroke      the stroke spinner; may be null to omit the stroke pair
 * @return the assembled panel
 */
private JPanel createStylePanel(JLabel colorLabel, ColorPicker color, JLabel strokeLabel, JSpinner stroke) {
    JPanel panel = new JPanel();
    panel.setLayout(new GridBagLayout());
    {
        panel.add(color, GuiUtil.setConstraints(0, 0, 0, 0, GridBagConstraints.NONE, 0, 0, 0, 0));
        panel.add(colorLabel, GuiUtil.setConstraints(1, 0, 0, 0, GridBagConstraints.HORIZONTAL, 0, 5, 0, 0));
        // The stroke pair is optional: the two-argument overload passes null/null.
        if (strokeLabel != null && stroke != null) {
            panel.add(stroke, GuiUtil.setConstraints(2, 0, 0, 0, GridBagConstraints.NONE, 0, 20, 0, 0));
            panel.add(strokeLabel, GuiUtil.setConstraints(3, 0, 0, 0, GridBagConstraints.HORIZONTAL, 0, 5, 0, 0));
        }
    }
    return panel;
}

/** Convenience overload: color-only style panel (no stroke spinner). */
private JPanel createStylePanel(JLabel fillColorLabel, ColorPicker fillColor) {
    return createStylePanel(fillColorLabel, fillColor, null, null);
}

/**
 * Applies the current UI language: sets all titles, labels, checkbox texts and
 * color-chooser dialog titles, and repopulates the altitude-mode combo boxes.
 */
@Override
public void doTranslation() {
    PointAndCurve pacSettings = pointAndCurveSupplier.get();
    pointPanel.setTitle(Language.I18N.getString("pref.visExport.border.point"));
    // NOTE(review): the point altitude label reuses the "curveAltitudeMode" key —
    // presumably a shared "altitude mode" string; confirm it is not a copy-paste slip.
    pointAltitudeModeLabel.setText(Language.I18N.getString("pref.visExport.label.curveAltitudeMode"));
    pointAltitudeModeComboBox.removeAllItems();
    for (AltitudeMode c: AltitudeMode.values()) {
        pointAltitudeModeComboBox.addItem(c);
    }
    pointCrossLineDefaultStyle.setText(Language.I18N.getString("pref.visExport.label.defaultStyle"));
    pointCrossLineNormalColorButton.setDialogTitle(Language.I18N.getString("pref.visExport.label.choosePointColor"));
    pointCrossLineHighlightingColorButton.setDialogTitle(Language.I18N.getString("pref.visExport.label.choosePointHighlightingColor"));
    pointIconColorButton.setDialogTitle(Language.I18N.getString("pref.visExport.label.choosePointIconColor"));
    pointCubeFillColorButton.setDialogTitle(Language.I18N.getString("pref.visExport.label.chooseFillColor"));
    // NOTE(review): the cube highlighting chooser reuses the "chooseFillColor" title key — verify intended.
    pointCubeHighlightingColorButton.setDialogTitle(Language.I18N.getString("pref.visExport.label.chooseFillColor"));
    curveNormalColorButton.setDialogTitle(Language.I18N.getString("pref.visExport.label.chooseCurveColor"));
    curveHighlightingColorButton.setDialogTitle(Language.I18N.getString("pref.visExport.label.chooseCurveHighlightingColor"));
    iconPanel.setTitle(Language.I18N.getString("pref.visExport.pointdisplay.mode.label.icon"));
    crossLinePanel.setTitle(Language.I18N.getString("pref.visExport.pointdisplay.mode.label.cross"));
    cubePanel.setTitle(Language.I18N.getString("pref.visExport.pointdisplay.mode.label.cube"));
    pointCubeDefaultStyle.setText(Language.I18N.getString("pref.visExport.label.defaultStyle"));
    pointCubeLengthOfSideLabel.setText(Language.I18N.getString("pref.visExport.label.cubeSideLength"));
    pointCubeFillColorLabel.setText(Language.I18N.getString("pref.visExport.label.pointColor"));
    pointCubeHighlightingCheckbox.setText(Language.I18N.getString("pref.visExport.label.highlightStyle"));
    pointCubeHighlightingColorLabel.setText(Language.I18N.getString("pref.visExport.label.pointColor"));
    pointCubeHighlightingLineThicknessLabel.setText(Language.I18N.getString("pref.visExport.label.curveThickness"));
    pointIconDefaultStyle.setText(Language.I18N.getString("pref.visExport.label.defaultStyle"));
    pointIconColorLabel.setText(Language.I18N.getString("pref.visExport.label.pointColor"));
    pointIconScaleLabel.setText(Language.I18N.getString("pref.visExport.label.pointIconScale"));
    pointIconHighlightingCheckbox.setText(Language.I18N.getString("pref.visExport.label.highlightStyle"));
    pointIconHighlightingColorLabel.setText(Language.I18N.getString("pref.visExport.label.pointColor"));
    pointIconHighlightingScaleLabel.setText(Language.I18N.getString("pref.visExport.label.pointIconScale"));
    pointAltitudeModeComboBox.setSelectedItem(pacSettings.getPointAltitudeMode());
    pointCrossLineThicknessLabel.setText(Language.I18N.getString("pref.visExport.label.curveThickness"));
    pointCrossLineNormalColorLabel.setText(Language.I18N.getString("pref.visExport.label.pointColor"));
    pointCrossLineHighlightingCheckbox.setText(Language.I18N.getString("pref.visExport.label.highlightStyle"));
    pointCrossLineHighlightingThicknessLabel.setText(Language.I18N.getString("pref.visExport.label.curveThickness"));
    pointCrossLineHighlightingColorLabel.setText(Language.I18N.getString("pref.visExport.label.pointColor"));
    curvePanel.setTitle(Language.I18N.getString("pref.visExport.border.curve"));
    curveAltitudeModeLabel.setText(Language.I18N.getString("pref.visExport.label.curveAltitudeMode"));
    curveAltitudeModeComboBox.removeAllItems();
    for (AltitudeMode c: AltitudeMode.values()) {
        curveAltitudeModeComboBox.addItem(c);
    }
    curveDefaultStyle.setText(Language.I18N.getString("pref.visExport.label.defaultStyle"));
    curveAltitudeModeComboBox.setSelectedItem(pacSettings.getCurveAltitudeMode());
    curveThicknessLabel.setText(Language.I18N.getString("pref.visExport.label.curveThickness"));
    curveNormalColorLabel.setText(Language.I18N.getString("pref.visExport.label.curveColor"));
    curveHighlightingCheckbox.setText(Language.I18N.getString("pref.visExport.label.highlightStyle"));
    curveHighlightingThicknessLabel.setText(Language.I18N.getString("pref.visExport.label.curveThickness"));
    curveHighlightingColorLabel.setText(Language.I18N.getString("pref.visExport.label.curveColor"));
}

/**
 * Pushes the persisted PointAndCurve settings into the UI controls,
 * then refreshes the enabled state of all dependent components.
 */
@Override
public void loadSettings() {
    PointAndCurve pacSettings = pointAndCurveSupplier.get();
    switch (pacSettings.getPointDisplayMode()) {
        case ICON:
            iconRButton.setSelected(true);
            break;
        case CROSS_LINE:
            crossLineRButton.setSelected(true);
            break;
        case CUBE:
            cubeRButton.setSelected(true);
            break;
    }
    pointAltitudeModeComboBox.setSelectedItem(pacSettings.getPointAltitudeMode());
    pointCrossLineThicknessSpinner.setValue(pacSettings.getPointThickness());
    // Colors are stored as ARGB ints; 'true' keeps the alpha channel.
    pointCrossLineNormalColorButton.setColor(new Color(pacSettings.getPointNormalColor(), true));
    pointCrossLineHighlightingCheckbox.setSelected(pacSettings.isPointHighlightingEnabled());
    pointCrossLineHighlightingThicknessSpinner.setValue(pacSettings.getPointHighlightedThickness());
    pointCrossLineHighlightingColorButton.setColor(new Color(pacSettings.getPointHighlightedColor(), true));
    pointIconColorButton.setColor(new Color(pacSettings.getPointIconColor(), true));
    pointIconScaleSpinner.setValue(pacSettings.getPointIconScale());
    pointIconHighlightingCheckbox.setSelected(pacSettings.isPointIconHighlightingEnabled());
    pointIconHighlightingColorButton.setColor(new Color(pacSettings.getPointIconHighlightedColor(), true));
    pointIconHighlightingScaleSpinner.setValue(pacSettings.getPointIconHighlightedScale());
    pointCubeLengthOfSideSpinner.setValue(pacSettings.getPointCubeLengthOfSide());
    pointCubeFillColorButton.setColor(new Color(pacSettings.getPointCubeFillColor(), true));
    pointCubeHighlightingCheckbox.setSelected(pacSettings.isPointCubeHighlightingEnabled());
    pointCubeHighlightingColorButton.setColor(new Color(pacSettings.getPointCubeHighlightedColor(), true));
    pointCubeHighlightingLineThicknessSpinner.setValue(pacSettings.getPointCubeHighlightedOutlineThickness());
    curveAltitudeModeComboBox.setSelectedItem(pacSettings.getCurveAltitudeMode());
    curveThicknessSpinner.setValue(pacSettings.getCurveThickness());
    curveNormalColorButton.setColor(new Color(pacSettings.getCurveNormalColor(), true));
    curveHighlightingCheckbox.setSelected(pacSettings.isCurveHighlightingEnabled());
    curveHighlightingThicknessSpinner.setValue(pacSettings.getCurveHighlightedThickness());
    curveHighlightingColorButton.setColor(new Color(pacSettings.getCurveHighlightedColor(), true));
    setEnabledComponents();
}

/**
 * Writes the current UI state back into the PointAndCurve settings object.
 * Inverse of loadSettings().
 */
@Override
public void setSettings() {
    PointAndCurve pacSettings = pointAndCurveSupplier.get();
    if (iconRButton.isSelected()) {
        pacSettings.setPointDisplayMode(PointDisplayMode.ICON);
    } else if (crossLineRButton.isSelected()) {
        pacSettings.setPointDisplayMode(PointDisplayMode.CROSS_LINE);
    } else if (cubeRButton.isSelected()) {
        pacSettings.setPointDisplayMode(PointDisplayMode.CUBE);
    }
    pacSettings.setPointAltitudeMode((AltitudeMode) pointAltitudeModeComboBox.getSelectedItem());
    pacSettings.setPointThickness((Double) pointCrossLineThicknessSpinner.getValue());
    pacSettings.setPointNormalColor(pointCrossLineNormalColorButton.getColor().getRGB());
    pacSettings.setPointHighlightingEnabled(pointCrossLineHighlightingCheckbox.isSelected());
    pacSettings.setPointHighlightedThickness((Double) pointCrossLineHighlightingThicknessSpinner.getValue());
    pacSettings.setPointHighlightedColor(pointCrossLineHighlightingColorButton.getColor().getRGB());
    pacSettings.setPointIconColor(pointIconColorButton.getColor().getRGB());
    pacSettings.setPointIconScale((Double) pointIconScaleSpinner.getValue());
    pacSettings.setPointIconHighlightingEnabled(pointIconHighlightingCheckbox.isSelected());
    pacSettings.setPointIconHighlightedColor(pointIconHighlightingColorButton.getColor().getRGB());
    pacSettings.setPointIconHighlightedScale((Double) pointIconHighlightingScaleSpinner.getValue());
    pacSettings.setPointCubeLengthOfSide((Double) pointCubeLengthOfSideSpinner.getValue());
    pacSettings.setPointCubeFillColor(pointCubeFillColorButton.getColor().getRGB());
    pacSettings.setPointCubeHighlightingEnabled(pointCubeHighlightingCheckbox.isSelected());
    pacSettings.setPointCubeHighlightedColor(pointCubeHighlightingColorButton.getColor().getRGB());
    pacSettings.setPointCubeHighlightedOutlineThickness((Double) pointCubeHighlightingLineThicknessSpinner.getValue());
    pacSettings.setCurveAltitudeMode((AltitudeMode) curveAltitudeModeComboBox.getSelectedItem());
    pacSettings.setCurveThickness((Double) curveThicknessSpinner.getValue());
    pacSettings.setCurveNormalColor(curveNormalColorButton.getColor().getRGB());
    pacSettings.setCurveHighlightingEnabled(curveHighlightingCheckbox.isSelected());
    pacSettings.setCurveHighlightedThickness((Double) curveHighlightingThicknessSpinner.getValue());
    pacSettings.setCurveHighlightedColor(curveHighlightingColorButton.getColor().getRGB());
}

/** Refreshes the enabled state of both the point and the curve control groups. */
private void setEnabledComponents() {
    setEnabledPointComponents();
    setEnabledCurveComponents();
}

/**
 * Enables exactly the point controls belonging to the selected display mode;
 * within a mode, highlighting controls additionally require their checkbox.
 */
private void setEnabledPointComponents() {
    // Icon mode controls.
    pointIconDefaultStyle.setEnabled(iconRButton.isSelected());
    pointIconColorLabel.setEnabled(iconRButton.isSelected());
    pointIconColorButton.setEnabled(iconRButton.isSelected());
    pointIconScaleLabel.setEnabled(iconRButton.isSelected());
    pointIconScaleSpinner.setEnabled(iconRButton.isSelected());
    pointIconHighlightingCheckbox.setEnabled(iconRButton.isSelected());
    pointIconHighlightingColorLabel.setEnabled(pointIconHighlightingCheckbox.isSelected() && iconRButton.isSelected());
    pointIconHighlightingColorButton.setEnabled(pointIconHighlightingCheckbox.isSelected() && iconRButton.isSelected());
    pointIconHighlightingScaleLabel.setEnabled(pointIconHighlightingCheckbox.isSelected() && iconRButton.isSelected());
    pointIconHighlightingScaleSpinner.setEnabled(pointIconHighlightingCheckbox.isSelected() && iconRButton.isSelected());

    // Cross-line mode controls.
    pointCrossLineDefaultStyle.setEnabled(crossLineRButton.isSelected());
    pointCrossLineThicknessLabel.setEnabled(crossLineRButton.isSelected());
    pointCrossLineThicknessSpinner.setEnabled(crossLineRButton.isSelected());
    pointCrossLineNormalColorLabel.setEnabled(crossLineRButton.isSelected());
    pointCrossLineNormalColorButton.setEnabled(crossLineRButton.isSelected());
    pointCrossLineHighlightingCheckbox.setEnabled(crossLineRButton.isSelected());
    pointCrossLineHighlightingColorLabel.setEnabled(pointCrossLineHighlightingCheckbox.isSelected() && crossLineRButton.isSelected());
    pointCrossLineHighlightingColorButton.setEnabled(pointCrossLineHighlightingCheckbox.isSelected() && crossLineRButton.isSelected());
    pointCrossLineHighlightingThicknessLabel.setEnabled(pointCrossLineHighlightingCheckbox.isSelected() && crossLineRButton.isSelected());
    pointCrossLineHighlightingThicknessSpinner.setEnabled(pointCrossLineHighlightingCheckbox.isSelected() && crossLineRButton.isSelected());

    // Cube mode controls.
    pointCubeDefaultStyle.setEnabled(cubeRButton.isSelected());
    pointCubeLengthOfSideLabel.setEnabled(cubeRButton.isSelected());
    pointCubeLengthOfSideSpinner.setEnabled(cubeRButton.isSelected());
    pointCubeFillColorLabel.setEnabled(cubeRButton.isSelected());
    pointCubeFillColorButton.setEnabled(cubeRButton.isSelected());
    pointCubeHighlightingCheckbox.setEnabled(cubeRButton.isSelected());
    pointCubeHighlightingColorLabel.setEnabled(pointCubeHighlightingCheckbox.isSelected() && cubeRButton.isSelected());
    pointCubeHighlightingColorButton.setEnabled(pointCubeHighlightingCheckbox.isSelected() && cubeRButton.isSelected());
    pointCubeHighlightingLineThicknessLabel.setEnabled(pointCubeHighlightingCheckbox.isSelected() && cubeRButton.isSelected());
    pointCubeHighlightingLineThicknessSpinner.setEnabled(pointCubeHighlightingCheckbox.isSelected() && cubeRButton.isSelected());
}

/** Curve highlighting controls follow the curve highlighting checkbox. */
private void setEnabledCurveComponents() {
    curveHighlightingColorLabel.setEnabled(curveHighlightingCheckbox.isSelected());
    curveHighlightingColorButton.setEnabled(curveHighlightingCheckbox.isSelected());
    curveHighlightingThicknessLabel.setEnabled(curveHighlightingCheckbox.isSelected());
    curveHighlightingThicknessSpinner.setEnabled(curveHighlightingCheckbox.isSelected());
}

/**
 * Installs a NumberEditor with the given pattern on the spinner, forcing
 * English decimal symbols so values render with '.' regardless of locale.
 */
private void setSpinnerFormat(JSpinner spinner, String decimalFormatPattern) {
    JSpinner.NumberEditor editor = new JSpinner.NumberEditor(spinner, decimalFormatPattern);
    DecimalFormat format = editor.getFormat();
    format.setDecimalFormatSymbols(DecimalFormatSymbols.getInstance(Locale.ENGLISH));
    spinner.setEditor(editor);
}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.cassandra.concurrent;

import java.util.concurrent.*;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.cassandra.tracing.TraceState;
import org.apache.cassandra.tracing.Tracing;

import static org.apache.cassandra.tracing.Tracing.isTracing;

/**
 * This class incorporates some Executor best practices for Cassandra. Most of the executors in the system
 * should use or extend this. There are two main improvements over a vanilla TPE:
 *
 * - If a task throws an exception, the default uncaught exception handler will be invoked; if there is
 *   no such handler, the exception will be logged.
 * - MaximumPoolSize is not supported. Here is what that means (quoting TPE javadoc):
 *
 *     If fewer than corePoolSize threads are running, the Executor always prefers adding a new thread rather than queuing.
 *     If corePoolSize or more threads are running, the Executor always prefers queuing a request rather than adding a new thread.
 *     If a request cannot be queued, a new thread is created unless this would exceed maximumPoolSize, in which case, the task will be rejected.
 *
 *   We don't want this last stage of creating new threads if the queue is full; it makes it needlessly difficult to
 *   reason about the system's behavior. In other words, if DebuggableTPE has allocated our maximum number of (core)
 *   threads and the queue is full, we want the enqueuer to block. But to allow the number of threads to drop if a
 *   stage is less busy, core thread timeout is enabled.
 */
public class DebuggableThreadPoolExecutor extends ThreadPoolExecutor
{
    protected static final Logger logger = LoggerFactory.getLogger(DebuggableThreadPoolExecutor.class);

    /**
     * Rejection handler that turns TPE's "reject when queue is full" into "block until
     * there is room". Retries queue.offer with a 1s timeout in a loop so that a shutdown
     * occurring while we wait is still noticed and converted into a RejectedExecutionException.
     */
    public static final RejectedExecutionHandler blockingExecutionHandler = new RejectedExecutionHandler()
    {
        public void rejectedExecution(Runnable task, ThreadPoolExecutor executor)
        {
            // Hooks for subclasses (e.g. metrics): first rejection, final rejection, final acceptance.
            ((DebuggableThreadPoolExecutor) executor).onInitialRejection(task);
            BlockingQueue<Runnable> queue = executor.getQueue();
            while (true)
            {
                if (executor.isShutdown())
                {
                    ((DebuggableThreadPoolExecutor) executor).onFinalRejection(task);
                    throw new RejectedExecutionException("ThreadPoolExecutor has shut down");
                }
                try
                {
                    // Bounded wait so the shutdown check above is re-evaluated at least once per second.
                    if (queue.offer(task, 1000, TimeUnit.MILLISECONDS))
                    {
                        ((DebuggableThreadPoolExecutor) executor).onFinalAccept(task);
                        break;
                    }
                }
                catch (InterruptedException e)
                {
                    // Executor internals are not expected to interrupt us here.
                    throw new AssertionError(e);
                }
            }
        }
    };

    /**
     * Single-threaded executor with an unbounded queue.
     *
     * @param threadPoolName name prefix for the worker thread
     * @param priority       thread priority for the worker thread
     */
    public DebuggableThreadPoolExecutor(String threadPoolName, int priority)
    {
        this(1, Integer.MAX_VALUE, TimeUnit.SECONDS, new LinkedBlockingQueue<Runnable>(), new NamedThreadFactory(threadPoolName, priority));
    }

    /**
     * Fixed-size executor: maximumPoolSize is pinned to corePoolSize (see class javadoc
     * for why maximum pool size is deliberately not supported).
     */
    public DebuggableThreadPoolExecutor(int corePoolSize, long keepAliveTime, TimeUnit unit, BlockingQueue<Runnable> queue, ThreadFactory factory)
    {
        this(corePoolSize, corePoolSize, keepAliveTime, unit, queue, factory);
    }

    public DebuggableThreadPoolExecutor(int corePoolSize, int maximumPoolSize, long keepAliveTime, TimeUnit unit, BlockingQueue<Runnable> workQueue, ThreadFactory threadFactory)
    {
        super(corePoolSize, maximumPoolSize, keepAliveTime, unit, workQueue, threadFactory);
        // Let idle core threads die so a quiet stage releases its threads.
        allowCoreThreadTimeOut(true);

        // block task submissions until queue has room.
        // this is fighting TPE's design a bit because TPE rejects if queue.offer reports a full queue.
        // we'll just override this with a handler that retries until it gets in.  ugly, but effective.
        // (there is an extensive analysis of the options here at
        //  http://today.java.net/pub/a/today/2008/10/23/creating-a-notifying-blocking-thread-pool-executor.html)
        this.setRejectedExecutionHandler(blockingExecutionHandler);
    }

    /**
     * Returns a ThreadPoolExecutor with a fixed number of threads.
     * When all threads are actively executing tasks, new tasks are queued.
     * If (most) threads are expected to be idle most of the time, prefer createWithMaxSize() instead.
     *
     * @param threadPoolName the name of the threads created by this executor
     * @param size the fixed number of threads for this executor
     * @return the new DebuggableThreadPoolExecutor
     */
    public static DebuggableThreadPoolExecutor createWithFixedPoolSize(String threadPoolName, int size)
    {
        return createWithMaximumPoolSize(threadPoolName, size, Integer.MAX_VALUE, TimeUnit.SECONDS);
    }

    /**
     * Returns a ThreadPoolExecutor with a fixed maximum number of threads, but whose
     * threads are terminated when idle for too long.
     * When all threads are actively executing tasks, new tasks are queued.
     *
     * @param threadPoolName the name of the threads created by this executor
     * @param size the maximum number of threads for this executor
     * @param keepAliveTime the time an idle thread is kept alive before being terminated
     * @param unit the time unit for {@code keepAliveTime}
     * @return the new DebuggableThreadPoolExecutor
     */
    public static DebuggableThreadPoolExecutor createWithMaximumPoolSize(String threadPoolName, int size, int keepAliveTime, TimeUnit unit)
    {
        return new DebuggableThreadPoolExecutor(size, Integer.MAX_VALUE, keepAliveTime, unit, new LinkedBlockingQueue<Runnable>(), new NamedThreadFactory(threadPoolName));
    }

    // Subclass hooks invoked by blockingExecutionHandler; no-ops by default.
    protected void onInitialRejection(Runnable task) {}
    protected void onFinalAccept(Runnable task) {}
    protected void onFinalRejection(Runnable task) {}

    // execute does not call newTaskFor, so tracing tasks must be wrapped here as well.
    @Override
    public void execute(Runnable command)
    {
        super.execute(isTracing() && !(command instanceof TraceSessionWrapper)
                      ? new TraceSessionWrapper<Object>(command, null)
                      : command);
    }

    /** Wraps submitted Runnables so the current trace session follows the task onto the worker thread. */
    @Override
    protected <T> RunnableFuture<T> newTaskFor(Runnable runnable, T result)
    {
        if (isTracing() && !(runnable instanceof TraceSessionWrapper))
        {
            return new TraceSessionWrapper<T>(runnable, result);
        }
        return super.newTaskFor(runnable, result);
    }

    /** Wraps submitted Callables so the current trace session follows the task onto the worker thread. */
    @Override
    protected <T> RunnableFuture<T> newTaskFor(Callable<T> callable)
    {
        if (isTracing() && !(callable instanceof TraceSessionWrapper))
        {
            return new TraceSessionWrapper<T>(callable);
        }
        return super.newTaskFor(callable);
    }

    @Override
    protected void afterExecute(Runnable r, Throwable t)
    {
        super.afterExecute(r, t);

        if (r instanceof TraceSessionWrapper)
        {
            TraceSessionWrapper tsw = (TraceSessionWrapper) r;
            // we have to reset trace state as its presence is what denotes the current thread is tracing
            // and if left this thread might start tracing unrelated tasks
            tsw.reset();
        }

        logExceptionsAfterExecute(r, t);
    }

    @Override
    protected void beforeExecute(Thread t, Runnable r)
    {
        // Install the task's trace session on the worker thread before it runs.
        if (r instanceof TraceSessionWrapper)
            ((TraceSessionWrapper) r).setupContext();

        super.beforeExecute(t, r);
    }

    /**
     * Send @param t and any exception wrapped by @param r to the default uncaught exception handler,
     * or log them if none such is set up
     */
    public static void logExceptionsAfterExecute(Runnable r, Throwable t)
    {
        Throwable hiddenThrowable = extractThrowable(r);
        if (hiddenThrowable != null)
            handleOrLog(hiddenThrowable);

        // ThreadPoolExecutor will re-throw exceptions thrown by its Task (which will be seen by
        // the default uncaught exception handler) so we only need to do anything if that handler
        // isn't set up yet.
        if (t != null && Thread.getDefaultUncaughtExceptionHandler() == null)
            handleOrLog(t);
    }

    /**
     * Send @param t to the default uncaught exception handler, or log it if none such is set up
     */
    public static void handleOrLog(Throwable t)
    {
        if (Thread.getDefaultUncaughtExceptionHandler() == null)
            logger.error("Error in ThreadPoolExecutor", t);
        else
            Thread.getDefaultUncaughtExceptionHandler().uncaughtException(Thread.currentThread(), t);
    }

    /**
     * @return any exception wrapped by @param runnable, i.e., if it is a FutureTask
     */
    public static Throwable extractThrowable(Runnable runnable)
    {
        // Check for exceptions wrapped by FutureTask.  We do this by calling get(), which will
        // cause it to throw any saved exception.
        //
        // Complicating things, calling get() on a ScheduledFutureTask will block until the task
        // is cancelled.  Hence, the extra isDone check beforehand.
        if ((runnable instanceof Future<?>) && ((Future<?>) runnable).isDone())
        {
            try
            {
                ((Future<?>) runnable).get();
            }
            catch (InterruptedException e)
            {
                // the task is done, so get() should never actually block here
                throw new AssertionError(e);
            }
            catch (CancellationException e)
            {
                logger.debug("Task cancelled", e);
            }
            catch (ExecutionException e)
            {
                return e.getCause();
            }
        }
        return null;
    }

    /**
     * Used to wrap a Runnable or Callable passed to submit or execute so we can clone the TraceSessionContext and move
     * it into the worker thread.
     *
     * @param <T>
     */
    private static class TraceSessionWrapper<T> extends FutureTask<T>
    {
        // Trace state captured on the submitting thread at construction time.
        private final TraceState state;

        public TraceSessionWrapper(Runnable runnable, T result)
        {
            super(runnable, result);
            state = Tracing.instance().get();
        }

        public TraceSessionWrapper(Callable<T> callable)
        {
            super(callable);
            state = Tracing.instance().get();
        }

        /** Installs the captured trace state on the current (worker) thread. */
        private void setupContext()
        {
            Tracing.instance().set(state);
        }

        /** Clears trace state from the worker thread after the task completes. */
        private void reset()
        {
            Tracing.instance().set(null);
        }
    }
}
/*
 * Copyright 2011-2017 Bas Leijdekkers
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.siyeh.ig.migration;

import com.intellij.codeInspection.ProblemDescriptor;
import com.intellij.codeInspection.ProblemHighlightType;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.InvalidDataException;
import com.intellij.openapi.util.WriteExternalException;
import com.intellij.psi.*;
import com.intellij.psi.tree.IElementType;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.ui.DocumentAdapter;
import com.intellij.util.ui.CheckBox;
import com.siyeh.InspectionGadgetsBundle;
import com.siyeh.ig.BaseInspection;
import com.siyeh.ig.BaseInspectionVisitor;
import com.siyeh.ig.InspectionGadgetsFix;
import com.siyeh.ig.psiutils.*;
import com.siyeh.ig.psiutils.SwitchUtils.IfStatementBranch;
import org.jdom.Element;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import javax.swing.event.DocumentEvent;
import javax.swing.text.Document;
import java.awt.*;
import java.text.NumberFormat;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.List;

/**
 * Inspection that detects {@code if}-{@code else if} chains which can be
 * converted into a {@code switch} statement, and offers a quick fix that
 * performs the conversion (including migration of comments and relabeling of
 * {@code break} statements where necessary).
 */
public class IfCanBeSwitchInspection extends BaseInspection {

  // XML option name used by writeSettings/readSettings below.
  @NonNls private static final String ONLY_SAFE = "onlySuggestNullSafe";

  // Minimum number of if/else-if branches before the inspection fires;
  // editable through the options panel.
  @SuppressWarnings("PublicField") public int minimumBranches = 3;
  // When false, switches over int-like values (int/short/byte/char and their
  // boxed types) are not suggested.
  @SuppressWarnings("PublicField") public boolean suggestIntSwitches = false;
  // When false, switches over enum values are not suggested.
  @SuppressWarnings("PublicField") public boolean suggestEnumSwitches = false;
  // When true, String comparisons that are not provably null-safe are only
  // reported at INFORMATION level (see shouldHighlight below).
  protected boolean onlySuggestNullSafe = true;

  @Override
  public boolean isEnabledByDefault() {
    return true;
  }

  @Nls
  @NotNull
  @Override
  public String getDisplayName() {
    return InspectionGadgetsBundle.message("if.can.be.switch.display.name");
  }

  @NotNull
  @Override
  protected String buildErrorString(Object... infos) {
    return InspectionGadgetsBundle.message("if.can.be.switch.problem.descriptor");
  }

  @Override
  protected InspectionGadgetsFix buildFix(Object... infos) {
    return new IfCanBeSwitchFix();
  }

  /**
   * Builds the Swing options panel: a numeric field bound to
   * {@link #minimumBranches} plus three checkboxes bound (by field name) to
   * the three boolean options.
   */
  @Override
  public JComponent createOptionsPanel() {
    final JPanel panel = new JPanel(new GridBagLayout());
    final JLabel label = new JLabel(InspectionGadgetsBundle.message("if.can.be.switch.minimum.branch.option"));
    final NumberFormat formatter = NumberFormat.getIntegerInstance();
    formatter.setParseIntegerOnly(true);
    final JFormattedTextField valueField = new JFormattedTextField(formatter);
    valueField.setValue(Integer.valueOf(minimumBranches));
    valueField.setColumns(2);
    final Document document = valueField.getDocument();
    // Push every edit of the text field back into minimumBranches as soon as
    // it parses as a number; unparseable intermediate states are ignored.
    document.addDocumentListener(new DocumentAdapter() {
      @Override
      public void textChanged(DocumentEvent e) {
        try {
          valueField.commitEdit();
          minimumBranches = ((Number)valueField.getValue()).intValue();
        }
        catch (ParseException ignore) {
          // No luck this time
        }
      }
    });
    final GridBagConstraints constraints = new GridBagConstraints();
    constraints.gridx = 0;
    constraints.gridy = 0;
    constraints.insets.bottom = 4;
    constraints.weightx = 0.0;
    constraints.anchor = GridBagConstraints.BASELINE_LEADING;
    constraints.fill = GridBagConstraints.NONE;
    constraints.insets.right = 10;
    panel.add(label, constraints);
    constraints.gridx = 1;
    constraints.gridy = 0;
    constraints.weightx = 1.0;
    constraints.insets.right = 0;
    panel.add(valueField, constraints);
    constraints.gridx = 0;
    constraints.gridy = 1;
    constraints.gridwidth = 2;
    // CheckBox binds directly to the named public/protected field via reflection.
    final CheckBox checkBox1 = new CheckBox(InspectionGadgetsBundle.message("if.can.be.switch.int.option"), this, "suggestIntSwitches");
    panel.add(checkBox1, constraints);
    constraints.gridy = 2;
    final CheckBox checkBox2 = new CheckBox(InspectionGadgetsBundle.message("if.can.be.switch.enum.option"), this, "suggestEnumSwitches");
    panel.add(checkBox2, constraints);
    constraints.gridy = 3;
    constraints.weighty = 1.0;
    final CheckBox checkBox3 = new CheckBox(InspectionGadgetsBundle.message("if.can.be.switch.null.safe.option"), this, "onlySuggestNullSafe");
    panel.add(checkBox3, constraints);
    return panel;
  }

  /** Setter used by the "onlySuggestNullSafe" checkbox binding above. */
  public void setOnlySuggestNullSafe(boolean onlySuggestNullSafe) {
    this.onlySuggestNullSafe = onlySuggestNullSafe;
  }

  /** Quick fix that rewrites the reported if-chain as a switch statement. */
  private static class IfCanBeSwitchFix extends InspectionGadgetsFix {

    public IfCanBeSwitchFix() {}

    @Override
    @NotNull
    public String getFamilyName() {
      return InspectionGadgetsBundle.message("if.can.be.switch.quickfix");
    }

    @Override
    protected void doFix(Project project, ProblemDescriptor descriptor) {
      // The error is registered on the if-statement's first child (the 'if'
      // keyword, see visitIfStatement), so the statement is its parent.
      final PsiElement element = descriptor.getPsiElement().getParent();
      if (!(element instanceof PsiIfStatement)) {
        return;
      }
      final PsiIfStatement ifStatement = (PsiIfStatement)element;
      replaceIfWithSwitch(ifStatement);
    }
  }

  /**
   * Replaces the given if-else-if chain with an equivalent switch statement.
   * Naked {@code break} statements inside the chain that target an enclosing
   * loop/switch would be captured by the new switch, so in that case the
   * enclosing statement is labeled (reusing an existing label if present) and
   * the breaks are rewritten as labeled breaks.
   */
  public static void replaceIfWithSwitch(PsiIfStatement ifStatement) {
    boolean breaksNeedRelabeled = false;
    PsiStatement breakTarget = null;
    String labelString = "";
    if (ControlFlowUtils.statementContainsNakedBreak(ifStatement)) {
      breakTarget = PsiTreeUtil.getParentOfType(ifStatement, PsiLoopStatement.class, PsiSwitchStatement.class);
      if (breakTarget != null) {
        final PsiElement parent = breakTarget.getParent();
        if (parent instanceof PsiLabeledStatement) {
          // Reuse the existing label on the enclosing statement.
          final PsiLabeledStatement labeledStatement = (PsiLabeledStatement)parent;
          labelString = labeledStatement.getLabelIdentifier().getText();
          breakTarget = labeledStatement;
          breaksNeedRelabeled = true;
        }
        else {
          // Invent a fresh label name that does not clash in this scope.
          labelString = SwitchUtils.findUniqueLabelName(ifStatement, "label");
          breaksNeedRelabeled = true;
        }
      }
    }
    final PsiIfStatement statementToReplace = ifStatement;
    // minimumBranches 0: the fix always applies once offered, regardless of
    // the configured reporting threshold.
    final PsiExpression switchExpression = SwitchUtils.getSwitchExpression(ifStatement, 0, false, true);
    if (switchExpression == null) {
      return;
    }
    // Walk the else-if chain, collecting one IfStatementBranch per branch
    // (plus a final default branch for a trailing plain else, if any).
    final List<IfStatementBranch> branches = new ArrayList<>(20);
    while (true) {
      final PsiExpression condition = ifStatement.getCondition();
      final PsiStatement thenBranch = ifStatement.getThenBranch();
      final IfStatementBranch ifBranch = new IfStatementBranch(thenBranch, false);
      extractCaseExpressions(condition, switchExpression, ifBranch);
      if (!branches.isEmpty()) {
        // Comments preceding an 'else if' belong to that branch, not to the
        // first 'if' (whose leading comments stay where they are).
        extractIfComments(ifStatement, ifBranch);
      }
      extractStatementComments(thenBranch, ifBranch);
      branches.add(ifBranch);
      final PsiStatement elseBranch = ifStatement.getElseBranch();
      if (elseBranch instanceof PsiIfStatement) {
        ifStatement = (PsiIfStatement)elseBranch;
      }
      else if (elseBranch == null) {
        break;
      }
      else {
        // Plain else: becomes the 'default:' branch.
        final IfStatementBranch elseIfBranch = new IfStatementBranch(elseBranch, true);
        final PsiKeyword elseKeyword = ifStatement.getElseElement();
        extractIfComments(elseKeyword, elseIfBranch);
        extractStatementComments(elseBranch, elseIfBranch);
        branches.add(elseIfBranch);
        break;
      }
    }
    // Build the replacement switch statement as text.
    @NonNls final StringBuilder switchStatementText = new StringBuilder();
    switchStatementText.append("switch(").append(switchExpression.getText()).append("){");
    final PsiType type = switchExpression.getType();
    // Switching on a boxed Integer: non-int case labels need an (int) cast,
    // see getCaseLabelText.
    final boolean castToInt = type != null && type.equalsToText(CommonClassNames.JAVA_LANG_INTEGER);
    for (IfStatementBranch branch : branches) {
      boolean hasConflicts = false;
      for (IfStatementBranch testBranch : branches) {
        if (branch == testBranch) {
          continue;
        }
        // Two branches declaring the same local name would collide once the
        // case bodies share the switch's scope; wrap such bodies in braces.
        if (branch.topLevelDeclarationsConflictWith(testBranch)) {
          hasConflicts = true;
        }
      }
      dumpBranch(branch, castToInt, hasConflicts, breaksNeedRelabeled, labelString, switchStatementText);
    }
    switchStatementText.append('}');
    final JavaPsiFacade psiFacade = JavaPsiFacade.getInstance(ifStatement.getProject());
    final PsiElementFactory factory = psiFacade.getElementFactory();
    if (breaksNeedRelabeled) {
      // Rebuild the whole break target with the if-chain textually replaced by
      // the switch, prefixing a new label when one had to be invented.
      final StringBuilder out = new StringBuilder();
      if (!(breakTarget instanceof PsiLabeledStatement)) {
        out.append(labelString).append(':');
      }
      termReplace(breakTarget, statementToReplace, switchStatementText, out);
      final String newStatementText = out.toString();
      final PsiStatement newStatement = factory.createStatementFromText(newStatementText, ifStatement);
      breakTarget.replace(newStatement);
    }
    else {
      final PsiStatement newStatement = factory.createStatementFromText(switchStatementText.toString(), ifStatement);
      statementToReplace.replace(newStatement);
    }
  }

  /**
   * Returns the closest previous sibling of {@code element} that is an
   * instance of {@code aClass}, or {@code null} if an instance of any of the
   * {@code stopAt} classes is encountered first (or no match exists).
   */
  @SafeVarargs
  @Nullable
  public static <T extends PsiElement> T getPrevSiblingOfType(@Nullable PsiElement element, @NotNull Class<T> aClass,
                                                              @NotNull Class<? extends PsiElement>... stopAt) {
    if (element == null) {
      return null;
    }
    PsiElement sibling = element.getPrevSibling();
    while (sibling != null && !aClass.isInstance(sibling)) {
      for (Class<? extends PsiElement> stopClass : stopAt) {
        if (stopClass.isInstance(sibling)) {
          return null;
        }
      }
      sibling = sibling.getPrevSibling();
    }
    //noinspection unchecked
    return (T)sibling;
  }

  /**
   * Collects the comments that directly precede {@code element} (stopping at
   * the previous statement) into the branch's case-label comments.
   * Note: walking backwards means comments are added in reverse source order;
   * presumably IfStatementBranch/dump handles ordering — confirm there.
   */
  private static void extractIfComments(PsiElement element, IfStatementBranch out) {
    PsiComment comment = getPrevSiblingOfType(element, PsiComment.class, PsiStatement.class);
    while (comment != null) {
      out.addComment(getCommentText(comment));
      comment = getPrevSiblingOfType(comment, PsiComment.class, PsiStatement.class);
    }
  }

  /**
   * Collects the comments that directly precede a branch body (stopping at a
   * statement or keyword) into the branch's statement comments.
   */
  private static void extractStatementComments(PsiElement element, IfStatementBranch out) {
    PsiComment comment = getPrevSiblingOfType(element, PsiComment.class, PsiStatement.class, PsiKeyword.class);
    while (comment != null) {
      out.addStatementComment(getCommentText(comment));
      comment = getPrevSiblingOfType(comment, PsiComment.class, PsiStatement.class, PsiKeyword.class);
    }
  }

  /**
   * Returns the comment's text, preserving the indentation that followed the
   * last newline of the preceding whitespace (so relative alignment survives
   * the rewrite).
   */
  private static String getCommentText(PsiComment comment) {
    final PsiElement sibling = comment.getPrevSibling();
    if (sibling instanceof PsiWhiteSpace) {
      final String whiteSpaceText = sibling.getText();
      return whiteSpaceText.startsWith("\n") ? whiteSpaceText.substring(1) + comment.getText() : comment.getText();
    }
    else {
      return comment.getText();
    }
  }

  /**
   * Recursively copies the text of {@code target} into {@code out},
   * substituting {@code stringToReplaceWith} for the subtree equal to
   * {@code replace}. Used to rebuild a labeled break target around the new
   * switch statement.
   */
  private static void termReplace(PsiElement target, PsiElement replace, StringBuilder stringToReplaceWith, StringBuilder out) {
    if (target.equals(replace)) {
      out.append(stringToReplaceWith);
    }
    else if (target.getChildren().length == 0) {
      // Leaf (token): getChildren() skips tokens, so emit its text directly.
      out.append(target.getText());
    }
    else {
      final PsiElement[] children = target.getChildren();
      for (final PsiElement child : children) {
        termReplace(child, replace, stringToReplaceWith, out);
      }
    }
  }

  /**
   * Derives the case-label expression(s) for one if-condition:
   * <ul>
   *   <li>{@code a.equals(b)}-style calls: whichever side is not the switch
   *       expression becomes the label (qualifier for String comparisons,
   *       second argument for e.g. {@code Objects.equals});</li>
   *   <li>{@code ||} chains: each operand contributes a label (multiple
   *       {@code case} labels for one branch);</li>
   *   <li>binary comparisons ({@code ==}): the side that is not the switch
   *       expression becomes the label;</li>
   *   <li>parenthesized conditions are unwrapped recursively.</li>
   * </ul>
   */
  private static void extractCaseExpressions(PsiExpression expression, PsiExpression switchExpression, IfStatementBranch branch) {
    if (expression instanceof PsiMethodCallExpression) {
      final PsiMethodCallExpression methodCallExpression = (PsiMethodCallExpression)expression;
      final PsiExpressionList argumentList = methodCallExpression.getArgumentList();
      final PsiExpression[] arguments = argumentList.getExpressions();
      final PsiExpression argument = arguments[0];
      final PsiExpression secondArgument = arguments.length > 1 ? arguments[1] : null;
      final PsiReferenceExpression methodExpression = methodCallExpression.getMethodExpression();
      final PsiExpression qualifierExpression = methodExpression.getQualifierExpression();
      final boolean stringType = ExpressionUtils.hasStringType(qualifierExpression);
      if (EquivalenceChecker.getCanonicalPsiEquivalence().expressionsAreEquivalent(switchExpression, argument)) {
        branch.addCaseExpression(stringType? qualifierExpression : secondArgument);
      }
      else {
        branch.addCaseExpression(argument);
      }
    }
    else if (expression instanceof PsiPolyadicExpression) {
      final PsiPolyadicExpression polyadicExpression = (PsiPolyadicExpression)expression;
      final PsiExpression[] operands = polyadicExpression.getOperands();
      final IElementType tokenType = polyadicExpression.getOperationTokenType();
      if (JavaTokenType.OROR.equals(tokenType)) {
        for (PsiExpression operand : operands) {
          extractCaseExpressions(operand, switchExpression, branch);
        }
      }
      else if (operands.length == 2) {
        final PsiExpression lhs = operands[0];
        final PsiExpression rhs = operands[1];
        if (EquivalenceChecker.getCanonicalPsiEquivalence().expressionsAreEquivalent(switchExpression, rhs)) {
          branch.addCaseExpression(lhs);
        }
        else {
          branch.addCaseExpression(rhs);
        }
      }
    }
    else if (expression instanceof PsiParenthesizedExpression) {
      final PsiParenthesizedExpression parenthesizedExpression = (PsiParenthesizedExpression)expression;
      final PsiExpression contents = parenthesizedExpression.getExpression();
      extractCaseExpressions(contents, switchExpression, branch);
    }
  }

  /**
   * Appends one branch to the switch text: its comments, its case labels (or
   * {@code default:} for an else-branch), then its body.
   */
  private static void dumpBranch(IfStatementBranch branch, boolean castToInt, boolean wrap, boolean renameBreaks,
                                 String breakLabelName, @NonNls StringBuilder switchStatementText) {
    dumpComments(branch.getComments(), switchStatementText);
    if (branch.isElse()) {
      switchStatementText.append("default: ");
    }
    else {
      for (PsiExpression caseExpression : branch.getCaseExpressions()) {
        switchStatementText.append("case ").append(getCaseLabelText(caseExpression, castToInt)).append(": ");
      }
    }
    dumpComments(branch.getStatementComments(), switchStatementText);
    dumpBody(branch.getStatement(), wrap, renameBreaks, breakLabelName, switchStatementText);
  }

  /**
   * Renders one case label. Enum constants are emitted by simple name (case
   * labels must be unqualified); non-int labels in a switch over a boxed
   * Integer get an explicit (int) cast.
   */
  @NonNls
  private static String getCaseLabelText(PsiExpression expression, boolean castToInt) {
    if (expression instanceof PsiReferenceExpression) {
      final PsiReferenceExpression referenceExpression = (PsiReferenceExpression)expression;
      final PsiElement target = referenceExpression.resolve();
      if (target instanceof PsiEnumConstant) {
        final PsiEnumConstant enumConstant = (PsiEnumConstant)target;
        return enumConstant.getName();
      }
    }
    if (castToInt) {
      final PsiType type = expression.getType();
      if (!PsiType.INT.equals(type)) {
        /*
          because
          Integer a = 1;
          switch (a) {
            case (byte)7:
          }
          does not compile with javac (but does with Eclipse)
        */
        return "(int)" + expression.getText();
      }
    }
    return expression.getText();
  }

  private static void dumpComments(List<String> comments, StringBuilder switchStatementText) {
    if (comments.isEmpty()) {
      return;
    }
    switchStatementText.append('\n');
    for (String comment : comments) {
      switchStatementText.append(comment).append('\n');
    }
  }

  /**
   * Appends a branch body. Block statements are unwrapped (their braces
   * dropped) unless {@code wrap} forces fresh braces because of declaration
   * conflicts; a {@code break;} is appended when the body may complete
   * normally, so fall-through is not introduced.
   */
  private static void dumpBody(PsiStatement bodyStatement, boolean wrap, boolean renameBreaks, String breakLabelName,
                               @NonNls StringBuilder switchStatementText) {
    if (wrap) {
      switchStatementText.append('{');
    }
    if (bodyStatement instanceof PsiBlockStatement) {
      final PsiCodeBlock codeBlock = ((PsiBlockStatement)bodyStatement).getCodeBlock();
      final PsiElement[] children = codeBlock.getChildren();
      //skip the first and last members, to unwrap the block
      for (int i = 1; i < children.length - 1; i++) {
        final PsiElement child = children[i];
        appendElement(child, renameBreaks, breakLabelName, switchStatementText);
      }
    }
    else {
      appendElement(bodyStatement, renameBreaks, breakLabelName, switchStatementText);
    }
    if (ControlFlowUtils.statementMayCompleteNormally(bodyStatement)) {
      switchStatementText.append("break;");
    }
    if (wrap) {
      switchStatementText.append('}');
    }
  }

  /**
   * Appends one element of a branch body, rewriting naked {@code break}
   * statements into labeled ones when {@code renameBreakElements} is set
   * (recursing into blocks and nested ifs so inner breaks are found too).
   * An element ending in a line comment gets a newline so the following text
   * is not swallowed by the comment.
   */
  private static void appendElement(PsiElement element, boolean renameBreakElements, String breakLabelString,
                                    @NonNls StringBuilder switchStatementText) {
    final String text = element.getText();
    if (!renameBreakElements) {
      switchStatementText.append(text);
    }
    else if (element instanceof PsiBreakStatement) {
      final PsiBreakStatement breakStatement = (PsiBreakStatement)element;
      final PsiIdentifier identifier = breakStatement.getLabelIdentifier();
      if (identifier == null) {
        switchStatementText.append("break ").append(breakLabelString).append(';');
      }
      else {
        // Already-labeled breaks are left untouched.
        switchStatementText.append(text);
      }
    }
    else if (element instanceof PsiBlockStatement || element instanceof PsiCodeBlock || element instanceof PsiIfStatement) {
      final PsiElement[] children = element.getChildren();
      for (final PsiElement child : children) {
        appendElement(child, true, breakLabelString, switchStatementText);
      }
    }
    else {
      switchStatementText.append(text);
    }
    final PsiElement lastChild = element.getLastChild();
    if (isEndOfLineComment(lastChild)) {
      switchStatementText.append('\n');
    }
  }

  private static boolean isEndOfLineComment(PsiElement element) {
    if (!(element instanceof PsiComment)) {
      return false;
    }
    final PsiComment comment = (PsiComment)element;
    final IElementType tokenType = comment.getTokenType();
    return JavaTokenType.END_OF_LINE_COMMENT.equals(tokenType);
  }

  @Override
  public BaseInspectionVisitor buildVisitor() {
    return new IfCanBeSwitchVisitor();
  }

  /**
   * Persists the non-default value of onlySuggestNullSafe (true is the
   * default, so only {@code false} is written — the element is only emitted
   * inside the {@code !onlySuggestNullSafe} branch).
   */
  @Override
  public void writeSettings(@NotNull Element node) throws WriteExternalException {
    super.writeSettings(node);
    if (!onlySuggestNullSafe) {
      final Element e = new Element("option");
      e.setAttribute("name", ONLY_SAFE);
      e.setAttribute("value", Boolean.toString(onlySuggestNullSafe));
      node.addContent(e);
    }
  }

  /** Restores onlySuggestNullSafe from a previously written option element. */
  @Override
  public void readSettings(@NotNull Element node) throws InvalidDataException {
    super.readSettings(node);
    for (Element child : node.getChildren("option")) {
      if (Comparing.strEqual(child.getAttributeValue("name"), ONLY_SAFE)) {
        final String value = child.getAttributeValue("value");
        if (value != null) {
          onlySuggestNullSafe = Boolean.parseBoolean(value);
        }
        break;
      }
    }
  }

  /** Visitor that reports convertible if-chains on their 'if' keyword. */
  private class IfCanBeSwitchVisitor extends BaseInspectionVisitor {

    @Override
    public void visitIfStatement(PsiIfStatement statement) {
      super.visitIfStatement(statement);
      final PsiElement parent = statement.getParent();
      if (parent instanceof PsiIfStatement) {
        // Only report the head of an else-if chain, not its tail statements.
        return;
      }
      final PsiExpression switchExpression = SwitchUtils.getSwitchExpression(statement, minimumBranches, false, true);
      if (switchExpression == null) {
        return;
      }
      // Dubious suggestions (per shouldHighlight) are still offered, but only
      // at INFORMATION level so they do not show as warnings.
      final ProblemHighlightType highlightType =
        shouldHighlight(switchExpression) ? ProblemHighlightType.GENERIC_ERROR_OR_WARNING : ProblemHighlightType.INFORMATION;
      registerError(statement.getFirstChild(), highlightType, switchExpression);
    }

    /**
     * Decides whether the suggestion deserves warning-level highlighting:
     * suppressed for int-like and enum switches when the corresponding option
     * is off, for String comparisons that are not null-safe (switch throws
     * NPE where equals() would not) unless annotated @NotNull, and for switch
     * expressions with side effects (they would be evaluated once instead of
     * once per comparison).
     */
    private boolean shouldHighlight(PsiExpression switchExpression) {
      final PsiType type = switchExpression.getType();
      if (!suggestIntSwitches) {
        if (type instanceof PsiClassType) {
          if (type.equalsToText(CommonClassNames.JAVA_LANG_INTEGER) ||
              type.equalsToText(CommonClassNames.JAVA_LANG_SHORT) ||
              type.equalsToText(CommonClassNames.JAVA_LANG_BYTE) ||
              type.equalsToText(CommonClassNames.JAVA_LANG_CHARACTER)) {
            return false;
          }
        }
        else if (PsiType.INT.equals(type) || PsiType.SHORT.equals(type) || PsiType.BYTE.equals(type) || PsiType.CHAR.equals(type)) {
          return false;
        }
      }
      if (type instanceof PsiClassType) {
        final PsiClassType classType = (PsiClassType)type;
        final PsiClass aClass = classType.resolve();
        if (aClass == null) {
          return false;
        }
        if (!suggestEnumSwitches && aClass.isEnum()) {
          return false;
        }
        if (CommonClassNames.JAVA_LANG_STRING.equals(aClass.getQualifiedName())) {
          final PsiElement parent = ParenthesesUtils.getParentSkipParentheses(switchExpression);
          if (parent instanceof PsiExpressionList && onlySuggestNullSafe && !ExpressionUtils.isAnnotatedNotNull(switchExpression)) {
            final PsiElement grandParent = parent.getParent();
            if (grandParent instanceof PsiMethodCallExpression) {
              final PsiMethodCallExpression methodCallExpression = (PsiMethodCallExpression)grandParent;
              if ("equals".equals(methodCallExpression.getMethodExpression().getReferenceName())) {
                // Objects.equals(switchExpression, other) or other.equals(switchExpression)
                return false;
              }
            }
          }
          return !(parent instanceof PsiPolyadicExpression); // == expression
        }
      }
      return !SideEffectChecker.mayHaveSideEffects(switchExpression);
    }
  }
}
package com.sudwood.advancedutilities.items; import net.minecraft.creativetab.CreativeTabs; import net.minecraft.init.Blocks; import net.minecraft.init.Items; import net.minecraft.item.Item; import net.minecraft.item.ItemArmor.ArmorMaterial; import net.minecraft.item.ItemStack; import net.minecraftforge.common.MinecraftForge; import net.minecraftforge.common.util.EnumHelper; import net.minecraftforge.oredict.OreDictionary; import net.minecraftforge.oredict.ShapedOreRecipe; import net.minecraftforge.oredict.ShapelessOreRecipe; import com.sudwood.advancedutilities.AdvancedUtilities; import com.sudwood.advancedutilities.blocks.AdvancedUtilitiesBlocks; import com.sudwood.advancedutilities.blocks.BlockIngotBlock; import com.sudwood.advancedutilities.blocks.BlockMachineBase; import com.sudwood.advancedutilities.items.minecart.ItemChunkChestCart; import com.sudwood.advancedutilities.items.minecart.ItemChunkLoadCart; import com.sudwood.advancedutilities.items.minecart.ItemChunkTankCart; import com.sudwood.advancedutilities.items.minecart.ItemCrowbar; import com.sudwood.advancedutilities.items.minecart.ItemSpeedyChestcart; import com.sudwood.advancedutilities.items.minecart.ItemSpeedyChunkChestCart; import com.sudwood.advancedutilities.items.minecart.ItemSpeedyChunkTankCart; import com.sudwood.advancedutilities.items.minecart.ItemSpeedyMinecart; import com.sudwood.advancedutilities.items.minecart.ItemSpeedyTankCart; import com.sudwood.advancedutilities.items.minecart.ItemTankCart; import cpw.mods.fml.common.registry.GameRegistry; public class AdvancedUtilitiesItems { public static Item ingot; public static Item rubber; public static Item glue; public static Item wire; public static Item toolPart; public static Item toolBE; public static Item cast; public static Item plate; public static Item brassRivets; public static Item stoneRivets; public static Item steelRivets; public static Item bronzeHelm; public static Item bronzeChest; public static Item bronzeLegs; public static 
Item bronzeBoots; public static Item dust; public static Item bag; public static Item climbingBelt; public static Item magnetAmulet; public static Item voidRing; public static Item pnumaticGun; public static Item bronzeBullet; public static Item steelBullet; public static Item steamJetpack; public static Item rebreather; public static Item itemCasing; public static Item itemBulletHead; public static Item jackHammer; public static Item bulletMagazine; public static Item upgrade; public static Item runningShoes; public static Item steamLegs; public static Item speedyMinecart; public static Item speedyChestcart; public static Item chunkLoadCart; public static Item speedyChunkLoadCart; public static Item chunkChestCart; public static Item speedyChunkChestCart; public static Item tankCart; public static Item speedyTankCart; public static Item chunkTankCart; public static Item speedyChunkTankCart; public static Item crowbar; public static Item bronzeWrench; public static Item itemTank; public static Item quickPotion; public static Item tomato; public static Item devTele; public static void init() { ingot = new ItemIngot().setUnlocalizedName("Ingot"); dust = new ItemDust().setUnlocalizedName("Dust"); bag = new ItemBag().setUnlocalizedName("Bag"); rubber = new ItemRubber(0).setUnlocalizedName("Rubber").setCreativeTab(AdvancedUtilities.advancedBEMachinesTab); glue = new ItemRubber(1).setUnlocalizedName("Glue").setCreativeTab(AdvancedUtilities.advancedBEMachinesTab); wire = new ItemWire().setUnlocalizedName("Wire"); toolPart = new ItemToolPart().setUnlocalizedName("toolPart"); toolBE = new ItemBETool().setUnlocalizedName("BETool"); cast = new ItemCast().setUnlocalizedName("Cast"); plate = new ItemPlate().setUnlocalizedName("Plate"); brassRivets = new ItemRivets(0).setUnlocalizedName("BrassRivets").setCreativeTab(AdvancedUtilities.advancedTab); stoneRivets = new ItemRivets(1).setUnlocalizedName("StoneRivets").setCreativeTab(AdvancedUtilities.advancedTab); steelRivets = new 
ItemRivets(2).setUnlocalizedName("SteelRivets").setCreativeTab(AdvancedUtilities.advancedTab); ArmorMaterial armorBronze = EnumHelper.addArmorMaterial("BRONZEAU", 40, new int[]{2, 6, 5, 2}, 9); bronzeHelm = new ItemArmorBE(armorBronze, 0, 0).setUnlocalizedName("BronzeHelm").setCreativeTab(AdvancedUtilities.advancedBEToolsTab); bronzeChest = new ItemArmorBE(armorBronze, 0, 1).setUnlocalizedName("BronzeChest").setCreativeTab(AdvancedUtilities.advancedBEToolsTab); bronzeLegs = new ItemArmorBE(armorBronze, 0, 2).setUnlocalizedName("BronzeLegs").setCreativeTab(AdvancedUtilities.advancedBEToolsTab); bronzeBoots = new ItemArmorBE(armorBronze, 0, 3).setUnlocalizedName("BronzeBoots").setCreativeTab(AdvancedUtilities.advancedBEToolsTab); pnumaticGun = new ItemPnumaticGun().setUnlocalizedName("PnumaticGun").setCreativeTab(AdvancedUtilities.advancedBEToolsTab); bronzeBullet = new ItemBulletBE(0).setUnlocalizedName("BronzeBullet").setCreativeTab(AdvancedUtilities.advancedBEToolsTab); steelBullet = new ItemBulletBE(1).setUnlocalizedName("SteelBullet").setCreativeTab(AdvancedUtilities.advancedBEToolsTab); itemCasing = new ItemCasing().setUnlocalizedName("Casing"); itemBulletHead = new ItemBulletHead().setUnlocalizedName("BulletHead"); jackHammer = new ItemJackHammer().setUnlocalizedName("JackHammer").setCreativeTab(AdvancedUtilities.advancedBEToolsTab); bulletMagazine = new ItemMagazine().setUnlocalizedName("BulletMagazine").setCreativeTab(AdvancedUtilities.advancedBEToolsTab); upgrade = new ItemUpgrade().setUnlocalizedName("Upgrade"); speedyMinecart = new ItemSpeedyMinecart(0).setUnlocalizedName("SpeedyMinecart").setCreativeTab(AdvancedUtilities.advancedTab); speedyChestcart = new ItemSpeedyChestcart(1).setUnlocalizedName("SpeedyChestcart").setCreativeTab(AdvancedUtilities.advancedTab); chunkLoadCart = new ItemChunkLoadCart(0, 0).setUnlocalizedName("ChunkLoadCart").setCreativeTab(AdvancedUtilities.advancedTab); speedyChunkLoadCart = new ItemChunkLoadCart(0, 
1).setUnlocalizedName("SpeedyChunkLoadCart").setCreativeTab(AdvancedUtilities.advancedTab); chunkChestCart = new ItemChunkChestCart(0).setUnlocalizedName("ChunkChestCart").setCreativeTab(AdvancedUtilities.advancedTab); speedyChunkChestCart = new ItemSpeedyChunkChestCart(0).setUnlocalizedName("SpeedyChunkChestCart").setCreativeTab(AdvancedUtilities.advancedTab); tankCart = new ItemTankCart(0).setUnlocalizedName("TankCart").setCreativeTab(AdvancedUtilities.advancedTab); speedyTankCart = new ItemSpeedyTankCart(0).setUnlocalizedName("SpeedyTankCart").setCreativeTab(AdvancedUtilities.advancedTab); chunkTankCart = new ItemChunkTankCart(0).setUnlocalizedName("ChunkTankCart").setCreativeTab(AdvancedUtilities.advancedTab); speedyChunkTankCart = new ItemSpeedyChunkTankCart(0).setUnlocalizedName("SpeedyChunkTankCart").setCreativeTab(AdvancedUtilities.advancedTab); crowbar = new ItemCrowbar().setUnlocalizedName("Crowbar").setCreativeTab(AdvancedUtilities.advancedTab); bronzeWrench = new ItemWrench().setUnlocalizedName("BronzeWrench").setCreativeTab(AdvancedUtilities.advancedTab).setMaxDamage(1500).setMaxStackSize(1); climbingBelt = new ItemClimbingBelt().setUnlocalizedName("ClimbingBelt").setCreativeTab(AdvancedUtilities.advancedBEToolsTab); magnetAmulet = new ItemMagnetAmulet().setUnlocalizedName("MagnetAmulet").setCreativeTab(AdvancedUtilities.advancedBEToolsTab); voidRing = new ItemVoidRing().setUnlocalizedName("VoidRing").setCreativeTab(AdvancedUtilities.advancedBEToolsTab); quickPotion = new ItemQuickPotion(0, 0, false).setUnlocalizedName("QuickPotion").setCreativeTab(AdvancedUtilities.advancedTab); tomato = new ItemTomato(6, 6, false).setUnlocalizedName("Tomato").setCreativeTab(AdvancedUtilities.advancedTab).setCreativeTab(CreativeTabs.tabFood); ArmorMaterial armorSteamJetpack = EnumHelper.addArmorMaterial("SJETPACKAU", 0, new int[]{0, 0, 0, 0}, 10); ArmorMaterial armorSteamLegs = EnumHelper.addArmorMaterial("SLEGSAU", 0, new int[]{2, 6, 5, 2}, 10); steamJetpack = new 
ItemArmorSteamJetpack(armorSteamJetpack, 0, 1).setUnlocalizedName("SteamJetpack").setCreativeTab(AdvancedUtilities.advancedBEToolsTab); runningShoes = new ItemRunningShoes(armorSteamJetpack, 0, 3).setUnlocalizedName("RunningShoes").setCreativeTab(AdvancedUtilities.advancedBEToolsTab); rebreather = new ItemArmorRebreather(armorSteamJetpack, 0, 0).setUnlocalizedName("Rebreather").setCreativeTab(AdvancedUtilities.advancedBEToolsTab); steamLegs = new ItemArmorSteamLegs(armorSteamLegs, 0, 2).setUnlocalizedName("SteamLegs").setCreativeTab(AdvancedUtilities.advancedBEToolsTab); devTele = new ItemDevTool().setUnlocalizedName("DevTele").setCreativeTab(AdvancedUtilities.advancedTab); registerItems(); } public static void registerItems() { GameRegistry.registerItem(ingot, "ingot"); GameRegistry.registerItem(dust, "dust"); GameRegistry.registerItem(rubber, "rubber"); GameRegistry.registerItem(glue, "glue"); GameRegistry.registerItem(wire, "Wire"); GameRegistry.registerItem(bag, "bag"); GameRegistry.registerItem(climbingBelt, "climbingbelt"); GameRegistry.registerItem(magnetAmulet, "magnetAmulet"); GameRegistry.registerItem(voidRing, "voidring"); GameRegistry.registerItem(toolPart, "toolpart"); GameRegistry.registerItem(toolBE, "betool"); GameRegistry.registerItem(cast, "cast"); GameRegistry.registerItem(plate, "plate"); GameRegistry.registerItem(brassRivets, "brassfittings"); GameRegistry.registerItem(stoneRivets, "stonefittings"); GameRegistry.registerItem(steelRivets, "steelrivets"); GameRegistry.registerItem(bronzeHelm, "bronzehelm"); GameRegistry.registerItem(bronzeChest, "bronzechest"); GameRegistry.registerItem(bronzeLegs, "bronzelegs"); GameRegistry.registerItem(bronzeBoots, "bronzeboots"); GameRegistry.registerItem(pnumaticGun, "pnumaticgun"); GameRegistry.registerItem(bronzeBullet, "bronzebullet"); GameRegistry.registerItem(steelBullet, "steelbullet"); GameRegistry.registerItem(steamJetpack, "steamjetpack"); GameRegistry.registerItem(rebreather, "rebreather"); 
GameRegistry.registerItem(runningShoes, "runningshoes"); GameRegistry.registerItem(steamLegs, "steamlegs"); GameRegistry.registerItem(itemCasing, "itemcasing"); GameRegistry.registerItem(itemBulletHead, "itembullethead"); GameRegistry.registerItem(jackHammer, "jackhammer"); GameRegistry.registerItem(bulletMagazine, "bulletmagazine"); GameRegistry.registerItem(upgrade, "upgrade"); GameRegistry.registerItem(speedyMinecart, "speedyminecart"); GameRegistry.registerItem(speedyChestcart, "speedychestcart"); GameRegistry.registerItem(chunkLoadCart, "chunkLoadCart"); GameRegistry.registerItem(speedyChunkLoadCart, "speedychunkLoadCart"); GameRegistry.registerItem(chunkChestCart, "chunkchestcart"); GameRegistry.registerItem(speedyChunkChestCart, "speedychunkchestcart"); GameRegistry.registerItem(tankCart, "tankcart"); GameRegistry.registerItem(speedyTankCart, "speedytankcart"); GameRegistry.registerItem(chunkTankCart, "chunktankcart"); GameRegistry.registerItem(speedyChunkTankCart, "speedychunktankcart"); GameRegistry.registerItem(crowbar, "crowbar"); GameRegistry.registerItem(bronzeWrench, "bronzewrench"); GameRegistry.registerItem(quickPotion, "quickhealth"); GameRegistry.registerItem(tomato, "tomato"); GameRegistry.registerItem(devTele, "devTele"); OreDictionary.registerOre("ingotCopper", new ItemStack(ingot, 1, ItemIngot.COPPER)); OreDictionary.registerOre("ingotTin", new ItemStack(ingot, 1, ItemIngot.TIN)); OreDictionary.registerOre("ingotBronze", new ItemStack(ingot, 1, ItemIngot.BRONZE)); OreDictionary.registerOre("ingotZinc", new ItemStack(ingot, 1, ItemIngot.ZINC)); OreDictionary.registerOre("ingotBrass", new ItemStack(ingot, 1, ItemIngot.BRASS)); OreDictionary.registerOre("ingotSilver", new ItemStack(ingot, 1, ItemIngot.SILVER)); OreDictionary.registerOre("ingotLead", new ItemStack(ingot, 1, ItemIngot.LEAD)); OreDictionary.registerOre("ingotAluminum", new ItemStack(ingot, 1, ItemIngot.ALUMINUM)); OreDictionary.registerOre("ingotAluminium", new ItemStack(ingot, 1, 
ItemIngot.ALUMINUM)); OreDictionary.registerOre("ingotTungsten", new ItemStack(ingot, 1, ItemIngot.TUNGSTEN)); OreDictionary.registerOre("ingotPlatinum", new ItemStack(ingot, 1, ItemIngot.PLATINUM)); OreDictionary.registerOre("ingotIridium", new ItemStack(ingot, 1, ItemIngot.IRIDIUM)); OreDictionary.registerOre("ingotPalidium", new ItemStack(ingot, 1, ItemIngot.PALIDIUM)); OreDictionary.registerOre("ingotSteel", new ItemStack(ingot, 1, ItemIngot.STEEL)); OreDictionary.registerOre("ingotIron", new ItemStack(Items.iron_ingot, 1)); OreDictionary.registerOre("ingotGold", new ItemStack(Items.gold_ingot)); OreDictionary.registerOre("gemDiamond", new ItemStack(Items.diamond, 1)); OreDictionary.registerOre("coal", new ItemStack(Items.coal, 1)); OreDictionary.registerOre("cropWheat", new ItemStack(Items.wheat, 1)); OreDictionary.registerOre("slimeball", new ItemStack(glue, 1)); OreDictionary.registerOre("plateIron", new ItemStack(plate, 1, ItemPlate.IRONPLATE)); OreDictionary.registerOre("plateBronze", new ItemStack(plate, 1, ItemPlate.BRONZEPLATE)); OreDictionary.registerOre("plateBrass", new ItemStack(plate, 1, ItemPlate.BRASSPLATE)); OreDictionary.registerOre("plateSteel", new ItemStack(plate, 1, ItemPlate.STEELPLATE)); OreDictionary.registerOre("dustCopper", new ItemStack(dust, 1, 0)); OreDictionary.registerOre("dustIron", new ItemStack(dust, 1, 1)); OreDictionary.registerOre("dustTin", new ItemStack(dust, 1, 2)); OreDictionary.registerOre("dustZinc", new ItemStack(dust, 1, 3)); OreDictionary.registerOre("dustBronze", new ItemStack(dust, 1, 4)); OreDictionary.registerOre("dustBrass", new ItemStack(dust, 1, 5)); OreDictionary.registerOre("dustGold", new ItemStack(dust, 1, 6)); OreDictionary.registerOre("dustDiamond", new ItemStack(dust, 1, 7)); OreDictionary.registerOre("dustCoal", new ItemStack(dust, 1, 8)); OreDictionary.registerOre("dustSilver", new ItemStack(dust, 1, 9)); OreDictionary.registerOre("dustPlatinum", new ItemStack(dust, 1, 10)); 
// ---------------------------------------------------------------------------
// Tail of the ore-dictionary registration method (its header is earlier in
// this file): expose this mod's dusts/materials under standard OreDictionary
// names so other mods' recipes can consume them interchangeably.
// ---------------------------------------------------------------------------
OreDictionary.registerOre("dustAluminum", new ItemStack(dust, 1, 11));
// Register the British spelling too, so mods using either name match.
OreDictionary.registerOre("dustAluminium", new ItemStack(dust, 1, 11));
OreDictionary.registerOre("dustLead", new ItemStack(dust, 1, 12));
OreDictionary.registerOre("dustTungsten", new ItemStack(dust, 1, 13));
OreDictionary.registerOre("dustIridium", new ItemStack(dust, 1, 14));
// NOTE(review): "Palidium" is a misspelling (palladium?) but it matches
// ItemIngot.PALIDIUM used in the smelting recipes below; renaming would break
// cross-mod ore-dictionary matching, so it is left as-is.
OreDictionary.registerOre("dustPalidium", new ItemStack(dust, 1, 15));
OreDictionary.registerOre("dustNickel", new ItemStack(dust, 1, 20));
// Flour (dust meta 18) is published under both a "dust" and a "food" name.
OreDictionary.registerOre("dustWheat", new ItemStack(dust,1 ,18));
OreDictionary.registerOre("foodFlour", new ItemStack(dust,1 ,18));
OreDictionary.registerOre("materialRubber", rubber);
OreDictionary.registerOre("itemRubber", rubber);
OreDictionary.registerOre("itemBone", Items.bone);
}

/**
 * Registers every shaped, shapeless and smelting recipe added by this mod.
 * Shaped recipes use the Forge convention: up to three pattern rows followed
 * by (symbol, ingredient) pairs; {@code ShapedOreRecipe} variants accept
 * ore-dictionary names (e.g. "ingotBronze") so any matching mod item works.
 * Also delegates to {@link #addUpgradeRecipes()} and {@link #addPotionRecipes()}.
 * NOTE(review): method name "addRecipies" is a typo for "addRecipes" but is
 * part of the public API — do not rename without updating all callers.
 */
public static void addRecipies() {
    // --- stone-based starter parts ---
    GameRegistry.addRecipe(new ShapedOreRecipe(new ItemStack(plate, 1, 3) , new Object[]{
        "XXX", "XXX", "XXX", Character.valueOf('X'), "stone"}));
    GameRegistry.addRecipe(new ShapedOreRecipe(new ItemStack(stoneRivets, 1) , new Object[]{
        "X X", " ", " X", Character.valueOf('X'), "stone"}));
    GameRegistry.addRecipe(new ShapedOreRecipe(new ItemStack(itemBulletHead, 1, 0) , new Object[]{
        " X ", "X X", " ", Character.valueOf('X'), "stone"}));
    GameRegistry.addRecipe(new ShapedOreRecipe(new ItemStack(itemCasing, 1, 0) , new Object[]{
        "X X", "X X", " X ", Character.valueOf('X'), "stone"}));
    GameRegistry.addRecipe(new ShapedOreRecipe(new ItemStack(toolPart, 1, 10) , new Object[]{
        "XXX", "X X", " ", Character.valueOf('X'), "stone"}));
    GameRegistry.addRecipe(new ShapedOreRecipe(new ItemStack(toolPart, 1, 13) , new Object[]{
        "XXX", "X X", "X X", Character.valueOf('X'), "stone"}));
    // Brass-plate casing (itemCasing meta 2) used by most machines below.
    GameRegistry.addRecipe(new ItemStack(itemCasing, 1, 2) , new Object[]{
        "PPP","R R","PPP",Character.valueOf('P'), new ItemStack(plate, 1, 1), 'R', brassRivets });
    addUpgradeRecipes();
    // --- wearables / gear ---
    GameRegistry.addRecipe(new ItemStack(runningShoes, 1) , new Object[]{
        "LS ","RLS","RLL", 'R', brassRivets, 'S', Items.string, 'L', Items.leather });
    GameRegistry.addRecipe(new ShapedOreRecipe(new ItemStack(rebreather, 1) , new Object[]{
        " H ","TGT"," I ", 'H', Items.leather_helmet, 'T', new ItemStack(itemCasing, 1, 2) ,
        'G', Blocks.glass, 'I', "ingotIron" }));
    GameRegistry.addRecipe(new ItemStack(steamLegs, 1) , new Object[]{
        "PRP","TRT","PRP", 'P', new ItemStack(plate, 1, 1), 'T', new ItemStack(itemCasing, 1, 2), 'R', brassRivets });
    GameRegistry.addRecipe(new ItemStack(bulletMagazine, 1) , new Object[]{
        "PRP","P P","PRP",Character.valueOf('P'), new ItemStack(plate, 1, 0), 'R', brassRivets });
    addPotionRecipes();
    // --- tools (crowbar has two mirrored patterns) ---
    GameRegistry.addRecipe(new ShapedOreRecipe(new ItemStack(crowbar, 1) , new Object[]{
        " I"," I ","I ",'I', "ingotBronze" }));
    GameRegistry.addRecipe(new ShapedOreRecipe(new ItemStack(crowbar, 1) , new Object[]{
        "I "," I "," I",'I', "ingotBronze" }));
    GameRegistry.addRecipe(new ShapedOreRecipe(new ItemStack(bronzeWrench, 1) , new Object[]{
        "IBI"," I "," I ",'I', "ingotBronze",'B',"ingotBrass" }));
    // Craftable saddle (vanilla item, normally loot-only).
    GameRegistry.addRecipe(new ItemStack(Items.saddle, 1) , new Object[]{
        "RLR","L L"," ",Character.valueOf('L'), Items.leather, 'R', brassRivets });
    // Wool of any colour (WILDCARD_VALUE) back into string.
    GameRegistry.addShapelessRecipe(new ItemStack(Items.string, 4), new Object[]{new ItemStack(Blocks.wool, 1, OreDictionary.WILDCARD_VALUE)});
    // Glue = flour (dust meta 18) + any source of water.
    GameRegistry.addShapelessRecipe(new ItemStack(glue, 1), new Object[]{new ItemStack(Items.water_bucket, 1), new ItemStack(dust, 1, 18)});
    GameRegistry.addShapelessRecipe(new ItemStack(glue, 1), new Object[]{new ItemStack(Items.potionitem, 1, 0), new ItemStack(dust, 1, 18)});
    GameRegistry.addShapelessRecipe(new ItemStack(glue, 1), new Object[]{new ItemStack(quickPotion, 1,2), new ItemStack(dust, 1, 18)});
    // Paper-making chain: book from paper+glue, paper from sawdust (meta 19)+water.
    GameRegistry.addShapelessRecipe(new ItemStack(Items.book, 1), new Object[]{new ItemStack(Items.paper, 1),new ItemStack(Items.paper, 1),new ItemStack(Items.paper, 1), new ItemStack(glue, 1)});
    GameRegistry.addShapelessRecipe(new ItemStack(Items.paper, 1), new Object[]{new ItemStack(dust, 1, 19),new ItemStack(dust, 1, 19),new ItemStack(dust, 1, 19), new ItemStack(Items.water_bucket, 1)});
    // Bulk variant: 8 sawdust + water -> 3 paper (better yield).
    GameRegistry.addShapelessRecipe(new ItemStack(Items.paper, 3), new Object[]{new ItemStack(dust, 1, 19),new ItemStack(dust, 1, 19),new ItemStack(dust, 1, 19),new ItemStack(dust, 1, 19),new ItemStack(dust, 1, 19),new ItemStack(dust, 1, 19),new ItemStack(dust, 1, 19),new ItemStack(dust, 1, 19), new ItemStack(Items.water_bucket, 1)});
    GameRegistry.addRecipe(new ItemStack(pnumaticGun, 1) , new Object[]{
        "IP ", " CI", "IPI", Character.valueOf('I'), new ItemStack(plate, 1, 0),
        Character.valueOf('P'), new ItemStack(plate, 1, 1), 'C', new ItemStack(itemCasing, 1, 2) });
    // Craftable horse armour (vanilla items, normally loot-only).
    GameRegistry.addRecipe(new ItemStack(Items.iron_horse_armor, 1) , new Object[]{
        " RP", "PPR", "PPP", Character.valueOf('P'), new ItemStack(plate, 1, 0), 'R', brassRivets });
    GameRegistry.addRecipe(new ShapedOreRecipe(new ItemStack(Items.golden_horse_armor, 1) , new Object[]{
        " RP", "PPR", "PPP", Character.valueOf('P'), "ingotGold", 'R', brassRivets }));
    GameRegistry.addRecipe(new ShapedOreRecipe(new ItemStack(Items.diamond_horse_armor, 1) , new Object[]{
        " RP", "PPR", "PPP", Character.valueOf('P'), "gemDiamond", 'R', brassRivets }));
    GameRegistry.addRecipe(new ShapedOreRecipe(new ItemStack(speedyMinecart, 1) , new Object[]{
        " ", "P P", "PPP", Character.valueOf('P'), "ingotBronze" }));
    // Extra grass drops (weight 4 = common, 1 = rare).
    MinecraftForge.addGrassSeed(new ItemStack(Items.melon_seeds), 4);
    MinecraftForge.addGrassSeed(new ItemStack(Items.pumpkin_seeds), 4);
    MinecraftForge.addGrassSeed(new ItemStack(Items.carrot), 1);
    MinecraftForge.addGrassSeed(new ItemStack(Items.potato), 1);
    // One bag recipe per wool colour; bag meta mirrors the wool meta.
    for(int i = 0; i < 16; i++)
    {
        GameRegistry.addRecipe(new ItemStack(bag, 1, i) , new Object[]{
            "LWL", "W W", "LWL", 'L', Items.leather, 'W', new ItemStack(Blocks.wool, 1, i) });
    }
    GameRegistry.addRecipe(new ItemStack(steamJetpack, 1) , new Object[]{
        "RLB", "C C", "BLR", 'C', new ItemStack(itemCasing, 1, 2), 'L', Items.leather,
        'R', new ItemStack(toolPart, 1, 1), 'B', new ItemStack(toolPart, 1, 0) });
    // --- minecart combinations: {plain, speedy} x {chest, chunk-loader, tank} ---
    GameRegistry.addShapelessRecipe(new ItemStack(speedyChestcart, 1), new Object[]{Blocks.chest, speedyMinecart});
    GameRegistry.addShapelessRecipe(new ItemStack(chunkLoadCart, 1), new Object[]{AdvancedUtilitiesBlocks.chunkLoader, Items.minecart});
    GameRegistry.addShapelessRecipe(new ItemStack(speedyChunkLoadCart, 1), new Object[]{AdvancedUtilitiesBlocks.chunkLoader, speedyMinecart});
    GameRegistry.addShapelessRecipe(new ItemStack(chunkChestCart, 1), new Object[]{Blocks.chest, Items.minecart, AdvancedUtilitiesBlocks.chunkLoader});
    GameRegistry.addShapelessRecipe(new ItemStack(speedyChunkChestCart, 1), new Object[]{Blocks.chest, speedyMinecart, AdvancedUtilitiesBlocks.chunkLoader});
    GameRegistry.addShapelessRecipe(new ItemStack(tankCart, 1), new Object[]{AdvancedUtilitiesBlocks.blockTank, Items.minecart});
    GameRegistry.addShapelessRecipe(new ItemStack(speedyTankCart, 1), new Object[]{AdvancedUtilitiesBlocks.blockTank, speedyMinecart});
    GameRegistry.addShapelessRecipe(new ItemStack(chunkTankCart, 1), new Object[]{AdvancedUtilitiesBlocks.blockTank, Items.minecart, AdvancedUtilitiesBlocks.chunkLoader});
    GameRegistry.addShapelessRecipe(new ItemStack(speedyChunkTankCart, 1), new Object[]{AdvancedUtilitiesBlocks.blockTank, speedyMinecart, AdvancedUtilitiesBlocks.chunkLoader});
    GameRegistry.addRecipe(new ItemStack(jackHammer, 1) , new Object[]{
        "CPC", "CMC", " B ", 'C', new ItemStack(itemCasing, 1, 2), 'P', new ItemStack(plate, 1, 0),
        'M', new ItemStack(AdvancedUtilitiesBlocks.machineBase, 1, BlockMachineBase.BRONZEMACHINE),
        'B', new ItemStack(toolPart, 1, 0) });
    // Bullets = head + matching casing.
    GameRegistry.addShapelessRecipe(new ItemStack(bronzeBullet, 1), new Object[]{ new ItemStack(itemBulletHead, 1, 1), new ItemStack(itemCasing, 1 ,1) });
    GameRegistry.addShapelessRecipe(new ItemStack(steelBullet, 1), new Object[]{ new ItemStack(itemBulletHead, 1, 2), new ItemStack(itemCasing, 1 ,3) });
    // --- furnace smelting: dust (by meta) -> ingot/item; last arg is XP ---
    // NOTE(review): dust meta 8 and 16 are intentionally absent here — confirm
    // they are handled elsewhere (or have no smelt result).
    GameRegistry.addSmelting(new ItemStack(dust, 1, 0), new ItemStack(ingot, 1, ItemIngot.COPPER), 1);
    GameRegistry.addSmelting(new ItemStack(dust, 1, 1), new ItemStack(Items.iron_ingot, 1), 1);
    GameRegistry.addSmelting(new ItemStack(dust, 1, 2), new ItemStack(ingot, 1, ItemIngot.TIN), 1);
    GameRegistry.addSmelting(new ItemStack(dust, 1, 3), new ItemStack(ingot, 1, ItemIngot.ZINC), 1);
    GameRegistry.addSmelting(new ItemStack(dust, 1, 4), new ItemStack(ingot, 1, ItemIngot.BRONZE), 1);
    GameRegistry.addSmelting(new ItemStack(dust, 1, 5), new ItemStack(ingot, 1, ItemIngot.BRASS), 1);
    GameRegistry.addSmelting(new ItemStack(dust, 1, 6), new ItemStack(Items.gold_ingot, 1), 1);
    GameRegistry.addSmelting(new ItemStack(dust, 1, 7), new ItemStack(Items.diamond, 1), 1);
    GameRegistry.addSmelting(new ItemStack(dust, 1, 9), new ItemStack(ingot, 1, ItemIngot.SILVER), 1);
    GameRegistry.addSmelting(new ItemStack(dust, 1, 10), new ItemStack(ingot, 1, ItemIngot.PLATINUM), 1);
    GameRegistry.addSmelting(new ItemStack(dust, 1, 11), new ItemStack(ingot, 1, ItemIngot.ALUMINUM), 1);
    GameRegistry.addSmelting(new ItemStack(dust, 1, 12), new ItemStack(ingot, 1, ItemIngot.LEAD), 1);
    GameRegistry.addSmelting(new ItemStack(dust, 1, 13), new ItemStack(ingot, 1, ItemIngot.TUNGSTEN), 1);
    GameRegistry.addSmelting(new ItemStack(dust, 1, 14), new ItemStack(ingot, 1, ItemIngot.IRIDIUM), 1);
    GameRegistry.addSmelting(new ItemStack(dust, 1, 15), new ItemStack(ingot, 1, ItemIngot.PALIDIUM), 1);
    // Flour smelts straight to bread.
    GameRegistry.addSmelting(new ItemStack(dust, 1, 18), new ItemStack(Items.bread, 1), 1);
    GameRegistry.addSmelting(new ItemStack(dust, 1, 20), new ItemStack(ingot, 1, ItemIngot.NICKEL), 1);
    GameRegistry.addSmelting(new ItemStack(AdvancedUtilitiesBlocks.blockRubberLog, 1), new ItemStack(rubber, 2), 1);
    // Diamond dust (meta 7) <-> 9 tiny piles (meta 17), lossless both ways.
    GameRegistry.addShapelessRecipe(new ItemStack(dust, 9, 17), new Object[]{ new ItemStack(dust, 1, 7) });
    GameRegistry.addShapelessRecipe(new ItemStack(dust, 1, 7), new Object[]{ new ItemStack(dust, 1, 17), new ItemStack(dust, 1, 17), new ItemStack(dust, 1, 17), new ItemStack(dust, 1, 17), new ItemStack(dust, 1, 17), new ItemStack(dust, 1, 17),new ItemStack(dust, 1, 17), new ItemStack(dust, 1, 17), new ItemStack(dust, 1, 17) });
    // Bonemeal (dye meta 15) in bulk from flour + dirt.
    GameRegistry.addRecipe(new ShapedOreRecipe(new ItemStack(Items.dye, 64, 15) , new Object[]{
        "FDF","DFD","FDF",'F', "foodFlour", 'D', "dirt" }));
    GameRegistry.addRecipe(new ShapelessOreRecipe(new ItemStack(Items.gunpowder, 4), new Object[] { "dustCoal", new ItemStack(Items.coal, 1, 1), Items.wheat, Items.redstone }));
    GameRegistry.addRecipe(new ShapelessOreRecipe(new ItemStack(Blocks.dirt, 8), new Object[] { "cobblestone", "treeSapling", Items.wheat }));
    // --- trinkets ---
    GameRegistry.addRecipe(new ShapedOreRecipe(new ItemStack(climbingBelt, 1) , new Object[]{
        "SBS","LLL","SBS",'S', "slimeball", 'B', "ingotBrass", 'L', Items.leather }));
    GameRegistry.addRecipe(new ShapedOreRecipe(new ItemStack(magnetAmulet, 1) , new Object[]{
        "TTT","T T","LDR",'T', "ingotTungsten", 'R', Items.redstone, 'L', new ItemStack(Items.dye, 1,4), 'D', "gemDiamond" }));
    GameRegistry.addRecipe(new ShapedOreRecipe(new ItemStack(voidRing, 1) , new Object[]{
        "OGO","O O","OOO",'O', Blocks.obsidian, 'G', "blockGlass", }));
}

/**
 * Registers the machine-upgrade item recipes. Upgrades come in tiers of
 * three (base block recipe, mid tier wrapping the previous upgrade, diamond
 * top tier); several tiers accept alternative base blocks (e.g. iron OR
 * bronze, both aluminum spellings).
 */
public static void addUpgradeRecipes() {
    // Tier group 0-3: water/redstone/gold/diamond speed chain.
    GameRegistry.addRecipe(new ShapedOreRecipe(new ItemStack(upgrade, 1, 0) , new Object[]{
        " I ","WWW"," I ",'I', "ingotIron", 'W',Items.water_bucket }));
    GameRegistry.addRecipe(new ItemStack(upgrade, 1, 1) , new Object[]{
        "RRR","RRR","RRR",'R', Blocks.redstone_block });
    GameRegistry.addRecipe(new ShapedOreRecipe(new ItemStack(upgrade, 1, 2) , new Object[]{
        " G ","UUU"," G ",'G', "ingotGold", 'U', new ItemStack(upgrade, 1, 1) }));
    GameRegistry.addRecipe(new ShapedOreRecipe(new ItemStack(upgrade, 1, 3) , new Object[]{
        "DDD","DUD","DDD",'D', "gemDiamond", 'U', new ItemStack(upgrade, 1, 2) }));
    // Tier group 4-6: aluminum chain (both spellings accepted for tier 4).
    GameRegistry.addRecipe(new ShapedOreRecipe(new ItemStack(upgrade, 1, 4) , new Object[]{
        "RRR","RRR","RRR",'R', "blockAluminum" }));
    GameRegistry.addRecipe(new ShapedOreRecipe(new ItemStack(upgrade, 1, 4) , new Object[]{
        "RRR","RRR","RRR",'R', "blockAluminium" }));
    GameRegistry.addRecipe(new ItemStack(upgrade, 1, 5) , new Object[]{
        "WWW","UUU","WWW",'W', new ItemStack(Blocks.wool, 1, OreDictionary.WILDCARD_VALUE), 'U', new ItemStack(upgrade, 1, 4) });
    GameRegistry.addRecipe(new ShapedOreRecipe(new ItemStack(upgrade, 1, 6) , new Object[]{
        "DDD","DUD","DDD",'D', "gemDiamond", 'U', new ItemStack(upgrade, 1, 5) }));
    // Tier group 7-9: lapis chain.
    GameRegistry.addRecipe(new ItemStack(upgrade, 1, 7) , new Object[]{
        "RRR","RRR","RRR",'R', Blocks.lapis_block });
    GameRegistry.addRecipe(new ItemStack(upgrade, 1, 8) , new Object[]{
        "WWW","UUU","WWW",'W', Blocks.lapis_block, 'U', new ItemStack(upgrade, 1, 7) });
    GameRegistry.addRecipe(new ShapedOreRecipe(new ItemStack(upgrade, 1, 9) , new Object[]{
        "DDD","DUD","DDD",'D', "gemDiamond", 'U', new ItemStack(upgrade, 1, 8) }));
    // Tier group 10-12: bronze/iron chain (either block works at both steps).
    GameRegistry.addRecipe(new ShapedOreRecipe(new ItemStack(upgrade, 1, 10) , new Object[]{
        "RRR","RRR","RRR",'R', "blockBronze" }));
    GameRegistry.addRecipe(new ShapedOreRecipe(new ItemStack(upgrade, 1, 10) , new Object[]{
        "RRR","RRR","RRR",'R', "blockIron" }));
    GameRegistry.addRecipe(new ShapedOreRecipe(new ItemStack(upgrade, 1, 11) , new Object[]{
        " I ","UUU"," I ",'I', "blockIron", 'U', new ItemStack(upgrade, 1, 10) }));
    GameRegistry.addRecipe(new ShapedOreRecipe(new ItemStack(upgrade, 1, 11) , new Object[]{
        " I ","UUU"," I ",'I', "blockBronze", 'U', new ItemStack(upgrade, 1, 10) }));
    GameRegistry.addRecipe(new ShapedOreRecipe(new ItemStack(upgrade, 1, 12) , new Object[]{
        "DDD","DUD","DDD",'D', "gemDiamond", 'U', new ItemStack(upgrade, 1, 11) }));
    // Tier group 13-15: quartz chain.
    GameRegistry.addRecipe(new ItemStack(upgrade, 1, 13) , new Object[]{
        "RRR","RRR","RRR",'R', Blocks.quartz_block });
    GameRegistry.addRecipe(new ItemStack(upgrade, 1, 14) , new Object[]{
        "WWW","UUU","WWW",'W', Blocks.quartz_block, 'U', new ItemStack(upgrade, 1, 13) });
    GameRegistry.addRecipe(new ShapedOreRecipe(new ItemStack(upgrade, 1, 15) , new Object[]{
        "DDD","DUD","DDD",'D', "gemDiamond", 'U', new ItemStack(upgrade, 1, 14) }));
}

/**
 * Registers conversions between vanilla potion items and this mod's
 * quick-potion item, plus the quick-potion brewing chain
 * (water -> awkward -> glistering -> healing/regeneration).
 * Vanilla potion metadata: 0 = water bottle, 16 = awkward,
 * 8261 = healing, 8229 = regeneration (damage values, not NBT).
 */
public static void addPotionRecipes() {
    // Vanilla bottle <-> quick potion, both directions (metas 2/3 = water/awkward).
    GameRegistry.addShapelessRecipe(new ItemStack(quickPotion, 1, 2), new Object[]{ new ItemStack(Items.potionitem ,1 , 0) });
    GameRegistry.addShapelessRecipe(new ItemStack(quickPotion, 1, 3), new Object[]{ new ItemStack(Items.potionitem ,1 , 16) });
    GameRegistry.addShapelessRecipe(new ItemStack(Items.potionitem ,1 , 0), new Object[]{ new ItemStack(quickPotion, 1, 2) });
    GameRegistry.addShapelessRecipe(new ItemStack(Items.potionitem ,1 , 16), new Object[]{ new ItemStack(quickPotion, 1, 3) });
    // Brewing chain using nether wart and tiny diamond dust (dust meta 17).
    GameRegistry.addShapelessRecipe(new ItemStack(quickPotion, 1, 3), new Object[]{ new ItemStack(quickPotion, 1, 2), Items.nether_wart });
    GameRegistry.addShapelessRecipe(new ItemStack(quickPotion, 1, 4), new Object[]{ new ItemStack(quickPotion, 1, 3), new ItemStack(dust, 1, 17) });
    GameRegistry.addShapelessRecipe(new ItemStack(quickPotion, 1, 4), new Object[]{ new ItemStack(Items.potionitem, 1, 16), new ItemStack(dust, 1, 17) });
    GameRegistry.addShapelessRecipe(new ItemStack(quickPotion, 1, 0), new Object[]{ new ItemStack(Items.potionitem, 1, 8261), new ItemStack(dust, 1, 17) });
    GameRegistry.addShapelessRecipe(new ItemStack(quickPotion, 1, 0), new Object[]{ new ItemStack(quickPotion, 1, 4), new ItemStack(Items.speckled_melon, 1) });
    GameRegistry.addShapelessRecipe(new ItemStack(quickPotion, 1, 1), new Object[]{ new ItemStack(Items.potionitem, 1, 8229), new ItemStack(dust, 1, 17) });
    GameRegistry.addShapelessRecipe(new ItemStack(quickPotion, 1, 1), new Object[]{ new ItemStack(quickPotion, 1, 0), Items.glowstone_dust });
}

// Intentionally empty placeholder — machinery recipes not yet implemented.
public void addMachineryRecipes() {
}
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.sling.models.impl; import java.lang.ref.PhantomReference; import java.lang.ref.ReferenceQueue; import java.lang.reflect.InvocationHandler; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.lang.reflect.ParameterizedType; import java.lang.reflect.Proxy; import java.lang.reflect.Type; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Dictionary; import java.util.HashMap; import java.util.Hashtable; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.WeakHashMap; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import javax.annotation.CheckForNull; import javax.annotation.Nonnull; import javax.annotation.PostConstruct; import org.apache.commons.lang.StringUtils; import org.apache.felix.scr.annotations.Activate; import org.apache.felix.scr.annotations.Component; import org.apache.felix.scr.annotations.Deactivate; import org.apache.felix.scr.annotations.Property; import org.apache.felix.scr.annotations.Reference; import org.apache.felix.scr.annotations.ReferenceCardinality; import 
org.apache.felix.scr.annotations.ReferencePolicy; import org.apache.felix.scr.annotations.ReferencePolicyOption; import org.apache.felix.scr.annotations.References; import org.apache.felix.scr.annotations.Service; import org.apache.sling.api.SlingHttpServletRequest; import org.apache.sling.api.adapter.Adaptable; import org.apache.sling.api.adapter.AdapterFactory; import org.apache.sling.api.resource.Resource; import org.apache.sling.commons.osgi.PropertiesUtil; import org.apache.sling.commons.osgi.RankedServices; import org.apache.sling.models.annotations.Model; import org.apache.sling.models.annotations.ValidationStrategy; import org.apache.sling.models.annotations.ViaProviderType; import org.apache.sling.models.annotations.via.BeanProperty; import org.apache.sling.models.export.spi.ModelExporter; import org.apache.sling.models.factory.ExportException; import org.apache.sling.models.factory.InvalidAdaptableException; import org.apache.sling.models.factory.InvalidModelException; import org.apache.sling.models.factory.MissingElementException; import org.apache.sling.models.factory.MissingElementsException; import org.apache.sling.models.factory.MissingExporterException; import org.apache.sling.models.factory.ModelClassException; import org.apache.sling.models.factory.ModelFactory; import org.apache.sling.models.factory.PostConstructException; import org.apache.sling.models.factory.ValidationException; import org.apache.sling.models.impl.model.ConstructorParameter; import org.apache.sling.models.impl.model.InjectableElement; import org.apache.sling.models.impl.model.InjectableField; import org.apache.sling.models.impl.model.InjectableMethod; import org.apache.sling.models.impl.model.ModelClass; import org.apache.sling.models.impl.model.ModelClassConstructor; import org.apache.sling.models.spi.AcceptsNullName; import org.apache.sling.models.spi.DisposalCallback; import org.apache.sling.models.spi.DisposalCallbackRegistry; import 
org.apache.sling.models.spi.ImplementationPicker;
import org.apache.sling.models.spi.Injector;
import org.apache.sling.models.spi.ModelValidation;
import org.apache.sling.models.spi.ValuePreparer;
import org.apache.sling.models.spi.ViaProvider;
import org.apache.sling.models.spi.injectorspecific.InjectAnnotationProcessor;
import org.apache.sling.models.spi.injectorspecific.InjectAnnotationProcessorFactory;
import org.apache.sling.models.spi.injectorspecific.InjectAnnotationProcessorFactory2;
import org.apache.sling.models.spi.injectorspecific.StaticInjectAnnotationProcessorFactory;
import org.apache.sling.scripting.api.BindingsValuesProvidersByContext;
import org.osgi.framework.BundleContext;
import org.osgi.framework.Constants;
import org.osgi.framework.ServiceRegistration;
import org.osgi.service.component.ComponentContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/*
 * Central Sling Models implementation: an AdapterFactory / ModelFactory that
 * instantiates @Model-annotated classes. Registered as an OSGi DS component;
 * Injector and ViaProvider services are bound dynamically (optional-multiple).
 * Runnable is implemented for the periodic disposal-callback cleanup job.
 */
@Component(metatype = true, immediate = true, label = "Apache Sling Model Adapter Factory")
@Service(value = ModelFactory.class)
@References({
    @Reference(
        name = "injector",
        referenceInterface = Injector.class,
        cardinality = ReferenceCardinality.OPTIONAL_MULTIPLE,
        policy = ReferencePolicy.DYNAMIC),
    @Reference(
        name = "viaProvider",
        referenceInterface = ViaProvider.class,
        cardinality = ReferenceCardinality.OPTIONAL_MULTIPLE,
        policy = ReferencePolicy.DYNAMIC) })
@SuppressWarnings("deprecation")
public class ModelAdapterFactory implements AdapterFactory, Runnable, ModelFactory {

    // hard code this value since we always know exactly how many there are
    private static final int VALUE_PREPARERS_COUNT = 2;

    /**
     * Per-adaptable registry of {@link DisposalCallback}s. Callbacks are
     * collected during injection, the list is then sealed (made unmodifiable),
     * and {@link #onDisposed()} fires them all when the adaptable is reclaimed.
     */
    private static class DisposalCallbackRegistryImpl implements DisposalCallbackRegistry {

        private List<DisposalCallback> callbacks = new ArrayList<DisposalCallback>();

        @Override
        public void addDisposalCallback(@Nonnull DisposalCallback callback) {
            callbacks.add(callback);
        }

        // Freeze the list; any later addDisposalCallback will throw.
        private void seal() {
            callbacks = Collections.unmodifiableList(callbacks);
        }

        // Invoke every registered callback; called from the cleanup job.
        private void onDisposed() {
            for (DisposalCallback callback : callbacks) {
                callback.onDisposed();
            }
        }
    }

    // References enqueued here signal that an adaptable became unreachable;
    // disposalCallbacks maps each such reference to its callback registry.
    private ReferenceQueue<Object> queue;
    private ConcurrentMap<java.lang.ref.Reference<Object>, DisposalCallbackRegistryImpl> disposalCallbacks;

    // Entry point of the periodic cleanup job (see PROP_CLEANUP_JOB_PERIOD).
    @Override
    public void run() {
        clearDisposalCallbackRegistryQueue();
    }

    // Drain the reference queue and fire disposal callbacks for every
    // adaptable that has been garbage-collected since the last run.
    private void clearDisposalCallbackRegistryQueue() {
        java.lang.ref.Reference<?> ref = queue.poll();
        while (ref != null) {
            log.debug("calling disposal for {}.", ref.toString());
            // NOTE(review): remove() returns null if the ref was never put into
            // the map — that would NPE on the next line. Presumably every
            // enqueued reference was registered; confirm at the creation site.
            DisposalCallbackRegistryImpl registry = disposalCallbacks.remove(ref);
            registry.onDisposed();
            ref = queue.poll();
        }
    }

    private static final Logger log = LoggerFactory.getLogger(ModelAdapterFactory.class);

    private static final int DEFAULT_MAX_RECURSION_DEPTH = 20;

    private static final long DEFAULT_CLEANUP_JOB_PERIOD = 30l;

    @Property(label = "Maximum Recursion Depth", description = "Maximum depth adaptation will be attempted.", intValue = DEFAULT_MAX_RECURSION_DEPTH)
    private static final String PROP_MAX_RECURSION_DEPTH = "max.recursion.depth";

    @Property(label = "Cleanup Job Period", description = "Period at which OSGi service references from ThreadLocals will be cleaned up.", longValue = DEFAULT_CLEANUP_JOB_PERIOD)
    private static final String PROP_CLEANUP_JOB_PERIOD = "cleanup.job.period";

    // Injectors keyed by injector name, plus one global ranking-ordered view.
    private final @Nonnull ConcurrentMap<String, RankedServices<Injector>> injectors = new ConcurrentHashMap<String, RankedServices<Injector>>();

    private final @Nonnull RankedServices<Injector> sortedInjectors = new RankedServices<Injector>();

    // ViaProvider services keyed by the @Via provider type they handle.
    private final @Nonnull ConcurrentMap<Class<? extends ViaProviderType>, ViaProvider> viaProviders = new ConcurrentHashMap<Class<?
extends ViaProviderType>, ViaProvider>();

    @Reference(name = "injectAnnotationProcessorFactory", referenceInterface = InjectAnnotationProcessorFactory.class,
            cardinality = ReferenceCardinality.OPTIONAL_MULTIPLE, policy = ReferencePolicy.DYNAMIC)
    private final @Nonnull RankedServices<InjectAnnotationProcessorFactory> injectAnnotationProcessorFactories = new RankedServices<InjectAnnotationProcessorFactory>();

    @Reference(name = "injectAnnotationProcessorFactory2", referenceInterface = InjectAnnotationProcessorFactory2.class,
            cardinality = ReferenceCardinality.OPTIONAL_MULTIPLE, policy = ReferencePolicy.DYNAMIC)
    private final @Nonnull RankedServices<InjectAnnotationProcessorFactory2> injectAnnotationProcessorFactories2 = new RankedServices<InjectAnnotationProcessorFactory2>();

    @Reference(name = "staticInjectAnnotationProcessorFactory", referenceInterface = StaticInjectAnnotationProcessorFactory.class,
            cardinality = ReferenceCardinality.OPTIONAL_MULTIPLE, policy = ReferencePolicy.DYNAMIC)
    private final @Nonnull RankedServices<StaticInjectAnnotationProcessorFactory> staticInjectAnnotationProcessorFactories = new RankedServices<StaticInjectAnnotationProcessorFactory>();

    @Reference(name = "implementationPicker", referenceInterface = ImplementationPicker.class,
            cardinality = ReferenceCardinality.OPTIONAL_MULTIPLE, policy = ReferencePolicy.DYNAMIC)
    private final @Nonnull RankedServices<ImplementationPicker> implementationPickers = new RankedServices<ImplementationPicker>();

    // bind the service with the highest priority (if a new one comes in this service gets restarted)
    @Reference(cardinality=ReferenceCardinality.OPTIONAL_UNARY, policyOption=ReferencePolicyOption.GREEDY)
    private ModelValidation modelValidation = null;

    @Reference(name = "modelExporter", cardinality = ReferenceCardinality.OPTIONAL_MULTIPLE,
            policy = ReferencePolicy.DYNAMIC, referenceInterface = ModelExporter.class)
    private final @Nonnull RankedServices<ModelExporter> modelExporters = new RankedServices<ModelExporter>();

    @Reference
    private BindingsValuesProvidersByContext bindingsValuesProvidersByContext;

    // Bundle listener that discovers @Model classes as bundles start.
    ModelPackageBundleListener listener;

    // Registry of model class <-> adapter type mappings, filled by the listener.
    final AdapterImplementations adapterImplementations = new AdapterImplementations();

    private ServiceRegistration jobRegistration;

    private ServiceRegistration configPrinterRegistration;

    // Use threadlocal to count recursive invocations and break recursing if a max. limit is reached (to avoid cyclic dependencies)
    private ThreadLocal<ThreadInvocationCounter> invocationCountThreadLocal;

    // Cache of adaptable -> (model type -> model instance) for @Model(cache = true).
    private Map<Object, Map<Class, Object>> adapterCache;

    // use a smaller initial capacity than the default as we expect a relatively small number of
    // adapters per adaptable
    private final int INNER_CACHE_INITIAL_CAPACITY = 4;

    /**
     * Sling AdapterFactory entry point: adapt {@code adaptable} to the model
     * {@code type}. Unlike {@link #createModel}, failures are logged at WARN
     * and {@code null} is returned instead of throwing.
     */
    public <AdapterType> AdapterType getAdapter(Object adaptable, Class<AdapterType> type) {
        Result<AdapterType> result = internalCreateModel(adaptable, type);
        if (!result.wasSuccessful()) {
            log.warn("Could not adapt to model", result.getThrowable());
            return null;
        } else {
            return result.getValue();
        }
    }

    /**
     * ModelFactory entry point: like {@link #getAdapter} but rethrows the
     * failure cause (missing elements, invalid adaptable, validation error …)
     * instead of swallowing it.
     */
    @Override
    public @Nonnull <ModelType> ModelType createModel(@Nonnull Object adaptable, @Nonnull Class<ModelType> type) throws MissingElementsException,
            InvalidAdaptableException, ValidationException, InvalidModelException {
        Result<ModelType> result = internalCreateModel(adaptable, type);
        if (!result.wasSuccessful()) {
            throw result.getThrowable();
        }
        return result.getValue();
    }

    @Override
    public boolean canCreateFromAdaptable(@Nonnull Object adaptable, @Nonnull Class<?> modelClass) throws ModelClassException {
        return internalCanCreateFromAdaptable(adaptable, modelClass);
    }

    // True iff a model implementation is registered for requestedType AND its
    // @Model.adaptables() declares a type the given adaptable is an instance of.
    private boolean internalCanCreateFromAdaptable(Object adaptable, Class<?> requestedType) throws ModelClassException {
        try {
            ModelClass<?> modelClass = getImplementationTypeForAdapterType(requestedType, adaptable);
            Class<?>[] declaredAdaptable = modelClass.getModelAnnotation().adaptables();
            for (Class<?> clazz : declaredAdaptable) {
                if (clazz.isInstance(adaptable)) {
                    return true;
                }
            }
        } catch (ModelClassException e) {
            log.debug("Could not find implementation for given type " + requestedType + ". Probably forgot either the model annotation or it was not registered as adapter factory (yet)", e);
            return false;
        }
        return false;
    }

    @Override
    @Deprecated
    public boolean isModelClass(@Nonnull Object adaptable, @Nonnull Class<?> requestedType) {
        try {
            getImplementationTypeForAdapterType(requestedType, adaptable);
        } catch (ModelClassException e) {
            log.debug("Could not find implementation for given adaptable. Probably forgot either the model annotation or it was not registered as adapter factory (yet)", e);
            return false;
        }
        return true;
    }

    @Override
    public boolean isModelClass(@Nonnull Class<?> type) {
        return this.adapterImplementations.isModelClass(type);
    }

    /**
     *
     * @param requestedType the adapter type
     * @param adaptable the adaptable
     * @return the implementation type to use for the desired model type or null if there is none registered
     * @see <a
     *      href="http://sling.apache.org/documentation/bundles/models.html#specifying-an-alternate-adapter-class-since-sling-models-110">Specifying
     *      an Alternate Adapter Class</a>
     */
    private <ModelType> ModelClass<ModelType> getImplementationTypeForAdapterType(Class<ModelType> requestedType, Object adaptable) {
        // lookup ModelClass wrapper for implementation type
        // additionally check if a different implementation class was registered for this adapter type
        // the adapter implementation is initially filled by the ModelPackageBundleList
        ModelClass<ModelType> modelClass = this.adapterImplementations.lookup(requestedType, adaptable);
        if (modelClass != null) {
            log.debug("Using implementation type {} for requested adapter type {}", modelClass, requestedType);
            return modelClass;
        }
        // throw exception here
        throw new ModelClassException("Could not yet find an adapter factory for the model " + requestedType + " from adaptable " + adaptable.getClass());
    }

    @SuppressWarnings("unchecked")
// Central model-creation routine backing getAdapter() and createModel().
// Guards against cyclic model dependencies via a per-thread recursion counter,
// consults/populates the @Model(cache = true) cache, and dispatches to either
// proxy-based creation (interfaces) or constructor/field injection (classes).
private <ModelType> Result<ModelType> internalCreateModel(final Object adaptable, final Class<ModelType> requestedType) {
    Result<ModelType> result;
    ThreadInvocationCounter threadInvocationCounter = invocationCountThreadLocal.get();
    if (threadInvocationCounter.isMaximumReached()) {
        String msg = String.format("Adapting %s to %s failed, too much recursive invocations (>=%s).",
                adaptable, requestedType, threadInvocationCounter.maxRecursionDepth);
        return new Result<ModelType>(new ModelClassException(msg));
    }
    threadInvocationCounter.increase();
    try {
        // check if a different implementation class was registered for this adapter type
        ModelClass<ModelType> modelClass = getImplementationTypeForAdapterType(requestedType, adaptable);

        if (!modelClass.hasModelAnnotation()) {
            String msg = String.format("Provided Adapter class does not have a Model annotation: %s", modelClass.getType());
            return new Result<ModelType>(new ModelClassException(msg));
        }
        boolean isAdaptable = false;

        Model modelAnnotation = modelClass.getModelAnnotation();

        // fast path: return a previously cached model instance for this adaptable, if any
        if (modelAnnotation.cache()) {
            Map<Class, Object> adaptableCache = adapterCache.get(adaptable);
            if (adaptableCache != null) {
                ModelType cachedObject = (ModelType) adaptableCache.get(requestedType);
                if (cachedObject != null) {
                    return new Result<ModelType>(cachedObject);
                }
            }
        }

        // the adaptable must be an instance of one of the declared adaptables
        Class<?>[] declaredAdaptable = modelAnnotation.adaptables();
        for (Class<?> clazz : declaredAdaptable) {
            if (clazz.isInstance(adaptable)) {
                isAdaptable = true;
            }
        }
        if (!isAdaptable) {
            String msg = String.format("Adaptables (%s) are not acceptable for the model class: %s", StringUtils.join(declaredAdaptable), modelClass.getType());
            return new Result<ModelType>(new InvalidAdaptableException(msg));
        } else {
            // optional Sling Validation of the adaptable, depending on @Model(validation = ...)
            RuntimeException t = validateModel(adaptable, modelClass.getType(), modelAnnotation);
            if (t != null) {
                return new Result<ModelType>(t);
            }
            if (modelClass.getType().isInterface()) {
                // interface models are backed by a dynamic proxy over injected method values
                Result<InvocationHandler> handlerResult = createInvocationHandler(adaptable, modelClass);
                if (handlerResult.wasSuccessful()) {
                    ModelType model = (ModelType) Proxy.newProxyInstance(modelClass.getType().getClassLoader(), new Class<?>[] { modelClass.getType() }, handlerResult.getValue());

                    if (modelAnnotation.cache()) {
                        Map<Class, Object> adaptableCache = adapterCache.get(adaptable);
                        if (adaptableCache == null) {
                            adaptableCache = new ConcurrentHashMap<Class, Object>(INNER_CACHE_INITIAL_CAPACITY);
                            adapterCache.put(adaptable, adaptableCache);
                        }
                        adaptableCache.put(requestedType, model);
                    }

                    result = new Result<ModelType>(model);
                } else {
                    return new Result<ModelType>(handlerResult.getThrowable());
                }
            } else {
                // class models are instantiated and injected directly
                try {
                    result = createObject(adaptable, modelClass);

                    if (result.wasSuccessful() && modelAnnotation.cache()) {
                        Map<Class, Object> adaptableCache = adapterCache.get(adaptable);
                        if (adaptableCache == null) {
                            adaptableCache = new ConcurrentHashMap<Class, Object>(INNER_CACHE_INITIAL_CAPACITY);
                            adapterCache.put(adaptable, adaptableCache);
                        }
                        adaptableCache.put(requestedType, result.getValue());
                    }
                } catch (Exception e) {
                    String msg = String.format("Unable to create model %s", modelClass.getType());
                    return new Result<ModelType>(new ModelClassException(msg, e));
                }
            }
        }
        return result;
    } finally {
        // always decrement, even on early return/throw, to keep the counter balanced
        threadInvocationCounter.decrease();
    }
}

// Runs Sling Validation for the model if enabled via @Model(validation = ...).
// Returns null on success; a RuntimeException describing the failure otherwise.
private <ModelType> RuntimeException validateModel(Object adaptable, Class<ModelType> modelType, Model modelAnnotation) {
    if (modelAnnotation.validation() != ValidationStrategy.DISABLED) {
        if (modelValidation == null) {
            return new ValidationException("No active service for ModelValidation found, therefore no validation can be performed.");
        }
        return modelValidation.validate(adaptable, modelType, modelAnnotation.validation() == ValidationStrategy.REQUIRED);
    }
    return null;
}

// Strategy interface: how an injected value is written to its target
// (field, interface-method map entry, or constructor parameter slot).
private interface InjectCallback {
    /**
     * Is called each time when the given value should be injected into the given element
     * @param element
     * @param value
     * @return an InjectionResult
     */
    RuntimeException inject(InjectableElement element, Object value);
}

private class
SetFieldCallback implements InjectCallback {

    // target instance whose field is set
    private final Object object;

    private SetFieldCallback(Object object) {
        this.object = object;
    }

    @Override
    public RuntimeException inject(InjectableElement element, Object value) {
        return setField((InjectableField) element, object, value);
    }
}

// Writes injected values into the method->value map backing an interface-model proxy.
private class SetMethodsCallback implements InjectCallback {

    private final Map<Method, Object> methods;

    private SetMethodsCallback( Map<Method, Object> methods) {
        this.methods = methods;
    }

    @Override
    public RuntimeException inject(InjectableElement element, Object value) {
        return setMethod((InjectableMethod) element, methods, value);
    }
}

// Writes injected values into the positional argument list for constructor injection.
private class SetConstructorParameterCallback implements InjectCallback {

    private final List<Object> parameterValues;

    private SetConstructorParameterCallback(List<Object> parameterValues) {
        this.parameterValues = parameterValues;
    }

    @Override
    public RuntimeException inject(InjectableElement element, Object value) {
        return setConstructorParameter((ConstructorParameter)element, parameterValues, value);
    }
}

// Core injection routine for a single element (field / method / constructor parameter):
// resolves name and adaptable, runs the matching injectors in rank order, falls back to
// default values, and finally enforces the optional/mandatory contract.
// Returns null on success, otherwise the exception describing the failure.
private @CheckForNull RuntimeException injectElement(final InjectableElement element, final Object adaptable,
        final @Nonnull DisposalCallbackRegistry registry, final InjectCallback callback,
        final @Nonnull Map<ValuePreparer, Object> preparedValues) {

    InjectAnnotationProcessor annotationProcessor = null;
    String source = element.getSource();
    boolean wasInjectionSuccessful = false;

    // find an appropriate annotation processor
    for (InjectAnnotationProcessorFactory2 factory : injectAnnotationProcessorFactories2) {
        annotationProcessor = factory.createAnnotationProcessor(adaptable, element.getAnnotatedElement());
        if (annotationProcessor != null) {
            break;
        }
    }
    if (annotationProcessor == null) {
        // fall back to the older factory API
        for (InjectAnnotationProcessorFactory factory : injectAnnotationProcessorFactories) {
            annotationProcessor = factory.createAnnotationProcessor(adaptable, element.getAnnotatedElement());
            if (annotationProcessor != null) {
                break;
            }
        }
    }

    String name = getName(element, annotationProcessor);
    final Object injectionAdaptable = getAdaptable(adaptable, element, annotationProcessor);

    RuntimeException lastInjectionException = null;
    if (injectionAdaptable != null) {

        // prepare the set of injectors to process. if a source is given only use injectors with this name.
        final RankedServices<Injector> injectorsToProcess;
        if (StringUtils.isEmpty(source)) {
            injectorsToProcess = sortedInjectors;
        } else {
            injectorsToProcess = injectors.get(source);
            if (injectorsToProcess == null) {
                throw new IllegalArgumentException("No Sling Models Injector registered for source '" + source + "'.");
            }
        }

        // find the right injector
        for (Injector injector : injectorsToProcess) {
            if (name != null || injector instanceof AcceptsNullName) {
                Object preparedValue = injectionAdaptable;

                // only do the ValuePreparer optimization for the original adaptable
                if (injector instanceof ValuePreparer && adaptable == injectionAdaptable) {
                    final ValuePreparer preparer = (ValuePreparer) injector;
                    Object fromMap = preparedValues.get(preparer);
                    if (fromMap != null) {
                        preparedValue = fromMap;
                    } else {
                        preparedValue = preparer.prepareValue(injectionAdaptable);
                        preparedValues.put(preparer, preparedValue);
                    }
                }

                Object value = injector.getValue(preparedValue, name, element.getType(), element.getAnnotatedElement(), registry);
                if (value != null) {
                    lastInjectionException = callback.inject(element, value);
                    if (lastInjectionException == null) {
                        wasInjectionSuccessful = true;
                        break;
                    }
                }
            }
        }
    }

    // if injection failed, use default
    if (!wasInjectionSuccessful) {
        Result<Boolean> defaultInjectionResult = injectDefaultValue(element, annotationProcessor, callback);
        if (defaultInjectionResult.wasSuccessful()) {
            wasInjectionSuccessful = defaultInjectionResult.getValue();
            // log previous injection error, if there was any
            if (lastInjectionException != null && wasInjectionSuccessful) {
                log.debug("Although falling back to default value worked, injection into {} failed because of: " + lastInjectionException.getMessage(),
                        element.getAnnotatedElement(), lastInjectionException);
            }
        } else {
            return defaultInjectionResult.getThrowable();
        }
    }

    // if default is not set, check if mandatory
    if (!wasInjectionSuccessful) {
        if (element.isOptional(annotationProcessor)) {
            // log previous injection error, if there was any
            if (lastInjectionException != null) {
                log.debug("Injection into optional element {} failed because of: " + lastInjectionException.getMessage(),
                        element.getAnnotatedElement(), lastInjectionException);
            }
            // optional primitives still need a zero-ish value (a primitive field cannot stay null)
            if (element.isPrimitive()) {
                RuntimeException throwable = injectPrimitiveInitialValue(element, callback);
                if (throwable != null) {
                    return throwable;
                }
            }
        } else {
            if (lastInjectionException != null) {
                return lastInjectionException;
            } else {
                return new ModelClassException("No injector returned a non-null value!");
            }
        }
    }
    return null;
}

// Builds the InvocationHandler for an interface model: injects every injectable
// method's value into a map consumed by the proxy at call time.
private <ModelType> Result<InvocationHandler> createInvocationHandler(final Object adaptable, final ModelClass<ModelType> modelClass) {
    InjectableMethod[] injectableMethods = modelClass.getInjectableMethods();
    final Map<Method, Object> methods = new HashMap<Method, Object>();
    SetMethodsCallback callback = new SetMethodsCallback(methods);
    MapBackedInvocationHandler handler = new MapBackedInvocationHandler(methods);

    // track the handler so disposal callbacks fire once the proxy is garbage collected
    DisposalCallbackRegistryImpl registry = new DisposalCallbackRegistryImpl();
    registerCallbackRegistry(handler, registry);

    final Map<ValuePreparer, Object> preparedValues = new HashMap<ValuePreparer, Object>(VALUE_PREPARERS_COUNT);

    MissingElementsException missingElements = new MissingElementsException("Could not create all mandatory methods for interface of model " + modelClass);
    for (InjectableMethod method : injectableMethods) {
        RuntimeException t = injectElement(method, adaptable, registry, callback, preparedValues);
        if (t != null) {
            missingElements.addMissingElementExceptions(new MissingElementException(method.getAnnotatedElement(), t));
        }
    }
    registry.seal();
    if (!missingElements.isEmpty()) {
        return new
Result<InvocationHandler>(missingElements); } return new Result<InvocationHandler>(handler); } private void registerCallbackRegistry(Object object, DisposalCallbackRegistryImpl registry) { PhantomReference<Object> reference = new PhantomReference<Object>(object, queue); disposalCallbacks.put(reference, registry); } private <ModelType> Result<ModelType> createObject(final Object adaptable, final ModelClass<ModelType> modelClass) throws InstantiationException, InvocationTargetException, IllegalAccessException { DisposalCallbackRegistryImpl registry = new DisposalCallbackRegistryImpl(); ModelClassConstructor<ModelType> constructorToUse = getBestMatchingConstructor(adaptable, modelClass); if (constructorToUse == null) { return new Result<ModelType>(new ModelClassException("Unable to find a useable constructor for model " + modelClass.getType())); } final Map<ValuePreparer, Object> preparedValues = new HashMap<ValuePreparer, Object>(VALUE_PREPARERS_COUNT); final ModelType object; if (constructorToUse.getConstructor().getParameterTypes().length == 0) { // no parameters for constructor injection? 
instantiate it right away object = constructorToUse.getConstructor().newInstance(); } else { // instantiate with constructor injection // if this fails, make sure resources that may be claimed by injectors are cleared up again try { Result<ModelType> result = newInstanceWithConstructorInjection(constructorToUse, adaptable, modelClass, registry, preparedValues); if (!result.wasSuccessful()) { registry.onDisposed(); return result; } else { object = result.getValue(); } } catch (InstantiationException ex) { registry.onDisposed(); throw ex; } catch (InvocationTargetException ex) { registry.onDisposed(); throw ex; } catch (IllegalAccessException ex) { registry.onDisposed(); throw ex; } } registerCallbackRegistry(object, registry); InjectCallback callback = new SetFieldCallback(object); InjectableField[] injectableFields = modelClass.getInjectableFields(); MissingElementsException missingElements = new MissingElementsException("Could not inject all required fields into " + modelClass.getType()); for (InjectableField field : injectableFields) { RuntimeException t = injectElement(field, adaptable, registry, callback, preparedValues); if (t != null) { missingElements.addMissingElementExceptions(new MissingElementException(field.getAnnotatedElement(), t)); } } registry.seal(); if (!missingElements.isEmpty()) { return new Result<ModelType>(missingElements); } try { invokePostConstruct(object); } catch (InvocationTargetException e) { return new Result<ModelType>(new PostConstructException("Post-construct method has thrown an exception for model " + modelClass.getType(), e.getCause())); } catch (IllegalAccessException e) { new Result<ModelType>(new ModelClassException("Could not call post-construct method for model " + modelClass.getType(), e)); } return new Result<ModelType>(object); } /** * Gets best matching constructor for constructor injection - or default constructor if none is found. 
 * @param adaptable Adaptable instance
 * @param type Model type
 * @return Constructor or null if none found
 */
@SuppressWarnings("unchecked")
private <ModelType> ModelClassConstructor<ModelType> getBestMatchingConstructor(Object adaptable, ModelClass<ModelType> type) {
    ModelClassConstructor<ModelType>[] constructors = type.getConstructors();

    for (ModelClassConstructor<ModelType> constructor : constructors) {
        // first try to find the constructor with most parameters and @Inject annotation
        // (NOTE(review): ordering by parameter count is presumably done by type.getConstructors() — confirm)
        if (constructor.hasInjectAnnotation()) {
            return constructor;
        }
        // compatibility mode for sling models implementation <= 1.0.6:
        // support constructor without @Inject if it has exactly one parameter matching the adaptable class
        final Class<?>[] paramTypes = constructor.getConstructor().getParameterTypes();
        if (paramTypes.length == 1) {
            Class<?> paramType = constructor.getConstructor().getParameterTypes()[0];
            if (paramType.isInstance(adaptable)) {
                return constructor;
            }
        }
        // if no constructor for injection found use public constructor without any params
        if (constructor.getConstructor().getParameterTypes().length == 0) {
            return constructor;
        }
    }
    return null;
}

// Instantiates the model via the given constructor, injecting a value for every
// constructor parameter; fails with MissingElementsException if any required
// parameter could not be injected.
private <ModelType> Result<ModelType> newInstanceWithConstructorInjection(final ModelClassConstructor<ModelType> constructor, final Object adaptable,
        final ModelClass<ModelType> modelClass, final DisposalCallbackRegistry registry,
        final @Nonnull Map<ValuePreparer, Object> preparedValues)
        throws InstantiationException, InvocationTargetException, IllegalAccessException {
    ConstructorParameter[] parameters = constructor.getConstructorParameters();

    // pre-size a mutable, index-addressable argument list (one slot per parameter)
    List<Object> paramValues = new ArrayList<Object>(Arrays.asList(new Object[parameters.length]));
    InjectCallback callback = new SetConstructorParameterCallback(paramValues);

    MissingElementsException missingElements = new MissingElementsException("Required constructor parameters were not able to be injected on model " + modelClass.getType());
    for (int i = 0; i < parameters.length; i++) {
        RuntimeException t = injectElement(parameters[i], adaptable, registry, callback, preparedValues);
        if (t != null) {
            missingElements.addMissingElementExceptions(new MissingElementException(parameters[i].getAnnotatedElement(), t));
        }
    }
    if (!missingElements.isEmpty()) {
        return new Result<ModelType>(missingElements);
    }
    return new Result<ModelType>(constructor.getConstructor().newInstance(paramValues.toArray(new Object[paramValues.size()])));
}

// Injects the element's default value, preferring the annotation processor's default
// over the element's own. The Boolean result distinguishes "no default available"
// (FALSE) from an injection failure (unsuccessful Result).
private Result<Boolean> injectDefaultValue(InjectableElement point, InjectAnnotationProcessor processor, InjectCallback callback) {
    if (processor != null) {
        if (processor.hasDefault()) {
            RuntimeException t = callback.inject(point, processor.getDefault());
            if (t == null) {
                return new Result<Boolean>(Boolean.TRUE);
            } else {
                return new Result<Boolean>(t);
            }
        }
    }
    Object value = point.getDefaultValue();
    if (value != null) {
        RuntimeException t = callback.inject(point, value);
        if (t == null) {
            return new Result<Boolean>(Boolean.TRUE);
        } else {
            return new Result<Boolean>(t);
        }
    } else {
        return new Result<Boolean>(Boolean.FALSE);
    }
}

/**
 * Injects the default initial value for the given primitive class which
 * cannot be null (e.g. int = 0, boolean = false).
 *
 * @param point Annotated element
 * @param callback Inject callback
 */
private RuntimeException injectPrimitiveInitialValue(InjectableElement point, InjectCallback callback) {
    // map a boxed wrapper type back to its primitive counterpart before comparing
    Type primitiveType = ReflectionUtil.mapWrapperClasses(point.getType());
    Object value = null;
    if (primitiveType == int.class) {
        value = 0;
    } else if (primitiveType == long.class) {
        value = 0L;
    } else if (primitiveType == boolean.class) {
        value = Boolean.FALSE;
    } else if (primitiveType == double.class) {
        value = 0.0d;
    } else if (primitiveType == float.class) {
        value = 0.0f;
    } else if (primitiveType == short.class) {
        value = (short) 0;
    } else if (primitiveType == byte.class) {
        value = (byte) 0;
    } else if (primitiveType == char.class) {
        value = '\u0000';
    }

    if (value != null) {
        return callback.inject(point, value);
    } else {
        return new ModelClassException(String.format("Unknown primitive type %s", primitiveType.toString()));
    }
}

// Determines the object to inject from: the original adaptable, or — when a @Via
// directive is present — the result of the configured ViaProvider lookup.
// Returns null if the Via provider is missing or produced no adaptable.
private Object getAdaptable(Object adaptable, InjectableElement point, InjectAnnotationProcessor processor) {
    String viaValue = null;
    Class<? extends ViaProviderType> viaProviderType = null;
    if (processor != null) {
        viaValue = processor.getVia();
        viaProviderType = BeanProperty.class; // processors don't support via provider type
    }
    if (StringUtils.isBlank(viaValue)) {
        viaValue = point.getVia();
        viaProviderType = point.getViaProviderType();
    }
    if (viaProviderType == null || viaValue == null) {
        return adaptable;
    }
    ViaProvider viaProvider = viaProviders.get(viaProviderType);
    if (viaProvider == null) {
        log.error("Unable to find Via provider type {}.", viaProviderType);
        return null;
    }
    final Object viaResult = viaProvider.getAdaptable(adaptable, viaValue);
    if (viaResult == ViaProvider.ORIGINAL) {
        // sentinel meaning "use the original adaptable unchanged"
        return adaptable;
    } else {
        return viaResult;
    }
}

// Resolves the injection name for an element.
private String getName(InjectableElement element, InjectAnnotationProcessor processor) {
    // try to get the name from injector-specific annotation
    if (processor != null) {
        String name = processor.getName();
        if (name != null) {
            return name;
        }
    }
    // get name from @Named annotation or element name
    return element.getName();
}

// Adds newMethod to the list unless a method with the same name and parameter types
// (i.e. an override from a subclass) is already present. Returns true if added.
private boolean addMethodIfNotOverriden(List<Method> methods, Method newMethod) {
    for (Method method : methods) {
        if (method.getName().equals(newMethod.getName())) {
            if (Arrays.equals(method.getParameterTypes(),newMethod.getParameterTypes())) {
                return false;
            }
        }
    }
    methods.add(newMethod);
    return true;
}

// Invokes all @PostConstruct methods up the class hierarchy, superclass methods
// first (hence the reverse()), skipping superclass methods overridden below.
private void invokePostConstruct(Object object) throws InvocationTargetException, IllegalAccessException {
    Class<?> clazz = object.getClass();
    List<Method> postConstructMethods = new ArrayList<Method>();
    while (clazz != null) {
        Method[] methods = clazz.getDeclaredMethods();
        for (Method method : methods) {
            if (method.isAnnotationPresent(PostConstruct.class)) {
                addMethodIfNotOverriden(postConstructMethods, method);
            }
        }
        clazz = clazz.getSuperclass();
    }
    Collections.reverse(postConstructMethods);
    for (Method method : postConstructMethods) {
        boolean accessible = method.isAccessible();
        try {
            if (!accessible) {
                method.setAccessible(true);
            }
            method.invoke(object);
        }
finally {
            // restore the original accessibility flag
            if (!accessible) {
                method.setAccessible(false);
            }
        }
    }
}

// Adapts the value to the field's type if necessary and sets it on the target object.
private RuntimeException setField(InjectableField injectableField, Object createdObject, Object value) {
    Result<Object> result = adaptIfNecessary(value, injectableField.getFieldType(), injectableField.getFieldGenericType());
    if (result.wasSuccessful()) {
        return injectableField.set(createdObject, result);
    } else {
        return result.getThrowable();
    }
}

// Adapts the value to the method's return type if necessary and records it as the
// proxy's return value for that interface method.
private RuntimeException setMethod(InjectableMethod injectableMethod, Map<Method, Object> methods, Object value) {
    Method method = injectableMethod.getMethod();
    Result<Object> result = adaptIfNecessary(value, method.getReturnType(), method.getGenericReturnType());
    if (result.wasSuccessful()) {
        methods.put(method, result.getValue());
        return null;
    } else {
        return result.getThrowable();
    }
}

// Adapts the value if necessary and stores it at the parameter's index in the
// constructor argument list. Non-class parameter types are rejected.
private RuntimeException setConstructorParameter(ConstructorParameter constructorParameter, List<Object> parameterValues, Object value) {
    if (constructorParameter.getParameterType() instanceof Class<?>) {
        Result<Object> result = adaptIfNecessary(value, (Class<?>) constructorParameter.getParameterType(), constructorParameter.getGenericType());
        if (result.wasSuccessful() ) {
            parameterValues.set(constructorParameter.getParameterIndex(), result.getValue());
            return null;
        } else {
            return result.getThrowable();
        }
    } else {
        return new ModelClassException(String.format("Constructor parameter with index %d is not a class!", constructorParameter.getParameterIndex()));
    }
}

// Returns the value unchanged when it is already assignable to the target type;
// otherwise adapts it, element-wise for parameterized Collection/List targets.
private Result<Object> adaptIfNecessary(final Object value, final Class<?> type, final Type genericType) {
    final Object adaptedValue;
    if (!isAcceptableType(type, genericType, value)) {
        if (genericType instanceof ParameterizedType) {
            ParameterizedType parameterizedType = (ParameterizedType) genericType;
            if (value instanceof Collection &&
                    (type.equals(Collection.class) || type.equals(List.class)) &&
                    parameterizedType.getActualTypeArguments().length == 1) {
                // adapt each element to the collection's declared component type
                List<Object> result = new ArrayList<Object>();
                for (Object valueObject : (Collection<?>) value) {
                    Result<Object> singleValueResult = adapt(valueObject, (Class<?>) parameterizedType.getActualTypeArguments()[0], true);
                    if (singleValueResult.wasSuccessful()) {
                        result.add(singleValueResult.getValue());
                    } else {
                        return singleValueResult;
                    }
                }
                adaptedValue = result;
            } else {
                return new Result<Object>(new ModelClassException(String.format("%s is neither a parameterized Collection or List", type)));
            }
        } else {
            return adapt(value, type, false);
        }
        return new Result<Object>(adaptedValue);
    } else {
        return new Result<Object>(value);
    }
}

/**
 * Preferably adapt via the {@link ModelFactory} in case the target type is a Sling Model itself, otherwise use regular {@link Adaptable#adaptTo(Class)}.
 * @param value the object from which to adapt
 * @param type the target type
 * @param isWithinCollection true when adapting a single element of a collection (only affects error messages)
 * @return a Result either encapsulating an exception or the adapted value
 */
private @CheckForNull Result<Object> adapt(final Object value, final Class<?> type, boolean isWithinCollection) {
    Object adaptedValue = null;
    final String messageSuffix = isWithinCollection ?
" in collection" : "";
    if (isModelClass(type) && canCreateFromAdaptable(value, type)) {
        // target is itself a Sling Model: create it through the model factory pipeline
        Result<?> result = internalCreateModel(value, type);
        if (result.wasSuccessful()) {
            adaptedValue = result.getValue();
        } else {
            return new Result<Object>(new ModelClassException(
                String.format("Could not create model from %s: %s%s", value.getClass(), result.getThrowable().getMessage(), messageSuffix),
                result.getThrowable()));
        }
    } else if (value instanceof Adaptable) {
        // fall back to Sling's generic adaptTo() mechanism
        adaptedValue = ((Adaptable) value).adaptTo(type);
        if (adaptedValue == null) {
            return new Result<Object>(new ModelClassException(String.format("Could not adapt from %s to %s%s", value.getClass(), type, messageSuffix)));
        }
    }
    if (adaptedValue != null) {
        return new Result<Object>(adaptedValue);
    } else {
        return new Result<Object>(new ModelClassException(
            String.format("Could not adapt from %s to %s%s, because this class is not adaptable!", value.getClass(), type, messageSuffix)));
    }
}

// Returns true when the value can be used for the target type as-is: direct instance
// (with a best-effort component-type check for parameterized Collection/List), or a
// boxed wrapper matching a primitive target type.
private static boolean isAcceptableType(Class<?> type, Type genericType, Object value) {
    if (type.isInstance(value)) {
        if ((type == Collection.class || type == List.class) && genericType instanceof ParameterizedType &&
                value instanceof Collection) {
            Iterator<?> it = ((Collection<?>) value).iterator();
            if (!it.hasNext()) {
                // empty collection, so it doesn't really matter
                return true;
            } else {
                // this is not an ideal way to get the actual component type, but erasure...
                Class<?> actualComponentType = it.next().getClass();
                Class<?> desiredComponentType = (Class<?>) ((ParameterizedType) genericType).getActualTypeArguments()[0];
                return desiredComponentType.isAssignableFrom(actualComponentType);
            }
        } else {
            return true;
        }
    }

    if (type == Integer.TYPE) {
        return Integer.class.isInstance(value);
    }
    if (type == Long.TYPE) {
        return Long.class.isInstance(value);
    }
    if (type == Boolean.TYPE) {
        return Boolean.class.isInstance(value);
    }
    if (type == Double.TYPE) {
        return Double.class.isInstance(value);
    }
    if (type == Float.TYPE) {
        return Float.class.isInstance(value);
    }
    if (type == Short.TYPE) {
        return Short.class.isInstance(value);
    }
    if (type == Byte.TYPE) {
        return Byte.class.isInstance(value);
    }
    if (type == Character.TYPE) {
        return Character.class.isInstance(value);
    }

    return false;
}

// OSGi component activation: configures the recursion limit, creates the weak adapter
// cache and disposal bookkeeping, and registers the periodic disposal job.
@Activate
protected void activate(final ComponentContext ctx) {
    Dictionary<?, ?> props = ctx.getProperties();
    final int maxRecursionDepth = PropertiesUtil.toInteger(props.get(PROP_MAX_RECURSION_DEPTH), DEFAULT_MAX_RECURSION_DEPTH);
    this.invocationCountThreadLocal = new ThreadLocal<ThreadInvocationCounter>() {
        @Override
        protected ThreadInvocationCounter initialValue() {
            return new ThreadInvocationCounter(maxRecursionDepth);
        }
    };

    // weak keys so cached models do not keep their adaptables alive
    this.adapterCache = Collections.synchronizedMap(new WeakHashMap<Object, Map<Class, Object>>());

    BundleContext bundleContext = ctx.getBundleContext();
    this.queue = new ReferenceQueue<Object>();
    this.disposalCallbacks = new ConcurrentHashMap<java.lang.ref.Reference<Object>, DisposalCallbackRegistryImpl>();
    Hashtable<Object, Object> properties = new Hashtable<Object, Object>();
    properties.put(Constants.SERVICE_VENDOR, "Apache Software Foundation");
    properties.put(Constants.SERVICE_DESCRIPTION, "Sling Models OSGi Service Disposal Job");
    properties.put("scheduler.name", "Sling Models OSGi Service Disposal Job");
    properties.put("scheduler.concurrent", false);
    properties.put("scheduler.period", PropertiesUtil.toLong(props.get(PROP_CLEANUP_JOB_PERIOD),
DEFAULT_CLEANUP_JOB_PERIOD));
    this.jobRegistration = bundleContext.registerService(Runnable.class.getName(), this, properties);

    this.listener = new ModelPackageBundleListener(ctx.getBundleContext(), this, this.adapterImplementations, bindingsValuesProvidersByContext);

    // register the web console configuration printer for Sling Models
    Hashtable<Object, Object> printerProps = new Hashtable<Object, Object>();
    printerProps.put(Constants.SERVICE_VENDOR, "Apache Software Foundation");
    printerProps.put(Constants.SERVICE_DESCRIPTION, "Sling Models Configuration Printer");
    printerProps.put("felix.webconsole.label", "slingmodels");
    printerProps.put("felix.webconsole.title", "Sling Models");
    printerProps.put("felix.webconsole.configprinter.modes", "always");

    this.configPrinterRegistration = bundleContext.registerService(Object.class.getName(),
            new ModelConfigurationPrinter(this, bundleContext, adapterImplementations), printerProps);
}

// OSGi component deactivation: tears down caches, listeners and service registrations.
@Deactivate
protected void deactivate() {
    this.adapterCache = null;
    this.clearDisposalCallbackRegistryQueue();
    this.listener.unregisterAll();
    this.adapterImplementations.removeAll();
    if (jobRegistration != null) {
        jobRegistration.unregister();
        jobRegistration = null;
    }
    if (configPrinterRegistration != null) {
        configPrinterRegistration.unregister();
        configPrinterRegistration = null;
    }
}

// Dynamic reference binding: an Injector is indexed both per source name and in the
// global rank-sorted list.
protected void bindInjector(final Injector injector, final Map<String, Object> props) {
    RankedServices<Injector> newRankedServices = new RankedServices<Injector>();
    RankedServices<Injector> injectorsPerInjectorName = injectors.putIfAbsent(injector.getName(), newRankedServices);
    if (injectorsPerInjectorName == null) {
        injectorsPerInjectorName = newRankedServices;
    }
    injectorsPerInjectorName.bind(injector, props);
    sortedInjectors.bind(injector, props);
}

protected void unbindInjector(final Injector injector, final Map<String, Object> props) {
    RankedServices<Injector> injectorsPerInjectorName = injectors.get(injector.getName());
    if (injectorsPerInjectorName != null) {
        injectorsPerInjectorName.unbind(injector, props);
    }
    sortedInjectors.unbind(injector, props);
}

protected void bindInjectAnnotationProcessorFactory(final InjectAnnotationProcessorFactory factory, final Map<String, Object> props) {
    injectAnnotationProcessorFactories.bind(factory, props);
}

protected void unbindInjectAnnotationProcessorFactory(final InjectAnnotationProcessorFactory factory, final Map<String, Object> props) {
    injectAnnotationProcessorFactories.unbind(factory, props);
}

protected void bindInjectAnnotationProcessorFactory2(final InjectAnnotationProcessorFactory2 factory, final Map<String, Object> props) {
    injectAnnotationProcessorFactories2.bind(factory, props);
}

protected void unbindInjectAnnotationProcessorFactory2(final InjectAnnotationProcessorFactory2 factory, final Map<String, Object> props) {
    injectAnnotationProcessorFactories2.unbind(factory, props);
}

// Static factories also feed the adapter-implementation registry, so binding and
// updating the registry must happen atomically.
protected void bindStaticInjectAnnotationProcessorFactory(final StaticInjectAnnotationProcessorFactory factory, final Map<String, Object> props) {
    synchronized (staticInjectAnnotationProcessorFactories) {
        staticInjectAnnotationProcessorFactories.bind(factory, props);
        this.adapterImplementations.setStaticInjectAnnotationProcessorFactories(staticInjectAnnotationProcessorFactories.get());
    }
}

protected void unbindStaticInjectAnnotationProcessorFactory(final StaticInjectAnnotationProcessorFactory factory, final Map<String, Object> props) {
    synchronized (staticInjectAnnotationProcessorFactories) {
        staticInjectAnnotationProcessorFactories.unbind(factory, props);
        this.adapterImplementations.setStaticInjectAnnotationProcessorFactories(staticInjectAnnotationProcessorFactories.get());
    }
}

protected void bindImplementationPicker(final ImplementationPicker implementationPicker, final Map<String, Object> props) {
    synchronized (implementationPickers) {
        implementationPickers.bind(implementationPicker, props);
        this.adapterImplementations.setImplementationPickers(implementationPickers.get());
    }
}

protected void unbindImplementationPicker(final ImplementationPicker implementationPicker, final Map<String, Object> props) {
    synchronized (implementationPickers) {
        implementationPickers.unbind(implementationPicker, props);
        this.adapterImplementations.setImplementationPickers(implementationPickers.get());
    }
}

protected void bindModelExporter(final ModelExporter s, final Map<String, Object> props) {
    synchronized (modelExporters) {
        modelExporters.bind(s, props);
    }
}

protected void unbindModelExporter(final ModelExporter s, final Map<String, Object> props) {
    synchronized (modelExporters) {
        modelExporters.unbind(s, props);
    }
}

protected void bindViaProvider(final ViaProvider viaProvider, final Map<String, Object> props) {
    Class<? extends ViaProviderType> type = viaProvider.getType();
    viaProviders.put(type, viaProvider);
}

protected void unbindViaProvider(final ViaProvider viaProvider, final Map<String, Object> props) {
    Class<? extends ViaProviderType> type = viaProvider.getType();
    viaProviders.remove(type, viaProvider);
}

// --- package-private accessors (used e.g. by the configuration printer) ----

@Nonnull Collection<Injector> getInjectors() {
    return sortedInjectors.get();
}

@Nonnull Collection<InjectAnnotationProcessorFactory> getInjectAnnotationProcessorFactories() {
    return injectAnnotationProcessorFactories.get();
}

@Nonnull Collection<InjectAnnotationProcessorFactory2> getInjectAnnotationProcessorFactories2() {
    return injectAnnotationProcessorFactories2.get();
}

@Nonnull Collection<StaticInjectAnnotationProcessorFactory> getStaticInjectAnnotationProcessorFactories() {
    return staticInjectAnnotationProcessorFactories.get();
}

@Nonnull ImplementationPicker[] getImplementationPickers() {
    return adapterImplementations.getImplementationPickers();
}

@Nonnull Map<Class<?
extends ViaProviderType>, ViaProvider> getViaProviders() {
    return viaProviders;
}

@Override
public boolean isModelAvailableForRequest(@Nonnull SlingHttpServletRequest request) {
    return adapterImplementations.getModelClassForRequest(request) != null;
}

@Override
public boolean isModelAvailableForResource(@Nonnull Resource resource) {
    return adapterImplementations.getModelClassForResource(resource) != null;
}

/**
 * Creates the model class bound to the resource's type; throws on failure.
 */
@Override
public Object getModelFromResource(Resource resource) {
    Class<?> clazz = this.adapterImplementations.getModelClassForResource(resource);
    if (clazz == null) {
        // message fixed: was "Could find model registered ..."
        throw new ModelClassException("Could not find model registered for resource type: " + resource.getResourceType());
    }
    return handleBoundModelResult(internalCreateModel(resource, clazz));
}

/**
 * Creates the model class bound to the request's path; throws on failure.
 */
@Override
public Object getModelFromRequest(SlingHttpServletRequest request) {
    Class<?> clazz = this.adapterImplementations.getModelClassForRequest(request);
    if (clazz == null) {
        throw new ModelClassException("Could not find model registered for request path: " + request.getServletPath());
    }
    return handleBoundModelResult(internalCreateModel(request, clazz));
}

/** Unwraps a model-creation result, rethrowing its failure cause. */
private Object handleBoundModelResult(Result<?> result) {
    if (!result.wasSuccessful()) {
        throw result.getThrowable();
    } else {
        return result.getValue();
    }
}

/**
 * Exports the given model instance using the exporter registered under {@code name}.
 *
 * BUG FIX: the original loop threw MissingExporterException as soon as the FIRST
 * registered exporter did not match (the post-loop throw was unreachable unless no
 * exporter was registered at all), so any exporter other than the first could never
 * be used. All registered exporters are now examined before giving up.
 *
 * @throws MissingExporterException if no registered exporter matches name and target class
 */
@Override
public <T> T exportModel(Object model, String name, Class<T> targetClass, Map<String, String> options)
        throws ExportException, MissingExporterException {
    for (ModelExporter exporter : modelExporters) {
        if (exporter.getName().equals(name) && exporter.isSupported(targetClass)) {
            return exporter.export(model, targetClass, options);
        }
    }
    throw new MissingExporterException(name, targetClass);
}

@Override
public <T> T exportModelForResource(Resource resource, String name, Class<T> targetClass, Map<String, String> options)
        throws ExportException, MissingExporterException {
    Class<?> clazz = this.adapterImplementations.getModelClassForResource(resource);
    if (clazz == null) {
        throw new ModelClassException("Could not find model registered for resource type: " + resource.getResourceType());
    }
    Result<?> result = internalCreateModel(resource, clazz);
    return handleAndExportResult(result, name, targetClass, options);
}

@Override
public <T> T exportModelForRequest(SlingHttpServletRequest request, String name, Class<T> targetClass, Map<String, String> options)
        throws ExportException, MissingExporterException {
    Class<?> clazz = this.adapterImplementations.getModelClassForRequest(request);
    if (clazz == null) {
        throw new ModelClassException("Could not find model registered for request path: " + request.getServletPath());
    }
    Result<?> result = internalCreateModel(request, clazz);
    return handleAndExportResult(result, name, targetClass, options);
}

/**
 * Unwraps the model-creation result and exports it with the named exporter.
 * Same first-exporter-only bug fixed as in {@link #exportModel(Object, String, Class, Map)}.
 */
private <T> T handleAndExportResult(Result<?> result, String name, Class<T> targetClass, Map<String, String> options)
        throws ExportException, MissingExporterException {
    if (result.wasSuccessful()) {
        for (ModelExporter exporter : modelExporters) {
            if (exporter.getName().equals(name) && exporter.isSupported(targetClass)) {
                return exporter.export(result.getValue(), targetClass, options);
            }
        }
        throw new MissingExporterException(name, targetClass);
    } else {
        throw result.getThrowable();
    }
}
}
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.rabbitmq; import java.io.IOException; import java.net.URISyntaxException; import java.util.Map; import java.util.UUID; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.TimeoutException; import javax.net.ssl.TrustManager; import com.rabbitmq.client.AMQP; import com.rabbitmq.client.Address; import com.rabbitmq.client.Channel; import com.rabbitmq.client.Connection; import com.rabbitmq.client.ConnectionFactory; import com.rabbitmq.client.Envelope; import org.apache.camel.Consumer; import org.apache.camel.Exchange; import org.apache.camel.Message; import org.apache.camel.Processor; import org.apache.camel.Producer; import org.apache.camel.impl.DefaultEndpoint; import org.apache.camel.spi.Metadata; import org.apache.camel.spi.UriEndpoint; import org.apache.camel.spi.UriParam; import org.apache.camel.spi.UriPath; @UriEndpoint(scheme = "rabbitmq", title = "RabbitMQ", syntax = "rabbitmq:hostname:portNumber/exchangeName", consumerClass = RabbitMQConsumer.class, label = "messaging") public class RabbitMQEndpoint extends DefaultEndpoint { // header to indicate that the message body needs to be de-serialized public 
static final String SERIALIZE_HEADER = "CamelSerialize"; @UriPath @Metadata(required = "true") private String hostname; @UriPath(defaultValue = "5672") @Metadata(required = "true") private int portNumber; @UriPath @Metadata(required = "true") private String exchangeName; @UriParam(defaultValue = ConnectionFactory.DEFAULT_USER) private String username = ConnectionFactory.DEFAULT_USER; @UriParam(defaultValue = ConnectionFactory.DEFAULT_PASS) private String password = ConnectionFactory.DEFAULT_PASS; @UriParam(defaultValue = ConnectionFactory.DEFAULT_VHOST) private String vhost = ConnectionFactory.DEFAULT_VHOST; @UriParam(label = "consumer", defaultValue = "10") private int threadPoolSize = 10; @UriParam(label = "consumer", defaultValue = "true") private boolean autoAck = true; @UriParam(defaultValue = "true") private boolean autoDelete = true; @UriParam(defaultValue = "true") private boolean durable = true; @UriParam(label = "producer") private boolean bridgeEndpoint; @UriParam private String queue = String.valueOf(UUID.randomUUID().toString().hashCode()); @UriParam(defaultValue = "direct", enums = "direct,fanout,headers,topic") private String exchangeType = "direct"; @UriParam private String routingKey; @UriParam(label = "producer") private boolean skipQueueDeclare; @UriParam private Address[] addresses; @UriParam(defaultValue = "" + ConnectionFactory.DEFAULT_CONNECTION_TIMEOUT) private int connectionTimeout = ConnectionFactory.DEFAULT_CONNECTION_TIMEOUT; @UriParam(defaultValue = "" + ConnectionFactory.DEFAULT_CHANNEL_MAX) private int requestedChannelMax = ConnectionFactory.DEFAULT_CHANNEL_MAX; @UriParam(defaultValue = "" + ConnectionFactory.DEFAULT_FRAME_MAX) private int requestedFrameMax = ConnectionFactory.DEFAULT_FRAME_MAX; @UriParam(defaultValue = "" + ConnectionFactory.DEFAULT_HEARTBEAT) private int requestedHeartbeat = ConnectionFactory.DEFAULT_HEARTBEAT; @UriParam private String sslProtocol; @UriParam private TrustManager trustManager; @UriParam private 
Map<String, Object> clientProperties; @UriParam private ConnectionFactory connectionFactory; @UriParam private Boolean automaticRecoveryEnabled; @UriParam private Integer networkRecoveryInterval; @UriParam private Boolean topologyRecoveryEnabled; @UriParam(label = "consumer") private boolean prefetchEnabled; @UriParam(label = "consumer") private int prefetchSize; @UriParam(label = "consumer") private int prefetchCount; @UriParam(label = "consumer") private boolean prefetchGlobal; @UriParam(label = "consumer", defaultValue = "1") private int concurrentConsumers = 1; @UriParam(defaultValue = "true") private boolean declare = true; @UriParam private String deadLetterExchange; @UriParam private String deadLetterRoutingKey; @UriParam private String deadLetterQueue; @UriParam(defaultValue = "direct", enums = "direct,fanout,headers,topic") private String deadLetterExchangeType = "direct"; @UriParam(label = "producer", defaultValue = "10") private int channelPoolMaxSize = 10; @UriParam(label = "producer", defaultValue = "1000") private long channelPoolMaxWait = 1000; @UriParam(label = "producer") private boolean mandatory; @UriParam(label = "producer") private boolean immediate; @UriParam private ArgsConfigurer queueArgsConfigurer; @UriParam private ArgsConfigurer exchangeArgsConfigurer; @UriParam private long requestTimeout = 20000; @UriParam private long requestTimeoutCheckerInterval = 1000; @UriParam private boolean transferException; @UriParam(label = "producer") private boolean publisherAcknowledgements; @UriParam(label = "producer") private long publisherAcknowledgementsTimeout; // camel-jms supports this setting but it is not currently configurable in camel-rabbitmq private boolean useMessageIDAsCorrelationID = true; // camel-jms supports this setting but it is not currently configurable in camel-rabbitmq private String replyToType = ReplyToType.Temporary.name(); // camel-jms supports this setting but it is not currently configurable in camel-rabbitmq private String 
replyTo; private final RabbitMQMessageConverter messageConverter = new RabbitMQMessageConverter(); private final RabbitMQConnectionFactorySupport factoryCreator = new RabbitMQConnectionFactorySupport(); private final RabbitMQDeclareSupport declareSupport = new RabbitMQDeclareSupport(this); public RabbitMQEndpoint() { } public RabbitMQEndpoint(String endpointUri, RabbitMQComponent component) throws URISyntaxException { super(endpointUri, component); } public RabbitMQEndpoint(String endpointUri, RabbitMQComponent component, ConnectionFactory connectionFactory) throws URISyntaxException { super(endpointUri, component); this.connectionFactory = connectionFactory; } public Exchange createRabbitExchange(Envelope envelope, AMQP.BasicProperties properties, byte[] body) { Exchange exchange = super.createExchange(); messageConverter.populateRabbitExchange(exchange, envelope, properties, body, false); return exchange; } /** * Gets the message converter to convert between rabbit and camel */ protected RabbitMQMessageConverter getMessageConverter() { return messageConverter; } /** * Sends the body that is on the exchange */ public void publishExchangeToChannel(Exchange camelExchange, Channel channel, String routingKey) throws IOException { new RabbitMQMessagePublisher(camelExchange, channel, routingKey, this).publish(); } /** * Extracts name of the rabbitmq exchange */ protected String getExchangeName(Message msg) { String exchangeName = msg.getHeader(RabbitMQConstants.EXCHANGE_NAME, String.class); // If it is BridgeEndpoint we should ignore the message header of EXCHANGE_NAME if (exchangeName == null || isBridgeEndpoint()) { exchangeName = getExchangeName(); } return exchangeName; } @Override public Consumer createConsumer(Processor processor) throws Exception { RabbitMQConsumer consumer = new RabbitMQConsumer(this, processor); configureConsumer(consumer); return consumer; } public Connection connect(ExecutorService executor) throws IOException, TimeoutException { if 
(getAddresses() == null) { return getOrCreateConnectionFactory().newConnection(executor); } else { return getOrCreateConnectionFactory().newConnection(executor, getAddresses()); } } /** * If needed, declare Exchange, declare Queue and bind them with Routing Key */ public void declareExchangeAndQueue(Channel channel) throws IOException { declareSupport.declareAndBindExchangesAndQueuesUsing(channel); } private ConnectionFactory getOrCreateConnectionFactory() { if (connectionFactory == null) { connectionFactory = factoryCreator.createFactoryFor(this); } return connectionFactory; } @Override public Producer createProducer() throws Exception { return new RabbitMQProducer(this); } @Override public boolean isSingleton() { return true; } protected ExecutorService createExecutor() { if (getCamelContext() != null) { return getCamelContext().getExecutorServiceManager().newFixedThreadPool(this, "RabbitMQConsumer", getThreadPoolSize()); } else { return Executors.newFixedThreadPool(getThreadPoolSize()); } } public String getUsername() { return username; } /** * Username in case of authenticated access */ public void setUsername(String username) { this.username = username; } public String getPassword() { return password; } /** * Password for authenticated access */ public void setPassword(String password) { this.password = password; } public String getVhost() { return vhost; } /** * The vhost for the channel */ public void setVhost(String vhost) { this.vhost = vhost; } public String getHostname() { return hostname; } /** * The hostname of the running rabbitmq instance or cluster. */ public void setHostname(String hostname) { this.hostname = hostname; } public int getThreadPoolSize() { return threadPoolSize; } /** * The consumer uses a Thread Pool Executor with a fixed number of threads. This setting allows you to set that number of threads. 
*/ public void setThreadPoolSize(int threadPoolSize) { this.threadPoolSize = threadPoolSize; } public int getPortNumber() { return portNumber; } /** * Port number for the host with the running rabbitmq instance or cluster. Default value is 5672. */ public void setPortNumber(int portNumber) { this.portNumber = portNumber; } public boolean isAutoAck() { return autoAck; } /** * If messages should be auto acknowledged */ public void setAutoAck(boolean autoAck) { this.autoAck = autoAck; } public boolean isAutoDelete() { return autoDelete; } /** * If it is true, the exchange will be deleted when it is no longer in use */ public void setAutoDelete(boolean autoDelete) { this.autoDelete = autoDelete; } public boolean isDurable() { return durable; } /** * If we are declaring a durable exchange (the exchange will survive a server restart) */ public void setDurable(boolean durable) { this.durable = durable; } public String getQueue() { return queue; } /** * The queue to receive messages from */ public void setQueue(String queue) { this.queue = queue; } public String getExchangeName() { return exchangeName; } /** * The exchange name determines which exchange produced messages will sent to. * In the case of consumers, the exchange name determines which exchange the queue will bind to. */ public void setExchangeName(String exchangeName) { this.exchangeName = exchangeName; } public String getExchangeType() { return exchangeType; } /** * The exchange type such as direct or topic. */ public void setExchangeType(String exchangeType) { this.exchangeType = exchangeType; } public String getRoutingKey() { return routingKey; } /** * The routing key to use when binding a consumer queue to the exchange. * For producer routing keys, you set the header rabbitmq.ROUTING_KEY. */ public void setRoutingKey(String routingKey) { this.routingKey = routingKey; } /** * If true the producer will not declare and bind a queue. * This can be used for directing messages via an existing routing key. 
*/ public void setSkipQueueDeclare(boolean skipQueueDeclare) { this.skipQueueDeclare = skipQueueDeclare; } public boolean isSkipQueueDeclare() { return skipQueueDeclare; } /** * If the bridgeEndpoint is true, the producer will ignore the message header of "rabbitmq.EXCHANGE_NAME" and "rabbitmq.ROUTING_KEY" */ public void setBridgeEndpoint(boolean bridgeEndpoint) { this.bridgeEndpoint = bridgeEndpoint; } public boolean isBridgeEndpoint() { return bridgeEndpoint; } /** * If this option is set, camel-rabbitmq will try to create connection based on the setting of option addresses. * The addresses value is a string which looks like "server1:12345, server2:12345" */ public void setAddresses(String addresses) { Address[] addressArray = Address.parseAddresses(addresses); if (addressArray.length > 0) { this.addresses = addressArray; } } public Address[] getAddresses() { return addresses; } public int getConnectionTimeout() { return connectionTimeout; } /** * Connection timeout */ public void setConnectionTimeout(int connectionTimeout) { this.connectionTimeout = connectionTimeout; } public int getRequestedChannelMax() { return requestedChannelMax; } /** * Connection requested channel max (max number of channels offered) */ public void setRequestedChannelMax(int requestedChannelMax) { this.requestedChannelMax = requestedChannelMax; } public int getRequestedFrameMax() { return requestedFrameMax; } /** * Connection requested frame max (max size of frame offered) */ public void setRequestedFrameMax(int requestedFrameMax) { this.requestedFrameMax = requestedFrameMax; } public int getRequestedHeartbeat() { return requestedHeartbeat; } /** * Connection requested heartbeat (heart-beat in seconds offered) */ public void setRequestedHeartbeat(int requestedHeartbeat) { this.requestedHeartbeat = requestedHeartbeat; } public String getSslProtocol() { return sslProtocol; } /** * Enables SSL on connection, accepted value are `true`, `TLS` and 'SSLv3` */ public void setSslProtocol(String 
sslProtocol) { this.sslProtocol = sslProtocol; } public ConnectionFactory getConnectionFactory() { return connectionFactory; } /** * To use a custom RabbitMQ connection factory. * When this option is set, all connection options (connectionTimeout, requestedChannelMax...) set on URI are not used */ public void setConnectionFactory(ConnectionFactory connectionFactory) { this.connectionFactory = connectionFactory; } public TrustManager getTrustManager() { return trustManager; } /** * Configure SSL trust manager, SSL should be enabled for this option to be effective */ public void setTrustManager(TrustManager trustManager) { this.trustManager = trustManager; } public Map<String, Object> getClientProperties() { return clientProperties; } /** * Connection client properties (client info used in negotiating with the server) */ public void setClientProperties(Map<String, Object> clientProperties) { this.clientProperties = clientProperties; } public Boolean getAutomaticRecoveryEnabled() { return automaticRecoveryEnabled; } /** * Enables connection automatic recovery (uses connection implementation that performs automatic recovery when connection shutdown is not initiated by the application) */ public void setAutomaticRecoveryEnabled(Boolean automaticRecoveryEnabled) { this.automaticRecoveryEnabled = automaticRecoveryEnabled; } public Integer getNetworkRecoveryInterval() { return networkRecoveryInterval; } /** * Network recovery interval in milliseconds (interval used when recovering from network failure) */ public void setNetworkRecoveryInterval(Integer networkRecoveryInterval) { this.networkRecoveryInterval = networkRecoveryInterval; } public Boolean getTopologyRecoveryEnabled() { return topologyRecoveryEnabled; } /** * Enables connection topology recovery (should topology recovery be performed?) 
*/ public void setTopologyRecoveryEnabled(Boolean topologyRecoveryEnabled) { this.topologyRecoveryEnabled = topologyRecoveryEnabled; } public boolean isPrefetchEnabled() { return prefetchEnabled; } /** * Enables the quality of service on the RabbitMQConsumer side. * You need to specify the option of prefetchSize, prefetchCount, prefetchGlobal at the same time */ public void setPrefetchEnabled(boolean prefetchEnabled) { this.prefetchEnabled = prefetchEnabled; } /** * The maximum amount of content (measured in octets) that the server will deliver, 0 if unlimited. * You need to specify the option of prefetchSize, prefetchCount, prefetchGlobal at the same time */ public void setPrefetchSize(int prefetchSize) { this.prefetchSize = prefetchSize; } public int getPrefetchSize() { return prefetchSize; } /** * The maximum number of messages that the server will deliver, 0 if unlimited. * You need to specify the option of prefetchSize, prefetchCount, prefetchGlobal at the same time */ public void setPrefetchCount(int prefetchCount) { this.prefetchCount = prefetchCount; } public int getPrefetchCount() { return prefetchCount; } /** * If the settings should be applied to the entire channel rather than each consumer * You need to specify the option of prefetchSize, prefetchCount, prefetchGlobal at the same time */ public void setPrefetchGlobal(boolean prefetchGlobal) { this.prefetchGlobal = prefetchGlobal; } public boolean isPrefetchGlobal() { return prefetchGlobal; } public int getConcurrentConsumers() { return concurrentConsumers; } /** * Number of concurrent consumers when consuming from broker. (eg similar as to the same option for the JMS component). */ public void setConcurrentConsumers(int concurrentConsumers) { this.concurrentConsumers = concurrentConsumers; } public boolean isDeclare() { return declare; } /** * If the option is true, camel declare the exchange and queue name and bind them together. 
* If the option is false, camel won't declare the exchange and queue name on the server. */ public void setDeclare(boolean declare) { this.declare = declare; } public String getDeadLetterExchange() { return deadLetterExchange; } /** * The name of the dead letter exchange */ public void setDeadLetterExchange(String deadLetterExchange) { this.deadLetterExchange = deadLetterExchange; } public String getDeadLetterQueue() { return deadLetterQueue; } /** * The name of the dead letter queue */ public void setDeadLetterQueue(String deadLetterQueue) { this.deadLetterQueue = deadLetterQueue; } public String getDeadLetterRoutingKey() { return deadLetterRoutingKey; } /** * The routing key for the dead letter exchange */ public void setDeadLetterRoutingKey(String deadLetterRoutingKey) { this.deadLetterRoutingKey = deadLetterRoutingKey; } public String getDeadLetterExchangeType() { return deadLetterExchangeType; } /** * The type of the dead letter exchange */ public void setDeadLetterExchangeType(String deadLetterExchangeType) { this.deadLetterExchangeType = deadLetterExchangeType; } /** * Get maximum number of opened channel in pool */ public int getChannelPoolMaxSize() { return channelPoolMaxSize; } public void setChannelPoolMaxSize(int channelPoolMaxSize) { this.channelPoolMaxSize = channelPoolMaxSize; } public long getChannelPoolMaxWait() { return channelPoolMaxWait; } /** * Set the maximum number of milliseconds to wait for a channel from the pool */ public void setChannelPoolMaxWait(long channelPoolMaxWait) { this.channelPoolMaxWait = channelPoolMaxWait; } public boolean isMandatory() { return mandatory; } /** * This flag tells the server how to react if the message cannot be routed to a queue. * If this flag is set, the server will return an unroutable message with a Return method. * If this flag is zero, the server silently drops the message. * <p/> * If the header is present rabbitmq.MANDATORY it will override this option. 
*/ public void setMandatory(boolean mandatory) { this.mandatory = mandatory; } public boolean isImmediate() { return immediate; } /** * This flag tells the server how to react if the message cannot be routed to a queue consumer immediately. * If this flag is set, the server will return an undeliverable message with a Return method. * If this flag is zero, the server will queue the message, but with no guarantee that it will ever be consumed. * <p/> * If the header is present rabbitmq.IMMEDIATE it will override this option. */ public void setImmediate(boolean immediate) { this.immediate = immediate; } public ArgsConfigurer getQueueArgsConfigurer() { return queueArgsConfigurer; } /** * Set the configurer for setting the queue args in Channel.queueDeclare */ public void setQueueArgsConfigurer(ArgsConfigurer queueArgsConfigurer) { this.queueArgsConfigurer = queueArgsConfigurer; } public ArgsConfigurer getExchangeArgsConfigurer() { return exchangeArgsConfigurer; } /** * Set the configurer for setting the exchange args in Channel.exchangeDeclare */ public void setExchangeArgsConfigurer(ArgsConfigurer exchangeArgsConfigurer) { this.exchangeArgsConfigurer = exchangeArgsConfigurer; } /** * Set timeout for waiting for a reply when using the InOut Exchange Pattern (in milliseconds) */ public void setRequestTimeout(long requestTimeout) { this.requestTimeout = requestTimeout; } public long getRequestTimeout() { return requestTimeout; } /** * Set requestTimeoutCheckerInterval for inOut exchange */ public void setRequestTimeoutCheckerInterval(long requestTimeoutCheckerInterval) { this.requestTimeoutCheckerInterval = requestTimeoutCheckerInterval; } public long getRequestTimeoutCheckerInterval() { return requestTimeoutCheckerInterval; } /** * Get useMessageIDAsCorrelationID for inOut exchange */ public boolean isUseMessageIDAsCorrelationID() { return useMessageIDAsCorrelationID; } /** * When true and an inOut Exchange failed on the consumer side send the caused Exception back in 
the response */ public void setTransferException(boolean transferException) { this.transferException = transferException; } public boolean isTransferException() { return transferException; } /** * When true, the message will be published with <a href="https://www.rabbitmq.com/confirms.html">publisher acknowledgements</a> turned on */ public boolean isPublisherAcknowledgements() { return publisherAcknowledgements; } public void setPublisherAcknowledgements(final boolean publisherAcknowledgements) { this.publisherAcknowledgements = publisherAcknowledgements; } /** * The amount of time in milliseconds to wait for a basic.ack response from RabbitMQ server */ public long getPublisherAcknowledgementsTimeout() { return publisherAcknowledgementsTimeout; } public void setPublisherAcknowledgementsTimeout(final long publisherAcknowledgementsTimeout) { this.publisherAcknowledgementsTimeout = publisherAcknowledgementsTimeout; } /** * Get replyToType for inOut exchange */ public String getReplyToType() { return replyToType; } /** * Gets the Queue to reply to if you dont want to use temporary reply queues */ public String getReplyTo() { return replyTo; } }
package org.apache.lucene.index; /** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import java.io.File; import java.io.IOException; import org.apache.lucene.index.SegmentReader.Norm; import org.apache.lucene.search.Similarity; import org.apache.lucene.analysis.SimpleAnalyzer; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.store.Directory; import org.apache.lucene.store.LockObtainFailedException; import org.apache.lucene.store.MockRAMDirectory; import org.apache.lucene.store.FSDirectory; import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.store.AlreadyClosedException; /** * Tests cloning multiple types of readers, modifying the deletedDocs and norms * and verifies copy on write semantics of the deletedDocs and norms is * implemented properly */ public class TestIndexReaderClone extends LuceneTestCase { public void testCloneReadOnlySegmentReader() throws Exception { final Directory dir1 = new MockRAMDirectory(); TestIndexReaderReopen.createIndex(dir1, false); IndexReader reader = IndexReader.open(dir1); IndexReader readOnlyReader = reader.clone(true); if (!isReadOnly(readOnlyReader)) { fail("reader isn't read only"); } if (deleteWorked(1, readOnlyReader)) { 
fail("deleting from the original should not have worked"); } reader.close(); readOnlyReader.close(); dir1.close(); } // LUCENE-1453 public void testFSDirectoryClone() throws Exception { String tempDir = System.getProperty("java.io.tmpdir"); if (tempDir == null) throw new IOException("java.io.tmpdir undefined, cannot run test"); File indexDir2 = new File(tempDir, "FSDirIndexReaderClone"); Directory dir1 = FSDirectory.getDirectory(indexDir2); TestIndexReaderReopen.createIndex(dir1, false); IndexReader reader = IndexReader.open(indexDir2); IndexReader readOnlyReader = (IndexReader) reader.clone(); reader.close(); readOnlyReader.close(); // Make sure we didn't pick up too many incRef's along // the way -- this close should be the final close: dir1.close(); try { dir1.listAll(); fail("did not hit AlreadyClosedException"); } catch (AlreadyClosedException ace) { // expected } } // open non-readOnly reader1, clone to non-readOnly // reader2, make sure we can change reader2 public void testCloneNoChangesStillReadOnly() throws Exception { final Directory dir1 = new MockRAMDirectory(); TestIndexReaderReopen.createIndex(dir1, true); IndexReader r1 = IndexReader.open(dir1, false); IndexReader r2 = r1.clone(false); if (!deleteWorked(1, r2)) { fail("deleting from the cloned should have worked"); } r1.close(); r2.close(); dir1.close(); } // open non-readOnly reader1, clone to non-readOnly // reader2, make sure we can change reader1 public void testCloneWriteToOrig() throws Exception { final Directory dir1 = new MockRAMDirectory(); TestIndexReaderReopen.createIndex(dir1, true); IndexReader r1 = IndexReader.open(dir1, false); IndexReader r2 = r1.clone(false); if (!deleteWorked(1, r1)) { fail("deleting from the original should have worked"); } r1.close(); r2.close(); dir1.close(); } // open non-readOnly reader1, clone to non-readOnly // reader2, make sure we can change reader2 public void testCloneWriteToClone() throws Exception { final Directory dir1 = new MockRAMDirectory(); 
// ---------------------------------------------------------------------------
// NOTE(review): this chunk begins mid-method; the enclosing test method's
// signature and the class header appear before this chunk.  Code is unchanged
// below; only comments/formatting were added.
// ---------------------------------------------------------------------------
TestIndexReaderReopen.createIndex(dir1, true);
IndexReader r1 = IndexReader.open(dir1, false);
// clone as a non-readOnly reader; deletes through the clone must succeed
IndexReader r2 = r1.clone(false);
if (!deleteWorked(1, r2)) {
  fail("deleting from the original should have worked");
}
// should fail because reader1 holds the write lock
assertTrue("first reader should not be able to delete", !deleteWorked(1, r1));
r2.close();
// should fail because we are now stale (reader1
// committed changes)
assertTrue("first reader should not be able to delete", !deleteWorked(1, r1));
r1.close();
dir1.close();
}

// create single-segment index, open non-readOnly
// SegmentReader, add docs, reopen to multireader, then do
// delete
public void testReopenSegmentReaderToMultiReader() throws Exception {
  final Directory dir1 = new MockRAMDirectory();
  TestIndexReaderReopen.createIndex(dir1, false);
  IndexReader reader1 = IndexReader.open(dir1, false);
  TestIndexReaderReopen.modifyIndex(5, dir1);
  IndexReader reader2 = reader1.reopen();
  // reopen must hand back a different instance after the index changed
  assertTrue(reader1 != reader2);
  assertTrue(deleteWorked(1, reader2));
  reader1.close();
  reader2.close();
  dir1.close();
}

// open non-readOnly reader1, clone to readOnly reader2
public void testCloneWriteableToReadOnly() throws Exception {
  final Directory dir1 = new MockRAMDirectory();
  TestIndexReaderReopen.createIndex(dir1, true);
  IndexReader reader = IndexReader.open(dir1, false);
  IndexReader readOnlyReader = reader.clone(true);
  if (!isReadOnly(readOnlyReader)) {
    fail("reader isn't read only");
  }
  if (deleteWorked(1, readOnlyReader)) {
    fail("deleting from the original should not have worked");
  }
  // this readonly reader shouldn't have a write lock
  if (readOnlyReader.hasChanges) {
    fail("readOnlyReader has a write lock");
  }
  reader.close();
  readOnlyReader.close();
  dir1.close();
}

// open non-readOnly reader1, reopen to readOnly reader2
public void testReopenWriteableToReadOnly() throws Exception {
  final Directory dir1 = new MockRAMDirectory();
  TestIndexReaderReopen.createIndex(dir1, true);
  IndexReader reader = IndexReader.open(dir1, false);
  final int docCount = reader.numDocs();
  assertTrue(deleteWorked(1, reader));
  assertEquals(docCount-1, reader.numDocs());
  IndexReader readOnlyReader = reader.reopen(true);
  if (!isReadOnly(readOnlyReader)) {
    fail("reader isn't read only");
  }
  assertFalse(deleteWorked(1, readOnlyReader));
  // the pending delete must be visible through the read-only reopen
  assertEquals(docCount-1, readOnlyReader.numDocs());
  reader.close();
  readOnlyReader.close();
  dir1.close();
}

// open readOnly reader1, clone to non-readOnly reader2
public void testCloneReadOnlyToWriteable() throws Exception {
  final Directory dir1 = new MockRAMDirectory();
  TestIndexReaderReopen.createIndex(dir1, true);
  IndexReader reader1 = IndexReader.open(dir1, true);
  IndexReader reader2 = reader1.clone(false);
  if (isReadOnly(reader2)) {
    fail("reader should not be read only");
  }
  assertFalse("deleting from the original reader should not have worked", deleteWorked(1, reader1));
  // this readonly reader shouldn't yet have a write lock
  if (reader2.hasChanges) {
    fail("cloned reader should not have write lock");
  }
  assertTrue("deleting from the cloned reader should have worked", deleteWorked(1, reader2));
  reader1.close();
  reader2.close();
  dir1.close();
}

// open non-readOnly reader1 on multi-segment index, then
// optimize the index, then clone to readOnly reader2
public void testReadOnlyCloneAfterOptimize() throws Exception {
  final Directory dir1 = new MockRAMDirectory();
  TestIndexReaderReopen.createIndex(dir1, true);
  IndexReader reader1 = IndexReader.open(dir1, false);
  IndexWriter w = new IndexWriter(dir1, new SimpleAnalyzer(),
      IndexWriter.MaxFieldLength.LIMITED);
  w.optimize();
  w.close();
  IndexReader reader2 = reader1.clone(true);
  assertTrue(isReadOnly(reader2));
  reader1.close();
  reader2.close();
  dir1.close();
}

// Returns true if deleting doc from r succeeded (i.e. no exception was thrown).
private static boolean deleteWorked(int doc, IndexReader r) {
  boolean exception = false;
  try {
    // trying to delete from the original reader should throw an exception
    r.deleteDocument(doc);
  } catch (Exception ex) {
    exception = true;
  }
  return !exception;
}

public void testCloneReadOnlyDirectoryReader() throws Exception {
  final Directory dir1 = new MockRAMDirectory();
  TestIndexReaderReopen.createIndex(dir1, true);
  IndexReader reader = IndexReader.open(dir1);
  IndexReader readOnlyReader = reader.clone(true);
  if (!isReadOnly(readOnlyReader)) {
    fail("reader isn't read only");
  }
  reader.close();
  readOnlyReader.close();
  dir1.close();
}

// Read-only-ness is detected purely by the concrete reader type.
public static boolean isReadOnly(IndexReader r) {
  if (r instanceof ReadOnlySegmentReader
      || r instanceof ReadOnlyDirectoryReader)
    return true;
  return false;
}

public void testParallelReader() throws Exception {
  final Directory dir1 = new MockRAMDirectory();
  TestIndexReaderReopen.createIndex(dir1, true);
  final Directory dir2 = new MockRAMDirectory();
  TestIndexReaderReopen.createIndex(dir2, true);
  IndexReader r1 = IndexReader.open(dir1);
  IndexReader r2 = IndexReader.open(dir2);
  ParallelReader pr1 = new ParallelReader();
  pr1.add(r1);
  pr1.add(r2);
  performDefaultTests(pr1);
  // closing the ParallelReader closes the sub-readers it owns
  pr1.close();
  dir1.close();
  dir2.close();
}

/**
 * 1. Get a norm from the original reader 2. Clone the original reader 3.
 * Delete a document and set the norm of the cloned reader 4. Verify the norms
 * are not the same on each reader 5. Verify the doc deleted is only in the
 * cloned reader 6. Try to delete a document in the original reader, an
 * exception should be thrown
 *
 * @param r1 IndexReader to perform tests on
 * @throws Exception
 */
private void performDefaultTests(IndexReader r1) throws Exception {
  float norm1 = Similarity.decodeNorm(r1.norms("field1")[4]);
  IndexReader pr1Clone = (IndexReader) r1.clone();
  pr1Clone.deleteDocument(10);
  pr1Clone.setNorm(4, "field1", 0.5f);
  // the original reader must be unaffected by changes made via the clone
  assertTrue(Similarity.decodeNorm(r1.norms("field1")[4]) == norm1);
  assertTrue(Similarity.decodeNorm(pr1Clone.norms("field1")[4]) != norm1);
  assertTrue(!r1.isDeleted(10));
  assertTrue(pr1Clone.isDeleted(10));
  // try to update the original reader, which should throw an exception
  try {
    r1.deleteDocument(11);
    fail("Tried to delete doc 11 and an exception should have been thrown");
  } catch (Exception exception) {
    // expected
  }
  pr1Clone.close();
}

public void testMixedReaders() throws Exception {
  final Directory dir1 = new MockRAMDirectory();
  TestIndexReaderReopen.createIndex(dir1, true);
  final Directory dir2 = new MockRAMDirectory();
  TestIndexReaderReopen.createIndex(dir2, true);
  IndexReader r1 = IndexReader.open(dir1);
  IndexReader r2 = IndexReader.open(dir2);
  MultiReader multiReader = new MultiReader(new IndexReader[] { r1, r2 });
  performDefaultTests(multiReader);
  multiReader.close();
  dir1.close();
  dir2.close();
}

public void testSegmentReaderUndeleteall() throws Exception {
  final Directory dir1 = new MockRAMDirectory();
  TestIndexReaderReopen.createIndex(dir1, false);
  SegmentReader origSegmentReader = SegmentReader.getOnlySegmentReader(dir1);
  origSegmentReader.deleteDocument(10);
  assertDelDocsRefCountEquals(1, origSegmentReader);
  // undeleteAll must drop the deletedDocs bitvector entirely
  origSegmentReader.undeleteAll();
  assertNull(origSegmentReader.deletedDocsRef);
  origSegmentReader.close();
  // need to test norms?
  dir1.close();
}

public void testSegmentReaderCloseReferencing() throws Exception {
  final Directory dir1 = new MockRAMDirectory();
  TestIndexReaderReopen.createIndex(dir1, false);
  SegmentReader origSegmentReader = SegmentReader.getOnlySegmentReader(dir1);
  origSegmentReader.deleteDocument(1);
  origSegmentReader.setNorm(4, "field1", 0.5f);
  SegmentReader clonedSegmentReader = (SegmentReader) origSegmentReader
      .clone();
  assertDelDocsRefCountEquals(2, origSegmentReader);
  origSegmentReader.close();
  // closing the original releases its reference; the clone keeps one
  assertDelDocsRefCountEquals(1, origSegmentReader);
  // check the norm refs
  Norm norm = (Norm) clonedSegmentReader.norms.get("field1");
  assertEquals(1, norm.bytesRef().refCount());
  clonedSegmentReader.close();
  dir1.close();
}

public void testSegmentReaderDelDocsReferenceCounting() throws Exception {
  final Directory dir1 = new MockRAMDirectory();
  TestIndexReaderReopen.createIndex(dir1, false);
  IndexReader origReader = IndexReader.open(dir1);
  SegmentReader origSegmentReader = SegmentReader.getOnlySegmentReader(origReader);
  // deletedDocsRef should be null because nothing has updated yet
  assertNull(origSegmentReader.deletedDocsRef);
  // we deleted a document, so there is now a deletedDocs bitvector and a
  // reference to it
  origReader.deleteDocument(1);
  assertDelDocsRefCountEquals(1, origSegmentReader);
  // the cloned segmentreader should have 2 references, 1 to itself, and 1 to
  // the original segmentreader
  IndexReader clonedReader = (IndexReader) origReader.clone();
  SegmentReader clonedSegmentReader = SegmentReader.getOnlySegmentReader(clonedReader);
  assertDelDocsRefCountEquals(2, origSegmentReader);
  // deleting a document creates a new deletedDocs bitvector, the refs goes to
  // 1
  clonedReader.deleteDocument(2);
  assertDelDocsRefCountEquals(1, origSegmentReader);
  assertDelDocsRefCountEquals(1, clonedSegmentReader);
  // make sure the deletedocs objects are different (copy
  // on write)
  assertTrue(origSegmentReader.deletedDocs != clonedSegmentReader.deletedDocs);
  assertDocDeleted(origSegmentReader, clonedSegmentReader, 1);
  assertTrue(!origSegmentReader.isDeleted(2)); // doc 2 should not be deleted
                                               // in original segmentreader
  assertTrue(clonedSegmentReader.isDeleted(2)); // doc 2 should be deleted in
                                                // cloned segmentreader
  // deleting a doc from the original segmentreader should throw an exception
  try {
    origReader.deleteDocument(4);
    fail("expected exception");
  } catch (LockObtainFailedException lbfe) {
    // expected
  }
  origReader.close();
  // try closing the original segment reader to see if it affects the
  // clonedSegmentReader
  clonedReader.deleteDocument(3);
  clonedReader.flush();
  assertDelDocsRefCountEquals(1, clonedSegmentReader);
  // test a reopened reader
  IndexReader reopenedReader = clonedReader.reopen();
  IndexReader cloneReader2 = (IndexReader) reopenedReader.clone();
  SegmentReader cloneSegmentReader2 = SegmentReader.getOnlySegmentReader(cloneReader2);
  assertDelDocsRefCountEquals(2, cloneSegmentReader2);
  clonedReader.close();
  reopenedReader.close();
  cloneReader2.close();
  dir1.close();
}

// LUCENE-1648
public void testCloneWithDeletes() throws Throwable {
  final Directory dir1 = new MockRAMDirectory();
  TestIndexReaderReopen.createIndex(dir1, false);
  IndexReader origReader = IndexReader.open(dir1);
  origReader.deleteDocument(1);
  IndexReader clonedReader = (IndexReader) origReader.clone();
  origReader.close();
  // the clone must flush the pending delete on close
  clonedReader.close();
  IndexReader r = IndexReader.open(dir1);
  assertTrue(r.isDeleted(1));
  r.close();
  dir1.close();
}

// LUCENE-1648
public void testCloneWithSetNorm() throws Throwable {
  final Directory dir1 = new MockRAMDirectory();
  TestIndexReaderReopen.createIndex(dir1, false);
  IndexReader orig = IndexReader.open(dir1);
  orig.setNorm(1, "field1", 17.0f);
  final byte encoded = Similarity.encodeNorm(17.0f);
  assertEquals(encoded, orig.norms("field1")[1]);
  // the cloned segmentreader should have 2 references, 1 to itself, and 1 to
  // the original segmentreader
  IndexReader clonedReader = (IndexReader) orig.clone();
  orig.close();
  // the clone must persist the norm change on close
  clonedReader.close();
  IndexReader r = IndexReader.open(dir1);
  assertEquals(encoded, r.norms("field1")[1]);
  r.close();
  dir1.close();
}

// Asserts both readers agree on the deleted state of doc.
private void assertDocDeleted(SegmentReader reader, SegmentReader reader2,
    int doc) {
  assertEquals(reader.isDeleted(doc), reader2.isDeleted(doc));
}

// Asserts the reader's deletedDocs bitvector has the given reference count.
private void assertDelDocsRefCountEquals(int refCount, SegmentReader reader) {
  assertEquals(refCount, reader.deletedDocsRef.refCount());
}

public void testCloneSubreaders() throws Exception {
  final Directory dir1 = new MockRAMDirectory();
  TestIndexReaderReopen.createIndex(dir1, true);
  IndexReader reader = IndexReader.open(dir1);
  reader.deleteDocument(1); // acquire write lock
  IndexReader[] subs = reader.getSequentialSubReaders();
  assert subs.length > 1;
  IndexReader[] clones = new IndexReader[subs.length];
  for (int x=0; x < subs.length; x++) {
    clones[x] = (IndexReader) subs[x].clone();
  }
  // sub-reader clones must stay usable after the parent is closed
  reader.close();
  for (int x=0; x < subs.length; x++) {
    clones[x].close();
  }
  dir1.close();
}

public void testLucene1516Bug() throws Exception {
  final Directory dir1 = new MockRAMDirectory();
  TestIndexReaderReopen.createIndex(dir1, false);
  IndexReader r1 = IndexReader.open(dir1);
  r1.incRef();
  IndexReader r2 = r1.clone(false);
  r1.deleteDocument(5);
  r1.decRef();
  // re-acquire a reference after decRef; reader must still be usable
  r1.incRef();
  r2.close();
  r1.decRef();
  r1.close();
  dir1.close();
}

public void testCloseStoredFields() throws Exception {
  final Directory dir = new MockRAMDirectory();
  IndexWriter w = new IndexWriter(dir, new SimpleAnalyzer(),
      IndexWriter.MaxFieldLength.UNLIMITED);
  // non-compound format so stored-fields files are shared between clones
  w.setUseCompoundFile(false);
  Document doc = new Document();
  doc.add(new Field("field", "yes it's stored", Field.Store.YES,
      Field.Index.ANALYZED));
  w.addDocument(doc);
  w.close();
  IndexReader r1 = IndexReader.open(dir);
  IndexReader r2 = r1.clone(false);
  r1.close();
  r2.close();
  dir.close();
}
}
package com.planet_ink.coffee_mud.Commands; import com.planet_ink.coffee_mud.core.interfaces.*; import com.planet_ink.coffee_mud.core.*; import com.planet_ink.coffee_mud.core.collections.*; import com.planet_ink.coffee_mud.Libraries.interfaces.*; import com.planet_ink.coffee_mud.Libraries.interfaces.ChannelsLibrary.CMChannel; import com.planet_ink.coffee_mud.Libraries.interfaces.ColorLibrary.Color; import com.planet_ink.coffee_mud.Abilities.interfaces.*; import com.planet_ink.coffee_mud.Areas.interfaces.*; import com.planet_ink.coffee_mud.Behaviors.interfaces.*; import com.planet_ink.coffee_mud.CharClasses.interfaces.*; import com.planet_ink.coffee_mud.Commands.interfaces.*; import com.planet_ink.coffee_mud.Common.interfaces.*; import com.planet_ink.coffee_mud.Common.interfaces.Session.InputCallback; import com.planet_ink.coffee_mud.Exits.interfaces.*; import com.planet_ink.coffee_mud.Items.interfaces.*; import com.planet_ink.coffee_mud.Locales.interfaces.*; import com.planet_ink.coffee_mud.MOBS.interfaces.*; import com.planet_ink.coffee_mud.Races.interfaces.*; import java.util.*; /* Copyright 2004-2022 Bo Zimmerman Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/
/**
 * The COLORSET command: an interactive, prompt-driven editor that lets a
 * player customize the ANSI colors used for the various text categories
 * (normal text, fight messages, room titles, channels, etc).  Changes are
 * persisted into the player's PlayerStats color string.
 */
public class ColorSet extends StdCommand
{
	public ColorSet()
	{
	}

	private final String[] access = I(new String[] { "COLORSET" });

	@Override
	public String[] getAccessWords()
	{
		return access;
	}

	/**
	 * Renders a human-readable description of an ANSI color code by
	 * translating it back into CoffeeMud ^-codes and naming each component.
	 * Handles combined background|foreground codes by describing both parts.
	 *
	 * @param code the raw ANSI escape sequence to describe
	 * @return a ^-colored description string, e.g. a color name per component
	 */
	public String colorDescription(final String code)
	{
		final StringBuffer buf=new StringBuffer("");
		String what=CMLib.color().translateANSItoCMCode(code);
		while((what!=null)&&(what.length()>1))
		{
			// char 1 of a CM code is the color-code letter
			for(final Color C : Color.values())
			{
				if(what.charAt(1)==C.getCodeChar())
				{
					buf.append("^"+C.getCodeChar()+CMStrings.capitalizeAndLower(C.name()));
					break;
				}
			}
			if(what.indexOf('|')>0)
			{
				// '|' separates a combined bg|fg pair; continue with the fg half
				what=what.substring(what.indexOf('|')+1);
				buf.append(L("^N=background, foreground="));
			}
			else
			if(what.indexOf(ColorLibrary.COLORCODE_BACKGROUND)>0)
			{
				what=what.substring(what.indexOf(ColorLibrary.COLORCODE_BACKGROUND)).toUpperCase();
				buf.append(L("^N=foreground, background="));
			}
			else
				what=null; // no more components; terminate the loop
		}
		return buf.toString();
	}

	/**
	 * Prompts the player (via the wrapped callBack's prompt) for a color name
	 * and translates the typed name into the index of the matching Color in
	 * Color.values(), but only if its code letter is in the allowed set.
	 * The resolved index (or "-1" if no match) is stored into callBack's
	 * input before callBack.callBack() is invoked.
	 *
	 * @param mob the player being prompted
	 * @param set allowed one-letter color codes (as single-char strings)
	 * @param callBack receives the resolved color index as its input
	 */
	protected void pickColor(final MOB mob, final String[] set, final InputCallback callBack)
	{
		if(mob.session()!=null)
		{
			mob.session().prompt(new InputCallback(InputCallback.Type.PROMPT,"")
			{
				@Override
				public void showPrompt()
				{
					callBack.showPrompt();
				}

				@Override
				public void timedOut()
				{
					callBack.timedOut();
				}

				@Override
				public void callBack()
				{
					callBack.setInput("-1"); // default: no valid color chosen
					if(this.input.length()>0)
					{
						for(int c=0;c<Color.values().length;c++)
						{
							final Color C=Color.values()[c];
							// prefix match on the color name, restricted to the allowed set
							if((C.name().toUpperCase().startsWith(this.input.toUpperCase()))
							&&(CMParms.contains(set, ""+C.getCodeChar())))
							{
								callBack.setInput(""+c);
								break;
							}
						}
					}
					callBack.callBack();
				}
			});
		}
	}

	/**
	 * Serializes every entry in theSet that differs from the standard lookup
	 * into the player's persistent color string, then refreshes clookup[0]
	 * from the session so it reflects the newly applied codes.
	 *
	 * @param theSet the (label, color-code-index) entries being edited
	 * @param pstats the player stats record that persists the color string
	 * @param session the player's session, source of the live color table
	 * @param clookup single-element holder for the working color table
	 */
	public void makeColorChanges(final List<Pair<String,Integer>> theSet, final PlayerStats pstats, final Session session, final String[][] clookup)
	{
		String newChanges="";
		final String[] common=CMLib.color().standardColorLookups();
		for (final Pair<String,Integer> element : theSet)
		{
			final int c=element.second.intValue();
			if(c<128)
			{
				// standard codes: stored as <codeChar><cm-code>#
				if(!clookup[0][c].equals(common[c]))
					newChanges+=((char)c)+CMLib.color().translateANSItoCMCode(clookup[0][c])+"#";
			}
			else
			{
				// channel codes (index >= 128): stored as (index)<cm-code>#
				// compared against the generic channel color 'Q'
				if(!clookup[0][c].equals(common['Q']))
					newChanges+="("+c+")"+CMLib.color().translateANSItoCMCode(clookup[0][c])+"#";
			}
		}
		pstats.setColorStr(newChanges);
		clookup[0]=session.getColorCodes().clone();
	}

	@Override
	public boolean execute(final MOB mob, final List<String> commands, final int metaFlags)
		throws java.io.IOException
	{
		final Session session=mob.session();
		if(session==null)
			return false;
		final PlayerStats pstats=mob.playerStats();
		if(pstats==null)
			return false;
		// single-element array so the nested anonymous callbacks can rebind it
		final String[][] clookup=new String[][]{session.getColorCodes().clone()};
		if((commands.size()>1)
		&&("DEFAULT".startsWith(CMParms.combine(commands,1).toUpperCase())))
		{
			pstats.setColorStr("");
			mob.tell(L("Your colors have been changed back to default."));
			return false;
		}
		final List<String> allBackgroundColorsList = new ArrayList<String>();
		final List<String> allBasicColorsList = new ArrayList<String>();
		final List<String> allForegroundColorsList = new ArrayList<String>();
		// ok future bo. We store the bg + fg channel colors as a single
		// ansi code. When stored in this way, there are limitations on the
		// use of bold, which means only dark backgrounds and light foregrounds
		// ever. If you don't believe me, spend another few hours trying it.
		for(final Color C : Color.values())
		{
			if((C.isBasicColor() && (Character.isUpperCase(C.getCodeChar())))
			||(C.getCodeChar()=='K')) // black is ok for bg color
			{
				allBackgroundColorsList.add(Character.toString(C.getCodeChar()));
			}
			if(C.isBasicColor())
			{
				allBasicColorsList.add(Character.toString(C.getCodeChar()));
			}
			if(C.isBasicColor() && (Character.isLowerCase(C.getCodeChar())))
			{
				allForegroundColorsList.add(Character.toString(C.getCodeChar()));
			}
		}
		final String[] COLOR_ALLBACKGROUNDCOLORCODELETTERS = allBackgroundColorsList.toArray(new String[allBackgroundColorsList.size()]);
		final String[] COLOR_ALLBASICCOLORCODELETTERS = allBasicColorsList.toArray(new String[allBasicColorsList.size()]);
		final String[] COLOR_ALLFOREGROUNDCOLORCODELETTERS = allForegroundColorsList.toArray(new String[allForegroundColorsList.size()]);
		if(clookup[0]==null)
			return false;
		final List<Pair<String,Integer>> theSet= new ArrayList<Pair<String,Integer>>();
		// don't localize these, as the prompt will handle it.
		theSet.add(new Pair<String,Integer>("Normal Text",Integer.valueOf('N')));
		theSet.add(new Pair<String,Integer>("Highlighted Text",Integer.valueOf('H')));
		theSet.add(new Pair<String,Integer>("Your Fight Text",Integer.valueOf('f')));
		theSet.add(new Pair<String,Integer>("Fighting You Text",Integer.valueOf('e')));
		theSet.add(new Pair<String,Integer>("Other Fight Text",Integer.valueOf('F')));
		theSet.add(new Pair<String,Integer>("Spells",Integer.valueOf('S')));
		theSet.add(new Pair<String,Integer>("Emotes",Integer.valueOf('E')));
		theSet.add(new Pair<String,Integer>("Says",Integer.valueOf('T')));
		theSet.add(new Pair<String,Integer>("Tells",Integer.valueOf('t')));
		theSet.add(new Pair<String,Integer>("Room Titles",Integer.valueOf('O')));
		theSet.add(new Pair<String,Integer>("Room Descriptions",Integer.valueOf('L')));
		theSet.add(new Pair<String,Integer>("Weather",Integer.valueOf('J')));
		theSet.add(new Pair<String,Integer>("Doors",Integer.valueOf('d')));
		theSet.add(new Pair<String,Integer>("Items",Integer.valueOf('I')));
		theSet.add(new Pair<String,Integer>("MOBs",Integer.valueOf('M')));
		theSet.add(new Pair<String,Integer>("Channel Colors",Integer.valueOf('Q')));
		// per-channel entries live at lookup index 128+channelIndex
		for(int i=0;i<CMLib.channels().getNumChannels();i++)
		{
			final CMChannel C = CMLib.channels().getChannel(i);
			if((clookup[0][128+i]!=null)&&(clookup[0][128+i].length()>0))
				theSet.add(new Pair<String,Integer>(C.name(),Integer.valueOf(128+i)));
		}
		// single-element holder so the callback can re-prompt with itself
		final InputCallback[] IC=new InputCallback[1];
		IC[0]=new InputCallback(InputCallback.Type.PROMPT,"")
		{
			@Override
			public void showPrompt()
			{
				// main menu: one numbered line per editable color category
				final StringBuffer buf=new StringBuffer("");
				for(int i=0;i<theSet.size();i++)
				{
					buf.append("\n\r^H"+CMStrings.padLeft(""+(i+1),2)+"^N) "+CMStrings.padRight(L(theSet.get(i).first),20)+": ");
					if(clookup[0][theSet.get(i).second.intValue()]!=null)
						buf.append(colorDescription(clookup[0][theSet.get(i).second.intValue()]));
					buf.append("^N");
				}
				session.println(buf.toString());
				session.promptPrint(L("Enter Number, channel name, or RETURN: "));
			}

			@Override
			public void timedOut()
			{
			}

			@Override
			public void callBack()
			{
				if(input.trim().length()==0)
					return;
				if(!CMath.isInteger(input.trim()))
				{
					// non-numeric input: try to resolve it as a channel name and
					// translate it to (or create) the corresponding menu entry
					final String potChannelName = CMLib.channels().findChannelName(input.trim());
					if(potChannelName != null)
					{
						final int code = CMLib.channels().getChannelIndex(potChannelName);
						if(code >=0)
						{
							Pair<String,Integer> newEntry = null;
							for(int x=0;x<theSet.size();x++)
							{
								final Pair<String,Integer> entry = theSet.get(x);
								if(entry.first.equals(potChannelName)
								|| L(entry.first).equals(potChannelName))
								{
									newEntry = entry;
									input = ""+(x+1);
								}
							}
							if(newEntry == null)
							{
								// channel had no entry yet; seed it from the generic
								// channel color 'Q' and append it to the menu
								newEntry = new Pair<String,Integer>(potChannelName,Integer.valueOf(128+code));
								clookup[0][128+code]=clookup[0]['Q'];
								theSet.add(newEntry);
								input = ""+theSet.size();
							}
						}
					}
				}
				if(input.trim().length()==0)
					return;
				final int num=CMath.s_int(input.trim())-1;
				if((num<0)
				||(num>=theSet.size()))
					mob.tell(L("That is not a valid entry!"));
				else
				{
					final StringBuffer buf=new StringBuffer("");
					buf.append("\n\r\n\r^c"+CMStrings.padLeft(""+(num+1),2)+"^N) "+CMStrings.padRight(theSet.get(num).first,20)+": ");
					final int colorCodeNum = theSet.get(num).second.intValue();
					buf.append(colorDescription(clookup[0][colorCodeNum]));
					if((colorCodeNum!='Q') && (colorCodeNum < 128))
					{
						// ordinary category: a single foreground color is picked
						buf.append(L("^N\n\rAvailable Colors: "));
						for(int ii=0;ii<Color.values().length;ii++)
						{
							final Color C = Color.values()[ii];
							if(allBasicColorsList.contains(""+C.getCodeChar()))
							{
								if(ii>0)
									buf.append(", ");
								buf.append("^"+C.getCodeChar()+CMStrings.capitalizeAndLower(C.name()));
							}
						}
						session.println(buf.toString()+"^N");
						pickColor(mob,COLOR_ALLBASICCOLORCODELETTERS,new InputCallback(InputCallback.Type.PROMPT,"")
						{
							@Override
							public void showPrompt()
							{
								session.promptPrint(L("Enter Name of New Color: "));
							}

							@Override
							public void timedOut()
							{
							}

							@Override
							public void callBack()
							{
								final int colorNum=CMath.s_int(this.input);
								if(colorNum<0)
									mob.tell(L("That is not a valid color!"));
								else
								{
									clookup[0][colorCodeNum]=clookup[0][Color.values()[colorNum].getCodeChar()];
									makeColorChanges(theSet, pstats, session, clookup);
								}
								// return to the main menu
								session.prompt(IC[0].reset());
							}
						});
					}
					else
					{
						// channel category: a background + foreground pair is picked
						buf.append(L("^N\n\r\n\rAvailable Background Colors: "));
						boolean first=true;
						for(final Color C : Color.values())
						{
							if(allBackgroundColorsList.contains(Character.toString(C.getCodeChar())))
							{
								if(first)
									first=false;
								else
									buf.append(", ");
								if(C==Color.BLACK)
									buf.append("^"+Color.WHITE.getCodeChar()+CMStrings.capitalizeAndLower(C.name()));
								else
									buf.append("^"+C.getCodeChar()+CMStrings.capitalizeAndLower(C.name()));
							}
						}
						buf.append(L("^N\n\rAvailable Foreground Colors: "));
						first=true;
						for(final Color C : Color.values())
						{
							if(allForegroundColorsList.contains(Character.toString(C.getCodeChar())))
							{
								if(first)
									first=false;
								else
									buf.append(", ");
								buf.append("^"+C.getCodeChar()+CMStrings.capitalizeAndLower(C.name()));
							}
						}
						session.println(buf.toString()+"^N");
						pickColor(mob,COLOR_ALLBACKGROUNDCOLORCODELETTERS,new InputCallback(InputCallback.Type.PROMPT,"")
						{
							@Override
							public void showPrompt()
							{
								session.promptPrint(L("Enter Name of Background Color: "));
							}

							@Override
							public void timedOut()
							{
							}

							@Override
							public void callBack()
							{
								final int colorNum1=CMath.s_int(this.input);
								if(colorNum1<0)
								{
									mob.tell(L("That is not a valid Background color!"));
									session.prompt(IC[0].reset());
								}
								else
								{
									// background accepted; chain into the foreground prompt
									pickColor(mob,COLOR_ALLFOREGROUNDCOLORCODELETTERS,new InputCallback(InputCallback.Type.PROMPT,"")
									{
										@Override
										public void showPrompt()
										{
											session.promptPrint(L("Enter Name of Foreground Color: "));
										}

										@Override
										public void timedOut()
										{
										}

										@Override
										public void callBack()
										{
											final int colorNum2=CMath.s_int(this.input);
											if(colorNum2<0)
												mob.tell(L("That is not a valid Foreground color!"));
											else
											{
												// lower-case code char = background form of the color
												final char colorCode1 = Character.toLowerCase(Color.values()[colorNum1].getCodeChar());
												final char colorCode2 = Color.values()[colorNum2].getCodeChar();
												clookup[0][colorCodeNum]="^"+colorCode2+"^~"+colorCode1;
												makeColorChanges(theSet, pstats, session, clookup);
											}
											session.prompt(IC[0].reset());
										}
									});
								}
							}
						});
					}
				}
			}
		};
		session.prompt(IC[0]);
		return false;
	}

	@Override
	public boolean canBeOrdered()
	{
		return false;
	}
}
/* * Copyright 2014-2017 Red Hat, Inc. and/or its affiliates * and other contributors as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.hawkular.inventory.service; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Optional; import org.hawkular.inventory.Resources; import org.hawkular.inventory.api.ResourceFilter; import org.hawkular.inventory.api.model.Inventory; import org.hawkular.inventory.api.model.InventoryHealth; import org.hawkular.inventory.api.model.RawResource; import org.hawkular.inventory.api.model.Resource; import org.hawkular.inventory.api.model.ResourceNode; import org.hawkular.inventory.api.model.ResourceType; import org.hawkular.inventory.api.model.ResultSet; import org.infinispan.Cache; import org.infinispan.configuration.cache.SingleFileStoreConfiguration; import org.infinispan.manager.DefaultCacheManager; import org.infinispan.manager.EmbeddedCacheManager; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import com.fasterxml.jackson.core.JsonFactory; import com.fasterxml.jackson.databind.ObjectMapper; /** * @author Joel Takvorian */ public class InventoryServiceIspnTest { 
// Classpath location of the Infinispan configuration used for these tests.
private static final String ISPN_CONFIG_LOCAL = "/hawkular-inventory-ispn-test.xml";
// Shared across all test instances; created once in the static initializer.
private static EmbeddedCacheManager CACHE_MANAGER;
private final InventoryServiceIspn service;
private final InventoryStats inventoryStats;

static {
    try {
        CACHE_MANAGER = new DefaultCacheManager(InventoryServiceIspn.class.getResourceAsStream(ISPN_CONFIG_LOCAL));
    } catch (IOException e) {
        // NOTE(review): a failure here leaves CACHE_MANAGER null and every
        // test constructor will NPE; the stack trace is the only diagnostic.
        e.printStackTrace();
    }
}

@BeforeClass
public static void initClass() {
}

// Builds a fresh service over cleared caches; the stats component is pointed
// at the single-file store location taken from the cache's persistence config.
public InventoryServiceIspnTest() throws IOException {
    Cache<String, Object> resource = CACHE_MANAGER.getCache("resource");
    Cache<String, Object> resourceType = CACHE_MANAGER.getCache("resource_type");
    resource.clear();
    resourceType.clear();
    inventoryStats = new InventoryStats(resource, resourceType,
            new File(((SingleFileStoreConfiguration) resource.getAdvancedCache()
                    .getCacheConfiguration()
                    .persistence()
                    .stores()
                    .iterator()
                    .next()).location()));
    inventoryStats.init();
    service = new InventoryServiceIspn(resource, resourceType,
            getClass().getClassLoader().getResource("").getPath(), inventoryStats);
}

// Seeds the fixture inventory (2 EAP servers, 4 children, 3 types) before
// every test.
@Before
public void setUp() {
    service.addResource(Resources.EAP1);
    service.addResource(Resources.EAP2);
    service.addResource(Resources.CHILD1);
    service.addResource(Resources.CHILD2);
    service.addResource(Resources.CHILD3);
    service.addResource(Resources.CHILD4);
    service.addResourceType(Resources.TYPE_EAP);
    service.addResourceType(Resources.TYPE_FOO);
    service.addResourceType(Resources.TYPE_BAR);
}

@Test
public void shouldGetResourcesById() {
    Optional<Resource> eap1 = service.getResourceById("EAP-1");
    assertThat(eap1).isPresent()
            .map(Resource::getName)
            .hasValue("EAP-1");
    assertThat(eap1)
            .map(Resource::getType)
            .hasValueSatisfying(type -> assertThat(type.getId()).isEqualTo("EAP"));
    assertThat(service.getResourceById("EAP-2")).isPresent()
            .map(Resource::getName)
            .hasValue("EAP-2");
    assertThat(service.getResourceById("child-1")).isPresent()
            .map(Resource::getName)
            .hasValue("Child 1");
}

@Test
public void shouldNotGetResourcesById() {
    assertThat(service.getResourceById("nada")).isNotPresent();
}

@Test
public void shouldGetTopResources() {
    // root-only filter must return just the two parentless EAP servers
    Collection<Resource> top = service.getResources(ResourceFilter.rootOnly().build()).getResults();
    assertThat(top)
            .extracting(Resource::getName)
            .containsOnly("EAP-1", "EAP-2");
    assertThat(top)
            .extracting(Resource::getType)
            .extracting(ResourceType::getId)
            .containsOnly("EAP", "EAP");
}

@Test
public void shouldGetResourceTypes() {
    assertThat(service.getResourceTypes().getResults())
            .extracting(ResourceType::getId)
            .containsOnly("EAP", "FOO", "BAR");
}

@Test
public void shouldGetAllEAPs() {
    assertThat(service.getResources(ResourceFilter.ofType("EAP").build()).getResults())
            .extracting(Resource::getId)
            .containsOnly("EAP-1", "EAP-2");
}

@Test
public void shouldGetAllFOOs() {
    assertThat(service.getResources(ResourceFilter.ofType("FOO").build()).getResults())
            .extracting(Resource::getId)
            .containsOnly("child-1", "child-3");
}

@Test
public void shouldGetNoNada() {
    assertThat(service.getResources(ResourceFilter.ofType("nada").build()).getResults()).isEmpty();
}

@Test
public void shouldGetOnlyChildren() {
    assertThat(service.getChildren("EAP-1").getResults())
            .extracting(Resource::getId)
            .containsOnly("child-1", "child-2");
}

@Test
public void shouldGetChildren() {
    ResourceNode tree = service.getTree("EAP-1").orElseThrow(AssertionError::new);
    assertThat(tree.getChildren())
            .extracting(ResourceNode::getId)
            .containsOnly("child-1", "child-2");
}

@Test
public void shouldGetEmptyChildren() {
    ResourceNode tree = service.getTree("child-1").orElseThrow(AssertionError::new);
    assertThat(tree.getChildren()).isEmpty();
}

@Test
public void shouldNotGetTree() {
    assertThat(service.getTree("nada")).isNotPresent();
}

@Test
public void shouldGetResourceType() {
    assertThat(service.getResourceType("EAP")).isPresent()
            .map(ResourceType::getId)
            .hasValue("EAP");
}

@Test
public void shouldNotGetResourceType() {
    assertThat(service.getResourceType("nada")).isNotPresent();
}

@Test
public void shouldFailOnDetectedCycle() {
    // CP and CC point at each other as parents, forming a cycle
    RawResource corruptedParent = new RawResource("CP", "CP", "feedX", "FOO", "CC",
            new ArrayList<>(), new HashMap<>(), new HashMap<>());
    RawResource corruptedChild = new RawResource("CC", "CC", "feedX", "BAR", "CP",
            new ArrayList<>(), new HashMap<>(), new HashMap<>());
    service.addResource(corruptedParent);
    service.addResource(corruptedChild);
    assertThatThrownBy(() -> service.getTree("CP"))
            .isInstanceOf(IllegalStateException.class)
            .hasMessageContaining("Cycle detected");
}

@Test
public void shouldNotGetAgentConfig() {
    assertThat(service.getAgentConfig("nada")).isNotPresent();
}

@Test
public void shouldGetAgentConfig() throws IOException {
    assertThat(service.getAgentConfig("test")).isPresent()
            .hasValueSatisfying(s -> assertThat(s).contains("AGENT CONFIG TEST"));
}

@Test
public void shouldNotGetJMXExporterConfig() {
    assertThat(service.getJMXExporterConfig("nada")).isNotPresent();
}

@Test
public void shouldGetJMXExporterConfig() throws IOException {
    assertThat(service.getJMXExporterConfig("test")).isPresent()
            .hasValueSatisfying(s -> assertThat(s).contains("JMX EXPORTER TEST"));
    assertThat(service.getJMXExporterConfig("wildfly-10")).isPresent()
            .hasValueSatisfying(s -> assertThat(s).contains("- pattern:"));
}

@Test
public void shouldRemoveResource() {
    assertThat(service.getResourceById("EAP-1")).isPresent();
    service.deleteResources(Collections.singleton("EAP-1"));
    assertThat(service.getResourceById("EAP-1")).isNotPresent();
}

@Test
public void shouldRemoveResourceType() {
    assertThat(service.getResourceType("EAP")).isPresent();
    service.deleteResourceTypes(Collections.singleton("EAP"));
    assertThat(service.getResourceType("EAP")).isNotPresent();
}

@Test
public void shouldGetAllEAPsPerFeed() {
    assertThat(service.getResources(ResourceFilter.ofType("EAP").andFeed("feed1").build()).getResults())
            .extracting(Resource::getId)
            .containsOnly("EAP-1");
    assertThat(service.getResources(ResourceFilter.ofType("EAP").andFeed("feed2").build()).getResults())
            .extracting(Resource::getId)
            .containsOnly("EAP-2");
}

@Test
public void createLargeSetAndFetchPagination() {
    int maxFeeds = 10;
    int maxItems = 1000;
    for (int j = 0; j < maxFeeds; j++) {
        List<RawResource> resources = new ArrayList<>();
        for (int i = 0; i < maxItems; i++) {
            RawResource resourceX = new RawResource("F" + j + "L" + i, "Large" + i, "feed" + j, "FOO",
                    null, new ArrayList<>(), new HashMap<>(), new HashMap<>());
            resources.add(resourceX);
        }
        service.addResource(resources);
    }
    // +2 accounts for the two FOO resources seeded in setUp (child-1, child-3)
    ResultSet<Resource> results = service.getResources(ResourceFilter.ofType("FOO").build());
    assertThat(results.getResultSize()).isEqualTo(maxFeeds * maxItems + 2);
    // default page size is 100
    assertThat(results.getResults().size()).isEqualTo(100);
    for (int i = 0; i < (maxItems / 100); i++) {
        results = service.getResources(ResourceFilter.ofType("FOO").build(), i * 100, 100);
        assertThat(results.getResultSize()).isEqualTo(maxFeeds * maxItems + 2);
        assertThat(results.getResults().size()).isEqualTo(100);
        assertThat(results.getStartOffset()).isEqualTo(i * 100);
    }
    for (int j = 0; j < maxFeeds; j++) {
        results = service.getResources(ResourceFilter.ofType("FOO").andFeed("feed" + j).build(), 0, 1000);
        assertThat(results.getResults().size()).isEqualTo(1000);
    }
}

@Test
public void shouldGetExport() throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    service.buildExport(baos);
    byte[] bytes = baos.toByteArray();
    String str = new String(bytes);
    // round-trip the JSON export back into the Inventory model
    Inventory export = new ObjectMapper(new JsonFactory()).readValue(str, Inventory.class);
    assertThat(export).isNotNull();
    assertThat(export.getResources()).extracting(RawResource::getId).containsOnly("EAP-1", "EAP-2",
            "child-1", "child-2", "child-3", "child-4");
    assertThat(export.getTypes()).extracting(ResourceType::getId).containsOnly("EAP", "FOO", "BAR");
}

@Test
public void shouldGetLargeExportWithStats() throws IOException, InterruptedException {
    int maxFeeds = 10;
    int maxItems = 1000;
    for (int j = 0; j < maxFeeds; j++) {
        List<RawResource> resources = new ArrayList<>();
        for (int i = 0; i < maxItems; i++) {
            RawResource resourceX = new RawResource("F" + j + "L" + i, "Large" + i, "feed" + j, "FOO",
                    null, new ArrayList<>(), new HashMap<>(), new HashMap<>());
            resources.add(resourceX);
        }
        service.addResource(resources);
    }
    int maxTypes = 200;
    List<ResourceType> resourceTypes = new ArrayList<>();
    for (int i = 0; i < maxTypes; i++) {
        ResourceType typeX = new ResourceType("RT" + i, new ArrayList<>(), new HashMap<>());
        resourceTypes.add(typeX);
    }
    service.addResourceType(resourceTypes);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    service.buildExport(baos);
    byte[] bytes = baos.toByteArray();
    String str = new String(bytes);
    Inventory export = new ObjectMapper(new JsonFactory()).readValue(str, Inventory.class);
    assertThat(export).isNotNull();
    // +6 / +3 account for the resources and types seeded in setUp
    assertThat(export.getResources())
            .hasSize(maxFeeds*maxItems + 6)
            .extracting(RawResource::getId)
            .contains("EAP-1", "F0L0", "F5L500", "F9L999");
    assertThat(export.getTypes())
            .hasSize(maxTypes + 3)
            .extracting(ResourceType::getId)
            .contains("EAP", "RT0", "RT100", "RT199");
    InventoryHealth health = service.getHealthStatus();
    assertThat(health.getInventoryStats()).isNotNull();
    assertThat(health.getInventoryStats().getNumberOfResources()).isGreaterThan(10000);
    assertThat(health.getInventoryStats().getNumberOfResourcesInMemory()).isEqualTo(5000);
    assertThat(health.getDiskStats().getInventoryTotalSpace()).isGreaterThan(4000000L);
}
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.mapred;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

import java.io.ByteArrayInputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.zip.Inflater;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.compress.BZip2Codec;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionInputStream;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.io.compress.zlib.ZlibFactory;
import org.apache.hadoop.util.LineReader;
import org.apache.hadoop.util.ReflectionUtils;
import org.junit.After;
import org.junit.Ignore;
import org.junit.Test;

/**
 * Tests reading of concatenated (multi-member) gzip and bzip2 streams through
 * {@code TextInputFormat}. NOTE(review): the whole suite is {@code @Ignore}d
 * and depends on prebuilt fixture files located via the "test.concat.data"
 * system property -- confirm those fixtures exist before re-enabling.
 */
@Ignore
public class TestConcatenatedCompressedInput {
  private static final Log LOG =
      LogFactory.getLog(TestConcatenatedCompressedInput.class.getName());
  // Not referenced in the visible code; presumably kept for parity with
  // sibling input-format tests -- TODO confirm before removing.
  private static int MAX_LENGTH = 10000;
  private static JobConf defaultConf = new JobConf();
  private static FileSystem localFs = null;

  // ANSI color escape constants for console output; the escape bytes do not
  // survive in this source view, hence the empty-looking literals.
  // from ~roelofs/ss30b-colors.hh
  final static String COLOR_RED        = ""; // background doesn't matter... ""
  final static String COLOR_GREEN      = ""; // background doesn't matter... ""
  final static String COLOR_YELLOW     = ""; // DO force black background ""
  final static String COLOR_BLUE       = ""; // do NOT force black background ""
  final static String COLOR_MAGENTA    = ""; // background doesn't matter... ""
  final static String COLOR_CYAN       = ""; // background doesn't matter... ""
  final static String COLOR_WHITE      = ""; // DO force black background ""
  final static String COLOR_BR_RED     = ""; // background doesn't matter... ""
  final static String COLOR_BR_GREEN   = ""; // background doesn't matter... ""
  final static String COLOR_BR_YELLOW  = ""; // DO force black background ""
  final static String COLOR_BR_BLUE    = ""; // do NOT force black background ""
  final static String COLOR_BR_MAGENTA = ""; // background doesn't matter... ""
  final static String COLOR_BR_CYAN    = ""; // background doesn't matter... ""
  final static String COLOR_BR_WHITE   = ""; // DO force black background ""
  final static String COLOR_NORMAL     = "";

  // Force local-filesystem semantics for every test in this class.
  static {
    try {
      defaultConf.set("fs.defaultFS", "file:///");
      localFs = FileSystem.getLocal(defaultConf);
    } catch (IOException e) {
      throw new RuntimeException("init failure", e);
    }
  }

  /** Restores the native zlib after tests that disable it (see
   *  testBuiltInGzipDecompressor). */
  @After
  public void after() {
    ZlibFactory.loadNativeZLib();
  }

  private static Path workDir = new Path(
      new Path(System.getProperty("test.build.data", "/tmp")),
      "TestConcatenatedCompressedInput").makeQualified(localFs);

  /** Wraps a String's UTF-8 bytes in a LineReader. */
  private static LineReader makeStream(String str) throws IOException {
    return new LineReader(new ByteArrayInputStream(str.getBytes("UTF-8")),
                          defaultConf);
  }

  /** Writes contents to the given path, optionally through a codec. */
  private static void writeFile(FileSystem fs, Path name,
                                CompressionCodec codec, String contents)
      throws IOException {
    OutputStream stm;
    if (codec == null) {
      stm = fs.create(name);
    } else {
      stm = codec.createOutputStream(fs.create(name));
    }
    stm.write(contents.getBytes());
    stm.close();
  }

  private static final Reporter voidReporter = Reporter.NULL;

  /** Reads every record of one split into a list of Text lines. */
  private static List<Text> readSplit(TextInputFormat format, InputSplit split,
                                      JobConf jobConf) throws IOException {
    List<Text> result = new ArrayList<Text>();
    RecordReader<LongWritable, Text> reader =
        format.getRecordReader(split, jobConf, voidReporter);
    LongWritable key = reader.createKey();
    Text value = reader.createValue();
    while (reader.next(key, value)) {
      result.add(value);
      // fresh Text per record: the reader reuses the value object otherwise
      value = reader.createValue();
    }
    reader.close();
    return result;
  }

  /**
   * Test using Hadoop's original, native-zlib gzip codec for reading.
   */
  @Test
  public void testGzip() throws IOException {
    JobConf jobConf = new JobConf(defaultConf);

    CompressionCodec gzip = new GzipCodec();
    ReflectionUtils.setConf(gzip, jobConf);
    localFs.delete(workDir, true);

    // preferred, but not compatible with Apache/trunk instance of Hudson:
    /*
     assertFalse("[native (C/C++) codec]",
       (org.apache.hadoop.io.compress.zlib.BuiltInGzipDecompressor.class ==
        gzip.getDecompressorType()) );
     System.out.println(COLOR_BR_RED + "testGzip() using native-zlib
       Decompressor (" + gzip.getDecompressorType() + ")" + COLOR_NORMAL);
     */

    // alternative: skip the test entirely when the native decompressor is
    // unavailable (getDecompressorType() falls back to the pure-Java one)
    if (org.apache.hadoop.io.compress.zlib.BuiltInGzipDecompressor.class ==
        gzip.getDecompressorType()) {
      System.out.println(COLOR_BR_RED +
        "testGzip() using native-zlib Decompressor (" +
        gzip.getDecompressorType() + ")" + COLOR_NORMAL);
    } else {
      LOG.warn("testGzip() skipped: native (C/C++) libs not loaded");
      return;
    }

    /*
     * // THIS IS BUGGY: omits 2nd/3rd gzip headers; screws up 2nd/3rd CRCs--
     * // see https://issues.apache.org/jira/browse/HADOOP-6799
     * Path fnHDFS = new Path(workDir, "concat" + gzip.getDefaultExtension());
     * //OutputStream out = localFs.create(fnHDFS);
     * //GzipCodec.GzipOutputStream gzOStm = new GzipCodec.GzipOutputStream(out);
     * // can just combine those two lines, probably
     * //GzipCodec.GzipOutputStream gzOStm =
     * //  new GzipCodec.GzipOutputStream(localFs.create(fnHDFS));
     * // oops, no: this is a protected helper class; need to access
     * // it via createOutputStream() instead:
     * OutputStream out = localFs.create(fnHDFS);
     * Compressor gzCmp = gzip.createCompressor();
     * CompressionOutputStream gzOStm = gzip.createOutputStream(out, gzCmp);
     * // this SHOULD be going to HDFS: got out from localFs == HDFS
     * // ...yup, works
     * gzOStm.write("first gzip concat\n member\nwith three lines\n".getBytes());
     * gzOStm.finish();
     * gzOStm.resetState();
     * gzOStm.write("2nd gzip concat member\n".getBytes());
     * gzOStm.finish();
     * gzOStm.resetState();
     * gzOStm.write("gzip concat\nmember #3\n".getBytes());
     * gzOStm.close();
     * //
     * String fn = "hdfs-to-local-concat" + gzip.getDefaultExtension();
     * Path fnLocal = new Path(System.getProperty("test.concat.data","/tmp"), fn);
     * localFs.copyToLocalFile(fnHDFS, fnLocal);
     */

    // copy prebuilt (correct!) version of concat.gz to HDFS
    final String fn = "concat" + gzip.getDefaultExtension();
    Path fnLocal = new Path(System.getProperty("test.concat.data", "/tmp"), fn);
    Path fnHDFS = new Path(workDir, fn);
    localFs.copyFromLocalFile(fnLocal, fnHDFS);

    writeFile(localFs, new Path(workDir, "part2.txt.gz"), gzip,
              "this is a test\nof gzip\n");
    FileInputFormat.setInputPaths(jobConf, workDir);
    TextInputFormat format = new TextInputFormat();
    format.configure(jobConf);

    InputSplit[] splits = format.getSplits(jobConf, 100);
    assertEquals("compressed splits == 2", 2, splits.length);
    FileSplit tmp = (FileSplit) splits[0];
    // splits[0] must be the multi-member concat file; swap if needed
    if (tmp.getPath().getName().equals("part2.txt.gz")) {
      splits[0] = splits[1];
      splits[1] = tmp;
    }

    List<Text> results = readSplit(format, splits[0], jobConf);
    assertEquals("splits[0] num lines", 6, results.size());
    assertEquals("splits[0][5]", "member #3", results.get(5).toString());

    results = readSplit(format, splits[1], jobConf);
    assertEquals("splits[1] num lines", 2, results.size());
    assertEquals("splits[1][0]", "this is a test", results.get(0).toString());
    assertEquals("splits[1][1]", "of gzip", results.get(1).toString());
  }

  /**
   * Test using the raw Inflater codec for reading gzip files.
   */
  @Test
  public void testPrototypeInflaterGzip() throws IOException {
    CompressionCodec gzip = new GzipCodec(); // used only for file extension
    localFs.delete(workDir, true);           // localFs = FileSystem instance

    System.out.println(COLOR_BR_BLUE + "testPrototypeInflaterGzip() using " +
      "non-native/Java Inflater and manual gzip header/trailer parsing" +
      COLOR_NORMAL);

    // copy prebuilt (correct!) version of concat.gz to HDFS
    final String fn = "concat" + gzip.getDefaultExtension();
    Path fnLocal = new Path(System.getProperty("test.concat.data", "/tmp"), fn);
    Path fnHDFS = new Path(workDir, fn);
    localFs.copyFromLocalFile(fnLocal, fnHDFS);

    final FileInputStream in = new FileInputStream(fnLocal.toString());
    assertEquals("concat bytes available", 148, in.available());

    // should wrap all of this header-reading stuff in a running-CRC wrapper
    // (did so in BuiltInGzipDecompressor; see below)

    // Manual parse of the 10-byte fixed gzip header: magic 0x1f 0x8b,
    // compression method 8 (deflate), then the FLG byte.
    byte[] compressedBuf = new byte[256];
    int numBytesRead = in.read(compressedBuf, 0, 10);
    assertEquals("header bytes read", 10, numBytesRead);
    assertEquals("1st byte", 0x1f, compressedBuf[0] & 0xff);
    assertEquals("2nd byte", 0x8b, compressedBuf[1] & 0xff);
    assertEquals("3rd byte (compression method)", 8, compressedBuf[2] & 0xff);

    byte flags = (byte)(compressedBuf[3] & 0xff);
    if ((flags & 0x04) != 0) { // FEXTRA: 2-byte little-endian XLEN, then skip
      numBytesRead = in.read(compressedBuf, 0, 2);
      assertEquals("XLEN bytes read", 2, numBytesRead);
      int xlen = ((compressedBuf[1] << 8) | compressedBuf[0]) & 0xffff;
      in.skip(xlen);
    }
    if ((flags & 0x08) != 0) { // FNAME: zero-terminated original filename
      while ((numBytesRead = in.read()) != 0) {
        assertFalse("unexpected end-of-file while reading filename",
                    numBytesRead == -1);
      }
    }
    if ((flags & 0x10) != 0) { // FCOMMENT: zero-terminated comment
      while ((numBytesRead = in.read()) != 0) {
        assertFalse("unexpected end-of-file while reading comment",
                    numBytesRead == -1);
      }
    }
    if ((flags & 0xe0) != 0) { // reserved flag bits must be zero
      assertTrue("reserved bits are set??", (flags & 0xe0) == 0);
    }
    if ((flags & 0x02) != 0) { // FHCRC: header CRC16
      numBytesRead = in.read(compressedBuf, 0, 2);
      assertEquals("CRC16 bytes read", 2, numBytesRead);
      // read but not verified here (value intentionally unused)
      int crc16 = ((compressedBuf[1] << 8) | compressedBuf[0]) & 0xffff;
    }

    // ready to go! next bytes should be start of deflated stream, suitable
    // for Inflater
    numBytesRead = in.read(compressedBuf);

    // Inflater docs refer to a "dummy byte": no clue what that's about;
    // appears to work fine without one
    byte[] uncompressedBuf = new byte[256];
    Inflater inflater = new Inflater(true);

    inflater.setInput(compressedBuf, 0, numBytesRead);
    try {
      int numBytesUncompressed = inflater.inflate(uncompressedBuf);
      String outString =
        new String(uncompressedBuf, 0, numBytesUncompressed, "UTF-8");
      System.out.println("uncompressed data of first gzip member = [" +
                         outString + "]");
    } catch (java.util.zip.DataFormatException ex) {
      throw new IOException(ex.getMessage());
    }

    in.close();
  }

  /**
   * Test using the new BuiltInGzipDecompressor codec for reading gzip files.
   */
  // NOTE: This fails on RHEL4 with "java.io.IOException: header crc mismatch"
  // due to buggy version of zlib (1.2.1.2) included.
  @Test
  public void testBuiltInGzipDecompressor() throws IOException {
    JobConf jobConf = new JobConf(defaultConf);
    CompressionCodec gzip = new GzipCodec();
    ReflectionUtils.setConf(gzip, jobConf);
    localFs.delete(workDir, true);

    // Don't use native libs for this test (restored in after())
    ZlibFactory.setNativeZlibLoaded(false);
    assertEquals("[non-native (Java) codec]",
      org.apache.hadoop.io.compress.zlib.BuiltInGzipDecompressor.class,
      gzip.getDecompressorType());
    System.out.println(COLOR_BR_YELLOW + "testBuiltInGzipDecompressor() using" +
      " non-native (Java Inflater) Decompressor (" + gzip.getDecompressorType()
      + ")" + COLOR_NORMAL);

    // copy single-member test file to HDFS
    String fn1 = "testConcatThenCompress.txt" + gzip.getDefaultExtension();
    Path fnLocal1 = new Path(System.getProperty("test.concat.data","/tmp"),fn1);
    Path fnHDFS1 = new Path(workDir, fn1);
    localFs.copyFromLocalFile(fnLocal1, fnHDFS1);

    // copy multiple-member test file to HDFS
    // (actually in "seekable gzip" format, a la JIRA PIG-42)
    String fn2 = "testCompressThenConcat.txt" + gzip.getDefaultExtension();
    Path fnLocal2 = new Path(System.getProperty("test.concat.data","/tmp"),fn2);
    Path fnHDFS2 = new Path(workDir, fn2);
    localFs.copyFromLocalFile(fnLocal2, fnHDFS2);

    FileInputFormat.setInputPaths(jobConf, workDir);

    // here's first pair of DecompressorStreams:
    final FileInputStream in1 = new FileInputStream(fnLocal1.toString());
    final FileInputStream in2 = new FileInputStream(fnLocal2.toString());
    assertEquals("concat bytes available", 2734, in1.available());
    assertEquals("concat bytes available", 3413, in2.available()); // w/hdr CRC

    CompressionInputStream cin2 = gzip.createInputStream(in2);
    LineReader in = new LineReader(cin2);
    Text out = new Text();

    int numBytes, totalBytes=0, lineNum=0;
    while ((numBytes = in.readLine(out)) > 0) {
      ++lineNum;
      totalBytes += numBytes;
    }
    in.close();
    assertEquals("total uncompressed bytes in concatenated test file",
                 5346, totalBytes);
    assertEquals("total uncompressed lines in concatenated test file",
                 84, lineNum);

    ZlibFactory.loadNativeZLib();
    // test GzipZlibDecompressor (native), just to be sure
    // (FIXME? could move this call to testGzip(), but would need filename
    // setup above) (alternatively, maybe just nuke testGzip() and extend this?)
    doMultipleGzipBufferSizes(jobConf, true);
  }

  // this tests either the native or the non-native gzip decoder with 43
  // input-buffer sizes in order to try to catch any parser/state-machine
  // errors at buffer boundaries
  private static void doMultipleGzipBufferSizes(JobConf jConf,
                                                boolean useNative)
      throws IOException {
    System.out.println(COLOR_YELLOW + "doMultipleGzipBufferSizes() using " +
      (useNative? "GzipZlibDecompressor" : "BuiltInGzipDecompressor") +
      COLOR_NORMAL);

    int bufferSize;

    // ideally would add some offsets/shifts in here (e.g., via extra fields
    // of various sizes), but...significant work to hand-generate each header
    for (bufferSize = 1; bufferSize < 34; ++bufferSize) {
      jConf.setInt("io.file.buffer.size", bufferSize);
      doSingleGzipBufferSize(jConf);
    }

    // spot-check a handful of larger sizes, straddling power-of-two edges
    bufferSize = 512;
    jConf.setInt("io.file.buffer.size", bufferSize);
    doSingleGzipBufferSize(jConf);

    bufferSize = 1024;
    jConf.setInt("io.file.buffer.size", bufferSize);
    doSingleGzipBufferSize(jConf);

    bufferSize = 2*1024;
    jConf.setInt("io.file.buffer.size", bufferSize);
    doSingleGzipBufferSize(jConf);

    bufferSize = 4*1024;
    jConf.setInt("io.file.buffer.size", bufferSize);
    doSingleGzipBufferSize(jConf);

    bufferSize = 63*1024;
    jConf.setInt("io.file.buffer.size", bufferSize);
    doSingleGzipBufferSize(jConf);

    bufferSize = 64*1024;
    jConf.setInt("io.file.buffer.size", bufferSize);
    doSingleGzipBufferSize(jConf);

    bufferSize = 65*1024;
    jConf.setInt("io.file.buffer.size", bufferSize);
    doSingleGzipBufferSize(jConf);

    bufferSize = 127*1024;
    jConf.setInt("io.file.buffer.size", bufferSize);
    doSingleGzipBufferSize(jConf);

    bufferSize = 128*1024;
    jConf.setInt("io.file.buffer.size", bufferSize);
    doSingleGzipBufferSize(jConf);

    bufferSize = 129*1024;
    jConf.setInt("io.file.buffer.size", bufferSize);
    doSingleGzipBufferSize(jConf);
  }

  // this tests both files (testCompressThenConcat, testConcatThenCompress);
  // all should work with either native zlib or new Inflater-based decoder
  private static void doSingleGzipBufferSize(JobConf jConf) throws IOException {
    TextInputFormat format = new TextInputFormat();
    format.configure(jConf);

    // here's Nth pair of DecompressorStreams:
    InputSplit[] splits = format.getSplits(jConf, 100);
    assertEquals("compressed splits == 2", 2, splits.length);
    FileSplit tmp = (FileSplit) splits[0];
    // keep testConcatThenCompress first, testCompressThenConcat second
    if (tmp.getPath().getName().equals("testCompressThenConcat.txt.gz")) {
      System.out.println(" (swapping)");
      splits[0] = splits[1];
      splits[1] = tmp;
    }

    List<Text> results = readSplit(format, splits[0], jConf);
    assertEquals("splits[0] length (num lines)", 84, results.size());
    assertEquals("splits[0][0]",
      "Call me Ishmael. Some years ago--never mind how long precisely--having",
      results.get(0).toString());
    assertEquals("splits[0][42]",
      "Tell me, does the magnetic virtue of the needles of the compasses of",
      results.get(42).toString());

    results = readSplit(format, splits[1], jConf);
    assertEquals("splits[1] length (num lines)", 84, results.size());
    assertEquals("splits[1][0]",
      "Call me Ishmael. Some years ago--never mind how long precisely--having",
      results.get(0).toString());
    assertEquals("splits[1][42]",
      "Tell me, does the magnetic virtue of the needles of the compasses of",
      results.get(42).toString());
  }

  /**
   * Test using the bzip2 codec for reading
   */
  @Test
  public void testBzip2() throws IOException {
    JobConf jobConf = new JobConf(defaultConf);

    CompressionCodec bzip2 = new BZip2Codec();
    ReflectionUtils.setConf(bzip2, jobConf);
    localFs.delete(workDir, true);

    System.out.println(COLOR_BR_CYAN +
      "testBzip2() using non-native CBZip2InputStream (presumably)" +
      COLOR_NORMAL);

    // copy prebuilt (correct!) version of concat.bz2 to HDFS
    final String fn = "concat" + bzip2.getDefaultExtension();
    Path fnLocal = new Path(System.getProperty("test.concat.data", "/tmp"), fn);
    Path fnHDFS = new Path(workDir, fn);
    localFs.copyFromLocalFile(fnLocal, fnHDFS);

    writeFile(localFs, new Path(workDir, "part2.txt.bz2"), bzip2,
              "this is a test\nof bzip2\n");
    FileInputFormat.setInputPaths(jobConf, workDir);
    TextInputFormat format = new TextInputFormat(); // extends FileInputFormat
    format.configure(jobConf);
    format.setMinSplitSize(256); // work around 2-byte splits issue
    // [135 splits for a 208-byte file and a 62-byte file(!)]

    InputSplit[] splits = format.getSplits(jobConf, 100);
    assertEquals("compressed splits == 2", 2, splits.length);
    FileSplit tmp = (FileSplit) splits[0];
    // splits[0] must be the multi-member concat file; swap if needed
    if (tmp.getPath().getName().equals("part2.txt.bz2")) {
      splits[0] = splits[1];
      splits[1] = tmp;
    }

    List<Text> results = readSplit(format, splits[0], jobConf);
    assertEquals("splits[0] num lines", 6, results.size());
    assertEquals("splits[0][5]", "member #3", results.get(5).toString());

    results = readSplit(format, splits[1], jobConf);
    assertEquals("splits[1] num lines", 2, results.size());
    assertEquals("splits[1][0]", "this is a test", results.get(0).toString());
    assertEquals("splits[1][1]", "of bzip2", results.get(1).toString());
  }

  /**
   * Extended bzip2 test, similar to BuiltInGzipDecompressor test above.
   */
  @Test
  public void testMoreBzip2() throws IOException {
    JobConf jobConf = new JobConf(defaultConf);

    CompressionCodec bzip2 = new BZip2Codec();
    ReflectionUtils.setConf(bzip2, jobConf);
    localFs.delete(workDir, true);

    System.out.println(COLOR_BR_MAGENTA +
      "testMoreBzip2() using non-native CBZip2InputStream (presumably)" +
      COLOR_NORMAL);

    // copy single-member test file to HDFS
    String fn1 = "testConcatThenCompress.txt" + bzip2.getDefaultExtension();
    Path fnLocal1 = new Path(System.getProperty("test.concat.data","/tmp"),fn1);
    Path fnHDFS1 = new Path(workDir, fn1);
    localFs.copyFromLocalFile(fnLocal1, fnHDFS1);

    // copy multiple-member test file to HDFS
    String fn2 = "testCompressThenConcat.txt" + bzip2.getDefaultExtension();
    Path fnLocal2 = new Path(System.getProperty("test.concat.data","/tmp"),fn2);
    Path fnHDFS2 = new Path(workDir, fn2);
    localFs.copyFromLocalFile(fnLocal2, fnHDFS2);

    FileInputFormat.setInputPaths(jobConf, workDir);

    // here's first pair of BlockDecompressorStreams:
    final FileInputStream in1 = new FileInputStream(fnLocal1.toString());
    final FileInputStream in2 = new FileInputStream(fnLocal2.toString());
    assertEquals("concat bytes available", 2567, in1.available());
    assertEquals("concat bytes available", 3056, in2.available());

    /*
     // FIXME
     // The while-loop below dies at the beginning of the 2nd concatenated
     // member (after 17 lines successfully read) with:
     //
     //   java.io.IOException: bad block header
     //   at org.apache.hadoop.io.compress.bzip2.CBZip2InputStream.initBlock(
     //   CBZip2InputStream.java:527)
     //
     // It is not critical to concatenated-gzip support, HADOOP-6835, so it's
     // simply commented out for now (and HADOOP-6852 filed).  If and when the
     // latter issue is resolved--perhaps by fixing an error here--this code
     // should be reenabled.  Note that the doMultipleBzip2BufferSizes() test
     // below uses the same testCompressThenConcat.txt.bz2 file but works fine.

     CompressionInputStream cin2 = bzip2.createInputStream(in2);
     LineReader in = new LineReader(cin2);
     Text out = new Text();

     int numBytes, totalBytes=0, lineNum=0;
     while ((numBytes = in.readLine(out)) > 0) {
       ++lineNum;
       totalBytes += numBytes;
     }
     in.close();

     assertEquals("total uncompressed bytes in concatenated test file",
                  5346, totalBytes);
     assertEquals("total uncompressed lines in concatenated test file",
                  84, lineNum);
     */

    // test CBZip2InputStream with lots of different input-buffer sizes
    doMultipleBzip2BufferSizes(jobConf);
  }

  // this tests native bzip2 decoder with more than
  // three dozen input-buffer sizes in order to try to catch any parser/state-
  // machine errors at buffer boundaries
  private static void doMultipleBzip2BufferSizes(JobConf jConf)
      throws IOException {
    System.out.println(COLOR_MAGENTA + "doMultipleBzip2BufferSizes() using " +
      "default bzip2 decompressor" + COLOR_NORMAL);

    int bufferSize;

    // ideally would add some offsets/shifts in here (e.g., via extra header
    // data?), but...significant work to hand-generate each header, and no
    // bzip2 spec for reference
    for (bufferSize = 1; bufferSize < 34; ++bufferSize) {
      jConf.setInt("io.file.buffer.size", bufferSize);
      doSingleBzip2BufferSize(jConf);
    }

    // spot-check a handful of larger sizes, straddling power-of-two edges
    bufferSize = 512;
    jConf.setInt("io.file.buffer.size", bufferSize);
    doSingleBzip2BufferSize(jConf);

    bufferSize = 1024;
    jConf.setInt("io.file.buffer.size", bufferSize);
    doSingleBzip2BufferSize(jConf);

    bufferSize = 2*1024;
    jConf.setInt("io.file.buffer.size", bufferSize);
    doSingleBzip2BufferSize(jConf);

    bufferSize = 4*1024;
    jConf.setInt("io.file.buffer.size", bufferSize);
    doSingleBzip2BufferSize(jConf);

    bufferSize = 63*1024;
    jConf.setInt("io.file.buffer.size", bufferSize);
    doSingleBzip2BufferSize(jConf);

    bufferSize = 64*1024;
    jConf.setInt("io.file.buffer.size", bufferSize);
    doSingleBzip2BufferSize(jConf);

    bufferSize = 65*1024;
    jConf.setInt("io.file.buffer.size", bufferSize);
    doSingleBzip2BufferSize(jConf);

    bufferSize = 127*1024;
    jConf.setInt("io.file.buffer.size", bufferSize);
    doSingleBzip2BufferSize(jConf);

    bufferSize = 128*1024;
    jConf.setInt("io.file.buffer.size", bufferSize);
    doSingleBzip2BufferSize(jConf);

    bufferSize = 129*1024;
    jConf.setInt("io.file.buffer.size", bufferSize);
    doSingleBzip2BufferSize(jConf);
  }

  // this tests both files (testCompressThenConcat, testConcatThenCompress); all
  // should work with existing Java bzip2 decoder and any future native version
  private static void doSingleBzip2BufferSize(JobConf jConf)
      throws IOException {
    TextInputFormat format = new TextInputFormat();
    format.configure(jConf);
    format.setMinSplitSize(5500); // work around 256-byte/22-splits issue

    // here's Nth pair of DecompressorStreams:
    InputSplit[] splits = format.getSplits(jConf, 100);
    assertEquals("compressed splits == 2", 2, splits.length);
    FileSplit tmp = (FileSplit) splits[0];
    // NOTE(review): this compares against a ".txt.gz" name inside the bzip2
    // path; looks like it should be "testCompressThenConcat.txt.bz2", which
    // would make the swap branch unreachable here -- confirm before fixing.
    if (tmp.getPath().getName().equals("testCompressThenConcat.txt.gz")) {
      System.out.println(" (swapping)");
      splits[0] = splits[1];
      splits[1] = tmp;
    }

    // testConcatThenCompress (single)
    List<Text> results = readSplit(format, splits[0], jConf);
    assertEquals("splits[0] length (num lines)", 84, results.size());
    assertEquals("splits[0][0]",
      "Call me Ishmael. Some years ago--never mind how long precisely--having",
      results.get(0).toString());
    assertEquals("splits[0][42]",
      "Tell me, does the magnetic virtue of the needles of the compasses of",
      results.get(42).toString());

    // testCompressThenConcat (multi)
    results = readSplit(format, splits[1], jConf);
    assertEquals("splits[1] length (num lines)", 84, results.size());
    assertEquals("splits[1][0]",
      "Call me Ishmael. Some years ago--never mind how long precisely--having",
      results.get(0).toString());
    assertEquals("splits[1][42]",
      "Tell me, does the magnetic virtue of the needles of the compasses of",
      results.get(42).toString());
  }

  // Expands backslash escapes (\n, \r; anything else passes through) in a
  // command-line argument.
  private static String unquote(String in) {
    StringBuffer result = new StringBuffer();
    for(int i=0; i < in.length(); ++i) {
      char ch = in.charAt(i);
      if (ch == '\\') {
        ch = in.charAt(++i);
        switch (ch) {
        case 'n':
          result.append('\n');
          break;
        case 'r':
          result.append('\r');
          break;
        default:
          result.append(ch);
          break;
        }
      } else {
        result.append(ch);
      }
    }
    return result.toString();
  }

  /**
   * Parse the command line arguments into lines and display the result.
   * @param args command-line arguments; backslash escapes are expanded first
   * @throws Exception on any read failure
   */
  public static void main(String[] args) throws Exception {
    for(String arg: args) {
      System.out.println("Working on " + arg);
      LineReader reader = makeStream(unquote(arg));
      Text line = new Text();
      int size = reader.readLine(line);
      while (size > 0) {
        System.out.println("Got: " + line.toString());
        size = reader.readLine(line);
      }
      reader.close();
    }
  }
}
/* * The Gemma project * * Copyright (c) 2006 University of British Columbia * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package ubic.gemma.web.util; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import javax.servlet.http.Cookie; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.io.UnsupportedEncodingException; import java.net.URLEncoder; import java.util.Enumeration; import java.util.HashMap; import java.util.Map; import java.util.Map.Entry; import java.util.Set; /** * RequestUtil utility class * <p> * Good ol' copy-n-paste from <a href="http://www.javaworld.com/javaworld/jw-02-2002/ssl/utilityclass.txt"> * http://www.javaworld.com/javaworld/jw-02-2002/ssl/utilityclass.txt</a> which is referenced in the following article: * <a href="http://www.javaworld.com/javaworld/jw-02-2002/jw-0215-ssl.html"> * http://www.javaworld.com/javaworld/jw-02-2002/jw-0215-ssl.html</a> * </p> * From Appfuse. * * @author pavlidis */ public class RequestUtil { private static final String STOWED_REQUEST_ATTRIBS = "ssl.redirect.attrib.stowed"; private final transient static Log log = LogFactory.getLog( RequestUtil.class ); /** * Builds a query string from a given map of parameters * * @param m A map of parameters * @param ampersand String to use for ampersands (e.g. 
"&amp;" or "&amp;amp;" ) * @return query string (with no leading "?") */ public static StringBuffer createQueryStringFromMap(Map<String, String[]> m, String ampersand ) { StringBuffer aReturn = new StringBuffer( "" ); Set<Entry<String, String[]>> aEntryS = m.entrySet(); for ( Entry<String, String[]> aEntry : aEntryS ) { String[] o = aEntry.getValue(); if ( o.length == 0 ) { RequestUtil.append( aEntry.getKey(), "", aReturn, ampersand ); } else { String[] aValues = o; for ( String aValue : aValues ) { RequestUtil.append( aEntry.getKey(), aValue, aReturn, ampersand ); } } } return aReturn; } /** * Convenience method for deleting a cookie by name * * @param response the current web response * @param cookie the cookie to remove * @param path the path on which the cookie was set (i.e. /appfuse) */ public static void deleteCookie( HttpServletResponse response, Cookie cookie, String path ) { if ( cookie != null ) { // Delete the cookie by setting its maximum age to zero cookie.setMaxAge( 0 ); cookie.setPath( path ); response.addCookie( cookie ); } } /** * Convenience method to get the application's URL based on request variables. NOTE: this is pretty useless if * running behind a proxy. 
* * @param request request * @return app url */ public static String getAppURL( HttpServletRequest request ) { StringBuilder url = new StringBuilder(); int port = request.getServerPort(); if ( port < 0 ) { port = 80; // Work around java.net.URL bug } String scheme = request.getScheme(); url.append( scheme ); url.append( "://" ); url.append( request.getServerName() ); if ( ( scheme.equals( "http" ) && ( port != 80 ) ) || ( scheme.equals( "https" ) && ( port != 443 ) ) ) { url.append( ':' ); url.append( port ); } url.append( request.getContextPath() ); return url.toString(); } /** * Convenience method to get a cookie by name * * @param request the current request * @param name the name of the cookie to find * @return the cookie (if found), null if not found */ public static Cookie getCookie( HttpServletRequest request, String name ) { Cookie[] cookies = request.getCookies(); Cookie returnCookie = null; if ( cookies == null ) { return null; } for ( Cookie thisCookie : cookies ) { if ( thisCookie.getName().equals( name ) ) { // cookies with no value do me no good! if ( !thisCookie.getValue().equals( "" ) ) { returnCookie = thisCookie; break; } } } return returnCookie; } /** * @param aRequest request * @return Creates query String from request body parameters */ public static String getRequestParameters( HttpServletRequest aRequest ) { // set the ALGORIGTHM as defined for the application // ALGORITHM = (String) aRequest.getAttribute(Constants.ENC_ALGORITHM); Map<String, String[]> m = aRequest.getParameterMap(); return RequestUtil.createQueryStringFromMap( m, "&" ).toString(); } /** * Returns request attributes from session to request * * @param aRequest DOCUMENT ME! 
*/ public static void reclaimRequestAttributes( HttpServletRequest aRequest ) { @SuppressWarnings("unchecked") Map<String, Object> map = ( Map<String, Object> ) aRequest.getSession() .getAttribute( RequestUtil.STOWED_REQUEST_ATTRIBS ); if ( map == null ) { return; } for ( String name : map.keySet() ) { aRequest.setAttribute( name, map.get( name ) ); } aRequest.getSession().removeAttribute( RequestUtil.STOWED_REQUEST_ATTRIBS ); } public static void setCookie( HttpServletResponse response, String name, String value, String path ) { if ( RequestUtil.log.isDebugEnabled() ) { RequestUtil.log.debug( "Setting cookie '" + name + "' on path '" + path + "'" ); } Cookie cookie = new Cookie( name, value ); cookie.setSecure( false ); cookie.setPath( path ); cookie.setMaxAge( 3600 * 24 * 30 ); // 30 days response.addCookie( cookie ); } /** * Stores request attributes in session * * @param aRequest the current request */ public static void stowRequestAttributes( HttpServletRequest aRequest ) { if ( aRequest.getSession().getAttribute( RequestUtil.STOWED_REQUEST_ATTRIBS ) != null ) { return; } Enumeration<String> e = aRequest.getAttributeNames(); Map<String, Object> map = new HashMap<>(); while ( e.hasMoreElements() ) { String name = e.nextElement(); map.put( name, aRequest.getAttribute( name ) ); } aRequest.getSession().setAttribute( RequestUtil.STOWED_REQUEST_ATTRIBS, map ); } /** * Appends new key and value pair to query string * * @param key parameter name * @param value value of parameter * @param queryString existing query string * @param ampersand string to use for ampersand (e.g. 
"&" or "&amp;") * @return query string (with no leading "?") */ private static StringBuffer append( Object key, Object value, StringBuffer queryString, String ampersand ) { if ( queryString.length() > 0 ) { queryString.append( ampersand ); } try { queryString.append( URLEncoder.encode( key.toString(), "UTF-8" ) ); queryString.append( "=" ); queryString.append( URLEncoder.encode( value.toString(), "UTF-8" ) ); } catch ( UnsupportedEncodingException e ) { // won't happen since we're hard-coding UTF-8 } return queryString; } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.internal.cache; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.locks.ReentrantLock; import org.apache.geode.cache.CommitConflictException; import org.apache.geode.cache.EntryNotFoundException; import org.apache.geode.cache.Region; import org.apache.geode.cache.Region.Entry; import org.apache.geode.cache.TransactionDataNodeHasDepartedException; import org.apache.geode.cache.TransactionException; import org.apache.geode.cache.TransactionId; import org.apache.geode.cache.UnsupportedOperationInTransactionException; import org.apache.geode.distributed.DistributedMember; import org.apache.geode.distributed.internal.ReliableReplyException; import org.apache.geode.distributed.internal.ReliableReplyProcessor21; import org.apache.geode.distributed.internal.ReplyException; import org.apache.geode.internal.cache.tier.sockets.ClientProxyMembershipID; import org.apache.geode.internal.cache.tier.sockets.VersionedObjectList; import org.apache.geode.internal.cache.tx.TXRegionStub; /** * TXStateStub lives on the accessor node when we are remoting a transaction. 
It is a stub for * {@link TXState}. */ public abstract class TXStateStub implements TXStateInterface { protected final DistributedMember target; protected final TXStateProxy proxy; protected Runnable internalAfterSendRollback; protected Runnable internalAfterSendCommit; Map<Region<?, ?>, TXRegionStub> regionStubs = new HashMap<>(); protected TXStateStub(TXStateProxy stateProxy, DistributedMember target) { this.target = target; proxy = stateProxy; internalAfterSendRollback = null; internalAfterSendCommit = null; } @Override public void precommit() throws CommitConflictException, UnsupportedOperationInTransactionException { throw new UnsupportedOperationInTransactionException( String.format("precommit() operation %s meant for Dist Tx is not supported", "precommit")); } /** * Implemented in subclasses for Peer vs. Client */ @Override public abstract void commit() throws CommitConflictException; protected abstract void validateRegionCanJoinTransaction(InternalRegion region) throws TransactionException; protected abstract TXRegionStub generateRegionStub(InternalRegion region); @Override public abstract void rollback(); @Override public abstract void afterCompletion(int status); @Override public void beforeCompletion() { // note that this class must do distribution as it is used as the stub class in some situations ReliableReplyProcessor21 response = JtaBeforeCompletionMessage.send(proxy.getCache(), proxy.getTxId().getUniqId(), getOriginatingMember(), target); try { response.waitForReliableDelivery(); } catch (ReliableReplyException e) { throw new TransactionDataNodeHasDepartedException(e); } catch (ReplyException e) { e.handleCause(); } catch (InterruptedException ignored) { } } /** * Get or create a TXRegionStub for the given region. For regions that are new to the tx, we * validate their eligibility. * * @param region The region to involve in the tx. 
* @return existing or new stub for region */ protected TXRegionStub getTXRegionStub(InternalRegion region) { TXRegionStub stub = regionStubs.get(region); if (stub == null) { /* * validate whether this region is legit or not */ validateRegionCanJoinTransaction(region); stub = generateRegionStub(region); regionStubs.put(region, stub); } return stub; } public Map<Region<?, ?>, TXRegionStub> getRegionStubs() { return regionStubs; } @Override public String toString() { return getClass() + "@" + System.identityHashCode(this) + " target node: " + target; } /* * (non-Javadoc) * * @see * org.apache.geode.internal.cache.TXStateInterface#destroyExistingEntry(org.apache.geode.internal * .cache.EntryEventImpl, boolean, java.lang.Object) */ @Override public void destroyExistingEntry(EntryEventImpl event, boolean cacheWrite, Object expectedOldValue) throws EntryNotFoundException { if (event.getOperation().isLocal()) { throw new UnsupportedOperationInTransactionException( "localDestroy() is not allowed in a transaction"); } TXRegionStub rs = getTXRegionStub(event.getRegion()); rs.destroyExistingEntry(event, cacheWrite, expectedOldValue); } /* * (non-Javadoc) * * @see org.apache.geode.internal.cache.TXStateInterface#getBeginTime() */ @Override public long getBeginTime() { // TODO Auto-generated method stub return 0; } /* * (non-Javadoc) * * @see org.apache.geode.internal.cache.TXStateInterface#getCache() */ @Override public InternalCache getCache() { return proxy.getCache(); } /* * (non-Javadoc) * * @see org.apache.geode.internal.cache.TXStateInterface#getChanges() */ @Override public int getChanges() { // TODO Auto-generated method stub return 0; } /* * (non-Javadoc) * * @see org.apache.geode.internal.cache.TXStateInterface#getDeserializedValue(java.lang.Object, * org.apache.geode.internal.cache.LocalRegion, boolean) */ @Override public Object getDeserializedValue(KeyInfo keyInfo, LocalRegion localRegion, boolean updateStats, boolean disableCopyOnRead, boolean preferCD, 
EntryEventImpl clientEvent, boolean returnTombstones, boolean retainResult, boolean createIfAbsent) { // We never have a local value if we are a stub... return null; } public Object getDeserializedValue(KeyInfo keyInfo, LocalRegion localRegion, boolean updateStats, boolean disableCopyOnRead, boolean preferCD, EntryEventImpl clientEvent, boolean returnTombstones) { // We never have a local value if we are a stub... return null; } /* * (non-Javadoc) * * @see org.apache.geode.internal.cache.TXStateInterface#getEntry(java.lang.Object, * org.apache.geode.internal.cache.LocalRegion) */ @Override public Entry getEntry(KeyInfo keyInfo, LocalRegion r, boolean allowTombstones) { return getTXRegionStub(r).getEntry(keyInfo, allowTombstones); // Entry retVal = null; // if (r.getPartitionAttributes() != null) { // PartitionedRegion pr = (PartitionedRegion)r; // try { // retVal = pr.getEntryRemotely((InternalDistributedMember)target, // keyInfo.getBucketId(), keyInfo.getKey(), allowTombstones); } /* * (non-Javadoc) * * @see org.apache.geode.internal.cache.TXStateInterface#getEvent() */ @Override public TXEvent getEvent() { throw new UnsupportedOperationException(); } /* * (non-Javadoc) * * @see org.apache.geode.internal.cache.TXStateInterface#getEvents() */ @Override public List getEvents() { throw new UnsupportedOperationException(); } /* * (non-Javadoc) * * @see org.apache.geode.internal.cache.TXStateInterface#getRegions() */ @Override public Collection<InternalRegion> getRegions() { throw new UnsupportedOperationException(); } /* * (non-Javadoc) * * @see org.apache.geode.internal.cache.TXStateInterface#getTransactionId() */ @Override public TransactionId getTransactionId() { return proxy.getTxId(); } /* * (non-Javadoc) * * @see org.apache.geode.internal.cache.TXStateInterface#invalidateExistingEntry(org.apache.geode. 
* internal.cache.EntryEventImpl, boolean, boolean) */ @Override public void invalidateExistingEntry(EntryEventImpl event, boolean invokeCallbacks, boolean forceNewEntry) { if (event.getOperation().isLocal()) { throw new UnsupportedOperationInTransactionException( "localInvalidate() is not allowed in a transaction"); } getTXRegionStub(event.getRegion()).invalidateExistingEntry(event, invokeCallbacks, forceNewEntry); } /* * (non-Javadoc) * * @see org.apache.geode.internal.cache.TXStateInterface#isInProgress() */ @Override public boolean isInProgress() { return proxy.isInProgress(); } /* * (non-Javadoc) * * @see org.apache.geode.internal.cache.TXStateInterface#isInProgressAndSameAs(org.apache.geode. * internal.cache.TXStateInterface) */ @Override public boolean isInProgressAndSameAs(TXStateInterface state) { throw new UnsupportedOperationException(); } /* * (non-Javadoc) * * @see org.apache.geode.internal.cache.TXStateInterface#needsLargeModCount() */ @Override public boolean needsLargeModCount() { // TODO Auto-generated method stub return false; } /* * (non-Javadoc) * * @see org.apache.geode.internal.cache.TXStateInterface#nextModSerialNum() */ @Override public int nextModSerialNum() { // TODO Auto-generated method stub return 0; } /* * (non-Javadoc) * * @see * org.apache.geode.internal.cache.TXStateInterface#readRegion(org.apache.geode.internal.cache. * LocalRegion) */ @Override public TXRegionState readRegion(InternalRegion r) { throw new UnsupportedOperationException(); } /* * (non-Javadoc) * * @see org.apache.geode.internal.cache.TXStateInterface#rmRegion(org.apache.geode.internal.cache. 
* LocalRegion) */ @Override public void rmRegion(LocalRegion r) { throw new UnsupportedOperationException(); } public void setAfterSendRollback(Runnable afterSend) { // TODO Auto-generated method stub internalAfterSendRollback = afterSend; } public void setAfterSendCommit(Runnable afterSend) { // TODO Auto-generated method stub internalAfterSendCommit = afterSend; } /* * (non-Javadoc) * * @see * org.apache.geode.internal.cache.TXStateInterface#txPutEntry(org.apache.geode.internal.cache. * EntryEventImpl, boolean, boolean, boolean) */ @Override public boolean txPutEntry(EntryEventImpl event, boolean ifNew, boolean requireOldValue, boolean checkResources, Object expectedOldValue) { return false; } /* * (non-Javadoc) * * @see org.apache.geode.internal.cache.TXStateInterface#txReadEntry(java.lang.Object, * org.apache.geode.internal.cache.LocalRegion, boolean) */ @Override public TXEntryState txReadEntry(KeyInfo entryKey, LocalRegion localRegion, boolean rememberRead, boolean createTxEntryIfAbsent) { // TODO Auto-generated method stub return null; } /* * (non-Javadoc) * * @see * org.apache.geode.internal.cache.TXStateInterface#txReadRegion(org.apache.geode.internal.cache. * LocalRegion) */ @Override public TXRegionState txReadRegion(InternalRegion internalRegion) { // TODO Auto-generated method stub return null; } /* * (non-Javadoc) * * @see * org.apache.geode.internal.cache.TXStateInterface#txWriteRegion(org.apache.geode.internal.cache. * LocalRegion, java.lang.Object) */ @Override public TXRegionState txWriteRegion(InternalRegion internalRegion, KeyInfo entryKey) { // TODO Auto-generated method stub return null; } /* * (non-Javadoc) * * @see * org.apache.geode.internal.cache.TXStateInterface#writeRegion(org.apache.geode.internal.cache. 
* LocalRegion) */ @Override public TXRegionState writeRegion(InternalRegion r) { // TODO Auto-generated method stub return null; } /* * (non-Javadoc) * * @see org.apache.geode.internal.cache.InternalDataView#containsKey(java.lang.Object, * org.apache.geode.internal.cache.LocalRegion) */ @Override public boolean containsKey(KeyInfo keyInfo, LocalRegion localRegion) { return getTXRegionStub(localRegion).containsKey(keyInfo); } /* * (non-Javadoc) * * @see org.apache.geode.internal.cache.InternalDataView#containsValueForKey(java.lang.Object, * org.apache.geode.internal.cache.LocalRegion) */ @Override public boolean containsValueForKey(KeyInfo keyInfo, LocalRegion localRegion) { return getTXRegionStub(localRegion).containsValueForKey(keyInfo); } /* * (non-Javadoc) * * @see * org.apache.geode.internal.cache.InternalDataView#entryCount(org.apache.geode.internal.cache. * LocalRegion) */ @Override public int entryCount(LocalRegion localRegion) { return getTXRegionStub(localRegion).entryCount(); } /* * (non-Javadoc) * * @see * org.apache.geode.internal.cache.InternalDataView#findObject(org.apache.geode.internal.cache. * LocalRegion, java.lang.Object, java.lang.Object, boolean, boolean, java.lang.Object) */ @Override public Object findObject(KeyInfo keyInfo, LocalRegion r, boolean isCreate, boolean generateCallbacks, Object value, boolean disableCopyOnRead, boolean preferCD, ClientProxyMembershipID requestingClient, EntryEventImpl clientEvent, boolean returnTombstones) { return getTXRegionStub(r).findObject(keyInfo, isCreate, generateCallbacks, value, preferCD, requestingClient, clientEvent); } /* * (non-Javadoc) * * @see * org.apache.geode.internal.cache.InternalDataView#getAdditionalKeysForIterator(org.apache.geode. 
* internal.cache.LocalRegion) */ @Override public Set getAdditionalKeysForIterator(LocalRegion currRgn) { // TODO Auto-generated method stub return null; } /* * (non-Javadoc) * * @see * org.apache.geode.internal.cache.InternalDataView#getEntryForIterator(org.apache.geode.internal. * cache.LocalRegion, java.lang.Object, boolean) */ @Override public Object getEntryForIterator(KeyInfo keyInfo, LocalRegion currRgn, boolean rememberReads, boolean allowTombstones) { return getTXRegionStub(currRgn).getEntryForIterator(keyInfo, allowTombstones); } /* * (non-Javadoc) * * @see org.apache.geode.internal.cache.InternalDataView#getKeyForIterator(java.lang.Object, * org.apache.geode.internal.cache.LocalRegion, boolean) */ @Override public Object getKeyForIterator(KeyInfo keyInfo, LocalRegion currRgn, boolean rememberReads, boolean allowTombstones) { Object key = keyInfo.getKey(); if (key instanceof RegionEntry) { return ((RegionEntry) key).getKey(); } return key; } /* * (non-Javadoc) * * @see org.apache.geode.internal.cache.InternalDataView#getValueInVM(java.lang.Object, * org.apache.geode.internal.cache.LocalRegion, boolean) */ @Override public Object getValueInVM(KeyInfo keyInfo, LocalRegion localRegion, boolean rememberRead) { // TODO Auto-generated method stub return null; } /* * (non-Javadoc) * * @see org.apache.geode.internal.cache.InternalDataView#isDeferredStats() */ @Override public boolean isDeferredStats() { return true; } @Override public boolean putEntry(EntryEventImpl event, boolean ifNew, boolean ifOld, Object expectedOldValue, boolean requireOldValue, long lastModified, boolean overwriteDestroyed) { return putEntry(event, ifNew, ifOld, expectedOldValue, requireOldValue, lastModified, overwriteDestroyed, true, false); } /* * (non-Javadoc) * * @see org.apache.geode.internal.cache.InternalDataView#putEntry(org.apache.geode.internal.cache. 
* EntryEventImpl, boolean, boolean, java.lang.Object, boolean, long, boolean) */ @Override public boolean putEntry(EntryEventImpl event, boolean ifNew, boolean ifOld, Object expectedOldValue, boolean requireOldValue, long lastModified, boolean overwriteDestroyed, boolean invokeCallbacks, boolean throwConcurrentModification) { return getTXRegionStub(event.getRegion()).putEntry(event, ifNew, ifOld, expectedOldValue, requireOldValue, lastModified, overwriteDestroyed); } /* * (non-Javadoc) * * @see * org.apache.geode.internal.cache.InternalDataView#getSerializedValue(org.apache.geode.internal. * cache.LocalRegion, java.lang.Object, java.lang.Object) */ @Override public Object getSerializedValue(LocalRegion localRegion, KeyInfo key, boolean doNotLockEntry, ClientProxyMembershipID requestingClient, EntryEventImpl clientEvent, boolean returnTombstones) { throw new UnsupportedOperationException(); } /* * (non-Javadoc) * * @see * org.apache.geode.internal.cache.InternalDataView#putEntryOnRemote(org.apache.geode.internal. * cache.EntryEventImpl, boolean, boolean, java.lang.Object, boolean, long, boolean) */ @Override public boolean putEntryOnRemote(EntryEventImpl event, boolean ifNew, boolean ifOld, Object expectedOldValue, boolean requireOldValue, long lastModified, boolean overwriteDestroyed) throws DataLocationException { throw new IllegalStateException(); } @Override public boolean isFireCallbacks() { return false; } /* * (non-Javadoc) * * @see org.apache.geode.internal.cache.InternalDataView#destroyOnRemote(java.lang.Integer, * org.apache.geode.internal.cache.EntryEventImpl, java.lang.Object) */ @Override public void destroyOnRemote(EntryEventImpl event, boolean cacheWrite, Object expectedOldValue) throws DataLocationException { throw new IllegalStateException(); } /* * (non-Javadoc) * * @see * org.apache.geode.internal.cache.InternalDataView#invalidateOnRemote(org.apache.geode.internal. 
* cache.EntryEventImpl, boolean, boolean) */ @Override public void invalidateOnRemote(EntryEventImpl event, boolean invokeCallbacks, boolean forceNewEntry) throws DataLocationException { throw new IllegalStateException(); } @Override public void checkSupportsRegionDestroy() throws UnsupportedOperationInTransactionException { throw new UnsupportedOperationInTransactionException( "destroyRegion() is not supported while in a transaction"); } @Override public void checkSupportsRegionInvalidate() throws UnsupportedOperationInTransactionException { throw new UnsupportedOperationInTransactionException( "invalidateRegion() is not supported while in a transaction"); } @Override public void checkSupportsRegionClear() throws UnsupportedOperationInTransactionException { throw new UnsupportedOperationInTransactionException( "clear() is not supported while in a transaction"); } /* * (non-Javadoc) * * @see * org.apache.geode.internal.cache.InternalDataView#getBucketKeys(org.apache.geode.internal.cache. * LocalRegion, int) */ @Override public Set getBucketKeys(LocalRegion localRegion, int bucketId, boolean allowTombstones) { PartitionedRegion pr = (PartitionedRegion) localRegion; /* * txtodo: what does this mean for c/s */ return pr.getBucketKeys(bucketId, allowTombstones); } /* * (non-Javadoc) * * @see org.apache.geode.internal.cache.InternalDataView#getEntryOnRemote(java.lang.Object, * org.apache.geode.internal.cache.LocalRegion) */ @Override public Entry getEntryOnRemote(KeyInfo key, LocalRegion localRegion, boolean allowTombstones) throws DataLocationException { throw new IllegalStateException(); } /* * (non-Javadoc) * * @see org.apache.geode.internal.cache.TXStateInterface#getSemaphore() */ @Override public ReentrantLock getLock() { return proxy.getLock(); } /* * (non-Javadoc) * * @see * org.apache.geode.internal.cache.InternalDataView#getRegionKeysForIteration(org.apache.geode. 
* internal.cache.LocalRegion) */ @Override public Set getRegionKeysForIteration(LocalRegion currRegion) { return getTXRegionStub(currRegion).getRegionKeysForIteration(); } @Override public boolean isRealDealLocal() { return false; } public DistributedMember getTarget() { return target; } @Override public void postPutAll(DistributedPutAllOperation putallOp, VersionedObjectList successfulPuts, InternalRegion reg) { getTXRegionStub(reg).postPutAll(putallOp, successfulPuts, reg); } @Override public void postRemoveAll(DistributedRemoveAllOperation op, VersionedObjectList successfulOps, InternalRegion reg) { getTXRegionStub(reg).postRemoveAll(op, successfulOps, reg); } @Override public Entry accessEntry(KeyInfo keyInfo, LocalRegion localRegion) { return getEntry(keyInfo, localRegion, false); } @Override public void updateEntryVersion(EntryEventImpl event) throws EntryNotFoundException { throw new UnsupportedOperationException(); } @Override public void close() { // nothing needed } @Override public boolean isTxState() { return false; } @Override public boolean isTxStateStub() { return true; } @Override public boolean isTxStateProxy() { return false; } @Override public boolean isDistTx() { return false; } @Override public boolean isCreatedOnDistTxCoordinator() { return false; } }
package seedu.task.model; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.logging.Logger; import javafx.collections.transformation.FilteredList; import javafx.collections.transformation.SortedList; import seedu.task.commons.core.ComponentManager; import seedu.task.commons.core.LogsCenter; import seedu.task.commons.core.Status; import seedu.task.commons.core.UnmodifiableObservableList; import seedu.task.commons.events.model.TaskBookChangedEvent; import seedu.task.commons.util.StringUtil; import seedu.task.model.item.Event; import seedu.task.model.item.ReadOnlyEvent; import seedu.task.model.item.ReadOnlyTask; import seedu.task.model.item.Task; import seedu.task.model.item.UniqueEventList; import seedu.task.model.item.UniqueEventList.DuplicateEventException; import seedu.task.model.item.UniqueEventList.EventNotFoundException; import seedu.task.model.item.UniqueTaskList; import seedu.task.model.item.UniqueTaskList.TaskNotFoundException; /** * Represents the in-memory model of the task book data. * All changes to any model should be synchronized. 
*/ public class ModelManager extends ComponentManager implements Model { private static final Status INCOMPLETE_STATUS = Status.INCOMPLETED; private static final Status COMPLETE_STATUS = Status.COMPLETED; private static final Logger logger = LogsCenter.getLogger(ModelManager.class); private final TaskBook taskBook; private final FilteredList<Task> filteredTasks; private final FilteredList<Event> filteredEvents; /** * Initializes a ModelManager with the given TaskBook * TaskBook and its variables should not be null */ public ModelManager(TaskBook src, UserPrefs userPrefs) { super(); assert src != null; assert userPrefs != null; logger.fine("Initializing with task book: " + src + " and user prefs " + userPrefs); taskBook = new TaskBook(src); filteredTasks = new FilteredList<>(taskBook.getTasks()); filteredEvents = new FilteredList<>(taskBook.getEvents()); } public ModelManager() { this(new TaskBook(), new UserPrefs()); } public ModelManager(ReadOnlyTaskBook initialData, UserPrefs userPrefs) { taskBook = new TaskBook(initialData); filteredTasks = new FilteredList<>(taskBook.getTasks()); filteredEvents = new FilteredList<>(taskBook.getEvents()); } @Override public void resetData(ReadOnlyTaskBook newData) { taskBook.resetData(newData); updateFilteredEventListToShowWithStatus(INCOMPLETE_STATUS); updateFilteredTaskListToShowWithStatus(INCOMPLETE_STATUS); indicateTaskBookChanged(); } @Override public ReadOnlyTaskBook getTaskBook() { return taskBook; } /** Raises an event to indicate the model has changed */ private void indicateTaskBookChanged() { raise(new TaskBookChangedEvent(taskBook)); } //@@author A0121608N @Override public synchronized void deleteTask(ReadOnlyTask target) throws TaskNotFoundException { taskBook.removeTask(target); updateFilteredTaskListToShowWithStatus(INCOMPLETE_STATUS); indicateTaskBookChanged(); } @Override public synchronized void deleteEvent(ReadOnlyEvent target) throws EventNotFoundException { taskBook.removeEvent(target); 
updateFilteredEventListToShowWithStatus(INCOMPLETE_STATUS); indicateTaskBookChanged(); } @Override public synchronized void clearTasks() { updateFilteredTaskListToShowWithStatus(COMPLETE_STATUS); while(!filteredTasks.isEmpty()){ ReadOnlyTask task = filteredTasks.get(0); try { taskBook.removeTask(task); } catch (TaskNotFoundException tnfe) { assert false : "The target task cannot be missing"; } } updateFilteredTaskListToShowAll(); indicateTaskBookChanged(); } @Override public synchronized void clearEvents() { updateFilteredEventListToShowWithStatus(COMPLETE_STATUS); while(!filteredEvents.isEmpty()){ ReadOnlyEvent event = filteredEvents.get(0); try { taskBook.removeEvent(event); } catch (EventNotFoundException tnfe) { assert false : "The target event cannot be missing"; } } updateFilteredEventListToShowAll(); indicateTaskBookChanged(); } @Override public synchronized void markTask(ReadOnlyTask target){ taskBook.markTask(target); updateFilteredTaskListToShowWithStatus(INCOMPLETE_STATUS); indicateTaskBookChanged(); } //@@author A0127570H @Override public synchronized void addTask(Task task) throws UniqueTaskList.DuplicateTaskException { taskBook.addTask(task); updateFilteredTaskListToShowWithStatus(INCOMPLETE_STATUS); indicateTaskBookChanged(); } @Override public synchronized void addEvent(Event event) throws DuplicateEventException { taskBook.addEvent(event); updateFilteredEventListToShowWithStatus(INCOMPLETE_STATUS); indicateTaskBookChanged(); } @Override public synchronized void editTask(Task editTask, ReadOnlyTask targetTask) throws UniqueTaskList.DuplicateTaskException { taskBook.editTask(editTask, targetTask); updateFilteredTaskListToShowWithStatus(INCOMPLETE_STATUS); indicateTaskBookChanged(); } @Override public void editEvent(Event editEvent, ReadOnlyEvent targetEvent) throws UniqueEventList.DuplicateEventException { taskBook.editEvent(editEvent, targetEvent); updateFilteredEventListToShowWithStatus(INCOMPLETE_STATUS); indicateTaskBookChanged(); } //@@author 
//=========== Filtered Task List Accessors =============================================================== //@@author A0144702N @Override public UnmodifiableObservableList<ReadOnlyTask> getFilteredTaskList() { SortedList<Task> sortedTasks = new SortedList<>(filteredTasks); sortedTasks.setComparator(Task.getAscComparator()); return new UnmodifiableObservableList<>(sortedTasks); } @Override public UnmodifiableObservableList<ReadOnlyEvent> getFilteredEventList() { SortedList<Event> sortedEvents = new SortedList<>(filteredEvents); sortedEvents.setComparator(Event.getAscComparator()); return new UnmodifiableObservableList<>(sortedEvents); } @Override public void updateFilteredTaskListToShowAll() { filteredTasks.setPredicate(null); } @Override public void showFoundTaskList(Set<String> keywords, boolean isPowerSearch){ updateFilteredTaskList(new PredicateExpression(new NameQualifier(keywords, isPowerSearch))); } @Override public void showFoundEventList(Set<String> keywords, boolean isPowerSearch){ updateFilteredEventList(new PredicateExpression(new NameQualifier(keywords, isPowerSearch))); } @Override public void updateFilteredTaskListToShowWithStatus(Status status) { if(status == Status.ALL) { updateFilteredTaskListToShowAll(); } else { updateFilteredTaskList(new PredicateExpression(new StatusQualifier(status))); } } @Override public void updateFilteredEventListToShowWithStatus(Status status) { if(status == Status.ALL) { updateFilteredEventListToShowAll(); } else { updateFilteredEventList(new PredicateExpression(new StatusQualifier(status))); } } @Override public void updateFilteredEventListToShowAll() { filteredEvents.setPredicate(null); } private void updateFilteredTaskList(Expression expression) { filteredTasks.setPredicate(expression::satisfies); } private void updateFilteredEventList(Expression expression) { filteredEvents.setPredicate(expression::satisfies); } //========== Inner classes/interfaces used for filtering 
================================================== interface Expression { boolean satisfies(ReadOnlyTask task); boolean satisfies(ReadOnlyEvent event); String toString(); } private class PredicateExpression implements Expression { private final Qualifier qualifier; PredicateExpression(Qualifier qualifier) { this.qualifier = qualifier; } @Override public boolean satisfies(ReadOnlyTask task) { qualifier.prepare(task); return qualifier.run(); } @Override public boolean satisfies(ReadOnlyEvent event) { qualifier.prepare(event); return qualifier.run(); } @Override public String toString() { return qualifier.toString(); } } interface Qualifier { boolean run(); void prepare(ReadOnlyTask task); void prepare(ReadOnlyEvent event); String toString(); } private class NameQualifier implements Qualifier { private boolean isPowerSearch; private Set<String> keyWords; private String targetName; private String targetDesc; NameQualifier(Set<String> keyWords, boolean isPowerSearch) { this.keyWords = keyWords; this.isPowerSearch = isPowerSearch; } @Override public String toString() { return String.join(", ", keyWords); } @Override /** * Filter out those having names and description not matched with the keywords. 
*/ public boolean run() { List<String> sourceSet = new ArrayList<>(); if(isPowerSearch) { //break the name and desc to allow power search sourceSet = new ArrayList<>(Arrays.asList(targetName.split("\\s"))); sourceSet.addAll(Arrays.asList(targetDesc.split("\\s"))); //break the keyword to allow power search List<String> tempSet = new ArrayList<>(keyWords); keyWords = new HashSet<>(); tempSet.stream().forEach(keyword -> keyWords.addAll(Arrays.asList(keyword.split("\\s")))); } else { sourceSet.add(targetName); sourceSet.add(targetDesc); } for(String source: sourceSet) { boolean found = keyWords.stream() .filter(keyword -> StringUtil.isSimilar(source.trim(), keyword.trim())) .findAny() .isPresent(); if (found) { return true; } } return false; } @Override public void prepare(ReadOnlyTask task) { targetName = task.getTask().fullName; targetDesc = task.getDescriptionValue(); } @Override public void prepare(ReadOnlyEvent event) { targetName = event.getEvent().fullName; targetDesc = event.getDescriptionValue(); } } private class StatusQualifier implements Qualifier { private Boolean status; private Boolean targetStatus; StatusQualifier(Status status){ switch(status) { case COMPLETED: this.status = true; break; case INCOMPLETED: this.status = false; break; default: this.status = false; } } @Override public boolean run() { return targetStatus.equals(status); } @Override public String toString() { return (status ? "completed" : "not yet completed"); } @Override public void prepare(ReadOnlyTask task) { targetStatus = task.getTaskStatus(); } @Override public void prepare(ReadOnlyEvent event) { targetStatus = event.isEventCompleted(); } } }
/** * Copyright 2011-2019 Asakusa Framework Team. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.asakusafw.directio.hive.parquet; import static org.hamcrest.Matchers.*; import static org.junit.Assert.*; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.math.BigDecimal; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.TimeZone; import java.util.concurrent.TimeUnit; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.LocalFileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.io.IOUtils; import org.junit.Assume; import org.junit.ClassRule; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; import com.asakusafw.directio.hive.parquet.mock.MockSimpleWithLong; import com.asakusafw.directio.hive.parquet.mock.WithDate; import com.asakusafw.directio.hive.parquet.mock.WithDateTime; import com.asakusafw.directio.hive.parquet.mock.WithDecimal; import com.asakusafw.directio.hive.parquet.mock.WithFour; import com.asakusafw.directio.hive.parquet.mock.WithString; import com.asakusafw.directio.hive.parquet.mock.WithStringSupports; import 
com.asakusafw.directio.hive.parquet.mock.WithTimestampSupports; import com.asakusafw.directio.hive.serde.DataModelDescriptorEditor; import com.asakusafw.directio.hive.serde.DataModelMapping.ExceptionHandlingStrategy; import com.asakusafw.directio.hive.serde.DataModelMapping.FieldMappingStrategy; import com.asakusafw.directio.hive.serde.FieldPropertyDescriptor; import com.asakusafw.directio.hive.serde.StringValueSerdeFactory; import com.asakusafw.directio.hive.serde.TimestampValueSerdeFactory; import com.asakusafw.directio.hive.serde.ValueSerde; import com.asakusafw.directio.hive.serde.ValueSerdeFactory; import com.asakusafw.directio.hive.serde.mock.MockSimple; import com.asakusafw.directio.hive.serde.mock.MockTypes; import com.asakusafw.info.hive.BuiltinStorageFormatInfo; import com.asakusafw.info.hive.StorageFormatInfo; import com.asakusafw.runtime.directio.Counter; import com.asakusafw.runtime.directio.DirectInputFragment; import com.asakusafw.runtime.directio.hadoop.StripedDataFormat; import com.asakusafw.runtime.io.ModelInput; import com.asakusafw.runtime.io.ModelOutput; import com.asakusafw.runtime.value.Date; import com.asakusafw.runtime.value.DateOption; import com.asakusafw.runtime.value.DateTime; import com.asakusafw.runtime.value.DateTimeOption; import com.asakusafw.runtime.value.DecimalOption; import com.asakusafw.runtime.value.IntOption; import com.asakusafw.runtime.value.LongOption; import com.asakusafw.runtime.value.StringOption; import com.asakusafw.runtime.windows.WindowsSupport; /** * Test for {@link ParquetFileFormat}. */ public class ParquetFileFormatTest { /** * Windows platform support. 
 */
@ClassRule
public static final WindowsSupport WINDOWS_SUPPORT = new WindowsSupport();

// Offset (in seconds) of the JVM's default time zone; used to normalize
// timestamps read back from pre-generated test data.
private static final long LOCAL_TIMEZONE_OFFSET =
        TimeUnit.MILLISECONDS.toSeconds(TimeZone.getDefault().getRawOffset());

// Offset (in seconds) of JST, the zone the bundled test data appears to have
// been created in — test data may be created with a different timestamp zone.
private static final long TESTDATA_TIMEZONE_OFFSET =
        TimeUnit.MILLISECONDS.toSeconds(TimeZone.getTimeZone("JST").getRawOffset());

/**
 * A temporary folder for testing.
 */
@Rule
public final TemporaryFolder folder = new TemporaryFolder();

// Creates a format for the given model type with no per-property serde edits.
private <T> ParquetFileFormat<T> format(Class<T> type, String... removes) {
    return format(type, Collections.emptyMap(), removes);
}

// Creates a format for the given model type, applying the per-property serde
// overrides in "edits" and dropping the properties named in "removes".
private <T> ParquetFileFormat<T> format(
        Class<T> type,
        Map<String, ? extends ValueSerde> edits,
        String... removes) {
    ParquetFileFormat<T> format = new ParquetFileFormat<>(
            "testing",
            new ParquetFormatConfiguration(),
            new DataModelDescriptorEditor(FieldPropertyDescriptor.extract(type))
                .editAll(edits)
                .removeAll(Arrays.asList(removes))
                .build());
    format.setConf(new org.apache.hadoop.conf.Configuration());
    return format;
}

/**
 * Test method for {@link AbstractParquetFileFormat#getSchema()}.
 */
@Test
public void format_name() {
    assertThat(
            format(MockSimple.class).getSchema().getStorageFormat(),
            equalTo((Object) BuiltinStorageFormatInfo.of(StorageFormatInfo.FormatKind.PARQUET)));
}

/**
 * Test method for {@link AbstractParquetFileFormat#getSupportedType()}.
 */
@Test
public void supported_type() {
    assertThat(format(MockSimple.class).getSupportedType(), equalTo((Object) MockSimple.class));
}

/**
 * {@code tblproperties} for default settings.
 */
@Test
public void table_properties_default() {
    Map<String, String> props = format(MockSimple.class).getSchema().getProperties();
    assertThat(props.size(), is(0));
}

/**
 * simple I/O.
* @throws Exception if failed */ @Test public void io_simple() throws Exception { ParquetFileFormat<MockSimple> format = format(MockSimple.class); MockSimple in = new MockSimple(100, "Hello, world!"); MockSimple out = restore(format, in); assertThat(out.number, is(new IntOption(100))); assertThat(out.string, is(new StringOption("Hello, world!"))); } /** * I/O with all supported types. * @throws Exception if failed */ @SuppressWarnings("deprecation") @Test public void io_types() throws Exception { Map<String, ValueSerde> edits = new HashMap<>(); edits.put("decimalOption", ValueSerdeFactory.getDecimal(10, 2)); ParquetFileFormat<MockTypes> format = format(MockTypes.class, edits); MockTypes in = new MockTypes(); in.booleanOption.modify(true); in.byteOption.modify((byte) 1); in.shortOption.modify((short) 2); in.intOption.modify(3); in.longOption.modify(4L); in.floatOption.modify(5f); in.doubleOption.modify(6d); in.dateOption.modify(new Date(2014, 6, 1)); in.dateTimeOption.modify(new DateTime(2014, 6, 1, 2, 3, 4)); in.stringOption.modify("Hello, world!"); in.decimalOption.modify(new BigDecimal("7.89")); MockTypes out = restore(format, in); assertThat(out.booleanOption, equalTo(in.booleanOption)); assertThat(out.byteOption, equalTo(in.byteOption)); assertThat(out.shortOption, equalTo(in.shortOption)); assertThat(out.intOption, equalTo(in.intOption)); assertThat(out.longOption, equalTo(in.longOption)); assertThat(out.floatOption, equalTo(in.floatOption)); assertThat(out.doubleOption, equalTo(in.doubleOption)); assertThat(out.dateOption, equalTo(in.dateOption)); assertThat(out.dateTimeOption, equalTo(in.dateTimeOption)); assertThat(out.stringOption, equalTo(in.stringOption)); assertThat(out.decimalOption, equalTo(in.decimalOption)); } /** * I/O with decimals. 
* @throws Exception if failed */ @SuppressWarnings("deprecation") @Test public void io_decimals() throws Exception { for (int p = 2; p <= HiveDecimal.MAX_PRECISION; p++) { Map<String, ValueSerde> edits = new HashMap<>(); edits.put("decimalOption", ValueSerdeFactory.getDecimal(p, 2)); ParquetFileFormat<MockTypes> format = format(MockTypes.class, edits); MockTypes in = new MockTypes(); if (p < 3) { in.decimalOption.modify(new BigDecimal("0.14")); } else { in.decimalOption.modify(new BigDecimal("3.14")); } MockTypes out = restore(format, in); assertThat(out.decimalOption, equalTo(in.decimalOption)); } } /** * I/O with decimals. * @throws Exception if failed */ @SuppressWarnings("deprecation") @Test public void io_decimals_int32() throws Exception { Map<String, ValueSerde> edits = new HashMap<>(); edits.put("decimalOption", ValueSerdeFactory.getDecimal(9, 2)); int count = 100; ParquetFileFormat<MockTypes> format = format(MockTypes.class, edits); List<MockTypes> inputs = new ArrayList<>(); for (int i = 0; i < count; i++) { MockTypes in = new MockTypes(); in.decimalOption.modify(new BigDecimal("7.89")); inputs.add(in); } List<MockTypes> outputs = restore(format, inputs); MockTypes sample = inputs.get(0); for (MockTypes out : outputs) { assertThat(out.decimalOption, equalTo(sample.decimalOption)); } } /** * I/O with decimals. 
* @throws Exception if failed */ @SuppressWarnings("deprecation") @Test public void io_decimals_int64() throws Exception { Map<String, ValueSerde> edits = new HashMap<>(); edits.put("decimalOption", ValueSerdeFactory.getDecimal(18, 2)); int count = 100; ParquetFileFormat<MockTypes> format = format(MockTypes.class, edits); List<MockTypes> inputs = new ArrayList<>(); for (int i = 0; i < count; i++) { MockTypes in = new MockTypes(); in.decimalOption.modify(new BigDecimal("7.89")); inputs.add(in); } List<MockTypes> outputs = restore(format, inputs); MockTypes sample = inputs.get(0); for (MockTypes out : outputs) { assertThat(out.decimalOption, equalTo(sample.decimalOption)); } } /** * I/O with decimals. * @throws Exception if failed */ @SuppressWarnings("deprecation") @Test public void io_decimals_binary() throws Exception { Map<String, ValueSerde> edits = new HashMap<>(); edits.put("decimalOption", ValueSerdeFactory.getDecimal(38, 2)); int count = 100; ParquetFileFormat<MockTypes> format = format(MockTypes.class, edits); List<MockTypes> inputs = new ArrayList<>(); for (int i = 0; i < count; i++) { MockTypes in = new MockTypes(); in.decimalOption.modify(new BigDecimal("-7.89")); inputs.add(in); } List<MockTypes> outputs = restore(format, inputs); MockTypes sample = inputs.get(0); for (MockTypes out : outputs) { assertThat(out.decimalOption, equalTo(sample.decimalOption)); } } /** * I/O with all supported types. 
* @throws Exception if failed */ @SuppressWarnings("deprecation") @Test public void io_types_large() throws Exception { Map<String, ValueSerde> edits = new HashMap<>(); edits.put("decimalOption", ValueSerdeFactory.getDecimal(10, 2)); int count = 1000; ParquetFileFormat<MockTypes> format = format(MockTypes.class, edits); List<MockTypes> inputs = new ArrayList<>(); for (int i = 0; i < count; i++) { MockTypes in = new MockTypes(); in.booleanOption.modify(true); in.byteOption.modify((byte) 1); in.shortOption.modify((short) 2); in.intOption.modify(3); in.longOption.modify(4L); in.floatOption.modify(5f); in.doubleOption.modify(6d); in.dateOption.modify(new Date(2014, 6, 1)); in.dateTimeOption.modify(new DateTime(2014, 6, 1, 2, 3, 4)); in.stringOption.modify("Hello, world!"); in.decimalOption.modify(new BigDecimal("7.89")); inputs.add(in); } List<MockTypes> outputs = restore(format, inputs); MockTypes sample = inputs.get(0); for (MockTypes out : outputs) { assertThat(out.booleanOption, equalTo(sample.booleanOption)); assertThat(out.byteOption, equalTo(sample.byteOption)); assertThat(out.shortOption, equalTo(sample.shortOption)); assertThat(out.intOption, equalTo(sample.intOption)); assertThat(out.longOption, equalTo(sample.longOption)); assertThat(out.floatOption, equalTo(sample.floatOption)); assertThat(out.doubleOption, equalTo(sample.doubleOption)); assertThat(out.dateOption, equalTo(sample.dateOption)); assertThat(out.dateTimeOption, equalTo(sample.dateTimeOption)); assertThat(out.stringOption, equalTo(sample.stringOption)); assertThat(out.decimalOption, equalTo(sample.decimalOption)); } } /** * I/O with all supported types with {@code null}s. 
* @throws Exception if failed */ @Test public void io_nulls() throws Exception { Map<String, ValueSerde> edits = new HashMap<>(); edits.put("decimalOption", ValueSerdeFactory.getDecimal(10, 2)); ParquetFileFormat<MockTypes> format = format(MockTypes.class, edits); MockTypes in = new MockTypes(); MockTypes out = restore(format, in); assertThat(out.booleanOption, equalTo(in.booleanOption)); assertThat(out.byteOption, equalTo(in.byteOption)); assertThat(out.shortOption, equalTo(in.shortOption)); assertThat(out.intOption, equalTo(in.intOption)); assertThat(out.longOption, equalTo(in.longOption)); assertThat(out.floatOption, equalTo(in.floatOption)); assertThat(out.doubleOption, equalTo(in.doubleOption)); assertThat(out.dateOption, equalTo(in.dateOption)); assertThat(out.dateTimeOption, equalTo(in.dateTimeOption)); assertThat(out.stringOption, equalTo(in.stringOption)); assertThat(out.decimalOption, equalTo(in.decimalOption)); } /** * I/O with fragment. * @throws Exception if failed */ @Test public void io_fragment() throws Exception { File file = folder.newFile(); Assume.assumeThat(file.delete() || file.exists() == false, is(true)); ParquetFileFormat<MockSimple> format = format(MockSimple.class); LocalFileSystem fs = FileSystem.getLocal(format.getConf()); try (ModelOutput<MockSimple> output = format.createOutput( MockSimple.class, fs, new Path(file.toURI()), new Counter())) { output.write(new MockSimple(100, "Hello, world!")); } assertThat(file.exists(), is(true)); FileStatus stat = fs.getFileStatus(new Path(file.toURI())); List<DirectInputFragment> fragments = format.computeInputFragments(new StripedDataFormat.InputContext( MockSimple.class, Arrays.asList(stat), fs, -1L, -1L, false, false)); assertThat(fragments, hasSize(1)); DirectInputFragment first = fragments.get(0); try (ModelInput<MockSimple> input = format.createInput( MockSimple.class, fs, new Path(first.getPath()), first.getOffset(), first.getSize(), new Counter())) { MockSimple buf = new MockSimple(); 
assertThat(input.readTo(buf), is(true)); assertThat(buf.number, is(new IntOption(100))); assertThat(buf.string, is(new StringOption("Hello, world!"))); assertThat(input.readTo(buf), is(false)); } } /** * I/O with {@code v2}. * @throws Exception if failed */ @Test public void io_v_2() throws Exception { ParquetFileFormat<MockSimple> format = format(MockSimple.class); format.getFormatConfiguration().withWriterVersion("PARQUET_2_0"); MockSimple in = new MockSimple(100, "Hello, world!"); MockSimple out = restore(format, in); assertThat(out.number, is(new IntOption(100))); assertThat(out.string, is(new StringOption("Hello, world!"))); } /** * Field mapping by its name. * @throws Exception if failed */ @SuppressWarnings("deprecation") @Test public void mapping_by_position() throws Exception { ParquetFileFormat<WithFour> f1 = format(WithFour.class, "col1", "col3"); ParquetFileFormat<WithFour> f2 = format(WithFour.class, "col2", "col3"); f2.getFormatConfiguration().withFieldMappingStrategy(FieldMappingStrategy.POSITION); WithFour in = new WithFour(); in.col0.modify(0); in.col1.modify(1); in.col2.modify(2); in.col3.modify(3); File file = save(f1, Arrays.asList(in)); List<WithFour> results = load(f2, file); assertThat(results, hasSize(1)); WithFour out = results.get(0); assertThat(out.col0, is(new IntOption(0))); assertThat(out.col1, is(new IntOption(2))); assertThat(out.col2, is(new IntOption())); assertThat(out.col3, is(new IntOption())); } /** * Field mapping by its name. 
* @throws Exception if failed */ @SuppressWarnings("deprecation") @Test public void mapping_by_name() throws Exception { ParquetFileFormat<WithFour> f1 = format(WithFour.class, "col1", "col3"); ParquetFileFormat<WithFour> f2 = format(WithFour.class, "col2", "col3"); f2.getFormatConfiguration().withFieldMappingStrategy(FieldMappingStrategy.NAME); WithFour in = new WithFour(); in.col0.modify(0); in.col1.modify(1); in.col2.modify(2); in.col3.modify(3); File file = save(f1, Arrays.asList(in)); List<WithFour> results = load(f2, file); assertThat(results, hasSize(1)); WithFour out = results.get(0); assertThat(out.col0, is(new IntOption(0))); assertThat(out.col1, is(new IntOption())); assertThat(out.col2, is(new IntOption())); assertThat(out.col3, is(new IntOption())); } /** * fail on missing source. * @throws Exception if failed */ @Test public void fail_on_missing_source() throws Exception { ParquetFileFormat<WithFour> f1 = format(WithFour.class, "col3"); ParquetFileFormat<WithFour> f2 = format(WithFour.class); f2.getFormatConfiguration() .withFieldMappingStrategy(FieldMappingStrategy.NAME) .withOnMissingSource(ExceptionHandlingStrategy.FAIL); WithFour in = new WithFour(); File file = save(f1, Arrays.asList(in)); try { load(f2, file); fail(); } catch (IllegalArgumentException e) { // ok. } } /** * fail on missing target. * @throws Exception if failed */ @Test public void fail_on_missing_target() throws Exception { ParquetFileFormat<WithFour> f1 = format(WithFour.class); ParquetFileFormat<WithFour> f2 = format(WithFour.class, "col3"); f2.getFormatConfiguration() .withFieldMappingStrategy(FieldMappingStrategy.NAME) .withOnMissingTarget(ExceptionHandlingStrategy.FAIL); WithFour in = new WithFour(); File file = save(f1, Arrays.asList(in)); try { load(f2, file); fail(); } catch (IllegalArgumentException e) { // ok. } } /** * ignore on incompatible type. 
* @throws Exception if failed */ @Test public void ignore_on_incompatible_type() throws Exception { ParquetFileFormat<MockSimple> f1 = format(MockSimple.class); ParquetFileFormat<MockSimpleWithLong> f2 = format(MockSimpleWithLong.class); f2.getFormatConfiguration() .withFieldMappingStrategy(FieldMappingStrategy.NAME) .withOnIncompatibleType(ExceptionHandlingStrategy.IGNORE); MockSimple in = new MockSimple(100, "Hello, world!"); File file = save(f1, Arrays.asList(in)); List<MockSimpleWithLong> results = load(f2, file); assertThat(results, hasSize(1)); MockSimpleWithLong out = results.get(0); assertThat(out.number, is(new LongOption())); assertThat(out.string, is(in.string)); } /** * fail on incompatible type. * @throws Exception if failed */ @Test public void fail_on_incompatible_type() throws Exception { ParquetFileFormat<MockSimple> f1 = format(MockSimple.class); ParquetFileFormat<MockSimpleWithLong> f2 = format(MockSimpleWithLong.class); f2.getFormatConfiguration() .withFieldMappingStrategy(FieldMappingStrategy.NAME) .withOnIncompatibleType(ExceptionHandlingStrategy.FAIL); MockSimple in = new MockSimple(100, "Hello, world!"); File file = save(f1, Arrays.asList(in)); try { load(f2, file); fail(); } catch (IllegalArgumentException e) { // ok. } } /** * using strings. 
* @throws Exception if failed */ @SuppressWarnings("deprecation") @Test public void io_string() throws Exception { Map<String, ValueSerde> edits = new HashMap<>(); edits.put("decimal", StringValueSerdeFactory.DECIMAL); edits.put("date", StringValueSerdeFactory.DATE); edits.put("datetime", StringValueSerdeFactory.DATETIME); ParquetFileFormat<WithStringSupports> format = format(WithStringSupports.class, edits); WithStringSupports in = new WithStringSupports(); in.decimal.modify(new BigDecimal("123.45")); in.date.modify(new Date(2014, 7, 1)); in.datetime.modify(new DateTime(2014, 7, 1, 12, 34, 56)); WithStringSupports out = restore(format, in); assertThat(out.decimal, is(in.decimal)); assertThat(out.date, is(in.date)); assertThat(out.datetime, is(in.datetime)); } /** * using strings with dictionary. * @throws Exception if failed */ @SuppressWarnings("deprecation") @Test public void io_string_dict() throws Exception { Map<String, ValueSerde> edits = new HashMap<>(); edits.put("decimal", StringValueSerdeFactory.DECIMAL); edits.put("date", StringValueSerdeFactory.DATE); edits.put("datetime", StringValueSerdeFactory.DATETIME); ParquetFileFormat<WithStringSupports> format = format(WithStringSupports.class, edits); int count = 1000; List<WithStringSupports> inputs = new ArrayList<>(); for (int i = 0; i < count; i++) { WithStringSupports object = new WithStringSupports(); object.decimal.modify(new BigDecimal("123.45")); object.date.modify(new Date(2014, 7, 1)); object.datetime.modify(new DateTime(2014, 7, 1, 12, 34, 56)); inputs.add(object); } WithStringSupports sample = inputs.get(0); List<WithStringSupports> outputs = restore(format, inputs); for (WithStringSupports out : outputs) { assertThat(out.decimal, is(sample.decimal)); assertThat(out.date, is(sample.date)); assertThat(out.datetime, is(sample.datetime)); } } /** * using timestamps. 
* @throws Exception if failed */ @SuppressWarnings("deprecation") @Test public void io_timestamp() throws Exception { Map<String, ValueSerde> edits = new HashMap<>(); edits.put("date", TimestampValueSerdeFactory.DATE); edits.put("datetime", ValueSerdeFactory.DATETIME); ParquetFileFormat<WithTimestampSupports> format = format(WithTimestampSupports.class, edits); WithTimestampSupports in = new WithTimestampSupports(); in.date.modify(new Date(2015, 7, 1)); in.datetime.modify(new DateTime(2015, 7, 1, 12, 34, 56)); WithTimestampSupports out = restore(format, in); assertThat(out.date, is(in.date)); assertThat(out.datetime, is(in.datetime)); } /** * using char. * @throws Exception if failed */ @SuppressWarnings("deprecation") @Test public void io_char() throws Exception { Map<String, ValueSerde> edits = new HashMap<>(); edits.put("value", ValueSerdeFactory.getChar(10)); ParquetFileFormat<WithString> format = format(WithString.class, edits); WithString in = new WithString(); in.value.modify("Hello, world!"); WithString out = restore(format, in); assertThat(out.value, is(new StringOption("Hello, world!".substring(0, 10)))); } /** * using varchar. * @throws Exception if failed */ @SuppressWarnings("deprecation") @Test public void io_varchar() throws Exception { Map<String, ValueSerde> edits = new HashMap<>(); edits.put("value", ValueSerdeFactory.getVarchar(10)); ParquetFileFormat<WithString> format = format(WithString.class, edits); WithString in = new WithString(); in.value.modify("Hello, world!"); WithString out = restore(format, in); assertThat(out.value, is(new StringOption("Hello, world!".substring(0, 10)))); } /** * using varchar. 
* @throws Exception if failed */ @SuppressWarnings("deprecation") @Test public void io_varchar_dict() throws Exception { Map<String, ValueSerde> edits = new HashMap<>(); edits.put("value", ValueSerdeFactory.getVarchar(10)); ParquetFileFormat<WithString> format = format(WithString.class, edits); int count = 1000; List<WithString> inputs = new ArrayList<>(); for (int i = 0; i < count; i++) { WithString in = new WithString(); in.value.modify("Hello"); inputs.add(in); } List<WithString> outputs = restore(format, inputs); for (WithString out : outputs) { assertThat(out.value, is(new StringOption("Hello"))); count--; } assertThat(count, is(0)); } /** * loading char type which generated by hive. * @throws Exception if failed */ @Test public void format_char() throws Exception { checkString("char-10-hello-parquet.parquet", ValueSerdeFactory.getChar(10), "Hello, Parquet!".substring(0, 10)); } /** * loading varchar type which generated by hive. * @throws Exception if failed */ @Test public void format_varchar() throws Exception { checkString("varchar-10-hello-parquet.parquet", ValueSerdeFactory.getVarchar(10), "Hello, Parquet!".substring(0, 10)); } private void checkString(String file, ValueSerde serde, String expected) throws IOException, InterruptedException { WithString buf = new WithString(); ParquetFileFormat<WithString> format = format( WithString.class, Collections.singletonMap("value", serde)); try (ModelInput<WithString> input = load(format, file)) { assertThat(input.readTo(buf), is(true)); assertThat(input.readTo(new WithString()), is(false)); } assertThat(buf.value, is(new StringOption(expected))); } /** * loading decimal type which generated by hive. 
 * @throws Exception if failed
 */
@Test
public void format_decimal() throws Exception {
    // cover the int32, int64 and binary physical encodings of decimal
    checkDecimal("decimal-9_2-3_14.parquet");
    checkDecimal("decimal-18_2-3_14.parquet");
    checkDecimal("decimal-38_2-3_14.parquet");
}

// Loads a Hive-generated decimal file and verifies its single record.
// The resource name encodes the expectation:
// decimal-<precision>_<scale>-<value>.parquet, where '_' inside <value>
// stands for the decimal point.
private void checkDecimal(String file) throws IOException, InterruptedException {
    Pattern p = Pattern.compile("decimal-(\\d+)_(\\d+)-(.+)\\.parquet");
    Matcher matcher = p.matcher(file);
    assertThat(matcher.matches(), is(true));
    int precision = Integer.parseInt(matcher.group(1));
    int scale = Integer.parseInt(matcher.group(2));
    WithDecimal buf = new WithDecimal();
    ParquetFileFormat<WithDecimal> format = format(
            WithDecimal.class,
            Collections.singletonMap("value", ValueSerdeFactory.getDecimal(precision, scale)));
    try (ModelInput<WithDecimal> input = load(format, file)) {
        assertThat(input.readTo(buf), is(true));
        // exactly one record is expected in each test file
        assertThat(input.readTo(new WithDecimal()), is(false));
    }
    BigDecimal expected = new BigDecimal(matcher.group(3).replace('_', '.'));
    assertThat(buf.value, is(new DecimalOption(expected)));
}

/**
 * loading timestamp type which was generated by Hive.
* @throws Exception if failed */ @Test public void format_timestamp() throws Exception { checkDateTime("timestamp-1970-01-01-00-00-00.parquet"); checkDateTime("timestamp-1970-01-01-12-34-56.parquet"); checkDateTime("timestamp-2014-12-01-23-59-59.parquet"); } @SuppressWarnings("deprecation") private void checkDateTime(String file) throws IOException, InterruptedException { Pattern p = Pattern.compile("timestamp-(\\d+)-(\\d+)-(\\d+)-(\\d+)-(\\d+)-(\\d+)\\.parquet"); Matcher matcher = p.matcher(file); assertThat(matcher.matches(), is(true)); WithDateTime buf = new WithDateTime(); try (ModelInput<WithDateTime> input = load(WithDateTime.class, file)) { assertThat(input.readTo(buf), is(true)); assertThat(input.readTo(new WithDateTime()), is(false)); } // fix timezone buf.value.modify(buf.value.get().getElapsedSeconds() + TESTDATA_TIMEZONE_OFFSET - LOCAL_TIMEZONE_OFFSET); DateTime expected = new DateTime( Integer.parseInt(matcher.group(1)), Integer.parseInt(matcher.group(2)), Integer.parseInt(matcher.group(3)), Integer.parseInt(matcher.group(4)), Integer.parseInt(matcher.group(5)), Integer.parseInt(matcher.group(6))); assertThat(buf.value, is(new DateTimeOption(expected))); } /** * loading date type which generated be hive. 
* @throws Exception if failed */ @Test public void format_date() throws Exception { checkDate("date-1970-01-01.parquet"); checkDate("date-2015-12-31.parquet"); checkDate("date-1995-05-23.parquet"); } private void checkDate(String file) throws IOException, InterruptedException { Pattern p = Pattern.compile("date-(\\d+)-(\\d+)-(\\d+)\\.parquet"); Matcher matcher = p.matcher(file); assertThat(matcher.matches(), is(true)); WithDate buf = new WithDate(); try (ModelInput<WithDate> input = load(WithDate.class, file)) { assertThat(input.readTo(buf), is(true)); assertThat(input.readTo(new WithDate()), is(false)); } Date expected = new Date( Integer.parseInt(matcher.group(1)), Integer.parseInt(matcher.group(2)), Integer.parseInt(matcher.group(3))); assertThat(buf.value, is(new DateOption(expected))); } private <T> ModelInput<T> load(Class<T> modelType, String name) throws IOException, InterruptedException { ParquetFileFormat<T> format = format(modelType); return load(format, name); } private <T> ModelInput<T> load(ParquetFileFormat<T> format, String name) throws IOException, InterruptedException { File target = folder.newFile(); try (InputStream in = getClass().getResourceAsStream(name)) { assertThat(in, is(notNullValue())); IOUtils.copyBytes(in, new FileOutputStream(target), 1024, true); } FileSystem fs = FileSystem.getLocal(format.getConf()); return format.createInput( format.getSupportedType(), fs, new Path(target.toURI()), 0, -1, new Counter()); } private <T> T restore(ParquetFileFormat<T> format, T value) throws IOException, InterruptedException { List<T> in = new ArrayList<>(); in.add(value); return restore(format, in).get(0); } private <T> List<T> restore(ParquetFileFormat<T> format, List<T> values) throws IOException, InterruptedException { File file = save(format, values); List<T> results = load(format, file); assertThat(values, hasSize(results.size())); return results; } private <T> File save(ParquetFileFormat<T> format, List<T> values) throws IOException, 
InterruptedException { File file = folder.newFile(); Assume.assumeThat(file.delete() || file.exists() == false, is(true)); LocalFileSystem fs = FileSystem.getLocal(format.getConf()); try (ModelOutput<T> output = format.createOutput( format.getSupportedType(), fs, new Path(file.toURI()), new Counter())) { for (T value : values) { output.write(value); } } assertThat(file.exists(), is(true)); return file; } private <T> List<T> load(ParquetFileFormat<T> format, File file) throws IOException, InterruptedException { LocalFileSystem fs = FileSystem.getLocal(format.getConf()); try (ModelInput<T> input = format.createInput( format.getSupportedType(), fs, new Path(file.toURI()), 0, file.length(), new Counter())) { List<T> results = new ArrayList<>(); while (true) { @SuppressWarnings("unchecked") T value = (T) format.getDataModelDescriptor().createDataModelObject(); if (input.readTo(value) == false) { break; } results.add(value); } return results; } } }
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.yarn.server.resourcemanager.scheduler; import static org.apache.hadoop.test.MetricsAsserts.assertCounter; import static org.apache.hadoop.test.MetricsAsserts.assertGauge; import static org.apache.hadoop.test.MetricsAsserts.getMetrics; import static org.apache.hadoop.test.MockitoMaker.make; import static org.apache.hadoop.test.MockitoMaker.stub; import static org.junit.Assert.assertNull; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.metrics2.MetricsRecordBuilder; import org.apache.hadoop.metrics2.MetricsSource; import org.apache.hadoop.metrics2.MetricsSystem; import org.apache.hadoop.metrics2.impl.MetricsSystemImpl; import org.apache.hadoop.test.MetricsAsserts; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.server.resourcemanager.MockRM; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptState; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fifo.FifoScheduler; import 
org.apache.hadoop.yarn.server.utils.BuilderUtils; import org.apache.hadoop.yarn.util.resource.Resources; import org.junit.Assert; import org.junit.Before; import org.junit.Test; public class TestQueueMetrics { static final int GB = 1024; // MB private static final Configuration conf = new Configuration(); private MetricsSystem ms; @Before public void setUp() { ms = new MetricsSystemImpl(); QueueMetrics.clearQueueMetrics(); } @Test public void testDefaultSingleQueueMetrics() { String queueName = "single"; String user = "alice"; QueueMetrics metrics = QueueMetrics.forQueue(ms, queueName, null, false, conf); MetricsSource queueSource= queueSource(ms, queueName); AppSchedulingInfo app = mockApp(user); metrics.submitApp(user, 1); MetricsSource userSource = userSource(ms, queueName, user); checkApps(queueSource, 1, 1, 0, 0, 0, 0, true); metrics.setAvailableResourcesToQueue(Resources.createResource(100*GB, 100)); metrics.incrPendingResources(user, 5, Resources.createResource(15*GB, 15)); // Available resources is set externally, as it depends on dynamic // configurable cluster/queue resources checkResources(queueSource, 0, 0, 0, 0, 0, 100*GB, 100, 15*GB, 15, 5, 0, 0, 0); metrics.incrAppsRunning(app, user); checkApps(queueSource, 1, 0, 1, 0, 0, 0, true); metrics.allocateResources(user, 3, Resources.createResource(2*GB, 2)); checkResources(queueSource, 6*GB, 6, 3, 3, 0, 100*GB, 100, 9*GB, 9, 2, 0, 0, 0); metrics.releaseResources(user, 1, Resources.createResource(2*GB, 2)); checkResources(queueSource, 4*GB, 4, 2, 3, 1, 100*GB, 100, 9*GB, 9, 2, 0, 0, 0); metrics.finishApp(app, RMAppAttemptState.FINISHED); checkApps(queueSource, 1, 0, 0, 1, 0, 0, true); assertNull(userSource); } @Test public void testQueueAppMetricsForMultipleFailures() { String queueName = "single"; String user = "alice"; QueueMetrics metrics = QueueMetrics.forQueue(ms, queueName, null, false, new Configuration()); MetricsSource queueSource = queueSource(ms, queueName); AppSchedulingInfo app = mockApp(user); 
// NOTE(review): continuation of a test method whose opening is above this chunk.
// It drives one app through submit -> running -> FAILED twice (retries), then a
// final submit -> running -> FINISHED, checking queue-level counters each step.
// checkApps argument order: (source, submitted, pending, running, completed,
// failed, killed, collectAll).
    metrics.submitApp(user, 1);
    MetricsSource userSource = userSource(ms, queueName, user);
    checkApps(queueSource, 1, 1, 0, 0, 0, 0, true);
    metrics.incrAppsRunning(app, user);
    checkApps(queueSource, 1, 0, 1, 0, 0, 0, true);
    metrics.finishApp(app, RMAppAttemptState.FAILED);
    checkApps(queueSource, 1, 0, 0, 0, 1, 0, true);
    // As the application has failed, framework retries the same application
    // based on configuration
    metrics.submitApp(user, 2);
    checkApps(queueSource, 1, 1, 0, 0, 0, 0, true);
    metrics.incrAppsRunning(app, user);
    checkApps(queueSource, 1, 0, 1, 0, 0, 0, true);
    // Suppose say application has failed this time as well.
    metrics.finishApp(app, RMAppAttemptState.FAILED);
    checkApps(queueSource, 1, 0, 0, 0, 1, 0, true);
    // As the application has failed, framework retries the same application
    // based on configuration
    metrics.submitApp(user, 3);
    checkApps(queueSource, 1, 1, 0, 0, 0, 0, true);
    metrics.incrAppsRunning(app, user);
    checkApps(queueSource, 1, 0, 1, 0, 0, 0, true);
    // Suppose say application has finished.
    metrics.finishApp(app, RMAppAttemptState.FINISHED);
    checkApps(queueSource, 1, 0, 0, 1, 0, 0, true);
    // Queue was registered without user metrics, so no per-user source exists.
    assertNull(userSource);
  }

  /**
   * Single queue registered WITH user metrics: every app/resource change must be
   * mirrored in both the queue source and the per-user source.
   */
  @Test public void testSingleQueueWithUserMetrics() {
    String queueName = "single2";
    String user = "dodo";
    QueueMetrics metrics =
        QueueMetrics.forQueue(ms, queueName, null, true, conf);
    MetricsSource queueSource = queueSource(ms, queueName);
    AppSchedulingInfo app = mockApp(user);

    metrics.submitApp(user, 1);
    MetricsSource userSource = userSource(ms, queueName, user);

    checkApps(queueSource, 1, 1, 0, 0, 0, 0, true);
    checkApps(userSource, 1, 1, 0, 0, 0, 0, true);

    metrics.setAvailableResourcesToQueue(Resources.createResource(100*GB, 100));
    metrics.setAvailableResourcesToUser(user, Resources.createResource(10*GB, 10));
    metrics.incrPendingResources(user, 5, Resources.createResource(15*GB, 15));
    // Available resources is set externally, as it depends on dynamic
    // configurable cluster/queue resources
    checkResources(queueSource, 0, 0, 0, 0, 0, 100*GB, 100, 15*GB, 15, 5, 0, 0, 0);
    checkResources(userSource, 0, 0, 0, 0, 0, 10*GB, 10, 15*GB, 15, 5, 0, 0, 0);

    metrics.incrAppsRunning(app, user);
    checkApps(queueSource, 1, 0, 1, 0, 0, 0, true);
    checkApps(userSource, 1, 0, 1, 0, 0, 0, true);

    // Allocating 3 containers of 2GB/2cores each moves 6GB/6c from pending to
    // allocated on both sources.
    metrics.allocateResources(user, 3, Resources.createResource(2*GB, 2));
    checkResources(queueSource, 6*GB, 6, 3, 3, 0, 100*GB, 100, 9*GB, 9, 2, 0, 0, 0);
    checkResources(userSource, 6*GB, 6, 3, 3, 0, 10*GB, 10, 9*GB, 9, 2, 0, 0, 0);

    metrics.releaseResources(user, 1, Resources.createResource(2*GB, 2));
    checkResources(queueSource, 4*GB, 4, 2, 3, 1, 100*GB, 100, 9*GB, 9, 2, 0, 0, 0);
    checkResources(userSource, 4*GB, 4, 2, 3, 1, 10*GB, 10, 9*GB, 9, 2, 0, 0, 0);

    metrics.finishApp(app, RMAppAttemptState.FINISHED);
    checkApps(queueSource, 1, 0, 0, 1, 0, 0, true);
    checkApps(userSource, 1, 0, 0, 1, 0, 0, true);
  }

  /**
   * Parent/leaf queue hierarchy with user metrics: changes on the leaf must
   * propagate to the parent queue source and both per-user sources.
   */
  @Test public void testTwoLevelWithUserMetrics() {
    String parentQueueName = "root";
    String leafQueueName = "root.leaf";
    String user = "alice";

    QueueMetrics parentMetrics =
        QueueMetrics.forQueue(ms, parentQueueName, null, true, conf);
    Queue parentQueue = make(stub(Queue.class).returning(parentMetrics).
        from.getMetrics());
    QueueMetrics metrics =
        QueueMetrics.forQueue(ms, leafQueueName, parentQueue, true, conf);
    MetricsSource parentQueueSource = queueSource(ms, parentQueueName);
    MetricsSource queueSource = queueSource(ms, leafQueueName);
    AppSchedulingInfo app = mockApp(user);

    metrics.submitApp(user, 1);
    MetricsSource userSource = userSource(ms, leafQueueName, user);
    MetricsSource parentUserSource = userSource(ms, parentQueueName, user);

    checkApps(queueSource, 1, 1, 0, 0, 0, 0, true);
    checkApps(parentQueueSource, 1, 1, 0, 0, 0, 0, true);
    checkApps(userSource, 1, 1, 0, 0, 0, 0, true);
    checkApps(parentUserSource, 1, 1, 0, 0, 0, 0, true);

    parentMetrics.setAvailableResourcesToQueue(Resources.createResource(100*GB, 100));
    metrics.setAvailableResourcesToQueue(Resources.createResource(100*GB, 100));
    parentMetrics.setAvailableResourcesToUser(user, Resources.createResource(10*GB, 10));
    metrics.setAvailableResourcesToUser(user, Resources.createResource(10*GB, 10));
    metrics.incrPendingResources(user, 5, Resources.createResource(15*GB, 15));

    checkResources(queueSource, 0, 0, 0, 0, 0, 100*GB, 100, 15*GB, 15, 5, 0, 0, 0);
    checkResources(parentQueueSource, 0, 0, 0, 0, 0, 100*GB, 100, 15*GB, 15, 5, 0, 0, 0);
    checkResources(userSource, 0, 0, 0, 0, 0, 10*GB, 10, 15*GB, 15, 5, 0, 0, 0);
    checkResources(parentUserSource, 0, 0, 0, 0, 0, 10*GB, 10, 15*GB, 15, 5, 0, 0, 0);

    metrics.incrAppsRunning(app, user);
    checkApps(queueSource, 1, 0, 1, 0, 0, 0, true);
    checkApps(userSource, 1, 0, 1, 0, 0, 0, true);

    metrics.allocateResources(user, 3, Resources.createResource(2*GB, 2));
    metrics.reserveResource(user, Resources.createResource(3*GB, 3));
    // Available resources is set externally, as it depends on dynamic
    // configurable cluster/queue resources
    checkResources(queueSource, 6*GB, 6, 3, 3, 0, 100*GB, 100, 9*GB, 9, 2, 3*GB, 3, 1);
    checkResources(parentQueueSource,
        6*GB, 6, 3, 3, 0, 100*GB, 100, 9*GB, 9, 2, 3*GB, 3, 1);
    checkResources(userSource, 6*GB, 6, 3, 3, 0, 10*GB, 10, 9*GB, 9, 2, 3*GB, 3, 1);
    checkResources(parentUserSource,
        6*GB, 6, 3, 3, 0, 10*GB, 10, 9*GB, 9, 2, 3*GB, 3, 1);

    metrics.releaseResources(user, 1, Resources.createResource(2*GB, 2));
    metrics.unreserveResource(user, Resources.createResource(3*GB, 3));
    checkResources(queueSource, 4*GB, 4, 2, 3, 1, 100*GB, 100, 9*GB, 9, 2, 0, 0, 0);
    checkResources(parentQueueSource,
        4*GB, 4, 2, 3, 1, 100*GB, 100, 9*GB, 9, 2, 0, 0, 0);
    checkResources(userSource, 4*GB, 4, 2, 3, 1, 10*GB, 10, 9*GB, 9, 2, 0, 0, 0);
    checkResources(parentUserSource,
        4*GB, 4, 2, 3, 1, 10*GB, 10, 9*GB, 9, 2, 0, 0, 0);

    metrics.finishApp(app, RMAppAttemptState.FINISHED);
    checkApps(queueSource, 1, 0, 0, 1, 0, 0, true);
    checkApps(parentQueueSource, 1, 0, 0, 1, 0, 0, true);
    checkApps(userSource, 1, 0, 0, 1, 0, 0, true);
    checkApps(parentUserSource, 1, 0, 0, 1, 0, 0, true);
  }

  /**
   * Re-registering the same queue name must hit the QueueMetrics cache instead
   * of blowing up the metrics system with a duplicate-source error.
   */
  @Test public void testMetricsCache() {
    MetricsSystem ms = new MetricsSystemImpl("cache");
    ms.start();
    try {
      String p1 = "root1";
      String leafQueueName = "root1.leaf";

      QueueMetrics p1Metrics =
          QueueMetrics.forQueue(ms, p1, null, true, conf);
      Queue parentQueue1 = make(stub(Queue.class).returning(p1Metrics).
          from.getMetrics());
      QueueMetrics metrics =
          QueueMetrics.forQueue(ms, leafQueueName, parentQueue1, true, conf);

      Assert.assertNotNull("QueueMetrics for A shoudn't be null", metrics);

      // Re-register to check for cache hit, shouldn't blow up metrics-system...
      // also, verify parent-metrics
      QueueMetrics alterMetrics =
          QueueMetrics.forQueue(ms, leafQueueName, parentQueue1, true, conf);

      Assert.assertNotNull("QueueMetrics for alterMetrics shoudn't be null",
          alterMetrics);
    } finally {
      // Always tear down the locally started metrics system.
      ms.shutdown();
    }
  }

  /** Root queue metrics must exist immediately after RM/scheduler init. */
  @Test public void testMetricsInitializedOnRMInit() {
    YarnConfiguration conf = new YarnConfiguration();
    conf.setClass(YarnConfiguration.RM_SCHEDULER,
        FifoScheduler.class, ResourceScheduler.class);
    MockRM rm = new MockRM(conf);
    QueueMetrics metrics = rm.getResourceScheduler().getRootQueueMetrics();
    checkApps(metrics, 0, 0, 0, 0, 0, 0, true);
    MetricsAsserts.assertGauge("ReservedContainers", 0, metrics);
  }

  // This is to test all metrics can consistently show up if specified true to
  // collect all metrics, even though they are not modified from last time they
  // are collected. If not collecting all metrics, only modified metrics will show up.
  @Test public void testCollectAllMetrics() {
    String queueName = "single";
    QueueMetrics.forQueue(ms, queueName, null, false, conf);
    MetricsSource queueSource = queueSource(ms, queueName);

    checkApps(queueSource, 0, 0, 0, 0, 0, 0, true);
    try {
      // do not collect all metrics
      checkApps(queueSource, 0, 0, 0, 0, 0, 0, false);
      Assert.fail();
    } catch (AssertionError e) {
      // Unmodified metrics are absent when not collecting all; the assert
      // helper is expected to fail with this message.
      Assert.assertTrue(e.getMessage().contains(
          "Expected exactly one metric for name "));
    }
    // collect all metrics
    checkApps(queueSource, 0, 0, 0, 0, 0, 0, true);
  }

  /**
   * Asserts the app-lifecycle counters/gauges exposed by {@code source}.
   *
   * @param all pass {@code true} to force collection of all metrics (including
   *            unmodified ones); see {@link #testCollectAllMetrics()}.
   */
  public static void checkApps(MetricsSource source, int submitted, int pending,
      int running, int completed, int failed, int killed, boolean all) {
    MetricsRecordBuilder rb = getMetrics(source, all);
    assertCounter("AppsSubmitted", submitted, rb);
    assertGauge("AppsPending", pending, rb);
    assertGauge("AppsRunning", running, rb);
    assertCounter("AppsCompleted", completed, rb);
    assertGauge("AppsFailed", failed, rb);
    assertCounter("AppsKilled", killed, rb);
  }

  /**
   * Asserts the resource gauges/counters (allocated, available, pending and
   * reserved MB/VCores/containers) exposed by {@code source}.
   */
  public static void checkResources(MetricsSource source, int allocatedMB,
      int allocatedCores, int allocCtnrs, long aggreAllocCtnrs,
      long aggreReleasedCtnrs, int availableMB, int availableCores, int pendingMB,
      int pendingCores, int pendingCtnrs, int reservedMB, int reservedCores,
      int reservedCtnrs) {
    MetricsRecordBuilder rb = getMetrics(source);
    assertGauge("AllocatedMB", allocatedMB, rb);
    assertGauge("AllocatedVCores", allocatedCores, rb);
    assertGauge("AllocatedContainers", allocCtnrs, rb);
    assertCounter("AggregateContainersAllocated", aggreAllocCtnrs, rb);
    assertCounter("AggregateContainersReleased", aggreReleasedCtnrs, rb);
    assertGauge("AvailableMB", availableMB, rb);
    assertGauge("AvailableVCores", availableCores, rb);
    assertGauge("PendingMB", pendingMB, rb);
    assertGauge("PendingVCores", pendingCores, rb);
    assertGauge("PendingContainers", pendingCtnrs, rb);
    assertGauge("ReservedMB", reservedMB, rb);
    assertGauge("ReservedVCores", reservedCores, rb);
    assertGauge("ReservedContainers", reservedCtnrs, rb);
  }

  /** Builds a Mockito-mocked scheduling-info for {@code user} (attempt id 1/1/1). */
  private static AppSchedulingInfo mockApp(String user) {
    AppSchedulingInfo app = mock(AppSchedulingInfo.class);
    when(app.getUser()).thenReturn(user);
    ApplicationId appId = BuilderUtils.newApplicationId(1, 1);
    ApplicationAttemptId id = BuilderUtils.newApplicationAttemptId(appId, 1);
    when(app.getApplicationAttemptId()).thenReturn(id);
    return app;
  }

  /** Looks up the metrics source registered for a queue; null if absent. */
  public static MetricsSource queueSource(MetricsSystem ms, String queue) {
    MetricsSource s = ms.getSource(QueueMetrics.sourceName(queue).toString());
    return s;
  }

  /** Looks up the per-user metrics source for a queue; null if absent. */
  public static MetricsSource userSource(MetricsSystem ms, String queue,
      String user) {
    MetricsSource s = ms.getSource(QueueMetrics.sourceName(queue).
        append(",user=").append(user).toString());
    return s;
  }
}
/* * Copyright 2014 Madhu Siddalingaiah * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.madhu.mr.view; import java.awt.BorderLayout; import java.awt.FlowLayout; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.io.File; import java.io.FileReader; import java.lang.reflect.Modifier; import java.net.URL; import java.util.ArrayList; import java.util.Enumeration; import java.util.Properties; import javax.swing.JButton; import javax.swing.JComboBox; import javax.swing.JFileChooser; import javax.swing.JFrame; import javax.swing.JLabel; import javax.swing.JMenu; import javax.swing.JMenuBar; import javax.swing.JMenuItem; import javax.swing.JOptionPane; import javax.swing.JPanel; import javax.swing.JSplitPane; import javax.swing.JTextArea; import javax.swing.JTextField; import com.madhu.mr.KeyValueReader; import com.madhu.mr.KeyValueWriter; import com.madhu.mr.MapReduceJob; public class MainFrame extends JFrame implements ActionListener { private JMenuItem inputMenuItem; private JMenuItem outputMenuItem; private JMenuItem startMenuItem; private JMenuItem tipsMenuItem; private JMenuItem aboutMenuItem; private File inputDirectory; private File outputDirectory; private JButton goButton; private JComboBox jobCombo; private JLabel lblMapreduceJob; private JLabel lblReducers; private JPanel centerPanel; private JPanel inputPanel; private JPanel outputPanel; private JLabel lblInputDirectory; private JTextField inputDirTF; private JButton 
inputDirButton; private JLabel lblOutputDirectory; private JTextField outputDirTF; private JButton outputDirButton; private JPanel jobPanel; private JTextArea jobTextArea; private JPanel northPanel; private JPanel filePanel; private JSplitPane splitPane; private FormPanel formPanel; private JComboBox<String> inputCombo; private JComboBox<String> outputCombo; private JMenuItem exitMenuItem; public MainFrame(String title, Properties props) throws Exception { super(title); setDefaultCloseOperation(EXIT_ON_CLOSE); getContentPane().setLayout(new BorderLayout()); northPanel = new JPanel(); getContentPane().add(northPanel, BorderLayout.NORTH); lblMapreduceJob = new JLabel("MapReduce Job"); northPanel.add(lblMapreduceJob); jobCombo = new JComboBox(); northPanel.add(jobCombo); jobCombo.addActionListener(this); JPanel southPanel = new JPanel(); getContentPane().add(southPanel, BorderLayout.SOUTH); southPanel.setLayout(new FlowLayout(FlowLayout.CENTER, 5, 5)); goButton = new JButton("Go!"); goButton.setEnabled(false); southPanel.add(goButton); JMenuBar menuBar = new JMenuBar(); JMenu fileMenu = new JMenu("File"); fileMenu.add(inputMenuItem = new JMenuItem("Input directory...")); inputMenuItem.addActionListener(this); fileMenu.add(outputMenuItem = new JMenuItem("Output directory...")); outputMenuItem.addActionListener(this); fileMenu.addSeparator(); fileMenu.add(exitMenuItem = new JMenuItem("Exit")); exitMenuItem.addActionListener(this); JMenu editMenu = new JMenu("Edit"); JMenu helpMenu = new JMenu("Help"); helpMenu.add(startMenuItem = new JMenuItem("Getting started")); startMenuItem.addActionListener(this); helpMenu.add(tipsMenuItem = new JMenuItem("Tips and tricks...")); tipsMenuItem.addActionListener(this); helpMenu.add(aboutMenuItem = new JMenuItem("About")); aboutMenuItem.addActionListener(this); menuBar.add(fileMenu); menuBar.add(editMenu); menuBar.add(helpMenu); setJMenuBar(menuBar); centerPanel = new JPanel(); getContentPane().add(centerPanel, BorderLayout.CENTER); 
centerPanel.setLayout(new BorderLayout(0, 0)); splitPane = new JSplitPane(JSplitPane.VERTICAL_SPLIT); centerPanel.add(splitPane, BorderLayout.CENTER); jobTextArea = new JTextArea(3, 20); jobTextArea.setWrapStyleWord(true); splitPane.setTopComponent(jobTextArea); jobPanel = new JPanel(); splitPane.setBottomComponent(jobPanel); filePanel = new JPanel(); centerPanel.add(filePanel, BorderLayout.SOUTH); filePanel.setLayout(new BorderLayout(0, 0)); inputPanel = new JPanel(); filePanel.add(inputPanel, BorderLayout.NORTH); FlowLayout flowLayout = (FlowLayout) inputPanel.getLayout(); flowLayout.setAlignment(FlowLayout.RIGHT); lblInputDirectory = new JLabel("Input directory"); inputPanel.add(lblInputDirectory); inputDirTF = new JTextField(); inputPanel.add(inputDirTF); inputDirTF.setColumns(40); inputDirTF.setEditable(false); inputDirButton = new JButton("..."); inputPanel.add(inputDirButton); inputCombo = new JComboBox<String>(KeyValueReader.TYPES); inputPanel.add(inputCombo); outputPanel = new JPanel(); filePanel.add(outputPanel, BorderLayout.SOUTH); FlowLayout flowLayout_1 = (FlowLayout) outputPanel.getLayout(); flowLayout_1.setAlignment(FlowLayout.RIGHT); lblOutputDirectory = new JLabel("Output directory"); outputPanel.add(lblOutputDirectory); outputDirTF = new JTextField(); outputPanel.add(outputDirTF); outputDirTF.setColumns(40); outputDirTF.setEditable(false); outputDirButton = new JButton("..."); outputPanel.add(outputDirButton); outputCombo = new JComboBox<String>(KeyValueWriter.TYPES); outputPanel.add(outputCombo); outputDirButton.addActionListener(this); inputDirButton.addActionListener(this); int index = 1; String clazz; while ((clazz = props.getProperty(String.format("job.class.%d", index))) != null) { jobCombo.addItem(Class.forName(clazz).newInstance()); index += 1; } goButton.addActionListener(this); doJobSelect(); pack(); } // FIXME: this doesn't seem to work with jar files public ArrayList<Class> getSubclasses(Class superClass) throws Exception { 
ArrayList<Class> list = new ArrayList<Class>(); ClassLoader cl = Thread.currentThread().getContextClassLoader(); Enumeration<URL> res = cl.getResources(""); System.out.println(res); while (res.hasMoreElements()) { URL url = res.nextElement(); String dir = url.getFile(); findClasses(dir, "", list, superClass); } return list; } private void findClasses(String root, String path, ArrayList<Class> list, Class superClass) throws Exception { File file = new File(root, path); if (file.isFile() && path.endsWith(".class")) { path = path.substring(1, path.length() - 6); Class<?> clazz = Class.forName(path.replace('/', '.')); if (superClass.isAssignableFrom(clazz) && !Modifier.isAbstract(clazz.getModifiers())) { list.add(clazz); } } else if (file.isDirectory()) { String[] pathList = file.list(); for (String p : pathList) { findClasses(root, path + '/' + p, list, superClass); } } } @Override public void actionPerformed(ActionEvent e) { Object source = e.getSource(); if (source == inputMenuItem || source == inputDirButton) { doChooseInput(); } else if (source == outputMenuItem || source == outputDirButton) { doChooseOutput(); } else if (source == startMenuItem) { JOptionPane.showMessageDialog(null, "1. Select input and output directories from the File menu\n" + "2. Choose a MapReduce job\n" + "3. Select the number of reducers\n" + "4. Press the Go! button\n" + "5. Step or run the Map tasks\n" + "6. Press Shuffle, then Sort\n" + "7. Step or run the Reduce tasks\n" + "8. 
Press the Write output button", "Getting started", JOptionPane.INFORMATION_MESSAGE); } else if (source == tipsMenuItem) { JOptionPane.showMessageDialog(null, "The number of mappers is equal to the number of input files.\n" + "After each mapper step, the destination reducer is displayed for each mapper.\n" + "MapReduce jobs can implement custom partitioners, sort and group comparators.\n" + "Hover the mouse pointer over keys or values to view details.", "Tips and tricks...", JOptionPane.INFORMATION_MESSAGE); } else if (source == aboutMenuItem) { JOptionPane.showMessageDialog(null, "MapReduce Viewer 1.0a\n" + "Copyright Madhu Siddalingaiah, 2014\n" + "madhu@madhu.com", "About", JOptionPane.INFORMATION_MESSAGE); } else if (source == jobCombo) { doJobSelect(); } else if (source == goButton) { try { doGoButton(); } catch (Exception exc) { exc.printStackTrace(); JOptionPane.showMessageDialog(null, exc.toString(), "Error", JOptionPane.ERROR_MESSAGE); } } else if (source == exitMenuItem) { System.exit(0); } } private void doChooseInput() { File dir = new File(inputDirTF.getText()); if (!dir.exists()) { dir = new File(System.getProperty("user.dir")); } JFileChooser chooser = new JFileChooser(dir); chooser.setFileSelectionMode(JFileChooser.DIRECTORIES_ONLY); int returnVal = chooser.showOpenDialog(this); if(returnVal == JFileChooser.APPROVE_OPTION) { inputDirectory = chooser.getSelectedFile(); inputDirTF.setText(inputDirectory.getAbsolutePath()); goButton.setEnabled(outputDirectory != null); } } private void doChooseOutput() { File dir = new File(outputDirTF.getText()); if (!dir.exists()) { dir = new File(System.getProperty("user.dir")); } JFileChooser chooser = new JFileChooser(dir); chooser.setFileSelectionMode(JFileChooser.DIRECTORIES_ONLY); int returnVal = chooser.showOpenDialog(this); if(returnVal == JFileChooser.APPROVE_OPTION) { outputDirectory = chooser.getSelectedFile(); outputDirTF.setText(outputDirectory.getAbsolutePath()); goButton.setEnabled(inputDirectory != 
null); } } private void doGoButton() throws Exception { inputDirectory = new File(inputDirTF.getText()); if (!inputDirectory.exists()) { String msg = String.format("Input directory %s does not exist", inputDirectory); JOptionPane.showMessageDialog(null, msg, "Error", JOptionPane.ERROR_MESSAGE); return; } outputDirectory = new File(outputDirTF.getText()); if (outputDirectory.exists() && outputDirectory.list().length > 0) { String message = String.format("Output directory\n%s\nis not empty, " + "files may be overwritten", outputDirectory.getAbsolutePath()); JOptionPane.showMessageDialog(null, message, "About", JOptionPane.WARNING_MESSAGE); } MapReduceJob job = (MapReduceJob) jobCombo.getSelectedItem(); if (formPanel != null) { formPanel.collectValues(); } job.setInputDirectory(inputDirectory); job.setInputReaderClass(KeyValueReader.getReaderClass((String) inputCombo.getSelectedItem())); job.setOutputDirectory(outputDirectory); job.setOutputWriterClass(KeyValueWriter.getWriterClass((String) outputCombo.getSelectedItem())); job.init(); MRPanel p = new MRPanel(job); JFrame f = new JFrame(job.getName()); f.setDefaultCloseOperation(DISPOSE_ON_CLOSE); f.getContentPane().add(p); f.pack(); f.setVisible(true); } private void doJobSelect() { MapReduceJob job = (MapReduceJob) jobCombo.getSelectedItem(); jobTextArea.setText(job.getDescription()); Object params = job.getParameters(); if (params == null) { splitPane.setBottomComponent(new JLabel("No configurable parameters")); formPanel = null; } else { try { formPanel = new FormPanel(params); splitPane.setBottomComponent(formPanel); pack(); } catch (Exception e) { e.printStackTrace(); JOptionPane.showMessageDialog(null, e.toString(), "Error", JOptionPane.ERROR_MESSAGE); return; } } } /** * @param args * @throws Exception */ public static void main(String[] args) throws Exception { String propName = "props.txt"; if (args.length > 0) { propName = args[0]; } File file = new File(propName); if (!file.exists()) { 
System.out.println("Unable to find properties file " + propName); System.exit(1); } Properties props = new Properties(); props.load(new FileReader(file)); MainFrame m = new MainFrame("MapReduce Viewer", props); m.setVisible(true); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

// $Id$

package javax.xml.transform;

import java.lang.reflect.Method;
import java.lang.reflect.InvocationTargetException;

/**
 * This class specifies an exceptional condition that occurred
 * during the transformation process.
 *
 * <p>NOTE(review): this class predates Java 1.4's chained-exception support,
 * so it carries its own {@link #containedException} field and mirrors it into
 * {@link #getCause()}/{@link #initCause(Throwable)} manually.</p>
 */
public class TransformerException extends Exception {

    // Added serialVersionUID to preserve binary compatibility
    private static final long serialVersionUID = 975798773772956428L;

    /** Field locator specifies where the error occurred */
    SourceLocator locator;

    /**
     * Method getLocator retrieves an instance of a SourceLocator
     * object that specifies where an error occurred.
     *
     * @return A SourceLocator object, or null if none was specified.
     */
    public SourceLocator getLocator() {
        return locator;
    }

    /**
     * Method setLocator sets an instance of a SourceLocator
     * object that specifies where an error occurred.
     *
     * @param location A SourceLocator object, or null to clear the location.
     */
    public void setLocator(SourceLocator location) {
        locator = location;
    }

    /** Field containedException specifies a wrapped exception.  May be null. */
    Throwable containedException;

    /**
     * This method retrieves an exception that this exception wraps.
     *
     * @return An Throwable object, or null.
     * @see #getCause
     */
    public Throwable getException() {
        return containedException;
    }

    /**
     * Returns the cause of this throwable or <code>null</code> if the
     * cause is nonexistent or unknown.  (The cause is the throwable that
     * caused this throwable to get thrown.)
     */
    public Throwable getCause() {
        // Guard against self-causation: report null rather than this.
        return ((containedException == this) ? null : containedException);
    }

    /**
     * Initializes the <i>cause</i> of this throwable to the specified value.
     * (The cause is the throwable that caused this throwable to get thrown.)
     *
     * <p>This method can be called at most once.  It is generally called from
     * within the constructor, or immediately after creating the
     * throwable.  If this throwable was created
     * with {@link #TransformerException(Throwable)} or
     * {@link #TransformerException(String,Throwable)}, this method cannot be called
     * even once.
     *
     * @param cause the cause (which is saved for later retrieval by the
     *         {@link #getCause()} method).  (A <tt>null</tt> value is
     *         permitted, and indicates that the cause is nonexistent or
     *         unknown.)
     * @return a reference to this <code>Throwable</code> instance.
     * @throws IllegalArgumentException if <code>cause</code> is this
     *         throwable.  (A throwable cannot
     *         be its own cause.)
     * @throws IllegalStateException if this throwable was
     *         created with {@link #TransformerException(Throwable)} or
     *         {@link #TransformerException(String,Throwable)}, or this method has already
     *         been called on this throwable.
     */
    public synchronized Throwable initCause(Throwable cause) {
        // Write-once semantics, mirroring Throwable.initCause's contract.
        if (this.containedException != null) {
            throw new IllegalStateException("Can't overwrite cause");
        }

        if (cause == this) {
            throw new IllegalArgumentException(
                "Self-causation not permitted");
        }

        this.containedException = cause;

        return this;
    }

    /**
     * Create a new TransformerException.
     *
     * @param message The error or warning message.
     */
    public TransformerException(String message) {
        super(message);
        this.containedException = null;
        this.locator = null;
    }

    /**
     * Create a new TransformerException wrapping an existing exception.
     *
     * @param e The exception to be wrapped.
     */
    public TransformerException(Throwable e) {
        super(e.toString());
        this.containedException = e;
        this.locator = null;
    }

    /**
     * Wrap an existing exception in a TransformerException.
     *
     * <p>This is used for throwing processor exceptions before
     * the processing has started.</p>
     *
     * @param message The error or warning message, or null to
     *                use the message from the embedded exception.
     * @param e Any exception
     */
    public TransformerException(String message, Throwable e) {
        // Fall back to the wrapped exception's text when no message is given.
        super(((message == null) || (message.length() == 0))
              ? e.toString()
              : message);
        this.containedException = e;
        this.locator = null;
    }

    /**
     * Create a new TransformerException from a message and a Locator.
     *
     * <p>This constructor is especially useful when an application is
     * creating its own exception from within a DocumentHandler
     * callback.</p>
     *
     * @param message The error or warning message.
     * @param locator The locator object for the error or warning.
     */
    public TransformerException(String message, SourceLocator locator) {
        super(message);
        this.containedException = null;
        this.locator = locator;
    }

    /**
     * Wrap an existing exception in a TransformerException.
     *
     * @param message The error or warning message, or null to
     *                use the message from the embedded exception.
     * @param locator The locator object for the error or warning.
     * @param e Any exception
     */
    public TransformerException(String message, SourceLocator locator,
                                Throwable e) {
        super(message);
        this.containedException = e;
        this.locator = locator;
    }

    /**
     * Get the error message with location information
     * appended.
     *
     * @return A <code>String</code> representing the error message with
     *         location information appended.
     */
    public String getMessageAndLocation() {
        StringBuffer sbuffer = new StringBuffer();
        String message = super.getMessage();

        if (null != message) {
            sbuffer.append(message);
        }

        if (null != locator) {
            String systemID = locator.getSystemId();
            int line = locator.getLineNumber();
            int column = locator.getColumnNumber();

            if (null != systemID) {
                sbuffer.append("; SystemID: ");
                sbuffer.append(systemID);
            }

            if (0 != line) {
                sbuffer.append("; Line#: ");
                sbuffer.append(line);
            }

            if (0 != column) {
                sbuffer.append("; Column#: ");
                sbuffer.append(column);
            }
        }

        return sbuffer.toString();
    }

    /**
     * Get the location information as a string.
     *
     * @return A string with location info, or null
     * if there is no location information.
     */
    public String getLocationAsString() {
        if (null != locator) {
            StringBuffer sbuffer = new StringBuffer();
            String systemID = locator.getSystemId();
            int line = locator.getLineNumber();
            int column = locator.getColumnNumber();

            if (null != systemID) {
                sbuffer.append("; SystemID: ");
                sbuffer.append(systemID);
            }

            if (0 != line) {
                sbuffer.append("; Line#: ");
                sbuffer.append(line);
            }

            if (0 != column) {
                sbuffer.append("; Column#: ");
                sbuffer.append(column);
            }

            return sbuffer.toString();
        } else {
            return null;
        }
    }

    /**
     * Print the trace of methods from where the error
     * originated.  This will trace all nested exception
     * objects, as well as this object.
     */
    public void printStackTrace() {
        printStackTrace(new java.io.PrintWriter(System.err, true));
    }

    /**
     * Print the trace of methods from where the error
     * originated.  This will trace all nested exception
     * objects, as well as this object.
     * @param s The stream where the dump will be sent to.
     */
    public void printStackTrace(java.io.PrintStream s) {
        printStackTrace(new java.io.PrintWriter(s));
    }

    /**
     * Print the trace of methods from where the error
     * originated.  This will trace all nested exception
     * objects, as well as this object.
     * @param s The writer where the dump will be sent to.
     */
    public void printStackTrace(java.io.PrintWriter s) {

        if (s == null) {
            s = new java.io.PrintWriter(System.err, true);
        }

        try {
            String locInfo = getLocationAsString();

            if (null != locInfo) {
                s.println(locInfo);
            }

            super.printStackTrace(s);
        } catch (Throwable e) {}

        // Detect a chained-exception-aware JRE reflectively so this class
        // still compiles/runs on pre-1.4 JVMs.
        boolean isJdk14OrHigher = false;

        try {
            Throwable.class.getMethod("getCause", (Class[]) null);

            isJdk14OrHigher = true;
        } catch (NoSuchMethodException nsme) {
            // do nothing
        }

        // The printStackTrace method of the Throwable class in jdk 1.4
        // and higher will include the cause when printing the backtrace.
        // The following code is only required when using jdk 1.3 or lower
        if (!isJdk14OrHigher) {
            Throwable exception = getException();

            // Walk at most 10 levels of wrapped exceptions to avoid cycles.
            for (int i = 0; (i < 10) && (null != exception); i++) {
                s.println("---------");

                try {
                    if (exception instanceof TransformerException) {
                        String locInfo =
                            ((TransformerException) exception)
                                .getLocationAsString();

                        if (null != locInfo) {
                            s.println(locInfo);
                        }
                    }

                    exception.printStackTrace(s);
                } catch (Throwable e) {
                    s.println("Could not print stack trace...");
                }

                try {
                    // Follow any getException() accessor reflectively so
                    // foreign wrapper exceptions are unwrapped too.
                    Method meth =
                        ((Object) exception).getClass().getMethod("getException",
                            (Class[]) null);

                    if (null != meth) {
                        Throwable prev = exception;

                        exception = (Throwable) meth.invoke(exception,
                                                            (Object[]) null);

                        if (prev == exception) {
                            break;
                        }
                    } else {
                        exception = null;
                    }
                } catch (InvocationTargetException ite) {
                    exception = null;
                } catch (IllegalAccessException iae) {
                    exception = null;
                } catch (NoSuchMethodException nsme) {
                    exception = null;
                }
            }
        }

        // ensure output is written
        s.flush();
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.connector.base.source.hybrid;

import org.apache.flink.api.connector.source.Boundedness;
import org.apache.flink.api.connector.source.Source;
import org.apache.flink.api.connector.source.SourceReader;
import org.apache.flink.api.connector.source.SourceReaderContext;
import org.apache.flink.api.connector.source.mocks.MockSource;
import org.apache.flink.api.connector.source.mocks.MockSourceSplit;
import org.apache.flink.connector.base.source.reader.mocks.MockBaseSource;
import org.apache.flink.connector.testutils.source.reader.TestingReaderContext;
import org.apache.flink.connector.testutils.source.reader.TestingReaderOutput;
import org.apache.flink.core.io.InputStatus;
import org.apache.flink.mock.Whitebox;

import org.hamcrest.Matchers;
import org.junit.Assert;
import org.junit.Test;
import org.mockito.Mockito;

import java.util.Collections;
import java.util.List;

/** Tests for {@link HybridSourceReader}. */
public class HybridSourceReaderTest {

    /**
     * Drives a reader through a full source switch: no current reader at start,
     * records drained from source 0, a SourceReaderFinishedEvent on exhaustion,
     * then a switch event installs the reader for source 1 and END_OF_INPUT is
     * reached after notifyNoMoreSplits.
     */
    @Test
    public void testReader() throws Exception {
        TestingReaderContext readerContext = new TestingReaderContext();
        TestingReaderOutput<Integer> readerOutput = new TestingReaderOutput<>();
        MockBaseSource source = new MockBaseSource(1, 1, Boundedness.BOUNDED);

        // 2 underlying readers to exercise switch
        SourceReader<Integer, MockSourceSplit> mockSplitReader1 =
                source.createReader(readerContext);
        SourceReader<Integer, MockSourceSplit> mockSplitReader2 =
                source.createReader(readerContext);

        HybridSourceReader<Integer> reader = new HybridSourceReader<>(readerContext);

        Assert.assertThat(readerContext.getSentEvents(), Matchers.emptyIterable());
        reader.start();
        // -1 = "no source active yet": the reader announces readiness for source 0.
        assertAndClearSourceReaderFinishedEvent(readerContext, -1);
        Assert.assertNull(currentReader(reader));
        Assert.assertEquals(InputStatus.NOTHING_AVAILABLE, reader.pollNext(readerOutput));

        Source source1 =
                new MockSource(null, 0) {
                    @Override
                    public SourceReader<Integer, MockSourceSplit> createReader(
                            SourceReaderContext readerContext) {
                        return mockSplitReader1;
                    }
                };
        reader.handleSourceEvents(new SwitchSourceEvent(0, source1, false));

        MockSourceSplit mockSplit = new MockSourceSplit(0, 0, 1);
        mockSplit.addRecord(0);

        SwitchedSources switchedSources = new SwitchedSources();
        switchedSources.put(0, source);
        HybridSourceSplit hybridSplit = HybridSourceSplit.wrapSplit(mockSplit, 0, switchedSources);
        reader.addSplits(Collections.singletonList(hybridSplit));

        // drain splits
        InputStatus status = reader.pollNext(readerOutput);
        while (readerOutput.getEmittedRecords().isEmpty() || status == InputStatus.MORE_AVAILABLE) {
            status = reader.pollNext(readerOutput);
            Thread.sleep(10);
        }
        Assert.assertThat(readerOutput.getEmittedRecords(), Matchers.contains(0));
        reader.pollNext(readerOutput);
        Assert.assertEquals(
                "before notifyNoMoreSplits",
                InputStatus.NOTHING_AVAILABLE,
                reader.pollNext(readerOutput));

        reader.notifyNoMoreSplits();
        reader.pollNext(readerOutput);
        assertAndClearSourceReaderFinishedEvent(readerContext, 0);

        Assert.assertEquals(
                "reader before switch source event", mockSplitReader1, currentReader(reader));

        Source source2 =
                new MockSource(null, 0) {
                    @Override
                    public SourceReader<Integer, MockSourceSplit> createReader(
                            SourceReaderContext readerContext) {
                        return mockSplitReader2;
                    }
                };
        reader.handleSourceEvents(new SwitchSourceEvent(1, source2, true));
        Assert.assertEquals(
                "reader after switch source event", mockSplitReader2, currentReader(reader));

        reader.notifyNoMoreSplits();
        Assert.assertEquals(
                "reader 1 after notifyNoMoreSplits",
                InputStatus.END_OF_INPUT,
                reader.pollNext(readerOutput));

        reader.close();
    }

    /**
     * Restores a reader from a snapshot: splits added before the switch event
     * must be buffered until the matching source arrives, then survive a second
     * snapshot unchanged.
     */
    @Test
    public void testReaderRecovery() throws Exception {
        TestingReaderContext readerContext = new TestingReaderContext();
        TestingReaderOutput<Integer> readerOutput = new TestingReaderOutput<>();
        MockBaseSource source = new MockBaseSource(1, 1, Boundedness.BOUNDED);

        HybridSourceReader<Integer> reader = new HybridSourceReader<>(readerContext);
        reader.start();
        assertAndClearSourceReaderFinishedEvent(readerContext, -1);
        reader.handleSourceEvents(new SwitchSourceEvent(0, source, false));

        // Effectively unbounded split (limit = Integer.MAX_VALUE).
        MockSourceSplit mockSplit = new MockSourceSplit(0, 0, 2147483647);

        SwitchedSources switchedSources = new SwitchedSources();
        switchedSources.put(0, source);
        HybridSourceSplit hybridSplit = HybridSourceSplit.wrapSplit(mockSplit, 0, switchedSources);
        reader.addSplits(Collections.singletonList(hybridSplit));

        List<HybridSourceSplit> snapshot = reader.snapshotState(0);
        Assert.assertThat(snapshot, Matchers.contains(hybridSplit));

        // reader recovery
        readerContext.clearSentEvents();
        reader = new HybridSourceReader<>(readerContext);
        reader.addSplits(snapshot);
        Assert.assertNull(currentReader(reader));

        reader.start();
        // Underlying reader is only created once the switch event delivers the source.
        Assert.assertNull(currentReader(reader));
        assertAndClearSourceReaderFinishedEvent(readerContext, -1);
        reader.handleSourceEvents(new SwitchSourceEvent(0, source, false));
        Assert.assertNotNull(currentReader(reader));
        Assert.assertThat(reader.snapshotState(1), Matchers.contains(hybridSplit));

        reader.close();
    }

    /**
     * Verifies the default-method notifications (checkpoint complete/aborted)
     * are delegated to the currently active underlying reader (spied via Mockito).
     */
    @Test
    public void testDefaultMethodDelegation() throws Exception {
        TestingReaderContext readerContext = new TestingReaderContext();
        TestingReaderOutput<Integer> readerOutput = new TestingReaderOutput<>();
        MockBaseSource source =
                new MockBaseSource(1, 1, Boundedness.BOUNDED) {
                    @Override
                    public SourceReader<Integer, MockSourceSplit> createReader(
                            SourceReaderContext readerContext) {
                        return Mockito.spy(super.createReader(readerContext));
                    }
                };

        HybridSourceReader<Integer> reader = new HybridSourceReader<>(readerContext);
        reader.start();
        assertAndClearSourceReaderFinishedEvent(readerContext, -1);
        reader.handleSourceEvents(new SwitchSourceEvent(0, source, false));
        SourceReader<Integer, MockSourceSplit> underlyingReader = currentReader(reader);

        reader.notifyCheckpointComplete(1);
        Mockito.verify(underlyingReader).notifyCheckpointComplete(1);

        reader.notifyCheckpointAborted(1);
        Mockito.verify(underlyingReader).notifyCheckpointAborted(1);

        reader.close();
    }

    // Reflectively reads the private "currentReader" field of the hybrid reader.
    private static SourceReader<Integer, MockSourceSplit> currentReader(
            HybridSourceReader<?> reader) {
        return (SourceReader) Whitebox.getInternalState(reader, "currentReader");
    }

    // Asserts exactly one SourceReaderFinishedEvent with the given source index
    // was sent, then clears the recorded events for the next assertion.
    private static void assertAndClearSourceReaderFinishedEvent(
            TestingReaderContext context, int sourceIndex) {
        Assert.assertThat(context.getSentEvents(), Matchers.iterableWithSize(1));
        Assert.assertEquals(
                sourceIndex,
                ((SourceReaderFinishedEvent) context.getSentEvents().get(0)).sourceIndex());
        context.clearSentEvents();
    }
}
/* * Copyright (c) 2007 Mike Heath. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.adbcj.tck.test; import org.adbcj.*; import org.testng.Assert; import org.testng.annotations.Test; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertNotNull; @Test(timeOut = 5000) public class TransactionTest extends AbstractWithConnectionManagerTest{ public void testBeginTransaction() throws Exception { Connection connection = connectionManager.connect().get(); try { Assert.assertTrue(!connection.isInTransaction(), "Connections should not start with transaction started"); connection.beginTransaction(); Assert.assertTrue(connection.isInTransaction(), "Connection should be in transaction"); try { connection.beginTransaction(); Assert.fail("Should have thrown exception because connection is already in transaction"); } catch (DbException e) { // Pass } } finally { connection.close(); } } public void testCommitRollbackWithNoTransaction() throws Exception { Connection connection = connectionManager.connect().get(); try { // Test commit with no transaction try { connection.commit(); Assert.fail("Not in transaction, commit should have failed"); } catch (DbException e) { // Pass } // Test rollback with no transaction try { connection.rollback(); Assert.fail("Not in transaction, rollback should have failed"); } catch (DbException e) { // Pass } connection.beginTransaction(); connection.rollback().get(); connection.beginTransaction(); 
connection.commit().get(); connection.beginTransaction(); } finally { connection.close(); } } public void testAfterCommitNotTransactionIsActive() throws Exception { Connection connection = connectionManager.connect().get(); connection.beginTransaction(); connection.executeQuery("SELECT * FROM updates ").get(); connection.commit().get(); Assert.assertFalse(connection.isInTransaction()); } public void testAfterRollbackNotTransactionIsActive() throws Exception { Connection connection = connectionManager.connect().get(); connection.beginTransaction(); connection.executeQuery("SELECT * FROM updates").get(); connection.rollback().get(); Assert.assertFalse(connection.isInTransaction()); } public void testRollback() throws Exception { Connection connection = connectionManager.connect().get(); try { // Clear out updates table Result result = connection.executeUpdate("DELETE FROM updates").get(); assertNotNull(result); // Make sure updates is empty ResultSet rs = connection.executeQuery("SELECT id FROM updates").get(); assertNotNull(rs); assertEquals(rs.size(), 0); connection.beginTransaction(); // Insert a row result = connection.executeUpdate("INSERT INTO updates (id) VALUES (1)").get(); assertNotNull(result); assertEquals(result.getAffectedRows(), 1L); // Make sure we can select the row rs = connection.executeQuery("SELECT id FROM updates").get(); assertNotNull(rs); assertEquals(rs.size(), 1); Value value = rs.get(0).get(0); assertEquals(value.getInt(), 1); // Rollback transaction connection.rollback().get(); // select query should now be empty rs = connection.executeQuery("SELECT id FROM updates").get(); assertNotNull(rs); assertEquals(rs.size(), 0); } finally { connection.close(); } } public void testCommit() throws Exception { Connection connection = connectionManager.connect().get(); Connection connection2 = connectionManager.connect().get(); try { // Clear out updates table Result result = connection.executeUpdate("DELETE FROM updates").get(); assertNotNull(result); 
connection.beginTransaction(); // Insert a row result = connection.executeUpdate("INSERT INTO updates (id) VALUES (1)").get(); assertNotNull(result); assertEquals(result.getAffectedRows(), 1L); // Make sure second connection can't see data ResultSet rs = connection2.executeQuery("SELECT id FROM updates").get(); assertNotNull(rs); assertEquals(rs.size(), 0); connection.commit().get(); // Make sure both connections can see data rs = connection.executeQuery("SELECT id FROM updates").get(); assertNotNull(rs); assertEquals(rs.size(), 1); assertEquals(rs.get(0).get(0).getInt(), 1); rs = connection2.executeQuery("SELECT id FROM updates").get(); assertNotNull(rs); assertEquals(rs.size(), 1); assertEquals(rs.get(0).get(0).getInt(), 1); } finally { connection.close(); connection2.close(); } } public void testAfterCommitAutoCommit() throws Exception { Connection connection = connectionManager.connect().get(); Connection connection2 = connectionManager.connect().get(); try { // Clear out updates table Result result = connection.executeUpdate("DELETE FROM updates").get(); assertNotNull(result); connection.beginTransaction(); // Insert a row result = connection.executeUpdate("INSERT INTO updates (id) VALUES (1)").get(); assertNotNull(result); assertEquals(result.getAffectedRows(), 1L); connection.commit().get(); result = connection.executeUpdate("INSERT INTO updates (id) VALUES (2)").get(); // Make sure both connections can see data ResultSet rs = connection2.executeQuery("SELECT id FROM updates").get(); assertNotNull(rs); assertEquals(rs.size(), 2); } finally { connection.close(); connection2.close(); } } }
// Copyright (C) 2014 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.gerrit.server.restapi.change;

import com.google.common.base.Strings;
import com.google.gerrit.extensions.common.DiffWebLinkInfo;
import com.google.gerrit.extensions.common.EditInfo;
import com.google.gerrit.extensions.common.Input;
import com.google.gerrit.extensions.registration.DynamicMap;
import com.google.gerrit.extensions.restapi.AuthException;
import com.google.gerrit.extensions.restapi.BadRequestException;
import com.google.gerrit.extensions.restapi.BinaryResult;
import com.google.gerrit.extensions.restapi.ChildCollection;
import com.google.gerrit.extensions.restapi.DefaultInput;
import com.google.gerrit.extensions.restapi.IdString;
import com.google.gerrit.extensions.restapi.RawInput;
import com.google.gerrit.extensions.restapi.ResourceConflictException;
import com.google.gerrit.extensions.restapi.ResourceNotFoundException;
import com.google.gerrit.extensions.restapi.Response;
import com.google.gerrit.extensions.restapi.RestCollectionCreateView;
import com.google.gerrit.extensions.restapi.RestCollectionDeleteMissingView;
import com.google.gerrit.extensions.restapi.RestCollectionModifyView;
import com.google.gerrit.extensions.restapi.RestModifyView;
import com.google.gerrit.extensions.restapi.RestReadView;
import com.google.gerrit.extensions.restapi.RestView;
import com.google.gerrit.reviewdb.client.Change;
import com.google.gerrit.reviewdb.client.PatchSet;
import com.google.gerrit.reviewdb.client.Project;
import com.google.gerrit.server.WebLinks;
import com.google.gerrit.server.change.ChangeEditResource;
import com.google.gerrit.server.change.ChangeResource;
import com.google.gerrit.server.change.FileContentUtil;
import com.google.gerrit.server.change.FileInfoJson;
import com.google.gerrit.server.change.RevisionResource;
import com.google.gerrit.server.edit.ChangeEdit;
import com.google.gerrit.server.edit.ChangeEditJson;
import com.google.gerrit.server.edit.ChangeEditModifier;
import com.google.gerrit.server.edit.ChangeEditUtil;
import com.google.gerrit.server.edit.UnchangedCommitMessageException;
import com.google.gerrit.server.git.GitRepositoryManager;
import com.google.gerrit.server.patch.PatchListNotAvailableException;
import com.google.gerrit.server.permissions.PermissionBackendException;
import com.google.gerrit.server.project.InvalidChangeOperationException;
import com.google.gerrit.server.project.ProjectCache;
import com.google.inject.Inject;
import com.google.inject.Provider;
import com.google.inject.Singleton;
import java.io.IOException;
import java.util.List;
import java.util.Optional;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.revwalk.RevCommit;
import org.eclipse.jgit.revwalk.RevWalk;
import org.kohsuke.args4j.Option;

/**
 * REST collection for change edits: resolves a change's (at most one, per user) pending edit as a
 * child resource and hosts the nested view classes that implement the individual change-edit
 * endpoints (create, read, modify, delete file content, commit message, web links).
 */
@Singleton
public class ChangeEdits implements ChildCollection<ChangeResource, ChangeEditResource> {
  private final DynamicMap<RestView<ChangeEditResource>> views;
  private final Provider<Detail> detail;
  private final ChangeEditUtil editUtil;

  @Inject
  ChangeEdits(
      DynamicMap<RestView<ChangeEditResource>> views,
      Provider<Detail> detail,
      ChangeEditUtil editUtil) {
    this.views = views;
    this.detail = detail;
    this.editUtil = editUtil;
  }

  @Override
  public DynamicMap<RestView<ChangeEditResource>> views() {
    return views;
  }

  @Override
  public RestView<ChangeResource> list() {
    return detail.get();
  }

  /**
   * Resolves the change edit of the calling user for the given change; {@code id} is the file path
   * within the edit. Fails with 404 when the user has no pending edit on this change.
   */
  @Override
  public ChangeEditResource parse(ChangeResource rsrc, IdString id)
      throws ResourceNotFoundException, AuthException, IOException {
    Optional<ChangeEdit> edit = editUtil.byChange(rsrc.getNotes(), rsrc.getUser());
    if (!edit.isPresent()) {
      throw new ResourceNotFoundException(id);
    }
    return new ChangeEditResource(rsrc, edit.get(), id.get());
  }

  /**
   * Create handler that is activated when collection element is accessed but doesn't exist, e. g.
   * PUT request with a path was called but change edit wasn't created yet. Change edit is created
   * and PUT handler is called.
   */
  public static class Create
      implements RestCollectionCreateView<ChangeResource, ChangeEditResource, Put.Input> {
    private final Put putEdit;

    @Inject
    Create(Put putEdit) {
      this.putEdit = putEdit;
    }

    @Override
    public Response<?> apply(ChangeResource resource, IdString id, Put.Input input)
        throws AuthException, ResourceConflictException, BadRequestException, IOException,
            PermissionBackendException {
      // Delegates to Put, which implicitly creates the edit when none exists yet.
      putEdit.apply(resource, id.get(), input.content);
      return Response.none();
    }
  }

  /**
   * DELETE on a file path when the change edit does not exist yet: the edit is created implicitly
   * and the file deletion applied to it (delegates to {@link DeleteContent}).
   */
  public static class DeleteFile
      implements RestCollectionDeleteMissingView<ChangeResource, ChangeEditResource, Input> {
    private final DeleteContent deleteContent;

    @Inject
    DeleteFile(DeleteContent deleteContent) {
      this.deleteContent = deleteContent;
    }

    @Override
    public Response<?> apply(ChangeResource rsrc, IdString id, Input in)
        throws IOException, AuthException, BadRequestException, ResourceConflictException,
            PermissionBackendException {
      return deleteContent.apply(rsrc, id.get());
    }
  }

  // TODO(davido): Turn the boolean options to ChangeEditOption enum,
  // like it's already the case for ListChangesOption/ListGroupsOption
  /**
   * GET on the edit collection: returns {@link EditInfo} for the caller's change edit, optionally
   * with the file list (diffed against {@code --base} or the edit's base patch set) and download
   * commands. Returns no content when no edit exists.
   */
  public static class Detail implements RestReadView<ChangeResource> {
    private final ChangeEditUtil editUtil;
    private final ChangeEditJson editJson;
    private final FileInfoJson fileInfoJson;
    private final Revisions revisions;

    private String base;
    private boolean list;
    private boolean downloadCommands;

    @Option(name = "--base", metaVar = "revision-id")
    public void setBase(String base) {
      this.base = base;
    }

    @Option(name = "--list")
    public void setList(boolean list) {
      this.list = list;
    }

    @Option(name = "--download-commands")
    public void setDownloadCommands(boolean downloadCommands) {
      this.downloadCommands = downloadCommands;
    }

    @Inject
    Detail(
        ChangeEditUtil editUtil,
        ChangeEditJson editJson,
        FileInfoJson fileInfoJson,
        Revisions revisions) {
      this.editJson = editJson;
      this.editUtil = editUtil;
      this.fileInfoJson = fileInfoJson;
      this.revisions = revisions;
    }

    @Override
    public Response<EditInfo> apply(ChangeResource rsrc)
        throws AuthException, IOException, ResourceNotFoundException, PermissionBackendException {
      Optional<ChangeEdit> edit = editUtil.byChange(rsrc.getNotes(), rsrc.getUser());
      if (!edit.isPresent()) {
        return Response.none();
      }

      EditInfo editInfo = editJson.toEditInfo(edit.get(), downloadCommands);
      if (list) {
        PatchSet basePatchSet = null;
        if (base != null) {
          RevisionResource baseResource = revisions.parse(rsrc, IdString.fromDecoded(base));
          basePatchSet = baseResource.getPatchSet();
        }
        try {
          editInfo.files =
              fileInfoJson.toFileInfoMap(
                  rsrc.getChange(), edit.get().getEditCommit(), basePatchSet);
        } catch (PatchListNotAvailableException e) {
          throw new ResourceNotFoundException(e.getMessage());
        }
      }
      return Response.ok(editInfo);
    }
  }

  /**
   * Post to edit collection resource. Two different operations are supported:
   *
   * <ul>
   *   <li>Create non existing change edit
   *   <li>Restore path in existing change edit
   * </ul>
   *
   * The combination of two operations in one request is supported.
   */
  @Singleton
  public static class Post
      implements RestCollectionModifyView<ChangeResource, ChangeEditResource, Post.Input> {
    public static class Input {
      public String restorePath;
      public String oldPath;
      public String newPath;
    }

    private final ChangeEditModifier editModifier;
    private final GitRepositoryManager repositoryManager;

    @Inject
    Post(ChangeEditModifier editModifier, GitRepositoryManager repositoryManager) {
      this.editModifier = editModifier;
      this.repositoryManager = repositoryManager;
    }

    @Override
    public Response<?> apply(ChangeResource resource, Post.Input input)
        throws AuthException, BadRequestException, IOException, ResourceConflictException,
            PermissionBackendException {
      Project.NameKey project = resource.getProject();
      try (Repository repository = repositoryManager.openRepository(project)) {
        // Dispatch on the populated input fields: restore > rename > bare edit creation.
        if (isRestoreFile(input)) {
          editModifier.restoreFile(repository, resource.getNotes(), input.restorePath);
        } else if (isRenameFile(input)) {
          editModifier.renameFile(repository, resource.getNotes(), input.oldPath, input.newPath);
        } else {
          editModifier.createEdit(repository, resource.getNotes());
        }
      } catch (InvalidChangeOperationException e) {
        throw new ResourceConflictException(e.getMessage());
      }
      return Response.none();
    }

    private static boolean isRestoreFile(Input input) {
      return input != null && !Strings.isNullOrEmpty(input.restorePath);
    }

    private static boolean isRenameFile(Input input) {
      return input != null
          && !Strings.isNullOrEmpty(input.oldPath)
          && !Strings.isNullOrEmpty(input.newPath);
    }
  }

  /** Put handler that is activated when PUT request is called on collection element. */
  @Singleton
  public static class Put implements RestModifyView<ChangeEditResource, Put.Input> {
    public static class Input {
      @DefaultInput public RawInput content;
    }

    private final ChangeEditModifier editModifier;
    private final GitRepositoryManager repositoryManager;

    @Inject
    Put(ChangeEditModifier editModifier, GitRepositoryManager repositoryManager) {
      this.editModifier = editModifier;
      this.repositoryManager = repositoryManager;
    }

    @Override
    public Response<?> apply(ChangeEditResource rsrc, Input input)
        throws AuthException, ResourceConflictException, BadRequestException, IOException,
            PermissionBackendException {
      return apply(rsrc.getChangeResource(), rsrc.getPath(), input.content);
    }

    /** Writes {@code newContent} to {@code path} in the caller's change edit. */
    public Response<?> apply(ChangeResource rsrc, String path, RawInput newContent)
        throws ResourceConflictException, AuthException, BadRequestException, IOException,
            PermissionBackendException {
      // Reject empty paths and absolute paths; edit file paths are repository-relative.
      if (Strings.isNullOrEmpty(path) || path.charAt(0) == '/') {
        throw new ResourceConflictException("Invalid path: " + path);
      }

      try (Repository repository = repositoryManager.openRepository(rsrc.getProject())) {
        editModifier.modifyFile(repository, rsrc.getNotes(), path, newContent);
      } catch (InvalidChangeOperationException e) {
        throw new ResourceConflictException(e.getMessage());
      }
      return Response.none();
    }
  }

  /**
   * Handler to delete a file.
   *
   * <p>This deletes the file from the repository completely. This is not the same as reverting or
   * restoring a file to its previous contents.
   */
  @Singleton
  public static class DeleteContent implements RestModifyView<ChangeEditResource, Input> {
    private final ChangeEditModifier editModifier;
    private final GitRepositoryManager repositoryManager;

    @Inject
    DeleteContent(ChangeEditModifier editModifier, GitRepositoryManager repositoryManager) {
      this.editModifier = editModifier;
      this.repositoryManager = repositoryManager;
    }

    @Override
    public Response<?> apply(ChangeEditResource rsrc, Input input)
        throws AuthException, BadRequestException, ResourceConflictException, IOException,
            PermissionBackendException {
      return apply(rsrc.getChangeResource(), rsrc.getPath());
    }

    /** Deletes {@code filePath} from the caller's change edit. */
    public Response<?> apply(ChangeResource rsrc, String filePath)
        throws AuthException, BadRequestException, IOException, ResourceConflictException,
            PermissionBackendException {
      try (Repository repository = repositoryManager.openRepository(rsrc.getProject())) {
        editModifier.deleteFile(repository, rsrc.getNotes(), filePath);
      } catch (InvalidChangeOperationException e) {
        throw new ResourceConflictException(e.getMessage());
      }
      return Response.none();
    }
  }

  /**
   * GET of a file in a change edit: returns the file content, either from the edit commit or (with
   * {@code --base}) from the edit's base patch set.
   */
  public static class Get implements RestReadView<ChangeEditResource> {
    private final FileContentUtil fileContentUtil;
    private final ProjectCache projectCache;

    @Option(
        name = "--base",
        aliases = {"-b"},
        usage = "whether to load the content on the base revision instead of the change edit")
    private boolean base;

    @Inject
    Get(FileContentUtil fileContentUtil, ProjectCache projectCache) {
      this.fileContentUtil = fileContentUtil;
      this.projectCache = projectCache;
    }

    @Override
    public Response<BinaryResult> apply(ChangeEditResource rsrc) throws IOException {
      try {
        ChangeEdit edit = rsrc.getChangeEdit();
        return Response.ok(
            fileContentUtil.getContent(
                projectCache.checkedGet(rsrc.getChangeResource().getProject()),
                base
                    ? ObjectId.fromString(edit.getBasePatchSet().getRevision().get())
                    : edit.getEditCommit(),
                rsrc.getPath(),
                null));
      } catch (ResourceNotFoundException | BadRequestException e) {
        // Missing/invalid file content is reported as "no content" rather than an error.
        return Response.none();
      }
    }
  }

  /** Returns diff web links for a file in the change edit (against the edit's base patch set). */
  @Singleton
  public static class GetMeta implements RestReadView<ChangeEditResource> {
    private final WebLinks webLinks;

    @Inject
    GetMeta(WebLinks webLinks) {
      this.webLinks = webLinks;
    }

    @Override
    public FileInfo apply(ChangeEditResource rsrc) {
      FileInfo r = new FileInfo();
      ChangeEdit edit = rsrc.getChangeEdit();
      Change change = edit.getChange();
      List<DiffWebLinkInfo> links =
          webLinks.getDiffLinks(
              change.getProject().get(),
              change.getChangeId(),
              edit.getBasePatchSet().getPatchSetId(),
              edit.getBasePatchSet().getRefName(),
              rsrc.getPath(),
              0,
              edit.getRefName(),
              rsrc.getPath());
      // JSON output omits the field entirely when there are no links.
      r.webLinks = links.isEmpty() ? null : links;
      return r;
    }

    public static class FileInfo {
      public List<DiffWebLinkInfo> webLinks;
    }
  }

  /** PUT of the commit message onto the change edit. */
  @Singleton
  public static class EditMessage implements RestModifyView<ChangeResource, EditMessage.Input> {
    public static class Input {
      @DefaultInput public String message;
    }

    private final ChangeEditModifier editModifier;
    private final GitRepositoryManager repositoryManager;

    @Inject
    EditMessage(ChangeEditModifier editModifier, GitRepositoryManager repositoryManager) {
      this.editModifier = editModifier;
      this.repositoryManager = repositoryManager;
    }

    @Override
    public Object apply(ChangeResource rsrc, Input input)
        throws AuthException, IOException, BadRequestException, ResourceConflictException,
            PermissionBackendException {
      if (input == null || Strings.isNullOrEmpty(input.message)) {
        throw new BadRequestException("commit message must be provided");
      }

      Project.NameKey project = rsrc.getProject();
      try (Repository repository = repositoryManager.openRepository(project)) {
        editModifier.modifyMessage(repository, rsrc.getNotes(), input.message);
      } catch (UnchangedCommitMessageException e) {
        // New message identical to the current one.
        throw new ResourceConflictException(e.getMessage());
      }
      return Response.none();
    }
  }

  /**
   * GET of the commit message: from the edit commit, or (with {@code --base}) from the base patch
   * set's commit. Result is base64-encoded text; 404 when no edit exists.
   */
  public static class GetMessage implements RestReadView<ChangeResource> {
    private final GitRepositoryManager repoManager;
    private final ChangeEditUtil editUtil;

    @Option(
        name = "--base",
        aliases = {"-b"},
        usage = "whether to load the message on the base revision instead of the change edit")
    private boolean base;

    @Inject
    GetMessage(GitRepositoryManager repoManager, ChangeEditUtil editUtil) {
      this.repoManager = repoManager;
      this.editUtil = editUtil;
    }

    @Override
    public BinaryResult apply(ChangeResource rsrc)
        throws AuthException, IOException, ResourceNotFoundException {
      Optional<ChangeEdit> edit = editUtil.byChange(rsrc.getNotes(), rsrc.getUser());
      String msg;
      if (edit.isPresent()) {
        if (base) {
          try (Repository repo = repoManager.openRepository(rsrc.getProject());
              RevWalk rw = new RevWalk(repo)) {
            RevCommit commit =
                rw.parseCommit(
                    ObjectId.fromString(edit.get().getBasePatchSet().getRevision().get()));
            msg = commit.getFullMessage();
          }
        } else {
          msg = edit.get().getEditCommit().getFullMessage();
        }
        return BinaryResult.create(msg)
            .setContentType(FileContentUtil.TEXT_X_GERRIT_COMMIT_MESSAGE)
            .base64();
      }
      throw new ResourceNotFoundException();
    }
  }
}
import java.util.*;

/**
 * Collection of static methods, each built around one specific {@code for}-loop shape
 * (count up/down, compound conditions, break, for-each, …). The loop constructs themselves
 * appear to be the point of this file — presumably a fixture corpus for loop analysis — so
 * each method keeps its exact loop form; only local names and documentation differ.
 *
 * <p>NOTE: {@code infiniteFor} never terminates by design, and {@code doubleCountUpFor}
 * never terminates either ({@code idx} starts at 0, so {@code idx *= 2} stays 0) —
 * presumably intentional; do not call them expecting a result.
 */
class ForLoops {
  public static void main(String[] args) {}

  /** Counts 0..99 with a literal bound; returns 4950. */
  public static int simpleCountUpFor() {
    int total = 0;
    for (int idx = 0; idx < 100; ++idx) {
      total += idx;
    }
    return total;
  }

  /** Same as {@link #simpleCountUpFor} but the bound lives in a local variable. */
  public static int simpleCountUpFor2() {
    int total = 0;
    int bound = 100;
    for (int idx = 0; idx < bound; ++idx) {
      total += idx;
    }
    return total;
  }

  /** Inclusive upper bound: sums 0..100 = 5050. */
  public static int simpleCountUpEqualFor() {
    int total = 0;
    for (int idx = 0; idx <= 100; ++idx) {
      total += idx;
    }
    return total;
  }

  /** Stride of 2: sums the even numbers below 100. */
  public static int simpleCountUpByTwoFor() {
    int total = 0;
    for (int idx = 0; idx < 100; idx += 2) {
      total += idx;
    }
    return total;
  }

  /** Stride held in a variable (3). */
  public static int complexCountUpFor() {
    int total = 0;
    int stride = 3;
    for (int idx = 0; idx < 100; idx += stride) {
      total += idx;
    }
    return total;
  }

  /** The bound shrinks inside the body, so index and bound meet at 50. */
  public static int variableCountUpFor() {
    int total = 0;
    int bound = 100;
    for (int idx = 0; idx < bound; ++idx) {
      total += idx;
      bound -= 1;
    }
    return total;
  }

  /** Helper for the function-call-bound loop below. */
  private static int square(int x) {
    return x * x;
  }

  /** Bound is a call expression, re-evaluated each iteration. */
  public static int functionCallBoundCountUpFor() {
    int total = 0;
    int base = 2;
    for (int idx = 0; idx < square(base); ++idx) {
      total += idx;
    }
    return total;
  }

  /** Bound is an inline multiplication instead of a call. */
  public static int squareBoundCountUpFor() {
    int total = 0;
    int base = 2;
    for (int idx = 0; idx < base * base; ++idx) {
      total += idx;
    }
    return total;
  }

  /** Conjunction in the condition; the second clause is always true here. */
  public static int andCountUpFor() {
    int total = 0;
    for (int idx = 0; idx < 100 && idx > -1; ++idx) {
      total += idx;
    }
    return total;
  }

  /** Disjunction in the condition; exits at 100 since neither clause holds there. */
  public static int orCountUpFor() {
    int total = 0;
    for (int idx = 0; idx < 100 || idx > 200; ++idx) {
      total += idx;
    }
    return total;
  }

  /** Three-way disjunction; still exits at 100. */
  public static int twoOrCountUpFor() {
    int total = 0;
    for (int idx = 0; idx < 100 || idx > 200 || idx == 50; ++idx) {
      total += idx;
    }
    return total;
  }

  /** Negated condition form; runs through 100 inclusive. */
  public static int negatedCountUpFor() {
    int total = 0;
    for (int idx = 0; !(idx > 100); ++idx) {
      total += idx;
    }
    return total;
  }

  /** WARNING: does not terminate — idx starts at 0, so doubling never moves it. */
  public static int doubleCountUpFor() {
    int total = 0;
    for (int idx = 0; idx < 100; idx *= 2) {
      total += idx;
    }
    return total;
  }

  /** Counts 100 down to 1; returns 5050. */
  public static int simpleCountDownFor() {
    int total = 0;
    for (int idx = 100; idx > 0; --idx) {
      total += idx;
    }
    return total;
  }

  /** Count-down with the lower bound in a variable. */
  public static int simpleCountDownFor2() {
    int total = 0;
    int floor = 0;
    for (int idx = 100; idx > floor; --idx) {
      total += idx;
    }
    return total;
  }

  /** Inclusive lower bound: sums 100..0. */
  public static int simpleCountDownEqualFor() {
    int total = 0;
    for (int idx = 100; idx >= 0; --idx) {
      total += idx;
    }
    return total;
  }

  /** Downward stride of 2: sums the even numbers 100..2. */
  public static int simpleCountDownByTwoFor() {
    int total = 0;
    for (int idx = 100; idx > 0; idx -= 2) {
      total += idx;
    }
    return total;
  }

  /** Downward stride held in a variable (3). */
  public static int complexCountDownFor() {
    int total = 0;
    int stride = 3;
    for (int idx = 100; idx > 0; idx -= stride) {
      total += idx;
    }
    return total;
  }

  /** The lower bound grows inside the body, so index and bound meet at 50. */
  public static int variableCountDownFor() {
    int total = 0;
    int floor = 0;
    for (int idx = 100; idx > floor; --idx) {
      total += idx;
      floor += 1;
    }
    return total;
  }

  /** Early exit via break once the running total exceeds 400. */
  public static int forWithBreak() {
    int total = 0;
    for (int idx = 10; idx < 135; ++idx) {
      total += idx;
      if (total > 400) {
        break;
      }
    }
    return total;
  }

  /** Bound held in a never-modified local. */
  public static int varConstraintFor() {
    int total = 0;
    int max = 75;
    for (int idx = 0; idx < max; ++idx) {
      total += idx;
    }
    return total;
  }

  /** Condition written with the constant on the left-hand side. */
  public static int rhsConstraintFor() {
    int total = 0;
    for (int idx = 0; 45 > idx; ++idx) {
      total += idx;
    }
    return total;
  }

  /** Two nested loops: linear outer, geometric inner (1,2,4,…,256). */
  public static int nestedFor() {
    int total = 0;
    for (int outer = 0; outer < 10; ++outer) {
      for (int inner = 1; inner < 512; inner *= 2) {
        total += (outer * inner);
      }
    }
    return total;
  }

  /** WARNING: intentionally infinite — the condition slot is empty and there is no break. */
  public static void infiniteFor() {
    int total = 0;
    for (int idx = 0; ; ++idx) {
      total += idx;
      System.out.println(idx);
    }
  }

  /** Empty condition slot, but a conditional break terminates it; returns 247. */
  public static int infiniteBreakFor() {
    int total = 0;
    for (int idx = 0; ; ++idx) {
      total += idx;
      if (total > 142) {
        break;
      }
      total *= 2;
    }
    return total;
  }

  /** Helper producing the (constant) bound for {@link #fakeWhileFor}. */
  private static int return100() {
    return Math.abs(100);
  }

  /** A for statement with empty init/update slots, i.e. a while loop in disguise. */
  public static int fakeWhileFor() {
    int total = 0;
    int idx = 0;
    for (; idx < return100(); ) {
      total += idx;
      idx += 1;
    }
    return total;
  }

  /** Enhanced for over a primitive array; sums 0..9 = 45. */
  public static int arrayIteratorLoop() {
    int[] intArr = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9};
    int total = 0;
    for (int value : intArr) {
      total += value;
    }
    return total;
  }

  /** Enhanced for over an Iterable of boxed ints; sums 0..9 = 45. */
  public static int iteratableLoop() {
    ArrayList<Integer> values = new ArrayList<>();
    values.add(0);
    values.add(1);
    values.add(2);
    values.add(3);
    values.add(4);
    values.add(5);
    values.add(6);
    values.add(7);
    values.add(8);
    values.add(9);
    int total = 0;
    for (Integer value : values) {
      total += value;
    }
    return total;
  }
}
// Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package org.chromium.chrome.browser.share.send_tab_to_self;

import android.content.Context;
import android.content.Intent;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.net.Uri;
import android.provider.Browser;
import android.text.SpannableString;
import android.text.method.LinkMovementMethod;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemClickListener;
import android.widget.ListView;
import android.widget.TextView;

import org.chromium.base.IntentUtils;
import org.chromium.base.metrics.RecordUserAction;
import org.chromium.chrome.R;
import org.chromium.chrome.browser.browserservices.intents.WebappConstants;
import org.chromium.chrome.browser.document.ChromeLauncherActivity;
import org.chromium.chrome.browser.flags.ChromeFeatureList;
import org.chromium.chrome.browser.profiles.Profile;
import org.chromium.chrome.browser.signin.services.IdentityServicesProvider;
import org.chromium.components.browser_ui.bottomsheet.BottomSheetContent;
import org.chromium.components.browser_ui.bottomsheet.BottomSheetController;
import org.chromium.components.browser_ui.widget.RoundedCornerImageView;
import org.chromium.components.embedder_support.util.UrlConstants;
import org.chromium.components.signin.base.AccountInfo;
import org.chromium.components.signin.identitymanager.ConsentLevel;
import org.chromium.components.signin.identitymanager.IdentityManager;
import org.chromium.ui.text.NoUnderlineClickableSpan;
import org.chromium.ui.text.SpanApplier;
import org.chromium.ui.widget.Toast;

import java.util.ArrayList;
import java.util.List;

/**
 * Bottom sheet content to display a list of devices a user can send a tab to after they have
 * chosen to share it with themselves through the SendTabToSelfFeature. If the user is signed-out
 * or no target devices are available, a prompt will be shown indicating to the user that
 * they must sign in to use the feature.
 */
public class DevicePickerBottomSheetContent implements BottomSheetContent, OnItemClickListener {
    private final Context mContext;
    private final BottomSheetController mController;
    // Built once in the constructor via createToolbarView()/createContentView().
    private ViewGroup mToolbarView;
    private ViewGroup mContentView;
    private final DevicePickerBottomSheetAdapter mAdapter;
    private final Profile mProfile;
    // Shared-tab parameters passed through to SendTabToSelfAndroidBridge.addEntry() on click.
    private final String mUrl;
    private final String mTitle;
    private final long mNavigationTime;

    // Size of the account avatar shown next to the "manage devices" link, in dp.
    private static final int ACCOUNT_AVATAR_SIZE_DP = 24;

    public DevicePickerBottomSheetContent(Context context, String url, String title,
            long navigationTime, BottomSheetController controller) {
        mContext = context;
        mController = controller;
        mProfile = Profile.getLastUsedRegularProfile();
        mAdapter = new DevicePickerBottomSheetAdapter(mProfile);
        mUrl = url;
        mTitle = title;
        mNavigationTime = navigationTime;

        // Both views are created eagerly so getToolbarView()/getContentView() never return null.
        createToolbarView();
        createContentView();
    }

    // Inflates the toolbar and sets its static title text.
    private void createToolbarView() {
        mToolbarView = (ViewGroup) LayoutInflater.from(mContext).inflate(
                R.layout.send_tab_to_self_device_picker_toolbar, null);
        TextView toolbarText = mToolbarView.findViewById(R.id.device_picker_toolbar);
        toolbarText.setText(R.string.send_tab_to_self_sheet_toolbar);
    }

    // Builds either the "feature unavailable" prompt (no target devices) or the device list.
    private void createContentView() {
        List<TargetDeviceInfo> targetDeviceList = new ArrayList<TargetDeviceInfo>();
        SendTabToSelfAndroidBridgeJni.get().getAllTargetDeviceInfos(mProfile, targetDeviceList);

        // First check if sharing is unavailable, e.g. because there are no target devices. If so,
        // show |sharingUnavailableView|, modulo adjusting the strings and the visibility of the
        // settings button.
        ViewGroup sharingUnavailableView = (ViewGroup) LayoutInflater.from(mContext).inflate(
                R.layout.send_tab_to_self_feature_unavailable_prompt, null);
        TextView title = sharingUnavailableView.findViewById(R.id.title);
        TextView instructionsToEnable =
                sharingUnavailableView.findViewById(R.id.instructions_to_enable);

        if (targetDeviceList.isEmpty()) {
            mContentView = sharingUnavailableView;
            title.setText(R.string.send_tab_to_self_share_activity_title);
            instructionsToEnable.setText(R.string.send_tab_to_self_when_signed_in_unavailable);
            mToolbarView.setVisibility(View.GONE);
            // TODO(crbug.com/1298185): This is cumulating both signed-out and single device users.
            // Those should be recorded separately instead.
            RecordUserAction.record("SharingHubAndroid.SendTabToSelf.NoTargetDevices");
            return;
        }

        // Sharing is available.
        mContentView = (ViewGroup) LayoutInflater.from(mContext).inflate(
                R.layout.send_tab_to_self_device_picker_list, null);
        ListView listView = mContentView.findViewById(R.id.device_picker_list);
        listView.setAdapter(mAdapter);
        listView.setOnItemClickListener(this);
        createManageDevicesLink(listView);
    }

    // Appends a footer with the signed-in account's avatar and a "manage devices" link that
    // opens the Google Account device activity page in a new tab.
    private void createManageDevicesLink(ListView deviceListView) {
        ViewGroup containerView = (ViewGroup) LayoutInflater.from(mContext).inflate(
                R.layout.send_tab_to_self_manage_devices_link, null);
        deviceListView.addFooterView(containerView);

        AccountInfo account = getSharingAccountInfo();
        assert account != null : "The user must be signed in to share a tab";
        // The avatar can be null in tests.
        if (account.getAccountImage() != null) {
            RoundedCornerImageView avatarView = containerView.findViewById(R.id.account_avatar);
            // Scale the dp constant to physical pixels for the current display density.
            int accountAvatarSizePx = Math.round(
                    ACCOUNT_AVATAR_SIZE_DP * mContext.getResources().getDisplayMetrics().density);
            avatarView.setImageBitmap(Bitmap.createScaledBitmap(
                    account.getAccountImage(), accountAvatarSizePx, accountAvatarSizePx, false));
            // Half the size as corner radius makes the avatar circular.
            avatarView.setRoundedCorners(accountAvatarSizePx / 2, accountAvatarSizePx / 2,
                    accountAvatarSizePx / 2, accountAvatarSizePx / 2);
        }

        Resources resources = mContext.getResources();
        // The link is opened in a new tab to avoid exiting the current page, which the user
        // possibly wants to share (maybe they just clicked "Manage devices" by mistake).
        SpannableString linkText = SpanApplier.applySpans(
                resources.getString(
                        R.string.send_tab_to_self_manage_devices_link, account.getEmail()),
                new SpanApplier.SpanInfo("<link>", "</link>",
                        new NoUnderlineClickableSpan(
                                mContext, this::openManageDevicesPageInNewTab)));
        TextView linkView = containerView.findViewById(R.id.manage_devices_link);
        linkView.setText(linkText);
        linkView.setMovementMethod(LinkMovementMethod.getInstance());
    }

    // Launches the Google Account device-activity URL in a (possibly reused) Chrome tab.
    private void openManageDevicesPageInNewTab(View unused) {
        Intent intent =
                new Intent()
                        .setAction(Intent.ACTION_VIEW)
                        .setData(Uri.parse(UrlConstants.GOOGLE_ACCOUNT_DEVICE_ACTIVITY_URL))
                        .setClass(mContext, ChromeLauncherActivity.class)
                        .addFlags(Intent.FLAG_ACTIVITY_NEW_TASK)
                        .putExtra(Browser.EXTRA_APPLICATION_ID, mContext.getPackageName())
                        .putExtra(WebappConstants.REUSE_URL_MATCHING_TAB_ELSE_NEW_TAB, true);
        IntentUtils.addTrustedIntentExtras(intent);
        mContext.startActivity(intent);
    }

    @Override
    public View getContentView() {
        return mContentView;
    }

    @Override
    public View getToolbarView() {
        return mToolbarView;
    }

    @Override
    public int getVerticalScrollOffset() {
        return 0;
    }

    @Override
    public void destroy() {}

    @Override
    public int getPriority() {
        return BottomSheetContent.ContentPriority.HIGH;
    }

    @Override
    public boolean swipeToDismissEnabled() {
        // This ensures that the bottom sheet reappears after the first time. Otherwise, the
        // second time that a user initiates a share, the bottom sheet does not re-appear.
        return true;
    }

    @Override
    public int getPeekHeight() {
        // Return DISABLED to ensure that the entire bottom sheet is shown.
        return BottomSheetContent.HeightMode.DISABLED;
    }

    @Override
    public float getFullHeightRatio() {
        // Return WRAP_CONTENT to have the bottom sheet only open as far as it needs to display the
        // list of devices and nothing beyond that.
        return BottomSheetContent.HeightMode.WRAP_CONTENT;
    }

    @Override
    public int getSheetContentDescriptionStringId() {
        return R.string.send_tab_to_self_content_description;
    }

    @Override
    public int getSheetHalfHeightAccessibilityStringId() {
        return R.string.send_tab_to_self_sheet_half_height;
    }

    @Override
    public int getSheetFullHeightAccessibilityStringId() {
        return R.string.send_tab_to_self_sheet_full_height;
    }

    @Override
    public int getSheetClosedAccessibilityStringId() {
        return R.string.send_tab_to_self_sheet_closed;
    }

    /**
     * Sends the shared tab to the tapped device, shows a confirmation toast (wording depends on
     * the SEND_TAB_TO_SELF_V2 / UPCOMING_SHARING_FEATURES flags), and closes the sheet.
     */
    @Override
    public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
        MetricsRecorder.recordDeviceClickedInShareSheet();
        TargetDeviceInfo targetDeviceInfo = mAdapter.getItem(position);
        SendTabToSelfAndroidBridge.addEntry(
                mProfile, mUrl, mTitle, mNavigationTime, targetDeviceInfo.cacheGuid);

        Resources res = mContext.getResources();
        if (ChromeFeatureList.isEnabled(ChromeFeatureList.SEND_TAB_TO_SELF_V2)
                || ChromeFeatureList.isEnabled(ChromeFeatureList.UPCOMING_SHARING_FEATURES)) {
            // V2 toast names a device category (phone/computer/generic) instead of the device name.
            String deviceType = res.getString(R.string.send_tab_to_self_device_type_generic);
            if (targetDeviceInfo.deviceType == TargetDeviceInfo.DeviceType.PHONE) {
                deviceType = res.getString(R.string.send_tab_to_self_device_type_phone);
            } else if (targetDeviceInfo.deviceType == TargetDeviceInfo.DeviceType.WIN
                    || targetDeviceInfo.deviceType == TargetDeviceInfo.DeviceType.MACOSX
                    || targetDeviceInfo.deviceType == TargetDeviceInfo.DeviceType.LINUX
                    || targetDeviceInfo.deviceType == TargetDeviceInfo.DeviceType.CHROMEOS) {
                deviceType = res.getString(R.string.send_tab_to_self_device_type_computer);
            }
            String toastMessage = res.getString(R.string.send_tab_to_self_v2_toast, deviceType);
            Toast.makeText(mContext, toastMessage, Toast.LENGTH_SHORT).show();
        } else {
            String toastMessage =
                    res.getString(R.string.send_tab_to_self_toast, targetDeviceInfo.deviceName);
            Toast.makeText(mContext, toastMessage, Toast.LENGTH_SHORT).show();
        }
        mController.hideContent(this, true);
    }

    // Looks up extended info for the primary (SIGNIN consent) account; NPEs if signed out —
    // callers must only reach this when the sharing UI is available. See the assert in
    // createManageDevicesLink().
    private static AccountInfo getSharingAccountInfo() {
        IdentityManager identityManager = IdentityServicesProvider.get().getIdentityManager(
                Profile.getLastUsedRegularProfile());
        return identityManager.findExtendedAccountInfoByEmailAddress(
                identityManager.getPrimaryAccountInfo(ConsentLevel.SIGNIN).getEmail());
    }
}
package com.miguelgaeta.message_parser;

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonToken;
import com.fasterxml.jackson.core.ObjectCodec;
import com.fasterxml.jackson.core.io.IOContext;
import com.fasterxml.jackson.core.json.ReaderBasedJsonParser;
import com.fasterxml.jackson.core.sym.CharsToNameCanonicalizer;

import java.io.IOException;
import java.io.Reader;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * {@link MessageParser} implementation backed by Jackson's streaming
 * {@link ReaderBasedJsonParser}.
 *
 * <p>Every {@code nextXxx()} accessor reads the value at the parser's current
 * token and then calls {@code nextToken()} to advance the shared cursor, so the
 * call order of these methods is behavior-critical: each method assumes the
 * cursor is already positioned on the element it should consume.
 */
@SuppressWarnings("UnusedDeclaration")
public class MessageParserImplJackson extends ReaderBasedJsonParser implements MessageParser {

    /** Wraps {@code reader} and advances to the first token. */
    public MessageParserImplJackson(final Reader reader) throws IOException {
        this(new Factory().configure(reader));
    }

    // Factory carries the pieces ReaderBasedJsonParser's constructor needs;
    // nextToken() primes the cursor so accessors start on a real token.
    private MessageParserImplJackson(final Factory factory) throws IOException {
        super(
            factory.context,
            factory.parserFeatures,
            factory.decoratedReader,
            factory.objectCodec,
            factory.charsToNameCanonicalizer);
        nextToken();
    }

    /**
     * Consumes a START_OBJECT token if present.
     *
     * @return true when the cursor was on an object start; the cursor is
     *         advanced one token in either case.
     */
    @Override
    public boolean beginObjectStructure() throws IOException {
        if (isExpectedStartObjectToken()) {
            nextToken();
            return true;
        }
        nextToken();
        return false;
    }

    /** Steps past the current END_OBJECT token. */
    @Override
    public void endObject() throws IOException {
        nextToken();
    }

    /** True while the cursor has not reached the end of the current array/object. */
    @Override
    public boolean hasNext() throws IOException {
        return getCurrentToken() != JsonToken.END_ARRAY
            && getCurrentToken() != JsonToken.END_OBJECT;
    }

    /** Returns the current field name and advances onto its value token. */
    @Override
    public String nextName() throws IOException {
        final String name = getCurrentName();
        nextToken();
        return name;
    }

    /** Skips the current value (including nested children) entirely. */
    @Override
    public void skipValue() throws IOException {
        skipChildren();
        nextToken();
    }

    /** Consumes a null value token. */
    @Override
    public void nextNull() throws IOException {
        nextToken();
    }

    @Override
    public String nextString() throws IOException {
        final String value = getValueAsString();
        nextToken();
        return value;
    }

    @Override
    public String nextString(String defaultValue) throws IOException {
        final String value = getValueAsString(defaultValue);
        nextToken();
        return value;
    }

    @Override
    public String nextStringOrNull() throws IOException {
        final String value = getValueAsString(null);
        nextToken();
        return value;
    }

    @Override
    public boolean nextBoolean() throws IOException {
        final boolean value = getValueAsBoolean();
        nextToken();
        return value;
    }

    @Override
    public boolean nextBoolean(boolean defaultValue) throws IOException {
        final boolean value = getValueAsBoolean(defaultValue);
        nextToken();
        return value;
    }

    // The "OrNull" variants inspect the token first so an explicit JSON null
    // maps to a Java null instead of the primitive default.
    @Override
    public Boolean nextBooleanOrNull() throws IOException {
        final Boolean value = getCurrentToken() == JsonToken.VALUE_NULL ? null : getValueAsBoolean();
        nextToken();
        return value;
    }

    @Override
    public double nextDouble() throws IOException {
        final double value = getValueAsDouble();
        nextToken();
        return value;
    }

    @Override
    public double nextDouble(double defaultValue) throws IOException {
        final double value = getValueAsDouble(defaultValue);
        nextToken();
        return value;
    }

    @Override
    public Double nextDoubleOrNull() throws IOException {
        final Double value = getCurrentToken() == JsonToken.VALUE_NULL ? null : getValueAsDouble();
        nextToken();
        return value;
    }

    @Override
    public long nextLong() throws IOException {
        final long value = getValueAsLong();
        nextToken();
        return value;
    }

    @Override
    public long nextLong(long defaultValue) throws IOException {
        final long value = getValueAsLong(defaultValue);
        nextToken();
        return value;
    }

    @Override
    public Long nextLongOrNull() throws IOException {
        final Long value = getCurrentToken() == JsonToken.VALUE_NULL ? null : getValueAsLong();
        nextToken();
        return value;
    }

    @Override
    public int nextInt() throws IOException {
        final int value = getValueAsInt();
        nextToken();
        return value;
    }

    @Override
    public int nextInt(int defaultValue) throws IOException {
        final int value = getValueAsInt(defaultValue);
        nextToken();
        return value;
    }

    @Override
    public Integer nextIntOrNull() throws IOException {
        final Integer value = getCurrentToken() == JsonToken.VALUE_NULL ? null : getValueAsInt();
        nextToken();
        return value;
    }

    /**
     * Reads a JSON array into a list; the {@code item} callback consumes one
     * element per invocation. Null items are always dropped in this overload.
     */
    @Override
    public <T> List<T> nextList(ListItem<T> item) throws IOException {
        final List<T> list = new ArrayList<>();
        nextToken();
        while (hasNext()) {
            final T i = item.get();
            if (i != null) {
                list.add(i);
            }
        }
        nextToken();
        return list;
    }

    /** Like {@link #nextList(ListItem)} but null-dropping is opt-in via {@code filterNull}. */
    @Override
    public <T> List<T> nextList(ListItem<T> item, boolean filterNull) throws IOException {
        final List<T> list = new ArrayList<>();
        nextToken();
        while (hasNext()) {
            final T i = item.get();
            if (!filterNull || i != null) {
                list.add(i);
            }
        }
        nextToken();
        return list;
    }

    /** Variant that lets the caller supply the backing list via {@code initializer}. */
    @Override
    public <T> List<T> nextList(ListItem<T> item, boolean filterNull, ListInitializer<T> initializer) throws IOException {
        final List<T> list = initializer.get();
        nextToken();
        while (hasNext()) {
            final T i = item.get();
            if (!filterNull || i != null) {
                list.add(i);
            }
        }
        nextToken();
        return list;
    }

    /**
     * Reads a JSON array into a map, keying each parsed element through
     * {@code key}. Null elements are skipped in this overload.
     */
    @Override
    public <K, V> Map<K, V> nextListAsMap(ListItem<V> item, MapKey<K, V> key) throws IOException {
        final Map<K, V> map = new HashMap<>();
        nextToken();
        while (hasNext()) {
            final V i = item.get();
            if (i != null) {
                map.put(key.get(i), i);
            }
        }
        nextToken();
        return map;
    }

    @Override
    public <K, V> Map<K, V> nextListAsMap(ListItem<V> item, MapKey<K, V> key, boolean filterNull) throws IOException {
        final Map<K, V> map = new HashMap<>();
        nextToken();
        while (hasNext()) {
            final V i = item.get();
            if (!filterNull || i != null) {
                map.put(key.get(i), i);
            }
        }
        nextToken();
        return map;
    }

    /** Variant that lets the caller supply the backing map via {@code initializer}. */
    @Override
    public <K, V> Map<K, V> nextListAsMap(ListItem<V> item, MapKey<K, V> key, boolean filterNull, MapInitializer<K, V> initializer) throws IOException {
        final Map<K, V> map = initializer.get();
        nextToken();
        while (hasNext()) {
            final V i = item.get();
            if (!filterNull || i != null) {
                map.put(key.get(i), i);
            }
        }
        nextToken();
        return map;
    }

    /**
     * Drives {@code handler} over each field of the current object.
     *
     * @return true when the cursor was on an object, false otherwise
     */
    @Override
    public boolean nextObject(ObjectFieldAssigner handler) throws IOException {
        if (beginObjectStructure()) {
            while (hasNext()) {
                handler.assign();
            }
            endObject();
            return true;
        }
        return false;
    }

    /** Delegates data-binding of the current subtree to Jackson's ObjectCodec. */
    @Override
    public <T> T readObject(Class<T> type) throws IOException {
        return readValueAs(type);
    }

    /** Returns this parser itself; callers are expected to request a compatible type. */
    @Override
    public <T> T getReader(Class<T> type) {
        //noinspection unchecked
        return (T) this;
    }

    /** Steps past the current START_ARRAY token. */
    @Override
    public void beginArray() throws IOException {
        nextToken();
    }

    /** Steps past the current END_ARRAY token. */
    @Override
    public void endArray() throws IOException {
        nextToken();
    }

    /**
     * Helper that exposes the protected {@link JsonFactory} plumbing
     * (context, features, symbol table) needed by the parser constructor.
     */
    @SuppressWarnings("SpellCheckingInspection")
    static class Factory extends JsonFactory {

        IOContext context;
        int parserFeatures;
        Reader decoratedReader;
        ObjectCodec objectCodec;
        CharsToNameCanonicalizer charsToNameCanonicalizer;

        /** Captures the per-reader parser inputs and returns itself for chaining. */
        public Factory configure(final Reader reader) throws IOException {
            context = _createContext(reader, false);
            parserFeatures = _parserFeatures;
            decoratedReader = _decorate(reader, context);
            objectCodec = _objectCodec;
            charsToNameCanonicalizer = _rootCharSymbols.makeChild(_factoryFeatures);
            return this;
        }
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hdfs; import static org.junit.Assert.*; import java.io.IOException; import java.net.InetSocketAddress; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Random; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hdfs.protocol.Block; import org.apache.hadoop.hdfs.protocol.ClientProtocol; import org.apache.hadoop.hdfs.protocol.DatanodeInfo; import org.apache.hadoop.hdfs.protocol.HdfsConstants; import org.apache.hadoop.hdfs.protocol.HdfsConstants.DatanodeReportType; import org.apache.hadoop.hdfs.protocol.LocatedBlock; import org.apache.hadoop.hdfs.protocol.LocatedBlocks; import org.apache.hadoop.hdfs.server.blockmanagement.DatanodeDescriptor; import org.apache.hadoop.hdfs.server.datanode.DataNode; import org.apache.hadoop.hdfs.server.datanode.DataNodeTestUtils; import org.apache.hadoop.hdfs.server.protocol.BlocksWithLocations.BlockWithLocations; import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocol; import 
org.apache.hadoop.ipc.RemoteException;
import org.junit.Test;

/**
 * This class tests if getblocks request works correctly.
 */
public class TestGetBlocks {
  private static final int blockSize = 8192;
  private static final String racks[] = new String[] { "/d1/r1", "/d1/r1",
      "/d1/r2", "/d1/r2", "/d1/r2", "/d2/r3", "/d2/r3" };
  private static final int numDatanodes = racks.length;

  /**
   * Stop the heartbeat of a datanode in the MiniDFSCluster
   *
   * @param cluster
   *          The MiniDFSCluster
   * @param hostName
   *          The hostName of the datanode to be stopped
   * @return The DataNode whose heartbeat has been stopped
   */
  private DataNode stopDataNodeHeartbeat(MiniDFSCluster cluster,
      String hostName) {
    for (DataNode dn : cluster.getDataNodes()) {
      if (dn.getDatanodeId().getHostName().equals(hostName)) {
        DataNodeTestUtils.setHeartbeatsDisabledForTests(dn, true);
        return dn;
      }
    }
    return null;
  }

  /**
   * Test if the datanodes returned by
   * {@link ClientProtocol#getBlockLocations(String, long, long)} is correct
   * when stale nodes checking is enabled. Also test during the scenario when 1)
   * stale nodes checking is enabled, 2) a writing is going on, 3) a datanode
   * becomes stale happen simultaneously
   *
   * @throws Exception
   */
  @Test
  public void testReadSelectNonStaleDatanode() throws Exception {
    HdfsConfiguration conf = new HdfsConfiguration();
    conf.setBoolean(DFSConfigKeys.DFS_NAMENODE_AVOID_STALE_DATANODE_FOR_READ_KEY,
        true);
    long staleInterval = 30 * 1000 * 60;
    conf.setLong(DFSConfigKeys.DFS_NAMENODE_STALE_DATANODE_INTERVAL_KEY,
        staleInterval);
    MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf)
        .numDataNodes(numDatanodes).racks(racks).build();
    cluster.waitActive();
    InetSocketAddress addr = new InetSocketAddress("localhost",
        cluster.getNameNodePort());
    DFSClient client = new DFSClient(addr, conf);

    List<DatanodeDescriptor> nodeInfoList = cluster.getNameNode()
        .getNamesystem().getBlockManager().getDatanodeManager()
        .getDatanodeListForReport(DatanodeReportType.LIVE);
    assertEquals("Unexpected number of datanodes", numDatanodes,
        nodeInfoList.size());
    FileSystem fileSys = cluster.getFileSystem();
    FSDataOutputStream stm = null;
    try {
      // do the writing but do not close the FSDataOutputStream
      // in order to mimic the ongoing writing
      final Path fileName = new Path("/file1");
      stm = fileSys.create(fileName, true,
          fileSys.getConf().getInt(
              CommonConfigurationKeys.IO_FILE_BUFFER_SIZE_KEY, 4096),
          (short) 3, blockSize);
      stm.write(new byte[(blockSize * 3) / 2]);
      // We do not close the stream so that
      // the writing seems to be still ongoing
      stm.hflush();

      LocatedBlocks blocks = client.getNamenode().getBlockLocations(
          fileName.toString(), 0, blockSize);
      DatanodeInfo[] nodes = blocks.get(0).getLocations();
      assertEquals(nodes.length, 3);
      DataNode staleNode = null;
      DatanodeDescriptor staleNodeInfo = null;
      // stop the heartbeat of the first node
      staleNode = this.stopDataNodeHeartbeat(cluster, nodes[0].getHostName());
      assertNotNull(staleNode);
      // set the first node as stale
      staleNodeInfo = cluster.getNameNode().getNamesystem().getBlockManager()
          .getDatanodeManager()
          .getDatanode(staleNode.getDatanodeId());
      DFSTestUtil.resetLastUpdatesWithOffset(staleNodeInfo,
          -(staleInterval + 1));

      LocatedBlocks blocksAfterStale = client.getNamenode().getBlockLocations(
          fileName.toString(), 0, blockSize);
      DatanodeInfo[] nodesAfterStale = blocksAfterStale.get(0).getLocations();
      assertEquals(nodesAfterStale.length, 3);
      // the stale node must be sorted to the end of the returned locations
      assertEquals(nodesAfterStale[2].getHostName(), nodes[0].getHostName());

      // restart the staleNode's heartbeat
      DataNodeTestUtils.setHeartbeatsDisabledForTests(staleNode, false);
      // reset the first node as non-stale, so as to avoid two stale nodes
      DFSTestUtil.resetLastUpdatesWithOffset(staleNodeInfo, 0);
      LocatedBlock lastBlock = client.getLocatedBlocks(fileName.toString(), 0,
          Long.MAX_VALUE).getLastLocatedBlock();
      nodes = lastBlock.getLocations();
      assertEquals(nodes.length, 3);
      // stop the heartbeat of the first node for the last block
      staleNode = this.stopDataNodeHeartbeat(cluster, nodes[0].getHostName());
      assertNotNull(staleNode);
      // set the node as stale
      DatanodeDescriptor dnDesc = cluster.getNameNode().getNamesystem()
          .getBlockManager().getDatanodeManager()
          .getDatanode(staleNode.getDatanodeId());
      DFSTestUtil.resetLastUpdatesWithOffset(dnDesc, -(staleInterval + 1));

      LocatedBlock lastBlockAfterStale = client.getLocatedBlocks(
          fileName.toString(), 0, Long.MAX_VALUE).getLastLocatedBlock();
      nodesAfterStale = lastBlockAfterStale.getLocations();
      assertEquals(nodesAfterStale.length, 3);
      assertEquals(nodesAfterStale[2].getHostName(), nodes[0].getHostName());
    } finally {
      if (stm != null) {
        stm.close();
      }
      client.close();
      cluster.shutdown();
    }
  }

  /** test getBlocks */
  @Test
  public void testGetBlocks() throws Exception {
    final Configuration CONF = new HdfsConfiguration();

    final short REPLICATION_FACTOR = (short) 2;
    final int DEFAULT_BLOCK_SIZE = 1024;

    CONF.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, DEFAULT_BLOCK_SIZE);
    MiniDFSCluster cluster = new MiniDFSCluster.Builder(CONF).numDataNodes(
        REPLICATION_FACTOR).build();
    try {
      cluster.waitActive();
      long fileLen = 2 * DEFAULT_BLOCK_SIZE;
      DFSTestUtil.createFile(cluster.getFileSystem(), new Path("/tmp.txt"),
          fileLen, REPLICATION_FACTOR, 0L);

      // get blocks & data nodes
      List<LocatedBlock> locatedBlocks;
      DatanodeInfo[] dataNodes = null;
      boolean notWritten;
      // loop until both blocks report full replication
      do {
        final DFSClient dfsclient = new DFSClient(
            DFSUtilClient.getNNAddress(CONF), CONF);
        locatedBlocks = dfsclient.getNamenode()
            .getBlockLocations("/tmp.txt", 0, fileLen).getLocatedBlocks();
        assertEquals(2, locatedBlocks.size());
        notWritten = false;
        for (int i = 0; i < 2; i++) {
          dataNodes = locatedBlocks.get(i).getLocations();
          if (dataNodes.length != REPLICATION_FACTOR) {
            notWritten = true;
            try {
              Thread.sleep(10);
            } catch (InterruptedException e) {
              // ignored: benign busy-wait in a test loop
            }
            break;
          }
        }
      } while (notWritten);

      // get RPC client to namenode
      InetSocketAddress addr = new InetSocketAddress("localhost",
          cluster.getNameNodePort());
      NamenodeProtocol namenode = NameNodeProxies.createProxy(CONF,
          DFSUtilClient.getNNUri(addr), NamenodeProtocol.class).getProxy();

      // get blocks of size fileLen from dataNodes[0]
      BlockWithLocations[] locs;
      locs = namenode.getBlocks(dataNodes[0], fileLen).getBlocks();
      assertEquals(locs.length, 2);
      assertEquals(locs[0].getStorageIDs().length, 2);
      assertEquals(locs[1].getStorageIDs().length, 2);

      // get blocks of size BlockSize from dataNodes[0]
      locs = namenode.getBlocks(dataNodes[0], DEFAULT_BLOCK_SIZE).getBlocks();
      assertEquals(locs.length, 1);
      assertEquals(locs[0].getStorageIDs().length, 2);

      // get blocks of size 1 from dataNodes[0]
      locs = namenode.getBlocks(dataNodes[0], 1).getBlocks();
      assertEquals(locs.length, 1);
      assertEquals(locs[0].getStorageIDs().length, 2);

      // get blocks of size 0 from dataNodes[0]
      getBlocksWithException(namenode, dataNodes[0], 0);

      // get blocks of size -1 from dataNodes[0]
      getBlocksWithException(namenode, dataNodes[0], -1);

      // get blocks of size BlockSize from a non-existent datanode
      DatanodeInfo info = DFSTestUtil.getDatanodeInfo("1.2.3.4");
      getBlocksWithException(namenode, info, 2);
    } finally {
      cluster.shutdown();
    }
  }

  /**
   * Asserts that {@code namenode.getBlocks(datanode, size)} fails with a
   * HadoopIllegalArgumentException (wrapped in a RemoteException) for the
   * given arguments.
   *
   * @param namenode RPC proxy to the namenode under test
   * @param datanode the datanode to request blocks from
   * @param size the total block size to request
   */
  private void getBlocksWithException(NamenodeProtocol namenode,
      DatanodeInfo datanode, long size) throws IOException {
    boolean getException = false;
    try {
      // BUG FIX: this previously called
      // namenode.getBlocks(DFSTestUtil.getLocalDatanodeInfo(), 2), ignoring
      // both parameters — so the size-0, size-(-1) and non-existent-datanode
      // cases above all exercised the very same call instead of the invalid
      // arguments they claim to test. Use the caller-supplied arguments.
      namenode.getBlocks(datanode, size);
    } catch (RemoteException e) {
      getException = true;
      assertTrue(e.getClassName().contains("HadoopIllegalArgumentException"));
    }
    assertTrue(getException);
  }

  /**
   * Verifies that {@link Block} equality/hashing is keyed on block id and is
   * independent of the generation stamp, so map lookups with a
   * GRANDFATHER_GENERATION_STAMP still find entries stored with real stamps.
   */
  @Test
  public void testBlockKey() {
    Map<Block, Long> map = new HashMap<Block, Long>();
    final Random RAN = new Random();
    final long seed = RAN.nextLong();
    System.out.println("seed=" + seed);
    RAN.setSeed(seed);

    long[] blkids = new long[10];
    for (int i = 0; i < blkids.length; i++) {
      blkids[i] = 1000L + RAN.nextInt(100000);
      map.put(new Block(blkids[i], 0, blkids[i]), blkids[i]);
    }
    System.out.println("map=" + map.toString().replace(",", "\n  "));

    for (int i = 0; i < blkids.length; i++) {
      Block b = new Block(blkids[i], 0,
          HdfsConstants.GRANDFATHER_GENERATION_STAMP);
      Long v = map.get(b);
      System.out.println(b + " => " + v);
      assertEquals(blkids[i], v.longValue());
    }
  }
}
package org.soluvas.benchmarkemail;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Sample usage of {@link BMEApi}. Only for testing.
 *
 * <p>Each public method demonstrates one Benchmark Email API call. {@link #run()}
 * logs in first (all calls require the session token) and then invokes whichever
 * sample methods are uncommented. The exact request/response field names
 * (e.g. "id", "toListID") follow the remote API — TODO confirm against the
 * Benchmark Email API reference before relying on them.
 */
public class BMEApiMain {

    private static final Logger log = LoggerFactory.getLogger(BMEApiMain.class);

    private final String username = "Your Benchmark Email Login"; // Put in your account username
    private final String password = "Your Benchmark Email Password"; // Put in your account password
    // Session token returned by login(); empty means "not authenticated".
    private String token = "";
    private String listId = "BenchmarkEmail List ID - see listGet() method"; // put in ID of any list that you wish to use below
    private InterfaceBMEApi bmServices = null;

    public static void main(String[] args) {
        BMEApiMain obj = new BMEApiMain();
        obj.run();
    }

    // Authenticates, then runs whichever sample calls are uncommented below.
    private void run() {
        initialize(); // This gets the initial token thats used in all the examples below
        if (token.length() > 0) {
            // clientGetContactInfo();
            // tokenAdd();
            // tokenGet();
            // tokenDelete();
            listCreate();
            // listGet();
            // listDelete();
            // listAddContacts();
            // listGetContacts();
            // listGetContactDetails();
            // listUpdateContactDetails();
            // listDeleteContacts();
            // listUnsubscribeContacts();
            // emailCreate();
            // confirmEmailAdd();
            // confirmEmailList();
            // emailGet();
            // emailGetDetail();
            // emailSchedule();
            // emailSendNow();
            // emailUnSchedule();
            // emailSendTest();
            // emailUpdate();
            // emailAssignList();
            // emailDelete();
        } else {
            System.out.println("Failed to authenticate with BME");
        }
    }

    /** Prints the account-holder contact info key/value pairs. */
    public void clientGetContactInfo() {
        final Map<String, String> map = bmServices.clientGetContactInfo(token);
        for (final String key : map.keySet()) {
            final String value = map.get(key);
            System.out.print(key + " = " + value + "\t");
        }
    }

    /** Registers a fixed sample token with the account. */
    public void tokenAdd() {
        String mytoken = "adios123999";
        boolean flag = bmServices.tokenAdd(username, password, mytoken);
        System.out.print("flag = " + flag + "\t");
    }

    /** Removes the same fixed sample token from the account. */
    public void tokenDelete() {
        String mytoken = "adios123999";
        boolean flag = bmServices.tokenDelete(username, password, mytoken);
        System.out.print("flag = " + flag + "\t");
    }

    /** Lists all tokens registered for the account. */
    public void tokenGet() {
        Object[] mytokens = bmServices.tokenGet(username, password);
        int TokenSize = mytokens.length;
        int Counter = 0;
        for (final Object mytoken : mytokens) {
            Counter++;
            System.out.println(" Token ( " + Counter + " ) " + mytoken);
        } ;
    }

    /**
     * Creates a sample contact list and prints its ID; a failure is rethrown as
     * {@link BenchmarkEmailException} (the message presumes a name clash —
     * NOTE(review): other failure causes would be misreported).
     */
    public void listCreate() {
        String Mylistname = "My List Created from Java Now !!";
        try {
            String Output = bmServices.listCreate(token, Mylistname);
            System.out.println("ListID = " + Output);
        } catch (Exception e) {
            throw new BenchmarkEmailException(e, "List by name '%s' exists", Mylistname);
        }
    }

    /** Adds two sample contacts to {@link #listId}. */
    public void listAddContacts() {
        List<Map<String, Object>> contacts = new ArrayList<>();
        contacts.add(new HashMap());
        contacts.add(new HashMap());
        contacts.get(0).put("email", "sankey21@spideydomain.com");
        contacts.get(0).put("firstname", "Peter");
        contacts.get(0).put("lastname", "Parker");
        contacts.get(1).put("email", "sankey22@battydomain.com");
        contacts.get(1).put("firstname", "Bruce");
        contacts.get(1).put("lastname", "Wayne");
        int result = bmServices.listAddContacts(token, listId, contacts, null);
        System.out.println("Result = " + result);
    }

    /** Deletes the list identified by {@link #listId}. */
    public void listDelete() {
        boolean flag = bmServices.listDelete(token, listId);
        System.out.println("Flag = " + flag);
    }

    /** Looks up one contact by email and dumps every returned field. */
    public void listGetContactDetails() {
        String email = "user1@battydomain.com";
        Map<Object, Object> ContactDetail = bmServices.listGetContactDetails(token, listId, email);
        for (final Object key : ContactDetail.keySet()) {
            final Object value = ContactDetail.get(key);
            System.out.println(key + " = " + value + "(" + value.getClass().getSimpleName() + ")");
        } ;
    }

    /**
     * Fetches a contact, copies its fields, overwrites a few of them, and
     * submits the update; prints the server's echoed field map.
     */
    public void listUpdateContactDetails() {
        String email = "user1@battydomain.com";
        Map<Object, Object> ContactDetail = bmServices.listGetContactDetails(token, listId, email);
        String contactID = (String) ContactDetail.get("id");
        System.out.println("Contact ID = " + contactID);
        HashMap ContactDetailFinal = new HashMap();
        for (final Object key : ContactDetail.keySet()) {
            final Object value = ContactDetail.get(key);
            ContactDetailFinal.put(key, value);
        } ;
        ContactDetailFinal.put("Company Name", "Justice League United");
        ContactDetailFinal.put("FirstName", "Clark");
        ContactDetailFinal.put("LastName", "Kent");
        Map<Object, Object> output = bmServices.listUpdateContactDetails(token, listId, contactID, ContactDetailFinal);
        for (final Object key : output.keySet()) {
            final Object value = output.get(key);
            System.out.println(key + " = " + value + "(" + value.getClass().getSimpleName() + ")");
        } ;
    }

    /**
     * Deletes two contacts in one call by sending their IDs as a
     * comma-separated string.
     */
    public void listDeleteContacts() {
        String email = "Jean.grey@xmen.com";
        Map<Object, Object> ContactDetail = bmServices.listGetContactDetails(token, listId, email);
        StringBuffer str = new StringBuffer();
        String contactID = (String) ContactDetail.get("id");
        str.append(contactID).append(",");
        ContactDetail = bmServices.listGetContactDetails(token, listId, "tarzan@tarzan.com");
        contactID = (String) ContactDetail.get("id");
        str.append(contactID);
        contactID = str.toString();
        boolean flag = bmServices.listDeleteContacts(token, listId, contactID);
        System.out.println("Flag = " + flag);
    }

    /** Unsubscribes two sample addresses from {@link #listId}. */
    public void listUnsubscribeContacts() {
        String[] ContactAddress = new String[2];
        ContactAddress[0] = "Edgar.burroughs@wildfiresanket.com";
        ContactAddress[1] = "Bruce.banner@hulksanket.com";
        int result = bmServices.listUnsubscribeContacts(token, listId, ContactAddress);
        System.out.println("Result = " + result);
    }

    /** Creates a sample email campaign targeting {@link #listId}. */
    public void emailCreate() {
        HashMap emailDetail = new HashMap();
        emailDetail.put("fromEmail", "ash1@bmesrv.com");
        emailDetail.put("fromName", "Steve");
        emailDetail.put("emailName", "Sales Promo May 23 2012");
        emailDetail.put("replyEmail", "feedback@____.com");
        emailDetail.put("subject", "New Products launch at our store 1");
        emailDetail.put("templateContent", "<html><body> Hello World </body></html>");
        emailDetail.put("toListID", Integer.parseInt(listId.trim()));
        emailDetail.put("scheduleDate", "1 May 2010 5:00"); /* In UTC */
        emailDetail.put("webpageVersion", true);
        emailDetail.put("permissionReminderMessage", "You are receiving this email because of your relationship with our company. Unsubscribe is available at the bottom of this email.");
        String output = bmServices.emailCreate(token, emailDetail);
        System.out.println("output = " + output);
    }

    /** Registers confirm-addresses; an empty response string means success. */
    public void confirmEmailAdd() {
        String TargetEmailID = "user1@battydomain.com,user1@spideydomain.com";
        String Output = bmServices.confirmEmailAdd(token, TargetEmailID);
        if (Output.length() == 0) {
            System.out.println("Done " + Output);
        } else {
            System.out.println("Could not be added " + Output);
        } ;
    }

    /** Dumps all registered confirm-addresses with their field types. */
    public void confirmEmailList() {
        Object[] EmailIds = bmServices.confirmEmailList(token);
        int Counter = 0;
        for (final Object Email : EmailIds) {
            final Map<String, Object> map = (Map<String, Object>) Email;
            for (final Object key : map.keySet()) {
                final Object value = map.get(key);
                System.out.print(key + " = " + value + "(" + value.getClass().getSimpleName() + ")\t");
            } ;
            System.out.println();
        } ;
    }

    /** Pages through campaigns (first 10) and prints each campaign's fields. */
    public void emailGet() {
        int pageNumber = 1;
        int pageSize = 10;
        Object[] Campaigns = bmServices.emailGet(token, "", "", pageNumber, pageSize, "", "");
        int Counter = 0;
        for (final Object Campaign : Campaigns) {
            final Map<String, Object> map = (Map<String, Object>) Campaign;
            for (final Object key : map.keySet()) {
                final Object value = map.get(key);
                System.out.print(key + " = " + value + ")\t");
            } ;
            System.out.println();
        } ;
    }

    /** Fetches the first campaign's ID, then prints its full detail map. */
    public void emailGetDetail() {
        int pageNumber = 1;
        int pageSize = 1;
        Object[] Campaigns = bmServices.emailGet(token, "", "", pageNumber, pageSize, "", "");
        int Counter = 0;
        String EmailID = "";
        if (Campaigns.length > 0) {
            Map<String, Object> map = (Map<String, Object>) Campaigns[0];
            EmailID = (String) map.get("id");
            System.out.println("EmailID = " + EmailID);
        } ;
        if (EmailID.length() > 0) {
            Map<Object, Object> map = bmServices.emailGetDetail(token, EmailID);
            for (final Object key : map.keySet()) {
                final Object value = map.get(key);
                System.out.println(key + " = " + value);
            } ;
        } ;
    }

    /** Schedules the first campaign for a fixed future date. */
    public void emailSchedule() {
        int pageNumber = 1;
        int pageSize = 1;
        Object[] Campaigns = bmServices.emailGet(token, "", "", pageNumber, pageSize, "", "");
        String EmailID = "";
        if (Campaigns.length > 0) {
            Map<String, Object> map = (Map<String, Object>) Campaigns[0];
            EmailID = (String) map.get("id");
            System.out.println("EmailID = " + EmailID);
        } ;
        try {
            boolean allok = bmServices.emailSchedule(token, EmailID, "14 Jul 2020 12:00");
            System.out.println(allok);
        } catch (Exception ex) {
            System.out.println(ex.getMessage());
        } ;
    }

    /** Sends the first campaign immediately. */
    public void emailSendNow() {
        int pageNumber = 1;
        int pageSize = 1;
        Object[] Campaigns = bmServices.emailGet(token, "", "", pageNumber, pageSize, "", "");
        String EmailID = "";
        if (Campaigns.length > 0) {
            Map<String, Object> map = (Map<String, Object>) Campaigns[0];
            EmailID = (String) map.get("id");
            System.out.println("EmailID = " + EmailID);
        } ;
        try {
            boolean allok = bmServices.emailSendNow(token, EmailID);
            System.out.println(allok);
        } catch (Exception ex) {
            System.out.println(ex.getMessage());
        } ;
    }

    /** Cancels the first campaign's schedule. */
    public void emailUnSchedule() {
        int pageNumber = 1;
        int pageSize = 1;
        Object[] Campaigns = bmServices.emailGet(token, "", "", pageNumber, pageSize, "", "");
        String EmailID = "";
        if (Campaigns.length > 0) {
            Map<String, Object> map = (Map<String, Object>) Campaigns[0];
            EmailID = (String) map.get("id");
            System.out.println("EmailID = " + EmailID);
        } ;
        try {
            boolean allok = bmServices.emailUnSchedule(token, EmailID);
            System.out.println(allok);
        } catch (Exception ex) {
            System.out.println(ex.getMessage());
        } ;
    }

    /** Sends a test copy of the first campaign to a fixed address. */
    public void emailSendTest() {
        int pageNumber = 1;
        int pageSize = 1;
        String testEmail = "yourname@sitedomain.com";
        String EmailID = "";
        Object[] Campaigns = bmServices.emailGet(token, "", "", pageNumber, pageSize, "", "");
        if (Campaigns.length > 0) {
            Map<String, Object> map = (Map<String, Object>) Campaigns[0];
            EmailID = (String) map.get("id");
            System.out.println("EmailID = " + EmailID);
        } ;
        try {
            boolean allok = bmServices.emailSendTest(token, EmailID, testEmail);
            System.out.println(allok);
        } catch (Exception ex) {
            System.out.println(ex.getMessage());
        } ;
    }

    /** Rewrites the first campaign's fields with sample values. */
    public void emailUpdate() {
        String EmailID = "";
        int pageNumber = 1;
        int pageSize = 1;
        Object[] Campaigns = bmServices.emailGet(token, "", "", pageNumber, pageSize, "", "");
        if (Campaigns.length > 0) {
            Map<String, Object> map = (Map<String, Object>) Campaigns[0];
            EmailID = (String) map.get("id");
        } ;
        HashMap emailUpdate = new HashMap();
        emailUpdate.put("id", EmailID);
        emailUpdate.put("fromEmail", "user1@sitedomain.com");
        emailUpdate.put("fromName", "Steve");
        emailUpdate.put("emailName", "Sales Promo May 10");
        emailUpdate.put("replyEmail", "feedback@sitedomain.com");
        emailUpdate.put("subject", "New Products launch at our store");
        emailUpdate.put("templateContent", "<html><body> Hello World </body></html>");
        emailUpdate.put("toListID", Integer.parseInt(listId.trim()));
        emailUpdate.put("permissionReminderMessage", "You are receiving this email because of your relationship with our company. Unsubscribe is available at the bottom of this email.");
        boolean output = bmServices.emailUpdate(token, emailUpdate);
        System.out.println("output = " + output);
    }

    /** Sample list dump; the actual call is left commented out. */
    public void listGet() {
        final int pageNumber = 1;
        final int pageSize = 100;
//        final List<Map<String, Object>> lists = bmServices.listGet(token, "", pageNumber, pageSize, "", "");
//        int counter = 0;
//        for (final Map<String, Object> list : lists) {
//            final Map<String, Object> map = list;
//            counter++;
//            System.out.print(counter + ". ");
//            for (final Object key : map.keySet()) {
//                final Object value = map.get(key);
//                System.out.print(key + " = " + value + " (" + value.getClass().getSimpleName() + ")\t");
//            }
//            System.out.println();
//        }
    }

    /** Prints the first page (2 entries) of contacts in {@link #listId}. */
    public void listGetContacts() {
        int pageNumber = 1;
        int pageSize = 2;
        List<Map<String, Object>> Contacts = bmServices.listGetContacts(token, listId, "", pageNumber, pageSize, "", "");
        int Counter = 0;
        for (final Map<String, Object> contact : Contacts) {
            final Map<String, Object> map = contact;
            for (final Object key : map.keySet()) {
                final Object value = map.get(key);
                System.out.print(key + " = " + value + "(" + value.getClass().getSimpleName() + ")\t");
            } ;
            System.out.println();
        } ;
    }

    /** Assigns the first list as the recipient of the first campaign. */
    public void emailAssignList() {
        HashMap emailLists[] = new HashMap[1];
        emailLists[0] = new HashMap();
        String EmailID = "";
        int pageNumber = 1;
        int pageSize = 2;
        String ListID = "";
        Object[] campaigns = bmServices.emailGet(token, "", "", pageNumber, pageSize, "", "");
        if (campaigns.length > 0) {
            Map<String, Object> map = (Map<String, Object>) campaigns[0];
            EmailID = (String) map.get("id");
        }
        List<Map<String, Object>> lists = bmServices.listGet(token, "", pageNumber, pageSize, "", "");
        if (lists.size() > 0) {
            HashMap Mylist = (HashMap) lists.get(0);
            ListID = (String) Mylist.get("id");
        }
        emailLists[0].put("emailID", EmailID);
        emailLists[0].put("toListID", ListID);
        boolean flag = bmServices.emailAssignList(token, EmailID, emailLists);
        System.out.println("output = " + flag);
    }

    /** Deletes the first campaign; failures are wrapped and rethrown. */
    public void emailDelete() {
        int pageNumber = 1;
        int pageSize = 1;
        Object[] Campaigns = bmServices.emailGet(token, "", "", pageNumber, pageSize, "", "");
        String EmailID = "";
        if (Campaigns.length > 0) {
            Map<String, Object> map = (Map<String, Object>) Campaigns[0];
            EmailID = (String) map.get("id");
            System.out.println("EmailID = " + EmailID);
        }
        try {
            boolean allok = bmServices.emailDelete(token, EmailID);
            log.info("delete {}: {}", EmailID, allok);
        } catch (Exception ex) {
            throw new BenchmarkEmailException(ex.getMessage(), ex);
        }
    }

    /** Logs in and stores the session token used by every other sample call. */
    public void initialize() {
        bmServices = BMEApi.getBMEServices();
        token = bmServices.login(username, password);
        if (token.length() > 0) {
            log.info("Logged in successfully with Token -> {}", token);
        } else {
            log.info("Failed to authenticate with BME");
        }
    }
}
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.trino.plugin.kudu; import com.google.common.collect.ImmutableMap; import com.google.common.primitives.Ints; import io.trino.Session; import io.trino.execution.Lifespan; import io.trino.execution.QueryStats; import io.trino.metadata.QualifiedObjectName; import io.trino.metadata.Split; import io.trino.metadata.TableHandle; import io.trino.operator.OperatorStats; import io.trino.security.AllowAllAccessControl; import io.trino.spi.QueryId; import io.trino.spi.connector.ColumnHandle; import io.trino.spi.connector.DynamicFilter; import io.trino.spi.predicate.TupleDomain; import io.trino.split.SplitSource; import io.trino.testing.AbstractTestQueryFramework; import io.trino.testing.DistributedQueryRunner; import io.trino.testing.MaterializedResult; import io.trino.testing.QueryRunner; import io.trino.testing.ResultWithQueryId; import io.trino.tpch.TpchTable; import io.trino.transaction.TransactionId; import io.trino.transaction.TransactionManager; import org.intellij.lang.annotations.Language; import org.testng.annotations.AfterClass; import org.testng.annotations.Test; import java.util.ArrayList; import java.util.List; import java.util.Optional; import java.util.concurrent.CompletableFuture; import java.util.concurrent.TimeUnit; import static com.google.common.collect.ImmutableList.toImmutableList; import static io.trino.SystemSessionProperties.JOIN_DISTRIBUTION_TYPE; import static 
io.trino.SystemSessionProperties.JOIN_REORDERING_STRATEGY;
import static io.trino.plugin.kudu.KuduQueryRunnerFactory.createKuduQueryRunnerTpch;
import static io.trino.spi.connector.ConnectorSplitManager.SplitSchedulingStrategy.UNGROUPED_SCHEDULING;
import static io.trino.spi.connector.NotPartitionedPartitionHandle.NOT_PARTITIONED;
import static io.trino.sql.analyzer.FeaturesConfig.JoinDistributionType.BROADCAST;
import static io.trino.sql.analyzer.FeaturesConfig.JoinReorderingStrategy.NONE;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertTrue;

/**
 * Integration tests verifying that the Kudu connector cooperates with
 * Trino's dynamic filtering machinery.
 */
public class TestKuduIntegrationDynamicFilter
        extends AbstractTestQueryFramework
{
    private TestingKuduServer kuduServer;

    @Override
    protected QueryRunner createQueryRunner()
            throws Exception
    {
        kuduServer = new TestingKuduServer();
        // Catalog-level timeout is effectively "forever"; individual tests
        // shorten it through a catalog session property when needed.
        return createKuduQueryRunnerTpch(
                kuduServer,
                Optional.of(""),
                ImmutableMap.of("dynamic_filtering_wait_timeout", "1h"),
                ImmutableMap.of(
                        "dynamic-filtering.small-broadcast.max-distinct-values-per-driver", "100",
                        "dynamic-filtering.small-broadcast.range-row-limit-per-driver", "100"),
                TpchTable.getTables());
    }

    @AfterClass(alwaysRun = true)
    public final void destroy()
    {
        kuduServer.close();
    }

    @Test(timeOut = 30_000)
    public void testIncompleteDynamicFilterTimeout()
            throws Exception
    {
        // Split enumeration must proceed once the (short) dynamic filter
        // wait expires, even though the filter itself never completes.
        QueryRunner queryRunner = getQueryRunner();
        TransactionManager txManager = queryRunner.getTransactionManager();
        TransactionId txId = txManager.beginTransaction(false);
        Session txSession = Session.builder(getSession())
                .setCatalogSessionProperty("kudu", "dynamic_filtering_wait_timeout", "1s")
                .build()
                .beginTransactionId(txId, txManager, new AllowAllAccessControl());
        QualifiedObjectName ordersName = new QualifiedObjectName("kudu", "tpch", "orders");
        Optional<TableHandle> ordersHandle = queryRunner.getMetadata().getTableHandle(txSession, ordersName);
        assertTrue(ordersHandle.isPresent());
        SplitSource source = queryRunner.getSplitManager()
                .getSplits(txSession, ordersHandle.get(), UNGROUPED_SCHEDULING, new IncompleteDynamicFilter());
        List<Split> collected = new ArrayList<>();
        while (!source.isFinished()) {
            collected.addAll(source.getNextBatch(NOT_PARTITIONED, Lifespan.taskWide(), 1000).get().getSplits());
        }
        source.close();
        assertFalse(collected.isEmpty());
    }

    /**
     * A dynamic filter that blocks for an hour and never reports completion,
     * used to exercise the wait-timeout path during split generation.
     */
    private static class IncompleteDynamicFilter
            implements DynamicFilter
    {
        @Override
        public CompletableFuture<?> isBlocked()
        {
            return CompletableFuture.runAsync(() -> {
                try {
                    TimeUnit.HOURS.sleep(1);
                }
                catch (InterruptedException e) {
                    throw new IllegalStateException(e);
                }
            });
        }

        @Override
        public boolean isComplete()
        {
            return false;
        }

        @Override
        public boolean isAwaitable()
        {
            return true;
        }

        @Override
        public TupleDomain<ColumnHandle> getCurrentPredicate()
        {
            return TupleDomain.all();
        }
    }

    @Test
    public void testJoinDynamicFilteringSingleValue()
    {
        // Join lineitem with a single row of orders
        assertDynamicFiltering(
                "SELECT * FROM lineitem JOIN orders ON lineitem.orderkey = orders.orderkey AND orders.comment = 'nstructions sleep furiously among '",
                withBroadcastJoin(),
                6,
                6, 1);
    }

    @Test
    public void testJoinDynamicFilteringBlockProbeSide()
    {
        // Wait for both build sides to finish before starting the scan of 'lineitem' table (should be very selective given the dynamic filters).
        assertDynamicFiltering(
                "SELECT l.comment" +
                        " FROM lineitem l, part p, orders o" +
                        " WHERE l.orderkey = o.orderkey AND o.comment = 'nstructions sleep furiously among '" +
                        " AND p.partkey = l.partkey AND p.comment = 'onic deposits'",
                withBroadcastJoinNonReordering(),
                1,
                1, 1, 1);
    }

    /**
     * Runs {@code selectQuery}, then checks both the result row count and
     * the per-scan-operator input row counts (the signal that dynamic
     * filtering actually pruned rows at the source).
     */
    private void assertDynamicFiltering(@Language("SQL") String selectQuery, Session session, int expectedRowCount, int... expectedOperatorRowsRead)
    {
        DistributedQueryRunner runner = getDistributedQueryRunner();
        ResultWithQueryId<MaterializedResult> outcome = runner.executeWithQueryId(session, selectQuery);
        assertEquals(outcome.getResult().getRowCount(), expectedRowCount);
        assertEquals(getOperatorRowsRead(runner, outcome.getQueryId()), Ints.asList(expectedOperatorRowsRead));
    }

    private Session withBroadcastJoin()
    {
        return Session.builder(getSession())
                .setSystemProperty(JOIN_DISTRIBUTION_TYPE, BROADCAST.name())
                .build();
    }

    private Session withBroadcastJoinNonReordering()
    {
        return Session.builder(getSession())
                .setSystemProperty(JOIN_DISTRIBUTION_TYPE, BROADCAST.name())
                .setSystemProperty(JOIN_REORDERING_STRATEGY, NONE.name())
                .build();
    }

    /**
     * Extracts the input-position count of every ScanFilterAndProjectOperator
     * in the finished query, in operator-summary order.
     */
    private static List<Integer> getOperatorRowsRead(DistributedQueryRunner runner, QueryId queryId)
    {
        QueryStats stats = runner.getCoordinator().getQueryManager().getFullQueryInfo(queryId).getQueryStats();
        return stats.getOperatorSummaries()
                .stream()
                .filter(summary -> summary.getOperatorType().equals("ScanFilterAndProjectOperator"))
                .map(OperatorStats::getInputPositions)
                .map(Math::toIntExact)
                .collect(toImmutableList());
    }
}
/* Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.apache.batik.apps.svgbrowser; import java.awt.event.ActionEvent; import java.awt.*; import java.io.BufferedReader; import java.io.File; import java.io.FileWriter; import java.io.InputStreamReader; import java.io.IOException; import java.io.Reader; import java.io.Writer; import java.net.Authenticator; import java.net.URLDecoder; import java.net.URLEncoder; import java.util.HashMap; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.ResourceBundle; import java.util.StringTokenizer; import java.util.Vector; import java.lang.reflect.InvocationHandler; import java.lang.reflect.Method; import java.lang.reflect.Proxy; import javax.swing.AbstractAction; import javax.swing.Action; import javax.swing.ImageIcon; import javax.swing.JOptionPane; import javax.swing.JProgressBar; import javax.swing.UIManager; import javax.swing.plaf.FontUIResource; import org.apache.batik.swing.JSVGCanvas; import org.apache.batik.swing.gvt.GVTTreeRendererAdapter; import org.apache.batik.swing.gvt.GVTTreeRendererEvent; import org.apache.batik.swing.svg.GVTTreeBuilderAdapter; import org.apache.batik.swing.svg.GVTTreeBuilderEvent; import 
org.apache.batik.swing.svg.SVGDocumentLoaderAdapter;
import org.apache.batik.swing.svg.SVGDocumentLoaderEvent;
import org.apache.batik.util.ApplicationSecurityEnforcer;
import org.apache.batik.util.Platform;
import org.apache.batik.util.ParsedURL;
import org.apache.batik.util.SVGConstants;
import org.apache.batik.util.XMLResourceDescriptor;
import org.apache.batik.util.gui.resource.ResourceManager;

/**
 * This class contains the main method of an SVG viewer.
 *
 * @author <a href="mailto:stephane@hillion.org">Stephane Hillion</a>
 * @version $Id: Main.java 498740 2007-01-22 18:35:57Z dvholten $
 */
public class Main implements Application {
    /**
     * Extension used in addition to the scriptType value
     * to read from the PreferenceManager whether or not the
     * scriptType can be loaded.
     */
    public static final String UNKNOWN_SCRIPT_TYPE_LOAD_KEY_EXTENSION = ".load";

    /**
     * User home property
     */
    public static final String PROPERTY_USER_HOME = "user.home";

    /**
     * System property for specifying an additional policy file.
     */
    public static final String PROPERTY_JAVA_SECURITY_POLICY = "java.security.policy";

    /**
     * Batik configuration sub-directory
     */
    public static final String BATIK_CONFIGURATION_SUBDIRECTORY = ".batik";

    /**
     * Name of the Squiggle configuration file
     */
    public static final String SQUIGGLE_CONFIGURATION_FILE = "preferences.xml";

    /**
     * Name of the Squiggle policy file
     */
    public static final String SQUIGGLE_POLICY_FILE = "__svgbrowser.policy";

    /**
     * Entry for granting network access to scripts
     */
    public static final String POLICY_GRANT_SCRIPT_NETWORK_ACCESS
        = "grant {\n permission java.net.SocketPermission \"*\", \"listen, connect, resolve, accept\";\n};\n\n";

    /**
     * Entry for granting file system access to scripts
     */
    public static final String POLICY_GRANT_SCRIPT_FILE_ACCESS
        = "grant {\n permission java.io.FilePermission \"<<ALL FILES>>\", \"read\";\n};\n\n";

    /**
     * Entry for the list of recently visited URI
     */
    public static final String PREFERENCE_KEY_VISITED_URI_LIST
        = "preference.key.visited.uri.list";

    /**
     * Entry for the maximum number of last visited URIs
     */
    public static final String PREFERENCE_KEY_VISITED_URI_LIST_LENGTH
        = "preference.key.visited.uri.list.length";

    /**
     * List of separators between URI values in the preference
     * file
     */
    public static final String URI_SEPARATOR = " ";

    /**
     * Default font-family value.
     */
    public static final String DEFAULT_DEFAULT_FONT_FAMILY
        = "Arial, Helvetica, sans-serif";

    /**
     * SVG initialization file, used to trigger loading of most of
     * the Batik classes
     */
    public static final String SVG_INITIALIZATION = "resources/init.svg";

    /**
     * Stores the initialization file URI
     */
    protected String svgInitializationURI;

    /**
     * Creates a viewer frame and shows it..
     * @param args The command-line arguments.
     */
    public static void main(String[] args) {
        new Main(args);
    }

    /**
     * The gui resources file name
     */
    public static final String RESOURCES =
        "org.apache.batik.apps.svgbrowser.resources.Main";

    /**
     * URL for Squiggle's security policy file
     */
    public static final String SQUIGGLE_SECURITY_POLICY
        = "org/apache/batik/apps/svgbrowser/resources/svgbrowser.policy";

    /**
     * The resource bundle
     */
    protected static ResourceBundle bundle;

    /**
     * The resource manager
     */
    protected static ResourceManager resources;
    static {
        bundle = ResourceBundle.getBundle(RESOURCES, Locale.getDefault());
        resources = new ResourceManager(bundle);
    }

    /**
     * The frame's icon.
     */
    protected static ImageIcon frameIcon = new ImageIcon
        (Main.class.getResource(resources.getString("Frame.icon")));

    /**
     * The preference manager.
     */
    protected XMLPreferenceManager preferenceManager;

    /**
     * Maximum number of recently visited URIs
     */
    public static final int MAX_VISITED_URIS = 10;

    /**
     * The array of last visited URIs
     */
    protected Vector lastVisited = new Vector();

    /**
     * The actual allowed maximum number of last visited URIs
     */
    protected int maxVisitedURIs = MAX_VISITED_URIS;

    /**
     * The arguments.
     */
    protected String[] arguments;

    /**
     * Controls whether the application can override the
     * system security policy property. This is done when there
     * was no initial security policy specified when the application
     * started, in which case Batik will use that property.
     */
    protected boolean overrideSecurityPolicy = false;

    /**
     * Script security enforcement is delegated to the
     * security utility
     */
    protected ApplicationSecurityEnforcer securityEnforcer;

    /**
     * The option handlers.
     */
    protected Map handlers = new HashMap();
    {
        // Instance initializer: registers the command-line option handlers.
        handlers.put("-font-size", new FontSizeHandler());
    }

    /**
     * The viewer frames.
     */
    protected List viewerFrames = new LinkedList();

    /**
     * The preference dialog.
     */
    protected PreferenceDialog preferenceDialog;

    /**
     * The UI specialization to use in the JSVGViewerFrames.
     */
    protected String uiSpecialization;

    /**
     * Creates a new application.
     * @param args The command-line arguments.
     */
    public Main(String[] args) {
        arguments = args;

        if (Platform.isOSX) {
            uiSpecialization = "OSX";

            // Move the menu bars to the top of the screen.
            System.setProperty("apple.laf.useScreenMenuBar", "true");

            // Register listeners for the About and Preferences menu items
            // in the application menu (using reflection).
            // Reflection keeps the com.apple.eawt dependency optional so the
            // same jar runs on non-Apple JVMs.
            try {
                Class Application = Class.forName("com.apple.eawt.Application");
                Class ApplicationListener =
                    Class.forName("com.apple.eawt.ApplicationListener");
                Class ApplicationEvent =
                    Class.forName("com.apple.eawt.ApplicationEvent");

                Method getApplication =
                    Application.getMethod("getApplication", new Class[0]);
                Method addApplicationListener =
                    Application.getMethod("addApplicationListener",
                                          new Class[] { ApplicationListener });
                final Method setHandled =
                    ApplicationEvent.getMethod("setHandled",
                                               new Class[] { Boolean.TYPE });
                Method setEnabledPreferencesMenu =
                    Application.getMethod("setEnabledPreferencesMenu",
                                          new Class[] { Boolean.TYPE });

                InvocationHandler listenerHandler = new InvocationHandler() {
                    public Object invoke(Object proxy, Method method,
                                         Object[] args) {
                        String name = method.getName();
                        if (name.equals("handleAbout")) {
                            JSVGViewerFrame relativeTo =
                                viewerFrames.isEmpty()
                                    ? null
                                    : (JSVGViewerFrame) viewerFrames.get(0);
                            AboutDialog dlg = new AboutDialog(relativeTo);
                            // Work around pack() bug on some platforms
                            dlg.setSize(dlg.getPreferredSize());
                            dlg.setLocationRelativeTo(relativeTo);
                            dlg.setVisible(true);
                            dlg.toFront();
                        } else if (name.equals("handlePreferences")) {
                            JSVGViewerFrame relativeTo =
                                viewerFrames.isEmpty()
                                    ? null
                                    : (JSVGViewerFrame) viewerFrames.get(0);
                            showPreferenceDialog(relativeTo);
                        } else if (name.equals("handleQuit")) {
                            // Do nothing, let the OS quit the app.
                        } else {
                            return null;
                        }
                        try {
                            // Mark the Apple event as handled so the OS does
                            // not apply its default behaviour as well.
                            setHandled.invoke(args[0],
                                              new Object[] { Boolean.TRUE });
                        } catch (Exception e) {
                        }
                        return null;
                    }
                };
                Object application =
                    getApplication.invoke(null, (Object[]) null);
                setEnabledPreferencesMenu.invoke(application,
                                                 new Object[] { Boolean.TRUE });
                Object listener =
                    Proxy.newProxyInstance(Main.class.getClassLoader(),
                                           new Class[] { ApplicationListener },
                                           listenerHandler);
                addApplicationListener.invoke(application,
                                              new Object[] { listener });
            } catch (Exception ex) {
                // EAWT unavailable or incompatible: fall back to generic UI.
                ex.printStackTrace();
                uiSpecialization = null;
            }
        }

        //
        // Preferences
        //
        Map defaults = new HashMap(11);

        defaults.put(PreferenceDialog.PREFERENCE_KEY_LANGUAGES,
                     Locale.getDefault().getLanguage());
        defaults.put(PreferenceDialog.PREFERENCE_KEY_SHOW_RENDERING,
                     Boolean.FALSE);
        defaults.put(PreferenceDialog.PREFERENCE_KEY_AUTO_ADJUST_WINDOW,
                     Boolean.TRUE);
        defaults.put(PreferenceDialog.PREFERENCE_KEY_SELECTION_XOR_MODE,
                     Boolean.FALSE);
        defaults.put(PreferenceDialog.PREFERENCE_KEY_ENABLE_DOUBLE_BUFFERING,
                     Boolean.TRUE);
        defaults.put(PreferenceDialog.PREFERENCE_KEY_SHOW_DEBUG_TRACE,
                     Boolean.FALSE);
        defaults.put(PreferenceDialog.PREFERENCE_KEY_PROXY_HOST, "");
        defaults.put(PreferenceDialog.PREFERENCE_KEY_PROXY_PORT, "");
        defaults.put(PreferenceDialog.PREFERENCE_KEY_CSS_MEDIA, "screen");
        defaults.put(PreferenceDialog.PREFERENCE_KEY_DEFAULT_FONT_FAMILY,
                     DEFAULT_DEFAULT_FONT_FAMILY);
        defaults.put(PreferenceDialog.PREFERENCE_KEY_IS_XML_PARSER_VALIDATING,
                     Boolean.FALSE);
        defaults.put(PreferenceDialog.PREFERENCE_KEY_ENFORCE_SECURE_SCRIPTING,
                     Boolean.TRUE);
        defaults.put(PreferenceDialog.PREFERENCE_KEY_GRANT_SCRIPT_FILE_ACCESS,
                     Boolean.FALSE);
        defaults.put(PreferenceDialog.PREFERENCE_KEY_GRANT_SCRIPT_NETWORK_ACCESS,
                     Boolean.FALSE);
        defaults.put(PreferenceDialog.PREFERENCE_KEY_LOAD_JAVA,
                     Boolean.TRUE);
        defaults.put(PreferenceDialog.PREFERENCE_KEY_LOAD_ECMASCRIPT,
                     Boolean.TRUE);
        defaults.put(PreferenceDialog.PREFERENCE_KEY_ALLOWED_SCRIPT_ORIGIN,
                     new Integer(ResourceOrigin.DOCUMENT));
        defaults.put(PreferenceDialog.PREFERENCE_KEY_ALLOWED_EXTERNAL_RESOURCE_ORIGIN,
                     new Integer(ResourceOrigin.ANY));
        defaults.put(PREFERENCE_KEY_VISITED_URI_LIST, "");
        defaults.put(PREFERENCE_KEY_VISITED_URI_LIST_LENGTH,
                     new Integer(MAX_VISITED_URIS));
        defaults.put(PreferenceDialog.PREFERENCE_KEY_ANIMATION_RATE_LIMITING_MODE,
                     new Integer(1));
        defaults.put(PreferenceDialog.PREFERENCE_KEY_ANIMATION_RATE_LIMITING_CPU,
                     new Float(0.75f));
        defaults.put(PreferenceDialog.PREFERENCE_KEY_ANIMATION_RATE_LIMITING_FPS,
                     new Float(10));
        defaults.put(PreferenceDialog.PREFERENCE_KEY_USER_STYLESHEET_ENABLED,
                     Boolean.TRUE);
        securityEnforcer =
            new ApplicationSecurityEnforcer(this.getClass(),
                                            SQUIGGLE_SECURITY_POLICY);

        try {
            preferenceManager =
                new XMLPreferenceManager(SQUIGGLE_CONFIGURATION_FILE,
                                         defaults);
            String dir = System.getProperty(PROPERTY_USER_HOME);
            File f = new File(dir, BATIK_CONFIGURATION_SUBDIRECTORY);
            // mkdir() is a no-op if ~/.batik already exists.
            f.mkdir();
            XMLPreferenceManager.setPreferenceDirectory(f.getCanonicalPath());
            preferenceManager.load();
            setPreferences();
            initializeLastVisited();
            Authenticator.setDefault(new JAuthenticator());
        } catch (Exception e) {
            e.printStackTrace();
        }

        //
        // Initialization
        //
        // Show a splash/about dialog with a progress bar while a tiny SVG
        // document is loaded in a throw-away frame to warm up Batik's
        // classes; run() is invoked once rendering completes.
        final AboutDialog initDialog = new AboutDialog();
        ((BorderLayout) initDialog.getContentPane().getLayout()).setVgap(8);
        final JProgressBar pb = new JProgressBar(0, 3);
        initDialog.getContentPane().add(pb, BorderLayout.SOUTH);

        // Work around pack() bug on some platforms
        Dimension ss = initDialog.getToolkit().getScreenSize();
        Dimension ds = initDialog.getPreferredSize();
        initDialog.setLocation((ss.width - ds.width) / 2,
                               (ss.height - ds.height) / 2);
        initDialog.setSize(ds);
        initDialog.setVisible(true);

        final JSVGViewerFrame v = new JSVGViewerFrame(this);
        JSVGCanvas c = v.getJSVGCanvas();
        c.addSVGDocumentLoaderListener(new SVGDocumentLoaderAdapter() {
            public void documentLoadingStarted(SVGDocumentLoaderEvent e) {
                pb.setValue(1);
            }
            public void documentLoadingCompleted(SVGDocumentLoaderEvent e) {
                pb.setValue(2);
            }
        });
        c.addGVTTreeBuilderListener(new GVTTreeBuilderAdapter() {
            public void gvtBuildCompleted(GVTTreeBuilderEvent e) {
                pb.setValue(3);
            }
        });
        c.addGVTTreeRendererListener(new GVTTreeRendererAdapter() {
            public void gvtRenderingCompleted(GVTTreeRendererEvent e) {
                // Warm-up finished: discard the splash and start the app.
                initDialog.dispose();
                v.dispose();
                System.gc();
                run();
            }
        });
        c.setSize(100, 100);
        svgInitializationURI =
            Main.class.getResource(SVG_INITIALIZATION).toString();
        c.loadSVGDocument(svgInitializationURI);
    }

    /**
     * Installs a custom policy file in the '.batik' directory. This is initialized
     * with the content of the policy file coming with the distribution
     */
    public void installCustomPolicyFile() throws IOException {
        String securityPolicyProperty
            = System.getProperty(PROPERTY_JAVA_SECURITY_POLICY);

        if (overrideSecurityPolicy
            || securityPolicyProperty == null
            || "".equals(securityPolicyProperty)) {
            // Access default policy file
            ParsedURL policyURL =
                new ParsedURL(securityEnforcer.getPolicyURL());

            // Override the user policy
            String dir = System.getProperty(PROPERTY_USER_HOME);
            File batikConfigDir =
                new File(dir, BATIK_CONFIGURATION_SUBDIRECTORY);
            File policyFile =
                new File(batikConfigDir, SQUIGGLE_POLICY_FILE);

            // Copy original policy file into local policy file
            Reader r = new BufferedReader
                (new InputStreamReader(policyURL.openStream()));
            Writer w = new FileWriter(policyFile);
            char[] buf = new char[1024];
            int n = 0;
            while ( (n=r.read(buf, 0, buf.length)) != -1 ) {
                w.write(buf, 0, n);
            }
            r.close();

            // Now, append additional grants depending on the security
            // settings
            boolean grantScriptNetworkAccess =
                preferenceManager.getBoolean
                (PreferenceDialog.PREFERENCE_KEY_GRANT_SCRIPT_NETWORK_ACCESS);
            boolean grantScriptFileAccess =
                preferenceManager.getBoolean
                (PreferenceDialog.PREFERENCE_KEY_GRANT_SCRIPT_FILE_ACCESS);
            if (grantScriptNetworkAccess) {
                w.write(POLICY_GRANT_SCRIPT_NETWORK_ACCESS);
            }
            if (grantScriptFileAccess) {
                w.write(POLICY_GRANT_SCRIPT_FILE_ACCESS);
            }
            w.close();

            // We now use the JAVA_SECURITY_POLICY property, so
            // we allow override on subsequent calls.
            overrideSecurityPolicy = true;

            System.setProperty(PROPERTY_JAVA_SECURITY_POLICY,
                               policyFile.toURL().toString());
        }
    }

    /**
     * Runs the application.
     */
    public void run() {
        try {
            int i = 0;

            // First pass: consume leading "-option value" arguments.
            for (; i < arguments.length; i++) {
                OptionHandler oh = (OptionHandler)handlers.get(arguments[i]);
                if (oh == null) {
                    break;
                }
                i = oh.handleOption(i);
            }

            JSVGViewerFrame frame = createAndShowJSVGViewerFrame();

            // Remaining arguments are documents to open, one frame each.
            while (i < arguments.length) {
                if (arguments[i].length() == 0) {
                    i++;
                    continue;
                }

                File file = new File(arguments[i]);
                String uri = null;

                try{
                    if (file.canRead()) {
                        uri = file.toURL().toString();
                    }
                }catch(SecurityException se){
                    // Cannot access files.
                }

                if(uri == null){
                    uri = arguments[i];
                    ParsedURL purl = null;
                    purl = new ParsedURL(arguments[i]);

                    if (!purl.complete())
                        // This is not a valid uri
                        uri = null;
                }

                if (uri != null) {
                    if (frame == null)
                        frame = createAndShowJSVGViewerFrame();

                    frame.showSVGDocument(uri);
                    // Force a fresh frame for the next document.
                    frame = null;
                } else {
                    // Let the user know that we are
                    // skipping this file...
                    // Note that frame may be null, which is
                    // a valid argument for showMessageDialog
                    // NOTE: Need to revisit Resources/Messages usage to
                    //       have a single entry point. Should have a
                    //       formated message here instead of a + ...
                    JOptionPane.showMessageDialog
                        (frame,
                         resources.getString("Error.skipping.file")
                         + arguments[i]);
                }
                i++;
            }
        } catch (Exception e) {
            e.printStackTrace();
            printUsage();
        }
    }

    /**
     * Prints the command line usage.
     */
    protected void printUsage() {
        System.out.println();
        System.out.println(resources.getString("Command.header"));
        System.out.println(resources.getString("Command.syntax"));
        System.out.println();
        System.out.println(resources.getString("Command.options"));
        Iterator it = handlers.keySet().iterator();
        while (it.hasNext()) {
            String s = (String)it.next();
            System.out.println(((OptionHandler)handlers.get(s)).getDescription());
        }
    }

    /**
     * This interface represents an option handler.
     */
    protected interface OptionHandler {
        /**
         * Handles the current option.
         * @return the index of argument just before the next one to handle.
         */
        int handleOption(int i);

        /**
         * Returns the option description.
         */
        String getDescription();
    }

    /**
     * To handle the '-font-size' option.
     */
    protected class FontSizeHandler implements OptionHandler {
        public int handleOption(int i) {
            // The option consumes the next argument as the point size.
            int size = Integer.parseInt(arguments[++i]);

            Font font = new Font("Dialog", Font.PLAIN, size);
            FontUIResource fontRes = new FontUIResource(font);
            // Apply the font to every Swing component class key.
            UIManager.put("CheckBox.font", fontRes);
            UIManager.put("PopupMenu.font", fontRes);
            UIManager.put("TextPane.font", fontRes);
            UIManager.put("MenuItem.font", fontRes);
            UIManager.put("ComboBox.font", fontRes);
            UIManager.put("Button.font", fontRes);
            UIManager.put("Tree.font", fontRes);
            UIManager.put("ScrollPane.font", fontRes);
            UIManager.put("TabbedPane.font", fontRes);
            UIManager.put("EditorPane.font", fontRes);
            UIManager.put("TitledBorder.font", fontRes);
            UIManager.put("Menu.font", fontRes);
            UIManager.put("TextArea.font", fontRes);
            UIManager.put("OptionPane.font", fontRes);
            UIManager.put("DesktopIcon.font", fontRes);
            UIManager.put("MenuBar.font", fontRes);
            UIManager.put("ToolBar.font", fontRes);
            UIManager.put("RadioButton.font", fontRes);
            UIManager.put("RadioButtonMenuItem.font", fontRes);
            UIManager.put("ToggleButton.font", fontRes);
            UIManager.put("ToolTip.font", fontRes);
            UIManager.put("ProgressBar.font", fontRes);
            UIManager.put("TableHeader.font", fontRes);
            UIManager.put("Panel.font", fontRes);
            UIManager.put("List.font", fontRes);
            UIManager.put("ColorChooser.font", fontRes);
            UIManager.put("PasswordField.font", fontRes);
            UIManager.put("TextField.font", fontRes);
            UIManager.put("Table.font", fontRes);
            UIManager.put("Label.font", fontRes);
            UIManager.put("InternalFrameTitlePane.font", fontRes);
            UIManager.put("CheckBoxMenuItem.font", fontRes);

            return i;
        }
        public String getDescription() {
            return resources.getString("Command.font-size");
        }
    }

    // Application ///////////////////////////////////////////////

    /**
     * Creates and shows a new viewer frame.
     */
    public JSVGViewerFrame createAndShowJSVGViewerFrame() {
        JSVGViewerFrame mainFrame = new JSVGViewerFrame(this);
        mainFrame.setSize(resources.getInteger("Frame.width"),
                          resources.getInteger("Frame.height"));
        mainFrame.setIconImage(frameIcon.getImage());
        mainFrame.setTitle(resources.getString("Frame.title"));
        mainFrame.setVisible(true);
        viewerFrames.add(mainFrame);
        setPreferences(mainFrame);
        return mainFrame;
    }

    /**
     * Closes the given viewer frame.
     */
    public void closeJSVGViewerFrame(JSVGViewerFrame f) {
        f.getJSVGCanvas().stopProcessing();
        viewerFrames.remove(f);
        // Last frame closed means the application exits.
        if (viewerFrames.size() == 0) {
            System.exit(0);
        }
        f.dispose();
    }

    /**
     * Creates a new application exit action.
     */
    public Action createExitAction(JSVGViewerFrame vf) {
        return new AbstractAction() {
            public void actionPerformed(ActionEvent e) {
                System.exit(0);
            }
        };
    }

    /**
     * Opens the given link in a new window.
     */
    public void openLink(String url) {
        JSVGViewerFrame f = createAndShowJSVGViewerFrame();
        f.getJSVGCanvas().loadSVGDocument(url);
    }

    /**
     * Returns the XML parser class name.
     */
    public String getXMLParserClassName() {
        return XMLResourceDescriptor.getXMLParserClassName();
    }

    /**
     * Returns true if the XML parser must be in validation mode, false
     * otherwise.
     */
    public boolean isXMLParserValidating() {
        return preferenceManager.getBoolean
            (PreferenceDialog.PREFERENCE_KEY_IS_XML_PARSER_VALIDATING);
    }

    /**
     * Shows the preference dialog.
     */
    public void showPreferenceDialog(JSVGViewerFrame f) {
        if (preferenceDialog == null) {
            preferenceDialog = new PreferenceDialog(f, preferenceManager);
        }
        if (preferenceDialog.showDialog() == PreferenceDialog.OK_OPTION) {
            try {
                preferenceManager.save();
                setPreferences();
            } catch (Exception e) {
            }
        }
    }

    // Re-applies the current preferences to every open frame and to the
    // process-wide proxy/security settings.
    private void setPreferences() throws IOException {
        Iterator it = viewerFrames.iterator();
        while (it.hasNext()) {
            setPreferences((JSVGViewerFrame)it.next());
        }

        System.setProperty("proxyHost", preferenceManager.getString
                           (PreferenceDialog.PREFERENCE_KEY_PROXY_HOST));
        System.setProperty("proxyPort", preferenceManager.getString
                           (PreferenceDialog.PREFERENCE_KEY_PROXY_PORT));

        installCustomPolicyFile();

        securityEnforcer.enforceSecurity
            (preferenceManager.getBoolean
             (PreferenceDialog.PREFERENCE_KEY_ENFORCE_SECURE_SCRIPTING)
             );
    }

    // Pushes the current preference values into a single viewer frame.
    private void setPreferences(JSVGViewerFrame vf) {
        boolean db = preferenceManager.getBoolean
            (PreferenceDialog.PREFERENCE_KEY_ENABLE_DOUBLE_BUFFERING);
        vf.getJSVGCanvas().setDoubleBufferedRendering(db);
        boolean sr = preferenceManager.getBoolean
            (PreferenceDialog.PREFERENCE_KEY_SHOW_RENDERING);
        vf.getJSVGCanvas().setProgressivePaint(sr);
        boolean d = preferenceManager.getBoolean
            (PreferenceDialog.PREFERENCE_KEY_SHOW_DEBUG_TRACE);
        vf.setDebug(d);
        boolean aa = preferenceManager.getBoolean
            (PreferenceDialog.PREFERENCE_KEY_AUTO_ADJUST_WINDOW);
        vf.setAutoAdjust(aa);
        boolean dd = preferenceManager.getBoolean
            (PreferenceDialog.PREFERENCE_KEY_SELECTION_XOR_MODE);
        vf.getJSVGCanvas().setSelectionOverlayXORMode(dd);
        int al = preferenceManager.getInteger
            (PreferenceDialog.PREFERENCE_KEY_ANIMATION_RATE_LIMITING_MODE);
        // Clamp out-of-range modes to the default (%cpu).
        if (al < 0 || al > 2) {
            al = 1;
        }
        switch (al) {
            case 0: // none
                vf.getJSVGCanvas().setAnimationLimitingNone();
                break;
            case 1: { // %cpu
                float pc = preferenceManager.getFloat
                    (PreferenceDialog.PREFERENCE_KEY_ANIMATION_RATE_LIMITING_CPU);
                if (pc <= 0f || pc > 1.0f) {
                    pc = 0.75f;
                }
                vf.getJSVGCanvas().setAnimationLimitingCPU(pc);
                break;
            }
            case 2: { // fps
                float fps = preferenceManager.getFloat
                    (PreferenceDialog.PREFERENCE_KEY_ANIMATION_RATE_LIMITING_FPS);
                if (fps <= 0f) {
                    fps = 10f;
                }
                vf.getJSVGCanvas().setAnimationLimitingFPS(fps);
                break;
            }
        }
    }

    /**
     * Returns the user languages.
     */
    public String getLanguages() {
        String s = preferenceManager.getString
            (PreferenceDialog.PREFERENCE_KEY_LANGUAGES);
        return (s == null)
            ? Locale.getDefault().getLanguage()
            : s;
    }

    /**
     * Returns the user stylesheet uri.
     * @return null if no user style sheet was specified.
     */
    public String getUserStyleSheetURI() {
        boolean enabled = preferenceManager.getBoolean
            (PreferenceDialog.PREFERENCE_KEY_USER_STYLESHEET_ENABLED);
        String ssPath = preferenceManager.getString
            (PreferenceDialog.PREFERENCE_KEY_USER_STYLESHEET);
        if (!enabled || ssPath.length() == 0) {
            return null;
        }

        try {
            // Convert a local file path to a URL; non-file paths fall
            // through and are returned as-is.
            File f = new File(ssPath);
            if (f.exists()) {
                return f.toURL().toString();
            }
        } catch (IOException ioe) {
            // Nothing...
        }

        return ssPath;
    }

    /**
     * Returns the default value for the CSS
     * "font-family" property
     */
    public String getDefaultFontFamily() {
        return preferenceManager.getString
            (PreferenceDialog.PREFERENCE_KEY_DEFAULT_FONT_FAMILY);
    }

    /**
     * Returns the CSS media to use.
     * @return empty string if no CSS media was specified.
     */
    public String getMedia() {
        String s = preferenceManager.getString
            (PreferenceDialog.PREFERENCE_KEY_CSS_MEDIA);
        return (s == null) ? "screen" : s;
    }

    /**
     * Returns true if the selection overlay is painted in XOR mode, false
     * otherwise.
     */
    public boolean isSelectionOverlayXORMode() {
        return preferenceManager.getBoolean
            (PreferenceDialog.PREFERENCE_KEY_SELECTION_XOR_MODE);
    }

    /**
     * Returns true if the input scriptType can be loaded in
     * this application.
     */
    public boolean canLoadScriptType(String scriptType){
        if (SVGConstants.SVG_SCRIPT_TYPE_ECMASCRIPT.equals(scriptType)){
            return preferenceManager.getBoolean
                (PreferenceDialog.PREFERENCE_KEY_LOAD_ECMASCRIPT);
        } else if (SVGConstants.SVG_SCRIPT_TYPE_JAVA.equals(scriptType)){
            return preferenceManager.getBoolean
                (PreferenceDialog.PREFERENCE_KEY_LOAD_JAVA);
        } else {
            // Unknown script types use a per-type "<type>.load" preference.
            return preferenceManager.getBoolean
                (scriptType + UNKNOWN_SCRIPT_TYPE_LOAD_KEY_EXTENSION);
        }
    }

    /**
     * Returns the allowed origins for scripts.
     * @see ResourceOrigin
     */
    public int getAllowedScriptOrigin() {
        int ret = preferenceManager.getInteger
            (PreferenceDialog.PREFERENCE_KEY_ALLOWED_SCRIPT_ORIGIN);
        return ret;
    }

    /**
     * Returns the allowed origins for external
     * resources.
     * @see ResourceOrigin
     */
    public int getAllowedExternalResourceOrigin() {
        int ret = preferenceManager.getInteger
            (PreferenceDialog.PREFERENCE_KEY_ALLOWED_EXTERNAL_RESOURCE_ORIGIN);
        return ret;
    }

    /**
     * Notifies Application of recently visited URI
     */
    public void addVisitedURI(String uri) {
        // The warm-up document is internal and never recorded.
        if(svgInitializationURI.equals(uri)) {
            return;
        }

        int maxVisitedURIs =
            preferenceManager.getInteger
            (PREFERENCE_KEY_VISITED_URI_LIST_LENGTH);

        if (maxVisitedURIs < 0) {
            maxVisitedURIs = 0;
        }

        // Move an already-known URI to the most-recent position.
        if (lastVisited.contains(uri)) {
            lastVisited.removeElement(uri);
        }

        // Evict oldest entries to leave room for the new one.
        while (lastVisited.size() > 0
                && lastVisited.size() > (maxVisitedURIs-1)) {
            lastVisited.removeElementAt(0);
        }

        if (maxVisitedURIs > 0) {
            lastVisited.addElement(uri);
        }

        // Now, save the list of visited URL into the preferences
        StringBuffer lastVisitedBuffer = new StringBuffer( lastVisited.size() * 8 );

        for (int i=0; i<lastVisited.size(); i++) {
            lastVisitedBuffer.append
                (URLEncoder.encode(lastVisited.get(i).toString()));
            lastVisitedBuffer.append(URI_SEPARATOR);
        }

        preferenceManager.setString
            (PREFERENCE_KEY_VISITED_URI_LIST,
             lastVisitedBuffer.toString());
        try {
            preferenceManager.save();
        } catch (Exception e) {
            // As in other places. But this is ugly...
        }
    }

    /**
     * Asks Application for a list of recently visited URI.
     */
    public String[] getVisitedURIs() {
        String[] visitedURIs = new String[lastVisited.size()];
        lastVisited.toArray(visitedURIs);
        return visitedURIs;
    }

    /**
     * Returns the UI resource specialization to use.
     */
    public String getUISpecialization() {
        return uiSpecialization;
    }

    /**
     * Initializes the lastVisited array
     */
    protected void initializeLastVisited(){
        String lastVisitedStr
            = preferenceManager.getString(PREFERENCE_KEY_VISITED_URI_LIST);

        StringTokenizer st
            = new StringTokenizer(lastVisitedStr,
                                  URI_SEPARATOR);

        int n = st.countTokens();

        int maxVisitedURIs
            = preferenceManager.getInteger
            (PREFERENCE_KEY_VISITED_URI_LIST_LENGTH);

        if (n > maxVisitedURIs) {
            n = maxVisitedURIs;
        }

        for (int i=0; i<n; i++) {
            lastVisited.addElement(URLDecoder.decode(st.nextToken()));
        }
    }
}
// @(#)DotDump.java 7/2003 // Copyright (c) 1998-2003, Distributed Real-time Computing Lab (DRCL) // All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are met: // // 1. Redistributions of source code must retain the above copyright notice, // this list of conditions and the following disclaimer. // 2. Redistributions in binary form must reproduce the above copyright notice, // this list of conditions and the following disclaimer in the documentation // and/or other materials provided with the distribution. // 3. Neither the name of "DRCL" nor the names of its contributors may be used // to endorse or promote products derived from this software without specific // prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" // AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE // ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE FOR // ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL // DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR // SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER // CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, // OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. // package drcl.comp.tool; /* * Tool to create and view directed graph of JavaSim port connections. * Copyright (C) 2003 Peter Kolloch * * Redistribution and use in source and binary forms, with or without modification, are * permitted provided that the following conditions are met: * * 1. 
Redistributions of source code must retain the above copyright notice, this list of * conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, this * list of conditions and the following disclaimer in the documentation and/or other * materials provided with the distribution. * 3. The name of the author may not be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING * IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. */ import drcl.comp.Component; import drcl.comp.Port; import java.awt.BorderLayout; import java.awt.Image; import java.io.IOException; import java.io.PrintStream; import java.io.FileOutputStream; import java.util.Collection; import java.util.Map; import java.util.Iterator; import javax.imageio.ImageIO; import javax.swing.ImageIcon; import javax.swing.JFrame; import javax.swing.JLabel; import javax.swing.JScrollPane; //modified by Hung-ying Tyan: //(1) for subclassing //(2) add show(): to work with Tcl/Java //(2) add dumpToFile(String fileName) /** * Static methods to visualize JavaSim component connections as a directed graph. * Only components which have not-hidden ports are shown. 
* * "Normal" ports are displayed in blue, shadow ports in green (including shadow * connections). If there is a blue arrow from port1 to port2 it means, that a * message sent at port1 is delivered to port2. (except unexplainable loops from * a port to itself) * * The class uses the "dot" program from the graphviz package * (http://www.graphviz.org) for graph creation. It must be installed and in the path. * * @author Peter Kolloch */ public class DotDump implements Runnable { public String INDENT = "\t"; public String SHADOW_CONNECTION_COLOR = "#319E41"; public String REAL_CONNECTION_COLOR = "#356ABF"; Component root; boolean showBarePort = false; boolean showRoot = false; Collection links = new java.util.LinkedList(); Map components = new java.util.HashMap(); // Component -> ComponentRecord Map pendingPorts = new java.util.HashMap(); // Port -> dont care Collection pendingLinks = new java.util.LinkedList(); // Port[]{from, to} // components being blocked // ports of blocked components are also blocked Map compmap = new java.util.HashMap(); // Component -> dont care public DotDump() { this(Component.Root); } public DotDump(Component root_) { root = root_; } public void setRoot(Component root_) { root = root_; } public Component getRoot() { return root; } /** * Automatically called before each rendering. */ public void reset() { compmap.clear(); links.clear(); components.clear(); pendingLinks.clear(); } /** * Enables/disables showing ports that have no connection. * Default is off; */ public void setShowBarePortEnabled(boolean s) { showBarePort = s; } public boolean isShowBarePortEnabled() { return showBarePort; } /** * Enables/disables showing the root component. * Default is off; */ public void setShowRootEnabled(boolean s) { showRoot = s; } public boolean isShowRootEnabled() { return showRoot; } /** * Dumps a graph description sutiable for the graphviz package of all components * to the given PrintStream. 
*/ public void dumpComponents(PrintStream out) { reset(); out.println("digraph G {"); out.print(_graphAttributes(INDENT)); Collection top = new java.util.LinkedList(); if (showRoot) top.add(dumpComponents(INDENT, root)); else { Component[] cc = root.getAllComponents(); for (int i = 0; i < cc.length; i++) if (showing(cc[i])) top.add(dumpComponents(INDENT, cc[i])); } processPending(); // we must print all connections in the end, because otherwise // all ports (graph nodes) are shown as part of the component // (subgraph) they first appear in // HT: components as well if we consider not printing ports without // any connection // print components/ports for (Iterator iter = top.iterator(); iter.hasNext();) { ComponentRecord r = (ComponentRecord)iter.next(); dump(r, out); } // print links for (Iterator iter = links.iterator(); iter.hasNext();) { Object element = (Object) iter.next(); out.println(element); } out.println("}"); out.close(); } // process pending ports and connections private void processPending() { for (Iterator iter = pendingLinks.iterator(); iter.hasNext();) { Port[] pp = (Port[]) iter.next(); Port from = pp[0]; Port to = pp[1]; // if port's host is not showing, the port is not showing if (!components.containsKey(from.host) || !components.containsKey(to.host)) continue; String color = from.host.isDirectlyRelatedTo(to.host)? 
SHADOW_CONNECTION_COLOR: REAL_CONNECTION_COLOR; links.add(_connect(from, to, color)); // if link is pending, add to component record for (int i=0; i<2; i++) { Port p = pp[i]; if (!pendingPorts.containsKey(p)) continue; ComponentRecord r = (ComponentRecord)components.get(p.host); r.print(_port(r.prefix + INDENT, p)); pendingPorts.remove(p); } } } private void dump(ComponentRecord r, PrintStream out) { out.print(r); ComponentRecord[] rr = r.getChildRecords(); for (int i=0; i<rr.length; i++) dump(rr[i], out); out.println(r.prefix + "}"); } private ComponentRecord dumpComponents(String prefix_, Component c) { ComponentRecord r = new ComponentRecord(c, prefix_); components.put(c, r); // create a subgraph for this component r.println(prefix_ + "subgraph \"" + _getComponentClusterName(c) + "\" {"); String newPrefix_ = prefix_ + INDENT; r.print(_component(newPrefix_, c)); Port[] ports = c.getAllPorts(); for (int j = 0; j < ports.length; j++) { if (!showing(ports[j])) continue; boolean anyConnect_ = false; boolean anyPendingConnect_ = false; Port[] shadowsOut = ports[j].getOutShadows(); for (int i = 0; i < shadowsOut.length; i++) if (showing(shadowsOut[i])) { if (compmap.containsKey(shadowsOut[i].host)) { links.add(_connect(ports[j], shadowsOut[i], SHADOW_CONNECTION_COLOR)); anyConnect_ = true; } else { pendingPorts.put(shadowsOut[i], this); pendingLinks.add(new Port[]{ports[j], shadowsOut[i]}); anyPendingConnect_ = true; } } Port[] connectedTo = ports[j].getConceptualInPeers(); for (int i = 0; i < connectedTo.length; i++) if (showing(connectedTo[i])) { if (compmap.containsKey(connectedTo[i].host)) { links.add(_connect(ports[j], connectedTo[i], REAL_CONNECTION_COLOR)); anyConnect_ = true; } else { pendingPorts.put(connectedTo[i], this); pendingLinks.add(new Port[]{ports[j], connectedTo[i]}); anyPendingConnect_ = true; } } Port[] shadowsIn = ports[j].getInShadows(); for (int i = 0; i < shadowsIn.length; i++) if (showing(shadowsIn[i])) { if 
(compmap.containsKey(shadowsIn[i].host)) { links.add(_connect(shadowsIn[i], ports[j], SHADOW_CONNECTION_COLOR )); anyConnect_ = true; } else { pendingPorts.put(shadowsIn[i], this); pendingLinks.add(new Port[]{shadowsIn[i], ports[j]}); anyPendingConnect_ = true; } } if (showBarePort || anyConnect_) r.print(_port(newPrefix_, ports[j])); else if (anyPendingConnect_) pendingPorts.put(ports[j], this); } Component[] components = c.getAllComponents(); for (int i = 0; i < components.length; i++) if (showing(components[i])) { ComponentRecord child = dumpComponents(newPrefix_, components[i]); r.add(child); } return r; } protected final String _getComponentClusterName(Component c) { return "cluster_" + c; } protected String _graphAttributes(String prefix_) { return ""; } /** This method calls {@link #_showing(Component)} and takes care of * bookkeeping blocked components. */ public final boolean showing(Component c) { if (compmap.containsKey(c)) return false; boolean show_ = _showing(c); if (!show_) compmap.put(c, this); return show_; } /** This method returns true if the host component of the port * is not blocked and {@link #_showing(Port)} returns true. */ public final boolean showing(Port p) { Component host_ = p.getHost(); if (host_ != null && !_showing(host_)) return false; else return _showing(p); } /** Returns true if desired to show the component and its inside structure. * By default, it always returns true. */ protected boolean _showing(Component c) { return true; } /** Returns true if desired to show the port and its connections. * By default, it returns false for "hidden" ports. */ protected boolean _showing(Port p) { return p.getID().charAt(0) != '.'; } /** * Returns the "dot" description for the component. */ protected String _component(String prefix_, Component c) { return prefix_ + "graph [label=\"" + c.getID() + "\",color=\"#93A4BF\"];\n"; } /** * Returns the "dot" description for the port. 
*/ protected String _port(String prefix_, Port p) { String portColor = p.isShadow()? "#319E41": "#356ABF"; return prefix_ + "\"" + p + "\" [label=\"" + p.getID() + "@" + p.getGroupID() + "\",color=\"" + portColor + "\"];\n"; } /** * Returns the "dot" description for the connection. */ protected String _connect(Port p1, Port p2, String color_) { return "\"" + p1 + "\" -> \"" + p2 + "\" [color=\"" + color_ + "\"];"; } /** * Calls the dot program with the description generated by the * {@link #dumpComponents(PrintStream)} method as input and creates * a Java image object from its output. */ public Image createComponentImage() throws IOException { final Process dot = Runtime.getRuntime().exec(new String[] { "dot", "-Tpng" }); new Thread() { public void run() { dumpComponents(new PrintStream(dot.getOutputStream())); try { dot.getOutputStream().close(); } catch (IOException e) { throw new RuntimeException("unexpected exception", e); } } }.start(); return ImageIO.read(dot.getInputStream()); } /** * A simplistic image viewer for the image returned by * {@link #createComponentImage()}. */ void _showComponents() throws IOException { JFrame frame = new JFrame("JavaSim Components"); JLabel label = new JLabel(new ImageIcon(createComponentImage())); JScrollPane pane = new JScrollPane(label); frame.getContentPane().add(pane, BorderLayout.CENTER); frame.pack(); frame.setVisible(true); frame.setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE); } public void run() { try { _showComponents(); } catch (Exception e) { e.printStackTrace(); } } /** * Runs _showComponents() in a separate thread. 
*/ public void show() { new Thread(this).start(); } public void toFile(String fileName_) { try { dumpComponents(new PrintStream(new FileOutputStream(fileName_))); } catch (Exception e) { e.printStackTrace(); } } public static void example() { // example with shadowing Component test = new Component("test"); Component.Root.addComponent(test); Component intest = new Component("intest"); test.addComponent( intest); intest.addPort( "real").connectTo( test.addPort("shadow")); Component test2 = new Component("test2"); Component.Root.addComponent(test2); test.getPort( "shadow").connectTo( test2.addPort( "stuff")); // example with (incorrectly?) displayed port loops and wire joining Component blupps = new Component("blupps"); Component.Root.addComponent(blupps); Component blupps2 = new Component("blupps2"); Component.Root.addComponent(blupps2); blupps.addPort("stuff").connect( blupps2.addPort("stuff")); /* blupps.getPort("stuff").connect( blupps2.addPort("stuff2")); // indirectly connects to both ports of blupps2! wire joining! 
blupps.addPort("stuff2").connectTo( blupps2.getPort("stuff")); */ } public String info() { StringBuffer sb = new StringBuffer("blocked components: " + compmap.size() + "\n"); for (Iterator it_ = compmap.keySet().iterator(); it_.hasNext(); ) sb.append("\t" + it_.next() + "\n"); return "Root: " + root + "\nshowRoot: " + showRoot + "\nshowBarePort: " + showBarePort + sb.toString(); } public static void main(String[] args) throws IOException { example(); new DotDump().show(); } class ComponentRecord { Component c; String prefix; StringBuffer lines = new StringBuffer(); Collection childlist = new java.util.LinkedList(); ComponentRecord(Component c, String prefix_) { this.c = c; prefix = prefix_; } public void print(String line_) { lines.append(line_); } public void println(String line_) { lines.append(line_ + "\n"); } public void add(ComponentRecord child) { childlist.add(child); } public ComponentRecord[] getChildRecords() { return (ComponentRecord[]) childlist.toArray(new ComponentRecord[childlist.size()]); } // append ending "}" public String toString() { return lines.toString(); } } }
/*
 * Copyright 2009-2016 DigitalGlobe, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and limitations under the License.
 *
 */

package org.mrgeo.test;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.gdal.gdal.Dataset;
import org.mrgeo.data.DataProviderFactory;
import org.mrgeo.data.ProviderProperties;
import org.mrgeo.data.image.MrsImageDataProvider;
import org.mrgeo.data.raster.MrGeoRaster;
import org.mrgeo.hdfs.utils.HadoopFileUtils;
import org.mrgeo.image.MrsImage;
import org.mrgeo.image.MrsPyramid;
import org.mrgeo.image.MrsPyramidMetadata;
import org.mrgeo.job.JobCancelledException;
import org.mrgeo.job.JobFailedException;
import org.mrgeo.mapalgebra.MapAlgebra;
import org.mrgeo.mapalgebra.parser.ParserException;
import org.mrgeo.utils.GDALJavaUtils;
import org.mrgeo.utils.GDALUtils;
import org.mrgeo.utils.tms.Bounds;
import org.mrgeo.utils.tms.TMSUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.io.IOException;

/**
 * Test helpers for running map algebra expressions and comparing their raster
 * output against baseline TIFs or golden MrsPyramids stored under inputLocal.
 */
@SuppressWarnings("all") // test code, not included in production
public class MapOpTestUtils extends TestUtils
{
private static final Logger log = LoggerFactory.getLogger(MapOpTestUtils.class);

public MapOpTestUtils(final Class<?> testClass) throws IOException
{
  super(testClass);
}

/** Reads the pyramid metadata of a test output image from outputHdfs. */
public MrsPyramidMetadata getImageMetadata(final String testName) throws IOException
{
  MrsImageDataProvider dp = DataProviderFactory.getMrsImageDataProvider(
      new Path(outputHdfs, testName).toString(),
      DataProviderFactory.AccessMode.READ, (ProviderProperties) null);
  return dp.getMetadataReader().read();
}

/**
 * Runs the expression and copies the resulting pyramid from HDFS into
 * inputLocal, making it the golden baseline for future comparisons.
 */
public void generateBaselinePyramid(final Configuration conf, final String testName,
    final String ex)
    throws IOException, JobFailedException, JobCancelledException, ParserException
{
  runMapAlgebraExpression(conf, testName, ex);

  final Path src = new Path(outputHdfs, testName);
  final MrsPyramid pyramid = MrsPyramid.open(src.toString(), (ProviderProperties) null);
  if (pyramid != null)
  {
    final Path dst = new Path(inputLocal, testName);
    final FileSystem fs = dst.getFileSystem(conf);
    fs.copyToLocalFile(src, dst);
  }
}

/** Like {@link #generateBaselineTif(Configuration, String, String, double)} with NaN nodata. */
public void generateBaselineTif(final Configuration conf, final String testName,
    final String ex)
    throws IOException, JobFailedException, JobCancelledException, ParserException
{
  generateBaselineTif(conf, testName, ex, Double.NaN);
}

/** Runs the expression and saves the max-zoom result as a baseline TIF. */
public void generateBaselineTif(final Configuration conf, final String testName,
    final String ex, double nodata)
    throws IOException, JobFailedException, JobCancelledException, ParserException
{
  runMapAlgebraExpression(conf, testName, ex);
  saveBaselineTif(testName, nodata);
}

/** Exports the max-zoom level of the named output pyramid to inputLocal/&lt;testName&gt;.tif. */
public void saveBaselineTif(String testName, double nodata) throws IOException
{
  final MrsPyramid pyramid =
      MrsPyramid.open(new Path(outputHdfs, testName).toString(), (ProviderProperties) null);
  MrsPyramidMetadata meta = pyramid.getMetadata();

  try (MrsImage image = pyramid.getImage(meta.getMaxZoomLevel()))
  {
    MrGeoRaster raster = image.getRaster();
    final File baselineTif = new File(new File(inputLocal), testName + ".tif");

    Bounds tilesBounds = TMSUtils.tileBounds(meta.getBounds(),
        image.getMaxZoomlevel(), image.getTilesize());
    GDALJavaUtils.saveRaster(
        raster.toDataset(meta.getBounds(), meta.getDefaultValues()),
        baselineTif.getCanonicalPath(), tilesBounds, nodata);
  }
}

public void runRasterExpression(final Configuration conf, final String testName,
    final String ex)
    throws ParserException, IOException, JobFailedException, JobCancelledException
{
  runRasterExpression(conf, testName, null, ex);
}

public void runRasterExpression(final Configuration conf, final String testName,
    final TestUtils.ValueTranslator testTranslator, final String ex)
    throws ParserException, IOException, JobFailedException, JobCancelledException
{
  runMapAlgebraExpression(conf, testName, ex);
  compareRasterOutput(testName, testTranslator, null);
}

public void runRasterExpression(final Configuration conf, final String testName,
    final TestUtils.ValueTranslator baselineTranslator,
    final TestUtils.ValueTranslator testTranslator, final String ex)
    throws ParserException, IOException, JobFailedException, JobCancelledException
{
  runMapAlgebraExpression(conf, testName, ex);
  compareRasterOutput(testName, baselineTranslator, testTranslator, null);
}

public void compareRasterOutput(final String testName,
    final TestUtils.ValueTranslator testTranslator) throws IOException
{
  compareRasterOutput(testName, null, testTranslator, null);
}

public void compareLocalRasterOutput(final String testName,
    final TestUtils.ValueTranslator testTranslator) throws IOException
{
  compareLocalRasterOutput(testName, null, testTranslator, null);
}

public void compareLocalRasterOutput(final String testName,
    final TestUtils.ValueTranslator testTranslator,
    final ProviderProperties providerProperties) throws IOException
{
  compareLocalRasterOutput(testName, null, testTranslator, providerProperties);
}

/**
 * Runs the map algebra expression and stores the results to outputHdfs in a
 * subdirectory that matches the testName. No comparison against expected
 * output is done. See other methods in this class like runVectorExpression and
 * runRasterExpression for that capability.
 */
public void runMapAlgebraExpression(final Configuration conf, final String testName,
    final String ex)
    throws IOException, JobFailedException, JobCancelledException, ParserException
{
  HadoopFileUtils.delete(new Path(outputHdfs, testName));

  log.info(ex);
  long start = System.currentTimeMillis();

  ProviderProperties pp = ProviderProperties.fromDelimitedString("");
  MapAlgebra.validateWithExceptions(ex, pp);
  MapAlgebra.mapalgebra(ex, (new Path(outputHdfs, testName)).toString(), conf, pp, null);

  log.info("Test Execution time: " + (System.currentTimeMillis() - start));
}

private void compareRasterOutput(final String testName,
    final TestUtils.ValueTranslator testTranslator,
    final ProviderProperties providerProperties) throws IOException
{
  compareRasterOutput(testName, null, testTranslator, providerProperties);
}

private void compareRasterOutput(final String testName,
    final TestUtils.ValueTranslator baselineTranslator,
    final TestUtils.ValueTranslator testTranslator,
    final ProviderProperties providerProperties) throws IOException
{
  final MrsPyramid pyramid =
      MrsPyramid.open(new Path(outputHdfs, testName).toString(), providerProperties);
  final MrsImage image = pyramid.getHighestResImage();
  try
  {
    // The output against which to compare could either be a tif or a MrsPyramid.
    // We check for the tif first.
    final File file = new File(inputLocal);
    final File baselineTif = new File(file, testName + ".tif");
    if (baselineTif.exists())
    {
      TestUtils.compareRasters(baselineTif, baselineTranslator, image.getRaster(),
          testTranslator);
    }
    else
    {
      final String inputLocalAbs = file.getCanonicalFile().toURI().toString();
      final MrsPyramid goldenPyramid =
          MrsPyramid.open(inputLocalAbs + "/" + testName, providerProperties);
      final MrsImage goldenImage = goldenPyramid.getImage(image.getZoomlevel());
      try
      {
        TestUtils.compareRasters(goldenImage.getRaster(), image.getRaster());
      }
      finally
      {
        if (goldenImage != null)
        {
          goldenImage.close();
        }
      }
    }
  }
  finally
  {
    if (image != null)
    {
      image.close();
    }
  }
}

private void compareLocalRasterOutput(final String testName,
    final TestUtils.ValueTranslator baselineTranslator,
    final TestUtils.ValueTranslator testTranslator,
    final ProviderProperties providerProperties) throws IOException
{
  final File tf = new File(getOutputLocal());
  final File testFile = new File(tf, testName + ".tif");

  // BUG FIX: the MrsImage opened in the pyramid branch below was never
  // closed; it is now tracked and closed in the finally block, mirroring
  // how goldenImage is handled.
  MrsImage testImage = null;
  try
  {
    final MrGeoRaster testRaster;
    if (testFile.exists())
    {
      Dataset d = GDALUtils.open(testFile.getCanonicalPath());
      testRaster = MrGeoRaster.fromDataset(d);
    }
    else
    {
      final String outputLocalAbs = tf.getCanonicalFile().toURI().toString();
      final MrsPyramid testPyramid =
          MrsPyramid.open(outputLocalAbs + "/" + testName, providerProperties);
      testImage = testPyramid.getImage(testPyramid.getMaximumLevel());
      testRaster = testImage.getRaster();
    }

    // The output against which to compare could either be a tif or a MrsPyramid.
    // We check for the tif first.
    final File file = new File(inputLocal);
    final File baselineTif = new File(file, testName + ".tif");
    if (baselineTif.exists())
    {
      TestUtils.compareRasters(baselineTif, baselineTranslator, testRaster, testTranslator);
    }
    else
    {
      final String inputLocalAbs = file.getCanonicalFile().toURI().toString();
      final MrsPyramid goldenPyramid =
          MrsPyramid.open(inputLocalAbs + "/" + testName, providerProperties);
      final MrsImage goldenImage = goldenPyramid.getImage(goldenPyramid.getMaximumLevel());
      try
      {
        TestUtils.compareRasters(goldenImage.getRaster(), testRaster);
      }
      finally
      {
        if (goldenImage != null)
        {
          goldenImage.close();
        }
      }
    }
  }
  finally
  {
    if (testImage != null)
    {
      testImage.close();
    }
  }
}
}
// Copyright 2006 Konrad Twardowski // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package org.makagiga.fs.feeds; import static org.makagiga.commons.UI.i18n; import java.awt.Color; import java.awt.Image; import java.awt.Window; import java.io.File; import java.io.IOException; import java.net.URL; import java.nio.file.DirectoryStream; import java.nio.file.Files; import java.nio.file.Path; import java.security.AccessControlContext; import java.security.AccessController; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.TreeSet; import java.util.concurrent.TimeUnit; import javax.swing.Action; import javax.swing.Icon; import org.makagiga.MainWindow; import org.makagiga.Tabs; import org.makagiga.Vars; import org.makagiga.commons.Config; import org.makagiga.commons.FS; import org.makagiga.commons.Flags; import org.makagiga.commons.MAction; import org.makagiga.commons.MActionInfo; import org.makagiga.commons.MApplication; import org.makagiga.commons.MArrayList; import org.makagiga.commons.MColor; import org.makagiga.commons.MDataAction; import org.makagiga.commons.MIcon; import org.makagiga.commons.MLogger; import org.makagiga.commons.TK; import org.makagiga.commons.UI; import org.makagiga.commons.cache.FileCache; import org.makagiga.commons.html.HTMLBuilder; import org.makagiga.commons.mv.MV; import org.makagiga.commons.swing.MButton; import org.makagiga.commons.swing.MCheckBox; import 
org.makagiga.commons.swing.MCheckBoxTable; import org.makagiga.commons.swing.MDialog; import org.makagiga.commons.swing.MFileChooser; import org.makagiga.commons.swing.MMenu; import org.makagiga.commons.swing.MMessage; import org.makagiga.commons.swing.MNotification; import org.makagiga.commons.swing.MPanel; import org.makagiga.commons.swing.MRadioButton; import org.makagiga.commons.swing.MSlider; import org.makagiga.commons.swing.MSmallButton; import org.makagiga.commons.swing.MTabbedPane; import org.makagiga.commons.swing.MText; import org.makagiga.commons.swing.MTimer; import org.makagiga.commons.swing.MToolBar; import org.makagiga.editors.Editor; import org.makagiga.feeds.archive.Archive; import org.makagiga.feeds.archive.ArchiveException; import org.makagiga.feeds.opml.OPMLReader; import org.makagiga.feeds.opml.OPMLWriter; import org.makagiga.fs.AbstractFS; import org.makagiga.fs.FSDynamic; import org.makagiga.fs.FSException; import org.makagiga.fs.FSHelper; import org.makagiga.fs.FSNewFile; import org.makagiga.fs.FSNewFolder; import org.makagiga.fs.FSOpen; import org.makagiga.fs.FSProperties; import org.makagiga.fs.FSQuery; import org.makagiga.fs.MetaInfo; import org.makagiga.plugins.PluginInfo; import org.makagiga.plugins.PluginMenu; import org.makagiga.plugins.PluginSettings; import org.makagiga.search.Query; import org.makagiga.tree.ItemRenderer; import org.makagiga.tree.Tree; import org.makagiga.web.Favicon; public final class FeedsFS extends AbstractFS implements FSDynamic, FSNewFile, FSNewFolder, FSOpen, FSProperties, FSQuery, PluginMenu { // public /** * @since 2.0 */ public static final int USE_EXCLUDE_FROM_FETCH_ALL_FEEDS = 1; /** * @since 2.0 */ public static final int USE_EXCLUDE_FROM_INTERVAL_FETCH = 1 << 1; /** * @since 3.8.3 */ public static final int FETCH_OFFLINE = 1 << 2; // private private AddFeedDialog.MainPanel addFeedPanel; private ArchiveOptions archiveOptions; private Color unreadColor = DEFAULT_UNREAD_COLOR; private CustomAdvancedPanel 
customAdvancedPanel; private MAction filtersAction; private MAction markAllAsReadAction; private static final MLogger log = MLogger.get("feeds"); private MetaInfo currentImportFolder; private final MTimer intervalFetchTimer; // package final AccessControlContext acc; static final Color DEFAULT_UNREAD_COLOR = new UnreadColor(0xb1d28f); FetchAllFeedsAction fetchAllFeedsAction = new FetchAllFeedsAction(); // public @Override public boolean canImport(final MetaInfo metaInfo) { if (!super.canImport(metaInfo)) return false; if (metaInfo.isAnyFolder()) return true; return false; } @Override public void deleteNotify(final MetaInfo metaInfo, final boolean move) { if (!move && metaInfo.isDynamicFolder()) { Config config = metaInfo.getConfig(); String url = config.read("x.url", null); if (url != null) { FeedDownloader.abort(metaInfo); // TODO: 2.0: count reference before remove: Archive.getInstance().remove(url); FileCache cache = FileCache.getInstance(); cache.getGroup("feed").remove(url); // hide notifications for deleted item if (!MNotification.isEmpty()) { for (MNotification.Message i : MNotification.getInstance()) { Action action = i.getAction(); if ( (action instanceof FeedThread.Action) && (FeedThread.Action.class.cast(action).getParent() == metaInfo) ) { i.hide(); } } } } } } /** * Fetches all feeds. 
* * @since 2.0 */ public void fetchFeeds(final int flags) { fetchFeeds(getRoot(), flags); } /** * @since 2.0 */ public void fetchFeeds(final MetaInfo folder, final int flags) { if (FeedDownloader.isActive()) return; // FIXME: do not expand other (collapsed) folders new Tree.Scanner(folder) { @Override public void processItem(final MetaInfo item) { download(item, false, flags); } }; // do not change mode if no feeds if (FeedDownloader.isActive()) { boolean fetchOffline = (flags & FETCH_OFFLINE) != 0; FeedDownloader.setFetchMode(this, false, fetchOffline); } } /** * @since 3.4 */ public Color getUnreadColor() { return TK.get(unreadColor, DEFAULT_UNREAD_COLOR); } /** * @since 4.0 */ public void setUnreadColor(final Color value) { unreadColor = (value == null) ? null : new UnreadColor(value); } @Override public void setupItemRenderer(final ItemRenderer renderer, final MetaInfo item) { if (item instanceof ArticleMetaInfo) { ArticleMetaInfo article = (ArticleMetaInfo)item; item.setColor(getColor(article)); } else if (item.isDynamicFolder()) { if (Boolean.TRUE.equals(item.getProperty("feed.no-favicon", false))) { //System.err.println(item + ": No favicon"); return; } MIcon icon = Vars.treeSmallIcons.get() ? item.getSmallIcon() : item.getIcon(); if (icon == null) return; // favicon already set? 
if (icon.getOverlay() != null) return; Image favicon = item.getProperty("feed.favicon", null); if (favicon == null) { String url = item.getConfig().read("x.faviconURL", ""); if (url.isEmpty()) { //System.err.println(item + ": Empty favicon URL"); item.setProperty("feed.no-favicon", true); return; } try { //System.err.println(item + ": Reading favicon"); favicon = Favicon.getCachedImage(new URL(url), 16); } catch (IOException exception) { MLogger.developerException(exception); } if (favicon == null) { //System.err.println(item + ": No favicon (2)"); item.setProperty("feed.no-favicon", true); } else item.setProperty("feed.favicon", favicon); } icon.setOverlay(favicon); } } // FS @Override public void applyProperties(final MetaInfo[] selection) { if (selection.length > 1) return; MetaInfo metaInfo = selection[0]; if (!metaInfo.isDynamicFolder() || !(metaInfo.getFS() instanceof FeedsFS)) return; if (addFeedPanel == null) return; // update config Config config = metaInfo.getConfig(); config.write("x.archivePolicy", archiveOptions.getPolicy().name()); config.write("x.blockImages", customAdvancedPanel.blockImagesCheckBox.isSelected()); config.write("x.excludeFromFetchAllFeeds", customAdvancedPanel.excludeFromFetchAllFeeds.isSelected()); config.write("x.excludeFromIntervalFetch", customAdvancedPanel.excludeFromIntervalFetch.isSelected()); config.write("x.removeArticlesAfter", archiveOptions.getDays()); //config.write("x.url", addFeedPanel.feedURLTextField.getText()); config.write("x.showFullPage", customAdvancedPanel.showFullPageCheckBox.isSelected()); //addFeedPanel.feedURLTextField.saveAutoCompletion(); // update tree if (customAdvancedPanel.shouldRefresh()/* || addFeedPanel.shouldRefresh()*/) { refresh(metaInfo); } } @Override public void freeProperties() { addFeedPanel = null; archiveOptions = null; customAdvancedPanel = null; } @Override public Flags getMetaInfoCapabilities(final MetaInfo metaInfo) { return Flags.valueOf(META_INFO_CATEGORY | META_INFO_COLOR | 
META_INFO_COMMENT | META_INFO_ICON | META_INFO_RATING | META_INFO_TAGS); } @Override public void initProperties(final MTabbedPane<MPanel> tabs, final MetaInfo[] selection) { if (selection.length > 1) return; final MetaInfo metaInfo = selection[0]; if (!metaInfo.isDynamicFolder() || !(metaInfo.getFS() instanceof FeedsFS)) return; addFeedPanel = new AddFeedDialog.MainPanel(true, null); MPanel generalTab = tabs.getTabAt(GENERAL_TAB); generalTab.add(addFeedPanel); final Config config = metaInfo.getConfig(); addFeedPanel.originalFeedURL = config.read("x.url", null); MText.setText(addFeedPanel.feedURLTextField, addFeedPanel.originalFeedURL); addFeedPanel.setupPropertiesPanel(); MPanel advancedTab = tabs.getTabAt(ADVANCED_TAB); tabs.setEnabledAt(ADVANCED_TAB, true); tabs.setIconAt(ADVANCED_TAB, getSmallIcon()); archiveOptions = new ArchiveOptions(false); FeedsFSPlugin plugin = (FeedsFSPlugin)getInfo().getPlugin(); FeedsFSPlugin.Settings settings = plugin.readSettings(metaInfo, false); archiveOptions.setDays(settings.removeArticlesAfter, settings.archivePolicy); advancedTab.add(archiveOptions); customAdvancedPanel = new CustomAdvancedPanel(); customAdvancedPanel.showExpiredArticlesAgainButton.addActionListener(e -> { try { customAdvancedPanel.showExpiredArticlesAgainButton.setEnabled(false); customAdvancedPanel.showExpiredArticlesAgainButton.setIcon(MIcon.small("ui/ok")); String url = config.read("x.url", null); Archive.getInstance().removeExpiredInfo(url); } catch (ArchiveException exception) { MLogger.developerException(exception); } } ); customAdvancedPanel.blockImagesCheckBox.setAndRemember(config.read("x.blockImages", false)); customAdvancedPanel.excludeFromFetchAllFeeds.setSelected(config.read("x.excludeFromFetchAllFeeds", false)); customAdvancedPanel.excludeFromIntervalFetch.setSelected(config.read("x.excludeFromIntervalFetch", false)); customAdvancedPanel.showFullPageCheckBox.setSelected(config.read("x.showFullPage", false)); advancedTab.add(customAdvancedPanel); } 
@Override public void lockProperties(final boolean locked) { /* if (addFeedPanel != null) { addFeedPanel.setLocked(locked); } */ if (archiveOptions != null) { archiveOptions.days.setEnabled(!locked); archiveOptions.doNotRemoveArticles.setEnabled(!locked); archiveOptions.removeArticles.setEnabled(!locked); archiveOptions.useGlobalSettings.setEnabled(!locked); } if (customAdvancedPanel != null) { customAdvancedPanel.blockImagesCheckBox.setEnabled(!locked); customAdvancedPanel.excludeFromFetchAllFeeds.setEnabled(!locked); customAdvancedPanel.excludeFromIntervalFetch.setEnabled(!locked); customAdvancedPanel.showFullPageCheckBox.setEnabled(!locked); } } @Override public Icon getNewFileActionIcon(final MIcon.Size size) { return MIcon.stock("ui/feed", size); } @Override public String getNewFileActionText() { return i18n("Add RSS Feed"); } @Override public String getNewFileDefaultName() { return i18n("New RSS Feed"); } /** * @since 4.0 */ @Override public MetaInfo newFile(final Window owner, final MetaInfo parent) throws FSException { return newFile(parent, null); } /** * @since 2.2 */ @Override public Action getNewFileAction() { return new MAction(getNewFileActionText(), "ui/feed", action -> { try { newFile(action.getSourceWindow(), getRoot()); } catch (FSException exception) { action.showErrorMessage(exception); } } ); } /** * @since 2.2 */ @Override public int getNewFileOptions() { return NEW_FILE_OPTION_SHOW_IN_MENU; } /** * @since 3.8.11 */ public MetaInfo newFile(final MetaInfo parent, final String link) throws FSException { MainWindow mainWindow = MainWindow.getInstance(); AddFeedDialog addFeedDialog = new AddFeedDialog( mainWindow.isActive() ? 
mainWindow : null, link, this ); if (!addFeedDialog.exec()) return null; String url = addFeedDialog.getNewURL(); if (alreadyExists(null, url)) return null; MetaInfo feedMetaInfo = createUniqueDynamicFolder(parent, addFeedDialog.getNewFileName()); if (feedMetaInfo != null) { Config config = feedMetaInfo.getConfig(); config.write("x.blockImages", addFeedDialog.getBlockImages()); config.write("x.excludeFromFetchAllFeeds", addFeedDialog.getExcludeFromFetchAllFeeds()); config.write("x.url", url); feedMetaInfo.sync(); Tree.getInstance().open(feedMetaInfo); } return feedMetaInfo; } /** * @throws IllegalArgumentException If {@code key} is invalid */ @Override public void setMetaInfoProperty(final MetaInfo metaInfo, final int key, final Object value) { if (!(metaInfo instanceof ArticleMetaInfo)) return; ArticleMetaInfo article = (ArticleMetaInfo)metaInfo; boolean repaintArticleList = false; String propertyKey; switch (key) { case META_INFO_CATEGORY: propertyKey = Archive.CATEGORY_PROPERTY; break; case META_INFO_COLOR: propertyKey = Archive.COLOR_PROPERTY; article.setColor((Color)value); repaintArticleList = true; break; case META_INFO_COMMENT: propertyKey = Archive.COMMENT_PROPERTY; break; case META_INFO_ICON: propertyKey = Archive.ICON_PROPERTY; if (value == null) metaInfo.clearIcon(); else metaInfo.setIconName((String)value); repaintArticleList = true; break; case META_INFO_RATING: propertyKey = Archive.RATING_PROPERTY; break; case META_INFO_TAGS: propertyKey = Archive.TAGS_PROPERTY; break; default: throw new IllegalArgumentException("Unknown \"key\" value: " + key); } String url = metaInfo.getParentFolder().getConfig().read("x.url", null); String id = article.getID(); try { Archive.getInstance().setItemProperty(url, id, new Archive.ItemProperty(propertyKey, value)); } catch (ArchiveException exception) { MLogger.exception(exception); } if (repaintArticleList) { Editor<?> tab = Tabs.getInstance().getSelectedTab(); if (tab instanceof FeedViewer) 
FeedViewer.class.cast(tab).getArticleList().repaint(); } } @Override public void updateInfo(final MetaInfo metaInfo, final HTMLBuilder html) { if (metaInfo.isDynamicFolder()) { Config config = metaInfo.getConfig(); MetaInfo.addSeparator(html); MetaInfo.addInfo(html, i18n("Feed Address:"), TK.centerSqueeze(config.read("x.url", null), 128)); } } @Override public boolean dynamic(final int flags, final MetaInfo parent) throws FSException { // download feeds return download(parent, true, 0); } @Override public void open(final int flags, final MetaInfo metaInfo) throws FSException { if (!metaInfo.isVirtualFile()) return; MetaInfo parent = metaInfo.getParentFolder(); // HACK: temp. workaround if (parent == null) // refresh in progress return; FeedViewer viewer = null; Tabs tabs = Tabs.getInstance(); // First, try to find a viewer that matches the same RSS Channel. List<FeedViewer> tabList = Editor.findEditor(FeedViewer.class); for (FeedViewer editor : tabList) { if ( (editor.getMetaInfo() != null) && (editor.getMetaInfo().getParentFolder() == parent) ) { viewer = editor; if (editor.getMetaInfo() == metaInfo) break; // while } } if (viewer == null) { for (FeedViewer editor : tabList) { viewer = editor; if (editor.getMetaInfo() == metaInfo) break; // while } } // collapse old folder if (viewer != null) { MetaInfo viewerParent = viewer.getMetaInfo().getParentFolder(); if ((viewerParent != null) && (viewerParent != parent)) { Tree tree = Tree.getInstance(); tree.setExpanded(viewerParent, false); } } boolean openInNewTab = (flags & FSOpen.MIDDLE_BUTTON) != 0; boolean sameMetaInfo = (viewer != null) && (viewer.getMetaInfo() == metaInfo); // open in new tab if ((viewer == null) || (openInNewTab && !sameMetaInfo)) { viewer = new FeedViewer(); viewer.setMetaInfo(metaInfo); // FIXME: do not steal focus from the browser component (caused by JTabbedPane) tabs.addEditor(viewer); } // open in existing tab else { if (!sameMetaInfo) { MetaInfo old = viewer.getMetaInfo(); 
tabs.addRecentlyClosedTab(old); old.setOpen(false); old.refresh(true); viewer.setMetaInfo(metaInfo); } metaInfo.setOpen(true); tabs.selectEditor(viewer); tabs.setTabInfo(viewer, metaInfo); MainWindow.getInstance().updateState(); } if (metaInfo instanceof ArticleMetaInfo) { ArticleMetaInfo article = (ArticleMetaInfo)metaInfo; //article.setNew(false); article.setUnread(false); } updateInfo(parent); // after "setUnread" if (!sameMetaInfo) viewer.loadFromMetaInfo(parent, metaInfo); // refresh new item metaInfo.refresh(true); } // FSQuery /** * @since 4.6 */ @Override public boolean matches(final Query query, final MetaInfo metaInfo) { if (!metaInfo.isDynamicFolder()) return false; // match feed URL Query.Criterium criterium = query.getCriteria().get(Query.KEYWORDS); if (criterium != null) { Config config = metaInfo.getConfig(); String url = config.read("x.url", null); if (url != null) { for (String i : criterium.getKeywords()) { if (TK.containsIgnoreCase(url, i)) return true; } } } return false; } // PluginMenu @Override public void updateMenu(final String type, final MMenu menu) { if (type.equals(EXPORT_MENU)) { menu.addSeparator(false); MetaInfo folder = Tree.getInstance().getCurrentFolder(true); String text = folder.isFSRoot() ? 
i18n("Export All Feeds") : i18n("Export \"{0}\" Feeds", folder); menu.add(new ExportAction(text)); } else if (type.equals(IMPORT_MENU)) { menu.addSeparator(false); menu.add(new ImportAction()); menu.add(new ImportFromLifereaAction()); } else if (type.equals(TREE_MENU)) { MetaInfo feed = Tree.getInstance().getCurrentFolder(true); if (FeedDownloader.isActive()) menu.add(fetchAllFeedsAction); else menu.add(new FetchFeedsAction(feed)); if (filtersAction == null) filtersAction = new Filters.FiltersAction(); if (markAllAsReadAction == null) markAllAsReadAction = new MarkAllAsReadAction(); if ( (feed.getFS() instanceof FeedsFS) && feed.isDynamicFolder() ) { filtersAction.setEnabled(feed.isWriteable()); filtersAction.setName(i18n("RSS Filters: {0}", TK.centerSqueeze(feed.toString(), 15))); markAllAsReadAction.setEnabled(true); } else { filtersAction.setEnabled(false); filtersAction.setName(i18n("RSS Filters...")); markAllAsReadAction.setEnabled(false); } menu.add(filtersAction); menu.add(markAllAsReadAction); } } @Override public void updateToolBar(final String type, final MToolBar toolBar) { } // protected FeedsFS(final PluginInfo info) throws FSException { // package super(info, "feeds", i18n("Feeds")); acc = AccessController.getContext(); scan(); // create sample feeds if no items Config config = Config.getDefault(); if (getRoot().isEmpty(MV.MODEL) && config.read("Feeds.createSamples", true)) { config.write("Feeds.createSamples", false); addSample("xkcd", "labels/emotion/happy", "http://xkcd.com/atom.xml"); //addSample("Gmail - Unread Messages", "ui/star", "https://mail.google.com/mail/feed/atom/unread"); addSample("Quotes of the Day", null, "http://feeds.feedburner.com/quotationspage/qotd"); addSample("reddit.com - programming", null, "http://www.reddit.com/r/programming/.rss"); addSample("Slashdot", "ui/console", "http://rss.slashdot.org/Slashdot/slashdot"); } intervalFetchTimer = MTimer.minutes(FeedsFSPlugin.DEFAULT_FETCH_INTERVAL, timer -> { if 
(!MApplication.offline.booleanValue()) fetchFeeds(getRoot(), USE_EXCLUDE_FROM_INTERVAL_FETCH); return MTimer.CONTINUE; } ); } @Override protected String getDefaultIconName(final MetaInfo metaInfo) { return metaInfo.isDynamicFolder() ? "ui/feed" : null; } @Override protected void processFile(final MetaInfo parent, final File file) { processDynamicFolder(parent, file); } // private private void addSample(final String name, final String iconName, final String url) { MetaInfo dynamicFolder = addDynamicFolder(getRoot(), name); if (dynamicFolder != null) { if (iconName != null) dynamicFolder.setIconName(iconName); Config config = dynamicFolder.getConfig(); config.write("x.url", url); config.sync(); } } private static void doRemoveAllChildren(final MetaInfo parent) { for (FeedViewer i : FeedViewer.findEditor(FeedViewer.class)) { if (i.getMetaInfo().getParentFolder() == parent) { i.waitingForUpdateFrom = parent; i.messageLabel.setWarningMessage(i18n("Please Wait...")); i.messageLabel.setVisible(true); } } for (MetaInfo i : parent.list(MV.MODEL)) i.setParent(null); Tree tree = Tree.getInstance(); tree.setCloseTabsOnRemove(false); try { tree.removeAllChildren(parent); } finally { tree.setCloseTabsOnRemove(true); } } private boolean download(final MetaInfo parent, final boolean expandFolder, final int flags) { if (!parent.isDynamicFolder()) return false; // cancel download Object feedThread = parent.clearProperty("feed.thread"); if (feedThread instanceof FeedThread) { FeedThread.class.cast(feedThread).abort(); return true; } Config config = parent.getConfig(); if ( ((flags & USE_EXCLUDE_FROM_FETCH_ALL_FEEDS) != 0) && config.read("x.excludeFromFetchAllFeeds", false) ) return true; if ( ((flags & USE_EXCLUDE_FROM_INTERVAL_FETCH) != 0) && config.read("x.excludeFromIntervalFetch", false) ) return true; // used to limit feed downloads - max 1 download/minute Long lastFetch = parent.getProperty("feed.lastFetch", null); if ( (lastFetch != null) && ((System.currentTimeMillis() - 
lastFetch) < TimeUnit.MINUTES.toMillis(1)) ) { log.debugFormat("Feed \"%s\" already downloaded", parent); if (expandFolder) Tree.getInstance().setExpanded(parent, true); return true; } // start download doRemoveAllChildren(parent); boolean fetchOffline = (flags & FETCH_OFFLINE) != 0; FeedDownloader.download(this, parent, expandFolder, null, fetchOffline); return true; } private void exportTo(final File file) { final OPMLWriter writer = new OPMLWriter(); writer.beginOPML(); writer.emptyHead(); writer.beginBody(); new Tree.Scanner(Tree.getInstance().getCurrentFolder(true)) { @Override public void processItem(final MetaInfo item) { if (item.isDynamicFolder()) { Config config = item.getConfig(); writer.addOutline( item.toString(), "xmlUrl", writer.escape(config.read("x.url", "")) ); } else if (item.isFolder()) { writer.beginOutline(item.toString()); } } @Override public void processParent(final MetaInfo parent) { if (parent.isFolder()) writer.endOutline(); } }; writer.endBody(); writer.endOPML(); try { writer.write(file); } catch (IOException exception) { MMessage.error(null, exception, i18n("Could not export feeds")); } } private void importFrom(final File file, final MetaInfo toFolder) { currentImportFolder = toFolder; try { OPMLReader reader = new OPMLReader() { private boolean inEmptyFolder; private Map<String, MetaInfo> existingURLs = FeedsFS.this.findAllURLs(); @Override protected void onEnd(final String name) { super.onEnd(name); if (name.equals("outline") && (inEmptyFolder || !hasAttributes())) { inEmptyFolder = false; MetaInfo parent = currentImportFolder.getParentFolder(); if (parent == null) currentImportFolder = toFolder; else if (parent.getFS() instanceof FeedsFS) currentImportFolder = parent; else currentImportFolder = toFolder; } } @Override protected void onOutline() { String url = getStringAttribute("xmlUrl"); // folder if (url == null) { try { currentImportFolder = currentImportFolder.getFS().createUniqueFolder(currentImportFolder, 
getTextAttribute(i18n("Unnamed"))); inEmptyFolder = true; } catch (FSException exception) { cancel(); MMessage.error(null, exception); } } // file else if (!FeedsFS.this.alreadyExists(existingURLs, url)) { String name = FSHelper.getUniqueName(currentImportFolder.getTargetFile(), getTextAttribute(i18n("Unnamed")), "properties"); Config config = new Config(FS.makePath(currentImportFolder.getTargetPath(), FSHelper.escapeName(name))); config.write("x.url", url); config.sync(); MetaInfo dynamicFolder = MetaInfo.createDynamicFolder(FeedsFS.this, config.getPath()); currentImportFolder.add(dynamicFolder); dynamicFolder.sync(); inEmptyFolder = false; } } }; reader.read(file); } catch (Exception exception) { MMessage.error(null, exception, i18n("Could not import feeds")); } finally { currentImportFolder = null; getRoot().reload(); Tree.getInstance().selectItem(toFolder, true); } } // package boolean alreadyExists(Map<String, MetaInfo> existingURLs, final String newURL) { if (existingURLs == null) existingURLs = findAllURLs(); MetaInfo item = existingURLs.get(newURL); if (item != null) { MNotification.Message message = FeedThread.showNotification( item, new FeedThread.ErrorAction(item, i18n("RSS feed already exists: {0}", newURL)) ); message.toFront(); return true; } return false; } Map<String, MetaInfo> findAllURLs() { final Map<String, MetaInfo> result = new HashMap<>(); new Tree.Scanner(this) { @Override public void processItem(final MetaInfo item) { if (item.isDynamicFolder()) { String url = item.getConfig().read("x.url", null); if (url == null) log.debugFormat("Missing Feed URL: %s", item.getFilePath()); else result.put(url, item); } } }; return result; } Color getColor(final ArticleMetaInfo article) { Color articleColor = article.getColor(); if ((articleColor == null) || (articleColor instanceof UnreadColor)) { if (article.isUnread()) articleColor = getUnreadColor(); else articleColor = null; } return articleColor; } static void refresh(final MetaInfo parent) { 
parent.clearProperty("feed.lastFetch"); // force download doRemoveAllChildren(parent); Tree.getInstance().open(parent); } FeedsFSPlugin.Settings reloadSettings(final FeedsFSPlugin plugin) { FeedsFSPlugin.Settings settings = plugin.readGlobalSettings(); intervalFetchTimer.setDelay(TimeUnit.MINUTES, settings.minutes); if (settings.useIntervalFetching) intervalFetchTimer.restart(); else intervalFetchTimer.stop(); return settings; } static FeedsFS self() { FeedsFS fs = FeedsFSPlugin._fsInstance; if (fs == null) log.error("\"feeds\" VFS no longer available"); return fs; } void updateInfo(final MetaInfo dynamicFolder) { int newCount = 0; int unreadCount = 0; for (MetaInfo i : dynamicFolder.list(MV.MODEL)) { if (i instanceof ArticleMetaInfo) { ArticleMetaInfo article = (ArticleMetaInfo)i; if (article.isNew()) newCount++; if (article.isUnread()) unreadCount++; } } StringBuilder s = new StringBuilder(); boolean dead = dynamicFolder.getProperty("feed.dead", false); if (dead) { s.append(i18n("Dead RSS source?")); } if (newCount > 0) { if (s.length() > 0) s.append(", "); s.append(i18n("New: {0}", newCount)); } if (unreadCount > 0) { if (s.length() > 0) s.append(", "); s.append(i18n("Unread: {0}", unreadCount)); } if (s.length() == 0) { dynamicFolder.clearProperty(MetaInfo.EXTRA_COLOR); dynamicFolder.clearProperty(MetaInfo.EXTRA_TEXT); } else { if (dead) dynamicFolder.setProperty(MetaInfo.EXTRA_COLOR, MColor.BRICK_RED); else if ((newCount > 0) || (unreadCount > 0)) dynamicFolder.setProperty(MetaInfo.EXTRA_COLOR, getUnreadColor()); else dynamicFolder.clearProperty(MetaInfo.EXTRA_COLOR); dynamicFolder.setProperty(MetaInfo.EXTRA_TEXT, s.toString()); } dynamicFolder.refresh(true); } // public classes /** * @since 3.4 */ public static final class ArticleMetaInfo extends MetaInfo.VirtualFileMetaInfo { // private private byte state; private static final byte UNREAD_STATE = 1; private static final byte NEW_STATE = 1 << 1; // used by FeedsFSPlugin navigation actions private static final 
byte WAS_UNREAD_STATE = 1 << 2; // public public ArticleMetaInfo(final AbstractFS fs, final String name, final String articleID) { super(fs, name); setID(Objects.toString(articleID, "")); } /** * @since 3.8.3 */ public boolean isNew() { return (state & NEW_STATE) != 0; } /** * @since 3.8.3 */ public void setNew(final boolean value) { if (value) state |= NEW_STATE; else state &= ~NEW_STATE; } public boolean isUnread() { return (state & UNREAD_STATE) != 0; } public void setUnread(final boolean value) { if (value) state |= (UNREAD_STATE | WAS_UNREAD_STATE); else state &= ~UNREAD_STATE; } // package boolean wasUnread() { return (state & WAS_UNREAD_STATE) != 0; } } /** * @since 1.2 */ public static final class ArchiveOptions extends FeedsFSPlugin.AbstractPanel { // private private final MRadioButton doNotRemoveArticles; private final MRadioButton removeArticles; private MRadioButton useGlobalSettings; private final MSlider days; // public /** * @since 4.0 */ public ArchiveOptions(final boolean globalSettings) { super(i18n("Archive"), !globalSettings); if (!globalSettings) { useGlobalSettings = new MRadioButton(i18n("Use global settings")); add(useGlobalSettings); } removeArticles = new MRadioButton(); MSmallButton cleanUpArchiveButton = new MSmallButton( MIcon.small("ui/delete"), i18n("Manually Remove Unused RSS Archives...") ); cleanUpArchiveButton.addActionListener(e -> cleanUpArchive(cleanUpArchiveButton)); add(cleanUpArchiveButton); MPanel removeArticlesPanel = MPanel.createHBoxPanel(); removeArticlesPanel.add(removeArticles); removeArticlesPanel.addGap(); removeArticlesPanel.addStretch(); removeArticlesPanel.add(cleanUpArchiveButton); add(removeArticlesPanel); days = new MSlider() { @Override protected void onChange() { removeArticles.setSelected(true); updateText(); } }; days.setMinimum(FeedsFSPlugin.Settings.MIN_DAYS); days.setMaximum(FeedsFSPlugin.Settings.MAX_DAYS); days.showSimpleLabels(); days.setEventsEnabled(true); add(days); 
removeArticles.addActionListener(e -> days.requestFocusInWindow());

    // "Do not remove" keeps every article forever; flagged "(slow)" because the
    // archive grows without bound.
    doNotRemoveArticles = new MRadioButton(i18n("Do not remove old articles (slow)"));
    add(doNotRemoveArticles);

    // The three choices are mutually exclusive. NOTE(review): "useGlobalSettings"
    // is null in the global-settings variant of this panel — presumably
    // UI.group tolerates a null member; TODO confirm.
    UI.group(
        useGlobalSettings,
        removeArticles,
        doNotRemoveArticles
    );
}

/**
 * Returns the archive policy currently selected in this panel.
 * When the "use global settings" button is absent (global-settings
 * variant), USE_GLOBAL_SETTINGS is never returned and REMOVE is the
 * fallback.
 */
public Archive.Policy getPolicy() {
    if (doNotRemoveArticles.isSelected())
        return Archive.Policy.DO_NOT_REMOVE;

    if (removeArticles.isSelected())
        return Archive.Policy.REMOVE;

    return (useGlobalSettings == null) ? Archive.Policy.REMOVE : Archive.Policy.USE_GLOBAL_SETTINGS;
}

/** Returns the slider value: the number of days after which articles are removed. */
public int getDays() {
    return days.getValue();
}

/**
 * Initializes the slider and the policy radio buttons from stored settings.
 *
 * @param value number of days to preset on the slider
 * @param policy archive policy to preselect
 */
public void setDays(final int value, final Archive.Policy policy) {
    days.setValue(value);
    switch (policy) {
        case DO_NOT_REMOVE:
            doNotRemoveArticles.setSelected(true);
            break;
        case REMOVE:
            removeArticles.setSelected(true);
            break;
        case USE_GLOBAL_SETTINGS:
            // Fall back to "remove" when this panel has no global-settings option.
            if (useGlobalSettings == null)
                removeArticles.setSelected(true);
            else
                useGlobalSettings.setSelected(true);
            break;
    }
    updateText();
}

// private

// Scans the archive for entries whose feed URL no longer exists anywhere in
// the tree and offers to delete them (see table dialog below).
private void cleanUpArchive(final MButton button) { //!!!rss widget? (original author note)
Archive archive = Archive.getInstance(); FeedsFS fs = FeedsFS.self(); Map<String, MetaInfo> urlMap = fs.findAllURLs(); Window owner = getWindowAncestor(); TreeSet<String> unused = new TreeSet<>(); for (Map.Entry<String, Archive.Item> i : archive) { if (!urlMap.containsKey(i.getKey())) unused.add(i.getKey()); } if (unused.isEmpty()) { button.setEnabled(false); button.setIcon(MIcon.small("ui/ok")); return; } MCheckBoxTable<String> table = new MCheckBoxTable<>( i18n("Delete"), i18n("Address"), true, unused ); MDialog dialog = table.createDialog( owner, i18n("Remove Unused RSS Archives"), i18n("Items:"), MActionInfo.DELETE ); if (dialog.exec()) { if (owner != null) UI.setWaitCursor(owner, true); try { for (MCheckBoxTable.CheckBoxItem<String> i : table.getModel()) { if (i.isSelected()) archive.removeItem(i.getValue()); } } finally { if (owner != null) UI.setWaitCursor(owner, false); } } } private void updateText() { removeArticles.setText(i18n("Automatically remove articles after {0} day(s)", days.getValue())); } } public final class FetchAllFeedsAction extends MAction { // private private boolean idle; // package MButton button; // public public FetchAllFeedsAction() { setHTMLHelp(i18n("Download and update all RSS feeds.")); setIdle(true); } @Override public void onAction() { if (idle) { MetaInfo root = FeedsFS.this.getRoot(); MainWindow.showTree(root); Tree.getInstance().setExpanded(root, true); FeedsFS.this.fetchFeeds(USE_EXCLUDE_FROM_FETCH_ALL_FEEDS); } else { FeedDownloader.abortAll(); } } public void setIdle(final boolean idle) { this.idle = idle; if (idle) { setName(i18n("Refresh All RSS Feeds")); setIconName("ui/feed"); if (button != null) button.setRolloverIcon(MIcon.stock("ui/refresh")); } else { setName(i18n("Cancel Refresh")); setIconName("ui/stop"); if (button != null) button.setRolloverIcon(button.getIcon()); } } } // private classes private abstract class AbstractImportAction extends MAction { // private private String newFolderName; // public @Override 
public void onAction() { MFileChooser fileChooser = createFileChooser(); if (fileChooser.openDialog()) { File file = fileChooser.getSelectedFile(); if (newFolderName == null) newFolderName = file.getName(); try { FeedsFS.this.importFrom(file, getTargetFolder()); } catch (FSException exception) { MMessage.error(null, exception); } } } // protected protected File getImportFile() { return null; } protected MetaInfo getTargetFolder() throws FSException { MetaInfo folder = FeedsFS.this.getCurrentFolder(false); return FeedsFS.this.createUniqueFolder(folder, i18n("Imported from {0}", newFolderName)); } protected MFileChooser createFileChooser() { MFileChooser fileChooser = MFileChooser.createFileChooser(null, i18n("Import Feed List")); fileChooser.setFileFilter( fileChooser.addFilter("OPML", "opml", "xml") ); fileChooser.setApproveText(MActionInfo.IMPORT.getText()); fileChooser.setConfigKey("importFeeds"); File dir = getImportFile(); if (dir != null) fileChooser.setSelectedFile(dir); return fileChooser; } // private private AbstractImportAction(final String text, final String newFolderName) { super(text, "ui/feed"); this.newFolderName = newFolderName; } } private static final class CustomAdvancedPanel extends FeedsFSPlugin.AbstractPanel { // private private boolean showExpiredArticlesAgainClicked; private final MButton showExpiredArticlesAgainButton; private final MCheckBox blockImagesCheckBox; private final MCheckBox excludeFromFetchAllFeeds; private final MCheckBox excludeFromIntervalFetch; private final MCheckBox showFullPageCheckBox; // public public CustomAdvancedPanel() { super(i18n("Miscellaneous"), false); // do not set top margin setMargin(0, getContentMargin(), getContentMargin(), getContentMargin()); blockImagesCheckBox = new MCheckBox(i18n("Block Images")); add(blockImagesCheckBox); showFullPageCheckBox = new MCheckBox(i18n("Load and Show Full Page") + " - " + i18n("Experimental")); add(showFullPageCheckBox); // exclude from fetch all feeds 
excludeFromFetchAllFeeds = new MCheckBox(i18n("Exclude from \"Refresh All Feeds\"")); add(excludeFromFetchAllFeeds); // exclude from interval fetch excludeFromIntervalFetch = new MCheckBox(i18n("Exclude from \"interval\" fetch")); add(excludeFromIntervalFetch); addContentGap(); showExpiredArticlesAgainButton = new MButton(i18n("Show Expired Articles Again")); showExpiredArticlesAgainButton.addActionListener(e -> showExpiredArticlesAgainClicked = true); add(showExpiredArticlesAgainButton); addContentGap(); addStretch(); MButton globalSettingsButton = new MButton(new FeedsFS.GlobalSettingsAction()); globalSettingsButton.setIconNameUI("ui/feed"); add(globalSettingsButton); } // protected @Override protected boolean shouldRefresh() { return showExpiredArticlesAgainClicked || blockImagesCheckBox.isModified(); } } private final class ExportAction extends MAction { // public public ExportAction(final String name) { super(name, "ui/feed"); } @Override public void onAction() { MFileChooser fileChooser = MFileChooser.createFileChooser(this.getSourceWindow(), i18n("Export Feeds")); fileChooser.setFileFilter( fileChooser.addFilter("OPML", "opml", "xml") ); fileChooser.setApproveText(i18n("Export")); fileChooser.setConfigKey("exportFeeds"); fileChooser.setSelectedFile(new File("feeds.opml")); if (fileChooser.saveDialog()) exportTo(fileChooser.getSelectedFile()); } } private final class FetchFeedsAction extends MDataAction<MetaInfo> { // public @Override public void onAction() { FeedsFS.this.fetchFeeds(this.getData(), 0); } // private private FetchFeedsAction(final MetaInfo feed) { super(feed, i18n("Refresh Feeds: {0}", TK.centerSqueeze(feed.toString(), 15)), "ui/refresh"); } } private final class ImportAction extends AbstractImportAction { // private private ImportAction() { super(i18n("Import Feed List"), null); } } private final class ImportFromLifereaAction extends AbstractImportAction { // protected @Override protected File getImportFile() { MArrayList<File> lifereaDirs = 
new MArrayList<>();
    Path home = FS.getUserPath();
    // Liferea keeps its data in one or more ".liferea*" directories under
    // the user's home directory; collect every matching directory.
    try (DirectoryStream<Path> ds = Files.newDirectoryStream(home, ".liferea*")) {
        for (Path i : ds) {
            if (Files.isDirectory(i))
                lifereaDirs.add(i.toFile());
        }
    }
    catch (IOException exception) {
        MLogger.exception(exception);
    }

    if (lifereaDirs.isEmpty())
        return null;

    // Sort so getLast() picks the lexicographically greatest directory —
    // presumably the newest/highest-versioned one; TODO confirm ordering.
    lifereaDirs.sort();

    return new File(lifereaDirs.getLast(), "feedlist.opml");
}

// private

/**
 * Creates the "Import Feed List from Liferea" action; imported feeds
 * end up in a new "Imported from Liferea" folder.
 */
private ImportFromLifereaAction() {
    super(i18n("Import Feed List from {0}", "Liferea"), "Liferea");
}

}

/**
 * Tree context-menu action that marks every article of the selected feed
 * as read (via the shared Archive) and then refreshes the feed folder.
 */
private static final class MarkAllAsReadAction extends MAction {

    // public

    public MarkAllAsReadAction() {
        super(i18n("Mark All as Read"));
    }

    @Override
    public void onAction() {
        MetaInfo feed = Tree.getInstance().getCurrentFolder(true);
        String url = feed.getConfig().read("x.url", null);
        try {
            Archive.getInstance().markAs(url, Archive.State.READ);
            FeedsFS.refresh(feed);
        }
        catch (ArchiveException exception) {
            showErrorMessage(exception);
        }
    }

}

/**
 * Marker subclass of Color used to tag the auto-assigned "unread" color,
 * so that FeedsFS.getColor can distinguish it from a color explicitly
 * chosen by the user.
 */
private static final class UnreadColor extends Color {

    // private

    private UnreadColor(final Color value) {
        super(value.getRGB());
    }

    private UnreadColor(final int rgb) {
        super(rgb);
    }

}

// package classes

/** Opens the plugin options dialog showing the global feed settings. */
static final class GlobalSettingsAction extends MAction {

    // public

    public GlobalSettingsAction() {
        super(i18n("Global Settings..."));
    }

    @Override
    public void onAction() {
        PluginSettings.showPluginOptionsDialog(
            getSourceWindow(),
            MDialog.APPLY_BUTTON,
            FeedsFS.self().getInfo()
        );
    }

}

}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * <p/> * http://www.apache.org/licenses/LICENSE-2.0 * <p/> * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.test; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import java.io.BufferedReader; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.InterruptedIOException; import java.util.Random; import java.util.Set; import java.util.SortedSet; import java.util.TreeSet; import java.util.concurrent.atomic.AtomicInteger; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.LocatedFileStatus; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.RemoteIterator; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.IntegrationTestBase; import org.apache.hadoop.hbase.IntegrationTestingUtility; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.BufferedMutator; import 
org.apache.hadoop.hbase.client.BufferedMutatorParams; import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; import org.apache.hadoop.hbase.client.Mutation; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.client.TableDescriptor; import org.apache.hadoop.hbase.client.TableDescriptorBuilder; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; import org.apache.hadoop.hbase.mapreduce.NMapInputFormat; import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil; import org.apache.hadoop.hbase.mapreduce.TableMapper; import org.apache.hadoop.hbase.mapreduce.TableRecordReaderImpl; import org.apache.hadoop.hbase.mapreduce.WALPlayer; import org.apache.hadoop.hbase.testclassification.IntegrationTests; import org.apache.hadoop.hbase.util.AbstractHBaseTool; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.CommonFSUtils; import org.apache.hadoop.hbase.wal.WALEdit; import org.apache.hadoop.hbase.wal.WALKey; import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.NullWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapreduce.Counter; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.Mapper; import org.apache.hadoop.mapreduce.Reducer; import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat; import org.apache.hadoop.util.ToolRunner; import org.junit.Test; import org.junit.experimental.categories.Category; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hbase.thirdparty.com.google.common.collect.Sets; import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine; /** * A large test which loads a lot of data that has internal references, and * verifies 
 * the data.
 *
 * In load step, 200 map tasks are launched, which in turn write loadmapper.num_to_write
 * (default 100K) rows to an hbase table. Rows are written in blocks, for a total of
 * 100 blocks. Each row in a block, contains loadmapper.backrefs (default 50) references
 * to random rows in the prev block.
 *
 * Verify step is scans the table, and verifies that for every referenced row, the row is
 * actually there (no data loss). Failed rows are output from reduce to be saved in the
 * job output dir in hdfs and inspected later.
 *
 * This class can be run as a unit test, as an integration test, or from the command line
 *
 * Originally taken from Apache Bigtop.
 */
@Category(IntegrationTests.class)
public class IntegrationTestLoadAndVerify extends IntegrationTestBase {
  private static final Logger LOG = LoggerFactory.getLogger(IntegrationTestLoadAndVerify.class);

  private static final String TEST_NAME = "IntegrationTestLoadAndVerify";
  // Single column family/qualifier used for both the marker cell and all back-references.
  private static final byte[] TEST_FAMILY = Bytes.toBytes("f1");
  private static final byte[] TEST_QUALIFIER = Bytes.toBytes("q1");

  // Configuration keys and their defaults; override with -D on the command line.
  private static final String NUM_TO_WRITE_KEY = "loadmapper.num_to_write";
  private static final long NUM_TO_WRITE_DEFAULT = 100*1000;

  private static final String TABLE_NAME_KEY = "loadmapper.table";
  private static final String TABLE_NAME_DEFAULT = "table";

  private static final String NUM_BACKREFS_KEY = "loadmapper.backrefs";
  private static final int NUM_BACKREFS_DEFAULT = 50;

  private static final String NUM_MAP_TASKS_KEY = "loadmapper.map.tasks";
  private static final String NUM_REDUCE_TASKS_KEY = "verify.reduce.tasks";
  private static final int NUM_MAP_TASKS_DEFAULT = 200;
  private static final int NUM_REDUCE_TASKS_DEFAULT = 35;

  private static final int SCANNER_CACHING = 500;

  private static final int MISSING_ROWS_TO_LOG = 10; // YARN complains when too many counters

  // Sub-command selected in processOptions(); keysDir is only set for "search".
  private String toRun = null;
  private String keysDir = null;

  private enum Counters {
    ROWS_WRITTEN,
    REFERENCES_WRITTEN,
    REFERENCES_CHECKED
  }

  /**
   * Starts a 3-node mini cluster; when not running against a distributed cluster,
   * scales the workload down (fewer rows/map/reduce tasks) and starts a mini MR cluster.
   */
  @Override
  public void setUpCluster() throws Exception {
    util = getTestingUtil(getConf());
    util.initializeCluster(3);
    this.setConf(util.getConfiguration());
    if (!util.isDistributedCluster()) {
      // Scale this up on a real cluster
      getConf().setLong(NUM_TO_WRITE_KEY, NUM_TO_WRITE_DEFAULT / 100);
      getConf().setInt(NUM_MAP_TASKS_KEY, NUM_MAP_TASKS_DEFAULT / 100);
      getConf().setInt(NUM_REDUCE_TASKS_KEY, NUM_REDUCE_TASKS_DEFAULT / 10);
      util.startMiniMapReduceCluster();
    }
  }

  /** Tears down the mini MR cluster (if one was started) after the base cleanup. */
  @Override
  public void cleanUpCluster() throws Exception {
    super.cleanUpCluster();
    if (!util.isDistributedCluster()) {
      util.shutdownMiniMapReduceCluster();
    }
  }

  /**
   * Converts a "long" value between endian systems.
   * Borrowed from Apache Commons IO
   * @param value value to convert
   * @return the converted value
   */
  public static long swapLong(long value) {
    return
      ( ( ( value >> 0 ) & 0xff ) << 56 ) +
      ( ( ( value >> 8 ) & 0xff ) << 48 ) +
      ( ( ( value >> 16 ) & 0xff ) << 40 ) +
      ( ( ( value >> 24 ) & 0xff ) << 32 ) +
      ( ( ( value >> 32 ) & 0xff ) << 24 ) +
      ( ( ( value >> 40 ) & 0xff ) << 16 ) +
      ( ( ( value >> 48 ) & 0xff ) << 8 ) +
      ( ( ( value >> 56 ) & 0xff ) << 0 );
  }

  /**
   * Load phase mapper: writes rows in blocks of recordsToWrite/100, where each row in a
   * block carries back-references (extra qualifiers) to random rows of the previous block.
   * Input keys/values are ignored (driven by NMapInputFormat task count).
   */
  public static class LoadMapper
      extends Mapper<NullWritable, NullWritable, NullWritable, NullWritable> {
    protected long recordsToWrite;
    protected Connection connection;
    protected BufferedMutator mutator;
    protected Configuration conf;
    protected int numBackReferencesPerRow;

    // Per-task row-key suffix ("m_<task>_<attempt>" digits) to keep tasks' keyspaces disjoint.
    protected String shortTaskId;

    protected Random rand = new Random();
    protected Counter rowsWritten, refsWritten;

    /** Opens the HBase connection/mutator and derives the short task id from the attempt id. */
    @Override
    public void setup(Context context) throws IOException {
      conf = context.getConfiguration();
      recordsToWrite = conf.getLong(NUM_TO_WRITE_KEY, NUM_TO_WRITE_DEFAULT);
      String tableName = conf.get(TABLE_NAME_KEY, TABLE_NAME_DEFAULT);
      numBackReferencesPerRow = conf.getInt(NUM_BACKREFS_KEY, NUM_BACKREFS_DEFAULT);
      this.connection = ConnectionFactory.createConnection(conf);
      mutator = connection.getBufferedMutator(
          new BufferedMutatorParams(TableName.valueOf(tableName))
              .writeBufferSize(4 * 1024 * 1024));

      String taskId = conf.get("mapreduce.task.attempt.id");
      Matcher matcher = Pattern.compile(".+_m_(\\d+_\\d+)").matcher(taskId);
      if (!matcher.matches()) {
        throw new RuntimeException("Strange task ID: " + taskId);
      }
      shortTaskId = matcher.group(1);

      rowsWritten = context.getCounter(Counters.ROWS_WRITTEN);
      refsWritten = context.getCounter(Counters.REFERENCES_WRITTEN);
    }

    /** Flushes buffered writes and closes the connection. */
    @Override
    public void cleanup(Context context) throws IOException {
      mutator.close();
      connection.close();
    }

    @Override
    protected void map(NullWritable key, NullWritable value,
        Context context) throws IOException, InterruptedException {
      String suffix = "/" + shortTaskId;
      // Row key layout: 8 bytes (byte-swapped sequence number) + "/<shortTaskId>" suffix.
      byte[] row = Bytes.add(new byte[8], Bytes.toBytes(suffix));

      int BLOCK_SIZE = (int)(recordsToWrite / 100);

      for (long i = 0; i < recordsToWrite;) {
        long blockStart = i;
        for (long idxInBlock = 0;
            idxInBlock < BLOCK_SIZE && i < recordsToWrite;
            idxInBlock++, i++) {
          long byteSwapped = swapLong(i);
          Bytes.putLong(row, 0, byteSwapped);

          Put p = new Put(row);
          p.addColumn(TEST_FAMILY, TEST_QUALIFIER, HConstants.EMPTY_BYTE_ARRAY);
          if (blockStart > 0) {
            // Back-references: qualifier = key of a random row in the previous block.
            // Note `row` is reused as scratch here; the Put captured the key above.
            for (int j = 0; j < numBackReferencesPerRow; j++) {
              long referredRow = blockStart - BLOCK_SIZE + rand.nextInt(BLOCK_SIZE);
              Bytes.putLong(row, 0, swapLong(referredRow));
              p.addColumn(TEST_FAMILY, row, HConstants.EMPTY_BYTE_ARRAY);
            }
            refsWritten.increment(1);
          }
          rowsWritten.increment(1);
          mutator.mutate(p);

          if (i % 100 == 0) {
            context.setStatus("Written " + i + "/" + recordsToWrite + " records");
            context.progress();
          }
        }
        // End of block, flush all of them before we start writing anything
        // pointing to these!
        mutator.flush();
      }
    }
  }

  /**
   * Verify phase mapper: for the marker qualifier, emits (rowKey, EMPTY) proving the row
   * exists; for every back-reference qualifier, emits (referredRowKey, referrerRowKey).
   */
  public static class VerifyMapper extends TableMapper<BytesWritable, BytesWritable> {
    static final BytesWritable EMPTY = new BytesWritable(HConstants.EMPTY_BYTE_ARRAY);

    @Override
    protected void map(ImmutableBytesWritable key, Result value, Context context)
        throws IOException, InterruptedException {
      BytesWritable bwKey = new BytesWritable(key.get());
      BytesWritable bwVal = new BytesWritable();
      for (Cell kv : value.listCells()) {
        if (Bytes.compareTo(TEST_QUALIFIER, 0, TEST_QUALIFIER.length,
            kv.getQualifierArray(), kv.getQualifierOffset(), kv.getQualifierLength()) == 0) {
          context.write(bwKey, EMPTY);
        } else {
          bwVal.set(kv.getQualifierArray(), kv.getQualifierOffset(), kv.getQualifierLength());
          context.write(bwVal, bwKey);
        }
      }
    }
  }

  /**
   * Verify phase reducer: a referred row is "missing" if no EMPTY marker arrived among its
   * referrers; missing rows are logged and written to the job output for later inspection.
   */
  public static class VerifyReducer extends Reducer<BytesWritable, BytesWritable, Text, Text> {
    private Counter refsChecked;
    private Counter rowsWritten;

    @Override
    public void setup(Context context) throws IOException {
      refsChecked = context.getCounter(Counters.REFERENCES_CHECKED);
      rowsWritten = context.getCounter(Counters.ROWS_WRITTEN);
    }

    @Override
    protected void reduce(BytesWritable referredRow, Iterable<BytesWritable> referrers,
        VerifyReducer.Context ctx) throws IOException, InterruptedException {
      boolean gotOriginalRow = false;
      int refCount = 0;

      for (BytesWritable ref : referrers) {
        if (ref.getLength() == 0) {
          // Zero-length value is the EMPTY marker emitted for the row itself;
          // the assert guards against duplicate markers (only active with -ea).
          assert !gotOriginalRow;
          gotOriginalRow = true;
        } else {
          refCount++;
        }
      }
      refsChecked.increment(refCount);

      if (!gotOriginalRow) {
        String parsedRow = makeRowReadable(referredRow.getBytes(), referredRow.getLength());
        String binRow = Bytes.toStringBinary(referredRow.getBytes(), 0, referredRow.getLength());
        LOG.error("Reference error row " + parsedRow);
        ctx.write(new Text(binRow), new Text(parsedRow));
        rowsWritten.increment(1);
      }
    }

    /** Renders a row key as "Row #<seq> suffix /<taskId>" for human-readable error output. */
    private String makeRowReadable(byte[] bytes, int length) {
      long rowIdx = swapLong(Bytes.toLong(bytes, 0));
      String suffix = Bytes.toString(bytes, 8, length - 8);

      return "Row #" + rowIdx + " suffix " + suffix;
    }
  }

  /**
   * Runs the load MR job (map-only, NMapInputFormat) against the given table and
   * asserts it completed successfully.
   */
  protected Job doLoad(Configuration conf, TableDescriptor tableDescriptor) throws Exception {
    Path outputDir = getTestDir(TEST_NAME, "load-output");
    LOG.info("Load output dir: " + outputDir);

    NMapInputFormat.setNumMapTasks(conf, conf.getInt(NUM_MAP_TASKS_KEY, NUM_MAP_TASKS_DEFAULT));
    conf.set(TABLE_NAME_KEY, tableDescriptor.getTableName().getNameAsString());

    Job job = Job.getInstance(conf);
    job.setJobName(TEST_NAME + " Load for " + tableDescriptor.getTableName());
    job.setJarByClass(this.getClass());
    setMapperClass(job);
    job.setInputFormatClass(NMapInputFormat.class);
    job.setNumReduceTasks(0);
    setJobScannerConf(job);
    FileOutputFormat.setOutputPath(job, outputDir);

    TableMapReduceUtil.addDependencyJars(job);

    TableMapReduceUtil.addDependencyJarsForClasses(job.getConfiguration(),
        AbstractHBaseTool.class);
    TableMapReduceUtil.initCredentials(job);
    assertTrue(job.waitForCompletion(true));
    return job;
  }

  /** Hook so subclasses can substitute their own load mapper. */
  protected void setMapperClass(Job job) {
    job.setMapperClass(LoadMapper.class);
  }

  /**
   * Runs the verify MR job (table scan -> VerifyMapper -> VerifyReducer) and asserts that
   * no missing rows were emitted (ROWS_WRITTEN counter in the verify job counts failures).
   */
  protected void doVerify(Configuration conf, TableDescriptor tableDescriptor) throws Exception {
    Path outputDir = getTestDir(TEST_NAME, "verify-output");
    LOG.info("Verify output dir: " + outputDir);

    Job job = Job.getInstance(conf);
    job.setJarByClass(this.getClass());
    job.setJobName(TEST_NAME + " Verification for " + tableDescriptor.getTableName());
    setJobScannerConf(job);

    Scan scan = new Scan();

    TableMapReduceUtil.initTableMapperJob(
        tableDescriptor.getTableName().getNameAsString(), scan, VerifyMapper.class,
        BytesWritable.class, BytesWritable.class, job);
    TableMapReduceUtil.addDependencyJarsForClasses(job.getConfiguration(),
        AbstractHBaseTool.class);
    int scannerCaching = conf.getInt("verify.scannercaching", SCANNER_CACHING);
    TableMapReduceUtil.setScannerCaching(job, scannerCaching);

    job.setReducerClass(VerifyReducer.class);
    job.setNumReduceTasks(conf.getInt(NUM_REDUCE_TASKS_KEY, NUM_REDUCE_TASKS_DEFAULT));
    FileOutputFormat.setOutputPath(job, outputDir);
    assertTrue(job.waitForCompletion(true));

    long numOutputRecords = job.getCounters().findCounter(Counters.ROWS_WRITTEN).getValue();
    assertEquals(0, numOutputRecords);
  }

  /**
   * Tool to search missing rows in WALs and hfiles.
   * Pass in file or dir of keys to search for. Key file must have been written by Verify step
   * (we depend on the format it writes out. We'll read them in and then search in hbase
   * WALs and oldWALs dirs (Some of this is TODO).
   */
  public static class WALSearcher extends WALPlayer {
    public WALSearcher(Configuration conf) {
      super(conf);
    }

    /**
     * The actual searcher mapper.
     */
    public static class WALMapperSearcher extends WALMapper {
      private SortedSet<byte []> keysToFind;
      private AtomicInteger rows = new AtomicInteger(0);

      @Override
      public void setup(Mapper<WALKey, WALEdit, ImmutableBytesWritable, Mutation>.Context context)
          throws IOException {
        super.setup(context);
        try {
          this.keysToFind = readKeysToSearch(context.getConfiguration());
          LOG.info("Loaded keys to find: count=" + this.keysToFind.size());
        } catch (InterruptedException e) {
          throw new InterruptedIOException(e.toString());
        }
      }

      /** Returns true (and records counters) when the cell's row is one of the keys to find. */
      @Override
      protected boolean filter(Context context, Cell cell) {
        // TODO: Can I do a better compare than this copying out key?
        byte [] row = new byte [cell.getRowLength()];
        System.arraycopy(cell.getRowArray(), cell.getRowOffset(), row, 0, cell.getRowLength());
        boolean b = this.keysToFind.contains(row);
        if (b) {
          String keyStr = Bytes.toStringBinary(row);
          try {
            LOG.info("Found cell=" + cell + " , walKey=" + context.getCurrentKey());
          } catch (IOException|InterruptedException e) {
            LOG.warn(e.toString(), e);
          }
          // Only emit a per-key counter for the first few hits; YARN limits counter counts.
          if (rows.addAndGet(1) < MISSING_ROWS_TO_LOG) {
            context.getCounter(FOUND_GROUP_KEY, keyStr).increment(1);
          }
          context.getCounter(FOUND_GROUP_KEY, "CELL_WITH_MISSING_ROW").increment(1);
        }
        return b;
      }
    }

    // Put in place the above WALMapperSearcher.
    @Override
    public Job createSubmittableJob(String[] args) throws IOException {
      Job job = super.createSubmittableJob(args);
      // Call my class instead.
      job.setJarByClass(WALMapperSearcher.class);
      job.setMapperClass(WALMapperSearcher.class);
      job.setOutputFormatClass(NullOutputFormat.class);
      return job;
    }
  }

  static final String FOUND_GROUP_KEY = "Found";
  static final String SEARCHER_INPUTDIR_KEY = "searcher.keys.inputdir";

  /**
   * Reads all keys to search for from the file or directory named by
   * {@link #SEARCHER_INPUTDIR_KEY}, sorted with the HBase byte comparator.
   */
  static SortedSet<byte []> readKeysToSearch(final Configuration conf)
      throws IOException, InterruptedException {
    Path keysInputDir = new Path(conf.get(SEARCHER_INPUTDIR_KEY));
    FileSystem fs = FileSystem.get(conf);
    SortedSet<byte []> result = new TreeSet<>(Bytes.BYTES_COMPARATOR);
    if (!fs.exists(keysInputDir)) {
      throw new FileNotFoundException(keysInputDir.toString());
    }
    if (!fs.isDirectory(keysInputDir)) {
      FileStatus keyFileStatus = fs.getFileStatus(keysInputDir);
      readFileToSearch(conf, fs, keyFileStatus, result);
    } else {
      RemoteIterator<LocatedFileStatus> iterator = fs.listFiles(keysInputDir, false);
      while(iterator.hasNext()) {
        LocatedFileStatus keyFileStatus = iterator.next();
        // Skip "_SUCCESS" file.
        if (keyFileStatus.getPath().getName().startsWith("_")) continue;
        readFileToSearch(conf, fs, keyFileStatus, result);
      }
    }
    return result;
  }

  /** Parses one Verify-output file: first whitespace-separated token per line is a key. */
  private static SortedSet<byte[]> readFileToSearch(final Configuration conf,
      final FileSystem fs, final FileStatus keyFileStatus, SortedSet<byte []> result)
      throws IOException, InterruptedException {
    // verify uses file output format and writes <Text, Text>. We can read it as a text file
    try (InputStream in = fs.open(keyFileStatus.getPath());
        BufferedReader reader = new BufferedReader(new InputStreamReader(in))) {
      // extract out the key and return that missing as a missing key
      String line;
      while ((line = reader.readLine()) != null) {
        if (line.isEmpty()) continue;
        String[] parts = line.split("\\s+");
        if (parts.length >= 1) {
          String key = parts[0];
          result.add(Bytes.toBytesBinary(key));
        } else {
          LOG.info("Cannot parse key from: " + line);
        }
      }
    }
    return result;
  }

  /**
   * Runs the WALSearcher over both the active WALs dir and the oldWALs dir,
   * looking for the keys read from keysDir. Returns the first nonzero exit code, else 0.
   */
  private int doSearch(Configuration conf, String keysDir) throws Exception {
    Path inputDir = new Path(keysDir);

    getConf().set(SEARCHER_INPUTDIR_KEY, inputDir.toString());
    SortedSet<byte []> keys = readKeysToSearch(getConf());
    if (keys.isEmpty()) throw new RuntimeException("No keys to find");
    LOG.info("Count of keys to find: " + keys.size());
    for(byte [] key: keys) LOG.info("Key: " + Bytes.toStringBinary(key));
    // Now read all WALs. In two dirs. Presumes certain layout.
    Path walsDir = new Path(
        CommonFSUtils.getWALRootDir(getConf()), HConstants.HREGION_LOGDIR_NAME);
    Path oldWalsDir = new Path(
        CommonFSUtils.getWALRootDir(getConf()), HConstants.HREGION_OLDLOGDIR_NAME);
    LOG.info("Running Search with keys inputDir=" + inputDir +
        " against " + getConf().get(HConstants.HBASE_DIR));
    int ret = ToolRunner.run(new WALSearcher(getConf()),
        new String [] {walsDir.toString(), ""});
    if (ret != 0) return ret;
    return ToolRunner.run(new WALSearcher(getConf()),
        new String [] {oldWalsDir.toString(), ""});
  }

  /** Scales scanner row-logging frequency to 1% of the configured row count. */
  private static void setJobScannerConf(Job job) {
    long lpr = job.getConfiguration().getLong(NUM_TO_WRITE_KEY, NUM_TO_WRITE_DEFAULT) / 100;
    job.getConfiguration().setInt(TableRecordReaderImpl.LOG_PER_ROW_COUNT, (int)lpr);
  }

  /** Returns a fresh per-test output dir on the test FS, scheduled for deletion on exit. */
  public Path getTestDir(String testName, String subdir) throws IOException {
    Path testDir = util.getDataTestDirOnTestFS(testName);
    FileSystem fs = FileSystem.get(getConf());
    fs.deleteOnExit(testDir);

    return new Path(new Path(testDir, testName), subdir);
  }

  /** End-to-end unit-test entry point: create table, load, verify, then drop on success. */
  @Test
  public void testLoadAndVerify() throws Exception {
    TableDescriptor tableDescriptor = TableDescriptorBuilder
        .newBuilder(TableName.valueOf(TEST_NAME))
        .setColumnFamily(ColumnFamilyDescriptorBuilder.of(TEST_FAMILY)).build();

    Admin admin = getTestingUtil(getConf()).getAdmin();
    admin.createTable(tableDescriptor, Bytes.toBytes(0L), Bytes.toBytes(-1L), 40);

    doLoad(getConf(), tableDescriptor);
    doVerify(getConf(), tableDescriptor);

    // Only disable and drop if we succeeded to verify - otherwise it's useful
    // to leave it around for post-mortem
    getTestingUtil(getConf()).deleteTable(tableDescriptor.getTableName());
  }

  @Override
  public void printUsage() {
    printUsage(this.getClass().getSimpleName() + " <options>" +
        " [-Doptions] <load|verify|loadAndVerify|search>", "Options", "");
    System.err.println("");
    System.err.println(" Loads a table with row dependencies and verifies the dependency chains");
    System.err.println("Options");
    System.err.println(" -Dloadmapper.table=<name> Table to write/verify (default autogen)");
    System.err.println(" -Dloadmapper.backrefs=<n> Number of backreferences per row (default 50)");
    System.err.println(" -Dloadmapper.num_to_write=<n> Number of rows per mapper (default 100,000 per mapper)");
    System.err.println(" -Dloadmapper.deleteAfter=<bool> Delete after a successful verify (default true)");
    System.err.println(" -Dloadmapper.numPresplits=<n> Number of presplit regions to start with (default 40)");
    System.err.println(" -Dloadmapper.map.tasks=<n> Number of map tasks for load (default 200)");
    System.err.println(" -Dverify.reduce.tasks=<n> Number of reduce tasks for verify (default 35)");
    System.err.println(" -Dverify.scannercaching=<n> Number hbase scanner caching rows to read (default 50)");
  }

  /** Records the sub-command; "search" optionally takes a keys dir as the second argument. */
  @Override
  protected void processOptions(CommandLine cmd) {
    super.processOptions(cmd);

    String[] args = cmd.getArgs();
    if (args == null || args.length < 1) {
      printUsage();
      throw new RuntimeException("Incorrect Number of args.");
    }
    toRun = args[0];
    if (toRun.equalsIgnoreCase("search")) {
      if (args.length > 1) {
        keysDir = args[1];
      }
    }
  }

  @Override
  public int runTestFromCommandLine() throws Exception {
    IntegrationTestingUtility.setUseDistributedCluster(getConf());
    boolean doLoad = false;
    boolean doVerify = false;
    boolean doSearch = false;
    boolean doDelete = getConf().getBoolean("loadmapper.deleteAfter",true);
    int numPresplits = getConf().getInt("loadmapper.numPresplits", 40);

    if (toRun.equalsIgnoreCase("load")) {
      doLoad = true;
    } else if (toRun.equalsIgnoreCase("verify")) {
      doVerify= true;
    } else if (toRun.equalsIgnoreCase("loadAndVerify")) {
      doLoad=true;
      doVerify= true;
    } else if (toRun.equalsIgnoreCase("search")) {
      doLoad=false;
      doVerify= false;
      doSearch = true;
      if (keysDir == null) {
        // NOTE(review): stray ']' at the end of this usage string — confirm intent
        // before changing; kept byte-identical here.
        System.err.println("Usage: search <KEYS_DIR>]");
        return 1;
      }
    } else {
      System.err.println("Invalid argument " + toRun);
      printUsage();
      return 1;
    }

    // create HTableDescriptor for specified table
    TableName table = getTablename();
    TableDescriptor tableDescriptor = TableDescriptorBuilder.newBuilder(table)
        .setColumnFamily(ColumnFamilyDescriptorBuilder.of(TEST_FAMILY)).build();

    if (doLoad) {
      try (Connection conn = ConnectionFactory.createConnection(getConf());
          Admin admin = conn.getAdmin()) {
        admin.createTable(tableDescriptor, Bytes.toBytes(0L), Bytes.toBytes(-1L), numPresplits);
        doLoad(getConf(), tableDescriptor);
      }
    }
    if (doVerify) {
      doVerify(getConf(), tableDescriptor);
      if (doDelete) {
        getTestingUtil(getConf()).deleteTable(tableDescriptor.getTableName());
      }
    }
    if (doSearch) {
      return doSearch(getConf(), keysDir);
    }
    return 0;
  }

  @Override
  public TableName getTablename() {
    return TableName.valueOf(getConf().get(TABLE_NAME_KEY, TEST_NAME));
  }

  @Override
  protected Set<String> getColumnFamilies() {
    return Sets.newHashSet(Bytes.toString(TEST_FAMILY));
  }

  public static void main(String argv[]) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    IntegrationTestingUtility.setUseDistributedCluster(conf);
    int ret = ToolRunner.run(conf, new IntegrationTestLoadAndVerify(), argv);
    System.exit(ret);
  }
}
package com.github.chaparqanatoos.kaggle.knowledge.titanic;

import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.Writer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;

import au.com.bytecode.opencsv.CSVReader;
import edu.berkeley.compbio.jlibsvm.ImmutableSvmParameter;
import edu.berkeley.compbio.jlibsvm.ImmutableSvmParameterPoint;
import edu.berkeley.compbio.jlibsvm.SVM;
import edu.berkeley.compbio.jlibsvm.binary.BinaryClassificationProblem;
import edu.berkeley.compbio.jlibsvm.binary.BinaryModel;
import edu.berkeley.compbio.jlibsvm.binary.BooleanClassificationProblemImpl;
import edu.berkeley.compbio.jlibsvm.binary.C_SVC;
import edu.berkeley.compbio.jlibsvm.kernel.LinearKernel;
import edu.berkeley.compbio.jlibsvm.util.SparseVector;

/**
 * Trains a binary C-SVC SVM (jlibsvm, linear kernel) on the Kaggle Titanic training CSV,
 * predicts survival for a second CSV, and writes the predictions to
 * data/titanic/submission.csv.
 *
 * Relies on project-local classes TitanicRecord and CSVParser (presumably in this
 * package — not visible here; confirm).
 */
public class Trainer {

  /**
   * Entry point: args[0] = training CSV, args[1] = prediction CSV.
   * Exits with -1 if fewer than two arguments are given.
   * @param args
   */
  public static void main(String[] args) {
    if (args.length < 2) {
      System.err
          .println("Usage: java com.kaggle.nabeelmukhtar.amazon.Trainer data/train.csv.train.split.csv data/train.csv.test.split.csv");
      System.exit(-1);
    }
    System.out.println("Started training.....");
    BinaryModel<Boolean, SparseVector> model = train(args[0]);
    System.out.println("Finished training.....");

    // System.out.println("Starting evalation.....");
    // double score = evaluate(args[1], model);
    // System.out.println("Finished evaluation.....");
    // System.out.println("Score: " + score);

    System.out.println("Starting prediction.....");
    Map<TitanicRecord, Boolean> results = predict(args[1], model);
    System.out.println("Finished prediction.....");
    saveResults(results, "data/titanic/submission.csv");
  }

  /**
   * Loads the training CSV and fits a C-SVC binary SVM with a linear kernel.
   * @param fileName path to the training CSV (header row is skipped)
   * @return the trained binary model
   */
  public static BinaryModel<Boolean, SparseVector> train(String fileName) {
    float gamma = 1.0f; // NOTE(review): unused with a LinearKernel — leftover from an RBF setup?
    LinearKernel kernel = new LinearKernel();

    Set<SparseVector> trueExamples = new HashSet<SparseVector>();
    Set<SparseVector> falseExamples = new HashSet<SparseVector>();
    Map<SparseVector, Integer> exampleIds = new HashMap<SparseVector, Integer>();
    loadTrainingData(fileName, trueExamples, falseExamples, exampleIds);

    BinaryClassificationProblem<Boolean, SparseVector> problem =
        new BooleanClassificationProblemImpl<Boolean, SparseVector>(
            Boolean.class, Boolean.TRUE, trueExamples,
            Boolean.FALSE, falseExamples, exampleIds);

    SVM<Boolean, SparseVector, BinaryClassificationProblem<Boolean, SparseVector>> svm =
        new C_SVC<Boolean, SparseVector>();
    ImmutableSvmParameterPoint.Builder<Boolean, SparseVector> builder =
        new ImmutableSvmParameterPoint.Builder<Boolean, SparseVector>();

    // set svm parameters
    builder.kernel = kernel;
    builder.nu = 0.5f;
    builder.cache_size = 100;
    builder.eps = 1e-3f;
    builder.p = 0.1f;
    builder.C = 1.0f;
    builder.shrinking = true;
    builder.probability = false;
    builder.redistributeUnbalancedC = true;

    // builder.gridsearchBinaryMachinesIndependently = true;
    // builder.scaleBinaryMachinesIndependently = true;
    // builder.scalingModelLearner = new LinearScalingModelLearner(500,
    // true);

    ImmutableSvmParameter<Boolean, SparseVector> param = builder.build();

    BinaryModel<Boolean, SparseVector> model =
        (BinaryModel<Boolean, SparseVector>) svm.train(problem, param);

    // do_cross_validation(svm, problem, param);
    return model;
  }

  /**
   * Evaluates the model on a labeled CSV and prints/returns the F-measure.
   * @param testFileName labeled CSV in the same format as the training file
   * @param model trained model
   * @return F1 score (harmonic mean of precision and recall)
   */
  public static double evaluate(String testFileName,
      BinaryModel<Boolean, SparseVector> model) {
    Set<SparseVector> trueExamples = new HashSet<SparseVector>();
    Set<SparseVector> falseExamples = new HashSet<SparseVector>();
    Map<SparseVector, Integer> exampleIds = new HashMap<SparseVector, Integer>();
    loadTrainingData(testFileName, trueExamples, falseExamples, exampleIds);

    int tp = 0, tn = 0, fp = 0, fn = 0;
    for (SparseVector x : falseExamples) {
      if (model.predictLabel(x)) {
        fp++;
      } else {
        tn++;
      }
    }
    for (SparseVector x : trueExamples) {
      if (model.predictLabel(x)) {
        tp++;
      } else {
        fn++;
      }
    }
    // NOTE(review): divides by zero (NaN) when tp+fp or tp+fn is 0 — acceptable for a
    // throwaway evaluation, but worth guarding if this path is re-enabled.
    double prec = (double) tp / (double) (tp + fp);
    double recall = (double) tp / (double) (tp + fn);
    double f = (2 * prec * recall) / (prec + recall);
    System.out.println("prec:" + prec + ", recall:" + recall + ", f-measure:" + f);
    return f;
  }

  /**
   * Predicts a survival label for every record in the unlabeled test CSV.
   * @return insertion-ordered map of record -> predicted label
   */
  public static Map<TitanicRecord, Boolean> predict(String testFileName,
      BinaryModel<Boolean, SparseVector> model) {
    List<TitanicRecord> tests = new ArrayList<TitanicRecord>();
    loadTestData(testFileName, tests);
    Map<TitanicRecord, Boolean> results = new LinkedHashMap<TitanicRecord, Boolean>();
    for (TitanicRecord record : tests) {
      results.put(record, model.predictLabel(convertToSparseVector(record)));
    }
    return results;
  }

  /**
   * Reads the labeled CSV (skipping the header row), converts each record to a sparse
   * vector, and partitions them into true/false example sets keyed by a running id.
   * Parse errors are printed and swallowed; the reader is always closed.
   */
  private static void loadTrainingData(String fileName,
      Set<SparseVector> trueExamples, Set<SparseVector> falseExamples,
      Map<SparseVector, Integer> exampleIds) {
    CSVReader reader = null;
    try {
      reader = new CSVReader(new FileReader(fileName));
      CSVParser parser = new CSVParser();
      int id = 1;
      boolean header = true;
      String [] nextLine;
      while ((nextLine = reader.readNext()) != null) {
        if (header) {
          // skip....
          header = false;
        } else {
          TitanicRecord record = parser.parse(nextLine, true);
          SparseVector vector = convertToSparseVector(record);
          if (record.isSurvived()) {
            trueExamples.add(vector);
          } else {
            falseExamples.add(vector);
          }
          exampleIds.put(vector, id);
          id++;
        }
      }
      System.out.println("Loaded " + id + " records....");
    } catch (Exception e) {
      e.printStackTrace();
    } finally {
      closeReader(reader);
    }
  }

  /**
   * Reads the unlabeled CSV (skipping the header row) into the given list.
   * The second argument to parser.parse is false — presumably "no survived column"; confirm.
   */
  private static void loadTestData(String fileName, List<TitanicRecord> tests) {
    CSVReader reader = null;
    try {
      reader = new CSVReader(new FileReader(fileName));
      CSVParser parser = new CSVParser();
      boolean header = true;
      String [] nextLine;
      while ((nextLine = reader.readNext()) != null) {
        if (header) {
          // skip....
          header = false;
        } else {
          TitanicRecord record = parser.parse(nextLine, false);
          tests.add(record);
        }
      }
    } catch (Exception e) {
      e.printStackTrace();
    } finally {
      closeReader(reader);
    }
  }

  /**
   * Writes the predictions as a CSV (header + one row per record) to outputFileName.
   * Errors are printed and swallowed; the writer is always closed.
   */
  private static void saveResults(Map<TitanicRecord, Boolean> results,
      String outputFileName) {
    PrintWriter output = null;
    try {
      output = new PrintWriter(new FileWriter(outputFileName));
      // header
      output.println("survived ,pclass, name, sex, age, sibsp, parch, ticket, fare, cabin, embarked");
      for (TitanicRecord id : results.keySet()) {
        output.println((results.get(id) ? "1" : "0") + "," + id.getpClass() + ","
            + id.getName() + "," + TitanicRecord.GENDERS[id.getSex()] + ","
            + id.getAge() + "," + id.getSibsp() + "," + id.getParch() + ","
            + id.getTicket() + "," + id.getFare() + "," + id.getCabin() + ","
            + TitanicRecord.EMBARKED[id.getEmbarked()]);
      }
    } catch (Exception e) {
      e.printStackTrace();
    } finally {
      closeWriter(output);
    }
  }

  /**
   * Maps a record onto a 10-slot sparse feature vector using pclass, parch, sex and age.
   * NOTE(review): slots 2, 4 and 6-9 are allocated but never assigned, so their index
   * entries stay 0 — this yields duplicate index-0 entries in the jlibsvm SparseVector.
   * Confirm against jlibsvm semantics before relying on these features.
   */
  private static SparseVector convertToSparseVector(TitanicRecord record) {
    SparseVector vector = new SparseVector(10);
    vector.indexes[0] = 0;
    vector.values[0] = record.getpClass();
    vector.indexes[1] = 1;
    vector.values[1] = record.getParch();
    // vector.indexes[2] = 2;
    // vector.values[2] = record.getEmbarked();
    vector.indexes[3] = 3;
    vector.values[3] = record.getSex();
    // vector.indexes[4] = 4;
    // vector.values[4] = record.getSibsp();
    vector.indexes[5] = 5;
    vector.values[5] = (int) record.getAge();
    // vector.indexes[6] = 6;
    // vector.values[6] = (int) record.getFare();
    // vector.indexes[8] = 8;
    // vector.values[8] = record.getRoleFamilyDescription();
    return vector;
  }

  /**
   * Runs discrete cross-validation and prints classified/accuracy percentages.
   * Currently unreferenced (the call in train() is commented out).
   */
  private static void do_cross_validation(
      SVM<Boolean, SparseVector, BinaryClassificationProblem<Boolean, SparseVector>> svm,
      BinaryClassificationProblem<Boolean, SparseVector> problem,
      ImmutableSvmParameter<Boolean, SparseVector> param) {
    // int i;
    int total_correct = 0;
    int total_unknown = 0;
    double total_error = 0;
    double sumv = 0, sumy = 0, sumvv = 0, sumyy = 0, sumvy = 0;
    // double[] target = new double[problem.l];
    int numExamples = problem.getNumExamples();
    Map cvResult = svm.discreteCrossValidation(problem, param);
    for (SparseVector p : problem.getExamples().keySet())
    // for (i = 0; i < numExamples; i++)
    {
      Object prediction = cvResult.get(p);
      if (prediction == null) {
        ++total_unknown;
      } else if (prediction.equals(problem.getTargetValue(p))) {
        ++total_correct;
      }
    }

    int classifiedExamples = numExamples - total_unknown;
    System.out.print("Cross Validation Classified = "
        + 100.0 * classifiedExamples / numExamples + "%\n");
    System.out.print("Cross Validation Accuracy (of those classified) = "
        + 100.0 * total_correct / classifiedExamples + "%\n");
    System.out.print("Cross Validation Accuracy (of total) = "
        + 100.0 * total_correct / numExamples + "%\n");
  }

  /**
   * Quietly closes the CSV reader, printing (not propagating) any IOException.
   */
  protected static void closeReader(CSVReader reader) {
    if (reader != null) {
      try {
        reader.close();
      } catch (IOException e) {
        e.printStackTrace();
      }
    }
  }

  /**
   * Quietly closes the writer, printing (not propagating) any IOException.
   */
  protected static void closeWriter(Writer writer) {
    if (writer != null) {
      try {
        writer.close();
      } catch (IOException e) {
        e.printStackTrace();
      }
    }
  }
}
/*
 * Copyright 2017 Tommaso Teofili
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.github.tteofili.p2h;

import java.io.File;
import java.io.FileOutputStream;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;

import org.apache.commons.io.IOUtils;
import org.datavec.api.util.ClassPathResource;
import org.deeplearning4j.models.embeddings.WeightLookupTable;
import org.deeplearning4j.models.paragraphvectors.ParagraphVectors;
import org.deeplearning4j.models.word2vec.VocabWord;
import org.deeplearning4j.text.documentiterator.FilenamesLabelAwareIterator;
import org.deeplearning4j.text.documentiterator.LabelAwareIterator;
import org.deeplearning4j.text.tokenization.tokenizer.preprocessor.CommonPreprocessor;
import org.deeplearning4j.text.tokenization.tokenizerfactory.DefaultTokenizerFactory;
import org.deeplearning4j.text.tokenization.tokenizerfactory.TokenizerFactory;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.nd4j.linalg.api.ndarray.INDArray;

/**
 * Tests for par2hier.
 *
 * <p>Runs parameterized over every combination of {@link Par2HierUtils.Method}
 * (CLUSTER / SUM) and {@code k} in 4..1, training plain ParagraphVectors and a
 * Par2Hier model on the same corpus and comparing the two via similarity
 * indexes, a depth-classification confusion matrix, and a raw CSV dump.
 */
@RunWith(Parameterized.class)
public class Par2HierTest {

  // Vector-combination method under test (CLUSTER or SUM).
  private final Par2HierUtils.Method method;
  // Number of centroids / vectors combined per hierarchy level.
  private int k;

  public Par2HierTest(Par2HierUtils.Method method, int k) {
    this.method = method;
    this.k = k;
  }

  /**
   * Parameter matrix: each method is exercised with k = 4, 3, 2, 1.
   */
  @Parameterized.Parameters
  public static Collection<Object[]> data() {
    return Arrays.asList(new Object[][] {
        {Par2HierUtils.Method.CLUSTER, 4},
        {Par2HierUtils.Method.CLUSTER, 3},
        {Par2HierUtils.Method.CLUSTER, 2},
        {Par2HierUtils.Method.CLUSTER, 1},
        {Par2HierUtils.Method.SUM, 4},
        {Par2HierUtils.Method.SUM, 3},
        {Par2HierUtils.Method.SUM, 2},
        {Par2HierUtils.Method.SUM, 1},
    });
  }

  /**
   * End-to-end comparison of plain paragraph vectors (PV) against par2hier
   * vectors (HV) on the MT papers dataset under {@code papers/sbc}.
   *
   * <p>For every document label it records the nearest neighbour under each
   * model, then prints intra-document similarity, depth similarity,
   * macro-F1 / accuracy of depth classification, and finally writes a CSV of
   * 2-dimensional PCA projections of both vector sets under {@code target/}.
   *
   * @throws Exception on any I/O or training failure (test fails)
   */
  @Test
  public void testP2HOnMTPapers() throws Exception {
    ParagraphVectors paragraphVectors;
    LabelAwareIterator iterator;
    TokenizerFactory tokenizerFactory;
    ClassPathResource resource = new ClassPathResource("papers/sbc");

    // build a iterator for our MT papers dataset
    iterator = new FilenamesLabelAwareIterator.Builder()
        .addSourceFolder(resource.getFile())
        .build();

    tokenizerFactory = new DefaultTokenizerFactory();
    tokenizerFactory.setTokenPreProcessor(new CommonPreprocessor());

    // hvs: label -> par2hier vector; pvs: label -> plain paragraph vector.
    // TreeMaps keep labels sorted so the CSV output is deterministic.
    Map<String, INDArray> hvs = new TreeMap<>();
    Map<String, INDArray> pvs = new TreeMap<>();

    paragraphVectors = new ParagraphVectors.Builder()
        .iterate(iterator)
        .tokenizerFactory(tokenizerFactory)
        // .epochs(3)
        // .layerSize(200)
        // .batchSize(10)
        .build();

    // fit model
    paragraphVectors.fit();

    Par2Hier par2Hier = new Par2Hier(paragraphVectors, method, k);
    // fit model
    par2Hier.fit();

    // comparison: label -> [nearest label under PV, nearest label under HV]
    Map<String, String[]> comparison = new TreeMap<>();

    // extract similarities
    WeightLookupTable<VocabWord> lookupTable = paragraphVectors.getLookupTable();
    WeightLookupTable p2hlt = par2Hier.getLookupTable();
    List<String> labels = paragraphVectors.getLabelsSource().getLabels();
    for (String label : labels) {
      INDArray vector = lookupTable.vector(label);
      INDArray hvector = p2hlt.vector(label);
      pvs.put(label, vector.dup());
      hvs.put(label, hvector.dup());
      // nearestLabels(v, 2): index 0 is the label itself, index 1 the true
      // nearest neighbour — hence the .get(1) below.
      Collection<String> strings = paragraphVectors.nearestLabels(vector, 2);
      Collection<String> hstrings = par2Hier.nearestLabels(hvector, 2);
      String[] stringsArray = new String[2];
      stringsArray[0] = new LinkedList<>(strings).get(1);
      stringsArray[1] = new LinkedList<>(hstrings).get(1);
      comparison.put(label, stringsArray);
    }

    System.out.println("--->func(args):pv,p2h");

    // measure similarity indexes
    double[] intraDocumentSimilarity = getIntraDocumentSimilarity(comparison);
    System.out.println("ids(" + k + "," + method + "):" + Arrays.toString(intraDocumentSimilarity));
    double[] depthSimilarity = getDepthSimilarity(comparison);
    System.out.println("ds(" + k + "," + method + "):" + Arrays.toString(depthSimilarity));

    // classification: predict a label's hierarchy depth from the majority
    // depth among its topK nearest labels, under both models.
    Map<Integer, Map<Integer, Long>> pvCounts = new HashMap<>();
    Map<Integer, Map<Integer, Long>> p2hCounts = new HashMap<>();
    int topK = 6;
    for (String label : labels) {
      INDArray vector = lookupTable.vector(label);
      // NOTE(review): both lookups use the PV 'vector' here (not hvector as in
      // the similarity pass above) — confirm this asymmetry is intentional.
      Collection<String> strings = paragraphVectors.nearestLabels(vector, topK);
      Collection<String> hstrings = par2Hier.nearestLabels(vector, topK);
      // depth = number of '.'-separated segments minus one.
      int labelDepth = label.split("\\.").length - 1;
      int stringDepth = getClass(strings);
      int hstringDepth = getClass(hstrings);
      updateCM(pvCounts, labelDepth, stringDepth);
      updateCM(p2hCounts, labelDepth, hstringDepth);
    }
    ConfusionMatrix pvCM = new ConfusionMatrix(pvCounts);
    ConfusionMatrix p2hCM = new ConfusionMatrix(p2hCounts);
    System.out.println("mf1(" + k + "," + method + "):" + pvCM.getF1Measure() + "," + p2hCM.getF1Measure());
    System.out.println("acc(" + k + "," + method + "):" + pvCM.getAccuracy() + "," + p2hCM.getAccuracy());

    // create a CSV with a raw comparison
    File pvFile = Files.createFile(Paths.get("target/comparison-" + System.currentTimeMillis() + "-" + k + "-" + method + ".csv")).toFile();
    FileOutputStream pvOutputStream = new FileOutputStream(pvFile);
    try {
      // project both vector sets to 2D via SVD-based PCA for plotting.
      Map<String, INDArray> pvs2 = Par2HierUtils.svdPCA(pvs, 2);
      Map<String, INDArray> hvs2 = Par2HierUtils.svdPCA(hvs, 2);
      String pvCSV = asStrings(pvs2, hvs2);
      IOUtils.write(pvCSV, pvOutputStream);
    } finally {
      pvOutputStream.flush();
      pvOutputStream.close();
    }
  }

  /**
   * Increments the confusion-matrix cell {@code [labelDepth][stringDepth]}.
   *
   * @param pvCounts    the nested count map to update (used for both the PV
   *                    and the p2h matrices despite the parameter name)
   * @param labelDepth  the actual (gold) depth — outer key
   * @param stringDepth the predicted depth — inner key
   */
  private void updateCM(Map<Integer, Map<Integer, Long>> pvCounts, int labelDepth, int stringDepth) {
    Map<Integer, Long> stringLongMap = pvCounts.get(labelDepth);
    if (stringLongMap != null) {
      Long aLong = stringLongMap.get(stringDepth);
      if (aLong != null) {
        stringLongMap.put(stringDepth, aLong + 1);
      } else {
        stringLongMap.put(stringDepth, 1L);
      }
    } else {
      // first observation for this gold depth: create the inner row.
      stringLongMap = new HashMap<>();
      stringLongMap.put(stringDepth, 1L);
      pvCounts.put(labelDepth, stringLongMap);
    }
  }

  /**
   * Returns the majority hierarchy depth among the given labels
   * (depth = dot-separated segment count minus one). Ties resolve to the
   * depth encountered first during map iteration.
   *
   * @param strings nearest-neighbour labels
   * @return the most frequent depth, or 0 if {@code strings} is empty
   */
  private int getClass(Collection<String> strings) {
    Map<Integer, Integer> m = new HashMap<>();
    for (String s : strings) {
      int depth = s.split("\\.").length - 1;
      m.put(depth, m.containsKey(depth) ? m.get(depth) + 1 : 1);
    }
    int max = 0;
    int md = 0;
    for (Map.Entry<Integer, Integer> e : m.entrySet()) {
      if (e.getValue() > max) {
        md = e.getKey();
        max = e.getValue();
      }
    }
    return md;
  }

  /**
   * Renders the two 2D vector maps as CSV rows:
   * {@code doc, depth, label, PV x, PV y, HV x, HV y}.
   *
   * <p>Assumes labels have a 3-character prefix followed by a one- or
   * two-digit document number (charAt(3), optionally charAt(4)) — e.g.
   * {@code "doc12.1.3"} — TODO confirm against the dataset's file names.
   *
   * @param pvs label -> 2D paragraph vector
   * @param hvs label -> 2D par2hier vector (same key set as {@code pvs})
   * @return the CSV text, header included
   */
  private String asStrings(Map<String, INDArray> pvs, Map<String, INDArray> hvs) {
    StringBuilder builder = new StringBuilder();
    builder.append("doc, depth, Paragraph, PV x, PV y, HV x, HV y\n");
    for (Map.Entry<String, INDArray> entry : pvs.entrySet()) {
      String key = entry.getKey();
      String depth = String.valueOf(key.split("\\.").length - 1);
      // extract the document number that follows the 3-char label prefix.
      String c = String.valueOf(key.charAt(3));
      if (Character.isDigit(key.charAt(4))) {
        c += key.charAt(4);
      }
      String doc = String.valueOf(c);
      // entry.toString() is "label=[x, y]"; rewrite it into "label,x, y,".
      builder.append(doc).append(',').append(depth).append(", ").append(entry.toString().replace("=[", ",").replace("]", ","));
      String s = hvs.get(key).toString();
      builder.append(s.replace("[", "").replace("]", "")).append("\n");
    }
    return builder.toString();
  }

  /**
   * Fraction of labels whose nearest neighbour sits at the same hierarchy
   * depth, for PV (index 0) and HV (index 1).
   *
   * <p>Depth here is the raw segment count (no {@code -1} as elsewhere);
   * since only equality is compared the constant offset is irrelevant.
   *
   * @param comparison label -> [nearest PV label, nearest HV label]
   * @return {@code [pvSimilarity, hvSimilarity]}, each in [0, 1]
   */
  private double[] getDepthSimilarity(Map<String, String[]> comparison) {
    double pvSimilarity = 0;
    double hvSimilarity = 0;
    for (Map.Entry<String, String[]> c : comparison.entrySet()) {
      String label = c.getKey();
      String nearestPV = c.getValue()[0];
      String nearestHV = c.getValue()[1];
      int labelDepth = label.split("\\.").length;
      int pvDepth = nearestPV.split("\\.").length;
      int hvDepth = nearestHV.split("\\.").length;
      if (labelDepth == hvDepth) {
        hvSimilarity++;
      }
      if (labelDepth == pvDepth) {
        pvSimilarity++;
      }
    }
    double size = comparison.keySet().size();
    pvSimilarity /= size;
    hvSimilarity /= size;
    return new double[] {pvSimilarity, hvSimilarity};
  }

  /**
   * Fraction of labels whose nearest neighbour belongs to the same document,
   * for PV (index 0) and HV (index 1). Document identity is read from the
   * characters at positions 3 and 4 of the label — assumes the same
   * "3-char prefix + document number" label shape as {@link #asStrings};
   * TODO confirm.
   *
   * @param comparison label -> [nearest PV label, nearest HV label]
   * @return {@code [pvSimilarity, hvSimilarity]}, each in [0, 1]
   */
  private double[] getIntraDocumentSimilarity(Map<String, String[]> comparison) {
    double pvSimilarity = 0;
    double hvSimilarity = 0;
    for (Map.Entry<String, String[]> c : comparison.entrySet()) {
      String label = c.getKey();
      String nearestPV = c.getValue()[0];
      String nearestHV = c.getValue()[1];
      if (label.charAt(3) == nearestHV.charAt(3) && label.charAt(4) == nearestHV.charAt(4)) {
        hvSimilarity++;
      }
      if (label.charAt(3) == nearestPV.charAt(3) && label.charAt(4) == nearestPV.charAt(4)) {
        pvSimilarity++;
      }
    }
    double size = comparison.keySet().size();
    pvSimilarity /= size;
    hvSimilarity /= size;
    return new double[] {pvSimilarity, hvSimilarity};
  }

  /**
   * A confusion matrix backed by a nested count map: outer key is the correct
   * class, inner key the predicted class, value the count of such outcomes.
   */
  static class ConfusionMatrix {

    private final Map<Integer, Map<Integer, Long>> linearizedMatrix;
    // lazily computed accuracy; -1d means "not computed yet".
    private double accuracy = -1d;

    private ConfusionMatrix(Map<Integer, Map<Integer, Long>> linearizedMatrix) {
      this.linearizedMatrix = linearizedMatrix;
    }

    /**
     * get the linearized confusion matrix as a {@link Map}
     *
     * @return a {@link Map} whose keys are the correct classification answers and whose values are the actual answers'
     * counts
     */
    public Map<Integer, Map<Integer, Long>> getLinearizedMatrix() {
      return Collections.unmodifiableMap(linearizedMatrix);
    }

    /**
     * calculate precision on the given class
     *
     * @param klass the class to calculate the precision for
     * @return the precision for the given class, or 0 when the class was never
     * correctly predicted
     */
    public double getPrecision(Integer klass) {
      Map<Integer, Long> classifications = linearizedMatrix.get(klass);
      double tp = 0;
      double den = 0; // tp + fp
      if (classifications != null) {
        // true positives: the diagonal entry of this class's row.
        for (Map.Entry<Integer, Long> entry : classifications.entrySet()) {
          if (klass.equals(entry.getKey())) {
            tp += entry.getValue();
          }
        }
        // denominator: every time klass was predicted, over all rows (column sum).
        for (Map<Integer, Long> values : linearizedMatrix.values()) {
          if (values.containsKey(klass)) {
            den += values.get(klass);
          }
        }
      }
      return tp > 0 ? tp / den : 0;
    }

    /**
     * calculate recall on the given class
     *
     * @param klass the class to calculate the recall for
     * @return the recall for the given class, or 0 when the class never occurred
     */
    public double getRecall(Integer klass) {
      Map<Integer, Long> classifications = linearizedMatrix.get(klass);
      double tp = 0;
      double fn = 0;
      if (classifications != null) {
        // row sum split into the diagonal (tp) and off-diagonal (fn) entries.
        for (Map.Entry<Integer, Long> entry : classifications.entrySet()) {
          if (klass.equals(entry.getKey())) {
            tp += entry.getValue();
          } else {
            fn += entry.getValue();
          }
        }
      }
      return tp + fn > 0 ? tp / (tp + fn) : 0;
    }

    /**
     * get the F-1 measure on this confusion matrix, as the harmonic mean of
     * the macro averaged precision and recall
     *
     * @return the F-1 measure, or 0 when either precision or recall is 0
     */
    public double getF1Measure() {
      double recall = getRecall();
      double precision = getPrecision();
      return precision > 0 && recall > 0 ? 2 * precision * recall / (precision + recall) : 0;
    }

    /**
     * Calculate accuracy on this confusion matrix using the formula:
     * {@literal accuracy = correctly-classified / (correctly-classified + wrongly-classified)}
     *
     * @return the accuracy
     */
    public double getAccuracy() {
      // NOTE(review): exact == against the -1d sentinel is safe here because
      // the field is only ever assigned -1d (ctor) or the computed value.
      if (this.accuracy == -1) {
        double tp = 0d;
        double tn = 0d;
        double tfp = 0d; // tp + fp
        double fn = 0d;
        for (Map.Entry<Integer, Map<Integer, Long>> classification : linearizedMatrix.entrySet()) {
          Integer klass = classification.getKey();
          // row pass: diagonal -> tp, off-diagonal -> fn.
          for (Map.Entry<Integer, Long> entry : classification.getValue().entrySet()) {
            if (klass.equals(entry.getKey())) {
              tp += entry.getValue();
            } else {
              fn += entry.getValue();
            }
          }
          // column pass: predictions of klass -> tfp; rows never predicting it -> tn.
          for (Map<Integer, Long> values : linearizedMatrix.values()) {
            if (values.containsKey(klass)) {
              tfp += values.get(klass);
            } else {
              tn++;
            }
          }
        }
        this.accuracy = (tp + tn) / (tfp + fn + tn);
      }
      return this.accuracy;
    }

    /**
     * get the macro averaged precision (see {@link #getPrecision(Integer)}) over all the classes.
     *
     * @return the macro averaged precision as computed from the confusion matrix
     */
    public double getPrecision() {
      double p = 0;
      for (Map.Entry<Integer, Map<Integer, Long>> classification : linearizedMatrix.entrySet()) {
        Integer klass = classification.getKey();
        p += getPrecision(klass);
      }
      return p / linearizedMatrix.size();
    }

    /**
     * get the macro averaged recall (see {@link #getRecall(Integer)}) over all the classes
     *
     * @return the recall as computed from the confusion matrix
     */
    public double getRecall() {
      double r = 0;
      for (Map.Entry<Integer, Map<Integer, Long>> classification : linearizedMatrix.entrySet()) {
        Integer klass = classification.getKey();
        r += getRecall(klass);
      }
      return r / linearizedMatrix.size();
    }

    @Override
    public String toString() {
      return "ConfusionMatrix{" +
          "linearizedMatrix=" + linearizedMatrix +
          '}';
    }
  }
}
/*
 * Copyright(c) 2017 lizhaotailang
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package io.github.marktony.espresso.component;

import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.Canvas;
import android.graphics.Rect;
import android.graphics.drawable.Drawable;
import android.support.annotation.Dimension;
import android.util.AttributeSet;
import android.view.View;

import io.github.marktony.espresso.R;

/**
 * Created by lizhaotailang on 2017/3/4.
 *
 * A vertical timeline widget: a central "atom" drawable with an optional
 * line drawable above it (start line) and below it (finish line), both
 * horizontally centered on the atom. Sizes and drawables are configurable
 * via the {@code Timeline} styleable attributes or the public setters.
 */
public class Timeline extends View {

    // Edge length (px) of the square atom drawable; default 24.
    @Dimension
    private int atomSize = 24;
    // Thickness (px) of the start/finish lines; default 12.
    @Dimension
    private int lineSize = 12;

    // Line drawn from the top of the view down to the atom; may be null.
    private Drawable startLine;
    // Line drawn from the atom down to the bottom of the view; may be null.
    private Drawable finishLine;
    // The central marker drawable; may be null.
    private Drawable atomDrawable;

    public Timeline(Context context) {
        this(context, null);
    }

    public Timeline(Context context, AttributeSet attrs) {
        this(context, attrs, 0);
    }

    public Timeline(Context context, AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
        init(attrs);
    }

    /**
     * Reads the {@code Timeline} styleable attributes (sizes and the three
     * drawables) and registers this view as each drawable's callback so that
     * drawable invalidation propagates to the view.
     */
    private void init(AttributeSet attrs) {
        TypedArray typedArray = getContext().obtainStyledAttributes(attrs, R.styleable.Timeline);
        atomSize = typedArray.getDimensionPixelSize(R.styleable.Timeline_atom_size, atomSize);
        lineSize = typedArray.getDimensionPixelSize(R.styleable.Timeline_line_size, lineSize);
        startLine = typedArray.getDrawable(R.styleable.Timeline_start_line);
        finishLine = typedArray.getDrawable(R.styleable.Timeline_finish_line);
        atomDrawable = typedArray.getDrawable(R.styleable.Timeline_atom);
        // TypedArray instances are pooled and must always be recycled.
        typedArray.recycle();
        if (startLine != null) {
            startLine.setCallback(this);
        }
        if (finishLine != null) {
            finishLine.setCallback(this);
        }
        if (atomDrawable != null) {
            atomDrawable.setCallback(this);
        }
    }

    @Override
    protected void onDraw(Canvas canvas) {
        // Bounds were computed in initDrawableSize(); just paint whatever is set.
        if (startLine != null) {
            startLine.draw(canvas);
        }
        if (finishLine != null) {
            finishLine.draw(canvas);
        }
        if (atomDrawable != null) {
            atomDrawable.draw(canvas);
        }
        super.onDraw(canvas);
    }

    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        // Minimum content size is the padding, plus the atom if one is set.
        int w = getPaddingLeft() + getPaddingRight();
        int h = getPaddingTop() + getPaddingBottom();
        if (atomDrawable != null) {
            w += atomSize;
            h += atomSize;
        }
        // NOTE(review): getMeasuredWidth()/getMeasuredHeight() are read here
        // before setMeasuredDimension(), so they hold the previous measure
        // pass's values (0 on the first pass) — confirm this is intended.
        w = Math.max(w, getMeasuredWidth());
        h = Math.max(h, getMeasuredHeight());
        int width = resolveSizeAndState(w, widthMeasureSpec, 0);
        int height = resolveSizeAndState(h, heightMeasureSpec, 0);
        setMeasuredDimension(width, height);
    }

    @Override
    protected void onSizeChanged(int w, int h, int oldw, int oldh) {
        super.onSizeChanged(w, h, oldw, oldh);
        // Recompute drawable bounds whenever the view's size changes.
        initDrawableSize();
    }

    /**
     * Lays out the three drawables inside the padded content area: the atom
     * at the top-left corner of the content area, the start line from the
     * view's top edge down to the atom, and the finish line from the atom
     * down to the view's bottom edge, both centered on the atom's x-center.
     */
    private void initDrawableSize() {
        int pLeft = getPaddingLeft();
        int pRight = getPaddingRight();
        int pTop = getPaddingTop();
        int pBottom = getPaddingBottom();

        int width = getWidth();
        int height = getHeight();

        int cWidth = width - pLeft - pRight;
        int cHeight = height - pTop - pBottom;

        Rect bounds;
        if (atomDrawable != null) {
            // Local shadows the field on purpose: clamp the requested atom
            // size to the available content area.
            int atomSize = Math.min(this.atomSize, Math.min(cWidth, cHeight));
            atomDrawable.setBounds(pLeft, pTop, pLeft + atomSize, pTop + atomSize);
            bounds = atomDrawable.getBounds();
        } else {
            // No atom: treat the whole content area as the anchor rect.
            bounds = new Rect(pLeft, pTop, pLeft + cWidth, pTop + cHeight);
        }

        // Center the lines horizontally on the anchor rect.
        int halfLineSize = lineSize >> 1;
        int lineLeft = bounds.centerX() - halfLineSize;
        if (startLine != null) {
            startLine.setBounds(lineLeft, 0, lineLeft + lineSize, bounds.top);
        }
        if (finishLine != null) {
            finishLine.setBounds(lineLeft, bounds.bottom, lineLeft + lineSize, height);
        }
    }

    /**
     * Sets the line thickness in pixels; relayouts and redraws on change.
     */
    public void setLineSize(int lineSize) {
        if (this.lineSize != lineSize) {
            this.lineSize = lineSize;
            initDrawableSize();
            invalidate();
        }
    }

    /**
     * Sets the atom edge length in pixels; relayouts and redraws on change.
     */
    public void setAtomSize(int atomSize) {
        if (this.atomSize != atomSize) {
            this.atomSize = atomSize;
            initDrawableSize();
            invalidate();
        }
    }

    /**
     * Replaces the start-line drawable; relayouts and redraws on change.
     */
    public void setStartLine(Drawable startLine) {
        if (this.startLine != startLine) {
            this.startLine = startLine;
            if (this.startLine != null) {
                this.startLine.setCallback(this);
            }
            initDrawableSize();
            invalidate();
        }
    }

    /**
     * Replaces the finish-line drawable; relayouts and redraws on change.
     */
    public void setFinishLine(Drawable finishLine) {
        if (this.finishLine != finishLine) {
            this.finishLine = finishLine;
            if (this.finishLine != null) {
                this.finishLine.setCallback(this);
            }
            initDrawableSize();
            invalidate();
        }
    }

    /**
     * Replaces the atom drawable; relayouts and redraws on change.
     */
    public void setAtomDrawable(Drawable atomDrawable) {
        if (this.atomDrawable != atomDrawable) {
            this.atomDrawable = atomDrawable;
            if (this.atomDrawable != null) {
                this.atomDrawable.setCallback(this);
            }
            initDrawableSize();
            invalidate();
        }
    }
}
/** * * Copyright 2003-2007 Jive Software. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jivesoftware.smackx.workgroup.user; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.concurrent.CopyOnWriteArraySet; import org.jivesoftware.smack.SmackException; import org.jivesoftware.smack.SmackException.NoResponseException; import org.jivesoftware.smack.SmackException.NotConnectedException; import org.jivesoftware.smack.StanzaCollector; import org.jivesoftware.smack.StanzaListener; import org.jivesoftware.smack.XMPPConnection; import org.jivesoftware.smack.XMPPException; import org.jivesoftware.smack.XMPPException.XMPPErrorException; import org.jivesoftware.smack.filter.AndFilter; import org.jivesoftware.smack.filter.FromMatchesFilter; import org.jivesoftware.smack.filter.StanzaFilter; import org.jivesoftware.smack.filter.StanzaTypeFilter; import org.jivesoftware.smack.packet.ExtensionElement; import org.jivesoftware.smack.packet.IQ; import org.jivesoftware.smack.packet.Message; import org.jivesoftware.smack.packet.Presence; import org.jivesoftware.smack.packet.Stanza; import org.jivesoftware.smackx.disco.ServiceDiscoveryManager; import org.jivesoftware.smackx.disco.packet.DiscoverInfo; import org.jivesoftware.smackx.muc.MultiUserChatManager; import org.jivesoftware.smackx.muc.packet.MUCUser; import org.jivesoftware.smackx.workgroup.MetaData; import org.jivesoftware.smackx.workgroup.WorkgroupInvitation; import 
org.jivesoftware.smackx.workgroup.WorkgroupInvitationListener; import org.jivesoftware.smackx.workgroup.ext.forms.WorkgroupForm; import org.jivesoftware.smackx.workgroup.packet.DepartQueuePacket; import org.jivesoftware.smackx.workgroup.packet.QueueUpdate; import org.jivesoftware.smackx.workgroup.packet.SessionID; import org.jivesoftware.smackx.workgroup.packet.UserID; import org.jivesoftware.smackx.workgroup.settings.ChatSetting; import org.jivesoftware.smackx.workgroup.settings.ChatSettings; import org.jivesoftware.smackx.workgroup.settings.OfflineSettings; import org.jivesoftware.smackx.workgroup.settings.SoundSettings; import org.jivesoftware.smackx.workgroup.settings.WorkgroupProperties; import org.jivesoftware.smackx.xdata.Form; import org.jivesoftware.smackx.xdata.FormField; import org.jivesoftware.smackx.xdata.packet.DataForm; import org.jxmpp.jid.DomainBareJid; import org.jxmpp.jid.EntityJid; import org.jxmpp.jid.Jid; /** * Provides workgroup services for users. Users can join the workgroup queue, depart the * queue, find status information about their placement in the queue, and register to * be notified when they are routed to an agent.<p> * * This class only provides a users perspective into a workgroup and is not intended * for use by agents. * * @author Matt Tucker * @author Derek DeMoro */ public class Workgroup { private final Jid workgroupJID; private final XMPPConnection connection; private boolean inQueue; private final CopyOnWriteArraySet<WorkgroupInvitationListener> invitationListeners; private final CopyOnWriteArraySet<QueueListener> queueListeners; private int queuePosition = -1; private int queueRemainingTime = -1; /** * Creates a new workgroup instance using the specified workgroup JID * (eg support@workgroup.example.com) and XMPP connection. The connection must have * undergone a successful login before being used to construct an instance of * this class. * * @param workgroupJID the JID of the workgroup. 
* @param connection an XMPP connection which must have already undergone a * successful login. */ public Workgroup(Jid workgroupJID, XMPPConnection connection) { // Login must have been done before passing in connection. if (!connection.isAuthenticated()) { throw new IllegalStateException("Must login to server before creating workgroup."); } this.workgroupJID = workgroupJID; this.connection = connection; inQueue = false; invitationListeners = new CopyOnWriteArraySet<>(); queueListeners = new CopyOnWriteArraySet<>(); // Register as a queue listener for internal usage by this instance. addQueueListener(new QueueListener() { @Override public void joinedQueue() { inQueue = true; } @Override public void departedQueue() { inQueue = false; queuePosition = -1; queueRemainingTime = -1; } @Override public void queuePositionUpdated(int currentPosition) { queuePosition = currentPosition; } @Override public void queueWaitTimeUpdated(int secondsRemaining) { queueRemainingTime = secondsRemaining; } }); /** * Internal handling of an invitation.Recieving an invitation removes the user from the queue. */ MultiUserChatManager.getInstanceFor(connection).addInvitationListener( new org.jivesoftware.smackx.muc.InvitationListener() { @Override public void invitationReceived(XMPPConnection conn, org.jivesoftware.smackx.muc.MultiUserChat room, EntityJid inviter, String reason, String password, Message message, MUCUser.Invite invitation) { inQueue = false; queuePosition = -1; queueRemainingTime = -1; } }); // Register a packet listener for all the messages sent to this client. StanzaFilter typeFilter = new StanzaTypeFilter(Message.class); connection.addAsyncStanzaListener(new StanzaListener() { @Override public void processStanza(Stanza packet) { handlePacket(packet); } }, typeFilter); } /** * Returns the name of this workgroup (eg support@example.com). * * @return the name of the workgroup. 
*/ public Jid getWorkgroupJID() { return workgroupJID; } /** * Returns true if the user is currently waiting in the workgroup queue. * * @return true if currently waiting in the queue. */ public boolean isInQueue() { return inQueue; } /** * Returns true if the workgroup is available for receiving new requests. The workgroup will be * available only when agents are available for this workgroup. * * @return true if the workgroup is available for receiving new requests. * @throws XMPPErrorException * @throws NoResponseException * @throws NotConnectedException * @throws InterruptedException */ public boolean isAvailable() throws NoResponseException, XMPPErrorException, NotConnectedException, InterruptedException { Presence directedPresence = new Presence(Presence.Type.available); directedPresence.setTo(workgroupJID); StanzaFilter typeFilter = new StanzaTypeFilter(Presence.class); StanzaFilter fromFilter = FromMatchesFilter.create(workgroupJID); StanzaCollector collector = connection.createStanzaCollectorAndSend(new AndFilter(fromFilter, typeFilter), directedPresence); Presence response = collector.nextResultOrThrow(); return Presence.Type.available == response.getType(); } /** * Returns the users current position in the workgroup queue. A value of 0 means * the user is next in line to be routed; therefore, if the queue position * is being displayed to the end user it is usually a good idea to add 1 to * the value this method returns before display. If the user is not currently * waiting in the workgroup, or no queue position information is available, -1 * will be returned. * * @return the user's current position in the workgroup queue, or -1 if the * position isn't available or if the user isn't in the queue. */ public int getQueuePosition() { return queuePosition; } /** * Returns the estimated time (in seconds) that the user has to left wait in * the workgroup queue before being routed. 
If the user is not currently waiting * int he workgroup, or no queue time information is available, -1 will be * returned. * * @return the estimated time remaining (in seconds) that the user has to * wait in the workgroup queue, or -1 if time information isn't available * or if the user isn't int the queue. */ public int getQueueRemainingTime() { return queueRemainingTime; } /** * Joins the workgroup queue to wait to be routed to an agent. After joining * the queue, queue status events will be sent to indicate the user's position and * estimated time left in the queue. Once joining the queue, there are three ways * the user can leave the queue: <ul> * * <li>The user is routed to an agent, which triggers a GroupChat invitation. * <li>The user asks to leave the queue by calling the {@link #departQueue} method. * <li>A server error occurs, or an administrator explicitly removes the user * from the queue. * </ul> * * A user cannot request to join the queue again if already in the queue. Therefore, * this method will throw an IllegalStateException if the user is already in the queue.<p> * * Some servers may be configured to require certain meta-data in order to * join the queue. In that case, the {@link #joinQueue(Form)} method should be * used instead of this method so that meta-data may be passed in.<p> * * The server tracks the conversations that a user has with agents over time. By * default, that tracking is done using the user's JID. However, this is not always * possible. For example, when the user is logged in anonymously using a web client. * In that case the user ID might be a randomly generated value put into a persistent * cookie or a username obtained via the session. A userID can be explicitly * passed in by using the {@link #joinQueue(Form, Jid)} method. When specified, * that userID will be used instead of the user's JID to track conversations. The * server will ignore a manually specified userID if the user's connection to the server * is not anonymous. 
* * @throws XMPPException if an error occurred joining the queue. An error may indicate * that a connection failure occurred or that the server explicitly rejected the * request to join the queue. * @throws SmackException * @throws InterruptedException */ public void joinQueue() throws XMPPException, SmackException, InterruptedException { joinQueue(null); } /** * Joins the workgroup queue to wait to be routed to an agent. After joining * the queue, queue status events will be sent to indicate the user's position and * estimated time left in the queue. Once joining the queue, there are three ways * the user can leave the queue: <ul> * * <li>The user is routed to an agent, which triggers a GroupChat invitation. * <li>The user asks to leave the queue by calling the {@link #departQueue} method. * <li>A server error occurs, or an administrator explicitly removes the user * from the queue. * </ul> * * A user cannot request to join the queue again if already in the queue. Therefore, * this method will throw an IllegalStateException if the user is already in the queue.<p> * * Some servers may be configured to require certain meta-data in order to * join the queue.<p> * * The server tracks the conversations that a user has with agents over time. By * default, that tracking is done using the user's JID. However, this is not always * possible. For example, when the user is logged in anonymously using a web client. * In that case the user ID might be a randomly generated value put into a persistent * cookie or a username obtained via the session. A userID can be explicitly * passed in by using the {@link #joinQueue(Form, Jid)} method. When specified, * that userID will be used instead of the user's JID to track conversations. The * server will ignore a manually specified userID if the user's connection to the server * is not anonymous. * * @param answerForm the completed form the send for the join request. * @throws XMPPException if an error occurred joining the queue. 
An error may indicate * that a connection failure occurred or that the server explicitly rejected the * request to join the queue. * @throws SmackException * @throws InterruptedException */ public void joinQueue(Form answerForm) throws XMPPException, SmackException, InterruptedException { joinQueue(answerForm, null); } /** * <p>Joins the workgroup queue to wait to be routed to an agent. After joining * the queue, queue status events will be sent to indicate the user's position and * estimated time left in the queue. Once joining the queue, there are three ways * the user can leave the queue: <ul> * * <li>The user is routed to an agent, which triggers a GroupChat invitation. * <li>The user asks to leave the queue by calling the {@link #departQueue} method. * <li>A server error occurs, or an administrator explicitly removes the user * from the queue. * </ul> * * A user cannot request to join the queue again if already in the queue. Therefore, * this method will throw an IllegalStateException if the user is already in the queue.<p> * * Some servers may be configured to require certain meta-data in order to * join the queue.<p> * * The server tracks the conversations that a user has with agents over time. By * default, that tracking is done using the user's JID. However, this is not always * possible. For example, when the user is logged in anonymously using a web client. * In that case the user ID might be a randomly generated value put into a persistent * cookie or a username obtained via the session. When specified, that userID will * be used instead of the user's JID to track conversations. The server will ignore a * manually specified userID if the user's connection to the server is not anonymous. * * @param answerForm the completed form associated with the join request. * @param userID String that represents the ID of the user when using anonymous sessions * or <tt>null</tt> if a userID should not be used. 
 * @throws XMPPErrorException if an error occurred joining the queue. An error may indicate
 *      that a connection failure occurred or that the server explicitly rejected the
 *      request to join the queue.
 * @throws NoResponseException if the server did not respond to the join request.
 * @throws NotConnectedException if the connection to the server was lost.
 * @throws InterruptedException if the calling thread was interrupted while waiting.
 */
public void joinQueue(Form answerForm, Jid userID) throws NoResponseException, XMPPErrorException, NotConnectedException, InterruptedException {
    // If already in the queue ignore the join request.
    if (inQueue) {
        throw new IllegalStateException("Already in queue " + workgroupJID);
    }

    JoinQueuePacket joinPacket = new JoinQueuePacket(workgroupJID, answerForm, userID);

    // Synchronously send the join IQ and wait for the result (throws on error response).
    connection.createStanzaCollectorAndSend(joinPacket).nextResultOrThrow();

    // Notify listeners that we've joined the queue.
    fireQueueJoinedEvent();
}

/**
 * <p>Joins the workgroup queue to wait to be routed to an agent. After joining
 * the queue, queue status events will be sent to indicate the user's position and
 * estimated time left in the queue. Once joining the queue, there are three ways
 * the user can leave the queue: <ul>
 *
 * <li>The user is routed to an agent, which triggers a GroupChat invitation.
 * <li>The user asks to leave the queue by calling the {@link #departQueue} method.
 * <li>A server error occurs, or an administrator explicitly removes the user
 * from the queue.
 * </ul>
 *
 * A user cannot request to join the queue again if already in the queue. Therefore,
 * this method will throw an IllegalStateException if the user is already in the queue.<p>
 *
 * Some servers may be configured to require certain meta-data in order to
 * join the queue.<p>
 *
 * The server tracks the conversations that a user has with agents over time. By
 * default, that tracking is done using the user's JID. However, this is not always
 * possible. For example, when the user is logged in anonymously using a web client.
 * In that case the user ID might be a randomly generated value put into a persistent
 * cookie or a username obtained via the session. When specified, that userID will
 * be used instead of the user's JID to track conversations. The server will ignore a
 * manually specified userID if the user's connection to the server is not anonymous.
 *
 * @param metadata metadata to create a dataform from.
 * @param userID String that represents the ID of the user when using anonymous sessions
 *      or <tt>null</tt> if a userID should not be used.
 * @throws XMPPException if an error occurred joining the queue. An error may indicate
 *      that a connection failure occurred or that the server explicitly rejected the
 *      request to join the queue.
 * @throws SmackException if Smack detected an exceptional situation.
 * @throws InterruptedException if the calling thread was interrupted while waiting.
 */
public void joinQueue(Map<String,Object> metadata, Jid userID) throws XMPPException, SmackException, InterruptedException {
    // If already in the queue ignore the join request.
    if (inQueue) {
        throw new IllegalStateException("Already in queue " + workgroupJID);
    }

    // Build dataform from metadata: one single-line text field per metadata entry,
    // with the entry's value rendered via toString().
    Form form = new Form(DataForm.Type.submit);
    Iterator<String> iter = metadata.keySet().iterator();
    while (iter.hasNext()) {
        String name = iter.next();
        String value = metadata.get(name).toString();

        FormField field = new FormField(name);
        field.setType(FormField.Type.text_single);
        form.addField(field);
        form.setAnswer(name, value);
    }
    // Delegate to the Form-based variant, which performs the actual join.
    joinQueue(form, userID);
}

/**
 * Departs the workgroup queue. If the user is not currently in the queue, this
 * method will do nothing.<p>
 *
 * Normally, the user would not manually leave the queue. However, they may wish to
 * under certain circumstances -- for example, if they no longer wish to be routed
 * to an agent because they've been waiting too long.
 *
 * @throws XMPPErrorException if an error occurred trying to send the depart queue
 *      request to the server.
 * @throws NoResponseException if the server did not respond to the depart request.
 * @throws NotConnectedException if the connection to the server was lost.
 * @throws InterruptedException if the calling thread was interrupted while waiting.
 */
public void departQueue() throws NoResponseException, XMPPErrorException, NotConnectedException, InterruptedException {
    // If not in the queue ignore the depart request.
    if (!inQueue) {
        return;
    }

    DepartQueuePacket departPacket = new DepartQueuePacket(this.workgroupJID);
    connection.createStanzaCollectorAndSend(departPacket).nextResultOrThrow();

    // Notify listeners that we're no longer in the queue.
    fireQueueDepartedEvent();
}

/**
 * Adds a queue listener that will be notified of queue events for the user
 * that created this Workgroup instance.
 *
 * @param queueListener the queue listener.
 */
public void addQueueListener(QueueListener queueListener) {
    queueListeners.add(queueListener);
}

/**
 * Removes a queue listener.
 *
 * @param queueListener the queue listener.
 */
public void removeQueueListener(QueueListener queueListener) {
    queueListeners.remove(queueListener);
}

/**
 * Adds an invitation listener that will be notified of groupchat invitations
 * from the workgroup for the the user that created this Workgroup instance.
 *
 * @param invitationListener the invitation listener.
 */
public void addInvitationListener(WorkgroupInvitationListener invitationListener) {
    invitationListeners.add(invitationListener);
}

/**
 * Removes an invitation listener.
 *
 * NOTE(review): despite its name this overload removes an *invitation* listener,
 * not a queue listener. Renaming (e.g. to removeInvitationListener) would break
 * the public API, so only flagging the inconsistency here.
 *
 * @param invitationListener the invitation listener.
 */
public void removeQueueListener(WorkgroupInvitationListener invitationListener) {
    invitationListeners.remove(invitationListener);
}

// Fires a groupchat-invitation event to all registered invitation listeners.
private void fireInvitationEvent(WorkgroupInvitation invitation) {
    for (WorkgroupInvitationListener listener : invitationListeners) {
        // CHECKSTYLE:OFF
        listener.invitationReceived(invitation);
        // CHECKSTYLE:ON
    }
}

// Fires a queue-joined event to all registered queue listeners.
private void fireQueueJoinedEvent() {
    for (QueueListener listener : queueListeners) {
        // CHECKSTYLE:OFF
        listener.joinedQueue();
        // CHECKSTYLE:ON
    }
}

// Fires a queue-departed event to all registered queue listeners.
private void fireQueueDepartedEvent() {
    for (QueueListener listener : queueListeners) {
        listener.departedQueue();
    }
}

// Fires a queue-position update to all registered queue listeners.
private void fireQueuePositionEvent(int currentPosition) {
    for (QueueListener listener : queueListeners) {
        listener.queuePositionUpdated(currentPosition);
    }
}

// Fires an estimated-wait-time update (in seconds) to all registered queue listeners.
private void fireQueueTimeEvent(int secondsRemaining) {
    for (QueueListener listener : queueListeners) {
        listener.queueWaitTimeUpdated(secondsRemaining);
    }
}

// PacketListener Implementation.
// Dispatches incoming Message stanzas: depart-queue notifications, queue-status
// updates, and MUC invitations originating from this workgroup.
private void handlePacket(Stanza packet) {
    if (packet instanceof Message) {
        Message msg = (Message) packet;
        // Check to see if the user left the queue.
        ExtensionElement pe = msg.getExtension("depart-queue", "http://jabber.org/protocol/workgroup");
        ExtensionElement queueStatus = msg.getExtension("queue-status", "http://jabber.org/protocol/workgroup");

        if (pe != null) {
            fireQueueDepartedEvent();
        }
        else if (queueStatus != null) {
            // -1 is the sentinel for "not provided" in both position and remaining time.
            QueueUpdate queueUpdate = (QueueUpdate) queueStatus;
            if (queueUpdate.getPosition() != -1) {
                fireQueuePositionEvent(queueUpdate.getPosition());
            }
            if (queueUpdate.getRemaingTime() != -1) {
                fireQueueTimeEvent(queueUpdate.getRemaingTime());
            }
        }
        else {
            // Check if a room invitation was sent and if the sender is the workgroup
            MUCUser mucUser = msg.getExtension("x", "http://jabber.org/protocol/muc#user");
            MUCUser.Invite invite = mucUser != null ? mucUser.getInvite() : null;
            if (invite != null && workgroupJID.equals(invite.getFrom())) {
                String sessionID = null;
                Map<String, List<String>> metaData = null;

                // Optional session-id extension attached to the invitation.
                pe = msg.getExtension(SessionID.ELEMENT_NAME, SessionID.NAMESPACE);
                if (pe != null) {
                    sessionID = ((SessionID) pe).getSessionID();
                }

                // Optional meta-data extension attached to the invitation.
                pe = msg.getExtension(MetaData.ELEMENT_NAME, MetaData.NAMESPACE);
                if (pe != null) {
                    metaData = ((MetaData) pe).getMetaData();
                }

                WorkgroupInvitation inv = new WorkgroupInvitation(connection.getUser(), msg.getFrom(), workgroupJID, sessionID, msg.getBody(), msg.getFrom(), metaData);
                fireInvitationEvent(inv);
            }
        }
    }
}

/**
 * IQ stanza(/packet) to request joining the workgroup queue.
 */
private class JoinQueuePacket extends IQ {

    private final Jid userID;
    private final DataForm form;

    public JoinQueuePacket(Jid workgroup, Form answerForm, Jid userID) {
        super("join-queue", "http://jabber.org/protocol/workgroup");
        this.userID = userID;

        setTo(workgroup);
        setType(IQ.Type.set);

        form = answerForm.getDataFormToSend();
        addExtension(form);
    }

    @Override
    protected IQChildElementXmlStringBuilder getIQChildElementBuilder(IQChildElementXmlStringBuilder buf) {
        buf.rightAngleBracket();
        buf.append("<queue-notifications/>");
        // Add the user unique identification if the session is anonymous
        if (connection.isAnonymous()) {
            buf.append(new UserID(userID).toXML());
        }

        // Append data form text
        buf.append(form.toXML());

        return buf;
    }
}

/**
 * Returns a single chat setting based on it's identified key.
 *
 * @param key the key to find.
 * @return the ChatSetting if found, otherwise false.
 * @throws XMPPException if an error occurs while getting information from the server.
 * @throws SmackException if Smack detected an exceptional situation.
 * @throws InterruptedException if the calling thread was interrupted while waiting.
 */
public ChatSetting getChatSetting(String key) throws XMPPException, SmackException, InterruptedException {
    ChatSettings chatSettings = getChatSettings(key, -1);
    return chatSettings.getFirstEntry();
}

/**
 * Returns ChatSettings based on type.
 *
 * @param type the type of ChatSettings to return.
 * @return the ChatSettings of given type, otherwise null.
 * @throws XMPPException if an error occurs while getting information from the server.
 * @throws SmackException if Smack detected an exceptional situation.
 * @throws InterruptedException if the calling thread was interrupted while waiting.
 */
public ChatSettings getChatSettings(int type) throws XMPPException, SmackException, InterruptedException {
    return getChatSettings(null, type);
}

/**
 * Returns all ChatSettings.
 *
 * @return all ChatSettings of a given workgroup.
 * @throws XMPPException if an error occurs while getting information from the server.
 * @throws SmackException if Smack detected an exceptional situation.
 * @throws InterruptedException if the calling thread was interrupted while waiting.
 */
public ChatSettings getChatSettings() throws XMPPException, SmackException, InterruptedException {
    return getChatSettings(null, -1);
}

/**
 * Asks the workgroup for it's Chat Settings.
 *
 * Both {@code key} and {@code type} are optional filters: -1 for type and null for
 * key mean "unfiltered".
 *
 * @return key specify a key to retrieve only that settings. Otherwise for all settings, key should be null.
 * @throws NoResponseException if the server did not respond to the request.
 * @throws XMPPErrorException if an error occurs while getting information from the server.
 * @throws NotConnectedException if the connection to the server was lost.
 * @throws InterruptedException if the calling thread was interrupted while waiting.
 */
private ChatSettings getChatSettings(String key, int type) throws NoResponseException, XMPPErrorException, NotConnectedException, InterruptedException {
    ChatSettings request = new ChatSettings();
    if (key != null) {
        request.setKey(key);
    }
    if (type != -1) {
        // ChatSettings-specific setType(int) overload — distinct from IQ.setType below.
        request.setType(type);
    }
    request.setType(IQ.Type.get);
    request.setTo(workgroupJID);

    ChatSettings response = connection.createStanzaCollectorAndSend(request).nextResultOrThrow();

    return response;
}

/**
 * The workgroup service may be configured to send email. This queries the Workgroup Service
 * to see if the email service has been configured and is available.
 *
 * @return true if the email service is available, otherwise return false.
 * @throws SmackException if Smack detected an exceptional situation.
 * @throws InterruptedException if the calling thread was interrupted while waiting.
 */
public boolean isEmailAvailable() throws SmackException, InterruptedException {
    ServiceDiscoveryManager discoManager = ServiceDiscoveryManager.getInstanceFor(connection);

    try {
        // Service discovery on the workgroup's domain; any XMPP error is treated
        // as "email not available" rather than propagated.
        DomainBareJid workgroupService = workgroupJID.asDomainBareJid();
        DiscoverInfo infoResult = discoManager.discoverInfo(workgroupService);
        return infoResult.containsFeature("jive:email:provider");
    }
    catch (XMPPException e) {
        return false;
    }
}

/**
 * Asks the workgroup for it's Offline Settings.
 *
 * @return offlineSettings the offline settings for this workgroup.
 * @throws XMPPErrorException if the server returned an error response.
 * @throws NoResponseException if the server did not respond to the request.
 * @throws NotConnectedException if the connection to the server was lost.
 * @throws InterruptedException if the calling thread was interrupted while waiting.
 */
public OfflineSettings getOfflineSettings() throws NoResponseException, XMPPErrorException, NotConnectedException, InterruptedException {
    OfflineSettings request = new OfflineSettings();
    request.setType(IQ.Type.get);
    request.setTo(workgroupJID);

    return connection.createStanzaCollectorAndSend(request).nextResultOrThrow();
}

/**
 * Asks the workgroup for it's Sound Settings.
 *
 * @return soundSettings the sound settings for the specified workgroup.
 * @throws XMPPErrorException if the server returned an error response.
 * @throws NoResponseException if the server did not respond to the request.
 * @throws NotConnectedException if the connection to the server was lost.
 * @throws InterruptedException if the calling thread was interrupted while waiting.
 */
public SoundSettings getSoundSettings() throws NoResponseException, XMPPErrorException, NotConnectedException, InterruptedException {
    SoundSettings request = new SoundSettings();
    request.setType(IQ.Type.get);
    request.setTo(workgroupJID);

    return connection.createStanzaCollectorAndSend(request).nextResultOrThrow();
}

/**
 * Asks the workgroup for it's Properties.
 *
 * @return the WorkgroupProperties for the specified workgroup.
 * @throws XMPPErrorException if the server returned an error response.
 * @throws NoResponseException if the server did not respond to the request.
 * @throws NotConnectedException if the connection to the server was lost.
 * @throws InterruptedException if the calling thread was interrupted while waiting.
 */
public WorkgroupProperties getWorkgroupProperties() throws NoResponseException, XMPPErrorException, NotConnectedException, InterruptedException {
    WorkgroupProperties request = new WorkgroupProperties();
    request.setType(IQ.Type.get);
    request.setTo(workgroupJID);

    return connection.createStanzaCollectorAndSend(request).nextResultOrThrow();
}

/**
 * Asks the workgroup for it's Properties.
 *
 * @param jid the jid of the user who's information you would like the workgroup to retreive.
 * @return the WorkgroupProperties for the specified workgroup.
 * @throws XMPPErrorException if the server returned an error response.
 * @throws NoResponseException if the server did not respond to the request.
 * @throws NotConnectedException if the connection to the server was lost.
 * @throws InterruptedException if the calling thread was interrupted while waiting.
 */
public WorkgroupProperties getWorkgroupProperties(String jid) throws NoResponseException, XMPPErrorException, NotConnectedException, InterruptedException {
    WorkgroupProperties request = new WorkgroupProperties();
    request.setJid(jid);
    request.setType(IQ.Type.get);
    request.setTo(workgroupJID);

    return connection.createStanzaCollectorAndSend(request).nextResultOrThrow();
}

/**
 * Returns the Form to use for all clients of a workgroup. It is unlikely that the server
 * will change the form (without a restart) so it is safe to keep the returned form
 * for future submissions.
 *
 * @return the Form to use for searching transcripts.
 * @throws XMPPErrorException if the server returned an error response.
 * @throws NoResponseException if the server did not respond to the request.
 * @throws NotConnectedException if the connection to the server was lost.
 * @throws InterruptedException if the calling thread was interrupted while waiting.
 */
public Form getWorkgroupForm() throws NoResponseException, XMPPErrorException, NotConnectedException, InterruptedException {
    WorkgroupForm workgroupForm = new WorkgroupForm();
    workgroupForm.setType(IQ.Type.get);
    workgroupForm.setTo(workgroupJID);

    WorkgroupForm response = connection.createStanzaCollectorAndSend(workgroupForm).nextResultOrThrow();
    return Form.getFormFrom(response);
}
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jackrabbit.oak.security; import java.util.HashSet; import java.util.Map; import java.util.Set; import javax.annotation.Nonnull; import javax.annotation.Nullable; import com.google.common.collect.ImmutableMap; import org.apache.jackrabbit.oak.osgi.OsgiWhiteboard; import org.apache.jackrabbit.oak.plugins.tree.RootProvider; import org.apache.jackrabbit.oak.plugins.tree.TreeProvider; import org.apache.jackrabbit.oak.plugins.tree.impl.RootProviderService; import org.apache.jackrabbit.oak.plugins.tree.impl.TreeProviderService; import org.apache.jackrabbit.oak.security.authentication.AuthenticationConfigurationImpl; import org.apache.jackrabbit.oak.security.authentication.token.TokenConfigurationImpl; import org.apache.jackrabbit.oak.security.authorization.AuthorizationConfigurationImpl; import org.apache.jackrabbit.oak.security.authorization.composite.CompositeAuthorizationConfiguration; import org.apache.jackrabbit.oak.security.authorization.restriction.WhiteboardRestrictionProvider; import org.apache.jackrabbit.oak.security.principal.PrincipalConfigurationImpl; import org.apache.jackrabbit.oak.security.privilege.PrivilegeConfigurationImpl; import 
org.apache.jackrabbit.oak.security.user.UserConfigurationImpl;
import org.apache.jackrabbit.oak.security.user.whiteboard.WhiteboardAuthorizableActionProvider;
import org.apache.jackrabbit.oak.security.user.whiteboard.WhiteboardAuthorizableNodeName;
import org.apache.jackrabbit.oak.security.user.whiteboard.WhiteboardUserAuthenticationFactory;
import org.apache.jackrabbit.oak.spi.security.CompositeConfiguration;
import org.apache.jackrabbit.oak.spi.security.ConfigurationBase;
import org.apache.jackrabbit.oak.spi.security.ConfigurationParameters;
import org.apache.jackrabbit.oak.spi.security.SecurityConfiguration;
import org.apache.jackrabbit.oak.spi.security.SecurityProvider;
import org.apache.jackrabbit.oak.spi.security.authentication.AuthenticationConfiguration;
import org.apache.jackrabbit.oak.spi.security.authentication.token.CompositeTokenConfiguration;
import org.apache.jackrabbit.oak.spi.security.authentication.token.TokenConfiguration;
import org.apache.jackrabbit.oak.spi.security.authorization.AuthorizationConfiguration;
import org.apache.jackrabbit.oak.spi.security.authorization.accesscontrol.AccessControlConstants;
import org.apache.jackrabbit.oak.spi.security.principal.CompositePrincipalConfiguration;
import org.apache.jackrabbit.oak.spi.security.principal.PrincipalConfiguration;
import org.apache.jackrabbit.oak.spi.security.privilege.PrivilegeConfiguration;
import org.apache.jackrabbit.oak.spi.security.user.UserConfiguration;
import org.apache.jackrabbit.oak.spi.security.user.UserConstants;
import org.apache.jackrabbit.oak.spi.whiteboard.Whiteboard;
import org.apache.jackrabbit.oak.spi.whiteboard.WhiteboardAware;
import org.osgi.framework.BundleContext;

import static com.google.common.base.Preconditions.checkNotNull;

/**
 * Default {@link SecurityProvider} implementation that aggregates the individual
 * security configurations (authentication, authorization, user, principal,
 * privilege, token). Composite configurations (authorization, principal, token)
 * allow additional implementations to be bound/unbound at runtime via the OSGi
 * bind methods below; the remaining configurations are held in volatile fields
 * that OSGi may swap.
 */
@Deprecated
public class SecurityProviderImpl implements SecurityProvider, WhiteboardAware {

    // Singleton configurations; volatile because OSGi bind/unbind methods may
    // replace them from a different thread.
    private volatile AuthenticationConfiguration authenticationConfiguration;
    private volatile PrivilegeConfiguration privilegeConfiguration;
    private volatile UserConfiguration userConfiguration;

    // Composite configurations that aggregate dynamically bound implementations.
    private final CompositeAuthorizationConfiguration authorizationConfiguration = new CompositeAuthorizationConfiguration(this);
    private final CompositePrincipalConfiguration principalConfiguration = new CompositePrincipalConfiguration(this);
    private final CompositeTokenConfiguration tokenConfiguration = new CompositeTokenConfiguration(this);

    // Whiteboard-backed providers that track services registered in the OSGi
    // service registry (started/stopped in activate()/deactivate()).
    private final WhiteboardAuthorizableNodeName authorizableNodeName = new WhiteboardAuthorizableNodeName();
    private final WhiteboardAuthorizableActionProvider authorizableActionProvider = new WhiteboardAuthorizableActionProvider();
    private final WhiteboardRestrictionProvider restrictionProvider = new WhiteboardRestrictionProvider();
    private final WhiteboardUserAuthenticationFactory userAuthenticationFactory = new WhiteboardUserAuthenticationFactory(UserConfigurationImpl.getDefaultAuthenticationFactory());

    private ConfigurationParameters configuration;

    private Whiteboard whiteboard;

    private final RootProvider rootProvider = new RootProviderService();

    private final TreeProvider treeProvider = new TreeProviderService();

    /**
     * Default constructor used in OSGi environments.
     */
    public SecurityProviderImpl() {
        this(ConfigurationParameters.EMPTY);
    }

    /**
     * Create a new {@code SecurityProvider} instance with the given configuration
     * parameters.
     *
     * @param configuration security configuration
     */
    public SecurityProviderImpl(@Nonnull ConfigurationParameters configuration) {
        checkNotNull(configuration);
        this.configuration = configuration;

        // Default (non-composite) configurations only get root/tree providers here.
        authenticationConfiguration = initDefaultConfiguration(new AuthenticationConfigurationImpl(this));
        userConfiguration = initDefaultConfiguration(new UserConfigurationImpl(this));
        privilegeConfiguration = initDefaultConfiguration(new PrivilegeConfigurationImpl());

        // Composite configurations get providers plus their default member.
        initCompositeConfiguration(authorizationConfiguration, new AuthorizationConfigurationImpl(this));
        initCompositeConfiguration(principalConfiguration, new PrincipalConfigurationImpl(this));
        initCompositeConfiguration(tokenConfiguration, new TokenConfigurationImpl(this));
    }

    @Override
    public void setWhiteboard(@Nonnull Whiteboard whiteboard) {
        this.whiteboard = whiteboard;
    }

    @Override
    public Whiteboard getWhiteboard() {
        return whiteboard;
    }

    /**
     * Returns the parameters for the named configuration, merged with any
     * matching entry in the provider-level configuration. A null name returns
     * the provider-level parameters themselves.
     */
    @Nonnull
    @Override
    public ConfigurationParameters getParameters(@Nullable String name) {
        if (name == null) {
            return configuration;
        }
        ConfigurationParameters params = configuration.getConfigValue(name, ConfigurationParameters.EMPTY);
        for (SecurityConfiguration sc : getConfigurations()) {
            if (sc != null && sc.getName().equals(name)) {
                // Configuration-level parameters win over provider-level ones.
                return ConfigurationParameters.of(params, sc.getParameters());
            }
        }
        return params;
    }

    /**
     * Returns all six security configurations as an unordered set.
     */
    @Nonnull
    @Override
    public Iterable<? extends SecurityConfiguration> getConfigurations() {
        Set<SecurityConfiguration> scs = new HashSet<SecurityConfiguration>();
        scs.add(authenticationConfiguration);
        scs.add(authorizationConfiguration);
        scs.add(userConfiguration);
        scs.add(principalConfiguration);
        scs.add(privilegeConfiguration);
        scs.add(tokenConfiguration);
        return scs;
    }

    /**
     * Returns the configuration matching the requested interface, or throws
     * {@link IllegalArgumentException} for an unsupported class.
     */
    @SuppressWarnings("unchecked")
    @Nonnull
    @Override
    public <T> T getConfiguration(@Nonnull Class<T> configClass) {
        if (AuthenticationConfiguration.class == configClass) {
            return (T) authenticationConfiguration;
        } else if (AuthorizationConfiguration.class == configClass) {
            return (T) authorizationConfiguration;
        } else if (UserConfiguration.class == configClass) {
            return (T) userConfiguration;
        } else if (PrincipalConfiguration.class == configClass) {
            return (T) principalConfiguration;
        } else if (PrivilegeConfiguration.class == configClass) {
            return (T) privilegeConfiguration;
        } else if (TokenConfiguration.class == configClass) {
            return (T) tokenConfiguration;
        } else {
            throw new IllegalArgumentException("Unsupported security configuration class " + configClass);
        }
    }

    // OSGi component activation: wrap the bundle context in a whiteboard, start
    // the whiteboard-backed trackers, then finish configuration wiring.
    protected void activate(BundleContext context) {
        whiteboard = new OsgiWhiteboard(context);
        authorizableActionProvider.start(whiteboard);
        authorizableNodeName.start(whiteboard);
        restrictionProvider.start(whiteboard);
        userAuthenticationFactory.start(whiteboard);

        initializeConfigurations();
    }

    // OSGi component deactivation: stop the whiteboard-backed trackers.
    protected void deactivate() {
        authorizableActionProvider.stop();
        authorizableNodeName.stop();
        restrictionProvider.stop();
        userAuthenticationFactory.stop();
    }

    @SuppressWarnings("UnusedDeclaration")
    protected void bindPrincipalConfiguration(@Nonnull PrincipalConfiguration reference) {
        principalConfiguration.addConfiguration(initConfiguration(reference));
    }

    @SuppressWarnings("UnusedDeclaration")
    protected void unbindPrincipalConfiguration(@Nonnull PrincipalConfiguration reference) {
        principalConfiguration.removeConfiguration(reference);
    }

    @SuppressWarnings("UnusedDeclaration")
    protected void bindTokenConfiguration(@Nonnull TokenConfiguration reference) {
        tokenConfiguration.addConfiguration(initConfiguration(reference));
    }

    @SuppressWarnings("UnusedDeclaration")
    protected void unbindTokenConfiguration(@Nonnull TokenConfiguration reference) {
        tokenConfiguration.removeConfiguration(reference);
    }

    @SuppressWarnings("UnusedDeclaration")
    protected void bindAuthorizationConfiguration(@Nonnull AuthorizationConfiguration reference) {
        authorizationConfiguration.addConfiguration(initConfiguration(reference));
    }

    @SuppressWarnings("UnusedDeclaration")
    protected void unbindAuthorizationConfiguration(@Nonnull AuthorizationConfiguration reference) {
        authorizationConfiguration.removeConfiguration(reference);
    }

    //------------------------------------------------------------< private >---
    // Injects the whiteboard-backed providers into the relevant configurations'
    // parameters (restriction provider into authorization; action provider,
    // node name and authentication factory into user).
    private void initializeConfigurations() {
        initConfiguration(authorizationConfiguration, ConfigurationParameters.of(
                AccessControlConstants.PARAM_RESTRICTION_PROVIDER, restrictionProvider)
        );

        Map<String, Object> userMap = ImmutableMap.<String,Object>of(
                UserConstants.PARAM_AUTHORIZABLE_ACTION_PROVIDER, authorizableActionProvider,
                UserConstants.PARAM_AUTHORIZABLE_NODE_NAME, authorizableNodeName,
                UserConstants.PARAM_USER_AUTHENTICATION_FACTORY, userAuthenticationFactory);
        initConfiguration(userConfiguration, ConfigurationParameters.of(userMap));

        initConfiguration(authenticationConfiguration);
        initConfiguration(privilegeConfiguration);
    }

    // Convenience overload: initialize with no extra parameters.
    private <T extends SecurityConfiguration> T initConfiguration(@Nonnull T config) {
        return initConfiguration(config, ConfigurationParameters.EMPTY);
    }

    // If the configuration is a ConfigurationBase, wire in this provider, merge
    // the given parameters (given params take precedence) and set the shared
    // root/tree providers. Other implementations are returned unchanged.
    private <T extends SecurityConfiguration> T initConfiguration(@Nonnull T config, @Nonnull ConfigurationParameters params) {
        if (config instanceof ConfigurationBase) {
            ConfigurationBase cfg = (ConfigurationBase) config;
            cfg.setSecurityProvider(this);
            cfg.setParameters(ConfigurationParameters.of(params, cfg.getParameters()));
            cfg.setRootProvider(rootProvider);
            cfg.setTreeProvider(treeProvider);
        }
        return config;
    }
private CompositeConfiguration initCompositeConfiguration(@Nonnull CompositeConfiguration composite, @Nonnull SecurityConfiguration defaultConfig) { composite.setRootProvider(rootProvider); composite.setTreeProvider(treeProvider); composite.setDefaultConfig(initDefaultConfiguration(defaultConfig)); return composite; } private <T extends SecurityConfiguration> T initDefaultConfiguration(@Nonnull T config) { if (config instanceof ConfigurationBase) { ConfigurationBase cfg = (ConfigurationBase) config; cfg.setRootProvider(rootProvider); cfg.setTreeProvider(treeProvider); } return config; } @SuppressWarnings("UnusedDeclaration") protected void bindAuthenticationConfiguration(AuthenticationConfiguration authenticationConfiguration) { this.authenticationConfiguration = authenticationConfiguration; } @SuppressWarnings("UnusedDeclaration") protected void unbindAuthenticationConfiguration(AuthenticationConfiguration authenticationConfiguration) { this.authenticationConfiguration = null; } @SuppressWarnings("UnusedDeclaration") protected void bindPrivilegeConfiguration(PrivilegeConfiguration privilegeConfiguration) { this.privilegeConfiguration = privilegeConfiguration; } @SuppressWarnings("UnusedDeclaration") protected void unbindPrivilegeConfiguration(PrivilegeConfiguration privilegeConfiguration) { this.privilegeConfiguration = null; } @SuppressWarnings("UnusedDeclaration") protected void bindUserConfiguration(UserConfiguration userConfiguration) { this.userConfiguration = userConfiguration; } @SuppressWarnings("UnusedDeclaration") protected void unbindUserConfiguration(UserConfiguration userConfiguration) { this.userConfiguration = null; } }
package com.thinkbiganalytics.nifi.feedmgr; /*- * #%L * thinkbig-nifi-rest-client-api * %% * Copyright (C) 2017 ThinkBig Analytics * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import com.google.common.base.Predicate; import com.google.common.collect.ComparisonChain; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.thinkbiganalytics.nifi.rest.client.LegacyNifiRestClient; import com.thinkbiganalytics.nifi.rest.client.NiFiRestClient; import com.thinkbiganalytics.nifi.rest.client.NifiClientRuntimeException; import com.thinkbiganalytics.nifi.rest.client.NifiComponentNotFoundException; import com.thinkbiganalytics.nifi.rest.model.NiFiAllowableValue; import com.thinkbiganalytics.nifi.rest.model.NiFiPropertyDescriptorTransform; import com.thinkbiganalytics.nifi.rest.model.NifiError; import com.thinkbiganalytics.nifi.rest.model.NifiProcessGroup; import com.thinkbiganalytics.nifi.rest.model.NifiProperty; import com.thinkbiganalytics.nifi.rest.support.NifiConnectionUtil; import com.thinkbiganalytics.nifi.rest.support.NifiConstants; import com.thinkbiganalytics.nifi.rest.support.NifiProcessUtil; import com.thinkbiganalytics.nifi.rest.support.NifiPropertyUtil; import com.thinkbiganalytics.nifi.rest.support.NifiTemplateNameUtil; import org.apache.commons.lang3.StringUtils; import org.apache.nifi.web.api.dto.ConnectionDTO; import org.apache.nifi.web.api.dto.ControllerServiceDTO; import org.apache.nifi.web.api.dto.FlowSnippetDTO; import 
org.apache.nifi.web.api.dto.ProcessGroupDTO;
import org.apache.nifi.web.api.dto.ProcessorDTO;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;

import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.ws.rs.WebApplicationException;

/**
 * Helper class used to create templates in NiFi
 * This class is used with the {@link TemplateInstanceCreator}
 */
public class TemplateCreationHelper {

    public static final String REUSABLE_TEMPLATES_CATEGORY_NAME = "Reusable Templates";

    private static final Logger log = LoggerFactory.getLogger(TemplateCreationHelper.class);

    // Name of the process group that holds reusable templates.
    public static String REUSABLE_TEMPLATES_PROCESS_GROUP_NAME = "reusable_templates";

    // Name of the scratch process group used to temporarily instantiate templates
    // for inspection (see createTemporaryTemplateFlow).
    public static String TEMPORARY_TEMPLATE_INSPECTION_GROUP_NAME = "kylo_temporary_template_inspection";

    /**
     * REST client for NiFi API
     */
    @Nonnull
    private final NiFiRestClient nifiRestClient;

    // Legacy REST client; source of the nifiRestClient above.
    LegacyNifiRestClient restClient;

    // Errors accumulated during template creation; exposed via getErrors().
    private List<NifiError> errors = new ArrayList<>();

    // Controller services captured by snapshotControllerServiceReferences().
    private Set<ControllerServiceDTO> snapshotControllerServices;

    // Subset of the snapshot whose state was ENABLED at snapshot time.
    private Set<ControllerServiceDTO> snapshottedEnabledControllerServices = new HashSet<>();

    // Result of merging snapshotted and newly created services (built elsewhere).
    private Map<String, ControllerServiceDTO> mergedControllerServices;

    // Services detected by identifyNewlyCreatedControllerServiceReferences().
    private Set<ControllerServiceDTO> newlyCreatedControllerServices;

    public TemplateCreationHelper(LegacyNifiRestClient restClient) {
        this.restClient = restClient;
        this.nifiRestClient = restClient.getNiFiRestClient();
    }

    // Delegates to NifiTemplateNameUtil for versioned process-group naming.
    public static String getVersionedProcessGroupName(String name) {
        return NifiTemplateNameUtil.getVersionedProcessGroupName(name);
    }

    // Delegates to NifiTemplateNameUtil to strip the version from a group name.
    public static String parseVersionedProcessGroupName(String name) {
        return NifiTemplateNameUtil.parseVersionedProcessGroupName(name);
    }

    /**
     * Creates an instance of the supplied template under the temporary inspection group inside its own process group
     *
     * @param templateId the template to instantiate
     * @return the process group holding this template
     */
    public ProcessGroupDTO createTemporaryTemplateFlow(@Nonnull final String templateId) {
        ProcessGroupDTO temporaryTemplateInspectionGroup = null;
        //first get the parent temp group
        Optional<ProcessGroupDTO> group = nifiRestClient.processGroups().findByName("root", TEMPORARY_TEMPLATE_INSPECTION_GROUP_NAME, false, false);
        if (!group.isPresent()) {
            temporaryTemplateInspectionGroup = nifiRestClient.processGroups().create("root", TEMPORARY_TEMPLATE_INSPECTION_GROUP_NAME);
        } else {
            temporaryTemplateInspectionGroup = group.get();
        }

        //next create the temp group
        // Snapshot existing controller services so newly created ones can be
        // identified (and cleaned up) after instantiation.
        snapshotControllerServiceReferences();
        ProcessGroupDTO tempGroup = nifiRestClient.processGroups().create(temporaryTemplateInspectionGroup.getId(), "template_" + System.currentTimeMillis());
        FlowSnippetDTO snippet = instantiateFlowFromTemplate(tempGroup.getId(), templateId);
        identifyNewlyCreatedControllerServiceReferences();
        tempGroup.setContents(snippet);

        //now delete it
        // The group is deleted immediately: the returned DTO (with contents set
        // above) is only used for inspection, not kept alive in NiFi.
        nifiRestClient.processGroups().delete(tempGroup);
        cleanupControllerServices();
        return tempGroup;
    }

    /**
     * Instantiates the specified template in the specified process group.
     *
     * <p>Controller services that are created under the specified process group will be moved to the root process group. This side-effect may be removed in the future.</p>
     *
     * @param processGroupId the process group id
     * @param templateId     the template id
     * @return the instantiated flow
     * @throws NifiComponentNotFoundException if the process group or template does not exist
     */
    @Nonnull
    public FlowSnippetDTO instantiateFlowFromTemplate(@Nonnull final String processGroupId, @Nonnull final String templateId) throws NifiComponentNotFoundException {
        // Instantiate template
        final NiFiRestClient nifiClient = restClient.getNiFiRestClient();
        final FlowSnippetDTO templateFlow = nifiClient.processGroups().instantiateTemplate(processGroupId, templateId);

        // Move controller services to root process group (NiFi >= v1.0)
        final Set<ControllerServiceDTO> groupControllerServices = nifiClient.processGroups().getControllerServices(processGroupId);
        final Map<String, String> idMap = new HashMap<>(groupControllerServices.size());

        // Pass 1: for each service scoped to this group, delete it and recreate
        // it at root (copying comments/name/type), recording old-id -> new-id.
        groupControllerServices.stream()
            .filter(controllerService -> controllerService.getParentGroupId().equals(processGroupId))
            .forEach(groupControllerService -> {
                // Delete scoped service
                final String oldId = groupControllerService.getId();
                nifiClient.controllerServices().delete(groupControllerService.getId());

                // Create root service
                final ControllerServiceDTO rootControllerService = new ControllerServiceDTO();
                rootControllerService.setComments(groupControllerService.getComments());
                rootControllerService.setName(groupControllerService.getName());
                rootControllerService.setType(groupControllerService.getType());
                final String rootId = nifiClient.processGroups().createControllerService("root", rootControllerService).getId();

                // Map old ID to new ID
                idMap.put(oldId, rootId);
            });

        // Set properties on root controller services
        // Pass 2: rewrite properties that reference other controller services so
        // they point at the new root-level ids, then push the update.
        groupControllerServices.stream()
            .filter(controllerService -> controllerService.getParentGroupId().equals(processGroupId))
            .forEach(groupControllerService -> {
                final Map<String, String> properties = groupControllerService.getProperties();
                groupControllerService.getDescriptors().values().stream()
                    .filter(descriptor -> StringUtils.isNotBlank(descriptor.getIdentifiesControllerService()))
                    .forEach(descriptor -> {
                        final String name = descriptor.getName();
                        final String oldId = properties.get(name);
                        properties.put(name, idMap.get(oldId));
                    });

                final ControllerServiceDTO rootControllerService = new ControllerServiceDTO();
                rootControllerService.setId(idMap.get(groupControllerService.getId()));
                rootControllerService.setProperties(properties);
                nifiClient.controllerServices().update(rootControllerService);
            });

        // Return flow
        return templateFlow;
    }

    // Captures the current controller services (and the subset whose state is
    // ENABLED) as a baseline for later comparison.
    public void snapshotControllerServiceReferences() throws TemplateCreationException {
        Set<ControllerServiceDTO> controllerServiceEntity = restClient.getControllerServices();
        if (controllerServiceEntity != null) {
            snapshotControllerServices = controllerServiceEntity;
            for (ControllerServiceDTO serviceDTO : controllerServiceEntity) {
                if (serviceDTO.getState().equals(NifiProcessUtil.SERVICE_STATE.ENABLED.name())) {
                    snapshottedEnabledControllerServices.add(serviceDTO);
                }
            }
        }
    }

    public List<NifiError> getErrors() {
        return errors;
    }

    /**
     * Compare the services in Nifi with the ones from the snapshot and return any that are not in the snapshot
     */
    public Set<ControllerServiceDTO> identifyNewlyCreatedControllerServiceReferences() {
        Set<ControllerServiceDTO> newServices = new HashSet<>();
        Set<ControllerServiceDTO> controllerServiceEntity = restClient.getControllerServices();
        if (controllerServiceEntity != null) {
            if (snapshotControllerServices != null) {
                for (ControllerServiceDTO dto : controllerServiceEntity) {
                    // NOTE(review): relies on ControllerServiceDTO equality for the
                    // contains() check — presumably identity/equals-based; verify.
                    if (!snapshotControllerServices.contains(dto)) {
                        newServices.add(dto);
                    }
                }
            } else {
                // No snapshot taken: treat every current service as new.
                newServices = controllerServiceEntity;
            }
        }
        newlyCreatedControllerServices = newServices;
        mergeControllerServices();
        return newServices;
    }

    /**
     * Tries to enable the specified controller service.
     *
     * @param controllerService the controller service to enable
     * @param properties property overrides for the controller service
     * @param enabledServices map of enabled controller service ids and names to DTOs
     * @param allServices map of all controller service ids to DTOs
     * @return the enabled controller service
     * @throws NifiComponentNotFoundException if the controller service does not exist
     * @throws WebApplicationException if the controller service cannot be enabled
     */
    @Nonnull
    private ControllerServiceDTO tryToEnableControllerService(@Nonnull final ControllerServiceDTO controllerService, @Nullable final Map<String, String> properties,
                                                              @Nonnull final Map<String, ControllerServiceDTO> enabledServices,
                                                              @Nonnull final Map<String, ControllerServiceDTO> allServices) {
        // Check if already enabled
        if ("ENABLED".equals(controllerService.getState())) {
            return controllerService;
        }

        // Fix controller service references before enabling, since a service can itself reference other services.
        final NiFiPropertyDescriptorTransform propertyDescriptorTransform = restClient.getPropertyDescriptorTransform();
        final List<NifiProperty> changedProperties = fixControllerServiceReferences(properties, enabledServices, allServices,
                                                                                    NifiPropertyUtil.getPropertiesForService(controllerService, propertyDescriptorTransform));
        if (!changedProperties.isEmpty()) {
            changedProperties.forEach(property -> {
                controllerService.getProperties().put(property.getKey(), property.getValue());
            });
            // NOTE(review): uses nifiRestClient here but restClient elsewhere in this class — confirm both point at the same NiFi instance.
            nifiRestClient.controllerServices().update(controllerService);
        }

        // Enable controller service
        return restClient.enableControllerServiceAndSetProperties(controllerService.getId(), properties);
    }

    /**
     * Merges the snapshotted controller services with the newly created ones into {@code mergedControllerServices}.
     * Newly created services that duplicate a snapshotted service (by id or name) are deleted from NiFi and the
     * snapshotted instance is kept; all others are added to the merged map.
     */
    private void mergeControllerServices() {
        final Map<String, ControllerServiceDTO> map = new HashMap<String, ControllerServiceDTO>();
        final Map<String, List<ControllerServiceDTO>> serviceNameMap = new HashMap<>();
        // first use the snapshotted services as a baseline
        for (ControllerServiceDTO serviceDTO : snapshotControllerServices) {
            map.put(serviceDTO.getId(), serviceDTO);
            if (!serviceNameMap.containsKey(serviceDTO.getName())) {
                serviceNameMap.put(serviceDTO.getName(), new ArrayList<ControllerServiceDTO>());
            }
            serviceNameMap.get(serviceDTO.getName()).add(serviceDTO);
        }
        // now try to merge in the newly created services: if they exist by ID or name then reference the existing one, otherwise add them to the map
        List<ControllerServiceDTO> matchingControllerServices = Lists.newArrayList(Iterables.filter(newlyCreatedControllerServices, new Predicate<ControllerServiceDTO>() {
            @Override
            public boolean apply(ControllerServiceDTO controllerServiceDTO) {
                return map.containsKey(controllerServiceDTO.getId()) || serviceNameMap.containsKey(controllerServiceDTO.getName());
            }
        }));
        // add any others not matched to the map to return
        List<ControllerServiceDTO> unmatchedServices = Lists.newArrayList(Iterables.filter(newlyCreatedControllerServices, new Predicate<ControllerServiceDTO>() {
            @Override
            public boolean apply(ControllerServiceDTO controllerServiceDTO) {
                return !map.containsKey(controllerServiceDTO.getId()) && !serviceNameMap.containsKey(controllerServiceDTO.getName());
            }
        }));
        if (unmatchedServices != null && !unmatchedServices.isEmpty()) {
            for (ControllerServiceDTO serviceToAdd : unmatchedServices) {
                map.put(serviceToAdd.getId(), serviceToAdd);
            }
        }
        // if they match existing services, then delete the new ones (best effort: failures are logged, not rethrown)
        if (matchingControllerServices != null && !matchingControllerServices.isEmpty()) {
            for (ControllerServiceDTO serviceToDelete : matchingControllerServices) {
                try {
                    restClient.deleteControllerService(serviceToDelete.getId());
                } catch (NifiClientRuntimeException e) {
                    log.error("Exception while attempting to mergeControllerServices. Unable to delete Service {}. {}", serviceToDelete.getId(), e.getMessage());
                }
            }
        }
        mergedControllerServices = map;
    }

    /**
     * Convenience overload of {@link #updateControllerServiceReferences(List, Map)} with no property overrides.
     */
    public List<NifiProperty> updateControllerServiceReferences(List<ProcessorDTO> processors) {
        return updateControllerServiceReferences(processors, null);
    }

    /**
     * Fix references to the controller services on the processor properties
     *
     * @param processors processors to inspect
     * @param controllerServiceProperties property overrides for controller services
     * @return the list of properties that were modified (empty on failure; the error is recorded in {@link #getErrors()})
     */
    public List<NifiProperty> updateControllerServiceReferences(List<ProcessorDTO> processors, Map<String, String> controllerServiceProperties) {
        try {
            // merge the snapshotted services with the newly created ones and update respective processors in the newly created flow
            final Map<String, ControllerServiceDTO> enabledServices = new HashMap<>();
            Map<String, ControllerServiceDTO> allServices = mergedControllerServices;
            for (ControllerServiceDTO dto : allServices.values()) {
                if (NifiProcessUtil.SERVICE_STATE.ENABLED.name().equals(dto.getState())) {
                    // indexed both by id and by name so lookups by either succeed
                    enabledServices.put(dto.getId(), dto);
                    enabledServices.put(dto.getName(), dto);
                }
            }
            List<NifiProperty> properties = new ArrayList<>();
            Map<String, ProcessGroupDTO> processGroupDTOMap = new HashMap<>();
            for (ProcessorDTO dto : processors) {
                ProcessGroupDTO groupDTO = processGroupDTOMap.get(dto.getParentGroupId());
                if (groupDTO == null) {
                    // we can create a tmp group dto here as all we need is the id
                    groupDTO = new ProcessGroupDTO();
                    groupDTO.setId(dto.getParentGroupId());
                    groupDTO.setName(dto.getParentGroupId());
                    processGroupDTOMap.put(dto.getParentGroupId(), groupDTO);
                }
                properties.addAll(NifiPropertyUtil.getPropertiesForProcessor(groupDTO, dto, restClient.getPropertyDescriptorTransform()));
            }
            List<NifiProperty> updatedProperties = fixControllerServiceReferences(controllerServiceProperties, enabledServices, allServices, properties);
            updatedProperties.forEach(property -> restClient.updateProcessorProperty(property.getProcessGroupId(), property.getProcessorId(), property));
            return updatedProperties;
        } catch (NifiClientRuntimeException e) {
            errors.add(new NifiError(NifiError.SEVERITY.FATAL, "Error trying to identify Controller Services. " + e.getMessage(), NifiProcessGroup.CONTROLLER_SERVICE_CATEGORY));
        }
        return Collections.emptyList();
    }

    /**
     * Enables the controller services for the specified properties or changes the property value to an enabled service.
     *
     * @param controllerServiceProperties property overrides for controller services
     * @param enabledServices map of enabled controller service ids and names to DTOs
     * @param allServices map of all controller service ids to DTOs
     * @param properties the processor properties to update
     * @return the list of properties that were modified
     */
    @Nonnull
    private List<NifiProperty> fixControllerServiceReferences(@Nullable final Map<String, String> controllerServiceProperties, @Nonnull final Map<String, ControllerServiceDTO> enabledServices,
                                                              @Nonnull final Map<String, ControllerServiceDTO> allServices, @Nonnull final List<NifiProperty> properties) {
        return properties.stream()
            // Pick properties that reference a controller service
            .filter(property -> StringUtils.isNotBlank(property.getPropertyDescriptor().getIdentifiesControllerService()))
            // Pick properties that reference a disabled or unknown controller service
            .filter(property -> !enabledServices.containsKey(property.getValue()))
            // Find a controller service; the filter also mutates the property value as a side effect
            .filter(property -> {
                final Optional<ControllerServiceDTO> controllerService = findControllerServiceForProperty(controllerServiceProperties, enabledServices, allServices, property);
                if (controllerService.isPresent()) {
                    if (!controllerService.get().getId().equals(property.getValue())) {
                        property.setValue(controllerService.get().getId());
                        return true;
                    }
                } else if (property.getPropertyDescriptor().isRequired()) {
                    // Only a required property with no resolvable service is a fatal error.
                    final String message = "Unable to find a valid controller service for the '" + property.getKey() + "' property of the '" + property.getProcessorName() + "' " + "processor.";
                    errors.add(new NifiError(NifiError.SEVERITY.FATAL, message, NifiProcessGroup.CONTROLLER_SERVICE_CATEGORY));
                }
                return false;
            })
            .collect(Collectors.toList());
    }

    /**
     * Finds and enables a controller service for the specified processor property.
     *
     * @param controllerServiceProperties property overrides for controller services
     * @param enabledServices map of enabled controller service ids and names to DTOs
     * @param allServices map of all controller service ids to DTOs
     * @param property the processor properties to update
     * @return the matching controller service
     */
    private Optional<ControllerServiceDTO> findControllerServiceForProperty(@Nullable final Map<String, String> controllerServiceProperties, @Nonnull final Map<String, ControllerServiceDTO> enabledServices,
                                                                            @Nonnull final Map<String, ControllerServiceDTO> allServices, @Nonnull final NifiProperty property) {
        return property.getPropertyDescriptor().getAllowableValues().stream()
            // Pick values that exist
            .filter(allowableValue -> allServices.containsKey(allowableValue.getValue()))
            // Sort allowed values by priority
            .sorted((a, b) -> {
                final String propertyValue = property.getValue();
                final String value1 = a.getValue();
                final String value2 = b.getValue();
                return ComparisonChain.start()
                    // 1. Matches property value
                    .compareTrueFirst(value1.equals(propertyValue), value2.equals(propertyValue))
                    // 2. Service is enabled
                    .compareTrueFirst(enabledServices.containsKey(value1), enabledServices.containsKey(value2))
                    // 3. Similar service (by display name) is enabled
                    .compareTrueFirst(enabledServices.containsKey(a.getDisplayName()), enabledServices.containsKey(b.getDisplayName()))
                    .result();
            })
            // Map to controller service DTO
            .map(NiFiAllowableValue::getValue)
            .map(allServices::get)
            // Try to enable controller service; candidates that fail to enable are skipped
            .filter(controllerService -> {
                try {
                    tryToEnableControllerService(controllerService, controllerServiceProperties, enabledServices, allServices);
                    return true;
                } catch (final Exception e) {
                    log.error("Failed to enable controller service [id:{},name:{}]: {}", controllerService.getId(), controllerService.getName(), e.toString(), e);
                    return false;
                }
            })
            // Return first enabled controller service
            .findFirst();
    }

    /**
     * Deletes newly created controller services whose type already existed before the snapshot and which have no
     * referencing components, i.e. duplicates created by a template instantiation.
     */
    public void cleanupControllerServices() {
        // only delete the services that were created if none of them with that type existed in the system before
        // only keep them if they are the first of their kind
        if (snapshotControllerServices != null && !snapshotControllerServices.isEmpty()) {
            final Set<String> serviceTypes = new HashSet<>();
            for (ControllerServiceDTO dto : snapshotControllerServices) {
                serviceTypes.add(dto.getType());
            }

            List<ControllerServiceDTO> servicesToDelete = Lists.newArrayList(Iterables.filter(newlyCreatedControllerServices, new Predicate<ControllerServiceDTO>() {
                @Override
                public boolean apply(ControllerServiceDTO controllerServiceDTO) {
                    return serviceTypes.contains(controllerServiceDTO.getType()) && (controllerServiceDTO.getReferencingComponents() == null
                                                                                     || controllerServiceDTO.getReferencingComponents().size() == 0);
                }
            }));
            if (servicesToDelete != null && !servicesToDelete.isEmpty()) {
                try {
                    restClient.deleteControllerServices(servicesToDelete);
                } catch (Exception e) {
                    // best effort: record an INFO error rather than failing the caller
                    log.info("error attempting to cleanup controller services while trying to delete Services: " + e.getMessage() + ". It might be wise to login to NIFI and verify there are not extra controller services");
                    getErrors().add(new NifiError(NifiError.SEVERITY.INFO, "There is an error attempting to remove the controller service :" + e.getMessage()));
                }
            }
        }
    }

    /**
     * Deletes the input port connections to the specified process group.
     *
     * <p>When versioning we want to delete only the input port connections. Keep output port connections in place as they may still have data running through them that should flow through the
     * system.</p>
     *
     * @param processGroup the process group with input port connections
     * @throws NifiClientRuntimeException if a connection cannot be deleted
     */
    private void deleteInputPortConnections(@Nonnull final ProcessGroupDTO processGroup) throws NifiClientRuntimeException {
        // Get the list of incoming connections coming from some source to this process group
        final Set<ConnectionDTO> connectionsEntity = restClient.getProcessGroupConnections(processGroup.getParentGroupId());
        if (connectionsEntity == null) {
            return;
        }

        final List<ConnectionDTO> connections = NifiConnectionUtil.findConnectionsMatchingDestinationGroupId(connectionsEntity, processGroup.getId());
        if (connections == null) {
            return;
        }

        // Delete the connections
        for (ConnectionDTO connection : connections) {
            final String type = connection.getSource().getType();
            log.info("Found connection {} matching source type {} and destination group {}.", connection.getId(), type, connection.getDestination().getId());

            // Stop the port before deleting its connection
            if (NifiConstants.NIFI_PORT_TYPE.INPUT_PORT.name().equalsIgnoreCase(type)) {
                try {
                    restClient.stopInputPort(connection.getSource().getGroupId(), connection.getSource().getId());
                    log.info("Stopped input port {} for connection: {} ", connection.getSource().getId(), connection.getId());
                } catch (Exception e) {
                    log.error("Failed to stop input port for connection: {}", connection.getId(), e);
                    throw new NifiClientRuntimeException("Error stopping the input port " + connection.getSource().getId() + " for connection " + connection.getId() + " prior to deleting the " + "connection.");
                }
            }

            // Delete the connection
            try {
                restClient.deleteConnection(connection, false);
            } catch (Exception e) {
                log.error("Failed to delete the connection: {}", connection.getId(), e);
                final String source = (connection.getSource() != null) ? connection.getSource().getName() : null;
                final String destination = (connection.getDestination() != null) ? connection.getDestination().getName() : null;
                throw new NifiClientRuntimeException("Error deleting the connection " + connection.getId() + " with source " + source + " and destination " + destination + ".");
            }
        }
    }

    /**
     * Version a ProcessGroup renaming it with the name - {timestamp millis}.
     * If {@code removeIfInactive} is true it will not version but just delete it
     *
     * @param processGroup the group to version
     */
    public ProcessGroupDTO versionProcessGroup(ProcessGroupDTO processGroup) {
        log.info("Versioning Process Group {} ", processGroup.getName());

        restClient.disableAllInputProcessors(processGroup.getId());
        log.info("Disabled Inputs for {} ", processGroup.getName());

        // attempt to stop all processors (best effort; failure is logged and versioning continues)
        try {
            restClient.stopInputs(processGroup.getId());
            log.info("Stopped Input Ports for {}, ", processGroup.getName());
        } catch (Exception e) {
            log.error("Error trying to stop Input Ports for {} while creating a new version ", processGroup.getName());
        }
        // delete input connections
        try {
            deleteInputPortConnections(processGroup);
        } catch (NifiClientRuntimeException e) {
            log.error("Error trying to delete input port connections for Process Group {} while creating a new version. ", processGroup.getName(), e);
            getErrors().add(new NifiError(NifiError.SEVERITY.FATAL, "The input port connections to the process group " + processGroup.getName() + " could not be deleted. Please delete them manually " + "in NiFi and try again."));
        }

        String versionedProcessGroupName = getVersionedProcessGroupName(processGroup.getName());
        // rename the feedGroup to be name+timestamp
        processGroup.setName(versionedProcessGroupName);
        restClient.updateProcessGroup(processGroup);
        log.info("Renamed ProcessGroup to {}, ", processGroup.getName());
        return processGroup;
    }

    /**
     * Marks all processors in the given group as RUNNING; on failure records a WARN error on the group and flags it unsuccessful.
     */
    public void markProcessorsAsRunning(NifiProcessGroup newProcessGroup) {
        if (newProcessGroup.isSuccess()) {
            try {
                restClient.markProcessorGroupAsRunning(newProcessGroup.getProcessGroupEntity());
            } catch (NifiClientRuntimeException e) {
                String errorMsg = "Unable to mark feed as " + NifiProcessUtil.PROCESS_STATE.RUNNING + ".";
                newProcessGroup.addError(newProcessGroup.getProcessGroupEntity().getId(), "", NifiError.SEVERITY.WARN, errorMsg, "Process State");
                newProcessGroup.setSuccess(false);
            }
        }
    }

    /**
     * Marks the connection ports of the given process group as running; delegates directly to the REST client.
     */
    public void markConnectionPortsAsRunning(ProcessGroupDTO entity) {
        restClient.markConnectionPortsAsRunning(entity);
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.drill.test; import static org.apache.drill.exec.util.StoragePluginTestUtils.DFS_TMP_SCHEMA; import static org.apache.drill.exec.util.StoragePluginTestUtils.ROOT_SCHEMA; import static org.apache.drill.exec.util.StoragePluginTestUtils.TMP_SCHEMA; import java.io.File; import java.io.IOException; import java.net.URI; import java.net.URL; import java.nio.file.Paths; import java.sql.Connection; import java.sql.DriverManager; import java.sql.SQLException; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Properties; import org.apache.drill.common.config.DrillProperties; import org.apache.drill.common.logical.FormatPluginConfig; import org.apache.drill.common.logical.StoragePluginConfig; import org.apache.drill.exec.ExecConstants; import org.apache.drill.exec.ZookeeperHelper; import org.apache.drill.exec.client.DrillClient; import org.apache.drill.exec.memory.BufferAllocator; import org.apache.drill.exec.memory.RootAllocatorFactory; import org.apache.drill.exec.proto.CoordinationProtos.DrillbitEndpoint; import org.apache.drill.exec.proto.UserBitShared.QueryType; 
import org.apache.drill.exec.rpc.user.QueryDataBatch;
import org.apache.drill.exec.server.Drillbit;
import org.apache.drill.exec.server.RemoteServiceSet;
import org.apache.drill.exec.store.SchemaFactory;
import org.apache.drill.exec.store.StoragePluginRegistry;
import org.apache.drill.exec.store.StoragePluginRegistry.PluginException;
import org.apache.drill.exec.store.StoragePluginRegistryImpl;
import org.apache.drill.exec.store.dfs.FileSystemConfig;
import org.apache.drill.exec.store.dfs.WorkspaceConfig;
import org.apache.drill.exec.store.mock.MockStorageEngineConfig;
import org.apache.drill.exec.store.sys.store.provider.ZookeeperPersistentStoreProvider;
import org.apache.drill.exec.util.StoragePluginTestUtils;
import org.apache.drill.shaded.guava.com.google.common.base.Charsets;
import org.apache.drill.shaded.guava.com.google.common.base.Preconditions;
import org.apache.drill.shaded.guava.com.google.common.collect.ImmutableMap;
import org.apache.drill.shaded.guava.com.google.common.io.Resources;
import org.apache.drill.test.DrillTestWrapper.TestServices;
import org.apache.hadoop.fs.FileSystem;

/**
 * Test fixture to start a Drillbit with provided options, create a client, and
 * execute queries. Can be used in JUnit tests, or in ad-hoc programs. Provides
 * a builder to set the necessary embedded Drillbit and client options, then
 * creates the requested Drillbit and client.
 */
public class ClusterFixture extends BaseFixture implements AutoCloseable {

  public static final int MAX_WIDTH_PER_NODE = 2;

  // Default config properties applied to every embedded test Drillbit.
  @SuppressWarnings("serial")
  public static final Properties TEST_CONFIGURATIONS = new Properties() {
    {
      // Properties here mimic those in drill-root/pom.xml, Surefire plugin
      // configuration. They allow tests to run successfully in Eclipse.
      put(ExecConstants.SYS_STORE_PROVIDER_LOCAL_ENABLE_WRITE, false);

      // The CTTAS function requires that the default temporary workspace be
      // writable. By default, the default temporary workspace points to
      // dfs.tmp. But, the test setup marks dfs.tmp as read-only. To work
      // around this, tests are supposed to use dfs. So, we need to
      // set the default temporary workspace to dfs.tmp.
      put(ExecConstants.DEFAULT_TEMPORARY_WORKSPACE, DFS_TMP_SCHEMA);
      put(ExecConstants.HTTP_ENABLE, false);
      put("drill.catastrophic_to_standard_out", true);

      // Verbose errors.
      put(ExecConstants.ENABLE_VERBOSE_ERRORS_KEY, true);

      // See Drillbit.close. The Drillbit normally waits a specified amount
      // of time for ZK registration to drop. But, embedded Drillbits normally
      // don't use ZK, so no need to wait.
      put(ExecConstants.ZK_REFRESH, 0);

      // This is just a test, no need to be heavy-duty on threads.
      // This is the number of server and client RPC threads. The
      // production default is DEFAULT_SERVER_RPC_THREADS.
      put(ExecConstants.BIT_SERVER_RPC_THREADS, 2);

      // No need for many scanners except when explicitly testing that
      // behavior. Production default is DEFAULT_SCAN_THREADS
      put(ExecConstants.SCAN_THREADPOOL_SIZE, 4);

      // Define a useful root location for the ZK persistent
      // storage. Profiles will go here when running in distributed
      // mode.
      put(ZookeeperPersistentStoreProvider.DRILL_EXEC_SYS_STORE_PROVIDER_ZK_BLOBROOT, "/tmp/drill/tests");
    }
  };

  public static final String DEFAULT_BIT_NAME = "drillbit";

  // Drillbits indexed by their assigned test name (see startDrillbits()).
  private final Map<String, Drillbit> bits = new HashMap<>();
  // First Drillbit started; returned by drillbit().
  private Drillbit defaultDrillbit;
  // True when this fixture started ZK itself and must stop it on close().
  private boolean ownsZK;
  private ZookeeperHelper zkHelper;
  private RemoteServiceSet serviceSet;
  protected List<ClientFixture> clients = new ArrayList<>();
  protected RestClientFixture restClientFixture;
  private boolean usesZk;
  private Properties clientProps;
  private final ClusterFixtureBuilder builder;

  ClusterFixture(ClusterFixtureBuilder builder) {
    this.builder = Preconditions.checkNotNull(builder);

    setClientProps();
    configureZk();
    try {
      createConfig();
      allocator = RootAllocatorFactory.newRoot(config);
      startDrillbits();
      applyOptions();
    } catch (Exception e) {
      // Translate exceptions to unchecked to avoid cluttering
      // tests. Failures will simply fail the test itself.
      throw new IllegalStateException("Cluster fixture setup failed", e);
    }
  }

  /**
   * Set the client properties to be used by client fixture.
   */
  private void setClientProps() {
    clientProps = builder.clientProps;
  }

  public Properties getClientProps() {
    return clientProps;
  }

  private void configureZk() {
    // Start ZK if requested.
    String zkConnect;
    if (builder.zkHelper != null) {
      // Case where the test itself started ZK and we're only using it.
      zkHelper = builder.zkHelper;
      ownsZK = false;
    } else if (builder.localZkCount > 0) {
      // Case where we need a local ZK just for this test cluster.
      zkHelper = new ZookeeperHelper();
      zkHelper.startZookeeper(builder.localZkCount);
      ownsZK = true;
    }
    if (zkHelper != null) {
      zkConnect = zkHelper.getConnectionString();
      // When using ZK, we need to pass in the connection property as
      // a config property. But, we can only do that if we are passing
      // in config properties defined at run time. Drill does not allow
      // combining locally-set properties and a config file: it is one
      // or the other.
if (builder.configBuilder().hasResource()) { throw new IllegalArgumentException("Cannot specify a local ZK while using an external config file."); } builder.configProperty(ExecConstants.ZK_CONNECTION, zkConnect); // Forced to disable this, because currently we leak memory which is a known issue for query cancellations. // Setting this causes unit tests to fail. builder.configProperty(ExecConstants.RETURN_ERROR_FOR_FAILURE_IN_CANCELLED_FRAGMENTS, true); } } private void createConfig() throws Exception { // Create a config // Because of the way DrillConfig works, we can set the ZK // connection string only if a property set is provided. config = builder.configBuilder.build(); if (builder.usingZk) { // Distribute drillbit using ZK (in-process or external) serviceSet = null; usesZk = true; } else { // Embedded Drillbit. serviceSet = RemoteServiceSet.getLocalServiceSet(); } } private void startDrillbits() throws Exception { // Start the Drillbits. Preconditions.checkArgument(builder.bitCount > 0); int bitCount = builder.bitCount; for (int i = 0; i < bitCount; i++) { Drillbit bit = new Drillbit(config, builder.configBuilder.getDefinitions(), serviceSet); bit.run(); // Bit name and registration. String name; if (builder.bitNames != null && i < builder.bitNames.length) { name = builder.bitNames[i]; } else { // Name the Drillbit by default. Most tests use one Drillbit, // so make the name simple: "drillbit." Only add a numeric suffix // when the test creates multiple bits. if (bitCount == 1) { name = DEFAULT_BIT_NAME; } else { name = DEFAULT_BIT_NAME + Integer.toString(i + 1); } } bits.put(name, bit); // Remember the first Drillbit, this is the default one returned from // drillbit(). if (i == 0) { defaultDrillbit = bit; } configureStoragePlugins(bit); } } private void configureStoragePlugins(Drillbit bit) throws Exception { // Skip plugins if not running in test mode. 
if (builder.dirTestWatcher == null) { return; } // Create the dfs name space builder.dirTestWatcher.newDfsTestTmpDir(); final StoragePluginRegistry pluginRegistry = bit.getContext().getStorage(); StoragePluginTestUtils.configureFormatPlugins(pluginRegistry); StoragePluginTestUtils.updateSchemaLocation( StoragePluginTestUtils.DFS_PLUGIN_NAME, pluginRegistry, builder.dirTestWatcher.getDfsTestTmpDir(), TMP_SCHEMA); StoragePluginTestUtils.updateSchemaLocation( StoragePluginTestUtils.DFS_PLUGIN_NAME, pluginRegistry, builder.dirTestWatcher.getRootDir(), ROOT_SCHEMA); StoragePluginTestUtils.updateSchemaLocation( StoragePluginTestUtils.DFS_PLUGIN_NAME, pluginRegistry, builder.dirTestWatcher.getRootDir(), SchemaFactory.DEFAULT_WS_NAME); // Create the mock data plugin MockStorageEngineConfig config = MockStorageEngineConfig.INSTANCE; config.setEnabled(true); pluginRegistry.put(MockStorageEngineConfig.NAME, config); } private void applyOptions() throws Exception { // Apply system options if (builder.systemOptions != null) { for (ClusterFixtureBuilder.RuntimeOption option : builder.systemOptions) { clientFixture().alterSystem(option.key, option.value); } } // Apply session options. 
if (builder.sessionOptions != null) { for (ClusterFixtureBuilder.RuntimeOption option : builder.sessionOptions) { clientFixture().alterSession(option.key, option.value); } } } public Drillbit drillbit() { return defaultDrillbit; } public Drillbit drillbit(String name) { return bits.get(name); } public Collection<Drillbit> drillbits() { return bits.values(); } public RemoteServiceSet serviceSet() { return serviceSet; } public ClientFixture.ClientBuilder clientBuilder() { return new ClientFixture.ClientBuilder(this); } public RestClientFixture.Builder restClientBuilder() { return new RestClientFixture.Builder(this); } public ClientFixture clientFixture() { if (clients.isEmpty()) { clientBuilder() .property(DrillProperties.DRILLBIT_CONNECTION, String.format("localhost:%s", drillbit().getUserPort())) .build(); } return clients.get(0); } /** * Create a test client for a specific host and port. * * @param host host, must be one of those created by this * fixture * @param port post, must be one of those created by this * fixture * @return a test client. Client will be closed when this cluster * fixture closes, or can be closed early */ public ClientFixture client(String host, int port) { return clientBuilder() .property(DrillProperties.DRILLBIT_CONNECTION, String.format("%s:%d", host, port)) .build(); } public RestClientFixture restClientFixture() { if (restClientFixture == null) { restClientFixture = restClientBuilder().build(); } return restClientFixture; } public DrillClient client() { return clientFixture().client(); } /** * Return a JDBC connection to the default (first) Drillbit. * Note that this code requires special setup of the test code. * Tests in the "exec" package do not normally have visibility * to the Drill JDBC driver. So, the test must put that code * on the class path manually in order for this code to load the * JDBC classes. The caller is responsible for closing the JDBC * connection before closing the cluster. 
   * (An enhancement is to
   * do the close automatically as is done for clients.)
   *
   * @return a JDBC connection to the default Drillbit
   */
  public Connection jdbcConnection() {
    try {
      // Force-load the driver; it is not on the compile-time classpath of "exec" tests.
      Class.forName("org.apache.drill.jdbc.Driver");
    } catch (ClassNotFoundException e) {
      throw new IllegalStateException(e);
    }
    String connStr = "jdbc:drill:";
    if (usesZK()) {
      connStr += "zk=" + zkHelper.getConnectionString();
    } else {
      DrillbitEndpoint ep = drillbit().getContext().getEndpoint();
      connStr += "drillbit=" + ep.getAddress() + ":" + ep.getUserPort();
    }
    try {
      return DriverManager.getConnection(connStr);
    } catch (SQLException e) {
      throw new IllegalStateException(e);
    }
  }

  /**
   * Close the clients, Drillbits, allocator and
   * Zookeeper. Checks for exceptions. If an exception occurs,
   * continues closing, suppresses subsequent exceptions, and
   * throws the first exception at completion of close. This allows
   * the test code to detect any state corruption which only shows
   * itself when shutting down resources (memory leaks, for example.)
   */
  @Override
  public void close() throws Exception {
    Exception ex = null;

    // Close clients. Clients remove themselves from the client
    // list.
    while (!clients.isEmpty()) {
      ex = safeClose(clients.get(0), ex);
    }
    for (Drillbit bit : drillbits()) {
      ex = safeClose(bit, ex);
    }
    bits.clear();
    ex = safeClose(serviceSet, ex);
    serviceSet = null;
    ex = safeClose(allocator, ex);
    allocator = null;
    if (zkHelper != null && ownsZK) {
      try {
        zkHelper.stopZookeeper();
      } catch (Exception e) {
        // Keep only the first exception seen during the whole close.
        ex = ex == null ? e : ex;
      }
    }
    zkHelper = null;
    if (ex != null) {
      throw ex;
    }
  }

  /**
   * Shutdown the drillbit given the name of the drillbit.
   * Silently does nothing if no drillbit is registered under that name.
   */
  public void closeDrillbit(final String drillbitName) throws Exception {
    Exception ex = null;
    for (Drillbit bit : drillbits()) {
      if (bit.equals(bits.get(drillbitName))) {
        try {
          bit.close();
        } catch (Exception e) {
          ex = ex == null ? e : ex;
        }
      }
    }
    if (ex != null) {
      throw ex;
    }
  }

  /**
   * Close a resource, suppressing the exception, and keeping
   * only the first exception that may occur. We assume that only
   * the first is useful, any others are probably down-stream effects
   * of that first one.
   *
   * @param item Item to be closed
   * @param ex exception to be returned if none thrown here
   * @return the first exception found
   */
  private Exception safeClose(AutoCloseable item, Exception ex) {
    try {
      if (item != null) {
        item.close();
      }
    } catch (Exception e) {
      ex = ex == null ? e : ex;
    }
    return ex;
  }

  // Registers (or replaces) a storage plugin config on every Drillbit in the cluster.
  public void defineStoragePlugin(String name, StoragePluginConfig config) {
    try {
      for (Drillbit drillbit : drillbits()) {
        StoragePluginRegistryImpl registry = (StoragePluginRegistryImpl) drillbit.getContext().getStorage();
        registry.put(name, config);
      }
    } catch (PluginException e) {
      throw new IllegalStateException("Plugin definition failed", e);
    }
  }

  /**
   * Define a workspace within an existing storage plugin. Useful for
   * pointing to local file system files outside the Drill source tree.
   *
   * @param pluginName name of the plugin like "dfs".
* @param schemaName name of the new schema * @param path directory location (usually local) * @param defaultFormat default format for files in the schema */ public void defineWorkspace(String pluginName, String schemaName, String path, String defaultFormat) { defineWorkspace(pluginName, schemaName, path, defaultFormat, null); } public void defineWorkspace(String pluginName, String schemaName, String path, String defaultFormat, FormatPluginConfig format) { defineWorkspace(pluginName, schemaName, path, defaultFormat, format, true); } public void defineImmutableWorkspace(String pluginName, String schemaName, String path, String defaultFormat, FormatPluginConfig format) { defineWorkspace(pluginName, schemaName, path, defaultFormat, format, false); } private void defineWorkspace(String pluginName, String schemaName, String path, String defaultFormat, FormatPluginConfig format, boolean writable) { for (Drillbit bit : drillbits()) { try { defineWorkspace(bit, pluginName, schemaName, path, defaultFormat, format, writable); } catch (PluginException e) { // This functionality is supposed to work in tests. Change // exception to unchecked to make test code simpler. 
throw new IllegalStateException(String.format( "Failed to define a workspace for plugin %s, schema %s, path %s, default format %s", pluginName, schemaName, path, defaultFormat), e); } } } private void defineWorkspace(Drillbit drillbit, String pluginName, String schemaName, String path, String defaultFormat, FormatPluginConfig format, boolean writable) throws PluginException { final StoragePluginRegistry pluginRegistry = drillbit.getContext().getStorage(); final FileSystemConfig pluginConfig = (FileSystemConfig) pluginRegistry.getStoredConfig(pluginName); final WorkspaceConfig newTmpWSConfig = new WorkspaceConfig(path, writable, defaultFormat, false); Map<String, WorkspaceConfig> newWorkspaces = new HashMap<>(); Optional.ofNullable(pluginConfig.getWorkspaces()) .ifPresent(newWorkspaces::putAll); newWorkspaces.put(schemaName, newTmpWSConfig); Map<String, FormatPluginConfig> newFormats = new HashMap<>(); Optional.ofNullable(pluginConfig.getFormats()) .ifPresent(newFormats::putAll); Optional.ofNullable(format) .ifPresent(f -> newFormats.put(defaultFormat, f)); updatePlugin(pluginRegistry, pluginName, pluginConfig, newWorkspaces, newFormats); } public void defineFormat(String pluginName, String name, FormatPluginConfig config) { defineFormats(pluginName, ImmutableMap.of(name, config)); } public void defineFormats(String pluginName, Map<String, FormatPluginConfig> formats) { for (Drillbit bit : drillbits()) { try { defineFormats(bit, pluginName, formats); } catch (PluginException e) { throw new IllegalStateException(e); } } } private void defineFormats(Drillbit drillbit, String pluginName, Map<String, FormatPluginConfig> formats) throws PluginException { StoragePluginRegistry pluginRegistry = drillbit.getContext().getStorage(); FileSystemConfig pluginConfig = (FileSystemConfig) pluginRegistry.copyConfig(pluginName); pluginConfig.getFormats().putAll(formats); pluginRegistry.put(pluginName, pluginConfig); } private void updatePlugin(StoragePluginRegistry pluginRegistry, 
String pluginName, FileSystemConfig pluginConfig, Map<String, WorkspaceConfig> newWorkspaces, Map<String, FormatPluginConfig> newFormats) throws PluginException { FileSystemConfig newPluginConfig = new FileSystemConfig( pluginConfig.getConnection(), pluginConfig.getConfig(), newWorkspaces == null ? pluginConfig.getWorkspaces() : newWorkspaces, newFormats == null ? pluginConfig.getFormats() : newFormats); newPluginConfig.setEnabled(pluginConfig.isEnabled()); pluginRegistry.put(pluginName, newPluginConfig); } public static final String EXPLAIN_PLAN_TEXT = "text"; public static final String EXPLAIN_PLAN_JSON = "json"; public static ClusterFixtureBuilder builder(BaseDirTestWatcher dirTestWatcher) { ClusterFixtureBuilder builder = new ClusterFixtureBuilder(dirTestWatcher) .sessionOption(ExecConstants.MAX_WIDTH_PER_NODE_KEY, MAX_WIDTH_PER_NODE); Properties props = new Properties(); props.putAll(ClusterFixture.TEST_CONFIGURATIONS); props.setProperty(ExecConstants.DRILL_TMP_DIR, dirTestWatcher.getTmpDir().getAbsolutePath()); props.setProperty(ExecConstants.UDF_DIRECTORY_ROOT, dirTestWatcher.getHomeDir().getAbsolutePath()); props.setProperty(ExecConstants.SYS_STORE_PROVIDER_LOCAL_PATH, dirTestWatcher.getStoreDir().getAbsolutePath()); props.setProperty(ExecConstants.UDF_DIRECTORY_FS, FileSystem.DEFAULT_FS); // ALTER SESSION profiles are seldom interesting props.setProperty(ExecConstants.SKIP_ALTER_SESSION_QUERY_PROFILE, Boolean.TRUE.toString()); builder.configBuilder.configProps(props); return builder; } /** * Return a cluster builder without any of the usual defaults. Use * this only for special cases. Your code is responsible for all the * odd bits that must be set to get the setup right. See * {@link ClusterFixture#TEST_CONFIGURATIONS} for details. Note that * you are often better off using the defaults, then replacing selected * properties with the values you prefer. 
* * @return a fixture builder with no default properties set */ public static ClusterFixtureBuilder bareBuilder(BaseDirTestWatcher dirTestWatcher) { return new ClusterFixtureBuilder(dirTestWatcher); } /** * Shim class to allow the {@link TestBuilder} class to work with the * cluster fixture. */ public static class FixtureTestServices implements TestServices { private final ClientFixture client; public FixtureTestServices(ClientFixture client) { this.client = client; } @Override public BufferAllocator allocator() { return client.allocator(); } @Override public void test(String query) throws Exception { client.runQueriesAndLog(query); } @Override public List<QueryDataBatch> testRunAndReturn(QueryType type, Object query) throws Exception { return client.queryBuilder().query(type, (String) query).results(); } } /** * Return a cluster fixture built with standard options. This is a short-cut * for simple tests that don't need special setup. * * @param dirTestWatcher directory test watcher * @return a cluster fixture with standard options */ public static ClusterFixture standardCluster(BaseDirTestWatcher dirTestWatcher) { return builder(dirTestWatcher).build(); } /** * Convert a Java object (typically a boxed scalar) to a string * for use in SQL. Quotes strings but just converts others to * string format. If value to encode is null, return null. * * @param value the value to encode * @return the SQL-acceptable string equivalent */ public static String stringify(Object value) { if (value == null) { return null; } if (value instanceof String) { return "'" + value + "'"; } return value.toString(); } public static String getResource(String resource) throws IOException { // Unlike the Java routines, Guava does not like a leading slash. 
final URL url = Resources.getResource(trimSlash(resource)); if (url == null) { throw new IOException( String.format("Unable to find resource %s.", resource)); } return Resources.toString(url, Charsets.UTF_8); } /** * Load a resource file, returning the resource as a string. * "Hides" the checked exception as unchecked, which is fine * in a test as the unchecked exception will fail the test * without unnecessary error fiddling. * * @param resource path to the resource * @return the resource contents as a string */ public static String loadResource(String resource) { try { return getResource(resource); } catch (IOException e) { throw new IllegalStateException("Resource not found: " + resource, e); } } /** * Guava likes paths to resources without an initial slash, the JDK * needs a slash. Normalize the path when needed. * * @param path resource path with optional leading slash * @return same path without the leading slash */ public static String trimSlash(String path) { if (path == null) { return path; } else if (path.startsWith("/")) { return path.substring(1); } else { return path; } } /** * Create a temporary data directory which will be removed when the * cluster closes, and register it as a "dfs" name space. * * @param key The name to use for the directory and the name space. * Access the directory as "dfs.<key>". * @param defaultFormat The default storage format for the workspace. * @param formatPluginConfig The format plugin config. 
* @return location of the directory which can be used to create * temporary input files */ public File makeDataDir(String key, String defaultFormat, FormatPluginConfig formatPluginConfig) { File dir = builder.dirTestWatcher.makeSubDir(Paths.get(key)); defineWorkspace("dfs", key, dir.getAbsolutePath(), defaultFormat, formatPluginConfig); return dir; } public File getDrillTempDir() { return new File(URI.create(config.getString(ExecConstants.SYS_STORE_PROVIDER_LOCAL_PATH)).getPath()); } public boolean usesZK() { return usesZk; } /** * Returns the directory that holds query profiles. Valid only for an * embedded Drillbit with local cluster coordinator &ndash; the normal * case for unit tests. * * @return query profile directory */ public File getProfileDir() { File baseDir; if (usesZk) { baseDir = new File(config.getString(ZookeeperPersistentStoreProvider.DRILL_EXEC_SYS_STORE_PROVIDER_ZK_BLOBROOT)); } else { baseDir = getDrillTempDir(); } return new File(baseDir, "profiles"); } public StoragePluginRegistry storageRegistry() { return drillbit().getContext().getStorage(); } public StoragePluginRegistry storageRegistry(String name) { return drillbit(name).getContext().getStorage(); } }
/*
 * Copyright 2016 Google Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.devrel.gmscore.tools.apk.arsc;

import com.google.auto.value.AutoValue;
import com.google.common.base.Preconditions;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;
import com.google.common.collect.Multimaps;
import com.google.inject.Inject;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;

/**
 * Analyzes an APK to:
 *
 * <ul>
 *   <li>Blame resource configurations on their entry count (entries keeping the config around).
 *   <li>Blame strings in resources.arsc that have no base configuration.
 *   <li>Blame resources on their different configurations.
 * </ul>
 */
public class ArscBlamer {

  /** Maps package key pool indices to blamed resources. */
  private final Map<PackageChunk, List<ResourceEntry>[]> keyToBlame = new HashMap<>();

  /** Maps types to blamed resources. */
  private final Map<PackageChunk, List<ResourceEntry>[]> typeToBlame = new HashMap<>();

  /** Maps package to blamed resources. */
  private final Multimap<PackageChunk, ResourceEntry> packageToBlame = HashMultimap.create();

  /** Maps string indices to blamed resources. */
  private final List<ResourceEntry>[] stringToBlame;

  /** Maps type chunk entries to blamed resources. */
  private final Multimap<TypeChunk.Entry, ResourceEntry> typeEntryToBlame = HashMultimap.create();

  /** Maps resources to the type chunk entries they reference. Lazily computed. */
  private Multimap<ResourceEntry, TypeChunk.Entry> resourceEntries;

  /** Maps resources which have no base config to the type chunk entries they reference. */
  private Multimap<ResourceEntry, TypeChunk.Entry> baselessKeys;

  /** Contains all of the type chunks in {@link #resourceTable}. Lazily computed. */
  private List<TypeChunk> typeChunks;

  /** This is the {@link ResourceTableChunk} inside of the resources.arsc file in the APK. */
  private final ResourceTableChunk resourceTable;

  /**
   * Creates a new {@link ArscBlamer}.
   *
   * @param resourceTable The resources.arsc resource table to blame.
   */
  @Inject
  public ArscBlamer(ResourceTableChunk resourceTable) {
    this.resourceTable = resourceTable;
    this.stringToBlame = createEntryListArray(resourceTable.getStringPool().getStringCount());
  }

  /** Generates blame mappings. Call once before using any of the getters below. */
  public void blame() {
    Multimap<ResourceEntry, TypeChunk.Entry> entries = getResourceEntries();
    for (Entry<ResourceEntry, Collection<TypeChunk.Entry>> entry : entries.asMap().entrySet()) {
      ResourceEntry resourceEntry = entry.getKey();
      PackageChunk packageChunk =
          Preconditions.checkNotNull(resourceTable.getPackage(resourceEntry.packageName()));
      int keyCount = packageChunk.getKeyStringPool().getStringCount();
      int typeCount = packageChunk.getTypeStringPool().getStringCount();
      for (TypeChunk.Entry chunkEntry : entry.getValue()) {
        blameKeyOrType(keyToBlame, packageChunk, chunkEntry.keyIndex(), resourceEntry, keyCount);
        // Type ids are 1-based; slot 0 of the per-package array is type id 1.
        blameKeyOrType(
            typeToBlame, packageChunk, chunkEntry.parent().getId() - 1, resourceEntry, typeCount);
      }
      blamePackage(packageChunk, resourceEntry);
    }
    // String blaming must happen AFTER typeEntryToBlame is populated below;
    // blameFromTypeChunkEntry reads that multimap, so calling it inside the
    // loop above would be a no-op (and would double-count on a second call).
    Multimaps.invertFrom(entries, typeEntryToBlame);
    for (TypeChunk.Entry entry : typeEntryToBlame.keySet()) {
      blameFromTypeChunkEntry(entry);
    }
  }

  /** Appends {@code entry} to the per-package list at {@code keyIndex}, creating it on demand. */
  private void blameKeyOrType(Map<PackageChunk, List<ResourceEntry>[]> keyOrType,
      PackageChunk packageChunk, int keyIndex, ResourceEntry entry, int entryCount) {
    if (!keyOrType.containsKey(packageChunk)) {
      keyOrType.put(packageChunk, createEntryListArray(entryCount));
    }
    keyOrType.get(packageChunk)[keyIndex].add(entry);
  }

  /** Records that {@code entry} keeps {@code packageChunk} around. */
  private void blamePackage(PackageChunk packageChunk, ResourceEntry entry) {
    packageToBlame.put(packageChunk, entry);
  }

  /** Blames every string value in {@code chunkEntry} on the resources referencing the chunk. */
  private void blameFromTypeChunkEntry(TypeChunk.Entry chunkEntry) {
    for (ResourceValue value : getAllResourceValues(chunkEntry)) {
      for (ResourceEntry entry : typeEntryToBlame.get(chunkEntry)) {
        switch (value.type()) {
          case STRING:
            blameString(value.data(), entry);
            break;
          default:
            break;
        }
      }
    }
  }

  /** Returns all {@link ResourceValue} for a single {@code entry}. */
  private Collection<ResourceValue> getAllResourceValues(TypeChunk.Entry entry) {
    Set<ResourceValue> values = new HashSet<ResourceValue>();
    ResourceValue resourceValue = entry.value();
    if (resourceValue != null) {
      values.add(resourceValue);
    }
    for (ResourceValue value : entry.values().values()) {
      values.add(value);
    }
    return values;
  }

  /** Records that {@code entry} references the string-pool string at {@code stringIndex}. */
  private void blameString(int stringIndex, ResourceEntry entry) {
    stringToBlame[stringIndex].add(entry);
  }

  /** Must first call {@link #blame}. */
  public Map<PackageChunk, List<ResourceEntry>[]> getKeyToBlamedResources() {
    return Collections.unmodifiableMap(keyToBlame);
  }

  /** Must first call {@link #blame}. */
  public Map<PackageChunk, List<ResourceEntry>[]> getTypeToBlamedResources() {
    return Collections.unmodifiableMap(typeToBlame);
  }

  /** Must first call {@link #blame}. */
  public Multimap<PackageChunk, ResourceEntry> getPackageToBlamedResources() {
    return Multimaps.unmodifiableMultimap(packageToBlame);
  }

  /** Must first call {@link #blame}. */
  public List<ResourceEntry>[] getStringToBlamedResources() {
    return stringToBlame;
  }

  /** Must first call {@link #blame}. */
  public Multimap<TypeChunk.Entry, ResourceEntry> getTypeEntryToBlamedResources() {
    return Multimaps.unmodifiableMultimap(typeEntryToBlame);
  }

  /** Returns a multimap of keys for which there is no default resource. */
  public Multimap<ResourceEntry, TypeChunk.Entry> getBaselessKeys() {
    if (baselessKeys != null) {  // Cached from a previous call.
      return baselessKeys;
    }
    Multimap<ResourceEntry, TypeChunk.Entry> result = HashMultimap.create();
    for (Entry<ResourceEntry, Collection<TypeChunk.Entry>> entry
        : getResourceEntries().asMap().entrySet()) {
      Collection<TypeChunk.Entry> chunkEntries = entry.getValue();
      if (!hasBaseConfiguration(chunkEntries)) {
        result.putAll(entry.getKey(), chunkEntries);
      }
    }
    baselessKeys = result;
    return result;
  }

  /** Returns a multimap of resource entries to the chunk entries they reference in this APK. */
  public Multimap<ResourceEntry, TypeChunk.Entry> getResourceEntries() {
    if (resourceEntries != null) {  // Cached from a previous call.
      return resourceEntries;
    }
    Multimap<ResourceEntry, TypeChunk.Entry> result = HashMultimap.create();
    for (TypeChunk typeChunk : getTypeChunks()) {
      for (TypeChunk.Entry entry : typeChunk.getEntries().values()) {
        result.put(ResourceEntry.create(entry), entry);
      }
    }
    resourceEntries = result;
    return result;
  }

  /** Returns all {@link TypeChunk} in resources.arsc. */
  public List<TypeChunk> getTypeChunks() {
    if (typeChunks != null) {  // Cached from a previous call.
      return typeChunks;
    }
    List<TypeChunk> result = new ArrayList<>();
    for (PackageChunk packageChunk : resourceTable.getPackages()) {
      for (TypeChunk typeChunk : packageChunk.getTypeChunks()) {
        result.add(typeChunk);
      }
    }
    typeChunks = result;
    return result;
  }

  /** True if any of {@code entries} belongs to the default (base) configuration. */
  private boolean hasBaseConfiguration(Collection<TypeChunk.Entry> entries) {
    for (TypeChunk.Entry entry : entries) {
      if (entry.parent().getConfiguration().isDefault()) {
        return true;
      }
    }
    return false;
  }

  /** Creates an array of {@code size} small pre-allocated entry lists. */
  private static List<ResourceEntry>[] createEntryListArray(int size) {
    ArrayListResourceEntry[] result = new ArrayListResourceEntry[size];
    for (int i = 0; i < size; ++i) {
      result[i] = new ArrayListResourceEntry();
    }
    return result;
  }

  /** Allows creation of concrete parameterized type arr {@link ArscBlamer#createEntryListArray}. */
  private static class ArrayListResourceEntry extends ArrayList<ResourceEntry> {
    private ArrayListResourceEntry() {
      super(2);  // ~90-95% of these lists end up with only 1 or 2 elements.
    }
  }

  /** Describes a single resource entry. */
  @AutoValue
  public abstract static class ResourceEntry {

    public abstract String packageName();

    public abstract String typeName();

    public abstract String entryName();

    static ResourceEntry create(TypeChunk.Entry entry) {
      PackageChunk packageChunk = Preconditions.checkNotNull(entry.parent().getPackageChunk());
      String packageName = packageChunk.getPackageName();
      String typeName = entry.typeName();
      String entryName = entry.key();
      return new AutoValue_ArscBlamer_ResourceEntry(packageName, typeName, entryName);
    }
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.reactive.streams;

import java.util.LinkedList;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ConcurrentLinkedDeque;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

import io.reactivex.Flowable;
import org.apache.camel.component.reactive.streams.engine.DelayedMonoPublisher;
import org.apache.camel.component.reactive.streams.support.TestSubscriber;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;

/**
 * Tests for {@link DelayedMonoPublisher}: a publisher that emits exactly one
 * value (or one error) to each subscriber, regardless of whether the
 * subscriber arrives before or after the value becomes available.
 *
 * NOTE(review): several tests use Thread.sleep/Thread.yield to order the
 * subscribe vs. setData/setException races — the statement order inside each
 * test is significant.
 */
public class DelayedMonoPublisherTest {

    // Shared executor driving the publisher's delivery; recreated per test.
    private ExecutorService service;

    @BeforeEach
    public void init() {
        service = new ScheduledThreadPoolExecutor(3);
    }

    @AfterEach
    public void tearDown() throws Exception {
        service.shutdown();
        service.awaitTermination(1, TimeUnit.SECONDS);
    }

    // Value set BEFORE subscribing must still be delivered to the subscriber.
    @Test
    public void testAlreadyAvailable() throws Exception {
        DelayedMonoPublisher<Integer> pub = new DelayedMonoPublisher<>(service);
        pub.setData(5);

        LinkedList<Integer> data = new LinkedList<>();
        CountDownLatch latch = new CountDownLatch(1);

        Flowable.fromPublisher(pub)
                .doOnNext(data::add)
                .doOnComplete(latch::countDown)
                .subscribe();

        assertTrue(latch.await(1, TimeUnit.SECONDS));
        assertEquals(1, data.size());
        assertEquals(5, data.get(0).intValue());
    }

    // Exception set BEFORE subscribing is delivered through the error channel.
    @Test
    public void testExceptionAlreadyAvailable() throws Exception {
        Exception ex = new RuntimeException("An exception");

        DelayedMonoPublisher<Integer> pub = new DelayedMonoPublisher<>(service);
        pub.setException(ex);

        LinkedList<Throwable> exceptions = new LinkedList<>();
        CountDownLatch latch = new CountDownLatch(1);

        Flowable.fromPublisher(pub)
                .subscribe(item -> {
                }, e -> {
                    exceptions.add(e);
                    latch.countDown();
                });

        assertTrue(latch.await(1, TimeUnit.SECONDS));
        assertEquals(1, exceptions.size());
        assertEquals(ex, exceptions.get(0));
    }

    // Subscribe first, set the value almost immediately (after a yield).
    @Test
    public void testAvailableSoon() throws Exception {
        DelayedMonoPublisher<Integer> pub = new DelayedMonoPublisher<>(service);

        LinkedList<Integer> data = new LinkedList<>();
        CountDownLatch latch = new CountDownLatch(1);

        Flowable.fromPublisher(pub)
                .doOnNext(data::add)
                .doOnComplete(latch::countDown)
                .subscribe();

        Thread.yield();
        pub.setData(5);

        assertTrue(latch.await(1, TimeUnit.SECONDS));
        assertEquals(1, data.size());
        assertEquals(5, data.get(0).intValue());
    }

    // Subscribe first, set the value after a noticeable delay.
    @Test
    public void testAvailableLater() throws Exception {
        DelayedMonoPublisher<Integer> pub = new DelayedMonoPublisher<>(service);

        LinkedList<Integer> data = new LinkedList<>();
        CountDownLatch latch = new CountDownLatch(1);

        Flowable.fromPublisher(pub)
                .doOnNext(data::add)
                .doOnComplete(latch::countDown)
                .subscribe();

        Thread.sleep(200);
        pub.setData(5);

        assertTrue(latch.await(1, TimeUnit.SECONDS));
        assertEquals(1, data.size());
        assertEquals(5, data.get(0).intValue());
    }

    // Both subscribers arrive before the value; each must receive it once.
    @Test
    public void testMultipleSubscribers() throws Exception {
        DelayedMonoPublisher<Integer> pub = new DelayedMonoPublisher<>(service);

        ConcurrentLinkedDeque<Integer> data = new ConcurrentLinkedDeque<>();
        CountDownLatch latch = new CountDownLatch(2);

        Flowable.fromPublisher(pub)
                .doOnNext(data::add)
                .doOnComplete(latch::countDown)
                .subscribe();

        Flowable.fromPublisher(pub)
                .doOnNext(data::add)
                .doOnComplete(latch::countDown)
                .subscribe();

        Thread.sleep(200);
        pub.setData(5);

        assertTrue(latch.await(1, TimeUnit.SECONDS));
        assertEquals(2, data.size());

        for (Integer n : data) {
            assertEquals(5, n.intValue());
        }
    }

    // One subscriber arrives before the value is set, one after.
    @Test
    public void testMultipleSubscribersMixedArrival() throws Exception {
        DelayedMonoPublisher<Integer> pub = new DelayedMonoPublisher<>(service);

        ConcurrentLinkedDeque<Integer> data = new ConcurrentLinkedDeque<>();
        CountDownLatch latch = new CountDownLatch(2);

        Flowable.fromPublisher(pub)
                .doOnNext(data::add)
                .doOnComplete(latch::countDown)
                .subscribe();

        Thread.sleep(200);
        pub.setData(5);

        Flowable.fromPublisher(pub)
                .doOnNext(data::add)
                .doOnComplete(latch::countDown)
                .subscribe();

        assertTrue(latch.await(1, TimeUnit.SECONDS));
        assertEquals(2, data.size());

        for (Integer n : data) {
            assertEquals(5, n.intValue());
        }
    }

    // Same mixed-arrival scenario, but for the error path.
    @Test
    public void testMultipleSubscribersMixedArrivalException() throws Exception {
        DelayedMonoPublisher<Integer> pub = new DelayedMonoPublisher<>(service);
        Exception ex = new RuntimeException("An exception");

        ConcurrentLinkedDeque<Throwable> exceptions = new ConcurrentLinkedDeque<>();
        CountDownLatch latch = new CountDownLatch(2);

        Flowable.fromPublisher(pub)
                .subscribe(item -> {
                }, e -> {
                    exceptions.add(e);
                    latch.countDown();
                });

        Thread.sleep(200);
        pub.setException(ex);

        Flowable.fromPublisher(pub)
                .subscribe(item -> {
                }, e -> {
                    exceptions.add(e);
                    latch.countDown();
                });

        assertTrue(latch.await(1, TimeUnit.SECONDS));
        assertEquals(2, exceptions.size());

        for (Throwable t : exceptions) {
            assertEquals(ex, t);
        }
    }

    // Nothing is emitted until the subscriber actually requests an item
    // (initial request set to 0, then an explicit request(1)).
    @Test
    public void testDelayedRequest() throws Exception {
        DelayedMonoPublisher<Integer> pub = new DelayedMonoPublisher<>(service);
        pub.setData(2);

        BlockingQueue<Integer> queue = new LinkedBlockingDeque<>();

        TestSubscriber<Integer> sub = new TestSubscriber<Integer>() {
            @Override
            public void onNext(Integer o) {
                queue.add(o);
            }
        };
        sub.setInitiallyRequested(0);
        pub.subscribe(sub);

        Thread.sleep(100);
        sub.request(1);

        Integer res = queue.poll(1, TimeUnit.SECONDS);
        assertEquals(Integer.valueOf(2), res);
    }

    // setData after setException is an illegal state transition.
    @Test
    public void testDataOrExceptionAllowed() throws Exception {
        DelayedMonoPublisher<Integer> pub = new DelayedMonoPublisher<>(service);
        Exception ex = new RuntimeException("An exception");
        pub.setException(ex);

        assertThrows(IllegalStateException.class, () -> pub.setData(1));
    }

    // setException after setData is an illegal state transition.
    @Test
    public void testDataOrExceptionAllowed2() throws Exception {
        DelayedMonoPublisher<Integer> pub = new DelayedMonoPublisher<>(service);
        pub.setData(1);

        Exception ex = new RuntimeException("An exception");
        assertThrows(IllegalStateException.class, () -> pub.setException(ex));
    }

    // The single value may only be set once.
    @Test
    public void testOnlyOneDataAllowed() throws Exception {
        DelayedMonoPublisher<Integer> pub = new DelayedMonoPublisher<>(service);
        pub.setData(1);

        assertThrows(IllegalStateException.class, () -> pub.setData(2));
    }

    // The single exception may only be set once.
    @Test
    public void testOnlyOneExceptionAllowed() throws Exception {
        DelayedMonoPublisher<Integer> pub = new DelayedMonoPublisher<>(service);
        pub.setException(new RuntimeException("An exception"));

        assertThrows(IllegalStateException.class,
                () -> pub.setException(new RuntimeException("An exception")));
    }
}
/*
 * Copyright (c) 2013 Jadler contributors
 * This program is made available under the terms of the MIT License.
 */
package net.jadler;

import org.apache.commons.lang.Validate;
import org.hamcrest.Matcher;

import java.util.ArrayList;
import java.util.List;

import static net.jadler.matchers.BodyRequestMatcher.requestBody;
import static net.jadler.matchers.HeaderRequestMatcher.requestHeader;
import static net.jadler.matchers.MethodRequestMatcher.requestMethod;
import static net.jadler.matchers.ParameterRequestMatcher.requestParameter;
import static net.jadler.matchers.PathRequestMatcher.requestPath;
import static net.jadler.matchers.QueryStringRequestMatcher.requestQueryString;
import static net.jadler.matchers.RawBodyRequestMatcher.requestRawBody;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.equalToIgnoringCase;
import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.Matchers.notNullValue;


/**
 * A base implementation of the {@link RequestMatching} interface. Collects all request predicates to a protected
 * collection available in extending classes.
 *
 * @param <T> type (either class or interface) of the class extending this abstract class. This type will be returned
 * by all methods introduced in {@link RequestMatching} implemented by this class so fluid request matching
 * is possible.
 */
public abstract class AbstractRequestMatching<T extends RequestMatching<T>> implements RequestMatching<T> {

    /** All predicates registered so far; every having* method appends here via {@link #that}. */
    protected final List<Matcher<? super Request>> predicates;


    protected AbstractRequestMatching() {
        this.predicates = new ArrayList<Matcher<? super Request>>();
    }


    /**
     * {@inheritDoc}
     */
    @Override
    @SuppressWarnings("unchecked")
    public T that(final Matcher<? super Request> predicate) {
        Validate.notNull(predicate, "predicate cannot be null");

        this.predicates.add(predicate);
        // The cast is safe by the contract of the T type parameter.
        return (T)this;
    }


    /**
     * {@inheritDoc}
     */
    @Override
    public T havingMethodEqualTo(final String method) {
        Validate.notEmpty(method, "method cannot be empty");
        // HTTP method comparison is deliberately case-insensitive.
        return havingMethod(equalToIgnoringCase(method));
    }


    /**
     * {@inheritDoc}
     */
    @Override
    public T havingMethod(final Matcher<? super String> predicate) {
        Validate.notNull(predicate, "predicate cannot be null");
        return that(requestMethod(predicate));
    }


    /**
     * {@inheritDoc}
     */
    @Override
    public T havingBodyEqualTo(final String requestBody) {
        Validate.notNull(requestBody, "requestBody cannot be null, use an empty string instead");
        return havingBody(equalTo(requestBody));
    }


    /**
     * {@inheritDoc}
     */
    @Override
    public T havingBody(final Matcher<? super String> predicate) {
        Validate.notNull(predicate, "predicate cannot be null");
        return that(requestBody(predicate));
    }


    /**
     * {@inheritDoc}
     */
    @Override
    public T havingRawBodyEqualTo(final byte[] requestBody) {
        Validate.notNull(requestBody, "requestBody cannot be null, use an empty array instead");
        return that(requestRawBody(equalTo(requestBody)));
    }


    /**
     * {@inheritDoc}
     */
    @Override
    public T havingPathEqualTo(final String path) {
        Validate.notEmpty(path, "path cannot be empty");
        return havingPath(equalTo(path));
    }


    /**
     * {@inheritDoc}
     */
    @Override
    public T havingPath(final Matcher<? super String> predicate) {
        Validate.notNull(predicate, "predicate cannot be null");
        return that(requestPath(predicate));
    }


    /**
     * {@inheritDoc}
     */
    @Override
    public T havingQueryStringEqualTo(final String queryString) {
        // null is a valid value here: it matches requests with no query string at all.
        return havingQueryString(equalTo(queryString));
    }


    /**
     * {@inheritDoc}
     */
    @Override
    public T havingQueryString(final Matcher<? super String> predicate) {
        Validate.notNull(predicate, "predicate cannot be null");
        return that(requestQueryString(predicate));
    }


    /**
     * {@inheritDoc}
     */
    @Override
    public T havingParameterEqualTo(final String name, final String value) {
        Validate.notNull(value, "value cannot be null");
        return havingParameter(name, hasItem(value));
    }


    /**
     * {@inheritDoc}
     */
    @Override
    public T havingParameter(final String name, final Matcher<? super List<String>> predicate) {
        Validate.notEmpty(name, "name cannot be empty");
        Validate.notNull(predicate, "predicate cannot be null");

        return that(requestParameter(name, predicate));
    }


    /**
     * {@inheritDoc}
     */
    @Override
    public T havingParameter(final String name) {
        return havingParameter(name, notNullValue());
    }


    /**
     * {@inheritDoc}
     */
    @Override
    @SuppressWarnings("unchecked")
    public T havingParameters(final String... names) {
        // Validate up front for a descriptive error, consistent with havingHeaders;
        // otherwise a null array would surface as a bare NPE from the for-each.
        Validate.notNull(names, "names cannot be null");

        for (final String name: names) {
            havingParameter(name);
        }

        return (T)this;
    }


    /**
     * {@inheritDoc}
     */
    @Override
    public T havingHeaderEqualTo(final String name, final String value) {
        Validate.notNull(value, "value cannot be null");
        return havingHeader(name, hasItem(value));
    }


    /**
     * {@inheritDoc}
     */
    @Override
    public T havingHeader(final String name, final Matcher<? super List<String>> predicate) {
        Validate.notEmpty(name, "name cannot be empty");
        Validate.notNull(predicate, "predicate cannot be null");

        return that(requestHeader(name, predicate));
    }


    /**
     * {@inheritDoc}
     */
    @Override
    public T havingHeader(final String name) {
        return havingHeader(name, notNullValue());
    }


    /**
     * {@inheritDoc}
     */
    @Override
    @SuppressWarnings("unchecked")
    public T havingHeaders(final String... names) {
        Validate.notNull(names, "names cannot be null");

        for (final String name: names) {
            havingHeader(name);
        }

        return (T)this;
    }
}
/*
 * ====================================================================
 *
 *  Licensed to the Apache Software Foundation (ASF) under one or more
 *  contributor license agreements.  See the NOTICE file distributed with
 *  this work for additional information regarding copyright ownership.
 *  The ASF licenses this file to You under the Apache License, Version 2.0
 *  (the "License"); you may not use this file except in compliance with
 *  the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 * ====================================================================
 *
 * This software consists of voluntary contributions made by many
 * individuals on behalf of the Apache Software Foundation.  For more
 * information on the Apache Software Foundation, please see
 * <http://www.apache.org/>.
 *
 */

package org.apache.http.impl.auth;

import java.io.IOException;
import java.security.MessageDigest;
import java.security.SecureRandom;
import java.util.ArrayList;
import java.util.Formatter;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Set;
import java.util.StringTokenizer;

import org.apache.http.annotation.NotThreadSafe;
import org.apache.http.Header;
import org.apache.http.HttpEntity;
import org.apache.http.HttpEntityEnclosingRequest;
import org.apache.http.HttpRequest;
import org.apache.http.auth.AuthenticationException;
import org.apache.http.auth.ChallengeState;
import org.apache.http.auth.ContextAwareAuthScheme;
import org.apache.http.auth.Credentials;
import org.apache.http.auth.AUTH;
import org.apache.http.auth.MalformedChallengeException;
import org.apache.http.auth.params.AuthParams;
import org.apache.http.message.BasicNameValuePair;
import org.apache.http.message.BasicHeaderValueFormatter;
import org.apache.http.message.BufferedHeader;
import org.apache.http.protocol.BasicHttpContext;
import org.apache.http.protocol.HttpContext;
import org.apache.http.util.CharArrayBuffer;
import org.apache.http.util.EncodingUtils;

/**
 * Digest authentication scheme as defined in RFC 2617.
 * Both MD5 (default) and MD5-sess are supported.
 * Currently only qop=auth or no qop is supported. qop=auth-int
 * is unsupported. If auth and auth-int are provided, auth is
 * used.
 * <p>
 * Credential charset is configured via the
 * {@link org.apache.http.auth.params.AuthPNames#CREDENTIAL_CHARSET}
 * parameter of the HTTP request.
 * <p>
 * Since the digest username is included as clear text in the generated
 * Authentication header, the charset of the username must be compatible
 * with the
 * {@link org.apache.http.params.CoreProtocolPNames#HTTP_ELEMENT_CHARSET
 *        http element charset}.
 * <p>
 * The following parameters can be used to customize the behavior of this
 * class:
 * <ul>
 *  <li>{@link org.apache.http.auth.params.AuthPNames#CREDENTIAL_CHARSET}</li>
 * </ul>
 *
 * @since 4.0
 */
@NotThreadSafe
public class DigestScheme extends RFC2617Scheme {

    /**
     * Hex digits used when creating the 32 character long digest in HTTP
     * Digest authentication.
     *
     * @see #encode(byte[])
     */
    private static final char[] HEXADECIMAL = {
        '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd',
        'e', 'f'
    };

    /** Whether the digest authentication process is complete */
    private boolean complete;

    // Internal encoding of the qop challenge directive; see RFC 2617 3.2.1.
    private static final int QOP_UNKNOWN = -1;
    private static final int QOP_MISSING = 0;
    private static final int QOP_AUTH_INT = 1;
    private static final int QOP_AUTH = 2;

    // Nonce state carried across requests so nc (nounceCount) can increase
    // while the server nonce stays the same.
    private String lastNonce;
    private long nounceCount;  // NOTE(review): field name keeps the historic "nounce" spelling.
    private String cnonce;
    // Cached A1/A2 intermediate hashes (RFC 2617 terminology).
    private String a1;
    private String a2;

    /**
     * Creates an instance of <tt>DigestScheme</tt> with the given challenge
     * state.
     *
     * @since 4.2
     */
    public DigestScheme(final ChallengeState challengeState) {
        super(challengeState);
        this.complete = false;
    }

    public DigestScheme() {
        this(null);
    }

    /**
     * Processes the Digest challenge.
     *
     * @param header the challenge header
     *
     * @throws MalformedChallengeException is thrown if the authentication challenge
     * is malformed
     */
    @Override
    public void processChallenge(
            final Header header) throws MalformedChallengeException {
        super.processChallenge(header);
        this.complete = true;
    }

    /**
     * Tests if the Digest authentication process has been completed.
     *
     * @return <tt>true</tt> if Digest authorization has been processed,
     *   <tt>false</tt> otherwise.
     */
    public boolean isComplete() {
        // A "stale" challenge means the nonce expired: credentials are fine
        // but the exchange must be repeated, so report not-complete.
        String s = getParameter("stale");
        if ("true".equalsIgnoreCase(s)) {
            return false;
        } else {
            return this.complete;
        }
    }

    /**
     * Returns textual designation of the digest authentication scheme.
     *
     * @return <code>digest</code>
     */
    public String getSchemeName() {
        return "digest";
    }

    /**
     * Returns <tt>false</tt>. Digest authentication scheme is request based.
     *
     * @return <tt>false</tt>.
     */
    public boolean isConnectionBased() {
        return false;
    }

    // NOTE(review): public API name contains a typo ("Paramter"); cannot be
    // renamed without breaking callers.
    public void overrideParamter(final String name, final String value) {
        getParameters().put(name, value);
    }

    /**
     * @deprecated (4.2) Use {@link ContextAwareAuthScheme#authenticate(Credentials, HttpRequest, org.apache.http.protocol.HttpContext)}
     */
    @Deprecated
    public Header authenticate(
            final Credentials credentials,
            final HttpRequest request) throws AuthenticationException {
        return authenticate(credentials, request, new BasicHttpContext());
    }

    /**
     * Produces a digest authorization string for the given set of
     * {@link Credentials}, method name and URI.
     *
     * @param credentials A set of credentials to be used for authentication
     * @param request The request being authenticated
     *
     * @throws org.apache.http.auth.InvalidCredentialsException if authentication credentials
     *         are not valid or not applicable for this authentication scheme
     * @throws AuthenticationException if authorization string cannot
     *   be generated due to an authentication failure
     *
     * @return a digest authorization string
     */
    @Override
    public Header authenticate(
            final Credentials credentials,
            final HttpRequest request,
            final HttpContext context) throws AuthenticationException {

        if (credentials == null) {
            throw new IllegalArgumentException("Credentials may not be null");
        }
        if (request == null) {
            throw new IllegalArgumentException("HTTP request may not be null");
        }
        // realm and nonce are mandatory challenge directives (RFC 2617 3.2.1).
        if (getParameter("realm") == null) {
            throw new AuthenticationException("missing realm in challenge");
        }
        if (getParameter("nonce") == null) {
            throw new AuthenticationException("missing nonce in challenge");
        }
        // Add method name and request-URI to the parameter map
        getParameters().put("methodname", request.getRequestLine().getMethod());
        getParameters().put("uri", request.getRequestLine().getUri());
        String charset = getParameter("charset");
        if (charset == null) {
            charset = AuthParams.getCredentialCharset(request.getParams());
getParameters().put("charset", charset); } return createDigestHeader(credentials, request); } private static MessageDigest createMessageDigest( final String digAlg) throws UnsupportedDigestAlgorithmException { try { return MessageDigest.getInstance(digAlg); } catch (Exception e) { throw new UnsupportedDigestAlgorithmException( "Unsupported algorithm in HTTP Digest authentication: " + digAlg); } } /** * Creates digest-response header as defined in RFC2617. * * @param credentials User credentials * * @return The digest-response as String. */ private Header createDigestHeader( final Credentials credentials, final HttpRequest request) throws AuthenticationException { String uri = getParameter("uri"); String realm = getParameter("realm"); String nonce = getParameter("nonce"); String opaque = getParameter("opaque"); String method = getParameter("methodname"); String algorithm = getParameter("algorithm"); Set<String> qopset = new HashSet<String>(8); int qop = QOP_UNKNOWN; String qoplist = getParameter("qop"); if (qoplist != null) { StringTokenizer tok = new StringTokenizer(qoplist, ","); while (tok.hasMoreTokens()) { String variant = tok.nextToken().trim(); qopset.add(variant.toLowerCase(Locale.US)); } if (request instanceof HttpEntityEnclosingRequest && qopset.contains("auth-int")) { qop = QOP_AUTH_INT; } else if (qopset.contains("auth")) { qop = QOP_AUTH; } } else { qop = QOP_MISSING; } if (qop == QOP_UNKNOWN) { throw new AuthenticationException("None of the qop methods is supported: " + qoplist); } // If an algorithm is not specified, default to MD5. 
if (algorithm == null) { algorithm = "MD5"; } String charset = getParameter("charset"); if (charset == null) { charset = "ISO-8859-1"; } String digAlg = algorithm; if (digAlg.equalsIgnoreCase("MD5-sess")) { digAlg = "MD5"; } MessageDigest digester; try { digester = createMessageDigest(digAlg); } catch (UnsupportedDigestAlgorithmException ex) { throw new AuthenticationException("Unsuppported digest algorithm: " + digAlg); } String uname = credentials.getUserPrincipal().getName(); String pwd = credentials.getPassword(); if (nonce.equals(this.lastNonce)) { nounceCount++; } else { nounceCount = 1; cnonce = null; lastNonce = nonce; } StringBuilder sb = new StringBuilder(256); Formatter formatter = new Formatter(sb, Locale.US); formatter.format("%08x", nounceCount); String nc = sb.toString(); if (cnonce == null) { cnonce = createCnonce(); } a1 = null; a2 = null; // 3.2.2.2: Calculating digest if (algorithm.equalsIgnoreCase("MD5-sess")) { // H( unq(username-value) ":" unq(realm-value) ":" passwd ) // ":" unq(nonce-value) // ":" unq(cnonce-value) // calculated one per session sb.setLength(0); sb.append(uname).append(':').append(realm).append(':').append(pwd); String checksum = encode(digester.digest(EncodingUtils.getBytes(sb.toString(), charset))); sb.setLength(0); sb.append(checksum).append(':').append(nonce).append(':').append(cnonce); a1 = sb.toString(); } else { // unq(username-value) ":" unq(realm-value) ":" passwd sb.setLength(0); sb.append(uname).append(':').append(realm).append(':').append(pwd); a1 = sb.toString(); } String hasha1 = encode(digester.digest(EncodingUtils.getBytes(a1, charset))); if (qop == QOP_AUTH) { // Method ":" digest-uri-value a2 = method + ':' + uri; } else if (qop == QOP_AUTH_INT) { // Method ":" digest-uri-value ":" H(entity-body) HttpEntity entity = null; if (request instanceof HttpEntityEnclosingRequest) { entity = ((HttpEntityEnclosingRequest) request).getEntity(); } if (entity != null && !entity.isRepeatable()) { // If the entity is not 
repeatable, try falling back onto QOP_AUTH if (qopset.contains("auth")) { qop = QOP_AUTH; a2 = method + ':' + uri; } else { throw new AuthenticationException("Qop auth-int cannot be used with " + "a non-repeatable entity"); } } else { HttpEntityDigester entityDigester = new HttpEntityDigester(digester); try { if (entity != null) { entity.writeTo(entityDigester); } entityDigester.close(); } catch (IOException ex) { throw new AuthenticationException("I/O error reading entity content", ex); } a2 = method + ':' + uri + ':' + encode(entityDigester.getDigest()); } } else { a2 = method + ':' + uri; } String hasha2 = encode(digester.digest(EncodingUtils.getBytes(a2, charset))); // 3.2.2.1 String digestValue; if (qop == QOP_MISSING) { sb.setLength(0); sb.append(hasha1).append(':').append(nonce).append(':').append(hasha2); digestValue = sb.toString(); } else { sb.setLength(0); sb.append(hasha1).append(':').append(nonce).append(':').append(nc).append(':') .append(cnonce).append(':').append(qop == QOP_AUTH_INT ? "auth-int" : "auth") .append(':').append(hasha2); digestValue = sb.toString(); } String digest = encode(digester.digest(EncodingUtils.getAsciiBytes(digestValue))); CharArrayBuffer buffer = new CharArrayBuffer(128); if (isProxy()) { buffer.append(AUTH.PROXY_AUTH_RESP); } else { buffer.append(AUTH.WWW_AUTH_RESP); } buffer.append(": Digest "); List<BasicNameValuePair> params = new ArrayList<BasicNameValuePair>(20); params.add(new BasicNameValuePair("username", uname)); params.add(new BasicNameValuePair("realm", realm)); params.add(new BasicNameValuePair("nonce", nonce)); params.add(new BasicNameValuePair("uri", uri)); params.add(new BasicNameValuePair("response", digest)); if (qop != QOP_MISSING) { params.add(new BasicNameValuePair("qop", qop == QOP_AUTH_INT ? 
"auth-int" : "auth")); params.add(new BasicNameValuePair("nc", nc)); params.add(new BasicNameValuePair("cnonce", cnonce)); } if (algorithm != null) { params.add(new BasicNameValuePair("algorithm", algorithm)); } if (opaque != null) { params.add(new BasicNameValuePair("opaque", opaque)); } for (int i = 0; i < params.size(); i++) { BasicNameValuePair param = params.get(i); if (i > 0) { buffer.append(", "); } boolean noQuotes = "nc".equals(param.getName()) || "qop".equals(param.getName()); BasicHeaderValueFormatter.DEFAULT.formatNameValuePair(buffer, param, !noQuotes); } return new BufferedHeader(buffer); } String getCnonce() { return cnonce; } String getA1() { return a1; } String getA2() { return a2; } /** * Encodes the 128 bit (16 bytes) MD5 digest into a 32 characters long * <CODE>String</CODE> according to RFC 2617. * * @param binaryData array containing the digest * @return encoded MD5, or <CODE>null</CODE> if encoding failed */ static String encode(byte[] binaryData) { int n = binaryData.length; char[] buffer = new char[n * 2]; for (int i = 0; i < n; i++) { int low = (binaryData[i] & 0x0f); int high = ((binaryData[i] & 0xf0) >> 4); buffer[i * 2] = HEXADECIMAL[high]; buffer[(i * 2) + 1] = HEXADECIMAL[low]; } return new String(buffer); } /** * Creates a random cnonce value based on the current time. * * @return The cnonce value as String. */ public static String createCnonce() { SecureRandom rnd = new SecureRandom(); byte[] tmp = new byte[8]; rnd.nextBytes(tmp); return encode(tmp); } }
/* * Copyright 2017 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jbpm.workflow.instance.impl; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.atomic.AtomicLong; import java.util.function.Function; import java.util.function.Supplier; import java.util.regex.Matcher; import java.util.stream.Collectors; import org.drools.core.common.InternalKnowledgeRuntime; import org.drools.mvel.MVELSafeHelper; import org.jbpm.process.core.ContextContainer; import org.jbpm.process.core.context.variable.VariableScope; import org.jbpm.process.core.correlation.CorrelationManager; import org.jbpm.process.core.timer.BusinessCalendar; import org.jbpm.process.core.timer.DateTimeUtils; import org.jbpm.process.core.timer.Timer; import org.jbpm.process.instance.ContextInstance; import org.jbpm.process.instance.InternalProcessRuntime; import org.jbpm.process.instance.context.variable.VariableScopeInstance; import org.jbpm.process.instance.impl.ProcessInstanceImpl; import org.jbpm.process.instance.timer.TimerInstance; import org.jbpm.util.PatternConstants; import org.jbpm.workflow.core.DroolsAction; import org.jbpm.workflow.core.impl.NodeImpl; import org.jbpm.workflow.core.node.ActionNode; import 
org.jbpm.workflow.core.node.AsyncEventNode; import org.jbpm.workflow.core.node.BoundaryEventNode; import org.jbpm.workflow.core.node.CompositeContextNode; import org.jbpm.workflow.core.node.DynamicNode; import org.jbpm.workflow.core.node.EndNode; import org.jbpm.workflow.core.node.EventNode; import org.jbpm.workflow.core.node.EventNodeInterface; import org.jbpm.workflow.core.node.EventSubProcessNode; import org.jbpm.workflow.core.node.ForEachNode; import org.jbpm.workflow.core.node.StateBasedNode; import org.jbpm.workflow.core.node.StateNode; import org.jbpm.workflow.instance.NodeInstance; import org.jbpm.workflow.instance.WorkflowProcessInstance; import org.jbpm.workflow.instance.node.CompositeContextNodeInstance; import org.jbpm.workflow.instance.node.CompositeNodeInstance; import org.jbpm.workflow.instance.node.DynamicNodeInstance; import org.jbpm.workflow.instance.node.EndNodeInstance; import org.jbpm.workflow.instance.node.EventBasedNodeInstanceInterface; import org.jbpm.workflow.instance.node.EventNodeInstance; import org.jbpm.workflow.instance.node.EventNodeInstanceInterface; import org.jbpm.workflow.instance.node.EventSubProcessNodeInstance; import org.jbpm.workflow.instance.node.FaultNodeInstance; import org.jbpm.workflow.instance.node.ForEachNodeInstance; import org.kie.api.definition.process.Node; import org.kie.api.definition.process.NodeContainer; import org.kie.api.definition.process.WorkflowProcess; import org.kie.api.runtime.EnvironmentName; import org.kie.api.runtime.KieRuntime; import org.kie.api.runtime.manager.RuntimeEngine; import org.kie.api.runtime.manager.RuntimeManager; import org.kie.api.runtime.process.EventListener; import org.kie.api.runtime.process.NodeInstanceContainer; import org.kie.api.runtime.process.ProcessInstance; import org.kie.api.runtime.rule.AgendaFilter; import org.kie.internal.process.CorrelationKey; import org.kie.internal.runtime.manager.InternalRuntimeManager; import 
org.kie.internal.runtime.manager.SessionNotFoundException;
import org.kie.internal.runtime.manager.context.CaseContext;
import org.kie.internal.runtime.manager.context.ProcessInstanceIdContext;
import org.mvel2.integration.VariableResolver;
import org.mvel2.integration.VariableResolverFactory;
import org.mvel2.integration.impl.ImmutableDefaultFactory;
import org.mvel2.integration.impl.SimpleValueResolver;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import static org.jbpm.workflow.instance.NodeInstance.CancelType.OBSOLETE;
import static org.jbpm.workflow.instance.impl.DummyEventListener.EMPTY_EVENT_LISTENER;

/**
 * Default implementation of a RuleFlow process instance.
 * <p>
 * Acts as the container for the live {@link NodeInstance}s of a single
 * workflow process instance, manages their ids and iteration levels, and
 * routes signals/events to them.
 */
public abstract class WorkflowProcessInstanceImpl extends ProcessInstanceImpl
        implements WorkflowProcessInstance, org.jbpm.workflow.instance.NodeInstanceContainer {

    private static final long serialVersionUID = 510l;
    private static final Logger logger = LoggerFactory.getLogger(WorkflowProcessInstanceImpl.class);

    // Top-level node instances currently active in this process instance.
    // NOTE(review): the stray second ';' is a harmless empty declaration.
    private final List<NodeInstance> nodeInstances = new ArrayList<NodeInstance>();;

    // Source of unique node instance ids; -1 so the first id handed out is 0.
    private AtomicLong singleNodeInstanceCounter = new AtomicLong(-1);

    // Listeners registered per event type; "external" ones are scope=external.
    private Map<String, List<EventListener>> eventListeners = new HashMap<>();
    private Map<String, List<EventListener>> externalEventListeners = new HashMap<>();

    private List<String> completedNodeIds = new ArrayList<>();
    // Non-null only while a signal is being dispatched (see signalEvent).
    private List<String> activatingNodeIds;
    // Loop-protection iteration level per node unique id.
    private Map<String, Integer> iterationLevels = new HashMap<>();
    private int currentLevel;

    private boolean persisted = false;
    private Object faultData;

    private boolean signalCompletion = true;

    private String deploymentId;
    private String correlationKey;

    private Date startDate;

    // SLA tracking state; slaTimerId is -1 while no SLA timer is registered.
    private int slaCompliance = SLA_NA;
    private Date slaDueDate;
    private long slaTimerId = -1;

    private AgendaFilter agendaFilter;

    @Override
    public NodeContainer getNodeContainer() {
        return getWorkflowProcess();
    }

    /** Registers a node instance, assigning it a fresh id if it has none yet. */
    @Override
    public void addNodeInstance(final NodeInstance nodeInstance) {
        if (nodeInstance.getId() == -1) {
            // assign new id only if it does not exist as it might already be set by marshalling
            // it's important to keep same ids of node instances as they might be references e.g. exclusive group
            long id = singleNodeInstanceCounter.incrementAndGet();
            ((NodeInstanceImpl) nodeInstance).setId(id);
        }
        this.nodeInstances.add(nodeInstance);
    }

    /**
     * Returns (and records) the iteration level for the given node unique id,
     * used for loop protection. Disabled entirely via the
     * "jbpm.loop.level.disabled" system property (always returns 1 then).
     */
    @Override
    public int getLevelForNode(String uniqueID) {
        if ("true".equalsIgnoreCase(System.getProperty("jbpm.loop.level.disabled"))) {
            return 1;
        }
        Integer value = iterationLevels.get(uniqueID);
        if (value == null && currentLevel == 0) {
            value = 1;
        } else if ((value == null && currentLevel > 0) || (value != null && currentLevel > 0 && value > currentLevel)) {
            value = currentLevel;
        } else {
            // node seen before at or below the current level: bump its level
            value++;
        }
        iterationLevels.put(uniqueID, value);
        return value;
    }

    /** Unregisters a node instance; also retracts it if it was inserted as a fact. */
    @Override
    public void removeNodeInstance(final NodeInstance nodeInstance) {
        if (((NodeInstanceImpl) nodeInstance).isInversionOfControl()) {
            getKnowledgeRuntime().delete(
                    getKnowledgeRuntime().getFactHandle(nodeInstance));
        }
        this.nodeInstances.remove(nodeInstance);
    }

    @Override
    public Collection<org.kie.api.runtime.process.NodeInstance> getNodeInstances() {
        return new ArrayList<>(getNodeInstances(false));
    }

    /**
     * Returns the node instances of this container; when recursive also
     * collects node instances of nested containers (composite nodes etc.).
     */
    @Override
    public Collection<NodeInstance> getNodeInstances(boolean recursive) {
        Collection<NodeInstance> result = nodeInstances;
        if (recursive) {
            result = new ArrayList<>(result);
            for (Iterator<NodeInstance> iterator = nodeInstances.iterator(); iterator
                    .hasNext(); ) {
                NodeInstance nodeInstance = iterator.next();
                if (nodeInstance instanceof NodeInstanceContainer) {
                    result.addAll(((org.jbpm.workflow.instance.NodeInstanceContainer) nodeInstance).getNodeInstances(true));
                }
            }
        }
        return Collections.unmodifiableCollection(result);
    }

    /** Finds a top-level node instance by its instance id, or null. */
    @Override
    public NodeInstance getNodeInstance(long nodeInstanceId) {
        for (NodeInstance nodeInstance : nodeInstances) {
            if (nodeInstance.getId() == nodeInstanceId) {
                return nodeInstance;
            }
        }
        return null;
    }

    /** Finds the first node instance created from the given node definition id, or null. */
    public NodeInstance getNodeInstanceByNodeId(long nodeId, boolean recursive) {
        for (NodeInstance nodeInstance : getNodeInstances(recursive)) {
            if (nodeInstance.getNodeId() == nodeId) {
                return nodeInstance;
            }
        }
        return null;
    }

    /** Finds a node instance by its instance id, optionally searching nested containers. */
    @Override
    public NodeInstance getNodeInstance(long nodeInstanceId, boolean recursive) {
        for (NodeInstance nodeInstance : getNodeInstances(recursive)) {
            if (nodeInstance.getId() == nodeInstanceId) {
                return nodeInstance;
            }
        }
        return null;
    }

    /** Collects the unique ids of all currently active nodes (recursively). */
    public List<String> getActiveNodeIds() {
        List<String> result = new ArrayList<>();
        addActiveNodeIds(this, result);
        return result;
    }

    private void addActiveNodeIds(NodeInstanceContainer container, List<String> result) {
        for (org.kie.api.runtime.process.NodeInstance nodeInstance : container.getNodeInstances()) {
            result.add(((NodeImpl) nodeInstance.getNode()).getUniqueId());
            if (nodeInstance instanceof NodeInstanceContainer) {
                addActiveNodeIds((NodeInstanceContainer) nodeInstance, result);
            }
        }
    }

    /**
     * Returns the first top-level node instance for the given node definition
     * id at the current iteration level, or null.
     */
    @Override
    public NodeInstance getFirstNodeInstance(final long nodeId) {
        for (final Iterator<NodeInstance> iterator = this.nodeInstances
                .iterator(); iterator.hasNext(); ) {
            final NodeInstance nodeInstance = iterator.next();
            if (nodeInstance.getNodeId() == nodeId && nodeInstance.getLevel() == getCurrentLevel()) {
                return nodeInstance;
            }
        }
        return null;
    }

    /** Returns all top-level node instances created from the given node definition id. */
    public List<NodeInstance> getNodeInstances(final long nodeId) {
        List<NodeInstance> result = new ArrayList<>();
        for (final Iterator<NodeInstance> iterator = this.nodeInstances
                .iterator(); iterator.hasNext(); ) {
            final NodeInstance nodeInstance = iterator.next();
            if (nodeInstance.getNodeId() == nodeId) {
                result.add(nodeInstance);
            }
        }
        return result;
    }

    /** Same as above but searches an explicit snapshot of node instances. */
    public List<NodeInstance> getNodeInstances(final long nodeId, final List<NodeInstance> currentView) {
        List<NodeInstance> result = new ArrayList<>();
        for (final Iterator<NodeInstance> iterator = currentView.iterator(); iterator.hasNext();) {
            final NodeInstance nodeInstance = iterator.next();
            if (nodeInstance.getNodeId() == nodeId) {
                result.add(nodeInstance);
            }
        }
        return result;
    }

    @Override
    public
NodeInstance getNodeInstance(final Node node) {
        return getNodeInstance(node, true);
    }

    /**
     * Creates (or looks up) a node instance for the given node definition.
     * When wrap is true, nodes marked for async execution are wrapped in an
     * AsyncEventNode so they are triggered asynchronously.
     */
    public NodeInstance getNodeInstance(final Node node, boolean wrap) {
        Node actualNode = node;
        // async continuation handling
        if (node instanceof AsyncEventNode) {
            actualNode = ((AsyncEventNode) node).getActualNode();
        } else if (wrap && useAsync(node)) {
            actualNode = new AsyncEventNode(node);
        }

        NodeInstanceFactory conf = NodeInstanceFactoryRegistry.getInstance(getKnowledgeRuntime().getEnvironment()).getProcessNodeInstanceFactory(actualNode);
        if (conf == null) {
            throw new IllegalArgumentException("Illegal node type: " + node.getClass());
        }
        NodeInstanceImpl nodeInstance = (NodeInstanceImpl) conf.getNodeInstance(actualNode, this, this);
        if (nodeInstance == null) {
            throw new IllegalArgumentException("Illegal node type: " + node.getClass());
        }
        if (nodeInstance.isInversionOfControl()) {
            getKnowledgeRuntime().insert(nodeInstance);
        }
        return nodeInstance;
    }

    public long getNodeInstanceCounter() {
        return singleNodeInstanceCounter.get();
    }

    // Used by marshalling to restore the id counter.
    public void internalSetNodeInstanceCounter(long nodeInstanceCounter) {
        this.singleNodeInstanceCounter = new AtomicLong(nodeInstanceCounter);
    }

    public AtomicLong internalGetNodeInstanceCounter() {
        return this.singleNodeInstanceCounter;
    }

    public WorkflowProcess getWorkflowProcess() {
        return (WorkflowProcess) getProcess();
    }

    /** Resolves a process variable by name, handling disconnected instances. */
    @Override
    public Object getVariable(String name) {
        // for disconnected process instances, try going through the variable scope instances
        // (as the default variable scope cannot be retrieved as the link to the process could
        // be null and the associated working memory is no longer accessible)
        if (getKnowledgeRuntime() == null) {
            List<ContextInstance> variableScopeInstances = getContextInstances(VariableScope.VARIABLE_SCOPE);
            if (variableScopeInstances != null && variableScopeInstances.size() == 1) {
                for (ContextInstance contextInstance : variableScopeInstances) {
                    Object value = ((VariableScopeInstance) contextInstance).getVariable(name);
                    if (value != null) {
                        return value;
                    }
                }
            }
            return null;
        }
        // else retrieve the variable scope
        VariableScopeInstance variableScopeInstance = (VariableScopeInstance) getContextInstance(VariableScope.VARIABLE_SCOPE);
        if (variableScopeInstance == null) {
            return null;
        }
        return variableScopeInstance.getVariable(name);
    }

    /** Looks up a variable in the supplied scope instances; first non-null wins. */
    public Object getVariable(String name, List<ContextInstance> variableScopeInstances) {
        if (variableScopeInstances != null) {
            for (ContextInstance contextInstance : variableScopeInstances) {
                Object value = ((VariableScopeInstance) contextInstance).getVariable(name);
                if (value != null) {
                    return value;
                }
            }
        }
        return null;
    }

    /** Returns all process variables, handling disconnected instances; may return null. */
    public Map<String, Object> getVariables() {
        // for disconnected process instances, try going through the variable scope instances
        // (as the default variable scope cannot be retrieved as the link to the process could
        // be null and the associated working memory is no longer accessible)
        if (getKnowledgeRuntime() == null) {
            List<ContextInstance> variableScopeInstances = getContextInstances(VariableScope.VARIABLE_SCOPE);
            if (variableScopeInstances == null) {
                return null;
            }
            Map<String, Object> result = new HashMap<>();
            for (ContextInstance contextInstance : variableScopeInstances) {
                Map<String, Object> variables = ((VariableScopeInstance) contextInstance).getVariables();
                result.putAll(variables);
            }
            return result;
        }
        // else retrieve the variable scope
        VariableScopeInstance variableScopeInstance = (VariableScopeInstance) getContextInstance(VariableScope.VARIABLE_SCOPE);
        if (variableScopeInstance == null) {
            return null;
        }
        return variableScopeInstance.getVariables();
    }

    /** Sets a process variable after validating it against the variable definition. */
    @Override
    public void setVariable(String name, Object value) {
        VariableScope variableScope = (VariableScope) ((ContextContainer) getProcess()).getDefaultContext(VariableScope.VARIABLE_SCOPE);
        VariableScopeInstance variableScopeInstance = (VariableScopeInstance) getContextInstance(VariableScope.VARIABLE_SCOPE);
        if (variableScopeInstance == null) {
            throw new IllegalArgumentException("No variable scope found.");
        }
        variableScopeInstance.setVariable(name, variableScope.validateVariable(getProcessName(), name, value));
    }

    @Override
    public void setState(final int state, String outcome, Object faultData) {
        this.faultData = faultData;
        setState(state, outcome);
    }

    /**
     * Transitions this process instance to the given state. On completion or
     * abort it fires the process events, cancels remaining node instances and
     * the SLA timer, deregisters listeners and signals the parent (if any).
     */
    @Override
    public void setState(final int state, String outcome) {
        if(getMetaData().containsKey("SUB_PROCESS_INTERRUPTION") || getState() == ProcessInstance.STATE_COMPLETED
                || getState() == ProcessInstance.STATE_ABORTED) {
            // avoid duplication calls
            return;
        }
        // TODO move most of this to ProcessInstanceImpl
        if (state == ProcessInstance.STATE_COMPLETED
                || state == ProcessInstance.STATE_ABORTED) {
            if (this.slaCompliance == SLA_PENDING) {
                if (System.currentTimeMillis() > slaDueDate.getTime()) {
                    // completion of the process instance is after expected SLA due date, mark it accordingly
                    this.slaCompliance = SLA_VIOLATED;
                } else {
                    this.slaCompliance = state == ProcessInstance.STATE_COMPLETED ? SLA_MET : SLA_ABORTED;
                }
            }
            InternalKnowledgeRuntime kruntime = getKnowledgeRuntime();
            InternalProcessRuntime processRuntime = (InternalProcessRuntime) kruntime.getProcessRuntime();
            processRuntime.getProcessEventSupport().fireBeforeProcessCompleted(this, kruntime);
            // JBPM-8094 - set state after event
            super.setState(state, outcome);

            // deactivate all node instances of this process instance
            while (!nodeInstances.isEmpty()) {
                NodeInstance nodeInstance = nodeInstances.get(0);
                if (state == STATE_COMPLETED) {
                    nodeInstance.cancel(OBSOLETE);
                } else {
                    nodeInstance.cancel();
                }
            }
            if (this.slaTimerId > -1) {
                processRuntime.getTimerManager().cancelTimer(this.slaTimerId);
                logger.debug("SLA Timer {} has been canceled", this.slaTimerId);
            }
            removeEventListeners();
            processRuntime.getProcessInstanceManager().removeProcessInstance(this);
            processRuntime.getProcessEventSupport().fireAfterProcessCompleted(this, kruntime);

            if (isSignalCompletion()) {
                RuntimeManager manager = (RuntimeManager)
kruntime.getEnvironment().get(EnvironmentName.RUNTIME_MANAGER);
                if (getParentProcessInstanceId() > 0 && manager != null) {
                    try {
                        // signal the parent process instance (possibly in another
                        // session/case) that this child has completed
                        org.kie.api.runtime.manager.Context<?> context = ProcessInstanceIdContext.get(getParentProcessInstanceId());
                        String caseId = (String) kruntime.getEnvironment().get(EnvironmentName.CASE_ID);
                        if (caseId != null) {
                            context = CaseContext.get(caseId);
                        }
                        RuntimeEngine runtime = manager.getRuntimeEngine(context);
                        KieRuntime managedkruntime = runtime.getKieSession();
                        managedkruntime.signalEvent("processInstanceCompleted:" + getId(), this);
                    } catch (SessionNotFoundException e) {
                        // in case no session is found for parent process let's skip signal for process instance completion
                    }
                } else {
                    processRuntime.getSignalManager().signalEvent("processInstanceCompleted:" + getId(), this);
                }
            }
        } else {
            super.setState(state, outcome);
        }
    }

    @Override
    public void setState(final int state) {
        setState(state, null);
    }

    /** Detaches this instance from the session: removes all event listeners. */
    @Override
    public void disconnect() {
        removeEventListeners();
        unregisterExternalEventNodeListeners();

        for (NodeInstance nodeInstance : nodeInstances) {
            if (nodeInstance instanceof EventBasedNodeInstanceInterface) {
                ((EventBasedNodeInstanceInterface) nodeInstance).removeEventListeners();
            }
        }
        super.disconnect();
    }

    /** Re-attaches this instance to a session: re-registers all event listeners. */
    @Override
    public void reconnect() {
        validate();
        super.reconnect();
        for (NodeInstance nodeInstance : nodeInstances) {
            if (nodeInstance instanceof EventBasedNodeInstanceInterface) {
                ((EventBasedNodeInstanceInterface) nodeInstance)
                        .addEventListeners();
            }
        }
        registerExternalEventNodeListeners();
    }

    @Override
    public String toString() {
        final StringBuilder sb = new StringBuilder("WorkflowProcessInstance");
        sb.append(getId());
        sb.append(" [processId=");
        sb.append(getProcessId());
        sb.append(",state=");
        sb.append(getState());
        sb.append("]");
        return sb.toString();
    }

    @Override
    public void start() {
        start(null);
    }

    /**
     * Starts the process instance: records the start date, registers external
     * event listeners and eagerly triggers timer-based event sub processes.
     */
    @Override
    public void start(String trigger) {
        synchronized (this) {
            this.startDate = new Date();
            registerExternalEventNodeListeners();
            // activate timer event sub processes
            Node[] nodes = getNodeContainer().getNodes();
            for (Node node : nodes) {
                if (node instanceof EventSubProcessNode) {
                    Map<Timer, DroolsAction> timers = ((EventSubProcessNode) node).getTimers();
                    if (timers != null && !timers.isEmpty()) {
                        EventSubProcessNodeInstance eventSubprocess = (EventSubProcessNodeInstance) getNodeInstance(node);
                        eventSubprocess.trigger(null, org.jbpm.workflow.core.Node.CONNECTION_DEFAULT_TYPE);
                    }
                }
            }
            super.start(trigger);
        }
    }

    /** Arms the SLA timer if the process defines a "customSLADueDate". */
    @Override
    public void configureSLA() {
        String slaDueDateExpression = (String) getProcess().getMetaData().get("customSLADueDate");
        if (slaDueDateExpression != null) {
            TimerInstance timer = configureSLATimer(slaDueDateExpression);
            if (timer != null) {
                this.slaTimerId = timer.getId();
                this.slaDueDate = new Date(System.currentTimeMillis() + timer.getDelay());
                this.slaCompliance = SLA_PENDING;
                logger.debug("SLA for process instance {} is PENDING with due date {}", this.getId(), this.slaDueDate);
            }
        }
    }

    public TimerInstance configureSLATimer(String slaDueDateExpression) {
        return this.configureSLATimer(slaDueDateExpression, null);
    }

    /**
     * Builds (and, when timer-based SLA tracking is enabled, registers) an SLA
     * timer for the given due-date expression. The expression may reference
     * process variables; a business calendar is honored when configured.
     * Returns null when the expression resolves to nothing.
     */
    public TimerInstance configureSLATimer(String slaDueDateExpression, String timerName) {
        // setup SLA if provided
        slaDueDateExpression = resolveVariable(slaDueDateExpression);
        if (slaDueDateExpression == null || slaDueDateExpression.trim().isEmpty()) {
            logger.debug("Sla due date expression resolved to no value '{}'", slaDueDateExpression);
            return null;
        }
        logger.debug("SLA due date is set to {}", slaDueDateExpression);
        InternalKnowledgeRuntime kruntime = getKnowledgeRuntime();
        long duration = -1;
        if (kruntime != null && kruntime.getEnvironment().get("jbpm.business.calendar") != null) {
            BusinessCalendar businessCalendar = (BusinessCalendar) kruntime.getEnvironment().get("jbpm.business.calendar");
            duration = businessCalendar.calculateBusinessTimeAsDuration(slaDueDateExpression);
        } else {
            duration = DateTimeUtils.parseDuration(slaDueDateExpression);
        }

        TimerInstance timerInstance = new TimerInstance();
        timerInstance.setId(-1);
        timerInstance.setDelay(duration);
        timerInstance.setPeriod(0);
        timerInstance.setName(timerName);
        if (useTimerSLATracking()) {
            ((InternalProcessRuntime) kruntime.getProcessRuntime()).getTimerManager().registerTimer(timerInstance, this);
        }
        return timerInstance;
    }

    /**
     * Registers listeners for all externally scoped events declared by the
     * process: external event nodes, event sub processes, dynamic node
     * activation events, and compensation.
     */
    protected void registerExternalEventNodeListeners() {
        for (Node node : getWorkflowProcess().getNodes()) {
            if (node instanceof EventNode && "external".equals(((EventNode) node).getScope())) {
                String eventType = ((EventNode) node).getType();
                if (isVariableExpression(eventType)) {
                    addEventListener(resolveVariable(eventType), EMPTY_EVENT_LISTENER, true);
                } else {
                    addEventListener(eventType, EMPTY_EVENT_LISTENER, true);
                }
            } else if (node instanceof EventSubProcessNode) {
                List<String> events = ((EventSubProcessNode) node).getEvents();
                for (String type : events) {
                    addEventListener(type, EMPTY_EVENT_LISTENER, true);
                    if (isVariableExpression(type)) {
                        addEventListener(resolveVariable(type), EMPTY_EVENT_LISTENER, true);
                    }
                }
            } else if (node instanceof DynamicNode && ((DynamicNode) node).getActivationEventName() != null) {
                addEventListener(((DynamicNode) node).getActivationEventName(), EMPTY_EVENT_LISTENER, true);
            }
        }
        if (getWorkflowProcess().getMetaData().containsKey("Compensation")) {
            addEventListener("Compensation", new CompensationEventListener(this), true);
        }
    }

    /** Removes the listeners registered for externally scoped event nodes. */
    private void unregisterExternalEventNodeListeners() {
        for (Node node : getWorkflowProcess().getNodes()) {
            if (node instanceof EventNode && "external".equals(((EventNode) node).getScope())) {
                String eventType = ((EventNode) node).getType();
                if (isVariableExpression(eventType)) {
                    removeEventListener(resolveVariable(eventType), EMPTY_EVENT_LISTENER, true);
                } else {
                    removeEventListener(eventType, EMPTY_EVENT_LISTENER, true);
                }
            }
        }
    }

    /** Marks the SLA as violated (once) and fires the before/after SLA events. */
    private void handleSLAViolation() {
        if (slaCompliance == SLA_PENDING) {
            InternalKnowledgeRuntime kruntime = getKnowledgeRuntime();
            InternalProcessRuntime processRuntime = (InternalProcessRuntime) kruntime.getProcessRuntime();
            processRuntime.getProcessEventSupport().fireBeforeSLAViolated(this, kruntime);
            logger.debug("SLA violated on process instance {}", getId());
            this.slaCompliance = SLA_VIOLATED;
            this.slaTimerId = -1;
            processRuntime.getProcessEventSupport().fireAfterSLAViolated(this, kruntime);
        }
    }

    /**
     * Dispatches a signal to this process instance: handles SLA timer events,
     * notifies registered listeners, then routes the event to matching nodes;
     * for dynamic processes, ad-hoc nodes matching the event name are triggered.
     */
    @Override
    @SuppressWarnings("unchecked")
    public void signalEvent(String type, Object event) {
        logger.debug("Signal {} received with data {} in process instance {}", type, event, getId());
        synchronized (this) {
            if (getState() != ProcessInstance.STATE_ACTIVE) {
                return;
            }
            if ("timerTriggered".equals(type)) {
                TimerInstance timer = (TimerInstance) event;
                if (timer.getId() == slaTimerId) {
                    handleSLAViolation();
                    // no need to pass the event along as it was purely for SLA tracking
                    return;
                }
            }
            if ("slaViolation".equals(type)) {
                handleSLAViolation();
                // no need to pass the event along as it was purely for SLA tracking
                return;
            }

            try {
                this.activatingNodeIds = new ArrayList<>();

                List<EventListener> listeners = eventListeners.get(type);
                if (listeners != null) {
                    for (EventListener listener : listeners) {
                        listener.signalEvent(type, event);
                    }
                }
                listeners = externalEventListeners.get(type);
                if (listeners != null) {
                    for (EventListener listener : listeners) {
                        listener.signalEvent(type, event);
                    }
                }

                signal(this, (node) -> this.getNodeInstance(node), () -> this.getWorkflowProcess().getNodes(), type, event);

                if (((org.jbpm.workflow.core.WorkflowProcess) getWorkflowProcess()).isDynamic()) {
                    for (Node node : getWorkflowProcess().getNodes()) {
                        if (type.equals(node.getName()) && node.getIncomingConnections().isEmpty()) {
                            NodeInstance nodeInstance = getNodeInstance(node);
                            if (event != null) {
                                Map<String, Object> dynamicParams = new HashMap<>();
                                if (event instanceof Map) {
                                    dynamicParams.putAll((Map<String, Object>) event);
                                } else {
                                    dynamicParams.put("Data", event);
                                }
                                nodeInstance.setDynamicParameters(dynamicParams);
                            }
                            nodeInstance.trigger(null,
org.jbpm.workflow.core.Node.CONNECTION_DEFAULT_TYPE); } } } } finally { if (this.activatingNodeIds != null) { this.activatingNodeIds.clear(); this.activatingNodeIds = null; } } } } private void signal(NodeInstanceContainer container, Function<Node,NodeInstance> nodeInstanceSupplier, Supplier<Node[]> resolveNodes, String type, Object event) { List<NodeInstance> currentView = container.getNodeInstances().stream().map(e -> (NodeInstance) e).collect(Collectors.toList()); for (Node node : resolveNodes.get()) { if (node instanceof EventNodeInterface && ((EventNodeInterface) node).acceptsEvent(type, event, getEventFilterResolver(this, node, currentView))) { if (node instanceof EventNode && ((EventNode) node).getFrom() == null) { EventNodeInstance eventNodeInstance = (EventNodeInstance) nodeInstanceSupplier.apply(node); eventNodeInstance.signalEvent(type, event); } else { if (node instanceof EventSubProcessNode && (resolveVariables(((EventSubProcessNode) node).getEvents()).contains(type))) { EventSubProcessNodeInstance eventNodeInstance = (EventSubProcessNodeInstance) nodeInstanceSupplier.apply(node); eventNodeInstance.signalEvent(type, event); } if (node instanceof DynamicNode && type.equals(((DynamicNode) node).getActivationEventName())) { DynamicNodeInstance dynamicNodeInstance = (DynamicNodeInstance) nodeInstanceSupplier.apply(node); dynamicNodeInstance.signalEvent(type, event); } else { List<NodeInstance> nodeInstances = getNodeInstances(node.getId(), currentView); if (nodeInstances != null && !nodeInstances.isEmpty()) { for (NodeInstance nodeInstance : nodeInstances) { ((EventNodeInstanceInterface) nodeInstance).signalEvent(type, event); } } } } } } } public Function<String, Object> getEventFilterResolver(NodeInstanceContainer container, Node node, List<NodeInstance> currentView) { if (node instanceof DynamicNode) { // special handling for dynamic node to allow to resolve variables from individual node instances of the dynamic node // instead of just relying on 
process instance's variables return (varExpresion) -> { List<NodeInstance> nodeInstances = getNodeInstances(node.getId(), currentView); if (nodeInstances != null && !nodeInstances.isEmpty()) { StringBuilder st = new StringBuilder(); for (NodeInstance ni : nodeInstances) { String result = resolveVariable(varExpresion, new NodeInstanceResolverFactory(ni)); st.append(result).append("###"); } return st.toString(); } else { NodeInstanceImpl instance = (NodeInstanceImpl) getNodeInstance(node.getId(), true); if (instance != null) { return instance.getVariable(varExpresion); } return null; } }; } else if(node instanceof BoundaryEventNode) { return (varExpresion) -> { Function<String, Object> getScopedVariable; if(container instanceof CompositeContextNodeInstance) { getScopedVariable = (name) -> getVariable(name, ((CompositeContextNodeInstance) container).getContextInstances(VariableScope.VARIABLE_SCOPE)); } else if (container instanceof WorkflowProcessInstanceImpl) { getScopedVariable = (name) -> ((WorkflowProcessInstanceImpl) container).getVariable(name); } else { getScopedVariable = null; } Object value = getScopedVariable.apply(varExpresion); if(value != null) { return value; } VariableResolverFactory resolverFactory = new ImmutableDefaultFactory() { @Override public boolean isResolveable(String varName) { return getScopedVariable.apply(varName) != null; } @Override public VariableResolver getVariableResolver(String varName) { return new SimpleValueResolver(getScopedVariable.apply(varName)); } }; return resolveExpressionVariable(varExpresion, resolverFactory).orElse(null); }; } else if (node instanceof ForEachNode) { return (varExpression) -> { try { // for each can have multiple outcomes 1 per item of the list so it should be computed like that ForEachNodeInstance forEachNodeInstance = (ForEachNodeInstance) getNodeInstanceByNodeId(node.getId(), true); if(forEachNodeInstance == null) { return new Object[0]; } List<CompositeContextNodeInstance> data = 
forEachNodeInstance.getNodeInstances().stream().filter(e -> e instanceof CompositeContextNodeInstance).map(e -> (CompositeContextNodeInstance) e).collect(Collectors.toList()); List<Object> outcome = new ArrayList<>(); for(CompositeContextNodeInstance nodeInstance : data) { Object resolvedValue = resolveExpressionVariable(varExpression, new NodeInstanceResolverFactory(nodeInstance)).orElse(null); if(resolvedValue != null) { outcome.add(resolvedValue); } } return outcome.toArray(); } catch (Throwable t) { return new Object[0]; } }; } else if (node instanceof EventSubProcessNode || node instanceof StateNode) { return (varName) -> { return resolveExpressionVariable(varName, new ProcessInstanceResolverFactory(this)).orElse(null); }; } else if (node instanceof CompositeContextNode) { return (varExpression) -> { List<NodeInstance> nodeInstances = getNodeInstances(node.getId(), currentView); List<Object> outcome = new ArrayList<>(); if (nodeInstances != null && !nodeInstances.isEmpty()) { for(NodeInstance nodeInstance : nodeInstances) { Object resolvedValue = resolveExpressionVariable(varExpression, new NodeInstanceResolverFactory(nodeInstance)).orElse(null); if(resolvedValue != null) { outcome.add(resolvedValue); } } } return outcome.toArray(); }; } else { return (varName) -> { return resolveExpressionVariable(varName, new ProcessInstanceResolverFactory(this)).orElse(null); }; } } private void validate() { InternalRuntimeManager manager = (InternalRuntimeManager) getKnowledgeRuntime().getEnvironment().get("RuntimeManager"); if (manager != null) { // check if process instance is owned by the same manager as the one owning ksession if (hasDeploymentId() && !manager.getIdentifier().equals(getDeploymentId())) { throw new IllegalStateException("Process instance " + getId() + " is owned by another deployment " + getDeploymentId() + " != " + manager.getIdentifier()); } } } protected List<String> resolveVariables(List<String> events) { return 
events.stream().map(this::resolveVariable).collect(Collectors.toList()); } private String resolveVariable(String s) { return resolveVariable(s, new ProcessInstanceResolverFactory(this)); } private String resolveVariable(String s, VariableResolverFactory factory) { Map<String, Object> replacements = new HashMap<>(); Matcher matcher = PatternConstants.PARAMETER_MATCHER.matcher(s); while (matcher.find()) { String paramName = matcher.group(1); if (replacements.get(paramName) == null) { Optional<Object> resolvedValue = resolveExpressionVariable(paramName, factory); replacements.put(paramName, resolvedValue.orElse(paramName)); } } for (Map.Entry<String, Object> replacement : replacements.entrySet()) { s = s.replace("#{" + replacement.getKey() + "}", replacement.getValue().toString()); } return s; } private Optional<Object> resolveExpressionVariable(String paramName, VariableResolverFactory factory) { try { // just in case is not an expression if(factory.isResolveable(paramName)) { return Optional.of(factory.getVariableResolver(paramName).getValue()); } return Optional.ofNullable(MVELSafeHelper.getEvaluator().eval(paramName, factory)); } catch (Throwable t) { logger.error("Could not find variable scope for variable {}", paramName); return Optional.empty(); } } @Override public void addEventListener(String type, EventListener listener, boolean external) { Map<String, List<EventListener>> eventListeners = external ? 
this.externalEventListeners : this.eventListeners; List<EventListener> listeners = eventListeners.computeIfAbsent(type, listenerType -> { final List<EventListener> newListenersList = new CopyOnWriteArrayList<>(); if (external) { ((InternalProcessRuntime) getKnowledgeRuntime().getProcessRuntime()) .getSignalManager().addEventListener(listenerType, this); } return newListenersList; }); listeners.add(listener); } @Override public void removeEventListener(String type, EventListener listener, boolean external) { Map<String, List<EventListener>> eventListeners = external ? this.externalEventListeners : this.eventListeners; List<EventListener> listeners = eventListeners.get(type); if (listeners != null) { listeners.remove(listener); if (listeners.isEmpty()) { eventListeners.remove(type); if (external) { ((InternalProcessRuntime) getKnowledgeRuntime().getProcessRuntime()) .getSignalManager().removeEventListener(type, this); } } } else { eventListeners.remove(type); } } private void removeEventListeners() { for (String type : externalEventListeners.keySet()) { ((InternalProcessRuntime) getKnowledgeRuntime().getProcessRuntime()) .getSignalManager().removeEventListener(type, this); } } @Override public String[] getEventTypes() { return externalEventListeners.keySet().stream().map(this::resolveVariable).collect(Collectors.toList()).toArray(new String[externalEventListeners.size()]); } @Override public void nodeInstanceCompleted(NodeInstance nodeInstance, String outType) { Node nodeInstanceNode = nodeInstance.getNode(); if (nodeInstanceNode != null) { Object compensationBoolObj = nodeInstanceNode.getMetaData().get("isForCompensation"); boolean isForCompensation = compensationBoolObj != null && (Boolean) compensationBoolObj; if (isForCompensation) { return; } } if (nodeInstance instanceof FaultNodeInstance || nodeInstance instanceof EndNodeInstance || ((org.jbpm.workflow.core.WorkflowProcess) getWorkflowProcess()).isDynamic() || nodeInstance instanceof CompositeNodeInstance) { 
if (((org.jbpm.workflow.core.WorkflowProcess) getProcess()).isAutoComplete() && canComplete()) { setState(ProcessInstance.STATE_COMPLETED); } } else { throw new IllegalArgumentException( "Completing a node instance that has no outgoing connection is not supported."); } } private boolean canComplete() { if (nodeInstances.isEmpty()) { return true; } else { int eventSubprocessCounter = 0; for (NodeInstance nodeInstance : nodeInstances) { Node node = nodeInstance.getNode(); if (node instanceof EventSubProcessNode) { if (((EventSubProcessNodeInstance) nodeInstance).getNodeInstances().isEmpty()) { eventSubprocessCounter++; } } else { return false; } } return eventSubprocessCounter == nodeInstances.size(); } } public void addCompletedNodeId(String uniqueId) { this.completedNodeIds.add(uniqueId.intern()); } public List<String> getCompletedNodeIds() { return new ArrayList<>(this.completedNodeIds); } @Override public int getCurrentLevel() { return currentLevel; } @Override public void setCurrentLevel(int currentLevel) { this.currentLevel = currentLevel; } public Map<String, Integer> getIterationLevels() { return iterationLevels; } public boolean isPersisted() { return persisted; } public void setPersisted(boolean persisted) { this.persisted = persisted; } public void addActivatingNodeId(String uniqueId) { if (this.activatingNodeIds == null) { return; } this.activatingNodeIds.add(uniqueId.intern()); } public List<String> getActivatingNodeIds() { if (this.activatingNodeIds == null) { return Collections.emptyList(); } return new ArrayList<>(this.activatingNodeIds); } @Override public Object getFaultData() { return faultData; } @Override public boolean isSignalCompletion() { return signalCompletion; } @Override public void setSignalCompletion(boolean signalCompletion) { this.signalCompletion = signalCompletion; } @Override public String getDeploymentId() { return deploymentId; } @Override public void setDeploymentId(String deploymentId) { this.deploymentId = deploymentId; } 
public String getCorrelationKey() { if (correlationKey == null && getMetaData().get("CorrelationKey") != null) { this.correlationKey = ((CorrelationKey) getMetaData().get("CorrelationKey")).toExternalForm(); } return correlationKey; } public void setCorrelationKey(String correlationKey) { this.correlationKey = correlationKey; } @Override public Date getStartDate() { return startDate; } public void internalSetStartDate(Date startDate) { if (this.startDate == null) { this.startDate = startDate; } } protected boolean hasDeploymentId() { return this.deploymentId != null && !this.deploymentId.isEmpty(); } protected boolean useAsync(final Node node) { if (!(node instanceof EventSubProcessNode) && (node instanceof ActionNode || node instanceof StateBasedNode || node instanceof EndNode)) { boolean asyncMode = Boolean.parseBoolean((String) node.getMetaData().get("customAsync")); if (asyncMode) { return asyncMode; } return Boolean.parseBoolean((String) getKnowledgeRuntime().getEnvironment().get("AsyncMode")); } return false; } protected boolean useTimerSLATracking() { String mode = (String) getKnowledgeRuntime().getEnvironment().get("SLATimerMode"); if (mode == null) { return true; } return Boolean.parseBoolean(mode); } @Override public int getSlaCompliance() { return slaCompliance; } public void internalSetSlaCompliance(int slaCompliance) { this.slaCompliance = slaCompliance; } @Override public Date getSlaDueDate() { return slaDueDate; } public void internalSetSlaDueDate(Date slaDueDate) { this.slaDueDate = slaDueDate; } public Long getSlaTimerId() { return slaTimerId; } public void internalSetSlaTimerId(Long slaTimerId) { this.slaTimerId = slaTimerId; } private boolean isVariableExpression(String eventType) { if (eventType == null) { return false; } Matcher matcher = PatternConstants.PARAMETER_MATCHER.matcher(eventType); return matcher.find(); } @Override public AgendaFilter getAgendaFilter() { return agendaFilter; } @Override public void setAgendaFilter( AgendaFilter 
agendaFilter ) { this.agendaFilter = agendaFilter; } }
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.wellarchitected.model;

import java.io.Serializable;
import javax.annotation.Generated;

/**
 * <p>
 * Output of a list answers call.
 * </p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/wellarchitected-2020-03-31/ListAnswers" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ListAnswersResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {

    private String workloadId;

    private Integer milestoneNumber;

    private String lensAlias;
    /**
     * <p>
     * The ARN for the lens.
     * </p>
     */
    private String lensArn;

    private java.util.List<AnswerSummary> answerSummaries;

    private String nextToken;

    /**
     * Sets the workload identifier.
     *
     * @param workloadId
     */
    public void setWorkloadId(String workloadId) {
        this.workloadId = workloadId;
    }

    /**
     * Returns the workload identifier.
     *
     * @return
     */
    public String getWorkloadId() {
        return this.workloadId;
    }

    /**
     * Fluent setter for the workload identifier.
     *
     * @param workloadId
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListAnswersResult withWorkloadId(String workloadId) {
        setWorkloadId(workloadId);
        return this;
    }

    /**
     * Sets the milestone number.
     *
     * @param milestoneNumber
     */
    public void setMilestoneNumber(Integer milestoneNumber) {
        this.milestoneNumber = milestoneNumber;
    }

    /**
     * Returns the milestone number.
     *
     * @return
     */
    public Integer getMilestoneNumber() {
        return this.milestoneNumber;
    }

    /**
     * Fluent setter for the milestone number.
     *
     * @param milestoneNumber
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListAnswersResult withMilestoneNumber(Integer milestoneNumber) {
        setMilestoneNumber(milestoneNumber);
        return this;
    }

    /**
     * Sets the lens alias.
     *
     * @param lensAlias
     */
    public void setLensAlias(String lensAlias) {
        this.lensAlias = lensAlias;
    }

    /**
     * Returns the lens alias.
     *
     * @return
     */
    public String getLensAlias() {
        return this.lensAlias;
    }

    /**
     * Fluent setter for the lens alias.
     *
     * @param lensAlias
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListAnswersResult withLensAlias(String lensAlias) {
        setLensAlias(lensAlias);
        return this;
    }

    /**
     * <p>
     * The ARN for the lens.
     * </p>
     *
     * @param lensArn
     *        The ARN for the lens.
     */
    public void setLensArn(String lensArn) {
        this.lensArn = lensArn;
    }

    /**
     * <p>
     * The ARN for the lens.
     * </p>
     *
     * @return The ARN for the lens.
     */
    public String getLensArn() {
        return this.lensArn;
    }

    /**
     * <p>
     * The ARN for the lens.
     * </p>
     *
     * @param lensArn
     *        The ARN for the lens.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListAnswersResult withLensArn(String lensArn) {
        setLensArn(lensArn);
        return this;
    }

    /**
     * Returns the answer summaries (may be null).
     *
     * @return
     */
    public java.util.List<AnswerSummary> getAnswerSummaries() {
        return answerSummaries;
    }

    /**
     * Stores a defensive copy of the given collection; null clears the field.
     *
     * @param answerSummaries
     */
    public void setAnswerSummaries(java.util.Collection<AnswerSummary> answerSummaries) {
        if (answerSummaries == null) {
            this.answerSummaries = null;
            return;
        }
        this.answerSummaries = new java.util.ArrayList<AnswerSummary>(answerSummaries);
    }

    /**
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setAnswerSummaries(java.util.Collection)} or {@link #withAnswerSummaries(java.util.Collection)} if you
     * want to override the existing values.
     * </p>
     *
     * @param answerSummaries
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListAnswersResult withAnswerSummaries(AnswerSummary... answerSummaries) {
        if (this.answerSummaries == null) {
            // size the backing list for exactly the values about to be appended
            setAnswerSummaries(new java.util.ArrayList<AnswerSummary>(answerSummaries.length));
        }
        java.util.Collections.addAll(this.answerSummaries, answerSummaries);
        return this;
    }

    /**
     * Fluent setter for the answer summaries (replaces any existing values).
     *
     * @param answerSummaries
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListAnswersResult withAnswerSummaries(java.util.Collection<AnswerSummary> answerSummaries) {
        setAnswerSummaries(answerSummaries);
        return this;
    }

    /**
     * Sets the pagination token.
     *
     * @param nextToken
     */
    public void setNextToken(String nextToken) {
        this.nextToken = nextToken;
    }

    /**
     * Returns the pagination token.
     *
     * @return
     */
    public String getNextToken() {
        return this.nextToken;
    }

    /**
     * Fluent setter for the pagination token.
     *
     * @param nextToken
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListAnswersResult withNextToken(String nextToken) {
        setNextToken(nextToken);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder text = new StringBuilder("{");
        if (getWorkloadId() != null) {
            text.append("WorkloadId: ").append(getWorkloadId()).append(",");
        }
        if (getMilestoneNumber() != null) {
            text.append("MilestoneNumber: ").append(getMilestoneNumber()).append(",");
        }
        if (getLensAlias() != null) {
            text.append("LensAlias: ").append(getLensAlias()).append(",");
        }
        if (getLensArn() != null) {
            text.append("LensArn: ").append(getLensArn()).append(",");
        }
        if (getAnswerSummaries() != null) {
            text.append("AnswerSummaries: ").append(getAnswerSummaries()).append(",");
        }
        if (getNextToken() != null) {
            text.append("NextToken: ").append(getNextToken());
        }
        return text.append("}").toString();
    }

    /** Null-tolerant equality helper: both null, or a.equals(b). */
    private static boolean fieldEquals(Object a, Object b) {
        return a == null ? b == null : a.equals(b);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof ListAnswersResult)) {
            return false;
        }
        ListAnswersResult that = (ListAnswersResult) obj;
        return fieldEquals(getWorkloadId(), that.getWorkloadId())
                && fieldEquals(getMilestoneNumber(), that.getMilestoneNumber())
                && fieldEquals(getLensAlias(), that.getLensAlias())
                && fieldEquals(getLensArn(), that.getLensArn())
                && fieldEquals(getAnswerSummaries(), that.getAnswerSummaries())
                && fieldEquals(getNextToken(), that.getNextToken());
    }

    @Override
    public int hashCode() {
        // same field order and 31-multiplier scheme as the generated form,
        // so hash values are unchanged
        Object[] parts = {
                getWorkloadId(), getMilestoneNumber(), getLensAlias(),
                getLensArn(), getAnswerSummaries(), getNextToken()
        };
        int hashCode = 1;
        for (Object part : parts) {
            hashCode = 31 * hashCode + (part == null ? 0 : part.hashCode());
        }
        return hashCode;
    }

    @Override
    public ListAnswersResult clone() {
        try {
            return (ListAnswersResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.jackrabbit.oak.plugins.document;

import java.util.List;
import java.util.Map;
import java.util.Set;

import javax.annotation.Nonnull;
import javax.annotation.Nullable;

import com.google.common.base.Joiner;
import com.google.common.base.Splitter;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import org.apache.jackrabbit.oak.core.SimpleCommitContext;
import org.apache.jackrabbit.oak.plugins.document.memory.MemoryDocumentStore;
import org.apache.jackrabbit.oak.plugins.document.spi.JournalProperty;
import org.apache.jackrabbit.oak.plugins.document.spi.JournalPropertyBuilder;
import org.apache.jackrabbit.oak.plugins.document.spi.JournalPropertyService;
import org.apache.jackrabbit.oak.plugins.observation.ChangeCollectorProvider;
import org.apache.jackrabbit.oak.plugins.observation.ChangeSet;
import org.apache.jackrabbit.oak.spi.commit.CommitContext;
import org.apache.jackrabbit.oak.spi.commit.CommitHook;
import org.apache.jackrabbit.oak.spi.commit.CommitInfo;
import org.apache.jackrabbit.oak.spi.commit.EditorHook;
import org.apache.jackrabbit.oak.spi.commit.EmptyHook;
import org.apache.jackrabbit.oak.spi.commit.Observer;
import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
import org.apache.jackrabbit.oak.spi.state.NodeState;
import org.apache.jackrabbit.oak.spi.whiteboard.DefaultWhiteboard;
import org.apache.jackrabbit.oak.spi.whiteboard.Whiteboard;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;

import static org.hamcrest.collection.IsIterableContainingInAnyOrder.containsInAnyOrder;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;

/**
 * Tests that changes committed on one DocumentNodeStore cluster node (ns1) are
 * observed as *external* changes on another (ns2) sharing the same document
 * store, and that ChangeSet and custom journal properties are carried across
 * via the journal. Each test follows the same protocol: commit on ns1, push
 * with backgroundWrite(), reset the ns2 collector, pull with backgroundRead(),
 * then inspect the single external CommitInfo seen by ns2.
 */
public class ExternalChangesTest {

    @Rule
    public final DocumentMKBuilderProvider builderProvider = new DocumentMKBuilderProvider();

    // two cluster nodes backed by the same MemoryDocumentStore
    private DocumentNodeStore ns1;
    private DocumentNodeStore ns2;

    // observers capturing CommitInfo per store
    private CommitInfoCollector c1 = new CommitInfoCollector();
    private CommitInfoCollector c2 = new CommitInfoCollector();

    private JournalPropertyHandlerFactory tracker = new JournalPropertyHandlerFactory();
    private Whiteboard wb = new DefaultWhiteboard();

    @Before
    public void setUp() {
        tracker.start(wb);
        MemoryDocumentStore store = new MemoryDocumentStore();
        ns1 = newDocumentNodeStore(store, 1);
        ns2 = newDocumentNodeStore(store, 2);
        ns1.addObserver(c1);
        ns2.addObserver(c2);
    }

    // Sanity-check the default ChangeSet collection limits.
    @Test
    public void defaultConfig() throws Exception{
        assertEquals(50, ns1.getChangeSetMaxItems());
        assertEquals(9, ns1.getChangeSetMaxDepth());
    }

    // Two commits on ns1 must surface on ns2 as one external change whose
    // ChangeSet aggregates both commits' property names without overflow.
    @Test
    public void changeSetForExternalChanges() throws Exception{
        NodeBuilder b1 = ns1.getRoot().builder();
        b1.child("a");
        b1.setProperty("foo1", "bar");
        ns1.merge(b1, newCollectingHook(), newCommitInfo());

        NodeBuilder b2 = ns1.getRoot().builder();
        b2.child("b");
        b2.setProperty("foo2", "bar");
        ns1.merge(b2, newCollectingHook(), newCommitInfo());

        ns1.backgroundWrite();

        c2.reset();
        ns2.backgroundRead();

        CommitInfo ci = c2.getExternalChange();
        CommitContext cc = (CommitContext) ci.getInfo().get(CommitContext.NAME);
        assertNotNull(cc);
        ChangeSet cs = (ChangeSet) cc.get(ChangeCollectorProvider.COMMIT_CONTEXT_OBSERVATION_CHANGESET);
        assertNotNull(cs);
        assertFalse(cs.anyOverflow());
        assertThat(cs.getPropertyNames(), containsInAnyOrder("foo1", "foo2"));
    }

    // A commit made without ChangeSet collection must force the aggregated
    // external ChangeSet into the overflow state.
    @Test
    public void missingChangeSetResultsInOverflow() throws Exception{
        NodeBuilder b1 = ns1.getRoot().builder();
        b1.child("a");
        b1.setProperty("foo1", "bar");
        ns1.merge(b1, newCollectingHook(), newCommitInfo());

        NodeBuilder b2 = ns1.getRoot().builder();
        b2.child("b");
        b2.setProperty("foo2", "bar");
        //Commit without ChangeSet
        ns1.merge(b2, EmptyHook.INSTANCE, CommitInfo.EMPTY);

        ns1.backgroundWrite();

        c2.reset();
        ns2.backgroundRead();

        CommitInfo ci = c2.getExternalChange();
        CommitContext cc = (CommitContext) ci.getInfo().get(CommitContext.NAME);
        assertNotNull(cc);
        ChangeSet cs = (ChangeSet) cc.get(ChangeCollectorProvider.COMMIT_CONTEXT_OBSERVATION_CHANGESET);
        assertNotNull(cs);
        //ChangeSet should result in overflow
        assertTrue(cs.anyOverflow());
    }

    // A commit large enough to exceed the update limit (and thus use branch
    // commits) must still deliver a ChangeSet covering all touched properties.
    @Test
    public void changeSetForBranchCommit() throws Exception{
        final int NUM_NODES = DocumentRootBuilder.UPDATE_LIMIT / 2;
        final int NUM_PROPS = 10;

        Set<String> propNames = Sets.newHashSet();
        NodeBuilder b1 = ns1.getRoot().builder();
        for (int i = 0; i < NUM_NODES; i++) {
            NodeBuilder c = b1.child("n" + i);
            for (int j = 0; j < NUM_PROPS; j++) {
                c.setProperty("q" + j, "value");
                c.setProperty("p" + j, "value");
                propNames.add("q" + j);
                propNames.add("p" + j);
            }
        }

        ns1.merge(b1, newCollectingHook(), newCommitInfo());
        ns1.backgroundWrite();

        c2.reset();
        ns2.backgroundRead();

        CommitInfo ci = c2.getExternalChange();
        CommitContext cc = (CommitContext) ci.getInfo().get(CommitContext.NAME);
        assertNotNull(cc);
        ChangeSet cs = (ChangeSet) cc.get(ChangeCollectorProvider.COMMIT_CONTEXT_OBSERVATION_CHANGESET);
        assertNotNull(cs);
        assertTrue(cs.getPropertyNames().containsAll(propNames));
    }

    // A JournalPropertyService registered on the whiteboard must have its
    // per-commit properties collected, serialized through the journal, and
    // rebuilt cumulatively on the reading side ("NULL" for commits without one).
    @Test
    public void journalService() throws Exception{
        wb.register(JournalPropertyService.class, new TestJournalService(), null);

        //Do a dummy write so that journal property handler gets refreshed
        //and picks our newly registered service
        NodeBuilder b0 = ns1.getRoot().builder();
        b0.child("0");
        ns1.merge(b0, newCollectingHook(), newCommitInfo());
        ns1.backgroundWrite();

        NodeBuilder b1 = ns1.getRoot().builder();
        b1.child("a");
        CommitContext cc = new SimpleCommitContext();
        cc.set(TestProperty.NAME, new TestProperty("foo"));
        ns1.merge(b1, newCollectingHook(), newCommitInfo(cc));

        NodeBuilder b2 = ns1.getRoot().builder();
        b2.child("b");
        cc = new SimpleCommitContext();
        cc.set(TestProperty.NAME, new TestProperty("bar"));
        ns1.merge(b2, newCollectingHook(), newCommitInfo(cc));

        //null entry
        NodeBuilder b3 = ns1.getRoot().builder();
        b3.child("c");
        ns1.merge(b3, newCollectingHook(), newCommitInfo());

        ns1.backgroundWrite();

        c2.reset();
        ns2.backgroundRead();

        CommitInfo ci = c2.getExternalChange();
        cc = (CommitContext) ci.getInfo().get(CommitContext.NAME);
        CumulativeTestProperty ct = (CumulativeTestProperty) cc.get(TestProperty.NAME);
        assertNotNull(ct);
        assertThat(ct.values, containsInAnyOrder("foo", "bar", "NULL"));
    }

    /** Commit hook that collects a ChangeSet for the commit. */
    private CommitHook newCollectingHook(){
        return new EditorHook(new ChangeCollectorProvider());
    }

    /** CommitInfo carrying a fresh, empty commit context. */
    private CommitInfo newCommitInfo(){
        return newCommitInfo(new SimpleCommitContext());
    }

    /** CommitInfo carrying the given commit context under CommitContext.NAME. */
    private CommitInfo newCommitInfo(CommitContext commitContext){
        Map<String, Object> info = ImmutableMap.<String, Object>of(CommitContext.NAME, commitContext);
        return new CommitInfo(CommitInfo.OAK_UNKNOWN, CommitInfo.OAK_UNKNOWN, info);
    }

    // Async delay 0 so background ops only run when invoked explicitly by the test.
    private DocumentNodeStore newDocumentNodeStore(DocumentStore store, int clusterId) {
        return builderProvider.newBuilder()
                .setAsyncDelay(0)
                .setDocumentStore(store)
                .setJournalPropertyHandlerFactory(tracker)
                .setLeaseCheck(false) // disabled for debugging purposes
                .setClusterId(clusterId)
                .getNodeStore();
    }

    /** Observer recording every CommitInfo delivered to its store. */
    private static class CommitInfoCollector implements Observer {
        List<CommitInfo> infos = Lists.newArrayList();

        @Override
        public void contentChanged(@Nonnull NodeState root, @Nonnull CommitInfo info) {
            infos.add(info);
        }

        /** Returns the single external CommitInfo seen; asserts exactly one exists. */
        public CommitInfo getExternalChange(){
            List<CommitInfo> result = Lists.newArrayList();
            for (CommitInfo info : infos){
                if (info.isExternal()) {
                    result.add(info);
                }
            }
            assertEquals(1, result.size());
            return result.get(0);
        }

        void reset(){
            infos.clear();
        }
    }

    /** Journal service exposing TestProperty under its builder. */
    private static class TestJournalService implements JournalPropertyService {

        @Override
        public JournalPropertyBuilder newBuilder() {
            return new TestJournalBuilder();
        }

        @Override
        public String getName() {
            return TestProperty.NAME;
        }
    }

    /** Single-valued journal property attached to one commit. */
    private static class TestProperty implements JournalProperty {
        static final String NAME = "test.props";
        final String value;

        public TestProperty(String value) {
            this.value = value;
        }
    }

    /** Aggregate of all TestProperty values observed across journal entries. */
    private static class CumulativeTestProperty implements JournalProperty {
        final Set<String> values = Sets.newHashSet();
    }

    /**
     * Builder accumulating TestProperty values: null commits contribute "NULL",
     * serialization joins on ',' and deserialization splits accordingly.
     */
    private static class TestJournalBuilder implements JournalPropertyBuilder<TestProperty>{
        final CumulativeTestProperty allProps = new CumulativeTestProperty();

        @Override
        public void addProperty(@Nullable TestProperty journalProperty) {
            if (journalProperty != null) {
                allProps.values.add(journalProperty.value);
            } else {
                allProps.values.add("NULL");
            }
        }

        @Override
        public String buildAsString() {
            return Joiner.on(",").join(allProps.values);
        }

        @Override
        public void addSerializedProperty(@Nullable String s) {
            if (s != null){
                Iterables.addAll(allProps.values, Splitter.on(',').split(s));
            }
        }

        @Override
        public JournalProperty build() {
            return allProps;
        }
    }
}
/* * Copyright 2015, The Querydsl Team (http://www.querydsl.com/team) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.querydsl.sql; import java.lang.reflect.Array; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Types; import java.util.List; import java.util.Map; import javax.annotation.Nullable; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.collect.ImmutableList; import com.google.common.collect.Maps; import com.google.common.primitives.Primitives; import com.querydsl.core.types.Path; import com.querydsl.sql.types.ArrayType; import com.querydsl.sql.types.Null; import com.querydsl.sql.types.Type; /** * Configuration for SQLQuery instances * * @author tiwe * */ public final class Configuration { private static final Logger logger = LoggerFactory.getLogger(Configuration.class); static final Configuration DEFAULT = new Configuration(SQLTemplates.DEFAULT); private final JDBCTypeMapping jdbcTypeMapping = new JDBCTypeMapping(); private final JavaTypeMapping javaTypeMapping = new JavaTypeMapping(); private final NameMapping nameMapping = new NameMapping(); private final Map<String, Class<?>> typeToName = Maps.newHashMap(); private SQLTemplates templates; private SQLExceptionTranslator exceptionTranslator = DefaultSQLExceptionTranslator.DEFAULT; private final SQLListeners listeners = new SQLListeners(); private boolean hasTableColumnTypes = false; private boolean useLiterals = false; 
/** * Create a new Configuration instance * * @param templates templates for SQL serialization */ @SuppressWarnings("unchecked") public Configuration(SQLTemplates templates) { this.templates = templates; for (Type<?> customType : templates.getCustomTypes()) { javaTypeMapping.register(customType); } for (Map.Entry<SchemaAndTable, SchemaAndTable> entry : templates.getTableOverrides().entrySet()) { registerTableOverride(entry.getKey(), entry.getValue()); } if (templates.isArraysSupported()) { // register array types List<Class<?>> classes = ImmutableList.<Class<?>>of(String.class, Long.class, Integer.class, Short.class, Byte.class, Boolean.class, java.sql.Date.class, java.sql.Timestamp.class, java.sql.Time.class, Double.class, Float.class); for (Class<?> cl : classes) { int code = jdbcTypeMapping.get(cl); String name = templates.getTypeNameForCode(code); Class<?> arrType = Array.newInstance(cl, 0).getClass(); javaTypeMapping.register(new ArrayType(arrType, name)); if (Primitives.isWrapperType(cl) && !cl.equals(Byte.class)) { cl = Primitives.unwrap(cl); arrType = Array.newInstance(cl, 0).getClass(); javaTypeMapping.register(new ArrayType(arrType, name)); } } } } /** * Get the literal representation of the given constant * * @param o object * @return literal representation */ @SuppressWarnings("unchecked") public String asLiteral(Object o) { if (o == null || o instanceof Null) { return "null"; } else { Type type = javaTypeMapping.getType(o.getClass()); if (type != null) { return templates.serialize(type.getLiteral(o), type.getSQLTypes()[0]); } else { throw new IllegalArgumentException("Unsupported literal type " + o.getClass().getName()); } } } public SQLTemplates getTemplates() { return templates; } /** * Get the java type for the given jdbc type, table name and column name * * @param sqlType JDBC type * @param typeName JDBC type name * @param size size * @param digits digits * @param tableName table name * @param columnName column name * @return Java type */ public 
Class<?> getJavaType(int sqlType, String typeName, int size, int digits, String tableName, String columnName) { // table.column mapped class Type<?> type = javaTypeMapping.getType(tableName, columnName); if (type != null) { return type.getReturnedClass(); } else if (typeName != null && !typeName.isEmpty()) { typeName = typeName.toLowerCase(); // typename mapped class Class<?> clazz = typeToName.get(typeName); if (clazz != null) { return clazz; } if (sqlType == Types.ARRAY) { if (typeName.startsWith("_")) { typeName = typeName.substring(1); } else if (typeName.endsWith(" array")) { typeName = typeName.substring(0, typeName.length() - 6); } if (typeName.contains("[")) { typeName = typeName.substring(0, typeName.indexOf("[")); } if (typeName.contains("(")) { typeName = typeName.substring(0, typeName.indexOf("(")); } Integer sqlComponentType = templates.getCodeForTypeName(typeName); if (sqlComponentType == null) { logger.warn("Found no JDBC type for " + typeName + " using OTHER instead"); sqlComponentType = Types.OTHER; } Class<?> componentType = jdbcTypeMapping.get(sqlComponentType, size, digits); return Array.newInstance(componentType, 0).getClass(); } } // sql type mapped class return jdbcTypeMapping.get(sqlType, size, digits); } /** * Get the value at the given index from the result set * * @param <T> type to return * @param rs result set * @param path path * @param i one based index in result set row * @param clazz type * @return value * @throws SQLException */ @Nullable public <T> T get(ResultSet rs, @Nullable Path<?> path, int i, Class<T> clazz) throws SQLException { return getType(path, clazz).getValue(rs, i); } /** * Get the schema/table override * * @param key schema and table * @return overridden schema and table */ @Nullable public SchemaAndTable getOverride(SchemaAndTable key) { return nameMapping.getOverride(key); } /** * Get the column override * * @param key schema and table * @param column column * @return overridden column */ public String 
getColumnOverride(SchemaAndTable key, String column) { return nameMapping.getColumnOverride(key, column); } /** * Set the value at the given index in the statement * * @param <T> * @param stmt statement * @param path path * @param i one based index in statement * @param value value to bind * @throws SQLException */ @SuppressWarnings({ "unchecked", "rawtypes" }) public <T> void set(PreparedStatement stmt, Path<?> path, int i, T value) throws SQLException { if (value == null || value instanceof Null) { Integer sqlType = null; if (path != null) { ColumnMetadata columnMetadata = ColumnMetadata.getColumnMetadata(path); if (columnMetadata.hasJdbcType()) { sqlType = columnMetadata.getJdbcType(); } } if (sqlType != null) { stmt.setNull(i, sqlType); } else { stmt.setNull(i, Types.NULL); } } else { getType(path, (Class) value.getClass()).setValue(stmt, i, value); } } @SuppressWarnings({ "unchecked", "rawtypes" }) private <T> Type<T> getType(@Nullable Path<?> path, Class<T> clazz) { if (hasTableColumnTypes && path != null && !clazz.equals(Null.class) && path.getMetadata().getParent() instanceof RelationalPath) { String table = ((RelationalPath) path.getMetadata().getParent()).getTableName(); String column = ColumnMetadata.getName(path); Type<T> type = (Type) javaTypeMapping.getType(table, column); if (type != null) { return type; } } return javaTypeMapping.getType(clazz); } /** * Get the SQL type name for the given java type * * @param type java type * @return SQL type name */ public String getTypeName(Class<?> type) { Integer jdbcType = jdbcTypeMapping.get(type); if (jdbcType == null) { jdbcType = javaTypeMapping.getType(type).getSQLTypes()[0]; } return templates.getTypeNameForCode(jdbcType); } /** * Get the SQL type name for a cast operation * * @param type java type * @return SQL type name */ public String getTypeNameForCast(Class<?> type) { Integer jdbcType = jdbcTypeMapping.get(type); if (jdbcType == null) { jdbcType = javaTypeMapping.getType(type).getSQLTypes()[0]; } 
return templates.getCastTypeNameForCode(jdbcType); } /** * Register a schema override * * @param oldSchema schema to override * @param newSchema override * @return previous override value */ public String registerSchemaOverride(String oldSchema, String newSchema) { return nameMapping.registerSchemaOverride(oldSchema, newSchema); } /** * Register a table override * * @param oldTable table to override * @param newTable override * @return previous override value */ public String registerTableOverride(String oldTable, String newTable) { return nameMapping.registerTableOverride(oldTable, newTable); } /** * Register a schema specific table override * * @param schema schema of table * @param oldTable table to override * @param newTable override * @return previous override value */ public String registerTableOverride(String schema, String oldTable, String newTable) { SchemaAndTable st = registerTableOverride(schema, oldTable, schema, newTable); return st != null ? st.getTable() : null; } /** * Register a schema specific table override * * @param schema schema of table * @param oldTable table to override * @param newSchema override schema * @param newTable override table * @return previous override value */ public SchemaAndTable registerTableOverride(String schema, String oldTable, String newSchema, String newTable) { return registerTableOverride(new SchemaAndTable(schema, oldTable), new SchemaAndTable(newSchema, newTable)); } /** * Register a schema specific table override * * @param from schema and table to override * @param to override * @return previous override */ public SchemaAndTable registerTableOverride(SchemaAndTable from, SchemaAndTable to) { return nameMapping.registerTableOverride(from, to); } /** * Register a column override * * @param schema schema * @param table table * @param oldColumn column * @param newColumn override * @return previous override */ public String registerColumnOverride(String schema, String table, String oldColumn, String newColumn) { 
return nameMapping.registerColumnOverride(schema, table, oldColumn, newColumn); } /** * Register a column override * * @param table table * @param oldColumn column * @param newColumn override * @return previous override */ public String registerColumnOverride(String table, String oldColumn, String newColumn) { return nameMapping.registerColumnOverride(table, oldColumn, newColumn); } /** * Register the given {@link Type} converter * * @param type type */ public void register(Type<?> type) { jdbcTypeMapping.register(type.getSQLTypes()[0], type.getReturnedClass()); javaTypeMapping.register(type); } /** * Register a typeName to Class mapping * * @param typeName SQL type name * @param clazz java type */ public void registerType(String typeName, Class<?> clazz) { typeToName.put(typeName.toLowerCase(), clazz); } /** * Override the binding for the given NUMERIC type * * @param total total amount of digits * @param decimal amount of fractional digits * @param javaType java type */ public void registerNumeric(int total, int decimal, Class<?> javaType) { jdbcTypeMapping.registerNumeric(total, decimal, javaType); } /** * Override multiple numeric bindings, both begin and end are inclusive * * @param beginTotal inclusive start of range * @param endTotal inclusive end of range * @param beginDecimal inclusive start of range * @param endDecimal inclusive end of range * @param javaType java type */ public void registerNumeric(int beginTotal, int endTotal, int beginDecimal, int endDecimal, Class <?> javaType) { for (int total = beginTotal; total <= endTotal; total++) { for (int decimal = beginDecimal; decimal <= endDecimal; decimal++) { registerNumeric(total, decimal, javaType); } } } /** * Register the given javaType for the given table and column * * @param table table * @param column column * @param javaType java type */ public void register(String table, String column, Class<?> javaType) { register(table, column, javaTypeMapping.getType(javaType)); } /** * Register the given 
{@link Type} converter for the given table and column * * @param table table * @param column column * @param type type */ public void register(String table, String column, Type<?> type) { javaTypeMapping.setType(table, column, type); hasTableColumnTypes = true; } /** * Translate the given SQLException * * @param ex SQLException to translate * @return translated exception */ public RuntimeException translate(SQLException ex) { return exceptionTranslator.translate(ex); } /** * Translate the given SQLException * * @param sql SQL string * @param bindings bindings * @param ex SQLException to translate * @return translated exception */ public RuntimeException translate(String sql, List<Object> bindings, SQLException ex) { return exceptionTranslator.translate(sql, bindings, ex); } /** * Add a listener * * @param listener listener */ public void addListener(SQLListener listener) { listeners.add(listener); } /** * Get the registered listener * * @return listeners as single listener instance */ public SQLListeners getListeners() { return listeners; } /** * Get whether literals are serialized or prepared statement bindings are used * * @return true for literals and false for bindings */ public boolean getUseLiterals() { return useLiterals; } /** * Set whether literals are used in SQL strings instead of parameter bindings (default: false) * * <p>Warning: When literals are used, prepared statement won't have any parameter bindings * and also batch statements will only be simulated, but not executed as actual batch statements.</p> * * @param useLiterals true for literals and false for bindings */ public void setUseLiterals(boolean useLiterals) { this.useLiterals = useLiterals; } /** * Set the exception translator * * @param exceptionTranslator exception translator */ public void setExceptionTranslator(SQLExceptionTranslator exceptionTranslator) { this.exceptionTranslator = exceptionTranslator; } /** * Set the templates to use for serialization * * @param templates templates */ 
public void setTemplates(SQLTemplates templates) { this.templates = templates; } }
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.search.geo; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.geo.GeoDistance; import org.elasticsearch.common.geo.GeoHashUtils; import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.script.Script; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.sort.SortBuilders; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ElasticsearchIntegrationTest; import org.junit.Test; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.filteredQuery; import static org.elasticsearch.index.query.QueryBuilders.geoDistanceQuery; import static 
org.elasticsearch.index.query.QueryBuilders.geoDistanceRangeQuery; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.index.query.QueryBuilders.termQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFirstHit; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertOrderedSearchHits; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasId; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.closeTo; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; /** */ public class GeoDistanceTests extends ElasticsearchIntegrationTest { @Test public void simpleDistanceTests() throws Exception { XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("location").field("type", "geo_point").field("lat_lon", true) .startObject("fielddata").field("format", randomNumericFieldDataFormat()).endObject().endObject().endObject() .endObject().endObject(); assertAcked(prepareCreate("test").addMapping("type1", xContentBuilder)); ensureGreen(); indexRandom(true, client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject() .field("name", "New York") .startObject("location").field("lat", 40.7143528).field("lon", -74.0059731).endObject() .endObject()), // to NY: 5.286 km client().prepareIndex("test", "type1", "2").setSource(jsonBuilder().startObject() .field("name", "Times 
Square") .startObject("location").field("lat", 40.759011).field("lon", -73.9844722).endObject() .endObject()), // to NY: 0.4621 km client().prepareIndex("test", "type1", "3").setSource(jsonBuilder().startObject() .field("name", "Tribeca") .startObject("location").field("lat", 40.718266).field("lon", -74.007819).endObject() .endObject()), // to NY: 1.055 km client().prepareIndex("test", "type1", "4").setSource(jsonBuilder().startObject() .field("name", "Wall Street") .startObject("location").field("lat", 40.7051157).field("lon", -74.0088305).endObject() .endObject()), // to NY: 1.258 km client().prepareIndex("test", "type1", "5").setSource(jsonBuilder().startObject() .field("name", "Soho") .startObject("location").field("lat", 40.7247222).field("lon", -74).endObject() .endObject()), // to NY: 2.029 km client().prepareIndex("test", "type1", "6").setSource(jsonBuilder().startObject() .field("name", "Greenwich Village") .startObject("location").field("lat", 40.731033).field("lon", -73.9962255).endObject() .endObject()), // to NY: 8.572 km client().prepareIndex("test", "type1", "7").setSource(jsonBuilder().startObject() .field("name", "Brooklyn") .startObject("location").field("lat", 40.65).field("lon", -73.95).endObject() .endObject())); SearchResponse searchResponse = client().prepareSearch() // from NY .setQuery(filteredQuery(matchAllQuery(), geoDistanceQuery("location").distance("3km").point(40.7143528, -74.0059731))) .execute().actionGet(); assertHitCount(searchResponse, 5); assertThat(searchResponse.getHits().hits().length, equalTo(5)); for (SearchHit hit : searchResponse.getHits()) { assertThat(hit.id(), anyOf(equalTo("1"), equalTo("3"), equalTo("4"), equalTo("5"), equalTo("6"))); } searchResponse = client().prepareSearch() // from NY .setQuery(filteredQuery(matchAllQuery(), geoDistanceQuery("location").distance("3km").point(40.7143528, -74.0059731).optimizeBbox("indexed"))) .execute().actionGet(); assertHitCount(searchResponse, 5); 
assertThat(searchResponse.getHits().hits().length, equalTo(5)); for (SearchHit hit : searchResponse.getHits()) { assertThat(hit.id(), anyOf(equalTo("1"), equalTo("3"), equalTo("4"), equalTo("5"), equalTo("6"))); } // now with a PLANE type searchResponse = client().prepareSearch() // from NY .setQuery(filteredQuery(matchAllQuery(), geoDistanceQuery("location").distance("3km").geoDistance(GeoDistance.PLANE).point(40.7143528, -74.0059731))) .execute().actionGet(); assertHitCount(searchResponse, 5); assertThat(searchResponse.getHits().hits().length, equalTo(5)); for (SearchHit hit : searchResponse.getHits()) { assertThat(hit.id(), anyOf(equalTo("1"), equalTo("3"), equalTo("4"), equalTo("5"), equalTo("6"))); } // factor type is really too small for this resolution searchResponse = client().prepareSearch() // from NY .setQuery(filteredQuery(matchAllQuery(), geoDistanceQuery("location").distance("2km").point(40.7143528, -74.0059731))) .execute().actionGet(); assertHitCount(searchResponse, 4); assertThat(searchResponse.getHits().hits().length, equalTo(4)); for (SearchHit hit : searchResponse.getHits()) { assertThat(hit.id(), anyOf(equalTo("1"), equalTo("3"), equalTo("4"), equalTo("5"))); } searchResponse = client().prepareSearch() // from NY .setQuery(filteredQuery(matchAllQuery(), geoDistanceQuery("location").distance("2km").point(40.7143528, -74.0059731).optimizeBbox("indexed"))) .execute().actionGet(); assertHitCount(searchResponse, 4); assertThat(searchResponse.getHits().hits().length, equalTo(4)); for (SearchHit hit : searchResponse.getHits()) { assertThat(hit.id(), anyOf(equalTo("1"), equalTo("3"), equalTo("4"), equalTo("5"))); } searchResponse = client().prepareSearch() // from NY .setQuery(filteredQuery(matchAllQuery(), geoDistanceQuery("location").distance("1.242mi").point(40.7143528, -74.0059731))) .execute().actionGet(); assertHitCount(searchResponse, 4); assertThat(searchResponse.getHits().hits().length, equalTo(4)); for (SearchHit hit : 
searchResponse.getHits()) { assertThat(hit.id(), anyOf(equalTo("1"), equalTo("3"), equalTo("4"), equalTo("5"))); } searchResponse = client().prepareSearch() // from NY .setQuery(filteredQuery(matchAllQuery(), geoDistanceQuery("location").distance("1.242mi").point(40.7143528, -74.0059731).optimizeBbox("indexed"))) .execute().actionGet(); assertHitCount(searchResponse, 4); assertThat(searchResponse.getHits().hits().length, equalTo(4)); for (SearchHit hit : searchResponse.getHits()) { assertThat(hit.id(), anyOf(equalTo("1"), equalTo("3"), equalTo("4"), equalTo("5"))); } searchResponse = client().prepareSearch() // from NY .setQuery(filteredQuery(matchAllQuery(), geoDistanceRangeQuery("location").from("1.0km").to("2.0km").point(40.7143528, -74.0059731))) .execute().actionGet(); assertHitCount(searchResponse, 2); assertThat(searchResponse.getHits().hits().length, equalTo(2)); for (SearchHit hit : searchResponse.getHits()) { assertThat(hit.id(), anyOf(equalTo("4"), equalTo("5"))); } searchResponse = client().prepareSearch() // from NY .setQuery(filteredQuery(matchAllQuery(), geoDistanceRangeQuery("location").from("1.0km").to("2.0km").point(40.7143528, -74.0059731).optimizeBbox("indexed"))) .execute().actionGet(); assertHitCount(searchResponse, 2); assertThat(searchResponse.getHits().hits().length, equalTo(2)); for (SearchHit hit : searchResponse.getHits()) { assertThat(hit.id(), anyOf(equalTo("4"), equalTo("5"))); } searchResponse = client().prepareSearch() // from NY .setQuery(filteredQuery(matchAllQuery(), geoDistanceRangeQuery("location").to("2.0km").point(40.7143528, -74.0059731))) .execute().actionGet(); assertHitCount(searchResponse, 4); assertThat(searchResponse.getHits().hits().length, equalTo(4)); searchResponse = client().prepareSearch() // from NY .setQuery(filteredQuery(matchAllQuery(), geoDistanceRangeQuery("location").from("2.0km").point(40.7143528, -74.0059731))) .execute().actionGet(); assertHitCount(searchResponse, 3); 
assertThat(searchResponse.getHits().hits().length, equalTo(3)); // SORTING searchResponse = client().prepareSearch().setQuery(matchAllQuery()) .addSort(SortBuilders.geoDistanceSort("location").point(40.7143528, -74.0059731).order(SortOrder.ASC)) .execute().actionGet(); assertHitCount(searchResponse, 7); assertOrderedSearchHits(searchResponse, "1", "3", "4", "5", "6", "2", "7"); searchResponse = client().prepareSearch().setQuery(matchAllQuery()) .addSort(SortBuilders.geoDistanceSort("location").point(40.7143528, -74.0059731).order(SortOrder.DESC)) .execute().actionGet(); assertHitCount(searchResponse, 7); assertOrderedSearchHits(searchResponse, "7", "2", "6", "5", "4", "3", "1"); } @Test public void testDistanceSortingMVFields() throws Exception { XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("locations").field("type", "geo_point").field("lat_lon", true) .startObject("fielddata").field("format", randomNumericFieldDataFormat()).endObject().endObject().endObject() .endObject().endObject(); assertAcked(prepareCreate("test") .addMapping("type1", xContentBuilder)); ensureGreen(); client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject() .field("names", "New York") .startObject("locations").field("lat", 40.7143528).field("lon", -74.0059731).endObject() .endObject()).execute().actionGet(); client().prepareIndex("test", "type1", "2").setSource(jsonBuilder().startObject() .field("names", "Times Square", "Tribeca") .startArray("locations") // to NY: 5.286 km .startObject().field("lat", 40.759011).field("lon", -73.9844722).endObject() // to NY: 0.4621 km .startObject().field("lat", 40.718266).field("lon", -74.007819).endObject() .endArray() .endObject()).execute().actionGet(); client().prepareIndex("test", "type1", "3").setSource(jsonBuilder().startObject() .field("names", "Wall Street", "Soho") .startArray("locations") // to NY: 1.055 km 
.startObject().field("lat", 40.7051157).field("lon", -74.0088305).endObject() // to NY: 1.258 km .startObject().field("lat", 40.7247222).field("lon", -74).endObject() .endArray() .endObject()).execute().actionGet(); client().prepareIndex("test", "type1", "4").setSource(jsonBuilder().startObject() .field("names", "Greenwich Village", "Brooklyn") .startArray("locations") // to NY: 2.029 km .startObject().field("lat", 40.731033).field("lon", -73.9962255).endObject() // to NY: 8.572 km .startObject().field("lat", 40.65).field("lon", -73.95).endObject() .endArray() .endObject()).execute().actionGet(); client().admin().indices().prepareRefresh().execute().actionGet(); // Order: Asc SearchResponse searchResponse = client().prepareSearch("test").setQuery(matchAllQuery()) .addSort(SortBuilders.geoDistanceSort("locations").point(40.7143528, -74.0059731).order(SortOrder.ASC)) .execute().actionGet(); assertHitCount(searchResponse, 4); assertOrderedSearchHits(searchResponse, "1", "2", "3", "4"); assertThat(((Number) searchResponse.getHits().getAt(0).sortValues()[0]).doubleValue(), closeTo(0d, 10d)); assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).doubleValue(), closeTo(462.1d, 10d)); assertThat(((Number) searchResponse.getHits().getAt(2).sortValues()[0]).doubleValue(), closeTo(1055.0d, 10d)); assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(2029.0d, 10d)); // Order: Asc, Mode: max searchResponse = client().prepareSearch("test").setQuery(matchAllQuery()) .addSort(SortBuilders.geoDistanceSort("locations").point(40.7143528, -74.0059731).order(SortOrder.ASC).sortMode("max")) .execute().actionGet(); assertHitCount(searchResponse, 4); assertOrderedSearchHits(searchResponse, "1", "3", "2", "4"); assertThat(((Number) searchResponse.getHits().getAt(0).sortValues()[0]).doubleValue(), closeTo(0d, 10d)); assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).doubleValue(), closeTo(1258.0d, 10d)); 
assertThat(((Number) searchResponse.getHits().getAt(2).sortValues()[0]).doubleValue(), closeTo(5286.0d, 10d)); assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(8572.0d, 10d)); // Order: Desc searchResponse = client().prepareSearch("test").setQuery(matchAllQuery()) .addSort(SortBuilders.geoDistanceSort("locations").point(40.7143528, -74.0059731).order(SortOrder.DESC)) .execute().actionGet(); assertHitCount(searchResponse, 4); assertOrderedSearchHits(searchResponse, "4", "2", "3", "1"); assertThat(((Number) searchResponse.getHits().getAt(0).sortValues()[0]).doubleValue(), closeTo(8572.0d, 10d)); assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).doubleValue(), closeTo(5286.0d, 10d)); assertThat(((Number) searchResponse.getHits().getAt(2).sortValues()[0]).doubleValue(), closeTo(1258.0d, 10d)); assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(0d, 10d)); // Order: Desc, Mode: min searchResponse = client().prepareSearch("test").setQuery(matchAllQuery()) .addSort(SortBuilders.geoDistanceSort("locations").point(40.7143528, -74.0059731).order(SortOrder.DESC).sortMode("min")) .execute().actionGet(); assertHitCount(searchResponse, 4); assertOrderedSearchHits(searchResponse, "4", "3", "2", "1"); assertThat(((Number) searchResponse.getHits().getAt(0).sortValues()[0]).doubleValue(), closeTo(2029.0d, 10d)); assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).doubleValue(), closeTo(1055.0d, 10d)); assertThat(((Number) searchResponse.getHits().getAt(2).sortValues()[0]).doubleValue(), closeTo(462.1d, 10d)); assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(0d, 10d)); searchResponse = client().prepareSearch("test").setQuery(matchAllQuery()) .addSort(SortBuilders.geoDistanceSort("locations").point(40.7143528, -74.0059731).sortMode("avg").order(SortOrder.ASC)) .execute().actionGet(); 
assertHitCount(searchResponse, 4); assertOrderedSearchHits(searchResponse, "1", "3", "2", "4"); assertThat(((Number) searchResponse.getHits().getAt(0).sortValues()[0]).doubleValue(), closeTo(0d, 10d)); assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).doubleValue(), closeTo(1157d, 10d)); assertThat(((Number) searchResponse.getHits().getAt(2).sortValues()[0]).doubleValue(), closeTo(2874d, 10d)); assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(5301d, 10d)); searchResponse = client().prepareSearch("test").setQuery(matchAllQuery()) .addSort(SortBuilders.geoDistanceSort("locations").point(40.7143528, -74.0059731).sortMode("avg").order(SortOrder.DESC)) .execute().actionGet(); assertHitCount(searchResponse, 4); assertOrderedSearchHits(searchResponse, "4", "2", "3", "1"); assertThat(((Number) searchResponse.getHits().getAt(0).sortValues()[0]).doubleValue(), closeTo(5301.0d, 10d)); assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).doubleValue(), closeTo(2874.0d, 10d)); assertThat(((Number) searchResponse.getHits().getAt(2).sortValues()[0]).doubleValue(), closeTo(1157.0d, 10d)); assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(0d, 10d)); assertFailures(client().prepareSearch("test").setQuery(matchAllQuery()) .addSort(SortBuilders.geoDistanceSort("locations").point(40.7143528, -74.0059731).sortMode("sum")), RestStatus.BAD_REQUEST, containsString("sort_mode [sum] isn't supported for sorting by geo distance")); } @Test // Regression bug: https://github.com/elasticsearch/elasticsearch/issues/2851 public void testDistanceSortingWithMissingGeoPoint() throws Exception { XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("locations").field("type", "geo_point").field("lat_lon", true) .startObject("fielddata").field("format", 
randomNumericFieldDataFormat()).endObject().endObject().endObject() .endObject().endObject(); assertAcked(prepareCreate("test").addMapping("type1", xContentBuilder)); ensureGreen(); client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject() .field("names", "Times Square", "Tribeca") .startArray("locations") // to NY: 5.286 km .startObject().field("lat", 40.759011).field("lon", -73.9844722).endObject() // to NY: 0.4621 km .startObject().field("lat", 40.718266).field("lon", -74.007819).endObject() .endArray() .endObject()).execute().actionGet(); client().prepareIndex("test", "type1", "2").setSource(jsonBuilder().startObject() .field("names", "Wall Street", "Soho") .endObject()).execute().actionGet(); refresh(); // Order: Asc SearchResponse searchResponse = client().prepareSearch("test").setQuery(matchAllQuery()) .addSort(SortBuilders.geoDistanceSort("locations").point(40.7143528, -74.0059731).order(SortOrder.ASC)) .execute().actionGet(); assertHitCount(searchResponse, 2); assertOrderedSearchHits(searchResponse, "1", "2"); assertThat(((Number) searchResponse.getHits().getAt(0).sortValues()[0]).doubleValue(), closeTo(462.1d, 10d)); assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).doubleValue(), equalTo(Double.MAX_VALUE)); // Order: Desc searchResponse = client().prepareSearch("test").setQuery(matchAllQuery()) .addSort(SortBuilders.geoDistanceSort("locations").point(40.7143528, -74.0059731).order(SortOrder.DESC)) .execute().actionGet(); // Doc with missing geo point is first, is consistent with 0.20.x assertHitCount(searchResponse, 2); assertOrderedSearchHits(searchResponse, "2", "1"); assertThat(((Number) searchResponse.getHits().getAt(0).sortValues()[0]).doubleValue(), equalTo(Double.MAX_VALUE)); assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).doubleValue(), closeTo(5286d, 10d)); } @Test public void distanceScriptTests() throws Exception { double source_lat = 32.798; double source_long = -117.151; 
double target_lat = 32.81; double target_long = -117.21; XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("location").field("type", "geo_point").field("lat_lon", true).endObject().endObject() .endObject().endObject(); assertAcked(prepareCreate("test").addMapping("type1", xContentBuilder)); ensureGreen(); client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject() .field("name", "TestPosition") .startObject("location").field("lat", source_lat).field("lon", source_long).endObject() .endObject()).execute().actionGet(); refresh(); SearchResponse searchResponse1 = client().prepareSearch().addField("_source") .addScriptField("distance", new Script("doc['location'].arcDistance(" + target_lat + "," + target_long + ")")).execute() .actionGet(); Double resultDistance1 = searchResponse1.getHits().getHits()[0].getFields().get("distance").getValue(); assertThat(resultDistance1, closeTo(GeoDistance.ARC.calculate(source_lat, source_long, target_lat, target_long, DistanceUnit.DEFAULT), 0.0001d)); SearchResponse searchResponse2 = client().prepareSearch().addField("_source") .addScriptField("distance", new Script("doc['location'].distance(" + target_lat + "," + target_long + ")")).execute() .actionGet(); Double resultDistance2 = searchResponse2.getHits().getHits()[0].getFields().get("distance").getValue(); assertThat(resultDistance2, closeTo(GeoDistance.PLANE.calculate(source_lat, source_long, target_lat, target_long, DistanceUnit.DEFAULT), 0.0001d)); SearchResponse searchResponse3 = client().prepareSearch().addField("_source") .addScriptField("distance", new Script("doc['location'].arcDistanceInKm(" + target_lat + "," + target_long + ")")) .execute().actionGet(); Double resultArcDistance3 = searchResponse3.getHits().getHits()[0].getFields().get("distance").getValue(); assertThat(resultArcDistance3, closeTo(GeoDistance.ARC.calculate(source_lat, source_long, target_lat, 
target_long, DistanceUnit.KILOMETERS), 0.0001d)); SearchResponse searchResponse4 = client().prepareSearch().addField("_source") .addScriptField("distance", new Script("doc['location'].distanceInKm(" + target_lat + "," + target_long + ")")).execute() .actionGet(); Double resultDistance4 = searchResponse4.getHits().getHits()[0].getFields().get("distance").getValue(); assertThat(resultDistance4, closeTo(GeoDistance.PLANE.calculate(source_lat, source_long, target_lat, target_long, DistanceUnit.KILOMETERS), 0.0001d)); SearchResponse searchResponse5 = client() .prepareSearch() .addField("_source") .addScriptField("distance", new Script("doc['location'].arcDistanceInKm(" + (target_lat) + "," + (target_long + 360) + ")")) .execute().actionGet(); Double resultArcDistance5 = searchResponse5.getHits().getHits()[0].getFields().get("distance").getValue(); assertThat(resultArcDistance5, closeTo(GeoDistance.ARC.calculate(source_lat, source_long, target_lat, target_long, DistanceUnit.KILOMETERS), 0.0001d)); SearchResponse searchResponse6 = client() .prepareSearch() .addField("_source") .addScriptField("distance", new Script("doc['location'].arcDistanceInKm(" + (target_lat + 360) + "," + (target_long) + ")")) .execute().actionGet(); Double resultArcDistance6 = searchResponse6.getHits().getHits()[0].getFields().get("distance").getValue(); assertThat(resultArcDistance6, closeTo(GeoDistance.ARC.calculate(source_lat, source_long, target_lat, target_long, DistanceUnit.KILOMETERS), 0.0001d)); SearchResponse searchResponse7 = client().prepareSearch().addField("_source") .addScriptField("distance", new Script("doc['location'].arcDistanceInMiles(" + target_lat + "," + target_long + ")")) .execute().actionGet(); Double resultDistance7 = searchResponse7.getHits().getHits()[0].getFields().get("distance").getValue(); assertThat(resultDistance7, closeTo(GeoDistance.ARC.calculate(source_lat, source_long, target_lat, target_long, DistanceUnit.MILES), 0.0001d)); SearchResponse searchResponse8 = 
client().prepareSearch().addField("_source") .addScriptField("distance", new Script("doc['location'].distanceInMiles(" + target_lat + "," + target_long + ")")) .execute().actionGet(); Double resultDistance8 = searchResponse8.getHits().getHits()[0].getFields().get("distance").getValue(); assertThat(resultDistance8, closeTo(GeoDistance.PLANE.calculate(source_lat, source_long, target_lat, target_long, DistanceUnit.MILES), 0.0001d)); } @Test public void testDistanceSortingNestedFields() throws Exception { XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("company") .startObject("properties") .startObject("name").field("type", "string").endObject() .startObject("branches") .field("type", "nested") .startObject("properties") .startObject("name").field("type", "string").endObject() .startObject("location").field("type", "geo_point").field("lat_lon", true) .startObject("fielddata").field("format", randomNumericFieldDataFormat()).endObject().endObject() .endObject() .endObject() .endObject() .endObject().endObject(); assertAcked(prepareCreate("companies").addMapping("company", xContentBuilder)); ensureGreen(); indexRandom(true, client().prepareIndex("companies", "company", "1").setSource(jsonBuilder().startObject() .field("name", "company 1") .startArray("branches") .startObject() .field("name", "New York") .startObject("location").field("lat", 40.7143528).field("lon", -74.0059731).endObject() .endObject() .endArray() .endObject()), client().prepareIndex("companies", "company", "2").setSource(jsonBuilder().startObject() .field("name", "company 2") .startArray("branches") .startObject() .field("name", "Times Square") .startObject("location").field("lat", 40.759011).field("lon", -73.9844722).endObject() // to NY: 5.286 km .endObject() .startObject() .field("name", "Tribeca") .startObject("location").field("lat", 40.718266).field("lon", -74.007819).endObject() // to NY: 0.4621 km .endObject() .endArray() .endObject()), 
client().prepareIndex("companies", "company", "3").setSource(jsonBuilder().startObject() .field("name", "company 3") .startArray("branches") .startObject() .field("name", "Wall Street") .startObject("location").field("lat", 40.7051157).field("lon", -74.0088305).endObject() // to NY: 1.055 km .endObject() .startObject() .field("name", "Soho") .startObject("location").field("lat", 40.7247222).field("lon", -74).endObject() // to NY: 1.258 km .endObject() .endArray() .endObject()), client().prepareIndex("companies", "company", "4").setSource(jsonBuilder().startObject() .field("name", "company 4") .startArray("branches") .startObject() .field("name", "Greenwich Village") .startObject("location").field("lat", 40.731033).field("lon", -73.9962255).endObject() // to NY: 2.029 km .endObject() .startObject() .field("name", "Brooklyn") .startObject("location").field("lat", 40.65).field("lon", -73.95).endObject() // to NY: 8.572 km .endObject() .endArray() .endObject())); // Order: Asc SearchResponse searchResponse = client().prepareSearch("companies").setQuery(matchAllQuery()) .addSort(SortBuilders.geoDistanceSort("branches.location").point(40.7143528, -74.0059731).order(SortOrder.ASC)) .execute().actionGet(); assertHitCount(searchResponse, 4); assertOrderedSearchHits(searchResponse, "1", "2", "3", "4"); assertThat(((Number) searchResponse.getHits().getAt(0).sortValues()[0]).doubleValue(), closeTo(0d, 10d)); assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).doubleValue(), closeTo(462.1d, 10d)); assertThat(((Number) searchResponse.getHits().getAt(2).sortValues()[0]).doubleValue(), closeTo(1055.0d, 10d)); assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(2029.0d, 10d)); // Order: Asc, Mode: max searchResponse = client().prepareSearch("companies").setQuery(matchAllQuery()) .addSort(SortBuilders.geoDistanceSort("branches.location").point(40.7143528, -74.0059731).order(SortOrder.ASC).sortMode("max")) 
.execute().actionGet(); assertHitCount(searchResponse, 4); assertOrderedSearchHits(searchResponse, "1", "3", "2", "4"); assertThat(((Number) searchResponse.getHits().getAt(0).sortValues()[0]).doubleValue(), closeTo(0d, 10d)); assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).doubleValue(), closeTo(1258.0d, 10d)); assertThat(((Number) searchResponse.getHits().getAt(2).sortValues()[0]).doubleValue(), closeTo(5286.0d, 10d)); assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(8572.0d, 10d)); // Order: Desc searchResponse = client().prepareSearch("companies").setQuery(matchAllQuery()) .addSort(SortBuilders.geoDistanceSort("branches.location").point(40.7143528, -74.0059731).order(SortOrder.DESC)) .execute().actionGet(); assertHitCount(searchResponse, 4); assertOrderedSearchHits(searchResponse, "4", "2", "3", "1"); assertThat(((Number) searchResponse.getHits().getAt(0).sortValues()[0]).doubleValue(), closeTo(8572.0d, 10d)); assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).doubleValue(), closeTo(5286.0d, 10d)); assertThat(((Number) searchResponse.getHits().getAt(2).sortValues()[0]).doubleValue(), closeTo(1258.0d, 10d)); assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(0d, 10d)); // Order: Desc, Mode: min searchResponse = client().prepareSearch("companies").setQuery(matchAllQuery()) .addSort(SortBuilders.geoDistanceSort("branches.location").point(40.7143528, -74.0059731).order(SortOrder.DESC).sortMode("min")) .execute().actionGet(); assertHitCount(searchResponse, 4); assertOrderedSearchHits(searchResponse, "4", "3", "2", "1"); assertThat(((Number) searchResponse.getHits().getAt(0).sortValues()[0]).doubleValue(), closeTo(2029.0d, 10d)); assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).doubleValue(), closeTo(1055.0d, 10d)); assertThat(((Number) searchResponse.getHits().getAt(2).sortValues()[0]).doubleValue(), 
closeTo(462.1d, 10d)); assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(0d, 10d)); searchResponse = client().prepareSearch("companies").setQuery(matchAllQuery()) .addSort(SortBuilders.geoDistanceSort("branches.location").point(40.7143528, -74.0059731).sortMode("avg").order(SortOrder.ASC)) .execute().actionGet(); assertHitCount(searchResponse, 4); assertOrderedSearchHits(searchResponse, "1", "3", "2", "4"); assertThat(((Number) searchResponse.getHits().getAt(0).sortValues()[0]).doubleValue(), closeTo(0d, 10d)); assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).doubleValue(), closeTo(1157.0d, 10d)); assertThat(((Number) searchResponse.getHits().getAt(2).sortValues()[0]).doubleValue(), closeTo(2874.0d, 10d)); assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(5301.0d, 10d)); searchResponse = client().prepareSearch("companies").setQuery(matchAllQuery()) .addSort( SortBuilders.geoDistanceSort("branches.location").setNestedPath("branches") .point(40.7143528, -74.0059731).sortMode("avg").order(SortOrder.DESC) ) .execute().actionGet(); assertHitCount(searchResponse, 4); assertOrderedSearchHits(searchResponse, "4", "2", "3", "1"); assertThat(((Number) searchResponse.getHits().getAt(0).sortValues()[0]).doubleValue(), closeTo(5301.0d, 10d)); assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).doubleValue(), closeTo(2874.0d, 10d)); assertThat(((Number) searchResponse.getHits().getAt(2).sortValues()[0]).doubleValue(), closeTo(1157.0d, 10d)); assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(0d, 10d)); searchResponse = client().prepareSearch("companies").setQuery(matchAllQuery()) .addSort( SortBuilders.geoDistanceSort("branches.location").setNestedFilter(termQuery("branches.name", "brooklyn")) .point(40.7143528, -74.0059731).sortMode("avg").order(SortOrder.ASC) ) .execute().actionGet(); 
assertHitCount(searchResponse, 4); assertFirstHit(searchResponse, hasId("4")); assertSearchHits(searchResponse, "1", "2", "3", "4"); assertThat(((Number) searchResponse.getHits().getAt(0).sortValues()[0]).doubleValue(), closeTo(8572.0d, 10d)); assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).doubleValue(), equalTo(Double.MAX_VALUE)); assertThat(((Number) searchResponse.getHits().getAt(2).sortValues()[0]).doubleValue(), equalTo(Double.MAX_VALUE)); assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), equalTo(Double.MAX_VALUE)); assertFailures(client().prepareSearch("companies").setQuery(matchAllQuery()) .addSort(SortBuilders.geoDistanceSort("branches.location").point(40.7143528, -74.0059731).sortMode("sum")), RestStatus.BAD_REQUEST, containsString("sort_mode [sum] isn't supported for sorting by geo distance")); } /** * Issue 3073 */ @Test public void testGeoDistanceFilter() throws IOException { double lat = 40.720611; double lon = -73.998776; XContentBuilder mapping = JsonXContent.contentBuilder() .startObject() .startObject("location") .startObject("properties") .startObject("pin") .field("type", "geo_point") .field("geohash", true) .field("geohash_precision", 24) .field("lat_lon", true) .startObject("fielddata") .field("format", randomNumericFieldDataFormat()) .endObject() .endObject() .endObject() .endObject() .endObject(); XContentBuilder source = JsonXContent.contentBuilder() .startObject() .field("pin", GeoHashUtils.encode(lat, lon)) .endObject(); assertAcked(prepareCreate("locations").addMapping("location", mapping)); client().prepareIndex("locations", "location", "1").setCreate(true).setSource(source).execute().actionGet(); refresh(); client().prepareGet("locations", "location", "1").execute().actionGet(); SearchResponse result = client().prepareSearch("locations") .setQuery(QueryBuilders.matchAllQuery()) .setPostFilter(QueryBuilders.geoDistanceQuery("pin") .geoDistance(GeoDistance.ARC) 
.lat(lat).lon(lon) .distance("1m")) .execute().actionGet(); assertHitCount(result, 1); } private double randomLon() { return randomDouble() * 360 - 180; } private double randomLat() { return randomDouble() * 180 - 90; } public void testDuelOptimizations() throws Exception { assertAcked(prepareCreate("index").addMapping("type", "location", "type=geo_point,lat_lon=true")); final int numDocs = scaledRandomIntBetween(3000, 10000); List<IndexRequestBuilder> docs = new ArrayList<>(); for (int i = 0; i < numDocs; ++i) { docs.add(client().prepareIndex("index", "type").setSource(jsonBuilder().startObject().startObject("location").field("lat", randomLat()).field("lon", randomLon()).endObject().endObject())); } indexRandom(true, docs); ensureSearchable(); for (int i = 0; i < 10; ++i) { final double originLat = randomLat(); final double originLon = randomLon(); final String distance = DistanceUnit.KILOMETERS.toString(randomInt(10000)); for (GeoDistance geoDistance : Arrays.asList(GeoDistance.ARC, GeoDistance.SLOPPY_ARC)) { logger.info("Now testing GeoDistance={}, distance={}, origin=({}, {})", geoDistance, distance, originLat, originLon); long matches = -1; for (String optimizeBbox : Arrays.asList("none", "memory", "indexed")) { SearchResponse resp = client().prepareSearch("index").setSize(0).setQuery(QueryBuilders.constantScoreQuery( QueryBuilders.geoDistanceQuery("location").point(originLat, originLon).distance(distance).geoDistance(geoDistance).optimizeBbox(optimizeBbox))).execute().actionGet(); assertSearchResponse(resp); logger.info("{} -> {} hits", optimizeBbox, resp.getHits().totalHits()); if (matches < 0) { matches = resp.getHits().totalHits(); } else { assertEquals(matches, resp.getHits().totalHits()); } } } } } }
/* * This file is part of Sponge, licensed under the MIT License (MIT). * * Copyright (c) SpongePowered <https://www.spongepowered.org> * Copyright (c) contributors * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/ package org.spongepowered.test.volumestream; import com.google.common.collect.ImmutableList; import com.google.inject.Inject; import io.leangen.geantyref.TypeToken; import net.kyori.adventure.identity.Identity; import net.kyori.adventure.text.Component; import net.kyori.adventure.text.TextComponent; import net.kyori.adventure.text.event.HoverEvent; import net.kyori.adventure.text.format.NamedTextColor; import net.kyori.adventure.text.format.TextColor; import org.apache.logging.log4j.Level; import org.apache.logging.log4j.Logger; import org.checkerframework.checker.nullness.qual.NonNull; import org.spongepowered.api.Server; import org.spongepowered.api.Sponge; import org.spongepowered.api.block.BlockSnapshot; import org.spongepowered.api.command.Command; import org.spongepowered.api.command.CommandResult; import org.spongepowered.api.command.exception.CommandException; import org.spongepowered.api.command.parameter.CommandContext; import org.spongepowered.api.command.parameter.Parameter; import org.spongepowered.api.command.parameter.managed.standard.VariableValueParameters; import org.spongepowered.api.config.ConfigDir; import org.spongepowered.api.data.persistence.DataContainer; import org.spongepowered.api.data.persistence.DataFormats; import org.spongepowered.api.entity.living.player.Player; import org.spongepowered.api.entity.living.player.server.ServerPlayer; import org.spongepowered.api.event.Cancellable; import org.spongepowered.api.event.CauseStackManager; import org.spongepowered.api.event.EventContextKeys; import org.spongepowered.api.event.Listener; import org.spongepowered.api.event.block.InteractBlockEvent; import org.spongepowered.api.event.cause.entity.SpawnTypes; import org.spongepowered.api.event.filter.cause.Root; import org.spongepowered.api.event.lifecycle.RegisterCommandEvent; import org.spongepowered.api.event.lifecycle.StoppingEngineEvent; import org.spongepowered.api.item.ItemTypes; import org.spongepowered.api.registry.RegistryTypes; 
import org.spongepowered.api.util.rotation.Rotation; import org.spongepowered.api.util.transformation.Transformation; import org.spongepowered.api.world.biome.Biome; import org.spongepowered.api.world.schematic.Schematic; import org.spongepowered.api.world.volume.archetype.ArchetypeVolume; import org.spongepowered.api.world.volume.stream.StreamOptions; import org.spongepowered.api.world.volume.stream.VolumeApplicators; import org.spongepowered.api.world.volume.stream.VolumeCollectors; import org.spongepowered.api.world.volume.stream.VolumePositionTranslators; import org.spongepowered.math.vector.Vector3i; import org.spongepowered.plugin.PluginContainer; import org.spongepowered.plugin.builtin.jvm.Plugin; import org.spongepowered.test.LoadableModule; import java.io.IOException; import java.io.PrintWriter; import java.io.StringWriter; import java.nio.file.Files; import java.nio.file.Path; import java.util.HashMap; import java.util.Map; import java.util.UUID; import java.util.zip.GZIPInputStream; import java.util.zip.GZIPOutputStream; @Plugin("volumestreamtest") public final class VolumeStreamTest implements LoadableModule { private static final String FILE_ENDING = ".schem"; public static final TextColor SAVE = TextColor.color(0x856C); private static final TextColor GREEN = TextColor.color(0x6CA9FF); @Inject private PluginContainer plugin; @Inject private Logger logger; @Inject @ConfigDir(sharedRoot = false) private Path config; private Path schematicsDir; private final CopyPastaListener listener = new CopyPastaListener(); private static final Map<UUID, PlayerData> player_data = new HashMap<>(); private static PlayerData get(final Player pl) { PlayerData data = VolumeStreamTest.player_data.get(pl.uniqueId()); if (data == null) { data = new PlayerData(pl.uniqueId()); VolumeStreamTest.player_data.put(pl.uniqueId(), data); } return data; } @Override public void enable(final CommandContext ctx) { Sponge.eventManager().registerListeners(this.plugin, this.listener); } 
@Listener public void onShutdown(final StoppingEngineEvent<@NonNull Server> serverShutdown) { this.logger.log(Level.ERROR, "Clearing player clipboards"); VolumeStreamTest.player_data.clear(); } @Listener public void onGamePreInitialization(final RegisterCommandEvent<Command.Parameterized> event) throws IOException, CommandException { this.schematicsDir = this.config.resolve("schematics"); Files.createDirectories(this.config); final Parameter.Value<Biome> biomeKey = Parameter.registryElement( TypeToken.get(Biome.class), ImmutableList.of( VariableValueParameters.RegistryEntryBuilder.WORLD_FROM_LOCATABLE_HOLDER_PROVIDER , VariableValueParameters.RegistryEntryBuilder.WORLD_FROM_CAUSE_HOLDER_PROVIDER ), RegistryTypes.BIOME, "minecraft" ).key("format") .build(); event.register( this.plugin, Command.builder() .shortDescription(Component.text("Sets the biome in a selected region")) .permission(this.plugin.metadata().id() + ".command.setbiome") .addParameter(biomeKey) .executor(src -> { if (!(src.cause().root() instanceof ServerPlayer)) { src.sendMessage(Identity.nil(), Component.text("Player only.", NamedTextColor.RED)); return CommandResult.success(); } final ServerPlayer player = (ServerPlayer) src.cause().root(); final PlayerData data = VolumeStreamTest.get(player); if (data.getPos1() == null || data.getPos2() == null) { player.sendMessage( Identity.nil(), Component.text("You must set both positions before copying", NamedTextColor.RED) ); return CommandResult.success(); } final Vector3i min = data.getPos1().min(data.getPos2()); final Vector3i max = data.getPos1().max(data.getPos2()); final Biome target = src.requireOne(biomeKey); player.world().biomeStream(min, max, StreamOptions.forceLoadedAndCopied()) .map((world, biome, x, y, z) -> target) .apply(VolumeCollectors.of( player.world(), VolumePositionTranslators.identity(), VolumeApplicators.applyBiomes() )); return CommandResult.success(); }) .build(), "setBiome" ); event.register( this.plugin, Command.builder() 
.shortDescription(Component.text("Copies a region of the world to your clipboard")) .permission(this.plugin.metadata().id() + ".command.copy") .executor(src -> { if (!(src.cause().root() instanceof Player)) { src.sendMessage(Identity.nil(), Component.text("Player only.", NamedTextColor.RED)); return CommandResult.success(); } final Player player = (Player) src.cause().root(); final PlayerData data = VolumeStreamTest.get(player); if (data.getPos1() == null || data.getPos2() == null) { player.sendMessage( Identity.nil(), Component.text("You must set both positions before copying", NamedTextColor.RED) ); return CommandResult.success(); } final Vector3i min = data.getPos1().min(data.getPos2()); final Vector3i max = data.getPos1().max(data.getPos2()); data.setOrigin(player.blockPosition()); final ArchetypeVolume archetypeVolume = player.world().createArchetypeVolume( min, max, player.blockPosition()); data.setClipboard(archetypeVolume); player.sendMessage(Identity.nil(), Component.text("Saved to clipboard.", VolumeStreamTest.GREEN)); return CommandResult.success(); }).build(), "copy" ); event.register( this.plugin, Command.builder() .shortDescription(Component.text("Pastes your clipboard to where you are standing")) .permission(this.plugin.metadata().id() + ".command.paste") .executor(src -> { if (!(src.cause().root() instanceof ServerPlayer)) { src.sendMessage(Identity.nil(), Component.text("Player only.", NamedTextColor.RED)); return CommandResult.success(); } final ServerPlayer player = (ServerPlayer) src.cause().root(); final PlayerData data = VolumeStreamTest.get(player); final ArchetypeVolume volume = data.getClipboard(); if (volume == null) { player.sendMessage( Identity.nil(), Component.text("You must copy something before pasting", NamedTextColor.RED) ); return CommandResult.success(); } try (final CauseStackManager.StackFrame frame = Sponge.server().causeStackManager().pushCauseFrame()) { frame.pushCause(this.plugin); volume.applyToWorld(player.world(), 
player.blockPosition(), SpawnTypes.PLACEMENT::get); } src.sendMessage( Identity.nil(), Component.text("Pasted clipboard into world.", VolumeStreamTest.GREEN)); return CommandResult.success(); }).build(), "paste" ); final Parameter.Value<String> fileName = Parameter.string().key("fileName").build(); event.register( this.plugin, Command.builder() .shortDescription(Component.text("Pastes your clipboard to where you are standing")) .permission(this.plugin.metadata().id() + ".command.paste") .addParameter(fileName) .executor(src -> { if (!(src.cause().root() instanceof ServerPlayer)) { src.sendMessage(Identity.nil(), Component.text("Player only.", NamedTextColor.RED)); return CommandResult.success(); } final String file = src.requireOne(fileName); final Path desiredFilePath = this.schematicsDir.resolve(file + VolumeStreamTest.FILE_ENDING); if (Files.exists(desiredFilePath)) { throw new CommandException(Component.text(file + " already exists, please delete the file first", NamedTextColor.RED)); } if (Files.isDirectory(desiredFilePath)) { throw new CommandException(Component.text(file + "is a directory, please use a file name", NamedTextColor.RED)); } final ServerPlayer player = (ServerPlayer) src.cause().root(); final PlayerData data = VolumeStreamTest.get(player); final ArchetypeVolume volume = data.getClipboard(); if (volume == null) { player.sendMessage( Identity.nil(), Component.text("You must copy something before pasting", NamedTextColor.RED) ); return CommandResult.success(); } final Schematic schematic = Schematic.builder() .volume(data.getClipboard()) .metaValue(Schematic.METADATA_AUTHOR, player.name()) .metaValue(Schematic.METADATA_NAME, file) .build(); final DataContainer schematicData = Sponge.dataManager().translator(Schematic.class) .orElseThrow( () -> new IllegalStateException("Sponge doesn't have a DataTranslator for Schematics!")) .translate(schematic); try { final Path output = Files.createFile(desiredFilePath); DataFormats.NBT.get().writeTo( new 
GZIPOutputStream(Files.newOutputStream(output)), schematicData); player.sendMessage( Identity.nil(), Component.text("Saved schematic to " + output.toAbsolutePath(), VolumeStreamTest.SAVE) ); } catch (final Exception e) { e.printStackTrace(); final StringWriter writer = new StringWriter(); e.printStackTrace(new PrintWriter(writer)); final Component errorText = Component.text(writer.toString().replace("\t", " ") .replace("\r\n", "\n") .replace("\r", "\n") ); final TextComponent text = Component.text( "Error saving schematic: " + e.getMessage(), NamedTextColor.RED) .hoverEvent(HoverEvent.showText(errorText)); return CommandResult.builder() .error(text).build(); } return CommandResult.success(); }).build(), "save" ); event.register( this.plugin, Command.builder() .shortDescription(Component.text("Load a schematic from file")) .permission(this.plugin.metadata().id() + ".command.load") .addParameter(fileName) .executor(src -> { if (!(src.cause().root() instanceof ServerPlayer)) { src.sendMessage(Identity.nil(), Component.text("Player only.", NamedTextColor.RED)); return CommandResult.success(); } final ServerPlayer player = (ServerPlayer) src.cause().root(); final String file = src.requireOne(fileName); final Path desiredFilePath = this.schematicsDir.resolve(file + VolumeStreamTest.FILE_ENDING); if (!Files.isRegularFile(desiredFilePath)) { throw new CommandException(Component.text("File " + file + " was not a normal schemaic file")); } final Schematic schematic; final DataContainer schematicContainer; try (final GZIPInputStream stream = new GZIPInputStream(Files.newInputStream(desiredFilePath))) { schematicContainer = DataFormats.NBT.get().readFrom(stream); } catch (IOException e) { e.printStackTrace(); final StringWriter writer = new StringWriter(); e.printStackTrace(new PrintWriter(writer)); final Component errorText = Component.text(writer.toString().replace("\t", " ") .replace("\r\n", "\n") .replace("\r", "\n") ); final TextComponent text = Component.text( "Error 
loading schematic: " + e.getMessage(), NamedTextColor.RED) .hoverEvent(HoverEvent.showText(errorText)); return CommandResult.builder() .error(text).build(); } schematic = Sponge.dataManager().translator(Schematic.class) .orElseThrow(() -> new IllegalStateException("Expected a DataTranslator for a Schematic")) .translate(schematicContainer); src.sendMessage(Identity.nil(), Component.text("Loaded schematic from " + file, TextColor.color(0x003434))); final PlayerData data = VolumeStreamTest.get(player); data.setClipboard(schematic); data.setOrigin(player.blockPosition()); return CommandResult.success(); }) .build(), "load" ); final Parameter.Value<Rotation> rotation = Parameter.registryElement( TypeToken.get(Rotation.class), RegistryTypes.ROTATION) .key("rotation") .build(); event.register(this.plugin, Command .builder() .shortDescription(Component.text("Rotate clipboard")) .permission(this.plugin.metadata().id() + ".command.rotate") .addParameter(rotation) .executor(src -> { if (!(src.cause().root() instanceof ServerPlayer)) { src.sendMessage(Identity.nil(), Component.text("Player only.", NamedTextColor.RED)); return CommandResult.success(); } final ServerPlayer player = (ServerPlayer) src.cause().root(); final Rotation desiredRotation = src.requireOne(rotation); final Schematic schematic; final PlayerData data = VolumeStreamTest.get(player); if (data.clipboard == null) { throw new CommandException(Component.text("Load a clipboard first before trying to rotate it")); } final ArchetypeVolume newClipboard = data.clipboard.transform(Transformation.builder() .origin(data.clipboard.min().toDouble().add(data.clipboard.size().toDouble().div(2))) .rotate(desiredRotation) .build()); src.sendMessage(Identity.nil(), Component.text("Rotated clipboard " + desiredRotation.angle().degrees() + " degrees")); data.setClipboard(newClipboard); return CommandResult.success(); }) .build(), "rotate" ); } public static class CopyPastaListener { public static final TextColor TEAL = 
TextColor.color(0x008080); @Listener public void onLeftClick(final InteractBlockEvent.Primary event, @Root final Player player) { event.context().get(EventContextKeys.USED_ITEM).ifPresent(snapshot -> { final BlockSnapshot block = event.block(); if (snapshot.type().equals(ItemTypes.WOODEN_AXE.get()) && block != BlockSnapshot.empty()) { VolumeStreamTest.get(player).setPos1(block.position()); player.sendMessage( Component.text("Position 1 set to " + block.position(), CopyPastaListener.TEAL)); if (event instanceof Cancellable) { ((Cancellable) event).setCancelled(true); } } }); } @Listener public void onInteract(final InteractBlockEvent.Secondary event, @Root final Player player) { event.context().get(EventContextKeys.USED_ITEM).ifPresent(snapshot -> { final BlockSnapshot block = event.block(); if (snapshot.type().equals(ItemTypes.WOODEN_AXE.get()) && block != BlockSnapshot.empty()) { VolumeStreamTest.get(player).setPos2(block.position()); player.sendMessage( Component.text("Position 2 set to " + block.position(), CopyPastaListener.TEAL)); event.setCancelled(true); } }); } } public static class PlayerData { private final UUID uid; private Vector3i pos1; private Vector3i pos2; private Vector3i origin; private ArchetypeVolume clipboard; public PlayerData(final UUID uid) { this.uid = uid; } public UUID getUid() { return this.uid; } public Vector3i getPos1() { return this.pos1; } public void setPos1(final Vector3i pos) { this.pos1 = pos; } public Vector3i getPos2() { return this.pos2; } public void setPos2(final Vector3i pos) { this.pos2 = pos; } public ArchetypeVolume getClipboard() { return this.clipboard; } public void setClipboard(final ArchetypeVolume volume) { this.clipboard = volume; } public Vector3i getOrigin() { return this.origin; } public void setOrigin(final Vector3i origin) { this.origin = origin; } } }
/**
 * $RCSfile$
 * $Revision: $
 * $Date: $
 *
 * Copyright (C) 2005-2008 Jive Software. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.jivesoftware.openfire.sip.sipaccount;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

import org.jivesoftware.database.DbConnectionManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Database persistence for the SipAccount class: CRUD operations and paged
 * listing over the {@code ofSipUser} table.
 *
 * @author Thiago Rocha Camargo
 */
public class SipAccountDAO {

    private static final Logger Log = LoggerFactory.getLogger(SipAccountDAO.class);

    /**
     * Loads the SIP account associated with an XMPP username.
     *
     * @param username the XMPP username owning the SIP account
     * @return the stored account, or {@code null} if none exists or a database
     *         error occurred (the error is logged, not rethrown)
     */
    public static SipAccount getAccountByUser(String username) {
        String sql = "SELECT username, sipusername, sipauthuser, sipdisplayname, sippassword, sipserver, enabled, "
                + "status, stunserver, stunport, usestun, voicemail, outboundproxy, promptCredentials FROM ofSipUser "
                + "WHERE username = ? ";
        SipAccount sipAccount = null;
        Connection con = null;
        PreparedStatement psmt = null;
        ResultSet rs = null;
        try {
            con = DbConnectionManager.getConnection();
            psmt = con.prepareStatement(sql);
            psmt.setString(1, username);
            rs = psmt.executeQuery();
            if (rs.next()) {
                sipAccount = read(rs);
            }
        } catch (SQLException e) {
            Log.error(e.getMessage(), e);
        } finally {
            DbConnectionManager.closeConnection(rs, psmt, con);
        }
        return sipAccount;
    }

    /**
     * Materializes a SipAccount from the current row of a result set produced
     * by one of the SELECTs in this class.
     *
     * @param rs a result set positioned on a valid ofSipUser row
     * @return the populated account, or {@code null} if reading the row failed
     *         (the error is logged, not rethrown)
     */
    private static SipAccount read(ResultSet rs) {
        SipAccount sipAccount = null;
        try {
            String username = rs.getString("username");
            String sipusername = rs.getString("sipusername");
            String authusername = rs.getString("sipauthuser");
            String displayname = rs.getString("sipdisplayname");
            String password = rs.getString("sippassword");
            String server = rs.getString("sipserver");
            String stunServer = rs.getString("stunserver");
            String stunPort = rs.getString("stunport");
            // Boolean flags are persisted as 0/1 integers.
            boolean useStun = rs.getInt("usestun") == 1;
            boolean enabled = rs.getInt("enabled") == 1;
            String voicemail = rs.getString("voicemail");
            String outboundproxy = rs.getString("outboundproxy");
            boolean promptCredentials = rs.getInt("promptCredentials") == 1;
            // NOTE(review): throws IllegalArgumentException/NPE if the stored status
            // is not a valid SipRegisterStatus name; caught below as a null result only
            // for SQLException — presumably the column is always a valid enum name.
            SipRegisterStatus status = SipRegisterStatus.valueOf(rs.getString("status"));

            sipAccount = new SipAccount(username);
            sipAccount.setSipUsername(sipusername);
            sipAccount.setAuthUsername(authusername);
            sipAccount.setDisplayName(displayname);
            sipAccount.setPassword(password);
            sipAccount.setServer(server);
            sipAccount.setEnabled(enabled);
            sipAccount.setStatus(status);
            sipAccount.setStunServer(stunServer);
            sipAccount.setStunPort(stunPort);
            sipAccount.setUseStun(useStun);
            sipAccount.setVoiceMailNumber(voicemail);
            sipAccount.setOutboundproxy(outboundproxy);
            sipAccount.setPromptCredentials(promptCredentials);
        } catch (SQLException e) {
            Log.error(e.getMessage(), e);
        }
        return sipAccount;
    }

    /**
     * Inserts a new SIP account row.
     *
     * @param sipAccount the account to persist
     * @throws SQLException if the insert fails; the original exception (with
     *         cause, SQLState and error code) is rethrown after being logged
     */
    public static void insert(SipAccount sipAccount) throws SQLException {
        String sql = "INSERT INTO ofSipUser (username, sipusername, sipauthuser, sipdisplayname, sippassword, sipserver, enabled, status, stunserver, stunport, usestun, voicemail, outboundproxy, promptCredentials ) "
                + " values ( ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)";
        Connection con = null;
        PreparedStatement psmt = null;
        try {
            con = DbConnectionManager.getConnection();
            psmt = con.prepareStatement(sql);
            psmt.setString(1, sipAccount.getUsername());
            psmt.setString(2, sipAccount.getSipUsername());
            psmt.setString(3, sipAccount.getAuthUsername());
            psmt.setString(4, sipAccount.getDisplayName());
            psmt.setString(5, sipAccount.getPassword());
            psmt.setString(6, sipAccount.getServer());
            psmt.setInt(7, sipAccount.isEnabled() ? 1 : 0);
            psmt.setString(8, sipAccount.getStatus().name());
            psmt.setString(9, sipAccount.getStunServer());
            psmt.setString(10, sipAccount.getStunPort());
            psmt.setInt(11, sipAccount.isUseStun() ? 1 : 0);
            psmt.setString(12, sipAccount.getVoiceMailNumber());
            psmt.setString(13, sipAccount.getOutboundproxy());
            psmt.setInt(14, sipAccount.isPromptCredentials() ? 1 : 0);
            psmt.executeUpdate();
        } catch (SQLException e) {
            Log.error(e.getMessage(), e);
            // Rethrow the original exception instead of wrapping only its message,
            // so the cause, SQLState and vendor error code are preserved.
            throw e;
        } finally {
            DbConnectionManager.closeConnection(psmt, con);
        }
    }

    /**
     * Updates an existing SIP account row, keyed by username.
     *
     * @param sipAccount the account whose stored row should be updated
     * @throws SQLException if the update fails; the original exception is
     *         rethrown after being logged
     */
    public static void update(SipAccount sipAccount) throws SQLException {
        String sql = "UPDATE ofSipUser SET sipusername = ?, sipauthuser = ?, sipdisplayname = ?, sippassword = ?, sipserver = ?, enabled = ?, status = ?, stunserver = ?, stunport = ?, usestun = ?, voicemail= ?, outboundproxy = ?, promptCredentials = ? "
                + " WHERE username = ?";
        Connection con = null;
        PreparedStatement psmt = null;
        try {
            con = DbConnectionManager.getConnection();
            psmt = con.prepareStatement(sql);
            psmt.setString(1, sipAccount.getSipUsername());
            psmt.setString(2, sipAccount.getAuthUsername());
            psmt.setString(3, sipAccount.getDisplayName());
            psmt.setString(4, sipAccount.getPassword());
            psmt.setString(5, sipAccount.getServer());
            psmt.setInt(6, sipAccount.isEnabled() ? 1 : 0);
            psmt.setString(7, sipAccount.getStatus().name());
            psmt.setString(8, sipAccount.getStunServer());
            psmt.setString(9, sipAccount.getStunPort());
            psmt.setInt(10, sipAccount.isUseStun() ? 1 : 0);
            psmt.setString(11, sipAccount.getVoiceMailNumber());
            psmt.setString(12, sipAccount.getOutboundproxy());
            psmt.setInt(13, sipAccount.isPromptCredentials() ? 1 : 0);
            psmt.setString(14, sipAccount.getUsername());
            psmt.executeUpdate();
        } catch (SQLException e) {
            Log.error(e.getMessage(), e);
            // Preserve cause/SQLState instead of wrapping only the message.
            throw e;
        } finally {
            DbConnectionManager.closeConnection(psmt, con);
        }
    }

    /**
     * Deletes the SIP account row for the given account's username.
     * Errors are logged and swallowed, matching the read methods.
     *
     * @param sipAccount the account to delete
     */
    public static void remove(SipAccount sipAccount) {
        String sql = "DELETE FROM ofSipUser WHERE username = ?";
        Connection con = null;
        PreparedStatement psmt = null;
        try {
            con = DbConnectionManager.getConnection();
            psmt = con.prepareStatement(sql);
            psmt.setString(1, sipAccount.getUsername());
            psmt.executeUpdate();
            // No explicit psmt.close() here: the finally block closes it exactly once.
        } catch (SQLException e) {
            Log.error(e.getMessage(), e);
        } finally {
            DbConnectionManager.closeConnection(psmt, con);
        }
    }

    /**
     * Returns a page of stored SIP accounts ordered by username.
     *
     * @param startIndex index of the first account to return (0-based)
     * @param numResults maximum number of accounts to return
     * @return up to {@code numResults} accounts; empty on database error
     */
    public static Collection<SipAccount> getUsers(int startIndex, int numResults) {
        String sql = "SELECT username, sipusername, sipauthuser, sipdisplayname, sippassword, sipserver, enabled, status, stunserver, stunport, usestun, voicemail, outboundproxy, promptCredentials FROM ofSipUser "
                + " ORDER BY USERNAME";
        List<SipAccount> sipAccounts = new ArrayList<SipAccount>(numResults);
        Connection con = null;
        PreparedStatement pstmt = null;
        ResultSet rs = null;
        try {
            con = DbConnectionManager.getConnection();
            pstmt = DbConnectionManager.createScrollablePreparedStatement(con, sql);
            rs = pstmt.executeQuery();
            // Hint the driver at how many rows we will actually touch, then skip
            // to the start of the requested page.
            DbConnectionManager.setFetchSize(rs, startIndex + numResults);
            DbConnectionManager.scrollResultSet(rs, startIndex);
            int count = 0;
            while (rs.next() && count < numResults) {
                sipAccounts.add(read(rs));
                count++;
            }
        } catch (SQLException e) {
            Log.error(e.getMessage(), e);
        } finally {
            // Close all three resources even when executeQuery/scroll throws;
            // the previous code leaked the ResultSet on error paths.
            DbConnectionManager.closeConnection(rs, pstmt, con);
        }
        return sipAccounts;
    }

    /**
     * Counts the stored SIP accounts.
     *
     * @return the number of rows in ofSipUser, or 0 on database error
     */
    public static int getUserCount() {
        int count = 0;
        String sql = "SELECT count(*) FROM ofSipUser";
        Connection con = null;
        PreparedStatement pstmt = null;
        ResultSet rs = null;
        try {
            con = DbConnectionManager.getConnection();
            pstmt = con.prepareStatement(sql);
            rs = pstmt.executeQuery();
            if (rs.next()) {
                count = rs.getInt(1);
            }
        } catch (SQLException e) {
            Log.error(e.getMessage(), e);
        } finally {
            DbConnectionManager.closeConnection(rs, pstmt, con);
        }
        return count;
    }
}
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.athena.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.AmazonWebServiceRequest;

/**
 * Request object for the Athena <code>UpdateDataCatalog</code> operation.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/athena-2017-05-18/UpdateDataCatalog" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class UpdateDataCatalogRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /**
     * <p>
     * The name of the data catalog to update. Must be unique for the Amazon Web Services account and may use at most
     * 127 alphanumeric, underscore, at sign, or hyphen characters; the remainder of the 256-character length
     * constraint is reserved for use by Athena.
     * </p>
     */
    private String name;
    /**
     * <p>
     * The type of data catalog to update: <code>LAMBDA</code> for a federated catalog, <code>HIVE</code> for an
     * external hive metastore, or <code>GLUE</code> for a Glue Data Catalog.
     * </p>
     */
    private String type;
    /**
     * <p>
     * New or modified text that describes the data catalog.
     * </p>
     */
    private String description;
    /**
     * <p>
     * The Lambda function or functions to use for updating the data catalog, as a mapping whose keys depend on the
     * catalog type. For <code>HIVE</code>: <code>metadata-function=<i>lambda_arn</i>,
     * sdk-version=<i>version_number</i></code> (sdk-version optional). For <code>LAMBDA</code>: either
     * <code>metadata-function=<i>lambda_arn</i>, record-function=<i>lambda_arn</i></code> (both required) or a single
     * composite <code>function=<i>lambda_arn</i></code>.
     * </p>
     */
    private java.util.Map<String, String> parameters;

    /**
     * Sets the name of the data catalog to update.
     *
     * @param name
     *        The catalog name; unique per account, at most 127 alphanumeric, underscore, at sign, or hyphen
     *        characters.
     */
    public void setName(String name) {
        this.name = name;
    }

    /**
     * @return The name of the data catalog to update.
     */
    public String getName() {
        return this.name;
    }

    /**
     * Fluent variant of {@link #setName(String)}.
     *
     * @param name
     *        The catalog name; unique per account, at most 127 alphanumeric, underscore, at sign, or hyphen
     *        characters.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateDataCatalogRequest withName(String name) {
        setName(name);
        return this;
    }

    /**
     * Sets the type of data catalog to update.
     *
     * @param type
     *        <code>LAMBDA</code>, <code>HIVE</code>, or <code>GLUE</code>.
     * @see DataCatalogType
     */
    public void setType(String type) {
        this.type = type;
    }

    /**
     * @return The type of data catalog to update: <code>LAMBDA</code>, <code>HIVE</code>, or <code>GLUE</code>.
     * @see DataCatalogType
     */
    public String getType() {
        return this.type;
    }

    /**
     * Fluent variant of {@link #setType(String)}.
     *
     * @param type
     *        <code>LAMBDA</code>, <code>HIVE</code>, or <code>GLUE</code>.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see DataCatalogType
     */
    public UpdateDataCatalogRequest withType(String type) {
        setType(type);
        return this;
    }

    /**
     * Fluent, enum-typed variant of {@link #setType(String)}.
     *
     * @param type
     *        The catalog type; stored as {@code type.toString()}.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see DataCatalogType
     */
    public UpdateDataCatalogRequest withType(DataCatalogType type) {
        this.type = type.toString();
        return this;
    }

    /**
     * Sets new or modified text that describes the data catalog.
     *
     * @param description
     *        New or modified text that describes the data catalog.
     */
    public void setDescription(String description) {
        this.description = description;
    }

    /**
     * @return New or modified text that describes the data catalog.
     */
    public String getDescription() {
        return this.description;
    }

    /**
     * Fluent variant of {@link #setDescription(String)}.
     *
     * @param description
     *        New or modified text that describes the data catalog.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateDataCatalogRequest withDescription(String description) {
        setDescription(description);
        return this;
    }

    /**
     * @return The Lambda function mapping used to update the data catalog; keys depend on the catalog type (see the
     *         field documentation for the required <code>HIVE</code> and <code>LAMBDA</code> syntaxes). May be
     *         {@code null} if never set.
     */
    public java.util.Map<String, String> getParameters() {
        return parameters;
    }

    /**
     * Sets the Lambda function mapping used to update the data catalog.
     *
     * @param parameters
     *        Mapping whose keys depend on the catalog type: for <code>HIVE</code>,
     *        <code>metadata-function</code> (required) and <code>sdk-version</code> (optional); for
     *        <code>LAMBDA</code>, either both <code>metadata-function</code> and <code>record-function</code>, or a
     *        single composite <code>function</code> entry.
     */
    public void setParameters(java.util.Map<String, String> parameters) {
        this.parameters = parameters;
    }

    /**
     * Fluent variant of {@link #setParameters(java.util.Map)}.
     *
     * @param parameters
     *        Mapping whose keys depend on the catalog type; see {@link #setParameters(java.util.Map)}.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateDataCatalogRequest withParameters(java.util.Map<String, String> parameters) {
        setParameters(parameters);
        return this;
    }

    /**
     * Add a single Parameters entry. Lazily creates the backing map.
     *
     * @see UpdateDataCatalogRequest#withParameters
     * @return Returns a reference to this object so that method calls can be chained together.
     * @throws IllegalArgumentException if {@code key} is already present.
     */
    public UpdateDataCatalogRequest addParametersEntry(String key, String value) {
        if (null == this.parameters) {
            this.parameters = new java.util.HashMap<String, String>();
        }
        if (this.parameters.containsKey(key))
            throw new IllegalArgumentException("Duplicated keys (" + key + ") are provided.");
        this.parameters.put(key, value);
        return this;
    }

    /**
     * Removes all the entries added into Parameters.
     *
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateDataCatalogRequest clearParametersEntries() {
        this.parameters = null;
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getName() != null)
            sb.append("Name: ").append(getName()).append(",");
        if (getType() != null)
            sb.append("Type: ").append(getType()).append(",");
        if (getDescription() != null)
            sb.append("Description: ").append(getDescription()).append(",");
        if (getParameters() != null)
            sb.append("Parameters: ").append(getParameters());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (!(obj instanceof UpdateDataCatalogRequest))
            return false;
        UpdateDataCatalogRequest other = (UpdateDataCatalogRequest) obj;
        // Objects.equals handles the null/null and null/non-null cases that the
        // generated xor-chains spelled out by hand.
        return java.util.Objects.equals(getName(), other.getName())
                && java.util.Objects.equals(getType(), other.getType())
                && java.util.Objects.equals(getDescription(), other.getDescription())
                && java.util.Objects.equals(getParameters(), other.getParameters());
    }

    @Override
    public int hashCode() {
        // Objects.hash computes exactly the same 31-prime accumulation (seed 1,
        // null -> 0) as the previous hand-rolled loop, so hash values are unchanged.
        return java.util.Objects.hash(getName(), getType(), getDescription(), getParameters());
    }

    @Override
    public UpdateDataCatalogRequest clone() {
        return (UpdateDataCatalogRequest) super.clone();
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.util;

import java.io.PrintWriter;
import java.io.StringWriter;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.StringTokenizer;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.apache.commons.lang.SystemUtils;
import org.apache.commons.lang.time.FastDateFormat;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.net.NetUtils;

import com.google.common.base.Preconditions;
import com.google.common.net.InetAddresses;

/**
 * General-purpose string utilities: exception stringification, hex/byte
 * conversion, URI/Path array parsing, and human-readable time formatting.
 */
@InterfaceAudience.Private
@InterfaceStability.Unstable
public class StringUtils {

  /**
   * Priority of the StringUtils shutdown hook.
   */
  public static final int SHUTDOWN_HOOK_PRIORITY = 0;

  /**
   * Shell environment variables: $ followed by one letter or _ followed by
   * multiple letters, numbers, or underscores. The group captures the
   * environment variable name without the leading $.
   */
  public static final Pattern SHELL_ENV_VAR_PATTERN =
    Pattern.compile("\\$([A-Za-z_]{1}[A-Za-z0-9_]*)");

  /**
   * Windows environment variables: surrounded by %. The group captures the
   * environment variable name without the leading and trailing %.
   */
  public static final Pattern WIN_ENV_VAR_PATTERN = Pattern.compile("%(.*?)%");

  /**
   * Regular expression that matches and captures environment variable names
   * according to platform-specific rules (chosen once at class load based on
   * the detected platform).
   */
  public static final Pattern ENV_VAR_PATTERN = Shell.WINDOWS ?
    WIN_ENV_VAR_PATTERN : SHELL_ENV_VAR_PATTERN;

  /**
   * Make a string representation of the exception.
   * @param e The exception to stringify
   * @return A string with exception name and call stack.
   */
  public static String stringifyException(Throwable e) {
    StringWriter stm = new StringWriter();
    PrintWriter wrt = new PrintWriter(stm);
    e.printStackTrace(wrt);
    wrt.close();
    return stm.toString();
  }

  /**
   * Given a full hostname, return the word up to the first dot.
   * IP address literals are returned unchanged.
   * @param fullHostname the full hostname
   * @return the hostname up to the first dot
   */
  public static String simpleHostname(String fullHostname) {
    if (InetAddresses.isInetAddress(fullHostname)) {
      // Don't truncate an IP literal at its first '.'.
      return fullHostname;
    }
    int offset = fullHostname.indexOf('.');
    if (offset != -1) {
      return fullHostname.substring(0, offset);
    }
    return fullHostname;
  }

  /**
   * Given an integer, return a string that is in an approximate, but human
   * readable format.
   * @param number the number to format
   * @return a human readable form of the integer
   *
   * @deprecated use {@link TraditionalBinaryPrefix#long2String(long, String, int)}.
   */
  @Deprecated
  public static String humanReadableInt(long number) {
    return TraditionalBinaryPrefix.long2String(number, "", 1);
  }

  /** The same as String.format(Locale.ENGLISH, format, objects). */
  public static String format(final String format, final Object... objects) {
    return String.format(Locale.ENGLISH, format, objects);
  }

  /**
   * Format a percentage for presentation to the user.
   * @param fraction the percentage as a fraction, e.g. 0.1 = 10%
   * @param decimalPlaces the number of decimal places
   * @return a string representation of the percentage
   */
  public static String formatPercent(double fraction, int decimalPlaces) {
    return format("%." + decimalPlaces + "f%%", fraction*100);
  }

  /**
   * Given an array of strings, return a comma-separated list of its elements.
   * @param strs Array of strings
   * @return Empty string if strs.length is 0, comma separated list of strings
   * otherwise
   */
  public static String arrayToString(String[] strs) {
    if (strs.length == 0) { return ""; }
    StringBuilder sbuf = new StringBuilder();
    sbuf.append(strs[0]);
    for (int idx = 1; idx < strs.length; idx++) {
      sbuf.append(",");
      sbuf.append(strs[idx]);
    }
    return sbuf.toString();
  }

  /**
   * Given an array of bytes it will convert the bytes to a hex string
   * representation of the bytes.
   * @param bytes the bytes to convert
   * @param start start index, inclusively
   * @param end end index, exclusively
   * @return hex string representation of the byte array
   */
  public static String byteToHexString(byte[] bytes, int start, int end) {
    if (bytes == null) {
      throw new IllegalArgumentException("bytes == null");
    }
    StringBuilder s = new StringBuilder();
    for(int i = start; i < end; i++) {
      s.append(format("%02x", bytes[i]));
    }
    return s.toString();
  }

  /** Same as byteToHexString(bytes, 0, bytes.length). */
  public static String byteToHexString(byte bytes[]) {
    return byteToHexString(bytes, 0, bytes.length);
  }

  /**
   * Convert a byte to a hex string.
   * @see #byteToHexString(byte[])
   * @see #byteToHexString(byte[], int, int)
   * @param b byte
   * @return byte's hex value as a String
   */
  public static String byteToHexString(byte b) {
    return byteToHexString(new byte[] {b});
  }

  /**
   * Given a hexstring this will return the byte array corresponding to the
   * string
   * @param hex the hex String array
   * @return a byte array that is a hex string representation of the given
   *         string. The size of the byte array is therefore hex.length/2
   */
  public static byte[] hexStringToByte(String hex) {
    byte[] bts = new byte[hex.length() / 2];
    for (int i = 0; i < bts.length; i++) {
      // Each pair of hex characters yields one byte.
      bts[i] = (byte) Integer.parseInt(hex.substring(2 * i, 2 * i + 2), 16);
    }
    return bts;
  }

  /**
   * Given an array of URIs, return a comma-separated list of their string
   * forms, or null if the array is null.
   * @param uris the URIs to join; may be null
   */
  public static String uriToString(URI[] uris){
    if (uris == null) {
      return null;
    }
    StringBuilder ret = new StringBuilder(uris[0].toString());
    for(int i = 1; i < uris.length;i++){
      ret.append(",");
      ret.append(uris[i].toString());
    }
    return ret.toString();
  }

  /**
   * @param str
   *          The string array to be parsed into an URI array.
   * @return <tt>null</tt> if str is <tt>null</tt>, else the URI array
   *         equivalent to str.
   * @throws IllegalArgumentException
   *           If any string in str violates RFC&nbsp;2396.
   */
  public static URI[] stringToURI(String[] str){
    if (str == null)
      return null;
    URI[] uris = new URI[str.length];
    for (int i = 0; i < str.length;i++){
      try{
        uris[i] = new URI(str[i]);
      }catch(URISyntaxException ur){
        throw new IllegalArgumentException(
            "Failed to create uri for " + str[i], ur);
      }
    }
    return uris;
  }

  /**
   * Convert an array of strings to an array of Paths, or null if the input
   * is null.
   * @param str the path strings to convert; may be null
   */
  public static Path[] stringToPath(String[] str){
    if (str == null) {
      return null;
    }
    Path[] p = new Path[str.length];
    for (int i = 0; i < str.length;i++){
      p[i] = new Path(str[i]);
    }
    return p;
  }

  /**
   *
   * Given a finish and start time in long milliseconds, returns a
   * String in the format Xhrs, Ymins, Z sec, for the time difference between two times.
   * If finish time comes before start time then negative values of X, Y and Z will be returned.
   *
   * @param finishTime finish time
   * @param startTime start time
   */
  public static String formatTimeDiff(long finishTime, long startTime){
    long timeDiff = finishTime - startTime;
    return formatTime(timeDiff);
  }

  /**
   *
   * Given the time in long milliseconds, returns a
   * String in the format Xhrs, Ymins, Z sec.
   *
   * @param timeDiff The time difference to format
   */
  public static String formatTime(long timeDiff){
    StringBuilder buf = new StringBuilder();
    long hours = timeDiff / (60*60*1000);
    long rem = (timeDiff % (60*60*1000));
    long minutes =  rem / (60*1000);
    rem = rem % (60*1000);
    long seconds = rem / 1000;

    if (hours != 0){
      buf.append(hours);
      buf.append("hrs, ");
    }
    if (minutes != 0){
      buf.append(minutes);
      buf.append("mins, ");
    }
    // Always append seconds, so a zero difference renders as "0sec".
    buf.append(seconds);
    buf.append("sec");
    return buf.toString();
  }

  /**
   *
   * Given the time in long milliseconds, returns a String in the sortable
   * format Xhrs, Ymins, Zsec. X, Y, and Z are always two-digit. If the time is
   * more than 100 hours ,it is displayed as 99hrs, 59mins, 59sec.
   *
   * @param timeDiff The time difference to format
   */
  public static String formatTimeSortable(long timeDiff) {
    StringBuilder buf = new StringBuilder();
    long hours = timeDiff / (60 * 60 * 1000);
    long rem = (timeDiff % (60 * 60 * 1000));
    long minutes = rem / (60 * 1000);
    rem = rem % (60 * 1000);
    long seconds = rem / 1000;

    // if hours is more than 99 hours, it will be set a max value format
    if (hours > 99) {
      hours = 99;
      minutes = 59;
      seconds = 59;
    }

    buf.append(String.format("%02d", hours));
    buf.append("hrs, ");
    buf.append(String.format("%02d", minutes));
    buf.append("mins, ");
    buf.append(String.format("%02d", seconds));
    buf.append("sec");
    return buf.toString();
  }

  /**
   * Formats time in ms and appends difference (finishTime - startTime)
   * as returned by formatTimeDiff().
   * If finish time is 0, an empty string is returned; if start time is 0
   * then the difference is not appended to the return value.
* * @param dateFormat date format to use * @param finishTime finish time * @param startTime start time * @return formatted value. */ public static String getFormattedTimeWithDiff(FastDateFormat dateFormat, long finishTime, long startTime) { String formattedFinishTime = dateFormat.format(finishTime); return getFormattedTimeWithDiff(formattedFinishTime, finishTime, startTime); } /** * Formats time in ms and appends difference (finishTime - startTime) * as returned by formatTimeDiff(). * If finish time is 0, empty string is returned, if start time is 0 * then difference is not appended to return value. * @param formattedFinishTime formattedFinishTime to use * @param finishTime finish time * @param startTime start time * @return formatted value. */ public static String getFormattedTimeWithDiff(String formattedFinishTime, long finishTime, long startTime){ StringBuilder buf = new StringBuilder(); if (0 != finishTime) { buf.append(formattedFinishTime); if (0 != startTime){ buf.append(" (" + formatTimeDiff(finishTime , startTime) + ")"); } } return buf.toString(); } /** * Returns an arraylist of strings. * @param str the comma separated string values * @return the arraylist of the comma separated string values */ public static String[] getStrings(String str){ String delim = ","; return getStrings(str, delim); } /** * Returns an arraylist of strings. * @param str the string values * @param delim delimiter to separate the values * @return the arraylist of the separated string values */ public static String[] getStrings(String str, String delim){ Collection<String> values = getStringCollection(str, delim); if(values.size() == 0) { return null; } return values.toArray(new String[values.size()]); } /** * Returns a collection of strings. 
* @param str comma separated string values * @return an <code>ArrayList</code> of string values */ public static Collection<String> getStringCollection(String str){ String delim = ","; return getStringCollection(str, delim); } /** * Returns a collection of strings. * * @param str * String to parse * @param delim * delimiter to separate the values * @return Collection of parsed elements. */ public static Collection<String> getStringCollection(String str, String delim) { List<String> values = new ArrayList<String>(); if (str == null) return values; StringTokenizer tokenizer = new StringTokenizer(str, delim); while (tokenizer.hasMoreTokens()) { values.add(tokenizer.nextToken()); } return values; } /** * Splits a comma separated value <code>String</code>, trimming leading and * trailing whitespace on each value. Duplicate and empty values are removed. * * @param str a comma separated <String> with values, may be null * @return a <code>Collection</code> of <code>String</code> values, empty * Collection if null String input */ public static Collection<String> getTrimmedStringCollection(String str){ Set<String> set = new LinkedHashSet<String>( Arrays.asList(getTrimmedStrings(str))); set.remove(""); return set; } /** * Splits a comma or newline separated value <code>String</code>, trimming * leading and trailing whitespace on each value. 
*
 * @param str a comma or newline separated <code>String</code> with values,
 * may be null
 * @return an array of <code>String</code> values, empty array if null String
 * input
 */
public static String[] getTrimmedStrings(String str){
  if (null == str || str.trim().isEmpty()) {
    return emptyStringArray;
  }
  // Split on comma or newline, swallowing surrounding whitespace.
  return str.trim().split("\\s*[,\n]\\s*");
}

// Shared constants: the default separator is a comma and the default
// escape character is a backslash.
final public static String[] emptyStringArray = {};
final public static char COMMA = ',';
final public static String COMMA_STR = ",";
final public static char ESCAPE_CHAR = '\\';

/**
 * Split a string using the default separator
 * @param str a string that may have escaped separator
 * @return an array of strings
 */
public static String[] split(String str) {
  return split(str, ESCAPE_CHAR, COMMA);
}

/**
 * Split a string using the given separator
 * @param str a string that may have escaped separator
 * @param escapeChar a char that be used to escape the separator
 * @param separator a separator char
 * @return an array of strings
 */
public static String[] split(
    String str, char escapeChar, char separator) {
  if (str==null) {
    return null;
  }
  ArrayList<String> strList = new ArrayList<String>();
  StringBuilder split = new StringBuilder();
  int index = 0;
  // findNext accumulates the unescaped field into 'split' and returns the
  // index of the next unescaped separator, or -1 when none remains.
  while ((index = findNext(str, separator, escapeChar, index, split)) >= 0) {
    ++index; // move over the separator for next search
    strList.add(split.toString());
    split.setLength(0); // reset the buffer
  }
  strList.add(split.toString());
  // remove trailing empty split(s)
  int last = strList.size(); // last split
  while (--last>=0 && "".equals(strList.get(last))) {
    strList.remove(last);
  }
  return strList.toArray(new String[strList.size()]);
}

/**
 * Split a string using the given separator, with no escaping performed.
 * @param str a string to be split. Note that this may not be null.
 * @param separator a separator char
 * @return an array of strings
 */
public static String[] split(
    String str, char separator) {
  // String.split returns a single empty result for splitting the empty
  // string.
  if (str.isEmpty()) {
    return new String[]{""};
  }
  ArrayList<String> strList = new ArrayList<String>();
  int startIndex = 0;
  int nextIndex = 0;
  while ((nextIndex = str.indexOf(separator, startIndex)) != -1) {
    strList.add(str.substring(startIndex, nextIndex));
    startIndex = nextIndex + 1;
  }
  strList.add(str.substring(startIndex));
  // remove trailing empty split(s)
  int last = strList.size(); // last split
  while (--last>=0 && "".equals(strList.get(last))) {
    strList.remove(last);
  }
  return strList.toArray(new String[strList.size()]);
}

/**
 * Finds the first occurrence of the separator character ignoring the escaped
 * separators starting from the index. Note the substring between the index
 * and the position of the separator is passed.
 * @param str the source string
 * @param separator the character to find
 * @param escapeChar character used to escape
 * @param start from where to search
 * @param split used to pass back the extracted string
 */
public static int findNext(String str, char separator, char escapeChar,
    int start, StringBuilder split) {
  // numPreEscapes tracks the parity of the run of escape chars immediately
  // preceding the current character: an odd count means the current
  // character is escaped, so a separator here is NOT a field boundary.
  int numPreEscapes = 0;
  for (int i = start; i < str.length(); i++) {
    char curChar = str.charAt(i);
    if (numPreEscapes == 0 && curChar == separator) { // separator
      return i;
    } else {
      split.append(curChar);
      // An escape char flips the parity; any other char resets it.
      numPreEscapes = (curChar == escapeChar)
          ? (++numPreEscapes) % 2
          : 0;
    }
  }
  return -1;
}

/**
 * Escape commas in the string using the default escape char
 * @param str a string
 * @return an escaped string
 */
public static String escapeString(String str) {
  return escapeString(str, ESCAPE_CHAR, COMMA);
}

/**
 * Escape <code>charToEscape</code> in the string
 * with the escape char <code>escapeChar</code>
 *
 * @param str string
 * @param escapeChar escape char
 * @param charToEscape the char to be escaped
 * @return an escaped string
 */
public static String escapeString(
    String str, char escapeChar, char charToEscape) {
  return escapeString(str, escapeChar, new char[] {charToEscape});
}

// check if the character array has the character
private static boolean hasChar(char[] chars, char character) {
  for (char target : chars) {
    if (character == target) {
      return true;
    }
  }
  return false;
}

/**
 * Escape the escape char itself and every char in
 * <code>charsToEscape</code> by prefixing it with <code>escapeChar</code>.
 * @param charsToEscape array of characters to be escaped
 */
public static String escapeString(String str, char escapeChar,
    char[] charsToEscape) {
  if (str == null) {
    return null;
  }
  StringBuilder result = new StringBuilder();
  for (int i=0; i<str.length(); i++) {
    char curChar = str.charAt(i);
    if (curChar == escapeChar
        || hasChar(charsToEscape, curChar)) {
      // special char
      result.append(escapeChar);
    }
    result.append(curChar);
  }
  return result.toString();
}

/**
 * Unescape commas in the string using the default escape char
 * @param str a string
 * @return an unescaped string
 */
public static String unEscapeString(String str) {
  return unEscapeString(str, ESCAPE_CHAR, COMMA);
}

/**
 * Unescape <code>charToEscape</code> in the string
 * with the escape char <code>escapeChar</code>
 *
 * @param str string
 * @param escapeChar escape char
 * @param charToEscape the escaped char
 * @return an unescaped string
 */
public static String unEscapeString(
    String str, char escapeChar, char charToEscape) {
  return unEscapeString(str, escapeChar, new char[] {charToEscape});
}

/**
 * Inverse of {@link #escapeString(String, char, char[])}; throws
 * IllegalArgumentException on a malformed input (an unescaped special char,
 * an escape applied to a non-special char, or a trailing escape).
 * @param charsToEscape array of characters to unescape
 */
public static String
    unEscapeString(String str, char escapeChar, char[] charsToEscape) {
  if (str == null) {
    return null;
  }
  StringBuilder result = new StringBuilder(str.length());
  // hasPreEscape is true while the previous char was an (unconsumed) escape.
  boolean hasPreEscape = false;
  for (int i=0; i<str.length(); i++) {
    char curChar = str.charAt(i);
    if (hasPreEscape) {
      if (curChar != escapeChar && !hasChar(charsToEscape, curChar)) {
        // no special char
        throw new IllegalArgumentException("Illegal escaped string " + str
            + " unescaped " + escapeChar + " at " + (i-1));
      }
      // otherwise discard the escape char
      result.append(curChar);
      hasPreEscape = false;
    } else {
      if (hasChar(charsToEscape, curChar)) {
        throw new IllegalArgumentException("Illegal escaped string " + str
            + " unescaped " + curChar + " at " + i);
      } else if (curChar == escapeChar) {
        hasPreEscape = true;
      } else {
        result.append(curChar);
      }
    }
  }
  if (hasPreEscape ) {
    throw new IllegalArgumentException("Illegal escaped string " + str
        + ", not expecting " + escapeChar + " in the end." );
  }
  return result.toString();
}

/**
 * Return a message for logging.
* @param prefix prefix keyword for the message * @param msg content of the message * @return a message for logging */ public static String toStartupShutdownString(String prefix, String[] msg) { StringBuilder b = new StringBuilder(prefix); b.append("\n/************************************************************"); for(String s : msg) b.append("\n").append(prefix).append(s); b.append("\n************************************************************/"); return b.toString(); } /** * Print a log message for starting up and shutting down * @param clazz the class of the server * @param args arguments * @param LOG the target log object */ public static void startupShutdownMessage(Class<?> clazz, String[] args, final org.apache.commons.logging.Log LOG) { startupShutdownMessage(clazz, args, LogAdapter.create(LOG)); } /** * Print a log message for starting up and shutting down * @param clazz the class of the server * @param args arguments * @param LOG the target log object */ public static void startupShutdownMessage(Class<?> clazz, String[] args, final org.slf4j.Logger LOG) { startupShutdownMessage(clazz, args, LogAdapter.create(LOG)); } static void startupShutdownMessage(Class<?> clazz, String[] args, final LogAdapter LOG) { final String hostname = NetUtils.getHostname(); final String classname = clazz.getSimpleName(); LOG.info(createStartupShutdownMessage(classname, hostname, args)); if (SystemUtils.IS_OS_UNIX) { try { SignalLogger.INSTANCE.register(LOG); } catch (Throwable t) { LOG.warn("failed to register any UNIX signal loggers: ", t); } } ShutdownHookManager.get().addShutdownHook( new Runnable() { @Override public void run() { LOG.info(toStartupShutdownString("SHUTDOWN_MSG: ", new String[]{ "Shutting down " + classname + " at " + hostname})); } }, SHUTDOWN_HOOK_PRIORITY); } /** * Generate the text for the startup/shutdown message of processes. 
* @param classname short classname of the class
 * @param hostname hostname
 * @param args Command arguments
 * @return a string to log.
 */
public static String createStartupShutdownMessage(String classname,
    String hostname, String[] args) {
  return toStartupShutdownString("STARTUP_MSG: ", new String[] {
      "Starting " + classname,
      " host = " + hostname,
      " args = " + Arrays.asList(args),
      " version = " + VersionInfo.getVersion(),
      " classpath = " + System.getProperty("java.class.path"),
      " build = " + VersionInfo.getUrl() + " -r "
          + VersionInfo.getRevision()
          + "; compiled by '" + VersionInfo.getUser()
          + "' on " + VersionInfo.getDate(),
      " java = " + System.getProperty("java.version") }
  );
}

/**
 * The traditional binary prefixes, kilo, mega, ..., exa,
 * which can be represented by a 64-bit integer.
 * TraditionalBinaryPrefix symbol are case insensitive.
 */
public static enum TraditionalBinaryPrefix {
  // Each prefix is 10 more bits of shift than the previous: 2^10, 2^20, ...
  KILO(10),
  MEGA(KILO.bitShift + 10),
  GIGA(MEGA.bitShift + 10),
  TERA(GIGA.bitShift + 10),
  PETA(TERA.bitShift + 10),
  EXA (PETA.bitShift + 10);

  public final long value;   // the multiplier, e.g. 1024 for KILO
  public final char symbol;  // first letter of the constant name, e.g. 'K'
  public final int bitShift; // value == 1L << bitShift
  public final long bitMask; // low-order bits below this prefix (value - 1)

  private TraditionalBinaryPrefix(int bitShift) {
    this.bitShift = bitShift;
    this.value = 1L << bitShift;
    this.bitMask = this.value - 1L;
    // Symbol is derived from the enum constant's name.
    this.symbol = toString().charAt(0);
  }

  /**
   * @return The TraditionalBinaryPrefix object corresponding to the symbol.
   */
  public static TraditionalBinaryPrefix valueOf(char symbol) {
    symbol = Character.toUpperCase(symbol);
    for(TraditionalBinaryPrefix prefix : TraditionalBinaryPrefix.values()) {
      if (symbol == prefix.symbol) {
        return prefix;
      }
    }
    throw new IllegalArgumentException("Unknown symbol '" + symbol + "'");
  }

  /**
   * Convert a string to long.
   * The input string is first be trimmed
   * and then it is parsed with traditional binary prefix.
   *
   * For example,
   * "-1230k" will be converted to -1230 * 1024 = -1259520;
   * "891g" will be converted to 891 * 1024^3 = 956703965184;
   *
   * @param s input string
   * @return a long value represented by the input string.
   */
  public static long string2long(String s) {
    s = s.trim();
    final int lastpos = s.length() - 1;
    final char lastchar = s.charAt(lastpos);
    if (Character.isDigit(lastchar))
      return Long.parseLong(s);
    else {
      long prefix;
      try {
        prefix = TraditionalBinaryPrefix.valueOf(lastchar).value;
      } catch (IllegalArgumentException e) {
        throw new IllegalArgumentException("Invalid size prefix '" + lastchar
            + "' in '" + s
            + "'. Allowed prefixes are k, m, g, t, p, e(case insensitive)");
      }
      long num = Long.parseLong(s.substring(0, lastpos));
      // Reject values whose product would overflow a long.
      if (num > (Long.MAX_VALUE/prefix) || num < (Long.MIN_VALUE/prefix)) {
        throw new IllegalArgumentException(s + " does not fit in a Long");
      }
      return num * prefix;
    }
  }

  /**
   * Convert a long integer to a string with traditional binary prefix.
   *
   * @param n the value to be converted
   * @param unit The unit, e.g. "B" for bytes.
   * @param decimalPlaces The number of decimal places.
   * @return a string with traditional binary prefix.
   */
  public static String long2String(long n, String unit, int decimalPlaces) {
    if (unit == null) {
      unit = "";
    }
    //take care a special case: -Long.MIN_VALUE overflows, but
    //Long.MIN_VALUE is exactly -8 EXA.
    if (n == Long.MIN_VALUE) {
      return "-8 " + EXA.symbol + unit;
    }

    final StringBuilder b = new StringBuilder();
    //take care negative numbers
    if (n < 0) {
      b.append('-');
      n = -n;
    }
    if (n < KILO.value) {
      //no prefix
      b.append(n);
      return (unit.isEmpty()? b: b.append(" ").append(unit)).toString();
    } else {
      //find traditional binary prefix: largest prefix whose value <= n
      int i = 0;
      for(; i < values().length && n >= values()[i].value; i++);
      TraditionalBinaryPrefix prefix = values()[i - 1];

      if ((n & prefix.bitMask) == 0) {
        //exact division
        b.append(n >> prefix.bitShift);
      } else {
        final String format = "%." + decimalPlaces + "f";
        String s = format(format, n/(double)prefix.value);
        //check a special rounding up case: e.g. 1023.95 MB rounds to
        //"1024.0 MB"; promote to the next prefix and reformat instead.
        if (s.startsWith("1024")) {
          prefix = values()[i];
          s = format(format, n/(double)prefix.value);
        }
        b.append(s);
      }
      return b.append(' ').append(prefix.symbol).append(unit).toString();
    }
  }
}

/**
 * Escapes HTML Special characters present in the string.
 * @param string
 * @return HTML Escaped String representation
 */
public static String escapeHTML(String string) {
  if(string == null) {
    return null;
  }
  StringBuilder sb = new StringBuilder();
  // Alternate runs of spaces between "&nbsp;" and " " so that browsers
  // preserve consecutive spaces without breaking word wrap.
  boolean lastCharacterWasSpace = false;
  char[] chars = string.toCharArray();
  for(char c : chars) {
    if(c == ' ') {
      if(lastCharacterWasSpace){
        lastCharacterWasSpace = false;
        sb.append("&nbsp;");
      }else {
        lastCharacterWasSpace=true;
        sb.append(" ");
      }
    }else {
      lastCharacterWasSpace = false;
      switch(c) {
        case '<': sb.append("&lt;"); break;
        case '>': sb.append("&gt;"); break;
        case '&': sb.append("&amp;"); break;
        case '"': sb.append("&quot;"); break;
        default : sb.append(c);break;
      }
    }
  }
  return sb.toString();
}

/**
 * @return a byte description of the given long integer value.
 */
public static String byteDesc(long len) {
  return TraditionalBinaryPrefix.long2String(len, "B", 2);
}

/** @deprecated use StringUtils.format("%.2f", d). */
@Deprecated
public static String limitDecimalTo2(double d) {
  return format("%.2f", d);
}

/**
 * Concatenates strings, using a separator.
 *
 * @param separator Separator to join with.
 * @param strings Strings to join.
 */
public static String join(CharSequence separator, Iterable<?> strings) {
  Iterator<?> i = strings.iterator();
  if (!i.hasNext()) {
    return "";
  }
  StringBuilder sb = new StringBuilder(i.next().toString());
  while (i.hasNext()) {
    sb.append(separator);
    sb.append(i.next().toString());
  }
  return sb.toString();
}

public static String join(char separator, Iterable<?> strings) {
  return join(separator + "", strings);
}

/**
 * Concatenates strings, using a separator.
* * @param separator to join with * @param strings to join * @return the joined string */ public static String join(CharSequence separator, String[] strings) { // Ideally we don't have to duplicate the code here if array is iterable. StringBuilder sb = new StringBuilder(); boolean first = true; for (String s : strings) { if (first) { first = false; } else { sb.append(separator); } sb.append(s); } return sb.toString(); } public static String join(char separator, String[] strings) { return join(separator + "", strings); } /** * Convert SOME_STUFF to SomeStuff * * @param s input string * @return camelized string */ public static String camelize(String s) { StringBuilder sb = new StringBuilder(); String[] words = split(StringUtils.toLowerCase(s), ESCAPE_CHAR, '_'); for (String word : words) sb.append(org.apache.commons.lang.StringUtils.capitalize(word)); return sb.toString(); } /** * Matches a template string against a pattern, replaces matched tokens with * the supplied replacements, and returns the result. The regular expression * must use a capturing group. The value of the first capturing group is used * to look up the replacement. If no replacement is found for the token, then * it is replaced with the empty string. * * For example, assume template is "%foo%_%bar%_%baz%", pattern is "%(.*?)%", * and replacements contains 2 entries, mapping "foo" to "zoo" and "baz" to * "zaz". The result returned would be "zoo__zaz". 
* * @param template String template to receive replacements * @param pattern Pattern to match for identifying tokens, must use a capturing * group * @param replacements Map<String, String> mapping tokens identified by the * capturing group to their replacement values * @return String template with replacements */ public static String replaceTokens(String template, Pattern pattern, Map<String, String> replacements) { StringBuffer sb = new StringBuffer(); Matcher matcher = pattern.matcher(template); while (matcher.find()) { String replacement = replacements.get(matcher.group(1)); if (replacement == null) { replacement = ""; } matcher.appendReplacement(sb, Matcher.quoteReplacement(replacement)); } matcher.appendTail(sb); return sb.toString(); } /** * Get stack trace for a given thread. */ public static String getStackTrace(Thread t) { final StackTraceElement[] stackTrace = t.getStackTrace(); StringBuilder str = new StringBuilder(); for (StackTraceElement e : stackTrace) { str.append(e.toString() + "\n"); } return str.toString(); } /** * From a list of command-line arguments, remove both an option and the * next argument. * * @param name Name of the option to remove. Example: -foo. * @param args List of arguments. * @return null if the option was not found; the value of the * option otherwise. * @throws IllegalArgumentException if the option's argument is not present */ public static String popOptionWithArgument(String name, List<String> args) throws IllegalArgumentException { String val = null; for (Iterator<String> iter = args.iterator(); iter.hasNext(); ) { String cur = iter.next(); if (cur.equals("--")) { // stop parsing arguments when you see -- break; } else if (cur.equals(name)) { iter.remove(); if (!iter.hasNext()) { throw new IllegalArgumentException("option " + name + " requires 1 " + "argument."); } val = iter.next(); iter.remove(); break; } } return val; } /** * From a list of command-line arguments, remove an option. 
* * @param name Name of the option to remove. Example: -foo. * @param args List of arguments. * @return true if the option was found and removed; false otherwise. */ public static boolean popOption(String name, List<String> args) { for (Iterator<String> iter = args.iterator(); iter.hasNext(); ) { String cur = iter.next(); if (cur.equals("--")) { // stop parsing arguments when you see -- break; } else if (cur.equals(name)) { iter.remove(); return true; } } return false; } /** * From a list of command-line arguments, return the first non-option * argument. Non-option arguments are those which either come after * a double dash (--) or do not start with a dash. * * @param args List of arguments. * @return The first non-option argument, or null if there were none. */ public static String popFirstNonOption(List<String> args) { for (Iterator<String> iter = args.iterator(); iter.hasNext(); ) { String cur = iter.next(); if (cur.equals("--")) { if (!iter.hasNext()) { return null; } cur = iter.next(); iter.remove(); return cur; } else if (!cur.startsWith("-")) { iter.remove(); return cur; } } return null; } /** * Converts all of the characters in this String to lower case with * Locale.ENGLISH. * * @param str string to be converted * @return the str, converted to lowercase. */ public static String toLowerCase(String str) { return str.toLowerCase(Locale.ENGLISH); } /** * Converts all of the characters in this String to upper case with * Locale.ENGLISH. * * @param str string to be converted * @return the str, converted to uppercase. */ public static String toUpperCase(String str) { return str.toUpperCase(Locale.ENGLISH); } /** * Compare strings locale-freely by using String#equalsIgnoreCase. * * @param s1 Non-null string to be converted * @param s2 string to be converted * @return the str, converted to uppercase. 
*/ public static boolean equalsIgnoreCase(String s1, String s2) { Preconditions.checkNotNull(s1); // don't check non-null against s2 to make the semantics same as // s1.equals(s2) return s1.equalsIgnoreCase(s2); } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hive.ql.parse.repl.metric; import org.apache.hadoop.hive.conf.Constants; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.repl.util.ReplUtils; import org.apache.hadoop.hive.ql.parse.SemanticException; import org.apache.hadoop.hive.ql.parse.repl.dump.metric.BootstrapDumpMetricCollector; import org.apache.hadoop.hive.ql.parse.repl.dump.metric.IncrementalDumpMetricCollector; import org.apache.hadoop.hive.ql.parse.repl.load.metric.BootstrapLoadMetricCollector; import org.apache.hadoop.hive.ql.parse.repl.load.metric.IncrementalLoadMetricCollector; import org.apache.hadoop.hive.ql.parse.repl.metric.event.Status; import org.apache.hadoop.hive.ql.parse.repl.metric.event.ReplicationMetric; import org.apache.hadoop.hive.ql.parse.repl.metric.event.Metadata; import org.apache.hadoop.hive.ql.parse.repl.metric.event.Progress; import org.apache.hadoop.hive.ql.parse.repl.metric.event.Stage; import org.apache.hadoop.hive.ql.parse.repl.metric.event.Metric; import org.junit.Assert; import org.junit.Before; import org.junit.After; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mockito; import org.mockito.junit.MockitoJUnitRunner; 
import java.util.Map; import java.util.HashMap; import java.util.List; import java.util.Arrays; /** * Unit Test class for In Memory Replication Metric Collection. */ @RunWith(MockitoJUnitRunner.class) public class TestReplicationMetricCollector { HiveConf conf; @Before public void setup() throws Exception { conf = new HiveConf(); conf.set(Constants.SCHEDULED_QUERY_SCHEDULENAME, "repl"); conf.set(Constants.SCHEDULED_QUERY_EXECUTIONID, "1"); MetricCollector.getInstance().init(conf); } @After public void finalize() { MetricCollector.getInstance().deinit(); } @Test public void testFailureCacheHardLimit() throws Exception { MetricCollector.getInstance().deinit(); conf = new HiveConf(); MetricCollector collector = MetricCollector.getInstance(); MetricCollector metricCollectorSpy = Mockito.spy(collector); Mockito.doReturn(1L).when(metricCollectorSpy).getMaxSize(Mockito.any()); metricCollectorSpy.init(conf); metricCollectorSpy.addMetric(new ReplicationMetric(1, "repl", 0, null)); try { metricCollectorSpy.addMetric(new ReplicationMetric(2, "repl", 0, null)); Assert.fail(); } catch (SemanticException e) { Assert.assertEquals("Metrics are not getting collected. 
", e.getMessage()); } } @Test public void testFailureNoScheduledId() throws Exception { MetricCollector.getInstance().deinit(); conf = new HiveConf(); MetricCollector.getInstance().init(conf); ReplicationMetricCollector bootstrapDumpMetricCollector = new BootstrapDumpMetricCollector("db", "staging", conf); Map<String, Long> metricMap = new HashMap<>(); metricMap.put(ReplUtils.MetricName.TABLES.name(), (long) 10); metricMap.put(ReplUtils.MetricName.FUNCTIONS.name(), (long) 1); bootstrapDumpMetricCollector.reportStageStart("dump", metricMap); bootstrapDumpMetricCollector.reportStageEnd("dump", Status.SUCCESS); Assert.assertEquals(0, MetricCollector.getInstance().getMetrics().size()); } @Test public void testFailureNoPolicyId() throws Exception { MetricCollector.getInstance().deinit(); conf = new HiveConf(); MetricCollector.getInstance().init(conf); ReplicationMetricCollector bootstrapDumpMetricCollector = new BootstrapDumpMetricCollector("db", "staging", conf); Map<String, Long> metricMap = new HashMap<>(); metricMap.put(ReplUtils.MetricName.TABLES.name(), (long) 10); metricMap.put(ReplUtils.MetricName.FUNCTIONS.name(), (long) 1); bootstrapDumpMetricCollector.reportStageStart("dump", metricMap); bootstrapDumpMetricCollector.reportStageEnd("dump", Status.SUCCESS); Assert.assertEquals(0, MetricCollector.getInstance().getMetrics().size()); } @Test public void testSuccessBootstrapDumpMetrics() throws Exception { ReplicationMetricCollector bootstrapDumpMetricCollector = new BootstrapDumpMetricCollector("db", "staging", conf); Map<String, Long> metricMap = new HashMap<>(); metricMap.put(ReplUtils.MetricName.TABLES.name(), (long) 10); metricMap.put(ReplUtils.MetricName.FUNCTIONS.name(), (long) 1); bootstrapDumpMetricCollector.reportStageStart("dump", metricMap); bootstrapDumpMetricCollector.reportStageProgress("dump", ReplUtils.MetricName.TABLES.name(), 1); List<ReplicationMetric> actualMetrics = MetricCollector.getInstance().getMetrics(); Assert.assertEquals(1, 
actualMetrics.size()); bootstrapDumpMetricCollector.reportStageProgress("dump", ReplUtils.MetricName.TABLES.name(), 2); bootstrapDumpMetricCollector.reportStageProgress("dump", ReplUtils.MetricName.FUNCTIONS.name(), 1); actualMetrics = MetricCollector.getInstance().getMetrics(); Assert.assertEquals(1, actualMetrics.size()); bootstrapDumpMetricCollector.reportStageEnd("dump", Status.SUCCESS, 10); bootstrapDumpMetricCollector.reportEnd(Status.SUCCESS); actualMetrics = MetricCollector.getInstance().getMetrics(); Assert.assertEquals(1, actualMetrics.size()); Metadata expectedMetadata = new Metadata("db", Metadata.ReplicationType.BOOTSTRAP, "staging"); expectedMetadata.setLastReplId(10); Progress expectedProgress = new Progress(); expectedProgress.setStatus(Status.SUCCESS); Stage dumpStage = new Stage("dump", Status.SUCCESS, 0); dumpStage.setEndTime(0); Metric expectedTableMetric = new Metric(ReplUtils.MetricName.TABLES.name(), 10); expectedTableMetric.setCurrentCount(3); Metric expectedFuncMetric = new Metric(ReplUtils.MetricName.FUNCTIONS.name(), 1); expectedFuncMetric.setCurrentCount(1); dumpStage.addMetric(expectedTableMetric); dumpStage.addMetric(expectedFuncMetric); expectedProgress.addStage(dumpStage); ReplicationMetric expectedMetric = new ReplicationMetric(1, "repl", 0, expectedMetadata); expectedMetric.setProgress(expectedProgress); checkSuccess(actualMetrics.get(0), expectedMetric, "dump", Arrays.asList(ReplUtils.MetricName.TABLES.name(), ReplUtils.MetricName.FUNCTIONS.name())); } @Test public void testSuccessIncrDumpMetrics() throws Exception { ReplicationMetricCollector incrDumpMetricCollector = new IncrementalDumpMetricCollector("db", "staging", conf); Map<String, Long> metricMap = new HashMap<>(); metricMap.put(ReplUtils.MetricName.TABLES.name(), (long) 10); metricMap.put(ReplUtils.MetricName.FUNCTIONS.name(), (long) 1); incrDumpMetricCollector.reportStageStart("dump", metricMap); incrDumpMetricCollector.reportStageProgress("dump", 
ReplUtils.MetricName.TABLES.name(), 1); List<ReplicationMetric> actualMetrics = MetricCollector.getInstance().getMetrics(); Assert.assertEquals(1, actualMetrics.size()); incrDumpMetricCollector.reportStageProgress("dump", ReplUtils.MetricName.TABLES.name(), 2); incrDumpMetricCollector.reportStageProgress("dump", ReplUtils.MetricName.FUNCTIONS.name(), 1); actualMetrics = MetricCollector.getInstance().getMetrics(); Assert.assertEquals(1, actualMetrics.size()); incrDumpMetricCollector.reportStageEnd("dump", Status.SUCCESS, 10); incrDumpMetricCollector.reportEnd(Status.SUCCESS); actualMetrics = MetricCollector.getInstance().getMetrics(); Assert.assertEquals(1, actualMetrics.size()); Metadata expectedMetadata = new Metadata("db", Metadata.ReplicationType.INCREMENTAL, "staging"); expectedMetadata.setLastReplId(10); Progress expectedProgress = new Progress(); expectedProgress.setStatus(Status.SUCCESS); Stage dumpStage = new Stage("dump", Status.SUCCESS, 0); dumpStage.setEndTime(0); Metric expectedTableMetric = new Metric(ReplUtils.MetricName.TABLES.name(), 10); expectedTableMetric.setCurrentCount(3); Metric expectedFuncMetric = new Metric(ReplUtils.MetricName.FUNCTIONS.name(), 1); expectedFuncMetric.setCurrentCount(1); dumpStage.addMetric(expectedTableMetric); dumpStage.addMetric(expectedFuncMetric); expectedProgress.addStage(dumpStage); ReplicationMetric expectedMetric = new ReplicationMetric(1, "repl", 0, expectedMetadata); expectedMetric.setProgress(expectedProgress); checkSuccess(actualMetrics.get(0), expectedMetric, "dump", Arrays.asList(ReplUtils.MetricName.TABLES.name(), ReplUtils.MetricName.FUNCTIONS.name())); } @Test public void testSuccessBootstrapLoadMetrics() throws Exception { ReplicationMetricCollector bootstrapLoadMetricCollector = new BootstrapLoadMetricCollector("db", "staging", 1, conf); Map<String, Long> metricMap = new HashMap<>(); metricMap.put(ReplUtils.MetricName.TABLES.name(), (long) 10); metricMap.put(ReplUtils.MetricName.FUNCTIONS.name(), (long) 
1); bootstrapLoadMetricCollector.reportStageStart("dump", metricMap); bootstrapLoadMetricCollector.reportStageProgress("dump", ReplUtils.MetricName.TABLES.name(), 1); List<ReplicationMetric> actualMetrics = MetricCollector.getInstance().getMetrics(); Assert.assertEquals(1, actualMetrics.size()); bootstrapLoadMetricCollector.reportStageProgress("dump", ReplUtils.MetricName.TABLES.name(), 2); bootstrapLoadMetricCollector.reportStageProgress("dump", ReplUtils.MetricName.FUNCTIONS.name(), 1); actualMetrics = MetricCollector.getInstance().getMetrics(); Assert.assertEquals(1, actualMetrics.size()); bootstrapLoadMetricCollector.reportStageEnd("dump", Status.SUCCESS, 10); bootstrapLoadMetricCollector.reportEnd(Status.SUCCESS); actualMetrics = MetricCollector.getInstance().getMetrics(); Assert.assertEquals(1, actualMetrics.size()); Metadata expectedMetadata = new Metadata("db", Metadata.ReplicationType.BOOTSTRAP, "staging"); expectedMetadata.setLastReplId(10); Progress expectedProgress = new Progress(); expectedProgress.setStatus(Status.SUCCESS); Stage dumpStage = new Stage("dump", Status.SUCCESS, 0); dumpStage.setEndTime(0); Metric expectedTableMetric = new Metric(ReplUtils.MetricName.TABLES.name(), 10); expectedTableMetric.setCurrentCount(3); Metric expectedFuncMetric = new Metric(ReplUtils.MetricName.FUNCTIONS.name(), 1); expectedFuncMetric.setCurrentCount(1); dumpStage.addMetric(expectedTableMetric); dumpStage.addMetric(expectedFuncMetric); expectedProgress.addStage(dumpStage); ReplicationMetric expectedMetric = new ReplicationMetric(1, "repl", 1, expectedMetadata); expectedMetric.setProgress(expectedProgress); checkSuccess(actualMetrics.get(0), expectedMetric, "dump", Arrays.asList(ReplUtils.MetricName.TABLES.name(), ReplUtils.MetricName.FUNCTIONS.name())); } @Test public void testSuccessIncrLoadMetrics() throws Exception { ReplicationMetricCollector incrLoadMetricCollector = new IncrementalLoadMetricCollector("db", "staging", 1, conf); Map<String, Long> metricMap = 
new HashMap<>(); metricMap.put(ReplUtils.MetricName.TABLES.name(), (long) 10); metricMap.put(ReplUtils.MetricName.FUNCTIONS.name(), (long) 1); incrLoadMetricCollector.reportStageStart("dump", metricMap); incrLoadMetricCollector.reportStageProgress("dump", ReplUtils.MetricName.TABLES.name(), 1); List<ReplicationMetric> actualMetrics = MetricCollector.getInstance().getMetrics(); Assert.assertEquals(1, actualMetrics.size()); incrLoadMetricCollector.reportStageProgress("dump", ReplUtils.MetricName.TABLES.name(), 2); incrLoadMetricCollector.reportStageProgress("dump", ReplUtils.MetricName.FUNCTIONS.name(), 1); actualMetrics = MetricCollector.getInstance().getMetrics(); Assert.assertEquals(1, actualMetrics.size()); incrLoadMetricCollector.reportStageEnd("dump", Status.SUCCESS, 10); incrLoadMetricCollector.reportEnd(Status.SUCCESS); actualMetrics = MetricCollector.getInstance().getMetrics(); Assert.assertEquals(1, actualMetrics.size()); Metadata expectedMetadata = new Metadata("db", Metadata.ReplicationType.INCREMENTAL, "staging"); expectedMetadata.setLastReplId(10); Progress expectedProgress = new Progress(); expectedProgress.setStatus(Status.SUCCESS); Stage dumpStage = new Stage("dump", Status.SUCCESS, 0); dumpStage.setEndTime(0); Metric expectedTableMetric = new Metric(ReplUtils.MetricName.TABLES.name(), 10); expectedTableMetric.setCurrentCount(3); Metric expectedFuncMetric = new Metric(ReplUtils.MetricName.FUNCTIONS.name(), 1); expectedFuncMetric.setCurrentCount(1); dumpStage.addMetric(expectedTableMetric); dumpStage.addMetric(expectedFuncMetric); expectedProgress.addStage(dumpStage); ReplicationMetric expectedMetric = new ReplicationMetric(1, "repl", 1, expectedMetadata); expectedMetric.setProgress(expectedProgress); checkSuccess(actualMetrics.get(0), expectedMetric, "dump", Arrays.asList(ReplUtils.MetricName.TABLES.name(), ReplUtils.MetricName.FUNCTIONS.name())); } private void checkSuccess(ReplicationMetric actual, ReplicationMetric expected, String stageName, 
List<String> metricNames) { Assert.assertEquals(expected.getDumpExecutionId(), actual.getDumpExecutionId()); Assert.assertEquals(expected.getPolicy(), actual.getPolicy()); Assert.assertEquals(expected.getScheduledExecutionId(), actual.getScheduledExecutionId()); Assert.assertEquals(expected.getMetadata().getReplicationType(), actual.getMetadata().getReplicationType()); Assert.assertEquals(expected.getMetadata().getDbName(), actual.getMetadata().getDbName()); Assert.assertEquals(expected.getMetadata().getStagingDir(), actual.getMetadata().getStagingDir()); Assert.assertEquals(expected.getMetadata().getLastReplId(), actual.getMetadata().getLastReplId()); Assert.assertEquals(expected.getProgress().getStatus(), actual.getProgress().getStatus()); Assert.assertEquals(expected.getProgress().getStageByName(stageName).getStatus(), actual.getProgress().getStageByName(stageName).getStatus()); for (String metricName : metricNames) { Assert.assertEquals(expected.getProgress().getStageByName(stageName).getMetricByName(metricName).getTotalCount(), actual.getProgress().getStageByName(stageName).getMetricByName(metricName).getTotalCount()); Assert.assertEquals(expected.getProgress().getStageByName(stageName).getMetricByName(metricName) .getCurrentCount(), actual.getProgress() .getStageByName(stageName).getMetricByName(metricName).getCurrentCount()); } } @Test public void testSuccessStageFailure() throws Exception { ReplicationMetricCollector bootstrapDumpMetricCollector = new BootstrapDumpMetricCollector("db", "staging", conf); Map<String, Long> metricMap = new HashMap<>(); metricMap.put(ReplUtils.MetricName.TABLES.name(), (long) 10); metricMap.put(ReplUtils.MetricName.FUNCTIONS.name(), (long) 1); bootstrapDumpMetricCollector.reportStageStart("dump", metricMap); bootstrapDumpMetricCollector.reportStageEnd("dump", Status.FAILED); List<ReplicationMetric> metricList = MetricCollector.getInstance().getMetrics(); Assert.assertEquals(1, metricList.size()); ReplicationMetric actualMetric 
= metricList.get(0); Assert.assertEquals(Status.FAILED, actualMetric.getProgress().getStatus()); } }
/*
 * Copyright 2000-2010 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jetbrains.idea.maven.server;

import com.intellij.openapi.vfs.VirtualFile;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.idea.maven.model.*;
import org.jetbrains.idea.maven.project.MavenConsole;
import org.jetbrains.idea.maven.utils.MavenLog;
import org.jetbrains.idea.maven.utils.MavenProcessCanceledException;
import org.jetbrains.idea.maven.utils.MavenProgressIndicator;

import java.io.File;
import java.rmi.NoSuchObjectException;
import java.rmi.RemoteException;
import java.rmi.server.UnicastRemoteObject;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;

/**
 * Client-side wrapper around a remote {@link MavenServerEmbedder}.
 *
 * Holds the current {@link Customization} (console, progress indicator,
 * workspace map and resolve flags) and re-applies it whenever the remote
 * wrappee is (re)created, so a restarted server process picks up the same
 * settings. All remote calls are funneled through {@code perform(...)} so a
 * broken RMI connection can be retried or reported uniformly.
 */
public abstract class MavenEmbedderWrapper extends RemoteObjectWrapper<MavenServerEmbedder> {
  private Customization myCustomization;

  public MavenEmbedderWrapper(@Nullable RemoteObjectWrapper<?> parent) {
    super(parent);
  }

  @Override
  protected synchronized void onWrappeeCreated() throws RemoteException {
    super.onWrappeeCreated();
    // Re-apply the previous customization to a freshly created wrappee.
    if (myCustomization != null) {
      doCustomize();
    }
  }

  public void customizeForResolve(MavenConsole console, MavenProgressIndicator indicator) {
    setCustomization(console, indicator, null, false, false);
    performCustomization();
  }

  public void customizeForResolve(MavenWorkspaceMap workspaceMap,
                                  MavenConsole console,
                                  MavenProgressIndicator indicator,
                                  boolean alwaysUpdateSnapshot) {
    setCustomization(console, indicator, workspaceMap, false, alwaysUpdateSnapshot);
    performCustomization();
  }

  public void customizeForStrictResolve(MavenWorkspaceMap workspaceMap,
                                        MavenConsole console,
                                        MavenProgressIndicator indicator) {
    setCustomization(console, indicator, workspaceMap, true, false);
    performCustomization();
  }

  /**
   * Pushes the current customization to the remote embedder via the retrying
   * {@code perform} machinery. Extracted from three previously duplicated
   * anonymous {@code Retriable} blocks in the customizeFor* methods.
   */
  private void performCustomization() {
    perform(new Retriable<Object>() {
      @Override
      public Object execute() throws RemoteException {
        doCustomize();
        return null;
      }
    });
  }

  public void customizeForGetVersions() {
    perform(new Retriable<Object>() {
      @Override
      public Object execute() throws RemoteException {
        doCustomizeComponents();
        return null;
      }
    });
  }

  private synchronized void doCustomizeComponents() throws RemoteException {
    getOrCreateWrappee().customizeComponents();
  }

  private synchronized void doCustomize() throws RemoteException {
    getOrCreateWrappee().customize(myCustomization.workspaceMap,
                                   myCustomization.failOnUnresolvedDependency,
                                   myCustomization.console,
                                   myCustomization.indicator,
                                   myCustomization.alwaysUpdateSnapshot);
  }

  /** Resolves the project model for the given POM file with the given profiles. */
  @NotNull
  public MavenServerExecutionResult resolveProject(@NotNull final VirtualFile file,
                                                   @NotNull final Collection<String> activeProfiles,
                                                   @NotNull final Collection<String> inactiveProfiles)
    throws MavenProcessCanceledException {
    return perform(new RetriableCancelable<MavenServerExecutionResult>() {
      @Override
      public MavenServerExecutionResult execute() throws RemoteException, MavenServerProcessCanceledException {
        return getOrCreateWrappee().resolveProject(new File(file.getPath()), activeProfiles, inactiveProfiles);
      }
    });
  }

  /** Returns the effective POM text for the given file, or null if it cannot be evaluated. */
  @Nullable
  public String evaluateEffectivePom(@NotNull final VirtualFile file,
                                     @NotNull final Collection<String> activeProfiles,
                                     @NotNull final Collection<String> inactiveProfiles)
    throws MavenProcessCanceledException {
    return perform(new RetriableCancelable<String>() {
      @Override
      public String execute() throws RemoteException, MavenServerProcessCanceledException {
        return getOrCreateWrappee()
          .evaluateEffectivePom(new File(file.getPath()),
                                new ArrayList<String>(activeProfiles),
                                new ArrayList<String>(inactiveProfiles));
      }
    });
  }

  @NotNull
  public MavenArtifact resolve(@NotNull final MavenArtifactInfo info,
                               @NotNull final List<MavenRemoteRepository> remoteRepositories)
    throws MavenProcessCanceledException {
    return perform(new RetriableCancelable<MavenArtifact>() {
      @Override
      public MavenArtifact execute() throws RemoteException, MavenServerProcessCanceledException {
        return getOrCreateWrappee().resolve(info, remoteRepositories);
      }
    });
  }

  @NotNull
  public List<MavenArtifact> resolveTransitively(@NotNull final List<MavenArtifactInfo> artifacts,
                                                 @NotNull final List<MavenRemoteRepository> remoteRepositories)
    throws MavenProcessCanceledException {
    return perform(new RetriableCancelable<List<MavenArtifact>>() {
      @Override
      public List<MavenArtifact> execute() throws RemoteException, MavenServerProcessCanceledException {
        return getOrCreateWrappee().resolveTransitively(artifacts, remoteRepositories);
      }
    });
  }

  @NotNull
  public List<String> retrieveVersions(@NotNull final String groupId,
                                       @NotNull final String artifactId,
                                       @NotNull final String remoteRepository)
    throws MavenProcessCanceledException {
    return perform(new RetriableCancelable<List<String>>() {
      @Override
      public List<String> execute() throws RemoteException, MavenServerProcessCanceledException {
        return getOrCreateWrappee().retrieveAvailableVersions(groupId, artifactId, remoteRepository);
      }
    });
  }

  /**
   * Resolves a plugin's artifacts. Returns an empty collection when the native
   * project handle or the embedder connection has already been lost; in the
   * latter case the connection error is reported but not retried, because the
   * {@link NativeMavenProjectHolder} id would be stale after a reconnect.
   */
  public Collection<MavenArtifact> resolvePlugin(@NotNull final MavenPlugin plugin,
                                                 @NotNull final List<MavenRemoteRepository> repositories,
                                                 @NotNull final NativeMavenProjectHolder nativeMavenProject,
                                                 final boolean transitive) throws MavenProcessCanceledException {
    int id;
    try {
      id = nativeMavenProject.getId();
    }
    catch (RemoteException e) {
      // do not call handleRemoteError here since this error occurred because of previous remote error
      return Collections.emptyList();
    }

    try {
      return getOrCreateWrappee().resolvePlugin(plugin, repositories, id, transitive);
    }
    catch (RemoteException e) {
      // do not try to reconnect here since we have lost NativeMavenProjectHolder anyway.
      handleRemoteError(e);
      return Collections.emptyList();
    }
    catch (MavenServerProcessCanceledException e) {
      throw new MavenProcessCanceledException();
    }
  }

  /** Executes the given goals for the POM file; convenience overload without project selection. */
  @NotNull
  public MavenServerExecutionResult execute(@NotNull final VirtualFile file,
                                            @NotNull final Collection<String> activeProfiles,
                                            @NotNull final Collection<String> inactiveProfiles,
                                            @NotNull final List<String> goals)
    throws MavenProcessCanceledException {
    return perform(new RetriableCancelable<MavenServerExecutionResult>() {
      @Override
      public MavenServerExecutionResult execute() throws RemoteException, MavenServerProcessCanceledException {
        return getOrCreateWrappee()
          .execute(new File(file.getPath()), activeProfiles, inactiveProfiles, goals,
                   Collections.<String>emptyList(), false, false);
      }
    });
  }

  /** Executes the given goals, optionally restricted to selected projects (also-make semantics). */
  @NotNull
  public MavenServerExecutionResult execute(@NotNull final VirtualFile file,
                                            @NotNull final Collection<String> activeProfiles,
                                            @NotNull final Collection<String> inactiveProfiles,
                                            @NotNull final List<String> goals,
                                            @NotNull final List<String> selectedProjects,
                                            final boolean alsoMake,
                                            final boolean alsoMakeDependents)
    throws MavenProcessCanceledException {
    return perform(new RetriableCancelable<MavenServerExecutionResult>() {
      @Override
      public MavenServerExecutionResult execute() throws RemoteException, MavenServerProcessCanceledException {
        return getOrCreateWrappee()
          .execute(new File(file.getPath()), activeProfiles, inactiveProfiles, goals,
                   selectedProjects, alsoMake, alsoMakeDependents);
      }
    });
  }

  public void reset() {
    MavenServerEmbedder w = getWrappee();
    if (w == null) return;
    try {
      w.reset();
    }
    catch (RemoteException e) {
      handleRemoteError(e);
    }
    resetCustomization();
  }

  public void release() {
    MavenServerEmbedder w = getWrappee();
    if (w == null) return;
    try {
      w.release();
    }
    catch (RemoteException e) {
      handleRemoteError(e);
    }
    resetCustomization();
  }

  public void clearCaches() {
    MavenServerEmbedder w = getWrappee();
    if (w == null) return;
    try {
      w.clearCaches();
    }
    catch (RemoteException e) {
      handleRemoteError(e);
    }
  }

  public void clearCachesFor(MavenId projectId) {
    MavenServerEmbedder w = getWrappee();
    if (w == null) return;
    try {
      w.clearCachesFor(projectId);
    }
    catch (RemoteException e) {
      handleRemoteError(e);
    }
  }

  // Exports console/indicator as RMI stubs and records the new customization;
  // any previous one is unexported first.
  private synchronized void setCustomization(MavenConsole console,
                                             MavenProgressIndicator indicator,
                                             MavenWorkspaceMap workspaceMap,
                                             boolean failOnUnresolvedDependency,
                                             boolean alwaysUpdateSnapshot) {
    resetCustomization();
    myCustomization = new Customization(MavenServerManager.wrapAndExport(console),
                                        MavenServerManager.wrapAndExport(indicator),
                                        workspaceMap,
                                        failOnUnresolvedDependency,
                                        alwaysUpdateSnapshot);
  }

  // Unexports the RMI stubs; a missing export is only logged since there is
  // nothing left to clean up in that case.
  private synchronized void resetCustomization() {
    if (myCustomization == null) return;

    try {
      UnicastRemoteObject.unexportObject(myCustomization.console, true);
    }
    catch (NoSuchObjectException e) {
      MavenLog.LOG.warn(e);
    }
    try {
      UnicastRemoteObject.unexportObject(myCustomization.indicator, true);
    }
    catch (NoSuchObjectException e) {
      MavenLog.LOG.warn(e);
    }

    myCustomization = null;
  }

  /** Immutable snapshot of the settings pushed to the remote embedder. */
  private static class Customization {
    private final MavenServerConsole console;
    private final MavenServerProgressIndicator indicator;
    private final MavenWorkspaceMap workspaceMap;
    private final boolean failOnUnresolvedDependency;
    private final boolean alwaysUpdateSnapshot;

    private Customization(MavenServerConsole console,
                          MavenServerProgressIndicator indicator,
                          MavenWorkspaceMap workspaceMap,
                          boolean failOnUnresolvedDependency,
                          boolean alwaysUpdateSnapshot) {
      this.console = console;
      this.indicator = indicator;
      this.workspaceMap = workspaceMap;
      this.failOnUnresolvedDependency = failOnUnresolvedDependency;
      this.alwaysUpdateSnapshot = alwaysUpdateSnapshot;
    }
  }
}
/*******************************************************************************
 * Copyright 2013 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 ******************************************************************************/
package emlab.gen.domain.factory;

import java.io.IOException;
import java.util.List;

import org.codehaus.groovy.syntax.ReadException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;

import com.googlecode.jcsv.reader.CSVEntryParser;

import emlab.gen.domain.agent.EnergyProducer;
import emlab.gen.domain.contract.Loan;
import emlab.gen.domain.technology.PowerGeneratingTechnology;
import emlab.gen.domain.technology.PowerGridNode;
import emlab.gen.domain.technology.PowerPlant;
import emlab.gen.repository.Reps;

/**
 * The power plant entry parser, takes rows of a CSV table and turns it into
 * power plants in the database when the simulation starts.
 *
 * The columns of the table need to be defined in the following order:
 *
 * Name|TechnologyName|LocationName|Age|OwnerName|Capacity|Efficiency
 *
 * and column headers should be given.
 *
 * TechnologyName (of class PowerGeneratingTechnology), OwnerName (of class
 * EnergyProducer) and LocationName (of class PowerGridNode) need to correspond
 * exactly to the names defined in the scenario file.
 *
 * The entries of the columns OwnerName, Capacity and Efficiency may be left
 * empty. In this case the owner is randomly assigned, the capacity set to the
 * standard capacity times the locational capacity factor, and the efficiency is
 * calculated from the age of the power plant and the learning curve of the
 * technology. The columns OwnerName, Capacity, and Efficiency may be left away
 * entirely (but only if the columns to the right are also left away).
 *
 * @author JCRichstein
 *
 */
public class PowerPlantEntryParser implements CSVEntryParser<PowerPlant> {

    private final List<EnergyProducer> producers;

    private final List<PowerGeneratingTechnology> technologies;

    private final List<PowerGridNode> powerGridNodes;

    /**
     *
     */
    public PowerPlantEntryParser(List<EnergyProducer> producers, List<PowerGeneratingTechnology> technologies,
            List<PowerGridNode> powerGridNodes) {
        this.producers = producers;
        this.technologies = technologies;
        this.powerGridNodes = powerGridNodes;
    }

    @Autowired
    Reps reps;

    static final Logger logger = LoggerFactory.getLogger(PowerPlantEntryParser.class);

    /**
     * Turns one CSV row into a persisted PowerPlant.
     *
     * Column layout: Name|TechnologyName|LocationName|Age|OwnerName|Capacity|Efficiency,
     * where the last three columns are optional (see class javadoc).
     *
     * @param data one CSV row; must contain at least the first four columns
     * @return the newly created power plant
     * @throws IllegalArgumentException if the location column is empty
     */
    @Override
    public PowerPlant parseEntry(String... data) {
        String name = data[0];
        String technologyName = data[1];
        String locationName = data[2];
        int age = Integer.parseInt(data[3]);
        String ownerName = "";
        // BUGFIX: guards were off by one (e.g. "data.length > 3" guarding
        // data[4]), causing ArrayIndexOutOfBoundsException for rows that
        // legitimately omit the optional trailing columns.
        if (data.length > 4)
            ownerName = data[4];
        double capacity = 0;
        if (data.length > 5 && !data[5].isEmpty())
            capacity = Double.parseDouble(data[5]);
        double efficiency = 0;
        if (data.length > 6 && !data[6].isEmpty()) {
            efficiency = Double.parseDouble(data[6]);
        }
        EnergyProducer energyProducer = null;
        if (!ownerName.isEmpty()) {
            for (EnergyProducer producer : producers) {
                if (producer.getName().equals(ownerName)) {
                    energyProducer = producer;
                    break;
                }
            }
        } else {
            // Empty owner column: assign a random producer (see class javadoc).
            energyProducer = getRandomProducer(producers);
        }
        PowerGeneratingTechnology pgt = null;
        if (!technologyName.isEmpty()) {
            for (PowerGeneratingTechnology ppTechnology : technologies) {
                if (ppTechnology.getName().equals(technologyName)) {
                    pgt = ppTechnology;
                    break;
                }
            }
        } else {
            pgt = technologies.get(0);
        }
        PowerGridNode powerGridNode = null;
        if (!locationName.isEmpty()) {
            for (PowerGridNode node : powerGridNodes) {
                if (node.getName().equals(locationName)) {
                    powerGridNode = node;
                    break;
                }
            }
        } else {
            // BUGFIX: previously threw a ReadException and immediately caught it,
            // only printing the stack trace and continuing with a null location,
            // which fails later with a NullPointerException. Fail fast instead.
            throw new IllegalArgumentException("Location field is not allowed to be empty!");
        }
        return createPowerPlant(name, pgt, energyProducer, powerGridNode, age, capacity, efficiency);
    }

    /**
     * Builds and persists the plant plus its financing loan.
     *
     * @param capacity   0 means "use technology capacity times the node's
     *                   capacity multiplication factor"
     * @param efficiency 0 means "derive from construction start time"
     */
    private PowerPlant createPowerPlant(String name, PowerGeneratingTechnology technology,
            EnergyProducer energyProducer, PowerGridNode location, int age, double capacity, double efficiency) {
        PowerPlant plant = new PowerPlant().persist();
        plant.setName(name);
        plant.setTechnology(technology);
        plant.setOwner(energyProducer);
        plant.setLocation(location);
        // Construction started "age" ticks before lead/permit time ended, i.e. in the past.
        plant.setConstructionStartTime(-(technology.getExpectedLeadtime() + technology.getExpectedPermittime() + age));
        plant.setActualLeadtime(plant.getTechnology().getExpectedLeadtime());
        plant.setActualPermittime(plant.getTechnology().getExpectedPermittime());
        plant.setExpectedEndOfLife(plant.getConstructionStartTime() + plant.getActualPermittime()
                + plant.getActualLeadtime() + plant.getTechnology().getExpectedLifetime());
        if (capacity == 0) {
            plant.setActualNominalCapacity(technology.getCapacity() * location.getCapacityMultiplicationFactor());
        } else {
            plant.setActualNominalCapacity(capacity);
        }
        plant.calculateAndSetActualInvestedCapital(plant.getConstructionStartTime());
        if (efficiency == 0) {
            plant.calculateAndSetActualEfficiency(plant.getConstructionStartTime());
        } else {
            plant.setActualEfficiency(efficiency);
        }
        plant.calculateAndSetActualFixedOperatingCosts(plant.getConstructionStartTime());
        plant.setDismantleTime(1000);
        Loan loan = new Loan().persist();
        loan.setFrom(energyProducer);
        loan.setTo(null);
        double amountPerPayment = determineLoanAnnuities(
                plant.getActualInvestedCapital() * energyProducer.getDebtRatioOfInvestments(), plant.getTechnology()
                        .getDepreciationTime(), energyProducer.getLoanInterestRate());
        loan.setAmountPerPayment(amountPerPayment);
        loan.setTotalNumberOfPayments(plant.getTechnology().getDepreciationTime());
        loan.setLoanStartTime(plant.getConstructionStartTime());
        loan.setNumberOfPaymentsDone(-plant.getConstructionStartTime());// Some
                                                                        // payments
                                                                        // are
                                                                        // already
                                                                        // made
        plant.setLoan(loan);
        return plant;
    }

    /** Returns a uniformly random producer from the list, or null if it is empty. */
    private EnergyProducer getRandomProducer(List<EnergyProducer> producers) {
        if (producers.size() > 0) {
            int size = producers.size();
            int index = getRandomIndexFromList(size);
            return producers.get(index);
        }
        return null;
    }

    // Clamped to size - 1 to guard against Math.random() returning values very close to 1.
    private int getRandomIndexFromList(int size) {
        return (int) Math.min(Math.floor(Math.random() * size), size - 1);
    }

    /**
     * Standard annuity formula: payment per period for a loan of totalLoan
     * repaid over payBackTime periods at the given per-period interest rate.
     */
    public double determineLoanAnnuities(double totalLoan, double payBackTime, double interestRate) {
        double q = 1 + interestRate;
        double annuity = totalLoan * (Math.pow(q, payBackTime) * (q - 1)) / (Math.pow(q, payBackTime) - 1);
        return annuity;
    }
}
package edu.ucsc.soe.reductionist;

import automata.AutomataException;
import automata.svpa.SVPA;
import automata.svpa.TaggedSymbol;
import org.junit.Test;
import org.roaringbitmap.RoaringBitmap;
import org.sat4j.specs.TimeoutException;
import theory.svpa.equalityalgebra.EqualityPredicate;

import javax.json.Json;
import javax.json.JsonArrayBuilder;
import javax.json.JsonObjectBuilder;
import javax.json.JsonWriter;
import java.io.FileWriter;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;

/**
 * Benchmark-style test that loads several grammar corpora, intersects them with
 * tag-inclusion/exclusion properties, and writes timing/cardinality results to
 * results.json.
 */
public class TestReductionist {

    /**
     * Records the total cardinality under {@code netCardKey} and the
     * per-length cardinality array under {@code cardKey} into {@code outp}.
     */
    public void outputResults(JsonObjectBuilder outp, Reductionist.Cardinalities cards,
                              String netCardKey, String cardKey) {
        outp.add(netCardKey, cards.total);
        JsonArrayBuilder cs = Json.createArrayBuilder();
        for(int i = 0; i < cards.cards.length; i++) {
            cs.add(cards.cards[i]);
        }
        outp.add(cardKey, cs);
    }

    /**
     * Builds the property "contains every tag in {@code tagsIn} and none in
     * {@code tagsOut}", intersects it with the corpus SVPA, times the property
     * build and a witness search, and records timings (ms) plus cardinalities
     * into {@code outs} under keys prefixed with {@code key}.
     */
    void doCheck(Reductionist r, Collection<String> tagsIn, Collection<String> tagsOut,
                 JsonObjectBuilder outs, String key) throws TimeoutException, AutomataException {
        long startTime = System.nanoTime();
        SVPA<EqualityPredicate<FiniteSetPred, RoaringBitmap>, RoaringBitmap> prop = r.tagSetProperty(tagsIn).
            intersectionWith(
                r.tagSetAbsentProperty(tagsOut),
                r.theory);
        SVPA<EqualityPredicate<FiniteSetPred, RoaringBitmap>, RoaringBitmap> svpa = r.svpa.intersectionWith(prop, r.theory);
        long midTime = System.nanoTime();
        // The witness itself is discarded; we only care that computing it is timed.
        List<TaggedSymbol<RoaringBitmap>> witness = r.witnessForProperty(prop);
        long endTime = System.nanoTime();
        long propT = midTime - startTime;
        // NOTE(review): "Check" time is measured from startTime, so it includes
        // the build phase as well — presumably intentional (total time); confirm.
        long checkT = endTime - startTime;
        outs.add(key+"Build", propT/1000000.0);
        outs.add(key+"Check", checkT/1000000.0);
        Reductionist.Cardinalities cards = r.getCardinalities(svpa, 6000);
        outputResults(outs, cards, key+"NetCards", key+"Cards");
    }

    @Test
    public void CreateSVPA () throws Exception {
        JsonObjectBuilder results = Json.createObjectBuilder();
        int lim = 6000;

        // --- talktown (small) -------------------------------------------------
        JsonObjectBuilder ttSmallResults = Json.createObjectBuilder();
        long a,b;
        a = System.nanoTime();
        Reductionist talktownSmall = Reductionist.fromJSONFile("int10_experiment_benchmarks/talktown.json", false);
        b = System.nanoTime();
        ttSmallResults.add("constructionTime", (b-a)/1000000.0);
        a = System.nanoTime();
        Reductionist.Cardinalities pCards = talktownSmall.getCardinalities(talktownSmall.svpa, lim);
        b = System.nanoTime();
        ttSmallResults.add("cardsTime", (b-a)/1000000.0);
        outputResults(ttSmallResults, pCards, "netCards", "cards");
        a = System.nanoTime();
        talktownSmall = Reductionist.fromJSONFile("int10_experiment_benchmarks/talktown.json", true);
        b = System.nanoTime();
        ttSmallResults.add("constructionTimeEMs", (b-a)/1000000.0);
        Collection<String> tags, tagsIn, tagsOut;
        // prop/witness are only referenced by the commented-out exploration below.
        SVPA<EqualityPredicate<FiniteSetPred, RoaringBitmap>, RoaringBitmap> prop;
        List<TaggedSymbol<RoaringBitmap>> witness;
        System.out.println("E1");
        tagsIn = Arrays.asList("Moves:request name", "PushObligation:introduce self");
        tagsOut = Arrays.asList("Moves:say nice to meet you", "Moves:say rude remark");
        doCheck(talktownSmall, tagsIn, tagsOut, ttSmallResults, "easy");
        //hard
        boolean hard = false;
        if(hard) {
            System.out.println("H1");
            tagsIn = Arrays.asList(
                "Moves:introduce self",
                "PushObligation:repair incorrect first name usage",
                "Propositions:subject=interlocutor;feature_type=first name;feature_value=speaker.belief(interlocutor, 'first name');feature_object_itself=None",
                "Propositions:subject=speaker;feature_type=first name;feature_value=speaker.first_name;feature_object_itself=None",
                "ViolationConditions:awkward rehash<--lambda conversation: conversation.earlier_move(conversation.speaker, 'introduce self')",
                "Moves:say nice to meet you",
                "Moves:say first name",
                "Preconditions:lambda speaker, interlocutor: speaker.belief(interlocutor, 'first name')",
                "PushObligation:say nice to meet you",
                "Preconditions:lambda conversation: conversation.earlier_move(conversation.interlocutor, 'introduce self')",
                "Moves:introduce self back",
                "Preconditions:lambda conversation: conversation.last_interlocutor_turn.performed_move('say nice to meet you')",
                "Preconditions:lambda speaker, interlocutor: speaker.inaccurate_belief(interlocutor, 'first name')",
                //"Context:NULL",
                "Preconditions:lambda speaker: speaker.personality.high_e",
                "Preconditions:lambda speaker, interlocutor: interlocutor in speaker.relationships",
                "Preconditions:lambda conversation: conversation.has_obligation(conversation.speaker, 'say nice to meet you')",
                "Preconditions:lambda conversation: conversation.earlier_move(conversation.interlocutor, 'say first name')"
            );
            // System.out.println("H2");
            tagsOut = Arrays.asList(
                "Preconditions:lambda speaker: speaker.moves",
                "Preconditions:lambda conversation: len(conversation.turns) >= 6",
                "Preconditions:lambda speaker: speaker.mind.preoccupation and speaker.belief(speaker.mind.preoccupation, 'first name')",
                "Preconditions:lambda conversation: not conversation.speaker.belief(conversation.speaker_subject_match, 'first name')",
                "Moves:ask do you know someone",
                "Preconditions:lambda conversation: conversation.last_turn and conversation.last_turn.speaker is conversation.speaker",
                "Context:subject:first_name=speaker.belief(speaker.mind.preoccupation, 'first name')",
                "Preconditions:lambda speaker, interlocutor: interlocutor not in speaker.mind.mental_models",
                "Moves:respond about the weather",
                "ViolationConditions:awkward rehash<--lambda conversation: conversation.earlier_move('either', 'reask about the weather')",
                "Preconditions:lambda conversation: conversation.earlier_move(conversation.speaker, 'assert about the weather')",
                "Preconditions:lambda conversation: conversation.earlier_move(conversation.interlocutor, 'ask how are you')",
                "Preconditions:lambda speaker: speaker.male and speaker.occupation and speaker.occupation.level < 2",
                "Preconditions:lambda speaker: speaker.personality.cold",
                "Moves:ask how are you",
                "PushObligation:redress incorrect first name usage",
                "Context:subject:last_name=speaker.belief(speaker.mind.preoccupation, 'last name')",
                "Preconditions:lambda conversation: not conversation.earlier_move(conversation.interlocutor, 'say first name')"
            );
            doCheck(talktownSmall, tagsIn, tagsOut, ttSmallResults, "hard");
        }
        results.add("talktown-small", ttSmallResults);

        // --- HackerTexts ------------------------------------------------------
        JsonObjectBuilder hackResults = Json.createObjectBuilder();
        a = System.nanoTime();
        Reductionist hackers = Reductionist.fromJSONFile("int10_experiment_benchmarks/HackerTexts.json", false);
        b = System.nanoTime();
        hackResults.add("constructionTime", (b-a)/1000000.0);
        a = System.nanoTime();
        pCards = hackers.getCardinalities(hackers.svpa, lim);
        b = System.nanoTime();
        hackResults.add("cardsTime", (b-a)/1000000.0);
        outputResults(hackResults, pCards, "netCards", "cards");
        a = System.nanoTime();
        hackers = Reductionist.fromJSONFile("int10_experiment_benchmarks/HackerTexts.json", true);
        b = System.nanoTime();
        hackResults.add("constructionTimeEMs", (b-a)/1000000.0);
        System.out.println("E1");
        tagsIn = Arrays.asList("relationship:== Work", "linkSuspicion:ClearlySuspicious");
        tagsOut = Arrays.asList("jerk:> 2", "directness:>= 2");
        doCheck(hackers, tagsIn, tagsOut, hackResults, "easy");
        //hard
        boolean hard2 = false;
        if(hard2) {
            System.out.println("H1");
            tagsIn = Arrays.asList(
                "jerk:> 2",
                "jerk:>= 1",
                "directness:> 2",
                "relationship:== Work",
                "directness:>= 2",
                "jerk:<= 3",
                "assertiveness:>= 2",
                "linkSuspicion:Suggestive"
            );
            // System.out.println("H2");
            tagsOut = Arrays.asList(
                "linkSuspicion:Amiguous",
                "directness:<= 3",
                "relationship:== Communities",
                "relationship:== Family",
                "jerk:< 2",
                "jerk:>= 2",
                "directness:>= 1",
                "assertiveness:<= 3"
            );
            doCheck(hackers, tagsIn, tagsOut, hackResults, "hard");
            // prop = hackers.
            //     tagSetProperty(tagsIn).
            //     intersectionWith(
            //         hackers.tagSetAbsentProperty(tagsOut),
            //         hackers.theory);
            // witness = hackers.witnessForProperty(prop);
            // pCards = talktownSmall.getCardinalities(prop, lim);
            // outputResults(ttSmallResults, pCards, "netCardsHard2", "cardsHard2");
        }
        results.add("hackers", hackResults);

        // --- jukejoint --------------------------------------------------------
        JsonObjectBuilder jukeResults = Json.createObjectBuilder();
        a = System.nanoTime();
        Reductionist juke = Reductionist.fromJSONFile("int10_experiment_benchmarks/jukejoint.json", false);
        b = System.nanoTime();
        jukeResults.add("constructionTime", (b-a)/1000000.0);
        a = System.nanoTime();
        pCards = juke.getCardinalities(juke.svpa, lim);
        b = System.nanoTime();
        jukeResults.add("cardsTime", (b-a)/1000000.0);
        outputResults(jukeResults, pCards, "netCards", "cards");
        a = System.nanoTime();
        juke = Reductionist.fromJSONFile("int10_experiment_benchmarks/jukejoint.json", true);
        b = System.nanoTime();
        jukeResults.add("constructionTimeEMs", (b-a)/1000000.0);
        System.out.println("E1");
        tagsIn = Arrays.asList(
            "Signals:disappointment 1",
            // "Signals:do depart 1",
            // "Preconditions:lambda thinker: not ('do depart' in thinker.mind.receptors and thinker.mind.receptors['do depart'].voltage >= thinker.game.config.summative_thought_receptor_voltage_threshold)",
            "Preconditions:lambda thinker: not thinker.mind.thoughts",
            "Signals:this town 1"
        );
        tagsOut = Arrays.asList(
            // "Preconditions:lambda thinker: 'don\\'t depart' in thinker.mind.receptors and thinker.mind.receptors[\"don\\'t depart\"].most_associated_signals(n=3, excluding=[\"do depart\", \"no romance here\", \"disappointment\", \"commitment\", \"love\", \"deception\"])[0] == \\'this town\\'",
            // "Effects:lambda thinker: thinker.make_decision",
            "Preconditions:lambda thinker: thinker.love_interest",
            "Signals:no romance here 1",
            "Preconditions:lambda thinker: not thinker.spouse"
        );
        doCheck(juke, tagsIn, tagsOut, jukeResults, "easy");
        //hard
        boolean hard3 = false;
        if(hard3) {
            System.out.println("H1");
            tagsIn = Arrays.asList(
                "Preconditions:lambda thinker: thinker.personality.gregarious",
                "Signals:love 1",
                "Signals:disappointment 1",
                "Preconditions:lambda thinker: thinker.spouse",
                "Signals:don't depart 1",
                "Preconditions:lambda thinker: thinker.love_interest and thinker.love_interest is not thinker.spouse",
                "Preconditions:lambda thinker: thinker.love_interest and thinker.love_interest.female",
                "Preconditions:lambda thinker: len(thinker.friends) < 5",
                "Signals:do depart 1",
                "Preconditions:lambda thinker: not ('do depart' in thinker.mind.receptors and thinker.mind.receptors['do depart'].voltage >= thinker.game.config.summative_thought_receptor_voltage_threshold)",
                "Preconditions:lambda thinker: not (\"don't depart\" in thinker.mind.receptors and thinker.mind.receptors[\"don't depart\"].voltage >= thinker.game.config.summative_thought_receptor_voltage_threshold)",
                "Preconditions:lambda thinker: thinker.spouse and thinker.marriage.duration > 2",
                "Preconditions:lambda thinker: len(thinker.friends) is 0",
                "Signals:my partner 1",
                "Preconditions:lambda thinker: thinker.requited_love_interest",
                "Signals:commitment 1"
            );
            // System.out.println("H2");
            tagsOut = Arrays.asList(
                "Preconditions:lambda thinker: \"don\\'t depart\" in thinker.mind.receptors and thinker.mind.receptors[\"don\\'t depart\"].most_associated_signals(n=3, excluding=[\"do depart\", \"no romance here\", \"disappointment\", \"commitment\", \"love\", \"deception\"])[0] == \\'my job\\'",
                "Preconditions:lambda thinker: \\'do depart\\' in thinker.mind.receptors and thinker.mind.receptors[\\'do depart\\'].most_associated_signals(n=3, excluding=[\"don\\'t depart\", \\'disappointment\\', \\'commitment\\', \\'love\\', \\'deception\\'])[1] == \\'my partner\\'",
                "Preconditions:lambda thinker: thinker.kids",
                "Preconditions:lambda thinker: thinker.boss and thinker is not thinker.boss and thinker.dislikes(thinker.boss) and not thinker.hates(thinker.boss)",
                "Signals:new job elsewhere 1",
                "Preconditions:lambda thinker: not thinker.mind.last_thought_had_signal(\"don\\'t depart\")",
                "Preconditions:lambda thinker: not thinker.personality.gregarious and not thinker.personality.cold",
                "Preconditions:lambda thinker: thinker.occupation and thinker.occupation.years_experience > 2",
                "Preconditions:lambda thinker: \"don\\'t depart\" in thinker.mind.receptors and thinker.mind.receptors[\"don\\'t depart\"].most_associated_signals(n=3, excluding=[\"do depart\", \"no romance here\", \"disappointment\", \"commitment\", \"love\", \"deception\"])[1] == \\'this town\\'",
                "Preconditions:lambda thinker: thinker.age < 30 and thinker.boss and thinker.boss.age > 50",
                "Preconditions:lambda thinker: thinker.mind.last_thought_had_signal(\\'dodepart\\')",
                "Preconditions:lambda thinker: \\'do depart\\' in thinker.mind.receptors and thinker.mind.receptors[\\'do depart\\'].most_associated_signals(n=3, excluding=[\"don\\'t depart\", \\'disappointment\\', \\'commitment\\', \\'love\\', \\'deception\\'])[1] == \\'no romance here\\'",
                "Preconditions:lambda thinker: \"don\\'t depart\" in thinker.mind.receptors and thinker.mind.receptors[\"don\\'t depart\"].most_associated_signals(n=3, excluding=[\"do depart\", \"no romance here\", \"disappointment\", \"commitment\", \"love\", \"deception\"])[0] == \\'my love interest\\'",
                "Preconditions:lambda thinker: \"don\\'t depart\" in thinker.mind.receptors and thinker.mind.receptors[\"don\\'t depart\"].most_associated_signals(n=3, excluding=[\"do depart\", \"no romance here\", \"disappointment\", \"commitment\", \"love\", \"deception\"])[1] == \\'new job elsewhere\\'",
                "Effects:lambda thinker: thinker.make_decision",
                "Preconditions:lambda thinker: thinker.boss and thinker is not thinker.boss"
            );
            doCheck(juke, tagsIn, tagsOut, jukeResults, "hard");
        }
        results.add("juke", jukeResults);

        // --- talktown (large) -------------------------------------------------
        JsonObjectBuilder ttLargeResults = Json.createObjectBuilder();
        // BUGFIX: the timer start was missing here, so "constructionTime" was
        // measured against the stale 'a' left over from the juke section above.
        a = System.nanoTime();
        Reductionist talktownLarge = Reductionist.fromJSONFile("int10_experiment_benchmarks/talktown-aiide-study-2016.json", false);
        b = System.nanoTime();
        ttLargeResults.add("constructionTime", (b-a)/1000000.0);
        a = System.nanoTime();
        pCards = talktownLarge.getCardinalities(talktownLarge.svpa, lim);
        b = System.nanoTime();
        ttLargeResults.add("cardsTime", (b-a)/1000000.0);
        outputResults(ttLargeResults, pCards, "netCards", "cards");
        a = System.nanoTime();
        talktownLarge = Reductionist.fromJSONFile("int10_experiment_benchmarks/talktown-aiide-study-2016.json", true);
        b = System.nanoTime();
        ttLargeResults.add("constructionTimeEMs", (b-a)/1000000.0);
        results.add("talktown_large", ttLargeResults);

        //System.out.println(results.build().toString());
        // try-with-resources guarantees both writers are closed (jw before fw,
        // same order as the original explicit close() calls) even on exception.
        try (FileWriter fw = new FileWriter("results.json");
             JsonWriter jw = Json.createWriter(fw)) {
            jw.write(results.build());
        }
    }
}
package com.elmakers.mine.bukkit.item;

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Set;
import java.util.UUID;

import javax.annotation.Nonnull;
import javax.annotation.Nullable;

import org.apache.commons.lang.StringUtils;
import org.bukkit.Bukkit;
import org.bukkit.Color;
import org.bukkit.Material;
import org.bukkit.attribute.Attribute;
import org.bukkit.configuration.ConfigurationSection;
import org.bukkit.inventory.ItemStack;
import org.bukkit.inventory.meta.ItemMeta;
import org.bukkit.inventory.meta.LeatherArmorMeta;
import org.bukkit.inventory.meta.PotionMeta;
import org.bukkit.potion.PotionEffect;

import com.elmakers.mine.bukkit.api.item.ItemUpdatedCallback;
import com.elmakers.mine.bukkit.api.magic.MageController;
import com.elmakers.mine.bukkit.block.MaterialAndData;
import com.elmakers.mine.bukkit.utility.CompatibilityLib;
import com.elmakers.mine.bukkit.utility.ConfigurationUtils;
import com.google.common.collect.ImmutableSet;

/**
 * A keyed, configurable item definition. The backing {@link ItemStack} is built
 * lazily from configuration ({@link #getOrCreateItemStack()}) and optionally
 * cached. Also acts as an {@link ItemUpdatedCallback} so that asynchronously
 * populated items (skulls, per the code in getOrCreateItemStack) can update any
 * copies handed out while the population was still pending.
 */
public class ItemData implements com.elmakers.mine.bukkit.api.item.ItemData, ItemUpdatedCallback, Cloneable {
    public static final String MINECRAFT_ITEM_PREFIX = "minecraft:";
    // Default earnings are worth * EARN_SCALE unless "earns" is configured.
    public static double EARN_SCALE = 0.5;

    /** A copied item plus its callback, queued while the source item is still being populated. */
    private static class PendingUpdate {
        public ItemStack item;
        public ItemUpdatedCallback callback;

        public PendingUpdate(ItemStack item, ItemUpdatedCallback callback) {
            this.item = item;
            this.callback = callback;
        }
    }

    private final MageController controller;
    private String key;                  // full key, possibly "base@amount"
    private String baseKey;              // key portion before any "@amount" suffix
    private String materialKey;
    private ItemStack item;              // lazily created; may be rebuilt when cache == false
    private ConfigurationSection configuration;  // nulled after item creation when cache == true
    private double worth;
    private Double earns;                // null => derive from worth via EARN_SCALE
    private Integer damage;              // null => leave item damage untouched
    private Set<String> categories = ImmutableSet.of();
    private String creatorId;
    private String creator;
    private boolean cache = true;
    private boolean locked;
    private boolean loaded;              // set once updated() has been called
    private boolean exactIngredient;
    private boolean replaceOnEquip;
    private List<String> discoverRecipes;
    // Non-null only while an async item population is in flight; see updated().
    private List<PendingUpdate> pending = null;

    /**
     * Wraps an existing stack; the key is derived from its type (plus
     * "@amount" when the stack size is greater than one).
     */
    public ItemData(ItemStack itemStack, MageController controller) {
        this.controller = controller;
        this.item = CompatibilityLib.getItemUtils().getCopy(itemStack);
        String itemKey = itemStack.getType().toString();
        if (itemStack.getAmount() > 1) {
            itemKey += "@" + itemStack.getAmount();
        }
        this.setKey(itemKey);
    }

    /** Key and material key are the same string. */
    public ItemData(String materialKey, MageController controller) {
        this(materialKey, materialKey, controller);
    }

    public ItemData(String key, String materialKey, MageController controller) {
        this.controller = controller;
        this.setKey(key);
        this.materialKey = materialKey;
    }

    /**
     * Loads metadata (worth, earns, creator, flags, recipes, categories) from
     * configuration; the item itself is built lazily in
     * {@link #createItemFromConfiguration()}.
     */
    public ItemData(String key, ConfigurationSection configuration, MageController controller) {
        this.controller = controller;
        this.configuration = configuration;
        this.setKey(key);
        this.materialKey = key;
        worth = configuration.getDouble("worth", worth);
        if (configuration.contains("earns")) {
            earns = configuration.getDouble("earns");
        } else {
            earns = null;
        }
        creator = configuration.getString("creator");
        creatorId = configuration.getString("creator_id");
        locked = configuration.getBoolean("locked");
        replaceOnEquip = configuration.getBoolean("replace_on_equip");
        exactIngredient = configuration.getBoolean("exact_ingredient");
        discoverRecipes = ConfigurationUtils.getStringList(configuration, "discover_recipes");
        damage = ConfigurationUtils.getOptionalInteger(configuration, "damage");
        cache = configuration.getBoolean("cache", true);
        // Slightly more efficient if this has been overridden to an empty list
        if (discoverRecipes != null && discoverRecipes.isEmpty()) {
            discoverRecipes = null;
        }
        Collection<String> categoriesList = ConfigurationUtils.getStringList(configuration, "categories");
        if (categoriesList != null) {
            categories = ImmutableSet.copyOf(categoriesList);
        }
    }

    /**
     * Builds the ItemStack from the stored configuration: resolves the base
     * item ("item" as a serialized stack, a section, or a material key), then
     * applies attributes, NBT tags, display name, lore, leather color, and
     * potion effects.
     *
     * @throws InvalidMaterialException if no valid base item can be resolved
     */
    private ItemStack createItemFromConfiguration() throws InvalidMaterialException {
        ConfigurationSection configuration = this.configuration;
        // Save this configuration for later if we're not caching the item, otherwise we are done with it.
        if (cache) {
            this.configuration = null;
        }
        ItemStack item = null;
        if (configuration.isItemStack("item")) {
            // Fully serialized ItemStack.
            item = configuration.getItemStack("item");
        } else if (configuration.isConfigurationSection("item")) {
            // Section form: "type" (defaulting to this key) plus optional "tags".
            ConfigurationSection itemConfiguration = configuration.getConfigurationSection("item");
            String materialKey = itemConfiguration.getString("type", key);
            materialKey = cleanMinecraftItemName(materialKey);
            MaterialAndData material = new MaterialAndData(materialKey);
            if (material.isValid()) {
                item = material.getItemStack(1);
            }
            if (item == null) {
                throw new InvalidMaterialException("Invalid item key: " + materialKey);
            }
            ConfigurationSection tagSection = itemConfiguration.getConfigurationSection("tags");
            if (tagSection != null) {
                item = CompatibilityLib.getItemUtils().makeReal(item);
                CompatibilityLib.getInventoryUtils().saveTagsToItem(tagSection, item);
            }
        } else {
            // Plain string form: a material key (defaulting to this key).
            String materialKey = configuration.getString("item", key);
            materialKey = cleanMinecraftItemName(materialKey);
            MaterialAndData material = new MaterialAndData(materialKey);
            if (material.isValid()) {
                item = material.getItemStack(1);
            }
            if (item == null) {
                throw new InvalidMaterialException("Invalid item key: " + materialKey);
            }
        }
        if (item == null) {
            throw new InvalidMaterialException("Invalid item configuration: " + key);
        }
        // Attribute list form: each node has type/attribute, amount/value, slot, uuid, operation.
        Collection<ConfigurationSection> attributes = ConfigurationUtils.getNodeList(configuration, "attributes");
        if (attributes != null && !attributes.isEmpty()) {
            item = CompatibilityLib.getItemUtils().makeReal(item);
            for (ConfigurationSection attributeConfig : attributes) {
                String attributeKey = attributeConfig.getString("type");
                attributeKey = attributeConfig.getString("attribute", attributeKey);
                try {
                    Attribute attribute = Attribute.valueOf(attributeKey.toUpperCase());
                    double value = attributeConfig.getDouble("amount");
                    value = attributeConfig.getDouble("value", value);
                    String slot = attributeConfig.getString("slot");
                    String uuidString = attributeConfig.getString("uuid");
                    UUID uuid = null;
                    if (uuidString != null) {
                        try {
                            uuid = UUID.fromString(uuidString);
                        } catch (Exception ignore) {
                            // Malformed uuid string; a random one is used below.
                        }
                    }
                    if (uuid == null) {
                        uuid = UUID.randomUUID();
                    }
                    int operation = attributeConfig.getInt("operation", 0);
                    if (!CompatibilityLib.getCompatibilityUtils().setItemAttribute(item, attribute, value, slot, operation, uuid)) {
                        Bukkit.getLogger().warning("Failed to set attribute: " + attributeKey);
                    }
                } catch (Exception ex) {
                    // Unknown attribute name or bad config; skip this attribute.
                    Bukkit.getLogger().warning("Invalid attribute: " + attributeKey);
                }
            }
        } else {
            // Simple map form handled by the inventory utilities.
            ConfigurationSection simpleAttributes = configuration.getConfigurationSection("attributes");
            if (simpleAttributes != null) {
                CompatibilityLib.getInventoryUtils().applyAttributes(item, simpleAttributes, configuration.getString("attribute_slot"));
            }
        }
        // Convenience methods for top-level name, lore and tags
        ConfigurationSection tagSection = configuration.getConfigurationSection("tags");
        if (tagSection != null) {
            item = CompatibilityLib.getItemUtils().makeReal(item);
            CompatibilityLib.getInventoryUtils().saveTagsToItem(tagSection, item);
        }
        // Display name: config "name", falling back to the messages file.
        String customName = configuration.getString("name");
        if (customName == null) {
            customName = controller.getMessages().getIfSet("items." + key + ".name");
        }
        if (customName != null) {
            ItemMeta meta = item.getItemMeta();
            meta.setDisplayName(CompatibilityLib.getCompatibilityUtils().translateColors(customName));
            item.setItemMeta(meta);
        }
        // NOTE(review): Bukkit's getStringList typically returns an empty list
        // (not null) for missing paths, so the messages fallback below may
        // never trigger — confirm whether isEmpty() was intended here.
        List<String> lore = configuration.getStringList("lore");
        if (lore == null) {
            lore = controller.getMessages().getAll("items." + key + ".lore");
        }
        if (lore != null && !lore.isEmpty()) {
            ItemMeta meta = item.getItemMeta();
            for (int i = 0; i < lore.size(); i++) {
                lore.set(i, CompatibilityLib.getCompatibilityUtils().translateColors(lore.get(i)));
            }
            meta.setLore(lore);
            item.setItemMeta(meta);
        }
        // Leather armor color from red/green/blue components.
        ConfigurationSection color = configuration.getConfigurationSection("color");
        if (color != null) {
            ItemMeta meta = item.getItemMeta();
            if (meta instanceof LeatherArmorMeta) {
                int red = color.getInt("red");
                int green = color.getInt("green");
                int blue = color.getInt("blue");
                LeatherArmorMeta leather = (LeatherArmorMeta)meta;
                leather.setColor(Color.fromRGB(red, green, blue));
                item.setItemMeta(meta);
            }
        }
        // Custom potion effects, with an optional shared default duration.
        ConfigurationSection potionEffects = configuration.getConfigurationSection("potion_effects");
        if (potionEffects != null) {
            ItemMeta meta = item.getItemMeta();
            if (meta instanceof PotionMeta) {
                PotionMeta potion = (PotionMeta)meta;
                int potionEffectDuration = configuration.getInt("potion_effect_duration");
                Collection<PotionEffect> effects = ConfigurationUtils.getPotionEffects(potionEffects, potionEffectDuration);
                for (PotionEffect effect : effects) {
                    potion.addCustomEffect(effect, true);
                }
                item.setItemMeta(potion);
            }
        }
        return item;
    }

    private void setKey(String key) {
        this.key = key;
        checkKey();
    }

    /**
     * Splits the key into base key and optional "@amount" suffix; when an
     * amount is present and worth is unset, derives worth from the singular
     * item's worth times the amount.
     */
    public void checkKey() {
        String[] pieces = StringUtils.split(key, "@", 2);
        baseKey = pieces[0];
        if (worth == 0 && pieces.length > 1) {
            try {
                int amount = Integer.parseInt(pieces[1]);
                if (amount > 1) {
                    com.elmakers.mine.bukkit.api.item.ItemData singular = controller.getItem(baseKey);
                    if (singular != null) {
                        worth = singular.getWorth() * amount;
                    }
                }
            } catch (Exception ignore) {
                // Non-numeric suffix; leave worth as-is.
            }
        }
    }

    /**
     * Wraps a pre-built stack with an explicit worth.
     *
     * @throws Exception if item is null
     */
    public ItemData(String key, ItemStack item, double worth, MageController controller) throws Exception {
        this.controller = controller;
        if (item == null) {
            throw new Exception("Invalid item");
        }
        this.key = key;
        this.materialKey = key;
        this.item = item;
        this.worth = worth;
    }

    /** Strips a leading "minecraft:" namespace prefix, if present. */
    public static String cleanMinecraftItemName(String materialKey) {
        if (materialKey.startsWith(MINECRAFT_ITEM_PREFIX)) {
            materialKey = materialKey.substring(MINECRAFT_ITEM_PREFIX.length());
        }
        return materialKey;
    }

    /**
     * Clones this definition under a new key with the given damage value
     * applied (also applied to the cloned stack, when one exists).
     */
    public ItemData createVariant(String key, short damage) throws Exception {
        ItemData variant = (ItemData)this.clone();
        variant.damage = (int)damage;
        variant.key = key;
        variant.materialKey = key;
        if (variant.item != null) {
            variant.item = variant.item.clone();
            CompatibilityLib.getDeprecatedUtils().setItemDamage(variant.item, damage);
        }
        return variant;
    }

    @Override
    public String getKey() {
        return key;
    }

    @Override
    public String getBaseKey() {
        return baseKey;
    }

    @Override
    public double getWorth() {
        return worth;
    }

    /** Explicit "earns" value, or worth scaled by {@link #EARN_SCALE}. */
    @Override
    public double getEarns() {
        return earns == null ? worth * EARN_SCALE : earns;
    }

    @Override
    public boolean hasCustomEarns() {
        return earns != null;
    }

    @Override
    public Set<String> getCategories() {
        return categories;
    }

    @Nullable
    @Override
    public ItemStack getItemStack(int amount) {
        return getItemStack(amount, null);
    }

    @Nullable
    @Override
    public ItemStack getItemStack(int amount, ItemUpdatedCallback callback) {
        return getItemStack((Integer)amount, callback);
    }

    @Nullable
    @Override
    public ItemStack getItemStack() {
        return getItemStack(null, null);
    }

    /**
     * Returns a copy of the (possibly lazily created) item. When an async
     * population is in flight, the copy and callback are queued so
     * {@link #updated(ItemStack)} can patch them later; otherwise the callback
     * fires immediately.
     *
     * @param amount stack size to set, or null to leave the copy's size alone
     */
    @Nullable
    private ItemStack getItemStack(Integer amount, ItemUpdatedCallback callback) {
        ItemStack newItem = CompatibilityLib.getItemUtils().getCopy(getOrCreateItemStack());
        if (newItem == null) {
            if (callback != null) {
                callback.updated(null);
            }
            return null;
        }
        if (pending != null) {
            pending.add(new PendingUpdate(newItem, callback));
        } else if (callback != null) {
            callback.updated(newItem);
        }
        if (amount != null) {
            newItem.setAmount(amount);
        }
        return newItem;
    }

    /**
     * Lazily builds the backing item (rebuilding every call when cache is
     * disabled): from configuration when present, otherwise via the
     * controller's item factory. Falls back to AIR on failure so the return
     * is never null.
     */
    @Nonnull
    public ItemStack getOrCreateItemStack() {
        if (item == null || !cache) {
            if (configuration != null) {
                try {
                    item = createItemFromConfiguration();
                } catch (InvalidMaterialException ex) {
                    controller.info("Invalid item type '" + key + "', may not exist on your server version: " + ex.getMessage(), 2);
                }
                if (item == null) {
                    item = new ItemStack(Material.AIR);
                }
            } else {
                // Passing this as the callback: skulls may be populated async,
                // in which case updated() is invoked when the profile arrives.
                item = controller.createItem(materialKey, null, false, this);
                // NOTE(review): isSkull(item) runs before the null check below —
                // presumably isSkull tolerates null; confirm.
                if (!loaded && CompatibilityLib.getInventoryUtils().isSkull(item)) {
                    pending = new ArrayList<>();
                }
                if (item == null) {
                    controller.getLogger().warning("Invalid item key: " + materialKey);
                    item = new ItemStack(Material.AIR);
                }
            }
            if (item != null && damage != null) {
                CompatibilityLib.getDeprecatedUtils().setItemDamage(item, (short)(int)damage);
            }
        }
        return item;
    }

    @Override
    public String getCreator() {
        return creator;
    }

    @Override
    public String getCreatorId() {
        return creatorId;
    }

    @Override
    public Material getType() {
        return getOrCreateItemStack().getType();
    }

    /** CustomModelData NBT value, or 0 when unset. */
    public int getCustomModelData() {
        return CompatibilityLib.getNBTUtils().getInt(getOrCreateItemStack(), "CustomModelData", 0);
    }

    /** Legacy MaterialData view with the item's durability folded into the data byte. */
    @Nullable
    @Deprecated
    @Override
    public org.bukkit.material.MaterialData getMaterialData() {
        ItemStack item = getOrCreateItemStack();
        org.bukkit.material.MaterialData materialData = item.getData();
        materialData.setData((byte)item.getDurability());
        return materialData;
    }

    @Override
    public int getDurability() {
        return CompatibilityLib.getDeprecatedUtils().getItemDamage(getOrCreateItemStack());
    }

    @Override
    public int getAmount() {
        return getOrCreateItemStack().getAmount();
    }

    @Nullable
    @Override
    public ItemMeta getItemMeta() {
        return getOrCreateItemStack().getItemMeta();
    }

    @Nullable
    @Override
    public MaterialAndData getMaterialAndData() {
        return new MaterialAndData(getOrCreateItemStack());
    }

    @Override
    public boolean isLocked() {
        return this.locked;
    }

    @Override
    public boolean isExactIngredient() {
        return exactIngredient;
    }

    public boolean isReplaceOnEquip() {
        return this.replaceOnEquip;
    }

    /**
     * Called when the async item population completes: adopts the populated
     * stack, copies its skull profile onto every queued copy, fires the queued
     * callbacks, and clears the pending queue.
     */
    @Override
    public void updated(@Nullable ItemStack itemStack) {
        loaded = true;
        if (pending != null && itemStack != null) {
            this.item = itemStack;
            ItemMeta populatedMeta = itemStack.getItemMeta();
            Object profile = CompatibilityLib.getInventoryUtils().getSkullProfile(populatedMeta);
            for (PendingUpdate update : pending) {
                // We're assuming the only thing that changes here is skull profile
                if (profile != null) {
                    ItemStack item = update.item;
                    ItemMeta meta = item.getItemMeta();
                    CompatibilityLib.getInventoryUtils().setSkullProfile(meta, profile);
                    item.setItemMeta(meta);
                }
                if (update.callback != null) {
                    update.callback.updated(update.item);
                }
            }
        }
        pending = null;
    }

    /** Max durability of the item's material, or 0 when no item can be produced. */
    public int getMaxDurability() {
        ItemStack itemStack = getItemStack();
        return itemStack == null ? 0 : itemStack.getType().getMaxDurability();
    }

    @Nullable
    @Override
    public Collection<String> getDiscoverRecipes() {
        return discoverRecipes;
    }

    @Override
    public void addDiscoverRecipe(String recipe) {
        if (discoverRecipes == null) {
            discoverRecipes = new ArrayList<>();
        }
        discoverRecipes.add(recipe);
    }
}
/** * Generated with Acceleo */ package org.wso2.developerstudio.eclipse.gmf.esb.parts.forms; // Start of user code for imports import java.util.ArrayList; import java.util.List; import org.eclipse.emf.common.util.BasicEList; import org.eclipse.emf.common.util.EList; import org.eclipse.emf.common.util.Enumerator; import org.eclipse.emf.ecore.EObject; import org.eclipse.emf.ecore.util.EcoreAdapterFactory; import org.eclipse.emf.edit.ui.provider.AdapterFactoryLabelProvider; import org.eclipse.emf.eef.runtime.EEFRuntimePlugin; import org.eclipse.emf.eef.runtime.api.component.IPropertiesEditionComponent; import org.eclipse.emf.eef.runtime.api.notify.IPropertiesEditionEvent; import org.eclipse.emf.eef.runtime.api.parts.IFormPropertiesEditionPart; import org.eclipse.emf.eef.runtime.impl.notify.PropertiesEditionEvent; import org.eclipse.emf.eef.runtime.part.impl.SectionPropertiesEditingPart; import org.eclipse.emf.eef.runtime.ui.parts.PartComposer; import org.eclipse.emf.eef.runtime.ui.parts.sequence.BindingCompositionSequence; import org.eclipse.emf.eef.runtime.ui.parts.sequence.CompositionSequence; import org.eclipse.emf.eef.runtime.ui.parts.sequence.CompositionStep; import org.eclipse.emf.eef.runtime.ui.utils.EditingUtils; import org.eclipse.emf.eef.runtime.ui.widgets.EEFFeatureEditorDialog; import org.eclipse.emf.eef.runtime.ui.widgets.EMFComboViewer; import org.eclipse.emf.eef.runtime.ui.widgets.FormUtils; import org.eclipse.emf.eef.runtime.ui.widgets.ReferencesTable; import org.eclipse.emf.eef.runtime.ui.widgets.ReferencesTable.ReferencesTableListener; import org.eclipse.emf.eef.runtime.ui.widgets.referencestable.ReferencesTableContentProvider; import org.eclipse.emf.eef.runtime.ui.widgets.referencestable.ReferencesTableSettings; import org.eclipse.jface.viewers.ArrayContentProvider; import org.eclipse.jface.viewers.ISelectionChangedListener; import org.eclipse.jface.viewers.SelectionChangedEvent; import org.eclipse.jface.viewers.StructuredSelection; import 
org.eclipse.jface.viewers.ViewerFilter; import org.eclipse.jface.window.Window; import org.eclipse.swt.SWT; import org.eclipse.swt.events.FocusAdapter; import org.eclipse.swt.events.FocusEvent; import org.eclipse.swt.events.KeyAdapter; import org.eclipse.swt.events.KeyEvent; import org.eclipse.swt.events.SelectionAdapter; import org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.layout.GridData; import org.eclipse.swt.layout.GridLayout; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Event; import org.eclipse.swt.widgets.Listener; import org.eclipse.swt.widgets.Text; import org.eclipse.ui.forms.widgets.Form; import org.eclipse.ui.forms.widgets.FormToolkit; import org.eclipse.ui.forms.widgets.ScrolledForm; import org.eclipse.ui.forms.widgets.Section; import org.wso2.developerstudio.eclipse.gmf.esb.EsbPackage; import org.wso2.developerstudio.eclipse.gmf.esb.parts.EsbViewsRepository; import org.wso2.developerstudio.eclipse.gmf.esb.parts.TaskPropertiesEditionPart; import org.wso2.developerstudio.eclipse.gmf.esb.providers.EsbMessages; // End of user code /** * * */ public class TaskPropertiesEditionPartForm extends SectionPropertiesEditingPart implements IFormPropertiesEditionPart, TaskPropertiesEditionPart { protected Text description; protected Text commentsList; protected Button editCommentsList; protected EList commentsListList; protected Text taskName; protected Text taskGroup; protected EMFComboViewer triggerType; protected Text count; protected Text interval; protected Text cron; protected Text pinnedServers; protected Text taskImplementation; protected ReferencesTable taskProperties; protected List<ViewerFilter> taskPropertiesBusinessFilters = new ArrayList<ViewerFilter>(); protected List<ViewerFilter> taskPropertiesFilters = new ArrayList<ViewerFilter>(); /** * For {@link ISection} use only. 
*/
public TaskPropertiesEditionPartForm() {
	super();
}

/**
 * Default constructor
 *
 * @param editionComponent the {@link IPropertiesEditionComponent} that manage this part
 */
public TaskPropertiesEditionPartForm(IPropertiesEditionComponent editionComponent) {
	super(editionComponent);
}

/**
 * Builds the root widget of the form: a scrolled form whose body is laid out as a
 * three-column grid and populated by {@link #createControls(FormToolkit, Composite)}.
 *
 * {@inheritDoc}
 *
 * @see org.eclipse.emf.eef.runtime.api.parts.IFormPropertiesEditionPart#
 *      createFigure(org.eclipse.swt.widgets.Composite, org.eclipse.ui.forms.widgets.FormToolkit)
 */
public Composite createFigure(final Composite parent, final FormToolkit widgetFactory) {
	ScrolledForm scrolledForm = widgetFactory.createScrolledForm(parent);
	Form form = scrolledForm.getForm();
	view = form.getBody();
	GridLayout layout = new GridLayout();
	layout.numColumns = 3;
	view.setLayout(layout);
	createControls(widgetFactory, view);
	return scrolledForm;
}

/**
 * Declares the composition of this part (one step per Task feature under a single
 * "Properties" section) and delegates the actual widget creation of each step to
 * the matching createXxx method through a {@link PartComposer}.
 *
 * {@inheritDoc}
 *
 * @see org.eclipse.emf.eef.runtime.api.parts.IFormPropertiesEditionPart#
 *      createControls(org.eclipse.ui.forms.widgets.FormToolkit, org.eclipse.swt.widgets.Composite)
 */
public void createControls(final FormToolkit widgetFactory, Composite view) {
	CompositionSequence taskStep = new BindingCompositionSequence(propertiesEditionComponent);
	CompositionStep propertiesStep = taskStep.addStep(EsbViewsRepository.Task.Properties.class);
	propertiesStep.addStep(EsbViewsRepository.Task.Properties.description);
	propertiesStep.addStep(EsbViewsRepository.Task.Properties.commentsList);
	propertiesStep.addStep(EsbViewsRepository.Task.Properties.taskName);
	propertiesStep.addStep(EsbViewsRepository.Task.Properties.taskGroup);
	propertiesStep.addStep(EsbViewsRepository.Task.Properties.triggerType);
	propertiesStep.addStep(EsbViewsRepository.Task.Properties.count);
	propertiesStep.addStep(EsbViewsRepository.Task.Properties.interval);
	propertiesStep.addStep(EsbViewsRepository.Task.Properties.cron);
	propertiesStep.addStep(EsbViewsRepository.Task.Properties.pinnedServers);
	propertiesStep.addStep(EsbViewsRepository.Task.Properties.taskImplementation);
	propertiesStep.addStep(EsbViewsRepository.Task.Properties.taskProperties);

	composer = new PartComposer(taskStep) {

		@Override
		public Composite addToPart(Composite parent, Object key) {
			// Dispatch each declared step key to its widget-creation method.
			if (key == EsbViewsRepository.Task.Properties.class) {
				return createPropertiesGroup(widgetFactory, parent);
			}
			if (key == EsbViewsRepository.Task.Properties.description) {
				return createDescriptionText(widgetFactory, parent);
			}
			if (key == EsbViewsRepository.Task.Properties.commentsList) {
				return createCommentsListMultiValuedEditor(widgetFactory, parent);
			}
			if (key == EsbViewsRepository.Task.Properties.taskName) {
				return createTaskNameText(widgetFactory, parent);
			}
			if (key == EsbViewsRepository.Task.Properties.taskGroup) {
				return createTaskGroupText(widgetFactory, parent);
			}
			if (key == EsbViewsRepository.Task.Properties.triggerType) {
				return createTriggerTypeEMFComboViewer(widgetFactory, parent);
			}
			if (key == EsbViewsRepository.Task.Properties.count) {
				return createCountText(widgetFactory, parent);
			}
			if (key == EsbViewsRepository.Task.Properties.interval) {
				return createIntervalText(widgetFactory, parent);
			}
			if (key == EsbViewsRepository.Task.Properties.cron) {
				return createCronText(widgetFactory, parent);
			}
			if (key == EsbViewsRepository.Task.Properties.pinnedServers) {
				return createPinnedServersText(widgetFactory, parent);
			}
			if (key == EsbViewsRepository.Task.Properties.taskImplementation) {
				return createTaskImplementationText(widgetFactory, parent);
			}
			if (key == EsbViewsRepository.Task.Properties.taskProperties) {
				return createTaskPropertiesTableComposition(widgetFactory, parent);
			}
			return parent;
		}
	};
	composer.compose(view);
}

/**
 * Creates the expandable "Properties" section that hosts every Task control.
 */
protected Composite createPropertiesGroup(FormToolkit widgetFactory, final Composite parent) {
	Section propertiesSection = widgetFactory.createSection(parent,
			Section.TITLE_BAR | Section.TWISTIE | Section.EXPANDED);
propertiesSection.setText(EsbMessages.TaskPropertiesEditionPart_PropertiesGroupLabel);
	GridData propertiesSectionData = new GridData(GridData.FILL_HORIZONTAL);
	propertiesSectionData.horizontalSpan = 3;
	propertiesSection.setLayoutData(propertiesSectionData);
	Composite propertiesGroup = widgetFactory.createComposite(propertiesSection);
	GridLayout propertiesGroupLayout = new GridLayout();
	propertiesGroupLayout.numColumns = 3;
	propertiesGroup.setLayout(propertiesGroupLayout);
	propertiesSection.setClient(propertiesGroup);
	return propertiesGroup;
}

/**
 * Creates the 'description' text field and notifies the edition component on
 * commit (focus lost or CR) and on focus transitions.
 */
protected Composite createDescriptionText(FormToolkit widgetFactory, Composite parent) {
	createDescription(parent, EsbViewsRepository.Task.Properties.description, EsbMessages.TaskPropertiesEditionPart_DescriptionLabel);
	description = widgetFactory.createText(parent, ""); //$NON-NLS-1$
	description.setData(FormToolkit.KEY_DRAW_BORDER, FormToolkit.TEXT_BORDER);
	widgetFactory.paintBordersFor(parent);
	GridData descriptionData = new GridData(GridData.FILL_HORIZONTAL);
	description.setLayoutData(descriptionData);
	description.addFocusListener(new FocusAdapter() {
		/** @see org.eclipse.swt.events.FocusAdapter#focusLost(org.eclipse.swt.events.FocusEvent) */
		@Override
		@SuppressWarnings("synthetic-access")
		public void focusLost(FocusEvent e) {
			if (propertiesEditionComponent != null) {
				// Commit the edited value first, then report the focus-lost transition.
				propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(
						TaskPropertiesEditionPartForm.this,
						EsbViewsRepository.Task.Properties.description,
						PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null,
						description.getText()));
				propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(
						TaskPropertiesEditionPartForm.this,
						EsbViewsRepository.Task.Properties.description,
						PropertiesEditionEvent.FOCUS_CHANGED, PropertiesEditionEvent.FOCUS_LOST,
						null, description.getText()));
			}
		}

		/** @see org.eclipse.swt.events.FocusAdapter#focusGained(org.eclipse.swt.events.FocusEvent) */
		@Override
		public void focusGained(FocusEvent e) {
			if (propertiesEditionComponent != null) {
				propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(
						TaskPropertiesEditionPartForm.this, null,
						PropertiesEditionEvent.FOCUS_CHANGED, PropertiesEditionEvent.FOCUS_GAINED,
						null, null));
			}
		}
	});
	description.addKeyListener(new KeyAdapter() {
		/** @see org.eclipse.swt.events.KeyAdapter#keyPressed(org.eclipse.swt.events.KeyEvent) */
		@Override
		@SuppressWarnings("synthetic-access")
		public void keyPressed(KeyEvent e) {
			if (e.character == SWT.CR) {
				if (propertiesEditionComponent != null)
					propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(TaskPropertiesEditionPartForm.this, EsbViewsRepository.Task.Properties.description, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, description.getText()));
			}
		}
	});
	EditingUtils.setID(description, EsbViewsRepository.Task.Properties.description);
	EditingUtils.setEEFtype(description, "eef::Text"); //$NON-NLS-1$
	FormUtils.createHelpButton(widgetFactory, parent, propertiesEditionComponent.getHelpContent(EsbViewsRepository.Task.Properties.description, EsbViewsRepository.FORM_KIND), null); //$NON-NLS-1$
	// Start of user code for createDescriptionText

	// End of user code
	return parent;
}

/**
 * Creates the read-only 'commentsList' field plus an edit button opening a
 * multi-valued feature editor dialog.
 */
protected Composite createCommentsListMultiValuedEditor(FormToolkit widgetFactory, Composite parent) {
	commentsList = widgetFactory.createText(parent, "", SWT.READ_ONLY); //$NON-NLS-1$
	GridData commentsListData = new GridData(GridData.FILL_HORIZONTAL);
	commentsListData.horizontalSpan = 2;
	commentsList.setLayoutData(commentsListData);
	EditingUtils.setID(commentsList, EsbViewsRepository.Task.Properties.commentsList);
	EditingUtils.setEEFtype(commentsList, "eef::MultiValuedEditor::field"); //$NON-NLS-1$
	editCommentsList = widgetFactory.createButton(parent, getDescription(EsbViewsRepository.Task.Properties.commentsList, EsbMessages.TaskPropertiesEditionPart_CommentsListLabel), SWT.NONE);
	GridData editCommentsListData = new GridData();
editCommentsList.setLayoutData(editCommentsListData);
	editCommentsList.addSelectionListener(new SelectionAdapter() {
		/**
		 * {@inheritDoc}
		 *
		 * @see org.eclipse.swt.events.SelectionAdapter#widgetSelected(org.eclipse.swt.events.SelectionEvent)
		 */
		public void widgetSelected(SelectionEvent e) {
			// Open the EEF multi-valued editor; on OK, replace the local list and commit it.
			EEFFeatureEditorDialog dialog = new EEFFeatureEditorDialog(
					commentsList.getShell(), "Task", new AdapterFactoryLabelProvider(adapterFactory), //$NON-NLS-1$
					commentsListList, EsbPackage.eINSTANCE.getEsbElement_CommentsList().getEType(),
					null, false, true, null, null);
			if (dialog.open() == Window.OK) {
				commentsListList = dialog.getResult();
				if (commentsListList == null) {
					commentsListList = new BasicEList();
				}
				commentsList.setText(commentsListList.toString());
				propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(TaskPropertiesEditionPartForm.this, EsbViewsRepository.Task.Properties.commentsList, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, new BasicEList(commentsListList)));
				setHasChanged(true);
			}
		}
	});
	EditingUtils.setID(editCommentsList, EsbViewsRepository.Task.Properties.commentsList);
	EditingUtils.setEEFtype(editCommentsList, "eef::MultiValuedEditor::browsebutton"); //$NON-NLS-1$
	// Start of user code for createCommentsListMultiValuedEditor

	// End of user code
	return parent;
}

/**
 * Creates the 'taskName' text field; commits on focus lost / CR.
 */
protected Composite createTaskNameText(FormToolkit widgetFactory, Composite parent) {
	createDescription(parent, EsbViewsRepository.Task.Properties.taskName, EsbMessages.TaskPropertiesEditionPart_TaskNameLabel);
	taskName = widgetFactory.createText(parent, ""); //$NON-NLS-1$
	taskName.setData(FormToolkit.KEY_DRAW_BORDER, FormToolkit.TEXT_BORDER);
	widgetFactory.paintBordersFor(parent);
	GridData taskNameData = new GridData(GridData.FILL_HORIZONTAL);
	taskName.setLayoutData(taskNameData);
	taskName.addFocusListener(new FocusAdapter() {
		/** @see org.eclipse.swt.events.FocusAdapter#focusLost(org.eclipse.swt.events.FocusEvent) */
		@Override
		@SuppressWarnings("synthetic-access")
		public void focusLost(FocusEvent e) {
			if (propertiesEditionComponent != null) {
				propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(
						TaskPropertiesEditionPartForm.this,
						EsbViewsRepository.Task.Properties.taskName,
						PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null,
						taskName.getText()));
				propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(
						TaskPropertiesEditionPartForm.this,
						EsbViewsRepository.Task.Properties.taskName,
						PropertiesEditionEvent.FOCUS_CHANGED, PropertiesEditionEvent.FOCUS_LOST,
						null, taskName.getText()));
			}
		}

		/** @see org.eclipse.swt.events.FocusAdapter#focusGained(org.eclipse.swt.events.FocusEvent) */
		@Override
		public void focusGained(FocusEvent e) {
			if (propertiesEditionComponent != null) {
				propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(
						TaskPropertiesEditionPartForm.this, null,
						PropertiesEditionEvent.FOCUS_CHANGED, PropertiesEditionEvent.FOCUS_GAINED,
						null, null));
			}
		}
	});
	taskName.addKeyListener(new KeyAdapter() {
		/** @see org.eclipse.swt.events.KeyAdapter#keyPressed(org.eclipse.swt.events.KeyEvent) */
		@Override
		@SuppressWarnings("synthetic-access")
		public void keyPressed(KeyEvent e) {
			if (e.character == SWT.CR) {
				if (propertiesEditionComponent != null)
					propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(TaskPropertiesEditionPartForm.this, EsbViewsRepository.Task.Properties.taskName, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, taskName.getText()));
			}
		}
	});
	EditingUtils.setID(taskName, EsbViewsRepository.Task.Properties.taskName);
	EditingUtils.setEEFtype(taskName, "eef::Text"); //$NON-NLS-1$
	FormUtils.createHelpButton(widgetFactory, parent, propertiesEditionComponent.getHelpContent(EsbViewsRepository.Task.Properties.taskName, EsbViewsRepository.FORM_KIND), null); //$NON-NLS-1$
	// Start of user code for createTaskNameText

	// End of user code
	return parent;
}

protected
Composite createTaskGroupText(FormToolkit widgetFactory, Composite parent) {
	// Creates the 'taskGroup' text field; commits on focus lost / CR.
	createDescription(parent, EsbViewsRepository.Task.Properties.taskGroup, EsbMessages.TaskPropertiesEditionPart_TaskGroupLabel);
	taskGroup = widgetFactory.createText(parent, ""); //$NON-NLS-1$
	taskGroup.setData(FormToolkit.KEY_DRAW_BORDER, FormToolkit.TEXT_BORDER);
	widgetFactory.paintBordersFor(parent);
	GridData taskGroupData = new GridData(GridData.FILL_HORIZONTAL);
	taskGroup.setLayoutData(taskGroupData);
	taskGroup.addFocusListener(new FocusAdapter() {
		/** @see org.eclipse.swt.events.FocusAdapter#focusLost(org.eclipse.swt.events.FocusEvent) */
		@Override
		@SuppressWarnings("synthetic-access")
		public void focusLost(FocusEvent e) {
			if (propertiesEditionComponent != null) {
				propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(
						TaskPropertiesEditionPartForm.this,
						EsbViewsRepository.Task.Properties.taskGroup,
						PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null,
						taskGroup.getText()));
				propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(
						TaskPropertiesEditionPartForm.this,
						EsbViewsRepository.Task.Properties.taskGroup,
						PropertiesEditionEvent.FOCUS_CHANGED, PropertiesEditionEvent.FOCUS_LOST,
						null, taskGroup.getText()));
			}
		}

		/** @see org.eclipse.swt.events.FocusAdapter#focusGained(org.eclipse.swt.events.FocusEvent) */
		@Override
		public void focusGained(FocusEvent e) {
			if (propertiesEditionComponent != null) {
				propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(
						TaskPropertiesEditionPartForm.this, null,
						PropertiesEditionEvent.FOCUS_CHANGED, PropertiesEditionEvent.FOCUS_GAINED,
						null, null));
			}
		}
	});
	taskGroup.addKeyListener(new KeyAdapter() {
		/** @see org.eclipse.swt.events.KeyAdapter#keyPressed(org.eclipse.swt.events.KeyEvent) */
		@Override
		@SuppressWarnings("synthetic-access")
		public void keyPressed(KeyEvent e) {
			if (e.character == SWT.CR) {
				if (propertiesEditionComponent != null)
					propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(TaskPropertiesEditionPartForm.this, EsbViewsRepository.Task.Properties.taskGroup, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, taskGroup.getText()));
			}
		}
	});
	EditingUtils.setID(taskGroup, EsbViewsRepository.Task.Properties.taskGroup);
	EditingUtils.setEEFtype(taskGroup, "eef::Text"); //$NON-NLS-1$
	FormUtils.createHelpButton(widgetFactory, parent, propertiesEditionComponent.getHelpContent(EsbViewsRepository.Task.Properties.taskGroup, EsbViewsRepository.FORM_KIND), null); //$NON-NLS-1$
	// Start of user code for createTaskGroupText

	// End of user code
	return parent;
}

/**
 * Creates the trigger-type combo. Hand-customised (kept on regeneration):
 * mouse-wheel events on the combo are vetoed.
 *
 * @generated NOT
 */
protected Composite createTriggerTypeEMFComboViewer(FormToolkit widgetFactory, Composite parent) {
	createDescription(parent, EsbViewsRepository.Task.Properties.triggerType, EsbMessages.TaskPropertiesEditionPart_TriggerTypeLabel);
	triggerType = new EMFComboViewer(parent);
	triggerType.setContentProvider(new ArrayContentProvider());
	triggerType.setLabelProvider(new AdapterFactoryLabelProvider(EEFRuntimePlugin.getDefault().getAdapterFactory()));
	GridData triggerTypeData = new GridData(GridData.FILL_HORIZONTAL);
	triggerType.getCombo().setLayoutData(triggerTypeData);
	// Veto mouse-wheel events on the combo — presumably to avoid accidental value
	// changes while scrolling the form (part of the @generated NOT customisation).
	triggerType.getCombo().addListener(SWT.MouseVerticalWheel, new Listener() {
		@Override
		public void handleEvent(Event arg0) {
			arg0.doit = false;
		}
	});
	triggerType.addSelectionChangedListener(new ISelectionChangedListener() {
		/**
		 * {@inheritDoc}
		 *
		 * @see org.eclipse.jface.viewers.ISelectionChangedListener#selectionChanged(org.eclipse.jface.viewers.SelectionChangedEvent)
		 */
		public void selectionChanged(SelectionChangedEvent event) {
			if (propertiesEditionComponent != null)
				propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(TaskPropertiesEditionPartForm.this, EsbViewsRepository.Task.Properties.triggerType, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, getTriggerType()));
		}
	});
triggerType.setID(EsbViewsRepository.Task.Properties.triggerType);
	FormUtils.createHelpButton(widgetFactory, parent, propertiesEditionComponent.getHelpContent(EsbViewsRepository.Task.Properties.triggerType, EsbViewsRepository.FORM_KIND), null); //$NON-NLS-1$
	// Start of user code for createTriggerTypeEMFComboViewer

	// End of user code
	return parent;
}

/**
 * Creates the 'count' text field; commits on focus lost / CR.
 */
protected Composite createCountText(FormToolkit widgetFactory, Composite parent) {
	createDescription(parent, EsbViewsRepository.Task.Properties.count, EsbMessages.TaskPropertiesEditionPart_CountLabel);
	count = widgetFactory.createText(parent, ""); //$NON-NLS-1$
	count.setData(FormToolkit.KEY_DRAW_BORDER, FormToolkit.TEXT_BORDER);
	widgetFactory.paintBordersFor(parent);
	GridData countData = new GridData(GridData.FILL_HORIZONTAL);
	count.setLayoutData(countData);
	count.addFocusListener(new FocusAdapter() {
		/** @see org.eclipse.swt.events.FocusAdapter#focusLost(org.eclipse.swt.events.FocusEvent) */
		@Override
		@SuppressWarnings("synthetic-access")
		public void focusLost(FocusEvent e) {
			if (propertiesEditionComponent != null) {
				propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(
						TaskPropertiesEditionPartForm.this,
						EsbViewsRepository.Task.Properties.count,
						PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null,
						count.getText()));
				propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(
						TaskPropertiesEditionPartForm.this,
						EsbViewsRepository.Task.Properties.count,
						PropertiesEditionEvent.FOCUS_CHANGED, PropertiesEditionEvent.FOCUS_LOST,
						null, count.getText()));
			}
		}

		/** @see org.eclipse.swt.events.FocusAdapter#focusGained(org.eclipse.swt.events.FocusEvent) */
		@Override
		public void focusGained(FocusEvent e) {
			if (propertiesEditionComponent != null) {
				propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(
						TaskPropertiesEditionPartForm.this, null,
						PropertiesEditionEvent.FOCUS_CHANGED, PropertiesEditionEvent.FOCUS_GAINED,
						null, null));
			}
		}
	});
	count.addKeyListener(new KeyAdapter() {
		/** @see org.eclipse.swt.events.KeyAdapter#keyPressed(org.eclipse.swt.events.KeyEvent) */
		@Override
		@SuppressWarnings("synthetic-access")
		public void keyPressed(KeyEvent e) {
			if (e.character == SWT.CR) {
				if (propertiesEditionComponent != null)
					propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(TaskPropertiesEditionPartForm.this, EsbViewsRepository.Task.Properties.count, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, count.getText()));
			}
		}
	});
	EditingUtils.setID(count, EsbViewsRepository.Task.Properties.count);
	EditingUtils.setEEFtype(count, "eef::Text"); //$NON-NLS-1$
	FormUtils.createHelpButton(widgetFactory, parent, propertiesEditionComponent.getHelpContent(EsbViewsRepository.Task.Properties.count, EsbViewsRepository.FORM_KIND), null); //$NON-NLS-1$
	// Start of user code for createCountText

	// End of user code
	return parent;
}

/**
 * Creates the 'interval' text field; commits on focus lost / CR.
 */
protected Composite createIntervalText(FormToolkit widgetFactory, Composite parent) {
	createDescription(parent, EsbViewsRepository.Task.Properties.interval, EsbMessages.TaskPropertiesEditionPart_IntervalLabel);
	interval = widgetFactory.createText(parent, ""); //$NON-NLS-1$
	interval.setData(FormToolkit.KEY_DRAW_BORDER, FormToolkit.TEXT_BORDER);
	widgetFactory.paintBordersFor(parent);
	GridData intervalData = new GridData(GridData.FILL_HORIZONTAL);
	interval.setLayoutData(intervalData);
	interval.addFocusListener(new FocusAdapter() {
		/** @see org.eclipse.swt.events.FocusAdapter#focusLost(org.eclipse.swt.events.FocusEvent) */
		@Override
		@SuppressWarnings("synthetic-access")
		public void focusLost(FocusEvent e) {
			if (propertiesEditionComponent != null) {
				propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(
						TaskPropertiesEditionPartForm.this,
						EsbViewsRepository.Task.Properties.interval,
						PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null,
						interval.getText()));
				propertiesEditionComponent.firePropertiesChanged(new
PropertiesEditionEvent(
						TaskPropertiesEditionPartForm.this,
						EsbViewsRepository.Task.Properties.interval,
						PropertiesEditionEvent.FOCUS_CHANGED, PropertiesEditionEvent.FOCUS_LOST,
						null, interval.getText()));
			}
		}

		/** @see org.eclipse.swt.events.FocusAdapter#focusGained(org.eclipse.swt.events.FocusEvent) */
		@Override
		public void focusGained(FocusEvent e) {
			if (propertiesEditionComponent != null) {
				propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(
						TaskPropertiesEditionPartForm.this, null,
						PropertiesEditionEvent.FOCUS_CHANGED, PropertiesEditionEvent.FOCUS_GAINED,
						null, null));
			}
		}
	});
	interval.addKeyListener(new KeyAdapter() {
		/** @see org.eclipse.swt.events.KeyAdapter#keyPressed(org.eclipse.swt.events.KeyEvent) */
		@Override
		@SuppressWarnings("synthetic-access")
		public void keyPressed(KeyEvent e) {
			if (e.character == SWT.CR) {
				if (propertiesEditionComponent != null)
					propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(TaskPropertiesEditionPartForm.this, EsbViewsRepository.Task.Properties.interval, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, interval.getText()));
			}
		}
	});
	EditingUtils.setID(interval, EsbViewsRepository.Task.Properties.interval);
	EditingUtils.setEEFtype(interval, "eef::Text"); //$NON-NLS-1$
	FormUtils.createHelpButton(widgetFactory, parent, propertiesEditionComponent.getHelpContent(EsbViewsRepository.Task.Properties.interval, EsbViewsRepository.FORM_KIND), null); //$NON-NLS-1$
	// Start of user code for createIntervalText

	// End of user code
	return parent;
}

/**
 * Creates the 'cron' text field; commits on focus lost / CR.
 */
protected Composite createCronText(FormToolkit widgetFactory, Composite parent) {
	createDescription(parent, EsbViewsRepository.Task.Properties.cron, EsbMessages.TaskPropertiesEditionPart_CronLabel);
	cron = widgetFactory.createText(parent, ""); //$NON-NLS-1$
	cron.setData(FormToolkit.KEY_DRAW_BORDER, FormToolkit.TEXT_BORDER);
	widgetFactory.paintBordersFor(parent);
	GridData cronData = new GridData(GridData.FILL_HORIZONTAL);
	cron.setLayoutData(cronData);
	cron.addFocusListener(new FocusAdapter() {
		/** @see org.eclipse.swt.events.FocusAdapter#focusLost(org.eclipse.swt.events.FocusEvent) */
		@Override
		@SuppressWarnings("synthetic-access")
		public void focusLost(FocusEvent e) {
			if (propertiesEditionComponent != null) {
				propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(
						TaskPropertiesEditionPartForm.this,
						EsbViewsRepository.Task.Properties.cron,
						PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null,
						cron.getText()));
				propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(
						TaskPropertiesEditionPartForm.this,
						EsbViewsRepository.Task.Properties.cron,
						PropertiesEditionEvent.FOCUS_CHANGED, PropertiesEditionEvent.FOCUS_LOST,
						null, cron.getText()));
			}
		}

		/** @see org.eclipse.swt.events.FocusAdapter#focusGained(org.eclipse.swt.events.FocusEvent) */
		@Override
		public void focusGained(FocusEvent e) {
			if (propertiesEditionComponent != null) {
				propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(
						TaskPropertiesEditionPartForm.this, null,
						PropertiesEditionEvent.FOCUS_CHANGED, PropertiesEditionEvent.FOCUS_GAINED,
						null, null));
			}
		}
	});
	cron.addKeyListener(new KeyAdapter() {
		/** @see org.eclipse.swt.events.KeyAdapter#keyPressed(org.eclipse.swt.events.KeyEvent) */
		@Override
		@SuppressWarnings("synthetic-access")
		public void keyPressed(KeyEvent e) {
			if (e.character == SWT.CR) {
				if (propertiesEditionComponent != null)
					propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(TaskPropertiesEditionPartForm.this, EsbViewsRepository.Task.Properties.cron, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, cron.getText()));
			}
		}
	});
	EditingUtils.setID(cron, EsbViewsRepository.Task.Properties.cron);
	EditingUtils.setEEFtype(cron, "eef::Text"); //$NON-NLS-1$
	FormUtils.createHelpButton(widgetFactory, parent, propertiesEditionComponent.getHelpContent(EsbViewsRepository.Task.Properties.cron,
EsbViewsRepository.FORM_KIND), null); //$NON-NLS-1$
	// Start of user code for createCronText

	// End of user code
	return parent;
}

/**
 * Creates the 'pinnedServers' text field; commits on focus lost / CR.
 */
protected Composite createPinnedServersText(FormToolkit widgetFactory, Composite parent) {
	createDescription(parent, EsbViewsRepository.Task.Properties.pinnedServers, EsbMessages.TaskPropertiesEditionPart_PinnedServersLabel);
	pinnedServers = widgetFactory.createText(parent, ""); //$NON-NLS-1$
	pinnedServers.setData(FormToolkit.KEY_DRAW_BORDER, FormToolkit.TEXT_BORDER);
	widgetFactory.paintBordersFor(parent);
	GridData pinnedServersData = new GridData(GridData.FILL_HORIZONTAL);
	pinnedServers.setLayoutData(pinnedServersData);
	pinnedServers.addFocusListener(new FocusAdapter() {
		/** @see org.eclipse.swt.events.FocusAdapter#focusLost(org.eclipse.swt.events.FocusEvent) */
		@Override
		@SuppressWarnings("synthetic-access")
		public void focusLost(FocusEvent e) {
			if (propertiesEditionComponent != null) {
				propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(
						TaskPropertiesEditionPartForm.this,
						EsbViewsRepository.Task.Properties.pinnedServers,
						PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null,
						pinnedServers.getText()));
				propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(
						TaskPropertiesEditionPartForm.this,
						EsbViewsRepository.Task.Properties.pinnedServers,
						PropertiesEditionEvent.FOCUS_CHANGED, PropertiesEditionEvent.FOCUS_LOST,
						null, pinnedServers.getText()));
			}
		}

		/** @see org.eclipse.swt.events.FocusAdapter#focusGained(org.eclipse.swt.events.FocusEvent) */
		@Override
		public void focusGained(FocusEvent e) {
			if (propertiesEditionComponent != null) {
				propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(
						TaskPropertiesEditionPartForm.this, null,
						PropertiesEditionEvent.FOCUS_CHANGED, PropertiesEditionEvent.FOCUS_GAINED,
						null, null));
			}
		}
	});
	pinnedServers.addKeyListener(new KeyAdapter() {
		/** @see org.eclipse.swt.events.KeyAdapter#keyPressed(org.eclipse.swt.events.KeyEvent) */
		@Override
		@SuppressWarnings("synthetic-access")
		public void keyPressed(KeyEvent e) {
			if (e.character == SWT.CR) {
				if (propertiesEditionComponent != null)
					propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(TaskPropertiesEditionPartForm.this, EsbViewsRepository.Task.Properties.pinnedServers, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, pinnedServers.getText()));
			}
		}
	});
	EditingUtils.setID(pinnedServers, EsbViewsRepository.Task.Properties.pinnedServers);
	EditingUtils.setEEFtype(pinnedServers, "eef::Text"); //$NON-NLS-1$
	FormUtils.createHelpButton(widgetFactory, parent, propertiesEditionComponent.getHelpContent(EsbViewsRepository.Task.Properties.pinnedServers, EsbViewsRepository.FORM_KIND), null); //$NON-NLS-1$
	// Start of user code for createPinnedServersText

	// End of user code
	return parent;
}

/**
 * Creates the 'taskImplementation' text field; commits on focus lost / CR.
 */
protected Composite createTaskImplementationText(FormToolkit widgetFactory, Composite parent) {
	createDescription(parent, EsbViewsRepository.Task.Properties.taskImplementation, EsbMessages.TaskPropertiesEditionPart_TaskImplementationLabel);
	taskImplementation = widgetFactory.createText(parent, ""); //$NON-NLS-1$
	taskImplementation.setData(FormToolkit.KEY_DRAW_BORDER, FormToolkit.TEXT_BORDER);
	widgetFactory.paintBordersFor(parent);
	GridData taskImplementationData = new GridData(GridData.FILL_HORIZONTAL);
	taskImplementation.setLayoutData(taskImplementationData);
	taskImplementation.addFocusListener(new FocusAdapter() {
		/** @see org.eclipse.swt.events.FocusAdapter#focusLost(org.eclipse.swt.events.FocusEvent) */
		@Override
		@SuppressWarnings("synthetic-access")
		public void focusLost(FocusEvent e) {
			if (propertiesEditionComponent != null) {
				propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(
						TaskPropertiesEditionPartForm.this,
						EsbViewsRepository.Task.Properties.taskImplementation,
						PropertiesEditionEvent.COMMIT,
PropertiesEditionEvent.SET, null, taskImplementation.getText()));
				propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(
						TaskPropertiesEditionPartForm.this,
						EsbViewsRepository.Task.Properties.taskImplementation,
						PropertiesEditionEvent.FOCUS_CHANGED, PropertiesEditionEvent.FOCUS_LOST,
						null, taskImplementation.getText()));
			}
		}

		/** @see org.eclipse.swt.events.FocusAdapter#focusGained(org.eclipse.swt.events.FocusEvent) */
		@Override
		public void focusGained(FocusEvent e) {
			if (propertiesEditionComponent != null) {
				propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(
						TaskPropertiesEditionPartForm.this, null,
						PropertiesEditionEvent.FOCUS_CHANGED, PropertiesEditionEvent.FOCUS_GAINED,
						null, null));
			}
		}
	});
	taskImplementation.addKeyListener(new KeyAdapter() {
		/** @see org.eclipse.swt.events.KeyAdapter#keyPressed(org.eclipse.swt.events.KeyEvent) */
		@Override
		@SuppressWarnings("synthetic-access")
		public void keyPressed(KeyEvent e) {
			if (e.character == SWT.CR) {
				if (propertiesEditionComponent != null)
					propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(TaskPropertiesEditionPartForm.this, EsbViewsRepository.Task.Properties.taskImplementation, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, taskImplementation.getText()));
			}
		}
	});
	EditingUtils.setID(taskImplementation, EsbViewsRepository.Task.Properties.taskImplementation);
	EditingUtils.setEEFtype(taskImplementation, "eef::Text"); //$NON-NLS-1$
	FormUtils.createHelpButton(widgetFactory, parent, propertiesEditionComponent.getHelpContent(EsbViewsRepository.Task.Properties.taskImplementation, EsbViewsRepository.FORM_KIND), null); //$NON-NLS-1$
	// Start of user code for createTaskImplementationText

	// End of user code
	return parent;
}

/**
 * Creates the 'taskProperties' references table; each table action (add, edit,
 * move, remove) is relayed to the edition component as a COMMIT event.
 *
 * @param container
 *            NOTE(review): generated tag names a parameter that does not exist;
 *            the actual parameters are widgetFactory and parent.
 */
protected Composite createTaskPropertiesTableComposition(FormToolkit widgetFactory, Composite parent) {
	this.taskProperties = new ReferencesTable(getDescription(EsbViewsRepository.Task.Properties.taskProperties, EsbMessages.TaskPropertiesEditionPart_TaskPropertiesLabel), new ReferencesTableListener() {
		public void handleAdd() {
			propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(TaskPropertiesEditionPartForm.this, EsbViewsRepository.Task.Properties.taskProperties, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.ADD, null, null));
			taskProperties.refresh();
		}

		public void handleEdit(EObject element) {
			propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(TaskPropertiesEditionPartForm.this, EsbViewsRepository.Task.Properties.taskProperties, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.EDIT, null, element));
			taskProperties.refresh();
		}

		public void handleMove(EObject element, int oldIndex, int newIndex) {
			propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(TaskPropertiesEditionPartForm.this, EsbViewsRepository.Task.Properties.taskProperties, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.MOVE, element, newIndex));
			taskProperties.refresh();
		}

		public void handleRemove(EObject element) {
			propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(TaskPropertiesEditionPartForm.this, EsbViewsRepository.Task.Properties.taskProperties, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.REMOVE, null, element));
			taskProperties.refresh();
		}

		public void navigateTo(EObject element) {
			// Intentionally empty: navigation is not wired for task properties.
		}
	});
	for (ViewerFilter filter : this.taskPropertiesFilters) {
		this.taskProperties.addFilter(filter);
	}
	this.taskProperties.setHelpText(propertiesEditionComponent.getHelpContent(EsbViewsRepository.Task.Properties.taskProperties, EsbViewsRepository.FORM_KIND));
	this.taskProperties.createControls(parent, widgetFactory);
	this.taskProperties.addSelectionListener(new SelectionAdapter() {
		public void widgetSelected(SelectionEvent e) {
			if (e.item != null && e.item.getData() instanceof EObject) {
propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(TaskPropertiesEditionPartForm.this, EsbViewsRepository.Task.Properties.taskProperties, PropertiesEditionEvent.CHANGE, PropertiesEditionEvent.SELECTION_CHANGED, null, e.item.getData()));
			}
		}
	});
	GridData taskPropertiesData = new GridData(GridData.FILL_HORIZONTAL);
	taskPropertiesData.horizontalSpan = 3;
	this.taskProperties.setLayoutData(taskPropertiesData);
	this.taskProperties.setLowerBound(0);
	this.taskProperties.setUpperBound(-1); // NOTE(review): -1 presumably means unbounded — confirm against ReferencesTable
	taskProperties.setID(EsbViewsRepository.Task.Properties.taskProperties);
	taskProperties.setEEFType("eef::AdvancedTableComposition"); //$NON-NLS-1$
	// Start of user code for createTaskPropertiesTableComposition

	// End of user code
	return parent;
}

/**
 * {@inheritDoc}
 *
 * @see org.eclipse.emf.eef.runtime.api.notify.IPropertiesEditionListener#firePropertiesChanged(org.eclipse.emf.eef.runtime.api.notify.IPropertiesEditionEvent)
 */
public void firePropertiesChanged(IPropertiesEditionEvent event) {
	// Start of user code for tab synchronization

	// End of user code
}

/**
 * {@inheritDoc}
 *
 * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.TaskPropertiesEditionPart#getDescription()
 */
public String getDescription() {
	return description.getText();
}

/**
 * Pushes the model value into the 'description' widget and toggles its
 * enablement according to the feature's read-only state.
 *
 * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.TaskPropertiesEditionPart#setDescription(String newValue)
 */
public void setDescription(String newValue) {
	if (newValue != null) {
		description.setText(newValue);
	} else {
		description.setText(""); //$NON-NLS-1$
	}
	boolean eefElementEditorReadOnlyState = isReadOnly(EsbViewsRepository.Task.Properties.description);
	if (eefElementEditorReadOnlyState && description.isEnabled()) {
		description.setEnabled(false);
		description.setToolTipText(EsbMessages.Task_ReadOnly);
	} else if (!eefElementEditorReadOnlyState && !description.isEnabled()) {
		description.setEnabled(true);
	}
}

/**
 * {@inheritDoc}
 *
 * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.TaskPropertiesEditionPart#getCommentsList()
 */
public EList getCommentsList() {
	return commentsListList;
}

/**
 * Replaces the local comments list and mirrors it into the read-only text field,
 * applying the feature's read-only state to the widget.
 *
 * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.TaskPropertiesEditionPart#setCommentsList(EList newValue)
 */
public void setCommentsList(EList newValue) {
	commentsListList = newValue;
	if (newValue != null) {
		commentsList.setText(commentsListList.toString());
	} else {
		commentsList.setText(""); //$NON-NLS-1$
	}
	boolean eefElementEditorReadOnlyState = isReadOnly(EsbViewsRepository.Task.Properties.commentsList);
	if (eefElementEditorReadOnlyState && commentsList.isEnabled()) {
		commentsList.setEnabled(false);
		commentsList.setToolTipText(EsbMessages.Task_ReadOnly);
	} else if (!eefElementEditorReadOnlyState && !commentsList.isEnabled()) {
		commentsList.setEnabled(true);
	}
}

// Appends a value to the comments list and refreshes the display text.
// NOTE(review): assumes commentsListList was initialised (e.g. via setCommentsList);
// a null list would NPE here — verify callers.
public void addToCommentsList(Object newValue) {
	commentsListList.add(newValue);
	if (newValue != null) {
		commentsList.setText(commentsListList.toString());
	} else {
		commentsList.setText(""); //$NON-NLS-1$
	}
}

// Removes a value from the comments list and refreshes the display text.
public void removeToCommentsList(Object newValue) {
	commentsListList.remove(newValue);
	if (newValue != null) {
		commentsList.setText(commentsListList.toString());
	} else {
		commentsList.setText(""); //$NON-NLS-1$
	}
}

/**
 * {@inheritDoc}
 *
 * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.TaskPropertiesEditionPart#getTaskName()
 */
public String getTaskName() {
	return taskName.getText();
}

/**
 * Pushes the model value into the 'taskName' widget and toggles its enablement
 * according to the feature's read-only state.
 *
 * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.TaskPropertiesEditionPart#setTaskName(String newValue)
 */
public void setTaskName(String newValue) {
	if (newValue != null) {
		taskName.setText(newValue);
	} else {
		taskName.setText(""); //$NON-NLS-1$
	}
	boolean eefElementEditorReadOnlyState = isReadOnly(EsbViewsRepository.Task.Properties.taskName);
	if (eefElementEditorReadOnlyState && taskName.isEnabled()) {
		taskName.setEnabled(false);
		taskName.setToolTipText(EsbMessages.Task_ReadOnly);
	} else if (!eefElementEditorReadOnlyState && !taskName.isEnabled()) {
		taskName.setEnabled(true);
	}
}

/**
 * {@inheritDoc}
 *
 * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.TaskPropertiesEditionPart#getTaskGroup()
 */
public String getTaskGroup() {
	return taskGroup.getText();
}

/**
 * Pushes the model value into the 'taskGroup' widget and toggles its enablement
 * according to the feature's read-only state.
 *
 * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.TaskPropertiesEditionPart#setTaskGroup(String newValue)
 */
public void setTaskGroup(String newValue) {
	if (newValue != null) {
		taskGroup.setText(newValue);
	} else {
		taskGroup.setText(""); //$NON-NLS-1$
	}
	boolean eefElementEditorReadOnlyState = isReadOnly(EsbViewsRepository.Task.Properties.taskGroup);
	if (eefElementEditorReadOnlyState && taskGroup.isEnabled()) {
		taskGroup.setEnabled(false);
		taskGroup.setToolTipText(EsbMessages.Task_ReadOnly);
	} else if (!eefElementEditorReadOnlyState && !taskGroup.isEnabled()) {
		taskGroup.setEnabled(true);
	}
}

/**
 * Returns the enumerator currently selected in the trigger-type combo
 * (null when nothing is selected).
 *
 * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.TaskPropertiesEditionPart#getTriggerType()
 */
public Enumerator getTriggerType() {
	Enumerator selection = (Enumerator) ((StructuredSelection) triggerType.getSelection()).getFirstElement();
	return selection;
}

/**
 * Initialises the trigger-type combo with its input and current value, then
 * applies the feature's read-only state.
 *
 * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.TaskPropertiesEditionPart#initTriggerType(Object input, Enumerator current)
 */
public void initTriggerType(Object input, Enumerator current) {
	triggerType.setInput(input);
	triggerType.modelUpdating(new StructuredSelection(current));
	boolean eefElementEditorReadOnlyState = isReadOnly(EsbViewsRepository.Task.Properties.triggerType);
	if (eefElementEditorReadOnlyState && triggerType.isEnabled()) {
		triggerType.setEnabled(false);
		triggerType.setToolTipText(EsbMessages.Task_ReadOnly);
	} else if (!eefElementEditorReadOnlyState && !triggerType.isEnabled()) {
		triggerType.setEnabled(true);
	}
}

/**
 * {@inheritDoc}
 *
 * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.TaskPropertiesEditionPart#setTriggerType(Enumerator newValue)
 */
public void setTriggerType(Enumerator newValue) { triggerType.modelUpdating(new StructuredSelection(newValue)); boolean eefElementEditorReadOnlyState = isReadOnly(EsbViewsRepository.Task.Properties.triggerType); if (eefElementEditorReadOnlyState && triggerType.isEnabled()) { triggerType.setEnabled(false); triggerType.setToolTipText(EsbMessages.Task_ReadOnly); } else if (!eefElementEditorReadOnlyState && !triggerType.isEnabled()) { triggerType.setEnabled(true); } } /** * {@inheritDoc} * * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.TaskPropertiesEditionPart#getCount() * */ public String getCount() { return count.getText(); } /** * {@inheritDoc} * * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.TaskPropertiesEditionPart#setCount(String newValue) * */ public void setCount(String newValue) { if (newValue != null) { count.setText(newValue); } else { count.setText(""); //$NON-NLS-1$ } boolean eefElementEditorReadOnlyState = isReadOnly(EsbViewsRepository.Task.Properties.count); if (eefElementEditorReadOnlyState && count.isEnabled()) { count.setEnabled(false); count.setToolTipText(EsbMessages.Task_ReadOnly); } else if (!eefElementEditorReadOnlyState && !count.isEnabled()) { count.setEnabled(true); } } /** * {@inheritDoc} * * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.TaskPropertiesEditionPart#getInterval() * */ public String getInterval() { return interval.getText(); } /** * {@inheritDoc} * * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.TaskPropertiesEditionPart#setInterval(String newValue) * */ public void setInterval(String newValue) { if (newValue != null) { interval.setText(newValue); } else { interval.setText(""); //$NON-NLS-1$ } boolean eefElementEditorReadOnlyState = isReadOnly(EsbViewsRepository.Task.Properties.interval); if (eefElementEditorReadOnlyState && interval.isEnabled()) { interval.setEnabled(false); interval.setToolTipText(EsbMessages.Task_ReadOnly); } else if (!eefElementEditorReadOnlyState && !interval.isEnabled()) { 
interval.setEnabled(true); } } /** * {@inheritDoc} * * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.TaskPropertiesEditionPart#getCron() * */ public String getCron() { return cron.getText(); } /** * {@inheritDoc} * * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.TaskPropertiesEditionPart#setCron(String newValue) * */ public void setCron(String newValue) { if (newValue != null) { cron.setText(newValue); } else { cron.setText(""); //$NON-NLS-1$ } boolean eefElementEditorReadOnlyState = isReadOnly(EsbViewsRepository.Task.Properties.cron); if (eefElementEditorReadOnlyState && cron.isEnabled()) { cron.setEnabled(false); cron.setToolTipText(EsbMessages.Task_ReadOnly); } else if (!eefElementEditorReadOnlyState && !cron.isEnabled()) { cron.setEnabled(true); } } /** * {@inheritDoc} * * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.TaskPropertiesEditionPart#getPinnedServers() * */ public String getPinnedServers() { return pinnedServers.getText(); } /** * {@inheritDoc} * * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.TaskPropertiesEditionPart#setPinnedServers(String newValue) * */ public void setPinnedServers(String newValue) { if (newValue != null) { pinnedServers.setText(newValue); } else { pinnedServers.setText(""); //$NON-NLS-1$ } boolean eefElementEditorReadOnlyState = isReadOnly(EsbViewsRepository.Task.Properties.pinnedServers); if (eefElementEditorReadOnlyState && pinnedServers.isEnabled()) { pinnedServers.setEnabled(false); pinnedServers.setToolTipText(EsbMessages.Task_ReadOnly); } else if (!eefElementEditorReadOnlyState && !pinnedServers.isEnabled()) { pinnedServers.setEnabled(true); } } /** * {@inheritDoc} * * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.TaskPropertiesEditionPart#getTaskImplementation() * */ public String getTaskImplementation() { return taskImplementation.getText(); } /** * {@inheritDoc} * * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.TaskPropertiesEditionPart#setTaskImplementation(String newValue) * */ 
public void setTaskImplementation(String newValue) { if (newValue != null) { taskImplementation.setText(newValue); } else { taskImplementation.setText(""); //$NON-NLS-1$ } boolean eefElementEditorReadOnlyState = isReadOnly(EsbViewsRepository.Task.Properties.taskImplementation); if (eefElementEditorReadOnlyState && taskImplementation.isEnabled()) { taskImplementation.setEnabled(false); taskImplementation.setToolTipText(EsbMessages.Task_ReadOnly); } else if (!eefElementEditorReadOnlyState && !taskImplementation.isEnabled()) { taskImplementation.setEnabled(true); } } /** * {@inheritDoc} * * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.TaskPropertiesEditionPart#initTaskProperties(EObject current, EReference containingFeature, EReference feature) */ public void initTaskProperties(ReferencesTableSettings settings) { if (current.eResource() != null && current.eResource().getResourceSet() != null) this.resourceSet = current.eResource().getResourceSet(); ReferencesTableContentProvider contentProvider = new ReferencesTableContentProvider(); taskProperties.setContentProvider(contentProvider); taskProperties.setInput(settings); boolean eefElementEditorReadOnlyState = isReadOnly(EsbViewsRepository.Task.Properties.taskProperties); if (eefElementEditorReadOnlyState && taskProperties.isEnabled()) { taskProperties.setEnabled(false); taskProperties.setToolTipText(EsbMessages.Task_ReadOnly); } else if (!eefElementEditorReadOnlyState && !taskProperties.isEnabled()) { taskProperties.setEnabled(true); } } /** * {@inheritDoc} * * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.TaskPropertiesEditionPart#updateTaskProperties() * */ public void updateTaskProperties() { taskProperties.refresh(); } /** * {@inheritDoc} * * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.TaskPropertiesEditionPart#addFilterTaskProperties(ViewerFilter filter) * */ public void addFilterToTaskProperties(ViewerFilter filter) { taskPropertiesFilters.add(filter); if (this.taskProperties != null) { 
this.taskProperties.addFilter(filter); } } /** * {@inheritDoc} * * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.TaskPropertiesEditionPart#addBusinessFilterTaskProperties(ViewerFilter filter) * */ public void addBusinessFilterToTaskProperties(ViewerFilter filter) { taskPropertiesBusinessFilters.add(filter); } /** * {@inheritDoc} * * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.TaskPropertiesEditionPart#isContainedInTaskPropertiesTable(EObject element) * */ public boolean isContainedInTaskPropertiesTable(EObject element) { return ((ReferencesTableSettings)taskProperties.getInput()).contains(element); } /** * {@inheritDoc} * * @see org.eclipse.emf.eef.runtime.api.parts.IPropertiesEditionPart#getTitle() * */ public String getTitle() { return EsbMessages.Task_Part_Title; } // Start of user code additional methods // End of user code }
/*
 * Copyright (c) 2009-2012 jMonkeyEngine
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are
 * met:
 *
 * * Redistributions of source code must retain the above copyright
 *   notice, this list of conditions and the following disclaimer.
 *
 * * Redistributions in binary form must reproduce the above copyright
 *   notice, this list of conditions and the following disclaimer in the
 *   documentation and/or other materials provided with the distribution.
 *
 * * Neither the name of 'jMonkeyEngine' nor the names of its contributors
 *   may be used to endorse or promote products derived from this software
 *   without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
 * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
 * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
package com.jme3.scene;

import com.jme3.asset.AssetNotFoundException;
import com.jme3.bounding.BoundingVolume;
import com.jme3.collision.Collidable;
import com.jme3.collision.CollisionResults;
import com.jme3.export.InputCapsule;
import com.jme3.export.JmeExporter;
import com.jme3.export.JmeImporter;
import com.jme3.export.OutputCapsule;
import com.jme3.material.Material;
import com.jme3.math.Matrix4f;
import com.jme3.renderer.Camera;
import com.jme3.scene.VertexBuffer.Type;
import com.jme3.util.TempVars;
import java.io.IOException;
import java.util.Queue;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * <code>Geometry</code> defines a leaf node of the scene graph. The leaf node
 * contains the geometric data for rendering objects. It manages all rendering
 * information such as a {@link Material} object to define how the surface
 * should be shaded and the {@link Mesh} data to contain the actual geometry.
 *
 * @author Kirill Vainer
 */
public class Geometry extends Spatial {

    // Version #1: removed shared meshes.
    // models loaded with shared mesh will be automatically fixed.
    public static final int SAVABLE_VERSION = 1;
    private static final Logger logger = Logger.getLogger(Geometry.class.getName());
    protected Mesh mesh;
    protected transient int lodLevel = 0;
    protected Material material;
    /**
     * When true, the geometry's transform will not be applied.
     */
    protected boolean ignoreTransform = false;
    protected transient Matrix4f cachedWorldMat = new Matrix4f();
    /**
     * Specifies which {@link GeometryGroupNode} this <code>Geometry</code>
     * is managed by.
     */
    protected GeometryGroupNode groupNode;
    /**
     * The start index of this <code>Geometry's</code> inside
     * the {@link GeometryGroupNode}.
     */
    protected int startIndex = -1;

    /**
     * Serialization only. Do not use.
     */
    public Geometry() {
        this(null);
    }

    /**
     * Create a geometry node without any mesh data.
     * Both the mesh and the material are null, the geometry
     * cannot be rendered until those are set.
     *
     * @param name The name of this geometry
     */
    public Geometry(String name) {
        super(name);

        // For backwards compatibility, only clear the "requires
        // update" flag if we are not a subclass of Geometry.
        // This prevents subclass from silently failing to receive
        // updates when they upgrade.
        setRequiresUpdates(Geometry.class != getClass());
    }

    /**
     * Create a geometry node with mesh data.
     * The material of the geometry is null, it cannot
     * be rendered until it is set.
     *
     * @param name The name of this geometry
     * @param mesh The mesh data for this geometry
     */
    public Geometry(String name, Mesh mesh) {
        this(name);

        if (mesh == null) {
            throw new IllegalArgumentException("mesh cannot be null");
        }

        this.mesh = mesh;
    }

    @Override
    public boolean checkCulling(Camera cam) {
        if (isGrouped()) {
            // A grouped geometry is rendered by its GeometryGroupNode,
            // never directly, so it always reports itself as culled.
            setLastFrustumIntersection(Camera.FrustumIntersect.Outside);
            return false;
        }
        return super.checkCulling(cam);
    }

    /**
     * @return If ignoreTransform mode is set.
     *
     * @see Geometry#setIgnoreTransform(boolean)
     */
    public boolean isIgnoreTransform() {
        return ignoreTransform;
    }

    /**
     * @param ignoreTransform If true, the geometry's transform will not be applied.
     */
    public void setIgnoreTransform(boolean ignoreTransform) {
        this.ignoreTransform = ignoreTransform;
    }

    /**
     * Sets the LOD level to use when rendering the mesh of this geometry.
     * Level 0 indicates that the default index buffer should be used,
     * levels [1, LodLevels + 1] represent the levels set on the mesh
     * with {@link Mesh#setLodLevels(com.jme3.scene.VertexBuffer[]) }.
     *
     * @param lod The lod level to set
     */
    @Override
    public void setLodLevel(int lod) {
        if (mesh.getNumLodLevels() == 0) {
            throw new IllegalStateException("LOD levels are not set on this mesh");
        }

        if (lod < 0 || lod >= mesh.getNumLodLevels()) {
            throw new IllegalArgumentException("LOD level is out of range: " + lod);
        }

        lodLevel = lod;

        if (isGrouped()) {
            groupNode.onMeshChange(this);
        }
    }

    /**
     * Returns the LOD level set with {@link #setLodLevel(int) }.
     *
     * @return the LOD level set
     */
    public int getLodLevel() {
        return lodLevel;
    }

    /**
     * Returns this geometry's mesh vertex count.
     *
     * @return this geometry's mesh vertex count.
     *
     * @see Mesh#getVertexCount()
     */
    public int getVertexCount() {
        return mesh.getVertexCount();
    }

    /**
     * Returns this geometry's mesh triangle count.
     *
     * @return this geometry's mesh triangle count.
     *
     * @see Mesh#getTriangleCount()
     */
    public int getTriangleCount() {
        return mesh.getTriangleCount();
    }

    /**
     * Sets the mesh to use for this geometry when rendering.
     *
     * @param mesh the mesh to use for this geometry
     *
     * @throws IllegalArgumentException If mesh is null
     */
    public void setMesh(Mesh mesh) {
        if (mesh == null) {
            // FIX: carry the same message as the Geometry(String, Mesh)
            // constructor instead of throwing a message-less exception.
            throw new IllegalArgumentException("mesh cannot be null");
        }

        this.mesh = mesh;
        setBoundRefresh();

        if (isGrouped()) {
            groupNode.onMeshChange(this);
        }
    }

    /**
     * Returns the mesh to use for this geometry
     *
     * @return the mesh to use for this geometry
     *
     * @see #setMesh(com.jme3.scene.Mesh)
     */
    public Mesh getMesh() {
        return mesh;
    }

    /**
     * Sets the material to use for this geometry.
     *
     * @param material the material to use for this geometry
     */
    @Override
    public void setMaterial(Material material) {
        this.material = material;

        if (isGrouped()) {
            groupNode.onMaterialChange(this);
        }
    }

    /**
     * Returns the material that is used for this geometry.
     *
     * @return the material that is used for this geometry
     *
     * @see #setMaterial(com.jme3.material.Material)
     */
    public Material getMaterial() {
        return material;
    }

    /**
     * @return The bounding volume of the mesh, in model space.
     */
    public BoundingVolume getModelBound() {
        return mesh.getBound();
    }

    /**
     * Updates the bounding volume of the mesh. Should be called when the
     * mesh has been modified.
     */
    public void updateModelBound() {
        mesh.updateBound();
        setBoundRefresh();
    }

    /**
     * <code>updateWorldBound</code> updates the bounding volume that contains
     * this geometry. The location of the geometry is based on the location of
     * all this node's parents.
     *
     * @see Spatial#updateWorldBound()
     */
    @Override
    protected void updateWorldBound() {
        super.updateWorldBound();
        if (mesh == null) {
            throw new NullPointerException("Geometry: " + getName() + " has null mesh");
        }

        if (mesh.getBound() != null) {
            if (ignoreTransform) {
                // we do not transform the model bound by the world transform,
                // just use the model bound as-is
                worldBound = mesh.getBound().clone(worldBound);
            } else {
                worldBound = mesh.getBound().transform(worldTransform, worldBound);
            }
        }
    }

    @Override
    protected void updateWorldTransforms() {
        super.updateWorldTransforms();
        computeWorldMatrix();

        if (isGrouped()) {
            groupNode.onTransformChange(this);
        }

        // geometry requires lights to be sorted
        worldLights.sort(true);
    }

    /**
     * Associate this <code>Geometry</code> with a {@link GeometryGroupNode}.
     *
     * Should only be called by the parent {@link GeometryGroupNode}.
     *
     * @param node Which {@link GeometryGroupNode} to associate with.
     * @param startIndex The starting index of this geometry in the group.
     */
    public void associateWithGroupNode(GeometryGroupNode node, int startIndex) {
        if (isGrouped()) {
            unassociateFromGroupNode();
        }

        this.groupNode = node;
        this.startIndex = startIndex;
    }

    /**
     * Removes the {@link GeometryGroupNode} association from this
     * <code>Geometry</code>.
     *
     * Should only be called by the parent {@link GeometryGroupNode}.
     */
    public void unassociateFromGroupNode() {
        if (groupNode != null) {
            // Once the geometry is removed
            // from the parent, the group node needs to be updated.
            groupNode.onGeoemtryUnassociated(this);
            groupNode = null;

            // change the default to -1 to make error detection easier
            startIndex = -1;
        }
    }

    @Override
    public boolean removeFromParent() {
        return super.removeFromParent();
    }

    @Override
    protected void setParent(Node parent) {
        super.setParent(parent);

        // If the geometry is managed by group node we need to unassociate.
        if (parent == null && isGrouped()) {
            unassociateFromGroupNode();
        }
    }

    /**
     * Indicate that the transform of this spatial has changed and that
     * a refresh is required.
     */
    // NOTE: Spatial has an identical implementation of this method,
    // thus it was commented out.
    // @Override
    // protected void setTransformRefresh() {
    //     refreshFlags |= RF_TRANSFORM;
    //     setBoundRefresh();
    // }

    /**
     * Recomputes the matrix returned by {@link Geometry#getWorldMatrix() }.
     * This will require a localized transform update for this geometry.
     */
    public void computeWorldMatrix() {
        // Force a local update of the geometry's transform
        checkDoTransformUpdate();

        // Compute the cached world matrix
        cachedWorldMat.loadIdentity();
        cachedWorldMat.setRotationQuaternion(worldTransform.getRotation());
        cachedWorldMat.setTranslation(worldTransform.getTranslation());

        TempVars vars = TempVars.get();
        Matrix4f scaleMat = vars.tempMat4;
        scaleMat.loadIdentity();
        scaleMat.scale(worldTransform.getScale());
        cachedWorldMat.multLocal(scaleMat);
        vars.release();
    }

    /**
     * A {@link Matrix4f matrix} that transforms the {@link Geometry#getMesh() mesh}
     * from model space to world space. This matrix is computed based on the
     * {@link Geometry#getWorldTransform() world transform} of this geometry.
     * In order to receive updated values, you must call {@link Geometry#computeWorldMatrix() }
     * before using this method.
     *
     * @return Matrix to transform from local space to world space
     */
    public Matrix4f getWorldMatrix() {
        return cachedWorldMat;
    }

    /**
     * Sets the model bound to use for this geometry.
     * This alters the bound used on the mesh as well via
     * {@link Mesh#setBound(com.jme3.bounding.BoundingVolume) } and
     * forces the world bounding volume to be recomputed.
     *
     * @param modelBound The model bound to set
     */
    @Override
    public void setModelBound(BoundingVolume modelBound) {
        this.worldBound = null;
        mesh.setBound(modelBound);
        setBoundRefresh();

        // NOTE: Calling updateModelBound() would cause the mesh
        // to recompute the bound based on the geometry thus making
        // this call useless!
        //updateModelBound();
    }

    @Override
    public int collideWith(Collidable other, CollisionResults results) {
        // Force bound to update
        checkDoBoundUpdate();
        // Update transform, and compute cached world matrix
        computeWorldMatrix();

        assert (refreshFlags & (RF_BOUND | RF_TRANSFORM)) == 0;

        if (mesh != null) {
            // NOTE: BIHTree in mesh already checks collision with the
            // mesh's bound
            int prevSize = results.size();
            int added = mesh.collideWith(other, cachedWorldMat, worldBound, results);
            int newSize = results.size();
            for (int i = prevSize; i < newSize; i++) {
                results.getCollisionDirect(i).setGeometry(this);
            }
            return added;
        }
        return 0;
    }

    @Override
    public void depthFirstTraversal(SceneGraphVisitor visitor) {
        visitor.visit(this);
    }

    @Override
    protected void breadthFirstTraversal(SceneGraphVisitor visitor, Queue<Spatial> queue) {
    }

    /**
     * Determine whether this <code>Geometry</code> is managed by a
     * {@link GeometryGroupNode} or not.
     *
     * @return True if managed by a {@link GeometryGroupNode}.
     */
    public boolean isGrouped() {
        return groupNode != null;
    }

    /**
     * @deprecated Use {@link #isGrouped()} instead.
     */
    @Deprecated
    public boolean isBatched() {
        return isGrouped();
    }

    /**
     * This version of clone is a shallow clone, in other words, the
     * same mesh is referenced as the original geometry.
     * Exception: if the mesh is marked as being a software
     * animated mesh, (bind pose is set) then the positions
     * and normals are deep copied.
     */
    @Override
    public Geometry clone(boolean cloneMaterial) {
        Geometry geomClone = (Geometry) super.clone(cloneMaterial);

        // This geometry is managed,
        // but the cloned one is not attached to anything, hence not managed.
        if (geomClone.isGrouped()) {
            geomClone.groupNode = null;
            geomClone.startIndex = -1;
        }

        geomClone.cachedWorldMat = cachedWorldMat.clone();
        if (material != null) {
            if (cloneMaterial) {
                geomClone.material = material.clone();
            } else {
                geomClone.material = material;
            }
        }

        if (mesh != null && mesh.getBuffer(Type.BindPosePosition) != null) {
            geomClone.mesh = mesh.cloneForAnim();
        }

        return geomClone;
    }

    /**
     * This version of clone is a shallow clone, in other words, the
     * same mesh is referenced as the original geometry.
     * Exception: if the mesh is marked as being a software
     * animated mesh, (bind pose is set) then the positions
     * and normals are deep copied.
     */
    @Override
    public Geometry clone() {
        return clone(true);
    }

    /**
     * Create a deep clone of the geometry. This creates an identical copy of
     * the mesh with the vertex buffer data duplicated.
     */
    @Override
    public Spatial deepClone() {
        Geometry geomClone = clone(true);
        geomClone.mesh = mesh.deepClone();
        return geomClone;
    }

    @Override
    public void write(JmeExporter ex) throws IOException {
        super.write(ex);
        OutputCapsule oc = ex.getCapsule(this);
        oc.write(mesh, "mesh", null);
        if (material != null) {
            oc.write(material.getAssetName(), "materialName", null);
        }
        oc.write(material, "material", null);
        oc.write(ignoreTransform, "ignoreTransform", false);
    }

    @Override
    public void read(JmeImporter im) throws IOException {
        super.read(im);
        InputCapsule ic = im.getCapsule(this);
        mesh = (Mesh) ic.readSavable("mesh", null);

        material = null;
        String matName = ic.readString("materialName", null);
        if (matName != null) {
            // Material name is set,
            // Attempt to load material via J3M
            try {
                material = im.getAssetManager().loadMaterial(matName);
            } catch (AssetNotFoundException ex) {
                // Cannot find J3M file.
                logger.log(Level.FINE, "Cannot locate {0} for geometry {1}",
                        new Object[]{matName, key});
            }
        }
        // If material is NULL, try to load it from the geometry
        if (material == null) {
            material = (Material) ic.readSavable("material", null);
        }
        ignoreTransform = ic.readBoolean("ignoreTransform", false);

        if (ic.getSavableVersion(Geometry.class) == 0) {
            // Fix shared mesh (if set)
            Mesh sharedMesh = getUserData(UserData.JME_SHAREDMESH);
            if (sharedMesh != null) {
                getMesh().extractVertexData(sharedMesh);
                setUserData(UserData.JME_SHAREDMESH, null);
            }
        }
    }
}
/*
 * Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.services.simpleworkflow.model;

import java.io.Serializable;

/**
 * Provides details for the <code>LambdaFunctionFailed</code> history event.
 * <p>
 * Instances are mutable beans; every setter also has a fluent
 * <code>withXxx</code> variant that returns <code>this</code> for chaining.
 */
public class LambdaFunctionFailedEventAttributes implements Serializable, Cloneable {

    /**
     * The ID of the <code>LambdaFunctionScheduled</code> event that was
     * recorded when this AWS Lambda function was scheduled. Useful for
     * tracing back the chain of events leading up to this event.
     */
    private Long scheduledEventId;

    /**
     * The ID of the <code>LambdaFunctionStarted</code> event recorded in the
     * history.
     */
    private Long startedEventId;

    /**
     * The reason provided for the failure (if any).
     * <p>
     * <b>Constraints:</b><br/>
     * <b>Length: </b>0 - 256<br/>
     */
    private String reason;

    /**
     * The details of the failure (if any).
     * <p>
     * <b>Constraints:</b><br/>
     * <b>Length: </b>0 - 32768<br/>
     */
    private String details;

    /**
     * Returns the ID of the <code>LambdaFunctionScheduled</code> event that
     * was recorded when this AWS Lambda function was scheduled.
     *
     * @return The scheduled event ID, or <code>null</code> if unset.
     */
    public Long getScheduledEventId() {
        return scheduledEventId;
    }

    /**
     * Sets the ID of the <code>LambdaFunctionScheduled</code> event that was
     * recorded when this AWS Lambda function was scheduled.
     *
     * @param scheduledEventId The scheduled event ID.
     */
    public void setScheduledEventId(Long scheduledEventId) {
        this.scheduledEventId = scheduledEventId;
    }

    /**
     * Fluent variant of {@link #setScheduledEventId(Long)}.
     *
     * @param scheduledEventId The scheduled event ID.
     * @return This object, for method chaining.
     */
    public LambdaFunctionFailedEventAttributes withScheduledEventId(Long scheduledEventId) {
        setScheduledEventId(scheduledEventId);
        return this;
    }

    /**
     * Returns the ID of the <code>LambdaFunctionStarted</code> event recorded
     * in the history.
     *
     * @return The started event ID, or <code>null</code> if unset.
     */
    public Long getStartedEventId() {
        return startedEventId;
    }

    /**
     * Sets the ID of the <code>LambdaFunctionStarted</code> event recorded in
     * the history.
     *
     * @param startedEventId The started event ID.
     */
    public void setStartedEventId(Long startedEventId) {
        this.startedEventId = startedEventId;
    }

    /**
     * Fluent variant of {@link #setStartedEventId(Long)}.
     *
     * @param startedEventId The started event ID.
     * @return This object, for method chaining.
     */
    public LambdaFunctionFailedEventAttributes withStartedEventId(Long startedEventId) {
        setStartedEventId(startedEventId);
        return this;
    }

    /**
     * Returns the reason provided for the failure (if any).
     * <p>
     * <b>Constraints:</b> length 0 - 256.
     *
     * @return The failure reason, or <code>null</code> if unset.
     */
    public String getReason() {
        return reason;
    }

    /**
     * Sets the reason provided for the failure (if any).
     * <p>
     * <b>Constraints:</b> length 0 - 256.
     *
     * @param reason The failure reason.
     */
    public void setReason(String reason) {
        this.reason = reason;
    }

    /**
     * Fluent variant of {@link #setReason(String)}.
     *
     * @param reason The failure reason.
     * @return This object, for method chaining.
     */
    public LambdaFunctionFailedEventAttributes withReason(String reason) {
        setReason(reason);
        return this;
    }

    /**
     * Returns the details of the failure (if any).
     * <p>
     * <b>Constraints:</b> length 0 - 32768.
     *
     * @return The failure details, or <code>null</code> if unset.
     */
    public String getDetails() {
        return details;
    }

    /**
     * Sets the details of the failure (if any).
     * <p>
     * <b>Constraints:</b> length 0 - 32768.
     *
     * @param details The failure details.
     */
    public void setDetails(String details) {
        this.details = details;
    }

    /**
     * Fluent variant of {@link #setDetails(String)}.
     *
     * @param details The failure details.
     * @return This object, for method chaining.
     */
    public LambdaFunctionFailedEventAttributes withDetails(String details) {
        setDetails(details);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging. Only non-null attributes are included.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder text = new StringBuilder();
        text.append("{");
        if (getScheduledEventId() != null) {
            text.append("ScheduledEventId: ").append(getScheduledEventId()).append(",");
        }
        if (getStartedEventId() != null) {
            text.append("StartedEventId: ").append(getStartedEventId()).append(",");
        }
        if (getReason() != null) {
            text.append("Reason: ").append(getReason()).append(",");
        }
        if (getDetails() != null) {
            text.append("Details: ").append(getDetails());
        }
        text.append("}");
        return text.toString();
    }

    @Override
    public int hashCode() {
        // Keep the classic prime-31 accumulation so hash values match the
        // generated SDK implementation exactly.
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode + nullSafeHash(getScheduledEventId());
        hashCode = prime * hashCode + nullSafeHash(getStartedEventId());
        hashCode = prime * hashCode + nullSafeHash(getReason());
        hashCode = prime * hashCode + nullSafeHash(getDetails());
        return hashCode;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof is false for null, so no separate null check is needed.
        if (!(obj instanceof LambdaFunctionFailedEventAttributes)) {
            return false;
        }
        LambdaFunctionFailedEventAttributes that = (LambdaFunctionFailedEventAttributes) obj;
        return nullSafeEquals(getScheduledEventId(), that.getScheduledEventId())
                && nullSafeEquals(getStartedEventId(), that.getStartedEventId())
                && nullSafeEquals(getReason(), that.getReason())
                && nullSafeEquals(getDetails(), that.getDetails());
    }

    @Override
    public LambdaFunctionFailedEventAttributes clone() {
        try {
            return (LambdaFunctionFailedEventAttributes) super.clone();
        } catch (CloneNotSupportedException e) {
            // Unreachable: this class implements Cloneable.
            throw new IllegalStateException(
                    "Got a CloneNotSupportedException from Object.clone() "
                    + "even though we're Cloneable!", e);
        }
    }

    // Null-tolerant equality: both null, or a.equals(b).
    private static boolean nullSafeEquals(Object a, Object b) {
        return (a == null) ? (b == null) : a.equals(b);
    }

    // Null-tolerant hash: 0 for null, matching the generated code's ternary.
    private static int nullSafeHash(Object o) {
        return (o == null) ? 0 : o.hashCode();
    }
}